From 2dc3efa3f6eded6327f427c8e7c9c72bb9a6bc2e Mon Sep 17 00:00:00 2001
From: Xavier Brinon
Date: Sat, 20 Jan 2024 21:59:47 +0000
Subject: [PATCH] add gptel and configuration for Llamafile

---
 init.el | 17 +++++++++++++++++
 1 file changed, 17 insertions(+)

diff --git a/init.el b/init.el
index 82d6ad2..be21e1d 100644
--- a/init.el
+++ b/init.el
@@ -154,4 +154,21 @@
 (add-to-list 'load-path "/home/haqadosch/.opam/ocaml-book/share/emacs/site-lisp")
 (require 'ocp-indent)
 
+;; GPTel with Llamafile config
+;; Llama.cpp offers an OpenAI compatible API
+;; (gptel-make-openai "llama-cpp"      ;Any name
+;;   :stream t                         ;Stream responses
+;;   :protocol "http"
+;;   :host "127.0.0.1:8000"            ;Llama.cpp server location
+;;   :models '("local"))               ;Any names, doesn't matter for Llama
+;; And now set it as the default backend
+;; OPTIONAL configuration
+(setq gptel-default-mode 'org-mode)
+(setq-default gptel-backend (gptel-make-openai "llama-cpp"
+                              :stream t
+                              :host "127.0.0.1:8080"
+                              :protocol "http"
+                              :key "no-key"
+                              :models '("local"))
+              gptel-model "local")
 ;;; init.el ends here
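
As a quick smoke test for the backend this patch configures, the sketch below (not part of the patch itself) starts a Llamafile process from Emacs and sends a one-off prompt through gptel. The binary path and model file name are assumptions, and the --nobrowser flag may vary with the Llamafile version; Llamafile serves an OpenAI-compatible API on 127.0.0.1:8080 by default, which matches the :host above.

;; Minimal sketch, assuming a local llamafile binary; adjust the path to
;; wherever the model actually lives.  Not part of the committed init.el.
(defun my/start-llamafile ()
  "Launch a Llamafile server; it listens on 127.0.0.1:8080 by default."
  (interactive)
  (start-process "llamafile" "*llamafile*"
                 (expand-file-name "~/models/mistral-7b-instruct.llamafile") ; assumed path
                 "--nobrowser"))   ; skip opening the web UI; flag per llamafile docs

;; One-off request through the "llama-cpp" backend, outside any chat buffer.
(gptel-request "Reply with the single word: pong."
  :callback (lambda (response info)
              (if response
                  (message "llama-cpp backend OK: %s" response)
                (message "gptel request failed: %s" (plist-get info :status)))))

If the callback prints the model's reply, the backend registration in the patch is working; interactive use then goes through M-x gptel or gptel-send as usual.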