add gptel and configuration for Llamafile

This commit is contained in:
Xavier Brinon 2024-01-20 21:59:47 +00:00
parent 84d1937284
commit 2dc3efa3f6

17
init.el
View File

@@ -154,4 +154,21 @@
(add-to-list 'load-path "/home/haqadosch/.opam/ocaml-book/share/emacs/site-lisp")
(require 'ocp-indent)
;; --- GPTel + local Llamafile -------------------------------------------
;; Llama.cpp exposes an OpenAI-compatible HTTP API, so gptel's generic
;; OpenAI backend can talk to it directly, e.g.:
;;   (gptel-make-openai "llama-cpp"        ; any name
;;     :stream t                           ; stream responses
;;     :protocol "http"
;;     :host "127.0.0.1:8000"              ; llama.cpp server location
;;     :models '("local"))                 ; model name is ignored by llama
;;
;; Chat buffers open in Org mode by default.
(setq gptel-default-mode 'org-mode)

;; Register the local server and make it the default backend/model.
;; setq-default is used (not setq) so buffer-local values elsewhere
;; are left alone.
;; NOTE(review): :key "no-key" looks like a placeholder since the local
;; server presumably does not authenticate — confirm gptel needs it.
(setq-default gptel-backend
              (gptel-make-openai "llama-cpp"
                :stream t                  ; stream partial responses
                :host "127.0.0.1:8080"     ; local llama.cpp server
                :protocol "http"
                :key "no-key"
                :models '("local")))
(setq-default gptel-model "local")
;;; init.el ends here