[doom] Add Ollama local configuration to gptel

Configure gptel to use a local Ollama instance with the specified host and models.
2025-09-08 08:39:34 -07:00
parent 14cdee1468
commit 671dc229de


@@ -80,7 +80,11 @@
 (after! gptel
   :config
-  (setq! gptel-api-key (my/get-rbw-password "openai-api-key-chatgpt-el")))
+  (setq! gptel-api-key (my/get-rbw-password "openai-api-key-chatgpt-el"))
+  (gptel-make-ollama "Ollama-Local"
+    :host "localhost:11434"
+    :stream t
+    :models '(deepseek-r1 deepseek-r1-fullctx qwen3 qwen3-coder)))
 
 (use-package! claude-code-ide
   :defer t
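
Note: the commit only registers the "Ollama-Local" backend; gptel still defaults to its configured OpenAI backend. A minimal sketch of an optional follow-up, based on gptel's documented Ollama setup and not part of this commit, is shown below. It assumes the listed models have already been pulled locally (e.g. with `ollama pull qwen3`).

    ;; Optional follow-up (not in this commit): make the local Ollama
    ;; backend the default for new gptel sessions.  `gptel-make-ollama'
    ;; returns a backend object that can be assigned to `gptel-backend'.
    (setq gptel-model 'qwen3   ; assumed default model, already pulled via ollama
          gptel-backend (gptel-make-ollama "Ollama-Local"
                          :host "localhost:11434"
                          :stream t
                          :models '(deepseek-r1 deepseek-r1-fullctx qwen3 qwen3-coder)))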