[doom] Add local Ollama configuration to gptel
Configure gptel to use a local Ollama backend with the specified host and models.
This commit is contained in:
@@ -80,7 +80,11 @@
|
||||
|
||||
;; Configure gptel once the package has loaded:
;;  - pull the OpenAI API key from the rbw (Bitwarden CLI) password store
;;  - register a local Ollama server as an additional backend
(after! gptel
  ;; NOTE(review): `after!' has no :config keyword (that is use-package
  ;; syntax); a bare keyword here self-evaluates and is a harmless no-op.
  ;; Kept for parity with the original; consider removing.
  :config
  ;; API key is fetched at load time via the rbw helper rather than being
  ;; stored in plain text in this file.
  (setq! gptel-api-key (my/get-rbw-password "openai-api-key-chatgpt-el"))
  ;; Local Ollama backend. `gptel-make-ollama' registers the backend under
  ;; the name "Ollama-Local" so it is selectable from gptel's menu.
  (gptel-make-ollama "Ollama-Local"
    :host "localhost:11434"   ; Ollama's default listen address/port
    :stream t                 ; stream tokens as they arrive
    :models '(deepseek-r1 deepseek-r1-fullctx qwen3 qwen3-coder)))
|
||||
|
||||
(use-package! claude-code-ide
|
||||
:defer t
|
||||
|
||||
Reference in New Issue
Block a user