From 882c8b72adc56c8e7e577e5fa6fe9b297c46f2e7 Mon Sep 17 00:00:00 2001
From: Tim Felgentreff
Date: Wed, 11 Jun 2025 16:09:47 +0200
Subject: [PATCH] Allow streamed responses with Ollama tools

As of https://github.com/ollama/ollama/releases/tag/v0.8.0, Ollama allows
streaming responses with tool calls.

---
 gptel-ollama.el | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/gptel-ollama.el b/gptel-ollama.el
index a4443033..1fa193f1 100644
--- a/gptel-ollama.el
+++ b/gptel-ollama.el
@@ -111,8 +111,7 @@ Store response metadata in state INFO."
     (when (and gptel-use-tools gptel-tools)
       ;; TODO(tool): Find out how to force tool use for Ollama
       (plist-put prompts-plist :tools
-                 (gptel--parse-tools backend gptel-tools))
-      (plist-put prompts-plist :stream :json-false))
+                 (gptel--parse-tools backend gptel-tools)))
     ;; if the temperature and max-tokens aren't set as
     ;; backend/model-specific, use the global settings
     (when (and gptel-temperature (not (plist-get options-plist :temperature)))
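
For context (not part of the applied patch), here is a minimal sketch of the
kind of request plist the Ollama backend ends up serializing to /api/chat once
the :stream override is gone. The model name, message, and "get_weather" tool
below are made-up examples, not taken from gptel or the patch:

;; Illustrative only: rough shape of the data sent to Ollama's /api/chat
;; after this change.  With the `(plist-put prompts-plist :stream :json-false)'
;; call removed, streaming stays enabled even when :tools is present
;; (supported since Ollama v0.8.0).
(list :model "llama3.1"                 ; example model name
      :messages [(:role "user" :content "What is the weather in Berlin?")]
      :stream t                         ; no longer forced to :json-false
      :tools [(:type "function"
               :function (:name "get_weather"   ; hypothetical tool
                          :description "Fetch current weather"
                          :parameters (:type "object"
                                       :properties (:location (:type "string"))
                                       :required ["location"])))])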