
Commit 76494f6

committed
ollama support successfully implemented, but the local model's instruction following with tool usage is weak.
1 parent 08871f6 commit 76494f6

1 file changed: +30 / -6 lines

n0mail/services/provider/ollama_service.py

Lines changed: 30 additions & 6 deletions
@@ -6,6 +6,7 @@
 from typing import List, Dict, Optional, Literal, Union, Iterator, cast
 import json
 import datetime
+import copy  # Import copy for deepcopy
 
 from n0mail import config
 from n0mail.util.text_utils import clean_markdown_for_dense_information
@@ -290,20 +291,43 @@ def get_chat_completion(
     if temperature is not None:
         options['temperature'] = temperature
 
+    # --- Pre-process messages for the Ollama client ---
+    # The ollama client expects the arguments inside tool_calls to be dicts,
+    # but our history (adapted from the OpenAI format) stores them as JSON strings.
+    messages_for_ollama = copy.deepcopy(messages)  # Avoid modifying the original history
+    for message in messages_for_ollama:
+        if message.get('role') == 'assistant' and message.get('tool_calls'):
+            new_tool_calls = []
+            for tool_call in message['tool_calls']:
+                if isinstance(tool_call, dict) and isinstance(tool_call.get('function'), dict):
+                    func = tool_call['function']
+                    if isinstance(func.get('arguments'), str):
+                        try:
+                            # Parse the arguments string back into a dict
+                            func['arguments'] = json.loads(func['arguments'])
+                        except json.JSONDecodeError:
+                            console.print(f"[yellow]Warning (Ollama):[/yellow] Could not parse tool call arguments JSON string in history message: {func.get('arguments')}")
+                            # Keep the arguments as a string if parsing fails, or drop the call?
+                            # Keeping the string may trigger the same validation error again,
+                            # but dropping the call would leave the conversation history inconsistent.
+                            pass  # Keep as string if parsing fails
+                    # Append the (possibly modified) tool_call; we assume the rest of
+                    # its structure is compatible with the Ollama client.
+                    new_tool_calls.append(tool_call)
+                else:
+                    # If the tool_call format is unexpected, keep the original
+                    new_tool_calls.append(tool_call)
+            message['tool_calls'] = new_tool_calls  # Update the message with the processed tool calls
+
     chat_params = {
         "model": model,
-        "messages": messages,
+        "messages": messages_for_ollama,  # Use the pre-processed messages
         "stream": stream,
         "options": options
     }
 
-    # Pass tools if provided - using the OpenAI JSON schema format
     if tools:
         chat_params["tools"] = tools
-        # Note: Ollama python lib might not support explicit 'tool_choice' yet
-        # if tool_choice and tool_choice != "auto":
-        #     chat_params["tool_choice"] = tool_choice
-        # For now, we rely on the model to decide based on the prompt and tools
 
     retries = 0
     delay = initial_delay
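
For context on what the pre-processing step accomplishes, here is a minimal standalone sketch of the same idea. The helper name normalize_tool_call_arguments and the sample history are hypothetical (they do not appear in this commit); the sketch only illustrates how an OpenAI-style assistant message whose function.arguments field is a JSON string gets converted into the dict form the Ollama Python client accepts.

import copy
import json


def normalize_tool_call_arguments(messages):
    """Return a deep copy of `messages` whose tool_call arguments are dicts.

    OpenAI-style chat history serializes `function.arguments` as a JSON string;
    the Ollama Python client expects a mapping, so parse the string first.
    (Hypothetical helper for illustration only, not part of the n0mail codebase.)
    """
    normalized = copy.deepcopy(messages)  # never mutate the caller's history
    for message in normalized:
        if message.get("role") != "assistant" or not message.get("tool_calls"):
            continue
        for tool_call in message["tool_calls"]:
            func = tool_call.get("function") if isinstance(tool_call, dict) else None
            if isinstance(func, dict) and isinstance(func.get("arguments"), str):
                try:
                    func["arguments"] = json.loads(func["arguments"])
                except json.JSONDecodeError:
                    pass  # leave the string in place, mirroring the commit's fallback
    return normalized


# Hypothetical history entry in the OpenAI format (arguments as a JSON string).
history = [
    {
        "role": "assistant",
        "content": "",
        "tool_calls": [
            {
                "function": {
                    "name": "search_email",
                    "arguments": '{"query": "invoices", "limit": 5}',
                }
            }
        ],
    }
]

print(normalize_tool_call_arguments(history)[0]["tool_calls"][0]["function"]["arguments"])
# prints: {'query': 'invoices', 'limit': 5}

Deep-copying before the conversion matches the commit's choice to keep the original history in the OpenAI string format, so other providers and any logging of the raw history are unaffected.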

0 commit comments
