
Commit 63097db

fix(ollama): exclude None parameters from options dictionary (#33208)
1 parent eaa6dcc commit 63097db

2 files changed: 125 additions, 21 deletions

libs/partners/ollama/langchain_ollama/chat_models.py

Lines changed: 24 additions & 20 deletions
@@ -741,26 +741,30 @@ def _chat_params(
         if self.stop is not None:
             stop = self.stop
 
-        options_dict = kwargs.pop(
-            "options",
-            {
-                "mirostat": self.mirostat,
-                "mirostat_eta": self.mirostat_eta,
-                "mirostat_tau": self.mirostat_tau,
-                "num_ctx": self.num_ctx,
-                "num_gpu": self.num_gpu,
-                "num_thread": self.num_thread,
-                "num_predict": self.num_predict,
-                "repeat_last_n": self.repeat_last_n,
-                "repeat_penalty": self.repeat_penalty,
-                "temperature": self.temperature,
-                "seed": self.seed,
-                "stop": self.stop if stop is None else stop,
-                "tfs_z": self.tfs_z,
-                "top_k": self.top_k,
-                "top_p": self.top_p,
-            },
-        )
+        options_dict = kwargs.pop("options", None)
+        if options_dict is None:
+            # Only include parameters that are explicitly set (not None)
+            options_dict = {
+                k: v
+                for k, v in {
+                    "mirostat": self.mirostat,
+                    "mirostat_eta": self.mirostat_eta,
+                    "mirostat_tau": self.mirostat_tau,
+                    "num_ctx": self.num_ctx,
+                    "num_gpu": self.num_gpu,
+                    "num_thread": self.num_thread,
+                    "num_predict": self.num_predict,
+                    "repeat_last_n": self.repeat_last_n,
+                    "repeat_penalty": self.repeat_penalty,
+                    "temperature": self.temperature,
+                    "seed": self.seed,
+                    "stop": self.stop if stop is None else stop,
+                    "tfs_z": self.tfs_z,
+                    "top_k": self.top_k,
+                    "top_p": self.top_p,
+                }.items()
+                if v is not None
+            }
 
         params = {
             "messages": ollama_messages,

libs/partners/ollama/tests/unit_tests/test_chat_models.py

Lines changed: 101 additions & 1 deletion
@@ -24,7 +24,7 @@
 
 @contextmanager
 def _mock_httpx_client_stream(
-    *args: Any, **kwargs: Any
+    *_args: Any, **_kwargs: Any
 ) -> Generator[Response, Any, Any]:
     yield Response(
         status_code=200,

@@ -310,3 +310,103 @@ def test_load_response_with_actual_content_is_not_skipped(
     assert result.content == "This is actual content"
     assert result.response_metadata.get("done_reason") == "load"
     assert not caplog.text
+
+
+def test_none_parameters_excluded_from_options() -> None:
+    """Test that None parameters are excluded from the options dict sent to Ollama."""
+    response = [
+        {
+            "model": "test-model",
+            "created_at": "2025-01-01T00:00:00.000000000Z",
+            "done": True,
+            "done_reason": "stop",
+            "message": {"role": "assistant", "content": "Hello!"},
+        }
+    ]
+
+    with patch("langchain_ollama.chat_models.Client") as mock_client_class:
+        mock_client = MagicMock()
+        mock_client_class.return_value = mock_client
+        mock_client.chat.return_value = response
+
+        # Create ChatOllama with only num_ctx set
+        llm = ChatOllama(model="test-model", num_ctx=4096)
+        llm.invoke([HumanMessage("Hello")])
+
+        # Verify that chat was called
+        assert mock_client.chat.called
+
+        # Get the options dict that was passed to chat
+        call_kwargs = mock_client.chat.call_args[1]
+        options = call_kwargs.get("options", {})
+
+        # Only num_ctx should be in options, not None parameters
+        assert "num_ctx" in options
+        assert options["num_ctx"] == 4096
+
+        # These parameters should NOT be in options since they were None
+        assert "mirostat" not in options
+        assert "mirostat_eta" not in options
+        assert "mirostat_tau" not in options
+        assert "tfs_z" not in options
+
+
+def test_all_none_parameters_results_in_empty_options() -> None:
+    """Test that when all parameters are None, options dict is empty."""
+    response = [
+        {
+            "model": "test-model",
+            "created_at": "2025-01-01T00:00:00.000000000Z",
+            "done": True,
+            "done_reason": "stop",
+            "message": {"role": "assistant", "content": "Hello!"},
+        }
+    ]
+
+    with patch("langchain_ollama.chat_models.Client") as mock_client_class:
+        mock_client = MagicMock()
+        mock_client_class.return_value = mock_client
+        mock_client.chat.return_value = response
+
+        # Create ChatOllama with no parameters set
+        llm = ChatOllama(model="test-model")
+        llm.invoke([HumanMessage("Hello")])
+
+        # Get the options dict that was passed to chat
+        call_kwargs = mock_client.chat.call_args[1]
+        options = call_kwargs.get("options", {})
+
+        # Options should be empty when no parameters are set
+        assert options == {}
+
+
+def test_explicit_options_dict_preserved() -> None:
+    """Test that explicitly provided options dict is preserved and not filtered."""
+    response = [
+        {
+            "model": "test-model",
+            "created_at": "2025-01-01T00:00:00.000000000Z",
+            "done": True,
+            "done_reason": "stop",
+            "message": {"role": "assistant", "content": "Hello!"},
+        }
+    ]
+
+    with patch("langchain_ollama.chat_models.Client") as mock_client_class:
+        mock_client = MagicMock()
+        mock_client_class.return_value = mock_client
+        mock_client.chat.return_value = response
+
+        llm = ChatOllama(model="test-model")
+        # Pass explicit options dict, including None values
+        llm.invoke(
+            [HumanMessage("Hello")],
+            options={"temperature": 0.5, "custom_param": None},
+        )
+
+        # Get the options dict that was passed to chat
+        call_kwargs = mock_client.chat.call_args[1]
+        options = call_kwargs.get("options", {})
+
+        # Explicit options should be preserved as-is
+        assert options == {"temperature": 0.5, "custom_param": None}
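To run just the new tests locally, a pytest invocation along these lines should work from the repository root (the file path comes from the diff above; the "-k options" keyword selection and the use of pytest's Python API are assumptions about the usual workflow, not part of this commit):

    # Illustrative only: select the three option-filtering tests by keyword.
    import pytest

    pytest.main([
        "libs/partners/ollama/tests/unit_tests/test_chat_models.py",
        "-k", "options",
        "-q",
    ])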
