|
24 | 24 |
|
25 | 25 | @contextmanager
|
26 | 26 | def _mock_httpx_client_stream(
|
27 |
| - *args: Any, **kwargs: Any |
| 27 | + *_args: Any, **_kwargs: Any |
28 | 28 | ) -> Generator[Response, Any, Any]:
|
29 | 29 | yield Response(
|
30 | 30 | status_code=200,
|
@@ -310,3 +310,103 @@ def test_load_response_with_actual_content_is_not_skipped(
|
310 | 310 | assert result.content == "This is actual content"
|
311 | 311 | assert result.response_metadata.get("done_reason") == "load"
|
312 | 312 | assert not caplog.text
|
| 313 | + |
| 314 | + |
def test_none_parameters_excluded_from_options() -> None:
    """Test that None parameters are excluded from the options dict sent to Ollama."""
    # Canned non-streaming chat reply the mocked client will yield.
    response = [
        {
            "model": "test-model",
            "created_at": "2025-01-01T00:00:00.000000000Z",
            "done": True,
            "done_reason": "stop",
            "message": {"role": "assistant", "content": "Hello!"},
        }
    ]

    with patch("langchain_ollama.chat_models.Client") as mock_client_class:
        mock_client = MagicMock()
        mock_client_class.return_value = mock_client
        mock_client.chat.return_value = response

        # Create ChatOllama with only num_ctx set; all other tunables stay None.
        llm = ChatOllama(model="test-model", num_ctx=4096)
        llm.invoke([HumanMessage("Hello")])

        # Stronger than `.called`: also fails if chat was invoked more than once.
        mock_client.chat.assert_called_once()

        # Inspect the options dict forwarded to the Ollama client
        # (named accessor is clearer than positional call_args[1]).
        options = mock_client.chat.call_args.kwargs.get("options", {})

        # Only num_ctx should be in options, not None parameters.
        assert "num_ctx" in options
        assert options["num_ctx"] == 4096

        # These parameters should NOT be in options since they were None.
        assert "mirostat" not in options
        assert "mirostat_eta" not in options
        assert "mirostat_tau" not in options
        assert "tfs_z" not in options
| 352 | + |
| 353 | + |
def test_all_none_parameters_results_in_empty_options() -> None:
    """Test that when all parameters are None, options dict is empty."""
    chat_payload = [
        {
            "model": "test-model",
            "created_at": "2025-01-01T00:00:00.000000000Z",
            "done": True,
            "done_reason": "stop",
            "message": {"role": "assistant", "content": "Hello!"},
        }
    ]

    with patch("langchain_ollama.chat_models.Client") as client_cls:
        fake_client = MagicMock()
        client_cls.return_value = fake_client
        fake_client.chat.return_value = chat_payload

        # No tunables supplied at construction time.
        llm = ChatOllama(model="test-model")
        llm.invoke([HumanMessage("Hello")])

        # With nothing configured, the options dict sent to the client
        # must contain no entries at all.
        _, call_kwargs = fake_client.chat.call_args
        assert call_kwargs.get("options", {}) == {}
| 381 | + |
| 382 | + |
def test_explicit_options_dict_preserved() -> None:
    """Test that explicitly provided options dict is preserved and not filtered."""
    chat_payload = [
        {
            "model": "test-model",
            "created_at": "2025-01-01T00:00:00.000000000Z",
            "done": True,
            "done_reason": "stop",
            "message": {"role": "assistant", "content": "Hello!"},
        }
    ]

    with patch("langchain_ollama.chat_models.Client") as client_cls:
        fake_client = MagicMock()
        client_cls.return_value = fake_client
        fake_client.chat.return_value = chat_payload

        # An options dict passed at call time — including None values —
        # should reach the client untouched (no None filtering applied).
        explicit = {"temperature": 0.5, "custom_param": None}
        llm = ChatOllama(model="test-model")
        llm.invoke([HumanMessage("Hello")], options=explicit)

        _, call_kwargs = fake_client.chat.call_args
        assert call_kwargs.get("options", {}) == {"temperature": 0.5, "custom_param": None}
0 commit comments