13 | 13 | from langchain_core.messages import ( |
14 | 14 | AIMessage, |
15 | 15 | MessageLikeRepresentation, |
16 | | - SystemMessage, |
17 | 16 | ToolMessage, |
18 | 17 | ) |
19 | 18 | from langgraph.runtime import Runtime |
@@ -400,126 +399,3 @@ async def mock_handler(req: ModelRequest) -> AIMessage: |
400 | 399 |
401 | 400 | assert isinstance(calc_tool, ToolMessage) |
402 | 401 | assert calc_tool.content == "[cleared]" |
403 | | - |
404 | | - |
405 | | -# ============================================================================== |
406 | | -# SystemMessage Tests |
407 | | -# ============================================================================== |
408 | | - |
409 | | - |
410 | | -def test_handles_system_message_prompt() -> None: |
411 | | - """Test that middleware handles SystemMessage as system_prompt correctly.""" |
412 | | - tool_call_id = "call-1" |
413 | | - ai_message = AIMessage( |
414 | | - content="", |
415 | | - tool_calls=[{"id": tool_call_id, "name": "search", "args": {}}], |
416 | | - ) |
417 | | - tool_message = ToolMessage(content="12345", tool_call_id=tool_call_id) |
418 | | - |
419 | | - system_prompt = SystemMessage(content="You are a helpful assistant.") |
420 | | - state, request = _make_state_and_request([ai_message, tool_message], system_prompt=None) |
421 | | - # Manually set SystemMessage as system_prompt |
422 | | - request.system_prompt = system_prompt |
423 | | - |
424 | | - middleware = ContextEditingMiddleware( |
425 | | - edits=[ClearToolUsesEdit(trigger=50)], |
426 | | - token_count_method="model", |
427 | | - ) |
428 | | - |
429 | | - def mock_handler(req: ModelRequest) -> AIMessage: |
430 | | - return AIMessage(content="mock response") |
431 | | - |
432 | | - # Call wrap_model_call - should not fail with SystemMessage |
433 | | - middleware.wrap_model_call(request, mock_handler) |
434 | | - |
435 | | - # Request should have processed without errors |
436 | | - assert request.system_prompt == system_prompt |
437 | | - assert isinstance(request.system_prompt, SystemMessage) |
438 | | - |
439 | | - |
440 | | -def test_does_not_double_wrap_system_message() -> None: |
441 | | - """Test that middleware doesn't wrap SystemMessage in another SystemMessage.""" |
442 | | - tool_call_id = "call-1" |
443 | | - ai_message = AIMessage( |
444 | | - content="", |
445 | | - tool_calls=[{"id": tool_call_id, "name": "search", "args": {}}], |
446 | | - ) |
447 | | - tool_message = ToolMessage(content="x" * 100, tool_call_id=tool_call_id) |
448 | | - |
449 | | - system_prompt = SystemMessage(content="Original system prompt") |
450 | | - state, request = _make_state_and_request([ai_message, tool_message], system_prompt=None) |
451 | | - request.system_prompt = system_prompt |
452 | | - |
453 | | - middleware = ContextEditingMiddleware( |
454 | | - edits=[ClearToolUsesEdit(trigger=50)], |
455 | | - token_count_method="model", |
456 | | - ) |
457 | | - |
458 | | - def mock_handler(req: ModelRequest) -> AIMessage: |
459 | | - return AIMessage(content="mock response") |
460 | | - |
461 | | - middleware.wrap_model_call(request, mock_handler) |
462 | | - |
463 | | - # System prompt should still be the same SystemMessage, not wrapped |
464 | | - assert request.system_prompt == system_prompt |
465 | | - assert isinstance(request.system_prompt, SystemMessage) |
466 | | - assert request.system_prompt.content == "Original system prompt" |
467 | | - |
468 | | - |
469 | | -async def test_handles_system_message_prompt_async() -> None: |
470 | | - """Test async version - middleware handles SystemMessage as system_prompt correctly.""" |
471 | | - tool_call_id = "call-1" |
472 | | - ai_message = AIMessage( |
473 | | - content="", |
474 | | - tool_calls=[{"id": tool_call_id, "name": "search", "args": {}}], |
475 | | - ) |
476 | | - tool_message = ToolMessage(content="12345", tool_call_id=tool_call_id) |
477 | | - |
478 | | - system_prompt = SystemMessage(content="You are a helpful assistant.") |
479 | | - state, request = _make_state_and_request([ai_message, tool_message], system_prompt=None) |
480 | | - # Manually set SystemMessage as system_prompt |
481 | | - request.system_prompt = system_prompt |
482 | | - |
483 | | - middleware = ContextEditingMiddleware( |
484 | | - edits=[ClearToolUsesEdit(trigger=50)], |
485 | | - token_count_method="model", |
486 | | - ) |
487 | | - |
488 | | - async def mock_handler(req: ModelRequest) -> AIMessage: |
489 | | - return AIMessage(content="mock response") |
490 | | - |
491 | | - # Call awrap_model_call - should not fail with SystemMessage |
492 | | - await middleware.awrap_model_call(request, mock_handler) |
493 | | - |
494 | | - # Request should have processed without errors |
495 | | - assert request.system_prompt == system_prompt |
496 | | - assert isinstance(request.system_prompt, SystemMessage) |
497 | | - |
498 | | - |
499 | | -async def test_does_not_double_wrap_system_message_async() -> None: |
500 | | - """Test async version - middleware doesn't wrap SystemMessage in another SystemMessage.""" |
501 | | - tool_call_id = "call-1" |
502 | | - ai_message = AIMessage( |
503 | | - content="", |
504 | | - tool_calls=[{"id": tool_call_id, "name": "search", "args": {}}], |
505 | | - ) |
506 | | - tool_message = ToolMessage(content="x" * 100, tool_call_id=tool_call_id) |
507 | | - |
508 | | - system_prompt = SystemMessage(content="Original system prompt") |
509 | | - state, request = _make_state_and_request([ai_message, tool_message], system_prompt=None) |
510 | | - request.system_prompt = system_prompt |
511 | | - |
512 | | - middleware = ContextEditingMiddleware( |
513 | | - edits=[ClearToolUsesEdit(trigger=50)], |
514 | | - token_count_method="model", |
515 | | - ) |
516 | | - |
517 | | - async def mock_handler(req: ModelRequest) -> AIMessage: |
518 | | - return AIMessage(content="mock response") |
519 | | - |
520 | | - await middleware.awrap_model_call(request, mock_handler) |
521 | | - |
522 | | - # System prompt should still be the same SystemMessage, not wrapped |
523 | | - assert request.system_prompt == system_prompt |
524 | | - assert isinstance(request.system_prompt, SystemMessage) |
525 | | - assert request.system_prompt.content == "Original system prompt" |