@@ -210,31 +210,56 @@ async def test_generate_str(self, mock_llm, default_usage):
@pytest.mark.asyncio
async def test_generate_structured(self, mock_llm, default_usage):
    """
    Tests structured output generation using the native Anthropic API.

    Instead of routing through Instructor, the implementation under test asks
    the model to call a synthetic `return_structured_output` tool; the tool's
    `input` payload is then validated into the requested Pydantic model. This
    test fakes the streaming client so no network call is made.

    Args:
        mock_llm: fixture providing a mocked AnthropicAugmentedLLM instance.
        default_usage: fixture providing a stock `Usage` object for the
            fabricated API response.
    """
    # Local import keeps the patch helper scoped to this test.
    from unittest.mock import patch

    # Define a simple response model the LLM output must be parsed into.
    class TestResponseModel(BaseModel):
        name: str
        value: int

    # Fabricate the tool_use content block carrying the structured payload —
    # this is what the real API returns when the model "calls" the
    # structured-output tool.
    tool_use_block = ToolUseBlock(
        type="tool_use",
        id="tool_123",
        name="return_structured_output",
        input={"name": "Test", "value": 42},
    )

    # Wrap the block in a complete assistant Message, stop_reason="tool_use"
    # signalling that the structured payload is present.
    mock_message = Message(
        type="message",
        id="msg_123",
        role="assistant",
        content=[tool_use_block],
        model="claude-3-7-sonnet-latest",
        stop_reason="tool_use",
        usage=default_usage,
    )

    # Patch AsyncAnthropic where the implementation imports it. The client's
    # `messages.stream(...)` returns an async context manager whose
    # `get_final_message()` yields our fabricated Message, so both
    # `async with client` and `async with client.messages.stream(...)`
    # resolve without touching the network.
    with patch(
        "mcp_agent.workflows.llm.augmented_llm_anthropic.AsyncAnthropic"
    ) as MockAsyncAnthropic:
        mock_client = MockAsyncAnthropic.return_value
        mock_stream = AsyncMock()
        mock_stream.get_final_message = AsyncMock(return_value=mock_message)
        mock_stream.__aenter__ = AsyncMock(return_value=mock_stream)
        mock_stream.__aexit__ = AsyncMock(return_value=None)
        mock_client.messages.stream = MagicMock(return_value=mock_stream)
        mock_client.__aenter__ = AsyncMock(return_value=mock_client)
        mock_client.__aexit__ = AsyncMock(return_value=None)

        # Call the method under test (unbound, with the mocked LLM as self).
        result = await AnthropicAugmentedLLM.generate_structured(
            mock_llm, "Test query", TestResponseModel
        )

        # The tool input must round-trip into the requested model unchanged.
        assert isinstance(result, TestResponseModel)
        assert result.name == "Test"
        assert result.value == 42
238263
239264 # Test 4: With History
240265 @pytest .mark .asyncio
@@ -779,6 +804,8 @@ async def test_generate_structured_with_mixed_message_types(self, mock_llm):
779804 """
780805 Tests generate_structured() method with mixed message types.
781806 """
807+ from unittest .mock import patch
808+ import json
782809
783810 # Define a simple response model
784811 class TestResponseModel (BaseModel ):
@@ -795,19 +822,51 @@ class TestResponseModel(BaseModel):
795822 ),
796823 ]
797824
798- mock_llm .generate_str = AsyncMock (return_value = "name: MixedTypes, value: 123" )
799- # Patch executor.execute to return the expected TestResponseModel instance
800- mock_llm .executor .execute = AsyncMock (
801- return_value = TestResponseModel (name = "MixedTypes" , value = 123 )
825+ # Create a mock Message with tool_use block containing the structured data
826+ tool_use_block = ToolUseBlock (
827+ type = "tool_use" ,
828+ id = "tool_456" ,
829+ name = "return_structured_output" ,
830+ input = {"name" : "MixedTypes" , "value" : 123 },
802831 )
803832
804- # Call generate_structured with mixed message types
805- result = await mock_llm .generate_structured (messages , TestResponseModel )
833+ mock_message = Message (
834+ type = "message" ,
835+ id = "msg_456" ,
836+ role = "assistant" ,
837+ content = [tool_use_block ],
838+ model = "claude-3-7-sonnet-latest" ,
839+ stop_reason = "tool_use" ,
840+ usage = Usage (
841+ cache_creation_input_tokens = 0 ,
842+ cache_read_input_tokens = 0 ,
843+ input_tokens = 100 ,
844+ output_tokens = 50 ,
845+ server_tool_use = None ,
846+ service_tier = None ,
847+ ),
848+ )
806849
807- # Assertions
808- assert isinstance (result , TestResponseModel )
809- assert result .name == "MixedTypes"
810- assert result .value == 123
850+ # Mock the AsyncAnthropic client and streaming
851+ with patch (
852+ "mcp_agent.workflows.llm.augmented_llm_anthropic.AsyncAnthropic"
853+ ) as MockAsyncAnthropic :
854+ mock_client = MockAsyncAnthropic .return_value
855+ mock_stream = AsyncMock ()
856+ mock_stream .get_final_message = AsyncMock (return_value = mock_message )
857+ mock_stream .__aenter__ = AsyncMock (return_value = mock_stream )
858+ mock_stream .__aexit__ = AsyncMock (return_value = None )
859+ mock_client .messages .stream = MagicMock (return_value = mock_stream )
860+ mock_client .__aenter__ = AsyncMock (return_value = mock_client )
861+ mock_client .__aexit__ = AsyncMock (return_value = None )
862+
863+ # Call generate_structured with mixed message types
864+ result = await mock_llm .generate_structured (messages , TestResponseModel )
865+
866+ # Assertions
867+ assert isinstance (result , TestResponseModel )
868+ assert result .name == "MixedTypes"
869+ assert result .value == 123
811870
812871 # Test 25: System Prompt Not None in API Call
813872 @pytest .mark .asyncio
0 commit comments