From 566b188fb5bafb4f2983a273dc3ad2b8d9d2df09 Mon Sep 17 00:00:00 2001 From: Hassaan Arain <161812835+MuhammadHassaanArain@users.noreply.github.com> Date: Mon, 22 Sep 2025 11:17:15 +0500 Subject: [PATCH 1/3] Enhance function tool schemas with Annotated types (#1777) Co-authored-by: Kazuhiro Sera --- examples/basic/stream_function_call_args.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/examples/basic/stream_function_call_args.py b/examples/basic/stream_function_call_args.py index 3c3538772..e04806169 100644 --- a/examples/basic/stream_function_call_args.py +++ b/examples/basic/stream_function_call_args.py @@ -1,5 +1,5 @@ import asyncio -from typing import Any +from typing import Annotated, Any, Optional from openai.types.responses import ResponseFunctionCallArgumentsDeltaEvent @@ -7,14 +7,18 @@ @function_tool -def write_file(filename: str, content: str) -> str: +def write_file(filename: Annotated[str, "Name of the file"], content: str) -> str: """Write content to a file.""" return f"File {filename} written successfully" @function_tool -def create_config(project_name: str, version: str, dependencies: list[str]) -> str: - """Create a configuration file for a project.""" +def create_config( + project_name: Annotated[str, "Project name"], + version: Annotated[str, "Project version"], + dependencies: Annotated[Optional[list[str]], "Dependencies (list of packages)"], +) -> str: + """Generate a project configuration file.""" return f"Config for {project_name} v{version} created" From e47b47af1b476767e611068ab2cc9a04ab6797fb Mon Sep 17 00:00:00 2001 From: Sung-jin Brian Hong Date: Mon, 22 Sep 2025 15:20:36 +0900 Subject: [PATCH 2/3] fix(chatcmpl): preserve all LiteLLM thinking-block signatures in converter (#1779) --- src/agents/models/chatcmpl_converter.py | 24 ++++++++++------------ tests/test_anthropic_thinking_blocks.py | 27 ++++++++++++++++++++++--- 2 files changed, 35 insertions(+), 16 deletions(-) diff --git 
a/src/agents/models/chatcmpl_converter.py b/src/agents/models/chatcmpl_converter.py index 96f02a5fe..0ece1664b 100644 --- a/src/agents/models/chatcmpl_converter.py +++ b/src/agents/models/chatcmpl_converter.py @@ -107,7 +107,7 @@ def message_to_output_items(cls, message: ChatCompletionMessage) -> list[TRespon if hasattr(message, "thinking_blocks") and message.thinking_blocks: # Store thinking text in content and signature in encrypted_content reasoning_item.content = [] - signature = None + signatures: list[str] = [] for block in message.thinking_blocks: if isinstance(block, dict): thinking_text = block.get("thinking", "") @@ -116,15 +116,12 @@ def message_to_output_items(cls, message: ChatCompletionMessage) -> list[TRespon Content(text=thinking_text, type="reasoning_text") ) # Store the signature if present - if block.get("signature"): - signature = block.get("signature") + if signature := block.get("signature"): + signatures.append(signature) - # Store only the last signature in encrypted_content - # If there are multiple thinking blocks, this should be a problem. - # In practice, there should only be one signature for the entire reasoning step. 
- # Tested with: claude-sonnet-4-20250514 - if signature: - reasoning_item.encrypted_content = signature + # Store the signatures in encrypted_content with newline delimiter + if signatures: + reasoning_item.encrypted_content = "\n".join(signatures) items.append(reasoning_item) @@ -518,7 +515,8 @@ def ensure_assistant_message() -> ChatCompletionAssistantMessageParam: elif reasoning_item := cls.maybe_reasoning_message(item): # Reconstruct thinking blocks from content (text) and encrypted_content (signature) content_items = reasoning_item.get("content", []) - signature = reasoning_item.get("encrypted_content") + encrypted_content = reasoning_item.get("encrypted_content") + signatures = encrypted_content.split("\n") if encrypted_content else [] if content_items and preserve_thinking_blocks: # Reconstruct thinking blocks from content and signature @@ -532,9 +530,9 @@ def ensure_assistant_message() -> ChatCompletionAssistantMessageParam: "type": "thinking", "thinking": content_item.get("text", ""), } - # Add signature if available - if signature: - thinking_block["signature"] = signature + # Add signatures if available + if signatures: + thinking_block["signature"] = signatures.pop(0) pending_thinking_blocks.append(thinking_block) # 8) If we haven't recognized it => fail or ignore diff --git a/tests/test_anthropic_thinking_blocks.py b/tests/test_anthropic_thinking_blocks.py index 933be2c0e..35446efe4 100644 --- a/tests/test_anthropic_thinking_blocks.py +++ b/tests/test_anthropic_thinking_blocks.py @@ -125,7 +125,12 @@ def test_anthropic_thinking_blocks_with_tool_calls(): "Let me use the weather tool to get this information." 
), "signature": "TestSignature123", - } + }, + { + "type": "thinking", + "thinking": ("We should use the city Tokyo as the city."), + "signature": "TestSignature456", + }, ], tool_calls=[ ChatCompletionMessageToolCall( @@ -143,7 +148,7 @@ def test_anthropic_thinking_blocks_with_tool_calls(): reasoning_items = [ item for item in output_items if hasattr(item, "type") and item.type == "reasoning" ] - assert len(reasoning_items) == 1, "Should have exactly one reasoning item" + assert len(reasoning_items) == 1, "Should have exactly one reasoning item" reasoning_item = reasoning_items[0] @@ -159,7 +164,9 @@ def test_anthropic_thinking_blocks_with_tool_calls(): assert hasattr(reasoning_item, "encrypted_content"), ( "Reasoning item should have encrypted_content" ) - assert reasoning_item.encrypted_content == "TestSignature123", "Signature should be preserved" + assert reasoning_item.encrypted_content == "TestSignature123\nTestSignature456", ( + "Signature should be preserved" + ) # Verify tool calls are present tool_call_items = [ @@ -210,6 +217,20 @@ def test_anthropic_thinking_blocks_with_tool_calls(): "Signature should be preserved in thinking block" ) + first_content = content[1] + assert first_content.get("type") == "thinking", ( + f"Second content must be 'thinking' type for Anthropic compatibility, " + f"but got '{first_content.get('type')}'" + ) + expected_thinking = "We should use the city Tokyo as the city." 
+ assert first_content.get("thinking") == expected_thinking, ( + "Thinking content should be preserved" + ) + # Signature should also be preserved + assert first_content.get("signature") == "TestSignature456", ( + "Signature should be preserved in thinking block" + ) + # Verify tool calls are preserved tool_calls = assistant_msg.get("tool_calls", []) assert len(cast(list[Any], tool_calls)) == 1, "Tool calls should be preserved" From d91e39cd3ae9dbb914fa1c8f096d35177ca0b41b Mon Sep 17 00:00:00 2001 From: Hamza Sheikh <164283922+SheikhMuhammadHamza-HS@users.noreply.github.com> Date: Mon, 22 Sep 2025 11:35:11 +0500 Subject: [PATCH 3/3] Update dynamic_system_prompt.py example to use dataclass for simplicity (#1774) Co-authored-by: Kazuhiro Sera --- examples/basic/dynamic_system_prompt.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/examples/basic/dynamic_system_prompt.py b/examples/basic/dynamic_system_prompt.py index 7bcf90c0c..7cd39ab66 100644 --- a/examples/basic/dynamic_system_prompt.py +++ b/examples/basic/dynamic_system_prompt.py @@ -1,13 +1,14 @@ import asyncio import random +from dataclasses import dataclass from typing import Literal from agents import Agent, RunContextWrapper, Runner +@dataclass class CustomContext: - def __init__(self, style: Literal["haiku", "pirate", "robot"]): - self.style = style + style: Literal["haiku", "pirate", "robot"] def custom_instructions( @@ -27,11 +28,9 @@ def custom_instructions( instructions=custom_instructions, ) - async def main(): - choice: Literal["haiku", "pirate", "robot"] = random.choice(["haiku", "pirate", "robot"]) - context = CustomContext(style=choice) - print(f"Using style: {choice}\n") + context = CustomContext(style=random.choice(["haiku", "pirate", "robot"])) + print(f"Using style: {context.style}\n") user_message = "Tell me a joke." 
print(f"User: {user_message}") @@ -43,6 +42,7 @@ async def main(): if __name__ == "__main__": asyncio.run(main()) + """ $ python examples/basic/dynamic_system_prompt.py