From 9e30417a7a39cea2d62fd72dd42f0735ee671657 Mon Sep 17 00:00:00 2001
From: Wen-Tien Chang
Date: Tue, 16 Sep 2025 19:56:15 +0800
Subject: [PATCH] fix: include reasoning.encrypted_content by default when
 store=False

---
 src/agents/models/openai_responses.py |  4 +++
 tests/test_store_response_include.py  | 50 +++++++++++++++++++++++++++
 2 files changed, 54 insertions(+)
 create mode 100644 tests/test_store_response_include.py

diff --git a/src/agents/models/openai_responses.py b/src/agents/models/openai_responses.py
index 9ca2d324f..e0fe5c5c8 100644
--- a/src/agents/models/openai_responses.py
+++ b/src/agents/models/openai_responses.py
@@ -259,6 +259,10 @@ async def _fetch_response(
         include_set: set[str] = set(converted_tools.includes)
         if model_settings.response_include is not None:
             include_set.update(model_settings.response_include)
+
+        if model_settings.store is False and model_settings.response_include is None:
+            include_set.add("reasoning.encrypted_content")
+
         if model_settings.top_logprobs is not None:
             include_set.add("message.output_text.logprobs")
         include = cast(list[ResponseIncludable], list(include_set))
diff --git a/tests/test_store_response_include.py b/tests/test_store_response_include.py
new file mode 100644
index 000000000..91e61067c
--- /dev/null
+++ b/tests/test_store_response_include.py
@@ -0,0 +1,50 @@
+import pytest
+from openai.types.responses.response_usage import InputTokensDetails, OutputTokensDetails
+
+from agents import ModelSettings, ModelTracing, OpenAIResponsesModel
+
+
+class DummyResponses:
+    async def create(self, **kwargs):
+        self.kwargs = kwargs
+
+        class DummyResponse:
+            id = "dummy"
+            output = []
+            usage = type(
+                "Usage",
+                (),
+                {
+                    "input_tokens": 0,
+                    "output_tokens": 0,
+                    "total_tokens": 0,
+                    "input_tokens_details": InputTokensDetails(cached_tokens=0),
+                    "output_tokens_details": OutputTokensDetails(reasoning_tokens=0),
+                },
+            )()
+
+        return DummyResponse()
+
+
+class DummyClient:
+    def __init__(self):
+        self.responses = DummyResponses()
+
+
+@pytest.mark.allow_call_model_methods
+@pytest.mark.asyncio
+async def test_store_false_includes_encrypted_reasoning():
+    client = DummyClient()
+    model = OpenAIResponsesModel(model="gpt-5", openai_client=client)  # type: ignore
+    await model.get_response(
+        system_instructions=None,
+        input="hi",
+        model_settings=ModelSettings(store=False),
+        tools=[],
+        output_schema=None,
+        handoffs=[],
+        tracing=ModelTracing.DISABLED,
+        previous_response_id=None,
+    )
+    include = set(client.responses.kwargs["include"])
+    assert "reasoning.encrypted_content" in include