Skip to content

Commit 2b251d9

Browse files
Python: Fix reasoning replay when store=False (#5250)
* Fix reasoning content when store=False
* Remove accidental worktree entries (Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>)
* Remove local session sample
* Remove leftover files
* Add attribution override regression test (Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>)

Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
1 parent 485af07 commit 2b251d9

2 files changed

Lines changed: 301 additions & 30 deletions

File tree

python/packages/openai/agent_framework_openai/_chat_client.py

Lines changed: 65 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -1161,7 +1161,16 @@ async def _prepare_options(
11611161
# First turn: prepend instructions as system message
11621162
messages = prepend_instructions_to_messages(list(messages), instructions, role="system")
11631163
# Continuation turn: instructions already exist in conversation context, skip prepending
1164-
request_input = self._prepare_messages_for_openai(messages)
1164+
request_uses_service_side_storage = False
1165+
for key in ("conversation_id", "previous_response_id", "conversation"):
1166+
value = options.get(key)
1167+
if isinstance(value, str) and value:
1168+
request_uses_service_side_storage = True
1169+
break
1170+
request_input = self._prepare_messages_for_openai(
1171+
messages,
1172+
request_uses_service_side_storage=request_uses_service_side_storage,
1173+
)
11651174
if not request_input:
11661175
raise ChatClientInvalidRequestException("Messages are required for chat completions")
11671176
conversation_id = options.get("conversation_id")
@@ -1235,7 +1244,12 @@ def _check_model_presence(self, options: dict[str, Any]) -> None:
12351244
raise ValueError("model must be a non-empty string")
12361245
options["model"] = self.model
12371246

1238-
def _prepare_messages_for_openai(self, chat_messages: Sequence[Message]) -> list[dict[str, Any]]:
1247+
def _prepare_messages_for_openai(
1248+
self,
1249+
chat_messages: Sequence[Message],
1250+
*,
1251+
request_uses_service_side_storage: bool = True,
1252+
) -> list[dict[str, Any]]:
12391253
"""Prepare the chat messages for a request.
12401254
12411255
Allowing customization of the key names for role/author, and optionally overriding the role.
@@ -1248,66 +1262,91 @@ def _prepare_messages_for_openai(self, chat_messages: Sequence[Message]) -> list
12481262
12491263
Args:
12501264
chat_messages: The chat history to prepare.
1265+
request_uses_service_side_storage: Whether this request continues a service-managed
1266+
response/conversation and can safely reference service-scoped response items.
12511267
12521268
Returns:
12531269
The prepared chat messages for a request.
12541270
"""
1255-
list_of_list = [self._prepare_message_for_openai(message) for message in chat_messages]
1271+
list_of_list = [
1272+
self._prepare_message_for_openai(
1273+
message,
1274+
request_uses_service_side_storage=request_uses_service_side_storage,
1275+
)
1276+
for message in chat_messages
1277+
]
12561278
# Flatten the list of lists into a single list
12571279
return list(chain.from_iterable(list_of_list))
12581280

1259-
@staticmethod
1260-
def _message_replays_provider_context(message: Message) -> bool:
1261-
"""Return whether the message came from provider-attributed replay context.
1262-
1263-
Responses ``fc_id`` values are response-scoped and only valid while replaying
1264-
the same live tool loop. Once a message comes back through a context provider
1265-
(for example, loaded session history), that message is historical input and
1266-
must not reuse the original response-scoped ``fc_id``.
1267-
"""
1268-
additional_properties = getattr(message, "additional_properties", None)
1269-
if not additional_properties:
1270-
return False
1271-
return "_attribution" in additional_properties
1272-
12731281
def _prepare_message_for_openai(
12741282
self,
12751283
message: Message,
1284+
*,
1285+
request_uses_service_side_storage: bool = True,
12761286
) -> list[dict[str, Any]]:
12771287
"""Prepare a chat message for the OpenAI Responses API format."""
12781288
all_messages: list[dict[str, Any]] = []
12791289
args: dict[str, Any] = {
12801290
"type": "message",
12811291
"role": message.role,
12821292
}
1293+
additional_properties = message.additional_properties
1294+
replays_local_storage = "_attribution" in additional_properties
1295+
uses_service_side_storage = request_uses_service_side_storage and not replays_local_storage
12831296
# Reasoning items are only valid in input when they directly preceded a function_call
1284-
# in the same response. Including a reasoning item that preceded a text response
1297+
# in the same response. Including a reasoning item that preceded a text response
12851298
# (i.e. no function_call in the same message) causes an API error:
12861299
# "reasoning was provided without its required following item."
1300+
#
1301+
# Local storage is stricter: response-scoped reasoning items (rs_*) cannot be replayed
1302+
# back to the service unless that message is using service-side storage.
1303+
# In that mode we omit reasoning items and rely on function call + tool output replay.
12871304
has_function_call = any(c.type == "function_call" for c in message.contents)
12881305
for content in message.contents:
12891306
match content.type:
12901307
case "text_reasoning":
1291-
if not has_function_call:
1308+
if not uses_service_side_storage or not has_function_call:
12921309
continue # reasoning not followed by a function_call is invalid in input
1293-
reasoning = self._prepare_content_for_openai(message.role, content, message=message)
1310+
reasoning = self._prepare_content_for_openai(
1311+
message.role,
1312+
content,
1313+
replays_local_storage=replays_local_storage,
1314+
)
12941315
if reasoning:
12951316
all_messages.append(reasoning)
12961317
case "function_result":
12971318
new_args: dict[str, Any] = {}
1298-
new_args.update(self._prepare_content_for_openai(message.role, content, message=message))
1319+
new_args.update(
1320+
self._prepare_content_for_openai(
1321+
message.role,
1322+
content,
1323+
replays_local_storage=replays_local_storage,
1324+
)
1325+
)
12991326
if new_args:
13001327
all_messages.append(new_args)
13011328
case "function_call":
1302-
function_call = self._prepare_content_for_openai(message.role, content, message=message)
1329+
function_call = self._prepare_content_for_openai(
1330+
message.role,
1331+
content,
1332+
replays_local_storage=replays_local_storage,
1333+
)
13031334
if function_call:
13041335
all_messages.append(function_call)
13051336
case "function_approval_response" | "function_approval_request":
1306-
prepared = self._prepare_content_for_openai(message.role, content, message=message)
1337+
prepared = self._prepare_content_for_openai(
1338+
message.role,
1339+
content,
1340+
replays_local_storage=replays_local_storage,
1341+
)
13071342
if prepared:
13081343
all_messages.append(prepared)
13091344
case _:
1310-
prepared_content = self._prepare_content_for_openai(message.role, content, message=message)
1345+
prepared_content = self._prepare_content_for_openai(
1346+
message.role,
1347+
content,
1348+
replays_local_storage=replays_local_storage,
1349+
)
13111350
if prepared_content:
13121351
if "content" not in args:
13131352
args["content"] = []
@@ -1321,7 +1360,7 @@ def _prepare_content_for_openai(
13211360
role: Role | str,
13221361
content: Content,
13231362
*,
1324-
message: Message | None = None,
1363+
replays_local_storage: bool = False,
13251364
) -> dict[str, Any]:
13261365
"""Prepare content for the OpenAI Responses API format."""
13271366
role = Role(role)
@@ -1401,11 +1440,7 @@ def _prepare_content_for_openai(
14011440
logger.warning(f"FunctionCallContent missing call_id for function '{content.name}'")
14021441
return {}
14031442
fc_id = content.call_id
1404-
if (
1405-
message is not None
1406-
and not self._message_replays_provider_context(message)
1407-
and content.additional_properties
1408-
):
1443+
if not replays_local_storage and content.additional_properties:
14091444
live_fc_id = content.additional_properties.get("fc_id")
14101445
if isinstance(live_fc_id, str) and live_fc_id:
14111446
fc_id = live_fc_id

0 commit comments

Comments (0)