
Commit aff0082

add response_id to StreamedResponse
1 parent 6610c66 commit aff0082

File tree

2 files changed: +20 -0 lines changed

pydantic_ai_slim/pydantic_ai/models/__init__.py

Lines changed: 6 additions & 0 deletions
@@ -609,6 +609,7 @@ def get(self) -> ModelResponse:
             timestamp=self.timestamp,
             usage=self.usage(),
             provider_name=self.provider_name,
+            provider_response_id=self.provider_response_id,
         )
 
     def usage(self) -> RequestUsage:

@@ -627,6 +628,11 @@ def provider_name(self) -> str | None:
         """Get the provider name."""
         raise NotImplementedError()
 
+    @property
+    def provider_response_id(self) -> str | None:
+        """Get the provider response id."""
+        return None
+
     @property
     @abstractmethod
     def timestamp(self) -> datetime:
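
With this default in place, get() copies provider_response_id onto the final ModelResponse, and it stays None for providers that do not override the property. A minimal consumption sketch, illustrative and not part of this commit: response_id_after_stream is a hypothetical helper, and it assumes a StreamedResponse can be iterated asynchronously over its stream events, as the concrete implementations below suggest.

from pydantic_ai.models import StreamedResponse


async def response_id_after_stream(streamed: StreamedResponse) -> str | None:
    # Drain the stream of ModelResponseStreamEvents.
    async for _event in streamed:
        pass
    # get() now carries provider_response_id onto the final ModelResponse;
    # it is None unless the concrete response class overrides the property.
    return streamed.get().provider_response_id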

pydantic_ai_slim/pydantic_ai/models/openai.py

Lines changed: 14 additions & 0 deletions
@@ -532,6 +532,7 @@ async def _process_streamed_response(
             _response=peekable_response,
             _timestamp=number_to_datetime(first_chunk.created),
             _provider_name=self._provider.name,
+            _provider_response_id=first_chunk.id,
         )
 
     def _get_tools(self, model_request_parameters: ModelRequestParameters) -> list[chat.ChatCompletionToolParam]:
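
The value captured here is the id that every streamed Chat Completions chunk repeats. A sketch of where that id comes from, using the openai SDK directly rather than pydantic_ai; the model name and prompt are placeholders, and an OPENAI_API_KEY in the environment is assumed.

from openai import AsyncOpenAI


async def first_chat_chunk_id() -> str:
    client = AsyncOpenAI()
    stream = await client.chat.completions.create(
        model='gpt-4o-mini',  # placeholder model name
        messages=[{'role': 'user', 'content': 'hi'}],
        stream=True,
    )
    async for chunk in stream:
        # Every chunk repeats the same response id (e.g. 'chatcmpl-...');
        # this is what becomes _provider_response_id above.
        return chunk.id
    raise RuntimeError('stream produced no chunks')
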
@@ -847,6 +848,7 @@ async def _process_streamed_response(
             _response=peekable_response,
             _timestamp=number_to_datetime(first_chunk.response.created_at),
             _provider_name=self._provider.name,
+            _provider_response_id=first_chunk.response.id,
         )
 
     @overload
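
The Responses API stream carries the id on the event payload instead, via first_chunk.response as shown above. A comparable sketch, with the same caveats (placeholder model name, OPENAI_API_KEY assumed); it relies on the first streamed event being the response-created event whose payload has an id such as 'resp_...'.

from openai import AsyncOpenAI


async def first_responses_event_id() -> str:
    client = AsyncOpenAI()
    stream = await client.responses.create(
        model='gpt-4o-mini',  # placeholder model name
        input='hi',
        stream=True,
    )
    async for event in stream:
        # The first event exposes the created response object,
        # carrying the id and created_at used in the diff above.
        return event.response.id
    raise RuntimeError('stream produced no events')
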
@@ -1161,6 +1163,7 @@ class OpenAIStreamedResponse(StreamedResponse):
     _response: AsyncIterable[ChatCompletionChunk]
     _timestamp: datetime
     _provider_name: str
+    _provider_response_id: str
 
     async def _get_event_iterator(self) -> AsyncIterator[ModelResponseStreamEvent]:
         async for chunk in self._response:

@@ -1209,6 +1212,11 @@ def provider_name(self) -> str:
         """Get the provider name."""
         return self._provider_name
 
+    @property
+    def provider_response_id(self) -> str:
+        """Get the provider response id."""
+        return self._provider_response_id
+
     @property
     def timestamp(self) -> datetime:
         """Get the timestamp of the response."""

@@ -1223,6 +1231,7 @@ class OpenAIResponsesStreamedResponse(StreamedResponse):
     _response: AsyncIterable[responses.ResponseStreamEvent]
     _timestamp: datetime
     _provider_name: str
+    _provider_response_id: str
 
     async def _get_event_iterator(self) -> AsyncIterator[ModelResponseStreamEvent]:  # noqa: C901
         async for chunk in self._response:

@@ -1348,6 +1357,11 @@ def provider_name(self) -> str:
         """Get the provider name."""
         return self._provider_name
 
+    @property
+    def provider_response_id(self) -> str:
+        """Get the provider response id."""
+        return self._provider_response_id
+
     @property
     def timestamp(self) -> datetime:
         """Get the timestamp of the response."""

0 commit comments
