
Commit 1da6e1d

stevenh and alexmojaki authored
Record response when streaming OpenAI Responses API (#1457)
Co-authored-by: Alex Hall <[email protected]>
1 parent c820c78 commit 1da6e1d
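
In short: when the OpenAI Responses API is called with stream=True, the instrumentation now accumulates the streamed events (via the new OpenaiResponsesStreamState introduced below) so that the completed response is recorded on the span once the stream finishes. A minimal usage sketch, assuming a configured Logfire project and an OpenAI API key; the model and prompt are illustrative:

import logfire
import openai

logfire.configure()

client = openai.Client()
logfire.instrument_openai(client)

# A streaming Responses API call; with this change the streamed events are
# collected and the final response is recorded on the instrumentation span.
stream = client.responses.create(
    model='gpt-4.1',
    input='What is the weather like in Paris today?',
    stream=True,
)
for event in stream:
    ...  # consume events as they arrive; the stream ends with `response.completed`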

File tree

5 files changed: +481 -89 lines changed


logfire/_internal/integrations/llm_providers/openai.py

Lines changed: 30 additions & 8 deletions

@@ -6,6 +6,7 @@
 
 import openai
 from openai._legacy_response import LegacyAPIResponse
+from openai.lib.streaming.responses import ResponseStreamState
 from openai.types.chat.chat_completion import ChatCompletion
 from openai.types.chat.chat_completion_chunk import ChatCompletionChunk
 from openai.types.completion import Completion
@@ -54,15 +55,21 @@ def get_endpoint_config(options: FinalRequestOptions) -> EndpointConfig:
         if is_current_agent_span('Responses API', 'Responses API with {gen_ai.request.model!r}'):
             return EndpointConfig(message_template='', span_data={})
 
+        span_data: dict[str, Any] = {
+            'gen_ai.request.model': json_data['model'],
+        }
+        if json_data.get('stream'):  # type: ignore
+            span_data['request_data'] = json_data
+        else:
+            span_data['events'] = inputs_to_events(
+                json_data['input'],  # type: ignore
+                json_data.get('instructions'),  # type: ignore
+            )
+
         return EndpointConfig(
             message_template='Responses API with {gen_ai.request.model!r}',
-            span_data={
-                'gen_ai.request.model': json_data['model'],
-                'events': inputs_to_events(
-                    json_data['input'],  # type: ignore
-                    json_data.get('instructions'),  # type: ignore
-                ),
-            },
+            span_data=span_data,
+            stream_state_cls=OpenaiResponsesStreamState,
         )
     elif url == '/completions':
         return EndpointConfig(
@@ -81,7 +88,7 @@ def get_endpoint_config(options: FinalRequestOptions) -> EndpointConfig:
             span_data={'request_data': json_data, 'gen_ai.request.model': json_data['model']},
         )
     else:
-        span_data: dict[str, Any] = {'request_data': json_data, 'url': url}
+        span_data = {'request_data': json_data, 'url': url}
         if 'model' in json_data:
             span_data['gen_ai.request.model'] = json_data['model']
         return EndpointConfig(
@@ -119,6 +126,21 @@ def get_response_data(self) -> Any:
         return {'combined_chunk_content': ''.join(self._content), 'chunk_count': len(self._content)}
 
 
+class OpenaiResponsesStreamState(StreamState):
+    def __init__(self):
+        self._state = ResponseStreamState(input_tools=openai.omit, text_format=openai.omit)
+
+    def record_chunk(self, chunk: Any) -> None:
+        self._state.handle_event(chunk)
+
+    def get_response_data(self) -> Any:
+        response = self._state._completed_response  # pyright: ignore[reportPrivateUsage]
+        if not response:  # pragma: no cover
+            raise RuntimeError("Didn't receive a `response.completed` event.")
+
+        return response
+
+
 try:
     # ChatCompletionStreamState only exists in openai>=1.40.0
     from openai.lib.streaming.chat._completions import ChatCompletionStreamState
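
To make the new pieces concrete, this is roughly how the stream state is driven. It is a sketch only: the wiring that feeds chunks into the state via stream_state_cls lives in the shared instrumentation code and is not part of this diff, and `stream` below stands in for the sequence of Responses API events.

state = OpenaiResponsesStreamState()
for event in stream:
    state.record_chunk(event)   # delegates to ResponseStreamState.handle_event()
# Once a `response.completed` event has been seen, the accumulated response is
# available; otherwise get_response_data() raises.
response = state.get_response_data()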

tests/conftest.py

Lines changed: 33 additions & 2 deletions

@@ -186,6 +186,37 @@ def multiple_credentials(tmp_path: Path) -> Path:
     return auth_file
 
 
+SENSITIVE_HEADERS = [
+    # All sensitive headers should be lower case as
+    # that's how they are presented by VCR.
+    'authorization',
+    'cookie',
+    'set-cookie',
+    'x-goog-api-key',
+    'openai-organization',
+    'openai-project',
+    'x-request-id',
+    'cf-ray',
+]
+
+
+def scrub_headers(response: dict[str, Any]) -> dict[str, Any]:
+    """Remove sensitive headers from the response.
+
+    Parameters:
+        response: The response dictionary to scrub.
+
+    Returns:
+        The scrubbed response dictionary with sensitive headers removed.
+    """
+    response['headers'] = {k: v for k, v in response.get('headers', {}).items() if k.lower() not in SENSITIVE_HEADERS}
+
+    return response
+
+
 @pytest.fixture(scope='module')
-def vcr_config():
-    return {'filter_headers': ['authorization', 'cookie', 'Set-Cookie', 'x-goog-api-key']}
+def vcr_config() -> dict[str, Any]:
+    return {
+        'filter_headers': SENSITIVE_HEADERS,
+        'before_record_response': scrub_headers,
+    }
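
For illustration, here is what the new before_record_response hook does to a recorded interaction. The dictionary below is made up, and header names are matched case-insensitively thanks to the k.lower() call:

response = {
    'headers': {
        'Content-Type': ['application/json'],
        'openai-organization': ['example-org'],  # sensitive: dropped
        'x-request-id': ['req_123'],             # sensitive: dropped
    },
    'status': {'code': 200},
}
assert scrub_headers(response)['headers'] == {'Content-Type': ['application/json']}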

tests/otel_integrations/cassettes/test_openai/test_responses_api.yaml

Lines changed: 57 additions & 70 deletions

@@ -1,7 +1,7 @@
 interactions:
 - request:
-    body: "{\"input\":[{\"role\":\"user\",\"content\":\"What is the weather like in
-      Paris today?\"}],\"model\":\"gpt-4.1\",\"instructions\":\"Be nice\",\"tools\":[{\"type\":\"function\",\"name\":\"get_weather\",\"description\":\"Get
+    body: "{\"input\":\"What is the weather like in Paris today?\",\"instructions\":\"Be
+      nice\",\"model\":\"gpt-4.1\",\"tools\":[{\"type\":\"function\",\"name\":\"get_weather\",\"description\":\"Get
       current temperature for a given location.\",\"parameters\":{\"type\":\"object\",\"properties\":{\"location\":{\"type\":\"string\",\"description\":\"City
       and country e.g. Bogot\xE1, Colombia\"}},\"required\":[\"location\"],\"additionalProperties\":false}}]}"
     headers:
@@ -12,13 +12,13 @@ interactions:
       connection:
       - keep-alive
       content-length:
-      - '420'
+      - '392'
       content-type:
       - application/json
       host:
       - api.openai.com
       user-agent:
-      - OpenAI/Python 1.82.0
+      - OpenAI/Python 1.109.1
       x-stainless-arch:
       - arm64
       x-stainless-async:
@@ -28,52 +28,45 @@ interactions:
       x-stainless-os:
       - MacOS
       x-stainless-package-version:
-      - 1.82.0
+      - 1.109.1
       x-stainless-read-timeout:
       - '600'
       x-stainless-retry-count:
       - '0'
       x-stainless-runtime:
       - CPython
       x-stainless-runtime-version:
-      - 3.12.6
+      - 3.12.9
     method: POST
     uri: https://api.openai.com/v1/responses
   response:
     body:
       string: !!binary |
-        H4sIAAAAAAAAA3xUTY/bOAy951cIOmcCO8nk69gCbVH00BbYPXRTGIxEe7QjSy5FTTsY5L8vpCS2
-        k073Yth8ftTjI8WXiRDSaLkTkjB01WqzWJe1wsV9rTbltjysV/Viq7Eo14fFSi+Koi7WsEGlarXY
-        KpDTlMAf/kXFlyTeBTzFFSEw6goSVq6Xm+VyUWxWGQsMHEPiKN92Fhn1iXQA9diQjy6pqsEGzGEk
-        8iR3wkVrc8C4C7HSyGBsuEYDU1RsvMuHvEHhjDrrauFX5SN3kSv2j+iumK3XaBOl6fhuOSvv5sX8
-        /q5Y3pXLc7WZKXfin4kQQrzkZ29jrXoTVb1aJhO3uC22y3qt9Fxv5wd41cScgp87zEmiy9IrBdYO
-        8J88yyBQE1t0nPGXvbReQcqxl7u9/AxkwlS8I3AK9/I40NIJ1Ul6ft3OH8uPmy+PH+DH39/evfff
-        /irh09PX9cBw0GaRDXL1E4EfkGTGjhMhvmeLOiCwFm3F3ttcRFLFFE+97AifjI+huozLSUDfAkII
-        3hnXyN3ZXol17YlHPyU7YtsCPZ+DEyGOp8lCejIKKzaYBkZqrCFaluex84RjLYxthwQcc7icFefo
-        Lx4Orz21MHyPGpX/64s/nX+q+cGncdsJCZG97IHw+9zcNn1wWmNQZLoc3An5HlmoSISOxUi2qD0J
-        EI15QicuXZ/9f796MHWqRUYKo/JGos5XezogHfkOiQ1eM4ToJ+4mPsoWmFJXp9foTZlvDT8LcFoo
-        Hx3Ts8BZMxNvfON5H4sCy6l4661vDwbkKNGxfz+O1BL+iIZQ97bfau2D30cs0NokFOzncbV5G01u
-        DslF5e2XZurmIrDvqq68VxRdb5LUJsDBXhZfDNDgMHTGXe2n1f309/ho870MF1o9oB6IxWQkV96u
-        vXL9GvBa3v5S/ik1ewY7gJt5fyNiwKvV3SKDBoaU/jg5/gcAAP//AwBBmSDhhQYAAA==
+        H4sIAAAAAAAAA3xUy47jNhC8+ysInj2GpPh929lNckqwQA4BEi+EFtnSMEORWrLpXWPgfw9I2Xp4
+        Hzepi12s7i7224IxriQ/Mu7Qd2X2ywF3aym32yrPi0ORZdu9rDHPNsW+2u/zg9gXMi/y9f6wy/L1
+        Ybvmy0hhq/9Q0J3GGo99XDgEQllCxPLd5rDJis1hlzBPQMHHHGHbTiOh7JMqEK+Ns8FEXTVoj31Y
+        aa1Mw4/sbcEYY7yDC7qYL/GM2nbo+IKxazqMztmImaB1Cihzv6WUSKC0n6OeXBCkrEmKnpEZJW5F
+        tPC1tIG6QCXZVzSzzAiStboUoOecrZWoI1nT0dN6lT8VWbF5ytZP+b1piZMf2b+pnr6qYR61+Mk0
+        RFWv0zTkBqv9WkK12cC+PiTiREKXDhNNMKmspG+Ef9T8BIJrQouGEv524toKiBwnfjzxj+CUX7Lf
+        HBiBJ34d0+INZS8+fQal//nr1+Lzu/DHu+LPvwXtig/PX+R2zDDQJpENUvkFgV76ETJ2XTD2KTWp
+        Awdao543mVzoTdE5PCsbfHn3XS9gGELnbNtRKUC8YPmKlynmELw1M0thXVtHk0OxVaFtwd0zB4d5
+        qJEupZJoSNUKZ27z6M5KYEnq7tAagiZ+M751OC2CsO3QAYUUzlfZLfqVRmW1dS2M/5MJp3N9126K
+        z+gq6xVFzbxFqUI7voy+jy822vvIOASyfAD8t258NNI4PYleONWl4JHx35GYCM6hITapiNXWMWCN
+        OqNhdyetfu6BAYzTb5HQ+UnlE1G3vbMckc7FRUAK5xmMDS5+iE/YPLnohuUcfSjzvaILAyOZsMGQ
+        uzBcNSv2bBtLp5BlmC/Ze6ttWyngE6Lr8H2dqHX4OSiHcmj7o9Yh+GmSBVKqiIL+OK02rcrFwyWp
+        qLSao90eHhfZrtS26ZytIkE2BLupD10wQ+e4VB4qfV/VwUODo0mVmS3J7Wb5bXyyft/GzSFeUI6J
+        2czOj7s3330P+B7v8MJ/RE2WQI/gvhieSfDzF90igQSCSH9dXP8HAAD//wMAl3wlJTkHAAA=
     headers:
-      CF-RAY:
-      - 946e7e67de3ddf1a-JNB
       Connection:
       - keep-alive
       Content-Encoding:
       - gzip
       Content-Type:
       - application/json
       Date:
-      - Wed, 28 May 2025 14:38:07 GMT
+      - Fri, 03 Oct 2025 14:43:18 GMT
       Server:
       - cloudflare
-      Set-Cookie:
-      - __cf_bm=K.2f_Ec4Dafi9GQWf3x4es8GXSoEO_bgT7U4CjGi5Ss-1748443087-1.0.1.1-YZRtgk19Vn46L4lNNSLnubSabnBA3_L3.1k8qFpqzRLm1BGESaenng7i2EQdNzy4kbPOrmK5gDBpQ3bgN2gGYiUec5nJdiMoki35p9dG060;
-        path=/; expires=Wed, 28-May-25 15:08:07 GMT; domain=.api.openai.com; HttpOnly;
-        Secure; SameSite=None
-      - _cfuvid=XgEwvrVxz73nhFuqsHDjyuHQwVIWMuU6XgQEDg2dUbA-1748443087163-0.0.1.1-604800000;
-        path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
       Transfer-Encoding:
       - chunked
       X-Content-Type-Options:
@@ -82,34 +75,32 @@ interactions:
       - h3=":443"; ma=86400
       cf-cache-status:
       - DYNAMIC
-      openai-organization:
-      - pydantic-28gund
       openai-processing-ms:
-      - '855'
+      - '816'
       openai-version:
       - '2020-10-01'
       strict-transport-security:
       - max-age=31536000; includeSubDomains; preload
+      x-envoy-upstream-service-time:
+      - '819'
       x-ratelimit-limit-requests:
-      - '10000'
+      - '5000'
      x-ratelimit-limit-tokens:
-      - '2000000'
+      - '450000'
       x-ratelimit-remaining-requests:
-      - '9999'
+      - '4999'
       x-ratelimit-remaining-tokens:
-      - '1999718'
+      - '449718'
       x-ratelimit-reset-requests:
-      - 6ms
+      - 12ms
       x-ratelimit-reset-tokens:
-      - 8ms
-      x-request-id:
-      - req_82a8e571d6bacbdca1f76e0acf26f1bf
+      - 37ms
     status:
       code: 200
       message: OK
 - request:
     body: '{"input":[{"role":"user","content":"What is the weather like in Paris today?"},{"arguments":"{\"location\":\"Paris,
-      France\"}","call_id":"call_92k1J8QkHaqVZFGoZU1aLvR7","name":"get_weather","type":"function_call","id":"fc_68371fcecf6481919e9094f7cd2d92ba00f07a8eccfc39ca","status":"completed"},{"type":"function_call_output","call_id":"call_92k1J8QkHaqVZFGoZU1aLvR7","output":"Rainy"}],"model":"gpt-4.1"}'
+      France\"}","call_id":"call_uilZSE2qAuMA2NWct72DBwd6","name":"get_weather","type":"function_call","id":"fc_039e74dd66b112920068dfe105cbf4819c8d5eb84dab55a8f9","status":"completed"},{"type":"function_call_output","call_id":"call_uilZSE2qAuMA2NWct72DBwd6","output":"Rainy"}],"model":"gpt-4.1"}'
     headers:
       accept:
       - application/json
@@ -118,13 +109,13 @@ interactions:
       connection:
       - keep-alive
       content-length:
-      - '404'
+      - '406'
       content-type:
       - application/json
       host:
       - api.openai.com
       user-agent:
-      - OpenAI/Python 1.82.0
+      - OpenAI/Python 1.109.1
       x-stainless-arch:
       - arm64
       x-stainless-async:
@@ -134,43 +125,41 @@ interactions:
       x-stainless-os:
       - MacOS
       x-stainless-package-version:
-      - 1.82.0
+      - 1.109.1
       x-stainless-read-timeout:
       - '600'
       x-stainless-retry-count:
       - '0'
       x-stainless-runtime:
       - CPython
       x-stainless-runtime-version:
-      - 3.12.6
+      - 3.12.9
     method: POST
     uri: https://api.openai.com/v1/responses
   response:
     body:
       string: !!binary |
-        H4sIAAAAAAAAAwAAAP//dFRNb9pAEL3zK6Y+h8jGBgz/oFIPPfSWVtZ4PYZt1jvW7mwSFPHfK6+N
-        gTS5IHhv5u18vOF9AZDoJtlD4sj31abMt1mr2nVdpGW2y2qVteUG63WTt1tcrdK0TbdYklKtyncK
-        k4dBgOu/pOQiwtbTiCtHKNRUOHDZtiiLIk/LbeS8oAQ/5CjuekNCzZhUo3o+OA52qKpF4ynC5By7
-        ZA82GBMBbS+JVUOC2vh71osLSjTbO7zDt4qD9EEq4Wf6QHJDZijp0MuyeMyWq3S1XqbFMiumRmNm
-        soenBQDAe/ycJ9j5wzxAlY8DLDeb1SbdqfWqzVW92X46wKghp56iCnmPB7oSX00qkoqtkL2WdFvW
-        neylaXqTOTsGoLUseBnU0587MobvIfl1JHgllCM50BZ+otMehBs8gfbgUNvTI3xv4cThd1il2c4R
-        9Aat1fYAwnBg4CAP0LAdeYGW3YFkyHAQutqRMfgNfpBAR/Bs+RV01ANL1EDHjmDaM7ADOdKgQAp9
-        lIqApTeBll6hwZN/TOZGztO3ubfEsYlTQe+1F7QyBg+BMSjp0aExZCphNpVCE+0lLoxu7B29aA6+
-        uhi+ihaYneQIPQ/NJ/tpHQm1LTu5CRpWG7oO3WkCFwDn8TbIvWhFlWgaLJ801GIw494SL+zothah
-        rieHEiKcPaYTGjc3Pd6y6/D6+8YXMW5ufnx/7PnIWo1DCsLJTFxdkgj3VX/7pgtWRSvFqrXH2lzO
-        OkRTzwVpe3eCRf7wP35z13PZCtWRmmtiOpY+ZX+87CL9jPhMd17YV9LCguZKlvk8reDp7o+pI8EG
-        BQf58+P/DwAA//8DAGjwVuhjBQAA
+        H4sIAAAAAAAAA3RUy3LbMAy8+ytYXXKJM5Lf9h/01kNvmY4GIiGZDUVwSNCNJuN/74iyZKtNbhIW
+        WALYJT8WQmRaZSeReQyuzNdH3G+U2u2qolgdV3m+O6gai3x32FebQ3GUVV7J3fpQHKGSCtfb7Lmn
+        oOo3Sh5pyAYc4tIjMKoSeqzYb4/bfLU9HhIWGDiGvkZS6wwyqqGoAvnWeIq276sGE3AIa2O0bbKT
+        +FgIIUTmoEPf1yu8oCGHPlsIcU3J6D31mI3GpIC24ymlQgZtwhwN7KNkTXYWb+G9pMgucsn0hv+D
+        TGRKCWZO15JC03fWOF5uXorlKl9tl/lmWWxu+0qc2Um8plGGgSYp2tB8rcQ+X1eHXgmA/UrJvCgq
+        3Bcg14k5sXDnMPFgCNDgHfhq5QmUZBntvanHxma040LwnafqlADWEsO4xNdfM9BQ4zxVnyCJ6CSy
+        n2cUfxD4jF5oK36A10EwKeiEDsKDtt2L+F6LjuKTR+EMWKttI5hEQ4IiPwtF9olFTb5BFmBFbCuP
+        xsC3bDrwevuaesg8mTQXhKADg+UhuU9MSZkDD8agmavNPg7GdB4vmmIoR++XScbJDc5T67iUIM9Y
+        vmH3iHmEQHZma6xr8vyQ1AsX2xb8WDm5PECN3JVaoWVda5w5PqC/aIkl6/GW1BDNIFkWmDw+DsHY
+        OvTAMYWLl/wWTdLcOqvJt3D/f7BEyhu2duv4gr6ioLkbjKh0bO+3c9jjmbQcFh+Zsgm4OyRjcuWD
+        b/Ip6B579NHK5Lo0pQ5QmfEpicn/0wDazm7yZv38f/zheZjGTNKpe2E+G/XfB2JVfAZ8xjup/xU1
+        E4M5g7vNtMIY5mq3yKCAoae/Lq5/AQAA//8DAInpJWzZBQAA
     headers:
-      CF-RAY:
-      - 946e7e6f1a1fdf1a-JNB
       Connection:
       - keep-alive
       Content-Encoding:
       - gzip
       Content-Type:
       - application/json
       Date:
-      - Wed, 28 May 2025 14:38:08 GMT
+      - Fri, 03 Oct 2025 14:43:19 GMT
       Server:
       - cloudflare
       Transfer-Encoding:
@@ -181,28 +170,26 @@ interactions:
       - h3=":443"; ma=86400
       cf-cache-status:
       - DYNAMIC
-      openai-organization:
-      - pydantic-28gund
       openai-processing-ms:
-      - '1276'
+      - '1023'
       openai-version:
       - '2020-10-01'
       strict-transport-security:
       - max-age=31536000; includeSubDomains; preload
+      x-envoy-upstream-service-time:
+      - '1025'
       x-ratelimit-limit-requests:
-      - '10000'
+      - '5000'
       x-ratelimit-limit-tokens:
-      - '2000000'
+      - '450000'
       x-ratelimit-remaining-requests:
-      - '9999'
+      - '4999'
       x-ratelimit-remaining-tokens:
-      - '1999938'
+      - '449937'
       x-ratelimit-reset-requests:
-      - 6ms
+      - 12ms
       x-ratelimit-reset-tokens:
-      - 1ms
-      x-request-id:
-      - req_4a16ad9f9da83f8def54db828cce54ff
+      - 8ms
     status:
       code: 200
       message: OK

0 commit comments
