Skip to content

Commit 4f4afd5

Browse files
authored
openai-v2: add test for chat completions create as streaming context manager (#4035)
1 parent f5a90ad commit 4f4afd5

File tree

2 files changed

+193
-0
lines changed

2 files changed

+193
-0
lines changed
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,141 @@
1+
interactions:
2+
- request:
3+
body: |-
4+
{
5+
"messages": [
6+
{
7+
"role": "user",
8+
"content": "Say this is a test"
9+
}
10+
],
11+
"model": "gpt-4o-mini",
12+
"stream": true,
13+
"stream_options": {
14+
"include_usage": true
15+
}
16+
}
17+
headers:
18+
Accept:
19+
- application/json
20+
Accept-Encoding:
21+
- gzip, deflate
22+
Connection:
23+
- keep-alive
24+
Content-Length:
25+
- '148'
26+
Content-Type:
27+
- application/json
28+
Host:
29+
- api.openai.com
30+
User-Agent:
31+
- OpenAI/Python 1.109.1
32+
X-Stainless-Arch:
33+
- x64
34+
X-Stainless-Async:
35+
- 'false'
36+
X-Stainless-Lang:
37+
- python
38+
X-Stainless-OS:
39+
- Linux
40+
X-Stainless-Package-Version:
41+
- 1.109.1
42+
X-Stainless-Runtime:
43+
- CPython
44+
X-Stainless-Runtime-Version:
45+
- 3.12.12
46+
authorization:
47+
- Bearer test_openai_api_key
48+
x-stainless-read-timeout:
49+
- '600'
50+
x-stainless-retry-count:
51+
- '0'
52+
method: POST
53+
uri: https://api.openai.com/v1/chat/completions
54+
response:
55+
body:
56+
string: |+
57+
data: {"id":"chatcmpl-CnMGGNmSdY4yYxO9NmV7cSVxXZ5ci","object":"chat.completion.chunk","created":1765879680,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_644f11dd4d","choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"yKkTGHRZw"}
58+
59+
data: {"id":"chatcmpl-CnMGGNmSdY4yYxO9NmV7cSVxXZ5ci","object":"chat.completion.chunk","created":1765879680,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_644f11dd4d","choices":[{"index":0,"delta":{"content":"This"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"bFd3AyW"}
60+
61+
data: {"id":"chatcmpl-CnMGGNmSdY4yYxO9NmV7cSVxXZ5ci","object":"chat.completion.chunk","created":1765879680,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_644f11dd4d","choices":[{"index":0,"delta":{"content":" is"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"LqISoMXL"}
62+
63+
data: {"id":"chatcmpl-CnMGGNmSdY4yYxO9NmV7cSVxXZ5ci","object":"chat.completion.chunk","created":1765879680,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_644f11dd4d","choices":[{"index":0,"delta":{"content":" a"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"mUrlylOKo"}
64+
65+
data: {"id":"chatcmpl-CnMGGNmSdY4yYxO9NmV7cSVxXZ5ci","object":"chat.completion.chunk","created":1765879680,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_644f11dd4d","choices":[{"index":0,"delta":{"content":" test"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"nGqQio"}
66+
67+
data: {"id":"chatcmpl-CnMGGNmSdY4yYxO9NmV7cSVxXZ5ci","object":"chat.completion.chunk","created":1765879680,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_644f11dd4d","choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"gUhadpfoFR"}
68+
69+
data: {"id":"chatcmpl-CnMGGNmSdY4yYxO9NmV7cSVxXZ5ci","object":"chat.completion.chunk","created":1765879680,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_644f11dd4d","choices":[{"index":0,"delta":{"content":" How"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"atLjmXM"}
70+
71+
data: {"id":"chatcmpl-CnMGGNmSdY4yYxO9NmV7cSVxXZ5ci","object":"chat.completion.chunk","created":1765879680,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_644f11dd4d","choices":[{"index":0,"delta":{"content":" can"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"Ac67p8z"}
72+
73+
data: {"id":"chatcmpl-CnMGGNmSdY4yYxO9NmV7cSVxXZ5ci","object":"chat.completion.chunk","created":1765879680,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_644f11dd4d","choices":[{"index":0,"delta":{"content":" I"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"tl4DibKXq"}
74+
75+
data: {"id":"chatcmpl-CnMGGNmSdY4yYxO9NmV7cSVxXZ5ci","object":"chat.completion.chunk","created":1765879680,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_644f11dd4d","choices":[{"index":0,"delta":{"content":" assist"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"p5ei"}
76+
77+
data: {"id":"chatcmpl-CnMGGNmSdY4yYxO9NmV7cSVxXZ5ci","object":"chat.completion.chunk","created":1765879680,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_644f11dd4d","choices":[{"index":0,"delta":{"content":" you"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"97qAT5t"}
78+
79+
data: {"id":"chatcmpl-CnMGGNmSdY4yYxO9NmV7cSVxXZ5ci","object":"chat.completion.chunk","created":1765879680,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_644f11dd4d","choices":[{"index":0,"delta":{"content":" further"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"ZK4"}
80+
81+
data: {"id":"chatcmpl-CnMGGNmSdY4yYxO9NmV7cSVxXZ5ci","object":"chat.completion.chunk","created":1765879680,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_644f11dd4d","choices":[{"index":0,"delta":{"content":"?"},"logprobs":null,"finish_reason":null}],"usage":null,"obfuscation":"5iGjeBvFWd"}
82+
83+
data: {"id":"chatcmpl-CnMGGNmSdY4yYxO9NmV7cSVxXZ5ci","object":"chat.completion.chunk","created":1765879680,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_644f11dd4d","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}],"usage":null,"obfuscation":"SdS98"}
84+
85+
data: {"id":"chatcmpl-CnMGGNmSdY4yYxO9NmV7cSVxXZ5ci","object":"chat.completion.chunk","created":1765879680,"model":"gpt-4o-mini-2024-07-18","service_tier":"default","system_fingerprint":"fp_644f11dd4d","choices":[],"usage":{"prompt_tokens":12,"completion_tokens":12,"total_tokens":24,"prompt_tokens_details":{"cached_tokens":0,"audio_tokens":0},"completion_tokens_details":{"reasoning_tokens":0,"audio_tokens":0,"accepted_prediction_tokens":0,"rejected_prediction_tokens":0}},"obfuscation":"parVcHlnsA"}
86+
87+
data: [DONE]
88+
89+
headers:
90+
CF-RAY:
91+
- 9aed607a4c274be7-MXP
92+
Connection:
93+
- keep-alive
94+
Content-Type:
95+
- text/event-stream; charset=utf-8
96+
Date:
97+
- Tue, 16 Dec 2025 10:08:00 GMT
98+
Server:
99+
- cloudflare
100+
Set-Cookie: test_set_cookie
101+
Strict-Transport-Security:
102+
- max-age=31536000; includeSubDomains; preload
103+
Transfer-Encoding:
104+
- chunked
105+
X-Content-Type-Options:
106+
- nosniff
107+
access-control-expose-headers:
108+
- X-Request-ID
109+
alt-svc:
110+
- h3=":443"; ma=86400
111+
cf-cache-status:
112+
- DYNAMIC
113+
openai-organization: test_openai_org_id
114+
openai-processing-ms:
115+
- '274'
116+
openai-project:
117+
- proj_Pf1eM5R55Z35wBy4rt8PxAGq
118+
openai-version:
119+
- '2020-10-01'
120+
x-envoy-upstream-service-time:
121+
- '618'
122+
x-openai-proxy-wasm:
123+
- v0.1
124+
x-ratelimit-limit-requests:
125+
- '10000'
126+
x-ratelimit-limit-tokens:
127+
- '10000000'
128+
x-ratelimit-remaining-requests:
129+
- '9999'
130+
x-ratelimit-remaining-tokens:
131+
- '9999993'
132+
x-ratelimit-reset-requests:
133+
- 6ms
134+
x-ratelimit-reset-tokens:
135+
- 0s
136+
x-request-id:
137+
- req_50a8341c02184cbd8b364182eb40ed38
138+
status:
139+
code: 200
140+
message: OK
141+
version: 1

instrumentation-genai/opentelemetry-instrumentation-openai-v2/tests/test_chat_completions.py

Lines changed: 52 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -957,6 +957,58 @@ def test_chat_completion_with_content_span_unsampled(
957957
assert logs[0].log_record.trace_flags == logs[1].log_record.trace_flags
958958

959959

960+
@pytest.mark.vcr()
def test_chat_completion_with_context_manager_streaming(
    span_exporter, log_exporter, openai_client, instrument_with_content
):
    """Exercise ``create(stream=True)`` used via the context-manager
    protocol and verify the resulting span attributes plus the two
    content-capture logs (user message and assistant choice)."""
    requested_model = "gpt-4o-mini"
    prompt_messages = [{"role": "user", "content": "Say this is a test"}]

    parts = []
    stream_usage = stream_model = stream_id = None
    with openai_client.chat.completions.create(
        messages=prompt_messages,
        model=requested_model,
        stream=True,
        stream_options={"include_usage": True},
    ) as stream:
        for chunk in stream:
            if chunk.choices:
                parts.append(chunk.choices[0].delta.content or "")
            # The final chunk (empty choices) carries the usage payload.
            if getattr(chunk, "usage", None):
                stream_usage = chunk.usage
                stream_model = chunk.model
                stream_id = chunk.id
    assembled_content = "".join(parts)

    spans = span_exporter.get_finished_spans()
    assert_all_attributes(
        spans[0],
        requested_model,
        stream_id,
        stream_model,
        stream_usage.prompt_tokens,
        stream_usage.completion_tokens,
        response_service_tier="default",
    )

    logs = log_exporter.get_finished_logs()
    assert len(logs) == 2

    expected_user_event = {"content": prompt_messages[0]["content"]}
    assert_message_in_logs(
        logs[0], "gen_ai.user.message", expected_user_event, spans[0]
    )

    expected_choice_event = {
        "index": 0,
        "finish_reason": "stop",
        "message": {
            "role": "assistant",
            "content": assembled_content,
        },
    }
    assert_message_in_logs(
        logs[1], "gen_ai.choice", expected_choice_event, spans[0]
    )
1010+
1011+
9601012
def chat_completion_multiple_tools_streaming(
9611013
span_exporter, log_exporter, openai_client, expect_content
9621014
):

0 commit comments

Comments
 (0)