Skip to content

Commit 7a02bde

Browse files
fix(openai): Attach response model with streamed Completions API (#5557)
Add the `gen_ai.response.model` attribute in the patch for the streaming Completions API.
1 parent 09675e3 commit 7a02bde

File tree

2 files changed

+12
-0
lines changed

2 files changed

+12
-0
lines changed

sentry_sdk/integrations/openai.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -612,6 +612,8 @@ def _set_streaming_completions_api_output_data(
         def new_iterator() -> "Iterator[ChatCompletionChunk]":
             nonlocal ttft
             for x in old_iterator:
+                span.set_data(SPANDATA.GEN_AI_RESPONSE_MODEL, x.model)
+
                 with capture_internal_exceptions():
                     if hasattr(x, "choices"):
                         choice_index = 0
@@ -654,6 +656,8 @@ def new_iterator() -> "Iterator[ChatCompletionChunk]":
         async def new_iterator_async() -> "AsyncIterator[ChatCompletionChunk]":
             nonlocal ttft
             async for x in old_iterator:
+                span.set_data(SPANDATA.GEN_AI_RESPONSE_MODEL, x.model)
+
                 with capture_internal_exceptions():
                     if hasattr(x, "choices"):
                         choice_index = 0

tests/integrations/openai/test_openai.py

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -511,6 +511,8 @@ def test_streaming_chat_completion_no_prompts(
     span = tx["spans"][0]
     assert span["op"] == "gen_ai.chat"

+    assert span["data"][SPANDATA.GEN_AI_RESPONSE_MODEL] == "model-id"
+
     assert SPANDATA.GEN_AI_SYSTEM_INSTRUCTIONS not in span["data"]
     assert SPANDATA.GEN_AI_REQUEST_MESSAGES not in span["data"]
     assert SPANDATA.GEN_AI_RESPONSE_TEXT not in span["data"]
@@ -656,6 +658,8 @@ def test_streaming_chat_completion(sentry_init, capture_events, messages, request):
         },
     ]

+    assert span["data"][SPANDATA.GEN_AI_RESPONSE_MODEL] == "model-id"
+
     assert "hello" in span["data"][SPANDATA.GEN_AI_REQUEST_MESSAGES]
     assert "hello world" in span["data"][SPANDATA.GEN_AI_RESPONSE_TEXT]

@@ -762,6 +766,8 @@ async def test_streaming_chat_completion_async_no_prompts(
     span = tx["spans"][0]
     assert span["op"] == "gen_ai.chat"

+    assert span["data"][SPANDATA.GEN_AI_RESPONSE_MODEL] == "model-id"
+
     assert SPANDATA.GEN_AI_SYSTEM_INSTRUCTIONS not in span["data"]
     assert SPANDATA.GEN_AI_REQUEST_MESSAGES not in span["data"]
     assert SPANDATA.GEN_AI_RESPONSE_TEXT not in span["data"]
@@ -897,6 +903,8 @@ async def test_streaming_chat_completion_async(
     span = tx["spans"][0]
     assert span["op"] == "gen_ai.chat"

+    assert span["data"][SPANDATA.GEN_AI_RESPONSE_MODEL] == "model-id"
+
     param_id = request.node.callspec.id
     if "blocks" in param_id:
         assert json.loads(span["data"][SPANDATA.GEN_AI_SYSTEM_INSTRUCTIONS]) == [

0 commit comments

Comments (0)