
Commit 3096d74
Updated attribute names to be OTel compatible
1 parent: a7b2d67

File tree: 2 files changed, +25 −12 lines

sentry_sdk/consts.py (7 additions, 0 deletions)
@@ -372,6 +372,12 @@ class SPANDATA:
     Example: "chat"
     """
 
+    GEN_AI_RESPONSE_MODEL = "gen_ai.response.model"
+    """
+    Exact model identifier used to generate the response
+    Example: gpt-4o-mini-2024-07-18
+    """
+
     GEN_AI_RESPONSE_TEXT = "gen_ai.response.text"
     """
     The model's response text messages.
@@ -649,6 +655,7 @@ class OP:
     FUNCTION_AWS = "function.aws"
     FUNCTION_GCP = "function.gcp"
     GEN_AI_CHAT = "gen_ai.chat"
+    GEN_AI_EMBEDDINGS = "gen_ai.embeddings"
     GEN_AI_EXECUTE_TOOL = "gen_ai.execute_tool"
     GEN_AI_HANDOFF = "gen_ai.handoff"
     GEN_AI_INVOKE_AGENT = "gen_ai.invoke_agent"
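
The added names track the OpenTelemetry GenAI semantic conventions. As a quick illustration, a minimal sketch using only identifiers and string values that appear in this diff:

```python
from sentry_sdk.consts import OP, SPANDATA

# New, OTel-aligned identifiers introduced by this commit.
assert SPANDATA.GEN_AI_RESPONSE_MODEL == "gen_ai.response.model"
assert OP.GEN_AI_EMBEDDINGS == "gen_ai.embeddings"

# Per the docstring above, a typical value for gen_ai.response.model is an
# exact model identifier such as "gpt-4o-mini-2024-07-18".
```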

sentry_sdk/integrations/openai.py (18 additions, 12 deletions)
@@ -136,8 +136,8 @@ def _new_chat_completion_common(f, *args, **kwargs):
     streaming = kwargs.get("stream")
 
     span = sentry_sdk.start_span(
-        op=consts.OP.OPENAI_CHAT_COMPLETIONS_CREATE,
-        name="Chat Completion",
+        op=consts.OP.GEN_AI_CHAT,
+        name=f"{consts.OP.GEN_AI_CHAT} {model}",
         origin=OpenAIIntegration.origin,
     )
     span.__enter__()
@@ -146,16 +146,16 @@ def _new_chat_completion_common(f, *args, **kwargs):
 
     with capture_internal_exceptions():
         if should_send_default_pii() and integration.include_prompts:
-            set_data_normalized(span, SPANDATA.AI_INPUT_MESSAGES, messages)
+            set_data_normalized(span, SPANDATA.GEN_AI_REQUEST_MESSAGES, messages)
 
-        set_data_normalized(span, SPANDATA.AI_MODEL_ID, model)
+        set_data_normalized(span, SPANDATA.GEN_AI_REQUEST_MODEL, model)
         set_data_normalized(span, SPANDATA.AI_STREAMING, streaming)
 
         if hasattr(res, "choices"):
             if should_send_default_pii() and integration.include_prompts:
                 set_data_normalized(
                     span,
-                    SPANDATA.AI_RESPONSES,
+                    SPANDATA.GEN_AI_RESPONSE_TEXT,
                     list(map(lambda x: x.message, res.choices)),
                 )
             _calculate_chat_completion_usage(
@@ -189,7 +189,7 @@ def new_iterator():
                     )
                     if should_send_default_pii() and integration.include_prompts:
                         set_data_normalized(
-                            span, SPANDATA.AI_RESPONSES, all_responses
+                            span, SPANDATA.GEN_AI_RESPONSE_TEXT, all_responses
                         )
                     _calculate_chat_completion_usage(
                         messages,
@@ -222,7 +222,7 @@ async def new_iterator_async():
                     )
                     if should_send_default_pii() and integration.include_prompts:
                         set_data_normalized(
-                            span, SPANDATA.AI_RESPONSES, all_responses
+                            span, SPANDATA.GEN_AI_RESPONSE_TEXT, all_responses
                        )
                    _calculate_chat_completion_usage(
                        messages,
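
With these renames, a chat-completion span records its request and response payloads under gen_ai.* keys instead of the older ai.* ones (AI_STREAMING is left untouched here). A rough sketch of the data keys such a span now carries; the dict name and example values below are made up for illustration:

```python
from sentry_sdk.consts import SPANDATA

# Illustrative only: the attribute keys a chat-completion span carries after
# this commit, paired with made-up example values.
example_chat_span_data = {
    SPANDATA.GEN_AI_REQUEST_MESSAGES: [{"role": "user", "content": "Hello"}],
    SPANDATA.GEN_AI_REQUEST_MODEL: "gpt-4o-mini",
    SPANDATA.AI_STREAMING: False,  # not renamed by this commit
    SPANDATA.GEN_AI_RESPONSE_TEXT: ["Hi! How can I help?"],
}
```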
@@ -320,24 +320,30 @@ def _new_embeddings_create_common(f, *args, **kwargs):
     if integration is None:
         return f(*args, **kwargs)
 
+    model = kwargs.get("model")
+
     with sentry_sdk.start_span(
-        op=consts.OP.OPENAI_EMBEDDINGS_CREATE,
-        description="OpenAI Embedding Creation",
+        op=consts.OP.GEN_AI_EMBEDDINGS,
+        name=f"{consts.OP.GEN_AI_EMBEDDINGS} {model}",
         origin=OpenAIIntegration.origin,
     ) as span:
         if "input" in kwargs and (
             should_send_default_pii() and integration.include_prompts
         ):
             if isinstance(kwargs["input"], str):
-                set_data_normalized(span, SPANDATA.AI_INPUT_MESSAGES, [kwargs["input"]])
+                set_data_normalized(
+                    span, SPANDATA.GEN_AI_REQUEST_MESSAGES, [kwargs["input"]]
+                )
             elif (
                 isinstance(kwargs["input"], list)
                 and len(kwargs["input"]) > 0
                 and isinstance(kwargs["input"][0], str)
             ):
-                set_data_normalized(span, SPANDATA.AI_INPUT_MESSAGES, kwargs["input"])
+                set_data_normalized(
+                    span, SPANDATA.GEN_AI_REQUEST_MESSAGES, kwargs["input"]
+                )
         if "model" in kwargs:
-            set_data_normalized(span, SPANDATA.AI_MODEL_ID, kwargs["model"])
+            set_data_normalized(span, SPANDATA.GEN_AI_REQUEST_MODEL, kwargs["model"])
 
         response = yield f, args, kwargs
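
The embeddings path now reads the model up front so the span can be named after it, mirroring the chat-completion change. A minimal manual sketch of the resulting span shape; the integration itself goes through set_data_normalized, so the span.set_data calls and the model string here are simplifications for illustration:

```python
import sentry_sdk
from sentry_sdk.consts import OP, SPANDATA

model = "text-embedding-3-small"  # illustrative model name

with sentry_sdk.start_span(
    op=OP.GEN_AI_EMBEDDINGS,
    name=f"{OP.GEN_AI_EMBEDDINGS} {model}",  # e.g. "gen_ai.embeddings text-embedding-3-small"
) as span:
    # Inputs are only attached when default PII sending and include_prompts allow it.
    span.set_data(SPANDATA.GEN_AI_REQUEST_MESSAGES, ["text to embed"])
    span.set_data(SPANDATA.GEN_AI_REQUEST_MODEL, model)
```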
