Skip to content

Commit 627376d

Browse files
committed
fix: clean up nits
1 parent 309d348 commit 627376d

File tree

3 files changed

+30
-8
lines changed

3 files changed

+30
-8
lines changed

util/opentelemetry-util-genai/src/opentelemetry/util/genai/span_utils.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -137,8 +137,8 @@ def _apply_response_attributes(span: Span, invocation: LLMInvocation) -> None:
137137
attributes: Dict[str, Any] = {}
138138

139139
finish_reasons: Optional[List[str]]
140-
if invocation.response_finish_reasons is not None:
141-
finish_reasons = invocation.response_finish_reasons
140+
if invocation.finish_reasons is not None:
141+
finish_reasons = invocation.finish_reasons
142142
elif invocation.output_messages:
143143
finish_reasons = [
144144
message.finish_reason for message in invocation.output_messages

util/opentelemetry-util-genai/src/opentelemetry/util/genai/types.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -111,7 +111,7 @@ class LLMInvocation:
111111
provider: Optional[str] = None
112112
response_model_name: Optional[str] = None
113113
response_id: Optional[str] = None
114-
response_finish_reasons: Optional[List[str]] = None
114+
finish_reasons: Optional[List[str]] = None
115115
input_tokens: Optional[int] = None
116116
output_tokens: Optional[int] = None
117117
attributes: Dict[str, Any] = field(default_factory=_new_str_any_dict)

util/opentelemetry-util-genai/tests/test_utils.py

Lines changed: 27 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -34,6 +34,7 @@
3434
from opentelemetry.semconv.attributes import (
3535
error_attributes as ErrorAttributes,
3636
)
37+
from opentelemetry.semconv.schemas import Schemas
3738
from opentelemetry.trace.status import StatusCode
3839
from opentelemetry.util.genai.environment_variables import (
3940
OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT,
@@ -209,7 +210,7 @@ def test_llm_start_and_stop_creates_span(self): # pylint: disable=no-self-use
209210
"temperature": 0.5,
210211
"top_p": 0.9,
211212
"stop_sequences": ["stop"],
212-
"response_finish_reasons": ["stop"],
213+
"finish_reasons": ["stop"],
213214
"response_model_name": "test-response-model",
214215
"response_id": "response-id",
215216
"input_tokens": 321,
@@ -229,8 +230,8 @@ def test_llm_start_and_stop_creates_span(self): # pylint: disable=no-self-use
229230
_assert_span_attributes(
230231
span_attrs,
231232
{
232-
"gen_ai.operation.name": "chat",
233-
"gen_ai.provider.name": "test-provider",
233+
GenAI.GEN_AI_OPERATION_NAME: "chat",
234+
GenAI.GEN_AI_PROVIDER_NAME: "test-provider",
234235
GenAI.GEN_AI_REQUEST_TEMPERATURE: 0.5,
235236
GenAI.GEN_AI_REQUEST_TOP_P: 0.9,
236237
GenAI.GEN_AI_REQUEST_STOP_SEQUENCES: ("stop",),
@@ -294,7 +295,7 @@ def test_llm_span_finish_reasons_without_output_messages(self):
294295
invocation = LLMInvocation(
295296
request_model="model-without-output",
296297
provider="test-provider",
297-
response_finish_reasons=["length"],
298+
finish_reasons=["length"],
298299
response_model_name="alt-model",
299300
response_id="resp-001",
300301
input_tokens=12,
@@ -319,6 +320,27 @@ def test_llm_span_finish_reasons_without_output_messages(self):
319320
},
320321
)
321322

323+
def test_llm_span_uses_expected_schema_url(self):
324+
invocation = LLMInvocation(
325+
request_model="schema-model",
326+
provider="schema-provider",
327+
)
328+
329+
self.telemetry_handler.start_llm(invocation)
330+
assert invocation.span is not None
331+
self.telemetry_handler.stop_llm(invocation)
332+
333+
span = _get_single_span(self.span_exporter)
334+
instrumentation = getattr(span, "instrumentation_scope", None)
335+
if instrumentation is None:
336+
instrumentation = getattr(span, "instrumentation_info", None)
337+
338+
assert instrumentation is not None
339+
assert (
340+
getattr(instrumentation, "schema_url", None)
341+
== Schemas.V1_37_0.value
342+
)
343+
322344
@patch_env_vars(
323345
stability_mode="gen_ai_latest_experimental",
324346
content_capturing="SPAN_ONLY",
@@ -379,7 +401,7 @@ class BoomError(RuntimeError):
379401
for attr, value in {
380402
"max_tokens": 128,
381403
"seed": 123,
382-
"response_finish_reasons": ["error"],
404+
"finish_reasons": ["error"],
383405
"response_model_name": "error-model",
384406
"response_id": "error-response",
385407
"input_tokens": 11,

0 commit comments

Comments (0)