
Commit 74f347e

Update span attribute semconv besides events
1 parent 60e2c78 commit 74f347e

3 files changed: +40 −41 lines changed


instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/src/opentelemetry/instrumentation/vertexai_v2/__init__.py

Lines changed: 38 additions & 35 deletions
@@ -24,15 +24,13 @@
 from opentelemetry import context as context_api
 from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
 from opentelemetry.instrumentation.utils import (
-    _SUPPRESS_INSTRUMENTATION_KEY,
+    is_instrumentation_enabled,
     unwrap,
 )
-from opentelemetry.instrumentation.vertexai.config import Config
-from opentelemetry.instrumentation.vertexai.utils import dont_throw
-from opentelemetry.instrumentation.vertexai.version import __version__
-from opentelemetry.semconv_ai import (
-    SUPPRESS_LANGUAGE_MODEL_INSTRUMENTATION_KEY,
-    LLMRequestTypeValues,
+from opentelemetry.instrumentation.vertexai_v2.utils import dont_throw
+from opentelemetry.instrumentation.vertexai_v2.version import __version__
+from opentelemetry.semconv._incubating.attributes import gen_ai_attributes
+from opentelemetry.semconv.trace import (
     SpanAttributes,
 )
 from opentelemetry.trace import SpanKind, get_tracer
@@ -42,6 +40,7 @@
 
 _instruments = ("google-cloud-aiplatform >= 1.38.1",)
 
+# TODO: span_name should no longer be needed as it comes from `{gen_ai.operation.name} {gen_ai.request.model}`
 WRAPPED_METHODS = [
     {
         "package": "vertexai.generative_models",
@@ -155,30 +154,36 @@ def _set_input_attributes(span, args, kwargs, llm_model):
         prompt,
     )
 
-    _set_span_attribute(span, SpanAttributes.LLM_REQUEST_MODEL, llm_model)
+    _set_span_attribute(
+        span, gen_ai_attributes.GEN_AI_REQUEST_MODEL, llm_model
+    )
     _set_span_attribute(
         span, f"{SpanAttributes.LLM_PROMPTS}.0.user", kwargs.get("prompt")
     )
     _set_span_attribute(
-        span, SpanAttributes.LLM_REQUEST_TEMPERATURE, kwargs.get("temperature")
+        span,
+        gen_ai_attributes.GEN_AI_REQUEST_TEMPERATURE,
+        kwargs.get("temperature"),
     )
     _set_span_attribute(
         span,
-        SpanAttributes.LLM_REQUEST_MAX_TOKENS,
+        gen_ai_attributes.GEN_AI_REQUEST_MAX_TOKENS,
         kwargs.get("max_output_tokens"),
     )
     _set_span_attribute(
-        span, SpanAttributes.LLM_REQUEST_TOP_P, kwargs.get("top_p")
+        span, gen_ai_attributes.GEN_AI_REQUEST_TOP_P, kwargs.get("top_p")
+    )
+    _set_span_attribute(
+        span, gen_ai_attributes.GEN_AI_REQUEST_TOP_K, kwargs.get("top_k")
     )
-    _set_span_attribute(span, SpanAttributes.LLM_TOP_K, kwargs.get("top_k"))
     _set_span_attribute(
         span,
-        SpanAttributes.LLM_PRESENCE_PENALTY,
+        gen_ai_attributes.GEN_AI_REQUEST_PRESENCE_PENALTY,
         kwargs.get("presence_penalty"),
     )
     _set_span_attribute(
         span,
-        SpanAttributes.LLM_FREQUENCY_PENALTY,
+        gen_ai_attributes.GEN_AI_REQUEST_FREQUENCY_PENALTY,
         kwargs.get("frequency_penalty"),
     )
 
@@ -187,22 +192,19 @@ def _set_input_attributes(span, args, kwargs, llm_model):
 
 @dont_throw
 def _set_response_attributes(span, llm_model, generation_text, token_usage):
-    _set_span_attribute(span, SpanAttributes.LLM_RESPONSE_MODEL, llm_model)
+    _set_span_attribute(
+        span, gen_ai_attributes.GEN_AI_RESPONSE_MODEL, llm_model
+    )
 
     if token_usage:
         _set_span_attribute(
             span,
-            SpanAttributes.LLM_USAGE_TOTAL_TOKENS,
-            token_usage.total_token_count,
-        )
-        _set_span_attribute(
-            span,
-            SpanAttributes.LLM_USAGE_COMPLETION_TOKENS,
+            gen_ai_attributes.GEN_AI_USAGE_OUTPUT_TOKENS,
            token_usage.candidates_token_count,
         )
         _set_span_attribute(
             span,
-            SpanAttributes.LLM_USAGE_PROMPT_TOKENS,
+            gen_ai_attributes.GEN_AI_USAGE_INPUT_TOKENS,
             token_usage.prompt_token_count,
         )
 
@@ -284,9 +286,7 @@ def wrapper(wrapped, instance, args, kwargs):
 @_with_tracer_wrapper
 async def _awrap(tracer, to_wrap, wrapped, instance, args, kwargs):
     """Instruments and calls every function defined in TO_WRAP."""
-    if context_api.get_value(
-        _SUPPRESS_INSTRUMENTATION_KEY
-    ) or context_api.get_value(SUPPRESS_LANGUAGE_MODEL_INSTRUMENTATION_KEY):
+    if not is_instrumentation_enabled():
         return await wrapped(*args, **kwargs)
 
     llm_model = "unknown"
@@ -297,13 +297,16 @@ async def _awrap(tracer, to_wrap, wrapped, instance, args, kwargs):
             "publishers/google/models/", ""
         )
 
-    name = to_wrap.get("span_name")
+    operation_name = (
+        gen_ai_attributes.GenAiOperationNameValues.TEXT_COMPLETION.value
+    )
+    name = f"{operation_name} {llm_model}"
     span = tracer.start_span(
         name,
         kind=SpanKind.CLIENT,
         attributes={
-            SpanAttributes.LLM_SYSTEM: "VertexAI",
-            SpanAttributes.LLM_REQUEST_TYPE: LLMRequestTypeValues.COMPLETION.value,
+            gen_ai_attributes.GEN_AI_SYSTEM: gen_ai_attributes.GenAiSystemValues.VERTEX_AI.value,
+            gen_ai_attributes.GEN_AI_OPERATION_NAME: operation_name,
         },
     )
 
@@ -326,9 +329,7 @@ async def _awrap(tracer, to_wrap, wrapped, instance, args, kwargs):
 @_with_tracer_wrapper
 def _wrap(tracer, to_wrap, wrapped, instance, args, kwargs):
     """Instruments and calls every function defined in TO_WRAP."""
-    if context_api.get_value(
-        _SUPPRESS_INSTRUMENTATION_KEY
-    ) or context_api.get_value(SUPPRESS_LANGUAGE_MODEL_INSTRUMENTATION_KEY):
+    if not is_instrumentation_enabled():
         return wrapped(*args, **kwargs)
 
     llm_model = "unknown"
@@ -339,13 +340,16 @@ def _wrap(tracer, to_wrap, wrapped, instance, args, kwargs):
             "publishers/google/models/", ""
         )
 
-    name = to_wrap.get("span_name")
+    operation_name = (
+        gen_ai_attributes.GenAiOperationNameValues.TEXT_COMPLETION.value
+    )
+    name = f"{operation_name} {llm_model}"
     span = tracer.start_span(
         name,
         kind=SpanKind.CLIENT,
         attributes={
-            SpanAttributes.LLM_SYSTEM: "VertexAI",
-            SpanAttributes.LLM_REQUEST_TYPE: LLMRequestTypeValues.COMPLETION.value,
+            gen_ai_attributes.GEN_AI_SYSTEM: gen_ai_attributes.GenAiSystemValues.VERTEX_AI.value,
+            gen_ai_attributes.GEN_AI_OPERATION_NAME: operation_name,
         },
     )
 
@@ -370,7 +374,6 @@ class VertexAIInstrumentor(BaseInstrumentor):
 
     def __init__(self, exception_logger=None):
         super().__init__()
-        Config.exception_logger = exception_logger
 
     def instrumentation_dependencies(self) -> Collection[str]:
         return _instruments
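
For context, here is a minimal sketch of the span shape this change produces. It is not part of the commit: the tracer name, model string, and attribute values below are made up for illustration, while the gen_ai_attributes constants are exactly the ones imported in the diff above.

from opentelemetry import trace
from opentelemetry.semconv._incubating.attributes import gen_ai_attributes
from opentelemetry.trace import SpanKind

tracer = trace.get_tracer("example-app")  # hypothetical tracer name

llm_model = "gemini-pro"  # hypothetical model, for illustration only
operation_name = gen_ai_attributes.GenAiOperationNameValues.TEXT_COMPLETION.value

# Span name now follows "{gen_ai.operation.name} {gen_ai.request.model}",
# and the old LLM_* span attributes are replaced by gen_ai.* equivalents.
with tracer.start_as_current_span(
    f"{operation_name} {llm_model}",
    kind=SpanKind.CLIENT,
    attributes={
        gen_ai_attributes.GEN_AI_SYSTEM: gen_ai_attributes.GenAiSystemValues.VERTEX_AI.value,
        gen_ai_attributes.GEN_AI_OPERATION_NAME: operation_name,
        gen_ai_attributes.GEN_AI_REQUEST_MODEL: llm_model,
    },
) as span:
    # Request and usage attributes are set the same way the instrumentation does.
    span.set_attribute(gen_ai_attributes.GEN_AI_REQUEST_TEMPERATURE, 0.2)
    span.set_attribute(gen_ai_attributes.GEN_AI_USAGE_INPUT_TOKENS, 11)
    span.set_attribute(gen_ai_attributes.GEN_AI_USAGE_OUTPUT_TOKENS, 42)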

instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/src/opentelemetry/instrumentation/vertexai_v2/utils.py

Lines changed: 1 addition & 5 deletions
@@ -15,8 +15,6 @@
 import logging
 import traceback
 
-from opentelemetry.instrumentation.vertexai.config import Config
-
 
 def dont_throw(func):
     """
@@ -31,13 +29,11 @@ def dont_throw(func):
     def wrapper(*args, **kwargs):
         try:
             return func(*args, **kwargs)
-        except Exception as e:
+        except Exception:
             logger.debug(
                 "OpenLLMetry failed to trace in %s, error: %s",
                 func.__name__,
                 traceback.format_exc(),
             )
-            if Config.exception_logger:
-                Config.exception_logger(e)
 
     return wrapper
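
After dropping the Config.exception_logger hook, dont_throw only logs: the exception is swallowed and reported via logger.debug, and the caller keeps running. A small usage sketch; the decorated function below is hypothetical, for illustration only.

import logging

from opentelemetry.instrumentation.vertexai_v2.utils import dont_throw

logging.basicConfig(level=logging.DEBUG)


@dont_throw
def record_something():  # hypothetical function, for illustration only
    raise ValueError("boom")


record_something()  # logged at DEBUG, not re-raised; returns None
print("still running")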

instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/tests/conftest.py

Lines changed: 1 addition & 1 deletion
@@ -3,7 +3,7 @@
 import pytest
 
 from opentelemetry import trace
-from opentelemetry.instrumentation.vertexai import VertexAIInstrumentor
+from opentelemetry.instrumentation.vertexai_v2 import VertexAIInstrumentor
 from opentelemetry.sdk.trace import TracerProvider
 from opentelemetry.sdk.trace.export import SimpleSpanProcessor
 from opentelemetry.sdk.trace.export.in_memory_span_exporter import (
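
For reference, a hedged sketch of how the renamed import is typically wired into a test fixture with an in-memory exporter. The fixture body below is an assumption for illustration, not the file's actual content.

import pytest

from opentelemetry import trace
from opentelemetry.instrumentation.vertexai_v2 import VertexAIInstrumentor
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import SimpleSpanProcessor
from opentelemetry.sdk.trace.export.in_memory_span_exporter import (
    InMemorySpanExporter,
)


@pytest.fixture(scope="session")
def exporter():
    # Collect finished spans in memory so tests can assert on gen_ai.* attributes.
    exporter = InMemorySpanExporter()
    provider = TracerProvider()
    provider.add_span_processor(SimpleSpanProcessor(exporter))
    trace.set_tracer_provider(provider)
    VertexAIInstrumentor().instrument()
    return exporter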
