Skip to content

Adopt OTel semantic conventions for agents and frameworks #2575

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
83 changes: 77 additions & 6 deletions src/google/adk/telemetry.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,10 +70,13 @@ def trace_tool_call(
function_response_event: The event with the function response details.
"""
span = trace.get_current_span()
span.set_attribute('gen_ai.system', 'gcp.vertex.agent')

# Standard OpenTelemetry GenAI attributes as of SemConv 1.36.0 for Agents and Frameworks
span.set_attribute('gen_ai.system', 'gcp.vertex_ai')
span.set_attribute('gen_ai.operation.name', 'execute_tool')
span.set_attribute('gen_ai.tool.name', tool.name)
span.set_attribute('gen_ai.tool.description', tool.description)

tool_call_id = '<not specified>'
tool_response = '<not specified>'
if function_response_event.content.parts:
Expand All @@ -86,6 +89,7 @@ def trace_tool_call(

span.set_attribute('gen_ai.tool.call.id', tool_call_id)

# Vendor-specific attributes (moved from gen_ai.* to gcp.vertex.agent.*)
if not isinstance(tool_response, dict):
tool_response = {'result': tool_response}
span.set_attribute(
Expand Down Expand Up @@ -121,12 +125,15 @@ def trace_merged_tool_calls(
"""

span = trace.get_current_span()
span.set_attribute('gen_ai.system', 'gcp.vertex.agent')

# Standard OpenTelemetry GenAI attributes
span.set_attribute('gen_ai.system', 'gcp.vertex_ai')
span.set_attribute('gen_ai.operation.name', 'execute_tool')
span.set_attribute('gen_ai.tool.name', '(merged tools)')
span.set_attribute('gen_ai.tool.description', '(merged tools)')
span.set_attribute('gen_ai.tool.call.id', response_event_id)

# Vendor-specific attributes
span.set_attribute('gcp.vertex.agent.tool_call_args', 'N/A')
span.set_attribute('gcp.vertex.agent.event_id', response_event_id)
try:
Expand Down Expand Up @@ -167,23 +174,38 @@ def trace_call_llm(
llm_response: The LLM response object.
"""
span = trace.get_current_span()
# Special standard Open Telemetry GenaI attributes that indicate
# that this is a span related to a Generative AI system.
span.set_attribute('gen_ai.system', 'gcp.vertex.agent')

# Standard OpenTelemetry GenAI attributes
span.set_attribute('gen_ai.system', 'gcp.vertex_ai')
span.set_attribute('gen_ai.operation.name', 'generate_content')
span.set_attribute('gen_ai.request.model', llm_request.model)

if hasattr(llm_response, 'id') and llm_response.id:
span.set_attribute('gen_ai.response.id', llm_response.id)

# Set response model if different from request model
if (
hasattr(llm_response, 'model')
and llm_response.model
and llm_response.model != llm_request.model
):
span.set_attribute('gen_ai.response.model', llm_response.model)

span.set_attribute(
'gcp.vertex.agent.invocation_id', invocation_context.invocation_id
)
span.set_attribute(
'gcp.vertex.agent.session_id', invocation_context.session.id
)
span.set_attribute('gcp.vertex.agent.event_id', event_id)

# Consider removing once GenAI SDK provides a way to record this info.
span.set_attribute(
'gcp.vertex.agent.llm_request',
_safe_json_serialize(_build_llm_request_for_trace(llm_request)),
)
# Consider removing once GenAI SDK provides a way to record this info.

# Standard GenAI request attributes
if llm_request.config:
if llm_request.config.top_p:
span.set_attribute(
Expand All @@ -195,6 +217,14 @@ def trace_call_llm(
'gen_ai.request.max_tokens',
llm_request.config.max_output_tokens,
)
if (
hasattr(llm_request.config, 'temperature')
and llm_request.config.temperature is not None
):
span.set_attribute(
'gen_ai.request.temperature',
llm_request.config.temperature,
)

try:
llm_response_json = llm_response.model_dump_json(exclude_none=True)
Expand All @@ -206,6 +236,7 @@ def trace_call_llm(
llm_response_json,
)

# Standard GenAI usage and response attributes
if llm_response.usage_metadata is not None:
span.set_attribute(
'gen_ai.usage.input_tokens',
Expand Down Expand Up @@ -239,6 +270,8 @@ def trace_send_data(
data: A list of content objects.
"""
span = trace.get_current_span()

# Vendor-specific attributes (moved from gen_ai.* to gcp.vertex.agent.*)
span.set_attribute(
'gcp.vertex.agent.invocation_id', invocation_context.invocation_id
)
Expand Down Expand Up @@ -286,3 +319,41 @@ def _build_llm_request_for_trace(llm_request: LlmRequest) -> dict[str, Any]:
)
)
return result


def _create_span_name(operation_name: str, model_name: str) -> str:
"""Creates a span name following OpenTelemetry GenAI conventions.

Args:
operation_name: The GenAI operation name (e.g., 'generate_content', 'execute_tool').
model_name: The model name being used.

Returns:
A span name in the format '{operation_name} {model_name}'.
"""
return f'{operation_name} {model_name}'


def add_genai_prompt_event(span: trace.Span, prompt_content: str):
  """Records the prompt on the span as a standard GenAI content event.

  Follows the OpenTelemetry GenAI semantic conventions, which represent
  prompt payloads as a 'gen_ai.content.prompt' span event carrying a
  'gen_ai.prompt' attribute.

  Args:
    span: The span the event is attached to.
    prompt_content: The prompt payload as a JSON string.
  """
  event_attributes = {'gen_ai.prompt': prompt_content}
  span.add_event(name='gen_ai.content.prompt', attributes=event_attributes)


def add_genai_completion_event(span: trace.Span, completion_content: str):
  """Records the completion on the span as a standard GenAI content event.

  Follows the OpenTelemetry GenAI semantic conventions, which represent
  completion payloads as a 'gen_ai.content.completion' span event carrying
  a 'gen_ai.completion' attribute.

  Args:
    span: The span the event is attached to.
    completion_content: The completion payload as a JSON string.
  """
  event_attributes = {'gen_ai.completion': completion_content}
  span.add_event(
      name='gen_ai.content.completion', attributes=event_attributes
  )
15 changes: 8 additions & 7 deletions tests/unittests/test_telemetry.py
Original file line number Diff line number Diff line change
Expand Up @@ -114,14 +114,15 @@ async def test_trace_call_llm(monkeypatch, mock_span_fixture):
trace_call_llm(invocation_context, 'test_event_id', llm_request, llm_response)

expected_calls = [
mock.call('gen_ai.system', 'gcp.vertex.agent'),
mock.call('gen_ai.system', 'gcp.vertex_ai'),
mock.call('gen_ai.operation.name', 'generate_content'),
mock.call('gen_ai.request.top_p', 0.95),
mock.call('gen_ai.request.max_tokens', 1024),
mock.call('gen_ai.usage.input_tokens', 50),
mock.call('gen_ai.usage.output_tokens', 50),
mock.call('gen_ai.response.finish_reasons', ['stop']),
]
assert mock_span_fixture.set_attribute.call_count == 12
assert mock_span_fixture.set_attribute.call_count == 13
mock_span_fixture.set_attribute.assert_has_calls(
expected_calls, any_order=True
)
Expand Down Expand Up @@ -173,9 +174,9 @@ async def test_trace_call_llm_with_binary_content(

# Verify basic telemetry attributes are set
expected_calls = [
mock.call('gen_ai.system', 'gcp.vertex.agent'),
mock.call('gen_ai.system', 'gcp.vertex_ai'),
]
assert mock_span_fixture.set_attribute.call_count == 7
assert mock_span_fixture.set_attribute.call_count == 8
mock_span_fixture.set_attribute.assert_has_calls(expected_calls)

# Verify binary content is replaced with '<not serializable>' in JSON
Expand Down Expand Up @@ -230,7 +231,7 @@ def test_trace_tool_call_with_scalar_response(
# Assert
assert mock_span_fixture.set_attribute.call_count == 10
expected_calls = [
mock.call('gen_ai.system', 'gcp.vertex.agent'),
mock.call('gen_ai.system', 'gcp.vertex_ai'),
mock.call('gen_ai.operation.name', 'execute_tool'),
mock.call('gen_ai.tool.name', mock_tool_fixture.name),
mock.call('gen_ai.tool.description', mock_tool_fixture.description),
Expand Down Expand Up @@ -289,7 +290,7 @@ def test_trace_tool_call_with_dict_response(

# Assert
expected_calls = [
mock.call('gen_ai.system', 'gcp.vertex.agent'),
mock.call('gen_ai.system', 'gcp.vertex_ai'),
mock.call('gen_ai.operation.name', 'execute_tool'),
mock.call('gen_ai.tool.name', mock_tool_fixture.name),
mock.call('gen_ai.tool.description', mock_tool_fixture.description),
Expand Down Expand Up @@ -328,7 +329,7 @@ def test_trace_merged_tool_calls_sets_correct_attributes(
)

expected_calls = [
mock.call('gen_ai.system', 'gcp.vertex.agent'),
mock.call('gen_ai.system', 'gcp.vertex_ai'),
mock.call('gen_ai.operation.name', 'execute_tool'),
mock.call('gen_ai.tool.name', '(merged tools)'),
mock.call('gen_ai.tool.description', '(merged tools)'),
Expand Down