from dataclasses import asdict
from typing import Any

+ from opentelemetry._logs import Logger, LogRecord
+ from opentelemetry.context import get_current
from opentelemetry.semconv._incubating.attributes import (
    gen_ai_attributes as GenAI,
)
from opentelemetry.trace import (
    Span,
)
+ from opentelemetry.trace.propagation import set_span_in_context
from opentelemetry.trace.status import Status, StatusCode
from opentelemetry.util.genai.types import (
    Error,
    InputMessage,
    LLMInvocation,
+     MessagePart,
    OutputMessage,
)
from opentelemetry.util.genai.utils import (
)


- def _apply_common_span_attributes(
-     span: Span, invocation: LLMInvocation
- ) -> None:
-     """Apply attributes shared by finish() and error() and compute metrics.
+ def _get_llm_common_attributes(
+     invocation: LLMInvocation,
+ ) -> dict[str, Any]:
+     """Get common LLM attributes shared by finish() and error() paths.

-     Returns (genai_attributes) for use with metrics.
+     Returns a dictionary of attributes.
    """
-     span.update_name(
-         f"{GenAI.GenAiOperationNameValues.CHAT.value} {invocation.request_model}".strip()
-     )
-     span.set_attribute(
-         GenAI.GEN_AI_OPERATION_NAME, GenAI.GenAiOperationNameValues.CHAT.value
+     attributes: dict[str, Any] = {}
+     attributes[GenAI.GEN_AI_OPERATION_NAME] = (
+         GenAI.GenAiOperationNameValues.CHAT.value
    )
    if invocation.request_model:
-         span.set_attribute(
-             GenAI.GEN_AI_REQUEST_MODEL, invocation.request_model
-         )
+         attributes[GenAI.GEN_AI_REQUEST_MODEL] = invocation.request_model
    if invocation.provider is not None:
        # TODO: clean provider name to match GenAiProviderNameValues?
-         span.set_attribute(GenAI.GEN_AI_PROVIDER_NAME, invocation.provider)
+         attributes[GenAI.GEN_AI_PROVIDER_NAME] = invocation.provider
+     return attributes

-     _apply_response_attributes(span, invocation)

+ def _get_llm_span_name(invocation: LLMInvocation) -> str:
+     """Get the span name for an LLM invocation."""
+     return f"{GenAI.GenAiOperationNameValues.CHAT.value} {invocation.request_model}".strip()

- def _maybe_set_span_messages(
-     span: Span,
+
+ def _get_llm_messages_attributes_for_span(
    input_messages: list[InputMessage],
    output_messages: list[OutputMessage],
- ) -> None:
+     system_instruction: list[MessagePart] | None = None,
+ ) -> dict[str, Any]:
+     """Get message attributes formatted for span (JSON string format).
+
+     Returns empty dict if not in experimental mode or content capturing is disabled.
+     """
+     attributes: dict[str, Any] = {}
    if not is_experimental_mode() or get_content_capturing_mode() not in (
        ContentCapturingMode.SPAN_ONLY,
        ContentCapturingMode.SPAN_AND_EVENT,
    ):
-         return
+         return attributes
    if input_messages:
-         span.set_attribute(
-             GenAI.GEN_AI_INPUT_MESSAGES,
-             gen_ai_json_dumps([asdict(message) for message in input_messages]),
+         attributes[GenAI.GEN_AI_INPUT_MESSAGES] = gen_ai_json_dumps(
+             [asdict(message) for message in input_messages]
        )
    if output_messages:
-         span.set_attribute(
-             GenAI.GEN_AI_OUTPUT_MESSAGES,
-             gen_ai_json_dumps(
-                 [asdict(message) for message in output_messages]
-             ),
+         attributes[GenAI.GEN_AI_OUTPUT_MESSAGES] = gen_ai_json_dumps(
+             [asdict(message) for message in output_messages]
        )
+     if system_instruction:
+         attributes[GenAI.GEN_AI_SYSTEM_INSTRUCTIONS] = gen_ai_json_dumps(
+             [asdict(part) for part in system_instruction]
+         )
+     return attributes
+
+
+ def _get_llm_messages_attributes_for_event(
+     input_messages: list[InputMessage],
+     output_messages: list[OutputMessage],
+     system_instruction: list[MessagePart] | None = None,
+ ) -> dict[str, Any]:
+     """Get message attributes formatted for event (structured format).

+     Returns empty dict if not in experimental mode or content capturing is disabled.
+     """
+     attributes: dict[str, Any] = {}
+     if not is_experimental_mode() or get_content_capturing_mode() not in (
+         ContentCapturingMode.EVENT_ONLY,
+         ContentCapturingMode.SPAN_AND_EVENT,
+     ):
+         return attributes
+     if input_messages:
+         attributes[GenAI.GEN_AI_INPUT_MESSAGES] = [
+             asdict(message) for message in input_messages
+         ]
+     if output_messages:
+         attributes[GenAI.GEN_AI_OUTPUT_MESSAGES] = [
+             asdict(message) for message in output_messages
+         ]
+     if system_instruction:
+         attributes[GenAI.GEN_AI_SYSTEM_INSTRUCTIONS] = [
+             asdict(part) for part in system_instruction
+         ]
+     return attributes

- def _apply_finish_attributes(span: Span, invocation: LLMInvocation) -> None:
+
+ def _maybe_emit_llm_event(
+     logger: Logger | None,
+     span: Span,
+     invocation: LLMInvocation,
+     error: Error | None = None,
+ ) -> None:
+     """Emit a gen_ai.client.inference.operation.details event to the logger.
+
+     This function creates a LogRecord event following the semantic convention
+     for gen_ai.client.inference.operation.details as specified in the GenAI
+     event semantic conventions.
+     """
+     if not is_experimental_mode() or get_content_capturing_mode() not in (
+         ContentCapturingMode.EVENT_ONLY,
+         ContentCapturingMode.SPAN_AND_EVENT,
+     ):
+         return
+
+     if logger is None:
+         return
+
+     # Build event attributes by reusing the attribute getter functions
+     attributes: dict[str, Any] = {}
+     attributes.update(_get_llm_common_attributes(invocation))
+     attributes.update(_get_llm_request_attributes(invocation))
+     attributes.update(_get_llm_response_attributes(invocation))
+     attributes.update(
+         _get_llm_messages_attributes_for_event(
+             invocation.input_messages,
+             invocation.output_messages,
+             invocation.system_instruction,
+         )
+     )
+
+     # Add error.type if operation ended in error
+     if error is not None:
+         attributes[ErrorAttributes.ERROR_TYPE] = error.type.__qualname__
+
+     # Create and emit the event
+     context = set_span_in_context(span, get_current())
+     event = LogRecord(
+         event_name="gen_ai.client.inference.operation.details",
+         attributes=attributes,
+         context=context,
+     )
+     logger.emit(event)
+
+
+ def _apply_llm_finish_attributes(
+     span: Span, invocation: LLMInvocation
+ ) -> None:
    """Apply attributes/messages common to finish() paths."""
-     _apply_common_span_attributes(span, invocation)
-     _maybe_set_span_messages(
-         span, invocation.input_messages, invocation.output_messages
+     # Update span name
+     span.update_name(_get_llm_span_name(invocation))
+
+     # Build all attributes by reusing the attribute getter functions
+     attributes: dict[str, Any] = {}
+     attributes.update(_get_llm_common_attributes(invocation))
+     attributes.update(_get_llm_request_attributes(invocation))
+     attributes.update(_get_llm_response_attributes(invocation))
+     attributes.update(
+         _get_llm_messages_attributes_for_span(
+             invocation.input_messages,
+             invocation.output_messages,
+             invocation.system_instruction,
+         )
    )
-     _apply_request_attributes(span, invocation)
-     _apply_response_attributes(span, invocation)
-     span.set_attributes(invocation.attributes)
+     attributes.update(invocation.attributes)
+
+     # Set all attributes on the span
+     if attributes:
+         span.set_attributes(attributes)


def _apply_error_attributes(span: Span, error: Error) -> None:
@@ -107,8 +210,10 @@ def _apply_error_attributes(span: Span, error: Error) -> None:
    span.set_attribute(ErrorAttributes.ERROR_TYPE, error.type.__qualname__)


- def _apply_request_attributes(span: Span, invocation: LLMInvocation) -> None:
-     """Attach GenAI request semantic convention attributes to the span."""
+ def _get_llm_request_attributes(
+     invocation: LLMInvocation,
+ ) -> dict[str, Any]:
+     """Get GenAI request semantic convention attributes."""
    attributes: dict[str, Any] = {}
    if invocation.temperature is not None:
        attributes[GenAI.GEN_AI_REQUEST_TEMPERATURE] = invocation.temperature
@@ -130,12 +235,13 @@ def _apply_request_attributes(span: Span, invocation: LLMInvocation) -> None:
        )
    if invocation.seed is not None:
        attributes[GenAI.GEN_AI_REQUEST_SEED] = invocation.seed
-     if attributes:
-         span.set_attributes(attributes)
+     return attributes


- def _apply_response_attributes(span: Span, invocation: LLMInvocation) -> None:
-     """Attach GenAI response semantic convention attributes to the span."""
+ def _get_llm_response_attributes(
+     invocation: LLMInvocation,
+ ) -> dict[str, Any]:
+     """Get GenAI response semantic convention attributes."""
    attributes: dict[str, Any] = {}

    finish_reasons: list[str] | None
@@ -169,13 +275,15 @@ def _apply_response_attributes(span: Span, invocation: LLMInvocation) -> None:
    if invocation.output_tokens is not None:
        attributes[GenAI.GEN_AI_USAGE_OUTPUT_TOKENS] = invocation.output_tokens

-     if attributes:
-         span.set_attributes(attributes)
+     return attributes


__all__ = [
-     "_apply_finish_attributes",
+     "_apply_llm_finish_attributes",
    "_apply_error_attributes",
-     "_apply_request_attributes",
-     "_apply_response_attributes",
+     "_get_llm_common_attributes",
+     "_get_llm_request_attributes",
+     "_get_llm_response_attributes",
+     "_get_llm_span_name",
+     "_maybe_emit_llm_event",
]
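
A rough usage sketch (not part of this diff) of how a caller might wire the new helpers together on the finish path. The tracer/logger setup and the finish_invocation wrapper below are illustrative assumptions, not code from this PR; the sketch presumes it sits alongside the helpers above so LLMInvocation and the underscore-prefixed functions are already in scope.

    # Hypothetical sketch only, under the assumptions stated above.
    from opentelemetry import trace
    from opentelemetry._logs import get_logger

    tracer = trace.get_tracer(__name__)
    event_logger = get_logger(__name__)  # Logger passed to _maybe_emit_llm_event

    def finish_invocation(invocation: LLMInvocation) -> None:
        with tracer.start_as_current_span(_get_llm_span_name(invocation)) as span:
            # Span path: span name plus flattened request/response/message attributes.
            _apply_llm_finish_attributes(span, invocation)
            # Event path: structured gen_ai.client.inference.operation.details log
            # record, emitted only when experimental mode and content capturing allow it.
            _maybe_emit_llm_event(event_logger, span, invocation)

Splitting the attribute builders from the span/event emitters lets the same dictionaries feed both the span attributes and the operation-details event without duplicating the semantic-convention mapping.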