2929 handler = get_telemetry_handler()
3030
3131 # Create an invocation object with your request data
32- # The span and context_token attributes are set by the TelemetryHandler, and
32+ # The span and span_scope attributes are set by the TelemetryHandler, and
3333 # managed by the TelemetryHandler during the lifecycle of the span.
3434
3535 # Use the context manager to manage the lifecycle of an LLM invocation.
6363from contextlib import contextmanager
6464from typing import Iterator , Optional
6565
66- from opentelemetry import context as otel_context
6766from opentelemetry .semconv ._incubating .attributes import (
6867 gen_ai_attributes as GenAI ,
6968)
7271 SpanKind ,
7372 TracerProvider ,
7473 get_tracer ,
75- set_span_in_context ,
74+ use_span ,
7675)
7776from opentelemetry .util .genai .span_utils import (
7877 _apply_error_attributes ,
7978 _apply_finish_attributes ,
8079)
81- from opentelemetry .util .genai .types import Error , LLMInvocation
80+ from opentelemetry .util .genai .types import (
81+ Error ,
82+ LLMInvocation ,
83+ )
8284from opentelemetry .util .genai .version import __version__
8385
8486
def start_llm(
    self,
    invocation: LLMInvocation,
) -> LLMInvocation:
    """Begin an LLM invocation by opening a client span and making it current.

    A ``CLIENT`` span named "<operation> <request model>" is started, stored
    on ``invocation.span``, and activated through the OpenTelemetry
    ``use_span`` helper so that nested telemetry parents under it. The
    activation scope is entered manually and kept on
    ``invocation.span_scope`` so that ``stop_llm``/``fail_llm`` can exit it
    later.
    """
    span = self._tracer.start_span(
        name=f"{GenAI.GenAiOperationNameValues.CHAT.value} {invocation.request_model}",
        kind=SpanKind.CLIENT,
    )
    invocation.span = span
    # Decouple the span's lifetime from the scope: the span must stay open
    # (and untouched by exceptions) until stop_llm/fail_llm finalizes it.
    activation = use_span(
        span,
        end_on_exit=False,
        record_exception=False,
        set_status_on_exception=False,
    )
    activation.__enter__()
    invocation.span_scope = activation
    return invocation
114121
def stop_llm(self, invocation: LLMInvocation) -> LLMInvocation:  # pylint: disable=no-self-use
    """Finalize a started LLM invocation: apply finish attributes, then
    deactivate and end its span.

    Returns the invocation unchanged (no-op) when it was never started,
    i.e. it carries no active span or activation scope.
    """
    if invocation.span_scope is None or invocation.span is None:
        # TODO: Provide feedback that this invocation was not started
        return invocation

    active_scope, active_span = invocation.span_scope, invocation.span
    try:
        _apply_finish_attributes(active_span, invocation)
    finally:
        # Always restore the previous context and close the span, even when
        # applying attributes raises.
        active_scope.__exit__(None, None, None)
        active_span.end()
        invocation.span_scope = None
        invocation.span = None
    return invocation
126138
def fail_llm(  # pylint: disable=no-self-use
    self, invocation: LLMInvocation, error: Error
) -> LLMInvocation:
    """Terminate a started LLM invocation with error status, then deactivate
    and end its span.

    Returns the invocation unchanged (no-op) when it was never started,
    i.e. it carries no active span or activation scope.
    """
    if invocation.span_scope is None or invocation.span is None:
        # TODO: Provide feedback that this invocation was not started
        return invocation

    pending_scope, pending_span = invocation.span_scope, invocation.span
    try:
        _apply_error_attributes(pending_span, error)
    finally:
        # Teardown mirrors stop_llm: detach context first, then end the span,
        # regardless of whether the error attributes were applied cleanly.
        pending_scope.__exit__(None, None, None)
        pending_span.end()
        invocation.span_scope = None
        invocation.span = None
    return invocation
140157
@contextmanager
def llm(
    self,
    invocation: Optional[LLMInvocation] = None,
) -> Iterator[LLMInvocation]:
    """Context-managed wrapper around start_llm / stop_llm / fail_llm.

    Only set data attributes on the invocation object; do not modify the
    span or context. The span is started on entry. A normal exit finalizes
    the invocation and ends the span; an exception raised inside the block
    marks the span as an error, ends it, and re-raises the original
    exception.
    """
    target = (
        invocation
        if invocation is not None
        else LLMInvocation(request_model="")
    )
    self.start_llm(target)
    try:
        yield target
    except Exception as exc:
        self.fail_llm(
            target,
            Error(message=str(exc), type=type(exc)),
        )
        raise
    self.stop_llm(target)
0 commit comments