Skip to content

Commit 1be4a6b

Browse files
committed
add async support
1 parent 03b6c57 commit 1be4a6b

File tree

3 files changed

+99
-19
lines changed

3 files changed

+99
-19
lines changed

instrumentation-genai/opentelemetry-instrumentation-openai-v2/src/opentelemetry/instrumentation/openai_v2/__init__.py

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,7 @@
5252
from opentelemetry.semconv.schemas import Schemas
5353
from opentelemetry.trace import get_tracer
5454

55-
from .patch import chat_completions_create
55+
from .patch import async_chat_completions_create, chat_completions_create
5656

5757

5858
class OpenAIInstrumentor(BaseInstrumentor):
@@ -84,7 +84,16 @@ def _instrument(self, **kwargs):
8484
),
8585
)
8686

87+
wrap_function_wrapper(
88+
module="openai.resources.chat.completions",
89+
name="AsyncCompletions.create",
90+
wrapper=async_chat_completions_create(
91+
tracer, event_logger, is_content_enabled()
92+
),
93+
)
94+
8795
def _uninstrument(self, **kwargs):
    """Detach the tracing wrappers from both the sync and async ``create`` methods."""
    import openai  # pylint: disable=import-outside-toplevel

    completions_module = openai.resources.chat.completions
    unwrap(completions_module.Completions, "create")
    unwrap(completions_module.AsyncCompletions, "create")

instrumentation-genai/opentelemetry-instrumentation-openai-v2/src/opentelemetry/instrumentation/openai_v2/patch.py

Lines changed: 71 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -21,15 +21,12 @@
2121
from opentelemetry.semconv._incubating.attributes import (
2222
gen_ai_attributes as GenAIAttributes,
2323
)
24-
from opentelemetry.semconv.attributes import (
25-
error_attributes as ErrorAttributes,
26-
)
2724
from opentelemetry.trace import Span, SpanKind, Tracer
28-
from opentelemetry.trace.status import Status, StatusCode
2925

3026
from .utils import (
3127
choice_to_event,
3228
get_llm_request_attributes,
29+
handle_span_exception,
3330
is_streaming,
3431
message_to_event,
3532
set_span_attribute,
@@ -72,12 +69,49 @@ def traced_method(wrapped, instance, args, kwargs):
7269
return result
7370

7471
except Exception as error:
75-
span.set_status(Status(StatusCode.ERROR, str(error)))
72+
handle_span_exception(span, error)
73+
raise
74+
75+
return traced_method
76+
77+
78+
def async_chat_completions_create(
    tracer: Tracer, event_logger: EventLogger, capture_content: bool
):
    """Wrap the `create` method of the `AsyncCompletions` class to trace it.

    Returns an async wrapt-style wrapper ``traced_method(wrapped, instance,
    args, kwargs)``: it opens a CLIENT span named "<operation> <model>",
    emits one event per request message, awaits the wrapped call, and
    records response attributes/events on success.
    """

    async def traced_method(wrapped, instance, args, kwargs):
        # Request-level attributes (operation name, model, ...) derived
        # from the call's kwargs before the request is sent.
        span_attributes = {**get_llm_request_attributes(kwargs, instance)}

        span_name = f"{span_attributes[GenAIAttributes.GEN_AI_OPERATION_NAME]} {span_attributes[GenAIAttributes.GEN_AI_REQUEST_MODEL]}"
        # end_on_exit=False: for streaming responses the span must outlive
        # this call; it is ended later by StreamWrapper, by the explicit
        # span.end() below, or by handle_span_exception on error.
        with tracer.start_as_current_span(
            name=span_name,
            kind=SpanKind.CLIENT,
            attributes=span_attributes,
            end_on_exit=False,
        ) as span:
            if span.is_recording():
                # One event per request message (content only if enabled).
                for message in kwargs.get("messages", []):
                    event_logger.emit(
                        message_to_event(message, capture_content)
                    )

            try:
                result = await wrapped(*args, **kwargs)
                if is_streaming(kwargs):
                    # Streaming: hand ownership of the open span to the
                    # wrapper, which ends it when the stream is exhausted.
                    return StreamWrapper(
                        result, span, event_logger, capture_content
                    )

                if span.is_recording():
                    _set_response_attributes(
                        span, result, event_logger, capture_content
                    )
                span.end()
                return result

            except Exception as error:
                # Records ERROR status / error.type and ends the span.
                handle_span_exception(span, error)
                raise

    return traced_method
@@ -286,10 +320,19 @@ def __enter__(self):
286320
def __exit__(self, exc_type, exc_val, exc_tb):
    """Sync context-manager exit: record any pending exception, always clean up."""
    failed = exc_type is not None
    try:
        if failed:
            handle_span_exception(self.span, exc_val)
    finally:
        self.cleanup()
    # Returning False never suppresses the caller's exception.
    return False
327+
328+
async def __aenter__(self):
    """Async context-manager entry: run setup and return the wrapper itself."""
    self.setup()
    return self
331+
332+
async def __aexit__(self, exc_type, exc_val, exc_tb):
    """Async counterpart of ``__exit__``: record any in-flight exception
    on the span, always release resources, never suppress the exception."""
    try:
        if exc_type is not None:
            # An exception escaped the `async with` body: mark the span failed.
            handle_span_exception(self.span, exc_val)
    finally:
        self.cleanup()
    return False  # Propagate the exception
@@ -301,6 +344,9 @@ def close(self):
301344
def __iter__(self):
    # The wrapper is its own sync iterator; chunks come from __next__.
    return self
303346

347+
def __aiter__(self):
    # The wrapper is its own async iterator; chunks come from __anext__.
    return self
349+
304350
def __next__(self):
305351
try:
306352
chunk = next(self.stream)
@@ -310,10 +356,20 @@ def __next__(self):
310356
self.cleanup()
311357
raise
312358
except Exception as error:
313-
self.span.set_status(Status(StatusCode.ERROR, str(error)))
314-
self.span.set_attribute(
315-
ErrorAttributes.ERROR_TYPE, type(error).__qualname__
316-
)
359+
handle_span_exception(self.span, error)
360+
self.cleanup()
361+
raise
362+
363+
async def __anext__(self):
    # Async-iteration step: pull the next chunk from the wrapped async
    # stream, process it (presumably folds it into the span/event state —
    # process_chunk is defined elsewhere), and hand it to the caller.
    try:
        chunk = await self.stream.__anext__()
        self.process_chunk(chunk)
        return chunk
    except StopAsyncIteration:
        # Normal end of stream: finalize, then let StopAsyncIteration
        # propagate so the `async for` loop terminates.
        self.cleanup()
        raise
    except Exception as error:
        # Unexpected failure: record it on the span before cleanup,
        # then re-raise for the caller.
        handle_span_exception(self.span, error)
        self.cleanup()
        raise
319375

instrumentation-genai/opentelemetry-instrumentation-openai-v2/src/opentelemetry/instrumentation/openai_v2/utils.py

Lines changed: 18 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,10 @@
2626
from opentelemetry.semconv._incubating.attributes import (
2727
server_attributes as ServerAttributes,
2828
)
29+
from opentelemetry.semconv.attributes import (
30+
error_attributes as ErrorAttributes,
31+
)
32+
from opentelemetry.trace.status import Status, StatusCode
2933

3034
OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT = (
3135
"OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT"
@@ -138,9 +142,11 @@ def choice_to_event(choice, capture_content):
138142

139143
if choice.message:
140144
message = {
141-
"role": choice.message.role
142-
if choice.message and choice.message.role
143-
else None
145+
"role": (
146+
choice.message.role
147+
if choice.message and choice.message.role
148+
else None
149+
)
144150
}
145151
tool_calls = extract_tool_calls(choice.message, capture_content)
146152
if tool_calls:
@@ -210,3 +216,12 @@ def get_llm_request_attributes(
210216

211217
# filter out None values
212218
return {k: v for k, v in attributes.items() if v is not None}
219+
220+
221+
def handle_span_exception(span, error):
    """Mark *span* as failed with *error*: set ERROR status, attach the
    ``error.type`` attribute when recording, and end the span."""
    error_status = Status(StatusCode.ERROR, str(error))
    span.set_status(error_status)
    if span.is_recording():
        # error.type carries the exception class' qualified name.
        span.set_attribute(ErrorAttributes.ERROR_TYPE, type(error).__qualname__)
    span.end()

0 commit comments

Comments
 (0)