|
7 | 7 | from opentelemetry.trace.propagation import set_span_in_context |
8 | 8 | from opentelemetry.trace.status import Status, StatusCode |
9 | 9 |
|
10 | | -from langtrace_python_sdk.constants.instrumentation.common import \ |
11 | | - SERVICE_PROVIDERS |
| 10 | +from langtrace_python_sdk.constants.instrumentation.common import SERVICE_PROVIDERS |
12 | 11 | from langtrace_python_sdk.constants.instrumentation.openai import APIS |
13 | 12 | from langtrace_python_sdk.instrumentation.openai.types import ( |
14 | | - ChatCompletionsCreateKwargs, ContentItem, EmbeddingsCreateKwargs, |
15 | | - ImagesEditKwargs, ImagesGenerateKwargs, ResultType) |
| 13 | + ChatCompletionsCreateKwargs, |
| 14 | + ContentItem, |
| 15 | + EmbeddingsCreateKwargs, |
| 16 | + ImagesEditKwargs, |
| 17 | + ImagesGenerateKwargs, |
| 18 | + ResultType, |
| 19 | +) |
16 | 20 | from langtrace_python_sdk.types import NOT_GIVEN |
17 | 21 | from langtrace_python_sdk.utils import set_span_attribute |
18 | | -from langtrace_python_sdk.utils.llm import (StreamWrapper, |
19 | | - calculate_prompt_tokens, |
20 | | - get_base_url, get_extra_attributes, |
21 | | - get_langtrace_attributes, |
22 | | - get_llm_request_attributes, |
23 | | - get_llm_url, get_span_name, |
24 | | - get_tool_calls, is_streaming, |
25 | | - set_event_completion, |
26 | | - set_span_attributes, |
27 | | - set_usage_attributes) |
| 22 | +from langtrace_python_sdk.utils.llm import ( |
| 23 | + StreamWrapper, |
| 24 | + calculate_prompt_tokens, |
| 25 | + get_base_url, |
| 26 | + get_extra_attributes, |
| 27 | + get_langtrace_attributes, |
| 28 | + get_llm_request_attributes, |
| 29 | + get_llm_url, |
| 30 | + get_span_name, |
| 31 | + get_tool_calls, |
| 32 | + is_streaming, |
| 33 | + set_event_completion, |
| 34 | + set_span_attributes, |
| 35 | + set_usage_attributes, |
| 36 | +) |
28 | 37 | from langtrace_python_sdk.utils.silently_fail import silently_fail |
29 | 38 |
|
30 | 39 |
|
def async_openai_responses_create(version: str, tracer: Tracer) -> Callable:
    """Wrap ``openai.AsyncOpenAI.responses.create`` to trace it.

    Args:
        version: Instrumented SDK version, recorded in the langtrace attributes.
        tracer: OpenTelemetry tracer used to create the client span.

    Returns:
        An async wrapper suitable for ``wrapt``-style patching
        (``wrapped, instance, args, kwargs``).
    """

    async def traced_method(
        wrapped: Callable, instance: Any, args: List[Any], kwargs: Dict[str, Any]
    ):
        input_value = kwargs.get("input")
        # NOTE(review): for list input only the first element is kept, while a
        # plain string is wrapped in a one-message list — the two branches
        # produce different shapes. Preserved as-is; confirm intended.
        prompt = (
            input_value[0]
            if isinstance(input_value, list)
            else [{"role": "user", "content": input_value}]
        )
        service_provider = SERVICE_PROVIDERS["OPENAI"]
        span_attributes = {
            "instructions": kwargs.get("instructions"),
            **get_langtrace_attributes(version, service_provider, vendor_type="llm"),
            **get_llm_request_attributes(
                kwargs,
                operation_name="openai.responses.create",
                prompts=prompt,
            ),
        }
        # Span ends automatically on context exit (no streaming path here).
        with tracer.start_as_current_span(
            name="openai.responses.create",
            kind=SpanKind.CLIENT,
            context=set_span_in_context(trace.get_current_span()),
        ) as span:
            try:
                set_span_attributes(span, span_attributes)

                response = await wrapped(*args, **kwargs)
                _set_openai_agentic_response_attributes(span, response)

                return response
            except Exception as err:
                # Record and mark the span as failed so backends surface it.
                span.record_exception(err)
                span.set_status(Status(StatusCode.ERROR, str(err)))
                raise

    return traced_method
| 80 | + |
def openai_responses_create(version: str, tracer: Tracer) -> Callable:
    """Wrap ``openai.OpenAI.responses.create`` to trace it.

    Args:
        version: Instrumented SDK version, recorded in the langtrace attributes.
        tracer: OpenTelemetry tracer used to create the client span.

    Returns:
        A wrapper suitable for ``wrapt``-style patching
        (``wrapped, instance, args, kwargs``).
    """

    def traced_method(
        wrapped: Callable, instance: Any, args: List[Any], kwargs: Dict[str, Any]
    ):
        input_value = kwargs.get("input")
        # NOTE(review): for list input only the first element is kept, while a
        # plain string is wrapped in a one-message list — the two branches
        # produce different shapes. Preserved as-is; confirm intended.
        prompt = (
            input_value[0]
            if isinstance(input_value, list)
            else [{"role": "user", "content": input_value}]
        )
        service_provider = SERVICE_PROVIDERS["OPENAI"]
        span_attributes = {
            "instructions": kwargs.get("instructions"),
            **get_langtrace_attributes(version, service_provider, vendor_type="llm"),
            **get_llm_request_attributes(
                kwargs,
                operation_name="openai.responses.create",
                prompts=prompt,
            ),
        }
        # end_on_exit=False because on the streaming path the span must stay
        # open until StreamWrapper finishes consuming the stream. That means
        # every non-streaming exit has to end the span explicitly, otherwise
        # it leaks and the trace never completes.
        with tracer.start_as_current_span(
            name="openai.responses.create",
            kind=SpanKind.CLIENT,
            context=set_span_in_context(trace.get_current_span()),
            end_on_exit=False,
        ) as span:
            try:
                set_span_attributes(span, span_attributes)

                response = wrapped(*args, **kwargs)
                if is_streaming(kwargs) and span.is_recording():
                    # StreamWrapper takes ownership of the span and ends it
                    # once the stream is exhausted.
                    return StreamWrapper(response, span)
                _set_openai_agentic_response_attributes(span, response)
                span.end()
                return response
            except Exception as err:
                # Record, mark as failed, and close the span before re-raising.
                span.record_exception(err)
                span.set_status(Status(StatusCode.ERROR, str(err)))
                span.end()
                raise

    return traced_method
| 124 | + |
31 | 125 | def filter_valid_attributes(attributes): |
32 | 126 | """Filter attributes where value is not None, not an empty string, and not openai.NOT_GIVEN.""" |
33 | 127 | return { |
@@ -634,6 +728,21 @@ def extract_content(choice: Any) -> Union[str, List[Dict[str, Any]], Dict[str, A |
634 | 728 | return "" |
635 | 729 |
|
636 | 730 |
|
def _set_openai_agentic_response_attributes(span: Span, response) -> None:
    """Copy id/model/output text and token usage from a Responses API result
    onto *span*.

    Args:
        span: The active OpenTelemetry span for the ``responses.create`` call.
        response: The OpenAI Responses API result object.
    """
    set_span_attribute(span, SpanAttributes.LLM_RESPONSE_ID, response.id)
    set_span_attribute(span, SpanAttributes.LLM_RESPONSE_MODEL, response.model)
    set_event_completion(span, [{"role": "assistant", "content": response.output_text}])

    usage = getattr(response, "usage", None)
    if usage is None:
        # No usage block (e.g. truncated/failed response) — nothing to record.
        return

    # The SDK models input_tokens_details as an object with a cached_tokens
    # attribute (the sibling _set_response_attributes reads
    # prompt_tokens_details.cached_tokens the same way); tolerate a plain dict
    # too, since the original code subscripted it.
    details = getattr(usage, "input_tokens_details", None)
    if isinstance(details, dict):
        cached_tokens = details.get("cached_tokens", 0)
    else:
        cached_tokens = getattr(details, "cached_tokens", 0) if details else 0

    set_usage_attributes(
        span,
        {
            "input_tokens": usage.input_tokens,
            "output_tokens": usage.output_tokens,
            "total_tokens": usage.total_tokens,
            "cached_tokens": cached_tokens,
        },
    )
| 745 | + |
637 | 746 | @silently_fail |
638 | 747 | def _set_input_attributes( |
639 | 748 | span: Span, kwargs: ChatCompletionsCreateKwargs, attributes: LLMSpanAttributes |
@@ -707,5 +816,9 @@ def _set_response_attributes(span: Span, result: ResultType) -> None: |
707 | 816 | set_span_attribute( |
708 | 817 | span, |
709 | 818 | "gen_ai.usage.cached_tokens", |
710 | | - result.usage.prompt_tokens_details.cached_tokens if result.usage.prompt_tokens_details else 0, |
| 819 | + ( |
| 820 | + result.usage.prompt_tokens_details.cached_tokens |
| 821 | + if result.usage.prompt_tokens_details |
| 822 | + else 0 |
| 823 | + ), |
711 | 824 | ) |
0 commit comments