@@ -30,19 +30,17 @@ class OpenaiStreamWrapper:
     - Chunk statistics
     """
 
-    def __init__(self, stream: Any, span: Span, request_kwargs: dict, tracer=None):
+    def __init__(self, stream: Any, span: Span, request_kwargs: dict):
         """Initialize the stream wrapper.
 
         Args:
             stream: The original OpenAI stream object
             span: The OpenTelemetry span for tracking
             request_kwargs: Original request parameters for context
-            tracer: The OpenTelemetry tracer for creating child spans
         """
         self._stream = stream
         self._span = span
         self._request_kwargs = request_kwargs
-        self._tracer = tracer
         self._start_time = time.time()
         self._first_token_time = None
         self._chunk_count = 0
@@ -195,10 +193,10 @@ def _finalize_stream(self) -> None:
             self._span.set_attribute(MessageAttributes.COMPLETION_FINISH_REASON.format(i=0), self._finish_reason)
 
         # Create tool spans for each tool call
-        if len(self._tool_calls) > 0 and self._tracer is not None:
+        if len(self._tool_calls) > 0:
             for idx, tool_call in self._tool_calls.items():
                 # Create a child span for this tool call
-                _create_tool_span(self._span, tool_call, self._tracer)
+                _create_tool_span(self._span, tool_call)
 
         # Set usage if available from the API
         if self._usage is not None:
@@ -237,19 +235,17 @@ def _finalize_stream(self) -> None:
 class OpenAIAsyncStreamWrapper:
     """Async wrapper for OpenAI Chat Completions streaming responses."""
 
-    def __init__(self, stream: Any, span: Span, request_kwargs: dict, tracer=None):
+    def __init__(self, stream: Any, span: Span, request_kwargs: dict):
         """Initialize the async stream wrapper.
 
         Args:
             stream: The original OpenAI async stream object
             span: The OpenTelemetry span for tracking
             request_kwargs: Original request parameters for context
-            tracer: The OpenTelemetry tracer for creating child spans
         """
         self._stream = stream
         self._span = span
         self._request_kwargs = request_kwargs
-        self._tracer = tracer
         self._start_time = time.time()
         self._first_token_time = None
         self._chunk_count = 0
@@ -356,10 +352,10 @@ def chat_completion_stream_wrapper(tracer, wrapped, instance, args, kwargs):
         if is_streaming:
             # Wrap the stream
             context_api.detach(token)
-            return OpenaiStreamWrapper(response, span, kwargs, tracer)
+            return OpenaiStreamWrapper(response, span, kwargs)
         else:
             # Handle non-streaming response
-            response_attributes = handle_chat_attributes(kwargs=kwargs, return_value=response, span=span, tracer=tracer)
+            response_attributes = handle_chat_attributes(kwargs=kwargs, return_value=response, span=span)
 
             for key, value in response_attributes.items():
                 if key not in request_attributes:  # Avoid overwriting request attributes
@@ -421,10 +417,10 @@ async def async_chat_completion_stream_wrapper(tracer, wrapped, instance, args, kwargs):
         if is_streaming:
             # Wrap the stream
             context_api.detach(token)
-            return OpenAIAsyncStreamWrapper(response, span, kwargs, tracer)
+            return OpenAIAsyncStreamWrapper(response, span, kwargs)
         else:
             # Handle non-streaming response
-            response_attributes = handle_chat_attributes(kwargs=kwargs, return_value=response, span=span, tracer=tracer)
+            response_attributes = handle_chat_attributes(kwargs=kwargs, return_value=response, span=span)
 
             for key, value in response_attributes.items():
                 if key not in request_attributes:  # Avoid overwriting request attributes
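
Side note on what the removal implies: the wrappers no longer carry a `tracer`, so `_create_tool_span(self._span, tool_call)` must obtain one on its own. The sketch below is an assumption for illustration only, not this package's actual helper; it shows one way the helper could resolve a tracer from the global OpenTelemetry provider and parent the tool span to the completion span. The span name, attribute keys, and dict-shaped `tool_call` are hypothetical.

```python
# Illustrative sketch only (assumed implementation; the real _create_tool_span may differ).
from opentelemetry import trace
from opentelemetry.trace import Span


def _create_tool_span(parent_span: Span, tool_call: dict) -> None:
    # Resolve a tracer from the globally configured provider instead of
    # receiving one from the stream wrapper (assumption based on this diff).
    tracer = trace.get_tracer(__name__)
    # Explicitly parent the tool span to the streaming completion span.
    ctx = trace.set_span_in_context(parent_span)
    with tracer.start_as_current_span("openai.tool_call", context=ctx) as span:
        function = tool_call.get("function", {}) or {}
        span.set_attribute("tool.name", function.get("name", ""))
        span.set_attribute("tool.arguments", function.get("arguments", ""))
```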