
Commit 631e855

remove use of tracer
1 parent 7b8faae commit 631e855

2 files changed (15 additions, 19 deletions)

agentops/instrumentation/providers/openai/stream_wrapper.py

Lines changed: 8 additions & 12 deletions
```diff
@@ -30,19 +30,17 @@ class OpenaiStreamWrapper:
     - Chunk statistics
     """
 
-    def __init__(self, stream: Any, span: Span, request_kwargs: dict, tracer=None):
+    def __init__(self, stream: Any, span: Span, request_kwargs: dict):
         """Initialize the stream wrapper.
 
         Args:
             stream: The original OpenAI stream object
             span: The OpenTelemetry span for tracking
             request_kwargs: Original request parameters for context
-            tracer: The OpenTelemetry tracer for creating child spans
         """
         self._stream = stream
         self._span = span
         self._request_kwargs = request_kwargs
-        self._tracer = tracer
         self._start_time = time.time()
         self._first_token_time = None
         self._chunk_count = 0
@@ -195,10 +193,10 @@ def _finalize_stream(self) -> None:
             self._span.set_attribute(MessageAttributes.COMPLETION_FINISH_REASON.format(i=0), self._finish_reason)
 
         # Create tool spans for each tool call
-        if len(self._tool_calls) > 0 and self._tracer is not None:
+        if len(self._tool_calls) > 0:
             for idx, tool_call in self._tool_calls.items():
                 # Create a child span for this tool call
-                _create_tool_span(self._span, tool_call, self._tracer)
+                _create_tool_span(self._span, tool_call)
 
         # Set usage if available from the API
         if self._usage is not None:
@@ -237,19 +235,17 @@ def _finalize_stream(self) -> None:
 class OpenAIAsyncStreamWrapper:
     """Async wrapper for OpenAI Chat Completions streaming responses."""
 
-    def __init__(self, stream: Any, span: Span, request_kwargs: dict, tracer=None):
+    def __init__(self, stream: Any, span: Span, request_kwargs: dict):
         """Initialize the async stream wrapper.
 
         Args:
             stream: The original OpenAI async stream object
             span: The OpenTelemetry span for tracking
             request_kwargs: Original request parameters for context
-            tracer: The OpenTelemetry tracer for creating child spans
         """
         self._stream = stream
         self._span = span
         self._request_kwargs = request_kwargs
-        self._tracer = tracer
         self._start_time = time.time()
         self._first_token_time = None
         self._chunk_count = 0
@@ -356,10 +352,10 @@ def chat_completion_stream_wrapper(tracer, wrapped, instance, args, kwargs):
     if is_streaming:
         # Wrap the stream
         context_api.detach(token)
-        return OpenaiStreamWrapper(response, span, kwargs, tracer)
+        return OpenaiStreamWrapper(response, span, kwargs)
     else:
         # Handle non-streaming response
-        response_attributes = handle_chat_attributes(kwargs=kwargs, return_value=response, span=span, tracer=tracer)
+        response_attributes = handle_chat_attributes(kwargs=kwargs, return_value=response, span=span)
 
         for key, value in response_attributes.items():
             if key not in request_attributes:  # Avoid overwriting request attributes
@@ -421,10 +417,10 @@ async def async_chat_completion_stream_wrapper(tracer, wrapped, instance, args,
     if is_streaming:
         # Wrap the stream
         context_api.detach(token)
-        return OpenAIAsyncStreamWrapper(response, span, kwargs, tracer)
+        return OpenAIAsyncStreamWrapper(response, span, kwargs)
     else:
         # Handle non-streaming response
-        response_attributes = handle_chat_attributes(kwargs=kwargs, return_value=response, span=span, tracer=tracer)
+        response_attributes = handle_chat_attributes(kwargs=kwargs, return_value=response, span=span)
 
         for key, value in response_attributes.items():
             if key not in request_attributes:  # Avoid overwriting request attributes
```

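With the tracer argument gone, the wrapper classes keep only the stream, its span, the request kwargs, and the timing/chunk counters. Below is a minimal, self-contained sketch of that wrap-and-count pattern, not AgentOps' actual class: the iteration logic and the example stream are illustrative, while the field names (`_start_time`, `_first_token_time`, `_chunk_count`) follow the diff above.

```python
import time
from typing import Any, Iterable, Iterator


class ChunkCountingStream:
    """Toy stand-in for the wrap-a-stream pattern: forward chunks, keep timing stats."""

    def __init__(self, stream: Iterable[Any]):
        self._stream = stream
        self._start_time = time.time()   # when the wrapper was created
        self._first_token_time = None    # set when the first chunk arrives
        self._chunk_count = 0            # incremented per streamed chunk

    def __iter__(self) -> Iterator[Any]:
        for chunk in self._stream:
            if self._first_token_time is None:
                self._first_token_time = time.time()
            self._chunk_count += 1
            yield chunk
        # A real wrapper would finalize its span here (finish reason, usage, tool spans).


chunks = ChunkCountingStream(["Hel", "lo", "!"])
print("".join(chunks), "chunks:", chunks._chunk_count)
```

The real wrappers apply the same forwarding to both sync and async iteration and finalize the span (finish reason, usage, tool spans) once the stream is exhausted.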
agentops/instrumentation/providers/openai/wrappers/chat.py

Lines changed: 7 additions & 7 deletions
```diff
@@ -21,22 +21,24 @@
 from agentops.semconv.span_kinds import AgentOpsSpanKindValues
 
 from opentelemetry import context as context_api
-from opentelemetry.trace import SpanKind, Status, StatusCode
+from opentelemetry.trace import SpanKind, Status, StatusCode, get_tracer
 
 logger = logging.getLogger(__name__)
 
 LLM_REQUEST_TYPE = LLMRequestTypeValues.CHAT
 
 
-def _create_tool_span(parent_span, tool_call_data, tracer):
+def _create_tool_span(parent_span, tool_call_data):
     """
     Create a distinct span for each tool call.
 
     Args:
         parent_span: The parent LLM span
         tool_call_data: The tool call data dictionary
-        tracer: The OpenTelemetry tracer instance
     """
+    # Get the tracer for this module
+    tracer = get_tracer(__name__)
+
     # Create a child span for the tool call
     with tracer.start_as_current_span(
         name=f"tool_call.{tool_call_data['function']['name']}",
@@ -61,7 +63,6 @@ def handle_chat_attributes(
     kwargs: Optional[Dict] = None,
     return_value: Optional[Any] = None,
     span: Optional[Span] = None,
-    tracer: Optional[Any] = None,
 ) -> AttributeMap:
     """Extract attributes from chat completion calls.
 
@@ -73,7 +74,6 @@ def handle_chat_attributes(
         kwargs: Method keyword arguments
         return_value: Method return value
         span: The parent span for creating tool spans
-        tracer: The OpenTelemetry tracer for creating child spans
     """
     attributes = {
         SpanAttributes.LLM_REQUEST_TYPE: LLM_REQUEST_TYPE.value,
@@ -235,7 +235,7 @@ def handle_chat_attributes(
         # Tool calls
         if "tool_calls" in message:
             tool_calls = message["tool_calls"]
-            if tool_calls and span is not None and tracer is not None:
+            if tool_calls and span is not None:
                 for i, tool_call in enumerate(tool_calls):
                     # Convert tool_call to the format expected by _create_tool_span
                     function = tool_call.get("function", {})
@@ -248,7 +248,7 @@ def handle_chat_attributes(
                         }
                     }
                     # Create a child span for this tool call
-                    _create_tool_span(span, tool_call_data, tracer)
+                    _create_tool_span(span, tool_call_data)
 
     # Prompt filter results
     if "prompt_filter_results" in response_dict:
```

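The key enabler is in `_create_tool_span`: instead of receiving a tracer from its caller, it asks the OpenTelemetry API for the module's tracer via `get_tracer(__name__)` and opens the tool-call span under whatever span is current. A rough, runnable sketch of that pattern, assuming the standard `opentelemetry-api` package; the `tool_call.{name}` span naming follows the diff above, while the attribute keys and the `record_tool_call` helper are illustrative, not AgentOps' actual semantic conventions:

```python
from opentelemetry import trace
from opentelemetry.trace import SpanKind

# Each module asks the global provider for its own tracer instead of receiving
# one as a parameter (a no-op tracer is returned when no SDK is configured).
tracer = trace.get_tracer(__name__)


def record_tool_call(name: str, arguments: str) -> None:
    # Opens a child span under whatever span is current in the active context.
    with tracer.start_as_current_span(f"tool_call.{name}", kind=SpanKind.INTERNAL) as span:
        # Attribute keys here are made up for illustration only.
        span.set_attribute("tool.name", name)
        span.set_attribute("tool.arguments", arguments)


record_tool_call("get_weather", '{"city": "Paris"}')
```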