Skip to content

Commit 8fa1b28

Browse files
committed
remove `handle_streaming_response` from the Anthropic instrumentation (streaming is now handled by `StreamWrapper`)
1 parent 2e0fc75 commit 8fa1b28

File tree

1 file changed

+6
-58
lines changed
  • src/langtrace_python_sdk/instrumentation/anthropic

1 file changed

+6
-58
lines changed

src/langtrace_python_sdk/instrumentation/anthropic/patch.py

Lines changed: 6 additions & 58 deletions
Original file line numberDiff line numberDiff line change
@@ -17,16 +17,18 @@
1717
import json
1818

1919
from langtrace.trace_attributes import Event, LLMSpanAttributes
20-
from langtrace_python_sdk.utils import set_span_attribute, silently_fail
20+
from langtrace_python_sdk.utils import set_span_attribute
21+
from langtrace_python_sdk.utils.silently_fail import silently_fail
22+
2123
from langtrace_python_sdk.utils.llm import (
24+
StreamWrapper,
2225
get_extra_attributes,
2326
get_langtrace_attributes,
2427
get_llm_request_attributes,
2528
get_llm_url,
2629
get_span_name,
2730
is_streaming,
2831
set_event_completion,
29-
set_event_completion_chunk,
3032
set_usage_attributes,
3133
)
3234
from opentelemetry.trace import SpanKind
@@ -83,61 +85,7 @@ def traced_method(wrapped, instance, args, kwargs):
8385
span.end()
8486
raise
8587

86-
def handle_streaming_response(result, span):
    """Iterate an Anthropic streaming response, yielding each text chunk.

    While consuming the stream this records, on *span*: the response model
    attribute (when a chunk carries one), per-chunk completion events, and
    accumulated input/output token usage. The span is always finalized in
    the ``finally`` block (STREAM_END event, usage attributes, aggregated
    completion event, OK status, ``span.end()``), even if the consumer
    abandons the generator early.
    """
    span.add_event(Event.STREAM_START.value)
    aggregated_chunks = []
    prompt_tokens = 0
    completion_tokens = 0
    try:
        for chunk in result:
            message = getattr(chunk, "message", None)

            # Record the model name when the chunk's message exposes one.
            if message is not None and getattr(message, "model", None) is not None:
                span.set_attribute(
                    SpanAttributes.LLM_RESPONSE_MODEL, chunk.message.model
                )

            delta = getattr(chunk, "delta", None)
            text = getattr(delta, "text", "") if delta is not None else ""
            # Aggregate every chunk (empty or not) so the full completion
            # can be emitted once the stream is exhausted.
            aggregated_chunks.append(text if len(text) > 0 else "")

            # Token usage may arrive on any chunk's message; accumulate it.
            usage = getattr(message, "usage", None)
            if usage is not None:
                prompt_tokens += getattr(usage, "input_tokens", 0)
                completion_tokens += getattr(usage, "output_tokens", 0)

            # Emit a completion-chunk event only for non-empty text.
            if text:
                set_event_completion_chunk(span, "".join(text))

            yield text
    finally:
        # Finalize the span after processing all chunks (or on early exit).
        span.add_event(Event.STREAM_END.value)
        set_usage_attributes(
            span, {"input_tokens": prompt_tokens, "output_tokens": completion_tokens}
        )
        completion = [{"role": "assistant", "content": "".join(aggregated_chunks)}]
        set_event_completion(span, completion)

        span.set_status(StatusCode.OK)
        span.end()
140-
88+
@silently_fail
14189
def set_response_attributes(result, span, kwargs):
14290
if not is_streaming(kwargs):
14391
if hasattr(result, "content") and result.content is not None:
@@ -174,7 +122,7 @@ def set_response_attributes(result, span, kwargs):
174122
span.end()
175123
return result
176124
else:
177-
return handle_streaming_response(result, span)
125+
return StreamWrapper(result, span)
178126

179127
# return the wrapped method
180128
return traced_method

0 commit comments

Comments
 (0)