Commit e1dce63

add completion and prompt events

1 parent 5816cdc · commit e1dce63

11 files changed: +105 -164 lines changed

src/langtrace_python_sdk/instrumentation/anthropic/patch.py
Lines changed: 13 additions & 20 deletions

@@ -24,6 +24,7 @@
     get_llm_request_attributes,
     get_llm_url,
     is_streaming,
+    set_event_completion,
     set_usage_attributes,
 )
 from opentelemetry.trace import SpanKind
@@ -132,10 +133,9 @@ def handle_streaming_response(result, span):
         set_usage_attributes(
             span, {"input_tokens": input_tokens, "output_tokens": output_tokens}
         )
-        span.set_attribute(
-            SpanAttributes.LLM_COMPLETIONS,
-            json.dumps([{"role": "assistant", "content": "".join(result_content)}]),
-        )
+        completion = [{"role": "assistant", "content": "".join(result_content)}]
+        set_event_completion(span, completion)
+
         span.set_status(StatusCode.OK)
         span.end()

@@ -145,25 +145,18 @@ def set_response_attributes(result, span, kwargs):
         set_span_attribute(
             span, SpanAttributes.LLM_RESPONSE_MODEL, result.model
         )
-        set_span_attribute(
-            span,
-            SpanAttributes.LLM_COMPLETIONS,
-            json.dumps(
-                [
-                    {
-                        "role": result.role if result.role else "assistant",
-                        "content": result.content[0].text,
-                        "type": result.content[0].type,
-                    }
-                ]
-            ),
-        )
+        completion = [
+            {
+                "role": result.role if result.role else "assistant",
+                "content": result.content[0].text,
+                "type": result.content[0].type,
+            }
+        ]
+        set_event_completion(span, completion)

     else:
         responses = []
-        set_span_attribute(
-            span, SpanAttributes.LLM_COMPLETIONS, json.dumps(responses)
-        )
+        set_event_completion(span, responses)

     if (
         hasattr(result, "system_fingerprint")

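Note (not part of the commit): set_event_completion is the new helper added to src/langtrace_python_sdk/utils/llm.py further down in this diff. The same substitution repeats in every instrumentation touched here; the completion payload that used to be serialized into the LLM_COMPLETIONS span attribute is now attached to the span as an event. Condensed, the pattern is:

    # Before: completion stored as a JSON-encoded span attribute.
    span.set_attribute(SpanAttributes.LLM_COMPLETIONS, json.dumps(completion))

    # After: completion attached as a span event via the new helper,
    # which wraps span.add_event() (see utils/llm.py later in this diff).
    set_event_completion(span, completion)
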
src/langtrace_python_sdk/instrumentation/cohere/patch.py
Lines changed: 8 additions & 20 deletions

@@ -21,6 +21,7 @@
     get_llm_request_attributes,
     get_extra_attributes,
     get_llm_url,
+    set_event_completion,
     set_usage_attributes,
 )
 from langtrace.trace_attributes import Event, LLMSpanAttributes
@@ -278,27 +279,19 @@ def traced_method(wrapped, instance, args, kwargs):
                     }
                     for item in result.chat_history
                 ]
-                span.set_attribute(
-                    SpanAttributes.LLM_COMPLETIONS, json.dumps(responses)
-                )
+                set_event_completion(span, responses)
+
             else:
                 responses = [{"role": "CHATBOT", "content": result.text}]
-                span.set_attribute(
-                    SpanAttributes.LLM_COMPLETIONS, json.dumps(responses)
-                )
+                set_event_completion(span, responses)
+
         elif hasattr(result, "tool_calls") and result.tool_calls is not None:
             tool_calls = []
             for tool_call in result.tool_calls:
                 tool_calls.append(tool_call.json())
             span.set_attribute(
                 SpanAttributes.LLM_TOOL_RESULTS, json.dumps(tool_calls)
             )
-            span.set_attribute(SpanAttributes.LLM_COMPLETIONS, json.dumps([]))
-        else:
-            responses = []
-            span.set_attribute(
-                SpanAttributes.LLM_COMPLETIONS, json.dumps(responses)
-            )

         # Get the usage
         if hasattr(result, "meta") and result.meta is not None:
@@ -467,18 +460,13 @@ def traced_method(wrapped, instance, args, kwargs):
                             }
                             for item in response.chat_history
                         ]
-                        span.set_attribute(
-                            SpanAttributes.LLM_COMPLETIONS,
-                            json.dumps(responses),
-                        )
+                        set_event_completion(span, responses)
+
                     else:
                         responses = [
                             {"role": "CHATBOT", "content": response.text}
                         ]
-                        span.set_attribute(
-                            SpanAttributes.LLM_COMPLETIONS,
-                            json.dumps(responses),
-                        )
+                        set_event_completion(span, responses)

                 # Get the usage
                 if hasattr(response, "meta") and response.meta is not None:

src/langtrace_python_sdk/instrumentation/groq/patch.py
Lines changed: 16 additions & 39 deletions

@@ -29,6 +29,7 @@
     get_llm_request_attributes,
     get_llm_url,
     get_langtrace_attributes,
+    set_event_completion,
     set_usage_attributes,
 )
 from langtrace_python_sdk.constants.instrumentation.common import (
@@ -139,18 +140,8 @@ def traced_method(wrapped, instance, args, kwargs):
                     }
                     for choice in result.choices
                 ]
-                set_span_attribute(
-                    span,
-                    SpanAttributes.LLM_COMPLETIONS,
-                    json.dumps(responses),
-                )
-            else:
-                responses = []
-                set_span_attribute(
-                    span,
-                    SpanAttributes.LLM_COMPLETIONS,
-                    json.dumps(responses),
-                )
+                set_event_completion(span, responses)
+
             if (
                 hasattr(result, "system_fingerprint")
                 and result.system_fingerprint is not None
@@ -270,11 +261,8 @@ handle_streaming_response(
            span,
            {"input_tokens": prompt_tokens, "output_tokens": completion_tokens},
        )
-
-        set_span_attribute(
-            span,
-            SpanAttributes.LLM_COMPLETIONS,
-            json.dumps([{"role": "assistant", "content": "".join(result_content)}]),
+        set_event_completion(
+            span, [{"role": "assistant", "content": "".join(result_content)}]
        )

        span.set_status(StatusCode.OK)
@@ -379,18 +367,9 @@ async def traced_method(wrapped, instance, args, kwargs):
                    }
                    for choice in result.choices
                ]
-                set_span_attribute(
-                    span,
-                    SpanAttributes.LLM_COMPLETIONS,
-                    json.dumps(responses),
-                )
-            else:
-                responses = []
-                set_span_attribute(
-                    span,
-                    SpanAttributes.LLM_COMPLETIONS,
-                    json.dumps(responses),
-                )
+
+                set_event_completion(span, responses)
+
            if (
                hasattr(result, "system_fingerprint")
                and result.system_fingerprint is not None
@@ -515,17 +494,15 @@ async def ahandle_streaming_response(
            span,
            {"input_tokens": prompt_tokens, "output_tokens": completion_tokens},
        )
-        set_span_attribute(
+
+        set_event_completion(
            span,
-            SpanAttributes.LLM_COMPLETIONS,
-            json.dumps(
-                [
-                    {
-                        "role": "assistant",
-                        "content": "".join(result_content),
-                    }
-                ]
-            ),
+            [
+                {
+                    "role": "assistant",
+                    "content": "".join(result_content),
+                }
+            ],
        )

        span.set_status(StatusCode.OK)

src/langtrace_python_sdk/instrumentation/ollama/patch.py
Lines changed: 4 additions & 9 deletions

@@ -7,6 +7,7 @@
     get_langtrace_attributes,
     get_llm_request_attributes,
     get_llm_url,
+    set_event_completion,
 )
 from langtrace_python_sdk.utils.silently_fail import silently_fail
 from langtrace_python_sdk.constants.instrumentation.common import (
@@ -130,17 +131,11 @@ def _set_response_attributes(span, response):
         response.get("done_reason"),
     )
     if "message" in response:
-        set_span_attribute(
-            span,
-            SpanAttributes.LLM_COMPLETIONS,
-            json.dumps([response.get("message")]),
-        )
+        set_event_completion(span, [response.get("message")])

     if "response" in response:
-        set_span_attribute(
-            span,
-            SpanAttributes.LLM_COMPLETIONS,
-            json.dumps([{"role": "assistant", "content": response.get("response")}]),
+        set_event_completion(
+            span, [{"role": "assistant", "content": response.get("response")}]
         )


src/langtrace_python_sdk/instrumentation/openai/patch.py
Lines changed: 13 additions & 31 deletions

@@ -42,6 +42,7 @@
     get_llm_url,
     get_tool_calls,
     is_streaming,
+    set_event_completion,
 )
 from openai._types import NOT_GIVEN
 from opentelemetry.trace.span import Span
@@ -93,12 +94,7 @@ def traced_method(wrapped, instance, args, kwargs):
                     },
                 }
             ]
-            span.add_event(
-                Event.RESPONSE.value,
-                attributes={
-                    SpanAttributes.LLM_COMPLETIONS: json.dumps(response)
-                },
-            )
+            set_event_completion(span, response)

             span.set_status(StatusCode.OK)
             return result
@@ -162,12 +158,7 @@ async def traced_method(wrapped, instance, args, kwargs):
                     },
                 }
             ]
-            span.add_event(
-                Event.RESPONSE.value,
-                attributes={
-                    SpanAttributes.LLM_COMPLETIONS: json.dumps(response)
-                },
-            )
+            set_event_completion(span, response)

             span.set_status(StatusCode.OK)
             return result
@@ -230,10 +221,7 @@ def traced_method(wrapped, instance, args, kwargs):
                 }
             )

-            span.add_event(
-                Event.RESPONSE.value,
-                attributes={SpanAttributes.LLM_COMPLETIONS: json.dumps(response)},
-            )
+            set_event_completion(span, response)

             span.set_status(StatusCode.OK)
             return result
@@ -289,18 +277,14 @@ def cleanup(self):
            SpanAttributes.LLM_USAGE_TOTAL_TOKENS,
            self.prompt_tokens + self.completion_tokens,
        )
-
-        set_span_attribute(
+        set_event_completion(
            self.span,
-            SpanAttributes.LLM_COMPLETIONS,
-            json.dumps(
-                [
-                    {
-                        "role": "assistant",
-                        "content": "".join(self.result_content),
-                    }
-                ]
-            ),
+            [
+                {
+                    "role": "assistant",
+                    "content": "".join(self.result_content),
+                }
+            ],
        )

        self.span.set_status(StatusCode.OK)
@@ -789,10 +773,8 @@ def _set_response_attributes(span, kwargs, result):
            }
            for choice in result.choices
        ]
-        set_span_attribute(span, SpanAttributes.LLM_COMPLETIONS, json.dumps(responses))
-    else:
-        responses = []
-        set_span_attribute(span, SpanAttributes.LLM_COMPLETIONS, json.dumps(responses))
+        set_event_completion(span, responses)
+
     if (
         hasattr(result, "system_fingerprint")
         and result.system_fingerprint is not None

src/langtrace_python_sdk/utils/llm.py
Lines changed: 11 additions & 0 deletions

@@ -28,6 +28,7 @@
 from importlib_metadata import version as v
 import json
 from opentelemetry import baggage
+from opentelemetry.trace import Span


 def estimate_tokens(prompt):
@@ -179,3 +180,13 @@ def get_tool_calls(item):
     if hasattr(item, "tool_calls") and item.tool_calls is not None:
         return item.tool_calls
     return None
+
+
+def set_event_completion(span: Span, result_content):
+
+    span.add_event(
+        name=SpanAttributes.LLM_CONTENT_COMPLETION,
+        attributes={
+            SpanAttributes.LLM_COMPLETIONS: json.dumps(result_content),
+        },
+    )

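A quick way to see what the new helper actually records is to run it against an in-memory OpenTelemetry exporter. The snippet below is a sketch, not part of the commit; it assumes langtrace_python_sdk and opentelemetry-sdk are installed, and that SpanAttributes can be imported from langtrace.trace_attributes (the diff shows the constants being used in llm.py but not where they are imported from).

    import json

    from langtrace.trace_attributes import SpanAttributes  # import path assumed
    from langtrace_python_sdk.utils.llm import set_event_completion
    from opentelemetry.sdk.trace import TracerProvider
    from opentelemetry.sdk.trace.export import SimpleSpanProcessor
    from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter

    # Collect finished spans in memory instead of shipping them anywhere.
    exporter = InMemorySpanExporter()
    provider = TracerProvider()
    provider.add_span_processor(SimpleSpanProcessor(exporter))
    tracer = provider.get_tracer(__name__)

    with tracer.start_as_current_span("llm.completion") as span:
        # Same call shape the patched instrumentations use in this commit.
        set_event_completion(span, [{"role": "assistant", "content": "Hello!"}])

    event = exporter.get_finished_spans()[0].events[0]
    assert event.name == SpanAttributes.LLM_CONTENT_COMPLETION
    print(json.loads(event.attributes[SpanAttributes.LLM_COMPLETIONS]))
    # -> [{'role': 'assistant', 'content': 'Hello!'}]
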
src/tests/anthropic/test_anthropic.py
Lines changed: 4 additions & 8 deletions

@@ -4,6 +4,7 @@
 from langtrace_python_sdk.constants.instrumentation.anthropic import APIS
 from langtrace_python_sdk.constants import LANGTRACE_SDK_NAME
 from tests.utils import (
+    assert_completion_in_events,
     assert_langtrace_attributes,
     assert_prompt_in_events,
     assert_response_format,
@@ -34,18 +35,15 @@ def test_anthropic(anthropic_client, exporter):
     attributes = completion_span.attributes
     assert_langtrace_attributes(attributes, "Anthropic")
     assert_prompt_in_events(completion_span.events)
+    assert_completion_in_events(completion_span.events)
     assert attributes.get(SpanAttributes.LLM_URL) == "https://api.anthropic.com"
     assert (
         attributes.get(SpanAttributes.LLM_PATH) == APIS["MESSAGES_CREATE"]["ENDPOINT"]
     )
     assert attributes.get(SpanAttributes.LLM_REQUEST_MODEL) == llm_model_value
-    # assert json.loads(attributes.get(SpanAttributes.LLM_PROMPTS)) == json.dumps(
-    #     messages_value
-    # )
     assert attributes.get(SpanAttributes.LLM_IS_STREAMING) is False

     assert_token_count(attributes)
-    assert_response_format(attributes)


 @pytest.mark.vcr()
@@ -75,6 +73,7 @@ def test_anthropic_streaming(anthropic_client, exporter):

     assert_langtrace_attributes(attributes, "Anthropic")
     assert_prompt_in_events(streaming_span.events)
+    assert_completion_in_events(streaming_span.events)

     assert attributes.get(SpanAttributes.LLM_URL) == "https://api.anthropic.com"
     assert (
@@ -85,9 +84,6 @@

     events = streaming_span.events

-    assert (
-        len(events) - 3 == chunk_count
-    )  # -2 for start and end events and prompt event
+    assert len(events) - 4 == chunk_count

     assert_token_count(attributes)
-    assert_response_format(attributes)

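The assert_completion_in_events helper lives in tests/utils, which is not among the files shown above. Assuming it mirrors assert_prompt_in_events and only checks that the span carries the event emitted by set_event_completion, it would reduce to roughly the following (a hypothetical sketch, not code from this commit):

    from langtrace.trace_attributes import SpanAttributes  # import path assumed


    def assert_completion_in_events(events):
        # Look for the completion event that set_event_completion attaches.
        assert any(
            event.name == SpanAttributes.LLM_CONTENT_COMPLETION for event in events
        ), "expected a completion event on the span"
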