Skip to content

Commit e3c2b24

Browse files
committed
Merge branch 'development' of github.com:Scale3-Labs/langtrace-python-sdk into development
2 parents 85f6cef + 655fe76 commit e3c2b24

File tree

8 files changed

+52
-54
lines changed

8 files changed

+52
-54
lines changed

README.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -228,6 +228,11 @@ from langtrace_python_sdk import get_prompt_from_registry
228228
prompt = get_prompt_from_registry(<Registry ID>, options={"prompt_version": 1, "variables": {"foo": "bar"} })
229229
```
230230

231+
### Opt out of tracing prompt and completion data
232+
By default, prompt and completion data are captured. If you would like to opt out of capturing them, set the following environment variable:
233+
234+
`TRACE_PROMPT_COMPLETION_DATA=false`
235+
231236
## Supported integrations
232237

233238
Langtrace automatically captures traces from the following vendors:

src/langtrace_python_sdk/instrumentation/anthropic/patch.py

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@
2525
get_llm_url,
2626
is_streaming,
2727
set_event_completion,
28+
set_event_completion_chunk,
2829
set_usage_attributes,
2930
)
3031
from opentelemetry.trace import SpanKind
@@ -119,10 +120,7 @@ def handle_streaming_response(result, span):
119120
# Assuming span.add_event is part of a larger logging or event system
120121
# Add event for each chunk of content
121122
if content:
122-
span.add_event(
123-
Event.STREAM_OUTPUT.value,
124-
{SpanAttributes.LLM_CONTENT_COMPLETION_CHUNK: "".join(content)},
125-
)
123+
set_event_completion_chunk(span, "".join(content))
126124

127125
# Assuming this is part of a generator, yield chunk or aggregated content
128126
yield content

src/langtrace_python_sdk/instrumentation/cohere/patch.py

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,7 @@
2222
get_extra_attributes,
2323
get_llm_url,
2424
set_event_completion,
25+
set_event_completion_chunk,
2526
set_usage_attributes,
2627
)
2728
from langtrace.trace_attributes import Event, LLMSpanAttributes
@@ -403,10 +404,7 @@ def traced_method(wrapped, instance, args, kwargs):
403404
content = event.text
404405
else:
405406
content = ""
406-
span.add_event(
407-
Event.STREAM_OUTPUT.value,
408-
{SpanAttributes.LLM_CONTENT_COMPLETION_CHUNK: "".join(content)},
409-
)
407+
set_event_completion_chunk(span, "".join(content))
410408

411409
if (
412410
hasattr(event, "finish_reason")

src/langtrace_python_sdk/instrumentation/groq/patch.py

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -30,6 +30,7 @@
3030
get_llm_url,
3131
get_langtrace_attributes,
3232
set_event_completion,
33+
set_event_completion_chunk,
3334
set_usage_attributes,
3435
)
3536
from langtrace_python_sdk.constants.instrumentation.common import (
@@ -242,15 +243,14 @@ def handle_streaming_response(
242243
content = content + []
243244
else:
244245
content = []
245-
span.add_event(
246-
Event.STREAM_OUTPUT.value,
247-
{
248-
SpanAttributes.LLM_CONTENT_COMPLETION_CHUNK: (
249-
"".join(content)
250-
if len(content) > 0 and content[0] is not None
251-
else ""
252-
)
253-
},
246+
247+
set_event_completion_chunk(
248+
span,
249+
(
250+
"".join(content)
251+
if len(content) > 0 and content[0] is not None
252+
else ""
253+
),
254254
)
255255
result_content.append(content[0] if len(content) > 0 else "")
256256
yield chunk

src/langtrace_python_sdk/instrumentation/ollama/patch.py

Lines changed: 4 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@
66
get_llm_request_attributes,
77
get_llm_url,
88
set_event_completion,
9+
set_event_completion_chunk,
910
)
1011
from langtrace_python_sdk.utils.silently_fail import silently_fail
1112
from langtrace_python_sdk.constants.instrumentation.common import SERVICE_PROVIDERS
@@ -177,12 +178,8 @@ def _handle_streaming_response(span, response, api):
177178
if api == "generate":
178179
accumulated_tokens["response"] += chunk["response"]
179180

180-
span.add_event(
181-
Event.STREAM_OUTPUT.value,
182-
{
183-
SpanAttributes.LLM_CONTENT_COMPLETION_CHUNK: chunk.get("response")
184-
or chunk.get("message").get("content"),
185-
},
181+
set_event_completion_chunk(
182+
span, chunk.get("response") or chunk.get("message").get("content")
186183
)
187184

188185
_set_response_attributes(span, chunk | accumulated_tokens)
@@ -211,12 +208,7 @@ async def _ahandle_streaming_response(span, response, api):
211208
if api == "generate":
212209
accumulated_tokens["response"] += chunk["response"]
213210

214-
span.add_event(
215-
Event.STREAM_OUTPUT.value,
216-
{
217-
SpanAttributes.LLM_CONTENT_COMPLETION_CHUNK: json.dumps(chunk),
218-
},
219-
)
211+
set_event_completion_chunk(span, chunk)
220212
_set_response_attributes(span, chunk | accumulated_tokens)
221213
finally:
222214
# Finalize span after processing all chunks

src/langtrace_python_sdk/utils/__init__.py

Lines changed: 15 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -2,23 +2,32 @@
22
from .sdk_version_checker import SDKVersionChecker
33
from opentelemetry.trace import Span
44
from langtrace.trace_attributes import SpanAttributes
5+
import os
56

67

78
def set_span_attribute(span: Span, name, value):
89
if value is not None:
910
if value != "" or value != NOT_GIVEN:
1011
if name == SpanAttributes.LLM_PROMPTS:
11-
span.add_event(
12-
name=SpanAttributes.LLM_CONTENT_PROMPT,
13-
attributes={
14-
SpanAttributes.LLM_PROMPTS: value,
15-
},
16-
)
12+
set_event_prompt(span, value)
1713
else:
1814
span.set_attribute(name, value)
1915
return
2016

2117

18+
def set_event_prompt(span: Span, prompt):
19+
enabled = os.environ.get("TRACE_PROMPT_COMPLETION_DATA", "true")
20+
if enabled.lower() == "false":
21+
return
22+
23+
span.add_event(
24+
name=SpanAttributes.LLM_CONTENT_PROMPT,
25+
attributes={
26+
SpanAttributes.LLM_PROMPTS: prompt,
27+
},
28+
)
29+
30+
2231
def check_if_sdk_is_outdated():
2332
SDKVersionChecker().check()
2433
return

src/langtrace_python_sdk/utils/llm.py

Lines changed: 14 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -30,6 +30,7 @@
3030
from opentelemetry import baggage
3131
from opentelemetry.trace import Span
3232
from opentelemetry.trace.status import StatusCode
33+
import os
3334

3435

3536
def estimate_tokens(prompt):
@@ -42,6 +43,9 @@ def estimate_tokens(prompt):
4243

4344

4445
def set_event_completion_chunk(span: Span, chunk):
46+
enabled = os.environ.get("TRACE_PROMPT_COMPLETION_DATA", "true")
47+
if enabled.lower() == "false":
48+
return
4549
span.add_event(
4650
name=SpanAttributes.LLM_CONTENT_COMPLETION_CHUNK,
4751
attributes={
@@ -203,6 +207,9 @@ def get_tool_calls(item):
203207

204208

205209
def set_event_completion(span: Span, result_content):
210+
enabled = os.environ.get("TRACE_PROMPT_COMPLETION_DATA", "true")
211+
if enabled.lower() == "false":
212+
return
206213

207214
span.add_event(
208215
name=SpanAttributes.LLM_CONTENT_COMPLETION,
@@ -352,15 +359,9 @@ def process_chunk(self, chunk):
352359
)
353360
self.completion_tokens += token_counts
354361
content.append(tool_call.function.arguments)
355-
self.span.add_event(
356-
Event.STREAM_OUTPUT.value,
357-
{
358-
SpanAttributes.LLM_CONTENT_COMPLETION_CHUNK: (
359-
"".join(content)
360-
if len(content) > 0 and content[0] is not None
361-
else ""
362-
)
363-
},
362+
set_event_completion_chunk(
363+
self.span,
364+
"".join(content) if len(content) > 0 and content[0] is not None else "",
364365
)
365366
if content:
366367
self.result_content.append(content[0])
@@ -369,16 +370,11 @@ def process_chunk(self, chunk):
369370
token_counts = estimate_tokens(chunk.text)
370371
self.completion_tokens += token_counts
371372
content = [chunk.text]
372-
self.span.add_event(
373-
Event.STREAM_OUTPUT.value,
374-
{
375-
SpanAttributes.LLM_CONTENT_COMPLETION_CHUNK: (
376-
"".join(content)
377-
if len(content) > 0 and content[0] is not None
378-
else ""
379-
)
380-
},
373+
set_event_completion_chunk(
374+
self.span,
375+
"".join(content) if len(content) > 0 and content[0] is not None else "",
381376
)
377+
382378
if content:
383379
self.result_content.append(content[0])
384380

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
__version__ = "2.2.8"
1+
__version__ = "2.2.9"

0 commit comments

Comments
 (0)