Skip to content

Commit d231f90

Browse files
authored
Merge pull request #294 from Scale3-Labs/obinna/S3EN-2711-remove-completion-chunks
remove chunk addition to events of the span
2 parents c7d80b1 + 1b47337 commit d231f90

File tree

5 files changed

+1
-15
lines changed

5 files changed

+1
-15
lines changed

src/langtrace_python_sdk/instrumentation/cohere/patch.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,6 @@
2323
get_llm_url,
2424
get_span_name,
2525
set_event_completion,
26-
set_event_completion_chunk,
2726
set_usage_attributes,
2827
)
2928
from langtrace.trace_attributes import Event, LLMSpanAttributes

src/langtrace_python_sdk/instrumentation/gemini/patch.py

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,6 @@
1313
get_span_name,
1414
is_streaming,
1515
set_event_completion,
16-
set_event_completion_chunk,
1716
set_span_attributes,
1817
set_usage_attributes,
1918
)
@@ -156,7 +155,6 @@ def build_streaming_response(span, response):
156155
item_to_yield = item
157156
complete_response += str(item.text)
158157
yield item_to_yield
159-
set_event_completion_chunk(span, item.text)
160158
if hasattr(item, "usage_metadata"):
161159
usage = item.usage_metadata
162160
input_tokens = usage.prompt_token_count
@@ -176,7 +174,6 @@ async def abuild_streaming_response(span, response):
176174
item_to_yield = item
177175
complete_response += str(item.text)
178176
yield item_to_yield
179-
set_event_completion_chunk(span, item.text)
180177
if hasattr(item, "usage_metadata"):
181178
usage = item.usage_metadata
182179
input_tokens = usage.prompt_token_count

src/langtrace_python_sdk/instrumentation/groq/patch.py

Lines changed: 0 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,6 @@
3131
get_langtrace_attributes,
3232
get_span_name,
3333
set_event_completion,
34-
set_event_completion_chunk,
3534
set_usage_attributes,
3635
)
3736
from langtrace_python_sdk.constants.instrumentation.common import (
@@ -245,14 +244,6 @@ def handle_streaming_response(
245244
else:
246245
content = []
247246

248-
set_event_completion_chunk(
249-
span,
250-
(
251-
"".join(content)
252-
if len(content) > 0 and content[0] is not None
253-
else ""
254-
),
255-
)
256247
result_content.append(content[0] if len(content) > 0 else "")
257248
yield chunk
258249
finally:

src/langtrace_python_sdk/instrumentation/ollama/patch.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,6 @@
88
get_llm_url,
99
get_span_name,
1010
set_event_completion,
11-
set_event_completion_chunk,
1211
)
1312
from langtrace_python_sdk.utils.silently_fail import silently_fail
1413
from langtrace_python_sdk.constants.instrumentation.common import SERVICE_PROVIDERS
Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
__version__ = "2.2.23"
1+
__version__ = "2.2.24"

0 commit comments

Comments (0)