Skip to content

Commit 2d77b5c

Browse files
committed
Merge branch 'development' of github.com:Scale3-Labs/langtrace-python-sdk into development
2 parents 939d8ec + 8140d0f commit 2d77b5c

File tree

3 files changed

+12
-7
lines changed

3 files changed

+12
-7
lines changed

src/langtrace_python_sdk/constants/instrumentation/common.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@
55
"gpt-4-1106-preview": "cl100k_base",
66
"gpt-4-1106-vision-preview": "cl100k_base",
77
"gpt-4o": "o200k_base",
8+
"gpt-4o-mini": "o200k_base",
89
}
910

1011
SERVICE_PROVIDERS = {

src/langtrace_python_sdk/instrumentation/ollama/patch.py

Lines changed: 10 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -167,20 +167,21 @@ def _handle_streaming_response(span, response, api):
167167
accumulated_tokens = None
168168
if api == "chat":
169169
accumulated_tokens = {"message": {"content": "", "role": ""}}
170-
if api == "completion":
170+
if api == "completion" or api == "generate":
171171
accumulated_tokens = {"response": ""}
172172
span.add_event(Event.STREAM_START.value)
173173
try:
174174
for chunk in response:
175+
content = None
175176
if api == "chat":
177+
content = chunk["message"]["content"]
176178
accumulated_tokens["message"]["content"] += chunk["message"]["content"]
177179
accumulated_tokens["message"]["role"] = chunk["message"]["role"]
178180
if api == "generate":
181+
content = chunk["response"]
179182
accumulated_tokens["response"] += chunk["response"]
180183

181-
set_event_completion_chunk(
182-
span, chunk.get("response") or chunk.get("message").get("content")
183-
)
184+
set_event_completion_chunk(span, content)
184185

185186
_set_response_attributes(span, chunk | accumulated_tokens)
186187
finally:
@@ -196,19 +197,22 @@ async def _ahandle_streaming_response(span, response, api):
196197
accumulated_tokens = None
197198
if api == "chat":
198199
accumulated_tokens = {"message": {"content": "", "role": ""}}
199-
if api == "completion":
200+
if api == "completion" or api == "generate":
200201
accumulated_tokens = {"response": ""}
201202

202203
span.add_event(Event.STREAM_START.value)
203204
try:
204205
async for chunk in response:
206+
content = None
205207
if api == "chat":
208+
content = chunk["message"]["content"]
206209
accumulated_tokens["message"]["content"] += chunk["message"]["content"]
207210
accumulated_tokens["message"]["role"] = chunk["message"]["role"]
208211
if api == "generate":
212+
content = chunk["response"]
209213
accumulated_tokens["response"] += chunk["response"]
210214

211-
set_event_completion_chunk(span, chunk)
215+
set_event_completion_chunk(span, content)
212216
_set_response_attributes(span, chunk | accumulated_tokens)
213217
finally:
214218
# Finalize span after processing all chunks
Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
__version__ = "2.2.15"
1+
__version__ = "2.2.17"

0 commit comments

Comments (0)