Skip to content

Commit 1f46733

Browse files
committed
handle streaming generate edge case for ollama
1 parent 4daffdc commit 1f46733

File tree

2 files changed

+7
-5
lines changed

2 files changed

+7
-5
lines changed

src/langtrace_python_sdk/instrumentation/ollama/patch.py

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -165,22 +165,24 @@ def _set_input_attributes(span, kwargs, attributes):
 
 def _handle_streaming_response(span, response, api):
     accumulated_tokens = None
+    print("APIIII", api)
     if api == "chat":
         accumulated_tokens = {"message": {"content": "", "role": ""}}
-    if api == "completion":
+    if api == "completion" or api == "generate":
         accumulated_tokens = {"response": ""}
     span.add_event(Event.STREAM_START.value)
     try:
         for chunk in response:
+            content = None
             if api == "chat":
+                content = chunk["message"]["content"]
                 accumulated_tokens["message"]["content"] += chunk["message"]["content"]
                 accumulated_tokens["message"]["role"] = chunk["message"]["role"]
             if api == "generate":
+                content = chunk["response"]
                 accumulated_tokens["response"] += chunk["response"]
 
-            set_event_completion_chunk(
-                span, chunk.get("response") or chunk.get("message").get("content")
-            )
+            set_event_completion_chunk(span, content)
 
             _set_response_attributes(span, chunk | accumulated_tokens)
     finally:
Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
__version__ = "2.2.16"
1+
__version__ = "2.2.17"

0 commit comments

Comments (0)