Skip to content

Commit 903f11e

Browse files
authored
Ignore empty text alongside tool calls when streaming from Ollama (#2286)
1 parent 0b3d020 commit 903f11e

File tree

3 files changed: +38 additions, −2 deletions

pydantic_ai_slim/pydantic_ai/models/openai.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1003,7 +1003,7 @@ async def _get_event_iterator(self) -> AsyncIterator[ModelResponseStreamEvent]:
10031003

10041004
# Handle the text part of the response
10051005
content = choice.delta.content
1006-
if content is not None:
1006+
if content:
10071007
yield self._parts_manager.handle_text_delta(vendor_part_id='content', content=content)
10081008

10091009
# Handle reasoning part of the response, present in DeepSeek models

tests/models/test_deepseek.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -87,6 +87,6 @@ async def test_deepseek_model_thinking_stream(allow_model_requests: None, deepse
8787
200: PartDeltaEvent(index=1, delta=TextPartDelta(content_delta=' there')),
8888
201: PartDeltaEvent(index=1, delta=TextPartDelta(content_delta='!')),
8989
},
90-
length=211,
90+
length=210,
9191
)
9292
)

tests/models/test_openai.py

Lines changed: 36 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -574,6 +574,42 @@ async def test_stream_native_output(allow_model_requests: None):
574574
assert result.is_complete
575575

576576

async def test_stream_tool_call_with_empty_text(allow_model_requests: None):
    """Regression test: an empty-string text delta arriving alongside a tool-call
    delta (as Ollama emits) must not create a spurious text part; the structured
    tool-call output should stream through unaffected.
    """
    # First chunk carries content='' together with the start of a tool call;
    # the remaining chunks stream the tool-call arguments, then an empty close.
    first_delta = ChoiceDelta(
        content='',  # Ollama will include an empty text delta even when it's going to call a tool
        tool_calls=[
            ChoiceDeltaToolCall(
                index=0, function=ChoiceDeltaToolCallFunction(name='final_result', arguments=None)
            )
        ],
    )
    stream = [
        chunk([first_delta]),
        struc_chunk(None, '{"first": "One'),
        struc_chunk(None, '", "second": "Two"'),
        struc_chunk(None, '}'),
        chunk([]),
    ]

    mock_client = MockOpenAI.create_mock_stream(stream)
    model = OpenAIModel('gpt-4o', provider=OpenAIProvider(openai_client=mock_client))
    agent = Agent(model, output_type=[str, MyTypedDict])

    async with agent.run_stream('') as result:
        assert not result.is_complete
        # Each streamed snapshot reflects the partially-parsed JSON so far.
        streamed = [c async for c in result.stream(debounce_by=None)]
        assert streamed == snapshot(
            [
                {'first': 'One'},
                {'first': 'One', 'second': 'Two'},
                {'first': 'One', 'second': 'Two'},
                {'first': 'One', 'second': 'Two'},
            ]
        )
    assert await result.get_output() == snapshot({'first': 'One', 'second': 'Two'})
577613
async def test_no_content(allow_model_requests: None):
578614
stream = [chunk([ChoiceDelta()]), chunk([ChoiceDelta()])]
579615
mock_client = MockOpenAI.create_mock_stream(stream)

Comments (0)