Skip to content

Commit 7421545

Browse files
committed
continue to next chunk on None delta altogether + add test
1 parent f5bc8e9 commit 7421545

File tree

2 files changed

+36
-1
lines changed

2 files changed

+36
-1
lines changed

pydantic_ai_slim/pydantic_ai/models/openai.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1171,8 +1171,12 @@ async def _get_event_iterator(self) -> AsyncIterator[ModelResponseStreamEvent]:
11711171
except IndexError:
11721172
continue
11731173

1174+
if choice.delta is None: # pyright: ignore[reportUnnecessaryComparison]
1175+
continue
1176+
11741177
# Handle the text part of the response
1175-
if (delta := choice.delta) is not None and (content := delta.content) is not None: # pyright: ignore[reportUnnecessaryComparison]
1178+
content = choice.delta.content
1179+
if content is not None:
11761180
maybe_event = self._parts_manager.handle_text_delta(
11771181
vendor_part_id='content',
11781182
content=content,

tests/models/test_openai.py

Lines changed: 31 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -603,6 +603,37 @@ async def test_no_delta(allow_model_requests: None):
603603
assert result.usage() == snapshot(RunUsage(requests=1, input_tokens=6, output_tokens=3))
604604

605605

606+
def none_delta_chunk(finish_reason: FinishReason | None = None) -> chat.ChatCompletionChunk:
    """Build a streaming chunk whose single choice carries ``delta=None``.

    When using Azure OpenAI and an async content filter is enabled, the openai
    SDK can return None deltas, so the SDK type is deliberately violated here.
    The ``finish_reason`` parameter is accepted for signature parity with the
    other chunk helpers; it is not used by this fixture.
    """
    broken_choice = ChunkChoice(index=0, delta=ChoiceDelta())
    broken_choice.delta = None  # pyright: ignore[reportAttributeAccessIssue]
    usage = CompletionUsage(completion_tokens=1, prompt_tokens=2, total_tokens=3)
    return chat.ChatCompletionChunk(
        id='x',
        choices=[broken_choice],
        created=1704067200,  # 2024-01-01
        model='gpt-4o',
        object='chat.completion.chunk',
        usage=usage,
    )
618+
619+
620+
async def test_none_delta(allow_model_requests: None):
    """A chunk whose delta is None must be skipped, and streaming must continue with later chunks."""
    chunks = [
        none_delta_chunk(),
        text_chunk('hello '),
        text_chunk('world'),
    ]
    mock_client = MockOpenAI.create_mock_stream(chunks)
    model = OpenAIChatModel('gpt-4o', provider=OpenAIProvider(openai_client=mock_client))
    agent = Agent(model)

    async with agent.run_stream('') as result:
        assert not result.is_complete
        streamed = [c async for c in result.stream_text(debounce_by=None)]
        assert streamed == snapshot(['hello ', 'hello world'])
        assert result.is_complete
        assert result.usage() == snapshot(RunUsage(requests=1, input_tokens=6, output_tokens=3))
636+
606637
@pytest.mark.filterwarnings('ignore:Set the `system_prompt_role` in the `OpenAIModelProfile` instead.')
607638
@pytest.mark.parametrize('system_prompt_role', ['system', 'developer', 'user', None])
608639
async def test_system_prompt_role(

0 commit comments

Comments (0)