@@ -603,6 +603,37 @@ async def test_no_delta(allow_model_requests: None):
         assert result.usage() == snapshot(RunUsage(requests=1, input_tokens=6, output_tokens=3))
 
 
+def none_delta_chunk(finish_reason: FinishReason | None = None) -> chat.ChatCompletionChunk:
+    choice = ChunkChoice(index=0, delta=ChoiceDelta())
+    # When using Azure OpenAI and an async content filter is enabled, the openai SDK can return None deltas.
+    choice.delta = None  # pyright: ignore[reportAttributeAccessIssue]
+    return chat.ChatCompletionChunk(
+        id='x',
+        choices=[choice],
+        created=1704067200,  # 2024-01-01
+        model='gpt-4o',
+        object='chat.completion.chunk',
+        usage=CompletionUsage(completion_tokens=1, prompt_tokens=2, total_tokens=3),
+    )
+
+
+async def test_none_delta(allow_model_requests: None):
+    stream = [
+        none_delta_chunk(),
+        text_chunk('hello '),
+        text_chunk('world'),
+    ]
+    mock_client = MockOpenAI.create_mock_stream(stream)
+    m = OpenAIChatModel('gpt-4o', provider=OpenAIProvider(openai_client=mock_client))
+    agent = Agent(m)
+
+    async with agent.run_stream('') as result:
+        assert not result.is_complete
+        assert [c async for c in result.stream_text(debounce_by=None)] == snapshot(['hello ', 'hello world'])
+        assert result.is_complete
+        assert result.usage() == snapshot(RunUsage(requests=1, input_tokens=6, output_tokens=3))
+
+
 @pytest.mark.filterwarnings('ignore:Set the `system_prompt_role` in the `OpenAIModelProfile` instead.')
 @pytest.mark.parametrize('system_prompt_role', ['system', 'developer', 'user', None])
 async def test_system_prompt_role(
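For context, and not part of the diff: a minimal sketch of how a streaming consumer might tolerate the None deltas this test exercises. The helper name collect_stream_text and its shape are illustrative assumptions, not pydantic-ai's actual streaming implementation; only the openai SDK types are real.

# Illustrative sketch only, not pydantic-ai's streaming code.
from collections.abc import Iterable

from openai.types.chat import ChatCompletionChunk


def collect_stream_text(chunks: Iterable[ChatCompletionChunk]) -> str:
    """Concatenate streamed text, skipping chunks whose delta is None."""
    parts: list[str] = []
    for chunk in chunks:
        if not chunk.choices:
            continue  # e.g. a trailing usage-only chunk
        delta = chunk.choices[0].delta
        if delta is None or delta.content is None:
            continue  # Azure's async content filter can yield such chunks
        parts.append(delta.content)
    return ''.join(parts)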