diff --git a/pydantic_ai_slim/pydantic_ai/models/google.py b/pydantic_ai_slim/pydantic_ai/models/google.py
index 8411c3b3cf..4978f90e8b 100644
--- a/pydantic_ai_slim/pydantic_ai/models/google.py
+++ b/pydantic_ai_slim/pydantic_ai/models/google.py
@@ -471,11 +471,9 @@ def _process_response(self, response: GenerateContentResponse) -> ModelResponse:
                 raise UnexpectedModelBehavior(
                     f'Content filter {raw_finish_reason.value!r} triggered', response.model_dump_json()
                 )
-            else:
-                raise UnexpectedModelBehavior(
-                    'Content field missing from Gemini response', response.model_dump_json()
-                )  # pragma: no cover
-        parts = candidate.content.parts or []
+            parts = []  # pragma: no cover
+        else:
+            parts = candidate.content.parts or []

         usage = _metadata_as_usage(response)
         return _process_response_from_parts(
@@ -649,17 +647,12 @@ async def _get_event_iterator(self) -> AsyncIterator[ModelResponseStreamEvent]:
             #     )

             if candidate.content is None or candidate.content.parts is None:
-                if self.finish_reason == 'stop':  # pragma: no cover
-                    # Normal completion - skip this chunk
-                    continue
-                elif self.finish_reason == 'content_filter' and raw_finish_reason:  # pragma: no cover
+                if self.finish_reason == 'content_filter' and raw_finish_reason:  # pragma: no cover
                     raise UnexpectedModelBehavior(
                         f'Content filter {raw_finish_reason.value!r} triggered', chunk.model_dump_json()
                     )
                 else:  # pragma: no cover
-                    raise UnexpectedModelBehavior(
-                        'Content field missing from streaming Gemini response', chunk.model_dump_json()
-                    )
+                    continue

             parts = candidate.content.parts
             if not parts: