17 changes: 5 additions & 12 deletions pydantic_ai_slim/pydantic_ai/models/google.py
@@ -471,11 +471,9 @@ def _process_response(self, response: GenerateContentResponse) -> ModelResponse:
                raise UnexpectedModelBehavior(
                    f'Content filter {raw_finish_reason.value!r} triggered', response.model_dump_json()
                )
-            else:
-                raise UnexpectedModelBehavior(
-                    'Content field missing from Gemini response', response.model_dump_json()
-                )  # pragma: no cover
-        parts = candidate.content.parts or []
+            parts = []
+        else:
+            parts = candidate.content.parts or []
Collaborator

Note that we could get here if candidate.content is None

Contributor Author

If candidate.content is None, parts would be [] and we would continue with that, no?

Collaborator

Ah, you're totally right, I wasn't reading it right.


        usage = _metadata_as_usage(response)
        return _process_response_from_parts(
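To make the non-streaming behaviour discussed in the thread above concrete, here is a minimal, self-contained sketch (hypothetical stand-in types and names, not the actual pydantic-ai or google-genai classes): when a candidate has no content for a benign reason, the code now falls back to an empty parts list instead of raising.

# Hypothetical illustration of the branching above; Candidate, Content and
# extract_parts are stand-ins, not pydantic-ai or google-genai names.
from dataclasses import dataclass


@dataclass
class Content:
    parts: list[str] | None = None


@dataclass
class Candidate:
    content: Content | None = None
    finish_reason: str | None = None


def extract_parts(candidate: Candidate) -> list[str]:
    if candidate.content is None or not candidate.content.parts:
        if candidate.finish_reason == 'content_filter':
            raise RuntimeError(f'Content filter {candidate.finish_reason!r} triggered')
        return []  # content missing for a benign reason: carry on with no parts
    return candidate.content.parts


# A candidate with no content now yields an empty part list instead of raising
# 'Content field missing from Gemini response'.
assert extract_parts(Candidate(content=None, finish_reason='stop')) == []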
@@ -649,17 +647,12 @@ async def _get_event_iterator(self) -> AsyncIterator[ModelResponseStreamEvent]:
            # )

            if candidate.content is None or candidate.content.parts is None:
-                if self.finish_reason == 'stop':  # pragma: no cover
-                    # Normal completion - skip this chunk
-                    continue
-                elif self.finish_reason == 'content_filter' and raw_finish_reason:  # pragma: no cover
+                if self.finish_reason == 'content_filter' and raw_finish_reason:  # pragma: no cover
                    raise UnexpectedModelBehavior(
                        f'Content filter {raw_finish_reason.value!r} triggered', chunk.model_dump_json()
                    )
-                else:  # pragma: no cover
-                    raise UnexpectedModelBehavior(
-                        'Content field missing from streaming Gemini response', chunk.model_dump_json()
-                    )
+                continue

            parts = candidate.content.parts
Collaborator

Note that we could get here if candidate.content is None

Contributor Author

Wouldn't that hit continue and thus let us proceed with the next chunk? I'm not sure I follow you right now.

Collaborator

You're quite right, I think I had been at my desk for too long when I reviewed this :)

            if not parts:
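The streaming path discussed in the thread above follows the same idea; a minimal, self-contained sketch (plain dicts standing in for the real chunk objects, not the google-genai streaming API): chunks without content are skipped with continue unless a content filter ended the stream.

# Hypothetical sketch of the streaming behaviour; plain dicts stand in for chunks.
from collections.abc import Iterator


def iter_parts(chunks: list[dict], finish_reason: str | None = None) -> Iterator[str]:
    for chunk in chunks:
        parts = chunk.get('parts')
        if not parts:
            if finish_reason == 'content_filter':
                raise RuntimeError('Content filter triggered')
            continue  # chunk without content: skip it and move on to the next one
        yield from parts


# Empty chunks are skipped rather than raising, so the stream keeps flowing.
assert list(iter_parts([{'parts': ['a']}, {}, {'parts': ['b']}])) == ['a', 'b']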
9 changes: 8 additions & 1 deletion tests/models/test_google.py
@@ -1895,6 +1895,7 @@ def dummy() -> None: ... # pragma: no cover
            PartDeltaEvent(index=1, delta=TextPartDelta(content_delta=IsStr())),
            PartDeltaEvent(index=1, delta=TextPartDelta(content_delta=IsStr())),
            PartDeltaEvent(index=1, delta=TextPartDelta(content_delta=IsStr())),
+            PartDeltaEvent(index=1, delta=TextPartDelta(content_delta=IsStr())),
            PartEndEvent(
                index=1,
                part=TextPart(
@@ -3081,7 +3082,13 @@ async def test_google_vertexai_image_generation(allow_model_requests: None, vert
    agent = Agent(model, output_type=BinaryImage)

    result = await agent.run('Generate an image of an axolotl.')
-    assert result.output == snapshot(BinaryImage(data=IsBytes(), media_type='image/png', identifier='b037a4'))
+    assert result.output == snapshot(
+        BinaryImage(
+            data=IsBytes(),
+            media_type='image/png',
+            identifier='f3edd8',
+        )
+    )


async def test_google_httpx_client_is_not_closed(allow_model_requests: None, gemini_api_key: str):