Skip to content

Commit e1ab79f

Browse files
committed
Fix type checking errors for CachePoint
- Fix TypedDict mutation in anthropic.py using cast()
- Handle CachePoint in otel message conversion (skip for telemetry)
- Add CachePoint handling in all model providers for compatibility
- Models without caching support (Bedrock, Gemini, Google, HuggingFace, OpenAI) now filter out CachePoint markers

All pyright type checks now pass.
1 parent 41107f1 commit e1ab79f

File tree

6 files changed

+23
-3
lines changed

6 files changed

+23
-3
lines changed

pydantic_ai_slim/pydantic_ai/messages.py

Lines changed: 3 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -742,6 +742,9 @@ def otel_message_parts(self, settings: InstrumentationSettings) -> list[_otel_me
742742
if settings.include_content and settings.include_binary_content:
743743
converted_part['content'] = base64.b64encode(part.data).decode()
744744
parts.append(converted_part)
745+
elif isinstance(part, CachePoint):
746+
# CachePoint is a marker, not actual content - skip it for otel
747+
pass
745748
else:
746749
parts.append({'type': part.kind}) # pragma: no cover
747750
return parts

pydantic_ai_slim/pydantic_ai/models/anthropic.py

Lines changed: 4 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -654,11 +654,12 @@ def _add_cache_control_to_last_param(params: list[BetaContentBlockParam]) -> Non
654654

655655
# Only certain types support cache_control
656656
cacheable_types = {'text', 'tool_use', 'server_tool_use', 'image', 'tool_result'}
657-
if params[-1]['type'] not in cacheable_types:
658-
raise UserError(f'Cache control not supported for param type: {params[-1]["type"]}')
657+
last_param = cast(dict[str, Any], params[-1]) # Cast to dict for mutation
658+
if last_param['type'] not in cacheable_types:
659+
raise UserError(f'Cache control not supported for param type: {last_param["type"]}')
659660

660661
# Add cache_control to the last param
661-
params[-1]['cache_control'] = BetaCacheControlEphemeralParam(type='ephemeral')
662+
last_param['cache_control'] = BetaCacheControlEphemeralParam(type='ephemeral')
662663

663664
@staticmethod
664665
async def _map_user_prompt(

pydantic_ai_slim/pydantic_ai/models/bedrock.py

Lines changed: 4 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -18,6 +18,7 @@
1818
BinaryContent,
1919
BuiltinToolCallPart,
2020
BuiltinToolReturnPart,
21+
CachePoint,
2122
DocumentUrl,
2223
FinishReason,
2324
ImageUrl,
@@ -624,6 +625,9 @@ async def _map_user_prompt(part: UserPromptPart, document_count: Iterator[int])
624625
content.append({'video': video})
625626
elif isinstance(item, AudioUrl): # pragma: no cover
626627
raise NotImplementedError('Audio is not supported yet.')
628+
elif isinstance(item, CachePoint):
629+
# Bedrock doesn't support prompt caching via CachePoint in this implementation
630+
pass
627631
else:
628632
assert_never(item)
629633
return [{'role': 'user', 'content': content}]

pydantic_ai_slim/pydantic_ai/models/gemini.py

Lines changed: 4 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -21,6 +21,7 @@
2121
BinaryContent,
2222
BuiltinToolCallPart,
2323
BuiltinToolReturnPart,
24+
CachePoint,
2425
FilePart,
2526
FileUrl,
2627
ModelMessage,
@@ -391,6 +392,9 @@ async def _map_user_prompt(self, part: UserPromptPart) -> list[_GeminiPartUnion]
391392
else: # pragma: lax no cover
392393
file_data = _GeminiFileDataPart(file_data={'file_uri': item.url, 'mime_type': item.media_type})
393394
content.append(file_data)
395+
elif isinstance(item, CachePoint):
396+
# Gemini doesn't support prompt caching via CachePoint
397+
pass
394398
else:
395399
assert_never(item) # pragma: lax no cover
396400
return content

pydantic_ai_slim/pydantic_ai/models/google.py

Lines changed: 4 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -19,6 +19,7 @@
1919
BinaryContent,
2020
BuiltinToolCallPart,
2121
BuiltinToolReturnPart,
22+
CachePoint,
2223
FilePart,
2324
FileUrl,
2425
FinishReason,
@@ -602,6 +603,9 @@ async def _map_user_prompt(self, part: UserPromptPart) -> list[PartDict]:
602603
else:
603604
file_data_dict: FileDataDict = {'file_uri': item.url, 'mime_type': item.media_type}
604605
content.append({'file_data': file_data_dict}) # pragma: lax no cover
606+
elif isinstance(item, CachePoint):
607+
# Google Gemini doesn't support prompt caching via CachePoint
608+
pass
605609
else:
606610
assert_never(item)
607611
return content

pydantic_ai_slim/pydantic_ai/models/huggingface.py

Lines changed: 4 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -18,6 +18,7 @@
1818
BinaryContent,
1919
BuiltinToolCallPart,
2020
BuiltinToolReturnPart,
21+
CachePoint,
2122
DocumentUrl,
2223
FilePart,
2324
FinishReason,
@@ -447,6 +448,9 @@ async def _map_user_prompt(part: UserPromptPart) -> ChatCompletionInputMessage:
447448
raise NotImplementedError('DocumentUrl is not supported for Hugging Face')
448449
elif isinstance(item, VideoUrl):
449450
raise NotImplementedError('VideoUrl is not supported for Hugging Face')
451+
elif isinstance(item, CachePoint):
452+
# Hugging Face doesn't support prompt caching via CachePoint
453+
pass
450454
else:
451455
assert_never(item)
452456
return ChatCompletionInputMessage(role='user', content=content) # type: ignore

0 commit comments

Comments (0)