Commit 380e25c

clean up FileSearchTool comments
Removed unnecessary explanatory comments from the file search implementation. The code is self-explanatory and these comments were just adding noise.
1 parent 6acbd76 commit 380e25c

2 files changed (+0, -14 lines)
pydantic_ai_slim/pydantic_ai/models/google.py

Lines changed: 0 additions & 5 deletions
@@ -343,11 +343,6 @@ def _get_tools(self, model_request_parameters: ModelRequestParameters) -> list[T
             elif isinstance(tool, CodeExecutionTool):
                 tools.append(ToolDict(code_execution=ToolCodeExecutionDict()))
             elif isinstance(tool, FileSearchTool):  # pragma: no cover
-                # File Search Tool for Gemini API - tested via initialization tests
-                # The file_search tool uses file resource names (vector_store_ids) to search through uploaded files
-                # Note: This requires files to be uploaded via the Files API first
-                # The structure below is based on the Gemini File Search Tool announcement (Nov 2025)
-                # and may require adjustment when the official google-genai SDK is updated
                 tools.append(ToolDict(file_search={'file_names': tool.vector_store_ids}))  # type: ignore[reportGeneralTypeIssues]
             elif isinstance(tool, ImageGenerationTool):  # pragma: no branch
                 if not self.profile.supports_image_output:
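
Even with the inline comments removed, the Gemini mapping stays simple: FileSearchTool.vector_store_ids is passed through as file_names on the file_search tool config, and the referenced files still have to be uploaded via the Files API first. A minimal usage sketch, assuming FileSearchTool is importable from pydantic_ai.builtin_tools and that Agent accepts a builtin_tools argument; the model name and file resource name are placeholders, not taken from this diff:

# Hedged sketch; import locations, model name, and file ID are assumptions/placeholders.
from pydantic_ai import Agent
from pydantic_ai.builtin_tools import FileSearchTool  # assumed import location
from pydantic_ai.models.google import GoogleModel

# 'files/example-doc' stands in for a resource name returned by a Files API upload.
agent = Agent(
    GoogleModel('gemini-2.0-flash'),  # placeholder model name
    builtin_tools=[FileSearchTool(vector_store_ids=['files/example-doc'])],
)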

pydantic_ai_slim/pydantic_ai/models/openai.py

Lines changed: 0 additions & 9 deletions
@@ -1071,7 +1071,6 @@ def _process_response(  # noqa: C901
             # Pydantic AI doesn't yet support the `codex-mini-latest` LocalShell built-in tool
             pass
         elif isinstance(item, responses.ResponseFileSearchToolCall):  # pragma: no cover
-            # File Search Tool handling - requires actual OpenAI API responses with file_search_call
             call_part, return_part = _map_file_search_tool_call(item, self.system)
             items.append(call_part)
             items.append(return_part)
@@ -1270,7 +1269,6 @@ def _get_builtin_tools(self, model_request_parameters: ModelRequestParameters) -
                 )
                 tools.append(web_search_tool)
             elif isinstance(tool, FileSearchTool):  # pragma: no cover
-                # File Search Tool configuration - tested via initialization tests
                 file_search_tool = responses.FileSearchToolParam(
                     type='file_search', vector_store_ids=tool.vector_store_ids
                 )
@@ -1483,12 +1481,10 @@ async def _map_messages(  # noqa: C901
                     )
                     openai_messages.append(web_search_item)
                 elif (  # pragma: no cover
-                    # File Search Tool - requires actual file_search responses in message history
                     item.tool_name == FileSearchTool.kind
                     and item.tool_call_id
                     and (args := item.args_as_dict())
                 ):
-                    # The cast is necessary because of incomplete OpenAI SDK types for FileSearchToolCall
                     file_search_item = cast(
                         responses.ResponseFileSearchToolCallParam,
                         {
@@ -1559,7 +1555,6 @@ async def _map_messages(  # noqa: C901
                 ):
                     web_search_item['status'] = status
                 elif (  # pragma: no cover
-                    # File Search Tool status update - only called from API-dependent paths
                     item.tool_name == FileSearchTool.kind
                     and file_search_item is not None
                     and isinstance(item.content, dict)  # pyright: ignore[reportUnknownMemberType]
@@ -1881,7 +1876,6 @@ async def _get_event_iterator(self) -> AsyncIterator[ModelResponseStreamEvent]:
                     vendor_part_id=f'{chunk.item.id}-call', part=replace(call_part, args=None)
                 )
             elif isinstance(chunk.item, responses.ResponseFileSearchToolCall):  # pragma: no cover
-                # File Search Tool streaming - requires actual OpenAI streaming responses
                 call_part, _ = _map_file_search_tool_call(chunk.item, self.provider_name)
                 yield self._parts_manager.handle_part(
                     vendor_part_id=f'{chunk.item.id}-call', part=replace(call_part, args=None)
@@ -1963,7 +1957,6 @@ async def _get_event_iterator(self) -> AsyncIterator[ModelResponseStreamEvent]:

                 yield self._parts_manager.handle_part(vendor_part_id=f'{chunk.item.id}-return', part=return_part)
             elif isinstance(chunk.item, responses.ResponseFileSearchToolCall):  # pragma: no cover
-                # File Search Tool streaming response handling - requires actual OpenAI streaming responses
                 call_part, return_part = _map_file_search_tool_call(chunk.item, self.provider_name)

                 maybe_event = self._parts_manager.handle_tool_call_delta(
@@ -2270,7 +2263,6 @@ def _map_web_search_tool_call(


 def _map_file_search_tool_call(  # pragma: no cover
-    # File Search Tool mapping - only called from API-dependent response processing paths
     item: responses.ResponseFileSearchToolCall,
     provider_name: str,
 ) -> tuple[BuiltinToolCallPart, BuiltinToolReturnPart]:
@@ -2280,7 +2272,6 @@ def _map_file_search_tool_call(  # pragma: no cover
         'status': item.status,
     }

-    # The OpenAI SDK has incomplete types for FileSearchToolCall.action
     if action := item.action:  # type: ignore[reportAttributeAccessIssue]
         args = action.model_dump(mode='json')  # type: ignore[reportUnknownMemberType]

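
On the OpenAI Responses side, the same vector_store_ids are forwarded unchanged into responses.FileSearchToolParam(type='file_search', ...). A matching sketch under the same assumptions as the Gemini example above; the builtin_tools argument, import locations, model name, and vector store ID are assumptions or placeholders, not confirmed by this diff:

# Hedged sketch mirroring the Gemini example; IDs and model name are placeholders.
from pydantic_ai import Agent
from pydantic_ai.builtin_tools import FileSearchTool  # assumed import location
from pydantic_ai.models.openai import OpenAIResponsesModel

agent = Agent(
    OpenAIResponsesModel('gpt-4o'),  # placeholder model name
    builtin_tools=[FileSearchTool(vector_store_ids=['vs_example123'])],  # placeholder vector store ID
)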