@@ -7436,7 +7436,93 @@ def get_meaning_of_life() -> int:
7436 7436 )
7437 7437
7438 7438
7439- # Integration tests for FileSearchTool require vector store setup and cassette recording.
7439+ async def test_openai_responses_model_file_search_tool(allow_model_requests: None, openai_api_key: str):
7440+     """Integration test for FileSearchTool with OpenAI."""
7441+     from openai import AsyncOpenAI
7442+ 
7443+     from pydantic_ai.builtin_tools import FileSearchTool
7444+     from pydantic_ai.providers.openai import OpenAIProvider
7445+ 
7446+     async_client = AsyncOpenAI(api_key=openai_api_key)
7447+ 
7448+     import tempfile
7449+     with tempfile.NamedTemporaryFile(mode='w', suffix='.txt', delete=False) as f:
7450+         f.write('Paris is the capital of France. It is known for the Eiffel Tower.')
7451+         test_file_path = f.name
7452+ 
7453+     try:
7454+         with open(test_file_path, 'rb') as f:
7455+             file = await async_client.files.create(file=f, purpose='assistants')
7456+ 
7457+         vector_store = await async_client.vector_stores.create(name='test-file-search')
7458+         await async_client.vector_stores.files.create(vector_store_id=vector_store.id, file_id=file.id)
7459+ 
7460+         import asyncio
7461+         await asyncio.sleep(2)  # give the vector store a moment to index the uploaded file
7462+ 
7463+         model = OpenAIResponsesModel('gpt-4o', provider=OpenAIProvider(openai_client=async_client))
7464+         agent = Agent(model=model, builtin_tools=[FileSearchTool(vector_store_ids=[vector_store.id])])
7465+ 
7466+         result = await agent.run('What is the capital of France according to my files?')
7467+ 
7468+         assert 'paris' in result.output.lower()
7469+ 
7470+     finally:
7471+         import os
7472+         os.unlink(test_file_path)
7473+         if 'file' in locals():  # only delete remote resources that were actually created
7474+             await async_client.files.delete(file.id)
7475+         if 'vector_store' in locals():
7476+             await async_client.vector_stores.delete(vector_store.id)
7477+         await async_client.close()
7478+
7479+
7480+ async def test_openai_responses_model_file_search_tool_stream(allow_model_requests: None, openai_api_key: str):
7481+     """Integration test for FileSearchTool streaming with OpenAI."""
7482+     from openai import AsyncOpenAI
7483+ 
7484+     from pydantic_ai.builtin_tools import FileSearchTool
7485+     from pydantic_ai.providers.openai import OpenAIProvider
7486+ 
7487+     async_client = AsyncOpenAI(api_key=openai_api_key)
7488+ 
7489+     import tempfile
7490+     with tempfile.NamedTemporaryFile(mode='w', suffix='.txt', delete=False) as f:
7491+         f.write('The Eiffel Tower is located in Paris, France.')
7492+         test_file_path = f.name
7493+ 
7494+     try:
7495+         with open(test_file_path, 'rb') as f:
7496+             file = await async_client.files.create(file=f, purpose='assistants')
7497+ 
7498+         vector_store = await async_client.vector_stores.create(name='test-file-search-stream')
7499+         await async_client.vector_stores.files.create(vector_store_id=vector_store.id, file_id=file.id)
7500+ 
7501+         import asyncio
7502+         await asyncio.sleep(2)  # give the vector store a moment to index the uploaded file
7503+ 
7504+         model = OpenAIResponsesModel('gpt-4o', provider=OpenAIProvider(openai_client=async_client))
7505+         agent = Agent(model=model, builtin_tools=[FileSearchTool(vector_store_ids=[vector_store.id])])
7506+ 
7507+         parts = []
7508+         async with agent.run_stream('Where is the Eiffel Tower according to my files?') as result:
7509+             async for part in result.stream_responses(debounce_by=None):
7510+                 parts.append(part)
7511+             output = await result.get_output()
7512+ 
7513+         assert len(parts) > 0
7514+         assert 'paris' in output.lower() or 'france' in output.lower()
7515+ 
7516+     finally:
7517+         import os
7518+         os.unlink(test_file_path)
7519+         if 'file' in locals():  # only delete remote resources that were actually created
7520+             await async_client.files.delete(file.id)
7521+         if 'vector_store' in locals():
7522+             await async_client.vector_stores.delete(vector_store.id)
7523+         await async_client.close()
7524+
7525+
7440 7526 # Unit tests below validate the parsing logic.
7441 7527
7442 7528