@@ -24,7 +24,7 @@ async def test_upload_from_pathlike_async(caplog: LogCap) -> None:
     caplog.set_level(logging.DEBUG)
     async with AsyncClient() as client:
         session = client._files
-        file = await session._add_temp_file(IMAGE_FILEPATH)
+        file = await session.prepare_file(IMAGE_FILEPATH)
         assert file
         assert isinstance(file, FileHandle)
         logging.info(f"Uploaded file: {file}")
@@ -37,7 +37,7 @@ async def test_upload_from_file_obj_async(caplog: LogCap) -> None:
     async with AsyncClient() as client:
         session = client._files
         with open(IMAGE_FILEPATH, "rb") as f:
-            file = await session._add_temp_file(f)
+            file = await session.prepare_file(f)
         assert file
         assert isinstance(file, FileHandle)
         logging.info(f"Uploaded file: {file}")
@@ -50,7 +50,7 @@ async def test_upload_from_bytesio_async(caplog: LogCap) -> None:
     async with AsyncClient() as client:
         session = client._files
         with open(IMAGE_FILEPATH, "rb") as f:
-            file = await session._add_temp_file(BytesIO(f.read()))
+            file = await session.prepare_file(BytesIO(f.read()))
         assert file
         assert isinstance(file, FileHandle)
         logging.info(f"Uploaded file: {file}")
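
The three upload hunks above exercise the input types accepted by the renamed prepare_file entry point: a path-like value, an open binary file object, and an in-memory BytesIO buffer. As a minimal usage sketch of the same call pattern outside the test suite, assuming the AsyncClient API shown in these hunks (the lmstudio import path and the example image path are assumptions, not taken from this diff):

    import asyncio
    from io import BytesIO
    from pathlib import Path

    from lmstudio import AsyncClient  # import path assumed, not shown in the diff

    IMAGE_FILEPATH = Path("example.png")  # placeholder path, assumption

    async def upload_all_variants() -> None:
        async with AsyncClient() as client:
            session = client._files
            # From a path-like value (first hunk)
            file = await session.prepare_file(IMAGE_FILEPATH)
            # From an open binary file object (second hunk)
            with open(IMAGE_FILEPATH, "rb") as f:
                file = await session.prepare_file(f)
            # From an in-memory buffer (third hunk)
            with open(IMAGE_FILEPATH, "rb") as f:
                file = await session.prepare_file(BytesIO(f.read()))

    asyncio.run(upload_all_variants())
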
@@ -64,7 +64,7 @@ async def test_vlm_predict_async(caplog: LogCap) -> None:
     caplog.set_level(logging.DEBUG)
     model_id = EXPECTED_VLM_ID
     async with AsyncClient() as client:
-        file_handle = await client._files._add_temp_file(IMAGE_FILEPATH)
+        file_handle = await client._files.prepare_file(IMAGE_FILEPATH)
         history = Chat()
         history.add_user_message((prompt, file_handle))
         vlm = await client.llm.model(model_id)
@@ -84,7 +84,7 @@ async def test_non_vlm_predict_async(caplog: LogCap) -> None:
     caplog.set_level(logging.DEBUG)
     model_id = "hugging-quants/llama-3.2-1b-instruct"
     async with AsyncClient() as client:
-        file_handle = await client._files._add_temp_file(IMAGE_FILEPATH)
+        file_handle = await client._files.prepare_file(IMAGE_FILEPATH)
         history = Chat()
         history.add_user_message((prompt, file_handle))
         llm = await client.llm.model(model_id)
@@ -101,7 +101,7 @@ async def test_vlm_predict_image_param_async(caplog: LogCap) -> None:
     caplog.set_level(logging.DEBUG)
     model_id = EXPECTED_VLM_ID
     async with AsyncClient() as client:
-        file_handle = await client._files._add_temp_file(IMAGE_FILEPATH)
+        file_handle = await client._files.prepare_file(IMAGE_FILEPATH)
         history = Chat()
         history.add_user_message(prompt, images=[file_handle])
         vlm = await client.llm.model(model_id)
@@ -121,7 +121,7 @@ async def test_non_vlm_predict_image_param_async(caplog: LogCap) -> None:
     caplog.set_level(logging.DEBUG)
     model_id = "hugging-quants/llama-3.2-1b-instruct"
     async with AsyncClient() as client:
-        file_handle = await client._files._add_temp_file(IMAGE_FILEPATH)
+        file_handle = await client._files.prepare_file(IMAGE_FILEPATH)
         history = Chat()
         history.add_user_message(prompt, images=[file_handle])
         llm = await client.llm.model(model_id)
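
The four prediction hunks above show two ways of attaching the prepared file to a chat message: bundled into the message content tuple, or passed via the images keyword. A combined sketch under the same assumptions (the lmstudio import path, prompt text, and image path are assumptions; the hunks end before the prediction call, so it is elided):

    import asyncio

    from lmstudio import AsyncClient, Chat  # import path assumed

    async def build_image_chat(model_id: str) -> None:
        async with AsyncClient() as client:
            file_handle = await client._files.prepare_file("example.png")  # path assumed
            history = Chat()
            # Style 1: file handle bundled into the message content tuple
            history.add_user_message(("Describe this image.", file_handle))
            # Style 2: file handles passed via the dedicated images keyword
            history.add_user_message("Describe this image.", images=[file_handle])
            vlm = await client.llm.model(model_id)
            # ...the prediction call itself is not part of the hunks above

    asyncio.run(build_image_chat("some-vlm-model-id"))  # placeholder; the tests use EXPECTED_VLM_ID
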