async def test_upload_from_pathlike_async(caplog: LogCap) -> None:
    """Uploading a path-like object yields distinct file and image handles."""
    caplog.set_level(logging.DEBUG)
    async with AsyncClient() as client:
        # Public session accessor (was the private ``_files`` attribute)
        session = client.files
        file = await session.prepare_file(IMAGE_FILEPATH)
        assert file
        assert isinstance(file, FileHandle)
        logging.info(f"Uploaded file: {file}")
        image = await session.prepare_image(IMAGE_FILEPATH)
        assert image
        assert isinstance(image, FileHandle)
        logging.info(f"Uploaded image: {image}")
        # Even with the same data uploaded, assigned identifiers should differ
        assert image != file

3238
@pytest.mark.asyncio
@pytest.mark.lmstudio
async def test_upload_from_file_obj_async(caplog: LogCap) -> None:
    """Uploading an open binary file object yields distinct file and image handles."""
    caplog.set_level(logging.DEBUG)
    async with AsyncClient() as client:
        # Public session accessor (was the private ``_files`` attribute)
        session = client.files
        with open(IMAGE_FILEPATH, "rb") as f:
            file = await session.prepare_file(f)
        assert file
        assert isinstance(file, FileHandle)
        logging.info(f"Uploaded file: {file}")
        # Reopen: the first upload consumed the file object's stream
        with open(IMAGE_FILEPATH, "rb") as f:
            image = await session.prepare_image(f)
        assert image
        assert isinstance(image, FileHandle)
        logging.info(f"Uploaded image: {image}")
        # Even with the same data uploaded, assigned identifiers should differ
        assert image != file
4457
4558
@pytest.mark.asyncio
@pytest.mark.lmstudio
async def test_upload_from_bytesio_async(caplog: LogCap) -> None:
    """Uploading an in-memory BytesIO buffer yields distinct file and image handles."""
    caplog.set_level(logging.DEBUG)
    async with AsyncClient() as client:
        # Public session accessor (was the private ``_files`` attribute)
        session = client.files
        # Path.read_bytes() replaces the manual open/read/close dance
        file = await session.prepare_file(BytesIO(IMAGE_FILEPATH.read_bytes()))
        assert file
        assert isinstance(file, FileHandle)
        logging.info(f"Uploaded file: {file}")
        image = await session.prepare_image(BytesIO(IMAGE_FILEPATH.read_bytes()))
        assert image
        assert isinstance(image, FileHandle)
        logging.info(f"Uploaded image: {image}")
        # Even with the same data uploaded, assigned identifiers should differ
        assert image != file
5775
5876
5977@pytest .mark .asyncio
@@ -64,9 +82,9 @@ async def test_vlm_predict_async(caplog: LogCap) -> None:
6482 caplog .set_level (logging .DEBUG )
6583 model_id = EXPECTED_VLM_ID
6684 async with AsyncClient () as client :
67- file_handle = await client ._files . prepare_file (IMAGE_FILEPATH )
85+ image_handle = await client .files . prepare_image (IMAGE_FILEPATH )
6886 history = Chat ()
69- history .add_user_message ((prompt , file_handle ))
87+ history .add_user_message ((prompt , image_handle ))
7088 vlm = await client .llm .model (model_id )
7189 response = await vlm .respond (history , config = SHORT_PREDICTION_CONFIG )
7290 logging .info (f"VLM response: { response !r} " )
@@ -84,9 +102,9 @@ async def test_non_vlm_predict_async(caplog: LogCap) -> None:
84102 caplog .set_level (logging .DEBUG )
85103 model_id = "hugging-quants/llama-3.2-1b-instruct"
86104 async with AsyncClient () as client :
87- file_handle = await client ._files . prepare_file (IMAGE_FILEPATH )
105+ image_handle = await client .files . prepare_image (IMAGE_FILEPATH )
88106 history = Chat ()
89- history .add_user_message ((prompt , file_handle ))
107+ history .add_user_message ((prompt , image_handle ))
90108 llm = await client .llm .model (model_id )
91109 with pytest .raises (LMStudioServerError ) as exc_info :
92110 await llm .respond (history )
@@ -101,9 +119,9 @@ async def test_vlm_predict_image_param_async(caplog: LogCap) -> None:
101119 caplog .set_level (logging .DEBUG )
102120 model_id = EXPECTED_VLM_ID
103121 async with AsyncClient () as client :
104- file_handle = await client ._files . prepare_file (IMAGE_FILEPATH )
122+ image_handle = await client .files . prepare_image (IMAGE_FILEPATH )
105123 history = Chat ()
106- history .add_user_message (prompt , images = [file_handle ])
124+ history .add_user_message (prompt , images = [image_handle ])
107125 vlm = await client .llm .model (model_id )
108126 response = await vlm .respond (history , config = SHORT_PREDICTION_CONFIG )
109127 logging .info (f"VLM response: { response !r} " )
@@ -121,9 +139,9 @@ async def test_non_vlm_predict_image_param_async(caplog: LogCap) -> None:
121139 caplog .set_level (logging .DEBUG )
122140 model_id = "hugging-quants/llama-3.2-1b-instruct"
123141 async with AsyncClient () as client :
124- file_handle = await client ._files . prepare_file (IMAGE_FILEPATH )
142+ image_handle = await client .files . prepare_image (IMAGE_FILEPATH )
125143 history = Chat ()
126- history .add_user_message (prompt , images = [file_handle ])
144+ history .add_user_message (prompt , images = [image_handle ])
127145 llm = await client .llm .model (model_id )
128146 with pytest .raises (LMStudioServerError ) as exc_info :
129147 await llm .respond (history )
0 commit comments