5 files changed: +74 / -5 lines changed (changes under litellm_core_utils/prompt_templates, plus a new test under tests/test_litellm/litellm_core_utils).

Exception mapping docs (exception types table):

@@ -12,6 +12,7 @@ All exceptions can be imported from `litellm` - e.g. `from litellm import BadRequestError`
 | 400 | UnsupportedParamsError | litellm.BadRequestError | Raised when unsupported params are passed |
 | 400 | ContextWindowExceededError | litellm.BadRequestError | Special error type for context window exceeded error messages - enables context window fallbacks |
 | 400 | ContentPolicyViolationError | litellm.BadRequestError | Special error type for content policy violation error messages - enables content policy fallbacks |
+| 400 | ImageFetchError | litellm.BadRequestError | Raised when there are errors fetching or processing images |
 | 400 | InvalidRequestError | openai.BadRequestError | Deprecated error, use BadRequestError instead |
 | 401 | AuthenticationError | openai.AuthenticationError |
 | 403 | PermissionDeniedError | openai.PermissionDeniedError |
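Since the new error subclasses `litellm.BadRequestError`, callers can catch it specifically or rely on existing handlers for the parent class. A minimal usage sketch (the model name, URL, and handler bodies are illustrative placeholders, not part of this change):

```python
import litellm

messages = [
    {
        "role": "user",
        "content": [
            {"type": "text", "text": "Describe this image"},
            {"type": "image_url", "image_url": {"url": "https://example.com/missing.png"}},
        ],
    }
]

try:
    litellm.completion(model="gemini/gemini-pro", messages=messages)
except litellm.ImageFetchError as e:
    # The image could not be fetched or processed (e.g. non-200 response).
    print(f"Image fetch failed: {e}")
except litellm.BadRequestError as e:
    # Other 400-class errors still land here; ImageFetchError would too
    # if the more specific handler above were removed.
    print(f"Bad request: {e}")
```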
Package-level exception imports:

@@ -1261,6 +1261,7 @@ def add_known_models():
     AuthenticationError,
     InvalidRequestError,
     BadRequestError,
+    ImageFetchError,
     NotFoundError,
     RateLimitError,
     ServiceUnavailableError,
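With the name added to the package-level imports, the exception is available from the top of the package, matching the docs line above (a one-line check, assuming the export lands as shown in the diff):

```python
from litellm import ImageFetchError  # importable from the package root after this change
```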
Exception class definitions:

@@ -153,6 +153,29 @@ def __repr__(self):
         _message += f", LiteLLM Max Retries: {self.max_retries}"
         return _message
 
+class ImageFetchError(BadRequestError):
+    def __init__(
+        self,
+        message,
+        model=None,
+        llm_provider=None,
+        response: Optional[httpx.Response] = None,
+        litellm_debug_info: Optional[str] = None,
+        max_retries: Optional[int] = None,
+        num_retries: Optional[int] = None,
+        body: Optional[dict] = None,
+    ):
+        super().__init__(
+            message=message,
+            model=model,
+            llm_provider=llm_provider,
+            response=response,
+            litellm_debug_info=litellm_debug_info,
+            max_retries=max_retries,
+            num_retries=num_retries,
+            body=body,
+        )
+
 
 class UnprocessableEntityError(openai.UnprocessableEntityError):  # type: ignore
     def __init__(
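Because the new class simply forwards its arguments to `BadRequestError.__init__`, it inherits the parent's 400 status code and stays compatible with handlers written against the parent class. A small sketch (the message, model, and provider values are placeholders):

```python
import litellm

try:
    raise litellm.ImageFetchError(
        message="Error: Unable to fetch image from URL. Status code: 404, url=https://example.com/a.png",
        model="gemini-pro",      # placeholder
        llm_provider="gemini",   # placeholder
    )
except litellm.BadRequestError as e:
    # Subclass instances are still caught by parent-class handlers.
    print(type(e).__name__, e.status_code)  # -> ImageFetchError 400
```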
litellm/litellm_core_utils/prompt_templates/image_handling.py:

 def _process_image_response(response: Response, url: str) -> str:
     if response.status_code != 200:
-        raise Exception(
+        raise litellm.ImageFetchError(
             f"Error: Unable to fetch image from URL. Status code: {response.status_code}, url={url}"
         )

@@ -57,9 +57,11 @@ async def async_convert_url_to_base64(url: str) -> str:
         try:
             response = await client.get(url, follow_redirects=True)
             return _process_image_response(response, url)
+        except litellm.ImageFetchError:
+            raise
         except Exception:
             pass
-    raise Exception(
+    raise litellm.ImageFetchError(
         f"Error: Unable to fetch image from URL after 3 attempts. url={url}"
     )

@@ -74,10 +76,11 @@ def convert_url_to_base64(url: str) -> str:
         try:
             response = client.get(url, follow_redirects=True)
             return _process_image_response(response, url)
+        except litellm.ImageFetchError:
+            raise
         except Exception as e:
             verbose_logger.exception(e)
-            # print(e)
             pass
-    raise Exception(
-        f"Error: Unable to fetch image from URL after 3 attempts. url={url}"
+    raise litellm.ImageFetchError(
+        f"Error: Unable to fetch image from URL after 3 attempts. url={url}",
     )
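The added `except litellm.ImageFetchError: raise` clauses matter because the surrounding retry logic catches `Exception` broadly; without the re-raise, the typed error from `_process_image_response` would be swallowed and replaced by the generic post-retry message. A generic sketch of the pattern, with illustrative names rather than the actual LiteLLM code:

```python
class FetchError(Exception):
    """Stand-in for a typed error such as litellm.ImageFetchError."""


def attempt_fetch() -> str:
    # Stand-in for _process_image_response: fail with a typed error.
    raise FetchError("upstream returned 404")


def fetch_with_retries() -> str:
    for _ in range(3):
        try:
            return attempt_fetch()
        except FetchError:
            # Propagate the typed error immediately so callers see the real cause...
            raise
        except Exception:
            # ...instead of letting this catch-all swallow it and fall through
            # to the generic failure below.
            pass
    raise FetchError("Unable to fetch after 3 attempts")
```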
New test file (under tests/test_litellm/litellm_core_utils):

+import pytest
+from httpx import Request, Response
+
+import litellm
+from litellm.litellm_core_utils.prompt_templates.image_handling import (
+    convert_url_to_base64,
+)
+
+
+class DummyClient:
+    def get(self, url, follow_redirects=True):
+        return Response(status_code=404, request=Request("GET", url))
+
+
+def test_invalid_image_url_raises_bad_request(monkeypatch):
+    monkeypatch.setattr(litellm, "module_level_client", DummyClient())
+    with pytest.raises(litellm.ImageFetchError) as excinfo:
+        convert_url_to_base64("https://invalid.example/image.png")
+    assert "Unable to fetch image" in str(excinfo.value)
+
+
+def test_completion_with_invalid_image_url(monkeypatch):
+    monkeypatch.setattr(litellm, "module_level_client", DummyClient())
+    messages = [
+        {
+            "role": "user",
+            "content": [
+                {"type": "text", "text": "hi"},
+                {
+                    "type": "image_url",
+                    "image_url": {"url": "https://invalid.example/image.png"},
+                },
+            ],
+        }
+    ]
+    with pytest.raises(litellm.ImageFetchError) as excinfo:
+        litellm.completion(
+            model="gemini/gemini-pro", messages=messages, api_key="test"
+        )
+    assert excinfo.value.status_code == 400
+    assert "Unable to fetch image" in str(excinfo.value)