 import base64
 import logging
 import re
-import time
 import warnings
 from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Literal, Optional, Union, overload

@@ -301,8 +300,6 @@ def _inner_post(
         if request_parameters.task in TASKS_EXPECTING_IMAGES and "Accept" not in request_parameters.headers:
             request_parameters.headers["Accept"] = "image/png"

-        t0 = time.time()
-        timeout = self.timeout
         while True:
             with _open_as_binary(request_parameters.data) as data_as_binary:
                 try:
@@ -326,30 +323,9 @@ def _inner_post(
                 except HTTPError as error:
                     if error.response.status_code == 422 and request_parameters.task != "unknown":
                         msg = str(error.args[0])
-                        print(error.response.text)
                         if len(error.response.text) > 0:
                             msg += f"\n{error.response.text}\n"
-                        msg += f"\nMake sure '{request_parameters.task}' task is supported by the model."
                         error.args = (msg,) + error.args[1:]
-                    if error.response.status_code == 503:
-                        # If Model is unavailable, either raise a TimeoutError...
-                        if timeout is not None and time.time() - t0 > timeout:
-                            raise InferenceTimeoutError(
-                                f"Model not loaded on the server: {request_parameters.url}. Please retry with a higher timeout (current:"
-                                f" {self.timeout}).",
-                                request=error.request,
-                                response=error.response,
-                            ) from error
-                        # ...or wait 1s and retry
-                        logger.info(f"Waiting for model to be loaded on the server: {error}")
-                        time.sleep(1)
-                        if "X-wait-for-model" not in request_parameters.headers and request_parameters.url.startswith(
-                            INFERENCE_ENDPOINT
-                        ):
-                            request_parameters.headers["X-wait-for-model"] = "1"
-                        if timeout is not None:
-                            timeout = max(self.timeout - (time.time() - t0), 1)  # type: ignore
-                        continue
                     raise

     def audio_classification(
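
With the retry loop gone, a 503 from a cold model now propagates immediately instead of being polled with the `X-wait-for-model` header. A minimal sketch of how a caller could reinstate that behaviour on their side, assuming a hypothetical `call_model` callable that performs the request and raises `requests.HTTPError` on failure:

import time
from typing import Callable, TypeVar

from requests import HTTPError

T = TypeVar("T")

def retry_while_loading(call_model: Callable[[], T], timeout: float = 60.0, interval: float = 1.0) -> T:
    """Retry `call_model` while the server answers 503, as the deleted loop used to do."""
    t0 = time.time()
    while True:
        try:
            return call_model()
        except HTTPError as error:
            if error.response is None or error.response.status_code != 503:
                raise
            if time.time() - t0 > timeout:
                raise TimeoutError(f"Model still not loaded after {timeout}s") from error
            # Wait a bit and retry, as _inner_post previously did internally.
            time.sleep(interval)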
@@ -3261,6 +3237,13 @@ def zero_shot_image_classification(
         response = self._inner_post(request_parameters)
         return ZeroShotImageClassificationOutputElement.parse_obj_as_list(response)

+    @_deprecate_method(
+        version="0.33.0",
+        message=(
+            "HF Inference API is getting revamped and will only support warm models in the future (no cold start allowed)."
+            " Use `HfApi.list_models(..., inference_provider='...')` to list warm models per provider."
+        ),
+    )
     def list_deployed_models(
         self, frameworks: Union[None, str, Literal["all"], List[str]] = None
     ) -> Dict[str, List[str]]:
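
The deprecation message points to `HfApi.list_models` with its `inference_provider` filter as the replacement. A hedged usage sketch (the provider name "hf-inference" is an illustrative assumption; any supported provider works):

from huggingface_hub import HfApi

api = HfApi()
# List a few models currently warm on the chosen provider.
for model in api.list_models(inference_provider="hf-inference", limit=5):
    print(model.id)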
@@ -3444,6 +3427,13 @@ def health_check(self, model: Optional[str] = None) -> bool:
         response = get_session().get(url, headers=build_hf_headers(token=self.token))
         return response.status_code == 200

+    @_deprecate_method(
+        version="0.33.0",
+        message=(
+            "HF Inference API is getting revamped and will only support warm models in the future (no cold start allowed)."
+            " Use `HfApi.model_info` to get the model status both with HF Inference API and external providers."
+        ),
+    )
     def get_model_status(self, model: Optional[str] = None) -> ModelStatus:
         """
         Get the status of a model hosted on the HF Inference API.
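
Likewise, the replacement suggested here is `HfApi.model_info`. A minimal sketch, assuming the "inference" expand property reports the warm/cold state (verify the property name against your installed `huggingface_hub` version):

from huggingface_hub import HfApi

api = HfApi()
# "gpt2" is only an example repo id; expand=["inference"] is assumed to
# return the model's warm/cold status on HF Inference API.
info = api.model_info("gpt2", expand=["inference"])
print(info.inference)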