Skip to content

Commit b66934e

Browse files
Wauplin authored and hanouticelina committed
Fix few things (inference providers) (#2793)
1 parent 8264eb8 commit b66934e

File tree

5 files changed

+28
-10
lines changed

5 files changed

+28
-10
lines changed

src/huggingface_hub/inference/_providers/fal_ai.py

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44

55
from huggingface_hub import constants
66
from huggingface_hub.inference._common import RequestParameters, TaskProviderHelper, _as_dict
7-
from huggingface_hub.utils import build_hf_headers, get_session, logging
7+
from huggingface_hub.utils import build_hf_headers, get_session, get_token, logging
88

99

1010
logger = logging.get_logger(__name__)
@@ -52,7 +52,11 @@ def prepare_request(
5252
extra_payload: Optional[Dict[str, Any]] = None,
5353
) -> RequestParameters:
5454
if api_key is None:
55-
raise ValueError("You must provide an api_key to work with fal.ai API.")
55+
api_key = get_token()
56+
if api_key is None:
57+
raise ValueError(
58+
"You must provide an api_key to work with fal.ai API or log in with `huggingface-cli login`."
59+
)
5660

5761
mapped_model = self._map_model(model)
5862
headers = {

src/huggingface_hub/inference/_providers/replicate.py

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22

33
from huggingface_hub import constants
44
from huggingface_hub.inference._common import RequestParameters, TaskProviderHelper, _as_dict
5-
from huggingface_hub.utils import build_hf_headers, get_session, logging
5+
from huggingface_hub.utils import build_hf_headers, get_session, get_token, logging
66

77

88
logger = logging.get_logger(__name__)
@@ -52,7 +52,11 @@ def prepare_request(
5252
extra_payload: Optional[Dict[str, Any]] = None,
5353
) -> RequestParameters:
5454
if api_key is None:
55-
raise ValueError("You must provide an api_key to work with Replicate API.")
55+
api_key = get_token()
56+
if api_key is None:
57+
raise ValueError(
58+
"You must provide an api_key to work with Replicate API or log in with `huggingface-cli login`."
59+
)
5660

5761
# Route to the proxy if the api_key is a HF TOKEN
5862
if api_key.startswith("hf_"):

src/huggingface_hub/inference/_providers/sambanova.py

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22

33
from huggingface_hub import constants
44
from huggingface_hub.inference._common import RequestParameters, TaskProviderHelper
5-
from huggingface_hub.utils import build_hf_headers, logging
5+
from huggingface_hub.utils import build_hf_headers, get_token, logging
66

77

88
logger = logging.get_logger(__name__)
@@ -44,7 +44,11 @@ def prepare_request(
4444
extra_payload: Optional[Dict[str, Any]] = None,
4545
) -> RequestParameters:
4646
if api_key is None:
47-
raise ValueError("You must provide an api_key to work with Sambanova API.")
47+
api_key = get_token()
48+
if api_key is None:
49+
raise ValueError(
50+
"You must provide an api_key to work with Sambanova API or log in with `huggingface-cli login`."
51+
)
4852

4953
# Route to the proxy if the api_key is a HF TOKEN
5054
if api_key.startswith("hf_"):

src/huggingface_hub/inference/_providers/together.py

Lines changed: 9 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44

55
from huggingface_hub import constants
66
from huggingface_hub.inference._common import RequestParameters, TaskProviderHelper, _as_dict
7-
from huggingface_hub.utils import build_hf_headers, logging
7+
from huggingface_hub.utils import build_hf_headers, get_token, logging
88

99

1010
logger = logging.get_logger(__name__)
@@ -29,7 +29,7 @@
2929
"meta-llama/Llama-3.2-90B-Vision-Instruct": "meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo",
3030
"meta-llama/Llama-3.3-70B-Instruct": "meta-llama/Llama-3.3-70B-Instruct-Turbo",
3131
"meta-llama/Meta-Llama-3-70B-Instruct": "meta-llama/Llama-3-70b-chat-hf",
32-
"meta-llama/Meta-Llama-3-8B-Instruct": "togethercomputer/Llama-3-8b-chat-hf-int4",
32+
"meta-llama/Meta-Llama-3-8B-Instruct": "meta-llama/Meta-Llama-3-8B-Instruct-Turbo",
3333
"meta-llama/Meta-Llama-3.1-405B-Instruct": "meta-llama/Llama-3.2-11B-Vision-Instruct-Turbo",
3434
"meta-llama/Meta-Llama-3.1-70B-Instruct": "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
3535
"meta-llama/Meta-Llama-3.1-8B-Instruct": "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo",
@@ -86,7 +86,11 @@ def prepare_request(
8686
extra_payload: Optional[Dict[str, Any]] = None,
8787
) -> RequestParameters:
8888
if api_key is None:
89-
raise ValueError("You must provide an api_key to work with Together API.")
89+
api_key = get_token()
90+
if api_key is None:
91+
raise ValueError(
92+
"You must provide an api_key to work with Together API or log in with `huggingface-cli login`."
93+
)
9094
headers = {**build_hf_headers(token=api_key), **headers}
9195

9296
# Route to the proxy if the api_key is a HF TOKEN
@@ -97,6 +101,8 @@ def prepare_request(
97101
base_url = BASE_URL
98102
logger.info("Calling Together provider directly.")
99103
mapped_model = self._map_model(model)
104+
if "model" in parameters:
105+
parameters["model"] = mapped_model
100106
payload = self._prepare_payload(inputs, parameters=parameters)
101107

102108
return RequestParameters(

src/huggingface_hub/utils/_http.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -513,7 +513,7 @@ def _format(error_type: Type[HfHubHTTPError], custom_message: str, response: Res
513513
server_errors.append(response.text)
514514

515515
# Strip all server messages
516-
server_errors = [line.strip() for line in server_errors if line.strip()]
516+
server_errors = [str(line).strip() for line in server_errors if str(line).strip()]
517517

518518
# Deduplicate server messages (keep order)
519519
# taken from https://stackoverflow.com/a/17016257

0 commit comments

Comments (0)