@@ -4,7 +4,7 @@

 from huggingface_hub import constants
 from huggingface_hub.inference._common import RequestParameters, TaskProviderHelper, _as_dict
-from huggingface_hub.utils import build_hf_headers, logging
+from huggingface_hub.utils import build_hf_headers, get_token, logging


 logger = logging.get_logger(__name__)
2929 "meta-llama/Llama-3.2-90B-Vision-Instruct" : "meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo" ,
3030 "meta-llama/Llama-3.3-70B-Instruct" : "meta-llama/Llama-3.3-70B-Instruct-Turbo" ,
3131 "meta-llama/Meta-Llama-3-70B-Instruct" : "meta-llama/Llama-3-70b-chat-hf" ,
32- "meta-llama/Meta-Llama-3-8B-Instruct" : "togethercomputer/ Llama-3-8b-chat-hf-int4 " ,
32+ "meta-llama/Meta-Llama-3-8B-Instruct" : "meta-llama/Meta- Llama-3-8B-Instruct-Turbo " ,
3333 "meta-llama/Meta-Llama-3.1-405B-Instruct" : "meta-llama/Llama-3.2-11B-Vision-Instruct-Turbo" ,
3434 "meta-llama/Meta-Llama-3.1-70B-Instruct" : "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo" ,
3535 "meta-llama/Meta-Llama-3.1-8B-Instruct" : "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo" ,
@@ -86,7 +86,11 @@ def prepare_request(
         extra_payload: Optional[Dict[str, Any]] = None,
     ) -> RequestParameters:
         if api_key is None:
-            raise ValueError("You must provide an api_key to work with Together API.")
+            api_key = get_token()
+            if api_key is None:
+                raise ValueError(
+                    "You must provide an api_key to work with Together API or log in with `huggingface-cli login`."
+                )
         headers = {**build_hf_headers(token=api_key), **headers}

         # Route to the proxy if the api_key is a HF TOKEN
@@ -97,6 +101,8 @@ def prepare_request(
             base_url = BASE_URL
             logger.info("Calling Together provider directly.")
         mapped_model = self._map_model(model)
+        if "model" in parameters:
+            parameters["model"] = mapped_model
         payload = self._prepare_payload(inputs, parameters=parameters)

         return RequestParameters(
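
For illustration, a minimal standalone sketch of the two behaviors this commit introduces. The helper names `_resolve_together_key` and `_inject_mapped_model` are hypothetical and exist only for this example; `get_token` is the real `huggingface_hub.utils` function imported in the diff above.

from typing import Any, Dict, Optional

from huggingface_hub.utils import get_token


def _resolve_together_key(api_key: Optional[str]) -> str:
    # Hypothetical helper mirroring the new fallback: an explicitly passed key wins;
    # otherwise the token stored by `huggingface-cli login` is used.
    if api_key is None:
        api_key = get_token()
    if api_key is None:
        raise ValueError(
            "You must provide an api_key to work with Together API or log in with `huggingface-cli login`."
        )
    return api_key


def _inject_mapped_model(parameters: Dict[str, Any], mapped_model: str) -> Dict[str, Any]:
    # Hypothetical helper mirroring the second change: if the caller's parameters
    # already carry a "model" entry, overwrite it with the provider-mapped model id
    # so the request is sent with the Together-side model name.
    if "model" in parameters:
        parameters["model"] = mapped_model
    return parameters

With this fallback, `prepare_request` can be called without an explicit `api_key` as long as a token has been saved via `huggingface-cli login`; since such tokens start with `hf_`, the request is then routed through the Hugging Face proxy rather than directly to Together.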