Commit d2f8787

Merge pull request BerriAI#14808 from eycjur/fix_load_credentials_in_token_counter_proxy
Fix load credentials in token counter proxy
2 parents (2e3e7de + b4512cb) · commit d2f8787

File tree

1 file changed (+2, -0 lines)


litellm/proxy/proxy_server.py

Lines changed: 2 additions & 0 deletions
@@ -35,6 +35,7 @@
     LITELLM_SETTINGS_SAFE_DB_OVERRIDES,
 )
 from litellm.litellm_core_utils.safe_json_dumps import safe_dumps
+from litellm.utils import load_credentials_from_list
 from litellm.types.utils import (
     ModelResponse,
     ModelResponseStream,
@@ -5894,6 +5895,7 @@ async def token_counter(request: TokenCountRequest, call_endpoint: bool = False)
         pass
     if deployment is not None:
         litellm_model_name = deployment.get("litellm_params", {}).get("model")
+        load_credentials_from_list(deployment.get("litellm_params", {}))
         # remove the custom_llm_provider_prefix in the litellm_model_name
         if "/" in litellm_model_name:
             litellm_model_name = litellm_model_name.split("/", 1)[1]
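
For context, below is a minimal, self-contained sketch (plain Python, not the upstream implementation) of what the added load_credentials_from_list call accomplishes inside token_counter. The helper load_credentials_stub, the CREDENTIAL_LIST data, and resolve_token_counter_params are hypothetical stand-ins; the assumption is that load_credentials_from_list resolves a credential referenced by litellm_credential_name and merges its stored values (e.g. api_key, api_base) into the deployment's litellm_params before the token count request is handled.

from typing import Any, Dict, List

# Hypothetical stored credentials, analogous to a proxy's credential list.
CREDENTIAL_LIST: List[Dict[str, Any]] = [
    {
        "credential_name": "azure-prod",
        "credential_values": {
            "api_key": "sk-placeholder",
            "api_base": "https://example-endpoint.openai.azure.com",
        },
    }
]


def load_credentials_stub(litellm_params: Dict[str, Any]) -> None:
    # Illustrative stand-in for litellm.utils.load_credentials_from_list:
    # look up the credential named by "litellm_credential_name" and merge its
    # values into litellm_params without overwriting explicitly set keys.
    name = litellm_params.get("litellm_credential_name")
    if not name:
        return
    for cred in CREDENTIAL_LIST:
        if cred["credential_name"] == name:
            for key, value in cred["credential_values"].items():
                litellm_params.setdefault(key, value)


def resolve_token_counter_params(deployment: Dict[str, Any]) -> Dict[str, Any]:
    # Mirrors the fixed flow from the diff above: read the model name, load
    # credentials (the line this commit adds), then strip any provider prefix.
    litellm_params = deployment.get("litellm_params", {})
    litellm_model_name = litellm_params.get("model", "")
    load_credentials_stub(litellm_params)
    if "/" in litellm_model_name:
        litellm_model_name = litellm_model_name.split("/", 1)[1]
    resolved = {k: v for k, v in litellm_params.items() if k != "model"}
    resolved["model"] = litellm_model_name
    return resolved


if __name__ == "__main__":
    deployment = {
        "litellm_params": {
            "model": "azure/gpt-4o",
            "litellm_credential_name": "azure-prod",
        }
    }
    print(resolve_token_counter_params(deployment))

Per the PR title, the token counter endpoint previously resolved the model name but did not run this credential-loading step, so deployments that reference named credentials presumably proceeded without their stored provider settings.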
