File tree Expand file tree Collapse file tree 1 file changed +7
-5
lines changed
charts/all/vllm-inference-service/templates Expand file tree Collapse file tree 1 file changed +7
-5
lines changed Original file line number Diff line number Diff line change @@ -35,13 +35,15 @@ spec:
3535 python - <<'PY'
"""Fetch a Hugging Face model snapshot into the shared cache volume.

Reads MODEL_ID (required) and HF_TOKEN (optional) from the environment.
Logs in to the Hub only when HF_TOKEN is non-empty and has the expected
"hf_" prefix; otherwise the variable is removed so huggingface_hub does
not send a malformed credential with the download requests.
"""
import os

from huggingface_hub import login, snapshot_download

# Strip surrounding whitespace: tokens injected via Kubernetes secrets
# frequently carry a trailing newline.
raw_token = os.environ.get("HF_TOKEN", "")
token = raw_token.strip()
model = os.environ.get("MODEL_ID")

if not token or not token.startswith("hf_"):
    print("[HF] HF_TOKEN empty or invalid format; skipping login")
    # Drop the bad value so downstream library calls can't pick it up.
    os.environ.pop("HF_TOKEN", None)
else:
    print("[HF] HF_TOKEN present; attempting login")
    login(token=token)

# Download (or reuse) the snapshot into the cache mount used by the
# inference server. NOTE(review): call was truncated in the diff view;
# closed minimally with the two visible arguments — confirm no further
# kwargs (e.g. revision, allow_patterns) exist in the original template.
snapshot_download(
    repo_id=model,
    local_dir="/cache/models",
)
You can’t perform that action at this time.
0 commit comments