
Commit cb5177c

transformers cache
Signed-off-by: adil-a <adil.asif2000@hotmail.com>
1 parent: 1ab6a76

1 file changed: +2 −2 lines

nemo_automodel/recipes/llm/kd.py

Lines changed: 2 additions & 2 deletions
```diff
@@ -42,8 +42,8 @@
 
 import torch
 import wandb
+from huggingface_hub import constants as hf_constants
 from torchao.float8 import precompute_float8_dynamic_scale_for_fsdp
-from transformers.utils import TRANSFORMERS_CACHE
 
 from nemo_automodel._transformers.auto_tokenizer import NeMoAutoTokenizer
 from nemo_automodel.components.config._arg_parser import parse_args_and_load_config
```
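
For context, a hedged note on the new default: `hf_constants.HF_HUB_CACHE` points at the shared Hugging Face Hub cache (by default `~/.cache/huggingface/hub`, overridable through the `HF_HUB_CACHE` or `HF_HOME` environment variables), which is the same location the deprecated `transformers.utils.TRANSFORMERS_CACHE` alias has resolved to in recent transformers releases. A minimal sketch, assuming only that `huggingface_hub` is installed:

```python
# Minimal sketch: inspect the cache location the new default falls back to.
# Assumes huggingface_hub is installed; the printed path is machine-dependent.
from huggingface_hub import constants as hf_constants

# HF_HUB_CACHE is derived from the HF_HUB_CACHE / HF_HOME environment variables
# and defaults to ~/.cache/huggingface/hub.
print(hf_constants.HF_HUB_CACHE)
```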
```diff
@@ -113,7 +113,7 @@ def _build_teacher_model(
     teacher_checkpointer = Checkpointer(
         CheckpointingConfig(
             model_repo_id=cfg_teacher.get("pretrained_model_name_or_path"),
-            model_cache_dir=cfg_teacher.get("cache_dir", TRANSFORMERS_CACHE),
+            model_cache_dir=cfg_teacher.get("cache_dir", hf_constants.HF_HUB_CACHE),
             # Dummy values
             is_peft=False,
             enabled=False,
```
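
A hedged sketch of how the changed default behaves: an explicit `cache_dir` in the teacher config still takes precedence, and the Hub cache is only used when the key is absent. The config dict and model name below are hypothetical stand-ins, assuming `cfg_teacher.get` behaves like `dict.get`:

```python
# Hypothetical sketch of the fallback, assuming cfg_teacher supports dict-style .get().
from huggingface_hub import constants as hf_constants

cfg_teacher = {"pretrained_model_name_or_path": "some-org/teacher-model"}  # hypothetical config

# No "cache_dir" key present, so the shared Hub cache is used as the default.
print(cfg_teacher.get("cache_dir", hf_constants.HF_HUB_CACHE))  # e.g. ~/.cache/huggingface/hub

# An explicit cache_dir in the config still wins over the default.
cfg_teacher["cache_dir"] = "/tmp/teacher-cache"
print(cfg_teacher.get("cache_dir", hf_constants.HF_HUB_CACHE))  # /tmp/teacher-cache
```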
