nemo_automodel/recipes/llm: 1 file changed, +2 -2 lines

@@ -43,7 +43,7 @@
 import torch
 import wandb
+from huggingface_hub import constants as hf_constants
 from torchao.float8 import precompute_float8_dynamic_scale_for_fsdp
-from transformers.utils import TRANSFORMERS_CACHE

 from nemo_automodel._transformers.auto_tokenizer import NeMoAutoTokenizer
 from nemo_automodel.components.config._arg_parser import parse_args_and_load_config
@@ -113,7 +113,7 @@ def _build_teacher_model(
     teacher_checkpointer = Checkpointer(
         CheckpointingConfig(
             model_repo_id=cfg_teacher.get("pretrained_model_name_or_path"),
-            model_cache_dir=cfg_teacher.get("cache_dir", TRANSFORMERS_CACHE),
+            model_cache_dir=cfg_teacher.get("cache_dir", hf_constants.HF_HUB_CACHE),
             # Dummy values
             is_peft=False,
             enabled=False,
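
For context, a minimal self-contained sketch of the fallback behaviour this diff switches to. `hf_constants.HF_HUB_CACHE` is the `huggingface_hub` constant that replaces the `TRANSFORMERS_CACHE` export being dropped from the `transformers` import; `cfg_teacher` below is a plain dict standing in for the recipe's teacher config object (an assumption for illustration, not the actual NeMo AutoModel type):

```python
# Sketch only: resolving the default model cache directory the way the new
# code does. Assumes a reasonably recent huggingface_hub, where HF_HUB_CACHE
# is defined in huggingface_hub.constants.
from huggingface_hub import constants as hf_constants

# Stand-in for the recipe's teacher config; the real object is built from the
# recipe YAML, this dict is only for illustration.
cfg_teacher = {"pretrained_model_name_or_path": "gpt2"}

# If the config sets no explicit cache_dir, fall back to the shared
# Hugging Face hub cache (typically ~/.cache/huggingface/hub) instead of the
# old transformers.utils.TRANSFORMERS_CACHE constant.
model_cache_dir = cfg_teacher.get("cache_dir", hf_constants.HF_HUB_CACHE)
print(model_cache_dir)
```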