
Commit f9e0ce7 (1 parent: f61bc7b)

drop _tied_weights_keys

Signed-off-by: Mayank Mishra <mayank31398@gmail.com>

File tree

2 files changed: +1, -4 lines

  lm_engine/hf_models/modeling_utils/position_embedding/rope.py
  lm_engine/utils/safetensors.py


lm_engine/hf_models/modeling_utils/position_embedding/rope.py

Lines changed: 0 additions & 2 deletions
@@ -2,8 +2,6 @@
 # Copyright (c) 2025, Mayank Mishra
 # **************************************************
 
-"""Logic is copied from transformers.models.llama.modeling_utils with slight modifications"""
-
 from __future__ import annotations
 
 import math

lm_engine/utils/safetensors.py

Lines changed: 1 addition & 2 deletions
@@ -11,7 +11,6 @@
 from huggingface_hub import split_torch_state_dict_into_shards
 from safetensors import safe_open
 from safetensors.torch import save_file
-from transformers.modeling_utils import SAFE_WEIGHTS_INDEX_NAME
 
 
 _DEBUG = False
@@ -117,5 +116,5 @@ def save_state_dict(state_dict: dict, save_path: str) -> None:
         "weight_map": state_dict_split.tensor_to_filename,
     }
 
-    with open(os.path.join(save_path, SAFE_WEIGHTS_INDEX_NAME), "w") as f:
+    with open(os.path.join(save_path, "model.safetensors.index.json"), "w") as f:
         f.write(json.dumps(index, indent=2))
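For context: the hunk above swaps transformers' SAFE_WEIGHTS_INDEX_NAME constant for its literal value, "model.safetensors.index.json", which is what lets the transformers.modeling_utils import be dropped. Below is a minimal sketch of how this save path plausibly fits together, assuming huggingface_hub's split_torch_state_dict_into_shards and safetensors' save_file; only the index-writing lines at the end appear in the diff, and the rest of the body is illustrative (the repo's actual code may, for example, only write the index when the checkpoint is sharded).

import json
import os

from huggingface_hub import split_torch_state_dict_into_shards
from safetensors.torch import save_file


def save_state_dict(state_dict: dict, save_path: str) -> None:
    # Split the state dict into shards sized for safetensors files.
    state_dict_split = split_torch_state_dict_into_shards(state_dict)

    os.makedirs(save_path, exist_ok=True)

    # Write each shard as its own .safetensors file (save_file requires
    # contiguous tensors).
    for filename, tensor_names in state_dict_split.filename_to_tensors.items():
        shard = {name: state_dict[name].contiguous() for name in tensor_names}
        save_file(shard, os.path.join(save_path, filename))

    # Write the index mapping each tensor to its shard. The filename is now
    # hardcoded; "model.safetensors.index.json" is the same value transformers
    # exposed as SAFE_WEIGHTS_INDEX_NAME, so the on-disk layout is unchanged.
    index = {
        "metadata": state_dict_split.metadata,
        "weight_map": state_dict_split.tensor_to_filename,
    }
    with open(os.path.join(save_path, "model.safetensors.index.json"), "w") as f:
        f.write(json.dumps(index, indent=2))

Hardcoding the string removes a dependency on a transformers-internal constant while keeping checkpoints loadable by transformers, which looks for exactly this index filename when loading sharded safetensors weights.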
