
Commit 8abc0ef

drop _tied_weights_keys

Signed-off-by: Mayank Mishra <mayank31398@gmail.com>

1 parent c2bfe35

1 file changed: +0 −27 lines changed


lm_engine/hf_models/config/__init__.py

Lines changed: 0 additions & 27 deletions
@@ -36,30 +36,6 @@ def _run(self, *args, **kwargs):
     return _holded_function
 
 
-# for erroring out on legacy configs
-_NAKED_DISALLOWED_ARGS = [
-    "activation_function",
-    "attn_pdrop",
-    "embd_pdrop",
-    "resid_pdrop",
-    "intermediate_size",
-    "shared_intermediate_size",
-    "num_experts",
-    "num_experts_per_tok",
-    "add_bias",
-    "attention_blocks",
-    "num_key_value_heads",
-    "attention_head_type",
-    "attention_multiplier",
-    "n_embd",
-    "n_head",
-    "n_inner",
-    "n_layer",
-    "n_positions",
-    "scale_attn_weights",
-    "num_attention_heads",
-]
-
 _SEQUENCE_MIXER_CONFIG_CLASSES = {
     "causal_convolution": _CausalConvolution,
     "gru": _GRUArgs,
@@ -148,9 +124,6 @@ def __init__(
 
         self.router_aux_loss_coef = router_aux_loss_coef
 
-        for i in _NAKED_DISALLOWED_ARGS:
-            assert i not in kwargs, f"found naked argument ({i})"
-
         super().__init__(
             bos_token_id=bos_token_id,
             eos_token_id=eos_token_id,
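
For context, the code removed here was a guard that rejected legacy GPT-2-style configuration keys at construction time. Below is a minimal, self-contained sketch of that pattern; the _LegacyGuardedConfig class, its hidden_size parameter, and the shortened disallowed list are illustrative stand-ins, not the actual lm_engine config class. Only the assert-on-legacy-kwargs loop mirrors the deleted code.

# Sketch of the removed guard pattern (names here are hypothetical).

# Subset of the legacy argument names the removed list disallowed.
_NAKED_DISALLOWED_ARGS = ["n_embd", "n_head", "n_layer", "activation_function"]


class _LegacyGuardedConfig:
    def __init__(self, hidden_size: int = 768, **kwargs) -> None:
        # Error out when a caller passes a legacy (naked) config key.
        for i in _NAKED_DISALLOWED_ARGS:
            assert i not in kwargs, f"found naked argument ({i})"

        self.hidden_size = hidden_size


# A current-style key is accepted.
config = _LegacyGuardedConfig(hidden_size=1024)

# A legacy key trips the guard.
try:
    _LegacyGuardedConfig(n_embd=1024)
except AssertionError as error:
    print(error)  # found naked argument (n_embd)

Placing the check in __init__ before the super().__init__ call meant legacy keys failed loudly at config construction instead of being silently swallowed into **kwargs; this commit removes that guard.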
