We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 4c4e1e0 commit c8b7423Copy full SHA for c8b7423
keras_hub/src/utils/transformers/convert_smollm3.py
@@ -29,8 +29,7 @@ def convert_backbone_config(transformers_config):
         "attention_dropout": transformers_config["attention_dropout"],
         "rope_layer_enabled_list": transformers_config["no_rope_layers"],
         "layer_types": transformers_config["layer_types"],
-        "mlp_bias": transformers_config["mlp_bias"],
-        "rope_scaling": transformers_config["rope_scaling"]
+        "mlp_bias": transformers_config["mlp_bias"]
     }
0 commit comments