Skip to content

Commit 8f25717

Browse files
committed
check out main branch's fix for megatron+importer
Signed-off-by: Ye Yu <[email protected]>
1 parent e40fb07 commit 8f25717

File tree

1 file changed

+1
-4
lines changed

1 file changed

+1
-4
lines changed

modelopt/torch/export/plugins/megatron_importer.py

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -512,10 +512,7 @@ def _import_state_dict(self):
512512
self.rules["k_layernorm"](attention.k_layernorm, layer_id)
513513
self.rules["linear_qkv"](attention.linear_qkv, layer_id)
514514
self.rules["linear_proj"](attention.linear_proj, layer_id)
515-
if (
516-
hasattr(attention.core_attention, "softmax_offset")
517-
and attention.core_attention.softmax_offset is not None
518-
):
515+
if hasattr(attention.core_attention, "softmax_offset"):
519516
self.rules["softmax_offset"](
520517
attention.core_attention.softmax_offset, layer_id
521518
)

0 commit comments

Comments
 (0)