1 parent ac20fd2 commit a9def58
megatron/core/transformer/attention.py
@@ -57,7 +57,9 @@
     rearrange = None
 
 try:
-    from flash_attn_3.flash_attn_interface import _flash_attn_forward
+    from flash_attn_3.flash_attn_interface import (
+        _flash_attn_forward,
+    )
     from flash_attn_3.flash_attn_interface import (
         flash_attn_with_kvcache as flash_attn3_with_kvcache,
     )
@@ -68,7 +70,9 @@
 if not HAVE_FA3:
-    from flashattn_hopper.flash_attn_interface import _flash_attn_forward
+    from flashattn_hopper.flash_attn_interface import (
+        _flash_attn_forward,
+    )
     from flashattn_hopper.flash_attn_interface import (
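Both hunks apply the same change: the single-line `_flash_attn_forward` import is wrapped in a parenthesized, multi-line import so it matches the import that follows it. Below is a minimal sketch of the guarded-import pattern these hunks sit in, assuming a conventional try/except fallback; only the import lines, the HAVE_FA3 flag, and the flashattn_hopper fallback module name come from the diff, the surrounding structure is illustrative rather than the file's exact code.

try:
    # FlashAttention-3 wheels that expose the interface as flash_attn_3.
    from flash_attn_3.flash_attn_interface import (
        _flash_attn_forward,
    )
    from flash_attn_3.flash_attn_interface import (
        flash_attn_with_kvcache as flash_attn3_with_kvcache,
    )

    HAVE_FA3 = True
except ImportError:
    HAVE_FA3 = False

if not HAVE_FA3:
    # Assumed fallback: the same symbols imported from the alternative
    # flashattn_hopper package name, mirroring the block above.
    try:
        from flashattn_hopper.flash_attn_interface import (
            _flash_attn_forward,
        )
        from flashattn_hopper.flash_attn_interface import (
            flash_attn_with_kvcache as flash_attn3_with_kvcache,
        )

        HAVE_FA3 = True
    except ImportError:
        HAVE_FA3 = False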