Commit 0180c3e

lint still complains about the nemo_vfm file.
Signed-off-by: Sajad Norouzi <snorouzi@nvidia.com>
Parent: 72970ac

1 file changed: +0 −8 lines changed


nemo_vfm/diffusion/models/dit/dit_layer_spec.py

Lines changed: 0 additions & 8 deletions
@@ -27,7 +27,6 @@
     SelfAttention,
     SelfAttentionSubmodules,
 )
-from megatron.core.transformer.cuda_graphs import CudaGraphManager
 from megatron.core.transformer.custom_layers.transformer_engine import (
     TEColumnParallelLinear,
     TEDotProductAttention,
@@ -43,11 +42,6 @@
 from megatron.core.transformer.transformer_config import TransformerConfig
 from megatron.core.transformer.transformer_layer import TransformerLayer, TransformerLayerSubmodules
 from megatron.core.utils import make_viewless_tensor
-from nemo.collections.diffusion.models.dit.dit_attention import (
-    FluxSingleAttention,
-    JointSelfAttention,
-    JointSelfAttentionSubmodules,
-)


 # pylint: disable=C0116
@@ -518,7 +512,6 @@ def forward(
         return hidden_states, context


-
 def get_dit_adaln_block_with_transformer_engine_spec() -> ModuleSpec:
     params = {"attn_mask_type": AttnMaskType.padding}
     return ModuleSpec(
@@ -556,4 +549,3 @@ def get_dit_adaln_block_with_transformer_engine_spec() -> ModuleSpec:
         ),
     ),
 )
-
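All eight deleted lines are unused imports or stray blank lines. The file's `# pylint: disable=C0116` pragma suggests pylint is the linter the commit message refers to; a minimal sketch of reproducing an unused-import complaint locally, assuming pylint's built-in W0611 (unused-import) check and the file path shown above, is:

    # Minimal sketch, assuming pylint with its standard unused-import (W0611)
    # check; the repo's actual lint configuration is not shown in this commit.
    pylint --disable=all --enable=unused-import \
        nemo_vfm/diffusion/models/dit/dit_layer_spec.py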
