Skip to content

Commit bd67af5

Browse files
committed
Fix isort indentation format
1 parent afe3a85 commit bd67af5

File tree

3 files changed

+6
-13
lines changed

3 files changed

+6
-13
lines changed

native_sparse_attention/modeling_nsa.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -11,8 +11,7 @@
1111
import torch.utils.checkpoint
1212
from einops import rearrange
1313
from transformers.generation import GenerationMixin
14-
from transformers.modeling_outputs import (BaseModelOutputWithPast,
15-
CausalLMOutputWithPast)
14+
from transformers.modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast
1615
from transformers.modeling_utils import PreTrainedModel
1716
from transformers.utils import logging
1817
from transformers.utils.deprecation import deprecate_kwarg

native_sparse_attention/ops/parallel.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -9,8 +9,7 @@
99
import triton.language as tl
1010
from einops import rearrange
1111

12-
from fla.ops.common.utils import (prepare_chunk_indices, prepare_chunk_offsets,
13-
prepare_lens, prepare_token_indices)
12+
from fla.ops.common.utils import prepare_chunk_indices, prepare_chunk_offsets, prepare_lens, prepare_token_indices
1413
from fla.ops.utils import mean_pooling
1514
from fla.utils import autocast_custom_bwd, autocast_custom_fwd, contiguous
1615
from native_sparse_attention.ops.utils import _bitonic_merge

train.py

Lines changed: 4 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -15,20 +15,15 @@
1515
from torchtitan.components.checkpoint import CheckpointManager
1616
from torchtitan.components.ft import FTParallelDims, init_ft_manager
1717
from torchtitan.components.loss import cross_entropy_loss
18-
from torchtitan.components.metrics import (_build_metric_logger,
19-
build_device_memory_monitor,
20-
ensure_pp_loss_visible)
21-
from torchtitan.components.optimizer import (build_lr_schedulers,
22-
build_optimizers)
18+
from torchtitan.components.metrics import _build_metric_logger, build_device_memory_monitor, ensure_pp_loss_visible
19+
from torchtitan.components.optimizer import build_lr_schedulers, build_optimizers
2320
from torchtitan.distributed import ParallelDims
2421
from torchtitan.distributed import utils as dist_utils
2522
from torchtitan.protocols.model_converter import build_model_converters
26-
from torchtitan.protocols.train_spec import (TrainSpec, get_train_spec,
27-
register_train_spec)
23+
from torchtitan.protocols.train_spec import TrainSpec, get_train_spec, register_train_spec
2824
from torchtitan.tools import utils
2925
from torchtitan.tools.logging import init_logger, logger
30-
from torchtitan.tools.profiling import (maybe_enable_memory_snapshot,
31-
maybe_enable_profiling)
26+
from torchtitan.tools.profiling import maybe_enable_memory_snapshot, maybe_enable_profiling
3227
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer
3328

3429
import native_sparse_attention # noqa

0 commit comments

Comments (0)