Skip to content

Commit 21045b5

Browse files
committed
drop use_kernels_from_apex
1 parent 4a33f29 commit 21045b5

File tree

5 files changed

+0

-12

lines changed

megatron/arguments.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -641,8 +641,6 @@ def _add_training_args(parser):
641641
'training if SIGTERM is received')
642642
group.add_argument('--tensorboard-dir', type=str, default=None,
643643
help='Write TensorBoard logs to this directory.')
644-
group.add_argument("--use-kernels-from-apex", action="store_true",
645-
help="use Apex kernels instead of Megatron")
646644
group.add_argument('--no-masked-softmax-fusion',
647645
action='store_false',
648646
help='Disable fusion of query_key_value scaling, '

megatron/fused_kernels/__init__.py

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -2,9 +2,6 @@
22

33

44
def load(args):
5-
if args.use_kernels_from_apex:
6-
return
7-
85
if torch.version.hip is None:
96
print("running on CUDA devices")
107
from megatron.fused_kernels.cuda import load as load_kernels

megatron/fused_kernels/tests/test_fused_kernels.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -283,7 +283,6 @@ class DummyArgs:
283283
rank: int = 0
284284
masked_softmax_fusion: bool = True
285285
gradient_accumulation_fusion: bool = True
286-
use_kernels_from_apex: bool = False
287286

288287

289288
if __name__ == "__main__":

tools/checkpoint_loader_megatron.py

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,6 @@
55

66
import torch
77

8-
98
def add_arguments(parser):
109
group = parser.add_argument_group(title='Megatron loader')
1110

@@ -16,8 +15,6 @@ def add_arguments(parser):
1615
'trim padding from the embedding table.')
1716
group.add_argument('--megatron-path', type=str, default=None,
1817
help='Base directory of deepspeed repository')
19-
group.add_argument("--use-kernels-from-apex", action="store_true",
20-
help="use Apex kernels instead of Megatron")
2118

2219
def _load_checkpoint(queue, args):
2320

tools/checkpoint_saver_megatron.py

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,6 @@
66

77
import torch
88

9-
109
def add_arguments(parser):
1110
group = parser.add_argument_group(title='Megatron saver')
1211

@@ -19,8 +18,6 @@ def add_arguments(parser):
1918
group.add_argument('--target-pipeline-parallel-size', type=int,
2019
help='Target tensor model parallel size, default to the pipeline parall size '
2120
'in the input checkpoint if provided by the loader, otherwise to 1')
22-
group.add_argument("--use-kernels-from-apex", action="store_true",
23-
help="use Apex kernels instead of Megatron")
2421

2522
def save_checkpoint(queue, args):
2623

0 commit comments

Comments (0)