Commit 7c5c0b5

Remove warnings in favor of skiptests for Moe code (#2654)
* Remove warnings in favor of skiptests for Moe code
* update
* lint
1 parent ffaf572 commit 7c5c0b5
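
The change standardizes on pytest's module-level skip: rather than emitting warnings.warn() and then calling pytest.skip() with no reason, each guarded test module now passes the reason string directly to pytest.skip(..., allow_module_level=True), which aborts collection of the whole module at import time. A minimal sketch of the idiom, with an illustrative module and test name that are not part of this commit:

# example_skip_module.py -- illustrative sketch only, not from this commit
import pytest

try:
    # optional dependency: if it is missing, skip every test in this module
    # at collection time instead of warning and skipping silently
    import torchtitan
except ImportError:
    pytest.skip(
        "torchtitan not installed, skipping MoE tests.", allow_module_level=True
    )


def test_uses_torchtitan():
    # only collected and run when the import above succeeded
    assert torchtitan is not None

Running pytest with -rs still surfaces the skip reason in the summary, so the information previously carried by the warning is not lost.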

File tree: 5 files changed (+21, -33 lines)

test/float8/test_base.py

Lines changed: 3 additions & 7 deletions
@@ -8,7 +8,6 @@
 import random
 import re
 import unittest
-import warnings

 import pytest
 import torch
@@ -381,6 +380,9 @@ def test_linear_from_config_params(
         "linear_dtype", [torch.bfloat16, torch.float16, torch.float32]
     )
     @unittest.skipIf(not torch.cuda.is_available(), "CUDA not available")
+    @unittest.skipIf(
+        torch.cuda.is_available() and not is_sm_at_least_90(), "CUDA capability < 9.0"
+    )
     @skip_if_rocm("ROCm enablement in progress")
     def test_linear_from_recipe(
         self,
@@ -389,12 +391,6 @@ def test_linear_from_recipe(
         linear_dtype: torch.dtype,
         linear_bias: bool,
     ):
-        if torch.cuda.get_device_capability() < (9, 0):
-            warnings.warn(
-                f"CUDA capability {torch.cuda.get_device_capability()} < (9.0)"
-            )
-            pytest.skip()
-
         x = torch.randn(*x_shape, device="cuda", dtype=linear_dtype)
         m_ref = nn.Linear(16, 32, bias=linear_bias, device="cuda", dtype=linear_dtype)
         config = Float8LinearConfig.from_recipe_name(recipe_name)
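
The test_base.py hunk above swaps a runtime capability check inside the test body for a decorator-level skip. A self-contained sketch of that pattern, assuming a locally stubbed is_sm_at_least_90 helper (the real test file uses its own helper; the class and method names here are illustrative):

# illustrative sketch of the decorator-based skip, not from this commit
import unittest

import torch


def is_sm_at_least_90() -> bool:
    # SM 9.0 corresponds to Hopper-class GPUs (e.g. H100)
    return torch.cuda.is_available() and torch.cuda.get_device_capability() >= (9, 0)


class ExampleFloat8Test(unittest.TestCase):
    @unittest.skipIf(not torch.cuda.is_available(), "CUDA not available")
    @unittest.skipIf(
        torch.cuda.is_available() and not is_sm_at_least_90(), "CUDA capability < 9.0"
    )
    def test_requires_hopper(self):
        # reached only on CUDA devices with compute capability >= 9.0
        self.assertGreaterEqual(torch.cuda.get_device_capability(), (9, 0))


if __name__ == "__main__":
    unittest.main()

Note the extra torch.cuda.is_available() in the second condition: it keeps the capability query from running on CPU-only hosts, where the first decorator already skips the test.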

test/prototype/moe_training/test_fsdp.py

Lines changed: 3 additions & 4 deletions
@@ -38,10 +38,9 @@
     from torchtitan.experiments.llama4.model.args import TransformerModelArgs
     from torchtitan.experiments.llama4.model.moe import MoE
 except ImportError:
-    import warnings
-
-    warnings.warn("torchtitan not installed, skipping MoE tests.")
-    pytest.skip(allow_module_level=True)
+    pytest.skip(
+        "torchtitan not installed, skipping MoE tests.", allow_module_level=True
+    )


 def test_moe_float8_training_fsdp():

test/prototype/moe_training/test_fsdp_tp.py

Lines changed: 6 additions & 9 deletions
@@ -30,12 +30,10 @@
     parallelize_module,
 )
 except ImportError:
-    import warnings
-
-    warnings.warn(
-        "torch version is too old, these tests require nightly build. Skipping MoE training tests."
+    pytest.skip(
+        "torch version is too old, these tests require nightly build. Skipping MoE training tests.",
+        allow_module_level=True,
     )
-    pytest.skip(allow_module_level=True)

 # this feature requires CUDA and SM89+
 if not torch.cuda.is_available() or torch.cuda.get_device_capability() < (8, 9):
@@ -60,10 +58,9 @@
     from torchtitan.experiments.llama4.model.args import TransformerModelArgs
     from torchtitan.experiments.llama4.model.moe import MoE
 except ImportError:
-    import warnings
-
-    warnings.warn("torchtitan not installed, skipping MoE tests.")
-    pytest.skip(allow_module_level=True)
+    pytest.skip(
+        "torchtitan not installed, skipping MoE tests.", allow_module_level=True
+    )


 @pytest.mark.parametrize(

test/prototype/moe_training/test_tp.py

Lines changed: 6 additions & 9 deletions
@@ -29,12 +29,10 @@
     parallelize_module,
 )
 except ImportError:
-    import warnings
-
-    warnings.warn(
-        "torch version is too old, these tests require nightly build. Skipping MoE training tests."
+    pytest.skip(
+        "torch version is too old, these tests require nightly build. Skipping MoE training tests.",
+        allow_module_level=True,
     )
-    pytest.skip(allow_module_level=True)


 # this feature requires CUDA and SM89+
@@ -60,10 +58,9 @@
     from torchtitan.experiments.llama4.model.args import TransformerModelArgs
     from torchtitan.experiments.llama4.model.moe import MoE
 except ImportError:
-    import warnings
-
-    warnings.warn("torchtitan not installed, skipping MoE tests.")
-    pytest.skip(allow_module_level=True)
+    pytest.skip(
+        "torchtitan not installed, skipping MoE tests.", allow_module_level=True
+    )


 @pytest.mark.parametrize(

test/prototype/moe_training/test_training.py

Lines changed: 3 additions & 4 deletions
@@ -22,10 +22,9 @@
     from torchtitan.experiments.llama4.model.args import TransformerModelArgs
     from torchtitan.experiments.llama4.model.moe import MoE
 except ImportError:
-    import warnings
-
-    warnings.warn("torchtitan not installed, skipping MoE tests.")
-    pytest.skip(allow_module_level=True)
+    pytest.skip(
+        "torchtitan not installed, skipping MoE tests.", allow_module_level=True
+    )


 @pytest.mark.parametrize(
