Skip to content

Commit 2ee3442

Browse files
authored
Fixed debug tests
1 parent 7b55639 commit 2ee3442

File tree

1 file changed

+18
-5
lines changed

1 file changed

+18
-5
lines changed

tests/pytorch/debug/test_sanity.py

Lines changed: 18 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -30,6 +30,13 @@
3030
stats: [min, max, mean, std, l1_norm, l2_norm, cur_amax, dynamic_range]
3131
start_step : 0
3232
end_step: 1
33+
""",
34+
"log_fp8": """log_fp8:
35+
layers:
36+
layer_types: [linear]
37+
enabled:
38+
True
39+
transformer_engine:
3340
LogFp8TensorStats:
3441
enabled: True
3542
tensors: [activation, gradient, weight]
@@ -46,22 +53,26 @@
4653
FakeQuant:
4754
enabled: True
4855
gemms: [fprop, dgrad, wgrad]
56+
tensors: [activation, weight, gradient]
4957
quant_format: FP8E5M2
5058
""",
5159
}
5260

61+
# Configs that require FP8 to be enabled
62+
fp8_required_configs = {"log_fp8"}
63+
5364

5465
def _get_model(model_key):
5566
if model_key == "linear":
56-
return te.Linear(D, D)
67+
return te.Linear(D, D, name="layer")
5768
if model_key == "layernorm_linear":
58-
return te.LayerNormLinear(D, D)
69+
return te.LayerNormLinear(D, D, name="layer")
5970
if model_key == "layernorm_mlp":
60-
return te.LayerNormMLP(D, D, D)
71+
return te.LayerNormMLP(D, D, D, name="layer")
6172
if model_key == "mha_attention":
62-
return te.MultiheadAttention(D, H)
73+
return te.MultiheadAttention(D, H, name="layer")
6374
if model_key == "transformer_layer":
64-
return te.TransformerLayer(D, D, H)
75+
return te.TransformerLayer(D, D, H, name="layer")
6576

6677

6778
def _run_forward_backward(model, fp8):
@@ -95,4 +106,6 @@ def _run_test(model_key, fp8, config, feature_dirs, config_file, log_dir):
95106
def test_sanity_debug(model_key, fp8, config_key, feature_dirs):
96107
if fp8 and not fp8_available:
97108
pytest.skip(reason_for_no_fp8)
109+
if not fp8 and config_key in fp8_required_configs:
110+
pytest.skip(f"Config '{config_key}' requires FP8")
98111
_run_test(model_key, fp8, configs[config_key], feature_dirs)

0 commit comments

Comments (0)