Skip to content

Commit 665147a

Browse files
skip 3D NCHW FP16 batchnorm test due to Native accuracy issue
Cherry-pick of #2370 Co-authored-by: Dmitry Nikolaev <[email protected]>
1 parent a20c72c commit 665147a

File tree

1 file changed

+2
-4
lines changed

1 file changed

+2
-4
lines changed

test/test_nn.py

Lines changed: 2 additions & 4 deletions
@@ -5180,6 +5180,8 @@ def test_batchnorm_nhwc_cuda(self):
             name_fn=lambda f, b, m, t: f"{f}_vs_{b}{'_mixed' if m else ''}_{dtype_name(t)}"
         )
         def test_batchnorm(self, dims, mode, memory_format, ref_backend, mixed, dtype):
+            if self._testMethodName == "test_batchnorm_3D_train_NCHW_vs_native_mixed_float16":
+                self.skipTest("3D float16 NCHW train failed on CUDA and ROCm due to Native batchnorm accuracy issue SWDEV-541024")
             if torch.version.hip:
                 if self._testMethodName in ("test_batchnorm_2D_train_NHWC_vs_NCHW_mixed_bfloat16",
                                             "test_batchnorm_2D_train_NCHW_vs_cpu_mixed_bfloat16",
@@ -5195,10 +5197,6 @@ def test_batchnorm(self, dims, mode, memory_format, ref_backend, mixed, dtype):
                 ) and _get_torch_rocm_version() >= (6, 4):
                     self.skipTest("bfloat16 NCHW train failed due to native tolerance issue SWDEV-507600")
 
-            if self._testMethodName == "test_batchnorm_3D_train_NCHW_vs_native_mixed_float16" \
-                    and _get_torch_rocm_version() < (6, 4):
-                self.skipTest("3D float16 NCHW train failed on ROCm<=6.3 ")
-
             if dims == 3 and memory_format in ("NHWC", "NCHW"):
                 memory_format = memory_format + "3D"

0 commit comments

Comments
 (0)