From 2f9e18c5fb255cbd1f554c070bcf6d852ab9b848 Mon Sep 17 00:00:00 2001
From: Dmitry Nikolaev
Date: Tue, 15 Jul 2025 15:20:53 +0000
Subject: [PATCH] Skip 3D NCHW FP16 batchnorm test due to Native accuracy
 issue

---
 test/test_nn.py | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/test/test_nn.py b/test/test_nn.py
index 0353d20a54ece..cc0f0aae41c40 100644
--- a/test/test_nn.py
+++ b/test/test_nn.py
@@ -5179,6 +5179,8 @@ def test_batchnorm_nhwc_cuda(self):
         name_fn=lambda f, b, m, t: f"{f}_vs_{b}{'_mixed' if m else ''}_{dtype_name(t)}"
     )
     def test_batchnorm(self, dims, mode, memory_format, ref_backend, mixed, dtype):
+        if self._testMethodName == "test_batchnorm_3D_train_NCHW_vs_native_mixed_float16":
+            self.skipTest("3D float16 NCHW train failed on CUDA and ROCm due to Native batchnorm accuracy issue SWDEV-541024")
         if torch.version.hip:
             if self._testMethodName in ("test_batchnorm_2D_train_NHWC_vs_NCHW_mixed_bfloat16",
                                         "test_batchnorm_2D_train_NCHW_vs_cpu_mixed_bfloat16",
@@ -5194,10 +5196,6 @@ def test_batchnorm(self, dims, mode, memory_format, ref_backend, mixed, dtype):
                                         ) and _get_torch_rocm_version() >= (6, 4):
                 self.skipTest("bfloat16 NCHW train failed due to native tolerance issue SWDEV-507600")
 
-            if self._testMethodName == "test_batchnorm_3D_train_NCHW_vs_native_mixed_float16" \
-                    and _get_torch_rocm_version() < (6, 4):
-                self.skipTest("3D float16 NCHW train failed on ROCm<=6.3 ")
-
         if dims == 3 and memory_format in ("NHWC", "NCHW"):
             memory_format = memory_format + "3D"
 
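For context on the mechanism this patch uses: `TestCase._testMethodName` is the unittest attribute holding the name of the currently running (here, parametrized) test variant, and `skipTest()` raises `unittest.SkipTest` so the test is reported as skipped with a reason rather than failed. A minimal standalone sketch of the same pattern, with hypothetical class and test names not taken from this patch (note `_testMethodName` is technically a private attribute, though it is what the patch itself checks):

```python
import unittest


class BatchnormLikeTest(unittest.TestCase):
    """Illustration only: skip one named variant of a parametrized test."""

    def test_variant_a(self):
        self._maybe_skip_known_failure()
        self.assertTrue(True)  # real assertions would go here

    def test_variant_b(self):
        self._maybe_skip_known_failure()
        self.assertTrue(True)

    def _maybe_skip_known_failure(self):
        # Same guard shape as the patch: match on the generated test name,
        # then abort via skipTest() so the runner records a skip, not a fail.
        if self._testMethodName == "test_variant_b":
            self.skipTest("known accuracy issue (tracked in a ticket)")


if __name__ == "__main__":
    unittest.main()
```

Matching on the generated method name, as the patch does, lets a single parametrized test body opt out of one specific dims/memory-format/dtype combination without touching the parametrization itself.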