@@ -398,7 +398,7 @@ def wrapped_fn(*args, **kwargs):
398
398
# BUG: runs and produces numerical differences
399
399
skip ('nn.functional.max_unpool1d' ), # fails everywhere except on mac
400
400
skip ('nn.functional.max_unpool2d' ), # fails everywhere except on windows
401
- xfail ('nn.functional.max_unpool3d' ),
401
+ skip ('nn.functional.max_unpool3d' ), # fails everywhere except on mac
402
402
}))
403
403
@opsToleranceOverride ('TestOperators' , 'test_jvp' , (
404
404
tol1 ('nn.functional.conv_transpose3d' ,
@@ -709,9 +709,9 @@ def test_vmapvjp(self, device, dtype, op):
709
709
xfail ('double' ), # required rank 4 tensor to use channels_last format
710
710
711
711
# BUG: runs and produces numerical differences
712
- xfail ('nn.functional.max_unpool1d' , device_type = 'cpu' ),
713
- xfail ('nn.functional.max_unpool2d' ),
714
- xfail ('nn.functional.max_unpool3d' ),
712
+ skip ('nn.functional.max_unpool1d' , device_type = 'cpu' ), # fails everywhere except on mac
713
+ skip ('nn.functional.max_unpool2d' ), # fails everywhere except on mac
714
+ skip ('nn.functional.max_unpool3d' ), # fails everywhere except on mac
715
715
716
716
xfail ('put' ), # calls put_ during vmap with only vmaps over other, not self
717
717
})
@@ -781,9 +781,8 @@ def test_vmapjvp(self, device, dtype, op):
781
781
xfail ('double' ), # required rank 4 tensor to use channels_last format
782
782
783
783
skip ('nn.functional.max_unpool1d' ), # Flaky, seems to sometimes hit max_unpool2d
784
- # BUG: runs and produces numerical differences
785
- xfail ('nn.functional.max_unpool2d' ),
786
- xfail ('nn.functional.max_unpool3d' ),
784
+ skip ('nn.functional.max_unpool2d' ), # fails everywhere except on mac
785
+ skip ('nn.functional.max_unpool3d' ), # fails everywhere except on mac
787
786
788
787
xfail ('put' ), # calls put_ during vmap with only vmaps over other, not self
789
788
xfail ('nn.functional.prelu' ), # Call Tensor.as_strided
@@ -1051,6 +1050,7 @@ def test():
1051
1050
xfail ('pca_lowrank' , '' ),
1052
1051
xfail ('nn.functional.feature_alpha_dropout' , 'without_train' ),
1053
1052
xfail ('nn.functional.feature_alpha_dropout' , 'with_train' ),
1053
+ xfail ('clamp' ),
1054
1054
# something weird happening with channels_last
1055
1055
xfail ('bfloat16' ),
1056
1056
xfail ('double' ),
0 commit comments