@@ -575,7 +575,6 @@ def vjp_of_vjp(*args_and_cotangents):
 
 vmapvjp_fail = vjp_fail.union({
     # The following are not bugs and are expected behavior
-    xfail('fill_'),  # Not possible, wontfix
     xfail('masked_select'),  # Not possible due to dynamic shapes
     skip('bernoulli'),  # randomness
     skip('normal', ''),  # randomness
@@ -665,7 +664,6 @@ def test_vmapvjp(self, device, dtype, op):
 
         # Try to in-place batched tensor into non-batched tensor
         xfail('matrix_exp'),
-        xfail('fill_'),
         xfail('block_diag'),  # TODO: We expect this to fail in core, but it doesn't
 
         # Apparently these support forward AD, but we get "Trying to use forward AD..."
@@ -763,7 +761,6 @@ def test_vmapjvp(self, device, dtype, op):
         xfail('quantile'),
         xfail('var_mean'),
         xfail('as_strided'),
-        xfail('fill_'),
         xfail('nn.functional.gaussian_nll_loss'),
         xfail('std_mean'),
         xfail('block_diag'),
@@ -917,7 +914,6 @@ def test():
         xfail('fmin'),
         xfail('fmax'),
         xfail('special.log_ndtr'),
-        xfail('fill_'),
         xfail('index_copy'),
         xfail('index_fill'),
         xfail('linalg.cholesky'),
@@ -1040,7 +1036,6 @@ def test():
         # All of the following are bugs and need to be fixed
         xfail('__getitem__', ''),
         xfail('clamp', ''),
-        xfail('fill_'),
         xfail('index_put', ''),
         xfail('matrix_exp'),
         xfail('view_as_complex'),
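
Taken together, these hunks remove `xfail('fill_')` from the vmap/vjp/jvp expected-failure lists, so `Tensor.fill_` is now expected to compose with functorch's transforms. Below is a minimal sketch of the kind of call these tests now exercise, assuming functorch's public `vmap` API; `fill_with` is a hypothetical helper for illustration, and the OpInfo-driven test harness itself is not shown:

```python
import torch
from functorch import vmap

# Fill each row of a batched tensor with a per-example value.
# fill_ is in-place, so we clone inside the mapped function to
# keep the example side-effect free (hypothetical helper).
def fill_with(x, v):
    return x.clone().fill_(v)

x = torch.zeros(3, 4)
values = torch.tensor([1.0, 2.0, 3.0])  # one 0-dim value per example
out = vmap(fill_with)(x, values)
print(out)  # row i is filled with values[i]
```

Under `vmap`, each example sees a `(4,)` slice of `x` and a 0-dim slice of `values`; writing a batched value into a batched tensor in place is exactly the case the dropped xfails used to guard against.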