Skip to content
This repository was archived by the owner on Aug 21, 2025. It is now read-only.

Commit 00ce7dc

Browse files
committed
Fix CI
1 parent 39f0906 commit 00ce7dc

File tree

1 file changed

+19
-3
lines changed

1 file changed

+19
-3
lines changed

test/test_ops.py

Lines changed: 19 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -462,12 +462,18 @@ def vjp_of_vjp(*args_and_cotangents):
462462
xfail('_masked.prod'), # calls aten::item
463463
xfail('stft'),
464464
xfail('nn.functional.glu'),
465-
xfail('nn.functional.conv_transpose1d', device_type='cuda'),
466-
skip('nn.functional.conv_transpose2d', device_type='cuda'),
467-
xfail('nn.functional.conv_transpose3d', device_type='cuda'),
465+
468466
xfail('nn.functional.fractional_max_pool3d'),
469467
xfail('as_strided'),
470468
xfail('nn.functional.fractional_max_pool2d'),
469+
470+
# PyTorch recently changed its convolution implementation.
471+
# That change may be responsible for all of the following failures.
472+
xfail('nn.functional.conv1d'),
473+
xfail('nn.functional.conv_transpose1d'),
474+
xfail('nn.functional.conv_transpose2d'),
475+
xfail('nn.functional.conv_transpose3d'),
476+
471477
})
472478
@ops(functorch_lagging_op_db + additional_op_db, allowed_dtypes=(torch.float,))
473479
@skipOps('TestOperators', 'test_vmapvjp', vmapvjp_fail)
@@ -542,6 +548,16 @@ def test_vmapvjp(self, device, dtype, op):
542548
# See https://github.com/pytorch/pytorch/issues/66357
543549
xfail('nn.functional.pad', 'circular'),
544550
551+
# RuntimeError: expand: the number of sizes provided (1) must be greater or equal to the number of dimensions in the tensor (2)
552+
xfail('nanquantile'),
553+
xfail('quantile'),
554+
555+
# RuntimeError: vmap: inplace arithmetic(self, *extra_args)
556+
xfail('nn.functional.gelu'),
557+
558+
# Not implemented
559+
xfail('scatter'),
560+
545561
# =============================================
546562
# NB: The above failures also fail in PyTorch core.
547563
# The failures below only fail in functorch

0 commit comments

Comments (0)