Skip to content
This repository was archived by the owner on Aug 21, 2025. It is now read-only.

Commit b55c95c

Browse files
committed
Hopefully fix CI
1 parent 19fa4a1 commit b55c95c

File tree

2 files changed

+5
-3
lines changed

2 files changed

+5
-3
lines changed

test/test_ops.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -179,6 +179,8 @@ def is_inplace(op, variant):
179179

180180

181181
vjp_fail = {
182+
xfail('nn.functional.dropout'), # randomness testing artifact
183+
xfail('nn.functional.rrelu'), # randomness testing artifact
182184
xfail('linalg.cholesky'),
183185
xfail('linalg.inv'),
184186
xfail('linalg.matrix_power'),
@@ -234,6 +236,9 @@ def wrapped_fn(*args, **kwargs):
234236

235237
@ops(functorch_lagging_op_db + additional_op_db, allowed_dtypes=(torch.float,))
236238
@skipOps('TestOperators', 'test_jvp', set({
239+
xfail('nn.functional.dropout'), # randomness testing artifact; not actually a problem
240+
xfail('nn.functional.rrelu'), # randomness testing artifact; not actually a problem
241+
237242
# See https://github.com/pytorch/pytorch/issues/69034
238243
# RuntimeError: expected scalar type double but found float
239244
xfail('minimum'),

test/test_vmap.py

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -2873,7 +2873,6 @@ def test_div(self, device):
28732873
self._test_arithmetic(lambda x, y: x / y, device)
28742874

28752875
@allowVmapFallbackUsage
2876-
@unittest.expectedFailure
28772876
def test_binary_cross_entropy(self, device):
28782877
x = F.sigmoid(torch.randn(3, 2, device=device, requires_grad=True))
28792878
target = torch.rand(3, 2, device=device)
@@ -3163,7 +3162,6 @@ def test_vmap_exhaustive(self, device, dtype, op):
31633162
xfail('cdist'),
31643163
xfail('complex'),
31653164
xfail('copysign'),
3166-
xfail('diag_embed'),
31673165
xfail('dsplit'),
31683166
xfail('eig'),
31693167
xfail('fft.fftn'),
@@ -3223,7 +3221,6 @@ def test_vmap_exhaustive(self, device, dtype, op):
32233221
xfail('linalg.multi_dot'),
32243222
xfail('nanmean'),
32253223
xfail('vstack'),
3226-
xfail('block_diag'),
32273224
xfail('nn.functional.dropout'),
32283225
xfail('nn.functional.conv2d', ''),
32293226
xfail('nn.functional.batch_norm'),

0 commit comments

Comments (0)