Skip to content

Commit e428146

Browse files
author
samdow
committed
fix ci
1 parent 0c03323 commit e428146

File tree

1 file changed

+3
-0
lines changed

1 file changed

+3
-0
lines changed

test/test_ops.py

Lines changed: 3 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -874,6 +874,8 @@ def test_vmapjvpall(self, device, dtype, op):
874874
xfail('lu_unpack'),
875875
xfail('nn.functional.glu'),
876876
xfail('nn.functional.bilinear'), # trilinear doesn't have batching rule
877+
xfail('linalg.eigh'), # _linalg_eigh doesn't have batching rule
878+
xfail('linalg.eigvalsh'), # _linalg_eigh doesn't have batching rule
877879
}))
878880
@toleranceOverride({torch.float32: tol(atol=1e-04, rtol=1e-04)})
879881
def test_vmapjvpall_has_batch_rule(self, device, dtype, op):
@@ -998,6 +1000,7 @@ def test():
9981000
xfail('nn.functional.l1_loss', ''),
9991001
xfail('nn.functional.max_unpool2d', 'grad'),
10001002
xfail('qr'),
1003+
xfail('linalg.eigvalsh'), # _linalg_eigh doesn't have batching rule
10011004
}))
10021005
def test_vmapvjp_has_batch_rule(self, device, dtype, op):
10031006
if not op.supports_autograd:

0 commit comments

Comments (0)