Skip to content
This repository was archived by the owner on Aug 21, 2025. It is now read-only.

Commit eaa88eb

Browse files
committed
update discover coverage
1 parent 3df6dde commit eaa88eb

File tree

2 files changed

+4
-2
lines changed

2 files changed

+4
-2
lines changed

test/discover_coverage.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -628,6 +628,7 @@ class Support(enum.Enum):
628628
'nn.functional.rrelu', # not actually problem, randomness testing artifact
629629
'normal', # not actually problem, randomness testing artifact
630630
'bernoulli', # not actually problem, randomness testing artifact
631+
'torch.nn.functional.embedding', # max_norm causes testing to be weird
631632
# 'multinomial',
632633
}
633634

test/test_ops.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -755,8 +755,9 @@ def test_vmapjvp(self, device, dtype, op):
755755
# Causing issues with multiple cpu levels of forward mode AD
756756
xfail('nn.functional.batch_norm', device_type='cpu'),
757757

758-
# https://github.com/pytorch/functorch/issues/857
759-
skip('nn.functional.embedding', ''),
758+
# Not actually a problem: embedding with max_norm mutates the weight
759+
# and causes different runs to produce different results.
760+
xfail('nn.functional.embedding', ''),
760761
xfail('nn.functional.soft_margin_loss', ''),
761762
xfail('nn.functional.binary_cross_entropy_with_logits', ''),
762763
xfail('linalg.householder_product'),

0 commit comments

Comments (0)