1 parent 19fde78 commit 6e6c0c6
test/test_transformers.py
@@ -132,7 +132,7 @@ def _check_equal(
     if golden.is_cuda and golden.dtype == torch.float32:
         assert torch.backends.cuda.math_sdp.fp32_precision == "ieee", (
             "Testing script error: FP32 golden tensor must be calculated with IEEE"
-            " precision"
+            " precision. Add @math_sdp_precision('ieee') to related tests to fix it."
         )
 
     # Compute error between golden
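The updated error message points test authors at a @math_sdp_precision('ieee') decorator. Below is a minimal sketch of how such a decorator could be defined and applied; the decorator name and the torch.backends.cuda.math_sdp.fp32_precision attribute are taken from the diff above, while the save/restore implementation and the example test name are assumptions, not the test suite's actual helper.

import functools

import torch


def math_sdp_precision(precision: str):
    """Temporarily set the math-SDP FP32 precision for the decorated test.

    Hypothetical implementation: the real decorator in the test suite may
    differ. The attribute path torch.backends.cuda.math_sdp.fp32_precision
    comes from the assertion in _check_equal.
    """

    def decorator(fn):
        @functools.wraps(fn)
        def wrapper(*args, **kwargs):
            backend = torch.backends.cuda.math_sdp
            old_precision = backend.fp32_precision
            backend.fp32_precision = precision
            try:
                return fn(*args, **kwargs)
            finally:
                # Restore the previous setting even if the test fails.
                backend.fp32_precision = old_precision

        return wrapper

    return decorator


# Example usage (hypothetical test name): ensures the FP32 golden tensor is
# computed with IEEE precision so the assertion in _check_equal passes.
@math_sdp_precision("ieee")
def test_sdp_matches_golden():
    ...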