Commit 69674be

Fix scaled_dot fp16 test cases (#3154)
Co-authored-by: Whitney Tsang <[email protected]>
1 parent 6c389fd · commit 69674be

File tree

6 files changed: +1738 -1294 lines

python/test/unit/language/test_core.py

Lines changed: 0 additions & 4 deletions

@@ -3613,10 +3613,6 @@ def test_scaled_dot(M, N, K, col_a, col_b, rhs_scale, mxfp_type, normal_type, nu
         pytest.skip(f"scaled_dot({mxfp_type}, {normal_type}) not yet implemented for MI300")
     if mma == 16 and K == 64:
         pytest.skip(f"K == {K} too small for mfma {mma} in scaled_dot")
-    if is_xpu():
-        if normal_type == "fp16":
-            # https://github.com/intel/intel-xpu-backend-for-triton/issues/3141
-            pytest.skip("scaled_dot with fp16 input not supported on XPU yet")
 
 @triton.jit
 def dot_scale_kernel(a_base, stride_a0, stride_a1, a_scale, b_base, stride_b0, stride_b1, b_scale, out,
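
For orientation, not part of the commit itself: test_scaled_dot exercises tl.dot_scaled, which multiplies a microscaled fp8 operand carrying per-block scales by either another scaled operand or a plain fp16/bf16 tensor; the fp16 case is what the removed skip had disabled on XPU. Below is a minimal sketch of that pattern, assuming the tl.dot_scaled argument order (lhs, lhs_scale, lhs_format, rhs, rhs_scale, rhs_format) from recent Triton releases; the pointer names, layouts, and block sizes are hypothetical, not taken from the test.

# Minimal sketch, not the dot_scale_kernel from test_core.py: an e4m3 (fp8)
# operand with one e8m0 scale (stored as uint8) per 32 K-elements, multiplied
# by a plain fp16 operand. Assumes fp8 payloads are passed as uint8 tensors.
import triton
import triton.language as tl


@triton.jit
def scaled_dot_sketch(a_ptr, a_scale_ptr, b_ptr, out_ptr,
                      BLOCK_M: tl.constexpr, BLOCK_N: tl.constexpr, BLOCK_K: tl.constexpr):
    offs_m = tl.arange(0, BLOCK_M)
    offs_n = tl.arange(0, BLOCK_N)
    offs_k = tl.arange(0, BLOCK_K)
    # A: fp8 e4m3 payload as uint8, row-major [BLOCK_M, BLOCK_K]
    a = tl.load(a_ptr + offs_m[:, None] * BLOCK_K + offs_k[None, :])
    # One e8m0 scale per 32 contiguous K elements of A
    offs_s = tl.arange(0, BLOCK_K // 32)
    a_scale = tl.load(a_scale_ptr + offs_m[:, None] * (BLOCK_K // 32) + offs_s[None, :])
    # B: plain fp16, row-major [BLOCK_K, BLOCK_N]; no scale on this side
    b = tl.load(b_ptr + offs_k[:, None] * BLOCK_N + offs_n[None, :])
    # Scaled dot with fp32 accumulator; rhs_scale is None for the plain operand
    c = tl.dot_scaled(a, a_scale, "e4m3", b, None, "fp16")
    tl.store(out_ptr + offs_m[:, None] * BLOCK_N + offs_n[None, :], c)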
Lines changed: 1 addition & 0 deletions

@@ -1,3 +1,4 @@
 # https://github.com/intel/intel-xpu-backend-for-triton/issues/2968
 test/unit/language/test_core.py::test_scaled_dot[32-64-128-False-False-True-e5m2-bf16-4-16-1]
 test/unit/language/test_core.py::test_scaled_dot[64-32-128-False-False-True-e4m3-bf16-4-16-1]
+test/unit/language/test_core.py::test_scaled_dot[64-32-128-False-False-True-e4m3-fp16-4-16-1]
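
Each line in this skiplist is a pytest node ID for a known XPU failure (tracked in issue #2968); the bracketed suffix is the parametrization ID, listing the test's parameters in signature order. A rough decoding of the added entry follows; the names for the last three values are assumptions, since the signature is truncated ("nu...") in the hunk header above.

# Illustrative decoding of the added skiplist line (not part of the commit).
node_id = ("test/unit/language/test_core.py::"
           "test_scaled_dot[64-32-128-False-False-True-e4m3-fp16-4-16-1]")
params = dict(
    M=64, N=32, K=128,             # matmul shape
    col_a=False, col_b=False,      # column-major layout flags for A and B
    rhs_scale=True,                # the block scale is attached to the RHS operand
    mxfp_type="e4m3",              # fp8 format of the microscaled operand
    normal_type="fp16",            # plain dtype of the other operand
    num_warps=4, mma=16, kpack=1,  # assumed names for the truncated tail
)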
