Skip to content

Commit 8744b95

Browse files
committed
cr changes
1 parent aa27676 commit 8744b95

File tree

1 file changed

+4
-2
lines changed

1 file changed

+4
-2
lines changed

python/test/unit/language/test_matmul.py

Lines changed: 4 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -1221,8 +1221,10 @@ def test_mxfp8_mxfp4_matmul(M, N, K, BLOCK_M, BLOCK_N, BLOCK_K, NUM_STAGES, B_TR
1221 | 1221
pytest.skip("None scale has not been tested on XPU backend")
1222 | 1222
if not (A_DATA_TYPE == "float8e5" and B_DATA_TYPE == "float4"):
1223 | 1223
pytest.skip(f"(A: {A_DATA_TYPE}, B: {B_DATA_TYPE}) has not been tested on XPU backend")
1224-
if (BLOCK_M, BLOCK_N, BLOCK_K) == (128, 256,
1225-
256) and CONST_SCALE and "GPU Max 1100" in torch.xpu.get_device_name():
1224+
if (BLOCK_M, BLOCK_N,
1225+
BLOCK_K) == (128, 256,
1226+
256) and CONST_SCALE and triton.runtime.driver.active.utils.get_device_properties(
1227+
triton.runtime.driver.active.get_current_device())["max_shared_mem"] < 196608:
1226 | 1228
pytest.skip("XPU Max 1100 does not fit in memory large block size for CONST_SCALE mxfp matmul")
1227 | 1229
if not PACK_B_ALONG_K and B_DATA_TYPE != "float4":
1228 | 1230
pytest.xfail("Pack along K can only be False for float4")

0 commit comments

Comments
 (0)