Skip to content

Commit 36b6dd2

Browse files
Unskip more UTs (#3103)
Before: ``` language: passed: 11933, failed: 0, skipped: 104, xfailed: 453, total: 12490, fixme: 0, pass rate (w/o xfailed): 99.14% tools: passed: 8, failed: 0, skipped: 1, xfailed: 0, total: 9, fixme: 0, pass rate (w/o xfailed): 88.89% all: passed: 18605, failed: 0, skipped: 126, xfailed: 1215, total: 19946, fixme: 48, pass rate (w/o xfailed): 99.33% ``` After: ``` language: passed: 11934, failed: 0, skipped: 103, xfailed: 453, total: 12490, fixme: 0, pass rate (w/o xfailed): 99.14% tools: passed: 8, failed: 0, skipped: 0, xfailed: 0, total: 8, fixme: 0, pass rate (w/o xfailed): 100.0% all: passed: 18606, failed: 0, skipped: 124, xfailed: 1215, total: 19945, fixme: 48, pass rate (w/o xfailed): 99.34% ``` Signed-off-by: Whitney Tsang <[email protected]>
1 parent 50da997 commit 36b6dd2

File tree

4 files changed

+7
-7
lines changed

4 files changed

+7
-7
lines changed

python/test/unit/language/test_core.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -3779,9 +3779,8 @@ def test_dot3d(B, num_warps, M, N, K, BLOCK_M, BLOCK_N, in_dtype_str, out_dtype_
37793779
pytest.skip("small dots are supported only on HIP at the moment")
37803780

37813781
if B == 8 and M == 64 and in_dtype_str == "float32" and out_dtype_str == "float32":
3782-
if not is_interpreter() and torch.cuda.is_available(
3783-
) and triton.runtime.driver.active.utils.get_device_properties(
3784-
torch.cuda.current_device())["max_shared_mem"] < 131072:
3782+
if not is_interpreter() and triton.runtime.driver.active.utils.get_device_properties(
3783+
triton.runtime.driver.active.get_current_device())["max_shared_mem"] < 131072:
37853784
pytest.skip(
37863785
"Skipping tests with B = 8, M = 64, in_type = float32, out_type = float32 due to insufficient shared memory (less than 128 KB per SM) on this GPU."
37873786
)

python/test/unit/language/test_pipeliner.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66
import triton.language as tl
77
import triton.tools.experimental_descriptor
88

9-
from triton._internal_testing import is_cuda, is_hopper, is_hip_cdna, is_hip_mi200
9+
from triton._internal_testing import is_cuda, is_hopper, is_hip_cdna, is_hip_mi200, is_xpu
1010

1111

1212
def check_capabilities():
@@ -214,8 +214,8 @@ def __exit__(self, exc_type, exc_val, exc_tb):
214214
@pytest.mark.parametrize("scale", [True, False])
215215
def test_pipeline_matmul(scale, device):
216216
check_capabilities()
217-
if scale and not (is_cuda() or is_hip_cdna()):
218-
pytest.skip("NYI: scale_dot just implemented in CUDA/HIP")
217+
if scale and not (is_cuda() or is_hip_cdna() or is_xpu()):
218+
pytest.skip("NYI: scale_dot just implemented in CUDA/HIP/XPU")
219219
M, N, K = 512, 512, 128
220220
BLOCK_M, BLOCK_N, BLOCK_K = 64, 64, 32
221221
NUM_STAGES = 4

scripts/skiplist/lts/language.txt

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1585,3 +1585,4 @@ test/unit/language/test_core.py::test_dot[1-128-128-64-4-True-True-none-tf32-flo
15851585
test/unit/language/test_core.py::test_dot[1-128-128-64-4-True-True-none-tf32-float16-float16-1_1]
15861586
test/unit/language/test_core.py::test_dot[1-128-128-64-4-True-False-none-tf32-float16-float16-1_0]
15871587
test/unit/language/test_core.py::test_dot[1-128-128-64-4-True-False-none-tf32-float16-float16-1_1]
1588+
test/unit/language/test_pipeliner.py::test_pipeline_matmul[True]

scripts/test-triton.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -202,7 +202,7 @@ run_core_tests() {
202202
pytest -k "not test_line_info_interpreter" --verbose --device xpu language/test_line_info.py
203203

204204
TRITON_DISABLE_LINE_INFO=1 TRITON_TEST_SUITE=tools \
205-
pytest --verbose tools/
205+
pytest -k "not test_disam_cubin" --verbose tools
206206

207207
cd $TRITON_PROJ/third_party/intel/python/test
208208
TRITON_DISABLE_LINE_INFO=1 TRITON_TEST_SUITE=third_party \

0 commit comments

Comments (0)