Commit 7da1676

Skip bf16 paged attention UT on unsupported platforms. (#4711)(#4715)
* Skip bf16 paged attention tests on unsupported platforms.
* Fix format
* Correct condition for XeLpg
1 parent 140b137 commit 7da1676
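
The fix gates the bf16 test behind pytest's skipif marker: the condition is evaluated once when the module is collected, so on GPUs without XMX (Xe Matrix eXtensions) units the test is reported as skipped with the given reason instead of failing. A minimal, self-contained sketch of the same pattern, with a hypothetical supports_bf16() probe standing in for IPEX's torch.xpu.has_xmx():

    # Sketch of the pytest.mark.skipif pattern applied in this commit.
    # supports_bf16() is a hypothetical stand-in for torch.xpu.has_xmx(),
    # the IPEX capability check used by the real test.
    import pytest
    import torch

    def supports_bf16() -> bool:
        # Hypothetical probe; the real test asks IPEX whether the GPU has XMX units.
        return hasattr(torch, "bfloat16")

    @pytest.mark.skipif(
        not supports_bf16(),
        reason="No bf16 support for current gpu arch.",
    )
    def test_bf16_add():
        # Runs only where bf16 is supported; otherwise reported as skipped.
        x = torch.ones(2, dtype=torch.bfloat16)
        assert (x + x).dtype == torch.bfloat16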

File tree

1 file changed: +5 -0 lines changed

tests/gpu/examples/test_paged_attention.py

Lines changed: 5 additions & 0 deletions
@@ -2,6 +2,7 @@
 import random
 from typing import List, Optional, Tuple
 import intel_extension_for_pytorch as ipex  # noqa
+import pytest
 from torch.testing._internal.common_utils import (
     TestCase,
 )
@@ -310,6 +311,10 @@ def test_fp16(self):
         for version in ["v1", "v2"]:
             self.paged_attention(version, torch.float16)
 
+    @pytest.mark.skipif(
+        not torch.xpu.has_xmx(),
+        reason="Paged_attention: No bf16 support for current gpu arch.",
+    )
     def test_bf16(self):
         for version in ["v1", "v2"]:
             self.paged_attention(version, torch.bfloat16)
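
On a machine whose GPU lacks XMX, the skip can be verified from the command line; -rs asks pytest to print skip reasons in the short summary (assuming pytest is the runner for this suite):

    pytest tests/gpu/examples/test_paged_attention.py -k test_bf16 -rs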
