
Commit f922be5

Update tests/_test_utils/deploy_utils.py
Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com>
Signed-off-by: noeyy-mino <[email protected]>
1 parent: 4c8261a · commit: f922be5

File tree: 1 file changed (+3, −1)

tests/_test_utils/deploy_utils.py

Lines changed: 3 additions & 1 deletion
@@ -58,6 +58,9 @@ def __init__(
 
     def run(self):
         """Run the deployment based on the specified backend."""
+        if not torch.cuda.is_available() or torch.cuda.device_count() == 0:
+            pytest.skip("CUDA is not available")
+            return
         if torch.cuda.get_device_capability() < (
             self.mini_sm // 10,
             self.mini_sm % 10,
@@ -68,7 +71,6 @@ def run(self):
         if torch.cuda.device_count() < self.tensor_parallel_size:
             pytest.skip(reason=f"Requires at least {self.tensor_parallel_size} GPUs")
             return
-
         if self.backend == "vllm":
             self._deploy_vllm()
         elif self.backend == "trtllm":
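
For readers skimming the diff: the added lines make run() call pytest.skip() before torch.cuda.get_device_capability() is reached, so the helper skips cleanly on machines without a usable CUDA device rather than erroring out. Below is a minimal, self-contained sketch of the same guard pattern; the helper name _require_cuda and its default thresholds are illustrative assumptions, not part of this file.

import pytest
import torch


def _require_cuda(min_capability: tuple[int, int] = (8, 0),
                  tensor_parallel_size: int = 1) -> None:
    # Hypothetical helper sketching the skip-guard pattern this commit extends;
    # the name and default thresholds are not from the repository.
    # Skip (rather than error) when the machine has no usable CUDA device.
    if not torch.cuda.is_available() or torch.cuda.device_count() == 0:
        pytest.skip("CUDA is not available")
    # Skip when the GPU is older than the required compute capability.
    if torch.cuda.get_device_capability() < min_capability:
        pytest.skip(f"Requires compute capability >= {min_capability}")
    # Skip when there are fewer GPUs than the requested tensor-parallel size.
    if torch.cuda.device_count() < tensor_parallel_size:
        pytest.skip(f"Requires at least {tensor_parallel_size} GPUs")

In the committed code the same three checks run inline inside run(), using self.mini_sm and self.tensor_parallel_size, as shown in the diff above.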
