Skip to content

Commit 123db62

Browse files
fix vllm test skip (#394)
Signed-off-by: Akihiko Kuroda <akihikokuroda2020@gmail.com>
1 parent b2e5a52 commit 123db62

File tree

3 files changed

+39
-18
lines changed

3 files changed

+39
-18
lines changed

mellea/backends/vllm.py

Lines changed: 14 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -16,13 +16,20 @@
1616
from collections.abc import Callable, Sequence
1717
from typing import Any, overload
1818

19-
import msgspec
20-
import outlines
21-
import outlines_core
22-
import torch
23-
import vllm
24-
from transformers import AutoTokenizer
25-
from transformers.tokenization_utils_base import PreTrainedTokenizerBase
19+
try:
20+
import msgspec
21+
import outlines
22+
import outlines_core
23+
import torch
24+
import vllm
25+
from transformers import AutoTokenizer
26+
from transformers.tokenization_utils_base import PreTrainedTokenizerBase
27+
except ImportError as e:
28+
raise ImportError(
29+
"vLLM backend dependencies are not installed. "
30+
"Please install with: pip install mellea[vllm]\n"
31+
"Note: vLLM is not supported on macOS. Use other backends like Ollama or HuggingFace instead."
32+
) from e
2633

2734
from ..backends import ModelIdentifier
2835
from ..core import (

test/backends/test_vllm.py

Lines changed: 13 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -18,12 +18,19 @@
1818
),
1919
]
2020

21-
import mellea.backends.model_ids as model_ids
22-
from mellea import MelleaSession
23-
from mellea.backends import ModelOption
24-
from mellea.backends.vllm import LocalVLLMBackend
25-
from mellea.core import CBlock
26-
from mellea.stdlib.context import ChatContext, SimpleContext
21+
# Try to import vLLM backend - skip all tests if not available
22+
try:
23+
import mellea.backends.model_ids as model_ids
24+
from mellea import MelleaSession
25+
from mellea.backends import ModelOption
26+
from mellea.backends.vllm import LocalVLLMBackend
27+
from mellea.core import CBlock
28+
from mellea.stdlib.context import ChatContext, SimpleContext
29+
except ImportError as e:
30+
pytest.skip(
31+
f"vLLM backend not available: {e}. Install with: pip install mellea[vllm]",
32+
allow_module_level=True,
33+
)
2734

2835

2936
@pytest.fixture(scope="module")

test/backends/test_vllm_tools.py

Lines changed: 12 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -16,11 +16,18 @@
1616
),
1717
]
1818

19-
import mellea.backends.model_ids as model_ids
20-
from mellea import MelleaSession
21-
from mellea.backends import ModelOption
22-
from mellea.backends.vllm import LocalVLLMBackend
23-
from mellea.stdlib.context import ChatContext
19+
# Try to import vLLM backend - skip all tests if not available
20+
try:
21+
import mellea.backends.model_ids as model_ids
22+
from mellea import MelleaSession
23+
from mellea.backends import ModelOption
24+
from mellea.backends.vllm import LocalVLLMBackend
25+
from mellea.stdlib.context import ChatContext
26+
except ImportError as e:
27+
pytest.skip(
28+
f"vLLM backend not available: {e}. Install with: pip install mellea[vllm]",
29+
allow_module_level=True,
30+
)
2431

2532

2633
@pytest.fixture(scope="module")

0 commit comments

Comments (0)