Commit b4734b9
[Bugfix] Fix default MM LoRA alignment for single str prompts (#29140)
Signed-off-by: Alex-Brooks <[email protected]>
1 parent 30b9c67 commit b4734b9

File tree

2 files changed: +36 -1 lines changed

tests/lora/test_default_mm_loras.py

Lines changed: 35 additions & 0 deletions
@@ -5,7 +5,9 @@
 """
 
 import os
+import unittest.mock as mock
 
+import pytest
 from huggingface_hub import snapshot_download
 
 from vllm.lora.request import LoRARequest
@@ -114,3 +116,36 @@ def test_default_mm_lora_fails_with_overridden_lora_request(
         default_mm_loras={"audio": IMAGE_LORA_PATH},
         expected_suffix=RESPONSE_SUFFIX_WITH_LORA,
     )
+
+
+def test_default_mm_lora_does_not_expand_string_reqs(vllm_runner):
+    class MockEngineException(Exception):
+        pass
+
+    # Regression test for ensuring default multimodal lora resolution
+    # does not expand the lora req if the prompt type is a string.
+    vllm_runner_kwargs = {
+        **VLLM_RUNNER_BASE_KWARGS,
+        **{"default_mm_loras": {"audio": AUDIO_LORA_PATH}},
+    }
+
+    # Avoid the full generation call since these tests are expensive;
+    # just check what lora request is actually submitted to the engine
+    mock_err = "Engine is mocked for this test"
+
+    with (
+        mock.patch(
+            "vllm.v1.engine.llm_engine.LLMEngine.add_request",
+            side_effect=MockEngineException(mock_err),
+        ) as mock_add_request,
+        vllm_runner(**vllm_runner_kwargs) as vllm_model,
+    ):
+        # Die once we actually submit the request to the engine
+        with pytest.raises(MockEngineException):
+            vllm_model.llm.generate(prompts=AUDIO_PROMPT)
+
+        # Then check to make sure the submitted lora request
+        # and text prompt were zipped together correctly
+        engine_args, engine_kwargs = mock_add_request.call_args
+        assert engine_kwargs["lora_request"] is None
+        assert engine_kwargs["prompt_text"] == AUDIO_PROMPT
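
For reference, here is a minimal standalone sketch of the mocking pattern the new test relies on (FakeEngine and the test name below are illustrative stand-ins, not vLLM APIs): patching the engine entry point with a side_effect exception aborts execution at the engine boundary, so the test can inspect call_args without paying for a real generation.

import unittest.mock as mock

import pytest


class MockEngineException(Exception):
    pass


class FakeEngine:
    """Stand-in for an engine whose add_request call is expensive."""

    def add_request(self, prompt_text: str, lora_request=None):
        raise NotImplementedError("expensive in the real engine")


def test_add_request_receives_unexpanded_prompt():
    engine = FakeEngine()
    with mock.patch.object(
        FakeEngine,
        "add_request",
        side_effect=MockEngineException("Engine is mocked for this test"),
    ) as mock_add_request:
        # The patched method raises immediately, so nothing expensive runs.
        with pytest.raises(MockEngineException):
            engine.add_request(prompt_text="hello", lora_request=None)

        # call_args exposes exactly what was submitted to the engine.
        _, kwargs = mock_add_request.call_args
        assert kwargs["lora_request"] is None
        assert kwargs["prompt_text"] == "hello"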

vllm/entrypoints/llm.py

Lines changed: 1 addition & 1 deletion
@@ -466,7 +466,7 @@ def _get_modality_specific_lora_reqs(
     ):
         return lora_request
 
-    if not isinstance(prompts, Sequence):
+    if not isinstance(prompts, Sequence) or isinstance(prompts, str):
        prompts = [prompts]
 
     optional_loras = (
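
Why the one-line fix works: str is itself registered as a collections.abc.Sequence, so the old check never wrapped a bare string prompt in a list, and downstream alignment of per-modality LoRA requests could iterate over the string character by character. A small sketch of the corrected normalization (the prompt value here is illustrative):

from collections.abc import Sequence

prompt = "What is in this audio clip?"

# str counts as a Sequence, so the old check left it unwrapped:
assert isinstance(prompt, Sequence)

# With the fix, a bare string is treated as a single prompt rather
# than a sequence of single-character prompts:
if not isinstance(prompt, Sequence) or isinstance(prompt, str):
    prompts = [prompt]
else:
    prompts = prompt

assert prompts == ["What is in this audio clip?"]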
