Skip to content

Commit e7fc700

Browse files
bingchen-mi, jeejeelee, and Isotr0py
authored
[Model] Add MiDashengLM model support (vllm-project#23652)
Signed-off-by: chenbing8 <[email protected]>
Signed-off-by: bingchen-mi <[email protected]>
Co-authored-by: Jee Jee Li <[email protected]>
Co-authored-by: Isotr0py <[email protected]>
1 parent 12e1e63 commit e7fc700

File tree

9 files changed

+928
-0
lines changed

9 files changed

+928
-0
lines changed

docs/models/supported_models.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -643,6 +643,7 @@ These models primarily accept the [`LLM.generate`](./generative_models.md#llmgen
643643
| `LlavaNextForConditionalGeneration` | LLaVA-NeXT | T + I<sup>E+</sup> | `llava-hf/llava-v1.6-mistral-7b-hf`, `llava-hf/llava-v1.6-vicuna-7b-hf`, etc. | | ✅︎ | ✅︎ |
644644
| `LlavaNextVideoForConditionalGeneration` | LLaVA-NeXT-Video | T + V | `llava-hf/LLaVA-NeXT-Video-7B-hf`, etc. | | ✅︎ | ✅︎ |
645645
| `LlavaOnevisionForConditionalGeneration` | LLaVA-Onevision | T + I<sup>+</sup> + V<sup>+</sup> | `llava-hf/llava-onevision-qwen2-7b-ov-hf`, `llava-hf/llava-onevision-qwen2-0.5b-ov-hf`, etc. | | ✅︎ | ✅︎ |
646+
| `MiDashengLMModel` | MiDashengLM | T + A<sup>+</sup> | `mispeech/midashenglm-7b` | | ✅︎ | ✅︎ |
646647
| `MiniCPMO` | MiniCPM-O | T + I<sup>E+</sup> + V<sup>E+</sup> + A<sup>E+</sup> | `openbmb/MiniCPM-o-2_6`, etc. | ✅︎ | ✅︎ | ✅︎ |
647648
| `MiniCPMV` | MiniCPM-V | T + I<sup>E+</sup> + V<sup>E+</sup> | `openbmb/MiniCPM-V-2` (see note), `openbmb/MiniCPM-Llama3-V-2_5`, `openbmb/MiniCPM-V-2_6`, `openbmb/MiniCPM-V-4`, `openbmb/MiniCPM-V-4_5`, etc. | ✅︎ | | ✅︎ |
648649
| `MiniMaxVL01ForConditionalGeneration` | MiniMax-VL | T + I<sup>E+</sup> | `MiniMaxAI/MiniMax-VL-01`, etc. | | ✅︎ | ✅︎ |

examples/offline_inference/audio_language.py

Lines changed: 31 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -146,6 +146,36 @@ def run_granite_speech(question: str, audio_count: int) -> ModelRequestData:
146146
)
147147

148148

149+
# MiDashengLM
150+
def run_midashenglm(question: str, audio_count: int):
151+
model_name = "mispeech/midashenglm-7b"
152+
153+
engine_args = EngineArgs(
154+
model=model_name,
155+
trust_remote_code=True,
156+
max_model_len=4096,
157+
max_num_seqs=5,
158+
limit_mm_per_prompt={"audio": audio_count},
159+
)
160+
161+
audio_in_prompt = "".join(
162+
["<|audio_bos|><|AUDIO|><|audio_eos|>" for idx in range(audio_count)]
163+
)
164+
165+
default_system = "You are a helpful language and speech assistant."
166+
167+
prompt = (
168+
f"<|im_start|>system\n{default_system}<|im_end|>\n"
169+
"<|im_start|>user\n"
170+
f"{audio_in_prompt}{question}<|im_end|>\n"
171+
"<|im_start|>assistant\n"
172+
)
173+
return ModelRequestData(
174+
engine_args=engine_args,
175+
prompt=prompt,
176+
)
177+
178+
149179
# MiniCPM-O
150180
def run_minicpmo(question: str, audio_count: int) -> ModelRequestData:
151181
model_name = "openbmb/MiniCPM-o-2_6"
@@ -352,6 +382,7 @@ def run_whisper(question: str, audio_count: int) -> ModelRequestData:
352382
"voxtral": run_voxtral,
353383
"gemma3n": run_gemma3n,
354384
"granite_speech": run_granite_speech,
385+
"midashenglm": run_midashenglm,
355386
"minicpmo": run_minicpmo,
356387
"phi4_mm": run_phi4mm,
357388
"phi4_multimodal": run_phi4_multimodal,

tests/models/multimodal/processing/test_common.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -302,6 +302,7 @@ def _test_processing_correctness_one(
302302
"llava-hf/llava-onevision-qwen2-0.5b-ov-hf",
303303
"meta-llama/Llama-3.2-11B-Vision-Instruct",
304304
"TIGER-Lab/Mantis-8B-siglip-llama3",
305+
"mispeech/midashenglm-7b",
305306
"openbmb/MiniCPM-Llama3-V-2_5",
306307
"openbmb/MiniCPM-o-2_6",
307308
"openbmb/MiniCPM-V-2_6",

tests/models/registry.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -460,6 +460,8 @@ def check_available_online(
460460
max_transformers_version="4.48", # noqa: E501
461461
transformers_version_reason="HF model is not compatible.", # noqa: E501
462462
hf_overrides={"architectures": ["MantisForConditionalGeneration"]}), # noqa: E501
463+
"MiDashengLMModel": _HfExamplesInfo("mispeech/midashenglm-7b",
464+
trust_remote_code=True),
463465
"MiniCPMO": _HfExamplesInfo("openbmb/MiniCPM-o-2_6",
464466
trust_remote_code=True),
465467
"MiniCPMV": _HfExamplesInfo("openbmb/MiniCPM-Llama3-V-2_5",

0 commit comments

Comments (0)