Skip to content

Commit e61eb5e

Browse files
Authored by Jee Jee Li
[Model] Remove MotifForCausalLM (vllm-project#25866)
Signed-off-by: Jee Jee Li <[email protected]>
1 parent 0899ba5 commit e61eb5e

File tree

5 files changed

+1
-355
lines changed

5 files changed

+1
-355
lines changed

docs/models/supported_models.md

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -396,7 +396,6 @@ th {
396396
| `MiniCPM3ForCausalLM` | MiniCPM3 | `openbmb/MiniCPM3-4B`, etc. | ✅︎ | ✅︎ | ✅︎ |
397397
| `MistralForCausalLM` | Mistral, Mistral-Instruct | `mistralai/Mistral-7B-v0.1`, `mistralai/Mistral-7B-Instruct-v0.1`, etc. | ✅︎ | ✅︎ | ✅︎ |
398398
| `MixtralForCausalLM` | Mixtral-8x7B, Mixtral-8x7B-Instruct | `mistralai/Mixtral-8x7B-v0.1`, `mistralai/Mixtral-8x7B-Instruct-v0.1`, `mistral-community/Mixtral-8x22B-v0.1`, etc. | ✅︎ | ✅︎ | ✅︎ |
399-
| `MotifForCausalLM` | Motif-1-Tiny | `Motif-Technologies/Motif-2.6B`, `Motif-Technologies/Motif-2.6b-v1.1-LC`, etc. | ✅︎ | ✅︎ | |
400399
| `MPTForCausalLM` | MPT, MPT-Instruct, MPT-Chat, MPT-StoryWriter | `mosaicml/mpt-7b`, `mosaicml/mpt-7b-storywriter`, `mosaicml/mpt-30b`, etc. | | ✅︎ | ✅︎ |
401400
| `NemotronForCausalLM` | Nemotron-3, Nemotron-4, Minitron | `nvidia/Minitron-8B-Base`, `mgoin/Nemotron-4-340B-Base-hf-FP8`, etc. | ✅︎ | ✅︎ | ✅︎ |
402401
| `NemotronHForCausalLM` | Nemotron-H | `nvidia/Nemotron-H-8B-Base-8K`, `nvidia/Nemotron-H-47B-Base-8K`, `nvidia/Nemotron-H-56B-Base-8K`, etc. | ✅︎ | ✅︎ | ✅︎ |
@@ -409,7 +408,6 @@ th {
409408
| `PhiForCausalLM` | Phi | `microsoft/phi-1_5`, `microsoft/phi-2`, etc. | ✅︎ | ✅︎ | ✅︎ |
410409
| `Phi3ForCausalLM` | Phi-4, Phi-3 | `microsoft/Phi-4-mini-instruct`, `microsoft/Phi-4`, `microsoft/Phi-3-mini-4k-instruct`, `microsoft/Phi-3-mini-128k-instruct`, `microsoft/Phi-3-medium-128k-instruct`, etc. | ✅︎ | ✅︎ | ✅︎ |
411410
| `PhiMoEForCausalLM` | Phi-3.5-MoE | `microsoft/Phi-3.5-MoE-instruct`, etc. | ✅︎ | ✅︎ | ✅︎ |
412-
| `Phi4FlashForCausalLM` | Phi-4-mini-flash-reasoning | `microsoft/microsoft/Phi-4-mini-instruct`, etc. | | | |
413411
| `PersimmonForCausalLM` | Persimmon | `adept/persimmon-8b-base`, `adept/persimmon-8b-chat`, etc. | | ✅︎ | ✅︎ |
414412
| `Plamo2ForCausalLM` | PLaMo2 | `pfnet/plamo-2-1b`, `pfnet/plamo-2-8b`, etc. | | ✅︎ | ✅︎ |
415413
| `QWenLMHeadModel` | Qwen | `Qwen/Qwen-7B`, `Qwen/Qwen-7B-Chat`, etc. | ✅︎ | ✅︎ | ✅︎ |

tests/models/registry.py

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -298,9 +298,6 @@ def check_available_online(
298298
"MistralForCausalLM": _HfExamplesInfo("mistralai/Mistral-7B-Instruct-v0.1"),
299299
"MixtralForCausalLM": _HfExamplesInfo("mistralai/Mixtral-8x7B-Instruct-v0.1", # noqa: E501
300300
{"tiny": "TitanML/tiny-mixtral"}), # noqa: E501
301-
"MotifForCausalLM": _HfExamplesInfo("Motif-Technologies/Motif-2.6B",
302-
trust_remote_code=True,
303-
v0_only=True),
304301
"MptForCausalLM": _HfExamplesInfo("mpt", is_available_online=False),
305302
"MPTForCausalLM": _HfExamplesInfo("mosaicml/mpt-7b"),
306303
"NemotronForCausalLM": _HfExamplesInfo("nvidia/Minitron-8B-Base"),

tests/models/test_initialization.py

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -76,10 +76,6 @@ def _initialize_kv_caches_v1(self, vllm_config):
7676
if model_info.v0_only:
7777
# NOTE(woosuk): skip the test for V0-only models
7878
return
79-
80-
if model_arch in ("Phi4FlashForCausalLM", "MotifForCausalLM"):
81-
pytest.skip(
82-
"Differential Flash Attention backend has been removed.")
8379
if model_arch == "GptOssForCausalLM":
8480
# FIXME: A hack to bypass FA3 assertion because our CI's L4 GPU
8581
# has cc==8.9 which hasn't supported FA3 yet. Remove this hack when

vllm/model_executor/models/motif.py

Lines changed: 0 additions & 345 deletions
This file was deleted.

vllm/model_executor/models/registry.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -119,7 +119,6 @@
119119
"MiniCPM3ForCausalLM": ("minicpm3", "MiniCPM3ForCausalLM"),
120120
"MistralForCausalLM": ("llama", "LlamaForCausalLM"),
121121
"MixtralForCausalLM": ("mixtral", "MixtralForCausalLM"),
122-
"MotifForCausalLM": ("motif", "MotifForCausalLM"),
123122
# transformers's mpt class has lower case
124123
"MptForCausalLM": ("mpt", "MPTForCausalLM"),
125124
"MPTForCausalLM": ("mpt", "MPTForCausalLM"),
@@ -332,6 +331,7 @@
332331
]
333332

334333
_PREVIOUSLY_SUPPORTED_MODELS = {
334+
"MotifForCausalLM": "0.10.2",
335335
"Phi3SmallForCausalLM": "0.9.2",
336336
"Phi4FlashForCausalLM": "0.10.2",
337337
# encoder-decoder models except whisper

0 commit comments

Comments (0)