Skip to content

Commit 8e13d9f

Browse files
authored
[Misc] Further clean up some redundant config definitions (#22649)
Signed-off-by: Isotr0py <[email protected]>
1 parent 3fa5b25 commit 8e13d9f

File tree

4 files changed

+34
-85
lines changed

4 files changed

+34
-85
lines changed

vllm/transformers_utils/config.py

Lines changed: 33 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -32,11 +32,10 @@
3232
from vllm.transformers_utils.configs import (ChatGLMConfig, DeepseekVLV2Config,
3333
EAGLEConfig, JAISConfig,
3434
KimiVLConfig, MedusaConfig,
35-
MllamaConfig, MLPSpeculatorConfig,
35+
MLPSpeculatorConfig,
3636
Nemotron_Nano_VL_Config,
37-
NemotronConfig, NVLM_D_Config,
38-
OvisConfig, RWConfig,
39-
SpeculatorsConfig,
37+
NemotronConfig, OvisConfig,
38+
RWConfig, SpeculatorsConfig,
4039
Step3TextConfig, Step3VLConfig,
4140
UltravoxConfig)
4241
# yapf: enable
@@ -68,10 +67,6 @@ def _get_hf_token() -> Optional[str]:
6867
return None
6968

7069

71-
_CONFIG_REGISTRY_OVERRIDE_HF: dict[str, type[PretrainedConfig]] = {
72-
"mllama": MllamaConfig
73-
}
74-
7570
_CONFIG_REGISTRY: dict[str, type[PretrainedConfig]] = {
7671
"chatglm": ChatGLMConfig,
7772
"deepseek_vl_v2": DeepseekVLV2Config,
@@ -85,18 +80,30 @@ def _get_hf_token() -> Optional[str]:
8580
"eagle": EAGLEConfig,
8681
"speculators": SpeculatorsConfig,
8782
"nemotron": NemotronConfig,
88-
"NVLM_D": NVLM_D_Config,
8983
"ovis": OvisConfig,
9084
"ultravox": UltravoxConfig,
9185
"step3_vl": Step3VLConfig,
9286
"step3_text": Step3TextConfig,
93-
**_CONFIG_REGISTRY_OVERRIDE_HF
9487
}
9588

9689
_CONFIG_ATTRS_MAPPING: dict[str, str] = {
9790
"llm_config": "text_config",
9891
}
9992

93+
_AUTO_CONFIG_KWARGS_OVERRIDES: dict[str, dict[str, Any]] = {
94+
"internvl_chat": {
95+
"has_no_defaults_at_init": True
96+
},
97+
# transformers regards mllama as is_encoder_decoder=False
98+
# vllm needs is_encoder_decoder=True to enable cross-attention
99+
"mllama": {
100+
"is_encoder_decoder": True
101+
},
102+
"NVLM_D": {
103+
"has_no_defaults_at_init": True
104+
},
105+
}
106+
100107

101108
class ConfigFormat(str, enum.Enum):
102109
AUTO = "auto"
@@ -273,11 +280,12 @@ def thinker_uses_mrope(config: PretrainedConfig) -> bool:
273280

274281
def is_encoder_decoder(config: PretrainedConfig) -> bool:
275282
"""Detect if the model with this config is used as an encoder/decoder."""
276-
text_config = getattr(config, "text_config", None)
277-
if text_config is not None:
278-
return is_encoder_decoder(text_config)
279283

280-
return getattr(config, "is_encoder_decoder", False)
284+
def _is_encoder_decoder(config: PretrainedConfig) -> bool:
285+
return getattr(config, "is_encoder_decoder", False)
286+
287+
return (_is_encoder_decoder(config)
288+
or _is_encoder_decoder(config.get_text_config()))
281289

282290

283291
def is_interleaved(config: PretrainedConfig) -> bool:
@@ -291,13 +299,21 @@ def is_interleaved(config: PretrainedConfig) -> bool:
291299
return False
292300

293301

302+
def _maybe_update_auto_config_kwargs(kwargs: dict[str, Any], model_type: str):
303+
"""
304+
Update kwargs for AutoConfig initialization based on model_type
305+
"""
306+
if model_type in _AUTO_CONFIG_KWARGS_OVERRIDES:
307+
kwargs.update(_AUTO_CONFIG_KWARGS_OVERRIDES[model_type])
308+
return kwargs
309+
310+
294311
def _maybe_remap_hf_config_attrs(config: PretrainedConfig) -> PretrainedConfig:
295312
"""Remap config attributes to match the expected names."""
296313
for old_attr, new_attr in _CONFIG_ATTRS_MAPPING.items():
297314
if hasattr(config, old_attr):
298315
if not hasattr(config, new_attr):
299316
config.update({new_attr: getattr(config, old_attr)})
300-
delattr(config, old_attr)
301317
logger.debug("Remapped config attribute '%s' to '%s'", old_attr,
302318
new_attr)
303319
return config
@@ -408,15 +424,14 @@ def get_config(
408424
)
409425
else:
410426
try:
427+
kwargs = _maybe_update_auto_config_kwargs(
428+
kwargs, model_type=model_type)
411429
config = AutoConfig.from_pretrained(
412430
model,
413431
trust_remote_code=trust_remote_code,
414432
revision=revision,
415433
code_revision=code_revision,
416434
token=_get_hf_token(),
417-
# some old custom model's config needs
418-
# `has_no_defaults_at_init=True` to work.
419-
has_no_defaults_at_init=trust_remote_code,
420435
**kwargs,
421436
)
422437
except ValueError as e:

vllm/transformers_utils/configs/__init__.py

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -17,13 +17,11 @@
1717
from vllm.transformers_utils.configs.jais import JAISConfig
1818
from vllm.transformers_utils.configs.kimi_vl import KimiVLConfig
1919
from vllm.transformers_utils.configs.medusa import MedusaConfig
20-
from vllm.transformers_utils.configs.mllama import MllamaConfig
2120
from vllm.transformers_utils.configs.mlp_speculator import MLPSpeculatorConfig
2221
from vllm.transformers_utils.configs.moonvit import MoonViTConfig
2322
from vllm.transformers_utils.configs.nemotron import NemotronConfig
2423
from vllm.transformers_utils.configs.nemotron_h import NemotronHConfig
2524
from vllm.transformers_utils.configs.nemotron_vl import Nemotron_Nano_VL_Config
26-
from vllm.transformers_utils.configs.nvlm_d import NVLM_D_Config
2725
from vllm.transformers_utils.configs.ovis import OvisConfig
2826
from vllm.transformers_utils.configs.speculators.base import SpeculatorsConfig
2927
from vllm.transformers_utils.configs.step3_vl import (Step3TextConfig,
@@ -34,18 +32,16 @@
3432
__all__ = [
3533
"ChatGLMConfig",
3634
"DeepseekVLV2Config",
35+
"EAGLEConfig",
3736
"RWConfig",
3837
"JAISConfig",
3938
"MedusaConfig",
40-
"EAGLEConfig",
41-
"MllamaConfig",
4239
"MLPSpeculatorConfig",
4340
"MoonViTConfig",
4441
"KimiVLConfig",
4542
"NemotronConfig",
4643
"NemotronHConfig",
4744
"Nemotron_Nano_VL_Config",
48-
"NVLM_D_Config",
4945
"OvisConfig",
5046
"SpeculatorsConfig",
5147
"UltravoxConfig",

vllm/transformers_utils/configs/mllama.py

Lines changed: 0 additions & 31 deletions
This file was deleted.

vllm/transformers_utils/configs/nvlm_d.py

Lines changed: 0 additions & 31 deletions
This file was deleted.

0 commit comments

Comments (0)