Skip to content

Commit a8d2e32

Browse files
authored
[Bugfix][CI] Fix config resolving logic with remote models (#27610)
1 parent 53a56e6 commit a8d2e32

File tree

1 file changed

+8
-3
lines changed

1 file changed

+8
-3
lines changed

vllm/transformers_utils/config.py

Lines changed: 8 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -622,9 +622,14 @@ def get_config(
622622
# Architecture mapping for models without explicit architectures field
623623
if not config.architectures:
624624
if config.model_type not in MODEL_MAPPING_NAMES:
625-
raise ValueError(f"Cannot find architecture name for {config.model_type}")
626-
model_type = MODEL_MAPPING_NAMES[config.model_type]
627-
config.update({"architectures": [model_type]})
625+
logger.warning(
626+
"Model config does not have a top-level 'architectures' field: "
627+
"expecting `hf_overrides={'architectures': ['...']}` to be passed "
628+
"in engine args."
629+
)
630+
else:
631+
model_type = MODEL_MAPPING_NAMES[config.model_type]
632+
config.update({"architectures": [model_type]})
628633

629634
# ModelOpt 0.31.0 and after saves the quantization config in the model
630635
# config file.

0 commit comments

Comments (0)