Skip to content

Commit 77ed950

Browse files
committed
[Bugfix] Fix Dense module loading for sentence-transformers embedding models v12
Signed-off-by: FFFfff1FFFfff <[email protected]>
1 parent 4351f99 commit 77ed950

File tree

2 files changed

+17
-8
lines changed

2 files changed

+17
-8
lines changed

requirements/test.txt

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -968,6 +968,7 @@ setuptools==77.0.3
968968
# via
969969
# lightning-utilities
970970
# pytablewriter
971+
# torch
971972
# triton
972973
shapely==2.1.1
973974
# via

vllm/transformers_utils/config.py

Lines changed: 16 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -280,11 +280,12 @@ def thinker_uses_mrope(config: PretrainedConfig) -> bool:
280280

281281
def is_encoder_decoder(config: PretrainedConfig) -> bool:
282282
"""Detect if the model with this config is used as an encoder/decoder."""
283-
text_config = getattr(config, "text_config", None)
284-
if text_config is not None:
285-
return is_encoder_decoder(text_config)
286283

287-
return getattr(config, "is_encoder_decoder", False)
284+
def _is_encoder_decoder(config: PretrainedConfig) -> bool:
285+
return getattr(config, "is_encoder_decoder", False)
286+
287+
return (_is_encoder_decoder(config)
288+
or _is_encoder_decoder(config.get_text_config()))
288289

289290

290291
def is_interleaved(config: PretrainedConfig) -> bool:
@@ -298,13 +299,21 @@ def is_interleaved(config: PretrainedConfig) -> bool:
298299
return False
299300

300301

302+
def _maybe_update_auto_config_kwargs(kwargs: dict[str, Any], model_type: str):
303+
"""
304+
Update kwargs for AutoConfig initialization based on model_type
305+
"""
306+
if model_type in _AUTO_CONFIG_KWARGS_OVERRIDES:
307+
kwargs.update(_AUTO_CONFIG_KWARGS_OVERRIDES[model_type])
308+
return kwargs
309+
310+
301311
def _maybe_remap_hf_config_attrs(config: PretrainedConfig) -> PretrainedConfig:
302312
"""Remap config attributes to match the expected names."""
303313
for old_attr, new_attr in _CONFIG_ATTRS_MAPPING.items():
304314
if hasattr(config, old_attr):
305315
if not hasattr(config, new_attr):
306316
config.update({new_attr: getattr(config, old_attr)})
307-
delattr(config, old_attr)
308317
logger.debug("Remapped config attribute '%s' to '%s'", old_attr,
309318
new_attr)
310319
return config
@@ -415,15 +424,14 @@ def get_config(
415424
)
416425
else:
417426
try:
427+
kwargs = _maybe_update_auto_config_kwargs(
428+
kwargs, model_type=model_type)
418429
config = AutoConfig.from_pretrained(
419430
model,
420431
trust_remote_code=trust_remote_code,
421432
revision=revision,
422433
code_revision=code_revision,
423434
token=_get_hf_token(),
424-
# some old custom model's config needs
425-
# `has_no_defaults_at_init=True` to work.
426-
has_no_defaults_at_init=trust_remote_code,
427435
**kwargs,
428436
)
429437
except ValueError as e:

0 commit comments

Comments (0)