Commit e7d885c

added fallback for incomplete configs for vlm models launched as llms (#828)
Co-authored-by: Nathan Habib <[email protected]>
1 parent 70acb85

File tree: 1 file changed, +6 −3 lines changed

src/lighteval/models/transformers/transformers_model.py

@@ -468,13 +468,16 @@ def _init_max_length(self) -> int:
             return self.config.max_length
 
         # Try to get the sequence length from the model config.
+        text_model_config = self.transformers_config.get_text_config()
+
         seqlen_config_attrs = ("n_positions", "max_position_embeddings", "n_ctx")
         for attr in seqlen_config_attrs:
-            if hasattr(self.transformers_config, attr):
-                return getattr(self.transformers_config, attr)
+            if hasattr(text_model_config, attr):
+                return getattr(text_model_config, attr)
 
         logger.warning(
-            "No max_length attribute found in the model config. Using the default max sequence length setting {2048}. It is recomended to set max_length through the model args"
+            "No max_length attribute found in the model config. Using the default max sequence length setting `2048`. "
+            "It is recommended to set max_length through the model args: max_length=..."
         )
 
         return 2048
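
For context (not part of the commit itself): composite VLM configs keep sequence-length attributes such as max_position_embeddings on their nested text config rather than on the top-level config, so the old hasattr check on self.transformers_config came up empty and evaluation silently fell back to the 2048 default. PretrainedConfig.get_text_config() returns the nested text config for composite models and the config itself for plain LLMs, which is why the change is a no-op for text-only models. A minimal sketch of that behavior, assuming a recent transformers release that provides get_text_config(); the checkpoint id below is only an illustrative example:

    # Sketch only: shows the config-resolution behavior the patch relies on.
    from transformers import AutoConfig

    config = AutoConfig.from_pretrained("llava-hf/llava-1.5-7b-hf")  # example VLM checkpoint

    # Top-level composite configs often lack the usual sequence-length attributes.
    print(hasattr(config, "max_position_embeddings"))  # typically False for VLM configs

    # get_text_config() resolves to the nested text backbone; for a plain LLM
    # config it returns the config itself, so the lookup still works there.
    text_config = config.get_text_config()
    print(getattr(text_config, "max_position_embeddings", None))  # e.g. 4096 for the text backbone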
