Commit 357d4ea

[Fix] Update resolve_hf_chat_template arguments (#2992)
* fix arguments
* pacify pre-commit

Co-authored-by: Baber <[email protected]>
Parent commit: e1a7a39

1 file changed: +22 −4 lines

lm_eval/models/vllm_causallms.py

Lines changed: 22 additions & 4 deletions
@@ -1,4 +1,5 @@
 import copy
+import inspect
 import logging
 from importlib.metadata import version
 from importlib.util import find_spec
@@ -140,11 +141,28 @@ def __init__(
         )
 
         if parse_version(version("vllm")) >= parse_version("0.8.3"):
+            kwargs_resolve_hf_chat_template = {
+                "tokenizer": self.tokenizer,
+                "chat_template": None,
+                "tools": None,
+            }
+
+            if parse_version(version("vllm")) >= parse_version("0.9.0"):
+                kwargs_resolve_hf_chat_template["model_config"] = (
+                    self.model.llm_engine.model_config
+                )
+
+            # https://github.com/vllm-project/vllm/pull/18259
+            if (
+                "trsut_remote_code"
+                in inspect.signature(resolve_hf_chat_template).parameters
+            ):
+                kwargs_resolve_hf_chat_template["trsut_remote_code"] = trust_remote_code
+            else:
+                kwargs_resolve_hf_chat_template["trust_remote_code"] = trust_remote_code
+
             self.hf_chat_template = resolve_hf_chat_template(
-                tokenizer=self.tokenizer,
-                chat_template=None,
-                tools=None,
-                trust_remote_code=trust_remote_code,
+                **kwargs_resolve_hf_chat_template
             )
         else:
             self.hf_chat_template = None
