
Commit d36eb0b

add fix for self-hosted HF models
1 parent: b115b27

File tree

2 files changed: +4 -0 lines


src/agentlab/llm/huggingface_utils.py

Lines changed: 2 additions & 0 deletions

@@ -7,6 +7,7 @@
 
 from agentlab.llm.base_api import AbstractChatModel
 from agentlab.llm.prompt_templates import PromptTemplate, get_prompt_template
+from agentlab.llm.llm_utils import Discussion
 
 
 class HFBaseChatModel(AbstractChatModel):
@@ -59,6 +60,7 @@ def __call__(
         if self.tokenizer:
             # messages_formated = _convert_messages_to_dict(messages) ## ?
             try:
+                messages.merge()
                 prompt = self.tokenizer.apply_chat_template(messages, tokenize=False)
             except Exception as e:
                 if "Conversation roles must alternate" in str(e):

src/agentlab/llm/llm_utils.py

Lines changed: 2 additions & 0 deletions

@@ -386,6 +386,8 @@ def merge(self):
             else:
                 new_content.append(elem)
         self["content"] = new_content
+        if len(self["content"]) == 1:
+            self["content"] = self["content"][0]["text"]
 
 
 class SystemMessage(BaseMessage):
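
To make the effect of the two added lines concrete, here is a minimal sketch of a dict-based message with a merge() method. Only the final single-element collapse comes from this diff; the Msg class and the rule of concatenating consecutive text parts are simplifying assumptions, not the actual AgentLab message implementation.

# Minimal sketch, NOT the real agentlab.llm.llm_utils message class.
# Assumption: "content" is a list of {"type": "text", "text": ...} parts and
# merge() joins adjacent text parts; only the single-element collapse at the
# end (the two lines added in this commit) is taken from the diff.
class Msg(dict):
    def merge(self):
        new_content = []
        for elem in self["content"]:
            if new_content and elem.get("type") == "text" and new_content[-1].get("type") == "text":
                new_content[-1]["text"] += "\n" + elem["text"]
            else:
                new_content.append(elem)
        self["content"] = new_content
        # Added in this commit: a lone text part collapses to a bare string,
        # the shape most HF chat templates accept.
        if len(self["content"]) == 1:
            self["content"] = self["content"][0]["text"]


msg = Msg(role="user", content=[{"type": "text", "text": "Hello"},
                                {"type": "text", "text": "world"}])
msg.merge()
print(msg["content"])  # -> "Hello\nworld" (a plain string, not a list)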
