
Commit bc90b0b (1 parent: 87616f0)

fix wrong template in GLM4-0414

1 file changed: src/llama-chat.cpp (3 additions, 0 deletions)

@@ -123,6 +123,9 @@ llm_chat_template llm_chat_detect_template(const std::string & tmpl) {
     } else if (tmpl_contains("<|assistant|>") && tmpl_contains("<|end|>")) {
         return LLM_CHAT_TEMPLATE_PHI_3;
     } else if (tmpl_contains("<|assistant|>") && tmpl_contains("<|user|>")) {
+        if (tmpl_contains("[gMASK]<sop>")) { /* new GLM4 0414 models */
+            return LLM_CHAT_TEMPLATE_CHATGML_4;
+        }
         return tmpl_contains("</s>") ? LLM_CHAT_TEMPLATE_FALCON_3 : LLM_CHAT_TEMPLATE_GLMEDGE;
     } else if (tmpl_contains("<|{{ item['role'] }}|>") && tmpl_contains("<|begin_of_image|>")) {
         return LLM_CHAT_TEMPLATE_GLMEDGE;
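
For illustration, below is a minimal standalone sketch (not the llama.cpp API) of how the amended detection order classifies a GLM4-0414 style template. The two-argument tmpl_contains helper and the sample template fragment are assumptions made for this example; only the branch touched by this commit is reproduced.

#include <iostream>
#include <string>

// Hypothetical two-argument helper for this sketch; in llama-chat.cpp the check
// is made against the template string held by the detection function itself.
static bool tmpl_contains(const std::string & tmpl, const std::string & needle) {
    return tmpl.find(needle) != std::string::npos;
}

int main() {
    // Assumed fragment of a GLM4-0414 chat template: it carries literal <|user|>
    // and <|assistant|> role markers plus the [gMASK]<sop> prefix, and no <|end|>.
    const std::string tmpl =
        "[gMASK]<sop>{% for m in messages %}"
        "{% if m['role'] == 'user' %}<|user|>\n{{ m['content'] }}"
        "{% else %}<|assistant|>\n{{ m['content'] }}{% endif %}"
        "{% endfor %}<|assistant|>";

    std::string detected = "UNKNOWN";
    if (tmpl_contains(tmpl, "<|assistant|>") && tmpl_contains(tmpl, "<|user|>")) {
        if (tmpl_contains(tmpl, "[gMASK]<sop>")) {
            // Branch added by this commit: new GLM4-0414 models.
            detected = "LLM_CHAT_TEMPLATE_CHATGML_4";
        } else {
            // Pre-existing fallback that previously caught these templates.
            detected = tmpl_contains(tmpl, "</s>") ? "LLM_CHAT_TEMPLATE_FALCON_3"
                                                   : "LLM_CHAT_TEMPLATE_GLMEDGE";
        }
    }
    std::cout << detected << std::endl;  // prints LLM_CHAT_TEMPLATE_CHATGML_4
}

As the diff shows, such a template previously fell through to the "</s>" check and resolved to LLM_CHAT_TEMPLATE_GLMEDGE (or FALCON_3); the added [gMASK]<sop> check routes it to the ChatGLM-4 template handler instead.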
