Skip to content

Commit 9b4139b

Browse files
authored
Merge pull request #19 from Thireus/glm-4.5-clean
Glm 4.5 clean
2 parents eed86de + cae058f commit 9b4139b

File tree

1 file changed

+11
-11
lines changed

1 file changed

+11
-11
lines changed

src/llama.cpp

Lines changed: 11 additions & 11 deletions
Original file line number | Diff line number | Diff line change
@@ -1740,8 +1740,8 @@ enum llm_chat_template {
17401740
LLM_CHAT_TEMPLATE_DEEPSEEK_3,
17411741
LLM_CHAT_TEMPLATE_COMMAND_R,
17421742
LLM_CHAT_TEMPLATE_LLAMA_3,
1743-
LLM_CHAT_TEMPLATE_CHATGML_3,
1744-
LLM_CHAT_TEMPLATE_CHATGML_4,
1743+
LLM_CHAT_TEMPLATE_CHATGLM_3,
1744+
LLM_CHAT_TEMPLATE_CHATGLM_4,
17451745
LLM_CHAT_TEMPLATE_MINICPM,
17461746
LLM_CHAT_TEMPLATE_EXAONE_3,
17471747
LLM_CHAT_TEMPLATE_RWKV_WORLD,
@@ -1781,8 +1781,8 @@ static const std::map<std::string, llm_chat_template> LLM_CHAT_TEMPLATES = {
17811781
{ "deepseek3", LLM_CHAT_TEMPLATE_DEEPSEEK_3 },
17821782
{ "command-r", LLM_CHAT_TEMPLATE_COMMAND_R },
17831783
{ "llama3", LLM_CHAT_TEMPLATE_LLAMA_3 },
1784-
{ "chatglm3", LLM_CHAT_TEMPLATE_CHATGML_3 },
1785-
{ "chatglm4", LLM_CHAT_TEMPLATE_CHATGML_4 },
1784+
{ "chatglm3", LLM_CHAT_TEMPLATE_CHATGLM_3 },
1785+
{ "chatglm4", LLM_CHAT_TEMPLATE_CHATGLM_4 },
17861786
{ "minicpm", LLM_CHAT_TEMPLATE_MINICPM },
17871787
{ "exaone3", LLM_CHAT_TEMPLATE_EXAONE_3 },
17881788
{ "rwkv-world", LLM_CHAT_TEMPLATE_RWKV_WORLD },
@@ -23841,6 +23841,11 @@ static llm_chat_template llama_chat_detect_template(const std::string & tmpl) {
2384123841
return LLM_CHAT_TEMPLATE_LLAMA_2;
2384223842
}
2384323843
}
23844+
} else if (tmpl_contains("[gMASK]sop")) {
23845+
// chatglm3-6b
23846+
return LLM_CHAT_TEMPLATE_CHATGLM_3;
23847+
} else if (tmpl_contains("[gMASK]<sop>")) {
23848+
return LLM_CHAT_TEMPLATE_CHATGLM_4;
2384423849
} else if (tmpl_contains("<|assistant|>") && tmpl_contains("<|end|>")) {
2384523850
return LLM_CHAT_TEMPLATE_PHI_3;
2384623851
} else if (tmpl_contains("<|assistant|>") && tmpl_contains("<|user|>")) {
@@ -23873,11 +23878,6 @@ static llm_chat_template llama_chat_detect_template(const std::string & tmpl) {
2387323878
return LLM_CHAT_TEMPLATE_COMMAND_R;
2387423879
} else if (tmpl_contains("<|start_header_id|>") && tmpl_contains("<|end_header_id|>")) {
2387523880
return LLM_CHAT_TEMPLATE_LLAMA_3;
23876-
} else if (tmpl_contains("[gMASK]sop")) {
23877-
// chatglm3-6b
23878-
return LLM_CHAT_TEMPLATE_CHATGML_3;
23879-
} else if (tmpl_contains("[gMASK]<sop>")) {
23880-
return LLM_CHAT_TEMPLATE_CHATGML_4;
2388123881
} else if (tmpl_contains(LU8("<用户>"))) {
2388223882
// MiniCPM-3B-OpenHermes-2.5-v2-GGUF
2388323883
return LLM_CHAT_TEMPLATE_MINICPM;
@@ -24160,7 +24160,7 @@ static int32_t llama_chat_apply_template_internal(
2416024160
if (add_ass) {
2416124161
ss << "<|start_header_id|>assistant<|end_header_id|>\n\n";
2416224162
}
24163-
} else if (tmpl == LLM_CHAT_TEMPLATE_CHATGML_3) {
24163+
} else if (tmpl == LLM_CHAT_TEMPLATE_CHATGLM_3) {
2416424164
// chatglm3-6b
2416524165
ss << "[gMASK]" << "sop";
2416624166
for (auto message : chat) {
@@ -24170,7 +24170,7 @@ static int32_t llama_chat_apply_template_internal(
2417024170
if (add_ass) {
2417124171
ss << "<|assistant|>";
2417224172
}
24173-
} else if (tmpl == LLM_CHAT_TEMPLATE_CHATGML_4) {
24173+
} else if (tmpl == LLM_CHAT_TEMPLATE_CHATGLM_4) {
2417424174
ss << "[gMASK]" << "<sop>";
2417524175
for (auto message : chat) {
2417624176
std::string role(message->role);

0 commit comments

Comments (0)