Skip to content

Commit cae058f

Browse files
authored
Merge pull request #18 from Thireus/glm-4.5-testing
Glm 4.5 testing
2 parents 46c3fa9 + 41a235b commit cae058f

File tree

1 file changed

+11
-11
lines changed

1 file changed

+11
-11
lines changed

src/llama.cpp

Lines changed: 11 additions & 11 deletions
Original file line number | Diff line number | Diff line change
@@ -1740,8 +1740,8 @@ enum llm_chat_template {
17401740
LLM_CHAT_TEMPLATE_DEEPSEEK_3,
17411741
LLM_CHAT_TEMPLATE_COMMAND_R,
17421742
LLM_CHAT_TEMPLATE_LLAMA_3,
1743-
LLM_CHAT_TEMPLATE_CHATGML_3,
1744-
LLM_CHAT_TEMPLATE_CHATGML_4,
1743+
LLM_CHAT_TEMPLATE_CHATGLM_3,
1744+
LLM_CHAT_TEMPLATE_CHATGLM_4,
17451745
LLM_CHAT_TEMPLATE_MINICPM,
17461746
LLM_CHAT_TEMPLATE_EXAONE_3,
17471747
LLM_CHAT_TEMPLATE_RWKV_WORLD,
@@ -1781,8 +1781,8 @@ static const std::map<std::string, llm_chat_template> LLM_CHAT_TEMPLATES = {
17811781
{ "deepseek3", LLM_CHAT_TEMPLATE_DEEPSEEK_3 },
17821782
{ "command-r", LLM_CHAT_TEMPLATE_COMMAND_R },
17831783
{ "llama3", LLM_CHAT_TEMPLATE_LLAMA_3 },
1784-
{ "chatglm3", LLM_CHAT_TEMPLATE_CHATGML_3 },
1785-
{ "chatglm4", LLM_CHAT_TEMPLATE_CHATGML_4 },
1784+
{ "chatglm3", LLM_CHAT_TEMPLATE_CHATGLM_3 },
1785+
{ "chatglm4", LLM_CHAT_TEMPLATE_CHATGLM_4 },
17861786
{ "minicpm", LLM_CHAT_TEMPLATE_MINICPM },
17871787
{ "exaone3", LLM_CHAT_TEMPLATE_EXAONE_3 },
17881788
{ "rwkv-world", LLM_CHAT_TEMPLATE_RWKV_WORLD },
@@ -23851,6 +23851,11 @@ static llm_chat_template llama_chat_detect_template(const std::string & tmpl) {
2385123851
return LLM_CHAT_TEMPLATE_LLAMA_2;
2385223852
}
2385323853
}
23854+
} else if (tmpl_contains("[gMASK]sop")) {
23855+
// chatglm3-6b
23856+
return LLM_CHAT_TEMPLATE_CHATGLM_3;
23857+
} else if (tmpl_contains("[gMASK]<sop>")) {
23858+
return LLM_CHAT_TEMPLATE_CHATGLM_4;
2385423859
} else if (tmpl_contains("<|assistant|>") && tmpl_contains("<|end|>")) {
2385523860
return LLM_CHAT_TEMPLATE_PHI_3;
2385623861
} else if (tmpl_contains("<|assistant|>") && tmpl_contains("<|user|>")) {
@@ -23883,11 +23888,6 @@ static llm_chat_template llama_chat_detect_template(const std::string & tmpl) {
2388323888
return LLM_CHAT_TEMPLATE_COMMAND_R;
2388423889
} else if (tmpl_contains("<|start_header_id|>") && tmpl_contains("<|end_header_id|>")) {
2388523890
return LLM_CHAT_TEMPLATE_LLAMA_3;
23886-
} else if (tmpl_contains("[gMASK]sop")) {
23887-
// chatglm3-6b
23888-
return LLM_CHAT_TEMPLATE_CHATGML_3;
23889-
} else if (tmpl_contains("[gMASK]<sop>")) {
23890-
return LLM_CHAT_TEMPLATE_CHATGML_4;
2389123891
} else if (tmpl_contains(LU8("<用户>"))) {
2389223892
// MiniCPM-3B-OpenHermes-2.5-v2-GGUF
2389323893
return LLM_CHAT_TEMPLATE_MINICPM;
@@ -24170,7 +24170,7 @@ static int32_t llama_chat_apply_template_internal(
2417024170
if (add_ass) {
2417124171
ss << "<|start_header_id|>assistant<|end_header_id|>\n\n";
2417224172
}
24173-
} else if (tmpl == LLM_CHAT_TEMPLATE_CHATGML_3) {
24173+
} else if (tmpl == LLM_CHAT_TEMPLATE_CHATGLM_3) {
2417424174
// chatglm3-6b
2417524175
ss << "[gMASK]" << "sop";
2417624176
for (auto message : chat) {
@@ -24180,7 +24180,7 @@ static int32_t llama_chat_apply_template_internal(
2418024180
if (add_ass) {
2418124181
ss << "<|assistant|>";
2418224182
}
24183-
} else if (tmpl == LLM_CHAT_TEMPLATE_CHATGML_4) {
24183+
} else if (tmpl == LLM_CHAT_TEMPLATE_CHATGLM_4) {
2418424184
ss << "[gMASK]" << "<sop>";
2418524185
for (auto message : chat) {
2418624186
std::string role(message->role);

0 commit comments

Comments (0)