Skip to content

Commit 2d30fd4

Browse files
committed
improve template code
1 parent 9f89d7d commit 2d30fd4

File tree

1 file changed: src/llama.cpp (+4 −4 lines)

src/llama.cpp

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -22016,10 +22016,7 @@ static llm_chat_template llama_chat_detect_template(const std::string & tmpl) {
     auto tmpl_contains = [&tmpl](const char * haystack) -> bool {
         return tmpl.find(haystack) != std::string::npos;
     };
-    // gigachat
-    if (tmpl_contains("additional_special_tokens") && tmpl_contains("functions")) {
-        return LLM_CHAT_TEMPLATE_GIGACHAT;
-    } else if (tmpl_contains("<|im_start|>")) {
+    if (tmpl_contains("<|im_start|>")) {
         return LLM_CHAT_TEMPLATE_CHATML;
     } else if (tmpl.find("mistral") == 0 || tmpl_contains("[INST]")) {
         if (tmpl_contains("[SYSTEM_PROMPT]")) {
@@ -22102,6 +22099,9 @@ static llm_chat_template llama_chat_detect_template(const std::string & tmpl) {
         return LLM_CHAT_TEMPLATE_RWKV_WORLD;
     } else if (tmpl_contains("<|start_of_role|>")) {
         return LLM_CHAT_TEMPLATE_GRANITE;
+    } else if (tmpl_contains("message['role'] + additional_special_tokens[0] + message['content'] + additional_special_tokens[1]")) {
+        // gigachat
+        return LLM_CHAT_TEMPLATE_GIGACHAT;
     }
     return LLM_CHAT_TEMPLATE_UNKNOWN;
 }

0 commit comments

Comments
 (0)