We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent f3d0a23 commit 8971417 — Copy full SHA for 8971417
src/llama.cpp
@@ -22100,7 +22100,6 @@ static llm_chat_template llama_chat_detect_template(const std::string & tmpl) {
22100
} else if (tmpl_contains("<|start_of_role|>")) {
22101
return LLM_CHAT_TEMPLATE_GRANITE;
22102
} else if (tmpl_contains("message['role'] + additional_special_tokens[0] + message['content'] + additional_special_tokens[1]")) {
22103
- // gigachat
22104
return LLM_CHAT_TEMPLATE_GIGACHAT;
22105
}
22106
return LLM_CHAT_TEMPLATE_UNKNOWN;
0 commit comments