File tree (expand/collapse): 1 file changed, +22 −0 lines changed
lines changed Original file line number Diff line number Diff line change @@ -21862,6 +21862,28 @@ static int32_t llama_chat_apply_template_internal(
2186221862 if (add_ass) {
2186321863 ss << "<|im_start|>assistant\n";
2186421864 }
21865+ } else if (tmpl == "mistral-v1" || tmpl == "mistral-v2" || tmpl == "mistral-v3" || tmpl == "mistral-v3-tekken") {
21866+ // See: https://github.com/mistralai/cookbook/blob/main/concept-deep-dive/tokenization/chat_templates.md
21867+ // See: https://github.com/mistralai/cookbook/blob/main/concept-deep-dive/tokenization/templates.md
21868+ std::string leading_space = (tmpl == "mistral-v1" ? " " : "");
21869+ std::string trailing_space = (tmpl != "mistral-v3-tekken" ? " " : "");
21870+ std::string system_message = "";
21871+ for (auto message : chat) {
21872+ std::string role(message->role);
21873+ std::string content = trim(message->content);
21874+ if (role == "system") {
21875+ system_message = content;
21876+ } else if (role == "user") {
21877+ ss << leading_space << "[INST]" << trailing_space;
21878+ if (!system_message.empty()) {
21879+ ss << system_message << "\n\n";
21880+ system_message = "";
21881+ }
21882+ ss << content << leading_space << "[/INST]";
21883+ } else {
21884+ ss << trailing_space << content << "</s>";
21885+ }
21886+ }
2186521887 } else if (tmpl == "llama2" || tmpl == "mistral" || tmpl_contains("[INST]")) {
2186621888 // llama2 template and its variants
2186721889 // [variant] support system message
You can’t perform that action at this time.
0 commit comments