1 parent 8b0b64b commit 3373388
src/llama.cpp
@@ -21715,7 +21715,7 @@ static int32_t llama_chat_apply_template_internal(
         }
     } else if (tmpl == "granite" || tmpl_contains("<|start_of_role|>")) {
         // IBM Granite template
-        for (const auto & message: chat) {
+        for (const auto & message : chat) {
             std::string role(message->role);
             ss << "<|start_of_role|>" << role << "<|end_of_role|>"
                << message->content << "<|end_of_text|>\n";
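For reference, the loop touched by this hunk renders each chat turn into the IBM Granite prompt format shown in the strings above. The sketch below is a minimal, self-contained illustration of that formatting only; the local `chat_message` struct and the hard-coded example turns are stand-ins for illustration and are not llama.cpp's actual `llama_chat_message` API, and any generation-prompt handling from the real function is omitted.

```cpp
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

// Simplified stand-in for a chat message; the real llama.cpp struct
// exposes C-string fields for role and content.
struct chat_message {
    std::string role;
    std::string content;
};

int main() {
    std::vector<chat_message> chat = {
        {"user",      "Hello"},
        {"assistant", "Hi there!"},
    };

    std::stringstream ss;
    // Mirrors the loop in the hunk above: each turn is wrapped in
    // <|start_of_role|> ... <|end_of_role|> and closed with <|end_of_text|>.
    for (const auto & message : chat) {
        ss << "<|start_of_role|>" << message.role << "<|end_of_role|>"
           << message.content << "<|end_of_text|>\n";
    }

    std::cout << ss.str();
    return 0;
}
```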