Diff summary: 3 files changed, 8 insertions(+), 15 deletions(-).
File 1 (filename not captured by the page extraction — presumably the common utility .cpp; confirm against the original commit):

@@ context near old line 74 @@
 #endif
 #define LLAMA_CURL_MAX_URL_LENGTH 2084 // Maximum URL Length in Chrome: 2083

-const std::string LLAMA_CHATML_TEMPLATE(R"(
-{%- for message in messages -%}
-{{- "<|im_start|>" + message.role + "\n" + message.content + "<|im_end|>\n" -}}
-{%- endfor -%}
-{%- if add_generation_prompt -%}
-{{- "<|im_start|>assistant\n" -}}
-{%- endif -%}
-)");
-
 //
 // CURL utils
 //
@@ -1846,7 +1837,14 @@ llama_chat_templates llama_chat_templates_from_model(const struct llama_model *
         if (!tool_use_template_src.empty()) {
             default_template_src = tool_use_template_src;
         } else {
-            default_template_src = LLAMA_CHATML_TEMPLATE;
+            default_template_src = R"(
+{%- for message in messages -%}
+{{- "<|im_start|>" + message.role + "\n" + message.content + "<|im_end|>\n" -}}
+{%- endfor -%}
+{%- if add_generation_prompt -%}
+{{- "<|im_start|>assistant\n" -}}
+{%- endif -%}
+)";
         }
     }
     return {
File 2 (filename not captured by the page extraction — presumably the matching header of File 1; confirm against the original commit):

@@ context near old line 26 @@

 #define DEFAULT_MODEL_PATH "models/7B/ggml-model-f16.gguf"

-extern const std::string LLAMA_CHATML_TEMPLATE;
-
 struct common_adapter_lora_info {
     std::string path;
     float scale;
File 3 (filename not captured by the page extraction — presumably a chat-template test; confirm against the original commit):

@@ context near old line 8 @@
 #include "llama.h"
 #include "common.h"
 #include "chat-template.hpp"
-#include "llama-chat.h"

 int main(void) {
     std::vector<llama_chat_message> conversation {
@@ -365,7 +364,5 @@ int main(void) {
     assert(fmt_single("llama3") == "<|start_header_id|>user<|end_header_id|>\n\nHow are you<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n");
     assert(fmt_single("gigachat") == "user<|role_sep|>How are you<|message_sep|>available functions<|role_sep|>[]<|message_sep|>assistant<|role_sep|>");

-    assert(llm_chat_detect_template(LLAMA_CHATML_TEMPLATE) == LLM_CHAT_TEMPLATE_CHATML);
-
     return 0;
 }
You can’t perform that action at this time.
0 commit comments