3 files changed: 43 additions, 2 deletions

convert_hf_to_gguf.py
@@ -1082,7 +1082,14 @@ def _set_vocab_rwkv_world(self):
         self.gguf_writer.add_token_list(tokens)
         self.gguf_writer.add_token_types(toktypes)
         special_vocab = gguf.SpecialVocab(self.dir_model, load_merges=False)
-        special_vocab.chat_template = "rwkv-world"
+        if special_vocab.chat_template is None:
+            template_path = Path(__file__).parent / "models" / "templates" / "llama-cpp-rwkv-world.jinja"
+            if template_path.is_file():
+                with open(template_path, "r", encoding="utf-8") as f:
+                    template = f.read()
+            else:
+                template = "rwkv-world"
+            special_vocab.chat_template = template
         # hack: Add '\n\n' as the EOT token to make it chat normally
         special_vocab._set_special_token("eot", 261)
         # hack: Override these as they have already been set (incorrectly)
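In plain terms, the converter now only fills in a chat template when the model didn't ship one, prefers the full Jinja template added below under models/templates/, and keeps the old "rwkv-world" marker string as a last resort. A minimal standalone sketch of that fallback order (the function name and `repo_root` argument are illustrative, not part of the converter):

```python
from pathlib import Path
from typing import Optional


def resolve_rwkv_chat_template(existing: Optional[str], repo_root: Path) -> str:
    """Sketch of the fallback order above; name and arguments are illustrative."""
    # A template already present (e.g. loaded from the model's tokenizer config) wins.
    if existing is not None:
        return existing
    # Otherwise prefer the full Jinja template bundled with the repo ...
    template_path = repo_root / "models" / "templates" / "llama-cpp-rwkv-world.jinja"
    if template_path.is_file():
        return template_path.read_text(encoding="utf-8")
    # ... and keep the legacy marker string that llama-chat.cpp recognizes.
    return "rwkv-world"


if __name__ == "__main__":
    # Example: resolve relative to the current working directory.
    print(resolve_rwkv_chat_template(None, Path.cwd())[:80])
```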
models/templates/llama-cpp-rwkv-world.jinja (new file)
@@ -0,0 +1,34 @@
+{%- if not add_generation_prompt is defined -%}
+{%- set add_generation_prompt = true -%}
+{%- endif -%}
+{%- set ns = namespace(system_prompt='') -%}
+{%- for message in messages -%}
+{%- if message['role'] == 'system' -%}
+{%- set ns.system_prompt = message['content'] -%}
+{%- endif -%}
+{%- endfor -%}
+{{bos_token}}
+{%- if ns.system_prompt != '' -%}
+{{- 'System: ' + ns.system_prompt + '\n\n' -}}
+{%- endif -%}
+{%- for message in messages -%}
+{%- if message['role'] == 'user' -%}
+{{- 'User: ' + message['content']|trim + '\n\n' -}}
+{%- endif -%}
+{%- if message['role'] == 'assistant' and message['content'] is not none -%}
+{%- set content = message['content'] -%}
+{%- if '</think>' in content -%}
+{%- set content = content.split('</think>')[-1] -%}
+{%- endif -%}
+{{- 'Assistant: ' + content|trim + '\n\n' -}}
+{%- endif -%}
+{%- endfor -%}
+{%- if add_generation_prompt -%}
+{{- 'Assistant:' -}}
+{%- if enable_thinking is defined and enable_thinking is false %}
+{{- ' <think>\n</think>' }}
+{%- endif %}
+{%- if enable_thinking is defined and enable_thinking is true %}
+{{- ' <think>' }}
+{%- endif %}
+{%- endif -%}
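The template collects the system prompt, emits "User:"/"Assistant:" turns separated by blank lines, strips any stored reasoning up to `</think>`, and optionally pre-fills a `<think>` block when `enable_thinking` is set. To sanity-check it, it can be rendered directly with the jinja2 package; the message list, the empty `bos_token`, and the relative path are illustrative assumptions (the real values come from the model and the checkout location):

```python
import jinja2  # pip install jinja2

# Path assumes the script is run from the repository root.
with open("models/templates/llama-cpp-rwkv-world.jinja", encoding="utf-8") as f:
    template = jinja2.Template(f.read())

prompt = template.render(
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Hello!"},
        {"role": "assistant", "content": "<think>greeting</think>Hi, how can I help?"},
        {"role": "user", "content": "What is RWKV?"},
    ],
    bos_token="",  # illustrative; the real value comes from the tokenizer
    add_generation_prompt=True,
)
print(prompt)
# Produces (the <think>...</think> part of the stored assistant turn is dropped):
# System: You are a helpful assistant.
#
# User: Hello!
#
# Assistant: Hi, how can I help?
#
# User: What is RWKV?
#
# Assistant:
```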
src/llama-chat.cpp
@@ -170,7 +170,7 @@ llm_chat_template llm_chat_detect_template(const std::string & tmpl) {
         // ref: https://huggingface.co/LGAI-EXAONE/EXAONE-3.0-7.8B-Instruct/discussions/8#66bae61b1893d14ee8ed85bb
         // EXAONE-3.0-7.8B-Instruct
         return LLM_CHAT_TEMPLATE_EXAONE_3;
-    } else if (tmpl_contains("rwkv-world")) {
+    } else if (tmpl_contains("rwkv-world") || tmpl_contains("{{- 'User: ' + message['content']|trim + '\\n\\n' -}}")) {
         return LLM_CHAT_TEMPLATE_RWKV_WORLD;
     } else if (tmpl_contains("<|start_of_role|>")) {
         return LLM_CHAT_TEMPLATE_GRANITE;
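Detection here is plain substring matching, so the updated branch accepts either the legacy "rwkv-world" marker written by older converters or the distinctive user-turn line of the new Jinja template. A rough Python rendition of that check (the helper name is illustrative; llama.cpp itself does this in C++):

```python
def looks_like_rwkv_world(tmpl: str) -> bool:
    """Rough Python rendition of the C++ branch above; the name is illustrative."""
    # Either the legacy marker, or the distinctive user-turn line of the
    # bundled Jinja template (note the literal backslash-n sequences).
    return ("rwkv-world" in tmpl
            or "{{- 'User: ' + message['content']|trim + '\\n\\n' -}}" in tmpl)


assert looks_like_rwkv_world("rwkv-world")
# Assumes the repository root as the working directory.
with open("models/templates/llama-cpp-rwkv-world.jinja", encoding="utf-8") as f:
    assert looks_like_rwkv_world(f.read())
```

The probe has to match the template source byte for byte, which is why the C++ literal escapes the backslashes: it compares against the two-character `\n` sequences in the Jinja text, not real newlines.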