@@ -560,6 +560,7 @@ static void test_template_output_parsers() {
     {
         // Not supported yet
         auto tmpls = read_templates("models/templates/CohereForAI-c4ai-command-r-plus-tool_use.jinja");
+        assert_equals(COMMON_CHAT_FORMAT_CONTENT_ONLY, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
         assert_equals(COMMON_CHAT_FORMAT_GENERIC, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
     }
     {
@@ -746,11 +747,8 @@ static void test_template_output_parsers() {
         auto tmpls = read_templates("models/templates/NousResearch-Hermes-2-Pro-Llama-3-8B-tool_use.jinja");
         std::vector<std::string> end_tokens{ "<|im_end|>" };
 
-        for (const auto & inputs : { inputs_no_tools, inputs_tools }) {
-            auto params = common_chat_templates_apply(tmpls.get(), inputs);
-            assert_equals(COMMON_CHAT_FORMAT_HERMES_2_PRO, params.format);
-            assert_equals(false, params.thinking_forced_open);
-        }
+        assert_equals(COMMON_CHAT_FORMAT_CONTENT_ONLY, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
+        assert_equals(COMMON_CHAT_FORMAT_HERMES_2_PRO, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
         assert_equals(
             COMMON_CHAT_FORMAT_HERMES_2_PRO,
             common_chat_templates_apply(
@@ -1022,6 +1020,7 @@ static void test_template_output_parsers() {
         auto tmpls = read_templates("models/templates/meta-llama-Llama-3.1-8B-Instruct.jinja");
         std::vector<std::string> end_tokens{ "<|eom_id|>", "<|eot_id|>" };
 
+        assert_equals(COMMON_CHAT_FORMAT_CONTENT_ONLY, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
         assert_equals(COMMON_CHAT_FORMAT_LLAMA_3_X, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
         assert_equals(COMMON_CHAT_FORMAT_LLAMA_3_X_WITH_BUILTIN_TOOLS,
                       common_chat_templates_apply(tmpls.get(), inputs_tools_builtin).format);
@@ -1051,8 +1050,12 @@ static void test_template_output_parsers() {
         std::vector<std::string> end_tokens{ "<|eom_id|>", "<|eot_id|>" };
 
         assert_equals(COMMON_CHAT_FORMAT_LLAMA_3_X, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
         assert_equals(COMMON_CHAT_FORMAT_CONTENT_ONLY,
                       common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
 
         test_templates(tmpls.get(), end_tokens, message_assist, tools, "Hello, world!\nWhat's up?", /* expect_grammar_triggered= */ false);
         test_templates(tmpls.get(), end_tokens, message_assist_call, tools,
@@ -1062,6 +1065,8 @@ static void test_template_output_parsers() {
         auto tmpls = read_templates("models/templates/meetkai-functionary-medium-v3.1.jinja");
         std::vector<std::string> end_tokens{ "<|eom_id|>", "<|eot_id|>" };
 
+        assert_equals(COMMON_CHAT_FORMAT_CONTENT_ONLY,
+                      common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
         assert_equals(COMMON_CHAT_FORMAT_FUNCTIONARY_V3_1_LLAMA_3_1,
                       common_chat_templates_apply(tmpls.get(), inputs_tools).format);
         assert_equals(COMMON_CHAT_FORMAT_CONTENT_ONLY,
@@ -1141,6 +1146,7 @@ static void test_template_output_parsers() {
         auto tmpls = read_templates("models/templates/fireworks-ai-llama-3-firefunction-v2.jinja");
         std::vector<std::string> end_tokens{ "<|eot_id|>" };
 
+        assert_equals(COMMON_CHAT_FORMAT_CONTENT_ONLY, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
         assert_equals(COMMON_CHAT_FORMAT_FIREFUNCTION_V2, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
 
         test_templates(tmpls.get(), end_tokens, message_assist, tools, "Hello, world!\nWhat's up?", /* expect_grammar_triggered= */ false);
@@ -1152,11 +1158,17 @@ static void test_template_output_parsers() {
         auto tmpls = read_templates("models/templates/deepseek-ai-DeepSeek-R1-Distill-Llama-8B.jinja");
         std::vector<std::string> end_tokens{ "<|end▁of▁sentence|>" };
 
         for (const auto & inputs : { inputs_no_tools, inputs_tools }) {
             auto params = common_chat_templates_apply(tmpls.get(), inputs);
             assert_equals(COMMON_CHAT_FORMAT_DEEPSEEK_R1, params.format);
             assert_equals(true, params.thinking_forced_open);
         }
 
         test_templates(tmpls.get(), end_tokens, message_assist, tools, "Hello, world!\nWhat's up?", /* expect_grammar_triggered= */ false);
         test_templates(tmpls.get(), end_tokens, message_assist_thoughts, tools, "Hello, world!\nWhat's up?", /* expect_grammar_triggered= */ false);
@@ -1237,6 +1249,7 @@ static void test_template_output_parsers() {
         auto tmpls = read_templates("models/templates/llama-cpp-deepseek-r1.jinja");
         std::vector<std::string> end_tokens{ "<|end▁of▁sentence|>" };
 
+        assert_equals(COMMON_CHAT_FORMAT_DEEPSEEK_R1, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
         assert_equals(COMMON_CHAT_FORMAT_DEEPSEEK_R1, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
 
         test_templates(tmpls.get(), end_tokens, message_assist, tools, "Hello, world!\nWhat's up?", /* expect_grammar_triggered= */ false);