Skip to content

Commit c9b0731

Browse files
author
ochafik
committed
test all chat formats w/o tools
1 parent 5351daa commit c9b0731

File tree

1 file changed

+9
-0
lines changed

1 file changed

+9
-0
lines changed

tests/test-chat.cpp

Lines changed: 9 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -569,6 +569,7 @@ static void test_template_output_parsers() {
569569
{
570570
// Not supported yet
571571
auto tmpls = read_templates("models/templates/CohereForAI-c4ai-command-r-plus-tool_use.jinja");
572+
assert_equals(COMMON_CHAT_FORMAT_CONTENT_ONLY, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
572573
assert_equals(COMMON_CHAT_FORMAT_GENERIC, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
573574
}
574575
{
@@ -665,6 +666,7 @@ static void test_template_output_parsers() {
665666
auto tmpls = read_templates("models/templates/NousResearch-Hermes-2-Pro-Llama-3-8B-tool_use.jinja");
666667
std::vector<std::string> end_tokens{ "<|im_end|>" };
667668

669+
assert_equals(COMMON_CHAT_FORMAT_CONTENT_ONLY, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
668670
assert_equals(COMMON_CHAT_FORMAT_HERMES_2_PRO, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
669671
assert_equals(
670672
COMMON_CHAT_FORMAT_HERMES_2_PRO,
@@ -793,6 +795,7 @@ static void test_template_output_parsers() {
793795
auto tmpls = read_templates("models/templates/meta-llama-Llama-3.1-8B-Instruct.jinja");
794796
std::vector<std::string> end_tokens{ "<|eom_id|>", "<|eot_id|>" };
795797

798+
assert_equals(COMMON_CHAT_FORMAT_CONTENT_ONLY, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
796799
assert_equals(COMMON_CHAT_FORMAT_LLAMA_3_X, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
797800
assert_equals(COMMON_CHAT_FORMAT_LLAMA_3_X_WITH_BUILTIN_TOOLS,
798801
common_chat_templates_apply(tmpls.get(), inputs_tools_builtin).format);
@@ -815,6 +818,7 @@ static void test_template_output_parsers() {
815818
std::vector<std::string> end_tokens{ "<|eom_id|>", "<|eot_id|>" };
816819

817820
assert_equals(COMMON_CHAT_FORMAT_LLAMA_3_X, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
821+
assert_equals(COMMON_CHAT_FORMAT_CONTENT_ONLY, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
818822

819823
test_templates(tmpls.get(), end_tokens, message_assist, tools, "Hello, world!\nWhat's up?", /* expect_grammar_triggered= */ false);
820824
test_templates(tmpls.get(), end_tokens, message_assist_call, tools,
@@ -824,6 +828,8 @@ static void test_template_output_parsers() {
824828
auto tmpls = read_templates("models/templates/meetkai-functionary-medium-v3.1.jinja");
825829
std::vector<std::string> end_tokens{ "<|eom_id|>", "<|eot_id|>" };
826830

831+
assert_equals(COMMON_CHAT_FORMAT_CONTENT_ONLY,
832+
common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
827833
assert_equals(COMMON_CHAT_FORMAT_FUNCTIONARY_V3_1_LLAMA_3_1,
828834
common_chat_templates_apply(tmpls.get(), inputs_tools).format);
829835

@@ -851,6 +857,7 @@ static void test_template_output_parsers() {
851857
auto tmpls = read_templates("models/templates/fireworks-ai-llama-3-firefunction-v2.jinja");
852858
std::vector<std::string> end_tokens{ "<|eot_id|>" };
853859

860+
assert_equals(COMMON_CHAT_FORMAT_CONTENT_ONLY, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
854861
assert_equals(COMMON_CHAT_FORMAT_FIREFUNCTION_V2, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
855862

856863
test_templates(tmpls.get(), end_tokens, message_assist, tools, "Hello, world!\nWhat's up?", /* expect_grammar_triggered= */ false);
@@ -862,6 +869,7 @@ static void test_template_output_parsers() {
862869
auto tmpls = read_templates("models/templates/deepseek-ai-DeepSeek-R1-Distill-Llama-8B.jinja");
863870
std::vector<std::string> end_tokens{ "<|end▁of▁sentence|>" };
864871

872+
assert_equals(COMMON_CHAT_FORMAT_DEEPSEEK_R1, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
865873
assert_equals(COMMON_CHAT_FORMAT_DEEPSEEK_R1, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
866874
assert_equals(COMMON_CHAT_FORMAT_DEEPSEEK_R1_EXTRACT_REASONING, common_chat_templates_apply(tmpls.get(), inputs_tools_think).format);
867875

@@ -891,6 +899,7 @@ static void test_template_output_parsers() {
891899
auto tmpls = read_templates("models/templates/llama-cpp-deepseek-r1.jinja");
892900
std::vector<std::string> end_tokens{ "<|end▁of▁sentence|>" };
893901

902+
assert_equals(COMMON_CHAT_FORMAT_DEEPSEEK_R1, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
894903
assert_equals(COMMON_CHAT_FORMAT_DEEPSEEK_R1, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
895904
assert_equals(COMMON_CHAT_FORMAT_DEEPSEEK_R1_EXTRACT_REASONING, common_chat_templates_apply(tmpls.get(), inputs_tools_think).format);
896905

0 commit comments

Comments (0)