Commit 595e11c

Author: ochafik
tool-call: fix/test functionary v3
1 parent c124ab4
4 files changed: +40 -35 lines

common/tool-call.cpp — 13 additions & 19 deletions

@@ -39,6 +39,8 @@ static bool parse_json(std::string::const_iterator & it, const std::string::cons
         std::size_t position;
         bool found_error;
 
+        json_error_locator() : position(0), found_error(false) {}
+
         bool parse_error(std::size_t position, const std::string & last_token, const json::exception & ex) override {
             // LOG_WARNING("JSON error (Expected)", {{"position", position}, {"last_token", last_token}, {"error", ex.what()}});
             this->position = position - 1;
@@ -67,7 +69,7 @@ static bool parse_json(std::string::const_iterator & it, const std::string::cons
     } else {
         temptative_end = end;
     }
-    std::string json_sub {it, it + err_loc.position};
+    std::string json_sub {it, temptative_end};
     // LOG_WARNING("Parsing json", {{"json_sub", json_sub}});
     try {
         out = json::parse(json_sub);
@@ -155,9 +157,7 @@ static llama_tool_calls parse_llama_3_1_tool_calls(const json & tools, const std
     return {input, {}};
 }
 
-static llama_tool_calls parse_functionary_v3_llama_3_1_tool_calls(const std::string& input) {
-    static std::regex function_regex(R"(<function=(\w+)>)");
-    static std::regex close_regex(R"(</function>)");
+static llama_tool_calls parse_functionary_tool_calls(const std::string& input, const std::regex & function_regex, const std::regex & close_regex) {
     std::smatch match;
 
     llama_tool_calls result;
@@ -190,22 +190,16 @@ static llama_tool_calls parse_functionary_v3_llama_3_1_tool_calls(const std::str
     return result;
 }
 
+static llama_tool_calls parse_functionary_v3_llama_3_1_tool_calls(const std::string& input) {
+    static std::regex function_regex(R"(<function=(\w+)>)");
+    static std::regex close_regex(R"(</function>)");
+    return parse_functionary_tool_calls(input, function_regex, close_regex);
+}
+
 static llama_tool_calls parse_functionary_v3_tool_calls(const std::string& input) {
-    static std::regex python_tag_regex(R"(>>>(\w+)\n((?!>>>)[\s\S\n]*))");
-    std::smatch match;
-    llama_tool_calls result;
-    std::string content;
-    std::string in = input;
-    while (std::regex_search(in, match, python_tag_regex)) {
-        content += match.prefix().str();
-        result.tool_calls.push_back({
-            match[1].str(),
-            (json {{"code", match[2].str()}}).dump(),
-        });
-        in = match.suffix().str();
-    }
-    result.content = content + in;
-    return result;
+    static std::regex function_regex(R"(>>>(\w+)\n)");
+    static std::regex close_regex(R"($|\n(?=>>>))");
+    return parse_functionary_tool_calls(input, function_regex, close_regex);
 }
 
 llama_tool_calls parse_tool_calls(const json & tools, const std::string & chat_template, const std::string& input) {
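
Aside: the change above routes both Functionary formats through a single parse_functionary_tool_calls helper, parameterized by an opening and a closing regex. Below is a minimal standalone sketch — illustrative only, not the project's code; the real helper also collects the surrounding free text into result.content and heals partial JSON via parse_json — of how that regex pair can drive the v3.2 parse loop:

// Standalone sketch of a regex-pair-driven Functionary v3.2 parse loop
// (simplified relative to parse_functionary_tool_calls in common/tool-call.cpp).
#include <iostream>
#include <regex>
#include <string>

int main() {
    static const std::regex function_regex(R"(>>>(\w+)\n)");
    static const std::regex close_regex(R"($|\n(?=>>>))");

    std::string in = ">>>ipython\n{\"code\": \"print('hi')\"}\n>>>test\n{ } \n ";
    std::smatch match;
    while (std::regex_search(in, match, function_regex)) {
        std::string name = match[1].str();       // e.g. "ipython"
        std::string rest = match.suffix().str(); // text after ">>>name\n"
        std::smatch close;
        std::regex_search(rest, close, close_regex); // always matches: "$" hits end of input
        // Arguments run until end of input or the newline just before the next ">>>".
        std::cout << name << " -> " << rest.substr(0, close.position()) << "\n";
        in = rest.substr(close.position() + close.length());
    }
    return 0;
}

The closing pattern `$|\n(?=>>>)` ends an argument block either at end of input or at the newline preceding the next `>>>` marker; because the lookahead does not consume `>>>name`, each iteration leaves the next call header intact for the following regex_search.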

examples/server/tests/features/steps/steps.py — 1 addition & 1 deletion

@@ -166,7 +166,7 @@ def step_use_jinja(context):
     context.use_jinja = True
 
 
-@step('chat template file {file}')
+@step('a chat template file {file}')
 def step_use_jinja(context, file):
     context.chat_template_file = file
 
examples/server/tests/features/tool_call.feature — 16 additions & 14 deletions

@@ -15,34 +15,36 @@ Feature: llama.cpp server
     And 64 server max tokens to predict
     And prometheus compatible metrics exposed
     And jinja templates are enabled
-    And chat template file ../../../tests/chat/templates/meta-llama-Meta-Llama-3.1-8B-Instruct.jinja
-    Then the server is starting
-    Then the server is healthy
-
-  Scenario: Health
-    Then the server is ready
-    And all slots are idle
 
+  @wip
   Scenario Outline: OAI Compatibility w/ required tool
-    Given a model test
+    Given a chat template file ../../../tests/chat/templates/<template_name>.jinja
+    And the server is starting
+    And the server is healthy
+    And a model test
     And <n> max tokens to predict
     And a user prompt write a hello world in python
     And a tool choice <tool_choice>
     And tools <tools>
-    Given an OAI compatible chat completions request with no api error
+    And an OAI compatible chat completions request with no api error
     Then tool <tool_name> is called with arguments <tool_arguments>
 
     Examples: Prompts
-      | n  | tool_name | tool_arguments      | tool_choice | tools |
-      | 64 | test      | {}                  | required    | [{"type":"function", "function": {"name": "test", "description": "", "parameters": {"type": "object", "properties": {}}}}] |
-      | 16 | ipython   | {"code": "it and "} | required    | [{"type":"function", "function": {"name": "ipython", "description": "", "parameters": {"type": "object", "properties": {"code": {"type": "string", "description": ""}}, "required": ["code"]}}}] |
+      | template_name                         | n  | tool_name | tool_arguments      | tool_choice | tools |
+      | meta-llama-Meta-Llama-3.1-8B-Instruct | 64 | test      | {}                  | required    | [{"type":"function", "function": {"name": "test", "description": "", "parameters": {"type": "object", "properties": {}}}}] |
+      | meta-llama-Meta-Llama-3.1-8B-Instruct | 16 | ipython   | {"code": "it and "} | required    | [{"type":"function", "function": {"name": "ipython", "description": "", "parameters": {"type": "object", "properties": {"code": {"type": "string", "description": ""}}, "required": ["code"]}}}] |
+      | meetkai-functionary-medium-v3.2       | 64 | test      | {}                  | required    | [{"type":"function", "function": {"name": "test", "description": "", "parameters": {"type": "object", "properties": {}}}}] |
+      | meetkai-functionary-medium-v3.2       | 64 | ipython   | {"code": "Yes,"}    | required    | [{"type":"function", "function": {"name": "ipython", "description": "", "parameters": {"type": "object", "properties": {"code": {"type": "string", "description": ""}}, "required": ["code"]}}}] |
 
   Scenario: OAI Compatibility w/ no tool
-    Given a model test
+    Given a chat template file ../../../tests/chat/templates/meta-llama-Meta-Llama-3.1-8B-Instruct.jinja
+    And the server is starting
+    And the server is healthy
+    And a model test
     And 16 max tokens to predict
     And a user prompt write a hello world in python
     And a tool choice <tool_choice>
     And tools []
-    Given an OAI compatible chat completions request with no api error
+    And an OAI compatible chat completions request with no api error
     Then no tool is called
 
tests/test-tool-call.cpp — 10 additions & 1 deletion

@@ -74,7 +74,7 @@ int main() {
 
     std::string functionary_v3_like_tmpl = "Functionary 3.2 template should have <|start_header_id|> and then some >>>all inside it";
     test_parse_tool_call(tools, functionary_v3_like_tmpl,
-        ">>>ipython\nprint('Hello, world!')",
+        ">>>ipython\n{\"code\": \"print('Hello, world!')\"}",
         "",
         json {{
             {"function", {
@@ -84,6 +84,15 @@ int main() {
                 }).dump()}
             }}
         }});
+    test_parse_tool_call(tools, functionary_v3_like_tmpl,
+        ">>>test\n{ } \n ",
+        "",
+        json {{
+            {"function", {
+                {"name", "test"},
+                {"arguments", "{}"}
+            }}
+        }});
 
     std::string functionary_v3_llama_3_1_like_tmpl = "Functionary 3.2 template for llama 3.1 should have <|start_header_id|> and then some <function=foo>{...}</function> inside it";
     test_parse_tool_call(tools, functionary_v3_llama_3_1_like_tmpl,
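
The added `">>>test\n{ } \n "` case pins down whitespace handling: the argument text captured for `test` is `{ } \n `, yet the expected "arguments" string is the canonical `{}`. A quick standalone check of that assumption, using nlohmann::json (the library behind llama.cpp's `json` alias):

// Sketch (assumes nlohmann/json.hpp is available): whitespace around a JSON
// value parses cleanly, and dump() re-serializes it in canonical form.
#include <cassert>
#include <nlohmann/json.hpp>

int main() {
    auto args = nlohmann::json::parse("{ } \n ");  // surrounding whitespace is legal JSON
    assert(args.dump() == "{}");                   // matches the test's expected "arguments"
    return 0;
}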
