
Commit 8550b76
Move function format specification to function_tool.py
1 parent af0a9fa

2 files changed (+59, -39 lines)


examples/function-calling/function_tool.py

Lines changed: 31 additions & 1 deletion
@@ -3,6 +3,8 @@
 import inspect
 import re
 
+import json
+
 # Extract OpenAI function calling style definitions from functions
 #
 # Generated with: Create a python function to to generate the OpenAI function calling definition from a given function, getting the description, parameter type and parameter description from the function documentation, assuming the function documentation contains sphynx style parameter descriptions, marked with :param.
@@ -36,7 +38,7 @@ def get_type(s):
 # Generate function definition schema from function definitions
 #
 # This is from llama-cpp-python, llama_chat_format.py
-def generate_schema_from_functions(functions, namespace="functions") -> str:
+def generate_functionary_schema_from_functions(functions, namespace="functions") -> str:
     schema = (
         "// Supported function definitions that should be called when necessary.\n"
     )
@@ -61,3 +63,31 @@ def generate_schema_from_functions(functions, namespace="functions") -> str:
 
     schema += "}} // namespace {}".format(namespace)
     return schema
+
+functionary_prompt_start = """<|start_header_id|>system<|end_header_id|>
+
+You are capable of executing available function(s) if required.
+Execute function(s) as needed.
+The function calls are not shown in the conversation and should be called covertly to answer questions.
+Ask for the required input to:recipient==all
+Use JSON for function arguments.
+Respond in this format:
+>>>${recipient}
+${content}
+Available functions:
+"""
+functionary_prompt_end = """<|eot_id|><|start_header_id|>system<|end_header_id|>
+
+When you send a message containing Python code to python, it will be executed in a stateful Jupyter notebook environment. python will respond with the output of the execution or time out after 60.0 seconds. The drive at '/mnt/data' can be used to save and persist user files.<|eot_id|><|start_header_id|>user<|end_header_id|>
+"""
+
+def get_chat_tool_format(args, tools):
+    return {
+        'prompt': functionary_prompt_start + generate_functionary_schema_from_functions(tools) + functionary_prompt_end,
+        'function_marker': '>>>',
+        'function_re': r'>>>([^\n]*)\n(.*)<\|eot_id\|>',
+        'user_start': '<|start_header_id|>user<|end_header_id|>\n',
+        'user_end': '<|eot_id|><|start_header_id|>assistant<|end_header_id|>' + '\n',
+        'tool_start': '',
+        'tool_end': '<|eot_id|><|start_header_id|>assistant<|end_header_id|>'
+    }
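
Note (not part of the commit): a minimal sketch of how the new get_chat_tool_format helper is meant to be consumed, assuming the examples/function-calling modules (functions, function_tool) are importable from the working directory. The returned dict bundles the functionary-style system prompt with the markers and regex the runner uses to detect and answer tool calls; in this version the args parameter is accepted but not read.

    # Sketch only: build the tool list and fetch the chat/tool format.
    import functions
    from function_tool import get_function_tool_json, get_chat_tool_format

    tools = [get_function_tool_json(getattr(functions, n))
             for n in dir(functions) if not n.startswith('_')]

    fmt = get_chat_tool_format(None, tools)  # args is unused by this format, so None works here
    print(fmt['prompt'])            # system prompt with the generated function schema embedded
    print(fmt['function_marker'])   # '>>>' prefix the model emits before a function call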

examples/function-calling/llama-cli-function-runner.py

Lines changed: 28 additions & 38 deletions
@@ -10,42 +10,29 @@
 import json
 
 import functions
-from function_tool import get_function_tool_json, generate_schema_from_functions
+from function_tool import get_function_tool_json, get_chat_tool_format
 
 function_name_list = [ name for name in dir(functions) if not name.startswith('_') ]
 function_lookup = { name: getattr(functions, name) for name in function_name_list }
 tools = [ get_function_tool_json(f) for (n, f) in function_lookup.items() ]
-function_schema = generate_schema_from_functions(tools)
-
-prompt = """<|start_header_id|>system<|end_header_id|>
-
-You are capable of executing available function(s) if required.
-Execute function(s) as needed.
-The function calls are not shown in the conversation and should be called covertly to answer questions.
-Ask for the required input to:recipient==all
-Use JSON for function arguments.
-Respond in this format:
->>>${recipient}
-${content}
-Available functions:
-""" + function_schema + """<|eot_id|><|start_header_id|>system<|end_header_id|>
-
-When you send a message containing Python code to python, it will be executed in a stateful Jupyter notebook environment. python will respond with the output of the execution or time out after 60.0 seconds. The drive at '/mnt/data' can be used to save and persist user files.<|eot_id|><|start_header_id|>user<|end_header_id|>
-"""
 
 def main():
     import argparse
 
     parser = argparse.ArgumentParser(epilog='For more options: llama-cli --help')
     parser.add_argument('--display-prompt', action=argparse.BooleanOptionalAction, default=False)
     parser.add_argument('--special', action=argparse.BooleanOptionalAction, default=False)
-    parser.add_argument('--reverse-prompt', type=str, default='<|start_header_id|>user<|end_header_id|>\n')
+    parser.add_argument('--reverse-prompt', type=str)
     parser.add_argument('--ctx-size', type=int, default=1024)
     args, other_args = parser.parse_known_args()
 
-    if args.display_prompt: print(prompt)
+    tool_format = get_chat_tool_format(args, tools)
+    if args.reverse_prompt is None: args.reverse_prompt = tool_format['user_start']
 
-    command = [ './llama-cli', '-i', '-p', prompt, '--reverse-prompt', args.reverse_prompt, '--escape', '--special', '--no-display-prompt', '--log-disable', '--simple-io', '--ctx-size', str(args.ctx_size), *other_args]
+    if args.display_prompt: print(tool_format['prompt'])
+
+    command = [ './llama-cli', '-i', '-p', tool_format['prompt'], '--reverse-prompt', args.reverse_prompt, '--escape', '--special', '--no-display-prompt', '--log-disable', '--simple-io', '--ctx-size', str(args.ctx_size), *other_args]
+    print("'" + "' '".join(command) + "'")
 
     process = subprocess.Popen(
         command,
@@ -57,14 +44,14 @@ def main():
     if process.stdout is not None: os.set_blocking(process.stdout.fileno(), False)
 
     try:
-        run_loop(process, args)
+        run_loop(process, args, tool_format)
     except KeyboardInterrupt:
         print("\nInterrupted by user.")
     finally:
         process.terminate()
         process.wait()
 
-def run_loop(process, args):
+def run_loop(process, args, tool_format):
     pbuffer = ''
     skip_output_until_result = False
     while True:
@@ -76,29 +63,32 @@ def run_loop(process, args):
         if not pdata: continue
         pbuffer += pdata
 
-        if(match := re.search(r'>>>([^\n]*)\n(.*)<\|eot_id\|>', pbuffer, re.S)):
+        if(match := re.search(tool_format['function_re'], pbuffer, re.S)):
             if not args.special:
                 pdata = pdata[:match.pos]
             pbuffer = ''
             skip_output_until_result = False
-
-            tool_name = match.group(1)
-            tool_args = match.group(2)
-
-            if tool_name == 'python':
-                result = functions._run_python(tool_args);
-            else:
-                try:
-                    tool_args = json.loads(tool_args)
+            try:
+                if 1 < len(match.groups()):
+                    tool_name = match.group(1)
+                    tool_args = json.loads(match.group(2))
+                else:
+                    tool = json.loads(match.group(1))
+                    tool_name = tool['name']
+                    tool_args = tool['arguments']
+
+                if tool_name == 'python':
+                    result = functions._run_python(tool_args);
+                else:
                     result = function_lookup[tool_name](**tool_args)
-                except ValueError as e:
-                    result = {'error': 'unknown'}
+            except ValueError as e:
+                result = {'error': 'unknown'}
 
-            result = json.dumps(result) + '<|eot_id|><|start_header_id|>assistant<|end_header_id|>'
+            result = tool_format['tool_start'] + json.dumps(result) + tool_format['tool_end']
            process.stdin.write(result + '\n')
             process.stdin.flush()
             if(args.special): pdata += '\n' + result
-        elif (n := pdata.find('>>>')) >= 0:
+        elif (n := pdata.find(tool_format['function_marker'])) >= 0:
             if not args.special:
                 pdata = pdata[:n]
             skip_output_until_result = True
@@ -114,7 +104,7 @@ def run_loop(process, args):
             user_input = sys.stdin.readline()
             if user_input:
                 user_input = user_input.rstrip()
-                process.stdin.write(user_input + '<|eot_id|><|start_header_id|>assistant<|end_header_id|>' + '\n')
+                process.stdin.write(user_input + tool_format['user_end'] + '\n')
                 process.stdin.flush()
 
 if __name__ == '__main__':
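
Note (not part of the commit): a self-contained sketch of the parsing step that run_loop now drives through tool_format, using invented model output. It shows how function_re extracts the recipient and JSON arguments from a '>>>' block and how the result is wrapped with tool_start/tool_end before being written back to llama-cli; the tool result here is a stand-in, not a real function call.

    # Standalone sketch; the model output and tool result below are made up.
    import json, re

    tool_format = {
        'function_re': r'>>>([^\n]*)\n(.*)<\|eot_id\|>',
        'tool_start': '',
        'tool_end': '<|eot_id|><|start_header_id|>assistant<|end_header_id|>',
    }

    pbuffer = '>>>get_weather\n{"location": "Berlin"}<|eot_id|>'  # hypothetical model output

    if (match := re.search(tool_format['function_re'], pbuffer, re.S)):
        if 1 < len(match.groups()):          # functionary style: name line, then JSON args
            tool_name = match.group(1)
            tool_args = json.loads(match.group(2))
        else:                                # single-group style: one JSON object
            tool = json.loads(match.group(1))
            tool_name, tool_args = tool['name'], tool['arguments']
        result = {'name': tool_name, 'args': tool_args}   # stand-in for calling the real function
        print(tool_format['tool_start'] + json.dumps(result) + tool_format['tool_end'])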
