Skip to content

Commit b201d43

Browse files
committed
improve gguf-function-calling parser
1 parent 9a44b43 commit b201d43

File tree

1 file changed

+17
-3
lines changed

1 file changed

+17
-3
lines changed

llama_cpp/llama_chat_format.py

Lines changed: 17 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -3975,9 +3975,23 @@ def chatml_function_calling(
39753975
),
39763976
)
39773977
text = completion["choices"][0]["text"]
3978-
tool_name = None if text.startswith("message") else text.split("\n")[-1][len("functions.") :]
3978+
# Extract message content and/or function call
3979+
tool_name = None
3980+
message_content = None
3981+
3982+
if text.startswith("message:"):
3983+
# Handle message with or without function call
3984+
parts = text.split("<function_calls>", 1)
3985+
message_content = parts[0][len("message:"):].strip()
3986+
if len(parts) > 1:
3987+
# Has both message and function call
3988+
function_text = parts[1].strip()
3989+
tool_name = function_text.split("\n")[0][len("functions."):].rstrip(":")
3990+
else:
3991+
# Only function call
3992+
tool_name = text.split("\n")[0][len("<function_calls>\nfunctions."):].rstrip(":")
39793993

3980-
# Case 2 step 2A: Respond with a message
3994+
# Case 2 step 2A: Message only
39813995
if tool_name is None:
39823996
prompt = template_renderer.render(
39833997
messages=messages, tools=[], tool_calls=None, add_generation_prompt=True
@@ -4067,7 +4081,7 @@ def chatml_function_calling(
40674081
),
40684082
"message": {
40694083
"role": "assistant",
4070-
"content": None,
4084+
"content": message_content, # Include message content when present
40714085
"tool_calls": [
40724086
{
40734087
"id": "call_" + f"_{i}_" + tool_name + "_" + completion["id"],

0 commit comments

Comments (0)