Skip to content

Commit 24bd77c

Browse files
committed
checkpoint
1 parent e98dd36 commit 24bd77c

File tree

1 file changed

+2
-1
lines changed

1 file changed

+2
-1
lines changed

llama_cpp/llama_chat_template.py

Lines changed: 2 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -486,6 +486,8 @@ def _handle_streaming_tool_calls(
486486
):
487487
param_text = param_chunk["choices"][0]["text"]
488488
# Convert to chat completion chunk and yield
489+
print(f"param_text: {param_text}")
490+
accumulated_text += param_text
489491
yield {
490492
"id": "chat" + name_completion["id"],
491493
"object": "chat.completion.chunk",
@@ -509,7 +511,6 @@ def _handle_streaming_tool_calls(
509511
"finish_reason": None
510512
}]
511513
}
512-
accumulated_text += param_text
513514

514515
# After completing the tool call parameters, continue with more completions
515516
# Recursively handle the next completion by starting a new generation

0 commit comments

Comments
 (0)