1 parent 24bd77c · commit 7a9bb55
llama_cpp/llama_chat_template.py
@@ -536,7 +536,7 @@ def _handle_streaming_tool_calls(
         # Keep streaming normally until we find a tool call
         yield from _convert_text_completion_chunks_to_chat(iter([chunk]))
 
-
+# TODO: Non streaming might not work as expected yet, we need to test it
 def _handle_non_streaming_tool_calls(
     completion: llama_types.CreateCompletionResponse,
     tools: List[llama_types.ChatCompletionTool],
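For orientation only, here is a minimal sketch of what a non-streaming tool-call handler along these lines could do: scan the finished completion text for a tool-call payload and promote it to an OpenAI-style `tool_calls` message. This is not the implementation in llama_chat_template.py; the `<tool_call>…</tool_call>` marker format, the plain-dict response shape, and the name `handle_non_streaming_tool_calls_sketch` are all assumptions made for illustration.

```python
import json
import re
import uuid
from typing import Any, Dict, List

# Assumed marker format; a real chat template may use a different syntax.
_TOOL_CALL_RE = re.compile(r"<tool_call>\s*(\{.*?\})\s*</tool_call>", re.DOTALL)


def handle_non_streaming_tool_calls_sketch(
    completion: Dict[str, Any],
    tools: List[Dict[str, Any]],
) -> Dict[str, Any]:
    """Convert a finished text completion into a chat-completion-style dict,
    promoting embedded tool-call JSON to `tool_calls` when present."""
    text = completion["choices"][0]["text"]
    known_names = {t["function"]["name"] for t in tools}

    tool_calls = []
    for match in _TOOL_CALL_RE.finditer(text):
        try:
            payload = json.loads(match.group(1))
        except json.JSONDecodeError:
            continue  # Malformed JSON: leave it as plain text content.
        if payload.get("name") not in known_names:
            continue  # Ignore calls to tools the caller never declared.
        tool_calls.append(
            {
                "id": f"call_{uuid.uuid4().hex[:24]}",
                "type": "function",
                "function": {
                    "name": payload["name"],
                    "arguments": json.dumps(payload.get("arguments", {})),
                },
            }
        )

    message: Dict[str, Any] = {
        "role": "assistant",
        "content": None if tool_calls else text,
    }
    if tool_calls:
        message["tool_calls"] = tool_calls

    return {
        "id": "chat" + completion["id"],
        "object": "chat.completion",
        "created": completion["created"],
        "model": completion["model"],
        "choices": [
            {
                "index": 0,
                "message": message,
                "finish_reason": (
                    "tool_calls" if tool_calls
                    else completion["choices"][0]["finish_reason"]
                ),
            }
        ],
        "usage": completion.get("usage", {}),
    }
```

Whether the actual non-streaming path behaves this way is exactly what the TODO flags as still needing testing.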