3 files changed: +20 −0 lines changed

@@ -1435,6 +1435,8 @@ def _convert_text_completion_chunks_to_chat(
     def create_chat_completion(
         self,
         messages: List[ChatCompletionMessage],
+        functions: Optional[List[ChatCompletionFunction]] = None,
+        function_call: Optional[Union[str, ChatCompletionFunctionCall]] = None,
         temperature: float = 0.2,
         top_p: float = 0.95,
         top_k: int = 40,
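The new keyword arguments mirror the OpenAI chat-completions interface. A minimal usage sketch, assuming a locally available GGML model and an illustrative `get_weather` schema (neither the model path nor the schema comes from this PR, and this diff only adds the parameters to the signature, it does not show how they are consumed):

```python
# Hypothetical usage sketch; model path and function schema are illustrative.
from llama_cpp import Llama

llm = Llama(model_path="./models/7B/ggml-model.bin")  # assumed local model path

response = llm.create_chat_completion(
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "What is the weather in Berlin?"},
    ],
    functions=[
        {
            "name": "get_weather",  # illustrative function schema
            "description": "Get the current weather for a city.",
            "parameters": {
                "type": "object",
                "properties": {"city": {"type": "string"}},
                "required": ["city"],
            },
        }
    ],
    # "auto" follows the OpenAI convention; a ChatCompletionFunctionCall dict
    # such as {"name": "get_weather"} is also accepted by the type.
    function_call="auto",
)
print(response["choices"][0]["message"])
```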
@@ -63,6 +63,16 @@ class ChatCompletionMessage(TypedDict):
     user: NotRequired[str]


+class ChatCompletionFunction(TypedDict):
+    name: str
+    description: NotRequired[str]
+    parameters: Dict[str, Any]  # TODO: make this more specific
+
+
+class ChatCompletionFunctionCall(TypedDict):
+    name: str
+
+
 class ChatCompletionChoice(TypedDict):
     index: int
     message: ChatCompletionMessage
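Since these are `TypedDict`s, callers supply plain dictionaries. A hedged sketch of values that satisfy the two new types; the top-level import is inferred from the server diff below, which refers to `llama_cpp.ChatCompletionFunction`, and the schema itself is illustrative:

```python
# Illustrative only: plain dicts that type-check against the new TypedDicts.
from llama_cpp import ChatCompletionFunction, ChatCompletionFunctionCall

get_weather: ChatCompletionFunction = {
    "name": "get_weather",
    "description": "Get the current weather for a city.",  # NotRequired
    "parameters": {  # free-form JSON schema for now, per the TODO above
        "type": "object",
        "properties": {"city": {"type": "string"}},
        "required": ["city"],
    },
}

forced_call: ChatCompletionFunctionCall = {"name": "get_weather"}
```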
@@ -446,6 +446,14 @@ class CreateChatCompletionRequest(BaseModel):
     messages: List[ChatCompletionRequestMessage] = Field(
         default=[], description="A list of messages to generate completions for."
     )
+    functions: Optional[List[llama_cpp.ChatCompletionFunction]] = Field(
+        default=None,
+        description="A list of functions to apply to the generated completions.",
+    )
+    function_call: Optional[Union[str, llama_cpp.ChatCompletionFunctionCall]] = Field(
+        default=None,
+        description="A function to apply to the generated completions.",
+    )
     max_tokens: int = max_tokens_field
     temperature: float = temperature_field
     top_p: float = top_p_field
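On the server these fields only extend the Pydantic request model, so an OpenAI-style JSON body can now carry them. A hedged request sketch, assuming a `llama_cpp.server` instance on localhost:8000 exposing the usual `/v1/chat/completions` route (the host, port, and route are assumptions, not part of this diff):

```python
# Hedged sketch of a chat-completion request carrying the new fields.
import json
import urllib.request

payload = {
    "messages": [{"role": "user", "content": "What is the weather in Berlin?"}],
    "functions": [
        {
            "name": "get_weather",  # illustrative schema, not from this PR
            "description": "Get the current weather for a city.",
            "parameters": {
                "type": "object",
                "properties": {"city": {"type": "string"}},
                "required": ["city"],
            },
        }
    ],
    "function_call": "auto",
}

req = urllib.request.Request(
    "http://localhost:8000/v1/chat/completions",  # assumed server address
    data=json.dumps(payload).encode("utf-8"),
    headers={"Content-Type": "application/json"},
)
with urllib.request.urlopen(req) as resp:
    print(json.load(resp))
```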