 from __future__ import annotations
 
 import os
+from pathlib import Path
 
 from openai.types.chat import (
     ChatCompletion,
@@ -54,10 +55,11 @@ def is_prompt_supported(
         tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN,
         top_logprobs: Optional[int] | NotGiven = NOT_GIVEN,
         top_p: Optional[float] | NotGiven = NOT_GIVEN,
+        file: Path | NotGiven = NOT_GIVEN,
     ) -> int:
         for client in self.__clients:
             if client.is_model_supported(model):
-                return client.is_prompt_supported(
+                inputs = dict(
                     messages=messages,
                     model=model,
                     frequency_penalty=frequency_penalty,
@@ -74,6 +76,9 @@ def is_prompt_supported(
                     top_logprobs=top_logprobs,
                     top_p=top_p,
                 )
+                if file is not NOT_GIVEN:  # forward `file` only when explicitly provided
+                    inputs["file"] = file
+                return client.is_prompt_supported(**inputs)
         return -1
 
     def truncate_messages(
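The guard added above hinges on openai's NOT_GIVEN sentinel, which is a singleton instance of the NotGiven class: testing identity against the class itself (`file is not NotGiven`) is always true, so the check must compare against the sentinel instance. A minimal sketch of the distinction; `wants_file` is a made-up helper, not part of this codebase:

from __future__ import annotations

from openai import NOT_GIVEN, NotGiven


def wants_file(file: str | NotGiven = NOT_GIVEN) -> bool:
    # NOT_GIVEN is an *instance* of NotGiven, so identity must be
    # checked against the sentinel instance, never the class.
    return file is not NOT_GIVEN


assert wants_file() is False             # argument omitted: sentinel left in place
assert wants_file("clip.mp3") is True    # argument explicitly provided
assert NOT_GIVEN is not NotGiven         # class comparison is always true: the bug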
@@ -101,27 +106,31 @@ def chat_completion(
         tool_choice: ChatCompletionToolChoiceOptionParam | NotGiven = NOT_GIVEN,
         top_logprobs: Optional[int] | NotGiven = NOT_GIVEN,
         top_p: Optional[float] | NotGiven = NOT_GIVEN,
+        file: Path | NotGiven = NOT_GIVEN,
     ) -> ChatCompletion:
         for client in self.__clients:
             if client.is_model_supported(model):
                 logger.debug(f"Using {client.__class__.__name__} for model {model}")
-                return client.chat_completion(
-                    messages,
-                    model,
-                    frequency_penalty,
-                    logit_bias,
-                    logprobs,
-                    max_tokens,
-                    n,
-                    presence_penalty,
-                    response_format,
-                    stop,
-                    temperature,
-                    tools,
-                    tool_choice,
-                    top_logprobs,
-                    top_p,
+                inputs = dict(
+                    messages=messages,
+                    model=model,
+                    frequency_penalty=frequency_penalty,
+                    logit_bias=logit_bias,
+                    logprobs=logprobs,
+                    max_tokens=max_tokens,
+                    n=n,
+                    presence_penalty=presence_penalty,
+                    response_format=response_format,
+                    stop=stop,
+                    temperature=temperature,
+                    tools=tools,
+                    tool_choice=tool_choice,
+                    top_logprobs=top_logprobs,
+                    top_p=top_p,
                 )
+                if file is not NOT_GIVEN:  # same guard as in is_prompt_supported
+                    inputs["file"] = file
+                return client.chat_completion(**inputs)
         client_names = [client.__class__.__name__ for client in self.__original_clients]
         raise ValueError(
             f"Model {model} is not supported by {client_names} clients. "