File tree (expand/collapse): 1 file changed, +6 −3 lines changed
lines changed Original file line number Diff line number Diff line change @@ -137,6 +137,7 @@ def get_supported_openai_params(self, model: str):
137
137
"tool_choice" ,
138
138
"functions" ,
139
139
"response_format" ,
140
+ "reasoning_effort" ,
140
141
]
141
142
142
143
def map_openai_params (
@@ -175,6 +176,8 @@ def map_openai_params(
175
176
if value .get ("json_schema" ) and value ["json_schema" ].get ("schema" ):
176
177
optional_params ["format" ] = value ["json_schema" ]["schema" ]
177
178
### FUNCTION CALLING LOGIC ###
179
+ if param == "reasoning_effort" and value is not None :
180
+ optional_params ["think" ] = True
178
181
if param == "tools" :
179
182
## CHECK IF MODEL SUPPORTS TOOL CALLING ##
180
183
try :
@@ -212,9 +215,9 @@ def map_openai_params(
212
215
litellm .add_function_to_prompt = (
213
216
True # so that main.py adds the function call to the prompt
214
217
)
215
- optional_params [
216
- "functions_unsupported_model"
217
- ] = non_default_params . get ( "functions" )
218
+ optional_params ["functions_unsupported_model" ] = (
219
+ non_default_params . get ( "functions" )
220
+ )
218
221
non_default_params .pop ("tool_choice" , None ) # causes ollama requests to hang
219
222
non_default_params .pop ("functions" , None ) # causes ollama requests to hang
220
223
return optional_params
You can’t perform that action at this time.
0 commit comments