Commit 3a68ca5

fix(ollama/chat): add 'think' param support
Parent: 130c1dd


litellm/llms/ollama/chat/transformation.py

Lines changed: 6 additions & 3 deletions
@@ -137,6 +137,7 @@ def get_supported_openai_params(self, model: str):
             "tool_choice",
             "functions",
             "response_format",
+            "reasoning_effort",
         ]

     def map_openai_params(
@@ -175,6 +176,8 @@ def map_openai_params(
                     if value.get("json_schema") and value["json_schema"].get("schema"):
                         optional_params["format"] = value["json_schema"]["schema"]
             ### FUNCTION CALLING LOGIC ###
+            if param == "reasoning_effort" and value is not None:
+                optional_params["think"] = True
             if param == "tools":
                 ## CHECK IF MODEL SUPPORTS TOOL CALLING ##
                 try:
@@ -212,9 +215,9 @@ def map_openai_params(
                     litellm.add_function_to_prompt = (
                         True  # so that main.py adds the function call to the prompt
                     )
-                    optional_params[
-                        "functions_unsupported_model"
-                    ] = non_default_params.get("functions")
+                    optional_params["functions_unsupported_model"] = (
+                        non_default_params.get("functions")
+                    )
         non_default_params.pop("tool_choice", None)  # causes ollama requests to hang
         non_default_params.pop("functions", None)  # causes ollama requests to hang
         return optional_params
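
For context, a minimal usage sketch of the behaviour this commit adds (assumptions: litellm is installed, an Ollama server is running locally, and the model name "ollama_chat/qwen3" is illustrative and not taken from the commit):

import litellm

# Passing the OpenAI-style "reasoning_effort" parameter to an ollama_chat model.
# With this commit, map_openai_params (shown in the diff above) translates any
# non-None reasoning_effort value into Ollama's "think": True request option,
# enabling the model's thinking mode.
response = litellm.completion(
    model="ollama_chat/qwen3",  # illustrative; any thinking-capable Ollama model
    messages=[{"role": "user", "content": "What is 17 * 24?"}],
    reasoning_effort="low",  # any non-None value is mapped to {"think": True}
)
print(response.choices[0].message.content)

Note that the mapping is binary: the specific effort level ("low", "medium", "high") is not forwarded, only the presence of the parameter turns thinking on.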
