Commit 13e58c1

Merge pull request open-webui#10360 from ferret99gt/ollama-option-conversion-fixes
fix: Update Ollama option handling in payload.py's convert_payload_openai_to_ollama
2 parents: d5a049d + 8125b04
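For context, convert_payload_openai_to_ollama receives an OpenAI-style chat-completion payload, and after this change the advanced parameters are read from the payload's options dict rather than from top-level keys. A hypothetical input illustrating the case this commit targets (the field values here are invented for illustration):

    openai_payload = {
        "model": "llama3",
        "messages": [{"role": "user", "content": "Hello"}],
        "options": {
            "temperature": 0.7,          # passed through inside options unchanged
            "max_tokens": 256,           # remapped to num_predict by this commit
            "system": "Reply briefly.",  # hoisted to a top-level parameter by this commit
        },
    }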

File tree

1 file changed: +10 −25 lines


backend/open_webui/utils/payload.py

Lines changed: 10 additions & 25 deletions
@@ -212,34 +212,19 @@ def convert_payload_openai_to_ollama(openai_payload: dict) -> dict:
         ollama_payload["format"] = openai_payload["format"]
 
     # If there are advanced parameters in the payload, format them in Ollama's options field
-    ollama_options = {}
-
     if openai_payload.get("options"):
         ollama_payload["options"] = openai_payload["options"]
         ollama_options = openai_payload["options"]
-
-    # Handle parameters which map directly
-    for param in ["temperature", "top_p", "seed"]:
-        if param in openai_payload:
-            ollama_options[param] = openai_payload[param]
-
-    # Mapping OpenAI's `max_tokens` -> Ollama's `num_predict`
-    if "max_completion_tokens" in openai_payload:
-        ollama_options["num_predict"] = openai_payload["max_completion_tokens"]
-    elif "max_tokens" in openai_payload:
-        ollama_options["num_predict"] = openai_payload["max_tokens"]
-
-    # Handle frequency / presence_penalty, which needs renaming and checking
-    if "frequency_penalty" in openai_payload:
-        ollama_options["repeat_penalty"] = openai_payload["frequency_penalty"]
-
-    if "presence_penalty" in openai_payload and "penalty" not in ollama_options:
-        # We are assuming presence penalty uses a similar concept in Ollama, which needs custom handling if exists.
-        ollama_options["new_topic_penalty"] = openai_payload["presence_penalty"]
-
-    # Add options to payload if any have been set
-    if ollama_options:
-        ollama_payload["options"] = ollama_options
+
+        # Re-Mapping OpenAI's `max_tokens` -> Ollama's `num_predict`
+        if "max_tokens" in ollama_options:
+            ollama_options["num_predict"] = ollama_options["max_tokens"]
+            del ollama_options["max_tokens"]  # To prevent Ollama warning of invalid option provided
+
+        # Ollama lacks a "system" prompt option. It has to be provided as a direct parameter, so we copy it down.
+        if "system" in ollama_options:
+            ollama_payload["system"] = ollama_options["system"]
+            del ollama_options["system"]  # To prevent Ollama warning of invalid option provided
 
     if "metadata" in openai_payload:
         ollama_payload["metadata"] = openai_payload["metadata"]
