
Commit 7ba717d

refac: system prompt template
1 parent: fb92694

File tree: 4 files changed (+20, -6 lines)


backend/open_webui/functions.py

Lines changed: 1 addition & 1 deletion
@@ -250,7 +250,7 @@ def get_function_params(function_module, form_data, user, extra_params=None):
 
         params = model_info.params.model_dump()
         form_data = apply_model_params_to_body_openai(params, form_data)
-        form_data = apply_model_system_prompt_to_body(params, form_data, user)
+        form_data = apply_model_system_prompt_to_body(params, form_data, metadata, user)
 
     pipe_id = get_pipe_id(form_data)
     function_module = get_function_module_by_id(request, pipe_id)
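
For orientation, this is the call shape the commit standardizes across functions.py, both Ollama router handlers, and the OpenAI router: the helper now receives the per-chat metadata and the verified user together. A minimal sketch, assuming an Open WebUI environment where the module is importable; the request data here is placeholder content, not taken from the diff:

# Sketch only: illustrates the new 4-argument call, not the production wiring.
from open_webui.utils.payload import apply_model_system_prompt_to_body

params = {"system": "You are a concise assistant."}            # model params (from model_info)
form_data = {"messages": [{"role": "user", "content": "Hi"}]}  # request body, mutated in place
metadata = None  # WebUI chats pass per-chat metadata (e.g. template variables) here
user = None      # pure API callers may have no user object; both None values are accepted

form_data = apply_model_system_prompt_to_body(params, form_data, metadata, user)
print(form_data["messages"])  # the configured system prompt should now be applied to the body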

backend/open_webui/routers/ollama.py

Lines changed: 4 additions & 2 deletions
@@ -1007,7 +1007,7 @@ async def generate_chat_completion(
             payload["options"] = apply_model_params_to_body_ollama(
                 params, payload["options"]
             )
-            payload = apply_model_system_prompt_to_body(params, payload, metadata)
+            payload = apply_model_system_prompt_to_body(params, payload, metadata, user)
 
     # Check if user has access to the model
     if not bypass_filter and user.role == "user":
@@ -1160,6 +1160,8 @@ async def generate_openai_chat_completion(
     url_idx: Optional[int] = None,
     user=Depends(get_verified_user),
 ):
+    metadata = form_data.pop("metadata", None)
+
     try:
         completion_form = OpenAIChatCompletionForm(**form_data)
     except Exception as e:
@@ -1186,7 +1188,7 @@ async def generate_openai_chat_completion(
 
         if params:
             payload = apply_model_params_to_body_openai(params, payload)
-            payload = apply_model_system_prompt_to_body(params, payload, user)
+            payload = apply_model_system_prompt_to_body(params, payload, metadata, user)
 
     # Check if user has access to the model
     if user.role == "user":
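
The second hunk above pops `metadata` off the raw request body before `OpenAIChatCompletionForm(**form_data)` is constructed, so the per-chat metadata stays available for the templating call further down without travelling inside the validated form. A small self-contained illustration of that pattern, using a stand-in Pydantic model rather than the real form class (field names and variable keys are illustrative):

from pydantic import BaseModel


# Stand-in for OpenAIChatCompletionForm; the field set is illustrative only.
class ChatCompletionForm(BaseModel):
    model: str
    messages: list


form_data = {
    "model": "llama3",
    "messages": [{"role": "user", "content": "Hi"}],
    "metadata": {"variables": {"{{CURRENT_DATE}}": "2024-01-01"}},  # illustrative keys
}

# Detach metadata first: it is meant for prompt templating, not for the upstream payload.
metadata = form_data.pop("metadata", None)
completion_form = ChatCompletionForm(**form_data)

print(metadata["variables"])  # still available for apply_model_system_prompt_to_body
print(completion_form.model)  # "llama3"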

backend/open_webui/routers/openai.py

Lines changed: 1 addition & 1 deletion
@@ -566,7 +566,7 @@ async def generate_chat_completion(
 
         params = model_info.params.model_dump()
         payload = apply_model_params_to_body_openai(params, payload)
-        payload = apply_model_system_prompt_to_body(params, payload, metadata)
+        payload = apply_model_system_prompt_to_body(params, payload, metadata, user)
 
     # Check if user has access to the model
     if not bypass_filter and user.role == "user":

backend/open_webui/utils/payload.py

Lines changed: 14 additions & 2 deletions
@@ -1,4 +1,4 @@
-from open_webui.utils.task import prompt_variables_template
+from open_webui.utils.task import prompt_template, prompt_variables_template
 from open_webui.utils.misc import (
     add_or_update_system_message,
 )
@@ -8,12 +8,24 @@
 
 # inplace function: form_data is modified
 def apply_model_system_prompt_to_body(
-    params: dict, form_data: dict, metadata: Optional[dict] = None
+    params: dict, form_data: dict, metadata: Optional[dict] = None, user=None
 ) -> dict:
     system = params.get("system", None)
     if not system:
         return form_data
 
+    # Legacy (API Usage)
+    if user:
+        template_params = {
+            "user_name": user.name,
+            "user_location": user.info.get("location") if user.info else None,
+        }
+    else:
+        template_params = {}
+
+    system = prompt_template(system, **template_params)
+
+    # Metadata (WebUI Usage)
     if metadata:
         variables = metadata.get("variables", {})
         if variables:
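
To make the payload.py change concrete: the helper now runs two templating passes over the configured system prompt, first the legacy user-derived placeholders (API usage), then the per-chat metadata variables it already handled (WebUI usage). Below is a self-contained sketch of that ordering with stand-ins for `prompt_template` and `prompt_variables_template`; the placeholder names ({{USER_NAME}}, {{USER_LOCATION}}) and the variable key format are assumptions for illustration, not taken from this diff:

from typing import Optional


# Stand-ins for the open_webui.utils.task helpers, written only to show
# the order of operations introduced by this commit.
def prompt_template(template: str, user_name: Optional[str] = None,
                    user_location: Optional[str] = None) -> str:
    # Legacy placeholders, filled from the verified user (assumed names).
    if user_name is not None:
        template = template.replace("{{USER_NAME}}", user_name)
    if user_location is not None:
        template = template.replace("{{USER_LOCATION}}", user_location)
    return template


def prompt_variables_template(template: str, variables: dict) -> str:
    # Per-chat metadata variables, e.g. {"{{CURRENT_DATE}}": "2024-01-01"}.
    for key, value in variables.items():
        template = template.replace(key, str(value))
    return template


class User:
    def __init__(self, name: str, info: Optional[dict] = None):
        self.name = name
        self.info = info or {}


system = "Assist {{USER_NAME}} ({{USER_LOCATION}}). Today is {{CURRENT_DATE}}."
user = User("Ada", {"location": "London"})
metadata = {"variables": {"{{CURRENT_DATE}}": "2024-01-01"}}

# 1) Legacy (API usage): user-derived placeholders
template_params = {
    "user_name": user.name,
    "user_location": user.info.get("location") if user.info else None,
}
system = prompt_template(system, **template_params)

# 2) Metadata (WebUI usage): per-chat variables
if metadata and metadata.get("variables"):
    system = prompt_variables_template(system, metadata["variables"])

print(system)
# Assist Ada (London). Today is 2024-01-01.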
