Skip to content

Commit a84aa79

Browse files
authored
add support for system prompts!
1 parent 5845743 commit a84aa79

File tree

1 file changed

+16
-0
lines changed


chatmock.py

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -271,6 +271,14 @@ def chat_completions() -> Response:
271271
messages = []
272272
if not isinstance(messages, list):
273273
return jsonify({"error": {"message": "Request must include messages: []"}}), 400
274+
275+
# Convert first system prompt into a user message by default (beta thing)
276+
if isinstance(messages, list):
277+
sys_idx = next((i for i, m in enumerate(messages) if isinstance(m, dict) and m.get("role") == "system"), None)
278+
if isinstance(sys_idx, int):
279+
sys_msg = messages.pop(sys_idx)
280+
content = sys_msg.get("content") if isinstance(sys_msg, dict) else ""
281+
messages.insert(0, {"role": "user", "content": content})
274282
is_stream = bool(payload.get("stream"))
275283

276284
tools_responses = convert_tools_chat_to_responses(payload.get("tools"))
@@ -555,6 +563,13 @@ def ollama_chat() -> Response:
555563
model = payload.get("model")
556564
raw_messages = payload.get("messages")
557565
messages = _convert_ollama_messages(raw_messages, payload.get("images") if isinstance(payload.get("images"), list) else None)
566+
# Convert first system prompt into a user message by default
567+
if isinstance(messages, list):
568+
sys_idx = next((i for i, m in enumerate(messages) if isinstance(m, dict) and m.get("role") == "system"), None)
569+
if isinstance(sys_idx, int):
570+
sys_msg = messages.pop(sys_idx)
571+
content = sys_msg.get("content") if isinstance(sys_msg, dict) else ""
572+
messages.insert(0, {"role": "user", "content": content})
558573
stream_req = payload.get("stream")
559574
if stream_req is None:
560575
stream_req = True
@@ -1103,6 +1118,7 @@ def main() -> None:
11031118
default=os.getenv("CHATGPT_LOCAL_REASONING_COMPAT", "think-tags").lower(),
11041119
help="Compatibility mode for exposing reasoning to clients (legacy|o3|think-tags). 'current' is accepted as an alias for 'legacy'",
11051120
)
1121+
# System prompt handling is now default; no flag needed.
11061122

11071123
p_info = sub.add_parser("info", help="Print current stored tokens and derived account id")
11081124
p_info.add_argument("--json", action="store_true", help="Output raw auth.json contents")

0 commit comments

Comments (0)