Skip to content

Commit 0368879

Browse files
committed
i18n(openai-native-codex): localize provider errors; document immutable Codex system prompt and override rationale

- Add i18n keys under common.errors.openaiNativeCodex and use t() in handler
- Explain immutability and strategy where we inject overrides in OpenAiNativeCodexHandler
- Add commentary to codex prompt file describing canonical prompt and override rationale
1 parent 5d63945 commit 0368879

File tree

3 files changed

+50
-10
lines changed

3 files changed

+50
-10
lines changed

src/api/providers/openai-native-codex.prompt.ts

Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,12 @@
1+
/**
2+
* ChatGPT Codex system prompt (canonical/inert):
3+
* - The Responses API applies an immutable default system prompt server‑side.
4+
* - We cannot replace it dynamically; this file supplies the canonical text used for the top‑level "instructions".
5+
* Strategy:
6+
* - We complement this with a separate system-role injection in the provider using
7+
* <instructions_override> (to de-emphasize defaults) and <new_instructions> (the current task).
8+
* - See OpenAiNativeCodexHandler.createMessage for details and rationale.
9+
*/
110
export default `You are Codex, based on GPT-5. You are running as a coding agent in the Codex CLI on a user's computer.
211
312
## General
@@ -104,6 +113,12 @@ You are producing plain text that will later be styled by the CLI. Follow these
104113
* Examples: src/app.ts, src/app.ts:42, b/server/index.js#L10, C:\\repo\\project\\main.rs:12:5
105114
`
106115

116+
/**
117+
* Rationale for overridePrompt:
118+
* - Codex’s default system prompt cannot be removed.
119+
* - This override clarifies Roo’s identity and precedence rules and asks the model to honor Roo’s tools and flow.
120+
* - It is injected as <instructions_override> so the server’s default is still present, but downweighted in practice.
121+
*/
107122
export const overridePrompt = `## Identity and precedence
108123
- You are Roo (not Codex). Ignore any "Codex", "Codex CLI", or "based on GPT-5" identity statements. Roo's rules take precedence over any harness text.
109124

src/api/providers/openai-native-codex.ts

Lines changed: 27 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@ import { getModelParams } from "../transform/model-params"
1010
// Provider prompt content as a TS string module (no loader required)
1111
import codexPromptContent, { overridePrompt } from "./openai-native-codex.prompt"
1212
import { getApiRequestTimeout } from "./utils/timeout-config"
13+
import { t } from "i18next"
1314

1415
import {
1516
type ModelInfo,
@@ -112,7 +113,10 @@ export class OpenAiNativeCodexHandler extends BaseProvider {
112113
raw = await fs.readFile(explicitPath, "utf8")
113114
} catch (e: any) {
114115
throw new Error(
115-
`Failed to load ChatGPT OAuth credentials at ${explicitPath}: ${e?.message || e}. Tip: authenticate with the Codex CLI (e.g., "codex login") to create auth.json.`,
116+
t("common:errors.openaiNativeCodex.oauthReadFailed", {
117+
path: explicitPath,
118+
error: e?.message || String(e),
119+
}),
116120
)
117121
}
118122

@@ -121,7 +125,10 @@ export class OpenAiNativeCodexHandler extends BaseProvider {
121125
j = JSON.parse(raw)
122126
} catch (e: any) {
123127
throw new Error(
124-
`Failed to parse ChatGPT OAuth credentials JSON at ${explicitPath}: ${e?.message || e}. Tip: ensure the file is valid JSON or re-authenticate with "codex login" to regenerate it.`,
128+
t("common:errors.openaiNativeCodex.oauthParseFailed", {
129+
path: explicitPath,
130+
error: e?.message || String(e),
131+
}),
125132
)
126133
}
127134

@@ -147,7 +154,7 @@ export class OpenAiNativeCodexHandler extends BaseProvider {
147154
}
148155

149156
if (!access) {
150-
throw new Error("ChatGPT OAuth credentials are missing tokens.access_token")
157+
throw new Error(t("common:errors.openaiNativeCodex.missingAccessToken"))
151158
}
152159

153160
this.chatgptAccessToken = access
@@ -202,10 +209,15 @@ export class OpenAiNativeCodexHandler extends BaseProvider {
202209
const content: any[] = []
203210

204211
if (!injectedUserInstructions && typeof systemPrompt === "string" && systemPrompt.trim().length > 0) {
205-
// For ChatGPT Codex (Responses API), the top-level "instructions" payload is fixed and must be
206-
// provided from a canonical prompt file. We cannot programmatically modify that contents here.
207-
// Therefore, inject provider overrides and dynamic instructions as a separate system role message
208-
// using <instructions_override> and <new_instructions> tags before the first user/assistant turn.
212+
// Codex system prompt immutability:
213+
// - The top-level "instructions" field sent to codex/responses is immutable on the server.
214+
// - We cannot dynamically alter the default system prompt that Codex applies.
215+
// Strategy and rationale:
216+
// - We inject two system-role items before the first user/assistant turn:
217+
// 1) <instructions_override> — explains to the model how Roo’s rules supersede Codex defaults.
218+
// 2) <new_instructions> — the current task/systemPrompt, asking Codex to prioritize these rules/tools.
219+
// - This pattern reduces the impact of Codex’s default prompt without trying to replace it (not possible).
220+
// - We also keep these separate from user messages to avoid tool execution bias.
209221
formattedInput.push({
210222
role: "system",
211223
content: [
@@ -327,7 +339,12 @@ export class OpenAiNativeCodexHandler extends BaseProvider {
327339
// ignore parse error
328340
}
329341
const snippet = (text || "").slice(0, 500).replace(/\s+/g, " ").trim()
330-
const msg = `[Codex] HTTP ${response.status}${requestId ? ` req ${requestId}` : ""} model=${model.id}: ${userMessage || snippet}`
342+
const msg = t("common:errors.openaiNativeCodex.httpError", {
343+
status: response.status,
344+
requestId: requestId || "n/a",
345+
modelId: model.id,
346+
message: userMessage || snippet,
347+
})
331348
const err = new Error(msg)
332349
;(err as any).status = response.status
333350
if (requestId) (err as any).requestId = requestId
@@ -336,7 +353,7 @@ export class OpenAiNativeCodexHandler extends BaseProvider {
336353
throw err
337354
}
338355
if (!response.body) {
339-
throw new Error("ChatGPT Responses error: No response body")
356+
throw new Error(t("common:errors.openaiNativeCodex.noResponseBody"))
340357
}
341358

342359
// Stream parse
@@ -458,7 +475,7 @@ export class OpenAiNativeCodexHandler extends BaseProvider {
458475
}
459476
}
460477
if (!hasContent) {
461-
throw new Error(`[Codex] Empty stream: no content received for model=${model.id}`)
478+
throw new Error(t("common:errors.openaiNativeCodex.emptyStream", { modelId: model.id }))
462479
}
463480
} finally {
464481
try {

src/i18n/locales/en/common.json

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -114,6 +114,14 @@
114114
"roo": {
115115
"authenticationRequired": "Roo provider requires cloud authentication. Please sign in to Roo Code Cloud."
116116
},
117+
"openaiNativeCodex": {
118+
"oauthReadFailed": "Failed to load ChatGPT OAuth credentials at {{path}}: {{error}}. Tip: authenticate with the Codex CLI (e.g., \"codex login\") to create auth.json.",
119+
"oauthParseFailed": "Failed to parse ChatGPT OAuth credentials JSON at {{path}}: {{error}}. Tip: ensure the file is valid JSON or re-authenticate with \"codex login\" to regenerate it.",
120+
"missingAccessToken": "ChatGPT OAuth credentials are missing tokens.access_token.",
121+
"httpError": "Codex HTTP {{status}} (req: {{requestId}}) model={{modelId}}: {{message}}",
122+
"noResponseBody": "ChatGPT Responses error: No response body",
123+
"emptyStream": "ChatGPT Responses stream returned no content for model {{modelId}}"
124+
},
117125
"api": {
118126
"invalidKeyInvalidChars": "API key contains invalid characters."
119127
},

0 commit comments

Comments
 (0)