5 changes: 5 additions & 0 deletions packages/types/src/global-settings.ts
@@ -47,6 +47,10 @@ export const globalSettingsSchema = z.object({
openRouterImageApiKey: z.string().optional(),
openRouterImageGenerationSelectedModel: z.string().optional(),

// Codex CLI settings
codexCliSessionToken: z.string().optional(),
codexCliBaseUrl: z.string().optional(),

condensingApiConfigId: z.string().optional(),
customCondensingPrompt: z.string().optional(),

@@ -210,6 +214,7 @@ export const SECRET_STATE_KEYS = [
// Global secrets that are part of GlobalSettings (not ProviderSettings)
export const GLOBAL_SECRET_KEYS = [
"openRouterImageApiKey", // For image generation
"codexCliSessionToken", // For Codex CLI authentication
] as const

// Type for the actual secret storage keys
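For reference, entries in `GLOBAL_SECRET_KEYS` are names the extension keeps in VS Code's `SecretStorage` rather than in plain settings. A minimal sketch of how the new token could be stored and read, assuming direct use of the `vscode` secrets API (the extension's real storage layer may wrap this differently; the helper names are hypothetical):

```ts
import * as vscode from "vscode"

// Hypothetical helpers; the key name matches the new GLOBAL_SECRET_KEYS entry.
async function saveCodexCliSessionToken(context: vscode.ExtensionContext, token: string) {
	await context.secrets.store("codexCliSessionToken", token)
}

async function readCodexCliSessionToken(context: vscode.ExtensionContext) {
	return context.secrets.get("codexCliSessionToken") // string | undefined
}
```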
10 changes: 10 additions & 0 deletions packages/types/src/provider-settings.ts
@@ -8,6 +8,7 @@ import {
cerebrasModels,
chutesModels,
claudeCodeModels,
codexCliModels,
deepSeekModels,
doubaoModels,
featherlessModels,
@@ -34,6 +35,7 @@ import {
export const providerNames = [
"anthropic",
"claude-code",
"codex-cli",
"glama",
"openrouter",
"bedrock",
@@ -343,13 +345,19 @@ const vercelAiGatewaySchema = baseProviderSettingsSchema.extend({
vercelAiGatewayModelId: z.string().optional(),
})

const codexCliSchema = apiModelIdProviderModelSchema.extend({
codexCliPath: z.string().optional(),
codexCliBaseUrl: z.string().optional(),
})

const defaultSchema = z.object({
apiProvider: z.undefined(),
})

export const providerSettingsSchemaDiscriminated = z.discriminatedUnion("apiProvider", [
anthropicSchema.merge(z.object({ apiProvider: z.literal("anthropic") })),
claudeCodeSchema.merge(z.object({ apiProvider: z.literal("claude-code") })),
codexCliSchema.merge(z.object({ apiProvider: z.literal("codex-cli") })),
glamaSchema.merge(z.object({ apiProvider: z.literal("glama") })),
openRouterSchema.merge(z.object({ apiProvider: z.literal("openrouter") })),
bedrockSchema.merge(z.object({ apiProvider: z.literal("bedrock") })),
@@ -391,6 +399,7 @@ export const providerSettingsSchema = z.object({
apiProvider: providerNamesSchema.optional(),
...anthropicSchema.shape,
...claudeCodeSchema.shape,
...codexCliSchema.shape,
...glamaSchema.shape,
...openRouterSchema.shape,
...bedrockSchema.shape,
@@ -507,6 +516,7 @@ export const MODELS_BY_PROVIDER: Record<
models: Object.keys(chutesModels),
},
"claude-code": { id: "claude-code", label: "Claude Code", models: Object.keys(claudeCodeModels) },
"codex-cli": { id: "codex-cli", label: "Codex CLI", models: Object.keys(codexCliModels) },
deepseek: {
id: "deepseek",
label: "DeepSeek",
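For context, the new `codexCliSchema` fields slot into the flattened `providerSettingsSchema` the same way as the other providers. A minimal sketch of a config that should validate against the updated schema, assuming `apiModelId` comes from `apiModelIdProviderModelSchema`; the import path and field values are illustrative only:

```ts
import { providerSettingsSchema } from "./provider-settings.js"

// Hypothetical example values; only apiProvider must match the new "codex-cli" literal.
const candidate = {
	apiProvider: "codex-cli",
	apiModelId: "gpt-4o-mini",
	codexCliPath: "/usr/local/bin/codex", // illustrative path
	codexCliBaseUrl: "https://api.openai.com/v1", // illustrative URL
}

const parsed = providerSettingsSchema.parse(candidate)
console.log(parsed.apiProvider) // "codex-cli"
```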
91 changes: 91 additions & 0 deletions packages/types/src/providers/codex-cli.ts
@@ -0,0 +1,91 @@
import type { ModelInfo } from "../model.js"

// Codex CLI models - mirrors OpenAI models since it's OpenAI-compatible
export type CodexCliModelId =
| "gpt-4o"
| "gpt-4o-mini"
| "gpt-4-turbo"
| "gpt-4"
| "gpt-3.5-turbo"
| "o1-preview"
| "o1-mini"
| "o1"
| "o3-mini"

export const codexCliDefaultModelId: CodexCliModelId = "gpt-4o-mini"

export const codexCliModels: Record<CodexCliModelId, ModelInfo> = {
"gpt-4o": {
maxTokens: 16384,
contextWindow: 128000,
supportsImages: true,
supportsPromptCache: false,
inputPrice: 2.5,
outputPrice: 10,
},
"gpt-4o-mini": {
maxTokens: 16384,
contextWindow: 128000,
supportsImages: true,
supportsPromptCache: false,
inputPrice: 0.15,
outputPrice: 0.6,
},
"gpt-4-turbo": {
maxTokens: 4096,
contextWindow: 128000,
supportsImages: true,
supportsPromptCache: false,
inputPrice: 10,
outputPrice: 30,
},
"gpt-4": {
maxTokens: 8192,
contextWindow: 8192,
supportsImages: false,
supportsPromptCache: false,
inputPrice: 30,
outputPrice: 60,
},
"gpt-3.5-turbo": {
maxTokens: 4096,
contextWindow: 16385,
supportsImages: false,
supportsPromptCache: false,
inputPrice: 0.5,
outputPrice: 1.5,
},
"o1-preview": {
maxTokens: 32768,
contextWindow: 128000,
supportsImages: false,
supportsPromptCache: false,
inputPrice: 15,
outputPrice: 60,
},
"o1-mini": {
maxTokens: 65536,
contextWindow: 128000,
supportsImages: false,
supportsPromptCache: false,
inputPrice: 3,
outputPrice: 12,
},
o1: {
maxTokens: 100000,
contextWindow: 200000,
supportsImages: true,
supportsPromptCache: false,
inputPrice: 15,
outputPrice: 60,
},
"o3-mini": {
maxTokens: 65536,
contextWindow: 200000,
supportsImages: true,
supportsPromptCache: false,
inputPrice: 1.1,
outputPrice: 4.4,
reasoningEffort: "medium",
},
}
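A rough sketch of how a handler might resolve a configured model id against this table, falling back to `codexCliDefaultModelId` when the id is missing or unknown (the helper name and fallback behaviour are assumptions, not part of this PR):

```ts
import { codexCliDefaultModelId, codexCliModels, type CodexCliModelId } from "./codex-cli.js"

// Resolve a possibly-unknown model id to its ModelInfo, defaulting to codexCliDefaultModelId.
function resolveCodexCliModel(modelId?: string) {
	const id =
		modelId && modelId in codexCliModels ? (modelId as CodexCliModelId) : codexCliDefaultModelId
	return { id, info: codexCliModels[id] }
}

const { id, info } = resolveCodexCliModel("o3-mini")
console.log(id, info.contextWindow) // "o3-mini" 200000
```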
1 change: 1 addition & 0 deletions packages/types/src/providers/index.ts
@@ -3,6 +3,7 @@ export * from "./bedrock.js"
export * from "./cerebras.js"
export * from "./chutes.js"
export * from "./claude-code.js"
export * from "./codex-cli.js"
export * from "./deepseek.js"
export * from "./doubao.js"
export * from "./featherless.js"
3 changes: 3 additions & 0 deletions src/api/index.ts
@@ -30,6 +30,7 @@ import {
ChutesHandler,
LiteLLMHandler,
ClaudeCodeHandler,
CodexCliHandler,
QwenCodeHandler,
SambaNovaHandler,
IOIntelligenceHandler,
@@ -95,6 +96,8 @@ export function buildApiHandler(configuration: ProviderSettings): ApiHandler {
return new AnthropicHandler(options)
case "claude-code":
return new ClaudeCodeHandler(options)
case "codex-cli":
return new CodexCliHandler(options)
case "glama":
return new GlamaHandler(options)
case "openrouter":
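With the new case in place, selecting the provider routes construction through `buildApiHandler` like any other provider. A hedged usage sketch, assuming the types package exports `ProviderSettings` (import paths and values here are illustrative):

```ts
import { buildApiHandler } from "../src/api" // illustrative path
import type { ProviderSettings } from "@roo-code/types" // illustrative path

// Hypothetical configuration; real values come from the user's saved settings.
const settings: ProviderSettings = {
	apiProvider: "codex-cli",
	apiModelId: "gpt-4o-mini",
	codexCliPath: "/usr/local/bin/codex",
}

const handler = buildApiHandler(settings) // dispatches to the new CodexCliHandler
```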