Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions packages/types/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ export * from "./mcp.js"
export * from "./message.js"
export * from "./mode.js"
export * from "./model.js"
export * from "./oauth.js"
export * from "./provider-settings.js"
export * from "./sharing.js"
export * from "./task.js"
Expand Down
86 changes: 86 additions & 0 deletions packages/types/src/oauth.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
import { z } from "zod"

/**
* OAuth configuration for ChatGPT authentication
*/
export const CHATGPT_OAUTH_CONFIG = {
clientId: "app_EMoamEEZ73f0CkXaXp7hrann", // Codex CLI client ID for compatibility
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Using Codex CLI's client ID for compatibility is clever, but what's the fallback plan if OpenAI revokes this ID? Should we consider making this configurable or having a backup client ID?

authorizationUrl: "https://auth.openai.com/oauth/authorize",
tokenUrl: "https://auth.openai.com/oauth/token",
redirectUri: "http://localhost:1455/auth/callback",
defaultPort: 1455,
scopes: ["openid", "profile", "email", "offline_access"],
} as const

/**
* OAuth tokens structure
*/
export const oauthTokensSchema = z.object({
	accessToken: z.string(), // OAuth access token
	idToken: z.string(), // JWT identity token (also the subject of the API-key exchange — see TokenExchangeRequest)
	refreshToken: z.string(), // used to obtain fresh tokens after expiry
	expiresIn: z.number().optional(), // lifetime — presumably seconds per OAuth convention; confirm against the token endpoint
	tokenType: z.string().optional(), // e.g. "Bearer" — presumably; verify against the actual response
})

// Validated shape of a token-endpoint response.
export type OAuthTokens = z.infer<typeof oauthTokensSchema>

/**
* ChatGPT credentials stored in SecretStorage
*/
export const chatGptCredentialsSchema = z.object({
	apiKey: z.string().optional(), // Exchanged API key
	idToken: z.string(), // JWT identity token from the OAuth flow
	refreshToken: z.string(), // needed to re-run the flow when tokens expire
	lastRefreshIso: z.string().optional(), // ISO-8601 timestamp of the last refresh — presumably; confirm against the writer
	responseId: z.string().optional(), // For conversation continuity
})

// Persisted credential record (stored in VS Code SecretStorage per the header comment).
export type ChatGptCredentials = z.infer<typeof chatGptCredentialsSchema>

/**
* Codex CLI auth.json structure for import
*/
// Field names are snake_case/UPPER_CASE to mirror the on-disk auth.json
// written by the Codex CLI; do not rename them.
export const codexAuthJsonSchema = z.object({
	OPENAI_API_KEY: z.string().optional(), // plain API key, if the CLI stored one
	tokens: z
		.object({
			id_token: z.string(), // only the id_token is required for import
			access_token: z.string().optional(),
			refresh_token: z.string().optional(),
		})
		.optional(),
	last_refresh: z.string().optional(), // timestamp string — format not visible here; confirm against Codex CLI output
})

// Parsed shape of an imported Codex CLI auth.json file.
export type CodexAuthJson = z.infer<typeof codexAuthJsonSchema>

/**
* OAuth state for CSRF protection
*/
export interface OAuthState {
	/** Opaque `state` value sent to the provider and echoed back, for CSRF protection. */
	state: string
	/** PKCE code verifier — presumably paired with a code_challenge in the authorize request; confirm against the flow implementation. */
	codeVerifier: string
	/** Creation timestamp — units (ms vs s since epoch) not visible here; confirm against the producer. */
	timestamp: number
}

/**
* Token exchange request for getting API key from OAuth tokens
*/
// Field names and literal values follow the RFC 8693 token-exchange wire
// format, with an OpenAI-specific requested_token_type.
export interface TokenExchangeRequest {
	/** Fixed RFC 8693 token-exchange grant type. */
	grant_type: "urn:ietf:params:oauth:grant-type:token-exchange"
	/** OpenAI-specific token type: exchange the OAuth identity for an API key. */
	requested_token_type: "openai-api-key"
	subject_token: string // ID token
	/** Declares that subject_token is an OIDC ID token. */
	subject_token_type: "urn:ietf:params:oauth:token-type:id_token"
	/** OAuth client ID (see CHATGPT_OAUTH_CONFIG.clientId). */
	client_id: string
}

/**
* OAuth error response
*/
// Standard OAuth 2.0 error response body (RFC 6749 §5.2).
export const oauthErrorSchema = z.object({
	error: z.string(), // machine-readable error code, e.g. "invalid_grant"
	error_description: z.string().optional(), // human-readable detail, if provided
})

export type OAuthError = z.infer<typeof oauthErrorSchema>
1 change: 1 addition & 0 deletions packages/types/src/provider-settings.ts
Original file line number Diff line number Diff line change
Expand Up @@ -155,6 +155,7 @@ const openAiSchema = baseProviderSettingsSchema.extend({
openAiStreamingEnabled: z.boolean().optional(),
openAiHostHeader: z.string().optional(), // Keep temporarily for backward compatibility during migration.
openAiHeaders: z.record(z.string(), z.string()).optional(),
openAiAuthMode: z.enum(["apiKey", "chatgpt"]).optional(), // New: Authentication mode
})

const ollamaSchema = baseProviderSettingsSchema.extend({
Expand Down
52 changes: 49 additions & 3 deletions src/api/providers/openai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -29,17 +29,47 @@ import type { SingleCompletionHandler, ApiHandlerCreateMessageMetadata } from ".
// compatible with the OpenAI API. We can also rename it to `OpenAIHandler`.
export class OpenAiHandler extends BaseProvider implements SingleCompletionHandler {
protected options: ApiHandlerOptions
private client: OpenAI
private client!: OpenAI // Using definite assignment assertion since we initialize it
private apiKeyPromise?: Promise<string>

constructor(options: ApiHandlerOptions) {
	super()
	this.options = options

	if (this.options.openAiAuthMode === "chatgpt") {
		// ChatGPT auth mode: the API key comes from an async credential
		// exchange, so the client cannot be constructed synchronously.
		// Chain initializeClient into apiKeyPromise itself so that any
		// consumer awaiting apiKeyPromise (createMessage, completePrompt)
		// is guaranteed an initialized client on resolution — previously
		// initialization ran in a separate fire-and-forget .then and only
		// worked because of microtask registration order.
		this.apiKeyPromise = this.getApiKeyFromChatGpt().then((apiKey) => {
			this.initializeClient(apiKey)
			return apiKey
		})
		// Log here so a failure is visible even if no request ever awaits
		// the promise; callers that do await it still see the rejection.
		this.apiKeyPromise.catch((error) => {
			console.error("Failed to get API key from ChatGPT:", error)
		})
	} else {
		// Regular API-key mode: initialize immediately. The "not-provided"
		// placeholder preserves existing behavior when no key is configured;
		// the API itself will reject it.
		this.initializeClient(this.options.openAiApiKey ?? "not-provided")
	}
}

/**
 * Resolves the API key previously exchanged for the user's ChatGPT
 * credentials.
 *
 * @returns the stored API key
 * @throws Error when no key is available (user has not signed in)
 */
private async getApiKeyFromChatGpt(): Promise<string> {
	// Imported lazily to avoid a circular dependency with the auth module.
	const { getCredentialsManager } = await import("../../core/auth/chatgpt-credentials-manager")
	const key = await getCredentialsManager().getApiKey()
	if (key) {
		return key
	}
	throw new Error("No API key found for ChatGPT authentication. Please sign in with your ChatGPT account.")
}

private initializeClient(apiKey: string): void {
const baseURL = this.options.openAiBaseUrl ?? "https://api.openai.com/v1"
const apiKey = this.options.openAiApiKey ?? "not-provided"
const isAzureAiInference = this._isAzureAiInference(this.options.openAiBaseUrl)
const urlHost = this._getUrlHost(this.options.openAiBaseUrl)
const isAzureOpenAi = urlHost === "azure.com" || urlHost.endsWith(".azure.com") || options.openAiUseAzure
const isAzureOpenAi = urlHost === "azure.com" || urlHost.endsWith(".azure.com") || this.options.openAiUseAzure

const headers = {
...DEFAULT_HEADERS,
Expand Down Expand Up @@ -77,6 +107,14 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
messages: Anthropic.Messages.MessageParam[],
metadata?: ApiHandlerCreateMessageMetadata,
): ApiStream {
// Ensure client is initialized for ChatGPT auth mode
if (this.apiKeyPromise) {
await this.apiKeyPromise
}

if (!this.client) {
throw new Error("OpenAI client not initialized")
}
const { info: modelInfo, reasoning } = this.getModel()
const modelUrl = this.options.openAiBaseUrl ?? ""
const modelId = this.options.openAiModelId ?? ""
Expand Down Expand Up @@ -256,6 +294,14 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl

async completePrompt(prompt: string): Promise<string> {
try {
// Ensure client is initialized for ChatGPT auth mode
if (this.apiKeyPromise) {
await this.apiKeyPromise
}

if (!this.client) {
throw new Error("OpenAI client not initialized")
}
const isAzureAiInference = this._isAzureAiInference(this.options.openAiBaseUrl)
const model = this.getModel()
const modelInfo = model.info
Expand Down
Loading
Loading