28 changes: 13 additions & 15 deletions src/api/providers/openai.ts
@@ -35,12 +35,17 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
const urlHost = this._getUrlHost(this.options.openAiBaseUrl)
const isAzureOpenAi = urlHost === "azure.com" || urlHost.endsWith(".azure.com") || options.openAiUseAzure

const headers = {
...DEFAULT_HEADERS,
...(this.options.openAiHeaders || {}),
}

if (isAzureAiInference) {
// Azure AI Inference Service (e.g., for DeepSeek) uses a different path structure
this.client = new OpenAI({
baseURL,
apiKey,
defaultHeaders: DEFAULT_HEADERS,
defaultHeaders: headers,
defaultQuery: { "api-version": this.options.azureApiVersion || "2024-05-01-preview" },
})
} else if (isAzureOpenAi) {
@@ -50,19 +55,13 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
baseURL,
apiKey,
apiVersion: this.options.azureApiVersion || azureOpenAiDefaultApiVersion,
defaultHeaders: {
...DEFAULT_HEADERS,
...(this.options.openAiHostHeader ? { Host: this.options.openAiHostHeader } : {}),
},
defaultHeaders: headers,
})
} else {
this.client = new OpenAI({
baseURL,
apiKey,
defaultHeaders: {
...DEFAULT_HEADERS,
...(this.options.openAiHostHeader ? { Host: this.options.openAiHostHeader } : {}),
},
defaultHeaders: headers,
})
}
}
@@ -361,7 +360,7 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
}
}

export async function getOpenAiModels(baseUrl?: string, apiKey?: string, hostHeader?: string) {
export async function getOpenAiModels(baseUrl?: string, apiKey?: string, openAiHeaders?: Record<string, string>) {
try {
if (!baseUrl) {
return []
@@ -372,16 +371,15 @@ export async function getOpenAiModels(baseUrl?: string, apiKey?: string, hostHea
}

const config: Record<string, any> = {}
const headers: Record<string, string> = {}
const headers: Record<string, string> = {
...DEFAULT_HEADERS,
...(openAiHeaders || {}),
}

if (apiKey) {
headers["Authorization"] = `Bearer ${apiKey}`
}

if (hostHeader) {
headers["Host"] = hostHeader
}

if (Object.keys(headers).length > 0) {
config["headers"] = headers
}
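The net effect in this file is that every client branch now shares one merged header map: DEFAULT_HEADERS first, then any user-configured openAiHeaders, so user entries win on key collisions and the old Host-only special case goes away. A minimal sketch of that merge order, with an illustrative stand-in for the real DEFAULT_HEADERS constant:

```ts
// Stand-in value; the real DEFAULT_HEADERS constant is the one imported by openai.ts.
const DEFAULT_HEADERS: Record<string, string> = { "User-Agent": "example-client/1.0" }

function buildDefaultHeaders(userHeaders?: Record<string, string>): Record<string, string> {
  return {
    ...DEFAULT_HEADERS,
    ...(userHeaders || {}), // user-configured headers override defaults on key collisions
  }
}

// The old openAiHostHeader behaviour is now just one entry in the map:
const headers = buildDefaultHeaders({ Host: "internal-gateway.local", "X-Env": "staging" })
// => { "User-Agent": "example-client/1.0", Host: "internal-gateway.local", "X-Env": "staging" }
```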
10 changes: 10 additions & 0 deletions src/core/config/ContextProxy.ts
@@ -192,6 +192,16 @@ export class ContextProxy {
// If a value is not present in the new configuration, then it is assumed
// that the setting's value should be `undefined` and therefore we
// need to remove it from the state cache if it exists.

// Ensure openAiHeaders is always an object even when empty
// This is critical for proper serialization/deserialization through IPC
if (values.openAiHeaders !== undefined) {
// Check if it's empty or null
if (!values.openAiHeaders || Object.keys(values.openAiHeaders).length === 0) {
values.openAiHeaders = {}
}
}

await this.setValues({
...PROVIDER_SETTINGS_KEYS.filter((key) => !isSecretStateKey(key))
.filter((key) => !!this.stateCache[key])
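The block added here normalizes a null or empty openAiHeaders value to a plain {} before the settings are cached and shipped over IPC, so the key always serializes the same way. A standalone sketch of that normalization, using a simplified stand-in for the real ProviderSettings shape:

```ts
// Simplified shape; the real code mutates the incoming settings values in place, as above.
type HeaderCarrier = { openAiHeaders?: Record<string, string> | null }

function normalizeOpenAiHeaders(values: HeaderCarrier): void {
  if (values.openAiHeaders !== undefined) {
    // null and {} both collapse to {} so the field round-trips consistently through IPC
    if (!values.openAiHeaders || Object.keys(values.openAiHeaders).length === 0) {
      values.openAiHeaders = {}
    }
  }
}

const values: HeaderCarrier = { openAiHeaders: null }
normalizeOpenAiHeaders(values) // values.openAiHeaders is now {}
```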
33 changes: 33 additions & 0 deletions src/core/config/ProviderSettingsManager.ts
@@ -17,6 +17,7 @@ export const providerProfilesSchema = z.object({
.object({
rateLimitSecondsMigrated: z.boolean().optional(),
diffSettingsMigrated: z.boolean().optional(),
openAiHeadersMigrated: z.boolean().optional(),
})
.optional(),
})
@@ -38,6 +39,7 @@ export class ProviderSettingsManager {
migrations: {
rateLimitSecondsMigrated: true, // Mark as migrated on fresh installs
diffSettingsMigrated: true, // Mark as migrated on fresh installs
openAiHeadersMigrated: true, // Mark as migrated on fresh installs
},
}

@@ -90,6 +92,7 @@ export class ProviderSettingsManager {
providerProfiles.migrations = {
rateLimitSecondsMigrated: false,
diffSettingsMigrated: false,
openAiHeadersMigrated: false,
} // Initialize with default values
isDirty = true
}
@@ -106,6 +109,12 @@
isDirty = true
}

if (!providerProfiles.migrations.openAiHeadersMigrated) {
await this.migrateOpenAiHeaders(providerProfiles)
providerProfiles.migrations.openAiHeadersMigrated = true
isDirty = true
}

if (isDirty) {
await this.store(providerProfiles)
}
@@ -175,6 +184,30 @@
}
}

private async migrateOpenAiHeaders(providerProfiles: ProviderProfiles) {
try {
for (const [_name, apiConfig] of Object.entries(providerProfiles.apiConfigs)) {
// Use type assertion to access the deprecated property safely
const configAny = apiConfig as any

// Check if openAiHostHeader exists but openAiHeaders doesn't
if (
configAny.openAiHostHeader &&
(!apiConfig.openAiHeaders || Object.keys(apiConfig.openAiHeaders || {}).length === 0)
) {
// Create the headers object with the Host value
apiConfig.openAiHeaders = { Host: configAny.openAiHostHeader }

// Delete the old property to prevent re-migration
// This prevents the header from reappearing after deletion
configAny.openAiHostHeader = undefined
}
}
} catch (error) {
console.error(`[MigrateOpenAiHeaders] Failed to migrate OpenAI headers:`, error)
}
}

/**
* List all available configs with metadata.
*/
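Together, the new openAiHeadersMigrated flag and migrateOpenAiHeaders give a one-shot upgrade path: a legacy openAiHostHeader string becomes a Host entry in openAiHeaders, and the legacy field is cleared so deleting the header later cannot resurrect it. An illustrative before/after, with a made-up profile name and host:

```ts
// Hypothetical stored profile before the migration runs:
const before = {
  apiConfigs: {
    corporate: { openAiHostHeader: "internal-gateway.local" }, // legacy single-header field
  },
  migrations: { openAiHeadersMigrated: false },
}

// The same profile after the migration has run and the profiles have been stored again:
const after = {
  apiConfigs: {
    corporate: {
      openAiHeaders: { Host: "internal-gateway.local" }, // carried over as a Host entry
      openAiHostHeader: undefined, // cleared so the migration never runs against it again
    },
  },
  migrations: { openAiHeadersMigrated: true },
}
```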
@@ -56,6 +56,7 @@ describe("ProviderSettingsManager", () => {
migrations: {
rateLimitSecondsMigrated: true,
diffSettingsMigrated: true,
openAiHeadersMigrated: true,
},
}),
)
2 changes: 1 addition & 1 deletion src/core/webview/webviewMessageHandler.ts
@@ -310,7 +310,7 @@ export const webviewMessageHandler = async (provider: ClineProvider, message: We
const openAiModels = await getOpenAiModels(
message?.values?.baseUrl,
message?.values?.apiKey,
message?.values?.hostHeader,
message?.values?.openAiHeaders,
)

provider.postMessageToWebview({ type: "openAiModels", openAiModels })
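At this call site the webview now forwards the whole openAiHeaders map instead of a single hostHeader string. A minimal sketch of the payload shape being read here, assuming a hypothetical values object (only baseUrl, apiKey, and openAiHeaders appear in the real call):

```ts
// Assumed relative path from this handler to the exported helper shown in the first file.
import { getOpenAiModels } from "../../api/providers/openai"

// Hypothetical payload from the settings webview; only the `values` shape matters for this call.
const values = {
  baseUrl: "https://api.example.com/v1",
  apiKey: "sk-example",
  openAiHeaders: { Host: "internal-gateway.local", "X-Team": "platform" },
}

async function refreshModels() {
  // Mirrors the handler above: headers travel as one record, Host included.
  return await getOpenAiModels(values.baseUrl, values.apiKey, values.openAiHeaders)
}
```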
7 changes: 6 additions & 1 deletion src/exports/roo-code.d.ts
@@ -50,7 +50,6 @@ type ProviderSettings = {
vertexRegion?: string | undefined
openAiBaseUrl?: string | undefined
openAiApiKey?: string | undefined
openAiHostHeader?: string | undefined
openAiLegacyFormat?: boolean | undefined
openAiR1FormatEnabled?: boolean | undefined
openAiModelId?: string | undefined
@@ -88,6 +87,12 @@ type ProviderSettings = {
azureApiVersion?: string | undefined
openAiStreamingEnabled?: boolean | undefined
enableReasoningEffort?: boolean | undefined
openAiHostHeader?: string | undefined
openAiHeaders?:
| {
[x: string]: string
}
| undefined
ollamaModelId?: string | undefined
ollamaBaseUrl?: string | undefined
vsCodeLmModelSelector?:
7 changes: 6 additions & 1 deletion src/exports/types.ts
@@ -51,7 +51,6 @@ type ProviderSettings = {
vertexRegion?: string | undefined
openAiBaseUrl?: string | undefined
openAiApiKey?: string | undefined
openAiHostHeader?: string | undefined
openAiLegacyFormat?: boolean | undefined
openAiR1FormatEnabled?: boolean | undefined
openAiModelId?: string | undefined
@@ -89,6 +88,12 @@ type ProviderSettings = {
azureApiVersion?: string | undefined
openAiStreamingEnabled?: boolean | undefined
enableReasoningEffort?: boolean | undefined
openAiHostHeader?: string | undefined
openAiHeaders?:
| {
[x: string]: string
}
| undefined
ollamaModelId?: string | undefined
ollamaBaseUrl?: string | undefined
vsCodeLmModelSelector?:
6 changes: 4 additions & 2 deletions src/schemas/index.ts
@@ -370,7 +370,6 @@ export const providerSettingsSchema = z.object({
// OpenAI
openAiBaseUrl: z.string().optional(),
openAiApiKey: z.string().optional(),
openAiHostHeader: z.string().optional(),
openAiLegacyFormat: z.boolean().optional(),
openAiR1FormatEnabled: z.boolean().optional(),
openAiModelId: z.string().optional(),
@@ -379,6 +378,8 @@
azureApiVersion: z.string().optional(),
openAiStreamingEnabled: z.boolean().optional(),
enableReasoningEffort: z.boolean().optional(),
openAiHostHeader: z.string().optional(), // Keep temporarily for backward compatibility during migration
openAiHeaders: z.record(z.string(), z.string()).optional(),
// Ollama
ollamaModelId: z.string().optional(),
ollamaBaseUrl: z.string().optional(),
@@ -469,7 +470,6 @@ const providerSettingsRecord: ProviderSettingsRecord = {
// OpenAI
openAiBaseUrl: undefined,
openAiApiKey: undefined,
openAiHostHeader: undefined,
openAiLegacyFormat: undefined,
openAiR1FormatEnabled: undefined,
openAiModelId: undefined,
@@ -478,6 +478,8 @@ const providerSettingsRecord: ProviderSettingsRecord = {
azureApiVersion: undefined,
openAiStreamingEnabled: undefined,
enableReasoningEffort: undefined,
openAiHostHeader: undefined, // Keep temporarily for backward compatibility during migration
openAiHeaders: undefined,
// Ollama
ollamaModelId: undefined,
ollamaBaseUrl: undefined,
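In the schema, openAiHeaders is a free-form string-to-string record, while the legacy openAiHostHeader string remains parseable until the migration has run everywhere. A minimal sketch of how the two fields validate, using a stand-in slice of providerSettingsSchema:

```ts
import { z } from "zod"

// Stand-in for just the two header-related fields of providerSettingsSchema above.
const openAiHeaderFields = z.object({
  openAiHostHeader: z.string().optional(), // legacy, kept only for the migration window
  openAiHeaders: z.record(z.string(), z.string()).optional(),
})

// Both shapes still parse; new configurations should only set openAiHeaders.
openAiHeaderFields.parse({ openAiHeaders: { Host: "internal-gateway.local" } })
openAiHeaderFields.parse({ openAiHostHeader: "internal-gateway.local" }) // tolerated until migrated
```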