
Commit b01615f

Add the option to use a custom Host header for openai-compatible (#2399)
1 parent 9f724bd commit b01615f

File tree

22 files changed: +112 -8 lines changed


.vscodeignore

Lines changed: 2 additions & 0 deletions
@@ -26,9 +26,11 @@ demo.gif
 .prettierignore
 .clinerules*
 .roomodes
+.roo/**
 cline_docs/**
 coverage/**
 locales/**
+benchmark/**
 
 # Ignore all webview-ui files except the build directory (https://github.com/microsoft/vscode-webview-ui-toolkit-samples/blob/main/frameworks/hello-world-react-cra/.vscodeignore)
 webview-ui/src/**

src/api/providers/openai.ts

Lines changed: 28 additions & 6 deletions
@@ -55,10 +55,20 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
 				baseURL,
 				apiKey,
 				apiVersion: this.options.azureApiVersion || azureOpenAiDefaultApiVersion,
-				defaultHeaders,
+				defaultHeaders: {
+					...defaultHeaders,
+					...(this.options.openAiHostHeader ? { Host: this.options.openAiHostHeader } : {}),
+				},
 			})
 		} else {
-			this.client = new OpenAI({ baseURL, apiKey, defaultHeaders })
+			this.client = new OpenAI({
+				baseURL,
+				apiKey,
+				defaultHeaders: {
+					...defaultHeaders,
+					...(this.options.openAiHostHeader ? { Host: this.options.openAiHostHeader } : {}),
+				},
+			})
 		}
 	}
 

@@ -67,6 +77,7 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
 		const modelUrl = this.options.openAiBaseUrl ?? ""
 		const modelId = this.options.openAiModelId ?? ""
 		const enabledR1Format = this.options.openAiR1FormatEnabled ?? false
+		const enabledLegacyFormat = this.options.openAiLegacyFormat ?? false
 		const isAzureAiInference = this._isAzureAiInference(modelUrl)
 		const urlHost = this._getUrlHost(modelUrl)
 		const deepseekReasoner = modelId.includes("deepseek-reasoner") || enabledR1Format

@@ -85,7 +96,7 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
 		let convertedMessages
 		if (deepseekReasoner) {
 			convertedMessages = convertToR1Format([{ role: "user", content: systemPrompt }, ...messages])
-		} else if (ark) {
+		} else if (ark || enabledLegacyFormat) {
 			convertedMessages = [systemMessage, ...convertToSimpleMessages(messages)]
 		} else {
 			if (modelInfo.supportsPromptCache) {

@@ -190,7 +201,9 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
 			model: modelId,
 			messages: deepseekReasoner
 				? convertToR1Format([{ role: "user", content: systemPrompt }, ...messages])
-				: [systemMessage, ...convertToOpenAiMessages(messages)],
+				: enabledLegacyFormat
+					? [systemMessage, ...convertToSimpleMessages(messages)]
+					: [systemMessage, ...convertToOpenAiMessages(messages)],
 		}
 
 		const response = await this.client.chat.completions.create(

@@ -330,7 +343,7 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
 		}
 	}
 
-export async function getOpenAiModels(baseUrl?: string, apiKey?: string) {
+export async function getOpenAiModels(baseUrl?: string, apiKey?: string, hostHeader?: string) {
 	try {
 		if (!baseUrl) {
 			return []

@@ -341,9 +354,18 @@ export async function getOpenAiModels(baseUrl?: string, apiKey?: string) {
 		}
 
 		const config: Record<string, any> = {}
+		const headers: Record<string, string> = {}
 
 		if (apiKey) {
-			config["headers"] = { Authorization: `Bearer ${apiKey}` }
+			headers["Authorization"] = `Bearer ${apiKey}`
+		}
+
+		if (hostHeader) {
+			headers["Host"] = hostHeader
+		}
+
+		if (Object.keys(headers).length > 0) {
+			config["headers"] = headers
 		}
 
 		const response = await axios.get(`${baseUrl}/models`, config)
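
The core of the change above is the conditional spread that merges an optional Host override into the client's default headers: when openAiHostHeader is unset, the spread contributes nothing and behaviour is unchanged. A minimal standalone sketch of that pattern, with placeholder settings values (only openAiHostHeader and defaultHeaders correspond to names in the diff):

import OpenAI from "openai"

// Placeholder settings for illustration; in the extension these come from the provider settings.
const options = {
	openAiBaseUrl: "https://10.20.30.40/v1",
	openAiApiKey: "sk-example",
	openAiHostHeader: "llm-gateway.internal.example.com",
}

// Illustrative default headers; not the extension's real values.
const defaultHeaders = { "X-Example-Header": "roo" }

// Conditional spread: the Host key only appears when a custom value is configured.
const client = new OpenAI({
	baseURL: options.openAiBaseUrl,
	apiKey: options.openAiApiKey,
	defaultHeaders: {
		...defaultHeaders,
		...(options.openAiHostHeader ? { Host: options.openAiHostHeader } : {}),
	},
})

// Every request made through `client` now carries the custom Host header, which helps when an
// OpenAI-compatible endpoint is reached by IP or through a proxy that routes by Host.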

src/core/webview/webviewMessageHandler.ts

Lines changed: 5 additions & 1 deletion
@@ -423,7 +423,11 @@ export const webviewMessageHandler = async (provider: ClineProvider, message: We
 			break
 		case "refreshOpenAiModels":
 			if (message?.values?.baseUrl && message?.values?.apiKey) {
-				const openAiModels = await getOpenAiModels(message?.values?.baseUrl, message?.values?.apiKey)
+				const openAiModels = await getOpenAiModels(
+					message?.values?.baseUrl,
+					message?.values?.apiKey,
+					message?.values?.hostHeader,
+				)
 				provider.postMessageToWebview({ type: "openAiModels", openAiModels })
 			}
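
Passing hostHeader through here means the model-list refresh uses the same override as chat requests. A hedged sketch of the request shape getOpenAiModels ends up producing, with placeholder values (the helper itself lives in src/api/providers/openai.ts):

import axios from "axios"

// Placeholder inputs for illustration only.
const baseUrl = "https://10.20.30.40/v1"
const apiKey = "sk-example"
const hostHeader = "llm-gateway.internal.example.com"

const headers: Record<string, string> = {}
if (apiKey) headers["Authorization"] = `Bearer ${apiKey}`
if (hostHeader) headers["Host"] = hostHeader

// Equivalent of the axios call in getOpenAiModels: list models from an
// OpenAI-compatible endpoint while presenting a custom Host header.
async function listModelIds(): Promise<string[]> {
	const response = await axios.get(`${baseUrl}/models`, { headers })
	return response.data?.data?.map((m: { id: string }) => m.id) ?? []
}

listModelIds().then((ids) => console.log(ids))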

src/exports/roo-code.d.ts

Lines changed: 2 additions & 0 deletions
@@ -86,6 +86,8 @@ type ProviderSettings = {
 	vertexRegion?: string | undefined
 	openAiBaseUrl?: string | undefined
 	openAiApiKey?: string | undefined
+	openAiHostHeader?: string | undefined
+	openAiLegacyFormat?: boolean | undefined
 	openAiR1FormatEnabled?: boolean | undefined
 	openAiModelId?: string | undefined
 	openAiCustomModelInfo?:

src/exports/types.ts

Lines changed: 2 additions & 0 deletions
@@ -87,6 +87,8 @@ type ProviderSettings = {
 	vertexRegion?: string | undefined
 	openAiBaseUrl?: string | undefined
 	openAiApiKey?: string | undefined
+	openAiHostHeader?: string | undefined
+	openAiLegacyFormat?: boolean | undefined
 	openAiR1FormatEnabled?: boolean | undefined
 	openAiModelId?: string | undefined
 	openAiCustomModelInfo?:

src/schemas/index.ts

Lines changed: 4 additions & 0 deletions
@@ -338,6 +338,8 @@ export const providerSettingsSchema = z.object({
 	// OpenAI
 	openAiBaseUrl: z.string().optional(),
 	openAiApiKey: z.string().optional(),
+	openAiHostHeader: z.string().optional(),
+	openAiLegacyFormat: z.boolean().optional(),
 	openAiR1FormatEnabled: z.boolean().optional(),
 	openAiModelId: z.string().optional(),
 	openAiCustomModelInfo: modelInfoSchema.nullish(),

@@ -431,6 +433,8 @@ const providerSettingsRecord: ProviderSettingsRecord = {
 	// OpenAI
 	openAiBaseUrl: undefined,
 	openAiApiKey: undefined,
+	openAiHostHeader: undefined,
+	openAiLegacyFormat: undefined,
 	openAiR1FormatEnabled: undefined,
 	openAiModelId: undefined,
 	openAiCustomModelInfo: undefined,
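
Both new fields are optional in the zod schema, so settings persisted before this change still parse cleanly. A small illustrative sketch of that behaviour using a trimmed-down schema (not the full providerSettingsSchema):

import { z } from "zod"

// Illustrative subset of the provider settings schema.
const openAiSettingsSchema = z.object({
	openAiBaseUrl: z.string().optional(),
	openAiApiKey: z.string().optional(),
	openAiHostHeader: z.string().optional(),
	openAiLegacyFormat: z.boolean().optional(),
})

// Older settings without the new keys still validate.
openAiSettingsSchema.parse({ openAiBaseUrl: "https://api.example.com/v1" })

// Newer settings carry the Host override and legacy-format flag through, typed and validated.
const parsed = openAiSettingsSchema.parse({
	openAiBaseUrl: "https://10.20.30.40/v1",
	openAiHostHeader: "llm-gateway.internal.example.com",
	openAiLegacyFormat: true,
})
console.log(parsed.openAiHostHeader)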

webview-ui/src/components/settings/ApiOptions.tsx

Lines changed: 39 additions & 1 deletion
@@ -103,6 +103,8 @@ const ApiOptions = ({
 	const [anthropicBaseUrlSelected, setAnthropicBaseUrlSelected] = useState(!!apiConfiguration?.anthropicBaseUrl)
 	const [azureApiVersionSelected, setAzureApiVersionSelected] = useState(!!apiConfiguration?.azureApiVersion)
 	const [openRouterBaseUrlSelected, setOpenRouterBaseUrlSelected] = useState(!!apiConfiguration?.openRouterBaseUrl)
+	const [openAiHostHeaderSelected, setOpenAiHostHeaderSelected] = useState(!!apiConfiguration?.openAiHostHeader)
+	const [openAiLegacyFormatSelected, setOpenAiLegacyFormatSelected] = useState(!!apiConfiguration?.openAiLegacyFormat)
 	const [googleGeminiBaseUrlSelected, setGoogleGeminiBaseUrlSelected] = useState(
 		!!apiConfiguration?.googleGeminiBaseUrl,
 	)

@@ -145,7 +147,11 @@
 		} else if (selectedProvider === "openai") {
 			vscode.postMessage({
 				type: "refreshOpenAiModels",
-				values: { baseUrl: apiConfiguration?.openAiBaseUrl, apiKey: apiConfiguration?.openAiApiKey },
+				values: {
+					baseUrl: apiConfiguration?.openAiBaseUrl,
+					apiKey: apiConfiguration?.openAiApiKey,
+					hostHeader: apiConfiguration?.openAiHostHeader,
+				},
 			})
 		} else if (selectedProvider === "ollama") {
 			vscode.postMessage({ type: "requestOllamaModels", text: apiConfiguration?.ollamaBaseUrl })

@@ -779,6 +785,16 @@
 						onChange={handleInputChange("openAiR1FormatEnabled", noTransform)}
 						openAiR1FormatEnabled={apiConfiguration?.openAiR1FormatEnabled ?? false}
 					/>
+					<div>
+						<Checkbox
+							checked={openAiLegacyFormatSelected}
+							onChange={(checked: boolean) => {
+								setOpenAiLegacyFormatSelected(checked)
+								setApiConfigurationField("openAiLegacyFormat", checked)
+							}}>
+							{t("settings:providers.useLegacyFormat")}
+						</Checkbox>
+					</div>
 					<Checkbox
 						checked={apiConfiguration?.openAiStreamingEnabled ?? true}
 						onChange={handleInputChange("openAiStreamingEnabled", noTransform)}>

@@ -811,6 +827,28 @@
 					)}
 				</div>
 
+				<div>
+					<Checkbox
+						checked={openAiHostHeaderSelected}
+						onChange={(checked: boolean) => {
+							setOpenAiHostHeaderSelected(checked)
+
+							if (!checked) {
+								setApiConfigurationField("openAiHostHeader", "")
+							}
+						}}>
+						{t("settings:providers.useHostHeader")}
+					</Checkbox>
+					{openAiHostHeaderSelected && (
+						<VSCodeTextField
+							value={apiConfiguration?.openAiHostHeader || ""}
+							onInput={handleInputChange("openAiHostHeader")}
+							placeholder="custom-api-hostname.example.com"
+							className="w-full mt-1"
+						/>
+					)}
+				</div>
+
 				<div className="flex flex-col gap-3">
 					<div className="text-sm text-vscode-descriptionForeground">
 						{t("settings:providers.customModel.capabilities")}
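
One detail worth noting in the Host-header checkbox above: unchecking it writes an empty string back to openAiHostHeader, and the provider code only spreads the header for truthy values, so clearing the checkbox genuinely stops the header from being sent. A simplified sketch of that interaction (wiring reduced to plain functions, names reused from the diff):

// Simplified stand-in for the settings store and setApiConfigurationField.
const settings: { openAiHostHeader?: string } = { openAiHostHeader: "llm-gateway.internal.example.com" }

function onHostHeaderToggled(checked: boolean) {
	if (!checked) {
		// Mirrors the checkbox handler: clear the stored value when disabled.
		settings.openAiHostHeader = ""
	}
}

onHostHeaderToggled(false)

// Downstream, an empty string is falsy, so no Host entry is merged into the headers.
const headers = { ...(settings.openAiHostHeader ? { Host: settings.openAiHostHeader } : {}) }
console.log(headers) // {}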

webview-ui/src/i18n/locales/ca/settings.json

Lines changed: 2 additions & 0 deletions
@@ -105,6 +105,8 @@
 	"awsCustomArnDesc": "Assegureu-vos que la regió a l'ARN coincideix amb la regió d'AWS seleccionada anteriorment.",
 	"apiKeyStorageNotice": "Les claus API s'emmagatzemen de forma segura a l'Emmagatzematge Secret de VSCode",
 	"useCustomBaseUrl": "Utilitzar URL base personalitzada",
+	"useHostHeader": "Utilitzar capçalera Host personalitzada",
+	"useLegacyFormat": "Utilitzar el format d'API OpenAI antic",
 	"openRouterTransformsText": "Comprimir prompts i cadenes de missatges a la mida del context (<a>Transformacions d'OpenRouter</a>)",
 	"model": "Model",
 	"getOpenRouterApiKey": "Obtenir clau API d'OpenRouter",

webview-ui/src/i18n/locales/de/settings.json

Lines changed: 2 additions & 0 deletions
@@ -109,6 +109,8 @@
 	"glamaApiKey": "Glama API-Schlüssel",
 	"getGlamaApiKey": "Glama API-Schlüssel erhalten",
 	"useCustomBaseUrl": "Benutzerdefinierte Basis-URL verwenden",
+	"useHostHeader": "Benutzerdefinierten Host-Header verwenden",
+	"useLegacyFormat": "Altes OpenAI API-Format verwenden",
 	"requestyApiKey": "Requesty API-Schlüssel",
 	"getRequestyApiKey": "Requesty API-Schlüssel erhalten",
 	"openRouterTransformsText": "Prompts und Nachrichtenketten auf Kontextgröße komprimieren (<a>OpenRouter Transformationen</a>)",

webview-ui/src/i18n/locales/en/settings.json

Lines changed: 2 additions & 0 deletions
@@ -109,6 +109,8 @@
 	"glamaApiKey": "Glama API Key",
 	"getGlamaApiKey": "Get Glama API Key",
 	"useCustomBaseUrl": "Use custom base URL",
+	"useHostHeader": "Use custom Host header",
+	"useLegacyFormat": "Use legacy OpenAI API format",
 	"requestyApiKey": "Requesty API Key",
 	"getRequestyApiKey": "Get Requesty API Key",
 	"openRouterTransformsText": "Compress prompts and message chains to the context size (<a>OpenRouter Transforms</a>)",
