16 changes: 15 additions & 1 deletion package.json
@@ -2955,6 +2955,13 @@
"type": "boolean",
"default": false,
"description": "Whether the model supports thinking capabilities"
},
"requestHeaders": {
"type": "object",
"description": "Additional HTTP headers to include with requests to this model. These reserved headers are not allowed and ignored if present: ['api-key', 'authorization', 'content-type', 'openai-intent', 'x-github-api-version', 'x-initiator', 'x-interaction-id', 'x-interaction-type', 'x-onbehalf-extension-id', 'x-request-id', 'x-vscode-user-agent-library-version']",
"additionalProperties": {
"type": "string"
}
}
},
"required": [
@@ -3024,6 +3031,13 @@
"type": "boolean",
"default": false,
"description": "Whether the model supports thinking capabilities"
},
"requestHeaders": {
"type": "object",
"description": "Additional HTTP headers to include with requests to this model. These reserved headers are not allowed and ignored if present: ['api-key', 'authorization', 'content-type', 'openai-intent', 'x-github-api-version', 'x-initiator', 'x-interaction-id', 'x-interaction-type', 'x-onbehalf-extension-id', 'x-request-id', 'x-vscode-user-agent-library-version']",
"additionalProperties": {
"type": "string"
}
}
},
"required": [
@@ -4267,4 +4281,4 @@
"string_decoder": "npm:[email protected]",
"node-gyp": "npm:[email protected]"
}
}
}
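The new `requestHeaders` schema property accepts an object of string values that are forwarded with every request to the model; the reserved headers listed in the description are dropped. Below is a minimal sketch of a matching `chat.customOAIModels` entry — the model key, URL, and the `X-Example-Tenant` header are made-up values for illustration, and the field shape follows the setting type extended in configurationService.ts further down.

```typescript
// Illustrative only: mirrors the shape of the chat.customOAIModels setting.
const customOAIModels: Record<string, {
	name: string;
	url: string;
	toolCalling: boolean;
	vision: boolean;
	maxInputTokens: number;
	maxOutputTokens: number;
	requiresAPIKey?: boolean;
	thinking?: boolean;
	requestHeaders?: Record<string, string>;
}> = {
	'my-proxy-model': {
		name: 'My Proxy Model',
		url: 'https://example.com/v1',
		toolCalling: true,
		vision: false,
		maxInputTokens: 100000,
		maxOutputTokens: 8192,
		// Sent with every request to this model; reserved names such as
		// 'authorization' or 'x-request-id' would be ignored.
		requestHeaders: {
			'X-Example-Tenant': 'team-a'
		}
	}
};
```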
7 changes: 6 additions & 1 deletion src/extension/byok/common/byokProvider.ts
@@ -55,6 +55,7 @@ export interface BYOKModelCapabilities {
vision: boolean;
thinking?: boolean;
editTools?: EndpointEditToolName[];
requestHeaders?: Record<string, string>;
}

export interface BYOKModelRegistry {
@@ -117,7 +118,7 @@ export function resolveModelInfo(modelId: string, providerName: string, knownMod
}
const modelName = knownModelInfo?.name || modelId;
const contextWinow = knownModelInfo ? (knownModelInfo.maxInputTokens + knownModelInfo.maxOutputTokens) : 128000;
return {
const modelInfo: IChatModelInformation = {
id: modelId,
name: modelName,
version: '1.0.0',
@@ -141,6 +142,10 @@
is_chat_fallback: false,
model_picker_enabled: true
};
if (knownModelInfo?.requestHeaders && Object.keys(knownModelInfo.requestHeaders).length > 0) {
modelInfo.requestHeaders = { ...knownModelInfo.requestHeaders };
}
return modelInfo;
}

export function byokKnownModelsToAPIInfo(providerName: string, knownModels: BYOKKnownModels | undefined): LanguageModelChatInformation[] {
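The guard above only attaches `requestHeaders` to the resolved model info when the map is defined and non-empty, so models configured without custom headers are unaffected. A tiny sketch of that behavior, using a hypothetical known-model entry and a made-up header name:

```typescript
// Hypothetical metadata; 'X-Example-Tenant' is invented for illustration.
const knownModelInfo: { requestHeaders?: Record<string, string> } = {
	requestHeaders: { 'X-Example-Tenant': 'team-a' }
};

// Same copy-if-non-empty guard as in resolveModelInfo.
const requestHeaders =
	knownModelInfo.requestHeaders && Object.keys(knownModelInfo.requestHeaders).length > 0
		? { ...knownModelInfo.requestHeaders }
		: undefined;
```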
50 changes: 49 additions & 1 deletion src/extension/byok/node/openAIEndpoint.ts
@@ -44,6 +44,21 @@ function hydrateBYOKErrorMessages(response: ChatResponse): ChatResponse {
}

export class OpenAIEndpoint extends ChatEndpoint {
private static readonly _reservedHeaders: ReadonlySet<string> = new Set([
'api-key',
'authorization',
'content-type',
'openai-intent',
'x-github-api-version',
'x-initiator',
'x-interaction-id',
'x-interaction-type',
'x-onbehalf-extension-id',
'x-request-id',
'x-vscode-user-agent-library-version'
]);

private readonly _customHeaders: Record<string, string>;
constructor(
protected readonly modelMetadata: IChatModelInformation,
protected readonly _apiKey: string,
@@ -58,7 +73,7 @@ export class OpenAIEndpoint extends ChatEndpoint {
@IInstantiationService protected instantiationService: IInstantiationService,
@IConfigurationService configurationService: IConfigurationService,
@IExperimentationService expService: IExperimentationService,
@ILogService logService: ILogService
@ILogService protected logService: ILogService
) {
super(
modelMetadata,
@@ -74,6 +89,27 @@
expService,
logService
);
this._customHeaders = this._sanitizeCustomHeaders(modelMetadata.requestHeaders);
}

private _sanitizeCustomHeaders(headers: Readonly<Record<string, string>> | undefined): Record<string, string> {
if (!headers) {
return {};
}
const sanitized: Record<string, string> = {};
for (const [rawKey, rawValue] of Object.entries(headers)) {
const key = rawKey.trim();
if (!key) {
continue;
}
const lowerKey = key.toLowerCase();
if (OpenAIEndpoint._reservedHeaders.has(lowerKey)) {
this.logService.warn(`[OpenAIEndpoint] Ignoring reserved header '${key}' for model '${this.modelMetadata.id}'.`);
continue;
}
sanitized[key] = rawValue;
}
return sanitized;
}

override createRequestBody(options: ICreateEndpointBodyOptions): IEndpointBody {
@@ -149,6 +185,18 @@
} else {
headers['Authorization'] = `Bearer ${this._apiKey}`;
}
for (const [key, value] of Object.entries(this._customHeaders)) {
const lowerKey = key.toLowerCase();
if (OpenAIEndpoint._reservedHeaders.has(lowerKey)) {
continue;
}
const existingKey = Object.keys(headers).find(headerKey => headerKey.toLowerCase() === lowerKey);
if (existingKey) {
this.logService.warn(`[OpenAIEndpoint] Ignoring custom header '${key}' for model '${this.modelMetadata.id}' because it conflicts with an existing header.`);
continue;
}
headers[key] = value;
}
return headers;
}

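Taken together, `_sanitizeCustomHeaders` and the merge loop in `createRequestHeaders` mean custom headers are checked case-insensitively against the reserved list, blank keys are dropped, and a custom header never overrides one the endpoint already sets. The standalone sketch below is an assumption-level restatement of that behavior, not the shipped class:

```typescript
const RESERVED = new Set([
	'api-key', 'authorization', 'content-type', 'openai-intent',
	'x-github-api-version', 'x-initiator', 'x-interaction-id',
	'x-interaction-type', 'x-onbehalf-extension-id', 'x-request-id',
	'x-vscode-user-agent-library-version'
]);

// Sanitize custom headers and merge them without clobbering existing ones.
function mergeCustomHeaders(
	base: Record<string, string>,
	custom: Record<string, string>
): Record<string, string> {
	const merged = { ...base };
	for (const [rawKey, value] of Object.entries(custom)) {
		const key = rawKey.trim();
		if (!key || RESERVED.has(key.toLowerCase())) {
			continue; // blank or reserved name: ignored
		}
		if (Object.keys(merged).some(k => k.toLowerCase() === key.toLowerCase())) {
			continue; // conflicts with a header the endpoint already sets
		}
		merged[key] = value;
	}
	return merged;
}

// 'authorization' is reserved and 'user-agent' conflicts with an existing
// header, so only the made-up 'X-Example-Tenant' header is added.
const headers = mergeCustomHeaders(
	{ 'Authorization': 'Bearer <key>', 'User-Agent': 'vscode' },
	{ 'authorization': 'Bearer other', 'user-agent': 'spoof', 'X-Example-Tenant': 'team-a' }
);
```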
@@ -19,6 +19,7 @@ interface ModelConfig {
editTools?: EndpointEditToolName[];
requiresAPIKey?: boolean;
thinking?: boolean;
requestHeaders?: Record<string, string>;
}

interface ModelQuickPickItem extends QuickPickItem {
11 changes: 8 additions & 3 deletions src/extension/byok/vscode-node/customOAIProvider.ts
@@ -40,6 +40,7 @@ export function resolveCustomOAIUrl(modelId: string, url: string): string {
interface CustomOAIModelInfo extends LanguageModelChatInformation {
url: string;
thinking: boolean;
requestHeaders?: Record<string, string>;
}

export class CustomOAIBYOKModelProvider implements BYOKModelProvider<CustomOAIModelInfo> {
@@ -67,8 +68,8 @@ export class CustomOAIBYOKModelProvider implements BYOKModelProvider<CustomOAIMo
return resolveCustomOAIUrl(modelId, url);
}

private getUserModelConfig(): Record<string, { name: string; url: string; toolCalling: boolean; vision: boolean; maxInputTokens: number; maxOutputTokens: number; requiresAPIKey: boolean; thinking?: boolean; editTools?: EndpointEditToolName[] }> {
const modelConfig = this._configurationService.getConfig(this.getConfigKey()) as Record<string, { name: string; url: string; toolCalling: boolean; vision: boolean; maxInputTokens: number; maxOutputTokens: number; requiresAPIKey: boolean; thinking?: boolean; editTools?: EndpointEditToolName[] }>;
private getUserModelConfig(): Record<string, { name: string; url: string; toolCalling: boolean; vision: boolean; maxInputTokens: number; maxOutputTokens: number; requiresAPIKey: boolean; thinking?: boolean; editTools?: EndpointEditToolName[]; requestHeaders?: Record<string, string> }> {
const modelConfig = this._configurationService.getConfig(this.getConfigKey()) as Record<string, { name: string; url: string; toolCalling: boolean; vision: boolean; maxInputTokens: number; maxOutputTokens: number; requiresAPIKey: boolean; thinking?: boolean; editTools?: EndpointEditToolName[]; requestHeaders?: Record<string, string> }>;
return modelConfig;
}

@@ -90,6 +91,7 @@ export class CustomOAIBYOKModelProvider implements BYOKModelProvider<CustomOAIMo
maxOutputTokens: modelInfo.maxOutputTokens,
thinking: modelInfo.thinking,
editTools: modelInfo.editTools,
requestHeaders: modelInfo.requestHeaders ? { ...modelInfo.requestHeaders } : undefined
};
}
return models;
@@ -135,6 +137,7 @@ export class CustomOAIBYOKModelProvider implements BYOKModelProvider<CustomOAIMo
editTools: capabilities.editTools
},
thinking: capabilities.thinking || false,
requestHeaders: capabilities.requestHeaders,
};
return baseInfo;
}
@@ -173,6 +176,7 @@ export class CustomOAIBYOKModelProvider implements BYOKModelProvider<CustomOAIMo
url: model.url,
thinking: model.thinking,
editTools: model.capabilities.editTools?.filter(isEndpointEditToolName),
requestHeaders: model.requestHeaders,
});
const openAIChatEndpoint = this._instantiationService.createInstance(OpenAIEndpoint, modelInfo, apiKey ?? '', model.url);
return this._lmWrapper.provideLanguageModelResponse(openAIChatEndpoint, messages, options, options.requestInitiator, progress, token);
@@ -196,7 +200,8 @@ export class CustomOAIBYOKModelProvider implements BYOKModelProvider<CustomOAIMo
vision: !!model.capabilities?.imageInput || false,
name: model.name,
url: model.url,
thinking: model.thinking
thinking: model.thinking,
requestHeaders: model.requestHeaders
});
const openAIChatEndpoint = this._instantiationService.createInstance(OpenAIEndpoint, modelInfo, apiKey ?? '', model.url);
return this._lmWrapper.provideTokenCount(openAIChatEndpoint, text);
2 changes: 1 addition & 1 deletion src/platform/configuration/common/configurationService.ts
@@ -799,7 +799,7 @@ export namespace ConfigKey {
/** BYOK */
export const OllamaEndpoint = defineSetting<string>('chat.byok.ollamaEndpoint', 'http://localhost:11434');
export const AzureModels = defineSetting<Record<string, { name: string; url: string; toolCalling: boolean; vision: boolean; maxInputTokens: number; maxOutputTokens: number; requiresAPIKey?: boolean; thinking?: boolean }>>('chat.azureModels', {});
export const CustomOAIModels = defineSetting<Record<string, { name: string; url: string; toolCalling: boolean; vision: boolean; maxInputTokens: number; maxOutputTokens: number; requiresAPIKey?: boolean; thinking?: boolean }>>('chat.customOAIModels', {});
export const CustomOAIModels = defineSetting<Record<string, { name: string; url: string; toolCalling: boolean; vision: boolean; maxInputTokens: number; maxOutputTokens: number; requiresAPIKey?: boolean; thinking?: boolean; requestHeaders?: Record<string, string> }>>('chat.customOAIModels', {});
export const AutoFixDiagnostics = defineSetting<boolean>('chat.agent.autoFix', true);
export const NotebookFollowCellExecution = defineSetting<boolean>('chat.notebook.followCellExecution.enabled', false);
export const UseAlternativeNESNotebookFormat = defineExpSetting<boolean>('chat.notebook.enhancedNextEditSuggestions.enabled', false);
1 change: 1 addition & 0 deletions src/platform/endpoint/common/endpointProvider.ts
@@ -91,6 +91,7 @@ export interface IModelAPIResponse {
export type IChatModelInformation = IModelAPIResponse & {
capabilities: IChatModelCapabilities;
urlOrRequestMetadata?: string | RequestMetadata;
requestHeaders?: Readonly<Record<string, string>>;
};

export function isChatModelInformation(model: IModelAPIResponse): model is IChatModelInformation {