2 changes: 2 additions & 0 deletions packages/types/src/codebase-index.ts
@@ -36,6 +36,7 @@ export const codebaseIndexConfigSchema = z.object({
// OpenAI Compatible specific fields
codebaseIndexOpenAiCompatibleBaseUrl: z.string().optional(),
codebaseIndexOpenAiCompatibleModelDimension: z.number().optional(),
codebaseIndexOpenAiCompatibleHeaders: z.record(z.string()).optional(),
})

export type CodebaseIndexConfig = z.infer<typeof codebaseIndexConfigSchema>
@@ -65,6 +66,7 @@ export const codebaseIndexProviderSchema = z.object({
codebaseIndexOpenAiCompatibleBaseUrl: z.string().optional(),
codebaseIndexOpenAiCompatibleApiKey: z.string().optional(),
codebaseIndexOpenAiCompatibleModelDimension: z.number().optional(),
codebaseIndexOpenAiCompatibleHeaders: z.record(z.string()).optional(),
codebaseIndexGeminiApiKey: z.string().optional(),
codebaseIndexMistralApiKey: z.string().optional(),
codebaseIndexVercelAiGatewayApiKey: z.string().optional(),
6 changes: 4 additions & 2 deletions src/services/code-index/config-manager.ts
@@ -16,7 +16,7 @@ export class CodeIndexConfigManager {
private modelDimension?: number
private openAiOptions?: ApiHandlerOptions
private ollamaOptions?: ApiHandlerOptions
private openAiCompatibleOptions?: { baseUrl: string; apiKey: string }
private openAiCompatibleOptions?: { baseUrl: string; apiKey: string; headers?: Record<string, string> }
private geminiOptions?: { apiKey: string }
private mistralOptions?: { apiKey: string }
private vercelAiGatewayOptions?: { apiKey: string }
@@ -68,6 +68,7 @@ export class CodeIndexConfigManager {
// Fix: Read OpenAI Compatible settings from the correct location within codebaseIndexConfig
const openAiCompatibleBaseUrl = codebaseIndexConfig.codebaseIndexOpenAiCompatibleBaseUrl ?? ""
const openAiCompatibleApiKey = this.contextProxy?.getSecret("codebaseIndexOpenAiCompatibleApiKey") ?? ""
const openAiCompatibleHeaders = codebaseIndexConfig.codebaseIndexOpenAiCompatibleHeaders ?? undefined
const geminiApiKey = this.contextProxy?.getSecret("codebaseIndexGeminiApiKey") ?? ""
const mistralApiKey = this.contextProxy?.getSecret("codebaseIndexMistralApiKey") ?? ""
const vercelAiGatewayApiKey = this.contextProxy?.getSecret("codebaseIndexVercelAiGatewayApiKey") ?? ""
@@ -123,6 +124,7 @@ export class CodeIndexConfigManager {
? {
baseUrl: openAiCompatibleBaseUrl,
apiKey: openAiCompatibleApiKey,
headers: openAiCompatibleHeaders,
Custom header changes don't trigger a service restart. When users update their headers, the doesConfigChangeRequireRestart() method (lines 259-357) should detect this change and return true, similar to how it checks openAiCompatibleBaseUrl and openAiCompatibleApiKey changes. You'll also need to add an openAiCompatibleHeaders field to the PreviousConfigSnapshot type and update the snapshot capture logic in loadConfiguration() to track previous header state.
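A minimal sketch of what this could look like, assuming a flat `Record<string, string>` snapshot field and a shallow key/value comparison (the `headersChanged` helper and the exact `PreviousConfigSnapshot` shape below are illustrative, not taken from this PR):

```ts
// Illustrative sketch only — field names and the helper are assumptions, not the PR's actual code.
interface PreviousConfigSnapshot {
	openAiCompatibleBaseUrl?: string
	openAiCompatibleApiKey?: string
	// New field: remember the previous custom headers so changes can be detected.
	openAiCompatibleHeaders?: Record<string, string>
	// ...other tracked fields
}

// Shallow comparison of two header maps; undefined and {} are treated as equal.
function headersChanged(prev?: Record<string, string>, next?: Record<string, string>): boolean {
	const a = prev ?? {}
	const b = next ?? {}
	const aKeys = Object.keys(a)
	const bKeys = Object.keys(b)
	if (aKeys.length !== bKeys.length) return true
	return aKeys.some((key) => a[key] !== b[key])
}

// In doesConfigChangeRequireRestart(), alongside the existing baseUrl/apiKey checks:
// if (headersChanged(prev.openAiCompatibleHeaders, this.openAiCompatibleOptions?.headers)) return true
//
// And in loadConfiguration(), capture the current headers when building the snapshot:
// openAiCompatibleHeaders: this.openAiCompatibleOptions?.headers
```

A shallow comparison should be enough here since headers are a flat string map; no deep-equality utility is needed.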

}
: undefined

@@ -143,7 +145,7 @@
modelDimension?: number
openAiOptions?: ApiHandlerOptions
ollamaOptions?: ApiHandlerOptions
openAiCompatibleOptions?: { baseUrl: string; apiKey: string }
openAiCompatibleOptions?: { baseUrl: string; apiKey: string; headers?: Record<string, string> }
geminiOptions?: { apiKey: string }
mistralOptions?: { apiKey: string }
vercelAiGatewayOptions?: { apiKey: string }
@@ -112,6 +112,31 @@ describe("OpenAICompatibleEmbedder", () => {
expect(embedder).toBeDefined()
})

it("should create embedder with custom headers", () => {
const customHeaders = {
"X-Custom-Header": "custom-value",
"X-Another-Header": "another-value",
}
embedder = new OpenAICompatibleEmbedder(testBaseUrl, testApiKey, testModelId, undefined, customHeaders)

expect(MockedOpenAI).toHaveBeenCalledWith({
baseURL: testBaseUrl,
apiKey: testApiKey,
defaultHeaders: customHeaders,
})
expect(embedder).toBeDefined()
})

it("should create embedder without custom headers when not provided", () => {
embedder = new OpenAICompatibleEmbedder(testBaseUrl, testApiKey, testModelId, undefined, undefined)

expect(MockedOpenAI).toHaveBeenCalledWith({
baseURL: testBaseUrl,
apiKey: testApiKey,
})
expect(embedder).toBeDefined()
})

it("should throw error when baseUrl is missing", () => {
expect(() => new OpenAICompatibleEmbedder("", testApiKey, testModelId)).toThrow(
"embeddings:validation.baseUrlRequired",
@@ -813,6 +838,81 @@ describe("OpenAICompatibleEmbedder", () => {
expect(baseResult.embeddings[0]).toEqual([0.4, 0.5, 0.6])
})

it("should include custom headers in direct fetch requests", async () => {
const testTexts = ["Test text"]
const customHeaders = {
"X-Custom-Header": "custom-value",
"X-API-Version": "v2",
}
const base64String = createBase64Embedding([0.1, 0.2, 0.3])

// Test Azure URL with custom headers (direct fetch)
const azureEmbedder = new OpenAICompatibleEmbedder(
azureUrl,
testApiKey,
testModelId,
undefined,
customHeaders,
)
const mockFetchResponse = createMockResponse({
data: [{ embedding: base64String }],
usage: { prompt_tokens: 10, total_tokens: 15 },
})
;(global.fetch as MockedFunction<typeof fetch>).mockResolvedValue(mockFetchResponse as any)

const azureResult = await azureEmbedder.createEmbeddings(testTexts)
expect(global.fetch).toHaveBeenCalledWith(
azureUrl,
expect.objectContaining({
method: "POST",
headers: expect.objectContaining({
"Content-Type": "application/json",
"api-key": testApiKey,
Authorization: `Bearer ${testApiKey}`,
"X-Custom-Header": "custom-value",
"X-API-Version": "v2",
}),
}),
)
expect(mockEmbeddingsCreate).not.toHaveBeenCalled()
expectEmbeddingValues(azureResult.embeddings[0], [0.1, 0.2, 0.3])
})

it("should handle custom headers that override default headers", async () => {
const testTexts = ["Test text"]
const customHeaders = {
"api-key": "override-key", // Override the default api-key
"X-Custom-Header": "custom-value",
}
const base64String = createBase64Embedding([0.1, 0.2, 0.3])

const azureEmbedder = new OpenAICompatibleEmbedder(
azureUrl,
testApiKey,
testModelId,
undefined,
customHeaders,
)
const mockFetchResponse = createMockResponse({
data: [{ embedding: base64String }],
usage: { prompt_tokens: 10, total_tokens: 15 },
})
;(global.fetch as MockedFunction<typeof fetch>).mockResolvedValue(mockFetchResponse as any)

const azureResult = await azureEmbedder.createEmbeddings(testTexts)
expect(global.fetch).toHaveBeenCalledWith(
azureUrl,
expect.objectContaining({
method: "POST",
headers: expect.objectContaining({
"api-key": "override-key", // Custom header overrides default
"X-Custom-Header": "custom-value",
}),
}),
)
expectEmbeddingValues(azureResult.embeddings[0], [0.1, 0.2, 0.3])
})

it.each([
[401, "Authentication failed. Please check your API key."],
[500, "Failed to create embeddings after 3 attempts"],
43 changes: 33 additions & 10 deletions src/services/code-index/embedders/openai-compatible.ts
@@ -37,6 +37,7 @@ export class OpenAICompatibleEmbedder implements IEmbedder {
private readonly defaultModelId: string
private readonly baseUrl: string
private readonly apiKey: string
private readonly customHeaders?: Record<string, string>
private readonly isFullUrl: boolean
private readonly maxItemTokens: number

@@ -56,8 +57,15 @@ export class OpenAICompatibleEmbedder implements IEmbedder {
* @param apiKey The API key for authentication
* @param modelId Optional model identifier (defaults to "text-embedding-3-small")
* @param maxItemTokens Optional maximum tokens per item (defaults to MAX_ITEM_TOKENS)
* @param customHeaders Optional custom headers to include in requests
*/
constructor(baseUrl: string, apiKey: string, modelId?: string, maxItemTokens?: number) {
constructor(
baseUrl: string,
apiKey: string,
modelId?: string,
maxItemTokens?: number,
customHeaders?: Record<string, string>,
) {
if (!baseUrl) {
throw new Error(t("embeddings:validation.baseUrlRequired"))
}
@@ -67,13 +75,21 @@ export class OpenAICompatibleEmbedder implements IEmbedder {

this.baseUrl = baseUrl
this.apiKey = apiKey
this.customHeaders = customHeaders

// Wrap OpenAI client creation to handle invalid API key characters
try {
this.embeddingsClient = new OpenAI({
// If custom headers are provided, we need to use defaultHeaders in OpenAI config
const openAIConfig: any = {
baseURL: baseUrl,
apiKey: apiKey,
})
}

if (customHeaders) {
openAIConfig.defaultHeaders = customHeaders
}

this.embeddingsClient = new OpenAI(openAIConfig)
} catch (error) {
// Use the error handler to transform ByteString conversion errors
throw handleOpenAIError(error, "OpenAI Compatible")
@@ -204,15 +220,22 @@ export class OpenAICompatibleEmbedder implements IEmbedder {
batchTexts: string[],
model: string,
): Promise<OpenAIEmbeddingResponse> {
const headers: Record<string, string> = {
"Content-Type": "application/json",
// Azure OpenAI uses 'api-key' header, while OpenAI uses 'Authorization'
// We'll try 'api-key' first for Azure compatibility
"api-key": this.apiKey,
Authorization: `Bearer ${this.apiKey}`,
}

// Add custom headers if provided
if (this.customHeaders) {
Object.assign(headers, this.customHeaders)
}

const response = await fetch(url, {
method: "POST",
headers: {
"Content-Type": "application/json",
// Azure OpenAI uses 'api-key' header, while OpenAI uses 'Authorization'
// We'll try 'api-key' first for Azure compatibility
"api-key": this.apiKey,
Authorization: `Bearer ${this.apiKey}`,
},
headers,
body: JSON.stringify({
input: batchTexts,
model: model,
2 changes: 1 addition & 1 deletion src/services/code-index/interfaces/config.ts
@@ -11,7 +11,7 @@ export interface CodeIndexConfig {
modelDimension?: number // Generic dimension property for all providers
openAiOptions?: ApiHandlerOptions
ollamaOptions?: ApiHandlerOptions
openAiCompatibleOptions?: { baseUrl: string; apiKey: string }
openAiCompatibleOptions?: { baseUrl: string; apiKey: string; headers?: Record<string, string> }
geminiOptions?: { apiKey: string }
mistralOptions?: { apiKey: string }
vercelAiGatewayOptions?: { apiKey: string }
2 changes: 2 additions & 0 deletions src/services/code-index/service-factory.ts
@@ -63,6 +63,8 @@ export class CodeIndexServiceFactory {
config.openAiCompatibleOptions.baseUrl,
config.openAiCompatibleOptions.apiKey,
config.modelId,
undefined, // maxItemTokens (use default)
config.openAiCompatibleOptions.headers,
)
} else if (provider === "gemini") {
if (!config.geminiOptions?.apiKey) {