4 changes: 3 additions & 1 deletion packages/types/src/codebase-index.ts
@@ -7,7 +7,7 @@ import { z } from "zod"
export const codebaseIndexConfigSchema = z.object({
codebaseIndexEnabled: z.boolean().optional(),
codebaseIndexQdrantUrl: z.string().optional(),
codebaseIndexEmbedderProvider: z.enum(["openai", "ollama", "openai-compatible"]).optional(),
codebaseIndexEmbedderProvider: z.enum(["openai", "ollama", "openai-compatible", "gemini"]).optional(),
codebaseIndexEmbedderBaseUrl: z.string().optional(),
codebaseIndexEmbedderModelId: z.string().optional(),
})
@@ -22,6 +22,7 @@ export const codebaseIndexModelsSchema = z.object({
openai: z.record(z.string(), z.object({ dimension: z.number() })).optional(),
ollama: z.record(z.string(), z.object({ dimension: z.number() })).optional(),
"openai-compatible": z.record(z.string(), z.object({ dimension: z.number() })).optional(),
gemini: z.record(z.string(), z.object({ dimension: z.number() })).optional(),
})

export type CodebaseIndexModels = z.infer<typeof codebaseIndexModelsSchema>
@@ -36,6 +37,7 @@ export const codebaseIndexProviderSchema = z.object({
codebaseIndexOpenAiCompatibleBaseUrl: z.string().optional(),
codebaseIndexOpenAiCompatibleApiKey: z.string().optional(),
codebaseIndexOpenAiCompatibleModelDimension: z.number().optional(),
codebaseIndexGeminiApiKey: z.string().optional(),
})

export type CodebaseIndexProvider = z.infer<typeof codebaseIndexProviderSchema>
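
With `"gemini"` added to the provider enum, a config that names Gemini as the embedder now passes schema validation. A small usage sketch follows; the package alias and sample values are assumptions for illustration, not part of the PR:

```typescript
// Assumed package alias for packages/types; adjust to the real import path.
import { codebaseIndexConfigSchema } from "@roo-code/types"

// Illustrative config: "gemini" is now an accepted codebaseIndexEmbedderProvider value.
const result = codebaseIndexConfigSchema.safeParse({
	codebaseIndexEnabled: true,
	codebaseIndexQdrantUrl: "http://localhost:6333",
	codebaseIndexEmbedderProvider: "gemini",
})

console.log(result.success) // true — this value would have failed the enum check before this change
```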
1 change: 1 addition & 0 deletions packages/types/src/global-settings.ts
@@ -143,6 +143,7 @@ export const SECRET_STATE_KEYS = [
"codeIndexOpenAiKey",
"codeIndexQdrantApiKey",
"codebaseIndexOpenAiCompatibleApiKey",
"codebaseIndexGeminiApiKey",
] as const satisfies readonly (keyof ProviderSettings)[]
export type SecretState = Pick<ProviderSettings, (typeof SECRET_STATE_KEYS)[number]>

40 changes: 40 additions & 0 deletions src/services/code-index/__tests__/config-manager.spec.ts
@@ -902,6 +902,46 @@ describe("CodeIndexConfigManager", () => {
expect(configManager.isFeatureConfigured).toBe(false)
})

it("should validate Gemini configuration correctly", async () => {
mockContextProxy.getGlobalState.mockImplementation((key: string) => {
if (key === "codebaseIndexConfig") {
return {
codebaseIndexEnabled: true,
codebaseIndexQdrantUrl: "http://qdrant.local",
codebaseIndexEmbedderProvider: "gemini",
}
}
return undefined
})
mockContextProxy.getSecret.mockImplementation((key: string) => {
if (key === "codebaseIndexGeminiApiKey") return "test-gemini-key"
return undefined
})

await configManager.loadConfiguration()
expect(configManager.isFeatureConfigured).toBe(true)
})

it("should return false when Gemini API key is missing", async () => {
mockContextProxy.getGlobalState.mockImplementation((key: string) => {
if (key === "codebaseIndexConfig") {
return {
codebaseIndexEnabled: true,
codebaseIndexQdrantUrl: "http://qdrant.local",
codebaseIndexEmbedderProvider: "gemini",
}
}
return undefined
})
mockContextProxy.getSecret.mockImplementation((key: string) => {
if (key === "codebaseIndexGeminiApiKey") return ""
return undefined
})

await configManager.loadConfiguration()
expect(configManager.isFeatureConfigured).toBe(false)
})

it("should return false when required values are missing", async () => {
mockContextProxy.getGlobalState.mockReturnValue({
codebaseIndexEnabled: true,
70 changes: 70 additions & 0 deletions src/services/code-index/__tests__/service-factory.spec.ts
@@ -3,12 +3,14 @@ import { CodeIndexServiceFactory } from "../service-factory"
import { OpenAiEmbedder } from "../embedders/openai"
import { CodeIndexOllamaEmbedder } from "../embedders/ollama"
import { OpenAICompatibleEmbedder } from "../embedders/openai-compatible"
import { GeminiEmbedder } from "../embedders/gemini"
import { QdrantVectorStore } from "../vector-store/qdrant-client"

// Mock the embedders and vector store
vitest.mock("../embedders/openai")
vitest.mock("../embedders/ollama")
vitest.mock("../embedders/openai-compatible")
vitest.mock("../embedders/gemini")
vitest.mock("../vector-store/qdrant-client")

// Mock the embedding models module
@@ -20,6 +22,7 @@ vitest.mock("../../../shared/embeddingModels", () => ({
const MockedOpenAiEmbedder = OpenAiEmbedder as MockedClass<typeof OpenAiEmbedder>
const MockedCodeIndexOllamaEmbedder = CodeIndexOllamaEmbedder as MockedClass<typeof CodeIndexOllamaEmbedder>
const MockedOpenAICompatibleEmbedder = OpenAICompatibleEmbedder as MockedClass<typeof OpenAICompatibleEmbedder>
const MockedGeminiEmbedder = GeminiEmbedder as MockedClass<typeof GeminiEmbedder>
const MockedQdrantVectorStore = QdrantVectorStore as MockedClass<typeof QdrantVectorStore>

// Import the mocked functions
@@ -259,6 +262,49 @@ describe("CodeIndexServiceFactory", () => {
)
})

it("should create GeminiEmbedder when using Gemini provider", () => {
// Arrange
const testConfig = {
embedderProvider: "gemini",
geminiOptions: {
apiKey: "test-gemini-api-key",
},
}
mockConfigManager.getConfig.mockReturnValue(testConfig as any)

// Act
factory.createEmbedder()

// Assert
expect(MockedGeminiEmbedder).toHaveBeenCalledWith("test-gemini-api-key")
})

it("should throw error when Gemini API key is missing", () => {
// Arrange
const testConfig = {
embedderProvider: "gemini",
geminiOptions: {
apiKey: undefined,
},
}
mockConfigManager.getConfig.mockReturnValue(testConfig as any)

// Act & Assert
expect(() => factory.createEmbedder()).toThrow("Gemini configuration missing for embedder creation")
})

it("should throw error when Gemini options are missing", () => {
// Arrange
const testConfig = {
embedderProvider: "gemini",
geminiOptions: undefined,
}
mockConfigManager.getConfig.mockReturnValue(testConfig as any)

// Act & Assert
expect(() => factory.createEmbedder()).toThrow("Gemini configuration missing for embedder creation")
})

it("should throw error for invalid embedder provider", () => {
// Arrange
const testConfig = {
@@ -454,6 +500,30 @@ describe("CodeIndexServiceFactory", () => {
)
})

it("should use fixed dimension 768 for Gemini provider", () => {
// Arrange
const testConfig = {
embedderProvider: "gemini",
modelId: "text-embedding-004", // This is ignored by Gemini
qdrantUrl: "http://localhost:6333",
qdrantApiKey: "test-key",
}
mockConfigManager.getConfig.mockReturnValue(testConfig as any)

// Act
factory.createVectorStore()

// Assert
// getModelDimension should not be called for Gemini
expect(mockGetModelDimension).not.toHaveBeenCalled()
expect(MockedQdrantVectorStore).toHaveBeenCalledWith(
"/test/workspace",
"http://localhost:6333",
768, // Fixed dimension for Gemini
"test-key",
)
})

it("should use default model when config.modelId is undefined", () => {
// Arrange
const testConfig = {
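
The `service-factory.ts` changes these tests cover are collapsed in this view. Below is a minimal sketch of the two Gemini branches the tests pin down, written as standalone functions for illustration; the function names are invented here, and error handling for unknown models is omitted:

```typescript
import { GeminiEmbedder } from "./embedders/gemini"
import { QdrantVectorStore } from "./vector-store/qdrant-client"
import { getModelDimension } from "../../shared/embeddingModels"
import type { CodeIndexConfigManager } from "./config-manager"

// Sketch of the gemini branch added to CodeIndexServiceFactory.createEmbedder():
// require an API key, then construct the embedder with just that key.
function createEmbedderSketch(configManager: CodeIndexConfigManager) {
	const config = configManager.getConfig()
	if (config.embedderProvider === "gemini") {
		if (!config.geminiOptions?.apiKey) {
			throw new Error("Gemini configuration missing for embedder creation")
		}
		return new GeminiEmbedder(config.geminiOptions.apiKey)
	}
	// ...other providers unchanged
}

// Sketch of the vector-store sizing: for Gemini, getModelDimension() is skipped
// and a fixed 768-dimension vector size is used, matching text-embedding-004.
function createVectorStoreSketch(configManager: CodeIndexConfigManager, workspacePath: string) {
	const config = configManager.getConfig()
	const vectorSize =
		config.embedderProvider === "gemini"
			? 768
			: getModelDimension(config.embedderProvider, config.modelId ?? "")
	return new QdrantVectorStore(workspacePath, config.qdrantUrl ?? "", vectorSize, config.qdrantApiKey)
}
```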
22 changes: 22 additions & 0 deletions src/services/code-index/config-manager.ts
@@ -16,6 +16,7 @@ export class CodeIndexConfigManager {
private openAiOptions?: ApiHandlerOptions
private ollamaOptions?: ApiHandlerOptions
private openAiCompatibleOptions?: { baseUrl: string; apiKey: string; modelDimension?: number }
private geminiOptions?: { apiKey: string }
private qdrantUrl?: string = "http://localhost:6333"
private qdrantApiKey?: string
private searchMinScore?: number
@@ -55,6 +56,7 @@
const openAiCompatibleModelDimension = this.contextProxy?.getGlobalState(
"codebaseIndexOpenAiCompatibleModelDimension",
) as number | undefined
const geminiApiKey = this.contextProxy?.getSecret("codebaseIndexGeminiApiKey") ?? ""

// Update instance variables with configuration
this.isEnabled = codebaseIndexEnabled || false
@@ -68,6 +70,8 @@
this.embedderProvider = "ollama"
} else if (codebaseIndexEmbedderProvider === "openai-compatible") {
this.embedderProvider = "openai-compatible"
} else if (codebaseIndexEmbedderProvider === "gemini") {
this.embedderProvider = "gemini"
} else {
this.embedderProvider = "openai"
}
@@ -86,6 +90,8 @@
modelDimension: openAiCompatibleModelDimension,
}
: undefined

this.geminiOptions = geminiApiKey ? { apiKey: geminiApiKey } : undefined
}

/**
@@ -101,6 +107,7 @@
openAiOptions?: ApiHandlerOptions
ollamaOptions?: ApiHandlerOptions
openAiCompatibleOptions?: { baseUrl: string; apiKey: string }
geminiOptions?: { apiKey: string }
qdrantUrl?: string
qdrantApiKey?: string
searchMinScore?: number
@@ -118,6 +125,7 @@
openAiCompatibleBaseUrl: this.openAiCompatibleOptions?.baseUrl ?? "",
openAiCompatibleApiKey: this.openAiCompatibleOptions?.apiKey ?? "",
openAiCompatibleModelDimension: this.openAiCompatibleOptions?.modelDimension,
geminiApiKey: this.geminiOptions?.apiKey ?? "",
qdrantUrl: this.qdrantUrl ?? "",
qdrantApiKey: this.qdrantApiKey ?? "",
}
@@ -137,6 +145,7 @@
openAiOptions: this.openAiOptions,
ollamaOptions: this.ollamaOptions,
openAiCompatibleOptions: this.openAiCompatibleOptions,
geminiOptions: this.geminiOptions,
qdrantUrl: this.qdrantUrl,
qdrantApiKey: this.qdrantApiKey,
searchMinScore: this.searchMinScore,
@@ -165,6 +174,10 @@
const apiKey = this.openAiCompatibleOptions?.apiKey
const qdrantUrl = this.qdrantUrl
return !!(baseUrl && apiKey && qdrantUrl)
} else if (this.embedderProvider === "gemini") {
const apiKey = this.geminiOptions?.apiKey
const qdrantUrl = this.qdrantUrl
return !!(apiKey && qdrantUrl)
}
return false // Should not happen if embedderProvider is always set correctly
}
@@ -185,6 +198,7 @@
const prevOpenAiCompatibleBaseUrl = prev?.openAiCompatibleBaseUrl ?? ""
const prevOpenAiCompatibleApiKey = prev?.openAiCompatibleApiKey ?? ""
const prevOpenAiCompatibleModelDimension = prev?.openAiCompatibleModelDimension
const prevGeminiApiKey = prev?.geminiApiKey ?? ""
const prevQdrantUrl = prev?.qdrantUrl ?? ""
const prevQdrantApiKey = prev?.qdrantApiKey ?? ""

@@ -242,6 +256,13 @@
}
}

if (this.embedderProvider === "gemini") {
const currentGeminiApiKey = this.geminiOptions?.apiKey ?? ""
if (prevGeminiApiKey !== currentGeminiApiKey) {
return true
}
}

// Qdrant configuration changes
const currentQdrantUrl = this.qdrantUrl ?? ""
const currentQdrantApiKey = this.qdrantApiKey ?? ""
@@ -292,6 +313,7 @@
openAiOptions: this.openAiOptions,
ollamaOptions: this.ollamaOptions,
openAiCompatibleOptions: this.openAiCompatibleOptions,
geminiOptions: this.geminiOptions,
qdrantUrl: this.qdrantUrl,
qdrantApiKey: this.qdrantApiKey,
searchMinScore: this.searchMinScore,
3 changes: 3 additions & 0 deletions src/services/code-index/constants/index.ts
@@ -23,3 +23,6 @@ export const PARSING_CONCURRENCY = 10
export const MAX_BATCH_TOKENS = 100000
export const MAX_ITEM_TOKENS = 8191
export const BATCH_PROCESSING_CONCURRENCY = 10

/**Gemini Embedder */
export const GEMINI_MAX_ITEM_TOKENS = 2048
58 changes: 58 additions & 0 deletions src/services/code-index/embedders/__tests__/gemini.spec.ts
@@ -0,0 +1,58 @@
import { vitest, describe, it, expect, beforeEach } from "vitest"
import type { MockedClass } from "vitest"
import { GeminiEmbedder } from "../gemini"
import { OpenAICompatibleEmbedder } from "../openai-compatible"

// Mock the OpenAICompatibleEmbedder
vitest.mock("../openai-compatible")

const MockedOpenAICompatibleEmbedder = OpenAICompatibleEmbedder as MockedClass<typeof OpenAICompatibleEmbedder>

describe("GeminiEmbedder", () => {
let embedder: GeminiEmbedder

beforeEach(() => {
vitest.clearAllMocks()
})

describe("constructor", () => {
it("should create an instance with correct fixed values passed to OpenAICompatibleEmbedder", () => {
// Arrange
const apiKey = "test-gemini-api-key"

// Act
embedder = new GeminiEmbedder(apiKey)

// Assert
expect(MockedOpenAICompatibleEmbedder).toHaveBeenCalledWith(
"https://generativelanguage.googleapis.com/v1beta/openai/",
apiKey,
"text-embedding-004",
2048,
)
})

it("should throw error when API key is not provided", () => {
// Act & Assert
expect(() => new GeminiEmbedder("")).toThrow("API key is required for Gemini embedder")
expect(() => new GeminiEmbedder(null as any)).toThrow("API key is required for Gemini embedder")
expect(() => new GeminiEmbedder(undefined as any)).toThrow("API key is required for Gemini embedder")
})
})

describe("embedderInfo", () => {
it("should return correct embedder info with dimension 768", () => {
// Arrange
embedder = new GeminiEmbedder("test-api-key")

// Act
const info = embedder.embedderInfo

// Assert
expect(info).toEqual({
name: "gemini",
})
expect(GeminiEmbedder.dimension).toBe(768)
})
})
})
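
The new `embedders/gemini.ts` file itself is collapsed in this view. Below is a minimal sketch consistent with the tests above: the class wraps `OpenAICompatibleEmbedder` with Gemini's OpenAI-compatible endpoint, a fixed model, and the 2048-token item limit from `constants/index.ts`. Everything outside the constructor and `embedderInfo` (in particular the pass-through method name) is an assumption:

```typescript
import { OpenAICompatibleEmbedder } from "./openai-compatible"
import { GEMINI_MAX_ITEM_TOKENS } from "../constants"

export class GeminiEmbedder {
	// Gemini's text-embedding-004 produces 768-dimensional vectors, which is
	// why the service factory skips getModelDimension() for this provider.
	static readonly dimension = 768

	private readonly embedder: OpenAICompatibleEmbedder

	constructor(apiKey: string) {
		if (!apiKey) {
			throw new Error("API key is required for Gemini embedder")
		}
		// Gemini exposes an OpenAI-compatible embeddings endpoint, so the class
		// delegates to OpenAICompatibleEmbedder with fixed endpoint, model, and item limit.
		this.embedder = new OpenAICompatibleEmbedder(
			"https://generativelanguage.googleapis.com/v1beta/openai/",
			apiKey,
			"text-embedding-004",
			GEMINI_MAX_ITEM_TOKENS, // 2048, from constants/index.ts
		)
	}

	get embedderInfo() {
		return { name: "gemini" as const }
	}

	// Embedding requests pass straight through to the wrapped embedder
	// (method name assumed from the shared embedder interface, not shown in this view).
	createEmbeddings(texts: string[], model?: string) {
		return this.embedder.createEmbeddings(texts, model)
	}
}
```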