Skip to content

Commit 514eaa9

Browse files
committed
refactor(code-index): move default model ID logic to embedders
1 parent 91cba93 commit 514eaa9

File tree

5 files changed: +319 additions, −63 deletions

src/services/code-index/__tests__/service-factory.spec.ts

Lines changed: 185 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -584,4 +584,189 @@ describe("CodeIndexServiceFactory", () => {
584584
expect(() => factory.createVectorStore()).toThrow("Qdrant URL missing for vector store creation")
585585
})
586586
})
587+
588+
describe("validateEmbedderConfig", () => {
589+
beforeEach(() => {
590+
vitest.clearAllMocks()
591+
// Mock the static validation methods
592+
MockedOpenAiEmbedder.validateEndpoint = vitest.fn().mockResolvedValue(true)
593+
MockedCodeIndexOllamaEmbedder.validateEndpoint = vitest.fn().mockResolvedValue(true)
594+
MockedOpenAICompatibleEmbedder.validateEndpoint = vitest.fn().mockResolvedValue(true)
595+
})
596+
597+
it("should validate OpenAI configuration with provided config", async () => {
598+
// Arrange
599+
const providedConfig = {
600+
embedderProvider: "openai",
601+
modelId: "text-embedding-3-large",
602+
openAiOptions: {
603+
openAiNativeApiKey: "test-api-key",
604+
},
605+
}
606+
607+
// Act
608+
const result = await factory.validateEmbedderConfig(providedConfig)
609+
610+
// Assert
611+
expect(result).toBe(true)
612+
expect(MockedOpenAiEmbedder.validateEndpoint).toHaveBeenCalledWith("test-api-key", "text-embedding-3-large")
613+
})
614+
615+
it("should validate Ollama configuration with provided config", async () => {
616+
// Arrange
617+
const providedConfig = {
618+
embedderProvider: "ollama",
619+
modelId: "nomic-embed-text:latest",
620+
ollamaOptions: {
621+
ollamaBaseUrl: "http://localhost:11434",
622+
},
623+
}
624+
625+
// Act
626+
const result = await factory.validateEmbedderConfig(providedConfig)
627+
628+
// Assert
629+
expect(result).toBe(true)
630+
expect(MockedCodeIndexOllamaEmbedder.validateEndpoint).toHaveBeenCalledWith(
631+
"http://localhost:11434",
632+
"nomic-embed-text:latest",
633+
)
634+
})
635+
636+
it("should validate OpenAI-compatible configuration with provided config", async () => {
637+
// Arrange
638+
const providedConfig = {
639+
embedderProvider: "openai-compatible",
640+
modelId: "custom-model",
641+
openAiCompatibleOptions: {
642+
baseUrl: "https://api.example.com/v1",
643+
apiKey: "test-api-key",
644+
},
645+
}
646+
647+
// Act
648+
const result = await factory.validateEmbedderConfig(providedConfig)
649+
650+
// Assert
651+
expect(result).toBe(true)
652+
expect(MockedOpenAICompatibleEmbedder.validateEndpoint).toHaveBeenCalledWith(
653+
"https://api.example.com/v1",
654+
"test-api-key",
655+
"custom-model",
656+
)
657+
})
658+
659+
it("should use current config when no config is provided", async () => {
660+
// Arrange
661+
const currentConfig = {
662+
embedderProvider: "openai",
663+
modelId: "text-embedding-3-small",
664+
openAiOptions: {
665+
openAiNativeApiKey: "current-api-key",
666+
},
667+
}
668+
mockConfigManager.getConfig.mockReturnValue(currentConfig as any)
669+
670+
// Act
671+
const result = await factory.validateEmbedderConfig()
672+
673+
// Assert
674+
expect(result).toBe(true)
675+
expect(mockConfigManager.getConfig).toHaveBeenCalled()
676+
expect(MockedOpenAiEmbedder.validateEndpoint).toHaveBeenCalledWith(
677+
"current-api-key",
678+
"text-embedding-3-small",
679+
)
680+
})
681+
682+
it("should throw error for missing OpenAI API key", async () => {
683+
// Arrange
684+
const providedConfig = {
685+
embedderProvider: "openai",
686+
modelId: "text-embedding-3-large",
687+
openAiOptions: {
688+
openAiNativeApiKey: undefined,
689+
},
690+
}
691+
692+
// Act & Assert
693+
await expect(factory.validateEmbedderConfig(providedConfig)).rejects.toThrow("OpenAI API key is required")
694+
})
695+
696+
it("should throw error for missing Ollama base URL", async () => {
697+
// Arrange
698+
const providedConfig = {
699+
embedderProvider: "ollama",
700+
modelId: "nomic-embed-text:latest",
701+
ollamaOptions: {
702+
ollamaBaseUrl: undefined,
703+
},
704+
}
705+
706+
// Act & Assert
707+
await expect(factory.validateEmbedderConfig(providedConfig)).rejects.toThrow("Ollama base URL is required")
708+
})
709+
710+
it("should throw error for missing OpenAI-compatible credentials", async () => {
711+
// Arrange
712+
const providedConfig = {
713+
embedderProvider: "openai-compatible",
714+
modelId: "custom-model",
715+
openAiCompatibleOptions: {
716+
baseUrl: undefined,
717+
apiKey: "test-api-key",
718+
},
719+
}
720+
721+
// Act & Assert
722+
await expect(factory.validateEmbedderConfig(providedConfig)).rejects.toThrow(
723+
"OpenAI-compatible base URL and API key are required",
724+
)
725+
})
726+
727+
it("should throw error for invalid embedder type", async () => {
728+
// Arrange
729+
const providedConfig = {
730+
embedderProvider: "invalid-provider",
731+
modelId: "some-model",
732+
}
733+
734+
// Act & Assert
735+
await expect(factory.validateEmbedderConfig(providedConfig)).rejects.toThrow(
736+
"Invalid embedder type: invalid-provider",
737+
)
738+
})
739+
740+
it("should propagate validation errors from embedder", async () => {
741+
// Arrange
742+
const providedConfig = {
743+
embedderProvider: "openai",
744+
modelId: "text-embedding-3-large",
745+
openAiOptions: {
746+
openAiNativeApiKey: "invalid-key",
747+
},
748+
}
749+
MockedOpenAiEmbedder.validateEndpoint = vitest.fn().mockRejectedValue(new Error("Invalid API key"))
750+
751+
// Act & Assert
752+
await expect(factory.validateEmbedderConfig(providedConfig)).rejects.toThrow("Invalid API key")
753+
})
754+
755+
it("should use default model ID when not provided", async () => {
756+
// Arrange
757+
const providedConfig = {
758+
embedderProvider: "openai",
759+
openAiOptions: {
760+
openAiNativeApiKey: "test-api-key",
761+
},
762+
}
763+
764+
// Act
765+
const result = await factory.validateEmbedderConfig(providedConfig)
766+
767+
// Assert
768+
expect(result).toBe(true)
769+
expect(MockedOpenAiEmbedder.validateEndpoint).toHaveBeenCalledWith("test-api-key", undefined)
770+
})
771+
})
587772
})

src/services/code-index/embedders/ollama.ts

Lines changed: 48 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,9 @@
11
import { ApiHandlerOptions } from "../../../shared/api"
22
import { EmbedderInfo, EmbeddingResponse, IEmbedder } from "../interfaces"
3-
import { getModelQueryPrefix } from "../../../shared/embeddingModels"
3+
import { getModelQueryPrefix, getDefaultModelId } from "../../../shared/embeddingModels"
44
import { MAX_ITEM_TOKENS } from "../constants"
55
import { t } from "../../../i18n"
6+
import { serializeError } from "serialize-error"
67

78
/**
89
* Implements the IEmbedder interface using a local Ollama instance.
@@ -113,7 +114,8 @@ export class CodeIndexOllamaEmbedder implements IEmbedder {
113114
* @param modelId - The model ID to check
114115
* @returns A promise that resolves to true if valid, or throws an error with details
115116
*/
116-
static async validateEndpoint(baseUrl: string, modelId: string): Promise<boolean> {
117+
static async validateEndpoint(baseUrl: string, modelId: string | undefined): Promise<boolean> {
118+
const effectiveModelId = modelId || getDefaultModelId("ollama")
117119
const url = `${baseUrl}/api/tags`
118120

119121
try {
@@ -126,26 +128,63 @@ export class CodeIndexOllamaEmbedder implements IEmbedder {
126128

127129
if (!response.ok) {
128130
if (response.status === 404) {
129-
throw new Error(`Ollama API not found at ${baseUrl}. Is Ollama running?`)
131+
throw new Error(t("embeddings:validation.apiNotFound", { provider: "Ollama", baseUrl }))
130132
}
131-
throw new Error(`Failed to connect to Ollama: ${response.status} ${response.statusText}`)
133+
throw new Error(
134+
t("embeddings:validation.connectionFailed", {
135+
provider: "Ollama",
136+
status: response.status,
137+
statusText: response.statusText,
138+
}),
139+
)
132140
}
133141

134142
const data = await response.json()
135143
const models = data.models || []
136144
const modelNames = models.map((m: any) => m.name)
137145

138146
// Check if the specified model exists
139-
if (!modelNames.includes(modelId)) {
140-
throw new Error(`Model '${modelId}' not found. Available models: ${modelNames.join(", ") || "none"}`)
147+
if (!modelNames.includes(effectiveModelId)) {
148+
throw new Error(
149+
t("embeddings:validation.modelNotFound", {
150+
modelId: effectiveModelId,
151+
availableModels: modelNames.join(", ") || "none",
152+
}),
153+
)
141154
}
142155

143156
return true
144157
} catch (error: any) {
145-
if (error.message.includes("fetch failed") || error.message.includes("ECONNREFUSED")) {
146-
throw new Error(`Cannot connect to Ollama at ${baseUrl}. Please ensure Ollama is running.`)
158+
// If it's already a translated error, re-throw it
159+
if (
160+
error?.message?.includes(
161+
t("embeddings:validation.modelNotFound", { modelId: "", availableModels: "" }).split(":")[0],
162+
) ||
163+
error?.message?.includes(
164+
t("embeddings:validation.apiNotFound", { provider: "", baseUrl: "" }).split(":")[0],
165+
) ||
166+
error?.message?.includes(
167+
t("embeddings:validation.connectionFailed", { provider: "", status: "", statusText: "" }).split(
168+
":",
169+
)[0],
170+
)
171+
) {
172+
throw error
147173
}
148-
throw error
174+
175+
const serialized = serializeError(error)
176+
177+
if (error.message?.includes("fetch failed") || error.message?.includes("ECONNREFUSED")) {
178+
throw new Error(t("embeddings:validation.cannotConnect", { provider: "Ollama", baseUrl }))
179+
}
180+
181+
const errorDetails = serialized.message || t("embeddings:unknownError")
182+
throw new Error(
183+
t("embeddings:genericError", {
184+
provider: "Ollama",
185+
errorDetails,
186+
}),
187+
)
149188
}
150189
}
151190
}

src/services/code-index/embedders/openai-compatible.ts

Lines changed: 17 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@ import {
88
} from "../constants"
99
import { getDefaultModelId, getModelQueryPrefix } from "../../../shared/embeddingModels"
1010
import { t } from "../../../i18n"
11+
import { serializeError } from "serialize-error"
1112

1213
interface EmbeddingItem {
1314
embedding: string | number[]
@@ -50,10 +51,10 @@ export class OpenAICompatibleEmbedder implements IEmbedder {
5051
*/
5152
constructor(baseUrl: string, apiKey: string, modelId?: string, maxItemTokens?: number) {
5253
if (!baseUrl) {
53-
throw new Error("Base URL is required for OpenAI Compatible embedder")
54+
throw new Error(t("embeddings:validation.baseUrlRequired", { provider: "OpenAI Compatible" }))
5455
}
5556
if (!apiKey) {
56-
throw new Error("API key is required for OpenAI Compatible embedder")
57+
throw new Error(t("embeddings:validation.apiKeyRequired", { provider: "OpenAI Compatible" }))
5758
}
5859

5960
this.baseUrl = baseUrl
@@ -76,36 +77,40 @@ export class OpenAICompatibleEmbedder implements IEmbedder {
7677
* @returns Promise resolving to true if valid
7778
* @throws Error with descriptive message if validation fails
7879
*/
79-
static async validateEndpoint(baseUrl: string, apiKey: string, modelId?: string): Promise<boolean> {
80+
static async validateEndpoint(baseUrl: string, apiKey: string, modelId: string | undefined): Promise<boolean> {
8081
try {
8182
const client = new OpenAI({
8283
baseURL: baseUrl,
8384
apiKey: apiKey,
8485
})
8586

86-
const testModel = modelId || getDefaultModelId("openai-compatible")
87+
const effectiveModelId = modelId || getDefaultModelId("openai-compatible")
8788

8889
// Try a minimal embedding request
8990
await client.embeddings.create({
9091
input: "test",
91-
model: testModel,
92+
model: effectiveModelId,
9293
})
9394

9495
return true
9596
} catch (error: any) {
96-
let errorMessage = t("embeddings:unknownError")
97+
const serialized = serializeError(error)
9798

9899
if (error?.status === 401) {
99-
errorMessage = t("embeddings:authenticationFailed")
100+
throw new Error(t("embeddings:authenticationFailed"))
100101
} else if (error?.status === 404) {
101-
errorMessage = `Endpoint not found: ${baseUrl}`
102+
throw new Error(t("embeddings:validation.endpointNotFound", { baseUrl }))
102103
} else if (error?.code === "ECONNREFUSED" || error?.code === "ENOTFOUND") {
103-
errorMessage = `Cannot connect to ${baseUrl}`
104-
} else if (error?.message) {
105-
errorMessage = error.message
104+
throw new Error(t("embeddings:validation.cannotConnect", { provider: "OpenAI Compatible", baseUrl }))
106105
}
107106

108-
throw new Error(errorMessage)
107+
const errorDetails = serialized.message || t("embeddings:unknownError")
108+
throw new Error(
109+
t("embeddings:genericError", {
110+
provider: "OpenAI Compatible",
111+
errorDetails,
112+
}),
113+
)
109114
}
110115
}
111116

0 commit comments

Comments (0)