Skip to content

Commit fa60a31

Browse files
MuriloFP, ellipsis-dev[bot], and daniel-lxs
authored
Indexing field validation (#5483)
Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com> Co-authored-by: Daniel Riccio <[email protected]> Co-authored-by: Daniel <[email protected]>
1 parent a2815e2 commit fa60a31

File tree

22 files changed

+1481
-505
lines changed

22 files changed

+1481
-505
lines changed

src/services/code-index/embedders/__tests__/ollama.spec.ts

Lines changed: 7 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -127,7 +127,7 @@ describe("CodeIndexOllamaEmbedder", () => {
127127
const result = await embedder.validateConfiguration()
128128

129129
expect(result.valid).toBe(false)
130-
expect(result.error).toBe("Connection to Ollama timed out at http://localhost:11434")
130+
expect(result.error).toBe("embeddings:ollama.serviceNotRunning")
131131
})
132132

133133
it("should fail validation when tags endpoint returns 404", async () => {
@@ -141,9 +141,7 @@ describe("CodeIndexOllamaEmbedder", () => {
141141
const result = await embedder.validateConfiguration()
142142

143143
expect(result.valid).toBe(false)
144-
expect(result.error).toBe(
145-
"Ollama service is not running at http://localhost:11434. Please start Ollama first.",
146-
)
144+
expect(result.error).toBe("embeddings:ollama.serviceNotRunning")
147145
})
148146

149147
it("should fail validation when tags endpoint returns other error", async () => {
@@ -157,7 +155,7 @@ describe("CodeIndexOllamaEmbedder", () => {
157155
const result = await embedder.validateConfiguration()
158156

159157
expect(result.valid).toBe(false)
160-
expect(result.error).toBe("Ollama service is unavailable at http://localhost:11434. HTTP status: 500")
158+
expect(result.error).toBe("embeddings:ollama.serviceUnavailable")
161159
})
162160

163161
it("should fail validation when model does not exist", async () => {
@@ -176,9 +174,7 @@ describe("CodeIndexOllamaEmbedder", () => {
176174
const result = await embedder.validateConfiguration()
177175

178176
expect(result.valid).toBe(false)
179-
expect(result.error).toBe(
180-
"Model 'nomic-embed-text' not found. Available models: llama2:latest, mistral:latest",
181-
)
177+
expect(result.error).toBe("embeddings:ollama.modelNotFound")
182178
})
183179

184180
it("should fail validation when model exists but doesn't support embeddings", async () => {
@@ -205,7 +201,7 @@ describe("CodeIndexOllamaEmbedder", () => {
205201
const result = await embedder.validateConfiguration()
206202

207203
expect(result.valid).toBe(false)
208-
expect(result.error).toBe("Model 'nomic-embed-text' is not embedding capable")
204+
expect(result.error).toBe("embeddings:ollama.modelNotEmbeddingCapable")
209205
})
210206

211207
it("should handle ECONNREFUSED errors", async () => {
@@ -214,7 +210,7 @@ describe("CodeIndexOllamaEmbedder", () => {
214210
const result = await embedder.validateConfiguration()
215211

216212
expect(result.valid).toBe(false)
217-
expect(result.error).toBe("Connection to Ollama timed out at http://localhost:11434")
213+
expect(result.error).toBe("embeddings:ollama.serviceNotRunning")
218214
})
219215

220216
it("should handle ENOTFOUND errors", async () => {
@@ -223,7 +219,7 @@ describe("CodeIndexOllamaEmbedder", () => {
223219
const result = await embedder.validateConfiguration()
224220

225221
expect(result.valid).toBe(false)
226-
expect(result.error).toBe("Ollama host not found: http://localhost:11434")
222+
expect(result.error).toBe("embeddings:ollama.hostNotFound")
227223
})
228224

229225
it("should handle generic network errors", async () => {

src/services/code-index/embedders/ollama.ts

Lines changed: 32 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -56,6 +56,11 @@ export class CodeIndexOllamaEmbedder implements IEmbedder {
5656
try {
5757
// Note: Standard Ollama API uses 'prompt' for single text, not 'input' for array.
5858
// Implementing based on user's specific request structure.
59+
60+
// Add timeout to prevent indefinite hanging
61+
const controller = new AbortController()
62+
const timeoutId = setTimeout(() => controller.abort(), 10000) // 10 second timeout
63+
5964
const response = await fetch(url, {
6065
method: "POST",
6166
headers: {
@@ -65,7 +70,9 @@ export class CodeIndexOllamaEmbedder implements IEmbedder {
6570
model: modelToUse,
6671
input: processedTexts, // Using 'input' as requested
6772
}),
73+
signal: controller.signal,
6874
})
75+
clearTimeout(timeoutId)
6976

7077
if (!response.ok) {
7178
let errorBody = t("embeddings:ollama.couldNotReadErrorBody")
@@ -97,6 +104,16 @@ export class CodeIndexOllamaEmbedder implements IEmbedder {
97104
} catch (error: any) {
98105
// Log the original error for debugging purposes
99106
console.error("Ollama embedding failed:", error)
107+
108+
// Handle specific error types with better messages
109+
if (error.name === "AbortError") {
110+
throw new Error(t("embeddings:validation.connectionFailed"))
111+
} else if (error.message?.includes("fetch failed") || error.code === "ECONNREFUSED") {
112+
throw new Error(t("embeddings:ollama.serviceNotRunning", { baseUrl: this.baseUrl }))
113+
} else if (error.code === "ENOTFOUND") {
114+
throw new Error(t("embeddings:ollama.hostNotFound", { baseUrl: this.baseUrl }))
115+
}
116+
100117
// Re-throw a more specific error for the caller
101118
throw new Error(t("embeddings:ollama.embeddingFailed", { message: error.message }))
102119
}
@@ -129,12 +146,12 @@ export class CodeIndexOllamaEmbedder implements IEmbedder {
129146
if (modelsResponse.status === 404) {
130147
return {
131148
valid: false,
132-
error: t("embeddings:errors.ollama.serviceNotRunning", { baseUrl: this.baseUrl }),
149+
error: t("embeddings:ollama.serviceNotRunning", { baseUrl: this.baseUrl }),
133150
}
134151
}
135152
return {
136153
valid: false,
137-
error: t("embeddings:errors.ollama.serviceUnavailable", {
154+
error: t("embeddings:ollama.serviceUnavailable", {
138155
baseUrl: this.baseUrl,
139156
status: modelsResponse.status,
140157
}),
@@ -159,8 +176,8 @@ export class CodeIndexOllamaEmbedder implements IEmbedder {
159176
const availableModels = models.map((m: any) => m.name).join(", ")
160177
return {
161178
valid: false,
162-
error: t("embeddings:errors.ollama.modelNotFound", {
163-
model: this.defaultModelId,
179+
error: t("embeddings:ollama.modelNotFound", {
180+
modelId: this.defaultModelId,
164181
availableModels,
165182
}),
166183
}
@@ -189,7 +206,7 @@ export class CodeIndexOllamaEmbedder implements IEmbedder {
189206
if (!testResponse.ok) {
190207
return {
191208
valid: false,
192-
error: t("embeddings:errors.ollama.modelNotEmbedding", { model: this.defaultModelId }),
209+
error: t("embeddings:ollama.modelNotEmbeddingCapable", { modelId: this.defaultModelId }),
193210
}
194211
}
195212

@@ -199,21 +216,26 @@ export class CodeIndexOllamaEmbedder implements IEmbedder {
199216
{
200217
beforeStandardHandling: (error: any) => {
201218
// Handle Ollama-specific connection errors
202-
if (error?.message === "ECONNREFUSED") {
219+
// Check for fetch failed errors which indicate Ollama is not running
220+
if (
221+
error?.message?.includes("fetch failed") ||
222+
error?.code === "ECONNREFUSED" ||
223+
error?.message?.includes("ECONNREFUSED")
224+
) {
203225
return {
204226
valid: false,
205-
error: t("embeddings:errors.ollama.connectionTimeout", { baseUrl: this.baseUrl }),
227+
error: t("embeddings:ollama.serviceNotRunning", { baseUrl: this.baseUrl }),
206228
}
207-
} else if (error?.message === "ENOTFOUND") {
229+
} else if (error?.code === "ENOTFOUND" || error?.message?.includes("ENOTFOUND")) {
208230
return {
209231
valid: false,
210-
error: t("embeddings:errors.ollama.hostNotFound", { baseUrl: this.baseUrl }),
232+
error: t("embeddings:ollama.hostNotFound", { baseUrl: this.baseUrl }),
211233
}
212234
} else if (error?.name === "AbortError") {
213235
// Handle timeout
214236
return {
215237
valid: false,
216-
error: t("embeddings:errors.ollama.connectionTimeout", { baseUrl: this.baseUrl }),
238+
error: t("embeddings:validation.connectionFailed"),
217239
}
218240
}
219241
// Let standard handling take over

0 commit comments

Comments (0)