Skip to content

Commit 93dbf55

Browse files
committed
fix: address Daniel's feedback on error handling in embedders
- Remove generic try-catch wrappers that hide actual API errors
- Implement robust error message extraction using error?.message instead of error.toString()
- Add consistent retry behavior for all errors (not just rate limits)
- Update tests to verify new error propagation behavior
- Ensure users see specific error messages (auth failures, rate limits, etc.)

Resolves feedback from PR #4432
1 parent db66396 commit 93dbf55

File tree

3 files changed

+47
-44
lines changed

3 files changed

+47
-44
lines changed

src/services/code-index/embedders/__tests__/openai-compatible.spec.ts

Lines changed: 13 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -273,33 +273,33 @@ describe("OpenAICompatibleEmbedder", () => {
273273
})
274274
})
275275

276-
it("should not retry on non-rate-limit errors", async () => {
276+
it("should retry on non-rate-limit errors without delay", async () => {
277277
const testTexts = ["Hello world"]
278278
const authError = new Error("Unauthorized")
279279
;(authError as any).status = 401
280280

281281
mockEmbeddingsCreate.mockRejectedValue(authError)
282282

283283
await expect(embedder.createEmbeddings(testTexts)).rejects.toThrow(
284-
"Failed to create embeddings: batch processing error",
284+
"Failed to create embeddings after 3 attempts: Unauthorized",
285285
)
286286

287-
expect(mockEmbeddingsCreate).toHaveBeenCalledTimes(1)
287+
expect(mockEmbeddingsCreate).toHaveBeenCalledTimes(3)
288288
expect(console.warn).not.toHaveBeenCalledWith(expect.stringContaining("Rate limit hit"))
289289
})
290290

291-
it("should throw error immediately on non-retryable errors", async () => {
291+
it("should retry on all errors and exhaust attempts", async () => {
292292
const testTexts = ["Hello world"]
293293
const serverError = new Error("Internal server error")
294294
;(serverError as any).status = 500
295295

296296
mockEmbeddingsCreate.mockRejectedValue(serverError)
297297

298298
await expect(embedder.createEmbeddings(testTexts)).rejects.toThrow(
299-
"Failed to create embeddings: batch processing error",
299+
"Failed to create embeddings after 3 attempts: Internal server error",
300300
)
301301

302-
expect(mockEmbeddingsCreate).toHaveBeenCalledTimes(1)
302+
expect(mockEmbeddingsCreate).toHaveBeenCalledTimes(3)
303303
})
304304
})
305305

@@ -314,11 +314,11 @@ describe("OpenAICompatibleEmbedder", () => {
314314
mockEmbeddingsCreate.mockRejectedValue(apiError)
315315

316316
await expect(embedder.createEmbeddings(testTexts)).rejects.toThrow(
317-
"Failed to create embeddings: batch processing error",
317+
"Failed to create embeddings after 3 attempts: API connection failed",
318318
)
319319

320320
expect(console.error).toHaveBeenCalledWith(
321-
expect.stringContaining("Failed to process batch"),
321+
expect.stringContaining("OpenAI Compatible embedder error"),
322322
expect.any(Error),
323323
)
324324
})
@@ -330,10 +330,13 @@ describe("OpenAICompatibleEmbedder", () => {
330330
mockEmbeddingsCreate.mockRejectedValue(batchError)
331331

332332
await expect(embedder.createEmbeddings(testTexts)).rejects.toThrow(
333-
"Failed to create embeddings: batch processing error",
333+
"Failed to create embeddings after 3 attempts: Batch processing failed",
334334
)
335335

336-
expect(console.error).toHaveBeenCalledWith("Failed to process batch:", batchError)
336+
expect(console.error).toHaveBeenCalledWith(
337+
expect.stringContaining("OpenAI Compatible embedder error"),
338+
batchError,
339+
)
337340
})
338341

339342
it("should handle empty text arrays", async () => {

src/services/code-index/embedders/openai-compatible.ts

Lines changed: 16 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -81,15 +81,10 @@ export class OpenAICompatibleEmbedder implements IEmbedder {
8181
}
8282

8383
if (currentBatch.length > 0) {
84-
try {
85-
const batchResult = await this._embedBatchWithRetries(currentBatch, modelToUse)
86-
allEmbeddings.push(...batchResult.embeddings)
87-
usage.promptTokens += batchResult.usage.promptTokens
88-
usage.totalTokens += batchResult.usage.totalTokens
89-
} catch (error) {
90-
console.error("Failed to process batch:", error)
91-
throw new Error("Failed to create embeddings: batch processing error")
92-
}
84+
const batchResult = await this._embedBatchWithRetries(currentBatch, modelToUse)
85+
allEmbeddings.push(...batchResult.embeddings)
86+
usage.promptTokens += batchResult.usage.promptTokens
87+
usage.totalTokens += batchResult.usage.totalTokens
9388
}
9489
}
9590

@@ -124,23 +119,23 @@ export class OpenAICompatibleEmbedder implements IEmbedder {
124119
const isRateLimitError = error?.status === 429
125120
const hasMoreAttempts = attempts < MAX_RETRIES - 1
126121

127-
if (isRateLimitError && hasMoreAttempts) {
128-
const delayMs = INITIAL_DELAY_MS * Math.pow(2, attempts)
129-
console.warn(`Rate limit hit, retrying in ${delayMs}ms (attempt ${attempts + 1}/${MAX_RETRIES})`)
130-
await new Promise((resolve) => setTimeout(resolve, delayMs))
131-
continue
132-
}
133-
134122
// Log the error for debugging
135123
console.error(`OpenAI Compatible embedder error (attempt ${attempts + 1}/${MAX_RETRIES}):`, error)
136124

137-
if (!hasMoreAttempts) {
138-
throw new Error(
139-
`Failed to create embeddings after ${MAX_RETRIES} attempts: ${error.message || error}`,
140-
)
125+
if (hasMoreAttempts) {
126+
if (isRateLimitError) {
127+
const delayMs = INITIAL_DELAY_MS * Math.pow(2, attempts)
128+
console.warn(
129+
`Rate limit hit, retrying in ${delayMs}ms (attempt ${attempts + 1}/${MAX_RETRIES})`,
130+
)
131+
await new Promise((resolve) => setTimeout(resolve, delayMs))
132+
}
133+
continue
141134
}
142135

143-
throw error
136+
// Provide more context in the error message using robust error extraction
137+
const errorMessage = error?.message || (typeof error === "string" ? error : "Unknown error")
138+
throw new Error(`Failed to create embeddings after ${MAX_RETRIES} attempts: ${errorMessage}`)
144139
}
145140
}
146141

src/services/code-index/embedders/openai.ts

Lines changed: 18 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -71,15 +71,10 @@ export class OpenAiEmbedder extends OpenAiNativeHandler implements IEmbedder {
7171
}
7272

7373
if (currentBatch.length > 0) {
74-
try {
75-
const batchResult = await this._embedBatchWithRetries(currentBatch, modelToUse)
76-
allEmbeddings.push(...batchResult.embeddings)
77-
usage.promptTokens += batchResult.usage.promptTokens
78-
usage.totalTokens += batchResult.usage.totalTokens
79-
} catch (error) {
80-
console.error("Failed to process batch:", error)
81-
throw new Error("Failed to create embeddings: batch processing error")
82-
}
74+
const batchResult = await this._embedBatchWithRetries(currentBatch, modelToUse)
75+
allEmbeddings.push(...batchResult.embeddings)
76+
usage.promptTokens += batchResult.usage.promptTokens
77+
usage.totalTokens += batchResult.usage.totalTokens
8378
}
8479
}
8580

@@ -114,13 +109,23 @@ export class OpenAiEmbedder extends OpenAiNativeHandler implements IEmbedder {
114109
const isRateLimitError = error?.status === 429
115110
const hasMoreAttempts = attempts < MAX_RETRIES - 1
116111

117-
if (isRateLimitError && hasMoreAttempts) {
118-
const delayMs = INITIAL_DELAY_MS * Math.pow(2, attempts)
119-
await new Promise((resolve) => setTimeout(resolve, delayMs))
112+
// Log the error for debugging
113+
console.error(`OpenAI embedder error (attempt ${attempts + 1}/${MAX_RETRIES}):`, error)
114+
115+
if (hasMoreAttempts) {
116+
if (isRateLimitError) {
117+
const delayMs = INITIAL_DELAY_MS * Math.pow(2, attempts)
118+
console.warn(
119+
`Rate limit hit, retrying in ${delayMs}ms (attempt ${attempts + 1}/${MAX_RETRIES})`,
120+
)
121+
await new Promise((resolve) => setTimeout(resolve, delayMs))
122+
}
120123
continue
121124
}
122125

123-
throw error
126+
// Provide more context in the error message using robust error extraction
127+
const errorMessage = error?.message || (typeof error === "string" ? error : "Unknown error")
128+
throw new Error(`Failed to create embeddings after ${MAX_RETRIES} attempts: ${errorMessage}`)
124129
}
125130
}
126131

0 commit comments

Comments (0)