Skip to content

Commit 4277572

Browse files
hannesrudolph and daniel-lxs
authored and committed
fix: prevent silent failures in code indexing with OpenAI-compatible endpoints (#4398)
1 parent b7d5a96 commit 4277572

29 files changed

+535
-55
lines changed

src/core/webview/webviewMessageHandler.ts

Lines changed: 65 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1807,6 +1807,43 @@ export const webviewMessageHandler = async (
18071807
break
18081808
}
18091809
case "codebaseIndexConfig": {
1810+
// Handle test action separately
1811+
if (message.action === "test") {
1812+
try {
1813+
if (!provider.codeIndexManager) {
1814+
throw new Error("Code index manager not available")
1815+
}
1816+
1817+
// Get the service factory from the manager
1818+
const serviceFactory = provider.codeIndexManager.getServiceFactory()
1819+
if (!serviceFactory) {
1820+
throw new Error("Service factory not available")
1821+
}
1822+
1823+
// Test the configuration
1824+
const isValid = await serviceFactory.validateEmbedderConfig()
1825+
1826+
// Send test result back to webview
1827+
provider.postMessageToWebview({
1828+
type: "codebaseIndexTestResult",
1829+
success: isValid,
1830+
message: isValid ? "Configuration is valid" : "Configuration test failed",
1831+
})
1832+
} catch (error) {
1833+
const errorMessage = error instanceof Error ? error.message : String(error)
1834+
provider.log(`[CodeIndexManager] Configuration test error: ${errorMessage}`)
1835+
1836+
// Send error result back to webview
1837+
provider.postMessageToWebview({
1838+
type: "codebaseIndexTestResult",
1839+
success: false,
1840+
message: errorMessage,
1841+
})
1842+
}
1843+
break
1844+
}
1845+
1846+
// Normal configuration update flow
18101847
const codebaseIndexConfig = message.values ?? {
18111848
codebaseIndexEnabled: false,
18121849
codebaseIndexQdrantUrl: "http://localhost:6333",
@@ -1823,16 +1860,42 @@ export const webviewMessageHandler = async (
18231860
// If now configured and enabled, start indexing automatically
18241861
if (provider.codeIndexManager.isFeatureEnabled && provider.codeIndexManager.isFeatureConfigured) {
18251862
if (!provider.codeIndexManager.isInitialized) {
1826-
await provider.codeIndexManager.initialize(provider.contextProxy)
1863+
try {
1864+
await provider.codeIndexManager.initialize(provider.contextProxy)
1865+
} catch (initError) {
1866+
// Initialization failed - send error status to webview
1867+
const errorMessage = initError instanceof Error ? initError.message : String(initError)
1868+
provider.log(`[CodeIndexManager] Initialization error: ${errorMessage}`)
1869+
1870+
// Send error status update to webview
1871+
const status = provider.codeIndexManager.getCurrentStatus()
1872+
provider.postMessageToWebview({
1873+
type: "indexingStatusUpdate",
1874+
values: status,
1875+
})
1876+
1877+
// Re-throw to prevent indexing attempt
1878+
throw initError
1879+
}
18271880
}
18281881
// Start indexing in background (no await)
18291882
provider.codeIndexManager.startIndexing()
18301883
}
18311884
}
18321885
} catch (error) {
1886+
const errorMessage = error instanceof Error ? error.message : String(error)
18331887
provider.log(
1834-
`[CodeIndexManager] Error during background CodeIndexManager configuration/indexing: ${error.message || error}`,
1888+
`[CodeIndexManager] Error during background CodeIndexManager configuration/indexing: ${errorMessage}`,
18351889
)
1890+
1891+
// Send error notification to webview if manager exists
1892+
if (provider.codeIndexManager) {
1893+
const status = provider.codeIndexManager.getCurrentStatus()
1894+
provider.postMessageToWebview({
1895+
type: "indexingStatusUpdate",
1896+
values: status,
1897+
})
1898+
}
18361899
}
18371900

18381901
await provider.postStateToWebview()

src/services/code-index/embedders/ollama.ts

Lines changed: 42 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -106,4 +106,46 @@ export class CodeIndexOllamaEmbedder implements IEmbedder {
106106
name: "ollama",
107107
}
108108
}
109+
110+
/**
111+
* Validates the Ollama configuration by attempting to connect to the endpoint.
112+
* @param baseUrl - The base URL of the Ollama instance
113+
* @param modelId - The model ID to check
114+
* @returns A promise that resolves to true if valid, or throws an error with details
115+
*/
116+
static async validateEndpoint(baseUrl: string, modelId: string): Promise<boolean> {
117+
const url = `${baseUrl}/api/tags`
118+
119+
try {
120+
const response = await fetch(url, {
121+
method: "GET",
122+
headers: {
123+
"Content-Type": "application/json",
124+
},
125+
})
126+
127+
if (!response.ok) {
128+
if (response.status === 404) {
129+
throw new Error(`Ollama API not found at ${baseUrl}. Is Ollama running?`)
130+
}
131+
throw new Error(`Failed to connect to Ollama: ${response.status} ${response.statusText}`)
132+
}
133+
134+
const data = await response.json()
135+
const models = data.models || []
136+
const modelNames = models.map((m: any) => m.name)
137+
138+
// Check if the specified model exists
139+
if (!modelNames.includes(modelId)) {
140+
throw new Error(`Model '${modelId}' not found. Available models: ${modelNames.join(", ") || "none"}`)
141+
}
142+
143+
return true
144+
} catch (error: any) {
145+
if (error.message.includes("fetch failed") || error.message.includes("ECONNREFUSED")) {
146+
throw new Error(`Cannot connect to Ollama at ${baseUrl}. Please ensure Ollama is running.`)
147+
}
148+
throw error
149+
}
150+
}
109151
}

src/services/code-index/embedders/openai-compatible.ts

Lines changed: 41 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -68,6 +68,47 @@ export class OpenAICompatibleEmbedder implements IEmbedder {
6868
this.maxItemTokens = maxItemTokens || MAX_ITEM_TOKENS
6969
}
7070

71+
/**
72+
* Validates the endpoint by attempting a minimal embedding request
73+
* @param baseUrl The base URL to validate
74+
* @param apiKey The API key to use for validation
75+
* @param modelId Optional model ID to test with
76+
* @returns Promise resolving to true if valid
77+
* @throws Error with descriptive message if validation fails
78+
*/
79+
static async validateEndpoint(baseUrl: string, apiKey: string, modelId?: string): Promise<boolean> {
80+
try {
81+
const client = new OpenAI({
82+
baseURL: baseUrl,
83+
apiKey: apiKey,
84+
})
85+
86+
const testModel = modelId || getDefaultModelId("openai-compatible")
87+
88+
// Try a minimal embedding request
89+
await client.embeddings.create({
90+
input: "test",
91+
model: testModel,
92+
})
93+
94+
return true
95+
} catch (error: any) {
96+
let errorMessage = t("embeddings:unknownError")
97+
98+
if (error?.status === 401) {
99+
errorMessage = t("embeddings:authenticationFailed")
100+
} else if (error?.status === 404) {
101+
errorMessage = `Endpoint not found: ${baseUrl}`
102+
} else if (error?.code === "ECONNREFUSED" || error?.code === "ENOTFOUND") {
103+
errorMessage = `Cannot connect to ${baseUrl}`
104+
} else if (error?.message) {
105+
errorMessage = error.message
106+
}
107+
108+
throw new Error(errorMessage)
109+
}
110+
}
111+
71112
/**
72113
* Creates embeddings for the given texts with batching and rate limiting
73114
* @param texts Array of text strings to embed

src/services/code-index/embedders/openai.ts

Lines changed: 38 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -193,4 +193,42 @@ export class OpenAiEmbedder extends OpenAiNativeHandler implements IEmbedder {
193193
name: "openai",
194194
}
195195
}
196+
197+
/**
198+
* Validates the OpenAI configuration by attempting to list models.
199+
* @param apiKey - The OpenAI API key
200+
* @param modelId - The model ID to check
201+
* @returns A promise that resolves to true if valid, or throws an error with details
202+
*/
203+
static async validateEndpoint(apiKey: string, modelId: string): Promise<boolean> {
204+
const client = new OpenAI({ apiKey })
205+
206+
try {
207+
// Try to list models to validate the API key
208+
const models = await client.models.list()
209+
const modelIds = models.data.map((m) => m.id)
210+
211+
// Check if the specified embedding model exists or is a known model
212+
const knownEmbeddingModels = ["text-embedding-3-small", "text-embedding-3-large", "text-embedding-ada-002"]
213+
214+
if (!modelIds.includes(modelId) && !knownEmbeddingModels.includes(modelId)) {
215+
throw new Error(
216+
`Model '${modelId}' not found. Available embedding models: ${knownEmbeddingModels.join(", ")}`,
217+
)
218+
}
219+
220+
return true
221+
} catch (error: any) {
222+
if (error?.status === 401) {
223+
throw new Error("Invalid API key. Please check your OpenAI API key.")
224+
}
225+
if (error?.status === 429) {
226+
throw new Error("Rate limit exceeded. Please try again later.")
227+
}
228+
if (error?.message?.includes("fetch failed") || error?.message?.includes("ECONNREFUSED")) {
229+
throw new Error("Network error. Please check your internet connection.")
230+
}
231+
throw new Error(`Failed to validate OpenAI configuration: ${error?.message || "Unknown error"}`)
232+
}
233+
}
196234
}

src/services/code-index/interfaces/manager.ts

Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -72,6 +72,21 @@ export interface ICodeIndexManager {
7272
export type IndexingState = "Standby" | "Indexing" | "Indexed" | "Error"
7373
export type EmbedderProvider = "openai" | "ollama" | "openai-compatible" | "gemini"
7474

75+
/**
 * Snapshot of the code-index subsystem's state, including optional progress
 * counters and structured error details for the failure case.
 */
export interface IndexingStatus {
	// Current lifecycle state ("Standby" | "Indexing" | "Indexed" | "Error")
	systemStatus: IndexingState
	// Optional human-readable status line accompanying the state
	message?: string
	// Progress counters while indexing; units described by currentItemUnit
	processedItems?: number
	totalItems?: number
	// Label for what processedItems/totalItems count (e.g. files or blocks —
	// TODO confirm against the producer of this status)
	currentItemUnit?: string
	// Structured failure info; presumably only populated when systemStatus is "Error"
	errorDetails?: {
		// Broad category used to pick a remediation suggestion
		type: "configuration" | "authentication" | "network" | "validation" | "unknown"
		message: string
		// Optional actionable hint for the user
		suggestion?: string
		// Endpoint involved in the failure, when known
		endpoint?: string
		// Epoch milliseconds (producers set this via Date.now())
		timestamp: number
	}
}
89+
7590
export interface IndexProgressUpdate {
7691
systemStatus: IndexingState
7792
message?: string

src/services/code-index/manager.ts

Lines changed: 66 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -229,31 +229,66 @@ export class CodeIndexManager {
229229
console.error("Unexpected error loading .gitignore:", error)
230230
}
231231

232-
// (Re)Create shared service instances
233-
const { embedder, vectorStore, scanner, fileWatcher } = this._serviceFactory.createServices(
234-
this.context,
235-
this._cacheManager!,
236-
ignoreInstance,
237-
)
232+
try {
233+
// (Re)Create shared service instances
234+
const { embedder, vectorStore, scanner, fileWatcher } = this._serviceFactory.createServices(
235+
this.context,
236+
this._cacheManager!,
237+
ignoreInstance,
238+
)
239+
240+
// (Re)Initialize orchestrator
241+
this._orchestrator = new CodeIndexOrchestrator(
242+
this._configManager!,
243+
this._stateManager,
244+
this.workspacePath,
245+
this._cacheManager!,
246+
vectorStore,
247+
scanner,
248+
fileWatcher,
249+
)
250+
251+
// (Re)Initialize search service
252+
this._searchService = new CodeIndexSearchService(
253+
this._configManager!,
254+
this._stateManager,
255+
embedder,
256+
vectorStore,
257+
)
258+
} catch (error) {
259+
// Handle service creation errors
260+
console.error("Failed to create code index services:", error)
261+
262+
// Determine error type and create appropriate error details
263+
let errorType: "configuration" | "authentication" | "network" | "validation" | "unknown" = "unknown"
264+
let errorMessage = error instanceof Error ? error.message : String(error)
265+
let suggestion: string | undefined
266+
267+
if (errorMessage.includes("configuration missing") || errorMessage.includes("missing for")) {
268+
errorType = "configuration"
269+
suggestion = "Please check your embedder configuration in the settings."
270+
} else if (errorMessage.includes("authentication") || errorMessage.includes("API key")) {
271+
errorType = "authentication"
272+
suggestion = "Please verify your API key is correct and has the necessary permissions."
273+
} else if (errorMessage.includes("network") || errorMessage.includes("connect")) {
274+
errorType = "network"
275+
suggestion = "Please check your network connection and ensure the service endpoints are accessible."
276+
} else if (errorMessage.includes("dimension") || errorMessage.includes("model")) {
277+
errorType = "validation"
278+
suggestion = "Please ensure your model configuration is compatible with the selected provider."
279+
}
238280

239-
// (Re)Initialize orchestrator
240-
this._orchestrator = new CodeIndexOrchestrator(
241-
this._configManager!,
242-
this._stateManager,
243-
this.workspacePath,
244-
this._cacheManager!,
245-
vectorStore,
246-
scanner,
247-
fileWatcher,
248-
)
281+
// Set error state with details
282+
this._stateManager.setSystemState("Error", errorMessage, {
283+
type: errorType,
284+
message: errorMessage,
285+
suggestion,
286+
timestamp: Date.now(),
287+
})
249288

250-
// (Re)Initialize search service
251-
this._searchService = new CodeIndexSearchService(
252-
this._configManager!,
253-
this._stateManager,
254-
embedder,
255-
vectorStore,
256-
)
289+
// Re-throw to be handled by caller
290+
throw error
291+
}
257292
}
258293

259294
/**
@@ -279,4 +314,12 @@ export class CodeIndexManager {
279314
}
280315
}
281316
}
317+
318+
/**
 * Gets the service factory instance for testing configurations.
 * @returns The service factory instance, or undefined if the manager has not
 *          been initialized yet (callers must handle the undefined case)
 */
public getServiceFactory(): CodeIndexServiceFactory | undefined {
	return this._serviceFactory
}
282325
}

0 commit comments

Comments
 (0)