Skip to content

Commit c41ccbf

Browse files
committed
feat: add comprehensive logging infrastructure for embedders
- Added standardized logging methods (log, logWarning, logError) to all embedder classes
- Integrated VS Code output channel for consistent logging across OpenAI, Gemini, Mistral, and Ollama embedders
- Enhanced debug logging in CodeIndexManager and webviewMessageHandler
- Added detailed logging for embedding creation, validation, and error handling
- Updated CodeIndexServiceFactory to pass output channel to all embedders
- Fixed test files to account for new constructor signatures

This logging infrastructure provides better debugging and troubleshooting capabilities as requested in PR #7200
1 parent 0aba9c0 commit c41ccbf

File tree

8 files changed

+408
-20
lines changed

8 files changed

+408
-20
lines changed

src/core/webview/webviewMessageHandler.ts

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2027,6 +2027,9 @@ export const webviewMessageHandler = async (
20272027

20282028
const settings = message.codeIndexSettings
20292029

2030+
// Debug logging for settings save
2031+
console.log("[DEBUG] saveCodeIndexSettingsAtomic called with settings:", JSON.stringify(settings, null, 2))
2032+
20302033
try {
20312034
// Check if embedder provider has changed
20322035
const currentConfig = getGlobalState("codebaseIndexConfig") || {}
@@ -2047,6 +2050,8 @@ export const webviewMessageHandler = async (
20472050
codebaseIndexSearchMinScore: settings.codebaseIndexSearchMinScore,
20482051
}
20492052

2053+
console.log("[DEBUG] globalStateConfig to be saved:", JSON.stringify(globalStateConfig, null, 2))
2054+
20502055
// Save global state first
20512056
await updateGlobalState("codebaseIndexConfig", globalStateConfig)
20522057

@@ -2076,26 +2081,35 @@ export const webviewMessageHandler = async (
20762081
)
20772082
}
20782083

2084+
console.log("[DEBUG] Secrets saved successfully")
2085+
20792086
// Send success response first - settings are saved regardless of validation
20802087
await provider.postMessageToWebview({
20812088
type: "codeIndexSettingsSaved",
20822089
success: true,
20832090
settings: globalStateConfig,
20842091
})
20852092

2093+
console.log("[DEBUG] Success response sent to webview")
2094+
20862095
// Update webview state
20872096
await provider.postStateToWebview()
20882097

20892098
// Then handle validation and initialization for the current workspace
20902099
const currentCodeIndexManager = provider.getCurrentWorkspaceCodeIndexManager()
20912100
if (currentCodeIndexManager) {
2101+
console.log("[DEBUG] CodeIndexManager found, handling settings change")
2102+
20922103
// If embedder provider changed, perform proactive validation
20932104
if (embedderProviderChanged) {
2105+
console.log("[DEBUG] Embedder provider changed, performing validation")
20942106
try {
20952107
// Force handleSettingsChange which will trigger validation
20962108
await currentCodeIndexManager.handleSettingsChange()
2109+
console.log("[DEBUG] Settings change handled successfully")
20972110
} catch (error) {
20982111
// Validation failed - the error state is already set by handleSettingsChange
2112+
console.log("[DEBUG] Embedder validation failed:", error)
20992113
provider.log(
21002114
`Embedder validation failed after provider change: ${error instanceof Error ? error.message : String(error)}`,
21012115
)
@@ -2109,10 +2123,13 @@ export const webviewMessageHandler = async (
21092123
}
21102124
} else {
21112125
// No provider change, just handle settings normally
2126+
console.log("[DEBUG] No provider change, handling settings normally")
21122127
try {
21132128
await currentCodeIndexManager.handleSettingsChange()
2129+
console.log("[DEBUG] Settings change handled successfully")
21142130
} catch (error) {
21152131
// Log but don't fail - settings are saved
2132+
console.log("[DEBUG] Settings change handling error:", error)
21162133
provider.log(
21172134
`Settings change handling error: ${error instanceof Error ? error.message : String(error)}`,
21182135
)
@@ -2124,11 +2141,15 @@ export const webviewMessageHandler = async (
21242141

21252142
// Auto-start indexing if now enabled and configured
21262143
if (currentCodeIndexManager.isFeatureEnabled && currentCodeIndexManager.isFeatureConfigured) {
2144+
console.log("[DEBUG] Feature enabled and configured, checking initialization")
21272145
if (!currentCodeIndexManager.isInitialized) {
2146+
console.log("[DEBUG] Manager not initialized, initializing now")
21282147
try {
21292148
await currentCodeIndexManager.initialize(provider.contextProxy)
2149+
console.log("[DEBUG] Code index manager initialized successfully")
21302150
provider.log(`Code index manager initialized after settings save`)
21312151
} catch (error) {
2152+
console.log("[DEBUG] Code index initialization failed:", error)
21322153
provider.log(
21332154
`Code index initialization failed: ${error instanceof Error ? error.message : String(error)}`,
21342155
)
@@ -2138,10 +2159,15 @@ export const webviewMessageHandler = async (
21382159
values: currentCodeIndexManager.getCurrentStatus(),
21392160
})
21402161
}
2162+
} else {
2163+
console.log("[DEBUG] Manager already initialized")
21412164
}
2165+
} else {
2166+
console.log("[DEBUG] Feature not enabled or not configured")
21422167
}
21432168
} else {
21442169
// No workspace open - send error status
2170+
console.log("[DEBUG] No workspace open, cannot save code index settings")
21452171
provider.log("Cannot save code index settings: No workspace folder open")
21462172
await provider.postMessageToWebview({
21472173
type: "indexingStatusUpdate",
@@ -2155,6 +2181,7 @@ export const webviewMessageHandler = async (
21552181
})
21562182
}
21572183
} catch (error) {
2184+
console.log("[DEBUG] Error saving code index settings:", error)
21582185
provider.log(`Error saving code index settings: ${error.message || error}`)
21592186
await provider.postMessageToWebview({
21602187
type: "codeIndexSettingsSaved",

src/services/code-index/embedders/__tests__/gemini.spec.ts

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -38,6 +38,8 @@ describe("GeminiEmbedder", () => {
3838
apiKey,
3939
"gemini-embedding-001",
4040
2048,
41+
undefined, // useFloatEncoding
42+
undefined, // outputChannel
4143
)
4244
})
4345

@@ -55,6 +57,8 @@ describe("GeminiEmbedder", () => {
5557
apiKey,
5658
"text-embedding-004",
5759
2048,
60+
undefined, // useFloatEncoding
61+
undefined, // outputChannel
5862
)
5963
})
6064

src/services/code-index/embedders/__tests__/mistral.spec.ts

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -38,6 +38,8 @@ describe("MistralEmbedder", () => {
3838
apiKey,
3939
"codestral-embed-2505",
4040
8191,
41+
undefined, // useFloatEncoding
42+
undefined, // outputChannel
4143
)
4244
})
4345

@@ -55,6 +57,8 @@ describe("MistralEmbedder", () => {
5557
apiKey,
5658
"custom-embed-model",
5759
8191,
60+
undefined, // useFloatEncoding
61+
undefined, // outputChannel
5862
)
5963
})
6064

src/services/code-index/embedders/gemini.ts

Lines changed: 82 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
1+
import * as vscode from "vscode"
12
import { OpenAICompatibleEmbedder } from "./openai-compatible"
23
import { IEmbedder, EmbeddingResponse, EmbedderInfo } from "../interfaces/embedder"
34
import { GEMINI_MAX_ITEM_TOKENS } from "../constants"
@@ -18,27 +19,72 @@ export class GeminiEmbedder implements IEmbedder {
1819
private static readonly GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai/"
1920
private static readonly DEFAULT_MODEL = "gemini-embedding-001"
2021
private readonly modelId: string
22+
private readonly outputChannel?: vscode.OutputChannel
2123

2224
/**
2325
* Creates a new Gemini embedder
2426
* @param apiKey The Gemini API key for authentication
2527
* @param modelId The model ID to use (defaults to gemini-embedding-001)
28+
* @param outputChannel Optional VS Code output channel for logging
2629
*/
27-
constructor(apiKey: string, modelId?: string) {
30+
constructor(apiKey: string, modelId?: string, outputChannel?: vscode.OutputChannel) {
2831
if (!apiKey) {
2932
throw new Error(t("embeddings:validation.apiKeyRequired"))
3033
}
3134

3235
// Use provided model or default
3336
this.modelId = modelId || GeminiEmbedder.DEFAULT_MODEL
37+
this.outputChannel = outputChannel
3438

3539
// Create an OpenAI Compatible embedder with Gemini's configuration
3640
this.openAICompatibleEmbedder = new OpenAICompatibleEmbedder(
3741
GeminiEmbedder.GEMINI_BASE_URL,
3842
apiKey,
3943
this.modelId,
4044
GEMINI_MAX_ITEM_TOKENS,
45+
undefined, // useFloatEncoding
46+
this.outputChannel,
4147
)
48+
49+
// Log construction
50+
this.log("info", "Gemini Embedder constructed", {
51+
modelId: this.modelId,
52+
baseUrl: GeminiEmbedder.GEMINI_BASE_URL,
53+
})
54+
}
55+
56+
/**
57+
* Logs a message to the output channel if available
58+
* @param level The log level (debug, info, warn, error)
59+
* @param message The message to log
60+
* @param data Optional structured data to include
61+
*/
62+
private log(level: "debug" | "info" | "warn" | "error", message: string, data?: any): void {
63+
if (!this.outputChannel) return
64+
65+
const timestamp = new Date().toISOString()
66+
const prefix = `[${timestamp}] [${level.toUpperCase()}] [GEMINI]`
67+
68+
let logMessage = `${prefix} ${message}`
69+
if (data) {
70+
logMessage += `\n${JSON.stringify(data, null, 2)}`
71+
}
72+
73+
this.outputChannel.appendLine(logMessage)
74+
}
75+
76+
/**
77+
* Helper method for warning logs
78+
*/
79+
private logWarning(message: string, data?: any): void {
80+
this.log("warn", message, data)
81+
}
82+
83+
/**
84+
* Helper method for error logs
85+
*/
86+
private logError(message: string, data?: any): void {
87+
this.log("error", message, data)
4288
}
4389

4490
/**
@@ -51,8 +97,26 @@ export class GeminiEmbedder implements IEmbedder {
5197
try {
5298
// Use the provided model or fall back to the instance's model
5399
const modelToUse = model || this.modelId
54-
return await this.openAICompatibleEmbedder.createEmbeddings(texts, modelToUse)
100+
101+
this.log("debug", "Starting embedding creation", {
102+
textCount: texts.length,
103+
model: modelToUse,
104+
})
105+
106+
const result = await this.openAICompatibleEmbedder.createEmbeddings(texts, modelToUse)
107+
108+
this.log("info", "Successfully created embeddings", {
109+
count: result.embeddings.length,
110+
usage: result.usage,
111+
})
112+
113+
return result
55114
} catch (error) {
115+
this.logError("Failed to create embeddings", {
116+
error: error instanceof Error ? error.message : String(error),
117+
stack: error instanceof Error ? error.stack : undefined,
118+
})
119+
56120
TelemetryService.instance.captureEvent(TelemetryEventName.CODE_INDEX_ERROR, {
57121
error: error instanceof Error ? error.message : String(error),
58122
stack: error instanceof Error ? error.stack : undefined,
@@ -68,10 +132,25 @@ export class GeminiEmbedder implements IEmbedder {
68132
*/
69133
async validateConfiguration(): Promise<{ valid: boolean; error?: string }> {
70134
try {
135+
this.log("info", "Starting configuration validation")
136+
71137
// Delegate validation to the OpenAI-compatible embedder
72138
// The error messages will be specific to Gemini since we're using Gemini's base URL
73-
return await this.openAICompatibleEmbedder.validateConfiguration()
139+
const result = await this.openAICompatibleEmbedder.validateConfiguration()
140+
141+
if (result.valid) {
142+
this.log("info", "Configuration validation successful")
143+
} else {
144+
this.logError("Configuration validation failed", { error: result.error })
145+
}
146+
147+
return result
74148
} catch (error) {
149+
this.logError("Configuration validation error", {
150+
error: error instanceof Error ? error.message : String(error),
151+
stack: error instanceof Error ? error.stack : undefined,
152+
})
153+
75154
TelemetryService.instance.captureEvent(TelemetryEventName.CODE_INDEX_ERROR, {
76155
error: error instanceof Error ? error.message : String(error),
77156
stack: error instanceof Error ? error.stack : undefined,

0 commit comments

Comments (0)