Skip to content
2 changes: 2 additions & 0 deletions packages/types/src/telemetry.ts
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,7 @@ export enum TelemetryEventName {
DIFF_APPLICATION_ERROR = "Diff Application Error",
SHELL_INTEGRATION_ERROR = "Shell Integration Error",
CONSECUTIVE_MISTAKE_ERROR = "Consecutive Mistake Error",
CODE_INDEX_ERROR = "Code Index Error",
}

/**
Expand Down Expand Up @@ -152,6 +153,7 @@ export const rooCodeTelemetryEventSchema = z.discriminatedUnion("type", [
TelemetryEventName.DIFF_APPLICATION_ERROR,
TelemetryEventName.SHELL_INTEGRATION_ERROR,
TelemetryEventName.CONSECUTIVE_MISTAKE_ERROR,
TelemetryEventName.CODE_INDEX_ERROR,
TelemetryEventName.CONTEXT_CONDENSED,
TelemetryEventName.SLIDING_WINDOW_TRUNCATION,
TelemetryEventName.TAB_SHOWN,
Expand Down
9 changes: 9 additions & 0 deletions src/services/code-index/__tests__/cache-manager.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,15 @@ vitest.mock("vscode", () => ({
// Mock debounce to execute immediately
vitest.mock("lodash.debounce", () => ({ default: vitest.fn((fn) => fn) }))

// Mock TelemetryService so modules under test can import "@roo-code/telemetry"
// without a real client: captureEvent becomes a no-op spy, absorbing the
// CODE_INDEX_ERROR events emitted by the new error-handling paths.
// NOTE: vitest.mock is hoisted above imports, so this factory must stay self-contained.
vitest.mock("@roo-code/telemetry", () => ({
	TelemetryService: {
		instance: {
			captureEvent: vitest.fn(),
		},
	},
}))

describe("CacheManager", () => {
let mockContext: vscode.ExtensionContext
let mockWorkspacePath: string
Expand Down
9 changes: 9 additions & 0 deletions src/services/code-index/__tests__/manager.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,15 @@ vi.mock("../state-manager", () => ({
})),
}))

// Mock TelemetryService so the manager under test can import "@roo-code/telemetry"
// without a real client: captureEvent becomes a no-op spy for CODE_INDEX_ERROR events.
// NOTE: vi.mock is hoisted above imports, so this factory must stay self-contained.
vi.mock("@roo-code/telemetry", () => ({
	TelemetryService: {
		instance: {
			captureEvent: vi.fn(),
		},
	},
}))

vi.mock("../service-factory")
const MockedCodeIndexServiceFactory = CodeIndexServiceFactory as MockedClass<typeof CodeIndexServiceFactory>

Expand Down
9 changes: 9 additions & 0 deletions src/services/code-index/__tests__/service-factory.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,15 @@ vitest.mock("../../../shared/embeddingModels", () => ({
getModelDimension: vitest.fn(),
}))

// Mock TelemetryService so the service factory can import "@roo-code/telemetry"
// without a real client: captureEvent becomes a no-op spy for CODE_INDEX_ERROR events.
// NOTE: vitest.mock is hoisted above imports, so this factory must stay self-contained.
vitest.mock("@roo-code/telemetry", () => ({
	TelemetryService: {
		instance: {
			captureEvent: vitest.fn(),
		},
	},
}))

const MockedOpenAiEmbedder = OpenAiEmbedder as MockedClass<typeof OpenAiEmbedder>
const MockedCodeIndexOllamaEmbedder = CodeIndexOllamaEmbedder as MockedClass<typeof CodeIndexOllamaEmbedder>
const MockedOpenAICompatibleEmbedder = OpenAICompatibleEmbedder as MockedClass<typeof OpenAICompatibleEmbedder>
Expand Down
17 changes: 17 additions & 0 deletions src/services/code-index/cache-manager.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@ import { createHash } from "crypto"
import { ICacheManager } from "./interfaces/cache"
import debounce from "lodash.debounce"
import { safeWriteJson } from "../../utils/safeWriteJson"
import { TelemetryService } from "@roo-code/telemetry"
import { TelemetryEventName } from "@roo-code/types"

/**
* Manages the cache for code indexing
Expand Down Expand Up @@ -39,6 +41,11 @@ export class CacheManager implements ICacheManager {
this.fileHashes = JSON.parse(cacheData.toString())
} catch (error) {
this.fileHashes = {}
TelemetryService.instance.captureEvent(TelemetryEventName.CODE_INDEX_ERROR, {
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
location: "initialize",
})
}
}

Expand All @@ -50,6 +57,11 @@ export class CacheManager implements ICacheManager {
await safeWriteJson(this.cachePath.fsPath, this.fileHashes)
} catch (error) {
console.error("Failed to save cache:", error)
TelemetryService.instance.captureEvent(TelemetryEventName.CODE_INDEX_ERROR, {
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
location: "_performSave",
})
}
}

Expand All @@ -62,6 +74,11 @@ export class CacheManager implements ICacheManager {
this.fileHashes = {}
} catch (error) {
console.error("Failed to clear cache file:", error, this.cachePath)
TelemetryService.instance.captureEvent(TelemetryEventName.CODE_INDEX_ERROR, {
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
location: "clearCacheFile",
})
}
}

Expand Down
9 changes: 9 additions & 0 deletions src/services/code-index/embedders/__tests__/gemini.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,15 @@ import { OpenAICompatibleEmbedder } from "../openai-compatible"
// Mock the OpenAICompatibleEmbedder
vitest.mock("../openai-compatible")

// Mock TelemetryService: the Gemini embedder now reports CODE_INDEX_ERROR events
// from its catch paths; stub captureEvent so tests run without a telemetry client.
// NOTE: vitest.mock is hoisted above imports, so this factory must stay self-contained.
vitest.mock("@roo-code/telemetry", () => ({
	TelemetryService: {
		instance: {
			captureEvent: vitest.fn(),
		},
	},
}))

const MockedOpenAICompatibleEmbedder = OpenAICompatibleEmbedder as MockedClass<typeof OpenAICompatibleEmbedder>

describe("GeminiEmbedder", () => {
Expand Down
9 changes: 9 additions & 0 deletions src/services/code-index/embedders/__tests__/ollama.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,15 @@ import { CodeIndexOllamaEmbedder } from "../ollama"
// Mock fetch
global.fetch = vitest.fn() as MockedFunction<typeof fetch>

// Mock TelemetryService: the Ollama embedder now reports CODE_INDEX_ERROR events
// from its catch/validation paths; stub captureEvent so tests run without a client.
// NOTE: vitest.mock is hoisted above imports, so this factory must stay self-contained.
vitest.mock("@roo-code/telemetry", () => ({
	TelemetryService: {
		instance: {
			captureEvent: vitest.fn(),
		},
	},
}))

// Mock i18n
vitest.mock("../../../../i18n", () => ({
t: (key: string, params?: Record<string, any>) => {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,15 @@ vitest.mock("openai")
// Mock global fetch
global.fetch = vitest.fn()

// Mock TelemetryService: the OpenAI-compatible embedder now reports CODE_INDEX_ERROR
// events from its retry/validation paths; stub captureEvent so tests run without a client.
// NOTE: vitest.mock is hoisted above imports, so this factory must stay self-contained.
vitest.mock("@roo-code/telemetry", () => ({
	TelemetryService: {
		instance: {
			captureEvent: vitest.fn(),
		},
	},
}))

// Mock i18n
vitest.mock("../../../../i18n", () => ({
t: (key: string, params?: Record<string, any>) => {
Expand Down
18 changes: 15 additions & 3 deletions src/services/code-index/embedders/__tests__/openai.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,15 @@ import { MAX_BATCH_TOKENS, MAX_ITEM_TOKENS, MAX_BATCH_RETRIES, INITIAL_RETRY_DEL
// Mock the OpenAI SDK
vitest.mock("openai")

// Mock TelemetryService: the OpenAI embedder now reports CODE_INDEX_ERROR events
// from its retry paths; stub captureEvent so tests run without a telemetry client.
// Beware: the capture call stringifies non-Error throwables (String(error)), which
// changes behavior for hostile error objects — see the "failing toString" test below.
// NOTE: vitest.mock is hoisted above imports, so this factory must stay self-contained.
vitest.mock("@roo-code/telemetry", () => ({
	TelemetryService: {
		instance: {
			captureEvent: vitest.fn(),
		},
	},
}))

// Mock i18n
vitest.mock("../../../../i18n", () => ({
t: (key: string, params?: Record<string, any>) => {
Expand Down Expand Up @@ -436,6 +445,9 @@ describe("OpenAiEmbedder", () => {

it("should handle errors with failing toString method", async () => {
const testTexts = ["Hello world"]
// The thrown value is not an Error instance, so the telemetry capture added to
// the catch handler stringifies it via String(error), which invokes the object's
// throwing toString(). That secondary "toString failed" error escapes the catch
// block BEFORE the retry / error-formatting logic can run.
const errorWithFailingToString = {
toString: () => {
throw new Error("toString failed")
Expand All @@ -444,9 +456,9 @@ describe("OpenAiEmbedder", () => {

mockEmbeddingsCreate.mockRejectedValue(errorWithFailingToString)

await expect(embedder.createEmbeddings(testTexts)).rejects.toThrow(
"Failed to create embeddings after 3 attempts: Unknown error",
)
// String(error) inside the telemetry capture re-throws "toString failed" from
// within the catch block, masking the original failure — so that is the error
// callers now observe. Review note: consider wrapping the capture in its own
// try/ignore so telemetry can never replace the real error.
await expect(embedder.createEmbeddings(testTexts)).rejects.toThrow("toString failed")
})

it("should handle errors from response.status property", async () => {
Expand Down
30 changes: 25 additions & 5 deletions src/services/code-index/embedders/gemini.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@ import { OpenAICompatibleEmbedder } from "./openai-compatible"
import { IEmbedder, EmbeddingResponse, EmbedderInfo } from "../interfaces/embedder"
import { GEMINI_MAX_ITEM_TOKENS } from "../constants"
import { t } from "../../../i18n"
import { TelemetryEventName } from "@roo-code/types"
import { TelemetryService } from "@roo-code/telemetry"

/**
* Gemini embedder implementation that wraps the OpenAI Compatible embedder
Expand Down Expand Up @@ -43,18 +45,36 @@ export class GeminiEmbedder implements IEmbedder {
* @returns Promise resolving to embedding response
*/
async createEmbeddings(texts: string[], model?: string): Promise<EmbeddingResponse> {
// Always use the fixed Gemini model, ignoring any passed model parameter
return this.openAICompatibleEmbedder.createEmbeddings(texts, GeminiEmbedder.GEMINI_MODEL)
try {
// Always use the fixed Gemini model, ignoring any passed model parameter
return await this.openAICompatibleEmbedder.createEmbeddings(texts, GeminiEmbedder.GEMINI_MODEL)
} catch (error) {
TelemetryService.instance.captureEvent(TelemetryEventName.CODE_INDEX_ERROR, {
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
location: "GeminiEmbedder:createEmbeddings",
})
throw error
}
}

/**
* Validates the Gemini embedder configuration by delegating to the underlying OpenAI-compatible embedder
* @returns Promise resolving to validation result with success status and optional error message
*/
async validateConfiguration(): Promise<{ valid: boolean; error?: string }> {
// Delegate validation to the OpenAI-compatible embedder
// The error messages will be specific to Gemini since we're using Gemini's base URL
return this.openAICompatibleEmbedder.validateConfiguration()
try {
// Delegate validation to the OpenAI-compatible embedder
// The error messages will be specific to Gemini since we're using Gemini's base URL
return await this.openAICompatibleEmbedder.validateConfiguration()
} catch (error) {
TelemetryService.instance.captureEvent(TelemetryEventName.CODE_INDEX_ERROR, {
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
location: "GeminiEmbedder:validateConfiguration",
})
throw error
}
}

/**
Expand Down
29 changes: 28 additions & 1 deletion src/services/code-index/embedders/ollama.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,9 @@ import { EmbedderInfo, EmbeddingResponse, IEmbedder } from "../interfaces"
import { getModelQueryPrefix } from "../../../shared/embeddingModels"
import { MAX_ITEM_TOKENS } from "../constants"
import { t } from "../../../i18n"
import { withValidationErrorHandling } from "../shared/validation-helpers"
import { withValidationErrorHandling, sanitizeErrorMessage } from "../shared/validation-helpers"
import { TelemetryService } from "@roo-code/telemetry"
import { TelemetryEventName } from "@roo-code/types"

/**
* Implements the IEmbedder interface using a local Ollama instance.
Expand Down Expand Up @@ -102,6 +104,13 @@ export class CodeIndexOllamaEmbedder implements IEmbedder {
embeddings: embeddings,
}
} catch (error: any) {
// Capture telemetry before reformatting the error
TelemetryService.instance.captureEvent(TelemetryEventName.CODE_INDEX_ERROR, {
error: sanitizeErrorMessage(error instanceof Error ? error.message : String(error)),
stack: error instanceof Error ? sanitizeErrorMessage(error.stack || "") : undefined,
location: "OllamaEmbedder:createEmbeddings",
})

// Log the original error for debugging purposes
console.error("Ollama embedding failed:", error)

Expand Down Expand Up @@ -222,16 +231,34 @@ export class CodeIndexOllamaEmbedder implements IEmbedder {
error?.code === "ECONNREFUSED" ||
error?.message?.includes("ECONNREFUSED")
) {
// Capture telemetry for connection failed error
TelemetryService.instance.captureEvent(TelemetryEventName.CODE_INDEX_ERROR, {
error: sanitizeErrorMessage(error instanceof Error ? error.message : String(error)),
stack: error instanceof Error ? sanitizeErrorMessage(error.stack || "") : undefined,
location: "OllamaEmbedder:validateConfiguration:connectionFailed",
})
return {
valid: false,
error: t("embeddings:ollama.serviceNotRunning", { baseUrl: this.baseUrl }),
}
} else if (error?.code === "ENOTFOUND" || error?.message?.includes("ENOTFOUND")) {
// Capture telemetry for host not found error
TelemetryService.instance.captureEvent(TelemetryEventName.CODE_INDEX_ERROR, {
error: sanitizeErrorMessage(error instanceof Error ? error.message : String(error)),
stack: error instanceof Error ? sanitizeErrorMessage(error.stack || "") : undefined,
location: "OllamaEmbedder:validateConfiguration:hostNotFound",
})
return {
valid: false,
error: t("embeddings:ollama.hostNotFound", { baseUrl: this.baseUrl }),
}
} else if (error?.name === "AbortError") {
// Capture telemetry for timeout error
TelemetryService.instance.captureEvent(TelemetryEventName.CODE_INDEX_ERROR, {
error: sanitizeErrorMessage(error instanceof Error ? error.message : String(error)),
stack: error instanceof Error ? sanitizeErrorMessage(error.stack || "") : undefined,
location: "OllamaEmbedder:validateConfiguration:timeout",
})
// Handle timeout
return {
valid: false,
Expand Down
68 changes: 44 additions & 24 deletions src/services/code-index/embedders/openai-compatible.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,8 @@ import {
import { getDefaultModelId, getModelQueryPrefix } from "../../../shared/embeddingModels"
import { t } from "../../../i18n"
import { withValidationErrorHandling, HttpError, formatEmbeddingError } from "../shared/validation-helpers"
import { TelemetryEventName } from "@roo-code/types"
import { TelemetryService } from "@roo-code/telemetry"

interface EmbeddingItem {
embedding: string | number[]
Expand Down Expand Up @@ -284,6 +286,14 @@ export class OpenAICompatibleEmbedder implements IEmbedder {
},
}
} catch (error) {
// Capture telemetry before error is reformatted
TelemetryService.instance.captureEvent(TelemetryEventName.CODE_INDEX_ERROR, {
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
location: "OpenAICompatibleEmbedder:_embedBatchWithRetries",
attempt: attempts + 1,
})

const hasMoreAttempts = attempts < MAX_RETRIES - 1

// Check if it's a rate limit error
Expand Down Expand Up @@ -318,33 +328,43 @@ export class OpenAICompatibleEmbedder implements IEmbedder {
*/
async validateConfiguration(): Promise<{ valid: boolean; error?: string }> {
return withValidationErrorHandling(async () => {
// Test with a minimal embedding request
const testTexts = ["test"]
const modelToUse = this.defaultModelId

let response: OpenAIEmbeddingResponse

if (this.isFullUrl) {
// Test direct HTTP request for full endpoint URLs
response = await this.makeDirectEmbeddingRequest(this.baseUrl, testTexts, modelToUse)
} else {
// Test using OpenAI SDK for base URLs
response = (await this.embeddingsClient.embeddings.create({
input: testTexts,
model: modelToUse,
encoding_format: "base64",
})) as OpenAIEmbeddingResponse
}
try {
// Test with a minimal embedding request
const testTexts = ["test"]
const modelToUse = this.defaultModelId

// Check if we got a valid response
if (!response?.data || response.data.length === 0) {
return {
valid: false,
error: "embeddings:validation.invalidResponse",
let response: OpenAIEmbeddingResponse

if (this.isFullUrl) {
// Test direct HTTP request for full endpoint URLs
response = await this.makeDirectEmbeddingRequest(this.baseUrl, testTexts, modelToUse)
} else {
// Test using OpenAI SDK for base URLs
response = (await this.embeddingsClient.embeddings.create({
input: testTexts,
model: modelToUse,
encoding_format: "base64",
})) as OpenAIEmbeddingResponse
}
}

return { valid: true }
// Check if we got a valid response
if (!response?.data || response.data.length === 0) {
return {
valid: false,
error: "embeddings:validation.invalidResponse",
}
}

return { valid: true }
} catch (error) {
// Capture telemetry for validation errors
TelemetryService.instance.captureEvent(TelemetryEventName.CODE_INDEX_ERROR, {
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
location: "OpenAICompatibleEmbedder:validateConfiguration",
})
throw error
}
}, "openai-compatible")
}

Expand Down
Loading
Loading