Commit 3322e08

cte and mrubens authored
Relax provider profiles schema and log parse error to PostHog (#2139)
* Relax provider profiles schema and log parse error to PostHog
* Create wise-moose-shop.md

Co-authored-by: Matt Rubens <[email protected]>
1 parent f25812d commit 3322e08

File tree

13 files changed: +56 −24 lines

.changeset/wise-moose-shop.md

Lines changed: 5 additions & 0 deletions
@@ -0,0 +1,5 @@
+---
+"roo-cline": patch
+---
+
+Relax provider profiles schema and add telemetry

src/api/providers/glama.ts

Lines changed: 1 addition & 1 deletion
@@ -88,7 +88,7 @@ export class GlamaHandler extends BaseProvider implements SingleCompletionHandle
 		let maxTokens: number | undefined

 		if (this.getModel().id.startsWith("anthropic/")) {
-			maxTokens = this.getModel().info.maxTokens
+			maxTokens = this.getModel().info.maxTokens ?? undefined
 		}

 		const requestOptions: OpenAI.Chat.ChatCompletionCreateParams = {
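The same one-character-class fix repeats in the Unbound and Vertex handlers below: because modelInfoSchema now allows maxTokens to be null, each handler coalesces it back to undefined before passing it to request options typed as number | undefined. A minimal standalone sketch of the narrowing (the ModelInfo shape here is trimmed to the relevant field):

// Sketch only: why `?? undefined` is needed after the schema change.
type ModelInfoSketch = { maxTokens?: number | null }

function resolveMaxTokens(info: ModelInfoSketch): number | undefined {
	// `??` maps both null and undefined to undefined, so the result satisfies
	// parameters typed as `number | undefined` (e.g. max_tokens).
	return info.maxTokens ?? undefined
}

resolveMaxTokens({ maxTokens: null }) // undefined
resolveMaxTokens({ maxTokens: 8192 }) // 8192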

src/api/providers/unbound.ts

Lines changed: 1 addition & 1 deletion
@@ -79,7 +79,7 @@ export class UnboundHandler extends BaseProvider implements SingleCompletionHand
 		let maxTokens: number | undefined

 		if (this.getModel().id.startsWith("anthropic/")) {
-			maxTokens = this.getModel().info.maxTokens
+			maxTokens = this.getModel().info.maxTokens ?? undefined
 		}

 		const requestOptions: OpenAI.Chat.Completions.ChatCompletionCreateParamsStreaming = {

src/api/providers/vertex.ts

Lines changed: 1 addition & 1 deletion
@@ -230,7 +230,7 @@ export class VertexHandler extends BaseProvider implements SingleCompletionHandl
 		const result = await model.generateContentStream({
 			contents: messages.map(convertAnthropicMessageToVertexGemini),
 			generationConfig: {
-				maxOutputTokens: this.getModel().info.maxTokens,
+				maxOutputTokens: this.getModel().info.maxTokens ?? undefined,
 				temperature: this.options.modelTemperature ?? 0,
 			},
 		})

src/core/config/ContextProxy.ts

Lines changed: 14 additions & 5 deletions
@@ -1,4 +1,5 @@
 import * as vscode from "vscode"
+import { ZodError } from "zod"

 import {
 	PROVIDER_SETTINGS_KEYS,
@@ -15,6 +16,7 @@ import {
 	isSecretStateKey,
 } from "../../schemas"
 import { logger } from "../../utils/logging"
+import { telemetryService } from "../../services/telemetry/TelemetryService"

 type GlobalStateKey = keyof GlobalState
 type SecretStateKey = keyof SecretState
@@ -157,8 +159,10 @@ export class ContextProxy {
 		try {
 			return globalSettingsSchema.parse(values)
 		} catch (error) {
-			// Log to Posthog?
-			// We'll want to know about bad type assumptions or bad ExtensionState data.
+			if (error instanceof ZodError) {
+				telemetryService.captureSchemaValidationError({ schemaName: "GlobalSettings", error })
+			}
+
 			return GLOBAL_SETTINGS_KEYS.reduce((acc, key) => ({ ...acc, [key]: values[key] }), {} as GlobalSettings)
 		}
 	}
@@ -173,8 +177,10 @@ export class ContextProxy {
 		try {
 			return providerSettingsSchema.parse(values)
 		} catch (error) {
-			// Log to Posthog?
-			// We'll want to know about bad type assumptions or bad ExtensionState data.
+			if (error instanceof ZodError) {
+				telemetryService.captureSchemaValidationError({ schemaName: "ProviderSettings", error })
+			}
+
 			return PROVIDER_SETTINGS_KEYS.reduce((acc, key) => ({ ...acc, [key]: values[key] }), {} as ProviderSettings)
 		}
 	}
@@ -225,7 +231,10 @@ export class ContextProxy {
 			const globalSettings = globalSettingsExportSchema.parse(this.getValues())
 			return Object.fromEntries(Object.entries(globalSettings).filter(([_, value]) => value !== undefined))
 		} catch (error) {
-			console.log(error.message)
+			if (error instanceof ZodError) {
+				telemetryService.captureSchemaValidationError({ schemaName: "GlobalSettings", error })
+			}
+
 			return undefined
 		}
 	}
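captureSchemaValidationError itself is not part of this diff. A rough sketch of what such a method might do, assuming a PostHog-style capture(event, properties) client and using only public ZodError fields; the event name, property shape, and AnalyticsClient interface are illustrative, not the actual TelemetryService API:

// Hypothetical sketch of the telemetry side, not the real implementation.
import { ZodError } from "zod"

interface AnalyticsClient {
	capture(event: string, properties: Record<string, unknown>): void // assumed interface
}

function captureSchemaValidationError(
	client: AnalyticsClient,
	{ schemaName, error }: { schemaName: string; error: ZodError },
) {
	// ZodError.issues lists each failing path and message; sending only the
	// paths avoids putting user-entered values into telemetry.
	client.capture("schema_validation_error", {
		schemaName,
		paths: error.issues.map((issue) => issue.path.join(".")),
	})
}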

src/core/config/ProviderSettingsManager.ts

Lines changed: 6 additions & 1 deletion
@@ -1,8 +1,9 @@
 import { ExtensionContext } from "vscode"
-import { z } from "zod"
+import { z, ZodError } from "zod"

 import { providerSettingsSchema, ApiConfigMeta } from "../../schemas"
 import { Mode, modes } from "../../shared/modes"
+import { telemetryService } from "../../services/telemetry/TelemetryService"

 const providerSettingsWithIdSchema = providerSettingsSchema.extend({ id: z.string().optional() })

@@ -272,6 +273,10 @@ export class ProviderSettingsManager {
 			const content = await this.context.secrets.get(this.secretsKey)
 			return content ? providerProfilesSchema.parse(JSON.parse(content)) : this.defaultProviderProfiles
 		} catch (error) {
+			if (error instanceof ZodError) {
+				telemetryService.captureSchemaValidationError({ schemaName: "ProviderProfiles", error })
+			}
+
 			throw new Error(`Failed to read provider profiles from secrets: ${error}`)
 		}
 	}
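Note that the error is still rethrown after the capture; telemetry only observes it. Because the stored value is passed through JSON.parse before validation, only schema mismatches (ZodError) produce a "ProviderProfiles" event, while malformed JSON surfaces as a plain SyntaxError and skips the branch. A small illustrative check, with a made-up field standing in for the real profile shape:

// Illustrative only: which failures would reach the ZodError branch above.
import { z } from "zod"

const profilesSketch = z.object({ currentApiConfigName: z.string() }) // stand-in for providerProfilesSchema

const classify = (raw: string) => {
	try {
		profilesSketch.parse(JSON.parse(raw))
		return "ok"
	} catch (error) {
		return error instanceof z.ZodError ? "ZodError -> telemetry" : "other error -> no telemetry"
	}
}

classify("not json") // "other error -> no telemetry" (SyntaxError)
classify('{"currentApiConfigName": 42}') // "ZodError -> telemetry"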

src/core/sliding-window/index.ts

Lines changed: 1 addition & 1 deletion
@@ -60,7 +60,7 @@ type TruncateOptions = {
 	messages: Anthropic.Messages.MessageParam[]
 	totalTokens: number
 	contextWindow: number
-	maxTokens?: number
+	maxTokens?: number | null
 	apiHandler: ApiHandler
 }
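Widening TruncateOptions to number | null means any arithmetic inside the truncation logic has to coalesce the value first. A hedged sketch of that pattern, assuming the reserved-token budget falls back to a fraction of the context window when no cap is set; the 0.2 factor and function name are illustrative, not taken from this diff:

// Sketch, not the actual truncation implementation.
function reservedOutputTokens(maxTokens: number | null | undefined, contextWindow: number): number {
	// null and undefined both mean "no explicit cap", so reserve a default share.
	return maxTokens ?? Math.floor(contextWindow * 0.2)
}

reservedOutputTokens(null, 200_000) // 40000
reservedOutputTokens(8192, 200_000) // 8192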

src/exports/roo-code.d.ts

Lines changed: 5 additions & 5 deletions
@@ -28,7 +28,7 @@ type ProviderSettings = {
 	glamaModelId?: string | undefined
 	glamaModelInfo?:
 		| ({
-				maxTokens?: number | undefined
+				maxTokens?: (number | null) | undefined
 				contextWindow: number
 				supportsImages?: boolean | undefined
 				supportsComputerUse?: boolean | undefined
@@ -47,7 +47,7 @@ type ProviderSettings = {
 	openRouterModelId?: string | undefined
 	openRouterModelInfo?:
 		| ({
-				maxTokens?: number | undefined
+				maxTokens?: (number | null) | undefined
 				contextWindow: number
 				supportsImages?: boolean | undefined
 				supportsComputerUse?: boolean | undefined
@@ -84,7 +84,7 @@ type ProviderSettings = {
 	openAiModelId?: string | undefined
 	openAiCustomModelInfo?:
 		| ({
-				maxTokens?: number | undefined
+				maxTokens?: (number | null) | undefined
 				contextWindow: number
 				supportsImages?: boolean | undefined
 				supportsComputerUse?: boolean | undefined
@@ -126,7 +126,7 @@ type ProviderSettings = {
 	unboundModelId?: string | undefined
 	unboundModelInfo?:
 		| ({
-				maxTokens?: number | undefined
+				maxTokens?: (number | null) | undefined
 				contextWindow: number
 				supportsImages?: boolean | undefined
 				supportsComputerUse?: boolean | undefined
@@ -144,7 +144,7 @@ type ProviderSettings = {
 	requestyModelId?: string | undefined
 	requestyModelInfo?:
 		| ({
-				maxTokens?: number | undefined
+				maxTokens?: (number | null) | undefined
 				contextWindow: number
 				supportsImages?: boolean | undefined
 				supportsComputerUse?: boolean | undefined

src/exports/types.ts

Lines changed: 5 additions & 5 deletions
@@ -29,7 +29,7 @@ type ProviderSettings = {
 	glamaModelId?: string | undefined
 	glamaModelInfo?:
 		| ({
-				maxTokens?: number | undefined
+				maxTokens?: (number | null) | undefined
 				contextWindow: number
 				supportsImages?: boolean | undefined
 				supportsComputerUse?: boolean | undefined
@@ -48,7 +48,7 @@ type ProviderSettings = {
 	openRouterModelId?: string | undefined
 	openRouterModelInfo?:
 		| ({
-				maxTokens?: number | undefined
+				maxTokens?: (number | null) | undefined
 				contextWindow: number
 				supportsImages?: boolean | undefined
 				supportsComputerUse?: boolean | undefined
@@ -85,7 +85,7 @@ type ProviderSettings = {
 	openAiModelId?: string | undefined
 	openAiCustomModelInfo?:
 		| ({
-				maxTokens?: number | undefined
+				maxTokens?: (number | null) | undefined
 				contextWindow: number
 				supportsImages?: boolean | undefined
 				supportsComputerUse?: boolean | undefined
@@ -127,7 +127,7 @@ type ProviderSettings = {
 	unboundModelId?: string | undefined
 	unboundModelInfo?:
 		| ({
-				maxTokens?: number | undefined
+				maxTokens?: (number | null) | undefined
 				contextWindow: number
 				supportsImages?: boolean | undefined
 				supportsComputerUse?: boolean | undefined
@@ -145,7 +145,7 @@ type ProviderSettings = {
 	requestyModelId?: string | undefined
 	requestyModelInfo?:
 		| ({
-				maxTokens?: number | undefined
+				maxTokens?: (number | null) | undefined
 				contextWindow: number
 				supportsImages?: boolean | undefined
 				supportsComputerUse?: boolean | undefined

src/schemas/index.ts

Lines changed: 1 addition & 1 deletion
@@ -100,7 +100,7 @@ export type TelemetrySetting = z.infer<typeof telemetrySettingsSchema>
  */

 export const modelInfoSchema = z.object({
-	maxTokens: z.number().optional(),
+	maxTokens: z.number().nullish(),
 	contextWindow: z.number(),
 	supportsImages: z.boolean().optional(),
 	supportsComputerUse: z.boolean().optional(),
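This is the root of the relaxation: .optional() only tolerates a missing or undefined maxTokens, while .nullish() also accepts an explicit null, which previously made whole profiles fail to parse. The inferred type becomes number | null | undefined, which is exactly the (number | null) | undefined widening seen in src/exports/types.ts and roo-code.d.ts above. A minimal comparison on a trimmed-down schema, not the full modelInfoSchema:

// Minimal comparison of .optional() vs .nullish().
import { z } from "zod"

const strict = z.object({ maxTokens: z.number().optional() })
const relaxed = z.object({ maxTokens: z.number().nullish() })

strict.safeParse({ maxTokens: null }).success // false: null is rejected
relaxed.safeParse({ maxTokens: null }).success // true: null now passes
relaxed.safeParse({}).success // true: still optional

type Relaxed = z.infer<typeof relaxed> // { maxTokens?: number | null | undefined }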
