Skip to content

Commit e1d9e8e

Browse files
ctemehmetsunkur
authored and committed
Enable Gemini prompt caching by default (RooCodeInc#3225)
1 parent 38adefc commit e1d9e8e

File tree

25 files changed

+50
-45
lines changed

25 files changed

+50
-45
lines changed

.changeset/three-rings-approve.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
"roo-cline": patch
3+
---
4+
5+
Enable Gemini prompt caching by default

evals/packages/types/src/roo-code.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -418,7 +418,7 @@ export const providerSettingsSchema = z.object({
418418
// Generic
419419
includeMaxTokens: z.boolean().optional(),
420420
reasoningEffort: reasoningEffortsSchema.optional(),
421-
promptCachingEnabled: z.boolean().optional(),
421+
promptCachingDisabled: z.boolean().optional(),
422422
diffEnabled: z.boolean().optional(),
423423
fuzzyMatchThreshold: z.number().optional(),
424424
modelTemperature: z.number().nullish(),
@@ -507,7 +507,7 @@ const providerSettingsRecord: ProviderSettingsRecord = {
507507
// Generic
508508
includeMaxTokens: undefined,
509509
reasoningEffort: undefined,
510-
promptCachingEnabled: undefined,
510+
promptCachingDisabled: undefined,
511511
diffEnabled: undefined,
512512
fuzzyMatchThreshold: undefined,
513513
modelTemperature: undefined,

src/api/providers/__tests__/gemini.test.ts

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -291,7 +291,7 @@ describe("Caching Logic", () => {
291291
apiKey: "test-key",
292292
apiModelId: "gemini-1.5-flash-latest", // Use a model that supports caching
293293
geminiApiKey: "test-key",
294-
promptCachingEnabled: true, // Enable caching for these tests
294+
promptCachingDisabled: false,
295295
})
296296

297297
handlerWithCache["client"] = {
@@ -309,8 +309,8 @@ describe("Caching Logic", () => {
309309
} as any
310310
})
311311

312-
it("should not use cache if promptCachingEnabled is false", async () => {
313-
handlerWithCache["options"].promptCachingEnabled = false
312+
it("should not use cache if promptCachingDisabled is true", async () => {
313+
handlerWithCache["options"].promptCachingDisabled = true
314314
const stream = handlerWithCache.createMessage(systemPrompt, mockMessagesLong, cacheKey)
315315

316316
for await (const _ of stream) {

src/api/providers/gemini.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -93,7 +93,7 @@ export class GeminiHandler extends BaseProvider implements SingleCompletionHandl
9393
// https://ai.google.dev/gemini-api/docs/tokens?lang=node
9494
const isCacheAvailable =
9595
info.supportsPromptCache &&
96-
this.options.promptCachingEnabled &&
96+
!this.options.promptCachingDisabled &&
9797
cacheKey &&
9898
contentsLength > 4 * CONTEXT_CACHE_TOKEN_MINIMUM
9999

src/api/providers/openrouter.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -94,7 +94,7 @@ export class OpenRouterHandler extends BaseProvider implements SingleCompletionH
9494
openAiMessages = convertToR1Format([{ role: "user", content: systemPrompt }, ...messages])
9595
}
9696

97-
const isCacheAvailable = promptCache.supported && (!promptCache.optional || this.options.promptCachingEnabled)
97+
const isCacheAvailable = promptCache.supported && (!promptCache.optional || !this.options.promptCachingDisabled)
9898

9999
// https://openrouter.ai/docs/features/prompt-caching
100100
if (isCacheAvailable) {

src/exports/roo-code.d.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -127,7 +127,7 @@ type ProviderSettings = {
127127
modelMaxThinkingTokens?: number | undefined
128128
includeMaxTokens?: boolean | undefined
129129
reasoningEffort?: ("low" | "medium" | "high") | undefined
130-
promptCachingEnabled?: boolean | undefined
130+
promptCachingDisabled?: boolean | undefined
131131
diffEnabled?: boolean | undefined
132132
fuzzyMatchThreshold?: number | undefined
133133
modelTemperature?: (number | null) | undefined

src/exports/types.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -128,7 +128,7 @@ type ProviderSettings = {
128128
modelMaxThinkingTokens?: number | undefined
129129
includeMaxTokens?: boolean | undefined
130130
reasoningEffort?: ("low" | "medium" | "high") | undefined
131-
promptCachingEnabled?: boolean | undefined
131+
promptCachingDisabled?: boolean | undefined
132132
diffEnabled?: boolean | undefined
133133
fuzzyMatchThreshold?: number | undefined
134134
modelTemperature?: (number | null) | undefined

src/schemas/index.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -434,7 +434,7 @@ export const providerSettingsSchema = z.object({
434434
// Generic
435435
includeMaxTokens: z.boolean().optional(),
436436
reasoningEffort: reasoningEffortsSchema.optional(),
437-
promptCachingEnabled: z.boolean().optional(),
437+
promptCachingDisabled: z.boolean().optional(),
438438
diffEnabled: z.boolean().optional(),
439439
fuzzyMatchThreshold: z.number().optional(),
440440
modelTemperature: z.number().nullish(),
@@ -524,7 +524,7 @@ const providerSettingsRecord: ProviderSettingsRecord = {
524524
// Generic
525525
includeMaxTokens: undefined,
526526
reasoningEffort: undefined,
527-
promptCachingEnabled: undefined,
527+
promptCachingDisabled: undefined,
528528
diffEnabled: undefined,
529529
fuzzyMatchThreshold: undefined,
530530
modelTemperature: undefined,

webview-ui/src/components/settings/PromptCachingControl.tsx

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -16,8 +16,8 @@ export const PromptCachingControl = ({ apiConfiguration, setApiConfigurationFiel
1616
<>
1717
<div>
1818
<VSCodeCheckbox
19-
checked={apiConfiguration.promptCachingEnabled}
20-
onChange={(e: any) => setApiConfigurationField("promptCachingEnabled", e.target.checked)}>
19+
checked={apiConfiguration.promptCachingDisabled}
20+
onChange={(e: any) => setApiConfigurationField("promptCachingDisabled", e.target.checked)}>
2121
<label className="block font-medium mb-1">{t("settings:promptCaching.label")}</label>
2222
</VSCodeCheckbox>
2323
<div className="text-sm text-vscode-descriptionForeground mt-1">

webview-ui/src/i18n/locales/ca/settings.json

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -416,8 +416,8 @@
416416
}
417417
},
418418
"promptCaching": {
419-
"label": "Habilitar emmagatzematge en caché de prompts",
420-
"description": "Quan està habilitat, Roo utilitzarà aquest model amb la memòria cau de prompts activada per reduir costos."
419+
"label": "Desactivar la memòria cau de prompts",
420+
"description": "Quan està marcat, Roo no utilitzarà la memòria cau de prompts per a aquest model."
421421
},
422422
"temperature": {
423423
"useCustom": "Utilitzar temperatura personalitzada",

0 commit comments

Comments (0)