Skip to content

Commit 1372143

Browse files
mrubens and PrasangAPrajapati
authored and committed
Fix caching logic in Roo provider (RooCodeInc#8860)
1 parent 47dcd25 commit 1372143

File tree

2 files changed

+20
-10
lines changed

2 files changed

+20
-10
lines changed

packages/types/src/provider-settings.ts

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -615,7 +615,12 @@ export const getApiProtocol = (provider: ProviderName | undefined, modelId?: str
615615
}
616616

617617
// Vercel AI Gateway uses anthropic protocol for anthropic models.
618-
if (provider && provider === "vercel-ai-gateway" && modelId && modelId.toLowerCase().startsWith("anthropic/")) {
618+
if (
619+
provider &&
620+
["vercel-ai-gateway", "roo"].includes(provider) &&
621+
modelId &&
622+
modelId.toLowerCase().startsWith("anthropic/")
623+
) {
619624
return "anthropic"
620625
}
621626

src/api/providers/roo.ts

Lines changed: 14 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -90,6 +90,8 @@ export class RooHandler extends BaseOpenAiCompatibleProvider<string> {
9090
metadata?.taskId ? { headers: { "X-Roo-Task-ID": metadata.taskId } } : undefined,
9191
)
9292

93+
let lastUsage: RooUsage | undefined = undefined
94+
9395
for await (const chunk of stream) {
9496
const delta = chunk.choices[0]?.delta
9597

@@ -110,15 +112,18 @@ export class RooHandler extends BaseOpenAiCompatibleProvider<string> {
110112
}
111113

112114
if (chunk.usage) {
113-
const usage = chunk.usage as RooUsage
114-
yield {
115-
type: "usage",
116-
inputTokens: usage.prompt_tokens || 0,
117-
outputTokens: usage.completion_tokens || 0,
118-
cacheWriteTokens: usage.cache_creation_input_tokens,
119-
cacheReadTokens: usage.prompt_tokens_details?.cached_tokens,
120-
totalCost: usage.cost ?? 0,
121-
}
115+
lastUsage = chunk.usage as RooUsage
116+
}
117+
}
118+
119+
if (lastUsage) {
120+
yield {
121+
type: "usage",
122+
inputTokens: lastUsage.prompt_tokens || 0,
123+
outputTokens: lastUsage.completion_tokens || 0,
124+
cacheWriteTokens: lastUsage.cache_creation_input_tokens,
125+
cacheReadTokens: lastUsage.prompt_tokens_details?.cached_tokens,
126+
totalCost: lastUsage.cost ?? 0,
122127
}
123128
}
124129
}

0 commit comments

Comments (0)