Skip to content

Commit b92a22b

Browse files
authored
Fix caching logic in Roo provider (#8860)
1 parent bd1f890 commit b92a22b

File tree

2 files changed

+20
-10
lines changed

2 files changed

+20
-10
lines changed

packages/types/src/provider-settings.ts

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -598,7 +598,12 @@ export const getApiProtocol = (provider: ProviderName | undefined, modelId?: str
598598
}
599599

600600
// Vercel AI Gateway uses anthropic protocol for anthropic models.
601-
if (provider && provider === "vercel-ai-gateway" && modelId && modelId.toLowerCase().startsWith("anthropic/")) {
601+
if (
602+
provider &&
603+
["vercel-ai-gateway", "roo"].includes(provider) &&
604+
modelId &&
605+
modelId.toLowerCase().startsWith("anthropic/")
606+
) {
602607
return "anthropic"
603608
}
604609

src/api/providers/roo.ts

Lines changed: 14 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -90,6 +90,8 @@ export class RooHandler extends BaseOpenAiCompatibleProvider<string> {
9090
metadata?.taskId ? { headers: { "X-Roo-Task-ID": metadata.taskId } } : undefined,
9191
)
9292

93+
let lastUsage: RooUsage | undefined = undefined
94+
9395
for await (const chunk of stream) {
9496
const delta = chunk.choices[0]?.delta
9597

@@ -110,15 +112,18 @@ export class RooHandler extends BaseOpenAiCompatibleProvider<string> {
110112
}
111113

112114
if (chunk.usage) {
113-
const usage = chunk.usage as RooUsage
114-
yield {
115-
type: "usage",
116-
inputTokens: usage.prompt_tokens || 0,
117-
outputTokens: usage.completion_tokens || 0,
118-
cacheWriteTokens: usage.cache_creation_input_tokens,
119-
cacheReadTokens: usage.prompt_tokens_details?.cached_tokens,
120-
totalCost: usage.cost ?? 0,
121-
}
115+
lastUsage = chunk.usage as RooUsage
116+
}
117+
}
118+
119+
if (lastUsage) {
120+
yield {
121+
type: "usage",
122+
inputTokens: lastUsage.prompt_tokens || 0,
123+
outputTokens: lastUsage.completion_tokens || 0,
124+
cacheWriteTokens: lastUsage.cache_creation_input_tokens,
125+
cacheReadTokens: lastUsage.prompt_tokens_details?.cached_tokens,
126+
totalCost: lastUsage.cost ?? 0,
122127
}
123128
}
124129
}

0 commit comments

Comments (0)