Skip to content

Commit 079d05c

Browse files
arafatkatze and Cline Evaluation authored
Fixing OpenAI compatible to support cache token display (RooCodeInc#3957)
Co-authored-by: Cline Evaluation <[email protected]>
1 parent acc795e commit 079d05c

File tree

3 files changed

+13
-4
lines changed

3 files changed

+13
-4
lines changed
Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
"claude-dev": patch
3+
---
4+
5+
fixing token counting for xai provider

src/api/providers/openai.ts

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -98,6 +98,10 @@ export class OpenAiHandler implements ApiHandler {
9898
type: "usage",
9999
inputTokens: chunk.usage.prompt_tokens || 0,
100100
outputTokens: chunk.usage.completion_tokens || 0,
101+
// @ts-ignore-next-line
102+
cacheReadTokens: chunk.usage.prompt_tokens_details?.cached_tokens || 0,
103+
// @ts-ignore-next-line
104+
cacheWriteTokens: chunk.usage.prompt_cache_miss_tokens || 0,
101105
}
102106
}
103107
}

webview-ui/src/components/chat/TaskHeader.tsx

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -141,10 +141,10 @@ const TaskHeader: React.FC<TaskHeaderProps> = ({
141141
}, [apiConfiguration?.apiProvider, apiConfiguration?.openAiModelInfo])
142142

143143
const shouldShowPromptCacheInfo = () => {
144-
return (
145-
doesModelSupportPromptCache &&
146-
((cacheReads !== undefined && cacheReads > 0) || (cacheWrites !== undefined && cacheWrites > 0))
147-
)
144+
// Hybrid logic: Show cache info if we have actual cache data,
145+
// regardless of whether the model explicitly supports prompt cache.
146+
// This allows OpenAI-compatible providers to show cache tokens.
147+
return (cacheReads !== undefined && cacheReads > 0) || (cacheWrites !== undefined && cacheWrites > 0)
148148
}
149149

150150
console.log("IS_DEV", { IS_DEV, isItTrue: IS_DEV === '"true"' })

0 commit comments

Comments (0)