Skip to content

Commit c8f8754

Browse files
committed
Fix: Providers like SiliconFlow show a wrong token count in the task header
1 parent 75f4a6c commit c8f8754

File tree

4 files changed: +38 additions, −5 deletions

src/api/providers/openai.ts

Lines changed: 22 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -60,6 +60,9 @@ export class OpenAiHandler implements ApiHandler, SingleCompletionHandler {
6060
const deepseekReasoner = modelId.includes("deepseek-reasoner")
6161
const ark = modelUrl.includes(".volces.com")
6262

63+
let last_prompt_tokens = 0
64+
let last_completion_tokens = 0
65+
6366
if (this.options.openAiStreamingEnabled ?? true) {
6467
const systemMessage: OpenAI.Chat.ChatCompletionSystemMessageParam = {
6568
role: "system",
@@ -107,7 +110,9 @@ export class OpenAiHandler implements ApiHandler, SingleCompletionHandler {
107110
}
108111
}
109112
if (chunk.usage) {
110-
yield this.processUsageMetrics(chunk.usage)
113+
yield this.processUsageMetrics(chunk.usage, last_prompt_tokens, last_completion_tokens)
114+
last_prompt_tokens = chunk.usage?.prompt_tokens || 0
115+
last_completion_tokens = chunk.usage?.completion_tokens || 0
111116
}
112117
}
113118
} else {
@@ -130,15 +135,27 @@ export class OpenAiHandler implements ApiHandler, SingleCompletionHandler {
130135
type: "text",
131136
text: response.choices[0]?.message.content || "",
132137
}
133-
yield this.processUsageMetrics(response.usage)
138+
yield this.processUsageMetrics(response.usage, last_prompt_tokens, last_completion_tokens)
134139
}
135140
}
136141

137-
protected processUsageMetrics(usage: any): ApiStreamUsageChunk {
142+
protected processUsageMetrics(
143+
usage: any,
144+
last_prompt_tokens: number,
145+
last_completion_tokens: number,
146+
): ApiStreamUsageChunk {
147+
if (this.options.openAiUsageCumulation) {
148+
return {
149+
type: "usage",
150+
inputTokens: usage?.prompt_tokens || 0,
151+
outputTokens: usage?.completion_tokens || 0,
152+
}
153+
}
154+
138155
return {
139156
type: "usage",
140-
inputTokens: usage?.prompt_tokens || 0,
141-
outputTokens: usage?.completion_tokens || 0,
157+
inputTokens: usage?.prompt_tokens - last_prompt_tokens || 0,
158+
outputTokens: usage?.completion_tokens - last_completion_tokens || 0,
142159
}
143160
}
144161

src/core/webview/ClineProvider.ts

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -84,6 +84,7 @@ type GlobalStateKey =
8484
| "openAiModelId"
8585
| "openAiCustomModelInfo"
8686
| "openAiUseAzure"
87+
| "openAiUsageCumulation"
8788
| "ollamaModelId"
8889
| "ollamaBaseUrl"
8990
| "lmStudioModelId"
@@ -1615,6 +1616,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
16151616
openAiModelId,
16161617
openAiCustomModelInfo,
16171618
openAiUseAzure,
1619+
openAiUsageCumulation,
16181620
ollamaModelId,
16191621
ollamaBaseUrl,
16201622
lmStudioModelId,
@@ -1660,6 +1662,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
16601662
await this.updateGlobalState("openAiModelId", openAiModelId)
16611663
await this.updateGlobalState("openAiCustomModelInfo", openAiCustomModelInfo)
16621664
await this.updateGlobalState("openAiUseAzure", openAiUseAzure)
1665+
await this.updateGlobalState("openAiUsageCumulation", openAiUsageCumulation)
16631666
await this.updateGlobalState("ollamaModelId", ollamaModelId)
16641667
await this.updateGlobalState("ollamaBaseUrl", ollamaBaseUrl)
16651668
await this.updateGlobalState("lmStudioModelId", lmStudioModelId)
@@ -2481,6 +2484,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
24812484
openAiModelId,
24822485
openAiCustomModelInfo,
24832486
openAiUseAzure,
2487+
openAiUsageCumulation,
24842488
ollamaModelId,
24852489
ollamaBaseUrl,
24862490
lmStudioModelId,
@@ -2561,6 +2565,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
25612565
this.getGlobalState("openAiModelId") as Promise<string | undefined>,
25622566
this.getGlobalState("openAiCustomModelInfo") as Promise<ModelInfo | undefined>,
25632567
this.getGlobalState("openAiUseAzure") as Promise<boolean | undefined>,
2568+
this.getGlobalState("openAiUsageCumulation") as Promise<boolean | undefined>,
25642569
this.getGlobalState("ollamaModelId") as Promise<string | undefined>,
25652570
this.getGlobalState("ollamaBaseUrl") as Promise<string | undefined>,
25662571
this.getGlobalState("lmStudioModelId") as Promise<string | undefined>,
@@ -2658,6 +2663,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
26582663
openAiModelId,
26592664
openAiCustomModelInfo,
26602665
openAiUseAzure,
2666+
openAiUsageCumulation,
26612667
ollamaModelId,
26622668
ollamaBaseUrl,
26632669
lmStudioModelId,

src/shared/api.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -45,6 +45,7 @@ export interface ApiHandlerOptions {
4545
openAiModelId?: string
4646
openAiCustomModelInfo?: ModelInfo
4747
openAiUseAzure?: boolean
48+
openAiUsageCumulation?: boolean
4849
ollamaModelId?: string
4950
ollamaBaseUrl?: string
5051
lmStudioModelId?: string

webview-ui/src/components/settings/ApiOptions.tsx

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -628,6 +628,15 @@ const ApiOptions = ({ apiErrorMessage, modelIdErrorMessage, fromWelcomeView }: A
628628
Enable streaming
629629
</Checkbox>
630630
</div>
631+
<Checkbox
632+
checked={apiConfiguration?.openAiUsageCumulation ?? true}
633+
onChange={(checked: boolean) => {
634+
handleInputChange("openAiUsageCumulation")({
635+
target: { value: checked },
636+
})
637+
}}>
638+
Enable token usage cumulation
639+
</Checkbox>
631640
<Checkbox
632641
checked={apiConfiguration?.openAiUseAzure ?? false}
633642
onChange={(checked: boolean) => {

0 commit comments

Comments (0)