27 changes: 22 additions & 5 deletions src/api/providers/openai.ts
@@ -60,6 +60,9 @@ export class OpenAiHandler implements ApiHandler, SingleCompletionHandler {
 		const deepseekReasoner = modelId.includes("deepseek-reasoner")
 		const ark = modelUrl.includes(".volces.com")
 
+		let last_prompt_tokens = 0
+		let last_completion_tokens = 0
+
 		if (this.options.openAiStreamingEnabled ?? true) {
 			const systemMessage: OpenAI.Chat.ChatCompletionSystemMessageParam = {
 				role: "system",
@@ -107,7 +110,9 @@ export class OpenAiHandler implements ApiHandler, SingleCompletionHandler {
 					}
 				}
 				if (chunk.usage) {
-					yield this.processUsageMetrics(chunk.usage)
+					yield this.processUsageMetrics(chunk.usage, last_prompt_tokens, last_completion_tokens)
+					last_prompt_tokens = chunk.usage?.prompt_tokens || 0
+					last_completion_tokens = chunk.usage?.completion_tokens || 0
 				}
 			}
 		} else {
@@ -130,15 +135,27 @@
 				type: "text",
 				text: response.choices[0]?.message.content || "",
 			}
-			yield this.processUsageMetrics(response.usage)
+			yield this.processUsageMetrics(response.usage, last_prompt_tokens, last_completion_tokens)
 		}
 	}
 
-	protected processUsageMetrics(usage: any): ApiStreamUsageChunk {
+	protected processUsageMetrics(
+		usage: any,
+		last_prompt_tokens: number,
+		last_completion_tokens: number,
+	): ApiStreamUsageChunk {
+		if (this.options.openAiUsageCumulation) {
+			return {
+				type: "usage",
+				inputTokens: usage?.prompt_tokens || 0,
+				outputTokens: usage?.completion_tokens || 0,
+			}
+		}
+
 		return {
 			type: "usage",
-			inputTokens: usage?.prompt_tokens || 0,
-			outputTokens: usage?.completion_tokens || 0,
+			inputTokens: usage?.prompt_tokens - last_prompt_tokens || 0,
+			outputTokens: usage?.completion_tokens - last_completion_tokens || 0,
 		}
 	}
 
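For readers following the openai.ts change: the sketch below restates the incremental-usage logic as a standalone function. It is an illustration only, with hypothetical names (deltaUsage, StreamUsage); only the subtraction and the openAiUsageCumulation branch come from the diff above.

// Standalone sketch (hypothetical names): converts usage reports from a
// streaming OpenAI-compatible API into per-chunk deltas, mirroring the
// subtraction done in processUsageMetrics above.
interface StreamUsage {
	prompt_tokens?: number
	completion_tokens?: number
}

interface UsageDelta {
	inputTokens: number
	outputTokens: number
}

function deltaUsage(
	usage: StreamUsage | undefined,
	lastPromptTokens: number,
	lastCompletionTokens: number,
	cumulate: boolean, // corresponds to options.openAiUsageCumulation
): UsageDelta {
	const prompt = usage?.prompt_tokens ?? 0
	const completion = usage?.completion_tokens ?? 0
	if (cumulate) {
		// Report the figures exactly as the provider sent them.
		return { inputTokens: prompt, outputTokens: completion }
	}
	// Report only the tokens accrued since the previous usage chunk.
	return {
		inputTokens: prompt - lastPromptTokens,
		outputTokens: completion - lastCompletionTokens,
	}
}

// Example: a provider that streams cumulative prompt totals of 10, then 25,
// yields deltas of 10 and 15 when cumulation is disabled.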
6 changes: 6 additions & 0 deletions src/core/webview/ClineProvider.ts
@@ -84,6 +84,7 @@ type GlobalStateKey =
 	| "openAiModelId"
 	| "openAiCustomModelInfo"
 	| "openAiUseAzure"
+	| "openAiUsageCumulation"
 	| "ollamaModelId"
 	| "ollamaBaseUrl"
 	| "lmStudioModelId"
@@ -1615,6 +1616,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
 			openAiModelId,
 			openAiCustomModelInfo,
 			openAiUseAzure,
+			openAiUsageCumulation,
 			ollamaModelId,
 			ollamaBaseUrl,
 			lmStudioModelId,
@@ -1660,6 +1662,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
 		await this.updateGlobalState("openAiModelId", openAiModelId)
 		await this.updateGlobalState("openAiCustomModelInfo", openAiCustomModelInfo)
 		await this.updateGlobalState("openAiUseAzure", openAiUseAzure)
+		await this.updateGlobalState("openAiUsageCumulation", openAiUsageCumulation)
 		await this.updateGlobalState("ollamaModelId", ollamaModelId)
 		await this.updateGlobalState("ollamaBaseUrl", ollamaBaseUrl)
 		await this.updateGlobalState("lmStudioModelId", lmStudioModelId)
@@ -2481,6 +2484,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
 			openAiModelId,
 			openAiCustomModelInfo,
 			openAiUseAzure,
+			openAiUsageCumulation,
 			ollamaModelId,
 			ollamaBaseUrl,
 			lmStudioModelId,
@@ -2561,6 +2565,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
 			this.getGlobalState("openAiModelId") as Promise<string | undefined>,
 			this.getGlobalState("openAiCustomModelInfo") as Promise<ModelInfo | undefined>,
 			this.getGlobalState("openAiUseAzure") as Promise<boolean | undefined>,
+			this.getGlobalState("openAiUsageCumulation") as Promise<boolean | undefined>,
 			this.getGlobalState("ollamaModelId") as Promise<string | undefined>,
 			this.getGlobalState("ollamaBaseUrl") as Promise<string | undefined>,
 			this.getGlobalState("lmStudioModelId") as Promise<string | undefined>,
@@ -2658,6 +2663,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
 			openAiModelId,
 			openAiCustomModelInfo,
 			openAiUseAzure,
+			openAiUsageCumulation,
 			ollamaModelId,
 			ollamaBaseUrl,
 			lmStudioModelId,
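A rough sketch of the persistence path the ClineProvider.ts changes rely on: the new "openAiUsageCumulation" key is written and read through wrappers over VS Code's Memento-backed global state. The wrapper class below is an assumption for illustration; only the key name and the updateGlobalState/getGlobalState call shapes appear in the diff.

import * as vscode from "vscode"

// Hypothetical thin wrapper; ClineProvider's real methods may differ in detail.
class SettingsStore {
	constructor(private readonly context: vscode.ExtensionContext) {}

	async updateGlobalState(key: string, value: unknown): Promise<void> {
		await this.context.globalState.update(key, value)
	}

	async getGlobalState(key: string): Promise<unknown> {
		return this.context.globalState.get(key)
	}
}

// Round-tripping the flag added by this PR:
// await store.updateGlobalState("openAiUsageCumulation", false)
// const cumulate = ((await store.getGlobalState("openAiUsageCumulation")) as boolean | undefined) ?? true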
1 change: 1 addition & 0 deletions src/shared/api.ts
@@ -45,6 +45,7 @@ export interface ApiHandlerOptions {
 	openAiModelId?: string
 	openAiCustomModelInfo?: ModelInfo
 	openAiUseAzure?: boolean
+	openAiUsageCumulation?: boolean
 	ollamaModelId?: string
 	ollamaBaseUrl?: string
 	lmStudioModelId?: string
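The api.ts change simply adds the optional flag to ApiHandlerOptions. A minimal example of constructing such an options object follows, assuming the remaining interface fields are likewise optional; the import path and concrete values are illustrative, not taken from the PR.

import type { ApiHandlerOptions } from "../shared/api" // adjust path to the importing file

const options: ApiHandlerOptions = {
	openAiModelId: "gpt-4o-mini", // illustrative model id
	openAiUseAzure: false,
	openAiUsageCumulation: false, // false => report per-chunk token deltas instead of the provider's running totals
}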
9 changes: 9 additions & 0 deletions webview-ui/src/components/settings/ApiOptions.tsx
@@ -628,6 +628,15 @@ const ApiOptions = ({ apiErrorMessage, modelIdErrorMessage, fromWelcomeView }: A
 							Enable streaming
 						</Checkbox>
 					</div>
+					<Checkbox
+						checked={apiConfiguration?.openAiUsageCumulation ?? true}
+						onChange={(checked: boolean) => {
+							handleInputChange("openAiUsageCumulation")({
+								target: { value: checked },
+							})
+						}}>
+						Enable token usage cumulation
+					</Checkbox>
 					<Checkbox
 						checked={apiConfiguration?.openAiUseAzure ?? false}
 						onChange={(checked: boolean) => {
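Finally, a note on the ApiOptions.tsx wiring: handleInputChange's signature is not shown in this diff, so the event shape in the stub below is inferred from the call site ({ target: { value: checked } }) and should be treated as an assumption.

// Inferred stub of the settings plumbing; the real handleInputChange lives in
// ApiOptions.tsx and updates apiConfiguration instead of logging.
type SettingEvent = { target: { value: unknown } }

const handleInputChange =
	(field: string) =>
	(event: SettingEvent): void => {
		console.log(`set ${field} =`, event.target.value)
	}

// Unchecking the new box sends false for "openAiUsageCumulation"; the UI default
// (?? true) keeps cumulative reporting until the user opts out.
handleInputChange("openAiUsageCumulation")({ target: { value: false } })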