3 files changed: +39 / -6 lines
@@ -33,6 +33,8 @@ export class ClineHandler implements ApiHandler {
 			this.options.openRouterProviderSorting,
 		)
 
+		let didOutputUsage: boolean = false
+
 		for await (const chunk of stream) {
 			// openrouter returns an error object instead of the openai sdk throwing an error
 			if ("error" in chunk) {
@@ -63,11 +65,25 @@ export class ClineHandler implements ApiHandler {
 					reasoning: delta.reasoning,
 				}
 			}
+
+			if (!didOutputUsage && chunk.usage) {
+				yield {
+					type: "usage",
+					inputTokens: chunk.usage.prompt_tokens || 0,
+					outputTokens: chunk.usage.completion_tokens || 0,
+					// @ts-ignore-next-line
+					totalCost: chunk.usage.cost || 0,
+				}
+				didOutputUsage = true
+			}
 		}
 
-		const apiStreamUsage = await this.getApiStreamUsage()
-		if (apiStreamUsage) {
-			yield apiStreamUsage
+		// Fallback to generation endpoint if usage chunk not returned
+		if (!didOutputUsage) {
+			const apiStreamUsage = await this.getApiStreamUsage()
+			if (apiStreamUsage) {
+				yield apiStreamUsage
+			}
 		}
 	}
 
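For reference, here is a minimal consumer sketch (not part of the diff) showing how a caller might pick up the single usage chunk the handler now yields, either from OpenRouter's in-stream usage or from the generation-endpoint fallback. The ApiStreamUsageChunk shape and the logUsage helper are assumptions made for illustration.

interface ApiStreamUsageChunk {
	type: "usage"
	inputTokens: number
	outputTokens: number
	totalCost?: number
}

// Assumed consumer: the handler now yields at most one "usage" chunk per request.
async function logUsage(stream: AsyncIterable<{ type: string } & Partial<ApiStreamUsageChunk>>): Promise<void> {
	for await (const chunk of stream) {
		if (chunk.type === "usage") {
			console.log(`in=${chunk.inputTokens} out=${chunk.outputTokens} cost=$${(chunk.totalCost ?? 0).toFixed(6)}`)
		}
	}
}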
@@ -40,6 +40,8 @@ export class OpenRouterHandler implements ApiHandler {
 			this.options.openRouterProviderSorting,
 		)
 
+		let didOutputUsage: boolean = false
+
 		for await (const chunk of stream) {
 			// openrouter returns an error object instead of the openai sdk throwing an error
 			if ( "error" in chunk ) {
@@ -70,11 +72,25 @@ export class OpenRouterHandler implements ApiHandler {
 					reasoning: delta.reasoning,
 				}
 			}
+
+			if (!didOutputUsage && chunk.usage) {
+				yield {
+					type: "usage",
+					inputTokens: chunk.usage.prompt_tokens || 0,
+					outputTokens: chunk.usage.completion_tokens || 0,
+					// @ts-ignore-next-line
+					totalCost: chunk.usage.cost || 0,
+				}
+				didOutputUsage = true
+			}
 		}
 
-		const apiStreamUsage = await this.getApiStreamUsage()
-		if (apiStreamUsage) {
-			yield apiStreamUsage
+		// Fallback to generation endpoint if usage chunk not returned
+		if (!didOutputUsage) {
+			const apiStreamUsage = await this.getApiStreamUsage()
+			if (apiStreamUsage) {
+				yield apiStreamUsage
+			}
 		}
 	}
 
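A note on the @ts-ignore above: OpenRouter returns a cost field on the usage object that the OpenAI SDK's CompletionUsage type does not declare, which is why the suppression is needed. One assumed alternative is a small extension type; the names below are illustrative, not part of the change.

import OpenAI from "openai"

// OpenRouter's usage object is the OpenAI one plus a provider-specific cost field (USD).
type OpenRouterUsage = OpenAI.CompletionUsage & { cost?: number }

function usageCost(usage: OpenAI.CompletionUsage | undefined): number {
	return (usage as OpenRouterUsage | undefined)?.cost ?? 0
}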
@@ -142,6 +142,7 @@ export async function createOpenRouterStream(
 		top_p: topP,
 		messages: openAiMessages,
 		stream: true,
+		stream_options: { include_usage: true },
 		transforms: shouldApplyMiddleOutTransform ? ["middle-out"] : undefined,
 		include_reasoning: true,
 		...(model.id === "openai/o3-mini" ? { reasoning_effort: o3MiniReasoningEffort || "medium" } : {}),
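For context, a rough sketch of what stream_options: { include_usage: true } does when the request goes through the OpenAI SDK: regular chunks report usage as null, and one extra trailing chunk arrives with an empty choices array and the populated usage object. The base URL, model id, and environment variable below are placeholders, not values taken from this change.

import OpenAI from "openai"

const client = new OpenAI({
	baseURL: "https://openrouter.ai/api/v1",
	apiKey: process.env.OPENROUTER_API_KEY,
})

async function demo(): Promise<void> {
	const stream = await client.chat.completions.create({
		model: "openai/gpt-4o-mini",
		messages: [{ role: "user", content: "Say hi" }],
		stream: true,
		stream_options: { include_usage: true },
	})
	for await (const chunk of stream) {
		// Only the trailing usage chunk has a non-null usage field.
		if (chunk.usage) {
			console.log("prompt tokens:", chunk.usage.prompt_tokens, "completion tokens:", chunk.usage.completion_tokens)
		}
	}
}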