@@ -23,7 +23,7 @@ const mockOpenRouterModelInfo: ModelInfo = {
 describe("OpenRouterHandler", () => {
 	const mockOptions: ApiHandlerOptions = {
 		openRouterApiKey: "test-key",
-		openRouterModelId: "test-model",
+		openRouterModelId: "anthropic/claude-3.7-sonnet",
 	}
 
 	beforeEach(() => {
@@ -45,55 +45,54 @@ describe("OpenRouterHandler", () => {
 	})
 
 	describe("getModel", () => {
-		it("returns correct model info when options are provided", () => {
+		it("returns correct model info when options are provided", async () => {
 			const handler = new OpenRouterHandler(mockOptions)
-			const result = handler.getModel()
+			const result = await handler.fetchModel()
 
-			expect(result).toEqual({
+			expect(result).toMatchObject({
 				id: mockOptions.openRouterModelId,
-				maxTokens: 1000,
+				maxTokens: 8192,
 				thinking: undefined,
 				temperature: 0,
 				reasoningEffort: undefined,
 				topP: undefined,
 				promptCache: {
-					supported: false,
+					supported: true,
 					optional: false,
 				},
 			})
 		})
 
-		it("returns default model info when options are not provided", () => {
+		it("returns default model info when options are not provided", async () => {
 			const handler = new OpenRouterHandler({})
-			const result = handler.getModel()
-
+			const result = await handler.fetchModel()
 			expect(result.id).toBe("anthropic/claude-3.7-sonnet")
 			expect(result.info.supportsPromptCache).toBe(true)
 		})
 
-		it("honors custom maxTokens for thinking models", () => {
+		it("honors custom maxTokens for thinking models", async () => {
 			const handler = new OpenRouterHandler({
 				openRouterApiKey: "test-key",
-				openRouterModelId: "test-model",
+				openRouterModelId: "anthropic/claude-3.7-sonnet:thinking",
 				modelMaxTokens: 32_768,
 				modelMaxThinkingTokens: 16_384,
 			})
 
-			const result = handler.getModel()
+			const result = await handler.fetchModel()
 			expect(result.maxTokens).toBe(32_768)
 			expect(result.thinking).toEqual({ type: "enabled", budget_tokens: 16_384 })
 			expect(result.temperature).toBe(1.0)
 		})
 
-		it("does not honor custom maxTokens for non-thinking models", () => {
+		it("does not honor custom maxTokens for non-thinking models", async () => {
 			const handler = new OpenRouterHandler({
 				...mockOptions,
 				modelMaxTokens: 32_768,
 				modelMaxThinkingTokens: 16_384,
 			})
 
-			const result = handler.getModel()
-			expect(result.maxTokens).toBe(1000)
+			const result = await handler.fetchModel()
+			expect(result.maxTokens).toBe(8192)
 			expect(result.thinking).toBeUndefined()
 			expect(result.temperature).toBe(0)
 		})
@@ -106,7 +105,7 @@ describe("OpenRouterHandler", () => {
 		const mockStream = {
 			async *[Symbol.asyncIterator]() {
 				yield {
-					id: "test-id",
+					id: mockOptions.openRouterModelId,
 					choices: [{ delta: { content: "test response" } }],
 				}
 				yield {
@@ -139,16 +138,29 @@ describe("OpenRouterHandler", () => {
 		expect(chunks[0]).toEqual({ type: "text", text: "test response" })
 		expect(chunks[1]).toEqual({ type: "usage", inputTokens: 10, outputTokens: 20, totalCost: 0.001 })
 
-		// Verify OpenAI client was called with correct parameters
+		// Verify OpenAI client was called with correct parameters.
 		expect(mockCreate).toHaveBeenCalledWith(
 			expect.objectContaining({
-				model: mockOptions.openRouterModelId,
-				temperature: 0,
-				messages: expect.arrayContaining([
-					{ role: "system", content: systemPrompt },
-					{ role: "user", content: "test message" },
-				]),
+				max_tokens: 8192,
+				messages: [
+					{
+						content: [
+							{ cache_control: { type: "ephemeral" }, text: "test system prompt", type: "text" },
+						],
+						role: "system",
+					},
+					{
+						content: [{ cache_control: { type: "ephemeral" }, text: "test message", type: "text" }],
+						role: "user",
+					},
+				],
+				model: "anthropic/claude-3.7-sonnet",
 				stream: true,
+				stream_options: { include_usage: true },
+				temperature: 0,
+				thinking: undefined,
+				top_p: undefined,
+				transforms: ["middle-out"],
 			}),
 		)
 	})
@@ -255,7 +267,7 @@ describe("OpenRouterHandler", () => {
 
 		expect(mockCreate).toHaveBeenCalledWith({
 			model: mockOptions.openRouterModelId,
-			max_tokens: 1000,
+			max_tokens: 8192,
 			thinking: undefined,
 			temperature: 0,
 			messages: [{ role: "user", content: "test prompt" }],
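
For context on the `cache_control` blocks asserted above: OpenRouter accepts Anthropic-style prompt-caching markers on individual content parts, alongside OpenRouter-specific fields like `transforms`. Below is a minimal sketch of a request built in that shape, assuming the `openai` SDK pointed at OpenRouter's OpenAI-compatible endpoint; only the field names and values shown in the diff are taken from the tests, and the client setup and cast are assumptions, not the handler's implementation.

// A minimal sketch, not the handler code: the streaming request shape the
// updated assertions describe, sent through the OpenAI SDK against OpenRouter.
import OpenAI from "openai"

const client = new OpenAI({ baseURL: "https://openrouter.ai/api/v1", apiKey: "test-key" })

const stream = await client.chat.completions.create({
	model: "anthropic/claude-3.7-sonnet",
	max_tokens: 8192,
	temperature: 0,
	stream: true,
	stream_options: { include_usage: true }, // final stream chunk then carries token usage
	transforms: ["middle-out"], // OpenRouter-specific prompt transform
	messages: [
		{
			role: "system",
			// ephemeral cache_control marks this block for Anthropic prompt caching
			content: [{ type: "text", text: "test system prompt", cache_control: { type: "ephemeral" } }],
		},
		{
			role: "user",
			content: [{ type: "text", text: "test message", cache_control: { type: "ephemeral" } }],
		},
	],
} as any) // cache_control and transforms are OpenRouter extensions beyond the SDK's param types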