@@ -315,71 +315,6 @@ describe("OpenAiHandler", () => {
 			const callArgs = mockCreate.mock.calls[0][0]
 			expect(callArgs.max_completion_tokens).toBe(4096)
 		})
-
-		it("should omit temperature when modelTemperature is undefined", async () => {
-			const optionsWithoutTemperature: ApiHandlerOptions = {
-				...mockOptions,
-				// modelTemperature is not set, should not include temperature
-			}
-			const handlerWithoutTemperature = new OpenAiHandler(optionsWithoutTemperature)
-			const stream = handlerWithoutTemperature.createMessage(systemPrompt, messages)
-			// Consume the stream to trigger the API call
-			for await (const _chunk of stream) {
-			}
-			// Assert the mockCreate was called without temperature
-			expect(mockCreate).toHaveBeenCalled()
-			const callArgs = mockCreate.mock.calls[0][0]
-			expect(callArgs).not.toHaveProperty("temperature")
-		})
-
-		it("should include temperature when modelTemperature is explicitly set to 0", async () => {
-			const optionsWithZeroTemperature: ApiHandlerOptions = {
-				...mockOptions,
-				modelTemperature: 0,
-			}
-			const handlerWithZeroTemperature = new OpenAiHandler(optionsWithZeroTemperature)
-			const stream = handlerWithZeroTemperature.createMessage(systemPrompt, messages)
-			// Consume the stream to trigger the API call
-			for await (const _chunk of stream) {
-			}
-			// Assert the mockCreate was called with temperature: 0
-			expect(mockCreate).toHaveBeenCalled()
-			const callArgs = mockCreate.mock.calls[0][0]
-			expect(callArgs.temperature).toBe(0)
-		})
-
-		it("should include temperature when modelTemperature is set to a non-zero value", async () => {
-			const optionsWithCustomTemperature: ApiHandlerOptions = {
-				...mockOptions,
-				modelTemperature: 0.7,
-			}
-			const handlerWithCustomTemperature = new OpenAiHandler(optionsWithCustomTemperature)
-			const stream = handlerWithCustomTemperature.createMessage(systemPrompt, messages)
-			// Consume the stream to trigger the API call
-			for await (const _chunk of stream) {
-			}
-			// Assert the mockCreate was called with temperature: 0.7
-			expect(mockCreate).toHaveBeenCalled()
-			const callArgs = mockCreate.mock.calls[0][0]
-			expect(callArgs.temperature).toBe(0.7)
-		})
-
-		it("should include DEEP_SEEK_DEFAULT_TEMPERATURE for deepseek-reasoner models when temperature is not set", async () => {
-			const deepseekOptions: ApiHandlerOptions = {
-				...mockOptions,
-				openAiModelId: "deepseek-reasoner",
-				// modelTemperature is not set
-			}
-			const deepseekHandler = new OpenAiHandler(deepseekOptions)
-			const stream = deepseekHandler.createMessage(systemPrompt, messages)
-			// Consume the stream to trigger the API call
-			for await (const _chunk of stream) {
-			}
-			// Assert the mockCreate was called with DEEP_SEEK_DEFAULT_TEMPERATURE (0.6)
-			expect(mockCreate).toHaveBeenCalled()
-			const callArgs = mockCreate.mock.calls[0][0]
-			expect(callArgs.temperature).toBe(0.6)
-		})
 	})
 
 	describe("error handling", () => {
@@ -515,7 +450,7 @@ describe("OpenAiHandler", () => {
 				],
 				stream: true,
 				stream_options: { include_usage: true },
-				// temperature should be omitted when not set
+				temperature: 0,
 			},
 			{ path: "/models/chat/completions" },
 		)
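
Note on the behavior change: the removed tests asserted the old contract, where `temperature` was conditionally omitted when `modelTemperature` was unset (with a deepseek-reasoner default of 0.6), while the updated expectation asserts `temperature: 0` instead. A minimal sketch of the fallback the new expectation implies, assuming a simple nullish-coalescing default; `resolveTemperature` is a hypothetical helper, not the handler's actual code, and this hunk does not show whether the deepseek-specific default is preserved elsewhere:

// Hypothetical sketch only: the updated test expects temperature: 0 even when
// modelTemperature is unset, i.e. a default value rather than omitting the
// property as the removed tests asserted.
function resolveTemperature(modelTemperature?: number): number {
	return modelTemperature ?? 0 // explicit values (0, 0.7, ...) pass through unchanged
}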