@@ -416,9 +416,9 @@ describe('GeminiChat', () => {
     expect(modelTurn?.parts![0]!.functionCall).toBeDefined();
   });

-  it('should succeed if the stream ends with an empty part but has a valid finishReason', async () => {
-    // 1. Mock a stream that ends with an invalid part but has a 'STOP' finish reason.
-    const streamWithValidFinish = (async function* () {
+  it('should fail if the stream ends with an empty part and has no finishReason', async () => {
+    // 1. Mock a stream that ends with an invalid part and has no finish reason.
+    const streamWithNoFinish = (async function* () {
       yield {
         candidates: [
           {
@@ -429,44 +429,35 @@ describe('GeminiChat', () => {
           },
         ],
       } as unknown as GenerateContentResponse;
-      // This second chunk is invalid, but the finishReason should save it from retrying.
+      // This second chunk is invalid and has no finishReason, so it should fail.
       yield {
         candidates: [
           {
             content: {
               role: 'model',
               parts: [{ text: '' }],
             },
-            finishReason: 'STOP',
           },
         ],
       } as unknown as GenerateContentResponse;
     })();

     vi.mocked(mockModelsModule.generateContentStream).mockResolvedValue(
-      streamWithValidFinish,
+      streamWithNoFinish,
     );

-    // 2. Action & Assert: The stream should complete successfully because the valid
-    // finishReason overrides the invalid final chunk.
+    // 2. Action & Assert: The stream should fail because there's no finish reason.
     const stream = await chat.sendMessageStream(
       { message: 'test message' },
-      'prompt-id-valid-finish-empty-end',
+      'prompt-id-no-finish-empty-end',
     );
     await expect(
       (async () => {
         for await (const _ of stream) {
           /* consume stream */
         }
       })(),
-    ).resolves.not.toThrow();
-
-    // 3. Verify history was recorded correctly
-    const history = chat.getHistory();
-    expect(history.length).toBe(2);
-    const modelTurn = history[1]!;
-    expect(modelTurn?.parts?.length).toBe(1); // The empty part is discarded
-    expect(modelTurn?.parts![0]!.text).toBe('Initial content...');
+    ).rejects.toThrow(EmptyStreamError);
   });
   it('should not consolidate text into a part that also contains a functionCall', async () => {
     // 1. Mock the API to stream a malformed part followed by a valid text part.
@@ -542,7 +533,10 @@ describe('GeminiChat', () => {
       // as the important part is consolidating what comes after.
       yield {
         candidates: [
-          { content: { role: 'model', parts: [{ text: ' World!' }] } },
+          {
+            content: { role: 'model', parts: [{ text: ' World!' }] },
+            finishReason: 'STOP',
+          },
         ],
       } as unknown as GenerateContentResponse;
     })();
@@ -645,6 +639,7 @@ describe('GeminiChat', () => {
                 { text: 'This is the visible text that should not be lost.' },
               ],
             },
+            finishReason: 'STOP',
           },
         ],
       } as unknown as GenerateContentResponse;
@@ -705,7 +700,10 @@ describe('GeminiChat', () => {
     const emptyStreamResponse = (async function* () {
       yield {
         candidates: [
-          { content: { role: 'model', parts: [{ thought: true }] } },
+          {
+            content: { role: 'model', parts: [{ thought: true }] },
+            finishReason: 'STOP',
+          },
         ],
       } as unknown as GenerateContentResponse;
     })();
@@ -975,7 +973,12 @@ describe('GeminiChat', () => {
       // Second attempt (the retry): A minimal valid stream.
       (async function* () {
         yield {
-          candidates: [{ content: { parts: [{ text: 'Success' }] } }],
+          candidates: [
+            {
+              content: { parts: [{ text: 'Success' }] },
+              finishReason: 'STOP',
+            },
+          ],
         } as unknown as GenerateContentResponse;
       })(),
     );
@@ -1012,7 +1015,10 @@ describe('GeminiChat', () => {
       (async function* () {
         yield {
           candidates: [
-            { content: { parts: [{ text: 'Successful response' }] } },
+            {
+              content: { parts: [{ text: 'Successful response' }] },
+              finishReason: 'STOP',
+            },
           ],
         } as unknown as GenerateContentResponse;
       })(),
@@ -1123,7 +1129,12 @@ describe('GeminiChat', () => {
       // Second attempt succeeds
      (async function* () {
        yield {
-          candidates: [{ content: { parts: [{ text: 'Second answer' }] } }],
+          candidates: [
+            {
+              content: { parts: [{ text: 'Second answer' }] },
+              finishReason: 'STOP',
+            },
+          ],
        } as unknown as GenerateContentResponse;
      })(),
    );
@@ -1272,6 +1283,7 @@ describe('GeminiChat', () => {
             content: {
               parts: [{ text: 'Successful response after empty' }],
             },
+            finishReason: 'STOP',
           },
         ],
       } as unknown as GenerateContentResponse;
@@ -1333,13 +1345,23 @@ describe('GeminiChat', () => {
       } as unknown as GenerateContentResponse;
       await firstStreamContinuePromise; // Pause the stream
       yield {
-        candidates: [{ content: { parts: [{ text: ' part 2' }] } }],
+        candidates: [
+          {
+            content: { parts: [{ text: ' part 2' }] },
+            finishReason: 'STOP',
+          },
+        ],
       } as unknown as GenerateContentResponse;
     })();

     const secondStreamGenerator = (async function* () {
       yield {
-        candidates: [{ content: { parts: [{ text: 'second response' }] } }],
+        candidates: [
+          {
+            content: { parts: [{ text: 'second response' }] },
+            finishReason: 'STOP',
+          },
+        ],
       } as unknown as GenerateContentResponse;
     })();

@@ -1424,6 +1446,7 @@ describe('GeminiChat', () => {
             content: {
               parts: [{ text: 'Successful final response' }],
             },
+            finishReason: 'STOP',
           },
         ],
       } as unknown as GenerateContentResponse;