@@ -2,6 +2,7 @@ import { Response } from 'node-fetch';
 import { PassThrough } from 'stream';
 import assert from 'assert';
 import { _iterSSEMessages, _decodeChunks as decodeChunks } from 'openai/streaming';
+import { LineDecoder } from 'openai/internal/decoders/line';
 
 describe('line decoder', () => {
   test('basic', () => {
@@ -10,8 +11,8 @@ describe('line decoder', () => {
   });
 
   test('basic with \\r', () => {
-    // baz is not included because the line hasn't ended yet
     expect(decodeChunks(['foo', ' bar\r\nbaz'])).toEqual(['foo bar']);
+    expect(decodeChunks(['foo', ' bar\r\nbaz'], { flush: true })).toEqual(['foo bar', 'baz']);
   });
 
   test('trailing new lines', () => {
@@ -29,6 +30,56 @@ describe('line decoder', () => {
   test('escaped new lines with \\r', () => {
     expect(decodeChunks(['foo', ' bar\\r\\nbaz\n'])).toEqual(['foo bar\\r\\nbaz']);
   });
+
+  test('\\r & \\n split across multiple chunks', () => {
+    expect(decodeChunks(['foo\r', '\n', 'bar'], { flush: true })).toEqual(['foo', 'bar']);
+  });
+
+  test('single \\r', () => {
+    expect(decodeChunks(['foo\r', 'bar'], { flush: true })).toEqual(['foo', 'bar']);
+  });
+
+  test('double \\r', () => {
+    expect(decodeChunks(['foo\r', 'bar\r'], { flush: true })).toEqual(['foo', 'bar']);
+    expect(decodeChunks(['foo\r', '\r', 'bar'], { flush: true })).toEqual(['foo', '', 'bar']);
+    // implementation detail that we don't yield the single \r line until a new \r or \n is encountered
+    expect(decodeChunks(['foo\r', '\r', 'bar'], { flush: false })).toEqual(['foo']);
+  });
+
+  test('double \\r then \\r\\n', () => {
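+    // a \r immediately followed by \n counts as a single line ending, so \r\r\r\n behaves the same as \n\n\n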
+    expect(decodeChunks(['foo\r', '\r', '\r', '\n', 'bar', '\n'])).toEqual(['foo', '', '', 'bar']);
+    expect(decodeChunks(['foo\n', '\n', '\n', 'bar', '\n'])).toEqual(['foo', '', '', 'bar']);
+  });
+
+  test('double newline', () => {
+    expect(decodeChunks(['foo\n\nbar'], { flush: true })).toEqual(['foo', '', 'bar']);
+    expect(decodeChunks(['foo', '\n', '\nbar'], { flush: true })).toEqual(['foo', '', 'bar']);
+    expect(decodeChunks(['foo\n', '\n', 'bar'], { flush: true })).toEqual(['foo', '', 'bar']);
+    expect(decodeChunks(['foo', '\n', '\n', 'bar'], { flush: true })).toEqual(['foo', '', 'bar']);
+  });
+
+  test('multi-byte characters across chunks', () => {
+    const decoder = new LineDecoder();
+
+    // bytes taken from the string 'известни' and arbitrarily split
+    // so that some multi-byte characters span multiple chunks
+    expect(decoder.decode(new Uint8Array([0xd0]))).toHaveLength(0);
+    expect(decoder.decode(new Uint8Array([0xb8, 0xd0, 0xb7, 0xd0]))).toHaveLength(0);
+    expect(
+      decoder.decode(new Uint8Array([0xb2, 0xd0, 0xb5, 0xd1, 0x81, 0xd1, 0x82, 0xd0, 0xbd, 0xd0, 0xb8])),
+    ).toHaveLength(0);
+
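+    // 0x0a is the '\n' byte, which terminates the buffered line and yields the fully decoded string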
+    const decoded = decoder.decode(new Uint8Array([0xa]));
+    expect(decoded).toEqual(['известни']);
+  });
+
+  test('flushing trailing newlines', () => {
+    expect(decodeChunks(['foo\n', '\nbar'], { flush: true })).toEqual(['foo', '', 'bar']);
+  });
+
+  test('flushing empty buffer', () => {
+    expect(decodeChunks([], { flush: true })).toEqual([]);
+  });
 });
 
 describe('streaming decoding', () => {