@@ -14,6 +14,7 @@ const cloneDeep = require('lodash/cloneDeep');
1414const assert = require ( 'assert' ) ;
1515
1616const IMAGE_FILE = helper . IMAGE_FILE ;
17+ const LARGE_IMAGE_FILE = helper . LARGE_IMAGE_FILE ;
1718const LARGE_RAW_FILE = helper . LARGE_RAW_FILE ;
1819const LARGE_VIDEO = helper . LARGE_VIDEO ;
1920const EMPTY_IMAGE = helper . EMPTY_IMAGE ;
@@ -1002,9 +1003,8 @@ describe("uploader", function () {
10021003 } ) ;
10031004 } ) ;
10041005 it ( "should successfully upload with pipes" , function ( done ) {
1005- var file_reader , upload ;
10061006 this . timeout ( TIMEOUT . LONG ) ;
1007- upload = cloudinary . v2 . uploader . upload_stream ( {
1007+ const upload = cloudinary . v2 . uploader . upload_stream ( {
10081008 tags : UPLOAD_TAGS
10091009 } , function ( error , result ) {
10101010 var expected_signature ;
@@ -1017,8 +1017,19 @@ describe("uploader", function () {
10171017 expect ( result . signature ) . to . eql ( expected_signature ) ;
10181018 done ( ) ;
10191019 } ) ;
1020- file_reader = fs . createReadStream ( IMAGE_FILE ) ;
1021- file_reader . pipe ( upload ) ;
1020+ fs . createReadStream ( IMAGE_FILE ) . pipe ( upload ) ;
1021+ } ) ;
// Verifies that upload_chunked_stream() works as a pipe destination:
// a large image is streamed into it and uploaded in 7 MB chunks.
// NOTE: this must be a regular `function`, not an arrow function —
// Mocha binds the per-test context to `this`, and an arrow function
// would capture the enclosing suite's `this` instead, so
// `this.timeout(...)` would (incorrectly) change the whole suite's
// timeout rather than this test's.
it("should successfully upload in chunks with pipes", function (done) {
  this.timeout(TIMEOUT.LONG);
  const upload = cloudinary.v2.uploader.upload_chunked_stream({
    chunk_size: 7000000,
    timeout: TIMEOUT.LONG
  }, (error, result) => {
    // Completion callback: on success `error` is undefined and the
    // result carries the uploaded asset's public_id.
    assert.strictEqual(error, undefined);
    assert.ok(result.public_id);
    done();
  });
  fs.createReadStream(LARGE_IMAGE_FILE).pipe(upload);
});
10231034 it ( "should fail with http.Agent (non secure)" , function ( ) {
10241035 this . timeout ( TIMEOUT . LONG ) ;
0 commit comments