@@ -11,6 +11,15 @@ import {Upload} from '@tus/utils'
1111const fixturesPath = path . resolve ( '../' , '../' , 'test' , 'fixtures' )
1212const storePath = path . resolve ( '../' , '../' , 'test' , 'output' , 's3-store' )
1313
14+ const s3ClientConfig = {
15+ bucket : process . env . AWS_BUCKET as string ,
16+ credentials : {
17+ accessKeyId : process . env . AWS_ACCESS_KEY_ID as string ,
18+ secretAccessKey : process . env . AWS_SECRET_ACCESS_KEY as string ,
19+ } ,
20+ region : process . env . AWS_REGION ,
21+ }
22+
1423describe ( 'S3DataStore' , ( ) => {
1524 before ( function ( ) {
1625 this . testFileSize = 960_244
@@ -21,14 +30,7 @@ describe('S3DataStore', () => {
2130 beforeEach ( function ( ) {
2231 this . datastore = new S3Store ( {
2332 partSize : 8 * 1024 * 1024 , // Each uploaded part will have ~8MiB,
24- s3ClientConfig : {
25- bucket : process . env . AWS_BUCKET as string ,
26- credentials : {
27- accessKeyId : process . env . AWS_ACCESS_KEY_ID as string ,
28- secretAccessKey : process . env . AWS_SECRET_ACCESS_KEY as string ,
29- } ,
30- region : process . env . AWS_REGION ,
31- } ,
33+ s3ClientConfig,
3234 } )
3335 } )
3436
@@ -196,6 +198,50 @@ describe('S3DataStore', () => {
196198 assert . equal ( offset , incompleteSize )
197199 } )
198200
201+ it ( 'should use strictly sequential part numbers when uploading multiple chunks' , async ( ) => {
202+ const store = new S3Store ( {
203+ partSize : 5 * 1024 * 1024 ,
204+ maxConcurrentPartUploads : 1 ,
205+ s3ClientConfig,
206+ } )
207+
208+ // @ts -expect-error private method
209+ const uploadPartSpy = sinon . spy ( store , 'uploadPart' )
210+
211+ const size = 15 * 1024 * 1024
212+ const upload = new Upload ( {
213+ id : shared . testId ( 'increment-bug' ) ,
214+ size : size ,
215+ offset : 0 ,
216+ } )
217+
218+ await store . create ( upload )
219+
220+ // Write all 15 MB in a single call (S3Store will internally chunk to ~3 parts):
221+ const offset = await store . write ( Readable . from ( Buffer . alloc ( size ) ) , upload . id , 0 )
222+
223+ assert . equal ( offset , size )
224+
225+ const finalUpload = await store . getUpload ( upload . id )
226+ assert . equal ( finalUpload . offset , size , 'getUpload offset should match total size' )
227+
228+ const partNumbers = uploadPartSpy . getCalls ( ) . map ( ( call ) => call . args [ 2 ] )
229+
230+ for ( let i = 0 ; i < partNumbers . length ; i ++ ) {
231+ if ( i === 0 ) {
232+ assert . equal ( partNumbers [ i ] , 1 , 'First part number must be 1' )
233+ } else {
234+ const prev = partNumbers [ i - 1 ]
235+ const curr = partNumbers [ i ]
236+ assert . equal (
237+ curr ,
238+ prev + 1 ,
239+ `Part numbers should increment by 1. Found jump from ${ prev } to ${ curr } `
240+ )
241+ }
242+ }
243+ } )
244+
// Shared conformance suites from the common test harness:
shared.shouldHaveStoreMethods()
shared.shouldCreateUploads()
shared.shouldRemoveUploads() // Termination extension
0 commit comments