Skip to content

Commit 32d847d

Browse files
fenos and Murderlon authored
@tus/s3-store: fix part number increment (#689)
Co-authored-by: Murderlon <[email protected]>
1 parent d0765da commit 32d847d

File tree

3 files changed

+60
-9
lines changed

3 files changed

+60
-9
lines changed
Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
"@tus/s3-store": patch
3+
---
4+
5+
Fix increment for part numbers

packages/s3-store/src/index.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -367,8 +367,8 @@ export class S3Store extends DataStore {
367367
.on('chunkFinished', ({path, size: partSize}) => {
368368
pendingChunkFilepath = null
369369

370-
const partNumber = currentPartNumber + 1
371370
const acquiredPermit = permit
371+
const partNumber = currentPartNumber++
372372

373373
offset += partSize
374374

packages/s3-store/test/index.ts

Lines changed: 54 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,15 @@ import {Upload} from '@tus/utils'
1111
const fixturesPath = path.resolve('../', '../', 'test', 'fixtures')
1212
const storePath = path.resolve('../', '../', 'test', 'output', 's3-store')
1313

14+
const s3ClientConfig = {
15+
bucket: process.env.AWS_BUCKET as string,
16+
credentials: {
17+
accessKeyId: process.env.AWS_ACCESS_KEY_ID as string,
18+
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY as string,
19+
},
20+
region: process.env.AWS_REGION,
21+
}
22+
1423
describe('S3DataStore', () => {
1524
before(function () {
1625
this.testFileSize = 960_244
@@ -21,14 +30,7 @@ describe('S3DataStore', () => {
2130
beforeEach(function () {
2231
this.datastore = new S3Store({
2332
partSize: 8 * 1024 * 1024, // Each uploaded part will have ~8MiB,
24-
s3ClientConfig: {
25-
bucket: process.env.AWS_BUCKET as string,
26-
credentials: {
27-
accessKeyId: process.env.AWS_ACCESS_KEY_ID as string,
28-
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY as string,
29-
},
30-
region: process.env.AWS_REGION,
31-
},
33+
s3ClientConfig,
3234
})
3335
})
3436

@@ -196,6 +198,50 @@ describe('S3DataStore', () => {
196198
assert.equal(offset, incompleteSize)
197199
})
198200

201+
it('should use strictly sequential part numbers when uploading multiple chunks', async () => {
202+
const store = new S3Store({
203+
partSize: 5 * 1024 * 1024,
204+
maxConcurrentPartUploads: 1,
205+
s3ClientConfig,
206+
})
207+
208+
// @ts-expect-error private method
209+
const uploadPartSpy = sinon.spy(store, 'uploadPart')
210+
211+
const size = 15 * 1024 * 1024
212+
const upload = new Upload({
213+
id: shared.testId('increment-bug'),
214+
size: size,
215+
offset: 0,
216+
})
217+
218+
await store.create(upload)
219+
220+
// Write all 15 MB in a single call (S3Store will internally chunk to ~3 parts):
221+
const offset = await store.write(Readable.from(Buffer.alloc(size)), upload.id, 0)
222+
223+
assert.equal(offset, size)
224+
225+
const finalUpload = await store.getUpload(upload.id)
226+
assert.equal(finalUpload.offset, size, 'getUpload offset should match total size')
227+
228+
const partNumbers = uploadPartSpy.getCalls().map((call) => call.args[2])
229+
230+
for (let i = 0; i < partNumbers.length; i++) {
231+
if (i === 0) {
232+
assert.equal(partNumbers[i], 1, 'First part number must be 1')
233+
} else {
234+
const prev = partNumbers[i - 1]
235+
const curr = partNumbers[i]
236+
assert.equal(
237+
curr,
238+
prev + 1,
239+
`Part numbers should increment by 1. Found jump from ${prev} to ${curr}`
240+
)
241+
}
242+
}
243+
})
244+
199245
shared.shouldHaveStoreMethods()
200246
shared.shouldCreateUploads()
201247
shared.shouldRemoveUploads() // Termination extension

0 commit comments

Comments (0)