
Commit f875666

test: add more integration cases
1 parent 034eddb commit f875666

File tree: 3 files changed, +349 -5 lines changed


src/files/infra/repositories/FilesRepository.ts

Lines changed: 2 additions & 0 deletions
@@ -51,6 +51,7 @@ export interface UploadedFileRequestBody {
   directoryLabel?: string
   categories?: string[]
   restrict?: boolean
+  forceReplace?: boolean
 }

 export interface ChecksumRequestBody {
@@ -311,6 +312,7 @@ export class FilesRepository extends ApiRepository implements IFilesRepository {
       },
       mimeType: uploadedFileDTO.mimeType,
       storageIdentifier: uploadedFileDTO.storageId,
+      forceReplace: uploadedFileDTO.forceReplace,
       ...(uploadedFileDTO.description && { description: uploadedFileDTO.description }),
       ...(uploadedFileDTO.categories && { categories: uploadedFileDTO.categories }),
       ...(uploadedFileDTO.restrict && { restrict: uploadedFileDTO.restrict }),
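
For context, the new forceReplace flag is consumed exactly as the integration tests below exercise it: the caller sets forceReplace on the UploadedFileDTO passed to FilesRepository.replaceFile, and the repository copies it into the request body shown above. A minimal sketch, not part of the commit; the import paths, the 'md5' checksum literal, and all identifier values are assumptions:

import { FilesRepository } from '../../../src/files/infra/repositories/FilesRepository'
import { UploadedFileDTO } from '../../../src'

// Hedged sketch: replaces an existing dataset file, forcing the replace even if the
// mimetype differs from the original file. Placeholder values would normally come from
// uploadFile / getDatasetFiles, as in the tests below.
export const forceReplaceExample = async (currentFileId: number): Promise<void> => {
  const filesRepository = new FilesRepository()

  const replacement: UploadedFileDTO = {
    fileName: 'new-singlepart-file', // placeholder
    storageId: 'someStorageId', // placeholder, normally returned by DirectUploadClient.uploadFile
    checksumType: 'md5', // assumed algorithm; the tests use a checksumAlgorithm variable
    checksumValue: '0123456789abcdef0123456789abcdef', // placeholder checksum
    mimeType: 'text/csv',
    forceReplace: true // without this, a mimetype mismatch is rejected with a 400 WriteError
  }

  await filesRepository.replaceFile(currentFileId, replacement)
}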

test/integration/files/DirectUpload.test.ts

Lines changed: 338 additions & 1 deletion
@@ -4,6 +4,7 @@ import {
   DatasetNotNumberedVersion,
   FileOrderCriteria,
   UploadedFileDTO,
+  WriteError,
   createDataset
 } from '../../../src'
 import { DataverseApiAuthMechanism } from '../../../src/core/infra/repositories/ApiConfig'
@@ -29,6 +30,9 @@ describe('Direct Upload', () => {
   let testDataset1Ids: CreatedDatasetIdentifiers
   let testDataset2Ids: CreatedDatasetIdentifiers
   let testDataset3Ids: CreatedDatasetIdentifiers
+  let testDataset4Ids: CreatedDatasetIdentifiers
+  let testDataset5Ids: CreatedDatasetIdentifiers
+  let testDataset6Ids: CreatedDatasetIdentifiers

   const filesRepositorySut = new FilesRepository()
   const directUploadSut: DirectUploadClient = new DirectUploadClient(filesRepositorySut)
@@ -63,6 +67,18 @@ describe('Direct Upload', () => {
         TestConstants.TEST_NEW_DATASET_DTO,
         testCollectionAlias
       )
+      testDataset4Ids = await createDataset.execute(
+        TestConstants.TEST_NEW_DATASET_DTO,
+        testCollectionAlias
+      )
+      testDataset5Ids = await createDataset.execute(
+        TestConstants.TEST_NEW_DATASET_DTO,
+        testCollectionAlias
+      )
+      testDataset6Ids = await createDataset.execute(
+        TestConstants.TEST_NEW_DATASET_DTO,
+        testCollectionAlias
+      )
     } catch (error) {
       throw new Error('Tests beforeAll(): Error while creating test dataset')
     }
@@ -74,6 +90,9 @@ describe('Direct Upload', () => {
     await deleteUnpublishedDatasetViaApi(testDataset1Ids.numericId)
     await deleteUnpublishedDatasetViaApi(testDataset2Ids.numericId)
     await deleteUnpublishedDatasetViaApi(testDataset3Ids.numericId)
+    await deleteUnpublishedDatasetViaApi(testDataset4Ids.numericId)
+    await deleteUnpublishedDatasetViaApi(testDataset5Ids.numericId)
+    await deleteUnpublishedDatasetViaApi(testDataset6Ids.numericId)
     await deleteCollectionViaApi(testCollectionAlias)
   })

@@ -220,7 +239,7 @@ describe('Direct Upload', () => {
     ).rejects.toThrow(FileUploadCancelError)
   })

-  test('should upload file add it to the dataset, upload a new one and replace it', async () => {
+  test('should replace a file successfully', async () => {
     // 1 - Upload first file and add it to the dataset
     const destination = await createTestFileUploadDestination(
       singlepartFile,
@@ -327,6 +346,324 @@ describe('Direct Upload', () => {
     expect(datasetFiles.files[0].storageIdentifier).toContain('localstack1://mybucket:')
   })

+  test('should fail to replace a file when mimetype is different and forceReplace is false', async () => {
+    // 1 - Upload first file and add it to the dataset
+    const destination = await createTestFileUploadDestination(
+      singlepartFile,
+      testDataset4Ids.numericId
+    )
+    const singlepartFileUrl = destination.urls[0]
+
+    const progressMock = jest.fn()
+    const abortController = new AbortController()
+
+    expect(await singlepartFileExistsInBucket(singlepartFileUrl)).toBe(false)
+
+    const actualStorageId = await directUploadSut.uploadFile(
+      testDataset4Ids.numericId,
+      singlepartFile,
+      progressMock,
+      abortController,
+      destination
+    )
+    expect(actualStorageId).toBe(destination.storageId)
+
+    expect(await singlepartFileExistsInBucket(singlepartFileUrl)).toBe(true)
+
+    let datasetFiles = await filesRepositorySut.getDatasetFiles(
+      testDataset4Ids.numericId,
+      DatasetNotNumberedVersion.LATEST,
+      true,
+      FileOrderCriteria.NAME_AZ
+    )
+
+    expect(datasetFiles.totalFilesCount).toBe(0)
+
+    const fileArrayBuffer = await singlepartFile.arrayBuffer()
+    const fileBuffer = Buffer.from(fileArrayBuffer)
+
+    const uploadedFileDTO = {
+      fileName: singlepartFile.name,
+      storageId: actualStorageId,
+      checksumType: checksumAlgorithm,
+      checksumValue: calculateBlobChecksum(fileBuffer),
+      mimeType: singlepartFile.type
+    }
+
+    await filesRepositorySut.addUploadedFilesToDataset(testDataset4Ids.numericId, [uploadedFileDTO])
+
+    datasetFiles = await filesRepositorySut.getDatasetFiles(
+      testDataset4Ids.numericId,
+      DatasetNotNumberedVersion.LATEST,
+      true,
+      FileOrderCriteria.NAME_AZ
+    )
+
+    expect(datasetFiles.totalFilesCount).toBe(1)
+    expect(datasetFiles.files[0].name).toBe('singlepart-file')
+    expect(datasetFiles.files[0].sizeBytes).toBe(singlepartFile.size)
+    expect(datasetFiles.files[0].storageIdentifier).toContain('localstack1://mybucket:')
+
+    // 2 - Upload a new file and get the new storage id
+    const newSinglepartFile = await createSinglepartFileBlob(
+      'new-singlepart-file',
+      1500,
+      'text/csv'
+    )
+    const newDestination = await createTestFileUploadDestination(
+      newSinglepartFile,
+      testDataset4Ids.numericId
+    )
+    const newSinglepartFileUrl = newDestination.urls[0]
+
+    expect(await singlepartFileExistsInBucket(newSinglepartFileUrl)).toBe(false)
+
+    const newFileStorageId = await directUploadSut.uploadFile(
+      testDataset4Ids.numericId,
+      newSinglepartFile,
+      progressMock,
+      abortController,
+      newDestination
+    )
+    expect(newFileStorageId).toBe(newDestination.storageId)
+
+    expect(await singlepartFileExistsInBucket(newSinglepartFileUrl)).toBe(true)
+
+    // 3 - Try to replace the old file with the new file; the mimetype differs and forceReplace is false, so the replace should fail
+    const currentFileId = datasetFiles.files[0].id
+    const newFileArrayBuffer = await newSinglepartFile.arrayBuffer()
+    const newFileBuffer = Buffer.from(newFileArrayBuffer)
+    const newUploadedFileDTO: UploadedFileDTO = {
+      fileName: newSinglepartFile.name,
+      storageId: newFileStorageId,
+      checksumType: checksumAlgorithm,
+      checksumValue: calculateBlobChecksum(newFileBuffer),
+      mimeType: newSinglepartFile.type,
+      forceReplace: false
+    }
+
+    const expectedError = new WriteError(
+      '[400] The original file (Plain Text) and replacement file (Comma Separated Values) are different file types.'
+    )
+
+    await expect(filesRepositorySut.replaceFile(currentFileId, newUploadedFileDTO)).rejects.toThrow(
+      expectedError
+    )
+  })
+
+  test('should replace a file successfully when mimetype is different but forceReplace is true', async () => {
+    // 1 - Upload first file and add it to the dataset
+    const destination = await createTestFileUploadDestination(
+      singlepartFile,
+      testDataset5Ids.numericId
+    )
+    const singlepartFileUrl = destination.urls[0]
+
+    const progressMock = jest.fn()
+    const abortController = new AbortController()
+
+    expect(await singlepartFileExistsInBucket(singlepartFileUrl)).toBe(false)
+
+    const actualStorageId = await directUploadSut.uploadFile(
+      testDataset5Ids.numericId,
+      singlepartFile,
+      progressMock,
+      abortController,
+      destination
+    )
+    expect(actualStorageId).toBe(destination.storageId)
+
+    expect(await singlepartFileExistsInBucket(singlepartFileUrl)).toBe(true)
+
+    let datasetFiles = await filesRepositorySut.getDatasetFiles(
+      testDataset5Ids.numericId,
+      DatasetNotNumberedVersion.LATEST,
+      true,
+      FileOrderCriteria.NAME_AZ
+    )
+
+    expect(datasetFiles.totalFilesCount).toBe(0)
+
+    const fileArrayBuffer = await singlepartFile.arrayBuffer()
+    const fileBuffer = Buffer.from(fileArrayBuffer)
+
+    const uploadedFileDTO = {
+      fileName: singlepartFile.name,
+      storageId: actualStorageId,
+      checksumType: checksumAlgorithm,
+      checksumValue: calculateBlobChecksum(fileBuffer),
+      mimeType: singlepartFile.type
+    }
+
+    await filesRepositorySut.addUploadedFilesToDataset(testDataset5Ids.numericId, [uploadedFileDTO])
+
+    datasetFiles = await filesRepositorySut.getDatasetFiles(
+      testDataset5Ids.numericId,
+      DatasetNotNumberedVersion.LATEST,
+      true,
+      FileOrderCriteria.NAME_AZ
+    )
+
+    expect(datasetFiles.totalFilesCount).toBe(1)
+    expect(datasetFiles.files[0].name).toBe('singlepart-file')
+    expect(datasetFiles.files[0].sizeBytes).toBe(singlepartFile.size)
+    expect(datasetFiles.files[0].storageIdentifier).toContain('localstack1://mybucket:')
+
+    // 2 - Upload a new file and get the new storage id
+    const newSinglepartFile = await createSinglepartFileBlob(
+      'new-singlepart-file-diff-mimetype',
+      1500,
+      'text/csv'
+    )
+    const newDestination = await createTestFileUploadDestination(
+      newSinglepartFile,
+      testDataset5Ids.numericId
+    )
+    const newSinglepartFileUrl = newDestination.urls[0]
+
+    expect(await singlepartFileExistsInBucket(newSinglepartFileUrl)).toBe(false)
+
+    const newFileStorageId = await directUploadSut.uploadFile(
+      testDataset5Ids.numericId,
+      newSinglepartFile,
+      progressMock,
+      abortController,
+      newDestination
+    )
+    expect(newFileStorageId).toBe(newDestination.storageId)
+
+    expect(await singlepartFileExistsInBucket(newSinglepartFileUrl)).toBe(true)
+
+    // 3 - Replace the old file with the new file (must have different content); the new file has a different mimetype, but forceReplace is true
+    const currentFileId = datasetFiles.files[0].id
+    const newFileArrayBuffer = await newSinglepartFile.arrayBuffer()
+    const newFileBuffer = Buffer.from(newFileArrayBuffer)
+    const newUploadedFileDTO: UploadedFileDTO = {
+      fileName: newSinglepartFile.name,
+      storageId: newFileStorageId,
+      checksumType: checksumAlgorithm,
+      checksumValue: calculateBlobChecksum(newFileBuffer),
+      mimeType: newSinglepartFile.type,
+      forceReplace: true
+    }
+
+    await filesRepositorySut.replaceFile(currentFileId, newUploadedFileDTO)
+
+    // 4 - Verify that the new file is in the dataset and the old file is not
+    datasetFiles = await filesRepositorySut.getDatasetFiles(
+      testDataset5Ids.numericId,
+      DatasetNotNumberedVersion.LATEST,
+      true,
+      FileOrderCriteria.NAME_AZ
+    )
+
+    expect(datasetFiles.totalFilesCount).toBe(1)
+    expect(datasetFiles.files[0].name).toBe('new-singlepart-file-diff-mimetype')
+    expect(datasetFiles.files[0].contentType).toBe(newSinglepartFile.type)
+    expect(datasetFiles.files[0].sizeBytes).toBe(newSinglepartFile.size)
+    expect(datasetFiles.files[0].storageIdentifier).toContain('localstack1://mybucket:')
+  })
+
+  test('should fail to replace a file when the new file has the same content as the old file', async () => {
+    // 1 - Upload first file and add it to the dataset
+    const destination = await createTestFileUploadDestination(
+      singlepartFile,
+      testDataset6Ids.numericId
+    )
+    const singlepartFileUrl = destination.urls[0]
+
+    const progressMock = jest.fn()
+    const abortController = new AbortController()
+
+    expect(await singlepartFileExistsInBucket(singlepartFileUrl)).toBe(false)
+
+    const actualStorageId = await directUploadSut.uploadFile(
+      testDataset6Ids.numericId,
+      singlepartFile,
+      progressMock,
+      abortController,
+      destination
+    )
+    expect(actualStorageId).toBe(destination.storageId)
+
+    expect(await singlepartFileExistsInBucket(singlepartFileUrl)).toBe(true)
+
+    let datasetFiles = await filesRepositorySut.getDatasetFiles(
+      testDataset6Ids.numericId,
+      DatasetNotNumberedVersion.LATEST,
+      true,
+      FileOrderCriteria.NAME_AZ
+    )
+
+    expect(datasetFiles.totalFilesCount).toBe(0)
+
+    const fileArrayBuffer = await singlepartFile.arrayBuffer()
+    const fileBuffer = Buffer.from(fileArrayBuffer)
+
+    const uploadedFileDTO = {
+      fileName: singlepartFile.name,
+      storageId: actualStorageId,
+      checksumType: checksumAlgorithm,
+      checksumValue: calculateBlobChecksum(fileBuffer),
+      mimeType: singlepartFile.type
+    }
+
+    await filesRepositorySut.addUploadedFilesToDataset(testDataset6Ids.numericId, [uploadedFileDTO])
+
+    datasetFiles = await filesRepositorySut.getDatasetFiles(
+      testDataset6Ids.numericId,
+      DatasetNotNumberedVersion.LATEST,
+      true,
+      FileOrderCriteria.NAME_AZ
+    )
+
+    expect(datasetFiles.totalFilesCount).toBe(1)
+    expect(datasetFiles.files[0].name).toBe('singlepart-file')
+    expect(datasetFiles.files[0].sizeBytes).toBe(singlepartFile.size)
+    expect(datasetFiles.files[0].storageIdentifier).toContain('localstack1://mybucket:')
+
+    // 2 - Upload a new file with the same content and get the new storage id
+    const newSinglepartFile = await createSinglepartFileBlob()
+    const newDestination = await createTestFileUploadDestination(
+      newSinglepartFile,
+      testDataset6Ids.numericId
+    )
+    const newSinglepartFileUrl = newDestination.urls[0]
+
+    expect(await singlepartFileExistsInBucket(newSinglepartFileUrl)).toBe(false)
+
+    const newFileStorageId = await directUploadSut.uploadFile(
+      testDataset6Ids.numericId,
+      newSinglepartFile,
+      progressMock,
+      abortController,
+      newDestination
+    )
+    expect(newFileStorageId).toBe(newDestination.storageId)
+
+    expect(await singlepartFileExistsInBucket(newSinglepartFileUrl)).toBe(true)
+
+    // 3 - Try to replace the old file with the new file; the content is identical, so the replace should fail
+    const currentFileId = datasetFiles.files[0].id
+    const newFileArrayBuffer = await newSinglepartFile.arrayBuffer()
+    const newFileBuffer = Buffer.from(newFileArrayBuffer)
+    const newUploadedFileDTO: UploadedFileDTO = {
+      fileName: newSinglepartFile.name,
+      storageId: newFileStorageId,
+      checksumType: checksumAlgorithm,
+      checksumValue: calculateBlobChecksum(newFileBuffer),
+      mimeType: newSinglepartFile.type
+    }
+
+    const expectedError = new WriteError(
+      '[400] Error! You may not replace a file with a file that has duplicate content.'
+    )
+
+    await expect(filesRepositorySut.replaceFile(currentFileId, newUploadedFileDTO)).rejects.toThrow(
+      expectedError
+    )
+  })
+
   const createTestFileUploadDestination = async (file: File, testDatasetId: number) => {
     const filesRepository = new FilesRepository()
     const destination = await filesRepository.getFileUploadDestination(testDatasetId, file)
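
Both failure paths in the tests above surface as a WriteError carrying the server's 400 message, so calling code can distinguish them and decide whether a retry with forceReplace makes sense. A rough illustration, not part of the commit; the import paths, the message check, and the retry policy are assumptions:

import { FilesRepository } from '../../../src/files/infra/repositories/FilesRepository'
import { UploadedFileDTO, WriteError } from '../../../src'

// Hedged sketch: attempt a normal replace first, and only fall back to forceReplace when
// the server reports a mimetype mismatch. Duplicate-content errors are rethrown, since
// forcing the replace would not help there.
export const replaceWithFallback = async (
  currentFileId: number,
  dto: UploadedFileDTO
): Promise<void> => {
  const filesRepository = new FilesRepository()
  try {
    await filesRepository.replaceFile(currentFileId, { ...dto, forceReplace: false })
  } catch (error) {
    if (error instanceof WriteError && error.message.includes('different file types')) {
      await filesRepository.replaceFile(currentFileId, { ...dto, forceReplace: true })
    } else {
      throw error
    }
  }
}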
