8 changes: 4 additions & 4 deletions package-lock.json

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion package.json
@@ -18,7 +18,7 @@
"@dnd-kit/sortable": "8.0.0",
"@dnd-kit/utilities": "3.2.2",
"@faker-js/faker": "7.6.0",
"@iqss/dataverse-client-javascript": "2.0.0-alpha.85",
"@iqss/dataverse-client-javascript": "v2.1.0-pr421.6d795bb",
"@iqss/dataverse-design-system": "*",
"@istanbuljs/nyc-config-typescript": "1.0.2",
"@tanstack/react-table": "8.9.2",
3 changes: 3 additions & 0 deletions public/locales/en/shared.json
@@ -220,6 +220,9 @@
"selectFileMultiple": "Select files to add",
"dragDropSingle": "Drag and drop file here.",
"dragDropMultiple": "Drag and drop files and/or directories here.",
"uploadWidgetHelp": "Select files or drag and drop into the upload widget. Maximum of {{maxFilesPerUpload}} files per upload.",
"uploadWidgetMaxFilesHelp": "Maximum of {{maxFilesAvailableToUpload}} files available to upload.",
"uploadWidgetStorageQuotaHelp": "Storage quota: {{storageQuotaRemaining}} remaining.",
"cancelUpload": "Cancel upload",
"uploadFailed": "There was an error uploading this file.",
"loadingConfiguration": "Loading configuration",
3 changes: 3 additions & 0 deletions public/locales/es/shared.json
@@ -220,6 +220,9 @@
"selectFileMultiple": "Seleccionar ficheros para agregar",
"dragDropSingle": "Arrastra y suelta el fichero aquí.",
"dragDropMultiple": "Arrastra y suelta ficheros y/o directorios aquí.",
"uploadWidgetHelp": "Selecciona ficheros o arrástralos y suéltalos en el widget de carga. Máximo de {{maxFilesPerUpload}} ficheros por carga.",
"uploadWidgetMaxFilesHelp": "Máximo de {{maxFilesAvailableToUpload}} ficheros disponibles para cargar.",
"uploadWidgetStorageQuotaHelp": "Cuota de almacenamiento: {{storageQuotaRemaining}} restantes.",
"cancelUpload": "Cancelar carga",
"uploadFailed": "Ocurrió un error al cargar este fichero.",
"loadingConfiguration": "Cargando configuración",
4 changes: 4 additions & 0 deletions src/dataset/domain/models/DatasetUploadLimits.ts
@@ -0,0 +1,4 @@
export interface DatasetUploadLimits {
numberOfFilesRemaining?: number
storageQuotaRemaining?: number
}
8 changes: 8 additions & 0 deletions src/dataset/domain/useCases/getDatasetUploadLimits.ts
Contributor:
This should use the DatasetRepository, rather than directly calling the jsDataverse function

@@ -0,0 +1,8 @@
import { getDatasetUploadLimits as jsGetDatasetUploadLimits } from '@iqss/dataverse-client-javascript'
import { DatasetUploadLimits } from '../models/DatasetUploadLimits'

export async function getDatasetUploadLimits(
datasetId: string | number
): Promise<DatasetUploadLimits> {
return jsGetDatasetUploadLimits.execute(datasetId)
}
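
To make the contributor's suggestion concrete, the use case could take the repository as a dependency and let the infrastructure layer wrap jsDataverse. This is a minimal sketch only: the `DatasetRepository` import path and its `getDatasetUploadLimits` method are assumptions, not code that exists in this PR.

```ts
// Sketch of the reviewer's suggestion, not the PR's implementation.
// Assumes DatasetRepository exposes a getDatasetUploadLimits method (hypothetical).
import { DatasetRepository } from '@/dataset/domain/repositories/DatasetRepository'
import { DatasetUploadLimits } from '../models/DatasetUploadLimits'

export function getDatasetUploadLimits(
  datasetRepository: DatasetRepository,
  datasetId: string | number
): Promise<DatasetUploadLimits> {
  // The repository implementation would call the jsDataverse use case internally,
  // keeping the jsDataverse dependency out of the domain layer.
  return datasetRepository.getDatasetUploadLimits(datasetId)
}
```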
7 changes: 6 additions & 1 deletion src/sections/shared/file-uploader/FileUploader.tsx
@@ -1,5 +1,6 @@
import { File as FileModel } from '@/files/domain/models/File'
import { FileRepository } from '@/files/domain/repositories/FileRepository'
import { DatasetUploadLimits } from '@/dataset/domain/models/DatasetUploadLimits'
import { ReplaceFileReferrer } from '@/sections/replace-file/ReplaceFile'
import { FileUploaderProvider } from './context/FileUploaderContext'
import { useGetFixityAlgorithm } from './useGetFixityAlgorithm'
@@ -15,6 +16,7 @@ type FileUploaderProps =
operationType: OperationType.REPLACE_FILE
originalFile: FileModel
referrer?: ReplaceFileReferrer
fetchUploadLimits?: (datasetId: string) => Promise<DatasetUploadLimits>
}
| {
fileRepository: FileRepository
@@ -23,6 +25,7 @@
operationType: OperationType.ADD_FILES_TO_DATASET
originalFile?: never
referrer?: never
fetchUploadLimits?: (datasetId: string) => Promise<DatasetUploadLimits>
}

export type StorageType = 'S3'
@@ -41,7 +44,8 @@ export const FileUploader = ({
storageType,
operationType,
originalFile,
referrer
referrer,
fetchUploadLimits
}: FileUploaderProps) => {
const { fixityAlgorithm, isLoadingFixityAlgorithm } = useGetFixityAlgorithm(fileRepository)

@@ -68,6 +72,7 @@
<FileUploaderPanel
fileRepository={fileRepository}
datasetPersistentId={datasetPersistentId}
fetchUploadLimits={fetchUploadLimits}
referrer={referrer}
/>
</FileUploaderProvider>
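
For context on the new optional prop, a consumer can pass the use case straight through to the uploader. A hedged sketch, assuming a host component along these lines on the dataset page; the component name and its props are illustrative, not taken from this PR.

```tsx
// Illustrative wiring only; UploadDatasetFilesPage is a hypothetical host component.
import { FileRepository } from '@/files/domain/repositories/FileRepository'
import { getDatasetUploadLimits } from '@/dataset/domain/useCases/getDatasetUploadLimits'
import { FileUploader, OperationType } from '@/sections/shared/file-uploader/FileUploader'

interface UploadDatasetFilesPageProps {
  fileRepository: FileRepository
  persistentId: string
}

export const UploadDatasetFilesPage = ({
  fileRepository,
  persistentId
}: UploadDatasetFilesPageProps) => (
  <FileUploader
    fileRepository={fileRepository}
    datasetPersistentId={persistentId}
    storageType="S3"
    operationType={OperationType.ADD_FILES_TO_DATASET}
    // FileUploader forwards this down to useUploadLimit inside FileUploadInput.
    fetchUploadLimits={getDatasetUploadLimits}
  />
)
```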
11 changes: 9 additions & 2 deletions src/sections/shared/file-uploader/FileUploaderPanel.tsx
@@ -12,17 +12,20 @@ import { useFileUploaderContext } from './context/FileUploaderContext'
import FileUploadInput from './file-upload-input/FileUploadInput'
import { UploadedFilesList } from './uploaded-files-list/UploadedFilesList'
import { ConfirmLeaveModal } from './confirm-leave-modal/ConfirmLeaveModal'
import { DatasetUploadLimits } from '@/dataset/domain/models/DatasetUploadLimits'

interface FileUploaderPanelProps {
fileRepository: FileRepository
datasetPersistentId: string
referrer?: ReplaceFileReferrer
fetchUploadLimits?: (datasetId: string) => Promise<DatasetUploadLimits>
}

const FileUploaderPanel = ({
fileRepository,
datasetPersistentId,
referrer
referrer,
fetchUploadLimits
}: FileUploaderPanelProps) => {
const { t } = useTranslation('shared')
const navigate = useNavigate()
@@ -101,7 +104,11 @@ const FileUploaderPanel = ({

return (
<Stack gap={4}>
<FileUploadInput fileRepository={fileRepository} datasetPersistentId={datasetPersistentId} />
<FileUploadInput
fileRepository={fileRepository}
datasetPersistentId={datasetPersistentId}
fetchUploadLimits={fetchUploadLimits}
/>

{uploadedFiles.length > 0 && (
<UploadedFilesList
Contributor:
It would be good to have a FileUploadInput story that shows the file upload limit messages.

src/sections/shared/file-uploader/file-upload-input/FileUploadInput.tsx
@@ -8,22 +8,31 @@ import cn from 'classnames'
import { FileRepository } from '@/files/domain/repositories/FileRepository'
import MimeTypeDisplay from '@/files/domain/models/FileTypeToFriendlyTypeMap'
import { uploadFile } from '@/files/domain/useCases/uploadFile'
import { DatasetUploadLimits } from '@/dataset/domain/models/DatasetUploadLimits'
import { useFileUploaderContext } from '../context/FileUploaderContext'
import { FileUploadState, FileUploadStatus } from '../context/fileUploaderReducer'
import { OperationType } from '../FileUploader'
import { FileUploaderHelper } from '../FileUploaderHelper'
import { SwalModal } from '../../swal-modal/SwalModal'
import styles from './FileUploadInput.module.scss'
import { useUploadLimit } from './useUploadLimit'

type FileUploadInputProps = {
fileRepository: FileRepository
datasetPersistentId: string
fetchUploadLimits?: (datasetId: string) => Promise<DatasetUploadLimits>
}

const limit = 6
const semaphore = new Semaphore(limit)

const FileUploadInput = ({ fileRepository, datasetPersistentId }: FileUploadInputProps) => {
const maxFilesPerUpload = 1000

const FileUploadInput = ({
fileRepository,
datasetPersistentId,
fetchUploadLimits
}: FileUploadInputProps) => {
const {
fileUploaderState,
addFile,
@@ -44,6 +53,7 @@ const FileUploadInput = ({ fileRepository, datasetPersistentId }: FileUploadInpu
const inputRef = useRef<HTMLInputElement>(null)

const [isDragging, setIsDragging] = useState(false)
const { uploadLimit } = useUploadLimit(datasetPersistentId, fetchUploadLimits)

const totalFiles = Object.keys(fileUploaderState.files).length

@@ -267,6 +277,27 @@ const FileUploadInput = ({ fileRepository, datasetPersistentId }: FileUploadInpu
<Accordion.Item eventKey="0">
<Accordion.Header>{t('fileUploader.accordionTitle')}</Accordion.Header>
<Accordion.Body>
<p className={styles.helper_text}>
{t('fileUploader.uploadWidgetHelp', {
maxFilesPerUpload: maxFilesPerUpload.toLocaleString()
})}
{uploadLimit.maxFilesAvailableToUploadFormatted && (
<>
{' '}
{t('fileUploader.uploadWidgetMaxFilesHelp', {
maxFilesAvailableToUpload: uploadLimit.maxFilesAvailableToUploadFormatted
})}
</>
)}
{uploadLimit.storageQuotaRemainingFormatted && (
<>
{' '}
{t('fileUploader.uploadWidgetStorageQuotaHelp', {
storageQuotaRemaining: uploadLimit.storageQuotaRemainingFormatted
})}
</>
)}
</p>
<Card>
<Card.Header>
<Button
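
Following up on the story suggestion above, a sketch of what such a story could look like. It wraps FileUploader (so the FileUploaderContext provider is in place) rather than mounting FileUploadInput directly, and the story title, file location, and FileMockRepository import path are assumptions for illustration.

```tsx
// Hypothetical story sketch; adjust the mock repository import to the project's actual helper.
import type { Meta, StoryObj } from '@storybook/react'
import { FileUploader, OperationType } from '@/sections/shared/file-uploader/FileUploader'
import { FileMockRepository } from '../../files/FileMockRepository' // assumed path

const meta: Meta<typeof FileUploader> = {
  title: 'Sections/Shared/FileUploader',
  component: FileUploader
}
export default meta
type Story = StoryObj<typeof FileUploader>

// Resolves fixed limits so the accordion helper text renders both limit messages.
export const WithUploadLimitMessages: Story = {
  render: () => (
    <FileUploader
      fileRepository={new FileMockRepository()}
      datasetPersistentId="doi:10.5072/FK2/8YOKQI"
      storageType="S3"
      operationType={OperationType.ADD_FILES_TO_DATASET}
      fetchUploadLimits={() =>
        Promise.resolve({ numberOfFilesRemaining: 20, storageQuotaRemaining: 1048576 })
      }
    />
  )
}
```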
69 changes: 69 additions & 0 deletions src/sections/shared/file-uploader/file-upload-input/useUploadLimit.ts
@@ -0,0 +1,69 @@
import { useCallback, useEffect, useState } from 'react'
import { ReadError } from '@iqss/dataverse-client-javascript'
import { FileSize, FileSizeUnit } from '@/files/domain/models/FileMetadata'
import { getDatasetUploadLimits } from '@/dataset/domain/useCases/getDatasetUploadLimits'
import { DatasetUploadLimits } from '@/dataset/domain/models/DatasetUploadLimits'
import { JSDataverseReadErrorHandler } from '@/shared/helpers/JSDataverseReadErrorHandler'

interface UploadLimit {
maxFilesAvailableToUploadFormatted?: string
storageQuotaRemainingFormatted?: string
}

export function useUploadLimit(
datasetPersistentId: string,
fetchUploadLimits: (datasetId: string) => Promise<DatasetUploadLimits> = getDatasetUploadLimits
) {
const [uploadLimit, setUploadLimit] = useState<UploadLimit>({})
const [isLoadingUploadLimits, setIsLoadingUploadLimits] = useState<boolean>(true)
const [errorUploadLimits, setErrorUploadLimits] = useState<string | null>(null)

const fetchUploadLimitsCallback = useCallback(async () => {
setIsLoadingUploadLimits(true)
setErrorUploadLimits(null)

try {
const limits = await fetchUploadLimits(datasetPersistentId)

if (Object.keys(limits).length === 0) {
setUploadLimit({})
return
}

setUploadLimit({
maxFilesAvailableToUploadFormatted:
limits.numberOfFilesRemaining !== undefined
? limits.numberOfFilesRemaining.toLocaleString()
: undefined,
storageQuotaRemainingFormatted:
limits.storageQuotaRemaining !== undefined
? new FileSize(limits.storageQuotaRemaining, FileSizeUnit.BYTES).toString()
: undefined
})
} catch (error) {
setUploadLimit({})
if (error instanceof ReadError) {
const readError = new JSDataverseReadErrorHandler(error)
const formattedError =
readError.getReasonWithoutStatusCode() ??
/* istanbul ignore next */ readError.getErrorMessage()
setErrorUploadLimits(formattedError)
} else {
setErrorUploadLimits('Something went wrong getting the upload limits. Try again later.')
}
} finally {
setIsLoadingUploadLimits(false)
}
}, [datasetPersistentId, fetchUploadLimits])

useEffect(() => {
void fetchUploadLimitsCallback()
}, [fetchUploadLimitsCallback])

return {
uploadLimit,
isLoadingUploadLimits,
errorUploadLimits,
fetchUploadLimits: fetchUploadLimitsCallback
}
}
@@ -1007,4 +1007,62 @@ describe('FileUploader', () => {
})
})
})

describe('upload limits UI', () => {
it('shows both upload limit messages when both limits are present', () => {
const fetchUploadLimits = cy.stub().resolves({
numberOfFilesRemaining: 20,
storageQuotaRemaining: 1048576
})

cy.customMount(
<FileUploader
fileRepository={fileMockRepository}
datasetPersistentId=":latest"
storageType="S3"
operationType={OperationType.ADD_FILES_TO_DATASET}
fetchUploadLimits={fetchUploadLimits}
/>
)

cy.findByText(/Maximum of 20 files available to upload./i).should('exist')
cy.findByText(/Storage quota: 1 MB remaining./i).should('exist')
})

it('shows only the available limit message when one limit is missing', () => {
const fetchUploadLimits = cy.stub().resolves({
numberOfFilesRemaining: 5
})

cy.customMount(
<FileUploader
fileRepository={fileMockRepository}
datasetPersistentId=":latest"
storageType="S3"
operationType={OperationType.ADD_FILES_TO_DATASET}
fetchUploadLimits={fetchUploadLimits}
/>
)

cy.findByText(/Maximum of 5 files available to upload./i).should('exist')
cy.findByText(/Storage quota:/i).should('not.exist')
})

it('does not show limits message when limits are not present', () => {
const fetchUploadLimits = cy.stub().resolves({})

cy.customMount(
<FileUploader
fileRepository={fileMockRepository}
datasetPersistentId=":latest"
storageType="S3"
operationType={OperationType.ADD_FILES_TO_DATASET}
fetchUploadLimits={fetchUploadLimits}
/>
)

cy.findByText(/Maximum of \d+ files available to upload./i).should('not.exist')
cy.findByText(/Storage quota:/i).should('not.exist')
})
})
})
@@ -0,0 +1,40 @@
import { act, renderHook, waitFor } from '@testing-library/react'
import { useUploadLimit } from '@/sections/shared/file-uploader/file-upload-input/useUploadLimit'

const DATASET_PERSISTENT_ID = 'doi:10.5072/FK2/8YOKQI'

describe('useUploadLimit', () => {
it('formats upload limit values when limits are present', async () => {
const fetchUploadLimits = cy.stub().resolves({
numberOfFilesRemaining: 1200,
storageQuotaRemaining: 1048576
})

const { result } = renderHook(() => useUploadLimit(DATASET_PERSISTENT_ID, fetchUploadLimits))

await act(() => {
expect(result.current.isLoadingUploadLimits).to.deep.equal(true)
return expect(result.current.uploadLimit).to.deep.equal({})
})

await act(() => {
expect(result.current.isLoadingUploadLimits).to.deep.equal(false)

return expect(result.current.uploadLimit).to.deep.equal({
maxFilesAvailableToUploadFormatted: '1,200',
storageQuotaRemainingFormatted: '1 MB'
})
})
})

it('returns empty uploadLimit when no limits are present', async () => {
const fetchUploadLimits = cy.stub().resolves({})

const { result } = renderHook(() => useUploadLimit(DATASET_PERSISTENT_ID, fetchUploadLimits))

await waitFor(() => {
expect(result.current.isLoadingUploadLimits).to.equal(false)
expect(result.current.uploadLimit).to.deep.equal({})
})
})
})