Skip to content

Commit 41fc637

Browse files
committed
adding datasetFileUploadsAvailable to response
1 parent 0a9b30e commit 41fc637

File tree

11 files changed

+112
-53
lines changed

11 files changed

+112
-53
lines changed

src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1082,8 +1082,9 @@ public long getDatasetCount() {
10821082
* @param id - owner id
10831083
* @return Total number of datafiles for this dataset/owner
10841084
*/
1085-
public long getDataFileCountByOwner(long id) {
1086-
return em.createNamedQuery("Dataset.countFilesByOwnerId", Long.class).setParameter("ownerId", id).getSingleResult();
1085+
public int getDataFileCountByOwner(long id) {
1086+
Long c = em.createNamedQuery("Dataset.countFilesByOwnerId", Long.class).setParameter("ownerId", id).getSingleResult();
1087+
return c.intValue(); // ignoring the truncation since the number should never be too large
10871088
}
10881089

10891090
}

src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java

Lines changed: 11 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -270,15 +270,22 @@ public void setDatasetFileCountLimit(Integer datasetFileCountLimit) {
270270
}
271271

272272
public Integer getEffectiveDatasetFileCountLimit() {
273-
if (isDatasetFileCountLimitNotSet(getDatasetFileCountLimit()) && getOwner() != null) {
273+
if (!isDatasetFileCountLimitSet(getDatasetFileCountLimit()) && getOwner() != null) {
274274
return getOwner().getEffectiveDatasetFileCountLimit();
275-
} else if (isDatasetFileCountLimitNotSet(getDatasetFileCountLimit())) {
275+
} else if (!isDatasetFileCountLimitSet(getDatasetFileCountLimit())) {
276276
Optional<Integer> opt = JvmSettings.DEFAULT_DATASET_FILE_COUNT_LIMIT.lookupOptional(Integer.class);
277277
return (opt.isPresent()) ? opt.get() : null;
278278
}
279279
return getDatasetFileCountLimit();
280280
}
281-
public boolean isDatasetFileCountLimitNotSet(Integer datasetFileCountLimit) {
282-
return datasetFileCountLimit == null || datasetFileCountLimit <= 0 ? true : false;
281+
public boolean isDatasetFileCountLimitSet(Integer datasetFileCountLimit) {
282+
return datasetFileCountLimit != null && datasetFileCountLimit > 0 ? true : false;
283+
}
284+
public boolean isAvailableFileUpload(DatasetServiceBean datasetService) {
285+
Integer limit = getEffectiveDatasetFileCountLimit();
286+
if (isDatasetFileCountLimitSet(limit)) {
287+
return datasetService.getDataFileCountByOwner(getId()) < limit;
288+
}
289+
return true;
283290
}
284291
}

src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java

Lines changed: 16 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -198,7 +198,10 @@ public enum Referrer {
198198
private Long maxIngestSizeInBytes = null;
199199
// CSV: 4.8 MB, DTA: 976.6 KB, XLSX: 5.7 MB, etc.
200200
private String humanPerFormatTabularLimits = null;
201-
private Integer multipleUploadFilesLimit = null;
201+
private Integer multipleUploadFilesLimit = null;
202+
// Maximum number of files per dataset allowed to be uploaded
203+
private Integer maxFileUploadCount = null;
204+
private Integer fileUploadsAvailable = null;
202205

203206
//MutableBoolean so it can be passed from DatasetPage, supporting DatasetPage.cancelCreate()
204207
private MutableBoolean uploadInProgress = null;
@@ -390,6 +393,13 @@ public String populateHumanPerFormatTabularLimits() {
390393
return String.join(", ", formatLimits);
391394
}
392395

396+
public Integer getMaxFileUploadCount() {
397+
return maxFileUploadCount;
398+
}
399+
public Integer getFileUploadsAvailable() {
400+
return fileUploadsAvailable;
401+
}
402+
393403
/*
394404
The number of files the GUI user is allowed to upload in one batch,
395405
via drag-and-drop, or through the file select dialog. Now configurable
@@ -540,6 +550,8 @@ public String initCreateMode(String modeToken, DatasetVersion version, MutableBo
540550
this.maxIngestSizeInBytes = systemConfig.getTabularIngestSizeLimit();
541551
this.humanPerFormatTabularLimits = populateHumanPerFormatTabularLimits();
542552
this.multipleUploadFilesLimit = systemConfig.getMultipleUploadFilesLimit();
553+
this.maxFileUploadCount = dataset.getEffectiveDatasetFileCountLimit();
554+
this.fileUploadsAvailable = this.maxFileUploadCount != null && dataset.getId() != null ? this.maxFileUploadCount - datasetService.getDataFileCountByOwner(dataset.getId()) : null;
543555

544556
logger.fine("done");
545557

@@ -601,7 +613,9 @@ public String init() {
601613
}
602614
this.maxIngestSizeInBytes = systemConfig.getTabularIngestSizeLimit();
603615
this.humanPerFormatTabularLimits = populateHumanPerFormatTabularLimits();
604-
this.multipleUploadFilesLimit = systemConfig.getMultipleUploadFilesLimit();
616+
this.multipleUploadFilesLimit = systemConfig.getMultipleUploadFilesLimit();
617+
this.maxFileUploadCount = dataset.getEffectiveDatasetFileCountLimit();
618+
this.fileUploadsAvailable = this.maxFileUploadCount != null ? this.maxFileUploadCount - datasetService.getDataFileCountByOwner(dataset.getId()) : null;
605619

606620
hasValidTermsOfAccess = isHasValidTermsOfAccess();
607621
if (!hasValidTermsOfAccess) {

src/main/java/edu/harvard/iq/dataverse/api/Datasets.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2605,9 +2605,9 @@ public Response getMPUploadUrls(@Context ContainerRequestContext crc, @PathParam
26052605
}
26062606
if (!user.isSuperuser()) {
26072607
Integer effectiveDatasetFileCountLimit = dataset.getEffectiveDatasetFileCountLimit();
2608-
boolean hasFileCountLimit = !dataset.isDatasetFileCountLimitNotSet(effectiveDatasetFileCountLimit);
2608+
boolean hasFileCountLimit = dataset.isDatasetFileCountLimitSet(effectiveDatasetFileCountLimit);
26092609
if (hasFileCountLimit) {
2610-
long uploadedFileCount = datasetService.getDataFileCountByOwner(dataset.getId());
2610+
int uploadedFileCount = datasetService.getDataFileCountByOwner(dataset.getId());
26112611
if (uploadedFileCount >= effectiveDatasetFileCountLimit) {
26122612
return error(Response.Status.BAD_REQUEST,
26132613
BundleUtil.getStringFromBundle("file.add.count_exceeds_limit", Arrays.asList(String.valueOf(effectiveDatasetFileCountLimit))));

src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -140,11 +140,11 @@ public CreateDataFileResult execute(CommandContext ctxt) throws CommandException
140140
if (!replaceMode && !isSuperuser && version.getDataset() != null) {
141141
DvObjectContainer dvo = version.getDataset();
142142
Integer effectiveDatasetFileCountLimit = dvo.getEffectiveDatasetFileCountLimit();
143-
boolean hasFileCountLimit = !dvo.isDatasetFileCountLimitNotSet(effectiveDatasetFileCountLimit);
143+
boolean hasFileCountLimit = dvo.isDatasetFileCountLimitSet(effectiveDatasetFileCountLimit);
144144
if (hasFileCountLimit) {
145145
// Get the number of uploaded files
146146
DatasetServiceBean datasetService = datasetServiceBean == null ? CDI.current().select(DatasetServiceBean.class).get() : datasetServiceBean;
147-
long uploadedFileCount = datasetService.getDataFileCountByOwner(dvo.getId());
147+
int uploadedFileCount = datasetService.getDataFileCountByOwner(dvo.getId());
148148
if (uploadedFileCount >= effectiveDatasetFileCountLimit) {
149149
throw new CommandExecutionException(BundleUtil.getStringFromBundle("file.add.count_exceeds_limit", Arrays.asList(String.valueOf(effectiveDatasetFileCountLimit))), this);
150150
}

src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java

Lines changed: 13 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -201,7 +201,14 @@ public List<DataFile> saveAndAddFilesToDataset(DatasetVersion version,
201201
// Check if this dataset is subject to any storage quotas:
202202
uploadSessionQuota = fileService.getUploadSessionQuotaLimit(dataset);
203203
}
204-
204+
205+
Integer maxFiles = version.getDataset().getEffectiveDatasetFileCountLimit();
206+
if (version.getDataset().getId() != null && version.getDataset().isDatasetFileCountLimitSet(maxFiles)) {
207+
maxFiles = maxFiles - datasetService.getDataFileCountByOwner(version.getDataset().getId());
208+
} else {
209+
maxFiles = Integer.MAX_VALUE;
210+
}
211+
205212
for (DataFile dataFile : newFiles) {
206213
boolean unattached = false;
207214
boolean savedSuccess = false;
@@ -212,6 +219,11 @@ public List<DataFile> saveAndAddFilesToDataset(DatasetVersion version,
212219
unattached = true;
213220
dataFile.setOwner(dataset);
214221
}
222+
223+
if (--maxFiles < 0) {
224+
logger.warning("Failed to save all the files due to the limit on the number of files that can be uploaded to this dataset.");
225+
break;
226+
}
215227

216228
String[] storageInfo = DataAccess.getDriverIdAndStorageLocation(dataFile.getStorageIdentifier());
217229
String driverType = DataAccess.getDriverType(storageInfo[0]);

src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java

Lines changed: 12 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -73,10 +73,14 @@ public class JsonPrinter {
7373

7474
@EJB
7575
static DatasetFieldServiceBean datasetFieldService;
76+
77+
@EJB
78+
static DatasetServiceBean datasetService;
7679

77-
public static void injectSettingsService(SettingsServiceBean ssb, DatasetFieldServiceBean dfsb, DataverseFieldTypeInputLevelServiceBean dfils) {
80+
public static void injectSettingsService(SettingsServiceBean ssb, DatasetFieldServiceBean dfsb, DataverseFieldTypeInputLevelServiceBean dfils, DatasetServiceBean ds) {
7881
settingsService = ssb;
7982
datasetFieldService = dfsb;
83+
datasetService = ds;
8084
}
8185

8286
public JsonPrinter() {
@@ -425,13 +429,17 @@ public static JsonObjectBuilder json(Dataset ds, Boolean returnOwners) {
425429

426430
private static void addDatasetFileCountLimit(DvObjectContainer dvo, JsonObjectBuilder bld) {
427431
Integer effectiveDatasetFileCountLimit = dvo.getEffectiveDatasetFileCountLimit();
428-
if (!dvo.isDatasetFileCountLimitNotSet(effectiveDatasetFileCountLimit)) {
432+
if (dvo.isDatasetFileCountLimitSet(effectiveDatasetFileCountLimit)) {
429433
bld.add("effectiveDatasetFileCountLimit", effectiveDatasetFileCountLimit);
430434
}
431435
Integer datasetFileCountLimit = dvo.getDatasetFileCountLimit();
432-
if (!dvo.isDatasetFileCountLimitNotSet(datasetFileCountLimit)) {
436+
if (dvo.isDatasetFileCountLimitSet(datasetFileCountLimit)) {
433437
bld.add("datasetFileCountLimit", datasetFileCountLimit);
434438
}
439+
if (dvo.isInstanceofDataset() && dvo.isDatasetFileCountLimitSet(effectiveDatasetFileCountLimit)) {
440+
int available = effectiveDatasetFileCountLimit - datasetService.getDataFileCountByOwner(dvo.getId());
441+
bld.add("datasetFileUploadsAvailable", Math.max(0, available));
442+
}
435443
}
436444

437445
public static JsonObjectBuilder json(FileDetailsHolder ds) {
@@ -476,6 +484,7 @@ public static JsonObjectBuilder json(DatasetVersion dsv, List<String> anonymized
476484
.add("publicationDate", dataset.getPublicationDateFormattedYYYYMMDD())
477485
.add("citationDate", dataset.getCitationDateFormattedYYYYMMDD())
478486
.add("versionNote", dsv.getVersionNote());
487+
addDatasetFileCountLimit(dataset, bld);
479488

480489
License license = DatasetUtil.getLicense(dsv);
481490
if (license != null) {
Lines changed: 37 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -1,33 +1,37 @@
1-
package edu.harvard.iq.dataverse.util.json;
2-
3-
import edu.harvard.iq.dataverse.DatasetFieldServiceBean;
4-
import edu.harvard.iq.dataverse.DataverseFieldTypeInputLevelServiceBean;
5-
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
6-
7-
import jakarta.annotation.PostConstruct;
8-
import jakarta.ejb.EJB;
9-
import jakarta.ejb.Singleton;
10-
import jakarta.ejb.Startup;
11-
12-
/**
13-
* This is a small helper bean
14-
* As it is a singleton and built at application start (=deployment), it will inject the (stateless)
15-
* settings service into the OREMap once it's ready.
16-
*/
17-
@Singleton
18-
@Startup
19-
public class JsonPrinterHelper {
20-
@EJB
21-
SettingsServiceBean settingsSvc;
22-
23-
@EJB
24-
DatasetFieldServiceBean datasetFieldSvc;
25-
26-
@EJB
27-
DataverseFieldTypeInputLevelServiceBean datasetFieldInpuLevelSvc;
28-
29-
@PostConstruct
30-
public void injectService() {
31-
JsonPrinter.injectSettingsService(settingsSvc, datasetFieldSvc, datasetFieldInpuLevelSvc);
32-
}
33-
}
1+
package edu.harvard.iq.dataverse.util.json;
2+
3+
import edu.harvard.iq.dataverse.DatasetFieldServiceBean;
4+
import edu.harvard.iq.dataverse.DatasetServiceBean;
5+
import edu.harvard.iq.dataverse.DataverseFieldTypeInputLevelServiceBean;
6+
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
7+
8+
import jakarta.annotation.PostConstruct;
9+
import jakarta.ejb.EJB;
10+
import jakarta.ejb.Singleton;
11+
import jakarta.ejb.Startup;
12+
13+
/**
14+
* This is a small helper bean
15+
* As it is a singleton and built at application start (=deployment), it will inject the (stateless)
16+
* settings service into the OREMap once it's ready.
17+
*/
18+
@Singleton
19+
@Startup
20+
public class JsonPrinterHelper {
21+
@EJB
22+
SettingsServiceBean settingsSvc;
23+
24+
@EJB
25+
DatasetFieldServiceBean datasetFieldSvc;
26+
27+
@EJB
28+
DataverseFieldTypeInputLevelServiceBean datasetFieldInpuLevelSvc;
29+
30+
@EJB
31+
DatasetServiceBean datasetSvc;
32+
33+
@PostConstruct
34+
public void injectService() {
35+
JsonPrinter.injectSettingsService(settingsSvc, datasetFieldSvc, datasetFieldInpuLevelSvc, datasetSvc);
36+
}
37+
}

src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3234,7 +3234,9 @@ public void testUploadFilesWithLimits() throws JsonParseException {
32343234
.body("data.datasetFileCountLimit", equalTo(1));
32353235

32363236
Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
3237+
createDatasetResponse.prettyPrint();
32373238
Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
3239+
String datasetPersistenceId = JsonPath.from(createDatasetResponse.body().asString()).getString("data.persistentId");
32383240
createDatasetResponse.then().assertThat()
32393241
.statusCode(CREATED.getStatusCode());
32403242

@@ -3276,6 +3278,13 @@ public void testUploadFilesWithLimits() throws JsonParseException {
32763278
.body("data.effectiveDatasetFileCountLimit", equalTo(1))
32773279
.body("data.datasetFileCountLimit", equalTo(1));
32783280

3281+
Response getDatasetResponse = UtilIT.getDatasetVersion(datasetPersistenceId, DS_VERSION_DRAFT, apiToken);
3282+
getDatasetResponse.prettyPrint();
3283+
getDatasetResponse.then().assertThat()
3284+
.statusCode(OK.getStatusCode())
3285+
.body("data.effectiveDatasetFileCountLimit", equalTo(1))
3286+
.body("data.datasetFileUploadsAvailable", equalTo(0));
3287+
32793288
// Replace a file should be allowed
32803289
pathToFile = "scripts/search/data/tabular/120745.dta";
32813290
Response replaceFileResponse = UtilIT.replaceFile(fileId, pathToFile, apiToken);

src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesTest.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -274,7 +274,7 @@ public void extract_zip_performance() throws Exception {
274274

275275
private static @NotNull DatasetServiceBean mockDatasetServiceBean() {
276276
var datasetService = Mockito.mock(DatasetServiceBean.class);
277-
Mockito.when(datasetService.getDataFileCountByOwner(2L)).thenReturn(0L);
277+
Mockito.when(datasetService.getDataFileCountByOwner(2L)).thenReturn(0);
278278
return datasetService;
279279
}
280280
}

0 commit comments

Comments
 (0)