Skip to content

Commit 5dd7ce8

Browse files
committed
re-work the deletion of datafile featured items when publishing dataset
1 parent 5d7e8a5 commit 5dd7ce8

File tree

5 files changed

+107
-23
lines changed

5 files changed

+107
-23
lines changed

src/main/java/edu/harvard/iq/dataverse/dataverse/featured/DataverseFeaturedItem.java

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -146,9 +146,13 @@ public static void validateTypeAndDvObject(String dvIdtf, DvObject dvObject, Dat
146146
throw new IllegalArgumentException(BundleUtil.getStringFromBundle("dataverse.update.featuredItems.error.typeAndDvObjectMismatch"));
147147
}
148148
if (dvObject instanceof DataFile) {
149-
if (((DataFile)dvObject).isRestricted()) {
149+
DataFile df = (DataFile)dvObject;
150+
if (df.isRestricted()) {
150151
throw new IllegalArgumentException(BundleUtil.getStringFromBundle("dataverseFeaturedItems.errors.restricted"));
151152
}
153+
if (!df.isReleased()) {
154+
throw new IllegalArgumentException(BundleUtil.getStringFromBundle("dataverseFeaturedItems.errors.notPublished", List.of("Dataset")));
155+
}
152156
} else if (!dvObject.isReleased()) {
153157
throw new IllegalArgumentException(BundleUtil.getStringFromBundle("dataverseFeaturedItems.errors.notPublished", List.of(CaseUtils.toCamelCase(dvType.name(), true))));
154158
}

src/main/java/edu/harvard/iq/dataverse/dataverse/featured/DataverseFeaturedItemServiceBean.java

Lines changed: 22 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,6 @@
11
package edu.harvard.iq.dataverse.dataverse.featured;
22

3-
import com.google.common.collect.Lists;
43
import edu.harvard.iq.dataverse.*;
5-
import edu.harvard.iq.dataverse.authorization.Permission;
6-
import edu.harvard.iq.dataverse.authorization.users.User;
7-
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
84
import edu.harvard.iq.dataverse.settings.JvmSettings;
95
import edu.harvard.iq.dataverse.util.BundleUtil;
106
import edu.harvard.iq.dataverse.util.FileUtil;
@@ -13,7 +9,6 @@
139
import jakarta.inject.Named;
1410
import jakarta.persistence.EntityManager;
1511
import jakarta.persistence.PersistenceContext;
16-
import jakarta.servlet.http.HttpServletRequest;
1712

1813
import java.io.File;
1914
import java.io.IOException;
@@ -22,12 +17,14 @@
2217
import java.nio.file.Files;
2318
import java.nio.file.Path;
2419
import java.nio.file.StandardCopyOption;
25-
import java.util.EnumSet;
20+
import java.util.ArrayList;
2621
import java.util.List;
22+
import java.util.logging.Logger;
2723

2824
@Stateless
2925
@Named
3026
public class DataverseFeaturedItemServiceBean implements Serializable {
27+
private static final Logger logger = Logger.getLogger(DataverseFeaturedItemServiceBean.class.getCanonicalName());
3128

3229
public static class InvalidImageFileException extends Exception {
3330
public InvalidImageFileException(String message) {
@@ -70,6 +67,25 @@ public void deleteAllByDvObjectId(Long id) {
7067
.executeUpdate();
7168
}
7269

70+
public void deleteInvalidatedFeaturedItemsByDataset(Dataset dataset) {
71+
// Delete any Featured Items that contain Datafiles that were removed or restricted in the latest published version
72+
List<DataverseFeaturedItem> featuredItems = findAllByDataverseOrdered(dataset.getOwner());
73+
for (DataverseFeaturedItem featuredItem : featuredItems) {
74+
if (featuredItem.getDvObject() != null && featuredItem.getType().equalsIgnoreCase(DataverseFeaturedItem.TYPES.DATAFILE.name())) {
75+
DataFile df = (DataFile) featuredItem.getDvObject();
76+
List<Long> latestVersionFileIds = new ArrayList<>();
77+
dataset.getLatestVersion().getFileMetadatas().stream()
78+
.map(FileMetadata::getId)
79+
.forEachOrdered(latestVersionFileIds::add);
80+
// If the datafile is restricted or part of this dataset but not in the latest version we need to delete the featured item
81+
if (df.isRestricted() || (dataset.getFiles().contains(df) && !latestVersionFileIds.contains(df.getId()))) {
82+
logger.fine("Deleting invalidated Featured Item for " + (df.isRestricted() ? "Restricted" : "Deleted") + "Datafile ID: " + df.getId());
83+
deleteAllByDvObjectId(df.getId());
84+
}
85+
}
86+
}
87+
}
88+
7389
public List<DataverseFeaturedItem> findAllByDataverseOrdered(Dataverse dataverse) {
7490
List<DataverseFeaturedItem> items = em
7591
.createNamedQuery("DataverseFeaturedItem.findByDataverseOrderedByDisplayOrder", DataverseFeaturedItem.class)

src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -246,6 +246,9 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
246246

247247
logger.info("Successfully published the dataset "+readyDataset.getGlobalId().asString());
248248
readyDataset = ctxt.em().merge(readyDataset);
249+
250+
// Delete any Featured Items that are invalidated by publishing this version
251+
ctxt.dataverseFeaturedItems().deleteInvalidatedFeaturedItemsByDataset(readyDataset);
249252

250253
return readyDataset;
251254
}

src/test/java/edu/harvard/iq/dataverse/api/DataverseFeaturedItemsIT.java

Lines changed: 54 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -37,6 +37,16 @@ public void testCreateFeaturedItemWithDvOdbject() {
3737
Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
3838
String datasetPersistentId = UtilIT.getDatasetPersistentIdFromResponse(createDatasetResponse);
3939
Integer datasetId = UtilIT.getDatasetIdFromResponse(createDatasetResponse);
40+
// Upload a file
41+
String fileName = "50by1000.dta";
42+
String pathToFile = "scripts/search/data/tabular/" + fileName;
43+
Response uploadFileResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken);
44+
uploadFileResponse.prettyPrint();
45+
JsonPath uploadedFile = JsonPath.from(uploadFileResponse.body().asString());
46+
String fileId = String.valueOf(uploadedFile.getInt("data.files[0].dataFile.id"));
47+
assertTrue(UtilIT.sleepForLock(datasetId, "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration");
48+
49+
// Publish Dataverse and Dataset with Datafile
4050
UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken);
4151
UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken).prettyPrint();
4252

@@ -66,14 +76,6 @@ public void testCreateFeaturedItemWithDvOdbject() {
6676
dvObjectDisplayName = createdFeaturedItem.getString("data.dvObjectDisplayName");
6777
assertEquals(dataverseAlias, dvObjectDisplayName); // create dataverse sets the name = alias
6878

69-
// Upload a file
70-
String fileName = "50by1000.dta";
71-
String pathToFile = "scripts/search/data/tabular/" + fileName;
72-
Response uploadFileResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken);
73-
uploadFileResponse.prettyPrint();
74-
JsonPath uploadedFile = JsonPath.from(uploadFileResponse.body().asString());
75-
String fileId = String.valueOf(uploadedFile.getInt("data.files[0].dataFile.id"));
76-
7779
// Test creating a featured item of type Datafile with good file id. Returns OK
7880
createFeatureItemResponse = UtilIT.createDataverseFeaturedItem(dataverseAlias, apiToken, null, 0, null, "datafile", fileId);
7981
createFeatureItemResponse.prettyPrint();
@@ -82,7 +84,8 @@ public void testCreateFeaturedItemWithDvOdbject() {
8284
dvObjectIdentifier = createdFeaturedItem.getString("data.dvObjectIdentifier");
8385
assertEquals(fileId, dvObjectIdentifier);
8486
dvObjectDisplayName = createdFeaturedItem.getString("data.dvObjectDisplayName");
85-
assertEquals(fileName, dvObjectDisplayName);
87+
String tabFileNameConvert = fileName.substring(0, fileName.indexOf(".dta")) + ".tab";
88+
assertEquals(tabFileNameConvert, dvObjectDisplayName);
8689
}
8790

8891
@Test
@@ -153,6 +156,43 @@ public void testUnpublishedPublishedDatasetFeatureItem() {
153156
.statusCode(OK.getStatusCode());
154157
}
155158

159+
@Test
160+
public void testUnpublishedPublishedDataFileFeatureItem() {
161+
// Set up a new dataverse and dataset without publishing
162+
String apiToken = createUserAndGetApiToken();
163+
String dataverseAlias = createDataverseAndGetAlias(apiToken);
164+
UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken);
165+
Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
166+
createDatasetResponse.prettyPrint();
167+
String datasetPersistentId = UtilIT.getDatasetPersistentIdFromResponse(createDatasetResponse);
168+
Integer datasetId = UtilIT.getDatasetIdFromResponse(createDatasetResponse);
169+
170+
// Upload a file to be a Featured Item
171+
String fileName = "50by1000.dta";
172+
String pathToFile = "scripts/search/data/tabular/" + fileName;
173+
Response uploadFileResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken);
174+
uploadFileResponse.prettyPrint();
175+
JsonPath uploadedFile = JsonPath.from(uploadFileResponse.body().asString());
176+
String fileId = String.valueOf(uploadedFile.getInt("data.files[0].dataFile.id"));
177+
assertTrue(UtilIT.sleepForLock(datasetId, "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration");
178+
179+
// Test creating a featured item of type Datafile. Returns Bad request due to the dataset not being published
180+
Response createFeatureItemResponse = UtilIT.createDataverseFeaturedItem(dataverseAlias, apiToken, null, 0, null, "datafile", fileId);
181+
createFeatureItemResponse.prettyPrint();
182+
createFeatureItemResponse.then().assertThat()
183+
.statusCode(BAD_REQUEST.getStatusCode())
184+
.body("message", equalTo(BundleUtil.getStringFromBundle("dataverseFeaturedItems.errors.notPublished", List.of("Dataset"))));
185+
186+
// Publish the Dataset
187+
UtilIT.publishDatasetViaNativeApi(datasetPersistentId, "major", apiToken).prettyPrint();
188+
189+
// Test creating a featured item of type DataFile. Returns OK due to the dataset being published
190+
createFeatureItemResponse = UtilIT.createDataverseFeaturedItem(dataverseAlias, apiToken, null, 0, null, "datafile", fileId);
191+
createFeatureItemResponse.prettyPrint();
192+
createFeatureItemResponse.then().assertThat()
193+
.statusCode(OK.getStatusCode());
194+
}
195+
156196
@Test
157197
public void testRestrictedUnrestrictedDatafileFeatureItem() {
158198
// Set up a new dataverse and dataset without publishing
@@ -162,7 +202,6 @@ public void testRestrictedUnrestrictedDatafileFeatureItem() {
162202
Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
163203
createDatasetResponse.prettyPrint();
164204
String datasetId = String.valueOf(UtilIT.getDatasetIdFromResponse(createDatasetResponse));
165-
UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken).prettyPrint();
166205

167206
// Upload a file
168207
String pathToFile = "scripts/search/data/tabular/50by1000.dta";
@@ -175,6 +214,9 @@ public void testRestrictedUnrestrictedDatafileFeatureItem() {
175214
// Restrict the file
176215
UtilIT.restrictFile(fileId, true, apiToken).prettyPrint();
177216

217+
// Publish the Dataset with the uploaded file
218+
UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken).prettyPrint();
219+
178220
// Test creating a featured item of type Datafile with good file id. Returns Bad request due to the datafile being restricted
179221
Response createFeatureItemResponse = UtilIT.createDataverseFeaturedItem(dataverseAlias, apiToken, null, 0, null, "datafile", fileId);
180222
createFeatureItemResponse.prettyPrint();
@@ -184,6 +226,8 @@ public void testRestrictedUnrestrictedDatafileFeatureItem() {
184226

185227
// Un-restrict the file
186228
UtilIT.restrictFile(fileId, false, apiToken).prettyPrint();
229+
// Publish the Dataset with the unrestricted file
230+
UtilIT.publishDatasetViaNativeApi(datasetId, "minor", apiToken).prettyPrint();
187231

188232
// Test creating a featured item of type Datafile with good file id. Returns OK request due to the datafile being un-restricted
189233
createFeatureItemResponse = UtilIT.createDataverseFeaturedItem(dataverseAlias, apiToken, null, 0, null, "datafile", fileId);

src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java

Lines changed: 23 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1673,16 +1673,19 @@ public void testCreateFeaturedItem() {
16731673
Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
16741674
createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
16751675
String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
1676-
UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken).prettyPrint();
16771676
Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
16781677
createDatasetResponse.prettyPrint();
16791678
String datasetPersistentId = UtilIT.getDatasetPersistentIdFromResponse(createDatasetResponse);
16801679
Integer datasetId = UtilIT.getDatasetIdFromResponse(createDatasetResponse);
1681-
UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken).prettyPrint();
16821680
String pathToFile1 = "src/main/webapp/resources/images/cc0.png";
16831681
Response uploadFileResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile1, apiToken);
16841682
uploadFileResponse.prettyPrint();
16851683
String datafileId = String.valueOf(UtilIT.getDataFileIdFromResponse(uploadFileResponse));
1684+
assertTrue(UtilIT.sleepForLock(datasetId, "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration");
1685+
1686+
// Publish Dataverse and Dataset with Datafile
1687+
UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken).prettyPrint();
1688+
UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken).prettyPrint();
16861689

16871690
// Should not return any error when not passing a file
16881691

@@ -1727,9 +1730,9 @@ public void testCreateFeaturedItem() {
17271730
.statusCode(NOT_FOUND.getStatusCode());
17281731

17291732
// Testing new dvobject-type featured items
1730-
createFeatureItemResponse = UtilIT.createDataverseFeaturedItem(dataverseAlias, apiToken, "test dataset", 10, null, "dataset", datasetPersistentId);
1733+
createFeatureItemResponse = UtilIT.createDataverseFeaturedItem(dataverseAlias, apiToken, null, 10, null, "dataset", datasetPersistentId);
17311734
createFeatureItemResponse.prettyPrint();
1732-
createFeatureItemResponse = UtilIT.createDataverseFeaturedItem(dataverseAlias, apiToken, "test datafile", 11, null, "datafile", datafileId);
1735+
createFeatureItemResponse = UtilIT.createDataverseFeaturedItem(dataverseAlias, apiToken, null, 11, null, "datafile", datafileId);
17331736
createFeatureItemResponse.prettyPrint();
17341737
Response listDataverseFeaturedItemsResponse = UtilIT.listDataverseFeaturedItems(dataverseAlias, apiToken);
17351738
listDataverseFeaturedItemsResponse.prettyPrint();
@@ -1957,6 +1960,7 @@ public void testDeleteFeaturedItemWithDvObject() {
19571960
createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
19581961
String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
19591962
Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
1963+
String datasetPersistentId = UtilIT.getDatasetPersistentIdFromResponse(createDatasetResponse);
19601964
Integer datasetId = UtilIT.getDatasetIdFromResponse(createDatasetResponse);
19611965

19621966
// Upload a file
@@ -1965,6 +1969,11 @@ public void testDeleteFeaturedItemWithDvObject() {
19651969
uploadFileResponse.prettyPrint();
19661970
Integer datafileId = UtilIT.getDataFileIdFromResponse(uploadFileResponse);
19671971
assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToFile1);
1972+
1973+
// Publish the Dataverse and Dataset
1974+
UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken).prettyPrint();
1975+
UtilIT.publishDatasetViaNativeApi(datasetPersistentId, "major", apiToken).prettyPrint();
1976+
19681977
Response createDataverseFeaturedItemResponse = UtilIT.createDataverseFeaturedItem(dataverseAlias, apiToken, null, 0, pathToFile1, "datafile", String.valueOf(datafileId));
19691978
createDataverseFeaturedItemResponse.prettyPrint();
19701979
int featuredItemId = UtilIT.getDatasetIdFromResponse(createDataverseFeaturedItemResponse);
@@ -1975,15 +1984,23 @@ public void testDeleteFeaturedItemWithDvObject() {
19751984
.body("data.size()", equalTo(1))
19761985
.assertThat().statusCode(OK.getStatusCode());
19771986

1978-
// delete file (cascade deletes the featured item)
1987+
// delete the file creates a new DRAFT version of the Dataset but the File still exists in the latest published version
19791988
UtilIT.deleteFile(datafileId,apiToken).prettyPrint();
19801989
listFeaturedItemsResponse = UtilIT.listDataverseFeaturedItems(dataverseAlias, apiToken);
19811990
listFeaturedItemsResponse.prettyPrint();
1991+
listFeaturedItemsResponse.then()
1992+
.body("data.size()", equalTo(1))
1993+
.assertThat().statusCode(OK.getStatusCode());
1994+
1995+
// publish the draft version with the file deleted will cause the featured item to be deleted
1996+
UtilIT.publishDatasetViaNativeApi(datasetPersistentId, "major", apiToken).prettyPrint();
1997+
listFeaturedItemsResponse = UtilIT.listDataverseFeaturedItems(dataverseAlias, apiToken);
1998+
listFeaturedItemsResponse.prettyPrint();
19821999
listFeaturedItemsResponse.then()
19832000
.body("data.size()", equalTo(0))
19842001
.assertThat().statusCode(OK.getStatusCode());
19852002

1986-
// try to delete the featured item and if it's already deleted (by deleting the file) it should be NOT FOUND
2003+
// try to delete the featured item if it's already deleted should be NOT FOUND
19872004
Response deleteItemResponse = UtilIT.deleteDataverseFeaturedItem(featuredItemId, apiToken);
19882005
deleteItemResponse.prettyPrint();
19892006
deleteItemResponse.then()

0 commit comments

Comments
 (0)