@@ -146,9 +146,13 @@ public static void validateTypeAndDvObject(String dvIdtf, DvObject dvObject, Dat
throw new IllegalArgumentException(BundleUtil.getStringFromBundle("dataverse.update.featuredItems.error.typeAndDvObjectMismatch"));
}
if (dvObject instanceof DataFile) {
if (((DataFile)dvObject).isRestricted()) {
DataFile df = (DataFile)dvObject;
if (df.isRestricted()) {
throw new IllegalArgumentException(BundleUtil.getStringFromBundle("dataverseFeaturedItems.errors.restricted"));
}
if (!df.isReleased()) {
throw new IllegalArgumentException(BundleUtil.getStringFromBundle("dataverseFeaturedItems.errors.notPublished", List.of("Dataset")));
}
} else if (!dvObject.isReleased()) {
throw new IllegalArgumentException(BundleUtil.getStringFromBundle("dataverseFeaturedItems.errors.notPublished", List.of(CaseUtils.toCamelCase(dvType.name(), true))));
}
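
Note: the added checks reject a restricted DataFile first and only then check release state, and the notPublished message names "Dataset" because a DataFile only counts as released once its parent dataset has a published version containing it. A minimal, framework-free sketch of that decision logic (illustrative only; plain booleans and message strings stand in for the Dataverse entities and bundle lookups):

public class FeaturedDataFileValidationSketch {
    // Mirrors the order of checks added above: restriction is rejected first,
    // then release state, so a restricted file fails even in a published dataset.
    static void validateDataFile(boolean restricted, boolean released) {
        if (restricted) {
            throw new IllegalArgumentException("dataverseFeaturedItems.errors.restricted");
        }
        if (!released) {
            throw new IllegalArgumentException("dataverseFeaturedItems.errors.notPublished [Dataset]");
        }
    }

    public static void main(String[] args) {
        validateDataFile(false, true); // accepted: published and unrestricted
        try {
            validateDataFile(true, true); // rejected even though published
        } catch (IllegalArgumentException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}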
@@ -1,10 +1,6 @@
package edu.harvard.iq.dataverse.dataverse.featured;

import com.google.common.collect.Lists;
import edu.harvard.iq.dataverse.*;
import edu.harvard.iq.dataverse.authorization.Permission;
import edu.harvard.iq.dataverse.authorization.users.User;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.settings.JvmSettings;
import edu.harvard.iq.dataverse.util.BundleUtil;
import edu.harvard.iq.dataverse.util.FileUtil;
@@ -13,7 +9,6 @@
import jakarta.inject.Named;
import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext;
import jakarta.servlet.http.HttpServletRequest;

import java.io.File;
import java.io.IOException;
@@ -22,12 +17,14 @@
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.EnumSet;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Logger;

@Stateless
@Named
public class DataverseFeaturedItemServiceBean implements Serializable {
private static final Logger logger = Logger.getLogger(DataverseFeaturedItemServiceBean.class.getCanonicalName());

public static class InvalidImageFileException extends Exception {
public InvalidImageFileException(String message) {
@@ -70,6 +67,25 @@ public void deleteAllByDvObjectId(Long id) {
.executeUpdate();
}

public void deleteInvalidatedFeaturedItemsByDataset(Dataset dataset) {
    // Delete any Featured Items that point to DataFiles that were removed or restricted in the latest published version
    List<DataverseFeaturedItem> featuredItems = findAllByDataverseOrdered(dataset.getOwner());
    // Collect the DataFile ids in the latest version once, outside the loop.
    // (FileMetadata::getId would yield FileMetadata ids, which are not comparable to DataFile ids.)
    List<Long> latestVersionFileIds = new ArrayList<>();
    dataset.getLatestVersion().getFileMetadatas().stream()
            .map(fm -> fm.getDataFile().getId())
            .forEachOrdered(latestVersionFileIds::add);
    for (DataverseFeaturedItem featuredItem : featuredItems) {
        if (featuredItem.getDvObject() != null && featuredItem.getType().equalsIgnoreCase(DataverseFeaturedItem.TYPES.DATAFILE.name())) {
            DataFile df = (DataFile) featuredItem.getDvObject();
            // If the datafile is restricted, or belongs to this dataset but is not in the latest version, delete the featured item
            if (df.isRestricted() || (dataset.getFiles().contains(df) && !latestVersionFileIds.contains(df.getId()))) {
                logger.fine("Deleting invalidated Featured Item for " + (df.isRestricted() ? "restricted" : "deleted") + " DataFile ID: " + df.getId());
                deleteAllByDvObjectId(df.getId());
            }
        }
    }
}
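
Aside: the per-file rule above boils down to a small predicate. A hedged sketch, with illustrative names that are not part of this change:

static boolean featuredItemInvalidated(boolean restricted, boolean inDataset, boolean inLatestVersion) {
    // Invalidated when the file became restricted, or when it belongs to the
    // dataset being published but was removed from the newly published version.
    return restricted || (inDataset && !inLatestVersion);
}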

public List<DataverseFeaturedItem> findAllByDataverseOrdered(Dataverse dataverse) {
List<DataverseFeaturedItem> items = em
.createNamedQuery("DataverseFeaturedItem.findByDataverseOrderedByDisplayOrder", DataverseFeaturedItem.class)
@@ -246,6 +246,9 @@ public Dataset execute(CommandContext ctxt) throws CommandException {

logger.info("Successfully published the dataset "+readyDataset.getGlobalId().asString());
readyDataset = ctxt.em().merge(readyDataset);

// Delete any Featured Items that are invalidated by publishing this version
ctxt.dataverseFeaturedItems().deleteInvalidatedFeaturedItemsByDataset(readyDataset);

return readyDataset;
}
@@ -37,6 +37,16 @@ public void testCreateFeaturedItemWithDvOdbject() {
Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
String datasetPersistentId = UtilIT.getDatasetPersistentIdFromResponse(createDatasetResponse);
Integer datasetId = UtilIT.getDatasetIdFromResponse(createDatasetResponse);
// Upload a file
String fileName = "50by1000.dta";
String pathToFile = "scripts/search/data/tabular/" + fileName;
Response uploadFileResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken);
uploadFileResponse.prettyPrint();
JsonPath uploadedFile = JsonPath.from(uploadFileResponse.body().asString());
String fileId = String.valueOf(uploadedFile.getInt("data.files[0].dataFile.id"));
assertTrue(UtilIT.sleepForLock(datasetId, "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration");

// Publish Dataverse and Dataset with Datafile
UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken);
UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken).prettyPrint();

@@ -66,14 +76,6 @@ public void testCreateFeaturedItemWithDvOdbject() {
dvObjectDisplayName = createdFeaturedItem.getString("data.dvObjectDisplayName");
assertEquals(dataverseAlias, dvObjectDisplayName); // create dataverse sets the name = alias

// Upload a file
String fileName = "50by1000.dta";
String pathToFile = "scripts/search/data/tabular/" + fileName;
Response uploadFileResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken);
uploadFileResponse.prettyPrint();
JsonPath uploadedFile = JsonPath.from(uploadFileResponse.body().asString());
String fileId = String.valueOf(uploadedFile.getInt("data.files[0].dataFile.id"));

// Test creating a featured item of type Datafile with good file id. Returns OK
createFeatureItemResponse = UtilIT.createDataverseFeaturedItem(dataverseAlias, apiToken, null, 0, null, "datafile", fileId);
createFeatureItemResponse.prettyPrint();
@@ -82,7 +84,8 @@ public void testCreateFeaturedItemWithDvOdbject() {
dvObjectIdentifier = createdFeaturedItem.getString("data.dvObjectIdentifier");
assertEquals(fileId, dvObjectIdentifier);
dvObjectDisplayName = createdFeaturedItem.getString("data.dvObjectDisplayName");
assertEquals(fileName, dvObjectDisplayName);
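// Ingest converts the tabular .dta file to .tab, so the display name carries the .tab extension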
String tabFileNameConvert = fileName.substring(0, fileName.indexOf(".dta")) + ".tab";
assertEquals(tabFileNameConvert, dvObjectDisplayName);
}

@Test
@@ -153,6 +156,43 @@ public void testUnpublishedPublishedDatasetFeatureItem() {
.statusCode(OK.getStatusCode());
}

@Test
public void testUnpublishedPublishedDataFileFeatureItem() {
// Set up a published dataverse with a dataset that is not yet published
String apiToken = createUserAndGetApiToken();
String dataverseAlias = createDataverseAndGetAlias(apiToken);
UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken);
Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
createDatasetResponse.prettyPrint();
String datasetPersistentId = UtilIT.getDatasetPersistentIdFromResponse(createDatasetResponse);
Integer datasetId = UtilIT.getDatasetIdFromResponse(createDatasetResponse);

// Upload a file to be a Featured Item
String fileName = "50by1000.dta";
String pathToFile = "scripts/search/data/tabular/" + fileName;
Response uploadFileResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken);
uploadFileResponse.prettyPrint();
JsonPath uploadedFile = JsonPath.from(uploadFileResponse.body().asString());
String fileId = String.valueOf(uploadedFile.getInt("data.files[0].dataFile.id"));
assertTrue(UtilIT.sleepForLock(datasetId, "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration");

// Test creating a featured item of type Datafile. Returns Bad request due to the dataset not being published
Response createFeatureItemResponse = UtilIT.createDataverseFeaturedItem(dataverseAlias, apiToken, null, 0, null, "datafile", fileId);
createFeatureItemResponse.prettyPrint();
createFeatureItemResponse.then().assertThat()
.statusCode(BAD_REQUEST.getStatusCode())
.body("message", equalTo(BundleUtil.getStringFromBundle("dataverseFeaturedItems.errors.notPublished", List.of("Dataset"))));

// Publish the Dataset
UtilIT.publishDatasetViaNativeApi(datasetPersistentId, "major", apiToken).prettyPrint();

// Test creating a featured item of type DataFile. Returns OK due to the dataset being published
createFeatureItemResponse = UtilIT.createDataverseFeaturedItem(dataverseAlias, apiToken, null, 0, null, "datafile", fileId);
createFeatureItemResponse.prettyPrint();
createFeatureItemResponse.then().assertThat()
.statusCode(OK.getStatusCode());
}

@Test
public void testRestrictedUnrestrictedDatafileFeatureItem() {
// Set up a new dataverse and dataset without publishing
@@ -162,7 +202,6 @@ public void testRestrictedUnrestrictedDatafileFeatureItem() {
Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
createDatasetResponse.prettyPrint();
String datasetId = String.valueOf(UtilIT.getDatasetIdFromResponse(createDatasetResponse));
UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken).prettyPrint();

// Upload a file
String pathToFile = "scripts/search/data/tabular/50by1000.dta";
@@ -175,6 +214,9 @@
// Restrict the file
UtilIT.restrictFile(fileId, true, apiToken).prettyPrint();

// Publish the Dataset with the uploaded file
UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken).prettyPrint();

// Test creating a featured item of type Datafile with good file id. Returns Bad request due to the datafile being restricted
Response createFeatureItemResponse = UtilIT.createDataverseFeaturedItem(dataverseAlias, apiToken, null, 0, null, "datafile", fileId);
createFeatureItemResponse.prettyPrint();
@@ -184,6 +226,8 @@

// Un-restrict the file
UtilIT.restrictFile(fileId, false, apiToken).prettyPrint();
// Publish the Dataset with the unrestricted file
UtilIT.publishDatasetViaNativeApi(datasetId, "minor", apiToken).prettyPrint();

// Test creating a featured item of type Datafile with good file id. Returns OK due to the datafile being unrestricted
createFeatureItemResponse = UtilIT.createDataverseFeaturedItem(dataverseAlias, apiToken, null, 0, null, "datafile", fileId);
29 changes: 23 additions & 6 deletions src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java
@@ -1673,16 +1673,19 @@ public void testCreateFeaturedItem() {
Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken).prettyPrint();
Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
createDatasetResponse.prettyPrint();
String datasetPersistentId = UtilIT.getDatasetPersistentIdFromResponse(createDatasetResponse);
Integer datasetId = UtilIT.getDatasetIdFromResponse(createDatasetResponse);
UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken).prettyPrint();
String pathToFile1 = "src/main/webapp/resources/images/cc0.png";
Response uploadFileResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile1, apiToken);
uploadFileResponse.prettyPrint();
String datafileId = String.valueOf(UtilIT.getDataFileIdFromResponse(uploadFileResponse));
assertTrue(UtilIT.sleepForLock(datasetId, "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration");

// Publish Dataverse and Dataset with Datafile
UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken).prettyPrint();
UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken).prettyPrint();

// Should not return any error when not passing a file

@@ -1727,9 +1730,9 @@ public void testCreateFeaturedItem() {
.statusCode(NOT_FOUND.getStatusCode());

// Testing new dvobject-type featured items
createFeatureItemResponse = UtilIT.createDataverseFeaturedItem(dataverseAlias, apiToken, "test dataset", 10, null, "dataset", datasetPersistentId);
createFeatureItemResponse = UtilIT.createDataverseFeaturedItem(dataverseAlias, apiToken, null, 10, null, "dataset", datasetPersistentId);
createFeatureItemResponse.prettyPrint();
createFeatureItemResponse = UtilIT.createDataverseFeaturedItem(dataverseAlias, apiToken, "test datafile", 11, null, "datafile", datafileId);
createFeatureItemResponse = UtilIT.createDataverseFeaturedItem(dataverseAlias, apiToken, null, 11, null, "datafile", datafileId);
createFeatureItemResponse.prettyPrint();
Response listDataverseFeaturedItemsResponse = UtilIT.listDataverseFeaturedItems(dataverseAlias, apiToken);
listDataverseFeaturedItemsResponse.prettyPrint();
@@ -1957,6 +1960,7 @@ public void testDeleteFeaturedItemWithDvObject() {
createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
String datasetPersistentId = UtilIT.getDatasetPersistentIdFromResponse(createDatasetResponse);
Integer datasetId = UtilIT.getDatasetIdFromResponse(createDatasetResponse);

// Upload a file
@@ -1965,6 +1969,11 @@
uploadFileResponse.prettyPrint();
Integer datafileId = UtilIT.getDataFileIdFromResponse(uploadFileResponse);
assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToFile1);

// Publish the Dataverse and Dataset
UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken).prettyPrint();
UtilIT.publishDatasetViaNativeApi(datasetPersistentId, "major", apiToken).prettyPrint();

Response createDataverseFeaturedItemResponse = UtilIT.createDataverseFeaturedItem(dataverseAlias, apiToken, null, 0, pathToFile1, "datafile", String.valueOf(datafileId));
createDataverseFeaturedItemResponse.prettyPrint();
int featuredItemId = UtilIT.getDatasetIdFromResponse(createDataverseFeaturedItemResponse);
@@ -1975,15 +1984,23 @@
.body("data.size()", equalTo(1))
.assertThat().statusCode(OK.getStatusCode());

// delete file (cascade deletes the featured item)
// deleting the file creates a new DRAFT version of the Dataset, but the file still exists in the latest published version
UtilIT.deleteFile(datafileId,apiToken).prettyPrint();
listFeaturedItemsResponse = UtilIT.listDataverseFeaturedItems(dataverseAlias, apiToken);
listFeaturedItemsResponse.prettyPrint();
listFeaturedItemsResponse.then()
.body("data.size()", equalTo(1))
.assertThat().statusCode(OK.getStatusCode());

// publishing the draft version with the file deleted causes the featured item to be deleted
UtilIT.publishDatasetViaNativeApi(datasetPersistentId, "major", apiToken).prettyPrint();
listFeaturedItemsResponse = UtilIT.listDataverseFeaturedItems(dataverseAlias, apiToken);
listFeaturedItemsResponse.prettyPrint();
listFeaturedItemsResponse.then()
.body("data.size()", equalTo(0))
.assertThat().statusCode(OK.getStatusCode());

// try to delete the featured item and if it's already deleted (by deleting the file) it should be NOT FOUND
// trying to delete the featured item after it has already been deleted should return NOT FOUND
Response deleteItemResponse = UtilIT.deleteDataverseFeaturedItem(featuredItemId, apiToken);
deleteItemResponse.prettyPrint();
deleteItemResponse.then()