Skip to content

Commit 5fbfea2

Browse files
authored
Merge pull request #11273 from IQSS/11243-editmetadata-api-empty-values
Edit Metadata API endpoint - support empty values
2 parents 2e02be2 + 42a78aa commit 5fbfea2

File tree

18 files changed

+1193
-197
lines changed

18 files changed

+1193
-197
lines changed
Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
### Edit Dataset Metadata API extension
2+
3+
- This endpoint now allows removing fields (by sending empty values), as long as they are not required by the dataset.
4+
- New ``sourceInternalVersionNumber`` optional query parameter, which prevents inconsistencies by rejecting the
  update if other users have changed the dataset version while it was being edited.
Lines changed: 50 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,50 @@
1+
{
2+
"fields": [
3+
{
4+
"typeName": "alternativeTitle",
5+
"multiple": true,
6+
"typeClass": "primitive",
7+
"value": []
8+
},
9+
{
10+
"typeName": "distributor",
11+
"multiple": true,
12+
"typeClass": "compound",
13+
"value": [
14+
{
15+
"distributorName": {
16+
"typeName": "distributorName",
17+
"multiple": false,
18+
"typeClass": "primitive",
19+
"value": ""
20+
},
21+
"distributorAffiliation": {
22+
"typeName": "distributorAffiliation",
23+
"multiple": false,
24+
"typeClass": "primitive",
25+
"value": ""
26+
}
27+
}
28+
]
29+
},
30+
{
31+
"fields": [
32+
{
33+
"typeName": "author",
34+
"value": [
35+
{
36+
"authorName": {
37+
"typeName": "authorName",
38+
"value": "Belicheck, Bill"
39+
},
40+
"authorAffiliation": {
41+
"typeName": "authorAffiliation",
42+
"value": ""
43+
}
44+
}
45+
]
46+
}
47+
]
48+
}
49+
]
50+
}

doc/sphinx-guides/source/api/native-api.rst

Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2124,6 +2124,30 @@ The fully expanded example above (without environment variables) looks like this
21242124
21252125
For these edits your JSON file need only include those dataset fields which you would like to edit. A sample JSON file may be downloaded here: :download:`dataset-edit-metadata-sample.json <../_static/api/dataset-edit-metadata-sample.json>`
21262126

2127+
This endpoint also allows removing fields, as long as they are not required by the dataset. To remove a field, send an empty value (``""``) for individual fields. For multiple fields, send an empty array (``[]``). A sample JSON file for removing fields may be downloaded here: :download:`dataset-edit-metadata-delete-fields-sample.json <../_static/api/dataset-edit-metadata-delete-fields-sample.json>`
2128+
2129+
If another user updates the dataset version metadata before you send the update request, data inconsistencies may occur. To prevent this, you can use the optional ``sourceInternalVersionNumber`` query parameter. This parameter must be set to the internal version number of the dataset version being updated. Note that internal version numbers increase sequentially with each version update.
2130+
2131+
If this parameter is provided, the update will proceed only if the internal version number remains unchanged. Otherwise, the request will fail with an error.
2132+
2133+
Example using ``sourceInternalVersionNumber``:
2134+
2135+
.. code-block:: bash
2136+
2137+
export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
2138+
export SERVER_URL=https://demo.dataverse.org
2139+
export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/BCCP9Z
2140+
export SOURCE_INTERNAL_VERSION_NUMBER=5
2141+
2142+
curl -H "X-Dataverse-key: $API_TOKEN" -X PUT "$SERVER_URL/api/datasets/:persistentId/editMetadata?persistentId=$PERSISTENT_IDENTIFIER&replace=true&sourceInternalVersionNumber=$SOURCE_INTERNAL_VERSION_NUMBER" --upload-file dataset-update-metadata.json
2143+
2144+
The fully expanded example above (without environment variables) looks like this:
2145+
2146+
.. code-block:: bash
2147+
2148+
curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/datasets/:persistentId/editMetadata/?persistentId=doi:10.5072/FK2/BCCP9Z&replace=true&sourceInternalVersionNumber=5" --upload-file dataset-update-metadata.json
2149+
2150+
21272151
Delete Dataset Metadata
21282152
~~~~~~~~~~~~~~~~~~~~~~~
21292153

src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java

Lines changed: 37 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -923,6 +923,42 @@ public List<DatasetFieldType> findAllInMetadataBlockAndDataverse(MetadataBlock m
923923
return em.createQuery(criteriaQuery).getResultList();
924924
}
925925

926+
public boolean isFieldRequiredInDataverse(DatasetFieldType datasetFieldType, Dataverse dataverse) {
927+
CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder();
928+
CriteriaQuery<Long> criteriaQuery = criteriaBuilder.createQuery(Long.class);
929+
930+
Root<Dataverse> dataverseRoot = criteriaQuery.from(Dataverse.class);
931+
Root<DatasetFieldType> datasetFieldTypeRoot = criteriaQuery.from(DatasetFieldType.class);
932+
933+
// Join Dataverse with DataverseFieldTypeInputLevel on the "dataverseFieldTypeInputLevels" attribute, using a LEFT JOIN.
934+
Join<Dataverse, DataverseFieldTypeInputLevel> datasetFieldTypeInputLevelJoin = dataverseRoot.join("dataverseFieldTypeInputLevels", JoinType.LEFT);
935+
936+
// Define a predicate to include DatasetFieldTypes that are marked as required in the input level.
937+
Predicate requiredAsInputLevelPredicate = criteriaBuilder.and(
938+
criteriaBuilder.equal(datasetFieldTypeRoot, datasetFieldTypeInputLevelJoin.get("datasetFieldType")),
939+
criteriaBuilder.isTrue(datasetFieldTypeInputLevelJoin.get("required"))
940+
);
941+
942+
// Define a predicate to include the required fields in the installation.
943+
Predicate requiredInTheInstallationPredicate = buildFieldRequiredInTheInstallationPredicate(criteriaBuilder, datasetFieldTypeRoot);
944+
945+
// Build the final WHERE clause by combining all the predicates.
946+
criteriaQuery.where(
947+
criteriaBuilder.equal(dataverseRoot.get("id"), dataverse.getId()),
948+
criteriaBuilder.equal(datasetFieldTypeRoot.get("id"), datasetFieldType.getId()),
949+
criteriaBuilder.or(
950+
requiredAsInputLevelPredicate,
951+
requiredInTheInstallationPredicate
952+
)
953+
);
954+
955+
criteriaQuery.select(criteriaBuilder.count(datasetFieldTypeRoot));
956+
957+
Long count = em.createQuery(criteriaQuery).getSingleResult();
958+
959+
return count != null && count > 0;
960+
}
961+
926962
private Predicate buildFieldPresentInDataversePredicate(Dataverse dataverse, boolean onlyDisplayedOnCreate, CriteriaQuery<DatasetFieldType> criteriaQuery, CriteriaBuilder criteriaBuilder, Root<DatasetFieldType> datasetFieldTypeRoot, Root<MetadataBlock> metadataBlockRoot) {
927963
Root<Dataverse> dataverseRoot = criteriaQuery.from(Dataverse.class);
928964

@@ -960,7 +996,7 @@ private Predicate buildFieldPresentInDataversePredicate(Dataverse dataverse, boo
960996
// Define a predicate to exclude DatasetFieldTypes that have no associated input level (i.e., the subquery does not return a result).
961997
Predicate hasNoInputLevelPredicate = criteriaBuilder.not(criteriaBuilder.exists(subquery));
962998

963-
// Define a predicate to include the required fields in Dataverse.
999+
// Define a predicate to include the required fields in the installation.
9641000
Predicate fieldRequiredInTheInstallation = buildFieldRequiredInTheInstallationPredicate(criteriaBuilder, datasetFieldTypeRoot);
9651001

9661002
// Define a predicate for displaying DatasetFieldTypes on create.

src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22

33
import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord;
44
import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean;
5+
import edu.harvard.iq.dataverse.dataset.DatasetFieldsValidator;
56
import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
67
import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUserServiceBean;
78
import edu.harvard.iq.dataverse.dataverse.featured.DataverseFeaturedItemServiceBean;
@@ -189,6 +190,9 @@ public class EjbDataverseEngine {
189190

190191
@EJB
191192
DataverseFeaturedItemServiceBean dataverseFeaturedItemServiceBean;
193+
194+
@EJB
195+
DatasetFieldsValidator datasetFieldsValidator;
192196

193197
@EJB
194198
EjbDataverseEngineInner innerEngine;
@@ -531,6 +535,11 @@ public DataverseFeaturedItemServiceBean dataverseFeaturedItems() {
531535
return dataverseFeaturedItemServiceBean;
532536
}
533537

538+
@Override
539+
public DatasetFieldsValidator datasetFieldsValidator() {
540+
return datasetFieldsValidator;
541+
}
542+
534543
@Override
535544
public StorageUseServiceBean storageUse() {
536545
return storageUseService;

src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java

Lines changed: 9 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -51,10 +51,7 @@
5151

5252
import java.io.InputStream;
5353
import java.net.URI;
54-
import java.util.Arrays;
55-
import java.util.Collections;
56-
import java.util.Map;
57-
import java.util.UUID;
54+
import java.util.*;
5855
import java.util.concurrent.Callable;
5956
import java.util.logging.Level;
6057
import java.util.logging.Logger;
@@ -446,6 +443,14 @@ public Command<DatasetVersion> handleLatestPublished() {
446443
return dsv;
447444
}
448445

446+
protected void validateInternalVersionNumberIsNotOutdated(Dataset dataset, int internalVersion) throws WrappedResponse {
447+
if (dataset.getLatestVersion().getVersion() > internalVersion) {
448+
throw new WrappedResponse(
449+
badRequest(BundleUtil.getStringFromBundle("abstractApiBean.error.datasetInternalVersionNumberIsOutdated", Collections.singletonList(Integer.toString(internalVersion))))
450+
);
451+
}
452+
}
453+
449454
protected DataFile findDataFileOrDie(String id) throws WrappedResponse {
450455
DataFile datafile;
451456
if (id.equals(PERSISTENT_ID_KEY)) {

0 commit comments

Comments
 (0)