Commit 55d53dd

Merge pull request #11224 from IQSS/10476-display-on-create-field-option
10476 display on create field option
2 parents 1400d57 + 3fdc642 commit 55d53dd

File tree: 15 files changed (+243 / -74 lines)
Lines changed: 6 additions & 0 deletions
@@ -0,0 +1,6 @@
+New feature: Collection administrators can now configure which metadata fields appear during dataset creation through the `displayOnCreate` property, even when fields are not required. This provides greater control over metadata visibility and can help improve metadata completeness.
+
+- The feature is currently available through the API endpoint `/api/dataverses/{alias}/inputLevels`
+- UI implementation will be available in a future release [#11221](https://github.com/IQSS/dataverse/issues/11221)
+
+For more information, see the [API Guide](https://guides.dataverse.org/en/latest/api/native-api.html#update-collection-input-levels) and issues [#10476](https://github.com/IQSS/dataverse/issues/10476) and [#11224](https://github.com/IQSS/dataverse/pull/11224).
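To make the release note concrete, here is a minimal sketch of a request that sets `displayOnCreate` through that endpoint. The server URL, collection alias, and field name are placeholders; the PUT verb and API-token header follow the "Update Collection Input Levels" section of the API Guide linked above, which should be consulted for the endpoint's exact semantics (for example, how fields not listed in the array are treated).

    export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
    export SERVER_URL=https://demo.dataverse.org
    export ALIAS=myCollection

    # Show "alternativeTitle" on the dataset creation form without making it required
    curl -H "X-Dataverse-key:$API_TOKEN" -X PUT "$SERVER_URL/api/dataverses/$ALIAS/inputLevels" \
         -H 'Content-Type: application/json' \
         -d '[{"datasetFieldTypeName": "alternativeTitle", "required": false, "include": true, "displayOnCreate": true}]'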

doc/sphinx-guides/source/api/native-api.rst

Lines changed: 11 additions & 2 deletions
@@ -1116,15 +1116,24 @@ This endpoint expects a JSON with the following format::
   {
     "datasetFieldTypeName": "datasetFieldTypeName1",
     "required": true,
-    "include": true
+    "include": true,
+    "displayOnCreate": false
   },
   {
     "datasetFieldTypeName": "datasetFieldTypeName2",
     "required": true,
-    "include": true
+    "include": true,
+    "displayOnCreate": true
   }
   ]

+Parameters:
+
+- ``datasetFieldTypeName``: Name of the metadata field
+- ``required``: Whether the field is required (boolean)
+- ``include``: Whether the field is included (boolean)
+- ``displayOnCreate`` (optional): Whether the field is displayed during dataset creation, even when not required (boolean)
+
 .. code-block:: bash

   export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx

src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java

Lines changed: 16 additions & 1 deletion
@@ -941,6 +941,12 @@ private Predicate buildFieldPresentInDataversePredicate(Dataverse dataverse, boo
                 criteriaBuilder.isTrue(datasetFieldTypeInputLevelJoin.get("required"))
         );

+        // Predicate for displayOnCreate in input level
+        Predicate displayOnCreateInputLevelPredicate = criteriaBuilder.and(
+                criteriaBuilder.equal(datasetFieldTypeRoot, datasetFieldTypeInputLevelJoin.get("datasetFieldType")),
+                criteriaBuilder.isTrue(datasetFieldTypeInputLevelJoin.get("displayOnCreate"))
+        );
+
         // Create a subquery to check for the absence of a specific DataverseFieldTypeInputLevel.
         Subquery<Long> subquery = criteriaQuery.subquery(Long.class);
         Root<DataverseFieldTypeInputLevel> subqueryRoot = subquery.from(DataverseFieldTypeInputLevel.class);
@@ -963,10 +969,19 @@ private Predicate buildFieldPresentInDataversePredicate(Dataverse dataverse, boo
         // Otherwise, use an always-true predicate (conjunction).
         Predicate displayedOnCreatePredicate = onlyDisplayedOnCreate
                 ? criteriaBuilder.or(
-                        criteriaBuilder.or(
+                        // 1. Field marked as displayOnCreate in input level
+                        displayOnCreateInputLevelPredicate,
+
+                        // 2. Field without input level that is marked as displayOnCreate or required
+                        criteriaBuilder.and(
+                                hasNoInputLevelPredicate,
+                                criteriaBuilder.or(
                                         criteriaBuilder.isTrue(datasetFieldTypeRoot.get("displayOnCreate")),
                                         fieldRequiredInTheInstallation
+                                )
                         ),
+
+                        // 3. Field required by input level
                         requiredAsInputLevelPredicate
                 )
                 : criteriaBuilder.conjunction();

src/main/java/edu/harvard/iq/dataverse/DatasetPage.java

Lines changed: 1 addition & 0 deletions
@@ -1855,6 +1855,7 @@ private void updateDatasetFieldInputLevels() {
             if (dsf != null){
                 // Yes, call "setInclude"
                 dsf.setInclude(oneDSFieldTypeInputLevel.isInclude());
+                dsf.getDatasetFieldType().setDisplayOnCreate(oneDSFieldTypeInputLevel.isDisplayOnCreate());
                 // remove from hash
                 mapDatasetFields.remove(oneDSFieldTypeInputLevel.getDatasetFieldType().getId());
             }

src/main/java/edu/harvard/iq/dataverse/Dataverse.java

Lines changed: 6 additions & 0 deletions
@@ -438,6 +438,12 @@ public boolean isDatasetFieldTypeInInputLevels(Long datasetFieldTypeId) {
                 .anyMatch(inputLevel -> inputLevel.getDatasetFieldType().getId().equals(datasetFieldTypeId));
     }

+    public boolean isDatasetFieldTypeDisplayOnCreateAsInputLevel(Long datasetFieldTypeId) {
+        return dataverseFieldTypeInputLevels.stream()
+                .anyMatch(inputLevel -> inputLevel.getDatasetFieldType().getId().equals(datasetFieldTypeId)
+                        && inputLevel.isDisplayOnCreate());
+    }
+
     public Template getDefaultTemplate() {
         return defaultTemplate;
     }

src/main/java/edu/harvard/iq/dataverse/DataverseFieldTypeInputLevel.java

Lines changed: 11 additions & 1 deletion
@@ -58,14 +58,16 @@ public class DataverseFieldTypeInputLevel implements Serializable {
     private DatasetFieldType datasetFieldType;
     private boolean include;
     private boolean required;
+    private boolean displayOnCreate;

     public DataverseFieldTypeInputLevel () {}

-    public DataverseFieldTypeInputLevel (DatasetFieldType fieldType, Dataverse dataverse, boolean required, boolean include) {
+    public DataverseFieldTypeInputLevel (DatasetFieldType fieldType, Dataverse dataverse, boolean required, boolean include, boolean displayOnCreate) {
         this.datasetFieldType = fieldType;
         this.dataverse = dataverse;
         this.required = required;
         this.include = include;
+        this.displayOnCreate = displayOnCreate;
     }

     public Long getId() {
@@ -115,6 +117,14 @@ public void setRequired(boolean required) {
         this.required = required;
     }

+    public boolean isDisplayOnCreate() {
+        return displayOnCreate;
+    }
+
+    public void setDisplayOnCreate(boolean displayOnCreate) {
+        this.displayOnCreate = displayOnCreate;
+    }
+
     @Override
     public boolean equals(Object object) {
         // TODO: Warning - this method won't work in the case the id fields are not set

src/main/java/edu/harvard/iq/dataverse/DataverseFieldTypeInputLevelServiceBean.java

Lines changed: 9 additions & 0 deletions
@@ -117,4 +117,13 @@ public void create(DataverseFieldTypeInputLevel dataverseFieldTypeInputLevel) {
         em.persist(dataverseFieldTypeInputLevel);
     }

+    public DataverseFieldTypeInputLevel save(DataverseFieldTypeInputLevel inputLevel) {
+        if (inputLevel.getId() == null) {
+            em.persist(inputLevel);
+            return inputLevel;
+        } else {
+            return em.merge(inputLevel);
+        }
+    }
+
 }

src/main/java/edu/harvard/iq/dataverse/DataversePage.java

Lines changed: 79 additions & 53 deletions
@@ -627,44 +627,17 @@ public String save() {
             if (dataverse.isMetadataBlockRoot() && (mdb.isSelected() || mdb.isRequired())) {
                 selectedBlocks.add(mdb);
                 for (DatasetFieldType dsft : mdb.getDatasetFieldTypes()) {
-                    // currently we don't allow input levels for setting an optional field as conditionally required
-                    // so we skip looking at parents (which get set automatically with their children)
-                    if (!dsft.isHasChildren() && dsft.isRequiredDV()) {
-                        boolean addRequiredInputLevels = false;
-                        boolean parentAlreadyAdded = false;
+                    if (!dsft.isChild()) {
+                        // Save input level for parent field
+                        saveInputLevels(listDFTIL, dsft, dataverse);

-                        if (!dsft.isHasParent() && dsft.isInclude()) {
-                            addRequiredInputLevels = !dsft.isRequired();
-                        }
-                        if (dsft.isHasParent() && dsft.getParentDatasetFieldType().isInclude()) {
-                            addRequiredInputLevels = !dsft.isRequired() || !dsft.getParentDatasetFieldType().isRequired();
-                        }
-
-                        if (addRequiredInputLevels) {
-                            listDFTIL.add(new DataverseFieldTypeInputLevel(dsft, dataverse,true, true));
-
-                            //also add the parent as required (if it hasn't been added already)
-                            // todo: review needed .equals() methods, then change this to use a Set, in order to simplify code
-                            if (dsft.isHasParent()) {
-                                DataverseFieldTypeInputLevel parentToAdd = new DataverseFieldTypeInputLevel(dsft.getParentDatasetFieldType(), dataverse, true, true);
-                                for (DataverseFieldTypeInputLevel dataverseFieldTypeInputLevel : listDFTIL) {
-                                    if (dataverseFieldTypeInputLevel.getDatasetFieldType().getId() == parentToAdd.getDatasetFieldType().getId()) {
-                                        parentAlreadyAdded = true;
-                                        break;
-                                    }
-                                }
-                                if (!parentAlreadyAdded) {
-                                    // Only add the parent once. There's a UNIQUE (dataverse_id, datasetfieldtype_id)
-                                    // constraint on the dataversefieldtypeinputlevel table we need to avoid.
-                                    listDFTIL.add(parentToAdd);
-                                }
-                            }
+                        // Handle child fields
+                        if (dsft.isHasChildren()) {
+                            for (DatasetFieldType child : dsft.getChildDatasetFieldTypes()) {
+                                saveInputLevels(listDFTIL, child, dataverse);
+                            }
                         }
                     }
-                    if ((!dsft.isHasParent() && !dsft.isInclude())
-                            || (dsft.isHasParent() && !dsft.getParentDatasetFieldType().isInclude())) {
-                        listDFTIL.add(new DataverseFieldTypeInputLevel(dsft, dataverse,false, false));
-                    }
                 }
             }
         }
@@ -1030,27 +1003,11 @@ private void refreshAllMetadataBlocks() {

             for (DatasetFieldType dsft : mdb.getDatasetFieldTypes()) {
                 if (!dsft.isChild()) {
-                    DataverseFieldTypeInputLevel dsfIl = dataverseFieldTypeInputLevelService.findByDataverseIdDatasetFieldTypeId(dataverseIdForInputLevel, dsft.getId());
-                    if (dsfIl != null) {
-                        dsft.setRequiredDV(dsfIl.isRequired());
-                        dsft.setInclude(dsfIl.isInclude());
-                    } else {
-                        dsft.setRequiredDV(dsft.isRequired());
-                        dsft.setInclude(true);
-                    }
+                    loadInputLevels(dsft, dataverseIdForInputLevel);
                     dsft.setOptionSelectItems(resetSelectItems(dsft));
                     if (dsft.isHasChildren()) {
                         for (DatasetFieldType child : dsft.getChildDatasetFieldTypes()) {
-                            DataverseFieldTypeInputLevel dsfIlChild = dataverseFieldTypeInputLevelService.findByDataverseIdDatasetFieldTypeId(dataverseIdForInputLevel, child.getId());
-                            if (dsfIlChild != null) {
-                                child.setRequiredDV(dsfIlChild.isRequired());
-                                child.setInclude(dsfIlChild.isInclude());
-                            } else {
-                                // in the case of conditionally required (child = true, parent = false)
-                                // we set this to false; i.e this is the default "don't override" value
-                                child.setRequiredDV(child.isRequired() && dsft.isRequired());
-                                child.setInclude(true);
-                            }
+                            loadInputLevels(child, dataverseIdForInputLevel);
                             child.setOptionSelectItems(resetSelectItems(child));
                         }
                     }
@@ -1061,6 +1018,22 @@ private void refreshAllMetadataBlocks() {
         setAllMetadataBlocks(retList);
     }

+    private void loadInputLevels(DatasetFieldType dsft, Long dataverseIdForInputLevel) {
+        DataverseFieldTypeInputLevel dsfIl = dataverseFieldTypeInputLevelService
+                .findByDataverseIdDatasetFieldTypeId(dataverseIdForInputLevel, dsft.getId());
+
+        if (dsfIl != null) {
+            dsft.setRequiredDV(dsfIl.isRequired());
+            dsft.setInclude(dsfIl.isInclude());
+            dsft.setDisplayOnCreate(dsfIl.isDisplayOnCreate());
+        } else {
+            // If there is no input level, use the default values
+            dsft.setRequiredDV(dsft.isRequired());
+            dsft.setInclude(true);
+            dsft.setDisplayOnCreate(false);
+        }
+    }
+
     public void validateAlias(FacesContext context, UIComponent toValidate, Object value) {
         if (!StringUtils.isEmpty((String) value)) {
             String alias = (String) value;
@@ -1337,4 +1310,57 @@ public Set<Entry<String, String>> getPidProviderOptions() {
         }
         return options;
     }
+
+    public void updateDisplayOnCreate(Long mdbId, Long dsftId, boolean currentValue) {
+        for (MetadataBlock mdb : allMetadataBlocks) {
+            if (mdb.getId().equals(mdbId)) {
+                for (DatasetFieldType dsft : mdb.getDatasetFieldTypes()) {
+                    if (dsft.getId().equals(dsftId)) {
+                        // Update value in memory
+                        dsft.setDisplayOnCreate(!currentValue);
+
+                        // Update or create input level
+                        DataverseFieldTypeInputLevel existingLevel = dataverseFieldTypeInputLevelService
+                                .findByDataverseIdDatasetFieldTypeId(dataverse.getId(), dsftId);
+
+                        if (existingLevel != null) {
+                            existingLevel.setDisplayOnCreate(!currentValue);
+                            dataverseFieldTypeInputLevelService.save(existingLevel);
+                        } else {
+                            DataverseFieldTypeInputLevel newLevel = new DataverseFieldTypeInputLevel(
+                                    dsft,
+                                    dataverse,
+                                    dsft.isRequiredDV(),
+                                    true, // default include
+                                    !currentValue // new value of displayOnCreate
+                            );
+                            dataverseFieldTypeInputLevelService.save(newLevel);
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    private void saveInputLevels(List<DataverseFieldTypeInputLevel> listDFTIL, DatasetFieldType dsft, Dataverse dataverse) {
+        // If the field already has an input level, update it
+        DataverseFieldTypeInputLevel existingLevel = dataverseFieldTypeInputLevelService
+                .findByDataverseIdDatasetFieldTypeId(dataverse.getId(), dsft.getId());
+
+        if (existingLevel != null) {
+            existingLevel.setDisplayOnCreate(dsft.isDisplayOnCreate());
+            existingLevel.setInclude(dsft.isInclude());
+            existingLevel.setRequired(dsft.isRequiredDV());
+            listDFTIL.add(existingLevel);
+        } else if (dsft.isInclude() || dsft.isDisplayOnCreate() || dsft.isRequiredDV()) {
+            // Only create new input level if there is any specific configuration
+            listDFTIL.add(new DataverseFieldTypeInputLevel(
+                    dsft,
+                    dataverse,
+                    dsft.isRequiredDV(),
+                    dsft.isInclude(),
+                    dsft.isDisplayOnCreate()
+            ));
+        }
+    }
 }

src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java

Lines changed: 4 additions & 0 deletions
@@ -959,9 +959,11 @@ public String getCollectionDatasetSchema(String dataverseAlias, Map<String, Map
                 if (dsfIl != null) {
                     dsft.setRequiredDV(dsfIl.isRequired());
                     dsft.setInclude(dsfIl.isInclude());
+                    dsft.setDisplayOnCreate(dsfIl.isDisplayOnCreate());
                 } else {
                     dsft.setRequiredDV(dsft.isRequired());
                     dsft.setInclude(true);
+                    dsft.setDisplayOnCreate(false);
                 }
                 List<String> childrenRequired = new ArrayList<>();
                 List<String> childrenAllowed = new ArrayList<>();
@@ -971,11 +973,13 @@ public String getCollectionDatasetSchema(String dataverseAlias, Map<String, Map
                     if (dsfIlChild != null) {
                         child.setRequiredDV(dsfIlChild.isRequired());
                         child.setInclude(dsfIlChild.isInclude());
+                        child.setDisplayOnCreate(dsfIlChild.isDisplayOnCreate());
                     } else {
                         // in the case of conditionally required (child = true, parent = false)
                         // we set this to false; i.e this is the default "don't override" value
                         child.setRequiredDV(child.isRequired() && dsft.isRequired());
                         child.setInclude(true);
+                        child.setDisplayOnCreate(false);
                     }
                     if (child.isRequired()) {
                         childrenRequired.add(child.getName());
