Commit

Merge pull request IQSS#11306 from IQSS/revert-11224-10476-display-on-create-field-option

Revert "10476 display on create field option"
ofahimIQSS authored Mar 5, 2025
2 parents 99019de + 2441ec8 commit ab8110f
Showing 15 changed files with 74 additions and 237 deletions.
6 changes: 0 additions & 6 deletions doc/release-notes/10476-display-on-create-field-option.md

This file was deleted.

7 changes: 2 additions & 5 deletions doc/sphinx-guides/source/api/native-api.rst
@@ -1116,14 +1116,12 @@ This endpoint expects a JSON with the following format::
{
"datasetFieldTypeName": "datasetFieldTypeName1",
"required": true,
"include": true,
"displayOnCreate": false
"include": true
},
{
"datasetFieldTypeName": "datasetFieldTypeName2",
"required": true,
"include": true,
"displayOnCreate": true
"include": true
}
]

@@ -1132,7 +1130,6 @@ Parameters:
- ``datasetFieldTypeName``: Name of the metadata field
- ``required``: Whether the field is required (boolean)
- ``include``: Whether the field is included (boolean)
- ``displayOnCreate`` (optional): Whether the field is displayed during dataset creation, even when not required (boolean)

.. code-block:: bash
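A hypothetical request sketch, assuming the endpoint documented in this section is ``PUT /api/dataverses/$ALIAS/inputLevels`` and that ``$SERVER_URL``, ``$ALIAS``, and ``$API_TOKEN`` are already set; none of these values appear in this excerpt, and the collapsed bash sample in the guide is not reproduced here. With the post-revert payload (no ``displayOnCreate`` key), a request could look like:

.. code-block:: bash

   # Placeholder values; the endpoint path is assumed, not confirmed by this excerpt.
   export SERVER_URL=https://demo.dataverse.org
   export ALIAS=root
   export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx

   curl -H "X-Dataverse-key:$API_TOKEN" -X PUT \
     -H "Content-Type: application/json" \
     "$SERVER_URL/api/dataverses/$ALIAS/inputLevels" \
     -d '[
           {"datasetFieldTypeName": "datasetFieldTypeName1", "required": true, "include": true},
           {"datasetFieldTypeName": "datasetFieldTypeName2", "required": true, "include": true}
         ]'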
@@ -941,12 +941,6 @@ private Predicate buildFieldPresentInDataversePredicate(Dataverse dataverse, boo
criteriaBuilder.isTrue(datasetFieldTypeInputLevelJoin.get("required"))
);

// Predicate for displayOnCreate in input level
Predicate displayOnCreateInputLevelPredicate = criteriaBuilder.and(
criteriaBuilder.equal(datasetFieldTypeRoot, datasetFieldTypeInputLevelJoin.get("datasetFieldType")),
criteriaBuilder.isTrue(datasetFieldTypeInputLevelJoin.get("displayOnCreate"))
);

// Create a subquery to check for the absence of a specific DataverseFieldTypeInputLevel.
Subquery<Long> subquery = criteriaQuery.subquery(Long.class);
Root<DataverseFieldTypeInputLevel> subqueryRoot = subquery.from(DataverseFieldTypeInputLevel.class);
@@ -969,19 +963,10 @@
// Otherwise, use an always-true predicate (conjunction).
Predicate displayedOnCreatePredicate = onlyDisplayedOnCreate
? criteriaBuilder.or(
// 1. Field marked as displayOnCreate in input level
displayOnCreateInputLevelPredicate,

// 2. Field without input level that is marked as displayOnCreate or required
criteriaBuilder.and(
hasNoInputLevelPredicate,
criteriaBuilder.or(
criteriaBuilder.or(
criteriaBuilder.isTrue(datasetFieldTypeRoot.get("displayOnCreate")),
fieldRequiredInTheInstallation
)
),

// 3. Field required by input level
requiredAsInputLevelPredicate
)
: criteriaBuilder.conjunction();
1 change: 0 additions & 1 deletion src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
@@ -1855,7 +1855,6 @@ private void updateDatasetFieldInputLevels() {
if (dsf != null){
// Yes, call "setInclude"
dsf.setInclude(oneDSFieldTypeInputLevel.isInclude());
dsf.getDatasetFieldType().setDisplayOnCreate(oneDSFieldTypeInputLevel.isDisplayOnCreate());
// remove from hash
mapDatasetFields.remove(oneDSFieldTypeInputLevel.getDatasetFieldType().getId());
}
6 changes: 0 additions & 6 deletions src/main/java/edu/harvard/iq/dataverse/Dataverse.java
@@ -438,12 +438,6 @@ public boolean isDatasetFieldTypeInInputLevels(Long datasetFieldTypeId) {
.anyMatch(inputLevel -> inputLevel.getDatasetFieldType().getId().equals(datasetFieldTypeId));
}

public boolean isDatasetFieldTypeDisplayOnCreateAsInputLevel(Long datasetFieldTypeId) {
return dataverseFieldTypeInputLevels.stream()
.anyMatch(inputLevel -> inputLevel.getDatasetFieldType().getId().equals(datasetFieldTypeId)
&& inputLevel.isDisplayOnCreate());
}

public Template getDefaultTemplate() {
return defaultTemplate;
}
@@ -58,16 +58,14 @@ public class DataverseFieldTypeInputLevel implements Serializable {
private DatasetFieldType datasetFieldType;
private boolean include;
private boolean required;
private boolean displayOnCreate;

public DataverseFieldTypeInputLevel () {}

public DataverseFieldTypeInputLevel (DatasetFieldType fieldType, Dataverse dataverse, boolean required, boolean include, boolean displayOnCreate) {
public DataverseFieldTypeInputLevel (DatasetFieldType fieldType, Dataverse dataverse, boolean required, boolean include) {
this.datasetFieldType = fieldType;
this.dataverse = dataverse;
this.required = required;
this.include = include;
this.displayOnCreate = displayOnCreate;
}

public Long getId() {
@@ -117,14 +115,6 @@ public void setRequired(boolean required) {
this.required = required;
}

public boolean isDisplayOnCreate() {
return displayOnCreate;
}

public void setDisplayOnCreate(boolean displayOnCreate) {
this.displayOnCreate = displayOnCreate;
}

@Override
public boolean equals(Object object) {
// TODO: Warning - this method won't work in the case the id fields are not set
@@ -117,13 +117,4 @@ public void create(DataverseFieldTypeInputLevel dataverseFieldTypeInputLevel) {
em.persist(dataverseFieldTypeInputLevel);
}

public DataverseFieldTypeInputLevel save(DataverseFieldTypeInputLevel inputLevel) {
if (inputLevel.getId() == null) {
em.persist(inputLevel);
return inputLevel;
} else {
return em.merge(inputLevel);
}
}

}
132 changes: 53 additions & 79 deletions src/main/java/edu/harvard/iq/dataverse/DataversePage.java
@@ -627,17 +627,44 @@ public String save() {
if (dataverse.isMetadataBlockRoot() && (mdb.isSelected() || mdb.isRequired())) {
selectedBlocks.add(mdb);
for (DatasetFieldType dsft : mdb.getDatasetFieldTypes()) {
if (!dsft.isChild()) {
// Save input level for parent field
saveInputLevels(listDFTIL, dsft, dataverse);
// currently we don't allow input levels for setting an optional field as conditionally required
// so we skip looking at parents (which get set automatically with their children)
if (!dsft.isHasChildren() && dsft.isRequiredDV()) {
boolean addRequiredInputLevels = false;
boolean parentAlreadyAdded = false;

// Handle child fields
if (dsft.isHasChildren()) {
for (DatasetFieldType child : dsft.getChildDatasetFieldTypes()) {
saveInputLevels(listDFTIL, child, dataverse);
}
if (!dsft.isHasParent() && dsft.isInclude()) {
addRequiredInputLevels = !dsft.isRequired();
}
if (dsft.isHasParent() && dsft.getParentDatasetFieldType().isInclude()) {
addRequiredInputLevels = !dsft.isRequired() || !dsft.getParentDatasetFieldType().isRequired();
}

if (addRequiredInputLevels) {
listDFTIL.add(new DataverseFieldTypeInputLevel(dsft, dataverse,true, true));

//also add the parent as required (if it hasn't been added already)
// todo: review needed .equals() methods, then change this to use a Set, in order to simplify code
if (dsft.isHasParent()) {
DataverseFieldTypeInputLevel parentToAdd = new DataverseFieldTypeInputLevel(dsft.getParentDatasetFieldType(), dataverse, true, true);
for (DataverseFieldTypeInputLevel dataverseFieldTypeInputLevel : listDFTIL) {
if (dataverseFieldTypeInputLevel.getDatasetFieldType().getId() == parentToAdd.getDatasetFieldType().getId()) {
parentAlreadyAdded = true;
break;
}
}
if (!parentAlreadyAdded) {
// Only add the parent once. There's a UNIQUE (dataverse_id, datasetfieldtype_id)
// constraint on the dataversefieldtypeinputlevel table we need to avoid.
listDFTIL.add(parentToAdd);
}
}
}
}
if ((!dsft.isHasParent() && !dsft.isInclude())
|| (dsft.isHasParent() && !dsft.getParentDatasetFieldType().isInclude())) {
listDFTIL.add(new DataverseFieldTypeInputLevel(dsft, dataverse,false, false));
}
}
}
}
@@ -1003,11 +1030,27 @@ private void refreshAllMetadataBlocks() {

for (DatasetFieldType dsft : mdb.getDatasetFieldTypes()) {
if (!dsft.isChild()) {
loadInputLevels(dsft, dataverseIdForInputLevel);
DataverseFieldTypeInputLevel dsfIl = dataverseFieldTypeInputLevelService.findByDataverseIdDatasetFieldTypeId(dataverseIdForInputLevel, dsft.getId());
if (dsfIl != null) {
dsft.setRequiredDV(dsfIl.isRequired());
dsft.setInclude(dsfIl.isInclude());
} else {
dsft.setRequiredDV(dsft.isRequired());
dsft.setInclude(true);
}
dsft.setOptionSelectItems(resetSelectItems(dsft));
if (dsft.isHasChildren()) {
for (DatasetFieldType child : dsft.getChildDatasetFieldTypes()) {
loadInputLevels(child, dataverseIdForInputLevel);
DataverseFieldTypeInputLevel dsfIlChild = dataverseFieldTypeInputLevelService.findByDataverseIdDatasetFieldTypeId(dataverseIdForInputLevel, child.getId());
if (dsfIlChild != null) {
child.setRequiredDV(dsfIlChild.isRequired());
child.setInclude(dsfIlChild.isInclude());
} else {
// in the case of conditionally required (child = true, parent = false)
// we set this to false; i.e this is the default "don't override" value
child.setRequiredDV(child.isRequired() && dsft.isRequired());
child.setInclude(true);
}
child.setOptionSelectItems(resetSelectItems(child));
}
}
@@ -1018,22 +1061,6 @@
setAllMetadataBlocks(retList);
}

private void loadInputLevels(DatasetFieldType dsft, Long dataverseIdForInputLevel) {
DataverseFieldTypeInputLevel dsfIl = dataverseFieldTypeInputLevelService
.findByDataverseIdDatasetFieldTypeId(dataverseIdForInputLevel, dsft.getId());

if (dsfIl != null) {
dsft.setRequiredDV(dsfIl.isRequired());
dsft.setInclude(dsfIl.isInclude());
dsft.setDisplayOnCreate(dsfIl.isDisplayOnCreate());
} else {
// If there is no input level, use the default values
dsft.setRequiredDV(dsft.isRequired());
dsft.setInclude(true);
dsft.setDisplayOnCreate(false);
}
}

public void validateAlias(FacesContext context, UIComponent toValidate, Object value) {
if (!StringUtils.isEmpty((String) value)) {
String alias = (String) value;
@@ -1310,57 +1337,4 @@ public Set<Entry<String, String>> getPidProviderOptions() {
}
return options;
}

public void updateDisplayOnCreate(Long mdbId, Long dsftId, boolean currentValue) {
for (MetadataBlock mdb : allMetadataBlocks) {
if (mdb.getId().equals(mdbId)) {
for (DatasetFieldType dsft : mdb.getDatasetFieldTypes()) {
if (dsft.getId().equals(dsftId)) {
// Update value in memory
dsft.setDisplayOnCreate(!currentValue);

// Update or create input level
DataverseFieldTypeInputLevel existingLevel = dataverseFieldTypeInputLevelService
.findByDataverseIdDatasetFieldTypeId(dataverse.getId(), dsftId);

if (existingLevel != null) {
existingLevel.setDisplayOnCreate(!currentValue);
dataverseFieldTypeInputLevelService.save(existingLevel);
} else {
DataverseFieldTypeInputLevel newLevel = new DataverseFieldTypeInputLevel(
dsft,
dataverse,
dsft.isRequiredDV(),
true, // default include
!currentValue // new value of displayOnCreate
);
dataverseFieldTypeInputLevelService.save(newLevel);
}
}
}
}
}
}

private void saveInputLevels(List<DataverseFieldTypeInputLevel> listDFTIL, DatasetFieldType dsft, Dataverse dataverse) {
// If the field already has an input level, update it
DataverseFieldTypeInputLevel existingLevel = dataverseFieldTypeInputLevelService
.findByDataverseIdDatasetFieldTypeId(dataverse.getId(), dsft.getId());

if (existingLevel != null) {
existingLevel.setDisplayOnCreate(dsft.isDisplayOnCreate());
existingLevel.setInclude(dsft.isInclude());
existingLevel.setRequired(dsft.isRequiredDV());
listDFTIL.add(existingLevel);
} else if (dsft.isInclude() || dsft.isDisplayOnCreate() || dsft.isRequiredDV()) {
// Only create new input level if there is any specific configuration
listDFTIL.add(new DataverseFieldTypeInputLevel(
dsft,
dataverse,
dsft.isRequiredDV(),
dsft.isInclude(),
dsft.isDisplayOnCreate()
));
}
}
}
@@ -959,11 +959,9 @@ public String getCollectionDatasetSchema(String dataverseAlias, Map<String, Map
if (dsfIl != null) {
dsft.setRequiredDV(dsfIl.isRequired());
dsft.setInclude(dsfIl.isInclude());
dsft.setDisplayOnCreate(dsfIl.isDisplayOnCreate());
} else {
dsft.setRequiredDV(dsft.isRequired());
dsft.setInclude(true);
dsft.setDisplayOnCreate(false);
}
List<String> childrenRequired = new ArrayList<>();
List<String> childrenAllowed = new ArrayList<>();
@@ -973,13 +971,11 @@
if (dsfIlChild != null) {
child.setRequiredDV(dsfIlChild.isRequired());
child.setInclude(dsfIlChild.isInclude());
child.setDisplayOnCreate(dsfIlChild.isDisplayOnCreate());
} else {
// in the case of conditionally required (child = true, parent = false)
// we set this to false; i.e this is the default "don't override" value
child.setRequiredDV(child.isRequired() && dsft.isRequired());
child.setInclude(true);
child.setDisplayOnCreate(false);
}
if (child.isRequired()) {
childrenRequired.add(child.getName());