Merge remote-tracking branch 'IQSS/develop' into IQSS/11126-Payara6.2024.12
qqmyers committed Mar 5, 2025
2 parents feffb27 + ab8110f commit 5ffe880
Showing 25 changed files with 239 additions and 346 deletions.
6 changes: 0 additions & 6 deletions doc/release-notes/10476-display-on-create-field-option.md

This file was deleted.

3 changes: 3 additions & 0 deletions doc/release-notes/11242-fix-oricid-recognition.md
@@ -0,0 +1,3 @@
A bug that caused ORCIDs starting with https://orcid.org/, entered as author identifiers, to be ignored when creating DataCite metadata has been fixed. This primarily affected users of the ORCID external vocabulary script, as the manual entry form recommended not using the URL form.

The display of authorIdentifier, when not using any external vocabulary scripts, has been improved so that either the plain identifier (e.g. "0000-0002-1825-0097") or its URL form (e.g. "https://orcid.org/0000-0002-1825-0097") results in a valid link in the display (for identifier types that have a URL form). The URL form is now recommended for manual entry.
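To illustrate the improved display behavior, here is a minimal, hypothetical Java sketch of accepting either form (the class, method, and regex are invented for illustration and are not code from this commit):

```java
// Hypothetical sketch: accept either a bare ORCID or its URL form and
// return the URL to use as a display link. Not code from this commit.
import java.util.regex.Pattern;

public class OrcidDisplayExample {

    private static final String ORCID_RESOLVER = "https://orcid.org/";
    // Four groups of four characters; the final character may be X.
    private static final Pattern BARE_ORCID =
            Pattern.compile("\\d{4}-\\d{4}-\\d{4}-\\d{3}[\\dX]");

    static String asDisplayUrl(String identifier) {
        if (identifier.startsWith(ORCID_RESOLVER)) {
            return identifier; // already in URL form
        }
        if (BARE_ORCID.matcher(identifier).matches()) {
            return ORCID_RESOLVER + identifier; // prepend the resolver
        }
        return null; // not recognizable as an ORCID
    }

    public static void main(String[] args) {
        // Both forms yield the same display link:
        System.out.println(asDisplayUrl("0000-0002-1825-0097"));
        System.out.println(asDisplayUrl("https://orcid.org/0000-0002-1825-0097"));
    }
}
```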
7 changes: 2 additions & 5 deletions doc/sphinx-guides/source/api/native-api.rst
@@ -1116,14 +1116,12 @@ This endpoint expects a JSON with the following format::
{
"datasetFieldTypeName": "datasetFieldTypeName1",
"required": true,
"include": true,
"displayOnCreate": false
"include": true
},
{
"datasetFieldTypeName": "datasetFieldTypeName2",
"required": true,
"include": true,
"displayOnCreate": true
"include": true
}
]

@@ -1132,7 +1130,6 @@ Parameters:
- ``datasetFieldTypeName``: Name of the metadata field
- ``required``: Whether the field is required (boolean)
- ``include``: Whether the field is included (boolean)
- ``displayOnCreate`` (optional): Whether the field is displayed during dataset creation, even when not required (boolean)

.. code-block:: bash
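The guide's full curl example is not shown in this view; as a hedged alternative, the same request could be issued from Java 15+. The server URL and collection alias below are placeholders, and the PUT /api/dataverses/{alias}/inputLevels route and X-Dataverse-key header follow the conventions used elsewhere in the guide and should be verified against it:

```java
// Hedged sketch: updating a collection's input levels via the native API.
// Route and header are assumptions based on the guide's conventions.
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class UpdateInputLevelsExample {
    public static void main(String[] args) throws Exception {
        String serverUrl = "https://demo.dataverse.org"; // placeholder installation
        String alias = "root";                           // placeholder collection alias
        String apiToken = System.getenv("API_TOKEN");

        String json = """
                [
                  {"datasetFieldTypeName": "datasetFieldTypeName1", "required": true, "include": true},
                  {"datasetFieldTypeName": "datasetFieldTypeName2", "required": true, "include": true}
                ]""";

        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create(serverUrl + "/api/dataverses/" + alias + "/inputLevels"))
                .header("X-Dataverse-key", apiToken)
                .header("Content-Type", "application/json")
                .PUT(HttpRequest.BodyPublishers.ofString(json))
                .build();

        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode() + " " + response.body());
    }
}
```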
2 changes: 1 addition & 1 deletion doc/sphinx-guides/source/user/dataset-management.rst
@@ -54,7 +54,7 @@ Adding a New Dataset
#. Click on the "Add Data" button and select "New Dataset" in the dropdown menu. **Note:** If you are on the root Dataverse collection or your My Data page, or if you click the "Add Data" link in the navbar, the dataset you create will be hosted in the root Dataverse collection. You can change this by selecting, in the Host Dataverse collection dropdown on the create dataset form, another Dataverse collection in which you have permission to create datasets. This option is not available after the dataset has been created.
#. To get started quickly, fill in at minimum the required fields marked with an asterisk (e.g., the Dataset Title, Author Name, Description Text, Point of Contact Email, and Subject) to get a Data Citation with a DOI.

#. When entering author identifiers, select the type from the dropdown (e.g. "ORCID") and under "Identifier" enter just the unique identifier (e.g. "0000-0002-1825-0097") rather than the full URL (e.g. "https://orcid.org/0000-0002-1825-0097").
#. When entering author identifiers, select the type from the dropdown (e.g. "ORCID") and under "Identifier" enter the full URL (e.g. "https://orcid.org/0000-0002-1825-0097") for identifier types that have a URL form. The shorter form of the unique identifier (e.g. "0000-0002-1825-0097") can also be entered, but the URL form is preferred when available.

#. Scroll down to the "Files" section and click on "Select Files to Add" to add all the relevant files to your Dataset.
You can also upload your files directly from your Dropbox. **Tip:** You can drag and drop or select multiple files at a time from your desktop
9 changes: 7 additions & 2 deletions src/main/java/edu/harvard/iq/dataverse/DatasetAuthor.java
@@ -97,8 +97,13 @@ public static String getIdentifierAsUrl(String idType, String idValue) {
if (idType != null && !idType.isEmpty() && idValue != null && !idValue.isEmpty()) {
try {
ExternalIdentifier externalIdentifier = ExternalIdentifier.valueOf(idType);
if (externalIdentifier.isValidIdentifier(idValue))
return externalIdentifier.format(idValue);
if (externalIdentifier.isValidIdentifier(idValue)) {
String uri = externalIdentifier.format(idValue);
//The DAI identifier is a URI starting with "info" - we don't want to return it as a URL (we assume non-null return values will be rendered as links in the display)
if (uri.startsWith("http")) {
return uri;
}
}
} catch (Exception e) {
// non registered identifier
}
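A hedged sketch of the behavior this change produces, with expected values inferred from the diff rather than taken from the project's tests (the DAI value is hypothetical and assumed to pass validation):

```java
// Behavior sketch for DatasetAuthor.getIdentifierAsUrl after this change.
// An ORCID formats to an https URL, so it is returned and can be shown as a link:
String orcidUrl = DatasetAuthor.getIdentifierAsUrl("ORCID", "0000-0002-1825-0097");
// expected: "https://orcid.org/0000-0002-1825-0097"

// A DAI formats to an "info:..." URI, which does not start with "http",
// so null is returned and no link is rendered (hypothetical DAI value):
String daiUrl = DatasetAuthor.getIdentifierAsUrl("DAI", "123456789");
// expected: null
```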
@@ -941,12 +941,6 @@ private Predicate buildFieldPresentInDataversePredicate(Dataverse dataverse, boo
criteriaBuilder.isTrue(datasetFieldTypeInputLevelJoin.get("required"))
);

// Predicate for displayOnCreate in input level
Predicate displayOnCreateInputLevelPredicate = criteriaBuilder.and(
criteriaBuilder.equal(datasetFieldTypeRoot, datasetFieldTypeInputLevelJoin.get("datasetFieldType")),
criteriaBuilder.isTrue(datasetFieldTypeInputLevelJoin.get("displayOnCreate"))
);

// Create a subquery to check for the absence of a specific DataverseFieldTypeInputLevel.
Subquery<Long> subquery = criteriaQuery.subquery(Long.class);
Root<DataverseFieldTypeInputLevel> subqueryRoot = subquery.from(DataverseFieldTypeInputLevel.class);
@@ -969,19 +963,10 @@ private Predicate buildFieldPresentInDataversePredicate(Dataverse dataverse, boo
// Otherwise, use an always-true predicate (conjunction).
Predicate displayedOnCreatePredicate = onlyDisplayedOnCreate
? criteriaBuilder.or(
// 1. Field marked as displayOnCreate in input level
displayOnCreateInputLevelPredicate,

// 2. Field without input level that is marked as displayOnCreate or required
criteriaBuilder.and(
hasNoInputLevelPredicate,
criteriaBuilder.or(
criteriaBuilder.or(
criteriaBuilder.isTrue(datasetFieldTypeRoot.get("displayOnCreate")),
fieldRequiredInTheInstallation
)
),

// 3. Field required by input level
requiredAsInputLevelPredicate
)
: criteriaBuilder.conjunction();
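For readers untangling the criteria API, a hypothetical plain-Java restatement of the simplified predicate (the record and method are invented for illustration; the real logic lives in the JPA criteria objects above):

```java
// Hypothetical restatement of the simplified displayedOnCreate predicate; not code from this commit.
record FieldView(boolean hasInputLevel, boolean displayOnCreate,
                 boolean requiredInInstallation, boolean requiredByInputLevel) {}

static boolean displayedOnCreate(boolean onlyDisplayedOnCreate, FieldView f) {
    if (!onlyDisplayedOnCreate) {
        return true; // criteriaBuilder.conjunction(): no filtering requested
    }
    // 1. Field without an input level that is marked displayOnCreate or required installation-wide
    boolean noInputLevelCase = !f.hasInputLevel()
            && (f.displayOnCreate() || f.requiredInInstallation());
    // 2. Field required by an input level
    return noInputLevelCase || f.requiredByInputLevel();
}
```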
@@ -241,10 +241,6 @@ private boolean isValidDate(String dateString, String pattern) {
return valid;
}

public boolean isValidAuthorIdentifier(String userInput, Pattern pattern) {
return pattern.matcher(userInput).matches();
}

// Validate child fields against each other and return failure message or Optional.empty() if success
public Optional<String> validateChildConstraints(DatasetField dsf) {
final String fieldName = dsf.getDatasetFieldType().getName() != null ? dsf.getDatasetFieldType().getName() : "";
1 change: 0 additions & 1 deletion src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
@@ -1855,7 +1855,6 @@ private void updateDatasetFieldInputLevels() {
if (dsf != null){
// Yes, call "setInclude"
dsf.setInclude(oneDSFieldTypeInputLevel.isInclude());
dsf.getDatasetFieldType().setDisplayOnCreate(oneDSFieldTypeInputLevel.isDisplayOnCreate());
// remove from hash
mapDatasetFields.remove(oneDSFieldTypeInputLevel.getDatasetFieldType().getId());
}
6 changes: 0 additions & 6 deletions src/main/java/edu/harvard/iq/dataverse/Dataverse.java
@@ -438,12 +438,6 @@ public boolean isDatasetFieldTypeInInputLevels(Long datasetFieldTypeId) {
.anyMatch(inputLevel -> inputLevel.getDatasetFieldType().getId().equals(datasetFieldTypeId));
}

public boolean isDatasetFieldTypeDisplayOnCreateAsInputLevel(Long datasetFieldTypeId) {
return dataverseFieldTypeInputLevels.stream()
.anyMatch(inputLevel -> inputLevel.getDatasetFieldType().getId().equals(datasetFieldTypeId)
&& inputLevel.isDisplayOnCreate());
}

public Template getDefaultTemplate() {
return defaultTemplate;
}
@@ -58,16 +58,14 @@ public class DataverseFieldTypeInputLevel implements Serializable {
private DatasetFieldType datasetFieldType;
private boolean include;
private boolean required;
private boolean displayOnCreate;

public DataverseFieldTypeInputLevel () {}

public DataverseFieldTypeInputLevel (DatasetFieldType fieldType, Dataverse dataverse, boolean required, boolean include, boolean displayOnCreate) {
public DataverseFieldTypeInputLevel (DatasetFieldType fieldType, Dataverse dataverse, boolean required, boolean include) {
this.datasetFieldType = fieldType;
this.dataverse = dataverse;
this.required = required;
this.include = include;
this.displayOnCreate = displayOnCreate;
}

public Long getId() {
@@ -117,14 +115,6 @@ public void setRequired(boolean required) {
this.required = required;
}

public boolean isDisplayOnCreate() {
return displayOnCreate;
}

public void setDisplayOnCreate(boolean displayOnCreate) {
this.displayOnCreate = displayOnCreate;
}

@Override
public boolean equals(Object object) {
// TODO: Warning - this method won't work in the case the id fields are not set
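With displayOnCreate gone, the class is back to its four-argument constructor; a minimal hypothetical usage sketch (fieldType and dataverse are assumed to be in scope):

```java
// Hypothetical usage of the restored four-argument constructor:
// mark a field as required and included for a collection.
DataverseFieldTypeInputLevel level =
        new DataverseFieldTypeInputLevel(fieldType, dataverse, /* required */ true, /* include */ true);
```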
@@ -117,13 +117,4 @@ public void create(DataverseFieldTypeInputLevel dataverseFieldTypeInputLevel) {
em.persist(dataverseFieldTypeInputLevel);
}

public DataverseFieldTypeInputLevel save(DataverseFieldTypeInputLevel inputLevel) {
if (inputLevel.getId() == null) {
em.persist(inputLevel);
return inputLevel;
} else {
return em.merge(inputLevel);
}
}

}
132 changes: 53 additions & 79 deletions src/main/java/edu/harvard/iq/dataverse/DataversePage.java
@@ -627,17 +627,44 @@ public String save() {
if (dataverse.isMetadataBlockRoot() && (mdb.isSelected() || mdb.isRequired())) {
selectedBlocks.add(mdb);
for (DatasetFieldType dsft : mdb.getDatasetFieldTypes()) {
if (!dsft.isChild()) {
// Save input level for parent field
saveInputLevels(listDFTIL, dsft, dataverse);
// currently we don't allow input levels for setting an optional field as conditionally required
// so we skip looking at parents (which get set automatically with their children)
if (!dsft.isHasChildren() && dsft.isRequiredDV()) {
boolean addRequiredInputLevels = false;
boolean parentAlreadyAdded = false;

// Handle child fields
if (dsft.isHasChildren()) {
for (DatasetFieldType child : dsft.getChildDatasetFieldTypes()) {
saveInputLevels(listDFTIL, child, dataverse);
}
if (!dsft.isHasParent() && dsft.isInclude()) {
addRequiredInputLevels = !dsft.isRequired();
}
if (dsft.isHasParent() && dsft.getParentDatasetFieldType().isInclude()) {
addRequiredInputLevels = !dsft.isRequired() || !dsft.getParentDatasetFieldType().isRequired();
}

if (addRequiredInputLevels) {
listDFTIL.add(new DataverseFieldTypeInputLevel(dsft, dataverse, true, true));

//also add the parent as required (if it hasn't been added already)
// todo: review needed .equals() methods, then change this to use a Set, in order to simplify code
if (dsft.isHasParent()) {
DataverseFieldTypeInputLevel parentToAdd = new DataverseFieldTypeInputLevel(dsft.getParentDatasetFieldType(), dataverse, true, true);
for (DataverseFieldTypeInputLevel dataverseFieldTypeInputLevel : listDFTIL) {
if (dataverseFieldTypeInputLevel.getDatasetFieldType().getId() == parentToAdd.getDatasetFieldType().getId()) {
parentAlreadyAdded = true;
break;
}
}
if (!parentAlreadyAdded) {
// Only add the parent once. There's a UNIQUE (dataverse_id, datasetfieldtype_id)
// constraint on the dataversefieldtypeinputlevel table we need to avoid.
listDFTIL.add(parentToAdd);
}
}
}
}
if ((!dsft.isHasParent() && !dsft.isInclude())
|| (dsft.isHasParent() && !dsft.getParentDatasetFieldType().isInclude())) {
listDFTIL.add(new DataverseFieldTypeInputLevel(dsft, dataverse, false, false));
}
}
}
}
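The TODO above suggests switching to a Set once the equals() methods are reviewed; a hypothetical sketch of that simplification, keying on DatasetFieldType ids instead of relying on equals() (the helper name is invented; it uses java.util.HashSet and java.util.Set):

```java
// Hypothetical Set-based rewrite of the duplicate-parent check; ids assumed non-null.
static void addParentOnce(List<DataverseFieldTypeInputLevel> listDFTIL,
                          DatasetFieldType parent, Dataverse dataverse) {
    Set<Long> fieldTypeIdsInList = new HashSet<>();
    for (DataverseFieldTypeInputLevel level : listDFTIL) {
        fieldTypeIdsInList.add(level.getDatasetFieldType().getId());
    }
    // Set.add returns false when the id is already present, so the parent is added
    // at most once, avoiding a violation of the UNIQUE (dataverse_id, datasetfieldtype_id) constraint.
    if (fieldTypeIdsInList.add(parent.getId())) {
        listDFTIL.add(new DataverseFieldTypeInputLevel(parent, dataverse, true, true));
    }
}
```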
@@ -1003,11 +1030,27 @@ private void refreshAllMetadataBlocks() {

for (DatasetFieldType dsft : mdb.getDatasetFieldTypes()) {
if (!dsft.isChild()) {
loadInputLevels(dsft, dataverseIdForInputLevel);
DataverseFieldTypeInputLevel dsfIl = dataverseFieldTypeInputLevelService.findByDataverseIdDatasetFieldTypeId(dataverseIdForInputLevel, dsft.getId());
if (dsfIl != null) {
dsft.setRequiredDV(dsfIl.isRequired());
dsft.setInclude(dsfIl.isInclude());
} else {
dsft.setRequiredDV(dsft.isRequired());
dsft.setInclude(true);
}
dsft.setOptionSelectItems(resetSelectItems(dsft));
if (dsft.isHasChildren()) {
for (DatasetFieldType child : dsft.getChildDatasetFieldTypes()) {
loadInputLevels(child, dataverseIdForInputLevel);
DataverseFieldTypeInputLevel dsfIlChild = dataverseFieldTypeInputLevelService.findByDataverseIdDatasetFieldTypeId(dataverseIdForInputLevel, child.getId());
if (dsfIlChild != null) {
child.setRequiredDV(dsfIlChild.isRequired());
child.setInclude(dsfIlChild.isInclude());
} else {
// in the case of conditionally required (child = true, parent = false)
// we set this to false; i.e. this is the default "don't override" value
child.setRequiredDV(child.isRequired() && dsft.isRequired());
child.setInclude(true);
}
child.setOptionSelectItems(resetSelectItems(child));
}
}
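A short hedged sketch of the conditionally-required default applied to children above (the helper and its arguments are invented; they stand in for child.isRequired() and dsft.isRequired()):

```java
// Hypothetical helper mirroring the child default above.
static boolean defaultRequiredDV(boolean childIsRequired, boolean parentIsRequired) {
    // child=true,  parent=true  -> true  (required everywhere)
    // child=true,  parent=false -> false (conditionally required: enforced only
    //                                     when the optional parent is filled in)
    // child=false, parent=any   -> false
    return childIsRequired && parentIsRequired;
}
```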
@@ -1018,22 +1061,6 @@ private void refreshAllMetadataBlocks() {
setAllMetadataBlocks(retList);
}

private void loadInputLevels(DatasetFieldType dsft, Long dataverseIdForInputLevel) {
DataverseFieldTypeInputLevel dsfIl = dataverseFieldTypeInputLevelService
.findByDataverseIdDatasetFieldTypeId(dataverseIdForInputLevel, dsft.getId());

if (dsfIl != null) {
dsft.setRequiredDV(dsfIl.isRequired());
dsft.setInclude(dsfIl.isInclude());
dsft.setDisplayOnCreate(dsfIl.isDisplayOnCreate());
} else {
// If there is no input level, use the default values
dsft.setRequiredDV(dsft.isRequired());
dsft.setInclude(true);
dsft.setDisplayOnCreate(false);
}
}

public void validateAlias(FacesContext context, UIComponent toValidate, Object value) {
if (!StringUtils.isEmpty((String) value)) {
String alias = (String) value;
@@ -1310,57 +1337,4 @@ public Set<Entry<String, String>> getPidProviderOptions() {
}
return options;
}

public void updateDisplayOnCreate(Long mdbId, Long dsftId, boolean currentValue) {
for (MetadataBlock mdb : allMetadataBlocks) {
if (mdb.getId().equals(mdbId)) {
for (DatasetFieldType dsft : mdb.getDatasetFieldTypes()) {
if (dsft.getId().equals(dsftId)) {
// Update value in memory
dsft.setDisplayOnCreate(!currentValue);

// Update or create input level
DataverseFieldTypeInputLevel existingLevel = dataverseFieldTypeInputLevelService
.findByDataverseIdDatasetFieldTypeId(dataverse.getId(), dsftId);

if (existingLevel != null) {
existingLevel.setDisplayOnCreate(!currentValue);
dataverseFieldTypeInputLevelService.save(existingLevel);
} else {
DataverseFieldTypeInputLevel newLevel = new DataverseFieldTypeInputLevel(
dsft,
dataverse,
dsft.isRequiredDV(),
true, // default include
!currentValue // new value of displayOnCreate
);
dataverseFieldTypeInputLevelService.save(newLevel);
}
}
}
}
}
}

private void saveInputLevels(List<DataverseFieldTypeInputLevel> listDFTIL, DatasetFieldType dsft, Dataverse dataverse) {
// If the field already has an input level, update it
DataverseFieldTypeInputLevel existingLevel = dataverseFieldTypeInputLevelService
.findByDataverseIdDatasetFieldTypeId(dataverse.getId(), dsft.getId());

if (existingLevel != null) {
existingLevel.setDisplayOnCreate(dsft.isDisplayOnCreate());
existingLevel.setInclude(dsft.isInclude());
existingLevel.setRequired(dsft.isRequiredDV());
listDFTIL.add(existingLevel);
} else if (dsft.isInclude() || dsft.isDisplayOnCreate() || dsft.isRequiredDV()) {
// Only create new input level if there is any specific configuration
listDFTIL.add(new DataverseFieldTypeInputLevel(
dsft,
dataverse,
dsft.isRequiredDV(),
dsft.isInclude(),
dsft.isDisplayOnCreate()
));
}
}
}