From e34177cafbd5c6fec77cc1a1c2cd301a82f39366 Mon Sep 17 00:00:00 2001 From: Philipp Conzett Date: Sun, 25 May 2025 10:01:11 +0200 Subject: [PATCH 001/634] Add files via upload --- scripts/api/data/licenses/licenseEUPL-1.2.json | 11 +++++++++++ scripts/api/data/licenses/licenseODC-By-1.0.json | 11 +++++++++++ scripts/api/data/licenses/licenseODbL-1.0.json | 11 +++++++++++ scripts/api/data/licenses/licensePDDL-1.0.json | 11 +++++++++++ 4 files changed, 44 insertions(+) create mode 100644 scripts/api/data/licenses/licenseEUPL-1.2.json create mode 100644 scripts/api/data/licenses/licenseODC-By-1.0.json create mode 100644 scripts/api/data/licenses/licenseODbL-1.0.json create mode 100644 scripts/api/data/licenses/licensePDDL-1.0.json diff --git a/scripts/api/data/licenses/licenseEUPL-1.2.json b/scripts/api/data/licenses/licenseEUPL-1.2.json new file mode 100644 index 00000000000..54b62552d1c --- /dev/null +++ b/scripts/api/data/licenses/licenseEUPL-1.2.json @@ -0,0 +1,11 @@ +{ + "name": "EUPL-1.2", + "uri": "https://joinup.ec.europa.eu/page/eupl-text-11-12", + "shortDescription": "European Union Public License 1.2", + "active": true, + "sortOrder": 13, + "rightsIdentifier": "EUPL-1.2", + "rightsIdentifierScheme": "SPDX", + "schemeUri": "https://spdx.org/licenses/", + "languageCode": "en" +} \ No newline at end of file diff --git a/scripts/api/data/licenses/licenseODC-By-1.0.json b/scripts/api/data/licenses/licenseODC-By-1.0.json new file mode 100644 index 00000000000..fdeab439d86 --- /dev/null +++ b/scripts/api/data/licenses/licenseODC-By-1.0.json @@ -0,0 +1,11 @@ +{ + "name": "ODC-By-1.0", + "uri": "https://opendatacommons.org/licenses/by/1.0/", + "shortDescription": "Open Data Commons Attribution License v1.0", + "active": true, + "sortOrder": 12, + "rightsIdentifier": "ODC-By-1.0", + "rightsIdentifierScheme": "SPDX", + "schemeUri": "https://spdx.org/licenses/", + "languageCode": "en" +} \ No newline at end of file diff --git 
a/scripts/api/data/licenses/licenseODbL-1.0.json b/scripts/api/data/licenses/licenseODbL-1.0.json new file mode 100644 index 00000000000..bf13ad404b4 --- /dev/null +++ b/scripts/api/data/licenses/licenseODbL-1.0.json @@ -0,0 +1,11 @@ +{ + "name": "ODbL-1.0", + "uri": "http://www.opendatacommons.org/licenses/odbl/1.0/", + "shortDescription": "Open Data Commons Open Database License v1.0", + "active": true, + "sortOrder": 11, + "rightsIdentifier": "ODbL-1.0", + "rightsIdentifierScheme": "SPDX", + "schemeUri": "https://spdx.org/licenses/", + "languageCode": "en" +} \ No newline at end of file diff --git a/scripts/api/data/licenses/licensePDDL-1.0.json b/scripts/api/data/licenses/licensePDDL-1.0.json new file mode 100644 index 00000000000..ae01f3d2109 --- /dev/null +++ b/scripts/api/data/licenses/licensePDDL-1.0.json @@ -0,0 +1,11 @@ +{ + "name": "PDDL-1.0", + "uri": "http://opendatacommons.org/licenses/pddl/1.0/", + "shortDescription": "Open Data Commons Public Domain Dedication & License 1.0", + "active": true, + "sortOrder": 12, + "rightsIdentifier": "PDDL-1.0", + "rightsIdentifierScheme": "SPDX", + "schemeUri": "https://spdx.org/licenses/", + "languageCode": "en" +} \ No newline at end of file From f74b832ed160481a90f63344a73f607becae9e45 Mon Sep 17 00:00:00 2001 From: Philipp Conzett Date: Sun, 25 May 2025 11:07:51 +0200 Subject: [PATCH 002/634] Update config.rst --- doc/sphinx-guides/source/installation/config.rst | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index d5fa101b095..d06ee9b242e 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -2038,13 +2038,23 @@ JSON files for `Creative Commons licenses `_ are provided below. 
+ +- :download:`licenseODbL-1.0.json <../../../../scripts/api/data/licenses/licenseODbL-1.0.json>` +- :download:`licenseODC-By-1.0.json <../../../../scripts/api/data/licenses/licenseODC-By-1.0.json>` +- :download:`licensePDDL-1.0.json <../../../../scripts/api/data/licenses/licensePDDL-1.0.json>` + Adding Software Licenses ^^^^^^^^^^^^^^^^^^^^^^^^ JSON files for software licenses are provided below. -- :download:`licenseMIT.json <../../../../scripts/api/data/licenses/licenseMIT.json>` - :download:`licenseApache-2.0.json <../../../../scripts/api/data/licenses/licenseApache-2.0.json>` +- :download:`licenseMIT.json <../../../../scripts/api/data/licenses/licenseMIT.json>` +- :download:`licenseEUPL-1.2.json <../../../../scripts/api/data/licenses/licenseEUPL-1.2.json>` Adding Country-Specific Licenses ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ From f2747649efa5ce2406e2f985eddfdda3f500375b Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 28 May 2025 10:09:54 -0400 Subject: [PATCH 003/634] make sort order unique #11522 --- scripts/api/data/licenses/licenseEUPL-1.2.json | 4 ++-- scripts/api/data/licenses/licensePDDL-1.0.json | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/api/data/licenses/licenseEUPL-1.2.json b/scripts/api/data/licenses/licenseEUPL-1.2.json index 54b62552d1c..cb3afad2fea 100644 --- a/scripts/api/data/licenses/licenseEUPL-1.2.json +++ b/scripts/api/data/licenses/licenseEUPL-1.2.json @@ -3,9 +3,9 @@ "uri": "https://joinup.ec.europa.eu/page/eupl-text-11-12", "shortDescription": "European Union Public License 1.2", "active": true, - "sortOrder": 13, + "sortOrder": 14, "rightsIdentifier": "EUPL-1.2", "rightsIdentifierScheme": "SPDX", "schemeUri": "https://spdx.org/licenses/", "languageCode": "en" -} \ No newline at end of file +} diff --git a/scripts/api/data/licenses/licensePDDL-1.0.json b/scripts/api/data/licenses/licensePDDL-1.0.json index ae01f3d2109..986b97e5b01 100644 --- a/scripts/api/data/licenses/licensePDDL-1.0.json +++ 
b/scripts/api/data/licenses/licensePDDL-1.0.json @@ -3,9 +3,9 @@ "uri": "http://opendatacommons.org/licenses/pddl/1.0/", "shortDescription": "Open Data Commons Public Domain Dedication & License 1.0", "active": true, - "sortOrder": 12, + "sortOrder": 13, "rightsIdentifier": "PDDL-1.0", "rightsIdentifierScheme": "SPDX", "schemeUri": "https://spdx.org/licenses/", "languageCode": "en" -} \ No newline at end of file +} From b6dbf922fb4bc50742240fc9ab9443f529110443 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 28 May 2025 10:16:54 -0400 Subject: [PATCH 004/634] add release note snippet --- doc/release-notes/11522-licenses-added.md | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 doc/release-notes/11522-licenses-added.md diff --git a/doc/release-notes/11522-licenses-added.md b/doc/release-notes/11522-licenses-added.md new file mode 100644 index 00000000000..0639b86a931 --- /dev/null +++ b/doc/release-notes/11522-licenses-added.md @@ -0,0 +1,13 @@ +### Additional licenses + +The following Open Data Commons licenses have been added: + +- Open Database License (ODbL) +- Open Data Commons Attribution License (ODC-By) +- Open Data Commons Public Domain Dedication and License (PDDL)) + +The following software license has been added: + +- European Union Public License (EUPL) + +See [the guides](https://guides.dataverse.org/en/6.7/installation/config.html#configuring-licenses) and #11522. 
From 7f6aaad27c02af31e13bfec7d515b8a869ccd0da Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 28 May 2025 10:36:18 -0400 Subject: [PATCH 005/634] add OGL-UK-3.0 license #9403 #11521 --- doc/release-notes/11522-licenses-added.md | 4 ++++ doc/sphinx-guides/source/installation/config.rst | 1 + scripts/api/data/licenses/licenseOGL-UK-3.0.json | 11 +++++++++++ 3 files changed, 16 insertions(+) create mode 100644 scripts/api/data/licenses/licenseOGL-UK-3.0.json diff --git a/doc/release-notes/11522-licenses-added.md b/doc/release-notes/11522-licenses-added.md index 0639b86a931..1a02916f769 100644 --- a/doc/release-notes/11522-licenses-added.md +++ b/doc/release-notes/11522-licenses-added.md @@ -10,4 +10,8 @@ The following software license has been added: - European Union Public License (EUPL) +The following country-specific license has been added: + +- Open Government Licence (OGL UK) + See [the guides](https://guides.dataverse.org/en/6.7/installation/config.html#configuring-licenses) and #11522. diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index d06ee9b242e..0a09524abe3 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -2060,6 +2060,7 @@ Adding Country-Specific Licenses ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - :download:`licenseEtalab-2.0.json <../../../../scripts/api/data/licenses/licenseEtalab-2.0.json>` used in France (Etalab Open License 2.0, CC-BY 2.0 compliant). 
+- :download:`licenseOGL-UK-3.0.json <../../../../scripts/api/data/licenses/licenseOGL-UK-3.0.json>` Contributing to the Collection of Standard Licenses Above ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/scripts/api/data/licenses/licenseOGL-UK-3.0.json b/scripts/api/data/licenses/licenseOGL-UK-3.0.json new file mode 100644 index 00000000000..7363645e075 --- /dev/null +++ b/scripts/api/data/licenses/licenseOGL-UK-3.0.json @@ -0,0 +1,11 @@ +{ + "name": "OGL UK 3.0", + "uri": "https://www.nationalarchives.gov.uk/doc/open-government-licence/version/3", + "shortDescription": "Open Government Licence v3.0.", + "active": true, + "sortOrder": 15, + "rightsIdentifier": "OGL-UK-3.0", + "rightsIdentifierScheme": "SPDX", + "schemeUri": "https://spdx.org/licenses/", + "languageCode": "en" +} From 7ac49381c58d134a23fb128065def0a05284071a Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 28 May 2025 10:38:43 -0400 Subject: [PATCH 006/634] use suggested text in release note snippet #11521 --- doc/release-notes/11522-licenses-added.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/11522-licenses-added.md b/doc/release-notes/11522-licenses-added.md index 1a02916f769..6afbb0fcb2c 100644 --- a/doc/release-notes/11522-licenses-added.md +++ b/doc/release-notes/11522-licenses-added.md @@ -14,4 +14,4 @@ The following country-specific license has been added: - Open Government Licence (OGL UK) -See [the guides](https://guides.dataverse.org/en/6.7/installation/config.html#configuring-licenses) and #11522. +The licenses above are widely recognized and used in Europe and beyond to promote data and software sharing. See [the guides](https://guides.dataverse.org/en/6.7/installation/config.html#configuring-licenses) and #11522. 
From 9196846820df730722810a352f8d79217600e24f Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 28 May 2025 10:51:18 -0400 Subject: [PATCH 007/634] clarify how to add licenses #11521 #10426 --- doc/sphinx-guides/source/installation/config.rst | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 0a09524abe3..97f83fa4a43 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -2072,10 +2072,14 @@ If you do not find the license JSON you need above, you are encouraged to contri - Copy an existing license as a starting point. - Name your file using the SPDX identifier. For example, if the identifier is ``Apache-2.0``, you should name your file ``licenseApache-2.0.json``. - For the ``name`` field, use the "short identifier" from the SPDX landing page (e.g. ``Apache-2.0``). -- For the ``description`` field, use the "full name" from the SPDX landing page (e.g. ``Apache License 2.0``). +- For the ``shortDescription`` field, use the "full name" from the SPDX landing page (e.g. ``Apache License 2.0``) followed by a period (full-stop) (e.g. ``Apache License 2.0.``). - For the ``uri`` field, we encourage you to use the same resource that DataCite uses, which is often the same as the first "Other web pages for this license" on the SPDX page for the license. When these differ, or there are other concerns about the URI DataCite uses, please reach out to the community to see if a consensus can be reached. - For the ``active`` field, put ``true``. - For the ``sortOrder`` field, put the next sequential number after checking previous files with ``grep sortOrder scripts/api/data/licenses/*``. +- For the ``rightsIdentifier`` field, use the identifier from SPDX (e.g. ``Apache-2.0``). +- For the ``rightsIdentifierScheme`` field, use "SPDX". 
+- For the ``schemeUri`` field, use "https://spdx.org/licenses/". +- For the ``languageCode`` field, use "en". Note that prior to Dataverse 6.2, various license above have been added that do not adhere perfectly with this procedure. For example, the ``name`` for the CC0 license is ``CC0 1.0`` (no dash) rather than ``CC0-1.0`` (with a dash). We are keeping the existing names for backward compatibility. For more on standarizing license configuration, see https://github.com/IQSS/dataverse/issues/8512 From 253a0342528ba7103d1aff1dfb23ce756e91bb20 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 8 Apr 2025 13:32:53 -0400 Subject: [PATCH 008/634] basic roleassignment auditing for assign, revoke --- .../dataverse/DataverseRoleServiceBean.java | 50 +++++- .../iq/dataverse/RoleAssignmentAudit.java | 157 ++++++++++++++++++ .../command/impl/AssignRoleCommand.java | 37 +++-- .../CuratePublishedDatasetVersionCommand.java | 2 +- .../impl/DeleteDatasetVersionCommand.java | 2 +- .../command/impl/DeleteRoleCommand.java | 2 +- .../command/impl/RevokeAllRolesCommand.java | 2 +- .../command/impl/RevokeRoleCommand.java | 20 +-- .../iq/dataverse/settings/FeatureFlags.java | 5 + .../impl/DeletePrivateUrlCommandTest.java | 2 +- 10 files changed, 242 insertions(+), 37 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/RoleAssignmentAudit.java diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java index b751841da74..00368ff0b67 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java @@ -4,6 +4,7 @@ import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.RoleAssignee; import edu.harvard.iq.dataverse.authorization.users.User; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import 
edu.harvard.iq.dataverse.authorization.RoleAssignmentSet; import edu.harvard.iq.dataverse.search.IndexAsync; import edu.harvard.iq.dataverse.search.IndexResponse; @@ -13,7 +14,6 @@ import java.util.Collection; import java.util.HashSet; import java.util.List; -import java.util.Objects; import java.util.Set; import java.util.logging.Logger; import java.util.stream.Collectors; @@ -23,6 +23,7 @@ import jakarta.persistence.EntityManager; import jakarta.persistence.PersistenceContext; import jakarta.persistence.TypedQuery; +import edu.harvard.iq.dataverse.settings.FeatureFlags; /** * @@ -87,6 +88,23 @@ public RoleAssignment save(RoleAssignment assignment, boolean createIndex) { } return assignment; } + + + /** + * Saves a RoleAssignmentAudit entry to the database. + * + * @param audit The RoleAssignmentAudit object to be saved + * @return The persisted RoleAssignmentAudit object + */ + public RoleAssignmentAudit saveAudit(RoleAssignmentAudit audit) { + if (audit.getAuditId() == null) { + em.persist(audit); + em.flush(); // Ensure the entity is persisted immediately + } else { + audit = em.merge(audit); + } + return audit; + } private IndexResponse indexDefinitionPoint(DvObject definitionPoint) { /** @@ -135,7 +153,7 @@ public DataverseRole findCustomRoleByAliasAndOwner(String alias, Long ownerId) { .getSingleResult(); } - public void revoke(Set roles, RoleAssignee assignee, DvObject defPoint) { +/* public void revoke(Set roles, RoleAssignee assignee, DvObject defPoint) { for (DataverseRole role : roles) { em.createNamedQuery("RoleAssignment.deleteByAssigneeIdentifier_RoleIdDefinition_PointId") .setParameter("assigneeIdentifier", assignee.getIdentifier()) @@ -146,34 +164,47 @@ public void revoke(Set roles, RoleAssignee assignee, DvObject def } em.refresh(assignee); } - - public void revoke(RoleAssignment ra) { +*/ + public void revoke(RoleAssignment ra, DataverseRequest req) { if (!em.contains(ra)) { ra = em.merge(ra); } + + // Create audit entry if feature flag is set + 
if (FeatureFlags.ROLE_ASSIGNMENT_AUDITING.enabled()) { + RoleAssignmentAudit audit = new RoleAssignmentAudit(ra, req, RoleAssignmentAudit.ActionType.REVOKE); + saveAudit(audit); + } + em.remove(ra); /** * @todo update permissionModificationTime here. */ indexAsync.indexRole(ra); } - + // "nuclear" remove-all roles for a user or group: // (Note that all the "definition points" - i.e., the dvObjects // on which the roles were assigned - need to be reindexed for permissions // once the role assignments are removed! - public void revokeAll(RoleAssignee assignee) { + public void revokeAll(RoleAssignee assignee, DataverseRequest req) { Set reindexSet = new HashSet<>(); - + for (RoleAssignment ra : roleAssigneeService.getAssignmentsFor(assignee.getIdentifier())) { if (!em.contains(ra)) { ra = em.merge(ra); } + + // Create audit entry if feature flag is set + if (FeatureFlags.ROLE_ASSIGNMENT_AUDITING.enabled()) { + RoleAssignmentAudit audit = new RoleAssignmentAudit(ra, req, RoleAssignmentAudit.ActionType.REVOKE); + saveAudit(audit); + } + em.remove(ra); - reindexSet.add(ra.getDefinitionPoint()); } - + indexAsync.indexRoles(reindexSet); } @@ -329,4 +360,5 @@ For a given permission and dataverse Id get all of the roles (built-in or owned } return retVal; } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/RoleAssignmentAudit.java b/src/main/java/edu/harvard/iq/dataverse/RoleAssignmentAudit.java new file mode 100644 index 00000000000..2eafafb6de4 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/RoleAssignmentAudit.java @@ -0,0 +1,157 @@ +package edu.harvard.iq.dataverse; + +import jakarta.persistence.Column; +import jakarta.persistence.Entity; +import jakarta.persistence.EnumType; +import jakarta.persistence.Enumerated; +import jakarta.persistence.GeneratedValue; +import jakarta.persistence.GenerationType; +import jakarta.persistence.Id; +import jakarta.persistence.Table; +import jakarta.persistence.Temporal; +import jakarta.persistence.TemporalType; +import 
java.io.Serializable; +import java.util.Date; + +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; + +@Entity +@Table(name = "role_assignment_audit") +public class RoleAssignmentAudit implements Serializable { + + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + @Column(name = "audit_id") + private Long auditId; + + @Column(name = "role_assignment_id") + private Long roleAssignmentId; + + @Enumerated(EnumType.STRING) + @Column(name = "action_type", nullable = false) + private ActionType actionType; + + @Temporal(TemporalType.TIMESTAMP) + @Column(name = "action_timestamp", nullable = false) + private Date actionTimestamp; + + @Column(name = "action_by_identifier", nullable = false) + private String actionByIdentifier; + + @Column(name = "assignee_identifier", nullable = false) + private String assigneeIdentifier; + + @Column(name = "role_id") + private Long roleId; + + @Column(name = "role_alias", nullable = false) + private String roleAlias; + + @Column(name = "definition_point_id") + private Long definitionPointId; + + @Column(name = "definition_point_identifier", nullable = false) + private String definitionPointIdentifier; + + public enum ActionType { + ASSIGN, REVOKE + } + + // Constructors + public RoleAssignmentAudit() { + } + + public RoleAssignmentAudit(RoleAssignment roleAssignment, DataverseRequest request, ActionType actionType) { + this.roleAssignmentId = roleAssignment.getId(); + this.actionType = actionType; + this.actionTimestamp = new Date(); + this.actionByIdentifier = request.getUser().getIdentifier(); + this.assigneeIdentifier = roleAssignment.getAssigneeIdentifier(); + this.roleId = roleAssignment.getRole().getId(); + this.roleAlias = roleAssignment.getRole().getAlias(); + this.definitionPointId = roleAssignment.getDefinitionPoint().getId(); + this.definitionPointIdentifier = roleAssignment.getDefinitionPoint().getIdentifier(); + } + + // Getters and setters + public Long getAuditId() { + return auditId; + } + + public 
void setAuditId(Long auditId) { + this.auditId = auditId; + } + + public Long getRoleAssignmentId() { + return roleAssignmentId; + } + + public void setRoleAssignmentId(Long roleAssignmentId) { + this.roleAssignmentId = roleAssignmentId; + } + + public ActionType getActionType() { + return actionType; + } + + public void setActionType(ActionType actionType) { + this.actionType = actionType; + } + + public Date getActionTimestamp() { + return actionTimestamp; + } + + public void setActionTimestamp(Date actionTimestamp) { + this.actionTimestamp = actionTimestamp; + } + + public String getActionByIdentifier() { + return actionByIdentifier; + } + + public void setActionByIdentifier(String actionByIdentifier) { + this.actionByIdentifier = actionByIdentifier; + } + + public String getAssigneeIdentifier() { + return assigneeIdentifier; + } + + public void setAssigneeIdentifier(String assigneeIdentifier) { + this.assigneeIdentifier = assigneeIdentifier; + } + + public Long getRoleId() { + return roleId; + } + + public void setRoleId(Long roleId) { + this.roleId = roleId; + } + + public String getRoleAlias() { + return roleAlias; + } + + public void setRoleAlias(String roleAlias) { + this.roleAlias = roleAlias; + } + + public Long getDefinitionPointId() { + return definitionPointId; + } + + public void setDefinitionPointId(Long definitionPointId) { + this.definitionPointId = definitionPointId; + } + + public String getDefinitionPointIdentifier() { + return definitionPointIdentifier; + } + + public void setDefinitionPointIdentifier(String definitionPointIdentifier) { + this.definitionPointIdentifier = definitionPointIdentifier; + } + +} \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AssignRoleCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AssignRoleCommand.java index 121af765737..2bd8b6c1090 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AssignRoleCommand.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AssignRoleCommand.java @@ -19,7 +19,10 @@ import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import edu.harvard.iq.dataverse.util.BundleUtil; +import edu.harvard.iq.dataverse.RoleAssignmentAudit; +import edu.harvard.iq.dataverse.settings.FeatureFlags; import java.util.Collections; +import java.util.Date; import java.util.HashSet; import java.util.Map; import java.util.Set; @@ -61,21 +64,29 @@ public AssignRoleCommand(PrivateUrlUser privateUrlUser, DataverseRole memberRole this.anonymizedAccess= anonymizedAccess; } - @Override - public RoleAssignment execute(CommandContext ctxt) throws CommandException { - if (grantee instanceof AuthenticatedUser) { - AuthenticatedUser user = (AuthenticatedUser) grantee; - if (user.isDeactivated()) { - throw new IllegalCommandException("User " + user.getUserIdentifier() + " is deactivated and cannot be given a role.", this); - } - } - if(isExistingRole(ctxt)){ - throw new IllegalCommandException(BundleUtil.getStringFromBundle("datasets.api.grant.role.assignee.has.role.error"), this); +@Override +public RoleAssignment execute(CommandContext ctxt) throws CommandException { + if (grantee instanceof AuthenticatedUser) { + AuthenticatedUser user = (AuthenticatedUser) grantee; + if (user.isDeactivated()) { + throw new IllegalCommandException("User " + user.getUserIdentifier() + " is deactivated and cannot be given a role.", this); } - // TODO make sure the role is defined on the dataverse. - RoleAssignment roleAssignment = new RoleAssignment(role, grantee, defPoint, privateUrlToken, anonymizedAccess); - return ctxt.roles().save(roleAssignment); } + if(isExistingRole(ctxt)){ + throw new IllegalCommandException(BundleUtil.getStringFromBundle("datasets.api.grant.role.assignee.has.role.error"), this); + } + // TODO make sure the role is defined on the dataverse. 
+ RoleAssignment roleAssignment = new RoleAssignment(role, grantee, defPoint, privateUrlToken, anonymizedAccess); + RoleAssignment savedRoleAssignment = ctxt.roles().save(roleAssignment); + + // Check if ROLE_ASSIGNMENT_AUDITING feature flag is enabled + if (FeatureFlags.ROLE_ASSIGNMENT_AUDITING.enabled()) { + RoleAssignmentAudit audit = new RoleAssignmentAudit(savedRoleAssignment, getRequest(), RoleAssignmentAudit.ActionType.ASSIGN); + ctxt.roles().saveAudit(audit); + } + + return savedRoleAssignment; +} private boolean isExistingRole(CommandContext ctxt) { return ctxt.roles() diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java index 3629432b7e4..45cee6e9595 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java @@ -189,7 +189,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { RoleAssignment ra = ctxt.privateUrl().getPrivateUrlRoleAssignmentFromDataset(savedDataset); if (ra != null) { - ctxt.roles().revoke(ra); + ctxt.roles().revoke(ra, getRequest()); } // And update metadata at PID provider diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetVersionCommand.java index a67d7008ef8..efb49a67b9d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetVersionCommand.java @@ -92,7 +92,7 @@ protected void executeImpl(CommandContext ctxt) throws CommandException { PrivateUrlUser privateUrlUser = new PrivateUrlUser(doomed.getId()); List roleAssignments = 
ctxt.roles().directRoleAssignments(privateUrlUser, doomed); for (RoleAssignment roleAssignment : roleAssignments) { - ctxt.roles().revoke(roleAssignment); + ctxt.roles().revoke(roleAssignment, getRequest()); } } boolean doNormalSolrDocCleanUp = true; diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteRoleCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteRoleCommand.java index b0baf3a24e1..5608eb5af1f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteRoleCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteRoleCommand.java @@ -25,7 +25,7 @@ public DeleteRoleCommand(DataverseRequest aRequest, DataverseRole doomed ) { @Override protected void executeImpl(CommandContext ctxt) throws CommandException { for ( RoleAssignment ra : ctxt.roles().roleAssignments(doomed.getId()) ) { - ctxt.roles().revoke(ra); + ctxt.roles().revoke(ra, getRequest()); } ctxt.roles().delete(doomed.getId()); } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RevokeAllRolesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RevokeAllRolesCommand.java index e44431591c2..ed16889f736 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RevokeAllRolesCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RevokeAllRolesCommand.java @@ -42,7 +42,7 @@ protected void executeImpl(CommandContext ctxt) throws CommandException { } try { - ctxt.roles().revokeAll(assignee); + ctxt.roles().revokeAll(assignee, getRequest()); ctxt.explicitGroups().revokeAllGroupsForAssignee(assignee); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RevokeRoleCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RevokeRoleCommand.java index 26ab88d29d8..f2899408393 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RevokeRoleCommand.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RevokeRoleCommand.java @@ -19,18 +19,18 @@ */ // no annotations here, since permissions are dynamically decided public class RevokeRoleCommand extends AbstractVoidCommand { - - private final RoleAssignment toBeRevoked; - public RevokeRoleCommand(RoleAssignment toBeRevoked, DataverseRequest aRequest) { + private final RoleAssignment toBeRevoked; + + public RevokeRoleCommand(RoleAssignment toBeRevoked, DataverseRequest aRequest) { super(aRequest, toBeRevoked.getDefinitionPoint()); - this.toBeRevoked = toBeRevoked; - } - - @Override - protected void executeImpl(CommandContext ctxt) throws CommandException { - ctxt.roles().revoke(toBeRevoked); - } + this.toBeRevoked = toBeRevoked; + } + + @Override + protected void executeImpl(CommandContext ctxt) throws CommandException { + ctxt.roles().revoke(toBeRevoked, getRequest()); + } @Override public Map> getRequiredPermissions() { diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java b/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java index 27c65ed067c..539e898255c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java @@ -167,6 +167,11 @@ public enum FeatureFlags { */ ADD_LOCAL_CONTEXTS_PERMISSION_CHECK("add-local-contexts-permission-check"), + /** + * This flag turns on auditing of role assignments - keeping a record of when roles were granted + * or revoked, at what times, and by whom. 
+ */ + ROLE_ASSIGNMENT_AUDITING("role-assignment-auditing"), ; final String flag; diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/DeletePrivateUrlCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/DeletePrivateUrlCommandTest.java index 0a4e5ed2d7e..be633a68dba 100644 --- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/DeletePrivateUrlCommandTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/DeletePrivateUrlCommandTest.java @@ -64,7 +64,7 @@ public List directRoleAssignments(RoleAssignee roas, DvObject dv } @Override - public void revoke(RoleAssignment ra) { + public void revoke(RoleAssignment ra, DataverseRequest req) { // no-op } From a7f4b61c09505ffa92a8b6541b6a9823df1733d0 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 8 Apr 2025 13:35:45 -0400 Subject: [PATCH 009/634] add indexes --- .../edu/harvard/iq/dataverse/RoleAssignmentAudit.java | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/RoleAssignmentAudit.java b/src/main/java/edu/harvard/iq/dataverse/RoleAssignmentAudit.java index 2eafafb6de4..2645f641cc0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/RoleAssignmentAudit.java +++ b/src/main/java/edu/harvard/iq/dataverse/RoleAssignmentAudit.java @@ -10,13 +10,22 @@ import jakarta.persistence.Table; import jakarta.persistence.Temporal; import jakarta.persistence.TemporalType; +import jakarta.persistence.Index; import java.io.Serializable; import java.util.Date; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; @Entity -@Table(name = "role_assignment_audit") +@Table(name = "role_assignment_audit", indexes = { + @Index(name = "idx_raa_role_assignment_id", columnList = "role_assignment_id"), + @Index(name = "idx_raa_action_type", columnList = "action_type"), + @Index(name = "idx_raa_action_timestamp", columnList = "action_timestamp"), + @Index(name = "idx_raa_action_by_identifier", 
columnList = "action_by_identifier"), + @Index(name = "idx_raa_assignee_identifier", columnList = "assignee_identifier"), + @Index(name = "idx_raa_role_id", columnList = "role_id"), + @Index(name = "idx_raa_definition_point_id", columnList = "definition_point_id") +}) public class RoleAssignmentAudit implements Serializable { @Id From bf0beedc2b1bf23329a51e0d14096ebdd447904c Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 8 Apr 2025 13:55:39 -0400 Subject: [PATCH 010/634] missing import --- .../engine/command/impl/DeletePrivateUrlCommandTest.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/DeletePrivateUrlCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/DeletePrivateUrlCommandTest.java index be633a68dba..640c66b61e4 100644 --- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/DeletePrivateUrlCommandTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/DeletePrivateUrlCommandTest.java @@ -8,6 +8,7 @@ import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser; import edu.harvard.iq.dataverse.engine.TestCommandContext; import edu.harvard.iq.dataverse.engine.TestDataverseEngine; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.privateurl.PrivateUrl; import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean; From 47e98050eedf41159a783f4a7eedb9f359c63a16 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 8 Apr 2025 14:34:47 -0400 Subject: [PATCH 011/634] refresh assignment to get id --- .../harvard/iq/dataverse/DataverseRoleServiceBean.java | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java index 00368ff0b67..17938d900a3 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java @@ -77,15 +77,16 @@ public RoleAssignment save(RoleAssignment assignment) { public RoleAssignment save(RoleAssignment assignment, boolean createIndex) { if (assignment.getId() == null) { em.persist(assignment); + em.flush(); // Force synchronization with the database + em.refresh(assignment); // Refresh the entity to ensure it has the latest state, including the ID } else { assignment = em.merge(assignment); } - /** - * @todo update permissionModificationTime here. - */ - if ( createIndex ) { + + if (createIndex) { indexAsync.indexRole(assignment); } + return assignment; } From 1d3a1119e8bb3fade8e27c2275e57daa3ac43a3a Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 8 Apr 2025 15:33:29 -0400 Subject: [PATCH 012/634] history panel --- .../iq/dataverse/ManagePermissionsPage.java | 100 ++++++++++++++++++ .../iq/dataverse/RoleAssignmentAudit.java | 7 +- src/main/java/propertyFiles/Bundle.properties | 7 ++ src/main/webapp/permissions-manage.xhtml | 34 ++++++ 4 files changed, 147 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java index 0e277c5aa32..5359079ad3f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java @@ -26,9 +26,12 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.Comparator; import java.util.Date; +import java.util.HashMap; import java.util.LinkedList; import java.util.List; +import java.util.Map; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; @@ -230,6 +233,44 @@ public void cloneRole(String roleId) { public void editRole(String roleId) { setRole(roleService.find(Long.parseLong(roleId))); } + + /** 
Role Assignment History */ + private List roleAssignmentHistory; + + public List getRoleAssignmentHistory() { + if (roleAssignmentHistory == null) { + roleAssignmentHistory = new ArrayList<>(); + + List audits = em.createNamedQuery("RoleAssignmentAudit.findByDefinitionPointId", RoleAssignmentAudit.class) + .setParameter("definitionPointId", dvObject.getId()) + .getResultList(); + + Map historyMap = new HashMap<>(); + + for (RoleAssignmentAudit audit : audits) { + Long roleAssignmentId = audit.getRoleAssignmentId(); + RoleAssignmentHistoryEntry entry = historyMap.get(roleAssignmentId); + + if (entry == null) { + entry = new RoleAssignmentHistoryEntry(audit.getAssigneeIdentifier(), audit.getRoleAlias()); + historyMap.put(roleAssignmentId, entry); + } + + if (audit.getActionType() == RoleAssignmentAudit.ActionType.ASSIGN) { + entry.setAssignedBy(audit.getActionByIdentifier()); + entry.setAssignedAt(audit.getActionTimestamp()); + } else if (audit.getActionType() == RoleAssignmentAudit.ActionType.REVOKE) { + entry.setRevokedBy(audit.getActionByIdentifier()); + entry.setRevokedAt(audit.getActionTimestamp()); + } + } + + roleAssignmentHistory.addAll(historyMap.values()); + roleAssignmentHistory.sort(Comparator.comparing(RoleAssignmentHistoryEntry::getAssignedAt).reversed()); + } + return roleAssignmentHistory; + } + /* ============================================================================ @@ -716,4 +757,63 @@ public Long getId() { } } + + public static class RoleAssignmentHistoryEntry { + private String roleName; + private String assigneeIdentifier; + private String assignedBy; + private Date assignedAt; + private String revokedBy; + private Date revokedAt; + + public RoleAssignmentHistoryEntry(String assigneeIdentifier, String roleName) { + this.roleName = roleName; + this.assigneeIdentifier = assigneeIdentifier; + ; + } + + public void setRevokedAt(Date actionTimestamp) { + revokedAt = actionTimestamp; + + } + + public void setRevokedBy(String actionByIdentifier) 
{ + revokedBy = actionByIdentifier; + + } + + public void setAssignedAt(Date actionTimestamp) { + assignedAt = actionTimestamp; + + } + + public void setAssignedBy(String actionByIdentifier) { + assignedBy = actionByIdentifier; + + } + + public String getRoleName() { + return roleName; + } + + public String getAssigneeIdentifier() { + return assigneeIdentifier; + } + + public String getAssignedBy() { + return assignedBy; + } + + public Date getAssignedAt() { + return assignedAt; + } + + public String getRevokedBy() { + return revokedBy; + } + + public Date getRevokedAt() { + return revokedAt; + } + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/RoleAssignmentAudit.java b/src/main/java/edu/harvard/iq/dataverse/RoleAssignmentAudit.java index 2645f641cc0..2209977b541 100644 --- a/src/main/java/edu/harvard/iq/dataverse/RoleAssignmentAudit.java +++ b/src/main/java/edu/harvard/iq/dataverse/RoleAssignmentAudit.java @@ -11,6 +11,8 @@ import jakarta.persistence.Temporal; import jakarta.persistence.TemporalType; import jakarta.persistence.Index; +import jakarta.persistence.NamedQuery; + import java.io.Serializable; import java.util.Date; @@ -26,6 +28,9 @@ @Index(name = "idx_raa_role_id", columnList = "role_id"), @Index(name = "idx_raa_definition_point_id", columnList = "definition_point_id") }) +@NamedQuery(name = "RoleAssignmentAudit.findByDefinitionPointId", +query = "SELECT ra FROM RoleAssignmentAudit ra WHERE ra.definitionPointId = :definitionPointId ORDER BY ra.roleAssignmenId, ra.actionTimestamp DESC") + public class RoleAssignmentAudit implements Serializable { @Id @@ -79,7 +84,7 @@ public RoleAssignmentAudit(RoleAssignment roleAssignment, DataverseRequest reque this.roleId = roleAssignment.getRole().getId(); this.roleAlias = roleAssignment.getRole().getAlias(); this.definitionPointId = roleAssignment.getDefinitionPoint().getId(); - this.definitionPointIdentifier = roleAssignment.getDefinitionPoint().getIdentifier(); + this.definitionPointIdentifier = 
roleAssignment.getDefinitionPoint().getGlobalId().asString(); } // Getters and setters diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 05421179d7e..95341c74545 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -1203,6 +1203,13 @@ dataverse.permissions.roles.edit=Edit Role dataverse.permissions.roles.copy=Copy Role dataverse.permissions.roles.alias.required=Please enter a unique identifier for this role. dataverse.permissions.roles.name.required=Please enter a name for this role. +dataverse.permissions.history=Role Assignment History +dataverse.permissions.history.description=View the history of role assignments and revocations +dataverse.permissions.history.role=Role +dataverse.permissions.history.assignee=Assignee +dataverse.permissions.history.assigned=Assigned +dataverse.permissions.history.revoked=Revoked +dataverse.permissions.history.notRevoked=Not Revoked # permissions-manage-files.xhtml dataverse.permissionsFiles.title=Restricted File Permissions diff --git a/src/main/webapp/permissions-manage.xhtml b/src/main/webapp/permissions-manage.xhtml index b328e4abc81..e3e5e4ecf6c 100644 --- a/src/main/webapp/permissions-manage.xhtml +++ b/src/main/webapp/permissions-manage.xhtml @@ -197,6 +197,40 @@ + +
+
+ #{bundle['dataverse.permissions.history']} + #{bundle['dataverse.permissions.history.description']} +
+
+
+ + + + + + + + + + + + + + + + + + + + + + + +
+
+
From 2fbde58c32b026577f6c5059918bc2c6f3709002 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 8 Apr 2025 15:46:17 -0400 Subject: [PATCH 013/634] typo --- src/main/java/edu/harvard/iq/dataverse/RoleAssignmentAudit.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/RoleAssignmentAudit.java b/src/main/java/edu/harvard/iq/dataverse/RoleAssignmentAudit.java index 2209977b541..8c64583e814 100644 --- a/src/main/java/edu/harvard/iq/dataverse/RoleAssignmentAudit.java +++ b/src/main/java/edu/harvard/iq/dataverse/RoleAssignmentAudit.java @@ -29,7 +29,7 @@ @Index(name = "idx_raa_definition_point_id", columnList = "definition_point_id") }) @NamedQuery(name = "RoleAssignmentAudit.findByDefinitionPointId", -query = "SELECT ra FROM RoleAssignmentAudit ra WHERE ra.definitionPointId = :definitionPointId ORDER BY ra.roleAssignmenId, ra.actionTimestamp DESC") +query = "SELECT ra FROM RoleAssignmentAudit ra WHERE ra.definitionPointId = :definitionPointId ORDER BY ra.roleAssignmentId, ra.actionTimestamp DESC") public class RoleAssignmentAudit implements Serializable { From 9da8ee9c36be79c4883d5c75998c46c05fef54a5 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 9 Apr 2025 11:58:02 -0400 Subject: [PATCH 014/634] file RAs --- .../dataverse/ManageFilePermissionsPage.java | 37 ++++++++++++++++++ .../iq/dataverse/ManagePermissionsPage.java | 19 +++++---- .../iq/dataverse/RoleAssignmentAudit.java | 12 ++++-- src/main/java/propertyFiles/Bundle.properties | 2 +- .../webapp/permissions-manage-files.xhtml | 39 +++++++++++++++++++ src/main/webapp/permissions-manage.xhtml | 9 +++-- 6 files changed, 103 insertions(+), 15 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java index 1ead0b13cdc..4cfdeaeaac3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java @@ -5,6 +5,7 @@ */ package edu.harvard.iq.dataverse; +import edu.harvard.iq.dataverse.ManagePermissionsPage.RoleAssignmentHistoryEntry; import edu.harvard.iq.dataverse.api.Util; import edu.harvard.iq.dataverse.authorization.AuthenticationProvider; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; @@ -151,6 +152,7 @@ public String init() { return permissionsWrapper.notAuthorized(); } initMaps(); + roleAssignmentHistory = null; return ""; } @@ -537,6 +539,41 @@ private boolean assignRole(RoleAssignee ra, DataFile file, DataverseRole r) { return true; } + private List roleAssignmentHistory; + + public List getRoleAssignmentHistory() { + if (roleAssignmentHistory == null) { + roleAssignmentHistory = new ArrayList<>(); + + List audits = em.createNamedQuery("RoleAssignmentAudit.findByOwnerId", RoleAssignmentAudit.class) + .setParameter("datasetId", dataset.getId()) + .getResultList(); + + Map historyMap = new HashMap<>(); + + for (RoleAssignmentAudit audit : audits) { + Long roleAssignmentId = audit.getRoleAssignmentId(); + RoleAssignmentHistoryEntry entry = historyMap.get(roleAssignmentId); + + if (entry == null) { + entry = new RoleAssignmentHistoryEntry(audit.getAssigneeIdentifier(), audit.getRoleAlias(), audit.getDefinitionPointId() ); + historyMap.put(roleAssignmentId, entry); + } + + if (audit.getActionType() == RoleAssignmentAudit.ActionType.ASSIGN) { + entry.setAssignedBy(audit.getActionByIdentifier()); + entry.setAssignedAt(audit.getActionTimestamp()); + } else if (audit.getActionType() == RoleAssignmentAudit.ActionType.REVOKE) { + entry.setRevokedBy(audit.getActionByIdentifier()); + entry.setRevokedAt(audit.getActionTimestamp()); + } + } + + roleAssignmentHistory.addAll(historyMap.values()); + roleAssignmentHistory.sort(Comparator.comparing(RoleAssignmentHistoryEntry::getAssignedAt).reversed()); + } + return roleAssignmentHistory; + } boolean renderUserGroupMessages = 
false; boolean renderFileMessages = false; diff --git a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java index 5359079ad3f..9b2b04c33a6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java @@ -252,7 +252,7 @@ public List getRoleAssignmentHistory() { RoleAssignmentHistoryEntry entry = historyMap.get(roleAssignmentId); if (entry == null) { - entry = new RoleAssignmentHistoryEntry(audit.getAssigneeIdentifier(), audit.getRoleAlias()); + entry = new RoleAssignmentHistoryEntry(audit.getAssigneeIdentifier(), audit.getRoleAlias(), audit.getDefinitionPointId()); historyMap.put(roleAssignmentId, entry); } @@ -765,31 +765,28 @@ public static class RoleAssignmentHistoryEntry { private Date assignedAt; private String revokedBy; private Date revokedAt; + private Long definitionPointId; // New field - public RoleAssignmentHistoryEntry(String assigneeIdentifier, String roleName) { + public RoleAssignmentHistoryEntry(String assigneeIdentifier, String roleName, Long definitionPointId) { this.roleName = roleName; this.assigneeIdentifier = assigneeIdentifier; - ; + this.definitionPointId = definitionPointId; } public void setRevokedAt(Date actionTimestamp) { revokedAt = actionTimestamp; - } public void setRevokedBy(String actionByIdentifier) { revokedBy = actionByIdentifier; - } public void setAssignedAt(Date actionTimestamp) { assignedAt = actionTimestamp; - } public void setAssignedBy(String actionByIdentifier) { assignedBy = actionByIdentifier; - } public String getRoleName() { @@ -815,5 +812,13 @@ public String getRevokedBy() { public Date getRevokedAt() { return revokedAt; } + + public Long getDefinitionPointId() { + return definitionPointId; + } + + public void setDefinitionPointId(Long definitionPointId) { + this.definitionPointId = definitionPointId; + } } } diff --git 
a/src/main/java/edu/harvard/iq/dataverse/RoleAssignmentAudit.java b/src/main/java/edu/harvard/iq/dataverse/RoleAssignmentAudit.java index 8c64583e814..dc6fb1806c2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/RoleAssignmentAudit.java +++ b/src/main/java/edu/harvard/iq/dataverse/RoleAssignmentAudit.java @@ -11,6 +11,7 @@ import jakarta.persistence.Temporal; import jakarta.persistence.TemporalType; import jakarta.persistence.Index; +import jakarta.persistence.NamedQueries; import jakarta.persistence.NamedQuery; import java.io.Serializable; @@ -28,9 +29,14 @@ @Index(name = "idx_raa_role_id", columnList = "role_id"), @Index(name = "idx_raa_definition_point_id", columnList = "definition_point_id") }) -@NamedQuery(name = "RoleAssignmentAudit.findByDefinitionPointId", -query = "SELECT ra FROM RoleAssignmentAudit ra WHERE ra.definitionPointId = :definitionPointId ORDER BY ra.roleAssignmentId, ra.actionTimestamp DESC") - +@NamedQueries({ + @NamedQuery(name = "RoleAssignmentAudit.findByDefinitionPointId", + query = "SELECT ra FROM RoleAssignmentAudit ra WHERE ra.definitionPointId = :definitionPointId ORDER BY ra.roleAssignmentId, ra.actionTimestamp DESC"), + @NamedQuery(name = "RoleAssignmentAudit.findByOwnerId", + query = "SELECT ra FROM RoleAssignmentAudit ra JOIN DvObject d ON ra.definitionPointId = d.id " + + "WHERE d.owner.id = :datasetId " + + "ORDER BY ra.roleAssignmentId, ra.actionTimestamp DESC") +}) public class RoleAssignmentAudit implements Serializable { @Id diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 95341c74545..095d2ff91e4 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -1210,7 +1210,7 @@ dataverse.permissions.history.assignee=Assignee dataverse.permissions.history.assigned=Assigned dataverse.permissions.history.revoked=Revoked dataverse.permissions.history.notRevoked=Not Revoked - 
+dataverse.permissions.history.file=File Id # permissions-manage-files.xhtml dataverse.permissionsFiles.title=Restricted File Permissions dataverse.permissionsFiles.usersOrGroups=Users/Groups diff --git a/src/main/webapp/permissions-manage-files.xhtml b/src/main/webapp/permissions-manage-files.xhtml index 4e4e56f2051..2be43b776b5 100644 --- a/src/main/webapp/permissions-manage-files.xhtml +++ b/src/main/webapp/permissions-manage-files.xhtml @@ -218,6 +218,45 @@ + + +
+
+ #{bundle['dataverse.permissions.history']} + #{bundle['dataverse.permissions.history.description']} +
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
diff --git a/src/main/webapp/permissions-manage.xhtml b/src/main/webapp/permissions-manage.xhtml index e3e5e4ecf6c..fbd37f5ffcd 100644 --- a/src/main/webapp/permissions-manage.xhtml +++ b/src/main/webapp/permissions-manage.xhtml @@ -104,7 +104,7 @@
#{bundle['dataverse.permissions.usersOrGroups.assignBtn']} @@ -198,7 +198,8 @@
-
+ +
#{bundle['dataverse.permissions.history']} #{bundle['dataverse.permissions.history.description']} @@ -240,7 +241,7 @@
From fe1f3e7add4873d437bbab47645ceae13d059832 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 9 Apr 2025 12:43:20 -0400 Subject: [PATCH 015/634] minor fixes --- src/main/webapp/permissions-manage-files.xhtml | 2 +- src/main/webapp/permissions-manage.xhtml | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/src/main/webapp/permissions-manage-files.xhtml b/src/main/webapp/permissions-manage-files.xhtml index 2be43b776b5..c50b2cffccb 100644 --- a/src/main/webapp/permissions-manage-files.xhtml +++ b/src/main/webapp/permissions-manage-files.xhtml @@ -229,7 +229,7 @@
- + diff --git a/src/main/webapp/permissions-manage.xhtml b/src/main/webapp/permissions-manage.xhtml index fbd37f5ffcd..98dfb240bee 100644 --- a/src/main/webapp/permissions-manage.xhtml +++ b/src/main/webapp/permissions-manage.xhtml @@ -5,6 +5,7 @@ xmlns:ui="http://java.sun.com/jsf/facelets" xmlns:p="http://primefaces.org/ui" xmlns:c="http://java.sun.com/jsp/jstl/core" + xmlns:o="http://omnifaces.org/ui" xmlns:iqbs="http://xmlns.jcp.org/jsf/composite/iqbs"> From a1d5c8afe57ceeba6087b99206b35e966be4eb4c Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 9 Apr 2025 14:12:45 -0400 Subject: [PATCH 016/634] sync tables except def point --- src/main/java/propertyFiles/Bundle.properties | 9 ++++---- .../webapp/permissions-manage-files.xhtml | 5 +---- src/main/webapp/permissions-manage.xhtml | 22 +++++++++++-------- 3 files changed, 19 insertions(+), 17 deletions(-) diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 095d2ff91e4..1d7e45c5479 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -1207,10 +1207,11 @@ dataverse.permissions.history=Role Assignment History dataverse.permissions.history.description=View the history of role assignments and revocations dataverse.permissions.history.role=Role dataverse.permissions.history.assignee=Assignee -dataverse.permissions.history.assigned=Assigned -dataverse.permissions.history.revoked=Revoked -dataverse.permissions.history.notRevoked=Not Revoked -dataverse.permissions.history.file=File Id +dataverse.permissions.history.assignedBy=Assigned By +dataverse.permissions.history.assignedAt=Assignment Date +dataverse.permissions.history.revokedBy=Revoked By +dataverse.permissions.history.revokedAt=Revocation Date +dataverse.permissions.history.files={0} Files # permissions-manage-files.xhtml dataverse.permissionsFiles.title=Restricted File Permissions dataverse.permissionsFiles.usersOrGroups=Users/Groups diff 
--git a/src/main/webapp/permissions-manage-files.xhtml b/src/main/webapp/permissions-manage-files.xhtml index c50b2cffccb..64783288df1 100644 --- a/src/main/webapp/permissions-manage-files.xhtml +++ b/src/main/webapp/permissions-manage-files.xhtml @@ -232,10 +232,7 @@ - - - - + diff --git a/src/main/webapp/permissions-manage.xhtml b/src/main/webapp/permissions-manage.xhtml index 98dfb240bee..9c7b678167b 100644 --- a/src/main/webapp/permissions-manage.xhtml +++ b/src/main/webapp/permissions-manage.xhtml @@ -208,26 +208,30 @@
+ + + - - + + - + - + + - - - - + + + + + -
From 52e18779d89c478c2caf14f2048711aa201c3301 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 9 Apr 2025 14:39:38 -0400 Subject: [PATCH 017/634] aggregate multiple def pts, show tool tip with list --- .../iq/dataverse/ManageFilePermissionsPage.java | 14 ++++++++++++++ .../iq/dataverse/ManagePermissionsPage.java | 13 +++++++------ src/main/java/propertyFiles/Bundle.properties | 1 + src/main/webapp/permissions-manage-files.xhtml | 7 +++++++ 4 files changed, 29 insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java index 4cfdeaeaac3..c7adc5368b8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java @@ -568,6 +568,20 @@ public List getRoleAssignmentHistory() { entry.setRevokedAt(audit.getActionTimestamp()); } } + // Second pass: Combine entries with matching criteria + Map finalHistoryMap = new HashMap<>(); + for (RoleAssignmentHistoryEntry entry : historyMap.values()) { + String key = entry.getAssigneeIdentifier() + "|" + entry.getRoleName() + "|" + + entry.getAssignedBy() + "|" + entry.getAssignedAt() + "|" + + entry.getRevokedBy() + "|" + entry.getRevokedAt(); + + RoleAssignmentHistoryEntry existingEntry = finalHistoryMap.get(key); + if (existingEntry == null) { + finalHistoryMap.put(key, entry); + } else { + existingEntry.addDefinitionPointId(entry.getDefinitionPointIds().get(0)); + } + } roleAssignmentHistory.addAll(historyMap.values()); roleAssignmentHistory.sort(Comparator.comparing(RoleAssignmentHistoryEntry::getAssignedAt).reversed()); diff --git a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java index 9b2b04c33a6..89407c68378 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java @@ -765,12 +765,13 @@ public static class RoleAssignmentHistoryEntry { private Date assignedAt; private String revokedBy; private Date revokedAt; - private Long definitionPointId; // New field + private List definitionPointIds; // New field public RoleAssignmentHistoryEntry(String assigneeIdentifier, String roleName, Long definitionPointId) { this.roleName = roleName; this.assigneeIdentifier = assigneeIdentifier; - this.definitionPointId = definitionPointId; + this.definitionPointIds = new ArrayList(); + definitionPointIds.add(definitionPointId); } public void setRevokedAt(Date actionTimestamp) { @@ -813,12 +814,12 @@ public Date getRevokedAt() { return revokedAt; } - public Long getDefinitionPointId() { - return definitionPointId; + public List getDefinitionPointIds() { + return definitionPointIds; } - public void setDefinitionPointId(Long definitionPointId) { - this.definitionPointId = definitionPointId; + public void addDefinitionPointId(Long definitionPointId) { + definitionPointIds.add(definitionPointId); } } } diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 1d7e45c5479..10cf777daf5 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -1211,6 +1211,7 @@ dataverse.permissions.history.assignedBy=Assigned By dataverse.permissions.history.assignedAt=Assignment Date dataverse.permissions.history.revokedBy=Revoked By dataverse.permissions.history.revokedAt=Revocation Date +dataverse.permissions.history.definedOn=Defined On dataverse.permissions.history.files={0} Files # permissions-manage-files.xhtml dataverse.permissionsFiles.title=Restricted File Permissions diff --git a/src/main/webapp/permissions-manage-files.xhtml b/src/main/webapp/permissions-manage-files.xhtml index 64783288df1..4ead7ddec5b 100644 --- a/src/main/webapp/permissions-manage-files.xhtml +++ 
b/src/main/webapp/permissions-manage-files.xhtml @@ -234,6 +234,13 @@ + + + + + + From 8b93f309565b29985be9ccf98430f9ba15734cb9 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 9 Apr 2025 15:05:48 -0400 Subject: [PATCH 018/634] fixes --- .../edu/harvard/iq/dataverse/ManagePermissionsPage.java | 8 ++++++++ src/main/webapp/permissions-manage-files.xhtml | 2 +- src/main/webapp/permissions-manage.xhtml | 3 --- 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java index 89407c68378..d4bc281a89a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java @@ -35,6 +35,8 @@ import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; +import java.util.stream.Collectors; + import jakarta.ejb.EJB; import jakarta.faces.application.FacesMessage; import jakarta.faces.event.ActionEvent; @@ -821,5 +823,11 @@ public List getDefinitionPointIds() { public void addDefinitionPointId(Long definitionPointId) { definitionPointIds.add(definitionPointId); } + + public String getDefinitionPointIdsAsString(List definitionPointIds) { + return definitionPointIds.stream() + .map(Object::toString) + .collect(Collectors.joining(", ")); + } } } diff --git a/src/main/webapp/permissions-manage-files.xhtml b/src/main/webapp/permissions-manage-files.xhtml index 4ead7ddec5b..8ef11965b35 100644 --- a/src/main/webapp/permissions-manage-files.xhtml +++ b/src/main/webapp/permissions-manage-files.xhtml @@ -236,7 +236,7 @@ + title="#{historyEntry.definitionPointIdsAsString}"> diff --git a/src/main/webapp/permissions-manage.xhtml b/src/main/webapp/permissions-manage.xhtml index 9c7b678167b..dfab2a067a3 100644 --- a/src/main/webapp/permissions-manage.xhtml +++ b/src/main/webapp/permissions-manage.xhtml @@ -214,9 +214,6 @@ - - - From 
d16eb77577f0ac99b033c7999c02144a664670a3 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Thu, 10 Apr 2025 09:17:39 -0400 Subject: [PATCH 019/634] remove param --- .../java/edu/harvard/iq/dataverse/ManagePermissionsPage.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java index d4bc281a89a..8f8400fe8ca 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java @@ -824,7 +824,7 @@ public void addDefinitionPointId(Long definitionPointId) { definitionPointIds.add(definitionPointId); } - public String getDefinitionPointIdsAsString(List definitionPointIds) { + public String getDefinitionPointIdsAsString() { return definitionPointIds.stream() .map(Object::toString) .collect(Collectors.joining(", ")); From c622259b2626df70a111f1bfd619905a66335bad Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Thu, 10 Apr 2025 10:51:54 -0400 Subject: [PATCH 020/634] use final map to avoid duplicates --- .../edu/harvard/iq/dataverse/ManageFilePermissionsPage.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java index c7adc5368b8..0dbcba1fc97 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java @@ -583,7 +583,7 @@ public List getRoleAssignmentHistory() { } } - roleAssignmentHistory.addAll(historyMap.values()); + roleAssignmentHistory.addAll(finalHistoryMap.values()); roleAssignmentHistory.sort(Comparator.comparing(RoleAssignmentHistoryEntry::getAssignedAt).reversed()); } return roleAssignmentHistory; From 334d8e01a46258265b6fc3679b922d3054ec6b1c Mon Sep 17 00:00:00 2001 From: Jim Myers Date: 
Thu, 10 Apr 2025 10:52:10 -0400 Subject: [PATCH 021/634] use h:outputFormat --- src/main/webapp/permissions-manage-files.xhtml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/webapp/permissions-manage-files.xhtml b/src/main/webapp/permissions-manage-files.xhtml index 8ef11965b35..9e43d4ce910 100644 --- a/src/main/webapp/permissions-manage-files.xhtml +++ b/src/main/webapp/permissions-manage-files.xhtml @@ -235,11 +235,11 @@ - - + From 04ac9bad4fa8b1e82802bcbc2a5900da9d7d3b6b Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 14 Apr 2025 14:04:19 -0400 Subject: [PATCH 022/634] Drop refresh --- .../java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java | 1 - 1 file changed, 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java index 17938d900a3..8d31ff907b8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java @@ -78,7 +78,6 @@ public RoleAssignment save(RoleAssignment assignment, boolean createIndex) { if (assignment.getId() == null) { em.persist(assignment); em.flush(); // Force synchronization with the database - em.refresh(assignment); // Refresh the entity to ensure it has the latest state, including the ID } else { assignment = em.merge(assignment); } From c7251f58096cd98120591b18b35cfbae19ba9296 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 14 Apr 2025 14:23:56 -0400 Subject: [PATCH 023/634] move RA to postFlush in create --- .../command/impl/CreateNewDatasetCommand.java | 25 +++++++++++-------- 1 file changed, 14 insertions(+), 11 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDatasetCommand.java index c22a2cdb4a2..5aa2ceb49e4 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDatasetCommand.java @@ -107,9 +107,23 @@ protected void handlePid(Dataset theDataset, CommandContext ctxt) throws Command @Override protected void postPersist( Dataset theDataset, CommandContext ctxt ){ + + + if ( template != null ) { + ctxt.templates().incrementUsageCount(template.getId()); + } + } + + /* Saves role assignments for the dataset. + * Emails those able to publish the dataset (except the creator themselves who already gets an email) + * that a new dataset exists. + * NB: These need the dataset id so have to be postDBFlush (vs postPersist()) + */ + protected void postDBFlush( Dataset theDataset, CommandContext ctxt ){ // set the role to be default contributor role for its dataverse String privateUrlToken = null; if (theDataset.getOwner().getDefaultContributorRole() != null) { + logger.info("New Dataset id: " + theDataset.getId()); RoleAssignment roleAssignment = new RoleAssignment(theDataset.getOwner().getDefaultContributorRole(), getRequest().getUser(), theDataset, privateUrlToken); ctxt.roles().save(roleAssignment, false); @@ -126,17 +140,6 @@ protected void postPersist( Dataset theDataset, CommandContext ctxt ){ // linked here (?) theDataset.setPermissionModificationTime(getTimestamp()); } - - if ( template != null ) { - ctxt.templates().incrementUsageCount(template.getId()); - } - } - - /* Emails those able to publish the dataset (except the creator themselves who already gets an email) - * that a new dataset exists. - * NB: Needs dataset id so has to be postDBFlush (vs postPersist()) - */ - protected void postDBFlush( Dataset theDataset, CommandContext ctxt ){ if(ctxt.settings().isTrueForKey(SettingsServiceBean.Key.SendNotificationOnDatasetCreation, false)) { //QDR - alert curators that a dataset has been created //Should this create a notification too? 
(which would let us use the notification mailcapbilities to generate the subject/body. From 614697b24193cc6e33033db4aaa0b7296aff97fb Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 14 Apr 2025 16:56:37 -0400 Subject: [PATCH 024/634] move audit code to service --- .../iq/dataverse/DataverseRoleServiceBean.java | 14 ++++++++++---- .../harvard/iq/dataverse/DataverseServiceBean.java | 6 +++--- .../java/edu/harvard/iq/dataverse/api/Admin.java | 5 +++-- .../engine/command/impl/AssignRoleCommand.java | 8 +------- .../command/impl/CreateDataverseCommand.java | 6 +++--- .../command/impl/CreateNewDatasetCommand.java | 2 +- .../command/impl/CreateDataverseCommandTest.java | 6 +++--- .../command/impl/CreatePrivateUrlCommandTest.java | 3 ++- .../command/impl/MoveDatasetCommandTest.java | 6 +++--- .../impl/ReturnDatasetToAuthorCommandTest.java | 2 +- .../impl/SubmitDatasetForReviewCommandTest.java | 2 +- 11 files changed, 31 insertions(+), 29 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java index 8d31ff907b8..09097be9c8e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java @@ -70,11 +70,11 @@ public DataverseRole save(DataverseRole aRole) { return aRole; } - public RoleAssignment save(RoleAssignment assignment) { - return save(assignment, true); + public RoleAssignment save(RoleAssignment assignment, DataverseRequest req) { + return save(assignment, true, req); } - public RoleAssignment save(RoleAssignment assignment, boolean createIndex) { + public RoleAssignment save(RoleAssignment assignment, boolean createIndex, DataverseRequest req) { if (assignment.getId() == null) { em.persist(assignment); em.flush(); // Force synchronization with the database @@ -86,6 +86,12 @@ public RoleAssignment save(RoleAssignment assignment, boolean createIndex) { 
indexAsync.indexRole(assignment); } + // Check if ROLE_ASSIGNMENT_AUDITING feature flag is enabled + if (FeatureFlags.ROLE_ASSIGNMENT_AUDITING.enabled()) { + RoleAssignmentAudit audit = new RoleAssignmentAudit(assignment, req, RoleAssignmentAudit.ActionType.ASSIGN); + saveAudit(audit); + } + return assignment; } @@ -96,7 +102,7 @@ public RoleAssignment save(RoleAssignment assignment, boolean createIndex) { * @param audit The RoleAssignmentAudit object to be saved * @return The persisted RoleAssignmentAudit object */ - public RoleAssignmentAudit saveAudit(RoleAssignmentAudit audit) { + private RoleAssignmentAudit saveAudit(RoleAssignmentAudit audit) { if (audit.getAuditId() == null) { em.persist(audit); em.flush(); // Ensure the entity is persisted immediately diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java index f89e707cc03..06ea6969200 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java @@ -771,7 +771,7 @@ public List findAllDataverseDatasetChildren(Long dvId) { } public String addRoleAssignmentsToChildren(Dataverse owner, ArrayList rolesToInherit, - boolean inheritAllRoles) { + boolean inheritAllRoles, DataverseRequest req) { /* * This query recursively finds all Dataverses that are inside/children of the * specified one. 
It recursively finds dvobjects of dtype 'Dataverse' whose @@ -857,7 +857,7 @@ public String addRoleAssignmentsToChildren(Dataverse owner, ArrayList ro try { RoleAssignment ra = new RoleAssignment(inheritableRole, roleUser, childDv, privateUrlToken); if (!existingRAs.get(childDv.getId()).contains(ra)) { - rolesService.save(ra); + rolesService.save(ra, req); } } catch (Exception e) { logger.warning("Unable to assign " + roleAssignment.getAssigneeIdentifier() @@ -877,7 +877,7 @@ public String addRoleAssignmentsToChildren(Dataverse owner, ArrayList ro RoleAssignment ra = new RoleAssignment(inheritableRole, roleGroup, childDv, privateUrlToken); if (!existingRAs.get(childDv.getId()).contains(ra)) { - rolesService.save(ra); + rolesService.save(ra, req); } } catch (Exception e) { logger.warning("Unable to assign " + roleAssignment.getAssigneeIdentifier() diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index ac52b5d9fbf..4cbd405d2cf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -2156,8 +2156,9 @@ public Response addRoleAssignementsToChildren(@Context ContainerRequestContext c if (owner == null) { return error(Response.Status.NOT_FOUND, "Could not find dataverse based on alias supplied: " + alias + "."); } + AuthenticatedUser user = null; try { - AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc); + user = getRequestAuthenticatedUserOrDie(crc); if (!user.isSuperuser()) { return error(Response.Status.FORBIDDEN, "Superusers only."); } @@ -2172,7 +2173,7 @@ public Response addRoleAssignementsToChildren(@Context ContainerRequestContext c if (rolesToInherit.contains("*")) { inheritAllRoles = true; } - return ok(dataverseSvc.addRoleAssignmentsToChildren(owner, rolesToInherit, inheritAllRoles)); + return ok(dataverseSvc.addRoleAssignmentsToChildren(owner, rolesToInherit, inheritAllRoles, 
createDataverseRequest(user))); } } return error(Response.Status.BAD_REQUEST, diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AssignRoleCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AssignRoleCommand.java index 2bd8b6c1090..7260b433ddb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AssignRoleCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AssignRoleCommand.java @@ -77,13 +77,7 @@ public RoleAssignment execute(CommandContext ctxt) throws CommandException { } // TODO make sure the role is defined on the dataverse. RoleAssignment roleAssignment = new RoleAssignment(role, grantee, defPoint, privateUrlToken, anonymizedAccess); - RoleAssignment savedRoleAssignment = ctxt.roles().save(roleAssignment); - - // Check if ROLE_ASSIGNMENT_AUDITING feature flag is enabled - if (FeatureFlags.ROLE_ASSIGNMENT_AUDITING.enabled()) { - RoleAssignmentAudit audit = new RoleAssignmentAudit(savedRoleAssignment, getRequest(), RoleAssignmentAudit.ActionType.ASSIGN); - ctxt.roles().saveAudit(audit); - } + RoleAssignment savedRoleAssignment = ctxt.roles().save(roleAssignment, getRequest()); return savedRoleAssignment; } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java index 145cfb6199c..d85e82844f5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java @@ -95,7 +95,7 @@ protected Dataverse innerExecute(CommandContext ctxt) throws IllegalCommandExcep DataverseRole adminRole = ctxt.roles().findBuiltinRoleByAlias(DataverseRole.ADMIN); String privateUrlToken = null; - ctxt.roles().save(new RoleAssignment(adminRole, getRequest().getUser(), managedDv, privateUrlToken), false); + ctxt.roles().save(new 
RoleAssignment(adminRole, getRequest().getUser(), managedDv, privateUrlToken), false, getRequest()); // Add additional role assignments if inheritance is set boolean inheritAllRoles = false; String rolesString = ctxt.settings().getValueForKey(SettingsServiceBean.Key.InheritParentRoleAssignments, ""); @@ -120,13 +120,13 @@ protected Dataverse innerExecute(CommandContext ctxt) throws IllegalCommandExcep if (identifier.startsWith(AuthenticatedUser.IDENTIFIER_PREFIX)) { identifier = identifier.substring(AuthenticatedUser.IDENTIFIER_PREFIX.length()); ctxt.roles().save(new RoleAssignment(role.getRole(), - ctxt.authentication().getAuthenticatedUser(identifier), managedDv, privateUrlToken), false); + ctxt.authentication().getAuthenticatedUser(identifier), managedDv, privateUrlToken), false, getRequest()); } else if (identifier.startsWith(Group.IDENTIFIER_PREFIX)) { identifier = identifier.substring(Group.IDENTIFIER_PREFIX.length()); Group roleGroup = ctxt.groups().getGroup(identifier); if (roleGroup != null) { ctxt.roles().save(new RoleAssignment(role.getRole(), - roleGroup, managedDv, privateUrlToken), false); + roleGroup, managedDv, privateUrlToken), false, getRequest()); } } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDatasetCommand.java index 5aa2ceb49e4..d30443c5de3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDatasetCommand.java @@ -126,7 +126,7 @@ protected void postDBFlush( Dataset theDataset, CommandContext ctxt ){ logger.info("New Dataset id: " + theDataset.getId()); RoleAssignment roleAssignment = new RoleAssignment(theDataset.getOwner().getDefaultContributorRole(), getRequest().getUser(), theDataset, privateUrlToken); - ctxt.roles().save(roleAssignment, false); + ctxt.roles().save(roleAssignment, false, 
getRequest()); // TODO: the above may be creating the role assignments and saving them // in the database, but without properly linking them to the dataset diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommandTest.java index 73880b78e7b..dd7d60632f9 100644 --- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommandTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommandTest.java @@ -101,15 +101,15 @@ public DataverseRole findBuiltinRoleByAlias(String alias) { } @Override - public RoleAssignment save(RoleAssignment assignment) { + public RoleAssignment save(RoleAssignment assignment, DataverseRequest req) { assignment.setId( nextId() ); assignments.add(assignment); return assignment; } @Override - public RoleAssignment save(RoleAssignment assignment, boolean index) { - return save (assignment); + public RoleAssignment save(RoleAssignment assignment, boolean index, DataverseRequest req) { + return save (assignment, req); } @Override diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java index 0ba29f74774..e3ecf6b8a9a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java @@ -11,6 +11,7 @@ import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser; import edu.harvard.iq.dataverse.engine.TestCommandContext; import edu.harvard.iq.dataverse.engine.TestDataverseEngine; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.privateurl.PrivateUrl; import 
edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean; @@ -79,7 +80,7 @@ public DataverseRole findBuiltinRoleByAlias(String alias) { } @Override - public RoleAssignment save(RoleAssignment assignment) { + public RoleAssignment save(RoleAssignment assignment, DataverseRequest req) { // no-op return assignment; } diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDatasetCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDatasetCommandTest.java index 380a4bbcf18..b8902728785 100644 --- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDatasetCommandTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDatasetCommandTest.java @@ -284,15 +284,15 @@ public DataverseRole findBuiltinRoleByAlias(String alias) { } @Override - public RoleAssignment save(RoleAssignment assignment) { + public RoleAssignment save(RoleAssignment assignment, DataverseRequest req) { assignment.setId(nextId()); assignments.add(assignment); return assignment; } @Override - public RoleAssignment save(RoleAssignment assignment, boolean index) { - return save(assignment); + public RoleAssignment save(RoleAssignment assignment, boolean index, DataverseRequest req) { + return save(assignment, req); } @Override diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommandTest.java index 68c44764dff..2c133f33932 100644 --- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommandTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommandTest.java @@ -107,7 +107,7 @@ public DataverseRole findBuiltinRoleByAlias(String alias) { } @Override - public RoleAssignment save(RoleAssignment assignment) { + public RoleAssignment save(RoleAssignment assignment, DataverseRequest req) { // no-op return assignment; 
} diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/SubmitDatasetForReviewCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/SubmitDatasetForReviewCommandTest.java index 700ba332247..2f607aa7a0e 100644 --- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/SubmitDatasetForReviewCommandTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/SubmitDatasetForReviewCommandTest.java @@ -113,7 +113,7 @@ public DataverseRole findBuiltinRoleByAlias(String alias) { } @Override - public RoleAssignment save(RoleAssignment assignment) { + public RoleAssignment save(RoleAssignment assignment, DataverseRequest req) { // no-op return assignment; } From d896af5d893bf7cdf8eb143c5d0530aad64f5c32 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 16 Apr 2025 14:41:14 -0400 Subject: [PATCH 025/634] update history when assignment made --- src/main/webapp/roles-assign.xhtml | 18 +----------------- 1 file changed, 1 insertion(+), 17 deletions(-) diff --git a/src/main/webapp/roles-assign.xhtml b/src/main/webapp/roles-assign.xhtml index 93b9862c55d..8c5290b1580 100644 --- a/src/main/webapp/roles-assign.xhtml +++ b/src/main/webapp/roles-assign.xhtml @@ -89,7 +89,7 @@
@@ -100,20 +100,4 @@
- -

- #{bundle['dataverse.permissions.usersOrGroups.assignDialog.fileDownloadConfirm']} -

-
- - - - - -
-
From 06042f3eaab71ca59cf22dc2deec2c5c7d17716c Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 16 Apr 2025 15:27:58 -0400 Subject: [PATCH 026/634] handle dataverse, no filePIDs, more update history --- .../harvard/iq/dataverse/RoleAssignmentAudit.java | 13 +++++++++---- src/main/webapp/permissions-manage-files.xhtml | 10 +++++----- 2 files changed, 14 insertions(+), 9 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/RoleAssignmentAudit.java b/src/main/java/edu/harvard/iq/dataverse/RoleAssignmentAudit.java index dc6fb1806c2..52292aeb065 100644 --- a/src/main/java/edu/harvard/iq/dataverse/RoleAssignmentAudit.java +++ b/src/main/java/edu/harvard/iq/dataverse/RoleAssignmentAudit.java @@ -61,16 +61,16 @@ public class RoleAssignmentAudit implements Serializable { @Column(name = "assignee_identifier", nullable = false) private String assigneeIdentifier; - @Column(name = "role_id") + @Column(name = "role_id", nullable = false) private Long roleId; @Column(name = "role_alias", nullable = false) private String roleAlias; - @Column(name = "definition_point_id") + @Column(name = "definition_point_id", nullable = false) private Long definitionPointId; - @Column(name = "definition_point_identifier", nullable = false) + @Column(name = "definition_point_identifier", nullable = true) private String definitionPointIdentifier; public enum ActionType { @@ -90,7 +90,12 @@ public RoleAssignmentAudit(RoleAssignment roleAssignment, DataverseRequest reque this.roleId = roleAssignment.getRole().getId(); this.roleAlias = roleAssignment.getRole().getAlias(); this.definitionPointId = roleAssignment.getDefinitionPoint().getId(); - this.definitionPointIdentifier = roleAssignment.getDefinitionPoint().getGlobalId().asString(); + GlobalId globalId = roleAssignment.getDefinitionPoint().getGlobalId(); + if(globalId != null) { + this.definitionPointIdentifier = roleAssignment.getDefinitionPoint().getGlobalId().asString(); + } else if(roleAssignment.getDefinitionPoint() instanceof 
Dataverse dv) { + this.definitionPointIdentifier = dv.getAlias(); + } } // Getters and setters diff --git a/src/main/webapp/permissions-manage-files.xhtml b/src/main/webapp/permissions-manage-files.xhtml index 9e43d4ce910..11fc97087fd 100644 --- a/src/main/webapp/permissions-manage-files.xhtml +++ b/src/main/webapp/permissions-manage-files.xhtml @@ -95,7 +95,7 @@ update=":#{p:resolveClientId('rolesPermissionsForm:userGroups', view)} :#{p:resolveClientId('rolesPermissionsForm:restrictedFiles', view)} #{p:resolveClientId('rolesPermissionsForm:usersGroups', view)} - @([id$=Messages])"> + roleAssignmentHistory @([id$=Messages])"> #{bundle['dataverse.permissionsFiles.assignDialog.grantBtn']} + roleAssignmentHistory @([id$=Messages])"> #{bundle['dataverse.permissionsFiles.assignDialog.rejectBtn']}
@@ -383,14 +383,14 @@
@@ -415,7 +415,7 @@
+
+ @@ -258,6 +260,7 @@ +
diff --git a/src/main/webapp/permissions-manage.xhtml b/src/main/webapp/permissions-manage.xhtml index dfab2a067a3..25b976f8e53 100644 --- a/src/main/webapp/permissions-manage.xhtml +++ b/src/main/webapp/permissions-manage.xhtml @@ -206,8 +206,10 @@ #{bundle['dataverse.permissions.history.description']}
+
- + + @@ -231,6 +233,7 @@ +
From 1864d0dad2e8a652bc07508fd31c0edcf6ff3269 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 18 Apr 2025 11:35:00 -0400 Subject: [PATCH 029/634] update sorting --- .../edu/harvard/iq/dataverse/ManageFilePermissionsPage.java | 5 ++++- .../java/edu/harvard/iq/dataverse/ManagePermissionsPage.java | 2 +- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java index 78e092844c2..33314ef4fd3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java @@ -585,7 +585,10 @@ public List getRoleAssignmentHistory() { } roleAssignmentHistory.addAll(finalHistoryMap.values()); - roleAssignmentHistory.sort(Comparator.comparing(RoleAssignmentHistoryEntry::getAssignedAt).reversed()); + roleAssignmentHistory.sort(Comparator + .comparing(RoleAssignmentHistoryEntry::getRevokedAt, Comparator.nullsLast(Comparator.naturalOrder())) + .thenComparing(RoleAssignmentHistoryEntry::getAssignedAt, Comparator.nullsLast(Comparator.naturalOrder())) + .reversed()); } return roleAssignmentHistory; } diff --git a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java index ff6fd659703..ff638a88866 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java @@ -270,7 +270,7 @@ public List getRoleAssignmentHistory() { roleAssignmentHistory.addAll(historyMap.values()); roleAssignmentHistory.sort(Comparator - .comparing(RoleAssignmentHistoryEntry::getRevokedAt, Comparator.nullsFirst(Comparator.naturalOrder())) + .comparing(RoleAssignmentHistoryEntry::getRevokedAt, Comparator.nullsLast(Comparator.naturalOrder())) .thenComparing(RoleAssignmentHistoryEntry::getAssignedAt, 
Comparator.nullsLast(Comparator.naturalOrder())) .reversed()); }; From 65dc679465cc2d6ba367ca1473d307c105921cdd Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 18 Apr 2025 11:43:41 -0400 Subject: [PATCH 030/634] sortable, deferred load --- .../webapp/permissions-manage-files.xhtml | 78 ++++++++++--------- src/main/webapp/permissions-manage.xhtml | 55 +++++++------ 2 files changed, 72 insertions(+), 61 deletions(-) diff --git a/src/main/webapp/permissions-manage-files.xhtml b/src/main/webapp/permissions-manage-files.xhtml index 516ee9da0a1..4d8612cae65 100644 --- a/src/main/webapp/permissions-manage-files.xhtml +++ b/src/main/webapp/permissions-manage-files.xhtml @@ -219,6 +219,7 @@ +
@@ -226,42 +227,47 @@ #{bundle['dataverse.permissions.history.description']}
- -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
diff --git a/src/main/webapp/permissions-manage.xhtml b/src/main/webapp/permissions-manage.xhtml index 25b976f8e53..c47d4f773a6 100644 --- a/src/main/webapp/permissions-manage.xhtml +++ b/src/main/webapp/permissions-manage.xhtml @@ -208,31 +208,36 @@
- - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + +
From c87220ea6a4e510999a3a3c390b415ec9fa1d151 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 18 Apr 2025 13:05:06 -0400 Subject: [PATCH 031/634] cleanup/drop deferred load (wasn't working) --- .../webapp/permissions-manage-files.xhtml | 85 +++++++++---------- src/main/webapp/permissions-manage.xhtml | 72 ++++++++-------- 2 files changed, 72 insertions(+), 85 deletions(-) diff --git a/src/main/webapp/permissions-manage-files.xhtml b/src/main/webapp/permissions-manage-files.xhtml index 4d8612cae65..b5853783ef9 100644 --- a/src/main/webapp/permissions-manage-files.xhtml +++ b/src/main/webapp/permissions-manage-files.xhtml @@ -219,56 +219,47 @@ -
-
- #{bundle['dataverse.permissions.history']} - #{bundle['dataverse.permissions.history.description']} -
-
- -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-
+
+ #{bundle['dataverse.permissions.history']} #{bundle['dataverse.permissions.history.description']} +
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
diff --git a/src/main/webapp/permissions-manage.xhtml b/src/main/webapp/permissions-manage.xhtml index c47d4f773a6..d44b0bb47cc 100644 --- a/src/main/webapp/permissions-manage.xhtml +++ b/src/main/webapp/permissions-manage.xhtml @@ -201,47 +201,43 @@
-
- #{bundle['dataverse.permissions.history']} - #{bundle['dataverse.permissions.history.description']} -
-
- +
+ #{bundle['dataverse.permissions.history']} #{bundle['dataverse.permissions.history.description']} +
+
+
- - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + +
-
+
+
From cb57117b67aa49f6732e0f91eba4d62712124552 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 28 Apr 2025 13:53:30 -0400 Subject: [PATCH 032/634] make updating RA history conditional on flag being set. --- src/main/webapp/permissions-manage-files.xhtml | 16 +++++++++------- src/main/webapp/permissions-manage.xhtml | 8 +++++--- 2 files changed, 14 insertions(+), 10 deletions(-) diff --git a/src/main/webapp/permissions-manage-files.xhtml b/src/main/webapp/permissions-manage-files.xhtml index b5853783ef9..c015de4f5b3 100644 --- a/src/main/webapp/permissions-manage-files.xhtml +++ b/src/main/webapp/permissions-manage-files.xhtml @@ -23,6 +23,8 @@ + +
@@ -95,7 +97,7 @@ update=":#{p:resolveClientId('rolesPermissionsForm:userGroups', view)} :#{p:resolveClientId('rolesPermissionsForm:restrictedFiles', view)} #{p:resolveClientId('rolesPermissionsForm:usersGroups', view)} - roleAssignmentHistory @([id$=Messages])"> + @if(#{RAAuditingEnabled}) roleAssignmentHistory @([id$=Messages])"> #{bundle['dataverse.permissionsFiles.assignDialog.grantBtn']} + @if(#{RAAuditingEnabled}) roleAssignmentHistory @([id$=Messages])"> #{bundle['dataverse.permissionsFiles.assignDialog.rejectBtn']}
@@ -219,8 +221,8 @@ - -
+ +
#{bundle['dataverse.permissions.history']} #{bundle['dataverse.permissions.history.description']}
@@ -383,14 +385,14 @@
@@ -415,7 +417,7 @@ From 04b37f25de54741457fccc974b6ab9fca6b57b93 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 28 Apr 2025 14:08:32 -0400 Subject: [PATCH 033/634] missing widgetVar --- src/main/webapp/permissions-manage-files.xhtml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/webapp/permissions-manage-files.xhtml b/src/main/webapp/permissions-manage-files.xhtml index c015de4f5b3..884925e1b98 100644 --- a/src/main/webapp/permissions-manage-files.xhtml +++ b/src/main/webapp/permissions-manage-files.xhtml @@ -228,7 +228,7 @@
- From b6b86ea88aae9f09ba60effc1d4e031098b8d0d0 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 28 Apr 2025 14:55:40 -0400 Subject: [PATCH 034/634] Revert "make updating RA history conditional on flag being set." This reverts commit 8f58f193482811c1279e3721f3971aaf3ce07dbe. --- src/main/webapp/permissions-manage-files.xhtml | 16 +++++++--------- src/main/webapp/permissions-manage.xhtml | 8 +++----- 2 files changed, 10 insertions(+), 14 deletions(-) diff --git a/src/main/webapp/permissions-manage-files.xhtml b/src/main/webapp/permissions-manage-files.xhtml index 884925e1b98..282e1da9b17 100644 --- a/src/main/webapp/permissions-manage-files.xhtml +++ b/src/main/webapp/permissions-manage-files.xhtml @@ -23,8 +23,6 @@ - -
@@ -97,7 +95,7 @@ update=":#{p:resolveClientId('rolesPermissionsForm:userGroups', view)} :#{p:resolveClientId('rolesPermissionsForm:restrictedFiles', view)} #{p:resolveClientId('rolesPermissionsForm:usersGroups', view)} - @if(#{RAAuditingEnabled}) roleAssignmentHistory @([id$=Messages])"> + roleAssignmentHistory @([id$=Messages])"> #{bundle['dataverse.permissionsFiles.assignDialog.grantBtn']} + roleAssignmentHistory @([id$=Messages])"> #{bundle['dataverse.permissionsFiles.assignDialog.rejectBtn']}
@@ -221,8 +219,8 @@
- -
+ +
#{bundle['dataverse.permissions.history']} #{bundle['dataverse.permissions.history.description']}
@@ -385,14 +383,14 @@
@@ -417,7 +415,7 @@ From d3ab76bf39845aa02b96135c8bd8a80687d80fec Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 28 Apr 2025 16:12:05 -0400 Subject: [PATCH 035/634] update to fix 500 error re not finding ra history --- src/main/webapp/permissions-manage-files.xhtml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/main/webapp/permissions-manage-files.xhtml b/src/main/webapp/permissions-manage-files.xhtml index 282e1da9b17..1a93527dfef 100644 --- a/src/main/webapp/permissions-manage-files.xhtml +++ b/src/main/webapp/permissions-manage-files.xhtml @@ -95,7 +95,8 @@ update=":#{p:resolveClientId('rolesPermissionsForm:userGroups', view)} :#{p:resolveClientId('rolesPermissionsForm:restrictedFiles', view)} #{p:resolveClientId('rolesPermissionsForm:usersGroups', view)} - roleAssignmentHistory @([id$=Messages])"> + #{p:resolveClientId('rolesPermissionsForm:roleAssignmentHistory', view)} + @([id$=Messages])"> #{bundle['dataverse.permissionsFiles.assignDialog.grantBtn']} + #{p:resolveClientId('rolesPermissionsForm:roleAssignmentHistory', view)} + @([id$=Messages])"> #{bundle['dataverse.permissionsFiles.assignDialog.rejectBtn']}
From af6600f1a26483ad0cbf2d426ff9d9cc10b218fe Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 29 Apr 2025 08:56:53 -0400 Subject: [PATCH 036/634] word wrapping on perms form --- src/main/webapp/resources/css/structure.css | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/webapp/resources/css/structure.css b/src/main/webapp/resources/css/structure.css index 4d83ddd489d..9e12a83f4a0 100644 --- a/src/main/webapp/resources/css/structure.css +++ b/src/main/webapp/resources/css/structure.css @@ -568,6 +568,7 @@ div.ui-tabs-panels .ui-tabs.ui-tabs-top .ui-tabs-nav.ui-widget-header {border-bo /* -------- PERMISSIONS -------- */ div[id$='roleDisplay'] span.label, div[id$='roleDetails'] span.label {display:inline-block; margin-bottom:4px;} +#rolesPermissionsForm {word-break:break-word;} /* -------- METRICS -------- */ /* -- OLD LAYOUT, DATAVERSE -- */ From 4fca21b37d867aa20f31933c0c38144a3d85ca5f Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 7 Jul 2025 14:32:00 -0400 Subject: [PATCH 037/634] missing import --- .../java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java index 09097be9c8e..a6e2a53dadd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java @@ -14,6 +14,7 @@ import java.util.Collection; import java.util.HashSet; import java.util.List; +import java.util.Objects; import java.util.Set; import java.util.logging.Logger; import java.util.stream.Collectors; From 8c0bfbf706b94cacfdc7a1fc803c0106af85203b Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 7 Jul 2025 17:22:12 -0400 Subject: [PATCH 038/634] refactor getting RA history --- .../dataverse/DataverseRoleServiceBean.java | 162 ++++++++++++++++++ .../dataverse/ManageFilePermissionsPage.java | 48 +----- 
.../iq/dataverse/ManagePermissionsPage.java | 111 +----------- 3 files changed, 169 insertions(+), 152 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java index a6e2a53dadd..68fee4aacad 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java @@ -1,5 +1,6 @@ package edu.harvard.iq.dataverse; +import edu.harvard.iq.dataverse.DataverseRoleServiceBean.RoleAssignmentHistoryEntry; import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.RoleAssignee; @@ -12,9 +13,13 @@ import edu.harvard.iq.dataverse.search.SolrIndexServiceBean; import java.util.ArrayList; import java.util.Collection; +import java.util.Comparator; +import java.util.Date; +import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Objects; +import java.util.Map; import java.util.Set; import java.util.logging.Logger; import java.util.stream.Collectors; @@ -367,5 +372,162 @@ For a given permission and dataverse Id get all of the roles (built-in or owned } return retVal; } + + /** + * Retrieves role assignment history for a specific definition point + * + * @param definitionPointId The ID of the definition point + * @return List of role assignment history entries + */ + public List getRoleAssignmentHistory(Long definitionPointId) { + List audits = em.createNamedQuery("RoleAssignmentAudit.findByDefinitionPointId", RoleAssignmentAudit.class) + .setParameter("definitionPointId", definitionPointId) + .getResultList(); + + return processRoleAssignmentAudits(audits, false); + } + + /** + * Retrieves role assignment history for all files in a dataset + * + * @param datasetId The ID of the dataset + * @return List of role assignment history entries + */ + public List 
getChildRoleAssignmentHistory(Long datasetId) { + List audits = em.createNamedQuery("RoleAssignmentAudit.findByOwnerId", RoleAssignmentAudit.class) + .setParameter("datasetId", datasetId) + .getResultList(); + + return processRoleAssignmentAudits(audits, true); + } + + /** + * Common method to process role assignment audits and create history entries + * + * @param audits List of role assignment audit records + * @param combineEntries Whether to combine entries for different files + * @return List of role assignment history entries + */ + private List processRoleAssignmentAudits(List audits, boolean combineEntries) { + List roleAssignmentHistory = new ArrayList<>(); + Map historyMap = new HashMap<>(); + + // First pass: Create entries from audit records + for (RoleAssignmentAudit audit : audits) { + Long roleAssignmentId = audit.getRoleAssignmentId(); + RoleAssignmentHistoryEntry entry = historyMap.get(roleAssignmentId); + + if (entry == null) { + entry = new RoleAssignmentHistoryEntry(audit.getAssigneeIdentifier(), audit.getRoleAlias(), audit.getDefinitionPointId()); + historyMap.put(roleAssignmentId, entry); + } + + if (audit.getActionType() == RoleAssignmentAudit.ActionType.ASSIGN) { + entry.setAssignedBy(audit.getActionByIdentifier()); + entry.setAssignedAt(audit.getActionTimestamp()); + } else if (audit.getActionType() == RoleAssignmentAudit.ActionType.REVOKE) { + entry.setRevokedBy(audit.getActionByIdentifier()); + entry.setRevokedAt(audit.getActionTimestamp()); + } + } + + // Second pass: Combine entries with matching criteria if requested + if (combineEntries) { + Map finalHistoryMap = new HashMap<>(); + for (RoleAssignmentHistoryEntry entry : historyMap.values()) { + String key = entry.getAssigneeIdentifier() + "|" + entry.getRoleName() + "|" + + entry.getAssignedBy() + "|" + entry.getAssignedAt() + "|" + + entry.getRevokedBy() + "|" + entry.getRevokedAt(); + + RoleAssignmentHistoryEntry existingEntry = finalHistoryMap.get(key); + if (existingEntry == 
null) { + finalHistoryMap.put(key, entry); + } else { + existingEntry.addDefinitionPointId(entry.getDefinitionPointIds().get(0)); + } + } + roleAssignmentHistory.addAll(finalHistoryMap.values()); + } else { + roleAssignmentHistory.addAll(historyMap.values()); + } + + // Sort the entries + roleAssignmentHistory.sort(Comparator + .comparing(RoleAssignmentHistoryEntry::getRevokedAt, Comparator.nullsLast(Comparator.naturalOrder())) + .thenComparing(RoleAssignmentHistoryEntry::getAssignedAt, Comparator.nullsLast(Comparator.naturalOrder())) + .reversed()); + return roleAssignmentHistory; + } + + public static class RoleAssignmentHistoryEntry { + private String roleName; + private String assigneeIdentifier; + private String assignedBy; + private Date assignedAt; + private String revokedBy; + private Date revokedAt; + private List definitionPointIds; // New field + + public RoleAssignmentHistoryEntry(String assigneeIdentifier, String roleName, Long definitionPointId) { + this.roleName = roleName; + this.assigneeIdentifier = assigneeIdentifier; + this.definitionPointIds = new ArrayList(); + definitionPointIds.add(definitionPointId); + } + + public void setRevokedAt(Date actionTimestamp) { + revokedAt = actionTimestamp; + } + + public void setRevokedBy(String actionByIdentifier) { + revokedBy = actionByIdentifier; + } + + public void setAssignedAt(Date actionTimestamp) { + assignedAt = actionTimestamp; + } + + public void setAssignedBy(String actionByIdentifier) { + assignedBy = actionByIdentifier; + } + + public String getRoleName() { + return roleName; + } + + public String getAssigneeIdentifier() { + return assigneeIdentifier; + } + + public String getAssignedBy() { + return assignedBy; + } + + public Date getAssignedAt() { + return assignedAt; + } + + public String getRevokedBy() { + return revokedBy; + } + + public Date getRevokedAt() { + return revokedAt; + } + + public List getDefinitionPointIds() { + return definitionPointIds; + } + + public void 
addDefinitionPointId(Long definitionPointId) { + definitionPointIds.add(definitionPointId); + } + + public String getDefinitionPointIdsAsString() { + return definitionPointIds.stream() + .map(Object::toString) + .collect(Collectors.joining(", ")); + } + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java index 33314ef4fd3..1cd59bbdf57 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java @@ -5,7 +5,7 @@ */ package edu.harvard.iq.dataverse; -import edu.harvard.iq.dataverse.ManagePermissionsPage.RoleAssignmentHistoryEntry; +import edu.harvard.iq.dataverse.DataverseRoleServiceBean.RoleAssignmentHistoryEntry; import edu.harvard.iq.dataverse.api.Util; import edu.harvard.iq.dataverse.authorization.AuthenticationProvider; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; @@ -544,51 +544,7 @@ private boolean assignRole(RoleAssignee ra, DataFile file, DataverseRole r) { public List getRoleAssignmentHistory() { if (roleAssignmentHistory == null) { - roleAssignmentHistory = new ArrayList<>(); - - List audits = em.createNamedQuery("RoleAssignmentAudit.findByOwnerId", RoleAssignmentAudit.class) - .setParameter("datasetId", dataset.getId()) - .getResultList(); - - Map historyMap = new HashMap<>(); - - for (RoleAssignmentAudit audit : audits) { - Long roleAssignmentId = audit.getRoleAssignmentId(); - RoleAssignmentHistoryEntry entry = historyMap.get(roleAssignmentId); - - if (entry == null) { - entry = new RoleAssignmentHistoryEntry(audit.getAssigneeIdentifier(), audit.getRoleAlias(), audit.getDefinitionPointId() ); - historyMap.put(roleAssignmentId, entry); - } - - if (audit.getActionType() == RoleAssignmentAudit.ActionType.ASSIGN) { - entry.setAssignedBy(audit.getActionByIdentifier()); - entry.setAssignedAt(audit.getActionTimestamp()); - } else if 
(audit.getActionType() == RoleAssignmentAudit.ActionType.REVOKE) { - entry.setRevokedBy(audit.getActionByIdentifier()); - entry.setRevokedAt(audit.getActionTimestamp()); - } - } - // Second pass: Combine entries with matching criteria - Map finalHistoryMap = new HashMap<>(); - for (RoleAssignmentHistoryEntry entry : historyMap.values()) { - String key = entry.getAssigneeIdentifier() + "|" + entry.getRoleName() + "|" + - entry.getAssignedBy() + "|" + entry.getAssignedAt() + "|" + - entry.getRevokedBy() + "|" + entry.getRevokedAt(); - - RoleAssignmentHistoryEntry existingEntry = finalHistoryMap.get(key); - if (existingEntry == null) { - finalHistoryMap.put(key, entry); - } else { - existingEntry.addDefinitionPointId(entry.getDefinitionPointIds().get(0)); - } - } - - roleAssignmentHistory.addAll(finalHistoryMap.values()); - roleAssignmentHistory.sort(Comparator - .comparing(RoleAssignmentHistoryEntry::getRevokedAt, Comparator.nullsLast(Comparator.naturalOrder())) - .thenComparing(RoleAssignmentHistoryEntry::getAssignedAt, Comparator.nullsLast(Comparator.naturalOrder())) - .reversed()); + roleAssignmentHistory = roleService.getChildRoleAssignmentHistory(dataset.getId()); } return roleAssignmentHistory; } diff --git a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java index ff638a88866..9401f112d51 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java @@ -35,7 +35,6 @@ import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; -import java.util.stream.Collectors; import jakarta.ejb.EJB; import jakarta.faces.application.FacesMessage; @@ -238,42 +237,13 @@ public void editRole(String roleId) { } /** Role Assignment History */ - private List roleAssignmentHistory; + private List roleAssignmentHistory; - public List getRoleAssignmentHistory() { + public List 
getRoleAssignmentHistory() { + if (roleAssignmentHistory == null) { - roleAssignmentHistory = new ArrayList<>(); - - List audits = em.createNamedQuery("RoleAssignmentAudit.findByDefinitionPointId", RoleAssignmentAudit.class) - .setParameter("definitionPointId", dvObject.getId()) - .getResultList(); - - Map historyMap = new HashMap<>(); - - for (RoleAssignmentAudit audit : audits) { - Long roleAssignmentId = audit.getRoleAssignmentId(); - RoleAssignmentHistoryEntry entry = historyMap.get(roleAssignmentId); - - if (entry == null) { - entry = new RoleAssignmentHistoryEntry(audit.getAssigneeIdentifier(), audit.getRoleAlias(), audit.getDefinitionPointId()); - historyMap.put(roleAssignmentId, entry); - } - - if (audit.getActionType() == RoleAssignmentAudit.ActionType.ASSIGN) { - entry.setAssignedBy(audit.getActionByIdentifier()); - entry.setAssignedAt(audit.getActionTimestamp()); - } else if (audit.getActionType() == RoleAssignmentAudit.ActionType.REVOKE) { - entry.setRevokedBy(audit.getActionByIdentifier()); - entry.setRevokedAt(audit.getActionTimestamp()); - } - } - - roleAssignmentHistory.addAll(historyMap.values()); - roleAssignmentHistory.sort(Comparator - .comparing(RoleAssignmentHistoryEntry::getRevokedAt, Comparator.nullsLast(Comparator.naturalOrder())) - .thenComparing(RoleAssignmentHistoryEntry::getAssignedAt, Comparator.nullsLast(Comparator.naturalOrder())) - .reversed()); - }; + roleAssignmentHistory = roleService.getRoleAssignmentHistory(dvObject.getId()); + } return roleAssignmentHistory; } @@ -763,75 +733,4 @@ public Long getId() { } } - - public static class RoleAssignmentHistoryEntry { - private String roleName; - private String assigneeIdentifier; - private String assignedBy; - private Date assignedAt; - private String revokedBy; - private Date revokedAt; - private List definitionPointIds; // New field - - public RoleAssignmentHistoryEntry(String assigneeIdentifier, String roleName, Long definitionPointId) { - this.roleName = roleName; - 
this.assigneeIdentifier = assigneeIdentifier; - this.definitionPointIds = new ArrayList(); - definitionPointIds.add(definitionPointId); - } - - public void setRevokedAt(Date actionTimestamp) { - revokedAt = actionTimestamp; - } - - public void setRevokedBy(String actionByIdentifier) { - revokedBy = actionByIdentifier; - } - - public void setAssignedAt(Date actionTimestamp) { - assignedAt = actionTimestamp; - } - - public void setAssignedBy(String actionByIdentifier) { - assignedBy = actionByIdentifier; - } - - public String getRoleName() { - return roleName; - } - - public String getAssigneeIdentifier() { - return assigneeIdentifier; - } - - public String getAssignedBy() { - return assignedBy; - } - - public Date getAssignedAt() { - return assignedAt; - } - - public String getRevokedBy() { - return revokedBy; - } - - public Date getRevokedAt() { - return revokedAt; - } - - public List getDefinitionPointIds() { - return definitionPointIds; - } - - public void addDefinitionPointId(Long definitionPointId) { - definitionPointIds.add(definitionPointId); - } - - public String getDefinitionPointIdsAsString() { - return definitionPointIds.stream() - .map(Object::toString) - .collect(Collectors.joining(", ")); - } - } } From 859d0ab515f97eb449adad1f37bdb2daf1b3f067 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 7 Jul 2025 17:22:20 -0400 Subject: [PATCH 039/634] api call --- .../harvard/iq/dataverse/api/Datasets.java | 55 +++++++++++++++++++ 1 file changed, 55 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 878ebde0504..b818595400a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -3,6 +3,7 @@ import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.DatasetLock.Reason; import edu.harvard.iq.dataverse.DatasetVersion.VersionState; +import 
edu.harvard.iq.dataverse.DataverseRoleServiceBean.RoleAssignmentHistoryEntry; import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; import edu.harvard.iq.dataverse.api.auth.AuthRequired; import edu.harvard.iq.dataverse.api.dto.RoleAssignmentDTO; @@ -5988,5 +5989,59 @@ public Response deleteVersionNote(@Context ContainerRequestContext crc, @PathPar return ok("Note deleted"); }, getRequestUser(crc)); } + + @GET + @AuthRequired + @Path("{identifier}/permissions/history") + public Response getRoleAssignmentHistory(@Context ContainerRequestContext crc, @PathParam("identifier") String id) { + return response(req -> { + Dataset dataset = findDatasetOrDie(id); + + // user is authenticated + AuthenticatedUser authenticatedUser = null; + try { + authenticatedUser = getRequestAuthenticatedUserOrDie(crc); + } catch (WrappedResponse ex) { + return error(Status.UNAUTHORIZED, "Authentication is required."); + } + + // Check if the user has permission to manage permissions for this dataset + if (!permissionService.userOn(authenticatedUser, dataset).has(Permission.ManageDatasetPermissions)) { + return error(Status.FORBIDDEN, "You do not have permission to view the role assignment history for this dataset"); + } + + // Get the role assignment history + ManagePermissionsPage managePermissionsPage = new ManagePermissionsPage(); + managePermissionsPage.setDvObject(dataset); + List history = managePermissionsPage.getRoleAssignmentHistory(); + + // Convert to JSON array + JsonArrayBuilder jsonArray = Json.createArrayBuilder(); + for (DataverseRoleServiceBean.RoleAssignmentHistoryEntry entry : history) { + JsonObjectBuilder job = Json.createObjectBuilder() + .add("assigneeIdentifier", entry.getAssigneeIdentifier()) + .add("roleName", entry.getRoleName()) + .add("assignedBy", entry.getAssignedBy()) + .add("assignedAt", entry.getAssignedAt().toString()); + + // Add revocation info if available + if (entry.getRevokedBy() != null) { + job.add("revokedBy", entry.getRevokedBy()); + } 
else { + job.add("revokedBy", JsonValue.NULL); + } + + if (entry.getRevokedAt() != null) { + job.add("revokedAt", entry.getRevokedAt().toString()); + } else { + job.add("revokedAt", JsonValue.NULL); + } + + jsonArray.add(job); + } + + return ok(jsonArray); + }, getRequestUser(crc)); + } } From 1c87cef8bfbcd437fde6454d20ba75c7665d7e1f Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 16 Jul 2025 17:07:42 -0400 Subject: [PATCH 040/634] remove unused imports --- .../java/edu/harvard/iq/dataverse/ManagePermissionsPage.java | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java index 9401f112d51..4fbf520154b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java @@ -26,12 +26,9 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; -import java.util.Comparator; import java.util.Date; -import java.util.HashMap; import java.util.LinkedList; import java.util.List; -import java.util.Map; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; From a6a1ab1890e2505977088ca067554b838cc6d014 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 16 Jul 2025 17:07:54 -0400 Subject: [PATCH 041/634] api call with i18n and csv option --- .../harvard/iq/dataverse/api/Datasets.java | 76 ++++++++++++++----- 1 file changed, 59 insertions(+), 17 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index b818595400a..67f6e552057 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -5993,10 +5993,13 @@ public Response deleteVersionNote(@Context ContainerRequestContext crc, @PathPar @GET @AuthRequired 
@Path("{identifier}/permissions/history") - public Response getRoleAssignmentHistory(@Context ContainerRequestContext crc, @PathParam("identifier") String id) { + @Produces({ MediaType.APPLICATION_JSON, "text/csv" }) + public Response getRoleAssignmentHistory(@Context ContainerRequestContext crc, + @PathParam("identifier") String id, + @Context HttpHeaders headers) { return response(req -> { Dataset dataset = findDatasetOrDie(id); - + // user is authenticated AuthenticatedUser authenticatedUser = null; try { @@ -6004,42 +6007,81 @@ public Response getRoleAssignmentHistory(@Context ContainerRequestContext crc, @ } catch (WrappedResponse ex) { return error(Status.UNAUTHORIZED, "Authentication is required."); } - + // Check if the user has permission to manage permissions for this dataset if (!permissionService.userOn(authenticatedUser, dataset).has(Permission.ManageDatasetPermissions)) { return error(Status.FORBIDDEN, "You do not have permission to view the role assignment history for this dataset"); } - + // Get the role assignment history - ManagePermissionsPage managePermissionsPage = new ManagePermissionsPage(); - managePermissionsPage.setDvObject(dataset); - List history = managePermissionsPage.getRoleAssignmentHistory(); - - // Convert to JSON array + List history = dataverseRoleService.getRoleAssignmentHistory(dataset.getId()); + + List acceptedTypes = headers.getAcceptableMediaTypes(); + boolean wantCSV = acceptedTypes.stream() + .anyMatch(mt -> mt.toString().equals("text/csv")); + + if (wantCSV) { + String assigneeHeader = BundleUtil.getStringFromBundle("datasets.api.permissions.history.assignee"); + String roleHeader = BundleUtil.getStringFromBundle("datasets.api.permissions.history.role"); + String assignedByHeader = BundleUtil.getStringFromBundle("datasets.api.permissions.history.assignedBy"); + String assignedAtHeader = BundleUtil.getStringFromBundle("datasets.api.permissions.history.assignedAt"); + String revokedByHeader = 
BundleUtil.getStringFromBundle("datasets.api.permissions.history.revokedBy"); + String revokedAtHeader = BundleUtil.getStringFromBundle("datasets.api.permissions.history.revokedAt"); + + // Generate CSV response + StringBuilder csvBuilder = new StringBuilder(); + // Add CSV header with internationalized column names + csvBuilder.append(assigneeHeader).append(",") + .append(roleHeader).append(",") + .append(assignedByHeader).append(",") + .append(assignedAtHeader).append(",") + .append(revokedByHeader).append(",") + .append(revokedAtHeader).append("\n"); + + // Add data rows + for (DataverseRoleServiceBean.RoleAssignmentHistoryEntry entry : history) { + csvBuilder.append(entry.getAssigneeIdentifier()).append(","); + csvBuilder.append(entry.getRoleName()).append(","); + csvBuilder.append(entry.getAssignedBy()).append(","); + csvBuilder.append(entry.getAssignedAt().toString()).append(","); + + // Handle nullable fields + csvBuilder.append(entry.getRevokedBy() != null ? entry.getRevokedBy() : "").append(","); + csvBuilder.append(entry.getRevokedAt() != null ? 
entry.getRevokedAt().toString() : ""); + csvBuilder.append("\n"); + } + + return Response.ok() + .entity(csvBuilder.toString()) + .type("text/csv") + .header("Content-Disposition", "attachment; filename=permissions_history.csv") + .build(); + } + // Or Json by default JsonArrayBuilder jsonArray = Json.createArrayBuilder(); for (DataverseRoleServiceBean.RoleAssignmentHistoryEntry entry : history) { JsonObjectBuilder job = Json.createObjectBuilder() - .add("assigneeIdentifier", entry.getAssigneeIdentifier()) - .add("roleName", entry.getRoleName()) - .add("assignedBy", entry.getAssignedBy()) - .add("assignedAt", entry.getAssignedAt().toString()); - + .add("assigneeIdentifier", entry.getAssigneeIdentifier()) + .add("roleName", entry.getRoleName()) + .add("assignedBy", entry.getAssignedBy()) + .add("assignedAt", entry.getAssignedAt().toString()); + // Add revocation info if available if (entry.getRevokedBy() != null) { job.add("revokedBy", entry.getRevokedBy()); } else { job.add("revokedBy", JsonValue.NULL); } - + if (entry.getRevokedAt() != null) { job.add("revokedAt", entry.getRevokedAt().toString()); } else { job.add("revokedAt", JsonValue.NULL); } - + jsonArray.add(job); } - + return ok(jsonArray); }, getRequestUser(crc)); } From c2c54dce9229fee7a4b5cb2db44528fd411d7dbf Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 16 Jul 2025 17:08:23 -0400 Subject: [PATCH 042/634] dataset/dataverse button to download csv --- src/main/webapp/permissions-manage.xhtml | 35 ++++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/src/main/webapp/permissions-manage.xhtml b/src/main/webapp/permissions-manage.xhtml index d44b0bb47cc..ef9d9345b32 100644 --- a/src/main/webapp/permissions-manage.xhtml +++ b/src/main/webapp/permissions-manage.xhtml @@ -207,6 +207,41 @@
+
+ + #{bundle['dataverse.permissions.history.download']} + + + #{bundle['dataverse.permissions.history.download']} + +
+ + Date: Thu, 17 Jul 2025 13:46:32 -0400 Subject: [PATCH 043/634] refactor, apis for all three types --- .../iq/dataverse/api/AbstractApiBean.java | 90 +++++++++++++++++++ .../harvard/iq/dataverse/api/Datasets.java | 76 +--------------- .../harvard/iq/dataverse/api/Dataverses.java | 23 +++++ .../edu/harvard/iq/dataverse/api/Files.java | 23 +++++ 4 files changed, 137 insertions(+), 75 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index 018657bff4d..a6562b7c1d9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -5,6 +5,7 @@ import static edu.harvard.iq.dataverse.api.Datasets.handleVersion; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.DataverseRole; +import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.RoleAssignee; import edu.harvard.iq.dataverse.authorization.groups.GroupServiceBean; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; @@ -45,6 +46,7 @@ import jakarta.validation.constraints.NotNull; import jakarta.ws.rs.container.ContainerRequestContext; import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.HttpHeaders; import jakarta.ws.rs.core.MediaType; import jakarta.ws.rs.core.Response; import jakarta.ws.rs.core.Response.ResponseBuilder; @@ -643,6 +645,94 @@ protected DatasetFieldType findDatasetFieldType(String idtf) throws NumberFormat return isNumeric(idtf) ? 
datasetFieldSvc.find(Long.parseLong(idtf)) : datasetFieldSvc.findByNameOpt(idtf); } + + /** + * Gets role assignment history for a DvObject (Dataset, Dataverse, or DataFile) + * + * @param dvObject The DvObject to get history for + * @param authenticatedUser The authenticated user making the request + * @param headers HTTP headers from the request (for content negotiation) + * @return Response containing history in JSON or CSV format + */ + protected Response getRoleAssignmentHistoryResponse(DvObject dvObject, AuthenticatedUser authenticatedUser, HttpHeaders headers) { + // Check if the user has permission to manage permissions for this object + if (!permissionSvc.userOn(authenticatedUser, dvObject).has(Permission.ManageDatasetPermissions)) { + return error(Status.FORBIDDEN, "You do not have permission to view the role assignment history for this " + dvObject.getClass().getSimpleName().toLowerCase()); + } + + // Get the role assignment history + List history = rolesSvc.getRoleAssignmentHistory(dvObject.getId()); + + List acceptedTypes = headers.getAcceptableMediaTypes(); + boolean wantCSV = acceptedTypes.stream() + .anyMatch(mt -> mt.toString().equals("text/csv")); + + if (wantCSV) { + String assigneeHeader = BundleUtil.getStringFromBundle("datasets.api.permissions.history.assignee"); + String roleHeader = BundleUtil.getStringFromBundle("datasets.api.permissions.history.role"); + String assignedByHeader = BundleUtil.getStringFromBundle("datasets.api.permissions.history.assignedBy"); + String assignedAtHeader = BundleUtil.getStringFromBundle("datasets.api.permissions.history.assignedAt"); + String revokedByHeader = BundleUtil.getStringFromBundle("datasets.api.permissions.history.revokedBy"); + String revokedAtHeader = BundleUtil.getStringFromBundle("datasets.api.permissions.history.revokedAt"); + + // Generate CSV response + StringBuilder csvBuilder = new StringBuilder(); + // Add CSV header with internationalized column names + 
csvBuilder.append(assigneeHeader).append(",") + .append(roleHeader).append(",") + .append(assignedByHeader).append(",") + .append(assignedAtHeader).append(",") + .append(revokedByHeader).append(",") + .append(revokedAtHeader).append("\n"); + + // Add data rows + for (DataverseRoleServiceBean.RoleAssignmentHistoryEntry entry : history) { + csvBuilder.append(entry.getAssigneeIdentifier()).append(","); + csvBuilder.append(entry.getRoleName()).append(","); + csvBuilder.append(entry.getAssignedBy()).append(","); + csvBuilder.append(entry.getAssignedAt().toString()).append(","); + + // Handle nullable fields + csvBuilder.append(entry.getRevokedBy() != null ? entry.getRevokedBy() : "").append(","); + csvBuilder.append(entry.getRevokedAt() != null ? entry.getRevokedAt().toString() : ""); + csvBuilder.append("\n"); + } + + String objectType = dvObject.getClass().getSimpleName().toLowerCase(); + return Response.ok() + .entity(csvBuilder.toString()) + .type("text/csv") + .header("Content-Disposition", "attachment; filename=" + objectType + "_permissions_history.csv") + .build(); + } + + // Or Json by default + JsonArrayBuilder jsonArray = Json.createArrayBuilder(); + for (DataverseRoleServiceBean.RoleAssignmentHistoryEntry entry : history) { + JsonObjectBuilder job = Json.createObjectBuilder() + .add("assigneeIdentifier", entry.getAssigneeIdentifier()) + .add("roleName", entry.getRoleName()) + .add("assignedBy", entry.getAssignedBy()) + .add("assignedAt", entry.getAssignedAt().toString()); + + // Add revocation info if available + if (entry.getRevokedBy() != null) { + job.add("revokedBy", entry.getRevokedBy()); + } else { + job.add("revokedBy", JsonValue.NULL); + } + + if (entry.getRevokedAt() != null) { + job.add("revokedAt", entry.getRevokedAt().toString()); + } else { + job.add("revokedAt", JsonValue.NULL); + } + + jsonArray.add(job); + } + + return ok(jsonArray); + } /* =================== *\ * Command Execution * diff --git 
a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 67f6e552057..9bbf191dda1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -6008,81 +6008,7 @@ public Response getRoleAssignmentHistory(@Context ContainerRequestContext crc, return error(Status.UNAUTHORIZED, "Authentication is required."); } - // Check if the user has permission to manage permissions for this dataset - if (!permissionService.userOn(authenticatedUser, dataset).has(Permission.ManageDatasetPermissions)) { - return error(Status.FORBIDDEN, "You do not have permission to view the role assignment history for this dataset"); - } - - // Get the role assignment history - List history = dataverseRoleService.getRoleAssignmentHistory(dataset.getId()); - - List acceptedTypes = headers.getAcceptableMediaTypes(); - boolean wantCSV = acceptedTypes.stream() - .anyMatch(mt -> mt.toString().equals("text/csv")); - - if (wantCSV) { - String assigneeHeader = BundleUtil.getStringFromBundle("datasets.api.permissions.history.assignee"); - String roleHeader = BundleUtil.getStringFromBundle("datasets.api.permissions.history.role"); - String assignedByHeader = BundleUtil.getStringFromBundle("datasets.api.permissions.history.assignedBy"); - String assignedAtHeader = BundleUtil.getStringFromBundle("datasets.api.permissions.history.assignedAt"); - String revokedByHeader = BundleUtil.getStringFromBundle("datasets.api.permissions.history.revokedBy"); - String revokedAtHeader = BundleUtil.getStringFromBundle("datasets.api.permissions.history.revokedAt"); - - // Generate CSV response - StringBuilder csvBuilder = new StringBuilder(); - // Add CSV header with internationalized column names - csvBuilder.append(assigneeHeader).append(",") - .append(roleHeader).append(",") - .append(assignedByHeader).append(",") - .append(assignedAtHeader).append(",") - 
.append(revokedByHeader).append(",") - .append(revokedAtHeader).append("\n"); - - // Add data rows - for (DataverseRoleServiceBean.RoleAssignmentHistoryEntry entry : history) { - csvBuilder.append(entry.getAssigneeIdentifier()).append(","); - csvBuilder.append(entry.getRoleName()).append(","); - csvBuilder.append(entry.getAssignedBy()).append(","); - csvBuilder.append(entry.getAssignedAt().toString()).append(","); - - // Handle nullable fields - csvBuilder.append(entry.getRevokedBy() != null ? entry.getRevokedBy() : "").append(","); - csvBuilder.append(entry.getRevokedAt() != null ? entry.getRevokedAt().toString() : ""); - csvBuilder.append("\n"); - } - - return Response.ok() - .entity(csvBuilder.toString()) - .type("text/csv") - .header("Content-Disposition", "attachment; filename=permissions_history.csv") - .build(); - } - // Or Json by default - JsonArrayBuilder jsonArray = Json.createArrayBuilder(); - for (DataverseRoleServiceBean.RoleAssignmentHistoryEntry entry : history) { - JsonObjectBuilder job = Json.createObjectBuilder() - .add("assigneeIdentifier", entry.getAssigneeIdentifier()) - .add("roleName", entry.getRoleName()) - .add("assignedBy", entry.getAssignedBy()) - .add("assignedAt", entry.getAssignedAt().toString()); - - // Add revocation info if available - if (entry.getRevokedBy() != null) { - job.add("revokedBy", entry.getRevokedBy()); - } else { - job.add("revokedBy", JsonValue.NULL); - } - - if (entry.getRevokedAt() != null) { - job.add("revokedAt", entry.getRevokedAt().toString()); - } else { - job.add("revokedAt", JsonValue.NULL); - } - - jsonArray.add(job); - } - - return ok(jsonArray); + return getRoleAssignmentHistoryResponse(dataset, authenticatedUser, headers); }, getRequestUser(crc)); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index b1d56b6b8a9..cc850dee1f4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -71,6 +71,7 @@ import jakarta.servlet.http.HttpServletResponse; import jakarta.ws.rs.WebApplicationException; import jakarta.ws.rs.core.Context; +import jakarta.ws.rs.core.HttpHeaders; import jakarta.ws.rs.core.StreamingOutput; import org.glassfish.jersey.media.multipart.FormDataBodyPart; import org.glassfish.jersey.media.multipart.FormDataContentDisposition; @@ -1931,4 +1932,26 @@ public Response deleteFeaturedItems(@Context ContainerRequestContext crc, @PathP return e.getResponse(); } } + + @GET + @AuthRequired + @Path("{identifier}/permissions/history") + @Produces({ MediaType.APPLICATION_JSON, "text/csv" }) + public Response getRoleAssignmentHistory(@Context ContainerRequestContext crc, + @PathParam("identifier") String id, + @Context HttpHeaders headers) { + return response(req -> { + Dataverse dataverse = findDataverseOrDie(id); + + // user is authenticated + AuthenticatedUser authenticatedUser = null; + try { + authenticatedUser = getRequestAuthenticatedUserOrDie(crc); + } catch (WrappedResponse ex) { + return error(Status.UNAUTHORIZED, "Authentication is required."); + } + + return getRoleAssignmentHistoryResponse(dataverse, authenticatedUser, headers); + }, getRequestUser(crc)); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index 61a69236f57..59470530047 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -60,6 +60,7 @@ import jakarta.ws.rs.core.HttpHeaders; import jakarta.ws.rs.core.MediaType; import jakarta.ws.rs.core.Response; +import jakarta.ws.rs.core.Response.Status; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.jsonDT; import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST; @@ -1048,4 +1049,26 @@ public Response getFileVersionsList(@Context ContainerRequestContext crc, @PathP return ex.getResponse(); } } + + 
@GET + @AuthRequired + @Path("{identifier}/permissions/history") + @Produces({ MediaType.APPLICATION_JSON, "text/csv" }) + public Response getRoleAssignmentHistory(@Context ContainerRequestContext crc, + @PathParam("identifier") String id, + @Context HttpHeaders headers) { + return response(req -> { + DataFile dataFile = findDataFileOrDie(id); + + // user is authenticated + AuthenticatedUser authenticatedUser = null; + try { + authenticatedUser = getRequestAuthenticatedUserOrDie(crc); + } catch (WrappedResponse ex) { + return error(Status.UNAUTHORIZED, "Authentication is required."); + } + + return getRoleAssignmentHistoryResponse(dataFile, authenticatedUser, headers); + }, getRequestUser(crc)); + } } From 0989e35999f93a260a92a2fe2777cb5f3c67806b Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Thu, 17 Jul 2025 13:46:53 -0400 Subject: [PATCH 044/634] download csv buttons --- .../webapp/permissions-manage-files.xhtml | 9 +++++++ src/main/webapp/permissions-manage.xhtml | 26 +++---------------- .../webapp/resources/js/rahistory_utils.js | 17 ++++++++++++ 3 files changed, 29 insertions(+), 23 deletions(-) create mode 100644 src/main/webapp/resources/js/rahistory_utils.js diff --git a/src/main/webapp/permissions-manage-files.xhtml b/src/main/webapp/permissions-manage-files.xhtml index 1a93527dfef..9aaf306532b 100644 --- a/src/main/webapp/permissions-manage-files.xhtml +++ b/src/main/webapp/permissions-manage-files.xhtml @@ -9,6 +9,7 @@ xmlns:jsf="http://xmlns.jcp.org/jsf" xmlns:iqbs="http://xmlns.jcp.org/jsf/composite/iqbs"> + @@ -228,6 +229,14 @@
+ +
+ + #{bundle['dataverse.permissions.history.download']} + +
diff --git a/src/main/webapp/permissions-manage.xhtml b/src/main/webapp/permissions-manage.xhtml index ef9d9345b32..a860f19bc07 100644 --- a/src/main/webapp/permissions-manage.xhtml +++ b/src/main/webapp/permissions-manage.xhtml @@ -8,6 +8,7 @@ xmlns:o="http://omnifaces.org/ui" xmlns:iqbs="http://xmlns.jcp.org/jsf/composite/iqbs"> + @@ -211,37 +212,16 @@ + onclick="downloadRAHistoryCSV(this.href, 'dataset_permissions_history.csv'); return false;"> #{bundle['dataverse.permissions.history.download']} + onclick="downloadRAHistoryCSV(this.href, 'dataverse_permissions_history.csv'); return false;"> #{bundle['dataverse.permissions.history.download']}
- - response.blob()) + .then(blob => { + const link = document.createElement('a'); + link.href = URL.createObjectURL(blob); + link.download = filename; + document.body.appendChild(link); + link.click(); + document.body.removeChild(link); + }) + .catch(error => console.error('Error downloading CSV:', error)); +} \ No newline at end of file From db6e94b32a3f66d1436f24e665b36af2666b5573 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Thu, 17 Jul 2025 13:47:08 -0400 Subject: [PATCH 045/634] strings for csv headers and button --- src/main/java/propertyFiles/Bundle.properties | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 10cf777daf5..93bb79ff491 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -1213,6 +1213,7 @@ dataverse.permissions.history.revokedBy=Revoked By dataverse.permissions.history.revokedAt=Revocation Date dataverse.permissions.history.definedOn=Defined On dataverse.permissions.history.files={0} Files +dataverse.permissions.history.download=Download (CSV) # permissions-manage-files.xhtml dataverse.permissionsFiles.title=Restricted File Permissions dataverse.permissionsFiles.usersOrGroups=Users/Groups @@ -3229,6 +3230,13 @@ updateDatasetFieldsCommand.api.processDatasetUpdate.parseError=Error parsing dat #AbstractApiBean.java abstractApiBean.error.datasetInternalVersionNumberIsOutdated=Dataset internal version number {0} is outdated +# Role Assignment History (used in the AbstractApiBean) +datasets.api.permissions.history.assignee=Assignee +datasets.api.permissions.history.role=Role +datasets.api.permissions.history.assignedBy=Assigned By +datasets.api.permissions.history.assignedAt=Assigned At +datasets.api.permissions.history.revokedBy=Revoked By +datasets.api.permissions.history.revokedAt=Revoked At #RoleAssigneeServiceBean.java 
roleAssigneeServiceBean.error.dataverseRequestCannotBeNull=DataverseRequest cannot be null. \ No newline at end of file From 7e468f44bc3e743a06fdf9d1c1b9cc1b4504d358 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Mon, 21 Jul 2025 17:24:21 -0400 Subject: [PATCH 046/634] try signed URLs for CSV buttons, fix files reporting/api --- .../dataverse/DataverseRoleServiceBean.java | 2 +- .../dataverse/ManageFilePermissionsPage.java | 44 +++++++++++++- .../iq/dataverse/ManagePermissionsPage.java | 60 +++++++++++++++++++ .../iq/dataverse/api/AbstractApiBean.java | 26 +++++--- .../harvard/iq/dataverse/api/Datasets.java | 29 +++++---- .../harvard/iq/dataverse/api/Dataverses.java | 2 +- .../edu/harvard/iq/dataverse/api/Files.java | 22 ------- src/main/webapp/permissions-manage.xhtml | 9 +-- .../webapp/resources/js/rahistory_utils.js | 24 +++++++- 9 files changed, 165 insertions(+), 53 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java index 68fee4aacad..257985dc65a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java @@ -393,7 +393,7 @@ public List getRoleAssignmentHistory(Long definition * @param datasetId The ID of the dataset * @return List of role assignment history entries */ - public List getChildRoleAssignmentHistory(Long datasetId) { + public List getFilesRoleAssignmentHistory(Long datasetId) { List audits = em.createNamedQuery("RoleAssignmentAudit.findByOwnerId", RoleAssignmentAudit.class) .setParameter("datasetId", datasetId) .getResultList(); diff --git a/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java index 1cd59bbdf57..574ea0a9664 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java @@ -15,14 +15,20 @@ import edu.harvard.iq.dataverse.authorization.RoleAssigneeDisplayInfo; import edu.harvard.iq.dataverse.authorization.groups.GroupServiceBean; import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupServiceBean; +import edu.harvard.iq.dataverse.authorization.users.ApiToken; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.PermissionException; import edu.harvard.iq.dataverse.engine.command.impl.AssignRoleCommand; import edu.harvard.iq.dataverse.engine.command.impl.RevokeRoleCommand; +import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.DateUtil; import edu.harvard.iq.dataverse.util.JsfHelper; +import edu.harvard.iq.dataverse.util.SystemConfig; +import edu.harvard.iq.dataverse.util.UrlSignerUtil; + import static edu.harvard.iq.dataverse.util.JsfHelper.JH; import java.sql.Timestamp; import java.util.*; @@ -544,7 +550,7 @@ private boolean assignRole(RoleAssignee ra, DataFile file, DataverseRole r) { public List getRoleAssignmentHistory() { if (roleAssignmentHistory == null) { - roleAssignmentHistory = roleService.getChildRoleAssignmentHistory(dataset.getId()); + roleAssignmentHistory = roleService.getFilesRoleAssignmentHistory(dataset.getId()); } return roleAssignmentHistory; } @@ -578,7 +584,43 @@ public void setRenderFileMessages(boolean renderFileMessages) { this.renderFileMessages = renderFileMessages; } + public String getsignedUrlForRAHistoryCsv() { + String apiPath = "/api/v1/datasets/" + dataset.getId() + "/files/permissions/history"; + + try { + // Get the application URL from the system config + String baseUrl = 
SystemConfig.getDataverseSiteUrlStatic(); + if (baseUrl.endsWith("/")) { + baseUrl = baseUrl.substring(0, baseUrl.length() - 1); + } + + // Construct the full URL + String fullApiPath = baseUrl + apiPath; + + // Generate a signed URL with the user's API token + User user = session.getUser(); + String key = null; + if (user instanceof AuthenticatedUser) { + ApiToken apiToken = authenticationService.findApiTokenByUser((AuthenticatedUser) user); + if (apiToken != null && !apiToken.isExpired() && !apiToken.isDisabled()) { + key = apiToken.getTokenString(); + } + } + key = JvmSettings.API_SIGNING_SECRET.lookupOptional().orElse("") + key; + if(key.length() >= 36) { + return UrlSignerUtil.signUrl(fullApiPath, 10, user.getIdentifier(), "GET", key); + } + } catch (Exception e) { + logger.log(Level.SEVERE, "Error generating signed URL for permissions history CSV: " + e.getMessage(), e); + return null; + } + return null; + } + public String getPermissionsHistoryFilename() { + // For datasets, replace colons in the PID with underscores + return dataset.getGlobalId().asString().replace(":", "_") + "_files_permissions_history.csv"; + } // inner class used fordisplay of role assignments diff --git a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java index 4fbf520154b..2a08b64fed6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java @@ -11,17 +11,24 @@ import edu.harvard.iq.dataverse.authorization.groups.impl.builtin.AuthenticatedUsers; import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroup; import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupServiceBean; +import edu.harvard.iq.dataverse.authorization.users.ApiToken; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.authorization.users.User; 
import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.PermissionException; import edu.harvard.iq.dataverse.engine.command.impl.AssignRoleCommand; import edu.harvard.iq.dataverse.engine.command.impl.CreateRoleCommand; import edu.harvard.iq.dataverse.engine.command.impl.RevokeRoleCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseDefaultContributorRoleCommand; +import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.JsfHelper; import static edu.harvard.iq.dataverse.util.JsfHelper.JH; import edu.harvard.iq.dataverse.util.StringUtil; +import edu.harvard.iq.dataverse.util.SystemConfig; +import edu.harvard.iq.dataverse.util.URLTokenUtil; +import edu.harvard.iq.dataverse.util.UrlSignerUtil; + import java.sql.Timestamp; import java.util.ArrayList; import java.util.Arrays; @@ -688,6 +695,59 @@ public Boolean getRenderRoleMessages() { public void setRenderRoleMessages(Boolean renderRoleMessages) { this.renderRoleMessages = renderRoleMessages; } + + public String getsignedUrlForRAHistoryCsv() { + String apiPath; + + if (dvObject instanceof Dataverse dv) { + // For Dataverses, use the dataverses API endpoint with the alias + apiPath = "/api/v1/dataverses/" + dv.getAlias() + "/permissions/history"; + } else if (dvObject instanceof Dataset) { + // For Datasets, use the datasets API endpoint with the ID + apiPath = "/api/v1/datasets/" + dvObject.getId() + "/permissions/history"; + } else { + // For other types (like DataFile), return null or a default path + return null; + } + + try { + // Get the application URL from the system config + String baseUrl = SystemConfig.getDataverseSiteUrlStatic(); + if (baseUrl.endsWith("/")) { + baseUrl = baseUrl.substring(0, baseUrl.length() - 1); + } + + // Construct the full URL + String fullApiPath = baseUrl + apiPath; + + // Generate a signed URL with the 
user's API token + User user = session.getUser(); + String key = null; + if (user instanceof AuthenticatedUser) { + ApiToken apiToken = authenticationService.findApiTokenByUser((AuthenticatedUser) user); + if (apiToken != null && !apiToken.isExpired() && !apiToken.isDisabled()) { + key = apiToken.getTokenString(); + } + } + key = JvmSettings.API_SIGNING_SECRET.lookupOptional().orElse("") + key; + if(key.length() >= 36) { + return UrlSignerUtil.signUrl(fullApiPath, 10, user.getIdentifier(), "GET", key); + } + } catch (Exception e) { + logger.log(Level.SEVERE, "Error generating signed URL for permissions history CSV: " + e.getMessage(), e); + return null; + } + return null; + } + + public String getPermissionsHistoryFilename() { + if (dvObject instanceof Dataverse dv) { + return dv.getAlias() + "_permissions_history.csv"; + } else { + // For datasets, replace colons in the PID with underscores + return dvObject.getGlobalId().asString().replace(":", "_") + "_permissions_history.csv"; + } + } // inner class used for display of role assignments public static class RoleAssignmentRow { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index a6562b7c1d9..afbd82dedc2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -654,14 +654,22 @@ protected DatasetFieldType findDatasetFieldType(String idtf) throws NumberFormat * @param headers HTTP headers from the request (for content negotiation) * @return Response containing history in JSON or CSV format */ - protected Response getRoleAssignmentHistoryResponse(DvObject dvObject, AuthenticatedUser authenticatedUser, HttpHeaders headers) { + protected Response getRoleAssignmentHistoryResponse(DvObject dvObject, AuthenticatedUser authenticatedUser, boolean forFiles, HttpHeaders headers) { // Check if the user has permission to manage permissions 
for this object if (!permissionSvc.userOn(authenticatedUser, dvObject).has(Permission.ManageDatasetPermissions)) { return error(Status.FORBIDDEN, "You do not have permission to view the role assignment history for this " + dvObject.getClass().getSimpleName().toLowerCase()); } // Get the role assignment history - List history = rolesSvc.getRoleAssignmentHistory(dvObject.getId()); + List history = null; + String definitionPoint; + if (forFiles == false) { + history = rolesSvc.getRoleAssignmentHistory(dvObject.getId()); + definitionPoint = BundleUtil.getStringFromBundle("datasets.api.permissions.history.definitionPoint"); + } else { + history = rolesSvc.getFilesRoleAssignmentHistory(dvObject.getId()); + definitionPoint = BundleUtil.getStringFromBundle("datasets.api.permissions.history.definitionPoints"); + } List acceptedTypes = headers.getAcceptableMediaTypes(); boolean wantCSV = acceptedTypes.stream() @@ -678,7 +686,9 @@ protected Response getRoleAssignmentHistoryResponse(DvObject dvObject, Authentic // Generate CSV response StringBuilder csvBuilder = new StringBuilder(); // Add CSV header with internationalized column names - csvBuilder.append(assigneeHeader).append(",") + csvBuilder + .append(definitionPoint).append(",") + .append(assigneeHeader).append(",") .append(roleHeader).append(",") .append(assignedByHeader).append(",") .append(assignedAtHeader).append(",") @@ -687,10 +697,11 @@ protected Response getRoleAssignmentHistoryResponse(DvObject dvObject, Authentic // Add data rows for (DataverseRoleServiceBean.RoleAssignmentHistoryEntry entry : history) { - csvBuilder.append(entry.getAssigneeIdentifier()).append(","); - csvBuilder.append(entry.getRoleName()).append(","); - csvBuilder.append(entry.getAssignedBy()).append(","); - csvBuilder.append(entry.getAssignedAt().toString()).append(","); + csvBuilder.append(entry.getDefinitionPointIdsAsString()).append(",") + .append(entry.getAssigneeIdentifier()).append(",") + .append(entry.getRoleName()).append(",") + 
.append(entry.getAssignedBy()).append(",") + .append(entry.getAssignedAt().toString()).append(","); // Handle nullable fields csvBuilder.append(entry.getRevokedBy() != null ? entry.getRevokedBy() : "").append(","); @@ -710,6 +721,7 @@ protected Response getRoleAssignmentHistoryResponse(DvObject dvObject, Authentic JsonArrayBuilder jsonArray = Json.createArrayBuilder(); for (DataverseRoleServiceBean.RoleAssignmentHistoryEntry entry : history) { JsonObjectBuilder job = Json.createObjectBuilder() + .add(definitionPoint, entry.getDefinitionPointIdsAsString()) .add("assigneeIdentifier", entry.getAssigneeIdentifier()) .add("roleName", entry.getRoleName()) .add("assignedBy", entry.getAssignedBy()) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index e657ac02175..e2b8c7f5c80 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -5,6 +5,7 @@ import edu.harvard.iq.dataverse.DatasetVersion.VersionState; import edu.harvard.iq.dataverse.DataverseRoleServiceBean.RoleAssignmentHistoryEntry; import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; +import edu.harvard.iq.dataverse.api.AbstractApiBean.WrappedResponse; import edu.harvard.iq.dataverse.api.auth.AuthRequired; import edu.harvard.iq.dataverse.api.dto.RoleAssignmentDTO; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; @@ -5985,23 +5986,31 @@ public Response deleteVersionNote(@Context ContainerRequestContext crc, @PathPar @GET @AuthRequired @Path("{identifier}/permissions/history") + public Response getRoleAssignmentHistory(@Context ContainerRequestContext crc, @PathParam("identifier") String id, @Context HttpHeaders headers) { + return response(req -> { + Dataset dataset = findDatasetOrDie(id); + + // user is authenticated + AuthenticatedUser authenticatedUser = getRequestAuthenticatedUserOrDie(crc); + + return 
getRoleAssignmentHistoryResponse(dataset, authenticatedUser, false, headers); + }, getRequestUser(crc)); + } + + @GET + @AuthRequired + @Path("{identifier}/files/permissions/history") @Produces({ MediaType.APPLICATION_JSON, "text/csv" }) - public Response getRoleAssignmentHistory(@Context ContainerRequestContext crc, + public Response getFilesRoleAssignmentHistory(@Context ContainerRequestContext crc, @PathParam("identifier") String id, @Context HttpHeaders headers) { return response(req -> { Dataset dataset = findDatasetOrDie(id); - + // user is authenticated - AuthenticatedUser authenticatedUser = null; - try { - authenticatedUser = getRequestAuthenticatedUserOrDie(crc); - } catch (WrappedResponse ex) { - return error(Status.UNAUTHORIZED, "Authentication is required."); - } + AuthenticatedUser authenticatedUser = getRequestAuthenticatedUserOrDie(crc); - return getRoleAssignmentHistoryResponse(dataset, authenticatedUser, headers); + return getRoleAssignmentHistoryResponse(dataset, authenticatedUser, true, headers); }, getRequestUser(crc)); } - } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index cc850dee1f4..0747dd1b994 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -1951,7 +1951,7 @@ public Response getRoleAssignmentHistory(@Context ContainerRequestContext crc, return error(Status.UNAUTHORIZED, "Authentication is required."); } - return getRoleAssignmentHistoryResponse(dataverse, authenticatedUser, headers); + return getRoleAssignmentHistoryResponse(dataverse, authenticatedUser, false, headers); }, getRequestUser(crc)); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index 59470530047..779f043fb7c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -1049,26 +1049,4 @@ public Response getFileVersionsList(@Context ContainerRequestContext crc, @PathP return ex.getResponse(); } } - - @GET - @AuthRequired - @Path("{identifier}/permissions/history") - @Produces({ MediaType.APPLICATION_JSON, "text/csv" }) - public Response getRoleAssignmentHistory(@Context ContainerRequestContext crc, - @PathParam("identifier") String id, - @Context HttpHeaders headers) { - return response(req -> { - DataFile dataFile = findDataFileOrDie(id); - - // user is authenticated - AuthenticatedUser authenticatedUser = null; - try { - authenticatedUser = getRequestAuthenticatedUserOrDie(crc); - } catch (WrappedResponse ex) { - return error(Status.UNAUTHORIZED, "Authentication is required."); - } - - return getRoleAssignmentHistoryResponse(dataFile, authenticatedUser, headers); - }, getRequestUser(crc)); - } } diff --git a/src/main/webapp/permissions-manage.xhtml b/src/main/webapp/permissions-manage.xhtml index a860f19bc07..ee8bd263f2e 100644 --- a/src/main/webapp/permissions-manage.xhtml +++ b/src/main/webapp/permissions-manage.xhtml @@ -209,18 +209,11 @@
- #{bundle['dataverse.permissions.history.download']} - - #{bundle['dataverse.permissions.history.download']} -
response.blob()) + .then(response => { + // Check if response is ok (status in the range 200-299) + if (!response.ok) { + throw new Error('Network response was not ok: ' + response.status); + } + return response.blob(); + }) .then(blob => { const link = document.createElement('a'); link.href = URL.createObjectURL(blob); - link.download = filename; + link.download = filename || 'permissions_history.csv'; // Provide default filename if none provided document.body.appendChild(link); link.click(); document.body.removeChild(link); }) - .catch(error => console.error('Error downloading CSV:', error)); + .catch(error => { + console.error('Error downloading CSV:', error); + // Display a user-friendly error message + alert('Failed to download permissions history. Please try again later.'); + }); } \ No newline at end of file From 13ac931ef0c6ea99816a47acb1880b8757768507 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Wed, 23 Jul 2025 18:31:41 -0400 Subject: [PATCH 047/634] use getUserIdentifier() --- .../edu/harvard/iq/dataverse/ManagePermissionsPage.java | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java index 2a08b64fed6..2a6d2b024a8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java @@ -723,15 +723,18 @@ public String getsignedUrlForRAHistoryCsv() { // Generate a signed URL with the user's API token User user = session.getUser(); String key = null; - if (user instanceof AuthenticatedUser) { - ApiToken apiToken = authenticationService.findApiTokenByUser((AuthenticatedUser) user); + String userId=null; + if (user instanceof AuthenticatedUser aUser) { + userId = aUser.getUserIdentifier(); + ApiToken apiToken = authenticationService.findApiTokenByUser(aUser); + if (apiToken != null && !apiToken.isExpired() && 
!apiToken.isDisabled()) { key = apiToken.getTokenString(); } } key = JvmSettings.API_SIGNING_SECRET.lookupOptional().orElse("") + key; if(key.length() >= 36) { - return UrlSignerUtil.signUrl(fullApiPath, 10, user.getIdentifier(), "GET", key); + return UrlSignerUtil.signUrl(fullApiPath, 10, userId, "GET", key); } } catch (Exception e) { logger.log(Level.SEVERE, "Error generating signed URL for permissions history CSV: " + e.getMessage(), e); From 7ed23bf632ab903234b15e055e94c74b5d552089 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Thu, 24 Jul 2025 10:32:24 -0400 Subject: [PATCH 048/634] add missing strings --- src/main/java/propertyFiles/Bundle.properties | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 8d65b3b0689..6caabb3c818 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -3232,6 +3232,8 @@ updateDatasetFieldsCommand.api.processDatasetUpdate.parseError=Error parsing dat #AbstractApiBean.java abstractApiBean.error.datasetInternalVersionNumberIsOutdated=Dataset internal version number {0} is outdated # Role Assignment History (used in the AbstractApiBean) +datasets.api.permissions.history.definitionPoint=Definition Point +datasets.api.permissions.history.definitionPoints=Definition Points datasets.api.permissions.history.assignee=Assignee datasets.api.permissions.history.role=Role datasets.api.permissions.history.assignedBy=Assigned By From 94235c7b826cdc88be6f4f3f6e1ebdbfd87b6f9f Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Thu, 24 Jul 2025 10:32:50 -0400 Subject: [PATCH 049/634] update files page, use new filenames --- src/main/webapp/permissions-manage-files.xhtml | 6 +++--- src/main/webapp/permissions-manage.xhtml | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/main/webapp/permissions-manage-files.xhtml b/src/main/webapp/permissions-manage-files.xhtml index 
9aaf306532b..1572d5c703a 100644 --- a/src/main/webapp/permissions-manage-files.xhtml +++ b/src/main/webapp/permissions-manage-files.xhtml @@ -229,11 +229,11 @@
- +
- + onclick="downloadRAHistoryCSV(this.href, '#{manageFilePermissionsPage.permissionsHistoryFilename}'); return false;"> #{bundle['dataverse.permissions.history.download']}
diff --git a/src/main/webapp/permissions-manage.xhtml b/src/main/webapp/permissions-manage.xhtml index ee8bd263f2e..a47820e0b90 100644 --- a/src/main/webapp/permissions-manage.xhtml +++ b/src/main/webapp/permissions-manage.xhtml @@ -211,7 +211,7 @@
+ onclick="downloadRAHistoryCSV(this.href, '#{managePermissionsPage.permissionsHistoryFilename}'); return false;"> #{bundle['dataverse.permissions.history.download']}
From aad80c4270722dff9529e60501556506386d715a Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Thu, 24 Jul 2025 10:46:44 -0400 Subject: [PATCH 050/634] fix file url generation - authUser --- .../harvard/iq/dataverse/ManageFilePermissionsPage.java | 8 +++++--- .../edu/harvard/iq/dataverse/ManagePermissionsPage.java | 6 +++--- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java index 574ea0a9664..a571f798060 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java @@ -600,15 +600,17 @@ public String getsignedUrlForRAHistoryCsv() { // Generate a signed URL with the user's API token User user = session.getUser(); String key = null; - if (user instanceof AuthenticatedUser) { - ApiToken apiToken = authenticationService.findApiTokenByUser((AuthenticatedUser) user); + String userId=null; + if (user instanceof AuthenticatedUser authUser) { + userId = authUser.getUserIdentifier(); + ApiToken apiToken = authenticationService.findApiTokenByUser(authUser); if (apiToken != null && !apiToken.isExpired() && !apiToken.isDisabled()) { key = apiToken.getTokenString(); } } key = JvmSettings.API_SIGNING_SECRET.lookupOptional().orElse("") + key; if(key.length() >= 36) { - return UrlSignerUtil.signUrl(fullApiPath, 10, user.getIdentifier(), "GET", key); + return UrlSignerUtil.signUrl(fullApiPath, 10, userId, "GET", key); } } catch (Exception e) { logger.log(Level.SEVERE, "Error generating signed URL for permissions history CSV: " + e.getMessage(), e); diff --git a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java index 2a6d2b024a8..ed1665f4208 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java @@ -724,9 +724,9 @@ public String getsignedUrlForRAHistoryCsv() { User user = session.getUser(); String key = null; String userId=null; - if (user instanceof AuthenticatedUser aUser) { - userId = aUser.getUserIdentifier(); - ApiToken apiToken = authenticationService.findApiTokenByUser(aUser); + if (user instanceof AuthenticatedUser authUser) { + userId = authUser.getUserIdentifier(); + ApiToken apiToken = authenticationService.findApiTokenByUser(authUser); if (apiToken != null && !apiToken.isExpired() && !apiToken.isDisabled()) { key = apiToken.getTokenString(); From 233f3e476e401cb779d798a07c3a66eac98702e7 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Thu, 24 Jul 2025 13:43:59 -0400 Subject: [PATCH 051/634] add produces annotation --- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index e2b8c7f5c80..9054dda4d58 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -5986,6 +5986,7 @@ public Response deleteVersionNote(@Context ContainerRequestContext crc, @PathPar @GET @AuthRequired @Path("{identifier}/permissions/history") + @Produces({ MediaType.APPLICATION_JSON, "text/csv" }) public Response getRoleAssignmentHistory(@Context ContainerRequestContext crc, @PathParam("identifier") String id, @Context HttpHeaders headers) { return response(req -> { Dataset dataset = findDatasetOrDie(id); From c0897a9d1b2a4a5b90ea601bfba2375574d0f970 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Thu, 24 Jul 2025 14:36:10 -0400 Subject: [PATCH 052/634] move script to end of body --- src/main/webapp/permissions-manage-files.xhtml | 5 ++--- src/main/webapp/permissions-manage.xhtml | 4 +--- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git 
a/src/main/webapp/permissions-manage-files.xhtml b/src/main/webapp/permissions-manage-files.xhtml index 1572d5c703a..80e0a4eb1d3 100644 --- a/src/main/webapp/permissions-manage-files.xhtml +++ b/src/main/webapp/permissions-manage-files.xhtml @@ -8,9 +8,7 @@ xmlns:o="http://omnifaces.org/ui" xmlns:jsf="http://xmlns.jcp.org/jsf" xmlns:iqbs="http://xmlns.jcp.org/jsf/composite/iqbs"> - - - + @@ -434,6 +432,7 @@
+ diff --git a/src/main/webapp/permissions-manage.xhtml b/src/main/webapp/permissions-manage.xhtml index a47820e0b90..354b963be6c 100644 --- a/src/main/webapp/permissions-manage.xhtml +++ b/src/main/webapp/permissions-manage.xhtml @@ -7,9 +7,6 @@ xmlns:c="http://java.sun.com/jsp/jstl/core" xmlns:o="http://omnifaces.org/ui" xmlns:iqbs="http://xmlns.jcp.org/jsf/composite/iqbs"> - - - @@ -283,6 +280,7 @@
+ From 59c0680a8848d892494d98107ba3aec8e13d685d Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Thu, 24 Jul 2025 14:36:26 -0400 Subject: [PATCH 053/634] handle commas w/multiple def pts --- .../java/edu/harvard/iq/dataverse/api/AbstractApiBean.java | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index afbd82dedc2..8cdca1956d2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -697,6 +697,11 @@ protected Response getRoleAssignmentHistoryResponse(DvObject dvObject, Authentic // Add data rows for (DataverseRoleServiceBean.RoleAssignmentHistoryEntry entry : history) { + String definitionPointIds = entry.getDefinitionPointIdsAsString(); + // Handle multiple comma-separated values in definitionPointIds column + if(definitionPointIds.contains(",")) { + definitionPointIds = "\"" + definitionPointIds + "\""; + } csvBuilder.append(entry.getDefinitionPointIdsAsString()).append(",") .append(entry.getAssigneeIdentifier()).append(",") .append(entry.getRoleName()).append(",") From c16730747d3f7d554bc1bcb63032e1d241741ec5 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Thu, 24 Jul 2025 15:00:23 -0400 Subject: [PATCH 054/634] reuse strings --- .../iq/dataverse/api/AbstractApiBean.java | 21 +++++++++---------- src/main/java/propertyFiles/Bundle.properties | 9 -------- 2 files changed, 10 insertions(+), 20 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index e45598ebc6e..9f8a9e77c94 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -676,13 +676,10 @@ protected Response getRoleAssignmentHistoryResponse(DvObject dvObject, Authentic // Get the role 
assignment history List history = null; - String definitionPoint; if (forFiles == false) { history = rolesSvc.getRoleAssignmentHistory(dvObject.getId()); - definitionPoint = BundleUtil.getStringFromBundle("datasets.api.permissions.history.definitionPoint"); } else { history = rolesSvc.getFilesRoleAssignmentHistory(dvObject.getId()); - definitionPoint = BundleUtil.getStringFromBundle("datasets.api.permissions.history.definitionPoints"); } List acceptedTypes = headers.getAcceptableMediaTypes(); @@ -690,18 +687,20 @@ protected Response getRoleAssignmentHistoryResponse(DvObject dvObject, Authentic .anyMatch(mt -> mt.toString().equals("text/csv")); if (wantCSV) { - String assigneeHeader = BundleUtil.getStringFromBundle("datasets.api.permissions.history.assignee"); - String roleHeader = BundleUtil.getStringFromBundle("datasets.api.permissions.history.role"); - String assignedByHeader = BundleUtil.getStringFromBundle("datasets.api.permissions.history.assignedBy"); - String assignedAtHeader = BundleUtil.getStringFromBundle("datasets.api.permissions.history.assignedAt"); - String revokedByHeader = BundleUtil.getStringFromBundle("datasets.api.permissions.history.revokedBy"); - String revokedAtHeader = BundleUtil.getStringFromBundle("datasets.api.permissions.history.revokedAt"); + //Reusing strings from history panel + String definedOn = BundleUtil.getStringFromBundle("dataverse.permissions.history.definedOn"); + String assigneeHeader = BundleUtil.getStringFromBundle("dataverse.permissions.history.assignee"); + String roleHeader = BundleUtil.getStringFromBundle("dataverse.permissions.history.role"); + String assignedByHeader = BundleUtil.getStringFromBundle("dataverse.permissions.history.assignedBy"); + String assignedAtHeader = BundleUtil.getStringFromBundle("dataverse.permissions.history.assignedAt"); + String revokedByHeader = BundleUtil.getStringFromBundle("dataverse.permissions.history.revokedBy"); + String revokedAtHeader = 
BundleUtil.getStringFromBundle("dataverse.permissions.history.revokedAt"); // Generate CSV response StringBuilder csvBuilder = new StringBuilder(); // Add CSV header with internationalized column names csvBuilder - .append(definitionPoint).append(",") + .append(definedOn).append(",") .append(assigneeHeader).append(",") .append(roleHeader).append(",") .append(assignedByHeader).append(",") @@ -740,7 +739,7 @@ protected Response getRoleAssignmentHistoryResponse(DvObject dvObject, Authentic JsonArrayBuilder jsonArray = Json.createArrayBuilder(); for (DataverseRoleServiceBean.RoleAssignmentHistoryEntry entry : history) { JsonObjectBuilder job = Json.createObjectBuilder() - .add(definitionPoint, entry.getDefinitionPointIdsAsString()) + .add("definedOn", entry.getDefinitionPointIdsAsString()) .add("assigneeIdentifier", entry.getAssigneeIdentifier()) .add("roleName", entry.getRoleName()) .add("assignedBy", entry.getAssignedBy()) diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 381ba878844..ede0bbb8f8a 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -3232,15 +3232,6 @@ datasetFieldValidator.error.emptyRequiredSingleValueForField=Empty required valu updateDatasetFieldsCommand.api.processDatasetUpdate.parseError=Error parsing dataset update: {0} #AbstractApiBean.java -# Role Assignment History (used in the AbstractApiBean) -datasets.api.permissions.history.definitionPoint=Definition Point -datasets.api.permissions.history.definitionPoints=Definition Points -datasets.api.permissions.history.assignee=Assignee -datasets.api.permissions.history.role=Role -datasets.api.permissions.history.assignedBy=Assigned By -datasets.api.permissions.history.assignedAt=Assigned At -datasets.api.permissions.history.revokedBy=Revoked By -datasets.api.permissions.history.revokedAt=Revoked At abstractApiBean.error.internalVersionTimestampIsOutdated=Internal version 
timestamp {0} is outdated #RoleAssigneeServiceBean.java From f10dcbbc4e0385dc93cb5d4283867f5b16ce68c8 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Thu, 24 Jul 2025 17:10:36 -0400 Subject: [PATCH 055/634] handle missing assignment info --- .../iq/dataverse/api/AbstractApiBean.java | 29 ++++++++++++------- 1 file changed, 18 insertions(+), 11 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index 9f8a9e77c94..a9bb064f487 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -718,13 +718,11 @@ protected Response getRoleAssignmentHistoryResponse(DvObject dvObject, Authentic csvBuilder.append(entry.getDefinitionPointIdsAsString()).append(",") .append(entry.getAssigneeIdentifier()).append(",") .append(entry.getRoleName()).append(",") - .append(entry.getAssignedBy()).append(",") - .append(entry.getAssignedAt().toString()).append(","); - - // Handle nullable fields - csvBuilder.append(entry.getRevokedBy() != null ? entry.getRevokedBy() : "").append(","); - csvBuilder.append(entry.getRevokedAt() != null ? entry.getRevokedAt().toString() : ""); - csvBuilder.append("\n"); + .append(entry.getAssignedBy() != null ? entry.getAssignedBy() : "").append(",") + .append(entry.getAssignedAt() != null ? entry.getAssignedAt().toString() : "").append(",") + .append(entry.getRevokedBy() != null ? entry.getRevokedBy() : "").append(",") + .append(entry.getRevokedAt() != null ? 
entry.getRevokedAt().toString() : "") + .append("\n"); } String objectType = dvObject.getClass().getSimpleName().toLowerCase(); @@ -741,9 +739,19 @@ protected Response getRoleAssignmentHistoryResponse(DvObject dvObject, Authentic JsonObjectBuilder job = Json.createObjectBuilder() .add("definedOn", entry.getDefinitionPointIdsAsString()) .add("assigneeIdentifier", entry.getAssigneeIdentifier()) - .add("roleName", entry.getRoleName()) - .add("assignedBy", entry.getAssignedBy()) - .add("assignedAt", entry.getAssignedAt().toString()); + .add("roleName", entry.getRoleName()); + + // Add assignment info if available + if (entry.getAssignedBy()!= null) { + job.add("assignedBy", entry.getAssignedBy()); + } else { + job.add("assignedBy", JsonValue.NULL); + } + if (entry.getAssignedAt()!= null) { + job.add("assignedAt", entry.getAssignedAt().toString()); + } else { + job.add("assignedAt", JsonValue.NULL); + } // Add revocation info if available if (entry.getRevokedBy() != null) { @@ -751,7 +759,6 @@ protected Response getRoleAssignmentHistoryResponse(DvObject dvObject, Authentic } else { job.add("revokedBy", JsonValue.NULL); } - if (entry.getRevokedAt() != null) { job.add("revokedAt", entry.getRevokedAt().toString()); } else { From 080372639a499855d20d5fb906147f317834fd47 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Thu, 24 Jul 2025 15:03:23 -0400 Subject: [PATCH 056/634] fix comma handling --- src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index a9bb064f487..4c3b99423d6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -715,7 +715,7 @@ protected Response getRoleAssignmentHistoryResponse(DvObject dvObject, Authentic if(definitionPointIds.contains(",")) { 
definitionPointIds = "\"" + definitionPointIds + "\""; } - csvBuilder.append(entry.getDefinitionPointIdsAsString()).append(",") + csvBuilder.append(definitionPointIds).append(",") .append(entry.getAssigneeIdentifier()).append(",") .append(entry.getRoleName()).append(",") .append(entry.getAssignedBy() != null ? entry.getAssignedBy() : "").append(",") From 715badb279b287f327bd141fec30fa70d5519dad Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Thu, 24 Jul 2025 17:39:42 -0400 Subject: [PATCH 057/634] change api paths to mirror /assignment endpoints --- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 91239af8816..03c9b53ce4a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -6023,7 +6023,7 @@ public Response deleteVersionNote(@Context ContainerRequestContext crc, @PathPar @GET @AuthRequired - @Path("{identifier}/permissions/history") + @Path("{identifier}/assignments/history") @Produces({ MediaType.APPLICATION_JSON, "text/csv" }) public Response getRoleAssignmentHistory(@Context ContainerRequestContext crc, @PathParam("identifier") String id, @Context HttpHeaders headers) { return response(req -> { @@ -6038,7 +6038,7 @@ public Response getRoleAssignmentHistory(@Context ContainerRequestContext crc, @ @GET @AuthRequired - @Path("{identifier}/files/permissions/history") + @Path("{identifier}/files/assignments/history") @Produces({ MediaType.APPLICATION_JSON, "text/csv" }) public Response getFilesRoleAssignmentHistory(@Context ContainerRequestContext crc, @PathParam("identifier") String id, From baf5e180a236977f4b84509e5bb71f3079046d9a Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 25 Jul 2025 11:52:45 -0400 Subject: [PATCH 058/634] change flag name, class names to remove audit --- 
.../dataverse/DataverseRoleServiceBean.java | 28 +++++++++---------- .../dataverse/ManageFilePermissionsPage.java | 6 ++-- .../iq/dataverse/ManagePermissionsPage.java | 4 +-- ...tAudit.java => RoleAssignmentHistory.java} | 14 +++++----- .../iq/dataverse/api/AbstractApiBean.java | 6 ++-- .../harvard/iq/dataverse/api/Datasets.java | 2 +- .../command/impl/AssignRoleCommand.java | 2 +- .../iq/dataverse/settings/FeatureFlags.java | 4 +-- 8 files changed, 33 insertions(+), 33 deletions(-) rename src/main/java/edu/harvard/iq/dataverse/{RoleAssignmentAudit.java => RoleAssignmentHistory.java} (89%) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java index 257985dc65a..78a6628ec3a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java @@ -94,7 +94,7 @@ public RoleAssignment save(RoleAssignment assignment, boolean createIndex, Datav // Check if ROLE_ASSIGNMENT_AUDITING feature flag is enabled if (FeatureFlags.ROLE_ASSIGNMENT_AUDITING.enabled()) { - RoleAssignmentAudit audit = new RoleAssignmentAudit(assignment, req, RoleAssignmentAudit.ActionType.ASSIGN); + RoleAssignmentHistory audit = new RoleAssignmentHistory(assignment, req, RoleAssignmentHistory.ActionType.ASSIGN); saveAudit(audit); } @@ -103,12 +103,12 @@ public RoleAssignment save(RoleAssignment assignment, boolean createIndex, Datav /** - * Saves a RoleAssignmentAudit entry to the database. + * Saves a RoleAssignmentHistory entry to the database. 
* - * @param audit The RoleAssignmentAudit object to be saved - * @return The persisted RoleAssignmentAudit object + * @param audit The RoleAssignmentHistory object to be saved + * @return The persisted RoleAssignmentHistory object */ - private RoleAssignmentAudit saveAudit(RoleAssignmentAudit audit) { + private RoleAssignmentHistory saveAudit(RoleAssignmentHistory audit) { if (audit.getAuditId() == null) { em.persist(audit); em.flush(); // Ensure the entity is persisted immediately @@ -184,7 +184,7 @@ public void revoke(RoleAssignment ra, DataverseRequest req) { // Create audit entry if feature flag is set if (FeatureFlags.ROLE_ASSIGNMENT_AUDITING.enabled()) { - RoleAssignmentAudit audit = new RoleAssignmentAudit(ra, req, RoleAssignmentAudit.ActionType.REVOKE); + RoleAssignmentHistory audit = new RoleAssignmentHistory(ra, req, RoleAssignmentHistory.ActionType.REVOKE); saveAudit(audit); } @@ -209,7 +209,7 @@ public void revokeAll(RoleAssignee assignee, DataverseRequest req) { // Create audit entry if feature flag is set if (FeatureFlags.ROLE_ASSIGNMENT_AUDITING.enabled()) { - RoleAssignmentAudit audit = new RoleAssignmentAudit(ra, req, RoleAssignmentAudit.ActionType.REVOKE); + RoleAssignmentHistory audit = new RoleAssignmentHistory(ra, req, RoleAssignmentHistory.ActionType.REVOKE); saveAudit(audit); } @@ -380,7 +380,7 @@ For a given permission and dataverse Id get all of the roles (built-in or owned * @return List of role assignment history entries */ public List getRoleAssignmentHistory(Long definitionPointId) { - List audits = em.createNamedQuery("RoleAssignmentAudit.findByDefinitionPointId", RoleAssignmentAudit.class) + List audits = em.createNamedQuery("RoleAssignmentHistory.findByDefinitionPointId", RoleAssignmentHistory.class) .setParameter("definitionPointId", definitionPointId) .getResultList(); @@ -394,7 +394,7 @@ public List getRoleAssignmentHistory(Long definition * @return List of role assignment history entries */ public List 
getFilesRoleAssignmentHistory(Long datasetId) { - List audits = em.createNamedQuery("RoleAssignmentAudit.findByOwnerId", RoleAssignmentAudit.class) + List audits = em.createNamedQuery("RoleAssignmentHistory.findByOwnerId", RoleAssignmentHistory.class) .setParameter("datasetId", datasetId) .getResultList(); @@ -402,18 +402,18 @@ public List getFilesRoleAssignmentHistory(Long datas } /** - * Common method to process role assignment audits and create history entries + * Common method to process role assignment history rows and create consolidated history entries * * @param audits List of role assignment audit records * @param combineEntries Whether to combine entries for different files * @return List of role assignment history entries */ - private List processRoleAssignmentAudits(List audits, boolean combineEntries) { + private List processRoleAssignmentAudits(List audits, boolean combineEntries) { List roleAssignmentHistory = new ArrayList<>(); Map historyMap = new HashMap<>(); // First pass: Create entries from audit records - for (RoleAssignmentAudit audit : audits) { + for (RoleAssignmentHistory audit : audits) { Long roleAssignmentId = audit.getRoleAssignmentId(); RoleAssignmentHistoryEntry entry = historyMap.get(roleAssignmentId); @@ -422,10 +422,10 @@ private List processRoleAssignmentAudits(List roleAssignmentHistory; + private List roleAssignmentHistory; - public List getRoleAssignmentHistory() { + public List getRoleAssignmentHistory() { if (roleAssignmentHistory == null) { roleAssignmentHistory = roleService.getFilesRoleAssignmentHistory(dataset.getId()); } diff --git a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java index ed1665f4208..1f980c877ba 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java @@ -241,9 +241,9 @@ public void editRole(String roleId) { } /** Role 
Assignment History */ - private List roleAssignmentHistory; + private List roleAssignmentHistory; - public List getRoleAssignmentHistory() { + public List getRoleAssignmentHistory() { if (roleAssignmentHistory == null) { roleAssignmentHistory = roleService.getRoleAssignmentHistory(dvObject.getId()); diff --git a/src/main/java/edu/harvard/iq/dataverse/RoleAssignmentAudit.java b/src/main/java/edu/harvard/iq/dataverse/RoleAssignmentHistory.java similarity index 89% rename from src/main/java/edu/harvard/iq/dataverse/RoleAssignmentAudit.java rename to src/main/java/edu/harvard/iq/dataverse/RoleAssignmentHistory.java index 52292aeb065..ebc6d95288a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/RoleAssignmentAudit.java +++ b/src/main/java/edu/harvard/iq/dataverse/RoleAssignmentHistory.java @@ -30,14 +30,14 @@ @Index(name = "idx_raa_definition_point_id", columnList = "definition_point_id") }) @NamedQueries({ - @NamedQuery(name = "RoleAssignmentAudit.findByDefinitionPointId", - query = "SELECT ra FROM RoleAssignmentAudit ra WHERE ra.definitionPointId = :definitionPointId ORDER BY ra.roleAssignmentId, ra.actionTimestamp DESC"), - @NamedQuery(name = "RoleAssignmentAudit.findByOwnerId", - query = "SELECT ra FROM RoleAssignmentAudit ra JOIN DvObject d ON ra.definitionPointId = d.id " + + @NamedQuery(name = "RoleAssignmentHistory.findByDefinitionPointId", + query = "SELECT ra FROM RoleAssignmentHistory ra WHERE ra.definitionPointId = :definitionPointId ORDER BY ra.roleAssignmentId, ra.actionTimestamp DESC"), + @NamedQuery(name = "RoleAssignmentHistory.findByOwnerId", + query = "SELECT ra FROM RoleAssignmentHistory ra JOIN DvObject d ON ra.definitionPointId = d.id " + "WHERE d.owner.id = :datasetId " + "ORDER BY ra.roleAssignmentId, ra.actionTimestamp DESC") }) -public class RoleAssignmentAudit implements Serializable { +public class RoleAssignmentHistory implements Serializable { @Id @GeneratedValue(strategy = GenerationType.IDENTITY) @@ -78,10 +78,10 @@ public enum 
ActionType { } // Constructors - public RoleAssignmentAudit() { + public RoleAssignmentHistory() { } - public RoleAssignmentAudit(RoleAssignment roleAssignment, DataverseRequest request, ActionType actionType) { + public RoleAssignmentHistory(RoleAssignment roleAssignment, DataverseRequest request, ActionType actionType) { this.roleAssignmentId = roleAssignment.getId(); this.actionType = actionType; this.actionTimestamp = new Date(); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index 4c3b99423d6..fb5868d76b0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -675,7 +675,7 @@ protected Response getRoleAssignmentHistoryResponse(DvObject dvObject, Authentic } // Get the role assignment history - List history = null; + List history = null; if (forFiles == false) { history = rolesSvc.getRoleAssignmentHistory(dvObject.getId()); } else { @@ -709,7 +709,7 @@ protected Response getRoleAssignmentHistoryResponse(DvObject dvObject, Authentic .append(revokedAtHeader).append("\n"); // Add data rows - for (DataverseRoleServiceBean.RoleAssignmentHistoryEntry entry : history) { + for (DataverseRoleServiceBean.RoleAssignmentHistoryConsolidatedEntry entry : history) { String definitionPointIds = entry.getDefinitionPointIdsAsString(); // Handle multiple comma-separated values in definitionPointIds column if(definitionPointIds.contains(",")) { @@ -735,7 +735,7 @@ protected Response getRoleAssignmentHistoryResponse(DvObject dvObject, Authentic // Or Json by default JsonArrayBuilder jsonArray = Json.createArrayBuilder(); - for (DataverseRoleServiceBean.RoleAssignmentHistoryEntry entry : history) { + for (DataverseRoleServiceBean.RoleAssignmentHistoryConsolidatedEntry entry : history) { JsonObjectBuilder job = Json.createObjectBuilder() .add("definedOn", entry.getDefinitionPointIdsAsString()) 
.add("assigneeIdentifier", entry.getAssigneeIdentifier()) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 03c9b53ce4a..6dd0baaa2c7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -3,7 +3,7 @@ import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.DatasetLock.Reason; import edu.harvard.iq.dataverse.DatasetVersion.VersionState; -import edu.harvard.iq.dataverse.DataverseRoleServiceBean.RoleAssignmentHistoryEntry; +import edu.harvard.iq.dataverse.DataverseRoleServiceBean.RoleAssignmentHistoryConsolidatedEntry; import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; import edu.harvard.iq.dataverse.api.AbstractApiBean.WrappedResponse; import edu.harvard.iq.dataverse.api.auth.AuthRequired; diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AssignRoleCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AssignRoleCommand.java index 7260b433ddb..db3c41f0f03 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AssignRoleCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AssignRoleCommand.java @@ -19,7 +19,7 @@ import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import edu.harvard.iq.dataverse.util.BundleUtil; -import edu.harvard.iq.dataverse.RoleAssignmentAudit; +import edu.harvard.iq.dataverse.RoleAssignmentHistory; import edu.harvard.iq.dataverse.settings.FeatureFlags; import java.util.Collections; import java.util.Date; diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java b/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java index 539e898255c..7d3fec6822f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java @@ -168,10 +168,10 @@ 
public enum FeatureFlags { ADD_LOCAL_CONTEXTS_PERMISSION_CHECK("add-local-contexts-permission-check"), /** - * This flag turns on auditing of role assignments - keeping a record of when roles were granted + * This flag turns on history tracking of role assignments - keeping a record of when roles were granted * or revoked, at what times, and by whom. */ - ROLE_ASSIGNMENT_AUDITING("role-assignment-auditing"), + ROLE_ASSIGNMENT_HISTORY("role-assignment-history"), ; final String flag; From 696f009201567c1950008040ee6810d8fd867f1c Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 25 Jul 2025 11:53:05 -0400 Subject: [PATCH 059/634] more audit changes --- .../dataverse/DataverseRoleServiceBean.java | 104 +++++++++--------- .../iq/dataverse/RoleAssignmentHistory.java | 2 +- 2 files changed, 53 insertions(+), 53 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java index 78a6628ec3a..3b058c00c82 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java @@ -1,6 +1,6 @@ package edu.harvard.iq.dataverse; -import edu.harvard.iq.dataverse.DataverseRoleServiceBean.RoleAssignmentHistoryEntry; +import edu.harvard.iq.dataverse.DataverseRoleServiceBean.RoleAssignmentHistoryConsolidatedEntry; import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.RoleAssignee; @@ -92,10 +92,10 @@ public RoleAssignment save(RoleAssignment assignment, boolean createIndex, Datav indexAsync.indexRole(assignment); } - // Check if ROLE_ASSIGNMENT_AUDITING feature flag is enabled - if (FeatureFlags.ROLE_ASSIGNMENT_AUDITING.enabled()) { - RoleAssignmentHistory audit = new RoleAssignmentHistory(assignment, req, RoleAssignmentHistory.ActionType.ASSIGN); - saveAudit(audit); + // Check if 
ROLE_ASSIGNMENT_HISTORY feature flag is enabled + if (FeatureFlags.ROLE_ASSIGNMENT_HISTORY.enabled()) { + RoleAssignmentHistory entry = new RoleAssignmentHistory(assignment, req, RoleAssignmentHistory.ActionType.ASSIGN); + saveHistoryEntry(entry); } return assignment; @@ -105,17 +105,17 @@ public RoleAssignment save(RoleAssignment assignment, boolean createIndex, Datav /** * Saves a RoleAssignmentHistory entry to the database. * - * @param audit The RoleAssignmentHistory object to be saved + * @param entry The RoleAssignmentHistory object to be saved * @return The persisted RoleAssignmentHistory object */ - private RoleAssignmentHistory saveAudit(RoleAssignmentHistory audit) { - if (audit.getAuditId() == null) { - em.persist(audit); + private RoleAssignmentHistory saveHistoryEntry(RoleAssignmentHistory entry) { + if (entry.getEntryId() == null) { + em.persist(entry); em.flush(); // Ensure the entity is persisted immediately } else { - audit = em.merge(audit); + entry = em.merge(entry); } - return audit; + return entry; } private IndexResponse indexDefinitionPoint(DvObject definitionPoint) { @@ -182,10 +182,10 @@ public void revoke(RoleAssignment ra, DataverseRequest req) { ra = em.merge(ra); } - // Create audit entry if feature flag is set - if (FeatureFlags.ROLE_ASSIGNMENT_AUDITING.enabled()) { - RoleAssignmentHistory audit = new RoleAssignmentHistory(ra, req, RoleAssignmentHistory.ActionType.REVOKE); - saveAudit(audit); + // Create history entry if feature flag is set + if (FeatureFlags.ROLE_ASSIGNMENT_HISTORY.enabled()) { + RoleAssignmentHistory entry = new RoleAssignmentHistory(ra, req, RoleAssignmentHistory.ActionType.REVOKE); + saveHistoryEntry(entry); } em.remove(ra); @@ -207,10 +207,10 @@ public void revokeAll(RoleAssignee assignee, DataverseRequest req) { ra = em.merge(ra); } - // Create audit entry if feature flag is set - if (FeatureFlags.ROLE_ASSIGNMENT_AUDITING.enabled()) { - RoleAssignmentHistory audit = new RoleAssignmentHistory(ra, req, 
RoleAssignmentHistory.ActionType.REVOKE); - saveAudit(audit); + // Create history entry if feature flag is set + if (FeatureFlags.ROLE_ASSIGNMENT_HISTORY.enabled()) { + RoleAssignmentHistory entry = new RoleAssignmentHistory(ra, req, RoleAssignmentHistory.ActionType.REVOKE); + saveHistoryEntry(entry); } em.remove(ra); @@ -379,12 +379,12 @@ For a given permission and dataverse Id get all of the roles (built-in or owned * @param definitionPointId The ID of the definition point * @return List of role assignment history entries */ - public List getRoleAssignmentHistory(Long definitionPointId) { - List audits = em.createNamedQuery("RoleAssignmentHistory.findByDefinitionPointId", RoleAssignmentHistory.class) + public List getRoleAssignmentHistory(Long definitionPointId) { + List entries = em.createNamedQuery("RoleAssignmentHistory.findByDefinitionPointId", RoleAssignmentHistory.class) .setParameter("definitionPointId", definitionPointId) .getResultList(); - return processRoleAssignmentAudits(audits, false); + return processRoleAssignmentEntries(entries, false); } /** @@ -393,53 +393,53 @@ public List getRoleAssignmentHistory(Long definition * @param datasetId The ID of the dataset * @return List of role assignment history entries */ - public List getFilesRoleAssignmentHistory(Long datasetId) { - List audits = em.createNamedQuery("RoleAssignmentHistory.findByOwnerId", RoleAssignmentHistory.class) + public List getFilesRoleAssignmentHistory(Long datasetId) { + List entries = em.createNamedQuery("RoleAssignmentHistory.findByOwnerId", RoleAssignmentHistory.class) .setParameter("datasetId", datasetId) .getResultList(); - return processRoleAssignmentAudits(audits, true); + return processRoleAssignmentEntries(entries, true); } /** - * Common method to process role assignment history rows and create consolidated history entries + * Common method to process role assignment history entries and create consolidated history entries * - * @param audits List of role assignment audit 
records + * @param entries List of role assignment history records * @param combineEntries Whether to combine entries for different files * @return List of role assignment history entries */ - private List processRoleAssignmentAudits(List audits, boolean combineEntries) { - List roleAssignmentHistory = new ArrayList<>(); - Map historyMap = new HashMap<>(); - - // First pass: Create entries from audit records - for (RoleAssignmentHistory audit : audits) { - Long roleAssignmentId = audit.getRoleAssignmentId(); - RoleAssignmentHistoryEntry entry = historyMap.get(roleAssignmentId); - - if (entry == null) { - entry = new RoleAssignmentHistoryEntry(audit.getAssigneeIdentifier(), audit.getRoleAlias(), audit.getDefinitionPointId()); - historyMap.put(roleAssignmentId, entry); + private List processRoleAssignmentEntries(List entries, boolean combineEntries) { + List roleAssignmentHistory = new ArrayList<>(); + Map historyMap = new HashMap<>(); + + // First pass: Create consolidatedEntries from history records + for (RoleAssignmentHistory entry : entries) { + Long roleAssignmentId = entry.getRoleAssignmentId(); + RoleAssignmentHistoryConsolidatedEntry consolidatedEntry = historyMap.get(roleAssignmentId); + + if (consolidatedEntry == null) { + consolidatedEntry = new RoleAssignmentHistoryConsolidatedEntry(entry.getAssigneeIdentifier(), entry.getRoleAlias(), entry.getDefinitionPointId()); + historyMap.put(roleAssignmentId, consolidatedEntry); } - if (audit.getActionType() == RoleAssignmentHistory.ActionType.ASSIGN) { - entry.setAssignedBy(audit.getActionByIdentifier()); - entry.setAssignedAt(audit.getActionTimestamp()); - } else if (audit.getActionType() == RoleAssignmentHistory.ActionType.REVOKE) { - entry.setRevokedBy(audit.getActionByIdentifier()); - entry.setRevokedAt(audit.getActionTimestamp()); + if (entry.getActionType() == RoleAssignmentHistory.ActionType.ASSIGN) { + consolidatedEntry.setAssignedBy(entry.getActionByIdentifier()); + 
consolidatedEntry.setAssignedAt(entry.getActionTimestamp()); + } else if (entry.getActionType() == RoleAssignmentHistory.ActionType.REVOKE) { + consolidatedEntry.setRevokedBy(entry.getActionByIdentifier()); + consolidatedEntry.setRevokedAt(entry.getActionTimestamp()); } } // Second pass: Combine entries with matching criteria if requested if (combineEntries) { - Map finalHistoryMap = new HashMap<>(); - for (RoleAssignmentHistoryEntry entry : historyMap.values()) { + Map finalHistoryMap = new HashMap<>(); + for (RoleAssignmentHistoryConsolidatedEntry entry : historyMap.values()) { String key = entry.getAssigneeIdentifier() + "|" + entry.getRoleName() + "|" + entry.getAssignedBy() + "|" + entry.getAssignedAt() + "|" + entry.getRevokedBy() + "|" + entry.getRevokedAt(); - RoleAssignmentHistoryEntry existingEntry = finalHistoryMap.get(key); + RoleAssignmentHistoryConsolidatedEntry existingEntry = finalHistoryMap.get(key); if (existingEntry == null) { finalHistoryMap.put(key, entry); } else { @@ -453,14 +453,14 @@ private List processRoleAssignmentAudits(List definitionPointIds; // New field - public RoleAssignmentHistoryEntry(String assigneeIdentifier, String roleName, Long definitionPointId) { + public RoleAssignmentHistoryConsolidatedEntry(String assigneeIdentifier, String roleName, Long definitionPointId) { this.roleName = roleName; this.assigneeIdentifier = assigneeIdentifier; this.definitionPointIds = new ArrayList(); diff --git a/src/main/java/edu/harvard/iq/dataverse/RoleAssignmentHistory.java b/src/main/java/edu/harvard/iq/dataverse/RoleAssignmentHistory.java index ebc6d95288a..62bed6b3233 100644 --- a/src/main/java/edu/harvard/iq/dataverse/RoleAssignmentHistory.java +++ b/src/main/java/edu/harvard/iq/dataverse/RoleAssignmentHistory.java @@ -99,7 +99,7 @@ public RoleAssignmentHistory(RoleAssignment roleAssignment, DataverseRequest req } // Getters and setters - public Long getAuditId() { + public Long getEntryId() { return auditId; } From 
ec2042dcddbeeb8f732e51565af375c1c033edb8 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 25 Jul 2025 11:53:18 -0400 Subject: [PATCH 060/634] documentation --- doc/release-notes/11612-RAHistory.md | 17 ++ doc/sphinx-guides/source/api/native-api.rst | 161 ++++++++++++++++++ .../source/installation/config.rst | 3 + .../source/user/dataverse-management.rst | 4 +- 4 files changed, 184 insertions(+), 1 deletion(-) create mode 100644 doc/release-notes/11612-RAHistory.md diff --git a/doc/release-notes/11612-RAHistory.md b/doc/release-notes/11612-RAHistory.md new file mode 100644 index 00000000000..b61fc9d0406 --- /dev/null +++ b/doc/release-notes/11612-RAHistory.md @@ -0,0 +1,17 @@ +# Role Assignment History Tracking + +Dataverse can now track the history of role assignments, allowing administrators to see who assigned or revoked roles, when these actions occurred, and which roles were involved. This feature helps with auditing and understanding permission changes over time. + +## Key components of this feature: + +- **Feature Flag**: The functionality can be enabled/disabled via the `ROLE_ASSIGNMENT_HISTORY` feature flag +- **UI Integration**: New history panels on permission management pages showing the complete history of role assignments/revocations +- **CSV Export**: Administrators can download the role assignment history for a given collection or dataset (or files in a dataset) as a CSV file directly from the new panels +- **API Access**: New API endpoints provide access to role assignment history in both JSON and CSV formats: + - `/api/dataverses/{identifier}/assignments/history` + - `/api/datasets/{identifier}/assignments/history` + - `/api/datasets/{identifier}/files/assignments/history` + +All return JSON by default but will return an internationalized CSV if an `Accept: text/csv` header is added. + +For more information, see #11612 \ No newline at end of file diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst 
index 95307f77e48..431c2776a50 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1440,6 +1440,60 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/dataverses/1/templates" --upload-file dataverse-template.json + +Dataverse Role Assignment History +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Get the history of role assignments for a collection. This API call returns a list of role assignments and revocations for the specified collection. + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=3 + + curl -H "X-Dataverse-key:$API_TOKEN" -H "Accept: application/json" "$SERVER_URL/api/dataverses/$ID/assignments/history" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -H "Accept: application/json" "https://demo.dataverse.org/api/dataverses/3/assignments/history" + +You can also use the collection alias instead of the numeric id: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export DV_ALIAS=dvAlias + + curl -H "X-Dataverse-key:$API_TOKEN" -H "Accept: application/json" "$SERVER_URL/api/dataverses/$DV_ALIAS/assignments/history" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -H "Accept: application/json" "https://demo.dataverse.org/api/dataverses/dvAlias/assignments/history" + +To retrieve the history in CSV format, change the Accept header to "text/csv": + +.. 
code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=3 + + curl -H "X-Dataverse-key:$API_TOKEN" -H "Accept: text/csv" "$SERVER_URL/api/dataverses/$ID/assignments/history" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -H "Accept: text/csv" "https://demo.dataverse.org/api/dataverses/3/assignments/history" + +Note: This feature requires the "role-assignment-history" feature flag to be enabled (see :ref:`feature-flags`). + Datasets -------- @@ -4009,6 +4063,113 @@ Upon success, the API will return a JSON response with a success message and the The API call will report a 400 (BAD REQUEST) error if any of the files specified do not exist or are not in the latest version of the specified dataset. The ``fileIds`` in the JSON payload should be an array of file IDs that you want to delete from the dataset. +.. _api-dataset-role-assignment-history: + +Dataset Role Assignment History +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Get the history of role assignments for a dataset. This API call returns a list of role assignments and revocations for the specified dataset. + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=3 + + curl -H "X-Dataverse-key:$API_TOKEN" -H "Accept: application/json" "$SERVER_URL/api/datasets/$ID/assignments/history" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -H "Accept: application/json" "https://demo.dataverse.org/api/datasets/3/assignments/history" + +You can also use the persistent identifier instead of the numeric id: + +.. 
code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/ABCDEF + + curl -H "X-Dataverse-key:$API_TOKEN" -H "Accept: application/json" "$SERVER_URL/api/datasets/:persistentId/assignments/history?persistentId=$PERSISTENT_IDENTIFIER" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -H "Accept: application/json" "https://demo.dataverse.org/api/datasets/:persistentId/assignments/history?persistentId=doi:10.5072/FK2/ABCDEF" + +To retrieve the history in CSV format, change the Accept header to "text/csv": + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=3 + + curl -H "X-Dataverse-key:$API_TOKEN" -H "Accept: text/csv" "$SERVER_URL/api/datasets/$ID/assignments/history" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -H "Accept: text/csv" "https://demo.dataverse.org/api/datasets/3/assignments/history" + +Note: This feature requires the "role-assignment-history" feature flag to be enabled (see :ref:`feature-flags`). + +Dataset Files Role Assignment History +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Get the history of role assignments for the files in a dataset. This API call returns a list of role assignments and revocations for all files in the specified dataset. + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=3 + + curl -H "X-Dataverse-key:$API_TOKEN" -H "Accept: application/json" "$SERVER_URL/api/datasets/$ID/files/assignments/history" + +The fully expanded example above (without environment variables) looks like this: + +.. 
code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -H "Accept: application/json" "https://demo.dataverse.org/api/datasets/3/files/assignments/history" + +You can also use the persistent identifier instead of the numeric id: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/ABCDEF + + curl -H "X-Dataverse-key:$API_TOKEN" -H "Accept: application/json" "$SERVER_URL/api/datasets/:persistentId/files/assignments/history?persistentId=$PERSISTENT_IDENTIFIER" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -H "Accept: application/json" "https://demo.dataverse.org/api/datasets/:persistentId/files/assignments/history?persistentId=doi:10.5072/FK2/ABCDEF" + +To retrieve the history in CSV format, change the Accept header to "text/csv": + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=3 + + curl -H "X-Dataverse-key:$API_TOKEN" -H "Accept: text/csv" "$SERVER_URL/api/datasets/files/$ID/assignments/history" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -H "Accept: text/csv" "https://demo.dataverse.org/api/datasets/3/files/assignments/history" + +Note: This feature requires the "role-assignment-history" feature flag to be enabled (see :ref:`feature-flags`). 
Files ----- diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 83a6d42c15d..eb177f2f274 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -3750,6 +3750,9 @@ please find all known feature flags below. Any of these flags can be activated u * - enable-version-note - Turns on the ability to add/view/edit/delete per-dataset-version notes intended to provide :ref:`provenance` information about why the dataset/version was created. - ``Off`` + * - role-assignment-history + - Turns on tracking/display of role assignments and revocations for collections, datasets, and files + - ``Off`` **Note:** Feature flags can be set via any `supported MicroProfile Config API source`_, e.g. the environment variable ``DATAVERSE_FEATURE_XXX`` (e.g. ``DATAVERSE_FEATURE_API_SESSION_AUTH=1``). These environment variables can be set in your shell before starting Payara. If you are using :doc:`Docker for development `, you can set them in the `docker compose `_ file. diff --git a/doc/sphinx-guides/source/user/dataverse-management.rst b/doc/sphinx-guides/source/user/dataverse-management.rst index d88d0a45e68..ecbafc807a4 100755 --- a/doc/sphinx-guides/source/user/dataverse-management.rst +++ b/doc/sphinx-guides/source/user/dataverse-management.rst @@ -119,7 +119,7 @@ Clicking on Permissions will bring you to this page: |image3| -When you access a Dataverse collection's permissions page, you will see three sections: +When you access a Dataverse collection's permissions page, you will see three or four sections: **Permissions:** Here you can decide the requirements that determine which types of users can add datasets and sub Dataverse collections to your Dataverse collection, and what permissions they'll be granted when they do so. 
@@ -127,6 +127,8 @@ When you access a Dataverse collection's permissions page, you will see three se **Roles:** Here you can reference a full list of roles that can be assigned to users of your Dataverse collection. Each role lists the permissions that it offers. +**Role Assignment History** If enabled, you'll be able to see the history of when roles have been assigned and revoked and by whom. + Please note that even on a newly created Dataverse collection, you may see user and groups have already been granted role(s) if your installation has ``:InheritParentRoleAssignments`` set. For more on this setting, see the :doc:`/installation/config` section of the Installation Guide. Setting Access Configurations From dfba6a74669fb18fad52e4e14b0a12f63084dc8c Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 25 Jul 2025 13:46:35 -0400 Subject: [PATCH 061/634] change revokeRole to add OnDataverse, add dv roleassignment/history call --- .../edu/harvard/iq/dataverse/api/Dataverses.java | 2 +- .../edu/harvard/iq/dataverse/api/DatasetsIT.java | 2 +- .../java/edu/harvard/iq/dataverse/api/UtilIT.java | 12 +++++++++++- 3 files changed, 13 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 9dc36b253cc..156f868753c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -1965,7 +1965,7 @@ public Response createTemplate(@Context ContainerRequestContext crc, String body @GET @AuthRequired - @Path("{identifier}/permissions/history") + @Path("{identifier}/assignments/history") @Produces({ MediaType.APPLICATION_JSON, "text/csv" }) public Response getRoleAssignmentHistory(@Context ContainerRequestContext crc, @PathParam("identifier") String id, diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 
e0d15e13b6d..cc8312b0868 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -2053,7 +2053,7 @@ public void testPrivateUrl() { int roleAssignmentId = (int) roleAssignment.get("id"); logger.info("role assignment id: " + roleAssignmentId); assertEquals(roleAssignmentIdFromCreate, roleAssignmentId); - Response revoke = UtilIT.revokeRole(dataverseAlias, roleAssignmentId, apiToken); + Response revoke = UtilIT.revokeRoleOnDataverse(dataverseAlias, roleAssignmentId, apiToken); revoke.prettyPrint(); assertEquals(OK.getStatusCode(), revoke.getStatusCode()); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index eb191c1bad7..fa340600ed2 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -2575,7 +2575,7 @@ static Response grantRoleOnDataset(String definitionPoint, String role, String r .post("api/datasets/:persistentId/assignments?key=" + apiToken + "&persistentId=" + definitionPoint); } - static Response revokeRole(String definitionPoint, long doomed, String apiToken) { + static Response revokeRoleOnDataverse(String definitionPoint, long doomed, String apiToken) { return given() .header(API_TOKEN_HTTP_HEADER, apiToken) .delete("api/dataverses/" + definitionPoint + "/assignments/" + doomed); @@ -5006,4 +5006,14 @@ public static Response getTemplates(String dataverseAlias, String apiToken) { .header(API_TOKEN_HTTP_HEADER, apiToken) .get("/api/dataverses/" + dataverseAlias + "/templates"); } + + public static Response getDataverseRoleAssignmentHistory(String dataverseAlias, boolean downloadAsCsv, String apiToken) { + RequestSpecification requestSpecification = given() + .header(API_TOKEN_HTTP_HEADER, apiToken); + + requestSpecification = requestSpecification.header("Accept", downloadAsCsv ? 
"text/csv" : "application/json"); + + return requestSpecification + .get("/api/v1/dataverses/" + dataverseAlias + "/assignments/history"); + } } From bc97830b7afa0b6ef73081a8adf7612d31947eee Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 25 Jul 2025 13:46:46 -0400 Subject: [PATCH 062/634] dataverse-level test --- .../iq/dataverse/api/AbstractApiBean.java | 34 ++-- .../api/RoleAssignmentHistoryIT.java | 146 ++++++++++++++++++ 2 files changed, 170 insertions(+), 10 deletions(-) create mode 100644 src/test/java/edu/harvard/iq/dataverse/api/RoleAssignmentHistoryIT.java diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index fb5868d76b0..59a0a075923 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -697,16 +697,7 @@ protected Response getRoleAssignmentHistoryResponse(DvObject dvObject, Authentic String revokedAtHeader = BundleUtil.getStringFromBundle("dataverse.permissions.history.revokedAt"); // Generate CSV response - StringBuilder csvBuilder = new StringBuilder(); - // Add CSV header with internationalized column names - csvBuilder - .append(definedOn).append(",") - .append(assigneeHeader).append(",") - .append(roleHeader).append(",") - .append(assignedByHeader).append(",") - .append(assignedAtHeader).append(",") - .append(revokedByHeader).append(",") - .append(revokedAtHeader).append("\n"); + StringBuilder csvBuilder = getHistoryCsvHeaderRow(); // Add data rows for (DataverseRoleServiceBean.RoleAssignmentHistoryConsolidatedEntry entry : history) { @@ -771,6 +762,29 @@ protected Response getRoleAssignmentHistoryResponse(DvObject dvObject, Authentic return ok(jsonArray); } + static StringBuilder getHistoryCsvHeaderRow() { + // Reusing strings from history panel + String definedOn = BundleUtil.getStringFromBundle("dataverse.permissions.history.definedOn"); + String 
assigneeHeader = BundleUtil.getStringFromBundle("dataverse.permissions.history.assignee"); + String roleHeader = BundleUtil.getStringFromBundle("dataverse.permissions.history.role"); + String assignedByHeader = BundleUtil.getStringFromBundle("dataverse.permissions.history.assignedBy"); + String assignedAtHeader = BundleUtil.getStringFromBundle("dataverse.permissions.history.assignedAt"); + String revokedByHeader = BundleUtil.getStringFromBundle("dataverse.permissions.history.revokedBy"); + String revokedAtHeader = BundleUtil.getStringFromBundle("dataverse.permissions.history.revokedAt"); + + // Generate CSV response + StringBuilder csvBuilder = new StringBuilder(); + // Add CSV header with internationalized column names + csvBuilder + .append(definedOn).append(",") + .append(assigneeHeader).append(",") + .append(roleHeader).append(",") + .append(assignedByHeader).append(",") + .append(assignedAtHeader).append(",") + .append(revokedByHeader).append(",") + .append(revokedAtHeader).append("\n"); + return csvBuilder; + } /* =================== *\ * Command Execution * \* =================== */ diff --git a/src/test/java/edu/harvard/iq/dataverse/api/RoleAssignmentHistoryIT.java b/src/test/java/edu/harvard/iq/dataverse/api/RoleAssignmentHistoryIT.java new file mode 100644 index 00000000000..3548cc074f7 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/api/RoleAssignmentHistoryIT.java @@ -0,0 +1,146 @@ +package edu.harvard.iq.dataverse.api; + +import io.restassured.RestAssured; +import io.restassured.path.json.JsonPath; +import io.restassured.response.Response; +import jakarta.json.JsonArray; +import jakarta.json.JsonObject; +import jakarta.json.JsonValue; + +import java.util.List; +import java.util.Map; +import java.util.logging.Logger; + +import static jakarta.ws.rs.core.Response.Status.*; +import static org.hamcrest.CoreMatchers.*; +import static org.junit.jupiter.api.Assertions.*; + +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; 
+ +import edu.harvard.iq.dataverse.authorization.DataverseRole; +import edu.harvard.iq.dataverse.util.BundleUtil; + +/** + * Integration tests for the Role Assignment History API endpoints. + * + * @author [Your Name] + */ +public class RoleAssignmentHistoryIT { + + private static final Logger logger = Logger.getLogger(RoleAssignmentHistoryIT.class.getCanonicalName()); + + @BeforeAll + public static void setUp() { + RestAssured.baseURI = UtilIT.getRestAssuredBaseUri(); + } + + @Test + public void testDataverseRoleAssignmentHistory() { + // Create admin user + Response createAdminUser = UtilIT.createRandomUser(); + String adminUsername = UtilIT.getUsernameFromResponse(createAdminUser); + String adminApiToken = UtilIT.getApiTokenFromResponse(createAdminUser); + UtilIT.setSuperuserStatus(adminUsername, true); + + // Create regular users + Response createUser1 = UtilIT.createRandomUser(); + String username1 = UtilIT.getUsernameFromResponse(createUser1); + + Response createUser2 = UtilIT.createRandomUser(); + String username2 = UtilIT.getUsernameFromResponse(createUser2); + + // Create dataverse + Response createDataverseResponse = UtilIT.createRandomDataverse(adminApiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + // Assign roles to users + Response grantContributor = UtilIT.grantRoleOnDataverse(dataverseAlias, DataverseRole.DS_CONTRIBUTOR, "@" + username1, adminApiToken); + grantContributor.then().assertThat().statusCode(OK.getStatusCode()); + + Response grantCurator = UtilIT.grantRoleOnDataverse(dataverseAlias, DataverseRole.CURATOR, "@" + username2, adminApiToken); + grantCurator.then().assertThat().statusCode(OK.getStatusCode()); + + // Revoke role from user1 + grantContributor.prettyPrint(); + String idToDelete = JsonPath.from(grantContributor.getBody().asString()).getString("data.id"); + Response revokeContributor = 
UtilIT.revokeRoleOnDataverse(dataverseAlias, Long.parseLong(idToDelete), adminApiToken); + revokeContributor.then().assertThat().statusCode(OK.getStatusCode()); + + // Get role assignment history in JSON format + Response historyJson = UtilIT.getDataverseRoleAssignmentHistory(dataverseAlias, false, adminApiToken); + historyJson.then().assertThat().statusCode(OK.getStatusCode()); + + historyJson.prettyPrint(); + + // Verify JSON response structure + List> data = JsonPath.from(historyJson.getBody().asString()).getList("data"); + // History should contain 2 entries + assertTrue(data.size() == 2); + + // Verify the first history entry (the one unrevoked assignment) + Map firstEntry = data.get(0); + // Entry should have definedOn field + assertTrue(firstEntry.containsKey("definedOn")); + assertTrue(firstEntry.containsKey("assigneeIdentifier")); + assertEquals("@" + username2, firstEntry.get("assigneeIdentifier")); + assertTrue(firstEntry.containsKey("roleName")); + assertEquals(DataverseRole.CURATOR, firstEntry.get("roleName")); + assertTrue(firstEntry.containsKey("assignedBy")); + assertEquals("@" + adminUsername, firstEntry.get("assignedBy")); + assertTrue(firstEntry.containsKey("assignedAt")); + assertNotEquals(JsonValue.NULL, firstEntry.get("assignedAt")); + assertTrue(firstEntry.containsKey("revokedBy")); + assertEquals(JsonValue.NULL, firstEntry.get("revokedBy")); + assertTrue(firstEntry.containsKey("revokedAt")); + assertEquals(JsonValue.NULL, firstEntry.get("revokedAt")); + + // Verify the second history entry + Map secondEntry = data.get(0); + // Entry should have definedOn field + assertTrue(secondEntry.containsKey("definedOn")); + assertTrue(secondEntry.containsKey("assigneeIdentifier")); + assertEquals("@" + username1, secondEntry.get("assigneeIdentifier")); + assertTrue(secondEntry.containsKey("roleName")); + assertEquals(DataverseRole.DS_CONTRIBUTOR, secondEntry.get("roleName")); + assertTrue(secondEntry.containsKey("assignedBy")); + assertEquals("@" + 
adminUsername, secondEntry.get("assignedBy")); + assertTrue(secondEntry.containsKey("assignedAt")); + assertNotEquals(JsonValue.NULL, secondEntry.get("assignedAt")); + assertTrue(secondEntry.containsKey("revokedBy")); + assertEquals("@" + adminUsername, secondEntry.get("revokedBy")); + assertTrue(secondEntry.containsKey("revokedAt")); + assertNotEquals(JsonValue.NULL, secondEntry.get("revokedAt")); + + // Get role assignment history in CSV format + Response historyCsv = UtilIT.getDataverseRoleAssignmentHistory(dataverseAlias, false, adminApiToken); + historyCsv.then().assertThat().statusCode(OK.getStatusCode()); + + // Generate CSV response + StringBuilder csvBuilder = AbstractApiBean.getHistoryCsvHeaderRow(); + // Verify CSV response + String csvBody = historyCsv.getBody().asString(); + assertTrue(csvBody.startsWith(csvBuilder.toString())); + String[] strings = csvBody.split("\n"); + assertTrue(strings[1].contains("@" + username2 + "," + DataverseRole.CURATOR + ",@" + adminUsername)); + assertTrue(strings[2].contains("@" + username2 + "," + DataverseRole.DS_CONTRIBUTOR + ",@" + adminUsername)); + + // Clean up + Response deleteDataverseResponse = UtilIT.deleteDataverse(dataverseAlias, adminApiToken); + deleteDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); + + Response deleteUser2Response = UtilIT.deleteUser(username2); + deleteUser2Response.prettyPrint(); + assertEquals(200, deleteUser2Response.getStatusCode()); + + Response deleteUser1Response = UtilIT.deleteUser(username1); + deleteUser1Response.prettyPrint(); + assertEquals(200, deleteUser1Response.getStatusCode()); + + Response deleteAdminUserResponse = UtilIT.deleteUser(adminUsername); + deleteAdminUserResponse.prettyPrint(); + assertEquals(200, deleteAdminUserResponse.getStatusCode()); + } +} + From fd4983c15ba2fb4e5c04e0907f56fb32e1f00f08 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 25 Jul 2025 14:55:07 -0400 Subject: [PATCH 063/634] Change to combine up to the same minute --- 
.../harvard/iq/dataverse/DataverseRoleServiceBean.java | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java index 3b058c00c82..ce8d7ea7328 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java @@ -435,10 +435,12 @@ private List processRoleAssignmentEntrie if (combineEntries) { Map finalHistoryMap = new HashMap<>(); for (RoleAssignmentHistoryConsolidatedEntry entry : historyMap.values()) { + // Test for dates that are the same to the minute String key = entry.getAssigneeIdentifier() + "|" + entry.getRoleName() + "|" + - entry.getAssignedBy() + "|" + entry.getAssignedAt() + "|" + - entry.getRevokedBy() + "|" + entry.getRevokedAt(); - + entry.getAssignedBy() + "|" + + (entry.getAssignedAt() != null ? new java.text.SimpleDateFormat("yyyy-MM-dd HH:mm").format(entry.getAssignedAt()) : "null") + "|" + + entry.getRevokedBy() + "|" + + (entry.getRevokedAt() != null ? 
new java.text.SimpleDateFormat("yyyy-MM-dd HH:mm").format(entry.getRevokedAt()) : "null"); RoleAssignmentHistoryConsolidatedEntry existingEntry = finalHistoryMap.get(key); if (existingEntry == null) { finalHistoryMap.put(key, entry); From 94f4ff23036cf17a96f60dddfe222bc2868cfd1a Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 25 Jul 2025 14:55:59 -0400 Subject: [PATCH 064/634] dataset, files ra history tests, fix csv for dataverse test --- .../api/RoleAssignmentHistoryIT.java | 294 ++++++++++++++++-- .../edu/harvard/iq/dataverse/api/UtilIT.java | 20 ++ 2 files changed, 293 insertions(+), 21 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/RoleAssignmentHistoryIT.java b/src/test/java/edu/harvard/iq/dataverse/api/RoleAssignmentHistoryIT.java index 3548cc074f7..97f5e7af544 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/RoleAssignmentHistoryIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/RoleAssignmentHistoryIT.java @@ -27,14 +27,14 @@ * @author [Your Name] */ public class RoleAssignmentHistoryIT { - + private static final Logger logger = Logger.getLogger(RoleAssignmentHistoryIT.class.getCanonicalName()); @BeforeAll public static void setUp() { RestAssured.baseURI = UtilIT.getRestAssuredBaseUri(); } - + @Test public void testDataverseRoleAssignmentHistory() { // Create admin user @@ -42,43 +42,43 @@ public void testDataverseRoleAssignmentHistory() { String adminUsername = UtilIT.getUsernameFromResponse(createAdminUser); String adminApiToken = UtilIT.getApiTokenFromResponse(createAdminUser); UtilIT.setSuperuserStatus(adminUsername, true); - + // Create regular users Response createUser1 = UtilIT.createRandomUser(); String username1 = UtilIT.getUsernameFromResponse(createUser1); - + Response createUser2 = UtilIT.createRandomUser(); String username2 = UtilIT.getUsernameFromResponse(createUser2); - + // Create dataverse Response createDataverseResponse = UtilIT.createRandomDataverse(adminApiToken); 
createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); - + // Assign roles to users Response grantContributor = UtilIT.grantRoleOnDataverse(dataverseAlias, DataverseRole.DS_CONTRIBUTOR, "@" + username1, adminApiToken); grantContributor.then().assertThat().statusCode(OK.getStatusCode()); - + Response grantCurator = UtilIT.grantRoleOnDataverse(dataverseAlias, DataverseRole.CURATOR, "@" + username2, adminApiToken); grantCurator.then().assertThat().statusCode(OK.getStatusCode()); - + // Revoke role from user1 grantContributor.prettyPrint(); String idToDelete = JsonPath.from(grantContributor.getBody().asString()).getString("data.id"); Response revokeContributor = UtilIT.revokeRoleOnDataverse(dataverseAlias, Long.parseLong(idToDelete), adminApiToken); revokeContributor.then().assertThat().statusCode(OK.getStatusCode()); - + // Get role assignment history in JSON format Response historyJson = UtilIT.getDataverseRoleAssignmentHistory(dataverseAlias, false, adminApiToken); historyJson.then().assertThat().statusCode(OK.getStatusCode()); - + historyJson.prettyPrint(); - + // Verify JSON response structure List> data = JsonPath.from(historyJson.getBody().asString()).getList("data"); // History should contain 2 entries assertTrue(data.size() == 2); - + // Verify the first history entry (the one unrevoked assignment) Map firstEntry = data.get(0); // Entry should have definedOn field @@ -95,7 +95,7 @@ public void testDataverseRoleAssignmentHistory() { assertEquals(JsonValue.NULL, firstEntry.get("revokedBy")); assertTrue(firstEntry.containsKey("revokedAt")); assertEquals(JsonValue.NULL, firstEntry.get("revokedAt")); - + // Verify the second history entry Map secondEntry = data.get(0); // Entry should have definedOn field @@ -112,11 +112,11 @@ public void testDataverseRoleAssignmentHistory() { assertEquals("@" + adminUsername, secondEntry.get("revokedBy")); 
assertTrue(secondEntry.containsKey("revokedAt")); assertNotEquals(JsonValue.NULL, secondEntry.get("revokedAt")); - + // Get role assignment history in CSV format - Response historyCsv = UtilIT.getDataverseRoleAssignmentHistory(dataverseAlias, false, adminApiToken); + Response historyCsv = UtilIT.getDataverseRoleAssignmentHistory(dataverseAlias, true, adminApiToken); historyCsv.then().assertThat().statusCode(OK.getStatusCode()); - + // Generate CSV response StringBuilder csvBuilder = AbstractApiBean.getHistoryCsvHeaderRow(); // Verify CSV response @@ -125,22 +125,274 @@ public void testDataverseRoleAssignmentHistory() { String[] strings = csvBody.split("\n"); assertTrue(strings[1].contains("@" + username2 + "," + DataverseRole.CURATOR + ",@" + adminUsername)); assertTrue(strings[2].contains("@" + username2 + "," + DataverseRole.DS_CONTRIBUTOR + ",@" + adminUsername)); - + // Clean up Response deleteDataverseResponse = UtilIT.deleteDataverse(dataverseAlias, adminApiToken); deleteDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); - + Response deleteUser2Response = UtilIT.deleteUser(username2); deleteUser2Response.prettyPrint(); assertEquals(200, deleteUser2Response.getStatusCode()); - + Response deleteUser1Response = UtilIT.deleteUser(username1); deleteUser1Response.prettyPrint(); assertEquals(200, deleteUser1Response.getStatusCode()); - + Response deleteAdminUserResponse = UtilIT.deleteUser(adminUsername); deleteAdminUserResponse.prettyPrint(); assertEquals(200, deleteAdminUserResponse.getStatusCode()); } -} + @Test + public void testDatasetRoleAssignmentHistory() { + // Create admin user + Response createAdminUser = UtilIT.createRandomUser(); + String adminUsername = UtilIT.getUsernameFromResponse(createAdminUser); + String adminApiToken = UtilIT.getApiTokenFromResponse(createAdminUser); + UtilIT.setSuperuserStatus(adminUsername, true); + + // Create regular users + Response createUser1 = UtilIT.createRandomUser(); + String username1 = 
UtilIT.getUsernameFromResponse(createUser1); + + Response createUser2 = UtilIT.createRandomUser(); + String username2 = UtilIT.getUsernameFromResponse(createUser2); + + // Create dataverse and dataset + Response createDataverseResponse = UtilIT.createRandomDataverse(adminApiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, adminApiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + // Assign roles to users on dataset + Response grantEditor = UtilIT.grantRoleOnDataset(datasetId.toString(), DataverseRole.EDITOR, "@" + username1, adminApiToken); + grantEditor.then().assertThat().statusCode(OK.getStatusCode()); + + Response grantFileDownloader = UtilIT.grantRoleOnDataset(datasetId.toString(), DataverseRole.FILE_DOWNLOADER, "@" + username2, adminApiToken); + grantFileDownloader.then().assertThat().statusCode(OK.getStatusCode()); + + // Revoke role from user1 + grantEditor.prettyPrint(); + Long idToDelete = JsonPath.from(grantEditor.getBody().asString()).getLong("data.id"); + Response revokeEditor = UtilIT.revokeRoleOnDataset(datasetId.toString(), idToDelete, adminApiToken); + revokeEditor.then().assertThat().statusCode(OK.getStatusCode()); + + // Get role assignment history in JSON format + Response historyJson = UtilIT.getDatasetRoleAssignmentHistory(datasetId, false, adminApiToken); + historyJson.then().assertThat().statusCode(OK.getStatusCode()); + + historyJson.prettyPrint(); + + // Verify JSON response structure + List> data = JsonPath.from(historyJson.getBody().asString()).getList("data"); + // History should contain 2 entries + assertTrue(data.size() == 2); + + // Verify the first history entry (the one unrevoked 
assignment) + Map firstEntry = data.get(0); + // Entry should have definedOn field + assertTrue(firstEntry.containsKey("definedOn")); + assertEquals(datasetId.toString(), firstEntry.get("definedOn")); + assertTrue(firstEntry.containsKey("assigneeIdentifier")); + assertEquals("@" + username2, firstEntry.get("assigneeIdentifier")); + assertTrue(firstEntry.containsKey("roleName")); + assertEquals(DataverseRole.FILE_DOWNLOADER, firstEntry.get("roleName")); + assertTrue(firstEntry.containsKey("assignedBy")); + assertEquals("@" + adminUsername, firstEntry.get("assignedBy")); + assertTrue(firstEntry.containsKey("assignedAt")); + assertNotEquals(JsonValue.NULL, firstEntry.get("assignedAt")); + assertTrue(firstEntry.containsKey("revokedBy")); + assertEquals(JsonValue.NULL, firstEntry.get("revokedBy")); + assertTrue(firstEntry.containsKey("revokedAt")); + assertEquals(JsonValue.NULL, firstEntry.get("revokedAt")); + + // Verify the second history entry + Map secondEntry = data.get(0); + // Entry should have definedOn field + assertTrue(secondEntry.containsKey("definedOn")); + assertTrue(secondEntry.containsKey("assigneeIdentifier")); + assertEquals("@" + username1, secondEntry.get("assigneeIdentifier")); + assertTrue(secondEntry.containsKey("roleName")); + assertEquals(DataverseRole.EDITOR, secondEntry.get("roleName")); + assertTrue(secondEntry.containsKey("assignedBy")); + assertEquals("@" + adminUsername, secondEntry.get("assignedBy")); + assertTrue(secondEntry.containsKey("assignedAt")); + assertNotEquals(JsonValue.NULL, secondEntry.get("assignedAt")); + assertTrue(secondEntry.containsKey("revokedBy")); + assertEquals("@" + adminUsername, secondEntry.get("revokedBy")); + assertTrue(secondEntry.containsKey("revokedAt")); + assertNotEquals(JsonValue.NULL, secondEntry.get("revokedAt")); + + // Get role assignment history in CSV format + Response historyCsv = UtilIT.getDatasetRoleAssignmentHistory(datasetId, true, adminApiToken); + 
historyCsv.then().assertThat().statusCode(OK.getStatusCode()); + + // Generate CSV response + StringBuilder csvBuilder = AbstractApiBean.getHistoryCsvHeaderRow(); + // Verify CSV response + String csvBody = historyCsv.getBody().asString(); + assertTrue(csvBody.startsWith(csvBuilder.toString())); + String[] strings = csvBody.split("\n"); + assertTrue(strings[1].contains("@" + username2 + "," + DataverseRole.FILE_DOWNLOADER + ",@" + adminUsername)); + assertTrue(strings[2].contains("@" + username1 + "," + DataverseRole.EDITOR + ",@" + adminUsername)); + + // Clean up + Response deleteDatasetResponse = UtilIT.destroyDataset(datasetId, adminApiToken); + deleteDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + Response deleteDataverseResponse = UtilIT.deleteDataverse(dataverseAlias, adminApiToken); + deleteDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); + + Response deleteUser2Response = UtilIT.deleteUser(username2); + deleteUser2Response.prettyPrint(); + assertEquals(200, deleteUser2Response.getStatusCode()); + + Response deleteUser1Response = UtilIT.deleteUser(username1); + deleteUser1Response.prettyPrint(); + assertEquals(200, deleteUser1Response.getStatusCode()); + + Response deleteAdminUserResponse = UtilIT.deleteUser(adminUsername); + deleteAdminUserResponse.prettyPrint(); + assertEquals(200, deleteAdminUserResponse.getStatusCode()); + } + + /* + * This test is primarily to check the unique functionality for files where grants on multiple files within the same minute will be combined into one entry. 
+ */ + @Test + public void testFileRoleAssignmentHistory() { + // Create admin user + Response createAdminUser = UtilIT.createRandomUser(); + String adminUsername = UtilIT.getUsernameFromResponse(createAdminUser); + String adminApiToken = UtilIT.getApiTokenFromResponse(createAdminUser); + UtilIT.setSuperuserStatus(adminUsername, true); + + // Create regular user + Response createUser1 = UtilIT.createRandomUser(); + String username1 = UtilIT.getUsernameFromResponse(createUser1); + + // Create dataverse and dataset + Response createDataverseResponse = UtilIT.createRandomDataverse(adminApiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, adminApiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + // Upload two files + String pathToFile1 = "src/main/webapp/resources/images/dataverseproject.png"; + Response addFile1Response = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile1, adminApiToken); + addFile1Response.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.files[0].label", equalTo("dataverseproject.png")); + Long file1Id = JsonPath.from(addFile1Response.body().asString()).getLong("data.files[0].dataFile.id"); + + String pathToFile2 = "src/main/webapp/resources/images/cc0.png"; + Response addFile2Response = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile2, adminApiToken); + addFile2Response.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.files[0].label", equalTo("cc0.png")); + Long file2Id = JsonPath.from(addFile2Response.body().asString()).getLong("data.files[0].dataFile.id"); + + // Restrict both files + Response restrictFile1Response = 
UtilIT.restrictFile(file1Id.toString(), true, adminApiToken); + restrictFile1Response.then().assertThat().statusCode(OK.getStatusCode()); + + Response restrictFile2Response = UtilIT.restrictFile(file2Id.toString(), true, adminApiToken); + restrictFile2Response.then().assertThat().statusCode(OK.getStatusCode()); + + // Assign FileDownloader role to the user on both files + String fileIds = file1Id + ", " + file2Id; + Response grantFileDownloader1 = UtilIT.grantFileAccess(file1Id.toString(), "@" + username1, adminApiToken); + grantFileDownloader1.then().assertThat().statusCode(OK.getStatusCode()); + + Response grantFileDownloader2 = UtilIT.grantFileAccess(file2Id.toString(), "@" + username1, adminApiToken); + grantFileDownloader2.then().assertThat().statusCode(OK.getStatusCode()); + + // Get role assignment history in JSON format + Response historyJson = UtilIT.getDatasetFilesRoleAssignmentHistory(datasetId, false, adminApiToken); + historyJson.then().assertThat().statusCode(OK.getStatusCode()); + + historyJson.prettyPrint(); + + // Verify JSON response structure + List> data = JsonPath.from(historyJson.getBody().asString()).getList("data"); + // History should usually contain 1 entry as both files should be listed on one line if the roles were assigned in the same minute + // When that isn't the case, their assignedAt dates must be different. 
+ assertTrue(data.size() > 0); + + if (data.size() == 1) { + // Verify the first history entry (the one unrevoked assignment) + Map firstEntry = data.get(0); + // Entry should have definedOn field with both file IDs + assertTrue(firstEntry.containsKey("definedOn")); + assertEquals(fileIds, firstEntry.get("definedOn")); + assertTrue(firstEntry.containsKey("assigneeIdentifier")); + assertEquals("@" + username1, firstEntry.get("assigneeIdentifier")); + assertTrue(firstEntry.containsKey("roleName")); + assertEquals(DataverseRole.FILE_DOWNLOADER, firstEntry.get("roleName")); + assertTrue(firstEntry.containsKey("assignedBy")); + assertEquals("@" + adminUsername, firstEntry.get("assignedBy")); + assertTrue(firstEntry.containsKey("assignedAt")); + assertNotEquals(JsonValue.NULL, firstEntry.get("assignedAt")); + assertTrue(firstEntry.containsKey("revokedBy")); + assertEquals(JsonValue.NULL, firstEntry.get("revokedBy")); + assertTrue(firstEntry.containsKey("revokedAt")); + assertEquals(JsonValue.NULL, firstEntry.get("revokedAt")); + } else { + Map firstEntry = data.get(0); + // Entry should have definedOn field with the first file ID + assertTrue(firstEntry.containsKey("definedOn")); + assertEquals(file1Id.toString(), firstEntry.get("definedOn")); + assertTrue(firstEntry.containsKey("assigneeIdentifier")); + assertEquals("@" + username1, firstEntry.get("assigneeIdentifier")); + assertTrue(firstEntry.containsKey("roleName")); + assertEquals(DataverseRole.FILE_DOWNLOADER, firstEntry.get("roleName")); + assertTrue(firstEntry.containsKey("assignedBy")); + assertEquals("@" + adminUsername, firstEntry.get("assignedBy")); + assertTrue(firstEntry.containsKey("assignedAt")); + assertNotEquals(JsonValue.NULL, firstEntry.get("assignedAt")); + assertTrue(firstEntry.containsKey("revokedBy")); + assertEquals(JsonValue.NULL, firstEntry.get("revokedBy")); + assertTrue(firstEntry.containsKey("revokedAt")); + assertEquals(JsonValue.NULL, firstEntry.get("revokedAt")); + Map secondEntry = 
data.get(1); + // Entry should have definedOn field with the second file ID + assertTrue(secondEntry.containsKey("definedOn")); + assertEquals(file2Id.toString(), secondEntry.get("definedOn")); + assertTrue(secondEntry.containsKey("assigneeIdentifier")); + assertEquals("@" + username1, secondEntry.get("assigneeIdentifier")); + assertTrue(secondEntry.containsKey("roleName")); + assertEquals(DataverseRole.FILE_DOWNLOADER, secondEntry.get("roleName")); + assertTrue(secondEntry.containsKey("assignedBy")); + assertEquals("@" + adminUsername, secondEntry.get("assignedBy")); + assertTrue(secondEntry.containsKey("assignedAt")); + assertNotEquals(JsonValue.NULL, secondEntry.get("assignedAt")); + assertTrue(secondEntry.containsKey("revokedBy")); + assertEquals(JsonValue.NULL, secondEntry.get("revokedBy")); + assertTrue(secondEntry.containsKey("revokedAt")); + assertEquals(JsonValue.NULL, secondEntry.get("revokedAt")); + // For two lines the assignedAt dates should be different + assertNotEquals(firstEntry.get("assignedAt"), secondEntry.get("assignedAt")); + } + + // Clean up + Response deleteDatasetResponse = UtilIT.destroyDataset(datasetId, adminApiToken); + deleteDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + Response deleteDataverseResponse = UtilIT.deleteDataverse(dataverseAlias, adminApiToken); + deleteDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); + + Response deleteUser1Response = UtilIT.deleteUser(username1); + deleteUser1Response.prettyPrint(); + assertEquals(200, deleteUser1Response.getStatusCode()); + + Response deleteAdminUserResponse = UtilIT.deleteUser(adminUsername); + deleteAdminUserResponse.prettyPrint(); + assertEquals(200, deleteAdminUserResponse.getStatusCode()); + } +} diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index fa340600ed2..167f9ea6c44 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ 
b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -5016,4 +5016,24 @@ public static Response getDataverseRoleAssignmentHistory(String dataverseAlias, return requestSpecification .get("/api/v1/dataverses/" + dataverseAlias + "/assignments/history"); } + + public static Response getDatasetRoleAssignmentHistory(Integer datasetId, boolean downloadAsCsv, String apiToken) { + RequestSpecification requestSpecification = given() + .header(API_TOKEN_HTTP_HEADER, apiToken); + + requestSpecification = requestSpecification.header("Accept", downloadAsCsv ? "text/csv" : "application/json"); + + return requestSpecification + .get("/api/v1/datasets/" + datasetId + "/assignments/history"); + } + + public static Response getDatasetFilesRoleAssignmentHistory(Integer datasetId, boolean downloadAsCsv, String apiToken) { + RequestSpecification requestSpecification = given() + .header(API_TOKEN_HTTP_HEADER, apiToken); + + requestSpecification = requestSpecification.header("Accept", downloadAsCsv ? "text/csv" : "application/json"); + + return requestSpecification + .get("/api/v1/datasets/" + datasetId + "/files/assignments/history"); + } } From ceaf8551638976621d98bafb5cfcd94a1ba41f19 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 25 Jul 2025 16:27:48 -0400 Subject: [PATCH 065/634] local tests --- .../DataverseRoleServiceBeanTest.java | 311 ++++++++++++++++++ 1 file changed, 311 insertions(+) create mode 100644 src/test/java/edu/harvard/iq/dataverse/DataverseRoleServiceBeanTest.java diff --git a/src/test/java/edu/harvard/iq/dataverse/DataverseRoleServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/DataverseRoleServiceBeanTest.java new file mode 100644 index 00000000000..95a278fd7c7 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/DataverseRoleServiceBeanTest.java @@ -0,0 +1,311 @@ +package edu.harvard.iq.dataverse; + +import edu.harvard.iq.dataverse.DataverseRoleServiceBean.RoleAssignmentHistoryConsolidatedEntry; +import 
edu.harvard.iq.dataverse.RoleAssignmentHistory; +import edu.harvard.iq.dataverse.authorization.DataverseRole; +import edu.harvard.iq.dataverse.authorization.RoleAssignee; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import jakarta.persistence.EntityManager; +import jakarta.persistence.TypedQuery; +import java.lang.reflect.Method; +import java.util.ArrayList; +import java.util.Calendar; +import java.util.Date; +import java.util.List; + +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.Mockito.*; + +@ExtendWith(MockitoExtension.class) +public class DataverseRoleServiceBeanTest { + + @InjectMocks + private DataverseRoleServiceBean dataverseRoleServiceBean; + + @Mock + private EntityManager em; + + private Method processRoleAssignmentEntriesMethod; + + @BeforeEach + public void setUp() throws Exception { + // Get the private method using reflection + processRoleAssignmentEntriesMethod = DataverseRoleServiceBean.class.getDeclaredMethod( + "processRoleAssignmentEntries", List.class, boolean.class); + processRoleAssignmentEntriesMethod.setAccessible(true); + } + + @Test + public void testProcessRoleAssignmentEntries_WithoutCombining() throws Exception { + // Create test data + List entries = new ArrayList<>(); + + // Create two entries for the same role assignment (assign and revoke) + RoleAssignmentHistory entry1 = createRoleAssignmentHistoryEntry( + 1L, "user1", "role1", 101L, "admin1", + new Date(), RoleAssignmentHistory.ActionType.ASSIGN); + + RoleAssignmentHistory entry2 = createRoleAssignmentHistoryEntry( + 1L, "user1", "role1", 101L, "admin2", + new Date(), RoleAssignmentHistory.ActionType.REVOKE); + + // Create another role assignment for a different user + RoleAssignmentHistory entry3 = createRoleAssignmentHistoryEntry( + 2L, "user2", 
"role2", 102L, "admin1", + new Date(), RoleAssignmentHistory.ActionType.ASSIGN); + + entries.add(entry1); + entries.add(entry2); + entries.add(entry3); + + // Call the private method + @SuppressWarnings("unchecked") + List result = (List) processRoleAssignmentEntriesMethod.invoke( + dataverseRoleServiceBean, entries, false); + + // Verify results + assertEquals(2, result.size(), "Should have 2 consolidated entries"); + + // Find the entry for user1 - should always be second since it is revoked and the user2 role is not. + RoleAssignmentHistoryConsolidatedEntry user1Entry = result.get(1); + + assertNotNull(user1Entry, "Should have an entry for user1"); + assertEquals("role1", user1Entry.getRoleName()); + assertEquals("admin1", user1Entry.getAssignedBy()); + assertEquals("admin2", user1Entry.getRevokedBy()); + assertNotNull(user1Entry.getAssignedAt()); + assertNotNull(user1Entry.getRevokedAt()); + assertEquals(1, user1Entry.getDefinitionPointIds().size()); + assertEquals(101L, user1Entry.getDefinitionPointIds().get(0)); + + // Find the entry for user2 - always first since it is not revoked + RoleAssignmentHistoryConsolidatedEntry user2Entry = result.get(0); + + assertNotNull(user2Entry, "Should have an entry for user2"); + assertEquals("role2", user2Entry.getRoleName()); + assertEquals("admin1", user2Entry.getAssignedBy()); + assertNull(user2Entry.getRevokedBy()); + assertNotNull(user2Entry.getAssignedAt()); + assertNull(user2Entry.getRevokedAt()); + assertEquals(1, user2Entry.getDefinitionPointIds().size()); + assertEquals(102L, user2Entry.getDefinitionPointIds().get(0)); + } + + @Test + public void testProcessRoleAssignmentEntries_WithCombining() throws Exception { + // Create test data + List entries = new ArrayList<>(); + + // Create entries for the same minute but different definition points + Date baseTime = new Date(); + Calendar cal = Calendar.getInstance(); + cal.setTime(baseTime); + + // First role assignment + RoleAssignmentHistory entry1 = 
createRoleAssignmentHistoryEntry( + 1L, "user1", "role1", 101L, "admin1", + baseTime, RoleAssignmentHistory.ActionType.ASSIGN); + + // Second role assignment with same minute + RoleAssignmentHistory entry2 = createRoleAssignmentHistoryEntry( + 2L, "user1", "role1", 102L, "admin1", + baseTime, RoleAssignmentHistory.ActionType.ASSIGN); + + // Third role assignment with different minute + cal.add(Calendar.MINUTE, 1); + Date laterTime = cal.getTime(); + RoleAssignmentHistory entry3 = createRoleAssignmentHistoryEntry( + 3L, "user1", "role1", 103L, "admin1", + laterTime, RoleAssignmentHistory.ActionType.ASSIGN); + + // Fourth role assignment with different role + RoleAssignmentHistory entry4 = createRoleAssignmentHistoryEntry( + 4L, "user1", "role2", 104L, "admin1", + baseTime, RoleAssignmentHistory.ActionType.ASSIGN); + + entries.add(entry1); + entries.add(entry2); + entries.add(entry3); + entries.add(entry4); + + // Call the private method + @SuppressWarnings("unchecked") + List result = (List) processRoleAssignmentEntriesMethod.invoke( + dataverseRoleServiceBean, entries, true); + + // Verify results + assertEquals(3, result.size(), "Should have 3 consolidated entries"); + + // Find the entry for user1 with role1 and baseTime + RoleAssignmentHistoryConsolidatedEntry combinedEntry = result.stream() + .filter(e -> e.getAssigneeIdentifier().equals("user1") && + e.getRoleName().equals("role1") && + e.getDefinitionPointIds().contains(101L)) + .findFirst() + .orElse(null); + + assertNotNull(combinedEntry, "Should have a combined entry for user1 with role1"); + assertEquals(2, combinedEntry.getDefinitionPointIds().size(), + "Combined entry should have 2 definition points"); + assertTrue(combinedEntry.getDefinitionPointIds().contains(101L), + "Combined entry should contain definition point 101"); + assertTrue(combinedEntry.getDefinitionPointIds().contains(102L), + "Combined entry should contain definition point 102"); + + // Find the entry for user1 with role1 and laterTime + 
RoleAssignmentHistoryConsolidatedEntry laterEntry = result.stream() + .filter(e -> e.getAssigneeIdentifier().equals("user1") && + e.getRoleName().equals("role1") && + !e.getDefinitionPointIds().contains(101L)) + .findFirst() + .orElse(null); + + assertNotNull(laterEntry, "Should have an entry for user1 with role1 at later time"); + assertEquals(1, laterEntry.getDefinitionPointIds().size()); + assertEquals(103L, laterEntry.getDefinitionPointIds().get(0)); + + // Find the entry for user1 with role2 + RoleAssignmentHistoryConsolidatedEntry role2Entry = result.stream() + .filter(e -> e.getAssigneeIdentifier().equals("user1") && + e.getRoleName().equals("role2")) + .findFirst() + .orElse(null); + + assertNotNull(role2Entry, "Should have an entry for user1 with role2"); + assertEquals(1, role2Entry.getDefinitionPointIds().size()); + assertEquals(104L, role2Entry.getDefinitionPointIds().get(0)); + } + + @Test + public void testProcessRoleAssignmentEntries_WithAssignAndRevokeAndCombining() throws Exception { + // Create test data + List entries = new ArrayList<>(); + + // Create base time + Date baseTime = new Date(); + Calendar cal = Calendar.getInstance(); + cal.setTime(baseTime); + + // Create revoke time (5 minutes later) + cal.add(Calendar.MINUTE, 5); + Date revokeTime = cal.getTime(); + + // First role assignment and revoke + RoleAssignmentHistory entry1 = createRoleAssignmentHistoryEntry( + 1L, "user1", "role1", 101L, "admin1", + baseTime, RoleAssignmentHistory.ActionType.ASSIGN); + + RoleAssignmentHistory entry2 = createRoleAssignmentHistoryEntry( + 1L, "user1", "role1", 101L, "admin2", + revokeTime, RoleAssignmentHistory.ActionType.REVOKE); + + // Second role assignment and revoke (same times) + RoleAssignmentHistory entry3 = createRoleAssignmentHistoryEntry( + 2L, "user1", "role1", 102L, "admin1", + baseTime, RoleAssignmentHistory.ActionType.ASSIGN); + + RoleAssignmentHistory entry4 = createRoleAssignmentHistoryEntry( + 2L, "user1", "role1", 102L, "admin2", + 
revokeTime, RoleAssignmentHistory.ActionType.REVOKE); + + // Third role assignment with different revoke time + cal.add(Calendar.MINUTE, 1); + Date laterRevokeTime = cal.getTime(); + + RoleAssignmentHistory entry5 = createRoleAssignmentHistoryEntry( + 3L, "user1", "role1", 103L, "admin1", + baseTime, RoleAssignmentHistory.ActionType.ASSIGN); + + RoleAssignmentHistory entry6 = createRoleAssignmentHistoryEntry( + 3L, "user1", "role1", 103L, "admin2", + laterRevokeTime, RoleAssignmentHistory.ActionType.REVOKE); + + entries.add(entry1); + entries.add(entry2); + entries.add(entry3); + entries.add(entry4); + entries.add(entry5); + entries.add(entry6); + + // Call the private method + @SuppressWarnings("unchecked") + List result = (List) processRoleAssignmentEntriesMethod.invoke( + dataverseRoleServiceBean, entries, true); + + // Verify results + assertEquals(2, result.size(), "Should have 2 consolidated entries"); + + // Find the entry for user1 with role1 and baseTime - oldest == last + RoleAssignmentHistoryConsolidatedEntry combinedEntry = result.get(1); + + assertNotNull(combinedEntry, "Should have a combined entry for user1 with role1"); + assertEquals(2, combinedEntry.getDefinitionPointIds().size(), + "Combined entry should have 2 definition points"); + assertTrue(combinedEntry.getDefinitionPointIds().contains(101L), + "Combined entry should contain definition point 101"); + assertTrue(combinedEntry.getDefinitionPointIds().contains(102L), + "Combined entry should contain definition point 102"); + + // Find the entry for user1 with role1 and laterTime + RoleAssignmentHistoryConsolidatedEntry laterEntry = result.stream() + .filter(e -> e.getAssigneeIdentifier().equals("user1") && + e.getRoleName().equals("role1") && + !e.getDefinitionPointIds().contains(101L)) + .findFirst() + .orElse(null); + + assertNotNull(laterEntry, "Should have an entry for user1 with role1 at later time"); + assertEquals(1, laterEntry.getDefinitionPointIds().size()); + assertEquals(103L, 
laterEntry.getDefinitionPointIds().get(0)); + + } + + private RoleAssignmentHistory createRoleAssignmentHistoryEntry( + Long roleAssignmentId, String assigneeIdentifier, String roleAlias, + Long definitionPointId, String actionByIdentifier, + Date actionTimestamp, RoleAssignmentHistory.ActionType actionType) { + + // Create a DataverseRole + DataverseRole role = new DataverseRole(); + role.setAlias(roleAlias); + role.setId(1L); // Arbitrary ID for testing + + // Create a DvObject for definition point + DvObject dvObject = new Dataverse(); + dvObject.setId(definitionPointId); + + // Create a RoleAssignee (using a mock since it's an interface) + RoleAssignee assignee = mock(RoleAssignee.class); + when(assignee.getIdentifier()).thenReturn(assigneeIdentifier); + + // Create the RoleAssignment + RoleAssignment roleAssignment = new RoleAssignment(role, assignee, dvObject, null); + // Set the ID using reflection since it's normally set by the persistence layer + try { + java.lang.reflect.Field idField = RoleAssignment.class.getDeclaredField("id"); + idField.setAccessible(true); + idField.set(roleAssignment, roleAssignmentId); + } catch (Exception e) { + throw new RuntimeException("Failed to set RoleAssignment ID", e); + } + + // Create the RoleAssignmentHistory entry + RoleAssignmentHistory history = new RoleAssignmentHistory(); + history.setRoleAssignmentId(roleAssignmentId); + history.setAssigneeIdentifier(assigneeIdentifier); + history.setRoleAlias(roleAlias); + history.setDefinitionPointId(definitionPointId); + history.setActionByIdentifier(actionByIdentifier); + history.setActionTimestamp(actionTimestamp); + history.setActionType(actionType); + + return history; + } +} \ No newline at end of file From 8ad46ff58f5738f6b824181201957fa1a2772222 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 25 Jul 2025 16:28:04 -0400 Subject: [PATCH 066/634] IT tests (for use when the feature is on) --- .../harvard/iq/dataverse/api/RoleAssignmentHistoryIT.java | 8 +++----- 1 file 
changed, 3 insertions(+), 5 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/RoleAssignmentHistoryIT.java b/src/test/java/edu/harvard/iq/dataverse/api/RoleAssignmentHistoryIT.java index 97f5e7af544..3e105ac9976 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/RoleAssignmentHistoryIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/RoleAssignmentHistoryIT.java @@ -3,8 +3,6 @@ import io.restassured.RestAssured; import io.restassured.path.json.JsonPath; import io.restassured.response.Response; -import jakarta.json.JsonArray; -import jakarta.json.JsonObject; import jakarta.json.JsonValue; import java.util.List; @@ -19,12 +17,12 @@ import org.junit.jupiter.api.Test; import edu.harvard.iq.dataverse.authorization.DataverseRole; -import edu.harvard.iq.dataverse.util.BundleUtil; /** - * Integration tests for the Role Assignment History API endpoints. + * Integration tests for the Role Assignment History API endpoints. + * + * Note: These tests require the role-assignment-history FeatureFlag to be true and are not run in normal builds * - * @author [Your Name] */ public class RoleAssignmentHistoryIT { From 77972b2238d79e6e41302ddb864484a158f2c1d3 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 25 Jul 2025 16:28:25 -0400 Subject: [PATCH 067/634] doc updates with info on response formats --- doc/sphinx-guides/source/api/native-api.rst | 40 +++++++++++++++++++++ 1 file changed, 40 insertions(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 431c2776a50..e81a5a147db 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -1476,6 +1476,22 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -H "Accept: application/json" "https://demo.dataverse.org/api/datasets/dvAlias/assignments/history" +The response is a JSON array of role assignment 
history entries with the following structure for each entry: + +.. code-block:: json + + { + "definedOn": "1", + "assigneeIdentifier": "@user1", + "roleName": "Admin", + "assignedBy": "@dataverseAdmin", + "assignedAt": "2023-01-01T12:00:00Z", + "revokedBy": null, + "revokedAt": null + } + +For revoked assignments, the "revokedBy" and "revokedAt" fields will contain values instead of null. + To retrieve the history in CSV format, change the Accept header to "text/csv": .. code-block:: bash @@ -1492,6 +1508,8 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -H "Accept: text/csv" "https://demo.dataverse.org/api/dataverses/3/assignments/history" +The CSV response has column headers mirroring the json entries. They are internationalized (when internationalization is configured). + Note: This feature requires the "role-assignment-history" feature flag to be enabled (see :ref:`feature-flags`). Datasets @@ -4100,6 +4118,22 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -H "Accept: application/json" "https://demo.dataverse.org/api/datasets/:persistentId/assignments/history?persistentId=doi:10.5072/FK2/ABCDEF" +The response is a JSON array of role assignment history entries with the following structure for each entry: + +.. code-block:: json + + { + "definedOn": "3", + "assigneeIdentifier": "@user1", + "roleName": "Admin", + "assignedBy": "@dataverseAdmin", + "assignedAt": "2023-01-01T12:00:00Z", + "revokedBy": null, + "revokedAt": null + } + +For revoked assignments, the "revokedBy" and "revokedAt" fields will contain values instead of null. + To retrieve the history in CSV format, change the Accept header to "text/csv": .. 
code-block:: bash @@ -4116,6 +4150,8 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -H "Accept: text/csv" "https://demo.dataverse.org/api/datasets/3/assignments/history" +The CSV response has column headers mirroring the json entries. They are internationalized (when internationalization is configured). + Note: This feature requires the "role-assignment-history" feature flag to be enabled (see :ref:`feature-flags`). Dataset Files Role Assignment History @@ -4153,6 +4189,8 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -H "Accept: application/json" "https://demo.dataverse.org/api/datasets/:persistentId/files/assignments/history?persistentId=doi:10.5072/FK2/ABCDEF" +The JSON response for this call is the same as for the /api/datasets/{id}/assignments/history call above with the exception that definedOn will be a comma separated list of one or more file ids. + To retrieve the history in CSV format, change the Accept header to "text/csv": .. code-block:: bash @@ -4169,6 +4207,8 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -H "Accept: text/csv" "https://demo.dataverse.org/api/datasets/3/files/assignments/history" +The CSV response for this call is the same as for the /api/datasets/{id}/assignments/history call above with the exception that definedOn will be a comma separated list of one or more file ids. + Note: This feature requires the "role-assignment-history" feature flag to be enabled (see :ref:`feature-flags`). 
Files From a348b98bac4ac871f696954b929fdecd519f0444 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 25 Jul 2025 18:13:04 -0400 Subject: [PATCH 068/634] change table, get rid of audit word --- .../iq/dataverse/RoleAssignmentHistory.java | 28 +++++++++---------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/RoleAssignmentHistory.java b/src/main/java/edu/harvard/iq/dataverse/RoleAssignmentHistory.java index 62bed6b3233..a5f1666794a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/RoleAssignmentHistory.java +++ b/src/main/java/edu/harvard/iq/dataverse/RoleAssignmentHistory.java @@ -20,15 +20,15 @@ import edu.harvard.iq.dataverse.engine.command.DataverseRequest; @Entity -@Table(name = "role_assignment_audit", indexes = { - @Index(name = "idx_raa_role_assignment_id", columnList = "role_assignment_id"), - @Index(name = "idx_raa_action_type", columnList = "action_type"), - @Index(name = "idx_raa_action_timestamp", columnList = "action_timestamp"), - @Index(name = "idx_raa_action_by_identifier", columnList = "action_by_identifier"), - @Index(name = "idx_raa_assignee_identifier", columnList = "assignee_identifier"), - @Index(name = "idx_raa_role_id", columnList = "role_id"), - @Index(name = "idx_raa_definition_point_id", columnList = "definition_point_id") -}) +@Table(name = "roleassignmenthistory", indexes = { + @Index(columnList = "role_assignment_id"), + @Index(columnList = "action_type"), + @Index(columnList = "action_timestamp"), + @Index(columnList = "action_by_identifier"), + @Index(columnList = "assignee_identifier"), + @Index(columnList = "role_id"), + @Index(columnList = "definition_point_id") + }) @NamedQueries({ @NamedQuery(name = "RoleAssignmentHistory.findByDefinitionPointId", query = "SELECT ra FROM RoleAssignmentHistory ra WHERE ra.definitionPointId = :definitionPointId ORDER BY ra.roleAssignmentId, ra.actionTimestamp DESC"), @@ -41,8 +41,8 @@ public class RoleAssignmentHistory implements 
Serializable { @Id @GeneratedValue(strategy = GenerationType.IDENTITY) - @Column(name = "audit_id") - private Long auditId; + @Column(name = "entry_id") + private Long entry_id; @Column(name = "role_assignment_id") private Long roleAssignmentId; @@ -100,11 +100,11 @@ public RoleAssignmentHistory(RoleAssignment roleAssignment, DataverseRequest req // Getters and setters public Long getEntryId() { - return auditId; + return entry_id; } - public void setAuditId(Long auditId) { - this.auditId = auditId; + public void setEntryId(Long entryId) { + this.entry_id = entryId; } public Long getRoleAssignmentId() { From a1a85b4a3ce02c890e9eabe38e331135f00df5cd Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 25 Jul 2025 18:26:56 -0400 Subject: [PATCH 069/634] update flag name in xhtml --- src/main/webapp/permissions-manage-files.xhtml | 2 +- src/main/webapp/permissions-manage.xhtml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/webapp/permissions-manage-files.xhtml b/src/main/webapp/permissions-manage-files.xhtml index 80e0a4eb1d3..5f450c4ae63 100644 --- a/src/main/webapp/permissions-manage-files.xhtml +++ b/src/main/webapp/permissions-manage-files.xhtml @@ -221,7 +221,7 @@
-
+
#{bundle['dataverse.permissions.history']} #{bundle['dataverse.permissions.history.description']}
diff --git a/src/main/webapp/permissions-manage.xhtml b/src/main/webapp/permissions-manage.xhtml index 354b963be6c..6289089c912 100644 --- a/src/main/webapp/permissions-manage.xhtml +++ b/src/main/webapp/permissions-manage.xhtml @@ -198,7 +198,7 @@
-
+
#{bundle['dataverse.permissions.history']} #{bundle['dataverse.permissions.history.description']}
From 8a8ebe24c9539a615a11842a132cd89f29ffc9b3 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Tue, 5 Aug 2025 14:07:44 -0400 Subject: [PATCH 070/634] feature to extend the get storage driver API --- .../11695-change-api-get-storage-driver.md | 10 ++++++ doc/sphinx-guides/source/api/changelog.rst | 1 + .../edu/harvard/iq/dataverse/api/Admin.java | 3 +- .../harvard/iq/dataverse/api/Datasets.java | 2 +- .../iq/dataverse/util/json/JsonPrinter.java | 10 ++++++ .../harvard/iq/dataverse/api/DatasetsIT.java | 31 +++++++++++++++++++ .../harvard/iq/dataverse/api/S3AccessIT.java | 8 ++--- .../edu/harvard/iq/dataverse/api/UtilIT.java | 6 ++++ 8 files changed, 65 insertions(+), 6 deletions(-) create mode 100644 doc/release-notes/11695-change-api-get-storage-driver.md diff --git a/doc/release-notes/11695-change-api-get-storage-driver.md b/doc/release-notes/11695-change-api-get-storage-driver.md new file mode 100644 index 00000000000..3680da4b72c --- /dev/null +++ b/doc/release-notes/11695-change-api-get-storage-driver.md @@ -0,0 +1,10 @@ +## Get Dataset/Dataverse Storage Driver API + +### Changed Json response - breaking change! + +The API for getting the Storage Driver info has been changed/extended. +/api/datasets/{identifier}/storageDriver +/api/admin/dataverse/{dataverse-alias}/storageDriver +changed "message" to "name" and added "type" and "label" + +See also [the guides](https://dataverse-guide--11664.org.readthedocs.build/en/11664/api/native-api.html#configure-a-dataset-to-store-all-new-files-in-a-specific-file-store), #11695, and #11664. 
diff --git a/doc/sphinx-guides/source/api/changelog.rst b/doc/sphinx-guides/source/api/changelog.rst index 16157459220..e8354a2db9f 100644 --- a/doc/sphinx-guides/source/api/changelog.rst +++ b/doc/sphinx-guides/source/api/changelog.rst @@ -13,6 +13,7 @@ v6.8 - For POST /api/files/{id}/metadata passing an empty string ("description":"") or array ("categories":[]) will no longer be ignored. Empty fields will now clear out the values in the file's metadata. To ignore the fields simply do not include them in the JSON string. - For PUT /api/datasets/{id}/editMetadata the query parameter "sourceInternalVersionNumber" has been removed and replaced with "sourceLastUpdateTime" to verify that the data being edited hasn't been modified and isn't stale. - For GET /api/dataverses/$dataverse-alias/links the Json response has changed breaking the backward compatibility of the API. +- For GET /api/admin/dataverse/{dataverse-alias}/storageDriver and /api/datasets/{identifier}/storageDriver the driver name is no longer returned in data.message. This value is now returned in data.name. v6.7 ---- diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index ac52b5d9fbf..1d726830c31 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -20,6 +20,7 @@ import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.StringUtil; import edu.harvard.iq.dataverse.util.cache.CacheFactoryBean; +import edu.harvard.iq.dataverse.util.json.JsonPrinter; import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; import edu.harvard.iq.dataverse.validation.EMailValidator; import edu.harvard.iq.dataverse.EjbDataverseEngine; @@ -2196,7 +2197,7 @@ public Response getStorageDriver(@Context ContainerRequestContext crc, @PathPara return wr.getResponse(); } //Note that this returns what's set directly on this dataverse. 
If null/DataAccess.UNDEFINED_STORAGE_DRIVER_IDENTIFIER, the user would have to recurse the chain of parents to find the effective storageDriver - return ok(dataverse.getStorageDriverId()); + return ok(JsonPrinter.jsonStorageDriver(dataverse.getStorageDriverId())); } @PUT diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 3db5e04b4ab..6c0f1460a74 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -3701,7 +3701,7 @@ public Response getFileStore(@Context ContainerRequestContext crc, @PathParam("i return error(Response.Status.NOT_FOUND, "No such dataset"); } - return response(req -> ok(dataset.getEffectiveStorageDriverId()), getRequestUser(crc)); + return ok(JsonPrinter.jsonStorageDriver(dataset.getStorageDriverId())); } @PUT diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 592a893083c..a7bf0d637fd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -39,6 +39,7 @@ import edu.harvard.iq.dataverse.workflow.Workflow; import edu.harvard.iq.dataverse.workflow.step.WorkflowStepData; +import java.io.IOException; import java.util.*; import jakarta.json.Json; import jakarta.json.JsonArrayBuilder; @@ -1610,4 +1611,13 @@ public static JsonArrayBuilder jsonTemplateInstructions(Map temp return jsonArrayBuilder; } + + public static JsonObjectBuilder jsonStorageDriver(String storageDriverId) { + JsonObjectBuilder jsonObjectBuilder = new NullSafeJsonBuilder(); + jsonObjectBuilder.add("name", storageDriverId); + jsonObjectBuilder.add("type", DataAccess.getDriverType(storageDriverId)); + jsonObjectBuilder.add("label", DataAccess.getStorageDriverLabelFor(storageDriverId)); + + return jsonObjectBuilder; + } } diff --git 
a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index f3a83c74224..2ad726aaecf 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -5982,6 +5982,37 @@ public void testGetGlobusUploadParameters() { GlobusOverlayAccessIOTest.tearDown(); } + @Test + public void testSetGetDatasetStorageDriver() { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + String username = UtilIT.getUsernameFromResponse(createUser); + Response makeSuperUser = UtilIT.makeSuperUser(username); + assertEquals(200, makeSuperUser.getStatusCode()); + Response createDataverse = UtilIT.createRandomDataverse(apiToken); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverse); + Response createDataset = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + Integer datasetId = UtilIT.getDatasetIdFromResponse(createDataset); + + Response storageDrivers = UtilIT.listStorageDrivers(apiToken); + storageDrivers.prettyPrint(); + JsonObject data = JsonUtil.getJsonObject(storageDrivers.getBody().asString()); + String first = data.getJsonObject("data").keySet().iterator().next(); + String name = data.getJsonObject("data").getString(first); + + Response setDriver = UtilIT.setDatasetStorageDriver(datasetId, first, apiToken); + setDriver.prettyPrint(); + assertEquals(200, setDriver.getStatusCode()); + Response getDriver = UtilIT.getDatasetStorageDriver(datasetId, apiToken); + getDriver.prettyPrint(); + assertEquals(200, getDriver.getStatusCode()); + getDriver.then().assertThat() + .body("data.name", CoreMatchers.equalTo(name)) + .body("data.type", CoreMatchers.notNullValue()) + .statusCode(OK.getStatusCode()); + } + @Test public void testGetCanDownloadAtLeastOneFile() { Response createUserResponse = 
UtilIT.createRandomUser(); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java b/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java index 137c2ef4c7b..d5fecb32937 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java @@ -142,7 +142,7 @@ public void testNonDirectUpload() { Response originalStorageDriver = UtilIT.getStorageDriver(dataverseAlias, superuserApiToken); originalStorageDriver.prettyPrint(); originalStorageDriver.then().assertThat() - .body("data.message", equalTo("undefined")) + .body("data.name", equalTo("undefined")) .statusCode(200); Response setStorageDriverToS3 = UtilIT.setStorageDriver(dataverseAlias, driverLabel, superuserApiToken); @@ -273,7 +273,7 @@ public void testDirectUpload() { Response originalStorageDriver = UtilIT.getStorageDriver(dataverseAlias, superuserApiToken); originalStorageDriver.prettyPrint(); originalStorageDriver.then().assertThat() - .body("data.message", equalTo("undefined")) + .body("data.name", equalTo("undefined")) .statusCode(200); Response setStorageDriverToS3 = UtilIT.setStorageDriver(dataverseAlias, driverLabel, superuserApiToken); @@ -491,7 +491,7 @@ public void testDirectUploadDetectStataFile() { Response originalStorageDriver = UtilIT.getStorageDriver(dataverseAlias, superuserApiToken); originalStorageDriver.prettyPrint(); originalStorageDriver.then().assertThat() - .body("data.message", equalTo("undefined")) + .body("data.name", equalTo("undefined")) .statusCode(200); Response setStorageDriverToS3 = UtilIT.setStorageDriver(dataverseAlias, driverLabel, superuserApiToken); @@ -689,7 +689,7 @@ public void testDirectUploadWithFileCountLimit() throws JsonParseException { Response originalStorageDriver = UtilIT.getStorageDriver(dataverseAlias, superuserApiToken); originalStorageDriver.prettyPrint(); originalStorageDriver.then().assertThat() - .body("data.message", equalTo("undefined")) + .body("data.name", 
equalTo("undefined")) .statusCode(200); Response setStorageDriverToS3 = UtilIT.setStorageDriver(dataverseAlias, driverLabel, superuserApiToken); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 24f2adbb3ed..e3e62a7a412 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -4505,6 +4505,12 @@ static Response setDatasetStorageDriver(Integer datasetId, String driverLabel, S .put("/api/datasets/" + datasetId + "/storageDriver"); } + static Response getDatasetStorageDriver(Integer datasetId, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/datasets/" + datasetId + "/storageDriver"); + } + /** GET on /api/admin/savedsearches/list */ static Response getSavedSearchList() { return given().get("/api/admin/savedsearches/list"); From 8e8b420aae3a2325096f0d893e70794db74e9ca4 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Tue, 5 Aug 2025 14:21:21 -0400 Subject: [PATCH 071/634] remove unused import --- .../java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java | 1 - 1 file changed, 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index a7bf0d637fd..06995d4943c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -39,7 +39,6 @@ import edu.harvard.iq.dataverse.workflow.Workflow; import edu.harvard.iq.dataverse.workflow.step.WorkflowStepData; -import java.io.IOException; import java.util.*; import jakarta.json.Json; import jakarta.json.JsonArrayBuilder; From 648641c04ac5518289df28107df0e7859e4a1cf0 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Tue, 5 Aug 2025 
14:23:22 -0400 Subject: [PATCH 072/634] add test --- src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 2ad726aaecf..0828f3d2789 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -6010,6 +6010,7 @@ public void testSetGetDatasetStorageDriver() { getDriver.then().assertThat() .body("data.name", CoreMatchers.equalTo(name)) .body("data.type", CoreMatchers.notNullValue()) + .body("data.label", CoreMatchers.notNullValue()) .statusCode(OK.getStatusCode()); } From 2c03617bfb5889a7c2b88cffeca68ce6d3bde096 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 7 Aug 2025 09:37:23 -0400 Subject: [PATCH 073/634] #11710 dummy endpoint to start --- .../java/edu/harvard/iq/dataverse/api/Dataverses.java | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 6b7e783a3eb..1523703893a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -1763,6 +1763,15 @@ public Response linkDataverse(@Context ContainerRequestContext crc, @PathParam(" return ex.getResponse(); } } + + @GET + @AuthRequired + @Path("/linkingDataverses/{searchTerm}") + public Response getLinkingDataverseList(){ + return null; + } + + @GET @AuthRequired From d37725bc61606e524e52cce60e26bad9d04b24d6 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 7 Aug 2025 13:20:05 -0400 Subject: [PATCH 074/634] #11710 update path --- src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java 
b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 1523703893a..dba634e8ba9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -1766,8 +1766,9 @@ public Response linkDataverse(@Context ContainerRequestContext crc, @PathParam(" @GET @AuthRequired - @Path("/linkingDataverses/{searchTerm}") - public Response getLinkingDataverseList(){ + @Path("{identifier}/linkingDataverses/{searchTerm}") + public Response getLinkingDataverseList(@PathParam("identifier") String dvIdtf, @PathParam("searchTerm") String searchTerm){ + return null; } From 59db4ff859fca947516827f0d3e2dc7c10af2c7b Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 7 Aug 2025 16:24:38 -0400 Subject: [PATCH 075/634] #11710 add api functionality and tests --- .../iq/dataverse/DataverseServiceBean.java | 23 +++++++++++++++---- .../iq/dataverse/api/AbstractApiBean.java | 2 +- .../dataverse/api/DataverseFeaturedItems.java | 2 +- .../harvard/iq/dataverse/api/Dataverses.java | 20 ++++++++++++---- .../iq/dataverse/api/DataversesIT.java | 22 ++++++++++++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 12 ++++++++++ 6 files changed, 69 insertions(+), 12 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java index c14711060af..23d94338992 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java @@ -505,18 +505,29 @@ public List filterByAliasQuery(String filterQuery) { return ret; } - public List filterDataversesForLinking(String query, DataverseRequest req, Dataset dataset) { + public List filterDataversesForLinking(String query, DataverseRequest req, DvObject dvo) { List dataverseList = new ArrayList<>(); List results = filterDataversesByNamePattern(query); - if (results == null || results.size() == 0) { + 
if (results == null || results.isEmpty()) { return null; } + + Dataset linkedDataset = null; + Dataverse linkedDataverse = null; + List alreadyLinkeddv_ids; + + if ((dvo instanceof Dataset)) { + linkedDataset = (Dataset) dvo; + alreadyLinkeddv_ids = em.createNativeQuery("SELECT linkingdataverse_id FROM datasetlinkingdataverse WHERE dataset_id = " + linkedDataset.getId()).getResultList(); + } else { + linkedDataverse = (Dataverse) dvo; + alreadyLinkeddv_ids = em.createNativeQuery("SELECT linkingdataverse_id FROM dataverselinkingdataverse WHERE dataverse_id = " + linkedDataverse.getId()).getResultList(); + } - List alreadyLinkeddv_ids = em.createNativeQuery("SELECT linkingdataverse_id FROM datasetlinkingdataverse WHERE dataset_id = " + dataset.getId()).getResultList(); - List remove = new ArrayList<>(); + List remove = new ArrayList<>(); if (alreadyLinkeddv_ids != null && !alreadyLinkeddv_ids.isEmpty()) { alreadyLinkeddv_ids.stream().map((testDVId) -> this.find(testDVId)).forEachOrdered((removeIt) -> { @@ -526,9 +537,11 @@ public List filterDataversesForLinking(String query, DataverseRequest for (Dataverse res : results) { if (!remove.contains(res)) { - if (this.permissionService.requestOn(req, res).has(Permission.LinkDataset)) { + if ((linkedDataset != null && this.permissionService.requestOn(req, res).has(Permission.LinkDataset)) + || (linkedDataverse != null && this.permissionService.requestOn(req, res).has(Permission.LinkDataverse))) { dataverseList.add(res); } + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index 76ef91fbd3a..f99727b16db 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -601,7 +601,7 @@ protected DvObject findDvo(@NotNull final String id) throws WrappedResponse { * @throws WrappedResponse */ @NotNull - protected DvObject 
findDvoByIdAndFeaturedItemTypeOrDie(@NotNull final String dvIdtf, String type) throws WrappedResponse { + protected DvObject findDvoByIdAndTypeOrDie(@NotNull final String dvIdtf, String type) throws WrappedResponse { try { DataverseFeaturedItem.TYPES dvType = DataverseFeaturedItem.getDvType(type); DvObject dvObject = null; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DataverseFeaturedItems.java b/src/main/java/edu/harvard/iq/dataverse/api/DataverseFeaturedItems.java index 30c3146fbfb..00f1aa76e7e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/DataverseFeaturedItems.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/DataverseFeaturedItems.java @@ -63,7 +63,7 @@ public Response updateFeaturedItem(@Context ContainerRequestContext crc, if (dataverseFeaturedItem == null) { throw new WrappedResponse(error(Response.Status.NOT_FOUND, MessageFormat.format(BundleUtil.getStringFromBundle("dataverseFeaturedItems.errors.notFound"), id))); } - DvObject dvObject = (dvObjectIdtf != null) ? findDvoByIdAndFeaturedItemTypeOrDie(dvObjectIdtf, type) : null; + DvObject dvObject = (dvObjectIdtf != null) ? 
findDvoByIdAndTypeOrDie(dvObjectIdtf, type) : null; UpdatedDataverseFeaturedItemDTO updatedDataverseFeaturedItemDTO = UpdatedDataverseFeaturedItemDTO.fromFormData(content, displayOrder, keepFile, imageFileInputStream, contentDispositionHeader, type, dvObject); return ok(json(execCommand(new UpdateDataverseFeaturedItemCommand(createDataverseRequest(getRequestUser(crc)), dataverseFeaturedItem, updatedDataverseFeaturedItemDTO)))); } catch (WrappedResponse e) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index dba634e8ba9..ae49e34d693 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -1767,9 +1767,19 @@ public Response linkDataverse(@Context ContainerRequestContext crc, @PathParam(" @GET @AuthRequired @Path("{identifier}/linkingDataverses/{searchTerm}") - public Response getLinkingDataverseList(@PathParam("identifier") String dvIdtf, @PathParam("searchTerm") String searchTerm){ - - return null; + public Response getLinkingDataverseList(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @PathParam("searchTerm") String searchTerm, @FormDataParam("type") String type){ + //first determine what you are linking based on identifier and type + try{ + DvObject dvObject = findDvoByIdAndTypeOrDie(dvIdtf, type); + List dataversesForLinking = dataverseService.filterDataversesForLinking(searchTerm, createDataverseRequest(getRequestUser(crc)), dvObject); + JsonArrayBuilder dvBuilder = Json.createArrayBuilder(); + for (Dataverse dv : dataversesForLinking) { + dvBuilder.add(dv.getAlias()); + } + return ok(dvBuilder); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } } @@ -1813,7 +1823,7 @@ public Response createFeaturedItem(@Context ContainerRequestContext crc, try { dataverse = findDataverseOrDie(dvIdtf); if (dvObjectIdtf != null) { - dvObject = 
findDvoByIdAndFeaturedItemTypeOrDie(dvObjectIdtf, type); + dvObject = findDvoByIdAndTypeOrDie(dvObjectIdtf, type); } } catch (WrappedResponse wr) { return wr.getResponse(); @@ -1901,7 +1911,7 @@ public Response updateFeaturedItems( // ignore dvObject if the id is missing or an empty string DvObject dvObject = dvObjectIdtf.get(i) != null && !dvObjectIdtf.get(i).isEmpty() - ? findDvoByIdAndFeaturedItemTypeOrDie(dvObjectIdtf.get(i), types.get(i)) : null; + ? findDvoByIdAndTypeOrDie(dvObjectIdtf.get(i), types.get(i)) : null; if (ids.get(i) == 0) { newItems.add(NewDataverseFeaturedItemDTO.fromFormData( contents.get(i), displayOrders.get(i), fileInputStream, contentDisposition, types.get(i), dvObject)); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 3e1a160c9f2..4d9f8aa9509 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -677,6 +677,28 @@ public void testImportDDI() throws IOException, InterruptedException { Response deleteUserResponse = UtilIT.deleteUser(username); assertEquals(200, deleteUserResponse.getStatusCode()); } + + @Test + public void testGetLinkableDataverses(){ + Response createUser = UtilIT.createRandomUser(); + String username = UtilIT.getUsernameFromResponse(createUser); + Response makeSuperUser = UtilIT.makeSuperUser(username); + assertEquals(200, makeSuperUser.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response publishDataverse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); + assertEquals(200, publishDataverse.getStatusCode()); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + 
createDatasetResponse.prettyPrint(); + String datasetPersistentId = UtilIT.getDatasetPersistentIdFromResponse(createDatasetResponse); + + Response getLinkableDataverses = UtilIT.getLinkableDataverses("dataset", datasetPersistentId, apiToken, dataverseAlias); + getLinkableDataverses.prettyPrint(); + } @Test public void testImport() throws IOException, InterruptedException { diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 24f2adbb3ed..77b4303393e 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -4773,6 +4773,18 @@ static Response updateDataverseFeaturedItem(long featuredItemId, String apiToken) { return updateDataverseFeaturedItem(featuredItemId, content, displayOrder, keepFile, pathToFile, null, null, apiToken); } + + static Response getLinkableDataverses (String type, String dvObjectId, String apiToken, String dataverseAlias) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .contentType("application/json") + .get("/api/dataverses/" + dataverseAlias + "/featuredItems"); + + /* + {identifier}/linkingDataverses/{searchTerm} + */ + + } static Response updateDataverseFeaturedItem(long featuredItemId, String content, int displayOrder, From 1203c435472d980fc648d3b25940f8d2d6cad5a2 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Fri, 8 Aug 2025 16:54:52 -0400 Subject: [PATCH 076/634] #11710 fix test --- .../harvard/iq/dataverse/api/Dataverses.java | 14 +++++++++-- .../iq/dataverse/api/DataversesIT.java | 9 +++++++- .../edu/harvard/iq/dataverse/api/UtilIT.java | 23 +++++++++++-------- 3 files changed, 34 insertions(+), 12 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index ae49e34d693..21b67d1c3c9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -1766,9 +1766,16 @@ public Response linkDataverse(@Context ContainerRequestContext crc, @PathParam(" @GET @AuthRequired - @Path("{identifier}/linkingDataverses/{searchTerm}") - public Response getLinkingDataverseList(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @PathParam("searchTerm") String searchTerm, @FormDataParam("type") String type){ + @Produces(MediaType.APPLICATION_JSON) + @Consumes(MediaType.MULTIPART_FORM_DATA) + @Path("{identifier}/{type}/linkingDataverses/{searchTerm}") + public Response getLinkingDataverseList(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @PathParam("searchTerm") String searchTerm, @PathParam("type") String type){ //first determine what you are linking based on identifier and type + System.out.print("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@"); + System.out.print("in dataverses method"); + System.out.print("id: " + dvIdtf); + System.out.print("searchTerm: " + searchTerm); + System.out.print("type: " + type); try{ DvObject dvObject = findDvoByIdAndTypeOrDie(dvIdtf, type); List dataversesForLinking = dataverseService.filterDataversesForLinking(searchTerm, createDataverseRequest(getRequestUser(crc)), dvObject); @@ -1779,6 +1786,9 @@ public Response getLinkingDataverseList(@Context ContainerRequestContext crc, @P return ok(dvBuilder); } catch (WrappedResponse wr) { return wr.getResponse(); + } catch (Exception e){ + return error(Status.BAD_REQUEST, e.getLocalizedMessage()); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 4d9f8aa9509..e13d83f6f88 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -696,8 +696,15 @@ public void testGetLinkableDataverses(){ createDatasetResponse.prettyPrint(); String 
datasetPersistentId = UtilIT.getDatasetPersistentIdFromResponse(createDatasetResponse); - Response getLinkableDataverses = UtilIT.getLinkableDataverses("dataset", datasetPersistentId, apiToken, dataverseAlias); + Integer datasetId = UtilIT.getDatasetIdFromResponse(createDatasetResponse); + UtilIT.publishDatasetViaNativeApi(datasetPersistentId, "major", apiToken); + + System.out.print("After pub dataset"); + + Response getLinkableDataverses = UtilIT.getLinkableDataverses("dataset", datasetPersistentId, apiToken, "dv0"); getLinkableDataverses.prettyPrint(); + getLinkableDataverses.then().assertThat() + .statusCode(OK.getStatusCode()); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 77b4303393e..f28c0b3e640 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -4774,16 +4774,21 @@ static Response updateDataverseFeaturedItem(long featuredItemId, return updateDataverseFeaturedItem(featuredItemId, content, displayOrder, keepFile, pathToFile, null, null, apiToken); } - static Response getLinkableDataverses (String type, String dvObjectId, String apiToken, String dataverseAlias) { - return given() + static Response getLinkableDataverses (String type, String dvObjectId, String apiToken, String searchTerm) { + + String idInPath = dvObjectId; // Assume it's a number to start. + String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path. 
+ if (type.equals("dataset")) { + if (!NumberUtils.isCreatable(idInPath)) { + idInPath = ":persistentId"; + optionalQueryParam = "?persistentId=" + dvObjectId; + } + } + + return given() .header(API_TOKEN_HTTP_HEADER, apiToken) - .contentType("application/json") - .get("/api/dataverses/" + dataverseAlias + "/featuredItems"); - - /* - {identifier}/linkingDataverses/{searchTerm} - */ - + .get("/api/dataverses/" + idInPath + "/" + type + "/linkingDataverses/" + searchTerm + optionalQueryParam); + } static Response updateDataverseFeaturedItem(long featuredItemId, String content, From 8e8b084c038a0b103314e203e5b735ea624330f3 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Mon, 11 Aug 2025 08:49:06 -0400 Subject: [PATCH 077/634] #11710 add "mini" dv to json printer --- .../java/edu/harvard/iq/dataverse/api/Dataverses.java | 2 +- .../harvard/iq/dataverse/util/json/JsonPrinter.java | 11 +++++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 21b67d1c3c9..a6aa24f417b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -1781,7 +1781,7 @@ public Response getLinkingDataverseList(@Context ContainerRequestContext crc, @P List dataversesForLinking = dataverseService.filterDataversesForLinking(searchTerm, createDataverseRequest(getRequestUser(crc)), dvObject); JsonArrayBuilder dvBuilder = Json.createArrayBuilder(); for (Dataverse dv : dataversesForLinking) { - dvBuilder.add(dv.getAlias()); + dvBuilder.add(json(dv, true)); } return ok(dvBuilder); } catch (WrappedResponse wr) { diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 592a893083c..ec9abdfa7cc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -269,6 +269,17 @@ public static JsonObjectBuilder json(Workflow wf){ return bld; } + + public static JsonObjectBuilder json(Dataverse dv, Boolean mini) { + if (!mini){ + return json(dv, false, false, null); + } else { + return jsonObjectBuilder() + .add("id", dv.getId()) + .add("alias", dv.getAlias()) + .add("name", dv.getName()); + } + } public static JsonObjectBuilder json(Dataverse dv) { return json(dv, false, false, null); From bb40ca4b40e35a9f2315606913f2c309af363213 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Mon, 11 Aug 2025 14:42:05 -0400 Subject: [PATCH 078/634] #11710 fix templates test for existing databases/templates --- .../iq/dataverse/api/DataversesIT.java | 26 +++++++++++++++++-- 1 file changed, 24 insertions(+), 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index e13d83f6f88..53643e3d5a5 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -2314,17 +2314,39 @@ public void testUpdateInputLevelDisplayOnCreateOverride() { } @Test - public void testCreateAndGetTemplates() { + public void testCreateAndGetTemplates() throws JsonParseException { Response createUserResponse = UtilIT.createRandomUser(); String apiToken = UtilIT.getApiTokenFromResponse(createUserResponse); Response createSecondUserResponse = UtilIT.createRandomUser(); String secondApiToken = UtilIT.getApiTokenFromResponse(createSecondUserResponse); - + /* + We need to make this a non-inherited metadatablocks so the get template will only get templates from current dv + */ + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + String 
newName = "New Test Dataverse Name"; + String newAffiliation = "New Test Dataverse Affiliation"; + String newDataverseType = Dataverse.DataverseType.TEACHING_COURSES.toString(); + String[] newContactEmails = new String[]{"new_email@dataverse.com"}; + String[] newInputLevelNames = new String[]{"geographicCoverage"}; + String[] newFacetIds = new String[]{"contributorName"}; + String[] newMetadataBlockNames = new String[]{"citation", "geospatial", "biomedical"}; + + // Assert that the error is returned for having both MetadataBlockNames and inheritMetadataBlocksFromParent + Response updateDataverseResponse = UtilIT.updateDataverse( + dataverseAlias, dataverseAlias, newName, newAffiliation, newDataverseType, newContactEmails, newInputLevelNames, + null, newMetadataBlockNames, apiToken, + Boolean.FALSE, Boolean.FALSE, null + ); + updateDataverseResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + + + // Create a template String jsonString = """ From 791edfb2a4237eb28ae2ef71f9dd76b3e23d4a33 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 12 Aug 2025 09:47:49 -0400 Subject: [PATCH 079/634] #11710 code cleanup --- src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 53643e3d5a5..889f9589819 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -2320,6 +2320,7 @@ public void testCreateAndGetTemplates() throws JsonParseException { Response createSecondUserResponse = UtilIT.createRandomUser(); String secondApiToken = UtilIT.getApiTokenFromResponse(createSecondUserResponse); + /* We need to make this a non-inherited metadatablocks so the get template will only get templates from current dv */ @@ -2342,10 +2343,9 @@ public void testCreateAndGetTemplates() 
throws JsonParseException { null, newMetadataBlockNames, apiToken, Boolean.FALSE, Boolean.FALSE, null ); + updateDataverseResponse.then().assertThat() .statusCode(OK.getStatusCode()); - - // Create a template @@ -2384,6 +2384,7 @@ public void testCreateAndGetTemplates() throws JsonParseException { jsonString, apiToken ); + createTemplateResponse.then().assertThat().statusCode(OK.getStatusCode()) .body("data.name", equalTo("Dataverse template")) .body("data.usageCount", equalTo(0)) From 61dae90a8f203b6dca0df9c5317e70c40df40d24 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 12 Aug 2025 10:51:39 -0400 Subject: [PATCH 080/634] #11710 add test for perms --- .../harvard/iq/dataverse/api/Dataverses.java | 6 +-- .../iq/dataverse/api/DataversesIT.java | 46 ++++++++++++++++--- 2 files changed, 40 insertions(+), 12 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index a6aa24f417b..f4788120ae5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -1771,11 +1771,7 @@ public Response linkDataverse(@Context ContainerRequestContext crc, @PathParam(" @Path("{identifier}/{type}/linkingDataverses/{searchTerm}") public Response getLinkingDataverseList(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @PathParam("searchTerm") String searchTerm, @PathParam("type") String type){ //first determine what you are linking based on identifier and type - System.out.print("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@"); - System.out.print("in dataverses method"); - System.out.print("id: " + dvIdtf); - System.out.print("searchTerm: " + searchTerm); - System.out.print("type: " + type); + try{ DvObject dvObject = findDvoByIdAndTypeOrDie(dvIdtf, type); List dataversesForLinking = dataverseService.filterDataversesForLinking(searchTerm, 
createDataverseRequest(getRequestUser(crc)), dvObject); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 889f9589819..d35270de1a3 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -681,30 +681,62 @@ public void testImportDDI() throws IOException, InterruptedException { @Test public void testGetLinkableDataverses(){ Response createUser = UtilIT.createRandomUser(); - String username = UtilIT.getUsernameFromResponse(createUser); - Response makeSuperUser = UtilIT.makeSuperUser(username); - assertEquals(200, makeSuperUser.getStatusCode()); String apiToken = UtilIT.getApiTokenFromResponse(createUser); Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + //Create dataverse for linking + Response createDataverseResponseForLinking = UtilIT.createRandomDataverse(apiToken); + String dataverseAliasForLinking = UtilIT.getAliasFromResponse(createDataverseResponseForLinking); Response publishDataverse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); assertEquals(200, publishDataverse.getStatusCode()); + publishDataverse = UtilIT.publishDataverseViaNativeApi(dataverseAliasForLinking, apiToken); + assertEquals(200, publishDataverse.getStatusCode()); + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); createDatasetResponse.prettyPrint(); String datasetPersistentId = UtilIT.getDatasetPersistentIdFromResponse(createDatasetResponse); Integer datasetId = UtilIT.getDatasetIdFromResponse(createDatasetResponse); UtilIT.publishDatasetViaNativeApi(datasetPersistentId, "major", apiToken); - - System.out.print("After pub dataset"); - Response getLinkableDataverses = UtilIT.getLinkableDataverses("dataset", datasetPersistentId, apiToken, "dv0"); 
+ Response getLinkableDataverses = UtilIT.getLinkableDataverses("dataset", datasetPersistentId, apiToken, dataverseAliasForLinking); getLinkableDataverses.prettyPrint(); getLinkableDataverses.then().assertThat() - .statusCode(OK.getStatusCode()); + .statusCode(OK.getStatusCode()) + .body("data[0].alias", equalTo(dataverseAliasForLinking)); + + Response getLinkableDataversesForDataverse = UtilIT.getLinkableDataverses("dataverse", dataverseAlias, apiToken, dataverseAliasForLinking); + getLinkableDataversesForDataverse.prettyPrint(); + getLinkableDataverses.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].alias", equalTo(dataverseAliasForLinking)); + + // create new user and dataverse - the new dataverse should not be available to the first user for linking... + Response createUserTwo = UtilIT.createRandomUser(); + String apiTokenTwo = UtilIT.getApiTokenFromResponse(createUserTwo); + + //Create dataverse that should be unavailable for linking + Response createDataverseResponseUnavailableForLinking = UtilIT.createRandomDataverse(apiTokenTwo); + createDataverseResponseUnavailableForLinking.prettyPrint(); + String dataverseAliasUnavailableForLinking = UtilIT.getAliasFromResponse(createDataverseResponseUnavailableForLinking); + + Response getUnavailableForDataset = UtilIT.getLinkableDataverses("dataset", datasetPersistentId, apiToken, dataverseAliasUnavailableForLinking); + getUnavailableForDataset.prettyPrint(); + getUnavailableForDataset.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.size()", equalTo(0)); + + Response getUnavailableForDataverse = UtilIT.getLinkableDataverses("dataverse", dataverseAlias, apiToken, dataverseAliasUnavailableForLinking); + getUnavailableForDataverse.prettyPrint(); + getUnavailableForDataverse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.size()", equalTo(0)); + + } @Test From 32d06ed75bbb7f06a25d856a37963978855cc8a2 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 
12 Aug 2025 11:02:07 -0400 Subject: [PATCH 081/634] #11710 add publish to test --- src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index d35270de1a3..678616d7db8 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -723,6 +723,8 @@ public void testGetLinkableDataverses(){ Response createDataverseResponseUnavailableForLinking = UtilIT.createRandomDataverse(apiTokenTwo); createDataverseResponseUnavailableForLinking.prettyPrint(); String dataverseAliasUnavailableForLinking = UtilIT.getAliasFromResponse(createDataverseResponseUnavailableForLinking); + publishDataverse = UtilIT.publishDataverseViaNativeApi(dataverseAliasUnavailableForLinking, apiTokenTwo); + assertEquals(200, publishDataverse.getStatusCode()); Response getUnavailableForDataset = UtilIT.getLinkableDataverses("dataset", datasetPersistentId, apiToken, dataverseAliasUnavailableForLinking); getUnavailableForDataset.prettyPrint(); From 59572c9485e49f2d00d1857034819c07b7337a47 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 12 Aug 2025 14:20:51 -0400 Subject: [PATCH 082/634] #11710 code cleanup --- src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java | 1 - src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java | 1 - 2 files changed, 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java index 23d94338992..b8ce8cb1a6b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java @@ -541,7 +541,6 @@ public List filterDataversesForLinking(String query, DataverseRequest || (linkedDataverse != null && 
this.permissionService.requestOn(req, res).has(Permission.LinkDataverse))) { dataverseList.add(res); } - } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index f4788120ae5..8bfa3594602 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -1767,7 +1767,6 @@ public Response linkDataverse(@Context ContainerRequestContext crc, @PathParam(" @GET @AuthRequired @Produces(MediaType.APPLICATION_JSON) - @Consumes(MediaType.MULTIPART_FORM_DATA) @Path("{identifier}/{type}/linkingDataverses/{searchTerm}") public Response getLinkingDataverseList(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @PathParam("searchTerm") String searchTerm, @PathParam("type") String type){ //first determine what you are linking based on identifier and type From d6625a4692ecfbf06ab026379e35afedaa4e1deb Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 12 Aug 2025 14:53:07 -0400 Subject: [PATCH 083/634] #11710 test that linked dv's are removed from list --- .../iq/dataverse/api/DataversesIT.java | 23 +++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 678616d7db8..5358debacda 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -682,7 +682,8 @@ public void testImportDDI() throws IOException, InterruptedException { public void testGetLinkableDataverses(){ Response createUser = UtilIT.createRandomUser(); String apiToken = UtilIT.getApiTokenFromResponse(createUser); - + String username = UtilIT.getUsernameFromResponse(createUser); + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); String dataverseAlias = 
UtilIT.getAliasFromResponse(createDataverseResponse); @@ -737,7 +738,25 @@ public void testGetLinkableDataverses(){ getUnavailableForDataverse.then().assertThat() .statusCode(OK.getStatusCode()) .body("data.size()", equalTo(0)); - + + //now link a dataverse and see that it's unavailable in the future + + Response makeSuperUser = UtilIT.setSuperuserStatus(username, Boolean.TRUE); + + Response linkDataset = UtilIT.linkDataset(datasetPersistentId, dataverseAliasForLinking, apiToken); + linkDataset.prettyPrint(); + linkDataset.then().assertThat() + .statusCode(OK.getStatusCode()); + + //set it back to non-super user so perms are limited + UtilIT.setSuperuserStatus(username, Boolean.FALSE); + + //should get an empty list because dataset is already linked + getLinkableDataverses = UtilIT.getLinkableDataverses("dataset", datasetPersistentId, apiToken, dataverseAliasForLinking); + getLinkableDataverses.prettyPrint(); + getLinkableDataverses.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.size()", equalTo(0)); } From 12f69ffaae42873c9008238e4fd9dfc7c013fd5a Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 12 Aug 2025 15:00:04 -0400 Subject: [PATCH 084/634] #11710 clean up comments --- src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 5358debacda..7782dea8fea 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -739,7 +739,7 @@ public void testGetLinkableDataverses(){ .statusCode(OK.getStatusCode()) .body("data.size()", equalTo(0)); - //now link a dataverse and see that it's unavailable in the future + //now link a dataset and see that it's unavailable in the future Response makeSuperUser = UtilIT.setSuperuserStatus(username, Boolean.TRUE); @@ 
-748,7 +748,7 @@ public void testGetLinkableDataverses(){ linkDataset.then().assertThat() .statusCode(OK.getStatusCode()); - //set it back to non-super user so perms are limited + //set user api back to non-super user so perms are limited UtilIT.setSuperuserStatus(username, Boolean.FALSE); //should get an empty list because dataset is already linked From 8456b90a1cc015ff3f935217bdf79649f43f93c6 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 12 Aug 2025 15:30:32 -0400 Subject: [PATCH 085/634] Update native-api.rst --- doc/sphinx-guides/source/api/native-api.rst | 44 +++++++++++++++++++++ 1 file changed, 44 insertions(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 8de396e14b3..53270476437 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -732,6 +732,50 @@ Note: you must have "Add Dataset" permission in the given collection to invoke t .. _featured-collections: +List Dataverse Collections that a given Dataset or Dataverse Collection may be linked to +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The user may provide a search term to limit the list of Dataverse Collections returned +The response is a JSON array of the ids, aliases, and names of the Dataverse collections that a given Dataset or Dataverse Collection may be linked to: + +For a given Dataverse Collection: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export OBJECT_TYPE=dataverse + export ID=collectionAlias + export SEARCH_TERM=searchOn + + curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/dataverses/$ID/$OBJECT_TYPE/linkingDataverses/$SEARCH_TERM" + +The fully expanded example above (without environment variables) looks like this: + +.. 
code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X GET "https://demo.dataverse.org/api/dataverses/collectionAlias/dataverse/linkingDataverses/searchOn" + +For a given Dataset: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export OBJECT_TYPE=dataset + export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/J8SJZB + export SEARCH_TERM=searchOn + + curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/dataverses/:persistentId/$OBJECT_TYPE/linkingDataverses/$SEARCH_TERM?persistentId=$PERSISTENT_IDENTIFIER"" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X GET "https://demo.dataverse.org/api/dataverses/:persistentId/dataset/linkingDataverses/searchOn?persistentId=doi:10.5072/FK2/J8SJZB" + + + List Featured Collections for a Dataverse Collection ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From bb74b24000878104063f0ddf816290388c8aafcc Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 12 Aug 2025 15:32:50 -0400 Subject: [PATCH 086/634] Update native-api.rst --- doc/sphinx-guides/source/api/native-api.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 53270476437..9a861c353d7 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -732,11 +732,11 @@ Note: you must have "Add Dataset" permission in the given collection to invoke t .. 
_featured-collections: -List Dataverse Collections that a given Dataset or Dataverse Collection may be linked to -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +List Dataverse Collections to which a given Dataset or Dataverse Collection may be linked +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -The user may provide a search term to limit the list of Dataverse Collections returned -The response is a JSON array of the ids, aliases, and names of the Dataverse collections that a given Dataset or Dataverse Collection may be linked to: +The user may provide a search term to limit the list of Dataverse Collections returned. +The response is a JSON array of the ids, aliases, and names of the Dataverse collections to which a given Dataset or Dataverse Collection may be linked: For a given Dataverse Collection: From 01de521e93b58c5dd7cc89cc6edf5795a8d60434 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 12 Aug 2025 16:53:21 -0400 Subject: [PATCH 087/634] #11710 add test for partial search term --- .../edu/harvard/iq/dataverse/api/Dataverses.java | 12 +++++++----- .../edu/harvard/iq/dataverse/api/DataversesIT.java | 12 +++++++++++- 2 files changed, 18 insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 8bfa3594602..4d11cc0152a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -1771,14 +1771,16 @@ public Response linkDataverse(@Context ContainerRequestContext crc, @PathParam(" public Response getLinkingDataverseList(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @PathParam("searchTerm") String searchTerm, @PathParam("type") String type){ //first determine what you are linking based on identifier and type - try{ - DvObject dvObject = 
findDvoByIdAndTypeOrDie(dvIdtf, type); - List dataversesForLinking = dataverseService.filterDataversesForLinking(searchTerm, createDataverseRequest(getRequestUser(crc)), dvObject); - JsonArrayBuilder dvBuilder = Json.createArrayBuilder(); + try { + DvObject dvObject = findDvoByIdAndTypeOrDie(dvIdtf, type); + List dataversesForLinking = dataverseService.filterDataversesForLinking(searchTerm, createDataverseRequest(getRequestUser(crc)), dvObject); + JsonArrayBuilder dvBuilder = Json.createArrayBuilder(); + if (dataversesForLinking != null && !dataversesForLinking.isEmpty()) { for (Dataverse dv : dataversesForLinking) { dvBuilder.add(json(dv, true)); } - return ok(dvBuilder); + } + return ok(dvBuilder); } catch (WrappedResponse wr) { return wr.getResponse(); } catch (Exception e){ diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 7782dea8fea..3967666d798 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -715,7 +715,17 @@ public void testGetLinkableDataverses(){ getLinkableDataverses.then().assertThat() .statusCode(OK.getStatusCode()) .body("data[0].alias", equalTo(dataverseAliasForLinking)); - + + //Should be able to get based on a partial alias... + // Partial must include the first part of the name + String searchTerm = dataverseAliasForLinking.substring(0, 5); + + Response getLinkableDataversesForDataversePartial = UtilIT.getLinkableDataverses("dataverse", dataverseAlias, apiToken, searchTerm); + getLinkableDataversesForDataversePartial.prettyPrint(); + getLinkableDataversesForDataversePartial.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].alias", equalTo(dataverseAliasForLinking)); + // create new user and dataverse - the new dataverse should not be available to the first user for linking... 
Response createUserTwo = UtilIT.createRandomUser(); String apiTokenTwo = UtilIT.getApiTokenFromResponse(createUserTwo); From 9e391921948730c5d34663d2bd0bde74b34828df Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 12 Aug 2025 16:56:34 -0400 Subject: [PATCH 088/634] Update native-api.rst --- doc/sphinx-guides/source/api/native-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 9a861c353d7..22a77cf0d4a 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -735,7 +735,7 @@ Note: you must have "Add Dataset" permission in the given collection to invoke t List Dataverse Collections to which a given Dataset or Dataverse Collection may be linked ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -The user may provide a search term to limit the list of Dataverse Collections returned. +The user must provide a search term to limit the list of Dataverse Collections returned. The search term will be compared to the name of the Dataverse Collections and must include the beginning of the Dataverse Collections' names. 
The response is a JSON array of the ids, aliases, and names of the Dataverse collections to which a given Dataset or Dataverse Collection may be linked: For a given Dataverse Collection: From 3157b2461f789f059329f58c1bdc142573fab551 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 13 Aug 2025 10:32:35 -0400 Subject: [PATCH 089/634] Create 11710-get-available-dataverses-api.md --- doc/release-notes/11710-get-available-dataverses-api.md | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 doc/release-notes/11710-get-available-dataverses-api.md diff --git a/doc/release-notes/11710-get-available-dataverses-api.md b/doc/release-notes/11710-get-available-dataverses-api.md new file mode 100644 index 00000000000..6658d1d8fd5 --- /dev/null +++ b/doc/release-notes/11710-get-available-dataverses-api.md @@ -0,0 +1,5 @@ +### New API endpoint for retrieving a list of Dataverse Collections to which a given Dataset or Dataverse Collection may be linked + +-The end point also takes in a search term which currently must be the start of the collections' names. +-The user calling this API must have Link Dataset or Link Dataverse permission on the Dataverse Collections returned. +-If the Collection has already been linked to the given Dataset or Collection, it will not be returned. 
From 250e0e4c46107f0de3d106579260786993402614 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 13 Aug 2025 10:45:31 -0400 Subject: [PATCH 090/634] #11710 add cleanup after tests --- .../iq/dataverse/api/DataversesIT.java | 24 +++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 3967666d798..34a5c6a6cec 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -729,6 +729,7 @@ public void testGetLinkableDataverses(){ // create new user and dataverse - the new dataverse should not be available to the first user for linking... Response createUserTwo = UtilIT.createRandomUser(); String apiTokenTwo = UtilIT.getApiTokenFromResponse(createUserTwo); + String usernameTwo = UtilIT.getUsernameFromResponse(createUserTwo); //Create dataverse that should be unavailable for linking Response createDataverseResponseUnavailableForLinking = UtilIT.createRandomDataverse(apiTokenTwo); @@ -767,6 +768,29 @@ public void testGetLinkableDataverses(){ getLinkableDataverses.then().assertThat() .statusCode(OK.getStatusCode()) .body("data.size()", equalTo(0)); + + //set user api back to super user for cleanup + UtilIT.setSuperuserStatus(username, Boolean.TRUE); + + + // Clean up + Response destroyDatasetResponse = UtilIT.destroyDataset(datasetId, apiToken); + assertEquals(200, destroyDatasetResponse.getStatusCode()); + + Response deleteDataverseResponse = UtilIT.deleteDataverse(dataverseAlias, apiToken); + assertEquals(200, deleteDataverseResponse.getStatusCode()); + + deleteDataverseResponse = UtilIT.deleteDataverse(dataverseAliasForLinking, apiToken); + assertEquals(200, deleteDataverseResponse.getStatusCode()); + + deleteDataverseResponse = UtilIT.deleteDataverse(dataverseAliasUnavailableForLinking, apiToken); + assertEquals(200, 
deleteDataverseResponse.getStatusCode()); + + Response deleteUserResponse = UtilIT.deleteUser(usernameTwo); + assertEquals(200, deleteUserResponse.getStatusCode()); + + deleteUserResponse = UtilIT.deleteUser(username); + assertEquals(200, deleteUserResponse.getStatusCode()); } From 9be5fb38f77b150f6a7d22be46cba109ac8e089f Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 13 Aug 2025 14:21:17 -0400 Subject: [PATCH 091/634] Update native-api.rst --- doc/sphinx-guides/source/api/native-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 22a77cf0d4a..2cdcbc65eb4 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -766,7 +766,7 @@ For a given Dataset: export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/J8SJZB export SEARCH_TERM=searchOn - curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/dataverses/:persistentId/$OBJECT_TYPE/linkingDataverses/$SEARCH_TERM?persistentId=$PERSISTENT_IDENTIFIER"" + curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/dataverses/:persistentId/$OBJECT_TYPE/linkingDataverses/$SEARCH_TERM?persistentId=$PERSISTENT_IDENTIFIER" The fully expanded example above (without environment variables) looks like this: From d6404b9142d51b74a5e29fdb8aedbe6d1e1133e2 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 14 Aug 2025 16:55:47 -0400 Subject: [PATCH 092/634] #11387 update command to allow for leaving input levels alone --- .../impl/AbstractWriteDataverseCommand.java | 8 ++- .../iq/dataverse/api/DataversesIT.java | 53 ++++++++++++++++--- .../edu/harvard/iq/dataverse/api/UtilIT.java | 20 +++++++ 3 files changed, 73 insertions(+), 8 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java 
index 8227572da3b..e4fd5373c7d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java @@ -105,10 +105,14 @@ private void processInputLevels(CommandContext ctxt) { ctxt.fieldTypeInputLevels().deleteDataverseFieldTypeInputLevelFor(dataverse); } else { dataverse.addInputLevelsMetadataBlocksIfNotPresent(inputLevels); - ctxt.fieldTypeInputLevels().deleteDataverseFieldTypeInputLevelFor(dataverse); + //if levels not empty either create or update (handled by save - update when id not null create if null) inputLevels.forEach(inputLevel -> { + DataverseFieldTypeInputLevel ftil = ctxt.fieldTypeInputLevels().findByDataverseIdDatasetFieldTypeId(dataverse.getId(), inputLevel.getDatasetFieldType().getId()); + if(ftil != null){ + inputLevel.setId(ftil.getId()); + } inputLevel.setDataverse(dataverse); - ctxt.fieldTypeInputLevels().create(inputLevel); + ctxt.fieldTypeInputLevels().save(inputLevel); }); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 3e1a160c9f2..50f171de352 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -1255,6 +1255,45 @@ public void testUpdateInputLevels() { updateDataverseInputLevelsResponse.then().assertThat() .body("message", equalTo("Error while updating dataverse input levels: Input level list cannot be null or empty")) .statusCode(INTERNAL_SERVER_ERROR.getStatusCode()); + + //Add new types and see that previously changed ones remain as before... 
#11387 + testInputLevelNames = new String[]{"subtitle", "relatedMaterial"}; + + testRequiredInputLevels = new boolean[] {false, false}; + testIncludedInputLevels = new boolean[] {true, true}; + boolean [] testDisplayOnCreate = new boolean[] {true, true}; + updateDataverseInputLevelsResponse = UtilIT.updateDataverseInputLevels(dataverseAlias, testInputLevelNames, testRequiredInputLevels, testIncludedInputLevels, testDisplayOnCreate, apiToken); + actualInputLevelName = updateDataverseInputLevelsResponse.then().extract().path("data.inputLevels[0].datasetFieldTypeName"); + int subtitleInputLevelIndex = actualInputLevelName.equals("subtitle") ? 0 : 1; + updateDataverseInputLevelsResponse.prettyPrint(); + + updateDataverseInputLevelsResponse.then().assertThat() + .body(String.format("data.inputLevels[%d].include", subtitleInputLevelIndex), equalTo(true)) + .body(String.format("data.inputLevels[%d].required", subtitleInputLevelIndex), equalTo(false)) + .body(String.format("data.inputLevels[%d].displayOnCreate", subtitleInputLevelIndex), equalTo(true)) + .body(String.format("data.inputLevels[%d].include", 1 - subtitleInputLevelIndex), equalTo(true)) + .body(String.format("data.inputLevels[%d].required", 1 - subtitleInputLevelIndex), equalTo(false)) + .body(String.format("data.inputLevels[%d].displayOnCreate", 1 - subtitleInputLevelIndex), equalTo(true)) + .statusCode(OK.getStatusCode()); + actualFieldTypeName1 = updateDataverseInputLevelsResponse.then().extract().path("data.inputLevels[0].datasetFieldTypeName"); + actualFieldTypeName2 = updateDataverseInputLevelsResponse.then().extract().path("data.inputLevels[1].datasetFieldTypeName"); + assertNotEquals(actualFieldTypeName1, actualFieldTypeName2); + assertThat(testInputLevelNames, hasItemInArray(actualFieldTypeName1)); + assertThat(testInputLevelNames, hasItemInArray(actualFieldTypeName2)); + + + testInputLevelNames = new String[]{"subtitle", "otherReferences"}; + testRequiredInputLevels = new boolean[] {false, false}; + 
testIncludedInputLevels = new boolean[] {true, true}; + testDisplayOnCreate = new boolean[] {false, true}; + + updateDataverseInputLevelsResponse = UtilIT.updateDataverseInputLevels(dataverseAlias, testInputLevelNames, testRequiredInputLevels, testIncludedInputLevels, testDisplayOnCreate, apiToken); + updateDataverseInputLevelsResponse.prettyPrint(); + + updateDataverseInputLevelsResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + + } @Test @@ -2274,14 +2313,16 @@ public void testUpdateInputLevelDisplayOnCreateOverride() { .body("data[0].displayName", equalTo("Citation Metadata")) .body("data.size()", equalTo(expectedOnlyDisplayedOnCreateNumberOfMetadataBlocks)) .body("data[0].fields.author.childFields.size()", is(4)); - + updateResponse = UtilIT.updateDataverseInputLevelDisplayOnCreate( - dataverseAlias, "subtitle", false, apiToken); + dataverseAlias, "subtitle", false, apiToken); + String actualInputLevelName = updateResponse.then().extract().path("data.inputLevels[0].datasetFieldTypeName"); + int subtitleIndex = actualInputLevelName.equals("subtitle") ? 
0 : 1; updateResponse.then().assertThat() - .statusCode(OK.getStatusCode()) - .body("data.inputLevels[0].displayOnCreate", equalTo(false)) - .body("data.inputLevels[0].datasetFieldTypeName", equalTo("subtitle")); - + .body(String.format("data.inputLevels[%d].displayOnCreate", subtitleIndex), equalTo(false)) + .body(String.format("data.inputLevels[%d].datasetFieldTypeName", subtitleIndex), equalTo("subtitle")) + .statusCode(OK.getStatusCode()); + } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 24f2adbb3ed..8c858a17ec9 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -4578,6 +4578,18 @@ public static Response updateDataverseInputLevels(String dataverseAlias, String[ .contentType(ContentType.JSON) .put("/api/dataverses/" + dataverseAlias + "/inputLevels"); } + + public static Response updateDataverseInputLevels(String dataverseAlias, String[] inputLevelNames, boolean[] requiredInputLevels, boolean[] includedInputLevels, boolean[] displayOnCreate, String apiToken) { + JsonArrayBuilder inputLevelsArrayBuilder = Json.createArrayBuilder(); + for (int i = 0; i < inputLevelNames.length; i++) { + inputLevelsArrayBuilder.add(createInputLevelObject(inputLevelNames[i], requiredInputLevels[i], includedInputLevels[i], displayOnCreate[i])); + } + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .body(inputLevelsArrayBuilder.build().toString()) + .contentType(ContentType.JSON) + .put("/api/dataverses/" + dataverseAlias + "/inputLevels"); + } private static JsonObjectBuilder createInputLevelObject(String name, boolean required, boolean include) { return Json.createObjectBuilder() @@ -4585,6 +4597,14 @@ private static JsonObjectBuilder createInputLevelObject(String name, boolean req .add("required", required) .add("include", include); } + + private static JsonObjectBuilder createInputLevelObject(String 
name, boolean required, boolean include, boolean displayOnCreate) { + return Json.createObjectBuilder() + .add("datasetFieldTypeName", name) + .add("required", required) + .add("include", include) + .add("displayOnCreate", displayOnCreate); + } public static Response getOpenAPI(String accept, String format) { Response response = given() From 2788a6d7e5d2959fef406c1dee47dc16081a7ab1 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Fri, 15 Aug 2025 10:42:46 -0400 Subject: [PATCH 093/634] #11387 update test; cleanup --- .../edu/harvard/iq/dataverse/api/DataversesIT.java | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 50f171de352..b07c3e4ffa0 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -1289,10 +1289,21 @@ public void testUpdateInputLevels() { updateDataverseInputLevelsResponse = UtilIT.updateDataverseInputLevels(dataverseAlias, testInputLevelNames, testRequiredInputLevels, testIncludedInputLevels, testDisplayOnCreate, apiToken); updateDataverseInputLevelsResponse.prettyPrint(); + + actualInputLevelName = updateDataverseInputLevelsResponse.then().extract().path("data.inputLevels[0].datasetFieldTypeName"); + subtitleInputLevelIndex = actualInputLevelName.equals("subtitle") ? 
0 : 1; + //make sure subtitle got changed to false updateDataverseInputLevelsResponse.then().assertThat() + .body(String.format("data.inputLevels[%d].displayOnCreate", subtitleInputLevelIndex), equalTo(false)) .statusCode(OK.getStatusCode()); + //make superuser for cleanup + String username = UtilIT.getUsernameFromResponse(createUserResponse); + UtilIT.setSuperuserStatus(username, Boolean.TRUE); + Response deleteDataverse1Response = UtilIT.deleteDataverse(dataverseAlias, apiToken); + deleteDataverse1Response.prettyPrint(); + assertEquals(200, deleteDataverse1Response.getStatusCode()); } From 08dfd98b0bbc4165068bf077c952865db526770d Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Fri, 15 Aug 2025 11:31:23 -0400 Subject: [PATCH 094/634] #11387 fix test --- .../engine/command/impl/CreateDataverseCommandTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommandTest.java index 73880b78e7b..2d2fd943d4d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommandTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommandTest.java @@ -307,7 +307,7 @@ public void testCustomOptions() throws CommandException { i++; } - assertTrue( dftilsDeleted ); + // assertTrue( dftilsDeleted ); we no longer delete when adding new input levels to preserve previously created for ( DataverseFieldTypeInputLevel dftil : createdDftils ) { assertEquals( result, dftil.getDataverse() ); } From 949b61758c7712cc91ab998c9f427e303b810bd9 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Fri, 15 Aug 2025 15:43:07 -0400 Subject: [PATCH 095/634] #11387 source format --- src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git 
a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index b07c3e4ffa0..36901548f2d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -2324,9 +2324,8 @@ public void testUpdateInputLevelDisplayOnCreateOverride() { .body("data[0].displayName", equalTo("Citation Metadata")) .body("data.size()", equalTo(expectedOnlyDisplayedOnCreateNumberOfMetadataBlocks)) .body("data[0].fields.author.childFields.size()", is(4)); - - updateResponse = UtilIT.updateDataverseInputLevelDisplayOnCreate( - dataverseAlias, "subtitle", false, apiToken); + + updateResponse = UtilIT.updateDataverseInputLevelDisplayOnCreate(dataverseAlias, "subtitle", false, apiToken); String actualInputLevelName = updateResponse.then().extract().path("data.inputLevels[0].datasetFieldTypeName"); int subtitleIndex = actualInputLevelName.equals("subtitle") ? 0 : 1; updateResponse.then().assertThat() From a451f375b7bf9b497d997d1e188aef19b6ac6683 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Mon, 18 Aug 2025 15:32:50 -0400 Subject: [PATCH 096/634] #11710 change search term to query param --- src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java | 4 ++-- src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 22f12ca06ff..b771275a112 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -1775,8 +1775,8 @@ public Response linkDataverse(@Context ContainerRequestContext crc, @PathParam(" @GET @AuthRequired @Produces(MediaType.APPLICATION_JSON) - @Path("{identifier}/{type}/linkingDataverses/{searchTerm}") - public Response getLinkingDataverseList(@Context 
ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @PathParam("searchTerm") String searchTerm, @PathParam("type") String type){ + @Path("{identifier}/{type}/linkingDataverses") + public Response getLinkingDataverseList(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @QueryParam("searchTerm") String searchTerm, @PathParam("type") String type){ //first determine what you are linking based on identifier and type try { diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index f28c0b3e640..fc2adc16682 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -4781,13 +4781,13 @@ static Response getLinkableDataverses (String type, String dvObjectId, String ap if (type.equals("dataset")) { if (!NumberUtils.isCreatable(idInPath)) { idInPath = ":persistentId"; - optionalQueryParam = "?persistentId=" + dvObjectId; + optionalQueryParam = "&persistentId=" + dvObjectId; } } return given() .header(API_TOKEN_HTTP_HEADER, apiToken) - .get("/api/dataverses/" + idInPath + "/" + type + "/linkingDataverses/" + searchTerm + optionalQueryParam); + .get("/api/dataverses/" + idInPath + "/" + type + "/linkingDataverses?searchTerm=" + searchTerm + optionalQueryParam); } static Response updateDataverseFeaturedItem(long featuredItemId, From 645dd38079c39632b0c07d6b5f8721fa0a2d63ad Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Mon, 18 Aug 2025 16:12:04 -0400 Subject: [PATCH 097/634] #11710 test for empty search --- .../edu/harvard/iq/dataverse/DataverseServiceBean.java | 4 ++++ .../java/edu/harvard/iq/dataverse/api/DataversesIT.java | 9 +++++++++ 2 files changed, 13 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java index b8ce8cb1a6b..6e3a3dcc1c8 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java @@ -535,6 +535,10 @@ public List filterDataversesForLinking(String query, DataverseRequest }); } + if (dvo instanceof Dataverse dataverse){ + remove.add(dataverse); + } + for (Dataverse res : results) { if (!remove.contains(res)) { if ((linkedDataset != null && this.permissionService.requestOn(req, res).has(Permission.LinkDataset)) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 34a5c6a6cec..167aa4ad756 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -721,6 +721,15 @@ public void testGetLinkableDataverses(){ String searchTerm = dataverseAliasForLinking.substring(0, 5); Response getLinkableDataversesForDataversePartial = UtilIT.getLinkableDataverses("dataverse", dataverseAlias, apiToken, searchTerm); + getLinkableDataversesForDataversePartial.prettyPrint(); + getLinkableDataversesForDataversePartial.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].alias", equalTo(dataverseAliasForLinking)); + + + //Try with empty string search term + searchTerm = ""; + getLinkableDataversesForDataversePartial = UtilIT.getLinkableDataverses("dataverse", dataverseAlias, apiToken, searchTerm); getLinkableDataversesForDataversePartial.prettyPrint(); getLinkableDataversesForDataversePartial.then().assertThat() .statusCode(OK.getStatusCode()) From 694cd3431cf4fc5c92e405551d873df1242dd8bb Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 19 Aug 2025 10:13:03 -0400 Subject: [PATCH 098/634] Update native-api.rst --- doc/sphinx-guides/source/api/native-api.rst | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 
2e0d7a6f230..0a208967f07 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -735,7 +735,7 @@ Note: you must have "Add Dataset" permission in the given collection to invoke t List Dataverse Collections to which a given Dataset or Dataverse Collection may be linked ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -The user must provide a search term to limit the list of Dataverse Collections returned. The search term will be compared to the name of the Dataverse Collections and must include the beginning of the Dataverse Collections' names. +The user may provide a search term to limit the list of Dataverse Collections returned. The search term will be compared to the name of the Dataverse Collections and must include the beginning of the Dataverse Collections' names. The response is a JSON array of the ids, aliases, and names of the Dataverse collections to which a given Dataset or Dataverse Collection may be linked: For a given Dataverse Collection: @@ -748,13 +748,13 @@ For a given Dataverse Collection: export ID=collectionAlias export SEARCH_TERM=searchOn - curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/dataverses/$ID/$OBJECT_TYPE/linkingDataverses/$SEARCH_TERM" + curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/dataverses/$ID/$OBJECT_TYPE/linkingDataverses?searchTerm=$SEARCH_TERM" The fully expanded example above (without environment variables) looks like this: .. 
code-block:: bash - curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X GET "https://demo.dataverse.org/api/dataverses/collectionAlias/dataverse/linkingDataverses/searchOn" + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X GET "https://demo.dataverse.org/api/dataverses/collectionAlias/dataverse/linkingDataverses?searchTerm=searchOn" For a given Dataset: @@ -766,13 +766,13 @@ For a given Dataset: export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/J8SJZB export SEARCH_TERM=searchOn - curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/dataverses/:persistentId/$OBJECT_TYPE/linkingDataverses/$SEARCH_TERM?persistentId=$PERSISTENT_IDENTIFIER" + curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/dataverses/:persistentId/$OBJECT_TYPE/linkingDataverses?searchTerm=$SEARCH_TERM&persistentId=$PERSISTENT_IDENTIFIER" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X GET "https://demo.dataverse.org/api/dataverses/:persistentId/dataset/linkingDataverses/searchOn?persistentId=doi:10.5072/FK2/J8SJZB" + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X GET "https://demo.dataverse.org/api/dataverses/:persistentId/dataset/linkingDataverses?searchTerm=searchOn&persistentId=doi:10.5072/FK2/J8SJZB" From 2644fc73dbeeb7abf8acebb601781403f4e7eb9c Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 19 Aug 2025 15:38:02 -0400 Subject: [PATCH 099/634] #11387 get actual index for test --- .../java/edu/harvard/iq/dataverse/api/DataversesIT.java | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 36901548f2d..e851134a8c7 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java
@@ -2306,12 +2306,15 @@ public void testUpdateInputLevelDisplayOnCreateOverride() { .body("data.inputLevels[0].displayOnCreate", equalTo(true)) .body("data.inputLevels[0].datasetFieldTypeName", equalTo("notesText")); + String actualInputLevelName = updateResponse.then().extract().path("data.inputLevels[0].datasetFieldTypeName"); + int subtitleInputLevelIndex = actualInputLevelName.equals("subtitle") ? 0 : 1; + updateResponse = UtilIT.updateDataverseInputLevelDisplayOnCreate( dataverseAlias, "subtitle", true, apiToken); updateResponse.then().assertThat() .statusCode(OK.getStatusCode()) - .body("data.inputLevels[0].displayOnCreate", equalTo(true)) - .body("data.inputLevels[0].datasetFieldTypeName", equalTo("subtitle")); + .body(String.format("data.inputLevels[%d].displayOnCreate", subtitleInputLevelIndex), equalTo(true)) + .body(String.format("data.inputLevels[%d].datasetFieldTypeName", subtitleInputLevelIndex), equalTo("subtitle")); listMetadataBlocksResponse = UtilIT.listMetadataBlocks(dataverseAlias, true, true, apiToken); listMetadataBlocksResponse.prettyPrint(); @@ -2326,7 +2329,7 @@ public void testUpdateInputLevelDisplayOnCreateOverride() { .body("data[0].fields.author.childFields.size()", is(4)); updateResponse = UtilIT.updateDataverseInputLevelDisplayOnCreate(dataverseAlias, "subtitle", false, apiToken); - String actualInputLevelName = updateResponse.then().extract().path("data.inputLevels[0].datasetFieldTypeName"); + actualInputLevelName = updateResponse.then().extract().path("data.inputLevels[0].datasetFieldTypeName"); int subtitleIndex = actualInputLevelName.equals("subtitle") ? 
0 : 1; updateResponse.then().assertThat() .body(String.format("data.inputLevels[%d].displayOnCreate", subtitleIndex), equalTo(false)) From a9f7f078b76bd2681b4fcb79ac6f62fed8984b26 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 19 Aug 2025 17:04:16 -0400 Subject: [PATCH 100/634] #11387 fix get index for test --- .../java/edu/harvard/iq/dataverse/api/DataversesIT.java | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index e851134a8c7..17256d3317b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -2306,11 +2306,13 @@ public void testUpdateInputLevelDisplayOnCreateOverride() { .body("data.inputLevels[0].displayOnCreate", equalTo(true)) .body("data.inputLevels[0].datasetFieldTypeName", equalTo("notesText")); - String actualInputLevelName = updateResponse.then().extract().path("data.inputLevels[0].datasetFieldTypeName"); - int subtitleInputLevelIndex = actualInputLevelName.equals("subtitle") ? 0 : 1; - + updateResponse = UtilIT.updateDataverseInputLevelDisplayOnCreate( dataverseAlias, "subtitle", true, apiToken); + + String actualInputLevelName = updateResponse.then().extract().path("data.inputLevels[0].datasetFieldTypeName"); + int subtitleInputLevelIndex = actualInputLevelName.equals("subtitle") ? 
0 : 1; + updateResponse.prettyPrint(); updateResponse.then().assertThat() .statusCode(OK.getStatusCode()) .body(String.format("data.inputLevels[%d].displayOnCreate", subtitleInputLevelIndex), equalTo(true)) From 04f6534e4da7a3ff373fe9e72fb1aba371fbf97c Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 20 Aug 2025 11:20:58 -0400 Subject: [PATCH 101/634] #11387 guard against differing json array in test --- .../iq/dataverse/api/DataversesIT.java | 28 +++++++++++++------ 1 file changed, 20 insertions(+), 8 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 17256d3317b..e650cd11f6f 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -1261,19 +1261,31 @@ public void testUpdateInputLevels() { testRequiredInputLevels = new boolean[] {false, false}; testIncludedInputLevels = new boolean[] {true, true}; - boolean [] testDisplayOnCreate = new boolean[] {true, true}; + boolean [] testDisplayOnCreate = new boolean[] {true, false}; updateDataverseInputLevelsResponse = UtilIT.updateDataverseInputLevels(dataverseAlias, testInputLevelNames, testRequiredInputLevels, testIncludedInputLevels, testDisplayOnCreate, apiToken); - actualInputLevelName = updateDataverseInputLevelsResponse.then().extract().path("data.inputLevels[0].datasetFieldTypeName"); - int subtitleInputLevelIndex = actualInputLevelName.equals("subtitle") ? 
0 : 1; - updateDataverseInputLevelsResponse.prettyPrint(); - + updateDataverseInputLevelsResponse.prettyPrint(); + int subtitleInputLevelIndex = 0; + int relatedMaterialInputLevelIndex = 0; + int i = 0; + + while (updateDataverseInputLevelsResponse.then().extract().path(String.format("data.inputLevels[%d].datasetFieldTypeName", i)) != null){ + actualInputLevelName = updateDataverseInputLevelsResponse.then().extract().path(String.format("data.inputLevels[%d].datasetFieldTypeName", i)).toString(); + if (actualInputLevelName.equals("subtitle")){ + subtitleInputLevelIndex = i; + } + if (actualInputLevelName.equals("relatedMaterial")){ + relatedMaterialInputLevelIndex = i; + } + i++; + } + updateDataverseInputLevelsResponse.then().assertThat() .body(String.format("data.inputLevels[%d].include", subtitleInputLevelIndex), equalTo(true)) .body(String.format("data.inputLevels[%d].required", subtitleInputLevelIndex), equalTo(false)) .body(String.format("data.inputLevels[%d].displayOnCreate", subtitleInputLevelIndex), equalTo(true)) - .body(String.format("data.inputLevels[%d].include", 1 - subtitleInputLevelIndex), equalTo(true)) - .body(String.format("data.inputLevels[%d].required", 1 - subtitleInputLevelIndex), equalTo(false)) - .body(String.format("data.inputLevels[%d].displayOnCreate", 1 - subtitleInputLevelIndex), equalTo(true)) + .body(String.format("data.inputLevels[%d].include", relatedMaterialInputLevelIndex), equalTo(true)) + .body(String.format("data.inputLevels[%d].required", relatedMaterialInputLevelIndex), equalTo(false)) + .body(String.format("data.inputLevels[%d].displayOnCreate", relatedMaterialInputLevelIndex), equalTo(false)) .statusCode(OK.getStatusCode()); actualFieldTypeName1 = updateDataverseInputLevelsResponse.then().extract().path("data.inputLevels[0].datasetFieldTypeName"); actualFieldTypeName2 = updateDataverseInputLevelsResponse.then().extract().path("data.inputLevels[1].datasetFieldTypeName"); From e001ff048dcb7593acbc2b7b44a46aef8eded424 Mon Sep 
17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 20 Aug 2025 13:26:27 -0400 Subject: [PATCH 102/634] #11387 more array testing fun --- .../iq/dataverse/api/DataversesIT.java | 23 +++++++++++++++---- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index e650cd11f6f..682b2ddc8a3 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -1287,8 +1287,9 @@ public void testUpdateInputLevels() { .body(String.format("data.inputLevels[%d].required", relatedMaterialInputLevelIndex), equalTo(false)) .body(String.format("data.inputLevels[%d].displayOnCreate", relatedMaterialInputLevelIndex), equalTo(false)) .statusCode(OK.getStatusCode()); - actualFieldTypeName1 = updateDataverseInputLevelsResponse.then().extract().path("data.inputLevels[0].datasetFieldTypeName"); - actualFieldTypeName2 = updateDataverseInputLevelsResponse.then().extract().path("data.inputLevels[1].datasetFieldTypeName"); + + actualFieldTypeName1 = updateDataverseInputLevelsResponse.then().extract().path(String.format("data.inputLevels[%d].datasetFieldTypeName", subtitleInputLevelIndex)); + actualFieldTypeName2 = updateDataverseInputLevelsResponse.then().extract().path(String.format("data.inputLevels[%d].datasetFieldTypeName", relatedMaterialInputLevelIndex)); assertNotEquals(actualFieldTypeName1, actualFieldTypeName2); assertThat(testInputLevelNames, hasItemInArray(actualFieldTypeName1)); assertThat(testInputLevelNames, hasItemInArray(actualFieldTypeName2)); @@ -1302,9 +1303,21 @@ public void testUpdateInputLevels() { updateDataverseInputLevelsResponse = UtilIT.updateDataverseInputLevels(dataverseAlias, testInputLevelNames, testRequiredInputLevels, testIncludedInputLevels, testDisplayOnCreate, apiToken); updateDataverseInputLevelsResponse.prettyPrint(); - actualInputLevelName = 
updateDataverseInputLevelsResponse.then().extract().path("data.inputLevels[0].datasetFieldTypeName"); - subtitleInputLevelIndex = actualInputLevelName.equals("subtitle") ? 0 : 1; - + subtitleInputLevelIndex = 0; + int otherReferencesInputLevelIndex = 0; + i = 0; + + while (updateDataverseInputLevelsResponse.then().extract().path(String.format("data.inputLevels[%d].datasetFieldTypeName", i)) != null) { + actualInputLevelName = updateDataverseInputLevelsResponse.then().extract().path(String.format("data.inputLevels[%d].datasetFieldTypeName", i)).toString(); + if (actualInputLevelName.equals("subtitle")) { + subtitleInputLevelIndex = i; + } + if (actualInputLevelName.equals("otherReferences")) { + otherReferencesInputLevelIndex = i; + } + i++; + } + //make sure subtitle got changed to false updateDataverseInputLevelsResponse.then().assertThat() .body(String.format("data.inputLevels[%d].displayOnCreate", subtitleInputLevelIndex), equalTo(false)) From de9f68768555affdad2ef0e2d193e362246c6de4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 22 Aug 2025 00:11:29 +0000 Subject: [PATCH 103/634] Bump actions/setup-java from 4 to 5 Bumps [actions/setup-java](https://github.com/actions/setup-java) from 4 to 5. - [Release notes](https://github.com/actions/setup-java/releases) - [Commits](https://github.com/actions/setup-java/compare/v4...v5) --- updated-dependencies: - dependency-name: actions/setup-java dependency-version: '5' dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] --- .github/workflows/container_app_pr.yml | 2 +- .github/workflows/deploy_beta_testing.yml | 2 +- .github/workflows/maven_cache_management.yml | 2 +- .github/workflows/maven_unit_test.yml | 6 +++--- .github/workflows/spi_release.yml | 6 +++--- 5 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/workflows/container_app_pr.yml b/.github/workflows/container_app_pr.yml index a4c52805156..b9f9fc6a51c 100644 --- a/.github/workflows/container_app_pr.yml +++ b/.github/workflows/container_app_pr.yml @@ -23,7 +23,7 @@ jobs: - uses: actions/checkout@v4 with: ref: 'refs/pull/${{ github.event.client_payload.pull_request.number }}/merge' - - uses: actions/setup-java@v4 + - uses: actions/setup-java@v5 with: java-version: "17" distribution: 'adopt' diff --git a/.github/workflows/deploy_beta_testing.yml b/.github/workflows/deploy_beta_testing.yml index 7a236a316fb..dc49c51e9d3 100644 --- a/.github/workflows/deploy_beta_testing.yml +++ b/.github/workflows/deploy_beta_testing.yml @@ -16,7 +16,7 @@ jobs: steps: - uses: actions/checkout@v4 - - uses: actions/setup-java@v4 + - uses: actions/setup-java@v5 with: distribution: 'zulu' java-version: '17' diff --git a/.github/workflows/maven_cache_management.yml b/.github/workflows/maven_cache_management.yml index fedf63b7c54..8099119760d 100644 --- a/.github/workflows/maven_cache_management.yml +++ b/.github/workflows/maven_cache_management.yml @@ -36,7 +36,7 @@ jobs: - name: Determine Java version from Parent POM run: echo "JAVA_VERSION=$(grep '' modules/dataverse-parent/pom.xml | cut -f2 -d'>' | cut -f1 -d'<')" >> ${GITHUB_ENV} - name: Set up JDK ${{ env.JAVA_VERSION }} - uses: actions/setup-java@v4 + uses: actions/setup-java@v5 with: java-version: ${{ env.JAVA_VERSION }} distribution: temurin diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml index f0cf461d8e6..5335efc2c76 100644 --- a/.github/workflows/maven_unit_test.yml +++ 
b/.github/workflows/maven_unit_test.yml @@ -39,7 +39,7 @@ jobs: # Basic setup chores - uses: actions/checkout@v4 - name: Set up JDK ${{ matrix.jdk }} - uses: actions/setup-java@v4 + uses: actions/setup-java@v5 with: java-version: ${{ matrix.jdk }} distribution: temurin @@ -105,7 +105,7 @@ jobs: # Basic setup chores - uses: actions/checkout@v4 - name: Set up JDK ${{ matrix.jdk }} - uses: actions/setup-java@v4 + uses: actions/setup-java@v5 with: java-version: ${{ matrix.jdk }} distribution: temurin @@ -138,7 +138,7 @@ jobs: # TODO: As part of #10618 change to setup-maven custom action # Basic setup chores - uses: actions/checkout@v4 - - uses: actions/setup-java@v4 + - uses: actions/setup-java@v5 with: java-version: '17' distribution: temurin diff --git a/.github/workflows/spi_release.yml b/.github/workflows/spi_release.yml index 6398edca412..1c896aaff6f 100644 --- a/.github/workflows/spi_release.yml +++ b/.github/workflows/spi_release.yml @@ -38,7 +38,7 @@ jobs: if: github.event_name == 'pull_request' && needs.check-secrets.outputs.available == 'true' steps: - uses: actions/checkout@v4 - - uses: actions/setup-java@v4 + - uses: actions/setup-java@v5 with: java-version: '17' distribution: 'adopt' @@ -64,7 +64,7 @@ jobs: if: github.event_name == 'push' && needs.check-secrets.outputs.available == 'true' steps: - uses: actions/checkout@v4 - - uses: actions/setup-java@v4 + - uses: actions/setup-java@v5 with: java-version: '17' distribution: 'adopt' @@ -76,7 +76,7 @@ jobs: # Running setup-java again overwrites the settings.xml - IT'S MANDATORY TO DO THIS SECOND SETUP!!! 
- name: Set up Maven Central Repository - uses: actions/setup-java@v4 + uses: actions/setup-java@v5 with: java-version: '17' distribution: 'adopt' From 9d24e0946bea91110b6f25c809133ee2aafb14bb Mon Sep 17 00:00:00 2001 From: Florian Fritze Date: Thu, 21 Aug 2025 15:33:33 +0200 Subject: [PATCH 104/634] integer detection for array access --- .../edu/harvard/iq/dataverse/DatasetFieldServiceBean.java | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java index dce7a98fd75..49fdafd2f5e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java @@ -764,7 +764,13 @@ Object processPathSegment(int index, String[] pathParts, JsonValue curPath, Stri } } else { - curPath = ((JsonObject) curPath).get(pathParts[index]); + try { + int indexNumber = Integer.parseInt(pathParts[index]); + curPath = ((JsonObject) curPath).get(indexNumber); + } catch (NumberFormatException nfe) { + curPath = ((JsonObject) curPath).get(pathParts[index]); + } + // curPath = ((JsonObject) curPath).get(pathParts[index]); logger.fine("Found next Path object " + curPath.toString()); return processPathSegment(index + 1, pathParts, curPath, termUri); } From 894d3d5b2ea4eba018c55c7c074a2ca527819835 Mon Sep 17 00:00:00 2001 From: Florian Fritze Date: Fri, 22 Aug 2025 07:27:25 +0200 Subject: [PATCH 105/634] first check if it's a number --- .../iq/dataverse/DatasetFieldServiceBean.java | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java index 49fdafd2f5e..35b34c8d3a5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java @@ -43,6 +43,7 @@ import jakarta.persistence.criteria.*; import org.apache.commons.codec.digest.DigestUtils; import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.math.NumberUtils; import org.apache.http.HttpResponse; import org.apache.http.HttpResponseInterceptor; import org.apache.http.client.methods.HttpGet; @@ -764,10 +765,14 @@ Object processPathSegment(int index, String[] pathParts, JsonValue curPath, Stri } } else { - try { - int indexNumber = Integer.parseInt(pathParts[index]); - curPath = ((JsonObject) curPath).get(indexNumber); - } catch (NumberFormatException nfe) { + if (NumberUtils.isCreatable(pathParts[index])) { + try { + int indexNumber = Integer.parseInt(pathParts[index]); + curPath = ((JsonObject) curPath).get(indexNumber); + } catch (NumberFormatException nfe) { + logger.fine("Please provide a valid integer number " + pathParts[index]); + } + } else { curPath = ((JsonObject) curPath).get(pathParts[index]); } // curPath = ((JsonObject) curPath).get(pathParts[index]); From 69e5a349f3cd44f28b86ff0e1dac4ba5eb71f803 Mon Sep 17 00:00:00 2001 From: Florian Fritze Date: Fri, 22 Aug 2025 08:03:34 +0200 Subject: [PATCH 106/634] JsonArray fix --- .../java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java index 35b34c8d3a5..244e7f959d8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java @@ -768,7 +768,7 @@ Object processPathSegment(int index, String[] pathParts, JsonValue curPath, Stri if (NumberUtils.isCreatable(pathParts[index])) { try { int indexNumber = Integer.parseInt(pathParts[index]); - curPath = ((JsonObject) curPath).get(indexNumber); + curPath = 
((JsonArray) curPath).get(indexNumber); } catch (NumberFormatException nfe) { logger.fine("Please provide a valid integer number " + pathParts[index]); } From 5c1ce14f5abae0ce350ca113caa5deb89153794b Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 26 Aug 2025 11:33:42 -0400 Subject: [PATCH 107/634] #11719 combine perms and search filter for query --- .../iq/dataverse/DataverseServiceBean.java | 41 +++++++++++++++++++ .../iq/dataverse/PermissionServiceBean.java | 24 +++++++++-- .../harvard/iq/dataverse/api/Dataverses.java | 18 +++++++- .../iq/dataverse/api/DataversesIT.java | 17 ++++++-- 4 files changed, 91 insertions(+), 9 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java index 6e3a3dcc1c8..85d4179a6d9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java @@ -550,6 +550,47 @@ public List filterDataversesForLinking(String query, DataverseRequest return dataverseList; } + + + public List removeUnlinkableDataverses (List allWithPerms, DvObject dvo){ + List dataverseList = new ArrayList<>(); + Dataset linkedDataset = null; + Dataverse linkedDataverse = null; + List alreadyLinkeddv_ids; + + if ((dvo instanceof Dataset)) { + linkedDataset = (Dataset) dvo; + alreadyLinkeddv_ids = em.createNativeQuery("SELECT linkingdataverse_id FROM datasetlinkingdataverse WHERE dataset_id = " + linkedDataset.getId()).getResultList(); + } else { + linkedDataverse = (Dataverse) dvo; + alreadyLinkeddv_ids = em.createNativeQuery("SELECT linkingdataverse_id FROM dataverselinkingdataverse WHERE dataverse_id = " + linkedDataverse.getId()).getResultList(); + } + + List remove = new ArrayList<>(); + + if (alreadyLinkeddv_ids != null && !alreadyLinkeddv_ids.isEmpty()) { + alreadyLinkeddv_ids.stream().map((testDVId) -> this.find(testDVId)).forEachOrdered((removeIt) -> { + 
remove.add(removeIt); + }); + } + + if (dvo instanceof Dataverse dataverse){ + remove.add(dataverse); + } else { + //dataset is always owned by a dataverse + remove.add((Dataverse)dvo.getOwner()); + } + + for (Dataverse res : allWithPerms) { + if (!remove.contains(res)) { + dataverseList.add(res); + } + } + + return dataverseList; + } + + public List filterDataversesForUnLinking(String query, DataverseRequest req, Dataset dataset) { List alreadyLinkeddv_ids = em.createNativeQuery("SELECT linkingdataverse_id FROM datasetlinkingdataverse WHERE dataset_id = " + dataset.getId()).getResultList(); List dataverseList = new ArrayList<>(); diff --git a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java index f1099c0a439..778bf23586a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java @@ -101,7 +101,7 @@ WITH grouplist AS ( WHERE explicitgroup_authenticateduser.containedauthenticatedusers_id = @USERID ) - SELECT * FROM DATAVERSE WHERE id IN ( + SELECT * FROM DATAVERSE WHERE id IN ( SELECT definitionpoint_id FROM roleassignment WHERE roleassignment.assigneeidentifier IN ( @@ -157,7 +157,7 @@ AND EXISTS (SELECT id FROM dataverserole WHERE dataverserole.id = roleassignment AND @IPRANGESQL ) ) - ) + ) @SEARCHCLAUSE """; /** * A request-level permission query (e.g includes IP ras). 
@@ -921,8 +921,16 @@ private boolean hasUnrestrictedReleasedFiles(DatasetVersion targetDatasetVersion public List findPermittedCollections(DataverseRequest request, AuthenticatedUser user, Permission permission) { return findPermittedCollections(request, user, 1 << permission.ordinal()); } + + public List findPermittedCollections(DataverseRequest request, AuthenticatedUser user, Permission permission, String searchTerm) { + return findPermittedCollections(request, user, 1 << permission.ordinal(), searchTerm); + } public List findPermittedCollections(DataverseRequest request, AuthenticatedUser user, int permissionBit) { + return findPermittedCollections(request, user, permissionBit, ""); + } + + public List findPermittedCollections(DataverseRequest request, AuthenticatedUser user, int permissionBit, String searchTerm) { if (user != null) { // IP Group - Only check IP if a User is calling for themself String ipRangeSQL = "FALSE"; @@ -950,16 +958,26 @@ public List findPermittedCollections(DataverseRequest request, Authen } } } + + String searchClause = ""; + if (!searchTerm.isEmpty()){ + searchClause = " AND ((LOWER(DATAVERSE.alias) LIKE '%@ALIAS%') OR (LOWER(DATAVERSE.name) LIKE '%@NAME%') OR (LOWER(DATAVERSE.affiliation) LIKE '%@AFFILIATION%')) " + .replace("@ALIAS", searchTerm.toLowerCase()) + .replace("@NAME", searchTerm.toLowerCase()) + .replace("@AFFILIATION", searchTerm.toLowerCase()); + } String sqlCode = LIST_ALL_DATAVERSES_USER_HAS_PERMISSION .replace("@USERID", String.valueOf(user.getId())) .replace("@PERMISSIONBIT", String.valueOf(permissionBit)) - .replace("@IPRANGESQL", ipRangeSQL); + .replace("@IPRANGESQL", ipRangeSQL) + .replace("@SEARCHCLAUSE", searchClause); return em.createNativeQuery(sqlCode, Dataverse.class).getResultList(); } return null; } + /** * Calculates the complete list of role assignments for a given user on a DvObject. 
* This includes roles assigned directly to the user and roles inherited from any groups diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 5522b35906b..bf332a5bb3d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -15,6 +15,7 @@ import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroup; import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupProvider; import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupServiceBean; +import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IpAddress; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.dataset.DatasetType; @@ -1781,8 +1782,21 @@ public Response getLinkingDataverseList(@Context ContainerRequestContext crc, @P try { DvObject dvObject = findDvoByIdAndTypeOrDie(dvIdtf, type); - List dataversesForLinking = dataverseService.filterDataversesForLinking(searchTerm, createDataverseRequest(getRequestUser(crc)), dvObject); - JsonArrayBuilder dvBuilder = Json.createArrayBuilder(); + // List dataversesForLinking = dataverseService.filterDataversesForLinking(searchTerm, createDataverseRequest(getRequestUser(crc)), dvObject); + // public List findPermittedCollections(DataverseRequest request, AuthenticatedUser user, Permission permission, String searchTerm) { + + AuthenticatedUser requestUser = (AuthenticatedUser)getRequestUser(crc); + List dataversesForLinking = new ArrayList<>(); + + if ((dvObject instanceof Dataset)) { + dataversesForLinking = permissionService.findPermittedCollections( new DataverseRequest(requestUser, (IpAddress) null), requestUser, Permission.LinkDataset, searchTerm); + } else { + dataversesForLinking = 
permissionService.findPermittedCollections( new DataverseRequest(requestUser, (IpAddress) null), requestUser, Permission.LinkDataverse, searchTerm); + + } + + dataversesForLinking = dataverseService.removeUnlinkableDataverses(dataversesForLinking, dvObject); + JsonArrayBuilder dvBuilder = Json.createArrayBuilder(); if (dataversesForLinking != null && !dataversesForLinking.isEmpty()) { for (Dataverse dv : dataversesForLinking) { dvBuilder.add(json(dv, true)); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 167aa4ad756..9ba53f2e8b6 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -683,7 +683,7 @@ public void testGetLinkableDataverses(){ Response createUser = UtilIT.createRandomUser(); String apiToken = UtilIT.getApiTokenFromResponse(createUser); String username = UtilIT.getUsernameFromResponse(createUser); - + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); @@ -718,7 +718,7 @@ public void testGetLinkableDataverses(){ //Should be able to get based on a partial alias... 
// Partial must include the first part of the name - String searchTerm = dataverseAliasForLinking.substring(0, 5); + String searchTerm = dataverseAliasForLinking.substring(0, 7); Response getLinkableDataversesForDataversePartial = UtilIT.getLinkableDataverses("dataverse", dataverseAlias, apiToken, searchTerm); getLinkableDataversesForDataversePartial.prettyPrint(); @@ -745,15 +745,21 @@ public void testGetLinkableDataverses(){ createDataverseResponseUnavailableForLinking.prettyPrint(); String dataverseAliasUnavailableForLinking = UtilIT.getAliasFromResponse(createDataverseResponseUnavailableForLinking); publishDataverse = UtilIT.publishDataverseViaNativeApi(dataverseAliasUnavailableForLinking, apiTokenTwo); + publishDataverse.prettyPrint(); assertEquals(200, publishDataverse.getStatusCode()); - Response getUnavailableForDataset = UtilIT.getLinkableDataverses("dataset", datasetPersistentId, apiToken, dataverseAliasUnavailableForLinking); + //user 3 will not have permissions + Response createUserThree = UtilIT.createRandomUser(); + String apiTokenThree = UtilIT.getApiTokenFromResponse(createUserThree); + String usernameThree = UtilIT.getUsernameFromResponse(createUserThree); + + Response getUnavailableForDataset = UtilIT.getLinkableDataverses("dataset", datasetPersistentId, apiTokenThree, dataverseAliasUnavailableForLinking); getUnavailableForDataset.prettyPrint(); getUnavailableForDataset.then().assertThat() .statusCode(OK.getStatusCode()) .body("data.size()", equalTo(0)); - Response getUnavailableForDataverse = UtilIT.getLinkableDataverses("dataverse", dataverseAlias, apiToken, dataverseAliasUnavailableForLinking); + Response getUnavailableForDataverse = UtilIT.getLinkableDataverses("dataverse", dataverseAlias, apiTokenThree, dataverseAliasUnavailableForLinking); getUnavailableForDataverse.prettyPrint(); getUnavailableForDataverse.then().assertThat() .statusCode(OK.getStatusCode()) @@ -798,6 +804,9 @@ public void testGetLinkableDataverses(){ Response 
deleteUserResponse = UtilIT.deleteUser(usernameTwo); assertEquals(200, deleteUserResponse.getStatusCode()); + deleteUserResponse = UtilIT.deleteUser(usernameThree); + assertEquals(200, deleteUserResponse.getStatusCode()); + deleteUserResponse = UtilIT.deleteUser(username); assertEquals(200, deleteUserResponse.getStatusCode()); From 2bcfa709ec04aaca0c5166f3086ebdcedcbd370c Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 26 Aug 2025 14:55:13 -0400 Subject: [PATCH 108/634] #11710 refactor query update --- .../harvard/iq/dataverse/PermissionServiceBean.java | 12 ++---------- .../edu/harvard/iq/dataverse/api/DataversesIT.java | 10 ++++++++-- 2 files changed, 10 insertions(+), 12 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java index 778bf23586a..6fe5db974eb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java @@ -157,7 +157,7 @@ AND EXISTS (SELECT id FROM dataverserole WHERE dataverserole.id = roleassignment AND @IPRANGESQL ) ) - ) @SEARCHCLAUSE + ) AND ((LOWER(DATAVERSE.alias) LIKE '%@SEARCHTERM%') OR (LOWER(DATAVERSE.name) LIKE '%@SEARCHTERM%') OR (LOWER(DATAVERSE.affiliation) LIKE '%@SEARCHTERM%')) """; /** * A request-level permission query (e.g includes IP ras). 
@@ -959,19 +959,11 @@ public List findPermittedCollections(DataverseRequest request, Authen } } - String searchClause = ""; - if (!searchTerm.isEmpty()){ - searchClause = " AND ((LOWER(DATAVERSE.alias) LIKE '%@ALIAS%') OR (LOWER(DATAVERSE.name) LIKE '%@NAME%') OR (LOWER(DATAVERSE.affiliation) LIKE '%@AFFILIATION%')) " - .replace("@ALIAS", searchTerm.toLowerCase()) - .replace("@NAME", searchTerm.toLowerCase()) - .replace("@AFFILIATION", searchTerm.toLowerCase()); - } - String sqlCode = LIST_ALL_DATAVERSES_USER_HAS_PERMISSION .replace("@USERID", String.valueOf(user.getId())) .replace("@PERMISSIONBIT", String.valueOf(permissionBit)) .replace("@IPRANGESQL", ipRangeSQL) - .replace("@SEARCHCLAUSE", searchClause); + .replace("@SEARCHTERM", searchTerm); return em.createNativeQuery(sqlCode, Dataverse.class).getResultList(); } return null; diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 9ba53f2e8b6..64c88197494 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -753,17 +753,23 @@ public void testGetLinkableDataverses(){ String apiTokenThree = UtilIT.getApiTokenFromResponse(createUserThree); String usernameThree = UtilIT.getUsernameFromResponse(createUserThree); - Response getUnavailableForDataset = UtilIT.getLinkableDataverses("dataset", datasetPersistentId, apiTokenThree, dataverseAliasUnavailableForLinking); + Response getUnavailableForDataset = UtilIT.getLinkableDataverses("dataset", datasetPersistentId, apiToken, dataverseAliasUnavailableForLinking); getUnavailableForDataset.prettyPrint(); getUnavailableForDataset.then().assertThat() .statusCode(OK.getStatusCode()) .body("data.size()", equalTo(0)); - Response getUnavailableForDataverse = UtilIT.getLinkableDataverses("dataverse", dataverseAlias, apiTokenThree, dataverseAliasUnavailableForLinking); + Response getUnavailableForDataverse = 
UtilIT.getLinkableDataverses("dataverse", dataverseAlias, apiToken, dataverseAliasUnavailableForLinking); getUnavailableForDataverse.prettyPrint(); getUnavailableForDataverse.then().assertThat() .statusCode(OK.getStatusCode()) .body("data.size()", equalTo(0)); + + Response getNoPermsOnAnyCollection = UtilIT.getLinkableDataverses("dataverse", dataverseAlias, apiTokenThree, ""); + getNoPermsOnAnyCollection.prettyPrint(); + getNoPermsOnAnyCollection.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.size()", equalTo(0)); //now link a dataset and see that it's unavailable in the future From 36f36f749107d6ce28caec381885bf4d3b1ca8f3 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 26 Aug 2025 15:01:14 -0400 Subject: [PATCH 109/634] Expanding the ExportDataProvider interface to add hooks for more efficient handling of the raw data for datasets with massive numbers of ingested/tabular datafiles and variables. #11766 #11405 --- modules/dataverse-spi/pom.xml | 2 +- .../gdcc/spi/export/ExportDataProvider.java | 28 +++++++++++++++++-- 2 files changed, 27 insertions(+), 3 deletions(-) diff --git a/modules/dataverse-spi/pom.xml b/modules/dataverse-spi/pom.xml index b00053fe5e0..8c31a2a4026 100644 --- a/modules/dataverse-spi/pom.xml +++ b/modules/dataverse-spi/pom.xml @@ -13,7 +13,7 @@ io.gdcc dataverse-spi - 2.0.0${project.version.suffix} + 2.1.0${project.version.suffix} jar Dataverse SPI Plugin API diff --git a/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataProvider.java b/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataProvider.java index d039ac39e8f..45b332fb088 100644 --- a/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataProvider.java +++ b/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataProvider.java @@ -21,8 +21,13 @@ public interface ExportDataProvider { * OAI_ORE export are the only two that provide 'complete' * dataset-level metadata along with basic file metadata for each file 
* in the dataset. + * @param options - optional argument(s). currently supports DatasetMetadataOnly: + * in a situation where we need to generate a format like DC, + * that has no use for file-level metadata, it makes sense to + * skip retrieving and formatting it, since there can be quite a few + * files in a dataset. */ - JsonObject getDatasetJson(); + JsonObject getDatasetJson(ExportDataOption... options); /** * @@ -39,7 +44,7 @@ public interface ExportDataProvider { * Dataverse is capable of extracting DDI-centric metadata from tabular * datafiles. This detailed metadata, which is only available for successfully * "ingested" tabular files, is not included in the output of any other methods - * in this interface. + * in this interface. * * @return - a JSONArray with one entry per ingested tabular dataset file. * @apiNote - there is no JSON schema available for this output and the format @@ -50,6 +55,20 @@ public interface ExportDataProvider { */ JsonArray getDatasetFileDetails(); + /** + * Similar to the above, but + * a) retrieves the information for the ingested/tabular data files _only_ + * b) provides an option for retrieving this stuff in batches + * c) provides an option for skipping restricted/embargoed etc. files. + * Intended for datasets with massive numbers of tabular files and datavariables. + * @param offset (can be null) + * @param length (can be null) + * @param options (optional) supports PublicFilesOnly; + * @return json array containing the datafile/filemetadata->datatable->datavariable metadata + * @throws ExportException + */ + JsonArray getTabularDataDetails(Integer offset, Integer length, ExportDataOption ... 
options) throws ExportException; + /** * * @return - the subset of metadata conforming to the schema.org standard as @@ -92,5 +111,10 @@ public interface ExportDataProvider { default Optional getPrerequisiteInputStream() { return Optional.empty(); } + + public enum ExportDataOption { + DatasetMetadataOnly, + PublicFilesOnly; + } } From f9963ca3405f158116f198c573418f8ede13932f Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 26 Aug 2025 15:14:34 -0400 Subject: [PATCH 110/634] cosmetic #11766 --- .../src/main/java/io/gdcc/spi/export/ExportDataProvider.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataProvider.java b/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataProvider.java index 45b332fb088..eefe0d7d828 100644 --- a/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataProvider.java +++ b/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataProvider.java @@ -21,7 +21,7 @@ public interface ExportDataProvider { * OAI_ORE export are the only two that provide 'complete' * dataset-level metadata along with basic file metadata for each file * in the dataset. - * @param options - optional argument(s). currently supports DatasetMetadataOnly: + * @param options - optional argument(s). needs to support ExportDataOption.DatasetMetadataOnly: * in a situation where we need to generate a format like DC, * that has no use for file-level metadata, it makes sense to * skip retrieving and formatting it, since there can be quite a few @@ -63,7 +63,7 @@ public interface ExportDataProvider { * Intended for datasets with massive numbers of tabular files and datavariables. 
* @param offset (can be null) * @param length (can be null) - * @param options (optional) supports PublicFilesOnly; + * @param options (optional) current use case is ExportDataOption.PublicFilesOnly; * @return json array containing the datafile/filemetadata->datatable->datavariable metadata * @throws ExportException */ From f18f663c347059148571e416973989c835f923c2 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Tue, 26 Aug 2025 16:55:59 -0400 Subject: [PATCH 111/634] adding directUpload to the Json response --- src/main/java/edu/harvard/iq/dataverse/api/Admin.java | 2 +- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 2 +- .../java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java | 5 ++++- src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 1 + src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java | 4 ++++ 5 files changed, 11 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 1d726830c31..8354c8e71d2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -2197,7 +2197,7 @@ public Response getStorageDriver(@Context ContainerRequestContext crc, @PathPara return wr.getResponse(); } //Note that this returns what's set directly on this dataverse. 
If null/DataAccess.UNDEFINED_STORAGE_DRIVER_IDENTIFIER, the user would have to recurse the chain of parents to find the effective storageDriver - return ok(JsonPrinter.jsonStorageDriver(dataverse.getStorageDriverId())); + return ok(JsonPrinter.jsonStorageDriver(dataverse.getStorageDriverId(), null)); } @PUT diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index fd7ca0d61cb..fa7f12e0436 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -3690,7 +3690,7 @@ public Response getFileStore(@Context ContainerRequestContext crc, @PathParam("i return error(Response.Status.NOT_FOUND, "No such dataset"); } - return ok(JsonPrinter.jsonStorageDriver(dataset.getStorageDriverId())); + return ok(JsonPrinter.jsonStorageDriver(dataset.getStorageDriverId(), dataset)); } @PUT diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 5ac5e4a8fd8..7dfb0ab3c2a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -1634,11 +1634,14 @@ public static JsonArrayBuilder jsonTemplateInstructions(Map temp return jsonArrayBuilder; } - public static JsonObjectBuilder jsonStorageDriver(String storageDriverId) { + public static JsonObjectBuilder jsonStorageDriver(String storageDriverId, Dataset dataset) { JsonObjectBuilder jsonObjectBuilder = new NullSafeJsonBuilder(); jsonObjectBuilder.add("name", storageDriverId); jsonObjectBuilder.add("type", DataAccess.getDriverType(storageDriverId)); jsonObjectBuilder.add("label", DataAccess.getStorageDriverLabelFor(storageDriverId)); + if (dataset != null) { + jsonObjectBuilder.add("directUpload", DataAccess.uploadToDatasetAllowed(dataset, storageDriverId)); + } return jsonObjectBuilder; } diff --git 
a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index ecbad14830d..2dd6ff00c7b 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -6087,6 +6087,7 @@ public void testSetGetDatasetStorageDriver() { .body("data.name", CoreMatchers.equalTo(name)) .body("data.type", CoreMatchers.notNullValue()) .body("data.label", CoreMatchers.notNullValue()) + .body("data.directUpload", CoreMatchers.notNullValue()) .statusCode(OK.getStatusCode()); } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java b/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java index d5fecb32937..24625c87ce2 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java @@ -1,5 +1,6 @@ package edu.harvard.iq.dataverse.api; +import org.hamcrest.CoreMatchers; import software.amazon.awssdk.auth.credentials.AwsBasicCredentials; import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider; import software.amazon.awssdk.core.ResponseInputStream; @@ -153,6 +154,9 @@ public void testNonDirectUpload() { Response updatedStorageDriver = UtilIT.getStorageDriver(dataverseAlias, superuserApiToken); updatedStorageDriver.prettyPrint(); updatedStorageDriver.then().assertThat() + .body("data.type", CoreMatchers.notNullValue()) + .body("data.label", CoreMatchers.notNullValue()) + .body("data.directUpload", CoreMatchers.nullValue()) .statusCode(200); Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); From 6d1c0900db58af89ca18b1e5e842f6bca29b91df Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Wed, 27 Aug 2025 10:20:38 -0400 Subject: [PATCH 112/634] return default storage driver when undefined --- .../edu/harvard/iq/dataverse/api/Admin.java | 2 +- 
.../harvard/iq/dataverse/api/Datasets.java | 2 +- .../harvard/iq/dataverse/api/DatasetsIT.java | 19 +++++++++++++++++++ .../iq/dataverse/api/DataversesIT.java | 12 ++++++++++++ 4 files changed, 33 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 8354c8e71d2..71406d9f293 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -2197,7 +2197,7 @@ public Response getStorageDriver(@Context ContainerRequestContext crc, @PathPara return wr.getResponse(); } //Note that this returns what's set directly on this dataverse. If null/DataAccess.UNDEFINED_STORAGE_DRIVER_IDENTIFIER, the user would have to recurse the chain of parents to find the effective storageDriver - return ok(JsonPrinter.jsonStorageDriver(dataverse.getStorageDriverId(), null)); + return ok(JsonPrinter.jsonStorageDriver(dataverse.getEffectiveStorageDriverId(), null)); } @PUT diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index fa7f12e0436..151b833d3c4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -3690,7 +3690,7 @@ public Response getFileStore(@Context ContainerRequestContext crc, @PathParam("i return error(Response.Status.NOT_FOUND, "No such dataset"); } - return ok(JsonPrinter.jsonStorageDriver(dataset.getStorageDriverId(), dataset)); + return ok(JsonPrinter.jsonStorageDriver(dataset.getEffectiveStorageDriverId(), dataset)); } @PUT diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 2dd6ff00c7b..46da46e68ea 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -6089,6 +6089,25 @@ public 
void testSetGetDatasetStorageDriver() { .body("data.label", CoreMatchers.notNullValue()) .body("data.directUpload", CoreMatchers.notNullValue()) .statusCode(OK.getStatusCode()); + + // Test dataset under root with default storage driver + Response getStorageDriverResponse = UtilIT.getStorageDriver("root", apiToken); + getStorageDriverResponse.prettyPrint(); + data = JsonUtil.getJsonObject(getStorageDriverResponse.getBody().asString()); + name = data.getJsonObject("data").getString("name"); + String type = data.getJsonObject("data").getString("type"); + String label = data.getJsonObject("data").getString("label"); + createDataset = UtilIT.createRandomDatasetViaNativeApi("root", apiToken); + datasetId = UtilIT.getDatasetIdFromResponse(createDataset); + getDriver = UtilIT.getDatasetStorageDriver(datasetId, apiToken); + getDriver.prettyPrint(); + assertEquals(200, getDriver.getStatusCode()); + getDriver.then().assertThat() + .body("data.name", CoreMatchers.equalTo(name)) + .body("data.type", CoreMatchers.equalTo(type)) + .body("data.label", CoreMatchers.equalTo(label)) + .body("data.directUpload", CoreMatchers.notNullValue()) + .statusCode(OK.getStatusCode()); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 3e1a160c9f2..64ce24076a0 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -2371,6 +2371,18 @@ public void testCreateAndGetTemplates() { getTemplateResponse.then().assertThat().statusCode(UNAUTHORIZED.getStatusCode()); } + @Test + public void testGetStorageDriver() { + Response updatedStorageDriver = UtilIT.getStorageDriver("root", getSuperuserToken()); + updatedStorageDriver.prettyPrint(); + updatedStorageDriver.then().assertThat() + .body("data.name", CoreMatchers.notNullValue()) + .body("data.type", CoreMatchers.notNullValue()) + .body("data.label", 
CoreMatchers.notNullValue()) + .body("data.directUpload", CoreMatchers.nullValue()) + .statusCode(200); + } + private String getSuperuserToken() { Response createResponse = UtilIT.createRandomUser(); String adminApiToken = UtilIT.getApiTokenFromResponse(createResponse); From b88796c8574ea3b335e50a39ae3b0353b72559f5 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Wed, 27 Aug 2025 11:50:13 -0400 Subject: [PATCH 113/634] add new query param to api call and directDownload to json output --- .../11695-change-api-get-storage-driver.md | 2 ++ .../source/admin/dataverses-datasets.rst | 4 ++++ src/main/java/edu/harvard/iq/dataverse/api/Admin.java | 11 ++++++++--- .../harvard/iq/dataverse/util/json/JsonPrinter.java | 6 ++++++ .../java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 4 +++- .../edu/harvard/iq/dataverse/api/DataversesIT.java | 11 ++++++++++- .../java/edu/harvard/iq/dataverse/api/UtilIT.java | 6 +++++- 7 files changed, 38 insertions(+), 6 deletions(-) diff --git a/doc/release-notes/11695-change-api-get-storage-driver.md b/doc/release-notes/11695-change-api-get-storage-driver.md index 3680da4b72c..cd78c642a2a 100644 --- a/doc/release-notes/11695-change-api-get-storage-driver.md +++ b/doc/release-notes/11695-change-api-get-storage-driver.md @@ -7,4 +7,6 @@ The API for getting the Storage Driver info has been changed/extended. /api/admin/dataverse/{dataverse-alias}/storageDriver changed "message" to "name" and added "type" and "label" +Also added query param for /api/admin/dataverse/{dataverse-alias}/storageDriver?defaultToOwner=true to recurse the chain of parents to find the effective storageDriver + See also [the guides](https://dataverse-guide--11664.org.readthedocs.build/en/11664/api/native-api.html#configure-a-dataset-to-store-all-new-files-in-a-specific-file-store), #11695, and #11664. 
diff --git a/doc/sphinx-guides/source/admin/dataverses-datasets.rst b/doc/sphinx-guides/source/admin/dataverses-datasets.rst index a37819c90e1..fcfbd0c62f3 100644 --- a/doc/sphinx-guides/source/admin/dataverses-datasets.rst +++ b/doc/sphinx-guides/source/admin/dataverses-datasets.rst @@ -60,6 +60,10 @@ The current driver can be seen using:: curl -H "X-Dataverse-key: $API_TOKEN" http://$SERVER/api/admin/dataverse/$dataverse-alias/storageDriver +Or to recurse the chain of parents to find the effective storageDriver:: + + curl -H "X-Dataverse-key: $API_TOKEN" http://$SERVER/api/admin/dataverse/$dataverse-alias/storageDriver?defaultToOwner=true + (Note that for ``dataverse.files.store1.label=MyLabel``, ``store1`` will be returned.) and can be reset to the default store with:: diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 71406d9f293..48246820bc7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -2183,7 +2183,8 @@ public Response addRoleAssignementsToChildren(@Context ContainerRequestContext c @GET @AuthRequired @Path("/dataverse/{alias}/storageDriver") - public Response getStorageDriver(@Context ContainerRequestContext crc, @PathParam("alias") String alias) throws WrappedResponse { + public Response getStorageDriver(@Context ContainerRequestContext crc, @PathParam("alias") String alias, + @QueryParam("defaultToOwner") Boolean defaultToOwner) throws WrappedResponse { Dataverse dataverse = dataverseSvc.findByAlias(alias); if (dataverse == null) { return error(Response.Status.NOT_FOUND, "Could not find dataverse based on alias supplied: " + alias + "."); @@ -2196,8 +2197,12 @@ public Response getStorageDriver(@Context ContainerRequestContext crc, @PathPara } catch (WrappedResponse wr) { return wr.getResponse(); } - //Note that this returns what's set directly on this dataverse. 
If null/DataAccess.UNDEFINED_STORAGE_DRIVER_IDENTIFIER, the user would have to recurse the chain of parents to find the effective storageDriver - return ok(JsonPrinter.jsonStorageDriver(dataverse.getEffectiveStorageDriverId(), null)); + + if (defaultToOwner != null && defaultToOwner) { + return ok(JsonPrinter.jsonStorageDriver(dataverse.getEffectiveStorageDriverId(), null)); + } else { + return ok(JsonPrinter.jsonStorageDriver(dataverse.getStorageDriverId(), null)); + } } @PUT diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 7dfb0ab3c2a..a6066b1da64 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -40,6 +40,7 @@ import edu.harvard.iq.dataverse.workflow.Workflow; import edu.harvard.iq.dataverse.workflow.step.WorkflowStepData; +import java.io.IOException; import java.util.*; import jakarta.json.Json; import jakarta.json.JsonArrayBuilder; @@ -1641,6 +1642,11 @@ public static JsonObjectBuilder jsonStorageDriver(String storageDriverId, Datase jsonObjectBuilder.add("label", DataAccess.getStorageDriverLabelFor(storageDriverId)); if (dataset != null) { jsonObjectBuilder.add("directUpload", DataAccess.uploadToDatasetAllowed(dataset, storageDriverId)); + try { + jsonObjectBuilder.add("directDownload", DataAccess.getStorageIO(dataset).downloadRedirectEnabled()); + } catch (IOException ex) { + logger.fine("Failed to get Storage IO for dataset " + ex.getMessage()); + } } return jsonObjectBuilder; diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 46da46e68ea..25e4451ac9f 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -6088,10 +6088,11 @@ public void testSetGetDatasetStorageDriver() { 
.body("data.type", CoreMatchers.notNullValue()) .body("data.label", CoreMatchers.notNullValue()) .body("data.directUpload", CoreMatchers.notNullValue()) + .body("data.directDownload", CoreMatchers.notNullValue()) .statusCode(OK.getStatusCode()); // Test dataset under root with default storage driver - Response getStorageDriverResponse = UtilIT.getStorageDriver("root", apiToken); + Response getStorageDriverResponse = UtilIT.getStorageDriver("root", apiToken, Boolean.TRUE); getStorageDriverResponse.prettyPrint(); data = JsonUtil.getJsonObject(getStorageDriverResponse.getBody().asString()); name = data.getJsonObject("data").getString("name"); @@ -6107,6 +6108,7 @@ public void testSetGetDatasetStorageDriver() { .body("data.type", CoreMatchers.equalTo(type)) .body("data.label", CoreMatchers.equalTo(label)) .body("data.directUpload", CoreMatchers.notNullValue()) + .body("data.directDownload", CoreMatchers.notNullValue()) .statusCode(OK.getStatusCode()); } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 64ce24076a0..4cc3ddb0be6 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -1,5 +1,6 @@ package edu.harvard.iq.dataverse.api; +import edu.harvard.iq.dataverse.dataaccess.DataAccess; import edu.harvard.iq.dataverse.util.json.JsonParseException; import edu.harvard.iq.dataverse.util.json.JsonParser; import edu.harvard.iq.dataverse.util.json.JsonUtil; @@ -2373,13 +2374,21 @@ public void testCreateAndGetTemplates() { @Test public void testGetStorageDriver() { - Response updatedStorageDriver = UtilIT.getStorageDriver("root", getSuperuserToken()); + Response updatedStorageDriver = UtilIT.getStorageDriver("root", getSuperuserToken(), Boolean.TRUE); updatedStorageDriver.prettyPrint(); updatedStorageDriver.then().assertThat() .body("data.name", CoreMatchers.notNullValue()) .body("data.type", 
CoreMatchers.notNullValue()) .body("data.label", CoreMatchers.notNullValue()) .body("data.directUpload", CoreMatchers.nullValue()) + .body("data.directDownload", CoreMatchers.nullValue()) + .statusCode(200); + + // Root without default is undefined + updatedStorageDriver = UtilIT.getStorageDriver("root", getSuperuserToken(), null); + updatedStorageDriver.prettyPrint(); + updatedStorageDriver.then().assertThat() + .body("data.name", CoreMatchers.equalTo(DataAccess.UNDEFINED_STORAGE_DRIVER_IDENTIFIER)) .statusCode(200); } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 9cf320666c6..67028d8be06 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -2911,9 +2911,13 @@ static Response listStorageDrivers(String apiToken) { } static Response getStorageDriver(String dvAlias, String apiToken) { + return getStorageDriver(dvAlias, apiToken, null); + } + static Response getStorageDriver(String dvAlias, String apiToken, Boolean defaultToOwner) { + String params = defaultToOwner != null ? 
"?defaultToOwner=" + defaultToOwner : ""; return given() .header(API_TOKEN_HTTP_HEADER, apiToken) - .get("/api/admin/dataverse/" + dvAlias + "/storageDriver"); + .get("/api/admin/dataverse/" + dvAlias + "/storageDriver" + params); } static Response setStorageDriver(String dvAlias, String label, String apiToken) { From 409f77da07a67319c8bf99d4de2373f59ed922bc Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 27 Aug 2025 14:50:30 -0400 Subject: [PATCH 114/634] update Maven snapshot release id and URL #11766 #11512 --- modules/dataverse-spi/pom.xml | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/modules/dataverse-spi/pom.xml b/modules/dataverse-spi/pom.xml index 8c31a2a4026..a603e274234 100644 --- a/modules/dataverse-spi/pom.xml +++ b/modules/dataverse-spi/pom.xml @@ -64,11 +64,13 @@ - ossrh - https://s01.oss.sonatype.org/content/repositories/snapshots + central + https://central.sonatype.com/repository/maven-snapshots/ + ossrh + https://s01.oss.sonatype.org/service/local/staging/deploy/maven2/ @@ -110,7 +112,9 @@ nexus-staging-maven-plugin true + ossrh + https://s01.oss.sonatype.org true From 4ed1879c26e947fcb81d085fa7431541a76c291f Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 27 Aug 2025 14:51:08 -0400 Subject: [PATCH 115/634] explain how to publish a maven snapshot locally #11766 #11512 --- .../developers/making-library-releases.rst | 26 +++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/doc/sphinx-guides/source/developers/making-library-releases.rst b/doc/sphinx-guides/source/developers/making-library-releases.rst index be867f9196a..0daa7fb89db 100755 --- a/doc/sphinx-guides/source/developers/making-library-releases.rst +++ b/doc/sphinx-guides/source/developers/making-library-releases.rst @@ -36,6 +36,32 @@ Releasing a Snapshot Version to Maven Central That is to say, to make a snapshot release, you only need to get one or more commits into the default branch. 
+It's possible, of course, to make snapshot releases outside of GitHub Actions, from environments such as your laptop. Generally, you'll want to look at the GitHub Action and try to do the equivalent. You'll need a file set up locally at ``~/.m2/settings.xml`` with the following (contact a core developer for the redacted bits): + +.. code-block:: bash + + + + + central + REDACTED + REDACTED + + + + +Then, study the GitHub Action and perform similar commands from your local environment. For example, as of this writing, for the dataverse-spi project, you can run the following commands, substituting the suffix you need: + +``mvn -f modules/dataverse-spi -Dproject.version.suffix="2.1.0-PR11767-SNAPSHOT" verify`` + +``mvn -f modules/dataverse-spi -Dproject.version.suffix="2.1.0-PR11767-SNAPSHOT" deploy`` + +This will upload the snapshot here, for example: https://central.sonatype.com/repository/maven-snapshots/io/gdcc/dataverse-spi/2.1.02.1.0-PR11767-SNAPSHOT/dataverse-spi-2.1.02.1.0-PR11767-20250827.182026-1.jar + +Before OSSRH was retired, you could browse through snapshot jars you published at https://s01.oss.sonatype.org/content/repositories/snapshots/io/gdcc/dataverse-spi/2.0.0-PR9685-SNAPSHOT/, for example. Now, even though you may see the URL of the jar as shown above during the "deploy" step, if you try to browse the various snapshot jars at https://central.sonatype.com/repository/maven-snapshots/io/gdcc/dataverse-spi/2.1.02.1.0-PR11767-SNAPSHOT/ you'll see "This maven2 hosted repository is not directly browseable at this URL. Please use the browse or HTML index views to inspect the contents of this repository." Sadly, the "browse" and "HTML index" links don't work, as noted in a `question `_ on the Sonatype Community forum. Below is a suggestion for confirming that the jar was uploaded properly, which is to use Maven to copy the jar to your local directory. You could then compare checksums. 
+ +``mvn dependency:copy -DrepoUrl=https://central.sonatype.com/repository/maven-snapshots/ -Dartifact=io.gdcc:dataverse-spi:2.1.02.1.0-PR11767-SNAPSHOT -DoutputDirectory=.`` + Releasing a Release (Non-Snapshot) Version to Maven Central ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From c688965eb4a64f4faacc70a164be62e35ee39c41 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 27 Aug 2025 15:00:45 -0400 Subject: [PATCH 116/634] #11710 separate perms test from lookup --- .../iq/dataverse/DataverseServiceBean.java | 51 ++++++++++++++++++ .../iq/dataverse/PermissionServiceBean.java | 14 ++--- .../iq/dataverse/api/AbstractApiBean.java | 6 ++- .../dataverse/api/DataverseFeaturedItems.java | 2 +- .../harvard/iq/dataverse/api/Dataverses.java | 53 ++++++++++--------- 5 files changed, 88 insertions(+), 38 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java index 85d4179a6d9..d3f96da107e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java @@ -687,6 +687,57 @@ public List filterDataversesByNamePattern(String pattern) { return searchResults; } + /* + This method takes a search parameter and expands it into a list of + Dataverses with matching names. + The search is performed on the name with the trailing word "dataverse" + stripped (if present). This way the search on "data" (or on "da" pr + "dat") does NOT return almost every dataverse in the database - since + most of them have names that end in "... Dataverse". + The query isn't pretty, but it works, and it's still EJB QL (and NOT a + native query). 
+ */ + public List filterDataversesByNameAliasPattern(String pattern) { + + pattern = pattern.toLowerCase(); + + String pattern1 = pattern + "%"; + String pattern2 = "% " + pattern + "%"; + + // Adjust the queries for very short, 1 and 2-character patterns: + if (pattern.length() == 1) { + pattern1 = pattern; + pattern2 = pattern + " %"; + } + /*if (pattern.length() == 2) { + pattern2 = pattern + "%"; + }*/ + + + String qstr = "select dv from Dataverse dv " + + "where (LOWER(dv.name) LIKE :dataverse and ((SUBSTRING(LOWER(dv.name),0,(LENGTH(dv.name)-9)) LIKE :pattern1) " + + " or (SUBSTRING(LOWER(dv.name),0,(LENGTH(dv.name)-9)) LIKE :pattern2))) " + + "or (LOWER(dv.name) NOT LIKE :dataverse and ((LOWER(dv.name) LIKE :pattern1) " + + " or (LOWER(dv.name) LIKE :pattern2)) " + + "or (LOWER(dv.alias) LIKE :pattern1) " + + " or (LOWER(dv.alias) LIKE :pattern2))) " + + "order by dv.alias"; + + List searchResults = null; + + try { + searchResults = em.createQuery(qstr, Dataverse.class) + .setParameter("dataverse", "%dataverse") + .setParameter("pattern1", pattern1) + .setParameter("pattern2", pattern2) + .getResultList(); + } catch (Exception ex) { + searchResults = null; + } + + return searchResults; + } + /** * Used to identify and properly display Harvested objects on the dataverse page. * diff --git a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java index 6fe5db974eb..a4111963ea2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java @@ -157,7 +157,7 @@ AND EXISTS (SELECT id FROM dataverserole WHERE dataverserole.id = roleassignment AND @IPRANGESQL ) ) - ) AND ((LOWER(DATAVERSE.alias) LIKE '%@SEARCHTERM%') OR (LOWER(DATAVERSE.name) LIKE '%@SEARCHTERM%') OR (LOWER(DATAVERSE.affiliation) LIKE '%@SEARCHTERM%')) + ) """; /** * A request-level permission query (e.g includes IP ras). 
@@ -922,15 +922,8 @@ public List findPermittedCollections(DataverseRequest request, Authen return findPermittedCollections(request, user, 1 << permission.ordinal()); } - public List findPermittedCollections(DataverseRequest request, AuthenticatedUser user, Permission permission, String searchTerm) { - return findPermittedCollections(request, user, 1 << permission.ordinal(), searchTerm); - } - - public List findPermittedCollections(DataverseRequest request, AuthenticatedUser user, int permissionBit) { - return findPermittedCollections(request, user, permissionBit, ""); - } - public List findPermittedCollections(DataverseRequest request, AuthenticatedUser user, int permissionBit, String searchTerm) { + public List findPermittedCollections(DataverseRequest request, AuthenticatedUser user, int permissionBit) { if (user != null) { // IP Group - Only check IP if a User is calling for themself String ipRangeSQL = "FALSE"; @@ -962,8 +955,7 @@ public List findPermittedCollections(DataverseRequest request, Authen String sqlCode = LIST_ALL_DATAVERSES_USER_HAS_PERMISSION .replace("@USERID", String.valueOf(user.getId())) .replace("@PERMISSIONBIT", String.valueOf(permissionBit)) - .replace("@IPRANGESQL", ipRangeSQL) - .replace("@SEARCHTERM", searchTerm); + .replace("@IPRANGESQL", ipRangeSQL); return em.createNativeQuery(sqlCode, Dataverse.class).getResultList(); } return null; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index f99727b16db..938d7fc9081 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -601,7 +601,7 @@ protected DvObject findDvo(@NotNull final String id) throws WrappedResponse { * @throws WrappedResponse */ @NotNull - protected DvObject findDvoByIdAndTypeOrDie(@NotNull final String dvIdtf, String type) throws WrappedResponse { + protected DvObject 
findDvoByIdAndTypeOrDie(@NotNull final String dvIdtf, String type, Boolean testForReleased) throws WrappedResponse { try { DataverseFeaturedItem.TYPES dvType = DataverseFeaturedItem.getDvType(type); DvObject dvObject = null; @@ -634,7 +634,9 @@ protected DvObject findDvoByIdAndTypeOrDie(@NotNull final String dvIdtf, String } } } - DataverseFeaturedItem.validateTypeAndDvObject(dvIdtf, dvObject, dvType); + if (testForReleased){ + DataverseFeaturedItem.validateTypeAndDvObject(dvIdtf, dvObject, dvType); + } return dvObject; } catch (IllegalArgumentException e) { throw new WrappedResponse(error(Response.Status.BAD_REQUEST, e.getMessage())); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DataverseFeaturedItems.java b/src/main/java/edu/harvard/iq/dataverse/api/DataverseFeaturedItems.java index 00f1aa76e7e..7fbdd79e3c3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/DataverseFeaturedItems.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/DataverseFeaturedItems.java @@ -63,7 +63,7 @@ public Response updateFeaturedItem(@Context ContainerRequestContext crc, if (dataverseFeaturedItem == null) { throw new WrappedResponse(error(Response.Status.NOT_FOUND, MessageFormat.format(BundleUtil.getStringFromBundle("dataverseFeaturedItems.errors.notFound"), id))); } - DvObject dvObject = (dvObjectIdtf != null) ? findDvoByIdAndTypeOrDie(dvObjectIdtf, type) : null; + DvObject dvObject = (dvObjectIdtf != null) ? 
findDvoByIdAndTypeOrDie(dvObjectIdtf, type, true) : null; UpdatedDataverseFeaturedItemDTO updatedDataverseFeaturedItemDTO = UpdatedDataverseFeaturedItemDTO.fromFormData(content, displayOrder, keepFile, imageFileInputStream, contentDispositionHeader, type, dvObject); return ok(json(execCommand(new UpdateDataverseFeaturedItemCommand(createDataverseRequest(getRequestUser(crc)), dataverseFeaturedItem, updatedDataverseFeaturedItemDTO)))); } catch (WrappedResponse e) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index bf332a5bb3d..f59cfbc3bab 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -70,6 +70,7 @@ import jakarta.ws.rs.WebApplicationException; import jakarta.ws.rs.core.Context; import jakarta.ws.rs.core.StreamingOutput; +import java.sql.PreparedStatement; import org.glassfish.jersey.media.multipart.FormDataBodyPart; import org.glassfish.jersey.media.multipart.FormDataContentDisposition; import org.glassfish.jersey.media.multipart.FormDataParam; @@ -120,7 +121,7 @@ public class Dataverses extends AbstractApiBean { @EJB DataverseFeaturedItemServiceBean dataverseFeaturedItemServiceBean; - + @POST @AuthRequired public Response addRoot(@Context ContainerRequestContext crc, String body) { @@ -1777,37 +1778,41 @@ public Response linkDataverse(@Context ContainerRequestContext crc, @PathParam(" @AuthRequired @Produces(MediaType.APPLICATION_JSON) @Path("{identifier}/{type}/linkingDataverses") - public Response getLinkingDataverseList(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @QueryParam("searchTerm") String searchTerm, @PathParam("type") String type){ + public Response getLinkingDataverseList(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @QueryParam("searchTerm") String searchTerm, @PathParam("type") String type) { //first 
determine what you are linking based on identifier and type + + AuthenticatedUser requestUser = (AuthenticatedUser) getRequestUser(crc); + DataverseRequest dvReq = new DataverseRequest(requestUser, (IpAddress) null); + List dataversesForLinking; + dataversesForLinking = permissionService.findPermittedCollections(dvReq, requestUser, Permission.LinkDataset); try { - DvObject dvObject = findDvoByIdAndTypeOrDie(dvIdtf, type); - // List dataversesForLinking = dataverseService.filterDataversesForLinking(searchTerm, createDataverseRequest(getRequestUser(crc)), dvObject); - // public List findPermittedCollections(DataverseRequest request, AuthenticatedUser user, Permission permission, String searchTerm) { - - AuthenticatedUser requestUser = (AuthenticatedUser)getRequestUser(crc); - List dataversesForLinking = new ArrayList<>(); - - if ((dvObject instanceof Dataset)) { - dataversesForLinking = permissionService.findPermittedCollections( new DataverseRequest(requestUser, (IpAddress) null), requestUser, Permission.LinkDataset, searchTerm); - } else { - dataversesForLinking = permissionService.findPermittedCollections( new DataverseRequest(requestUser, (IpAddress) null), requestUser, Permission.LinkDataverse, searchTerm); - - } - - dataversesForLinking = dataverseService.removeUnlinkableDataverses(dataversesForLinking, dvObject); - JsonArrayBuilder dvBuilder = Json.createArrayBuilder(); - if (dataversesForLinking != null && !dataversesForLinking.isEmpty()) { + + DvObject dvObject = findDvoByIdAndTypeOrDie(dvIdtf, type, false); + List dataversesForLinkingSearch = new ArrayList(); + dataversesForLinkingSearch = dataverseService.filterDataversesByNameAliasPattern(searchTerm); + + List mergedWithSearch = new ArrayList<>(); + dataversesForLinking = dataverseService.removeUnlinkableDataverses(dataversesForLinking, dvObject); + if (!dataversesForLinkingSearch.isEmpty()) { for (Dataverse dv : dataversesForLinking) { + if (dataversesForLinkingSearch.contains(dv)) { + 
mergedWithSearch.add(dv); + } + } + } + JsonArrayBuilder dvBuilder = Json.createArrayBuilder(); + if (!mergedWithSearch.isEmpty()) { + for (Dataverse dv : mergedWithSearch) { dvBuilder.add(json(dv, true)); } } - return ok(dvBuilder); + return ok(dvBuilder); } catch (WrappedResponse wr) { return wr.getResponse(); - } catch (Exception e){ + } catch (Exception e) { return error(Status.BAD_REQUEST, e.getLocalizedMessage()); - + } } @@ -1852,7 +1857,7 @@ public Response createFeaturedItem(@Context ContainerRequestContext crc, try { dataverse = findDataverseOrDie(dvIdtf); if (dvObjectIdtf != null) { - dvObject = findDvoByIdAndTypeOrDie(dvObjectIdtf, type); + dvObject = findDvoByIdAndTypeOrDie(dvObjectIdtf, type, true); } } catch (WrappedResponse wr) { return wr.getResponse(); @@ -1940,7 +1945,7 @@ public Response updateFeaturedItems( // ignore dvObject if the id is missing or an empty string DvObject dvObject = dvObjectIdtf.get(i) != null && !dvObjectIdtf.get(i).isEmpty() - ? findDvoByIdAndTypeOrDie(dvObjectIdtf.get(i), types.get(i)) : null; + ? findDvoByIdAndTypeOrDie(dvObjectIdtf.get(i), types.get(i), true) : null; if (ids.get(i) == 0) { newItems.add(NewDataverseFeaturedItemDTO.fromFormData( contents.get(i), displayOrders.get(i), fileInputStream, contentDisposition, types.get(i), dvObject)); From f570aaaf411b76f6147d37c06a80fceaaa98962b Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 27 Aug 2025 16:24:05 -0400 Subject: [PATCH 117/634] add TODO comments to spi workflow #11766 #11512 --- .github/workflows/spi_release.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/spi_release.yml b/.github/workflows/spi_release.yml index 6398edca412..1d29cdf76c9 100644 --- a/.github/workflows/spi_release.yml +++ b/.github/workflows/spi_release.yml @@ -42,6 +42,7 @@ jobs: with: java-version: '17' distribution: 'adopt' + # TODO: change this from ossrh to central? 
server-id: ossrh server-username: MAVEN_USERNAME server-password: MAVEN_PASSWORD @@ -80,6 +81,7 @@ jobs: with: java-version: '17' distribution: 'adopt' + # TODO: change this from ossrh to central? server-id: ossrh server-username: MAVEN_USERNAME server-password: MAVEN_PASSWORD From 96c8b25c2efcaf58ebe47e62fb1356eed56bfdc2 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 27 Aug 2025 16:28:46 -0400 Subject: [PATCH 118/634] #11710 remove unused method --- .../iq/dataverse/DataverseServiceBean.java | 51 ------------------- .../harvard/iq/dataverse/api/Dataverses.java | 12 ++--- 2 files changed, 6 insertions(+), 57 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java index d3f96da107e..85d4179a6d9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java @@ -687,57 +687,6 @@ public List filterDataversesByNamePattern(String pattern) { return searchResults; } - /* - This method takes a search parameter and expands it into a list of - Dataverses with matching names. - The search is performed on the name with the trailing word "dataverse" - stripped (if present). This way the search on "data" (or on "da" pr - "dat") does NOT return almost every dataverse in the database - since - most of them have names that end in "... Dataverse". - The query isn't pretty, but it works, and it's still EJB QL (and NOT a - native query). 
- */ - public List filterDataversesByNameAliasPattern(String pattern) { - - pattern = pattern.toLowerCase(); - - String pattern1 = pattern + "%"; - String pattern2 = "% " + pattern + "%"; - - // Adjust the queries for very short, 1 and 2-character patterns: - if (pattern.length() == 1) { - pattern1 = pattern; - pattern2 = pattern + " %"; - } - /*if (pattern.length() == 2) { - pattern2 = pattern + "%"; - }*/ - - - String qstr = "select dv from Dataverse dv " - + "where (LOWER(dv.name) LIKE :dataverse and ((SUBSTRING(LOWER(dv.name),0,(LENGTH(dv.name)-9)) LIKE :pattern1) " - + " or (SUBSTRING(LOWER(dv.name),0,(LENGTH(dv.name)-9)) LIKE :pattern2))) " - + "or (LOWER(dv.name) NOT LIKE :dataverse and ((LOWER(dv.name) LIKE :pattern1) " - + " or (LOWER(dv.name) LIKE :pattern2)) " - + "or (LOWER(dv.alias) LIKE :pattern1) " - + " or (LOWER(dv.alias) LIKE :pattern2))) " - + "order by dv.alias"; - - List searchResults = null; - - try { - searchResults = em.createQuery(qstr, Dataverse.class) - .setParameter("dataverse", "%dataverse") - .setParameter("pattern1", pattern1) - .setParameter("pattern2", pattern2) - .getResultList(); - } catch (Exception ex) { - searchResults = null; - } - - return searchResults; - } - /** * Used to identify and properly display Harvested objects on the dataverse page. 
* diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index f59cfbc3bab..512b7b00fab 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -1780,17 +1780,17 @@ public Response linkDataverse(@Context ContainerRequestContext crc, @PathParam(" @Path("{identifier}/{type}/linkingDataverses") public Response getLinkingDataverseList(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @QueryParam("searchTerm") String searchTerm, @PathParam("type") String type) { //first determine what you are linking based on identifier and type - - AuthenticatedUser requestUser = (AuthenticatedUser) getRequestUser(crc); - DataverseRequest dvReq = new DataverseRequest(requestUser, (IpAddress) null); - List dataversesForLinking; - dataversesForLinking = permissionService.findPermittedCollections(dvReq, requestUser, Permission.LinkDataset); try { DvObject dvObject = findDvoByIdAndTypeOrDie(dvIdtf, type, false); List dataversesForLinkingSearch = new ArrayList(); - dataversesForLinkingSearch = dataverseService.filterDataversesByNameAliasPattern(searchTerm); + dataversesForLinkingSearch = dataverseService.filterDataversesByNamePattern(searchTerm); + + AuthenticatedUser requestUser = (AuthenticatedUser) getRequestUser(crc); + DataverseRequest dvReq = new DataverseRequest(requestUser, (IpAddress) null); + List dataversesForLinking; + dataversesForLinking = permissionService.findPermittedCollections(dvReq, requestUser, Permission.LinkDataset); List mergedWithSearch = new ArrayList<>(); dataversesForLinking = dataverseService.removeUnlinkableDataverses(dataversesForLinking, dvObject); From 0500d7e7066dc36c81f640cafcbc63de8682e395 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 27 Aug 2025 17:13:59 -0400 Subject: [PATCH 119/634] Another experimental approach (#11766) --- 
.../io/gdcc/spi/export/ExportDataOption.java | 50 +++++++++++++++++++ .../gdcc/spi/export/ExportDataProvider.java | 19 +++---- 2 files changed, 57 insertions(+), 12 deletions(-) create mode 100644 modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataOption.java diff --git a/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataOption.java b/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataOption.java new file mode 100644 index 00000000000..1c447cc53e3 --- /dev/null +++ b/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataOption.java @@ -0,0 +1,50 @@ +package io.gdcc.spi.export; + +/** + * + * @author landreev + * Provides a mechanism for defining various data retrieval options for the + * export subsystem in a way that should allow us adding support for more + * options going forward with minimal or no changes to the existing code in + * export plugins. + */ +public class ExportDataOption { + + public enum SupportedOptions { + DatasetMetadataOnly, + PublicFilesOnly; + } + + private SupportedOptions optionType; + + /*public static ExportDataOption addOption(String option) { + ExportDataOption ret = new ExportDataOption(); + + for (SupportedOptions supported : SupportedOptions.values()) { + if (supported.toString().equals(option)) { + ret.optionType = supported; + } + } + return ret; + }*/ + + public static ExportDataOption addDatasetMetadataOnly() { + ExportDataOption ret = new ExportDataOption(); + ret.optionType = SupportedOptions.DatasetMetadataOnly; + return ret; + } + + public static ExportDataOption addPublicFilesOnly() { + ExportDataOption ret = new ExportDataOption(); + ret.optionType = SupportedOptions.PublicFilesOnly; + return ret; + } + + public boolean isDatasetMetadataOnly() { + return SupportedOptions.DatasetMetadataOnly.equals(optionType); + } + + public boolean isPublicFilesOnly() { + return SupportedOptions.PublicFilesOnly.equals(optionType); + } +} diff --git 
a/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataProvider.java b/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataProvider.java index eefe0d7d828..54d98511d80 100644 --- a/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataProvider.java +++ b/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataProvider.java @@ -38,7 +38,7 @@ public interface ExportDataProvider { * dataset-level metadata along with basic file metadata for each file * in the dataset. */ - JsonObject getDatasetORE(); + JsonObject getDatasetORE(ExportDataOption... options); /** * Dataverse is capable of extracting DDI-centric metadata from tabular @@ -53,7 +53,7 @@ public interface ExportDataProvider { * edu.harvard.iq.dataverse.util.json.JSONPrinter classes where this * output is used/generated (respectively). */ - JsonArray getDatasetFileDetails(); + JsonArray getDatasetFileDetails(ExportDataOption... options); /** * Similar to the above, but @@ -78,7 +78,7 @@ public interface ExportDataProvider { * a starting point for an Exporter if it simplifies your exporter * relative to using the JSON or OAI_ORE exports. */ - JsonObject getDatasetSchemaDotOrg(); + JsonObject getDatasetSchemaDotOrg(ExportDataOption... options); /** * @@ -88,7 +88,7 @@ public interface ExportDataProvider { * a starting point for an Exporter if it simplifies your exporter * relative to using the JSON or OAI_ORE exports. */ - String getDataCiteXml(); + String getDataCiteXml(ExportDataOption... options); /** * If an Exporter has specified a prerequisite format name via the @@ -108,13 +108,8 @@ public interface ExportDataProvider { * Exporter is configured to replace the internal ddi Exporter in * Dataverse. */ - default Optional getPrerequisiteInputStream() { + default Optional getPrerequisiteInputStream(ExportDataOption... 
options) { return Optional.empty(); } - - public enum ExportDataOption { - DatasetMetadataOnly, - PublicFilesOnly; - } - -} + + } From ec3f8aad2f144e4fb8ecc10680a93a18be3eb1aa Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 28 Aug 2025 09:40:43 -0400 Subject: [PATCH 120/634] #11710 fix release note --- doc/release-notes/11710-get-available-dataverses-api.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release-notes/11710-get-available-dataverses-api.md b/doc/release-notes/11710-get-available-dataverses-api.md index 6658d1d8fd5..ac33c581848 100644 --- a/doc/release-notes/11710-get-available-dataverses-api.md +++ b/doc/release-notes/11710-get-available-dataverses-api.md @@ -1,5 +1,5 @@ ### New API endpoint for retrieving a list of Dataverse Collections to which a given Dataset or Dataverse Collection may be linked --The end point also takes in a search term which currently must be the start of the collections' names. +-The end point also takes in a search term which currently must be part of the collections' names. -The user calling this API must have Link Dataset or Link Dataverse permission on the Dataverse Collections returned. -If the Collection has already been linked to the given Dataset or Collection, it will not be returned. 
From 239af1b84408371081140b4f63200ccc486ea686 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 28 Aug 2025 09:48:37 -0400 Subject: [PATCH 121/634] Update native-api.rst --- doc/sphinx-guides/source/api/native-api.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 0a208967f07..327e31124f6 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -735,7 +735,7 @@ Note: you must have "Add Dataset" permission in the given collection to invoke t List Dataverse Collections to which a given Dataset or Dataverse Collection may be linked ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -The user may provide a search term to limit the list of Dataverse Collections returned. The search term will be compared to the name of the Dataverse Collections and must include the beginning of the Dataverse Collections' names. +The user may provide a search term to limit the list of Dataverse Collections returned. The search term will be compared to the name of the Dataverse Collections. 
The response is a JSON array of the ids, aliases, and names of the Dataverse collections to which a given Dataset or Dataverse Collection may be linked: For a given Dataverse Collection: From 85a3dccdc6df01c18dc333a40aac0dbe48b80937 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 28 Aug 2025 09:54:36 -0400 Subject: [PATCH 122/634] #11710 some code cleanup --- .../iq/dataverse/DataverseServiceBean.java | 47 +++++++++---------- 1 file changed, 23 insertions(+), 24 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java index 85d4179a6d9..bc91cb4e081 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java @@ -510,15 +510,15 @@ public List filterDataversesForLinking(String query, DataverseRequest List dataverseList = new ArrayList<>(); List results = filterDataversesByNamePattern(query); - + if (results == null || results.isEmpty()) { - return null; + return null; } - + Dataset linkedDataset = null; Dataverse linkedDataverse = null; List alreadyLinkeddv_ids; - + if ((dvo instanceof Dataset)) { linkedDataset = (Dataset) dvo; alreadyLinkeddv_ids = em.createNativeQuery("SELECT linkingdataverse_id FROM datasetlinkingdataverse WHERE dataset_id = " + linkedDataset.getId()).getResultList(); @@ -527,22 +527,22 @@ public List filterDataversesForLinking(String query, DataverseRequest alreadyLinkeddv_ids = em.createNativeQuery("SELECT linkingdataverse_id FROM dataverselinkingdataverse WHERE dataverse_id = " + linkedDataverse.getId()).getResultList(); } - List remove = new ArrayList<>(); + List remove = new ArrayList<>(); if (alreadyLinkeddv_ids != null && !alreadyLinkeddv_ids.isEmpty()) { alreadyLinkeddv_ids.stream().map((testDVId) -> this.find(testDVId)).forEachOrdered((removeIt) -> { remove.add(removeIt); }); } - - if (dvo instanceof Dataverse dataverse){ + + if (dvo 
instanceof Dataverse dataverse) { remove.add(dataverse); } - + for (Dataverse res : results) { if (!remove.contains(res)) { if ((linkedDataset != null && this.permissionService.requestOn(req, res).has(Permission.LinkDataset)) - || (linkedDataverse != null && this.permissionService.requestOn(req, res).has(Permission.LinkDataverse))) { + || (linkedDataverse != null && this.permissionService.requestOn(req, res).has(Permission.LinkDataverse))) { dataverseList.add(res); } } @@ -550,44 +550,43 @@ public List filterDataversesForLinking(String query, DataverseRequest return dataverseList; } - - - public List removeUnlinkableDataverses (List allWithPerms, DvObject dvo){ + + public List removeUnlinkableDataverses(List allWithPerms, DvObject dvo) { List dataverseList = new ArrayList<>(); Dataset linkedDataset = null; Dataverse linkedDataverse = null; List alreadyLinkeddv_ids; - + if ((dvo instanceof Dataset)) { linkedDataset = (Dataset) dvo; - alreadyLinkeddv_ids = em.createNativeQuery("SELECT linkingdataverse_id FROM datasetlinkingdataverse WHERE dataset_id = " + linkedDataset.getId()).getResultList(); + alreadyLinkeddv_ids = em.createNativeQuery("SELECT linkingdataverse_id FROM datasetlinkingdataverse WHERE dataset_id = " + linkedDataset.getId()).getResultList(); } else { linkedDataverse = (Dataverse) dvo; - alreadyLinkeddv_ids = em.createNativeQuery("SELECT linkingdataverse_id FROM dataverselinkingdataverse WHERE dataverse_id = " + linkedDataverse.getId()).getResultList(); + alreadyLinkeddv_ids = em.createNativeQuery("SELECT linkingdataverse_id FROM dataverselinkingdataverse WHERE dataverse_id = " + linkedDataverse.getId()).getResultList(); } - List remove = new ArrayList<>(); + List remove = new ArrayList<>(); if (alreadyLinkeddv_ids != null && !alreadyLinkeddv_ids.isEmpty()) { alreadyLinkeddv_ids.stream().map((testDVId) -> this.find(testDVId)).forEachOrdered((removeIt) -> { remove.add(removeIt); }); } - - if (dvo instanceof Dataverse dataverse){ + + if (dvo instanceof 
Dataverse dataverse) { remove.add(dataverse); - } else { + } else { //dataset is always owned by a dataverse - remove.add((Dataverse)dvo.getOwner()); + remove.add((Dataverse) dvo.getOwner()); } - + for (Dataverse res : allWithPerms) { if (!remove.contains(res)) { - dataverseList.add(res); + dataverseList.add(res); } } - - return dataverseList; + + return dataverseList; } From 286402f94bc6ebe5b8c7a04b765fd1c905cae787 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 28 Aug 2025 12:25:24 -0400 Subject: [PATCH 123/634] #11710 skip filter if search term empty --- .../harvard/iq/dataverse/api/Dataverses.java | 20 +++++++++++++------ .../iq/dataverse/api/DataversesIT.java | 9 ++++++++- 2 files changed, 22 insertions(+), 7 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 512b7b00fab..97bc961c6b2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -1785,8 +1785,7 @@ public Response getLinkingDataverseList(@Context ContainerRequestContext crc, @P DvObject dvObject = findDvoByIdAndTypeOrDie(dvIdtf, type, false); List dataversesForLinkingSearch = new ArrayList(); - dataversesForLinkingSearch = dataverseService.filterDataversesByNamePattern(searchTerm); - + AuthenticatedUser requestUser = (AuthenticatedUser) getRequestUser(crc); DataverseRequest dvReq = new DataverseRequest(requestUser, (IpAddress) null); List dataversesForLinking; @@ -1794,13 +1793,22 @@ public Response getLinkingDataverseList(@Context ContainerRequestContext crc, @P List mergedWithSearch = new ArrayList<>(); dataversesForLinking = dataverseService.removeUnlinkableDataverses(dataversesForLinking, dvObject); - if (!dataversesForLinkingSearch.isEmpty()) { - for (Dataverse dv : dataversesForLinking) { - if (dataversesForLinkingSearch.contains(dv)) { - mergedWithSearch.add(dv); + + //Only do search lookup if 
search term is there. Otherwise just include the collections based on perms + if (!searchTerm.isEmpty()) { + dataversesForLinkingSearch = dataverseService.filterDataversesByNamePattern(searchTerm); + if (!dataversesForLinkingSearch.isEmpty()) { + for (Dataverse dv : dataversesForLinking) { + if (dataversesForLinkingSearch.contains(dv)) { + mergedWithSearch.add(dv); + } } } + } else { + //search term empty then add all based on perms + mergedWithSearch.addAll(dataversesForLinking); } + JsonArrayBuilder dvBuilder = Json.createArrayBuilder(); if (!mergedWithSearch.isEmpty()) { for (Dataverse dv : mergedWithSearch) { diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 64c88197494..5de3fbb6620 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -724,7 +724,14 @@ public void testGetLinkableDataverses(){ getLinkableDataversesForDataversePartial.prettyPrint(); getLinkableDataversesForDataversePartial.then().assertThat() .statusCode(OK.getStatusCode()) - .body("data[0].alias", equalTo(dataverseAliasForLinking)); + .body("data[0].alias", equalTo(dataverseAliasForLinking)); + + //if I give a blank search term i should get the one that I have perms on + Response getLinkableDataversesForDataverseBlank = UtilIT.getLinkableDataverses("dataverse", dataverseAlias, apiToken, ""); + getLinkableDataversesForDataverseBlank.prettyPrint(); + getLinkableDataversesForDataverseBlank.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].alias", equalTo(dataverseAliasForLinking)); //Try with empty string search term From 1ab38bb559b2aaa366cffe881e630610f8cf3510 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 15 Aug 2025 13:16:10 -0400 Subject: [PATCH 124/634] base managed executor --- .../iq/dataverse/api/MakeDataCountApi.java | 178 +++++++++++------- 1 file changed, 107 insertions(+), 71 
deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java b/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java index 562fd7fcb81..72788f1d8e7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java @@ -27,7 +27,10 @@ import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; + +import jakarta.annotation.Resource; import jakarta.ejb.EJB; +import jakarta.enterprise.concurrent.ManagedExecutorService; import jakarta.json.Json; import jakarta.json.JsonArray; import jakarta.json.JsonArrayBuilder; @@ -62,6 +65,10 @@ public class MakeDataCountApi extends AbstractApiBean { @EJB SystemConfig systemConfig; + // Inject the managed executor service provided by the container + @Resource(name = "concurrent/CitationUpdateExecutor") + private ManagedExecutorService executorService; + /** * TODO: For each dataset, send the following: * @@ -141,89 +148,118 @@ public Response addUsageMetricsFromSushiReportAll(@QueryParam("reportOnDisk") St @POST @Path("{id}/updateCitationsForDataset") - public Response updateCitationsForDataset(@PathParam("id") String id) throws IOException { + public Response updateCitationsForDataset(@PathParam("id") String id) { try { - Dataset dataset = findDatasetOrDie(id); - GlobalId pid = dataset.getGlobalId(); - PidProvider pidProvider = PidUtil.getPidProvider(pid.getProviderId()); + // First validate that the dataset exists and has a valid DOI + final Dataset dataset = findDatasetOrDie(id); + final GlobalId pid = dataset.getGlobalId(); + final PidProvider pidProvider = PidUtil.getPidProvider(pid.getProviderId()); + // Only supported for DOIs and for DataCite DOI providers if(!DataCiteDOIProvider.TYPE.equals(pidProvider.getProviderType())) { return error(Status.BAD_REQUEST, "Only DataCite DOI providers are supported"); } - String persistentId = pid.toString(); - - // DataCite wants "doi=", not 
"doi:". - String authorityPlusIdentifier = persistentId.replaceFirst("doi:", ""); - // Request max page size and then loop to handle multiple pages - URL url = null; - try { - url = new URI(JvmSettings.DATACITE_REST_API_URL.lookup(pidProvider.getId()) + - "/events?doi=" + - authorityPlusIdentifier + - "&source=crossref&page[size]=1000&page[cursor]=1").toURL(); - } catch (URISyntaxException e) { - //Nominally this means a config error/ bad DATACITE_REST_API_URL for this provider - logger.warning("Unable to create URL for " + persistentId + ", pidProvider " + pidProvider.getId()); - return error(Status.INTERNAL_SERVER_ERROR, "Unable to create DataCite URL to retrieve citations."); - } - logger.fine("Retrieving Citations from " + url.toString()); - boolean nextPage = true; - JsonArrayBuilder dataBuilder = Json.createArrayBuilder(); - do { - HttpURLConnection connection = (HttpURLConnection) url.openConnection(); - connection.setRequestMethod("GET"); - int status = connection.getResponseCode(); - if (status != 200) { - logger.warning("Failed to get citations from " + url.toString()); - connection.disconnect(); - return error(Status.fromStatusCode(status), "Failed to get citations from " + url.toString()); - } - JsonObject report; - try (InputStream inStream = connection.getInputStream()) { - report = JsonUtil.getJsonObject(inStream); - } finally { - connection.disconnect(); - } - JsonObject links = report.getJsonObject("links"); - JsonArray data = report.getJsonArray("data"); - Iterator iter = data.iterator(); - while (iter.hasNext()) { - dataBuilder.add(iter.next()); + + // Submit the task to the managed executor service + Future future = executorService.submit(() -> { + try { + processCitationUpdate(dataset, pid, pidProvider); + } catch (Exception e) { + logger.log(Level.SEVERE, "Error processing citation update for dataset " + id, e); } - if (links.containsKey("next")) { - try { - url = new URI(links.getString("next")).toURL(); - } catch (URISyntaxException e) { - 
logger.warning("Unable to create URL from DataCite response: " + links.getString("next")); - return error(Status.INTERNAL_SERVER_ERROR, "Unable to retrieve all results from DataCite"); - } - } else { - nextPage = false; - } - logger.fine("body of citation response: " + report.toString()); - } while (nextPage == true); - JsonArray allData = dataBuilder.build(); - List datasetExternalCitations = datasetExternalCitationsService.parseCitations(allData); - /* - * ToDo: If this is the only source of citations, we should remove all the existing ones for the dataset and repopulate them. - * As is, this call doesn't remove old citations if there are now none (legacy issue if we decide to stop counting certain types of citation - * as we've done for 'hasPart'). - * If there are some, this call individually checks each one and if a matching item exists, it removes it and adds it back. Faster and better to delete all and - * add the new ones. - */ - if (!datasetExternalCitations.isEmpty()) { - for (DatasetExternalCitations dm : datasetExternalCitations) { - datasetExternalCitationsService.save(dm); - } - } - + }); + JsonObjectBuilder output = Json.createObjectBuilder(); - output.add("citationCount", datasetExternalCitations.size()); + output.add("status", "queued"); + output.add("message", "Citation update for dataset " + id + " has been queued for processing"); return ok(output); } catch (WrappedResponse wr) { return wr.getResponse(); } } + + /** + * Process the citation update for a dataset + * This method contains the logic that was previously in updateCitationsForDataset + */ + private void processCitationUpdate(Dataset dataset, GlobalId pid, PidProvider pidProvider) throws IOException { + String persistentId = pid.asRawIdentifier(); + + // Request max page size and then loop to handle multiple pages + URL url = null; + try { + url = new URI(JvmSettings.DATACITE_REST_API_URL.lookup(pidProvider.getId()) + + "/events?doi=" + + persistentId + + 
"&source=crossref&page[size]=1000&page[cursor]=1").toURL(); + } catch (URISyntaxException e) { + //Nominally this means a config error/ bad DATACITE_REST_API_URL for this provider + logger.warning("Unable to create URL for " + persistentId + ", pidProvider " + pidProvider.getId()); + return; + } + + logger.fine("Retrieving Citations from " + url.toString()); + boolean nextPage = true; + JsonArrayBuilder dataBuilder = Json.createArrayBuilder(); + + do { + HttpURLConnection connection = (HttpURLConnection) url.openConnection(); + connection.setRequestMethod("GET"); + int status = connection.getResponseCode(); + if (status != 200) { + logger.warning("Failed to get citations from " + url.toString()); + connection.disconnect(); + return; + } + + JsonObject report; + try (InputStream inStream = connection.getInputStream()) { + report = JsonUtil.getJsonObject(inStream); + } finally { + connection.disconnect(); + } + + JsonObject links = report.getJsonObject("links"); + JsonArray data = report.getJsonArray("data"); + Iterator iter = data.iterator(); + while (iter.hasNext()) { + dataBuilder.add(iter.next()); + } + + if (links.containsKey("next")) { + try { + url = new URI(links.getString("next")).toURL(); + } catch (URISyntaxException e) { + logger.warning("Unable to create URL from DataCite response: " + links.getString("next")); + return; + } + } else { + nextPage = false; + } + + logger.fine("body of citation response: " + report.toString()); + } while (nextPage == true); + + JsonArray allData = dataBuilder.build(); + List datasetExternalCitations = datasetExternalCitationsService.parseCitations(allData); + + /* + * ToDo: If this is the only source of citations, we should remove all the existing ones for the dataset and repopulate them. + * As is, this call doesn't remove old citations if there are now none (legacy issue if we decide to stop counting certain types of citation + * as we've done for 'hasPart'). 
+ * If there are some, this call individually checks each one and if a matching item exists, it removes it and adds it back. Faster and better to delete all and + * add the new ones. + */ + if (!datasetExternalCitations.isEmpty()) { + for (DatasetExternalCitations dm : datasetExternalCitations) { + datasetExternalCitationsService.save(dm); + } + } + + logger.info("Citation update completed for dataset " + dataset.getId() + + " with " + datasetExternalCitations.size() + " citations"); + } + @GET @Path("{yearMonth}/processingState") public Response getProcessingState(@PathParam("yearMonth") String yearMonth) { From 3981933314e8c695f58315455c80f4f83506f01b Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 15 Aug 2025 13:38:02 -0400 Subject: [PATCH 125/634] add API_MDC_UPDATE_MIN_DELAY_MS --- .../iq/dataverse/api/MakeDataCountApi.java | 37 +++++++++++++++++++ .../iq/dataverse/settings/JvmSettings.java | 4 ++ 2 files changed, 41 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java b/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java index 72788f1d8e7..3bed917c789 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java @@ -25,6 +25,8 @@ import java.net.URL; import java.util.Iterator; import java.util.List; +import java.util.concurrent.Future; +import java.util.concurrent.atomic.AtomicLong; import java.util.logging.Level; import java.util.logging.Logger; @@ -69,6 +71,9 @@ public class MakeDataCountApi extends AbstractApiBean { @Resource(name = "concurrent/CitationUpdateExecutor") private ManagedExecutorService executorService; + // Track the last execution time to implement rate limiting during Citation updates + private static final AtomicLong lastExecutionTime = new AtomicLong(0); + /** * TODO: For each dataset, send the following: * @@ -163,7 +168,14 @@ public Response updateCitationsForDataset(@PathParam("id") String id) { 
// Submit the task to the managed executor service Future future = executorService.submit(() -> { try { + // Apply rate limiting if enabled + applyRateLimit(); + + // Process the citation update processCitationUpdate(dataset, pid, pidProvider); + + // Update the last execution time after processing + lastExecutionTime.set(System.currentTimeMillis()); } catch (Exception e) { logger.log(Level.SEVERE, "Error processing citation update for dataset " + id, e); } @@ -178,6 +190,31 @@ public Response updateCitationsForDataset(@PathParam("id") String id) { } } + /** + * Apply rate limiting by waiting if necessary + */ + private void applyRateLimit() { + // Check if rate limiting is enabled + long minDelay = JvmSettings.API_MDC_UPDATE_MIN_DELAY_MS.lookupOptional(Long.class).orElse(0l); + + // Calculate how long to wait + long lastExecution = lastExecutionTime.get(); + long currentTime = System.currentTimeMillis(); + long elapsedTime = currentTime - lastExecution; + + // If not enough time has passed since the last execution, wait + if (lastExecution > 0 && elapsedTime < minDelay) { + long waitTime = minDelay - elapsedTime; + logger.fine("Rate limiting: waiting " + waitTime + " ms before processing next citation update"); + try { + Thread.sleep(waitTime); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + logger.warning("Rate limiting sleep interrupted: " + e.getMessage()); + } + } + } + /** * Process the citation update for a dataset * This method contains the logic that was previously in updateCitationsForDataset diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java index 53dff244ae1..87123801a3e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java @@ -93,6 +93,10 @@ public enum JvmSettings { API_BLOCKED_ENDPOINTS(SCOPE_API_BLOCKED, "endpoints"), 
API_BLOCKED_POLICY(SCOPE_API_BLOCKED, "policy"), API_BLOCKED_KEY(SCOPE_API_BLOCKED, "key"), + // API: MDC Citation updates + SCOPE_API_MDC(SCOPE_API, "mdc"), + API_MDC_UPDATE_MIN_DELAY_MS(SCOPE_API_MDC, "min-delay-ms"), + // SIGNPOSTING SETTINGS SCOPE_SIGNPOSTING(PREFIX, "signposting"), From 973ab872e203085aee6be078eba85706ac0ac601 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 15 Aug 2025 13:47:43 -0400 Subject: [PATCH 126/634] filter out hasPart etc earlier --- .../iq/dataverse/api/MakeDataCountApi.java | 22 +++++++++++++++++-- .../DatasetExternalCitationsServiceBean.java | 4 ++-- 2 files changed, 22 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java b/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java index 3bed917c789..64309886dd0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java @@ -196,7 +196,9 @@ public Response updateCitationsForDataset(@PathParam("id") String id) { private void applyRateLimit() { // Check if rate limiting is enabled long minDelay = JvmSettings.API_MDC_UPDATE_MIN_DELAY_MS.lookupOptional(Long.class).orElse(0l); - + if(minDelay ==0) { + return; + } // Calculate how long to wait long lastExecution = lastExecutionTime.get(); long currentTime = System.currentTimeMillis(); @@ -260,12 +262,28 @@ private void processCitationUpdate(Dataset dataset, GlobalId pid, PidProvider pi JsonArray data = report.getJsonArray("data"); Iterator iter = data.iterator(); while (iter.hasNext()) { - dataBuilder.add(iter.next()); + JsonValue citationValue = iter.next(); + JsonObject citation = (JsonObject) citationValue; + + // Filter out relations we don't use (e.g. hasPart) to lower memory req. 
with many files + if (citation.containsKey("attributes")) { + JsonObject attributes = citation.getJsonObject("attributes"); + if (attributes.containsKey("relation-type-id")) { + String relationshipType = attributes.getString("relation-type-id"); + + // Only add citations with relationship types we care about + if (DatasetExternalCitationsServiceBean.inboundRelationships.contains(relationshipType) || + DatasetExternalCitationsServiceBean.outboundRelationships.contains(relationshipType)) { + dataBuilder.add(citationValue); + } + } + } } if (links.containsKey("next")) { try { url = new URI(links.getString("next")).toURL(); + applyRateLimit(); } catch (URISyntaxException e) { logger.warning("Unable to create URL from DataCite response: " + links.getString("next")); return; diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetExternalCitationsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetExternalCitationsServiceBean.java index fa56432cc3c..fa87926210f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetExternalCitationsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetExternalCitationsServiceBean.java @@ -39,13 +39,13 @@ public class DatasetExternalCitationsServiceBean implements java.io.Serializable DatasetServiceBean datasetService; //Array of relationship types that are considered to be citations - static ArrayList inboundRelationships = new ArrayList( + public static ArrayList inboundRelationships = new ArrayList( Arrays.asList( "cites", "references", "supplements", "is-supplement-to")); - static ArrayList outboundRelationships = new ArrayList( + public static ArrayList outboundRelationships = new ArrayList( Arrays.asList( "is-cited-by", "is-referenced-by", From 5f6b076ab248e69f414b4df0a64508f9598c0417 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 15 Aug 2025 15:06:54 -0400 Subject: [PATCH 127/634] cleanup logging/exception handling --- 
.../iq/dataverse/api/MakeDataCountApi.java | 147 ++++++++++-------- 1 file changed, 80 insertions(+), 67 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java b/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java index 64309886dd0..6de0e86a254 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java @@ -172,10 +172,16 @@ public Response updateCitationsForDataset(@PathParam("id") String id) { applyRateLimit(); // Process the citation update - processCitationUpdate(dataset, pid, pidProvider); + boolean success = processCitationUpdate(dataset, pid, pidProvider); // Update the last execution time after processing lastExecutionTime.set(System.currentTimeMillis()); + + if (success) { + logger.fine("Successfully processed citation update for dataset " + id); + } else { + logger.warning("Failed to process citation update for dataset " + id); + } } catch (Exception e) { logger.log(Level.SEVERE, "Error processing citation update for dataset " + id, e); } @@ -220,8 +226,9 @@ private void applyRateLimit() { /** * Process the citation update for a dataset * This method contains the logic that was previously in updateCitationsForDataset + * @return true if processing was successful, false otherwise */ - private void processCitationUpdate(Dataset dataset, GlobalId pid, PidProvider pidProvider) throws IOException { + private boolean processCitationUpdate(Dataset dataset, GlobalId pid, PidProvider pidProvider) { String persistentId = pid.asRawIdentifier(); // Request max page size and then loop to handle multiple pages @@ -231,88 +238,94 @@ private void processCitationUpdate(Dataset dataset, GlobalId pid, PidProvider pi "/events?doi=" + persistentId + "&source=crossref&page[size]=1000&page[cursor]=1").toURL(); - } catch (URISyntaxException e) { + } catch (URISyntaxException | MalformedURLException e) { //Nominally this means a config error/ 
bad DATACITE_REST_API_URL for this provider logger.warning("Unable to create URL for " + persistentId + ", pidProvider " + pidProvider.getId()); - return; + return false; } logger.fine("Retrieving Citations from " + url.toString()); boolean nextPage = true; JsonArrayBuilder dataBuilder = Json.createArrayBuilder(); - do { - HttpURLConnection connection = (HttpURLConnection) url.openConnection(); - connection.setRequestMethod("GET"); - int status = connection.getResponseCode(); - if (status != 200) { - logger.warning("Failed to get citations from " + url.toString()); - connection.disconnect(); - return; - } - - JsonObject report; - try (InputStream inStream = connection.getInputStream()) { - report = JsonUtil.getJsonObject(inStream); - } finally { - connection.disconnect(); - } - - JsonObject links = report.getJsonObject("links"); - JsonArray data = report.getJsonArray("data"); - Iterator iter = data.iterator(); - while (iter.hasNext()) { - JsonValue citationValue = iter.next(); - JsonObject citation = (JsonObject) citationValue; + try { + do { + HttpURLConnection connection = (HttpURLConnection) url.openConnection(); + connection.setRequestMethod("GET"); + int status = connection.getResponseCode(); + if (status != 200) { + logger.warning("Failed to get citations from " + url.toString()); + connection.disconnect(); + return false; + } - // Filter out relations we don't use (e.g. hasPart) to lower memory req. 
with many files - if (citation.containsKey("attributes")) { - JsonObject attributes = citation.getJsonObject("attributes"); - if (attributes.containsKey("relation-type-id")) { - String relationshipType = attributes.getString("relation-type-id"); - - // Only add citations with relationship types we care about - if (DatasetExternalCitationsServiceBean.inboundRelationships.contains(relationshipType) || - DatasetExternalCitationsServiceBean.outboundRelationships.contains(relationshipType)) { - dataBuilder.add(citationValue); + JsonObject report; + try (InputStream inStream = connection.getInputStream()) { + report = JsonUtil.getJsonObject(inStream); + } finally { + connection.disconnect(); + } + + JsonObject links = report.getJsonObject("links"); + JsonArray data = report.getJsonArray("data"); + Iterator iter = data.iterator(); + while (iter.hasNext()) { + JsonValue citationValue = iter.next(); + JsonObject citation = (JsonObject) citationValue; + + // Filter out relations we don't use (e.g. hasPart) to lower memory req. 
with many files + if (citation.containsKey("attributes")) { + JsonObject attributes = citation.getJsonObject("attributes"); + if (attributes.containsKey("relation-type-id")) { + String relationshipType = attributes.getString("relation-type-id"); + + // Only add citations with relationship types we care about + if (DatasetExternalCitationsServiceBean.inboundRelationships.contains(relationshipType) || + DatasetExternalCitationsServiceBean.outboundRelationships.contains(relationshipType)) { + dataBuilder.add(citationValue); + } } } } - } + + if (links.containsKey("next")) { + try { + url = new URI(links.getString("next")).toURL(); + applyRateLimit(); + } catch (URISyntaxException e) { + logger.warning("Unable to create URL from DataCite response: " + links.getString("next")); + return false; + } + } else { + nextPage = false; + } + + logger.fine("body of citation response: " + report.toString()); + } while (nextPage == true); - if (links.containsKey("next")) { - try { - url = new URI(links.getString("next")).toURL(); - applyRateLimit(); - } catch (URISyntaxException e) { - logger.warning("Unable to create URL from DataCite response: " + links.getString("next")); - return; + JsonArray allData = dataBuilder.build(); + List datasetExternalCitations = datasetExternalCitationsService.parseCitations(allData); + + /* + * ToDo: If this is the only source of citations, we should remove all the existing ones for the dataset and repopulate them. + * As is, this call doesn't remove old citations if there are now none (legacy issue if we decide to stop counting certain types of citation + * as we've done for 'hasPart'). + * If there are some, this call individually checks each one and if a matching item exists, it removes it and adds it back. Faster and better to delete all and + * add the new ones. 
+ */ + if (!datasetExternalCitations.isEmpty()) { + for (DatasetExternalCitations dm : datasetExternalCitations) { + datasetExternalCitationsService.save(dm); } - } else { - nextPage = false; } - logger.fine("body of citation response: " + report.toString()); - } while (nextPage == true); - - JsonArray allData = dataBuilder.build(); - List datasetExternalCitations = datasetExternalCitationsService.parseCitations(allData); - - /* - * ToDo: If this is the only source of citations, we should remove all the existing ones for the dataset and repopulate them. - * As is, this call doesn't remove old citations if there are now none (legacy issue if we decide to stop counting certain types of citation - * as we've done for 'hasPart'). - * If there are some, this call individually checks each one and if a matching item exists, it removes it and adds it back. Faster and better to delete all and - * add the new ones. - */ - if (!datasetExternalCitations.isEmpty()) { - for (DatasetExternalCitations dm : datasetExternalCitations) { - datasetExternalCitationsService.save(dm); - } + logger.fine("Citation update completed for dataset " + dataset.getId() + + " with " + datasetExternalCitations.size() + " citations"); + return true; + } catch (IOException e) { + logger.log(Level.WARNING, "Error processing citation update for dataset " + dataset.getId(), e); + return false; } - - logger.info("Citation update completed for dataset " + dataset.getId() + - " with " + datasetExternalCitations.size() + " citations"); } @GET From a96dffb0678485601e50324d7cb701e08836d350 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 15 Aug 2025 16:05:33 -0400 Subject: [PATCH 128/634] handle queue full error --- .../iq/dataverse/api/MakeDataCountApi.java | 62 +++++++++++-------- 1 file changed, 35 insertions(+), 27 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java b/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java index 6de0e86a254..ca4f55da822 
100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java @@ -26,6 +26,7 @@ import java.util.Iterator; import java.util.List; import java.util.concurrent.Future; +import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.atomic.AtomicLong; import java.util.logging.Level; import java.util.logging.Logger; @@ -159,38 +160,45 @@ public Response updateCitationsForDataset(@PathParam("id") String id) { final Dataset dataset = findDatasetOrDie(id); final GlobalId pid = dataset.getGlobalId(); final PidProvider pidProvider = PidUtil.getPidProvider(pid.getProviderId()); - + // Only supported for DOIs and for DataCite DOI providers - if(!DataCiteDOIProvider.TYPE.equals(pidProvider.getProviderType())) { + if (!DataCiteDOIProvider.TYPE.equals(pidProvider.getProviderType())) { return error(Status.BAD_REQUEST, "Only DataCite DOI providers are supported"); } - + // Submit the task to the managed executor service - Future future = executorService.submit(() -> { - try { - // Apply rate limiting if enabled - applyRateLimit(); - - // Process the citation update - boolean success = processCitationUpdate(dataset, pid, pidProvider); - - // Update the last execution time after processing - lastExecutionTime.set(System.currentTimeMillis()); - - if (success) { - logger.fine("Successfully processed citation update for dataset " + id); - } else { - logger.warning("Failed to process citation update for dataset " + id); + Future future; + try { + future = executorService.submit(() -> { + try { + // Apply rate limiting if enabled + applyRateLimit(); + + // Process the citation update + boolean success = processCitationUpdate(dataset, pid, pidProvider); + + // Update the last execution time after processing + lastExecutionTime.set(System.currentTimeMillis()); + + if (success) { + logger.fine("Successfully processed citation update for dataset " + id); + } else { + 
logger.warning("Failed to process citation update for dataset " + id); + } + } catch (Exception e) { + logger.log(Level.SEVERE, "Error processing citation update for dataset " + id, e); } - } catch (Exception e) { - logger.log(Level.SEVERE, "Error processing citation update for dataset " + id, e); - } - }); - - JsonObjectBuilder output = Json.createObjectBuilder(); - output.add("status", "queued"); - output.add("message", "Citation update for dataset " + id + " has been queued for processing"); - return ok(output); + }); + + JsonObjectBuilder output = Json.createObjectBuilder(); + output.add("status", "queued"); + output.add("message", "Citation update for dataset " + id + " has been queued for processing"); + return ok(output); + } catch (RejectedExecutionException ree) { + logger.warning("Citation update for dataset " + id + " was rejected: Queue is full"); + return error(Status.SERVICE_UNAVAILABLE, + "Citation update service is currently at capacity. Please try again later."); + } } catch (WrappedResponse wr) { return wr.getResponse(); } From 5d8095b190e8be29c9bdea4d4797d29123de9d59 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 15 Aug 2025 16:05:48 -0400 Subject: [PATCH 129/634] update script for asynch api call --- conf/mdc/counter_weekly.sh | 92 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 92 insertions(+) create mode 100644 conf/mdc/counter_weekly.sh diff --git a/conf/mdc/counter_weekly.sh b/conf/mdc/counter_weekly.sh new file mode 100644 index 00000000000..67cb5df2af2 --- /dev/null +++ b/conf/mdc/counter_weekly.sh @@ -0,0 +1,92 @@ +#!/bin/sh +#counter_weekly.sh + +# This script iterates through all published Datasets in all Dataverses and calls the Make Data Count API to update their citations from DataCite +# Note: Requires curl and jq for parsing JSON responses from curl + +# A recursive method to process each Dataverse +processDV () { +echo "Processing Dataverse ID#: $1" + +#Call the Dataverse API to get the contents of the Dataverse
(without credentials, this will only list published datasets and dataverses +DVCONTENTS=$(curl -s http://localhost:8080/api/dataverses/$1/contents) + +# Iterate over all datasets, pulling the value of their DOIs (as part of the persistentUrl) from the json returned +for subds in $(echo "${DVCONTENTS}" | jq -r '.data[] | select(.type == "dataset") | .persistentUrl'); do + +#The authority/identifier are preceded by a protocol/host, i.e. https://doi.org/ +DOI=`expr "$subds" : '.*:\/\/\doi\.org\/\(.*\)'` + +# Call the Dataverse API for this dataset and capture both the response and HTTP status code +HTTP_RESPONSE=$(curl -s -w "\n%{http_code}" -X POST "http://localhost:8080/api/admin/makeDataCount/:persistentId/updateCitationsForDataset?persistentId=doi:$DOI") + +# Extract the HTTP status code from the last line +HTTP_STATUS=$(echo "$HTTP_RESPONSE" | tail -n1) +# Extract the response body (everything except the last line) +RESPONSE_BODY=$(echo "$HTTP_RESPONSE" | sed '$d') + +# Check the HTTP status code and report accordingly +case $HTTP_STATUS in + 200) + # Successfully queued + # Extract status from the nested data object + STATUS=$(echo "$RESPONSE_BODY" | jq -r '.data.status') + + # Extract message from the nested data object + if echo "$RESPONSE_BODY" | jq -e '.data.message' > /dev/null 2>&1 && [ "$(echo "$RESPONSE_BODY" | jq -r '.data.message')" != "null" ]; then + MESSAGE=$(echo "$RESPONSE_BODY" | jq -r '.data.message') + echo "[SUCCESS] doi:$DOI - $STATUS: $MESSAGE" + else + # If message is missing or null, just show the status + echo "[SUCCESS] doi:$DOI - $STATUS: Citation update queued" + fi + ;; + 400) + # Bad request + if echo "$RESPONSE_BODY" | jq -e '.message' > /dev/null 2>&1; then + ERROR=$(echo "$RESPONSE_BODY" | jq -r '.message') + echo "[ERROR 400] doi:$DOI - Bad request: $ERROR" + else + echo "[ERROR 400] doi:$DOI - Bad request" + fi + ;; + 404) + # Not found + if echo "$RESPONSE_BODY" | jq -e '.message' > /dev/null 2>&1; then + ERROR=$(echo 
"$RESPONSE_BODY" | jq -r '.message') + echo "[ERROR 404] doi:$DOI - Not found: $ERROR" + else + echo "[ERROR 404] doi:$DOI - Not found" + fi + ;; + 503) + # Service unavailable (queue full) + if echo "$RESPONSE_BODY" | jq -e '.message' > /dev/null 2>&1; then + ERROR=$(echo "$RESPONSE_BODY" | jq -r '.message') + echo "[ERROR 503] doi:$DOI - Service unavailable: $ERROR" + elif echo "$RESPONSE_BODY" | jq -e '.data.message' > /dev/null 2>&1; then + ERROR=$(echo "$RESPONSE_BODY" | jq -r '.data.message') + echo "[ERROR 503] doi:$DOI - Service unavailable: $ERROR" + else + echo "[ERROR 503] doi:$DOI - Service unavailable: Queue is full" + fi + ;; + *) + # Other error + echo "[ERROR $HTTP_STATUS] doi:$DOI - Unexpected error" + echo "Response: $RESPONSE_BODY" + ;; +esac + +done + +# Now iterate over any child Dataverses and recursively process them +for subdv in $(echo "${DVCONTENTS}" | jq -r '.data[] | select(.type == "dataverse") | .id'); do +echo $subdv +processDV $subdv +done + +} + +# Call the function on the root dataverse to start processing +processDV 1 \ No newline at end of file From 75d99d09158c27a7cbf360c52d7a3611cff7809d Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Fri, 29 Aug 2025 13:16:54 -0400 Subject: [PATCH 130/634] another experimental iteration of the updated export data spi. 
#11766 --- .../io/gdcc/spi/export/ExportDataContext.java | 13 +++++++ .../io/gdcc/spi/export/ExportDataOption.java | 1 + .../gdcc/spi/export/ExportDataProvider.java | 36 +++++++++++-------- .../java/io/gdcc/spi/export/Exporter.java | 14 +++++++- 4 files changed, 49 insertions(+), 15 deletions(-) create mode 100644 modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataContext.java diff --git a/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataContext.java b/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataContext.java new file mode 100644 index 00000000000..8776f2047ba --- /dev/null +++ b/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataContext.java @@ -0,0 +1,13 @@ +/* + * Click nbfs://nbhost/SystemFileSystem/Templates/Licenses/license-default.txt to change this license + * Click nbfs://nbhost/SystemFileSystem/Templates/Classes/Class.java to edit this template + */ +package io.gdcc.spi.export; + +/** + * + * @author landreev + */ +public class ExportDataContext { + +} diff --git a/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataOption.java b/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataOption.java index 1c447cc53e3..69f813f83ce 100644 --- a/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataOption.java +++ b/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataOption.java @@ -8,6 +8,7 @@ * options going forward with minimal or no changes to the existing code in * export plugins. 
*/ +@Deprecated public class ExportDataOption { public enum SupportedOptions { diff --git a/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataProvider.java b/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataProvider.java index 54d98511d80..4197d978e79 100644 --- a/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataProvider.java +++ b/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataProvider.java @@ -21,13 +21,14 @@ public interface ExportDataProvider { * OAI_ORE export are the only two that provide 'complete' * dataset-level metadata along with basic file metadata for each file * in the dataset. - * @param options - optional argument(s). needs to support ExportDataOption.DatasetMetadataOnly: - * in a situation where we need to generate a format like DC, - * that has no use for file-level metadata, it makes sense to - * skip retrieving and formatting it, since there can be quite a few + * @param context - supplies optional parameters. Needs to support + * context.isDatasetMetadataOnly(). In a situation where we + * need to generate a format like DC that has no use for the + * file-level metadata, it makes sense to skip retrieving and + * formatting it, since there can be a very large number of * files in a dataset. */ - JsonObject getDatasetJson(ExportDataOption... options); + JsonObject getDatasetJson(ExportDataContext... context); /** * @@ -37,8 +38,9 @@ public interface ExportDataProvider { * @apiNote - THis, and the JSON format are the only two that provide complete * dataset-level metadata along with basic file metadata for each file * in the dataset. + * @param context - supplies optional parameters. */ - JsonObject getDatasetORE(ExportDataOption... options); + JsonObject getDatasetORE(ExportDataContext... 
context); /** * Dataverse is capable of extracting DDI-centric metadata from tabular @@ -52,8 +54,9 @@ public interface ExportDataProvider { * edu.harvard.iq.dataverse.export.DDIExporter and the @see * edu.harvard.iq.dataverse.util.json.JSONPrinter classes where this * output is used/generated (respectively). + * @param context - supplies optional parameters. */ - JsonArray getDatasetFileDetails(ExportDataOption... options); + JsonArray getDatasetFileDetails(ExportDataContext... context); /** * Similar to the above, but @@ -61,13 +64,15 @@ public interface ExportDataProvider { * b) provides an option for retrieving this stuff in batches * c) provides an option for skipping restricted/embargoed etc. files. * Intended for datasets with massive numbers of tabular files and datavariables. - * @param offset (can be null) - * @param length (can be null) - * @param options (optional) current use case is ExportDataOption.PublicFilesOnly; + * @param context - supplies optional parameters. + * current (2.1.0) known use cases: + * context.isPublicFilesOnly(); + * context.getOffset(); + * context.getLength(); * @return json array containing the datafile/filemetadata->datatable->datavariable metadata * @throws ExportException */ - JsonArray getTabularDataDetails(Integer offset, Integer length, ExportDataOption ... options) throws ExportException; + JsonArray getTabularDataDetails(ExportDataContext ... context) throws ExportException; /** * @@ -77,8 +82,9 @@ public interface ExportDataProvider { * @apiNote - as this metadata export is not complete, it should only be used as * a starting point for an Exporter if it simplifies your exporter * relative to using the JSON or OAI_ORE exports. + * @param context - supplies optional parameters. */ - JsonObject getDatasetSchemaDotOrg(ExportDataOption... options); + JsonObject getDatasetSchemaDotOrg(ExportDataContext... 
context); /** * @@ -87,8 +93,9 @@ public interface ExportDataProvider { * @apiNote - as this metadata export is not complete, it should only be used as * a starting point for an Exporter if it simplifies your exporter * relative to using the JSON or OAI_ORE exports. + * @param context - supplies optional parameters. */ - String getDataCiteXml(ExportDataOption... options); + String getDataCiteXml(ExportDataContext... context); /** * If an Exporter has specified a prerequisite format name via the @@ -107,8 +114,9 @@ public interface ExportDataProvider { * malfunction, e.g. if you depend on format "ddi" and a third party * Exporter is configured to replace the internal ddi Exporter in * Dataverse. + * @param context - supplies optional parameters. */ - default Optional getPrerequisiteInputStream(ExportDataOption... options) { + default Optional getPrerequisiteInputStream(ExportDataContext... context) { return Optional.empty(); } diff --git a/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/Exporter.java b/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/Exporter.java index 1338a3c9734..f91a2504f74 100644 --- a/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/Exporter.java +++ b/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/Exporter.java @@ -84,7 +84,19 @@ public interface Exporter { default Optional getPrerequisiteFormatName() { return Optional.empty(); } - + + /** + * Most metadata formats do not require tabular metadata to be generated. + * (the 2 known cases that encode variable-level information, as of Dataverse 6.7, + * are rich DDI and Croissant). In order to serve it more efficiently, + * the data provider may need to be initialized in a special way (with access + * to EJBs that in most cases isn't needed), so it may help to know + * programmatically whether it's required for this format. 
+ * @return Optional + */ + default Optional isNeedsTabularMetadata() { + return Optional.of(false); + } /** * Harvestable Exporters will be available as options in Dataverse's Harvesting mechanism. From cb06d8962863e9d2ffdc279637d92969cc2a1c79 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Fri, 29 Aug 2025 13:42:31 -0400 Subject: [PATCH 131/634] the new context object (was missing from the previous commit in error) #11766 --- .../io/gdcc/spi/export/ExportDataContext.java | 56 +++++++++++++++++-- 1 file changed, 52 insertions(+), 4 deletions(-) diff --git a/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataContext.java b/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataContext.java index 8776f2047ba..9478d39c4c2 100644 --- a/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataContext.java +++ b/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataContext.java @@ -1,13 +1,61 @@ -/* - * Click nbfs://nbhost/SystemFileSystem/Templates/Licenses/license-default.txt to change this license - * Click nbfs://nbhost/SystemFileSystem/Templates/Classes/Class.java to edit this template - */ package io.gdcc.spi.export; /** * * @author landreev + * Provides an optional mechanism for defining various data retrieval options + * for the export subsystem in a way that should allow us adding support for + * more options going forward with minimal or no changes to the already + * implemented export plugins. 
*/ public class ExportDataContext { + private boolean datasetMetadataOnly = false; + private boolean publicFilesOnly = false; + private Integer offset = null; + private Integer length = null; + + private ExportDataContext() { + + } + + public static ExportDataContext context() { + ExportDataContext context = new ExportDataContext(); + return context; + } + + public ExportDataContext withDatasetMetadataOnly() { + this.datasetMetadataOnly = true; + return this; + } + + public ExportDataContext withPublicFilesOnly() { + this.publicFilesOnly = true; + return this; + } + + public ExportDataContext withOffset(Integer offset) { + this.offset = offset; + return this; + } + + public ExportDataContext withLength(Integer length) { + this.length = length; + return this; + } + + public boolean isDatasetMetadataOnly() { + return datasetMetadataOnly; + } + + public boolean isPublicFilesOnly() { + return publicFilesOnly; + } + + public Integer getOffset() { + return offset; + } + public Integer getLength() { + return length; + } } From ba61c09459fdc9e994d30359b81031f001fe8324 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 29 Aug 2025 14:35:50 -0400 Subject: [PATCH 132/634] release note --- doc/release-notes/11777-MDC-citation-api-improvement.md | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 doc/release-notes/11777-MDC-citation-api-improvement.md diff --git a/doc/release-notes/11777-MDC-citation-api-improvement.md b/doc/release-notes/11777-MDC-citation-api-improvement.md new file mode 100644 index 00000000000..9441e9e0f44 --- /dev/null +++ b/doc/release-notes/11777-MDC-citation-api-improvement.md @@ -0,0 +1,7 @@ +The /api/admin/makeDataCount/{id}/updateCitationsForDataset endpoint, which allows citations for a dataset to be retrieved from DataCite, is often called periodically for all datasets. 
However, allowing calls for many datasets to be processed in parallel can cause performance problems in Dataverse and/or cause calls to DataCite to fail due to rate limiting. The existing implementation was also inefficient w.r.t. memory use when used on datasets with many (>~1K) files. This release configures Dataverse to queue calls to this api, processes them serially, adds optional throttling to avoid hitting DataCite rate limits and improves memory use. + +New optional MPConfig setting: + +dataverse.api.mdc.min-delay-ms - number of milliseconds to wait between calls to DataCite. A value of ~100 should conservatively address DataCite's current 3000/5 minute limit. A value of 250 may be required for their test service. + +Backward compatibility: This api call is now asynchronous and will return an OK response when the call is queued or a 503 if the queue is full. \ No newline at end of file From 71321b995f7667f53c3eb2c8a4aec1e94d3cecd2 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 29 Aug 2025 14:36:09 -0400 Subject: [PATCH 133/634] switch to app executor service --- src/main/webapp/WEB-INF/glassfish-resources.xml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/src/main/webapp/WEB-INF/glassfish-resources.xml b/src/main/webapp/WEB-INF/glassfish-resources.xml index 3fbbf4c3586..74af3be42ce 100644 --- a/src/main/webapp/WEB-INF/glassfish-resources.xml +++ b/src/main/webapp/WEB-INF/glassfish-resources.xml @@ -11,4 +11,14 @@ + + + + + + + + \ No newline at end of file From 867af5e08398f88a0fd211a9e1e1eb3e145ee634 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 29 Aug 2025 14:46:05 -0400 Subject: [PATCH 134/634] docs --- .../source/admin/make-data-count.rst | 2 ++ doc/sphinx-guides/source/api/changelog.rst | 2 +- doc/sphinx-guides/source/installation/config.rst | 16 ++++++++++++++++ 3 files changed, 19 insertions(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/admin/make-data-count.rst b/doc/sphinx-guides/source/admin/make-data-count.rst 
index 0103a6f9e38..f8ffa7bb084 100644 --- a/doc/sphinx-guides/source/admin/make-data-count.rst +++ b/doc/sphinx-guides/source/admin/make-data-count.rst @@ -166,6 +166,8 @@ The example :download:`counter_weekly.sh <../_static/util/counter_weekly.sh>` wi Citations will be retrieved for each published dataset and recorded in the your Dataverse installation's database. +Note that the :ref:`dataverse.api.mdc.min-delay-ms` setting can be used to avoid getting rate-limit errors from DataCite. + For how to get the citations out of your Dataverse installation, see "Retrieving Citations for a Dataset" under :ref:`Dataset Metrics ` in the :doc:`/api/native-api` section of the API Guide. Please note that while the Dataverse Software has a metadata field for "Related Dataset" this information is not currently sent as a citation to Crossref. diff --git a/doc/sphinx-guides/source/api/changelog.rst b/doc/sphinx-guides/source/api/changelog.rst index 16157459220..08e8620ba13 100644 --- a/doc/sphinx-guides/source/api/changelog.rst +++ b/doc/sphinx-guides/source/api/changelog.rst @@ -13,7 +13,7 @@ v6.8 - For POST /api/files/{id}/metadata passing an empty string ("description":"") or array ("categories":[]) will no longer be ignored. Empty fields will now clear out the values in the file's metadata. To ignore the fields simply do not include them in the JSON string. - For PUT /api/datasets/{id}/editMetadata the query parameter "sourceInternalVersionNumber" has been removed and replaced with "sourceLastUpdateTime" to verify that the data being edited hasn't been modified and isn't stale. - For GET /api/dataverses/$dataverse-alias/links the Json response has changed breaking the backward compatibility of the API. - +- The POST /api/admin/makeDataCount/{id}/updateCitationsForDataset processing is now asynchronous and the response no longer includes the number of citations. The response can be OK if the request is queued or 503 if the queue is full (default queue size is 1000). 
v6.7 ---- diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index d2eff275392..a8e6129c501 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -3729,6 +3729,22 @@ Example: Can also be set via any `supported MicroProfile Config API source`_, e.g. the environment variable ``DATAVERSE_CORS_HEADERS_EXPOSE``. + +.. _dataverse.api.mdc.min-delay-ms: + +dataverse.api.mdc.min-delay-ms +++++++++++++++++++++++++++++++ + +Minimum delay in milliseconds between Make Data Count (MDC) API requests from the /api/admin/makeDataCount/{id}/updateCitationsForDataset api. +This setting helps prevent overloading the MDC service by enforcing a minimum time interval between consecutive requests. +If a request arrives before this interval has elapsed since the previous request, it will be rate-limited. + +Default: ``0`` (no delay enforced) + +Example: ``dataverse.api.mdc.min-delay-ms=100`` (enforces a minimum 100ms delay between MDC API requests) + +Can also be set via any `supported MicroProfile Config API source`_, e.g. the environment variable ``DATAVERSE_API_MDC_MIN_DELAY_MS``. + .. _feature-flags: Feature Flags From 85fcbac7a05d652360fd9fac3ac5ccd8a06627f5 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 29 Aug 2025 15:10:54 -0400 Subject: [PATCH 135/634] missing blank line --- doc/sphinx-guides/source/api/changelog.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/sphinx-guides/source/api/changelog.rst b/doc/sphinx-guides/source/api/changelog.rst index 08e8620ba13..4c91a63f86d 100644 --- a/doc/sphinx-guides/source/api/changelog.rst +++ b/doc/sphinx-guides/source/api/changelog.rst @@ -14,6 +14,7 @@ v6.8 - For PUT /api/datasets/{id}/editMetadata the query parameter "sourceInternalVersionNumber" has been removed and replaced with "sourceLastUpdateTime" to verify that the data being edited hasn't been modified and isn't stale. 
- For GET /api/dataverses/$dataverse-alias/links the Json response has changed breaking the backward compatibility of the API. - The POST /api/admin/makeDataCount/{id}/updateCitationsForDataset processing is now asynchronous and the response no longer includes the number of citations. The response can be OK if the request is queued or 503 if the queue is full (default queue size is 1000). + v6.7 ---- From fcb8b9d97b6d3eab94f656896bcd65eba5e16a41 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 29 Aug 2025 15:22:23 -0400 Subject: [PATCH 136/634] check for string --- .../iq/dataverse/pidproviders/doi/XmlMetadataTemplate.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/XmlMetadataTemplate.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/XmlMetadataTemplate.java index baf8302437d..1bd0f4a1adc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/XmlMetadataTemplate.java +++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/doi/XmlMetadataTemplate.java @@ -58,6 +58,7 @@ import edu.harvard.iq.dataverse.util.xml.XmlWriterUtil; import jakarta.enterprise.inject.spi.CDI; import jakarta.json.JsonObject; +import jakarta.json.JsonValue.ValueType; public class XmlMetadataTemplate { @@ -621,7 +622,8 @@ private void writeEntityElements(XMLStreamWriter xmlw, String elementName, Strin if (externalIdentifier.isValidIdentifier(orgName)) { isROR = true; JsonObject jo = getExternalVocabularyValue(orgName); - if (jo != null) { + // Some ext. cvv configs store a JsonArray of multiple objects/values. 
In such cases, we'll leave orgName blank + if (jo != null && jo.getValueType() == ValueType.STRING) { orgName = jo.getString("termName"); } } From 05b68c621bf368ce3363151cbef4f33c4874396c Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 26 Aug 2025 16:58:03 -0400 Subject: [PATCH 137/634] and another --- src/main/java/edu/harvard/iq/dataverse/export/ExportService.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ExportService.java b/src/main/java/edu/harvard/iq/dataverse/export/ExportService.java index b98f88e386f..e7bcf17d44b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/export/ExportService.java +++ b/src/main/java/edu/harvard/iq/dataverse/export/ExportService.java @@ -312,6 +312,7 @@ public void exportAllFormats(Dataset dataset) throws ExportException { } catch (ServiceConfigurationError serviceError) { throw new ExportException("Service configuration error during export. " + serviceError.getMessage()); } catch (RuntimeException e) { + e.printStackTrace(); logger.log(Level.FINE, e.getMessage(), e); throw new ExportException( "Unknown runtime exception exporting metadata. " + (e.getMessage() == null ? 
"" : e.getMessage())); From 68ad098c5dae9c63107dea70ffcf05316668949d Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 19 Aug 2025 16:55:14 -0400 Subject: [PATCH 138/634] preserve curation statuses during update-current and add blank status as in finalize publication --- .../CuratePublishedDatasetVersionCommand.java | 21 +++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java index 3629432b7e4..21139ac3fbc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java @@ -15,6 +15,7 @@ import edu.harvard.iq.dataverse.DatasetField; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.TermsOfUseAndAccess; +import edu.harvard.iq.dataverse.CurationStatus; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.FileMetadata; import edu.harvard.iq.dataverse.RoleAssignment; @@ -27,6 +28,8 @@ import java.util.logging.Level; import java.util.logging.Logger; +import org.apache.commons.lang3.StringUtils; + /** * * @author qqmyers @@ -96,6 +99,24 @@ public Dataset execute(CommandContext ctxt) throws CommandException { updateVersion.getWorkflowComments().addAll(newComments); } + // Transfer curation status entries from draft to published version + List draftCurationStatuses = newVersion.getCurationStatuses(); + if (draftCurationStatuses != null && !draftCurationStatuses.isEmpty()) { + for (CurationStatus cs : draftCurationStatuses) { + // Update the dataset version reference + //This call sets the version in the curationstatus object as well + updateVersion.addCurationStatus(cs); + } + // Clear the list from the draft version + 
newVersion.getCurationStatuses().clear(); + } + + // Add a new empty curation status to indicate the curation action + CurationStatus status = updateVersion.getCurrentCurationStatus(); + if (status != null && StringUtils.isNotBlank(status.getLabel())) { + updateVersion.addCurationStatus(new CurationStatus(null, updateVersion, getRequest().getAuthenticatedUser())); + } + // we have to merge to update the database but not flush because // we don't want to create two draft versions! Dataset tempDataset = getDataset(); From 2837168183953505b8eb66a9d90e1374e0d1007f Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 19 Aug 2025 17:37:24 -0400 Subject: [PATCH 139/634] comment edit --- .../command/impl/CuratePublishedDatasetVersionCommand.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java index 21139ac3fbc..bf2fe78e721 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java @@ -111,7 +111,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { newVersion.getCurationStatuses().clear(); } - // Add a new empty curation status to indicate the curation action + // Add a new empty curation status to clear the status in the published version (as done in the FinalizeDatasetPublicationCommand) CurationStatus status = updateVersion.getCurrentCurationStatus(); if (status != null && StringUtils.isNotBlank(status.getLabel())) { updateVersion.addCurationStatus(new CurationStatus(null, updateVersion, getRequest().getAuthenticatedUser())); From 4477025b996056659b841b59a1e36581097c0784 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Tue, 19 Aug 2025 17:37:40 -0400 Subject: [PATCH 140/634] 
only show change curation status entry for draft version --- src/main/webapp/dataset.xhtml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index f1f72b50634..e9112997149 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -525,7 +525,7 @@ #{bundle['dataset.curationStatusMenu']}