diff --git a/.env b/.env
index 9d604630073..6d99d85b3a7 100644
--- a/.env
+++ b/.env
@@ -1,5 +1,5 @@
APP_IMAGE=gdcc/dataverse:unstable
POSTGRES_VERSION=17
DATAVERSE_DB_USER=dataverse
-SOLR_VERSION=9.3.0
-SKIP_DEPLOY=0
+SOLR_VERSION=9.8.0
+SKIP_DEPLOY=0
\ No newline at end of file
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 00000000000..6325029dac1
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,11 @@
+# Set update schedule for GitHub Actions
+# https://docs.github.com/en/code-security/supply-chain-security/keeping-your-dependencies-updated-automatically/keeping-your-actions-up-to-date-with-dependabot
+
+version: 2
+updates:
+
+ - package-ecosystem: "github-actions"
+ directory: "/"
+ schedule:
+ # Check for updates to GitHub Actions daily
+ interval: "daily"
diff --git a/.github/workflows/container_app_pr.yml b/.github/workflows/container_app_pr.yml
index c86d284e74b..4a06cb567b0 100644
--- a/.github/workflows/container_app_pr.yml
+++ b/.github/workflows/container_app_pr.yml
@@ -20,14 +20,14 @@ jobs:
if: ${{ github.repository_owner == 'IQSS' }}
steps:
# Checkout the pull request code as when merged
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
with:
ref: 'refs/pull/${{ github.event.client_payload.pull_request.number }}/merge'
- - uses: actions/setup-java@v3
+ - uses: actions/setup-java@v4
with:
java-version: "17"
distribution: 'adopt'
- - uses: actions/cache@v3
+ - uses: actions/cache@v4
with:
path: ~/.m2
key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
@@ -35,14 +35,14 @@ jobs:
# Note: Accessing, pushing tags etc. to GHCR will only succeed in upstream because secrets.
- name: Login to Github Container Registry
- uses: docker/login-action@v2
+ uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ secrets.GHCR_USERNAME }}
password: ${{ secrets.GHCR_TOKEN }}
- name: Set up QEMU for multi-arch builds
- uses: docker/setup-qemu-action@v2
+ uses: docker/setup-qemu-action@v3
# Get the image tag from either the command or default to branch name (Not used for now)
#- name: Get the target tag name
@@ -87,7 +87,7 @@ jobs:
:ship: [See on GHCR](https://github.com/orgs/gdcc/packages/container). Use by referencing with full name as printed above, mind the registry name.
# Leave a note when things have gone sideways
- - uses: peter-evans/create-or-update-comment@v3
+ - uses: peter-evans/create-or-update-comment@v4
if: ${{ failure() }}
with:
issue-number: ${{ github.event.client_payload.pull_request.number }}
diff --git a/.github/workflows/container_app_push.yml b/.github/workflows/container_app_push.yml
index 3b7ce066d73..71ffffb5f48 100644
--- a/.github/workflows/container_app_push.yml
+++ b/.github/workflows/container_app_push.yml
@@ -68,15 +68,15 @@ jobs:
if: ${{ github.event_name != 'pull_request' && github.ref_name == 'develop' && github.repository_owner == 'IQSS' }}
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v3
- - uses: peter-evans/dockerhub-description@v3
+ - uses: actions/checkout@v4
+ - uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
repository: gdcc/dataverse
short-description: "Dataverse Application Container Image providing the executable"
readme-filepath: ./src/main/docker/README.md
- - uses: peter-evans/dockerhub-description@v3
+ - uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
@@ -126,20 +126,20 @@ jobs:
# Depending on context, we push to different targets. Login accordingly.
- if: github.event_name != 'pull_request'
name: Log in to Docker Hub registry
- uses: docker/login-action@v2
+ uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- if: ${{ github.event_name == 'pull_request' }}
name: Login to Github Container Registry
- uses: docker/login-action@v2
+ uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ secrets.GHCR_USERNAME }}
password: ${{ secrets.GHCR_TOKEN }}
- name: Set up QEMU for multi-arch builds
- uses: docker/setup-qemu-action@v2
+ uses: docker/setup-qemu-action@v3
- name: Re-set image tag based on branch (if master)
if: ${{ github.ref_name == 'master' }}
diff --git a/.github/workflows/copy_labels.yml b/.github/workflows/copy_labels.yml
new file mode 100644
index 00000000000..8e9061c6655
--- /dev/null
+++ b/.github/workflows/copy_labels.yml
@@ -0,0 +1,15 @@
+name: Copy labels from issue to pull request
+
+on:
+ pull_request:
+ types: [opened]
+
+jobs:
+ copy-labels:
+ runs-on: ubuntu-latest
+ name: Copy labels from linked issues
+ steps:
+ - name: copy-labels
+ uses: michalvankodev/copy-issue-labels@v1.3.0
+ with:
+ repo-token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/deploy_beta_testing.yml b/.github/workflows/deploy_beta_testing.yml
index eca8416732a..7d3a45c6235 100644
--- a/.github/workflows/deploy_beta_testing.yml
+++ b/.github/workflows/deploy_beta_testing.yml
@@ -68,7 +68,7 @@ jobs:
overwrite: true
- name: Execute payara war deployment remotely
- uses: appleboy/ssh-action@v1.0.0
+ uses: appleboy/ssh-action@v1.2.1
env:
INPUT_WAR_FILE: ${{ env.war_file }}
with:
diff --git a/.github/workflows/guides_build_sphinx.yml b/.github/workflows/guides_build_sphinx.yml
index 86b59b11d35..fa3a876c418 100644
--- a/.github/workflows/guides_build_sphinx.yml
+++ b/.github/workflows/guides_build_sphinx.yml
@@ -10,7 +10,7 @@ jobs:
docs:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v4
- uses: uncch-rdmc/sphinx-action@master
with:
docs-folder: "doc/sphinx-guides/"
diff --git a/.github/workflows/pr_comment_commands.yml b/.github/workflows/pr_comment_commands.yml
index 5ff75def623..06b11b1ac5b 100644
--- a/.github/workflows/pr_comment_commands.yml
+++ b/.github/workflows/pr_comment_commands.yml
@@ -9,7 +9,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Dispatch
- uses: peter-evans/slash-command-dispatch@v3
+ uses: peter-evans/slash-command-dispatch@v4
with:
# This token belongs to @dataversebot and has sufficient scope.
token: ${{ secrets.GHCR_TOKEN }}
diff --git a/.github/workflows/reviewdog_checkstyle.yml b/.github/workflows/reviewdog_checkstyle.yml
index 90a0dd7d06b..804b04f696a 100644
--- a/.github/workflows/reviewdog_checkstyle.yml
+++ b/.github/workflows/reviewdog_checkstyle.yml
@@ -10,7 +10,7 @@ jobs:
name: Checkstyle job
steps:
- name: Checkout
- uses: actions/checkout@v2
+ uses: actions/checkout@v4
- name: Run check style
uses: nikitasavinov/checkstyle-action@master
with:
diff --git a/.github/workflows/shellcheck.yml b/.github/workflows/shellcheck.yml
index 56f7d648dc4..fb9cf5a0a1f 100644
--- a/.github/workflows/shellcheck.yml
+++ b/.github/workflows/shellcheck.yml
@@ -21,7 +21,7 @@ jobs:
permissions:
pull-requests: write
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: shellcheck
uses: reviewdog/action-shellcheck@v1
with:
diff --git a/.github/workflows/shellspec.yml b/.github/workflows/shellspec.yml
index 3320d9d08a4..cc09992edac 100644
--- a/.github/workflows/shellspec.yml
+++ b/.github/workflows/shellspec.yml
@@ -19,7 +19,7 @@ jobs:
steps:
- name: Install shellspec
run: curl -fsSL https://git.io/shellspec | sh -s ${{ env.SHELLSPEC_VERSION }} --yes
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v4
- name: Run Shellspec
run: |
cd tests/shell
@@ -30,7 +30,7 @@ jobs:
container:
image: rockylinux/rockylinux:9
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v4
- name: Install shellspec
run: |
curl -fsSL https://github.com/shellspec/shellspec/releases/download/${{ env.SHELLSPEC_VERSION }}/shellspec-dist.tar.gz | tar -xz -C /usr/share
@@ -47,7 +47,7 @@ jobs:
steps:
- name: Install shellspec
run: curl -fsSL https://git.io/shellspec | sh -s 0.28.1 --yes
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v4
- name: Run Shellspec
run: |
cd tests/shell
diff --git a/.github/workflows/spi_release.yml b/.github/workflows/spi_release.yml
index 8ad74b3e4bb..6398edca412 100644
--- a/.github/workflows/spi_release.yml
+++ b/.github/workflows/spi_release.yml
@@ -37,15 +37,15 @@ jobs:
runs-on: ubuntu-latest
if: github.event_name == 'pull_request' && needs.check-secrets.outputs.available == 'true'
steps:
- - uses: actions/checkout@v3
- - uses: actions/setup-java@v3
+ - uses: actions/checkout@v4
+ - uses: actions/setup-java@v4
with:
java-version: '17'
distribution: 'adopt'
server-id: ossrh
server-username: MAVEN_USERNAME
server-password: MAVEN_PASSWORD
- - uses: actions/cache@v2
+ - uses: actions/cache@v4
with:
path: ~/.m2
key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
@@ -63,12 +63,12 @@ jobs:
runs-on: ubuntu-latest
if: github.event_name == 'push' && needs.check-secrets.outputs.available == 'true'
steps:
- - uses: actions/checkout@v3
- - uses: actions/setup-java@v3
+ - uses: actions/checkout@v4
+ - uses: actions/setup-java@v4
with:
java-version: '17'
distribution: 'adopt'
- - uses: actions/cache@v2
+ - uses: actions/cache@v4
with:
path: ~/.m2
key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
@@ -76,7 +76,7 @@ jobs:
# Running setup-java again overwrites the settings.xml - IT'S MANDATORY TO DO THIS SECOND SETUP!!!
- name: Set up Maven Central Repository
- uses: actions/setup-java@v3
+ uses: actions/setup-java@v4
with:
java-version: '17'
distribution: 'adopt'
diff --git a/README.md b/README.md
index 77720453d5f..2303c001d2c 100644
--- a/README.md
+++ b/README.md
@@ -1,20 +1,81 @@
Dataverse®
===============
-Dataverse is an [open source][] software platform for sharing, finding, citing, and preserving research data (developed by the [Dataverse team](https://dataverse.org/about) at the [Institute for Quantitative Social Science](https://iq.harvard.edu/) and the [Dataverse community][]).
+
-[dataverse.org][] is our home on the web and shows a map of Dataverse installations around the world, a list of [features][], [integrations][] that have been made possible through [REST APIs][], our [project board][], our development [roadmap][], and more.
+## Table of Contents
-We maintain a demo site at [demo.dataverse.org][] which you are welcome to use for testing and evaluating Dataverse.
+1. [❓ What is Dataverse?](#what-is-dataverse)
+2. [✅ Try Dataverse](#try-dataverse)
+3. [🌐 Features, Integrations, Roadmaps, and More](#features-integrations-roadmaps-and-more)
+4. [📥 Installation](#installation)
+5. [🏘 Community and Support](#community-and-support)
+6. [🧑‍💻️ Contributing](#contribute-to-dataverse)
+7. [⚖️ Legal Information](#legal-information)
-To install Dataverse, please see our [Installation Guide][] which will prompt you to download our [latest release][]. Docker users should consult the [Container Guide][].
+
-To discuss Dataverse with the community, please join our [mailing list][], participate in a [community call][], chat with us at [chat.dataverse.org][], or attend our annual [Dataverse Community Meeting][].
+## ❓ What is Dataverse?
-We love contributors! Please see our [Contributing Guide][] for ways you can help.
+Welcome to Dataverse®, the [open source][] software platform designed for sharing, finding, citing, and preserving research data. Developed by the Dataverse team at the [Institute for Quantitative Social Science](https://iq.harvard.edu/) and the [Dataverse community][], our platform makes it easy for research organizations to host, manage, and share their data with the world.
+
+
+
+## ✅ Try Dataverse
+
+We invite you to explore our demo site at [demo.dataverse.org][]. This site is ideal for testing and evaluating Dataverse in a risk-free environment.
+
+
+
+## 🌐 Features, Integrations, Roadmaps, and More
+
+Visit [dataverse.org][], our home on the web, for a comprehensive overview of Dataverse. Here, you will find:
+
+- An interactive map showcasing Dataverse installations worldwide.
+- A detailed list of [features][].
+- Information on [integrations][] that have been made possible through our [REST APIs][].
+- Our [project board][] and development [roadmap][].
+- News, events, and more.
+
+
+
+## 📥 Installation
+
+Ready to get started? Follow our [Installation Guide][] to download and install the latest release of Dataverse.
+
+If you are using Docker, please refer to our [Container Guide][] for detailed instructions.
+
+
+
+## 🏘 Community and Support
+
+Engage with the vibrant Dataverse community through various channels:
+
+- **[Mailing List][]**: Join the conversation on our [mailing list][].
+- **[Community Calls][]**: Participate in our regular [community calls][] to discuss new features, ask questions, and share your experiences.
+- **[Chat][]**: Connect with us and other users in real-time at [dataverse.zulipchat.com][].
+- **[Dataverse Community Meeting][]**: Attend our annual [Dataverse Community Meeting][] to network, learn, and collaborate with peers and experts.
+- **[DataverseTV][]**: Watch the video content from the Dataverse community on [DataverseTV][] and on [Harvard's IQSS YouTube channel][].
+
+
+## 🧑‍💻️ Contribute to Dataverse
+
+We love contributors! Whether you are a developer, researcher, or enthusiast, there are many ways you can help.
+
+Visit our [Contributing Guide][] to learn how you can get involved.
+
+Join us in building and enhancing Dataverse to make research data more accessible and impactful. Your support and participation are crucial to our success!
+
+
+## ⚖️ Legal Information
Dataverse is a trademark of President and Fellows of Harvard College and is registered in the United States.
+***
+For more detailed information, visit our website at [dataverse.org][].
+
+Feel free to [reach out] with any questions or feedback. Happy researching!
+
[](http://dataverse.org)
[](https://jenkins.dataverse.org/job/IQSS-dataverse-develop/)
@@ -37,6 +98,11 @@ Dataverse is a trademark of President and Fellows of Harvard College and is regi
[Contributing Guide]: CONTRIBUTING.md
[mailing list]: https://groups.google.com/group/dataverse-community
[community call]: https://dataverse.org/community-calls
-[chat.dataverse.org]: https://chat.dataverse.org
+[Chat]: https://dataverse.zulipchat.com
+[dataverse.zulipchat.com]: https://dataverse.zulipchat.com
[Dataverse Community Meeting]: https://dataverse.org/events
[open source]: LICENSE.md
+[community calls]: https://dataverse.org/community-calls
+[DataverseTV]: https://dataverse.org/dataversetv
+[Harvard's IQSS YouTube channel]: https://www.youtube.com/@iqssatharvarduniversity8672
+[reach out]: https://dataverse.org/contact
diff --git a/conf/keycloak/test-realm.json b/conf/keycloak/test-realm.json
index efe71cc5d29..2e5ed1c4d69 100644
--- a/conf/keycloak/test-realm.json
+++ b/conf/keycloak/test-realm.json
@@ -45,287 +45,411 @@
"quickLoginCheckMilliSeconds" : 1000,
"maxDeltaTimeSeconds" : 43200,
"failureFactor" : 30,
- "roles" : {
- "realm" : [ {
- "id" : "075daee1-5ab2-44b5-adbf-fa49a3da8305",
- "name" : "uma_authorization",
- "description" : "${role_uma_authorization}",
- "composite" : false,
- "clientRole" : false,
- "containerId" : "80a7e04b-a2b5-4891-a2d1-5ad4e915f983",
- "attributes" : { }
- }, {
- "id" : "b4ff9091-ddf9-4536-b175-8cfa3e331d71",
- "name" : "default-roles-test",
- "description" : "${role_default-roles}",
- "composite" : true,
- "composites" : {
- "realm" : [ "offline_access", "uma_authorization" ],
- "client" : {
- "account" : [ "view-profile", "manage-account" ]
- }
+ "roles": {
+ "realm": [
+ {
+ "id": "075daee1-5ab2-44b5-adbf-fa49a3da8305",
+ "name": "uma_authorization",
+ "description": "${role_uma_authorization}",
+ "composite": false,
+ "clientRole": false,
+ "containerId": "80a7e04b-a2b5-4891-a2d1-5ad4e915f983",
+ "attributes": {}
},
- "clientRole" : false,
- "containerId" : "80a7e04b-a2b5-4891-a2d1-5ad4e915f983",
- "attributes" : { }
- }, {
- "id" : "e6d31555-6be6-4dee-bc6a-40a53108e4c2",
- "name" : "offline_access",
- "description" : "${role_offline-access}",
- "composite" : false,
- "clientRole" : false,
- "containerId" : "80a7e04b-a2b5-4891-a2d1-5ad4e915f983",
- "attributes" : { }
- } ],
- "client" : {
- "realm-management" : [ {
- "id" : "1955bd12-5f86-4a74-b130-d68a8ef6f0ee",
- "name" : "impersonation",
- "description" : "${role_impersonation}",
- "composite" : false,
- "clientRole" : true,
- "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
- "attributes" : { }
- }, {
- "id" : "1109c350-9ab1-426c-9876-ef67d4310f35",
- "name" : "view-authorization",
- "description" : "${role_view-authorization}",
- "composite" : false,
- "clientRole" : true,
- "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
- "attributes" : { }
- }, {
- "id" : "980c3fd3-1ae3-4b8f-9a00-d764c939035f",
- "name" : "query-users",
- "description" : "${role_query-users}",
- "composite" : false,
- "clientRole" : true,
- "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
- "attributes" : { }
- }, {
- "id" : "5363e601-0f9d-4633-a8c8-28cb0f859b7b",
- "name" : "query-groups",
- "description" : "${role_query-groups}",
- "composite" : false,
- "clientRole" : true,
- "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
- "attributes" : { }
- }, {
- "id" : "59aa7992-ad78-48db-868a-25d6e1d7db50",
- "name" : "realm-admin",
- "description" : "${role_realm-admin}",
- "composite" : true,
- "composites" : {
- "client" : {
- "realm-management" : [ "impersonation", "view-authorization", "query-users", "query-groups", "manage-clients", "manage-realm", "view-identity-providers", "query-realms", "manage-authorization", "manage-identity-providers", "manage-users", "view-users", "view-realm", "create-client", "view-clients", "manage-events", "query-clients", "view-events" ]
+ {
+ "id": "b4ff9091-ddf9-4536-b175-8cfa3e331d71",
+ "name": "default-roles-test",
+ "description": "${role_default-roles}",
+ "composite": true,
+ "composites": {
+ "realm": [
+ "offline_access",
+ "uma_authorization"
+ ],
+ "client": {
+ "account": [
+ "view-profile",
+ "manage-account"
+ ]
}
},
- "clientRole" : true,
- "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
- "attributes" : { }
- }, {
- "id" : "112f53c2-897d-4c01-81db-b8dc10c5b995",
- "name" : "manage-clients",
- "description" : "${role_manage-clients}",
- "composite" : false,
- "clientRole" : true,
- "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
- "attributes" : { }
- }, {
- "id" : "c7f57bbd-ef32-4a64-9888-7b8abd90777a",
- "name" : "manage-realm",
- "description" : "${role_manage-realm}",
- "composite" : false,
- "clientRole" : true,
- "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
- "attributes" : { }
- }, {
- "id" : "8885dac8-0af3-45af-94ce-eff5e801bb80",
- "name" : "view-identity-providers",
- "description" : "${role_view-identity-providers}",
- "composite" : false,
- "clientRole" : true,
- "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
- "attributes" : { }
- }, {
- "id" : "2673346c-b0ef-4e01-8a90-be03866093af",
- "name" : "manage-authorization",
- "description" : "${role_manage-authorization}",
- "composite" : false,
- "clientRole" : true,
- "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
- "attributes" : { }
- }, {
- "id" : "b7182885-9e57-445f-8dae-17c16eb31b5d",
- "name" : "manage-identity-providers",
- "description" : "${role_manage-identity-providers}",
- "composite" : false,
- "clientRole" : true,
- "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
- "attributes" : { }
- }, {
- "id" : "ba7bfe0c-cb07-4a47-b92c-b8132b57e181",
- "name" : "manage-users",
- "description" : "${role_manage-users}",
- "composite" : false,
- "clientRole" : true,
- "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
- "attributes" : { }
- }, {
- "id" : "13a8f0fc-647d-4bfe-b525-73956898e550",
- "name" : "query-realms",
- "description" : "${role_query-realms}",
- "composite" : false,
- "clientRole" : true,
- "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
- "attributes" : { }
- }, {
- "id" : "ef4c57dc-78c2-4f9a-8d2b-0e97d46fc842",
- "name" : "view-realm",
- "description" : "${role_view-realm}",
- "composite" : false,
- "clientRole" : true,
- "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
- "attributes" : { }
- }, {
- "id" : "2875da34-006c-4b7f-bfc8-9ae8e46af3a2",
- "name" : "view-users",
- "description" : "${role_view-users}",
- "composite" : true,
- "composites" : {
- "client" : {
- "realm-management" : [ "query-users", "query-groups" ]
+ "clientRole": false,
+ "containerId": "80a7e04b-a2b5-4891-a2d1-5ad4e915f983",
+ "attributes": {}
+ },
+ {
+ "id": "131ff85b-0c25-491b-8e13-dde779ec0854",
+ "name": "admin",
+ "description": "",
+ "composite": true,
+ "composites": {
+ "client": {
+ "realm-management": [
+ "impersonation",
+ "view-authorization",
+ "query-users",
+ "manage-realm",
+ "view-identity-providers",
+ "manage-authorization",
+ "view-clients",
+ "manage-events",
+ "query-clients",
+ "view-events",
+ "query-groups",
+ "realm-admin",
+ "manage-clients",
+ "query-realms",
+ "manage-identity-providers",
+ "manage-users",
+ "view-users",
+ "view-realm",
+ "create-client"
+ ],
+ "broker": [
+ "read-token"
+ ],
+ "account": [
+ "delete-account",
+ "manage-consent",
+ "view-consent",
+ "view-applications",
+ "view-groups",
+ "manage-account-links",
+ "view-profile",
+ "manage-account"
+ ]
}
},
- "clientRole" : true,
- "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
- "attributes" : { }
- }, {
- "id" : "c8c8f7dc-876b-4263-806f-3329f7cd5fd3",
- "name" : "create-client",
- "description" : "${role_create-client}",
- "composite" : false,
- "clientRole" : true,
- "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
- "attributes" : { }
- }, {
- "id" : "21b84f90-5a9a-4845-a7ba-bbd98ac0fcc4",
- "name" : "view-clients",
- "description" : "${role_view-clients}",
- "composite" : true,
- "composites" : {
- "client" : {
- "realm-management" : [ "query-clients" ]
- }
+ "clientRole": false,
+ "containerId": "80a7e04b-a2b5-4891-a2d1-5ad4e915f983",
+ "attributes": {}
+ },
+ {
+ "id": "e6d31555-6be6-4dee-bc6a-40a53108e4c2",
+ "name": "offline_access",
+ "description": "${role_offline-access}",
+ "composite": false,
+ "clientRole": false,
+ "containerId": "80a7e04b-a2b5-4891-a2d1-5ad4e915f983",
+ "attributes": {}
+ }
+ ],
+ "client": {
+ "realm-management": [
+ {
+ "id": "1955bd12-5f86-4a74-b130-d68a8ef6f0ee",
+ "name": "impersonation",
+ "description": "${role_impersonation}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660",
+ "attributes": {}
},
- "clientRole" : true,
- "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
- "attributes" : { }
- }, {
- "id" : "6fd64c94-d663-4501-ad77-0dcf8887d434",
- "name" : "manage-events",
- "description" : "${role_manage-events}",
- "composite" : false,
- "clientRole" : true,
- "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
- "attributes" : { }
- }, {
- "id" : "b321927a-023c-4d2a-99ad-24baf7ff6d83",
- "name" : "query-clients",
- "description" : "${role_query-clients}",
- "composite" : false,
- "clientRole" : true,
- "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
- "attributes" : { }
- }, {
- "id" : "2fc21160-78de-457b-8594-e5c76cde1d5e",
- "name" : "view-events",
- "description" : "${role_view-events}",
- "composite" : false,
- "clientRole" : true,
- "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
- "attributes" : { }
- } ],
- "test" : [ ],
- "security-admin-console" : [ ],
- "admin-cli" : [ ],
- "account-console" : [ ],
- "broker" : [ {
- "id" : "07ee59b5-dca6-48fb-83d4-2994ef02850e",
- "name" : "read-token",
- "description" : "${role_read-token}",
- "composite" : false,
- "clientRole" : true,
- "containerId" : "b57d62bb-77ff-42bd-b8ff-381c7288f327",
- "attributes" : { }
- } ],
- "account" : [ {
- "id" : "17d2f811-7bdf-4c73-83b4-1037001797b8",
- "name" : "view-applications",
- "description" : "${role_view-applications}",
- "composite" : false,
- "clientRole" : true,
- "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4",
- "attributes" : { }
- }, {
- "id" : "d1ff44f9-419e-42fd-98e8-1add1169a972",
- "name" : "delete-account",
- "description" : "${role_delete-account}",
- "composite" : false,
- "clientRole" : true,
- "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4",
- "attributes" : { }
- }, {
- "id" : "14c23a18-ae2d-43c9-b0c0-aaf6e0c7f5b0",
- "name" : "manage-account-links",
- "description" : "${role_manage-account-links}",
- "composite" : false,
- "clientRole" : true,
- "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4",
- "attributes" : { }
- }, {
- "id" : "6fbe58af-d2fe-4d66-95fe-a2e8a818cb55",
- "name" : "view-profile",
- "description" : "${role_view-profile}",
- "composite" : false,
- "clientRole" : true,
- "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4",
- "attributes" : { }
- }, {
- "id" : "bdfd02bc-6f6a-47d2-82bc-0ca52d78ff48",
- "name" : "manage-consent",
- "description" : "${role_manage-consent}",
- "composite" : true,
- "composites" : {
- "client" : {
- "account" : [ "view-consent" ]
- }
+ {
+ "id": "1109c350-9ab1-426c-9876-ef67d4310f35",
+ "name": "view-authorization",
+ "description": "${role_view-authorization}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660",
+ "attributes": {}
},
- "clientRole" : true,
- "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4",
- "attributes" : { }
- }, {
- "id" : "782f3b0c-a17b-4a87-988b-1a711401f3b0",
- "name" : "manage-account",
- "description" : "${role_manage-account}",
- "composite" : true,
- "composites" : {
- "client" : {
- "account" : [ "manage-account-links" ]
- }
+ {
+ "id": "980c3fd3-1ae3-4b8f-9a00-d764c939035f",
+ "name": "query-users",
+ "description": "${role_query-users}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660",
+ "attributes": {}
+ },
+ {
+ "id": "5363e601-0f9d-4633-a8c8-28cb0f859b7b",
+ "name": "query-groups",
+ "description": "${role_query-groups}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660",
+ "attributes": {}
+ },
+ {
+ "id": "59aa7992-ad78-48db-868a-25d6e1d7db50",
+ "name": "realm-admin",
+ "description": "${role_realm-admin}",
+ "composite": true,
+ "composites": {
+ "client": {
+ "realm-management": [
+ "impersonation",
+ "view-authorization",
+ "query-users",
+ "query-groups",
+ "manage-clients",
+ "manage-realm",
+ "view-identity-providers",
+ "query-realms",
+ "manage-authorization",
+ "manage-identity-providers",
+ "manage-users",
+ "view-users",
+ "view-realm",
+ "create-client",
+ "view-clients",
+ "manage-events",
+ "query-clients",
+ "view-events"
+ ]
+ }
+ },
+ "clientRole": true,
+ "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660",
+ "attributes": {}
+ },
+ {
+ "id": "112f53c2-897d-4c01-81db-b8dc10c5b995",
+ "name": "manage-clients",
+ "description": "${role_manage-clients}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660",
+ "attributes": {}
+ },
+ {
+ "id": "c7f57bbd-ef32-4a64-9888-7b8abd90777a",
+ "name": "manage-realm",
+ "description": "${role_manage-realm}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660",
+ "attributes": {}
+ },
+ {
+ "id": "8885dac8-0af3-45af-94ce-eff5e801bb80",
+ "name": "view-identity-providers",
+ "description": "${role_view-identity-providers}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660",
+ "attributes": {}
+ },
+ {
+ "id": "2673346c-b0ef-4e01-8a90-be03866093af",
+ "name": "manage-authorization",
+ "description": "${role_manage-authorization}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660",
+ "attributes": {}
+ },
+ {
+ "id": "b7182885-9e57-445f-8dae-17c16eb31b5d",
+ "name": "manage-identity-providers",
+ "description": "${role_manage-identity-providers}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660",
+ "attributes": {}
+ },
+ {
+ "id": "ba7bfe0c-cb07-4a47-b92c-b8132b57e181",
+ "name": "manage-users",
+ "description": "${role_manage-users}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660",
+ "attributes": {}
},
- "clientRole" : true,
- "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4",
- "attributes" : { }
- }, {
- "id" : "8a3bfe15-66d9-4f3d-83ac-801d682d42b0",
- "name" : "view-consent",
- "description" : "${role_view-consent}",
- "composite" : false,
- "clientRole" : true,
- "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4",
- "attributes" : { }
- } ]
+ {
+ "id": "13a8f0fc-647d-4bfe-b525-73956898e550",
+ "name": "query-realms",
+ "description": "${role_query-realms}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660",
+ "attributes": {}
+ },
+ {
+ "id": "ef4c57dc-78c2-4f9a-8d2b-0e97d46fc842",
+ "name": "view-realm",
+ "description": "${role_view-realm}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660",
+ "attributes": {}
+ },
+ {
+ "id": "2875da34-006c-4b7f-bfc8-9ae8e46af3a2",
+ "name": "view-users",
+ "description": "${role_view-users}",
+ "composite": true,
+ "composites": {
+ "client": {
+ "realm-management": [
+ "query-users",
+ "query-groups"
+ ]
+ }
+ },
+ "clientRole": true,
+ "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660",
+ "attributes": {}
+ },
+ {
+ "id": "c8c8f7dc-876b-4263-806f-3329f7cd5fd3",
+ "name": "create-client",
+ "description": "${role_create-client}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660",
+ "attributes": {}
+ },
+ {
+ "id": "21b84f90-5a9a-4845-a7ba-bbd98ac0fcc4",
+ "name": "view-clients",
+ "description": "${role_view-clients}",
+ "composite": true,
+ "composites": {
+ "client": {
+ "realm-management": [
+ "query-clients"
+ ]
+ }
+ },
+ "clientRole": true,
+ "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660",
+ "attributes": {}
+ },
+ {
+ "id": "6fd64c94-d663-4501-ad77-0dcf8887d434",
+ "name": "manage-events",
+ "description": "${role_manage-events}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660",
+ "attributes": {}
+ },
+ {
+ "id": "b321927a-023c-4d2a-99ad-24baf7ff6d83",
+ "name": "query-clients",
+ "description": "${role_query-clients}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660",
+ "attributes": {}
+ },
+ {
+ "id": "2fc21160-78de-457b-8594-e5c76cde1d5e",
+ "name": "view-events",
+ "description": "${role_view-events}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "dada0ae8-ee9f-415a-9685-42da7c563660",
+ "attributes": {}
+ }
+ ],
+ "test": [],
+ "security-admin-console": [],
+ "admin-cli": [],
+ "account-console": [],
+ "broker": [
+ {
+ "id": "07ee59b5-dca6-48fb-83d4-2994ef02850e",
+ "name": "read-token",
+ "description": "${role_read-token}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "b57d62bb-77ff-42bd-b8ff-381c7288f327",
+ "attributes": {}
+ }
+ ],
+ "account": [
+ {
+ "id": "17d2f811-7bdf-4c73-83b4-1037001797b8",
+ "name": "view-applications",
+ "description": "${role_view-applications}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "77f8127a-261e-4cd8-a77d-b74a389f7fd4",
+ "attributes": {}
+ },
+ {
+ "id": "f5918d56-bd4d-4035-8fa7-8622075ed690",
+ "name": "view-groups",
+ "description": "${role_view-groups}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "77f8127a-261e-4cd8-a77d-b74a389f7fd4",
+ "attributes": {}
+ },
+ {
+ "id": "d1ff44f9-419e-42fd-98e8-1add1169a972",
+ "name": "delete-account",
+ "description": "${role_delete-account}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "77f8127a-261e-4cd8-a77d-b74a389f7fd4",
+ "attributes": {}
+ },
+ {
+ "id": "14c23a18-ae2d-43c9-b0c0-aaf6e0c7f5b0",
+ "name": "manage-account-links",
+ "description": "${role_manage-account-links}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "77f8127a-261e-4cd8-a77d-b74a389f7fd4",
+ "attributes": {}
+ },
+ {
+ "id": "6fbe58af-d2fe-4d66-95fe-a2e8a818cb55",
+ "name": "view-profile",
+ "description": "${role_view-profile}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "77f8127a-261e-4cd8-a77d-b74a389f7fd4",
+ "attributes": {}
+ },
+ {
+ "id": "bdfd02bc-6f6a-47d2-82bc-0ca52d78ff48",
+ "name": "manage-consent",
+ "description": "${role_manage-consent}",
+ "composite": true,
+ "composites": {
+ "client": {
+ "account": [
+ "view-consent"
+ ]
+ }
+ },
+ "clientRole": true,
+ "containerId": "77f8127a-261e-4cd8-a77d-b74a389f7fd4",
+ "attributes": {}
+ },
+ {
+ "id": "782f3b0c-a17b-4a87-988b-1a711401f3b0",
+ "name": "manage-account",
+ "description": "${role_manage-account}",
+ "composite": true,
+ "composites": {
+ "client": {
+ "account": [
+ "manage-account-links"
+ ]
+ }
+ },
+ "clientRole": true,
+ "containerId": "77f8127a-261e-4cd8-a77d-b74a389f7fd4",
+ "attributes": {}
+ },
+ {
+ "id": "8a3bfe15-66d9-4f3d-83ac-801d682d42b0",
+ "name": "view-consent",
+ "description": "${role_view-consent}",
+ "composite": false,
+ "clientRole": true,
+ "containerId": "77f8127a-261e-4cd8-a77d-b74a389f7fd4",
+ "attributes": {}
+ }
+ ]
}
},
"groups" : [ {
@@ -409,7 +533,7 @@
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
- "realmRoles" : [ "default-roles-test" ],
+ "realmRoles" : [ "default-roles-test", "admin" ],
"notBefore" : 0,
"groups" : [ "/admins" ]
}, {
diff --git a/conf/solr/schema.xml b/conf/solr/schema.xml
index d5c789c7189..50835957b04 100644
--- a/conf/solr/schema.xml
+++ b/conf/solr/schema.xml
@@ -38,36 +38,37 @@
catchall "text" field, and use that for searching.
-->
-
+
@@ -115,12 +116,12 @@
-
-
-
-
-
-
+
+
+
+
+
+
@@ -167,6 +168,8 @@
+
+
@@ -201,7 +204,10 @@
-
+
+
+
+
@@ -212,7 +218,7 @@
-
+
@@ -250,6 +256,21 @@
WARNING: Do not remove the following include guards if you intend to use the neat helper scripts we provide.
-->
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
@@ -273,38 +294,38 @@
-
-
-
+
+
+
-
-
-
-
+
+
+
+
-
-
-
+
+
+
-
-
+
+
-
+
-
-
-
+
+
+
-
+
@@ -312,7 +333,7 @@
-
+
@@ -324,12 +345,12 @@
-
+
-
+
@@ -349,19 +370,19 @@
-
+
-
+
-
+
@@ -385,28 +406,28 @@
-
-
+
+
-
+
-
+
-
-
+
+
+
-
@@ -492,6 +513,21 @@
WARNING: Do not remove the following include guards if you intend to use the neat helper scripts we provide.
-->
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
@@ -534,12 +570,12 @@
-
+
@@ -570,8 +606,8 @@
-
+
@@ -595,9 +631,9 @@
-
+
@@ -627,13 +663,13 @@
-
-
+
+
-
+
@@ -645,10 +681,10 @@
+
-
@@ -751,7 +787,7 @@
-
-
+
+
@@ -815,7 +851,9 @@
-
+
+
+
diff --git a/conf/solr/solrconfig.xml b/conf/solr/solrconfig.xml
index 34386375fe1..97965bd77d7 100644
--- a/conf/solr/solrconfig.xml
+++ b/conf/solr/solrconfig.xml
@@ -35,52 +35,7 @@
that you fully re-index after changing this setting as it can
affect both how text is indexed and queried.
-->
- 9.7
-
-
-
-
-
-
-
-
+ 9.11${solr.ulog.dir:}
- ${solr.ulog.numVersionBuckets:65536}${solr.max.booleanClauses:1024}
+
+ ${solr.query.minPrefixLength:-1}
+
@@ -494,23 +457,6 @@
-->
200
-
-
-
+ processor="uuid,remove-blank,field-name-mutating,max-fields,parse-boolean,parse-long,parse-double,parse-date,add-schema-fields">
diff --git a/doc/release-notes/6.6-release-notes.md b/doc/release-notes/6.6-release-notes.md
new file mode 100644
index 00000000000..751c471a0a3
--- /dev/null
+++ b/doc/release-notes/6.6-release-notes.md
@@ -0,0 +1,549 @@
+# Dataverse 6.6
+
+Please note: To read these instructions in full, please go to https://github.com/IQSS/dataverse/releases/tag/v6.6 rather than the [list of releases](https://github.com/IQSS/dataverse/releases), which will cut them off.
+
+This release brings new features, enhancements, and bug fixes to Dataverse. Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project!
+
+## Release Highlights
+
+Highlights for Dataverse 6.6 include:
+
+- metadata fields can be "display on create" per collection
+- ORCIDs linked to accounts
+- version notes
+- harvesting from DataCite
+- citations using Citation Style Language (CSL)
+- license metadata enhancements
+- metadata fields now support range searches (dates, integers, etc.)
+- more accurate search highlighting
+- collections can be moved by using the superuser dashboard
+- new 3D Objects metadata block
+- new Archival metadata block (experimental)
+- optionally prevent publishing of datasets without files
+- Signposting output now contains links to all dataset metadata export formats
+- infrastructure updates (Payara and Solr)
+
+In a recent community call, we talked about many of these highlights if you'd like to watch the [video](https://harvard.zoom.us/rec/share/Ir5CkFHkzoya9b5Nk69rLFUpTyGics3-KGLl9WITSLMy4ezHRsB8CnY22cUNg2g.JPpxrjzHMeCii_zO) (around 22:30).
+
+## Features Added
+
+### Metadata Fields Can Be "Display on Create" Per Collection
+
+Collection administrators can now configure which metadata fields appear during dataset creation through the `displayOnCreate` property, even when fields are not required. This provides greater control over metadata visibility and can help improve metadata completeness.
+
+Currently this feature can only be configured [via API](https://guides.dataverse.org/en/6.6/api/native-api.html#update-collection-input-levels), but a UI implementation is planned in #11221. See #10476, #11224, and #11312.
+
+### ORCIDs Linked to Accounts
+
+Dataverse now includes improved integration with ORCID, supported through a grant to GDCC from the [ORCID Global Participation Fund](https://info.orcid.org/global-participation-fund-announces-fourth-round-of-awardees/).
+
+Specifically, Dataverse users can now link their Dataverse account with their ORCID profile. Previously, this was only available to users who logged in with ORCID. Once linked, Dataverse will automatically prepopulate their ORCID to their author metadata when they create a dataset.
+
+This functionality leverages Dataverse's existing support for login via ORCID, but can be turned on independently of it. If ORCID login is enabled, the user's ORCID will automatically be added to their profile. If the user has logged in via some other mechanism, they are able to click a button to initiate a similar authentication process in which the user must login to their ORCID account and approve the connection.
+
+Feedback from installations that enable this functionality is requested and we expect that updates can be made in the next Dataverse release.
+
+See the [User Guide](http://guides.dataverse.org/en/6.6/user/account.html#linking-orcid-with-your-account-profile), [Installation Guide](http://guides.dataverse.org/en/6.6/installation/orcid.html), #7284, and #11222.
+
+### Version Notes
+
+Dataverse now supports the option of adding a version note before or during the publication of a dataset. These notes can be used, for example, to indicate why a version was created or how it differs from the prior version. Whether this feature is enabled is controlled by the flag `dataverse.feature.enable-version-note`. Version notes are shown in the user interface (in the dataset page version table), indexed (as `versionNote`), available via the API, and have been added to the JSON, DDI, DataCite, and OAI-ORE exports.
+
+With the addition of this feature, work has been done to clean-up and rename fields that have been used for specifying the reason for deaccessioning a dataset and providing an optional link to a non-Dataverse location where the dataset still can be found. The former was listed in some JSON-based API calls and exports as "versionNote" and is now "deaccessionNote", while the latter was referred to as "archiveNote" and is now "deaccessionLink".
+
+Further, some database consolidation has been done to combine the deaccessionlink and archivenote fields, which appear to have both been used for the same purpose. The deaccessionlink database field is older and also was not displayed in the current UI. Going forward, only the deaccessionlink column exists.
+
+See the [User Guide](https://guides.dataverse.org/en/6.6/user/dataset-management.html#data-provenance), [API Guide](https://guides.dataverse.org/en/6.6/api/native-api.html#dataset-version-notes), #8431, and #11068.
+
+### OAI-PMH Harvesting from DataCite
+
+DataCite maintains an OAI server (https://oai.datacite.org/oai) that serves records for every DOI they have registered. There's been a lot of interest in the community in being able to harvest from them. This way, it will be possible to harvest metadata from institution X even if the institution X does not maintain an OAI server of their own, if they happen to register their DOIs with DataCite. One extra element of this harvesting model that makes it especially powerful and flexible is the DataCite's concept of a "dynamic OAI set": a harvester is not limited to harvesting the pre-defined set of ALL the records registered by the institution X, but can instead harvest virtually any arbitrary subset thereof; any query that the DataCite search API understands can be used as an OAI set. The feature is already in use at Harvard Dataverse, as a beta version patch.
+
+For various reasons, in order to take advantage of this feature harvesting clients must be created using the `/api/harvest/clients` API. Once configured however, harvests can be run from the Harvesting Clients control panel in the UI.
+
+DataCite-harvesting clients must be configured with 2 new feature flags, `useListRecords` and `useOaiIdentifiersAsPids` (added in Dataverse 6.5). Note that these features may be of use when harvesting from other sources, not just from DataCite.
+
+See the [Admin Guide](http://guides.dataverse.org/en/6.6/admin/harvestclients.html#harvesting-from-datacite), [API Guide](http://guides.dataverse.org/en/6.6/api/native-api.html#harvesting-from-datacite), #10909, and #11011.
+
+### Citations Using Citation Style Language (CSL)
+
+This release adds support for generating citations in any of the standard independent formats specified using the [Citation Style Language](https://citationstyles.org).
+
+The CSL formats are available to copy/paste if you click "Cite Dataset" and then "View Styled Citations" on the dataset page. An API call to retrieve a dataset citation in EndNote, RIS, BibTeX, and CSLJson format has also been added. The first three have been available as downloads from the UI (CSLJson is not) but have not been directly accessible via API until now. The CSLJson format is new to Dataverse and can be used with open source libraries to generate all of the other CSL-style citations.
+
+Admins can use a new `dataverse.csl.common-styles` setting to highlight commonly used styles. Common styles are listed in the pop-up, while others can be found by type-ahead search in a list of 1000+ options.
+
+See the [User Guide](http://guides.dataverse.org/en/6.6/user/find-use-data.html#cite-data), [Settings](http://guides.dataverse.org/en/6.6/installation/config.html#dataverse-csl-common-styles), [API Guide](http://guides.dataverse.org/en/6.6/api/native-api.html#get-citation-in-other-formats), and #11163.
+
+### License Metadata Enhancements
+
+- Added new fields to licenses: rightsIdentifier, rightsIdentifierScheme, schemeUri, languageCode. See JSON files under [Adding Licenses](https://guides.dataverse.org/en/6.6/installation/config.html#adding-licenses) in the guides
+- Updated DataCite metadata export to include rightsIdentifier, rightsIdentifierScheme, and schemeUri consistent with the DataCite 4.5 schema and examples
+- Enhanced metadata exports to include all new license fields
+- Existing licenses from the example set included with Dataverse will be automatically updated with new fields
+- Existing API calls support the new optional fields
+
+See below for upgrade instructions. See also #10883 and #11232.
+
+### Range Search
+
+This release enhances how numerical and date fields are indexed in Solr. Previously, all fields were indexed as English text (`text_en`), but with this update:
+
+* Integer fields are indexed as `plong`
+* Float fields are indexed as `pdouble`
+* Date fields are indexed as `date_range` (`solr.DateRangeField`)
+
+This change enables range queries when searching from both the UI and the API, such as `dateOfDeposit:[2000-01-01 TO 2014-12-31]` or `targetSampleActualSize:[25 TO 50]`. See below for a full list of fields that now support range search.
+
+Additionally, search result highlighting is now more accurate, ensuring that only fields relevant to the query are highlighted in search results. If the query is specifically limited to certain fields, the highlighting is now limited to those fields as well. See #10887.
+
+Specifically, the following fields were updated:
+
+- coverage.Depth
+- coverage.ObjectCount
+- coverage.ObjectDensity
+- coverage.Redshift.MaximumValue
+- coverage.Redshift.MinimumValue
+- coverage.RedshiftValue
+- coverage.SkyFraction
+- coverage.Spectral.CentralWavelength
+- coverage.Spectral.MaximumWavelength
+- coverage.Spectral.MinimumWavelength
+- coverage.Temporal.StartTime
+- coverage.Temporal.StopTime
+- dateOfCollectionEnd
+- dateOfCollectionStart
+- dateOfDeposit
+- distributionDate
+- dsDescriptionDate
+- journalPubDate
+- productionDate
+- resolution.Redshift
+- targetSampleActualSize
+- timePeriodCoveredEnd
+- timePeriodCoveredStart
+
+### New 3D Objects Metadata Block
+
+A new metadata block has been added for describing 3D object data. You can download it from the [guides](https://guides.dataverse.org/en/6.6/user/appendix.html). See also #11120 and #11167.
+
+All new Dataverse installations will receive this metadata block by default. We recommend adding it by following the upgrade instructions below.
+
+### New Archival Metadata Block (Experimental)
+
+An experimental "Archival" metadata block has been added, [downloadable](https://guides.dataverse.org/en/6.6/user/appendix.html) from the User Guide. The purpose of the metadata block is to enable repositories to register metadata relating to the potential archiving of the dataset at a depositor archive, whether that is your own institutional archive or an external archive, e.g. a historical archive. Feedback is welcome! See also #10626.
+
+### Prevent Publishing of Datasets Without Files
+
+Datasets without files can be optionally prevented from being published through a new "requireFilesToPublishDataset" boolean defined at the collection level. This boolean can be set only via API and only by a superuser. See [Change Collection Attributes](https://guides.dataverse.org/en/6.6/api/native-api.html#change-collection-attributes). If the boolean is not set, the parent collection is consulted. If you do not set the boolean, the existing behavior of datasets being able to be published without files will continue. Superusers can still publish datasets whether or not the boolean is set. See #10981 and #10994.
+
+### Metadata Source Facet Can Now Differentiate Between Harvested Sources
+
+The behavior of the feature flag `index-harvested-metadata-source` and the "Metadata Source" facet, which were added and updated, respectively, in [Dataverse 6.3](https://github.com/IQSS/dataverse/releases/tag/v6.3) (through pull requests #10464 and #10651), have been updated. A new field called "Source Name" has been added to harvesting clients.
+
+Before Dataverse 6.3, all harvested content (datasets and files) appeared together under "Harvested" under the "Metadata Source" facet. This is still the behavior of Dataverse out of the box. Since Dataverse 6.3, enabling the `index-harvested-metadata-source` feature flag (and reindexing) resulted in harvested content appearing under the nickname for whatever harvesting client was used to bring in the content. This meant that instead of having all harvested content lumped together under "Harvested", content would appear under "client1", "client2", etc.
+
+With this release, enabling the `index-harvested-metadata-source` feature flag, populating a new field for harvesting clients called "Source Name" ("sourceName" in the [API](https://dataverse-guide--11217.org.readthedocs.build/en/11217/api/native-api.html#create-a-harvesting-client)), and reindexing (see upgrade instructions below) results in the source name appearing under the "Metadata Source" facet rather than the harvesting client nickname. This gives you more control over the name that appears under the "Metadata Source" facet and allows you to reuse the same source name to group harvested content from various harvesting clients under the same name if you wish.
+
+Previously, `index-harvested-metadata-source` was not documented in the guides, but now you can find information about it under [Feature Flags](https://guides.dataverse.org/en/6.6/installation/config.html#feature-flags). See also #10217 and #11217.
+
+### Globus Framework Improvements
+
+The improvements and optimizations in this release build on top of the earlier work (such as #10781). They are based on the experience gained at IQSS as part of the production rollout of the Large Data Storage services that utilizes Globus.
+
+The changes in this release focus on improving Globus *downloads*, i.e., transfers from Dataverse-linked Globus volumes to users' Globus collections. Most importantly, the mechanism of "Asynchronous Task Monitoring", first introduced in #10781 for *uploads*, has been extended to handle downloads as well. This generally makes downloads more reliable, specifically in how Dataverse manages temporary access rules granted to users, minimizing the risk of consequent downloads failing because of stale access rules left in place.
+
+Multiple other improvements have been made making the underlying Globus framework more reliable and robust.
+
+See `globus-use-experimental-async-framework` under [Feature Flags](https://guides.dataverse.org/en/6.6/installation/config.html#feature-flags) and [dataverse.files.globus-monitoring-server](https://guides.dataverse.org/en/6.6/installation/config.html#dataverse-files-globus-monitoring-server) in the Installation Guide, #11057, and #11125.
+
+### OIDC Bearer Tokens
+
+The release extends the OIDC API auth mechanism, available through feature flag `api-bearer-auth`, to properly handle cases where ``BearerTokenAuthMechanism`` successfully validates the token but cannot identify any Dataverse user because there is no account associated with the token.
+
+To register a new user who has authenticated via an OIDC provider, a new endpoint has been implemented (`/users/register`). A feature flag named `api-bearer-auth-provide-missing-claims` has been implemented to allow sending missing user claims in the request JSON. This is useful when the identity provider does not supply the necessary claims. However, this flag will only be considered if the `api-bearer-auth` feature flag is enabled. If the latter is not enabled, the `api-bearer-auth-provide-missing-claims` flag will be ignored.
+
+A feature flag named `api-bearer-auth-handle-tos-acceptance-in-idp` has been implemented. When enabled, it specifies that Terms of Service acceptance is managed by the identity provider, eliminating the need to explicitly include the acceptance in the user registration request JSON.
+
+See [the guides](https://guides.dataverse.org/en/6.6/api/auth.html#bearer-tokens), #10959, and #10972.
+
+### Signposting Output Now Contains Links to All Dataset Metadata Export Formats
+
+When Signposting was added in Dataverse 5.14 (#8981), it provided links only for the `schema.org` metadata export format.
+
+The output of HEAD, GET, and the Signposting "linkset" API have all been updated to include links to all available dataset metadata export formats, including any external exporters, such as Croissant, that have been enabled.
+
+This provides a lightweight machine-readable way to first retrieve a list of links, such as via a HTTP HEAD request, to each available metadata export format and then follow up with a request for the export format of interest.
+
+In addition, the content type for the `schema.org` dataset metadata export format has been corrected. It was `application/json` and now it is `application/ld+json`.
+
+See also [the guides](https://guides.dataverse.org/en/6.6/api/native-api.html#retrieve-signposting-information), #10542 and #11045.
+
+### Dataset Types Can Be Linked to Metadata Blocks
+
+Metadata blocks (e.g. "CodeMeta") can now be linked to dataset types (e.g. "software") using new superuser APIs.
+
+This will have the following effects for the APIs used by the [new Dataverse UI](https://github.com/IQSS/dataverse-frontend):
+
+- The list of fields shown when creating a dataset will include fields marked as "displayoncreate" (in the tsv/database) for metadata blocks (e.g. "CodeMeta") that are linked to the dataset type (e.g. "software") that is passed to the API.
+- The metadata blocks shown when editing a dataset will include metadata blocks (e.g. "CodeMeta") that are linked to the dataset type (e.g. "software") that is passed to the API.
+
+Mostly in order to write automated tests for the above, a [displayOnCreate](https://guides.dataverse.org/en/6.6/api/native-api.html#set-displayoncreate-for-a-dataset-field) API endpoint has been added.
+
+For more information, see the guides ([overview](https://guides.dataverse.org/en/6.6/user/dataset-management.html#dataset-types), [new APIs](https://guides.dataverse.org/en/6.6/api/native-api.html#link-dataset-type-with-metadata-blocks)), #10519 and #11001.
+
+### Other Features
+
+- In addition to the API [Move a Dataverse Collection](https://guides.dataverse.org/en/6.6/admin/dataverses-datasets.html#move-a-dataverse-collection), it is now possible for a Dataverse administrator to move a collection using the Dataverse dashboard. See #10304 and #11150.
+- The Preview URL popup and [related documentation](https://guides.dataverse.org/en/6.6/user/dataset-management.html#preview-url-to-review-unpublished-dataset) have been updated to give more information about anonymous access, including the names of the dataset fields that will be withheld from the Anonymous Preview URL user and to suggest how to review the URL before releasing it. See also #11159 and #11164.
+- [ROR](https://ror.org) (Research Organization Registry) has been added as an Author Identifier Type for when the author is an organization rather than a person. Like ORCID, ROR will appear in the "Datacite" metadata export format. See #11075 and #11118.
+- The publisher value of harvested datasets is now attributed to the dataset's distributor instead of its producer. This improves the citation associated with these datasets, but the change affects only newly harvested datasets. See "Upgrade Instructions" below on how to re-harvest. For more information, see [the guides](http://guides.dataverse.org/en/6.6/admin/harvestclients.html#harvesting-client-changelog), #8739, and #9013.
+- A new harvest status differentiates between a complete harvest with errors ("completed with failures") and without errors ("completed"). Also, harvest status labels are now internationalized. See #9294 and #11017.
+- The OAI-ORE exporter can now export metadata containing nested compound fields or compound fields within compound fields. See #10809 and #11190.
+- It is now possible to edit a custom role with the same alias. See #8808 and #10612.
+- The [Metadata Customization](https://guides.dataverse.org/en/6.6/admin/metadatacustomization.html#controlledvocabulary-enumerated-properties) documentation has been updated to explain how to implement a boolean fieldtype (look for "boolean"). See #7961 and #11064.
+- The version of Stata files is now detected during S3 direct upload (as it was for normal uploads), allowing ingest of Stata 14 and 15 files that have been uploaded directly. See [the guides](https://guides.dataverse.org/en/6.6/developers/big-data-support.html#features-that-are-disabled-if-s3-direct-upload-is-enabled), #10108, and #11054.
+- It is now possible to populate the "Keyword" metadata field from an [OntoPortal](https://ontoportal.org) service. The code has been shared to the GDCC [dataverse-external-vocab-support](https://github.com/gdcc/dataverse-external-vocab-support#scripts-in-production) GitHub repository. See #11258.
+- Support for legacy configuration of a PermaLink PID provider, such as using the :Protocol,:Authority, and :Shoulder settings, has been fixed. See #10516 and #10521.
+- On the home page for each guide (User Guide, etc.) there was an overwhelming amount of information in the form of a deeply nested table of contents. The depth of the table of contents has been reduced to two levels, making the home page for each guide more readable. Compare the User Guide for [6.5](https://guides.dataverse.org/en/6.5/user/index.html) vs. [6.6](https://guides.dataverse.org/en/6.6/user/index.html) and see #11166.
+- For compliance with GDPR and other privacy regulations, advice on adding a cookie consent popup has been added to the guides. See the new [cookie consent](https://guides.dataverse.org/en/6.6/installation/config.html#adding-cookie-consent-for-gdpr-etc) section and #10320.
+- A new file has been added to import the French Open License to Dataverse: licenseEtalab-2.0.json. You can download it from [the guides](http://guides.dataverse.org/en/6.6/installation/config.html#adding-licenses). This license, which is compatible with the Creative Commons license, is recommended by the French government for open documents. See #9301, #9302, and #11302.
+- The API that lists versions of a dataset now features an optional `excludeMetadataBlocks` parameter, which defaults to "false" for backward compatibility. For a dataset with a large number of versions and/or metadataBlocks, having the metadata blocks included can dramatically increase the volume of the output. See also [the guides](https://guides.dataverse.org/en/6.6/api/native-api.html#list-versions-of-a-dataset), #10171, and #10778.
+- Deeply nested metadata fields are not supported but the code used to generate the Solr schema has been adjusted to support them. See #11136.
+- The [tutorial](https://guides.dataverse.org/en/6.6/container/running/demo.html) on running Dataverse in Docker has been updated to explain how to configure the root collection using a JSON file (#10541 and #11201) and now uses the Permalink PID provider instead of the FAKE DOI Provider (#11107 and #11108).
+- Payara application server has been upgraded to version 6.2025.2. See #11126 and #11128.
+- Solr has been upgraded to version 9.8.0. See #10713.
+- For testing purposes, the FAKE PID provider can now be used with [file PIDs enabled](https://guides.dataverse.org/en/6.6/installation/config.html#filepidsenabled). (The FAKE provider is not recommended for any production use.) See #10979.
+
+## Bugs Fixed
+
+- A bug which causes users of the Anonymous Review URL to have some metadata of published datasets withheld has been fixed. See #11202 and #11164.
+- A bug that caused ORCIDs starting with "https://orcid.org/" entered as author identifier to be ignored when creating the DataCite metadata has been fixed. This primarily affected users of the ORCID external vocabulary script; for the manual entry form, we used to recommend not using the URL form. The display of authorIdentifier, when not using any external vocabulary scripts, has been improved so that either the plain identifier (e.g. "0000-0002-1825-0097") or its URL form (e.g. "https://orcid.org/0000-0002-1825-0097") will result in valid links in the display (for identifier types that have a URL form). The URL form is now [recommended](http://guides.dataverse.org/en/6.6/user/dataset-management.html#adding-a-new-dataset) when doing manual entry. See #11242.
+- Multiple small issues with the formatting of PIDs in the DDI exporters, and EndNote and BibTeX citation formats have been addressed. These should improve the ability to import Dataverse citations into reference managers and fix potential issues harvesting datasets using PermaLinks. See #10768, #10769, #11165, and #10790.
+- On the Advanced Search page, the metadata fields are now displayed in the correct order as defined in the TSV file via the displayOrder value, making the order the same as when you view or edit metadata. Note that fields that are not defined in the TSV file, like the "Persistent ID" and "Publication Date", will be displayed at the end. See #11272 and #11279.
+- Bugs that caused 1) guestbook questions to appear along with terms of use/terms of access in the request access dialog when no guestbook was configured, and 2) terms of access to not be shown when using the per-file request access/download menu items have been fixed. Text related to configuring the choice to have guestbooks appear when file access is requested or when files are downloaded has been updated to make it clearer that this affects only datasets where guestbooks have been configured. See #11203.
+- The file page version table now shows whether a file has been replaced. See #11142 and #11145.
+- We fixed an issue where draft versions of datasets were sorted using the release timestamp of their most recent major version. This caused newer drafts to appear incorrectly alongside their corresponding major version, instead of at the top, when sorted by "newest first". Sorting now uses the last update timestamp when sorting draft datasets. The sorting behavior of published major and minor dataset versions is unchanged. There is no need to reindex datasets because Solr is being upgraded (see "Upgrade Instructions"), which will result in an empty database that will be reindexed. See #11178.
+- Some external controlled vocabulary scripts/configurations, when used on a metadata field that is single-valued, could result in indexing failure for the dataset, e.g. when the script tried to index both the identifier and name of the identified entity for indexing. Dataverse has been updated to correctly indicate the need for a multi-valued Solr field in these cases in the call to `/api/admin/index/solr/schema`. Configuring the Solr schema and running the update-fields.sh script as usually recommended when using custom metadata blocks (see "Upgrade Instructions") will resolve the issue. See [the guides](https://guides.dataverse.org/en/6.6/admin/metadatacustomization.html#using-external-vocabulary-services), #11095, and #11096.
+- The OpenAIRE metadata export format can now correctly process one or multiple productionPlaces as geolocation. See #9546 and #11194.
+- We fixed a bug that caused adding free-form provenance to a file to fail. See #11145.
+- A bug has been fixed which could cause publication of datasets to fail in cases where they were not assigned a DOI at creation. See #11234 and #11236.
+- When users request access to files, the people who have permission to grant access received an email with a link that didn't work due to a trailing period (full stop) right next to the link, e.g. `https://demo.dataverse.org/permissions-manage-files.xhtml?id=9.` A space has been added to fix this. See #10384 and #11115.
+- Harvesting clients now use the correct granularity while re-running a partial harvest, using the `from` parameter. The correct granularity comes from the `Identify` verb request. See #11020 and #11038.
+- Access requests were missing on the File Permission page after upgrading from Dataverse 6.0. This has been corrected with a database update script. See #10714 and #11061.
+- When a dataset has a long running lock, including when it is "in review", Dataverse will now slow the page refresh rate over time. See #11264 and #11269.
+- The `/api/info/metrics/files/monthly` API call had a bug that resulted in files being counted each time they were published in a new version if those publication events occurred in different months. This resulted in an over-count. The `/api/info/metrics/files` and `/api/info/metrics/files/toMonth` API calls had a bug that resulted in files that were published but no longer in the latest published version as of the specified date (now, or the date entered in the `/toMonth` variant) not being counted. This resulted in an under-count. See #11189.
+- DatasetFieldTypes in MetadataBlock response that are also a child of another DatasetFieldType were being returned twice. The child DatasetFieldType was included in the "fields" object as well as in the "childFields" of its parent DatasetFieldType. This fix suppresses the standalone object so only one instance of the DatasetFieldType is returned (in the "childFields" of its parent). This fix changes the JSON output of the API `/api/dataverses/{dataverseAlias}/metadatablocks` (see "Backward Incompatible Changes", below). See #10472 and #11066.
+- A bug that caused replacing files via API when file PIDs were enabled to fail has been fixed. See #10975 and #10979.
+- The [:CustomDatasetSummaryFields](https://guides.dataverse.org/en/6.6/installation/config.html#customdatasetsummaryfields) setting now allows spaces along with a comma separating field names. In addition, a bug that caused license information to be hidden if there are no values for any of the custom fields specified has been fixed. See #11228 and #11229.
+- Dataverse 6.5 introduced a bug which causes search to fail for non-superusers in multiple groups when the `AVOID_EXPENSIVE_SOLR_JOIN` feature flag is set to true. This release fixes the bug. See #11133 and #11134.
+- We fixed a bug with My Data where listing collections for a user with only rights on harvested collections would result in a server error response. See #11083.
+- Minor styling fixes for the Related Publication field and fields using ORCID or ROR have been made. See #11053, #10964, and #11106.
+- In the Search API, files were displaying DRAFT version instead of latest released version under `dataset_citation`. See #10735 and #11051.
+- Unnecessary Solr documents were being created when a file was added or deleted from a draft dataset. These documents could accumulate and potentially impact performance. There is no action to take because this release includes a new Solr version, which will start with an empty database. See #11113 and #11114.
+- When using the API to update a collection, omitting optional fields such as `inputLevels`, `facetIds`, or `metadataBlockNames` caused data to be deleted. The fix no longer deletes data for these fields. Two new flags have been added to the `metadataBlocks` JSON object to signal the deletion of the data: `inheritMetadataBlocksFromParent: true` and `inheritFacetsFromParent: true`. See [the guides](https://guides.dataverse.org/en/6.6/api/native-api.html#update-a-dataverse-collection), #11130, and #11144.
+
+## API Updates
+
+### Search API Returns Additional Fields for Files
+
+New fields have been added to Search API results for `type=files`.
+
+For Files:
+
+- restricted: boolean
+- canDownloadFile: boolean (from file user permission)
+- categories: array of string; similar to the "categories" field in the metadata API.
+
+For tabular files:
+
+- tabularTags: array of string for example, `{"tabularTags" : ["Event", "Genomics", "Geospatial"]}`
+- variables: number/int shows how many variables we have for the tabular file
+- observations: number/int shows how many observations for the tabular file
+
+See #11027 and #11097.
+
+### Backend Support for Collection Featured Items
+
+CRUD endpoints for Collection Featured Items have been implemented. In particular, the following endpoints have been implemented:
+
+- Create a featured item (POST `/api/dataverses/{id}/featuredItems`)
+- Update a featured item (PUT `/api/dataverseFeaturedItems/{id}`)
+- Delete a featured item (DELETE `/api/dataverseFeaturedItems/{id}`)
+- List all featured items in a collection (GET `/api/dataverses/{id}/featuredItems`)
+- Delete all featured items in a collection (DELETE `/api/dataverses/{id}/featuredItems`)
+- Update all featured items in a collection (PUT `/api/dataverses/{id}/featuredItems`)
+
+See also the "Settings Added" section, #10943 and #11124.
+
+### Other API Updates
+
+- Multiple files can be deleted from a dataset at once. See [the guides](https://guides.dataverse.org/en/6.6/api/native-api.html#delete-files-from-a-dataset) and #11230.
+- An API has been added to get the "classic" download count from a dataset with an optional `includeMDC` parameter (for Make Data Count). See [the guides](http://guides.dataverse.org/en/6.6/api/native-api.html#get-the-download-count-of-a-dataset), #11244 and #11282.
+- An API has been added that lists the collections that the user has access to via the permission passed. See [the guides](http://guides.dataverse.org/en/6.6/api/native-api.html#list-dataverse-collections-a-user-can-act-on-based-on-their-permissions), #6467, and #10906.
+- An API has been added to get dataset versions including a summary of differences between consecutive versions where available. See [the docs](https://guides.dataverse.org/en/6.6/api/native-api.html#get-versions-of-a-dataset-with-summary-of-changes), #10888, and #10945.
+- An API has been added to list the versions of a data file, showing any changes that affected the file with each version. See [the guides](https://guides.dataverse.org/en/6.6/api/native-api.html#get-json-representation-of-a-file-s-versions), #11198 and #11237.
+- The Search API has a new [parameter](https://guides.dataverse.org/en/6.6/api/search.html#parameters) called `show_type_counts`. If you set it to true, it will return `total_count_per_object_type` for the types dataverse, dataset, and files (#11065 and #11082) even if the search result for any given type is 0 (#11127 and #11138).
+- CRUD operations for external tools are now available for superusers from non-localhost. See [the guides](https://guides.dataverse.org/en/6.6/admin/external-tools.html#managing-external-tools), #10930 and #11079.
+- A new API endpoint has been added that allows a global role to be updated. See [the guides](https://guides.dataverse.org/en/6.6/api/native-api.html#update-global-role) and #10612.
+- An API has been added to send feedback to the collection, dataset, or data file's contacts. If necessary, you can [rate limit](https://guides.dataverse.org/en/6.6/installation/config.html#rate-limiting) the `CheckRateLimitForDatasetFeedbackCommand` and configure the new [:ContactFeedbackMessageSizeLimit](https://guides.dataverse.org/en/6.6/installation/config.html#contactfeedbackmessagesizelimit) database setting. See [the guides](http://guides.dataverse.org/en/6.6/api/native-api.html#send-feedback-to-contact-s), #11129, and #11162.
+- /api/metadatablocks is no longer returning duplicated metadata properties and does not omit metadata properties when called. See "Backward Incompatible Changes" below and #10764.
+- A new query param, `returnChildCount`, has been added to the getDataverse endpoint (`/api/dataverses/{id}`) for optionally retrieving the child count, which represents the number of collections, datasets, or files within the collection (direct children only). See also #11255 and #11259.
+
+## End-Of-Life (EOL) Announcements
+
+### PostgreSQL 13 reaches EOL on 13 November 2025
+
+PostgreSQL 13 reaches EOL on 13 November 2025, per the [PostgreSQL versioning policy](https://www.postgresql.org/support/versioning/). Our first step toward moving off version 13 was to [switch](https://github.com/gdcc/dataverse-ansible/commit/8ebbd84ad2cf3903b8f995f0d34578250f4223ff) our testing to version 16, as we've [noted](https://guides.dataverse.org/en/6.6/installation/prerequisites.html#postgresql) in the guides. You are encouraged to start planning your upgrade and may want to review the [Dataverse 5.4 release notes](https://github.com/IQSS/dataverse/releases/tag/v5.4) as the upgrade process (e.g. `pg_dumpall`, etc.) will likely be similar. If you notice any bumps along the way, please let us know!
+
+Dataverse developers [using Docker](https://guides.dataverse.org/en/6.6/container/dev-usage.html) have been using PostgreSQL 17 since Dataverse 6.5 (#10912). (Developers not using Docker who are still on PostgreSQL 13 are encouraged to upgrade.) Older or newer versions should work, within reason.
+
+See also #11212 and #11215.
+
+## Security
+
+### SameSite Cookie Attribute
+
+The SameSite cookie attribute is defined in an upcoming revision to [RFC 6265](https://datatracker.ietf.org/doc/html/rfc6265) (HTTP State Management Mechanism) called [6265bis](https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis-19) ("bis" meaning "repeated"). The possible values are "None", "Lax", and "Strict".
+
+"If no SameSite attribute is set, the cookie is treated as Lax by default" by browsers according to [MDN](https://developer.mozilla.org/en-US/docs/Web/HTTP/Cookies#controlling_third-party_cookies_with_samesite). This was the previous behavior of Dataverse, to not set the SameSite attribute.
+
+New Dataverse installations now explicitly set the SameSite cookie attribute to "Lax" out of the box through the installer (in the case of a "classic" installation) or through an updated base image (in the case of a Docker installation). Classic installations should follow the upgrade instructions below to bring their installation up to date with the behavior for new installations. Docker installations will automatically get the updated base image.
+
+While you are welcome to experiment with "Strict", which is intended to help prevent Cross-Site Request Forgery (CSRF) attacks, as described in the RFC proposal and an OWASP [cheatsheet](https://cheatsheetseries.owasp.org/cheatsheets/Cross-Site_Request_Forgery_Prevention_Cheat_Sheet.html#samesite-cookie-attribute), our testing so far indicates that some functionality, such as OIDC login, seems to be incompatible with "Strict".
+
+You should avoid the use of "None" as it is less secure than "Lax". See also [the guides](https://guides.dataverse.org/en/6.6/installation/config.html#samesite-cookie-attribute), https://github.com/IQSS/dataverse-security/issues/27, #11210, and the upgrade instructions below.
+
+## Settings Added
+
+- dataverse.feature.enable-version-note
+- dataverse.csl.common-styles
+- dataverse.files.featured-items.image-maxsize - It sets the maximum allowed size of the image that can be added to a featured item.
+- dataverse.files.featured-items.image-uploads - It specifies the name of the subdirectory for saving featured item images within the docroot directory.
+- dataverse.feature.api-bearer-auth-provide-missing-claims
+- dataverse.feature.api-bearer-auth-handle-tos-acceptance-in-idp
+- :ContactFeedbackMessageSizeLimit
+
+## Backward Incompatible Changes
+
+Generally speaking, see the [API Changelog](https://guides.dataverse.org/en/latest/api/changelog.html) for a list of backward-incompatible API changes.
+
+- /api/metadatablocks is no longer returning duplicated metadata properties and does not omit metadata properties when called. See #10764.
+- The JSON response of API call `/api/dataverses/{dataverseAlias}/metadatablocks` will no longer include the DatasetFieldTypes in "fields" if they are children of another DatasetFieldType. The child DatasetFieldType will only be included in the "childFields" of its parent DatasetFieldType. See #10472 and #11066.
+- `versionNote` has been renamed to `deaccessionNote`. `archiveNote` has been renamed to `deaccessionLink`. See #11068.
+- The [Show Role](https://guides.dataverse.org/en/6.6/api/native-api.html#show-role) API endpoint was returning 401 Unauthorized when a permission check failed. This has been corrected to return 403 Forbidden instead. That is, the API token is known to be good (401 otherwise) but the user lacks permission (403 is now sent). See also the [API Changelog](https://guides.dataverse.org/en/6.6/11116/api/changelog.html), #10340, and #11116.
+- Changes to PID formatting occur in the DDI/DDI Html export formats and the EndNote and BibTex citation formats. These changes correct errors and improve conformance with best practices but could break parsing of these formats. See #10768, #10769, #11165, and #10790.
+
+## Complete List of Changes
+
+For the complete list of code changes in this release, see the [6.6 milestone](https://github.com/IQSS/dataverse/issues?q=milestone%3A6.6+is%3Aclosed) in GitHub.
+
+## Getting Help
+
+For help with upgrading, installing, or general questions please post to the [Dataverse Community Google Group](https://groups.google.com/g/dataverse-community) or email support@dataverse.org.
+
+## Installation
+
+If this is a new installation, please follow our [Installation Guide](https://guides.dataverse.org/en/latest/installation/). Please don't be shy about [asking for help](https://guides.dataverse.org/en/latest/installation/intro.html#getting-help) if you need it!
+
+Once you are in production, we would be delighted to update our [map of Dataverse installations](https://dataverse.org/installations) around the world to include yours! Please [create an issue](https://github.com/IQSS/dataverse-installations/issues) or email us at support@dataverse.org to join the club!
+
+You are also very welcome to join the [Global Dataverse Community Consortium](https://www.gdcc.io/) (GDCC).
+
+## Upgrade Instructions
+
+Upgrading requires a maintenance window and downtime. Please plan accordingly, create backups of your database, etc.
+
+These instructions assume that you've already upgraded through all the 5.x releases and are now running Dataverse 6.5.
+
+0\. These instructions assume that you are upgrading from the immediate previous version. If you are running an earlier version, the only supported way to upgrade is to progress through the upgrades to all the releases in between before attempting the upgrade to this version.
+
+If you are running Payara as a non-root user (and you should be!), **remember not to execute the commands below as root**. By default, Payara runs as the `dataverse` user. In the commands below, we use sudo to run the commands as a non-root user.
+
+Also, we assume that Payara 6 is installed in `/usr/local/payara6`. If not, adjust as needed.
+
+```shell
+export PAYARA=/usr/local/payara6
+```
+
+(or `setenv PAYARA /usr/local/payara6` if you are using a `csh`-like shell)
+
+1\. List deployed applications
+
+```shell
+$PAYARA/bin/asadmin list-applications
+```
+
+2\. Undeploy the previous version (should match "list-applications" above)
+
+```shell
+$PAYARA/bin/asadmin undeploy dataverse-6.5
+```
+
+3\. Stop Payara
+
+```shell
+sudo service payara stop
+```
+
+4\. Upgrade to Payara 6.2025.2
+
+The steps below reuse your existing domain directory with the new distribution of Payara. You may also want to review the Payara upgrade instructions as it could be helpful during any troubleshooting:
+[Payara Release Notes](https://docs.payara.fish/community/docs/6.2025.2/Release%20Notes/Release%20Notes%206.2025.2.html).
+We also recommend you ensure you followed all update instructions from the past releases regarding Payara.
+(The most recent Payara update was for [v6.3](https://github.com/IQSS/dataverse/releases/tag/v6.3).)
+
+Move the current Payara directory out of the way:
+
+```shell
+mv $PAYARA $PAYARA.6.2024.6
+```
+
+Download the new Payara version 6.2025.2 (from https://www.payara.fish/downloads/payara-platform-community-edition/ or https://nexus.payara.fish/repository/payara-community/fish/payara/distributions/payara/6.2025.2/payara-6.2025.2.zip), and unzip it in its place:
+
+```shell
+cd /usr/local
+unzip payara-6.2025.2.zip
+```
+
+Replace the brand new `payara/glassfish/domains/domain1` with your old, preserved domain1:
+
+```shell
+mv payara6/glassfish/domains/domain1 payara6/glassfish/domains/domain1_DIST
+mv payara6.6.2024.6/glassfish/domains/domain1 payara6/glassfish/domains/
+```
+
+5\. Download and deploy this version
+
+```shell
+wget https://github.com/IQSS/dataverse/releases/download/v6.6/dataverse-6.6.war
+$PAYARA/bin/asadmin deploy dataverse-6.6.war
+```
+
+Note: if you have any trouble deploying, stop Payara, remove the following directories, start Payara, and try to deploy again.
+
+```shell
+sudo service payara stop
+sudo rm -rf $PAYARA/glassfish/domains/domain1/generated
+sudo rm -rf $PAYARA/glassfish/domains/domain1/osgi-cache
+sudo rm -rf $PAYARA/glassfish/domains/domain1/lib/databases
+sudo service payara start
+```
+
+6\. For installations with internationalization or text customizations:
+
+Please remember to update translations via [Dataverse language packs](https://github.com/GlobalDataverseCommunityConsortium/dataverse-language-packs).
+
+If you have text customizations, you can get the latest English files from the [source tree](https://github.com/IQSS/dataverse/tree/v6.6/src/main/java/propertyFiles).
+
+7\. Decide to enable (or not) the `index-harvested-metadata-source` feature flag
+
+Decide whether or not to enable the `dataverse.feature.index-harvested-metadata-source` feature flag described above, in [the guides](https://guides.dataverse.org/en/6.6/installation/config.html#feature-flags), #10217 and #11217. The reason to decide now is that reindexing is required and the next steps involve restarting Payara and upgrading Solr, which will result in a fresh index.
+
+8\. Configure SameSite
+
+To bring your Dataverse installation in line with new installations, as described above and in [the guides](https://guides.dataverse.org/en/6.6/installation/config.html#samesite-cookie-attribute), we recommend running the following commands:
+
+```
+./asadmin set server-config.network-config.protocols.protocol.http-listener-1.http.cookie-same-site-value=Lax
+
+./asadmin set server-config.network-config.protocols.protocol.http-listener-1.http.cookie-same-site-enabled=true
+```
+
+Please note that "None" is less secure than "Lax" and should be avoided. You can test the setting by inspecting headers with curl, looking at the JSESSIONID cookie for "SameSite=Lax" (yes, it's expected to be repeated, probably due to a bug in Payara) like this:
+
+```
+% curl -s -I http://localhost:8080 | grep JSESSIONID
+Set-Cookie: JSESSIONID=6574324d75aebeb86dc96ecb3bb0; Path=/;SameSite=Lax;SameSite=Lax
+```
+
+Before making the changes above, SameSite attribute should be absent, like this:
+
+```
+% curl -s -I http://localhost:8080 | grep JSESSIONID
+Set-Cookie: JSESSIONID=6574324d75aebeb86dc96ecb3bb0; Path=/
+```
+
+8a\. Restart Payara
+
+```shell
+sudo service payara stop
+sudo service payara start
+```
+
+9\. Update metadata blocks
+
+These changes reflect incremental improvements made to the handling of core metadata fields.
+
+Expect the loading of the citation block to take several seconds because of its size (especially due to the number of languages).
+
+```shell
+wget https://raw.githubusercontent.com/IQSS/dataverse/v6.6/scripts/api/data/metadatablocks/citation.tsv
+
+curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file citation.tsv
+```
+
+The 3D Objects metadata block is included in all new installations of Dataverse so we recommend adding it.
+
+```shell
+wget https://raw.githubusercontent.com/IQSS/dataverse/v6.6/scripts/api/data/metadatablocks/3d_objects.tsv
+
+curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file 3d_objects.tsv
+```
+
+10\. Upgrade Solr
+
+Solr 9.8.0 is now the version recommended in our Installation Guide and used with automated testing. Additionally, due to the new range search support feature and the addition of fields (e.g. versionNote, fileRestricted, canDownloadFile, variableCount, and observations), the default `schema.xml` file has changed, so you must upgrade.
+
+Install Solr 9.8.0 following the [instructions](https://guides.dataverse.org/en/6.6/installation/prerequisites.html#solr) from the Installation Guide.
+
+The instructions in the guide suggest using the config files from the installer zip bundle. When upgrading an existing instance, it may be easier to download them from the source tree:
+
+```shell
+wget https://raw.githubusercontent.com/IQSS/dataverse/v6.6/conf/solr/solrconfig.xml
+wget https://raw.githubusercontent.com/IQSS/dataverse/v6.6/conf/solr/schema.xml
+cp solrconfig.xml schema.xml /usr/local/solr/solr-9.8.0/server/solr/collection1/conf
+```
+
+10a\. For installations with additional metadata blocks or external controlled vocabulary scripts, update fields
+
+- Stop Solr instance (usually `service solr stop`, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/6.6/installation/prerequisites.html#solr-init-script)).
+
+- Run the `update-fields.sh` script that we supply, as in the example below (modify the command lines as needed to reflect the correct path of your Solr installation):
+
+```shell
+wget https://raw.githubusercontent.com/IQSS/dataverse/v6.6/conf/solr/update-fields.sh
+chmod +x update-fields.sh
+curl "http://localhost:8080/api/admin/index/solr/schema" | ./update-fields.sh /usr/local/solr/solr-9.8.0/server/solr/collection1/conf/schema.xml
+```
+
+- Start Solr instance (usually `service solr start` depending on Solr/OS).
+
+11\. Reindex Solr
+
+```shell
+curl http://localhost:8080/api/admin/index
+```
+
+12\. Run reExportAll to update dataset metadata exports
+
+For existing published datasets, additional license metadata will not be available from DataCite or in metadata exports until
+
+- the dataset is republished or
+- the /api/admin/metadata/{id}/reExportDataset is run for the dataset or
+- the /api/datasets/{id}/modifyRegistrationMetadata API is run for the dataset or
+- the global versions of these API calls (/api/admin/metadata/reExportAll, /api/datasets/modifyRegistrationPIDMetadataAll) are used.
+
+For this reason, we recommend reexporting all dataset metadata. For more advanced usage, please see [the guides](http://guides.dataverse.org/en/6.6/admin/metadataexport.html#batch-exports-through-the-api).
+
+```shell
+curl http://localhost:8080/api/admin/metadata/reExportAll
+```
+
+13\. (Optional) Re-harvest datasets
+
+The publisher value of harvested datasets is now attributed to the dataset's distributor instead of its producer. For more information, see [the guides](http://guides.dataverse.org/en/6.6/admin/harvestclients.html#harvesting-client-changelog), #8739, and #9013.
+
+This improves the citation associated with these datasets, but the change only affects newly harvested datasets.
+
+If you would like to pick up this change for existing harvested datasets, you should re-harvest them. This can be accomplished by deleting and re-adding each harvesting client, followed by a harvesting run. You may want to use [harvesting client APIs](https://guides.dataverse.org/en/6.6/api/native-api.html#managing-harvesting-clients) to save (serialize), add, and remove clients.
diff --git a/doc/sphinx-guides/source/_static/admin/counter-processor-config.yaml b/doc/sphinx-guides/source/_static/admin/counter-processor-config.yaml
index 26144544d9e..f3501ead7b3 100644
--- a/doc/sphinx-guides/source/_static/admin/counter-processor-config.yaml
+++ b/doc/sphinx-guides/source/_static/admin/counter-processor-config.yaml
@@ -33,8 +33,10 @@ path_types:
# Robots and machines urls are urls where the script can download a list of regular expressions to determine
# if something is a robot or machine user-agent. The text file has one regular expression per line
-robots_url: https://raw.githubusercontent.com/CDLUC3/Make-Data-Count/master/user-agents/lists/robot.txt
-machines_url: https://raw.githubusercontent.com/CDLUC3/Make-Data-Count/master/user-agents/lists/machine.txt
+#robots_url: https://raw.githubusercontent.com/CDLUC3/Make-Data-Count/master/user-agents/lists/robot.txt
+#machines_url: https://raw.githubusercontent.com/CDLUC3/Make-Data-Count/master/user-agents/lists/machine.txt
+robots_url: https://raw.githubusercontent.com/IQSS/counter-processor/refs/heads/goto-gdcc/user-agents/lists/robots.txt
+machines_url: https://raw.githubusercontent.com/IQSS/counter-processor/refs/heads/goto-gdcc/user-agents/lists/machine.txt
# the year and month for the report you are creating.
year_month: 2019-01
diff --git a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv
index 3df5ed5d24f..9a3d2a89acb 100644
--- a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv
+++ b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv
@@ -1,9 +1,8 @@
Tool Type Scope Description
-Data Explorer explore file "A GUI which lists the variables in a tabular data file allowing searching, charting and cross tabulation analysis. See the README.md file at https://github.com/scholarsportal/dataverse-data-explorer-v2 for the instructions on adding Data Explorer to your Dataverse."
+Data Explorer explore file "A GUI which lists the variables in a tabular data file allowing searching, charting and cross tabulation analysis. The latest version incorporates the Data Curation Tool, a GUI for curating data by adding labels, groups, weights and other details to assist with informed reuse. See the README.md file at https://github.com/scholarsportal/Dataverse-Data-Explorer for the instructions on adding Data Explorer to your Dataverse."
Whole Tale explore dataset "A platform for the creation of reproducible research packages that allows users to launch containerized interactive analysis environments based on popular tools such as Jupyter and RStudio. Using this integration, Dataverse users can launch Jupyter and RStudio environments to analyze published datasets. For more information, see the `Whole Tale User Guide `_."
Binder explore dataset Binder allows you to spin up custom computing environments in the cloud (including Jupyter notebooks) with the files from your dataset. See https://github.com/IQSS/dataverse-binder-redirect for installation instructions.
File Previewers	explore	file	"A set of tools that display the content of files - including audio, html, `Hypothes.is `_ annotations, images, PDF, Markdown, text, video, tabular data, spreadsheets, GeoJSON, zip, and NcML files - allowing them to be viewed without downloading the file. The previewers can be run directly from github.io, so the only required step is using the Dataverse API to register the ones you want to use. Documentation, including how to optionally brand the previewers, and an invitation to contribute through github are in the README.md file. Initial development was led by the Qualitative Data Repository and the spreadsheet previewer was added by the Social Sciences and Humanities Open Cloud (SSHOC) project. https://github.com/gdcc/dataverse-previewers"
-Data Curation Tool configure file "A GUI for curating data by adding labels, groups, weights and other details to assist with informed reuse. See the README.md file at https://github.com/scholarsportal/Dataverse-Data-Curation-Tool for the installation instructions."
Ask the Data query file Ask the Data is an experimental tool that allows you ask natural language questions about the data contained in Dataverse tables (tabular data). See the README.md file at https://github.com/IQSS/askdataverse/tree/main/askthedata for the instructions on adding Ask the Data to your Dataverse installation.
TurboCurator by ICPSR configure dataset TurboCurator generates metadata improvements for title, description, and keywords. It relies on open AI's ChatGPT & ICPSR best practices. See the `TurboCurator Dataverse Administrator `_ page for more details on how it works and adding TurboCurator to your Dataverse installation.
JupyterHub explore file The `Dataverse-to-JupyterHub Data Transfer Connector `_ is a tool that simplifies the transfer of data between Dataverse repositories and the cloud-based platform JupyterHub. It is designed for researchers, scientists, and data analysts, facilitating collaboration on projects by seamlessly moving datasets and files. The tool is a lightweight client-side web application built using React and relies on the Dataverse External Tool feature, allowing for easy deployment on modern integration systems. Currently optimized for small to medium-sized files, future plans include extending support for larger files and signed Dataverse endpoints. For more details, you can refer to the external tool manifest: https://forgemia.inra.fr/dipso/eosc-pillar/dataverse-jupyterhub-connector/-/blob/master/externalTools.json
diff --git a/doc/sphinx-guides/source/_static/api/add-license.json b/doc/sphinx-guides/source/_static/api/add-license.json
index a9d5dd34093..9a5c478dc36 100644
--- a/doc/sphinx-guides/source/_static/api/add-license.json
+++ b/doc/sphinx-guides/source/_static/api/add-license.json
@@ -4,5 +4,9 @@
"shortDescription": "Creative Commons Attribution 4.0 International License.",
"iconUrl": "https://i.creativecommons.org/l/by/4.0/88x31.png",
"active": true,
- "sortOrder": 2
-}
+ "sortOrder": 2,
+ "rightsIdentifier": "CC-BY-4.0",
+ "rightsIdentifierScheme": "SPDX",
+ "schemeUri": "https://spdx.org/licenses/",
+ "languageCode": "en"
+}
\ No newline at end of file
diff --git a/doc/sphinx-guides/source/_static/api/dataset-add-single-compound-field-metadata.json b/doc/sphinx-guides/source/_static/api/dataset-add-single-compound-field-metadata.json
new file mode 100644
index 00000000000..f49a9e47d5b
--- /dev/null
+++ b/doc/sphinx-guides/source/_static/api/dataset-add-single-compound-field-metadata.json
@@ -0,0 +1,13 @@
+{
+ "fields": [
+ {
+ "typeName": "targetSampleSize",
+ "value": {
+ "targetSampleActualSize": {
+ "typeName": "targetSampleSizeFormula",
+ "value": "n = N*X / (X + N - 1)"
+ }
+ }
+ }
+ ]
+}
\ No newline at end of file
diff --git a/doc/sphinx-guides/source/_static/api/dataset-add-single-cvoc-field-metadata.json b/doc/sphinx-guides/source/_static/api/dataset-add-single-cvoc-field-metadata.json
new file mode 100644
index 00000000000..620f3df10d1
--- /dev/null
+++ b/doc/sphinx-guides/source/_static/api/dataset-add-single-cvoc-field-metadata.json
@@ -0,0 +1,4 @@
+{
+ "typeName": "journalArticleType",
+ "value": "abstract"
+}
\ No newline at end of file
diff --git a/doc/sphinx-guides/source/_static/api/dataset-migrate.jsonld b/doc/sphinx-guides/source/_static/api/dataset-migrate.jsonld
index 8f43d1dd6e9..bd882846da1 100644
--- a/doc/sphinx-guides/source/_static/api/dataset-migrate.jsonld
+++ b/doc/sphinx-guides/source/_static/api/dataset-migrate.jsonld
@@ -1,11 +1,31 @@
{
"citation:depositor": "Admin, Dataverse",
"title": "Test Dataset",
+"socialscience:collectionMode": [
+ "demonstration"
+],
"subject": "Computer and Information Science",
+"geospatial:geographicCoverage": [
+ {
+ "geospatial:otherGeographicCoverage": "Cambridge"
+ },
+ {
+ "geospatial:otherGeographicCoverage": "Massachusetts"
+ }
+],
"author": {
"citation:authorName": "Admin, Dataverse",
"citation:authorAffiliation": "GDCC"
},
+"kindOfData": "demonstration data",
+"citation:keyword": [
+ {
+ "citation:keywordValue": "first keyword"
+ },
+ {
+ "citation:keywordValue": "second keyword"
+ }
+],
"dateOfDeposit": "2020-10-08",
"citation:distributor": {
"citation:distributorName": "Demo Dataverse Repository",
@@ -35,5 +55,9 @@
"title": "http://purl.org/dc/terms/title",
"citation": "https://dataverse.org/schema/citation/",
"dvcore": "https://dataverse.org/schema/core#",
- "schema": "http://schema.org/"
-}}
+ "schema": "http://schema.org/",
+ "geospatial": "dataverse.siteUrl/schema/geospatial#",
+ "socialscience": "dataverse.siteUrl/schema/socialscience#",
+ "kindOfData": "http://rdf-vocabulary.ddialliance.org/discovery#kindOfData"
+ }
+}
diff --git a/doc/sphinx-guides/source/_static/api/dataset-schema.json b/doc/sphinx-guides/source/_static/api/dataset-schema.json
index 34b8a1eeedb..85ea5a0d773 100644
--- a/doc/sphinx-guides/source/_static/api/dataset-schema.json
+++ b/doc/sphinx-guides/source/_static/api/dataset-schema.json
@@ -26,6 +26,9 @@
},
"typeName": {
"type": "string"
+ },
+ "displayOnCreate": {
+ "type": "boolean"
}
}
}
diff --git a/doc/sphinx-guides/source/_static/api/harvesting-client.json b/doc/sphinx-guides/source/_static/api/harvesting-client.json
new file mode 100644
index 00000000000..82a817fc38f
--- /dev/null
+++ b/doc/sphinx-guides/source/_static/api/harvesting-client.json
@@ -0,0 +1,11 @@
+{
+ "nickName": "zenodo",
+ "dataverseAlias": "zenodoHarvested",
+ "harvestUrl": "https://zenodo.org/oai2d",
+ "archiveUrl": "https://zenodo.org",
+ "archiveDescription": "Moissonné depuis la collection LMOPS de l'entrepôt Zenodo. En cliquant sur ce jeu de données, vous serez redirigé vers Zenodo.",
+ "metadataFormat": "oai_dc",
+ "customHeaders": "x-oai-api-key: xxxyyyzzz",
+ "set": "user-lmops",
+ "allowHarvestingMissingCVV":true
+}
diff --git a/doc/sphinx-guides/source/_static/api/transform-oai-ore-jsonld.xq b/doc/sphinx-guides/source/_static/api/transform-oai-ore-jsonld.xq
new file mode 100644
index 00000000000..6292f39fbde
--- /dev/null
+++ b/doc/sphinx-guides/source/_static/api/transform-oai-ore-jsonld.xq
@@ -0,0 +1,16 @@
+declare option output:method "json";
+
+let $parameters:={ 'method': 'json' }
+for $record in /json
+ let $metadata:=$record/ore_003adescribes
+
+
+ let $json:=
+
+ {$metadata/*}
+ {$record/_0040context}
+
+
+
+ return if ($metadata) then
+ file:write("converted.json",$json, $parameters)
\ No newline at end of file
diff --git a/doc/sphinx-guides/source/_static/installation/files/etc/init.d/solr b/doc/sphinx-guides/source/_static/installation/files/etc/init.d/solr
index 14df734cca7..303cd3c7534 100755
--- a/doc/sphinx-guides/source/_static/installation/files/etc/init.d/solr
+++ b/doc/sphinx-guides/source/_static/installation/files/etc/init.d/solr
@@ -5,7 +5,7 @@
# chkconfig: 35 92 08
# description: Starts and stops Apache Solr
-SOLR_DIR="/usr/local/solr/solr-9.4.1"
+SOLR_DIR="/usr/local/solr/solr-9.8.0"
SOLR_COMMAND="bin/solr"
SOLR_ARGS="-m 1g"
SOLR_USER=solr
diff --git a/doc/sphinx-guides/source/_static/installation/files/etc/systemd/solr.service b/doc/sphinx-guides/source/_static/installation/files/etc/systemd/solr.service
index 8ccf7652a49..f3eed1479bc 100644
--- a/doc/sphinx-guides/source/_static/installation/files/etc/systemd/solr.service
+++ b/doc/sphinx-guides/source/_static/installation/files/etc/systemd/solr.service
@@ -5,9 +5,9 @@ After = syslog.target network.target remote-fs.target nss-lookup.target
[Service]
User = solr
Type = forking
-WorkingDirectory = /usr/local/solr/solr-9.4.1
-ExecStart = /usr/local/solr/solr-9.4.1/bin/solr start -m 1g
-ExecStop = /usr/local/solr/solr-9.4.1/bin/solr stop
+WorkingDirectory = /usr/local/solr/solr-9.8.0
+ExecStart = /usr/local/solr/solr-9.8.0/bin/solr start -m 1g
+ExecStop = /usr/local/solr/solr-9.8.0/bin/solr stop
LimitNOFILE=65000
LimitNPROC=65000
Restart=on-failure
diff --git a/doc/sphinx-guides/source/_static/util/counter_daily.sh b/doc/sphinx-guides/source/_static/util/counter_daily.sh
index 5095a83b7e2..ef10db26895 100644
--- a/doc/sphinx-guides/source/_static/util/counter_daily.sh
+++ b/doc/sphinx-guides/source/_static/util/counter_daily.sh
@@ -1,6 +1,6 @@
#! /bin/bash
-COUNTER_PROCESSOR_DIRECTORY="/usr/local/counter-processor-1.05"
+COUNTER_PROCESSOR_DIRECTORY="/usr/local/counter-processor-1.06"
MDC_LOG_DIRECTORY="/usr/local/payara6/glassfish/domains/domain1/logs/mdc"
# counter_daily.sh
diff --git a/doc/sphinx-guides/source/admin/discoverability.rst b/doc/sphinx-guides/source/admin/discoverability.rst
index 19ef7250a29..22ff66246f0 100644
--- a/doc/sphinx-guides/source/admin/discoverability.rst
+++ b/doc/sphinx-guides/source/admin/discoverability.rst
@@ -51,7 +51,7 @@ The Dataverse team has been working with Google on both formats. Google has `ind
Signposting
+++++++++++
-The Dataverse software supports `Signposting `_. This allows machines to request more information about a dataset through the `Link `_ HTTP header.
+The Dataverse software supports `Signposting `_. This allows machines to request more information about a dataset through the `Link `_ HTTP header. Links to all enabled metadata export formats are given. See :ref:`metadata-export-formats` for a list.
There are 2 Signposting profile levels, level 1 and level 2. In this implementation,
* Level 1 links are shown `as recommended `_ in the "Link"
diff --git a/doc/sphinx-guides/source/admin/external-tools.rst b/doc/sphinx-guides/source/admin/external-tools.rst
index 346ca0b15ee..c3e71c13ac6 100644
--- a/doc/sphinx-guides/source/admin/external-tools.rst
+++ b/doc/sphinx-guides/source/admin/external-tools.rst
@@ -35,7 +35,13 @@ Configure the tool with the curl command below, making sure to replace the ``fab
.. code-block:: bash
- curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools --upload-file fabulousFileTool.json
+ curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools --upload-file fabulousFileTool.json
+
+This API is Superuser only. Note the endpoint difference (/api/externalTools instead of /api/admin/externalTools).
+
+.. code-block:: bash
+
+ curl -s -H "X-Dataverse-key:$API_TOKEN" -X POST -H 'Content-type: application/json' http://localhost:8080/api/externalTools --upload-file fabulousFileTool.json
Listing All External Tools in a Dataverse Installation
++++++++++++++++++++++++++++++++++++++++++++++++++++++
@@ -46,6 +52,12 @@ To list all the external tools that are available in a Dataverse installation:
curl http://localhost:8080/api/admin/externalTools
+This API is open to any user. Note the endpoint difference (/api/externalTools instead of /api/admin/externalTools).
+
+.. code-block:: bash
+
+ curl http://localhost:8080/api/externalTools
+
Showing an External Tool in a Dataverse Installation
++++++++++++++++++++++++++++++++++++++++++++++++++++
@@ -56,6 +68,12 @@ To show one of the external tools that are available in a Dataverse installation
export TOOL_ID=1
curl http://localhost:8080/api/admin/externalTools/$TOOL_ID
+This API is open to any user. Note the endpoint difference (/api/externalTools instead of /api/admin/externalTools).
+
+.. code-block:: bash
+
+ curl http://localhost:8080/api/externalTools/$TOOL_ID
+
Removing an External Tool From a Dataverse Installation
+++++++++++++++++++++++++++++++++++++++++++++++++++++++
@@ -66,6 +84,12 @@ Assuming the external tool database id is "1", remove it with the following comm
export TOOL_ID=1
curl -X DELETE http://localhost:8080/api/admin/externalTools/$TOOL_ID
+This API is Superuser only. Note the endpoint difference (/api/externalTools instead of /api/admin/externalTools).
+
+.. code-block:: bash
+
+ curl -s -H "X-Dataverse-key:$API_TOKEN" -X DELETE http://localhost:8080/api/externalTools/$TOOL_ID
+
.. _testing-external-tools:
Testing External Tools
diff --git a/doc/sphinx-guides/source/admin/harvestclients.rst b/doc/sphinx-guides/source/admin/harvestclients.rst
index c4c63c80786..6ac8c480745 100644
--- a/doc/sphinx-guides/source/admin/harvestclients.rst
+++ b/doc/sphinx-guides/source/admin/harvestclients.rst
@@ -12,6 +12,8 @@ Harvesting is a process of exchanging metadata with other repositories. As a har
Harvested records can be kept in sync with the original repository through scheduled incremental updates, daily or weekly.
Alternatively, harvests can be run on demand, by the Admin.
+.. _managing-harvesting-clients:
+
Managing Harvesting Clients
---------------------------
@@ -23,8 +25,16 @@ The process of creating a new, or editing an existing client, is largely self-ex
Please note that in some rare cases this GUI may fail to create a client because of some unexpected errors during these real time exchanges with an OAI server that is otherwise known to be valid. For example, in the past we have had issues with servers offering very long lists of sets (*really* long, in the thousands). To allow an admin to still be able to create a client in a situation like that, we provide the REST API that will do so without attempting any validation in real time. This obviously makes it the responsibility of the admin to supply the values that are definitely known to be valid - a working OAI url, the name of a set that does exist on the server, and/or a supported metadata format. See the :ref:`managing-harvesting-clients-api` section of the :doc:`/api/native-api` guide for more information.
-Note that as of 5.13, a new entry "Custom HTTP Header" has been added to the Step 1. of Create or Edit form. This optional field can be used to configure this client with a specific HTTP header to be added to every OAI request. This is to accommodate a (rare) use case where the remote server may require a special token of some kind in order to offer some content not available to other clients. Most OAI servers offer the same publicly-available content to all clients, so few admins will have a use for this feature. It is however on the very first, Step 1. screen in case the OAI server requires this token even for the "ListSets" and "ListMetadataFormats" requests, which need to be sent in the Step 2. of creating or editing a client. Multiple headers can be supplied separated by `\\n` - actual "backslash" and "n" characters, not a single "new line" character.
+"Custom HTTP Header" is part of step 1 of the Create or Edit form. This optional field can be used to configure this client with a specific HTTP header to be added to every OAI request. This is to accommodate a (rare) use case where the remote server may require a special token of some kind in order to offer some content not available to other clients. Most OAI servers offer the same publicly-available content to all clients, so few admins will have a use for this feature. However, it appears in Step 1 of the form screen in case the OAI server requires this token even for the "ListSets" and "ListMetadataFormats" requests, which need to be sent in Step 2 of creating or editing a client. Multiple headers can be supplied separated by `\\n` - actual "backslash" and "n" characters, not a single "new line" character.
+
+.. _harvesting-from-datacite:
+
+Harvesting from DataCite
+~~~~~~~~~~~~~~~~~~~~~~~~
+
+It is possible to harvest metadata directly from DataCite. Their OAI gateway (https://oai.datacite.org/oai) serves records for every DOI they have registered. Therefore, it is now possible to harvest metadata from any participating institution even if they do not maintain an OAI server of their own. Their OAI implementation offers a concept of a "dynamic set", making it possible to use any query supported by the DataCite search API as though it were a "set". This makes harvesting from them extra flexible, allowing users to harvest virtually any arbitrary subset of metadata records, potentially spanning multiple institutions and registration authorities.
+For various reasons, in order to take advantage of this feature harvesting clients must be created via the ``/api/harvest/clients`` API. Once configured however, harvests can be run from the Harvesting Clients control panel in the UI. See the :ref:`managing-harvesting-clients-api` section of the :doc:`/api/native-api` guide for more information.
How to Stop a Harvesting Run in Progress
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -48,7 +58,14 @@ Each harvesting client run logs a separate file per run to the app server's defa
Note that you'll want to run a minimum of Dataverse Software 4.6, optimally 4.18 or beyond, for the best OAI-PMH interoperability.
+Harvesting Client Changelog
+---------------------------
+
+- As of Dataverse 6.6, it is possible to harvest metadata directly from DataCite. See :ref:`harvesting-from-datacite`.
+- As of Dataverse 6.6, the publisher value of harvested datasets is now attributed to the dataset's distributor instead of its producer. This change affects all newly harvested datasets. For more information, see https://github.com/IQSS/dataverse/pull/9013
+- As of Dataverse 5.13, a new entry called "Custom HTTP Header" has been added to Step 1 of the Create or Edit form. For usage see :ref:`managing-harvesting-clients`.
+
Harvesting Non-OAI-PMH
-~~~~~~~~~~~~~~~~~~~~~~
+----------------------
-`DOI2PMH `__ is a community-driven project intended to allow OAI-PMH harvesting from non-OAI-PMH sources.
\ No newline at end of file
+`DOI2PMH `__ is a community-driven project intended to allow OAI-PMH harvesting from non-OAI-PMH sources.
diff --git a/doc/sphinx-guides/source/admin/index.rst b/doc/sphinx-guides/source/admin/index.rst
index 633842044b4..bada3ea20b4 100755
--- a/doc/sphinx-guides/source/admin/index.rst
+++ b/doc/sphinx-guides/source/admin/index.rst
@@ -11,6 +11,7 @@ This guide documents the functionality only available to superusers (such as "da
**Contents:**
.. toctree::
+ :maxdepth: 2
dashboard
external-tools
diff --git a/doc/sphinx-guides/source/admin/make-data-count.rst b/doc/sphinx-guides/source/admin/make-data-count.rst
index 51bc2c4a9fe..2d9f4a94b55 100644
--- a/doc/sphinx-guides/source/admin/make-data-count.rst
+++ b/doc/sphinx-guides/source/admin/make-data-count.rst
@@ -84,9 +84,9 @@ Configure Counter Processor
* Change to the directory where you installed Counter Processor.
- * ``cd /usr/local/counter-processor-1.05``
+ * ``cd /usr/local/counter-processor-1.06``
-* Download :download:`counter-processor-config.yaml <../_static/admin/counter-processor-config.yaml>` to ``/usr/local/counter-processor-1.05``.
+* Download :download:`counter-processor-config.yaml <../_static/admin/counter-processor-config.yaml>` to ``/usr/local/counter-processor-1.06``.
* Edit the config file and pay particular attention to the FIXME lines.
@@ -99,7 +99,7 @@ Soon we will be setting up a cron job to run nightly but we start with a single
* Change to the directory where you installed Counter Processor.
- * ``cd /usr/local/counter-processor-1.05``
+ * ``cd /usr/local/counter-processor-1.06``
* If you are running Counter Processor for the first time in the middle of a month, you will need create blank log files for the previous days. e.g.:
diff --git a/doc/sphinx-guides/source/admin/metadatacustomization.rst b/doc/sphinx-guides/source/admin/metadatacustomization.rst
index e5326efebef..df07b65153b 100644
--- a/doc/sphinx-guides/source/admin/metadatacustomization.rst
+++ b/doc/sphinx-guides/source/admin/metadatacustomization.rst
@@ -244,6 +244,8 @@ Each of the three main sections own sets of properties:
| | #metadataBlock) | | |
+---------------------------+--------------------------------------------------------+----------------------------------------------------------+-----------------------+
+.. _cvoc-props:
+
#controlledVocabulary (enumerated) properties
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -259,10 +261,10 @@ Each of the three main sections own sets of properties:
| | | an existing #datasetField from |
| | | another metadata block.) |
+--------------+--------------------------------------------+-----------------------------------------+
-| Value | A short display string, representing | Free text |
-| | an enumerated value for this field. If | |
-| | the identifier property is empty, | |
-| | this value is used as the identifier. | |
+| Value | A short display string, representing | Free text. When defining a boolean, the |
+| | an enumerated value for this field. If | values "True" and "False" are |
+| | the identifier property is empty, | recommended and "Unknown" can be added |
+| | this value is used as the identifier. | if needed. |
+--------------+--------------------------------------------+-----------------------------------------+
| identifier | A string used to encode the selected | Free text |
| | enumerated value of a field. If this | |
@@ -270,7 +272,11 @@ Each of the three main sections own sets of properties:
| | โValueโ field is used as the identifier. | |
+--------------+--------------------------------------------+-----------------------------------------+
| displayOrder | Control the order in which the enumerated | Non-negative integer. |
-| | values are displayed for selection. | |
+| | values are displayed for selection. When | |
+| | adding new values, you don't have to add | |
+| | them at the end. You can renumber existing | |
+| | values to update the order in which they | |
+| | appear. | |
+--------------+--------------------------------------------+-----------------------------------------+
FieldType definitions
@@ -293,6 +299,9 @@ FieldType definitions
+---------------+------------------------------------+
| text | Any text other than newlines may |
| | be entered into this field. |
+| | The text fieldtype may be used to |
+| | define a boolean (see "Value" |
+| | under :ref:`cvoc-props`). |
+---------------+------------------------------------+
| textbox | Any text may be entered. For |
| | input, the Dataverse Software |
@@ -539,7 +548,7 @@ a necessary re-index, but for your custom metadata you will need to keep track o
Please note also that if you are going to make a pull request updating ``conf/solr/schema.xml`` with fields you have
added, you should first load all the custom metadata blocks in ``scripts/api/data/metadatablocks`` (including ones you
-don't care about) to create a complete list of fields. (This might change in the future.)
+don't care about) to create a complete list of fields. (This might change in the future.) Please see :ref:`update-solr-schema-dev` in the Developer Guide.
Reloading a Metadata Block
--------------------------
@@ -559,8 +568,7 @@ Using External Vocabulary Services
The Dataverse software has a mechanism to associate specific fields defined in metadata blocks with a vocabulary(ies) managed by external services. The mechanism relies on trusted third-party Javascripts. The mapping from field type to external vocabulary(ies) is managed via the :ref:`:CVocConf <:CVocConf>` setting.
-*This functionality is considered 'experimental'. It may require significant effort to configure and is likely to evolve in subsequent Dataverse software releases.*
-
+*This functionality may require significant effort to configure and is likely to evolve in subsequent Dataverse software releases.*
The effect of configuring this mechanism is similar to that of defining a field in a metadata block with 'allowControlledVocabulary=true':
@@ -585,6 +593,9 @@ Configuration involves specifying which fields are to be mapped, to which Solr f
These are all defined in the :ref:`:CVocConf <:CVocConf>` setting as a JSON array. Details about the required elements as well as example JSON arrays are available at https://github.com/gdcc/dataverse-external-vocab-support, along with an example metadata block that can be used for testing.
The scripts required can be hosted locally or retrieved dynamically from https://gdcc.github.io/ (similar to how dataverse-previewers work).
+Since external vocabulary scripts can change how fields are indexed (storing an identifier and name and/or values in different languages),
+updating the Solr schema as described in :ref:`update-solr-schema` should be done after adding new scripts to your configuration.
+
Please note that in addition to the :ref:`:CVocConf` described above, an alternative is the :ref:`:ControlledVocabularyCustomJavaScript` setting.
Protecting MetadataBlocks
diff --git a/doc/sphinx-guides/source/admin/troubleshooting.rst b/doc/sphinx-guides/source/admin/troubleshooting.rst
index acbdcaae17e..178938af6dc 100644
--- a/doc/sphinx-guides/source/admin/troubleshooting.rst
+++ b/doc/sphinx-guides/source/admin/troubleshooting.rst
@@ -162,7 +162,9 @@ A full backup of the table can be made with pg_dump, for example:
``pg_dump --table=actionlogrecord --data-only > /tmp/actionlogrecord_backup.sql``
-(In the example above, the output will be saved in raw SQL format. It is portable and human-readable, but uses a lot of space. It does, however, compress very well. Add the ``-Fc`` option to save the output in a proprietary, binary format that's already compressed).
+In the example above, the output will be saved in raw SQL format. It is portable and human-readable, but uses a lot of space. It does, however, compress very well.
+
+Add the ``-Fc`` option to save the output in a proprietary, compressed binary format which will dump and restore much more quickly, but note that in this format dead tuples will be copied as well. To reduce the amount of storage consumed by the newly-trimmed ``actionlogrecord`` table, you must issue ``vacuum full actionlogrecord`` before dumping the database in this custom format.
Getting Help
diff --git a/doc/sphinx-guides/source/api/auth.rst b/doc/sphinx-guides/source/api/auth.rst
index eae3bd3c969..8dffb914e29 100644
--- a/doc/sphinx-guides/source/api/auth.rst
+++ b/doc/sphinx-guides/source/api/auth.rst
@@ -81,6 +81,27 @@ To test if bearer tokens are working, you can try something like the following (
curl -H "Authorization: Bearer $TOKEN" http://localhost:8080/api/users/:me
+To register a new user who has authenticated via an OIDC provider, the following endpoint should be used:
+
+.. code-block:: bash
+
+ curl -H "Authorization: Bearer $TOKEN" -X POST http://localhost:8080/api/users/register --data '{"termsAccepted":true}'
+
+By default, the Bearer token is expected to include the following claims that will be used to create the user account:
+
+- ``username``
+- ``firstName``
+- ``lastName``
+- ``emailAddress``
+
+The one parameter required by default is ``termsAccepted`` which must be set to true, indicating that the user has seen and accepted the Terms of Use of the installation.
+
+If the feature flag ``api-bearer-auth-handle-tos-acceptance-in-idp`` is enabled (along with the ``api-bearer-auth`` feature flag), Dataverse assumes that the Terms of Service acceptance was handled by the identity provider, e.g. in the OIDC ``consent`` dialog, and the ``termsAccepted`` parameter is not needed.
+
+There is another flag called ``api-bearer-auth-provide-missing-claims`` that can be enabled (along with the ``api-bearer-auth`` feature flag) to allow sending missing user claims in the registration JSON. This is useful when the identity provider does not supply the necessary claims listed above. If properties are provided in the JSON, but corresponding claims already exist in the identity provider, an error will be thrown, outlining the conflicting properties. Note that supplying missing claims is configured via a separate feature flag because using it may introduce user impersonation issues, for example if the identity provider does not provide an email field and the user submits an email address they do not own.
+
+In all cases, the submitted JSON can optionally include the fields ``position`` or ``affiliation``, which will be added to the user's Dataverse account. These fields are optional, and if not provided, they will be persisted as empty in Dataverse.
+
Signed URLs
-----------
diff --git a/doc/sphinx-guides/source/api/changelog.rst b/doc/sphinx-guides/source/api/changelog.rst
index 14958095658..73935e6562a 100644
--- a/doc/sphinx-guides/source/api/changelog.rst
+++ b/doc/sphinx-guides/source/api/changelog.rst
@@ -7,6 +7,19 @@ This API changelog is experimental and we would love feedback on its usefulness.
:local:
:depth: 1
+v6.6
+----
+
+- The JSON representation for a datasetVersion sent or received in API calls has changed such that
+
+ - "versionNote" -> "deaccessionNote"
+ - "archiveNote" -> "deaccessionLink"
+ - These may be non-null for deaccessioned versions and an optional new "versionNote" field indicating the reason a version was created may be present on any datasetVersion.
+
+- **/api/metadatablocks** is no longer returning duplicated metadata properties and does not omit metadata properties when called.
+- **/api/roles**: :ref:`show-role` now properly returns 403 Forbidden instead of 401 Unauthorized when you pass a working API token that doesn't have the right permission.
+- The content type for the ``schema.org`` dataset metadata export format has been corrected. It was ``application/json`` and now it is ``application/ld+json``. See also :ref:`export-dataset-metadata-api`.
+
v6.5
----
diff --git a/doc/sphinx-guides/source/api/index.rst b/doc/sphinx-guides/source/api/index.rst
index dd195aa9d62..5ff7626271d 100755
--- a/doc/sphinx-guides/source/api/index.rst
+++ b/doc/sphinx-guides/source/api/index.rst
@@ -9,6 +9,7 @@ API Guide
**Contents:**
.. toctree::
+ :maxdepth: 2
intro
getting-started
@@ -24,4 +25,4 @@ API Guide
linkeddatanotification
apps
faq
- changelog
\ No newline at end of file
+ changelog
diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst
index dabca195e37..0f37bde35b8 100644
--- a/doc/sphinx-guides/source/api/native-api.rst
+++ b/doc/sphinx-guides/source/api/native-api.rst
@@ -128,12 +128,23 @@ Note that setting any of these fields overwrites the previous configuration.
When it comes to omitting these fields in the JSON:
-- Omitting ``facetIds`` or ``metadataBlockNames`` causes the Dataverse collection to inherit the corresponding configuration from its parent.
-- Omitting ``inputLevels`` removes any existing custom input levels in the Dataverse collection.
-- Omitting the entire ``metadataBlocks`` object in the request JSON would exclude the three sub-objects, resulting in the application of the two changes described above.
+- Omitting ``facetIds`` or ``metadataBlockNames`` causes no change to the Dataverse collection. To delete the current configuration and inherit the corresponding configuration from its parent include the flag ``inheritFacetsFromParent`` and/or ``inheritMetadataBlocksFromParent`` respectively.
+- Omitting ``inputLevels`` causes no change to the Dataverse collection. Including the flag ``inheritMetadataBlocksFromParent`` will cause the custom ``inputLevels`` to be deleted and inherited from the parent.
+- Omitting the entire ``metadataBlocks`` object in the request JSON would cause no change to the ``inputLevels``, ``facetIds`` or ``metadataBlockNames`` of the Dataverse collection.
To obtain an example of how these objects are included in the JSON file, download :download:`dataverse-complete-optional-params.json <../_static/api/dataverse-complete-optional-params.json>` file and modify it to suit your needs.
+To force the configurations to be deleted and inherited from the parent's configuration include the following ``metadataBlocks`` object in your JSON
+
+.. code-block:: json
+
+ "metadataBlocks": {
+ "inheritMetadataBlocksFromParent": true,
+ "inheritFacetsFromParent": true
+ }
+
+.. note:: Including both the list ``metadataBlockNames`` and the flag ``"inheritMetadataBlocksFromParent": true`` will result in an error being returned: {"status": "ERROR", "message": "Metadata block can not contain both metadataBlockNames and inheritMetadataBlocksFromParent: true"}. The same is true for ``facetIds`` and ``inheritFacetsFromParent``.
+
See also :ref:`collection-attributes-api`.
.. _view-dataverse:
@@ -166,6 +177,15 @@ Usage example:
curl "https://demo.dataverse.org/api/dataverses/root?returnOwners=true"
+If you want to include the child count of the Dataverse, which represents the number of dataverses, datasets, or files within the dataverse, you must set ``returnChildCount`` query parameter to ``true``. Please note that this count is for direct children only. It doesn't count children of subdataverses.
+
+Usage example:
+
+.. code-block:: bash
+
+ curl "https://demo.dataverse.org/api/dataverses/root?returnChildCount=true"
+
+
To view an unpublished Dataverse collection:
.. code-block:: bash
@@ -424,13 +444,13 @@ Creates a new role under Dataverse collection ``id``. Needs a json file with the
export SERVER_URL=https://demo.dataverse.org
export ID=root
- curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/dataverses/$ID/roles" --upload-file roles.json
+ curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-type:application/json" -X POST "$SERVER_URL/api/dataverses/$ID/roles" --upload-file roles.json
The fully expanded example above (without environment variables) looks like this:
.. code-block:: bash
- curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST -H "Content-type:application/json" "https://demo.dataverse.org/api/dataverses/root/roles" --upload-file roles.json
+ curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -H "Content-type:application/json" -X POST "https://demo.dataverse.org/api/dataverses/root/roles" --upload-file roles.json
For ``roles.json`` see :ref:`json-representation-of-a-role`
@@ -529,6 +549,8 @@ The fully expanded example above (without environment variables) looks like this
curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/dataverses/root/assignments/6"
+.. _list-metadata-blocks-for-a-collection:
+
List Metadata Blocks Defined on a Dataverse Collection
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -556,6 +578,7 @@ This endpoint supports the following optional query parameters:
- ``returnDatasetFieldTypes``: Whether or not to return the dataset field types present in each metadata block. If not set, the default value is false.
- ``onlyDisplayedOnCreate``: Whether or not to return only the metadata blocks that are displayed on dataset creation. If ``returnDatasetFieldTypes`` is true, only the dataset field types shown on dataset creation will be returned within each metadata block. If not set, the default value is false.
+- ``datasetType``: Optionally return additional fields from metadata blocks that are linked with a particular dataset type (see :ref:`dataset-types` in the User Guide). Pass a single dataset type as a string. For a list of dataset types you can pass, see :ref:`api-list-dataset-types`.
An example using the optional query parameters is presented below:
@@ -564,14 +587,17 @@ An example using the optional query parameters is presented below:
export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
export SERVER_URL=https://demo.dataverse.org
export ID=root
+ export DATASET_TYPE=software
- curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/metadatablocks?returnDatasetFieldTypes=true&onlyDisplayedOnCreate=true"
+ curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/metadatablocks?returnDatasetFieldTypes=true&onlyDisplayedOnCreate=true&datasetType=$DATASET_TYPE"
The fully expanded example above (without environment variables) looks like this:
.. code-block:: bash
- curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/metadatablocks?returnDatasetFieldTypes=true&onlyDisplayedOnCreate=true"
+ curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/metadatablocks?returnDatasetFieldTypes=true&onlyDisplayedOnCreate=true&datasetType=software"
+
+.. _define-metadata-blocks-for-a-dataverse-collection:
Define Metadata Blocks for a Dataverse Collection
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -598,6 +624,8 @@ The fully expanded example above (without environment variables) looks like this
curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST -H "Content-type:application/json" --upload-file define-metadatablocks.json "https://demo.dataverse.org/api/dataverses/root/metadatablocks"
+An alternative to defining metadata blocks at a collection level is to create and use a dataset type. See :ref:`api-link-dataset-type`.
+
Determine if a Dataverse Collection Inherits Its Metadata Blocks from Its Parent
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -1067,6 +1095,7 @@ The following attributes are supported:
* ``description`` Description
* ``affiliation`` Affiliation
* ``filePIDsEnabled`` ("true" or "false") Restricted to use by superusers and only when the :ref:`:AllowEnablingFilePIDsPerCollection <:AllowEnablingFilePIDsPerCollection>` setting is true. Enables or disables registration of file-level PIDs in datasets within the collection (overriding the instance-wide setting).
+* ``requireFilesToPublishDataset`` ("true" or "false") Restricted to use by superusers. Defines if a dataset needs files in order to be published. If not set, the determination will be made through inheritance by checking the owners of this collection. Publishing by a superuser will not be blocked.
See also :ref:`update-dataverse-api`.
@@ -1087,15 +1116,28 @@ This endpoint expects a JSON with the following format::
{
"datasetFieldTypeName": "datasetFieldTypeName1",
"required": true,
- "include": true
+ "include": true,
+ "displayOnCreate": null
},
{
"datasetFieldTypeName": "datasetFieldTypeName2",
"required": true,
- "include": true
+ "include": true,
+ "displayOnCreate": true
}
]
+.. note::
+ Required fields will always be displayed regardless of their displayOnCreate setting, as this is necessary for dataset creation.
+ When displayOnCreate is null, the field's default display behavior is used.
+
+Parameters:
+
+- ``datasetFieldTypeName``: Name of the metadata field
+- ``required``: Whether the field is required (boolean)
+- ``include``: Whether the field is included (boolean)
+- ``displayOnCreate`` (optional): Whether the field is displayed during dataset creation, even when not required (boolean)
+
.. code-block:: bash
export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
@@ -1144,6 +1186,209 @@ Use the ``/settings`` API to enable or disable the enforcement of storage quotas
curl -X PUT -d 'true' http://localhost:8080/api/admin/settings/:UseStorageQuotas
+List All Collection Featured Items
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+List the featured items configured for a given Dataverse collection ``id``:
+
+.. code-block:: bash
+
+ export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+ export SERVER_URL=https://demo.dataverse.org
+ export ID=root
+
+ curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/featuredItems"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+ curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/featuredItems"
+
+Update All Collection Featured Items
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Updates all featured items in the given Dataverse collection ``id``.
+
+The data sent to the endpoint represents the desired final state of the featured items in the Dataverse collection and overwrites any existing featured items configuration.
+
+The parameters ``id``, ``content``, ``displayOrder``, and ``fileName`` must be specified as many times as the number of items we want to add or update. The order in which these parameters are repeated must match to ensure they correspond to the same featured item.
+
+The ``file`` parameter must be specified for each image we want to attach to featured items. Note that images can be shared between featured items, so ``fileName`` can have the same value in different featured items.
+
+The ``id`` parameter must be ``0`` for new items or set to the item's identifier for updates. The ``fileName`` parameter should be empty to exclude an image or match the name of a file sent in a ``file`` parameter to set a new image. ``keepFile`` must always be set to ``false``, unless it's an update to a featured item where we want to preserve the existing image, if one exists.
+
+Note that any existing featured item not included in the call with its associated identifier and corresponding properties will be removed from the collection.
+
+The following example creates two featured items, with an image assigned to the second one:
+
+.. code-block:: bash
+
+ export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+ export SERVER_URL=https://demo.dataverse.org
+ export ID=root
+
+ export FIRST_ITEM_CONTENT='Content 1'
+ export FIRST_ITEM_DISPLAY_ORDER=1
+
+ export SECOND_ITEM_IMAGE_FILENAME='image.png'
+ export SECOND_ITEM_CONTENT='Content 2'
+ export SECOND_ITEM_DISPLAY_ORDER=2
+
+ curl -H "X-Dataverse-key:$API_TOKEN" \
+ -X PUT \
+ -F "id=0" -F "id=0" \
+ -F "content=$FIRST_ITEM_CONTENT" -F "content=$SECOND_ITEM_CONTENT" \
+ -F "displayOrder=$FIRST_ITEM_DISPLAY_ORDER" -F "displayOrder=$SECOND_ITEM_DISPLAY_ORDER" \
+ -F "fileName=" -F "fileName=$SECOND_ITEM_IMAGE_FILENAME" \
+ -F "keepFile=false" -F "keepFile=false" \
+ -F "file=@$SECOND_ITEM_IMAGE_FILENAME" \
+ "$SERVER_URL/api/dataverses/$ID/featuredItems"
+
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+ curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" \
+ -X PUT \
+ -F "id=0" -F "id=0" \
+ -F "content=Content 1" -F "content=Content 2" \
+ -F "displayOrder=1" -F "displayOrder=2" \
+ -F "fileName=" -F "fileName=image.png" \
+ -F "keepFile=false" -F "keepFile=false" \
+ -F "file=@image.png" \
+ "https://demo.dataverse.org/api/dataverses/root/featuredItems"
+
+The following example creates one featured item and updates a second one, keeping the existing image it may have had:
+
+.. code-block:: bash
+
+ curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" \
+ -X PUT \
+ -F "id=0" -F "id=1" \
+ -F "content=Content 1" -F "content=Updated content 2" \
+ -F "displayOrder=1" -F "displayOrder=2" \
+ -F "fileName=" -F "fileName=" \
+ -F "keepFile=false" -F "keepFile=true" \
+ "https://demo.dataverse.org/api/dataverses/root/featuredItems"
+
+Delete All Collection Featured Items
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Deletes the featured items configured for a given Dataverse collection ``id``:
+
+.. code-block:: bash
+
+ export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+ export SERVER_URL=https://demo.dataverse.org
+ export ID=root
+
+ curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE "$SERVER_URL/api/dataverses/$ID/featuredItems"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+ curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/dataverses/root/featuredItems"
+
+Create a Collection Featured Item
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Creates a featured item in the given Dataverse collection ``id``:
+
+.. code-block:: bash
+
+ export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+ export IMAGE_FILENAME='image.png'
+ export CONTENT='Content for featured item.'
+ export DISPLAY_ORDER=1
+ export SERVER_URL=https://demo.dataverse.org
+ export ID=root
+
+ curl -H "X-Dataverse-key:$API_TOKEN" -X POST -F "file=@$IMAGE_FILENAME" -F "content=$CONTENT" -F "displayOrder=$DISPLAY_ORDER" "$SERVER_URL/api/dataverses/$ID/featuredItems"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+ curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST -F "file=@image.png" -F "content=Content for featured item." -F "displayOrder=1" "https://demo.dataverse.org/api/dataverses/root/featuredItems"
+
+A featured item may or may not contain an image. If you wish to create it without an image, omit the file parameter in the request.
+
+Update a Collection Featured Item
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Updates a featured item given its ``id``:
+
+.. code-block:: bash
+
+ export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+ export IMAGE_FILENAME='image.png'
+ export CONTENT='Content for featured item.'
+ export DISPLAY_ORDER=1
+ export SERVER_URL=https://demo.dataverse.org
+ export ID=1
+
+ curl -H "X-Dataverse-key:$API_TOKEN" -X PUT -F "file=@$IMAGE_FILENAME" -F "content=$CONTENT" -F "displayOrder=$DISPLAY_ORDER" "$SERVER_URL/api/dataverseFeaturedItems/$ID"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+ curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT -F "file=@image.png" -F "content=Content for featured item." -F "displayOrder=1" "https://demo.dataverse.org/api/dataverseFeaturedItems/1"
+
+``content`` and ``displayOrder`` must always be provided; otherwise, an error will occur. Use the ``file`` parameter to set a new image for the featured item. To keep the existing image, omit ``file`` and send ``keepFile=true``. To remove the image, omit the file parameter.
+
+Updating the featured item keeping the existing image:
+
+.. code-block:: bash
+
+ curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT -F "keepFile=true" -F "content=Content for featured item." -F "displayOrder=1" "https://demo.dataverse.org/api/dataverseFeaturedItems/1"
+
+Updating the featured item removing the existing image:
+
+.. code-block:: bash
+
+ curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT -F "content=Content for featured item." -F "displayOrder=1" "https://demo.dataverse.org/api/dataverseFeaturedItems/1"
+
+Delete a Collection Featured Item
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Deletes a featured item given its ``id``:
+
+.. code-block:: bash
+
+ export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+ export SERVER_URL=https://demo.dataverse.org
+ export ID=1
+
+ curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/dataverseFeaturedItems/$ID"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+ curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/dataverseFeaturedItems/1"
+
+Get a Collection Featured Item Image
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Returns the image of a featured item if one is assigned, given the featured item ``id``:
+
+.. code-block:: bash
+
+ export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+ export SERVER_URL=https://demo.dataverse.org
+ export ID=1
+
+ curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/access/dataverseFeaturedItemImage/$ID"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+ curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X GET "https://demo.dataverse.org/api/access/dataverseFeaturedItemImage/1"
Datasets
--------
@@ -1261,7 +1506,12 @@ It returns a list of versions with their metadata, and file list:
"createTime": "2015-04-20T09:57:32Z",
"license": {
"name": "CC0 1.0",
- "uri": "http://creativecommons.org/publicdomain/zero/1.0"
+ "uri": "http://creativecommons.org/publicdomain/zero/1.0",
+ "iconUri": "https://licensebuttons.net/p/zero/1.0/88x31.png",
+ "rightsIdentifier": "CC0",
+ "rightsIdentifierScheme": "Creative Commons",
+ "schemeUri": "https://creativecommons.org/",
+ "languageCode": "en"
},
"termsOfAccess": "You need to request for access.",
"fileAccessRequest": true,
@@ -1282,7 +1532,12 @@ It returns a list of versions with their metadata, and file list:
"createTime": "2015-04-20T09:43:45Z",
"license": {
"name": "CC0 1.0",
- "uri": "http://creativecommons.org/publicdomain/zero/1.0"
+ "uri": "http://creativecommons.org/publicdomain/zero/1.0",
+ "iconUri": "https://licensebuttons.net/p/zero/1.0/88x31.png",
+ "rightsIdentifier": "CC0",
+ "rightsIdentifierScheme": "Creative Commons",
+ "schemeUri": "https://creativecommons.org/",
+ "languageCode": "en"
},
"termsOfAccess": "You need to request for access.",
"fileAccessRequest": true,
@@ -1294,6 +1549,8 @@ It returns a list of versions with their metadata, and file list:
The optional ``excludeFiles`` parameter specifies whether the files should be listed in the output. It defaults to ``true``, preserving backward compatibility. (Note that for a dataset with a large number of versions and/or files having the files included can dramatically increase the volume of the output). A separate ``/files`` API can be used for listing the files, or a subset thereof in a given version.
+The optional ``excludeMetadataBlocks`` parameter specifies whether the metadata blocks should be listed in the output. It defaults to ``false``, preserving backward compatibility. (Note that for a dataset with a large number of versions, having the metadata blocks included can dramatically increase the volume of the output.)
+
The optional ``offset`` and ``limit`` parameters can be used to specify the range of the versions list to be shown. This can be used to paginate through the list in a dataset with a large number of versions.
@@ -1318,6 +1575,12 @@ The fully expanded example above (without environment variables) looks like this
The optional ``excludeFiles`` parameter specifies whether the files should be listed in the output (defaults to ``true``). Note that a separate ``/files`` API can be used for listing the files, or a subset thereof in a given version.
+.. code-block:: bash
+
+ curl "https://demo.dataverse.org/api/datasets/24/versions/1.0?excludeMetadataBlocks=false"
+
+The optional ``excludeMetadataBlocks`` parameter specifies whether the metadata blocks should be listed in the output (defaults to ``false``).
+
By default, deaccessioned dataset versions are not included in the search when applying the :latest or :latest-published identifiers. Additionally, when filtering by a specific version tag, you will get a "not found" error if the version is deaccessioned and you do not enable the ``includeDeaccessioned`` option described below.
@@ -1344,6 +1607,8 @@ Export Metadata of a Dataset in Various Formats
|CORS| Export the metadata of the current published version of a dataset in various formats.
+To get a list of available formats, see :ref:`available-exporters` and :ref:`get-export-formats`.
+
See also :ref:`batch-exports-through-the-api` and the note below:
.. code-block:: bash
@@ -1360,9 +1625,30 @@ The fully expanded example above (without environment variables) looks like this
curl "https://demo.dataverse.org/api/datasets/export?exporter=ddi&persistentId=doi:10.5072/FK2/J8SJZB"
-.. note:: Supported exporters (export formats) are ``ddi``, ``oai_ddi``, ``dcterms``, ``oai_dc``, ``schema.org`` , ``OAI_ORE`` , ``Datacite``, ``oai_datacite`` and ``dataverse_json``. Descriptive names can be found under :ref:`metadata-export-formats` in the User Guide.
+.. _available-exporters:
+
+Available Dataset Metadata Exporters
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The following dataset metadata exporters ship with Dataverse:
+
+- ``Datacite``
+- ``dataverse_json``
+- ``dcterms``
+- ``ddi``
+- ``oai_datacite``
+- ``oai_dc``
+- ``oai_ddi``
+- ``OAI_ORE``
+- ``schema.org``
+
+These are the strings to pass as ``$METADATA_FORMAT`` in the examples above. Descriptive names for each format can be found under :ref:`metadata-export-formats` in the User Guide.
-.. note:: Additional exporters can be enabled, as described under :ref:`external-exporters` in the Installation Guide. To discover the machine-readable name of each exporter (e.g. ``ddi``), check :ref:`inventory-of-external-exporters` or ``getFormatName`` in the exporter's source code.
+Additional exporters can be enabled, as described under :ref:`external-exporters` in the Installation Guide. The machine-readable name/identifier for each external exporter can be found under :ref:`inventory-of-external-exporters`. If you are interested in creating your own exporter, see :doc:`/developers/metadataexport`.
+
+To discover the machine-readable name of exporters (e.g. ``ddi``) that have been enabled on the installation of Dataverse you are using see :ref:`get-export-formats`. Alternatively, you can use the Signposting "linkset" API documented under :ref:`signposting-api`.
+
+To discover the machine-readable name of exporters generally, check :ref:`inventory-of-external-exporters` or ``getFormatName`` in the exporter's source code.
Schema.org JSON-LD
^^^^^^^^^^^^^^^^^^
@@ -1376,6 +1662,8 @@ Both forms are valid according to Google's Structured Data Testing Tool at https
The standard has further evolved into a format called Croissant. For details, see :ref:`schema.org-head` in the Admin Guide.
+The ``schema.org`` format changed after Dataverse 6.4 as well. Previously its content type was "application/json" but now it is "application/ld+json".
+
List Files in a Dataset
~~~~~~~~~~~~~~~~~~~~~~~
@@ -1728,6 +2016,27 @@ The fully expanded example above (without environment variables) looks like this
curl "https://demo.dataverse.org/api/datasets/24/versions/:latest-published/compare/:draft"
+Get Versions of a Dataset with Summary of Changes
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Returns a list of versions for a given dataset including a summary of differences between consecutive versions where available. Draft versions will only
+be available to users who have permission to view unpublished drafts. The API token is optional.
+
+.. code-block:: bash
+
+ export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+ export SERVER_URL=https://demo.dataverse.org
+ export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/BCCP9Z
+
+ curl -H "X-Dataverse-key: $API_TOKEN" -X PUT "$SERVER_URL/api/datasets/:persistentId/versions/compareSummary?persistentId=$PERSISTENT_IDENTIFIER"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+ curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/datasets/:persistentId/versions/compareSummary?persistentId=doi:10.5072/FK2/BCCP9Z"
+
+
Update Metadata For a Dataset
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -2406,6 +2715,29 @@ Usage example:
.. note:: Keep in mind that you can combine all of the above query parameters depending on the results you are looking for.
+Get the Download count of a Dataset
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Shows the total number of downloads requested for a dataset. If MDC is enabled, the count will be limited to the time before the MDC start date unless the optional ``includeMDC`` parameter is set to true.
+Setting ``includeMDC`` to true will ignore the ``:MDCStartDate`` setting and return the total count.
+
+.. code-block:: bash
+
+ export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+ export DATASET_ID=1
+ export includeMDC=true
+
+ curl -s -H "X-Dataverse-key:$API_TOKEN" -X GET http://localhost:8080/api/datasets/$DATASET_ID/download/count
+ curl -s -H "X-Dataverse-key:$API_TOKEN" -X GET "http://localhost:8080/api/datasets/$DATASET_ID/download/count?includeMDC=$includeMDC"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+ curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/datasets/1/download/count?includeMDC=true"
+
+
+
Submit a Dataset for Review
~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -2457,6 +2789,7 @@ The fully expanded example above (without environment variables) looks like this
The review process can sometimes resemble a tennis match, with the authors submitting and resubmitting the dataset over and over until the curators are satisfied. Each time the curators send a "reason for return" via API, that reason is sent by email and is persisted into the database, stored at the dataset version level.
Note the reason is required, unless the `disable-return-to-author-reason` feature flag has been set (see :ref:`feature-flags`). Reason is a free text field and could be as simple as "The author would like to modify his dataset", "Files are missing", "Nothing to report" or "A curation report with comments and suggestions/instructions will follow in another email" that suits your situation.
+The :ref:`send-feedback-admin` Admin only API call may be useful as a way to move the conversation to email. However, note that these emails go to contacts (versus authors) and there is no database record of the email contents. (:ref:`dataverse.mail.cc-support-on-contact-email` will send a copy of these emails to the support email address which would provide a record.)
The :ref:`send-feedback` API call may be useful as a way to move the conversation to email. However, note that these emails go to contacts (versus authors) and there is no database record of the email contents. (:ref:`dataverse.mail.cc-support-on-contact-email` will send a copy of these emails to the support email address which would provide a record.)
Link a Dataset
@@ -2724,7 +3057,7 @@ The fully expanded example above (without environment variables) looks like this
.. code-block:: bash
curl "https://demo.dataverse.org/api/datasets/:persistentId/makeDataCount/citations?persistentId=10.5072/FK2/J8SJZB"
-
+
Delete Unpublished Dataset
~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -2942,15 +3275,23 @@ Retrieve Signposting Information
Dataverse supports :ref:`discovery-sign-posting` as a discovery mechanism.
Signposting involves the addition of a `Link `__ HTTP header providing summary information on GET and HEAD requests to retrieve the dataset page and a separate /linkset API call to retrieve additional information.
-Here is an example of a "Link" header:
+Signposting Link HTTP Header
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-``Link: ;rel="cite-as", ;rel="describedby";type="application/vnd.citationstyles.csl+json",;rel="describedby";type="application/ld+json", ;rel="type",;rel="type", ;rel="license", ; rel="linkset";type="application/linkset+json"``
+Here is an example of an HTTP "Link" header from a GET or HEAD request for a dataset landing page:
-The URL for linkset information is discoverable under the ``rel="linkset";type="application/linkset+json`` entry in the "Link" header, such as in the example above.
+``Link: ;rel="cite-as", ;rel="describedby";type="application/vnd.citationstyles.csl+json",;rel="describedby";type="application/json",;rel="describedby";type="application/xml",;rel="describedby";type="application/xml",;rel="describedby";type="application/xml",;rel="describedby";type="application/ld+json",;rel="describedby";type="application/xml",;rel="describedby";type="application/xml",;rel="describedby";type="text/html",;rel="describedby";type="application/json",;rel="describedby";type="application/xml", ;rel="type",;rel="type", ;rel="license", ; rel="linkset";type="application/linkset+json"``
+
+The URL for linkset information (described below) is discoverable under the ``rel="linkset";type="application/linkset+json`` entry in the "Link" header, such as in the example above.
+
+Signposting Linkset API Endpoint
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The reponse includes a JSON object conforming to the `Signposting `__ specification. As part of this conformance, unlike most Dataverse API responses, the output is not wrapped in a ``{"status":"OK","data":{`` object.
Signposting is not supported for draft dataset versions.
+Like :ref:`get-export-formats`, this API can be used to get URLs to dataset metadata export formats, but with URLs for the dataset in question.
+
.. code-block:: bash
export SERVER_URL=https://demo.dataverse.org
@@ -2983,6 +3324,8 @@ Usage example:
Get Citation
~~~~~~~~~~~~
+This API call returns the dataset citation as seen on the dataset page, wrapped as a JSON object, with the value in the "data" sub-object's "message" key.
+
.. code-block:: bash
export SERVER_URL=https://demo.dataverse.org
@@ -3000,6 +3343,35 @@ Usage example:
.. code-block:: bash
curl -H "Accept:application/json" "$SERVER_URL/api/datasets/:persistentId/versions/$VERSION/{version}/citation?persistentId=$PERSISTENT_IDENTIFIER&includeDeaccessioned=true"
+
+Get Citation In Other Formats
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Dataverse can also generate dataset citations in "EndNote", "RIS", "BibTeX", and "CSLJson" formats.
+Unlike the call above, which wraps the result in JSON, this API call sends the raw format with the appropriate content-type (EndNote is XML, RIS and BibTeX are plain text, and CSLJson is JSON). ("Internal" is also a valid value, returning the same content as the above call as HTML).
+This API call adds a format parameter in the API call which can be any of the values listed above.
+
+Usage example:
+
+.. code-block:: bash
+
+ export SERVER_URL=https://demo.dataverse.org
+ export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/YD5QDG
+ export VERSION=1.0
+ export FORMAT=EndNote
+
+ curl "$SERVER_URL/api/datasets/:persistentId/versions/$VERSION/citation/$FORMAT?persistentId=$PERSISTENT_IDENTIFIER"
+
+By default, deaccessioned dataset versions are not included in the search when applying the :latest or :latest-published identifiers. Additionally, when filtering by a specific version tag, you will get a "not found" error if the version is deaccessioned and you do not enable the ``includeDeaccessioned`` option described below.
+
+If you want to include deaccessioned dataset versions, you must set ``includeDeaccessioned`` query parameter to ``true``.
+
+Usage example:
+
+.. code-block:: bash
+
+ curl "$SERVER_URL/api/datasets/:persistentId/versions/$VERSION/citation/$FORMAT?persistentId=$PERSISTENT_IDENTIFIER&includeDeaccessioned=true"
+
Get Citation by Preview URL Token
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -3249,6 +3621,130 @@ The fully expanded example above (without environment variables) looks like this
curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/datasets/datasetTypes/3"
+.. _api-link-dataset-type:
+
+Link Dataset Type with Metadata Blocks
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Linking a dataset type with one or more metadata blocks results in additional fields from those blocks appearing in the output from the :ref:`list-metadata-blocks-for-a-collection` API endpoint. The new frontend for Dataverse (https://github.com/IQSS/dataverse-frontend) uses the JSON output from this API endpoint to construct the page that users see when creating or editing a dataset. Once the frontend has been updated to pass in the dataset type (https://github.com/IQSS/dataverse-client-javascript/issues/210), specifying a dataset type in this way can be an alternative way to display additional metadata fields than the traditional method, which is to enable a metadata block at the collection level (see :ref:`define-metadata-blocks-for-a-dataverse-collection`).
+
+For example, a superuser could create a type called "software" and link it to the "CodeMeta" metadata block (this example is below). Then, once the new frontend allows it, the user can specify that they want to create a dataset of type software and see the additional metadata fields from the CodeMeta block when creating or editing their dataset.
+
+This API endpoint is for superusers only.
+
+.. code-block:: bash
+
+ export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+ export SERVER_URL=https://demo.dataverse.org
+ export TYPE=software
+ export JSON='["codeMeta20"]'
+
+ curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-Type: application/json" "$SERVER_URL/api/datasets/datasetTypes/$TYPE" -X PUT -d $JSON
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+ curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -H "Content-Type: application/json" "https://demo.dataverse.org/api/datasets/datasetTypes/software" -X PUT -d '["codeMeta20"]'
+
+To update the blocks that are linked, send an array with those blocks.
+
+To remove all links to blocks, send an empty array.
+
+.. _api-dataset-version-note:
+
+Dataset Version Notes
+~~~~~~~~~~~~~~~~~~~~~
+
+Intended as :ref:`provenance` information about why the version was created and how it differs from the prior version.
+
+Depositors who can edit the dataset and curators can add a version note for the draft version. Superusers can add/delete version notes for any version.
+
+Version notes can be retrieved via the following, with authorization required to see a note on the ``:draft`` version:
+
+.. code-block:: bash
+
+ export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+ export SERVER_URL=https://demo.dataverse.org
+ export ID=3
+ export VERSION=:draft
+
+ curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/$ID/versions/$VERSION/versionNote"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+ curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/datasets/3/versions/:draft/versionNote"
+
+Notes can be set with:
+
+.. code-block:: bash
+
+ export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+ export SERVER_URL=https://demo.dataverse.org
+ export ID=3
+ export VERSION=:draft
+ export NOTE="Files updated to correct typos"
+
+ curl -H "X-Dataverse-key:$API_TOKEN" -X PUT -d "$NOTE" "$SERVER_URL/api/datasets/$ID/versions/$VERSION/versionNote"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+ curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT -d "Files updated to correct typos" "https://demo.dataverse.org/api/datasets/3/versions/:draft/versionNote"
+
+And deleted via:
+
+.. code-block:: bash
+
+ export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+ export SERVER_URL=https://demo.dataverse.org
+ export ID=3
+ export VERSION=2.0
+
+ curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/datasets/$ID/versions/$VERSION/versionNote"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+ curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/datasets/3/versions/2.0/versionNote"
+
+Delete Files from a Dataset
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Delete files from a dataset. This API call allows you to delete multiple files from a dataset in a single operation.
+
+.. code-block:: bash
+
+ export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+ export SERVER_URL=https://demo.dataverse.org
+ export PERSISTENT_IDENTIFIER=doi:10.5072/FK2ABCDEF
+
+ curl -H "X-Dataverse-key:$API_TOKEN" -X PUT "$SERVER_URL/api/datasets/:persistentId/deleteFiles?persistentId=$PERSISTENT_IDENTIFIER" \
+ -H "Content-Type: application/json" \
+ -d '{"fileIds": [1, 2, 3]}'
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+ curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/datasets/:persistentId/deleteFiles?persistentId=doi:10.5072/FK2ABCDEF" \
+ -H "Content-Type: application/json" \
+ -d '{"fileIds": [1, 2, 3]}'
+
+The ``fileIds`` in the JSON payload should be an array of file IDs that you want to delete from the dataset.
+
+You must have the appropriate permissions to delete files from the dataset.
+
+Upon success, the API will return a JSON response with a success message and the number of files deleted.
+
+The API call will report a 400 (BAD REQUEST) error if any of the files specified do not exist
+or are not in the latest version of the specified dataset.
+
+
Files
-----
@@ -3446,7 +3942,29 @@ The fully expanded example above (without environment variables) looks like this
curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/files/:persistentId/versions/:draft?persistentId=doi:10.5072/FK2/J8SJZB&returnOwners=true"
+Get JSON Representation of a file's versions
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Gets a list of versions of a data file showing any changes that affected the file with each version.
+The file can be specified either by its database id (e.g. ``19``) or by its persistent identifier (e.g. ``doi:10.5072/FK2/ADMYJF``).
+
+Usage example:
+
+.. code-block:: bash
+
+ export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+ export SERVER_URL=https://demo.dataverse.org
+ export ID=1234
+ export PERSISTENT_ID=doi:10.5072/FK2/J8SJZB
+
+ curl -H "X-Dataverse-key: $API_TOKEN" -X GET "$SERVER_URL/api/files/$ID/versionDifferences"
+ curl -H "X-Dataverse-key: $API_TOKEN" -X GET "$SERVER_URL/api/files/:persistentId/versionDifferences?persistentId=$PERSISTENT_ID"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+ curl -X GET "https://demo.dataverse.org/api/files/1234/versionDifferences"
+ curl -X GET "https://demo.dataverse.org/api/files/:persistentId/versionDifferences?persistentId=doi:10.5072/FK2/J8SJZB"
Adding Files
~~~~~~~~~~~~
@@ -3648,7 +4166,7 @@ The fully expanded example above (without environment variables) looks like this
Currently the following methods are used to detect file types:
- The file type detected by the browser (or sent via API).
-- Custom code that reads the first few bytes. As explained at :ref:`s3-direct-upload-features-disabled`, this method of file type detection is not utilized during direct upload to S3, since by nature of direct upload Dataverse never sees the contents of the file. However, this code is utilized when the "redetect" API is used.
+- Custom code that reads the first few bytes. As explained at :ref:`s3-direct-upload-features-disabled`, most of these methods are not utilized during direct upload to S3, since by nature of direct upload Dataverse never sees the contents of the file. However, this code is utilized when the "redetect" API is used.
- JHOVE: https://jhove.openpreservation.org . Note that the same applies about direct upload to S3 and the "redetect" API.
- The file extension (e.g. ".ipybn") is used, defined in a file called ``MimeTypeDetectionByFileExtension.properties``.
- The file name (e.g. "Dockerfile") is used, defined in a file called ``MimeTypeDetectionByFileName.properties``.
@@ -4549,12 +5067,12 @@ The JSON representation of a role (``roles.json``) looks like this::
{
"alias": "sys1",
- "name": โRestricted System Roleโ,
- "description": โA person who may only add datasets.โ,
+ "name": "Restricted System Role",
+ "description": "A person who may only add datasets.",
"permissions": [
"AddDataset"
]
- }
+ }
.. note:: alias is constrained to a length of 16 characters
@@ -4563,17 +5081,49 @@ Create Role
Roles can be created globally (:ref:`create-global-role`) or for individual Dataverse collections (:ref:`create-role-in-collection`).
+.. _show-role:
+
Show Role
~~~~~~~~~
-Shows the role with ``id``::
+You must have ``ManageDataversePermissions`` to be able to show a role that was created using :ref:`create-role-in-collection`. Global roles (:ref:`create-global-role`) can only be shown with a superuser API token.
+
+An example using a role alias:
+
+.. code-block:: bash
+
+ export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+ export SERVER_URL=https://demo.dataverse.org
+ export ALIAS=sys1
+
+ curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/roles/:alias?alias=$ALIAS"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+ curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/roles/:alias?alias=sys1"
- GET http://$SERVER/api/roles/$id
+An example using a role id:
+
+.. code-block:: bash
+
+ export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+ export SERVER_URL=https://demo.dataverse.org
+ export ID=11
+
+ curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/roles/$ID"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+ curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/roles/11"
Delete Role
~~~~~~~~~~~
-A curl example using an ``ID``
+An example using a role id:
.. code-block:: bash
@@ -4589,13 +5139,13 @@ The fully expanded example above (without environment variables) looks like this
curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/roles/24"
-A curl example using a Role alias ``ALIAS``
+An example using a role alias:
.. code-block:: bash
export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
export SERVER_URL=https://demo.dataverse.org
- export ALIAS=roleAlias
+ export ALIAS=sys1
curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/roles/:alias?alias=$ALIAS"
@@ -4603,8 +5153,7 @@ The fully expanded example above (without environment variables) looks like this
.. code-block:: bash
- curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/roles/:alias?alias=roleAlias"
-
+ curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/roles/:alias?alias=sys1"
Explicit Groups
---------------
@@ -4889,12 +5438,14 @@ The fully expanded example above (without environment variables) looks like this
curl "https://demo.dataverse.org/api/info/settings/:MaxEmbargoDurationInMonths"
-Get Export Formats
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
+.. _get-export-formats:
-Get the available export formats, including custom formats.
+Get Dataset Metadata Export Formats
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-The response contains an object with available format names as keys, and as values an object with the following properties:
+Get the available dataset metadata export formats, including formats from external exporters (see :ref:`available-exporters`).
+
+The response contains a JSON object with the available format names as keys (these can be passed to :ref:`export-dataset-metadata-api`), and values as objects with the following properties:
* ``displayName``
* ``mediaType``
@@ -5001,6 +5552,27 @@ The fully expanded example above (without environment variables) looks like this
curl "https://demo.dataverse.org/api/datasetfields/facetables"
+.. _setDisplayOnCreate:
+
+Set displayOnCreate for a Dataset Field
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Set displayOnCreate for a dataset field. See also :doc:`/admin/metadatacustomization` in the Admin Guide.
+
+.. code-block:: bash
+
+ export SERVER_URL=http://localhost:8080
+ export FIELD=subtitle
+ export BOOLEAN=true
+
+ curl -X POST "$SERVER_URL/api/admin/datasetfield/setDisplayOnCreate?datasetFieldType=$FIELD&setDisplayOnCreate=$BOOLEAN"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+ curl -X POST "http://localhost:8080/api/admin/datasetfield/setDisplayOnCreate?datasetFieldType=studyAssayCellType&setDisplayOnCreate=true"
+
.. _Notifications:
Notifications
@@ -5128,7 +5700,7 @@ Create a Harvesting Set
To create a harvesting set you must supply a JSON file that contains the following fields:
-- Name: Alpha-numeric may also contain -, _, or %, but no spaces. Must also be unique in the installation.
+- Name: Alpha-numeric may also contain -, _, or %, but no spaces. It must also be unique in the installation.
- Definition: A search query to select the datasets to be harvested. For example, a query containing authorName:YYY would include all datasets where "YYY" is the authorName.
- Description: Text that describes the harvesting set. The description appears in the Manage Harvesting Sets dashboard and in API responses. This field is optional.
@@ -5224,20 +5796,43 @@ The following API can be used to create and manage "Harvesting Clients". A Harve
List All Configured Harvesting Clients
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Shows all the Harvesting Clients configured::
+Shows all the harvesting clients configured.
- GET http://$SERVER/api/harvest/clients/
+.. note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of export below.
+
+.. code-block:: bash
+
+ export SERVER_URL=https://demo.dataverse.org
+
+ curl "$SERVER_URL/api/harvest/clients"
+
+The fully expanded example above (without the environment variables) looks like this:
+
+.. code-block:: bash
+
+ curl "https://demo.dataverse.org/api/harvest/clients"
Show a Specific Harvesting Client
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Shows a Harvesting Client with a defined nickname::
+Shows a harvesting client by nickname.
+
+.. code-block:: bash
+
+ export SERVER_URL=https://demo.dataverse.org
+ export NICKNAME=myclient
+
+ curl "$SERVER_URL/api/harvest/clients/$NICKNAME"
- GET http://$SERVER/api/harvest/clients/$nickname
+The fully expanded example above (without the environment variables) looks like this:
.. code-block:: bash
- curl "http://localhost:8080/api/harvest/clients/myclient"
+ curl "https://demo.dataverse.org/api/harvest/clients/myclient"
+
+The output will look something like the following.
+
+.. code-block:: bash
{
"status":"OK",
@@ -5253,8 +5848,10 @@ Shows a Harvesting Client with a defined nickname::
"type": "oai",
"dataverseAlias": "fooData",
"nickName": "myClient",
+ "sourceName": "",
"set": "fooSet",
- "useOaiIdentifiersAsPids": false
+ "useOaiIdentifiersAsPids": false,
+ "useListRecords": false,
"schedule": "none",
"status": "inActive",
"lastHarvest": "Thu Oct 13 14:48:57 EDT 2022",
@@ -5266,64 +5863,60 @@ Shows a Harvesting Client with a defined nickname::
}
+.. _create-a-harvesting-client:
+
Create a Harvesting Client
~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-To create a new harvesting client::
-
- POST http://$SERVER/api/harvest/clients/$nickname
-
-``nickName`` is the name identifying the new client. It should be alpha-numeric and may also contain -, _, or %, but no spaces. Must also be unique in the installation.
-You must supply a JSON file that describes the configuration, similarly to the output of the GET API above. The following fields are mandatory:
+To create a harvesting client you must supply a JSON file that describes the configuration, similarly to the output of the GET API above. The following fields are mandatory:
-- dataverseAlias: The alias of an existing collection where harvested datasets will be deposited
-- harvestUrl: The URL of the remote OAI archive
-- archiveUrl: The URL of the remote archive that will be used in the redirect links pointing back to the archival locations of the harvested records. It may or may not be on the same server as the harvestUrl above. If this OAI archive is another Dataverse installation, it will be the same URL as harvestUrl minus the "/oai". For example: https://demo.dataverse.org/ vs. https://demo.dataverse.org/oai
-- metadataFormat: A supported metadata format. As of writing this the supported formats are "oai_dc", "oai_ddi" and "dataverse_json".
+- ``dataverseAlias``: The alias of an existing collection where harvested datasets will be deposited
+- ``harvestUrl``: The URL of the remote OAI archive
+- ``archiveUrl``: The URL of the remote archive that will be used in the redirect links pointing back to the archival locations of the harvested records. It may or may not be on the same server as the harvestUrl above. If this OAI archive is another Dataverse installation, it will be the same URL as harvestUrl minus the "/oai". For example: https://demo.dataverse.org/ vs. https://demo.dataverse.org/oai
+- ``metadataFormat``: A supported metadata format. As of writing this the supported formats are "oai_dc", "oai_ddi" and "dataverse_json".
The following optional fields are supported:
-- archiveDescription: What the name suggests. If not supplied, will default to "This Dataset is harvested from our partners. Clicking the link will take you directly to the archival source of the data."
-- set: The OAI set on the remote server. If not supplied, will default to none, i.e., "harvest everything".
-- style: Defaults to "default" - a generic OAI archive. (Make sure to use "dataverse" when configuring harvesting from another Dataverse installation).
-- customHeaders: This can be used to configure this client with a specific HTTP header that will be added to every OAI request. This is to accommodate a use case where the remote server requires this header to supply some form of a token in order to offer some content not available to other clients. See the example below. Multiple headers can be supplied separated by `\\n` - actual "backslash" and "n" characters, not a single "new line" character.
-- allowHarvestingMissingCVV: Flag to allow datasets to be harvested with Controlled Vocabulary Values that existed in the originating Dataverse Project but are not in the harvesting Dataverse Project. (Default is false). Currently only settable using API.
-- useOaiIdentifiersAsPids: Defaults to false; if set to true, the harvester will attempt to use the identifier from the OAI-PMH record header as the **first choice** for the persistent id of the harvested dataset. When set to false, Dataverse will still attempt to use this identifier, but only if none of the `` entries in the OAI_DC record contain a valid persistent id (this is new as of v6.5).
-
-Generally, the API will accept the output of the GET version of the API for an existing client as valid input, but some fields will be ignored. For example, as of writing this there is no way to configure a harvesting schedule via this API.
+- ``sourceName``: When ``index-harvested-metadata-source`` is enabled (see :ref:`feature-flags`), sourceName will override the nickname in the Metadata Source facet. It can be used to group the content from many harvesting clients under the same name.
+- ``archiveDescription``: What the name suggests. If not supplied, will default to "This Dataset is harvested from our partners. Clicking the link will take you directly to the archival source of the data."
+- ``set``: The OAI set on the remote server. If not supplied, will default to none, i.e., "harvest everything". (Note: see the note below on using sets when harvesting from DataCite; this is new as of v6.6).
+- ``style``: Defaults to "default" - a generic OAI archive. (Make sure to use "dataverse" when configuring harvesting from another Dataverse installation).
+- ``schedule``: Defaults to "none" (not scheduled). Two formats are supported, for weekly- and daily-scheduled harvests; examples: ``Weekly, Sat 5 AM``; ``Daily, 11 PM``. Note that if a schedule definition is not formatted exactly as described here, it will be ignored silently and the client will be left unscheduled.
+- ``customHeaders``: This can be used to configure this client with a specific HTTP header that will be added to every OAI request. This is to accommodate a use case where the remote server requires this header to supply some form of a token in order to offer some content not available to other clients. See the example below. Multiple headers can be supplied separated by `\\n` - actual "backslash" and "n" characters, not a single "new line" character.
+- ``allowHarvestingMissingCVV``: Flag to allow datasets to be harvested with Controlled Vocabulary Values that existed in the originating Dataverse Project but are not in the harvesting Dataverse Project. (Default is false). Currently only settable using API.
+- ``useOaiIdentifiersAsPids``: Defaults to false; if set to true, the harvester will attempt to use the identifier from the OAI-PMH record header as the **first choice** for the persistent id of the harvested dataset. When set to false, Dataverse will still attempt to use this identifier, but only if none of the ``<dc:identifier>`` entries in the OAI_DC record contain a valid persistent id (this is new as of v6.5).
+- ``useListRecords``: Defaults to false; if set to true, the harvester will attempt to retrieve multiple records in a single pass using the OAI-PMH verb ListRecords. By default, our harvester relies on the combination of ListIdentifiers followed by multiple GetRecord calls for each individual record. Note that this option is required when configuring harvesting from DataCite. (this is new as of v6.6).
+
+Generally, the API will accept the output of the GET version of the API for an existing client as valid input, but some fields will be ignored.
-An example JSON file would look like this::
+You can download this :download:`harvesting-client.json <../_static/api/harvesting-client.json>` file to use as a starting point.
- {
- "nickName": "zenodo",
- "dataverseAlias": "zenodoHarvested",
- "harvestUrl": "https://zenodo.org/oai2d",
- "archiveUrl": "https://zenodo.org",
- "archiveDescription": "Moissonnรฉ depuis la collection LMOPS de l'entrepรดt Zenodo. En cliquant sur ce jeu de donnรฉes, vous serez redirigรฉ vers Zenodo.",
- "metadataFormat": "oai_dc",
- "customHeaders": "x-oai-api-key: xxxyyyzzz",
- "set": "user-lmops",
- "allowHarvestingMissingCVV":true
- }
+.. literalinclude:: ../_static/api/harvesting-client.json
Something important to keep in mind about this API is that, unlike the harvesting clients GUI, it will create a client with the values supplied without making any attempts to validate them in real time. In other words, for the `harvestUrl` it will accept anything that looks like a well-formed url, without making any OAI calls to verify that the name of the set and/or the metadata format entered are supported by it. This is by design, to give an admin an option to still be able to create a client, in a rare case when it cannot be done via the GUI because of some real time failures in an exchange with an otherwise valid OAI server. This however puts the responsibility on the admin to supply the values already confirmed to be valid.
-
.. note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of export below.
+
+``nickName`` in the JSON file and ``$NICKNAME`` in the URL path below both refer to the name identifying the new client. It should be alpha-numeric and may also contain -, _, or %, but no spaces. It must be unique in the installation.
+
.. code-block:: bash
export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
export SERVER_URL=http://localhost:8080
+ export NICKNAME=zenodo
- curl -H "X-Dataverse-key:$API_TOKEN" -X POST -H "Content-Type: application/json" "$SERVER_URL/api/harvest/clients/zenodo" --upload-file client.json
+ curl -H "X-Dataverse-key:$API_TOKEN" -X POST -H "Content-Type: application/json" "$SERVER_URL/api/harvest/clients/$NICKNAME" --upload-file harvesting-client.json
The fully expanded example above (without the environment variables) looks like this:
.. code-block:: bash
- curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST -H "Content-Type: application/json" "http://localhost:8080/api/harvest/clients/zenodo" --upload-file "client.json"
+ curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST -H "Content-Type: application/json" "http://localhost:8080/api/harvest/clients/zenodo" --upload-file "harvesting-client.json"
+
+The output will look something like the following.
+
+.. code-block:: bash
{
"status": "OK",
@@ -5357,14 +5950,85 @@ Similar to the API above, using the same JSON format, but run on an existing cli
Delete a Harvesting Client
~~~~~~~~~~~~~~~~~~~~~~~~~~
-Self-explanatory:
+.. code-block:: bash
+
+ export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+ export SERVER_URL=http://localhost:8080
+ export NICKNAME=zenodo
+
+ curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/harvest/clients/$NICKNAME"
+
+The fully expanded example above (without the environment variables) looks like this:
.. code-block:: bash
- curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "http://localhost:8080/api/harvest/clients/$nickName"
+ curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "http://localhost:8080/api/harvest/clients/zenodo"
Only users with superuser permissions may delete harvesting clients.
+Harvesting from DataCite
+~~~~~~~~~~~~~~~~~~~~~~~~
+
+The following 2 options are **required** when harvesting from DataCite (https://oai.datacite.org/oai):
+
+.. code-block:: bash
+
+ "useOaiIdentifiersAsPids": true,
+ "useListRecords": true,
+
+There are two ways the ``set`` parameter can be used when harvesting from DataCite:
+
+- DataCite maintains pre-configured OAI sets for every subscribing institution that registers DOIs with them. This can be used to harvest the entire set of metadata registered by this organization or school, etc. (this is identical to how the set parameter is used with any other standard OAI archive);
+- As a unique, proprietary DataCite feature, it can be used to harvest virtually any arbitrary subset of records (potentially spanning different institutions and authorities, etc.). Any query that the DataCite search API understands can be used as an OAI set name (!). For example, the following search query finds one specific dataset:
+
+.. code-block:: bash
+
+ https://api.datacite.org/dois?query=doi:10.7910/DVN/TJCLKP
+
+You can now create a single-record OAI set by using its base64-encoded form as the set name:
+
+.. code-block:: bash
+
+ echo "doi:10.7910/DVN/TJCLKP" | base64
+ ZG9pOjEwLjc5MTAvRFZOL1RKQ0xLUAo=
+
+Use the encoded string above prefixed by the ``~`` character in your harvesting client configuration:
+
+.. code-block:: bash
+
+ "set": "~ZG9pOjEwLjc5MTAvRFZOL1RKQ0xLUAo="
+
+The following configuration will create a client that will harvest the IQSS dataset specified above on a weekly schedule:
+
+.. code-block:: bash
+
+ {
+ "useOaiIdentifiersAsPids": true,
+ "useListRecords": true,
+ "set": "~ZG9pOjEwLjc5MTAvRFZOL1RKQ0xLUAo=",
+ "nickName": "iqssTJCLKP",
+ "dataverseAlias": "harvestedCollection",
+ "type": "oai",
+ "style": "default",
+ "harvestUrl": "https://oai.datacite.org/oai",
+ "archiveUrl": "https://oai.datacite.org",
+ "archiveDescription": "The metadata for this IQSS Dataset was harvested from DataCite. Clicking the dataset link will take you directly to the original archival location, as registered with DataCite.",
+ "schedule": "Weekly, Tue 4 AM",
+ "metadataFormat": "oai_dc"
+ }
+
+The queries can be as complex and/or long as necessary, with sub-queries combined via logical ANDs and ORs. Please keep in mind that white spaces must be encoded as ``%20``. For example, the following query:
+
+.. code-block:: bash
+
+ prefix:10.17603 AND (types.resourceType:Report* OR types.resourceType:Mission*)
+
+must be encoded as follows:
+
+.. code-block:: bash
+
+ echo "prefix:10.17603%20AND%20(types.resourceType:Report*%20OR%20types.resourceType:Mission*)" | base64
+ cHJlZml4OjEwLjE3NjAzJTIwQU5EJTIwKHR5cGVzLnJlc291cmNlVHlwZTpSZXBvcnQqJTIwT1IlMjB0eXBlcy5yZXNvdXJjZVR5cGU6TWlzc2lvbiopCg==
.. _pids-api:
@@ -5712,22 +6376,43 @@ Creates a global role in the Dataverse installation. The data POSTed are assumed
.. code-block:: bash
export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
- export SERVER_URL=https://demo.dataverse.org
- export ID=root
+ export SERVER_URL=http://localhost:8080
- curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/admin/roles" --upload-file roles.json
+ curl -H "Content-Type: application/json" -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/admin/roles" --upload-file roles.json
+
+``roles.json`` see :ref:`json-representation-of-a-role`
+
+Update Global Role
+~~~~~~~~~~~~~~~~~~
+
+Update a global role in the Dataverse installation. The PUTed data is assumed to be a complete JSON role as it will overwrite the existing role. ::
+
+ PUT http://$SERVER/api/admin/roles/$ID
+
+A curl example using an ``ID``
+
+.. code-block:: bash
+
+ export SERVER_URL=http://localhost:8080
+ export ID=24
+
+ curl -H "Content-Type: application/json" -X PUT "$SERVER_URL/api/admin/roles/$ID" --upload-file roles.json
``roles.json`` see :ref:`json-representation-of-a-role`
Delete Global Role
~~~~~~~~~~~~~~~~~~
+Deletes a ``DataverseRole`` whose ``id`` is passed. ::
+
+ DELETE http://$SERVER/api/admin/roles/$ID
+
A curl example using an ``ID``
.. code-block:: bash
export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
- export SERVER_URL=https://demo.dataverse.org
+ export SERVER_URL=http://localhost:8080
export ID=24
curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/admin/roles/$ID"
@@ -6131,6 +6816,27 @@ Example: List permissions a user (based on API Token used) has on a dataset whos
curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/admin/permissions/:persistentId?persistentId=$PERSISTENT_IDENTIFIER"
+List Dataverse collections a user can act on based on their permissions
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+List Dataverse collections a user can act on based on a particular permission ::
+
+ GET http://$SERVER/api/users/$identifier/allowedCollections/$permission
+
+.. note:: This API can only be called by an Administrator or by a User requesting their own list of accessible collections.
+
+The ``$identifier`` is the username of the requested user.
+The ``$permission`` is the permission (tied to the roles) that gives the user access to the collection.
+Passing ``$permission`` as 'any' will return the collection as long as the user has any access/permission on the collection.
+
+.. code-block:: bash
+
+ export SERVER_URL=https://demo.dataverse.org
+ export USERNAME=jsmith
+ export PERMISSION=PublishDataverse
+
+ curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/users/$USERNAME/allowedCollections/$PERMISSION"
+
Show Role Assignee
~~~~~~~~~~~~~~~~~~
@@ -6466,6 +7172,10 @@ View the details of the standard license with the database ID specified in ``$ID
Superusers can add a new license by posting a JSON file adapted from this example :download:`add-license.json <../_static/api/add-license.json>`. The ``name`` and ``uri`` of the new license must be unique. Sort order field is mandatory. If you are interested in adding a Creative Commons license, you are encouraged to use the JSON files under :ref:`adding-creative-commons-licenses`:
+Licenses must have a "name" and "uri" and may have the following optional fields: "shortDescription", "iconUri", "rightsIdentifier", "rightsIdentifierScheme", "schemeUri", "languageCode", "active", "sortOrder".
+The "name" and "uri" are used to display the license in the user interface, with "shortDescription" and "iconUri" being used to enhance the display if available.
+The "rightsIdentifier", "rightsIdentifierScheme", and "schemeUri" should be added if the license is available from https://spdx.org . "languageCode" should be sent if the language is not in English ("en"). "active" is a boolean indicating whether the license should be shown to users as an option. "sortOrder" is a numeric value - licenses are shown in the relative numeric order of this value.
+
.. code-block:: bash
export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
@@ -6557,10 +7267,10 @@ A curl example using allowing access to a dataset's metadata
Please see :ref:`dataverse.api.signature-secret` for the configuration option to add a shared secret, enabling extra
security.
-.. _send-feedback:
+.. _send-feedback-admin:
-Send Feedback To Contact(s)
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Send Feedback To Contact(s) Admin API
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This API call allows sending an email to the contacts for a collection, dataset, or datafile or to the support email address when no object is specified.
The call is protected by the normal /admin API protections (limited to localhost or requiring a separate key), but does not otherwise limit the sending of emails.
@@ -6583,6 +7293,44 @@ A curl example using an ``ID``
Note that this call could be useful in coordinating with dataset authors (assuming they are also contacts) as an alternative/addition to the functionality provided by :ref:`return-a-dataset`.
+.. _send-feedback:
+
+Send Feedback To Contact(s)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This API call allows sending an email to the contacts for a collection, dataset, or datafile or to the support email address when no object is specified.
+The call is protected from embedded html in the body as well as the ability to configure body size limits and rate limiting to avoid the potential for spam.
+
+The call is a POST with a JSON object as input with five keys:
+- "targetId" - the id of the collection, dataset, or datafile. Persistent ids and collection aliases are not supported. (Optional)
+- "identifier" - the alias of a collection or the persistent id of a dataset or datafile. (Optional)
+- "subject" - the email subject line. (Required)
+- "body" - the email body to send (Required)
+- "fromEmail" - the email to list in the reply-to field. (Dataverse always sends mail from the system email, but does it "on behalf of" and with a reply-to for the specified user. Authenticated users will have the 'fromEmail' filled in from their profile if this field is not specified)
+
+A curl example using an ``ID``
+
+.. code-block:: bash
+
+ export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+ export SERVER_URL=https://demo.dataverse.org
+ export JSON='{"targetId":24, "subject":"Data Question", "body":"Please help me understand your data. Thank you!"}'
+
+ curl -X POST -H "X-Dataverse-key:$API_TOKEN" -H 'Content-Type:application/json' -d "$JSON" "$SERVER_URL/api/sendfeedback"
+
+
+A curl example using a ``Dataverse Alias or Dataset/DataFile PersistentId``
+
+.. code-block:: bash
+
+ export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+ export SERVER_URL=https://demo.dataverse.org
+ export JSON='{"identifier":"root", "subject":"Data Question", "body":"Please help me understand your data. Thank you!"}'
+
+ curl -X POST -H "X-Dataverse-key:$API_TOKEN" -H 'Content-Type:application/json' -d "$JSON" "$SERVER_URL/api/sendfeedback"
+
+Note that this call could be useful in coordinating with dataset authors (assuming they are also contacts) as an alternative/addition to the functionality provided by :ref:`return-a-dataset`.
+
.. _thumbnail_reset:
Reset Thumbnail Failure Flags
@@ -6616,6 +7364,8 @@ MyData
The MyData API is used to get a list of just the datasets, dataverses or datafiles an authenticated user can edit.
+The API excludes dataverses linked to a harvesting client. This results in `a known issue `_ where regular datasets in harvesting dataverses are missing from the results.
+
A curl example listing objects
.. code-block:: bash
diff --git a/doc/sphinx-guides/source/api/search.rst b/doc/sphinx-guides/source/api/search.rst
index 7ca9a5abca6..9a211988979 100755
--- a/doc/sphinx-guides/source/api/search.rst
+++ b/doc/sphinx-guides/source/api/search.rst
@@ -21,9 +21,9 @@ Please note that in Dataverse Software 4.3 and older the "citation" field wrappe
Parameters
----------
-=============== ======= ===========
+================ ======= ===========
Name Type Description
-=============== ======= ===========
+================ ======= ===========
q string The search term or terms. Using "title:data" will search only the "title" field. "*" can be used as a wildcard either alone or adjacent to a term (i.e. "bird*"). For example, https://demo.dataverse.org/api/search?q=title:data . For a list of fields to search, please see https://github.com/IQSS/dataverse/issues/2558 (for now).
type string Can be either "dataverse", "dataset", or "file". Multiple "type" parameters can be used to include multiple types (i.e. ``type=dataset&type=file``). If omitted, all types will be returned. For example, https://demo.dataverse.org/api/search?q=*&type=dataset
subtree string The identifier of the Dataverse collection to which the search should be narrowed. The subtree of this Dataverse collection and all its children will be searched. Multiple "subtree" parameters can be used to include multiple Dataverse collections. For example, https://demo.dataverse.org/api/search?q=data&subtree=birds&subtree=cats .
@@ -38,7 +38,8 @@ show_entity_ids boolean Whether or not to show the database IDs of the search
geo_point string Latitude and longitude in the form ``geo_point=42.3,-71.1``. You must supply ``geo_radius`` as well. See also :ref:`geospatial-search`.
geo_radius string Radial distance in kilometers from ``geo_point`` (which must be supplied as well) such as ``geo_radius=1.5``.
metadata_fields string Includes the requested fields for each dataset in the response. Multiple "metadata_fields" parameters can be used to include several fields. The value must be in the form "{metadata_block_name}:{field_name}" to include a specific field from a metadata block (see :ref:`example `) or "{metadata_field_set_name}:\*" to include all the fields for a metadata block (see :ref:`example `). "{field_name}" cannot be a subfield of a compound field. If "{field_name}" is a compound field, all subfields are included.
-=============== ======= ===========
+show_type_counts boolean Whether or not to include total_count_per_object_type for types: Dataverse, Dataset, and Files.
+================ ======= ===========
Basic Search Example
--------------------
@@ -701,7 +702,11 @@ The above example ``metadata_fields=citation:dsDescription&metadata_fields=citat
"published_at": "2021-03-16T08:11:54Z"
}
],
- "count_in_response": 4
+ "count_in_response": 4,
+ "total_count_per_object_type": {
+ "Datasets": 2,
+ "Dataverses": 2
+ }
}
}
diff --git a/doc/sphinx-guides/source/container/configbaker-image.rst b/doc/sphinx-guides/source/container/configbaker-image.rst
index d098bd46436..09e431eb547 100644
--- a/doc/sphinx-guides/source/container/configbaker-image.rst
+++ b/doc/sphinx-guides/source/container/configbaker-image.rst
@@ -54,7 +54,7 @@ Scripts
- Default script when running container without parameters. Lists available scripts and details about them.
* - ``update-fields.sh``
- Update a Solr ``schema.xml`` with a given list of metadata fields. See ``update-fields.sh -h`` for usage details
- and :ref:`update-solr-schema` for an example use case.
+ and example use cases at :ref:`update-solr-schema` and :ref:`update-solr-schema-dev`.
Solr Template
^^^^^^^^^^^^^
diff --git a/doc/sphinx-guides/source/container/index.rst b/doc/sphinx-guides/source/container/index.rst
index abf871dd340..38641cce642 100644
--- a/doc/sphinx-guides/source/container/index.rst
+++ b/doc/sphinx-guides/source/container/index.rst
@@ -4,6 +4,7 @@ Container Guide
**Contents:**
.. toctree::
+ :maxdepth: 2
intro
running/index
diff --git a/doc/sphinx-guides/source/container/running/demo.rst b/doc/sphinx-guides/source/container/running/demo.rst
index b1945070714..3cb0274c936 100644
--- a/doc/sphinx-guides/source/container/running/demo.rst
+++ b/doc/sphinx-guides/source/container/running/demo.rst
@@ -29,6 +29,8 @@ To stop the containers hit ``Ctrl-c`` (hold down the ``Ctrl`` key and then hit t
To start the containers, run ``docker compose up``.
+.. _starting-over:
+
Deleting Data and Starting Over
-------------------------------
@@ -46,6 +48,8 @@ Starting Fresh
For this exercise, please start fresh by stopping all containers and removing the ``data`` directory.
+.. _demo-persona:
+
Creating and Running a Demo Persona
+++++++++++++++++++++++++++++++++++
@@ -137,6 +141,29 @@ In the example below of configuring :ref:`:FooterCopyright` we use the default u
One you make this change it should be visible in the copyright in the bottom left of every page.
+Root Collection Customization (Alias, Name, etc.)
++++++++++++++++++++++++++++++++++++++++++++++++++
+
+Before running ``docker compose up`` for the first time, you can customize the root collection by placing a JSON file in the right place.
+
+First, in the "demo" directory you created (see :ref:`demo-persona`), create a subdirectory called "config":
+
+``mkdir demo/config``
+
+Next, download :download:`dataverse-complete.json <../../_static/api/dataverse-complete.json>` and put it in the "config" directory you just created. The contents of your "demo" directory should look something like this:
+
+.. code-block:: bash
+
+ % find demo
+ demo
+ demo/config
+ demo/config/dataverse-complete.json
+ demo/init.sh
+
+Edit ``dataverse-complete.json`` to have the values you want. You'll want to refer to :ref:`update-dataverse-api` in the API Guide to understand the format. In that documentation you can find optional parameters as well.
+
+To test your JSON file, run ``docker compose up``. Again, this only works when you are running ``docker compose up`` for the first time. (You can always start over. See :ref:`starting-over`.)
+
Multiple Languages
++++++++++++++++++
@@ -160,6 +187,11 @@ Next, set up the UI toggle between English and French, again using the unblock k
Stop and start the Dataverse container in order for the language toggle to work.
+PID Providers
++++++++++++++
+
+Dataverse supports multiple Persistent ID (PID) providers. The ``compose.yml`` file uses the Permalink PID provider. Follow :ref:`pids-configuration` to reconfigure as needed.
+
Next Steps
----------
diff --git a/doc/sphinx-guides/source/contributor/index.md b/doc/sphinx-guides/source/contributor/index.md
index 1017f15f0ed..f7979b1dd0c 100644
--- a/doc/sphinx-guides/source/contributor/index.md
+++ b/doc/sphinx-guides/source/contributor/index.md
@@ -4,7 +4,7 @@ Thank you for your interest in contributing to Dataverse! We are open to contri
```{contents} Contents:
:local:
-:depth: 3
+:depth: 2
```
## Ideas and Feature Requests
diff --git a/doc/sphinx-guides/source/developers/big-data-support.rst b/doc/sphinx-guides/source/developers/big-data-support.rst
index f3d98fae0bf..75a50e2513d 100644
--- a/doc/sphinx-guides/source/developers/big-data-support.rst
+++ b/doc/sphinx-guides/source/developers/big-data-support.rst
@@ -44,7 +44,7 @@ Features that are Disabled if S3 Direct Upload is Enabled
The following features are disabled when S3 direct upload is enabled.
- Unzipping of zip files. (See :ref:`compressed-files`.)
-- Detection of file type based on JHOVE and custom code that reads the first few bytes. (See :ref:`redetect-file-type`.)
+- Detection of file type based on JHOVE and custom code that reads the first few bytes except for the refinement of Stata file types to include the version. (See :ref:`redetect-file-type`.)
- Extraction of metadata from FITS files. (See :ref:`fits`.)
- Creation of NcML auxiliary files (See :ref:`netcdf-and-hdf5`.)
- Extraction of a geospatial bounding box from NetCDF and HDF5 files (see :ref:`netcdf-and-hdf5`) unless :ref:`dataverse.netcdf.geo-extract-s3-direct-upload` is set to true.
diff --git a/doc/sphinx-guides/source/developers/classic-dev-env.rst b/doc/sphinx-guides/source/developers/classic-dev-env.rst
index d305019004e..2e32b8d4bfb 100755
--- a/doc/sphinx-guides/source/developers/classic-dev-env.rst
+++ b/doc/sphinx-guides/source/developers/classic-dev-env.rst
@@ -93,15 +93,15 @@ On Linux, install ``jq`` from your package manager or download a binary from htt
Install Payara
~~~~~~~~~~~~~~
-Payara 6.2024.6 or higher is required.
+Payara 6.2025.2 or higher is required.
To install Payara, run the following commands:
``cd /usr/local``
-``sudo curl -O -L https://nexus.payara.fish/repository/payara-community/fish/payara/distributions/payara/6.2024.6/payara-6.2024.6.zip``
+``sudo curl -O -L https://nexus.payara.fish/repository/payara-community/fish/payara/distributions/payara/6.2025.2/payara-6.2025.2.zip``
-``sudo unzip payara-6.2024.6.zip``
+``sudo unzip payara-6.2025.2.zip``
``sudo chown -R $USER /usr/local/payara6``
@@ -113,30 +113,30 @@ Install Service Dependencies Directly on localhost
Install PostgreSQL
^^^^^^^^^^^^^^^^^^
-The Dataverse Software has been tested with PostgreSQL versions up to 13. PostgreSQL version 10+ is required.
+The Dataverse Software has been tested with PostgreSQL versions up to 17. PostgreSQL version 10+ is required.
-On Mac, go to https://www.postgresql.org/download/macosx/ and choose "Interactive installer by EDB" option. Note that version 13.5 is used in the command line examples below, but the process should be similar for other versions. When prompted to set a password for the "database superuser (postgres)" just enter "password".
+On Mac, go to https://www.postgresql.org/download/macosx/ and choose "Interactive installer by EDB" option. Note that version 16 is used in the command line examples below, but the process should be similar for other versions. When prompted to set a password for the "database superuser (postgres)" just enter "password".
After installation is complete, make a backup of the ``pg_hba.conf`` file like this:
-``sudo cp /Library/PostgreSQL/13/data/pg_hba.conf /Library/PostgreSQL/13/data/pg_hba.conf.orig``
+``sudo cp /Library/PostgreSQL/16/data/pg_hba.conf /Library/PostgreSQL/16/data/pg_hba.conf.orig``
Then edit ``pg_hba.conf`` with an editor such as vi:
-``sudo vi /Library/PostgreSQL/13/data/pg_hba.conf``
+``sudo vi /Library/PostgreSQL/16/data/pg_hba.conf``
In the "METHOD" column, change all instances of "scram-sha-256" (or whatever is in that column) to "trust". This will make it so PostgreSQL doesn't require a password.
-In the Finder, click "Applications" then "PostgreSQL 13" and launch the "Reload Configuration" app. Click "OK" after you see "server signaled".
+In the Finder, click "Applications" then "PostgreSQL 16" and launch the "Reload Configuration" app. Click "OK" after you see "server signaled".
-Next, to confirm the edit worked, launch the "pgAdmin" application from the same folder. Under "Browser", expand "Servers" and double click "PostgreSQL 13". When you are prompted for a password, leave it blank and click "OK". If you have successfully edited "pg_hba.conf", you can get in without a password.
+Next, to confirm the edit worked, launch the "pgAdmin" application from the same folder. Under "Browser", expand "Servers" and double click "PostgreSQL 16". When you are prompted for a password, leave it blank and click "OK". If you have successfully edited "pg_hba.conf", you can get in without a password.
On Linux, you should just install PostgreSQL using your favorite package manager, such as ``yum``. (Consult the PostgreSQL section of :doc:`/installation/prerequisites` in the main Installation guide for more info and command line examples). Find ``pg_hba.conf`` and set the authentication method to "trust" and restart PostgreSQL.
Install Solr
^^^^^^^^^^^^
-`Solr `_ 9.4.1 is required.
+`Solr `_ 9.8.0 is required.
Follow the instructions in the "Installing Solr" section of :doc:`/installation/prerequisites` in the main Installation guide.
diff --git a/doc/sphinx-guides/source/developers/dataset-migration-api.rst b/doc/sphinx-guides/source/developers/dataset-migration-api.rst
index fc86b7ccdcf..941527133ef 100644
--- a/doc/sphinx-guides/source/developers/dataset-migration-api.rst
+++ b/doc/sphinx-guides/source/developers/dataset-migration-api.rst
@@ -5,10 +5,15 @@ The Dataverse software includes several ways to add Datasets originally created
This experimental migration API offers an additional option with some potential advantages:
-* metadata can be specified using the json-ld format used in the OAI-ORE metadata export
-* existing publication dates and PIDs are maintained (currently limited to the case where the PID can be managed by the Dataverse software, e.g. where the authority and shoulder match those the software is configured for)
-* updating the PID at the provider can be done immediately or later (with other existing APIs)
-* adding files can be done via the standard APIs, including using direct-upload to S3
+* Metadata can be specified using the json-ld format used in the OAI-ORE metadata export. Please note that the json-ld generated by the OAI-ORE metadata export is not directly compatible with the Migration API. The OAI-ORE export nests resource metadata under the :code:`ore:describes` wrapper, and the Dataset Migration API requires that metadata be at the root level. Please check the example file below for reference.
+
 * If you need a tool to convert OAI-ORE exported json-ld into a format compatible with the Dataset Migration API, or if you need to generate compatible json-ld from sources other than an existing Dataverse installation, the `BaseX `_ database engine, used together with the XQuery language, provides an efficient solution. Please see example script :download:`transform-oai-ore-jsonld.xq <../_static/api/transform-oai-ore-jsonld.xq>` for a simple conversion from exported OAI-ORE json-ld to a Dataset Migration API-compatible version.
+
+* Existing publication dates and PIDs are maintained (currently limited to the case where the PID can be managed by the Dataverse software, e.g. where the authority and shoulder match those the software is configured for)
+
+* Updating the PID at the provider can be done immediately or later (with other existing APIs).
+
+* Adding files can be done via the standard APIs, including using direct-upload to S3.
This API consists of 2 calls: one to create an initial Dataset version, and one to 'republish' the dataset through Dataverse with a specified publication date.
Both calls require super-admin privileges.
@@ -31,7 +36,13 @@ To import a dataset with an existing persistent identifier (PID), the provided j
curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/dataverses/$DATAVERSE_ID/datasets/:startmigration --upload-file dataset-migrate.jsonld
-An example jsonld file is available at :download:`dataset-migrate.jsonld <../_static/api/dataset-migrate.jsonld>` . Note that you would need to replace the PID in the sample file with one supported in your Dataverse instance.
+An example jsonld file is available at :download:`dataset-migrate.jsonld <../_static/api/dataset-migrate.jsonld>` . Note that you would need to replace the PID in the sample file with one supported in your Dataverse instance.
+
+You also need to replace the :code:`dataverse.siteUrl` in the json-ld :code:`@context` with your current Dataverse site URL. This is necessary to define a local URI for metadata terms originating from community metadata blocks (in the case of the example file, from the Social Sciences and Humanities and Geospatial blocks).
+
+Currently, as of Dataverse 6.5 and earlier, community metadata blocks do not assign a default global URI to the terms used in the block in contrast to citation metadata, which has global URI defined.
+
+
Publish a Migrated Dataset
--------------------------
diff --git a/doc/sphinx-guides/source/developers/index.rst b/doc/sphinx-guides/source/developers/index.rst
index 6d01e13d78e..90ccc261b1d 100755
--- a/doc/sphinx-guides/source/developers/index.rst
+++ b/doc/sphinx-guides/source/developers/index.rst
@@ -9,6 +9,7 @@ Developer Guide
**Contents:**
.. toctree::
+ :maxdepth: 2
intro
dev-environment
diff --git a/doc/sphinx-guides/source/developers/make-data-count.rst b/doc/sphinx-guides/source/developers/make-data-count.rst
index f347e7b8ff9..9fb41f67be4 100644
--- a/doc/sphinx-guides/source/developers/make-data-count.rst
+++ b/doc/sphinx-guides/source/developers/make-data-count.rst
@@ -49,7 +49,7 @@ Once you are done with your configuration, you can run Counter Processor like th
``su - counter``
-``cd /usr/local/counter-processor-1.05``
+``cd /usr/local/counter-processor-1.06``
``CONFIG_FILE=counter-processor-config.yaml python39 main.py``
@@ -82,7 +82,7 @@ Second, if you are also sending your SUSHI report to Make Data Count, you will n
``curl -H "Authorization: Bearer $JSON_WEB_TOKEN" -X DELETE https://$MDC_SERVER/reports/$REPORT_ID``
-To get the ``REPORT_ID``, look at the logs generated in ``/usr/local/counter-processor-1.05/tmp/datacite_response_body.txt``
+To get the ``REPORT_ID``, look at the logs generated in ``/usr/local/counter-processor-1.06/tmp/datacite_response_body.txt``
To read more about the Make Data Count api, see https://github.com/datacite/sashimi
@@ -110,9 +110,11 @@ The script will process the newest set of log files (merging files from multiple
APIs to manage the states include GET, POST, and DELETE (for testing), as shown below.
Note: ``yearMonth`` must be in the format ``yyyymm`` or ``yyyymmdd``.
+Note: If running the new script on multiple servers, add the query parameter &server=serverName on the first POST call. The server name cannot be changed once set. To clear the name out, you must delete the state and post a new one.
``curl -X GET http://localhost:8080/api/admin/makeDataCount/{yearMonth}/processingState``
+``curl -X POST http://localhost:8080/api/admin/makeDataCount/{yearMonth}/processingState?state=processing&server=server1``
``curl -X POST http://localhost:8080/api/admin/makeDataCount/{yearMonth}/processingState?state=done``
``curl -X DELETE http://localhost:8080/api/admin/makeDataCount/{yearMonth}/processingState``
diff --git a/doc/sphinx-guides/source/developers/making-releases.rst b/doc/sphinx-guides/source/developers/making-releases.rst
index aed174f60d4..8f9b43eabcb 100755
--- a/doc/sphinx-guides/source/developers/making-releases.rst
+++ b/doc/sphinx-guides/source/developers/making-releases.rst
@@ -30,7 +30,24 @@ Early on, make sure it's clear what type of release this is. The steps below des
Ensure Issues Have Been Created
-------------------------------
-In advance of a release, GitHub issues should have been created already that capture certain steps. See https://github.com/IQSS/dataverse-pm/issues/335 for examples.
+Some of the steps in this document are well-served by having their own dedicated GitHub issue. You'll see a label like this on them:
+
+|dedicated|
+
+There are a variety of reasons why a step might deserve its own dedicated issue:
+
+- The step can be done by a team member other than the person doing the release.
+- Stakeholders might be interested in the status of a step (e.g. has the release been deployed to the demo site).
+
+Steps don't get their own dedicated issue if it would be confusing to have multiple people involved. Too many cooks in the kitchen, as they say. Also, some steps are so small the overhead of an issue isn't worth it.
+
+Before the release even begins you can coordinate with the project manager about the creation of these issues.
+
+.. |dedicated| raw:: html
+
+
+ Dedicated Issue
+
Declare a Code Freeze
---------------------
@@ -40,18 +57,25 @@ The following steps are made more difficult if code is changing in the "develop"
Conduct Performance Testing
---------------------------
+|dedicated|
+
See :doc:`/qa/performance-tests` for details.
-Conduct Smoke Testing
----------------------
+Conduct Regression Testing
+---------------------------
+
+|dedicated|
See :doc:`/qa/testing-approach` for details.
+Refer to the provided regression checklist for the list of items to verify during the testing process: `Regression Checklist `_.
.. _write-release-notes:
Write Release Notes
-------------------
+|dedicated|
+
Developers express the need for an addition to release notes by creating a "release note snippet" in ``/doc/release-notes`` containing the name of the issue they're working on. The name of the branch could be used for the filename with ".md" appended (release notes are written in Markdown) such as ``5053-apis-custom-homepage.md``. See :ref:`writing-release-note-snippets` for how this is described for contributors.
The task at or near release time is to collect these snippets into a single file.
@@ -62,17 +86,22 @@ The task at or near release time is to collect these snippets into a single file
- Include instructions describing the steps required to upgrade the application from the previous version. These must be customized for release numbers and special circumstances such as changes to metadata blocks and infrastructure.
- Take the release notes .md through the regular Code Review and QA process. That is, make a pull request. Here's an example: https://github.com/IQSS/dataverse/pull/10866
-Upgrade Instructions for Internal
----------------------------------
+Deploy Release Candidate to Internal
+------------------------------------
+
+|dedicated|
To upgrade internal, go to /doc/release-notes, open the release-notes.md file for the current release and perform all the steps under "Upgrade Instructions".
Deploy Release Candidate to Demo
--------------------------------
+|dedicated|
+
First, build the release candidate.
ssh into the dataverse-internal server and undeploy the current war file.
+Go to /doc/release-notes, open the release-notes.md file for the current release, and perform all the steps under "Upgrade Instructions".
Go to https://jenkins.dataverse.org/job/IQSS_Dataverse_Internal/ and make the following adjustments to the config:
@@ -91,6 +120,8 @@ ssh into the demo server and follow the upgrade instructions in the release note
Prepare Release Branch
----------------------
+|dedicated|
+
The release branch will have the final changes such as bumping the version number.
Usually we branch from the "develop" branch to create the release branch. If we are creating a hotfix for a particular version (5.11, for example), we branch from the tag (e.g. ``v5.11``).
@@ -116,18 +147,20 @@ Return to the parent pom and make the following change, which is necessary for p
(Before you make this change the value should be ``${parsedVersion.majorVersion}.${parsedVersion.nextMinorVersion}``. Later on, after cutting a release, we'll change it back to that value.)
-For a regular release, make the changes above in the release branch you created, make a pull request, and merge it into the "develop" branch. Like usual, you can safely delete the branch after the merge is complete.
+For a regular release, make the changes above in the release branch you created, but hold off for a moment on making a pull request because Jenkins will fail, as it will be testing the previous release.
-If you are making a hotfix release, make the pull request against the "master" branch. Do not delete the branch after merging because we will later merge it into the "develop" branch to pick up the hotfix. More on this later.
+In the dataverse-ansible repo, bump the version in `jenkins.yml `_ and make a pull request such as https://github.com/gdcc/dataverse-ansible/pull/386. Wait for it to be merged. Note that bumping on the Jenkins side like this will mean that all pull requests will show failures in Jenkins until they are updated to the version we are releasing.
-Either way, as usual, you should ensure that all tests are passing. Please note that you will need to bump the version in `jenkins.yml `_ in dataverse-ansible to get the tests to pass. Consider doing this before making the pull request. Alternatively, you can bump jenkins.yml after making the pull request and re-run the Jenkins job to make sure tests pass.
+Once dataverse-ansible has been merged, return to the branch you created above ("10852-bump-to-6.4" or whatever) and make a pull request. Ensure that all tests are passing and then put the PR through the normal review and QA process.
+
+If you are making a hotfix release, make the pull request against the "master" branch. Do not delete the branch after merging because we will later merge it into the "develop" branch to pick up the hotfix. More on this later.
Merge "develop" into "master"
-----------------------------
If this is a regular (non-hotfix) release, create a pull request to merge the "develop" branch into the "master" branch using this "compare" link: https://github.com/IQSS/dataverse/compare/master...develop
-Once important tests have passed (compile, unit tests, etc.), merge the pull request. Don't worry about style tests failing such as for shell scripts.
+Once important tests have passed (compile, unit tests, etc.), merge the pull request (skipping code review is ok). Don't worry about style tests failing such as for shell scripts.
If this is a hotfix release, skip this whole "merge develop to master" step (the "develop" branch is not involved until later).
@@ -160,7 +193,7 @@ Go to https://jenkins.dataverse.org/job/guides.dataverse.org/ and make the follo
- Repository URL: ``https://github.com/IQSS/dataverse.git``
- Branch Specifier (blank for 'any'): ``*/master``
-- ``VERSION`` (under "Build Steps"): ``5.10.1`` (for example)
+- ``VERSION`` (under "Build Steps"): bump to the next release. Don't prepend a "v". Use ``5.10.1`` (for example)
Click "Save" then "Build Now".
@@ -265,24 +298,37 @@ Close Milestone on GitHub and Create a New One
You can find our milestones at https://github.com/IQSS/dataverse/milestones
-Now that we've published the release, close the milestone and create a new one.
+Now that we've published the release, close the milestone and create a new one for the **next** release, the release **after** the one we're working on, that is.
Note that for milestones we use just the number without the "v" (e.g. "5.10.1").
+On the project board at https://github.com/orgs/IQSS/projects/34 edit the tab (view) that shows the milestone to show the next milestone.
+
Update the Container Base Image Version Property
------------------------------------------------
+|dedicated|
+
Create a new branch (any name is fine but ``prepare-next-iteration`` is suggested) and update the following files to prepare for the next development cycle:
- modules/dataverse-parent/pom.xml -> ```` -> profile "ct" -> ```` -> Set ```` to ``${parsedVersion.majorVersion}.${parsedVersion.nextMinorVersion}``
-Now create a pull request and merge it.
+Create a pull request and put it through code review, like usual. Give it a milestone of the next release, the one **after** the one we're working on. Once the pull request has been approved, merge it. It should be the first PR merged of the next release.
For more background, see :ref:`base-supported-image-tags`. For an example, see https://github.com/IQSS/dataverse/pull/10896
+Lift the Code Freeze and Encourage Developers to Update Their Branches
+----------------------------------------------------------------------
+
+It's now safe to lift the code freeze. We can start merging pull requests into the "develop" branch for the next release.
+
+Let developers know that they should merge the latest from the "develop" branch into any branches they are working on.
+
Deploy Final Release on Demo
----------------------------
+|dedicated|
+
Above you already did the hard work of deploying a release candidate to https://demo.dataverse.org. It should be relatively straightforward to undeploy the release candidate and deploy the final release.
Update SchemaSpy
@@ -316,6 +362,11 @@ Announce the Release on the Mailing List
Post a message at https://groups.google.com/g/dataverse-community
+Announce the Release on Zulip
+-----------------------------
+
+Post a message under #community at https://dataverse.zulipchat.com
+
For Hotfixes, Merge Hotfix Branch into "develop" and Rename SQL Scripts
-----------------------------------------------------------------------
diff --git a/doc/sphinx-guides/source/developers/tips.rst b/doc/sphinx-guides/source/developers/tips.rst
index f5ffbac0c07..d466dff5c76 100755
--- a/doc/sphinx-guides/source/developers/tips.rst
+++ b/doc/sphinx-guides/source/developers/tips.rst
@@ -185,7 +185,24 @@ Solr
Once some Dataverse collections, datasets, and files have been created and indexed, you can experiment with searches directly from Solr at http://localhost:8983/solr/#/collection1/query and look at the JSON output of searches, such as this wildcard search: http://localhost:8983/solr/collection1/select?q=*%3A*&wt=json&indent=true . You can also get JSON output of static fields Solr knows about: http://localhost:8983/solr/collection1/schema/fields
-You can simply double-click "start.jar" rather that running ``java -jar start.jar`` from the command line. Figuring out how to stop Solr after double-clicking it is an exercise for the reader.
+You can simply double-click "start.jar" rather than running ``java -jar start.jar`` from the command line. Figuring out how to stop Solr after double-clicking it is an exercise for the reader.
+
+.. _update-solr-schema-dev:
+
+Updating the Solr Schema (Developers)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Both developers and sysadmins need to update the Solr schema from time to time. One difference is that developers will be committing changes to ``conf/solr/schema.xml`` in git. To prevent cross-platform differences in the git history, when running the ``update-fields.sh`` script, we ask all developers to run the script from within Docker. (See :doc:`/container/configbaker-image` for more on the image we'll use below.)
+
+.. code-block::
+
+ curl http://localhost:8080/api/admin/index/solr/schema | docker run -i --rm -v ./docker-dev-volumes/solr/data:/var/solr gdcc/configbaker:unstable update-fields.sh /var/solr/data/collection1/conf/schema.xml
+
+ cp docker-dev-volumes/solr/data/data/collection1/conf/schema.xml conf/solr/schema.xml
+
+At this point you can do a ``git diff`` and see if your changes make sense before committing.
+
+Sysadmins are welcome to run ``update-fields.sh`` however they like. See :ref:`update-solr-schema` in the Admin Guide for details.
Git
---
@@ -279,3 +296,14 @@ with the following code in ``SettingsWrapper.java``:
A more serious example would be direct calls to PermissionServiceBean methods used in render logic expressions. This is something that has happened and caused some problems in real life. A simple permission service lookup (for example, whether a user is authorized to create a dataset in the current dataverse) can easily take 15 database queries. Repeated multiple times, this can quickly become a measurable delay in rendering the page. PermissionsWrapper must be used exclusively for any such lookups from JSF pages.
See also :doc:`performance`.
+
+JSF1103 Errors
+~~~~~~~~~~~~~~
+
+Errors of the form ``JSF1103: The metadata facet must be a direct child of the view in viewId /dataverse.xhtml`` come from use of the f:metadata tag at the wrong depth in the .xhtml.
+
+Most/all known instances of the problem were corrected in https://github.com/IQSS/dataverse/pull/11128.
+
+Any page that used ``f:metadata`` was including it farther down in the tree rather than as a direct child of the view.
+As of Payara 6.2025.2, it is not clear that this error was resulting in changes to UI behavior, but the error messages were in the log.
+If you see these errors, this note and the examples in the PR will hopefully provide some insight as to how to fix them.
diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst
index 30a36da9499..6aa5f5c8ff6 100644
--- a/doc/sphinx-guides/source/installation/config.rst
+++ b/doc/sphinx-guides/source/installation/config.rst
@@ -151,6 +151,18 @@ Password complexity rules for "builtin" accounts can be adjusted with a variety
- :ref:`:PVGoodStrength`
- :ref:`:PVCustomPasswordResetAlertMessage`
+.. _samesite-cookie-attribute:
+
+SameSite Cookie Attribute
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The SameSite cookie attribute is defined in an upcoming revision to `RFC 6265 `_ (HTTP State Management Mechanism) called `6265bis `_ ("bis" meaning "repeated"). The possible values are "None", "Lax", and "Strict". "Strict" is intended to help prevent Cross-Site Request Forgery (CSRF) attacks, as described in the RFC proposal and an OWASP `cheatsheet `_. We don't recommend "None" for security reasons.
+
+By default, Payara doesn't send the SameSite cookie attribute, which browsers should interpret as "Lax" according to `MDN `_.
+Dataverse installations are explicitly set to "Lax" out of the box by the installer (in the case of a "classic" installation) or through the base image (in the case of a Docker installation). For classic, see :ref:`http.cookie-same-site-value` and :ref:`http.cookie-same-site-enabled` for how to change the values. For Docker, you must rebuild the :doc:`base image `. See also Payara's `documentation `_ for the settings above.
+
+To inspect cookie attributes like SameSite, you can use ``curl -s -I http://localhost:8080 | grep JSESSIONID``, for example, looking for the "Set-Cookie" header.
+
.. _ongoing-security:
Ongoing Security of Your Installation
@@ -307,7 +319,7 @@ to be compatible with the MicroProfile specification which means that
Global Settings
^^^^^^^^^^^^^^^
-The following three global settings are required to configure PID Providers in the Dataverse software:
+The following two global settings are required to configure PID Providers in the Dataverse software:
.. _dataverse.pid.providers:
@@ -581,6 +593,7 @@ Note:
- If you configure ``base-url``, it should include a "/" after the hostname like this: ``https://demo.dataverse.org/``.
- When using multiple PermaLink providers, you should avoid ambiguous authority/separator/shoulder combinations that would result in the same overall prefix.
+- Configuring PermaLink providers differing only by their separator values is not supported.
- In general, PermaLink authority/shoulder values should be alphanumeric. For other cases, admins may need to consider the potential impact of special characters in S3 storage identifiers, resolver URLs, exports, etc.
.. _dataverse.pid.*.handlenet:
@@ -1093,6 +1106,8 @@ The Dataverse Software S3 driver supports multi-part upload for large files (ove
First: Set Up Accounts and Access Credentials
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+**Note:** As of version 5.14, if Dataverse is running in an EC2 instance it will prefer Role-Based Access Control over the S3 default profile, even if administrators configure Dataverse with programmatic access keys. Named profiles can still be used to override RBAC for specific datastores. RBAC is preferential from a security perspective as there are no keys to rotate or have stolen. If you intend to assign a role to your EC2 instance, you will still need the ``~/.aws/config`` file to specify the region but you need not generate credentials for the default profile. For more information please see https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use_switch-role-ec2.html
+
The Dataverse Software and the AWS SDK make use of the "AWS credentials profile file" and "AWS config profile file" located in
``~/.aws/`` where ``~`` is the home directory of the user you run Payara as. This file can be generated via either
of two methods described below:
@@ -1116,13 +1131,6 @@ To **create a user** with full S3 access and nothing more for security reasons,
for more info on this process.
To use programmatic access, **Generate the user keys** needed for a Dataverse installation afterwards by clicking on the created user.
-(You can skip this step when running on EC2, see below.)
-
-.. TIP::
- If you are hosting your Dataverse installation on an AWS EC2 instance alongside storage in S3, it is possible to use IAM Roles instead
- of the credentials file (the file at ``~/.aws/credentials`` mentioned below). Please note that you will still need the
- ``~/.aws/config`` file to specify the region. For more information on this option, see
- https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use_switch-role-ec2.html
Preparation When Using Custom S3-Compatible Service
###################################################
@@ -1854,6 +1862,128 @@ For Google Analytics, the example script at :download:`analytics-code.html
+
+
+
+2. Add to ``analytics-code.html``:
+
+````
+
+3. Go to https://playground.cookieconsent.orestbida.com to configure, download and copy contents of ``cookieconsent-config.js`` to ``analytics-code.html``. It should look something like this:
+
+.. code-block:: html
+
+
+
+After restarting or reloading Dataverse the cookie consent popup should appear, looking something like this:
+
+|cookieconsent|
+
+.. |cookieconsent| image:: ./img/cookie-consent-example.png
+ :class: img-responsive
+
+If you change the cookie consent config in ``CookieConsent.run()`` and want to test your changes, you should remove the cookie called ``cc_cookie`` in your browser and reload the Dataverse page to have the popup appear again. To remove cookies use Application > Cookies in the Chrome/Edge dev tool, and Storage > Cookies in Firefox and Safari.
+
.. _license-config:
Configuring Licenses
@@ -1916,6 +2046,11 @@ JSON files for software licenses are provided below.
- :download:`licenseMIT.json <../../../../scripts/api/data/licenses/licenseMIT.json>`
- :download:`licenseApache-2.0.json <../../../../scripts/api/data/licenses/licenseApache-2.0.json>`
+Adding Country-Specific Licenses
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+- :download:`licenseEtalab-2.0.json <../../../../scripts/api/data/licenses/licenseEtalab-2.0.json>` used in France (Etalab Open License 2.0, CC-BY 2.0 compliant).
+
Contributing to the Collection of Standard Licenses Above
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -3323,12 +3458,22 @@ dataverse.files.globus-monitoring-server
This setting is required in conjunction with the ``globus-use-experimental-async-framework`` feature flag (see :ref:`feature-flags`). Setting it to true designates the Dataverse instance to serve as the dedicated polling server. It is needed so that the new framework can be used in a multi-node installation.
+.. _dataverse.csl.common-styles:
+
+dataverse.csl.common-styles
++++++++++++++++++++++++++++
+
+This setting allows admins to highlight a few of the 1000+ CSL citation styles available from the dataset page. The value should be a comma-separated list of styles.
+These will be listed above the alphabetical list of all styles in the "View Styled Citations" pop-up.
+The default value when not set is "chicago-author-date, ieee".
+
+
.. _feature-flags:
Feature Flags
-------------
-Certain features might be deactivated because they are experimental and/or opt-in previews. If you want to enable these,
+Certain features might be deactivated because they are experimental and/or opt-in capabilities. If you want to enable these,
please find all known feature flags below. Any of these flags can be activated using a boolean value
(case-insensitive, one of "true", "1", "YES", "Y", "ON") for the setting.
@@ -3343,6 +3488,15 @@ please find all known feature flags below. Any of these flags can be activated u
* - api-session-auth
- Enables API authentication via session cookie (JSESSIONID). **Caution: Enabling this feature flag exposes the installation to CSRF risks!** We expect this feature flag to be temporary (only used by frontend developers, see `#9063 `_) and for the feature to be removed in the future.
- ``Off``
+ * - api-bearer-auth
+ - Enables API authentication via Bearer Token.
+ - ``Off``
+ * - api-bearer-auth-provide-missing-claims
+ - Enables sending missing user claims in the request JSON provided during OIDC user registration, when these claims are not returned by the identity provider and are required for registration. This feature only works when the feature flag ``api-bearer-auth`` is also enabled. **Caution: Enabling this feature flag exposes the installation to potential user impersonation issues.**
+ - ``Off``
+ * - api-bearer-auth-handle-tos-acceptance-in-idp
+ - Specifies that Terms of Service acceptance is handled by the IdP, eliminating the need to include ToS acceptance boolean parameter (termsAccepted) in the OIDC user registration request body. This feature only works when the feature flag ``api-bearer-auth`` is also enabled.
+ - ``Off``
* - avoid-expensive-solr-join
- Changes the way Solr queries are constructed for public content (published Collections, Datasets and Files). It removes a very expensive Solr join on all such documents, improving overall performance, especially for large instances under heavy load. Before this feature flag is enabled, the corresponding indexing feature (see next feature flag) must be turned on and a full reindex performed (otherwise public objects are not going to be shown in search results). See :doc:`/admin/solr-search-index`.
- ``Off``
@@ -3352,9 +3506,6 @@ please find all known feature flags below. Any of these flags can be activated u
* - reduce-solr-deletes
- Avoids deleting and recreating solr documents for dataset files when reindexing.
- ``Off``
- * - reduce-solr-deletes
- - Avoids deleting and recreating solr documents for dataset files when reindexing.
- - ``Off``
* - disable-return-to-author-reason
- Removes the reason field in the `Publish/Return To Author` dialog that was added as a required field in v6.2 and makes the reason an optional parameter in the :ref:`return-a-dataset` API call.
- ``Off``
@@ -3362,7 +3513,13 @@ please find all known feature flags below. Any of these flags can be activated u
- Turns off automatic selection of a dataset thumbnail from image files in that dataset. When set to ``On``, a user can still manually pick a thumbnail image or upload a dedicated thumbnail image.
- ``Off``
* - globus-use-experimental-async-framework
- - Activates a new experimental implementation of Globus polling of ongoing remote data transfers that does not rely on the instance staying up continuously for the duration of the transfers and saves the state information about Globus upload requests in the database. Added in v6.4. Affects :ref:`:GlobusPollingInterval`. Note that the JVM option :ref:`dataverse.files.globus-monitoring-server` described above must also be enabled on one (and only one, in a multi-node installation) Dataverse instance.
+ - Activates a new experimental implementation of Globus polling of ongoing remote data transfers that does not rely on the instance staying up continuously for the duration of the transfers and saves the state information about Globus upload requests in the database. Added in v6.4; extended in v6.6 to cover download transfers, in addition to uploads. Affects :ref:`:GlobusPollingInterval`. Note that the JVM option :ref:`dataverse.files.globus-monitoring-server` described above must also be enabled on one (and only one, in a multi-node installation) Dataverse instance.
+ - ``Off``
+ * - index-harvested-metadata-source
+ - Index the nickname or the source name (See the optional ``sourceName`` field in :ref:`create-a-harvesting-client`) of the harvesting client as the "metadata source" of harvested datasets and files. If enabled, the Metadata Source facet will show separate groupings of the content harvested from different sources (by harvesting client nickname or source name) instead of the default behavior where there is one "Harvested" grouping for all harvested content.
+ - ``Off``
+ * - enable-version-note
+ - Turns on the ability to add/view/edit/delete per-dataset-version notes intended to provide :ref:`provenance` information about why the dataset/version was created.
- ``Off``
**Note:** Feature flags can be set via any `supported MicroProfile Config API source`_, e.g. the environment variable
@@ -3384,6 +3541,32 @@ To facilitate large file upload and download, the Dataverse Software installer b
and restart Payara to apply your change.
+.. _http.cookie-same-site-value:
+
+http.cookie-same-site-value
+++++++++++++++++++++++++++++
+
+See :ref:`samesite-cookie-attribute` for context.
+
+The Dataverse installer configures the Payara **server-config.network-config.protocols.protocol.http-listener-1.http.cookie-same-site-value** setting to "Lax". From `Payara's documentation `_, the other possible values are "Strict" or "None". To change this to "Strict", for example, you could run the following command...
+
+``./asadmin set server-config.network-config.protocols.protocol.http-listener-1.http.cookie-same-site-value=Strict``
+
+... and restart Payara to apply your change.
+
+.. _http.cookie-same-site-enabled:
+
+http.cookie-same-site-enabled
++++++++++++++++++++++++++++++
+
+See :ref:`samesite-cookie-attribute` for context.
+
+The Dataverse installer configures the Payara **server-config.network-config.protocols.protocol.http-listener-1.http.cookie-same-site-enabled** setting to true. To change this to false, you could run the following command...
+
+``./asadmin set server-config.network-config.protocols.protocol.http-listener-1.http.cookie-same-site-enabled=false``
+
+... and restart Payara to apply your change.
+
mp.config.profile
+++++++++++++++++
@@ -4357,9 +4540,10 @@ Limit on how many guestbook entries to display on the guestbook-responses page.
:CustomDatasetSummaryFields
+++++++++++++++++++++++++++
-You can replace the default dataset metadata fields that are displayed above files table on the dataset page with a custom list separated by commas using the curl command below.
+You can replace the default dataset metadata fields that are displayed above the files table on the dataset page with a custom list separated by commas (with optional spaces) using the curl command below.
+Note that the License is always displayed and that the description, subject, keywords, etc. will NOT be displayed if you do not include them in the ``:CustomDatasetSummaryFields`` setting.
-``curl http://localhost:8080/api/admin/settings/:CustomDatasetSummaryFields -X PUT -d 'producer,subtitle,alternativeTitle'``
+``curl http://localhost:8080/api/admin/settings/:CustomDatasetSummaryFields -X PUT -d 'producer,subtitle, alternativeTitle'``
You have to put the datasetFieldType name attribute in the :CustomDatasetSummaryFields setting for this to work.
@@ -4418,7 +4602,12 @@ This is enabled via the new setting `:MDCStartDate` that specifies the cut-over
``curl -X PUT -d '2019-10-01' http://localhost:8080/api/admin/settings/:MDCStartDate``
+:ContactFeedbackMessageSizeLimit
+++++++++++++++++++++++++++++++++
+
+Maximum length of the text body that can be sent to the contacts of a Collection, Dataset, or DataFile. Setting this limit to zero denotes unlimited length.
+
+``curl -X PUT -d 1080 http://localhost:8080/api/admin/settings/:ContactFeedbackMessageSizeLimit``
.. _:Languages:
@@ -4653,6 +4842,9 @@ The commands below should give you an idea of how to load the configuration, but
``curl -X PUT --upload-file cvoc-conf.json http://localhost:8080/api/admin/settings/:CVocConf``
+Since external vocabulary scripts can change how fields are indexed (storing an identifier and name and/or values in different languages),
+updating the Solr schema as described in :ref:`update-solr-schema` should be done after adding new scripts to your configuration.
+
.. _:ControlledVocabularyCustomJavaScript:
:ControlledVocabularyCustomJavaScript
diff --git a/doc/sphinx-guides/source/installation/img/cookie-consent-example.png b/doc/sphinx-guides/source/installation/img/cookie-consent-example.png
new file mode 100644
index 00000000000..0dfe1fb113b
Binary files /dev/null and b/doc/sphinx-guides/source/installation/img/cookie-consent-example.png differ
diff --git a/doc/sphinx-guides/source/installation/index.rst b/doc/sphinx-guides/source/installation/index.rst
index 1965448aedb..7f3a2cc6e1d 100755
--- a/doc/sphinx-guides/source/installation/index.rst
+++ b/doc/sphinx-guides/source/installation/index.rst
@@ -9,6 +9,7 @@ Installation Guide
**Contents:**
.. toctree::
+ :maxdepth: 2
intro
prep
@@ -19,5 +20,6 @@ Installation Guide
shibboleth
oauth2
oidc
+ orcid
external-tools
advanced
diff --git a/doc/sphinx-guides/source/installation/installation-main.rst b/doc/sphinx-guides/source/installation/installation-main.rst
index 3c3376e3c85..837ca6f5a88 100755
--- a/doc/sphinx-guides/source/installation/installation-main.rst
+++ b/doc/sphinx-guides/source/installation/installation-main.rst
@@ -22,7 +22,7 @@ You should have already downloaded the installer from https://github.com/IQSS/da
Unpack the zip file - this will create the directory ``dvinstall``.
-**Important:** The installer will need to use the PostgreSQL command line utility ``psql`` in order to configure the database. If the executable is not in your system PATH, the installer will try to locate it on your system. However, we strongly recommend that you check and make sure it is in the PATH. This is especially important if you have multiple versions of PostgreSQL installed on your system. Make sure the psql that came with the version that you want to use with your Dataverse installation is the first on your path. For example, if the PostgreSQL distribution you are running is installed in /Library/PostgreSQL/13, add /Library/PostgreSQL/13/bin to the beginning of your $PATH variable. If you are *running* multiple PostgreSQL servers, make sure you know the port number of the one you want to use, as the installer will need it in order to connect to the database (the first PostgreSQL distribution installed on your system is likely using the default port 5432; but the second will likely be on 5433, etc.) Does every word in this paragraph make sense? If it does, great - because you definitely need to be comfortable with basic system tasks in order to install the Dataverse Software. If not - if you don't know how to check where your PostgreSQL is installed, or what port it is running on, or what a $PATH is... it's not too late to stop. Because it will most likely not work. And if you contact us for help, these will be the questions we'll be asking you - so, again, you need to be able to answer them comfortably for it to work.
+**Important:** The installer will need to use the PostgreSQL command line utility ``psql`` in order to configure the database. If the executable is not in your system PATH, the installer will try to locate it on your system. However, we strongly recommend that you check and make sure it is in the PATH. This is especially important if you have multiple versions of PostgreSQL installed on your system. Make sure the psql that came with the version that you want to use with your Dataverse installation is the first on your path. For example, if the PostgreSQL distribution you are running is installed in /Library/PostgreSQL/16, add /Library/PostgreSQL/16/bin to the beginning of your $PATH variable. If you are *running* multiple PostgreSQL servers, make sure you know the port number of the one you want to use, as the installer will need it in order to connect to the database (the first PostgreSQL distribution installed on your system is likely using the default port 5432; but the second will likely be on 5433, etc.) Does every word in this paragraph make sense? If it does, great - because you definitely need to be comfortable with basic system tasks in order to install the Dataverse Software. If not - if you don't know how to check where your PostgreSQL is installed, or what port it is running on, or what a $PATH is... it's not too late to stop. Because it will most likely not work. And if you contact us for help, these will be the questions we'll be asking you - so, again, you need to be able to answer them comfortably for it to work.
**It is no longer necessary to run the installer as root!**
diff --git a/doc/sphinx-guides/source/installation/oauth2.rst b/doc/sphinx-guides/source/installation/oauth2.rst
index 7a0e938b572..82d82c202b1 100644
--- a/doc/sphinx-guides/source/installation/oauth2.rst
+++ b/doc/sphinx-guides/source/installation/oauth2.rst
@@ -38,8 +38,10 @@ URLs to help you request a Client ID and Client Secret from the providers suppor
Each of these providers will require the following information from you:
- Basic information about your Dataverse installation such as a name, description, URL, logo, privacy policy, etc.
-- OAuth2 Redirect URI (ORCID) or Redirect URI (Microsoft Azure AD) or Authorization Callback URL (GitHub) or Authorized Redirect URIs (Google): This is the URL on the Dataverse installation side to which the user will be sent after successfully authenticating with the identity provider. This should be the advertised URL of your Dataverse installation (the protocol, fully qualified domain name, and optional port configured via the ``dataverse.siteUrl`` JVM option mentioned in the :doc:`config` section) appended with ``/oauth2/callback.xhtml`` such as ``https://dataverse.example.edu/oauth2/callback.xhtml``.
+- OAuth2 Redirect URI(s) (ORCID) or Redirect URI (Microsoft Azure AD) or Authorization Callback URL (GitHub) or Authorized Redirect URIs (Google): This is the URL on the Dataverse installation side to which the user will be sent after successfully authenticating with the identity provider. This should be the advertised URL of your Dataverse installation (the protocol, fully qualified domain name, and optional port configured via the ``dataverse.siteUrl`` JVM option mentioned in the :doc:`config` section) appended with ``/oauth2/callback.xhtml`` such as ``https://dataverse.example.edu/oauth2/callback.xhtml``.
+For ORCID, if you also want to enable the ability to associate ORCIDs with user accounts (when users did not log in via ORCID) as discussed in :doc:`orcid`, you must add a second redirect URL ending in ``/oauth2/orcidConfirm.xhtml``, such as ``https://dataverse.example.edu/oauth2/orcidConfirm.xhtml``.
+
When you are finished you should have a Client ID and Client Secret from the provider. Keep them safe and secret.
Dataverse Installation Side
diff --git a/doc/sphinx-guides/source/installation/orcid.rst b/doc/sphinx-guides/source/installation/orcid.rst
new file mode 100644
index 00000000000..c998b27f828
--- /dev/null
+++ b/doc/sphinx-guides/source/installation/orcid.rst
@@ -0,0 +1,26 @@
+ORCID Integration
+=================
+
+.. contents:: |toctitle|
+ :local:
+
+Introduction
+------------
+
+Dataverse leverages ORCIDs (and other types of persistent identifiers (PIDs)) to improve the findability of data and to simplify the process of adding metadata.
+When ORCIDs are included as metadata about authors, Dataverse includes them in metadata exports, advertises them through :ref:`discovery-sign-posting` and via metadata embedded in dataset pages, and includes them in the metadata associated with dataset DOIs.
+
+Dataverse can be configured to make it easier to include ORCIDs
+- via use of an ORCID "External Vocabulary Script" that allows users to look up authors, depositors, etc. based on their ORCID profile metadata and then records these ORCIDs automatically and adds links to ORCID profiles in metadata displays. With this configured, there is no need to enter ORCIDs directly. See :ref:`using-external-vocabulary-services` in the Admin Guide.
+- via association of ORCIDs with Dataverse user accounts, through the use of ORCID logins or, in addition or instead, a separate authenticated ORCID linking mechanism. When an ORCID is associated with a Dataverse account, it will automatically be added to the dataset metadata when a user creates a dataset and is added as an initial author.
+
+See also :ref:`orcid-integration` in the User Guide.
+
+Configuration
+--------------
+
+The steps needed to configure Dataverse to support lookup of ORCIDs for the author metadata field (and ROR identifiers for organizations as author affiliations) are described in the `Dataverse Author Field Example page `_ in the `Dataverse External Vocabulary Support Github Repository `_. Briefly, this involves changing the :ref:`:CVocConf` setting and potentially creating local web-accessible copies of the relevant scripts.
+
+To configure Dataverse to support adding ORCIDs to user profiles, one must configure ORCID as an OAuth2 provider as described in :doc:`oauth2`. The ability to link ORCIDs to user accounts is automatically enabled if an ORCID provider is configured. To avoid also enabling ORCID login, the provider can be registered with "enabled":false.
+
+
diff --git a/doc/sphinx-guides/source/installation/prerequisites.rst b/doc/sphinx-guides/source/installation/prerequisites.rst
index f61321ef245..0e17bc47166 100644
--- a/doc/sphinx-guides/source/installation/prerequisites.rst
+++ b/doc/sphinx-guides/source/installation/prerequisites.rst
@@ -44,7 +44,7 @@ On RHEL/derivative you can make Java 17 the default with the ``alternatives`` co
Payara
------
-Payara 6.2024.6 is recommended. Newer versions might work fine. Regular updates are recommended.
+Payara 6.2025.2 is recommended. Newer versions might work fine. Regular updates are recommended.
Installing Payara
=================
@@ -55,8 +55,8 @@ Installing Payara
- Download and install Payara (installed in ``/usr/local/payara6`` in the example commands below)::
- # wget https://nexus.payara.fish/repository/payara-community/fish/payara/distributions/payara/6.2024.6/payara-6.2024.6.zip
- # unzip payara-6.2024.6.zip
+ # wget https://nexus.payara.fish/repository/payara-community/fish/payara/distributions/payara/6.2025.2/payara-6.2025.2.zip
+ # unzip payara-6.2025.2.zip
# mv payara6 /usr/local
If nexus.payara.fish is ever down for maintenance, Payara distributions are also available from https://repo1.maven.org/maven2/fish/payara/distributions/payara/
@@ -97,21 +97,21 @@ Also note that Payara may utilize more than the default number of file descripto
PostgreSQL
----------
-PostgreSQL 13 is recommended because it's the version we test against. Version 10 or higher is required because that's what's `supported by Flyway `_, which we use for database migrations.
+PostgreSQL 16 is recommended because it's the version we test against. Version 10 or higher is required because that's what's `supported by Flyway `_, which we use for database migrations.
You are welcome to experiment with newer versions of PostgreSQL, but please note that as of PostgreSQL 15, permissions have been restricted on the ``public`` schema (`release notes `_, `EDB blog post `_, `Crunchy Data blog post `_). The Dataverse installer has been updated to restore the old permissions, but this may not be a long term solution.
Installing PostgreSQL
=====================
-*For example*, to install PostgreSQL 13 under RHEL7/derivative::
+*For example*, to install PostgreSQL 16 under RHEL9/derivative::
- # yum install -y https://download.postgresql.org/pub/repos/yum/reporpms/EL-7-x86_64/pgdg-redhat-repo-latest.noarch.rpm
+ # yum install -y https://download.postgresql.org/pub/repos/yum/reporpms/EL-9-x86_64/pgdg-redhat-repo-latest.noarch.rpm
# yum makecache fast
- # yum install -y postgresql13-server
- # /usr/pgsql-13/bin/postgresql-13-setup initdb
- # /usr/bin/systemctl start postgresql-13
- # /usr/bin/systemctl enable postgresql-13
+ # yum install -y postgresql16-server
+ # /usr/pgsql-16/bin/postgresql-16-setup initdb
+ # /usr/bin/systemctl start postgresql-16
+ # /usr/bin/systemctl enable postgresql-16
For RHEL8/derivative the process would be identical, except for the first two commands: you would need to install the "EL-8" yum repository configuration and run ``yum makecache`` instead.
@@ -149,7 +149,7 @@ Configuring Database Access for the Dataverse Installation (and the Dataverse So
- **Important: PostgreSQL must be restarted** for the configuration changes to take effect! On RHEL7/derivative and similar (provided you installed Postgres as instructed above)::
- # systemctl restart postgresql-13
+ # systemctl restart postgresql-16
On MacOS X a "Reload Configuration" icon is usually supplied in the PostgreSQL application folder. Or you could look up the process id of the PostgreSQL postmaster process, and send it the SIGHUP signal::
@@ -163,7 +163,7 @@ The Dataverse software search index is powered by Solr.
Supported Versions
==================
-The Dataverse software has been tested with Solr version 9.4.1. Future releases in the 9.x series are likely to be compatible. Please get in touch (:ref:`support`) if you are having trouble with a newer version.
+The Dataverse software has been tested with Solr version 9.8.0. Future releases in the 9.x series are likely to be compatible. Please get in touch (:ref:`support`) if you are having trouble with a newer version.
Installing Solr
===============
@@ -178,19 +178,19 @@ Become the ``solr`` user and then download and configure Solr::
su - solr
cd /usr/local/solr
- wget https://archive.apache.org/dist/solr/solr/9.4.1/solr-9.4.1.tgz
- tar xvzf solr-9.4.1.tgz
- cd solr-9.4.1
+ wget https://archive.apache.org/dist/solr/solr/9.8.0/solr-9.8.0.tgz
+ tar xvzf solr-9.8.0.tgz
+ cd solr-9.8.0
cp -r server/solr/configsets/_default server/solr/collection1
You should already have a "dvinstall.zip" file that you downloaded from https://github.com/IQSS/dataverse/releases . Unzip it into ``/tmp``. Then copy the files into place::
- cp /tmp/dvinstall/schema*.xml /usr/local/solr/solr-9.4.1/server/solr/collection1/conf
- cp /tmp/dvinstall/solrconfig.xml /usr/local/solr/solr-9.4.1/server/solr/collection1/conf
+ cp /tmp/dvinstall/schema*.xml /usr/local/solr/solr-9.8.0/server/solr/collection1/conf
+ cp /tmp/dvinstall/solrconfig.xml /usr/local/solr/solr-9.8.0/server/solr/collection1/conf
Note: The Dataverse Project team has customized Solr to boost results that come from certain indexed elements inside the Dataverse installation, for example prioritizing results from Dataverse collections over Datasets. If you would like to remove this, edit your ``solrconfig.xml`` and remove the ```` element and its contents. If you have ideas about how this boosting could be improved, feel free to contact us through our Google Group https://groups.google.com/forum/#!forum/dataverse-dev .
-A Dataverse installation requires a change to the ``jetty.xml`` file that ships with Solr. Edit ``/usr/local/solr/solr-9.4.1/server/etc/jetty.xml`` , increasing ``requestHeaderSize`` from ``8192`` to ``102400``
+A Dataverse installation requires a change to the ``jetty.xml`` file that ships with Solr. Edit ``/usr/local/solr/solr-9.8.0/server/etc/jetty.xml`` , increasing ``requestHeaderSize`` from ``8192`` to ``102400``
Solr will warn about needing to increase the number of file descriptors and max processes in a production environment but will still run with defaults. We have increased these values to the recommended levels by adding ulimit -n 65000 to the init script, and the following to ``/etc/security/limits.conf``::
@@ -209,7 +209,7 @@ Solr launches asynchronously and attempts to use the ``lsof`` binary to watch fo
Finally, you need to tell Solr to create the core "collection1" on startup::
- echo "name=collection1" > /usr/local/solr/solr-9.4.1/server/solr/collection1/core.properties
+ echo "name=collection1" > /usr/local/solr/solr-9.8.0/server/solr/collection1/core.properties
Dataverse collection ("dataverse") page uses Solr very heavily. On a busy instance this may cause the search engine to become the performance bottleneck, making these pages take increasingly longer to load, potentially affecting the overall performance of the application and/or causing Solr itself to crash. If this is observed on your instance, we recommend uncommenting the following lines in the ```` section of the ``solrconfig.xml`` file::
@@ -438,9 +438,9 @@ A scripted installation using Ansible is mentioned in the :doc:`/developers/make
As root, download and install Counter Processor::
cd /usr/local
- wget https://github.com/gdcc/counter-processor/archive/refs/tags/v1.05.tar.gz
- tar xvfz v1.05.tar.gz
- cd /usr/local/counter-processor-1.05
+ wget https://github.com/gdcc/counter-processor/archive/refs/tags/v1.06.tar.gz
+ tar xvfz v1.06.tar.gz
+ cd /usr/local/counter-processor-1.06
Installing GeoLite Country Database
===================================
@@ -451,7 +451,7 @@ The process required to sign up, download the database, and to configure automat
As root, change to the Counter Processor directory you just created, download the GeoLite2-Country tarball from MaxMind, untar it, and copy the geoip database into place::
-
+
tar xvfz GeoLite2-Country.tar.gz
cp GeoLite2-Country_*/GeoLite2-Country.mmdb maxmind_geoip
@@ -461,12 +461,12 @@ Creating a counter User
As root, create a "counter" user and change ownership of Counter Processor directory to this new user::
useradd counter
- chown -R counter:counter /usr/local/counter-processor-1.05
+ chown -R counter:counter /usr/local/counter-processor-1.06
Installing Counter Processor Python Requirements
================================================
-Counter Processor version 1.05 requires Python 3.7 or higher. This version of Python is available in many operating systems, and is purportedly available for RHEL7 or CentOS 7 via Red Hat Software Collections. Alternately, one may compile it from source.
+Counter Processor version 1.06 requires Python 3.7 or higher.
The following commands are intended to be run as root but we are aware that Pythonistas might prefer fancy virtualenv or similar setups. Pull requests are welcome to improve these steps!
@@ -477,7 +477,7 @@ Install Python 3.9::
Install Counter Processor Python requirements::
python3.9 -m ensurepip
- cd /usr/local/counter-processor-1.05
+ cd /usr/local/counter-processor-1.06
pip3 install -r requirements.txt
See the :doc:`/admin/make-data-count` section of the Admin Guide for how to configure and run Counter Processor.
diff --git a/doc/sphinx-guides/source/qa/index.md b/doc/sphinx-guides/source/qa/index.md
index f16cd1d38fc..623b93ef31b 100644
--- a/doc/sphinx-guides/source/qa/index.md
+++ b/doc/sphinx-guides/source/qa/index.md
@@ -1,6 +1,8 @@
# QA Guide
```{toctree}
+:caption: "Contents:"
+:maxdepth: 2
overview.md
testing-approach.md
testing-infrastructure.md
diff --git a/doc/sphinx-guides/source/qa/test-automation.md b/doc/sphinx-guides/source/qa/test-automation.md
index fe0d51f9174..73e7e570879 100644
--- a/doc/sphinx-guides/source/qa/test-automation.md
+++ b/doc/sphinx-guides/source/qa/test-automation.md
@@ -52,7 +52,7 @@ Go to the end of the log and then scroll up, looking for the failure. A failed A
```
TASK [dataverse : download payara zip] *****************************************
-fatal: [localhost]: FAILED! => {"changed": false, "dest": "/tmp/payara.zip", "elapsed": 10, "msg": "Request failed: ", "url": "https://nexus.payara.fish/repository/payara-community/fish/payara/distributions/payara/6.2024.6/payara-6.2024.6.zip"}
+fatal: [localhost]: FAILED! => {"changed": false, "dest": "/tmp/payara.zip", "elapsed": 10, "msg": "Request failed: ", "url": "https://nexus.payara.fish/repository/payara-community/fish/payara/distributions/payara/6.2025.2/payara-6.2025.2.zip"}
```
In the example above, if Payara can't be downloaded, we're obviously going to have problems deploying Dataverse to it!
diff --git a/doc/sphinx-guides/source/style/index.rst b/doc/sphinx-guides/source/style/index.rst
index 0e93716e146..65a9f0b2b55 100755
--- a/doc/sphinx-guides/source/style/index.rst
+++ b/doc/sphinx-guides/source/style/index.rst
@@ -11,6 +11,7 @@ This style guide is meant to help developers implement clear and appropriate UI
**Contents:**
.. toctree::
+ :maxdepth: 2
foundations
patterns
diff --git a/doc/sphinx-guides/source/user/account.rst b/doc/sphinx-guides/source/user/account.rst
index bb73ff20dc7..936dca93c0b 100755
--- a/doc/sphinx-guides/source/user/account.rst
+++ b/doc/sphinx-guides/source/user/account.rst
@@ -165,6 +165,20 @@ Microsoft Azure AD, GitHub, and Google Log In
You can also convert your Dataverse installation account to use authentication provided by GitHub, Microsoft, or Google. These options may be found in the "Other options" section of the log in page, and function similarly to how ORCID is outlined above. If you would like to convert your account away from using one of these services for log in, then you can follow the same steps as listed above for converting away from the ORCID log in.
+
+.. _orcid-integration:
+
+Linking ORCID with Your Account Profile
+---------------------------------------
+
+If you log in using ORCID, Dataverse will add the link to your ORCID account in your account profile and, when you create datasets, will automatically add you, with your ORCID, as an author.
+
+If you log in via other methods, you can add a link to your ORCID account as you create an account or later via the "Account Information" page.
+As when using ORCID login, you will be redirected to the ORCID website to log in there and allow the connection with Dataverse.
+Once you've done that, the link to your ORCID will be shown in the Account Information page and your ORCID will be added as your identifier when you create datasets (exactly the same as if you had logged in via ORCID).
+
+Note that the ability to log in via ORCID (or other providers) and the ability to link to your ORCID profile are separate configuration options :doc:`available ` to Dataverse administrators.
+
.. _my-data:
My Data
diff --git a/doc/sphinx-guides/source/user/appendix.rst b/doc/sphinx-guides/source/user/appendix.rst
index df9b6704209..96b426a483c 100755
--- a/doc/sphinx-guides/source/user/appendix.rst
+++ b/doc/sphinx-guides/source/user/appendix.rst
@@ -22,14 +22,15 @@ Supported Metadata
Detailed below are what metadata schemas we support for Citation and Domain Specific Metadata in the Dataverse Project:
-- Citation Metadata (`see .tsv `__): compliant with `DDI Lite `_, `DDI 2.5 Codebook `__, `DataCite 4.0 `__, and Dublin Core's `DCMI Metadata Terms `__ . Language field uses `ISO 639-1 `__ controlled vocabulary.
-- Geospatial Metadata (`see .tsv `__): compliant with `DDI Lite `_, `DDI 2.5 Codebook `__, `DataCite 4.0 `__, and Dublin Core. Country / Nation field uses `ISO 3166-1 `_ controlled vocabulary.
+- Citation Metadata (`see .tsv `__): compliant with `DDI Lite `_, `DDI 2.5 Codebook `__, `DataCite 4.5 `__, and Dublin Core's `DCMI Metadata Terms `__ . Language field uses `ISO 639-1 `__ controlled vocabulary.
+- Geospatial Metadata (`see .tsv `__): compliant with `DDI Lite `_, `DDI 2.5 Codebook `__, `DataCite 4.5 `__, and Dublin Core. Country / Nation field uses `ISO 3166-1 `_ controlled vocabulary.
- Social Science & Humanities Metadata (`see .tsv `__): compliant with `DDI Lite `_, `DDI 2.5 Codebook `__, and Dublin Core.
- Astronomy and Astrophysics Metadata (`see .tsv `__): These metadata elements can be mapped/exported to the International Virtual Observatory Allianceโs (IVOA)
`VOResource Schema format `__ and is based on
`Virtual Observatory (VO) Discovery and Provenance Metadata `__.
- Life Sciences Metadata (`see .tsv `__): based on `ISA-Tab Specification `__, along with controlled vocabulary from subsets of the `OBI Ontology `__ and the `NCBI Taxonomy for Organisms `__.
- Journal Metadata (`see .tsv `__): based on the `Journal Archiving and Interchange Tag Set, version 1.2 `__.
+- 3D Objects Metadata (`see .tsv `__).
Experimental Metadata
~~~~~~~~~~~~~~~~~~~~~
@@ -38,6 +39,7 @@ Unlike supported metadata, experimental metadata is not enabled by default in a
- `CodeMeta Software Metadata `__: based on the `CodeMeta Software Metadata Schema, version 2.0 `__ (`see .tsv version `__)
- Computational Workflow Metadata (`see .tsv `__): adapted from `Bioschemas Computational Workflow Profile, version 1.0 `__ and `Codemeta `__.
+- Archival Metadata (`see .tsv `__): Enables repositories to register metadata relating to the potential archiving of the dataset at a depositor archive, whether that be your own institutional archive or an external archive, i.e. a historical archive.
Please note: these custom metadata schemas are not included in the Solr schema for indexing by default, you will need
to add them as necessary for your custom metadata blocks. See "Update the Solr Schema" in :doc:`../admin/metadatacustomization`.
diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst
index b3a14554b40..37656d1e243 100755
--- a/doc/sphinx-guides/source/user/dataset-management.rst
+++ b/doc/sphinx-guides/source/user/dataset-management.rst
@@ -43,6 +43,8 @@ Additional formats can be enabled. See :ref:`inventory-of-external-exporters` in
Each of these metadata exports contains the metadata of the most recently published version of the dataset.
+For each dataset, links to each enabled metadata format are available programmatically via Signposting. For details, see :ref:`discovery-sign-posting` in the Admin Guide and :ref:`signposting-api` in the API Guide.
+
.. _adding-new-dataset:
Adding a New Dataset
@@ -50,8 +52,10 @@ Adding a New Dataset
#. Navigate to the Dataverse collection in which you want to add a dataset.
#. Click on the "Add Data" button and select "New Dataset" in the dropdown menu. **Note:** If you are on the root Dataverse collection, your My Data page or click the "Add Data" link in the navbar, the dataset you create will be hosted in the root Dataverse collection. You can change this by selecting another Dataverse collection you have proper permissions to create datasets in, from the Host Dataverse collection dropdown in the create dataset form. This option to choose will not be available after you create the dataset.
-#. To quickly get started, enter at minimum all the required fields with an asterisk (e.g., the Dataset Title, Author Name,
- Description Text, Point of Contact Email, and Subject) to get a Data Citation with a DOI.
+#. To quickly get started, enter at minimum all the required fields with an asterisk (e.g., the Dataset Title, Author Name, Description Text, Point of Contact Email, and Subject) to get a Data Citation with a DOI.
+
+ #. When entering author identifiers, select the type from the dropdown (e.g. "ORCID") and under "Identifier" enter the full URL (e.g. "https://orcid.org/0000-0002-1825-0097") for identifiers that have a URL form. The shorter form of the unique identifier (e.g. "0000-0002-1825-0097") can also be entered, but URL form is preferred when available.
+
#. Scroll down to the "Files" section and click on "Select Files to Add" to add all the relevant files to your Dataset.
You can also upload your files directly from your Dropbox. **Tip:** You can drag and drop or select multiple files at a time from your desktop
directly into the upload widget. Your files will appear below the "Select Files to Add" button where you can add a
@@ -169,7 +173,7 @@ Certain file types in the Dataverse installation are supported by additional fun
File Previews
-------------
-Dataverse installations can add previewers for common file types uploaded by their research communities. The previews appear on the file page. If a preview tool for a specific file type is available, the preview will be created and will display automatically, after terms have been agreed to or a guestbook entry has been made, if necessary. File previews are not available for restricted files unless they are being accessed using a Preview URL. See also :ref:`previewUrl`.
+Dataverse installations can add previewers for common file types uploaded by their research communities. The previews appear on the file page. If a preview tool for a specific file type is available, the preview will be created and will display automatically, after terms have been agreed to or a guestbook entry has been made, if necessary. File previews are not available for restricted files unless they are being accessed using a Preview URL. See also :ref:`previewUrl`. When the dataset license is not the default license, users will be prompted to accept the license/data use agreement before the preview is shown. See also :ref:`license-terms`.
Previewers are available for the following file types:
@@ -572,7 +576,26 @@ When you access a dataset's file-level permissions page, you will see two sectio
Data Provenance
===============
-Data Provenance is a record of where your data came from and how it reached its current form. It describes the origin of a data file, any transformations that have been made to that file, and any persons or organizations associated with that file. A data file's provenance can aid in reproducibility and compliance with legal regulations. The Dataverse Software can help you keep track of your data's provenance. Currently, the Dataverse Software only makes provenance information available to those who have edit permissions on your dataset, but in the future we plan to expand this feature to make provenance information available to the public.
+Dataset-Level
+-------------
+When configured, the Dataverse software can allow data depositors, curators, and administrators
+to provide information about why a new version of a dataset was created and/or how its contents
+differ from a prior version. These users can add an optional "Version Note" to a draft dataset
+version in the dataset page/versions tab or during publication. This information is publicly
+available via the user interface (dataset page/versions tab), API, and in metadata exports
+(including the DataCite, JSON, DDI, and OAI_ORE exports).
+
+File-Level
+----------
+
+Data Provenance is a record of where your data came from and how it reached its current form.
+It describes the origin of a data file, any transformations that have been made to that file,
+and any persons or organizations associated with that file. A data file's provenance can aid in
+reproducibility and compliance with legal regulations. When configured to support provenance,
+the Dataverse Software can help you keep track of your data's provenance. Currently, the Dataverse
+Software only makes provenance information available to those who have edit permissions on your
+dataset, but in the future we plan to expand this feature to make provenance information available
+to the public.
.. COMMENTED OUT UNTIL PROV FILE DOWNLOAD IS ADDED: , and make it available to those who need it.
@@ -681,17 +704,26 @@ If you have a Contributor role (can edit metadata, upload files, and edit files,
Preview URL to Review Unpublished Dataset
=========================================
-Creating a Preview URL for your dataset allows you to share your dataset (for viewing and downloading of files) before it is published to a wide group of individuals who may not have a user account on the Dataverse installation. Anyone you send the Preview URL to will not have to log into the Dataverse installation to view the dataset.
+Creating a Preview URL for a draft version of your dataset allows you to share your dataset (for viewing and downloading of files) before it is published to a wide group of individuals who may not have a user account on the Dataverse installation. Anyone you send the Preview URL to will not have to log into the Dataverse installation to view the unpublished dataset. Once a dataset has been published you may create new General Preview URLs for subsequent draft versions, but the Anonymous Preview URL will no longer be available.
-**Note:** To create a Preview URL, you must have the *ManageDatasetPermissions* permission for your dataset, usually given by the :ref:`roles ` *Curator* or *Administrator*.
+**Note:** To create a Preview URL, you must have the *ManageDatasetPermissions* permission for your draft dataset, usually given by the :ref:`roles ` *Curator* or *Administrator*.
#. Go to your unpublished dataset
#. Select the โEditโ button
#. Select โPreview URLโ in the dropdown menu
-#. In the pop-up select โCreate General Preview URLโ or "Create URL for Anonymized Access". The latter supports anonymous review by removing author names and other potentially identifying information from citations, version history tables, and some metadata fields (as configured by the administrator).
+#. In the pop-up select “Create General Preview URL” or "Create Anonymous Preview URL". The latter supports anonymous review by removing author names and other potentially identifying information from citations, version history tables, and some metadata fields (as configured by the administrator).
#. Copy the Preview URL which has been created for this dataset and it can now be shared with anyone you wish to have access to view or download files in your unpublished dataset.
To disable a Preview URL and to revoke access, follow the same steps as above until step #3 when you return to the popup, click the โDisable Preview URLโ button.
+
+**Note:** Before distributing an Anonymous Preview URL it is recommended that you view the dataset as a potential user to verify that the metadata available does not reveal authorship, etc.
+
+#. Create Anonymous Preview URL for your unpublished dataset via the Preview URL popup from Edit Dataset button
+#. Copy the Anonymous Preview URL to your clipboard
+#. Log out of the Dataverse application
+#. Open the dataset using the Anonymous Preview URL you plan to distribute to view it as a reviewer would.
+#. It may be necessary for you to further edit your draft dataset's metadata to remove identifying items before you distribute the Anonymous Preview URL.
+
Note that only one Preview URL (normal or with anonymized access) can be configured per dataset at a time.
Embargoes
@@ -790,13 +822,15 @@ If you deaccession the most recently published version of the dataset but not al
Dataset Types
=============
+.. note:: Development of the dataset types feature is ongoing. Please see https://github.com/IQSS/dataverse-pm/issues/307 for details.
+
Out of the box, all datasets have a dataset type of "dataset". Superusers can add additional types such as "software" or "workflow" using the :ref:`api-add-dataset-type` API endpoint.
Once more than one type appears in search results, a facet called "Dataset Type" will appear allowing you to filter down to a certain type.
If your installation is configured to use DataCite as a persistent ID (PID) provider, the appropriate type ("Dataset", "Software", "Workflow") will be sent to DataCite when the dataset is published for those three types.
-Currently, the dataset type can only be specified via API and only when the dataset is created. For details, see the following sections of the API guide:
+Currently, specifying a type for a dataset can only be done via API and only when the dataset is created. The type can't currently be changed afterward. For details, see the following sections of the API guide:
- :ref:`api-create-dataset-with-type` (Native API)
- :ref:`api-semantic-create-dataset-with-type` (Semantic API)
@@ -804,7 +838,7 @@ Currently, the dataset type can only be specified via API and only when the data
Dataset types can be listed, added, or deleted via API. See :ref:`api-dataset-types` in the API Guide for more.
-Development of the dataset types feature is ongoing. Please see https://github.com/IQSS/dataverse/issues/10489 for details.
+Dataset types can be linked with metadata blocks to make fields from those blocks available when datasets of that type are created or edited. See :ref:`api-link-dataset-type` and :ref:`list-metadata-blocks-for-a-collection` for details.
.. |image1| image:: ./img/DatasetDiagram.png
:class: img-responsive
diff --git a/doc/sphinx-guides/source/user/find-use-data.rst b/doc/sphinx-guides/source/user/find-use-data.rst
index 4bf45774b53..8e65108d680 100755
--- a/doc/sphinx-guides/source/user/find-use-data.rst
+++ b/doc/sphinx-guides/source/user/find-use-data.rst
@@ -96,7 +96,7 @@ Files can be organized in one or more folders (directories) within a dataset. If
Cite Data
---------
-You can find the citation for the dataset at the top of the dataset page in a blue box. Additionally, there is a Cite Data button that offers the option to download the citation as EndNote XML, RIS Format, or BibTeX Format.
+You can find the citation for the dataset at the top of the dataset page in a blue box. Additionally, there is a Cite Data button that offers the option to download the citation as EndNote XML, RIS Format, or BibTeX Format, or to cut/paste the citation in any of the 1000+ standard journal/society/other formats defined via the `Citation Style Language `_.
.. _download_files:
diff --git a/doc/sphinx-guides/source/user/index.rst b/doc/sphinx-guides/source/user/index.rst
index 857bd27ca22..cd6ccdbd421 100755
--- a/doc/sphinx-guides/source/user/index.rst
+++ b/doc/sphinx-guides/source/user/index.rst
@@ -9,6 +9,7 @@ User Guide
**Contents:**
.. toctree::
+ :maxdepth: 2
account
find-use-data
diff --git a/doc/sphinx-guides/source/versions.rst b/doc/sphinx-guides/source/versions.rst
index 9d640bd22bd..cd19837dff1 100755
--- a/doc/sphinx-guides/source/versions.rst
+++ b/doc/sphinx-guides/source/versions.rst
@@ -7,7 +7,8 @@ Dataverse Software Documentation Versions
This list provides a way to refer to the documentation for previous and future versions of the Dataverse Software. In order to learn more about the updates delivered from one version to another, visit the `Releases `__ page in our GitHub repo.
- pre-release `HTML (not final!) `__ and `PDF (experimental!) `__ built from the :doc:`develop ` branch :doc:`(how to contribute!) `
-- 6.5
+- 6.6
+- `6.5 `__
- `6.4 `__
- `6.3 `__
- `6.2 `__
diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml
index c8515f43136..0de90f7ec2a 100644
--- a/docker-compose-dev.yml
+++ b/docker-compose-dev.yml
@@ -17,6 +17,8 @@ services:
SKIP_DEPLOY: "${SKIP_DEPLOY}"
DATAVERSE_JSF_REFRESH_PERIOD: "1"
DATAVERSE_FEATURE_API_BEARER_AUTH: "1"
+ DATAVERSE_FEATURE_INDEX_HARVESTED_METADATA_SOURCE: "1"
+ DATAVERSE_FEATURE_API_BEARER_AUTH_PROVIDE_MISSING_CLAIMS: "1"
DATAVERSE_MAIL_SYSTEM_EMAIL: "dataverse@localhost"
DATAVERSE_MAIL_MTA_HOST: "smtp"
DATAVERSE_AUTH_OIDC_ENABLED: "1"
@@ -89,6 +91,8 @@ services:
- dev
networks:
- dataverse
+ volumes:
+ - ./docker-dev-volumes/solr/data:/var/solr
dev_dv_initializer:
container_name: "dev_dv_initializer"
diff --git a/docker/compose/demo/compose.yml b/docker/compose/demo/compose.yml
index 60ed130612e..3c8bee29e3e 100644
--- a/docker/compose/demo/compose.yml
+++ b/docker/compose/demo/compose.yml
@@ -20,12 +20,12 @@ services:
-Ddataverse.files.file1.type=file
-Ddataverse.files.file1.label=Filesystem
-Ddataverse.files.file1.directory=${STORAGE_DIR}/store
- -Ddataverse.pid.providers=fake
- -Ddataverse.pid.default-provider=fake
- -Ddataverse.pid.fake.type=FAKE
- -Ddataverse.pid.fake.label=FakeDOIProvider
- -Ddataverse.pid.fake.authority=10.5072
- -Ddataverse.pid.fake.shoulder=FK2/
+ -Ddataverse.pid.providers=perma1
+ -Ddataverse.pid.default-provider=perma1
+ -Ddataverse.pid.perma1.type=perma
+ -Ddataverse.pid.perma1.label=Perma1
+ -Ddataverse.pid.perma1.authority=DV
+ -Ddataverse.pid.perma1.permalink.separator=/
#-Ddataverse.lang.directory=/dv/lang
ports:
- "8080:8080" # HTTP (Dataverse Application)
@@ -134,7 +134,7 @@ services:
solr:
container_name: "solr"
hostname: "solr"
- image: solr:9.4.1
+ image: solr:9.8.0
depends_on:
- solr_initializer
restart: on-failure
diff --git a/local_lib/io/gdcc/xoai-common/5.3.2-local/xoai-common-5.3.2-local.jar b/local_lib/io/gdcc/xoai-common/5.3.2-local/xoai-common-5.3.2-local.jar
new file mode 100644
index 00000000000..5047caacc5b
Binary files /dev/null and b/local_lib/io/gdcc/xoai-common/5.3.2-local/xoai-common-5.3.2-local.jar differ
diff --git a/local_lib/io/gdcc/xoai-common/5.3.2-local/xoai-common-5.3.2-local.pom b/local_lib/io/gdcc/xoai-common/5.3.2-local/xoai-common-5.3.2-local.pom
new file mode 100644
index 00000000000..b838f27c671
--- /dev/null
+++ b/local_lib/io/gdcc/xoai-common/5.3.2-local/xoai-common-5.3.2-local.pom
@@ -0,0 +1,82 @@
+
+
+
+
+
+ xoai
+ io.gdcc
+ 5.3.2-local
+
+ 4.0.0
+
+ XOAI Commons
+ xoai-common
+ OAI-PMH base functionality used for both data and service providers.
+
+
+
+ jakarta.xml.bind
+ jakarta.xml.bind-api
+
+
+ org.hamcrest
+ hamcrest
+
+ compile
+
+
+ io.gdcc
+ xoai-xmlio
+
+
+ org.codehaus.woodstox
+ stax2-api
+
+
+
+ com.fasterxml.woodstox
+ woodstox-core
+ runtime
+ true
+
+
+
+
+ org.junit.jupiter
+ junit-jupiter
+ test
+
+
+ org.xmlunit
+ xmlunit-core
+ test
+
+
+ org.xmlunit
+ xmlunit-matchers
+ test
+
+
+ org.openjdk.jmh
+ jmh-core
+ 1.37
+ test
+
+
+ org.openjdk.jmh
+ jmh-generator-annprocess
+ 1.37
+ test
+
+
+
diff --git a/local_lib/io/gdcc/xoai-data-provider/5.3.2-local/xoai-data-provider-5.3.2-local.jar b/local_lib/io/gdcc/xoai-data-provider/5.3.2-local/xoai-data-provider-5.3.2-local.jar
new file mode 100644
index 00000000000..cf6fb9c7d0c
Binary files /dev/null and b/local_lib/io/gdcc/xoai-data-provider/5.3.2-local/xoai-data-provider-5.3.2-local.jar differ
diff --git a/local_lib/io/gdcc/xoai-data-provider/5.3.2-local/xoai-data-provider-5.3.2-local.pom b/local_lib/io/gdcc/xoai-data-provider/5.3.2-local/xoai-data-provider-5.3.2-local.pom
new file mode 100644
index 00000000000..e17ae894e98
--- /dev/null
+++ b/local_lib/io/gdcc/xoai-data-provider/5.3.2-local/xoai-data-provider-5.3.2-local.pom
@@ -0,0 +1,72 @@
+
+
+
+
+
+ xoai
+ io.gdcc
+ 5.3.2-local
+
+
+ 4.0.0
+
+ XOAI Data Provider
+ xoai-data-provider
+ OAI-PMH data provider implementation. Use it to build an OAI-PMH endpoint, providing your data records as harvestable resources.
+
+
+
+
+ org.apache.maven.plugins
+ maven-jar-plugin
+
+
+
+ test-jar
+
+
+
+
+
+
+
+
+
+ io.gdcc
+ xoai-common
+ ${project.version}
+
+
+ org.slf4j
+ slf4j-api
+
+
+
+
+ org.junit.jupiter
+ junit-jupiter
+ test
+
+
+ org.xmlunit
+ xmlunit-core
+ test
+
+
+ org.xmlunit
+ xmlunit-matchers
+ test
+
+
+ org.slf4j
+ slf4j-simple
+ test
+
+
+
diff --git a/local_lib/io/gdcc/xoai-service-provider/5.3.2-local/xoai-service-provider-5.3.2-local.jar b/local_lib/io/gdcc/xoai-service-provider/5.3.2-local/xoai-service-provider-5.3.2-local.jar
new file mode 100644
index 00000000000..201eecc061c
Binary files /dev/null and b/local_lib/io/gdcc/xoai-service-provider/5.3.2-local/xoai-service-provider-5.3.2-local.jar differ
diff --git a/local_lib/io/gdcc/xoai-service-provider/5.3.2-local/xoai-service-provider-5.3.2-local.pom b/local_lib/io/gdcc/xoai-service-provider/5.3.2-local/xoai-service-provider-5.3.2-local.pom
new file mode 100644
index 00000000000..aa5a65824d7
--- /dev/null
+++ b/local_lib/io/gdcc/xoai-service-provider/5.3.2-local/xoai-service-provider-5.3.2-local.pom
@@ -0,0 +1,65 @@
+
+
+
+
+
+ io.gdcc
+ xoai
+ 5.3.2-local
+
+ 4.0.0
+
+ XOAI Service Provider
+ xoai-service-provider
+ OAI-PMH service provider implementation. Use it as a harvesting client to read remote repositories.
+
+
+
+ io.gdcc
+ xoai-common
+ ${project.version}
+
+
+ io.gdcc
+ xoai-xmlio
+ ${project.version}
+
+
+
+ org.slf4j
+ slf4j-api
+
+
+
+
+ io.gdcc
+ xoai-data-provider
+ ${project.version}
+ test
+
+
+ io.gdcc
+ xoai-data-provider
+ ${project.version}
+ test-jar
+ test
+
+
+ org.junit.jupiter
+ junit-jupiter
+ test
+
+
+ org.slf4j
+ slf4j-simple
+ test
+
+
+
+
diff --git a/local_lib/io/gdcc/xoai-xmlio/5.3.2-local/xoai-xmlio-5.3.2-local.jar b/local_lib/io/gdcc/xoai-xmlio/5.3.2-local/xoai-xmlio-5.3.2-local.jar
new file mode 100644
index 00000000000..eebdac42f00
Binary files /dev/null and b/local_lib/io/gdcc/xoai-xmlio/5.3.2-local/xoai-xmlio-5.3.2-local.jar differ
diff --git a/local_lib/io/gdcc/xoai-xmlio/5.3.2-local/xoai-xmlio-5.3.2-local.pom b/local_lib/io/gdcc/xoai-xmlio/5.3.2-local/xoai-xmlio-5.3.2-local.pom
new file mode 100644
index 00000000000..50b13e50925
--- /dev/null
+++ b/local_lib/io/gdcc/xoai-xmlio/5.3.2-local/xoai-xmlio-5.3.2-local.pom
@@ -0,0 +1,63 @@
+
+ 4.0.0
+
+
+ io.gdcc
+ xoai
+ 5.3.2-local
+
+
+ xoai-xmlio
+ jar
+ XOAI XML IO Commons
+ Basic XML IO routines used for XOAI OAI-PMH implementation. Forked from obsolete Lyncode sources.
+
+
+
+ The Apache Software License, Version 2.0
+ https://www.apache.org/licenses/LICENSE-2.0.txt
+ repo
+
+
+
+
+
+ org.codehaus.woodstox
+ stax2-api
+
+
+
+ com.fasterxml.woodstox
+ woodstox-core
+ runtime
+ true
+
+
+
+
+ org.hamcrest
+ hamcrest
+
+
+
+
+ org.xmlunit
+ xmlunit-core
+ test
+
+
+ org.xmlunit
+ xmlunit-matchers
+ test
+
+
+ org.junit.jupiter
+ junit-jupiter
+ test
+
+
+
diff --git a/local_lib/io/gdcc/xoai/5.3.2-local/xoai-5.3.2-local.pom b/local_lib/io/gdcc/xoai/5.3.2-local/xoai-5.3.2-local.pom
new file mode 100644
index 00000000000..2ebbd698d98
--- /dev/null
+++ b/local_lib/io/gdcc/xoai/5.3.2-local/xoai-5.3.2-local.pom
@@ -0,0 +1,235 @@
+
+
+
+ 4.0.0
+ pom
+
+
+ io.gdcc
+ parent
+ 0.10.2
+
+
+
+ xoai-common
+ xoai-data-provider
+ xoai-service-provider
+ xoai-xmlio
+ report
+ xoai-data-provider-tck
+
+
+ xoai
+ 5.3.2-local
+
+ XOAI : OAI-PMH Java Toolkit
+
+ An OAI-PMH data and/or service provider implementation, integration ready for your service.
+ https://github.com/${project.github.org}/${project.github.repo}
+
+
+ 11
+ xoai
+ true
+
+
+ 4.0.1
+ 4.0.4
+ 4.2.2
+ 7.0.0
+
+
+ 10.0.4
+
+
+
+
+ DuraSpace BSD License
+ https://raw.github.com/DSpace/DSpace/master/LICENSE
+ repo
+
+ A BSD 3-Clause license for the DSpace codebase.
+
+
+
+
+
+
+
+
+ com.diffplug.spotless
+ spotless-maven-plugin
+ ${spotless.version}
+
+
+ origin/branch-5.0
+
+
+
+
+
+ *.md
+ .gitignore
+
+
+
+
+
+ true
+ 4
+
+
+
+
+
+
+
+
+
+
+
+
+ 1.15.0
+
+ true
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ jakarta.xml.bind
+ jakarta.xml.bind-api
+ ${jakarta.jaxb.version}
+
+
+
+ com.sun.xml.bind
+ jaxb-impl
+ ${jakarta.jaxb-impl.version}
+ runtime
+ true
+
+
+
+
+ com.fasterxml.woodstox
+ woodstox-core
+ ${woodstox.version}
+
+
+ org.codehaus.woodstox
+ stax2-api
+ ${stax2.api.version}
+
+
+
+ io.gdcc
+ xoai-xmlio
+ ${project.version}
+
+
+
+
+
+
+ Oliver Bertuch
+ https://github.com/poikilotherm
+ xoai-lib@gdcc.io
+ Forschungszentrum Jรผlich GmbH
+ https://www.fz-juelich.de/en/zb
+
+
+ DSpace @ Lyncode
+ dspace@lyncode.com
+ Lyncode
+ http://www.lyncode.com
+
+
+
+
+
+ coverage
+
+ ${maven.multiModuleProjectDirectory}/report/target/site/jacoco-aggregate/jacoco.xml
+
+
+
+ benchmark
+
+
+
+ org.apache.maven.plugins
+ maven-surefire-plugin
+ ${maven-surefire-plugin.version}
+
+ ${skipUT}
+ **/*Benchmark
+
+ true
+
+
+
+
+
+
+
+ owasp
+
+
+
+
+ org.owasp
+ dependency-check-maven
+ ${dependency-check-maven.version}
+
+ 7
+ true
+ true
+
+ SARIF
+ owaspSuppression.xml
+
+
+
+
+ check
+
+
+
+
+
+
+
+
+
+
diff --git a/modules/container-base/src/main/docker/Dockerfile b/modules/container-base/src/main/docker/Dockerfile
index 802db62e5e4..ed13b77c449 100644
--- a/modules/container-base/src/main/docker/Dockerfile
+++ b/modules/container-base/src/main/docker/Dockerfile
@@ -199,6 +199,13 @@ RUN <
- 6.5
+ 6.617UTF-8
@@ -148,9 +148,9 @@
-Duser.timezone=${project.timezone} -Dfile.encoding=${project.build.sourceEncoding} -Duser.language=${project.language} -Duser.region=${project.region}
- 6.2024.6
+ 6.2025.242.7.4
- 9.4.1
+ 9.8.01.12.74826.30.0
@@ -164,7 +164,8 @@
4.4.14
- 5.2.0
+
+ 5.3.2-local1.19.7
@@ -374,6 +375,12 @@
+
+
+ dvn.private
+ Local repository for hosting jars not available from network repositories.
+ file://${project.basedir}/local_lib
+ payara-nexus-artifactsPayara Nexus Artifacts
diff --git a/pom.xml b/pom.xml
index 5ecbd7059c1..7b15a97ee73 100644
--- a/pom.xml
+++ b/pom.xml
@@ -29,8 +29,8 @@
1.2.18.410.19.01.20.1
- 5.2.1
- 2.9.1
+ 5.2.5
+ 2.9.25.5.3Dataverse API
@@ -51,6 +51,16 @@
org.apache.abderaabdera-core1.1.3
+
+
+ org.apache.geronimo.specs
+ geronimo-stax-api_1.0_spec
+
+
+ org.apache.james
+ apache-mime4j-core
+
+ org.apache.abdera
@@ -125,18 +135,36 @@
io.gdccsword2-server2.0.0
+
+
+ xml-apis
+ xml-apis
+
+ org.apache.abderaabdera-core
+
+
+ org.apache.geronimo.specs
+ geronimo-stax-api_1.0_spec
+
+ org.apache.abderaabdera-i18n
+
+
+ org.apache.geronimo.specs
+ geronimo-stax-api_1.0_spec
+
+
@@ -147,12 +175,12 @@
com.apicatalogtitanium-json-ld
- 1.3.2
+ 1.4.0com.google.code.gsongson
- 2.8.9
+ 2.9.1compile
@@ -168,11 +196,9 @@
provided
-
- org.everit.json
- org.everit.json.schema
- 1.5.1
+ com.github.erosb
+ everit-json-schema
+ 1.14.1org.mindrot
@@ -247,7 +273,7 @@
org.eclipse.parssonjakarta.json
- provided
+ test
@@ -335,7 +361,7 @@
org.apache.solrsolr-solrj
- 9.4.1
+ 9.8.0colt
@@ -406,7 +432,7 @@
com.github.jai-imageiojai-imageio-core
- 1.3.1
+ 1.4.0org.ocpsoft.rewrite
@@ -466,13 +492,23 @@
com.nimbusdsoauth2-oidc-sdk
- 10.13.2
+ 11.22.1com.github.ben-manes.caffeinecaffeine3.1.8
+
+
+ javax.xml.stream
+ stax-api
+
+
+ stax
+ stax-api
+
+
@@ -490,7 +526,7 @@
com.google.auto.serviceauto-service
- 1.0-rc2
+ 1.1.1truejar
@@ -559,6 +595,12 @@
org.apache.tikatika-parsers-standard-package${tika.version}
+
+
+ xml-apis
+ xml-apis
+
+
@@ -609,6 +651,21 @@
javax.cachecache-api
+
+ de.undercouch
+ citeproc-java
+ 3.1.0
+
+
+ org.citationstyles
+ styles
+ 24.3
+
+
+ org.citationstyles
+ locales
+ 24.3
+ org.junit.jupiter
diff --git a/scripts/api/data/dataset-create-new-additional-default-fields.json b/scripts/api/data/dataset-create-new-additional-default-fields.json
new file mode 100644
index 00000000000..30d6bde4355
--- /dev/null
+++ b/scripts/api/data/dataset-create-new-additional-default-fields.json
@@ -0,0 +1,1533 @@
+{
+ "datasetVersion": {
+ "license": {
+ "name": "CC0 1.0",
+ "uri": "http://creativecommons.org/publicdomain/zero/1.0"
+ },
+ "metadataBlocks": {
+ "citation": {
+ "displayName": "Citation Metadata",
+ "fields": [
+ {
+ "typeName": "title",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "Replication Data for: Title"
+ },
+ {
+ "typeName": "subtitle",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "Subtitle"
+ },
+ {
+ "typeName": "alternativeTitle",
+ "multiple": true,
+ "typeClass": "primitive",
+ "value": ["Alternative Title"]
+ },
+ {
+ "typeName": "alternativeURL",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "http://AlternativeURL.org"
+ },
+ {
+ "typeName": "otherId",
+ "multiple": true,
+ "typeClass": "compound",
+ "value": [
+ {
+ "otherIdAgency": {
+ "typeName": "otherIdAgency",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "OtherIDAgency1"
+ },
+ "otherIdValue": {
+ "typeName": "otherIdValue",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "OtherIDIdentifier1"
+ }
+ },
+ {
+ "otherIdAgency": {
+ "typeName": "otherIdAgency",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "OtherIDAgency2"
+ },
+ "otherIdValue": {
+ "typeName": "otherIdValue",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "OtherIDIdentifier2"
+ }
+ }
+ ]
+ },
+ {
+ "typeName": "author",
+ "multiple": true,
+ "typeClass": "compound",
+ "value": [
+ {
+ "authorName": {
+ "typeName": "authorName",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "LastAuthor1, FirstAuthor1"
+ },
+ "authorAffiliation": {
+ "typeName": "authorAffiliation",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "AuthorAffiliation1"
+ },
+ "authorIdentifierScheme": {
+ "typeName": "authorIdentifierScheme",
+ "multiple": false,
+ "typeClass": "controlledVocabulary",
+ "value": "ORCID"
+ },
+ "authorIdentifier": {
+ "typeName": "authorIdentifier",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "AuthorIdentifier1"
+ }
+ },
+ {
+ "authorName": {
+ "typeName": "authorName",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "LastAuthor2, FirstAuthor2"
+ },
+ "authorAffiliation": {
+ "typeName": "authorAffiliation",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "AuthorAffiliation2"
+ },
+ "authorIdentifierScheme": {
+ "typeName": "authorIdentifierScheme",
+ "multiple": false,
+ "typeClass": "controlledVocabulary",
+ "value": "ISNI"
+ },
+ "authorIdentifier": {
+ "typeName": "authorIdentifier",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "AuthorIdentifier2"
+ }
+ }
+ ]
+ },
+ {
+ "typeName": "datasetContact",
+ "multiple": true,
+ "typeClass": "compound",
+ "value": [
+ {
+ "datasetContactName": {
+ "typeName": "datasetContactName",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "LastContact1, FirstContact1"
+ },
+ "datasetContactAffiliation": {
+ "typeName": "datasetContactAffiliation",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "ContactAffiliation1"
+ },
+ "datasetContactEmail": {
+ "typeName": "datasetContactEmail",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "ContactEmail1@mailinator.com"
+ }
+ },
+ {
+ "datasetContactName": {
+ "typeName": "datasetContactName",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "LastContact2, FirstContact2"
+ },
+ "datasetContactAffiliation": {
+ "typeName": "datasetContactAffiliation",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "ContactAffiliation2"
+ },
+ "datasetContactEmail": {
+ "typeName": "datasetContactEmail",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "ContactEmail2@mailinator.com"
+ }
+ }
+ ]
+ },
+ {
+ "typeName": "dsDescription",
+ "multiple": true,
+ "typeClass": "compound",
+ "value": [
+ {
+ "dsDescriptionValue": {
+ "typeName": "dsDescriptionValue",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "DescriptionText1"
+ },
+ "dsDescriptionDate": {
+ "typeName": "dsDescriptionDate",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "1000-01-01"
+ }
+ },
+ {
+ "dsDescriptionValue": {
+ "typeName": "dsDescriptionValue",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "DescriptionText2"
+ },
+ "dsDescriptionDate": {
+ "typeName": "dsDescriptionDate",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "1000-02-02"
+ }
+ }
+ ]
+ },
+ {
+ "typeName": "subject",
+ "multiple": true,
+ "typeClass": "controlledVocabulary",
+ "value": [
+ "Agricultural Sciences",
+ "Business and Management",
+ "Engineering",
+ "Law"
+ ]
+ },
+ {
+ "typeName": "keyword",
+ "multiple": true,
+ "typeClass": "compound",
+ "value": [
+ {
+ "keywordValue": {
+ "typeName": "keywordValue",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "KeywordTerm1"
+ },
+ "keywordTermURI": {
+ "typeName": "keywordTermURI",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "http://keywordTermURI1.org"
+ },
+ "keywordVocabulary": {
+ "typeName": "keywordVocabulary",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "KeywordVocabulary1"
+ },
+ "keywordVocabularyURI": {
+ "typeName": "keywordVocabularyURI",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "http://KeywordVocabularyURL1.org"
+ }
+ },
+ {
+ "keywordValue": {
+ "typeName": "keywordValue",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "KeywordTerm2"
+ },
+ "keywordTermURI": {
+ "typeName": "keywordTermURI",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "http://keywordTermURI2.org"
+ },
+ "keywordVocabulary": {
+ "typeName": "keywordVocabulary",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "KeywordVocabulary2"
+ },
+ "keywordVocabularyURI": {
+ "typeName": "keywordVocabularyURI",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "http://KeywordVocabularyURL2.org"
+ }
+ }
+ ]
+ },
+ {
+ "typeName": "topicClassification",
+ "multiple": true,
+ "typeClass": "compound",
+ "value": [
+ {
+ "topicClassValue": {
+ "typeName": "topicClassValue",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "Topic Classification Term1"
+ },
+ "topicClassVocab": {
+ "typeName": "topicClassVocab",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "Topic Classification Vocab1"
+ },
+ "topicClassVocabURI": {
+ "typeName": "topicClassVocabURI",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "https://TopicClassificationURL1.com"
+ }
+ },
+ {
+ "topicClassValue": {
+ "typeName": "topicClassValue",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "Topic Classification Term2"
+ },
+ "topicClassVocab": {
+ "typeName": "topicClassVocab",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "Topic Classification Vocab2"
+ },
+ "topicClassVocabURI": {
+ "typeName": "topicClassVocabURI",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "https://TopicClassificationURL2.com"
+ }
+ }
+ ]
+ },
+ {
+ "typeName": "publication",
+ "multiple": true,
+ "typeClass": "compound",
+ "value": [
+ {
+ "publicationRelationType" : {
+ "typeName" : "publicationRelationType",
+ "multiple" : false,
+ "typeClass" : "controlledVocabulary",
+ "value" : "IsSupplementTo"
+ },
+ "publicationCitation": {
+ "typeName": "publicationCitation",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "RelatedPublicationCitation1"
+ },
+ "publicationIDType": {
+ "typeName": "publicationIDType",
+ "multiple": false,
+ "typeClass": "controlledVocabulary",
+ "value": "ark"
+ },
+ "publicationIDNumber": {
+ "typeName": "publicationIDNumber",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "RelatedPublicationIDNumber1"
+ },
+ "publicationURL": {
+ "typeName": "publicationURL",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "http://RelatedPublicationURL1.org"
+ }
+ },
+ {
+ "publicationCitation": {
+ "typeName": "publicationCitation",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "RelatedPublicationCitation2"
+ },
+ "publicationIDType": {
+ "typeName": "publicationIDType",
+ "multiple": false,
+ "typeClass": "controlledVocabulary",
+ "value": "arXiv"
+ },
+ "publicationIDNumber": {
+ "typeName": "publicationIDNumber",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "RelatedPublicationIDNumber2"
+ },
+ "publicationURL": {
+ "typeName": "publicationURL",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "http://RelatedPublicationURL2.org"
+ }
+ }
+ ]
+ },
+ {
+ "typeName": "notesText",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "Notes1"
+ },
+ {
+ "typeName": "language",
+ "multiple": true,
+ "typeClass": "controlledVocabulary",
+ "value": [
+ "Abkhaz",
+ "Afar"
+ ]
+ },
+ {
+ "typeName": "producer",
+ "multiple": true,
+ "typeClass": "compound",
+ "value": [
+ {
+ "producerName": {
+ "typeName": "producerName",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "LastProducer1, FirstProducer1"
+ },
+ "producerAffiliation": {
+ "typeName": "producerAffiliation",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "ProducerAffiliation1"
+ },
+ "producerAbbreviation": {
+ "typeName": "producerAbbreviation",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "ProducerAbbreviation1"
+ },
+ "producerURL": {
+ "typeName": "producerURL",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "http://ProducerURL1.org"
+ },
+ "producerLogoURL": {
+ "typeName": "producerLogoURL",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "http://ProducerLogoURL1.org"
+ }
+ },
+ {
+ "producerName": {
+ "typeName": "producerName",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "LastProducer2, FirstProducer2"
+ },
+ "producerAffiliation": {
+ "typeName": "producerAffiliation",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "ProducerAffiliation2"
+ },
+ "producerAbbreviation": {
+ "typeName": "producerAbbreviation",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "ProducerAbbreviation2"
+ },
+ "producerURL": {
+ "typeName": "producerURL",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "http://ProducerURL2.org"
+ },
+ "producerLogoURL": {
+ "typeName": "producerLogoURL",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "http://ProducerLogoURL2.org"
+ }
+ }
+ ]
+ },
+ {
+ "typeName": "productionDate",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "1003-01-01"
+ },
+ {
+ "typeName": "productionPlace",
+ "multiple": true,
+ "typeClass": "primitive",
+ "value": ["ProductionPlace","Second ProductionPlace"]
+ },
+ {
+ "typeName": "contributor",
+ "multiple": true,
+ "typeClass": "compound",
+ "value": [
+ {
+ "contributorType": {
+ "typeName": "contributorType",
+ "multiple": false,
+ "typeClass": "controlledVocabulary",
+ "value": "Data Collector"
+ },
+ "contributorName": {
+ "typeName": "contributorName",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "LastContributor1, FirstContributor1"
+ }
+ },
+ {
+ "contributorType": {
+ "typeName": "contributorType",
+ "multiple": false,
+ "typeClass": "controlledVocabulary",
+ "value": "Data Curator"
+ },
+ "contributorName": {
+ "typeName": "contributorName",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "LastContributor2, FirstContributor2"
+ }
+ }
+ ]
+ },
+ {
+ "typeName": "grantNumber",
+ "multiple": true,
+ "typeClass": "compound",
+ "value": [
+ {
+ "grantNumberAgency": {
+ "typeName": "grantNumberAgency",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "GrantInformationGrantAgency1"
+ },
+ "grantNumberValue": {
+ "typeName": "grantNumberValue",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "GrantInformationGrantNumber1"
+ }
+ },
+ {
+ "grantNumberAgency": {
+ "typeName": "grantNumberAgency",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "GrantInformationGrantAgency2"
+ },
+ "grantNumberValue": {
+ "typeName": "grantNumberValue",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "GrantInformationGrantNumber2"
+ }
+ }
+ ]
+ },
+ {
+ "typeName": "distributor",
+ "multiple": true,
+ "typeClass": "compound",
+ "value": [
+ {
+ "distributorName": {
+ "typeName": "distributorName",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "LastDistributor1, FirstDistributor1"
+ },
+ "distributorAffiliation": {
+ "typeName": "distributorAffiliation",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "DistributorAffiliation1"
+ },
+ "distributorAbbreviation": {
+ "typeName": "distributorAbbreviation",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "DistributorAbbreviation1"
+ },
+ "distributorURL": {
+ "typeName": "distributorURL",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "http://DistributorURL1.org"
+ },
+ "distributorLogoURL": {
+ "typeName": "distributorLogoURL",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "http://DistributorLogoURL1.org"
+ }
+ },
+ {
+ "distributorName": {
+ "typeName": "distributorName",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "LastDistributor2, FirstDistributor2"
+ },
+ "distributorAffiliation": {
+ "typeName": "distributorAffiliation",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "DistributorAffiliation2"
+ },
+ "distributorAbbreviation": {
+ "typeName": "distributorAbbreviation",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "DistributorAbbreviation2"
+ },
+ "distributorURL": {
+ "typeName": "distributorURL",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "http://DistributorURL2.org"
+ },
+ "distributorLogoURL": {
+ "typeName": "distributorLogoURL",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "http://DistributorLogoURL2.org"
+ }
+ }
+ ]
+ },
+ {
+ "typeName": "distributionDate",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "1004-01-01"
+ },
+ {
+ "typeName": "depositor",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "LastDepositor, FirstDepositor"
+ },
+ {
+ "typeName": "dateOfDeposit",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "1002-01-01"
+ },
+ {
+ "typeName": "timePeriodCovered",
+ "multiple": true,
+ "typeClass": "compound",
+ "value": [
+ {
+ "timePeriodCoveredStart": {
+ "typeName": "timePeriodCoveredStart",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "1005-01-01"
+ },
+ "timePeriodCoveredEnd": {
+ "typeName": "timePeriodCoveredEnd",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "1005-01-02"
+ }
+ },
+ {
+ "timePeriodCoveredStart": {
+ "typeName": "timePeriodCoveredStart",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "1005-02-01"
+ },
+ "timePeriodCoveredEnd": {
+ "typeName": "timePeriodCoveredEnd",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "1005-02-02"
+ }
+ }
+ ]
+ },
+ {
+ "typeName": "dateOfCollection",
+ "multiple": true,
+ "typeClass": "compound",
+ "value": [
+ {
+ "dateOfCollectionStart": {
+ "typeName": "dateOfCollectionStart",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "1006-01-01"
+ },
+ "dateOfCollectionEnd": {
+ "typeName": "dateOfCollectionEnd",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "1006-01-01"
+ }
+ },
+ {
+ "dateOfCollectionStart": {
+ "typeName": "dateOfCollectionStart",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "1006-02-01"
+ },
+ "dateOfCollectionEnd": {
+ "typeName": "dateOfCollectionEnd",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "1006-02-02"
+ }
+ }
+ ]
+ },
+ {
+ "typeName": "kindOfData",
+ "multiple": true,
+ "typeClass": "primitive",
+ "value": [
+ "KindOfData1",
+ "KindOfData2"
+ ]
+ },
+ {
+ "typeName": "series",
+ "multiple": true,
+ "typeClass": "compound",
+ "value": [{
+ "seriesName": {
+ "typeName": "seriesName",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "SeriesName"
+ },
+ "seriesInformation": {
+ "typeName": "seriesInformation",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "SeriesInformation"
+ }
+ }]
+ },
+ {
+ "typeName": "software",
+ "multiple": true,
+ "typeClass": "compound",
+ "value": [
+ {
+ "softwareName": {
+ "typeName": "softwareName",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "SoftwareName1"
+ },
+ "softwareVersion": {
+ "typeName": "softwareVersion",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "SoftwareVersion1"
+ }
+ },
+ {
+ "softwareName": {
+ "typeName": "softwareName",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "SoftwareName2"
+ },
+ "softwareVersion": {
+ "typeName": "softwareVersion",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "SoftwareVersion2"
+ }
+ }
+ ]
+ },
+ {
+ "typeName": "relatedMaterial",
+ "multiple": true,
+ "typeClass": "primitive",
+ "value": [
+ "RelatedMaterial1",
+ "RelatedMaterial2"
+ ]
+ },
+ {
+ "typeName": "relatedDatasets",
+ "multiple": true,
+ "typeClass": "primitive",
+ "value": [
+ "RelatedDatasets1",
+ "RelatedDatasets2"
+ ]
+ },
+ {
+ "typeName": "otherReferences",
+ "multiple": true,
+ "typeClass": "primitive",
+ "value": [
+ "OtherReferences1",
+ "OtherReferences2"
+ ]
+ },
+ {
+ "typeName": "dataSources",
+ "multiple": true,
+ "typeClass": "primitive",
+ "value": [
+ "DataSources1",
+ "DataSources2"
+ ]
+ },
+ {
+ "typeName": "originOfSources",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "OriginOfSources"
+ },
+ {
+ "typeName": "characteristicOfSources",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "CharacteristicOfSourcesNoted"
+ },
+ {
+ "typeName": "accessToSources",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "DocumentationAndAccessToSources"
+ }
+ ]
+ },
+ "geospatial": {
+ "displayName": "Geospatial Metadata",
+ "fields": [
+ {
+ "typeName": "geographicCoverage",
+ "multiple": true,
+ "typeClass": "compound",
+ "value": [
+ {
+ "country": {
+ "typeName": "country",
+ "multiple": false,
+ "typeClass": "controlledVocabulary",
+ "value": "Afghanistan"
+ },
+ "state": {
+ "typeName": "state",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "GeographicCoverageStateProvince1"
+ },
+ "city": {
+ "typeName": "city",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "GeographicCoverageCity1"
+ },
+ "otherGeographicCoverage": {
+ "typeName": "otherGeographicCoverage",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "GeographicCoverageOther1"
+ }
+ },
+ {
+ "country": {
+ "typeName": "country",
+ "multiple": false,
+ "typeClass": "controlledVocabulary",
+ "value": "Albania"
+ },
+ "state": {
+ "typeName": "state",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "GeographicCoverageStateProvince2"
+ },
+ "city": {
+ "typeName": "city",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "GeographicCoverageCity2"
+ },
+ "otherGeographicCoverage": {
+ "typeName": "otherGeographicCoverage",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "GeographicCoverageOther2"
+ }
+ }
+ ]
+ },
+ {
+ "typeName": "geographicUnit",
+ "multiple": true,
+ "typeClass": "primitive",
+ "value": [
+ "GeographicUnit1",
+ "GeographicUnit2"
+ ]
+ },
+ {
+ "typeName": "geographicBoundingBox",
+ "multiple": true,
+ "typeClass": "compound",
+ "value": [
+ {
+ "westLongitude": {
+ "typeName": "westLongitude",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "-72"
+ },
+ "eastLongitude": {
+ "typeName": "eastLongitude",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "-70"
+ },
+ "northLatitude": {
+ "typeName": "northLatitude",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "43"
+ },
+ "southLatitude": {
+ "typeName": "southLatitude",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "42"
+ }
+ },
+ {
+ "westLongitude": {
+ "typeName": "westLongitude",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "-18"
+ },
+ "eastLongitude": {
+ "typeName": "eastLongitude",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "-13"
+ },
+ "northLatitude": {
+ "typeName": "northLatitude",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "29"
+ },
+ "southLatitude": {
+ "typeName": "southLatitude",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "28"
+ }
+ }
+ ]
+ }
+ ]
+ },
+ "socialscience": {
+ "displayName": "Social Science and Humanities Metadata",
+ "fields": [
+ {
+ "typeName": "unitOfAnalysis",
+ "multiple": true,
+ "typeClass": "primitive",
+ "value": [
+ "UnitOfAnalysis1",
+ "UnitOfAnalysis2"
+ ]
+ },
+ {
+ "typeName": "universe",
+ "multiple": true,
+ "typeClass": "primitive",
+ "value": [
+ "Universe1",
+ "Universe2"
+ ]
+ },
+ {
+ "typeName": "timeMethod",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "TimeMethod"
+ },
+ {
+ "typeName": "dataCollector",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "LastDataCollector1, FirstDataCollector1"
+ },
+ {
+ "typeName": "collectorTraining",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "CollectorTraining"
+ },
+ {
+ "typeName": "frequencyOfDataCollection",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "Frequency"
+ },
+ {
+ "typeName": "samplingProcedure",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "SamplingProcedure"
+ },
+ {
+ "typeName": "targetSampleSize",
+ "multiple": false,
+ "typeClass": "compound",
+ "value": {
+ "targetSampleActualSize": {
+ "typeName": "targetSampleActualSize",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "100"
+ },
+ "targetSampleSizeFormula": {
+ "typeName": "targetSampleSizeFormula",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "TargetSampleSizeFormula"
+ }
+ }
+ },
+ {
+ "typeName": "deviationsFromSampleDesign",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "MajorDeviationsForSampleDesign"
+ },
+ {
+ "typeName": "collectionMode",
+ "multiple": true,
+ "typeClass": "primitive",
+ "value": ["CollectionMode"]
+ },
+ {
+ "typeName": "researchInstrument",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "TypeOfResearchInstrument"
+ },
+ {
+ "typeName": "dataCollectionSituation",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "CharacteristicsOfDataCollectionSituation"
+ },
+ {
+ "typeName": "actionsToMinimizeLoss",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "ActionsToMinimizeLosses"
+ },
+ {
+ "typeName": "controlOperations",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "ControlOperations"
+ },
+ {
+ "typeName": "weighting",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "Weighting"
+ },
+ {
+ "typeName": "cleaningOperations",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "CleaningOperations"
+ },
+ {
+ "typeName": "datasetLevelErrorNotes",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "StudyLevelErrorNotes"
+ },
+ {
+ "typeName": "responseRate",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "ResponseRate"
+ },
+ {
+ "typeName": "samplingErrorEstimates",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "EstimatesOfSamplingError"
+ },
+ {
+ "typeName": "otherDataAppraisal",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "OtherFormsOfDataAppraisal"
+ },
+ {
+ "typeName": "socialScienceNotes",
+ "multiple": false,
+ "typeClass": "compound",
+ "value": {
+ "socialScienceNotesType": {
+ "typeName": "socialScienceNotesType",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "NotesType"
+ },
+ "socialScienceNotesSubject": {
+ "typeName": "socialScienceNotesSubject",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "NotesSubject"
+ },
+ "socialScienceNotesText": {
+ "typeName": "socialScienceNotesText",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "NotesText"
+ }
+ }
+ }
+ ]
+ },
+ "astrophysics": {
+ "displayName": "Astronomy and Astrophysics Metadata",
+ "fields": [
+ {
+ "typeName": "astroType",
+ "multiple": true,
+ "typeClass": "controlledVocabulary",
+ "value": [
+ "Image",
+ "Mosaic",
+ "EventList",
+ "Cube"
+ ]
+ },
+ {
+ "typeName": "astroFacility",
+ "multiple": true,
+ "typeClass": "primitive",
+ "value": [
+ "Facility1",
+ "Facility2"
+ ]
+ },
+ {
+ "typeName": "astroInstrument",
+ "multiple": true,
+ "typeClass": "primitive",
+ "value": [
+ "Instrument1",
+ "Instrument2"
+ ]
+ },
+ {
+ "typeName": "astroObject",
+ "multiple": true,
+ "typeClass": "primitive",
+ "value": [
+ "Object1",
+ "Object2"
+ ]
+ },
+ {
+ "typeName": "resolution.Spatial",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "SpatialResolution"
+ },
+ {
+ "typeName": "resolution.Spectral",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "SpectralResolution"
+ },
+ {
+ "typeName": "resolution.Temporal",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "TimeResolution"
+ },
+ {
+ "typeName": "coverage.Spectral.Bandpass",
+ "multiple": true,
+ "typeClass": "primitive",
+ "value": [
+ "Bandpass1",
+ "Bandpass2"
+ ]
+ },
+ {
+ "typeName": "coverage.Spectral.CentralWavelength",
+ "multiple": true,
+ "typeClass": "primitive",
+ "value": [
+ "3001",
+ "3002"
+ ]
+ },
+ {
+ "typeName": "coverage.Spectral.Wavelength",
+ "multiple": true,
+ "typeClass": "compound",
+ "value": [
+ {
+ "coverage.Spectral.MinimumWavelength": {
+ "typeName": "coverage.Spectral.MinimumWavelength",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "4001"
+ },
+ "coverage.Spectral.MaximumWavelength": {
+ "typeName": "coverage.Spectral.MaximumWavelength",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "4002"
+ }
+ },
+ {
+ "coverage.Spectral.MinimumWavelength": {
+ "typeName": "coverage.Spectral.MinimumWavelength",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "4003"
+ },
+ "coverage.Spectral.MaximumWavelength": {
+ "typeName": "coverage.Spectral.MaximumWavelength",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "4004"
+ }
+ }
+ ]
+ },
+ {
+ "typeName": "coverage.Temporal",
+ "multiple": true,
+ "typeClass": "compound",
+ "value": [
+ {
+ "coverage.Temporal.StartTime": {
+ "typeName": "coverage.Temporal.StartTime",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "1007-01-01"
+ },
+ "coverage.Temporal.StopTime": {
+ "typeName": "coverage.Temporal.StopTime",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "1007-01-02"
+ }
+ },
+ {
+ "coverage.Temporal.StartTime": {
+ "typeName": "coverage.Temporal.StartTime",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "1007-02-01"
+ },
+ "coverage.Temporal.StopTime": {
+ "typeName": "coverage.Temporal.StopTime",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "1007-02-02"
+ }
+ }
+ ]
+ },
+ {
+ "typeName": "coverage.Spatial",
+ "multiple": true,
+ "typeClass": "primitive",
+ "value": [
+ "SkyCoverage1",
+ "SkyCoverage2"
+ ]
+ },
+ {
+ "typeName": "coverage.Depth",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "200"
+ },
+ {
+ "typeName": "coverage.ObjectDensity",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "300"
+ },
+ {
+ "typeName": "coverage.ObjectCount",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "400"
+ },
+ {
+ "typeName": "coverage.SkyFraction",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "500"
+ },
+ {
+ "typeName": "coverage.Polarization",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "Polarization"
+ },
+ {
+ "typeName": "redshiftType",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "RedshiftType"
+ },
+ {
+ "typeName": "resolution.Redshift",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "600"
+ },
+ {
+ "typeName": "coverage.RedshiftValue",
+ "multiple": true,
+ "typeClass": "compound",
+ "value": [
+ {
+ "coverage.Redshift.MinimumValue": {
+ "typeName": "coverage.Redshift.MinimumValue",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "701"
+ },
+ "coverage.Redshift.MaximumValue": {
+ "typeName": "coverage.Redshift.MaximumValue",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "702"
+ }
+ },
+ {
+ "coverage.Redshift.MinimumValue": {
+ "typeName": "coverage.Redshift.MinimumValue",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "703"
+ },
+ "coverage.Redshift.MaximumValue": {
+ "typeName": "coverage.Redshift.MaximumValue",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "704"
+ }
+ }
+ ]
+ }
+ ]
+ },
+ "biomedical": {
+ "displayName": "Life Sciences Metadata",
+ "fields": [
+ {
+ "typeName": "studyDesignType",
+ "multiple": true,
+ "typeClass": "controlledVocabulary",
+ "value": [
+ "Case Control",
+ "Cross Sectional",
+ "Cohort Study",
+ "Not Specified"
+ ]
+ },
+ {
+ "typeName": "studyFactorType",
+ "multiple": true,
+ "typeClass": "controlledVocabulary",
+ "value": [
+ "Age",
+ "Biomarkers",
+ "Cell Surface Markers",
+ "Developmental Stage"
+ ]
+ },
+ {
+ "typeName": "studyAssayOrganism",
+ "multiple": true,
+ "typeClass": "controlledVocabulary",
+ "value": [
+ "Arabidopsis thaliana",
+ "Bos taurus",
+ "Caenorhabditis elegans",
+ "Danio rerio (zebrafish)"
+ ]
+ },
+ {
+ "typeName": "studyAssayOtherOrganism",
+ "multiple": true,
+ "typeClass": "primitive",
+ "value": [
+ "OtherOrganism1",
+ "OtherOrganism2"
+ ]
+ },
+ {
+ "typeName": "studyAssayMeasurementType",
+ "multiple": true,
+ "typeClass": "controlledVocabulary",
+ "value": [
+ "genome sequencing",
+ "cell sorting",
+ "clinical chemistry analysis",
+ "DNA methylation profiling"
+ ]
+ },
+ {
+ "typeName": "studyAssayOtherMeasurmentType",
+ "multiple": true,
+ "typeClass": "primitive",
+ "value": [
+ "OtherMeasurementType1",
+ "OtherMeasurementType2"
+ ]
+ },
+ {
+ "typeName": "studyAssayTechnologyType",
+ "multiple": true,
+ "typeClass": "controlledVocabulary",
+ "value": [
+ "culture based drug susceptibility testing, single concentration",
+ "culture based drug susceptibility testing, two concentrations",
+ "culture based drug susceptibility testing, three or more concentrations (minimium inhibitory concentration measurement)",
+ "flow cytometry"
+ ]
+ },
+ {
+ "typeName": "studyAssayPlatform",
+ "multiple": true,
+ "typeClass": "controlledVocabulary",
+ "value": [
+ "210-MS GC Ion Trap (Varian)",
+ "220-MS GC Ion Trap (Varian)",
+ "225-MS GC Ion Trap (Varian)",
+ "300-MS quadrupole GC/MS (Varian)"
+ ]
+ },
+ {
+ "typeName": "studyAssayCellType",
+ "multiple": true,
+ "typeClass": "primitive",
+ "value": [
+ "CellType1",
+ "CellType2"
+ ]
+ }
+ ]
+ },
+ "journal": {
+ "displayName": "Journal Metadata",
+ "fields": [
+ {
+ "typeName": "journalVolumeIssue",
+ "multiple": true,
+ "typeClass": "compound",
+ "value": [
+ {
+ "journalVolume": {
+ "typeName": "journalVolume",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "JournalVolume1"
+ },
+ "journalIssue": {
+ "typeName": "journalIssue",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "JournalIssue1"
+ },
+ "journalPubDate": {
+ "typeName": "journalPubDate",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "1008-01-01"
+ }
+ },
+ {
+ "journalVolume": {
+ "typeName": "journalVolume",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "JournalVolume2"
+ },
+ "journalIssue": {
+ "typeName": "journalIssue",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "JournalIssue2"
+ },
+ "journalPubDate": {
+ "typeName": "journalPubDate",
+ "multiple": false,
+ "typeClass": "primitive",
+ "value": "1008-02-01"
+ }
+ }
+ ]
+ },
+ {
+ "typeName": "journalArticleType",
+ "multiple": false,
+ "typeClass": "controlledVocabulary",
+ "value": "abstract"
+ }
+ ]
+ }
+ }
+ }
+}
diff --git a/scripts/api/data/licenses/licenseApache-2.0.json b/scripts/api/data/licenses/licenseApache-2.0.json
index 5b7c3cf5c95..9b9bb0cc025 100644
--- a/scripts/api/data/licenses/licenseApache-2.0.json
+++ b/scripts/api/data/licenses/licenseApache-2.0.json
@@ -1,8 +1,11 @@
{
- "name": "Apache-2.0",
- "uri": "http://www.apache.org/licenses/LICENSE-2.0",
- "shortDescription": "Apache License 2.0",
- "active": true,
- "sortOrder": 9
- }
-
\ No newline at end of file
+ "name": "Apache-2.0",
+ "uri": "http://www.apache.org/licenses/LICENSE-2.0",
+ "shortDescription": "Apache License 2.0",
+ "active": true,
+ "sortOrder": 9,
+ "rightsIdentifier": "Apache-2.0",
+ "rightsIdentifierScheme": "SPDX",
+ "schemeUri": "https://spdx.org/licenses/",
+ "languageCode": "en"
+}
\ No newline at end of file
diff --git a/scripts/api/data/licenses/licenseCC-BY-4.0.json b/scripts/api/data/licenses/licenseCC-BY-4.0.json
index 59201b8d08e..3c723e80123 100644
--- a/scripts/api/data/licenses/licenseCC-BY-4.0.json
+++ b/scripts/api/data/licenses/licenseCC-BY-4.0.json
@@ -4,5 +4,9 @@
"shortDescription": "Creative Commons Attribution 4.0 International License.",
"iconUrl": "https://licensebuttons.net/l/by/4.0/88x31.png",
"active": true,
- "sortOrder": 2
-}
+ "sortOrder": 2,
+ "rightsIdentifier": "CC-BY-4.0",
+ "rightsIdentifierScheme": "SPDX",
+ "schemeUri": "https://spdx.org/licenses/",
+ "languageCode": "en"
+}
\ No newline at end of file
diff --git a/scripts/api/data/licenses/licenseCC-BY-NC-4.0.json b/scripts/api/data/licenses/licenseCC-BY-NC-4.0.json
index c19087664db..8c0d5f18fe3 100644
--- a/scripts/api/data/licenses/licenseCC-BY-NC-4.0.json
+++ b/scripts/api/data/licenses/licenseCC-BY-NC-4.0.json
@@ -4,5 +4,9 @@
"shortDescription": "Creative Commons Attribution-NonCommercial 4.0 International License.",
"iconUrl": "https://licensebuttons.net/l/by-nc/4.0/88x31.png",
"active": true,
- "sortOrder": 4
+ "sortOrder": 4,
+ "rightsIdentifier": "CC-BY-NC-4.0",
+ "rightsIdentifierScheme": "SPDX",
+ "schemeUri": "https://spdx.org/licenses/",
+ "languageCode": "en"
}
diff --git a/scripts/api/data/licenses/licenseCC-BY-NC-ND-4.0.json b/scripts/api/data/licenses/licenseCC-BY-NC-ND-4.0.json
index 2e374917d28..a9963919eae 100644
--- a/scripts/api/data/licenses/licenseCC-BY-NC-ND-4.0.json
+++ b/scripts/api/data/licenses/licenseCC-BY-NC-ND-4.0.json
@@ -4,5 +4,9 @@
"shortDescription": "Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International License.",
"iconUrl": "https://licensebuttons.net/l/by-nc-nd/4.0/88x31.png",
"active": true,
- "sortOrder": 7
+ "sortOrder": 7,
+ "rightsIdentifier": "CC-BY-NC-ND-4.0",
+ "rightsIdentifierScheme": "SPDX",
+ "schemeUri": "https://spdx.org/licenses/",
+ "languageCode": "en"
}
diff --git a/scripts/api/data/licenses/licenseCC-BY-NC-SA-4.0.json b/scripts/api/data/licenses/licenseCC-BY-NC-SA-4.0.json
index 5018884f65e..02cf9812a67 100644
--- a/scripts/api/data/licenses/licenseCC-BY-NC-SA-4.0.json
+++ b/scripts/api/data/licenses/licenseCC-BY-NC-SA-4.0.json
@@ -4,5 +4,9 @@
"shortDescription": "Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License.",
"iconUrl": "https://licensebuttons.net/l/by-nc-sa/4.0/88x31.png",
"active": true,
- "sortOrder": 3
+ "sortOrder": 3,
+ "rightsIdentifier": "CC-BY-NC-SA-4.0",
+ "rightsIdentifierScheme": "SPDX",
+ "schemeUri": "https://spdx.org/licenses/",
+ "languageCode": "en"
}
diff --git a/scripts/api/data/licenses/licenseCC-BY-ND-4.0.json b/scripts/api/data/licenses/licenseCC-BY-ND-4.0.json
index 317d459a7ae..260efbe19a5 100644
--- a/scripts/api/data/licenses/licenseCC-BY-ND-4.0.json
+++ b/scripts/api/data/licenses/licenseCC-BY-ND-4.0.json
@@ -4,5 +4,9 @@
"shortDescription": "Creative Commons Attribution-NoDerivatives 4.0 International License.",
"iconUrl": "https://licensebuttons.net/l/by-nd/4.0/88x31.png",
"active": true,
- "sortOrder": 6
+ "sortOrder": 6,
+ "rightsIdentifier": "CC-BY-ND-4.0",
+ "rightsIdentifierScheme": "SPDX",
+ "schemeUri": "https://spdx.org/licenses/",
+ "languageCode": "en"
}
diff --git a/scripts/api/data/licenses/licenseCC-BY-SA-4.0.json b/scripts/api/data/licenses/licenseCC-BY-SA-4.0.json
index 0d28c9423aa..ed7511ded17 100644
--- a/scripts/api/data/licenses/licenseCC-BY-SA-4.0.json
+++ b/scripts/api/data/licenses/licenseCC-BY-SA-4.0.json
@@ -4,5 +4,9 @@
"shortDescription": "Creative Commons Attribution-ShareAlike 4.0 International License.",
"iconUrl": "https://licensebuttons.net/l/by-sa/4.0/88x31.png",
"active": true,
- "sortOrder": 5
+ "sortOrder": 5,
+ "rightsIdentifier": "CC-BY-SA-4.0",
+ "rightsIdentifierScheme": "SPDX",
+ "schemeUri": "https://spdx.org/licenses/",
+ "languageCode": "en"
}
diff --git a/scripts/api/data/licenses/licenseCC0-1.0.json b/scripts/api/data/licenses/licenseCC0-1.0.json
index 216260a5de8..7f21b81eea5 100644
--- a/scripts/api/data/licenses/licenseCC0-1.0.json
+++ b/scripts/api/data/licenses/licenseCC0-1.0.json
@@ -4,5 +4,9 @@
"shortDescription": "Creative Commons CC0 1.0 Universal Public Domain Dedication.",
"iconUrl": "https://licensebuttons.net/p/zero/1.0/88x31.png",
"active": true,
- "sortOrder": 1
+ "sortOrder": 1,
+ "rightsIdentifier": "CC0-1.0",
+ "rightsIdentifierScheme": "SPDX",
+ "schemeUri": "https://spdx.org/licenses/",
+ "languageCode": "en"
}
diff --git a/scripts/api/data/licenses/licenseEtalab-2.0.json b/scripts/api/data/licenses/licenseEtalab-2.0.json
new file mode 100644
index 00000000000..42ec90a7540
--- /dev/null
+++ b/scripts/api/data/licenses/licenseEtalab-2.0.json
@@ -0,0 +1,12 @@
+{
+ "name": "etalab 2.0",
+ "uri": "https://spdx.org/licenses/etalab-2.0",
+ "shortDescription": "Etalab Open License 2.0, compatible CC-BY 2.0",
+ "iconUrl": "https://upload.wikimedia.org/wikipedia/commons/thumb/1/18/Logo-licence-ouverte2.svg/25px-Logo-licence-ouverte2.svg.png",
+ "active": true,
+ "sortOrder": 10,
+ "rightsIdentifier": "etalab-2.0",
+ "rightsIdentifierScheme": "SPDX",
+ "schemeUri": "https://spdx.org/licenses/",
+ "languageCode": "fr"
+}
diff --git a/scripts/api/data/licenses/licenseMIT.json b/scripts/api/data/licenses/licenseMIT.json
index a879e8a5595..32d4351ed91 100644
--- a/scripts/api/data/licenses/licenseMIT.json
+++ b/scripts/api/data/licenses/licenseMIT.json
@@ -3,5 +3,9 @@
"uri": "https://opensource.org/licenses/MIT",
"shortDescription": "MIT License",
"active": true,
- "sortOrder": 8
+ "sortOrder": 8,
+ "rightsIdentifier": "MIT",
+ "rightsIdentifierScheme": "SPDX",
+ "schemeUri": "https://spdx.org/licenses/",
+ "languageCode": "en"
}
diff --git a/scripts/api/data/metadatablocks/3d_objects.tsv b/scripts/api/data/metadatablocks/3d_objects.tsv
new file mode 100644
index 00000000000..e753e4dfbed
--- /dev/null
+++ b/scripts/api/data/metadatablocks/3d_objects.tsv
@@ -0,0 +1,45 @@
+#metadataBlock name dataverseAlias displayName
+ 3dobjects 3D Objects Metadata
+#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id termURI
+ 3d3DTechnique 3D Technique The technique used for capturing the 3D data text 0 #VALUE TRUE TRUE FALSE TRUE FALSE FALSE 3dobjects
+ 3dEquipment Equipment The equipment used for capturing the 3D data text 1 #VALUE TRUE FALSE FALSE FALSE FALSE FALSE 3dobjects
+ 3dLightingSetup Lighting Setup The lighting used while capturing the 3D data text 2 #VALUE TRUE TRUE FALSE TRUE FALSE FALSE 3dobjects
+ 3dMasterFilePolygonCount Master File Polygon Count The high-resolution polygon count text 3 #VALUE TRUE FALSE FALSE FALSE FALSE FALSE 3dobjects
+ 3dExportedFilePolygonCount Exported File Polygon Count The exported mesh polygon count text 4 #VALUE TRUE FALSE TRUE FALSE FALSE FALSE 3dobjects
+ 3dExportedFileFormat Exported File Format The format of the exported mesh text 5 #VALUE TRUE TRUE FALSE TRUE FALSE FALSE 3dobjects
+ 3dAltText Alt-Text A physical description of the object modeled textbox 6 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE 3dobjects
+ 3dMaterialComposition Material Composition The material used to create the object, e.g. stone text 7 #VALUE TRUE FALSE TRUE TRUE FALSE FALSE 3dobjects
+ 3dObjectDimensions Object Dimensions The general measurements of the physical object none 8 ; FALSE FALSE FALSE FALSE FALSE FALSE 3dobjects
+ 3dLength Length The rough length of the object text 9 Length: #VALUE FALSE FALSE FALSE FALSE FALSE FALSE 3dObjectDimensions 3dobjects
+ 3dWidth Width The rough width of the object text 10 Width: #VALUE FALSE FALSE FALSE FALSE FALSE FALSE 3dObjectDimensions 3dobjects
+ 3dHeight Height The rough height of the object text 11 Height: #VALUE FALSE FALSE FALSE FALSE FALSE FALSE 3dObjectDimensions 3dobjects
+ 3dWeight Weight The rough weight of the object text 12 Weight:#VALUE FALSE FALSE FALSE FALSE FALSE FALSE 3dObjectDimensions 3dobjects
+ 3dUnit Unit The unit of measurement used for the object dimensions text 13 Unit: #VALUE FALSE TRUE FALSE TRUE FALSE FALSE 3dObjectDimensions 3dobjects
+ 3dHandling Instructions Safety and special handling instructions for the object textbox 14 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE 3dobjects
+#controlledVocabulary DatasetField Value identifier displayOrder
+ 3d3DTechnique IR Scanner 0
+ 3d3DTechnique Laser 1
+ 3d3DTechnique Modelled 2
+ 3d3DTechnique Photogrammetry 3
+ 3d3DTechnique RTI 4
+ 3d3DTechnique Structured Light 5
+ 3d3DTechnique Tomographic 6
+ 3d3DTechnique Other 7
+ 3dLightingSetup Natural Light 8
+ 3dLightingSetup Lightbox 9
+ 3dLightingSetup LED 10
+ 3dLightingSetup Fluorescent 11
+ 3dLightingSetup Other 12
+ 3dUnit cm 13
+ 3dUnit m 14
+ 3dUnit in 15
+ 3dUnit ft 16
+ 3dUnit lbs 17
+ 3dExportedFileFormat .fbx 18
+ 3dExportedFileFormat .glb 19
+ 3dExportedFileFormat .gltf 20
+ 3dExportedFileFormat .obj 21
+ 3dExportedFileFormat .stl 22
+ 3dExportedFileFormat .usdz 23
+ 3dExportedFileFormat .x3d 24
+ 3dExportedFileFormat other 25
diff --git a/scripts/api/data/metadatablocks/archival.tsv b/scripts/api/data/metadatablocks/archival.tsv
new file mode 100644
index 00000000000..89ef5466a44
--- /dev/null
+++ b/scripts/api/data/metadatablocks/archival.tsv
@@ -0,0 +1,12 @@
+#metadataBlock name dataverseAlias displayName blockURI
+ archival Archival Metadata
+#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id termURI
+ submitToArchivalAppraisal Submit to Archival Appraisal Your assessment whether the dataset should be submitted for archival appraisal text 0 #VALUE TRUE TRUE FALSE FALSE TRUE FALSE archival
+ archivedFrom Archived from A date (YYYY-MM-DD) from which the dataset is archived YYYY-MM-DD date 1 #VALUE TRUE FALSE FALSE FALSE FALSE FALSE archival
+ holdingArchive Holding Archive Information on the holding archive where the dataset is archived none 3 FALSE FALSE TRUE FALSE FALSE FALSE archival
+ holdingArchiveName Archived at Holding Archive The name of the holding archive text 4 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE holdingArchive archival https://schema.org/holdingArchive
+ archivedAt Archived at URL URL to holding archive URL url 5 "#VALUE" FALSE FALSE FALSE FALSE FALSE FALSE holdingArchive archival https://schema.org/archivedAt
+#controlledVocabulary DatasetField Value identifier displayOrder
+ submitToArchivalAppraisal True 0
+ submitToArchivalAppraisal False 1
+ submitToArchivalAppraisal Unknown 2
diff --git a/scripts/api/data/metadatablocks/citation.tsv b/scripts/api/data/metadatablocks/citation.tsv
index abc09465603..dea23aa9a73 100644
--- a/scripts/api/data/metadatablocks/citation.tsv
+++ b/scripts/api/data/metadatablocks/citation.tsv
@@ -133,13 +133,14 @@
contributorType Work Package Leader 15
contributorType Other 16
authorIdentifierScheme ORCID 0
- authorIdentifierScheme ISNI 1
- authorIdentifierScheme LCNA 2
- authorIdentifierScheme VIAF 3
- authorIdentifierScheme GND 4
- authorIdentifierScheme DAI 5
- authorIdentifierScheme ResearcherID 6
- authorIdentifierScheme ScopusID 7
+ authorIdentifierScheme ROR 1
+ authorIdentifierScheme ISNI 2
+ authorIdentifierScheme LCNA 3
+ authorIdentifierScheme VIAF 4
+ authorIdentifierScheme GND 5
+ authorIdentifierScheme DAI 6
+ authorIdentifierScheme ResearcherID 7
+ authorIdentifierScheme ScopusID 8
language 'Are'are alu 0 alu
language 'Auhelawa kud 1 kud
language A'ou aou 2 aou
@@ -8061,4 +8062,4 @@
publicationRelationType IsSupplementTo RT3 3
publicationRelationType IsSupplementedBy RT4 4
publicationRelationType IsReferencedBy RT5 5
- publicationRelationType References RT6 6
\ No newline at end of file
+ publicationRelationType References RT6 6
diff --git a/scripts/api/setup-datasetfields.sh b/scripts/api/setup-datasetfields.sh
index 51da677ceb8..908988f8acb 100755
--- a/scripts/api/setup-datasetfields.sh
+++ b/scripts/api/setup-datasetfields.sh
@@ -11,3 +11,4 @@ curl "${DATAVERSE_URL}/api/admin/datasetfield/load" -X POST --data-binary @"$SCR
curl "${DATAVERSE_URL}/api/admin/datasetfield/load" -X POST --data-binary @"$SCRIPT_PATH"/data/metadatablocks/astrophysics.tsv -H "Content-type: text/tab-separated-values"
curl "${DATAVERSE_URL}/api/admin/datasetfield/load" -X POST --data-binary @"$SCRIPT_PATH"/data/metadatablocks/biomedical.tsv -H "Content-type: text/tab-separated-values"
curl "${DATAVERSE_URL}/api/admin/datasetfield/load" -X POST --data-binary @"$SCRIPT_PATH"/data/metadatablocks/journals.tsv -H "Content-type: text/tab-separated-values"
+curl "${DATAVERSE_URL}/api/admin/datasetfield/load" -X POST --data-binary @"$SCRIPT_PATH"/data/metadatablocks/3d_objects.tsv -H "Content-type: text/tab-separated-values"
diff --git a/scripts/api/update-datasetfields.sh b/scripts/api/update-datasetfields.sh
deleted file mode 100644
index ae099f8dcfd..00000000000
--- a/scripts/api/update-datasetfields.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/sh
-curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/citation.tsv -H "Content-type: text/tab-separated-values"
-curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/geospatial.tsv -H "Content-type: text/tab-separated-values"
-curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/social_science.tsv -H "Content-type: text/tab-separated-values"
-curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/astrophysics.tsv -H "Content-type: text/tab-separated-values"
-curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/biomedical.tsv -H "Content-type: text/tab-separated-values"
-curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/journals.tsv -H "Content-type: text/tab-separated-values"
\ No newline at end of file
diff --git a/scripts/installer/README_python.txt b/scripts/installer/README_python.txt
index ba3dd041e09..13abfeb1941 100644
--- a/scripts/installer/README_python.txt
+++ b/scripts/installer/README_python.txt
@@ -39,7 +39,7 @@ in your PATH. If you have multiple versions of PostgresQL installed,
make sure the version that you will be using with Dataverse is the
first on your PATH. For example,
- PATH=/usr/pgsql-13/bin:$PATH; export PATH
+ PATH=/usr/pgsql-16/bin:$PATH; export PATH
Certain libraries and source include files, both for PostgresQL and
Python, are also needed to compile the module. On
@@ -47,7 +47,7 @@ RedHat/CentOS/etc. you may need to install the -devel packages, *for
the specific versions* of PostgreSQL and Python you will be using. For
example:
- yum install postgresql13-devel
+ yum install postgresql16-devel
yum install python37-devel
etc.
diff --git a/scripts/installer/as-setup.sh b/scripts/installer/as-setup.sh
index e87122ba77c..68a53270114 100755
--- a/scripts/installer/as-setup.sh
+++ b/scripts/installer/as-setup.sh
@@ -124,6 +124,10 @@ function preliminary_setup()
# bump the http-listener timeout from 900 to 3600
./asadmin $ASADMIN_OPTS set server-config.network-config.protocols.protocol.http-listener-1.http.request-timeout-seconds="${GLASSFISH_REQUEST_TIMEOUT}"
+ # Set SameSite cookie value: https://docs.payara.fish/community/docs/6.2024.6/Technical%20Documentation/Payara%20Server%20Documentation/General%20Administration/Administering%20HTTP%20Connectivity.html
+ ./asadmin $ASADMIN_OPTS set server-config.network-config.protocols.protocol.http-listener-1.http.cookie-same-site-value="Lax"
+ ./asadmin $ASADMIN_OPTS set server-config.network-config.protocols.protocol.http-listener-1.http.cookie-same-site-enabled="true"
+
# so we can front with apache httpd ( ProxyPass / ajp://localhost:8009/ )
./asadmin $ASADMIN_OPTS create-network-listener --protocol http-listener-1 --listenerport 8009 --jkenabled true jk-connector
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataCitation.java b/src/main/java/edu/harvard/iq/dataverse/DataCitation.java
index 02fb59751fb..589fb5fea9c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataCitation.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataCitation.java
@@ -6,6 +6,7 @@
package edu.harvard.iq.dataverse;
import edu.harvard.iq.dataverse.branding.BrandingUtil;
+import edu.harvard.iq.dataverse.dataset.DatasetType;
import edu.harvard.iq.dataverse.harvest.client.HarvestingClient;
import edu.harvard.iq.dataverse.pidproviders.AbstractPidProvider;
@@ -29,15 +30,34 @@
import java.util.stream.Collectors;
import jakarta.ejb.EJBException;
+import jakarta.json.JsonObject;
+import jakarta.ws.rs.core.MediaType;
+
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
import edu.harvard.iq.dataverse.util.BundleUtil;
import edu.harvard.iq.dataverse.util.DateUtil;
+import edu.harvard.iq.dataverse.util.PersonOrOrgUtil;
+import edu.harvard.iq.dataverse.util.SystemConfig;
+import edu.harvard.iq.dataverse.util.json.JsonUtil;
+
import org.apache.commons.text.StringEscapeUtils;
+
+import de.undercouch.citeproc.csl.CSLItemDataBuilder;
+import de.undercouch.citeproc.csl.CSLName;
+import de.undercouch.citeproc.csl.CSLNameBuilder;
+import de.undercouch.citeproc.csl.CSLType;
+import de.undercouch.citeproc.helper.json.JsonBuilder;
+import de.undercouch.citeproc.helper.json.StringJsonBuilderFactory;
+
import org.apache.commons.lang3.StringUtils;
+import static edu.harvard.iq.dataverse.pidproviders.doi.AbstractDOIProvider.DOI_PROTOCOL;
+import static edu.harvard.iq.dataverse.pidproviders.handle.HandlePidProvider.HDL_PROTOCOL;
+import static edu.harvard.iq.dataverse.pidproviders.perma.PermaLinkPidProvider.PERMA_PROTOCOL;
+
/**
*
* @author gdurand, qqmyers
@@ -47,6 +67,7 @@ public class DataCitation {
private static final Logger logger = Logger.getLogger(DataCitation.class.getCanonicalName());
private List authors = new ArrayList();
+ private List cslAuthors = new ArrayList();
private List producers = new ArrayList();
private String title;
private String fileTitle = null;
@@ -67,8 +88,18 @@ public class DataCitation {
private List spatialCoverages;
private List optionalValues = new ArrayList<>();
- private int optionalURLcount = 0;
+ private int optionalURLcount = 0;
+
+ private DatasetType type;
+ public enum Format {
+ Internal,
+ EndNote,
+ RIS,
+ BibTeX,
+ CSL
+ }
+
public DataCitation(DatasetVersion dsv) {
this(dsv, false);
}
@@ -142,8 +173,14 @@ private void getCommonValuesFrom(DatasetVersion dsv) {
spatialCoverages = dsv.getSpatialCoverages();
publisher = getPublisherFrom(dsv);
version = getVersionFrom(dsv);
+ type = getTypeFrom(dsv);
}
+ private DatasetType getTypeFrom(DatasetVersion dsv) {
+ return dsv.getDataset().getDatasetType();
+ }
+
+
public String getAuthorsString() {
return String.join("; ", authors);
}
@@ -189,7 +226,45 @@ public String toString() {
public String toString(boolean html) {
return toString(html, false);
}
+
public String toString(boolean html, boolean anonymized) {
+ return toString(Format.Internal, html, anonymized);
+ }
+
+ public String toString(Format format, boolean html, boolean anonymized) {
+ if(anonymized && (format != Format.Internal)) {
+ //Only Internal format supports anonymization
+ return null;
+ }
+ switch (format) {
+ case BibTeX:
+ return toBibtexString();
+ case CSL:
+ return JsonUtil.prettyPrint(getCSLJsonFormat());
+ case EndNote:
+ return toEndNoteString();
+ case Internal:
+ return formatInternalCitation(html, anonymized);
+ case RIS:
+ return toRISString();
+ }
+ return null;
+ }
+
+ public static String getCitationFormatMediaType(Format format, boolean isHtml) {
+ switch (format) {
+
+ case CSL:
+ return MediaType.APPLICATION_JSON;
+ case EndNote:
+ return MediaType.TEXT_XML;
+ case Internal:
+ return isHtml ? MediaType.TEXT_HTML : MediaType.TEXT_PLAIN;
+ }
+ return MediaType.TEXT_PLAIN;
+ }
+
+ private String formatInternalCitation(boolean html, boolean anonymized) {
// first add comma separated parts
String separator = ", ";
List citationList = new ArrayList<>();
@@ -293,11 +368,13 @@ public void writeAsBibtexCitation(OutputStream os) throws IOException {
out.write("version = {");
out.write(version);
out.write("},\r\n");
- out.write("doi = {");
- out.write(persistentId.getAuthority());
- out.write("/");
- out.write(persistentId.getIdentifier());
- out.write("},\r\n");
+ if("doi".equals(persistentId.getProtocol())) {
+ out.write("doi = {");
+ out.write(persistentId.getAuthority());
+ out.write("/");
+ out.write(persistentId.getIdentifier());
+ out.write("},\r\n");
+ }
out.write("url = {");
out.write(persistentId.asURL());
out.write("}\r\n");
@@ -595,11 +672,21 @@ private void createEndNoteXML(XMLStreamWriter xmlw) throws XMLStreamException {
}
xmlw.writeStartElement("urls");
- xmlw.writeStartElement("related-urls");
- xmlw.writeStartElement("url");
- xmlw.writeCharacters(getPersistentId().asURL());
- xmlw.writeEndElement(); // url
- xmlw.writeEndElement(); // related-urls
+ if (persistentId != null) {
+ if (PERMA_PROTOCOL.equals(persistentId.getProtocol()) || HDL_PROTOCOL.equals(persistentId.getProtocol())) {
+ xmlw.writeStartElement("web-urls");
+ xmlw.writeStartElement("url");
+ xmlw.writeCharacters(getPersistentId().asURL());
+ xmlw.writeEndElement(); // url
+ xmlw.writeEndElement(); // web-urls
+ } else if (DOI_PROTOCOL.equals(persistentId.getProtocol())) {
+ xmlw.writeStartElement("related-urls");
+ xmlw.writeStartElement("url");
+ xmlw.writeCharacters(getPersistentId().asURL());
+ xmlw.writeEndElement(); // url
+ xmlw.writeEndElement(); // related-urls
+ }
+ }
xmlw.writeEndElement(); // urls
// a DataFile citation also includes the filename and (for Tabular
@@ -617,10 +704,9 @@ private void createEndNoteXML(XMLStreamWriter xmlw) throws XMLStreamException {
xmlw.writeEndElement(); // custom2
}
}
- if (persistentId != null) {
+ if (persistentId != null && "doi".equals(persistentId.getProtocol())) {
xmlw.writeStartElement("electronic-resource-num");
- String electResourceNum = persistentId.getProtocol() + "/" + persistentId.getAuthority() + "/"
- + persistentId.getIdentifier();
+ String electResourceNum = persistentId.asRawIdentifier();
xmlw.writeCharacters(electResourceNum);
xmlw.writeEndElement();
}
@@ -650,9 +736,30 @@ public Map getDataCiteMetadata() {
metadata.put("datacite.publisher", producerString);
metadata.put("datacite.publicationyear", getYear());
return metadata;
- }
+ }
+
+ public JsonObject getCSLJsonFormat() {
+ CSLItemDataBuilder itemBuilder = new CSLItemDataBuilder();
+ if (type.equals(DatasetType.DATASET_TYPE_SOFTWARE)) {
+ itemBuilder.type(CSLType.SOFTWARE);
+ } else {
+ itemBuilder.type(CSLType.DATASET);
+ }
+ itemBuilder.title(formatString(title,true)).author((CSLName[]) cslAuthors.toArray(new CSLName[0])).issued(Integer.parseInt(year));
+ if (seriesTitles != null) {
+ itemBuilder.containerTitle(formatString(seriesTitles.get(0), true));
+ }
+ itemBuilder.version(version).DOI(persistentId.asString());
+ if (keywords != null) {
+ itemBuilder
+ .categories(keywords.stream().map(keyword -> formatString(keyword, true)).toArray(String[]::new));
+ }
+ itemBuilder.abstrct(formatString(description, true)).publisher(formatString(publisher, true))
+ .URL(SystemConfig.getDataverseSiteUrlStatic() + "/citation?persistentId=" + persistentId.asString());
+ JsonBuilder b = (new StringJsonBuilderFactory()).createJsonBuilder();
+ return JsonUtil.getJsonObject((String) itemBuilder.build().toJson(b));
+ }
-
// helper methods
private String formatString(String value, boolean escapeHtml) {
return formatString(value, escapeHtml, "");
@@ -759,6 +866,20 @@ private void getAuthorsAndProducersFrom(DatasetVersion dsv) {
if (!author.isEmpty()) {
String an = author.getName().getDisplayValue().trim();
authors.add(an);
+ boolean isOrg = "ROR".equals(author.getIdType());
+ JsonObject authorJson = PersonOrOrgUtil.getPersonOrOrganization(an, false, !isOrg);
+ if (!authorJson.getBoolean("isPerson")) {
+ cslAuthors.add(new CSLNameBuilder().literal(formatString(authorJson.getString("fullName"), true)).isInstitution(true).build());
+ } else {
+ if (authorJson.containsKey("givenName") && authorJson.containsKey("familyName")) {
+ String givenName = formatString(authorJson.getString("givenName"),true);
+ String familyName = formatString(authorJson.getString("familyName"), true);
+ cslAuthors.add(new CSLNameBuilder().given(givenName).family(familyName).isInstitution(false).build());
+ } else {
+ cslAuthors.add(
+ new CSLNameBuilder().literal(formatString(authorJson.getString("fullName"), true)).isInstitution(false).build());
+ }
+ }
}
});
producers = dsv.getDatasetProducerNames();
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
index 1a610d9ea6e..01c1a48e117 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
@@ -1142,4 +1142,12 @@ public boolean isDeaccessioned() {
}
return inDeaccessionedVersions; // since any published version would have already returned
}
+ public boolean isInDatasetVersion(DatasetVersion version) {
+ for (FileMetadata fmd : getFileMetadatas()) {
+ if (fmd.getDatasetVersion().equals(version)) {
+ return true;
+ }
+ }
+ return false;
+ }
} // end of class
diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java
index 78579b1de21..79c64d03d60 100644
--- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java
+++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java
@@ -68,6 +68,8 @@
query = "SELECT o FROM Dataset o WHERE o.creator.id=:creatorId"),
@NamedQuery(name = "Dataset.findByReleaseUserId",
query = "SELECT o FROM Dataset o WHERE o.releaseUser.id=:releaseUserId"),
+ @NamedQuery(name = "Dataset.countAll",
+ query = "SELECT COUNT(ds) FROM Dataset ds")
})
/*
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetAuthor.java b/src/main/java/edu/harvard/iq/dataverse/DatasetAuthor.java
index d33d709107f..bc85ab22e77 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetAuthor.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetAuthor.java
@@ -8,6 +8,8 @@
import java.util.Comparator;
+import org.apache.commons.lang3.StringUtils;
+
/**
*
* @author skraffmiller
@@ -81,8 +83,8 @@ public void setIdValue(String idValue) {
}
public boolean isEmpty() {
- return ( (affiliation==null || affiliation.getValue().trim().equals(""))
- && (name==null || name.getValue().trim().equals(""))
+ return ( (affiliation==null || StringUtils.isBlank(affiliation.getValue()))
+ && (name==null || StringUtils.isBlank(name.getValue()))
);
}
@@ -97,8 +99,13 @@ public static String getIdentifierAsUrl(String idType, String idValue) {
if (idType != null && !idType.isEmpty() && idValue != null && !idValue.isEmpty()) {
try {
ExternalIdentifier externalIdentifier = ExternalIdentifier.valueOf(idType);
- if (externalIdentifier.isValidIdentifier(idValue))
- return externalIdentifier.format(idValue);
+ if (externalIdentifier.isValidIdentifier(idValue)) {
+ String uri = externalIdentifier.format(idValue);
+ //The DAI identifier is a URI starting with "info" - we don't want to return it as a URL (we assume non-null URLs should be links in the display)
+ if(uri.startsWith("http")) {
+ return uri;
+ }
+ }
} catch (Exception e) {
// non registered identifier
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionNoteValidator.java b/src/main/java/edu/harvard/iq/dataverse/DatasetDeaccessionNoteValidator.java
similarity index 76%
rename from src/main/java/edu/harvard/iq/dataverse/DatasetVersionNoteValidator.java
rename to src/main/java/edu/harvard/iq/dataverse/DatasetDeaccessionNoteValidator.java
index a5ea487a68f..7c6263fe9b9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionNoteValidator.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetDeaccessionNoteValidator.java
@@ -13,28 +13,28 @@
*
* @author skraffmi
*/
-public class DatasetVersionNoteValidator implements ConstraintValidator {
+public class DatasetDeaccessionNoteValidator implements ConstraintValidator {
private String versionState;
- private String versionNote;
+ private String deaccessionNote;
@Override
- public void initialize(ValidateVersionNote constraintAnnotation) {
+ public void initialize(ValidateDeaccessionNote constraintAnnotation) {
versionState = constraintAnnotation.versionState();
- versionNote = constraintAnnotation.versionNote();
+ deaccessionNote = constraintAnnotation.deaccessionNote();
}
@Override
public boolean isValid(DatasetVersion value, ConstraintValidatorContext context) {
- if (versionState.equals(DatasetVersion.VersionState.DEACCESSIONED) && versionNote.isEmpty()){
+ if (versionState.equals(DatasetVersion.VersionState.DEACCESSIONED) && deaccessionNote.isEmpty()){
if (context != null) {
context.buildConstraintViolationWithTemplate(value + " " + BundleUtil.getStringFromBundle("file.deaccessionDialog.dialog.textForReason.error")).addConstraintViolation();
}
return false;
}
- if (versionState.equals(DatasetVersion.VersionState.DEACCESSIONED) && versionNote.length() > DatasetVersion.VERSION_NOTE_MAX_LENGTH){
+ if (versionState.equals(DatasetVersion.VersionState.DEACCESSIONED) && deaccessionNote.length() > DatasetVersion.VERSION_NOTE_MAX_LENGTH){
if (context != null) {
context.buildConstraintViolationWithTemplate(value + " " + BundleUtil.getStringFromBundle("file.deaccessionDialog.dialog.limitChar.error")).addConstraintViolation();
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java
index ded7c83de62..85639de9a59 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java
@@ -1,5 +1,6 @@
package edu.harvard.iq.dataverse;
+import edu.harvard.iq.dataverse.dataset.DatasetType;
import java.io.IOException;
import java.io.StringReader;
import java.net.URI;
@@ -92,7 +93,7 @@ public class DatasetFieldServiceBean implements java.io.Serializable {
String oldHash = null;
public List findAllAdvancedSearchFieldTypes() {
- return em.createQuery("select object(o) from DatasetFieldType as o where o.advancedSearchFieldType = true and o.title != '' order by o.id", DatasetFieldType.class).getResultList();
+ return em.createQuery("select object(o) from DatasetFieldType as o where o.advancedSearchFieldType = true and o.title != '' order by o.displayOrder,o.id", DatasetFieldType.class).getResultList();
}
public List findAllFacetableFieldTypes() {
@@ -871,7 +872,7 @@ public List findAllDisplayedOnCreateInMetadataBlock(MetadataBl
Root metadataBlockRoot = criteriaQuery.from(MetadataBlock.class);
Root datasetFieldTypeRoot = criteriaQuery.from(DatasetFieldType.class);
- Predicate requiredInDataversePredicate = buildRequiredInDataversePredicate(criteriaBuilder, datasetFieldTypeRoot);
+ Predicate fieldRequiredInTheInstallation = buildFieldRequiredInTheInstallationPredicate(criteriaBuilder, datasetFieldTypeRoot);
criteriaQuery.where(
criteriaBuilder.and(
@@ -879,7 +880,7 @@ public List findAllDisplayedOnCreateInMetadataBlock(MetadataBl
datasetFieldTypeRoot.in(metadataBlockRoot.get("datasetFieldTypes")),
criteriaBuilder.or(
criteriaBuilder.isTrue(datasetFieldTypeRoot.get("displayOnCreate")),
- requiredInDataversePredicate
+ fieldRequiredInTheInstallation
)
)
);
@@ -890,9 +891,9 @@ public List findAllDisplayedOnCreateInMetadataBlock(MetadataBl
return typedQuery.getResultList();
}
- public List findAllInMetadataBlockAndDataverse(MetadataBlock metadataBlock, Dataverse dataverse, boolean onlyDisplayedOnCreate) {
+ public List findAllInMetadataBlockAndDataverse(MetadataBlock metadataBlock, Dataverse dataverse, boolean onlyDisplayedOnCreate, DatasetType datasetType) {
if (!dataverse.isMetadataBlockRoot() && dataverse.getOwner() != null) {
- return findAllInMetadataBlockAndDataverse(metadataBlock, dataverse.getOwner(), onlyDisplayedOnCreate);
+ return findAllInMetadataBlockAndDataverse(metadataBlock, dataverse.getOwner(), onlyDisplayedOnCreate, datasetType);
}
CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder();
@@ -900,6 +901,29 @@ public List findAllInMetadataBlockAndDataverse(MetadataBlock m
Root metadataBlockRoot = criteriaQuery.from(MetadataBlock.class);
Root datasetFieldTypeRoot = criteriaQuery.from(DatasetFieldType.class);
+
+ // Build the main predicate to include fields that belong to the specified dataverse and metadataBlock and match the onlyDisplayedOnCreate value.
+ Predicate fieldPresentInDataverse = buildFieldPresentInDataversePredicate(dataverse, onlyDisplayedOnCreate, criteriaQuery, criteriaBuilder, datasetFieldTypeRoot, metadataBlockRoot);
+
+ // Build an additional predicate to include fields from the datasetType, if the datasetType is specified and contains the given metadataBlock.
+ Predicate fieldPresentInDatasetType = buildFieldPresentInDatasetTypePredicate(datasetType, criteriaQuery, criteriaBuilder, datasetFieldTypeRoot, metadataBlockRoot, onlyDisplayedOnCreate);
+
+ // Build the final WHERE clause by combining all the predicates.
+ criteriaQuery.where(
+ criteriaBuilder.equal(metadataBlockRoot.get("id"), metadataBlock.getId()), // Match the MetadataBlock ID.
+ datasetFieldTypeRoot.in(metadataBlockRoot.get("datasetFieldTypes")), // Ensure the DatasetFieldType is part of the MetadataBlock.
+ criteriaBuilder.or(
+ fieldPresentInDataverse,
+ fieldPresentInDatasetType
+ )
+ );
+
+ criteriaQuery.select(datasetFieldTypeRoot);
+
+ return em.createQuery(criteriaQuery).getResultList();
+ }
+
+ private Predicate buildFieldPresentInDataversePredicate(Dataverse dataverse, boolean onlyDisplayedOnCreate, CriteriaQuery criteriaQuery, CriteriaBuilder criteriaBuilder, Root datasetFieldTypeRoot, Root metadataBlockRoot) {
Root dataverseRoot = criteriaQuery.from(Dataverse.class);
// Join Dataverse with DataverseFieldTypeInputLevel on the "dataverseFieldTypeInputLevels" attribute, using a LEFT JOIN.
@@ -917,20 +941,27 @@ public List findAllInMetadataBlockAndDataverse(MetadataBlock m
criteriaBuilder.isTrue(datasetFieldTypeInputLevelJoin.get("required"))
);
+ // Predicate for displayOnCreate in input level
+ Predicate displayOnCreateInputLevelPredicate = criteriaBuilder.and(
+ criteriaBuilder.equal(datasetFieldTypeRoot, datasetFieldTypeInputLevelJoin.get("datasetFieldType")),
+ criteriaBuilder.equal(datasetFieldTypeInputLevelJoin.get("displayOnCreate"), Boolean.TRUE)
+ );
+
// Create a subquery to check for the absence of a specific DataverseFieldTypeInputLevel.
Subquery subquery = criteriaQuery.subquery(Long.class);
Root subqueryRoot = subquery.from(DataverseFieldTypeInputLevel.class);
subquery.select(criteriaBuilder.literal(1L))
.where(
criteriaBuilder.equal(subqueryRoot.get("dataverse"), dataverseRoot),
- criteriaBuilder.equal(subqueryRoot.get("datasetFieldType"), datasetFieldTypeRoot)
+ criteriaBuilder.equal(subqueryRoot.get("datasetFieldType"), datasetFieldTypeRoot),
+ criteriaBuilder.isNotNull(subqueryRoot.get("displayOnCreate"))
);
// Define a predicate to exclude DatasetFieldTypes that have no associated input level (i.e., the subquery does not return a result).
Predicate hasNoInputLevelPredicate = criteriaBuilder.not(criteriaBuilder.exists(subquery));
// Define a predicate to include the required fields in Dataverse.
- Predicate requiredInDataversePredicate = buildRequiredInDataversePredicate(criteriaBuilder, datasetFieldTypeRoot);
+ Predicate fieldRequiredInTheInstallation = buildFieldRequiredInTheInstallationPredicate(criteriaBuilder, datasetFieldTypeRoot);
// Define a predicate for displaying DatasetFieldTypes on create.
// If onlyDisplayedOnCreate is true, include fields that:
@@ -939,30 +970,68 @@ public List findAllInMetadataBlockAndDataverse(MetadataBlock m
// Otherwise, use an always-true predicate (conjunction).
Predicate displayedOnCreatePredicate = onlyDisplayedOnCreate
? criteriaBuilder.or(
- criteriaBuilder.or(
+ // 1. Field marked as displayOnCreate in input level
+ displayOnCreateInputLevelPredicate,
+
+ // 2. Field without input level that is marked as displayOnCreate or required
+ criteriaBuilder.and(
+ hasNoInputLevelPredicate,
+ criteriaBuilder.or(
criteriaBuilder.isTrue(datasetFieldTypeRoot.get("displayOnCreate")),
- requiredInDataversePredicate
+ fieldRequiredInTheInstallation
+ )
),
+
+ // 3. Field required by input level
requiredAsInputLevelPredicate
)
: criteriaBuilder.conjunction();
- // Build the final WHERE clause by combining all the predicates.
- criteriaQuery.where(
+ // Combine all the predicates.
+ return criteriaBuilder.and(
criteriaBuilder.equal(dataverseRoot.get("id"), dataverse.getId()), // Match the Dataverse ID.
- criteriaBuilder.equal(metadataBlockRoot.get("id"), metadataBlock.getId()), // Match the MetadataBlock ID.
metadataBlockRoot.in(dataverseRoot.get("metadataBlocks")), // Ensure the MetadataBlock is part of the Dataverse.
- datasetFieldTypeRoot.in(metadataBlockRoot.get("datasetFieldTypes")), // Ensure the DatasetFieldType is part of the MetadataBlock.
criteriaBuilder.or(includedAsInputLevelPredicate, hasNoInputLevelPredicate), // Include DatasetFieldTypes based on the input level predicates.
displayedOnCreatePredicate // Apply the display-on-create filter if necessary.
);
+ }
- criteriaQuery.select(datasetFieldTypeRoot).distinct(true);
-
- return em.createQuery(criteriaQuery).getResultList();
+ private Predicate buildFieldPresentInDatasetTypePredicate(DatasetType datasetType,
+ CriteriaQuery criteriaQuery,
+ CriteriaBuilder criteriaBuilder,
+ Root datasetFieldTypeRoot,
+ Root metadataBlockRoot,
+ boolean onlyDisplayedOnCreate) {
+ Predicate datasetTypePredicate = criteriaBuilder.isFalse(criteriaBuilder.literal(true)); // Initialize datasetTypePredicate to always false by default
+ if (datasetType != null) {
+ // Create a subquery to check for the presence of the specified metadataBlock within the datasetType
+ Subquery datasetTypeSubquery = criteriaQuery.subquery(Long.class);
+ Root datasetTypeRoot = criteriaQuery.from(DatasetType.class);
+
+ // Define a predicate for displaying DatasetFieldTypes on create.
+ // If onlyDisplayedOnCreate is true, include fields that are either marked as displayed on create OR marked as required.
+ // Otherwise, use an always-true predicate (conjunction).
+ Predicate displayedOnCreatePredicate = onlyDisplayedOnCreate ?
+ criteriaBuilder.or(
+ criteriaBuilder.isTrue(datasetFieldTypeRoot.get("displayOnCreate")),
+ buildFieldRequiredInTheInstallationPredicate(criteriaBuilder, datasetFieldTypeRoot)
+ )
+ : criteriaBuilder.conjunction();
+
+ datasetTypeSubquery.select(criteriaBuilder.literal(1L))
+ .where(
+ criteriaBuilder.equal(datasetTypeRoot.get("id"), datasetType.getId()), // Match the DatasetType ID.
+ metadataBlockRoot.in(datasetTypeRoot.get("metadataBlocks")), // Ensure the metadataBlock is included in the datasetType's list of metadata blocks.
+ displayedOnCreatePredicate
+ );
+
+ // Now set the datasetTypePredicate to true if the subquery finds a matching metadataBlock
+ datasetTypePredicate = criteriaBuilder.exists(datasetTypeSubquery);
+ }
+ return datasetTypePredicate;
}
- private Predicate buildRequiredInDataversePredicate(CriteriaBuilder criteriaBuilder, Root datasetFieldTypeRoot) {
+ private Predicate buildFieldRequiredInTheInstallationPredicate(CriteriaBuilder criteriaBuilder, Root datasetFieldTypeRoot) {
// Predicate to check if the current DatasetFieldType is required.
Predicate isRequired = criteriaBuilder.isTrue(datasetFieldTypeRoot.get("required"));
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java
index 01785359e0e..32a23e06761 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java
@@ -282,7 +282,26 @@ public boolean isDisplayOnCreate() {
public void setDisplayOnCreate(boolean displayOnCreate) {
this.displayOnCreate = displayOnCreate;
}
+
+ /**
+ * Determines whether this field type is displayed in the form when creating
+ * the Dataset (or only later when editing after the initial creation).
+ */
+ @Transient
+ private Boolean localDisplayOnCreate;
+
+ public Boolean getLocalDisplayOnCreate() {
+ return localDisplayOnCreate;
+ }
+
+ public void setLocalDisplayOnCreate(Boolean localDisplayOnCreate) {
+ this.localDisplayOnCreate = localDisplayOnCreate;
+ }
+ public boolean shouldDisplayOnCreate() {
+ return (localDisplayOnCreate == null) ? displayOnCreate : localDisplayOnCreate;
+ }
+
public boolean isControlledVocabulary() {
return allowControlledVocabulary;
}
@@ -531,28 +550,36 @@ public String getDisplayName() {
public SolrField getSolrField() {
SolrField.SolrType solrType = SolrField.SolrType.TEXT_EN;
if (fieldType != null) {
-
- /**
- * @todo made more decisions based on fieldType: index as dates,
- * integers, and floats so we can do range queries etc.
- */
if (fieldType.equals(FieldType.DATE)) {
solrType = SolrField.SolrType.DATE;
} else if (fieldType.equals(FieldType.EMAIL)) {
solrType = SolrField.SolrType.EMAIL;
+ } else if (fieldType.equals(FieldType.INT)) {
+ solrType = SolrField.SolrType.INTEGER;
+ } else if (fieldType.equals(FieldType.FLOAT)) {
+ solrType = SolrField.SolrType.FLOAT;
}
- Boolean parentAllowsMultiplesBoolean = false;
- if (isHasParent()) {
- if (getParentDatasetFieldType() != null) {
- DatasetFieldType parent = getParentDatasetFieldType();
- parentAllowsMultiplesBoolean = parent.isAllowMultiples();
+ Boolean anyParentAllowsMultiplesBoolean = false;
+ DatasetFieldType currentDatasetFieldType = this;
+ // Traverse up through all parents of dataset field type
+ // If any one of them allows multiples, this child's Solr field must be multi-valued
+ while (currentDatasetFieldType.isHasParent()) {
+ if (currentDatasetFieldType.getParentDatasetFieldType() != null) {
+ DatasetFieldType parent = currentDatasetFieldType.getParentDatasetFieldType();
+ if (parent.isAllowMultiples()) {
+ anyParentAllowsMultiplesBoolean = true;
+ break; // no need to keep traversing
+ }
+ currentDatasetFieldType = parent;
+ } else {
+ break;
}
}
boolean makeSolrFieldMultivalued;
// http://stackoverflow.com/questions/5800762/what-is-the-use-of-multivalued-field-type-in-solr
- if (allowMultiples || parentAllowsMultiplesBoolean || isControlledVocabulary()) {
+ if (allowMultiples || anyParentAllowsMultiplesBoolean || isControlledVocabulary()) {
makeSolrFieldMultivalued = true;
} else {
makeSolrFieldMultivalued = false;
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValueValidator.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValueValidator.java
index 610bb70ff49..74d3cbf73f0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValueValidator.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValueValidator.java
@@ -241,10 +241,6 @@ private boolean isValidDate(String dateString, String pattern) {
return valid;
}
- public boolean isValidAuthorIdentifier(String userInput, Pattern pattern) {
- return pattern.matcher(userInput).matches();
- }
-
// Validate child fields against each other and return failure message or Optional.empty() if success
public Optional validateChildConstraints(DatasetField dsf) {
final String fieldName = dsf.getDatasetFieldType().getName() != null ? dsf.getDatasetFieldType().getName() : "";
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
index 33a093c8044..af8cdc21968 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
@@ -57,6 +57,7 @@
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import edu.harvard.iq.dataverse.util.ArchiverUtil;
import edu.harvard.iq.dataverse.util.BundleUtil;
+import edu.harvard.iq.dataverse.util.CSLUtil;
import edu.harvard.iq.dataverse.util.DataFileComparator;
import edu.harvard.iq.dataverse.util.FileSortFieldAndOrder;
import edu.harvard.iq.dataverse.util.FileUtil;
@@ -163,7 +164,7 @@
import edu.harvard.iq.dataverse.util.FileMetadataUtil;
import java.util.Comparator;
import org.apache.solr.client.solrj.SolrQuery;
-import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.client.solrj.impl.BaseHttpSolrClient.RemoteSolrException;
import org.apache.solr.client.solrj.response.FacetField;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
@@ -184,7 +185,7 @@ public class DatasetPage implements java.io.Serializable {
public enum EditMode {
- CREATE, INFO, FILE, METADATA, LICENSE
+ CREATE, INFO, FILE, METADATA, LICENSE, VERSIONNOTE
};
public enum DisplayMode {
@@ -1041,7 +1042,7 @@ public Set getFileIdsInVersionFromSolr(Long datasetVersionId, String patte
try {
queryResponse = solrClientService.getSolrClient().query(solrQuery);
- } catch (HttpSolrClient.RemoteSolrException ex) {
+ } catch (RemoteSolrException ex) {
logger.fine("Remote Solr Exception: " + ex.getLocalizedMessage());
String msg = ex.getLocalizedMessage();
if (msg.contains(SearchFields.FILE_DELETED)) {
@@ -1855,6 +1856,10 @@ private void updateDatasetFieldInputLevels() {
if (dsf != null){
// Yes, call "setInclude"
dsf.setInclude(oneDSFieldTypeInputLevel.isInclude());
+ Boolean displayOnCreate = oneDSFieldTypeInputLevel.getDisplayOnCreate();
+ if (displayOnCreate!= null) {
+ dsf.getDatasetFieldType().setLocalDisplayOnCreate(displayOnCreate);
+ }
// remove from hash
mapDatasetFields.remove(oneDSFieldTypeInputLevel.getDatasetFieldType().getId());
}
@@ -1985,6 +1990,7 @@ private String init(boolean initFull) {
setDataverseSiteUrl(systemConfig.getDataverseSiteUrl());
guestbookResponse = new GuestbookResponse();
+ anonymizedAccess = null;
String sortOrder = getSortOrder();
if(sortOrder != null) {
@@ -2118,6 +2124,7 @@ private String init(boolean initFull) {
if (workingVersion.isDraft() && canUpdateDataset()) {
readOnly = false;
}
+ publishDialogVersionNote = workingVersion.getVersionNote();
// This will default to all the files in the version, if the search term
// parameter hasn't been specified yet:
fileMetadatasSearch = selectFileMetadatasForDisplay();
@@ -2609,7 +2616,7 @@ private void resetVersionUI() {
}
}
- String creatorOrcidId = au.getOrcidId();
+ String creatorOrcidId = au.getAuthenticatedOrcid();
if (dsf.getDatasetFieldType().getName().equals(DatasetFieldConstant.author) && dsf.isEmpty()) {
for (DatasetFieldCompoundValue authorValue : dsf.getDatasetFieldCompoundValues()) {
for (DatasetField subField : authorValue.getChildDatasetFields()) {
@@ -2774,6 +2781,7 @@ public String releaseDataset() {
if(!dataset.getOwner().isReleased()){
releaseParentDV();
}
+ workingVersion.setVersionNote(publishDialogVersionNote);
if(publishDatasetPopup()|| publishBothPopup() || !dataset.getLatestVersion().isMinorUpdate()){
return releaseDataset(false);
}
@@ -2840,35 +2848,35 @@ private DatasetVersion setDatasetVersionDeaccessionReasonAndURL(DatasetVersion d
String deacessionReasonDetail = getDeaccessionReasonText() != null ? ( getDeaccessionReasonText()).trim() : "";
switch (deaccessionReasonCode) {
case 1:
- dvIn.setVersionNote(BundleUtil.getStringFromBundle("file.deaccessionDialog.reason.selectItem.identifiable") );
+ dvIn.setDeaccessionNote(BundleUtil.getStringFromBundle("file.deaccessionDialog.reason.selectItem.identifiable") );
break;
case 2:
- dvIn.setVersionNote(BundleUtil.getStringFromBundle("file.deaccessionDialog.reason.selectItem.beRetracted") );
+ dvIn.setDeaccessionNote(BundleUtil.getStringFromBundle("file.deaccessionDialog.reason.selectItem.beRetracted") );
break;
case 3:
- dvIn.setVersionNote(BundleUtil.getStringFromBundle("file.deaccessionDialog.reason.selectItem.beTransferred") );
+ dvIn.setDeaccessionNote(BundleUtil.getStringFromBundle("file.deaccessionDialog.reason.selectItem.beTransferred") );
break;
case 4:
- dvIn.setVersionNote(BundleUtil.getStringFromBundle("file.deaccessionDialog.reason.selectItem.IRB"));
+ dvIn.setDeaccessionNote(BundleUtil.getStringFromBundle("file.deaccessionDialog.reason.selectItem.IRB"));
break;
case 5:
- dvIn.setVersionNote(BundleUtil.getStringFromBundle("file.deaccessionDialog.reason.selectItem.legalIssue"));
+ dvIn.setDeaccessionNote(BundleUtil.getStringFromBundle("file.deaccessionDialog.reason.selectItem.legalIssue"));
break;
case 6:
- dvIn.setVersionNote(BundleUtil.getStringFromBundle("file.deaccessionDialog.reason.selectItem.notValid"));
+ dvIn.setDeaccessionNote(BundleUtil.getStringFromBundle("file.deaccessionDialog.reason.selectItem.notValid"));
break;
case 7:
break;
}
if (!deacessionReasonDetail.isEmpty()){
- if (!StringUtil.isEmpty(dvIn.getVersionNote())){
- dvIn.setVersionNote(dvIn.getVersionNote() + " " + deacessionReasonDetail);
+ if (!StringUtil.isEmpty(dvIn.getDeaccessionNote())){
+ dvIn.setDeaccessionNote(dvIn.getDeaccessionNote() + " " + deacessionReasonDetail);
} else {
- dvIn.setVersionNote(deacessionReasonDetail);
+ dvIn.setDeaccessionNote(deacessionReasonDetail);
}
}
- dvIn.setArchiveNote(getDeaccessionForwardURLFor());
+ dvIn.setDeaccessionLink(getDeaccessionForwardURLFor());
return dvIn;
}
@@ -3934,7 +3942,7 @@ public void validateForwardURL(FacesContext context, UIComponent toValidate, Obj
return;
}
- if (value.toString().length() <= DatasetVersion.ARCHIVE_NOTE_MAX_LENGTH) {
+ if (value.toString().length() <= DatasetVersion.DEACCESSION_NOTE_MAX_LENGTH) {
((UIInput) toValidate).setValid(true);
} else {
((UIInput) toValidate).setValid(false);
@@ -4105,8 +4113,9 @@ public String save() {
}
if (editMode.equals(EditMode.FILE)) {
JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.filesSuccess"));
+ } if (editMode.equals(EditMode.VERSIONNOTE)) {
+ JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.message.versionNoteSuccess"));
}
-
} else {
// must have been a bulk file update or delete:
if (bulkFileDeleteInProgress) {
@@ -5695,7 +5704,7 @@ public String getPrivateUrlLink(PrivateUrl privateUrl) {
public boolean isAnonymizedAccess() {
if (anonymizedAccess == null) {
- if (session.getUser() instanceof PrivateUrlUser) {
+ if (session.getUser() instanceof PrivateUrlUser && workingVersion.isDraft()) {
anonymizedAccess = ((PrivateUrlUser) session.getUser()).hasAnonymizedAccess();
} else {
anonymizedAccess = false;
@@ -5719,6 +5728,22 @@ public boolean isAnonymizedAccessEnabled() {
return false;
}
}
+
+ String anonymizedFieldTypeNames = null;
+
+ public String getAnonymizedFieldTypeNames() {
+ if (anonymizedFieldTypeNames != null) {
+ return anonymizedFieldTypeNames;
+ }
+ if (settingsWrapper.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames) != null) {
+ anonymizedFieldTypeNames = settingsWrapper.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames);
+
+ } else {
+ anonymizedFieldTypeNames = "";
+
+ }
+ return anonymizedFieldTypeNames;
+ }
// todo: we should be able to remove - this is passed in the html pages to other fragments, but they could just access this service bean directly.
public FileDownloadServiceBean getFileDownloadService() {
@@ -5846,13 +5871,12 @@ public List getDatasetSummaryFields() {
return DatasetUtil.getDatasetSummaryFields(workingVersion, customFields);
}
- public boolean isShowPreviewButton(Long fileId) {
- List previewTools = getPreviewToolsForDataFile(fileId);
+ public boolean isShowPreviewButton(DataFile dataFile) {
+ List previewTools = getPreviewToolsForDataFile(dataFile);
return previewTools.size() > 0;
}
- public boolean isShowQueryButton(Long fileId) {
- DataFile dataFile = datafileService.find(fileId);
+ public boolean isShowQueryButton(DataFile dataFile) {
if(dataFile.isRestricted()
|| !dataFile.isReleased()
@@ -5861,26 +5885,28 @@ public boolean isShowQueryButton(Long fileId) {
return false;
}
- List fileQueryTools = getQueryToolsForDataFile(fileId);
+ List fileQueryTools = getQueryToolsForDataFile(dataFile);
return fileQueryTools.size() > 0;
}
- public List getPreviewToolsForDataFile(Long fileId) {
- return getCachedToolsForDataFile(fileId, ExternalTool.Type.PREVIEW);
+ public List getPreviewToolsForDataFile(DataFile dataFile) {
+ return getCachedToolsForDataFile(dataFile, ExternalTool.Type.PREVIEW);
}
- public List getQueryToolsForDataFile(Long fileId) {
- return getCachedToolsForDataFile(fileId, ExternalTool.Type.QUERY);
+ public List getQueryToolsForDataFile(DataFile dataFile) {
+ return getCachedToolsForDataFile(dataFile, ExternalTool.Type.QUERY);
}
- public List getConfigureToolsForDataFile(Long fileId) {
- return getCachedToolsForDataFile(fileId, ExternalTool.Type.CONFIGURE);
+
+ public List getConfigureToolsForDataFile(DataFile dataFile) {
+ return getCachedToolsForDataFile(dataFile, ExternalTool.Type.CONFIGURE);
}
- public List getExploreToolsForDataFile(Long fileId) {
- return getCachedToolsForDataFile(fileId, ExternalTool.Type.EXPLORE);
+ public List getExploreToolsForDataFile(DataFile dataFile) {
+ return getCachedToolsForDataFile(dataFile, ExternalTool.Type.EXPLORE);
}
- public List getCachedToolsForDataFile(Long fileId, ExternalTool.Type type) {
+ public List getCachedToolsForDataFile(DataFile dataFile, ExternalTool.Type type) {
+ Long fileId = dataFile.getId();
Map> cachedToolsByFileId = new HashMap<>();
List externalTools = new ArrayList<>();
switch (type) {
@@ -5907,7 +5933,6 @@ public List getCachedToolsForDataFile(Long fileId, ExternalTool.Ty
if (cachedTools != null) { //if already queried before and added to list
return cachedTools;
}
- DataFile dataFile = datafileService.find(fileId);
cachedTools = externalToolService.findExternalToolsByFile(externalTools, dataFile);
cachedToolsByFileId.put(fileId, cachedTools); //add to map so we don't have to do the lifting again
return cachedTools;
@@ -6214,7 +6239,7 @@ public String getEffectiveMetadataLanguage(boolean ofParent) {
public String getLocaleDisplayName(String code) {
String displayName = settingsWrapper.getBaseMetadataLanguageMap(false).get(code);
- if(displayName==null && !code.equals(DvObjectContainer.UNDEFINED_CODE)) {
+ if(displayName==null && code!=null && !code.equals(DvObjectContainer.UNDEFINED_CODE)) {
//Default (for cases such as :when a Dataset has a metadatalanguage code but :MetadataLanguages is no longer defined).
displayName = new Locale(code).getDisplayName();
}
@@ -6230,6 +6255,11 @@ public List getVocabScripts() {
}
public String getFieldLanguage(String languages) {
+ //Prevent NPE in Payara 6-2024-12 with CVoc
+ logger.fine("Languages: " + languages);
+ if(languages==null) {
+ languages="";
+ }
return fieldService.getFieldLanguage(languages,session.getLocaleCode());
}
@@ -6702,6 +6732,7 @@ public boolean isGlobusTransferRequested() {
* valid files to transfer.
*/
public void startGlobusTransfer(boolean transferAll, boolean popupShown) {
+ logger.fine("inside startGlobusTransfer; "+(transferAll ? "transferAll" : "NOTtransferAll") + " " + (popupShown ? "popupShown" : "NOTpopupShown"));
if (transferAll) {
this.setSelectedFiles(workingVersion.getFileMetadatas());
}
@@ -6763,5 +6794,31 @@ public String getSignpostingLinkHeader() {
public boolean isDOI() {
return AbstractDOIProvider.DOI_PROTOCOL.equals(dataset.getGlobalId().getProtocol());
}
+
+ public void saveVersionNote() {
+ this.editMode=EditMode.VERSIONNOTE;
+ publishDialogVersionNote = workingVersion.getVersionNote();
+ save();
+ }
+ String publishDialogVersionNote = null;
+
+ // Make separate property for versionNote - can't have two p:dialogs changing the same property
+ public String getPublishDialogVersionNote() {
+ return publishDialogVersionNote;
+ }
+
+ public void setPublishDialogVersionNote(String note) {
+ publishDialogVersionNote =note;
+ }
+
+ String requestedCSL = CSLUtil.getDefaultStyle();
+
+ public String getRequestedCSL() {
+ return requestedCSL;
+ }
+
+ public void setRequestedCSL(String requestedCSL) {
+ this.requestedCSL = requestedCSL;
+ }
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
index e519614ba55..9a8c43668cb 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
@@ -1092,4 +1092,12 @@ public List getVersionStates(long id) {
}
}
+ /**
+ * Returns the total number of Datasets.
+ * @return the number of datasets in the database
+ */
+ public long getDatasetCount() {
+ return em.createNamedQuery("Dataset.countAll", Long.class).getSingleResult();
+ }
+
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java
index a7bbc7c3ad4..6c1000f2170 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java
@@ -8,7 +8,6 @@
import edu.harvard.iq.dataverse.branding.BrandingUtil;
import edu.harvard.iq.dataverse.dataset.DatasetUtil;
import edu.harvard.iq.dataverse.license.License;
-import edu.harvard.iq.dataverse.pidproviders.PidUtil;
import edu.harvard.iq.dataverse.util.FileUtil;
import edu.harvard.iq.dataverse.util.StringUtil;
import edu.harvard.iq.dataverse.util.SystemConfig;
@@ -17,6 +16,9 @@
import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder;
import edu.harvard.iq.dataverse.workflows.WorkflowComment;
import java.io.Serializable;
+import java.net.MalformedURLException;
+import java.net.URI;
+import java.net.URISyntaxException;
import java.sql.Timestamp;
import java.text.ParseException;
import java.text.SimpleDateFormat;
@@ -80,7 +82,7 @@
@Entity
@Table(indexes = {@Index(columnList="dataset_id")},
uniqueConstraints = @UniqueConstraint(columnNames = {"dataset_id,versionnumber,minorversionnumber"}))
-@ValidateVersionNote(versionNote = "versionNote", versionState = "versionState")
+@ValidateDeaccessionNote(deaccessionNote = "deaccessionNote", versionState = "versionState")
public class DatasetVersion implements Serializable {
private static final Logger logger = Logger.getLogger(DatasetVersion.class.getCanonicalName());
@@ -114,7 +116,8 @@ public enum VersionState {
DRAFT, RELEASED, ARCHIVED, DEACCESSIONED
}
- public static final int ARCHIVE_NOTE_MAX_LENGTH = 1000;
+ public static final int DEACCESSION_NOTE_MAX_LENGTH = 1000;
+ public static final int DEACCESSION_LINK_MAX_LENGTH = 1260; //Long enough to cover the case where a legacy deaccessionLink(256 char) and archiveNote (1000) are combined (with a space)
public static final int VERSION_NOTE_MAX_LENGTH = 1000;
//Archival copies: Status message required components
@@ -137,10 +140,16 @@ public enum VersionState {
private Long versionNumber;
private Long minorVersionNumber;
+ //This is used for the deaccession reason
+ @Size(min=0, max=DEACCESSION_NOTE_MAX_LENGTH)
+ @Column(length = DEACCESSION_NOTE_MAX_LENGTH)
+ private String deaccessionNote;
+
+ //This is a plain text, optional reason for the version's creation
@Size(min=0, max=VERSION_NOTE_MAX_LENGTH)
@Column(length = VERSION_NOTE_MAX_LENGTH)
private String versionNote;
-
+
/*
* @todo versionState should never be null so when we are ready, uncomment
* the `nullable = false` below.
@@ -177,12 +186,6 @@ public enum VersionState {
@Temporal(value = TemporalType.TIMESTAMP)
private Date archiveTime;
- @Size(min=0, max=ARCHIVE_NOTE_MAX_LENGTH)
- @Column(length = ARCHIVE_NOTE_MAX_LENGTH)
- //@ValidateURL() - this validation rule was making a bunch of older legacy datasets invalid;
- // removed pending further investigation (v4.13)
- private String archiveNote;
-
// Originally a simple string indicating the location of the archival copy. As
// of v5.12, repurposed to provide a more general json archival status (failure,
// pending, success) and message (serialized as a string). The archival copy
@@ -191,7 +194,9 @@ public enum VersionState {
@Column(nullable=true, columnDefinition = "TEXT")
private String archivalCopyLocation;
-
+ //This is used for the deaccession reason
+ @Size(min=0, max=DEACCESSION_LINK_MAX_LENGTH)
+ @Column(length = DEACCESSION_LINK_MAX_LENGTH)
private String deaccessionLink;
@Transient
@@ -361,19 +366,6 @@ public void setArchiveTime(Date archiveTime) {
this.archiveTime = archiveTime;
}
- public String getArchiveNote() {
- return archiveNote;
- }
-
- public void setArchiveNote(String note) {
- // @todo should this be using bean validation for trsting note length?
- if (note != null && note.length() > ARCHIVE_NOTE_MAX_LENGTH) {
- throw new IllegalArgumentException("Error setting archiveNote: String length is greater than maximum (" + ARCHIVE_NOTE_MAX_LENGTH + ")."
- + " StudyVersion id=" + id + ", archiveNote=" + note);
- }
- this.archiveNote = note;
- }
-
public String getArchivalCopyLocation() {
return archivalCopyLocation;
}
@@ -417,11 +409,21 @@ public String getDeaccessionLink() {
}
public void setDeaccessionLink(String deaccessionLink) {
+ if (deaccessionLink != null && deaccessionLink.length() > DEACCESSION_LINK_MAX_LENGTH) {
+ throw new IllegalArgumentException("Error setting deaccessionLink: String length is greater than maximum (" + DEACCESSION_LINK_MAX_LENGTH + ")."
+ + " StudyVersion id=" + id + ", deaccessionLink=" + deaccessionLink);
+ }
this.deaccessionLink = deaccessionLink;
}
- public GlobalId getDeaccessionLinkAsGlobalId() {
- return PidUtil.parseAsGlobalID(deaccessionLink);
+ public String getDeaccessionLinkAsURLString() {
+ String dLink = null;
+ try {
+ dLink = new URI(deaccessionLink).toURL().toExternalForm();
+ } catch (URISyntaxException | MalformedURLException e) {
+ logger.fine("Invalid deaccessionLink - not a URL: " + deaccessionLink);
+ }
+ return dLink;
}
public Date getCreateTime() {
@@ -490,8 +492,8 @@ public void setContributorNames(String contributorNames) {
}
- public String getVersionNote() {
- return versionNote;
+ public String getDeaccessionNote() {
+ return deaccessionNote;
}
public DatasetVersionDifference getDefaultVersionDifference() {
@@ -541,12 +543,12 @@ public VersionState getPriorVersionState() {
return null;
}
- public void setVersionNote(String note) {
- if (note != null && note.length() > VERSION_NOTE_MAX_LENGTH) {
- throw new IllegalArgumentException("Error setting versionNote: String length is greater than maximum (" + VERSION_NOTE_MAX_LENGTH + ")."
- + " StudyVersion id=" + id + ", versionNote=" + note);
+ public void setDeaccessionNote(String note) {
+ if (note != null && note.length() > DEACCESSION_NOTE_MAX_LENGTH) {
+ throw new IllegalArgumentException("Error setting deaccessionNote: String length is greater than maximum (" + DEACCESSION_NOTE_MAX_LENGTH + ")."
+ + " StudyVersion id=" + id + ", deaccessionNote=" + note);
}
- this.versionNote = note;
+ this.deaccessionNote = note;
}
public Long getVersionNumber() {
@@ -1483,11 +1485,14 @@ public String getCitation() {
}
public String getCitation(boolean html) {
- return getCitation(html, false);
+ return getCitation(DataCitation.Format.Internal, html, false);
}
-
public String getCitation(boolean html, boolean anonymized) {
- return new DataCitation(this).toString(html, anonymized);
+ return getCitation(DataCitation.Format.Internal, html, anonymized);
+ }
+
+ public String getCitation(DataCitation.Format format, boolean html, boolean anonymized) {
+ return new DataCitation(this).toString(format, html, anonymized);
}
public Date getCitationDate() {
@@ -2158,4 +2163,17 @@ public void setExternalStatusLabel(String externalStatusLabel) {
this.externalStatusLabel = externalStatusLabel;
}
+ public String getVersionNote() {
+ return versionNote;
+ }
+
+ public void setVersionNote(String note) {
+ if (note != null && note.length() > VERSION_NOTE_MAX_LENGTH) {
+ throw new IllegalArgumentException("Error setting versionNote: String length is greater than maximum (" + VERSION_NOTE_MAX_LENGTH + ")."
+ + " StudyVersion id=" + id + ", versionNote=" + note);
+ }
+
+ this.versionNote = note;
+ }
}
+
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java
index c5d6c31386c..3ea6e662751 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java
@@ -46,8 +46,8 @@ public final class DatasetVersionDifference {
private List changedVariableMetadata = new ArrayList<>();
private List replacedFiles = new ArrayList<>();
private List changedTermsAccess = new ArrayList<>();
- private List