diff --git a/Vagrantfile b/Vagrantfile index 44c78b18fba..f4e4126208f 100644 --- a/Vagrantfile +++ b/Vagrantfile @@ -18,10 +18,11 @@ Vagrant.configure(VAGRANTFILE_API_VERSION) do |config| config.vm.provision "shell", path: "scripts/setup/asadmin-setup.sh" config.vm.provision "shell", path: "scripts/vagrant/deploy.sh" + config.vm.provision "shell", path: "scripts/vagrant/test.sh" # Create a forwarded port mapping which allows access to a specific port # within the machine from a port on the host machine. In the example below, - # accessing "localhost:8080" will access port 80 on the guest machine. + # accessing "localhost:8888" will access port 8080 on the guest machine. config.vm.network "forwarded_port", guest: 8080, host: 8888 # Create a private network, which allows host-only access to the machine @@ -41,10 +42,13 @@ Vagrant.configure(VAGRANTFILE_API_VERSION) do |config| # the path on the host to the actual folder. The second argument is # the path on the guest to mount the folder. And the optional third # argument is a set of non-required options. + # FIXME: use /dataverse/downloads instead config.vm.synced_folder "downloads", "/downloads" - config.vm.synced_folder "target", "/builds" + # FIXME: use /dataverse/conf instead config.vm.synced_folder "conf", "/conf" + # FIXME: use /dataverse/scripts instead config.vm.synced_folder "scripts", "/scripts" + config.vm.synced_folder ".", "/dataverse" # Provider-specific configuration so you can fine-tune various # backing providers for Vagrant. These expose provider-specific options. 
diff --git a/conf/httpd/conf.d/dataverse.conf b/conf/httpd/conf.d/dataverse.conf new file mode 100644 index 00000000000..2a729b63d60 --- /dev/null +++ b/conf/httpd/conf.d/dataverse.conf @@ -0,0 +1,19 @@ +ProxyPass / ajp://localhost:8009/ + +# From https://wiki.apache.org/httpd/RewriteHTTPToHTTPS + +RewriteEngine On +# This will enable the Rewrite capabilities + +RewriteCond %{HTTPS} !=on +# This checks to make sure the connection is not already HTTPS + +#RewriteRule ^/?(.*) https://%{SERVER_NAME}/$1 [R,L] +RewriteRule ^/dvn/api/data-deposit/?(.*) https://%{SERVER_NAME}/dvn/api/data-deposit/$1 [R,L] +# This rule will redirect users from their original location, to the same location but using HTTPS. +# i.e. http://www.example.com/foo/ to https://www.example.com/foo/ +# The leading slash is made optional so that this will work either in httpd.conf +# or .htaccess context + +# [#GLASSFISH-20694] Glassfish 4.0 and jk Unable to populate SSL attributes - Java.net JIRA - https://java.net/jira/browse/GLASSFISH-20694 +#SSLOptions +StdEnvVars +ExportCertData diff --git a/doc/Sphinx/source/Developers/tools.rst b/doc/Sphinx/source/Developers/tools.rst index 9d245e38d76..45e2d805722 100644 --- a/doc/Sphinx/source/Developers/tools.rst +++ b/doc/Sphinx/source/Developers/tools.rst @@ -18,3 +18,14 @@ created. You can edit this file to configure PageKite to serve up port 8080 (the default GlassFish HTTP port) or the port of your choosing. According to https://pagekite.net/support/free-for-foss/ PageKite (very generously!) offers free accounts to developers writing software the meets http://opensource.org/docs/definition.php such as Dataverse. + +Vagrant ++++++++ + +Vagrant allows you to spin up a virtual machine running Dataverse on +your development workstation. 
+ +From the root of the git repo, run ``vagrant up`` and eventually you +should be able to reach an installation of Dataverse at +http://localhost:8888 (or whatever forwarded_port indicates in the +Vagrantfile) diff --git a/doc/Sphinx/source/index.rst b/doc/Sphinx/source/index.rst index d07fa7fc19d..bda96e759f2 100644 --- a/doc/Sphinx/source/index.rst +++ b/doc/Sphinx/source/index.rst @@ -41,7 +41,7 @@ Other Resources Additional information about the Dataverse Project itself including presentations, information about upcoming releases, data management and citation, and announcements can be found at -`http://thedata.org `__ +`http://datascience.iq.harvard.edu/ `__ **User Group** diff --git a/scripts/api/data-deposit/create-dataset b/scripts/api/data-deposit/create-dataset index 52096569541..a5a2d45efbb 100755 --- a/scripts/api/data-deposit/create-dataset +++ b/scripts/api/data-deposit/create-dataset @@ -3,5 +3,5 @@ USERNAME=pete PASSWORD=pete DVN_SERVER=localhost:8181 DATAVERSE_ALIAS=peteTop -curl --insecure --data-binary "@scripts/api/data-deposit/data/atom-entry-study.xml" -H "Content-Type: application/atom+xml" https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/collection/dataverse/$DATAVERSE_ALIAS \ +curl -s --insecure --data-binary "@scripts/api/data-deposit/data/atom-entry-study.xml" -H "Content-Type: application/atom+xml" https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/collection/dataverse/$DATAVERSE_ALIAS \ | xmllint -format - diff --git a/scripts/api/data-deposit/delete-dataset b/scripts/api/data-deposit/delete-dataset index bcbee607f5f..529c54c65c1 100755 --- a/scripts/api/data-deposit/delete-dataset +++ b/scripts/api/data-deposit/delete-dataset @@ -2,8 +2,13 @@ USERNAME=pete PASSWORD=pete DVN_SERVER=localhost:8181 -#GLOBAL_ID=hdl:TEST/12345 -GLOBAL_ID=`scripts/api/data-deposit/list-datasets | xpath '//id/text()' | cut -d'/' -f11,12` +if [ -z "$1" ]; then + GLOBAL_ID=`scripts/api/data-deposit/list-datasets | 
xpath '//id/text()' | cut -d'/' -f11,12,13` +else + GLOBAL_ID=$1 + # https://redmine.hmdc.harvard.edu/issues/3993 + #GLOBAL_ID=doi:10.5072/FK2/5555 +fi #curl --insecure -X DELETE https://$DVN_SERVER/api/datasets/$DATABASE_ID?key=$USERNAME curl --insecure -i -X DELETE https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/$GLOBAL_ID #| xmllint -format - diff --git a/scripts/api/data-deposit/delete-file b/scripts/api/data-deposit/delete-file new file mode 100755 index 00000000000..ba2d98421c7 --- /dev/null +++ b/scripts/api/data-deposit/delete-file @@ -0,0 +1,14 @@ +#!/bin/bash -x +USERNAME=pete +PASSWORD=pete +DVN_SERVER=localhost:8181 +if [ -z "$1" ]; then + FILE_ID=`scripts/api/data-deposit/show-files | cut -d'/' -f1` + #echo $FILE_ID + #exit +else + FILE_ID=$1 +fi +#curl --insecure -X DELETE https://$DVN_SERVER/api/datasets/$DATABASE_ID?key=$USERNAME +curl --insecure -i -X DELETE https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit-media/file/$FILE_ID +#| xmllint -format - diff --git a/scripts/api/data-deposit/list-datasets b/scripts/api/data-deposit/list-datasets index 91fb9dc4f8d..5c718a41f80 100755 --- a/scripts/api/data-deposit/list-datasets +++ b/scripts/api/data-deposit/list-datasets @@ -2,6 +2,11 @@ USERNAME=pete PASSWORD=pete DVN_SERVER=localhost:8181 -DATAVERSE_ALIAS=peteTop -curl --insecure https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/collection/dataverse/$DATAVERSE_ALIAS \ +if [ -z "$1" ]; then + DATAVERSE_ALIAS=peteTop + #DATAVERSE_ALIAS=root +else + DATAVERSE_ALIAS=$1 +fi +curl --insecure -s https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/collection/dataverse/$DATAVERSE_ALIAS \ | xmllint -format - diff --git a/scripts/api/data-deposit/pipeline b/scripts/api/data-deposit/pipeline new file mode 100755 index 00000000000..da07d0f90f2 --- /dev/null +++ b/scripts/api/data-deposit/pipeline @@ -0,0 +1,11 @@ +#!/usr/bin/env ruby +require "rexml/document" 
+include REXML +service_document = Document.new `scripts/api/data-deposit/service-document` +collection = XPath.first(service_document, "//collection/@href") +puts collection + +puts "Getting first title from #{collection}" +feed_of_studies = Document.new `scripts/api/data-deposit/show-collection #{collection}` +title = XPath.first(feed_of_studies, "//title") +puts title diff --git a/scripts/api/data-deposit/publish-dataset b/scripts/api/data-deposit/publish-dataset new file mode 100755 index 00000000000..5325a5c3a94 --- /dev/null +++ b/scripts/api/data-deposit/publish-dataset @@ -0,0 +1,14 @@ +#!/bin/sh +USERNAME=pete +PASSWORD=pete +DVN_SERVER=localhost:8181 +if [ -z "$1" ]; then + GLOBAL_ID=`scripts/api/data-deposit/list-datasets | xpath '//id/text()' | cut -d'/' -f11,12,13` +else + GLOBAL_ID=$1 + #GLOBAL_ID=doi:10.5072/FK2/5555 +fi +# We cat /dev/null so that contentLength is zero. This makes headersOnly true:: https://github.com/swordapp/JavaServer2.0/blob/sword2-server-1.0/src/main/java/org/swordapp/server/ContainerAPI.java#L338 +# 'to tell curl to read the format from stdin you write "@-"' -- http://curl.haxx.se/docs/manpage.html +cat /dev/null | curl -s --insecure -X POST -H "In-Progress: false" --data-binary @- https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/$GLOBAL_ID \ +| xmllint --format - diff --git a/scripts/api/data-deposit/publish-dataverse b/scripts/api/data-deposit/publish-dataverse new file mode 100755 index 00000000000..fa22c4bbc0e --- /dev/null +++ b/scripts/api/data-deposit/publish-dataverse @@ -0,0 +1,13 @@ +#!/bin/sh +USERNAME=pete +PASSWORD=pete +DVN_SERVER=localhost:8181 +if [ -z "$1" ]; then + echo "Please supply a dataverse alias" + exit 1 +else + DATAVERSE_ALIAS=$1 + #DATAVERSE_ALIAS=peteTop +fi +cat /dev/null | curl -s --insecure -X POST -H "In-Progress: false" --data-binary @- https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/dataverse/$DATAVERSE_ALIAS \ +| xmllint --format 
- diff --git a/scripts/api/data-deposit/service-document b/scripts/api/data-deposit/service-document index 87ccdf8c18d..4e3b87b6229 100755 --- a/scripts/api/data-deposit/service-document +++ b/scripts/api/data-deposit/service-document @@ -1,2 +1,12 @@ #!/bin/bash -curl --insecure https://pete:pete@localhost:8181/dvn/api/data-deposit/v1/swordv2/service-document | xmllint -format - +USERNAME=pete +PASSWORD=$USERNAME +if [ -z "$1" ]; then + HOSTNAME=localhost:8181 +else + HOSTNAME=$1 +fi +URL=https://$HOSTNAME/dvn/api/data-deposit/v1/swordv2/service-document +echo Retrieving service document from $URL >&2 +curl --insecure -u $USERNAME:$PASSWORD $URL \ +| xmllint -format - diff --git a/scripts/api/data-deposit/show-atom-entry b/scripts/api/data-deposit/show-atom-entry index bd0c588213d..c3543b330cb 100755 --- a/scripts/api/data-deposit/show-atom-entry +++ b/scripts/api/data-deposit/show-atom-entry @@ -2,7 +2,11 @@ USERNAME=pete PASSWORD=pete DVN_SERVER=localhost:8181 -#GLOBAL_ID=hdl:TEST/12345 -GLOBAL_ID=`scripts/api/data-deposit/list-datasets | xpath '//id/text()' | cut -d'/' -f11,12` +if [ -z "$1" ]; then + GLOBAL_ID=`scripts/api/data-deposit/list-datasets | xpath '//id/text()' | cut -d'/' -f11,12,13` +else + GLOBAL_ID=$1 + #GLOBAL_ID=doi:10.5072/FK2/5555 +fi curl --insecure -s https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit/study/$GLOBAL_ID \ | xmllint -format - diff --git a/scripts/api/data-deposit/show-collection b/scripts/api/data-deposit/show-collection new file mode 100755 index 00000000000..8768e2f9c1b --- /dev/null +++ b/scripts/api/data-deposit/show-collection @@ -0,0 +1,10 @@ +#!/bin/bash +USERNAME=pete +PASSWORD=pete +if [ -z "$1" ]; then + echo "Please provide a URL" +else + URL=$1 +fi +curl --insecure -s -u $USERNAME:$PASSWORD $URL +#| xmllint -format - diff --git a/scripts/api/data-deposit/show-files b/scripts/api/data-deposit/show-files index 42dd477b434..9cf93fed584 100755 --- a/scripts/api/data-deposit/show-files +++ 
b/scripts/api/data-deposit/show-files @@ -1,3 +1,3 @@ #!/bin/sh #scripts/api/data-deposit/show-statement | xpath "//entry/content/@*[name()='type' or name()='src']" -scripts/api/data-deposit/show-statement | xpath '//entry/id/text()' | cut -d'/' -f11,12 +scripts/api/data-deposit/show-statement | xpath '//entry/id/text()' | cut -d'/' -f11,12,13 diff --git a/scripts/api/data-deposit/show-statement b/scripts/api/data-deposit/show-statement index 9648c18df9b..e507dda8eab 100755 --- a/scripts/api/data-deposit/show-statement +++ b/scripts/api/data-deposit/show-statement @@ -2,7 +2,12 @@ USERNAME=pete PASSWORD=pete DVN_SERVER=localhost:8181 -GLOBAL_ID=`scripts/api/data-deposit/list-datasets | xpath '//id/text()' | cut -d'/' -f11,12` +if [ -z "$1" ]; then + GLOBAL_ID=`scripts/api/data-deposit/list-datasets | xpath '//id/text()' | cut -d'/' -f11,12,13` +else + GLOBAL_ID=$1 + #GLOBAL_ID=doi:10.5072/FK2/5555 +fi curl --insecure -s https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/statement/study/$GLOBAL_ID \ | xmllint -format - \ #| xpath '//entry/title' diff --git a/scripts/api/data-deposit/unsupported-download-files b/scripts/api/data-deposit/unsupported-download-files new file mode 100755 index 00000000000..cefe9637569 --- /dev/null +++ b/scripts/api/data-deposit/unsupported-download-files @@ -0,0 +1,12 @@ +#!/bin/sh +USERNAME=pete +PASSWORD=pete +DVN_SERVER=localhost:8181 +if [ -z "$1" ]; then + GLOBAL_ID=`scripts/api/data-deposit/list-datasets | xpath '//id/text()' | cut -d'/' -f11,12,13` +else + GLOBAL_ID=$1 + #GLOBAL_ID=doi:10.5072/FK2/5555 +fi +curl -s --insecure https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit-media/study/$GLOBAL_ID \ +| xmllint -format - diff --git a/scripts/api/data-deposit/upload-file b/scripts/api/data-deposit/upload-file index 935f8a82fcd..52207097bb3 100755 --- a/scripts/api/data-deposit/upload-file +++ b/scripts/api/data-deposit/upload-file @@ -2,6 +2,13 @@ USERNAME=pete PASSWORD=pete 
DVN_SERVER=localhost:8181 -GLOBAL_ID=`scripts/api/data-deposit/list-datasets | xpath '//id/text()' | cut -d'/' -f11,12` +if [ -z "$1" ]; then + GLOBAL_ID=`scripts/api/data-deposit/list-datasets | xpath '//id/text()' | cut -d'/' -f11,12,13` +else + GLOBAL_ID=$1 + #GLOBAL_ID=doi:10.5072/FK2/5555 +fi curl -s --insecure --data-binary @scripts/api/data-deposit/data/example.zip -H "Content-Disposition: filename=example.zip" -H "Content-Type: application/zip" -H "Packaging: http://purl.org/net/sword/package/SimpleZip" https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit-media/study/$GLOBAL_ID \ | xmllint -format - +#curl -s --insecure --data-binary @scripts/search/data/binary/trees.zip -H "Content-Disposition: filename=trees.zip" -H "Content-Type: application/zip" -H "Packaging: http://purl.org/net/sword/package/SimpleZip" https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit-media/study/$GLOBAL_ID \ +#| xmllint -format - diff --git a/scripts/api/data/dataset-bad-missingInitialVersion.json b/scripts/api/data/dataset-bad-missingInitialVersion.json index caa698d3713..855702046b8 100644 --- a/scripts/api/data/dataset-bad-missingInitialVersion.json +++ b/scripts/api/data/dataset-bad-missingInitialVersion.json @@ -1,5 +1,6 @@ { - "authority":"anAuthority", - "identifier":"dataset-one", - "protocol":"chadham-house-rule" + "authority": "anAuthority", + "identifier": "dataset-one", + "persistentUrl": "http://dx.doi.org/10.5072/FK2/9", + "protocol": "chadham-house-rule" } \ No newline at end of file diff --git a/scripts/api/data/dataset-sample1.json b/scripts/api/data/dataset-create-new.json similarity index 61% rename from scripts/api/data/dataset-sample1.json rename to scripts/api/data/dataset-create-new.json index dd43fe4c394..c4c6f1aa5f5 100644 --- a/scripts/api/data/dataset-sample1.json +++ b/scripts/api/data/dataset-create-new.json @@ -1,10 +1,14 @@ { - "data": { + "authority": "anAuthority", + "identifier": "dataset-one", + 
"persistentUrl": "http://dx.doi.org/10.5072/FK2/9", + "protocol": "chadham-house-rule", + "initialVersion": { "metadataBlocks": { "citation": { "fields": [ { - "value": "sample dataset", + "value": "SampleTitle", "typeClass": "primitive", "multiple": false, "typeName": "title" @@ -27,7 +31,7 @@ }, { "authorAffiliation": { - "value": "Coca-cola co", + "value": "UMASS, Amherst", "typeClass": "primitive", "multiple": false, "typeName": "authorAffiliation" @@ -46,23 +50,24 @@ }, { "value": [ - "pete@malinator.com" + "pete@malinator.com", + "beat@pailinator.gov" ], "typeClass": "primitive", "multiple": true, "typeName": "distributorContact" }, { - "value": "description description description description description description description description description ", + "value": "lorem ipsum dolor sit amet. That must've been pretty interesting, I bet.", "typeClass": "primitive", "multiple": false, "typeName": "dsDescription" }, { "value": [ - "Keyword1", - "KeywordTwo", - "TheThirdKeyWord" + "kw1", + "kw2", + "kw3" ], "typeClass": "primitive", "multiple": true, @@ -70,14 +75,15 @@ }, { "value": [ - "Social Sciences" + "Arts and Humanities", + "Astronomy and Astrophysics" ], "typeClass": "controlledVocabulary", "multiple": true, "typeName": "subject" }, { - "value": "notes notes notes notes notes notes notes notes notes notes notes ", + "value": "Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note.\r\nNOTANOTANOTANOTANnot.e\r\n", "typeClass": "primitive", "multiple": false, "typeName": "notesText" @@ -86,13 +92,13 @@ "value": [ { "otherIdAgency": { - "value": "otheridAgency", + "value": "NSF", "typeClass": "primitive", "multiple": false, "typeName": "otherIdAgency" }, 
"otherIdValue": { - "value": "otherid", + "value": "NSF1234", "typeClass": "primitive", "multiple": false, "typeName": "otherIdValue" @@ -104,13 +110,46 @@ "typeName": "otherId" }, { - "value": "2014-04-01", + "value": [ + { + "contributorAbbreviation": { + "value": "DDD", + "typeClass": "primitive", + "multiple": false, + "typeName": "contributorAbbreviation" + }, + "contributorAffiliation": { + "value": "Denmark", + "typeClass": "primitive", + "multiple": false, + "typeName": "contributorAffiliation" + }, + "contributorName": { + "value": "Dennis", + "typeClass": "primitive", + "multiple": false, + "typeName": "contributorName" + }, + "contributorType": { + "value": "Distributor", + "typeClass": "controlledVocabulary", + "multiple": false, + "typeName": "contributorType" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "contributor" + }, + { + "value": "2014-02-03", "typeClass": "primitive", "multiple": false, "typeName": "productionDate" }, { - "value": "2014-04-01", + "value": "Cambridge, MA", "typeClass": "primitive", "multiple": false, "typeName": "productionPlace" @@ -119,13 +158,13 @@ "value": [ { "grantNumberAgency": { - "value": "NSF", + "value": "NIH", "typeClass": "primitive", "multiple": false, "typeName": "grantNumberAgency" }, "grantNumberValue": { - "value": "NSF12345", + "value": "NIH1231245154", "typeClass": "primitive", "multiple": false, "typeName": "grantNumberValue" @@ -139,7 +178,7 @@ "typeName": "grantNumberAgency" }, "grantNumberValue": { - "value": "NIH99999", + "value": "NIH99999999", "typeClass": "primitive", "multiple": false, "typeName": "grantNumberValue" @@ -157,15 +196,30 @@ "typeName": "depositor" }, { - "value": "2014-05-06", + "value": "2014-05-20", "typeClass": "primitive", "multiple": false, "typeName": "dateOfDeposit" }, { "value": [ - "Other reference number one", - "Other reference number two" + "Bananas" + ], + "typeClass": "primitive", + "multiple": true, + "typeName": "relatedMaterial" + }, + { + 
"value": [ + "Data about bananas" + ], + "typeClass": "primitive", + "multiple": true, + "typeName": "relatedDatasets" + }, + { + "value": [ + "other ref other ref other ref ef ef" ], "typeClass": "primitive", "multiple": true, @@ -175,36 +229,13 @@ "displayName": "Citation Metadata" } }, - "authors": [ - { - "displayOrder": 0, - "affiliation": { - "value": null - }, - "name": { - "value": null - } - }, - { - "displayOrder": 0, - "affiliation": { - "value": null - }, - "name": { - "value": null - } - } - ], - "createTime": "2014-05-06 01:38:01 -04", + "createTime": "2014-05-20 11:52:55 -04", "UNF": "UNF", - "id": 4, - "version": 1, + "id": 1, "versionNumber": 1, "versionMinorNumber": 0, "versionState": "DRAFT", - "title": "sample dataset", "distributionDate": "Distribution Date", "productionDate": "Production Date" - }, - "status": "OK" -} + } +} \ No newline at end of file diff --git a/scripts/api/data/dataset-updated-version.json b/scripts/api/data/dataset-updated-version.json new file mode 100644 index 00000000000..bef27390db1 --- /dev/null +++ b/scripts/api/data/dataset-updated-version.json @@ -0,0 +1,261 @@ +{ + "createTime": "2014-05-20 11:52:55 -04", + "UNF": "UNF", + "versionNumber": 1, + "versionMinorNumber": 0, + "versionState": "DRAFT", + "distributionDate": "Distribution Date", + "productionDate": "Production Date", + "metadataBlocks": { + "citation": { + "fields": [ + { + "value": "UpdatedTitle", + "typeClass": "primitive", + "multiple": false, + "typeName": "title" + }, + { + "value": [ + { + "authorAffiliation": { + "value": "Tippie Top", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + }, + "authorName": { + "value": "McPrivileged, Pete", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + } + }, + { + "authorAffiliation": { + "value": "UNC", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + }, + "authorName": { + "value": "Borrator, Colla", + 
"typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + } + }, + { + "authorAffiliation": { + "value": "NASA", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + }, + "authorName": { + "value": "Naut, Astro", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "author" + }, + { + "value": [ + "pete@malinator.com" + ], + "typeClass": "primitive", + "multiple": true, + "typeName": "distributorContact" + }, + { + "value": "Lorem ipsum dolor sit amet, consectetur adipisicing elit. Quos, eos, natus soluta porro harum beatae voluptatem unde rerum eius quaerat officiis maxime autem asperiores facere.", + "typeClass": "primitive", + "multiple": false, + "typeName": "dsDescription" + }, + { + "value": [ + "kw10", + "kw20", + "kw30" + ], + "typeClass": "primitive", + "multiple": true, + "typeName": "keyword" + }, + { + "value": [ + "Arts and Humanities", + "Astronomy and Astrophysics" + ], + "typeClass": "controlledVocabulary", + "multiple": true, + "typeName": "subject" + }, + { + "value": "Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note.\r\nNOTANOTANOTANOTANnot.e\r\n", + "typeClass": "primitive", + "multiple": false, + "typeName": "notesText" + }, + { + "value": [ + { + "otherIdAgency": { + "value": "NSF", + "typeClass": "primitive", + "multiple": false, + "typeName": "otherIdAgency" + }, + "otherIdValue": { + "value": "NSF1234", + "typeClass": "primitive", + "multiple": false, + "typeName": "otherIdValue" + } + }, + { + "otherIdAgency": { + "value": "NIH", + "typeClass": "primitive", + "multiple": false, + 
"typeName": "otherIdAgency" + }, + "otherIdValue": { + "value": "NIH98765", + "typeClass": "primitive", + "multiple": false, + "typeName": "otherIdValue" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "otherId" + }, + { + "value": [ + { + "contributorAbbreviation": { + "value": "DDD", + "typeClass": "primitive", + "multiple": false, + "typeName": "contributorAbbreviation" + }, + "contributorAffiliation": { + "value": "Denmark", + "typeClass": "primitive", + "multiple": false, + "typeName": "contributorAffiliation" + }, + "contributorName": { + "value": "Dennis", + "typeClass": "primitive", + "multiple": false, + "typeName": "contributorName" + }, + "contributorType": { + "value": "Distributor", + "typeClass": "controlledVocabulary", + "multiple": false, + "typeName": "contributorType" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "contributor" + }, + { + "value": "2014-02-03", + "typeClass": "primitive", + "multiple": false, + "typeName": "productionDate" + }, + { + "value": "Cambridge, UK", + "typeClass": "primitive", + "multiple": false, + "typeName": "productionPlace" + }, + { + "value": [ + { + "grantNumberAgency": { + "value": "NIH", + "typeClass": "primitive", + "multiple": false, + "typeName": "grantNumberAgency" + }, + "grantNumberValue": { + "value": "NIH1231245154", + "typeClass": "primitive", + "multiple": false, + "typeName": "grantNumberValue" + } + }, + { + "grantNumberAgency": { + "value": "NIH", + "typeClass": "primitive", + "multiple": false, + "typeName": "grantNumberAgency" + }, + "grantNumberValue": { + "value": "NIH99999999", + "typeClass": "primitive", + "multiple": false, + "typeName": "grantNumberValue" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "grantNumber" + }, + { + "value": "Privileged, Pete", + "typeClass": "primitive", + "multiple": false, + "typeName": "depositor" + }, + { + "value": "2014-05-20", + "typeClass": "primitive", + "multiple": false, + 
"typeName": "dateOfDeposit" + }, + { + "value": [ + "Bananas" + ], + "typeClass": "primitive", + "multiple": true, + "typeName": "relatedMaterial" + }, + { + "value": [ + "Data about bananas" + ], + "typeClass": "primitive", + "multiple": true, + "typeName": "relatedDatasets" + }, + { + "value": [ + "other ref other ref other ref ef ef" + ], + "typeClass": "primitive", + "multiple": true, + "typeName": "otherReferences" + } + ], + "displayName": "Citation Metadata" + } + } +} \ No newline at end of file diff --git a/scripts/api/data/dataset-updated-version2.json b/scripts/api/data/dataset-updated-version2.json new file mode 100644 index 00000000000..0c0ba70b4da --- /dev/null +++ b/scripts/api/data/dataset-updated-version2.json @@ -0,0 +1,277 @@ +{ + "createTime": "2014-05-20 11:52:55 -04", + "UNF": "UNF", + "versionNumber": 1, + "versionMinorNumber": 0, + "versionState": "DRAFT", + "distributionDate": "Distribution Date", + "productionDate": "Production Date", + "metadataBlocks": { + "citation": { + "fields": [ + { + "value": "This is another title", + "typeClass": "primitive", + "multiple": false, + "typeName": "title" + }, + { + "value": [ + { + "authorAffiliation": { + "value": "Tippie Top", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + }, + "authorName": { + "value": "McPrivileged, Pete", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + } + }, + { + "authorAffiliation": { + "value": "Uber Under", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + }, + "authorName": { + "value": "McNew, Oldie", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + } + }, + { + "authorAffiliation": { + "value": "UNC", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + }, + "authorName": { + "value": "Borrator, Colla", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + } + }, + { + 
"authorAffiliation": { + "value": "NASA", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + }, + "authorName": { + "value": "Naut, Astro", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "author" + }, + { + "value": [ + "pete@malinator.com", + "beat@malinator.com", + "rete@malinator.com" + ], + "typeClass": "primitive", + "multiple": true, + "typeName": "distributorContact" + }, + { + "value": "This is a shorter description", + "typeClass": "primitive", + "multiple": false, + "typeName": "dsDescription" + }, + { + "value": [ + "kw15", + "kw25", + "kw35" + ], + "typeClass": "primitive", + "multiple": true, + "typeName": "keyword" + }, + { + "value": [ + "Arts and Humanities", + "Astronomy and Astrophysics" + ], + "typeClass": "controlledVocabulary", + "multiple": true, + "typeName": "subject" + }, + { + "value": "Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note Note note note note.\r\nNOTANOTANOTANOTANnot.e\r\n", + "typeClass": "primitive", + "multiple": false, + "typeName": "notesText" + }, + { + "value": [ + { + "otherIdAgency": { + "value": "NSF", + "typeClass": "primitive", + "multiple": false, + "typeName": "otherIdAgency" + }, + "otherIdValue": { + "value": "NSF1234", + "typeClass": "primitive", + "multiple": false, + "typeName": "otherIdValue" + } + }, + { + "otherIdAgency": { + "value": "NIH", + "typeClass": "primitive", + "multiple": false, + "typeName": "otherIdAgency" + }, + "otherIdValue": { + "value": "NIH98765", + "typeClass": "primitive", + "multiple": false, + "typeName": "otherIdValue" + } + } + ], + "typeClass": "compound", + 
"multiple": true, + "typeName": "otherId" + }, + { + "value": [ + { + "contributorAbbreviation": { + "value": "DDD", + "typeClass": "primitive", + "multiple": false, + "typeName": "contributorAbbreviation" + }, + "contributorAffiliation": { + "value": "Denmark", + "typeClass": "primitive", + "multiple": false, + "typeName": "contributorAffiliation" + }, + "contributorName": { + "value": "Dennis", + "typeClass": "primitive", + "multiple": false, + "typeName": "contributorName" + }, + "contributorType": { + "value": "Distributor", + "typeClass": "controlledVocabulary", + "multiple": false, + "typeName": "contributorType" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "contributor" + }, + { + "value": "2014-02-03", + "typeClass": "primitive", + "multiple": false, + "typeName": "productionDate" + }, + { + "value": "Cambridge, UK", + "typeClass": "primitive", + "multiple": false, + "typeName": "productionPlace" + }, + { + "value": [ + { + "grantNumberAgency": { + "value": "NIH", + "typeClass": "primitive", + "multiple": false, + "typeName": "grantNumberAgency" + }, + "grantNumberValue": { + "value": "NIH1231245154", + "typeClass": "primitive", + "multiple": false, + "typeName": "grantNumberValue" + } + }, + { + "grantNumberAgency": { + "value": "NIH", + "typeClass": "primitive", + "multiple": false, + "typeName": "grantNumberAgency" + }, + "grantNumberValue": { + "value": "NIH99999999", + "typeClass": "primitive", + "multiple": false, + "typeName": "grantNumberValue" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "grantNumber" + }, + { + "value": "Privileged, Pete", + "typeClass": "primitive", + "multiple": false, + "typeName": "depositor" + }, + { + "value": "2014-05-20", + "typeClass": "primitive", + "multiple": false, + "typeName": "dateOfDeposit" + }, + { + "value": [ + "Bananas" + ], + "typeClass": "primitive", + "multiple": true, + "typeName": "relatedMaterial" + }, + { + "value": [ + "Data about bananas" + ], 
+ "typeClass": "primitive", + "multiple": true, + "typeName": "relatedDatasets" + }, + { + "value": [ + "other ref other ref other ref ef ef" + ], + "typeClass": "primitive", + "multiple": true, + "typeName": "otherReferences" + } + ], + "displayName": "Citation Metadata" + } + } +} \ No newline at end of file diff --git a/scripts/api/readme.md b/scripts/api/readme.md index 5d0945e3a37..579cf4032ae 100644 --- a/scripts/api/readme.md +++ b/scripts/api/readme.md @@ -5,7 +5,7 @@ The API uses `json`, and sometimes query parameters as well. Also, sometimes the To have a fresh start in the database, you can execute the script `drop-create.sh` in the `../database` folder. -## Pre-made scripts +## Pre-made Scripts setup-users.sh @@ -40,11 +40,9 @@ View data about the dataverse identified by `{{id}}`. `{{id}}` can be the id num Deletes the dataverse whose ID is given. - GET http://{{SERVER}}/api/dvs/:gv + GET http://{{SERVER}}/api/dvs/{{id}}/contents -Dump the structure of the dataverse to a graphviz file. Sample usage: -`curl http://localhost:8080/api/dvs/:gv | circo -Tpdf > dataverses.pdf` -Creates a pdf with all dataverses, and their hierarchy. +Lists all the DvObjects under dataverse `id`. GET http://{{SERVER}}/api/dvs/{{id}}/roles?key={{username}} @@ -70,15 +68,23 @@ Get the metadata blocks defined on the passed dataverse. Sets the metadata blocks of the dataverse. Makes the dataverse a metadatablock root. The query body is a JSON array with a list of metadatablocks identifiers (either id or name). - GET http://{{SERVER}}/api/dvs/{{id}}/metadatablocks/:isRoot?key={{username}} + GET http://{{SERVER}}/api/dvs/{{id}}/metadatablocks/:isRoot?key={{apikey}} Get whether the dataverse is a metadata block root, or does it uses its parent blocks. 
- POST http://{{SERVER}}/api/dvs/{{id}}/metadatablocks/:isRoot?key={{username}} + POST http://{{SERVER}}/api/dvs/{{id}}/metadatablocks/:isRoot?key={{apikey}} Set whether the dataverse is a metadata block root, or does it uses its parent blocks. Possible values are `true` and `false` (both are valid JSON expressions). + POST http://{{SERVER}}/api/dvs/{{id}}/datasets/?key={{apikey}} + +Create a new dataset in dataverse `id`. The post data is a Json object, containing the dataset fields and an initial dataset version, under the field of `"initialVersion"`. The initial versions version number will be set to `1.0`, and its state will be set to `DRAFT` regardless of the content of the json object. Example json can be found at `data/dataset-create-new.json`. + + POST http://{{SERVER}}/api/dvs/{{identifier}}/actions/:publish?key={{apikey}} + +Publish the Dataverse pointed by `identifier`, which can either by the dataverse alias or its numerical id. + ### Datasets GET http://{{SERVER}}/api/datasets/?key={{apikey}} @@ -97,19 +103,27 @@ Delete the dataset whose id is passed. List versions of the dataset. - GET http://{{SERVER}}/api/datasets/{{id}}/versions/{{versionId}}?key={{apikey}} + GET http://{{SERVER}}/api/datasets/{{id}}/versions/{{versionNumber}}?key={{apikey}} -Show a version of the dataset. The `versionId` can be a number, or the values `:edit` for the edit version, and `:latest` for the latest one. +Show a version of the dataset. The `versionNumber` can be a specific version number (in the form of `major.minor`, e.g. `1.2` or `3.0`), or the values `:edit` for the edit version, and `:latest` for the latest one. The Dataset also include any metadata blocks the data might have. - GET http://{{SERVER}}//api/datasets/{{id}}/versions/{{versionId}}/metadata?key={{apikey}} + GET http://{{SERVER}}/api/datasets/{{id}}/versions/{{versionId}}/metadata?key={{apikey}} Lists all the metadata blocks and their content, for the given dataset and version. 
- GET http://{{SERVER}}//api/datasets/{{id}}/versions/{{versionId}}/metadata/{{blockname}}?key={{apikey}} + GET http://{{SERVER}}/api/datasets/{{id}}/versions/{{versionId}}/metadata/{{blockname}}?key={{apikey}} Lists the metadata block block named `blockname`, for the given dataset and version. + PUT http://{{SERVER}}/api/datasets/{{id}}/versions/:edit?key={{apiKey}} + +Updates the current edit version of dataset `{{id}}`. If the dataset does not have an edit version - e.g. when its most recent version is published, a new dreaft version is created. The invariant is - after a successful call to this command, the dataset has a DRAFT version with the passed data. + + POST http://{{SERVER}}/api/datasets/{{id}}/actions/:publish?type={{type}}&key={{apiKey}} + +Publishes the dataset whose id is passed. The new dataset version number is determined by the most recent version number and the `type` parameter. Passing `type=minor` increases the minor version number (2.3 → 2.4). Passing `type=major` increases the major version number (2.3 → 3.0). 
+ ### permissions GET http://{{SERVER}}/api/permissions?user={{uid}}&on={{dvoId}} diff --git a/scripts/database/homebrew/rebuild-and-test b/scripts/database/homebrew/rebuild-and-test index 57d97b07469..d4ece691a41 100755 --- a/scripts/database/homebrew/rebuild-and-test +++ b/scripts/database/homebrew/rebuild-and-test @@ -13,3 +13,7 @@ scripts/database/homebrew/run-post-create-post-deploy scripts/search/tests/permissions scripts/search/tests/delete-dataverse scripts/search/tests/query-unparseable +scripts/search/tests/highlighting-setup01 > /dev/null +scripts/search/tests/highlighting-setup02 > /dev/null +scripts/search/tests/highlighting-setup03 +scripts/search/tests/highlighting diff --git a/scripts/installer/glassfish-setup.sh b/scripts/installer/glassfish-setup.sh index 7551f7acbb1..4f6d5bc2f6b 100755 --- a/scripts/installer/glassfish-setup.sh +++ b/scripts/installer/glassfish-setup.sh @@ -231,6 +231,9 @@ fi ./asadmin $ASADMIN_OPTS create-javamail-resource --mailhost "$SMTP_SERVER" --mailuser "dataversenotify" --fromaddress "do-not-reply@${HOST_ADDRESS}" mail/notifyMailSession +# so we can front with apache httpd ( ProxyPass / ajp://localhost:8009/ ) +./asadmin $ASADMIN_OPTS create-network-listener --protocol http-listener-1 --listenerport 8009 --jkenabled true jk-connector + ### # Restart echo Updates done. Restarting... 
diff --git a/scripts/search/create b/scripts/search/create index d6d18d03df6..754be01068f 100755 --- a/scripts/search/create +++ b/scripts/search/create @@ -7,15 +7,15 @@ FILESDIR='data/in/files' #rm data/in/dataverses/1 for i in `ls $DVDIR_ROOT`; do - curl -H "Content-type:application/json" -X POST -d @$DVDIR_ROOT/$i "http://localhost:8080/api/dvs/root?key=pete" + curl -s -H "Content-type:application/json" -X POST -d @$DVDIR_ROOT/$i "http://localhost:8080/api/dvs/root?key=pete" done for i in `ls $DVDIR_BIRDS`; do - curl -H "Content-type:application/json" -X POST -d @$DVDIR_BIRDS/$i "http://localhost:8080/api/dvs/birds?key=pete" + curl -s -H "Content-type:application/json" -X POST -d @$DVDIR_BIRDS/$i "http://localhost:8080/api/dvs/birds?key=pete" done for i in `ls $DVDIR_TREES`; do - curl -H "Content-type:application/json" -X POST -d @$DVDIR_TREES/$i "http://localhost:8080/api/dvs/trees?key=pete" + curl -s -H "Content-type:application/json" -X POST -d @$DVDIR_TREES/$i "http://localhost:8080/api/dvs/trees?key=pete" done # 9 is "sparrows" diff --git a/scripts/search/data/binary/trees.zip b/scripts/search/data/binary/trees.zip new file mode 100644 index 00000000000..170c2d30ae9 Binary files /dev/null and b/scripts/search/data/binary/trees.zip differ diff --git a/scripts/search/data/dataset01-create-new.json b/scripts/search/data/dataset01-create-new.json new file mode 100644 index 00000000000..9203480f5e0 --- /dev/null +++ b/scripts/search/data/dataset01-create-new.json @@ -0,0 +1,130 @@ +{ + "authority": "10.5072/FK2", + "identifier": "17", + "persistentUrl": "http://dx.doi.org/10.5072/FK2/17", + "protocol": "doi", + "initialVersion": { + "metadataBlocks": { + "citation": { + "fields": [ + { + "value": "Rings of Trees and Other Observations", + "typeClass": "primitive", + "multiple": false, + "typeName": "title" + }, + { + "value": [ + { + "authorName": { + "value": "Tree, Tony", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + }, + 
"authorAffiliation": { + "value": "Trees Inc.", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "author" + }, + { + "value": [ + "tony@trees.com" + ], + "typeClass": "primitive", + "multiple": true, + "typeName": "distributorContact" + }, + { + "value": "Trees have rings. Trees can be tall.", + "typeClass": "primitive", + "multiple": false, + "typeName": "dsDescription" + }, + { + "value": [ + "trees", + "rings", + "tall" + ], + "typeClass": "primitive", + "multiple": true, + "typeName": "keyword" + }, + { + "value": [ + "Medicine, Health & Life Sciences" + ], + "typeClass": "controlledVocabulary", + "multiple": true, + "typeName": "subject" + }, + { + "value": "Many notes have been taken about trees over the years.", + "typeClass": "primitive", + "multiple": false, + "typeName": "notesText" + }, + { + "value": [ + { + "otherIdAgency": { + "value": "NSF", + "typeClass": "primitive", + "multiple": false, + "typeName": "otherIdAgency" + }, + "otherIdValue": { + "value": "NSF1234", + "typeClass": "primitive", + "multiple": false, + "typeName": "otherIdValue" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "otherId" + }, + { + "value": [ + { + "contributorType": { + "value": "Data Collector", + "typeClass": "controlledVocabulary", + "multiple": false, + "typeName": "contributorType" + }, + "contributorName": { + "value": "Edward Trees Jr.", + "typeClass": "primitive", + "multiple": false, + "typeName": "contributorName" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "contributor" + } + ], + "displayName": "Citation Metadata" + } + }, + "createTime": "2014-05-20 11:52:55 -04", + "UNF": "UNF", + "id": 1, + "versionNumber": 1, + "versionMinorNumber": 0, + "versionState": "DRAFT", + "distributionDate": "Distribution Date", + "productionDate": "Production Date" + } +} diff --git a/scripts/search/tests/highlighting 
b/scripts/search/tests/highlighting index 1d76815f4b7..568b10fd97f 100755 --- a/scripts/search/tests/highlighting +++ b/scripts/search/tests/highlighting @@ -1,35 +1,14 @@ #!/bin/bash -# We assume that scripts/search/tests/permissions came back clean. -# We assume you've added the bird and tree dataverses with this: +# We assume you added a file called "trees.png" to the dataset at +# by running highlighting-setup03 # -#cd scripts/search -#./populate -#./create -#exit +# It doesn't have a description yet. Go make it "Trees are lovely." +# Handy link: http://localhost:8080/dataset.xhtml?id=17 # -# We assume you've created a dataset as pete in the "Trees" dataverse at -# http://localhost:8080/dataverse.xhtml?id=11 -# with the following: -# Title: Rings of Trees and Other Observations -# Author: Tree, Tony -# Affiliation: Trees Inc. -# Contact E-mail: tony@trees.com -# Description: Trees have rings. Trees can be tall. -# Keyword: trees -# Keyword: rings -# Keyword: tall -# Subject: Medicine, Health & Life Sciences -# Notes: Many notes have been taken about trees over the years. +# Then this test should pass. # -# We assume you edit the metadata of this dataset at -# http://localhost:8080/dataset.xhtml?id=17 -# and add the following -# Contributor: -# Type: Data Collector -# Name: Edward Trees Jr. -# -# We assume you add a file called "trees.png" to this dataset -# with a description of "Trees are lovely." 
+# Of course, you can't operate on it with SWORD anymore until this +# ticket is resolved: https://redmine.hmdc.harvard.edu/issues/3993 # diff <(curl -s 'http://localhost:8080/api/search?q=trees&showrelevance=true&key=nick') scripts/search/tests/expected/highlighting-nick-trees diff <(curl -s 'http://localhost:8080/api/search?q=trees&showrelevance=true&key=pete') scripts/search/tests/expected/highlighting-pete-trees diff --git a/scripts/search/tests/highlighting-setup01 b/scripts/search/tests/highlighting-setup01 new file mode 100755 index 00000000000..82218643f7e --- /dev/null +++ b/scripts/search/tests/highlighting-setup01 @@ -0,0 +1,7 @@ +#!/bin/bash +# We assume that scripts/search/tests/permissions came back clean. +# We assume you've added the bird and tree dataverses with this: +# +cd scripts/search +./populate +./create diff --git a/scripts/search/tests/highlighting-setup02 b/scripts/search/tests/highlighting-setup02 new file mode 100755 index 00000000000..85ba9c6ae7a --- /dev/null +++ b/scripts/search/tests/highlighting-setup02 @@ -0,0 +1,22 @@ +#!/bin/bash +# We assume you've added the bird and tree dataverses with highlighting-setup01 +# We run the command below to create http://localhost:8080/dataset.xhtml?id=17 +# in the "Trees" dataverse at http://localhost:8080/dataverse.xhtml?id=11 +# with the following: +# +# Title: Rings of Trees and Other Observations +# Author: Tree, Tony +# Affiliation: Trees Inc. +# Contact E-mail: tony@trees.com +# Description: Trees have rings. Trees can be tall. +# Keyword: trees +# Keyword: rings +# Keyword: tall +# Subject: Medicine, Health & Life Sciences +# Notes: Many notes have been taken about trees over the years. +# +# Contributor: +# Type: Data Collector +# Name: Edward Trees Jr. 
+# +curl -X POST -H "Content-type:application/json" -d @scripts/search/data/dataset01-create-new.json http://localhost:8080/api/dvs/11/datasets/?key=pete diff --git a/scripts/search/tests/highlighting-setup03 b/scripts/search/tests/highlighting-setup03 new file mode 100755 index 00000000000..635ad3ac1a2 --- /dev/null +++ b/scripts/search/tests/highlighting-setup03 @@ -0,0 +1,7 @@ +#!/bin/sh +USERNAME=pete +PASSWORD=pete +DVN_SERVER=localhost:8181 +GLOBAL_ID=doi:10.5072/FK2/17 +curl -s --insecure --data-binary @scripts/search/data/binary/trees.zip -H "Content-Disposition: filename=example.zip" -H "Content-Type: application/zip" -H "Packaging: http://purl.org/net/sword/package/SimpleZip" https://$USERNAME:$PASSWORD@$DVN_SERVER/dvn/api/data-deposit/v1/swordv2/edit-media/study/$GLOBAL_ID \ +| xmllint -format - diff --git a/scripts/setup/asadmin-setup.sh b/scripts/setup/asadmin-setup.sh index 1f9c08c2635..9cb2d112cc4 100755 --- a/scripts/setup/asadmin-setup.sh +++ b/scripts/setup/asadmin-setup.sh @@ -96,8 +96,12 @@ if [ $SUDO_USER == "vagrant" ] DB_NAME=dataverse_db DB_USER=dataverse_app DB_PASS=secret + echo "Configuring EPEL Maven repo " + cd /etc/yum.repos.d + wget http://repos.fedorapeople.org/repos/dchen/apache-maven/epel-apache-maven.repo + cd echo "Installing dependencies via yum" - yum install -y -q java-1.7.0-openjdk postgresql-server + yum install -y -q java-1.7.0-openjdk-devel postgresql-server apache-maven httpd mod_ssl rpm -q postgresql-server echo "Starting PostgreSQL" chkconfig postgresql on @@ -120,6 +124,7 @@ if [ $SUDO_USER == "vagrant" ] su $GLASSFISH_USER -s /bin/sh -c "cp $GLASSFISH_ZIP $GLASSFISH_USER_HOME" su $GLASSFISH_USER -s /bin/sh -c "cd $GLASSFISH_USER_HOME && unzip -q $GLASSFISH_ZIP" DEFAULT_GLASSFISH_ROOT=$GLASSFISH_USER_HOME/glassfish4 + su $GLASSFISH_USER -s /bin/sh -c "/scripts/installer/glassfish-setup.sh" fi @@ -172,6 +177,13 @@ if ! grep -qs postgres $DOMAIN_LIB/* echo postgresql driver already installed. 
fi +if [ $SUDO_USER == "vagrant" ] + then + /scripts/installer/glassfish-setup.sh + echo "Done configuring Vagrant environment" + exit 0 +fi + ### # Move to the glassfish dir pushd $GLASSFISH_BIN_DIR @@ -187,9 +199,7 @@ if [ $(echo $DOMAIN_DOWN|wc -c) -ne 1 ]; echo domain running fi -# ONCE AGAIN, ASADMIN COMMANDS BELOW HAVE ALL BEEN MOVED INTO THIS SCRIPT: - -../installer/glassfish-setup.sh +# ONCE AGAIN, ASADMIN COMMANDS BELOW HAVE ALL BEEN MOVED INTO scripts/installer/glassfish-setup.sh # TODO: diagnostics diff --git a/scripts/vagrant/deploy.sh b/scripts/vagrant/deploy.sh index 50fcad45feb..a00f9b394b4 100755 --- a/scripts/vagrant/deploy.sh +++ b/scripts/vagrant/deploy.sh @@ -9,9 +9,14 @@ su $GLASSFISH_USER -s /bin/sh -c "cd $SOLR_HOME && tar xfz solr-4.6.0.tgz" su $GLASSFISH_USER -s /bin/sh -c "cp /conf/solr/4.6.0/schema.xml $SOLR_HOME/solr-4.6.0/example/solr/collection1/conf/schema.xml" su $GLASSFISH_USER -s /bin/sh -c "cd $SOLR_HOME/solr-4.6.0/example && java -jar start.jar &" -#su $GLASSFISH_USER -s /bin/sh -c "cp /builds/dataverse-4.0.war /home/glassfish/glassfish4/glassfish/domains/domain1/autodeploy" -cp /builds/dataverse-4.0.war /home/glassfish/glassfish4/glassfish/domains/domain1/autodeploy -sleep 60 +WAR=/dataverse/target/dataverse-4.0.war +if [ ! -f $WAR ]; then + su $SUDO_USER -s /bin/sh -c "cd /dataverse && mvn package" +fi +su $GLASSFISH_USER -s /bin/sh -c "cp $WAR /home/glassfish/glassfish4/glassfish/domains/domain1/autodeploy" +# FIXME: detect when the app is up rather than sleeping +# maybe check $? for 0 after `asadmin list-applications | grep dataverse` +sleep 180 cd /scripts/api ./datasetfields.sh ./setup-users.sh diff --git a/scripts/vagrant/test.sh b/scripts/vagrant/test.sh new file mode 100755 index 00000000000..3c5b835cb79 --- /dev/null +++ b/scripts/vagrant/test.sh @@ -0,0 +1,6 @@ +#!/bin/sh +echo "running tests..." +echo "running search tests..." +cd / +scripts/search/tests/permissions +echo "done running tests. no output is good. 
silence is golden" diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetField.java b/src/main/java/edu/harvard/iq/dataverse/DatasetField.java index 131705aa86d..be76e83e518 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetField.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetField.java @@ -33,8 +33,8 @@ @Entity @ValidateDatasetFieldType public class DatasetField implements Serializable { - private static final long serialVersionUID = 1L; - + private static final long serialVersionUID = 1L; + /** * Orders dataset fields by their display order. */ @@ -45,13 +45,12 @@ public int compare(DatasetField o1, DatasetField o2) { o2.getDatasetFieldType().getDisplayOrder() ); }}; - public static DatasetField createNewEmptyDatasetField(DatasetFieldType dsfType, DatasetVersion dsv) { DatasetField dsfv = createNewEmptyDatasetField(dsfType); dsfv.setDatasetVersion(dsv); return dsfv; } - + // originally this was an overloaded method, but we renamed it to get around an issue with Bean Validation // (that looked t overloaded methods, when it meant to look at overriden methods public static DatasetField createNewEmptyChildDatasetField(DatasetFieldType dsfType, DatasetFieldCompoundValue compoundValue) { @@ -78,6 +77,7 @@ private static DatasetField createNewEmptyDatasetField(DatasetFieldType dsfType) /** * Groups a list of fields by the block they belong to. + * * @param fields well, duh. * @return a map, mapping each block to the fields that belong to it. */ @@ -213,11 +213,26 @@ public String getValue() { public String getDisplayValue() { String returnString = ""; for (String value : getValues()) { + if(value == null) value=""; returnString += (returnString.equals("") ? 
"" : "; ") + value; } return returnString; } - + + public String getCompoundDisplayValue() { + String returnString = ""; + for (DatasetFieldCompoundValue dscv : datasetFieldCompoundValues) { + for (DatasetField dsf : dscv.getChildDatasetFields()) { + for (String value : dsf.getValues()) { + if (!(value == null)) { + returnString += (returnString.equals("") ? "" : "; ") + value; + } + } + } + } + return returnString; + } + public List getValues() { List returnList = new ArrayList(); if (!datasetFieldValues.isEmpty()) { @@ -226,8 +241,8 @@ public List getValues() { } } else { for (ControlledVocabularyValue cvv : controlledVocabularyValues) { - if (cvv != null && cvv.getStrValue() != null){ - returnList.add(cvv.getStrValue()); + if (cvv != null && cvv.getStrValue() != null) { + returnList.add(cvv.getStrValue()); } } } @@ -237,7 +252,7 @@ public List getValues() { public boolean isEmpty() { if (datasetFieldType.isPrimitive()) { // primitive for (String value : getValues()) { - if (value != null && !value.trim().isEmpty() ) { + if (value != null && !value.trim().isEmpty()) { return false; } } @@ -253,9 +268,9 @@ public boolean isEmpty() { return true; } - - @Transient private String validationMessage; + @Transient + private String validationMessage; public String getValidationMessage() { return validationMessage; @@ -264,7 +279,6 @@ public String getValidationMessage() { public void setValidationMessage(String validationMessage) { this.validationMessage = validationMessage; } - @Override public int hashCode() { diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldCompoundValue.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldCompoundValue.java index fc0a8bc85d8..7dc5ff00464 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldCompoundValue.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldCompoundValue.java @@ -17,6 +17,7 @@ import javax.persistence.Id; import javax.persistence.ManyToOne; import javax.persistence.OneToMany; 
+import javax.persistence.OrderBy; /** * @@ -54,6 +55,7 @@ public static DatasetFieldCompoundValue createNewEmptyDatasetFieldCompoundValue( private DatasetField parentDatasetField; @OneToMany(mappedBy = "parentDatasetFieldCompoundValue", orphanRemoval=true, cascade = {CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST}) + @OrderBy("datasetFieldType ASC") private List childDatasetFields = new ArrayList(); public Long getId() { diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValueValidator.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValueValidator.java index 0ca72bff60e..bd3a9ee5942 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValueValidator.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValueValidator.java @@ -64,7 +64,7 @@ public boolean isValid(DatasetFieldValue value, ConstraintValidatorContext conte // as the release date... // -- L.A. 4.0 beta - valid = (isValidDate(value.getValue(), "yyyy-MM-dd'T'HH:mm:ss") || isValidDate(value.getValue(), "yyyy-MM-dd HH:mm:ss")); + valid = (isValidDate(value.getValue(), "yyyy-MM-dd'T'HH:mm:ss") || isValidDate(value.getValue(), "yyyy-MM-dd'T'HH:mm:ss.SSS") || isValidDate(value.getValue(), "yyyy-MM-dd HH:mm:ss")); } if (!valid ) { @@ -110,7 +110,7 @@ public boolean isValid(DatasetFieldValue value, ConstraintValidatorContext conte } if (fieldType.equals("email")) { - Pattern p = Pattern.compile(".+@.+\\.[a-z]+"); + Pattern p = Pattern.compile("^[_A-Za-z0-9-]+(\\.[_A-Za-z0-9-]+)*@[A-Za-z0-9]+(\\.[A-Za-z0-9]+)*(\\.[A-Za-z]{2,})$"); Matcher m = p.matcher(value.getValue()); boolean matchFound = m.matches(); if (!matchFound) { diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 9fb7fbbafcf..67b352517d7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -10,7 +10,7 @@ import 
edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetCommand; -import edu.harvard.iq.dataverse.engine.command.impl.ReleaseDatasetCommand; +import edu.harvard.iq.dataverse.engine.command.impl.PublishDatasetCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetCommand; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.util.FileUtil; @@ -26,7 +26,9 @@ import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; +import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; @@ -50,6 +52,7 @@ import javax.validation.ValidatorFactory; import org.apache.commons.httpclient.HttpClient; import org.apache.commons.httpclient.methods.GetMethod; +import org.primefaces.context.RequestContext; /** * @@ -108,6 +111,10 @@ public enum DisplayMode { private String datasetNextMajorVersion = "1.0"; private String datasetNextMinorVersion = ""; private String dropBoxSelection = ""; + private DatasetVersionDifference datasetVersionDifference; + private Map checked = new HashMap<>(); + + private String displayCitation; @@ -240,6 +247,7 @@ public void init() { dataset.setOwner(dataverseService.find(ownerId)); datasetVersionUI = new DatasetVersionUI(editVersion); + dataset.setIdentifier(datasetService.generateIdentifierSequence("doi", "10.5072/FK2")); //On create set pre-populated fields for (DatasetField dsf : dataset.getEditVersion().getDatasetFields()) { if (dsf.getDatasetFieldType().getName().equals(DatasetFieldConstant.depositor)) { @@ -301,14 +309,14 @@ private String releaseDataset(boolean minor) { Command cmd; try { if (editMode == EditMode.CREATE) { - cmd = new ReleaseDatasetCommand(dataset, session.getUser(), minor); + cmd = new PublishDatasetCommand(dataset, 
session.getUser(), minor); } else { - cmd = new ReleaseDatasetCommand(dataset, session.getUser(), minor); + cmd = new PublishDatasetCommand(dataset, session.getUser(), minor); } dataset = commandEngine.submit(cmd); } catch (CommandException ex) { FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, "Dataset Release Failed", " - " + ex.toString())); - Logger.getLogger(DatasetPage.class.getName()).log(Level.SEVERE, null, ex); + logger.severe(ex.getMessage()); } FacesMessage message = new FacesMessage(FacesMessage.SEVERITY_INFO, "DatasetReleased", "Your dataset is now public."); FacesContext.getCurrentInstance().addMessage(null, message); @@ -326,11 +334,11 @@ public void refresh(ActionEvent e) { DataFile dataFile = fileMetadata.getDataFile(); // and see if any are marked as "ingest-in-progress": if (dataFile.isIngestInProgress()) { - Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Refreshing the status of the file " + fileMetadata.getLabel() + "..."); + logger.info("Refreshing the status of the file " + fileMetadata.getLabel() + "..."); // and if so, reload the file object from the database... dataFile = datafileService.find(dataFile.getId()); if (!dataFile.isIngestInProgress()) { - Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "File " + fileMetadata.getLabel() + " finished ingesting."); + logger.info("File " + fileMetadata.getLabel() + " finished ingesting."); // and, if the status has changed - i.e., if the ingest has // completed, or failed, update the object in the list of // files visible to the page: @@ -375,94 +383,25 @@ public String save() { //TODO get real application-wide protocol/authority dataset.setProtocol("doi"); dataset.setAuthority("10.5072/FK2"); - dataset.setIdentifier("5555"); - + /* * Save and/or ingest files, if there are any: + + * All the back end-specific ingest logic has been moved into + * the IngestServiceBean! -- L.A. 
+ * TODO: we still need to figure out how the ingestServiceBean is + * going to communicate the information about ingest errors back to + * the page, and what the page should be doing to alert the user. + * (we may not do any communication/exceptions/etc. here - relying + * instead on the ingest/upload status properly set on each of the + * individual files, and adding a mechanism to the page for displaying + * file-specific error reports - in pop-up windows maybe?) */ - if (newFiles != null && newFiles.size() > 0) { - try { - if (dataset.getFileSystemDirectory() != null && !Files.exists(dataset.getFileSystemDirectory())) { - /* Note that "createDirectories()" must be used - not - * "createDirectory()", to make sure all the parent - * directories that may not yet exist are created as well. - */ - - Files.createDirectories(dataset.getFileSystemDirectory()); - } - } catch (IOException dirEx) { - Logger.getLogger(DatasetPage.class.getName()).log(Level.SEVERE, "Failed to create study directory " + dataset.getFileSystemDirectory().toString()); - } - - if (dataset.getFileSystemDirectory() != null && Files.exists(dataset.getFileSystemDirectory())) { - for (DataFile dFile : newFiles) { - String tempFileLocation = getFilesTempDirectory() + "/" + dFile.getFileSystemName(); - - // These are all brand new files, so they should all have - // one filemetadata total. You do NOT want to use - // getLatestFilemetadata() here - because it relies on - // comparing the object IDs of the corresponding datasetversions... - // Which may not have been persisted yet. - // -- L.A. 4.0 beta. 
- FileMetadata fileMetadata = dFile.getFileMetadatas().get(0); - String fileName = fileMetadata.getLabel(); - - //boolean ingestedAsTabular = false; - boolean metadataExtracted = false; - - datasetService.generateFileSystemName(dFile); - - if (ingestService.ingestableAsTabular(dFile)) { - /* - * Note that we don't try to ingest the file right away - - * instead we mark it as "scheduled for ingest", then at - * the end of the save process it will be queued for async. - * ingest in the background. In the meantime, the file - * will be ingested as a regular, non-tabular file, and - * appear as such to the user, until the ingest job is - * finished with the Ingest Service. - */ - dFile.SetIngestScheduled(); - } else if (ingestService.fileMetadataExtractable(dFile)) { - - try { - dFile.setContentType("application/fits"); - metadataExtracted = ingestService.extractIndexableMetadata(tempFileLocation, dFile, editVersion); - } catch (IOException mex) { - Logger.getLogger(DatasetPage.class.getName()).log(Level.SEVERE, "Caught exception trying to extract indexable metadata from file " + fileName, mex); - } - if (metadataExtracted) { - Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Successfully extracted indexable metadata from file " + fileName); - } else { - Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Failed to extract indexable metadata from file " + fileName); - } - } - - // Try to save the file in its permanent location: - //if (!ingestedAsTabular) { - try { - - Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Will attempt to save the file as: " + dFile.getFileSystemLocation().toString()); - Files.copy(new FileInputStream(new File(tempFileLocation)), dFile.getFileSystemLocation(), StandardCopyOption.REPLACE_EXISTING); - - MD5Checksum md5Checksum = new MD5Checksum(); - try { - dFile.setmd5(md5Checksum.CalculateMD5(dFile.getFileSystemLocation().toString())); - } catch (Exception md5ex) { - 
Logger.getLogger(DatasetPage.class.getName()).log(Level.WARNING, "Could not calculate MD5 signature for the new file " + fileName); - } - - } catch (IOException ioex) { - Logger.getLogger(DatasetPage.class.getName()).log(Level.WARNING, "Failed to save the file " + dFile.getFileSystemLocation()); - } - //} - - // Any necessary post-processing: - ingestService.performPostProcessingTasks(dFile); - } - } - } - + + ingestService.addFiles(editVersion, newFiles); + + // Use the API to save the dataset: + Command cmd; try { if (editMode == EditMode.CREATE) { @@ -489,44 +428,17 @@ public String save() { return null; } catch (CommandException ex) { FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, "Dataset Save Failed", " - " + ex.toString())); - Logger.getLogger(DatasetPage.class.getName()).log(Level.SEVERE, null, ex); + logger.severe(ex.getMessage()); } newFiles.clear(); editMode = null; - // Queue the ingest jobs for asynchronous execution: - for (DataFile dataFile : dataset.getFiles()) { - if (dataFile.isIngestScheduled()) { - dataFile.SetIngestInProgress(); - Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Attempting to queue the file " + dataFile.getFileMetadata().getLabel() + " for ingest."); - ingestService.asyncIngestAsTabular(dataFile); - } - } - - return "/dataset.xhtml?id=" + dataset.getId() + "&versionId=" + dataset.getLatestVersion().getId() + "&faces-redirect=true"; - } - - private String getFilesTempDirectory() { - String filesRootDirectory = System.getProperty("dataverse.files.directory"); - if (filesRootDirectory == null || filesRootDirectory.equals("")) { - filesRootDirectory = "/tmp/files"; - } - - String filesTempDirectory = filesRootDirectory + "/temp"; - - if (!Files.exists(Paths.get(filesTempDirectory))) { - /* Note that "createDirectories()" must be used - not - * "createDirectory()", to make sure all the parent - * directories that may not yet exist are created as well. 
- */ - try { - Files.createDirectories(Paths.get(filesTempDirectory)); - } catch (IOException ex) { - return null; - } - } + // Call Ingest Service one more time, to + // queue the data ingest jobs for asynchronous execution: + + ingestService.startIngestJobs(dataset); - return filesTempDirectory; + return "/dataset.xhtml?id=" + dataset.getId() + "&versionId=" + dataset.getLatestVersion().getId() + "&faces-redirect=true"; } public void cancel() { @@ -557,7 +469,7 @@ public void handleDropBoxUpload(ActionEvent e) { String fileName = dbObject.getString("name"); int fileSize = dbObject.getInt("bytes"); - Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "DropBox url: " + fileLink + ", filename: " + fileName + ", size: " + fileSize); + logger.info("DropBox url: " + fileLink + ", filename: " + fileName + ", size: " + fileSize); DataFile dFile = null; @@ -569,25 +481,17 @@ public void handleDropBoxUpload(ActionEvent e) { status = getClient().executeMethod(dropBoxMethod); if (status == 200) { dropBoxStream = dropBoxMethod.getResponseBodyAsStream(); - dFile = new DataFile("application/octet-stream"); - dFile.setOwner(dataset); - - // save the file, in the temporary location for now: - datasetService.generateFileSystemName(dFile); - if (getFilesTempDirectory() != null) { - Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "Will attempt to save the DropBox file as: " + getFilesTempDirectory() + "/" + dFile.getFileSystemName()); - Files.copy(dropBoxStream, Paths.get(getFilesTempDirectory(), dFile.getFileSystemName()), StandardCopyOption.REPLACE_EXISTING); - File tempFile = Paths.get(getFilesTempDirectory(), dFile.getFileSystemName()).toFile(); - if (tempFile.exists()) { - long writtenBytes = tempFile.length(); - Logger.getLogger(DatasetPage.class.getName()).log(Level.INFO, "File size, expected: " + fileSize + ", written: " + writtenBytes); - } else { - throw new IOException(); - } - } + + // If we've made it this far, we must have been able to + // 
make a successful HTTP call to the DropBox server and + // obtain an InputStream - so we can now create a new + // DataFile object: + + dFile = ingestService.createDataFile(editVersion, dropBoxStream, fileName, null); + newFiles.add(dFile); } } catch (IOException ex) { - Logger.getLogger(DatasetPage.class.getName()).log(Level.WARNING, "Failed to access DropBox url: " + fileLink + "!"); + logger.warning("Failed to access DropBox url: " + fileLink + "!"); continue; } finally { if (dropBoxMethod != null) { @@ -597,99 +501,24 @@ public void handleDropBoxUpload(ActionEvent e) { try { dropBoxStream.close(); } catch (Exception ex) { + //logger.whocares("..."); } } - } - - // If we've made it this far, we must have downloaded the file - // successfully, so let's finish processing it as a new DataFile - // object: - FileMetadata fmd = new FileMetadata(); - fmd.setDataFile(dFile); - dFile.getFileMetadatas().add(fmd); - fmd.setLabel(fileName); - fmd.setCategory(dFile.getContentType()); - if (editVersion.getFileMetadatas() == null) { - editVersion.setFileMetadatas(new ArrayList()); - } - editVersion.getFileMetadatas().add(fmd); - fmd.setDatasetVersion(editVersion); - dataset.getFiles().add(dFile); - - // When uploading files from dropBox, we don't get the benefit of - // having the browser recognize the mime type of the file. So we'll - // have to rely on our own utilities (Jhove, etc.) to try and determine - // what it is. 
- String fileType = null; - try { - fileType = FileUtil.determineFileType(Paths.get(getFilesTempDirectory(), dFile.getFileSystemName()).toFile(), fileName); - Logger.getLogger(DatasetPage.class.getName()).log(Level.FINE, "File utility recognized the file as " + fileType); - if (fileType != null && !fileType.equals("")) { - dFile.setContentType(fileType); - } - } catch (IOException ex) { - Logger.getLogger(DatasetPage.class.getName()).log(Level.WARNING, "Failed to run the file utility mime type check on file " + fileName); - } - - newFiles.add(dFile); + } } } public void handleFileUpload(FileUploadEvent event) { UploadedFile uFile = event.getFile(); - DataFile dFile = new DataFile(uFile.getContentType()); - - FileMetadata fmd = new FileMetadata(); - fmd.setLabel(uFile.getFileName()); - fmd.setCategory(dFile.getContentType()); - - dFile.setOwner(dataset); - fmd.setDataFile(dFile); - - dFile.getFileMetadatas().add(fmd); - - if (editVersion.getFileMetadatas() == null) { - editVersion.setFileMetadatas(new ArrayList()); - } - editVersion.getFileMetadatas().add(fmd); - fmd.setDatasetVersion(editVersion); - dataset.getFiles().add(dFile); - - datasetService.generateFileSystemName(dFile); - - // save the file, in the temporary location for now: - if (getFilesTempDirectory() != null) { - try { - - Logger.getLogger(DatasetPage.class.getName()).log(Level.FINE, "Will attempt to save the file as: " + getFilesTempDirectory() + "/" + dFile.getFileSystemName()); - Files.copy(uFile.getInputstream(), Paths.get(getFilesTempDirectory(), dFile.getFileSystemName()), StandardCopyOption.REPLACE_EXISTING); - } catch (IOException ioex) { - Logger.getLogger(DatasetPage.class.getName()).log(Level.WARNING, "Failed to save the file " + dFile.getFileSystemName()); - return; - } - } - - // Let's try our own utilities (Jhove, etc.) to determine the file type - // of the uploaded file. (we may or may not do better than the browser, - // which may have already recognized the type correctly...) 
- String fileType = null; + DataFile dFile = null; + try { - fileType = FileUtil.determineFileType(Paths.get(getFilesTempDirectory(), dFile.getFileSystemName()).toFile(), fmd.getLabel()); - Logger.getLogger(DatasetPage.class.getName()).log(Level.FINE, "File utility recognized the file as " + fileType); - if (fileType != null && !fileType.equals("")) { - // let's look at the browser's guess regarding the mime type - // of the file: - String bgType = dFile.getContentType(); - Logger.getLogger(DatasetPage.class.getName()).log(Level.FINE, "Browser recognized the file as " + bgType); - - if (bgType == null || bgType.equals("") || bgType.equalsIgnoreCase("application/octet-stream")) { - dFile.setContentType(fileType); - } - } - } catch (IOException ex) { - Logger.getLogger(DatasetPage.class.getName()).log(Level.WARNING, "Failed to run the file utility mime type check on file " + fmd.getLabel()); + dFile = ingestService.createDataFile(editVersion, uFile.getInputstream(), uFile.getFileName(), uFile.getContentType()); + } catch (IOException ioex) { + logger.warning("Failed to process and/or save the file " + uFile.getFileName() + "; " + ioex.getMessage()); + return; } - + newFiles.add(dFile); } @@ -707,6 +536,58 @@ public List getVersionTabList() { public void setVersionTabList(List versionTabList) { this.versionTabList = versionTabList; } + + + private List selectedVersions; + public List getSelectedVersions() { + return selectedVersions; + } + + public void setSelectedVersions(List selectedVersions) { + this.selectedVersions = selectedVersions; + } + + + public DatasetVersionDifference getDatasetVersionDifference() { + return datasetVersionDifference; + } + + public void setDatasetVersionDifference(DatasetVersionDifference datasetVersionDifference) { + this.datasetVersionDifference = datasetVersionDifference; + } + + public void compareVersionDifferences() { + RequestContext requestContext = RequestContext.getCurrentInstance(); + if (this.selectedVersions.size() != 2) { + 
requestContext.execute("openCompareTwo();"); + } else { + //order depends on order of selection - needs to be chronological order + if (this.selectedVersions.get(0).getId().intValue() > this.selectedVersions.get(1).getId().intValue() ){ + updateVersionDifferences(this.selectedVersions.get(0), this.selectedVersions.get(1)); + } else { + updateVersionDifferences(this.selectedVersions.get(1), this.selectedVersions.get(0)); + } + } + } + + public void updateVersionDifferences(DatasetVersion newVersion, DatasetVersion originalVersion) { + int count = 0; + int size = this.getDataset().getVersions().size(); + + if (originalVersion == null) { + for (DatasetVersion dsv : newVersion.getDataset().getVersions()) { + if (newVersion.equals(dsv)) { + if ((count + 1) < size) { + setDatasetVersionDifference(new DatasetVersionDifference(newVersion, newVersion.getDataset().getVersions().get(count + 1))); + break; + } + } + count++; + } + } else { + setDatasetVersionDifference(new DatasetVersionDifference(newVersion, originalVersion)); + } + } private boolean canIssueUpdateCommand() { try { @@ -733,8 +614,6 @@ private List resetVersionTabList() { } } return retList; - } } - } diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java index 5ffff921859..cb2fbf8ed22 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java @@ -30,7 +30,7 @@ public class DatasetServiceBean { private EntityManager em; public Dataset find(Object pk) { - return (Dataset) em.find(Dataset.class, pk); + return em.find(Dataset.class, pk); } public List findByOwnerId(Long ownerId) { @@ -67,5 +67,76 @@ public Dataset findByGlobalId(String globalId) { } return foundDataset; } + + public String generateIdentifierSequence(String protocol, String authority) { + + String identifier=null; + do { + identifier = ((Long) em.createNativeQuery("select 
nextval('dvobject_id_seq')").getSingleResult()).toString(); + + } while (!isUniqueIdentifier(identifier, protocol, authority)); + + + return identifier; + + } + + /** + * Check that a studyId entered by the user is unique (not currently used for any other study in this Dataverse Network) + */ + private boolean isUniqueIdentifier(String userIdentifier, String protocol,String authority) { + String query = "SELECT d FROM Dataset d WHERE d.identifier = '" + userIdentifier +"'"; + query += " and d.protocol ='"+protocol+"'"; + query += " and d.authority = '"+authority+"'"; + boolean u = em.createQuery(query).getResultList().size()==0; + return u; + } + + /* + public Study getStudyByGlobalId(String identifier) { + String protocol = null; + String authority = null; + String studyId = null; + int index1 = identifier.indexOf(':'); + int index2 = identifier.indexOf('/'); + int index3 = 0; + if (index1 == -1) { + throw new EJBException("Error parsing identifier: " + identifier + ". ':' not found in string"); + } else { + protocol = identifier.substring(0, index1); + } + if (index2 == -1) { + throw new EJBException("Error parsing identifier: " + identifier + ". 
'/' not found in string"); + + } else { + authority = identifier.substring(index1 + 1, index2); + } + if (protocol.equals("doi")){ + index3 = identifier.indexOf('/', index2 + 1 ); + if (index3== -1){ + studyId = identifier.substring(index2 + 1).toUpperCase(); + } else { + authority = identifier.substring(index1 + 1, index3); + studyId = identifier.substring(index3 + 1).toUpperCase(); + } + } else { + studyId = identifier.substring(index2 + 1).toUpperCase(); + } + + String queryStr = "SELECT s from Study s where s.studyId = :studyId and s.protocol= :protocol and s.authority= :authority"; + Study study = null; + try { + Query query = em.createQuery(queryStr); + query.setParameter("studyId", studyId); + query.setParameter("protocol", protocol); + query.setParameter("authority", authority); + study = (Study) query.getSingleResult(); + } catch (javax.persistence.NoResultException e) { + // DO nothing, just return null. + } + return study; + } +*/ + } diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java index a5c91f8195d..1845dba5519 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java @@ -61,7 +61,11 @@ public void setId(Long id) { @Version private Long version; - + + /** + * This is JPA's optimistic locking mechanism, and has no semantic meaning in the DV object model. 
+ * @return the object db version + */ public Long getVersion() { return this.version; } @@ -196,6 +200,21 @@ public void setReleaseTime(Date releaseTime) { public String getVersionNote() { return versionNote; } + + public DatasetVersionDifference getDefaultVersionDifference(){ + int count = 0; + int size = this.getDataset().getVersions().size(); + for (DatasetVersion dsv: this.getDataset().getVersions()){ + if (this.equals(dsv)){ + if ((count + 1) < size){ + DatasetVersionDifference dvd = new DatasetVersionDifference(this, this.getDataset().getVersions().get(count+1)); + return dvd; + } + } + count++; + } + return null; + } public void setVersionNote(String note) { if (note != null && note.length() > VERSION_NOTE_MAX_LENGTH) { @@ -342,7 +361,7 @@ public String getTitle() { } return retVal; } - + public String getProductionDate() { //todo get "Production Date" from datasetfieldvalue table return "Production Date"; @@ -614,6 +633,10 @@ public String getSemanticVersion() { * See also to v or not to v · Issue #1 · mojombo/semver - * https://github.com/mojombo/semver/issues/1#issuecomment-2605236 */ - return versionNumber + "." + minorVersionNumber; + if (this.isReleased()){ + return versionNumber + "." 
+ minorVersionNumber; + } else { + return "DRAFT"; + } } } diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java new file mode 100644 index 00000000000..20279db6d8c --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java @@ -0,0 +1,793 @@ +package edu.harvard.iq.dataverse; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; + +/** + * + * @author skraffmiller + */ +public class DatasetVersionDifference { + + private DatasetVersion newVersion; + private DatasetVersion originalVersion; + private List detailDataByBlock = new ArrayList<>(); + private List datasetFilesDiffList; + private List addedFiles = new ArrayList(); + private List removedFiles = new ArrayList(); + private List changedFileMetadata = new ArrayList(); + private List summaryDataForNote = new ArrayList(); + private List blockDataForNote = new ArrayList(); + String noFileDifferencesFoundLabel = ""; + + public DatasetVersionDifference(DatasetVersion newVersion, DatasetVersion originalVersion) { + setOriginalVersion(originalVersion); + setNewVersion(newVersion); + //Compare Data + for (DatasetField dsfo : originalVersion.getDatasetFields()) { + boolean deleted = true; + for (DatasetField dsfn : newVersion.getDatasetFields()) { + if (dsfo.getDatasetFieldType().equals(dsfn.getDatasetFieldType())) { + deleted = false; + if (dsfo.getDatasetFieldType().isPrimitive()) { + if (!dsfo.getDatasetFieldType().getFieldType().equals("email") ) { + compareValues(dsfo, dsfn, false); + } + } else { + compareValues(dsfo, dsfn, true); + } + break; //if found go to next dataset field + } + } + if (deleted && !dsfo.isEmpty()) { + updateBlockSummary(dsfo, 0, dsfo.getDatasetFieldValues().size(), 0); + addToSummary(dsfo, null); + } + } + for (DatasetField dsfn : newVersion.getDatasetFields()) { + boolean added = true; + if 
(dsfn.getDatasetFieldType().isPrimitive()){ + for (DatasetField dsfo : originalVersion.getDatasetFields()) { + if (dsfo.getDatasetFieldType().equals(dsfn.getDatasetFieldType())) { + added = false; + break; + } + } + if (added && !dsfn.isEmpty()) { + updateBlockSummary(dsfn, dsfn.getDatasetFieldValues().size(), 0, 0); + addToSummary(null, dsfn); + } + } + } + + for (FileMetadata fmdo : originalVersion.getFileMetadatas()) { + boolean deleted = true; + for (FileMetadata fmdn : newVersion.getFileMetadatas()) { + if (fmdo.getDataFile().equals(fmdn.getDataFile())) { + deleted = false; + if (!compareFileMetadatas(fmdo, fmdn)) { + changedFileMetadata.add(fmdo); + changedFileMetadata.add(fmdn); + } + break; + } + } + if (deleted) { + removedFiles.add(fmdo); + } + } + for (FileMetadata fmdn : newVersion.getFileMetadatas()) { + boolean added = true; + for (FileMetadata fmdo : originalVersion.getFileMetadatas()) { + if (fmdo.getDataFile().equals(fmdn.getDataFile())) { + added = false; + break; + } + } + if (added) { + addedFiles.add(fmdn); + } + } + initDatasetFilesDifferencesList(); + + //Sort within blocks by datasetfieldtype dispaly order then.... + //sort via metadatablock order - citation first... 
+ for (List blockList : detailDataByBlock) { + Collections.sort(blockList, new Comparator() { + public int compare(DatasetField[] l1, DatasetField[] l2) { + DatasetField dsfa = l1[0]; //(DatasetField[]) l1.get(0); + DatasetField dsfb = l2[0]; + int a = dsfa.getDatasetFieldType().getDisplayOrder(); + int b = dsfb.getDatasetFieldType().getDisplayOrder(); + return Integer.valueOf(a).compareTo(Integer.valueOf(b)); + } + }); + } + Collections.sort(detailDataByBlock, new Comparator() { + public int compare(List l1, List l2) { + DatasetField dsfa[] = (DatasetField[]) l1.get(0); + DatasetField dsfb[] = (DatasetField[]) l2.get(0); + int a = dsfa[0].getDatasetFieldType().getMetadataBlock().getId().intValue(); + int b = dsfb[0].getDatasetFieldType().getMetadataBlock().getId().intValue(); + return Integer.valueOf(a).compareTo(Integer.valueOf(b)); + } + }); + } + + private void addToList(List listIn, DatasetField dsfo, DatasetField dsfn) { + DatasetField[] dsfArray; + dsfArray = new DatasetField[2]; + dsfArray[0] = dsfo; + dsfArray[1] = dsfn; + listIn.add(dsfArray); + } + + private void addToSummary(DatasetField dsfo, DatasetField dsfn) { + if (dsfo == null) { + dsfo = new DatasetField(); + dsfo.setDatasetFieldType(dsfn.getDatasetFieldType()); + } + if (dsfn == null) { + dsfn = new DatasetField(); + dsfn.setDatasetFieldType(dsfo.getDatasetFieldType()); + } + boolean addedToAll = false; + for (List blockList : detailDataByBlock) { + DatasetField dsft[] = (DatasetField[]) blockList.get(0); + if (dsft[0].getDatasetFieldType().getMetadataBlock().equals(dsfo.getDatasetFieldType().getMetadataBlock())) { + addToList(blockList, dsfo, dsfn); + addedToAll = true; + } + } + if (!addedToAll) { + List newList = new ArrayList<>(); + addToList(newList, dsfo, dsfn); + detailDataByBlock.add(newList); + } + } + + private void updateBlockSummary(DatasetField dsf, int added, int deleted, int changed) { + + boolean addedToAll = false; + for (Object[] blockList : blockDataForNote) { + DatasetField 
dsft = (DatasetField) blockList[0]; + if (dsft.getDatasetFieldType().getMetadataBlock().equals(dsf.getDatasetFieldType().getMetadataBlock())) { + blockList[1] = (Integer) blockList[1] + added; + blockList[2] = (Integer) blockList[2] + deleted; + blockList[3] = (Integer) blockList[3] + changed; + addedToAll = true; + } + } + if (!addedToAll) { + Object[] newArray = new Object[4]; + newArray[0] = dsf; + newArray[1] = added; + newArray[2] = deleted; + newArray[3] = changed; + blockDataForNote.add(newArray); + } + + } + + private void addToNoteSummary(DatasetField dsfo, int added, int deleted, int changed) { + Object[] noteArray = new Object[4]; + noteArray[0] = dsfo; + noteArray[1] = added; + noteArray[2] = deleted; + noteArray[3] = changed; + summaryDataForNote.add(noteArray); + } + + private boolean compareFileMetadatas(FileMetadata fmdo, FileMetadata fmdn) { + + if (!(fmdo.getDescription().equals(fmdn.getDescription()))) { + return false; + } + if (!(fmdo.getCategory().equals(fmdn.getCategory()))) { + return false; + } + if (!(fmdo.getLabel().equals(fmdn.getLabel()))) { + return false; + } + + return true; + } + + + private void compareValues(DatasetField originalField, DatasetField newField, boolean compound) { + String originalValue = ""; + String newValue = ""; + int countOriginal = 0; + int countNew = 0; + int totalAdded = 0; + int totalDeleted = 0; + int totalChanged = 0; + int loopIndex = 0; + + if (compound) { + for (DatasetFieldCompoundValue datasetFieldCompoundValueOriginal : originalField.getDatasetFieldCompoundValues()) { + if (newField.getDatasetFieldCompoundValues().size() >= loopIndex + 1) { + for (DatasetField dsfo : datasetFieldCompoundValueOriginal.getChildDatasetFields()) { + if (!dsfo.getDisplayValue().isEmpty()) { + originalValue += dsfo.getDisplayValue() + ", "; + } + } + for (DatasetField dsfn : newField.getDatasetFieldCompoundValues().get(loopIndex).getChildDatasetFields()) { + if (!dsfn.getDisplayValue().isEmpty()) { + newValue += 
dsfn.getDisplayValue() + ", "; + } + } + if (originalValue.isEmpty() && !newValue.isEmpty()){ + totalAdded++; + } else if (!newValue.isEmpty() && !originalValue.equals(newValue)) { + totalChanged++; + } + } + loopIndex++; + } + countNew = newField.getDatasetFieldCompoundValues().size(); + countOriginal = originalField.getDatasetFieldCompoundValues().size(); + } else { + int index = 0; + for (String valString : originalField.getValues()) { + if (valString != null && !valString.isEmpty()) { + countOriginal++; + } + } + for (String valString : newField.getValues()) { + if (valString != null && !valString.isEmpty()) { + countNew++; + } + } + String nString = ""; + originalValue = originalField.getDisplayValue(); + newValue = newField.getDisplayValue(); + for (String oString : originalField.getValues()) { + if (newField.getValues().size() >= (index + 1)) { + nString = newField.getValues().get(index); + } + if (nString != null && oString != null && !oString.equals(nString)) { + totalChanged++; + } + } + if (originalValue.equalsIgnoreCase(newValue)){ + totalChanged = 0; + } + + } + if (countNew > countOriginal) { + totalAdded = countNew - countOriginal; + } + + if (countOriginal > countNew) { + totalDeleted = countOriginal - countNew; + } + if ((totalAdded + totalDeleted + totalChanged) > 0){ + if (originalField.getDatasetFieldType().isDisplayOnCreate() ) { + addToNoteSummary(originalField, totalAdded, totalDeleted, totalChanged); + addToSummary(originalField, newField); + } else { + updateBlockSummary(originalField, totalAdded, totalDeleted, totalChanged); + addToSummary(originalField, newField); + } + } + } + + public String getFileNote() { + String retString = ""; + + if (addedFiles.size() > 0) { + retString = "Files (Added: " + addedFiles.size(); + } + + if (removedFiles.size() > 0) { + if (retString.isEmpty()) { + retString = "Files (Removed: " + removedFiles.size(); + } else { + retString += "; Removed: " + removedFiles.size(); + } + } + + if 
(changedFileMetadata.size() > 0) { + if (retString.isEmpty()) { + retString = "Files (Changed File Metadata: " + changedFileMetadata.size() / 2; + } else { + retString += "; Changed File Metadata: " + changedFileMetadata.size() / 2; + } + } + + if (!retString.isEmpty()) { + retString += ")"; + } + + return retString; + } + + public List getDetailDataByBlock() { + return detailDataByBlock; + } + + public void setDetailDataByBlock(List detailDataByBlock) { + this.detailDataByBlock = detailDataByBlock; + } + + public List getAddedFiles() { + return addedFiles; + } + + public void setAddedFiles(List addedFiles) { + this.addedFiles = addedFiles; + } + + public List getRemovedFiles() { + return removedFiles; + } + + public void setRemovedFiles(List removedFiles) { + this.removedFiles = removedFiles; + } + + public DatasetVersion getNewVersion() { + return newVersion; + } + + public void setNewVersion(DatasetVersion newVersion) { + this.newVersion = newVersion; + } + + public DatasetVersion getOriginalVersion() { + return originalVersion; + } + + public void setOriginalVersion(DatasetVersion originalVersion) { + this.originalVersion = originalVersion; + } + + public List getChangedFileMetadata() { + return changedFileMetadata; + } + + public void setChangedFileMetadata(List changedFileMetadata) { + this.changedFileMetadata = changedFileMetadata; + } + + public List getSummaryDataForNote() { + return summaryDataForNote; + } + + public List getBlockDataForNote() { + return blockDataForNote; + } + + public void setSummaryDataForNote(List summaryDataForNote) { + this.summaryDataForNote = summaryDataForNote; + } + + public void setBlockDataForNote(List blockDataForNote) { + this.blockDataForNote = blockDataForNote; + } + + private void initDatasetFilesDifferencesList() { + datasetFilesDiffList = new ArrayList(); + + // Study Files themselves are version-less; + // In other words, 2 different versions can have different sets of + // study files, but the files themselves don't 
have versions. + // So in order to find the differences between the 2 sets of study + // files in 2 versions we can just go through the lists of the + // files and compare the ids. If both versions have the file with + // the same file id, it is the same file. + // UPDATE: in addition to the above, even when the 2 versions share the + // same study file, the file metadatas ARE version-specific, so some of + // the fields there (filename, etc.) may be different. If this is the + // case, we want to display these differences as well. + if (originalVersion.getFileMetadatas().size() == 0 && newVersion.getFileMetadatas().size() == 0) { + noFileDifferencesFoundLabel = "No data files in either version of the study"; + return; + } + + int i = 0; + int j = 0; + + FileMetadata fm1; + FileMetadata fm2; + + while (i < originalVersion.getFileMetadatas().size() + && j < newVersion.getFileMetadatas().size()) { + fm1 = originalVersion.getFileMetadatas().get(i); + fm2 = newVersion.getFileMetadatas().get(j); + + if (fm1.getDataFile().getId().compareTo(fm2.getDataFile().getId()) == 0) { + // The 2 versions share the same study file; + // Check if the metadata information is identical in the 2 versions + // of the metadata: + if (fileMetadataIsDifferent(fm1, fm2)) { + datasetFileDifferenceItem fdi = selectFileMetadataDiffs(fm1, fm2); + fdi.setFileId(fm1.getDataFile().getId().toString()); + datasetFilesDiffList.add(fdi); + } + i++; + j++; + } else if (fm1.getDataFile().getId().compareTo(fm2.getDataFile().getId()) > 0) { + datasetFileDifferenceItem fdi = selectFileMetadataDiffs(null, fm2); + fdi.setFileId(fm2.getDataFile().getId().toString()); + datasetFilesDiffList.add(fdi); + + j++; + } else if (fm1.getDataFile().getId().compareTo(fm2.getDataFile().getId()) < 0) { + datasetFileDifferenceItem fdi = selectFileMetadataDiffs(fm1, null); + fdi.setFileId(fm1.getDataFile().getId().toString()); + datasetFilesDiffList.add(fdi); + + i++; + } + } + + // We've reached the end of at least one file 
list. + // Whatever files are left on either of the 2 lists are automatically "different" + // between the 2 versions. + while (i < originalVersion.getFileMetadatas().size()) { + fm1 = originalVersion.getFileMetadatas().get(i); + datasetFileDifferenceItem fdi = selectFileMetadataDiffs(fm1, null); + fdi.setFileId(fm1.getDataFile().getId().toString()); + datasetFilesDiffList.add(fdi); + + i++; + } + + while (j < newVersion.getFileMetadatas().size()) { + fm2 = newVersion.getFileMetadatas().get(j); + datasetFileDifferenceItem fdi = selectFileMetadataDiffs(null, fm2); + fdi.setFileId(fm2.getDataFile().getId().toString()); + datasetFilesDiffList.add(fdi); + + j++; + } + + if (datasetFilesDiffList.size() == 0) { + noFileDifferencesFoundLabel = "These study versions have identical sets of data files"; + } + } + + private boolean fileMetadataIsDifferent(FileMetadata fm1, FileMetadata fm2) { + if (fm1 == null && fm2 == null) { + return false; + } + + if (fm1 == null && fm2 != null) { + return true; + } + + if (fm2 == null && fm1 != null) { + return true; + } + + // Both are non-null metadata objects. + // We simply go through the 5 metadata fields, if any one of them + // is different between the 2 versions, we declare the objects + // different. 
+ String value1; + String value2; + + // filename: + value1 = fm1.getLabel(); + value2 = fm2.getLabel(); + + if (value1 == null || value1.equals("") || value1.equals(" ")) { + value1 = "[Empty]"; + } + if (value2 == null || value2.equals("") || value2.equals(" ")) { + value2 = "[Empty]"; + } + + if (!value1.equals(value2)) { + return true; + } + + // file type: + value1 = fm1.getDataFile().getFriendlyType(); + value2 = fm2.getDataFile().getFriendlyType(); + + if (value1 == null || value1.equals("") || value1.equals(" ")) { + value1 = "[Empty]"; + } + if (value2 == null || value2.equals("") || value2.equals(" ")) { + value2 = "[Empty]"; + } + + if (!value1.equals(value2)) { + return true; + } + + // file size: + /* + value1 = FileUtil.byteCountToDisplaySize(new File(fm1.getStudyFile().getFileSystemLocation()).length()); + value2 = FileUtil.byteCountToDisplaySize(new File(fm2.getStudyFile().getFileSystemLocation()).length()); + + if (value1 == null || value1.equals("") || value1.equals(" ")) { + value1 = "[Empty]"; + } + if (value2 == null || value2.equals("") || value2.equals(" ")) { + value2 = "[Empty]"; + } + + if(!value1.equals(value2)) { + return true; + } + */ + // file category: + value1 = fm1.getCategory(); + value2 = fm2.getCategory(); + + if (value1 == null || value1.equals("") || value1.equals(" ")) { + value1 = "[Empty]"; + } + if (value2 == null || value2.equals("") || value2.equals(" ")) { + value2 = "[Empty]"; + } + + if (!value1.equals(value2)) { + return true; + } + + // file description: + value1 = fm1.getDescription(); + value2 = fm2.getDescription(); + + if (value1 == null || value1.equals("") || value1.equals(" ")) { + value1 = "[Empty]"; + } + if (value2 == null || value2.equals("") || value2.equals(" ")) { + value2 = "[Empty]"; + } + + if (!value1.equals(value2)) { + return true; + } + + // if we got this far, the 2 metadatas are identical: + return false; + } + + private datasetFileDifferenceItem selectFileMetadataDiffs(FileMetadata fm1, 
FileMetadata fm2) { + datasetFileDifferenceItem fdi = new datasetFileDifferenceItem(); + + if (fm1 == null && fm2 == null) { + // this should never happen; but if it does, + // we return an empty diff object. + + return fdi; + + } + if (fm2 == null) { + fdi.setFileName1(fm1.getLabel()); + fdi.setFileType1(fm1.getDataFile().getFriendlyType()); + //fdi.setFileSize1(FileUtil. (new File(fm1.getDataFile().getFileSystemLocation()).length())); + + fdi.setFileCat1(fm1.getCategory()); + fdi.setFileDesc1(fm1.getDescription()); + + fdi.setFile2Empty(true); + + } else if (fm1 == null) { + fdi.setFile1Empty(true); + + fdi.setFileName2(fm2.getLabel()); + fdi.setFileType2(fm2.getDataFile().getFriendlyType()); + //fdi.setFileSize2(FileUtil.byteCountToDisplaySize(new File(fm2.getStudyFile().getFileSystemLocation()).length())); + fdi.setFileCat2(fm2.getCategory()); + fdi.setFileDesc2(fm2.getDescription()); + + } else { + // Both are non-null metadata objects. + // We simply go through the 5 metadata fields, if any are + // different between the 2 versions, we add them to the + // difference object: + + String value1; + String value2; + + // filename: + value1 = fm1.getLabel(); + value2 = fm2.getLabel(); + + if (value1 == null || value1.equals("") || value1.equals(" ")) { + value1 = "[Empty]"; + } + if (value2 == null || value2.equals("") || value2.equals(" ")) { + value2 = "[Empty]"; + } + + if (!value1.equals(value2)) { + + fdi.setFileName1(value1); + fdi.setFileName2(value2); + } + + // NOTE: + // fileType and fileSize will always be the same + // for the same studyFile! -- so no need to check for differences in + // these 2 items. 
+ // file category: + value1 = fm1.getCategory(); + value2 = fm2.getCategory(); + + if (value1 == null || value1.equals("") || value1.equals(" ")) { + value1 = "[Empty]"; + } + if (value2 == null || value2.equals("") || value2.equals(" ")) { + value2 = "[Empty]"; + } + + if (!value1.equals(value2)) { + + fdi.setFileCat1(value1); + fdi.setFileCat2(value2); + } + + // file description: + value1 = fm1.getDescription(); + value2 = fm2.getDescription(); + + if (value1 == null || value1.equals("") || value1.equals(" ")) { + value1 = "[Empty]"; + } + if (value2 == null || value2.equals("") || value2.equals(" ")) { + value2 = "[Empty]"; + } + + if (!value1.equals(value2)) { + + fdi.setFileDesc1(value1); + fdi.setFileDesc2(value2); + } + } + return fdi; + } + + public class datasetFileDifferenceItem { + + public datasetFileDifferenceItem() { + } + + private String fileId; + + private String fileName1; + private String fileType1; + private String fileSize1; + private String fileCat1; + private String fileDesc1; + + private String fileName2; + private String fileType2; + private String fileSize2; + private String fileCat2; + private String fileDesc2; + + private boolean file1Empty = false; + private boolean file2Empty = false; + + public String getFileId() { + return fileId; + } + + public void setFileId(String fid) { + this.fileId = fid; + } + + public String getFileName1() { + return fileName1; + } + + public void setFileName1(String fn) { + this.fileName1 = fn; + } + + public String getFileType1() { + return fileType1; + } + + public void setFileType1(String ft) { + this.fileType1 = ft; + } + + public String getFileSize1() { + return fileSize1; + } + + public void setFileSize1(String fs) { + this.fileSize1 = fs; + } + + public String getFileCat1() { + return fileCat1; + } + + public void setFileCat1(String fc) { + this.fileCat1 = fc; + } + + public String getFileDesc1() { + return fileDesc1; + } + + public void setFileDesc1(String fd) { + this.fileDesc1 = fd; + } + + 
public String getFileName2() { + return fileName2; + } + + public void setFileName2(String fn) { + this.fileName2 = fn; + } + + public String getFileType2() { + return fileType2; + } + + public void setFileType2(String ft) { + this.fileType2 = ft; + } + + public String getFileSize2() { + return fileSize2; + } + + public void setFileSize2(String fs) { + this.fileSize2 = fs; + } + + public String getFileCat2() { + return fileCat2; + } + + public void setFileCat2(String fc) { + this.fileCat2 = fc; + } + + public String getFileDesc2() { + return fileDesc2; + } + + public void setFileDesc2(String fd) { + this.fileDesc2 = fd; + } + + public boolean isFile1Empty() { + return file1Empty; + } + + public boolean isFile2Empty() { + return file2Empty; + } + + public void setFile1Empty(boolean state) { + file1Empty = state; + } + + public void setFile2Empty(boolean state) { + file2Empty = state; + } + + } + + public List getDatasetFilesDiffList() { + return datasetFilesDiffList; + } + + public void setDatasetFilesDiffList(List datasetFilesDiffList) { + this.datasetFilesDiffList = datasetFilesDiffList; + } + + public String getNoFileDifferencesFoundLabel() { + return noFileDifferencesFoundLabel; + } + + public void setNoFileDifferencesFoundLabel(String noFileDifferencesFoundLabel) { + this.noFileDifferencesFoundLabel = noFileDifferencesFoundLabel; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index 671a87efc1e..e2286ec9259 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -26,4 +26,7 @@ public DatasetVersion find(Object pk) { return (DatasetVersion) em.find(DatasetVersion.class, pk); } + + + } diff --git a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java index e8b357c7859..bec1840f34f 
100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java @@ -9,9 +9,9 @@ import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.impl.CreateDataverseCommand; +import edu.harvard.iq.dataverse.engine.command.impl.PublishDataverseCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseCommand; import static edu.harvard.iq.dataverse.util.JsfHelper.JH; -import java.sql.Timestamp; import java.util.List; import javax.ejb.EJB; import javax.ejb.EJBException; @@ -23,7 +23,6 @@ import javax.inject.Named; import javax.persistence.NoResultException; import java.util.ArrayList; -import java.util.Date; import java.util.logging.Logger; import javax.faces.component.UIComponent; import javax.faces.component.UIInput; @@ -231,12 +230,22 @@ public void setFacets(DualListModel facets) { } public String releaseDataverse() { - dataverse.setPublicationDate(new Timestamp(new Date().getTime())); - dataverse.setReleaseUser(session.getUser()); - dataverse = dataverseService.save(dataverse); - FacesMessage message = new FacesMessage(FacesMessage.SEVERITY_INFO, "DataverseReleased", "Your dataverse is now public."); - FacesContext.getCurrentInstance().addMessage(null, message); - return "/dataverse.xhtml?id=" + dataverse.getId() + "&faces-redirect=true"; + PublishDataverseCommand cmd = new PublishDataverseCommand(session.getUser(), dataverse); + try { + commandEngine.submit(cmd); + FacesMessage message = new FacesMessage(FacesMessage.SEVERITY_INFO, "DataverseReleased", "Your dataverse is now public."); + FacesContext.getCurrentInstance().addMessage(null, message); + return "/dataverse.xhtml?id=" + dataverse.getId() + "&faces-redirect=true"; + } catch (CommandException ex) { + String msg = "There was a problem publishing your dataverse: " + ex; + logger.severe(msg); + /** + * @todo 
how do we get this message to show up in the GUI? + */ + FacesMessage message = new FacesMessage(FacesMessage.SEVERITY_INFO, "DataverseNotReleased", msg); + FacesContext.getCurrentInstance().addMessage(null, message); + return "/dataverse.xhtml?id=" + dataverse.getId() + "&faces-redirect=true"; + } } public String getMetadataBlockPreview(MetadataBlock mdb, int numberOfItems) { diff --git a/src/main/java/edu/harvard/iq/dataverse/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/IndexServiceBean.java index 2746892bec5..fa600f23670 100644 --- a/src/main/java/edu/harvard/iq/dataverse/IndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/IndexServiceBean.java @@ -45,6 +45,8 @@ public class IndexServiceBean { DataverseUserServiceBean dataverseUserServiceBean; private final String solrDocIdentifierDataverse = "dataverse_"; + public static final String solrDocIdentifierFile = "datafile_"; + public static final String draftSuffix = "_draft"; private static final String groupPrefix = "group_"; private static final String groupPerUserPrefix = "group_user"; private static final Long publicGroupId = 1L; @@ -667,14 +669,14 @@ private String addOrUpdateDataset(IndexableDataset indexableDataset) { datafileSolrInputDocument.addField(SearchFields.PUBLICATION_STATUS, UNPUBLISHED_STRING); } - String fileSolrDocId = "datafile_" + fileEntityId; + String fileSolrDocId = solrDocIdentifierFile + fileEntityId; if (indexableDataset.getDatasetState().equals(indexableDataset.getDatasetState().PUBLISHED)) { - fileSolrDocId = "datafile_" + fileEntityId; + fileSolrDocId = solrDocIdentifierFile + fileEntityId; datafileSolrInputDocument.addField(SearchFields.PUBLICATION_STATUS, PUBLISHED_STRING); datafileSolrInputDocument.addField(SearchFields.PERMS, publicGroupString); addDatasetReleaseDateToSolrDoc(datafileSolrInputDocument, dataset); } else if (indexableDataset.getDatasetState().equals(indexableDataset.getDatasetState().WORKING_COPY)) { - fileSolrDocId = "datafile_" + 
fileEntityId + indexableDataset.getDatasetState().getSuffix(); + fileSolrDocId = solrDocIdentifierFile + fileEntityId + indexableDataset.getDatasetState().getSuffix(); datafileSolrInputDocument.addField(SearchFields.PUBLICATION_STATUS, DRAFT_STRING); } datafileSolrInputDocument.addField(SearchFields.ID, fileSolrDocId); @@ -975,12 +977,7 @@ private List findSolrDocIdsForDraftFilesToDelete(Dataset datasetWithDraf SolrQuery solrQuery = new SolrQuery(); solrQuery.setRows(Integer.MAX_VALUE); solrQuery.setQuery(SearchFields.PARENT_ID + ":" + datasetId); - /** - * @todo rather than hard coding "_draft" here, tie to - * IndexableDataset(new DatasetVersion()).getDatasetState().getSuffix() - */ -// String draftSuffix = new IndexableDataset(new DatasetVersion()).getDatasetState().WORKING_COPY.name(); - solrQuery.addFilterQuery(SearchFields.ID + ":" + "*_draft"); + solrQuery.addFilterQuery(SearchFields.ID + ":" + "*" + draftSuffix); List solrIdsOfFilesToDelete = new ArrayList<>(); try { // i.e. rows=2147483647&q=parentid%3A16&fq=id%3A*_draft diff --git a/src/main/java/edu/harvard/iq/dataverse/ManageRolesPage.java b/src/main/java/edu/harvard/iq/dataverse/ManageRolesPage.java index 75d4b654fd4..e93a7c82b3c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManageRolesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ManageRolesPage.java @@ -4,6 +4,7 @@ import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.PermissionException; import edu.harvard.iq.dataverse.engine.command.impl.AssignRoleCommand; +import edu.harvard.iq.dataverse.engine.command.impl.CreateRoleCommand; import edu.harvard.iq.dataverse.engine.command.impl.RevokeRoleCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseGuestRolesCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDataversePermissionRootCommand; @@ -37,9 +38,6 @@ public class ManageRolesPage implements java.io.Serializable { private 
static final Logger logger = Logger.getLogger(ManageRolesPage.class.getName()); - public enum Intent { LIST, VIEW, EDIT }; - public enum ObjectType { DATAVERSE, ROLES, USERS }; - @Inject DataverseSession session; @EJB @@ -60,43 +58,38 @@ public enum ObjectType { DATAVERSE, ROLES, USERS }; @Inject DataversePage dvpage; - - private Intent intent = null; - private List selectedPermissions; private String intentParam; private Long viewRoleId; private int activeTabIndex; private DataverseRole role; private DataverseRole defaultUserRole; - private boolean permissionRoot; - private String objectTypeParam; - private Long dataverseIdParam; - private ObjectType objectType; private String assignRoleUsername; + private String dataverseIdParam; private Dataverse dataverse; private Long assignRoleRoleId; private List guestRolesHere; private List guestRolesUp; private List guestRolesHereId; + private boolean inheritAssignmentsCbValue; + public void init() { // decide object type - objectType = JH.enumValue(getObjectTypeParam(), ObjectType.class, ObjectType.DATAVERSE); - setActiveTab(objectType); - setIntent( JH.enumValue(getIntentParam(), Intent.class, Intent.LIST)); if ( viewRoleId != null ) { // enter view mode setRole( rolesService.find(viewRoleId) ); if ( getRole() == null ) { JH.addMessage(FacesMessage.SEVERITY_WARN, "Can't find role with id '" + viewRoleId + "'", "The role might have existed once, but was deleted"); - setIntent( Intent.LIST ); } - } + } else { + setRole( new DataverseRole() ); + } + dataverse = dvService.find( Long.parseLong(dataverseIdParam) ); dvpage.setDataverse(getDataverse()); - + setInheritAssignmentsCbValue( ! 
getDataverse().isPermissionRoot() ); guestRolesHere = new LinkedList<>(); guestRolesUp = new LinkedList<>(); for ( RoleAssignment ra : rolesService.roleAssignments(usersService.findGuestUser(), dataverse).getAssignments() ) { @@ -126,14 +119,24 @@ public boolean isHasRoles() { } public void createNewRole( ActionEvent e ) { - setIntent(Intent.EDIT); DataverseRole aRole = new DataverseRole(); setRole( aRole ); - setActiveTab(ObjectType.ROLES); } + public void editRole( String roleId ) { + setRole( rolesService.find(Long.parseLong(roleId)) ); + } + + public void updatePermissionRoot(javax.faces.event.AjaxBehaviorEvent event) throws javax.faces.event.AbortProcessingException { + try { + dataverse = engineService.submit( new UpdateDataversePermissionRootCommand(!isInheritAssignmentsCbValue(), session.getUser(), getDataverse()) ); + setInheritAssignmentsCbValue( ! dataverse.isPermissionRoot() ); + } catch (CommandException ex) { + Logger.getLogger(ManageRolesPage.class.getName()).log(Level.SEVERE, null, ex); + } + } + public void cancelEdit( ActionEvent e ) { - intent = Intent.LIST; } public void saveDataverse( ActionEvent e ) { @@ -144,32 +147,37 @@ public void saveDataverse( ActionEvent e ) { try { engineService.submit( new UpdateDataverseGuestRolesCommand(guestRolesToAddHere, session.getUser(), getDataverse())); - engineService.submit( new UpdateDataversePermissionRootCommand(permissionRoot, session.getUser(), getDataverse())); + engineService.submit( new UpdateDataversePermissionRootCommand(isPermissionRoot(), session.getUser(), getDataverse())); JH.addMessage(FacesMessage.SEVERITY_INFO, "Dataverse data updated"); } catch (CommandException ex) { JH.addMessage(FacesMessage.SEVERITY_ERROR, "Update failed: "+ ex.getMessage()); } - objectType=ObjectType.DATAVERSE; - setIntent( Intent.VIEW ); } - public void saveRole( ActionEvent e ) { + public void updateRole( ActionEvent e ) { role.setOwner(getDataverse()); - role.permissions().clear(); + role.clearPermissions(); for ( 
String pmsnStr : getSelectedPermissions() ) { role.addPermission(Permission.valueOf(pmsnStr) ); } - setRole( rolesService.save(role) );; - JH.addMessage(FacesMessage.SEVERITY_INFO, "Role '" + role.getName() + "' saved", ""); - intent = Intent.LIST; + try { + setRole( engineService.submit( new CreateRoleCommand(role, session.getUser(), getDataverse())) ); + JH.addMessage(FacesMessage.SEVERITY_INFO, "Role '" + role.getName() + "' saved", ""); + } catch (CommandException ex) { + JH.addMessage(FacesMessage.SEVERITY_ERROR, "Cannot save role", ex.getMessage() ); + Logger.getLogger(ManageRolesPage.class.getName()).log(Level.SEVERE, null, ex); + } } - + + public void saveNewRole( ActionEvent e ) { + role.setId( null ); + updateRole( e ); + } + public List getRolePermissions() { return (role != null ) ? new ArrayList( role.permissions() ) : Collections.emptyList(); } - - public List getSelectedPermissions() { return selectedPermissions; } @@ -177,14 +185,6 @@ public List getSelectedPermissions() { public void setSelectedPermissions(List selectedPermissions) { this.selectedPermissions = selectedPermissions; } - - public Intent getIntent() { - return intent; - } - - public void setIntent(Intent anIntent) { - this.intent = anIntent; - } public DataverseRole getRole() { return role; @@ -213,10 +213,14 @@ public List availableRoles() { } public void assignRole( ActionEvent evt ) { - DataverseUser u = usersService.findByUserName(getAssignRoleUsername()); + logger.warning("Username: " + getAssignRoleUsername() ); + logger.warning("RoleID: " + getAssignRoleRoleId()); + + DataverseUser u = usersService.findByUserName( getAssignRoleUsername() ); DataverseRole r = rolesService.find( getAssignRoleRoleId() ); + logger.warning("User: " + u + " role:" + r ); - try { + try { engineService.submit( new AssignRoleCommand(u, r, getDataverse(), session.getUser())); JH.addMessage(FacesMessage.SEVERITY_INFO, "Role " + r.getName() + " assigned to " + u.getFirstName() + " " + u.getLastName() + " on 
" + getDataverse().getName() ); } catch (CommandException ex) { @@ -239,7 +243,6 @@ public List getRoleAssignments() { } public void revokeRole( Long roleAssignmentId ) { - try { engineService.submit( new RevokeRoleCommand(em.find(RoleAssignment.class, roleAssignmentId), session.getUser())); JH.addMessage(FacesMessage.SEVERITY_INFO, "Role assignment revoked successfully"); @@ -278,13 +281,9 @@ public void setDefaultUserRole(DataverseRole defaultUserRole) { } public boolean isPermissionRoot() { - return permissionRoot; + return getDataverse().isPermissionRoot(); } - public void setPermissionRoot(boolean permissionRoot) { - this.permissionRoot = permissionRoot; - } - public int getActiveTabIndex() { return activeTabIndex; } @@ -293,18 +292,6 @@ public void setActiveTabIndex(int activeTabIndex) { this.activeTabIndex = activeTabIndex; } - public String getObjectTypeParam() { - return objectTypeParam; - } - - public void setObjectTypeParam(String objectTypeParam) { - this.objectTypeParam = objectTypeParam; - } - - public void setActiveTab( ObjectType t ) { - setActiveTabIndex( (t!=null) ? 
t.ordinal() : 0 ); - } - public String getAssignRoleUsername() { return assignRoleUsername; } @@ -321,18 +308,7 @@ public void setAssignRoleRoleId(Long assignRoleRoleId) { this.assignRoleRoleId = assignRoleRoleId; } - public Long getDataverseIdParam() { - return dataverseIdParam; - } - - public void setDataverseIdParam(Long dataverseIdParam) { - this.dataverseIdParam = dataverseIdParam; - } - public Dataverse getDataverse() { - if ( dataverse == null ) { - dataverse = dvService.find( getDataverseIdParam() ); - } return dataverse; } @@ -355,7 +331,23 @@ public List getGuestRolesHereId() { public void setGuestRolesHereId(List guestUserRolesHereId) { this.guestRolesHereId = guestUserRolesHereId; } - + + public boolean isInheritAssignmentsCbValue() { + return inheritAssignmentsCbValue; + } + + public void setInheritAssignmentsCbValue(boolean inheritAssignmentsCbValue) { + this.inheritAssignmentsCbValue = inheritAssignmentsCbValue; + } + + public String getDataverseIdParam() { + return dataverseIdParam; + } + + public void setDataverseIdParam(String dataverseIdParam) { + this.dataverseIdParam = dataverseIdParam; + } + public static class RoleAssignmentRow { private final String name; private final RoleAssignment ra; @@ -400,4 +392,3 @@ public Long getId() { } } - diff --git a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java index 91cf95b5096..45cdd9813c9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java @@ -64,12 +64,13 @@ public boolean canIssue(Class cmd) { * "Fast and loose" query mechanism, allowing to pass the command class * name. Command is assumed to live in * {@code edu.harvard.iq.dataverse.engine.command.impl.} - * + * @deprecated * @param commandName * @return {@code true} iff the user has the permissions required by the * command on the object. 
* @throws ClassNotFoundException */ + @Deprecated public boolean canIssueCommand(String commandName) throws ClassNotFoundException { return isUserAllowedOn(user, (Class) Class.forName("edu.harvard.iq.dataverse.engine.command.impl." + commandName), subject); diff --git a/src/main/java/edu/harvard/iq/dataverse/SampleCommandPage.java b/src/main/java/edu/harvard/iq/dataverse/SampleCommandPage.java index debf05fdfe8..3cf38556782 100644 --- a/src/main/java/edu/harvard/iq/dataverse/SampleCommandPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/SampleCommandPage.java @@ -3,7 +3,7 @@ import edu.harvard.iq.dataverse.engine.Permission; import edu.harvard.iq.dataverse.engine.command.impl.RenameDataverseCommand; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; -import edu.harvard.iq.dataverse.engine.command.impl.ReleaseDataverseCommand; +import edu.harvard.iq.dataverse.engine.command.impl.PublishDataverseCommand; import java.util.Arrays; import java.util.List; import java.util.logging.Logger; @@ -97,7 +97,7 @@ public void actionSave( ActionEvent e ) { } public boolean isCanRelease() { - return permissionsService.on(dataversePage.getDataverse()).canIssue( ReleaseDataverseCommand.class ); + return permissionsService.on(dataversePage.getDataverse()).canIssue( PublishDataverseCommand.class ); } public List getDataverseList() { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index ec0916cedd5..aba7b63847a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -12,6 +12,7 @@ import edu.harvard.iq.dataverse.MetadataBlockServiceBean; import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import 
edu.harvard.iq.dataverse.engine.command.exception.PermissionException; import edu.harvard.iq.dataverse.util.json.JsonParser; import java.util.concurrent.Callable; @@ -34,6 +35,23 @@ */ public abstract class AbstractApiBean { + /** + * Utility class to convey a proper error response on failed commands. + * @see #execCommand(edu.harvard.iq.dataverse.engine.command.Command, java.lang.String) + */ + public static class FailedCommandResult extends Exception { + private final Response response; + + public FailedCommandResult(Response response) { + this.response = response; + } + + public Response getResponse() { + return response; + } + + } + @EJB protected EjbDataverseEngine engineSvc; @@ -102,6 +120,19 @@ protected Response okResponse( String msg ) { .add("data", Json.createObjectBuilder().add("message",msg)).build() ).build(); } + protected T execCommand( Command com, String messageSeed ) throws FailedCommandResult { + try { + return engineSvc.submit(com); + + } catch (IllegalCommandException ex) { + throw new FailedCommandResult( errorResponse( Response.Status.FORBIDDEN, messageSeed + ": Not Allowed (" + ex.getMessage() + ")" )); + + } catch (CommandException ex) { + Logger.getLogger(AbstractApiBean.class.getName()).log(Level.SEVERE, "Error while " + messageSeed, ex); + throw new FailedCommandResult(errorResponse(Status.INTERNAL_SERVER_ERROR, messageSeed + " failed: " + ex.getMessage())); + } + } + /** * Returns an OK response (HTTP 200, status:OK) with the passed value * in the data field. 
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 5a5621827c5..b8b68542cd5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -4,34 +4,37 @@ import edu.harvard.iq.dataverse.DatasetField; import edu.harvard.iq.dataverse.DatasetServiceBean; import edu.harvard.iq.dataverse.DatasetVersion; -import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.DataverseServiceBean; import edu.harvard.iq.dataverse.DataverseUser; import edu.harvard.iq.dataverse.MetadataBlock; -import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; -import edu.harvard.iq.dataverse.api.dto.DatasetDTO; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.CommandExecutionException; -import edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetCommand; +import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; +import edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetCommand; +import edu.harvard.iq.dataverse.engine.command.impl.PublishDatasetCommand; +import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand; +import edu.harvard.iq.dataverse.util.json.JsonParseException; import edu.harvard.iq.dataverse.util.json.JsonPrinter; +import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; +import java.io.StringReader; import java.util.List; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; import javax.ejb.EJB; -import javax.ejb.EJBException; import javax.json.Json; import javax.json.JsonArrayBuilder; -import javax.validation.ConstraintViolation; -import javax.validation.ConstraintViolationException; +import javax.json.JsonObject; import javax.ws.rs.DELETE; 
import javax.ws.rs.GET; import javax.ws.rs.POST; +import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.QueryParam; import javax.ws.rs.core.Response; +import javax.ws.rs.core.Response.Status; @Path("datasets") public class Datasets extends AbstractApiBean { @@ -44,9 +47,9 @@ public class Datasets extends AbstractApiBean { DataverseServiceBean dataverseService; @GET - public String list(@QueryParam("key") String apiKey ) { + public Response list(@QueryParam("key") String apiKey ) { DataverseUser u = userSvc.findByUserName(apiKey); - if ( u == null ) return error( "Invalid apikey '" + apiKey + "'"); + if ( u == null ) return errorResponse( Status.UNAUTHORIZED, "Invalid apikey '" + apiKey + "'"); // TODO filter by what the user can see. @@ -55,66 +58,63 @@ public String list(@QueryParam("key") String apiKey ) { for (Dataset dataset : datasets) { datasetsArrayBuilder.add( json(dataset) ); } - return ok(datasetsArrayBuilder); + return okResponse(datasetsArrayBuilder); } @GET @Path("{id}") - public String getDataset( @PathParam("id") Long id, @QueryParam("key") String apiKey ) { + public Response getDataset( @PathParam("id") Long id, @QueryParam("key") String apiKey ) { DataverseUser u = userSvc.findByUserName(apiKey); - if ( u == null ) return error( "Invalid apikey '" + apiKey + "'"); - - // TODO filter by what the user can see. + if ( u == null ) return errorResponse( Response.Status.UNAUTHORIZED, "Invalid apikey '" + apiKey + "'"); Dataset ds = datasetService.find(id); - return (ds != null) ? ok(json(ds)) - : error("dataset not found"); - + return (ds != null) ? 
okResponse(json(ds)) + : notFound("dataset not found"); } @DELETE @Path("{id}") - public String deleteDataset( @PathParam("id") Long id, @QueryParam("key") String apiKey ) { + public Response deleteDataset( @PathParam("id") Long id, @QueryParam("key") String apiKey ) { DataverseUser u = userSvc.findByUserName(apiKey); - if ( u == null ) return error( "Invalid apikey '" + apiKey + "'"); + if ( u == null ) return errorResponse( Response.Status.UNAUTHORIZED, "Invalid apikey '" + apiKey + "'"); Dataset ds = datasetService.find(id); - if (ds == null) return error("dataset not found"); + if (ds == null) return errorResponse( Response.Status.NOT_FOUND, "dataset not found"); try { engineSvc.submit( new DeleteDatasetCommand(ds, u)); - return ok("Dataset " + id + " deleted"); + return okResponse("Dataset " + id + " deleted"); } catch (CommandExecutionException ex) { // internal error logger.log( Level.SEVERE, "Error deleting dataset " + id + ": " + ex.getMessage(), ex ); - return error( "Can't delete dataset: " + ex.getMessage() ); + return errorResponse( Response.Status.FORBIDDEN, "Can't delete dataset: " + ex.getMessage() ); } catch (CommandException ex) { - return error( "Can't delete dataset: " + ex.getMessage() ); + return errorResponse( Response.Status.INTERNAL_SERVER_ERROR, "Can't delete dataset: " + ex.getMessage() ); } } @GET @Path("{id}/versions") - public String listVersions( @PathParam("id") Long id, @QueryParam("key") String apiKey ) { + public Response listVersions( @PathParam("id") Long id, @QueryParam("key") String apiKey ) { DataverseUser u = userSvc.findByUserName(apiKey); - if ( u == null ) return error( "Invalid apikey '" + apiKey + "'"); + if ( u == null ) return errorResponse( Status.UNAUTHORIZED, "Invalid apikey '" + apiKey + "'"); // TODO filter by what the user can see. 
Dataset ds = datasetService.find(id); - if (ds == null) return error("dataset not found"); + if (ds == null) return notFound("dataset not found"); JsonArrayBuilder bld = Json.createArrayBuilder(); for ( DatasetVersion dsv : ds.getVersions() ) { bld.add( json(dsv) ); } - return ok( bld ); + return okResponse( bld ); } @GET @@ -196,9 +196,9 @@ public Response getVersionMetadata( @PathParam("id") Long datasetId, @PathParam( } @GET - @Path("{id}/versions/{versionId}/metadata/{block}") + @Path("{id}/versions/{versionNumber}/metadata/{block}") public Response getVersionMetadataBlock( @PathParam("id") Long datasetId, - @PathParam("versionId") String versionId, + @PathParam("versionNumber") String versionNumber, @PathParam("block") String blockName, @QueryParam("key") String apiKey ) { DataverseUser u = userSvc.findByUserName(apiKey); @@ -210,7 +210,7 @@ public Response getVersionMetadataBlock( @PathParam("id") Long datasetId, if (ds == null) return errorResponse(Response.Status.NOT_FOUND, "dataset " + datasetId + " not found"); DatasetVersion dsv = null; - switch (versionId) { + switch (versionNumber) { case ":latest": dsv = ds.getLatestVersion(); break; @@ -219,15 +219,18 @@ public Response getVersionMetadataBlock( @PathParam("id") Long datasetId, break; default: try { - long versionNumericId = Long.parseLong(versionId); + String[] comps = versionNumber.split("\\."); + long majorVersion = Long.parseLong(comps[0]); + long minorVersion = comps.length > 1 ? 
Long.parseLong(comps[1]) : 0; for ( DatasetVersion aDsv : ds.getVersions() ) { - if ( aDsv.getId().equals(versionNumericId) ) { + if ( aDsv.getVersionNumber().equals(majorVersion) && + aDsv.getMinorVersionNumber().equals(minorVersion)) { dsv = aDsv; - break; // for, not while + break; // for, not switch } } } catch ( NumberFormatException nfe ) { - return errorResponse( Response.Status.BAD_REQUEST, "Illegal id number '" + versionId + "'"); + return errorResponse( Response.Status.BAD_REQUEST, "Illegal version number '" + versionNumber + "'. Values are :latest, :edit and x.y"); } break; } @@ -239,7 +242,7 @@ public Response getVersionMetadataBlock( @PathParam("id") Long datasetId, return okResponse( JsonPrinter.json(p.getKey(), p.getValue()) ); } } - return errorResponse(Response.Status.NOT_FOUND, "metadata block named " + blockName + " not found"); + return notFound("metadata block named " + blockName + " not found"); } @GET @@ -249,100 +252,74 @@ public String listFiles() { return error("Not implemented yet"); } - - @POST - @Path("{id}/versions") - public String addVersion( @PathParam("id") Long id, @QueryParam("key") String apikey, DatasetDTO dsDto ){ - // CONTPOINT accept the dsDto and push it to the DB. 
- return null; - } - - // used to primarily to feed data into elasticsearch - @GET - @Deprecated - @Path("deprecated/{id}/{verb}") - public Dataset get(@PathParam("id") Long id, @PathParam("verb") String verb) { - logger.info("GET called"); - if (verb.equals("dump")) { - Dataset dataset = datasetService.find(id); - if (dataset != null) { - logger.info("found " + dataset); - // prevent HTTP Status 500 - Internal Server Error - dataset.setFiles(null); - dataset.setAuthority(null); -// dataset.setDescription(null); - dataset.setIdentifier(null); - dataset.setProtocol(null); - dataset.setVersions(null); - // elasticsearch fails on "today" with - // MapperParsingException[failed to parse date field [today], - // tried both date format [dateOptionalTime], and timestamp number with locale []] - //dataset.setCitationDate(null); - // too much information - dataset.setOwner(null); - return dataset; - } + @PUT + @Path("{id}/versions/{versionId}") + public Response updateDraftVersion( String jsonBody, @PathParam("id") Long id, @PathParam("versionId") String versionId, @QueryParam("key") String apiKey ){ + + if ( ! ":edit".equals(versionId) ) + return errorResponse( Response.Status.BAD_REQUEST, "Only the :edit version can be put on server"); + + DataverseUser u = userSvc.findByUserName(apiKey); + if ( u == null ) return errorResponse( Response.Status.UNAUTHORIZED, "Invalid apikey '" + apiKey + "'"); + Dataset ds = datasetService.find(id); + if ( ds == null ) return notFound("Can't find dataset with id '" + id + "'"); + + try ( StringReader rdr = new StringReader(jsonBody) ) { + JsonObject json = Json.createReader(rdr).readObject(); + DatasetVersion version = jsonParser().parseDatasetVersion(json); + version.setDataset(ds); + boolean updateDraft = ds.getLatestVersion().isDraft(); + DatasetVersion managedVersion = engineSvc.submit( updateDraft + ? 
new UpdateDatasetVersionCommand(u, version) + : new CreateDatasetVersionCommand(u, ds, version) ); + return okResponse( json(managedVersion) ); + + } catch (CommandException ex) { + logger.log(Level.SEVERE, "Error executing CreateDatasetVersionCommand: " + ex.getMessage(), ex); + return errorResponse(Response.Status.INTERNAL_SERVER_ERROR, "Error: " + ex.getMessage() ); + + } catch (JsonParseException ex) { + logger.log(Level.SEVERE, "Semantic error parsing dataset version Json: " + ex.getMessage(), ex); + return errorResponse( Response.Status.BAD_REQUEST, "Error parsing dataset version: " + ex.getMessage() ); } - /** - * @todo return an error instead of "204 No Content"? - * - */ - logger.info("GET attempted with dataset id " + id + " and verb " + verb); - return null; } - - @Path("deprecated/") + @POST - @Deprecated - public String add(Dataset dataset, @QueryParam("owner") String owner, @QueryParam("key") String apiKey) { + @Path("{id}/actions/:publish") + public Response publishDataset( @PathParam("id") String id, @QueryParam("type") String type, @QueryParam("key") String apiKey ) { try { - DatasetVersion editVersion = new DatasetVersion(); - editVersion.setVersionState(DatasetVersion.VersionState.DRAFT); - editVersion.setDataset(dataset); - Dataverse owningDataverse = dataverseService.findByAlias(owner); - dataset.setOwner(owningDataverse); - editVersion.setDatasetFields(editVersion.initDatasetFields()); - dataset.getVersions().add(editVersion); - dataset.setIdentifier("myIdentifier"); - dataset.setProtocol("myProtocol"); - DataverseUser u = userSvc.findByUserName(apiKey); - if ( u == null ) return error( "Invalid apikey '" + apiKey + "'"); - engineSvc.submit( new CreateDatasetCommand(dataset, u)); - return "dataset " + dataset.getId() + " created/updated (and probably indexed, check server.log)\n"; - } catch (EJBException ex) { - Throwable cause = ex; - StringBuilder sb = new StringBuilder(); - sb.append(ex + " "); - while (cause.getCause() != null) { - cause = 
cause.getCause(); - sb.append(cause.getClass().getCanonicalName() + " "); - sb.append(cause.getMessage() + " "); - if (cause instanceof ConstraintViolationException) { - ConstraintViolationException constraintViolationException = (ConstraintViolationException) cause; - for (ConstraintViolation violation : constraintViolationException.getConstraintViolations()) { - sb.append("(invalid value: <<<" + violation.getInvalidValue() + ">>> for " + violation.getPropertyPath() + " at " + violation.getLeafBean() + " - " + violation.getMessage() + ")"); - } -// } else if (cause instanceof NullPointerException) { - } else { - for (int i = 0; i < 2; i++) { - StackTraceElement stacktrace = cause.getStackTrace()[i]; - if (stacktrace != null) { - String classCanonicalName = stacktrace.getClass().getCanonicalName(); - String methodName = stacktrace.getMethodName(); - int lineNumber = stacktrace.getLineNumber(); - String error = "at " + stacktrace.getClassName() + "." + stacktrace.getMethodName() + "(" + stacktrace.getFileName() + ":" + lineNumber + ") "; - sb.append(error); - } - } - } + + if ( type == null ) return errorResponse( Response.Status.BAD_REQUEST, "Missing 'type' parameter (either 'major' or 'minor')."); + type = type.toLowerCase(); + boolean isMinor; + switch ( type ) { + case "minor": isMinor = true; break; + case "major": isMinor = false; break; + default: return errorResponse( Response.Status.BAD_REQUEST, "Illegal 'type' parameter value '" + type + "'. It needs to be either 'major' or 'minor'."); } - if (sb.toString().equals("javax.ejb.EJBException: Transaction aborted javax.transaction.RollbackException java.lang.IllegalStateException ")) { - return "indexing went as well as can be expected... 
got java.lang.IllegalStateException but some indexing may have happened anyway\n"; - } else { - return Util.message2ApiError(sb.toString()); + long dsId=0; + try { + dsId = Long.parseLong(id); + } catch ( NumberFormatException nfe ) { + return errorResponse( Response.Status.BAD_REQUEST, "Bad dataset id. Please provide a number."); } + + DataverseUser u = userSvc.findByUserName(apiKey); + if ( u == null ) return errorResponse( Response.Status.UNAUTHORIZED, "Invalid apikey '" + apiKey + "'"); + + Dataset ds = datasetService.find(dsId); + if ( ds == null ) return notFound("Can't find dataset with id '" + id + "'"); + + ds = engineSvc.submit( new PublishDatasetCommand(ds, u, isMinor)); + return okResponse( json(ds) ); + + } catch (IllegalCommandException ex) { + return errorResponse( Response.Status.FORBIDDEN, "Error publishing the dataset: " + ex.getMessage() ); + } catch (CommandException ex) { - return error( "Can't add dataset: " + ex.getMessage() ); - } + Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, "Error while publishing a Dataset", ex); + return errorResponse( Response.Status.INTERNAL_SERVER_ERROR, "Error publishing the dataset: " + ex.getMessage() ); + } } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 86837993673..2254612399e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -11,9 +11,9 @@ import edu.harvard.iq.dataverse.RoleAssignment; import edu.harvard.iq.dataverse.api.dto.RoleAssignmentDTO; import edu.harvard.iq.dataverse.api.dto.RoleDTO; -import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.brief; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import 
edu.harvard.iq.dataverse.engine.command.impl.AssignRoleCommand; import edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetCommand; import edu.harvard.iq.dataverse.engine.command.impl.CreateDataverseCommand; @@ -21,10 +21,11 @@ import edu.harvard.iq.dataverse.engine.command.impl.DeleteDataverseCommand; import edu.harvard.iq.dataverse.engine.command.impl.ListDataverseContentCommand; import edu.harvard.iq.dataverse.engine.command.impl.ListRoleAssignments; +import edu.harvard.iq.dataverse.engine.command.impl.PublishDataverseCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseMetadataBlocksCommand; import edu.harvard.iq.dataverse.util.json.JsonParseException; +import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; import java.io.StringReader; -import java.util.Collections; import java.util.LinkedList; import java.util.List; import java.util.UUID; @@ -41,7 +42,6 @@ import javax.json.stream.JsonParsingException; import javax.validation.ConstraintViolation; import javax.validation.ConstraintViolationException; -import javax.ws.rs.Consumes; import javax.ws.rs.DELETE; import javax.ws.rs.POST; import javax.ws.rs.PathParam; @@ -70,30 +70,30 @@ public String list() { } @POST - public String addRoot( Dataverse d, @QueryParam("key") String apiKey ) { + public Response addRoot( Dataverse d, @QueryParam("key") String apiKey ) { return addDataverse(d, "", apiKey); } @POST @Path("{identifier}") - public String addDataverse( Dataverse d, @PathParam("identifier") String parentIdtf, @QueryParam("key") String apiKey) { + public Response addDataverse( Dataverse d, @PathParam("identifier") String parentIdtf, @QueryParam("key") String apiKey) { DataverseUser u = userSvc.findByUserName(apiKey); - if ( u == null ) return error( "Invalid apikey '" + apiKey + "'"); + if ( u == null ) return errorResponse(Response.Status.UNAUTHORIZED, "Invalid apikey '" + apiKey + "'"); if ( ! 
parentIdtf.isEmpty() ) { Dataverse owner = findDataverse(parentIdtf); if ( owner == null ) { - return error( "Can't find dataverse with identifier='" + parentIdtf + "'"); + return errorResponse( Response.Status.NOT_FOUND, "Can't find dataverse with identifier='" + parentIdtf + "'"); } d.setOwner(owner); } try { d = engineSvc.submit( new CreateDataverseCommand(d, u) ); - return ok( json(d) ); + return okResponse( json(d) ); } catch (CommandException ex) { logger.log(Level.SEVERE, "Error creating dataverse", ex); - return error("Error creating dataverse: " + ex.getMessage() ); + return errorResponse( Response.Status.INTERNAL_SERVER_ERROR, "Error creating dataverse: " + ex.getMessage() ); } catch (EJBException ex) { Throwable cause = ex; StringBuilder sb = new StringBuilder(); @@ -111,7 +111,7 @@ public String addDataverse( Dataverse d, @PathParam("identifier") String parentI } } logger.log(Level.SEVERE, sb.toString()); - return error(sb.toString()); + return errorResponse( Response.Status.INTERNAL_SERVER_ERROR, "Error creating dataverse: " + sb.toString() ); } } @@ -145,15 +145,17 @@ public Response createDataset( String jsonBody, @PathParam("identifier") String } try { try { - DatasetVersion version = jsonParser().parseDatasetVersion(jsonVersion); - - // force "initial version" properties - version.setMinorVersionNumber(0l); - version.setVersion(1l); - version.setVersionNumber(1l); - version.setVersionState(DatasetVersion.VersionState.DRAFT); - - ds.setVersions( Collections.singletonList(version) ); + DatasetVersion version = jsonParser().parseDatasetVersion(jsonVersion); + + // force "initial version" properties + version.setMinorVersionNumber(0l); + version.setVersionNumber(1l); + version.setVersionState(DatasetVersion.VersionState.DRAFT); + LinkedList versions = new LinkedList<>(); + versions.add(version); + version.setDataset(ds); + + ds.setVersions( versions ); } catch ( javax.ejb.TransactionRolledbackLocalException rbe ) { throw rbe.getCausedByException(); } 
@@ -178,57 +180,56 @@ public Response createDataset( String jsonBody, @PathParam("identifier") String @GET @Path("{identifier}") - public String viewDataverse( @PathParam("identifier") String idtf ) { + public Response viewDataverse( @PathParam("identifier") String idtf ) { Dataverse d = findDataverse(idtf); - return ( d==null) ? error("Can't find dataverse with identifier '" + idtf + "'") - : ok( json(d) ); + return ( d==null) ? errorResponse( Response.Status.NOT_FOUND, "Can't find dataverse with identifier '" + idtf + "'") + : okResponse( json(d) ); } @DELETE @Path("{identifier}") - public String deleteDataverse( @PathParam("identifier") String idtf, @QueryParam("key") String apiKey ) { + public Response deleteDataverse( @PathParam("identifier") String idtf, @QueryParam("key") String apiKey ) { DataverseUser u = userSvc.findByUserName(apiKey); - if ( u == null ) return error( "Invalid apikey '" + apiKey + "'"); + if ( u == null ) return errorResponse( Response.Status.UNAUTHORIZED, "Invalid apikey '" + apiKey + "'"); Dataverse d = findDataverse(idtf); - if ( d == null ) return error("Can't find dataverse with identifier '" + idtf + "'"); + if ( d == null ) return errorResponse( Response.Status.NOT_FOUND, "Can't find dataverse with identifier '" + idtf + "'"); try { - engineSvc.submit( new DeleteDataverseCommand(u, d) ); - return ok( "Dataverse " + idtf +" deleted"); - } catch ( CommandException ex ) { - logger.log(Level.SEVERE, "Error deleting dataverse", ex); - return error("Error creating dataverse: " + ex.getMessage() ); - } + execCommand( new DeleteDataverseCommand(u, d), "Delete Dataverse" ); + return okResponse( "Dataverse " + idtf +" deleted"); + } catch ( FailedCommandResult ex ) { + return ex.getResponse(); + } } @GET @Path("{identifier}/roles") - public String listRoles( @PathParam("identifier") String dvIdtf, @QueryParam("key") String apiKey ) { + public Response listRoles( @PathParam("identifier") String dvIdtf, @QueryParam("key") String apiKey ) { 
DataverseUser u = userSvc.findByUserName(apiKey); - if ( u == null ) return error( "Invalid apikey '" + apiKey + "'"); + if ( u == null ) return errorResponse( Status.FORBIDDEN, "Invalid apikey '" + apiKey + "'"); Dataverse dataverse = findDataverse(dvIdtf); if ( dataverse == null ) { - return error( "Can't find dataverse with identifier='" + dvIdtf + "'"); + return errorResponse( Status.NOT_FOUND, "Can't find dataverse with identifier='" + dvIdtf + "'"); } JsonArrayBuilder jab = Json.createArrayBuilder(); for ( DataverseRole r : dataverse.getRoles() ){ jab.add( json(r) ); } - return ok(jab); + return okResponse(jab); } @GET @Path("{identifier}/metadatablocks") - public String listMetadataBlocks( @PathParam("identifier") String dvIdtf, @QueryParam("key") String apiKey ) { + public Response listMetadataBlocks( @PathParam("identifier") String dvIdtf, @QueryParam("key") String apiKey ) { DataverseUser u = userSvc.findByUserName(apiKey); - if ( u == null ) return error( "Invalid apikey '" + apiKey + "'"); + if ( u == null ) return errorResponse( Status.FORBIDDEN, "Invalid apikey '" + apiKey + "'"); Dataverse dataverse = findDataverse(dvIdtf); if ( dataverse == null ) { - return error( "Can't find dataverse with identifier='" + dvIdtf + "'"); + return errorResponse( Status.NOT_FOUND, "Can't find dataverse with identifier='" + dvIdtf + "'"); } JsonArrayBuilder jab = Json.createArrayBuilder(); @@ -236,7 +237,7 @@ public String listMetadataBlocks( @PathParam("identifier") String dvIdtf, @Query jab.add( brief.json(blk) ); } - return ok(jab); + return okResponse(jab); } @POST @@ -301,13 +302,13 @@ public Response setMetadataRoot( @PathParam("identifier")String dvIdtf, @QueryPa @GET @Path("{identifier}/contents") - public String listContent( @PathParam("identifier") String dvIdtf, @QueryParam("key") String apiKey ) { + public Response listContent( @PathParam("identifier") String dvIdtf, @QueryParam("key") String apiKey ) { DataverseUser u = userSvc.findByUserName(apiKey); - 
if ( u == null ) return error( "Invalid apikey '" + apiKey + "'"); + if ( u == null ) return errorResponse( Status.FORBIDDEN, "Invalid apikey '" + apiKey + "'"); Dataverse dataverse = findDataverse(dvIdtf); if ( dataverse == null ) { - return error( "Can't find dataverse with identifier='" + dvIdtf + "'"); + return errorResponse( Status.NOT_FOUND, "Can't find dataverse with identifier='" + dvIdtf + "'"); } final JsonArrayBuilder jab = Json.createArrayBuilder(); @@ -331,75 +332,70 @@ public Void visit(Dataset ds) { public Void visit(DataFile df) { throw new UnsupportedOperationException("Files don't live directly in Dataverses"); } }; try { - for ( DvObject o : engineSvc.submit( new ListDataverseContentCommand(u, dataverse)) ) { + for ( DvObject o : execCommand(new ListDataverseContentCommand(u, dataverse), "List Dataverse") ) { o.accept(ser); } - } catch (CommandException ex) { - return error(ex.getMessage()); + } catch (FailedCommandResult ex) { + return ex.getResponse(); } - return ok(jab); + return okResponse(jab); } @POST @Path("{identifier}/roles") - public String createRole( RoleDTO roleDto, @PathParam("identifier") String dvIdtf, @QueryParam("key") String apiKey ) { - DataverseUser u = userSvc.findByUserName(apiKey); - if ( u == null ) return error( "Invalid apikey '" + apiKey + "'"); - + public Response createRole( RoleDTO roleDto, @PathParam("identifier") String dvIdtf, @QueryParam("key") String apiKey ) { + + DataverseUser u = userSvc.findByUserName(apiKey); + if ( u == null ) return badApiKey(apiKey); Dataverse dataverse = findDataverse(dvIdtf); - if ( dataverse == null ) { - return error( "Can't find dataverse with identifier='" + dvIdtf + "'"); - } + if ( dataverse == null ) return notFound( "Can't find dataverse with identifier='" + dvIdtf + "'"); + + try { - return ok(json(engineSvc.submit( new CreateRoleCommand(roleDto.asRole(), u, dataverse) ))); - } catch ( CommandException ce ) { - return error( ce.getMessage() ); + return okResponse( 
json(execCommand(new CreateRoleCommand(roleDto.asRole(), u, dataverse), "Create Role"))); + } catch ( FailedCommandResult ce ) { + return ce.getResponse(); } } @GET @Path("{identifier}/assignments") - public String listAssignments( @PathParam("identifier") String dvIdtf, @QueryParam("key") String apiKey ) { + public Response listAssignments( @PathParam("identifier") String dvIdtf, @QueryParam("key") String apiKey ) { DataverseUser u = userSvc.findByUserName(apiKey); - if ( u == null ) return error( "Invalid apikey '" + apiKey + "'"); - + if ( u == null ) return badApiKey(apiKey); Dataverse dataverse = findDataverse(dvIdtf); - if ( dataverse == null ) { - return error( "Can't find dataverse with identifier='" + dvIdtf + "'"); - } + if ( dataverse == null ) return notFound( "Can't find dataverse with identifier='" + dvIdtf + "'"); try { JsonArrayBuilder jab = Json.createArrayBuilder(); - for ( RoleAssignment ra : engineSvc.submit(new ListRoleAssignments(u, dataverse)) ){ + for ( RoleAssignment ra : execCommand(new ListRoleAssignments(u, dataverse), "Role Assignment Listing") ){ jab.add( json(ra) ); } - return ok(jab); + return okResponse(jab); - } catch (CommandException ex) { - return error( "can't list assignments: " + ex.getMessage() ); + } catch (FailedCommandResult ex) { + return ex.getResponse(); } } @POST @Path("{identifier}/assignments") - public String createAssignment( RoleAssignmentDTO ra, @PathParam("identifier") String dvIdtf, @QueryParam("key") String apiKey ) { + public Response createAssignment( RoleAssignmentDTO ra, @PathParam("identifier") String dvIdtf, @QueryParam("key") String apiKey ) { DataverseUser actingUser = userSvc.findByUserName(apiKey); - if ( actingUser == null ) return error( "Invalid apikey '" + apiKey + "'"); - + if ( actingUser == null ) return badApiKey(apiKey); Dataverse dataverse = findDataverse(dvIdtf); - if ( dataverse == null ) { - return error( "Can't find dataverse with identifier='" + dvIdtf + "'"); - } + if ( dataverse == 
null ) return notFound( "Can't find dataverse with identifier='" + dvIdtf + "'"); + DataverseUser grantedUser = (ra.getUserName()!=null) ? findUser(ra.getUserName()) : userSvc.find(ra.getUserId()); if ( grantedUser==null ) { - return error("Can't find user using " + ra.getUserName() + "/" + ra.getUserId() ); + return errorResponse( Status.BAD_REQUEST, "Can't find user using " + ra.getUserName() + "/" + ra.getUserId() ); } DataverseRole theRole; if ( ra.getRoleId() != 0 ) { theRole = rolesSvc.find(ra.getRoleId()); if ( theRole == null ) { - return error("Can't find role with id " + ra.getRoleId() ); + return errorResponse( Status.BAD_REQUEST, "Can't find role with id " + ra.getRoleId() ); } } else { @@ -415,63 +411,60 @@ public String createAssignment( RoleAssignmentDTO ra, @PathParam("identifier") S dv = dv.getOwner(); } if ( theRole == null ) { - return error("Can't find role named '" + ra.getRoleAlias() + "' in dataverse " + dataverse); + return errorResponse( Status.BAD_REQUEST, "Can't find role named '" + ra.getRoleAlias() + "' in dataverse " + dataverse); } } try { - RoleAssignment roleAssignment = engineSvc.submit( new AssignRoleCommand(grantedUser, theRole, dataverse, actingUser)); - return ok(json(roleAssignment)); + RoleAssignment roleAssignment = execCommand( new AssignRoleCommand(grantedUser, theRole, dataverse, actingUser), "Assign role"); + return okResponse(json(roleAssignment)); - } catch (CommandException ex) { + } catch (FailedCommandResult ex) { logger.log(Level.WARNING, "Can''t create assignment: {0}", ex.getMessage()); - return error(ex.getMessage()); + return ex.getResponse(); } } @DELETE @Path("{identifier}/assignments/{id}") - public String deleteAssignment( @PathParam("id") long assignmentId, @PathParam("identifier") String dvIdtf, @QueryParam("key") String apiKey ) { + public Response deleteAssignment( @PathParam("id") long assignmentId, @PathParam("identifier") String dvIdtf, @QueryParam("key") String apiKey ) { DataverseUser actingUser = 
userSvc.findByUserName(apiKey); - if ( actingUser == null ) return error( "Invalid apikey '" + apiKey + "'"); - + if ( actingUser == null ) return badApiKey(apiKey); Dataverse dataverse = findDataverse(dvIdtf); - if ( dataverse == null ) { - return error( "Can't find dataverse with identifier='" + dvIdtf + "'"); - } + if ( dataverse == null ) return notFound( "Can't find dataverse with identifier='" + dvIdtf + "'"); RoleAssignment ra = em.find( RoleAssignment.class, assignmentId ); if ( ra != null ) { em.remove( ra ); em.flush(); - return "Role assignment " + assignmentId + " removed"; + return okResponse("Role assignment " + assignmentId + " removed"); } else { - return "Role assignment " + assignmentId + " not found"; - } - } - - @GET - @Path(":gv") - public String toGraphviz() { - StringBuilder sb = new StringBuilder(); - StringBuilder edges = new StringBuilder(); - - sb.append( "digraph dataverses {"); - for ( Dataverse dv : dataverseSvc.findAll() ) { - sb.append("dv").append(dv.getId()).append(" [label=\"").append(dv.getAlias()).append( "\"]\n"); - if ( dv.getOwner() != null ) { - edges.append("dv").append(dv.getOwner().getId()) - .append("->") - .append("dv").append(dv.getId()) - .append("\n"); - } + return errorResponse( Status.NOT_FOUND, "Role assignment " + assignmentId + " not found" ); } - - sb.append("\n"); - sb.append( edges ); - - sb.append( "}" ); - return sb.toString(); } + @POST + @Path("{identifier}/actions/:publish") + public Response publishDataverse( @PathParam("identifier") String dvIdtf, @QueryParam("key") String apiKey ) { + try { + + Dataverse dv = findDataverse(dvIdtf); + if ( dv == null ) { + return errorResponse( Response.Status.NOT_FOUND, "Can't find dataverse with identifier='" + dvIdtf + "'"); + } + + DataverseUser u = userSvc.findByUserName(apiKey); + if ( u == null ) return errorResponse( Response.Status.UNAUTHORIZED, "Invalid apikey '" + apiKey + "'"); + + dv = engineSvc.submit( new PublishDataverseCommand(u, dv) ); + return 
okResponse( json(dv) ); + + } catch (IllegalCommandException ex) { + return errorResponse( Response.Status.FORBIDDEN, "Error publishing dataverse: " + ex.getMessage() ); + + } catch (CommandException ex) { + Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, "Error while publishing a Dataverse", ex); + return errorResponse( Response.Status.INTERNAL_SERVER_ERROR, "Error publishing the dataset: " + ex.getMessage() ); + } + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Permissions.java b/src/main/java/edu/harvard/iq/dataverse/api/Permissions.java index 249b2785630..8a9c5675e2c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Permissions.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Permissions.java @@ -4,13 +4,12 @@ import edu.harvard.iq.dataverse.DataverseUser; import edu.harvard.iq.dataverse.PermissionServiceBean; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; -import edu.harvard.iq.dataverse.engine.Permission; -import java.util.Set; -import java.util.logging.Logger; import javax.ejb.EJB; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.QueryParam; +import javax.ws.rs.core.Response; +import javax.ws.rs.core.Response.Status; /** * Permission test bean. 
@@ -18,19 +17,17 @@ */ @Path("permissions") public class Permissions extends AbstractApiBean { - private static final Logger logger = Logger.getLogger(Permissions.class.getName()); @EJB PermissionServiceBean permissions; @GET - public String listPermissions( @QueryParam("user") String userIdtf, @QueryParam("on") String dvoIdtf ) { + public Response listPermissions( @QueryParam("user") String userIdtf, @QueryParam("on") String dvoIdtf ) { DataverseUser u = findUser(userIdtf); - if ( u==null ) return error("Can't find user with identifier '" + userIdtf + "'"); + if ( u==null ) return errorResponse( Status.FORBIDDEN, "Can't find user with identifier '" + userIdtf + "'"); Dataverse d = findDataverse(dvoIdtf); - if ( d==null ) error( "Can't find dataverser with identifier '" + dvoIdtf ); + if ( d==null ) notFound("Can't find dataverser with identifier '" + dvoIdtf ); - Set granted = permissions.on(d).user(u).get(); - return ok( json(granted) ) ; + return okResponse( json(permissions.on(d).user(u).get()) ) ; } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Roles.java b/src/main/java/edu/harvard/iq/dataverse/api/Roles.java index 84f17b33c9f..f65442cc1e6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Roles.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Roles.java @@ -6,8 +6,6 @@ import edu.harvard.iq.dataverse.DataverseServiceBean; import edu.harvard.iq.dataverse.DataverseUser; import edu.harvard.iq.dataverse.DataverseUserServiceBean; -import edu.harvard.iq.dataverse.RoleAssignment; -import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.impl.AssignRoleCommand; import java.util.logging.Level; import java.util.logging.Logger; @@ -25,6 +23,8 @@ import javax.ejb.Stateless; import javax.ws.rs.DELETE; import javax.ws.rs.QueryParam; +import javax.ws.rs.core.Response; +import javax.ws.rs.core.Response.Status; /** * Util API for managing roles. Might not make it to the production version. 
@@ -43,78 +43,80 @@ public class Roles extends AbstractApiBean { DataverseServiceBean dvSvc; @GET - public String list() { + public Response list() { JsonArrayBuilder rolesArrayBuilder = Json.createArrayBuilder(); for ( DataverseRole role : rolesSvc.findAll() ) { rolesArrayBuilder.add(json(role)); } - return Util.jsonArray2prettyString(rolesArrayBuilder.build()); + return okResponse(rolesArrayBuilder); } @GET @Path("{id}") - public String viewRole( @PathParam("id") Long id ) { + public Response viewRole( @PathParam("id") Long id ) { DataverseRole role = rolesSvc.find(id); if ( role == null ) { - return error( "role with id " + id + " not found"); + return notFound("role with id " + id + " not found"); } else { - return ok( json(role).build() ); + return okResponse( json(role) ); } } @DELETE @Path("{id}") - public String deleteRole( @PathParam("id") Long id ) { + public Response deleteRole( @PathParam("id") Long id ) { DataverseRole role = rolesSvc.find(id); if ( role == null ) { - return error( "role with id " + id + " not found"); + return notFound( "role with id " + id + " not found"); } else { em.remove(role); - return "role " + id + " deleted."; + return okResponse("role " + id + " deleted."); } } @POST @Path("assignments") - public String assignRole( @FormParam("username") String username, + public Response assignRole( @FormParam("username") String username, @FormParam("roleId") long roleId, @FormParam("definitionPointId") long dvObjectId, @QueryParam("key") String key ) { - DataverseUser u = usersSvc.findByUserName(username); - if ( u == null ) return error("no user with username " + username ); - DataverseUser issuer = usersSvc.findByUserName(key); - if ( issuer == null ) return error("invalid api key '" + key +"'" ); + + DataverseUser issuer = usersSvc.findByUserName(key); + if ( issuer == null ) return errorResponse( Status.UNAUTHORIZED, "invalid api key '" + key +"'" ); + + DataverseUser u = usersSvc.findByUserName(username); + if ( u == null ) return 
errorResponse( Status.BAD_REQUEST, "no user with username " + username ); Dataverse d = dvSvc.find( dvObjectId ); - if ( d == null ) return error("no DvObject with id " + dvObjectId ); + if ( d == null ) return errorResponse( Status.BAD_REQUEST, "no DvObject with id " + dvObjectId ); DataverseRole r = rolesSvc.find(roleId); - if ( r == null ) return error("no role with id " + roleId ); + if ( r == null ) return errorResponse( Status.BAD_REQUEST, "no role with id " + roleId ); try { - RoleAssignment ra = engineSvc.submit( new AssignRoleCommand(u,r,d, issuer) ); - return ok( json(ra).build() ); + return okResponse( json(execCommand( new AssignRoleCommand(u,r,d, issuer), "Assign Role")) ); - } catch (CommandException ex) { + } catch (FailedCommandResult ex) { logger.log( Level.WARNING, "Error Assigning role", ex ); - return error("Assignment Faild: " + ex.getMessage() ); + return ex.getResponse(); } } @POST - public String createNewRole( RoleDTO roleDto, + public Response createNewRole( RoleDTO roleDto, @QueryParam("dvo") String dvoIdtf, @QueryParam("key") String key ) { - DataverseUser u = usersSvc.findByUserName(key); - if ( u == null ) return error("bad api key " + key ); + + DataverseUser issuer = usersSvc.findByUserName(key); + if ( issuer == null ) return errorResponse( Status.UNAUTHORIZED, "invalid api key '" + key +"'" ); + Dataverse d = findDataverse(dvoIdtf); - if ( d == null ) return error("no dataverse with id " + dvoIdtf ); + if ( d == null ) return errorResponse( Status.BAD_REQUEST, "no dataverse with id " + dvoIdtf ); try { - return ok(json(engineSvc.submit( new CreateRoleCommand(roleDto.asRole(), u, d) ))); - } catch ( CommandException ce ) { - return error( ce.getMessage() ); + return okResponse(json(execCommand(new CreateRoleCommand(roleDto.asRole(), issuer, d), "Create New Role"))); + } catch ( FailedCommandResult ce ) { + return ce.getResponse(); } } - - + } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Users.java 
b/src/main/java/edu/harvard/iq/dataverse/api/Users.java index 88ae39f5332..51e04db9da4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Users.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Users.java @@ -11,6 +11,8 @@ import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.QueryParam; +import javax.ws.rs.core.Response; +import javax.ws.rs.core.Response.Status; /** * @@ -21,44 +23,44 @@ public class Users extends AbstractApiBean { private static final Logger logger = Logger.getLogger(Users.class.getName()); @GET - public String list() { + public Response list() { JsonArrayBuilder bld = Json.createArrayBuilder(); for ( DataverseUser u : userSvc.findAll() ) { bld.add( json(u) ); } - return ok( bld.build() ); + return okResponse( bld ); } @GET @Path("{identifier}") - public String view( @PathParam("identifier") String identifier ) { + public Response view( @PathParam("identifier") String identifier ) { DataverseUser u = findUser(identifier); return ( u!=null ) - ? ok( json(u).build() ) - : error( "Can't find user with identifier '" + identifier + "'"); + ? 
okResponse( json(u) ) + : errorResponse( Status.NOT_FOUND, "Can't find user with identifier '" + identifier + "'"); } @POST - public String save( DataverseUser user, @QueryParam("password") String password ) { + public Response save( DataverseUser user, @QueryParam("password") String password ) { try { if ( password != null ) { user.setEncryptedPassword(userSvc.encryptPassword(password)); } user = userSvc.save(user); - return ok ( json(user).build() ); + return okResponse( json(user) ); } catch ( Exception e ) { logger.log( Level.WARNING, "Error saving user", e ); - return error( "Can't save user: " + e.getMessage() ); + return errorResponse( Status.INTERNAL_SERVER_ERROR, "Can't save user: " + e.getMessage() ); } } @GET @Path(":guest") - public String genarateGuest() { - return ok( json(userSvc.createGuestUser()) ); + public Response genarateGuest() { + return okResponse( json(userSvc.createGuestUser()) ); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionDepositManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionDepositManagerImpl.java index 6348d16e875..c338e431ab4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionDepositManagerImpl.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionDepositManagerImpl.java @@ -4,14 +4,15 @@ import edu.harvard.iq.dataverse.DatasetField; import edu.harvard.iq.dataverse.DatasetFieldServiceBean; import edu.harvard.iq.dataverse.DatasetFieldType; -import edu.harvard.iq.dataverse.DatasetFieldValidator; import edu.harvard.iq.dataverse.DatasetFieldValue; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.DataverseServiceBean; import edu.harvard.iq.dataverse.DataverseUser; import edu.harvard.iq.dataverse.EjbDataverseEngine; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import 
edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetCommand; +import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetCommand; import java.io.File; import java.io.IOException; import java.util.ArrayList; @@ -135,7 +136,8 @@ public DepositReceipt createNew(String collectionUri, Deposit deposit, AuthCrede * https://redmine.hmdc.harvard.edu/issues/3993 */ dataset.setProtocol("doi"); - dataset.setAuthority("myAuthority"); + dataset.setAuthority("10.5072/FK2"); + // temporary, will change identifer to database id of dataset after we know it dataset.setIdentifier(UUID.randomUUID().toString()); DatasetVersion newDatasetVersion = dataset.getVersions().get(0); @@ -153,10 +155,11 @@ public DepositReceipt createNew(String collectionUri, Deposit deposit, AuthCrede newDatasetVersion.setDatasetFields(datasetFields); + Dataset createdDataset = null; try { // there is no importStudy method in 4.0 :( // study = studyService.importStudy(tmpFile, dcmiTermsFormatId, dvThatWillOwnStudy.getId(), vdcUser.getId()); - engineSvc.submit(new CreateDatasetCommand(dataset, dataverseUser)); + createdDataset = engineSvc.submit(new CreateDatasetCommand(dataset, dataverseUser)); } catch (Exception ex) { // StringWriter stringWriter = new StringWriter(); // ex.printStackTrace(new PrintWriter(stringWriter)); @@ -188,10 +191,20 @@ public DepositReceipt createNew(String collectionUri, Deposit deposit, AuthCrede tmpFile.delete(); uploadDir.delete(); } - ReceiptGenerator receiptGenerator = new ReceiptGenerator(); - String baseUrl = urlManager.getHostnamePlusBaseUrlPath(collectionUri); - DepositReceipt depositReceipt = receiptGenerator.createReceipt(baseUrl, dataset); - return depositReceipt; + if (createdDataset != null) { + try { + createdDataset.setIdentifier(createdDataset.getId().toString()); + engineSvc.submit(new UpdateDatasetCommand(createdDataset, dataverseUser)); + } catch (CommandException ex) { + throw new SwordError("Dataset created but identifier was not changed to database id 
from " + dataset.getIdentifier() + " " + ex); + } + ReceiptGenerator receiptGenerator = new ReceiptGenerator(); + String baseUrl = urlManager.getHostnamePlusBaseUrlPath(collectionUri); + DepositReceipt depositReceipt = receiptGenerator.createReceipt(baseUrl, createdDataset); + return depositReceipt; + } else { + throw new SwordError("Dataset created but identifier was not changed to database id from " + dataset.getIdentifier()); + } } else if (deposit.isBinaryOnly()) { // get here with this: // curl --insecure -s --data-binary "@example.zip" -H "Content-Disposition: filename=example.zip" -H "Content-Type: application/zip" https://sword:sword@localhost:8181/dvn/api/data-deposit/v1/swordv2/collection/dataverse/sword/ diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ContainerManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ContainerManagerImpl.java index cf1ee53e852..8d3b8ee2d3e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ContainerManagerImpl.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ContainerManagerImpl.java @@ -4,12 +4,16 @@ import edu.harvard.iq.dataverse.DatasetServiceBean; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.DataverseServiceBean; import edu.harvard.iq.dataverse.DataverseUser; import edu.harvard.iq.dataverse.EjbDataverseEngine; import edu.harvard.iq.dataverse.IndexServiceBean; +import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.CommandExecutionException; import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetCommand; +import edu.harvard.iq.dataverse.engine.command.impl.PublishDatasetCommand; +import edu.harvard.iq.dataverse.engine.command.impl.PublishDataverseCommand; import edu.harvard.iq.dataverse.export.DDIExportServiceBean; import 
java.io.File; import java.util.List; @@ -40,6 +44,8 @@ public class ContainerManagerImpl implements ContainerManager { @EJB protected EjbDataverseEngine engineSvc; @EJB + DataverseServiceBean dataverseService; + @EJB DatasetServiceBean datasetService; @EJB IndexServiceBean indexService; @@ -294,19 +300,42 @@ public void deleteContainer(String uri, AuthCredentials authCredentials, SwordCo } if (study != null) { Dataverse dvThatOwnsStudy = study.getOwner(); - if (swordAuth.hasAccessToModifyDataverse(vdcUser, dvThatOwnsStudy)) { - DatasetVersion.VersionState studyState = study.getLatestVersion().getVersionState(); + if (!swordAuth.hasAccessToModifyDataverse(vdcUser, dvThatOwnsStudy)) { + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "User " + vdcUser.getUserName() + " is not authorized to modify " + dvThatOwnsStudy.getAlias()); + } + DatasetVersion.VersionState studyState = study.getLatestVersion().getVersionState(); + if (study.isReleased()) { if (studyState.equals(DatasetVersion.VersionState.DRAFT)) { logger.info("destroying working copy version of study " + study.getGlobalId()); /** * @todo in DVN 3.x we had a convenient - * destroyWorkingCopyVersion method but the - * DeleteDatasetCommand is pretty scary... what - * if a released study has a new draft version? - * What we need is a - * DeleteDatasetVersionCommand, I suppose... + * destroyWorkingCopyVersion method. We have + * DeleteDatasetCommand but we need + * DeleteDatasetEditVersionCommand */ -// studyService.destroyWorkingCopyVersion(study.getLatestVersion().getId()); + // studyService.destroyWorkingCopyVersion(study.getLatestVersion().getId()); + throw SwordUtil.throwSpecialSwordErrorWithoutStackTrace(UriRegistry.ERROR_METHOD_NOT_ALLOWED, "This dataset has been published and subsequently a draft has been created. 
You are trying to delete that draft but this is not yet supported: https://redmine.hmdc.harvard.edu/issues/4032"); + } else if (studyState.equals(DatasetVersion.VersionState.RELEASED)) { +// logger.fine("deaccessioning latest version of study " + study.getGlobalId()); +// studyService.deaccessionStudy(study.getLatestVersion()); + /** + * @todo revisit this when deaccessioning is + * available in + * https://redmine.hmdc.harvard.edu/issues/4031 + */ + throw SwordUtil.throwSpecialSwordErrorWithoutStackTrace(UriRegistry.ERROR_METHOD_NOT_ALLOWED, "Deaccessioning a dataset is not yet supported: https://redmine.hmdc.harvard.edu/issues/4031"); + } else if (studyState.equals(DatasetVersion.VersionState.DEACCESSIONED)) { + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Latest version of dataset " + study.getGlobalId() + " has already been deaccessioned."); + } else if (studyState.equals(DatasetVersion.VersionState.ARCHIVED)) { + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Latest version of study " + study.getGlobalId() + " has been archived and can not be deleted or deaccessioned."); + } else if (studyState.equals(DatasetVersion.VersionState.IN_REVIEW)) { + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Latest version of study " + study.getGlobalId() + " is in review and can not be deleted or deaccessioned."); + } else { + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Operation not valid for study " + study.getGlobalId() + " in state " + studyState); + } + } else { + // dataset has never been published, this is just a sanity check (should always be draft) + if (studyState.equals(DatasetVersion.VersionState.DRAFT)) { try { engineSvc.submit(new DeleteDatasetCommand(study, vdcUser)); /** @@ -320,23 +349,10 @@ public void deleteContainer(String uri, AuthCredentials authCredentials, SwordCo } catch (CommandException ex) { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Can't delete dataset: " + ex.getMessage()); } - /** - * @todo think about
how to handle non-drafts - */ - } else if (studyState.equals(DatasetVersion.VersionState.RELEASED)) { -// logger.fine("deaccessioning latest version of study " + study.getGlobalId()); -// studyService.deaccessionStudy(study.getLatestVersion()); - } else if (studyState.equals(DatasetVersion.VersionState.DEACCESSIONED)) { -// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Lastest version of study " + study.getGlobalId() + " has already been deaccessioned."); - } else if (studyState.equals(DatasetVersion.VersionState.ARCHIVED)) { -// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Lastest version of study " + study.getGlobalId() + " has been archived and can not be deleted or deaccessioned."); - } else if (studyState.equals(DatasetVersion.VersionState.IN_REVIEW)) { -// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Lastest version of study " + study.getGlobalId() + " is in review and can not be deleted or deaccessioned."); } else { -// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Operation not valid for study " + study.getGlobalId() + " in state " + studyState); + // we should never get here. 
throw an error explaining why + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "dataset is in illegal state (not released yet not in draft)"); } - } else { - throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "User " + vdcUser.getUserName() + " is not authorized to modify " + dvThatOwnsStudy.getAlias()); } } else { throw new SwordError(404); @@ -356,7 +372,7 @@ public void deleteContainer(String uri, AuthCredentials authCredentials, SwordCo public DepositReceipt useHeaders(String uri, Deposit deposit, AuthCredentials authCredentials, SwordConfiguration swordConfiguration) throws SwordError, SwordServerException, SwordAuthException { logger.fine("uri was " + uri); logger.fine("isInProgress:" + deposit.isInProgress()); - DataverseUser vdcUser = swordAuth.auth(authCredentials); + DataverseUser dataverseUser = swordAuth.auth(authCredentials); urlManager.processUrl(uri); String targetType = urlManager.getTargetType(); if (!targetType.isEmpty()) { @@ -364,15 +380,15 @@ public DepositReceipt useHeaders(String uri, Deposit deposit, AuthCredentials au if ("study".equals(targetType)) { String globalId = urlManager.getTargetIdentifier(); if (globalId != null) { - Dataset studyToRelease = null; + Dataset dataset = null; try { -// studyToRelease = studyService.getStudyByGlobalId(globalId); + dataset = datasetService.findByGlobalId(globalId); } catch (EJBException ex) { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find study based on global id (" + globalId + ") in URL: " + uri); } - if (studyToRelease != null) { - Dataverse dvThatOwnsStudy = studyToRelease.getOwner(); - if (swordAuth.hasAccessToModifyDataverse(vdcUser, dvThatOwnsStudy)) { + if (dataset != null) { + Dataverse dvThatOwnsStudy = dataset.getOwner(); + if (swordAuth.hasAccessToModifyDataverse(dataverseUser, dvThatOwnsStudy)) { if (!deposit.isInProgress()) { /** * We are considering a draft version of a study @@ -393,16 +409,29 @@ public DepositReceipt useHeaders(String uri, Deposit deposit, 
AuthCredentials au * that the deposit is complete." -- * http://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html#continueddeposit_incomplete */ - if (!studyToRelease.getLatestVersion().getVersionState().equals(DatasetVersion.VersionState.RELEASED)) { - /** - * @todo DVN 3.x had - * studyService.setReleased... what should - * we do in 4.0? - */ -// studyService.setReleased(studyToRelease.getId()); + if (!dataset.getLatestVersion().getVersionState().equals(DatasetVersion.VersionState.RELEASED)) { + Command cmd; + try { + /** + * @todo We *could* attempt a minor + * version bump first and if it fails go + * ahead and re-try with a major version + * bump. For simplicity, we decided to + * always do a major version bump but + * the @todo is to think about this a + * bit more before we release 4.0. + */ + boolean attemptMinorVersionBump = false; + cmd = new PublishDatasetCommand(dataset, dataverseUser, attemptMinorVersionBump); + dataset = engineSvc.submit(cmd); + } catch (CommandException ex) { + String msg = "Unable to publish dataset: " + ex; + logger.severe(msg + ": " + ex.getMessage()); + throw SwordUtil.throwRegularSwordErrorWithoutStackTrace(msg); + } ReceiptGenerator receiptGenerator = new ReceiptGenerator(); String baseUrl = urlManager.getHostnamePlusBaseUrlPath(uri); - DepositReceipt depositReceipt = receiptGenerator.createReceipt(baseUrl, studyToRelease); + DepositReceipt depositReceipt = receiptGenerator.createReceipt(baseUrl, dataset); return depositReceipt; } else { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Latest version of dataset " + globalId + " has already been released."); @@ -411,7 +440,7 @@ public DepositReceipt useHeaders(String uri, Deposit deposit, AuthCredentials au throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Pass 'In-Progress: false' header to release a study."); } } else { - throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "User " + vdcUser.getUserName() + " is not authorized to modify dataverse " + 
dvThatOwnsStudy.getAlias()); + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "User " + dataverseUser.getUserName() + " is not authorized to modify dataverse " + dvThatOwnsStudy.getAlias()); } } else { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find study using globalId " + globalId); @@ -421,73 +450,35 @@ public DepositReceipt useHeaders(String uri, Deposit deposit, AuthCredentials au } } else if ("dataverse".equals(targetType)) { /** - * @todo support releasing of dataverses via SWORD + * @todo confirm we want to allow dataverses to be released via + * SWORD. If so, document the curl example. */ -// String dvAlias = urlManager.getTargetIdentifier(); -// if (dvAlias != null) { -// VDC dvToRelease = vdcService.findByAlias(dvAlias); -// if (swordAuth.hasAccessToModifyDataverse(vdcUser, dvToRelease)) { -// if (dvToRelease != null) { -// String optionalPort = ""; -// URI u; -// try { -// u = new URI(uri); -// int port = u.getPort(); -// if (port != -1) { -// // https often runs on port 8181 in dev -// optionalPort = ":" + port; -// } -// } catch (URISyntaxException ex) { -// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "unable to part URL"); -// } -// String hostName = System.getProperty("dvn.inetAddress"); -// String dvHomePage = "https://" + hostName + optionalPort + "/dvn/dv/" + dvToRelease.getAlias(); -// if (deposit.isInProgress()) { -// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Changing a dataverse to 'not released' is not supported. 
Please change to 'not released' from the web interface: " + dvHomePage); -// } else { -// try { -// getVDCRequestBean().setVdcNetwork(dvToRelease.getVdcNetwork()); -// } catch (ContextNotActiveException ex) { -// /** -// * todo: observe same rules about dataverse -// * release via web interface such as a study -// * or a collection must be release: -// * https://redmine.hmdc.harvard.edu/issues/3225 -// */ -// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Releasing a dataverse is not yet supported. Please release from the web interface: " + dvHomePage); -// } -// OptionsPage optionsPage = new OptionsPage(); -// if (optionsPage.isReleasable()) { -// if (dvToRelease.isRestricted()) { -// logger.fine("releasing dataverse via SWORD: " + dvAlias); -// /** -// * @todo: tweet and send email about -// * release -// */ -// dvToRelease.setReleaseDate(DateUtil.getTimestamp()); -// dvToRelease.setRestricted(false); -// vdcService.edit(dvToRelease); - DepositReceipt fakeDepositReceipt = new DepositReceipt(); -// IRI fakeIri = new IRI("fakeIriDvWasJustReleased"); -// fakeDepositReceipt.setEditIRI(fakeIri); -// fakeDepositReceipt.setVerboseDescription("Dataverse alias: " + dvAlias); - return fakeDepositReceipt; -// } else { -// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Dataverse has already been released: " + dvAlias); -// } -// } else { -// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "dataverse is not releaseable"); -// } -// } -// } else { -// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find dataverse based on alias in URL: " + uri); -// } -// } else { -// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "User " + vdcUser.getUserName() + " is not authorized to modify dataverse " + dvAlias); -// } -// } else { -// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to find dataverse alias in URL: " + uri); -// } + String dvAlias = urlManager.getTargetIdentifier(); + if (dvAlias != null) { + Dataverse dvToRelease = 
dataverseService.findByAlias(dvAlias); + if (dvToRelease != null) { + if (!swordAuth.hasAccessToModifyDataverse(dataverseUser, dvToRelease)) { + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "User " + dataverseUser.getUserName() + " is not authorized to modify dataverse " + dvAlias); + } + if (deposit.isInProgress()) { + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unpublishing a dataverse is not supported."); + } + PublishDataverseCommand cmd = new PublishDataverseCommand(dataverseUser, dvToRelease); + try { + engineSvc.submit(cmd); + ReceiptGenerator receiptGenerator = new ReceiptGenerator(); + String baseUrl = urlManager.getHostnamePlusBaseUrlPath(uri); + DepositReceipt depositReceipt = receiptGenerator.createDataverseReceipt(baseUrl, dvToRelease); + return depositReceipt; + } catch (CommandException ex) { + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Couldn't publish dataverse " + dvAlias + ": " + ex); + } + } else { + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find dataverse based on alias in URL: " + uri); + } + } else { + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to find dataverse alias in URL: " + uri); + } } else { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "unsupported target type (" + targetType + ") in URL:" + uri); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java index bef7b3e2261..a06967f77eb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java @@ -1,25 +1,21 @@ package edu.harvard.iq.dataverse.api.datadeposit; import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.DataFileServiceBean; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetServiceBean; import 
edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.DataverseUser; import edu.harvard.iq.dataverse.EjbDataverseEngine; -import edu.harvard.iq.dataverse.FileMetadata; import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.impl.DeleteDataFileCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetCommand; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import java.io.ByteArrayInputStream; -import java.io.File; -import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; -import java.nio.file.Files; -import java.nio.file.Paths; -import java.nio.file.StandardCopyOption; import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -29,9 +25,6 @@ import javax.ejb.EJB; import javax.ejb.EJBException; import javax.inject.Inject; -import javax.naming.Context; -import javax.naming.InitialContext; -import javax.naming.NamingException; import javax.validation.ConstraintViolation; import javax.validation.ConstraintViolationException; import org.swordapp.server.AuthCredentials; @@ -53,6 +46,8 @@ public class MediaResourceManagerImpl implements MediaResourceManager { @EJB DatasetServiceBean datasetService; @EJB + DataFileServiceBean dataFileService; + @EJB IngestServiceBean ingestService; @Inject SwordAuth swordAuth; @@ -62,44 +57,37 @@ public class MediaResourceManagerImpl implements MediaResourceManager { @Override public MediaResource getMediaResourceRepresentation(String uri, Map map, AuthCredentials authCredentials, SwordConfiguration swordConfiguration) throws SwordError, SwordServerException, SwordAuthException { - DataverseUser vdcUser = swordAuth.auth(authCredentials); + DataverseUser dataverseUser = swordAuth.auth(authCredentials); urlManager.processUrl(uri); String globalId = urlManager.getTargetIdentifier(); if 
(urlManager.getTargetType().equals("study") && globalId != null) { -// EditStudyService editStudyService; - Context ctx; - try { - ctx = new InitialContext(); -// editStudyService = (EditStudyService) ctx.lookup("java:comp/env/editStudy"); - } catch (NamingException ex) { - logger.info("problem looking up editStudyService"); - throw new SwordServerException("problem looking up editStudyService"); - } - logger.fine("looking up study with globalId " + globalId); -// Study study = editStudyService.getStudyByGlobalId(globalId); - Dataset study = null; - if (study != null) { + logger.fine("looking up dataset with globalId " + globalId); + Dataset dataset = datasetService.findByGlobalId(globalId); + if (dataset != null) { /** - * @todo: support this + * @todo: support downloading of files (SWORD 2.0 Profile 6.4. - + * Retrieving the content) + * http://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html#protocoloperations_retrievingcontent + * https://redmine.hmdc.harvard.edu/issues/3595 */ boolean getMediaResourceRepresentationSupported = false; if (getMediaResourceRepresentationSupported) { - Dataverse dvThatOwnsStudy = study.getOwner(); - if (swordAuth.hasAccessToModifyDataverse(vdcUser, dvThatOwnsStudy)) { - InputStream fixmeInputStream = new ByteArrayInputStream("FIXME: replace with zip of all study files".getBytes()); + Dataverse dvThatOwnsStudy = dataset.getOwner(); + if (swordAuth.hasAccessToModifyDataverse(dataverseUser, dvThatOwnsStudy)) { + InputStream fixmeInputStream = new ByteArrayInputStream("FIXME: replace with zip of all dataset files".getBytes()); String contentType = "application/zip"; String packaging = UriRegistry.PACKAGE_SIMPLE_ZIP; boolean isPackaged = true; MediaResource mediaResource = new MediaResource(fixmeInputStream, contentType, packaging, isPackaged); return mediaResource; } else { - throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "user " + vdcUser.getUserName() + " is not authorized to get a media resource representation of the 
dataset with global ID " + study.getGlobalId()); + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "user " + dataverseUser.getUserName() + " is not authorized to get a media resource representation of the dataset with global ID " + dataset.getGlobalId()); } } else { - throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Please use the Dataverse Network Data Sharing API instead"); + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Downloading files via the SWORD-based Dataverse Data Deposit API is not (yet) supported: https://redmine.hmdc.harvard.edu/issues/3595"); } } else { - throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "couldn't find study with global ID of " + globalId); + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Couldn't find dataset with global ID of " + globalId); } } else { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Couldn't dermine target type or identifier from URL: " + uri); @@ -121,12 +109,12 @@ public DepositReceipt replaceMediaResource(String uri, Deposit deposit, AuthCred * and an empty zip uploaded. If no files are unzipped the user will see * a error about this but the files will still be deleted! */ - throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Replacing the files of a study is not supported. Please delete and add files separately instead."); + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Replacing the files of a dataset is not supported. 
Please delete and add files separately instead."); } @Override public void deleteMediaResource(String uri, AuthCredentials authCredentials, SwordConfiguration swordConfiguration) throws SwordError, SwordServerException, SwordAuthException { - DataverseUser vdcUser = swordAuth.auth(authCredentials); + DataverseUser dataverseUser = swordAuth.auth(authCredentials); urlManager.processUrl(uri); String targetType = urlManager.getTargetType(); String fileId = urlManager.getTargetIdentifier(); @@ -141,50 +129,41 @@ public void deleteMediaResource(String uri, AuthCredentials authCredentials, Swo throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "File id must be a number, not '" + fileIdString + "'. URL was: " + uri); } if (fileIdLong != null) { - logger.fine("preparing to delete file id " + fileIdLong); -// StudyFile fileToDelete; - try { -// fileToDelete = studyFileService.getStudyFile(fileIdLong); - } catch (EJBException ex) { - throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to find file id " + fileIdLong); - } -// if (fileToDelete != null) { + logger.info("preparing to delete file id " + fileIdLong); + DataFile fileToDelete = dataFileService.find(fileIdLong); + if (fileToDelete != null) { + /** + * @todo test if StudyLock is necessary + */ // Study study = fileToDelete.getStudy(); // StudyLock studyLock = study.getStudyLock(); // if (studyLock != null) { // String message = Util.getStudyLockMessage(studyLock, study.getGlobalId()); // throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, message); // } -// String globalId = study.getGlobalId(); -// VDC dvThatOwnsFile = fileToDelete.getStudy().getOwner(); -// if (swordAuth.hasAccessToModifyDataverse(vdcUser, dvThatOwnsFile)) { -// EditStudyFilesService editStudyFilesService; -// try { -// Context ctx = new InitialContext(); -// editStudyFilesService = (EditStudyFilesService) ctx.lookup("java:comp/env/editStudyFiles"); -// } catch (NamingException ex) { -// logger.info("problem looking up 
editStudyFilesService"); -// throw new SwordServerException("problem looking up editStudyFilesService"); -// } -// editStudyFilesService.setStudyVersionByGlobalId(globalId); -// // editStudyFilesService.findStudyFileEditBeanById() would be nice -// List studyFileEditBeans = editStudyFilesService.getCurrentFiles(); -// for (Iterator it = studyFileEditBeans.iterator(); it.hasNext();) { -// StudyFileEditBean studyFileEditBean = (StudyFileEditBean) it.next(); -// if (studyFileEditBean.getStudyFile().getId().equals(fileToDelete.getId())) { -// logger.fine("marked for deletion: " + studyFileEditBean.getStudyFile().getFileName()); -// studyFileEditBean.setDeleteFlag(true); -// } else { -// logger.fine("not marked for deletion: " + studyFileEditBean.getStudyFile().getFileName()); -// } -// } -// editStudyFilesService.save(dvThatOwnsFile.getId(), vdcUser.getId()); -// } else { -// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "User " + vdcUser.getUserName() + " is not authorized to modify " + dvThatOwnsFile.getAlias()); -// } -// } else { -// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to find file id " + fileIdLong + " from URL: " + uri); -// } + Dataset datasetThatOwnsFile = fileToDelete.getOwner(); + Dataverse dataverseThatOwnsFile = datasetThatOwnsFile.getOwner(); + if (swordAuth.hasAccessToModifyDataverse(dataverseUser, dataverseThatOwnsFile)) { + try { + /** + * @todo with only one command, should we be + * falling back on the permissions system to + * enforce if the user can delete a file or + * not. If we do, a 403 Forbidden is + * returned. For now, we'll have belt and + * suspenders and do our normal sword auth + * check. 
+ */ + commandEngine.submit(new DeleteDataFileCommand(fileToDelete, dataverseUser)); + } catch (CommandException ex) { + throw SwordUtil.throwSpecialSwordErrorWithoutStackTrace(UriRegistry.ERROR_BAD_REQUEST, "Could not delete file: " + ex); + } + } else { + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "User " + dataverseUser.getUserName() + " is not authorized to modify " + dataverseThatOwnsFile.getAlias()); + } + } else { + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to find file id " + fileIdLong + " from URL: " + uri); + } } else { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to find file id in URL: " + uri); } @@ -211,18 +190,9 @@ DepositReceipt replaceOrAddFiles(String uri, Deposit deposit, AuthCredentials au urlManager.processUrl(uri); String globalId = urlManager.getTargetIdentifier(); if (urlManager.getTargetType().equals("study") && globalId != null) { -// EditStudyService editStudyService; - Context ctx; - try { - ctx = new InitialContext(); -// editStudyService = (EditStudyService) ctx.lookup("java:comp/env/editStudy"); - } catch (NamingException ex) { - logger.info("problem looking up editStudyService"); - throw new SwordServerException("problem looking up editStudyService"); - } logger.fine("looking up study with globalId " + globalId); - Dataset study = datasetService.findByGlobalId(globalId); - if (study == null) { + Dataset dataset = datasetService.findByGlobalId(globalId); + if (dataset == null) { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find study with global ID of " + globalId); } // StudyLock studyLock = study.getStudyLock(); @@ -230,138 +200,23 @@ DepositReceipt replaceOrAddFiles(String uri, Deposit deposit, AuthCredentials au // String message = Util.getStudyLockMessage(studyLock, study.getGlobalId()); // throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, message); // } - Long studyId; - try { - studyId = study.getId(); - } catch (NullPointerException ex) { - throw new 
SwordError(UriRegistry.ERROR_BAD_REQUEST, "couldn't find study with global ID of " + globalId); - } - Dataverse dvThatOwnsStudy = study.getOwner(); - if (swordAuth.hasAccessToModifyDataverse(vdcUser, dvThatOwnsStudy)) { -// editStudyService.setStudyVersion(studyId); -// editStudyService.save(dvThatOwnsStudy.getId(), vdcUser.getId()); -// -// EditStudyFilesService editStudyFilesService; -// try { -// editStudyFilesService = (EditStudyFilesService) ctx.lookup("java:comp/env/editStudyFiles"); -// } catch (NamingException ex) { -// logger.info("problem looking up editStudyFilesService"); -// throw new SwordServerException("problem looking up editStudyFilesService"); -// } -// editStudyFilesService.setStudyVersionByGlobalId(globalId); -// List studyFileEditBeans = editStudyFilesService.getCurrentFiles(); - List exisitingFilenames = new ArrayList(); -// for (Iterator it = studyFileEditBeans.iterator(); it.hasNext();) { -// StudyFileEditBean studyFileEditBean = (StudyFileEditBean) it.next(); - if (shouldReplace) { -// studyFileEditBean.setDeleteFlag(true); -// logger.fine("marked for deletion: " + studyFileEditBean.getStudyFile().getFileName()); - } else { -// String filename = studyFileEditBean.getStudyFile().getFileName(); -// exisitingFilenames.add(filename); - } - } -// editStudyFilesService.save(dvThatOwnsStudy.getId(), vdcUser.getId()); - - if (!deposit.getPackaging().equals(UriRegistry.PACKAGE_SIMPLE_ZIP)) { - throw new SwordError(UriRegistry.ERROR_CONTENT, 415, "Package format " + UriRegistry.PACKAGE_SIMPLE_ZIP + " is required but format specified in 'Packaging' HTTP header was " + deposit.getPackaging()); + Dataverse dvThatOwnsDataset = dataset.getOwner(); + if (!swordAuth.hasAccessToModifyDataverse(vdcUser, dvThatOwnsDataset)) { + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "user " + vdcUser.getUserName() + " is not authorized to modify dataset with global ID " + dataset.getGlobalId()); } // Right now we are only supporting UriRegistry.PACKAGE_SIMPLE_ZIP 
but // in the future maybe we'll support other formats? Rdata files? Stata files? - // That's what the uploadDir was going to be for, but for now it's commented out - // - String importDirString; - File importDir; - String swordTempDirString = swordConfiguration.getTempDirectory(); - if (swordTempDirString == null) { - throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not determine temp directory"); - } else { - importDirString = swordTempDirString + File.separator + "import" + File.separator + study.getId().toString(); - importDir = new File(importDirString); - if (!importDir.exists()) { - if (!importDir.mkdirs()) { - logger.info("couldn't create directory: " + importDir.getAbsolutePath()); - throw new SwordServerException("couldn't create import directory"); - } - } - } - - if (true) { - DataFile dFile = new DataFile("application/octet-stream"); - dFile.setOwner(study); - datasetService.generateFileSystemName(dFile); -// if (true) { -// throw returnEarly("dataFile.getFileSystemName(): " + dFile.getFileSystemName()); -// } - InputStream depositInputStream = deposit.getInputStream(); - try { - Files.copy(depositInputStream, Paths.get(importDirString, dFile.getFileSystemName()), StandardCopyOption.REPLACE_EXISTING); - } catch (IOException ex) { - throw new SwordError("problem running Files.copy"); - } - study.getFiles().add(dFile); - - DatasetVersion editVersion = study.getEditVersion(); -// boolean metadataExtracted = false; -// try { -// metadataExtracted = ingestService.extractIndexableMetadata(importDir.getAbsolutePath() + File.separator + dFile.getFileSystemName(), dFile, editVersion); -// } catch (IOException ex) { -// throw returnEarly("couldn't extract metadata" + ex); -// } - FileMetadata fmd = new FileMetadata(); - fmd.setDataFile(dFile); - fmd.setLabel("myLabel"); - fmd.setDatasetVersion(editVersion); - dFile.getFileMetadatas().add(fmd); - - Command cmd; - cmd = new UpdateDatasetCommand(study, vdcUser); - try { - /** - * @todo at update time 
indexing is run but the file is not - * indexed. Why? Manually re-indexing later finds it. Fix - * this. Related to - * https://redmine.hmdc.harvard.edu/issues/3809 ? - */ - study = commandEngine.submit(cmd); - } catch (CommandException ex) { - throw returnEarly("couldn't update dataset"); - } catch (EJBException ex) { - Throwable cause = ex; - StringBuilder sb = new StringBuilder(); - sb.append(ex.getLocalizedMessage()); - while (cause.getCause() != null) { - cause = cause.getCause(); - sb.append(cause + " "); - if (cause instanceof ConstraintViolationException) { - ConstraintViolationException constraintViolationException = (ConstraintViolationException) cause; - for (ConstraintViolation violation : constraintViolationException.getConstraintViolations()) { - sb.append(" Invalid value: <<<").append(violation.getInvalidValue()).append(">>> for ") - .append(violation.getPropertyPath()).append(" at ") - .append(violation.getLeafBean()).append(" - ") - .append(violation.getMessage()); - } - } - } - throw returnEarly("EJBException: " + sb.toString()); - } - + if (!deposit.getPackaging().equals(UriRegistry.PACKAGE_SIMPLE_ZIP)) { + throw new SwordError(UriRegistry.ERROR_CONTENT, 415, "Package format " + UriRegistry.PACKAGE_SIMPLE_ZIP + " is required but format specified in 'Packaging' HTTP header was " + deposit.getPackaging()); } - /** - * @todo remove this comment after confirming that the upstream jar - * now has our bugfix - */ - // the first character of the filename is truncated with the official jar - // so we use include the bug fix at https://github.com/IQSS/swordv2-java-server-library/commit/aeaef83 - // and use this jar: https://build.hmdc.harvard.edu:8443/job/swordv2-java-server-library-iqss/2/ String uploadedZipFilename = deposit.getFilename(); ZipInputStream ziStream = new ZipInputStream(deposit.getInputStream()); ZipEntry zEntry; - FileOutputStream tempOutStream = null; -// List fbList = new ArrayList(); + DatasetVersion editVersion = 
dataset.getEditVersion(); + List newFiles = new ArrayList<>(); try { // copied from createStudyFilesFromZip in AddFilesPage while ((zEntry = ziStream.getNextEntry()) != null) { @@ -369,24 +224,25 @@ DepositReceipt replaceOrAddFiles(String uri, Deposit deposit, AuthCredentials au // simply skip them: if (!zEntry.isDirectory()) { - String fileEntryName = zEntry.getName(); - logger.fine("file found: " + fileEntryName); - - String dirName = null; - String finalFileName = null; - - int ind = fileEntryName.lastIndexOf('/'); - - if (ind > -1) { - finalFileName = fileEntryName.substring(ind + 1); - if (ind > 0) { - dirName = fileEntryName.substring(0, ind); - dirName = dirName.replace('/', '-'); + String finalFileName = "UNKNOWN"; + if (zEntry.getName() != null) { + String zentryFilename = zEntry.getName(); + int ind = zentryFilename.lastIndexOf('/'); + + String dirName = ""; + if (ind > -1) { + finalFileName = zentryFilename.substring(ind + 1); + if (ind > 0) { + dirName = zentryFilename.substring(0, ind); + dirName = dirName.replace('/', '-'); + } + } else { + finalFileName = zentryFilename; } - } else { - finalFileName = fileEntryName; + } + // skip junk files if (".DS_Store".equals(finalFileName)) { continue; } @@ -396,91 +252,83 @@ DepositReceipt replaceOrAddFiles(String uri, Deposit deposit, AuthCredentials au continue; } - File tempUploadedFile = new File(importDir, finalFileName); - tempOutStream = new FileOutputStream(tempUploadedFile); - - byte[] dataBuffer = new byte[8192]; - int i = 0; - - while ((i = ziStream.read(dataBuffer)) > 0) { - tempOutStream.write(dataBuffer, 0, i); - tempOutStream.flush(); - } - - tempOutStream.close(); - + /** + * @todo confirm that this DVN 3.x zero-length file + * check that was put in because of + * https://redmine.hmdc.harvard.edu/issues/3273 is done + * in the back end, if it's still important in 4.0. 
+ */ // We now have the unzipped file saved in the upload directory; // zero-length dta files (for example) are skipped during zip // upload in the GUI, so we'll skip them here as well - if (tempUploadedFile.length() != 0) { - - if (true) { -// tempUploadedFile; -// UploadedFile uFile = tempUploadedFile; -// DataFile dataFile = new DataFile(); -// throw new SwordError("let's create a file"); - } -// StudyFileEditBean tempFileBean = new StudyFileEditBean(tempUploadedFile, studyService.generateFileSystemNameSequence(), study); -// tempFileBean.setSizeFormatted(tempUploadedFile.length()); - String finalFileNameAfterReplace = finalFileName; -// if (tempFileBean.getStudyFile() instanceof TabularDataFile) { - // predict what the tabular file name will be -// finalFileNameAfterReplace = FileUtil.replaceExtension(finalFileName); -// } - -// validateFileName(exisitingFilenames, finalFileNameAfterReplace, study); - // And, if this file was in a legit (non-null) directory, - // we'll use its name as the file category: - if (dirName != null) { -// tempFileBean.getFileMetadata().setCategory(dirName); - } - -// fbList.add(tempFileBean); - } +// if (tempUploadedFile.length() != 0) { + /** + * @todo set the category (or categories) for files once + * we can: https://redmine.hmdc.harvard.edu/issues/3717 + */ + // And, if this file was in a legit (non-null) directory, + // we'll use its name as the file category: +// tempFileBean.getFileMetadata().setCategory(dirName); + String guessContentTypeForMe = null; + DataFile dFile = ingestService.createDataFile(editVersion, ziStream, finalFileName, guessContentTypeForMe); + newFiles.add(dFile); } else { logger.fine("directory found: " + zEntry.getName()); } } } catch (IOException ex) { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Problem with file: " + uploadedZipFilename); - } finally { - /** - * @todo shouldn't we delete this uploadDir? 
Commented out in - * DVN 3.x - */ -// if (!uploadDir.delete()) { -// logger.fine("Unable to delete " + uploadDir.getAbsolutePath()); -// } + } catch (EJBException ex) { + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to add file(s) to dataset: " + ex.getMessage()); } -// if (fbList.size() > 0) { -// StudyFileServiceLocal studyFileService; -// try { -// studyFileService = (StudyFileServiceLocal) ctx.lookup("java:comp/env/studyFileService"); -// } catch (NamingException ex) { -// logger.info("problem looking up studyFileService"); -// throw new SwordServerException("problem looking up studyFileService"); -// } + + if (newFiles.isEmpty()) { + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Problem with zip file '" + uploadedZipFilename + "'. Number of files unzipped: " + newFiles.size()); + } + + ingestService.addFiles(editVersion, newFiles); + + Command cmd; + cmd = new UpdateDatasetCommand(dataset, vdcUser); try { -// studyFileService.addFiles(study.getLatestVersion(), fbList, vdcUser); + dataset = commandEngine.submit(cmd); + } catch (CommandException ex) { + throw returnEarly("couldn't update dataset"); } catch (EJBException ex) { - throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to add file(s) to study: " + ex.getMessage()); + Throwable cause = ex; + StringBuilder sb = new StringBuilder(); + sb.append(ex.getLocalizedMessage()); + while (cause.getCause() != null) { + cause = cause.getCause(); + sb.append(cause + " "); + if (cause instanceof ConstraintViolationException) { + ConstraintViolationException constraintViolationException = (ConstraintViolationException) cause; + for (ConstraintViolation violation : constraintViolationException.getConstraintViolations()) { + sb.append(" Invalid value: <<<").append(violation.getInvalidValue()).append(">>> for ") + .append(violation.getPropertyPath()).append(" at ") + .append(violation.getLeafBean()).append(" - ") + .append(violation.getMessage()); + } + } + } + throw 
returnEarly("EJBException: " + sb.toString()); } + + ingestService.startIngestJobs(dataset); + ReceiptGenerator receiptGenerator = new ReceiptGenerator(); String baseUrl = urlManager.getHostnamePlusBaseUrlPath(uri); - DepositReceipt depositReceipt = receiptGenerator.createReceipt(baseUrl, study); + DepositReceipt depositReceipt = receiptGenerator.createReceipt(baseUrl, dataset); return depositReceipt; } else { -// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Problem with zip file '" + uploadedZipFilename + "'. Number of files unzipped: " + fbList.size()); + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to determine target type or identifier from URL: " + uri); } -// } else { -// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "user " + vdcUser.getUserName() + " is not authorized to modify study with global ID " + study.getGlobalId()); - return new DepositReceipt(); // added just to get this to compile 2014-05-14 } -// } else { -// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to determine target type or identifier from URL: " + uri); -// } -// } + /** + * @todo This validation was in DVN 3.x and should go into the 4.0 ingest + * service + */ // copied from AddFilesPage // private void validateFileName(List existingFilenames, String fileName, Study study) throws SwordError { // if (fileName.contains("\\") @@ -505,6 +353,5 @@ private SwordError returnEarly(String error) { StackTraceElement[] emptyStackTrace = new StackTraceElement[0]; swordError.setStackTrace(emptyStackTrace); return swordError; - } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ReceiptGenerator.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ReceiptGenerator.java index 6f55b594adc..0b8afa07b42 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ReceiptGenerator.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ReceiptGenerator.java @@ -1,6 +1,7 @@ package 
edu.harvard.iq.dataverse.api.datadeposit; import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.Dataverse; import java.util.logging.Logger; import org.apache.abdera.i18n.iri.IRI; import org.swordapp.server.DepositReceipt; @@ -9,9 +10,16 @@ public class ReceiptGenerator { private static final Logger logger = Logger.getLogger(ReceiptGenerator.class.getCanonicalName()); + /** + * @todo rename to createDatasetReceipt? + */ DepositReceipt createReceipt(String baseUrl, Dataset dataset) { logger.fine("baseUrl was: " + baseUrl); DepositReceipt depositReceipt = new DepositReceipt(); + /** + * @todo is dataset.getGlobalId() being populated properly? + * https://redmine.hmdc.harvard.edu/issues/3988 ? + */ String globalId = dataset.getGlobalId(); /** * @todo should these URLs continue to have "study" in them? Do we need @@ -30,11 +38,32 @@ DepositReceipt createReceipt(String baseUrl, Dataset dataset) { depositReceipt.setStatementURI("application/atom+xml;type=feed", baseUrl + "/statement/study/" + globalId); depositReceipt.addDublinCore("bibliographicCitation", dataset.getLatestVersion().getCitation()); /** - * @todo is this still returning the database id? - * https://redmine.hmdc.harvard.edu/issues/3397 ? + * @todo is dataset.getPersistentURL() still returning the database id? + * https://redmine.hmdc.harvard.edu/issues/3988 ? 
*/ depositReceipt.setSplashUri(dataset.getPersistentURL()); return depositReceipt; } + DepositReceipt createDataverseReceipt(String baseUrl, Dataverse dataverse) { + logger.fine("baseUrl was: " + baseUrl); + DepositReceipt depositReceipt = new DepositReceipt(); + String globalId = dataverse.getAlias(); + String collectionIri = baseUrl + "/collection/dataverse/" + globalId; + depositReceipt.setSplashUri(collectionIri); + /** + * @todo We have to include and "edit" IRI or else we get + * NullPointerException in getAbderaEntry at + * https://github.com/swordapp/JavaServer2.0/blob/sword2-server-1.0/src/main/java/org/swordapp/server/DepositReceipt.java#L52 + * + * Do we want to support a replaceMetadata of dataverses? + * + * Typically, we only operate on the "collection" IRI for dataverses, to + * create a dataset. + */ + String editIri = baseUrl + "/edit/dataverse/" + globalId; + depositReceipt.setEditIRI(new IRI(editIri)); + return depositReceipt; + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/StatementManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/StatementManagerImpl.java index 8b7eeed0658..614b935d15d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/StatementManagerImpl.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/StatementManagerImpl.java @@ -50,36 +50,29 @@ public Statement getStatement(String editUri, Map map, AuthCrede throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "swordAuth is null"); } - DataverseUser vdcUser = swordAuth.auth(authCredentials); + DataverseUser dataverseUser = swordAuth.auth(authCredentials); urlManager.processUrl(editUri); String globalId = urlManager.getTargetIdentifier(); if (urlManager.getTargetType().equals("study") && globalId != null) { - logger.fine("request for sword statement by user " + vdcUser.getUserName()); - Dataset study = datasetService.findByGlobalId(globalId); -// try { -// study = studyService.getStudyByGlobalId(globalId); 
-// } catch (EJBException ex) { -// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find study based on global id (" + globalId + ") in URL: " + editUri); -// } - Long studyId; - try { - studyId = study.getId(); - } catch (NullPointerException ex) { - throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "couldn't find study with global ID of " + globalId); + logger.fine("request for sword statement by user " + dataverseUser.getUserName()); + Dataset dataset = datasetService.findByGlobalId(globalId); + if (dataset == null) { + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "couldn't find dataset with global ID of " + globalId); } - Dataverse dvThatOwnsStudy = study.getOwner(); - if (swordAuth.hasAccessToModifyDataverse(vdcUser, dvThatOwnsStudy)) { - String feedUri = urlManager.getHostnamePlusBaseUrlPath(editUri) + "/edit/study/" + study.getGlobalId(); + Dataverse dvThatOwnsDataset = dataset.getOwner(); + if (swordAuth.hasAccessToModifyDataverse(dataverseUser, dvThatOwnsDataset)) { + String feedUri = urlManager.getHostnamePlusBaseUrlPath(editUri) + "/edit/study/" + dataset.getGlobalId(); /** - * @todo is it safe to use this? + * @todo did the format of getAuthorsStr() change? It looks more + * or less the same. */ - String author = study.getLatestVersion().getAuthorsStr(); - String title = study.getLatestVersion().getTitle(); + String author = dataset.getLatestVersion().getAuthorsStr(); + String title = dataset.getLatestVersion().getTitle(); // in the statement, the element is called "updated" Date lastUpdatedFinal = new Date(); - Date lastUpdateTime = study.getLatestVersion().getLastUpdateTime(); + Date lastUpdateTime = dataset.getLatestVersion().getLastUpdateTime(); if (lastUpdateTime != null) { lastUpdatedFinal = lastUpdateTime; } else { @@ -90,7 +83,7 @@ public Statement getStatement(String editUri, Map map, AuthCrede * In 4.0, lastUpdateTime is always null. 
*/ logger.info("lastUpdateTime was null, trying createtime"); - Date createtime = study.getLatestVersion().getCreateTime(); + Date createtime = dataset.getLatestVersion().getCreateTime(); if (createtime != null) { lastUpdatedFinal = createtime; } else { @@ -102,7 +95,7 @@ public Statement getStatement(String editUri, Map map, AuthCrede String datedUpdated = atomDate.toString(); Statement statement = new AtomStatement(feedUri, author, title, datedUpdated); Map states = new HashMap(); - states.put("latestVersionState", study.getLatestVersion().getVersionState().toString()); + states.put("latestVersionState", dataset.getLatestVersion().getVersionState().toString()); /** * @todo DVN 3.x had a studyLock. What's the equivalent in 4.0? */ @@ -115,15 +108,15 @@ public Statement getStatement(String editUri, Map map, AuthCrede // states.put("locked", "false"); // } statement.setStates(states); - List fileMetadatas = study.getLatestVersion().getFileMetadatas(); + List fileMetadatas = dataset.getLatestVersion().getFileMetadatas(); for (FileMetadata fileMetadata : fileMetadatas) { - DataFile studyFile = fileMetadata.getDataFile(); + DataFile dataFile = fileMetadata.getDataFile(); // We are exposing the filename for informational purposes. The file id is what you // actually operate on to delete a file, etc. 
// // Replace spaces to avoid IRISyntaxException String fileNameFinal = fileMetadata.getLabel().replace(' ', '_'); - String studyFileUrlString = urlManager.getHostnamePlusBaseUrlPath(editUri) + "/edit-media/file/" + studyFile.getId() + "/" + fileNameFinal; + String studyFileUrlString = urlManager.getHostnamePlusBaseUrlPath(editUri) + "/edit-media/file/" + dataFile.getId() + "/" + fileNameFinal; IRI studyFileUrl; try { studyFileUrl = new IRI(studyFileUrlString); @@ -151,7 +144,7 @@ public Statement getStatement(String editUri, Map map, AuthCrede } return statement; } else { - throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "user " + vdcUser.getUserName() + " is not authorized to view study with global ID " + globalId); + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "user " + dataverseUser.getUserName() + " is not authorized to view study with global ID " + globalId); } } else { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not determine target type or identifier from URL: " + editUri); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordAuth.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordAuth.java index 849ea4b4e05..fb22725b8a7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordAuth.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordAuth.java @@ -1,9 +1,13 @@ package edu.harvard.iq.dataverse.api.datadeposit; import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.DataverseRoleServiceBean; import edu.harvard.iq.dataverse.DataverseUser; import edu.harvard.iq.dataverse.DataverseUserServiceBean; import edu.harvard.iq.dataverse.PasswordEncryption; +import edu.harvard.iq.dataverse.PermissionServiceBean; +import edu.harvard.iq.dataverse.RoleAssignment; +import edu.harvard.iq.dataverse.engine.Permission; import java.util.logging.Logger; import javax.ejb.EJB; import org.swordapp.server.AuthCredentials; @@ -14,8 +18,13 @@ public class SwordAuth { 
private static final Logger logger = Logger.getLogger(SwordAuth.class.getCanonicalName()); + @EJB DataverseUserServiceBean dataverseUserService; + @EJB + PermissionServiceBean permissionService; + @EJB + DataverseRoleServiceBean roleService; public DataverseUser auth(AuthCredentials authCredentials) throws SwordAuthException, SwordServerException { @@ -64,7 +73,26 @@ boolean hasAccessToModifyDataverse(DataverseUser dataverseUser, Dataverse datave // } // } // - if (dataverse.getCreator().equals(dataverseUser)) { + for (RoleAssignment roleAssignment : roleService.assignmentsFor(dataverseUser, dataverse).getAssignments()) { + /** + * @todo do we want to hard code a check for the string "manager" + * here? Probably not... for now let's just check for + * Permission.DestructiveEdit which feels equivalent to the "admin" + * role in DVN 3.x. We could also do a check for an admin-type + * command like this: permissionService.userOn(dataverseUser, + * dataverse).canIssue(DestroyDataverseCommand.class) + * + * @todo What about the root dataverse? With the GUI, any user can + * create datasets in the root dataverse but users won't be "admin" + * of the root dataverse. The "all or nothing" admin concept for all + * SWORD operations will probably need to go away. 
Rather than a + * single hasAccessToModifyDataverse method, we should have methods + * per SWORD commands that map onto permissions like + * canIssue(CreateDatasetCommand.class) + */ + logger.fine(dataverse.getAlias() + ": " + dataverseUser.getUserName() + " has role " + roleAssignment.getRole().getAlias()); + } + if (permissionService.userOn(dataverseUser, dataverse).has(Permission.DestructiveEdit)) { authorized = true; return authorized; } else { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordConfigurationImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordConfigurationImpl.java index 3a579dbad3d..5881475877f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordConfigurationImpl.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordConfigurationImpl.java @@ -61,7 +61,29 @@ public String getTempDirectory() { // String tmpFileDir = System.getProperty("vdc.temp.file.dir"); String tmpFileDir = System.getProperty("dataverse.files.directory"); if (tmpFileDir != null) { - return tmpFileDir + File.separator + "sword"; + String swordDirString = tmpFileDir + File.separator + "sword"; + File swordDirFile = new File(swordDirString); + /** + * @todo Do we really need this check? It seems like we do because + * if you create a dataset via the native API and then later try to + * upload a file via SWORD, the directory defined by + * dataverse.files.directory may not exist and we get errors deep in + * the SWORD library code. Could maybe use a try catch in the doPost + * method of our SWORDv2MediaResourceServlet. + */ + if (swordDirFile.exists()) { + return swordDirString; + } else { + boolean mkdirSuccess = swordDirFile.mkdirs(); + if (mkdirSuccess) { + logger.info("Created directory " + swordDirString); + return swordDirString; + } else { + String msgForSwordUsers = ("Could not determine or create SWORD temp directory. 
Check logs for details."); + logger.severe(msgForSwordUsers + " Failed to create " + swordDirString); + throw new RuntimeException(msgForSwordUsers); + } + } } else { return null; } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordUtil.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordUtil.java new file mode 100644 index 00000000000..3a0a5d95132 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordUtil.java @@ -0,0 +1,31 @@ +package edu.harvard.iq.dataverse.api.datadeposit; + +import org.swordapp.server.SwordError; +import org.swordapp.server.UriRegistry; + +public class SwordUtil { + + public static SwordError throwSpecialSwordErrorWithoutStackTrace(String SwordUriRegistryError, String error) { + if (SwordUriRegistryError == null) { + SwordUriRegistryError = UriRegistry.ERROR_BAD_REQUEST; + } + if (error == null) { + error = "UNKNOWN"; + } + SwordError swordError = new SwordError(SwordUriRegistryError, error); + StackTraceElement[] emptyStackTrace = new StackTraceElement[0]; + swordError.setStackTrace(emptyStackTrace); + return swordError; + } + + public static SwordError throwRegularSwordErrorWithoutStackTrace(String error) { + if (error == null) { + error = "UNKNOWN"; + } + SwordError swordError = new SwordError(error); + StackTraceElement[] emptyStackTrace = new StackTraceElement[0]; + swordError.setStackTrace(emptyStackTrace); + return swordError; + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/UrlManager.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/UrlManager.java index 2c960d54ea9..064af3ae157 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/UrlManager.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/UrlManager.java @@ -28,8 +28,32 @@ void processUrl(String url) throws SwordError { } catch (URISyntaxException ex) { throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Invalid URL syntax: " + url); } + /** + * 
@todo: figure out another way to check for http. We used to use + * javaNetUri.getScheme() but now that we are using "ProxyPass / + * ajp://localhost:8009/" in Apache it's always http rather than https. + * + * http://serverfault.com/questions/6128/how-do-i-force-apache-to-use-https-in-conjunction-with-ajp + * http://stackoverflow.com/questions/1685563/apache-webserver-jboss-ajp-connectivity-with-https + * http://stackoverflow.com/questions/12460422/how-do-ensure-that-apache-ajp-to-tomcat-connection-is-secure-encrypted + */ if (!"https".equals(javaNetUri.getScheme())) { - throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "https is required but protocol was " + javaNetUri.getScheme()); + /** + * @todo figure out how to prevent this stackstrace from showing up + * in Glassfish logs: + * + * Unable to populate SSL attributes + * java.lang.IllegalStateException: SSLEngine is null at + * org.glassfish.grizzly.ssl.SSLSupportImpl + * + * SSLOptions +StdEnvVars +ExportCertData ? + * + * [#GLASSFISH-20694] Glassfish 4.0 and jk Unable to populate SSL + * attributes - Java.net JIRA - + * https://java.net/jira/browse/GLASSFISH-20694 + */ + logger.info("https is required but protocol was " + javaNetUri.getScheme()); +// throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "https is required but protocol was " + javaNetUri.getScheme()); } this.port = javaNetUri.getPort(); String[] urlPartsArray = javaNetUri.getPath().split("/"); diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java index 1a3e2cc06d8..90ffdabef84 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java @@ -238,7 +238,7 @@ public static String generatePDFThumb(String fileLocation, int size) { if (new File(imageMagickExec).exists()) { - String ImageMagick = imageMagickExec + " pdf:" + 
fileLocation + "[0] -resize "+ size + " -flatten png:" + thumbFileLocation; + String ImageMagick = imageMagickExec + " pdf:" + fileLocation + "[0] -resize "+ size + "x" + size + " png:" + thumbFileLocation; int exitValue = 1; try { diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/TabularSubsetGenerator.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/TabularSubsetGenerator.java index 4ac0db6f8bb..0b44e048329 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/TabularSubsetGenerator.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/TabularSubsetGenerator.java @@ -20,15 +20,22 @@ package edu.harvard.iq.dataverse.dataaccess; - +import edu.harvard.iq.dataverse.DataFile; import java.util.*; import java.util.Scanner; import java.util.logging.*; import java.io.*; import java.io.FileNotFoundException; -import org.apache.commons.lang.*; +import java.math.BigDecimal; +import java.math.MathContext; +import java.math.RoundingMode; +import java.nio.ByteBuffer; +import java.nio.channels.FileChannel; +import java.nio.file.Paths; +import java.nio.file.StandardOpenOption; + -//import edu.harvard.iq.dvn.ingest.dsb.*; +import org.apache.commons.lang.*; /** @@ -42,6 +49,10 @@ public class TabularSubsetGenerator implements SubsetGenerator { private static Logger dbgLog = Logger.getLogger(TabularSubsetGenerator.class.getPackage().getName()); + private static int COLUMN_TYPE_STRING = 1; + private static int COLUMN_TYPE_LONG = 2; + private static int COLUMN_TYPE_DOUBLE = 3; + public void subsetFile(String infile, String outfile, Set columns, Long numCases) { subsetFile(infile, outfile, columns, numCases, "\t"); @@ -179,5 +190,490 @@ public static Double[][] subsetDoubleVectors(InputStream in, Set column return retVector; } + + public String[] subsetStringVector(DataFile datafile, int column) throws IOException { + return (String[])subsetObjectVector(datafile, column, COLUMN_TYPE_STRING); + } + + public Double[] subsetDoubleVector(DataFile 
datafile, int column) throws IOException { + return (Double[])subsetObjectVector(datafile, column, COLUMN_TYPE_DOUBLE); + } + + public String[] subsetStringVector(File tabfile, int column, int varcount, int casecount) throws IOException { + return (String[])subsetObjectVector(tabfile, column, varcount, casecount, COLUMN_TYPE_STRING); + } + + public Double[] subsetDoubleVector(File tabfile, int column, int varcount, int casecount) throws IOException { + return (Double[])subsetObjectVector(tabfile, column, varcount, casecount, COLUMN_TYPE_DOUBLE); + } + + public Object[] subsetObjectVector(DataFile dataFile, int column, int columntype) throws IOException { + if (!dataFile.isTabularData()) { + throw new IOException("DataFile is not tabular data."); + } + + int varcount = dataFile.getDataTable().getVarQuantity().intValue(); + int casecount = dataFile.getDataTable().getCaseQuantity().intValue(); + + if (column >= varcount) { + throw new IOException("Column "+column+" is out of bounds."); + } + + File tabfile = dataFile.getFileSystemLocation().toFile(); + + return subsetObjectVector(tabfile, column, varcount, casecount, columntype); + } + + public Object[] subsetObjectVector(File tabfile, int column, int varcount, int casecount, int columntype) throws IOException { + + Object[] retVector = null; + + boolean isString = false; + boolean isDouble = false; + + if (columntype == COLUMN_TYPE_STRING) { + isString = true; + } else if (columntype == COLUMN_TYPE_DOUBLE) { + isDouble = true; + } else { + throw new IOException("Unsupported column type: "+columntype); + } + if (isString) { + retVector = new String[casecount]; + } else if (isDouble) { + retVector = new Double[casecount]; + } + + File rotatedImageFile = getRotatedImage(tabfile, varcount, casecount); + long[] columnEndOffsets = getColumnOffsets(rotatedImageFile, varcount, casecount); + long columnOffset = 0; + long columnLength = 0; + + if (column > 0) { + columnOffset = columnEndOffsets[column - 1]; + columnLength = 
columnEndOffsets[column] - columnEndOffsets[column - 1]; + } else { + columnOffset = varcount * 8; + columnLength = columnEndOffsets[0] - varcount * 8; + } + + FileChannel fc = (FileChannel.open(Paths.get(rotatedImageFile.getAbsolutePath()), StandardOpenOption.READ)); + fc.position(columnOffset); + int MAX_COLUMN_BUFFER = 8192; + + ByteBuffer in = ByteBuffer.allocate(MAX_COLUMN_BUFFER); + + if (columnLength < MAX_COLUMN_BUFFER) { + in.limit((int)(columnLength)); + } + + long bytesRead = 0; + long bytesReadTotal = 0; + int caseindex = 0; + int byteoffset = 0; + byte[] leftover = null; + + while (bytesReadTotal < columnLength) { + bytesRead = fc.read(in); + byte[] columnBytes = in.array(); + int bytecount = 0; + + + while (bytecount < bytesRead) { + if (columnBytes[bytecount] == '\n') { + String token = new String(columnBytes, byteoffset, bytecount-byteoffset); + if (leftover != null) { + String leftoverString = new String (leftover); + token = leftoverString + token; + leftover = null; + } + + if (isString) { + retVector[caseindex] = token; + } else if (isDouble) { + try { + // TODO: verify that NaN and +-Inf are + // handled correctly here! -- L.A. 
+ retVector[caseindex] = new Double(token); + } catch (NumberFormatException ex) { + retVector[caseindex] = null; // missing value + } + } + caseindex++; + + if (bytecount == bytesRead - 1) { + byteoffset = 0; + } else { + byteoffset = bytecount + 1; + } + } else { + if (bytecount == bytesRead - 1) { + leftover = new byte[(int)bytesRead - byteoffset]; + System.arraycopy(columnBytes, byteoffset, leftover, 0, (int)bytesRead - byteoffset); + byteoffset = 0; + + } + } + bytecount++; + } + + bytesReadTotal += bytesRead; + in.clear(); + if (columnLength - bytesReadTotal < MAX_COLUMN_BUFFER) { + in.limit((int)(columnLength - bytesReadTotal)); + } + } + + fc.close(); + + if (caseindex != casecount) { + throw new IOException("Faile to read "+casecount+" tokens for column "+column); + //System.out.println("read "+caseindex+" tokens instead of expected "+casecount+"."); + } + + return retVector; + } + + private long[] getColumnOffsets (File rotatedImageFile, int varcount, int casecount) throws IOException { + BufferedInputStream rotfileStream = new BufferedInputStream(new FileInputStream(rotatedImageFile)); + + byte[] offsetHeader = new byte[varcount * 8]; + long[] byteOffsets = new long[varcount]; + + + int readlen = rotfileStream.read(offsetHeader); + + if (readlen != varcount * 8) { + throw new IOException ("Could not read "+varcount*8+" header bytes from the rotated file."); + } + + for (int varindex = 0; varindex < varcount; varindex++) { + byte[] offsetBytes = new byte[8]; + System.arraycopy(offsetHeader, varindex*8, offsetBytes, 0, 8); + + ByteBuffer offsetByteBuffer = ByteBuffer.wrap(offsetBytes); + byteOffsets[varindex] = offsetByteBuffer.getLong(); + + //System.out.println(byteOffsets[varindex]); + } + + rotfileStream.close(); + + return byteOffsets; + } + + private File getRotatedImage(File tabfile, int varcount, int casecount) throws IOException { + String fileName = tabfile.getAbsolutePath(); + String rotatedImageFileName = fileName + ".90d"; + File 
rotatedImageFile = new File(rotatedImageFileName); + if (rotatedImageFile.exists()) { + //System.out.println("Image already exists!"); + return rotatedImageFile; + } + + return generateRotatedImage(tabfile, varcount, casecount); + + } + + private File generateRotatedImage (File tabfile, int varcount, int casecount) throws IOException { + // TODO: throw exceptions if bad file, zero varcount, etc. ... + + String fileName = tabfile.getAbsolutePath(); + String rotatedImageFileName = fileName + ".90d"; + + int MAX_OUTPUT_STREAMS = 32; + int MAX_BUFFERED_BYTES = 10 * 1024 * 1024; // 10 MB - for now? + int MAX_COLUMN_BUFFER = 8192; + + // offsetHeader will contain the byte offsets of the individual column + // vectors in the final rotated image file + byte[] offsetHeader = new byte[varcount * 8]; + int[] bufferedSizes = new int[varcount]; + long[] cachedfileSizes = new long[varcount]; + File[] columnTempFiles = new File[varcount]; + + for (int i = 0; i < varcount; i++) { + bufferedSizes[i] = 0; + cachedfileSizes[i] = 0; + } + + // TODO: adjust MAX_COLUMN_BUFFER here, so that the total size is + // no more than MAX_BUFFERED_BYTES (but no less than 1024 maybe?) + + byte[][] bufferedColumns = new byte [varcount][MAX_COLUMN_BUFFER]; + + // read the tab-delimited file: + + FileInputStream tabfileStream = new FileInputStream(tabfile); + + Scanner scanner = new Scanner(tabfileStream); + scanner.useDelimiter("\\n"); + + for (int caseindex = 0; caseindex < casecount; caseindex++) { + if (scanner.hasNext()) { + String[] line = (scanner.next()).split("\t", -1); + // TODO: throw an exception if there are fewer tab-delimited + // tokens than the number of variables specified. + String token = ""; + int tokensize = 0; + for (int varindex = 0; varindex < varcount; varindex++) { + // TODO: figure out the safest way to convert strings to + // bytes here. Is it going to be safer to use getBytes("UTF8")? 
+ // we are already making the assumption that the values + // in the tab file are in UTF8. -- L.A. + token = line[varindex] + "\n"; + tokensize = token.getBytes().length; + if (bufferedSizes[varindex]+tokensize > MAX_COLUMN_BUFFER) { + // fill the buffer and dump its contents into the temp file: + if (bufferedSizes[varindex] != MAX_COLUMN_BUFFER) { + System.arraycopy(token.getBytes(), 0, bufferedColumns[varindex], bufferedSizes[varindex], MAX_COLUMN_BUFFER-bufferedSizes[varindex]); + } + File bufferTempFile = columnTempFiles[varindex]; + if (bufferTempFile == null) { + bufferTempFile = File.createTempFile("columnBufferFile", "bytes"); + columnTempFiles[varindex] = bufferTempFile; + } + + // *append* the contents of the buffer to the end of the + // temp file, if already exists: + BufferedOutputStream outputStream = new BufferedOutputStream(new FileOutputStream (bufferTempFile, true)); + outputStream.write(bufferedColumns[varindex], 0, MAX_COLUMN_BUFFER); + cachedfileSizes[varindex] += MAX_COLUMN_BUFFER; + outputStream.close(); + + // buffer the remaining bytes and reset the buffered + // byte counter: + + System.arraycopy(token.getBytes(), + MAX_COLUMN_BUFFER-bufferedSizes[varindex], + bufferedColumns[varindex], + 0, + bufferedSizes[varindex] + tokensize - MAX_COLUMN_BUFFER); + + bufferedSizes[varindex] = bufferedSizes[varindex] + tokensize - MAX_COLUMN_BUFFER; + + } else { + // continue buffering + System.arraycopy(token.getBytes(), 0, bufferedColumns[varindex], bufferedSizes[varindex], tokensize); + bufferedSizes[varindex] += tokensize; + } + } + } else { + scanner.close(); + throw new IOException("Tab file has fewer rows than the stored number of cases!"); + } + + } + + // OK, we've created the individual byte vectors of the tab file columns; + // they may be partially saved in temp files and/or in memory. + // We now need to go through all these buffers and create the final + // rotated image file. 
+ + BufferedOutputStream finalOut = new BufferedOutputStream(new FileOutputStream (new File(rotatedImageFileName))); + + // but first we should create the offset header and write it out into + // the final file; because it should be at the head, doh! + + long columnOffset = varcount * 8; + // (this is the offset of the first column vector; it is equal to the + // size of the offset header, i.e. varcount * 8 bytes) + + for (int varindex = 0; varindex < varcount; varindex++) { + long totalColumnBytes = cachedfileSizes[varindex] + bufferedSizes[varindex]; + columnOffset+=totalColumnBytes; + //totalColumnBytes; + byte[] columnOffsetByteArray = ByteBuffer.allocate(8).putLong(columnOffset).array(); + System.arraycopy(columnOffsetByteArray, 0, offsetHeader, varindex * 8, 8); + } + + finalOut.write(offsetHeader, 0, varcount * 8); + + for (int varindex = 0; varindex < varcount; varindex++) { + long cachedBytesRead = 0; + + // check if there is a cached temp file: + + File cachedTempFile = columnTempFiles[varindex]; + if (cachedTempFile != null) { + byte[] cachedBytes = new byte[MAX_COLUMN_BUFFER]; + BufferedInputStream cachedIn = new BufferedInputStream(new FileInputStream(cachedTempFile)); + int readlen = 0; + while ((readlen = cachedIn.read(cachedBytes)) > -1) { + finalOut.write(cachedBytes, 0, readlen); + cachedBytesRead += readlen; + } + cachedIn.close(); + } + + if (cachedBytesRead != cachedfileSizes[varindex]) { + finalOut.close(); + throw new IOException("Could not read the correct number of bytes cached for column "+varindex+"; "+ + cachedfileSizes[varindex] + " bytes expected, "+cachedBytesRead+" read."); + } + + // then check if there are any bytes buffered for this column: + + if (bufferedSizes[varindex] > 0) { + finalOut.write(bufferedColumns[varindex], 0, bufferedSizes[varindex]); + } + + } + + finalOut.close(); + return new File(rotatedImageFileName); + + } + + /* + * Test method for taking a "rotated" image, and reversing it, reassembling + * all the columns 
in the original order. Which should result in a file + * byte-for-byte identical file to the original tab-delimited version. + * + * (do note that this method is not efficiently implemented; it's only + * being used for experiments so far, to confirm the accuracy of the + * accuracy of generateRotatedImage(). It should not be used for any + * practical means in the application!) + */ + private void reverseRotatedImage (File rotfile, int varcount, int casecount) throws IOException { + // open the file, read in the offset header: + BufferedInputStream rotfileStream = new BufferedInputStream(new FileInputStream(rotfile)); + + byte[] offsetHeader = new byte[varcount * 8]; + long[] byteOffsets = new long[varcount]; + + int readlen = rotfileStream.read(offsetHeader); + + if (readlen != varcount * 8) { + throw new IOException ("Could not read "+varcount*8+" header bytes from the rotated file."); + } + + for (int varindex = 0; varindex < varcount; varindex++) { + byte[] offsetBytes = new byte[8]; + System.arraycopy(offsetHeader, varindex*8, offsetBytes, 0, 8); + + ByteBuffer offsetByteBuffer = ByteBuffer.wrap(offsetBytes); + byteOffsets[varindex] = offsetByteBuffer.getLong(); + + //System.out.println(byteOffsets[varindex]); + } + + String [][] reversedMatrix = new String[casecount][varcount]; + + long offset = varcount * 8; + byte[] columnBytes; + + for (int varindex = 0; varindex < varcount; varindex++) { + long columnLength = byteOffsets[varindex] - offset; + + + + columnBytes = new byte[(int)columnLength]; + readlen = rotfileStream.read(columnBytes); + + if (readlen != columnLength) { + throw new IOException ("Could not read "+columnBytes+" bytes for column "+varindex); + } + /* + String columnString = new String(columnBytes); + //System.out.print(columnString); + String[] values = columnString.split("\n", -1); + + if (values.length < casecount) { + throw new IOException("count mismatch: "+values.length+" tokens found for column "+varindex); + } + + for (int caseindex = 
0; caseindex < casecount; caseindex++) { + reversedMatrix[caseindex][varindex] = values[caseindex]; + }*/ + + int bytecount = 0; + int byteoffset = 0; + int caseindex = 0; + //System.out.println("generating value vector for column "+varindex); + while (bytecount < columnLength) { + if (columnBytes[bytecount] == '\n') { + String token = new String(columnBytes, byteoffset, bytecount-byteoffset); + reversedMatrix[caseindex++][varindex] = token; + byteoffset = bytecount + 1; + } + bytecount++; + } + + if (caseindex != casecount) { + throw new IOException("count mismatch: "+caseindex+" tokens found for column "+varindex); + } + offset = byteOffsets[varindex]; + } + + for (int caseindex = 0; caseindex < casecount; caseindex++) { + for (int varindex = 0; varindex < varcount; varindex++) { + System.out.print(reversedMatrix[caseindex][varindex]); + if (varindex < varcount-1) { + System.out.print("\t"); + } else { + System.out.print("\n"); + } + } + } + + rotfileStream.close(); + + + } + + /** + * main() method, for testing + * usage: java edu.harvard.iq.dataverse.dataaccess.TabularSubsetGenerator testfile.tab varcount casecount + * make sure the CLASSPATH contains ... 
+ * + */ + + public static void main(String[] args) { + + String tabFileName = args[0]; + int varcount = new Integer(args[1]).intValue(); + int casecount = new Integer(args[2]).intValue(); + int column = new Integer(args[3]).intValue(); + + File tabFile = new File(tabFileName); + File rotatedImageFile = null; + + TabularSubsetGenerator subsetGenerator = new TabularSubsetGenerator(); + + /* + try { + rotatedImageFile = subsetGenerator.getRotatedImage(tabFile, varcount, casecount); + } catch (IOException ex) { + System.out.println(ex.getMessage()); + } + */ + + //System.out.println("\nFinished generating \"rotated\" column image file."); + + //System.out.println("\nOffsets:"); + + MathContext doubleMathContext = new MathContext(15, RoundingMode.HALF_EVEN); + String FORMAT_IEEE754 = "%+#.15e"; + + try { + //subsetGenerator.reverseRotatedImage(rotatedImageFile, varcount, casecount); + //String[] columns = subsetGenerator.subsetStringVector(tabFile, column, varcount, casecount); + Double[] columns = subsetGenerator.subsetDoubleVector(tabFile, column, varcount, casecount); + for (int i = 0; i < casecount; i++) { + if (columns[i] != null) { + BigDecimal outBigDecimal = new BigDecimal(columns[i], doubleMathContext); + System.out.println(String.format(FORMAT_IEEE754, outBigDecimal)); + } else { + System.out.println("NA"); + } + //System.out.println(columns[i]); + } + } catch (IOException ex) { + System.out.println(ex.getMessage()); + } + } } + + diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/Permission.java b/src/main/java/edu/harvard/iq/dataverse/engine/Permission.java index e5e2791b942..f779d4860e0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/Permission.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/Permission.java @@ -13,8 +13,9 @@ public enum Permission implements java.io.Serializable { EditMetadata("Edit the metadata of objects"), AddDataverse("Add a dataverse within another dataverse"), AddDataset("Add a dataset to a dataverse"), + 
AddDatasetVersion("Add a version to a dataset"), ChooseTemplate("Choose metadata template for dataverses and datasets"), - Release("Release a dataverse or a dataset"), + Publish("Release a dataverse or a dataset"), Style("Customize the appearance of objects"), GrantPermissions("Manage permissions of other users"), Tracking("Manage guestbook, download statistics, etc.") diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/AbstractVoidCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/AbstractVoidCommand.java index 0df44a772c3..cf6f836a41c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/AbstractVoidCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/AbstractVoidCommand.java @@ -1,6 +1,5 @@ package edu.harvard.iq.dataverse.engine.command; -import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.DataverseUser; import edu.harvard.iq.dataverse.DvObject; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; @@ -13,9 +12,9 @@ * @author michael */ public abstract class AbstractVoidCommand extends AbstractCommand { - - public AbstractVoidCommand(DataverseUser aUser, Dataverse anAffectedDataverse) { - super(aUser, anAffectedDataverse); + + public AbstractVoidCommand(DataverseUser aUser, DvObject dvObject) { + super(aUser, dvObject); } public AbstractVoidCommand(DataverseUser aUser, DvNamePair dvp, DvNamePair... 
more) { diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/RequiredPermissions.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/RequiredPermissions.java index 7855bd99163..5ba67043284 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/RequiredPermissions.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/RequiredPermissions.java @@ -16,5 +16,5 @@ @Target(ElementType.TYPE) public @interface RequiredPermissions { Permission[] value(); - String dataverseName() default ""; + String dataverseName() default ""; // TODO change to "dvObjectName" } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetCommand.java index 15dfbb71f2e..edb873b6f1b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetCommand.java @@ -53,12 +53,11 @@ public Dataset execute(CommandContext ctxt) throws CommandException { } Date createDate = new Timestamp(new Date().getTime()); theDataset.getEditVersion().setCreateTime(createDate); + theDataset.getEditVersion().setLastUpdateTime(createDate); for (DataFile dataFile: theDataset.getFiles() ){ dataFile.setCreateDate(theDataset.getCreateDate()); } Dataset savedDataset = ctxt.em().merge(theDataset); - String indexingResult = ctxt.index().indexDataset(savedDataset); - logger.log(Level.INFO, "during dataset save, indexing result was: {0}", indexingResult); DataverseRole manager = new DataverseRole(); manager.addPermissions(EnumSet.allOf(Permission.class)); @@ -68,6 +67,14 @@ public Dataset execute(CommandContext ctxt) throws CommandException { manager.setOwner(savedDataset); ctxt.roles().save(manager); ctxt.roles().save(new RoleAssignment(manager, getUser(), savedDataset)); + + try { + // TODO make async + String indexingResult = 
ctxt.index().indexDataset(savedDataset); + logger.log(Level.INFO, "during dataset save, indexing result was: {0}", indexingResult); + } catch ( RuntimeException e ) { + logger.log(Level.WARNING, "Exception while indexing:" + e.getMessage(), e); + } return savedDataset; } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommand.java new file mode 100644 index 00000000000..8c11e35fcd0 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommand.java @@ -0,0 +1,45 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.DataverseUser; +import edu.harvard.iq.dataverse.engine.Permission; +import edu.harvard.iq.dataverse.engine.command.AbstractCommand; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; + +/** + * + * @author michael + */ +@RequiredPermissions( Permission.AddDatasetVersion ) +public class CreateDatasetVersionCommand extends AbstractCommand { + + final DatasetVersion newVersion; + final Dataset dataset; + + public CreateDatasetVersionCommand(DataverseUser aUser, Dataset theDataset, DatasetVersion aVersion) { + super(aUser, theDataset); + dataset = theDataset; + newVersion = aVersion; + } + + @Override + public DatasetVersion execute(CommandContext ctxt) throws CommandException { + DatasetVersion latest = dataset.getLatestVersion(); + if ( latest.isWorkingCopy() ) { + throw new IllegalCommandException("Latests version is already a draft. 
Cannot add another draft", this); + } + + newVersion.setDataset(dataset); + ctxt.em().persist(newVersion); + + // TODO make async + ctxt.index().indexDataset(dataset); + + return newVersion; + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDataFileCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDataFileCommand.java index a8fa6414edf..6c75b742cfa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDataFileCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDataFileCommand.java @@ -1,8 +1,8 @@ package edu.harvard.iq.dataverse.engine.command.impl; import edu.harvard.iq.dataverse.DataFile; -import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.DataverseUser; +import edu.harvard.iq.dataverse.IndexServiceBean; import edu.harvard.iq.dataverse.engine.Permission; import edu.harvard.iq.dataverse.engine.command.AbstractVoidCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; @@ -30,8 +30,8 @@ public class DeleteDataFileCommand extends AbstractVoidCommand { private final DataFile doomed; - public DeleteDataFileCommand(DataFile doomed, DataverseUser aUser, Dataverse anAffectedDataverse) { - super(aUser, anAffectedDataverse); + public DeleteDataFileCommand(DataFile doomed, DataverseUser aUser) { + super(aUser, doomed); this.doomed = doomed; } @@ -90,7 +90,34 @@ protected void executeImpl(CommandContext ctxt) throws CommandException { } // Finally, delete the file from the DB. 
- ctxt.em().remove(doomed); + /** + * added merge to avoid this: java.lang.IllegalArgumentException: + * Entity must be managed to call remove: [DataFile id:42 + * name:null], try merging the detached and try the remove again + */ + DataFile doomedAndMerged = ctxt.em().merge(doomed); + ctxt.em().remove(doomedAndMerged); + /** + * @todo consider adding an em.flush here (despite the performance + * impact) if you need to operate on the dataset below. Without the + * flush, the dataset still thinks it has the file that was just + * deleted. + */ + // ctxt.em().flush(); + + /** + * We *could* re-index the entire dataset but it's more efficient to + * target individual files for deletion, which should always be + * drafts. + * + * See also https://redmine.hmdc.harvard.edu/issues/3786 + */ + String indexingResult = ctxt.index().removeDraftFromIndex(IndexServiceBean.solrDocIdentifierFile + doomed.getId() + "_draft"); + /** + * @todo check indexing result for success or failure. Really, we + * need an indexing queuing system: + * https://redmine.hmdc.harvard.edu/issues/3643 + */ } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetCommand.java index 3fd862af97a..9e7af247f8a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetCommand.java @@ -13,7 +13,7 @@ * @author michael */ @RequiredPermissions( Permission.DestructiveEdit ) -public class DeleteDatasetCommand extends DestroyDataverseCommand { +public class DeleteDatasetCommand extends DestroyDatasetCommand { private final Dataset doomed; diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DestroyDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DestroyDatasetCommand.java similarity index 80% rename from 
src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DestroyDataverseCommand.java rename to src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DestroyDatasetCommand.java index 2d5a311db18..914a429ccf1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DestroyDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DestroyDatasetCommand.java @@ -3,6 +3,7 @@ import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.DataverseRole; import edu.harvard.iq.dataverse.DataverseUser; import edu.harvard.iq.dataverse.RoleAssignment; @@ -11,20 +12,18 @@ import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; -import java.util.logging.Level; -import java.util.logging.Logger; /** - * Same as {@link DeleteDataversCommand}, but does not stop it the dataset is published. + * Same as {@link DeleteDatasetCommand}, but does not stop it the dataset is published. * This command is reserved for super-users, if at all. 
* @author michael */ @RequiredPermissions( Permission.DestructiveEdit ) -public class DestroyDataverseCommand extends AbstractVoidCommand { +public class DestroyDatasetCommand extends AbstractVoidCommand { private final Dataset doomed; - public DestroyDataverseCommand(Dataset doomed, DataverseUser aUser) { + public DestroyDatasetCommand(Dataset doomed, DataverseUser aUser) { super(aUser, doomed.getOwner()); this.doomed = doomed; } @@ -45,7 +44,7 @@ protected void executeImpl(CommandContext ctxt) throws CommandException { // files for ( DataFile df : managedDoomed.getFiles() ) { - ctxt.engine().submit( new DeleteDataFileCommand(df, getUser(), managedDoomed.getOwner()) ); + ctxt.engine().submit( new DeleteDataFileCommand(df, getUser()) ); } // versions @@ -54,8 +53,12 @@ protected void executeImpl(CommandContext ctxt) throws CommandException { ctxt.em().remove( managed ); } + Dataverse toReIndex = managedDoomed.getOwner(); + // dataset ctxt.em().remove(managedDoomed); + + ctxt.index().indexDataverse(toReIndex); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ReleaseDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java similarity index 84% rename from src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ReleaseDatasetCommand.java rename to src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java index 76a637478bf..adec3eead4d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ReleaseDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java @@ -25,14 +25,14 @@ * @author skraffmiller */ @RequiredPermissionsMap({ - @RequiredPermissions(dataverseName = "", value = Permission.Release) + @RequiredPermissions(dataverseName = "", value = Permission.Publish) }) -public class ReleaseDatasetCommand extends AbstractCommand { - private static final Logger logger = 
Logger.getLogger(ReleaseDatasetCommand.class.getCanonicalName()); +public class PublishDatasetCommand extends AbstractCommand { + private static final Logger logger = Logger.getLogger(PublishDatasetCommand.class.getCanonicalName()); boolean minorRelease = false; Dataset theDataset; - public ReleaseDatasetCommand(Dataset datasetIn, DataverseUser user, boolean minor) { + public PublishDatasetCommand(Dataset datasetIn, DataverseUser user, boolean minor) { super(user, datasetIn); minorRelease = minor; theDataset = datasetIn; @@ -41,10 +41,14 @@ public ReleaseDatasetCommand(Dataset datasetIn, DataverseUser user, boolean mino @Override public Dataset execute(CommandContext ctxt) throws CommandException { + if (!theDataset.getOwner().isReleased()) { + throw new IllegalCommandException("This dataset may not be published because its host dataverse (" + theDataset.getOwner().getAlias() + ") has not been published.", this); + } + if (minorRelease && !theDataset.getLatestVersion().isMinorUpdate()) { throw new IllegalCommandException("Cannot release as minor version. 
Re-try as major release.", this); } - + if (theDataset.getReleasedVersion() == null) { theDataset.setPublicationDate(new Timestamp(new Date().getTime())); theDataset.setReleaseUser(getUser()); @@ -67,6 +71,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { Timestamp updateTime = new Timestamp(new Date().getTime()); theDataset.getEditVersion().setReleaseTime(updateTime); + theDataset.getEditVersion().setLastUpdateTime(updateTime); theDataset.getEditVersion().setVersionState(DatasetVersion.VersionState.RELEASED); for (DataFile dataFile: theDataset.getFiles() ){ @@ -76,8 +81,9 @@ public Dataset execute(CommandContext ctxt) throws CommandException { } Dataset savedDataset = ctxt.em().merge(theDataset); - String indexingResult = ctxt.index().indexDataset(savedDataset); - logger.info("during dataset save, indexing result was: " + indexingResult); + + ctxt.index().indexDataset(savedDataset); + return savedDataset; } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDataverseCommand.java new file mode 100644 index 00000000000..503b0ee9944 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDataverseCommand.java @@ -0,0 +1,45 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.DataverseUser; +import edu.harvard.iq.dataverse.engine.Permission; +import edu.harvard.iq.dataverse.engine.command.AbstractCommand; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; +import java.sql.Timestamp; +import java.util.Date; + +@RequiredPermissions(Permission.Publish) +public class 
PublishDataverseCommand extends AbstractCommand { + + private final Dataverse dataverse; + private final DataverseUser dataverseUser; + + public PublishDataverseCommand(DataverseUser dataverseUser, Dataverse dataverse) { + super(dataverseUser, dataverse); + this.dataverse = dataverse; + this.dataverseUser = dataverseUser; + } + + @Override + public Dataverse execute(CommandContext ctxt) throws CommandException { + if (dataverse.isReleased()) { + throw new IllegalCommandException("Dataverse " + dataverse.getAlias() + " has already been published.", this); + } + + Dataverse parent = dataverse.getOwner(); + // root dataverse doesn't have a parent + if (parent != null) { + if (!parent.isReleased()) { + throw new IllegalCommandException("Dataverse " + dataverse.getAlias() + " may not be published because its host dataverse (" + parent.getAlias() + ") has not been published.", this); + } + } + + dataverse.setPublicationDate(new Timestamp(new Date().getTime())); + dataverse.setReleaseUser(dataverseUser); + return ctxt.dataverses().save(dataverse); + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ReleaseDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ReleaseDataverseCommand.java deleted file mode 100644 index b36c1ed22fc..00000000000 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ReleaseDataverseCommand.java +++ /dev/null @@ -1,27 +0,0 @@ -package edu.harvard.iq.dataverse.engine.command.impl; - -import edu.harvard.iq.dataverse.Dataverse; -import edu.harvard.iq.dataverse.DataverseUser; -import edu.harvard.iq.dataverse.engine.Permission; -import edu.harvard.iq.dataverse.engine.command.AbstractVoidCommand; -import edu.harvard.iq.dataverse.engine.command.CommandContext; -import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; -import edu.harvard.iq.dataverse.engine.command.exception.CommandException; - -/** - * - * @author michael - */ -@RequiredPermissions( Permission.Release ) 
-public class ReleaseDataverseCommand extends AbstractVoidCommand { - - public ReleaseDataverseCommand(DataverseUser aUser, Dataverse anAffectedDataverse) { - super(aUser, anAffectedDataverse); - } - - @Override - protected void executeImpl(CommandContext ctxt) throws CommandException { - throw new UnsupportedOperationException("Sample Implementation"); - } - -} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetCommand.java index e5c2cd1c250..c177d491d81 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetCommand.java @@ -13,7 +13,6 @@ import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; -import edu.harvard.iq.dataverse.engine.command.RequiredPermissionsMap; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import java.sql.Timestamp; import java.util.Date; @@ -24,10 +23,7 @@ * * @author skraffmiller */ -@RequiredPermissionsMap({ - @RequiredPermissions(dataverseName = "", value = Permission.UndoableEdit), - @RequiredPermissions(dataverseName = "", value = Permission.EditMetadata) -}) +@RequiredPermissions({Permission.UndoableEdit,Permission.EditMetadata} ) public class UpdateDatasetCommand extends AbstractCommand { private static final Logger logger = Logger.getLogger(UpdateDatasetCommand.class.getCanonicalName()); private final Dataset theDataset; @@ -46,7 +42,6 @@ public void saveDatasetAPI(CommandContext ctxt) { save(ctxt); } - public Dataset save(CommandContext ctxt) { Iterator dsfIt = theDataset.getEditVersion().getDatasetFields().iterator(); while (dsfIt.hasNext()) { @@ -58,8 +53,8 @@ public Dataset save(CommandContext ctxt) { while (dsfItSort.hasNext()) 
{ dsfItSort.next().setValueDisplayOrder(); } - Timestamp updateTime = new Timestamp(new Date().getTime()); - + Timestamp updateTime = new Timestamp(new Date().getTime()); + theDataset.getEditVersion().setLastUpdateTime(updateTime); for (DataFile dataFile: theDataset.getFiles() ){ if(dataFile.getCreateDate() == null){ dataFile.setCreateDate(updateTime); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java new file mode 100644 index 00000000000..be5798698ae --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java @@ -0,0 +1,53 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.DataverseUser; +import edu.harvard.iq.dataverse.engine.Permission; +import edu.harvard.iq.dataverse.engine.command.AbstractCommand; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; + +/** + * Updates a {@link DatasetVersion}, as long as that version is in a "draft" state. 
+ * @author michael + */ +@RequiredPermissions(Permission.DestructiveEdit) +public class UpdateDatasetVersionCommand extends AbstractCommand { + + final DatasetVersion newVersion; + + public UpdateDatasetVersionCommand(DataverseUser aUser, DatasetVersion theNewVersion) { + super(aUser, theNewVersion.getDataset()); + newVersion = theNewVersion; + } + + + + @Override + public DatasetVersion execute(CommandContext ctxt) throws CommandException { + + Dataset ds = newVersion.getDataset(); + DatasetVersion latest = ds.getLatestVersion(); + + if ( latest == null ) { + throw new IllegalCommandException("Dataset " + ds.getId() + " does not have a latest version.", this); + } + + if ( ! latest.isDraft() ) { + throw new IllegalCommandException("Cannot update a dataset version that's not a draft", this); + } + + DatasetVersion edit = ds.getEditVersion(); + edit.setDatasetFields( newVersion.getDatasetFields() ); + + DatasetVersion managed = ctxt.em().merge(edit); + + ctxt.index().indexDataset(ds); + + return managed; + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java index 0b9dac2ada8..80467a9930a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java @@ -27,20 +27,21 @@ public UpdateDataverseCommand(Dataverse editedDv, List facetLi this.facetList = new ArrayList<>(facetList); } - - @Override public Dataverse execute(CommandContext ctxt) throws CommandException { Dataverse result = ctxt.dataverses().save(editedDv); - ctxt.facets().deleteFacetsFor(result); - int i=0; - for ( DatasetFieldType df : facetList ) { - ctxt.facets().create(i++, df.getId(), result.getId()); - } + if ( facetList != null ) { + ctxt.facets().deleteFacetsFor(result); + int i=0; + for ( DatasetFieldType df : facetList ) { + 
ctxt.facets().create(i++, df.getId(), result.getId()); + } + } ctxt.index().indexDataverse(result); - return result; + + return result; } } diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java index 7f8d6395be8..2b0158c882d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java @@ -23,12 +23,14 @@ import edu.harvard.iq.dataverse.ControlledVocabularyValue; import edu.harvard.iq.dataverse.datavariable.VariableServiceBean; import edu.harvard.iq.dataverse.DatasetServiceBean; +import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.DataFileServiceBean; import edu.harvard.iq.dataverse.DatasetField; import edu.harvard.iq.dataverse.DatasetFieldServiceBean; import edu.harvard.iq.dataverse.DatasetFieldType; import edu.harvard.iq.dataverse.DatasetFieldValue; +import edu.harvard.iq.dataverse.DatasetFieldCompoundValue; import edu.harvard.iq.dataverse.DatasetPage; import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.FileMetadata; @@ -55,6 +57,7 @@ import edu.harvard.iq.dataverse.ingest.tabulardata.impl.plugins.por.PORFileReader; import edu.harvard.iq.dataverse.ingest.tabulardata.impl.plugins.por.PORFileReaderSpi; import edu.harvard.iq.dataverse.util.FileUtil; +import edu.harvard.iq.dataverse.util.MD5Checksum; import edu.harvard.iq.dataverse.util.SumStatCalculator; import java.io.BufferedInputStream; import java.io.File; @@ -137,6 +140,195 @@ public class IngestServiceBean { // TODO: this constant should be provided by the Ingest Service Provder Registry; private static final String METADATA_SUMMARY = "FILE_METADATA_SUMMARY_INFO"; + + public DataFile createDataFile(DatasetVersion version, InputStream inputStream, String fileName, String contentType) throws IOException { + Dataset dataset = 
version.getDataset(); + DataFile datafile; + + FileMetadata fmd = new FileMetadata(); + + if (contentType != null && !contentType.equals("")) { + datafile = new DataFile(contentType); + fmd.setCategory(contentType); + } else { + datafile = new DataFile("application/octet-stream"); + } + + fmd.setLabel(fileName); + + datafile.setOwner(dataset); + fmd.setDataFile(datafile); + + datafile.getFileMetadatas().add(fmd); + + if (version.getFileMetadatas() == null) { + version.setFileMetadatas(new ArrayList()); + } + version.getFileMetadatas().add(fmd); + fmd.setDatasetVersion(version); + dataset.getFiles().add(datafile); + + datasetService.generateFileSystemName(datafile); + + // save the file, in the temporary location for now: + String tempFilesDirectory = getFilesTempDirectory(); + if (tempFilesDirectory != null) { + //try { + + logger.fine("Will attempt to save the file as: " + tempFilesDirectory + "/" + datafile.getFileSystemName()); + Files.copy(inputStream, Paths.get(tempFilesDirectory, datafile.getFileSystemName()), StandardCopyOption.REPLACE_EXISTING); + //} catch (IOException ioex) { + // logger.warning("Failed to save the file " + datafile.getFileSystemName()); + // return; + //} + } + + // Let's try our own utilities (Jhove, etc.) to determine the file type + // of the uploaded file. (We may already have a mime type supplied for this + // file - maybe the type that the browser recognized on upload; or, if + // it's a harvest, maybe the remote server has already given us the type + // for this file... with our own type utility we may or may not do better + // than the type supplied: + // -- L.A. 
+ String recognizedType = null; + try { + recognizedType = FileUtil.determineFileType(Paths.get(tempFilesDirectory, datafile.getFileSystemName()).toFile(), fmd.getLabel()); + logger.fine("File utility recognized the file as " + recognizedType); + if (recognizedType != null && !recognizedType.equals("")) { + // is it any better than the type that was supplied to us, + // if any? + + if (contentType == null || contentType.equals("") || contentType.equalsIgnoreCase("application/octet-stream")) { + datafile.setContentType(recognizedType); + } + } + } catch (IOException ex) { + logger.warning("Failed to run the file utility mime type check on file " + fmd.getLabel()); + } + + return datafile; + } + + public void addFiles (DatasetVersion version, List newFiles) { + if (newFiles != null && newFiles.size() > 0) { + Dataset dataset = version.getDataset(); + + try { + if (dataset.getFileSystemDirectory() != null && !Files.exists(dataset.getFileSystemDirectory())) { + /* Note that "createDirectories()" must be used - not + * "createDirectory()", to make sure all the parent + * directories that may not yet exist are created as well. + */ + + Files.createDirectories(dataset.getFileSystemDirectory()); + } + } catch (IOException dirEx) { + logger.severe("Failed to create study directory " + dataset.getFileSystemDirectory().toString()); + return; + // TODO: + // Decide how we are communicating failure information back to + // the page, and what the page should be doing to communicate + // it to the user - if anything. + // -- L.A. + } + + if (dataset.getFileSystemDirectory() != null && Files.exists(dataset.getFileSystemDirectory())) { + for (DataFile dataFile : newFiles) { + String tempFileLocation = getFilesTempDirectory() + "/" + dataFile.getFileSystemName(); + + FileMetadata fileMetadata = dataFile.getFileMetadatas().get(0); + String fileName = fileMetadata.getLabel(); + // These are all brand new files, so they should all have + // one filemetadata total. -- L.A. 
+ boolean metadataExtracted = false; + + datasetService.generateFileSystemName(dataFile); + + if (ingestableAsTabular(dataFile)) { + /* + * Note that we don't try to ingest the file right away - + * instead we mark it as "scheduled for ingest", then at + * the end of the save process it will be queued for async. + * ingest in the background. In the meantime, the file + * will be ingested as a regular, non-tabular file, and + * appear as such to the user, until the ingest job is + * finished with the Ingest Service. + */ + dataFile.SetIngestScheduled(); + } else if (fileMetadataExtractable(dataFile)) { + + try { + // FITS is the only type supported for metadata + // extraction, as of now. -- L.A. 4.0 + dataFile.setContentType("application/fits"); + metadataExtracted = extractMetadata(tempFileLocation, dataFile, version); + } catch (IOException mex) { + logger.severe("Caught exception trying to extract indexable metadata from file " + fileName + ", " + mex.getMessage()); + } + if (metadataExtracted) { + logger.info("Successfully extracted indexable metadata from file " + fileName); + } else { + logger.info("Failed to extract indexable metadata from file " + fileName); + } + } + + // Try to save the file in its permanent location: + try { + + logger.info("Will attempt to save the file as: " + dataFile.getFileSystemLocation().toString()); + Files.copy(new FileInputStream(new File(tempFileLocation)), dataFile.getFileSystemLocation(), StandardCopyOption.REPLACE_EXISTING); + + MD5Checksum md5Checksum = new MD5Checksum(); + try { + dataFile.setmd5(md5Checksum.CalculateMD5(dataFile.getFileSystemLocation().toString())); + } catch (Exception md5ex) { + logger.warning("Could not calculate MD5 signature for the new file " + fileName); + } + + } catch (IOException ioex) { + logger.warning("Failed to save the file " + dataFile.getFileSystemLocation()); + } + + // Any necessary post-processing: + performPostProcessingTasks(dataFile); + } + } + } + } + + public String 
getFilesTempDirectory() { + String filesRootDirectory = System.getProperty("dataverse.files.directory"); + if (filesRootDirectory == null || filesRootDirectory.equals("")) { + filesRootDirectory = "/tmp/files"; + } + + String filesTempDirectory = filesRootDirectory + "/temp"; + + if (!Files.exists(Paths.get(filesTempDirectory))) { + /* Note that "createDirectories()" must be used - not + * "createDirectory()", to make sure all the parent + * directories that may not yet exist are created as well. + */ + try { + Files.createDirectories(Paths.get(filesTempDirectory)); + } catch (IOException ex) { + return null; + } + } + + return filesTempDirectory; + } + + public void startIngestJobs (Dataset dataset) { + for (DataFile dataFile : dataset.getFiles()) { + if (dataFile.isIngestScheduled()) { + dataFile.SetIngestInProgress(); + logger.info("Attempting to queue the file " + dataFile.getFileMetadata().getLabel() + " for ingest."); + asyncIngestAsTabular(dataFile); + } + } + } + public void produceSummaryStatistics(DataFile dataFile) throws IOException { //produceDiscreteNumericSummaryStatistics(dataFile); produceContinuousSummaryStatistics(dataFile); @@ -145,10 +337,24 @@ public void produceSummaryStatistics(DataFile dataFile) throws IOException { public void produceContinuousSummaryStatistics(DataFile dataFile) throws IOException { - Double[][] variableVectors = subsetContinuousVectors(dataFile); - - calculateContinuousSummaryStatistics(dataFile, variableVectors); - + // quick, but memory-inefficient way: + // - this method just loads the entire file-worth of continuous vectors + // into a Double[][] matrix. 
+ //Double[][] variableVectors = subsetContinuousVectors(dataFile); + //calculateContinuousSummaryStatistics(dataFile, variableVectors); + + // A more sophisticated way: this subsets one column at a time, using + // the new optimized subsetting that does not have to read any extra + // bytes from the file to extract the column: + + TabularSubsetGenerator subsetGenerator = new TabularSubsetGenerator(); + + for (int i = 0; i < dataFile.getDataTable().getVarQuantity(); i++) { + if ("continuous".equals(dataFile.getDataTable().getDataVariables().get(i).getVariableIntervalType().getName())) { + Double[] variableVector = subsetGenerator.subsetDoubleVector(dataFile, i); + calculateContinuousSummaryStatistics(dataFile, i, variableVector); + } + } } public boolean asyncIngestAsTabular(DataFile dataFile) { @@ -383,7 +589,12 @@ public boolean fileMetadataExtractable(DataFile dataFile) { return false; } - public boolean extractIndexableMetadata(String tempFileLocation, DataFile dataFile, DatasetVersion editVersion) throws IOException { + /* + * extractMetadata: + * framework for extracting metadata from uploaded files. The results will + * be used to populate the metadata of the Dataset to which the file belongs. 
+ */ + public boolean extractMetadata(String tempFileLocation, DataFile dataFile, DatasetVersion editVersion) throws IOException { boolean ingestSuccessful = false; FileInputStream tempFileInputStream = null; @@ -437,13 +648,14 @@ private void processDatasetMetadata(FileMetadataIngest fileMetadataIngest, Datas Map> fileMetadataMap = fileMetadataIngest.getMetadataMap(); for (DatasetFieldType dsft : mdb.getDatasetFieldTypes()) { if (dsft.isPrimitive()) { + if (!dsft.isHasParent()) { String dsfName = dsft.getName(); // See if the plugin has found anything for this field: if (fileMetadataMap.get(dsfName) != null && !fileMetadataMap.get(dsfName).isEmpty()) { + logger.fine("Ingest Service: found extracted metadata for field " + dsfName); // go through the existing fields: for (DatasetField dsf : editVersion.getFlatDatasetFields()) { - String fName = dsf.getDatasetFieldType().getName(); if (dsf.getDatasetFieldType().equals(dsft)) { // yep, this is our field! // let's go through the values that the ingest @@ -516,12 +728,90 @@ private void processDatasetMetadata(FileMetadataIngest fileMetadataIngest, Datas } } } + } + } else { + // A compound field: + // See if the plugin has found anything for the fields that + // make up this compound field; if we find at least one + // of the child values in the map of extracted values, we'll + // create a new compound field value and its child + // + DatasetFieldCompoundValue compoundDsfv = new DatasetFieldCompoundValue(); + int nonEmptyFields = 0; + for (DatasetFieldType cdsft : dsft.getChildDatasetFieldTypes()) { + String dsfName = cdsft.getName(); + if (fileMetadataMap.get(dsfName) != null && !fileMetadataMap.get(dsfName).isEmpty()) { + logger.fine("Ingest Service: found extracted metadata for field " + dsfName + ", part of the compound field "+dsft.getName()); + + if (cdsft.isPrimitive()) { + // probably an unnecessary check - child fields + // of compound fields are always primitive... + // but maybe it'll change in the future. 
+ if (!cdsft.isControlledVocabulary()) { + // TODO: can we have controlled vocabulary + // sub-fields inside compound fields? + + DatasetField childDsf = new DatasetField(); + childDsf.setDatasetFieldType(cdsft); + + DatasetFieldValue newDsfv = new DatasetFieldValue(childDsf); + newDsfv.setValue((String)fileMetadataMap.get(dsfName).toArray()[0]); + childDsf.getDatasetFieldValues().add(newDsfv); + + childDsf.setParentDatasetFieldCompoundValue(compoundDsfv); + compoundDsfv.getChildDatasetFields().add(childDsf); + + nonEmptyFields++; + } + } + } + } + + if (nonEmptyFields > 0) { + // let's go through this dataset's fields and find the + // actual parent for this sub-field: + for (DatasetField dsf : editVersion.getFlatDatasetFields()) { + if (dsf.getDatasetFieldType().equals(dsft)) { + + // Now let's check that the dataset version doesn't already have + // this compound value - we are only interested in aggregating + // unique values. Note that we need to compare compound values + // as sets! -- i.e. 
all the sub fields in 2 compound fields + // must match in order for these 2 compounds to be recognized + // as "the same": + + boolean alreadyExists = false; + for (DatasetFieldCompoundValue dsfcv : dsf.getDatasetFieldCompoundValues()) { + int matches = 0; + + for (DatasetField cdsf : dsfcv.getChildDatasetFields()) { + String cdsfName = cdsf.getDatasetFieldType().getName(); + String cdsfValue = cdsf.getDatasetFieldValues().get(0).getValue(); + if (cdsfValue != null && !cdsfValue.equals("")) { + String extractedValue = (String)fileMetadataMap.get(cdsfName).toArray()[0]; + logger.info("values: existing: "+cdsfValue+", extracted: "+extractedValue); + if (cdsfValue.equals(extractedValue)) { + matches++; + } + } + } + if (matches == nonEmptyFields) { + alreadyExists = true; + break; + } + } + + if (!alreadyExists) { + // save this compound value, by attaching it to the + // version for proper cascading: + compoundDsfv.setParentDatasetField(dsf); + dsf.getDatasetFieldCompoundValues().add(compoundDsfv); + } + } + } + } } - } //else { - // A compound field: - // - but that's not going to happen! - // because ... (TODO: add explanation! -- L.A. 4.0 alpha - //} + } } } } @@ -628,6 +918,11 @@ private void calculateContinuousSummaryStatistics(DataFile dataFile, Double[][] } } + private void calculateContinuousSummaryStatistics(DataFile dataFile, int varnum, Double[] dataVector) throws IOException { + double[] sumStats = SumStatCalculator.calculateSummaryStatistics(dataVector); + assignContinuousSummaryStatistics(dataFile.getDataTable().getDataVariables().get(varnum), sumStats); + } + private void assignContinuousSummaryStatistics(DataVariable variable, double[] sumStats) throws IOException { if (sumStats == null || sumStats.length != variableService.summaryStatisticTypes.length) { throw new IOException ("Wrong number of summary statistics types calculated! 
("+sumStats.length+")"); diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestableDataChecker.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestableDataChecker.java index 9f95cca6ef2..2937659bb45 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestableDataChecker.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestableDataChecker.java @@ -550,8 +550,14 @@ public String detectTabularDataFormat(File fh) { } } catch (InvocationTargetException e) { Throwable cause = e.getCause(); - err.format(cause.getMessage()); - e.printStackTrace(); + // added null check because of "homemade.zip" from https://redmine.hmdc.harvard.edu/issues/3273 + if (cause.getMessage() != null) { + err.format(cause.getMessage()); + e.printStackTrace(); + } else { + dbgLog.info("cause.getMessage() was null for " + e); + e.printStackTrace(); + } } catch (IllegalAccessException e) { e.printStackTrace(); } diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/metadataextraction/impl/plugins/fits/FITSFileMetadataExtractor.java b/src/main/java/edu/harvard/iq/dataverse/ingest/metadataextraction/impl/plugins/fits/FITSFileMetadataExtractor.java index 87cd0d509d6..6d37c42151d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/metadataextraction/impl/plugins/fits/FITSFileMetadataExtractor.java +++ b/src/main/java/edu/harvard/iq/dataverse/ingest/metadataextraction/impl/plugins/fits/FITSFileMetadataExtractor.java @@ -16,7 +16,6 @@ import java.io.IOException; import java.io.File; import java.text.ParseException; -import java.text.ParsePosition; import java.text.SimpleDateFormat; import java.util.Map; import java.util.HashMap; @@ -24,11 +23,11 @@ import java.util.HashSet; import java.util.List; import java.util.ArrayList; +import java.util.Calendar; import java.util.Date; import java.util.Properties; import java.util.logging.Logger; import nom.tam.fits.BasicHDU; -import nom.tam.fits.Data; import nom.tam.fits.Fits; import nom.tam.fits.FitsException; import 
nom.tam.fits.Header; @@ -36,7 +35,6 @@ import nom.tam.fits.ImageHDU; import nom.tam.fits.TableHDU; import nom.tam.fits.UndefinedHDU; -import org.apache.commons.lang.StringUtils; /** * @@ -85,8 +83,10 @@ public class FITSFileMetadataExtractor extends FileMetadataExtractor { private static final String ATTRIBUTE_TYPE = "astroType"; private static final String ATTRIBUTE_FACILITY = "astroFacility"; private static final String ATTRIBUTE_INSTRUMENT = "astroInstrument"; + private static final String ATTRIBUTE_OBJECT = "astroObject"; private static final String ATTRIBUTE_START_TIME = "coverage.Temporal.StartTime"; private static final String ATTRIBUTE_STOP_TIME = "coverage.Temporal.StopTime"; + private static final String ATTRIBUTE_COVERAGE_SPATIAL = "coverage.Spatial"; static { @@ -107,23 +107,24 @@ public class FITSFileMetadataExtractor extends FileMetadataExtractor { //defaultRecognizedFitsMetadataKeys.put("DATE-OBS", FIELD_TYPE_DATE); // both coverage.Temporal.StartTime and .EndTime are derived from // the DATE-OBS values; extra rules apply (coded further down) + //defaultRecognizedFitsMetadataKeys.put("OBJECT", FIELD_TYPE_TEXT); + + //defaultIndexableFitsMetaKeys.put("DATE-OBS", "coverage.Temporal.StartTime"); //defaultIndexableFitsMetaKeys.put("DATE-OBS", "coverage.Temporal.StopTime"); - - //defaultIndexableFitsMetaKeys.put("NAXIS", "naxis"); - + //defaultIndexableFitsMetaKeys.put("OBJECT", "astroObject"); + //defaultIndexableFitsMetaKeys.put("CRVAL1", "coverage.Spatial"); + //defaultIndexableFitsMetaKeys.put("CRVAL2", "coverage.Spatial"); + //defaultRecognizedFitsMetadataKeys.put("CRVAL1", FIELD_TYPE_TEXT); + //defaultRecognizedFitsMetadataKeys.put("CRVAL2", FIELD_TYPE_TEXT); // Optional, configurable fields: defaultRecognizedFitsMetadataKeys.put("FILTER", FIELD_TYPE_TEXT); - defaultRecognizedFitsMetadataKeys.put("OBJECT", FIELD_TYPE_TEXT); defaultRecognizedFitsMetadataKeys.put("CD1_1", FIELD_TYPE_FLOAT); defaultRecognizedFitsMetadataKeys.put("CDELT", 
FIELD_TYPE_FLOAT); defaultRecognizedFitsMetadataKeys.put("EXPTIME", FIELD_TYPE_DATE); - defaultRecognizedFitsMetadataKeys.put("CRVAL1", FIELD_TYPE_TEXT); - defaultRecognizedFitsMetadataKeys.put("CRVAL2", FIELD_TYPE_TEXT); - // And the mapping to the corresponding values in the // metadata block: @@ -133,13 +134,10 @@ public class FITSFileMetadataExtractor extends FileMetadataExtractor { defaultIndexableFitsMetaKeys.put("TELESCOP", ATTRIBUTE_FACILITY); defaultIndexableFitsMetaKeys.put("INSTRUME", ATTRIBUTE_INSTRUMENT); defaultIndexableFitsMetaKeys.put("FILTER", "coverage.Spectral.Bandpass"); - defaultIndexableFitsMetaKeys.put("OBJECT", "astroObject"); defaultIndexableFitsMetaKeys.put("CD1_1", "resolution.Spatial"); defaultIndexableFitsMetaKeys.put("CDELT", "resolution.Spatial"); defaultIndexableFitsMetaKeys.put("EXPTIME", "resolution.Temporal"); defaultIndexableFitsMetaKeys.put("CDELT", "resolution.Spatial"); - defaultIndexableFitsMetaKeys.put("CRVAL1", "coverage.Spatial"); - defaultIndexableFitsMetaKeys.put("CRVAL2", "coverage.Spatial"); @@ -199,9 +197,13 @@ public class FITSFileMetadataExtractor extends FileMetadataExtractor { // Recognized date formats, for extracting temporal values: private static SimpleDateFormat[] DATE_FORMATS = new SimpleDateFormat[] { - new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss"), - new SimpleDateFormat("yyyy-MM-dd") - //new SimpleDateFormat("yyyy") + new SimpleDateFormat("yyyy-MM-dd"), + new SimpleDateFormat("dd-MM-yy") + }; + + private static SimpleDateFormat[] TIME_FORMATS = new SimpleDateFormat[] { + new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS"), + new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss") }; /** @@ -222,7 +224,13 @@ public FileMetadataIngest ingest (BufferedInputStream stream) throws IOException dbgLog.fine("Attempting to read FITS file;"); Map> fitsMetaMap = new HashMap<>(); - Map> tempMetaMap = new HashMap<>(); + + Date minDate = null; + Date maxDate = null; + + String startObsTime = ""; + String stopObsTime = ""; + 
FileMetadataIngest ingest = new FileMetadataIngest(); ingest.setMetadataBlockName(ASTROPHYSICS_BLOCK_NAME); @@ -257,7 +265,6 @@ public FileMetadataIngest ingest (BufferedInputStream stream) throws IOException try { fitsMetaMap.put(ATTRIBUTE_TYPE, new HashSet()); - while ((hdu = fitsFile.readHDU()) != null) { dbgLog.fine("reading HDU number " + i); hduNames.add("[UNNAMED]"); @@ -324,29 +331,237 @@ public FileMetadataIngest ingest (BufferedInputStream stream) throws IOException metadataKeys.add("INSTRUME"); } } - - //if (fitsMetaMap.get(ATTRIBUTE_START_TIME) == null) { - String obsDate = hduHeader.getStringValue("DATE-OBS"); - if (obsDate != null) { - // The value of DATE-OBS will be used later, to determine - // coverage.Temporal.StarTime and .StppTime; for now - // we are storing the values in a temporary map. - if (tempMetaMap.get("DATE-OBS") == null) { - tempMetaMap.put("DATE-OBS", new HashSet()); + + /* + * Spatial coordinates: we just use CRVAL1 and CRVAL2, + * X and Y coordinates of the center pixel, if available: + */ + + double crval1Float = hduHeader.getDoubleValue("CRVAL1"); + double crval2Float = hduHeader.getDoubleValue("CRVAL2"); + + dbgLog.fine("CRVAL1: "+crval1Float); + dbgLog.fine("CRVAL2: "+crval2Float); + + if (crval1Float != 0.0 || crval2Float != 0.0) { + if (fitsMetaMap.get(ATTRIBUTE_COVERAGE_SPATIAL) == null) { + fitsMetaMap.put(ATTRIBUTE_COVERAGE_SPATIAL, new HashSet()); + } + fitsMetaMap.get(ATTRIBUTE_COVERAGE_SPATIAL).add("("+crval1Float+" "+crval2Float+")"); + metadataKeys.add("CRVAL1"); + metadataKeys.add("CRVAL2"); + } + + /* + * Special treatment for the OBJECT value: + */ + + String objectString = hduHeader.getStringValue("OBJECT"); + if (objectString != null && !objectString.equals("")) { + metadataKeys.add("OBJECT"); + } else { + objectString = hduHeader.getStringValue("TARGNAME"); + if (objectString != null && !objectString.equals("")) { + metadataKeys.add("TARGNAME"); } - tempMetaMap.get("DATE-OBS").add(obsDate); + } + + if 
(objectString != null && !objectString.equals("")) { + if (fitsMetaMap.get(ATTRIBUTE_OBJECT) == null) { + fitsMetaMap.put(ATTRIBUTE_OBJECT, new HashSet()); + } + fitsMetaMap.get(ATTRIBUTE_OBJECT).add(objectString); + } + + + /* + * Let's try to determine the start and end date/time for this + * HDU. HDUs can have their own, differend end and start times; + * for the start time of the whole file we'll select the min. + * of the individual HDU start times, and the max. for end time. + */ + + // The standard header key is "DATE-OBS" - but all these + // hacky variants below are common too, so we'll go through + // them all: + + String obsDateString = hduHeader.getStringValue("DATE-OBS"); + if (obsDateString != null && !obsDateString.equals("")) { metadataKeys.add("DATE-OBS"); + } else { + obsDateString = hduHeader.getStringValue("DATE_OBS"); + if (obsDateString != null && !obsDateString.equals("")) { + metadataKeys.add("DATE_OBS"); + } else { + obsDateString = hduHeader.getStringValue("OBS-DATE"); + if (obsDateString != null && !obsDateString.equals("")) { + metadataKeys.add("OBS-DATE"); + } + } + } + + // TODO: + // see if it's easier to replace this with getObservationDate() + // on the HDU - ? + // DONE: No, getObservationDate() is bad news. all it does + // on the inside is + // return new FitsDate(myHeader.getStringValue("DATE-OBS")).toDate(); + // -- which adds all the complications you'd expect, adding + // a time zone to the strings that didn't have any (shifting + // the value by a seemingly random number of hours), etc. 
+ + if (obsDateString != null) { + Date startDate = null; + Date endDate = null; + String startDateFormatted = null; + String endDateFormatted = null; + + // We'll try to parse it, first as a full date-time string: + // replace all slashes with dashes: + obsDateString = obsDateString.replace('/', '-'); + + for (SimpleDateFormat format : TIME_FORMATS) { + format.setLenient(false); + + try { + startDate = format.parse(obsDateString); + dbgLog.info("Valid date string: " + obsDateString + ", format: " + format.toPattern() + ", resulting date: "+startDate+", formatted resulting date: "+TIME_FORMATS[0].format(startDate)); + startDateFormatted = format.format(startDate); + //startDateFormatted = obsDateString; + break; + } catch (ParseException ex) { + startDate = null; + } + + // Alternative method: + // We could truncate the string to the point where the parser + // stopped; e.g., if our format was yyyy-mm-dd and the + // string was "2014-05-07T14:52:01" we'll truncate the + // string to "2014-05-07". 
+ /* + ParsePosition pos = new ParsePosition(0); + startDate = format.parse(obsDateString, pos); + if (startDate == null) { + continue; + } + if (pos.getIndex() != obsDateString.length()) { + obsDateString = obsDateString.substring(0, pos.getIndex()); + } + dbgLog.fine("Valid date: " + obsDateString + ", format: " + format.toPattern()); + break; + */ + } + + // if that didn't work, we'll try parsing the string as a + // date only: + if (startDate == null) { + for (SimpleDateFormat format : DATE_FORMATS) { + format.setLenient(false); + + try { + startDate = format.parse(obsDateString); + dbgLog.fine("Valid date string: " + obsDateString + ", format: " + format.toPattern() + ", resulting date: "+startDate+", formatted resulting date: "+DATE_FORMATS[0].format(startDate)); + //startDateFormatted = format.format(startDate); + startDateFormatted = DATE_FORMATS[0].format(startDate); + break; + } catch (ParseException ex) { + startDate = null; + } + } + // if that worked, let's see if we have the time value + // stored separately - in "TIME-OBS": + if (startDate != null) { + String obsTimeString = hduHeader.getStringValue("TIME-OBS"); + Date startDateTime = null; + + if (obsTimeString != null && !obsTimeString.equals("")) { + String newObsDateString = DATE_FORMATS[0].format(startDate) + "T" + obsTimeString; + + for (SimpleDateFormat format : TIME_FORMATS) { + format.setLenient(false); + + try { + startDateTime = format.parse(newObsDateString); + dbgLog.fine("Valid date obtained by combining obs date and time: " + newObsDateString + ", format: " + format.toPattern() + ", resulting date: "+startDateTime+", formatted resulting date: "+TIME_FORMATS[0].format(startDateTime)); + //startDateFormatted = TIME_FORMATS[0].format(startDateTime); + //startDateFormatted = newObsDateString; + startDateFormatted = format.format(startDateTime); + break; + } catch (ParseException ex) { + startDateTime = null; + } + } + + if (startDateTime != null) { + startDate = startDateTime; + } + } + } 
+ } + + if (startDate != null) { + dbgLog.fine("Let's try and calculate the end date..."); + // Check if it's the min. start date value we've got so far: + + if (minDate == null) { + minDate = startDate; + startObsTime = startDateFormatted; + } else if (startDate.before(minDate)) { + minDate = startDate; + startObsTime = startDateFormatted; + } + + // Stop/end dates: + endDate = startDate; + endDateFormatted = startDateFormatted; + + // Check if we have the EXPTIME stored, that would allow us + // to recalculate the end time: + float expTimeValue = hduHeader.getFloatValue("EXPTIME"); + if (expTimeValue != 0.0) { + long expTimeInMillis = (long) (expTimeValue * 1000); + dbgLog.fine("EXPTIME in MILLISECONDS: " + expTimeInMillis); + Calendar endDateCal = Calendar.getInstance(); + endDateCal.setTime(endDate); + long endTimeInMillis = endDateCal.getTimeInMillis() + expTimeInMillis; + dbgLog.fine("END TIME in MILLISECONDS: " + endTimeInMillis); + endDateCal.setTimeInMillis(endTimeInMillis); + endDate = endDateCal.getTime(); + + if ((endTimeInMillis / 1000) * 1000 != endTimeInMillis) { + endDateFormatted = TIME_FORMATS[0].format(endDate); + } else { + endDateFormatted = TIME_FORMATS[1].format(endDate); + } + } + + // Check if it's the max. 
end date value so far: + + if (maxDate == null) { + maxDate = endDate; + stopObsTime = endDateFormatted; + } else if (endDate.after(maxDate)) { + maxDate = endDate; + stopObsTime = endDateFormatted; + } + + + } + } - //} /* TODO: * use the Axes values for determining if this is a spectrum: */ - for (int j = 0; j < hdu.getAxes().length; j++) { - int nAxisN = hdu.getAxes()[j]; - metadataKeys.add("NAXIS"+j); - dbgLog.fine("NAXIS"+j+" value: "+nAxisN); + if (hdu.getAxes() != null) { + for (int j = 0; j < hdu.getAxes().length; j++) { + int nAxisN = hdu.getAxes()[j]; + metadataKeys.add("NAXIS"+j); + dbgLog.fine("NAXIS"+j+" value: "+nAxisN); + } + } else { + dbgLog.fine("NULL Axes array."); } // Process individual header cards: @@ -405,24 +620,18 @@ public FileMetadataIngest ingest (BufferedInputStream stream) throws IOException dbgLog.fine("value is null"); } - /* - * TODO: - * decide what to do with regular key comments: - - if (headerComment != null) { - dbgLog.fine("comment: " + headerComment); - } else { - dbgLog.fine("comment is null"); - } - * */ } j++; } dbgLog.fine ("processed "+j+" cards total;"); - Data fitsData = hdu.getData(); - - dbgLog.fine ("data size: "+fitsData.getSize()); + // not sure this is legit: hdu.skipData(fitsFile.getStream()); + // the following is legit, but seemingly unnecessary: + // Data fitsData = hdu.getData(); + // dbgLog.info ("data size: "+fitsData.getSize()); + // TODO: confirm memory use implications of reading the Data + // section vs. skipping it explicitly vs. not doing anything. :) + // -- L.A. June 1 2014. dbgLog.fine("total size of the HDU is "+hdu.getSize()); } @@ -470,86 +679,16 @@ public FileMetadataIngest ingest (BufferedInputStream stream) throws IOException // start time and and stop time: - int numObsDates = tempMetaMap.get("DATE-OBS") == null ? 
0 : tempMetaMap.get("DATE-OBS").size(); - if (numObsDates > 0) { - - String[] obsDateValues = new String[numObsDates]; - obsDateValues = tempMetaMap.get("DATE-OBS").toArray(new String[0]); - - Date minDate = null; - Date maxDate = null; - - String startObsTime = ""; - String stopObsTime = ""; - - for (int k = 0; k < obsDateValues.length; k++) { - Date obsDate = null; - String obsDateString = obsDateValues[k]; - - for (SimpleDateFormat format : DATE_FORMATS) { - // Strict parsing - it will throw an - // exception if it doesn't parse! - format.setLenient(false); - // replace all slashes with dashes: - obsDateString = obsDateString.replace('/', '-'); - // parse date string without truncating: - try { - obsDate = format.parse(obsDateString); - dbgLog.fine("Valid date: " + obsDateString + ", format: " + format.toPattern()); - break; - } catch (ParseException ex) { - obsDate = null; - } - // Alternative method: - // We'll truncate the string to the point where the parser - // stopped; e.g., if our format was yyyy-mm-dd and the - // string was "2014-05-07T14:52:01" we'll truncate the - // string to "2014-05-07". 
- /* - ParsePosition pos = new ParsePosition(0); - obsDate = format.parse(obsDateString, pos); - if (obsDate == null) { - continue; - } - if (pos.getIndex() != obsDateString.length()) { - obsDateString = obsDateString.substring(0, pos.getIndex()); - } - dbgLog.fine("Valid date: " + obsDateString + ", format: " + format.toPattern()); - break; - */ - } - - if (obsDate != null) { - - if (minDate == null) { - minDate = obsDate; - startObsTime = obsDateString; - } else if (obsDate.before(minDate)) { - minDate = obsDate; - startObsTime = obsDateString; - } - - if (maxDate == null) { - maxDate = obsDate; - stopObsTime = obsDateString; - } else if (obsDate.after(maxDate)) { - maxDate = obsDate; - stopObsTime = obsDateString; - } - } - } - - if (!startObsTime.equals("")) { - fitsMetaMap.put(ATTRIBUTE_START_TIME, new HashSet()); - fitsMetaMap.get(ATTRIBUTE_START_TIME).add(startObsTime); - } + if (!startObsTime.equals("")) { + fitsMetaMap.put(ATTRIBUTE_START_TIME, new HashSet()); + fitsMetaMap.get(ATTRIBUTE_START_TIME).add(startObsTime); + } - if (!stopObsTime.equals("")) { - fitsMetaMap.put(ATTRIBUTE_STOP_TIME, new HashSet()); - fitsMetaMap.get(ATTRIBUTE_STOP_TIME).add(stopObsTime); - } + if (!stopObsTime.equals("")) { + fitsMetaMap.put(ATTRIBUTE_STOP_TIME, new HashSet()); + fitsMetaMap.get(ATTRIBUTE_STOP_TIME).add(stopObsTime); } - + // TODO: // Numeric fields should also be validated! // -- L.A. 
4.0 beta diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexableDataset.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexableDataset.java index a1c6940dcf8..73caa913f3a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexableDataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexableDataset.java @@ -1,6 +1,7 @@ package edu.harvard.iq.dataverse.search; import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.IndexServiceBean; public class IndexableDataset extends IndexableObject { @@ -32,7 +33,7 @@ public DatasetState getDatasetState() { public enum DatasetState { - WORKING_COPY("_draft"), PUBLISHED(""); + WORKING_COPY(IndexServiceBean.draftSuffix), PUBLISHED(""); private String suffix; diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index 453c22c0471..abcd09a724b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -48,7 +48,6 @@ public DatasetVersion parseDatasetVersion( JsonObject obj ) throws JsonParseExce if ( archiveNote != null ) dsv.setArchiveNote( archiveNote ); dsv.setDeaccessionLink( obj.getString("deaccessionLink", null) ); - dsv.setVersion( parseLong(obj.getString("version", null)) ); dsv.setVersionNumber( parseLong(obj.getString("versionNumber", null)) ); dsv.setMinorVersionNumber( parseLong(obj.getString("minorVersionNumber", null)) ); dsv.setId( parseLong(obj.getString("id", null)) ); @@ -65,39 +64,6 @@ public DatasetVersion parseDatasetVersion( JsonObject obj ) throws JsonParseExce dsv.setDatasetFields( parseMetadataBlocks(obj.getJsonObject("metadataBlocks")) ); - // parse authors - JsonArray authorsJson = obj.getJsonArray("authors"); - List authors = new ArrayList<>( authorsJson.size() ); - for ( JsonObject authorJson : authorsJson.getValuesAs(JsonObject.class) ) { - DatasetAuthor 
author = new DatasetAuthor(); - author.setAffiliation( parseField( authorJson.getJsonObject("affiliation")) ); - author.setIdType( authorJson.getString("idType", null) ); - author.setIdValue( authorJson.getString("idValue", null)); - author.setDisplayOrder( parsePrimitiveInt(authorJson.getString("displayOrder", null), 0) ); - author.setName( parseField( authorJson.getJsonObject("name")) ); - - authors.add( author ); - author.setDatasetVersion(dsv); - } - dsv.setDatasetAuthors(authors); - - // parse distributors - JsonArray distrosJson = obj.getJsonArray("distributors"); - if ( distrosJson != null ) { - List distros = new ArrayList<>(distrosJson.size()); - for ( JsonObject distJson : distrosJson.getValuesAs(JsonObject.class) ) { - DatasetDistributor distr = new DatasetDistributor(); - distr.setDisplayOrder( distJson.getInt("displayOrder", 0)); - distr.setVersion( Long.valueOf(distJson.getInt("version", 0)) ); - distr.setAbbreviation( parseField(distJson.getJsonObject("abbreviation"))); - distr.setAffiliation( parseField(distJson.getJsonObject("affiliation"))); - distr.setLogo( parseField(distJson.getJsonObject("logo"))); - distr.setName( parseField(distJson.getJsonObject("name"))); - distr.setUrl( parseField(distJson.getJsonObject("url"))); - } - dsv.setDatasetDistributors(distros); - } - return dsv; } catch (ParseException ex) { diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 27d370740e4..2789cabb6d5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -30,13 +30,11 @@ import java.util.TreeSet; import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder; -import java.math.BigDecimal; import java.util.Deque; import java.util.LinkedList; import java.util.Map; import javax.json.JsonArray; import javax.json.JsonObject; -import 
javax.json.JsonValue; /** * Convert objects to Json. @@ -50,12 +48,12 @@ public class JsonPrinter { public static final BriefJsonPrinter brief = new BriefJsonPrinter(); public static JsonObjectBuilder json( RoleAssignment ra ) { - return Json.createObjectBuilder() + return jsonObjectBuilder() .add("id", ra.getId()) .add("userId", ra.getUser().getId() ) - .add("_username", nullFill(ra.getUser().getUserName())) + .add("_username", ra.getUser().getUserName()) .add("roleId", ra.getRole().getId() ) - .add("_roleAlias", nullFill(ra.getRole().getAlias())) + .add("_roleAlias", ra.getRole().getAlias()) .add("definitionPointId", ra.getDefinitionPoint().getId() ); } @@ -68,11 +66,11 @@ public static JsonArrayBuilder json( Set permissions ) { } public static JsonObjectBuilder json( DataverseRole role ) { - JsonObjectBuilder bld = Json.createObjectBuilder() - .add("alias", nullFill(role.getAlias()) ) - .add("name", nullFill(role.getName())) + JsonObjectBuilder bld = jsonObjectBuilder() + .add("alias", role.getAlias()) + .add("name", role.getName()) .add("permissions", json(role.permissions())) - .add("description", nullFill(role.getDescription())); + .add("description", role.getDescription()); if ( role.getId() != null ) bld.add("id", role.getId() ); if ( role.getOwner()!=null && role.getOwner().getId()!=null ) bld.add("ownerId", role.getOwner().getId()); @@ -82,9 +80,9 @@ public static JsonObjectBuilder json( DataverseRole role ) { public static JsonObjectBuilder json( Dataverse dv ) { JsonObjectBuilder bld = jsonObjectBuilder() .add("id", dv.getId() ) - .add("alias", nullFill(dv.getAlias()) ) - .add("name", nullFill(dv.getName())) - .add("affiliation", dv.getAffiliation()) + .add("alias", dv.getAlias()) + .add("name", dv.getName()) + .add("affiliation", dv.getAffiliation()) .add("contactEmail", dv.getContactEmail()) .add("permissionRoot", dv.isPermissionRoot()) .add("creator",json(dv.getCreator())) @@ -123,19 +121,17 @@ public static JsonObjectBuilder json( Dataset ds ) { 
.add( "versions", jsonObjectBuilder() .add("count", versionCount) .add("latest", brief.json(ds.getLatestVersion())) - .add("edit", brief.json(ds.getEditVersion())) + .add("edit", ds.getEditVersion().getId()!=null ? brief.json(ds.getEditVersion()) : null ) ); } public static JsonObjectBuilder json( DatasetVersion dsv ) { JsonObjectBuilder bld = jsonObjectBuilder() .add("id", dsv.getId()) - .add("version", dsv.getVersion() ) .add("versionNumber", dsv.getVersionNumber()) .add("versionMinorNumber", dsv.getMinorVersionNumber()) .add("versionState", dsv.getVersionState().name() ) .add("versionNote", dsv.getVersionNote()) - .add("title", dsv.getTitle()) .add("archiveNote", dsv.getArchiveNote()) .add("deaccessionLink", dsv.getDeaccessionLink()) .add("distributionDate", dsv.getDistributionDate()) @@ -146,33 +142,7 @@ public static JsonObjectBuilder json( DatasetVersion dsv ) { .add("releaseTime", format(dsv.getReleaseTime()) ) .add("createTime", format(dsv.getCreateTime()) ) ; - - // Add distributors - List dists = dsv.getDatasetDistributors(); - if ( ! dists.isEmpty() ) { - if ( dists.size() > 1 ) { - Collections.sort(dists, DatasetDistributor.DisplayOrder ); - } - JsonArrayBuilder ab = Json.createArrayBuilder(); - for ( DatasetDistributor dist : dists ) { - ab.add( json(dist) ); - } - bld.add( "distributors", ab ); - } - - // Add authors - List auth = dsv.getDatasetAuthors(); - if ( ! 
auth.isEmpty() ) { - if ( auth.size() > 1 ) { - Collections.sort(auth, DatasetAuthor.DisplayOrder ); - } - JsonArrayBuilder ab = Json.createArrayBuilder(); - for ( DatasetAuthor da : auth ) { - ab.add( json(da) ); - } - bld.add("authors", ab); - } - + bld.add("metadataBlocks", jsonByBlocks(dsv.getDatasetFields())); return bld; @@ -302,21 +272,6 @@ public static JsonObjectBuilder json( DataFile df ) { ; } - public static JsonObjectBuilder json( DatasetAuthor da ) { - return jsonObjectBuilder() - .add( "idType", da.getIdType() ) - .add( "idValue", da.getIdValue() ) - .add( "name", json(da.getName()) ) - .add( "affiliation", json(da.getAffiliation()) ) - .add( "displayOrder", da.getDisplayOrder() ) - ; - } - - - public static String nullFill( String s ) { - return s==null ? "" : s; - } - public static String format( Date d ) { return (d==null) ? null : dateFormat.format(d); } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/NullSafeJsonBuilder.java b/src/main/java/edu/harvard/iq/dataverse/util/json/NullSafeJsonBuilder.java index aa4426938dd..452a5366576 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/NullSafeJsonBuilder.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/NullSafeJsonBuilder.java @@ -62,6 +62,10 @@ public NullSafeJsonBuilder add(String name, int value) { return this; } + public NullSafeJsonBuilder add(String name, Long value) { + return ( value != null ) ? add(name, value.longValue()) : this; + } + @Override public NullSafeJsonBuilder add(String name, long value) { delegate.add(name, value); diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index b1080045ad7..de88807b377 100644 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -9,7 +9,7 @@ - + @@ -19,28 +19,43 @@ - + @@ -49,17 +64,15 @@ - -
- +
- + and permissionServiceBean.on(DatasetPage.dataset).canIssueCommand('PublishDatasetCommand')}">
- +
- + - - + + - + Select version number: - - - - - + + + + +
Due to the nature of changes to the current draft this will be a major release (#{DatasetPage.datasetNextMajorVersion}) - +
- - + + + and permissionServiceBean.on(DatasetPage.dataset).canIssueCommand('UpdateDatasetCommand')}">
@@ -148,7 +161,7 @@
- +
@@ -159,7 +172,7 @@
- #{DatasetPage.displayCitation} + #{DatasetPage.displayCitation} @@ -220,15 +233,15 @@ Host Dataverse
- + - - - - - - + + + + + +
@@ -262,8 +275,8 @@
- + fileUploadListener="#{DatasetPage.handleFileUpload}" process="filesTable" update="filesTable" label="Select Files to Add" oncomplete="javascript:bind_bsui_components();"/> +
-
@@ -322,7 +335,7 @@
- +
- + + + + + + +
+
+ Version: #{DatasetPage.datasetVersionDifference.newVersion.semanticVersion}
+ +
+
+ Version: #{DatasetPage.datasetVersionDifference.originalVersion.semanticVersion}
+ +
+
+   +
+
+ + +
+
+ +
+ + + + + + + + + + + + + +
+
+
+ +
+
+ +
+ + + + + + + + + + + + +   + + + + + + + + + +   + + +
+
+
+
Tip: You can add more metadata about this dataset after it's created. diff --git a/src/main/webapp/dataverse.xhtml b/src/main/webapp/dataverse.xhtml index ee4195cb080..6a23d548f4c 100644 --- a/src/main/webapp/dataverse.xhtml +++ b/src/main/webapp/dataverse.xhtml @@ -158,6 +158,25 @@ + + + + + + + + + + + + + + + + + + @@ -186,7 +205,7 @@
  • - +
  • @@ -194,22 +213,16 @@ - + - -
  • - - - -
  • + permissionServiceBean.on(DataversePage.dataverse).canIssueCommand('PublishDataverseCommand')}">
    -
    @@ -247,20 +259,7 @@ - - - - - - - - - - - - - + @@ -317,7 +316,7 @@ - +
    diff --git a/src/main/webapp/dataverse_header.xhtml b/src/main/webapp/dataverse_header.xhtml index f84feabb533..bb4255de159 100644 --- a/src/main/webapp/dataverse_header.xhtml +++ b/src/main/webapp/dataverse_header.xhtml @@ -116,13 +116,13 @@
    @@ -198,7 +198,7 @@
    - +
    diff --git a/src/main/webapp/manage-roles.xhtml b/src/main/webapp/manage-roles.xhtml index 74347212286..d426d1b045c 100644 --- a/src/main/webapp/manage-roles.xhtml +++ b/src/main/webapp/manage-roles.xhtml @@ -6,7 +6,6 @@ xmlns:p="http://primefaces.org/ui"> - @@ -14,255 +13,199 @@ - - + - - + + - - - - - - - - - - - - - When a dataverse is defined as a permission root, - it does not honor roles granted to users on its parents dataverses. - Unless this dataverse holds data which is significantly more restricted - than the data in the parent dataverse, you probably want to keep this - box unchecked. - - - - - - - Define what roles non-logged in users have. - - Guests do not inherit any role from parent dataverses. - - - Guest users inherit these roles from parent dataverses: - - #{ra.role.name} (assigned at #{ra.definitionPoint.name}) - - - - - - - - + + + + + - - - - - - - - - - - - - - - - - - - - - - - - - No roles are defined for Dataverse '#{DataversePage.dataverse.name}'. - - Any roles applied to users at parent dataverses, are valid in this - dataverse as well. - - - - - - - + + + + + + + + + + - - - - - - - - - + + + + - - - - + + +
    + +
    + + + +
    + +
    + + +
    + #{u.firstName} #{u.lastName}
    + #{u.affiliation} + #{u.email} +
    +
    +
    +
    - - - - - - - - - - - - - - - - - - - +
    + +
    +
    + + + + + + + + + + + + +
    - - - - - - - + + + + - - + + + + + +

    + #{role.name} + +
    + + #{prm} + +

    +
    + + No roles are defined for Dataverse '#{DataversePage.dataverse.name}'. + + Any roles applied to users at parent dataverses, are valid in this + dataverse as well. + + - - + + + + + + - -
    -
    - - - #{pmsn.humanName} - - - - - -
    -
    + + + + - -
    - - - - - - -
    - #{u.firstName} #{u.lastName}
    - #{u.affiliation} - #{u.email} -
    - -
    -
    - - - - -
    - - - - - - - - - - - - - - - This Dataverse - - - - - -
    -
    -
    + + + + + + + + + + + + + + + + + + + -
    -
    -
    + + + + + + + + + Define what roles non-logged in users have. + + Guests do not inherit any role from parent dataverses. + + + Guest users inherit these roles from parent dataverses: + + #{ra.role.name} (assigned at #{ra.definitionPoint.name}) + + + + + + +
    + +
    + + +
    diff --git a/src/main/webapp/resources/css/structure.css b/src/main/webapp/resources/css/structure.css index 6f668812573..b0e42a15621 100644 --- a/src/main/webapp/resources/css/structure.css +++ b/src/main/webapp/resources/css/structure.css @@ -27,6 +27,8 @@ body .ui-widget {font-size: inherit;} .panelgridLayoutTable, .panelgridFormTable {width:100%;} .panelgridFormTable > tbody > tr.ui-widget-content > td:first-child {width:20%;} +.panelgridFormTable.ui-panelgrid td table.ui-picklist td {border: 0; padding:0;} +.panelgridFormTable.ui-panelgrid td table.ui-picklist td ul.ui-widget-content.ui-corner-all {border:1px solid #DDDDDD; border-radius: 4px;} .panelLayoutButtonBlock button {margin-right:2em;} @@ -85,8 +87,8 @@ form[id$='dataverseForm'] .panelLayoutBlock .form-horizontal table.ui-panelgrid. form[id$='dataverseForm'] .panelLayoutBlock .form-horizontal div.form-group {vertical-align: middle;} -div.ui-chkbox.metadata-blocks-default div.ui-chkbox-box {margin: 4px 10px;} -div.ui-chkbox.metadata-blocks-default span.ui-chkbox-label {margin: 1px 10px 4px 10px;} +div.ui-chkbox.metadata-blocks-default div.ui-chkbox-box, div.ui-chkbox.facet-category-default div.ui-chkbox-box {margin: 4px 10px;} +div.ui-chkbox.metadata-blocks-default span.ui-chkbox-label, div.ui-chkbox.facet-category-default span.ui-chkbox-label {margin: 1px 10px 4px 10px;} table.ui-selectmanycheckbox.metadata-blocks-select label {font-weight:normal; margin-top:1px;} table.ui-selectmanycheckbox.metadata-blocks-select label.ui-state-disabled {background:inherit;} @@ -150,7 +152,6 @@ div[id$='facetCategoryList'] div.ui-datalist, div[id$='facetCategoryList'] div.u hyphens: auto;} a.facetLink {text-decoration:none;} a.facetLink.facetSelected, a.facetTypeLink.facetSelected {font-weight:bold;} -.facetSelected.resultsTopBlock {background-color:#E6F1F6; padding-left:.3em; padding-right:.3em;} a.facetLink.facetSelected span.glyphicon-remove {vertical-align:text-top; margin-top:1px; margin-left:.25em;} 
table.ui-panelgrid.facetsMoreLess {width:100%;} @@ -159,7 +160,8 @@ table.ui-panelgrid.facetsMoreLess td {width:50%; border:0;} table.ui-panelgrid.facetsMoreLess td:last-child {text-align:right;} #dv-maincolumn div.emptyResults.bg-warning {background:#FCF8F2; padding:.5em 1em;} -#dv-maincolumn a.facetLink.facetSelected.resultsTopBlock {margin-right:1em;} +#resultsFacetsTopBlock {margin-bottom:.5em;} +#resultsFacetsTopBlock a.facetLink.facetSelected {display:inline-block; margin-bottom:.5em; margin-right:1em; background-color:#E6F1F6; padding-left:.3em; padding-right:.3em;} #dv-maincolumn div.results-sort-pagination div.pagination {margin-left:1em;} #dv-maincolumn div.results-sort-pagination.results-bottom .pagination {margin-top:0; margin-bottom:0;} @@ -217,6 +219,24 @@ div.popover.fade.top.in div.popover-content {text-align:center; vertical-align: div[id$="filesTable"] .ui-datatable-tablewrapper thead {display:none;} div[id$="versionsTable"] .ui-datatable-tablewrapper thead {display:none;} +div[id$='versionsTab'] .ui-state-highlight .ui-icon {background-image: url("/javax.faces.resource/images/ui-icons_333333_256x240.png.xhtml?ln=primefaces-bootstrap");} +div[id$='versionsTab'] .ui-state-highlight .ui-commandlink {color:#FFFFFF;} + +div[id$='detailsBlocks'] {min-width:60%; max-height:60%;} +div[id$='detailsBlocks'] .ui-dialog-content {overflow-y:scroll;} +div[id$='detailsBlocks'] div.ui-dialog-buttonpane {border-top:0;} + +div.dvnDifferanceTable > div.ui-datatable-tablewrapper > table > thead {display:none;} +div.dvnDifferanceTable > div.ui-datatable-tablewrapper > table, div.dvnDifferanceTable > div.ui-datatable-tablewrapper > table > tbody {border:0;} +div.dvnDifferanceTable > div.ui-datatable-tablewrapper > table > tbody > tr:first-child, div.dvnDifferanceTable > div.ui-datatable-tablewrapper > table > tbody > tr:first-child > td {border-top-width:0;} +div.dvnDifferanceTable > div.ui-datatable-tablewrapper > table > tbody > tr:last-child, div.dvnDifferanceTable 
> div.ui-datatable-tablewrapper > table > tbody > tr:last-child > td {border-bottom-width:0;} +div.dvnDifferanceTable > div.ui-datatable-tablewrapper > table > tbody > tr, div.dvnDifferanceTable > div.ui-datatable-tablewrapper > table > tbody > tr > td:first-child {border-left-width:0;} +div.dvnDifferanceTable > div.ui-datatable-tablewrapper > table > tbody > tr, div.dvnDifferanceTable > div.ui-datatable-tablewrapper > table > tbody > tr > td:last-child {border-right-width:0;} +div.dvnDifferanceTable div.ui-datatable-tablewrapper table td.versionValue {width:30%;} +div.dvnDifferanceTable div.ui-datatable-tablewrapper table td.versionDetails {width:35%;} + +div.dvnDifferanceTable .diffDetailBlock {display:block;} + .dropin-btn-status.ui-icon {background: url("https://www.dropbox.com/static/images/widgets/dbx-saver-status.png") no-repeat;} /* Overwrite Primefaces */ diff --git a/src/main/webapp/resources/iqbs/messages.xhtml b/src/main/webapp/resources/iqbs/messages.xhtml index 84fd4ec46d3..a5253f61923 100644 --- a/src/main/webapp/resources/iqbs/messages.xhtml +++ b/src/main/webapp/resources/iqbs/messages.xhtml @@ -1,7 +1,7 @@ diff --git a/src/main/webapp/resources/js/dv_rebind_bootstrap_ui.js b/src/main/webapp/resources/js/dv_rebind_bootstrap_ui.js index 320bad32b9e..462ced1e5ee 100644 --- a/src/main/webapp/resources/js/dv_rebind_bootstrap_ui.js +++ b/src/main/webapp/resources/js/dv_rebind_bootstrap_ui.js @@ -1,5 +1,5 @@ /* - * Rebind bootstrap UI components + * Rebind bootstrap UI components after Primefaces ajax calls */ function bind_bsui_components(){ //console.log('bind_bsui_components'); @@ -26,16 +26,67 @@ function bind_bsui_components(){ } /* - * Called after "Edit Dataverse" + * show breadcrumb navigation + */ +function show_breadcrumb(){ + $('#breadcrumbNavBlock').show(); +} + +/* + * hide breadcrumb navigation + */ +function hide_breadcrumb(){ + $('#breadcrumbNavBlock').hide(); +} + +/* + * Hide notification message + */ +function hide_info_msg(){ + if 
($('div.messagePanel').length > 0){ + $('div.messagePanel').html(''); + } +} + +/* + * Show notification message + */ +function show_info_msg(mtitle, mtext){ + if ($('div.messagePanel').length > 0){ + // alert('msg panel exists'); + edit_msg = '
    ' + + '' + + ' ' + mtitle + ' – ' + mtext + '
    '; + $('div.messagePanel').html(edit_msg ); + }else{ + //console.log('message panel does not exist'); + } +} + + +/* + * Called after "Edit Dataverse" - "General Information" + */ +function post_edit_dv_general_info(){ + show_info_msg('Edit Dataverse', 'Edit your dataverse and click Save Changes. Asterisks indicate required fields.'); + post_edit_dv(); +} + +/* + * Called after "Edit Dataverse" - "Setup" + */ +function post_edit_dv_setup(){ + show_info_msg('Dataverse Setup', 'Edit the Metadata Blocks and Facets you want to associate with your dataverse. Note: facets will appear in the order shown on the list.'); + post_edit_dv(); +} +/* + * Called after "Edit Dataverse" - "General Information" or "Setup" */ function post_edit_dv(){ - hide_breadcrumb(); - bind_bsui_components(); - var dv_srch_panel = $('#dv-sidecolumn').parent(); - if (dv_srch_panel.length > 0){ - dv_srch_panel.hide(); - } + hide_search_panels(); + bind_bsui_components(); + //console.log('hide after edit3'); } /* @@ -43,11 +94,35 @@ function post_edit_dv(){ */ function post_cancel_edit_dv(){ show_breadcrumb(); + show_search_panels() + hide_info_msg(); bind_bsui_components(); - var dv_srch_panel = $('#dv-sidecolumn').parent(); - if (dv_srch_panel.length > 0){ - dv_srch_panel.show(); - } + //console.log('show after cancel edit3'); +} + +/* + * Hide search panels when editing a dv + */ +function hide_search_panels(){ + if($(".panelSerchForm").length>0){ + $(".panelSerchForm").hide(); + if($(".panelSerchForm").next().length>0){ + $(".panelSerchForm").next().hide(); + } + } +} + +/* + * Show search panels when cancel a dv edit + */ + +function show_search_panels(){ + if($(".panelSerchForm").length>0){ + if($(".panelSerchForm").next().length>0){ + $(".panelSerchForm").next().show(); + } + $(".panelSerchForm").show(); + } } /* @@ -78,36 +153,15 @@ function post_cancel_edit_files_or_metadata(){ //console.log('post_cancel_edit_metadata'); show_breadcrumb(); bind_bsui_components(); - hide_edit_msg(); -} 
-function show_breadcrumb(){ - $('#breadcrumbNavBlock').show(); // show breadcrumb navigation -} -function hide_breadcrumb(){ - $('#breadcrumbNavBlock').hide(); // hide breadcrumb navigation + hide_info_msg(); } /* - * Hide notification message + * Dialog Height-Scrollable */ -function hide_edit_msg(){ - if ($('div.messagePanel').length > 0){ - $('div.messagePanel').html(''); - } +function post_differences(){ + var dialogHeight = $('div[id$="detailsBlocks"].ui-dialog').outerHeight(); + var dialogHeader = $('div[id$="detailsBlocks"] .ui-dialog-titlebar').outerHeight(); + var dialogScroll = dialogHeight - dialogHeader; + $('div[id$="detailsBlocks"] .ui-dialog-content').css('height', dialogScroll); } - -/* - * Show notification message - */ -function show_info_msg(mtitle, mtext){ - if ($('div.messagePanel').length > 0){ - // alert('msg panel exists'); - edit_msg = '
    ' - + '' - + ' ' + mtitle + ' – ' + mtext + '
    '; - $('div.messagePanel').html(edit_msg ); - }else{ - //console.log('message panel does not exist'); - } -} - diff --git a/src/main/webapp/search-include-fragment.xhtml b/src/main/webapp/search-include-fragment.xhtml index f857f565ff1..21de0bf0f2d 100644 --- a/src/main/webapp/search-include-fragment.xhtml +++ b/src/main/webapp/search-include-fragment.xhtml @@ -238,21 +238,24 @@
    - - - - - - - - - - - - - - - + +
    + + + + + + + + + + + + + + +
    +