diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index c356828dffd..4e5f14a137f 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -199,6 +199,170 @@ Delete a Private URL from a dataset (if it exists):: DELETE http://$SERVER/api/datasets/$id/privateUrl?key=$apiKey +Add a file to an existing Dataset. Description and tags are optional:: + + PUT http://$SERVER/api/datasets/$id/add?key=$apiKey + + +Example python code to add a file. This may be run by changing these parameters in the sample code: + +* ``dataverse_server`` - e.g. https://dataverse.harvard.edu +* ``api_key`` - See the top of this document for a description +* ``persistentId`` - Example: ``doi:10.5072/FK2/6XACVA`` +* ``dataset_id`` - Database id of the dataset + +In practice, you only need one the ``dataset_id`` or the ``persistentId``. The example below shows both uses. + +.. code-block:: python + + from datetime import datetime + import json + import requests # http://docs.python-requests.org/en/master/ + + # -------------------------------------------------- + # Update the 4 params below to run this code + # -------------------------------------------------- + dataverse_server = 'https://your dataverse server' # no trailing slash + api_key = 'api key' + dataset_id = 1 # database id of the dataset + persistentId = 'doi:10.5072/FK2/6XACVA' # doi or hdl of the dataset + + # -------------------------------------------------- + # Prepare "file" + # -------------------------------------------------- + file_content = 'content: %s' % datetime.now() + files = {'file': ('sample_file.txt', file_content)} + + # -------------------------------------------------- + # Using a "jsonData" parameter, add optional description + file tags + # -------------------------------------------------- + params = dict(description='Blue skies!', + tags=['Lily', 'Rosemary', 'Jack of Hearts']) + + params_as_json_string = 
json.dumps(params) + + payload = dict(jsonData=params_as_json_string) + + # -------------------------------------------------- + # Add file using the Dataset's id + # -------------------------------------------------- + url_dataset_id = '%s/api/datasets/%s/add?key=%s' % (dataverse_server, dataset_id, api_key) + + # ------------------- + # Make the request + # ------------------- + print '-' * 40 + print 'making request: %s' % url_dataset_id + r = requests.post(url_dataset_id, data=payload, files=files) + + # ------------------- + # Print the response + # ------------------- + print '-' * 40 + print r.json() + print r.status_code + + # -------------------------------------------------- + # Add file using the Dataset's persistentId (e.g. doi, hdl, etc) + # -------------------------------------------------- + url_persistent_id = '%s/api/datasets/:persistentId/add?persistentId=%s&key=%s' % (dataverse_server, persistentId, api_key) + + # ------------------- + # Update the file content to avoid a duplicate file error + # ------------------- + file_content = 'content2: %s' % datetime.now() + files = {'file': ('sample_file2.txt', file_content)} + + + # ------------------- + # Make the request + # ------------------- + print '-' * 40 + print 'making request: %s' % url_persistent_id + r = requests.post(url_persistent_id, data=payload, files=files) + + # ------------------- + # Print the response + # ------------------- + print '-' * 40 + print r.json() + print r.status_code + +Files +~~~~~~~~~~~ + +Replace an existing file where ``id`` is the Database id of the file to replace:: + + POST http://$SERVER/api/files/{id}/replace?key=$apiKey + +Example python code to replace a file. This may be run by changing these parameters in the sample code: + +* ``dataverse_server`` - e.g. https://dataverse.harvard.edu +* ``api_key`` - See the top of this document for a description +* ``file_id`` - Database id of the file to replace (returned in the GET API for a Dataset) + +.. 
code-block:: python + + from datetime import datetime + import json + import requests # http://docs.python-requests.org/en/master/ + + # -------------------------------------------------- + # Update params below to run code + # -------------------------------------------------- + dataverse_server = 'http://127.0.0.1:8080' # no trailing slash + api_key = 'some key' + file_id = 1401 # id of the file to replace + + # -------------------------------------------------- + # Prepare replacement "file" + # -------------------------------------------------- + file_content = 'content: %s' % datetime.now() + files = {'file': ('replacement_file.txt', file_content)} + + # -------------------------------------------------- + # Using a "jsonData" parameter, add optional description + file tags + # -------------------------------------------------- + params = dict(description='Sunset', + tags=['One', 'More', 'Cup of Coffee']) + + # ------------------- + # IMPORTANT: If the mimetype of the replacement file differs + # from the origina file, the replace will fail + # + # e.g. 
if you try to replace a ".csv" with a ".png" or something similar + # + # You can override this with a "forceReplace" parameter + # ------------------- + params['forceReplace'] = True + + + params_as_json_string = json.dumps(params) + + payload = dict(jsonData=params_as_json_string) + + print 'payload', payload + # -------------------------------------------------- + # Replace file using the id of the file to replace + # -------------------------------------------------- + url_replace = '%s/api/v1/files/%s/replace?key=%s' % (dataverse_server, file_id, api_key) + + # ------------------- + # Make the request + # ------------------- + print '-' * 40 + print 'making request: %s' % url_replace + r = requests.post(url_replace, data=payload, files=files) + + # ------------------- + # Print the response + # ------------------- + print '-' * 40 + print r.json() + print r.status_code + + + Builtin Users ~~~~~~~~~~~~~ diff --git a/pom.xml b/pom.xml index 7812b866812..162191db3f3 100644 --- a/pom.xml +++ b/pom.xml @@ -403,6 +403,18 @@ true jar + + + org.glassfish.jersey.containers + jersey-container-servlet + 2.23.2 + + + + org.glassfish.jersey.media + jersey-media-multipart + 2.23.2 + diff --git a/scripts/database/upgrades/3354-alt-checksum.sql b/scripts/database/upgrades/upgrade_v4.5.1_to_v4.6.sql similarity index 53% rename from scripts/database/upgrades/3354-alt-checksum.sql rename to scripts/database/upgrades/upgrade_v4.5.1_to_v4.6.sql index 42956fcc65d..51d6684a3af 100644 --- a/scripts/database/upgrades/3354-alt-checksum.sql +++ b/scripts/database/upgrades/upgrade_v4.5.1_to_v4.6.sql @@ -1,3 +1,4 @@ +-- For supporting SHA1 rather than MD5 as a checksum on a per file basis #3354 ALTER TABLE datafile ADD COLUMN checksumtype character varying(255); UPDATE datafile SET checksumtype = 'MD5'; ALTER TABLE datafile ALTER COLUMN checksumtype SET NOT NULL; @@ -5,3 +6,8 @@ ALTER TABLE datafile ALTER COLUMN checksumtype SET NOT NULL; -- note that in the database we use "SHA1" (no 
hyphen) but the GUI will show "SHA-1" --UPDATE datafile SET checksumtype = 'SHA1'; ALTER TABLE datafile RENAME md5 TO checksumvalue; +-- For DataFile, file replace functionality: +ALTER TABLE datafile ADD COLUMN rootdatafileid bigint default -1; +ALTER TABLE datafile ADD COLUMN previousdatafileid bigint default null; +-- For existing DataFile objects, update rootDataFileId values: +UPDATE datafile SET rootdatafileid = -1; diff --git a/scripts/search/data/replace_test/003.txt b/scripts/search/data/replace_test/003.txt new file mode 100644 index 00000000000..e440e5c8425 --- /dev/null +++ b/scripts/search/data/replace_test/003.txt @@ -0,0 +1 @@ +3 \ No newline at end of file diff --git a/scripts/search/data/replace_test/004.txt b/scripts/search/data/replace_test/004.txt new file mode 100644 index 00000000000..bf0d87ab1b2 --- /dev/null +++ b/scripts/search/data/replace_test/004.txt @@ -0,0 +1 @@ +4 \ No newline at end of file diff --git a/scripts/search/data/replace_test/005.txt b/scripts/search/data/replace_test/005.txt new file mode 100644 index 00000000000..7813681f5b4 --- /dev/null +++ b/scripts/search/data/replace_test/005.txt @@ -0,0 +1 @@ +5 \ No newline at end of file diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties index 02fbce1af3d..b91e49534ee 100755 --- a/src/main/java/Bundle.properties +++ b/src/main/java/Bundle.properties @@ -1427,3 +1427,46 @@ citationFrame.banner.message.here=here citationFrame.banner.closeIcon=Close this message, go to dataset citationFrame.banner.countdownMessage= This message will close in citationFrame.banner.countdownMessage.seconds=seconds + + +# File metadata error +file.metadata.datafiletag.not_tabular=You cannot add data file tags to a non-tabular file. + +# File Add/Replace operation messages +file.addreplace.error.dataset_is_null=The dataset cannot be null. +file.addreplace.error.dataset_id_is_null=The dataset ID cannot be null. 
+find.dataset.error.dataset_id_is_null=When accessing a dataset based on persistent id, a {0} query parameter must be present +find.dataset.error.dataset.not.found.persistentId=Dataset with persistent id {0} not found +find.dataset.error.dataset.not.found.id=Dataset with id {0} not found +find.dataset.error.dataset.not.found.bad.id=Bad dataset id number: {0} +file.addreplace.error.dataset_id_not_found=There was no dataset found for id: +file.addreplace.error.no_edit_dataset_permission=You do not have permission to edit this dataset. +file.addreplace.error.filename_is_null=The fileName cannot be null. +file.addreplace.error.file_content_type_is_null=The file content type cannot be null. +file.addreplace.error.file_input_stream_is_null=The file upload cannot be null. +file.addreplace.error.duplicate_file=This file has a duplicate already in the dataset: +file.addreplace.error.existing_file_to_replace_id_is_null=The id of the existing file to replace cannot be null +file.addreplace.error.existing_file_to_replace_not_found_by_id=Replacement file not found. There was no file found for id: +file.addreplace.error.existing_file_to_replace_is_null=The existing file to replace cannot be null +file.addreplace.error.existing_file_to_replace_not_in_dataset=The existing file to replace does not belong to this dataset +file.addreplace.error.existing_file_not_in_latest_published_version=You cannot replace a file that is not in the most recently published Dataset. (The file is unpublished or was deleted from a previous version.) +file.addreplace.error.replace.new_file_same_as_replacement=The new file contains the same content as the file to be replaced. +file.addreplace.error.replace.new_file_has_different_content_type=Warning! The new and old file have different content types. +file.addreplace.error.unpublished_file_cannot_be_replaced=You cannot replace an unpublished file. Please delete it instead of replacing it. 
+file.addreplace.error.ingest_create_file_err=There was an error when trying to add the new file. +file.addreplace.error.initial_file_list_empty=Sorry! An error occurred and the new file was not added. +file.addreplace.error.initial_file_list_more_than_one=You cannot replace a single file with multiple files. The file you uploaded was ingested into multiple files. +file.addreplace.error.final_file_list_empty=There are no files to add. (This error should not happen if steps called in sequence....) +file.addreplace.error.only_replace_operation=This should ONLY be called for file replace operations!! +file.addreplace.error.failed_to_remove_old_file_from_dataset=Unable to remove old file from new DatasetVersion. +file.addreplace.error.add.command_engine_error=Failed to update the dataset. Please contact the administrator. (CommandException) +file.addreplace.error.add.ejb_exception=Failed to update the dataset. Please contact the administrator. (EJBException) +file.addreplace.error.replace.command_engine_error=Failed to update the dataset. Please contact the administrator. (CommandException) +file.addreplace.error.replace.ejb_exception=Failed to update the dataset. Please contact the administrator. (EJBException) +file.addreplace.error.remove_linked_file.dataset=dataset cannot be null in removeLinkedFileFromDataset +file.addreplace.error.remove_linked_file.file=file cannot be null in removeLinkedFileFromDataset +file.addreplace.error.phase2_called_early_no_new_files=There was an error saving the dataset--no new files found. +file.addreplace.success.add=File successfully added! +file.addreplace.success.replace=File successfully replaced! +file.addreplace.error.auth=The API key is invalid. 
+file.addreplace.error.invalid_datafile_tag=Not a valid Tabular Tag: \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java index 76ca90e8038..98b3888c315 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java @@ -1,5 +1,11 @@ package edu.harvard.iq.dataverse; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.annotations.Expose; +import com.google.gson.annotations.SerializedName; import edu.harvard.iq.dataverse.DatasetVersion.VersionState; import edu.harvard.iq.dataverse.api.WorldMapRelatedData; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; @@ -18,6 +24,11 @@ import java.nio.file.Paths; import java.nio.file.Files; import java.util.Arrays; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; +import javax.json.Json; +import javax.json.JsonArrayBuilder; import javax.persistence.Entity; import javax.persistence.OneToMany; import javax.persistence.OneToOne; @@ -56,13 +67,19 @@ public class DataFile extends DvObject implements Comparable { public static final char INGEST_STATUS_INPROGRESS = 67; public static final char INGEST_STATUS_ERROR = 68; + public static final Long ROOT_DATAFILE_ID_DEFAULT = new Long(-1); + private String name; + @Expose @NotBlank @Column( nullable = false ) @Pattern(regexp = "^.*/.*$", message = "Content-Type must contain a slash") private String contentType; + + @Expose + @SerializedName("storageIdentifier") @Column( nullable = false ) private String fileSystemName; @@ -105,6 +122,7 @@ public String toString() { } } + //@Expose @Column(nullable = false) @Enumerated(EnumType.STRING) private ChecksumType checksumType; @@ -113,12 +131,37 @@ public String toString() { * Examples include "f622da34d54bdc8ee541d6916ac1c16f" as an 
MD5 value or * "3a484dfdb1b429c2e15eb2a735f1f5e4d5b04ec6" as a SHA-1 value" */ + //@Expose @Column(nullable = false) private String checksumValue; + + /* start: FILE REPLACE ATTRIBUTES */ + + // For the initial version of a file, this will be equivalent to the ID + // Default is -1 until the intial id is generated + @Expose + @Column(nullable=false) + private Long rootDataFileId; + + /** + * @todo We should have consistency between "Id" vs "ID" for rootDataFileId + * vs. previousDataFileId. + */ + // null for initial version; subsequent versions will point to the previous file + // + @Expose + @Column(nullable=true) + private Long previousDataFileId; + /* endt: FILE REPLACE ATTRIBUTES */ + + + + @Expose @Column(nullable=true) private Long filesize; // Number of bytes in file. Allows 0 and null, negative numbers not permitted + @Expose private boolean restricted; /* @@ -159,11 +202,23 @@ public void setGuestbookResponses(List guestbookResponses) { public DataFile() { this.fileMetadatas = new ArrayList<>(); + initFileReplaceAttributes(); } public DataFile(String contentType) { this.contentType = contentType; this.fileMetadatas = new ArrayList<>(); + initFileReplaceAttributes(); + } + + + /** + * All constructors should use this method + * to intitialize this file replace attributes + */ + private void initFileReplaceAttributes(){ + this.rootDataFileId = ROOT_DATAFILE_ID_DEFAULT; + this.previousDataFileId = null; } // The dvObject field "name" should not be used in @@ -211,6 +266,64 @@ public List getTags() { return dataFileTags; } + public List getTagLabels(){ + + List currentDataTags = this.getTags(); + List tagStrings = new ArrayList<>(); + + if (( currentDataTags != null)||(!currentDataTags.isEmpty())){ + + Iterator itr = currentDataTags.iterator(); + while (itr.hasNext()){ + DataFileTag element = (DataFileTag)itr.next(); + tagStrings.add(element.getTypeLabel()); + } + } + return tagStrings; + } + + public JsonArrayBuilder getTagLabelsAsJsonArrayBuilder(){ + + 
List currentDataTags = this.getTags(); + + JsonArrayBuilder builder = Json.createArrayBuilder(); + + if ( (currentDataTags == null)||(currentDataTags.isEmpty())){ + return builder; + } + + + Iterator itr = currentDataTags.iterator(); + while (itr.hasNext()){ + DataFileTag element = (DataFileTag)itr.next(); + builder.add(element.getTypeLabel()); + } + return builder; + } + + /** + * Return a list of Tag labels + * + * If there are none, return an empty list + * + * @return + */ + /* + public List getTagsLabelsOnly() { + + List tags = this.getTags(); + + if (tags == null){ + return new ArrayList(); + } + + return tags.stream() + .map(x -> x.getTypeLabel()) + .collect(Collectors.toList()) + ; + } + */ + public void setTags(List dataFileTags) { this.dataFileTags = dataFileTags; } @@ -407,6 +520,7 @@ public void setFilesize(long filesize) { /** * Converts the stored size of the file in bytes to * a user-friendly value in KB, MB or GB. + * @return */ public String getFriendlySize() { return FileUtil.getFriendlySize(filesize); @@ -420,6 +534,15 @@ public void setRestricted(boolean restricted) { this.restricted = restricted; } + + + + + + + + + public ChecksumType getChecksumType() { return checksumType; } @@ -693,4 +816,149 @@ public boolean hasGeospatialTag(){ } return false; } -} + + + /** + * Set rootDataFileId + * @param rootDataFileId + */ + public void setRootDataFileId(Long rootDataFileId){ + this.rootDataFileId = rootDataFileId; + } + + /** + * Get for rootDataFileId + * @return Long + */ + public Long getRootDataFileId(){ + return this.rootDataFileId; + } + + + /** + * Set previousDataFileId + * @param previousDataFileId + */ + public void setPreviousDataFileId(Long previousDataFileId){ + this.previousDataFileId = previousDataFileId; + } + + /** + * Get for previousDataFileId + * @return Long + */ + public Long getPreviousDataFileId(){ + return this.previousDataFileId; + } + + public String asPrettyJSON(){ + + return serializeAsJSON(true); + } + + public String 
asJSON(){ + + return serializeAsJSON(false); + } + + + + public JsonObject asGsonObject(boolean prettyPrint){ + + String overarchingKey = "data"; + + GsonBuilder builder; + if (prettyPrint){ // Add pretty printing + builder = new GsonBuilder().excludeFieldsWithoutExposeAnnotation().setPrettyPrinting(); + }else{ + builder = new GsonBuilder().excludeFieldsWithoutExposeAnnotation(); + } + + builder.serializeNulls(); // correctly capture nulls + Gson gson = builder.create(); + + // ---------------------------------- + // serialize this object + add the id + // ---------------------------------- + JsonElement jsonObj = gson.toJsonTree(this); + jsonObj.getAsJsonObject().addProperty("id", this.getId()); + + // ---------------------------------- + // get the FileMetadata object + // ---------------------------------- + FileMetadata thisFileMetadata = this.getFileMetadata(); + + // ---------------------------------- + // Add dataset info + // ---------------------------------- + + Map datasetMap = new HashMap<>(); + // expensive call.......bleh!!! 
+ // https://github.com/IQSS/dataverse/issues/761, https://github.com/IQSS/dataverse/issues/2110, https://github.com/IQSS/dataverse/issues/3191 + // + datasetMap.put("title", thisFileMetadata.getDatasetVersion().getTitle()); + datasetMap.put("persistentId", getOwner().getGlobalId()); + datasetMap.put("url", getOwner().getPersistentURL()); + datasetMap.put("version", thisFileMetadata.getDatasetVersion().getSemanticVersion()); + datasetMap.put("id", getOwner().getId()); + datasetMap.put("isPublished", thisFileMetadata.getDatasetVersion().isReleased()); + + jsonObj.getAsJsonObject().add("dataset", gson.toJsonTree(datasetMap)); + + // ---------------------------------- + // Add dataverse info + // ---------------------------------- + Map dataverseMap = new HashMap<>(); + Dataverse dv = this.getOwner().getOwner(); + + dataverseMap.put("name", dv.getName()); + dataverseMap.put("alias", dv.getAlias()); + dataverseMap.put("id", dv.getId()); + + jsonObj.getAsJsonObject().add("dataverse", gson.toJsonTree(dataverseMap)); + + // ---------------------------------- + // Add label (filename), description, and categories from the FileMetadata object + // ---------------------------------- + + jsonObj.getAsJsonObject().addProperty("filename", thisFileMetadata.getLabel()); + jsonObj.getAsJsonObject().addProperty("description", thisFileMetadata.getDescription()); + jsonObj.getAsJsonObject().add("categories", + gson.toJsonTree(thisFileMetadata.getCategoriesByName()) + ); + + // ---------------------------------- + // Tags + // ---------------------------------- + jsonObj.getAsJsonObject().add("tags", gson.toJsonTree(getTagLabels())); + + // ---------------------------------- + // Checksum + // ---------------------------------- + Map checkSumMap = new HashMap(); + checkSumMap.put("type", getChecksumType().toString()); + checkSumMap.put("value", getChecksumValue()); + + JsonElement checkSumJSONMap = gson.toJsonTree(checkSumMap); + + jsonObj.getAsJsonObject().add("checksum", 
checkSumJSONMap); + + return jsonObj.getAsJsonObject(); + + } + + /** + * + * @param prettyPrint + * @return + */ + private String serializeAsJSON(boolean prettyPrint){ + + JsonObject fullFileJSON = asGsonObject(prettyPrint); + + //return fullFileJSON. + return fullFileJSON.toString(); + + } + +} // end of class diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java index c41b2aa1caf..24423b4efbd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java @@ -23,6 +23,7 @@ import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; import java.util.UUID; import java.util.logging.Level; @@ -610,7 +611,7 @@ public void findFileMetadataOptimizedExperimental(Dataset owner, DatasetVersion categoryMap.put(fileCategory.getId(), i++); } - logger.fine("Retreived "+i+" file categories attached to the dataset."); + logger.fine("Retrieved "+i+" file categories attached to the dataset."); if (requestedVersion != null) { requestedVersion.setFileMetadatas(retrieveFileMetadataForVersion(owner, requestedVersion, filesMap, categoryMap)); @@ -743,11 +744,57 @@ public List findAll() { } public DataFile save(DataFile dataFile) { - + + // save datafile DataFile savedDataFile = em.merge(dataFile); + + // Set the initial value of the rootDataFileId + // (does nothing if it's already set) + //savedDataFile = setAndCheckFileReplaceAttributes(savedDataFile); + return savedDataFile; } + private void msg(String m){ + System.out.println(m); + } + private void dashes(){ + msg("----------------"); + } + private void msgt(String m){ + dashes(); msg(m); dashes(); + } + + /* + Make sure the file replace ids are set for a initial version + of a file + + */ + public DataFile setAndCheckFileReplaceAttributes(DataFile savedDataFile){ + + // Is this the initial version 
of a file? + + if ((savedDataFile.getRootDataFileId() == null)|| + (savedDataFile.getRootDataFileId().equals(DataFile.ROOT_DATAFILE_ID_DEFAULT))){ + msg("yes, initial version"); + + // YES! Set the RootDataFileId to the Id + savedDataFile.setRootDataFileId(savedDataFile.getId()); + + // SAVE IT AGAIN!!! + msg("yes, save again"); + + return em.merge(savedDataFile); + + }else{ + // Looking Good Billy Ray! Feeling Good Louis! + msg("nope, looks ok"); + + return savedDataFile; + } + } + + public Boolean isPreviouslyPublished(Long fileId){ Query query = em.createQuery("select object(o) from FileMetadata as o where o.dataFile.id =:fileId"); query.setParameter("fileId", fileId); @@ -767,10 +814,19 @@ public void deleteFromVersion( DatasetVersion d, DataFile f ) { */ public FileMetadata mergeFileMetadata(FileMetadata fileMetadata) { - return em.merge(fileMetadata); + + FileMetadata newFileMetadata = em.merge(fileMetadata); + em.flush(); + + // Set the initial value of the rootDataFileId + // (does nothing if it's already set) + //DataFile updatedDataFile = setAndCheckFileReplaceAttributes(newFileMetadata.getDataFile()); + + return newFileMetadata; } public void removeFileMetadata(FileMetadata fileMetadata) { + msgt("removeFileMetadata: fileMetadata"); FileMetadata mergedFM = em.merge(fileMetadata); em.remove(mergedFM); } @@ -908,7 +964,9 @@ public boolean isThumbnailAvailable (DataFile file) { if (ImageThumbConverter.isThumbnailAvailable(file)) { file = this.find(file.getId()); file.setPreviewImageAvailable(true); - file = em.merge(file); + msgt("OVER HERE_----------"); + msg("bleh....."); + file = this.save(file); //em.merge(file); // (should this be done here? - TODO:) return true; } @@ -1221,4 +1279,74 @@ public void populateFileSearchCard(SolrSearchResult solrSearchResult) { solrSearchResult.setEntity(this.findCheapAndEasy(solrSearchResult.getEntityId())); } + + /** + * Does this file have a replacement. 
+ * Any file should have AT MOST 1 replacement + * + * @param df + * @return + */ + public boolean hasReplacement(DataFile df) throws Exception{ + + if (df.getId() == null){ + // An unsaved file cannot have a replacment + return false; + } + + + TypedQuery query = em.createQuery("select o from DataFile o" + + " WHERE o.previousVersionId = :dataFileId;", DataFile.class); + query.setParameter("dataFileId", df.getId()); + //query.setMaxResults(maxResults); + + List dataFiles = query.getResultList(); + + if (dataFiles.size() == 0){ + return false; + } + + if (!df.isReleased()){ + // An unpublished SHOULD NOT have a replacment + String errMsg = "DataFile with id: [" + df.getId() + "] is UNPUBLISHED with a REPLACEMENT. This should NOT happen."; + logger.severe(errMsg); + + throw new Exception(errMsg); + } + + + + else if (dataFiles.size() == 1){ + return true; + }else{ + + String errMsg = "DataFile with id: [" + df.getId() + "] has more than one replacment!"; + logger.severe(errMsg); + + throw new Exception(errMsg); + } + + } + + /** + * Is this a replacement file?? + * + * The indication of a previousDataFileId says that it is + * + * @param df + * @return + * @throws Exception + */ + public boolean isReplacementFile(DataFile df) throws Exception{ + if (df.getPreviousDataFileId() == null){ + return false; + }else if (df.getPreviousDataFileId() < 1){ + logger.severe("Stop! previousDataFileId should either be null or a number greater than 0"); + //return false; + // blow up -- this shouldn't happen! 
+ throw new Exception("previousDataFileId should either be null or a number greater than 0"); + }else{ + return true; + } + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java b/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java index d1fc22008bc..c9b3f85888a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java @@ -20,6 +20,7 @@ import javax.persistence.JoinColumn; import javax.persistence.ManyToOne; import javax.persistence.Table; +import org.apache.commons.lang.StringUtils; /** * @@ -167,4 +168,45 @@ public String toString() { return "edu.harvard.iq.dataverse.DataFileTag[ id=" + id + " ]"; } + + /** + * Static method to check whether a string is a valid tag + * + * Used for API check + * + * @param tagString + * @return + */ + public static boolean isDataFileTag(String label){ + + if (label == null){ + throw new NullPointerException("label cannot be null"); + } + + if (TagLabelToTypes.containsKey(label)){ + return true; + } + + return false; + } + + public TagType getDataFileTagFromLabel(String label){ + + if (!TagLabelToTypes.containsKey(label)){ + return null; + } + + return TagLabelToTypes.get(label); + } + + + public static List getListofLabels(){ + + return new ArrayList<>(TagTypeToLabels.values()); + } + + public static String getListofLabelsAsString(){ + + return StringUtils.join(DataFileTag.getListofLabels(), ", "); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index fb4044d7ea4..37b79e0c516 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -8,6 +8,9 @@ import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser; import edu.harvard.iq.dataverse.authorization.users.GuestUser; +import 
edu.harvard.iq.dataverse.datasetutility.DuplicateFileChecker; +import edu.harvard.iq.dataverse.datasetutility.TwoRavensHelper; +import edu.harvard.iq.dataverse.datasetutility.WorldMapPermissionHelper; import edu.harvard.iq.dataverse.datavariable.VariableServiceBean; import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; @@ -207,13 +210,15 @@ public enum DisplayMode { private List versionTabList = new ArrayList(); private List versionTabListForPostLoad = new ArrayList(); + // Used to help with displaying buttons related to the WorldMap + private WorldMapPermissionHelper worldMapPermissionHelper; + + // Used to help with displaying buttons related to TwoRavens + private TwoRavensHelper twoRavensHelper; // Used to store results of permissions checks private final Map datasetPermissionMap = new HashMap<>(); // { Permission human_name : Boolean } private final Map fileDownloadPermissionMap = new HashMap<>(); // { FileMetadata.id : Boolean } - - private final Map fileMetadataTwoRavensExploreMap = new HashMap<>(); // { FileMetadata.id : Boolean } - private final Map fileMetadataWorldMapExplore = new HashMap<>(); // { FileMetadata.id : Boolean } private DataFile selectedDownloadFile; @@ -627,9 +632,7 @@ public boolean doesSessionUserHaveDataSetPermission(Permission permissionToCheck public void setNoDVsRemaining(boolean noDVsRemaining) { this.noDVsRemaining = noDVsRemaining; } - - private final Map mapLayerMetadataLookup = new HashMap<>(); - + private GuestbookResponse guestbookResponse; private Guestbook selectedGuestbook; @@ -964,50 +967,88 @@ public void handleChange() { public void handleChangeButton() { + } + + private void msg(String s){ + // System.out.println(s); } + + + + /** + * For development + * + * Flag for whether to show sample insert statements for Geoconnect Debug + * + * Conditions to meet: Person is superuser and GeoconnectDebug active + * + * @return + */ + public boolean 
isGeoconnectDebugAvailable(){ - public boolean isShapefileType(FileMetadata fm) { - if (fm == null) { - return false; - } - if (fm.getDataFile() == null) { + if (!this.isSuperUser()){ return false; } - return fm.getDataFile().isShapefileType(); + if (settingsService.isTrueForKey(SettingsServiceBean.Key.GeoconnectDebug, false)){ + return true; + } + return false; + } - - /* - Check if the FileMetadata.dataFile has an associated MapLayerMetadata object - The MapLayerMetadata objects have been fetched at page inception by "loadMapLayerMetadataLookup()" + + /** + * This object wraps methods used for hiding/displaying WorldMap related messages + * */ - public boolean hasMapLayerMetadata(FileMetadata fm) { - if (fm == null) { + private void loadTwoRavensHelper() { + + twoRavensHelper = new TwoRavensHelper(settingsService, permissionService); + + } + + public boolean canSeeTwoRavensExploreButton(FileMetadata fm){ + if (fm == null){ return false; } - if (fm.getDataFile() == null) { + if (twoRavensHelper == null){ return false; } - return doesDataFileHaveMapLayerMetadata(fm.getDataFile()); + + return twoRavensHelper.canSeeTwoRavensExploreButtonFromPage(fm); } - + + + public String getDataExploreURL() { + if (twoRavensHelper == null){ + return ""; + } + return twoRavensHelper.getDataExploreURL(); + } + + + public String getDataExploreURLComplete(Long fileid) { + if (twoRavensHelper == null){ + return ""; + } + return twoRavensHelper.getDataExploreURLComplete(fileid, getApiTokenKey()); + + + // return TwoRavensDefaultLocal + fileid + "&" + getApiTokenKey(); + } + + /** - * Check if a DataFile has an associated MapLayerMetadata object + * This object wraps methods used for hiding/displaying WorldMap related messages * - * The MapLayerMetadata objects have been fetched at page inception by - * "loadMapLayerMetadataLookup()" */ - private boolean doesDataFileHaveMapLayerMetadata(DataFile df) { - if (df == null) { - return false; - } - if (df.getId() == null) { - return false; - } 
- return this.mapLayerMetadataLookup.containsKey(df.getId()); + private void loadWorldMapPermissionHelper() { + + worldMapPermissionHelper = WorldMapPermissionHelper.getPermissionHelperForDatasetPage(settingsService, mapLayerMetadataService, dataset); + } - + /** * Using a DataFile id, retrieve an associated MapLayerMetadata object * @@ -1018,321 +1059,110 @@ public MapLayerMetadata getMapLayerMetadata(DataFile df) { if (df == null) { return null; } - return this.mapLayerMetadataLookup.get(df.getId()); - } - - private void msg(String s){ - // System.out.println(s); + return this.worldMapPermissionHelper.getMapLayerMetadata(df); } + + /** - * See table in: https://github.com/IQSS/dataverse/issues/1618 * - * Can the user see a reminder to publish button? - * (0) The application has to be set to Create Edit Maps - true - * (1) Logged in user - * (2) Is geospatial file? - * (3) File has NOT been released - * (4) No existing Map - * (5) Can Edit Dataset - * - * @param FileMetadata fm - * @return boolean + * WARNING: Check if the user has file download permission + * - This check is assumed when calling to the worldMapPermissionHelper + * + * Should the user be able to see the WorldMap Explore button? + * + * @param fm + * @return */ - public boolean canSeeMapButtonReminderToPublish(FileMetadata fm){ - if (fm==null){ - - return false; - } - - // (0) Is the view GeoconnectViewMaps - if (!settingsService.isTrueForKey(SettingsServiceBean.Key.GeoconnectCreateEditMaps, false)){ - return false; - } - - - // (1) Is there an authenticated user? - // - if (!(isSessionUserAuthenticated())){ - return false; - } - - - // Is this file a Shapefile or a Tabular file tagged as Geospatial? - // - if (!(this.isPotentiallyMappableFileType(fm))){ - return false; - } - - // (3) Is this DataFile released? 
Yes, don't need reminder - // - if (fm.getDataFile().isReleased()){ + public boolean canUserSeeExploreWorldMapButton(FileMetadata fm){ + if ((worldMapPermissionHelper == null)||(fm == null)){ return false; } - // (4) Does a map already exist? Yes, don't need reminder - // - if (this.hasMapLayerMetadata(fm)){ - return false; - } - - // (5) If so, can the logged in user edit the Dataset to which this FileMetadata belongs? - if (!this.doesSessionUserHaveDataSetPermission(Permission.EditDataset)){ - return false; - } - - // Looks good - // - return true; - } - - /** - * Should there be a Map Data Button for this file? - * see table in: https://github.com/IQSS/dataverse/issues/1618 - * (1) Is the user logged in? - * (2) Is this file a Shapefile or a Tabular file tagged as Geospatial? - * (3) Does the logged in user have permission to edit the Dataset to which this FileMetadata belongs? - * (4) Is the create Edit Maps flag set to true? - * (5) Any of these conditions: - * 9a) File Published - * (b) Draft: File Previously published - * @param fm FileMetadata - * @return boolean - */ - public boolean canUserSeeMapDataButton(FileMetadata fm){ - - if (fm==null){ - return false; - } - - - // (1) Is there an authenticated user? - if (!(isSessionUserAuthenticated())){ - return false; - } - - // (2) Is this file a Shapefile or a Tabular file tagged as Geospatial? - // TO DO: EXPAND FOR TABULAR FILES TAGGED AS GEOSPATIAL! - // - if (!(this.isPotentiallyMappableFileType(fm))){ - return false; - } - - // (3) Does the user have Edit Dataset permissions? + // You need to have download file permissions as a prereq! // - if (!this.doesSessionUserHaveDataSetPermission(Permission.EditDataset)){ + if (!this.canDownloadFile(fm)){ return false; - } + } - // (4) Is the view GeoconnectViewMaps - if (!settingsService.isTrueForKey(SettingsServiceBean.Key.GeoconnectCreateEditMaps, false)){ - return false; - } - - // (5) Is File released? 
- // - if (fm.getDataFile().isReleased()){ - return true; - } + return worldMapPermissionHelper.canUserSeeExploreWorldMapButtonFromPage(fm); - // Nope - return false; - } + } // end: canUserSeeExploreWorldMapButton /** - * Used in the .xhtml file to check whether a tabular file - * may be viewed via TwoRavens + * WARNING: Check if the user isAuthenicated AND has Permission.EditDataset + * - These checks are assumed when calling to the worldMapPermissionHelper + * + * If this is an unpublished Dataset with a mappable file, + * should the user see the "Reminder to Publish" button * * @param fm * @return */ - public boolean canSeeTwoRavensExploreButton(FileMetadata fm){ - - if (fm == null){ - return false; - } - - // Has this already been checked? - if (this.fileMetadataTwoRavensExploreMap.containsKey(fm.getId())){ - // Yes, return previous answer - //logger.info("using cached result for candownloadfile on filemetadata "+fid); - return this.fileMetadataTwoRavensExploreMap.get(fm.getId()); - } - + public boolean canSeeMapButtonReminderToPublish(FileMetadata fm){ - // (1) Is TwoRavens active via the "setting" table? - // Nope: get out - // - if (!settingsService.isTrueForKey(SettingsServiceBean.Key.TwoRavensTabularView, false)){ - this.fileMetadataTwoRavensExploreMap.put(fm.getId(), false); + if ((worldMapPermissionHelper == null)||(fm == null)){ return false; } - // (2) Does the user have download permission? - // Nope: get out + // Is this user authenticated with EditDataset permission? // - if (!(this.canDownloadFile(fm))){ - this.fileMetadataTwoRavensExploreMap.put(fm.getId(), false); + if (!(isUserAuthenticatedWithEditDatasetPermission())){ return false; - } - // (3) Is the DataFile object there and persisted? - // Nope: scat - // - if ((fm.getDataFile() == null)||(fm.getDataFile().getId()==null)){ - this.fileMetadataTwoRavensExploreMap.put(fm.getId(), false); - return false; - } - - // (4) Is there tabular data or is the ingest in progress? 
- // Yes: great - // - if ((fm.getDataFile().isTabularData())||(fm.getDataFile().isIngestInProgress())){ - this.fileMetadataTwoRavensExploreMap.put(fm.getId(), true); - return true; } - // Nope - this.fileMetadataTwoRavensExploreMap.put(fm.getId(), false); - return false; + return worldMapPermissionHelper.canSeeMapButtonReminderToPublishFromPage(fm); + - // (empty fileMetadata.dataFile.id) and (fileMetadata.dataFile.tabularData or fileMetadata.dataFile.ingestInProgress) - // and DatasetPage.canDownloadFile(fileMetadata) - } + } // end: canSeeMapButtonReminderToPublish /** - * Check if this is a mappable file type. + * WARNING: Check if the user isAuthenicated AND has Permission.EditDataset + * - These checks are assumed when calling to the worldMapPermissionHelper * - * Currently (2/2016) - * - Shapefile (zipped shapefile) - * - Tabular file with Geospatial Data tag + * Should the user be able to map this file? * * @param fm * @return */ - private boolean isPotentiallyMappableFileType(FileMetadata fm){ - if (fm==null){ + public boolean canUserSeeMapDataButton(FileMetadata fm){ + + if ((worldMapPermissionHelper == null)||(fm == null)){ return false; } - // Yes, it's a shapefile - // - if (this.isShapefileType(fm)){ - return true; - } - - // Yes, it's tabular with a geospatial tag + // Is this user authenticated with EditDataset permission? 
// - if (fm.getDataFile().isTabularData()){ - if (fm.getDataFile().hasGeospatialTag()){ - return true; - } - } - return false; - } - - - /** - * For development - * - * Flag for whether to show sample insert statements for Geoconnect Debug - * - * Conditions to meet: Person is superuser and GeoconnectDebug active - * - * @return - */ - public boolean isGeoconnectDebugAvailable(){ - - if (!this.isSuperUser()){ + if (!(isUserAuthenticatedWithEditDatasetPermission())){ return false; } - - if (settingsService.isTrueForKey(SettingsServiceBean.Key.GeoconnectDebug, false)){ - return true; - } - return false; - + + return worldMapPermissionHelper.canUserSeeMapDataButtonFromPage(fm); + } - /** - * Should there be a Explore WorldMap Button for this file? - * See table in: https://github.com/IQSS/dataverse/issues/1618 - * - * (1) Does the file have MapLayerMetadata? - * (2) Is there DownloadFile permission for this file? + * Is this user authenticated with EditDataset permission * - * @param fm FileMetadata - * @return boolean + * @return */ - public boolean canUserSeeExploreWorldMapButton(FileMetadata fm){ - if (fm==null){ - return false; - } + private boolean isUserAuthenticatedWithEditDatasetPermission(){ - if (this.fileMetadataWorldMapExplore.containsKey(fm.getId())){ - // Yes, return previous answer - //logger.info("using cached result for candownloadfile on filemetadata "+fid); - return this.fileMetadataWorldMapExplore.get(fm.getId()); + // Is the user authenticated? + // + if (!(isSessionUserAuthenticated())){ + return false; } - /* ----------------------------------------------------- - Does a Map Exist? - ----------------------------------------------------- */ - if (!(this.hasMapLayerMetadata(fm))){ - // Nope: no button - this.fileMetadataWorldMapExplore.put(fm.getId(), false); + // If so, can the logged in user edit the Dataset to which this FileMetadata belongs? 
+ // + if (!this.doesSessionUserHaveDataSetPermission(Permission.EditDataset)){ return false; } - - /* - Is setting for GeoconnectViewMaps true? - Nope? no button - */ - if (!settingsService.isTrueForKey(SettingsServiceBean.Key.GeoconnectViewMaps, false)){ - this.fileMetadataWorldMapExplore.put(fm.getId(), false); - return false; - } - /* ----------------------------------------------------- - Does user have DownloadFile permission for this file? - Yes: User can view button! - ----------------------------------------------------- */ - if (this.canDownloadFile(fm)){ - this.fileMetadataWorldMapExplore.put(fm.getId(), true); - return true; - } - - // Nope: Can't see button - // - this.fileMetadataWorldMapExplore.put(fm.getId(), false); - return false; + return true; } - - /** - * Create a hashmap consisting of { DataFile.id : MapLayerMetadata object} - * - * Very few DataFiles will have associated MapLayerMetadata objects so only - * use 1 query to get them - */ - private void loadMapLayerMetadataLookup() { - if (this.dataset == null) { - return; - } - if (this.dataset.getId() == null) { - return; - } - List mapLayerMetadataList = mapLayerMetadataService.getMapLayerMetadataForDataset(this.dataset); - if (mapLayerMetadataList == null) { - return; - } - for (MapLayerMetadata layer_metadata : mapLayerMetadataList) { - mapLayerMetadataLookup.put(layer_metadata.getDataFile().getId(), layer_metadata); - } - - }// A DataFile may have a related MapLayerMetadata object - - private List displayFileMetadata; @@ -1500,7 +1330,8 @@ private String init(boolean initFull) { //SEK - lazymodel may be needed for datascroller in future release // lazyModel = new LazyFileMetadataDataModel(workingVersion.getId(), datafileService ); // populate MapLayerMetadata - this.loadMapLayerMetadataLookup(); // A DataFile may have a related MapLayerMetadata object + this.loadWorldMapPermissionHelper(); // A DataFile may have a related MapLayerMetadata object + this.loadTwoRavensHelper(); } } else if 
(ownerId != null) { // create mode for a new child dataset @@ -2788,37 +2619,12 @@ public String cancel() { return returnToLatestVersion(); } + public boolean isDuplicate(FileMetadata fileMetadata) { - String thisMd5 = fileMetadata.getDataFile().getChecksumValue(); - if (thisMd5 == null) { - return false; - } - Map MD5Map = new HashMap(); - - // TODO: - // think of a way to do this that doesn't involve populating this - // map for every file on the page? - // man not be that much of a problem, if we paginate and never display - // more than a certain number of files... Still, needs to be revisited - // before the final 4.0. - // -- L.A. 4.0 - Iterator fmIt = workingVersion.getFileMetadatas().iterator(); - while (fmIt.hasNext()) { - FileMetadata fm = fmIt.next(); - String md5 = fm.getDataFile().getChecksumValue(); - if (md5 != null) { - if (MD5Map.get(md5) != null) { - MD5Map.put(md5, MD5Map.get(md5).intValue() + 1); - } else { - MD5Map.put(md5, 1); - } - } - } - - return MD5Map.get(thisMd5) != null && MD5Map.get(thisMd5).intValue() > 1; + return DuplicateFileChecker.isDuplicateOriginalWay(workingVersion, fileMetadata); } - + private HttpClient getClient() { // TODO: // cache the http client? -- L.A. 
4.0 alpha @@ -3366,40 +3172,7 @@ public Boolean isDatasetPublishPopupCustomTextOnAllVersions(){ return settingsService.isTrueForKey(SettingsServiceBean.Key.DatasetPublishPopupCustomTextOnAllVersions, false); } - public String getDataExploreURL() { - String TwoRavensUrl = settingsService.getValueForKey(SettingsServiceBean.Key.TwoRavensUrl); - if (TwoRavensUrl != null && !TwoRavensUrl.equals("")) { - return TwoRavensUrl; - } - - return ""; - } - - public String getDataExploreURLComplete(Long fileid) { - String TwoRavensUrl = settingsService.getValueForKey(SettingsServiceBean.Key.TwoRavensUrl); - String TwoRavensDefaultLocal = "/dataexplore/gui.html?dfId="; - - if (TwoRavensUrl != null && !TwoRavensUrl.equals("")) { - // If we have TwoRavensUrl set up as, as an optional - // configuration service, it must mean that TwoRavens is sitting - // on some remote server. And that in turn means that we must use - // full URLs to pass data and metadata to it. - // update: actually, no we don't want to use this "dataurl" notation. - // switching back to the dfId=: - // -- L.A. 
4.1 - /* - String tabularDataURL = getTabularDataFileURL(fileid); - String tabularMetaURL = getVariableMetadataURL(fileid); - return TwoRavensUrl + "?ddiurl=" + tabularMetaURL + "&dataurl=" + tabularDataURL + "&" + getApiTokenKey(); - */ - return TwoRavensUrl + "?dfId=" + fileid + "&" + getApiTokenKey(); - } - - // For a local TwoRavens setup it's enough to call it with just - // the file id: - return TwoRavensDefaultLocal + fileid + "&" + getApiTokenKey(); - } public String getVariableMetadataURL(Long fileid) { String myHostURL = getDataverseSiteUrl(); diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index d054b52ad81..c947000ece1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -819,4 +819,54 @@ public void populateDatasetSearchCard(SolrSearchResult solrSearchResult) { } } + /** + * Return a list of the checksum Strings for files in the specified DatasetVersion + * + * This is used to help check for duplicate files within a DatasetVersion + * + * @param datasetVersion + * @return a list of checksum Strings for files in the specified DatasetVersion + */ + public List getChecksumListForDatasetVersion(DatasetVersion datasetVersion) { + + if (datasetVersion == null){ + throw new NullPointerException("datasetVersion cannot be null"); + } + + String query = "SELECT df.md5 FROM datafile df, filemetadata fm WHERE fm.datasetversion_id = " + datasetVersion.getId() + " AND fm.datafile_id = df.id;"; + + logger.log(Level.FINE, "query: {0}", query); + Query nativeQuery = em.createNativeQuery(query); + List checksumList = nativeQuery.getResultList(); + + return checksumList; + } + + + /** + * Check for the existence of a single checksum value within a DatasetVersion's files + * + * @param datasetVersion + * @param selectedChecksum + * @return + */ + public 
boolean doesChecksumExistInDatasetVersion(DatasetVersion datasetVersion, String selectedChecksum) { + if (datasetVersion == null){ + throw new NullPointerException("datasetVersion cannot be null"); + } + + String query = "SELECT df.md5 FROM datafile df, filemetadata fm" + + " WHERE fm.datasetversion_id = " + datasetVersion.getId() + + " AND fm.datafile_id = df.id" + + " AND df.md5 = '" + selectedChecksum + "';"; + + Query nativeQuery = em.createNativeQuery(query); + List checksumList = nativeQuery.getResultList(); + + if (checksumList.size() > 0){ + return true; + } + return false; + } + } // end class diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index 86696771258..4a9557f1453 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -6,34 +6,16 @@ import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.Permission; -import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUserServiceBean; -import edu.harvard.iq.dataverse.authorization.users.ApiToken; -import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; -import edu.harvard.iq.dataverse.datavariable.VariableServiceBean; +import edu.harvard.iq.dataverse.datasetutility.DuplicateFileChecker; import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; -import edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetCommand; -import edu.harvard.iq.dataverse.engine.command.impl.CreateGuestbookResponseCommand; -import edu.harvard.iq.dataverse.engine.command.impl.DeaccessionDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.DeleteDataFileCommand; -import 
edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetVersionCommand; -import edu.harvard.iq.dataverse.engine.command.impl.DestroyDatasetCommand; -import edu.harvard.iq.dataverse.engine.command.impl.LinkDatasetCommand; -import edu.harvard.iq.dataverse.engine.command.impl.PublishDatasetCommand; -import edu.harvard.iq.dataverse.engine.command.impl.PublishDataverseCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetCommand; import edu.harvard.iq.dataverse.ingest.IngestRequest; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; -import edu.harvard.iq.dataverse.metadataimport.ForeignMetadataImportServiceBean; -import edu.harvard.iq.dataverse.search.FacetCategory; import edu.harvard.iq.dataverse.search.FileView; -import edu.harvard.iq.dataverse.search.SearchFilesServiceBean; -import edu.harvard.iq.dataverse.search.SolrSearchResult; -import edu.harvard.iq.dataverse.search.SortBy; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; -import edu.harvard.iq.dataverse.util.BundleUtil; -import edu.harvard.iq.dataverse.util.FileSortFieldAndOrder; import edu.harvard.iq.dataverse.util.JsfHelper; import static edu.harvard.iq.dataverse.util.JsfHelper.JH; import edu.harvard.iq.dataverse.util.StringUtil; @@ -44,24 +26,20 @@ import java.io.InputStream; import java.io.StringReader; import java.nio.file.Files; -import java.nio.file.Path; import java.nio.file.Paths; import java.sql.Timestamp; -import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.logging.Logger; import javax.ejb.EJB; import javax.ejb.EJBException; import javax.faces.application.FacesMessage; import javax.faces.context.FacesContext; import javax.faces.event.ActionEvent; -import javax.faces.event.ValueChangeEvent; import javax.faces.view.ViewScoped; import javax.inject.Inject; import javax.inject.Named; @@ -71,16 
+49,10 @@ import javax.json.JsonObject; import javax.json.JsonArray; import javax.json.JsonReader; -import javax.servlet.ServletOutputStream; -import javax.servlet.http.HttpServletResponse; -import javax.validation.ConstraintViolation; import org.apache.commons.httpclient.HttpClient; import org.apache.commons.httpclient.methods.GetMethod; -import org.primefaces.context.RequestContext; import java.text.DateFormat; import java.util.Arrays; -import java.util.HashSet; -import javax.faces.model.SelectItem; import java.util.logging.Level; import javax.faces.event.AjaxBehaviorEvent; @@ -1029,42 +1001,17 @@ public String cancel() { return returnToDatasetOnly(); } + /** + * Just moved to another class for now + * + * @param fileMetadata + * @return + */ public boolean isDuplicate(FileMetadata fileMetadata) { - String thisMd5 = fileMetadata.getDataFile().getChecksumValue(); - if (thisMd5 == null) { - return false; - } - - Map MD5Map = new HashMap(); - - // TODO: - // think of a way to do this that doesn't involve populating this - // map for every file on the page? - // man not be that much of a problem, if we paginate and never display - // more than a certain number of files... Still, needs to be revisited - // before the final 4.0. - // -- L.A. 4.0 - - // make a "defensive copy" to avoid java.util.ConcurrentModificationException from being thrown - // when uploading 100+ files - List wvCopy = new ArrayList<>(workingVersion.getFileMetadatas()); - Iterator fmIt = wvCopy.iterator(); - - while (fmIt.hasNext()) { - FileMetadata fm = fmIt.next(); - String md5 = fm.getDataFile().getChecksumValue(); - if (md5 != null) { - if (MD5Map.get(md5) != null) { - MD5Map.put(md5, MD5Map.get(md5).intValue() + 1); - } else { - MD5Map.put(md5, 1); - } - } - } - return MD5Map.get(thisMd5) != null && MD5Map.get(thisMd5).intValue() > 1; + return DuplicateFileChecker.isDuplicateOriginalWay(workingVersion, fileMetadata); } - + private HttpClient getClient() { // TODO: // cache the http client? 
-- L.A. 4.0 alpha @@ -1225,9 +1172,9 @@ public void handleDropBoxUpload(ActionEvent event) { public void handleFileUpload(FileUploadEvent event) { UploadedFile uFile = event.getFile(); + List dFileList = null; - try { // Note: A single file may be unzipped into multiple files dFileList = ingestService.createDataFiles(workingVersion, uFile.getInputstream(), uFile.getFileName(), uFile.getContentType()); @@ -1259,6 +1206,9 @@ private String processUploadedFileList(List dFileList){ boolean multipleDupes = false; String warningMessage = null; + // NOTE: for native file uploads, the dFileList will only + // contain 1 file--method is called for every file even if the UI shows "simultaneous uploads" + // ----------------------------------------------------------- // Iterate through list of DataFile objects // ----------------------------------------------------------- @@ -1285,9 +1235,11 @@ private String processUploadedFileList(List dFileList){ // ----------------------------------------------------------- // Check for duplicates -- e.g. 
file is already in the dataset // ----------------------------------------------------------- + if (!isDuplicate(dataFile.getFileMetadata())) { newFiles.add(dataFile); // looks good fileMetadatas.add(dataFile.getFileMetadata()); + } else { if (duplicateFileNames == null) { duplicateFileNames = dataFile.getFileMetadata().getLabel(); @@ -1319,7 +1271,7 @@ private String processUploadedFileList(List dFileList){ } } } - + // ----------------------------------------------------------- // Formate error message for duplicate files // ----------------------------------------------------------- diff --git a/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java b/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java index 7231a457264..cee4378c852 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java @@ -1,5 +1,10 @@ package edu.harvard.iq.dataverse; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.annotations.Expose; import java.io.Serializable; import java.util.ArrayList; import java.util.Collection; @@ -8,6 +13,8 @@ import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; +import javax.json.Json; +import javax.json.JsonArrayBuilder; import javax.persistence.CascadeType; import javax.persistence.Column; import javax.persistence.Entity; @@ -38,18 +45,23 @@ public class FileMetadata implements Serializable { private static final Logger logger = Logger.getLogger(FileMetadata.class.getCanonicalName()); + @Expose @Pattern(regexp="^[^:<>;#/\"\\*\\|\\?\\\\]*$", message = "File Name cannot contain any of the following characters: \\ / : * ? 
\" < > | ; # .") @NotBlank(message = "Please specify a file name.") @Column( nullable=false ) private String label = ""; + + @Expose @Column(columnDefinition = "TEXT") private String description = ""; + @Expose private boolean restricted; @ManyToOne @JoinColumn(nullable=false) private DatasetVersion datasetVersion; + @ManyToOne @JoinColumn(nullable=false) private DataFile dataFile; @@ -121,16 +133,47 @@ public void addCategory(DataFileCategory category) { fileCategories.add(category); } + /** + * Retrieve categories + * @return + */ public List getCategoriesByName() { ArrayList ret = new ArrayList<>(); - if (fileCategories != null) { - for (int i = 0; i < fileCategories.size(); i++) { - ret.add(fileCategories.get(i).getName()); - } + + if (fileCategories == null) { + return ret; } + + for (int idx=0; idx < fileCategories.size(); idx++){ + ret.add(fileCategories.get(idx).getName()); + } + // fileCategories.stream() + // .map(x -> ret.add(x.getName())); + return ret; } + + public JsonArrayBuilder getCategoryNamesAsJsonArrayBuilder() { + + JsonArrayBuilder builder = Json.createArrayBuilder(); + + if (fileCategories == null) { + return builder; + } + + for (int idx=0; idx < fileCategories.size(); idx++){ + builder.add(fileCategories.get(idx).getName()); + } + + //fileCategories.stream() + // .map(x -> builder.add(x.getName())); + + return builder; + + } + + // alternative, experimental method: public void setCategoriesByName(List newCategoryNames) { @@ -376,4 +419,51 @@ public int compare(FileMetadata o1, FileMetadata o2) { return o1.getLabel().toUpperCase().compareTo(o2.getLabel().toUpperCase()); } }; + + + + public String asPrettyJSON(){ + + return serializeAsJSON(true); + } + + public String asJSON(){ + + return serializeAsJSON(false); + } + + /** + * + * @param prettyPrint + * @return + */ + private String serializeAsJSON(boolean prettyPrint){ + + JsonObject jsonObj = asGsonObject(prettyPrint); + + return jsonObj.toString(); + + } + + + public JsonObject 
asGsonObject(boolean prettyPrint){ + + + GsonBuilder builder; + if (prettyPrint){ // Add pretty printing + builder = new GsonBuilder().excludeFieldsWithoutExposeAnnotation().setPrettyPrinting(); + }else{ + builder = new GsonBuilder().excludeFieldsWithoutExposeAnnotation(); + } + + builder.serializeNulls(); // correctly capture nulls + Gson gson = builder.create(); + + // serialize this object + JsonElement jsonObj = gson.toJsonTree(this); + jsonObj.getAsJsonObject().addProperty("id", this.getId()); + + return jsonObj.getAsJsonObject(); + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java index ef6ab430c8d..849740fbdf2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java @@ -205,6 +205,8 @@ public Set permissionsFor( DataverseRequest req, DvObject dvo ) { // Add permissions specifically given to the user permissions.addAll( permissionsForSingleRoleAssignee(req.getUser(),dvo) ); + + /* Set groups = groupService.groupsFor(req,dvo); // Add permissions gained from groups @@ -212,6 +214,7 @@ public Set permissionsFor( DataverseRequest req, DvObject dvo ) { final Set groupPremissions = permissionsForSingleRoleAssignee(g,dvo); permissions.addAll(groupPremissions); } + */ if ( ! 
req.getUser().isAuthenticated() ) { permissions.removeAll( PERMISSIONS_FOR_AUTHENTICATED_USERS_ONLY ); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index 6c0a4eb9c6d..dfbcd6f1c11 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -1,5 +1,6 @@ package edu.harvard.iq.dataverse.api; +import com.google.gson.JsonElement; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetFieldServiceBean; import edu.harvard.iq.dataverse.DatasetFieldType; @@ -29,6 +30,7 @@ import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import edu.harvard.iq.dataverse.engine.command.exception.PermissionException; +import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean; import edu.harvard.iq.dataverse.search.savedsearch.SavedSearchServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; @@ -36,6 +38,7 @@ import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; import edu.harvard.iq.dataverse.validation.BeanValidationServiceBean; import java.io.StringReader; +import java.math.BigDecimal; import java.net.URI; import java.util.concurrent.Callable; import java.util.function.Function; @@ -67,6 +70,10 @@ public abstract class AbstractApiBean { private static final Logger logger = Logger.getLogger(AbstractApiBean.class.getName()); private static final String DATAVERSE_KEY_HEADER_NAME = "X-Dataverse-key"; + public static final String STATUS_ERROR = "ERROR"; + public static final String STATUS_OK = "OK"; + + /** * Utility class to convey a proper error response using Java's exceptions. 
*/ @@ -130,29 +137,32 @@ String getWrappedMessageWhenJson() { } } - @EJB - protected EjbDataverseEngine engineSvc; + @EJB + protected AuthenticationServiceBean authSvc; + + @EJB + protected EjbDataverseEngine engineSvc; @EJB protected DatasetServiceBean datasetSvc; - @EJB - protected DataverseServiceBean dataverseSvc; - - @EJB - protected AuthenticationServiceBean authSvc; - + @EJB + protected DataverseServiceBean dataverseSvc; + @EJB protected DatasetFieldServiceBean datasetFieldSvc; - + + @EJB + IngestServiceBean ingestService; + @EJB protected MetadataBlockServiceBean metadataBlockSvc; @EJB protected UserServiceBean userSvc; - @EJB - protected DataverseRoleServiceBean rolesSvc; + @EJB + protected DataverseRoleServiceBean rolesSvc; @EJB protected SettingsServiceBean settingsSvc; @@ -184,8 +194,10 @@ String getWrappedMessageWhenJson() { @EJB protected UserNotificationServiceBean userNotificationSvc; - @PersistenceContext(unitName = "VDCNet-ejbPU") - protected EntityManager em; + // ---------------------------- + + @PersistenceContext(unitName = "VDCNet-ejbPU") + protected EntityManager em; @Context protected HttpServletRequest httpRequest; @@ -460,29 +472,95 @@ protected Response response( DataverseRequestHandler hdl ) { protected Response ok( JsonArrayBuilder bld ) { return Response.ok(Json.createObjectBuilder() - .add("status", "OK") + .add("status", STATUS_OK) .add("data", bld).build()).build(); } + protected Response createdResponse( String uri, JsonObjectBuilder bld ) { + return Response.created( URI.create(uri) ) + .entity( Json.createObjectBuilder() + .add("status", STATUS_OK) + .add("data", bld).build()) + .type(MediaType.APPLICATION_JSON) + .build(); + } + protected Response ok( JsonObjectBuilder bld ) { return Response.ok( Json.createObjectBuilder() - .add("status", "OK") + .add("status", STATUS_OK) .add("data", bld).build() ) .type(MediaType.APPLICATION_JSON) .build(); } - + + protected Response ok( String msg ) { return 
Response.ok().entity(Json.createObjectBuilder() - .add("status", "OK") + .add("status", STATUS_OK) .add("data", Json.createObjectBuilder().add("message",msg)).build() ) .type(MediaType.APPLICATION_JSON) .build(); } + + protected Response ok(String message, JsonObjectBuilder jsonObjectBuilder ) { + + if (message == null){ + throw new NullPointerException("message cannot be null"); + } + if (jsonObjectBuilder == null){ + throw new NullPointerException("jsonObjectBuilder cannot be null"); + } + + jsonObjectBuilder.add("message", message); + + //JsonObjectBuilder foo = Json.createObjectBuilder(); + //foo.add("message", message); + + + return Response.ok( Json.createObjectBuilder() + .add("status", STATUS_OK) + .add("data", jsonObjectBuilder).build()) + .type(MediaType.APPLICATION_JSON) + .build(); + } + + /** + * Added to accommodate a JSON String generated from gson + * + * @param gsonObject + * @return + */ + /* + protected Response ok(String msg, com.google.gson.JsonObject gsonObject){ + + if (gsonObject == null){ + throw new NullPointerException("gsonObject cannot be null"); + } + + gsonObject.addProperty("status", "OK"); + gsonObject.addProperty("message", msg); + + return Response.ok(gsonObject.toString(), MediaType.APPLICATION_JSON).build(); + } + */ + + + /** + * Returns an OK response (HTTP 200, status:OK) with the passed value + * in the data field. + * @param value the value for the data field + * @return a HTTP OK response with the passed value as data. 
+ */ + protected Response okResponseWithValue( String value ) { + return Response.ok(Json.createObjectBuilder() + .add("status", STATUS_OK) + .add("data", value).build(), MediaType.APPLICATION_JSON_TYPE ).build(); + } + protected Response ok( boolean value ) { return Response.ok().entity(Json.createObjectBuilder() - .add("status", "OK") + .add("status", STATUS_OK) .add("data", value).build() ).build(); } @@ -498,7 +576,7 @@ protected Response created( String uri, JsonObjectBuilder bld ) { protected Response accepted() { return Response.accepted() .entity(Json.createObjectBuilder() - .add("status", "OK").build() + .add("status", STATUS_OK).build() ).build(); } @@ -525,7 +603,7 @@ protected Response permissionError( String message ) { protected static Response error( Status sts, String msg ) { return Response.status(sts) .entity( NullSafeJsonBuilder.jsonObjectBuilder() - .add("status", "ERROR") + .add("status", STATUS_ERROR) .add( "message", msg ).build() ).type(MediaType.APPLICATION_JSON_TYPE).build(); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 7cad071d341..ddfba67e0d5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -1,5 +1,6 @@ package edu.harvard.iq.dataverse.api; + import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.DvObject; import edu.harvard.iq.dataverse.EMailValidator; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/ApiConfiguration.java b/src/main/java/edu/harvard/iq/dataverse/api/ApiConfiguration.java index 8bb58670104..ab48c93ed00 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/ApiConfiguration.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/ApiConfiguration.java @@ -1,8 +1,18 @@ package edu.harvard.iq.dataverse.api; import javax.ws.rs.ApplicationPath; -import javax.ws.rs.core.Application; +import 
org.glassfish.jersey.media.multipart.MultiPartFeature; +import org.glassfish.jersey.server.ResourceConfig; @ApplicationPath("api/v1") -public class ApiConfiguration extends Application { +public class ApiConfiguration extends ResourceConfig { + + public ApiConfiguration() { + packages("edu.harvard.iq.dataverse.api"); + register(MultiPartFeature.class); + } } +/* +public class ApiConfiguration extends ResourceConfi { +} +*/ \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 5e5670ab57a..90c1d16429a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -1,18 +1,29 @@ package edu.harvard.iq.dataverse.api; import edu.harvard.iq.dataverse.DOIEZIdServiceBean; +import edu.harvard.iq.dataverse.DataFileServiceBean; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetField; import edu.harvard.iq.dataverse.DatasetFieldServiceBean; import edu.harvard.iq.dataverse.DatasetFieldType; import edu.harvard.iq.dataverse.DatasetServiceBean; import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.DatasetVersionServiceBean; import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.DataverseServiceBean; +import edu.harvard.iq.dataverse.EjbDataverseEngine; +import edu.harvard.iq.dataverse.MetadataBlock; +import edu.harvard.iq.dataverse.MetadataBlockServiceBean; +import edu.harvard.iq.dataverse.RoleAssignment; import edu.harvard.iq.dataverse.MetadataBlock; import edu.harvard.iq.dataverse.MetadataBlockServiceBean; import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.authorization.RoleAssignee; import edu.harvard.iq.dataverse.authorization.users.User; +import edu.harvard.iq.dataverse.datasetutility.AddReplaceFileHelper; +import edu.harvard.iq.dataverse.datasetutility.DataFileTagException; 
+import edu.harvard.iq.dataverse.datasetutility.NoFilesException; +import edu.harvard.iq.dataverse.datasetutility.OptionalFileParams; import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.impl.AssignRoleCommand; @@ -37,16 +48,21 @@ import edu.harvard.iq.dataverse.export.DDIExportServiceBean; import edu.harvard.iq.dataverse.export.ExportService; import edu.harvard.iq.dataverse.export.ddi.DdiExportUtil; +import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.privateurl.PrivateUrl; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; +import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.util.json.JsonParseException; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*; import java.io.ByteArrayOutputStream; +import java.io.InputStream; import java.io.OutputStream; import java.io.StringReader; +import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.ResourceBundle; import java.util.logging.Level; import java.util.logging.Logger; import javax.ejb.EJB; @@ -54,6 +70,7 @@ import javax.json.JsonArrayBuilder; import javax.json.JsonObject; import javax.json.JsonObjectBuilder; +import javax.ws.rs.Consumes; import javax.ws.rs.DELETE; import javax.ws.rs.GET; import javax.ws.rs.POST; @@ -64,6 +81,10 @@ import javax.ws.rs.QueryParam; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; +import static javax.ws.rs.core.Response.Status.BAD_REQUEST; +import org.glassfish.jersey.media.multipart.FormDataBodyPart; +import org.glassfish.jersey.media.multipart.FormDataContentDisposition; +import org.glassfish.jersey.media.multipart.FormDataParam; @Path("datasets") public class Datasets extends AbstractApiBean { @@ -93,7 +114,18 @@ public class Datasets extends AbstractApiBean { @EJB 
SettingsServiceBean settingsService; + @EJB + DataFileServiceBean fileService; + + @EJB + DatasetVersionServiceBean datasetVersionService; + + @EJB + IngestServiceBean ingestService; + @EJB + EjbDataverseEngine commandEngine; + /** * Used to consolidate the way we parse and handle dataset versions. * @param @@ -105,8 +137,8 @@ private interface DsVersionHandler { T handleLatestPublished(); } - @GET - @Path("{id}") + @GET + @Path("{id}") public Response getDataset(@PathParam("id") String id) { return response( req -> { final Dataset retrieved = execCommand(new GetDatasetCommand(req, findDatasetOrDie(id))); @@ -239,7 +271,7 @@ public Response getVersionFiles( @PathParam("id") String datasetId, @PathParam(" } @GET - @Path("{id}/versions/{versionId}/metadata") + @Path("{id}/versions/{versionId}/metadata") public Response getVersionMetadata( @PathParam("id") String datasetId, @PathParam("versionId") String versionId) { return response( req -> ok( jsonByBlocks( @@ -305,8 +337,8 @@ public Response updateDatasetTargetURLAll() { } @PUT - @Path("{id}/versions/{versionId}") - public Response updateDraftVersion( String jsonBody, @PathParam("id") String id, @PathParam("versionId") String versionId ){ + @Path("{id}/versions/{versionId}") + public Response updateDraftVersion( String jsonBody, @PathParam("id") String id, @PathParam("versionId") String versionId ){ if ( ! 
":draft".equals(versionId) ) { return error( Response.Status.BAD_REQUEST, "Only the :draft version can be updated"); @@ -520,35 +552,170 @@ public Response deletePrivateUrl(@PathParam("id") String idSupplied) { }); } - private Dataset findDatasetOrDie( String id ) throws WrappedResponse { + + + /** + * Add a File to an existing Dataset + * + * @param idSupplied + * @param datasetId + * @param jsonData + * @param testFileInputStream + * @param contentDispositionHeader + * @param formDataBodyPart + * @return + */ + @POST + @Path("{id}/add") + @Consumes(MediaType.MULTIPART_FORM_DATA) + public Response addFileToDataset(@PathParam("id") String idSupplied, + @FormDataParam("jsonData") String jsonData, + @FormDataParam("file") InputStream fileInputStream, + @FormDataParam("file") FormDataContentDisposition contentDispositionHeader, + @FormDataParam("file") final FormDataBodyPart formDataBodyPart + ){ + + + // ------------------------------------- + // (1) Get the user from the API key + // ------------------------------------- + User authUser; + try { + authUser = findUserOrDie(); + } catch (WrappedResponse ex) { + return error(Response.Status.FORBIDDEN, + ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.auth") + ); + } + + // ------------------------------------- + // (2) Get the Dataset Id + // + // ------------------------------------- + Dataset dataset; + + Long datasetId; + try { + dataset = findDatasetOrDie(idSupplied); + datasetId = dataset.getId(); + } catch (WrappedResponse wr) { + return wr.getResponse(); + /* + String errMsg; + if (idSupplied == null) { + errMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.dataset_id_is_null"); + return error(Response.Status.BAD_REQUEST, errMsg); + } else if (idSupplied.equals(Datasets.PERSISTENT_ID_KEY)) { + return wr.getResponse(); + } else { + errMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.dataset_id_not_found") + " " + idSupplied; + return 
error(Response.Status.BAD_REQUEST, errMsg); + }*/ + } + + + // ------------------------------------- + // (3) Get the file name and content type + // ------------------------------------- + String newFilename = contentDispositionHeader.getFileName(); + String newFileContentType = formDataBodyPart.getMediaType().toString(); + + + // (2a) Load up optional params via JSON + //--------------------------------------- + OptionalFileParams optionalFileParams = null; + msgt("(api) jsonData: " + jsonData); + + try { + optionalFileParams = new OptionalFileParams(jsonData); + } catch (DataFileTagException ex) { + return error( Response.Status.BAD_REQUEST, ex.getMessage()); + } + + + //------------------- + // (3) Create the AddReplaceFileHelper object + //------------------- + msg("ADD!"); + + DataverseRequest dvRequest2 = createDataverseRequest(authUser); + AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest2, + ingestService, + datasetService, + fileService, + permissionSvc, + commandEngine); + + + //------------------- + // (4) Run "runAddFileByDatasetId" + //------------------- + addFileHelper.runAddFileByDataset(dataset, + newFilename, + newFileContentType, + fileInputStream, + optionalFileParams); + + + if (addFileHelper.hasError()){ + return error(addFileHelper.getHttpErrorCode(), addFileHelper.getErrorMessagesAsString("\n")); + }else{ + String successMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.success.add"); + try { + //msgt("as String: " + addFileHelper.getSuccessResult()); + + return ok(successMsg, + addFileHelper.getSuccessResultAsJsonObjectBuilder()); + //"Look at that! You added a file! (hey hey, it may have worked)"); + } catch (NoFilesException ex) { + Logger.getLogger(Files.class.getName()).log(Level.SEVERE, null, ex); + return error(Response.Status.BAD_REQUEST, "NoFileException! Serious Error! 
See administrator!"); + + } + } + + } // end: addFileToDataset + + + + private void msg(String m){ + //System.out.println(m); + } + private void dashes(){ + msg("----------------"); + } + private void msgt(String m){ + dashes(); msg(m); dashes(); + } + + + + private Dataset findDatasetOrDie(String id) throws WrappedResponse { Dataset dataset; - if ( id.equals(PERSISTENT_ID_KEY) ) { + if (id.equals(PERSISTENT_ID_KEY)) { String persistentId = getRequestParameter(PERSISTENT_ID_KEY.substring(1)); - if ( persistentId == null ) { - throw new WrappedResponse( - badRequest("When accessing a dataset based on persistent id, " - + "a " + PERSISTENT_ID_KEY.substring(1) + " query parameter " - + "must be present")); + if (persistentId == null) { + throw new WrappedResponse( + badRequest(BundleUtil.getStringFromBundle("find.dataset.error.dataset_id_is_null", Collections.singletonList(PERSISTENT_ID_KEY.substring(1))))); } dataset = datasetService.findByGlobalId(persistentId); if (dataset == null) { - throw new WrappedResponse( notFound("dataset " + persistentId + " not found") ); - } + throw new WrappedResponse(notFound(BundleUtil.getStringFromBundle("find.dataset.error.dataset.not.found.persistentId", Collections.singletonList(persistentId)))); + } return dataset; - + } else { try { - dataset = datasetService.find( Long.parseLong(id) ); + dataset = datasetService.find(Long.parseLong(id)); if (dataset == null) { - throw new WrappedResponse( notFound("dataset " + id + " not found") ); - } + throw new WrappedResponse(notFound(BundleUtil.getStringFromBundle("find.dataset.error.dataset.not.found.id", Collections.singletonList(id)))); + } return dataset; - } catch ( NumberFormatException nfe ) { - throw new WrappedResponse( - badRequest("Bad dataset id number: '" + id + "'")); + } catch (NumberFormatException nfe) { + throw new WrappedResponse( + badRequest(BundleUtil.getStringFromBundle("find.dataset.error.dataset.not.found.bad.id", Collections.singletonList(id)))); } } - } @@ -587,7 
+754,7 @@ public Command handleLatest() { public Command handleDraft() { return new GetDraftDatasetVersionCommand(req, ds); } - + @Override public Command handleSpecific(long major, long minor) { return new GetSpecificPublishedDatasetVersionCommand(req, ds, major, minor); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java new file mode 100644 index 00000000000..9c9c53885be --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -0,0 +1,228 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. + */ +package edu.harvard.iq.dataverse.api; + +//import com.sun.jersey.core.header.FormDataContentDisposition; +//import com.sun.jersey.multipart.FormDataParam; +import com.google.gson.Gson; +import com.google.gson.JsonObject; +import edu.harvard.iq.dataverse.DataFileServiceBean; +import edu.harvard.iq.dataverse.DatasetServiceBean; +import edu.harvard.iq.dataverse.DatasetVersionServiceBean; +import edu.harvard.iq.dataverse.DataverseRequestServiceBean; +import edu.harvard.iq.dataverse.DataverseServiceBean; +import edu.harvard.iq.dataverse.EjbDataverseEngine; +import edu.harvard.iq.dataverse.UserNotificationServiceBean; +import edu.harvard.iq.dataverse.authorization.users.User; +import edu.harvard.iq.dataverse.datasetutility.AddReplaceFileHelper; +import edu.harvard.iq.dataverse.datasetutility.DataFileTagException; +import edu.harvard.iq.dataverse.datasetutility.NoFilesException; +import edu.harvard.iq.dataverse.datasetutility.OptionalFileParams; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.ingest.IngestServiceBean; +import java.io.InputStream; +import java.util.ResourceBundle; +import java.util.logging.Level; +import java.util.logging.Logger; +import javax.ejb.EJB; +import javax.inject.Inject; +import 
javax.ws.rs.Consumes; +import javax.ws.rs.POST; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; +import org.glassfish.jersey.media.multipart.FormDataBodyPart; +import org.glassfish.jersey.media.multipart.FormDataContentDisposition; +import org.glassfish.jersey.media.multipart.FormDataParam; + +/** + * + * @author rmp553 + */ +@Path("files") +public class Files extends AbstractApiBean { + + @EJB + DatasetServiceBean datasetService; + @EJB + DataFileServiceBean fileService; + @EJB + DatasetVersionServiceBean datasetVersionService; + @EJB + DataverseServiceBean dataverseService; + @EJB + IngestServiceBean ingestService; + @Inject + DataverseRequestServiceBean dvRequestService; + @EJB + EjbDataverseEngine commandEngine; + @EJB + UserNotificationServiceBean userNotificationService; + + private static final Logger logger = Logger.getLogger(Files.class.getName()); + + + + private void msg(String m){ + System.out.println(m); + } + private void dashes(){ + msg("----------------"); + } + private void msgt(String m){ + dashes(); msg(m); dashes(); + } + + + + /** + * Replace an Existing File + * + * @param datasetId + * @param testFileInputStream + * @param contentDispositionHeader + * @param formDataBodyPart + * @return + */ + @POST + @Path("{id}/replace") + @Consumes(MediaType.MULTIPART_FORM_DATA) + public Response replaceFileInDataset( + @PathParam("id") Long fileToReplaceId, + @FormDataParam("jsonData") String jsonData, + @FormDataParam("file") InputStream testFileInputStream, + @FormDataParam("file") FormDataContentDisposition contentDispositionHeader, + @FormDataParam("file") final FormDataBodyPart formDataBodyPart + ){ + + // ------------------------------------- + // (1) Get the user from the API key + // ------------------------------------- + User authUser; + try { + authUser = this.findUserOrDie(); + } catch (AbstractApiBean.WrappedResponse ex) { + return error(Response.Status.FORBIDDEN, 
+ ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.auth") + ); + } + + // ------------------------------------- + // (2) Check/Parse the JSON + // ------------------------------------- + if (jsonData == null){ + logger.log(Level.SEVERE, "jsonData is null"); + return error( Response.Status.BAD_REQUEST, "No JSON data"); + } + JsonObject jsonObj = new Gson().fromJson(jsonData, JsonObject.class); + + // (2a) Check for required "fileToReplaceId" + // ------------------------------------- + /*if ((!jsonObj.has("fileToReplaceId")) || jsonObj.get("fileToReplaceId").isJsonNull()){ + return error( Response.Status.BAD_REQUEST, "'fileToReplaceId' NOT found in the JSON Request"); + } + + Long fileToReplaceId; + + try { + fileToReplaceId = Long.parseLong(jsonObj.get("fileToReplaceId").toString()); + } catch (Exception e) { + return error( Response.Status.BAD_REQUEST, "'fileToReplaceId' in the JSON Request must be a number."); + } + */ + + // (2b) Check for optional "forceReplace" + // ------------------------------------- + Boolean forceReplace = false; + if ((jsonObj.has("forceReplace")) && (!jsonObj.get("forceReplace").isJsonNull())){ + forceReplace = jsonObj.get("forceReplace").getAsBoolean(); + if (forceReplace == null){ + forceReplace = false; + } + } + + + // (2d) Load up optional params via JSON + // - Will skip extra attributes which includes fileToReplaceId and forceReplace + //--------------------------------------- + OptionalFileParams optionalFileParams = null; + try { + optionalFileParams = new OptionalFileParams(jsonData); + } catch (DataFileTagException ex) { + return error( Response.Status.BAD_REQUEST, ex.getMessage()); + } + + + // ------------------------------------- + // (3) Get the file name and content type + // ------------------------------------- + String newFilename = contentDispositionHeader.getFileName(); + String newFileContentType = formDataBodyPart.getMediaType().toString(); + + + //------------------- + // (4) Create the 
AddReplaceFileHelper object + //------------------- + msg("REPLACE!"); + + DataverseRequest dvRequest2 = createDataverseRequest(authUser); + AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest2, + this.ingestService, + this.datasetService, + this.fileService, + this.permissionSvc, + this.commandEngine); + + //------------------- + // (5) Run "runReplaceFileByDatasetId" + //------------------- + + + if (forceReplace){ + addFileHelper.runForceReplaceFile(fileToReplaceId, + newFilename, + newFileContentType, + testFileInputStream, + optionalFileParams); + }else{ + addFileHelper.runReplaceFile(fileToReplaceId, + newFilename, + newFileContentType, + testFileInputStream, + optionalFileParams); + } + + msg("we're back....."); + if (addFileHelper.hasError()){ + msg("yes, has error"); + return error(addFileHelper.getHttpErrorCode(), addFileHelper.getErrorMessagesAsString("\n")); + + }else{ + msg("no error"); + String successMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.success.replace"); + + try { + msgt("as String: " + addFileHelper.getSuccessResult()); + return ok(successMsg, + addFileHelper.getSuccessResultAsJsonObjectBuilder()); + //return okResponseGsonObject(successMsg, + // addFileHelper.getSuccessResultAsGsonObject()); + //"Look at that! You added a file! (hey hey, it may have worked)"); + } catch (NoFilesException ex) { + Logger.getLogger(Files.class.getName()).log(Level.SEVERE, null, ex); + return error(Response.Status.BAD_REQUEST, "NoFileException! Serious Error! 
See administrator!"); + + } + } + + } // end: replaceFileInDataset + + + +} + + diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Util.java b/src/main/java/edu/harvard/iq/dataverse/api/Util.java index 639e3cfa7e8..ce7cb34a280 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Util.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Util.java @@ -2,9 +2,11 @@ import java.io.StringReader; import java.text.SimpleDateFormat; +import java.util.List; import java.util.Set; import java.util.TimeZone; import java.util.TreeSet; +import java.util.stream.Collectors; import javax.json.Json; import javax.json.JsonArray; import javax.json.JsonReader; @@ -107,6 +109,24 @@ public static SimpleDateFormat getDateFormat() { return DATE_FORMAT_TL.get(); } - - + /** + * Takes in a list of strings and returns a list stripped of nulls, empty strings and duplicates + * @param stringsToCheck + * @return + */ + + public static List removeDuplicatesNullsEmptyStrings(List stringsToCheck){ + + if (stringsToCheck == null){ + throw new NullPointerException("stringsToCheck cannot be null"); + } + + return stringsToCheck.stream() + .filter(p -> p != null) // no nulls + .map(String :: trim) // strip strings + .filter(p -> p.length() > 0 ) // no empty strings + .distinct() // distinct + .collect(Collectors.toList()); + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java new file mode 100644 index 00000000000..7ddcc847782 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java @@ -0,0 +1,1662 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. 
+ */ +package edu.harvard.iq.dataverse.datasetutility; + +import com.google.gson.JsonArray; +import com.google.gson.JsonObject; +import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.DataFileServiceBean; +import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.DatasetServiceBean; +import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.EjbDataverseEngine; +import edu.harvard.iq.dataverse.FileMetadata; +import edu.harvard.iq.dataverse.PermissionServiceBean; +import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.authorization.users.User; +import edu.harvard.iq.dataverse.engine.command.Command; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetCommand; +import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetCommand; +import edu.harvard.iq.dataverse.ingest.IngestServiceBean; +import edu.harvard.iq.dataverse.util.json.JsonPrinter; +import java.io.IOException; +import java.io.InputStream; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Objects; +import java.util.ResourceBundle; +import java.util.Set; +import java.util.logging.Level; +import java.util.logging.Logger; +import javax.ejb.EJBException; +import javax.json.JsonObjectBuilder; +import javax.validation.ConstraintViolation; +import javax.ws.rs.core.Response; + +/** + * Methods to add or replace a single file. 
+ * + * Usage example: + * + * // (1) Instantiate the class + * + * AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest2, + * this.ingestService, + * this.datasetService, + * this.fileService, + * this.permissionSvc, + * this.commandEngine); + * + * // (2) Run file "ADD" + * + * addFileHelper.runAddFileByDatasetId(datasetId, + * newFileName, + * newFileContentType, + * newFileInputStream); + * // (2a) Check for errors + * if (addFileHelper.hasError()){ + * // get some errors + * System.out.println(addFileHelper.getErrorMessagesAsString("\n")); + * } + * + * + * // OR (3) Run file "REPLACE" + * + * addFileHelper.runReplaceFile(datasetId, + * newFileName, + * newFileContentType, + * newFileInputStream, + * fileToReplaceId); + * // (2a) Check for errors + * if (addFileHelper.hasError()){ + * // get some errors + * System.out.println(addFileHelper.getErrorMessagesAsString("\n")); + * } + * + * + * + * @author rmp553 + */ +public class AddReplaceFileHelper{ + + private static final Logger logger = Logger.getLogger(AddReplaceFileHelper.class.getCanonicalName()); + + + public static String FILE_ADD_OPERATION = "FILE_ADD_OPERATION"; + public static String FILE_REPLACE_OPERATION = "FILE_REPLACE_OPERATION"; + public static String FILE_REPLACE_FORCE_OPERATION = "FILE_REPLACE_FORCE_OPERATION"; + + + private String currentOperation; + + // ----------------------------------- + // All the needed EJBs, passed to the constructor + // ----------------------------------- + private IngestServiceBean ingestService; + private DatasetServiceBean datasetService; + private DataFileServiceBean fileService; + private PermissionServiceBean permissionService; + private EjbDataverseEngine commandEngine; + + // ----------------------------------- + // Instance variables directly added + // ----------------------------------- + private Dataset dataset; // constructor (for add, not replace) + private DataverseRequest dvRequest; // constructor + private InputStream 
newFileInputStream; // step 20 + private String newFileName; // step 20 + private String newFileContentType; // step 20 + // -- Optional + private DataFile fileToReplace; // step 25 + + + // ----------------------------------- + // Instance variables derived from other input + // ----------------------------------- + private User user; + private DatasetVersion workingVersion; + List initialFileList; + List finalFileList; + + // ----------------------------------- + // Ingested files + // ----------------------------------- + private List newlyAddedFiles; + + // ----------------------------------- + // For error handling + // ----------------------------------- + + private boolean errorFound; + private List errorMessages; + private Response.Status httpErrorCode; // optional + + + /** + * MAIN CONSTRUCTOR -- minimal requirements + * + * @param dataset + * @param ingestService + * @param datasetService + * @param dvRequest + */ + public AddReplaceFileHelper(DataverseRequest dvRequest, + IngestServiceBean ingestService, + DatasetServiceBean datasetService, + DataFileServiceBean fileService, + PermissionServiceBean permissionService, + EjbDataverseEngine commandEngine){ + + // --------------------------------- + // make sure DataverseRequest isn't null and has a user + // --------------------------------- + if (dvRequest == null){ + throw new NullPointerException("dvRequest cannot be null"); + } + if (dvRequest.getUser() == null){ + throw new NullPointerException("dvRequest cannot have a null user"); + } + + // --------------------------------- + // make sure services aren't null + // --------------------------------- + if (ingestService == null){ + throw new NullPointerException("ingestService cannot be null"); + } + if (datasetService == null){ + throw new NullPointerException("datasetService cannot be null"); + } + if (fileService == null){ + throw new NullPointerException("fileService cannot be null"); + } + if (permissionService == null){ + throw new 
NullPointerException("ingestService cannot be null"); + } + if (commandEngine == null){ + throw new NullPointerException("commandEngine cannot be null"); + } + + // --------------------------------- + + this.ingestService = ingestService; + this.datasetService = datasetService; + this.fileService = fileService; + this.permissionService = permissionService; + this.commandEngine = commandEngine; + + + + initErrorHandling(); + + // Initiate instance vars + this.dataset = null; + this.dvRequest = dvRequest; + this.user = dvRequest.getUser(); + + } + + /** + * + * @param chosenDataset + * @param newFileName + * @param newFileContentType + * @param newFileInputStream + * @param optionalFileParams + * @return + */ + public boolean runAddFileByDataset(Dataset chosenDataset, + String newFileName, + String newFileContentType, + InputStream newFileInputStream, + OptionalFileParams optionalFileParams){ + + msgt(">> runAddFileByDatasetId"); + + initErrorHandling(); + + this.currentOperation = FILE_ADD_OPERATION; + + if (!this.step_001_loadDataset(chosenDataset)){ + return false; + } + + //return this.runAddFile(this.dataset, newFileName, newFileContentType, newFileInputStream, optionalFileParams); + return this.runAddReplaceFile(dataset, newFileName, newFileContentType, newFileInputStream, optionalFileParams); + + } + + + /** + * After the constructor, this method is called to add a file + * + * @param dataset + * @param newFileName + * @param newFileContentType + * @param newFileInputStream + * @return + */ + /* + public boolean runAddFile(Dataset dataset, + String newFileName, + String newFileContentType, + InputStream newFileInputStream, + OptionalFileParams optionalFileParams){ + msgt(">> runAddFile"); + + initErrorHandling(); + + if (this.hasError()){ + return false; + } + this.currentOperation = FILE_ADD_OPERATION; + + return this.runAddReplaceFile(dataset, newFileName, newFileContentType, newFileInputStream, optionalFileParams); + }*/ + + + /** + * After the constructor, 
this method is called to replace a file + * + * @param dataset + * @param newFileName + * @param newFileContentType + * @param newFileInputStream + * @return + */ + public boolean runForceReplaceFile(Long oldFileId, + String newFileName, + String newFileContentType, + InputStream newFileInputStream, + OptionalFileParams optionalFileParams){ + + msgt(">> runForceReplaceFile"); + initErrorHandling(); + + this.currentOperation = FILE_REPLACE_FORCE_OPERATION; + + + if (oldFileId==null){ + this.addErrorSevere(getBundleErr("existing_file_to_replace_id_is_null")); + return false; + } + + // Loads local variable "fileToReplace" + // + if (!this.step_005_loadFileToReplaceById(oldFileId)){ + return false; + } + + + return this.runAddReplaceFile(fileToReplace.getOwner(), newFileName, newFileContentType, newFileInputStream, optionalFileParams); + } + + + + + public boolean runReplaceFile(Long oldFileId, + String newFileName, + String newFileContentType, + InputStream newFileInputStream, + OptionalFileParams optionalFileParams){ + + msgt(">> runReplaceFile"); + + initErrorHandling(); + this.currentOperation = FILE_REPLACE_OPERATION; + + if (oldFileId==null){ + this.addErrorSevere(getBundleErr("existing_file_to_replace_id_is_null")); + return false; + } + + + // Loads local variable "fileToReplace" + // + if (!this.step_005_loadFileToReplaceById(oldFileId)){ + return false; + } + + return this.runAddReplaceFile(fileToReplace.getOwner(), newFileName, newFileContentType, newFileInputStream, optionalFileParams); + } + + + + /** + * Here we're going to run through the steps to ADD or REPLACE a file + * + * The difference between ADD and REPLACE (add/delete) is: + * + * oldFileId - For ADD, set to null + * oldFileId - For REPLACE, set to id of file to replace + * + * This has now been broken into Phase 1 and Phase 2 + * + * The APIs will use this method and call Phase 1 & Phase 2 consecutively + * + * The UI will call Phase 1 on initial upload and + * then run Phase 2 if the user 
chooses to save the changes. + * + * + * @return + */ + private boolean runAddReplaceFile(Dataset dataset, + String newFileName, String newFileContentType, + InputStream newFileInputStream, + OptionalFileParams optionalFileParams){ + + // Run "Phase 1" - Initial ingest of file + error check + // But don't save the dataset version yet + // + boolean phase1Success = runAddReplacePhase1(dataset, + newFileName, + newFileContentType, + newFileInputStream, + optionalFileParams + ); + + if (!phase1Success){ + return false; + } + + + return runAddReplacePhase2(); + + } + + /** + * For the UI: File add/replace has been broken into 2 steps + * + * Phase 1 (here): Add/replace the file and make sure there are no errors + * But don't update the Dataset (yet) + * + * @return + */ + public boolean runAddReplacePhase1(Dataset dataset, + String newFileName, + String newFileContentType, + InputStream newFileInputStream, + OptionalFileParams optionalFileParams){ + + if (this.hasError()){ + return false; // possible to have errors already... + } + + msgt("step_001_loadDataset"); + if (!this.step_001_loadDataset(dataset)){ + return false; + } + + msgt("step_010_VerifyUserAndPermissions"); + if (!this.step_010_VerifyUserAndPermissions()){ + return false; + + } + + msgt("step_020_loadNewFile"); + if (!this.step_020_loadNewFile(newFileName, newFileContentType, newFileInputStream)){ + return false; + + } + + msgt("step_030_createNewFilesViaIngest"); + if (!this.step_030_createNewFilesViaIngest()){ + return false; + + } + + msgt("step_050_checkForConstraintViolations"); + if (!this.step_050_checkForConstraintViolations()){ + return false; + } + + msgt("step_055_loadOptionalFileParams"); + if (!this.step_055_loadOptionalFileParams(optionalFileParams)){ + return false; + } + + return true; + } + + + /** + * For the UI: File add/replace has been broken into 2 steps + * + * Phase 2 (here): Phase 1 has run ok, Update the Dataset -- issue the commands! 
+ * + * @return + */ + public boolean runAddReplacePhase2(){ + + if (this.hasError()){ + return false; // possible to have errors already... + } + + if ((finalFileList == null)||(finalFileList.isEmpty())){ + addError(getBundleErr("phase2_called_early_no_new_files")); + return false; + } + + msgt("step_060_addFilesViaIngestService"); + if (!this.step_060_addFilesViaIngestService()){ + return false; + + } + + if (this.isFileReplaceOperation()){ + msgt("step_080_run_update_dataset_command_for_replace"); + if (!this.step_080_run_update_dataset_command_for_replace()){ + return false; + } + + }else{ + msgt("step_070_run_update_dataset_command"); + if (!this.step_070_run_update_dataset_command()){ + return false; + } + } + + msgt("step_090_notifyUser"); + if (!this.step_090_notifyUser()){ + return false; + } + + msgt("step_100_startIngestJobs"); + if (!this.step_100_startIngestJobs()){ + return false; + } + + return true; + } + + + /** + * Get for currentOperation + * @return String + */ + public String getCurrentOperation(){ + return this.currentOperation; + } + + + /** + * Is this a file FORCE replace operation? + * + * Only overrides warnings of content type change + * + * @return + */ + public boolean isForceFileOperation(){ + + return this.currentOperation.equals(FILE_REPLACE_FORCE_OPERATION); + } + + /** + * Is this a file replace operation? + * @return + */ + public boolean isFileReplaceOperation(){ + + if (this.currentOperation.equals(FILE_REPLACE_OPERATION)){ + return true; + }else if (this.currentOperation.equals(FILE_REPLACE_FORCE_OPERATION)){ + return true; + } + return false; + } + + /** + * Is this a file add operation? 
+ * + * @return + */ + public boolean isFileAddOperation(){ + + return this.currentOperation.equals(FILE_ADD_OPERATION); + } + + /** + * Initialize error handling vars + */ + private void initErrorHandling(){ + + this.errorFound = false; + this.errorMessages = new ArrayList<>(); + + } + + + + /** + * Add error message + * + * @param errMsg + */ + private void addError(String errMsg){ + + if (errMsg == null){ + throw new NullPointerException("errMsg cannot be null"); + } + this.errorFound = true; + + logger.fine(errMsg); + this.errorMessages.add(errMsg); + } + + /** + * Add Error mesage and, if it's known, the HTTP response code + * + * @param badHttpResponse, e.g. Response.Status.FORBIDDEN + * @param errMsg + */ + private void addError(Response.Status badHttpResponse, String errMsg){ + + if (badHttpResponse == null){ + throw new NullPointerException("badHttpResponse cannot be null"); + } + if (errMsg == null){ + throw new NullPointerException("errMsg cannot be null"); + } + + this.httpErrorCode = badHttpResponse; + + this.addError(errMsg); + + + } + + + private void addErrorSevere(String errMsg){ + + if (errMsg == null){ + throw new NullPointerException("errMsg cannot be null"); + } + this.errorFound = true; + + logger.severe(errMsg); + this.errorMessages.add(errMsg); + } + + + /** + * Was an error found? 
+ * + * @return + */ + public boolean hasError(){ + return this.errorFound; + + } + + /** + * get error messages + * + * @return + */ + public List getErrorMessages(){ + return this.errorMessages; + } + + /** + * get error messages as string + * + * @param joinString + * @return + */ + public String getErrorMessagesAsString(String joinString){ + if (joinString==null){ + joinString = "\n"; + } + return String.join(joinString, this.errorMessages); + } + + + /** + * For API use, return the HTTP error code + * + * Default is BAD_REQUEST + * + * @return + */ + public Response.Status getHttpErrorCode(){ + + if (!hasError()){ + logger.severe("Do not call this method unless there is an error! check '.hasError()'"); + } + + if (httpErrorCode == null){ + return Response.Status.BAD_REQUEST; + }else{ + return httpErrorCode; + } + } + + + /** + * Convenience method for getting bundle properties + * + * @param msgName + * @return + */ + private String getBundleMsg(String msgName, boolean isErr){ + if (msgName == null){ + throw new NullPointerException("msgName cannot be null"); + } + if (isErr){ + return ResourceBundle.getBundle("Bundle").getString("file.addreplace.error." + msgName); + }else{ + return ResourceBundle.getBundle("Bundle").getString("file.addreplace.success." 
+ msgName); + } + + } + + /** + * Convenience method for getting bundle error message + * + * @param msgName + * @return + */ + private String getBundleErr(String msgName){ + return this.getBundleMsg(msgName, true); + } + + /** + * Convenience method for getting bundle success message + * + * @param msgName + * @return + */ + private String getBundleSuccess(String msgName){ + return this.getBundleMsg(msgName, false); + } + + + + /** + * + */ + private boolean step_001_loadDataset(Dataset selectedDataset){ + + if (this.hasError()){ + return false; + } + + if (selectedDataset == null){ + this.addErrorSevere(getBundleErr("dataset_is_null")); + return false; + } + + dataset = selectedDataset; + + return true; + } + + + + /** + * Step 10 Verify User and Permissions + * + * + * @return + */ + private boolean step_010_VerifyUserAndPermissions(){ + + if (this.hasError()){ + return false; + } + + return step_015_auto_check_permissions(dataset); + + } + + private boolean step_015_auto_check_permissions(Dataset datasetToCheck){ + + if (this.hasError()){ + return false; + } + + if (datasetToCheck == null){ + addError(getBundleErr("dataset_is_null")); + return false; + } + + // Make a temp. command + // + CreateDatasetCommand createDatasetCommand = new CreateDatasetCommand(datasetToCheck, dvRequest, false); + + // Can this user run the command? 
+ // + if (!permissionService.isUserAllowedOn(dvRequest.getUser(), createDatasetCommand, datasetToCheck)) { + addError(Response.Status.FORBIDDEN,getBundleErr("no_edit_dataset_permission")); + return false; + } + + return true; + + } + + + private boolean step_020_loadNewFile(String fileName, String fileContentType, InputStream fileInputStream){ + + if (this.hasError()){ + return false; + } + + if (fileName == null){ + this.addErrorSevere(getBundleErr("filename_is_null")); + return false; + + } + + if (fileContentType == null){ + this.addErrorSevere(getBundleErr("file_content_type_is_null")); + return false; + + } + + if (fileInputStream == null){ + this.addErrorSevere(getBundleErr("file_input_stream_is_null")); + return false; + } + + newFileName = fileName; + newFileContentType = fileContentType; + newFileInputStream = fileInputStream; + + return true; + } + + + /** + * Optional: old file to replace + * + * @param oldFile + * @return + */ + private boolean step_005_loadFileToReplaceById(Long dataFileId){ + + if (this.hasError()){ + return false; + } + + // Check for Null + // + if (dataFileId == null){ + this.addErrorSevere(getBundleErr("existing_file_to_replace_id_is_null")); + return false; + } + + // Does the file exist? + // + DataFile existingFile = fileService.find(dataFileId); + + if (existingFile == null){ + this.addError(getBundleErr("existing_file_to_replace_not_found_by_id") + " " + dataFileId); + return false; + } + + + // Do we have permission to replace this file? e.g. Edit the file's dataset + // + if (!step_015_auto_check_permissions(existingFile.getOwner())){ + return false; + }; + + + + // Is the file published? + // + if (!existingFile.isReleased()){ + addError(getBundleErr("unpublished_file_cannot_be_replaced")); + return false; + } + + // Is the file in the latest dataset version? 
+ // + if (!step_007_auto_isReplacementInLatestVersion(existingFile)){ + return false; + } + + fileToReplace = existingFile; + + return true; + + } + + /** + * Make sure the file to replace is in the workingVersion + * -- e.g. that it wasn't deleted from a previous Version + * + * @return + */ + private boolean step_007_auto_isReplacementInLatestVersion(DataFile existingFile){ + + if (existingFile == null){ + throw new NullPointerException("existingFile cannot be null!"); + } + + if (this.hasError()){ + return false; + } + + + DatasetVersion latestVersion = existingFile.getOwner().getLatestVersion(); + + boolean fileInLatestVersion = false; + for (FileMetadata fm : latestVersion.getFileMetadatas()){ + if (fm.getDataFile().getId() != null){ + if (Objects.equals(existingFile.getId(),fm.getDataFile().getId())){ + fileInLatestVersion = true; + } + } + } + if (!fileInLatestVersion){ + addError(getBundleErr("existing_file_not_in_latest_published_version")); + return false; + } + return true; + } + + + private boolean step_030_createNewFilesViaIngest(){ + + if (this.hasError()){ + return false; + } + + // Load the working version of the Dataset + workingVersion = dataset.getEditVersion(); + + try { + initialFileList = ingestService.createDataFiles(workingVersion, + this.newFileInputStream, + this.newFileName, + this.newFileContentType); + + } catch (IOException ex) { + this.addErrorSevere(getBundleErr("ingest_create_file_err")); + logger.severe(ex.toString()); + this.runMajorCleanup(); + return false; + } + + + /** + * This only happens: + * (1) the dataset was empty + * (2) the new file (or new file unzipped) did not ingest via "createDataFiles" + */ + if (initialFileList.isEmpty()){ + this.addErrorSevere(getBundleErr("initial_file_list_empty")); + this.runMajorCleanup(); + return false; + } + + /** + * REPLACE: File replacement is limited to a single file!! 
+ * + * ADD: When adding files, some types of individual files + * are broken into several files--which is OK + */ + if (isFileReplaceOperation()){ + if (initialFileList.size() > 1){ + this.addError(getBundleErr("initial_file_list_more_than_one")); + this.runMajorCleanup(); + return false; + + } + } + + if (!this.step_040_auto_checkForDuplicates()){ + return false; + } + + + return this.step_045_auto_checkForFileReplaceDuplicate(); + } + + + /** + * Create a "final file list" + * + * This is always run after step 30 -- the ingest + * + * @return + */ + private boolean step_040_auto_checkForDuplicates(){ + + msgt("step_040_auto_checkForDuplicates"); + if (this.hasError()){ + return false; + } + + // Double checked -- this check also happens in step 30 + // + if (initialFileList.isEmpty()){ + this.addErrorSevere(getBundleErr("initial_file_list_empty")); + return false; + } + + // Initialize new file list + this.finalFileList = new ArrayList(); + + String warningMessage = null; + + + // ----------------------------------------------------------- + // Iterate through the recently ingest files + // ----------------------------------------------------------- + for (DataFile df : initialFileList){ + msg("Checking file: " + df.getFileMetadata().getLabel()); + + // ----------------------------------------------------------- + // (1) Check for ingest warnings + // ----------------------------------------------------------- + if (df.isIngestProblem()) { + if (df.getIngestReportMessage() != null) { + // may collect multiple error messages + this.addError(df.getIngestReportMessage()); + } + df.setIngestDone(); + } + + + // ----------------------------------------------------------- + // (2) Check for duplicates + // ----------------------------------------------------------- + if (DuplicateFileChecker.isDuplicateOriginalWay(workingVersion, df.getFileMetadata())){ + + String dupeName = df.getFileMetadata().getLabel(); + //removeUnSavedFilesFromWorkingVersion(); + 
//removeLinkedFileFromDataset(dataset, df); + //abandonOperationRemoveAllNewFilesFromDataset(); + this.addErrorSevere(getBundleErr("duplicate_file") + " " + dupeName); + //return false; + }else{ + finalFileList.add(df); + } + } + + if (this.hasError()){ + // We're recovering from the duplicate check. + msg("We're recovering from a duplicate check 1"); + runMajorCleanup(); + msg("We're recovering from a duplicate check 2"); + finalFileList.clear(); + return false; + } + + /** + * REPLACE: File replacement is limited to a single file!! + * + * ADD: When adding files, some types of individual files + * are broken into several files--which is OK + */ + + if (isFileReplaceOperation()){ + + if (finalFileList.size() > 1){ + String errMsg = "(This shouldn't happen -- error should have been detected in 030_createNewFilesViaIngest)"; + this.addErrorSevere(getBundleErr("initial_file_list_more_than_one") + " " + errMsg); + return false; + } + } + + if (finalFileList.isEmpty()){ + this.addErrorSevere("There are no files to add. (This error shouldn't happen if steps called in sequence....step_040_auto_checkForDuplicates)"); + return false; + } + + + return true; + } // end step_040_auto_checkForDuplicates + + + /** + * This is always checked. + * + * For ADD: If there is not replacement file, then the check is considered a success + * For REPLACE: The checksum is examined against the "finalFileList" list + * + */ + private boolean step_045_auto_checkForFileReplaceDuplicate(){ + + if (this.hasError()){ + return false; + } + + // Not a FILE REPLACE operation -- skip this step!! + // + if (!isFileReplaceOperation()){ + return true; + } + + + if (finalFileList.isEmpty()){ + // This error shouldn't happen if steps called in sequence.... + this.addErrorSevere("There are no files to add. 
(This error shouldn't happen if steps called in sequence....checkForFileReplaceDuplicate)"); + return false; + } + + + if (this.fileToReplace == null){ + // This error shouldn't happen if steps called correctly + this.addErrorSevere(getBundleErr("existing_file_to_replace_is_null") + " (This error shouldn't happen if steps called in sequence....checkForFileReplaceDuplicate)"); + return false; + } + + for (DataFile df : finalFileList){ + + if (Objects.equals(df.getChecksumValue(), fileToReplace.getChecksumValue())){ + this.addError(getBundleErr("replace.new_file_same_as_replacement")); + break; + } + + // This should be able to be overridden --force + if (!isForceFileOperation()){ + + // Warning that content type of the file has changed + // + if (!df.getContentType().equalsIgnoreCase(fileToReplace.getContentType())){ + this.addError(getBundleErr("replace.new_file_has_different_content_type")); + //+ " The new file,\"" + df.getFileMetadata().getLabel() + // + "\" has content type [" + df.getContentType() + "] while the replacment file, \"" + // + fileToReplace.getFileMetadata().getLabel() + "\" has content type: [" + fileToReplace.getContentType() + "]"); + } + } + } + + if (hasError()){ + runMajorCleanup(); + return false; + } + + return true; + + } // end step_045_auto_checkForFileReplaceDuplicate + + + + private boolean step_050_checkForConstraintViolations(){ + + if (this.hasError()){ + return false; + } + + if (finalFileList.isEmpty()){ + // This error shouldn't happen if steps called in sequence.... 
+ this.addErrorSevere(getBundleErr("final_file_list_empty")); + return false; + } + + // ----------------------------------------------------------- + // Iterate through checking for constraint violations + // Gather all error messages + // ----------------------------------------------------------- + Set constraintViolations = workingVersion.validate(); + + // ----------------------------------------------------------- + // No violations found + // ----------------------------------------------------------- + if (constraintViolations.isEmpty()){ + return true; + } + + // ----------------------------------------------------------- + // violations found: gather all error messages + // ----------------------------------------------------------- + List errMsgs = new ArrayList<>(); + for (ConstraintViolation violation : constraintViolations){ + this.addError(violation.getMessage()); + } + + return this.hasError(); + } + + + /** + * Load optional file params such as description, tags, fileDataTags, etc.. 
+ * + * @param optionalFileParams + * @return + */ + private boolean step_055_loadOptionalFileParams(OptionalFileParams optionalFileParams){ + + if (hasError()){ + return false; + } + + // -------------------------------------------- + // OK, the object may be null + // -------------------------------------------- + if (optionalFileParams == null){ + return true; + } + + + // -------------------------------------------- + // Iterate through files (should only be 1 for now) + // Add tags, description, etc + // -------------------------------------------- + for (DataFile df : finalFileList){ + try { + optionalFileParams.addOptionalParams(df); + } catch (DataFileTagException ex) { + Logger.getLogger(AddReplaceFileHelper.class.getName()).log(Level.SEVERE, null, ex); + addError(ex.getMessage()); + return false; + } + } + + + return true; + } + + private boolean step_060_addFilesViaIngestService(){ + + if (this.hasError()){ + return false; + } + + if (finalFileList.isEmpty()){ + // This error shouldn't happen if steps called in sequence.... 
+ this.addErrorSevere(getBundleErr("final_file_list_empty")); + return false; + } + + ingestService.addFiles(workingVersion, finalFileList); + + return true; + } + + + /** + * Create and run the update dataset command + * + * @return + */ + private boolean step_070_run_update_dataset_command(){ + + if (this.hasError()){ + return false; + } + + Command update_cmd; + update_cmd = new UpdateDatasetCommand(dataset, dvRequest); + ((UpdateDatasetCommand) update_cmd).setValidateLenient(true); + + try { + // Submit the update dataset command + // and update the local dataset object + // + dataset = commandEngine.submit(update_cmd); + } catch (CommandException ex) { + this.addErrorSevere(getBundleErr("add.command_engine_error")); + logger.severe(ex.getMessage()); + return false; + }catch (EJBException ex) { + this.addErrorSevere("add.ejb_exception (see logs)"); + logger.severe(ex.getMessage()); + return false; + } + return true; + } + + + /** + * Go through the working DatasetVersion and remove the + * FileMetadata of the file to replace + * + * @return + */ + private boolean step_085_auto_remove_filemetadata_to_replace_from_working_version(){ + + msgt("step_085_auto_remove_filemetadata_to_replace_from_working_version 1"); + + if (!isFileReplaceOperation()){ + // Shouldn't happen! 
+ this.addErrorSevere(getBundleErr("only_replace_operation") + " (step_085_auto_remove_filemetadata_to_replace_from_working_version"); + return false; + } + msg("step_085_auto_remove_filemetadata_to_replace_from_working_version 2"); + + if (this.hasError()){ + return false; + } + + + msgt("File to replace getId: " + fileToReplace.getId()); + + Iterator fmIt = workingVersion.getFileMetadatas().iterator(); + msgt("Clear file to replace"); + int cnt = 0; + while (fmIt.hasNext()) { + cnt++; + + FileMetadata fm = fmIt.next(); + msg(cnt + ") next file: " + fm); + msg(" getDataFile().getId(): " + fm.getDataFile().getId()); + if (fm.getDataFile().getId() != null){ + if (Objects.equals(fm.getDataFile().getId(), fileToReplace.getId())){ + msg("Let's remove it!"); + fmIt.remove(); + return true; + } + } + } + msg("No matches found!"); + addErrorSevere(getBundleErr("failed_to_remove_old_file_from_dataset")); + runMajorCleanup(); + return false; + } + + + private boolean runMajorCleanup(){ + + // (1) remove unsaved files from the working version + removeUnSavedFilesFromWorkingVersion(); + + // ---------------------------------------------------- + // (2) if the working version is brand new, delete it + // It doesn't have an "id" so you can't use the DeleteDatasetVersionCommand + // ---------------------------------------------------- + // Remove this working version from the dataset + Iterator versionIterator = dataset.getVersions().iterator(); + msgt("Clear Files"); + while (versionIterator.hasNext()) { + DatasetVersion dsv = versionIterator.next(); + if (dsv.getId() == null){ + versionIterator.remove(); + } + } + + return true; + + } + + /** + * We are outta here! Remove everything unsaved from the edit version! 
+ * + * @return + */ + private boolean removeUnSavedFilesFromWorkingVersion(){ + msgt("Clean up: removeUnSavedFilesFromWorkingVersion"); + + // ----------------------------------------------------------- + // (1) Remove all new FileMetadata objects + // ----------------------------------------------------------- + //Iterator fmIt = dataset.getEditVersion().getFileMetadatas().iterator();// + Iterator fmIt = workingVersion.getFileMetadatas().iterator(); //dataset.getEditVersion().getFileMetadatas().iterator();// + while (fmIt.hasNext()) { + FileMetadata fm = fmIt.next(); + if (fm.getDataFile().getId() == null){ + fmIt.remove(); + } + } + + // ----------------------------------------------------------- + // (2) Remove all new DataFile objects + // ----------------------------------------------------------- + Iterator dfIt = dataset.getFiles().iterator(); + msgt("Clear Files"); + while (dfIt.hasNext()) { + DataFile df = dfIt.next(); + if (df.getId() == null){ + dfIt.remove(); + } + } + return true; + + } + + + private boolean step_080_run_update_dataset_command_for_replace(){ + + if (!isFileReplaceOperation()){ + // Shouldn't happen! 
+ this.addErrorSevere(getBundleErr("only_replace_operation") + " (step_080_run_update_dataset_command_for_replace)"); + return false; + } + + if (this.hasError()){ + return false; + } + + // ----------------------------------------------------------- + // Remove the "fileToReplace" from the current working version + // ----------------------------------------------------------- + if (!step_085_auto_remove_filemetadata_to_replace_from_working_version()){ + return false; + } + + // ----------------------------------------------------------- + // Set the "root file ids" and "previous file ids" + // THIS IS A KEY STEP - SPLIT IT OUT + // (1) Old file: Set the Root File Id on the original file and save it + // (2) New file: Set the previousFileId to the id of the original file + // (3) New file: Set the rootFileId to the rootFileId of the original file + // ----------------------------------------------------------- + + + /* + Check the root file id on fileToReplace, updating it if necessary + */ + if (fileToReplace.getRootDataFileId().equals(DataFile.ROOT_DATAFILE_ID_DEFAULT)){ + + fileToReplace.setRootDataFileId(fileToReplace.getId()); + fileToReplace = fileService.save(fileToReplace); + } + + /* + Go through the final file list, settting the rootFileId and previousFileId + */ + for (DataFile df : finalFileList){ + df.setPreviousDataFileId(fileToReplace.getId()); + + df.setRootDataFileId(fileToReplace.getRootDataFileId()); + + } + + // Call the update dataset command + // + return step_070_run_update_dataset_command(); + + + } + + /** + * We want the version of the newly added file that has an id set + * + * TODO: This is inefficient/expensive. Need to redo it in a sane way + * - e.g. 
Query to find + * (1) latest dataset version in draft + * (2) pick off files that are NOT released + * (3) iterate through only those files + * - or an alternate/better version + * + * @param df + */ + private void setNewlyAddedFiles(List datafiles){ + + if (hasError()){ + return; + } + + // Init. newly added file list + newlyAddedFiles = new ArrayList<>(); + + // Loop of uglinesss...but expect 1 to 4 files in final file list + List latestFileMetadatas = dataset.getEditVersion().getFileMetadatas(); + + + for (DataFile newlyAddedFile : finalFileList){ + + for (FileMetadata fm : latestFileMetadatas){ + if (newlyAddedFile.getChecksumValue().equals(fm.getDataFile().getChecksumValue())){ + if (newlyAddedFile.getStorageIdentifier().equals(fm.getDataFile().getStorageIdentifier())){ + newlyAddedFiles.add(fm.getDataFile()); + } + } + } + } + /* + + newlyAddedFile = df; + + for (FileMetadata fm : dataset.getEditVersion().getFileMetadatas()){ + + // Find a file where the checksum value and identifiers are the same.. + // + if (newlyAddedFile.getChecksumValue().equals(fm.getDataFile().getChecksumValue())){ + if (newlyAddedFile.getStorageIdentifier().equals(fm.getDataFile().getStorageIdentifier())){ + newlyAddedFile = fm.getDataFile(); + break; + } + } + } + */ + + } + + + public List getNewlyAddedFiles(){ + + return newlyAddedFiles; + } + + public String getSuccessResult() throws NoFilesException{ + if (hasError()){ + throw new NoFilesException("Don't call this method if an error exists!! First check 'hasError()'"); + } + + if (newlyAddedFiles == null){ + throw new NullPointerException("newlyAddedFiles is null!"); + } + + return getSuccessResultAsJsonObjectBuilder().toString(); + + } + + public JsonObjectBuilder getSuccessResultAsJsonObjectBuilder() throws NoFilesException{ + + if (hasError()){ + throw new NoFilesException("Don't call this method if an error exists!! 
First check 'hasError()'"); + } + + if (newlyAddedFiles == null){ + throw new NullPointerException("newlyAddedFiles is null!"); + } + + if (newlyAddedFiles.isEmpty()){ + throw new NoFilesException("newlyAddedFiles is empty!"); + } + + return JsonPrinter.jsonDataFileList(newlyAddedFiles); + } + + + /** + * Currently this is a placeholder if we decide to send + * user notifications. + * + */ + private boolean step_090_notifyUser(){ + if (this.hasError()){ + return false; + } + + // Create a notification! + + // skip for now, may be part of dataset update listening + // + return true; + } + + + private boolean step_100_startIngestJobs(){ + if (this.hasError()){ + return false; + } + + // Should only be one file in the list + setNewlyAddedFiles(finalFileList); + + // clear old file list + // + finalFileList.clear(); + + // TODO: Need to run ingwest async...... + //if (true){ + //return true; + //} + + msg("pre ingest start"); + // start the ingest! + // + + ingestService.startIngestJobs(dataset, dvRequest.getAuthenticatedUser()); + + msg("post ingest start"); + return true; + } + + + private void msg(String m){ + logger.fine(m); + //System.out.println(m); + } + private void dashes(){ + msg("----------------"); + } + private void msgt(String m){ + dashes(); msg(m); dashes(); + } + + + + /** + * When a duplicate file is found after the initial ingest, + * remove the file from the dataset because + * createDataFiles has already linked it to the dataset: + * - first, through the filemetadata list + * - then through tht datafiles list + * + * + * @param dataset + * @param dataFileToRemove + */ + private boolean removeLinkedFileFromDataset(Dataset dataset, DataFile dataFileToRemove){ + + if (dataset==null){ + this.addErrorSevere(getBundleErr("remove_linked_file.dataset")); + return false; + } + + if (dataFileToRemove==null){ + this.addErrorSevere(getBundleErr("remove_linked_file.file")); + return false; + } + + // ----------------------------------------------------------- + 
// (1) Remove file from filemetadata list + // ----------------------------------------------------------- + Iterator fmIt = workingVersion.getFileMetadatas().iterator(); + msgt("Clear FileMetadatas"); + while (fmIt.hasNext()) { + FileMetadata fm = fmIt.next(); + msg("Check: " + fm); + if (fm.getId() == null && dataFileToRemove.getStorageIdentifier().equals(fm.getDataFile().getStorageIdentifier())) { + msg("Got It! "); + fmIt.remove(); + break; + } + } + + + // ----------------------------------------------------------- + // (2) Remove file from datafiles list + // ----------------------------------------------------------- + Iterator dfIt = dataset.getFiles().iterator(); + msgt("Clear Files"); + while (dfIt.hasNext()) { + DataFile dfn = dfIt.next(); + msg("Check: " + dfn); + if (dfn.getId() == null && dataFileToRemove.getStorageIdentifier().equals(dfn.getStorageIdentifier())) { + msg("Got It! try to remove from iterator"); + + dfIt.remove(); + msg("it worked"); + + break; + }else{ + msg("...ok"); + } + } + return true; + } + + + +} + /* + DatasetPage sequence: + + (A) editFilesFragment.xhtml -> EditDataFilesPage.handleFileUpload + (B) EditDataFilesPage.java -> handleFileUpload + (1) UploadedFile uf event.getFile() // UploadedFile + -------- + UploadedFile interface: + public String getFileName() + public InputStream getInputstream() throws IOException; + public long getSize(); + public byte[] getContents(); + public String getContentType(); + public void write(String string) throws Exception; + -------- + (2) List dFileList = null; + try { + // Note: A single file may be unzipped into multiple files + dFileList = ingestService.createDataFiles(workingVersion, uFile.getInputstream(), uFile.getFileName(), uFile.getContentType()); + } + + (3) processUploadedFileList(dFileList); + + (C) EditDataFilesPage.java -> processUploadedFileList + - iterate through list of DataFile objects -- which COULD happen with a single .zip + - isDuplicate check + - if good: + - 
newFiles.add(dataFile); // looks good + - fileMetadatas.add(dataFile.getFileMetadata()); + - return null; // looks good, return null + (D) save() // in the UI, user clicks the button. API is automatic if no errors + + (1) Look for constraintViolations: + // DatasetVersion workingVersion; + Set constraintViolations = workingVersion.validate(); + if (!constraintViolations.isEmpty()) { + //JsfHelper.addFlashMessage(JH.localize("dataset.message.validationError")); + JH.addMessage(FacesMessage.SEVERITY_ERROR, JH.localize("dataset.message.validationError")); + //FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, "Validation Error", "See below for details.")); + return ""; + } + + (2) Use the ingestService for a final check + // ask Leonid if this is needed for API + // One last check before we save the files - go through the newly-uploaded + // ones and modify their names so that there are no duplicates. + // (but should we really be doing it here? - maybe a better approach to do it + // in the ingest service bean, when the files get uploaded.) + // Finally, save the files permanently: + ingestService.addFiles(workingVersion, newFiles); + + (3) Use the API to save the dataset + - make new CreateDatasetCommand + - check if dataset has a template + - creates UserNotification message + + */ + // Checks: + // - Does the md5 already exist in the dataset? + // - If it's a replace, has the name and/or extension changed? + // On failure, send back warning + // + // - All looks good + // - Create a DataFile + // - Create a FileMetadata + // - Copy the Dataset version, making a new DRAFT + // - If it's replace, don't copy the file being replaced + // - Add this new file. + // .... 
+ + + +/* + 1) Recovery from adding same file and duplicate being found + - draft ok + - published version - nope +*/ \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/DataFileTagException.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/DataFileTagException.java new file mode 100644 index 00000000000..8ae0bfd6b2f --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/DataFileTagException.java @@ -0,0 +1,24 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. + */ +package edu.harvard.iq.dataverse.datasetutility; + +/** + * + * @author rmp553 + */ +public class DataFileTagException extends Exception { + + public DataFileTagException(String message) { + super(message); + } + + public DataFileTagException(String message, Throwable cause) { + super(message, cause); + } + +} + + diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/DuplicateFileChecker.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/DuplicateFileChecker.java new file mode 100644 index 00000000000..817cebad949 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/DuplicateFileChecker.java @@ -0,0 +1,165 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. + */ +package edu.harvard.iq.dataverse.datasetutility; + +import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.DatasetVersionServiceBean; +import edu.harvard.iq.dataverse.FileMetadata; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.logging.Logger; + +/** + * Used for adding/replacing single files. 
+ * + * Methods check if the files already exist in the *saved* DatasetVersion + * + * @author rmp553 + */ +public class DuplicateFileChecker { + + private static final Logger logger = Logger.getLogger(DuplicateFileChecker.class.getCanonicalName()); + private DatasetVersionServiceBean datasetVersionService; + + /** + * Constructor + * + * @param datasetVersionService + */ + public DuplicateFileChecker(DatasetVersionServiceBean datasetVersionService){ + + if (datasetVersionService == null){ + throw new NullPointerException("datasetVersionService cannot be null"); + } + + this.datasetVersionService = datasetVersionService; + } // end: constructor + + + /** + * Check the database to see if this file is already in the DatasetVersion + * + * Note: This checks a SINGLE file against the database only. + * + * @param checksum + * @return + */ + public boolean isFileInSavedDatasetVersion(DatasetVersion datasetVersion, FileMetadata fileMetadata){ + + if (datasetVersion == null){ + throw new NullPointerException("datasetVersion cannot be null"); + } + + if (fileMetadata == null){ + throw new NullPointerException("fileMetadata cannot be null"); + } + return this.isFileInSavedDatasetVersion(datasetVersion, fileMetadata.getDataFile().getChecksumValue()); + } + + /** + * See if this checksum already exists by a new query + * + * @param checksum + * @return + */ + public boolean isFileInSavedDatasetVersion(DatasetVersion datasetVersion, String checkSum){ + + if (datasetVersion == null){ + throw new NullPointerException("datasetVersion cannot be null"); + } + + if (checkSum == null){ + throw new NullPointerException("checkSum cannot be null"); + } + + return datasetVersionService.doesChecksumExistInDatasetVersion(datasetVersion, checkSum); + + } + + /** + * From dataset version: + * - Get the checksum of all the files + * - Load them into a hash + * + * Loads checksums from unsaved datasetversion--checks more + * + */ + public Map getDatasetHashesFromDatabase(DatasetVersion 
datasetVersion){ + + if (datasetVersion == null){ + throw new NullPointerException("datasetVersion cannot be null"); + } + + Map checksumHashCounts = new HashMap<>(); + + List fileMetadatas = new ArrayList<>(datasetVersion.getFileMetadatas()); + + for (FileMetadata fm : fileMetadatas){ + String checkSum = fm.getDataFile().getChecksumValue(); + if (checksumHashCounts.get(checkSum) != null){ + checksumHashCounts.put(checkSum, checksumHashCounts.get(checkSum).intValue() + 1); + }else{ + checksumHashCounts.put(checkSum, 1); + } + } + return checksumHashCounts; + } + + + + /** + * Original isDuplicate method from the DatasetPage and EditDatafilesPage + * + * Note: this has efficiency issues in that the hash is re-created for every fileMetadata checked + * + * @param workingVersion + * @param fileMetadata + * @return + */ + public static boolean isDuplicateOriginalWay(DatasetVersion workingVersion, FileMetadata fileMetadata) { + if (workingVersion == null){ + throw new NullPointerException("datasetVersion cannot be null"); + } + + String selectedCheckSum = fileMetadata.getDataFile().getChecksumValue(); + if (selectedCheckSum == null) { + return false; + } + + Map checkSumMap = new HashMap(); + + // TODO: + // think of a way to do this that doesn't involve populating this + // map for every file on the page? + // man not be that much of a problem, if we paginate and never display + // more than a certain number of files... Still, needs to be revisited + // before the final 4.0. + // -- L.A. 
4.0 + + // make a "defensive copy" to avoid java.util.ConcurrentModificationException from being thrown + // when uploading 100+ files + List wvCopy = new ArrayList<>(workingVersion.getFileMetadatas()); + Iterator fmIt = wvCopy.iterator(); + + while (fmIt.hasNext()) { + FileMetadata fm = fmIt.next(); + String currentCheckSum = fm.getDataFile().getChecksumValue(); + if (currentCheckSum != null) { + if (checkSumMap.get(currentCheckSum) != null) { + checkSumMap.put(currentCheckSum, checkSumMap.get(currentCheckSum).intValue() + 1); + } else { + checkSumMap.put(currentCheckSum, 1); + } + } + } + return checkSumMap.get(selectedCheckSum) != null && checkSumMap.get(selectedCheckSum).intValue() > 1; + + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileUploadTestPage.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileUploadTestPage.java new file mode 100644 index 00000000000..d879f80ea88 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileUploadTestPage.java @@ -0,0 +1,270 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. 
+ */ +package edu.harvard.iq.dataverse.datasetutility; + +import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.DataFileServiceBean; +import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.DatasetPage; +import edu.harvard.iq.dataverse.DatasetServiceBean; +import edu.harvard.iq.dataverse.DatasetVersionServiceBean; +import edu.harvard.iq.dataverse.DataverseLinkingServiceBean; +import edu.harvard.iq.dataverse.DataverseRequestServiceBean; +import edu.harvard.iq.dataverse.DataverseSession; +import edu.harvard.iq.dataverse.EjbDataverseEngine; +import edu.harvard.iq.dataverse.FileMetadata; +import edu.harvard.iq.dataverse.PermissionServiceBean; +import edu.harvard.iq.dataverse.UserNotificationServiceBean; +import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; +import edu.harvard.iq.dataverse.ingest.IngestServiceBean; +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; +import edu.harvard.iq.dataverse.util.SystemConfig; +import java.io.IOException; +import java.io.InputStream; +import java.util.List; +import java.util.Map; +import java.util.logging.Level; +import java.util.logging.Logger; +import javax.ejb.EJB; +import javax.faces.context.FacesContext; +import javax.faces.view.ViewScoped; +import javax.inject.Inject; +import javax.inject.Named; +import org.apache.commons.lang.StringUtils; +import org.primefaces.event.FileUploadEvent; +import org.primefaces.model.UploadedFile; + +/** + * + * @author rmp553 + */ +@ViewScoped +@Named("FileUploadTestPage") +public class FileUploadTestPage implements java.io.Serializable { + + private static final Logger logger = Logger.getLogger(DatasetPage.class.getCanonicalName()); + + private boolean replaceOperation = false; + private Long datasetId; + private Dataset dataset; + private DataFile fileToReplace; + private List newlyAddedFiles; + + @EJB + IngestServiceBean ingestService; + @Inject + DataverseSession session; + @EJB + DatasetServiceBean datasetService; + 
@EJB + DatasetVersionServiceBean datasetVersionService; + @EJB + DataFileServiceBean datafileService; + @EJB + UserNotificationServiceBean userNotificationService; + @EJB + SettingsServiceBean settingsService; + @EJB + AuthenticationServiceBean authService; + @EJB + SystemConfig systemConfig; + @EJB + DataverseLinkingServiceBean dvLinkingService; + @Inject + DataverseRequestServiceBean dvRequestService; + @EJB + PermissionServiceBean permissionService; + @EJB + EjbDataverseEngine commandEngine; + + + public String init() { + + Map params =FacesContext.getCurrentInstance(). + getExternalContext().getRequestParameterMap(); + + + msgt("params: " + params.toString()); + + if (params.containsKey("ds_id")){ + String ds_id = params.get("ds_id"); + if ((!ds_id.isEmpty()) && (StringUtils.isNumeric(ds_id))){ + dataset = datasetService.find(Long.parseLong(ds_id)); + } + } + + if (params.containsKey("fid")){ + String fid = params.get("fid"); + if ((!fid.isEmpty()) && (StringUtils.isNumeric(fid))){ + fileToReplace = datafileService.find(Long.parseLong(fid)); + } + } + + if (fileToReplace != null){ + replaceOperation = true; + }else{ + replaceOperation = false; + } + + return null; + } + + + + public List getDatasetFileMetadatas(){ + + if (dataset == null){ + return null; + } + return dataset.getLatestVersion().getFileMetadatasSorted(); + } + + public String yesYes(){ + return "yes yes"; + } + + private void msg(String s){ + System.out.println(s); + } + + private void msgt(String s){ + msg("-------------------------------"); + msg(s); + msg("-------------------------------"); + } + + public Dataset getDataset(){ + return dataset; + } + + public void setDataset(Dataset ds){ + dataset = ds; + } + + public DataFile getFileToReplace(){ + return fileToReplace; + } + + public void setFileToReplace(DataFile df){ + fileToReplace = df; + } + + + public void handleFileUpload(FileUploadEvent event) { + + + String foo = (String) 
event.getComponent().getAttributes().get("isReplaceOperation"); // bar + msgt("Foo: " + foo); + //FacesMessage message = new FacesMessage("Succesful", event.getFile().getFileName() + " is uploaded."); + //FacesContext.getCurrentInstance().addMessage(null, message); + + + UploadedFile uFile = event.getFile(); + + msg("getFileName: " + uFile.getFileName()); + msg("getContentType: " + uFile.getContentType()); + + addReplaceFile(uFile); + //msg("file name: " + event.getFileName()); + // dFileList = ingestService.createDataFiles(workingVersion, uFile.getInputstream(), uFile.getFileName(), uFile.getContentType()); + + } + + + + public void addReplaceFile(UploadedFile laFile){ + + + //DataverseRequest dvRequest2 = createDataverseRequest(authUser); + AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequestService.getDataverseRequest(), + ingestService, + datasetService, + datafileService, + permissionService, + commandEngine); + + + InputStream inputStream = null; + try { + inputStream = laFile.getInputstream(); + } catch (IOException ex) { + msgt("file io exception"); + + Logger.getLogger(FileUploadTestPage.class.getName()).log(Level.SEVERE, null, ex); + return; + } + + + if (this.replaceOperation){ + addFileHelper.runReplaceFile( fileToReplace.getId(), + laFile.getFileName(), + laFile.getContentType(), + inputStream, + null + ); + }else{ + addFileHelper.runAddFileByDataset(dataset, + laFile.getFileName(), + laFile.getContentType(), + inputStream, + null); + } + + if (addFileHelper.hasError()){ + msgt("upload error"); + msg(addFileHelper.getErrorMessagesAsString("\n")); + }else{ + newlyAddedFiles = addFileHelper.getNewlyAddedFiles(); + msg("Look at that! You added a file! 
(hey hey, it may have worked)"); + } + } + + /* + public String getPebbleTest() throws PebbleException, IOException{ + + ClasspathLoader loader = new ClasspathLoader(); + + //String pagePath = getServletContext().getRealPath("WEB-INF/home.html"); + ////msgt("pagePath: " + pagePath); + + //loader.setPrefix(getServletContext().getRealPath("WEB-INF/templates")); + loader.setPrefix("WEB-INF/templates"); + loader.setSuffix(".html"); + + Builder yeBuilder = new PebbleEngine.Builder(); + yeBuilder.loader(loader);// = loader;//(loader) + + PebbleEngine engine = yeBuilder.build(); + //PebbleEngine.Builder(). + + PebbleTemplate compiledTemplate = engine.getTemplate("home"); + + + + //PebbleTemplate compiledTemplate = engine.getTemplate(pagePath); + + //PebbleTemplate compiledTemplate = engine.getTemplate(getServletContext().getRealPath("WEB-INF/home.html")); + + Map context = new HashMap<>(); + context.put("name", "Mitchell"); + + Writer writer = new StringWriter(); + compiledTemplate.evaluate(writer, context); + + String output = writer.toString(); + + msgt("getPebbleTest: " + output); + + return output; + + } + */ + public List getNewlyAddedFile(){ + + return newlyAddedFiles; + } + +} // end class FileUploadTestPage diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileVersionRetriever.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileVersionRetriever.java new file mode 100644 index 00000000000..f166ed52cc6 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileVersionRetriever.java @@ -0,0 +1,60 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. 
+ */ +package edu.harvard.iq.dataverse.datasetutility; + +import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.DataFileServiceBean; +import java.util.List; +import java.util.logging.Logger; + +/** + * Given a file id or object, return a a list of this file's versions + * + * @author rmp553 + */ +public class FileVersionRetriever { + + private static final Logger logger = Logger.getLogger(FileVersionRetriever.class.getCanonicalName()); + + DataFileServiceBean dataFileService; + + DataFile chosenFile; + List fileList; + + /** + * Constructor by chosenFileId + * + * @param fileService + * @param chosenFileId + */ + public FileVersionRetriever(DataFileServiceBean fileService, Long chosenFileId){ + if (fileService == null){ + throw new NullPointerException("fileService cannot be null"); + } + if (chosenFileId == null){ + throw new NullPointerException("chosenFileId cannot be null"); + } + dataFileService = fileService; + chosenFile = dataFileService.find(chosenFileId); + } + + /** + * Constructor by chosenFile + * + * @param fileService + * @param chosenFile + */ + public FileVersionRetriever(DataFileServiceBean fileService, DataFile selectedFile){ + if (fileService == null){ + throw new NullPointerException("fileService cannot be null"); + } + if (selectedFile == null){ + throw new NullPointerException("selectedFile cannot be null"); + } + dataFileService = fileService; + chosenFile = selectedFile; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/NoFilesException.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/NoFilesException.java new file mode 100644 index 00000000000..32881fd25c4 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/NoFilesException.java @@ -0,0 +1,24 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. 
+ */ +package edu.harvard.iq.dataverse.datasetutility; + +/** + * + * @author rmp553 + */ +public class NoFilesException extends Exception { + + public NoFilesException(String message) { + super(message); + } + + public NoFilesException(String message, Throwable cause) { + super(message, cause); + } + +} + + diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java new file mode 100644 index 00000000000..245e9e88915 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java @@ -0,0 +1,365 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. + */ +package edu.harvard.iq.dataverse.datasetutility; + +import com.google.gson.Gson; +import com.google.gson.JsonObject; +import com.google.gson.reflect.TypeToken; +import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.DataFileTag; +import edu.harvard.iq.dataverse.FileMetadata; +import edu.harvard.iq.dataverse.api.Util; +import java.lang.reflect.Type; +import java.util.ArrayList; +import java.util.List; +import java.util.ResourceBundle; +import java.util.stream.Collectors; + +/** + * This is used in conjunction with the AddReplaceFileHelper + * + * It encapsulates these optional parameters: + * + * - description + * - file tags (can be custom) + * - tabular tags (controlled vocabulary) + * + * Future params: + * - Provenance related information + * + * @author rmp553 + */ +public class OptionalFileParams { + + private String description; + public static final String DESCRIPTION_ATTR_NAME = "description"; + + private List categories; + public static final String CATEGORIES_ATTR_NAME = "categories"; + + private List dataFileTags; + public static final String FILE_DATA_TAGS_ATTR_NAME = "dataFileTags"; + + + + + public 
OptionalFileParams(String jsonData) throws DataFileTagException{ + + if (jsonData != null){ + loadParamsFromJson(jsonData); + } + } + + + public OptionalFileParams(String description, + List newCategories, + List potentialFileDataTags) throws DataFileTagException{ + + this.description = description; + setCategories(newCategories); + this.addFileDataTags(potentialFileDataTags); + } + + /** + * Set description + * @param description + */ + public void setDescription(String description){ + this.description = description; + } + + /** + * Get for description + * @return String + */ + public String getDescription(){ + return this.description; + } + + public boolean hasCategories(){ + if ((categories == null)||(this.categories.isEmpty())){ + return false; + } + return true; + } + + public boolean hasFileDataTags(){ + if ((dataFileTags == null)||(this.dataFileTags.isEmpty())){ + return false; + } + return true; + } + + public boolean hasDescription(){ + if ((description == null)||(this.description.isEmpty())){ + return false; + } + return true; + } + + /** + * Set tags + * @param tags + */ + public void setCategories(List newCategories) { + + if (newCategories != null) { + newCategories = Util.removeDuplicatesNullsEmptyStrings(newCategories); + if (newCategories.isEmpty()) { + newCategories = null; + } + } + + this.categories = newCategories; + } + + /** + * Get for tags + * @return List + */ + public List getCategories(){ + return this.categories; + } + + + /** + * Set dataFileTags + * @param dataFileTags + */ + public void setDataFileTags(List dataFileTags){ + this.dataFileTags = dataFileTags; + } + + /** + * Get for dataFileTags + * @return List + */ + public List getDataFileTags(){ + return this.dataFileTags; + } + + private void loadParamsFromJson(String jsonData) throws DataFileTagException{ + + msgt("jsonData: " + jsonData); + if (jsonData == null){ + return; +// logger.log(Level.SEVERE, "jsonData is null"); + } + JsonObject jsonObj = new Gson().fromJson(jsonData, 
JsonObject.class); + + + // ------------------------------- + // get description as string + // ------------------------------- + if ((jsonObj.has(DESCRIPTION_ATTR_NAME)) && (!jsonObj.get(DESCRIPTION_ATTR_NAME).isJsonNull())){ + + this.description = jsonObj.get(DESCRIPTION_ATTR_NAME).getAsString(); + } + + + // ------------------------------- + // get tags + // ------------------------------- + Gson gson = new Gson(); + + //Type objType = new TypeToken>() {}.getType(); + Type listType = new TypeToken>() {}.getType(); + + //---------------------- + // Load tags + //---------------------- + if ((jsonObj.has(CATEGORIES_ATTR_NAME)) && (!jsonObj.get(CATEGORIES_ATTR_NAME).isJsonNull())){ + + setCategories(this.categories = gson.fromJson(jsonObj.get(CATEGORIES_ATTR_NAME), listType)); + } + + //---------------------- + // Load tabular tags + //---------------------- + if ((jsonObj.has(FILE_DATA_TAGS_ATTR_NAME)) && (!jsonObj.get(FILE_DATA_TAGS_ATTR_NAME).isJsonNull())){ + + + // Get potential tags from JSON + List potentialTags = gson.fromJson(jsonObj.get(FILE_DATA_TAGS_ATTR_NAME), listType); + + // Add valid potential tags to the list + addFileDataTags(potentialTags); + + } + + } + + private void addFileDataTags(List potentialTags) throws DataFileTagException{ + + if (potentialTags == null){ + return; + } + + potentialTags = Util.removeDuplicatesNullsEmptyStrings(potentialTags); + + if (potentialTags.isEmpty()){ + return; + } + + // Make a new list + this.dataFileTags = new ArrayList<>(); + + // Add valid potential tags to the list + for (String tagToCheck : potentialTags){ + if (DataFileTag.isDataFileTag(tagToCheck)){ + this.dataFileTags.add(tagToCheck); + }else{ + String errMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.invalid_datafile_tag"); + throw new DataFileTagException(errMsg + " [" + tagToCheck + "]. Please use one of the following: " + DataFileTag.getListofLabelsAsString()); + } + } + // Shouldn't happen.... 
+ if (dataFileTags.isEmpty()){ + dataFileTags = null; + } + } + + + private void msg(String s){ + System.out.println(s); + } + + private void msgt(String s){ + msg("-------------------------------"); + msg(s); + msg("-------------------------------"); + } + + /** + * Add parameters to a DataFile object + * + */ + public void addOptionalParams(DataFile df) throws DataFileTagException{ + if (df == null){ + throw new NullPointerException("The datafile cannot be null!"); + } + + FileMetadata fm = df.getFileMetadata(); + + // --------------------------- + // Add description + // --------------------------- + if (hasDescription()){ + fm.setDescription(this.getDescription()); + } + + // --------------------------- + // Add categories + // --------------------------- + addCategoriesToDataFile(fm); + + + // --------------------------- + // Add DataFileTags + // --------------------------- + addFileDataTagsToFile(df); + + } + + + /** + * Add Tags to the DataFile + * + */ + private void addCategoriesToDataFile(FileMetadata fileMetadata){ + + if (fileMetadata == null){ + throw new NullPointerException("The fileMetadata cannot be null!"); + } + + // Is there anything to add? + // + if (!hasCategories()){ + return; + } + + List currentCategories = fileMetadata.getCategoriesByName(); + + // Add categories to the file metadata object + // + this.getCategories().stream().forEach((catText) -> { + fileMetadata.addCategoryByName(catText); // fyi: "addCategoryByName" checks for dupes + }); + } + + + /** + * NOTE: DataFile tags can only be added to tabular files + * + * - e.g. The file must already be ingested. + * + * Because of this, these tags cannot be used when "Adding" a file via + * the API--e.g. 
b/c the file will note yet be ingested + * + * @param df + */ + private void addFileDataTagsToFile(DataFile df) throws DataFileTagException{ + if (df == null){ + throw new NullPointerException("The DataFile (df) cannot be null!"); + } + + // -------------------------------------------------- + // Is there anything to add? + // -------------------------------------------------- + if (!hasFileDataTags()){ + return; + } + + // -------------------------------------------------- + // Is this a tabular file? + // -------------------------------------------------- + if (!df.isTabularData()){ + String errMsg = ResourceBundle.getBundle("Bundle").getString("file.metadata.datafiletag.not_tabular"); + + throw new DataFileTagException(errMsg); + } + + // -------------------------------------------------- + // Get existing tag list and convert it to list of strings (labels) + // -------------------------------------------------- + List existingDataFileTags = df.getTags(); + List currentLabels; + + if (existingDataFileTags == null){ + // nothing, just make an empty list + currentLabels = new ArrayList<>(); + }else{ + // Yes, get the labels in a list + currentLabels = df.getTags().stream() + .map(x -> x.getTypeLabel()) + .collect(Collectors.toList()) + ; + } + + // -------------------------------------------------- + // Iterate through and add any new labels + // -------------------------------------------------- + DataFileTag newTagObj; + for (String tagLabel : this.getDataFileTags()){ + + if (!currentLabels.contains(tagLabel)){ // not already there! + + // redundant "if" check here. 
Also done in constructor + // + if (DataFileTag.isDataFileTag(tagLabel)){ + + newTagObj = new DataFileTag(); + newTagObj.setDataFile(df); + newTagObj.setTypeByLabel(tagLabel); + df.addTag(newTagObj); + + } + } + } + + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/TwoRavensHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/TwoRavensHelper.java new file mode 100644 index 00000000000..fa16e83be12 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/TwoRavensHelper.java @@ -0,0 +1,203 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. + */ +package edu.harvard.iq.dataverse.datasetutility; + +import edu.harvard.iq.dataverse.FileMetadata; +import edu.harvard.iq.dataverse.PermissionServiceBean; +import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.authorization.users.User; +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; +import java.util.HashMap; +import java.util.Map; + +/** + * + * @author rmp553 + */ +public class TwoRavensHelper { + + private final SettingsServiceBean settingsService; + private PermissionServiceBean permissionService; + + private final Map fileMetadataTwoRavensExploreMap = new HashMap<>(); // { FileMetadata.id : Boolean } + + public TwoRavensHelper(SettingsServiceBean settingsService, PermissionServiceBean permissionService){ + if (settingsService == null){ + throw new NullPointerException("settingsService cannot be null"); + } + if (permissionService == null){ + throw new NullPointerException("permissionService cannot be null"); + } + this.permissionService = permissionService; + this.settingsService = settingsService; + + + } + + + /** + * Call this from a Dataset or File page + * - calls private method canSeeTwoRavensExploreButton + * + * WARNING: Before calling this, make sure the user has 
download + * permission for the file!! (See DatasetPage.canDownloadFile()) + * + * @param fm + * @return + */ + public boolean canSeeTwoRavensExploreButtonFromAPI(FileMetadata fm, User user){ + + if (fm == null){ + return false; + } + + if (user == null){ + return false; + } + + if (!this.permissionService.userOn(user, fm.getDataFile()).has(Permission.DownloadFile)){ + return false; + } + + return this.canSeeTwoRavensExploreButton(fm, true); + } + + /** + * Call this from a Dataset or File page + * - calls private method canSeeTwoRavensExploreButton + * + * WARNING: Before calling this, make sure the user has download + * permission for the file!! (See DatasetPage.canDownloadFile()) + * + * @param fm + * @return + */ + public boolean canSeeTwoRavensExploreButtonFromPage(FileMetadata fm){ + + if (fm == null){ + return false; + } + + return this.canSeeTwoRavensExploreButton(fm, true); + } + + /** + * Used to check whether a tabular file + * may be viewed via TwoRavens + * + * @param fm + * @return + */ + public boolean canSeeTwoRavensExploreButton(FileMetadata fm, boolean permissionsChecked){ + + if (fm == null){ + return false; + } + + // This is only here as a reminder to the public method users + if (!permissionsChecked){ + return false; + } + + // Has this already been checked? + if (this.fileMetadataTwoRavensExploreMap.containsKey(fm.getId())){ + // Yes, return previous answer + //logger.info("using cached result for candownloadfile on filemetadata "+fid); + return this.fileMetadataTwoRavensExploreMap.get(fm.getId()); + } + + + // (1) Is TwoRavens active via the "setting" table? + // Nope: get out + // + if (!settingsService.isTrueForKey(SettingsServiceBean.Key.TwoRavensTabularView, false)){ + this.fileMetadataTwoRavensExploreMap.put(fm.getId(), false); + return false; + } + + + // (2) Is the DataFile object there and persisted? 
+ // Nope: scat + // + if ((fm.getDataFile() == null)||(fm.getDataFile().getId()==null)){ + this.fileMetadataTwoRavensExploreMap.put(fm.getId(), false); + return false; + } + + // (3) Is there tabular data or is the ingest in progress? + // Yes: great + // + if ((fm.getDataFile().isTabularData())||(fm.getDataFile().isIngestInProgress())){ + this.fileMetadataTwoRavensExploreMap.put(fm.getId(), true); + return true; + } + + // Nope + this.fileMetadataTwoRavensExploreMap.put(fm.getId(), false); + return false; + + // (empty fileMetadata.dataFile.id) and (fileMetadata.dataFile.tabularData or fileMetadata.dataFile.ingestInProgress) + // and DatasetPage.canDownloadFile(fileMetadata) + } + + + /** + * Copied over from the dataset page - 9/21/2016 + * + * @return + */ + public String getDataExploreURL() { + String TwoRavensUrl = settingsService.getValueForKey(SettingsServiceBean.Key.TwoRavensUrl); + + if (TwoRavensUrl != null && !TwoRavensUrl.equals("")) { + return TwoRavensUrl; + } + + return ""; + } + + + /** + * Copied over from the dataset page - 9/21/2016 + * + * @param fileid + * @param apiTokenKey + * @return + */ + public String getDataExploreURLComplete(Long fileid, String apiTokenKey) { + + if (fileid == null){ + throw new NullPointerException("fileid cannot be null"); + } + if (apiTokenKey == null){ + throw new NullPointerException("apiTokenKey cannot be null (at least adding this check)"); + } + + + String TwoRavensUrl = settingsService.getValueForKey(SettingsServiceBean.Key.TwoRavensUrl); + String TwoRavensDefaultLocal = "/dataexplore/gui.html?dfId="; + + if (TwoRavensUrl != null && !TwoRavensUrl.equals("")) { + // If we have TwoRavensUrl set up as, as an optional + // configuration service, it must mean that TwoRavens is sitting + // on some remote server. And that in turn means that we must use + // full URLs to pass data and metadata to it. + // update: actually, no we don't want to use this "dataurl" notation. + // switching back to the dfId=: + // -- L.A. 
4.1 + /* + String tabularDataURL = getTabularDataFileURL(fileid); + String tabularMetaURL = getVariableMetadataURL(fileid); + return TwoRavensUrl + "?ddiurl=" + tabularMetaURL + "&dataurl=" + tabularDataURL + "&" + getApiTokenKey(); + */ + return TwoRavensUrl + "?dfId=" + fileid + "&" + apiTokenKey; + } + + // For a local TwoRavens setup it's enough to call it with just + // the file id: + return TwoRavensDefaultLocal + fileid + "&" + apiTokenKey; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/WorldMapPermissionHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/WorldMapPermissionHelper.java new file mode 100644 index 00000000000..69205f29452 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/WorldMapPermissionHelper.java @@ -0,0 +1,517 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. + */ +package edu.harvard.iq.dataverse.datasetutility; + +import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.FileMetadata; +import edu.harvard.iq.dataverse.MapLayerMetadata; +import edu.harvard.iq.dataverse.MapLayerMetadataServiceBean; +import edu.harvard.iq.dataverse.PermissionServiceBean; +import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.authorization.users.User; +import edu.harvard.iq.dataverse.settings.SettingsServiceBean; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * This class originally encapsulated display logic for the DatasetPage + * + * It allows the following checks without redundantly querying the db to + * check permissions or if MapLayerMetadata exists + * + * - canUserSeeMapDataButton (private) + * - canUserSeeMapDataButtonFromPage (public) + * - canUserSeeMapDataButtonFromAPI (public) + * + * - 
canSeeMapButtonReminderToPublish (private) + * - canSeeMapButtonReminderToPublishFromPage (public) + * - canSeeMapButtonReminderToPublishFromAPI (public) + * + * - canUserSeeExploreWorldMapButton (private) + * - canUserSeeExploreWorldMapButtonFromPage (public) + * - canUserSeeExploreWorldMapButtonFromAPI (public) + * + * @author rmp553 + */ +public class WorldMapPermissionHelper { + + private SettingsServiceBean settingsService; + private MapLayerMetadataServiceBean mapLayerMetadataService; + private PermissionServiceBean permissionService; + + private Dataset dataset; + + private final Map fileMetadataWorldMapExplore = new HashMap<>(); // { FileMetadata.id : Boolean } + private final Map mapLayerMetadataLookup = new HashMap<>(); + + + public WorldMapPermissionHelper(SettingsServiceBean settingsService, MapLayerMetadataServiceBean mapLayerMetadataService, + Dataset dataset, PermissionServiceBean permissionService){ + + if (dataset == null){ + throw new NullPointerException("dataset cannot be null"); + } + if (dataset.getId() == null){ + throw new NullPointerException("dataset must be saved! 
(have an id)"); + } + + if (settingsService == null){ + throw new NullPointerException("settingsService cannot be null"); + } + if (mapLayerMetadataService == null){ + throw new NullPointerException("mapLayerMetadataService cannot be null"); + } + + this.dataset = dataset; + + this.settingsService = settingsService; + this.mapLayerMetadataService = mapLayerMetadataService; + this.permissionService = permissionService; + + loadMapLayerMetadataLookup(); + } + + + /** + * Convenience method for instantiating from dataset page or File page + * + * Does NOT use PermissionServiceBean + * + * @param settingsService + * @param mapLayerMetadataService + * @param dataset + * @return + */ + public static WorldMapPermissionHelper getPermissionHelperForDatasetPage( + SettingsServiceBean settingsService, MapLayerMetadataServiceBean mapLayerMetadataService, + Dataset dataset){ + + return new WorldMapPermissionHelper(settingsService, mapLayerMetadataService, dataset, null); + } + + /** + * Convenience method for instantiating from the API + * + * REQUIRES PermissionServiceBean + * + * @param settingsService + * @param mapLayerMetadataService + * @param dataset + * @param permissionService + * @return + */ + public static WorldMapPermissionHelper getPermissionHelperForAPI( + SettingsServiceBean settingsService, + MapLayerMetadataServiceBean mapLayerMetadataService, + Dataset dataset, + PermissionServiceBean permissionService){ + + if (permissionService == null){ + throw new NullPointerException("permissionService is required for API checks"); + } + + return new WorldMapPermissionHelper(settingsService, mapLayerMetadataService, dataset, permissionService); + } + + + /** + * Create a hashmap consisting of { DataFile.id : MapLayerMetadata object} + * + * Very few DataFiles will have associated MapLayerMetadata objects so only + * use 1 query to get them + */ + private void loadMapLayerMetadataLookup() { + + + List mapLayerMetadataList = 
mapLayerMetadataService.getMapLayerMetadataForDataset(this.dataset); + if (mapLayerMetadataList == null) { + return; + } + for (MapLayerMetadata layer_metadata : mapLayerMetadataList) { + mapLayerMetadataLookup.put(layer_metadata.getDataFile().getId(), layer_metadata); + } + + }// A DataFile may have a related MapLayerMetadata object + + + /** + * Using a DataFile id, retrieve an associated MapLayerMetadata object + * + * The MapLayerMetadata objects have been fetched at page inception by + * "loadMapLayerMetadataLookup()" + */ + public MapLayerMetadata getMapLayerMetadata(DataFile df) { + if (df == null) { + return null; + } + return this.mapLayerMetadataLookup.get(df.getId()); + } + + + /* + * Call this when using the API + * - calls private method canUserSeeExploreWorldMapButton + */ + public boolean canUserSeeExploreWorldMapButtonFromAPI(FileMetadata fm, User user){ + + if (fm == null){ + return false; + } + if (user==null){ + return false; + } + if (!this.permissionService.userOn(user, fm.getDataFile()).has(Permission.DownloadFile)){ + return false; + } + + return this.canUserSeeExploreWorldMapButton(fm, true); + } + + /** + * Call this from a Dataset or File page + * - calls private method canUserSeeExploreWorldMapButton + * + * WARNING: Before calling this, make sure the user has download + * permission for the file!! (See DatasetPage.canDownloadFile()) + * + * @param FileMetadata fm + * @return boolean + */ + public boolean canUserSeeExploreWorldMapButtonFromPage(FileMetadata fm){ + + if (fm==null){ + return false; + } + + return this.canUserSeeExploreWorldMapButton(fm, true); + } + + /** + * WARNING: Before calling this, make sure the user has download + * permission for the file!! (See DatasetPage.canDownloadFile()) + * + * Should there be a Explore WorldMap Button for this file? + * See table in: https://github.com/IQSS/dataverse/issues/1618 + * + * (1) Does the file have MapLayerMetadata? 
+ * (2) Are the proper settings in place + * + * @param fm FileMetadata + * @return boolean + */ + private boolean canUserSeeExploreWorldMapButton(FileMetadata fm, boolean permissionsChecked){ + + if (fm==null){ + return false; + } + + // This is only here to make the public method users think... + if (!permissionsChecked){ + return false; + } + + if (this.fileMetadataWorldMapExplore.containsKey(fm.getId())){ + // Yes, return previous answer + //logger.info("using cached result for candownloadfile on filemetadata "+fid); + return this.fileMetadataWorldMapExplore.get(fm.getId()); + } + + /* ----------------------------------------------------- + Does a Map Exist? + ----------------------------------------------------- */ + if (!(this.hasMapLayerMetadata(fm))){ + // Nope: no button + this.fileMetadataWorldMapExplore.put(fm.getId(), false); + return false; + } + + /* + Is setting for GeoconnectViewMaps true? + Nope? no button + */ + if (!settingsService.isTrueForKey(SettingsServiceBean.Key.GeoconnectViewMaps, false)){ + this.fileMetadataWorldMapExplore.put(fm.getId(), false); + return false; + } + + /* ----------------------------------------------------- + Yes: User can view button! 
+ ----------------------------------------------------- */ + this.fileMetadataWorldMapExplore.put(fm.getId(), true); + return true; + } + + + /* + Check if the FileMetadata.dataFile has an associated MapLayerMetadata object + + The MapLayerMetadata objects have been fetched at page inception by "loadMapLayerMetadataLookup()" + */ + public boolean hasMapLayerMetadata(FileMetadata fm) { + if (fm == null) { + return false; + } + if (fm.getDataFile() == null) { + return false; + } + return doesDataFileHaveMapLayerMetadata(fm.getDataFile()); + } + + /** + * Check if a DataFile has an associated MapLayerMetadata object + * + * The MapLayerMetadata objects have been fetched at page inception by + * "loadMapLayerMetadataLookup()" + */ + private boolean doesDataFileHaveMapLayerMetadata(DataFile df) { + if (df == null) { + return false; + } + if (df.getId() == null) { + return false; + } + return this.mapLayerMetadataLookup.containsKey(df.getId()); + } + + + + /** + * Check if this is a mappable file type. 
+ * + * Currently (2/2016) + * - Shapefile (zipped shapefile) + * - Tabular file with Geospatial Data tag + * + * @param fm + * @return + */ + private boolean isPotentiallyMappableFileType(FileMetadata fm){ + if (fm==null){ + return false; + } + + // Yes, it's a shapefile + // + if (this.isShapefileType(fm)){ + return true; + } + + // Yes, it's tabular with a geospatial tag + // + if (fm.getDataFile().isTabularData()){ + if (fm.getDataFile().hasGeospatialTag()){ + return true; + } + } + return false; + } + + + + public boolean isShapefileType(FileMetadata fm) { + if (fm == null) { + return false; + } + if (fm.getDataFile() == null) { + return false; + } + + return fm.getDataFile().isShapefileType(); + } + + + /** + * Call this from a Dataset or File page + * - calls private method canSeeMapButtonReminderToPublish + * + * WARNING: Assumes user isAuthenicated AND has Permission.EditDataset + * - These checks should be made on the DatasetPage or FilePage which calls this method + * + * + * @param FileMetadata fm + * @return boolean + */ + public boolean canSeeMapButtonReminderToPublishFromPage(FileMetadata fm){ + if (fm == null){ + return false; + } + + return this.canSeeMapButtonReminderToPublish(fm, true); + + } + + + /** + * Call this when using the API + * - calls private method canSeeMapButtonReminderToPublish + * + * @param fm + * @param user + * @return + */ + public boolean canSeeMapButtonReminderToPublishFromAPI(FileMetadata fm, User user){ + if (fm == null){ + return false; + } + if (user==null){ + return false; + } + + if (!this.permissionService.userOn(user, this.dataset).has(Permission.EditDataset)){ + return false; + } + + return this.canSeeMapButtonReminderToPublish(fm, true); + + } + + + + /** + * Assumes permissions have been checked!! + * + * See table in: https://github.com/IQSS/dataverse/issues/1618 + * + * Can the user see a reminder to publish button? 
+ * (1) Is the view GeoconnectViewMaps + * (2) Is this file a Shapefile or a Tabular file tagged as Geospatial? + * (3) Is this DataFile released? Yes, don't need reminder + * (4) Does a map already exist? Yes, don't need reminder + */ + private boolean canSeeMapButtonReminderToPublish(FileMetadata fm, boolean permissionsChecked){ + if (fm==null){ + return false; + } + + // This is only here as a reminder to the public method users + if (!permissionsChecked){ + return false; + } + + // (1) Is the view GeoconnectViewMaps + if (!settingsService.isTrueForKey(SettingsServiceBean.Key.GeoconnectCreateEditMaps, false)){ + return false; + } + + + // (2) Is this file a Shapefile or a Tabular file tagged as Geospatial? + // + if (!(this.isPotentiallyMappableFileType(fm))){ + return false; + } + + // (3) Is this DataFile released? Yes, don't need reminder + // + if (fm.getDataFile().isReleased()){ + return false; + } + + // (4) Does a map already exist? Yes, don't need reminder + // + if (this.hasMapLayerMetadata(fm)){ + return false; + } + + // Looks good + // + return true; + } + + /** + * + * WARNING: Assumes user isAuthenicated AND has Permission.EditDataset + * - These checks are made on the DatasetPage which calls this method + * + */ + public boolean canUserSeeMapDataButtonFromPage(FileMetadata fm){ + + if (fm==null){ + return false; + } + return this.canUserSeeMapDataButton(fm, true); + } + + + + /** + * Call this when using the API + * - calls private method canUserSeeMapDataButton + * + * @param fm + * @param user + * @return + */ + public boolean canUserSeeMapDataButtonFromAPI(FileMetadata fm, User user){ + if (fm == null){ + return false; + } + if (user==null){ + return false; + } + + if (!this.permissionService.userOn(user, this.dataset).has(Permission.EditDataset)){ + return false; + } + + return this.canUserSeeMapDataButton(fm, true); + + } + + /** + * + * WARNING: Assumes user isAuthenicated AND has Permission.EditDataset + * - These checks are made on the 
DatasetPage which calls this method + * + * Should there be a Map Data Button for this file? + * see table in: https://github.com/IQSS/dataverse/issues/1618 + * (1) Is the user logged in? + * (2) Is this file a Shapefile or a Tabular file tagged as Geospatial? + * (3) Does the logged in user have permission to edit the Dataset to which this FileMetadata belongs? + * (4) Is the create Edit Maps flag set to true? + * (5) Any of these conditions: + * 9a) File Published + * (b) Draft: File Previously published + * @param fm FileMetadata + * @return boolean + */ + private boolean canUserSeeMapDataButton(FileMetadata fm, boolean permissionsChecked){ + + if (fm==null){ + return false; + } + + // This is only here as a reminder to the public method users + if (!permissionsChecked){ + return false; + } + + // (1) Is this file a Shapefile or a Tabular file tagged as Geospatial? + // TO DO: EXPAND FOR TABULAR FILES TAGGED AS GEOSPATIAL! + // + if (!(this.isPotentiallyMappableFileType(fm))){ + return false; + } + + + // (2) Is the view GeoconnectViewMaps + if (!settingsService.isTrueForKey(SettingsServiceBean.Key.GeoconnectCreateEditMaps, false)){ + return false; + } + + // (3) Is File released? 
+ // + if (fm.getDataFile().isReleased()){ + return true; + } + + // Nope + return false; + } + + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/mydata/RolePermissionHelperPage.java b/src/main/java/edu/harvard/iq/dataverse/mydata/RolePermissionHelperPage.java index 114e33d058c..06841c470d8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/mydata/RolePermissionHelperPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/mydata/RolePermissionHelperPage.java @@ -53,7 +53,7 @@ public String init() { //List dtypes = new ArrayList<>(); - return null; + return null; } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 9fd1bc02633..9104414724a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -36,6 +36,7 @@ import edu.harvard.iq.dataverse.util.DatasetFieldWalker; import edu.harvard.iq.dataverse.util.StringUtil; import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder; +import java.math.BigDecimal; import java.util.ArrayList; import java.util.Set; import javax.json.Json; @@ -57,6 +58,7 @@ import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collector; +import java.util.stream.Collectors; import static java.util.stream.Collectors.toList; import javax.json.JsonArray; import javax.json.JsonObject; @@ -309,6 +311,26 @@ public static JsonObjectBuilder json(DatasetVersion dsv) { return bld; } + + public static JsonObjectBuilder jsonDataFileList(List dataFiles){ + + if (dataFiles==null){ + throw new NullPointerException("dataFiles cannot be null"); + } + + JsonObjectBuilder bld = jsonObjectBuilder(); + + + List dataFileList = dataFiles.stream() + .map(x -> x.getFileMetadata()) + .collect(Collectors.toList()); + + + bld.add("files", jsonFileMetadatas(dataFileList)); + + return bld; + } + private 
static String getRootDataverseNameforCitation(Dataset dataset) { Dataverse root = dataset.getOwner(); while (root.getOwner() != null) { @@ -366,6 +388,7 @@ public static JsonArrayBuilder jsonFileMetadatas(Collection fmds) for (FileMetadata fmd : fmds) { filesArr.add(json(fmd)); } + return filesArr; } @@ -503,23 +526,39 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata) { fileName = df.getFileMetadata().getLabel(); } + return jsonObjectBuilder() .add("id", df.getId()) .add("filename", fileName) - .add("contentType", df.getContentType()) + .add("contentType", df.getContentType()) + .add("filesize", df.getFilesize()) + .add("description", df.getDescription()) + //.add("released", df.isReleased()) + //.add("restricted", df.isRestricted()) .add("storageIdentifier", df.getStorageIdentifier()) .add("originalFileFormat", df.getOriginalFileFormat()) .add("originalFormatLabel", df.getOriginalFormatLabel()) .add("UNF", df.getUnf()) - /** - * @todo Should we deprecate "md5" now that it's under - * "checksum" (which may also be a SHA-1 rather than an MD5)? - */ + //--------------------------------------------- + // For file replace: rootDataFileId, previousDataFileId + //--------------------------------------------- + .add("rootDataFileId", df.getRootDataFileId()) + .add("previousDataFileId", df.getPreviousDataFileId()) + //--------------------------------------------- + // Add categories + tags + //--------------------------------------------- + .add("categories", fileMetadata.getCategoryNamesAsJsonArrayBuilder()) + .add("tags", df.getTagLabelsAsJsonArrayBuilder()) + //--------------------------------------------- + // Checksum + // * @todo Should we deprecate "md5" now that it's under + // * "checksum" (which may also be a SHA-1 rather than an MD5)? 
+ //--------------------------------------------- .add("md5", getMd5IfItExists(df.getChecksumType(), df.getChecksumValue())) .add("checksum", getChecksumTypeAndValue(df.getChecksumType(), df.getChecksumValue())) - .add("description", df.getDescription()); + ; } - + public static String format(Date d) { return (d == null) ? null : Util.getDateTimeFormat().format(d); } diff --git a/src/main/webapp/file_upload_test.xhtml b/src/main/webapp/file_upload_test.xhtml new file mode 100644 index 00000000000..7f90bc2fae1 --- /dev/null +++ b/src/main/webapp/file_upload_test.xhtml @@ -0,0 +1,109 @@ + + + + + + + + + + + + + + + + + Test Add/Replace functions + + + + + + + + Added!: #{FileUploadTestPage.newlyAddedFile.displayName } + + + + + + + + Add New File + + + Replace File: #{FileUploadTestPage.fileToReplace.displayName } + + + + + + + + + + + + + + + + + + + + No dataset chosen. + + + #{FileUploadTestPage.dataset.displayName } v#{FileUploadTestPage.dataset.versionNumber } + + + + Name + Content type + Hash + Is Released + + + + + + #{fm.label} + + + #{fm.label} + + + + + + + replacable + + + nope + + + + + + + + + + + + + + diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index e392f0cf7ab..ba99cec8b44 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -28,6 +28,7 @@ import static junit.framework.Assert.assertEquals; import org.hamcrest.CoreMatchers; import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.CoreMatchers.nullValue; public class DatasetsIT { @@ -465,4 +466,6 @@ public void testFileChecksum() { .statusCode(OK.getStatusCode()); } + + } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java new file mode 100644 index 00000000000..342e54539a1 --- /dev/null 
+++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -0,0 +1,528 @@ +package edu.harvard.iq.dataverse.api; + +import com.jayway.restassured.RestAssured; +import com.jayway.restassured.response.Response; +import java.util.logging.Logger; +import org.junit.BeforeClass; +import org.junit.Test; +import com.jayway.restassured.path.json.JsonPath; +import edu.harvard.iq.dataverse.util.BundleUtil; +import java.util.Collections; +import java.util.ResourceBundle; +import static javax.ws.rs.core.Response.Status.BAD_REQUEST; + +import static javax.ws.rs.core.Response.Status.CREATED; +import static javax.ws.rs.core.Response.Status.FORBIDDEN; +import static javax.ws.rs.core.Response.Status.NOT_FOUND; +import static javax.ws.rs.core.Response.Status.OK; +import static junit.framework.Assert.assertEquals; +import static org.hamcrest.CoreMatchers.equalTo; +import org.hamcrest.Matchers; +import static org.junit.Assert.assertNotNull; + +public class FilesIT { + + private static final Logger logger = Logger.getLogger(FilesIT.class.getCanonicalName()); + + @BeforeClass + public static void setUpClass() { + RestAssured.baseURI = UtilIT.getRestAssuredBaseUri(); + } + + + /** + * Create user and get apiToken + * + * @return + */ + private String createUserGetToken(){ + Response createUser = UtilIT.createRandomUser(); + msg(createUser.toString()); + msg(createUser.prettyPrint()); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + + msg(createUser.prettyPrint()); + + + String username = UtilIT.getUsernameFromResponse(createUser); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + return apiToken; + } + + + private String createDataverseGetAlias(String apiToken){ + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + //createDataverseResponse.prettyPrint(); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + 
return dataverseAlias; + } + + + private Integer createDatasetGetId(String dataverseAlias, String apiToken){ + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + return datasetId; + + } + + @Test + public void test_001_AddFileGood() { + msgt("test_001_AddFileGood"); + // Create user + String apiToken = createUserGetToken(); + + // Create Dataverse + String dataverseAlias = createDataverseGetAlias(apiToken); + + // Create Dataset + Integer datasetId = createDatasetGetId(dataverseAlias, apiToken); + + + String pathToFile = "src/main/webapp/resources/images/favicondataverse.png"; + Response addResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken); + + //addResponse.prettyPrint(); + msgt("Here it is: " + addResponse.prettyPrint()); + String successMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.success.add"); + + + addResponse.then().assertThat() + .body("data.message", equalTo(successMsg)) + .body("status", equalTo(AbstractApiBean.STATUS_OK)) + .body("data.files[0].dataFile.contentType", equalTo("image/png")) + .body("data.files[0].label", equalTo("dataverseproject.png")) + .statusCode(OK.getStatusCode()); + + + //------------------------------------------------ + // Try to add the same file again -- and fail + //------------------------------------------------ + Response addTwiceResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken); + + msgt("2nd requests: " + addTwiceResponse.prettyPrint()); //addResponse.prettyPrint(); + + String errMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.duplicate_file"); + + addTwiceResponse.then().assertThat() + .body("message", Matchers.startsWith(errMsg)) + .body("status", 
equalTo(AbstractApiBean.STATUS_ERROR)) + .statusCode(BAD_REQUEST.getStatusCode()); + } + + + @Test + public void test_002_AddFileBadDatasetId() { + msgt("test_002_AddFileNullFileId"); + // Create user + String apiToken =createUserGetToken(); + + // Create Dataset + String datasetId = "cat"; //createDatasetGetId(dataverseAlias, apiToken); + + + String pathToFile = "src/main/webapp/resources/images/favicondataverse.png"; + Response addResponse = UtilIT.uploadFileViaNative("cat", pathToFile, apiToken); + //msgt("Here it is: " + addResponse.prettyPrint()); + + // Adding a non-numeric id should result in a 404 + addResponse.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()); + } + + + @Test + public void test_003_AddFileNonExistentDatasetId() { + msgt("test_003_AddFileNonExistentDatasetId"); + + // Create user + String apiToken = createUserGetToken(); + + // Create Dataset + String datasetId = "9999"; //createDatasetGetId(dataverseAlias, apiToken); + + + String pathToFile = "src/main/webapp/resources/images/favicondataverse.png"; + Response addResponse = UtilIT.uploadFileViaNative(datasetId, pathToFile, apiToken); + + + msgt("Here it is: " + addResponse.prettyPrint()); + + //String errMsg Start = ResourceBundle.getBundle("Bundle").getString("find.dataset.error.dataset.not.found.id"); + String errMsg = BundleUtil.getStringFromBundle("find.dataset.error.dataset.not.found.id", Collections.singletonList(datasetId)); + + addResponse.then().assertThat() + .body("status", equalTo(AbstractApiBean.STATUS_ERROR)) + .body("message", equalTo(errMsg)) + .statusCode(NOT_FOUND.getStatusCode()); + } + + @Test + public void test_004_AddFileBadToken() { + msgt("test_004_AddFileBadToken"); + + // Create user + String apiToken = "Bad Medicine"; + + // Create Dataset - should pick up permissions error first + String datasetId = "1"; //createDatasetGetId(dataverseAlias, apiToken); + + + String pathToFile = "src/main/webapp/resources/images/favicondataverse.png"; + Response 
addResponse = UtilIT.uploadFileViaNative(datasetId, pathToFile, apiToken); + + msgt("Here it is: " + addResponse.prettyPrint()); + + String errMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.auth"); + + addResponse.then().assertThat() + .body("status", equalTo(AbstractApiBean.STATUS_ERROR)) + .body("message", equalTo(errMsg)) + .statusCode(FORBIDDEN.getStatusCode()); + } + + + @Test + public void test_005_AddFileBadPermissions() { + msgt("test_005_AddFileBadPerms"); + + // Create user + String apiToken = createUserGetToken(); + + // Create Dataverse + String dataverseAlias = createDataverseGetAlias(apiToken); + + // Create Dataset + Integer datasetId = createDatasetGetId(dataverseAlias, apiToken); + + // Create another user + String apiTokenUnauthorizedUser = createUserGetToken(); + + + String pathToFile = "src/main/webapp/resources/images/favicondataverse.png"; + Response addResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiTokenUnauthorizedUser); + + //addResponse.prettyPrint(); + msgt("Here it is: " + addResponse.prettyPrint()); + + + String errMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.no_edit_dataset_permission"); + + + addResponse.then().assertThat() + .body("message", equalTo(errMsg)) + .body("status", equalTo(AbstractApiBean.STATUS_ERROR)) + .statusCode(FORBIDDEN.getStatusCode()); + } + + @Test + public void test_006_ReplaceFileGood() { + msgt("test_006_ReplaceFileGood"); + + // Create user + String apiToken = createUserGetToken(); + + // Create Dataverse + String dataverseAlias = createDataverseGetAlias(apiToken); + + // Create Dataset + Integer datasetId = createDatasetGetId(dataverseAlias, apiToken); + + // ------------------------- + // Add initial file + // ------------------------- + msg("Add initial file"); + String pathToFile = "src/main/webapp/resources/images/favicondataverse.png"; + Response addResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, 
apiToken); + + String successMsgAdd = ResourceBundle.getBundle("Bundle").getString("file.addreplace.success.add"); + + addResponse.then().assertThat() + .body("data.message", equalTo(successMsgAdd)) + .body("data.files[0].dataFile.contentType", equalTo("image/png")) + .body("data.files[0].label", equalTo("dataverseproject.png")) + .statusCode(OK.getStatusCode()); + + + long origFileId = JsonPath.from(addResponse.body().asString()).getLong("data.files[0].dataFile.id"); + + msg("Orig file id: " + origFileId); + assertNotNull(origFileId); // If checkOut fails, display message + + // ------------------------- + // Publish dataverse and dataset + // ------------------------- + msg("Publish dataverse and dataset"); + Response publishDataversetResp = UtilIT.publishDataverseViaSword(dataverseAlias, apiToken); + publishDataversetResp.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response publishDatasetResp = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); + publishDatasetResp.then().assertThat() + .statusCode(OK.getStatusCode()); + + + // ------------------------- + // Replace file - BAD/warning b/c different content-type + // ------------------------- + msg("Replace file - BAD/warning b/c different content-type"); + + String pathToFileWrongCtype = "src/main/webapp/resources/images/ajax-loading.gif"; + Response replaceRespWrongCtype = UtilIT.replaceFile(origFileId, pathToFileWrongCtype, apiToken); + + msgt(replaceRespWrongCtype.prettyPrint()); + + String errMsgCtype = ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.replace.new_file_has_different_content_type"); + + + replaceRespWrongCtype.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("status", equalTo(AbstractApiBean.STATUS_ERROR)) + .body("message", equalTo(errMsgCtype)); + //.body("data.rootDataFileId", equalTo(origFileId)) + + // ------------------------- + // Replace file + // ------------------------- + msg("Replace file - 1st time"); + String 
pathToFile2 = "src/main/webapp/resources/images/cc0.png"; + Response replaceResp = UtilIT.replaceFile(origFileId, pathToFile2, apiToken); + + msgt(replaceResp.prettyPrint()); + + String successMsg2 = ResourceBundle.getBundle("Bundle").getString("file.addreplace.success.replace"); + + replaceResp.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.message", equalTo(successMsg2)) + .body("data.files[0].label", equalTo("cc0.png")) + //.body("data.rootDataFileId", equalTo(origFileId)) + ; + + long rootDataFileId = JsonPath.from(replaceResp.body().asString()).getLong("data.files[0].dataFile.rootDataFileId"); + long previousDataFileId = JsonPath.from(replaceResp.body().asString()).getLong("data.files[0].dataFile.previousDataFileId"); + long newDataFileId = JsonPath.from(replaceResp.body().asString()).getLong("data.files[0].dataFile.id"); + + assertEquals(origFileId, previousDataFileId); + assertEquals(rootDataFileId, previousDataFileId); + + + // ------------------------- + // Publish dataset (again) + // ------------------------- + msg("Publish dataset (again)"); + publishDatasetResp = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); + publishDatasetResp.then().assertThat() + .statusCode(OK.getStatusCode()); + + + // ------------------------- + // Replace file (again) + // ------------------------- + msg("Replace file (again)"); + String pathToFile3 = "src/main/webapp/resources/images/favicondataverse.png"; + Response replaceResp2 = UtilIT.replaceFile(newDataFileId, pathToFile3, apiToken); + + msgt("2nd replace: " + replaceResp2.prettyPrint()); + + replaceResp2.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("status", equalTo(AbstractApiBean.STATUS_OK)) + .body("data.message", equalTo(successMsg2)) + .body("data.files[0].label", equalTo("favicondataverse.png")) + ; + + long rootDataFileId2 = JsonPath.from(replaceResp2.body().asString()).getLong("data.files[0].dataFile.rootDataFileId"); + long previousDataFileId2 = 
JsonPath.from(replaceResp2.body().asString()).getLong("data.files[0].dataFile.previousDataFileId"); + + msgt("newDataFileId: " + newDataFileId); + msgt("previousDataFileId2: " + previousDataFileId2); + msgt("rootDataFileId2: " + rootDataFileId2); + + assertEquals(newDataFileId, previousDataFileId2); + assertEquals(rootDataFileId2, origFileId); + + } + + + @Test + public void test_007_ReplaceFileUnpublishedAndBadIds() { + msgt("test_007_ReplaceFileBadIds"); + + // Create user + String apiToken = createUserGetToken(); + + // Create Dataverse + String dataverseAlias = createDataverseGetAlias(apiToken); + + // Create Dataset + Integer datasetId = createDatasetGetId(dataverseAlias, apiToken); + + // ------------------------- + // Add initial file + // ------------------------- + String pathToFile = "src/main/webapp/resources/images/favicondataverse.png"; + Response addResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken); + + String successMsgAdd = ResourceBundle.getBundle("Bundle").getString("file.addreplace.success.add"); + + addResponse.then().assertThat() + .body("data.message", equalTo(successMsgAdd)) + .body("data.files[0].dataFile.contentType", equalTo("image/png")) + .body("data.files[0].label", equalTo("dataverseproject.png")) + .statusCode(OK.getStatusCode()); + + + long origFileId = JsonPath.from(addResponse.body().asString()).getLong("data.files[0].dataFile.id"); + + msg("Orig file id: " + origFileId); + assertNotNull(origFileId); // If checkOut fails, display message + + // ------------------------- + // Publish dataverse + // ------------------------- + Response publishDataversetResp = UtilIT.publishDataverseViaSword(dataverseAlias, apiToken); + publishDataversetResp.then().assertThat() + .statusCode(OK.getStatusCode()); + + + // ------------------------- + // Replace file in unpublished dataset -- e.g. 
file not published + // ------------------------- + String pathToFile2 = "src/main/webapp/resources/images/cc0.png"; + Response replaceResp = UtilIT.replaceFile(origFileId, pathToFile2, apiToken); + + String errMsgUnpublished = ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.unpublished_file_cannot_be_replaced"); + + replaceResp.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("status", equalTo(AbstractApiBean.STATUS_ERROR)) + .body("message", Matchers.startsWith(errMsgUnpublished)) + ; + + // ------------------------- + // Publish dataset + // ------------------------- + Response publishDatasetResp = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); + publishDatasetResp.then().assertThat() + .statusCode(OK.getStatusCode()); + + + // ------------------------- + // Replace file with non-existent Id + // ------------------------- + pathToFile2 = "src/main/webapp/resources/images/cc0.png"; + Response replaceResp2 = UtilIT.replaceFile(origFileId+10, pathToFile2, apiToken); + + msgt("non-existent id: " + replaceResp.prettyPrint()); + + String errMsg1 = ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.existing_file_to_replace_not_found_by_id"); + + replaceResp2.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("status", equalTo(AbstractApiBean.STATUS_ERROR)) + .body("message", Matchers.startsWith(errMsg1)) + ; + + + } + + + @Test + public void test_008_ReplaceFileAlreadyDeleted() { + msgt("test_008_ReplaceFileAlreadyDeleted"); + + // Create user + String apiToken = createUserGetToken(); + + // Create Dataverse + String dataverseAlias = createDataverseGetAlias(apiToken); + + // Create Dataset + Integer datasetId = createDatasetGetId(dataverseAlias, apiToken); + + // ------------------------- + // Add initial file + // ------------------------- + String pathToFile = "src/main/webapp/resources/images/favicondataverse.png"; + Response addResponse = 
UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken); + + String successMsgAdd = ResourceBundle.getBundle("Bundle").getString("file.addreplace.success.add"); + + addResponse.then().assertThat() + .body("data.message", equalTo(successMsgAdd)) + .body("data.files[0].dataFile.contentType", equalTo("image/png")) + .body("data.files[0].label", equalTo("dataverseproject.png")) + .statusCode(OK.getStatusCode()); + + + long origFileId = JsonPath.from(addResponse.body().asString()).getLong("data.files[0].dataFile.id"); + + msg("Orig file id: " + origFileId); + assertNotNull(origFileId); // If checkOut fails, display message + + // ------------------------- + // Publish dataverse + // ------------------------- + Response publishDataversetResp = UtilIT.publishDataverseViaSword(dataverseAlias, apiToken); + publishDataversetResp.then().assertThat() + .statusCode(OK.getStatusCode()); + + + // ------------------------- + // Publish dataset + // ------------------------- + Response publishDatasetResp = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); + publishDatasetResp.then().assertThat() + .statusCode(OK.getStatusCode()); + + // ------------------------- + // Delete file + // ------------------------- + UtilIT.deleteFile((int)origFileId, apiToken); + + // ------------------------- + // Re-Publish dataset + // ------------------------- + publishDatasetResp = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); + publishDatasetResp.then().assertThat() + .statusCode(OK.getStatusCode()); + + + // ------------------------- + // Replace file in unpublished dataset -- e.g. 
file not published + // ------------------------- + String pathToFile2 = "src/main/webapp/resources/images/cc0.png"; + Response replaceResp = UtilIT.replaceFile(origFileId, pathToFile2, apiToken); + + String errMsgDeleted = ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.existing_file_not_in_latest_published_version"); + + msgt("replace resp: " + replaceResp.prettyPrint()); + + replaceResp.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("status", equalTo(AbstractApiBean.STATUS_ERROR)) + .body("message", Matchers.startsWith(errMsgDeleted)) + ; + + } + + + private void msg(String m){ + System.out.println(m); + } + private void dashes(){ + msg("----------------"); + } + private void msgt(String m){ + dashes(); msg(m); dashes(); + } + + +} diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java index 3fb9744a2eb..df2dbe914ed 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java @@ -16,6 +16,7 @@ import static javax.ws.rs.core.Response.Status.NO_CONTENT; import static javax.ws.rs.core.Response.Status.OK; import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.Matchers.endsWith; import org.junit.AfterClass; import static org.junit.Assert.assertEquals; @@ -203,6 +204,19 @@ public void testCreateDataverseCreateDatasetUploadFileDownloadFileEditTitle() { uploadFile1.prettyPrint(); assertEquals(CREATED.getStatusCode(), uploadFile1.getStatusCode()); + Response getDatasetJson = UtilIT.nativeGetUsingPersistentId(persistentId, apiToken); + getDatasetJson.prettyPrint(); + getDatasetJson.then().assertThat() + .body("data.latestVersion.files[0].dataFile.filename", equalTo("trees.png")) + /** + * @todo The plan is to switch this to the nullValue version + * rather than expecting -1. 
+ */ + .body("data.latestVersion.files[0].dataFile.rootDataFileId", equalTo(-1)) + // .body("data.latestVersion.files[0].dataFile.rootDataFileId", nullValue()) + .body("data.latestVersion.files[0].dataFile.previousDataFileId", nullValue()) + .statusCode(OK.getStatusCode()); + Response swordStatementUnAuth = UtilIT.getSwordStatement(persistentId, apiTokenNoPrivs); swordStatementUnAuth.prettyPrint(); swordStatementUnAuth.then().assertThat() diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index ee651ecab64..ffd5f147286 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -271,6 +271,33 @@ public static Response uploadFile(String persistentId, String zipfilename, Strin } + /** + * For test purposes, datasetId can be non-numeric + * + * @param datasetId + * @param pathToFile + * @param apiToken + * @return + */ + static Response uploadFileViaNative(String datasetId, String pathToFile, String apiToken) { + + + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .multiPart("datasetId", datasetId) + .multiPart("file", new File("src/main/webapp/resources/images/dataverseproject.png")) + .post("/api/datasets/" + datasetId + "/add"); + } + + static Response replaceFile(long fileId, String pathToFile, String apiToken) { + //.add("fileToReplaceId", fileId) + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .multiPart("file", new File(pathToFile)) + .multiPart("jsonData", Json.createObjectBuilder().build().toString()) + .post("/api/files/" + fileId + "/replace"); + } + static Response downloadFile(Integer fileId) { return given() // .header(API_TOKEN_HTTP_HEADER, apiToken) diff --git a/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java b/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java new file mode 100644 index 00000000000..02b435aa7fe --- 
/dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java @@ -0,0 +1,248 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. + */ +package edu.harvard.iq.dataverse.datasetutility; + +import java.util.Arrays; +import java.util.List; +import java.util.ResourceBundle; +import org.hamcrest.Matchers; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import static org.junit.Assert.*; + +/** + * + * @author rmp553 + */ +public class OptionalFileParamsTest { + + public OptionalFileParamsTest() { + } + + /** + * Good Json Description + */ + @Test + public void test_01_jsonDescriptionGood() throws DataFileTagException { + + msgt("test_01_jsonDescription"); + + String val = "A new file"; + String jsonParams = "{\"description\": \"" + val + "\"}"; + + OptionalFileParams instance = new OptionalFileParams(jsonParams); + + assertEquals(instance.getDescription(), val); + assertNull(instance.getCategories()); + assertNull(instance.getDataFileTags()); + + } + + /** + * Good Json Description + */ + @Test + public void test_02_jsonDescriptionNumeric() throws DataFileTagException { + + msgt("test_02_jsonDescriptionNumeric"); + + String jsonParams = "{\"description\": 250 }"; + + OptionalFileParams instance = new OptionalFileParams(jsonParams); + + assertEquals(instance.getDescription(), "250"); + + } + + /** + * Good Json Description + */ + @Test + public void test_03_jsonNull() throws DataFileTagException { + + msgt("test_03_jsonNull"); + + //String val = "A new file"; + String jsonParams = null; + + OptionalFileParams instance = new OptionalFileParams(jsonParams); + + assertNull(instance.getDescription()); + + } + + /** + * Good Json Description + */ + @Test + public void test_04_jsonTagsGood() throws DataFileTagException { + 
+ msgt("test_04_jsonTagsGood"); + + String val = "A new file"; + String jsonParams = "{\"description\": \"A new file\", \"categories\": [\"dog\", \"cat\", \"mouse\"]}"; + + OptionalFileParams instance = new OptionalFileParams(jsonParams); + + assertEquals(instance.getDescription(), val); + + List expectedCategories = Arrays.asList("dog", "cat", "mouse"); + assertEquals(expectedCategories, instance.getCategories()); + + assertNull(instance.getDataFileTags()); + assertTrue(instance.hasCategories()); + assertTrue(instance.hasDescription()); + assertFalse(instance.hasFileDataTags()); + + } + + @Test + public void test_05_jsonTabularTagsGood() throws DataFileTagException { + + msgt("test_05_jsonTabularTagsGood"); + + String val = "A new file"; + String jsonParams = "{\"dataFileTags\": [\"Survey\", \"Event\", \"Panel\"], \"description\": \"A new file\"}"; + + OptionalFileParams instance = new OptionalFileParams(jsonParams); + + assertEquals(instance.getDescription(), val); + + List expectedTags = Arrays.asList("Survey", "Event", "Panel"); + assertEquals(expectedTags, instance.getDataFileTags()); + + assertNull(instance.getCategories()); + assertFalse(instance.hasCategories()); + assertTrue(instance.hasDescription()); + assertTrue(instance.hasFileDataTags()); + } + + @Test + public void test_06_jsonTabularTagsBad() throws DataFileTagException { + + msgt("test_06_jsonTabularTagsBad"); + + String val = "A new file"; + String jsonParams = "{\"dataFileTags\": [\"Survey\", \"Event\", \"xPanel\"], \"description\": \"A new file\"}"; + + try{ + OptionalFileParams instance = new OptionalFileParams(jsonParams); + }catch(DataFileTagException ex){ + // msgt("ex: " + ex.getMessage()); + String errMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.invalid_datafile_tag"); + msgt("errMsg: " + errMsg); + assertTrue(ex.getMessage().startsWith(errMsg)); + } + } + + + @Test + public void test_07_regularInstanceGood() throws DataFileTagException { + + 
msgt("test_07_regularInstanceGood"); + + String val = "A new file"; + List categories = Arrays.asList("dog", " dog ", "cat", "mouse", "dog "); + List dataFileTags = Arrays.asList("Survey", "Event", "Panel"); + + OptionalFileParams instance = new OptionalFileParams(val, + categories, + dataFileTags); + + assertEquals(val, instance.getDescription()); + assertEquals( Arrays.asList("dog", "cat", "mouse"), instance.getCategories()); + assertEquals(dataFileTags, instance.getDataFileTags()); + + } + + @Test + public void test_08_regularInstanceGoodWithNulls() throws DataFileTagException { + + msgt("test_08_regularInstanceGoodWithNulls"); + + String val = null; + List categories = null;//Arrays.asList("dog", "cat", "mouse"); + List dataFileTags = Arrays.asList("Survey", "Survey", "Event", "Panel", "Survey", " "); + + OptionalFileParams instance = new OptionalFileParams(val, + categories, + dataFileTags); + + assertEquals(val, instance.getDescription()); + assertEquals(categories, instance.getCategories()); + assertEquals(Arrays.asList("Survey", "Event", "Panel"), instance.getDataFileTags()); + + } + + @Test + public void test_09_unusedParamsGood() throws DataFileTagException { + + msgt("test_08_regularInstanceGoodWithNulls"); + + String jsonParams = "{\"forceReplace\": \"unused within OptionalFileParams\", \"oldFileId\": \"unused within OptionalFileParams\", \"description\": null, \"unusedParam1\": \"haha\", \"categories\": []}"; + + OptionalFileParams instance = new OptionalFileParams(jsonParams); + + assertNull(instance.getDescription()); + assertFalse(instance.hasDescription()); + + assertNull(instance.getCategories()); + assertFalse(instance.hasCategories()); + + assertNull(instance.getDataFileTags()); + assertFalse(instance.hasFileDataTags()); + + } + + + private void msg(String s){ + System.out.println(s); + } + + private void msgt(String s){ + msg("-------------------------------"); + msg(s); + msg("-------------------------------"); + } +} + +/* +Python for 
creating escaped JSON objects + +import json +d = dict(description="A new file" + ,categories=["dog", "cat", "mouse"]) +print json.dumps(json.dumps(d)) + +# result: +# "{\"description\": \"A new file\", \"categories\": [\"dog\", \"cat\", \"mouse\"]}" + + + +d = dict(description="A new file", + dataFileTags=["Survey", "Event", "Panel"]) +print json.dumps(json.dumps(d)) + +# "{\"dataFileTags\": [\"Survey\", \"Event\", \"Panel\"], \"description\": \"A new file\"}" + + +#import json; d = dict(tags=["dog", "cat", "mouse"]); print json.dumps(json.dumps(d)) + + +import json +d = dict(description="A new file", + categories=["dog", "cat", "mouse"], + unusedParam1="haha", + forceReplace="unused within OptionalFileParams", + oldFileId="unused within OptionalFileParams" +) +print json.dumps(json.dumps(d)) + + +*/ \ No newline at end of file
+ +