From 316ce03d024913fc50bf18ff31ae8b012c2f7cd1 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Wed, 21 Sep 2016 12:55:39 -0400
Subject: [PATCH 01/86] Moved isDuplicate method to datasetutility package. No
changes to actual method. Added direct method for checking a single
file--that's for #2290
---
.../edu/harvard/iq/dataverse/DatasetPage.java | 32 +---
.../dataverse/DatasetVersionServiceBean.java | 50 ++++++
.../iq/dataverse/EditDatafilesPage.java | 36 +---
.../datasetutility/DuplicateFileChecker.java | 165 ++++++++++++++++++
4 files changed, 222 insertions(+), 61 deletions(-)
create mode 100644 src/main/java/edu/harvard/iq/dataverse/datasetutility/DuplicateFileChecker.java
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
index 882608dff45..ae06bd4cd29 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
@@ -8,6 +8,7 @@
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
import edu.harvard.iq.dataverse.authorization.users.GuestUser;
+import edu.harvard.iq.dataverse.datasetutility.DuplicateFileChecker;
import edu.harvard.iq.dataverse.datavariable.VariableServiceBean;
import edu.harvard.iq.dataverse.engine.command.Command;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
@@ -2836,37 +2837,12 @@ public String cancel() {
return returnToLatestVersion();
}
+
public boolean isDuplicate(FileMetadata fileMetadata) {
- String thisMd5 = fileMetadata.getDataFile().getmd5();
- if (thisMd5 == null) {
- return false;
- }
-
- Map MD5Map = new HashMap();
-
- // TODO:
- // think of a way to do this that doesn't involve populating this
- // map for every file on the page?
- // man not be that much of a problem, if we paginate and never display
- // more than a certain number of files... Still, needs to be revisited
- // before the final 4.0.
- // -- L.A. 4.0
- Iterator fmIt = workingVersion.getFileMetadatas().iterator();
- while (fmIt.hasNext()) {
- FileMetadata fm = fmIt.next();
- String md5 = fm.getDataFile().getmd5();
- if (md5 != null) {
- if (MD5Map.get(md5) != null) {
- MD5Map.put(md5, MD5Map.get(md5).intValue() + 1);
- } else {
- MD5Map.put(md5, 1);
- }
- }
- }
- return MD5Map.get(thisMd5) != null && MD5Map.get(thisMd5).intValue() > 1;
+ return DuplicateFileChecker.IsDuplicateOriginalWay(workingVersion, fileMetadata);
}
-
+
private HttpClient getClient() {
// TODO:
// cache the http client? -- L.A. 4.0 alpha
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java
index d054b52ad81..c947000ece1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java
@@ -819,4 +819,54 @@ public void populateDatasetSearchCard(SolrSearchResult solrSearchResult) {
}
}
+ /**
+ * Return a list of the checksum Strings for files in the specified DatasetVersion
+ *
+ * This is used to help check for duplicate files within a DatasetVersion
+ *
+ * @param datasetVersion
+ * @return a list of checksum Strings for files in the specified DatasetVersion
+ */
+ public List getChecksumListForDatasetVersion(DatasetVersion datasetVersion) {
+
+ if (datasetVersion == null){
+ throw new NullPointerException("datasetVersion cannot be null");
+ }
+
+ String query = "SELECT df.md5 FROM datafile df, filemetadata fm WHERE fm.datasetversion_id = " + datasetVersion.getId() + " AND fm.datafile_id = df.id;";
+
+ logger.log(Level.FINE, "query: {0}", query);
+ Query nativeQuery = em.createNativeQuery(query);
+ List checksumList = nativeQuery.getResultList();
+
+ return checksumList;
+ }
+
+
+ /**
+ * Check for the existence of a single checksum value within a DatasetVersion's files
+ *
+ * @param datasetVersion
+ * @param selectedChecksum
+ * @return
+ */
+ public boolean doesChecksumExistInDatasetVersion(DatasetVersion datasetVersion, String selectedChecksum) {
+ if (datasetVersion == null){
+ throw new NullPointerException("datasetVersion cannot be null");
+ }
+
+ String query = "SELECT df.md5 FROM datafile df, filemetadata fm"
+ + " WHERE fm.datasetversion_id = " + datasetVersion.getId()
+ + " AND fm.datafile_id = df.id"
+ + " AND df.md5 = '" + selectedChecksum + "';";
+
+ Query nativeQuery = em.createNativeQuery(query);
+ List checksumList = nativeQuery.getResultList();
+
+ if (checksumList.size() > 0){
+ return true;
+ }
+ return false;
+ }
+
} // end class
diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
index a86374185e8..5d4a65a4c8a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
@@ -10,6 +10,7 @@
import edu.harvard.iq.dataverse.authorization.users.ApiToken;
import edu.harvard.iq.dataverse.authorization.users.User;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.datasetutility.DuplicateFileChecker;
import edu.harvard.iq.dataverse.datavariable.VariableServiceBean;
import edu.harvard.iq.dataverse.engine.command.Command;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
@@ -1030,41 +1031,10 @@ public String cancel() {
}
public boolean isDuplicate(FileMetadata fileMetadata) {
- String thisMd5 = fileMetadata.getDataFile().getmd5();
- if (thisMd5 == null) {
- return false;
- }
-
- Map MD5Map = new HashMap();
- // TODO:
- // think of a way to do this that doesn't involve populating this
- // map for every file on the page?
- // man not be that much of a problem, if we paginate and never display
- // more than a certain number of files... Still, needs to be revisited
- // before the final 4.0.
- // -- L.A. 4.0
-
- // make a "defensive copy" to avoid java.util.ConcurrentModificationException from being thrown
- // when uploading 100+ files
- List wvCopy = new ArrayList<>(workingVersion.getFileMetadatas());
- Iterator fmIt = wvCopy.iterator();
-
- while (fmIt.hasNext()) {
- FileMetadata fm = fmIt.next();
- String md5 = fm.getDataFile().getmd5();
- if (md5 != null) {
- if (MD5Map.get(md5) != null) {
- MD5Map.put(md5, MD5Map.get(md5).intValue() + 1);
- } else {
- MD5Map.put(md5, 1);
- }
- }
- }
-
- return MD5Map.get(thisMd5) != null && MD5Map.get(thisMd5).intValue() > 1;
+ return DuplicateFileChecker.IsDuplicateOriginalWay(workingVersion, fileMetadata);
}
-
+
private HttpClient getClient() {
// TODO:
// cache the http client? -- L.A. 4.0 alpha
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/DuplicateFileChecker.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/DuplicateFileChecker.java
new file mode 100644
index 00000000000..7be3ae428fc
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/DuplicateFileChecker.java
@@ -0,0 +1,165 @@
+/*
+ * To change this license header, choose License Headers in Project Properties.
+ * To change this template file, choose Tools | Templates
+ * and open the template in the editor.
+ */
+package edu.harvard.iq.dataverse.datasetutility;
+
+import edu.harvard.iq.dataverse.DatasetVersion;
+import edu.harvard.iq.dataverse.DatasetVersionServiceBean;
+import edu.harvard.iq.dataverse.FileMetadata;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.logging.Logger;
+
+/**
+ * Used for adding/replacing single files.
+ *
+ * Methods check if the files already exist in the *saved* DatasetVersion
+ *
+ * @author rmp553
+ */
+public class DuplicateFileChecker {
+
+ private static final Logger logger = Logger.getLogger(DuplicateFileChecker.class.getCanonicalName());
+ private DatasetVersionServiceBean datasetVersionService;
+
+ /**
+ * Constructor
+ *
+ * @param datasetVersionService
+ */
+ public DuplicateFileChecker(DatasetVersionServiceBean datasetVersionService){
+
+ if (datasetVersionService == null){
+ throw new NullPointerException("datasetVersionService cannot be null");
+ }
+
+ this.datasetVersionService = datasetVersionService;
+ } // end: constructor
+
+
+ /**
+ * Check the database to see if this file is already in the DatasetVersion
+ *
+ * Note: This checks a SINGLE file against the database only.
+ *
+     * @param fileMetadata file whose checksum is checked against the saved DatasetVersion
+ * @return
+ */
+ public boolean isFileInSavedDatasetVersion(DatasetVersion datasetVersion, FileMetadata fileMetadata){
+
+ if (datasetVersion == null){
+ throw new NullPointerException("datasetVersion cannot be null");
+ }
+
+ if (fileMetadata == null){
+ throw new NullPointerException("fileMetadata cannot be null");
+ }
+ return this.isFileInSavedDatasetVersion(datasetVersion, fileMetadata.getDataFile().getmd5());
+ }
+
+ /**
+ * See if this checksum already exists by a new query
+ *
+     * @param checkSum checksum string to look up in the saved DatasetVersion
+ * @return
+ */
+ public boolean isFileInSavedDatasetVersion(DatasetVersion datasetVersion, String checkSum){
+
+ if (datasetVersion == null){
+ throw new NullPointerException("datasetVersion cannot be null");
+ }
+
+ if (checkSum == null){
+ throw new NullPointerException("checkSum cannot be null");
+ }
+
+ return datasetVersionService.doesChecksumExistInDatasetVersion(datasetVersion, checkSum);
+
+ }
+
+ /**
+ * From dataset version:
+ * - Get the md5s of all the files
+ * - Load them into a hash
+ *
+     * Loads checksums from the in-memory (possibly unsaved) DatasetVersion, so it also covers files not yet persisted
+ *
+ */
+ public Map getDatasetHashesFromDatabase(DatasetVersion datasetVersion){
+
+ if (datasetVersion == null){
+ throw new NullPointerException("datasetVersion cannot be null");
+ }
+
+ Map checksumHashCounts = new HashMap<>();
+
+ List fileMetadatas = new ArrayList<>(datasetVersion.getFileMetadatas());
+
+ for (FileMetadata fm : fileMetadatas){
+ String checkSum = fm.getDataFile().getmd5();
+ if (checksumHashCounts.get(checkSum) != null){
+ checksumHashCounts.put(checkSum, checksumHashCounts.get(checkSum).intValue() + 1);
+ }else{
+ checksumHashCounts.put(checkSum, 1);
+ }
+ }
+ return checksumHashCounts;
+ }
+
+
+
+ /**
+ * Original isDuplicate method from the DatasetPage and EditDatafilesPage
+ *
+ * Note: this has efficiency issues in that the hash is re-created for every fileMetadata checked
+ *
+ * @param workingVersion
+ * @param fileMetadata
+ * @return
+ */
+ public static boolean IsDuplicateOriginalWay(DatasetVersion workingVersion, FileMetadata fileMetadata) {
+ if (workingVersion == null){
+ throw new NullPointerException("datasetVersion cannot be null");
+ }
+
+ String thisMd5 = fileMetadata.getDataFile().getmd5();
+ if (thisMd5 == null) {
+ return false;
+ }
+
+ Map MD5Map = new HashMap();
+
+ // TODO:
+ // think of a way to do this that doesn't involve populating this
+ // map for every file on the page?
+    // may not be that much of a problem, if we paginate and never display
+ // more than a certain number of files... Still, needs to be revisited
+ // before the final 4.0.
+ // -- L.A. 4.0
+
+ // make a "defensive copy" to avoid java.util.ConcurrentModificationException from being thrown
+ // when uploading 100+ files
+ List wvCopy = new ArrayList<>(workingVersion.getFileMetadatas());
+ Iterator fmIt = wvCopy.iterator();
+
+ while (fmIt.hasNext()) {
+ FileMetadata fm = fmIt.next();
+ String md5 = fm.getDataFile().getmd5();
+ if (md5 != null) {
+ if (MD5Map.get(md5) != null) {
+ MD5Map.put(md5, MD5Map.get(md5).intValue() + 1);
+ } else {
+ MD5Map.put(md5, 1);
+ }
+ }
+ }
+ return MD5Map.get(thisMd5) != null && MD5Map.get(thisMd5).intValue() > 1;
+
+ }
+
+}
From 22239b569ceaf5f809a379b5a6aa85c6704abfdf Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Wed, 21 Sep 2016 13:56:50 -0400
Subject: [PATCH 02/86] created WorldMapPermissionHelper.java to move checks
out of the DatasetPage. #2290
---
.../edu/harvard/iq/dataverse/DatasetPage.java | 359 +++++-------------
.../iq/dataverse/EditDatafilesPage.java | 13 +-
.../WorldMapPermissionHelper.java | 320 ++++++++++++++++
3 files changed, 427 insertions(+), 265 deletions(-)
create mode 100644 src/main/java/edu/harvard/iq/dataverse/datasetutility/WorldMapPermissionHelper.java
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
index ae06bd4cd29..fbe7628c406 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
@@ -9,6 +9,7 @@
import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
import edu.harvard.iq.dataverse.authorization.users.GuestUser;
import edu.harvard.iq.dataverse.datasetutility.DuplicateFileChecker;
+import edu.harvard.iq.dataverse.datasetutility.WorldMapPermissionHelper;
import edu.harvard.iq.dataverse.datavariable.VariableServiceBean;
import edu.harvard.iq.dataverse.engine.command.Command;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
@@ -210,13 +211,14 @@ public enum DisplayMode {
private List versionTabList = new ArrayList();
private List versionTabListForPostLoad = new ArrayList();
+ // Used to help with displaying buttons related to the WorldMap
+ private WorldMapPermissionHelper worldMapPermissionHelper;
// Used to store results of permissions checks
private final Map datasetPermissionMap = new HashMap<>(); // { Permission human_name : Boolean }
private final Map fileDownloadPermissionMap = new HashMap<>(); // { FileMetadata.id : Boolean }
private final Map fileMetadataTwoRavensExploreMap = new HashMap<>(); // { FileMetadata.id : Boolean }
- private final Map fileMetadataWorldMapExplore = new HashMap<>(); // { FileMetadata.id : Boolean }
private DataFile selectedDownloadFile;
@@ -676,9 +678,7 @@ public boolean doesSessionUserHaveDataSetPermission(Permission permissionToCheck
public void setNoDVsRemaining(boolean noDVsRemaining) {
this.noDVsRemaining = noDVsRemaining;
}
-
- private final Map mapLayerMetadataLookup = new HashMap<>();
-
+
private GuestbookResponse guestbookResponse;
private Guestbook selectedGuestbook;
@@ -1013,181 +1013,12 @@ public void handleChange() {
public void handleChangeButton() {
- }
-
- public boolean isShapefileType(FileMetadata fm) {
- if (fm == null) {
- return false;
- }
- if (fm.getDataFile() == null) {
- return false;
- }
-
- return fm.getDataFile().isShapefileType();
- }
-
- /*
- Check if the FileMetadata.dataFile has an associated MapLayerMetadata object
-
- The MapLayerMetadata objects have been fetched at page inception by "loadMapLayerMetadataLookup()"
- */
- public boolean hasMapLayerMetadata(FileMetadata fm) {
- if (fm == null) {
- return false;
- }
- if (fm.getDataFile() == null) {
- return false;
- }
- return doesDataFileHaveMapLayerMetadata(fm.getDataFile());
- }
-
- /**
- * Check if a DataFile has an associated MapLayerMetadata object
- *
- * The MapLayerMetadata objects have been fetched at page inception by
- * "loadMapLayerMetadataLookup()"
- */
- private boolean doesDataFileHaveMapLayerMetadata(DataFile df) {
- if (df == null) {
- return false;
- }
- if (df.getId() == null) {
- return false;
- }
- return this.mapLayerMetadataLookup.containsKey(df.getId());
- }
-
- /**
- * Using a DataFile id, retrieve an associated MapLayerMetadata object
- *
- * The MapLayerMetadata objects have been fetched at page inception by
- * "loadMapLayerMetadataLookup()"
- */
- public MapLayerMetadata getMapLayerMetadata(DataFile df) {
- if (df == null) {
- return null;
- }
- return this.mapLayerMetadataLookup.get(df.getId());
- }
+ }
private void msg(String s){
// System.out.println(s);
}
- /**
- * See table in: https://github.com/IQSS/dataverse/issues/1618
- *
- * Can the user see a reminder to publish button?
- * (0) The application has to be set to Create Edit Maps - true
- * (1) Logged in user
- * (2) Is geospatial file?
- * (3) File has NOT been released
- * (4) No existing Map
- * (5) Can Edit Dataset
- *
- * @param FileMetadata fm
- * @return boolean
- */
- public boolean canSeeMapButtonReminderToPublish(FileMetadata fm){
- if (fm==null){
-
- return false;
- }
-
- // (0) Is the view GeoconnectViewMaps
- if (!settingsService.isTrueForKey(SettingsServiceBean.Key.GeoconnectCreateEditMaps, false)){
- return false;
- }
-
-
- // (1) Is there an authenticated user?
- //
- if (!(isSessionUserAuthenticated())){
- return false;
- }
-
-
- // Is this file a Shapefile or a Tabular file tagged as Geospatial?
- //
- if (!(this.isPotentiallyMappableFileType(fm))){
- return false;
- }
-
- // (3) Is this DataFile released? Yes, don't need reminder
- //
- if (fm.getDataFile().isReleased()){
- return false;
- }
-
- // (4) Does a map already exist? Yes, don't need reminder
- //
- if (this.hasMapLayerMetadata(fm)){
- return false;
- }
-
- // (5) If so, can the logged in user edit the Dataset to which this FileMetadata belongs?
- if (!this.doesSessionUserHaveDataSetPermission(Permission.EditDataset)){
- return false;
- }
-
- // Looks good
- //
- return true;
- }
-
- /**
- * Should there be a Map Data Button for this file?
- * see table in: https://github.com/IQSS/dataverse/issues/1618
- * (1) Is the user logged in?
- * (2) Is this file a Shapefile or a Tabular file tagged as Geospatial?
- * (3) Does the logged in user have permission to edit the Dataset to which this FileMetadata belongs?
- * (4) Is the create Edit Maps flag set to true?
- * (5) Any of these conditions:
- * 9a) File Published
- * (b) Draft: File Previously published
- * @param fm FileMetadata
- * @return boolean
- */
- public boolean canUserSeeMapDataButton(FileMetadata fm){
-
- if (fm==null){
- return false;
- }
-
-
- // (1) Is there an authenticated user?
- if (!(isSessionUserAuthenticated())){
- return false;
- }
-
- // (2) Is this file a Shapefile or a Tabular file tagged as Geospatial?
- // TO DO: EXPAND FOR TABULAR FILES TAGGED AS GEOSPATIAL!
- //
- if (!(this.isPotentiallyMappableFileType(fm))){
- return false;
- }
-
- // (3) Does the user have Edit Dataset permissions?
- //
- if (!this.doesSessionUserHaveDataSetPermission(Permission.EditDataset)){
- return false;
- }
-
- // (4) Is the view GeoconnectViewMaps
- if (!settingsService.isTrueForKey(SettingsServiceBean.Key.GeoconnectCreateEditMaps, false)){
- return false;
- }
-
- // (5) Is File released?
- //
- if (fm.getDataFile().isReleased()){
- return true;
- }
-
- // Nope
- return false;
- }
-
/**
* Used in the .xhtml file to check whether a tabular file
@@ -1249,37 +1080,6 @@ public boolean canSeeTwoRavensExploreButton(FileMetadata fm){
// and DatasetPage.canDownloadFile(fileMetadata)
}
- /**
- * Check if this is a mappable file type.
- *
- * Currently (2/2016)
- * - Shapefile (zipped shapefile)
- * - Tabular file with Geospatial Data tag
- *
- * @param fm
- * @return
- */
- private boolean isPotentiallyMappableFileType(FileMetadata fm){
- if (fm==null){
- return false;
- }
-
- // Yes, it's a shapefile
- //
- if (this.isShapefileType(fm)){
- return true;
- }
-
- // Yes, it's tabular with a geospatial tag
- //
- if (fm.getDataFile().isTabularData()){
- if (fm.getDataFile().hasGeospatialTag()){
- return true;
- }
- }
- return false;
- }
-
/**
* For development
@@ -1305,83 +1105,114 @@ public boolean isGeoconnectDebugAvailable(){
/**
- * Should there be a Explore WorldMap Button for this file?
- * See table in: https://github.com/IQSS/dataverse/issues/1618
+ * This object wraps methods used for hiding/displaying WorldMap related messages
+ *
+ */
+ private void loadWorldMapPermissionHelper() {
+
+ worldMapPermissionHelper = new WorldMapPermissionHelper(settingsService, mapLayerMetadataService, dataset);
+
+ }
+
+ /**
*
- * (1) Does the file have MapLayerMetadata?
- * (2) Is there DownloadFile permission for this file?
+ * WARNING: Check if the user has file download permission
+ * - This check is assumed when calling to the worldMapPermissionHelper
*
- * @param fm FileMetadata
- * @return boolean
+ * Should the user be able to see the WorldMap Explore button?
+ *
+ * @param fm
+ * @return
*/
public boolean canUserSeeExploreWorldMapButton(FileMetadata fm){
- if (fm==null){
+ if ((worldMapPermissionHelper == null)||(fm == null)){
return false;
}
- if (this.fileMetadataWorldMapExplore.containsKey(fm.getId())){
- // Yes, return previous answer
- //logger.info("using cached result for candownloadfile on filemetadata "+fid);
- return this.fileMetadataWorldMapExplore.get(fm.getId());
- }
-
- /* -----------------------------------------------------
- Does a Map Exist?
- ----------------------------------------------------- */
- if (!(this.hasMapLayerMetadata(fm))){
- // Nope: no button
- this.fileMetadataWorldMapExplore.put(fm.getId(), false);
- return false;
- }
-
- /*
- Is setting for GeoconnectViewMaps true?
- Nope? no button
- */
- if (!settingsService.isTrueForKey(SettingsServiceBean.Key.GeoconnectViewMaps, false)){
- this.fileMetadataWorldMapExplore.put(fm.getId(), false);
+ // You need to have download file permissions as a prereq!
+ //
+ if (!this.canDownloadFile(fm)){
return false;
}
- /* -----------------------------------------------------
- Does user have DownloadFile permission for this file?
- Yes: User can view button!
- ----------------------------------------------------- */
- if (this.canDownloadFile(fm)){
- this.fileMetadataWorldMapExplore.put(fm.getId(), true);
- return true;
+ return worldMapPermissionHelper.canUserSeeExploreWorldMapButton(fm);
+
+ } // end: canUserSeeExploreWorldMapButton
+
+
+ /**
+     * WARNING: Check if the user isAuthenticated AND has Permission.EditDataset
+ * - These checks are assumed when calling to the worldMapPermissionHelper
+ *
+ * If this is an unpublished Dataset with a mappable file,
+ * should the user see the "Reminder to Publish" button
+ *
+ * @param fm
+ * @return
+ */
+ public boolean canSeeMapButtonReminderToPublish(FileMetadata fm){
+
+ if ((worldMapPermissionHelper == null)||(fm == null)){
+ return false;
}
-
- // Nope: Can't see button
+
+ // Is this user authenticated with EditDataset permission?
//
- this.fileMetadataWorldMapExplore.put(fm.getId(), false);
- return false;
- }
+ if (!(isUserAuthenticatedWithEditDatasetPermission())){
+ return false;
+ }
+
+ return worldMapPermissionHelper.canSeeMapButtonReminderToPublish(fm);
+
+ } // end: canSeeMapButtonReminderToPublish
+
/**
- * Create a hashmap consisting of { DataFile.id : MapLayerMetadata object}
- *
- * Very few DataFiles will have associated MapLayerMetadata objects so only
- * use 1 query to get them
+     * WARNING: Check if the user isAuthenticated AND has Permission.EditDataset
+ * - These checks are assumed when calling to the worldMapPermissionHelper
+ *
+ * Should the user be able to map this file?
+ *
+ * @param fm
+ * @return
*/
- private void loadMapLayerMetadataLookup() {
- if (this.dataset == null) {
- return;
+ public boolean canUserSeeMapDataButton(FileMetadata fm){
+
+ if ((worldMapPermissionHelper == null)||(fm == null)){
+ return false;
}
- if (this.dataset.getId() == null) {
- return;
+
+ // Is this user authenticated with EditDataset permission?
+ //
+ if (!(isUserAuthenticatedWithEditDatasetPermission())){
+ return false;
}
- List mapLayerMetadataList = mapLayerMetadataService.getMapLayerMetadataForDataset(this.dataset);
- if (mapLayerMetadataList == null) {
- return;
+
+ return worldMapPermissionHelper.canUserSeeMapDataButton(fm);
+
+ }
+
+ /**
+ * Is this user authenticated with EditDataset permission
+ *
+ * @return
+ */
+ private boolean isUserAuthenticatedWithEditDatasetPermission(){
+
+ // Is the user authenticated?
+ //
+ if (!(isSessionUserAuthenticated())){
+ return false;
}
- for (MapLayerMetadata layer_metadata : mapLayerMetadataList) {
- mapLayerMetadataLookup.put(layer_metadata.getDataFile().getId(), layer_metadata);
+
+ // If so, can the logged in user edit the Dataset to which this FileMetadata belongs?
+ //
+ if (!this.doesSessionUserHaveDataSetPermission(Permission.EditDataset)){
+ return false;
}
-
- }// A DataFile may have a related MapLayerMetadata object
-
-
+
+ return true;
+ }
private List displayFileMetadata;
@@ -1549,7 +1380,7 @@ private String init(boolean initFull) {
//SEK - lazymodel may be needed for datascroller in future release
// lazyModel = new LazyFileMetadataDataModel(workingVersion.getId(), datafileService );
// populate MapLayerMetadata
- this.loadMapLayerMetadataLookup(); // A DataFile may have a related MapLayerMetadata object
+ this.loadWorldMapPermissionHelper(); // A DataFile may have a related MapLayerMetadata object
}
} else if (ownerId != null) {
// create mode for a new child dataset
diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
index 5d4a65a4c8a..97fb4f20bb8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
@@ -1030,6 +1030,12 @@ public String cancel() {
return returnToDatasetOnly();
}
+ /**
+ * Just moved to another class for now
+ *
+ * @param fileMetadata
+ * @return
+ */
public boolean isDuplicate(FileMetadata fileMetadata) {
return DuplicateFileChecker.IsDuplicateOriginalWay(workingVersion, fileMetadata);
@@ -1229,6 +1235,9 @@ private String processUploadedFileList(List dFileList){
boolean multipleDupes = false;
String warningMessage = null;
+ // NOTE: for native file uploads, the dFileList will only
+ // contain 1 file--method is called for every file even if the UI shows "simultaneous uploads"
+
// -----------------------------------------------------------
// Iterate through list of DataFile objects
// -----------------------------------------------------------
@@ -1255,9 +1264,11 @@ private String processUploadedFileList(List dFileList){
// -----------------------------------------------------------
// Check for duplicates -- e.g. file is already in the dataset
// -----------------------------------------------------------
+
if (!isDuplicate(dataFile.getFileMetadata())) {
newFiles.add(dataFile); // looks good
fileMetadatas.add(dataFile.getFileMetadata());
+
} else {
if (duplicateFileNames == null) {
duplicateFileNames = dataFile.getFileMetadata().getLabel();
@@ -1289,7 +1300,7 @@ private String processUploadedFileList(List dFileList){
}
}
}
-
+
// -----------------------------------------------------------
// Formate error message for duplicate files
// -----------------------------------------------------------
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/WorldMapPermissionHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/WorldMapPermissionHelper.java
new file mode 100644
index 00000000000..0877cff4f2e
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/WorldMapPermissionHelper.java
@@ -0,0 +1,320 @@
+/*
+ * To change this license header, choose License Headers in Project Properties.
+ * To change this template file, choose Tools | Templates
+ * and open the template in the editor.
+ */
+package edu.harvard.iq.dataverse.datasetutility;
+
+import edu.harvard.iq.dataverse.DataFile;
+import edu.harvard.iq.dataverse.Dataset;
+import edu.harvard.iq.dataverse.FileMetadata;
+import edu.harvard.iq.dataverse.MapLayerMetadata;
+import edu.harvard.iq.dataverse.MapLayerMetadataServiceBean;
+import edu.harvard.iq.dataverse.authorization.Permission;
+import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ *
+ * @author rmp553
+ */
+public class WorldMapPermissionHelper {
+
+ private SettingsServiceBean settingsService;
+ private MapLayerMetadataServiceBean mapLayerMetadataService;
+
+ private Dataset dataset;
+
+ private final Map fileMetadataWorldMapExplore = new HashMap<>(); // { FileMetadata.id : Boolean }
+ private final Map mapLayerMetadataLookup = new HashMap<>();
+
+
+ public WorldMapPermissionHelper(SettingsServiceBean settingsService, MapLayerMetadataServiceBean mapLayerMetadataService,
+ Dataset dataset){
+
+ if (dataset == null){
+ throw new NullPointerException("dataset cannot be null");
+ }
+ if (dataset.getId() == null){
+ throw new NullPointerException("dataset must be saved! (have an id)");
+ }
+
+ if (settingsService == null){
+ throw new NullPointerException("settingsService cannot be null");
+ }
+ if (mapLayerMetadataService == null){
+ throw new NullPointerException("mapLayerMetadataService cannot be null");
+ }
+ this.dataset = dataset;
+
+ this.settingsService = settingsService;
+ this.mapLayerMetadataService = mapLayerMetadataService;
+
+ loadMapLayerMetadataLookup();
+ }
+
+
+
+ /**
+ * Create a hashmap consisting of { DataFile.id : MapLayerMetadata object}
+ *
+ * Very few DataFiles will have associated MapLayerMetadata objects so only
+ * use 1 query to get them
+ */
+ private void loadMapLayerMetadataLookup() {
+
+
+ List mapLayerMetadataList = mapLayerMetadataService.getMapLayerMetadataForDataset(this.dataset);
+ if (mapLayerMetadataList == null) {
+ return;
+ }
+ for (MapLayerMetadata layer_metadata : mapLayerMetadataList) {
+ mapLayerMetadataLookup.put(layer_metadata.getDataFile().getId(), layer_metadata);
+ }
+
+ }// A DataFile may have a related MapLayerMetadata object
+
+
+ /**
+ * Using a DataFile id, retrieve an associated MapLayerMetadata object
+ *
+ * The MapLayerMetadata objects have been fetched at page inception by
+ * "loadMapLayerMetadataLookup()"
+ */
+ public MapLayerMetadata getMapLayerMetadata(DataFile df) {
+ if (df == null) {
+ return null;
+ }
+ return this.mapLayerMetadataLookup.get(df.getId());
+ }
+
+ /**
+ * WARNING: Before calling this, make sure the user has download
+ * permission for the file!! (See DatasetPage.canDownloadFile())
+ *
+ * Should there be a Explore WorldMap Button for this file?
+ * See table in: https://github.com/IQSS/dataverse/issues/1618
+ *
+ * (1) Does the file have MapLayerMetadata?
+ * (2) Are the proper settings in place
+ *
+ * @param fm FileMetadata
+ * @return boolean
+ */
+ public boolean canUserSeeExploreWorldMapButton(FileMetadata fm){
+ if (fm==null){
+ return false;
+ }
+
+ if (this.fileMetadataWorldMapExplore.containsKey(fm.getId())){
+ // Yes, return previous answer
+ //logger.info("using cached result for candownloadfile on filemetadata "+fid);
+ return this.fileMetadataWorldMapExplore.get(fm.getId());
+ }
+
+ /* -----------------------------------------------------
+ Does a Map Exist?
+ ----------------------------------------------------- */
+ if (!(this.hasMapLayerMetadata(fm))){
+ // Nope: no button
+ this.fileMetadataWorldMapExplore.put(fm.getId(), false);
+ return false;
+ }
+
+ /*
+ Is setting for GeoconnectViewMaps true?
+ Nope? no button
+ */
+ if (!settingsService.isTrueForKey(SettingsServiceBean.Key.GeoconnectViewMaps, false)){
+ this.fileMetadataWorldMapExplore.put(fm.getId(), false);
+ return false;
+ }
+
+ /* -----------------------------------------------------
+ Yes: User can view button!
+ ----------------------------------------------------- */
+ this.fileMetadataWorldMapExplore.put(fm.getId(), true);
+ return true;
+ }
+
+
+ /*
+ Check if the FileMetadata.dataFile has an associated MapLayerMetadata object
+
+ The MapLayerMetadata objects have been fetched at page inception by "loadMapLayerMetadataLookup()"
+ */
+ public boolean hasMapLayerMetadata(FileMetadata fm) {
+ if (fm == null) {
+ return false;
+ }
+ if (fm.getDataFile() == null) {
+ return false;
+ }
+ return doesDataFileHaveMapLayerMetadata(fm.getDataFile());
+ }
+
+ /**
+ * Check if a DataFile has an associated MapLayerMetadata object
+ *
+ * The MapLayerMetadata objects have been fetched at page inception by
+ * "loadMapLayerMetadataLookup()"
+ */
+ private boolean doesDataFileHaveMapLayerMetadata(DataFile df) {
+ if (df == null) {
+ return false;
+ }
+ if (df.getId() == null) {
+ return false;
+ }
+ return this.mapLayerMetadataLookup.containsKey(df.getId());
+ }
+
+
+
+ /**
+ * Check if this is a mappable file type.
+ *
+ * Currently (2/2016)
+ * - Shapefile (zipped shapefile)
+ * - Tabular file with Geospatial Data tag
+ *
+ * @param fm
+ * @return
+ */
+ private boolean isPotentiallyMappableFileType(FileMetadata fm){
+ if (fm==null){
+ return false;
+ }
+
+ // Yes, it's a shapefile
+ //
+ if (this.isShapefileType(fm)){
+ return true;
+ }
+
+ // Yes, it's tabular with a geospatial tag
+ //
+ if (fm.getDataFile().isTabularData()){
+ if (fm.getDataFile().hasGeospatialTag()){
+ return true;
+ }
+ }
+ return false;
+ }
+
+
+
+ public boolean isShapefileType(FileMetadata fm) {
+ if (fm == null) {
+ return false;
+ }
+ if (fm.getDataFile() == null) {
+ return false;
+ }
+
+ return fm.getDataFile().isShapefileType();
+ }
+
+
+ /**
+     * WARNING: Assumes user isAuthenticated AND has Permission.EditDataset
+ * - These checks are made on the DatasetPage which calls this method
+ *
+ * See table in: https://github.com/IQSS/dataverse/issues/1618
+ *
+ * Can the user see a reminder to publish button?
+ * (0) The application has to be set to Create Edit Maps - true
+ * (1) Logged in user
+ * (2) Is geospatial file?
+ * (3) File has NOT been released
+ * (4) No existing Map
+ * (5) Can Edit Dataset
+ *
+     * @param fm FileMetadata to check
+ * @return boolean
+ */
+ public boolean canSeeMapButtonReminderToPublish(FileMetadata fm){
+ if (fm==null){
+
+ return false;
+ }
+
+        // (1) Is the GeoconnectCreateEditMaps setting true?
+ if (!settingsService.isTrueForKey(SettingsServiceBean.Key.GeoconnectCreateEditMaps, false)){
+ return false;
+ }
+
+
+ // (2) Is this file a Shapefile or a Tabular file tagged as Geospatial?
+ //
+ if (!(this.isPotentiallyMappableFileType(fm))){
+ return false;
+ }
+
+ // (3) Is this DataFile released? Yes, don't need reminder
+ //
+ if (fm.getDataFile().isReleased()){
+ return false;
+ }
+
+ // (4) Does a map already exist? Yes, don't need reminder
+ //
+ if (this.hasMapLayerMetadata(fm)){
+ return false;
+ }
+
+ // Looks good
+ //
+ return true;
+ }
+
+ /**
+ *
+     * WARNING: Assumes user isAuthenticated AND has Permission.EditDataset
+ * - These checks are made on the DatasetPage which calls this method
+ *
+ * Should there be a Map Data Button for this file?
+ * see table in: https://github.com/IQSS/dataverse/issues/1618
+ * (1) Is the user logged in?
+ * (2) Is this file a Shapefile or a Tabular file tagged as Geospatial?
+ * (3) Does the logged in user have permission to edit the Dataset to which this FileMetadata belongs?
+ * (4) Is the create Edit Maps flag set to true?
+ * (5) Any of these conditions:
+     * (a) File Published
+ * (b) Draft: File Previously published
+ * @param fm FileMetadata
+ * @return boolean
+ */
+ public boolean canUserSeeMapDataButton(FileMetadata fm){
+
+ if (fm==null){
+ return false;
+ }
+
+ // (1) Is this file a Shapefile or a Tabular file tagged as Geospatial?
+ // TO DO: EXPAND FOR TABULAR FILES TAGGED AS GEOSPATIAL!
+ //
+ if (!(this.isPotentiallyMappableFileType(fm))){
+ return false;
+ }
+
+
+        // (2) Is the GeoconnectCreateEditMaps setting true?
+ if (!settingsService.isTrueForKey(SettingsServiceBean.Key.GeoconnectCreateEditMaps, false)){
+ return false;
+ }
+
+ // (3) Is File released?
+ //
+ if (fm.getDataFile().isReleased()){
+ return true;
+ }
+
+ // Nope
+ return false;
+ }
+
+
+}
From b77ab3725a29516467c5e8277b0bd0a3bef4b570 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Wed, 21 Sep 2016 15:34:19 -0400
Subject: [PATCH 03/86] further updates, methods for api calls and file page
---
.../edu/harvard/iq/dataverse/DatasetPage.java | 9 +-
.../WorldMapPermissionHelper.java | 226 ++++++++++++++++--
2 files changed, 214 insertions(+), 21 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
index fbe7628c406..fc1503b0e53 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
@@ -85,7 +85,6 @@
import javax.faces.event.AjaxBehaviorEvent;
-import javax.faces.context.ExternalContext;
import org.apache.commons.lang.StringEscapeUtils;
import org.primefaces.component.tabview.TabView;
@@ -1110,7 +1109,7 @@ public boolean isGeoconnectDebugAvailable(){
*/
private void loadWorldMapPermissionHelper() {
- worldMapPermissionHelper = new WorldMapPermissionHelper(settingsService, mapLayerMetadataService, dataset);
+ worldMapPermissionHelper = WorldMapPermissionHelper.getPermissionHelperForDatasetPage(settingsService, mapLayerMetadataService, dataset);
}
@@ -1135,7 +1134,7 @@ public boolean canUserSeeExploreWorldMapButton(FileMetadata fm){
return false;
}
- return worldMapPermissionHelper.canUserSeeExploreWorldMapButton(fm);
+ return worldMapPermissionHelper.canUserSeeExploreWorldMapButtonFromPage(fm);
} // end: canUserSeeExploreWorldMapButton
@@ -1162,7 +1161,7 @@ public boolean canSeeMapButtonReminderToPublish(FileMetadata fm){
return false;
}
- return worldMapPermissionHelper.canSeeMapButtonReminderToPublish(fm);
+ return worldMapPermissionHelper.canSeeMapButtonReminderToPublishFromPage(fm);
} // end: canSeeMapButtonReminderToPublish
@@ -1188,7 +1187,7 @@ public boolean canUserSeeMapDataButton(FileMetadata fm){
return false;
}
- return worldMapPermissionHelper.canUserSeeMapDataButton(fm);
+ return worldMapPermissionHelper.canUserSeeMapDataButtonFromPage(fm);
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/WorldMapPermissionHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/WorldMapPermissionHelper.java
index 0877cff4f2e..eece1bb48fe 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/WorldMapPermissionHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/WorldMapPermissionHelper.java
@@ -10,20 +10,39 @@
import edu.harvard.iq.dataverse.FileMetadata;
import edu.harvard.iq.dataverse.MapLayerMetadata;
import edu.harvard.iq.dataverse.MapLayerMetadataServiceBean;
+import edu.harvard.iq.dataverse.PermissionServiceBean;
import edu.harvard.iq.dataverse.authorization.Permission;
+import edu.harvard.iq.dataverse.authorization.users.User;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
- *
+ * This class originally encapsulated display logic for the DatasetPage
+ *
+ * It allows the following checks without redundantly querying the db to
+ * check permissions or if MapLayerMetadata exists
+ *
+ * - canUserSeeMapDataButton (private)
+ * - canUserSeeMapDataButtonFromPage (public)
+ * - canUserSeeMapDataButtonFromAPI (public)
+ *
+ * - canSeeMapButtonReminderToPublish (private)
+ * - canSeeMapButtonReminderToPublishFromPage (public)
+ * - canSeeMapButtonReminderToPublishFromAPI (public)
+ *
+ * - canUserSeeExploreWorldMapButton (private)
+ * - canUserSeeExploreWorldMapButtonFromPage (public)
+ * - canUserSeeExploreWorldMapButtonFromAPI (public)
+ *
* @author rmp553
*/
public class WorldMapPermissionHelper {
private SettingsServiceBean settingsService;
private MapLayerMetadataServiceBean mapLayerMetadataService;
+ private PermissionServiceBean permissionService;
private Dataset dataset;
@@ -32,7 +51,7 @@ public class WorldMapPermissionHelper {
public WorldMapPermissionHelper(SettingsServiceBean settingsService, MapLayerMetadataServiceBean mapLayerMetadataService,
- Dataset dataset){
+ Dataset dataset, PermissionServiceBean permissionService){
if (dataset == null){
throw new NullPointerException("dataset cannot be null");
@@ -47,15 +66,58 @@ public WorldMapPermissionHelper(SettingsServiceBean settingsService, MapLayerMet
if (mapLayerMetadataService == null){
throw new NullPointerException("mapLayerMetadataService cannot be null");
}
+
this.dataset = dataset;
this.settingsService = settingsService;
this.mapLayerMetadataService = mapLayerMetadataService;
+ this.permissionService = permissionService;
loadMapLayerMetadataLookup();
}
+ /**
+ * Convenience method for instantiating from dataset page or File page
+ *
+ * Does NOT use PermissionServiceBean
+ *
+ * @param settingsService
+ * @param mapLayerMetadataService
+ * @param dataset
+ * @return
+ */
+ public static WorldMapPermissionHelper getPermissionHelperForDatasetPage(
+ SettingsServiceBean settingsService, MapLayerMetadataServiceBean mapLayerMetadataService,
+ Dataset dataset){
+
+ return new WorldMapPermissionHelper(settingsService, mapLayerMetadataService, dataset, null);
+ }
+
+ /**
+ * Convenience method for instantiating from the API
+ *
+ * REQUIRES PermissionServiceBean
+ *
+ * @param settingsService
+ * @param mapLayerMetadataService
+ * @param dataset
+ * @param permissionService
+ * @return
+ */
+ public static WorldMapPermissionHelper getPermissionHelperForAPI(
+ SettingsServiceBean settingsService,
+ MapLayerMetadataServiceBean mapLayerMetadataService,
+ Dataset dataset,
+ PermissionServiceBean permissionService){
+
+ if (permissionService == null){
+ throw new NullPointerException("permissionService is required for API checks");
+ }
+
+ return new WorldMapPermissionHelper(settingsService, mapLayerMetadataService, dataset, permissionService);
+ }
+
/**
* Create a hashmap consisting of { DataFile.id : MapLayerMetadata object}
@@ -90,6 +152,45 @@ public MapLayerMetadata getMapLayerMetadata(DataFile df) {
return this.mapLayerMetadataLookup.get(df.getId());
}
+
+ /*
+ * Call this when using the API
+ * - calls private method canUserSeeExploreWorldMapButton
+ */
+ public boolean canUserSeeExploreWorldMapButtonFromAPI(FileMetadata fm, User user){
+
+ if (fm == null){
+ return false;
+ }
+ if (user==null){
+ return false;
+ }
+ if (!this.permissionService.userOn(user, fm.getDataFile()).has(Permission.DownloadFile)){
+ return false;
+ }
+
+ return this.canUserSeeExploreWorldMapButton(fm, true);
+ }
+
+ /**
+ * Call this for a Dataset or File page
+ * - calls private method canUserSeeExploreWorldMapButton
+ *
+ * WARNING: Before calling this, make sure the user has download
+ * permission for the file!! (See DatasetPage.canDownloadFile())
+ *
+ * @param FileMetadata fm
+ * @return boolean
+ */
+ public boolean canUserSeeExploreWorldMapButtonFromPage(FileMetadata fm){
+
+ if (fm==null){
+ return false;
+ }
+
+ return this.canUserSeeExploreWorldMapButton(fm, true);
+ }
+
/**
* WARNING: Before calling this, make sure the user has download
* permission for the file!! (See DatasetPage.canDownloadFile())
@@ -103,11 +204,16 @@ public MapLayerMetadata getMapLayerMetadata(DataFile df) {
* @param fm FileMetadata
* @return boolean
*/
- public boolean canUserSeeExploreWorldMapButton(FileMetadata fm){
+ public boolean canUserSeeExploreWorldMapButton(FileMetadata fm, boolean permissionsChecked){
+
if (fm==null){
return false;
}
+ if (!permissionsChecked){
+ return false;
+ }
+
if (this.fileMetadataWorldMapExplore.containsKey(fm.getId())){
// Yes, return previous answer
//logger.info("using cached result for candownloadfile on filemetadata "+fid);
@@ -219,28 +325,72 @@ public boolean isShapefileType(FileMetadata fm) {
/**
- * WARNING: Assumes user isAuthenicated AND has Permission.EditDataset
- * - These checks are made on the DatasetPage which calls this method
+ * Call this for a Dataset or File page
+ * - calls private method canSeeMapButtonReminderToPublish
*
- * See table in: https://github.com/IQSS/dataverse/issues/1618
+ * WARNING: Assumes user isAuthenicated AND has Permission.EditDataset
+ * - These checks should be made on the DatasetPage or FilePage which calls this method
*
- * Can the user see a reminder to publish button?
- * (0) The application has to be set to Create Edit Maps - true
- * (1) Logged in user
- * (2) Is geospatial file?
- * (3) File has NOT been released
- * (4) No existing Map
- * (5) Can Edit Dataset
*
* @param FileMetadata fm
* @return boolean
*/
- public boolean canSeeMapButtonReminderToPublish(FileMetadata fm){
- if (fm==null){
+ public boolean canSeeMapButtonReminderToPublishFromPage(FileMetadata fm){
+ if (fm == null){
+ return false;
+ }
+
+ return this.canSeeMapButtonReminderToPublish(fm, true);
+
+ }
+
+ /**
+ * Call this when using the API
+ * - calls private method canSeeMapButtonReminderToPublish
+ *
+ * @param fm
+ * @param user
+ * @return
+ */
+ public boolean canSeeMapButtonReminderToPublishFromAPI(FileMetadata fm, User user){
+ if (fm == null){
+ return false;
+ }
+ if (user==null){
+ return false;
+ }
+
+ if (!this.permissionService.userOn(user, this.dataset).has(Permission.EditDataset)){
+ return false;
+ }
+
+ return this.canSeeMapButtonReminderToPublish(fm, true);
+
+ }
+
+
+
+ /**
+ * Assumes permissions have been checked!!
+ *
+ * See table in: https://github.com/IQSS/dataverse/issues/1618
+ *
+ * Can the user see a reminder to publish button?
+ * (1) Is the view GeoconnectViewMaps
+ * (2) Is this file a Shapefile or a Tabular file tagged as Geospatial?
+ * (3) Is this DataFile released? Yes, don't need reminder
+ * (4) Does a map already exist? Yes, don't need reminder
+ */
+ private boolean canSeeMapButtonReminderToPublish(FileMetadata fm, boolean permissionsChecked){
+ if (fm==null){
return false;
}
+ if (!permissionsChecked){
+ return false;
+ }
+
// (1) Is the view GeoconnectViewMaps
if (!settingsService.isTrueForKey(SettingsServiceBean.Key.GeoconnectCreateEditMaps, false)){
return false;
@@ -268,6 +418,46 @@ public boolean canSeeMapButtonReminderToPublish(FileMetadata fm){
// Looks good
//
return true;
+ }
+
+ /**
+ *
+ * WARNING: Assumes user isAuthenicated AND has Permission.EditDataset
+ * - These checks are made on the DatasetPage which calls this method
+ *
+ */
+ public boolean canUserSeeMapDataButtonFromPage(FileMetadata fm){
+
+ if (fm==null){
+ return false;
+ }
+ return this.canUserSeeMapDataButton(fm, true);
+ }
+
+
+
+ /**
+ * Call this when using the API
+ * - calls private method canUserSeeMapDataButton
+ *
+ * @param fm
+ * @param user
+ * @return
+ */
+ public boolean canUserSeeMapDataButtonFromAPI(FileMetadata fm, User user){
+ if (fm == null){
+ return false;
+ }
+ if (user==null){
+ return false;
+ }
+
+ if (!this.permissionService.userOn(user, this.dataset).has(Permission.EditDataset)){
+ return false;
+ }
+
+ return this.canUserSeeMapDataButton(fm, true);
+
}
/**
@@ -287,11 +477,15 @@ public boolean canSeeMapButtonReminderToPublish(FileMetadata fm){
* @param fm FileMetadata
* @return boolean
*/
- public boolean canUserSeeMapDataButton(FileMetadata fm){
+ private boolean canUserSeeMapDataButton(FileMetadata fm, boolean permissionsChecked){
if (fm==null){
return false;
}
+
+ if (!permissionsChecked){
+ return false;
+ }
// (1) Is this file a Shapefile or a Tabular file tagged as Geospatial?
// TO DO: EXPAND FOR TABULAR FILES TAGGED AS GEOSPATIAL!
From 0badbf8aab908b167b661456d6b67dc6c67f1f74 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Wed, 21 Sep 2016 16:02:29 -0400
Subject: [PATCH 04/86] For #2290 and #2465. Move twoRavens code from dataset
page to separate class
---
.../edu/harvard/iq/dataverse/DatasetPage.java | 143 ++++--------
.../datasetutility/TwoRavensHelper.java | 203 ++++++++++++++++++
.../WorldMapPermissionHelper.java | 9 +-
3 files changed, 256 insertions(+), 99 deletions(-)
create mode 100644 src/main/java/edu/harvard/iq/dataverse/datasetutility/TwoRavensHelper.java
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
index fc1503b0e53..26370afcb30 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
@@ -9,6 +9,7 @@
import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
import edu.harvard.iq.dataverse.authorization.users.GuestUser;
import edu.harvard.iq.dataverse.datasetutility.DuplicateFileChecker;
+import edu.harvard.iq.dataverse.datasetutility.TwoRavensHelper;
import edu.harvard.iq.dataverse.datasetutility.WorldMapPermissionHelper;
import edu.harvard.iq.dataverse.datavariable.VariableServiceBean;
import edu.harvard.iq.dataverse.engine.command.Command;
@@ -213,11 +214,12 @@ public enum DisplayMode {
// Used to help with displaying buttons related to the WorldMap
private WorldMapPermissionHelper worldMapPermissionHelper;
+ // Used to help with displaying buttons related to TwoRavens
+ private TwoRavensHelper twoRavensHelper;
+
// Used to store results of permissions checks
private final Map datasetPermissionMap = new HashMap<>(); // { Permission human_name : Boolean }
private final Map fileDownloadPermissionMap = new HashMap<>(); // { FileMetadata.id : Boolean }
-
- private final Map fileMetadataTwoRavensExploreMap = new HashMap<>(); // { FileMetadata.id : Boolean }
private DataFile selectedDownloadFile;
@@ -1019,67 +1021,7 @@ private void msg(String s){
}
- /**
- * Used in the .xhtml file to check whether a tabular file
- * may be viewed via TwoRavens
- *
- * @param fm
- * @return
- */
- public boolean canSeeTwoRavensExploreButton(FileMetadata fm){
-
- if (fm == null){
- return false;
- }
-
- // Has this already been checked?
- if (this.fileMetadataTwoRavensExploreMap.containsKey(fm.getId())){
- // Yes, return previous answer
- //logger.info("using cached result for candownloadfile on filemetadata "+fid);
- return this.fileMetadataTwoRavensExploreMap.get(fm.getId());
- }
-
-
- // (1) Is TwoRavens active via the "setting" table?
- // Nope: get out
- //
- if (!settingsService.isTrueForKey(SettingsServiceBean.Key.TwoRavensTabularView, false)){
- this.fileMetadataTwoRavensExploreMap.put(fm.getId(), false);
- return false;
- }
-
- // (2) Does the user have download permission?
- // Nope: get out
- //
- if (!(this.canDownloadFile(fm))){
- this.fileMetadataTwoRavensExploreMap.put(fm.getId(), false);
- return false;
- }
- // (3) Is the DataFile object there and persisted?
- // Nope: scat
- //
- if ((fm.getDataFile() == null)||(fm.getDataFile().getId()==null)){
- this.fileMetadataTwoRavensExploreMap.put(fm.getId(), false);
- return false;
- }
-
- // (4) Is there tabular data or is the ingest in progress?
- // Yes: great
- //
- if ((fm.getDataFile().isTabularData())||(fm.getDataFile().isIngestInProgress())){
- this.fileMetadataTwoRavensExploreMap.put(fm.getId(), true);
- return true;
- }
-
- // Nope
- this.fileMetadataTwoRavensExploreMap.put(fm.getId(), false);
- return false;
-
- // (empty fileMetadata.dataFile.id) and (fileMetadata.dataFile.tabularData or fileMetadata.dataFile.ingestInProgress)
- // and DatasetPage.canDownloadFile(fileMetadata)
- }
-
-
+
/**
* For development
*
@@ -1103,6 +1045,47 @@ public boolean isGeoconnectDebugAvailable(){
}
+ /**
+ * This object wraps methods used for hiding/displaying WorldMap related messages
+ *
+ */
+ private void loadTwoRavensHelper() {
+
+ twoRavensHelper = new TwoRavensHelper(settingsService, permissionService);
+
+ }
+
+ public boolean canSeeTwoRavensExploreButton(FileMetadata fm){
+ if (fm == null){
+ return false;
+ }
+ if (twoRavensHelper == null){
+ return false;
+ }
+
+ return twoRavensHelper.canSeeTwoRavensExploreButtonFromPage(fm);
+ }
+
+
+ public String getDataExploreURL() {
+ if (twoRavensHelper == null){
+ return "";
+ }
+ return twoRavensHelper.getDataExploreURL();
+ }
+
+
+ public String getDataExploreURLComplete(Long fileid) {
+ if (twoRavensHelper == null){
+ return "";
+ }
+ return twoRavensHelper.getDataExploreURLComplete(fileid, getApiTokenKey());
+
+
+ // return TwoRavensDefaultLocal + fileid + "&" + getApiTokenKey();
+ }
+
+
/**
* This object wraps methods used for hiding/displaying WorldMap related messages
*
@@ -1380,6 +1363,7 @@ private String init(boolean initFull) {
// lazyModel = new LazyFileMetadataDataModel(workingVersion.getId(), datafileService );
// populate MapLayerMetadata
this.loadWorldMapPermissionHelper(); // A DataFile may have a related MapLayerMetadata object
+ this.loadTwoRavensHelper();
}
} else if (ownerId != null) {
// create mode for a new child dataset
@@ -3220,40 +3204,7 @@ public Boolean isDatasetPublishPopupCustomTextOnAllVersions(){
return settingsService.isTrueForKey(SettingsServiceBean.Key.DatasetPublishPopupCustomTextOnAllVersions, false);
}
- public String getDataExploreURL() {
- String TwoRavensUrl = settingsService.getValueForKey(SettingsServiceBean.Key.TwoRavensUrl);
- if (TwoRavensUrl != null && !TwoRavensUrl.equals("")) {
- return TwoRavensUrl;
- }
-
- return "";
- }
-
- public String getDataExploreURLComplete(Long fileid) {
- String TwoRavensUrl = settingsService.getValueForKey(SettingsServiceBean.Key.TwoRavensUrl);
- String TwoRavensDefaultLocal = "/dataexplore/gui.html?dfId=";
-
- if (TwoRavensUrl != null && !TwoRavensUrl.equals("")) {
- // If we have TwoRavensUrl set up as, as an optional
- // configuration service, it must mean that TwoRavens is sitting
- // on some remote server. And that in turn means that we must use
- // full URLs to pass data and metadata to it.
- // update: actually, no we don't want to use this "dataurl" notation.
- // switching back to the dfId=:
- // -- L.A. 4.1
- /*
- String tabularDataURL = getTabularDataFileURL(fileid);
- String tabularMetaURL = getVariableMetadataURL(fileid);
- return TwoRavensUrl + "?ddiurl=" + tabularMetaURL + "&dataurl=" + tabularDataURL + "&" + getApiTokenKey();
- */
- return TwoRavensUrl + "?dfId=" + fileid + "&" + getApiTokenKey();
- }
-
- // For a local TwoRavens setup it's enough to call it with just
- // the file id:
- return TwoRavensDefaultLocal + fileid + "&" + getApiTokenKey();
- }
public String getVariableMetadataURL(Long fileid) {
String myHostURL = getDataverseSiteUrl();
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/TwoRavensHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/TwoRavensHelper.java
new file mode 100644
index 00000000000..fa16e83be12
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/TwoRavensHelper.java
@@ -0,0 +1,203 @@
+/*
+ * To change this license header, choose License Headers in Project Properties.
+ * To change this template file, choose Tools | Templates
+ * and open the template in the editor.
+ */
+package edu.harvard.iq.dataverse.datasetutility;
+
+import edu.harvard.iq.dataverse.FileMetadata;
+import edu.harvard.iq.dataverse.PermissionServiceBean;
+import edu.harvard.iq.dataverse.authorization.Permission;
+import edu.harvard.iq.dataverse.authorization.users.User;
+import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ *
+ * @author rmp553
+ */
+public class TwoRavensHelper {
+
+ private final SettingsServiceBean settingsService;
+ private PermissionServiceBean permissionService;
+
+ private final Map fileMetadataTwoRavensExploreMap = new HashMap<>(); // { FileMetadata.id : Boolean }
+
+ public TwoRavensHelper(SettingsServiceBean settingsService, PermissionServiceBean permissionService){
+ if (settingsService == null){
+ throw new NullPointerException("settingsService cannot be null");
+ }
+ if (permissionService == null){
+ throw new NullPointerException("permissionService cannot be null");
+ }
+ this.permissionService = permissionService;
+ this.settingsService = settingsService;
+
+
+ }
+
+
+ /**
+ * Call this from a Dataset or File page
+ * - calls private method canSeeTwoRavensExploreButton
+ *
+ * WARNING: Before calling this, make sure the user has download
+ * permission for the file!! (See DatasetPage.canDownloadFile())
+ *
+ * @param fm
+ * @return
+ */
+ public boolean canSeeTwoRavensExploreButtonFromAPI(FileMetadata fm, User user){
+
+ if (fm == null){
+ return false;
+ }
+
+ if (user == null){
+ return false;
+ }
+
+ if (!this.permissionService.userOn(user, fm.getDataFile()).has(Permission.DownloadFile)){
+ return false;
+ }
+
+ return this.canSeeTwoRavensExploreButton(fm, true);
+ }
+
+ /**
+ * Call this from a Dataset or File page
+ * - calls private method canSeeTwoRavensExploreButton
+ *
+ * WARNING: Before calling this, make sure the user has download
+ * permission for the file!! (See DatasetPage.canDownloadFile())
+ *
+ * @param fm
+ * @return
+ */
+ public boolean canSeeTwoRavensExploreButtonFromPage(FileMetadata fm){
+
+ if (fm == null){
+ return false;
+ }
+
+ return this.canSeeTwoRavensExploreButton(fm, true);
+ }
+
+ /**
+ * Used to check whether a tabular file
+ * may be viewed via TwoRavens
+ *
+ * @param fm
+ * @return
+ */
+ public boolean canSeeTwoRavensExploreButton(FileMetadata fm, boolean permissionsChecked){
+
+ if (fm == null){
+ return false;
+ }
+
+ // This is only here as a reminder to the public method users
+ if (!permissionsChecked){
+ return false;
+ }
+
+ // Has this already been checked?
+ if (this.fileMetadataTwoRavensExploreMap.containsKey(fm.getId())){
+ // Yes, return previous answer
+ //logger.info("using cached result for candownloadfile on filemetadata "+fid);
+ return this.fileMetadataTwoRavensExploreMap.get(fm.getId());
+ }
+
+
+ // (1) Is TwoRavens active via the "setting" table?
+ // Nope: get out
+ //
+ if (!settingsService.isTrueForKey(SettingsServiceBean.Key.TwoRavensTabularView, false)){
+ this.fileMetadataTwoRavensExploreMap.put(fm.getId(), false);
+ return false;
+ }
+
+
+ // (2) Is the DataFile object there and persisted?
+ // Nope: scat
+ //
+ if ((fm.getDataFile() == null)||(fm.getDataFile().getId()==null)){
+ this.fileMetadataTwoRavensExploreMap.put(fm.getId(), false);
+ return false;
+ }
+
+ // (3) Is there tabular data or is the ingest in progress?
+ // Yes: great
+ //
+ if ((fm.getDataFile().isTabularData())||(fm.getDataFile().isIngestInProgress())){
+ this.fileMetadataTwoRavensExploreMap.put(fm.getId(), true);
+ return true;
+ }
+
+ // Nope
+ this.fileMetadataTwoRavensExploreMap.put(fm.getId(), false);
+ return false;
+
+ // (empty fileMetadata.dataFile.id) and (fileMetadata.dataFile.tabularData or fileMetadata.dataFile.ingestInProgress)
+ // and DatasetPage.canDownloadFile(fileMetadata)
+ }
+
+
+ /**
+ * Copied over from the dataset page - 9/21/2016
+ *
+ * @return
+ */
+ public String getDataExploreURL() {
+ String TwoRavensUrl = settingsService.getValueForKey(SettingsServiceBean.Key.TwoRavensUrl);
+
+ if (TwoRavensUrl != null && !TwoRavensUrl.equals("")) {
+ return TwoRavensUrl;
+ }
+
+ return "";
+ }
+
+
+ /**
+ * Copied over from the dataset page - 9/21/2016
+ *
+ * @param fileid
+ * @param apiTokenKey
+ * @return
+ */
+ public String getDataExploreURLComplete(Long fileid, String apiTokenKey) {
+
+ if (fileid == null){
+ throw new NullPointerException("fileid cannot be null");
+ }
+ if (apiTokenKey == null){
+ throw new NullPointerException("apiTokenKey cannot be null (at least adding this check)");
+ }
+
+
+ String TwoRavensUrl = settingsService.getValueForKey(SettingsServiceBean.Key.TwoRavensUrl);
+ String TwoRavensDefaultLocal = "/dataexplore/gui.html?dfId=";
+
+ if (TwoRavensUrl != null && !TwoRavensUrl.equals("")) {
+ // If we have TwoRavensUrl set up as, as an optional
+ // configuration service, it must mean that TwoRavens is sitting
+ // on some remote server. And that in turn means that we must use
+ // full URLs to pass data and metadata to it.
+ // update: actually, no we don't want to use this "dataurl" notation.
+ // switching back to the dfId=:
+ // -- L.A. 4.1
+ /*
+ String tabularDataURL = getTabularDataFileURL(fileid);
+ String tabularMetaURL = getVariableMetadataURL(fileid);
+ return TwoRavensUrl + "?ddiurl=" + tabularMetaURL + "&dataurl=" + tabularDataURL + "&" + getApiTokenKey();
+ */
+ return TwoRavensUrl + "?dfId=" + fileid + "&" + apiTokenKey;
+ }
+
+ // For a local TwoRavens setup it's enough to call it with just
+ // the file id:
+ return TwoRavensDefaultLocal + fileid + "&" + apiTokenKey;
+ }
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/WorldMapPermissionHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/WorldMapPermissionHelper.java
index eece1bb48fe..69205f29452 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/WorldMapPermissionHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/WorldMapPermissionHelper.java
@@ -173,7 +173,7 @@ public boolean canUserSeeExploreWorldMapButtonFromAPI(FileMetadata fm, User user
}
/**
- * Call this for a Dataset or File page
+ * Call this from a Dataset or File page
* - calls private method canUserSeeExploreWorldMapButton
*
* WARNING: Before calling this, make sure the user has download
@@ -204,12 +204,13 @@ public boolean canUserSeeExploreWorldMapButtonFromPage(FileMetadata fm){
* @param fm FileMetadata
* @return boolean
*/
- public boolean canUserSeeExploreWorldMapButton(FileMetadata fm, boolean permissionsChecked){
+ private boolean canUserSeeExploreWorldMapButton(FileMetadata fm, boolean permissionsChecked){
if (fm==null){
return false;
}
+ // This is only here to make the public method users think...
if (!permissionsChecked){
return false;
}
@@ -325,7 +326,7 @@ public boolean isShapefileType(FileMetadata fm) {
/**
- * Call this for a Dataset or File page
+ * Call this from a Dataset or File page
* - calls private method canSeeMapButtonReminderToPublish
*
* WARNING: Assumes user isAuthenicated AND has Permission.EditDataset
@@ -387,6 +388,7 @@ private boolean canSeeMapButtonReminderToPublish(FileMetadata fm, boolean permis
return false;
}
+ // This is only here as a reminder to the public method users
if (!permissionsChecked){
return false;
}
@@ -483,6 +485,7 @@ private boolean canUserSeeMapDataButton(FileMetadata fm, boolean permissionsChec
return false;
}
+ // This is only here as a reminder to the public method users
if (!permissionsChecked){
return false;
}
From 7810af4f3817b8cf48bb58b0f5007c04d94e11d0 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Thu, 22 Sep 2016 14:06:10 -0400
Subject: [PATCH 05/86] adding typo change before mucking up repo too much...
---
.../java/edu/harvard/iq/dataverse/DatasetPage.java | 2 +-
.../edu/harvard/iq/dataverse/EditDatafilesPage.java | 2 +-
.../java/edu/harvard/iq/dataverse/api/Admin.java | 13 +++++++++++--
3 files changed, 13 insertions(+), 4 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
index 26370afcb30..83303f3255f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
@@ -2654,7 +2654,7 @@ public String cancel() {
public boolean isDuplicate(FileMetadata fileMetadata) {
- return DuplicateFileChecker.IsDuplicateOriginalWay(workingVersion, fileMetadata);
+ return DuplicateFileChecker.isDuplicateOriginalWay(workingVersion, fileMetadata);
}
private HttpClient getClient() {
diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
index 97fb4f20bb8..9e41cddd102 100644
--- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
@@ -1038,7 +1038,7 @@ public String cancel() {
*/
public boolean isDuplicate(FileMetadata fileMetadata) {
- return DuplicateFileChecker.IsDuplicateOriginalWay(workingVersion, fileMetadata);
+ return DuplicateFileChecker.isDuplicateOriginalWay(workingVersion, fileMetadata);
}
private HttpClient getClient() {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
index 0026ec85a6b..921a72277b7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
@@ -1,5 +1,6 @@
package edu.harvard.iq.dataverse.api;
+
import edu.harvard.iq.dataverse.Dataverse;
import edu.harvard.iq.dataverse.EMailValidator;
import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord;
@@ -30,7 +31,6 @@
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.core.Response;
-
import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder;
import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*;
import java.io.StringReader;
@@ -543,5 +543,14 @@ public Response convertUserFromBcryptToSha1(String json) {
BuiltinUser savedUser = builtinUserService.save(builtinUser);
return okResponse("foo: " + savedUser);
}
-
+
+
+ @Path("mymy")
+ @GET
+ public Response testIt_001() {
+
+ return okResponse("hullo, system!");
+ }
+
+
}
From 53616da18ade89048b89950f67d98f15e8d74d6c Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Thu, 22 Sep 2016 14:56:27 -0400
Subject: [PATCH 06/86] should have been in last commit with typo fix
---
.../iq/dataverse/datasetutility/DuplicateFileChecker.java | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/DuplicateFileChecker.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/DuplicateFileChecker.java
index 7be3ae428fc..d2e38d50582 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/DuplicateFileChecker.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/DuplicateFileChecker.java
@@ -122,7 +122,7 @@ public Map getDatasetHashesFromDatabase(DatasetVersion datasetV
* @param fileMetadata
* @return
*/
- public static boolean IsDuplicateOriginalWay(DatasetVersion workingVersion, FileMetadata fileMetadata) {
+ public static boolean isDuplicateOriginalWay(DatasetVersion workingVersion, FileMetadata fileMetadata) {
if (workingVersion == null){
throw new NullPointerException("datasetVersion cannot be null");
}
From 26afcdf3d2fccb321bf8dba293ebc8f9716d040b Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Thu, 22 Sep 2016 16:56:22 -0400
Subject: [PATCH 07/86] Work in progress. Lots of debug stmts
---
.../iq/dataverse/EditDatafilesPage.java | 31 +--
.../harvard/iq/dataverse/api/FileUpload.java | 249 ++++++++++++++++++
.../datasetutility/AddReplaceFileHelper.java | 101 +++++++
3 files changed, 351 insertions(+), 30 deletions(-)
create mode 100644 src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
create mode 100644 src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
index 9e41cddd102..f45032282bc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
@@ -6,35 +6,16 @@
import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
import edu.harvard.iq.dataverse.authorization.Permission;
-import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUserServiceBean;
-import edu.harvard.iq.dataverse.authorization.users.ApiToken;
-import edu.harvard.iq.dataverse.authorization.users.User;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
import edu.harvard.iq.dataverse.datasetutility.DuplicateFileChecker;
-import edu.harvard.iq.dataverse.datavariable.VariableServiceBean;
import edu.harvard.iq.dataverse.engine.command.Command;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
-import edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetCommand;
-import edu.harvard.iq.dataverse.engine.command.impl.CreateGuestbookResponseCommand;
-import edu.harvard.iq.dataverse.engine.command.impl.DeaccessionDatasetVersionCommand;
import edu.harvard.iq.dataverse.engine.command.impl.DeleteDataFileCommand;
-import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetVersionCommand;
-import edu.harvard.iq.dataverse.engine.command.impl.DestroyDatasetCommand;
-import edu.harvard.iq.dataverse.engine.command.impl.LinkDatasetCommand;
-import edu.harvard.iq.dataverse.engine.command.impl.PublishDatasetCommand;
-import edu.harvard.iq.dataverse.engine.command.impl.PublishDataverseCommand;
import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetCommand;
import edu.harvard.iq.dataverse.ingest.IngestRequest;
import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
-import edu.harvard.iq.dataverse.metadataimport.ForeignMetadataImportServiceBean;
-import edu.harvard.iq.dataverse.search.FacetCategory;
import edu.harvard.iq.dataverse.search.FileView;
-import edu.harvard.iq.dataverse.search.SearchFilesServiceBean;
-import edu.harvard.iq.dataverse.search.SolrSearchResult;
-import edu.harvard.iq.dataverse.search.SortBy;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
-import edu.harvard.iq.dataverse.util.BundleUtil;
-import edu.harvard.iq.dataverse.util.FileSortFieldAndOrder;
import edu.harvard.iq.dataverse.util.JsfHelper;
import static edu.harvard.iq.dataverse.util.JsfHelper.JH;
import edu.harvard.iq.dataverse.util.StringUtil;
@@ -45,24 +26,20 @@
import java.io.InputStream;
import java.io.StringReader;
import java.nio.file.Files;
-import java.nio.file.Path;
import java.nio.file.Paths;
import java.sql.Timestamp;
-import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
-import java.util.Set;
import java.util.logging.Logger;
import javax.ejb.EJB;
import javax.ejb.EJBException;
import javax.faces.application.FacesMessage;
import javax.faces.context.FacesContext;
import javax.faces.event.ActionEvent;
-import javax.faces.event.ValueChangeEvent;
import javax.faces.view.ViewScoped;
import javax.inject.Inject;
import javax.inject.Named;
@@ -72,16 +49,10 @@
import javax.json.JsonObject;
import javax.json.JsonArray;
import javax.json.JsonReader;
-import javax.servlet.ServletOutputStream;
-import javax.servlet.http.HttpServletResponse;
-import javax.validation.ConstraintViolation;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.methods.GetMethod;
-import org.primefaces.context.RequestContext;
import java.text.DateFormat;
import java.util.Arrays;
-import java.util.HashSet;
-import javax.faces.model.SelectItem;
import java.util.logging.Level;
import javax.faces.event.AjaxBehaviorEvent;
@@ -1201,9 +1172,9 @@ public void handleDropBoxUpload(ActionEvent event) {
public void handleFileUpload(FileUploadEvent event) {
UploadedFile uFile = event.getFile();
+
List dFileList = null;
-
try {
// Note: A single file may be unzipped into multiple files
dFileList = ingestService.createDataFiles(workingVersion, uFile.getInputstream(), uFile.getFileName(), uFile.getContentType());
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
new file mode 100644
index 00000000000..712b7c52ecc
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
@@ -0,0 +1,249 @@
+/*
+ * To change this license header, choose License Headers in Project Properties.
+ * To change this template file, choose Tools | Templates
+ * and open the template in the editor.
+ */
+package edu.harvard.iq.dataverse.api;
+
+//import com.sun.jersey.core.header.FormDataContentDisposition;
+//import com.sun.jersey.multipart.FormDataParam;
+import edu.harvard.iq.dataverse.DataFile;
+import edu.harvard.iq.dataverse.Dataset;
+import edu.harvard.iq.dataverse.DatasetServiceBean;
+import edu.harvard.iq.dataverse.DatasetVersion;
+import edu.harvard.iq.dataverse.DatasetVersionServiceBean;
+import edu.harvard.iq.dataverse.DataverseServiceBean;
+import edu.harvard.iq.dataverse.FileMetadata;
+import edu.harvard.iq.dataverse.Template;
+import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import javax.ejb.EJB;
+import javax.ejb.Stateless;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.DefaultValue;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+
+/**
+ *
+ * @author rmp553
+ */
+@Stateless
+@Path("upload")
+public class FileUpload extends AbstractApiBean {
+
+ @EJB
+ DatasetServiceBean datasetService;
+ @EJB
+ DatasetVersionServiceBean datasetVersionService;
+ @EJB
+ DataverseServiceBean dataverseService;
+ @EJB
+ IngestServiceBean ingestService;
+
+ private static final Logger logger = Logger.getLogger(FileUpload.class.getName());
+
+ // for testing
+ private static final String SERVER_UPLOAD_LOCATION_FOLDER = "/Users/rmp553/Documents/iqss-git/dataverse-helper-scripts/src/api_scripts/output/";
+
+ /*
+ @POST
+ @Path("hello") //Your Path or URL to call this service
+ @Consumes(MediaType.MULTIPART_FORM_DATA)
+ public Response uploadFile(
+ @DefaultValue("true") @FormDataParam("enabled") boolean enabled,
+ @FormDataParam("file") InputStream uploadedInputStream,
+ @FormDataParam("file") FormDataContentDisposition fileDetail) {
+ //Your local disk path where you want to store the file
+ String uploadedFileLocation = SERVER_UPLOAD_LOCATION_FOLDER + fileDetail.getFileName();
+ System.out.println(uploadedFileLocation);
+ // save it
+ File objFile=new File(uploadedFileLocation);
+ if(objFile.exists())
+ {
+ objFile.delete();
+
+ }
+
+ saveToFile(uploadedInputStream, uploadedFileLocation);
+
+ String userMsg = "File uploaded via Jersey based RESTFul Webservice to: " + uploadedFileLocation;
+
+ return okResponse(userMsg);
+ }
+
+ private void saveToFile(InputStream uploadedInputStream,
+ String uploadedFileLocation) {
+
+ try {
+ OutputStream out = null;
+ int read = 0;
+ byte[] bytes = new byte[1024];
+
+ out = new FileOutputStream(new File(uploadedFileLocation));
+ while ((read = uploadedInputStream.read(bytes)) != -1) {
+ out.write(bytes, 0, read);
+ }
+ out.flush();
+ out.close();
+ } catch (IOException e) {
+
+ e.printStackTrace();
+ }
+ }
+ */
+ /*
+ @POST
+ @Path("hello")
+ @Consumes(MediaType.MULTIPART_FORM_DATA)
+ public Response uploadFile(
+ @FormDataParam("file") InputStream fileInputStream,
+ @FormDataParam("file") FormDataContentDisposition contentDispositionHeader) {
+
+ String filePath = SERVER_UPLOAD_LOCATION_FOLDER + contentDispositionHeader.getFileName();
+
+ // save the file to the server
+ saveFile(fileInputStream, filePath);
+
+ String output = "File saved to server location : " + filePath;
+
+ return okResponse(output);
+ //return Response.status(200).entity(output).build();
+
+ }
+
+ // save uploaded file to a defined location on the server
+ private void saveFile(InputStream uploadedInputStream,
+ String serverLocation) {
+
+ try {
+ OutputStream outpuStream = new FileOutputStream(new File(serverLocation));
+ int read = 0;
+ byte[] bytes = new byte[1024];
+
+ outpuStream = new FileOutputStream(new File(serverLocation));
+ while ((read = uploadedInputStream.read(bytes)) != -1) {
+ outpuStream.write(bytes, 0, read);
+ }
+ outpuStream.flush();
+ outpuStream.close();
+ } catch (IOException e) {
+
+ e.printStackTrace();
+ }
+
+ }
+ */
+
+ private InputStream getSampleFile(){
+
+ InputStream is = null;
+ String testFileName = "/Users/rmp553/Documents/iqss-git/dataverse-helper-scripts/src/api_scripts/input/howdy.txt";
+ //testFileName = "/Users/rmp553/NetBeansProjects/dataverse/src/main/java/edu/harvard/iq/dataverse/datasetutility/howdy.txt";
+ try {
+ is = new FileInputStream(testFileName);
+ //is.close();
+ } catch (FileNotFoundException e) {
+ e.printStackTrace();
+ return null;
+ } catch (IOException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ return null;
+ }
+
+ return is;
+
+ }
+
+ private void msg(String m){
+ System.out.println(m);
+ }
+ private void dashes(){
+ msg("----------------");
+ }
+
+ @GET
+ @Path("hi")
+ public Response hi(){
+
+ InputStream testFile = getSampleFile();
+ if (testFile == null){
+ return okResponse("Couldn't find the file!!");
+ }
+ DatasetVersion workingVersion = datasetVersionService.find(new Long(3));
+ Dataset dataset = workingVersion.getDataset(); //datasetService.find(new Long(26));
+
+ int cnt = 0;
+ for (FileMetadata fm : workingVersion.getFileMetadatas()){
+ cnt++;
+ msg("File " + cnt + ": " + fm.getLabel());
+ }
+ dashes();
+ //DatasetVersion workingVersion = null;
+
+ /*
+ ------------------------------------------
+ ------------------------------------------
+ Set up the workingVersion for editing
+ - copied from DatasetPage*
+ * undisputed king of tech debt...
+ ------------------------------------------
+ */
+ List dataverseTemplates = new ArrayList();
+ Long ownerId = dataset.getOwner().getId();
+ Template defaultTemplate = null;
+ Template selectedTemplate = null;
+ dataverseTemplates = dataverseService.find(ownerId).getTemplates();
+
+ if (!dataverseService.find(ownerId).isTemplateRoot()) {
+ dataverseTemplates.addAll(dataverseService.find(ownerId).getParentTemplates());
+ }
+
+ defaultTemplate = dataverseService.find(ownerId).getDefaultTemplate();
+ if (defaultTemplate != null) {
+ selectedTemplate = defaultTemplate;
+ for (Template testT : dataverseTemplates) {
+ if (defaultTemplate.getId().equals(testT.getId())) {
+ selectedTemplate = testT;
+ }
+ }
+ workingVersion = dataset.getEditVersion(selectedTemplate);
+ }
+ // -------------------------------------
+
+ List dFileList = null;
+ msg("state of the workingVersion: " + workingVersion.getVersionState());
+ try {
+ msg("The starting bell rings....");
+ dFileList = ingestService.createDataFiles(workingVersion,
+ testFile,
+ "hullo.txt",
+ "text/plain");
+ msg("Almost there....");
+ } catch (IOException ex) {
+ msg("Not happy...:" + ex.toString());
+ logger.severe(ex.toString());
+ return okResponse("IOException when trying to ingest: " + testFile.toString());
+ }
+ msg("But ok, we can continue now...");
+ // testDataset =
+
+ return okResponse("hi");
+
+ }
+
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
new file mode 100644
index 00000000000..a3d61f3feb0
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -0,0 +1,101 @@
+/*
+ * To change this license header, choose License Headers in Project Properties.
+ * To change this template file, choose Tools | Templates
+ * and open the template in the editor.
+ */
+package edu.harvard.iq.dataverse.datasetutility;
+
+import edu.harvard.iq.dataverse.Dataset;
+
+/**
+ * Methods to add or replace a single file.
+ *
+ * @author rmp553
+ */
+public class AddReplaceFileHelper {
+
+
+ private Dataset dataset;
+
+ public AddReplaceFileHelper(Dataset dataset){
+
+ if (dataset == null){
+ throw new NullPointerException("dataset cannot be null");
+ }
+
+ this.dataset = dataset;
+ }
+ /*
+ DatasetPage sequence:
+
+ (A) editFilesFragment.xhtml -> EditDataFilesPage.handleFileUpload
+ (B) EditDataFilesPage.java -> handleFileUpload
+ (1) UploadedFile uf event.getFile() // UploadedFile
+ --------
+ UploadedFile interface:
+ public String getFileName()
+ public InputStream getInputstream() throws IOException;
+ public long getSize();
+ public byte[] getContents();
+ public String getContentType();
+ public void write(String string) throws Exception;
+ --------
+ (2) List dFileList = null;
+ try {
+ // Note: A single file may be unzipped into multiple files
+ dFileList = ingestService.createDataFiles(workingVersion, uFile.getInputstream(), uFile.getFileName(), uFile.getContentType());
+ }
+
+ (3) processUploadedFileList(dFileList);
+
+ (C) EditDataFilesPage.java -> processUploadedFileList
+ - iterate through list of DataFile objects -- which COULD happen with a single .zip
+ - isDuplicate check
+ - if good:
+ - newFiles.add(dataFile); // looks good
+ - fileMetadatas.add(dataFile.getFileMetadata());
+ - return null; // looks good, return null
+ (D) save() // in the UI, user clicks the button. API is automatic if no errors
+
+ (1) Look for constraintViolations:
+ // DatasetVersion workingVersion;
+ Set constraintViolations = workingVersion.validate();
+ if (!constraintViolations.isEmpty()) {
+ //JsfHelper.addFlashMessage(JH.localize("dataset.message.validationError"));
+ JH.addMessage(FacesMessage.SEVERITY_ERROR, JH.localize("dataset.message.validationError"));
+ //FacesContext.getCurrentInstance().addMessage(null, new FacesMessage(FacesMessage.SEVERITY_ERROR, "Validation Error", "See below for details."));
+ return "";
+ }
+
+ (2) Use the ingestService for a final check
+ // ask Leonid if this is needed for API
+ // One last check before we save the files - go through the newly-uploaded
+ // ones and modify their names so that there are no duplicates.
+ // (but should we really be doing it here? - maybe a better approach to do it
+ // in the ingest service bean, when the files get uploaded.)
+ // Finally, save the files permanently:
+ ingestService.addFiles(workingVersion, newFiles);
+
+ (3) Use the API to save the dataset
+ - make new CreateDatasetCommand
+ - check if dataset has a template
+ - creates UserNotification message
+
+ */
+ // Checks:
+ // - Does the md5 already exist in the dataset?
+ // - If it's a replace, has the name and/or extension changed?
+ // On failure, send back warning
+ //
+ // - All looks good
+ // - Create a DataFile
+ // - Create a FileMetadata
+ // - Copy the Dataset version, making a new DRAFT
+ // - If it's replace, don't copy the file being replaced
+ // - Add this new file.
+ // ....
+
+
+
+
+}
From eb7e5e57391fc13bc0950152580d9dfa7443aac1 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Thu, 22 Sep 2016 17:56:05 -0400
Subject: [PATCH 08/86] almost to getting file to save. crashing on command
submit. Need to check that the state of the Dataset is correct, etc
---
.../harvard/iq/dataverse/api/FileUpload.java | 148 +++++++++++++-----
1 file changed, 105 insertions(+), 43 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
index 712b7c52ecc..f1bfb03415b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
@@ -12,29 +12,34 @@
import edu.harvard.iq.dataverse.DatasetServiceBean;
import edu.harvard.iq.dataverse.DatasetVersion;
import edu.harvard.iq.dataverse.DatasetVersionServiceBean;
+import edu.harvard.iq.dataverse.DataverseRequestServiceBean;
import edu.harvard.iq.dataverse.DataverseServiceBean;
+import edu.harvard.iq.dataverse.EjbDataverseEngine;
import edu.harvard.iq.dataverse.FileMetadata;
-import edu.harvard.iq.dataverse.Template;
+import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.datasetutility.DuplicateFileChecker;
+import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+import edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetCommand;
import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
-import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
-import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
+import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.ejb.EJB;
+import javax.ejb.EJBException;
import javax.ejb.Stateless;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.DefaultValue;
+import javax.faces.application.FacesMessage;
+import javax.faces.context.FacesContext;
+import javax.inject.Inject;
+import javax.validation.ConstraintViolation;
import javax.ws.rs.GET;
-import javax.ws.rs.POST;
import javax.ws.rs.Path;
-import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
/**
@@ -53,7 +58,12 @@ public class FileUpload extends AbstractApiBean {
DataverseServiceBean dataverseService;
@EJB
IngestServiceBean ingestService;
-
+ @Inject
+ DataverseRequestServiceBean dvRequestService;
+ @EJB
+ EjbDataverseEngine commandEngine;
+
+
private static final Logger logger = Logger.getLogger(FileUpload.class.getName());
// for testing
@@ -175,56 +185,42 @@ private void msg(String m){
private void dashes(){
msg("----------------");
}
+ private void msgt(String m){
+ dashes(); msg(m); dashes();
+ }
@GET
@Path("hi")
public Response hi(){
+ // -------------------------------------
+ msgt("(1) getSampleFile() + workingVersion");
+ // -------------------------------------
+
InputStream testFile = getSampleFile();
if (testFile == null){
return okResponse("Couldn't find the file!!");
}
DatasetVersion workingVersion = datasetVersionService.find(new Long(3));
- Dataset dataset = workingVersion.getDataset(); //datasetService.find(new Long(26));
-
+
+ if (workingVersion.getVersionState()!=DatasetVersion.VersionState.DRAFT){
+ return okResponse("For testing, making the sure the state is DRAFT. This workingVersion is: " + workingVersion.getVersionState());
+ }
+
+ // List the current files
+ //
int cnt = 0;
for (FileMetadata fm : workingVersion.getFileMetadatas()){
cnt++;
msg("File " + cnt + ": " + fm.getLabel());
}
dashes();
- //DatasetVersion workingVersion = null;
-
- /*
- ------------------------------------------
- ------------------------------------------
- Set up the workingVersion for editing
- - copied from DatasetPage*
- * undisputed king of tech debt...
- ------------------------------------------
- */
- List dataverseTemplates = new ArrayList();
- Long ownerId = dataset.getOwner().getId();
- Template defaultTemplate = null;
- Template selectedTemplate = null;
- dataverseTemplates = dataverseService.find(ownerId).getTemplates();
-
- if (!dataverseService.find(ownerId).isTemplateRoot()) {
- dataverseTemplates.addAll(dataverseService.find(ownerId).getParentTemplates());
- }
- defaultTemplate = dataverseService.find(ownerId).getDefaultTemplate();
- if (defaultTemplate != null) {
- selectedTemplate = defaultTemplate;
- for (Template testT : dataverseTemplates) {
- if (defaultTemplate.getId().equals(testT.getId())) {
- selectedTemplate = testT;
- }
- }
- workingVersion = dataset.getEditVersion(selectedTemplate);
- }
+
// -------------------------------------
-
+ msgt("(2) ingestService.createDataFiles");
+ // -------------------------------------
+
List dFileList = null;
msg("state of the workingVersion: " + workingVersion.getVersionState());
try {
@@ -240,8 +236,74 @@ public Response hi(){
return okResponse("IOException when trying to ingest: " + testFile.toString());
}
msg("But ok, we can continue now...");
- // testDataset =
-
+
+ // -------------------------------------
+ msgt("3 Duplicate check");
+ // -------------------------------------
+ List newFiles = new ArrayList();
+
+ DuplicateFileChecker dfc = new DuplicateFileChecker(datasetVersionService);
+ for (DataFile df : dFileList){
+ //if (dfc.isFileInSavedDatasetVersion(workingVersion, df.getmd5())){
+ // return okResponse("This file has a dupe md5! " + df.getFileMetadata().getLabel());
+ if (DuplicateFileChecker.isDuplicateOriginalWay(workingVersion, df.getFileMetadata())){
+ return okResponse("This file has a dupe md5! " + df.getFileMetadata().getLabel());
+ }else{
+ newFiles.add(df);
+ }
+ }
+
+ // -------------------------------------
+ msgt("4 Check constraints");
+ // -------------------------------------
+ Set constraintViolations = workingVersion.validate();
+ List errMsgs = new ArrayList<>();
+ for (ConstraintViolation violation : constraintViolations){
+ msg("Violation found! :" + violation.getMessage());
+ errMsgs.add(violation.getMessage());
+ }
+ if (errMsgs.size() > 0){
+ return okResponse("Constraint violations found! " + String.join("\n", errMsgs));
+ }
+
+
+ // -------------------------------------
+ msgt("5 Add the files!");
+ // -------------------------------------
+ ingestService.addFiles(workingVersion, newFiles);
+
+
+ // -------------------------------------
+ msgt("6 Make the command!");
+ // -------------------------------------
+ /*
+
+ execCommand(new SetDatasetCitationDateCommand(createDataverseRequest(findUserOrDie()), findDatasetOrDie(id), dsfType));
+
+ */
+ AuthenticatedUser authUser = authSvc.findByID(new Long(1));
+ msg("authUser: " + authUser);
+ DataverseRequest dvRequest = createDataverseRequest(authUser);
+ msg("dvRequest: " + dvRequest);
+
+ if (dvRequest == null){
+ return okResponse("Failed, dvRequest is null");
+ }
+ CreateDatasetCommand cmd = new CreateDatasetCommand(workingVersion.getDataset(),
+ dvRequest);
+
+ // -------------------------------------
+ msgt("7 Run the command!");
+ // -------------------------------------
+ try {
+ Dataset newDataset = commandEngine.submit(cmd);
+ } catch (CommandException ex) {
+ //ex.getMessage()
+ msgt("Bombed: " + ex.getMessage());
+ //Logger.getLogger(FileUpload.class.getName()).log(Level.SEVERE, null, ex);
+ }catch (EJBException ex) {
+ msgt("Bombed2: " + ex.getMessage());
+ }
+
return okResponse("hi");
}
From eea46552f88003ca1677904607cc27b2030376ac Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Fri, 23 Sep 2016 15:25:08 -0400
Subject: [PATCH 09/86] WARNING - NEEDS TO BE ADDED BACK. trying to avoid
local error
---
.../java/edu/harvard/iq/dataverse/PermissionServiceBean.java | 3 +++
1 file changed, 3 insertions(+)
diff --git a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java
index 82019c3db85..c677be53d24 100644
--- a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java
@@ -209,11 +209,14 @@ public Set permissionsFor( DataverseRequest req, DvObject dvo ) {
// Add permissions specifically given to the user
permissions.addAll( permissionsForSingleRoleAssignee(req.getUser(),dvo) );
+
+ /*
Set groups = groupService.groupsFor(req,dvo);
// Add permissions gained from groups
for ( Group g : groups ) {
permissions.addAll( permissionsForSingleRoleAssignee(g,dvo) );
}
+ */
if ( ! req.getUser().isAuthenticated() ) {
permissions.removeAll( PERMISSIONS_FOR_AUTHENTICATED_USERS_ONLY );
From 8a02e79514557a01dc48e38df8a0bb0708d032cc Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Fri, 23 Sep 2016 15:27:31 -0400
Subject: [PATCH 10/86] proof of concept for api upload
---
.../harvard/iq/dataverse/api/FileUpload.java | 182 ++++++++++++++++--
1 file changed, 167 insertions(+), 15 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
index f1bfb03415b..ab9848691ef 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
@@ -16,17 +16,24 @@
import edu.harvard.iq.dataverse.DataverseServiceBean;
import edu.harvard.iq.dataverse.EjbDataverseEngine;
import edu.harvard.iq.dataverse.FileMetadata;
+import edu.harvard.iq.dataverse.Template;
+import edu.harvard.iq.dataverse.UserNotification;
+import edu.harvard.iq.dataverse.UserNotificationServiceBean;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.authorization.users.User;
import edu.harvard.iq.dataverse.datasetutility.DuplicateFileChecker;
+import edu.harvard.iq.dataverse.engine.command.Command;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetCommand;
+import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetCommand;
import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
+import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
@@ -37,10 +44,12 @@
import javax.faces.application.FacesMessage;
import javax.faces.context.FacesContext;
import javax.inject.Inject;
+import javax.servlet.http.HttpServletRequest;
import javax.validation.ConstraintViolation;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.core.Response;
+import org.mindrot.jbcrypt.BCrypt;
/**
*
@@ -62,7 +71,8 @@ public class FileUpload extends AbstractApiBean {
DataverseRequestServiceBean dvRequestService;
@EJB
EjbDataverseEngine commandEngine;
-
+ @EJB
+ UserNotificationServiceBean userNotificationService;
private static final Logger logger = Logger.getLogger(FileUpload.class.getName());
@@ -189,24 +199,86 @@ private void msgt(String m){
dashes(); msg(m); dashes();
}
+
+ private void removeLinkedFileFromDataset(Dataset dataset, DataFile dataFileToRemove){
+
+ // remove the file from the dataset (since createDataFiles has already linked
+ // it to the dataset)!
+ // first, through the filemetadata list, then through the datafiles list:
+ Iterator fmIt = dataset.getEditVersion().getFileMetadatas().iterator();
+ msgt("Clear FileMetadatas");
+ while (fmIt.hasNext()) {
+ FileMetadata fm = fmIt.next();
+ msg("Check: " + fm);
+ if (fm.getId() == null && dataFileToRemove.getStorageIdentifier().equals(fm.getDataFile().getStorageIdentifier())) {
+ msg("Got It! ");
+ fmIt.remove();
+ break;
+ }
+ }
+
+
+ Iterator dfIt = dataset.getFiles().iterator();
+ msgt("Clear Files");
+ while (dfIt.hasNext()) {
+ DataFile dfn = dfIt.next();
+ msg("Check: " + dfn);
+ if (dfn.getId() == null && dataFileToRemove.getStorageIdentifier().equals(dfn.getStorageIdentifier())) {
+ msg("Got It! try to remove from iterator");
+
+ dfIt.remove();
+ msg("...didn't work");
+
+ break;
+ }else{
+ msg("...ok");
+ }
+ }
+ }
+
@GET
@Path("hi")
public Response hi(){
// -------------------------------------
- msgt("(1) getSampleFile() + workingVersion");
+ msgt("(1) getSampleFile()");
// -------------------------------------
InputStream testFile = getSampleFile();
if (testFile == null){
return okResponse("Couldn't find the file!!");
}
- DatasetVersion workingVersion = datasetVersionService.find(new Long(3));
- if (workingVersion.getVersionState()!=DatasetVersion.VersionState.DRAFT){
- return okResponse("For testing, making the sure the state is DRAFT. This workingVersion is: " + workingVersion.getVersionState());
+ // -------------------------------------
+ msgt("(1a) Get User from API token");
+ // -------------------------------------
+ User authUser;
+ try {
+ authUser = this.findUserOrDie();
+ } catch (WrappedResponse ex) {
+ return okResponse("Couldn't find a user from the API key");
}
+ //authSvc.findByID(new Long(1));
+ msg("authUser: " + authUser);
+ msg("getUserIdentifier: " + authUser.getIdentifier());
+
+ // -------------------------------------
+ msgt("(1b) Get the selected Dataset");
+ // -------------------------------------
+ int dataset_id = 10;
+ Dataset selectedDataset = datasetService.find(new Long(dataset_id));
+
+ // -------------------------------------
+ msgt("(1c) Get the edit version of the Dataset");
+ // -------------------------------------
+ DatasetVersion workingVersion = selectedDataset.getEditVersion();
+ msg("new workingVersion: " + workingVersion + "\n url:" + selectedDataset.getPersistentURL());
+
+ // -------------------------------------
+ msgt("(1d) List the dataset version files");
+ // -------------------------------------
+
// List the current files
//
int cnt = 0;
@@ -235,24 +307,85 @@ public Response hi(){
logger.severe(ex.toString());
return okResponse("IOException when trying to ingest: " + testFile.toString());
}
- msg("But ok, we can continue now...");
+
+
+ // -------------------------------------
+ msgt("(2A) we should have an additional file");
+ // -------------------------------------
+ // List the current files
+ //
+ cnt = 0;
+ for (FileMetadata fm : workingVersion.getFileMetadatas()){
+ cnt++;
+ msg("File " + cnt + ": " + fm.getLabel());
+ }
+ dashes();
+
+
+
// -------------------------------------
msgt("3 Duplicate check");
// -------------------------------------
List newFiles = new ArrayList();
-
- DuplicateFileChecker dfc = new DuplicateFileChecker(datasetVersionService);
+ msg("dFileList: " + dFileList.toString());
+ String warningMessage = null;
for (DataFile df : dFileList){
+
+
+ // -----------------------------------------------------------
+ // Check for ingest warnings
+ // -----------------------------------------------------------
+ if (df.isIngestProblem()) {
+ if (df.getIngestReportMessage() != null) {
+ if (warningMessage == null) {
+ warningMessage = df.getIngestReportMessage();
+ } else {
+ warningMessage = warningMessage.concat("; " + df.getIngestReportMessage());
+ }
+ }
+ df.setIngestDone();
+ }
+ if (warningMessage != null){
+ return okResponse(warningMessage);
+ }
+
+
+ msg("Checking file: " + df.getFileMetadata().getLabel());
//if (dfc.isFileInSavedDatasetVersion(workingVersion, df.getmd5())){
// return okResponse("This file has a dupe md5! " + df.getFileMetadata().getLabel());
if (DuplicateFileChecker.isDuplicateOriginalWay(workingVersion, df.getFileMetadata())){
- return okResponse("This file has a dupe md5! " + df.getFileMetadata().getLabel());
+ msg("has a dupe:");
+ // Shut things down!
+ try {
+ testFile.close();
+ } catch (IOException ex) {
+ Logger.getLogger(FileUpload.class.getName()).log(Level.SEVERE, null, ex);
+ }
+
+ removeLinkedFileFromDataset(selectedDataset, df);
+
+ return okResponse("This file has a dupe md5! " + df.toString());
}else{
+ //df.save();
newFiles.add(df);
}
}
+
+ // -------------------------------------
+ msgt("(3a) List the new files");
+ // -------------------------------------
+ // List the current files
+ //
+ cnt = 0;
+ for (DataFile df : newFiles){
+ cnt++;
+ msg("File " + cnt + ": " + df.getFileMetadata().getLabel());
+ }
+ dashes();
+
+
// -------------------------------------
msgt("4 Check constraints");
// -------------------------------------
@@ -280,31 +413,50 @@ public Response hi(){
execCommand(new SetDatasetCitationDateCommand(createDataverseRequest(findUserOrDie()), findDatasetOrDie(id), dsfType));
*/
- AuthenticatedUser authUser = authSvc.findByID(new Long(1));
- msg("authUser: " + authUser);
+
DataverseRequest dvRequest = createDataverseRequest(authUser);
msg("dvRequest: " + dvRequest);
if (dvRequest == null){
return okResponse("Failed, dvRequest is null");
}
- CreateDatasetCommand cmd = new CreateDatasetCommand(workingVersion.getDataset(),
- dvRequest);
+ //CreateDatasetCommand cmd = new CreateDatasetCommand(workingVersion.getDataset(),
+ // dvRequest);
+ Command update_cmd;
+ update_cmd = new UpdateDatasetCommand(selectedDataset, dvRequest);
+ ((UpdateDatasetCommand) update_cmd).setValidateLenient(true);
// -------------------------------------
msgt("7 Run the command!");
// -------------------------------------
try {
- Dataset newDataset = commandEngine.submit(cmd);
+ commandEngine.submit(update_cmd);
} catch (CommandException ex) {
//ex.getMessage()
msgt("Bombed: " + ex.getMessage());
+ return okResponse("bombed....");
//Logger.getLogger(FileUpload.class.getName()).log(Level.SEVERE, null, ex);
}catch (EJBException ex) {
msgt("Bombed2: " + ex.getMessage());
+ return okResponse("bombed 2....");
}
- return okResponse("hi");
+ // -------------------------------------
+ msgt("8 userNotificationService");
+ // -------------------------------------
+
+ userNotificationService.sendNotification((AuthenticatedUser) authUser, selectedDataset.getCreateDate(), UserNotification.Type.CREATEDS, selectedDataset.getLatestVersion().getId());
+
+ // -------------------------------------
+ msgt("9 start Ingest jobs");
+ // -------------------------------------
+ newFiles.clear();
+
+
+ ingestService.startIngestJobs(selectedDataset, (AuthenticatedUser) authUser);
+
+
+ return okResponse("hi. maybe it worked!");
}
From e1e2e78d2781616f4b43500e3c0515eb0762c4e5 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Fri, 23 Sep 2016 15:42:31 -0400
Subject: [PATCH 11/86] basic concept working. need to add form upload +
refactor
---
src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
index ab9848691ef..1cfff62fdf4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
@@ -171,7 +171,7 @@ private void saveFile(InputStream uploadedInputStream,
private InputStream getSampleFile(){
InputStream is = null;
- String testFileName = "/Users/rmp553/Documents/iqss-git/dataverse-helper-scripts/src/api_scripts/input/howdy.txt";
+ String testFileName = "/Users/rmp553/Documents/iqss-git/dataverse-helper-scripts/src/api_scripts/input/howdy2.txt";
//testFileName = "/Users/rmp553/NetBeansProjects/dataverse/src/main/java/edu/harvard/iq/dataverse/datasetutility/howdy.txt";
try {
is = new FileInputStream(testFileName);
@@ -363,9 +363,10 @@ public Response hi(){
Logger.getLogger(FileUpload.class.getName()).log(Level.SEVERE, null, ex);
}
+ String dupeName = df.getFileMetadata().getLabel();
removeLinkedFileFromDataset(selectedDataset, df);
- return okResponse("This file has a dupe md5! " + df.toString());
+ return okResponse("This file has a dupe md5! " + dupeName + " checksum: " + df.getmd5());
}else{
//df.save();
newFiles.add(df);
From b2b6a35d52ff42470e0d70636dafe0ced74c19eb Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Mon, 26 Sep 2016 11:37:30 -0400
Subject: [PATCH 12/86] Adding attributes for #2290 -- attributes are described
in #3220
---
scripts/database/upgrades/upgrade_v4.5.1_to_v4.6.sql | 5 +++++
1 file changed, 5 insertions(+)
create mode 100644 scripts/database/upgrades/upgrade_v4.5.1_to_v4.6.sql
diff --git a/scripts/database/upgrades/upgrade_v4.5.1_to_v4.6.sql b/scripts/database/upgrades/upgrade_v4.5.1_to_v4.6.sql
new file mode 100644
index 00000000000..f2fcad94522
--- /dev/null
+++ b/scripts/database/upgrades/upgrade_v4.5.1_to_v4.6.sql
@@ -0,0 +1,5 @@
+-- For DataFile, file replace functionality:
+ALTER TABLE datafile ADD COLUMN rootdatafileid bigint default -1;
+ALTER TABLE datafile ADD COLUMN previousdatafileid bigint default null;
+-- For existing DataFile objects, update rootDataFileId values:
+UPDATE datafile SET rootdatafileid = id;
From 482b3592c5ae5380845ec25052e2ec8d29a87e62 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Mon, 26 Sep 2016 11:48:14 -0400
Subject: [PATCH 13/86] add file replace attributes to the DataFile object
#2290
---
.../edu/harvard/iq/dataverse/DataFile.java | 62 +++++++++++++++++++
1 file changed, 62 insertions(+)
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
index 24c01e3b107..256ed5bff0d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
@@ -65,6 +65,22 @@ public class DataFile extends DvObject {
@Column( nullable = false )
private String md5;
+
+ /* start: FILE REPLACE ATTRIBUTES */
+
+ // For the initial version of a file, this will be equivalent to the ID
+    // Default is -1 until the initial id is generated
+ @Column(nullable=false)
+ private Long rootDataFileId;
+
+ // null for initial version; subsequent versions will point to the previous file
+ //
+ @Column(nullable=true)
+ private Long previousDataFileID;
+    /* end: FILE REPLACE ATTRIBUTES */
+
+
+
@Column(nullable=true)
private Long filesize; // Number of bytes in file. Allows 0 and null, negative numbers not permitted
@@ -108,11 +124,23 @@ public void setGuestbookResponses(List guestbookResponses) {
public DataFile() {
this.fileMetadatas = new ArrayList<>();
+ initFileReplaceAttributes();
}
public DataFile(String contentType) {
this.contentType = contentType;
this.fileMetadatas = new ArrayList<>();
+ initFileReplaceAttributes();
+ }
+
+
+ /**
+ * All constructors should use this method
+ * to intitialize this file replace attributes
+ */
+ private void initFileReplaceAttributes(){
+ this.rootDataFileId = new Long(-1);
+ this.previousDataFileID = null;
}
// The dvObject field "name" should not be used in
@@ -619,4 +647,38 @@ public boolean hasGeospatialTag(){
}
return false;
}
+
+ /**
+ * Set rootDataFileId
+ * @param rootDataFileId
+ */
+ public void setRootDataFileId(Long rootDataFileId){
+ this.rootDataFileId = rootDataFileId;
+ }
+
+ /**
+ * Get for rootDataFileId
+ * @return Long
+ */
+ public Long getRootDataFileId(){
+ return this.rootDataFileId;
+ }
+
+
+ /**
+ * Set previousDataFileID
+ * @param previousDataFileID
+ */
+ public void setPreviousDataFileID(Long previousDataFileID){
+ this.previousDataFileID = previousDataFileID;
+ }
+
+ /**
+ * Get for previousDataFileID
+ * @return Long
+ */
+ public Long getPreviousDataFileID(){
+ return this.previousDataFileID;
+ }
+
}
From a03417c76536d5c2caa38bce39f385b05700f663 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Mon, 26 Sep 2016 12:15:56 -0400
Subject: [PATCH 14/86] Part of #2290. See 13 and 14 under comment
2290#issuecomment-249582593
---
.../edu/harvard/iq/dataverse/DataFile.java | 4 +-
.../iq/dataverse/DataFileServiceBean.java | 44 ++++++++++++++++++-
2 files changed, 46 insertions(+), 2 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
index 256ed5bff0d..1117c40eb3f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
@@ -52,6 +52,8 @@ public class DataFile extends DvObject {
public static final char INGEST_STATUS_INPROGRESS = 67;
public static final char INGEST_STATUS_ERROR = 68;
+ public static final Long ROOT_DATAFILE_ID_DEFAULT = new Long(-1);
+
private String name;
@NotBlank
@@ -139,7 +141,7 @@ public DataFile(String contentType) {
* to intitialize this file replace attributes
*/
private void initFileReplaceAttributes(){
- this.rootDataFileId = new Long(-1);
+ this.rootDataFileId = ROOT_DATAFILE_ID_DEFAULT;
this.previousDataFileID = null;
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
index c2ecce1543e..bb2d4452275 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
@@ -23,6 +23,7 @@
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
+import java.util.Objects;
import java.util.Set;
import java.util.UUID;
import java.util.logging.Level;
@@ -721,11 +722,52 @@ public List findAll() {
}
public DataFile save(DataFile dataFile) {
-
+
+ // datafile
+
DataFile savedDataFile = em.merge(dataFile);
+
+ // Set the initial value of the rootDataFileId
+ savedDataFile = setAndCheckFileReplaceAttributes(savedDataFile);
+
+ return savedDataFile;
+ }
+
+
+ /*
+ Make sure the file replace ids are set for a initial version
+ of a file
+
+ */
+ private DataFile setAndCheckFileReplaceAttributes(DataFile savedDataFile){
+
+ // Is this the initial version of a file?
+ if (savedDataFile.getPreviousDataFileID() == null){
+
+ // YES! Set the RootDataFileId to the Id
+ savedDataFile.setRootDataFileId(savedDataFile.getId());
+
+ // SAVE IT AGAIN!!!
+ savedDataFile = em.merge(savedDataFile);
+
+ }else{
+ // NO! This IS a previous version. Do a quick sanity check.
+
+ // This IS a previous version, make the sure the root data file id is set
+ if (Objects.equals(savedDataFile.getRootDataFileId(), DataFile.ROOT_DATAFILE_ID_DEFAULT)){
+ String errorMessage = "The rootDataFileId should NEVER be -1 for a replacment file. (The previousDataFileID is " + savedDataFile.getPreviousDataFileID();
+ logger.severe(errorMessage);
+ throw new IllegalStateException(errorMessage);
+ }
+
+ }
+
+ // Looking Good Billy Ray! Feeling Good Louis!
+
return savedDataFile;
}
+
public Boolean isPreviouslyPublished(Long fileId){
Query query = em.createQuery("select object(o) from FileMetadata as o where o.dataFile.id =:fileId");
query.setParameter("fileId", fileId);
From 688aeb97cf1739374a112d5f4036c6882eda29f1 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Mon, 26 Sep 2016 15:53:07 -0400
Subject: [PATCH 15/86] container class for file add/replace steps. need to
test tomorrow. #2290-api-replace
---
.../iq/dataverse/DataFileServiceBean.java | 45 +-
.../harvard/iq/dataverse/api/FileUpload.java | 25 +-
.../datasetutility/AddReplaceFileHelper.java | 614 +++++++++++++++++-
3 files changed, 662 insertions(+), 22 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
index bb2d4452275..3f681c04da9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
@@ -724,15 +724,38 @@ public List findAll() {
public DataFile save(DataFile dataFile) {
// datafile
-
+ msgt("pre save");
+ msg("id : " + dataFile.getId());
+ msg("getRootDataFileId : " + dataFile.getRootDataFileId());
+ msg("getPreviousDataFileID : " + dataFile.getPreviousDataFileID());
+
DataFile savedDataFile = em.merge(dataFile);
+ msgt("post save");
+ msg("id : " + savedDataFile.getId());
+ msg("getRootDataFileId : " + savedDataFile.getRootDataFileId());
+ msg("getPreviousDataFileID : " + savedDataFile.getPreviousDataFileID());
+
// Set the initial value of the rootDataFileId
savedDataFile = setAndCheckFileReplaceAttributes(savedDataFile);
-
+ msgt("post post save");
+ msg("id : " + savedDataFile.getId());
+ msg("getRootDataFileId : " + savedDataFile.getRootDataFileId());
+ msg("getPreviousDataFileID : " + savedDataFile.getPreviousDataFileID());
+
+
return savedDataFile;
}
+ private void msg(String m){
+ System.out.println(m);
+ }
+ private void dashes(){
+ msg("----------------");
+ }
+ private void msgt(String m){
+ dashes(); msg(m); dashes();
+ }
/*
Make sure the file replace ids are set for a initial version
@@ -742,24 +765,14 @@ public DataFile save(DataFile dataFile) {
private DataFile setAndCheckFileReplaceAttributes(DataFile savedDataFile){
// Is this the initial version of a file?
- if (savedDataFile.getPreviousDataFileID() == null){
-
+ if (Objects.equals(savedDataFile.getRootDataFileId(), DataFile.ROOT_DATAFILE_ID_DEFAULT)){
+
// YES! Set the RootDataFileId to the Id
savedDataFile.setRootDataFileId(savedDataFile.getId());
// SAVE IT AGAIN!!!
- savedDataFile = em.merge(savedDataFile);
+ return em.merge(savedDataFile);
- }else{
- // NO! This IS a previous version. Do a quick sanity check.
-
- // This IS a previous version, make the sure the root data file id is set
- if (Objects.equals(savedDataFile.getRootDataFileId(), DataFile.ROOT_DATAFILE_ID_DEFAULT)){
- String errorMessage = "The rootDataFileId should NEVER be -1 for a replacment file. (The previousDataFileID is " + savedDataFile.getPreviousDataFileID();
- logger.severe(errorMessage);
- throw new IllegalStateException(errorMessage);
- }
-
}
// Looking Good Billy Ray! Feeling Good Louis!
@@ -928,7 +941,7 @@ public boolean isThumbnailAvailable (DataFile file) {
if (ImageThumbConverter.isThumbnailAvailable(file)) {
file = this.find(file.getId());
file.setPreviewImageAvailable(true);
- file = em.merge(file);
+ file = this.save(file); //em.merge(file);
// (should this be done here? - TODO:)
return true;
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
index 1cfff62fdf4..efac91d68c6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
@@ -8,6 +8,7 @@
//import com.sun.jersey.core.header.FormDataContentDisposition;
//import com.sun.jersey.multipart.FormDataParam;
import edu.harvard.iq.dataverse.DataFile;
+import edu.harvard.iq.dataverse.DataFileServiceBean;
import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.DatasetServiceBean;
import edu.harvard.iq.dataverse.DatasetVersion;
@@ -48,6 +49,7 @@
import javax.validation.ConstraintViolation;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
import javax.ws.rs.core.Response;
import org.mindrot.jbcrypt.BCrypt;
@@ -62,6 +64,8 @@ public class FileUpload extends AbstractApiBean {
@EJB
DatasetServiceBean datasetService;
@EJB
+ DataFileServiceBean fileService;
+ @EJB
DatasetVersionServiceBean datasetVersionService;
@EJB
DataverseServiceBean dataverseService;
@@ -171,7 +175,7 @@ private void saveFile(InputStream uploadedInputStream,
private InputStream getSampleFile(){
InputStream is = null;
- String testFileName = "/Users/rmp553/Documents/iqss-git/dataverse-helper-scripts/src/api_scripts/input/howdy2.txt";
+ String testFileName = "/Users/rmp553/Documents/iqss-git/dataverse-helper-scripts/src/api_scripts/input/howdy3.txt";
//testFileName = "/Users/rmp553/NetBeansProjects/dataverse/src/main/java/edu/harvard/iq/dataverse/datasetutility/howdy.txt";
try {
is = new FileInputStream(testFileName);
@@ -236,6 +240,25 @@ private void removeLinkedFileFromDataset(Dataset dataset, DataFile dataFileToRem
}
}
+ /**
+ *
+ * @param fileId
+ * @return
+ */
+ @GET
+ @Path("resave/{fileId}")
+ public Response hiReSave(@PathParam("fileId") Long fileId){
+ msgt("hiReSave: " + fileId);
+ DataFile df = fileService.find(fileId);
+
+ if (df ==null){
+ return okResponse("file not found: " + fileId);
+ }
+ df = fileService.save(df);
+
+ return okResponse("saved: " + df);
+ }
+
@GET
@Path("hi")
public Response hi(){
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index a3d61f3feb0..96cf90efe7b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -5,26 +5,569 @@
*/
package edu.harvard.iq.dataverse.datasetutility;
+import edu.harvard.iq.dataverse.DataFile;
+import edu.harvard.iq.dataverse.DataFileServiceBean;
import edu.harvard.iq.dataverse.Dataset;
+import edu.harvard.iq.dataverse.DatasetServiceBean;
+import edu.harvard.iq.dataverse.DatasetVersion;
+import edu.harvard.iq.dataverse.DataverseRequestServiceBean;
+import edu.harvard.iq.dataverse.EjbDataverseEngine;
+import edu.harvard.iq.dataverse.FileMetadata;
+import edu.harvard.iq.dataverse.PermissionServiceBean;
+import edu.harvard.iq.dataverse.api.FileUpload;
+import edu.harvard.iq.dataverse.authorization.Permission;
+import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.authorization.users.User;
+import edu.harvard.iq.dataverse.engine.command.Command;
+import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetCommand;
+import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import javax.ejb.EJB;
+import javax.ejb.EJBException;
+import javax.enterprise.context.SessionScoped;
+import javax.inject.Inject;
+import javax.validation.ConstraintViolation;
/**
* Methods to add or replace a single file.
*
* @author rmp553
*/
-public class AddReplaceFileHelper {
+@SessionScoped
+public class AddReplaceFileHelper implements java.io.Serializable {
+ private static final Logger logger = Logger.getLogger(AddReplaceFileHelper.class.getCanonicalName());
- private Dataset dataset;
+ @EJB
+ IngestServiceBean ingestService;
+ @EJB
+ DatasetServiceBean datasetService;
+ @EJB
+ DataFileServiceBean fileService;
+ @EJB
+ PermissionServiceBean permissionService;
+ @EJB
+ EjbDataverseEngine commandEngine;
+
+ // -----------------------------------
+ // Instance variables directly added
+ // -----------------------------------
+ private Dataset dataset; // constructor
+ private DataverseRequest dvRequest; // constructor
+ private InputStream newFileInputStream; // step 20
+ private String newFileName; // step 20
+ private String newFileContentType; // step 20
+ // -- Optional
+ private DataFile fileToReplace; // step 25
+
+
+ // Instance variables derived from other input
+ private User user;
+ private DatasetVersion workingVersion;
+ List newFileList;
+ List filesToAdd;
+
+
+ // For error handling
+ private boolean errorFound;
+ private List errorMessages;
+
+
+ public AddReplaceFileHelper(){
+ throw new IllegalStateException("Must be called with a dataset and or user");
+ }
+
- public AddReplaceFileHelper(Dataset dataset){
+ /**
+ * MAIN CONSTRUCTOR -- minimal requirements
+ *
+ * @param dataset
+ * @param dvRequest
+ */
+
+ public AddReplaceFileHelper(DataverseRequest dvRequest){
+
+
+ if (dvRequest == null){
+ throw new NullPointerException("dvRequest cannot be null");
+ }
+ if (dvRequest.getUser() == null){
+ throw new NullPointerException("dvRequest cannot have a null user");
+ }
+
+ initErrorHandling();
- if (dataset == null){
+ // Initiate instance vars
+ this.dataset = null;
+ this.dvRequest = dvRequest;
+ this.user = dvRequest.getUser();
+
+ }
+
+
+ /**
+ * Initialize error handling vars
+ */
+ private void initErrorHandling(){
+
+ this.errorFound = false;
+ this.errorMessages = new ArrayList<>();
+
+ }
+
+
+
+
+
+ /**
+ * Add error message
+ *
+ * @param errMsg
+ */
+ private void addError(String errMsg){
+
+ if (errMsg == null){
+ throw new NullPointerException("errMsg cannot be null");
+ }
+ this.errorFound = true;
+
+ logger.fine(errMsg);
+ this.errorMessages.add(errMsg);
+ }
+
+
+ private void addErrorSevere(String errMsg){
+
+ if (errMsg == null){
+ throw new NullPointerException("errMsg cannot be null");
+ }
+ this.errorFound = true;
+
+ logger.severe(errMsg);
+ this.errorMessages.add(errMsg);
+ }
+
+
+ /**
+ * Was an error found?
+ *
+ * @return
+ */
+ public boolean hasError(){
+ return this.errorFound;
+
+ }
+
+ /**
+ * get error messages
+ *
+ * @return
+ */
+ public List getErrorMessages(){
+ return this.errorMessages;
+ }
+
+ /**
+ * get error messages as string
+ *
+ * @param joinString
+ * @return
+ */
+ public String getErrorMessagesAsString(String joinString){
+ if (joinString==null){
+ joinString = "\n";
+ }
+ return String.join(joinString, this.errorMessages);
+ }
+
+
+ /**
+ *
+ */
+ public boolean step_01_loadDataset(Dataset selectedDataset){
+
+ if (this.hasError()){
+ return false;
+ }
+
+ if (selectedDataset == null){
throw new NullPointerException("dataset cannot be null");
}
+
+ dataset = selectedDataset;
+
+ return true;
+ }
+
+
+ /**
+ *
+ */
+ public boolean step_01_loadDatasetById(Long datasetId){
+
+ if (this.hasError()){
+ return false;
+ }
+
+ if (datasetId == null){
+ throw new NullPointerException("datasetId cannot be null");
+ }
+
+ dataset = datasetService.find(datasetId);
+ if (dataset == null){
+ this.addError("There was no dataset found for id: " + datasetId);
+ return false;
+ }
+
+ return true;
+ }
+
+
+
+
+
+ /**
+ * Step 10 Verify User and Permissions
+ *
+ *
+ * @return
+ */
+ public boolean step_10_VerifyUserAndPermissions(){
+
+ if (this.hasError()){
+ return false;
+ }
+
+ if (!permissionService.request(dvRequest).on(dataset).has(Permission.EditDataset)){
+ String errMsg = "You do not have permission to this dataset.";
+ addError(errMsg);
+ return false;
+ }
+ return true;
+
+ }
+
+
+ public boolean step_20_loadNewFile(String fileName, String fileContentType, InputStream fileInputStream){
+
+ if (this.hasError()){
+ return false;
+ }
+
+ if (fileName == null){
+ String errMsg = "The fileName cannot be null.";
+ this.addErrorSevere(errMsg);
+ return false;
+
+ }
+
+ if (fileContentType == null){
+ String errMsg = "The fileContentType cannot be null.";
+ this.addErrorSevere(errMsg);
+ return false;
+
+ }
+
+ if (fileName == null){
+ String errMsg = "The fileName cannot be null.";
+ this.addErrorSevere(errMsg);
+ return false;
+
+ }
+
+
+ if (fileInputStream == null){
+ String errMsg = "The fileInputStream cannot be null.";
+ this.addErrorSevere(errMsg);
+ return false;
+ }
+
+ newFileName = fileName;
+ newFileContentType = fileContentType;
+ newFileInputStream = fileInputStream;
+
+ return true;
+ }
+
+ /**
+ * Optional: old file to replace
+ *
+ * @param oldFile
+ * @return
+ */
+ public boolean step_25_loadFileToReplace(DataFile oldFile){
+
+ if (this.hasError()){
+ return false;
+ }
+
+ if (oldFile == null){
+ throw new NullPointerException("fileToReplace cannot be null");
+ }
+
+ if (oldFile.getOwner() != this.dataset){
+ String errMsg = "This file does not belong to the datset";
+ addError(errMsg);
+ return false;
+ }
+
+ fileToReplace = oldFile;
+
+ return true;
+ }
+
+
+ /**
+ * Optional: old file to replace
+ *
+ * @param oldFile
+ * @return
+ */
+ public boolean step_25_loadFileToReplaceById(Long dataFileId){
+
+ if (this.hasError()){
+ return false;
+ }
+
+ if (dataFileId == null){
+ throw new NullPointerException("dataFileId cannot be null");
+ }
+
+ fileToReplace = fileService.find(dataFileId);
+ if (fileToReplace == null){
+ this.addError("There was no file found for id: " + dataFileId);
+ return false;
+ }
+
+ return true;
+ }
+
+
+ public boolean step_30_createNewFilesViaIngest(){
+
+ if (this.hasError()){
+ return false;
+ }
+
+ // Load the working version of the Dataset
+ workingVersion = dataset.getEditVersion();
+
+ try {
+ newFileList = ingestService.createDataFiles(workingVersion,
+ this.newFileInputStream,
+ this.newFileName,
+ this.newFileContentType);
+ } catch (IOException ex) {
+ String errMsg = "There was an error when trying to add the new file.";
+ this.addErrorSevere(errMsg);
+ logger.severe(ex.toString());
+ return false;
+ }
+
+
+ /**
+ * This only happens:
+ * (1) the dataset was empty
+ * (2) the new file (or new file unzipped) did not ingest via "createDataFiles"
+ */
+ if (newFileList.isEmpty()){
+ this.addErrorSevere("Sorry! An error occurred and the new file was not added.");
+ return false;
+ }
+
+ return this.run_auto_step_35_checkForDuplicates();
+
+ }
+
+ /**
+ * This is always run after step 30
+ *
+ * @return
+ */
+ public boolean run_auto_step_35_checkForDuplicates(){
+
+ if (this.hasError()){
+ return false;
+ }
+
+ // Double checked -- this check also happens in step 30
+ //
+ if (newFileList.isEmpty()){
+ this.addErrorSevere("Sorry! An error occurred and the new file was not added.");
+ return false;
+ }
+
+ // Initialize new file list
+ this.filesToAdd = new ArrayList();
+
+ String warningMessage = null;
+
+
+ // -----------------------------------------------------------
+ // Iterate through the recently ingest files
+ // -----------------------------------------------------------
+ for (DataFile df : newFileList){
+ msg("Checking file: " + df.getFileMetadata().getLabel());
+
+ // -----------------------------------------------------------
+ // (1) Check for ingest warnings
+ // -----------------------------------------------------------
+ if (df.isIngestProblem()) {
+ if (df.getIngestReportMessage() != null) {
+ // may collect multiple error messages
+ this.addError(df.getIngestReportMessage());
+ }
+ df.setIngestDone();
+ }
+
+
+ // -----------------------------------------------------------
+ // (2) Check for duplicates
+ // -----------------------------------------------------------
+ if (DuplicateFileChecker.isDuplicateOriginalWay(workingVersion, df.getFileMetadata())){
+
+ String dupeName = df.getFileMetadata().getLabel();
+ removeLinkedFileFromDataset(dataset, df);
+ this.addErrorSevere("This file has a duplicate already in the dataset: " + dupeName);
+ }else{
+ filesToAdd.add(df);
+ }
+ }
+
+ if (this.hasError()){
+ filesToAdd.clear();
+ return false;
+ }
+
+ return true;
+ } // end run_auto_step_35_checkForDuplicates
+
+
+ public boolean step_40_checkForConstraintViolations(){
+
+ if (this.hasError()){
+ return false;
+ }
+
+ if (filesToAdd.isEmpty()){
+ // This error shouldn't happen if steps called in sequence....
+ this.addErrorSevere("There are no files to add. (This error shouldn't happen if steps called in sequence....)");
+ return false;
+ }
+
+ // -----------------------------------------------------------
+ // Iterate through checking for constraint violations
+ // Gather all error messages
+ // -----------------------------------------------------------
+ Set constraintViolations = workingVersion.validate();
+ List errMsgs = new ArrayList<>();
+ for (ConstraintViolation violation : constraintViolations){
+ this.addError(violation.getMessage());
+ }
- this.dataset = dataset;
+ return this.hasError();
}
+
+
+ public boolean step_50_addFilesViaIngestService(){
+
+ if (this.hasError()){
+ return false;
+ }
+
+ if (filesToAdd.isEmpty()){
+ // This error shouldn't happen if steps called in sequence....
+ this.addErrorSevere("There are no files to add. (This error shouldn't happen if steps called in sequence....)");
+ return false;
+ }
+
+ ingestService.addFiles(workingVersion, filesToAdd);
+
+ return true;
+ }
+
+
+ /**
+ * Create and run the update dataset command
+ *
+ * @return
+ */
+ public boolean step_70_run_update_dataset_command(){
+
+ if (this.hasError()){
+ return false;
+ }
+
+ Command update_cmd;
+ update_cmd = new UpdateDatasetCommand(dataset, dvRequest);
+ ((UpdateDatasetCommand) update_cmd).setValidateLenient(true);
+
+ try {
+ commandEngine.submit(update_cmd);
+ } catch (CommandException ex) {
+ this.addErrorSevere("Failed to update the dataset. Please contact the administrator");
+ logger.severe(ex.getMessage());
+ return false;
+ }catch (EJBException ex) {
+ this.addErrorSevere("Failed to update the dataset. Please contact the administrator");
+ logger.severe(ex.getMessage());
+ return false;
+ }
+ return true;
+ }
+
+
+ public boolean step_80_notifyUser(){
+ if (this.hasError()){
+ return false;
+ }
+
+ // Create a notification!
+
+ // skip for now
+ return true;
+ }
+
+
+ public boolean step_100_startIngestJobs(){
+ if (this.hasError()){
+ return false;
+ }
+
+ // clear old file list
+ //
+ filesToAdd.clear();
+
+
+ // start the ingest!
+ //
+ ingestService.startIngestJobs(dataset, dvRequest.getAuthenticatedUser());
+
+ return true;
+ }
+
+
+ private void msg(String m){
+ System.out.println(m);
+ }
+ private void dashes(){
+ msg("----------------");
+ }
+ private void msgt(String m){
+ dashes(); msg(m); dashes();
+ }
+
+
/*
DatasetPage sequence:
@@ -96,6 +639,67 @@ public String getFileName()
// ....
+ /**
+ * When a duplicate file is found after the initial ingest,
+ * remove the file from the dataset because
+ * createDataFiles has already linked it to the dataset:
+ * - first, through the filemetadata list
+     *  - then through the datafiles list
+ *
+ *
+ * @param dataset
+ * @param dataFileToRemove
+ */
+ private boolean removeLinkedFileFromDataset(Dataset dataset, DataFile dataFileToRemove){
+
+ if (dataset==null){
+ this.addErrorSevere("dataset cannot be null in removeLinkedFileFromDataset");
+ return false;
+ }
+
+ if (dataFileToRemove==null){
+ this.addErrorSevere("dataFileToRemove cannot be null in removeLinkedFileFromDataset");
+ return false;
+ }
+
+ // -----------------------------------------------------------
+ // (1) Remove file from filemetadata list
+ // -----------------------------------------------------------
+ Iterator fmIt = dataset.getEditVersion().getFileMetadatas().iterator();
+ msgt("Clear FileMetadatas");
+ while (fmIt.hasNext()) {
+ FileMetadata fm = fmIt.next();
+ msg("Check: " + fm);
+ if (fm.getId() == null && dataFileToRemove.getStorageIdentifier().equals(fm.getDataFile().getStorageIdentifier())) {
+ msg("Got It! ");
+ fmIt.remove();
+ break;
+ }
+ }
+
+
+ // -----------------------------------------------------------
+ // (2) Remove file from datafiles list
+ // -----------------------------------------------------------
+ Iterator dfIt = dataset.getFiles().iterator();
+ msgt("Clear Files");
+ while (dfIt.hasNext()) {
+ DataFile dfn = dfIt.next();
+ msg("Check: " + dfn);
+ if (dfn.getId() == null && dataFileToRemove.getStorageIdentifier().equals(dfn.getStorageIdentifier())) {
+ msg("Got It! try to remove from iterator");
+
+ dfIt.remove();
+ msg("it work");
+
+ break;
+ }else{
+ msg("...ok");
+ }
+ }
+ return true;
+ }
+
}
From 4bb44a6c8f31a21f468c131cac33c8d67d127844 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Tue, 27 Sep 2016 13:33:12 -0400
Subject: [PATCH 16/86] 2290- on our way. basic replace (w/o notifications) is
working in AddReplaceFileHelper.java
---
.../edu/harvard/iq/dataverse/DataFile.java | 8 +
.../harvard/iq/dataverse/api/FileUpload.java | 45 +-
.../datasetutility/AddReplaceFileHelper.java | 558 ++++++++++++++----
.../datasetutility/DuplicateFileChecker.java | 24 +-
4 files changed, 494 insertions(+), 141 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
index 8f369ed02a2..1b3fa14c6f0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
@@ -400,6 +400,14 @@ public void setRestricted(boolean restricted) {
this.restricted = restricted;
}
+ /**
+ * Fill in until sha1 branch checked in.
+ *
+ * @return
+ */
+ public String getCheckSum(){
+ return this.md5;
+ }
public String getmd5() {
return this.md5;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
index efac91d68c6..51ded12df8d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
@@ -22,6 +22,7 @@
import edu.harvard.iq.dataverse.UserNotificationServiceBean;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
import edu.harvard.iq.dataverse.authorization.users.User;
+import edu.harvard.iq.dataverse.datasetutility.AddReplaceFileHelper;
import edu.harvard.iq.dataverse.datasetutility.DuplicateFileChecker;
import edu.harvard.iq.dataverse.engine.command.Command;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
@@ -175,10 +176,10 @@ private void saveFile(InputStream uploadedInputStream,
private InputStream getSampleFile(){
InputStream is = null;
- String testFileName = "/Users/rmp553/Documents/iqss-git/dataverse-helper-scripts/src/api_scripts/input/howdy3.txt";
- //testFileName = "/Users/rmp553/NetBeansProjects/dataverse/src/main/java/edu/harvard/iq/dataverse/datasetutility/howdy.txt";
+ String testFileInputStreamName = "/Users/rmp553/Documents/iqss-git/dataverse-helper-scripts/src/api_scripts/input/howdy3.txt";
+ //testFileInputStreamName = "/Users/rmp553/NetBeansProjects/dataverse/src/main/java/edu/harvard/iq/dataverse/datasetutility/howdy.txt";
try {
- is = new FileInputStream(testFileName);
+ is = new FileInputStream(testFileInputStreamName);
//is.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
@@ -267,8 +268,8 @@ public Response hi(){
msgt("(1) getSampleFile()");
// -------------------------------------
- InputStream testFile = getSampleFile();
- if (testFile == null){
+ InputStream testFileInputStream = getSampleFile();
+ if (testFileInputStream == null){
return okResponse("Couldn't find the file!!");
}
@@ -284,12 +285,40 @@ public Response hi(){
//authSvc.findByID(new Long(1));
msg("authUser: " + authUser);
msg("getUserIdentifier: " + authUser.getIdentifier());
+
// -------------------------------------
msgt("(1b) Get the selected Dataset");
// -------------------------------------
int dataset_id = 10;
Dataset selectedDataset = datasetService.find(new Long(dataset_id));
+
+
+ //-------------------
+ if (true){
+
+
+ DataverseRequest dvRequest2 = createDataverseRequest(authUser);
+ AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest2,
+ this.ingestService,
+ this.datasetService,
+ this.fileService,
+ this.permissionSvc,
+ this.commandEngine);
+
+
+ addFileHelper.runAddFile(selectedDataset, "blackbox.txt", "text/plain", testFileInputStream);
+
+
+ if (addFileHelper.hasError()){
+ return okResponse(addFileHelper.getErrorMessagesAsString("\n"));
+ }else{
+ return okResponse("hey hey, it may have worked");
+ }
+
+ }
+ //-------------------
+
// -------------------------------------
@@ -321,14 +350,14 @@ public Response hi(){
try {
msg("The starting bell rings....");
dFileList = ingestService.createDataFiles(workingVersion,
- testFile,
+ testFileInputStream,
"hullo.txt",
"text/plain");
msg("Almost there....");
} catch (IOException ex) {
msg("Not happy...:" + ex.toString());
logger.severe(ex.toString());
- return okResponse("IOException when trying to ingest: " + testFile.toString());
+ return okResponse("IOException when trying to ingest: " + testFileInputStream.toString());
}
@@ -381,7 +410,7 @@ public Response hi(){
msg("has a dupe:");
// Shut things down!
try {
- testFile.close();
+ testFileInputStream.close();
} catch (IOException ex) {
Logger.getLogger(FileUpload.class.getName()).log(Level.SEVERE, null, ex);
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index 96cf90efe7b..3b8ce10cd3a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -10,13 +10,10 @@
import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.DatasetServiceBean;
import edu.harvard.iq.dataverse.DatasetVersion;
-import edu.harvard.iq.dataverse.DataverseRequestServiceBean;
import edu.harvard.iq.dataverse.EjbDataverseEngine;
import edu.harvard.iq.dataverse.FileMetadata;
import edu.harvard.iq.dataverse.PermissionServiceBean;
-import edu.harvard.iq.dataverse.api.FileUpload;
import edu.harvard.iq.dataverse.authorization.Permission;
-import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
import edu.harvard.iq.dataverse.authorization.users.User;
import edu.harvard.iq.dataverse.engine.command.Command;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
@@ -25,17 +22,16 @@
import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
import java.io.IOException;
import java.io.InputStream;
-import java.io.Serializable;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
-import java.util.logging.Level;
import java.util.logging.Logger;
import javax.ejb.EJB;
import javax.ejb.EJBException;
+import javax.ejb.Stateless;
import javax.enterprise.context.SessionScoped;
-import javax.inject.Inject;
+import javax.inject.Named;
import javax.validation.ConstraintViolation;
/**
@@ -43,22 +39,26 @@
*
* @author rmp553
*/
-@SessionScoped
-public class AddReplaceFileHelper implements java.io.Serializable {
+public class AddReplaceFileHelper{
private static final Logger logger = Logger.getLogger(AddReplaceFileHelper.class.getCanonicalName());
- @EJB
- IngestServiceBean ingestService;
- @EJB
- DatasetServiceBean datasetService;
- @EJB
- DataFileServiceBean fileService;
- @EJB
- PermissionServiceBean permissionService;
- @EJB
- EjbDataverseEngine commandEngine;
-
+
+ public static final String FILE_ADD_OPERATION = "FILE_ADD_OPERATION";
+ public static final String FILE_REPLACE_OPERATION = "FILE_REPLACE_OPERATION";
+
+
+ private String currentOperation;
+
+ // -----------------------------------
+ // All the needed EJBs, passed to the constructor
+ // -----------------------------------
+ private IngestServiceBean ingestService;
+ private DatasetServiceBean datasetService;
+ private DataFileServiceBean fileService;
+ private PermissionServiceBean permissionService;
+ private EjbDataverseEngine commandEngine;
+
// -----------------------------------
// Instance variables directly added
// -----------------------------------
@@ -83,9 +83,9 @@ public class AddReplaceFileHelper implements java.io.Serializable {
private List errorMessages;
- public AddReplaceFileHelper(){
- throw new IllegalStateException("Must be called with a dataset and or user");
- }
+ // public AddReplaceFileHelper(){
+ // throw new IllegalStateException("Must be called with a dataset and or user");
+ // }
/**
@@ -95,9 +95,16 @@ public AddReplaceFileHelper(){
* @param dvRequest
*/
- public AddReplaceFileHelper(DataverseRequest dvRequest){
-
+ public AddReplaceFileHelper(DataverseRequest dvRequest,
+ IngestServiceBean ingestService,
+ DatasetServiceBean datasetService,
+ DataFileServiceBean fileService,
+ PermissionServiceBean permissionService,
+ EjbDataverseEngine commandEngine){
+ // ---------------------------------
+ // make sure DataverseRequest isn't null and has a user
+ // ---------------------------------
if (dvRequest == null){
throw new NullPointerException("dvRequest cannot be null");
}
@@ -105,6 +112,35 @@ public AddReplaceFileHelper(DataverseRequest dvRequest){
throw new NullPointerException("dvRequest cannot have a null user");
}
+ // ---------------------------------
+ // make sure services aren't null
+ // ---------------------------------
+ if (ingestService == null){
+ throw new NullPointerException("ingestService cannot be null");
+ }
+ if (datasetService == null){
+ throw new NullPointerException("datasetService cannot be null");
+ }
+ if (fileService == null){
+ throw new NullPointerException("fileService cannot be null");
+ }
+ if (permissionService == null){
+ throw new NullPointerException("permissionService cannot be null");
+ }
+ if (commandEngine == null){
+ throw new NullPointerException("commandEngine cannot be null");
+ }
+
+ // ---------------------------------
+
+ this.ingestService = ingestService;
+ this.datasetService = datasetService;
+ this.fileService = fileService;
+ this.permissionService = permissionService;
+ this.commandEngine = commandEngine;
+
+
+
initErrorHandling();
// Initiate instance vars
@@ -114,7 +150,154 @@ public AddReplaceFileHelper(DataverseRequest dvRequest){
}
-
+ /**
+ * After the constructor, this method is called to add a file
+ *
+ * @param dataset
+ * @param newFileName
+ * @param newFileContentType
+ * @param newFileInputStream
+ * @return
+ */
+ public boolean runAddFile(Dataset dataset, String newFileName, String newFileContentType, InputStream newFileInputStream){
+
+ this.currentOperation = FILE_ADD_OPERATION;
+
+ return this.runAddReplaceFile(dataset, newFileName, newFileContentType, newFileInputStream, null);
+ }
+
+
+ /**
+ * After the constructor, this method is called to replace a file
+ *
+ * @param dataset
+ * @param newFileName
+ * @param newFileContentType
+ * @param newFileInputStream
+ * @return
+ */
+ public boolean runReplaceFile(Dataset dataset, String newFileName, String newFileContentType, InputStream newFileInputStream, Long oldFileId){
+
+ if (oldFileId==null){
+ throw new NullPointerException("For a replace operation, oldFileId cannot be null");
+ }
+
+ this.currentOperation = FILE_REPLACE_OPERATION;
+
+ return this.runAddReplaceFile(dataset, newFileName, newFileContentType, newFileInputStream, oldFileId);
+ }
+
+ /**
+ * Here we're going to run through the steps to ADD or REPLACE a file
+ *
+ * The difference between ADD and REPLACE (add/delete) is:
+ *
+ * oldFileId - For ADD, set to null
+ * oldFileId - For REPLACE, set to id of file to replace
+ *
+ *
+ * @return
+ */
+ private boolean runAddReplaceFile(Dataset dataset,
+ String newFileName, String newFileContentType, InputStream newFileInputStream,
+ Long oldFileId){
+
+ initErrorHandling();
+
+
+ if (this.hasError()){
+ return false;
+ }
+
+ msgt("step_001_loadDataset");
+ if (!this.step_001_loadDataset(dataset)){
+ return false;
+ }
+
+ msgt("step_010_VerifyUserAndPermissions");
+ if (!this.step_010_VerifyUserAndPermissions()){
+ return false;
+
+ }
+
+ msgt("step_020_loadNewFile");
+ if (!this.step_020_loadNewFile(newFileName, newFileContentType, newFileInputStream)){
+ return false;
+
+ }
+
+ // Replace only step!
+ if (isFileReplaceOperation()){
+
+ msgt("step_025_loadFileToReplaceById");
+ if (!this.step_025_loadFileToReplaceById(oldFileId)){
+ return false;
+ }
+ }
+
+ msgt("step_030_createNewFilesViaIngest");
+ if (!this.step_030_createNewFilesViaIngest()){
+ return false;
+
+ }
+
+ msgt("step_050_checkForConstraintViolations");
+ if (!this.step_050_checkForConstraintViolations()){
+ return false;
+
+ }
+
+ msgt("step_060_addFilesViaIngestService");
+ if (!this.step_060_addFilesViaIngestService()){
+ return false;
+
+ }
+
+ if (this.isFileReplaceOperation()){
+ msgt("step_080_run_update_dataset_command_for_replace");
+ if (!this.step_080_run_update_dataset_command_for_replace()){
+ return false;
+ }
+
+ }else{
+ msgt("step_070_run_update_dataset_command");
+ if (!this.step_070_run_update_dataset_command()){
+ return false;
+ }
+ }
+
+ msgt("step_090_notifyUser");
+ if (!this.step_090_notifyUser()){
+ return false;
+ }
+
+ msgt("step_100_startIngestJobs");
+ if (!this.step_100_startIngestJobs()){
+ return false;
+ }
+
+
+ return true;
+ }
+
+ /**
+ * Get for currentOperation
+ * @return String
+ */
+ public String getCurrentOperation(){
+ return this.currentOperation;
+ }
+
+ public boolean isFileReplaceOperation(){
+
+ return FILE_REPLACE_OPERATION.equals(this.currentOperation);
+ }
+
+ public boolean isFileAddOperation(){
+
+ return FILE_ADD_OPERATION.equals(this.currentOperation);
+ }
+
/**
* Initialize error handling vars
*/
@@ -194,14 +377,15 @@ public String getErrorMessagesAsString(String joinString){
/**
*
*/
- public boolean step_01_loadDataset(Dataset selectedDataset){
+ private boolean step_001_loadDataset(Dataset selectedDataset){
if (this.hasError()){
return false;
}
if (selectedDataset == null){
- throw new NullPointerException("dataset cannot be null");
+ this.addErrorSevere("The dataset cannot be null");
+ return false;
}
dataset = selectedDataset;
@@ -213,23 +397,24 @@ public boolean step_01_loadDataset(Dataset selectedDataset){
/**
*
*/
- public boolean step_01_loadDatasetById(Long datasetId){
+ private boolean step_001_loadDatasetById(Long datasetId){
if (this.hasError()){
return false;
}
if (datasetId == null){
- throw new NullPointerException("datasetId cannot be null");
+ this.addErrorSevere("The datasetId cannot be null");
+ return false;
}
- dataset = datasetService.find(datasetId);
- if (dataset == null){
+ Dataset yeDataset = datasetService.find(datasetId);
+ if (yeDataset == null){
this.addError("There was no dataset found for id: " + datasetId);
return false;
}
- return true;
+ return step_001_loadDataset(yeDataset);
}
@@ -242,12 +427,17 @@ public boolean step_01_loadDatasetById(Long datasetId){
*
* @return
*/
- public boolean step_10_VerifyUserAndPermissions(){
+ private boolean step_010_VerifyUserAndPermissions(){
if (this.hasError()){
return false;
}
+ msg("dataset:" + dataset.toString());
+ msg("Permission.EditDataset:" + Permission.EditDataset.toString());
+ msg("dvRequest:" + dvRequest.toString());
+ msg("permissionService:" + permissionService.toString());
+
if (!permissionService.request(dvRequest).on(dataset).has(Permission.EditDataset)){
String errMsg = "You do not have permission to this dataset.";
addError(errMsg);
@@ -258,7 +448,7 @@ public boolean step_10_VerifyUserAndPermissions(){
}
- public boolean step_20_loadNewFile(String fileName, String fileContentType, InputStream fileInputStream){
+ private boolean step_020_loadNewFile(String fileName, String fileContentType, InputStream fileInputStream){
if (this.hasError()){
return false;
@@ -305,23 +495,24 @@ public boolean step_20_loadNewFile(String fileName, String fileContentType, Inpu
* @param oldFile
* @return
*/
- public boolean step_25_loadFileToReplace(DataFile oldFile){
+ private boolean step_025_loadFileToReplace(DataFile existingFile){
if (this.hasError()){
return false;
}
- if (oldFile == null){
- throw new NullPointerException("fileToReplace cannot be null");
+ if (existingFile == null){
+ this.addErrorSevere("The existingFile to replace cannot be null");
+ return false;
}
- if (oldFile.getOwner() != this.dataset){
+ if (existingFile.getOwner() != this.dataset){
String errMsg = "This file does not belong to the datset";
addError(errMsg);
return false;
}
- fileToReplace = oldFile;
+ fileToReplace = existingFile;
return true;
}
@@ -333,27 +524,31 @@ public boolean step_25_loadFileToReplace(DataFile oldFile){
* @param oldFile
* @return
*/
- public boolean step_25_loadFileToReplaceById(Long dataFileId){
+ private boolean step_025_loadFileToReplaceById(Long dataFileId){
if (this.hasError()){
return false;
}
+ // This shouldn't happen, the public replace method should throw
+ // a NullPointerException
+ //
if (dataFileId == null){
- throw new NullPointerException("dataFileId cannot be null");
+ this.addError("The dataFileId cannot be null");
+ return false;
}
- fileToReplace = fileService.find(dataFileId);
- if (fileToReplace == null){
- this.addError("There was no file found for id: " + dataFileId);
+ DataFile existingFile = fileService.find(dataFileId);
+ if (existingFile == null){
+ this.addError("Replacement file not found. There was no file found for id: " + dataFileId);
return false;
}
- return true;
+ return step_025_loadFileToReplace(existingFile);
}
- public boolean step_30_createNewFilesViaIngest(){
+ private boolean step_030_createNewFilesViaIngest(){
if (this.hasError()){
return false;
@@ -385,8 +580,11 @@ public boolean step_30_createNewFilesViaIngest(){
return false;
}
- return this.run_auto_step_35_checkForDuplicates();
+ if (!this.run_auto_step_040_checkForDuplicates()){
+ return false;
+ }
+ return this.run_auto_step_045_checkForFileReplaceDuplicate();
}
/**
@@ -394,8 +592,9 @@ public boolean step_30_createNewFilesViaIngest(){
*
* @return
*/
- public boolean run_auto_step_35_checkForDuplicates(){
+ private boolean run_auto_step_040_checkForDuplicates(){
+ msgt("run_auto_step_040_checkForDuplicates");
if (this.hasError()){
return false;
}
@@ -438,6 +637,7 @@ public boolean run_auto_step_35_checkForDuplicates(){
String dupeName = df.getFileMetadata().getLabel();
removeLinkedFileFromDataset(dataset, df);
+ //abandonOperationRemoveAllNewFilesFromDataset();
this.addErrorSevere("This file has a duplicate already in the dataset: " + dupeName);
}else{
filesToAdd.add(df);
@@ -450,10 +650,74 @@ public boolean run_auto_step_35_checkForDuplicates(){
}
return true;
- } // end run_auto_step_35_checkForDuplicates
+ } // end run_auto_step_040_checkForDuplicates
- public boolean step_40_checkForConstraintViolations(){
+ /**
+ * This is always checked.
+ *
+ * For ADD: If there is no replacement file, then the check is considered a success
+ * For REPLACE: The checksum is examined against the "filesToAdd" list
+ *
+ */
+ private boolean run_auto_step_045_checkForFileReplaceDuplicate(){
+
+ if (this.hasError()){
+ return false;
+ }
+
+ // Not a FILE REPLACE operation -- skip this step!!
+ //
+ if (!isFileReplaceOperation()){
+ return true;
+ }
+
+
+ if (filesToAdd.isEmpty()){
+ // This error shouldn't happen if steps called in sequence....
+ this.addErrorSevere("There are no files to add. (This error shouldn't happen if steps called in sequence....checkForFileReplaceDuplicate)");
+ return false;
+ }
+
+
+ if (this.fileToReplace == null){
+ // This error shouldn't happen if steps called correctly
+ this.addErrorSevere("The fileToReplace cannot be null. (This error shouldn't happen if steps called in sequence....checkForFileReplaceDuplicate)");
+ return false;
+ }
+
+ for (DataFile df : filesToAdd){
+
+ if (df.getCheckSum() != null &amp;&amp; df.getCheckSum().equals(fileToReplace.getCheckSum())){
+ this.addError("The new file,\"" + df.getFileMetadata().getLabel()
+ + "\" has the same content as the replacement file, \""
+ + fileToReplace.getFileMetadata().getLabel() + "\" .");
+
+ removeLinkedFileFromDataset(dataset, df); // Is this correct, if multiple files added in case of .shp or .zip, shouldn't they all be removed?
+ //this.abandonOperationRemoveAllNewFilesFromDataset(); // Is this correct, if multiple files, shouldn't they all be removed?
+ return false;
+ }
+ }
+
+ return true;
+
+ } // end run_auto_step_045_checkForFileReplaceDuplicate
+
+
+ private boolean abandonOperationRemoveAllNewFilesFromDataset(){
+
+ if (filesToAdd.isEmpty()){
+ return true;
+ }
+
+ for (DataFile df : filesToAdd){
+ this.removeLinkedFileFromDataset(dataset, df); // Is this correct, if multiple files, shouldn't they all be removed?
+ }
+ return true;
+ }
+
+
+ private boolean step_050_checkForConstraintViolations(){
if (this.hasError()){
return false;
@@ -468,8 +732,19 @@ public boolean step_40_checkForConstraintViolations(){
// -----------------------------------------------------------
// Iterate through checking for constraint violations
// Gather all error messages
- // -----------------------------------------------------------
+ // -----------------------------------------------------------
Set constraintViolations = workingVersion.validate();
+
+ // -----------------------------------------------------------
+ // No violations found
+ // -----------------------------------------------------------
+ if (constraintViolations.isEmpty()){
+ return true;
+ }
+
+ // -----------------------------------------------------------
+ // violations found: gather all error messages
+ // -----------------------------------------------------------
List errMsgs = new ArrayList<>();
for (ConstraintViolation violation : constraintViolations){
this.addError(violation.getMessage());
@@ -479,7 +754,7 @@ public boolean step_40_checkForConstraintViolations(){
}
- public boolean step_50_addFilesViaIngestService(){
+ private boolean step_060_addFilesViaIngestService(){
if (this.hasError()){
return false;
@@ -502,7 +777,7 @@ public boolean step_50_addFilesViaIngestService(){
*
* @return
*/
- public boolean step_70_run_update_dataset_command(){
+ private boolean step_070_run_update_dataset_command(){
if (this.hasError()){
return false;
@@ -527,7 +802,47 @@ public boolean step_70_run_update_dataset_command(){
}
- public boolean step_80_notifyUser(){
+ private boolean step_080_run_update_dataset_command_for_replace(){
+
+ if (this.hasError()){
+ return false;
+ }
+
+ // -----------------------------------------------------------
+ // Make list of files to delete -- e.g. the single "fileToReplace"
+ // -----------------------------------------------------------
+ List filesToBeDeleted = new ArrayList();
+ filesToBeDeleted.add(fileToReplace.getFileMetadata());
+
+ // -----------------------------------------------------------
+ // Set the "root file ids" and "previous file ids"
+ // -----------------------------------------------------------
+ for (DataFile df : filesToAdd){
+ df.setPreviousDataFileID(fileToReplace.getId());
+ df.setRootDataFileId(fileToReplace.getRootDataFileId());
+ }
+
+
+ Command update_cmd;
+ update_cmd = new UpdateDatasetCommand(dataset, dvRequest, filesToBeDeleted);
+ ((UpdateDatasetCommand) update_cmd).setValidateLenient(true);
+
+ try {
+ commandEngine.submit(update_cmd);
+ } catch (CommandException ex) {
+ this.addErrorSevere("Failed to update the dataset. Please contact the administrator");
+ logger.severe(ex.getMessage());
+ return false;
+ }catch (EJBException ex) {
+ this.addErrorSevere("Failed to update the dataset. Please contact the administrator");
+ logger.severe(ex.getMessage());
+ return false;
+ }
+ return true;
+ }
+
+
+ private boolean step_090_notifyUser(){
if (this.hasError()){
return false;
}
@@ -539,7 +854,7 @@ public boolean step_80_notifyUser(){
}
- public boolean step_100_startIngestJobs(){
+ private boolean step_100_startIngestJobs(){
if (this.hasError()){
return false;
}
@@ -568,7 +883,72 @@ private void msgt(String m){
}
- /*
+
+ /**
+ * When a duplicate file is found after the initial ingest,
+ * remove the file from the dataset because
+ * createDataFiles has already linked it to the dataset:
+ * - first, through the filemetadata list
+ * - then through the datafiles list
+ *
+ *
+ * @param dataset
+ * @param dataFileToRemove
+ */
+ private boolean removeLinkedFileFromDataset(Dataset dataset, DataFile dataFileToRemove){
+
+ if (dataset==null){
+ this.addErrorSevere("dataset cannot be null in removeLinkedFileFromDataset");
+ return false;
+ }
+
+ if (dataFileToRemove==null){
+ this.addErrorSevere("dataFileToRemove cannot be null in removeLinkedFileFromDataset");
+ return false;
+ }
+
+ // -----------------------------------------------------------
+ // (1) Remove file from filemetadata list
+ // -----------------------------------------------------------
+ Iterator fmIt = dataset.getEditVersion().getFileMetadatas().iterator();
+ msgt("Clear FileMetadatas");
+ while (fmIt.hasNext()) {
+ FileMetadata fm = fmIt.next();
+ msg("Check: " + fm);
+ if (fm.getId() == null && dataFileToRemove.getStorageIdentifier().equals(fm.getDataFile().getStorageIdentifier())) {
+ msg("Got It! ");
+ fmIt.remove();
+ break;
+ }
+ }
+
+
+ // -----------------------------------------------------------
+ // (2) Remove file from datafiles list
+ // -----------------------------------------------------------
+ Iterator dfIt = dataset.getFiles().iterator();
+ msgt("Clear Files");
+ while (dfIt.hasNext()) {
+ DataFile dfn = dfIt.next();
+ msg("Check: " + dfn);
+ if (dfn.getId() == null && dataFileToRemove.getStorageIdentifier().equals(dfn.getStorageIdentifier())) {
+ msg("Got It! try to remove from iterator");
+
+ dfIt.remove();
+ msg("it work");
+
+ break;
+ }else{
+ msg("...ok");
+ }
+ }
+ return true;
+ }
+
+
+
+}
+ /*
DatasetPage sequence:
(A) editFilesFragment.xhtml -> EditDataFilesPage.handleFileUpload
@@ -638,68 +1018,4 @@ public String getFileName()
// - Add this new file.
// ....
-
- /**
- * When a duplicate file is found after the initial ingest,
- * remove the file from the dataset because
- * createDataFiles has already linked it to the dataset:
- * - first, through the filemetadata list
- * - then through tht datafiles list
- *
- *
- * @param dataset
- * @param dataFileToRemove
- */
- private boolean removeLinkedFileFromDataset(Dataset dataset, DataFile dataFileToRemove){
-
- if (dataset==null){
- this.addErrorSevere("dataset cannot be null in removeLinkedFileFromDataset");
- return false;
- }
-
- if (dataFileToRemove==null){
- this.addErrorSevere("dataFileToRemove cannot be null in removeLinkedFileFromDataset");
- return false;
- }
-
- // -----------------------------------------------------------
- // (1) Remove file from filemetadata list
- // -----------------------------------------------------------
- Iterator fmIt = dataset.getEditVersion().getFileMetadatas().iterator();
- msgt("Clear FileMetadatas");
- while (fmIt.hasNext()) {
- FileMetadata fm = fmIt.next();
- msg("Check: " + fm);
- if (fm.getId() == null && dataFileToRemove.getStorageIdentifier().equals(fm.getDataFile().getStorageIdentifier())) {
- msg("Got It! ");
- fmIt.remove();
- break;
- }
- }
-
-
- // -----------------------------------------------------------
- // (2) Remove file from datafiles list
- // -----------------------------------------------------------
- Iterator dfIt = dataset.getFiles().iterator();
- msgt("Clear Files");
- while (dfIt.hasNext()) {
- DataFile dfn = dfIt.next();
- msg("Check: " + dfn);
- if (dfn.getId() == null && dataFileToRemove.getStorageIdentifier().equals(dfn.getStorageIdentifier())) {
- msg("Got It! try to remove from iterator");
-
- dfIt.remove();
- msg("it work");
-
- break;
- }else{
- msg("...ok");
- }
- }
- return true;
- }
-
-
-
-}
+
\ No newline at end of file
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/DuplicateFileChecker.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/DuplicateFileChecker.java
index d2e38d50582..da318f67128 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/DuplicateFileChecker.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/DuplicateFileChecker.java
@@ -59,7 +59,7 @@ public boolean isFileInSavedDatasetVersion(DatasetVersion datasetVersion, FileMe
if (fileMetadata == null){
throw new NullPointerException("fileMetadata cannot be null");
}
- return this.isFileInSavedDatasetVersion(datasetVersion, fileMetadata.getDataFile().getmd5());
+ return this.isFileInSavedDatasetVersion(datasetVersion, fileMetadata.getDataFile().getCheckSum());
}
/**
@@ -84,7 +84,7 @@ public boolean isFileInSavedDatasetVersion(DatasetVersion datasetVersion, String
/**
* From dataset version:
- * - Get the md5s of all the files
+ * - Get the checksum of all the files
* - Load them into a hash
*
* Loads checksums from unsaved datasetversion--checks more
@@ -101,7 +101,7 @@ public Map getDatasetHashesFromDatabase(DatasetVersion datasetV
List fileMetadatas = new ArrayList<>(datasetVersion.getFileMetadatas());
for (FileMetadata fm : fileMetadatas){
- String checkSum = fm.getDataFile().getmd5();
+ String checkSum = fm.getDataFile().getCheckSum();
if (checksumHashCounts.get(checkSum) != null){
checksumHashCounts.put(checkSum, checksumHashCounts.get(checkSum).intValue() + 1);
}else{
@@ -127,12 +127,12 @@ public static boolean isDuplicateOriginalWay(DatasetVersion workingVersion, File
throw new NullPointerException("datasetVersion cannot be null");
}
- String thisMd5 = fileMetadata.getDataFile().getmd5();
- if (thisMd5 == null) {
+ String selectedCheckSum = fileMetadata.getDataFile().getCheckSum();
+ if (selectedCheckSum == null) {
return false;
}
- Map MD5Map = new HashMap();
+ Map checkSumMap = new HashMap();
// TODO:
// think of a way to do this that doesn't involve populating this
@@ -149,16 +149,16 @@ public static boolean isDuplicateOriginalWay(DatasetVersion workingVersion, File
while (fmIt.hasNext()) {
FileMetadata fm = fmIt.next();
- String md5 = fm.getDataFile().getmd5();
- if (md5 != null) {
- if (MD5Map.get(md5) != null) {
- MD5Map.put(md5, MD5Map.get(md5).intValue() + 1);
+ String currentCheckSum = fm.getDataFile().getCheckSum();
+ if (currentCheckSum != null) {
+ if (checkSumMap.get(currentCheckSum) != null) {
+ checkSumMap.put(currentCheckSum, checkSumMap.get(currentCheckSum).intValue() + 1);
} else {
- MD5Map.put(md5, 1);
+ checkSumMap.put(currentCheckSum, 1);
}
}
}
- return MD5Map.get(thisMd5) != null && MD5Map.get(thisMd5).intValue() > 1;
+ return checkSumMap.get(selectedCheckSum) != null && checkSumMap.get(selectedCheckSum).intValue() > 1;
}
From 30d6256155515ac25d938543716972801f26affa Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Tue, 27 Sep 2016 13:42:29 -0400
Subject: [PATCH 17/86] working on replace example #2290
---
.../harvard/iq/dataverse/api/FileUpload.java | 218 +++---------------
1 file changed, 30 insertions(+), 188 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
index 51ded12df8d..2c0cd52ffa3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
@@ -295,7 +295,9 @@ public Response hi(){
//-------------------
- if (true){
+ // ADD
+ //-------------------
+ if (false){
DataverseRequest dvRequest2 = createDataverseRequest(authUser);
@@ -316,201 +318,41 @@ public Response hi(){
return okResponse("hey hey, it may have worked");
}
- }
+ }
+
+ //-------------------
+ // REPLACE
//-------------------
-
-
-
- // -------------------------------------
- msgt("(1c) Get the edit version of the Dataset");
- // -------------------------------------
- DatasetVersion workingVersion = selectedDataset.getEditVersion();
- msg("new workingVersion: " + workingVersion + "\n url:" + selectedDataset.getPersistentURL());
-
- // -------------------------------------
- msgt("(1d) List the dataset version files");
- // -------------------------------------
-
- // List the current files
- //
- int cnt = 0;
- for (FileMetadata fm : workingVersion.getFileMetadatas()){
- cnt++;
- msg("File " + cnt + ": " + fm.getLabel());
- }
- dashes();
-
-
- // -------------------------------------
- msgt("(2) ingestService.createDataFiles");
- // -------------------------------------
- List dFileList = null;
- msg("state of the workingVersion: " + workingVersion.getVersionState());
- try {
- msg("The starting bell rings....");
- dFileList = ingestService.createDataFiles(workingVersion,
- testFileInputStream,
- "hullo.txt",
- "text/plain");
- msg("Almost there....");
- } catch (IOException ex) {
- msg("Not happy...:" + ex.toString());
- logger.severe(ex.toString());
- return okResponse("IOException when trying to ingest: " + testFileInputStream.toString());
- }
-
-
- // -------------------------------------
- msgt("(2A) we should have an additional file");
- // -------------------------------------
- // List the current files
- //
- cnt = 0;
- for (FileMetadata fm : workingVersion.getFileMetadatas()){
- cnt++;
- msg("File " + cnt + ": " + fm.getLabel());
- }
- dashes();
-
-
-
+ if (true){
- // -------------------------------------
- msgt("3 Duplicate check");
- // -------------------------------------
- List newFiles = new ArrayList();
- msg("dFileList: " + dFileList.toString());
- String warningMessage = null;
- for (DataFile df : dFileList){
+ DataverseRequest dvRequest2 = createDataverseRequest(authUser);
+ AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest2,
+ this.ingestService,
+ this.datasetService,
+ this.fileService,
+ this.permissionSvc,
+ this.commandEngine);
- // -----------------------------------------------------------
- // Check for ingest warnings
- // -----------------------------------------------------------
- if (df.isIngestProblem()) {
- if (df.getIngestReportMessage() != null) {
- if (warningMessage == null) {
- warningMessage = df.getIngestReportMessage();
- } else {
- warningMessage = warningMessage.concat("; " + df.getIngestReportMessage());
- }
- }
- df.setIngestDone();
- }
- if (warningMessage != null){
- return okResponse(warningMessage);
- }
-
+ Long oldFileId = new Long(141);
+ addFileHelper.runReplaceFile(selectedDataset,
+ "blackbox.txt",
+ "text/plain",
+ testFileInputStream,
+ oldFileId
+ );
- msg("Checking file: " + df.getFileMetadata().getLabel());
- //if (dfc.isFileInSavedDatasetVersion(workingVersion, df.getmd5())){
- // return okResponse("This file has a dupe md5! " + df.getFileMetadata().getLabel());
- if (DuplicateFileChecker.isDuplicateOriginalWay(workingVersion, df.getFileMetadata())){
- msg("has a dupe:");
- // Shut things down!
- try {
- testFileInputStream.close();
- } catch (IOException ex) {
- Logger.getLogger(FileUpload.class.getName()).log(Level.SEVERE, null, ex);
- }
-
- String dupeName = df.getFileMetadata().getLabel();
- removeLinkedFileFromDataset(selectedDataset, df);
-
- return okResponse("This file has a dupe md5! " + dupeName + " checksum: " + df.getmd5());
+
+ if (addFileHelper.hasError()){
+ return okResponse(addFileHelper.getErrorMessagesAsString("\n"));
}else{
- //df.save();
- newFiles.add(df);
+ return okResponse("hey hey, it may have worked");
}
- }
-
-
- // -------------------------------------
- msgt("(3a) List the new files");
- // -------------------------------------
- // List the current files
- //
- cnt = 0;
- for (DataFile df : newFiles){
- cnt++;
- msg("File " + cnt + ": " + df.getFileMetadata().getLabel());
- }
- dashes();
-
-
- // -------------------------------------
- msgt("4 Check constraints");
- // -------------------------------------
- Set constraintViolations = workingVersion.validate();
- List errMsgs = new ArrayList<>();
- for (ConstraintViolation violation : constraintViolations){
- msg("Violation found! :" + violation.getMessage());
- errMsgs.add(violation.getMessage());
- }
- if (errMsgs.size() > 0){
- return okResponse("Constraint violations found! " + String.join("
\n", errMsgs));
- }
-
- // -------------------------------------
- msgt("5 Add the files!");
- // -------------------------------------
- ingestService.addFiles(workingVersion, newFiles);
-
-
- // -------------------------------------
- msgt("6 Make the command!");
- // -------------------------------------
- /*
-
- execCommand(new SetDatasetCitationDateCommand(createDataverseRequest(findUserOrDie()), findDatasetOrDie(id), dsfType));
-
- */
-
- DataverseRequest dvRequest = createDataverseRequest(authUser);
- msg("dvRequest: " + dvRequest);
-
- if (dvRequest == null){
- return okResponse("Failed, dvRequest is null");
- }
- //CreateDatasetCommand cmd = new CreateDatasetCommand(workingVersion.getDataset(),
- // dvRequest);
- Command update_cmd;
- update_cmd = new UpdateDatasetCommand(selectedDataset, dvRequest);
- ((UpdateDatasetCommand) update_cmd).setValidateLenient(true);
-
- // -------------------------------------
- msgt("7 Run the command!");
- // -------------------------------------
- try {
- commandEngine.submit(update_cmd);
- } catch (CommandException ex) {
- //ex.getMessage()
- msgt("Bombed: " + ex.getMessage());
- return okResponse("bombed....");
- //Logger.getLogger(FileUpload.class.getName()).log(Level.SEVERE, null, ex);
- }catch (EJBException ex) {
- msgt("Bombed2: " + ex.getMessage());
- return okResponse("bombed 2....");
- }
-
- // -------------------------------------
- msgt("8 userNotificationService");
- // -------------------------------------
-
- userNotificationService.sendNotification((AuthenticatedUser) authUser, selectedDataset.getCreateDate(), UserNotification.Type.CREATEDS, selectedDataset.getLatestVersion().getId());
-
- // -------------------------------------
- msgt("9 start Ingest jobs");
- // -------------------------------------
- newFiles.clear();
-
-
- ingestService.startIngestJobs(selectedDataset, (AuthenticatedUser) authUser);
+
+ }
+ return okResponse("ain't done nuthin'");
- return okResponse("hi. maybe it worked!");
-
- }
-
+ } // end call to "hi"
}
From 4cf21c1942fc03c4c91207d2f20c0389e1e1cea4 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Tue, 27 Sep 2016 14:49:42 -0400
Subject: [PATCH 18/86] #2291 troubleshooting where file save is called from
ingestServiceBean -- e.g. why is it not going through
DataFileServiceBean.save
---
.../iq/dataverse/DataFileServiceBean.java | 4 +-
.../harvard/iq/dataverse/api/FileUpload.java | 11 ++-
.../datasetutility/AddReplaceFileHelper.java | 82 ++++++++++++++++++-
3 files changed, 89 insertions(+), 8 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
index 3f681c04da9..56192029ceb 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
@@ -731,11 +731,11 @@ public DataFile save(DataFile dataFile) {
DataFile savedDataFile = em.merge(dataFile);
- msgt("post save");
+/* msgt("post save");
msg("id : " + savedDataFile.getId());
msg("getRootDataFileId : " + savedDataFile.getRootDataFileId());
msg("getPreviousDataFileID : " + savedDataFile.getPreviousDataFileID());
-
+*/
// Set the initial value of the rootDataFileId
savedDataFile = setAndCheckFileReplaceAttributes(savedDataFile);
msgt("post post save");
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
index 2c0cd52ffa3..13de080b0d6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
@@ -297,7 +297,7 @@ public Response hi(){
//-------------------
// ADD
//-------------------
- if (false){
+ if (true){
DataverseRequest dvRequest2 = createDataverseRequest(authUser);
@@ -309,7 +309,10 @@ public Response hi(){
this.commandEngine);
- addFileHelper.runAddFile(selectedDataset, "blackbox.txt", "text/plain", testFileInputStream);
+ addFileHelper.runAddFile(selectedDataset,
+ "blackbox.txt",
+ "text/plain",
+ testFileInputStream);
if (addFileHelper.hasError()){
@@ -324,7 +327,7 @@ public Response hi(){
// REPLACE
//-------------------
- if (true){
+ if (false){
DataverseRequest dvRequest2 = createDataverseRequest(authUser);
@@ -335,7 +338,7 @@ public Response hi(){
this.permissionSvc,
this.commandEngine);
- Long oldFileId = new Long(141);
+ Long oldFileId = (long) 141;
addFileHelper.runReplaceFile(selectedDataset,
"blackbox.txt",
"text/plain",
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index 3b8ce10cd3a..17fc95ffbf0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -802,11 +802,79 @@ private boolean step_070_run_update_dataset_command(){
}
+ private boolean step_auto_085_delete_file_to_replace_from_working_version(){
+
+ msgt("step_auto_085_delete_file_to_replace_from_working_version 1");
+
+ if (!isFileReplaceOperation()){
+ // Shouldn't happen!
+ this.addErrorSevere("This should ONLY be called for file replace operations!! (step_auto_085_delete_file_to_replace_from_working_version");
+ return false;
+ }
+ msg("step_auto_085_delete_file_to_replace_from_working_version 1");
+
+ if (this.hasError()){
+ return false;
+ }
+
+ msg("step_auto_085_delete_file_to_replace_from_working_version 2");
+
+ // 2. delete the filemetadata from the version:
+ //fmit = dataset.getEditVersion().getFileMetadatas().iterator();
+ Iterator fmit = workingVersion.getFileMetadatas().iterator();
+ msg("step_auto_085_delete_file_to_replace_from_working_version 3");
+ msg("-------------------------");
+ msg("File to replace getId: " + fileToReplace.getId());
+ msg("File to replace getCheckSum: " + fileToReplace.getCheckSum());
+ msg("File to replace getFileMetadata: " + fileToReplace.getFileMetadata());
+ msg("File to replace getLabel: " + fileToReplace.getFileMetadata().getLabel());
+ msg("-------------------------");
+
+
+ while (fmit.hasNext()) {
+ msg("-------------------------");
+ msg("step_auto_085_delete_file_to_replace_from_working_version 4");
+ FileMetadata fmd = (FileMetadata) fmit.next();
+ msg(" ....getLabel: " + fmd.getLabel());
+ msg(" ....getId: " + fmd.getId());
+ msg(" ....getDataFile: " + fmd.getDataFile().toString());
+ msg(" ....getDataFile id: " + fmd.getDataFile().getId());
+ if (fmd.getId() != null){
+ msg("step_auto_085_delete_file_to_replace_from_working_version 5");
+ msg("fileToReplace.getStorageIdentifier: " + fileToReplace.getStorageIdentifier());
+ msg("fmd.getDataFile().getStorageIdentifier(): " + fmd.getDataFile().getStorageIdentifier());
+ if (fileToReplace.getStorageIdentifier().equals(fmd.getDataFile().getStorageIdentifier())) {
+ msg("step_auto_085_delete_file_to_replace_from_working_version 6");
+ fmit.remove();
+ return true;
+ }
+ }
+ }
+ return true;
+ //this.addErrorSevere("Could not find file to replace in the working DatasetVersion");
+ //return false;
+ }
+
private boolean step_080_run_update_dataset_command_for_replace(){
-
+
+ if (!isFileReplaceOperation()){
+ // Shouldn't happen!
+ this.addErrorSevere("This should ONLY be called for file replace operations!! (step_080_run_update_dataset_command_for_replace");
+ return false;
+ }
+
if (this.hasError()){
return false;
}
+ msg("step_080_run_update_dataset_command_for_replace 1");
+ // -----------------------------------------------------------
+ // Remove the "fileToReplace" from the current working version
+ // -----------------------------------------------------------
+ if (!step_auto_085_delete_file_to_replace_from_working_version()){
+ return false;
+ }
+
+ msg("step_080_run_update_dataset_command_for_replace 2");
// -----------------------------------------------------------
// Make list of files to delete -- e.g. the single "fileToReplace"
@@ -814,6 +882,9 @@ private boolean step_080_run_update_dataset_command_for_replace(){
List filesToBeDeleted = new ArrayList();
filesToBeDeleted.add(fileToReplace.getFileMetadata());
+ msg("step_080_run_update_dataset_command_for_replace 3");
+
+
// -----------------------------------------------------------
// Set the "root file ids" and "previous file ids"
// -----------------------------------------------------------
@@ -821,12 +892,19 @@ private boolean step_080_run_update_dataset_command_for_replace(){
df.setPreviousDataFileID(fileToReplace.getId());
df.setRootDataFileId(fileToReplace.getRootDataFileId());
}
-
+
+ msg("step_080_run_update_dataset_command_for_replace 4");
+
Command update_cmd;
update_cmd = new UpdateDatasetCommand(dataset, dvRequest, filesToBeDeleted);
+
+ msg("step_080_run_update_dataset_command_for_replace 5");
+
((UpdateDatasetCommand) update_cmd).setValidateLenient(true);
+ msg("step_080_run_update_dataset_command_for_replace 6");
+
try {
commandEngine.submit(update_cmd);
} catch (CommandException ex) {
From 9da79c7ad7ef9abcc39e6c4b8599a6165cdb55e5 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Tue, 27 Sep 2016 15:34:20 -0400
Subject: [PATCH 19/86] rootId now working via UI #2290
---
.../iq/dataverse/DataFileServiceBean.java | 66 +++++++++++--------
1 file changed, 37 insertions(+), 29 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
index 56192029ceb..54167085f96 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
@@ -723,31 +723,17 @@ public List findAll() {
public DataFile save(DataFile dataFile) {
- // datafile
- msgt("pre save");
- msg("id : " + dataFile.getId());
- msg("getRootDataFileId : " + dataFile.getRootDataFileId());
- msg("getPreviousDataFileID : " + dataFile.getPreviousDataFileID());
-
+ // save datafile
DataFile savedDataFile = em.merge(dataFile);
-
-/* msgt("post save");
- msg("id : " + savedDataFile.getId());
- msg("getRootDataFileId : " + savedDataFile.getRootDataFileId());
- msg("getPreviousDataFileID : " + savedDataFile.getPreviousDataFileID());
-*/
+
// Set the initial value of the rootDataFileId
+ // (does nothing if it's already set)
savedDataFile = setAndCheckFileReplaceAttributes(savedDataFile);
- msgt("post post save");
- msg("id : " + savedDataFile.getId());
- msg("getRootDataFileId : " + savedDataFile.getRootDataFileId());
- msg("getPreviousDataFileID : " + savedDataFile.getPreviousDataFileID());
-
-
+
return savedDataFile;
}
- private void msg(String m){
+ private void msg(String m){
System.out.println(m);
}
private void dashes(){
@@ -762,22 +748,32 @@ private void msgt(String m){
of a file
*/
- private DataFile setAndCheckFileReplaceAttributes(DataFile savedDataFile){
-
+ public DataFile setAndCheckFileReplaceAttributes(DataFile savedDataFile){
+
+ msgt("setAndCheckFileReplaceAttributes: " + savedDataFile);
+ msgt("setAndCheckFileReplaceAttributes: getCheckSum()" + savedDataFile.getCheckSum());
+
// Is this the initial version of a file?
- if (Objects.equals(savedDataFile.getRootDataFileId(), DataFile.ROOT_DATAFILE_ID_DEFAULT)){
-
+ msg("savedDataFile.getRootDataFileId(): " + savedDataFile.getRootDataFileId());
+
+ if ((savedDataFile.getRootDataFileId() == null)||
+ (savedDataFile.getRootDataFileId().equals(DataFile.ROOT_DATAFILE_ID_DEFAULT))){
+ msg("yes, initial version");
+
// YES! Set the RootDataFileId to the Id
savedDataFile.setRootDataFileId(savedDataFile.getId());
// SAVE IT AGAIN!!!
- return em.merge(savedDataFile);
+ msg("yes, save again");
+
+ return em.merge(savedDataFile);
+ }else{
+ // Looking Good Billy Ray! Feeling Good Louis!
+ msg("nope, looks ok");
+
+ return savedDataFile;
}
-
- // Looking Good Billy Ray! Feeling Good Louis!
-
- return savedDataFile;
}
@@ -800,10 +796,20 @@ public void deleteFromVersion( DatasetVersion d, DataFile f ) {
*/
public FileMetadata mergeFileMetadata(FileMetadata fileMetadata) {
- return em.merge(fileMetadata);
+
+ FileMetadata newFileMetadata = em.merge(fileMetadata);
+ em.flush();
+
+
+ // Set the initial value of the rootDataFileId
+ // (does nothing if it's already set)
+ DataFile updatedDataFile = setAndCheckFileReplaceAttributes(newFileMetadata.getDataFile());
+
+ return newFileMetadata;
}
public void removeFileMetadata(FileMetadata fileMetadata) {
+ msgt("removeFileMetadata: fileMetadata");
FileMetadata mergedFM = em.merge(fileMetadata);
em.remove(mergedFM);
}
@@ -941,6 +947,8 @@ public boolean isThumbnailAvailable (DataFile file) {
if (ImageThumbConverter.isThumbnailAvailable(file)) {
file = this.find(file.getId());
file.setPreviewImageAvailable(true);
+ msgt("OVER HERE_----------");
+ msg("bleh.....");
file = this.save(file); //em.merge(file);
// (should this be done here? - TODO:)
return true;
From 63172a0ddf6e15855cbcb55eb6ef5b7ed7905b40 Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Tue, 27 Sep 2016 16:13:54 -0400
Subject: [PATCH 20/86] add API test to exercise rootDataFileId #2290
---
.../java/edu/harvard/iq/dataverse/DataFile.java | 6 +++++-
.../iq/dataverse/util/json/JsonPrinter.java | 2 ++
.../java/edu/harvard/iq/dataverse/api/SwordIT.java | 14 ++++++++++++++
3 files changed, 21 insertions(+), 1 deletion(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
index 1b3fa14c6f0..9d57126d60f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
@@ -75,7 +75,11 @@ public class DataFile extends DvObject implements Comparable {
// Default is -1 until the intial id is generated
@Column(nullable=false)
private Long rootDataFileId;
-
+
+ /**
+ * @todo We should have consistency between "Id" vs "ID" for rootDataFileId
+ * vs. previousDataFileID.
+ */
// null for initial version; subsequent versions will point to the previous file
//
@Column(nullable=true)
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
index c3fbb45c4ad..862c3d15beb 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
@@ -495,6 +495,8 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata) {
.add("originalFormatLabel", df.getOriginalFormatLabel())
.add("UNF", df.getUnf())
.add("md5", df.getmd5())
+ .add("rootDataFileId", df.getRootDataFileId())
+ .add("previousDataFileId", df.getPreviousDataFileID())
.add("description", df.getDescription());
}
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java
index 3fb9744a2eb..df2dbe914ed 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java
@@ -16,6 +16,7 @@
import static javax.ws.rs.core.Response.Status.NO_CONTENT;
import static javax.ws.rs.core.Response.Status.OK;
import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.Matchers.endsWith;
import org.junit.AfterClass;
import static org.junit.Assert.assertEquals;
@@ -203,6 +204,19 @@ public void testCreateDataverseCreateDatasetUploadFileDownloadFileEditTitle() {
uploadFile1.prettyPrint();
assertEquals(CREATED.getStatusCode(), uploadFile1.getStatusCode());
+ Response getDatasetJson = UtilIT.nativeGetUsingPersistentId(persistentId, apiToken);
+ getDatasetJson.prettyPrint();
+ getDatasetJson.then().assertThat()
+ .body("data.latestVersion.files[0].dataFile.filename", equalTo("trees.png"))
+ /**
+ * @todo The plan is to switch this to the nullValue version
+ * rather than expecting -1.
+ */
+ .body("data.latestVersion.files[0].dataFile.rootDataFileId", equalTo(-1))
+ // .body("data.latestVersion.files[0].dataFile.rootDataFileId", nullValue())
+ .body("data.latestVersion.files[0].dataFile.previousDataFileId", nullValue())
+ .statusCode(OK.getStatusCode());
+
Response swordStatementUnAuth = UtilIT.getSwordStatement(persistentId, apiTokenNoPrivs);
swordStatementUnAuth.prettyPrint();
swordStatementUnAuth.then().assertThat()
From f101a4657c762013c570ce6b3616c6b99ad93293 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Wed, 28 Sep 2016 10:39:02 -0400
Subject: [PATCH 21/86] Almost there. Replace working for published version
but not draft. #2290
---
.../upgrades/upgrade_v4.5.1_to_v4.6.sql | 2 +-
.../edu/harvard/iq/dataverse/DataFile.java | 20 +-
.../iq/dataverse/DataFileServiceBean.java | 5 +-
.../harvard/iq/dataverse/api/FileUpload.java | 137 ++++++++-----
.../datasetutility/AddReplaceFileHelper.java | 191 ++++++++++--------
.../iq/dataverse/util/json/JsonPrinter.java | 2 +-
6 files changed, 206 insertions(+), 151 deletions(-)
diff --git a/scripts/database/upgrades/upgrade_v4.5.1_to_v4.6.sql b/scripts/database/upgrades/upgrade_v4.5.1_to_v4.6.sql
index f2fcad94522..a8957cff53a 100644
--- a/scripts/database/upgrades/upgrade_v4.5.1_to_v4.6.sql
+++ b/scripts/database/upgrades/upgrade_v4.5.1_to_v4.6.sql
@@ -2,4 +2,4 @@
ALTER TABLE datafile ADD COLUMN rootdatafileid bigint default -1;
ALTER TABLE datafile ADD COLUMN previousdatafileid bigint default null;
-- For existing DataFile objects, update rootDataFileId values:
-UPDATE datafile SET rootdatafileid = id;
+UPDATE datafile SET rootdatafileid = -1;
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
index 9d57126d60f..5b68aeafc82 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
@@ -78,12 +78,12 @@ public class DataFile extends DvObject implements Comparable {
/**
* @todo We should have consistency between "Id" vs "ID" for rootDataFileId
- * vs. previousDataFileID.
+ * vs. previousDataFileId.
*/
// null for initial version; subsequent versions will point to the previous file
//
@Column(nullable=true)
- private Long previousDataFileID;
+ private Long previousDataFileId;
/* endt: FILE REPLACE ATTRIBUTES */
@@ -147,7 +147,7 @@ public DataFile(String contentType) {
*/
private void initFileReplaceAttributes(){
this.rootDataFileId = ROOT_DATAFILE_ID_DEFAULT;
- this.previousDataFileID = null;
+ this.previousDataFileId = null;
}
// The dvObject field "name" should not be used in
@@ -693,19 +693,19 @@ public Long getRootDataFileId(){
/**
- * Set previousDataFileID
- * @param previousDataFileID
+ * Set previousDataFileId
+ * @param previousDataFileId
*/
- public void setPreviousDataFileID(Long previousDataFileID){
- this.previousDataFileID = previousDataFileID;
+ public void setPreviousDataFileId(Long previousDataFileId){
+ this.previousDataFileId = previousDataFileId;
}
/**
- * Get for previousDataFileID
+ * Get for previousDataFileId
* @return Long
*/
- public Long getPreviousDataFileID(){
- return this.previousDataFileID;
+ public Long getPreviousDataFileId(){
+ return this.previousDataFileId;
}
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
index 54167085f96..5f1c1694daa 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
@@ -728,7 +728,7 @@ public DataFile save(DataFile dataFile) {
// Set the initial value of the rootDataFileId
// (does nothing if it's already set)
- savedDataFile = setAndCheckFileReplaceAttributes(savedDataFile);
+ //savedDataFile = setAndCheckFileReplaceAttributes(savedDataFile);
return savedDataFile;
}
@@ -800,10 +800,9 @@ public FileMetadata mergeFileMetadata(FileMetadata fileMetadata) {
FileMetadata newFileMetadata = em.merge(fileMetadata);
em.flush();
-
// Set the initial value of the rootDataFileId
// (does nothing if it's already set)
- DataFile updatedDataFile = setAndCheckFileReplaceAttributes(newFileMetadata.getDataFile());
+ //DataFile updatedDataFile = setAndCheckFileReplaceAttributes(newFileMetadata.getDataFile());
return newFileMetadata;
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
index 13de080b0d6..2ea9287c370 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
@@ -261,8 +261,8 @@ public Response hiReSave(@PathParam("fileId") Long fileId){
}
@GET
- @Path("hi")
- public Response hi(){
+ @Path("add")
+ public Response hi_add(){
// -------------------------------------
msgt("(1) getSampleFile()");
@@ -297,65 +297,98 @@ public Response hi(){
//-------------------
// ADD
//-------------------
- if (true){
+ msg("ADD!");
+ DataverseRequest dvRequest2 = createDataverseRequest(authUser);
+ AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest2,
+ this.ingestService,
+ this.datasetService,
+ this.fileService,
+ this.permissionSvc,
+ this.commandEngine);
+
+
+ addFileHelper.runAddFile(selectedDataset,
+ "blackbox.txt",
+ "text/plain",
+ testFileInputStream);
+
+
+ if (addFileHelper.hasError()){
+ return okResponse(addFileHelper.getErrorMessagesAsString("\n"));
+ }else{
+ return okResponse("hey hey, it may have worked");
+ }
- DataverseRequest dvRequest2 = createDataverseRequest(authUser);
- AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest2,
- this.ingestService,
- this.datasetService,
- this.fileService,
- this.permissionSvc,
- this.commandEngine);
-
-
- addFileHelper.runAddFile(selectedDataset,
- "blackbox.txt",
- "text/plain",
- testFileInputStream);
-
-
- if (addFileHelper.hasError()){
- return okResponse(addFileHelper.getErrorMessagesAsString("\n"));
- }else{
- return okResponse("hey hey, it may have worked");
- }
-
- }
+ } // end call to "hi"
+
+
+ @GET
+ @Path("replace/{oldFileId}")
+ public Response hi_replace(@PathParam("oldFileId") Long oldFileId){
+
+ // -------------------------------------
+ msgt("(1) getSampleFile()");
+ // -------------------------------------
+
+ InputStream testFileInputStream = getSampleFile();
+ if (testFileInputStream == null){
+ return okResponse("Couldn't find the file!!");
+ }
+
+ // -------------------------------------
+ msgt("(1a) Get User from API token");
+ // -------------------------------------
+ User authUser;
+ try {
+ authUser = this.findUserOrDie();
+ } catch (WrappedResponse ex) {
+ return okResponse("Couldn't find a user from the API key");
+ }
+ //authSvc.findByID(new Long(1));
+ msg("authUser: " + authUser);
+ msg("getUserIdentifier: " + authUser.getIdentifier());
+
+
+ // -------------------------------------
+ msgt("(1b) Get the selected Dataset");
+ // -------------------------------------
+ int dataset_id = 10;
+ Dataset selectedDataset = datasetService.find(new Long(dataset_id));
+
+
//-------------------
// REPLACE
//-------------------
- if (false){
+ msg("REPLACE!");
-
- DataverseRequest dvRequest2 = createDataverseRequest(authUser);
- AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest2,
- this.ingestService,
- this.datasetService,
- this.fileService,
- this.permissionSvc,
- this.commandEngine);
-
- Long oldFileId = (long) 141;
- addFileHelper.runReplaceFile(selectedDataset,
- "blackbox.txt",
- "text/plain",
- testFileInputStream,
- oldFileId
- );
-
-
- if (addFileHelper.hasError()){
- return okResponse(addFileHelper.getErrorMessagesAsString("\n"));
- }else{
- return okResponse("hey hey, it may have worked");
- }
-
- }
- return okResponse("ain't done nuthin'");
+ DataverseRequest dvRequest2 = createDataverseRequest(authUser);
+ AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest2,
+ this.ingestService,
+ this.datasetService,
+ this.fileService,
+ this.permissionSvc,
+ this.commandEngine);
+
+ //Long oldFileId = oldFileId;
+ addFileHelper.runReplaceFile(selectedDataset,
+ "replace_" + oldFileId.toString() + ".txt",
+ "text/plain",
+ testFileInputStream,
+ oldFileId
+ );
+
+
+ if (addFileHelper.hasError()){
+ return okResponse(addFileHelper.getErrorMessagesAsString("\n"));
+ }else{
+ return okResponse("hey hey, it may have worked");
+ }
+
} // end call to "hi"
+
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index 17fc95ffbf0..a48aa4e7be5 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -25,6 +25,7 @@
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
+import java.util.Objects;
import java.util.Set;
import java.util.logging.Logger;
import javax.ejb.EJB;
@@ -74,8 +75,8 @@ public class AddReplaceFileHelper{
// Instance variables derived from other input
private User user;
private DatasetVersion workingVersion;
- List newFileList;
- List filesToAdd;
+ List initialFileList;
+ List finalFileList;
// For error handling
@@ -160,7 +161,8 @@ public AddReplaceFileHelper(DataverseRequest dvRequest,
* @return
*/
public boolean runAddFile(Dataset dataset, String newFileName, String newFileContentType, InputStream newFileInputStream){
-
+ msgt(">> runAddFile");
+
this.currentOperation = FILE_ADD_OPERATION;
return this.runAddReplaceFile(dataset, newFileName, newFileContentType, newFileInputStream, null);
@@ -181,7 +183,7 @@ public boolean runReplaceFile(Dataset dataset, String newFileName, String newFil
if (oldFileId==null){
throw new NullPointerException("For a replace operation, oldFileId cannot be null");
}
-
+ msgt(">> runReplaceFile");
this.currentOperation = FILE_REPLACE_OPERATION;
return this.runAddReplaceFile(dataset, newFileName, newFileContentType, newFileInputStream, oldFileId);
@@ -252,7 +254,7 @@ private boolean runAddReplaceFile(Dataset dataset,
return false;
}
-
+
if (this.isFileReplaceOperation()){
msgt("step_080_run_update_dataset_command_for_replace");
if (!this.step_080_run_update_dataset_command_for_replace()){
@@ -558,7 +560,7 @@ private boolean step_030_createNewFilesViaIngest(){
workingVersion = dataset.getEditVersion();
try {
- newFileList = ingestService.createDataFiles(workingVersion,
+ initialFileList = ingestService.createDataFiles(workingVersion,
this.newFileInputStream,
this.newFileName,
this.newFileContentType);
@@ -575,16 +577,16 @@ private boolean step_030_createNewFilesViaIngest(){
* (1) the dataset was empty
* (2) the new file (or new file unzipped) did not ingest via "createDataFiles"
*/
- if (newFileList.isEmpty()){
+ if (initialFileList.isEmpty()){
this.addErrorSevere("Sorry! An error occurred and the new file was not added.");
return false;
}
- if (!this.run_auto_step_040_checkForDuplicates()){
+ if (!this.step_040_auto_checkForDuplicates()){
return false;
}
- return this.run_auto_step_045_checkForFileReplaceDuplicate();
+ return this.step_045_auto_checkForFileReplaceDuplicate();
}
/**
@@ -592,22 +594,22 @@ private boolean step_030_createNewFilesViaIngest(){
*
* @return
*/
- private boolean run_auto_step_040_checkForDuplicates(){
+ private boolean step_040_auto_checkForDuplicates(){
- msgt("run_auto_step_040_checkForDuplicates");
+ msgt("step_040_auto_checkForDuplicates");
if (this.hasError()){
return false;
}
// Double checked -- this check also happens in step 30
//
- if (newFileList.isEmpty()){
+ if (initialFileList.isEmpty()){
this.addErrorSevere("Sorry! An error occurred and the new file was not added.");
return false;
}
// Initialize new file list
- this.filesToAdd = new ArrayList();
+ this.finalFileList = new ArrayList();
String warningMessage = null;
@@ -615,7 +617,7 @@ private boolean run_auto_step_040_checkForDuplicates(){
// -----------------------------------------------------------
// Iterate through the recently ingest files
// -----------------------------------------------------------
- for (DataFile df : newFileList){
+ for (DataFile df : initialFileList){
msg("Checking file: " + df.getFileMetadata().getLabel());
// -----------------------------------------------------------
@@ -636,31 +638,32 @@ private boolean run_auto_step_040_checkForDuplicates(){
if (DuplicateFileChecker.isDuplicateOriginalWay(workingVersion, df.getFileMetadata())){
String dupeName = df.getFileMetadata().getLabel();
- removeLinkedFileFromDataset(dataset, df);
+ removeUnSavedFilesFromWorkingVersion();
+ //removeLinkedFileFromDataset(dataset, df);
//abandonOperationRemoveAllNewFilesFromDataset();
this.addErrorSevere("This file has a duplicate already in the dataset: " + dupeName);
}else{
- filesToAdd.add(df);
+ finalFileList.add(df);
}
}
if (this.hasError()){
- filesToAdd.clear();
+ finalFileList.clear();
return false;
}
return true;
- } // end run_auto_step_040_checkForDuplicates
+ } // end step_040_auto_checkForDuplicates
/**
* This is always checked.
*
* For ADD: If there is not replacement file, then the check is considered a success
- * For REPLACE: The checksum is examined against the "filesToAdd" list
+ * For REPLACE: The checksum is examined against the "finalFileList" list
*
*/
- private boolean run_auto_step_045_checkForFileReplaceDuplicate(){
+ private boolean step_045_auto_checkForFileReplaceDuplicate(){
if (this.hasError()){
return false;
@@ -673,7 +676,7 @@ private boolean run_auto_step_045_checkForFileReplaceDuplicate(){
}
- if (filesToAdd.isEmpty()){
+ if (finalFileList.isEmpty()){
// This error shouldn't happen if steps called in sequence....
this.addErrorSevere("There are no files to add. (This error shouldn't happen if steps called in sequence....checkForFileReplaceDuplicate)");
return false;
@@ -686,7 +689,7 @@ private boolean run_auto_step_045_checkForFileReplaceDuplicate(){
return false;
}
- for (DataFile df : filesToAdd){
+ for (DataFile df : finalFileList){
if (df.getCheckSum() == fileToReplace.getCheckSum()){
this.addError("The new file,\"" + df.getFileMetadata().getLabel()
@@ -701,20 +704,8 @@ private boolean run_auto_step_045_checkForFileReplaceDuplicate(){
return true;
- } // end run_auto_step_045_checkForFileReplaceDuplicate
-
+ } // end step_045_auto_checkForFileReplaceDuplicate
- private boolean abandonOperationRemoveAllNewFilesFromDataset(){
-
- if (filesToAdd.isEmpty()){
- return true;
- }
-
- for (DataFile df : filesToAdd){
- this.removeLinkedFileFromDataset(dataset, df); // Is this correct, if multiple files, shouldn't they all be removed?
- }
- return true;
- }
private boolean step_050_checkForConstraintViolations(){
@@ -723,7 +714,7 @@ private boolean step_050_checkForConstraintViolations(){
return false;
}
- if (filesToAdd.isEmpty()){
+ if (finalFileList.isEmpty()){
// This error shouldn't happen if steps called in sequence....
this.addErrorSevere("There are no files to add. (This error shouldn't happen if steps called in sequence....)");
return false;
@@ -760,13 +751,13 @@ private boolean step_060_addFilesViaIngestService(){
return false;
}
- if (filesToAdd.isEmpty()){
+ if (finalFileList.isEmpty()){
// This error shouldn't happen if steps called in sequence....
this.addErrorSevere("There are no files to add. (This error shouldn't happen if steps called in sequence....)");
return false;
}
- ingestService.addFiles(workingVersion, filesToAdd);
+ ingestService.addFiles(workingVersion, finalFileList);
return true;
}
@@ -802,59 +793,83 @@ private boolean step_070_run_update_dataset_command(){
}
- private boolean step_auto_085_delete_file_to_replace_from_working_version(){
+ /**
+ * Go through the working DatasetVersion and remove the
+ * FileMetadata of the file to replace
+ *
+ * @return
+ */
+ private boolean step_085_auto_remove_filemetadata_to_replace_from_working_version(){
- msgt("step_auto_085_delete_file_to_replace_from_working_version 1");
+ msgt("step_085_auto_remove_filemetadata_to_replace_from_working_version 1");
if (!isFileReplaceOperation()){
// Shouldn't happen!
- this.addErrorSevere("This should ONLY be called for file replace operations!! (step_auto_085_delete_file_to_replace_from_working_version");
+ this.addErrorSevere("This should ONLY be called for file replace operations!! (step_085_auto_remove_filemetadata_to_replace_from_working_version");
return false;
}
- msg("step_auto_085_delete_file_to_replace_from_working_version 1");
+ msg("step_085_auto_remove_filemetadata_to_replace_from_working_version 1");
if (this.hasError()){
return false;
}
- msg("step_auto_085_delete_file_to_replace_from_working_version 2");
-
- // 2. delete the filemetadata from the version:
- //fmit = dataset.getEditVersion().getFileMetadatas().iterator();
- Iterator fmit = workingVersion.getFileMetadatas().iterator();
- msg("step_auto_085_delete_file_to_replace_from_working_version 3");
- msg("-------------------------");
- msg("File to replace getId: " + fileToReplace.getId());
- msg("File to replace getCheckSum: " + fileToReplace.getCheckSum());
- msg("File to replace getFileMetadata: " + fileToReplace.getFileMetadata());
- msg("File to replace getLabel: " + fileToReplace.getFileMetadata().getLabel());
- msg("-------------------------");
-
-
- while (fmit.hasNext()) {
- msg("-------------------------");
- msg("step_auto_085_delete_file_to_replace_from_working_version 4");
- FileMetadata fmd = (FileMetadata) fmit.next();
- msg(" ....getLabel: " + fmd.getLabel());
- msg(" ....getId: " + fmd.getId());
- msg(" ....getDataFile: " + fmd.getDataFile().toString());
- msg(" ....getDataFile id: " + fmd.getDataFile().getId());
- if (fmd.getId() != null){
- msg("step_auto_085_delete_file_to_replace_from_working_version 5");
- msg("fileToReplace.getStorageIdentifier: " + fileToReplace.getStorageIdentifier());
- msg("fmd.getDataFile().getStorageIdentifier(): " + fmd.getDataFile().getStorageIdentifier());
- if (fileToReplace.getStorageIdentifier().equals(fmd.getDataFile().getStorageIdentifier())) {
- msg("step_auto_085_delete_file_to_replace_from_working_version 6");
- fmit.remove();
- return true;
- }
+
+ msgt("File to replace getId: " + fileToReplace.getId());
+
+ Iterator fmIt = workingVersion.getFileMetadatas().iterator();
+ msgt("Clear file to replace");
+ int cnt = 0;
+ while (fmIt.hasNext()) {
+ cnt++;
+
+ FileMetadata fm = fmIt.next();
+ msg(cnt + ") next file: " + fm);
+ msg(" getDataFile().getId(): " + fm.getDataFile().getId());
+ if (fm.getDataFile().getId() != null){
+ if (Objects.equals(fm.getDataFile().getId(), fileToReplace.getId())){
+ msg("Let's remove it!");
+ fmIt.remove();
+ return true;
+ }
+ }
+ }
+ msg("No matches found!");
+ addErrorSevere("Unable to remove old file from new DatasetVersion");
+ removeUnSavedFilesFromWorkingVersion();
+ return false;
+ }
+
+ private boolean removeUnSavedFilesFromWorkingVersion(){
+ msgt("Clean up: removeUnSavedFilesFromWorkingVersion");
+
+ // -----------------------------------------------------------
+ // (1) Remove all new FileMetadata objects
+ // -----------------------------------------------------------
+ Iterator fmIt = workingVersion.getFileMetadatas().iterator(); //dataset.getEditVersion().getFileMetadatas().iterator();//
+ while (fmIt.hasNext()) {
+ FileMetadata fm = fmIt.next();
+ if (fm.getDataFile().getId() == null){
+ fmIt.remove();
+ }
+ }
+
+ // -----------------------------------------------------------
+ // (2) Remove all new DataFile objects
+ // -----------------------------------------------------------
+ Iterator dfIt = dataset.getFiles().iterator();
+ msgt("Clear Files");
+ while (dfIt.hasNext()) {
+ DataFile df = dfIt.next();
+ if (df.getId() == null){
+ dfIt.remove();
}
}
return true;
- //this.addErrorSevere("Could not find file to replace in the working DatasetVersion");
- //return false;
+
}
+
private boolean step_080_run_update_dataset_command_for_replace(){
if (!isFileReplaceOperation()){
@@ -870,7 +885,7 @@ private boolean step_080_run_update_dataset_command_for_replace(){
// -----------------------------------------------------------
// Remove the "fileToReplace" from the current working version
// -----------------------------------------------------------
- if (!step_auto_085_delete_file_to_replace_from_working_version()){
+ if (!step_085_auto_remove_filemetadata_to_replace_from_working_version()){
return false;
}
@@ -879,17 +894,25 @@ private boolean step_080_run_update_dataset_command_for_replace(){
// -----------------------------------------------------------
// Make list of files to delete -- e.g. the single "fileToReplace"
// -----------------------------------------------------------
- List filesToBeDeleted = new ArrayList();
- filesToBeDeleted.add(fileToReplace.getFileMetadata());
+ //List filesToBeDeleted = new ArrayList();
+ //filesToBeDeleted.add(fileToReplace.getFileMetadata());
- msg("step_080_run_update_dataset_command_for_replace 3");
+ //msg("step_080_run_update_dataset_command_for_replace 3");
// -----------------------------------------------------------
// Set the "root file ids" and "previous file ids"
+ // THIS IS A KEY STEP - SPLIT IT OUT
+ // (1) Old file: Set the Root File Id on the original file and save it
+ // (2) New file: Set the previousFileId to the id of the original file
+ // (3) New file: Set the rootFileId to the rootFileId of the original file
// -----------------------------------------------------------
- for (DataFile df : filesToAdd){
- df.setPreviousDataFileID(fileToReplace.getId());
+ if (fileToReplace.getRootDataFileId() == DataFile.ROOT_DATAFILE_ID_DEFAULT){
+ fileToReplace.setRootDataFileId(fileToReplace.getId());
+ fileToReplace = fileService.save(fileToReplace);
+ }
+ for (DataFile df : finalFileList){
+ df.setPreviousDataFileId(fileToReplace.getId());
df.setRootDataFileId(fileToReplace.getRootDataFileId());
}
@@ -897,7 +920,7 @@ private boolean step_080_run_update_dataset_command_for_replace(){
Command update_cmd;
- update_cmd = new UpdateDatasetCommand(dataset, dvRequest, filesToBeDeleted);
+ update_cmd = new UpdateDatasetCommand(dataset, dvRequest);
msg("step_080_run_update_dataset_command_for_replace 5");
@@ -939,7 +962,7 @@ private boolean step_100_startIngestJobs(){
// clear old file list
//
- filesToAdd.clear();
+ finalFileList.clear();
// start the ingest!
@@ -988,7 +1011,7 @@ private boolean removeLinkedFileFromDataset(Dataset dataset, DataFile dataFileTo
// -----------------------------------------------------------
// (1) Remove file from filemetadata list
// -----------------------------------------------------------
- Iterator fmIt = dataset.getEditVersion().getFileMetadatas().iterator();
+ Iterator fmIt = workingVersion.getFileMetadatas().iterator();
msgt("Clear FileMetadatas");
while (fmIt.hasNext()) {
FileMetadata fm = fmIt.next();
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
index 862c3d15beb..1666f7cd265 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
@@ -496,7 +496,7 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata) {
.add("UNF", df.getUnf())
.add("md5", df.getmd5())
.add("rootDataFileId", df.getRootDataFileId())
- .add("previousDataFileId", df.getPreviousDataFileID())
+ .add("previousDataFileId", df.getPreviousDataFileId())
.add("description", df.getDescription());
}
From 2b25de19ca978191afbfcbf4f960efe6e5b59f1a Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Wed, 28 Sep 2016 12:04:53 -0400
Subject: [PATCH 22/86] basic replace working. #2290
---
.../iq/dataverse/DataFileServiceBean.java | 2 +-
.../harvard/iq/dataverse/api/FileUpload.java | 12 ++-
.../datasetutility/AddReplaceFileHelper.java | 93 ++++++++++++++-----
3 files changed, 76 insertions(+), 31 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
index 5f1c1694daa..7afce6b9241 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
@@ -589,7 +589,7 @@ public void findFileMetadataOptimizedExperimental(Dataset owner, DatasetVersion
categoryMap.put(fileCategory.getId(), i++);
}
- logger.fine("Retreived "+i+" file categories attached to the dataset.");
+ logger.fine("Retrieved "+i+" file categories attached to the dataset.");
if (requestedVersion != null) {
requestedVersion.setFileMetadatas(retrieveFileMetadataForVersion(owner, requestedVersion, filesMap, categoryMap));
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
index 2ea9287c370..f2833b7e170 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
@@ -261,8 +261,8 @@ public Response hiReSave(@PathParam("fileId") Long fileId){
}
@GET
- @Path("add")
- public Response hi_add(){
+ @Path("add/{newFilename}")
+ public Response hi_add(@PathParam("newFilename") String newFilename){
// -------------------------------------
msgt("(1) getSampleFile()");
@@ -309,7 +309,7 @@ public Response hi_add(){
addFileHelper.runAddFile(selectedDataset,
- "blackbox.txt",
+ newFilename,
"text/plain",
testFileInputStream);
@@ -317,7 +317,7 @@ public Response hi_add(){
if (addFileHelper.hasError()){
return okResponse(addFileHelper.getErrorMessagesAsString("\n"));
}else{
- return okResponse("hey hey, it may have worked");
+ return okResponse("Look at that! You added a file! (hey hey, it may have worked)");
}
@@ -385,10 +385,12 @@ public Response hi_replace(@PathParam("oldFileId") Long oldFileId){
if (addFileHelper.hasError()){
return okResponse(addFileHelper.getErrorMessagesAsString("\n"));
}else{
- return okResponse("hey hey, it may have worked");
+ return okResponse("File was replaced! hey hey, it may have worked");
}
} // end call to "hi"
}
+
+
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index a48aa4e7be5..52cc4759c45 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -18,8 +18,11 @@
import edu.harvard.iq.dataverse.engine.command.Command;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetVersionCommand;
import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetCommand;
import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
+import edu.harvard.iq.dataverse.util.JsfHelper;
+import static edu.harvard.iq.dataverse.util.JsfHelper.JH;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
@@ -32,6 +35,7 @@
import javax.ejb.EJBException;
import javax.ejb.Stateless;
import javax.enterprise.context.SessionScoped;
+import javax.faces.application.FacesMessage;
import javax.inject.Named;
import javax.validation.ConstraintViolation;
@@ -509,11 +513,15 @@ private boolean step_025_loadFileToReplace(DataFile existingFile){
}
if (existingFile.getOwner() != this.dataset){
- String errMsg = "This file does not belong to the datset";
- addError(errMsg);
+ addError("This file does not belong to the datset");
return false;
}
+ if (!existingFile.isReleased()){
+ addError("You cannot replace an unpublished file. Please delete it instead of replacing it.");
+ return false;
+ }
+
fileToReplace = existingFile;
return true;
@@ -638,17 +646,22 @@ private boolean step_040_auto_checkForDuplicates(){
if (DuplicateFileChecker.isDuplicateOriginalWay(workingVersion, df.getFileMetadata())){
String dupeName = df.getFileMetadata().getLabel();
- removeUnSavedFilesFromWorkingVersion();
+ //removeUnSavedFilesFromWorkingVersion();
//removeLinkedFileFromDataset(dataset, df);
//abandonOperationRemoveAllNewFilesFromDataset();
- this.addErrorSevere("This file has a duplicate already in the dataset: " + dupeName);
+ this.addErrorSevere("This file has a duplicate already in the dataset: " + dupeName);
+ //return false;
}else{
finalFileList.add(df);
}
}
if (this.hasError()){
- finalFileList.clear();
+ // We're recovering from the duplicate check.
+ msg("We're recovering from a duplicate check 1");
+ runMajorCleanup();
+ msg("We're recovering from a duplicate check 2");
+ finalFileList.clear();
return false;
}
@@ -836,16 +849,46 @@ private boolean step_085_auto_remove_filemetadata_to_replace_from_working_versio
}
msg("No matches found!");
addErrorSevere("Unable to remove old file from new DatasetVersion");
- removeUnSavedFilesFromWorkingVersion();
+ runMajorCleanup();
return false;
}
+
+ private boolean runMajorCleanup(){
+
+ // (1) remove unsaved files from the working version
+ removeUnSavedFilesFromWorkingVersion();
+
+ // ----------------------------------------------------
+ // (2) if the working version is brand new, delete it
+ // It doesn't have an "id" so you can't use the DeleteDatasetVersionCommand
+ // ----------------------------------------------------
+ // Remove this working version from the dataset
+ Iterator versionIterator = dataset.getVersions().iterator();
+ msgt("Clear Files");
+ while (versionIterator.hasNext()) {
+ DatasetVersion dsv = versionIterator.next();
+ if (dsv.getId() == null){
+ versionIterator.remove();
+ }
+ }
+
+ return true;
+
+ }
+
+ /**
+ * We are outta here! Remove everything unsaved from the edit version!
+ *
+ * @return
+ */
private boolean removeUnSavedFilesFromWorkingVersion(){
msgt("Clean up: removeUnSavedFilesFromWorkingVersion");
// -----------------------------------------------------------
// (1) Remove all new FileMetadata objects
// -----------------------------------------------------------
+ //Iterator fmIt = dataset.getEditVersion().getFileMetadatas().iterator();//
Iterator fmIt = workingVersion.getFileMetadatas().iterator(); //dataset.getEditVersion().getFileMetadatas().iterator();//
while (fmIt.hasNext()) {
FileMetadata fm = fmIt.next();
@@ -881,7 +924,7 @@ private boolean step_080_run_update_dataset_command_for_replace(){
if (this.hasError()){
return false;
}
- msg("step_080_run_update_dataset_command_for_replace 1");
+
// -----------------------------------------------------------
// Remove the "fileToReplace" from the current working version
// -----------------------------------------------------------
@@ -889,15 +932,6 @@ private boolean step_080_run_update_dataset_command_for_replace(){
return false;
}
- msg("step_080_run_update_dataset_command_for_replace 2");
-
- // -----------------------------------------------------------
- // Make list of files to delete -- e.g. the single "fileToReplace"
- // -----------------------------------------------------------
- //List filesToBeDeleted = new ArrayList();
- //filesToBeDeleted.add(fileToReplace.getFileMetadata());
-
- //msg("step_080_run_update_dataset_command_for_replace 3");
// -----------------------------------------------------------
@@ -907,26 +941,29 @@ private boolean step_080_run_update_dataset_command_for_replace(){
// (2) New file: Set the previousFileId to the id of the original file
// (3) New file: Set the rootFileId to the rootFileId of the original file
// -----------------------------------------------------------
- if (fileToReplace.getRootDataFileId() == DataFile.ROOT_DATAFILE_ID_DEFAULT){
+ msgt("Root id check");
+ msg("file to replace 1: " + fileToReplace.getRootDataFileId());
+
+ if (fileToReplace.getRootDataFileId().equals(DataFile.ROOT_DATAFILE_ID_DEFAULT)){
fileToReplace.setRootDataFileId(fileToReplace.getId());
+ msg("file to replace 2: pre save " + fileToReplace.getRootDataFileId());
fileToReplace = fileService.save(fileToReplace);
+ msg("file to replace 3 post save: " + fileToReplace.getRootDataFileId());
}
- for (DataFile df : finalFileList){
+ for (DataFile df : finalFileList){
df.setPreviousDataFileId(fileToReplace.getId());
+
+ msg("file to replace 4 - update new file: " + fileToReplace.getRootDataFileId());
df.setRootDataFileId(fileToReplace.getRootDataFileId());
}
- msg("step_080_run_update_dataset_command_for_replace 4");
-
-
+
Command update_cmd;
update_cmd = new UpdateDatasetCommand(dataset, dvRequest);
- msg("step_080_run_update_dataset_command_for_replace 5");
((UpdateDatasetCommand) update_cmd).setValidateLenient(true);
- msg("step_080_run_update_dataset_command_for_replace 6");
try {
commandEngine.submit(update_cmd);
@@ -1036,7 +1073,7 @@ private boolean removeLinkedFileFromDataset(Dataset dataset, DataFile dataFileTo
msg("Got It! try to remove from iterator");
dfIt.remove();
- msg("it work");
+ msg("it worked");
break;
}else{
@@ -1119,4 +1156,10 @@ public String getFileName()
// - Add this new file.
// ....
-
\ No newline at end of file
+
+
+/*
+ 1) Recovery from adding same file and duplicate being found
+ - draft ok
+ - published version - nope
+*/
\ No newline at end of file
From ae0cadffb75535f6e63e2347ad11b71c7466c90c Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Wed, 28 Sep 2016 12:17:45 -0400
Subject: [PATCH 23/86] check for differing content types #2290. needs testing
---
.../datasetutility/AddReplaceFileHelper.java | 22 ++++++++++++++++---
1 file changed, 19 insertions(+), 3 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index 52cc4759c45..85d80a03ec3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -594,6 +594,7 @@ private boolean step_030_createNewFilesViaIngest(){
return false;
}
+
return this.step_045_auto_checkForFileReplaceDuplicate();
}
@@ -704,15 +705,30 @@ private boolean step_045_auto_checkForFileReplaceDuplicate(){
for (DataFile df : finalFileList){
- if (df.getCheckSum() == fileToReplace.getCheckSum()){
+ if (Objects.equals(df.getCheckSum(), fileToReplace.getCheckSum())){
this.addError("The new file,\"" + df.getFileMetadata().getLabel()
+ "\" has the same content as the replacment file, \""
+ fileToReplace.getFileMetadata().getLabel() + "\" .");
- removeLinkedFileFromDataset(dataset, df); // Is this correct, if multiple files added in case of .shp or .zip, shouldn't they all be removed?
+// removeLinkedFileFromDataset(dataset, df); // Is this correct, if multiple files added in case of .shp or .zip, shouldn't they all be removed?
+ //this.abandonOperationRemoveAllNewFilesFromDataset(); // Is this correct, if multiple files, shouldn't they all be removed?
+ }
+
+ // This should be able to be overridden --force
+ if (!df.getContentType().equalsIgnoreCase(fileToReplace.getContentType())){
+ this.addError("Warning! Different content type. The new file,\"" + df.getFileMetadata().getLabel()
+ + "\" has content type [" + df.getContentType() + "] while the replacment file, \""
+ + fileToReplace.getFileMetadata().getLabel() + "\" has content type: [" + fileToReplace.getContentType() + "]");
+
+ // removeLinkedFileFromDataset(dataset, df); // Is this correct, if multiple files added in case of .shp or .zip, shouldn't they all be removed?
//this.abandonOperationRemoveAllNewFilesFromDataset(); // Is this correct, if multiple files, shouldn't they all be removed?
- return false;
}
+
+ }
+
+ if (hasError()){
+ runMajorCleanup();
+ return false;
}
return true;
From b399d497df58c87631720764749778ca9037eb62 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Wed, 28 Sep 2016 12:57:07 -0400
Subject: [PATCH 24/86] error check for running a replace on a file that is not
in the most recently published Dataset--e.g. was deleted. #2290
---
.../datasetutility/AddReplaceFileHelper.java | 35 +++++++++++++++++++
1 file changed, 35 insertions(+)
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index 85d80a03ec3..2deee78df1a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -522,6 +522,7 @@ private boolean step_025_loadFileToReplace(DataFile existingFile){
return false;
}
+
fileToReplace = existingFile;
return true;
@@ -567,6 +568,10 @@ private boolean step_030_createNewFilesViaIngest(){
// Load the working version of the Dataset
workingVersion = dataset.getEditVersion();
+ if (!step_035_auto_isReplacementInLatestVersion()){
+ return false;
+ }
+
try {
initialFileList = ingestService.createDataFiles(workingVersion,
this.newFileInputStream,
@@ -598,6 +603,36 @@ private boolean step_030_createNewFilesViaIngest(){
return this.step_045_auto_checkForFileReplaceDuplicate();
}
+ /**
+ * Make sure the file to replace is in the workingVersion
+ * -- e.g. that it wasn't deleted from a previous Version
+ *
+ * @return
+ */
+ private boolean step_035_auto_isReplacementInLatestVersion(){
+
+ if (this.hasError()){
+ return false;
+ }
+ if (!this.isFileReplaceOperation()){
+ return true;
+ }
+
+ boolean fileInLatestVersion = false;
+ for (FileMetadata fm : workingVersion.getFileMetadatas()){
+ if (fm.getDataFile().getId() != null){
+ if (Objects.equals(fileToReplace.getId(),fm.getDataFile().getId())){
+ fileInLatestVersion = true;
+ }
+ }
+ }
+ if (!fileInLatestVersion){
+ addError("You cannot replace a file that is not in the most recently published Dataset.");
+ return false;
+ }
+ return true;
+ }
+
/**
* This is always run after step 30
*
From abaab775df706ca8ecab1e819ab092a85edf014b Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Thu, 29 Sep 2016 10:43:01 -0400
Subject: [PATCH 25/86] DatasetPage.java was missing
---
.../edu/harvard/iq/dataverse/DatasetPage.java | 15 +++++++++++++++
1 file changed, 15 insertions(+)
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
index 2045f63be83..37b79e0c516 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
@@ -1049,6 +1049,21 @@ private void loadWorldMapPermissionHelper() {
}
+ /**
+ * Using a DataFile id, retrieve an associated MapLayerMetadata object
+ *
+ * The MapLayerMetadata objects have been fetched at page inception by
+ * "loadMapLayerMetadataLookup()"
+ */
+ public MapLayerMetadata getMapLayerMetadata(DataFile df) {
+ if (df == null) {
+ return null;
+ }
+ return this.worldMapPermissionHelper.getMapLayerMetadata(df);
+ }
+
+
+
/**
*
* WARNING: Check if the user has file download permission
From 0482e11914e2ed1db21ebd01878f70188d538d6a Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Thu, 29 Sep 2016 12:55:01 -0400
Subject: [PATCH 26/86] Starting to move error messages to bundles. Prep for
rest assured test. #2290
---
src/main/java/Bundle.properties | 9 ++
.../harvard/iq/dataverse/api/FileUpload.java | 85 +++++++++++++++++
.../datasetutility/AddReplaceFileHelper.java | 91 ++++++++++++++-----
3 files changed, 163 insertions(+), 22 deletions(-)
diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties
index 2ca4fd5de66..fa01cfab12e 100755
--- a/src/main/java/Bundle.properties
+++ b/src/main/java/Bundle.properties
@@ -1430,3 +1430,12 @@ citationFrame.banner.message.here=here
citationFrame.banner.closeIcon=Close this message, go to dataset
citationFrame.banner.countdownMessage= This message will close in
citationFrame.banner.countdownMessage.seconds=seconds
+
+
+file.addreplace.error.dataset_is_null=The dataset cannot be null.
+file.addreplace.error.dataset_id_is_null=The dataset ID cannot be null.
+file.addreplace.error.dataset_id_not_found=There was no dataset found for id:
+file.addreplace.error.no_edit_dataset_permission=You do not have permission to edit this dataset.
+file.addreplace.error.filename_is_null=The fileName cannot be null.
+file.addreplace.error.file_content_type_is_null=The file content type cannot be null.
+file.addreplace.error.file_input_stream_is_null=The file upload cannot be null.
\ No newline at end of file
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
index f2833b7e170..46d08af2477 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
@@ -51,6 +51,7 @@
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
+import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Response;
import org.mindrot.jbcrypt.BCrypt;
@@ -324,6 +325,90 @@ public Response hi_add(@PathParam("newFilename") String newFilename){
} // end call to "hi"
+ /**
+ * Used for RestAssured testing until multipart form available
+ * @param datasetId - dataset to add files
+ * @param existingFileName test file in directory "scripts/search/data/binary/"
+ * @param fileContentType
+ * @param fileName
+ * @param fileToReplaceId
+ * @return
+ */
+ @GET
+ @Path("addTest1")
+ public Response testAddReplace(@QueryParam("datasetId") Long datasetId,
+ @QueryParam("loadById") Boolean loadById,
+ @QueryParam("existingFileName") String existingFileName,
+ @QueryParam("newFileContentType") String newFileContentType,
+ @QueryParam("newFileName") String newFileName,
+ @QueryParam("fileToReplaceId") Long fileToReplaceId){
+
+ if (loadById==null){
+ loadById = false;
+ }
+
+ // -------------------------------------
+ msgt("(1) Get User from API token");
+ // -------------------------------------
+ User authUser;
+ try {
+ authUser = this.findUserOrDie();
+ } catch (WrappedResponse ex) {
+ return okResponse("Couldn't find a user from the API key");
+ }
+ //authSvc.findByID(new Long(1));
+ msg("authUser: " + authUser);
+ msg("getUserIdentifier: " + authUser.getIdentifier());
+
+ // -------------------------------------
+ msgt("(2) createDataverseRequest");
+ // -------------------------------------
+ DataverseRequest dvRequest2 = createDataverseRequest(authUser);
+ AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest2,
+ this.ingestService,
+ this.datasetService,
+ this.fileService,
+ this.permissionSvc,
+ this.commandEngine);
+
+
+ // -------------------------------------
+ msgt("(3) send Params, including nulls");
+ // -------------------------------------
+ InputStream testFileInputStream = getSampleFile();
+ if (testFileInputStream == null){
+ return okResponse("Couldn't find the file!!");
+ }
+
+ if (loadById){
+ addFileHelper.runAddFileByDatasetId(datasetId,
+ newFileName,
+ newFileContentType,
+ testFileInputStream);
+
+ }else{
+ Dataset selectedDataset = null;
+ if (datasetId != null){
+ selectedDataset = datasetService.find(datasetId);
+ }
+ addFileHelper.runAddFile(selectedDataset,
+ newFileName,
+ newFileContentType,
+ testFileInputStream);
+ }
+
+ if (addFileHelper.hasError()){
+ return okResponse(addFileHelper.getErrorMessagesAsString("\n"));
+ }else{
+ return okResponse("Look at that! You added a file! (hey hey, it may have worked)");
+ }
+
+ //return okResponse("in progress2");
+
+ }
+
+
+
@GET
@Path("replace/{oldFileId}")
public Response hi_replace(@PathParam("oldFileId") Long oldFileId){
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index 2deee78df1a..b855e17f64c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -29,6 +29,7 @@
import java.util.Iterator;
import java.util.List;
import java.util.Objects;
+import java.util.ResourceBundle;
import java.util.Set;
import java.util.logging.Logger;
import javax.ejb.EJB;
@@ -155,6 +156,21 @@ public AddReplaceFileHelper(DataverseRequest dvRequest,
}
+ public boolean runAddFileByDatasetId(Long datasetId, String newFileName, String newFileContentType, InputStream newFileInputStream){
+
+ msgt(">> runAddFileByDatasetId");
+
+ initErrorHandling();
+ this.currentOperation = FILE_ADD_OPERATION;
+
+ if (!this.step_001_loadDatasetById(datasetId)){
+ return false;
+ }
+
+ return this.runAddFile(this.dataset, newFileName, newFileContentType, newFileInputStream);
+ }
+
+
/**
* After the constructor, this method is called to add a file
*
@@ -166,13 +182,16 @@ public AddReplaceFileHelper(DataverseRequest dvRequest,
*/
public boolean runAddFile(Dataset dataset, String newFileName, String newFileContentType, InputStream newFileInputStream){
msgt(">> runAddFile");
-
+ if (this.hasError()){
+ return false;
+ }
this.currentOperation = FILE_ADD_OPERATION;
return this.runAddReplaceFile(dataset, newFileName, newFileContentType, newFileInputStream, null);
}
+
/**
* After the constructor, this method is called to replace a file
*
@@ -379,8 +398,49 @@ public String getErrorMessagesAsString(String joinString){
return String.join(joinString, this.errorMessages);
}
+
+
+ /**
+ * Convenience method for getting bundle properties
+ *
+ * @param msgName
+ * @return
+ */
+ private String getBundleMsg(String msgName, boolean isErr){
+ if (msgName == null){
+ throw new NullPointerException("msgName cannot be null");
+ }
+ if (isErr){
+ return ResourceBundle.getBundle("Bundle").getString("file.addreplace.error." + msgName);
+ }else{
+ return ResourceBundle.getBundle("Bundle").getString("file.addreplace.success." + msgName);
+ }
+
+ }
+
+ /**
+ * Convenience method for getting bundle error message
+ *
+ * @param msgName
+ * @return
+ */
+ private String getBundleErr(String msgName){
+ return this.getBundleMsg(msgName, true);
+ }
+
+ /**
+ * Convenience method for getting bundle success message
+ *
+ * @param msgName
+ * @return
+ */
+ private String getBundleSuccess(String msgName){
+ return this.getBundleMsg(msgName, false);
+ }
- /**
+
+
+ /**
*
*/
private boolean step_001_loadDataset(Dataset selectedDataset){
@@ -390,7 +450,7 @@ private boolean step_001_loadDataset(Dataset selectedDataset){
}
if (selectedDataset == null){
- this.addErrorSevere("The dataset cannot be null");
+ this.addErrorSevere(getBundleErr("dataset_is_null"));
return false;
}
@@ -398,7 +458,6 @@ private boolean step_001_loadDataset(Dataset selectedDataset){
return true;
}
-
/**
*
@@ -410,13 +469,13 @@ private boolean step_001_loadDatasetById(Long datasetId){
}
if (datasetId == null){
- this.addErrorSevere("The datasetId cannot be null");
+ this.addErrorSevere(getBundleErr("dataset_id_is_null"));
return false;
}
Dataset yeDataset = datasetService.find(datasetId);
if (yeDataset == null){
- this.addError("There was no dataset found for id: " + datasetId);
+ this.addError(getBundleErr("dataset_id_not_found") + " " + datasetId);
return false;
}
@@ -445,8 +504,7 @@ private boolean step_010_VerifyUserAndPermissions(){
msg("permissionService:" + permissionService.toString());
if (!permissionService.request(dvRequest).on(dataset).has(Permission.EditDataset)){
- String errMsg = "You do not have permission to this dataset.";
- addError(errMsg);
+ addError(getBundleErr("no_edit_dataset_permission"));
return false;
}
return true;
@@ -461,30 +519,19 @@ private boolean step_020_loadNewFile(String fileName, String fileContentType, In
}
if (fileName == null){
- String errMsg = "The fileName cannot be null.";
- this.addErrorSevere(errMsg);
+ this.addErrorSevere(getBundleErr("filename_is_null"));
return false;
}
if (fileContentType == null){
- String errMsg = "The fileContentType cannot be null.";
- this.addErrorSevere(errMsg);
- return false;
-
- }
-
- if (fileName == null){
- String errMsg = "The fileName cannot be null.";
- this.addErrorSevere(errMsg);
+ this.addErrorSevere(getBundleErr("file_content_type_is_null"));
return false;
}
-
if (fileInputStream == null){
- String errMsg = "The fileInputStream cannot be null.";
- this.addErrorSevere(errMsg);
+ this.addErrorSevere(getBundleErr("file_input_stream_is_null"));
return false;
}
From 51ffa6293e7af4f5a739988cfe0fba6ad998a4ac Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Mon, 3 Oct 2016 13:38:18 -0400
Subject: [PATCH 27/86] #3387 #2290 moved add/replace error messages to
bundles. api endpoint for tests (to fake actual api upload)
---
src/main/java/Bundle.properties | 20 ++++++-
.../harvard/iq/dataverse/api/FileUpload.java | 49 +++++++++++++----
.../datasetutility/AddReplaceFileHelper.java | 53 ++++++++++---------
3 files changed, 83 insertions(+), 39 deletions(-)
diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties
index fa01cfab12e..763c38948ac 100755
--- a/src/main/java/Bundle.properties
+++ b/src/main/java/Bundle.properties
@@ -1431,11 +1431,27 @@ citationFrame.banner.closeIcon=Close this message, go to dataset
citationFrame.banner.countdownMessage= This message will close in
citationFrame.banner.countdownMessage.seconds=seconds
-
+# File Add/Replace operation messages
file.addreplace.error.dataset_is_null=The dataset cannot be null.
file.addreplace.error.dataset_id_is_null=The dataset ID cannot be null.
file.addreplace.error.dataset_id_not_found=There was no dataset found for id:
file.addreplace.error.no_edit_dataset_permission=You do not have permission to edit this dataset.
file.addreplace.error.filename_is_null=The fileName cannot be null.
file.addreplace.error.file_content_type_is_null=The file content type cannot be null.
-file.addreplace.error.file_input_stream_is_null=The file upload cannot be null.
\ No newline at end of file
+file.addreplace.error.file_input_stream_is_null=The file upload cannot be null.
+file.addreplace.error.duplicate_file=This file has a duplicate already in the dataset:
+file.addreplace.error.existing_file_to_replace_id_is_null=The id of the existing file to replace cannot be null
+file.addreplace.error.existing_file_to_replace_not_found_by_id=Replacement file not found. There was no file found for id:
+file.addreplace.error.existing_file_to_replace_is_null=The existing file to replace cannot be null
+file.addreplace.error.existing_file_to_replace_not_in_dataset=The existing file to replace does not belong to this dataset
+file.addreplace.error.existing_file_not_in_latest_published_version=You cannot replace a file that is not in the most recently published Dataset. (It was deleted from a previous version.)
+file.addreplace.error.unpublished_file_cannot_be_replaced=You cannot replace an unpublished file. Please delete it instead of replacing it.
+file.addreplace.error.ingest_create_file_err=There was an error when trying to add the new file.
+file.addreplace.error.initial_file_list_empty=Sorry! An error occurred and the new file was not added.
+file.addreplace.error.final_file_list_empty=There are no files to add. (This error should not happen if steps called in sequence....)
+file.addreplace.error.only_replace_operation=This should ONLY be called for file replace operations!!
+file.addreplace.error.failed_to_remove_old_file_from_dataset=Unable to remove old file from new DatasetVersion.
+file.addreplace.error.add.command_engine_error=Failed to update the dataset. Please contact the administrator. (CommandException)
+file.addreplace.error.add.ejb_exception=Failed to update the dataset. Please contact the administrator. (EJBException)
+file.addreplace.error.replace.command_engine_error=Failed to update the dataset. Please contact the administrator. (CommandException)
+file.addreplace.error.replace.ejb_exception=Failed to update the dataset. Please contact the administrator. (EJBException)
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
index 46d08af2477..036b9af0f1d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
@@ -336,16 +336,24 @@ public Response hi_add(@PathParam("newFilename") String newFilename){
*/
@GET
@Path("addTest1")
- public Response testAddReplace(@QueryParam("datasetId") Long datasetId,
+ public Response testAddReplace(@QueryParam("replaceOperation") Boolean replaceOperation,
+ @QueryParam("datasetId") Long datasetId,
@QueryParam("loadById") Boolean loadById,
@QueryParam("existingFileName") String existingFileName,
@QueryParam("newFileContentType") String newFileContentType,
@QueryParam("newFileName") String newFileName,
- @QueryParam("fileToReplaceId") Long fileToReplaceId){
+ @QueryParam("fileToReplaceId") Long fileToReplaceId,
+ @QueryParam("badStreamTest") Boolean badStreamTest){
if (loadById==null){
loadById = false;
}
+ if (badStreamTest==null){
+ badStreamTest = false;
+ }
+ if (replaceOperation == null){
+ replaceOperation = false;
+ }
// -------------------------------------
msgt("(1) Get User from API token");
@@ -374,10 +382,15 @@ public Response testAddReplace(@QueryParam("datasetId") Long datasetId,
// -------------------------------------
msgt("(3) send Params, including nulls");
- // -------------------------------------
- InputStream testFileInputStream = getSampleFile();
- if (testFileInputStream == null){
- return okResponse("Couldn't find the file!!");
+ // -------------------------------------
+ InputStream testFileInputStream;
+ if (badStreamTest){
+ testFileInputStream = null;
+ }else{
+ testFileInputStream = getSampleFile();
+ if (testFileInputStream == null){
+ return okResponse("Couldn't find the file!!");
+ }
}
if (loadById){
@@ -391,12 +404,26 @@ public Response testAddReplace(@QueryParam("datasetId") Long datasetId,
if (datasetId != null){
selectedDataset = datasetService.find(datasetId);
}
- addFileHelper.runAddFile(selectedDataset,
- newFileName,
- newFileContentType,
- testFileInputStream);
- }
+
+ if (replaceOperation){
+ msg("Test REPLACE operation");
+ // Replace operation
+ addFileHelper.runReplaceFile(selectedDataset,
+ newFileName,
+ newFileContentType,
+ testFileInputStream,
+ fileToReplaceId);
+
+ }else{
+ msg("Test ADD operation");
+ // Add operation
+ addFileHelper.runAddFile(selectedDataset,
+ newFileName,
+ newFileContentType,
+ testFileInputStream);
+ }
+ }
if (addFileHelper.hasError()){
return okResponse(addFileHelper.getErrorMessagesAsString("\n"));
}else{
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index b855e17f64c..a05f528c166 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -203,11 +203,13 @@ public boolean runAddFile(Dataset dataset, String newFileName, String newFileCon
*/
public boolean runReplaceFile(Dataset dataset, String newFileName, String newFileContentType, InputStream newFileInputStream, Long oldFileId){
- if (oldFileId==null){
- throw new NullPointerException("For a replace operation, oldFileId cannot be null");
- }
msgt(">> runReplaceFile");
this.currentOperation = FILE_REPLACE_OPERATION;
+
+ if (oldFileId==null){
+ this.addErrorSevere(getBundleErr("existing_file_to_replace_id_is_null"));
+ return false;
+ }
return this.runAddReplaceFile(dataset, newFileName, newFileContentType, newFileInputStream, oldFileId);
}
@@ -555,17 +557,17 @@ private boolean step_025_loadFileToReplace(DataFile existingFile){
}
if (existingFile == null){
- this.addErrorSevere("The existingFile to replace cannot be null");
+ this.addErrorSevere(getBundleErr("existing_file_to_replace_is_null"));
return false;
}
if (existingFile.getOwner() != this.dataset){
- addError("This file does not belong to the datset");
+ addError(getBundleErr("existing_file_to_replace_not_in_dataset"));
return false;
}
if (!existingFile.isReleased()){
- addError("You cannot replace an unpublished file. Please delete it instead of replacing it.");
+ addError(getBundleErr("unpublished_file_cannot_be_replaced"));
return false;
}
@@ -592,13 +594,13 @@ private boolean step_025_loadFileToReplaceById(Long dataFileId){
// a NullPointerException
//
if (dataFileId == null){
- this.addError("The dataFileId cannot be null");
+ this.addErrorSevere(getBundleErr("existing_file_to_replace_id_is_null"));
return false;
}
DataFile existingFile = fileService.find(dataFileId);
if (existingFile == null){
- this.addError("Replacement file not found. There was no file found for id: " + dataFileId);
+ this.addError(getBundleErr("existing_file_to_replace_not_found_by_id") + " " + dataFileId);
return false;
}
@@ -625,8 +627,7 @@ private boolean step_030_createNewFilesViaIngest(){
this.newFileName,
this.newFileContentType);
} catch (IOException ex) {
- String errMsg = "There was an error when trying to add the new file.";
- this.addErrorSevere(errMsg);
+ this.addErrorSevere(getBundleErr("ingest_create_file_err"));
logger.severe(ex.toString());
return false;
}
@@ -638,7 +639,7 @@ private boolean step_030_createNewFilesViaIngest(){
* (2) the new file (or new file unzipped) did not ingest via "createDataFiles"
*/
if (initialFileList.isEmpty()){
- this.addErrorSevere("Sorry! An error occurred and the new file was not added.");
+ this.addErrorSevere(getBundleErr("initial_file_list_empty"));
return false;
}
@@ -674,7 +675,7 @@ private boolean step_035_auto_isReplacementInLatestVersion(){
}
}
if (!fileInLatestVersion){
- addError("You cannot replace a file that is not in the most recently published Dataset.");
+ addError(getBundleErr("existing_file_not_in_latest_published_version"));
return false;
}
return true;
@@ -695,7 +696,7 @@ private boolean step_040_auto_checkForDuplicates(){
// Double checked -- this check also happens in step 30
//
if (initialFileList.isEmpty()){
- this.addErrorSevere("Sorry! An error occurred and the new file was not added.");
+ this.addErrorSevere(getBundleErr("initial_file_list_empty"));
return false;
}
@@ -732,7 +733,7 @@ private boolean step_040_auto_checkForDuplicates(){
//removeUnSavedFilesFromWorkingVersion();
//removeLinkedFileFromDataset(dataset, df);
//abandonOperationRemoveAllNewFilesFromDataset();
- this.addErrorSevere("This file has a duplicate already in the dataset: " + dupeName);
+ this.addErrorSevere(getBundleErr("duplicate_file") + " " + dupeName);
//return false;
}else{
finalFileList.add(df);
@@ -781,7 +782,7 @@ private boolean step_045_auto_checkForFileReplaceDuplicate(){
if (this.fileToReplace == null){
// This error shouldn't happen if steps called correctly
- this.addErrorSevere("The fileToReplace cannot be null. (This error shouldn't happen if steps called in sequence....checkForFileReplaceDuplicate)");
+ this.addErrorSevere(getBundleErr("existing_file_to_replace_is_null") + " (This error shouldn't happen if steps called in sequence....checkForFileReplaceDuplicate)");
return false;
}
@@ -827,7 +828,7 @@ private boolean step_050_checkForConstraintViolations(){
if (finalFileList.isEmpty()){
// This error shouldn't happen if steps called in sequence....
- this.addErrorSevere("There are no files to add. (This error shouldn't happen if steps called in sequence....)");
+ this.addErrorSevere(getBundleErr("final_file_list_empty"));
return false;
}
@@ -864,7 +865,7 @@ private boolean step_060_addFilesViaIngestService(){
if (finalFileList.isEmpty()){
// This error shouldn't happen if steps called in sequence....
- this.addErrorSevere("There are no files to add. (This error shouldn't happen if steps called in sequence....)");
+ this.addErrorSevere(getBundleErr("final_file_list_empty"));
return false;
}
@@ -892,11 +893,11 @@ private boolean step_070_run_update_dataset_command(){
try {
commandEngine.submit(update_cmd);
} catch (CommandException ex) {
- this.addErrorSevere("Failed to update the dataset. Please contact the administrator");
+ this.addErrorSevere(getBundleErr("add.command_engine_error"));
logger.severe(ex.getMessage());
return false;
}catch (EJBException ex) {
- this.addErrorSevere("Failed to update the dataset. Please contact the administrator");
+ this.addErrorSevere(getBundleErr("add.ejb_exception"));
logger.severe(ex.getMessage());
return false;
}
@@ -916,7 +917,7 @@ private boolean step_085_auto_remove_filemetadata_to_replace_from_working_versio
if (!isFileReplaceOperation()){
// Shouldn't happen!
- this.addErrorSevere("This should ONLY be called for file replace operations!! (step_085_auto_remove_filemetadata_to_replace_from_working_version");
+ this.addErrorSevere(getBundleErr("only_replace_operation") + " (step_085_auto_remove_filemetadata_to_replace_from_working_version)");
return false;
}
msg("step_085_auto_remove_filemetadata_to_replace_from_working_version 1");
@@ -946,7 +947,7 @@ private boolean step_085_auto_remove_filemetadata_to_replace_from_working_versio
}
}
msg("No matches found!");
- addErrorSevere("Unable to remove old file from new DatasetVersion");
+ addErrorSevere(getBundleErr("failed_to_remove_old_file_from_dataset"));
runMajorCleanup();
return false;
}
@@ -1015,7 +1016,7 @@ private boolean step_080_run_update_dataset_command_for_replace(){
if (!isFileReplaceOperation()){
// Shouldn't happen!
- this.addErrorSevere("This should ONLY be called for file replace operations!! (step_080_run_update_dataset_command_for_replace");
+ this.addErrorSevere(getBundleErr("only_replace_operation") + " (step_080_run_update_dataset_command_for_replace)");
return false;
}
@@ -1066,11 +1067,11 @@ private boolean step_080_run_update_dataset_command_for_replace(){
try {
commandEngine.submit(update_cmd);
} catch (CommandException ex) {
- this.addErrorSevere("Failed to update the dataset. Please contact the administrator");
+ this.addErrorSevere(getBundleErr("replace.command_engine_error"));
logger.severe(ex.getMessage());
return false;
}catch (EJBException ex) {
- this.addErrorSevere("Failed to update the dataset. Please contact the administrator");
+ this.addErrorSevere(getBundleErr("replace.ejb_exception"));
logger.severe(ex.getMessage());
return false;
}
@@ -1134,12 +1135,12 @@ private void msgt(String m){
private boolean removeLinkedFileFromDataset(Dataset dataset, DataFile dataFileToRemove){
if (dataset==null){
- this.addErrorSevere("dataset cannot be null in removeLinkedFileFromDataset");
+ this.addErrorSevere(getBundleErr("remove_linked_file.dataset"));
return false;
}
if (dataFileToRemove==null){
- this.addErrorSevere("dataFileToRemove cannot be null in removeLinkedFileFromDataset");
+ this.addErrorSevere(getBundleErr("remove_linked_file.file"));
return false;
}
From fe9c0cfd6e7d1c047d2b0da4863edb357a23d0e9 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Mon, 3 Oct 2016 13:44:00 -0400
Subject: [PATCH 28/86] Similar. #2290 #3387 - two additional error messages
moved to the bundles
---
src/main/java/Bundle.properties | 4 ++++
.../datasetutility/AddReplaceFileHelper.java | 17 +++++------------
2 files changed, 9 insertions(+), 12 deletions(-)
diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties
index 763c38948ac..e07c10cc112 100755
--- a/src/main/java/Bundle.properties
+++ b/src/main/java/Bundle.properties
@@ -1445,6 +1445,8 @@ file.addreplace.error.existing_file_to_replace_not_found_by_id=Replacement file
file.addreplace.error.existing_file_to_replace_is_null=The existing file to replace cannot be null
file.addreplace.error.existing_file_to_replace_not_in_dataset=The existing file to replace does not belong to this dataset
file.addreplace.error.existing_file_not_in_latest_published_version=You cannot replace a file that is not in the most recently published Dataset. (It was deleted from a previous version.)
+file.addreplace.error.replace.new_file_same_as_replacement=The new file contains the same content as the file to be replaced.
+file.addreplace.error.replace.new_file_has_different_content_type=Warning! The new and old file have different content types.
file.addreplace.error.unpublished_file_cannot_be_replaced=You cannot replace an unpublished file. Please delete it instead of replacing it.
file.addreplace.error.ingest_create_file_err=There was an error when trying to add the new file.
file.addreplace.error.initial_file_list_empty=Sorry! An error occurred and the new file was not added.
@@ -1455,3 +1457,5 @@ file.addreplace.error.add.command_engine_error=Failed to update the dataset. Pl
file.addreplace.error.add.ejb_exception=Failed to update the dataset. Please contact the administrator. (EJBException)
file.addreplace.error.replace.command_engine_error=Failed to update the dataset. Please contact the administrator. (CommandException)
file.addreplace.error.replace.ejb_exception=Failed to update the dataset. Please contact the administrator. (EJBException)
+file.addreplace.error.remove_linked_file.dataset=dataset cannot be null in removeLinkedFileFromDataset
+file.addreplace.error.remove_linked_file.file=file cannot be null in removeLinkedFileFromDataset
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index a05f528c166..86c3197b3e9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -789,22 +789,15 @@ private boolean step_045_auto_checkForFileReplaceDuplicate(){
for (DataFile df : finalFileList){
if (Objects.equals(df.getCheckSum(), fileToReplace.getCheckSum())){
- this.addError("The new file,\"" + df.getFileMetadata().getLabel()
- + "\" has the same content as the replacment file, \""
- + fileToReplace.getFileMetadata().getLabel() + "\" .");
-
-// removeLinkedFileFromDataset(dataset, df); // Is this correct, if multiple files added in case of .shp or .zip, shouldn't they all be removed?
- //this.abandonOperationRemoveAllNewFilesFromDataset(); // Is this correct, if multiple files, shouldn't they all be removed?
+ this.addError(getBundleErr("replace.new_file_same_as_replacement"));
}
// This should be able to be overridden --force
if (!df.getContentType().equalsIgnoreCase(fileToReplace.getContentType())){
- this.addError("Warning! Different content type. The new file,\"" + df.getFileMetadata().getLabel()
- + "\" has content type [" + df.getContentType() + "] while the replacment file, \""
- + fileToReplace.getFileMetadata().getLabel() + "\" has content type: [" + fileToReplace.getContentType() + "]");
-
- // removeLinkedFileFromDataset(dataset, df); // Is this correct, if multiple files added in case of .shp or .zip, shouldn't they all be removed?
- //this.abandonOperationRemoveAllNewFilesFromDataset(); // Is this correct, if multiple files, shouldn't they all be removed?
+ this.addError(getBundleErr("replace.new_file_has_different_content_type"));
+ //+ " The new file,\"" + df.getFileMetadata().getLabel()
+ // + "\" has content type [" + df.getContentType() + "] while the replacment file, \""
+ // + fileToReplace.getFileMetadata().getLabel() + "\" has content type: [" + fileToReplace.getContentType() + "]");
}
}
From d1a76d363c2d5811db98cd4d43e7381415bc525a Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Mon, 3 Oct 2016 15:30:09 -0400
Subject: [PATCH 29/86] Part of #2290. starting basic ui test
---
scripts/search/data/replace_test/003.txt | 1 +
scripts/search/data/replace_test/004.txt | 1 +
scripts/search/data/replace_test/005.txt | 1 +
.../harvard/iq/dataverse/api/FileUpload.java | 51 +++++-
.../datasetutility/AddReplaceFileHelper.java | 110 +++++++++++--
.../datasetutility/FileUploadTestPage.java | 149 ++++++++++++++++++
.../datasetutility/FileVersionRetriever.java | 60 +++++++
.../mydata/RolePermissionHelperPage.java | 2 +-
src/main/webapp/file_upload_test.xhtml | 43 +++++
9 files changed, 402 insertions(+), 16 deletions(-)
create mode 100644 scripts/search/data/replace_test/003.txt
create mode 100644 scripts/search/data/replace_test/004.txt
create mode 100644 scripts/search/data/replace_test/005.txt
create mode 100644 src/main/java/edu/harvard/iq/dataverse/datasetutility/FileUploadTestPage.java
create mode 100644 src/main/java/edu/harvard/iq/dataverse/datasetutility/FileVersionRetriever.java
create mode 100644 src/main/webapp/file_upload_test.xhtml
diff --git a/scripts/search/data/replace_test/003.txt b/scripts/search/data/replace_test/003.txt
new file mode 100644
index 00000000000..e440e5c8425
--- /dev/null
+++ b/scripts/search/data/replace_test/003.txt
@@ -0,0 +1 @@
+3
\ No newline at end of file
diff --git a/scripts/search/data/replace_test/004.txt b/scripts/search/data/replace_test/004.txt
new file mode 100644
index 00000000000..bf0d87ab1b2
--- /dev/null
+++ b/scripts/search/data/replace_test/004.txt
@@ -0,0 +1 @@
+4
\ No newline at end of file
diff --git a/scripts/search/data/replace_test/005.txt b/scripts/search/data/replace_test/005.txt
new file mode 100644
index 00000000000..7813681f5b4
--- /dev/null
+++ b/scripts/search/data/replace_test/005.txt
@@ -0,0 +1 @@
+5
\ No newline at end of file
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
index 036b9af0f1d..859b45cc62e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
@@ -34,6 +34,8 @@
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
+import java.nio.file.Files;
+import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
@@ -174,13 +176,45 @@ private void saveFile(InputStream uploadedInputStream,
}
*/
+ /**
+ * get existing test file from this directory:
+ * "scripts/search/data/replace_test/"
+ *
+ * @param existingFileName
+ * @return
+ */
+
+ private InputStream getExistingFileInputStream(String existingFileName){
+ if (existingFileName == null){
+ return null;
+ }
+ InputStream inputStream = null;
+
+ //System.out.println("Current path: " + Paths.get(".").toAbsolutePath().normalize().toString());
+ String pathToFileName = "(some path)/scripts/search/data/replace_test/" + existingFileName;
+
+ try {
+ inputStream = new FileInputStream(pathToFileName);
+ //is.close();
+ } catch (FileNotFoundException e) {
+ e.printStackTrace();
+ return null;
+ } catch (IOException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ return null;
+ }
+
+ return inputStream;
+ }
+
private InputStream getSampleFile(){
- InputStream is = null;
+ InputStream inputStream = null;
String testFileInputStreamName = "/Users/rmp553/Documents/iqss-git/dataverse-helper-scripts/src/api_scripts/input/howdy3.txt";
//testFileInputStreamName = "/Users/rmp553/NetBeansProjects/dataverse/src/main/java/edu/harvard/iq/dataverse/datasetutility/howdy.txt";
try {
- is = new FileInputStream(testFileInputStreamName);
+ inputStream = new FileInputStream(testFileInputStreamName);
//is.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
@@ -191,7 +225,7 @@ private InputStream getSampleFile(){
return null;
}
- return is;
+ return inputStream;
}
@@ -328,7 +362,7 @@ public Response hi_add(@PathParam("newFilename") String newFilename){
/**
* Used for RestAssured testing until multipart form available
* @param datasetId - dataset to add files
- * @param existingFileName test file in directory "scripts/search/data/binary/"
+ * @param existingTestFileName test file in directory "scripts/search/data/replace_test/"
* @param fileContentType
* @param fileName
* @param fileToReplaceId
@@ -339,7 +373,7 @@ public Response hi_add(@PathParam("newFilename") String newFilename){
public Response testAddReplace(@QueryParam("replaceOperation") Boolean replaceOperation,
@QueryParam("datasetId") Long datasetId,
@QueryParam("loadById") Boolean loadById,
- @QueryParam("existingFileName") String existingFileName,
+ @QueryParam("existingTestFileName") String existingTestFileName,
@QueryParam("newFileContentType") String newFileContentType,
@QueryParam("newFileName") String newFileName,
@QueryParam("fileToReplaceId") Long fileToReplaceId,
@@ -386,7 +420,12 @@ public Response testAddReplace(@QueryParam("replaceOperation") Boolean replaceOp
InputStream testFileInputStream;
if (badStreamTest){
testFileInputStream = null;
- }else{
+ }else if (existingTestFileName != null){
+
+ testFileInputStream = getExistingFileInputStream(existingTestFileName);
+ msgt("testFileInputStream: " + testFileInputStream);
+
+ } else{
testFileInputStream = getSampleFile();
if (testFileInputStream == null){
return okResponse("Couldn't find the file!!");
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index 86c3197b3e9..fdb1a2666d6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -43,6 +43,45 @@
/**
* Methods to add or replace a single file.
*
+ * Usage example:
+ *
+ * // (1) Instantiate the class
+ *
+ * AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest2,
+ * this.ingestService,
+ * this.datasetService,
+ * this.fileService,
+ * this.permissionSvc,
+ * this.commandEngine);
+ *
+ * // (2) Run file "ADD"
+ *
+ * addFileHelper.runAddFileByDatasetId(datasetId,
+ * newFileName,
+ * newFileContentType,
+ * newFileInputStream);
+ * // (2a) Check for errors
+ * if (addFileHelper.hasError()){
+ * // get some errors
+ * System.out.println(addFileHelper.getErrorMessagesAsString("\n"));
+ * }
+ *
+ *
+ * // OR (3) Run file "REPLACE"
+ *
+ * addFileHelper.runReplaceFile(datasetId,
+ * newFileName,
+ * newFileContentType,
+ * newFileInputStream,
+ * fileToReplaceId);
+ * // (2a) Check for errors
+ * if (addFileHelper.hasError()){
+ * // get some errors
+ * System.out.println(addFileHelper.getErrorMessagesAsString("\n"));
+ * }
+ *
+ *
+ *
* @author rmp553
*/
public class AddReplaceFileHelper{
@@ -52,6 +91,7 @@ public class AddReplaceFileHelper{
public static String FILE_ADD_OPERATION = "FILE_ADD_OPERATION";
public static String FILE_REPLACE_OPERATION = "FILE_REPLACE_OPERATION";
+ public static String FILE_REPLACE_FORCE_OPERATION = "FILE_REPLACE_FORCE_OPERATION";
private String currentOperation;
@@ -191,6 +231,27 @@ public boolean runAddFile(Dataset dataset, String newFileName, String newFileCon
}
+ /**
+ * After the constructor, this method is called to replace a file
+ *
+ * @param dataset
+ * @param newFileName
+ * @param newFileContentType
+ * @param newFileInputStream
+ * @return
+ */
+ public boolean runForceReplaceFile(Dataset dataset, String newFileName, String newFileContentType, InputStream newFileInputStream, Long oldFileId){
+
+ msgt(">> runForceReplaceFile");
+ this.currentOperation = FILE_REPLACE_FORCE_OPERATION;
+
+ if (oldFileId==null){
+ this.addErrorSevere(getBundleErr("existing_file_to_replace_id_is_null"));
+ return false;
+ }
+
+ return this.runAddReplaceFile(dataset, newFileName, newFileContentType, newFileInputStream, oldFileId);
+ }
/**
* After the constructor, this method is called to replace a file
@@ -315,14 +376,41 @@ public String getCurrentOperation(){
return this.currentOperation;
}
+
+ /**
+ * Is this a file FORCE replace operation?
+ *
+ * Only overrides warnings of content type change
+ *
+ * @return
+ */
+ public boolean isForceFileOperation(){
+
+ return this.currentOperation.equals(FILE_REPLACE_FORCE_OPERATION);
+ }
+
+ /**
+ * Is this a file replace operation?
+ * @return
+ */
public boolean isFileReplaceOperation(){
- return this.currentOperation == FILE_REPLACE_OPERATION;
+ if (this.currentOperation.equals(FILE_REPLACE_OPERATION)){
+ return true;
+ }else if (this.currentOperation.equals(FILE_REPLACE_FORCE_OPERATION)){
+ return true;
+ }
+ return false;
}
-
+
+ /**
+ * Is this a file add operation?
+ *
+ * @return
+ */
public boolean isFileAddOperation(){
- return this.currentOperation == FILE_ADD_OPERATION;
+ return this.currentOperation.equals(FILE_ADD_OPERATION);
}
/**
@@ -793,13 +881,17 @@ private boolean step_045_auto_checkForFileReplaceDuplicate(){
}
// This should be able to be overridden --force
- if (!df.getContentType().equalsIgnoreCase(fileToReplace.getContentType())){
- this.addError(getBundleErr("replace.new_file_has_different_content_type"));
- //+ " The new file,\"" + df.getFileMetadata().getLabel()
- // + "\" has content type [" + df.getContentType() + "] while the replacment file, \""
- // + fileToReplace.getFileMetadata().getLabel() + "\" has content type: [" + fileToReplace.getContentType() + "]");
+ if (isForceFileOperation()){
+
+ // Warning that content type of the file has changed
+ //
+ if (!df.getContentType().equalsIgnoreCase(fileToReplace.getContentType())){
+ this.addError(getBundleErr("replace.new_file_has_different_content_type"));
+ //+ " The new file,\"" + df.getFileMetadata().getLabel()
+ // + "\" has content type [" + df.getContentType() + "] while the replacment file, \""
+ // + fileToReplace.getFileMetadata().getLabel() + "\" has content type: [" + fileToReplace.getContentType() + "]");
+ }
}
-
}
if (hasError()){
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileUploadTestPage.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileUploadTestPage.java
new file mode 100644
index 00000000000..b56cd4dd2ef
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileUploadTestPage.java
@@ -0,0 +1,149 @@
+/*
+ * To change this license header, choose License Headers in Project Properties.
+ * To change this template file, choose Tools | Templates
+ * and open the template in the editor.
+ */
+package edu.harvard.iq.dataverse.datasetutility;
+
+import edu.harvard.iq.dataverse.DataFile;
+import edu.harvard.iq.dataverse.DataFileServiceBean;
+import edu.harvard.iq.dataverse.DatasetPage;
+import edu.harvard.iq.dataverse.DatasetServiceBean;
+import edu.harvard.iq.dataverse.DatasetVersionServiceBean;
+import edu.harvard.iq.dataverse.DataverseLinkingServiceBean;
+import edu.harvard.iq.dataverse.DataverseRequestServiceBean;
+import edu.harvard.iq.dataverse.DataverseSession;
+import edu.harvard.iq.dataverse.EjbDataverseEngine;
+import edu.harvard.iq.dataverse.PermissionServiceBean;
+import edu.harvard.iq.dataverse.PermissionsWrapper;
+import edu.harvard.iq.dataverse.UserNotificationServiceBean;
+import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
+import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
+import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
+import edu.harvard.iq.dataverse.util.SystemConfig;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import javax.ejb.EJB;
+import javax.faces.application.FacesMessage;
+import javax.faces.context.FacesContext;
+import javax.faces.view.ViewScoped;
+import javax.inject.Inject;
+import javax.inject.Named;
+import org.primefaces.event.FileUploadEvent;
+import org.primefaces.model.UploadedFile;
+
+/**
+ *
+ * @author rmp553
+ */
+@ViewScoped
+@Named("FileUploadTestPage")
+public class FileUploadTestPage implements java.io.Serializable {
+
+ private static final Logger logger = Logger.getLogger(DatasetPage.class.getCanonicalName());
+
+ @EJB
+ IngestServiceBean ingestService;
+ @Inject DataverseSession session;
+ @EJB
+ DatasetServiceBean datasetService;
+ @EJB
+ DatasetVersionServiceBean datasetVersionService;
+ @EJB
+ DataFileServiceBean datafileService;
+ @EJB
+ UserNotificationServiceBean userNotificationService;
+ @EJB
+ SettingsServiceBean settingsService;
+ @EJB
+ AuthenticationServiceBean authService;
+ @EJB
+ SystemConfig systemConfig;
+ @EJB
+ DataverseLinkingServiceBean dvLinkingService;
+ @Inject
+ DataverseRequestServiceBean dvRequestService;
+ @EJB
+ PermissionServiceBean permissionService;
+ @EJB
+ EjbDataverseEngine commandEngine;
+
+ public String init() {
+
+ return null;
+ }
+
+ public String yesYes(){
+ return "yes yes";
+ }
+
+ private void msg(String s){
+ System.out.println(s);
+ }
+
+ private void msgt(String s){
+ msg("-------------------------------");
+ msg(s);
+ msg("-------------------------------");
+ }
+
+
+ public void handleFileUpload(FileUploadEvent event) {
+ msgt("handleFileUpload");
+
+ //FacesMessage message = new FacesMessage("Succesful", event.getFile().getFileName() + " is uploaded.");
+ //FacesContext.getCurrentInstance().addMessage(null, message);
+
+
+ UploadedFile uFile = event.getFile();
+
+ msg("getFileName: " + uFile.getFileName());
+ msg("getContentType: " + uFile.getContentType());
+
+ addFile(uFile);
+ //msg("file name: " + event.getFileName());
+ // dFileList = ingestService.createDataFiles(workingVersion, uFile.getInputstream(), uFile.getFileName(), uFile.getContentType());
+
+ }
+
+
+ public void addFile(UploadedFile laFile){
+
+
+ //DataverseRequest dvRequest2 = createDataverseRequest(authUser);
+ AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequestService.getDataverseRequest(),
+ ingestService,
+ datasetService,
+ datafileService,
+ permissionService,
+ commandEngine);
+
+
+ InputStream inputStream = null;
+ try {
+ inputStream = laFile.getInputstream();
+ } catch (IOException ex) {
+ msgt("file io exception");
+
+ Logger.getLogger(FileUploadTestPage.class.getName()).log(Level.SEVERE, null, ex);
+ return;
+ }
+
+
+ addFileHelper.runAddFileByDatasetId(new Long(10),
+ laFile.getFileName(),
+ laFile.getContentType(),
+ inputStream);
+
+ if (addFileHelper.hasError()){
+ msgt("upload error");
+ msg(addFileHelper.getErrorMessagesAsString("\n"));
+ }else{
+ msg("Look at that! You added a file! (hey hey, it may have worked)");
+ }
+ }
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileVersionRetriever.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileVersionRetriever.java
new file mode 100644
index 00000000000..f166ed52cc6
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileVersionRetriever.java
@@ -0,0 +1,60 @@
+/*
+ * To change this license header, choose License Headers in Project Properties.
+ * To change this template file, choose Tools | Templates
+ * and open the template in the editor.
+ */
+package edu.harvard.iq.dataverse.datasetutility;
+
+import edu.harvard.iq.dataverse.DataFile;
+import edu.harvard.iq.dataverse.DataFileServiceBean;
+import java.util.List;
+import java.util.logging.Logger;
+
+/**
+ * Given a file id or object, return a list of this file's versions
+ *
+ * @author rmp553
+ */
+public class FileVersionRetriever {
+
+ private static final Logger logger = Logger.getLogger(FileVersionRetriever.class.getCanonicalName());
+
+ DataFileServiceBean dataFileService;
+
+ DataFile chosenFile;
+ List fileList;
+
+ /**
+ * Constructor by chosenFileId
+ *
+ * @param fileService
+ * @param chosenFileId
+ */
+ public FileVersionRetriever(DataFileServiceBean fileService, Long chosenFileId){
+ if (fileService == null){
+ throw new NullPointerException("fileService cannot be null");
+ }
+ if (chosenFileId == null){
+ throw new NullPointerException("chosenFileId cannot be null");
+ }
+ dataFileService = fileService;
+ chosenFile = dataFileService.find(chosenFileId);
+ }
+
+ /**
+ * Constructor by chosenFile
+ *
+ * @param fileService
+ * @param selectedFile
+ */
+ public FileVersionRetriever(DataFileServiceBean fileService, DataFile selectedFile){
+ if (fileService == null){
+ throw new NullPointerException("fileService cannot be null");
+ }
+ if (selectedFile == null){
+ throw new NullPointerException("selectedFile cannot be null");
+ }
+ dataFileService = fileService;
+ chosenFile = selectedFile;
+ }
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/mydata/RolePermissionHelperPage.java b/src/main/java/edu/harvard/iq/dataverse/mydata/RolePermissionHelperPage.java
index 114e33d058c..06841c470d8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/mydata/RolePermissionHelperPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/mydata/RolePermissionHelperPage.java
@@ -53,7 +53,7 @@ public String init() {
//List dtypes = new ArrayList<>();
- return null;
+ return null;
}
diff --git a/src/main/webapp/file_upload_test.xhtml b/src/main/webapp/file_upload_test.xhtml
new file mode 100644
index 00000000000..5e66fc6d0b0
--- /dev/null
+++ b/src/main/webapp/file_upload_test.xhtml
@@ -0,0 +1,43 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Test Add/Replace functions
+
+
+
+
+
+ Add New File
+
+
+
+
+
+
+
+
+
+
+
+
+
+
From b7ffd044bfe0e2981b078beb70d786ad3ceca30c Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Tue, 4 Oct 2016 13:30:29 -0400
Subject: [PATCH 30/86] #2290 - UI to test replace functionality
---
.../edu/harvard/iq/dataverse/DataFile.java | 1 -
.../datasetutility/AddReplaceFileHelper.java | 5 +-
.../datasetutility/FileUploadTestPage.java | 90 +++++++++++++++++--
src/main/webapp/file_upload_test.xhtml | 61 ++++++++++++-
4 files changed, 143 insertions(+), 14 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
index 5b68aeafc82..b937eecb624 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
@@ -16,7 +16,6 @@
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.Files;
-import java.util.Comparator;
import javax.persistence.Entity;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index fdb1a2666d6..ab8a0ba7591 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -647,9 +647,9 @@ private boolean step_025_loadFileToReplace(DataFile existingFile){
if (existingFile == null){
this.addErrorSevere(getBundleErr("existing_file_to_replace_is_null"));
return false;
- }
+ }
- if (existingFile.getOwner() != this.dataset){
+ if (!existingFile.getOwner().equals(this.dataset)){
addError(getBundleErr("existing_file_to_replace_not_in_dataset"));
return false;
}
@@ -687,6 +687,7 @@ private boolean step_025_loadFileToReplaceById(Long dataFileId){
}
DataFile existingFile = fileService.find(dataFileId);
+
if (existingFile == null){
this.addError(getBundleErr("existing_file_to_replace_not_found_by_id") + " " + dataFileId);
return false;
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileUploadTestPage.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileUploadTestPage.java
index b56cd4dd2ef..0cec10efbf4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileUploadTestPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileUploadTestPage.java
@@ -7,6 +7,7 @@
import edu.harvard.iq.dataverse.DataFile;
import edu.harvard.iq.dataverse.DataFileServiceBean;
+import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.DatasetPage;
import edu.harvard.iq.dataverse.DatasetServiceBean;
import edu.harvard.iq.dataverse.DatasetVersionServiceBean;
@@ -14,25 +15,25 @@
import edu.harvard.iq.dataverse.DataverseRequestServiceBean;
import edu.harvard.iq.dataverse.DataverseSession;
import edu.harvard.iq.dataverse.EjbDataverseEngine;
+import edu.harvard.iq.dataverse.FileMetadata;
import edu.harvard.iq.dataverse.PermissionServiceBean;
-import edu.harvard.iq.dataverse.PermissionsWrapper;
import edu.harvard.iq.dataverse.UserNotificationServiceBean;
import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
-import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import edu.harvard.iq.dataverse.util.SystemConfig;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
+import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
import javax.faces.context.FacesContext;
import javax.faces.view.ViewScoped;
import javax.inject.Inject;
import javax.inject.Named;
+import org.apache.commons.lang.StringUtils;
import org.primefaces.event.FileUploadEvent;
import org.primefaces.model.UploadedFile;
@@ -46,9 +47,15 @@ public class FileUploadTestPage implements java.io.Serializable {
private static final Logger logger = Logger.getLogger(DatasetPage.class.getCanonicalName());
+ private boolean replaceOperation = false;
+ private Long datasetId;
+ private Dataset dataset;
+ private DataFile fileToReplace;
+
@EJB
IngestServiceBean ingestService;
- @Inject DataverseSession session;
+ @Inject
+ DataverseSession session;
@EJB
DatasetServiceBean datasetService;
@EJB
@@ -72,11 +79,48 @@ public class FileUploadTestPage implements java.io.Serializable {
@EJB
EjbDataverseEngine commandEngine;
+
public String init() {
+ Map params =FacesContext.getCurrentInstance().
+ getExternalContext().getRequestParameterMap();
+
+
+ msgt("params: " + params.toString());
+
+ if (params.containsKey("ds_id")){
+ String ds_id = params.get("ds_id");
+ if ((!ds_id.isEmpty()) && (StringUtils.isNumeric(ds_id))){
+ dataset = datasetService.find(Long.parseLong(ds_id));
+ }
+ }
+
+ if (params.containsKey("fid")){
+ String fid = params.get("fid");
+ if ((!fid.isEmpty()) && (StringUtils.isNumeric(fid))){
+ fileToReplace = datafileService.find(Long.parseLong(fid));
+ }
+ }
+
+ if (fileToReplace != null){
+ replaceOperation = true;
+ }else{
+ replaceOperation = false;
+ }
+
return null;
}
+
+
+
+ public List getDatasetFileMetadatas(){
+ if (dataset == null){
+ return null;
+ }
+ return dataset.getLatestVersion().getFileMetadatasSorted();
+ }
+
public String yesYes(){
return "yes yes";
}
@@ -90,11 +134,29 @@ private void msgt(String s){
msg(s);
msg("-------------------------------");
}
+
+ public Dataset getDataset(){
+ return dataset;
+ }
+
+ public void setDataset(Dataset ds){
+ dataset = ds;
+ }
+
+ public DataFile getFileToReplace(){
+ return fileToReplace;
+ }
+
+ public void setFileToReplace(DataFile df){
+ fileToReplace = df;
+ }
public void handleFileUpload(FileUploadEvent event) {
- msgt("handleFileUpload");
+
+ String foo = (String) event.getComponent().getAttributes().get("isReplaceOperation"); // bar
+ msgt("Foo: " + foo);
//FacesMessage message = new FacesMessage("Succesful", event.getFile().getFileName() + " is uploaded.");
//FacesContext.getCurrentInstance().addMessage(null, message);
@@ -104,14 +166,15 @@ public void handleFileUpload(FileUploadEvent event) {
msg("getFileName: " + uFile.getFileName());
msg("getContentType: " + uFile.getContentType());
- addFile(uFile);
+ addReplaceFile(uFile);
//msg("file name: " + event.getFileName());
// dFileList = ingestService.createDataFiles(workingVersion, uFile.getInputstream(), uFile.getFileName(), uFile.getContentType());
}
- public void addFile(UploadedFile laFile){
+
+ public void addReplaceFile(UploadedFile laFile){
//DataverseRequest dvRequest2 = createDataverseRequest(authUser);
@@ -134,10 +197,18 @@ public void addFile(UploadedFile laFile){
}
- addFileHelper.runAddFileByDatasetId(new Long(10),
+ if (this.replaceOperation){
+ addFileHelper.runReplaceFile(dataset,
+ laFile.getFileName(),
+ laFile.getContentType(),
+ inputStream,
+ fileToReplace.getId());
+ }else{
+ addFileHelper.runAddFile(dataset,
laFile.getFileName(),
laFile.getContentType(),
inputStream);
+ }
if (addFileHelper.hasError()){
msgt("upload error");
@@ -146,4 +217,5 @@ public void addFile(UploadedFile laFile){
msg("Look at that! You added a file! (hey hey, it may have worked)");
}
}
-}
+
+} // end class FileUploadTestPage
diff --git a/src/main/webapp/file_upload_test.xhtml b/src/main/webapp/file_upload_test.xhtml
index 5e66fc6d0b0..c90b2cb36a2 100644
--- a/src/main/webapp/file_upload_test.xhtml
+++ b/src/main/webapp/file_upload_test.xhtml
@@ -25,17 +25,74 @@
+
Add New File
+
+
+ Replace File: #{FileUploadTestPage.fileToReplace.displayName }
+
+
+ sizeLimit="100000" fileLimit="2" allowTypes="/(\.|\/)(gif|jpe?g|png|txt)$/"
+ update="@all" />
+
-
+
+
+
+
+
+
+
+
+ No dataset chosen.
+
+
+ #{FileUploadTestPage.dataset.displayName } v#{FileUploadTestPage.dataset.versionNumber }
+
+
+
+ | Name |
+ Content type |
+ Hash |
+ Is Released |
+
+
+
+ |
+
+ #{fm.label}
+
+
+ #{fm.label}
+
+ |
+ |
+ |
+
+
+ replacable
+
+
+ nope
+
+
+ |
+
+
+
+
+
+
From dc2910e045b2ac4a3c7f23ff5f6e2a537a0c5f28 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Thu, 6 Oct 2016 12:29:19 -0400
Subject: [PATCH 31/86] #2290 Set error if single replacement file ingests into
multiple files. e.g. a .zip as a replacement
---
src/main/java/Bundle.properties | 1 +
.../datasetutility/AddReplaceFileHelper.java | 24 ++++++++++++++++++-
src/main/webapp/file_upload_test.xhtml | 2 +-
3 files changed, 25 insertions(+), 2 deletions(-)
diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties
index f5120a87c0e..27bfcdd3f5b 100755
--- a/src/main/java/Bundle.properties
+++ b/src/main/java/Bundle.properties
@@ -1447,6 +1447,7 @@ file.addreplace.error.replace.new_file_has_different_content_type=Warning! The n
file.addreplace.error.unpublished_file_cannot_be_replaced=You cannot replace an unpublished file. Please delete it instead of replacing it.
file.addreplace.error.ingest_create_file_err=There was an error when trying to add the new file.
file.addreplace.error.initial_file_list_empty=Sorry! An error occurred and the new file was not added.
+file.addreplace.error.initial_file_list_more_than_one=You cannot replace a single file with multiple files. The file you uploaded was ingested into multiple files.
file.addreplace.error.final_file_list_empty=There are no files to add. (This error should not happen if steps called in sequence....)
file.addreplace.error.only_replace_operation=This should ONLY be called for file replace operations!!
file.addreplace.error.failed_to_remove_old_file_from_dataset=Unable to remove old file from new DatasetVersion.
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index 5e111ed2eaa..127d81b617c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -729,9 +729,17 @@ private boolean step_030_createNewFilesViaIngest(){
*/
if (initialFileList.isEmpty()){
this.addErrorSevere("initial_file_list_empty");
+ this.runMajorCleanup();
return false;
}
+ if (initialFileList.size() > 1){
+ this.addError("initial_file_list_more_than_one");
+ this.runMajorCleanup();
+ return false;
+
+ }
+
if (!this.step_040_auto_checkForDuplicates()){
return false;
}
@@ -771,7 +779,9 @@ private boolean step_035_auto_isReplacementInLatestVersion(){
}
/**
- * This is always run after step 30
+ * Create a "final file list"
+ *
+ * This is always run after step 30 -- the ingest
*
* @return
*/
@@ -838,6 +848,18 @@ private boolean step_040_auto_checkForDuplicates(){
return false;
}
+ if (finalFileList.size() > 1){
+ this.addErrorSevere("There is more than 1 file to add. (This error shouldn't happen b/c the initial file list should always have 1 item");
+ return false;
+ }
+
+
+ if (finalFileList.isEmpty()){
+ this.addErrorSevere("There are no files to add. (This error shouldn't happen if steps called in sequence....step_040_auto_checkForDuplicates)");
+ return false;
+ }
+
+
return true;
} // end step_040_auto_checkForDuplicates
diff --git a/src/main/webapp/file_upload_test.xhtml b/src/main/webapp/file_upload_test.xhtml
index c90b2cb36a2..3a0577358a7 100644
--- a/src/main/webapp/file_upload_test.xhtml
+++ b/src/main/webapp/file_upload_test.xhtml
@@ -77,7 +77,7 @@
|
- |
+ |
replacable
From 2541ef9e609fe41e678eb37ff2544c7bb97b7ab5 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Thu, 6 Oct 2016 16:57:48 -0400
Subject: [PATCH 32/86] #2290 working example of @scolapasta configuration for
file upload via API
---
pom.xml | 12 +++
.../iq/dataverse/api/ApiConfiguration.java | 14 +++-
.../harvard/iq/dataverse/api/FileUpload.java | 77 +++----------------
.../datasetutility/AddReplaceFileHelper.java | 17 +++-
.../datasetutility/FileUploadTestPage.java | 8 ++
src/main/webapp/file_upload_test.xhtml | 11 ++-
6 files changed, 67 insertions(+), 72 deletions(-)
diff --git a/pom.xml b/pom.xml
index 7812b866812..162191db3f3 100644
--- a/pom.xml
+++ b/pom.xml
@@ -403,6 +403,18 @@
true
jar
+
+
+ org.glassfish.jersey.containers
+ jersey-container-servlet
+ 2.23.2
+
+
+
+ org.glassfish.jersey.media
+ jersey-media-multipart
+ 2.23.2
+
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/ApiConfiguration.java b/src/main/java/edu/harvard/iq/dataverse/api/ApiConfiguration.java
index 8bb58670104..ab48c93ed00 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/ApiConfiguration.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/ApiConfiguration.java
@@ -1,8 +1,18 @@
package edu.harvard.iq.dataverse.api;
import javax.ws.rs.ApplicationPath;
-import javax.ws.rs.core.Application;
+import org.glassfish.jersey.media.multipart.MultiPartFeature;
+import org.glassfish.jersey.server.ResourceConfig;
@ApplicationPath("api/v1")
-public class ApiConfiguration extends Application {
+public class ApiConfiguration extends ResourceConfig {
+
+ public ApiConfiguration() {
+ packages("edu.harvard.iq.dataverse.api");
+ register(MultiPartFeature.class);
+ }
}
+/*
+public class ApiConfiguration extends ResourceConfi {
+}
+*/
\ No newline at end of file
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
index 859b45cc62e..3df776fa25d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
@@ -11,51 +11,38 @@
import edu.harvard.iq.dataverse.DataFileServiceBean;
import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.DatasetServiceBean;
-import edu.harvard.iq.dataverse.DatasetVersion;
import edu.harvard.iq.dataverse.DatasetVersionServiceBean;
import edu.harvard.iq.dataverse.DataverseRequestServiceBean;
import edu.harvard.iq.dataverse.DataverseServiceBean;
import edu.harvard.iq.dataverse.EjbDataverseEngine;
import edu.harvard.iq.dataverse.FileMetadata;
-import edu.harvard.iq.dataverse.Template;
-import edu.harvard.iq.dataverse.UserNotification;
import edu.harvard.iq.dataverse.UserNotificationServiceBean;
-import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
import edu.harvard.iq.dataverse.authorization.users.User;
import edu.harvard.iq.dataverse.datasetutility.AddReplaceFileHelper;
-import edu.harvard.iq.dataverse.datasetutility.DuplicateFileChecker;
-import edu.harvard.iq.dataverse.engine.command.Command;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
-import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
-import edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetCommand;
-import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetCommand;
import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
+import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
-import java.nio.file.Files;
-import java.nio.file.Paths;
-import java.util.ArrayList;
+import java.io.OutputStream;
import java.util.Iterator;
-import java.util.List;
-import java.util.Set;
-import java.util.logging.Level;
import java.util.logging.Logger;
import javax.ejb.EJB;
-import javax.ejb.EJBException;
import javax.ejb.Stateless;
-import javax.faces.application.FacesMessage;
-import javax.faces.context.FacesContext;
import javax.inject.Inject;
-import javax.servlet.http.HttpServletRequest;
-import javax.validation.ConstraintViolation;
+import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
+import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
-import org.mindrot.jbcrypt.BCrypt;
+import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
+import org.glassfish.jersey.media.multipart.FormDataParam;
/**
*
@@ -87,53 +74,7 @@ public class FileUpload extends AbstractApiBean {
// for testing
private static final String SERVER_UPLOAD_LOCATION_FOLDER = "/Users/rmp553/Documents/iqss-git/dataverse-helper-scripts/src/api_scripts/output/";
- /*
- @POST
- @Path("hello") //Your Path or URL to call this service
- @Consumes(MediaType.MULTIPART_FORM_DATA)
- public Response uploadFile(
- @DefaultValue("true") @FormDataParam("enabled") boolean enabled,
- @FormDataParam("file") InputStream uploadedInputStream,
- @FormDataParam("file") FormDataContentDisposition fileDetail) {
- //Your local disk path where you want to store the file
- String uploadedFileLocation = SERVER_UPLOAD_LOCATION_FOLDER + fileDetail.getFileName();
- System.out.println(uploadedFileLocation);
- // save it
- File objFile=new File(uploadedFileLocation);
- if(objFile.exists())
- {
- objFile.delete();
-
- }
-
- saveToFile(uploadedInputStream, uploadedFileLocation);
-
- String userMsg = "File uploaded via Jersey based RESTFul Webservice to: " + uploadedFileLocation;
-
- return okResponse(userMsg);
- }
- private void saveToFile(InputStream uploadedInputStream,
- String uploadedFileLocation) {
-
- try {
- OutputStream out = null;
- int read = 0;
- byte[] bytes = new byte[1024];
-
- out = new FileOutputStream(new File(uploadedFileLocation));
- while ((read = uploadedInputStream.read(bytes)) != -1) {
- out.write(bytes, 0, read);
- }
- out.flush();
- out.close();
- } catch (IOException e) {
-
- e.printStackTrace();
- }
- }
- */
- /*
@POST
@Path("hello")
@Consumes(MediaType.MULTIPART_FORM_DATA)
@@ -174,7 +115,7 @@ private void saveFile(InputStream uploadedInputStream,
}
}
- */
+
/**
* get existing test file from this directory:
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index 127d81b617c..6559108bb60 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -122,6 +122,9 @@ public class AddReplaceFileHelper{
private DatasetVersion workingVersion;
List initialFileList;
List finalFileList;
+
+ // Ingested file
+ private DataFile newlyAddedFile;
// For error handling
@@ -1183,9 +1186,20 @@ private boolean step_080_run_update_dataset_command_for_replace(){
logger.severe(ex.getMessage());
return false;
}
- return true;
+
+ // Save newly added file to object
+ if (!finalFileList.isEmpty()){
+ newlyAddedFile = finalFileList.get(0);
+ }
+
+ return true;
}
+
+ public DataFile getNewlyAddedFile(){
+
+ return newlyAddedFile;
+ }
private boolean step_090_notifyUser(){
if (this.hasError()){
@@ -1203,6 +1217,7 @@ private boolean step_100_startIngestJobs(){
if (this.hasError()){
return false;
}
+
// clear old file list
//
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileUploadTestPage.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileUploadTestPage.java
index 0cec10efbf4..0126400714c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileUploadTestPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileUploadTestPage.java
@@ -51,6 +51,7 @@ public class FileUploadTestPage implements java.io.Serializable {
private Long datasetId;
private Dataset dataset;
private DataFile fileToReplace;
+ private DataFile newlyAddedFile;
@EJB
IngestServiceBean ingestService;
@@ -214,8 +215,15 @@ public void addReplaceFile(UploadedFile laFile){
msgt("upload error");
msg(addFileHelper.getErrorMessagesAsString("\n"));
}else{
+ newlyAddedFile = addFileHelper.getNewlyAddedFile();
msg("Look at that! You added a file! (hey hey, it may have worked)");
}
}
+
+ public DataFile getNewlyAddedFile(){
+
+ return newlyAddedFile;
+ }
+
} // end class FileUploadTestPage
diff --git a/src/main/webapp/file_upload_test.xhtml b/src/main/webapp/file_upload_test.xhtml
index 3a0577358a7..7f90bc2fae1 100644
--- a/src/main/webapp/file_upload_test.xhtml
+++ b/src/main/webapp/file_upload_test.xhtml
@@ -24,7 +24,16 @@
-
+
+
+
+ Added!: #{FileUploadTestPage.newlyAddedFile.displayName }
+
+
+
+
+
+
Add New File
From 2403cdb2097b3f06c7f6b1b014e00bc592f67bc1 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Thu, 6 Oct 2016 17:36:48 -0400
Subject: [PATCH 33/86] #2290 - Use the FormDataBodyPart class to get the mime
type of the uploaded file. e.g. formDataBodyPart.getMediaType().toString()
---
.../harvard/iq/dataverse/api/FileUpload.java | 32 +++++++++++++++----
1 file changed, 25 insertions(+), 7 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
index 3df776fa25d..e754843c8f0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
@@ -10,6 +10,7 @@
import edu.harvard.iq.dataverse.DataFile;
import edu.harvard.iq.dataverse.DataFileServiceBean;
import edu.harvard.iq.dataverse.Dataset;
+import edu.harvard.iq.dataverse.DatasetFieldValidator;
import edu.harvard.iq.dataverse.DatasetServiceBean;
import edu.harvard.iq.dataverse.DatasetVersionServiceBean;
import edu.harvard.iq.dataverse.DataverseRequestServiceBean;
@@ -34,6 +35,7 @@
import javax.ejb.Stateless;
import javax.inject.Inject;
import javax.ws.rs.Consumes;
+import javax.ws.rs.FormParam;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
@@ -41,8 +43,10 @@
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
+import org.glassfish.jersey.media.multipart.FormDataBodyPart;
import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
import org.glassfish.jersey.media.multipart.FormDataParam;
+import org.omnifaces.util.Faces;
/**
*
@@ -236,19 +240,33 @@ public Response hiReSave(@PathParam("fileId") Long fileId){
return okResponse("saved: " + df);
}
- @GET
- @Path("add/{newFilename}")
- public Response hi_add(@PathParam("newFilename") String newFilename){
+
+ @POST
+ //@Path("add/{newFilename}")
+ @Path("add")
+ @Consumes(MediaType.MULTIPART_FORM_DATA)
+ public Response hi_add(@FormDataParam("datasetId") Long datasetId,
+ @FormDataParam("file") InputStream testFileInputStream,
+ @FormDataParam("file") FormDataContentDisposition contentDispositionHeader,
+ @FormDataParam("file") final FormDataBodyPart formDataBodyPart
+ ){
// -------------------------------------
msgt("(1) getSampleFile()");
// -------------------------------------
+ String newFilename = contentDispositionHeader.getFileName();
+ String newFileContentType = formDataBodyPart.getMediaType().toString();
+ //Faces.getServletContext().getMimeType(newFilename);
+ //contentDispositionHeader.getParameters().toString();
+ msgt("newFileContentType:" + newFileContentType);
+
+ /*
InputStream testFileInputStream = getSampleFile();
if (testFileInputStream == null){
return okResponse("Couldn't find the file!!");
}
-
+ */
// -------------------------------------
msgt("(1a) Get User from API token");
// -------------------------------------
@@ -266,8 +284,8 @@ public Response hi_add(@PathParam("newFilename") String newFilename){
// -------------------------------------
msgt("(1b) Get the selected Dataset");
// -------------------------------------
- int dataset_id = 10;
- Dataset selectedDataset = datasetService.find(new Long(dataset_id));
+ //int dataset_id = 10;
+ Dataset selectedDataset = datasetService.find(datasetId);//new Long(dataset_id));
//-------------------
@@ -286,7 +304,7 @@ public Response hi_add(@PathParam("newFilename") String newFilename){
addFileHelper.runAddFile(selectedDataset,
newFilename,
- "text/plain",
+ newFileContentType,
testFileInputStream);
From 3831f4e025b9d5c55666c38c57f3452d2353f4c9 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Fri, 7 Oct 2016 15:26:11 -0400
Subject: [PATCH 34/86] working on #2290
---
.../iq/dataverse/DataFileServiceBean.java | 70 +++++++++++++++++++
.../harvard/iq/dataverse/api/FileUpload.java | 50 ++++++-------
.../datasetutility/FileUploadTestPage.java | 39 +++++++++++
3 files changed, 131 insertions(+), 28 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
index 8b545804607..24423b4efbd 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
@@ -1279,4 +1279,74 @@ public void populateFileSearchCard(SolrSearchResult solrSearchResult) {
solrSearchResult.setEntity(this.findCheapAndEasy(solrSearchResult.getEntityId()));
}
+
+ /**
+ * Does this file have a replacement.
+ * Any file should have AT MOST 1 replacement
+ *
+ * @param df
+ * @return
+ */
+ public boolean hasReplacement(DataFile df) throws Exception{
+
+ if (df.getId() == null){
+ // An unsaved file cannot have a replacement
+ return false;
+ }
+
+
+ TypedQuery query = em.createQuery("select o from DataFile o" +
+ " WHERE o.previousVersionId = :dataFileId;", DataFile.class);
+ query.setParameter("dataFileId", df.getId());
+ //query.setMaxResults(maxResults);
+
+ List dataFiles = query.getResultList();
+
+ if (dataFiles.size() == 0){
+ return false;
+ }
+
+ if (!df.isReleased()){
+ // An unpublished file SHOULD NOT have a replacement
+ String errMsg = "DataFile with id: [" + df.getId() + "] is UNPUBLISHED with a REPLACEMENT. This should NOT happen.";
+ logger.severe(errMsg);
+
+ throw new Exception(errMsg);
+ }
+
+
+
+ else if (dataFiles.size() == 1){
+ return true;
+ }else{
+
+ String errMsg = "DataFile with id: [" + df.getId() + "] has more than one replacment!";
+ logger.severe(errMsg);
+
+ throw new Exception(errMsg);
+ }
+
+ }
+
+ /**
+ * Is this a replacement file?
+ *
+ * A non-null previousDataFileId indicates that it is.
+ *
+ * @param df
+ * @return
+ * @throws Exception
+ */
+ public boolean isReplacementFile(DataFile df) throws Exception{
+ if (df.getPreviousDataFileId() == null){
+ return false;
+ }else if (df.getPreviousDataFileId() < 1){
+ logger.severe("Stop! previousDataFileId should either be null or a number greater than 0");
+ //return false;
+ // blow up -- this shouldn't happen!
+ throw new Exception("previousDataFileId should either be null or a number greater than 0");
+ }else{
+ return true;
+ }
+ }
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
index e754843c8f0..71733d00063 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
@@ -241,55 +241,46 @@ public Response hiReSave(@PathParam("fileId") Long fileId){
}
+ /**
+ * Add a File to an existing Dataset
+ *
+ * @param datasetId
+ * @param testFileInputStream
+ * @param contentDispositionHeader
+ * @param formDataBodyPart
+ * @return
+ */
@POST
- //@Path("add/{newFilename}")
@Path("add")
@Consumes(MediaType.MULTIPART_FORM_DATA)
- public Response hi_add(@FormDataParam("datasetId") Long datasetId,
+ public Response addFileToDataset(@FormDataParam("datasetId") Long datasetId,
@FormDataParam("file") InputStream testFileInputStream,
@FormDataParam("file") FormDataContentDisposition contentDispositionHeader,
@FormDataParam("file") final FormDataBodyPart formDataBodyPart
){
// -------------------------------------
- msgt("(1) getSampleFile()");
+ // (1) Get the file name and content type
// -------------------------------------
String newFilename = contentDispositionHeader.getFileName();
String newFileContentType = formDataBodyPart.getMediaType().toString();
- //Faces.getServletContext().getMimeType(newFilename);
- //contentDispositionHeader.getParameters().toString();
- msgt("newFileContentType:" + newFileContentType);
- /*
- InputStream testFileInputStream = getSampleFile();
- if (testFileInputStream == null){
- return okResponse("Couldn't find the file!!");
- }
- */
// -------------------------------------
- msgt("(1a) Get User from API token");
+ // (2) Get the user from the API key
// -------------------------------------
User authUser;
try {
authUser = this.findUserOrDie();
} catch (WrappedResponse ex) {
- return okResponse("Couldn't find a user from the API key");
+ return errorResponse(Response.Status.FORBIDDEN, "Couldn't find a user from the API key");
}
//authSvc.findByID(new Long(1));
- msg("authUser: " + authUser);
- msg("getUserIdentifier: " + authUser.getIdentifier());
-
-
- // -------------------------------------
- msgt("(1b) Get the selected Dataset");
- // -------------------------------------
- //int dataset_id = 10;
- Dataset selectedDataset = datasetService.find(datasetId);//new Long(dataset_id));
-
+ //msg("authUser: " + authUser);
+ //msg("getUserIdentifier: " + authUser.getIdentifier());
//-------------------
- // ADD
+ // (3) Create the AddReplaceFileHelper object
//-------------------
msg("ADD!");
@@ -302,20 +293,23 @@ public Response hi_add(@FormDataParam("datasetId") Long datasetId,
this.commandEngine);
- addFileHelper.runAddFile(selectedDataset,
+ //-------------------
+ // (4) Run "runAddFileByDatasetId"
+ //-------------------
+ addFileHelper.runAddFileByDatasetId(datasetId,
newFilename,
newFileContentType,
testFileInputStream);
if (addFileHelper.hasError()){
- return okResponse(addFileHelper.getErrorMessagesAsString("\n"));
+ return errorResponse(Response.Status.BAD_REQUEST, addFileHelper.getErrorMessagesAsString("\n"));
}else{
return okResponse("Look at that! You added a file! (hey hey, it may have worked)");
}
- } // end call to "hi"
+ } // end: addFileToDataset
/**
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileUploadTestPage.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileUploadTestPage.java
index 0126400714c..311408f324d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileUploadTestPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileUploadTestPage.java
@@ -220,7 +220,46 @@ public void addReplaceFile(UploadedFile laFile){
}
}
+ /*
+ public String getPebbleTest() throws PebbleException, IOException{
+
+ ClasspathLoader loader = new ClasspathLoader();
+
+ //String pagePath = getServletContext().getRealPath("WEB-INF/home.html");
+ ////msgt("pagePath: " + pagePath);
+
+ //loader.setPrefix(getServletContext().getRealPath("WEB-INF/templates"));
+ loader.setPrefix("WEB-INF/templates");
+ loader.setSuffix(".html");
+
+ Builder yeBuilder = new PebbleEngine.Builder();
+ yeBuilder.loader(loader);// = loader;//(loader)
+ PebbleEngine engine = yeBuilder.build();
+ //PebbleEngine.Builder().
+
+ PebbleTemplate compiledTemplate = engine.getTemplate("home");
+
+
+
+ //PebbleTemplate compiledTemplate = engine.getTemplate(pagePath);
+
+ //PebbleTemplate compiledTemplate = engine.getTemplate(getServletContext().getRealPath("WEB-INF/home.html"));
+
+ Map context = new HashMap<>();
+ context.put("name", "Mitchell");
+
+ Writer writer = new StringWriter();
+ compiledTemplate.evaluate(writer, context);
+
+ String output = writer.toString();
+
+ msgt("getPebbleTest: " + output);
+
+ return output;
+
+ }
+ */
public DataFile getNewlyAddedFile(){
return newlyAddedFile;
From 9fd2be19306bd854cf9941bf68f9d634dd2fd637 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Tue, 11 Oct 2016 10:41:09 -0400
Subject: [PATCH 35/86] 2290 - serialization of DataFile and FileMetadata in
order to return the file id
---
.../edu/harvard/iq/dataverse/DataFile.java | 70 ++++++++++++++++-
.../harvard/iq/dataverse/FileMetadata.java | 57 ++++++++++++++
.../iq/dataverse/api/AbstractApiBean.java | 23 +++++-
.../harvard/iq/dataverse/api/FileUpload.java | 13 +++-
.../datasetutility/AddReplaceFileHelper.java | 76 ++++++++++++++++---
5 files changed, 222 insertions(+), 17 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
index 65d98a0b761..3ea04d8c8c3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
@@ -1,5 +1,10 @@
package edu.harvard.iq.dataverse;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import com.google.gson.annotations.Expose;
import edu.harvard.iq.dataverse.DatasetVersion.VersionState;
import edu.harvard.iq.dataverse.api.WorldMapRelatedData;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
@@ -58,13 +63,17 @@ public class DataFile extends DvObject implements Comparable {
public static final Long ROOT_DATAFILE_ID_DEFAULT = new Long(-1);
+ @Expose
private String name;
+ @Expose
@NotBlank
@Column( nullable = false )
@Pattern(regexp = "^.*/.*$", message = "Content-Type must contain a slash")
private String contentType;
+
+ @Expose
@Column( nullable = false )
private String fileSystemName;
@@ -107,6 +116,7 @@ public String toString() {
}
}
+ @Expose
@Column(nullable = false)
@Enumerated(EnumType.STRING)
private ChecksumType checksumType;
@@ -115,6 +125,7 @@ public String toString() {
* Examples include "f622da34d54bdc8ee541d6916ac1c16f" as an MD5 value or
* "3a484dfdb1b429c2e15eb2a735f1f5e4d5b04ec6" as a SHA-1 value"
*/
+ @Expose
@Column(nullable = false)
private String checksumValue;
@@ -123,6 +134,7 @@ public String toString() {
// For the initial version of a file, this will be equivalent to the ID
// Default is -1 until the intial id is generated
+ @Expose
@Column(nullable=false)
private Long rootDataFileId;
@@ -132,15 +144,18 @@ public String toString() {
*/
// null for initial version; subsequent versions will point to the previous file
//
+ @Expose
@Column(nullable=true)
private Long previousDataFileId;
/* endt: FILE REPLACE ATTRIBUTES */
+ @Expose
@Column(nullable=true)
private Long filesize; // Number of bytes in file. Allows 0 and null, negative numbers not permitted
+ @Expose
private boolean restricted;
/*
@@ -771,6 +786,59 @@ public Long getPreviousDataFileId(){
return this.previousDataFileId;
}
+ public String asPrettyJSON(){
+
+ return serializeAsJSON(true);
+ }
+
+ public String asJSON(){
+
+ return serializeAsJSON(false);
+ }
+
+
+
+ public JsonObject asGsonObject(boolean prettyPrint){
+
+ String overarchingKey = "data";
+
+ GsonBuilder builder;
+ if (prettyPrint){ // Add pretty printing
+ builder = new GsonBuilder().excludeFieldsWithoutExposeAnnotation().setPrettyPrinting();
+ }else{
+ builder = new GsonBuilder().excludeFieldsWithoutExposeAnnotation();
+ }
+
+ builder.serializeNulls(); // correctly capture nulls
+ Gson gson = builder.create();
+
+ // serialize this object
+ JsonElement jsonObj = gson.toJsonTree(this);
+ jsonObj.getAsJsonObject().addProperty("id", this.getId());
+ JsonObject fileMetadataGson = this.getFileMetadata().asGsonObject(prettyPrint);
+
+ jsonObj.getAsJsonObject().add("fileMetadata", fileMetadataGson);
+
+ //JsonObject fileMetadataJSON = new JsonObject();
+ JsonObject fullFileJSON = new JsonObject();
+ fullFileJSON.add(overarchingKey, jsonObj);
+
+ return fullFileJSON;
+ }
+
+ /**
+ * Serialize this DataFile, including its FileMetadata, as a JSON string.
+ * @param prettyPrint true to indent the output for readability
+ * @return JSON string with the file attributes nested under a "data" key
+ */
+ private String serializeAsJSON(boolean prettyPrint){
+
+ JsonObject fullFileJSON = asGsonObject(prettyPrint);
+
+ //return fullFileJSON.
+ return fullFileJSON.toString();
+
+ }
-}
+} // end of class
diff --git a/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java b/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java
index 7231a457264..ba590cf1792 100644
--- a/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java
+++ b/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java
@@ -1,5 +1,10 @@
package edu.harvard.iq.dataverse;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import com.google.gson.annotations.Expose;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
@@ -38,18 +43,23 @@ public class FileMetadata implements Serializable {
private static final Logger logger = Logger.getLogger(FileMetadata.class.getCanonicalName());
+ @Expose
@Pattern(regexp="^[^:<>;#/\"\\*\\|\\?\\\\]*$", message = "File Name cannot contain any of the following characters: \\ / : * ? \" < > | ; # .")
@NotBlank(message = "Please specify a file name.")
@Column( nullable=false )
private String label = "";
+
+ @Expose
@Column(columnDefinition = "TEXT")
private String description = "";
+ @Expose
private boolean restricted;
@ManyToOne
@JoinColumn(nullable=false)
private DatasetVersion datasetVersion;
+
@ManyToOne
@JoinColumn(nullable=false)
private DataFile dataFile;
@@ -376,4 +386,51 @@ public int compare(FileMetadata o1, FileMetadata o2) {
return o1.getLabel().toUpperCase().compareTo(o2.getLabel().toUpperCase());
}
};
+
+
+
+ public String asPrettyJSON(){
+
+ return serializeAsJSON(true);
+ }
+
+ public String asJSON(){
+
+ return serializeAsJSON(false);
+ }
+
+ /**
+ * Serialize this FileMetadata as a JSON string.
+ * @param prettyPrint true to indent the output for readability
+ * @return JSON string representation of this FileMetadata
+ */
+ private String serializeAsJSON(boolean prettyPrint){
+
+ JsonObject jsonObj = asGsonObject(prettyPrint);
+
+ return jsonObj.toString();
+
+ }
+
+
+
+ public JsonObject asGsonObject(boolean prettyPrint){
+
+ GsonBuilder builder;
+ if (prettyPrint){ // Add pretty printing
+ builder = new GsonBuilder().excludeFieldsWithoutExposeAnnotation().setPrettyPrinting();
+ }else{
+ builder = new GsonBuilder().excludeFieldsWithoutExposeAnnotation();
+ }
+
+ builder.serializeNulls(); // correctly capture nulls
+ Gson gson = builder.create();
+
+ // serialize this object
+ JsonElement jsonObj = gson.toJsonTree(this);
+ jsonObj.getAsJsonObject().addProperty("id", this.getId());
+
+ return jsonObj.getAsJsonObject();
+ }
+
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java
index 6068a2c6e1e..b9495e7c69c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java
@@ -1,5 +1,6 @@
package edu.harvard.iq.dataverse.api;
+import com.google.gson.JsonElement;
import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.DatasetFieldServiceBean;
import edu.harvard.iq.dataverse.DatasetFieldType;
@@ -358,8 +359,9 @@ protected T execCommand( Command cmd ) throws WrappedResponse {
throw new WrappedResponse(ex, errorResponse(Status.INTERNAL_SERVER_ERROR, ex.getMessage()));
}
}
-
protected Response okResponse( JsonArrayBuilder bld ) {
+ Response.ok();
+
return Response.ok(Json.createObjectBuilder()
.add("status", "OK")
.add("data", bld).build()).build();
@@ -390,6 +392,25 @@ protected Response okResponse( String msg ) {
.build();
}
+
+ /**
+ * Returns an OK (HTTP 200) response built from a gson JsonObject.
+ * @param msg success message added to the object under the "message" key
+ * @param gsonObject payload to return; must not be null (mutated in place)
+ * @return HTTP 200 response whose body is the JSON object
+ */
+ protected Response okResponseGsonObject(String msg, com.google.gson.JsonObject gsonObject){
+
+ if (gsonObject == null){
+ throw new NullPointerException("gsonObject cannot be null");
+ }
+
+ gsonObject.addProperty("status", "OK");
+ gsonObject.addProperty("message", msg);
+
+ return Response.ok(gsonObject.toString(), MediaType.APPLICATION_JSON).build();
+ }
+
/**
* Returns an OK response (HTTP 200, status:OK) with the passed value
* in the data field.
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
index 71733d00063..8fa896274d6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
@@ -29,11 +29,15 @@
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
+import java.io.StringReader;
import java.util.Iterator;
import java.util.logging.Logger;
import javax.ejb.EJB;
import javax.ejb.Stateless;
import javax.inject.Inject;
+import javax.json.Json;
+import javax.json.JsonObject;
+import javax.json.JsonReader;
import javax.ws.rs.Consumes;
import javax.ws.rs.FormParam;
import javax.ws.rs.GET;
@@ -305,10 +309,13 @@ public Response addFileToDataset(@FormDataParam("datasetId") Long datasetId,
if (addFileHelper.hasError()){
return errorResponse(Response.Status.BAD_REQUEST, addFileHelper.getErrorMessagesAsString("\n"));
}else{
- return okResponse("Look at that! You added a file! (hey hey, it may have worked)");
+
+ return okResponseGsonObject("File successfully added!",
+ addFileHelper.getSuccessResultAsGsonObject());
+ //"Look at that! You added a file! (hey hey, it may have worked)");
}
-
+ //return okR
} // end: addFileToDataset
@@ -491,7 +498,7 @@ public Response hi_replace(@PathParam("oldFileId") Long oldFileId){
}else{
return okResponse("File was replaced! hey hey, it may have worked");
}
-
+
} // end call to "hi"
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index 6559108bb60..11aeb80ca7c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -5,6 +5,7 @@
*/
package edu.harvard.iq.dataverse.datasetutility;
+import com.google.gson.JsonObject;
import edu.harvard.iq.dataverse.DataFile;
import edu.harvard.iq.dataverse.DataFileServiceBean;
import edu.harvard.iq.dataverse.Dataset;
@@ -124,8 +125,7 @@ public class AddReplaceFileHelper{
List finalFileList;
// Ingested file
- private DataFile newlyAddedFile;
-
+ private DataFile newlyAddedFile;
// For error handling
private boolean errorFound;
@@ -1151,20 +1151,25 @@ private boolean step_080_run_update_dataset_command_for_replace(){
// (2) New file: Set the previousFileId to the id of the original file
// (3) New file: Set the rootFileId to the rootFileId of the original file
// -----------------------------------------------------------
- msgt("Root id check");
- msg("file to replace 1: " + fileToReplace.getRootDataFileId());
+
+ /*
+ Check the root file id on fileToReplace, updating it if necessary
+ */
if (fileToReplace.getRootDataFileId().equals(DataFile.ROOT_DATAFILE_ID_DEFAULT)){
+
fileToReplace.setRootDataFileId(fileToReplace.getId());
- msg("file to replace 2: pre save " + fileToReplace.getRootDataFileId());
fileToReplace = fileService.save(fileToReplace);
- msg("file to replace 3 post save: " + fileToReplace.getRootDataFileId());
}
+
+ /*
+ Go through the final file list, setting the rootFileId and previousFileId
+ */
for (DataFile df : finalFileList){
df.setPreviousDataFileId(fileToReplace.getId());
- msg("file to replace 4 - update new file: " + fileToReplace.getRootDataFileId());
df.setRootDataFileId(fileToReplace.getRootDataFileId());
+
}
@@ -1187,20 +1192,61 @@ private boolean step_080_run_update_dataset_command_for_replace(){
return false;
}
- // Save newly added file to object
- if (!finalFileList.isEmpty()){
- newlyAddedFile = finalFileList.get(0);
+ return true;
+ }
+
+ /**
+ * We want the version of the newly added file that has an id set
+ *
+ * TODO: This is inefficient/expensive. Need to redo it in a sane way
+ * - e.g. Query to find
+ * (1) latest dataset version in draft
+ * (2) pick off files that are NOT released
+ * (3) iterate through only those files
+ *
+ * @param df
+ */
+ private void setNewlyAddedFile(DataFile df){
+
+ newlyAddedFile = df;
+
+ for (FileMetadata fm : dataset.getEditVersion().getFileMetadatas()){
+
+ // Find a file where the checksum value and identifiers are the same..
+ //
+ if (newlyAddedFile.getChecksumValue().equals(fm.getDataFile().getChecksumValue())){
+ if (newlyAddedFile.getStorageIdentifier().equals(fm.getDataFile().getStorageIdentifier())){
+ newlyAddedFile = fm.getDataFile();
+ break;
+ }
+ }
}
- return true;
}
-
+
public DataFile getNewlyAddedFile(){
return newlyAddedFile;
}
+ public String getSuccessResult(){
+ if (newlyAddedFile == null){
+ return "Bad ERROR: Newly created file not found";
+ }
+ return newlyAddedFile.asJSON();
+
+ }
+
+ public JsonObject getSuccessResultAsGsonObject(){
+ if (newlyAddedFile == null){
+ throw new NullPointerException("Bad error: newlyAddedFile is null!");
+ }
+ return newlyAddedFile.asGsonObject(false);
+
+ }
+
+
private boolean step_090_notifyUser(){
if (this.hasError()){
return false;
@@ -1218,6 +1264,12 @@ private boolean step_100_startIngestJobs(){
return false;
}
+ // Should only be one file in the list
+ for (DataFile df : finalFileList){
+ setNewlyAddedFile(df);
+ //df.getFileMetadata();
+ break;
+ }
// clear old file list
//
From 6d6c7dd0f7943ac3159edf0b8be7c46665fd6039 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Tue, 11 Oct 2016 10:58:22 -0400
Subject: [PATCH 36/86] api endpoint for replace #2290
---
.../harvard/iq/dataverse/api/FileUpload.java | 78 +++++++++++++++++--
.../datasetutility/AddReplaceFileHelper.java | 41 +++++++++-
2 files changed, 113 insertions(+), 6 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
index 8fa896274d6..d1b8fdcfc47 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
@@ -266,7 +266,6 @@ public Response addFileToDataset(@FormDataParam("datasetId") Long datasetId,
// -------------------------------------
// (1) Get the file name and content type
// -------------------------------------
-
String newFilename = contentDispositionHeader.getFileName();
String newFileContentType = formDataBodyPart.getMediaType().toString();
@@ -279,9 +278,6 @@ public Response addFileToDataset(@FormDataParam("datasetId") Long datasetId,
} catch (WrappedResponse ex) {
return errorResponse(Response.Status.FORBIDDEN, "Couldn't find a user from the API key");
}
- //authSvc.findByID(new Long(1));
- //msg("authUser: " + authUser);
- //msg("getUserIdentifier: " + authUser.getIdentifier());
//-------------------
// (3) Create the AddReplaceFileHelper object
@@ -315,10 +311,82 @@ public Response addFileToDataset(@FormDataParam("datasetId") Long datasetId,
//"Look at that! You added a file! (hey hey, it may have worked)");
}
- //return okR
} // end: addFileToDataset
+
+ /**
+ * Replace a file in an existing Dataset
+ * @param fileToReplaceId id of the existing file being replaced
+ * @param datasetId
+ * @param testFileInputStream
+ * @param contentDispositionHeader
+ * @param formDataBodyPart
+ * @return
+ */
+ @POST
+ @Path("replace")
+ @Consumes(MediaType.MULTIPART_FORM_DATA)
+ public Response replaceFileInDataset(@FormDataParam("datasetId") Long datasetId,
+ @FormDataParam("file") InputStream testFileInputStream,
+ @FormDataParam("file") FormDataContentDisposition contentDispositionHeader,
+ @FormDataParam("file") final FormDataBodyPart formDataBodyPart,
+ @FormDataParam("fileToReplaceId") Long fileToReplaceId
+ ){
+
+ // -------------------------------------
+ // (1) Get the file name and content type
+ // -------------------------------------
+ String newFilename = contentDispositionHeader.getFileName();
+ String newFileContentType = formDataBodyPart.getMediaType().toString();
+
+ // -------------------------------------
+ // (2) Get the user from the API key
+ // -------------------------------------
+ User authUser;
+ try {
+ authUser = this.findUserOrDie();
+ } catch (WrappedResponse ex) {
+ return errorResponse(Response.Status.FORBIDDEN, "Couldn't find a user from the API key");
+ }
+
+ //-------------------
+ // (3) Create the AddReplaceFileHelper object
+ //-------------------
+ msg("ADD!");
+
+ DataverseRequest dvRequest2 = createDataverseRequest(authUser);
+ AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest2,
+ this.ingestService,
+ this.datasetService,
+ this.fileService,
+ this.permissionSvc,
+ this.commandEngine);
+
+
+ //-------------------
+ // (4) Run "runReplaceFileByDatasetId"
+ //-------------------
+ addFileHelper.runReplaceFileByDatasetId(datasetId,
+ newFilename,
+ newFileContentType,
+ testFileInputStream,
+ fileToReplaceId);
+
+
+ if (addFileHelper.hasError()){
+ return errorResponse(Response.Status.BAD_REQUEST, addFileHelper.getErrorMessagesAsString("\n"));
+ }else{
+
+ return okResponseGsonObject("File successfully replaced!",
+ addFileHelper.getSuccessResultAsGsonObject());
+ //"Look at that! You added a file! (hey hey, it may have worked)");
+ }
+
+ } // end: replaceFileInDataset
+
+
+
/**
* Used for RestAssured testing until multipart form available
* @param datasetId - dataset to add files
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index 11aeb80ca7c..149cacad288 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -256,6 +256,44 @@ public boolean runForceReplaceFile(Dataset dataset, String newFileName, String n
return this.runAddReplaceFile(dataset, newFileName, newFileContentType, newFileInputStream, oldFileId);
}
+
+ public boolean runForceReplaceFileByDatasetId(Long datasetId, String newFileName, String newFileContentType, InputStream newFileInputStream, Long oldFileId){
+
+ msgt(">> runAddFileByDatasetId");
+
+ initErrorHandling();
+ this.currentOperation = FILE_REPLACE_FORCE_OPERATION;
+
+ if (!this.step_001_loadDatasetById(datasetId)){
+ return false;
+ }
+ if (oldFileId==null){
+ this.addErrorSevere(getBundleErr("existing_file_to_replace_id_is_null"));
+ return false;
+ }
+
+ return this.runAddReplaceFile(this.dataset, newFileName, newFileContentType, newFileInputStream, oldFileId);
+ }
+
+
+ public boolean runReplaceFileByDatasetId(Long datasetId, String newFileName, String newFileContentType, InputStream newFileInputStream, Long oldFileId){
+
+ msgt(">> runAddFileByDatasetId");
+
+ initErrorHandling();
+ this.currentOperation = FILE_REPLACE_OPERATION;
+
+ if (!this.step_001_loadDatasetById(datasetId)){
+ return false;
+ }
+ if (oldFileId==null){
+ this.addErrorSevere(getBundleErr("existing_file_to_replace_id_is_null"));
+ return false;
+ }
+
+ return this.runReplaceFile(this.dataset, newFileName, newFileContentType, newFileInputStream, oldFileId);
+ }
+
/**
* After the constructor, this method is called to replace a file
*
@@ -1203,7 +1241,8 @@ private boolean step_080_run_update_dataset_command_for_replace(){
* (1) latest dataset version in draft
* (2) pick off files that are NOT released
* (3) iterate through only those files
- *
+ * - or an alternate/better version
+ *
* @param df
*/
private void setNewlyAddedFile(DataFile df){
From 1c86f91e20e1a68509b107abb825aa47b212b414 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Tue, 11 Oct 2016 12:44:03 -0400
Subject: [PATCH 37/86] #2290 fix where not running 'major cleanup' when old
file not in current dataset version
---
.../iq/dataverse/datasetutility/AddReplaceFileHelper.java | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index 149cacad288..3cc1b80b18c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -719,7 +719,7 @@ private boolean step_025_loadFileToReplaceById(Long dataFileId){
return false;
}
- // This shouldn't happen, the public replace method should through
+ // This shouldn't happen, the public replace method should throw
// a NullPointerException
//
if (dataFileId == null){
@@ -756,9 +756,11 @@ private boolean step_030_createNewFilesViaIngest(){
this.newFileInputStream,
this.newFileName,
this.newFileContentType);
+
} catch (IOException ex) {
this.addErrorSevere(getBundleErr("ingest_create_file_err"));
logger.severe(ex.toString());
+ this.runMajorCleanup();
return false;
}
@@ -814,6 +816,7 @@ private boolean step_035_auto_isReplacementInLatestVersion(){
}
if (!fileInLatestVersion){
addError(getBundleErr("existing_file_not_in_latest_published_version"));
+ this.runMajorCleanup();
return false;
}
return true;
From b94560fc5adfb0580401de9366bfe8a1d9216d5d Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Tue, 11 Oct 2016 14:54:40 -0400
Subject: [PATCH 38/86] #2290 for UI - break down add/replace sequence into two
methods
---
src/main/java/Bundle.properties | 1 +
.../datasetutility/AddReplaceFileHelper.java | 70 +++++++++++++++++--
2 files changed, 64 insertions(+), 7 deletions(-)
diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties
index 27bfcdd3f5b..3fa1e4c98e6 100755
--- a/src/main/java/Bundle.properties
+++ b/src/main/java/Bundle.properties
@@ -1457,3 +1457,4 @@ file.addreplace.error.replace.command_engine_error=Failed to update the dataset.
file.addreplace.error.replace.ejb_exception=Failed to update the dataset. Please contact the administrator. (EJBException)
file.addreplace.error.remove_linked_file.dataset=dataset cannot be null in removeLinkedFileFromDataset
file.addreplace.error.remove_linked_file.file=file cannot be null in removeLinkedFileFromDataset
+file.addreplace.error.phase2_called_early_no_new_files=There was an error saving the dataset--no new files found.
\ No newline at end of file
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index 3cc1b80b18c..c566c0f7e97 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -324,6 +324,13 @@ public boolean runReplaceFile(Dataset dataset, String newFileName, String newFil
* oldFileId - For ADD, set to null
* oldFileId - For REPLACE, set to id of file to replace
*
+ * This has now been broken into Phase 1 and Phase 2
+ *
+ * The APIs will use this method and call Phase 1 & Phase 2 consecutively
+ *
+ * The UI will call Phase 1 on initial upload and
+ * then run Phase 2 if the user chooses to save the changes.
+ *
*
* @return
*/
@@ -331,13 +338,43 @@ private boolean runAddReplaceFile(Dataset dataset,
String newFileName, String newFileContentType, InputStream newFileInputStream,
Long oldFileId){
- initErrorHandling();
+ // Run "Phase 1" - Initial ingest of file + error check
+ // But don't save the dataset version yet
+ //
+ boolean phase1Success = runAddReplacePhase1(dataset,
+ newFileName,
+ newFileContentType,
+ newFileInputStream,
+ oldFileId);
+
+ if (!phase1Success){
+ return false;
+ }
+
+
+ return runAddReplacePhase2();
+
+ }
+ /**
+ * For the UI: File add/replace has been broken into 2 steps
+ *
+ * Phase 1 (here): Add/replace the file and make sure there are no errors
+ * But don't update the Dataset (yet)
+ *
+ * @return
+ */
+ public boolean runAddReplacePhase1(Dataset dataset,
+ String newFileName, String newFileContentType, InputStream newFileInputStream,
+ Long oldFileId){
if (this.hasError()){
- return false;
+ return false; // possible to have errors already...
}
+ initErrorHandling();
+
+
msgt("step_001_loadDataset");
if (!this.step_001_loadDataset(dataset)){
return false;
@@ -375,8 +412,29 @@ private boolean runAddReplaceFile(Dataset dataset,
return false;
}
+ return true;
+ }
+
+
+ /**
+ * For the UI: File add/replace has been broken into 2 steps
+ *
+ * Phase 2 (here): Phase 1 has run ok, Update the Dataset -- issue the commands!
+ *
+ * @return
+ */
+ public boolean runAddReplacePhase2(){
+
+ if (this.hasError()){
+ return false; // possible to have errors already...
+ }
- msgt("step_060_addFilesViaIngestService");
+ if ((finalFileList == null)||(finalFileList.isEmpty())){
+ addError(getBundleErr("phase2_called_early_no_new_files"));
+ return false;
+ }
+
+ msgt("step_060_addFilesViaIngestService");
if (!this.step_060_addFilesViaIngestService()){
return false;
@@ -405,10 +463,10 @@ private boolean runAddReplaceFile(Dataset dataset,
return false;
}
-
return true;
}
-
+
+
/**
* Get for currentOperation
* @return String
@@ -464,8 +522,6 @@ private void initErrorHandling(){
}
-
-
/**
From 573cd26e21f6d62c51c53d65f9e781cb29da9ee3 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Thu, 13 Oct 2016 10:39:55 -0400
Subject: [PATCH 39/86] temp removed kick off of ingest until it is async.
#2290
---
.../harvard/iq/dataverse/api/FileUpload.java | 36 ++++++++++++++-----
.../datasetutility/AddReplaceFileHelper.java | 11 ++++--
2 files changed, 36 insertions(+), 11 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
index d1b8fdcfc47..4a366c49195 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
@@ -314,6 +314,9 @@ public Response addFileToDataset(@FormDataParam("datasetId") Long datasetId,
} // end: addFileToDataset
+
+
+
/**
* Add a File to an existing Dataset
@@ -331,9 +334,14 @@ public Response replaceFileInDataset(@FormDataParam("datasetId") Long datasetId,
@FormDataParam("file") InputStream testFileInputStream,
@FormDataParam("file") FormDataContentDisposition contentDispositionHeader,
@FormDataParam("file") final FormDataBodyPart formDataBodyPart,
- @FormDataParam("fileToReplaceId") Long fileToReplaceId
+ @FormDataParam("fileToReplaceId") Long fileToReplaceId,
+ @FormDataParam("forceReplace") Boolean forceReplace
){
+ if (forceReplace==null){
+ forceReplace = false;
+ }
+
// -------------------------------------
// (1) Get the file name and content type
// -------------------------------------
@@ -353,7 +361,7 @@ public Response replaceFileInDataset(@FormDataParam("datasetId") Long datasetId,
//-------------------
// (3) Create the AddReplaceFileHelper object
//-------------------
- msg("ADD!");
+ msg("REPLACE!");
DataverseRequest dvRequest2 = createDataverseRequest(authUser);
AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest2,
@@ -367,16 +375,26 @@ public Response replaceFileInDataset(@FormDataParam("datasetId") Long datasetId,
//-------------------
// (4) Run "runReplaceFileByDatasetId"
//-------------------
- addFileHelper.runReplaceFileByDatasetId(datasetId,
- newFilename,
- newFileContentType,
- testFileInputStream,
- fileToReplaceId);
-
-
+ if (forceReplace){
+ addFileHelper.runForceReplaceFileByDatasetId(datasetId,
+ newFilename,
+ newFileContentType,
+ testFileInputStream,
+ fileToReplaceId);
+ }else{
+ addFileHelper.runReplaceFileByDatasetId(datasetId,
+ newFilename,
+ newFileContentType,
+ testFileInputStream,
+ fileToReplaceId);
+ }
+
+ msg("we're back.....");
if (addFileHelper.hasError()){
+ msg("yes, has error");
return errorResponse(Response.Status.BAD_REQUEST, addFileHelper.getErrorMessagesAsString("\n"));
}else{
+ msg("no error");
return okResponseGsonObject("File successfully replaced!",
addFileHelper.getSuccessResultAsGsonObject());
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index c566c0f7e97..1a9d73a3f33 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -1001,10 +1001,11 @@ private boolean step_045_auto_checkForFileReplaceDuplicate(){
if (Objects.equals(df.getChecksumValue(), fileToReplace.getChecksumValue())){
this.addError(getBundleErr("replace.new_file_same_as_replacement"));
+ break;
}
// This should be able to be overridden --force
- if (isForceFileOperation()){
+ if (!isForceFileOperation()){
// Warning that content type of the file has changed
//
@@ -1128,7 +1129,7 @@ private boolean step_085_auto_remove_filemetadata_to_replace_from_working_versio
this.addErrorSevere(getBundleErr("only_replace_operation") + " (step_085_auto_remove_filemetadata_to_replace_from_working_version");
return false;
}
- msg("step_085_auto_remove_filemetadata_to_replace_from_working_version 1");
+ msg("step_085_auto_remove_filemetadata_to_replace_from_working_version 2");
if (this.hasError()){
return false;
@@ -1373,11 +1374,17 @@ private boolean step_100_startIngestJobs(){
//
finalFileList.clear();
+ // TODO: Need to run ingest async......
+ if (true){
+ return true;
+ }
+ msg("pre ingest start");
// start the ingest!
//
ingestService.startIngestJobs(dataset, dvRequest.getAuthenticatedUser());
+ msg("post ingest start");
return true;
}
From 94e302cd1cac456630f9cab04718a9f26ffc337c Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Thu, 13 Oct 2016 14:13:04 -0400
Subject: [PATCH 40/86] add API tests for file replace and native add #2290
---
.../harvard/iq/dataverse/api/DatasetsIT.java | 73 +++++++++++++++++++
.../edu/harvard/iq/dataverse/api/UtilIT.java | 17 +++++
2 files changed, 90 insertions(+)
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
index 6bca29aff12..e2056d5d409 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
@@ -465,4 +465,77 @@ public void testFileChecksum() {
.statusCode(OK.getStatusCode());
}
+
+ @Test
+ public void testFileReplace() {
+
+ Response createUser = UtilIT.createRandomUser();
+ createUser.then().assertThat().statusCode(OK.getStatusCode());
+// createUser.prettyPrint();
+ String username = UtilIT.getUsernameFromResponse(createUser);
+ String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+
+ Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+// createDataverseResponse.prettyPrint();
+ createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+ String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+
+ Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+ createDatasetResponse.prettyPrint();
+ createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+ Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
+
+ Response getDatasetJsonBeforeFiles = UtilIT.nativeGet(datasetId, apiToken);
+ getDatasetJsonBeforeFiles.prettyPrint();
+ getDatasetJsonBeforeFiles.then().assertThat().statusCode(OK.getStatusCode());
+ String protocol1 = JsonPath.from(getDatasetJsonBeforeFiles.getBody().asString()).getString("data.protocol");
+ String authority1 = JsonPath.from(getDatasetJsonBeforeFiles.getBody().asString()).getString("data.authority");
+ String identifier1 = JsonPath.from(getDatasetJsonBeforeFiles.getBody().asString()).getString("data.identifier");
+ String dataset1PersistentId = protocol1 + ":" + authority1 + "/" + identifier1;
+
+ Response uploadFileResponse = UtilIT.uploadRandomFile(dataset1PersistentId, apiToken);
+ uploadFileResponse.prettyPrint();
+ getDatasetJsonBeforeFiles.then().assertThat().statusCode(OK.getStatusCode());
+ assertEquals(CREATED.getStatusCode(), uploadFileResponse.getStatusCode());
+
+ Response getDatasetJsonWithFiles = UtilIT.nativeGet(datasetId, apiToken);
+ getDatasetJsonWithFiles.prettyPrint();
+ getDatasetJsonWithFiles.then().assertThat().statusCode(OK.getStatusCode());
+ int fileId = JsonPath.from(getDatasetJsonWithFiles.getBody().asString()).getInt("data.latestVersion.files[0].dataFile.id");
+ UtilIT.publishDataverseViaSword(dataverseAlias, apiToken).then().assertThat().statusCode(OK.getStatusCode());
+ UtilIT.publishDatasetViaSword(dataset1PersistentId, apiToken).then().assertThat().statusCode(OK.getStatusCode());
+
+ String pathToFile = "src/main/webapp/resources/images/dataverseproject.png";
+ Response replace = UtilIT.replaceFile(datasetId, fileId, pathToFile, apiToken);
+ replace.prettyPrint();
+ replace.then().assertThat().statusCode(OK.getStatusCode());
+
+ }
+
+ @Test
+ public void testFileReplaceNativeAdd() {
+
+ Response createUser = UtilIT.createRandomUser();
+ createUser.then().assertThat().statusCode(OK.getStatusCode());
+// createUser.prettyPrint();
+ String username = UtilIT.getUsernameFromResponse(createUser);
+ String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+
+ Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+// createDataverseResponse.prettyPrint();
+ createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+ String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+
+ Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+ createDatasetResponse.prettyPrint();
+ createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+ Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
+
+ String pathToFile = "src/main/webapp/resources/images/dataverseproject.png";
+ Response add = UtilIT.uploadFileViaNative(datasetId, pathToFile, apiToken);
+
+ add.prettyPrint();
+ add.then().assertThat().statusCode(OK.getStatusCode());
+ }
+
}
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
index fa097182407..1a8762084a8 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
@@ -259,6 +259,23 @@ public static Response uploadFile(String persistentId, String zipfilename, Strin
}
+ static Response uploadFileViaNative(Integer datasetId, String pathToFile, String apiToken) {
+ return given()
+ .header(API_TOKEN_HTTP_HEADER, apiToken)
+ .multiPart("datasetId", datasetId)
+ .multiPart("file", new File("src/main/webapp/resources/images/dataverseproject.png"))
+ .post("/api/upload/add");
+ }
+
+ static Response replaceFile(Integer datasetId, int fileId, String pathToFile, String apiToken) {
+ return given()
+ .header(API_TOKEN_HTTP_HEADER, apiToken)
+ .multiPart("datasetId", datasetId)
+ .multiPart("fileToReplaceId", fileId)
+ .multiPart("file", new File(pathToFile))
+ .post("/api/upload/replace");
+ }
+
static Response downloadFile(Integer fileId) {
return given()
// .header(API_TOKEN_HTTP_HEADER, apiToken)
From ebef1a2c7478ebd0cc494e9642defa506db46579 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Thu, 13 Oct 2016 14:33:33 -0400
Subject: [PATCH 41/86] Let add/replace tests pass by making the user a superuser
---
.../edu/harvard/iq/dataverse/api/DatasetsIT.java | 12 ++++++++++--
1 file changed, 10 insertions(+), 2 deletions(-)
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
index e2056d5d409..04db0a2302b 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
@@ -475,6 +475,8 @@ public void testFileReplace() {
String username = UtilIT.getUsernameFromResponse(createUser);
String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+ UtilIT.makeSuperUser(username);
+
Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
// createDataverseResponse.prettyPrint();
createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
@@ -512,6 +514,8 @@ public void testFileReplace() {
}
+
+
@Test
public void testFileReplaceNativeAdd() {
@@ -520,8 +524,12 @@ public void testFileReplaceNativeAdd() {
// createUser.prettyPrint();
String username = UtilIT.getUsernameFromResponse(createUser);
String apiToken = UtilIT.getApiTokenFromResponse(createUser);
-
+
+
+ UtilIT.makeSuperUser(username);
+
Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+
// createDataverseResponse.prettyPrint();
createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
@@ -533,7 +541,7 @@ public void testFileReplaceNativeAdd() {
String pathToFile = "src/main/webapp/resources/images/dataverseproject.png";
Response add = UtilIT.uploadFileViaNative(datasetId, pathToFile, apiToken);
-
+
add.prettyPrint();
add.then().assertThat().statusCode(OK.getStatusCode());
}
From 6adb10e62c7e4473a17e3a9621909c8cd9254892 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Fri, 14 Oct 2016 11:45:11 -0400
Subject: [PATCH 42/86] #2290 Fix for ingest issue. Two errors: (a) API bean
was incorrectly stateless and (b) the dataset variable wasn't being updated
after running the update dataset command. e.g. Passing non-persisted dataset
object to ingest
---
.../harvard/iq/dataverse/api/FileUpload.java | 1 -
.../datasetutility/AddReplaceFileHelper.java | 34 +++++++++++--------
2 files changed, 19 insertions(+), 16 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
index 4a366c49195..e5c63407b41 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
@@ -56,7 +56,6 @@
*
* @author rmp553
*/
-@Stateless
@Path("upload")
public class FileUpload extends AbstractApiBean {
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index 1a9d73a3f33..d7547afcc75 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -19,11 +19,8 @@
import edu.harvard.iq.dataverse.engine.command.Command;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
-import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetVersionCommand;
import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetCommand;
import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
-import edu.harvard.iq.dataverse.util.JsfHelper;
-import static edu.harvard.iq.dataverse.util.JsfHelper.JH;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
@@ -33,12 +30,7 @@
import java.util.ResourceBundle;
import java.util.Set;
import java.util.logging.Logger;
-import javax.ejb.EJB;
import javax.ejb.EJBException;
-import javax.ejb.Stateless;
-import javax.enterprise.context.SessionScoped;
-import javax.faces.application.FacesMessage;
-import javax.inject.Named;
import javax.validation.ConstraintViolation;
/**
@@ -1100,7 +1092,10 @@ private boolean step_070_run_update_dataset_command(){
((UpdateDatasetCommand) update_cmd).setValidateLenient(true);
try {
- commandEngine.submit(update_cmd);
+ // Submit the update dataset command
+ // and update the local dataset object
+ //
+ dataset = commandEngine.submit(update_cmd);
} catch (CommandException ex) {
this.addErrorSevere(getBundleErr("add.command_engine_error"));
logger.severe(ex.getMessage());
@@ -1278,8 +1273,11 @@ private boolean step_080_run_update_dataset_command_for_replace(){
((UpdateDatasetCommand) update_cmd).setValidateLenient(true);
- try {
- commandEngine.submit(update_cmd);
+ try {
+ // Submit the update dataset command
+ // and update the local dataset object
+ //
+ dataset = commandEngine.submit(update_cmd);
} catch (CommandException ex) {
this.addErrorSevere(getBundleErr("replace.command_engine_error"));
logger.severe(ex.getMessage());
@@ -1346,6 +1344,11 @@ public JsonObject getSuccessResultAsGsonObject(){
}
+ /**
+ * Currently this is a placeholder, in case we decide to send
+ * user notifications later.
+ *
+ */
private boolean step_090_notifyUser(){
if (this.hasError()){
return false;
@@ -1374,14 +1377,15 @@ private boolean step_100_startIngestJobs(){
//
finalFileList.clear();
- // TODO: Need to run ingest async......
- if (true){
- return true;
- }
+ // TODO: Need to run ingest async......
+ //if (true){
+ //return true;
+ //}
msg("pre ingest start");
// start the ingest!
//
+
ingestService.startIngestJobs(dataset, dvRequest.getAuthenticatedUser());
msg("post ingest start");
From 86c421026243345b55fcae93352d4656ef513145 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Fri, 14 Oct 2016 12:34:33 -0400
Subject: [PATCH 43/86] #2290 - Restructured so Replace only requires a FileId,
not a Dataset ID and File Id
---
.../harvard/iq/dataverse/api/Datasets.java | 106 ++++++++-
.../harvard/iq/dataverse/api/FileUpload.java | 198 +---------------
.../datasetutility/AddReplaceFileHelper.java | 215 ++++++++----------
3 files changed, 201 insertions(+), 318 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
index f0af8490c3d..cc05a0c0f81 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -2,6 +2,7 @@
import edu.harvard.iq.dataverse.DOIEZIdServiceBean;
import edu.harvard.iq.dataverse.DataFile;
+import edu.harvard.iq.dataverse.DataFileServiceBean;
import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.DatasetField;
import edu.harvard.iq.dataverse.DatasetFieldServiceBean;
@@ -9,16 +10,20 @@
import edu.harvard.iq.dataverse.DatasetFieldValue;
import edu.harvard.iq.dataverse.DatasetServiceBean;
import edu.harvard.iq.dataverse.DatasetVersion;
+import edu.harvard.iq.dataverse.DatasetVersionServiceBean;
import edu.harvard.iq.dataverse.Dataverse;
import edu.harvard.iq.dataverse.DataverseServiceBean;
+import edu.harvard.iq.dataverse.EjbDataverseEngine;
import edu.harvard.iq.dataverse.MetadataBlock;
import edu.harvard.iq.dataverse.MetadataBlockServiceBean;
import edu.harvard.iq.dataverse.RoleAssignment;
+import static edu.harvard.iq.dataverse.api.AbstractApiBean.errorResponse;
import edu.harvard.iq.dataverse.api.imports.ImportException;
import edu.harvard.iq.dataverse.api.imports.ImportUtil;
import edu.harvard.iq.dataverse.authorization.DataverseRole;
import edu.harvard.iq.dataverse.authorization.RoleAssignee;
import edu.harvard.iq.dataverse.authorization.users.User;
+import edu.harvard.iq.dataverse.datasetutility.AddReplaceFileHelper;
import edu.harvard.iq.dataverse.engine.command.Command;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
@@ -45,6 +50,7 @@
import edu.harvard.iq.dataverse.export.DDIExportServiceBean;
import edu.harvard.iq.dataverse.export.ExportService;
import edu.harvard.iq.dataverse.export.ddi.DdiExportUtil;
+import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import edu.harvard.iq.dataverse.util.SystemConfig;
@@ -70,6 +76,7 @@
import javax.validation.Validation;
import javax.validation.Validator;
import javax.validation.ValidatorFactory;
+import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
@@ -80,6 +87,9 @@
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
+import org.glassfish.jersey.media.multipart.FormDataBodyPart;
+import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
+import org.glassfish.jersey.media.multipart.FormDataParam;
@Path("datasets")
public class Datasets extends AbstractApiBean {
@@ -112,7 +122,18 @@ public class Datasets extends AbstractApiBean {
@EJB
SettingsServiceBean settingsService;
+ @EJB
+ DataFileServiceBean fileService;
+
+ @EJB
+ DatasetVersionServiceBean datasetVersionService;
+ @EJB
+ IngestServiceBean ingestService;
+
+ @EJB
+ EjbDataverseEngine commandEngine;
+
/**
* Used to consolidate the way we parse and handle dataset versions.
* @param
@@ -376,7 +397,7 @@ public Response getVersionFiles( @PathParam("id") String datasetId, @PathParam("
}
@GET
- @Path("{id}/versions/{versionId}/metadata")
+ @Path("{id}/versions/{versionId}/metadata")
public Response getVersionMetadata( @PathParam("id") String datasetId, @PathParam("versionId") String versionId) {
try {
@@ -779,4 +800,87 @@ public Response deletePrivateUrl(@PathParam("id") String idSupplied) {
}
}
+
+
+ /**
+ * Add a File to an existing Dataset
+ *
+ * @param datasetId
+ * @param testFileInputStream
+ * @param contentDispositionHeader
+ * @param formDataBodyPart
+ * @return
+ */
+ @POST
+ @Path("{id}/add")
+ @Consumes(MediaType.MULTIPART_FORM_DATA)
+ public Response addFileToDataset(@PathParam("id") Long datasetId,
+ @FormDataParam("file") InputStream testFileInputStream,
+ @FormDataParam("file") FormDataContentDisposition contentDispositionHeader,
+ @FormDataParam("file") final FormDataBodyPart formDataBodyPart
+ ){
+
+ // -------------------------------------
+ // (1) Get the file name and content type
+ // -------------------------------------
+ String newFilename = contentDispositionHeader.getFileName();
+ String newFileContentType = formDataBodyPart.getMediaType().toString();
+
+ // -------------------------------------
+ // (2) Get the user from the API key
+ // -------------------------------------
+ User authUser;
+ try {
+ authUser = this.findUserOrDie();
+ } catch (WrappedResponse ex) {
+ return errorResponse(Response.Status.FORBIDDEN, "Couldn't find a user from the API key");
+ }
+
+ //-------------------
+ // (3) Create the AddReplaceFileHelper object
+ //-------------------
+ msg("ADD!");
+
+ DataverseRequest dvRequest2 = createDataverseRequest(authUser);
+ AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest2,
+ this.ingestService,
+ this.datasetService,
+ this.fileService,
+ this.permissionSvc,
+ this.commandEngine);
+
+
+ //-------------------
+ // (4) Run "runAddFileByDatasetId"
+ //-------------------
+ addFileHelper.runAddFileByDatasetId(datasetId,
+ newFilename,
+ newFileContentType,
+ testFileInputStream);
+
+
+ if (addFileHelper.hasError()){
+ return errorResponse(Response.Status.BAD_REQUEST, addFileHelper.getErrorMessagesAsString("\n"));
+ }else{
+
+ return okResponseGsonObject("File successfully added!",
+ addFileHelper.getSuccessResultAsGsonObject());
+ //"Look at that! You added a file! (hey hey, it may have worked)");
+ }
+
+ } // end: addFileToDataset
+
+
+
+ private void msg(String m){
+ System.out.println(m);
+ }
+ private void dashes(){
+ msg("----------------");
+ }
+ private void msgt(String m){
+ dashes(); msg(m); dashes();
+ }
+
+
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
index e5c63407b41..6b29321bc9e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
@@ -329,7 +329,7 @@ public Response addFileToDataset(@FormDataParam("datasetId") Long datasetId,
@POST
@Path("replace")
@Consumes(MediaType.MULTIPART_FORM_DATA)
- public Response replaceFileInDataset(@FormDataParam("datasetId") Long datasetId,
+ public Response replaceFileInDataset(
@FormDataParam("file") InputStream testFileInputStream,
@FormDataParam("file") FormDataContentDisposition contentDispositionHeader,
@FormDataParam("file") final FormDataBodyPart formDataBodyPart,
@@ -375,17 +375,17 @@ public Response replaceFileInDataset(@FormDataParam("datasetId") Long datasetId,
// (4) Run "runReplaceFileByDatasetId"
//-------------------
if (forceReplace){
- addFileHelper.runForceReplaceFileByDatasetId(datasetId,
+ addFileHelper.runForceReplaceFile(fileToReplaceId,
newFilename,
newFileContentType,
- testFileInputStream,
- fileToReplaceId);
+ testFileInputStream
+ );
}else{
- addFileHelper.runReplaceFileByDatasetId(datasetId,
+ addFileHelper.runForceReplaceFile(fileToReplaceId,
newFilename,
newFileContentType,
- testFileInputStream,
- fileToReplaceId);
+ testFileInputStream
+ );
}
msg("we're back.....");
@@ -403,190 +403,6 @@ public Response replaceFileInDataset(@FormDataParam("datasetId") Long datasetId,
} // end: replaceFileInDataset
-
- /**
- * Used for RestAssured testing until multipart form available
- * @param datasetId - dataset to add files
- * @param existingTestFileName test file in directory "scripts/search/data/binary/"
- * @param fileContentType
- * @param fileName
- * @param fileToReplaceId
- * @return
- */
- @GET
- @Path("addTest1")
- public Response testAddReplace(@QueryParam("replaceOperation") Boolean replaceOperation,
- @QueryParam("datasetId") Long datasetId,
- @QueryParam("loadById") Boolean loadById,
- @QueryParam("existingTestFileName") String existingTestFileName,
- @QueryParam("newFileContentType") String newFileContentType,
- @QueryParam("newFileName") String newFileName,
- @QueryParam("fileToReplaceId") Long fileToReplaceId,
- @QueryParam("badStreamTest") Boolean badStreamTest){
-
- if (loadById==null){
- loadById = false;
- }
- if (badStreamTest==null){
- badStreamTest = false;
- }
- if (replaceOperation == null){
- replaceOperation = false;
- }
-
- // -------------------------------------
- msgt("(1) Get User from API token");
- // -------------------------------------
- User authUser;
- try {
- authUser = this.findUserOrDie();
- } catch (WrappedResponse ex) {
- return okResponse("Couldn't find a user from the API key");
- }
- //authSvc.findByID(new Long(1));
- msg("authUser: " + authUser);
- msg("getUserIdentifier: " + authUser.getIdentifier());
-
- // -------------------------------------
- msgt("(2) createDataverseRequest");
- // -------------------------------------
- DataverseRequest dvRequest2 = createDataverseRequest(authUser);
- AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest2,
- this.ingestService,
- this.datasetService,
- this.fileService,
- this.permissionSvc,
- this.commandEngine);
-
-
- // -------------------------------------
- msgt("(3) send Params, including nulls");
- // -------------------------------------
- InputStream testFileInputStream;
- if (badStreamTest){
- testFileInputStream = null;
- }else if (existingTestFileName != null){
-
- testFileInputStream = getExistingFileInputStream(existingTestFileName);
- msgt("testFileInputStream: " + testFileInputStream);
-
- } else{
- testFileInputStream = getSampleFile();
- if (testFileInputStream == null){
- return okResponse("Couldn't find the file!!");
- }
- }
-
- if (loadById){
- addFileHelper.runAddFileByDatasetId(datasetId,
- newFileName,
- newFileContentType,
- testFileInputStream);
-
- }else{
- Dataset selectedDataset = null;
- if (datasetId != null){
- selectedDataset = datasetService.find(datasetId);
- }
-
- if (replaceOperation){
- msg("Test REPLACE operation");
- // Replace operation
- addFileHelper.runReplaceFile(selectedDataset,
- newFileName,
- newFileContentType,
- testFileInputStream,
- fileToReplaceId);
-
- }else{
- msg("Test ADD operation");
- // Add operation
- addFileHelper.runAddFile(selectedDataset,
- newFileName,
- newFileContentType,
- testFileInputStream);
-
- }
- }
- if (addFileHelper.hasError()){
- return okResponse(addFileHelper.getErrorMessagesAsString("\n"));
- }else{
- return okResponse("Look at that! You added a file! (hey hey, it may have worked)");
- }
-
- //return okResponse("in progress2");
-
- }
-
-
-
- @GET
- @Path("replace/{oldFileId}")
- public Response hi_replace(@PathParam("oldFileId") Long oldFileId){
-
- // -------------------------------------
- msgt("(1) getSampleFile()");
- // -------------------------------------
-
- InputStream testFileInputStream = getSampleFile();
- if (testFileInputStream == null){
- return okResponse("Couldn't find the file!!");
- }
-
- // -------------------------------------
- msgt("(1a) Get User from API token");
- // -------------------------------------
- User authUser;
- try {
- authUser = this.findUserOrDie();
- } catch (WrappedResponse ex) {
- return okResponse("Couldn't find a user from the API key");
- }
- //authSvc.findByID(new Long(1));
- msg("authUser: " + authUser);
- msg("getUserIdentifier: " + authUser.getIdentifier());
-
-
- // -------------------------------------
- msgt("(1b) Get the selected Dataset");
- // -------------------------------------
- int dataset_id = 10;
- Dataset selectedDataset = datasetService.find(new Long(dataset_id));
-
-
- //-------------------
- // REPLACE
- //-------------------
-
- msg("REPLACE!");
-
-
- DataverseRequest dvRequest2 = createDataverseRequest(authUser);
- AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest2,
- this.ingestService,
- this.datasetService,
- this.fileService,
- this.permissionSvc,
- this.commandEngine);
-
- //Long oldFileId = oldFileId;
- addFileHelper.runReplaceFile(selectedDataset,
- "replace_" + oldFileId.toString() + ".txt",
- "text/plain",
- testFileInputStream,
- oldFileId
- );
-
-
- if (addFileHelper.hasError()){
- return okResponse(addFileHelper.getErrorMessagesAsString("\n"));
- }else{
- return okResponse("File was replaced! hey hey, it may have worked");
- }
-
-
- } // end call to "hi"
-
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index d7547afcc75..856d787aeff 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -196,6 +196,7 @@ public boolean runAddFileByDatasetId(Long datasetId, String newFileName, String
msgt(">> runAddFileByDatasetId");
initErrorHandling();
+
this.currentOperation = FILE_ADD_OPERATION;
if (!this.step_001_loadDatasetById(datasetId)){
@@ -217,12 +218,15 @@ public boolean runAddFileByDatasetId(Long datasetId, String newFileName, String
*/
public boolean runAddFile(Dataset dataset, String newFileName, String newFileContentType, InputStream newFileInputStream){
msgt(">> runAddFile");
+
+ initErrorHandling();
+
if (this.hasError()){
return false;
}
this.currentOperation = FILE_ADD_OPERATION;
- return this.runAddReplaceFile(dataset, newFileName, newFileContentType, newFileInputStream, null);
+ return this.runAddReplaceFile(dataset, newFileName, newFileContentType, newFileInputStream);
}
@@ -235,79 +239,56 @@ public boolean runAddFile(Dataset dataset, String newFileName, String newFileCon
* @param newFileInputStream
* @return
*/
- public boolean runForceReplaceFile(Dataset dataset, String newFileName, String newFileContentType, InputStream newFileInputStream, Long oldFileId){
+ public boolean runForceReplaceFile(Long oldFileId, String newFileName, String newFileContentType, InputStream newFileInputStream){
msgt(">> runForceReplaceFile");
+ initErrorHandling();
+
this.currentOperation = FILE_REPLACE_FORCE_OPERATION;
+
if (oldFileId==null){
this.addErrorSevere(getBundleErr("existing_file_to_replace_id_is_null"));
return false;
}
-
- return this.runAddReplaceFile(dataset, newFileName, newFileContentType, newFileInputStream, oldFileId);
- }
-
-
- public boolean runForceReplaceFileByDatasetId(Long datasetId, String newFileName, String newFileContentType, InputStream newFileInputStream, Long oldFileId){
-
- msgt(">> runAddFileByDatasetId");
-
- initErrorHandling();
- this.currentOperation = FILE_REPLACE_FORCE_OPERATION;
-
- if (!this.step_001_loadDatasetById(datasetId)){
- return false;
- }
- if (oldFileId==null){
- this.addErrorSevere(getBundleErr("existing_file_to_replace_id_is_null"));
+
+ // Loads local variable "fileToReplace"
+ //
+ if (!this.step_005_loadFileToReplaceById(oldFileId)){
return false;
}
+
- return this.runAddReplaceFile(this.dataset, newFileName, newFileContentType, newFileInputStream, oldFileId);
+ return this.runAddReplaceFile(fileToReplace.getOwner(), newFileName, newFileContentType, newFileInputStream);
}
+
+
- public boolean runReplaceFileByDatasetId(Long datasetId, String newFileName, String newFileContentType, InputStream newFileInputStream, Long oldFileId){
+ public boolean runReplaceFile(Long oldFileId, String newFileName, String newFileContentType, InputStream newFileInputStream){
- msgt(">> runAddFileByDatasetId");
+ msgt(">> runReplaceFile");
initErrorHandling();
this.currentOperation = FILE_REPLACE_OPERATION;
- if (!this.step_001_loadDatasetById(datasetId)){
- return false;
- }
if (oldFileId==null){
this.addErrorSevere(getBundleErr("existing_file_to_replace_id_is_null"));
return false;
}
- return this.runReplaceFile(this.dataset, newFileName, newFileContentType, newFileInputStream, oldFileId);
- }
-
- /**
- * After the constructor, this method is called to replace a file
- *
- * @param dataset
- * @param newFileName
- * @param newFileContentType
- * @param newFileInputStream
- * @return
- */
- public boolean runReplaceFile(Dataset dataset, String newFileName, String newFileContentType, InputStream newFileInputStream, Long oldFileId){
-
- msgt(">> runReplaceFile");
- this.currentOperation = FILE_REPLACE_OPERATION;
-
- if (oldFileId==null){
- this.addErrorSevere(getBundleErr("existing_file_to_replace_id_is_null"));
+
+ // Loads local variable "fileToReplace"
+ //
+ if (!this.step_005_loadFileToReplaceById(oldFileId)){
return false;
}
-
- return this.runAddReplaceFile(dataset, newFileName, newFileContentType, newFileInputStream, oldFileId);
+
+ return this.runAddReplaceFile(fileToReplace.getOwner(), newFileName, newFileContentType, newFileInputStream);
}
+
+
/**
* Here we're going to run through the steps to ADD or REPLACE a file
*
@@ -327,8 +308,9 @@ public boolean runReplaceFile(Dataset dataset, String newFileName, String newFil
* @return
*/
private boolean runAddReplaceFile(Dataset dataset,
- String newFileName, String newFileContentType, InputStream newFileInputStream,
- Long oldFileId){
+ String newFileName, String newFileContentType,
+ InputStream newFileInputStream
+ ){
// Run "Phase 1" - Initial ingest of file + error check
// But don't save the dataset version yet
@@ -336,8 +318,7 @@ private boolean runAddReplaceFile(Dataset dataset,
boolean phase1Success = runAddReplacePhase1(dataset,
newFileName,
newFileContentType,
- newFileInputStream,
- oldFileId);
+ newFileInputStream);
if (!phase1Success){
return false;
@@ -357,16 +338,14 @@ private boolean runAddReplaceFile(Dataset dataset,
* @return
*/
public boolean runAddReplacePhase1(Dataset dataset,
- String newFileName, String newFileContentType, InputStream newFileInputStream,
- Long oldFileId){
+ String newFileName,
+ String newFileContentType,
+ InputStream newFileInputStream){
if (this.hasError()){
return false; // possible to have errors already...
}
- initErrorHandling();
-
-
msgt("step_001_loadDataset");
if (!this.step_001_loadDataset(dataset)){
return false;
@@ -383,15 +362,6 @@ public boolean runAddReplacePhase1(Dataset dataset,
return false;
}
-
- // Replace only step!
- if (isFileReplaceOperation()){
-
- msgt("step_025_loadFileToReplaceById");
- if (!this.step_025_loadFileToReplaceById(oldFileId)){
- return false;
- }
- }
msgt("step_030_createNewFilesViaIngest");
if (!this.step_030_createNewFilesViaIngest()){
@@ -721,68 +691,95 @@ private boolean step_020_loadNewFile(String fileName, String fileContentType, In
return true;
}
+
/**
* Optional: old file to replace
*
* @param oldFile
* @return
*/
- private boolean step_025_loadFileToReplace(DataFile existingFile){
-
+ private boolean step_005_loadFileToReplaceById(Long dataFileId){
+
if (this.hasError()){
return false;
}
- if (existingFile == null){
- this.addErrorSevere(getBundleErr("existing_file_to_replace_is_null"));
+ // Check for Null
+ //
+ if (dataFileId == null){
+ this.addErrorSevere(getBundleErr("existing_file_to_replace_id_is_null"));
return false;
- }
+ }
- if (!existingFile.getOwner().equals(this.dataset)){
- addError(getBundleErr("existing_file_to_replace_not_in_dataset"));
+ // Does the file exist?
+ //
+ DataFile existingFile = fileService.find(dataFileId);
+
+ if (existingFile == null){
+ this.addError(getBundleErr("existing_file_to_replace_not_found_by_id") + " " + dataFileId);
return false;
+ }
+
+
+ // Do we have permission to replace this file? e.g. Edit the file's dataset
+ //
+ if (!permissionService.request(dvRequest).on(existingFile.getOwner()).has(Permission.EditDataset)){
+ addError(getBundleErr("no_edit_dataset_permission"));
+ return false;
}
+
+ // Is the file published?
+ //
if (!existingFile.isReleased()){
addError(getBundleErr("unpublished_file_cannot_be_replaced"));
return false;
}
-
+ // Is the file in the latest dataset version?
+ //
+ if (!step_007_auto_isReplacementInLatestVersion(existingFile)){
+ return false;
+ }
+
fileToReplace = existingFile;
- return true;
- }
+ return true;
+ }
/**
- * Optional: old file to replace
+ * Make sure the file to replace is in the workingVersion
+ * -- e.g. that it wasn't deleted from a previous Version
*
- * @param oldFile
* @return
*/
- private boolean step_025_loadFileToReplaceById(Long dataFileId){
+ private boolean step_007_auto_isReplacementInLatestVersion(DataFile existingFile){
+ if (existingFile == null){
+ throw new NullPointerException("existingFile cannot be null!");
+ }
+
if (this.hasError()){
return false;
}
- // This shouldn't happen, the public replace method should throw
- // a NullPointerException
- //
- if (dataFileId == null){
- this.addErrorSevere(getBundleErr("existing_file_to_replace_id_is_null"));
- return false;
- }
- DataFile existingFile = fileService.find(dataFileId);
-
- if (existingFile == null){
- this.addError(getBundleErr("existing_file_to_replace_not_found_by_id") + " " + dataFileId);
- return false;
- }
+ DatasetVersion latestVersion = existingFile.getOwner().getLatestVersion();
- return step_025_loadFileToReplace(existingFile);
+ boolean fileInLatestVersion = false;
+ for (FileMetadata fm : latestVersion.getFileMetadatas()){
+ if (fm.getDataFile().getId() != null){
+ if (Objects.equals(existingFile.getId(),fm.getDataFile().getId())){
+ fileInLatestVersion = true;
+ }
+ }
+ }
+ if (!fileInLatestVersion){
+ addError(getBundleErr("existing_file_not_in_latest_published_version"));
+ return false;
+ }
+ return true;
}
@@ -794,11 +791,7 @@ private boolean step_030_createNewFilesViaIngest(){
// Load the working version of the Dataset
workingVersion = dataset.getEditVersion();
-
- if (!step_035_auto_isReplacementInLatestVersion()){
- return false;
- }
-
+
try {
initialFileList = ingestService.createDataFiles(workingVersion,
this.newFileInputStream,
@@ -839,36 +832,6 @@ private boolean step_030_createNewFilesViaIngest(){
return this.step_045_auto_checkForFileReplaceDuplicate();
}
- /**
- * Make sure the file to replace is in the workingVersion
- * -- e.g. that it wasn't deleted from a previous Version
- *
- * @return
- */
- private boolean step_035_auto_isReplacementInLatestVersion(){
-
- if (this.hasError()){
- return false;
- }
- if (!this.isFileReplaceOperation()){
- return true;
- }
-
- boolean fileInLatestVersion = false;
- for (FileMetadata fm : workingVersion.getFileMetadatas()){
- if (fm.getDataFile().getId() != null){
- if (Objects.equals(fileToReplace.getId(),fm.getDataFile().getId())){
- fileInLatestVersion = true;
- }
- }
- }
- if (!fileInLatestVersion){
- addError(getBundleErr("existing_file_not_in_latest_published_version"));
- this.runMajorCleanup();
- return false;
- }
- return true;
- }
/**
* Create a "final file list"
From 562301dda7aef433379d164f02a2a912c354fb7e Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Fri, 14 Oct 2016 12:56:53 -0400
Subject: [PATCH 44/86] Moved add/replace API endpoints to correct paths.
e.g. datasets/{id}/add and files/{id}/replace #2290
---
.../api/{FileUpload.java => Files.java} | 194 ++----------------
.../datasetutility/AddReplaceFileHelper.java | 3 -
2 files changed, 14 insertions(+), 183 deletions(-)
rename src/main/java/edu/harvard/iq/dataverse/api/{FileUpload.java => Files.java} (54%)
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
similarity index 54%
rename from src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
rename to src/main/java/edu/harvard/iq/dataverse/api/Files.java
index 6b29321bc9e..01f41bac26a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/FileUpload.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
@@ -18,6 +18,7 @@
import edu.harvard.iq.dataverse.EjbDataverseEngine;
import edu.harvard.iq.dataverse.FileMetadata;
import edu.harvard.iq.dataverse.UserNotificationServiceBean;
+import static edu.harvard.iq.dataverse.api.AbstractApiBean.errorResponse;
import edu.harvard.iq.dataverse.authorization.users.User;
import edu.harvard.iq.dataverse.datasetutility.AddReplaceFileHelper;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
@@ -56,8 +57,8 @@
*
* @author rmp553
*/
-@Path("upload")
-public class FileUpload extends AbstractApiBean {
+@Path("files")
+public class Files extends AbstractApiBean {
@EJB
DatasetServiceBean datasetService;
@@ -76,53 +77,12 @@ public class FileUpload extends AbstractApiBean {
@EJB
UserNotificationServiceBean userNotificationService;
- private static final Logger logger = Logger.getLogger(FileUpload.class.getName());
+ private static final Logger logger = Logger.getLogger(Files.class.getName());
// for testing
private static final String SERVER_UPLOAD_LOCATION_FOLDER = "/Users/rmp553/Documents/iqss-git/dataverse-helper-scripts/src/api_scripts/output/";
- @POST
- @Path("hello")
- @Consumes(MediaType.MULTIPART_FORM_DATA)
- public Response uploadFile(
- @FormDataParam("file") InputStream fileInputStream,
- @FormDataParam("file") FormDataContentDisposition contentDispositionHeader) {
-
- String filePath = SERVER_UPLOAD_LOCATION_FOLDER + contentDispositionHeader.getFileName();
-
- // save the file to the server
- saveFile(fileInputStream, filePath);
-
- String output = "File saved to server location : " + filePath;
-
- return okResponse(output);
- //return Response.status(200).entity(output).build();
-
- }
-
- // save uploaded file to a defined location on the server
- private void saveFile(InputStream uploadedInputStream,
- String serverLocation) {
-
- try {
- OutputStream outpuStream = new FileOutputStream(new File(serverLocation));
- int read = 0;
- byte[] bytes = new byte[1024];
-
- outpuStream = new FileOutputStream(new File(serverLocation));
- while ((read = uploadedInputStream.read(bytes)) != -1) {
- outpuStream.write(bytes, 0, read);
- }
- outpuStream.flush();
- outpuStream.close();
- } catch (IOException e) {
-
- e.printStackTrace();
- }
-
- }
-
/**
* get existing test file from this directory:
@@ -186,66 +146,12 @@ private void dashes(){
private void msgt(String m){
dashes(); msg(m); dashes();
}
-
-
- private void removeLinkedFileFromDataset(Dataset dataset, DataFile dataFileToRemove){
- // remove the file from the dataset (since createDataFiles has already linked
- // it to the dataset!
- // first, through the filemetadata list, then through tht datafiles list:
- Iterator fmIt = dataset.getEditVersion().getFileMetadatas().iterator();
- msgt("Clear FileMetadatas");
- while (fmIt.hasNext()) {
- FileMetadata fm = fmIt.next();
- msg("Check: " + fm);
- if (fm.getId() == null && dataFileToRemove.getStorageIdentifier().equals(fm.getDataFile().getStorageIdentifier())) {
- msg("Got It! ");
- fmIt.remove();
- break;
- }
- }
-
-
- Iterator dfIt = dataset.getFiles().iterator();
- msgt("Clear Files");
- while (dfIt.hasNext()) {
- DataFile dfn = dfIt.next();
- msg("Check: " + dfn);
- if (dfn.getId() == null && dataFileToRemove.getStorageIdentifier().equals(dfn.getStorageIdentifier())) {
- msg("Got It! try to remove from iterator");
-
- dfIt.remove();
- msg("...didn't work");
-
- break;
- }else{
- msg("...ok");
- }
- }
- }
- /**
- *
- * @param fileId
- * @return
- */
- @GET
- @Path("resave/{fileId}")
- public Response hiReSave(@PathParam("fileId") Long fileId){
- msgt("hiReSave: " + fileId);
- DataFile df = fileService.find(fileId);
-
- if (df ==null){
- return okResponse("file not found: " + fileId);
- }
- df = fileService.save(df);
-
- return okResponse("saved: " + df);
- }
/**
- * Add a File to an existing Dataset
+ * Replace an Existing File
*
* @param datasetId
* @param testFileInputStream
@@ -254,86 +160,13 @@ public Response hiReSave(@PathParam("fileId") Long fileId){
* @return
*/
@POST
- @Path("add")
- @Consumes(MediaType.MULTIPART_FORM_DATA)
- public Response addFileToDataset(@FormDataParam("datasetId") Long datasetId,
- @FormDataParam("file") InputStream testFileInputStream,
- @FormDataParam("file") FormDataContentDisposition contentDispositionHeader,
- @FormDataParam("file") final FormDataBodyPart formDataBodyPart
- ){
-
- // -------------------------------------
- // (1) Get the file name and content type
- // -------------------------------------
- String newFilename = contentDispositionHeader.getFileName();
- String newFileContentType = formDataBodyPart.getMediaType().toString();
-
- // -------------------------------------
- // (2) Get the user from the API key
- // -------------------------------------
- User authUser;
- try {
- authUser = this.findUserOrDie();
- } catch (WrappedResponse ex) {
- return errorResponse(Response.Status.FORBIDDEN, "Couldn't find a user from the API key");
- }
-
- //-------------------
- // (3) Create the AddReplaceFileHelper object
- //-------------------
- msg("ADD!");
-
- DataverseRequest dvRequest2 = createDataverseRequest(authUser);
- AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest2,
- this.ingestService,
- this.datasetService,
- this.fileService,
- this.permissionSvc,
- this.commandEngine);
-
-
- //-------------------
- // (4) Run "runAddFileByDatasetId"
- //-------------------
- addFileHelper.runAddFileByDatasetId(datasetId,
- newFilename,
- newFileContentType,
- testFileInputStream);
-
-
- if (addFileHelper.hasError()){
- return errorResponse(Response.Status.BAD_REQUEST, addFileHelper.getErrorMessagesAsString("\n"));
- }else{
-
- return okResponseGsonObject("File successfully added!",
- addFileHelper.getSuccessResultAsGsonObject());
- //"Look at that! You added a file! (hey hey, it may have worked)");
- }
-
- } // end: addFileToDataset
-
-
-
-
-
-
- /**
- * Add a File to an existing Dataset
- *
- * @param datasetId
- * @param testFileInputStream
- * @param contentDispositionHeader
- * @param formDataBodyPart
- * @return
- */
- @POST
- @Path("replace")
+ @Path("{id}/replace")
@Consumes(MediaType.MULTIPART_FORM_DATA)
public Response replaceFileInDataset(
+ @PathParam("id") Long fileToReplaceId,
@FormDataParam("file") InputStream testFileInputStream,
@FormDataParam("file") FormDataContentDisposition contentDispositionHeader,
@FormDataParam("file") final FormDataBodyPart formDataBodyPart,
- @FormDataParam("fileToReplaceId") Long fileToReplaceId,
@FormDataParam("forceReplace") Boolean forceReplace
){
@@ -353,7 +186,7 @@ public Response replaceFileInDataset(
User authUser;
try {
authUser = this.findUserOrDie();
- } catch (WrappedResponse ex) {
+ } catch (AbstractApiBean.WrappedResponse ex) {
return errorResponse(Response.Status.FORBIDDEN, "Couldn't find a user from the API key");
}
@@ -374,18 +207,18 @@ public Response replaceFileInDataset(
//-------------------
// (4) Run "runReplaceFileByDatasetId"
//-------------------
+
+
if (forceReplace){
addFileHelper.runForceReplaceFile(fileToReplaceId,
newFilename,
newFileContentType,
- testFileInputStream
- );
+ testFileInputStream);
}else{
- addFileHelper.runForceReplaceFile(fileToReplaceId,
+ addFileHelper.runReplaceFile(fileToReplaceId,
newFilename,
newFileContentType,
- testFileInputStream
- );
+ testFileInputStream);
}
msg("we're back.....");
@@ -403,6 +236,7 @@ public Response replaceFileInDataset(
} // end: replaceFileInDataset
+
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index 856d787aeff..182ac1fdc49 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -632,9 +632,6 @@ private boolean step_001_loadDatasetById(Long datasetId){
}
-
-
-
/**
* Step 10 Verify User and Permissions
*
From 26cb18b6f87b78c94bad8db7166db9a18998fb43 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Fri, 14 Oct 2016 13:23:55 -0400
Subject: [PATCH 45/86] Fix to broken clean/build/compile on last commit #2290
---
.../iq/dataverse/datasetutility/FileUploadTestPage.java | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileUploadTestPage.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileUploadTestPage.java
index 311408f324d..53da697471c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileUploadTestPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileUploadTestPage.java
@@ -199,11 +199,11 @@ public void addReplaceFile(UploadedFile laFile){
if (this.replaceOperation){
- addFileHelper.runReplaceFile(dataset,
+ addFileHelper.runReplaceFile( fileToReplace.getId(),
laFile.getFileName(),
laFile.getContentType(),
- inputStream,
- fileToReplace.getId());
+ inputStream
+ );
}else{
addFileHelper.runAddFile(dataset,
laFile.getFileName(),
From 8ba97484bcb4b8092f97b1729553fa78066c6ab7 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Fri, 14 Oct 2016 13:44:16 -0400
Subject: [PATCH 46/86] #2290 json data in form param -- next try as data
string
---
.../edu/harvard/iq/dataverse/api/Files.java | 71 ++++++++++++-------
1 file changed, 47 insertions(+), 24 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
index 01f41bac26a..8693c62f531 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
@@ -7,51 +7,36 @@
//import com.sun.jersey.core.header.FormDataContentDisposition;
//import com.sun.jersey.multipart.FormDataParam;
-import edu.harvard.iq.dataverse.DataFile;
+import com.google.gson.Gson;
+import com.google.gson.JsonObject;
import edu.harvard.iq.dataverse.DataFileServiceBean;
-import edu.harvard.iq.dataverse.Dataset;
-import edu.harvard.iq.dataverse.DatasetFieldValidator;
import edu.harvard.iq.dataverse.DatasetServiceBean;
import edu.harvard.iq.dataverse.DatasetVersionServiceBean;
import edu.harvard.iq.dataverse.DataverseRequestServiceBean;
import edu.harvard.iq.dataverse.DataverseServiceBean;
import edu.harvard.iq.dataverse.EjbDataverseEngine;
-import edu.harvard.iq.dataverse.FileMetadata;
import edu.harvard.iq.dataverse.UserNotificationServiceBean;
import static edu.harvard.iq.dataverse.api.AbstractApiBean.errorResponse;
import edu.harvard.iq.dataverse.authorization.users.User;
import edu.harvard.iq.dataverse.datasetutility.AddReplaceFileHelper;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
-import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
-import java.io.OutputStream;
-import java.io.StringReader;
-import java.util.Iterator;
+import java.util.logging.Level;
import java.util.logging.Logger;
import javax.ejb.EJB;
-import javax.ejb.Stateless;
import javax.inject.Inject;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonReader;
import javax.ws.rs.Consumes;
-import javax.ws.rs.FormParam;
-import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.glassfish.jersey.media.multipart.FormDataBodyPart;
import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
import org.glassfish.jersey.media.multipart.FormDataParam;
-import org.omnifaces.util.Faces;
/**
*
@@ -147,7 +132,6 @@ private void msgt(String m){
dashes(); msg(m); dashes();
}
-
/**
@@ -163,17 +147,56 @@ private void msgt(String m){
@Path("{id}/replace")
@Consumes(MediaType.MULTIPART_FORM_DATA)
public Response replaceFileInDataset(
- @PathParam("id") Long fileToReplaceId,
+ @FormDataParam("jsonData") String jsonData,
+ //@PathParam("id") Long fileToReplaceId,
@FormDataParam("file") InputStream testFileInputStream,
@FormDataParam("file") FormDataContentDisposition contentDispositionHeader,
- @FormDataParam("file") final FormDataBodyPart formDataBodyPart,
- @FormDataParam("forceReplace") Boolean forceReplace
+ @FormDataParam("file") final FormDataBodyPart formDataBodyPart
+ //@FormDataParam("forceReplace") Boolean forceReplace
){
- if (forceReplace==null){
- forceReplace = false;
+ // -------------------------------------
+ // (1) Check/Parse the JSON
+ // -------------------------------------
+ if (jsonData == null){
+ logger.log(Level.SEVERE, "jsonData is null");
+ return errorResponse( Response.Status.BAD_REQUEST, "No JSON data");
+ }
+
+ // Convert string to GSON
+ // -------------------------------------
+ JsonObject jsonObj = new Gson().fromJson(jsonData, JsonObject.class);
+
+ // Check for required "fileToReplaceId"
+ // -------------------------------------
+ if ((!jsonObj.has("fileToReplaceId")) || jsonObj.get("fileToReplaceId").isJsonNull()){
+ return errorResponse( Response.Status.BAD_REQUEST, "'fileToReplaceId' NOT found in the JSON Request");
+ }
+
+ Long fileToReplaceId;
+
+ try {
+ fileToReplaceId = Long.parseLong(jsonObj.get("fileToReplaceId").toString());
+ } catch (Exception e) {
+ return errorResponse( Response.Status.BAD_REQUEST, "'fileToReplaceId' in the JSON Request must be a number.");
}
+
+ // Check for optional "forceReplace"
+ // -------------------------------------
+ Boolean forceReplace = false;
+ if ((jsonObj.has("forceReplace")) && (!jsonObj.get("forceReplace").isJsonNull())){
+ forceReplace = jsonObj.get("forceReplace").getAsBoolean();
+ if (forceReplace == null){
+ forceReplace = false;
+ }
+ }
+ msgt("forceReplace: " + forceReplace);
+ /*
+ if (forceReplace == null){
+ forceReplace = false;
+ }
+ */
// -------------------------------------
// (1) Get the file name and content type
// -------------------------------------
From 6577b6653dc0bed54128edd7d6615d66c9db1b57 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Fri, 14 Oct 2016 13:51:39 -0400
Subject: [PATCH 47/86] #2290 replace with data in JSON under a form param
---
.../edu/harvard/iq/dataverse/api/Files.java | 45 ++++++++-----------
1 file changed, 18 insertions(+), 27 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
index 8693c62f531..eb12668528c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
@@ -148,26 +148,31 @@ private void msgt(String m){
@Consumes(MediaType.MULTIPART_FORM_DATA)
public Response replaceFileInDataset(
@FormDataParam("jsonData") String jsonData,
- //@PathParam("id") Long fileToReplaceId,
@FormDataParam("file") InputStream testFileInputStream,
@FormDataParam("file") FormDataContentDisposition contentDispositionHeader,
@FormDataParam("file") final FormDataBodyPart formDataBodyPart
- //@FormDataParam("forceReplace") Boolean forceReplace
){
// -------------------------------------
- // (1) Check/Parse the JSON
+ // (1) Get the user from the API key
+ // -------------------------------------
+ User authUser;
+ try {
+ authUser = this.findUserOrDie();
+ } catch (AbstractApiBean.WrappedResponse ex) {
+ return errorResponse(Response.Status.FORBIDDEN, "Couldn't find a user from the API key");
+ }
+
+ // -------------------------------------
+ // (2) Check/Parse the JSON
// -------------------------------------
if (jsonData == null){
logger.log(Level.SEVERE, "jsonData is null");
return errorResponse( Response.Status.BAD_REQUEST, "No JSON data");
}
-
- // Convert string to GSON
- // -------------------------------------
JsonObject jsonObj = new Gson().fromJson(jsonData, JsonObject.class);
- // Check for required "fileToReplaceId"
+ // (2a) Check for required "fileToReplaceId"
// -------------------------------------
if ((!jsonObj.has("fileToReplaceId")) || jsonObj.get("fileToReplaceId").isJsonNull()){
return errorResponse( Response.Status.BAD_REQUEST, "'fileToReplaceId' NOT found in the JSON Request");
@@ -182,7 +187,7 @@ public Response replaceFileInDataset(
}
- // Check for optional "forceReplace"
+ // (2b) Check for optional "forceReplace"
// -------------------------------------
Boolean forceReplace = false;
if ((jsonObj.has("forceReplace")) && (!jsonObj.get("forceReplace").isJsonNull())){
@@ -191,30 +196,17 @@ public Response replaceFileInDataset(
forceReplace = false;
}
}
- msgt("forceReplace: " + forceReplace);
- /*
- if (forceReplace == null){
- forceReplace = false;
- }
- */
+
+
// -------------------------------------
- // (1) Get the file name and content type
+ // (3) Get the file name and content type
// -------------------------------------
String newFilename = contentDispositionHeader.getFileName();
String newFileContentType = formDataBodyPart.getMediaType().toString();
- // -------------------------------------
- // (2) Get the user from the API key
- // -------------------------------------
- User authUser;
- try {
- authUser = this.findUserOrDie();
- } catch (AbstractApiBean.WrappedResponse ex) {
- return errorResponse(Response.Status.FORBIDDEN, "Couldn't find a user from the API key");
- }
//-------------------
- // (3) Create the AddReplaceFileHelper object
+ // (4) Create the AddReplaceFileHelper object
//-------------------
msg("REPLACE!");
@@ -226,9 +218,8 @@ public Response replaceFileInDataset(
this.permissionSvc,
this.commandEngine);
-
//-------------------
- // (4) Run "runReplaceFileByDatasetId"
+ // (5) Run "runReplaceFileByDatasetId"
//-------------------
From 9ecefe0f60e9fb4040de0e28c1905fa2fae4285d Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Fri, 14 Oct 2016 13:55:56 -0400
Subject: [PATCH 48/86] #2290 add jsonData placeholder for 'add'
---
.../edu/harvard/iq/dataverse/api/Datasets.java | 18 +++++-------------
1 file changed, 5 insertions(+), 13 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
index cc05a0c0f81..4aae169196b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -1,13 +1,11 @@
package edu.harvard.iq.dataverse.api;
import edu.harvard.iq.dataverse.DOIEZIdServiceBean;
-import edu.harvard.iq.dataverse.DataFile;
import edu.harvard.iq.dataverse.DataFileServiceBean;
import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.DatasetField;
import edu.harvard.iq.dataverse.DatasetFieldServiceBean;
import edu.harvard.iq.dataverse.DatasetFieldType;
-import edu.harvard.iq.dataverse.DatasetFieldValue;
import edu.harvard.iq.dataverse.DatasetServiceBean;
import edu.harvard.iq.dataverse.DatasetVersion;
import edu.harvard.iq.dataverse.DatasetVersionServiceBean;
@@ -18,17 +16,13 @@
import edu.harvard.iq.dataverse.MetadataBlockServiceBean;
import edu.harvard.iq.dataverse.RoleAssignment;
import static edu.harvard.iq.dataverse.api.AbstractApiBean.errorResponse;
-import edu.harvard.iq.dataverse.api.imports.ImportException;
-import edu.harvard.iq.dataverse.api.imports.ImportUtil;
import edu.harvard.iq.dataverse.authorization.DataverseRole;
import edu.harvard.iq.dataverse.authorization.RoleAssignee;
import edu.harvard.iq.dataverse.authorization.users.User;
import edu.harvard.iq.dataverse.datasetutility.AddReplaceFileHelper;
import edu.harvard.iq.dataverse.engine.command.Command;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
-import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import edu.harvard.iq.dataverse.engine.command.impl.AssignRoleCommand;
-import edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetCommand;
import edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetVersionCommand;
import edu.harvard.iq.dataverse.engine.command.impl.CreatePrivateUrlCommand;
import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetCommand;
@@ -55,7 +49,6 @@
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import edu.harvard.iq.dataverse.util.SystemConfig;
import edu.harvard.iq.dataverse.util.json.JsonParseException;
-import edu.harvard.iq.dataverse.util.json.JsonParser;
import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
@@ -63,7 +56,6 @@
import java.io.StringReader;
import java.util.List;
import java.util.Map;
-import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.ejb.EJB;
@@ -71,11 +63,6 @@
import javax.json.JsonArrayBuilder;
import javax.json.JsonObject;
import javax.json.JsonObjectBuilder;
-import javax.json.JsonReader;
-import javax.validation.ConstraintViolation;
-import javax.validation.Validation;
-import javax.validation.Validator;
-import javax.validation.ValidatorFactory;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
@@ -815,10 +802,15 @@ public Response deletePrivateUrl(@PathParam("id") String idSupplied) {
@Path("{id}/add")
@Consumes(MediaType.MULTIPART_FORM_DATA)
public Response addFileToDataset(@PathParam("id") Long datasetId,
+ @FormDataParam("jsonData") String jsonData,
@FormDataParam("file") InputStream testFileInputStream,
@FormDataParam("file") FormDataContentDisposition contentDispositionHeader,
@FormDataParam("file") final FormDataBodyPart formDataBodyPart
){
+
+
+ // TODO: Handle jsonData: description, tags, etc
+
// -------------------------------------
// (1) Get the file name and content type
From 3546c27a6701f706d81c7b1c8b21bb5a030908b7 Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Fri, 14 Oct 2016 16:50:58 -0400
Subject: [PATCH 49/86] get file add/replace API tests working again #2290
Also don't require superuser.
---
.../harvard/iq/dataverse/api/DatasetsIT.java | 38 +++++++++++++------
.../edu/harvard/iq/dataverse/api/UtilIT.java | 5 ++-
2 files changed, 30 insertions(+), 13 deletions(-)
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
index 04db0a2302b..1e1dd6642e5 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
@@ -28,6 +28,7 @@
import static junit.framework.Assert.assertEquals;
import org.hamcrest.CoreMatchers;
import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.CoreMatchers.nullValue;
public class DatasetsIT {
@@ -475,8 +476,6 @@ public void testFileReplace() {
String username = UtilIT.getUsernameFromResponse(createUser);
String apiToken = UtilIT.getApiTokenFromResponse(createUser);
- UtilIT.makeSuperUser(username);
-
Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
// createDataverseResponse.prettyPrint();
createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
@@ -510,12 +509,21 @@ public void testFileReplace() {
String pathToFile = "src/main/webapp/resources/images/dataverseproject.png";
Response replace = UtilIT.replaceFile(datasetId, fileId, pathToFile, apiToken);
replace.prettyPrint();
- replace.then().assertThat().statusCode(OK.getStatusCode());
+ replace.then().assertThat()
+ .body("message", equalTo("File successfully replaced!"))
+ .statusCode(OK.getStatusCode());
+
+ Response getDatasetJson = UtilIT.nativeGet(datasetId, apiToken);
+ getDatasetJson.prettyPrint();
+ getDatasetJson.then().assertThat()
+ .body("data.latestVersion.files[0].dataFile.filename", equalTo("dataverseproject.png"))
+ .body("data.latestVersion.files[0].dataFile.contentType", equalTo("image/png"))
+ .body("data.latestVersion.files[0].dataFile.rootDataFileId", not(-1))
+ .body("data.latestVersion.files[0].dataFile.previousDataFileId", equalTo(fileId))
+ .statusCode(OK.getStatusCode());
}
-
-
@Test
public void testFileReplaceNativeAdd() {
@@ -524,10 +532,7 @@ public void testFileReplaceNativeAdd() {
// createUser.prettyPrint();
String username = UtilIT.getUsernameFromResponse(createUser);
String apiToken = UtilIT.getApiTokenFromResponse(createUser);
-
-
- UtilIT.makeSuperUser(username);
-
+
Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
// createDataverseResponse.prettyPrint();
@@ -541,9 +546,20 @@ public void testFileReplaceNativeAdd() {
String pathToFile = "src/main/webapp/resources/images/dataverseproject.png";
Response add = UtilIT.uploadFileViaNative(datasetId, pathToFile, apiToken);
-
+
add.prettyPrint();
- add.then().assertThat().statusCode(OK.getStatusCode());
+ add.then().assertThat()
+ .body("message", equalTo("File successfully added!"))
+ .statusCode(OK.getStatusCode());
+
+ Response getDatasetJson = UtilIT.nativeGet(datasetId, apiToken);
+ getDatasetJson.prettyPrint();
+ getDatasetJson.then().assertThat()
+ .body("data.latestVersion.files[0].dataFile.filename", equalTo("dataverseproject.png"))
+ .body("data.latestVersion.files[0].dataFile.contentType", equalTo("image/png"))
+ .body("data.latestVersion.files[0].dataFile.rootDataFileId", equalTo(-1))
+ .body("data.latestVersion.files[0].dataFile.previousDataFileId", nullValue())
+ .statusCode(OK.getStatusCode());
}
}
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
index 1a8762084a8..f68215056dc 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
@@ -264,7 +264,7 @@ static Response uploadFileViaNative(Integer datasetId, String pathToFile, String
.header(API_TOKEN_HTTP_HEADER, apiToken)
.multiPart("datasetId", datasetId)
.multiPart("file", new File("src/main/webapp/resources/images/dataverseproject.png"))
- .post("/api/upload/add");
+ .post("/api/datasets/" + datasetId + "/add");
}
static Response replaceFile(Integer datasetId, int fileId, String pathToFile, String apiToken) {
@@ -273,7 +273,8 @@ static Response replaceFile(Integer datasetId, int fileId, String pathToFile, St
.multiPart("datasetId", datasetId)
.multiPart("fileToReplaceId", fileId)
.multiPart("file", new File(pathToFile))
- .post("/api/upload/replace");
+ .multiPart("jsonData", Json.createObjectBuilder().add("fileToReplaceId", fileId).build().toString())
+ .post("/api/files/" + fileId + "/replace");
}
static Response downloadFile(Integer fileId) {
From 9cbc456d8623cbbb7f69686b1a34b6713da26f4d Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Mon, 17 Oct 2016 08:26:02 -0400
Subject: [PATCH 50/86] remove old params from replace test util #2290
---
src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java | 2 --
1 file changed, 2 deletions(-)
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
index f68215056dc..c2b9940e597 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
@@ -270,8 +270,6 @@ static Response uploadFileViaNative(Integer datasetId, String pathToFile, String
static Response replaceFile(Integer datasetId, int fileId, String pathToFile, String apiToken) {
return given()
.header(API_TOKEN_HTTP_HEADER, apiToken)
- .multiPart("datasetId", datasetId)
- .multiPart("fileToReplaceId", fileId)
.multiPart("file", new File(pathToFile))
.multiPart("jsonData", Json.createObjectBuilder().add("fileToReplaceId", fileId).build().toString())
.post("/api/files/" + fileId + "/replace");
From e5271f73830c8dbca483f4693be0c6405b3b6463 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Mon, 17 Oct 2016 10:53:09 -0400
Subject: [PATCH 51/86] #2290 adjust api output to begin more tests. (Still
 needs to integrate with JsonPrinter, but tests should still work.)
---
.../edu/harvard/iq/dataverse/DataFile.java | 40 +++++-
.../harvard/iq/dataverse/FileMetadata.java | 4 +-
.../harvard/iq/dataverse/api/DatasetsIT.java | 96 +------------
.../edu/harvard/iq/dataverse/api/FilesIT.java | 126 ++++++++++++++++++
4 files changed, 163 insertions(+), 103 deletions(-)
create mode 100644 src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
index 3ea04d8c8c3..580228ad0ed 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
@@ -5,6 +5,7 @@
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.annotations.Expose;
+import com.google.gson.annotations.SerializedName;
import edu.harvard.iq.dataverse.DatasetVersion.VersionState;
import edu.harvard.iq.dataverse.api.WorldMapRelatedData;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
@@ -23,6 +24,8 @@
import java.nio.file.Paths;
import java.nio.file.Files;
import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
import javax.persistence.Entity;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
@@ -63,7 +66,6 @@ public class DataFile extends DvObject implements Comparable {
public static final Long ROOT_DATAFILE_ID_DEFAULT = new Long(-1);
- @Expose
private String name;
@Expose
@@ -74,6 +76,7 @@ public class DataFile extends DvObject implements Comparable {
@Expose
+ @SerializedName("storageIdentifier")
@Column( nullable = false )
private String fileSystemName;
@@ -116,7 +119,7 @@ public String toString() {
}
}
- @Expose
+ //@Expose
@Column(nullable = false)
@Enumerated(EnumType.STRING)
private ChecksumType checksumType;
@@ -125,7 +128,7 @@ public String toString() {
* Examples include "f622da34d54bdc8ee541d6916ac1c16f" as an MD5 value or
* "3a484dfdb1b429c2e15eb2a735f1f5e4d5b04ec6" as a SHA-1 value"
*/
- @Expose
+ //@Expose
@Column(nullable = false)
private String checksumValue;
@@ -812,13 +815,38 @@ public JsonObject asGsonObject(boolean prettyPrint){
builder.serializeNulls(); // correctly capture nulls
Gson gson = builder.create();
- // serialize this object
+ // ----------------------------------
+ // serialize this object + add the id
+ // ----------------------------------
JsonElement jsonObj = gson.toJsonTree(this);
jsonObj.getAsJsonObject().addProperty("id", this.getId());
- JsonObject fileMetadataGson = this.getFileMetadata().asGsonObject(prettyPrint);
+ // ----------------------------------
+ // Add label (filename), description, and categories from the FileMetadata object
+ // ----------------------------------
+ FileMetadata thisFileMetadata = this.getFileMetadata();
+
+ jsonObj.getAsJsonObject().addProperty("filename", thisFileMetadata.getLabel());
+ jsonObj.getAsJsonObject().addProperty("description", thisFileMetadata.getDescription());
+ jsonObj.getAsJsonObject().add("categories",
+ gson.toJsonTree(thisFileMetadata.getCategoriesByName())
+ );
+
+ // ----------------------------------
+ // Checksum map
+ // ----------------------------------
+ Map checkSumMap = new HashMap();
+ checkSumMap.put("type", getChecksumType().toString());
+ checkSumMap.put("value", getChecksumValue());
+
+ JsonElement checkSumJSONMap = gson.toJsonTree(checkSumMap);
+
+ jsonObj.getAsJsonObject().add("checksum", checkSumJSONMap);
+
+
+ //JsonObject fileMetadataGson = this.getFileMetadata().asGsonObject(prettyPrint);
- jsonObj.getAsJsonObject().add("fileMetadata", fileMetadataGson);
+ //jsonObj.getAsJsonObject().add("fileMetadata", fileMetadataGson);
//JsonObject fileMetadataJSON = new JsonObject();
JsonObject fullFileJSON = new JsonObject();
diff --git a/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java b/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java
index ba590cf1792..87043d6bf99 100644
--- a/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java
+++ b/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java
@@ -413,9 +413,9 @@ private String serializeAsJSON(boolean prettyPrint){
}
-
public JsonObject asGsonObject(boolean prettyPrint){
-
+
+
GsonBuilder builder;
if (prettyPrint){ // Add pretty printing
builder = new GsonBuilder().excludeFieldsWithoutExposeAnnotation().setPrettyPrinting();
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
index 1e1dd6642e5..3fcc182d1e4 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
@@ -467,99 +467,5 @@ public void testFileChecksum() {
}
- @Test
- public void testFileReplace() {
-
- Response createUser = UtilIT.createRandomUser();
- createUser.then().assertThat().statusCode(OK.getStatusCode());
-// createUser.prettyPrint();
- String username = UtilIT.getUsernameFromResponse(createUser);
- String apiToken = UtilIT.getApiTokenFromResponse(createUser);
-
- Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
-// createDataverseResponse.prettyPrint();
- createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
- String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
-
- Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
- createDatasetResponse.prettyPrint();
- createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode());
- Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
-
- Response getDatasetJsonBeforeFiles = UtilIT.nativeGet(datasetId, apiToken);
- getDatasetJsonBeforeFiles.prettyPrint();
- getDatasetJsonBeforeFiles.then().assertThat().statusCode(OK.getStatusCode());
- String protocol1 = JsonPath.from(getDatasetJsonBeforeFiles.getBody().asString()).getString("data.protocol");
- String authority1 = JsonPath.from(getDatasetJsonBeforeFiles.getBody().asString()).getString("data.authority");
- String identifier1 = JsonPath.from(getDatasetJsonBeforeFiles.getBody().asString()).getString("data.identifier");
- String dataset1PersistentId = protocol1 + ":" + authority1 + "/" + identifier1;
-
- Response uploadFileResponse = UtilIT.uploadRandomFile(dataset1PersistentId, apiToken);
- uploadFileResponse.prettyPrint();
- getDatasetJsonBeforeFiles.then().assertThat().statusCode(OK.getStatusCode());
- assertEquals(CREATED.getStatusCode(), uploadFileResponse.getStatusCode());
-
- Response getDatasetJsonWithFiles = UtilIT.nativeGet(datasetId, apiToken);
- getDatasetJsonWithFiles.prettyPrint();
- getDatasetJsonWithFiles.then().assertThat().statusCode(OK.getStatusCode());
- int fileId = JsonPath.from(getDatasetJsonWithFiles.getBody().asString()).getInt("data.latestVersion.files[0].dataFile.id");
- UtilIT.publishDataverseViaSword(dataverseAlias, apiToken).then().assertThat().statusCode(OK.getStatusCode());
- UtilIT.publishDatasetViaSword(dataset1PersistentId, apiToken).then().assertThat().statusCode(OK.getStatusCode());
-
- String pathToFile = "src/main/webapp/resources/images/dataverseproject.png";
- Response replace = UtilIT.replaceFile(datasetId, fileId, pathToFile, apiToken);
- replace.prettyPrint();
- replace.then().assertThat()
- .body("message", equalTo("File successfully replaced!"))
- .statusCode(OK.getStatusCode());
-
- Response getDatasetJson = UtilIT.nativeGet(datasetId, apiToken);
- getDatasetJson.prettyPrint();
- getDatasetJson.then().assertThat()
- .body("data.latestVersion.files[0].dataFile.filename", equalTo("dataverseproject.png"))
- .body("data.latestVersion.files[0].dataFile.contentType", equalTo("image/png"))
- .body("data.latestVersion.files[0].dataFile.rootDataFileId", not(-1))
- .body("data.latestVersion.files[0].dataFile.previousDataFileId", equalTo(fileId))
- .statusCode(OK.getStatusCode());
-
- }
-
- @Test
- public void testFileReplaceNativeAdd() {
-
- Response createUser = UtilIT.createRandomUser();
- createUser.then().assertThat().statusCode(OK.getStatusCode());
-// createUser.prettyPrint();
- String username = UtilIT.getUsernameFromResponse(createUser);
- String apiToken = UtilIT.getApiTokenFromResponse(createUser);
-
- Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
-
-// createDataverseResponse.prettyPrint();
- createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
- String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
-
- Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
- createDatasetResponse.prettyPrint();
- createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode());
- Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
-
- String pathToFile = "src/main/webapp/resources/images/dataverseproject.png";
- Response add = UtilIT.uploadFileViaNative(datasetId, pathToFile, apiToken);
-
- add.prettyPrint();
- add.then().assertThat()
- .body("message", equalTo("File successfully added!"))
- .statusCode(OK.getStatusCode());
-
- Response getDatasetJson = UtilIT.nativeGet(datasetId, apiToken);
- getDatasetJson.prettyPrint();
- getDatasetJson.then().assertThat()
- .body("data.latestVersion.files[0].dataFile.filename", equalTo("dataverseproject.png"))
- .body("data.latestVersion.files[0].dataFile.contentType", equalTo("image/png"))
- .body("data.latestVersion.files[0].dataFile.rootDataFileId", equalTo(-1))
- .body("data.latestVersion.files[0].dataFile.previousDataFileId", nullValue())
- .statusCode(OK.getStatusCode());
- }
-
+
}
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
new file mode 100644
index 00000000000..5ee16e896c5
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
@@ -0,0 +1,126 @@
+package edu.harvard.iq.dataverse.api;
+
+import com.jayway.restassured.RestAssured;
+import com.jayway.restassured.response.Response;
+import java.util.logging.Logger;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import com.jayway.restassured.path.json.JsonPath;
+
+import static javax.ws.rs.core.Response.Status.CREATED;
+import static javax.ws.rs.core.Response.Status.OK;
+import static junit.framework.Assert.assertEquals;
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.CoreMatchers.not;
+import static org.hamcrest.CoreMatchers.nullValue;
+
+public class FilesIT {
+
+ private static final Logger logger = Logger.getLogger(FilesIT.class.getCanonicalName());
+
+ @BeforeClass
+ public static void setUpClass() {
+ RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
+ }
+
+
+ @Test
+ public void testFileReplace() {
+
+ Response createUser = UtilIT.createRandomUser();
+ createUser.then().assertThat().statusCode(OK.getStatusCode());
+// createUser.prettyPrint();
+ String username = UtilIT.getUsernameFromResponse(createUser);
+ String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+
+ Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+// createDataverseResponse.prettyPrint();
+ createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+ String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+
+ Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+ createDatasetResponse.prettyPrint();
+ createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+ Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
+
+ Response getDatasetJsonBeforeFiles = UtilIT.nativeGet(datasetId, apiToken);
+ getDatasetJsonBeforeFiles.prettyPrint();
+ getDatasetJsonBeforeFiles.then().assertThat().statusCode(OK.getStatusCode());
+ String protocol1 = JsonPath.from(getDatasetJsonBeforeFiles.getBody().asString()).getString("data.protocol");
+ String authority1 = JsonPath.from(getDatasetJsonBeforeFiles.getBody().asString()).getString("data.authority");
+ String identifier1 = JsonPath.from(getDatasetJsonBeforeFiles.getBody().asString()).getString("data.identifier");
+ String dataset1PersistentId = protocol1 + ":" + authority1 + "/" + identifier1;
+
+ Response uploadFileResponse = UtilIT.uploadRandomFile(dataset1PersistentId, apiToken);
+ uploadFileResponse.prettyPrint();
+ getDatasetJsonBeforeFiles.then().assertThat().statusCode(OK.getStatusCode());
+ assertEquals(CREATED.getStatusCode(), uploadFileResponse.getStatusCode());
+
+ Response getDatasetJsonWithFiles = UtilIT.nativeGet(datasetId, apiToken);
+ getDatasetJsonWithFiles.prettyPrint();
+ getDatasetJsonWithFiles.then().assertThat().statusCode(OK.getStatusCode());
+ int fileId = JsonPath.from(getDatasetJsonWithFiles.getBody().asString()).getInt("data.latestVersion.files[0].dataFile.id");
+ UtilIT.publishDataverseViaSword(dataverseAlias, apiToken).then().assertThat().statusCode(OK.getStatusCode());
+ UtilIT.publishDatasetViaSword(dataset1PersistentId, apiToken).then().assertThat().statusCode(OK.getStatusCode());
+
+ String pathToFile = "src/main/webapp/resources/images/dataverseproject.png";
+ Response replace = UtilIT.replaceFile(datasetId, fileId, pathToFile, apiToken);
+ replace.prettyPrint();
+ replace.then().assertThat()
+ .body("message", equalTo("File successfully replaced!"))
+ .statusCode(OK.getStatusCode());
+
+ Response getDatasetJson = UtilIT.nativeGet(datasetId, apiToken);
+ getDatasetJson.prettyPrint();
+ getDatasetJson.then().assertThat()
+ .body("data.latestVersion.files[0].dataFile.filename", equalTo("dataverseproject.png"))
+ .body("data.latestVersion.files[0].dataFile.contentType", equalTo("image/png"))
+ .body("data.latestVersion.files[0].dataFile.rootDataFileId", not(-1))
+ .body("data.latestVersion.files[0].dataFile.previousDataFileId", equalTo(fileId))
+ .statusCode(OK.getStatusCode());
+
+ }
+
+ @Test
+ public void test01_ReplaceGood() {
+
+ Response createUser = UtilIT.createRandomUser();
+ createUser.then().assertThat().statusCode(OK.getStatusCode());
+// createUser.prettyPrint();
+ String username = UtilIT.getUsernameFromResponse(createUser);
+ String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+
+ Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+
+// createDataverseResponse.prettyPrint();
+ createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+ String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+
+ Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+ createDatasetResponse.prettyPrint();
+ createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+ Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
+
+ String pathToFile = "src/main/webapp/resources/images/dataverseproject.png";
+ String expectedContentType = "image/png";
+ String expectedLabel = "dataverseproject.png";
+ Response add = UtilIT.uploadFileViaNative(datasetId, pathToFile, apiToken);
+
+ add.prettyPrint();
+ add.then().assertThat()
+ .body("message", equalTo("File successfully added!"))
+ .body("data.filename", equalTo(expectedLabel))
+ .body("data.contentType", equalTo(expectedContentType))
+ .statusCode(OK.getStatusCode());
+
+ Response getDatasetJson = UtilIT.nativeGet(datasetId, apiToken);
+ getDatasetJson.prettyPrint();
+ getDatasetJson.then().assertThat()
+ .body("data.latestVersion.files[0].dataFile.filename", equalTo(expectedLabel))
+ .body("data.latestVersion.files[0].dataFile.contentType", equalTo(expectedContentType))
+ .body("data.latestVersion.files[0].dataFile.rootDataFileId", equalTo(-1))
+ .body("data.latestVersion.files[0].dataFile.previousDataFileId", nullValue())
+ .statusCode(OK.getStatusCode());
+ }
+
+}
From 646c000a5850bfaefc8652fd471c5a1757874e8b Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Mon, 17 Oct 2016 11:53:48 -0400
Subject: [PATCH 52/86] put 3354-alt-checksum.sql in upgrade_v4.5.1_to_v4.6.sql
#3354
---
scripts/database/upgrades/3354-alt-checksum.sql | 7 -------
scripts/database/upgrades/upgrade_v4.5.1_to_v4.6.sql | 8 ++++++++
2 files changed, 8 insertions(+), 7 deletions(-)
delete mode 100644 scripts/database/upgrades/3354-alt-checksum.sql
diff --git a/scripts/database/upgrades/3354-alt-checksum.sql b/scripts/database/upgrades/3354-alt-checksum.sql
deleted file mode 100644
index 42956fcc65d..00000000000
--- a/scripts/database/upgrades/3354-alt-checksum.sql
+++ /dev/null
@@ -1,7 +0,0 @@
-ALTER TABLE datafile ADD COLUMN checksumtype character varying(255);
-UPDATE datafile SET checksumtype = 'MD5';
-ALTER TABLE datafile ALTER COLUMN checksumtype SET NOT NULL;
--- alternate statement for sbgrid.org and others interested in SHA-1 support
--- note that in the database we use "SHA1" (no hyphen) but the GUI will show "SHA-1"
---UPDATE datafile SET checksumtype = 'SHA1';
-ALTER TABLE datafile RENAME md5 TO checksumvalue;
diff --git a/scripts/database/upgrades/upgrade_v4.5.1_to_v4.6.sql b/scripts/database/upgrades/upgrade_v4.5.1_to_v4.6.sql
index a8957cff53a..51d6684a3af 100644
--- a/scripts/database/upgrades/upgrade_v4.5.1_to_v4.6.sql
+++ b/scripts/database/upgrades/upgrade_v4.5.1_to_v4.6.sql
@@ -1,3 +1,11 @@
+-- For supporting SHA1 rather than MD5 as a checksum on a per-file basis #3354
+ALTER TABLE datafile ADD COLUMN checksumtype character varying(255);
+UPDATE datafile SET checksumtype = 'MD5';
+ALTER TABLE datafile ALTER COLUMN checksumtype SET NOT NULL;
+-- alternate statement for sbgrid.org and others interested in SHA-1 support
+-- note that in the database we use "SHA1" (no hyphen) but the GUI will show "SHA-1"
+--UPDATE datafile SET checksumtype = 'SHA1';
+ALTER TABLE datafile RENAME md5 TO checksumvalue;
-- For DataFile, file replace functionality:
ALTER TABLE datafile ADD COLUMN rootdatafileid bigint default -1;
ALTER TABLE datafile ADD COLUMN previousdatafileid bigint default null;
From d784c925205385458d8f30fdc4b4dcd7f0e56f02 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Mon, 17 Oct 2016 15:16:31 -0400
Subject: [PATCH 53/86] Part of #2290: tests and code re-adjustments for error
 messages.
---
src/main/java/Bundle.properties | 5 +-
.../iq/dataverse/api/AbstractApiBean.java | 20 +-
.../harvard/iq/dataverse/api/Datasets.java | 9 +-
.../edu/harvard/iq/dataverse/api/Files.java | 10 +-
.../edu/harvard/iq/dataverse/api/FilesIT.java | 332 +++++++++++++++---
.../edu/harvard/iq/dataverse/api/UtilIT.java | 14 +-
6 files changed, 324 insertions(+), 66 deletions(-)
diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties
index 3fa1e4c98e6..ef339afdefe 100755
--- a/src/main/java/Bundle.properties
+++ b/src/main/java/Bundle.properties
@@ -1457,4 +1457,7 @@ file.addreplace.error.replace.command_engine_error=Failed to update the dataset.
file.addreplace.error.replace.ejb_exception=Failed to update the dataset. Please contact the administrator. (EJBException)
file.addreplace.error.remove_linked_file.dataset=dataset cannot be null in removeLinkedFileFromDataset
file.addreplace.error.remove_linked_file.file=file cannot be null in removeLinkedFileFromDataset
-file.addreplace.error.phase2_called_early_no_new_files=There was an error saving the dataset--no new files found.
\ No newline at end of file
+file.addreplace.error.phase2_called_early_no_new_files=There was an error saving the dataset--no new files found.
+file.addreplace.success.add=File successfully added!
+file.addreplace.success.replace=File successfully replaced!
+file.addreplace.error.auth=The API key is invalid.
\ No newline at end of file
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java
index b9495e7c69c..7d63c5d0ec6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java
@@ -67,6 +67,10 @@ public abstract class AbstractApiBean {
private static final Logger logger = Logger.getLogger(AbstractApiBean.class.getName());
private static final String DATAVERSE_KEY_HEADER_NAME = "X-Dataverse-key";
+ public static final String STATUS_ERROR = "ERROR";
+ public static final String STATUS_OK = "OK";
+
+
/**
* Utility class to convey a proper error response using Java's exceptions.
*/
@@ -363,14 +367,14 @@ protected Response okResponse( JsonArrayBuilder bld ) {
Response.ok();
return Response.ok(Json.createObjectBuilder()
- .add("status", "OK")
+ .add("status", STATUS_OK)
.add("data", bld).build()).build();
}
protected Response createdResponse( String uri, JsonObjectBuilder bld ) {
return Response.created( URI.create(uri) )
.entity( Json.createObjectBuilder()
- .add("status", "OK")
+ .add("status", STATUS_OK)
.add("data", bld).build())
.type(MediaType.APPLICATION_JSON)
.build();
@@ -378,7 +382,7 @@ protected Response createdResponse( String uri, JsonObjectBuilder bld ) {
protected Response okResponse( JsonObjectBuilder bld ) {
return Response.ok( Json.createObjectBuilder()
- .add("status", "OK")
+ .add("status", STATUS_OK)
.add("data", bld).build() )
.type(MediaType.APPLICATION_JSON)
.build();
@@ -386,7 +390,7 @@ protected Response okResponse( JsonObjectBuilder bld ) {
protected Response okResponse( String msg ) {
return Response.ok().entity(Json.createObjectBuilder()
- .add("status", "OK")
+ .add("status", STATUS_OK)
.add("data", Json.createObjectBuilder().add("message",msg)).build() )
.type(MediaType.APPLICATION_JSON)
.build();
@@ -419,20 +423,20 @@ protected Response okResponseGsonObject(String msg, com.google.gson.JsonObject g
*/
protected Response okResponseWithValue( String value ) {
return Response.ok(Json.createObjectBuilder()
- .add("status", "OK")
+ .add("status", STATUS_OK)
.add("data", value).build(), MediaType.APPLICATION_JSON_TYPE ).build();
}
protected Response okResponseWithValue( boolean value ) {
return Response.ok().entity(Json.createObjectBuilder()
- .add("status", "OK")
+ .add("status", STATUS_OK)
.add("data", value).build() ).build();
}
protected Response accepted() {
return Response.accepted()
.entity(Json.createObjectBuilder()
- .add("status", "OK").build()
+ .add("status", STATUS_OK).build()
).build();
}
@@ -463,7 +467,7 @@ protected static Response errorResponse( Status sts ) {
protected static Response errorResponse( Status sts, String msg ) {
return Response.status(sts)
.entity( NullSafeJsonBuilder.jsonObjectBuilder()
- .add("status", "ERROR")
+ .add("status", STATUS_ERROR)
.add( "message", msg ).build()
).type(MediaType.APPLICATION_JSON_TYPE).build();
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
index 4aae169196b..625293fba13 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -56,6 +56,7 @@
import java.io.StringReader;
import java.util.List;
import java.util.Map;
+import java.util.ResourceBundle;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.ejb.EJB;
@@ -825,7 +826,9 @@ public Response addFileToDataset(@PathParam("id") Long datasetId,
try {
authUser = this.findUserOrDie();
} catch (WrappedResponse ex) {
- return errorResponse(Response.Status.FORBIDDEN, "Couldn't find a user from the API key");
+ return errorResponse(Response.Status.FORBIDDEN,
+ ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.auth")
+ );
}
//-------------------
@@ -854,8 +857,8 @@ public Response addFileToDataset(@PathParam("id") Long datasetId,
if (addFileHelper.hasError()){
return errorResponse(Response.Status.BAD_REQUEST, addFileHelper.getErrorMessagesAsString("\n"));
}else{
-
- return okResponseGsonObject("File successfully added!",
+ String successMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.success.add");
+ return okResponseGsonObject(successMsg,
addFileHelper.getSuccessResultAsGsonObject());
//"Look at that! You added a file! (hey hey, it may have worked)");
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
index eb12668528c..366b41a0c84 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
@@ -25,6 +25,7 @@
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
+import java.util.ResourceBundle;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.ejb.EJB;
@@ -160,7 +161,9 @@ public Response replaceFileInDataset(
try {
authUser = this.findUserOrDie();
} catch (AbstractApiBean.WrappedResponse ex) {
- return errorResponse(Response.Status.FORBIDDEN, "Couldn't find a user from the API key");
+ return errorResponse(Response.Status.FORBIDDEN,
+ ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.auth")
+ );
}
// -------------------------------------
@@ -241,8 +244,9 @@ public Response replaceFileInDataset(
return errorResponse(Response.Status.BAD_REQUEST, addFileHelper.getErrorMessagesAsString("\n"));
}else{
msg("no error");
-
- return okResponseGsonObject("File successfully replaced!",
+ String successMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.success.replace");
+
+ return okResponseGsonObject(successMsg,
addFileHelper.getSuccessResultAsGsonObject());
//"Look at that! You added a file! (hey hey, it may have worked)");
}
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
index 5ee16e896c5..2016142fe80 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
@@ -6,13 +6,20 @@
import org.junit.BeforeClass;
import org.junit.Test;
import com.jayway.restassured.path.json.JsonPath;
+import java.util.ResourceBundle;
+import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
import static javax.ws.rs.core.Response.Status.CREATED;
+import static javax.ws.rs.core.Response.Status.FORBIDDEN;
+import static javax.ws.rs.core.Response.Status.NOT_FOUND;
import static javax.ws.rs.core.Response.Status.OK;
import static junit.framework.Assert.assertEquals;
+import org.apache.poi.ss.usermodel.DataValidationConstraint;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.CoreMatchers.nullValue;
+import org.hamcrest.Matchers;
+import static org.junit.Assert.assertNotNull;
public class FilesIT {
@@ -24,25 +31,274 @@ public static void setUpClass() {
}
- @Test
- public void testFileReplace() {
-
+ /**
+ * Create user and get apiToken
+ *
+ * @return
+ */
+ private String createUserGetToken(){
+
Response createUser = UtilIT.createRandomUser();
createUser.then().assertThat().statusCode(OK.getStatusCode());
-// createUser.prettyPrint();
+ //createUser.prettyPrint();
+
String username = UtilIT.getUsernameFromResponse(createUser);
String apiToken = UtilIT.getApiTokenFromResponse(createUser);
-
+
+ return apiToken;
+ }
+
+
+ private String createDataverseGetAlias(String apiToken){
+
Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
-// createDataverseResponse.prettyPrint();
+ //createDataverseResponse.prettyPrint();
createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+ return dataverseAlias;
+ }
+
+
+ private Integer createDatasetGetId(String dataverseAlias, String apiToken){
+
Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
- createDatasetResponse.prettyPrint();
+ //createDatasetResponse.prettyPrint();
createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode());
Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
+
+ return datasetId;
+
+ }
+
+ @Test
+ public void test_001_AddFileGood() {
+ msgt("test_001_AddFileGood");
+ // Create user
+ String apiToken = createUserGetToken();
+
+ // Create Dataverse
+ String dataverseAlias = createDataverseGetAlias(apiToken);
+
+ // Create Dataset
+ Integer datasetId = createDatasetGetId(dataverseAlias, apiToken);
+
+
+ String pathToFile = "src/main/webapp/resources/images/favicondataverse.png";
+ Response addResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken);
+
+ //addResponse.prettyPrint();
+ //msgt("Here it is: " + addResponse.prettyPrint());
+ String successMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.success.add");
+
+
+ addResponse.then().assertThat()
+ .body("message", equalTo(successMsg))
+ .body("status", equalTo(AbstractApiBean.STATUS_OK))
+ .body("data.contentType", equalTo("image/png"))
+ .body("data.filename", equalTo("dataverseproject.png"))
+ .statusCode(OK.getStatusCode());
+
+ }
+
+
+ @Test
+ public void test_002_AddFileBadDatasetId() {
+ msgt("test_002_AddFileNullFileId");
+ // Create user
+ String apiToken = "someToken";
+
+ // Create Dataset
+ String datasetId = "cat"; //createDatasetGetId(dataverseAlias, apiToken);
+
+
+ String pathToFile = "src/main/webapp/resources/images/favicondataverse.png";
+ Response addResponse = UtilIT.uploadFileViaNative("cat", pathToFile, apiToken);
+ //msgt("Here it is: " + addResponse.prettyPrint());
+
+ // Adding a non-numeric id should result in a 404
+ addResponse.then().assertThat()
+ .statusCode(NOT_FOUND.getStatusCode());
+ }
+
+
+ @Test
+ public void test_003_AddFileNonExistentDatasetId() {
+ msgt("test_003_AddFileNonExistentDatasetId");
+
+ // Create user
+ String apiToken = createUserGetToken();
+
+ // Create Dataset
+ String datasetId = "9999"; //createDatasetGetId(dataverseAlias, apiToken);
+
+
+ String pathToFile = "src/main/webapp/resources/images/favicondataverse.png";
+ Response addResponse = UtilIT.uploadFileViaNative(datasetId, pathToFile, apiToken);
+
+
+ //msgt("Here it is: " + addResponse.prettyPrint());
+
+ String errMsgStart = ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.dataset_id_not_found");
+
+ addResponse.then().assertThat()
+ .body("status", equalTo(AbstractApiBean.STATUS_ERROR))
+ .body("message", Matchers.startsWith(errMsgStart))
+ .statusCode(BAD_REQUEST.getStatusCode());
+ }
+
+ @Test
+ public void test_004_AddFileBadToken() {
+ msgt("test_004_AddFileBadToken");
+
+ // Create user
+ String apiToken = "Bad Medicine";
+
+ // Create Dataset - should pick up permissions error first
+ String datasetId = "1"; //createDatasetGetId(dataverseAlias, apiToken);
+
+
+ String pathToFile = "src/main/webapp/resources/images/favicondataverse.png";
+ Response addResponse = UtilIT.uploadFileViaNative(datasetId, pathToFile, apiToken);
+
+ msgt("Here it is: " + addResponse.prettyPrint());
+
+ String errMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.auth");
+
+ addResponse.then().assertThat()
+ .body("status", equalTo(AbstractApiBean.STATUS_ERROR))
+ .body("message", equalTo(errMsg))
+ .statusCode(FORBIDDEN.getStatusCode());
+ }
+
+
+ @Test
+ public void test_005_AddFileBadPermissions() {
+ msgt("test_005_AddFileBadPerms");
+
+ // To do!!!
+ }
+
+ @Test
+ public void test_006_ReplaceFileGood() {
+ msgt("test_006_ReplaceFileGood");
+
+ // Create user
+ String apiToken = createUserGetToken();
+
+ // Create Dataverse
+ String dataverseAlias = createDataverseGetAlias(apiToken);
+
+ // Create Dataset
+ Integer datasetId = createDatasetGetId(dataverseAlias, apiToken);
+
+ // -------------------------
+ // Add initial file
+ // -------------------------
+ String pathToFile = "src/main/webapp/resources/images/favicondataverse.png";
+ Response addResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken);
+ String successMsgAdd = ResourceBundle.getBundle("Bundle").getString("file.addreplace.success.add");
+
+ addResponse.then().assertThat()
+ .body("message", equalTo(successMsgAdd))
+ .body("data.contentType", equalTo("image/png"))
+ .body("data.filename", equalTo("dataverseproject.png"))
+ .statusCode(OK.getStatusCode());
+
+
+ long origFileId = JsonPath.from(addResponse.body().asString()).getLong("data.id");
+
+ msg("Orig file id: " + origFileId);
+        assertNotNull(origFileId); // sanity check: the add response must contain a file id
+
+ // -------------------------
+ // Publish dataverse and dataset
+ // -------------------------
+ Response publishDataversetResp = UtilIT.publishDataverseViaSword(dataverseAlias, apiToken);
+ publishDataversetResp.then().assertThat()
+ .statusCode(OK.getStatusCode());
+
+ Response publishDatasetResp = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken);
+ publishDatasetResp.then().assertThat()
+ .statusCode(OK.getStatusCode());
+
+
+ // -------------------------
+ // Replace file
+ // -------------------------
+ String pathToFile2 = "src/main/webapp/resources/images/cc0.png";
+ Response replaceResp = UtilIT.replaceFile(origFileId, pathToFile2, apiToken);
+
+ msgt(replaceResp.prettyPrint());
+
+ String successMsg2 = ResourceBundle.getBundle("Bundle").getString("file.addreplace.success.replace");
+
+ replaceResp.then().assertThat()
+ .statusCode(OK.getStatusCode())
+ .body("message", equalTo(successMsg2))
+ .body("data.filename", equalTo("cc0.png"))
+ //.body("data.rootDataFileId", equalTo(origFileId))
+ ;
+
+ long rootDataFileId = JsonPath.from(replaceResp.body().asString()).getLong("data.rootDataFileId");
+ long previousDataFileId = JsonPath.from(replaceResp.body().asString()).getLong("data.previousDataFileId");
+ long newDataFileId = JsonPath.from(replaceResp.body().asString()).getLong("data.id");
+
+ assertEquals(origFileId, previousDataFileId);
+ assertEquals(rootDataFileId, previousDataFileId);
+
+
+ // -------------------------
+ // Publish dataset (again)
+ // -------------------------
+ publishDatasetResp = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken);
+ publishDatasetResp.then().assertThat()
+ .statusCode(OK.getStatusCode());
+
+
+ // -------------------------
+ // Replace file (again)
+ // -------------------------
+ String pathToFile3 = "src/main/webapp/resources/images/favicondataverse.png";
+ Response replaceResp2 = UtilIT.replaceFile(newDataFileId, pathToFile3, apiToken);
+
+ msgt("2nd replace: " + replaceResp2.prettyPrint());
+
+ replaceResp2.then().assertThat()
+ .statusCode(OK.getStatusCode())
+ .body("status", equalTo(AbstractApiBean.STATUS_OK))
+ .body("message", equalTo(successMsg2))
+ .body("data.filename", equalTo("favicondataverse.png"))
+ ;
+
+ long rootDataFileId2 = JsonPath.from(replaceResp2.body().asString()).getLong("data.rootDataFileId");
+ long previousDataFileId2 = JsonPath.from(replaceResp2.body().asString()).getLong("data.previousDataFileId");
+
+ msgt("newDataFileId: " + newDataFileId);
+ msgt("previousDataFileId2: " + previousDataFileId2);
+ msgt("rootDataFileId2: " + rootDataFileId2);
+
+ assertEquals(newDataFileId, previousDataFileId2);
+ assertEquals(rootDataFileId2, origFileId);
+
+ }
+
+ //@Test
+ public void xtest_006_ReplaceFileGood() {
+
+ // Create user
+ String apiToken = createUserGetToken();
+
+ // Create Dataverse
+ String dataverseAlias = createDataverseGetAlias(apiToken);
+
+ // Create Dataset
+ Integer datasetId = createDatasetGetId(dataverseAlias, apiToken);
+
+ // ---------------------
+ // Add file
+ // ---------------------
Response getDatasetJsonBeforeFiles = UtilIT.nativeGet(datasetId, apiToken);
getDatasetJsonBeforeFiles.prettyPrint();
getDatasetJsonBeforeFiles.then().assertThat().statusCode(OK.getStatusCode());
@@ -63,9 +319,16 @@ public void testFileReplace() {
UtilIT.publishDataverseViaSword(dataverseAlias, apiToken).then().assertThat().statusCode(OK.getStatusCode());
UtilIT.publishDatasetViaSword(dataset1PersistentId, apiToken).then().assertThat().statusCode(OK.getStatusCode());
+ // ---------------------
+ // Replace file
+ // ---------------------
String pathToFile = "src/main/webapp/resources/images/dataverseproject.png";
- Response replace = UtilIT.replaceFile(datasetId, fileId, pathToFile, apiToken);
+ Response replace = UtilIT.replaceFile(fileId, pathToFile, apiToken);
replace.prettyPrint();
+
+ String successMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.success.add");
+
+
replace.then().assertThat()
.body("message", equalTo("File successfully replaced!"))
.statusCode(OK.getStatusCode());
@@ -81,46 +344,17 @@ public void testFileReplace() {
}
- @Test
- public void test01_ReplaceGood() {
-
- Response createUser = UtilIT.createRandomUser();
- createUser.then().assertThat().statusCode(OK.getStatusCode());
-// createUser.prettyPrint();
- String username = UtilIT.getUsernameFromResponse(createUser);
- String apiToken = UtilIT.getApiTokenFromResponse(createUser);
-
- Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
-
-// createDataverseResponse.prettyPrint();
- createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
- String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
-
- Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
- createDatasetResponse.prettyPrint();
- createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode());
- Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
-
- String pathToFile = "src/main/webapp/resources/images/dataverseproject.png";
- String expectedContentType = "image/png";
- String expectedLabel = "dataverseproject.png";
- Response add = UtilIT.uploadFileViaNative(datasetId, pathToFile, apiToken);
-
- add.prettyPrint();
- add.then().assertThat()
- .body("message", equalTo("File successfully added!"))
- .body("data.filename", equalTo(expectedLabel))
- .body("data.contentType", equalTo(expectedContentType))
- .statusCode(OK.getStatusCode());
-
- Response getDatasetJson = UtilIT.nativeGet(datasetId, apiToken);
- getDatasetJson.prettyPrint();
- getDatasetJson.then().assertThat()
- .body("data.latestVersion.files[0].dataFile.filename", equalTo(expectedLabel))
- .body("data.latestVersion.files[0].dataFile.contentType", equalTo(expectedContentType))
- .body("data.latestVersion.files[0].dataFile.rootDataFileId", equalTo(-1))
- .body("data.latestVersion.files[0].dataFile.previousDataFileId", nullValue())
- .statusCode(OK.getStatusCode());
+
+
+ private void msg(String m){
+ System.out.println(m);
}
-
+ private void dashes(){
+ msg("----------------");
+ }
+ private void msgt(String m){
+ dashes(); msg(m); dashes();
+ }
+
+
}
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
index c2b9940e597..3dead822faa 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
@@ -259,7 +259,17 @@ public static Response uploadFile(String persistentId, String zipfilename, Strin
}
- static Response uploadFileViaNative(Integer datasetId, String pathToFile, String apiToken) {
+ /**
+ * For test purposes, datasetId can be non-numeric
+ *
+ * @param datasetId
+ * @param pathToFile
+ * @param apiToken
+ * @return
+ */
+ static Response uploadFileViaNative(String datasetId, String pathToFile, String apiToken) {
+
+
return given()
.header(API_TOKEN_HTTP_HEADER, apiToken)
.multiPart("datasetId", datasetId)
@@ -267,7 +277,7 @@ static Response uploadFileViaNative(Integer datasetId, String pathToFile, String
.post("/api/datasets/" + datasetId + "/add");
}
- static Response replaceFile(Integer datasetId, int fileId, String pathToFile, String apiToken) {
+ static Response replaceFile(long fileId, String pathToFile, String apiToken) {
return given()
.header(API_TOKEN_HTTP_HEADER, apiToken)
.multiPart("file", new File(pathToFile))
From 1bb50d94da9bf73e4635af4c4ef595ef944e1180 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Mon, 17 Oct 2016 15:33:29 -0400
Subject: [PATCH 54/86] test for unpublished file and bad file id #2290
---
.../edu/harvard/iq/dataverse/api/FilesIT.java | 119 ++++++++++--------
1 file changed, 70 insertions(+), 49 deletions(-)
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
index 2016142fe80..2022ccd2eaa 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
@@ -71,7 +71,7 @@ private Integer createDatasetGetId(String dataverseAlias, String apiToken){
}
- @Test
+ //@Test
public void test_001_AddFileGood() {
msgt("test_001_AddFileGood");
// Create user
@@ -102,7 +102,7 @@ public void test_001_AddFileGood() {
}
- @Test
+ //@Test
public void test_002_AddFileBadDatasetId() {
msgt("test_002_AddFileNullFileId");
// Create user
@@ -122,7 +122,7 @@ public void test_002_AddFileBadDatasetId() {
}
- @Test
+ //@Test
public void test_003_AddFileNonExistentDatasetId() {
msgt("test_003_AddFileNonExistentDatasetId");
@@ -147,7 +147,7 @@ public void test_003_AddFileNonExistentDatasetId() {
.statusCode(BAD_REQUEST.getStatusCode());
}
- @Test
+ //@Test
public void test_004_AddFileBadToken() {
msgt("test_004_AddFileBadToken");
@@ -172,14 +172,14 @@ public void test_004_AddFileBadToken() {
}
- @Test
+ //@Test
public void test_005_AddFileBadPermissions() {
msgt("test_005_AddFileBadPerms");
// To do!!!
}
- @Test
+ //@Test
public void test_006_ReplaceFileGood() {
msgt("test_006_ReplaceFileGood");
@@ -284,8 +284,10 @@ public void test_006_ReplaceFileGood() {
}
- //@Test
- public void xtest_006_ReplaceFileGood() {
+
+ @Test
+ public void test_007_ReplaceFileUnpublishedAndBadIds() {
+ msgt("test_007_ReplaceFileBadIds");
// Create user
String apiToken = createUserGetToken();
@@ -295,56 +297,75 @@ public void xtest_006_ReplaceFileGood() {
// Create Dataset
Integer datasetId = createDatasetGetId(dataverseAlias, apiToken);
+
+ // -------------------------
+ // Add initial file
+ // -------------------------
+ String pathToFile = "src/main/webapp/resources/images/favicondataverse.png";
+ Response addResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken);
+
+ String successMsgAdd = ResourceBundle.getBundle("Bundle").getString("file.addreplace.success.add");
+
+ addResponse.then().assertThat()
+ .body("message", equalTo(successMsgAdd))
+ .body("data.contentType", equalTo("image/png"))
+ .body("data.filename", equalTo("dataverseproject.png"))
+ .statusCode(OK.getStatusCode());
- // ---------------------
- // Add file
- // ---------------------
- Response getDatasetJsonBeforeFiles = UtilIT.nativeGet(datasetId, apiToken);
- getDatasetJsonBeforeFiles.prettyPrint();
- getDatasetJsonBeforeFiles.then().assertThat().statusCode(OK.getStatusCode());
- String protocol1 = JsonPath.from(getDatasetJsonBeforeFiles.getBody().asString()).getString("data.protocol");
- String authority1 = JsonPath.from(getDatasetJsonBeforeFiles.getBody().asString()).getString("data.authority");
- String identifier1 = JsonPath.from(getDatasetJsonBeforeFiles.getBody().asString()).getString("data.identifier");
- String dataset1PersistentId = protocol1 + ":" + authority1 + "/" + identifier1;
-
- Response uploadFileResponse = UtilIT.uploadRandomFile(dataset1PersistentId, apiToken);
- uploadFileResponse.prettyPrint();
- getDatasetJsonBeforeFiles.then().assertThat().statusCode(OK.getStatusCode());
- assertEquals(CREATED.getStatusCode(), uploadFileResponse.getStatusCode());
-
- Response getDatasetJsonWithFiles = UtilIT.nativeGet(datasetId, apiToken);
- getDatasetJsonWithFiles.prettyPrint();
- getDatasetJsonWithFiles.then().assertThat().statusCode(OK.getStatusCode());
- int fileId = JsonPath.from(getDatasetJsonWithFiles.getBody().asString()).getInt("data.latestVersion.files[0].dataFile.id");
- UtilIT.publishDataverseViaSword(dataverseAlias, apiToken).then().assertThat().statusCode(OK.getStatusCode());
- UtilIT.publishDatasetViaSword(dataset1PersistentId, apiToken).then().assertThat().statusCode(OK.getStatusCode());
-
- // ---------------------
- // Replace file
- // ---------------------
- String pathToFile = "src/main/webapp/resources/images/dataverseproject.png";
- Response replace = UtilIT.replaceFile(fileId, pathToFile, apiToken);
- replace.prettyPrint();
- String successMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.success.add");
+ long origFileId = JsonPath.from(addResponse.body().asString()).getLong("data.id");
+ msg("Orig file id: " + origFileId);
+ assertNotNull(origFileId); // sanity check: the add response returned a file id
- replace.then().assertThat()
- .body("message", equalTo("File successfully replaced!"))
+ // -------------------------
+ // Publish dataverse
+ // -------------------------
+ Response publishDataversetResp = UtilIT.publishDataverseViaSword(dataverseAlias, apiToken);
+ publishDataversetResp.then().assertThat()
.statusCode(OK.getStatusCode());
- Response getDatasetJson = UtilIT.nativeGet(datasetId, apiToken);
- getDatasetJson.prettyPrint();
- getDatasetJson.then().assertThat()
- .body("data.latestVersion.files[0].dataFile.filename", equalTo("dataverseproject.png"))
- .body("data.latestVersion.files[0].dataFile.contentType", equalTo("image/png"))
- .body("data.latestVersion.files[0].dataFile.rootDataFileId", not(-1))
- .body("data.latestVersion.files[0].dataFile.previousDataFileId", equalTo(fileId))
+
+ // -------------------------
+ // Replace file in unpublished dataset -- e.g. file not published
+ // -------------------------
+ String pathToFile2 = "src/main/webapp/resources/images/cc0.png";
+ Response replaceResp = UtilIT.replaceFile(origFileId, pathToFile2, apiToken);
+
+ String errMsgUnpublished = ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.unpublished_file_cannot_be_replaced");
+
+ replaceResp.then().assertThat()
+ .statusCode(BAD_REQUEST.getStatusCode())
+ .body("status", equalTo(AbstractApiBean.STATUS_ERROR))
+ .body("message", Matchers.startsWith(errMsgUnpublished))
+ ;
+
+ // -------------------------
+ // Publish dataset
+ // -------------------------
+ Response publishDatasetResp = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken);
+ publishDatasetResp.then().assertThat()
.statusCode(OK.getStatusCode());
+
+
+ // -------------------------
+ // Replace file with non-existent Id
+ // -------------------------
+ pathToFile2 = "src/main/webapp/resources/images/cc0.png";
+ Response replaceResp2 = UtilIT.replaceFile(origFileId+10, pathToFile2, apiToken);
- }
+ msgt("non-existent id: " + replaceResp.prettyPrint());
-
+ String errMsg1 = ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.existing_file_to_replace_not_found_by_id");
+
+ replaceResp2.then().assertThat()
+ .statusCode(BAD_REQUEST.getStatusCode())
+ .body("status", equalTo(AbstractApiBean.STATUS_ERROR))
+ .body("message", Matchers.startsWith(errMsg1))
+ ;
+
+
+ }
private void msg(String m){
System.out.println(m);
From 65d924a94007e17c428f208fd5d02b680586bce8 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Mon, 17 Oct 2016 15:55:32 -0400
Subject: [PATCH 55/86] #2290 test. attempt to replace deleted file that was
previously published
---
.../edu/harvard/iq/dataverse/api/FilesIT.java | 93 +++++++++++++++++--
1 file changed, 87 insertions(+), 6 deletions(-)
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
index 2022ccd2eaa..f2ed6c16e46 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
@@ -71,7 +71,7 @@ private Integer createDatasetGetId(String dataverseAlias, String apiToken){
}
- //@Test
+ @Test
public void test_001_AddFileGood() {
msgt("test_001_AddFileGood");
// Create user
@@ -102,7 +102,7 @@ public void test_001_AddFileGood() {
}
- //@Test
+ @Test
public void test_002_AddFileBadDatasetId() {
msgt("test_002_AddFileNullFileId");
// Create user
@@ -122,7 +122,7 @@ public void test_002_AddFileBadDatasetId() {
}
- //@Test
+ @Test
public void test_003_AddFileNonExistentDatasetId() {
msgt("test_003_AddFileNonExistentDatasetId");
@@ -147,7 +147,7 @@ public void test_003_AddFileNonExistentDatasetId() {
.statusCode(BAD_REQUEST.getStatusCode());
}
- //@Test
+ @Test
public void test_004_AddFileBadToken() {
msgt("test_004_AddFileBadToken");
@@ -172,14 +172,14 @@ public void test_004_AddFileBadToken() {
}
- //@Test
+ @Test
public void test_005_AddFileBadPermissions() {
msgt("test_005_AddFileBadPerms");
// To do!!!
}
- //@Test
+ @Test
public void test_006_ReplaceFileGood() {
msgt("test_006_ReplaceFileGood");
@@ -367,6 +367,87 @@ public void test_007_ReplaceFileUnpublishedAndBadIds() {
}
+
+ @Test
+ public void test_008_ReplaceFileAlreadyDeleted() {
+ msgt("test_008_ReplaceFileAlreadyDeleted");
+
+ // Create user
+ String apiToken = createUserGetToken();
+
+ // Create Dataverse
+ String dataverseAlias = createDataverseGetAlias(apiToken);
+
+ // Create Dataset
+ Integer datasetId = createDatasetGetId(dataverseAlias, apiToken);
+
+ // -------------------------
+ // Add initial file
+ // -------------------------
+ String pathToFile = "src/main/webapp/resources/images/favicondataverse.png";
+ Response addResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken);
+
+ String successMsgAdd = ResourceBundle.getBundle("Bundle").getString("file.addreplace.success.add");
+
+ addResponse.then().assertThat()
+ .body("message", equalTo(successMsgAdd))
+ .body("data.contentType", equalTo("image/png"))
+ .body("data.filename", equalTo("dataverseproject.png"))
+ .statusCode(OK.getStatusCode());
+
+
+ long origFileId = JsonPath.from(addResponse.body().asString()).getLong("data.id");
+
+ msg("Orig file id: " + origFileId);
+ assertNotNull(origFileId); // sanity check: the add response returned a file id
+
+ // -------------------------
+ // Publish dataverse
+ // -------------------------
+ Response publishDataversetResp = UtilIT.publishDataverseViaSword(dataverseAlias, apiToken);
+ publishDataversetResp.then().assertThat()
+ .statusCode(OK.getStatusCode());
+
+
+ // -------------------------
+ // Publish dataset
+ // -------------------------
+ Response publishDatasetResp = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken);
+ publishDatasetResp.then().assertThat()
+ .statusCode(OK.getStatusCode());
+
+ // -------------------------
+ // Delete file
+ // -------------------------
+ UtilIT.deleteFile((int)origFileId, apiToken);
+
+ // -------------------------
+ // Re-Publish dataset
+ // -------------------------
+ publishDatasetResp = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken);
+ publishDatasetResp.then().assertThat()
+ .statusCode(OK.getStatusCode());
+
+
+ // -------------------------
+ // Attempt to replace the deleted file -- it is no longer in the latest published version
+ // -------------------------
+ String pathToFile2 = "src/main/webapp/resources/images/cc0.png";
+ Response replaceResp = UtilIT.replaceFile(origFileId, pathToFile2, apiToken);
+
+ String errMsgDeleted = ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.existing_file_not_in_latest_published_version");
+
+ msgt("replace resp: " + replaceResp.prettyPrint());
+
+ replaceResp.then().assertThat()
+ .statusCode(BAD_REQUEST.getStatusCode())
+ .body("status", equalTo(AbstractApiBean.STATUS_ERROR))
+ .body("message", Matchers.startsWith(errMsgDeleted))
+ ;
+
+ }
+
+
private void msg(String m){
System.out.println(m);
}
From b7c976b19e815d260385b0eecdd23dde7c4fdaa0 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Mon, 17 Oct 2016 16:00:28 -0400
Subject: [PATCH 56/86] #2290 test. try to add same file twice
---
.../java/edu/harvard/iq/dataverse/api/FilesIT.java | 14 ++++++++++++++
1 file changed, 14 insertions(+)
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
index f2ed6c16e46..3cb2e8a8384 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
@@ -99,6 +99,20 @@ public void test_001_AddFileGood() {
.body("data.filename", equalTo("dataverseproject.png"))
.statusCode(OK.getStatusCode());
+
+ //------------------------------------------------
+ // Try to add the same file again -- and fail
+ //------------------------------------------------
+ Response addTwiceResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken);
+
+ msgt("2nd requests: " + addTwiceResponse.prettyPrint()); //addResponse.prettyPrint();
+
+ String errMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.duplicate_file");
+
+ addTwiceResponse.then().assertThat()
+ .body("message", Matchers.startsWith(errMsg))
+ .body("status", equalTo(AbstractApiBean.STATUS_ERROR))
+ .statusCode(BAD_REQUEST.getStatusCode());
}
From e7c07429114239404c0c34037edcf32fa4e3bc2a Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Mon, 17 Oct 2016 16:09:49 -0400
Subject: [PATCH 57/86] #2290 test. try to replace file w/ different content
type
---
.../edu/harvard/iq/dataverse/api/FilesIT.java | 24 +++++++++++++++++++
1 file changed, 24 insertions(+)
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
index 3cb2e8a8384..7cece5094cc 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
@@ -209,6 +209,7 @@ public void test_006_ReplaceFileGood() {
// -------------------------
// Add initial file
// -------------------------
+ msg("Add initial file");
String pathToFile = "src/main/webapp/resources/images/favicondataverse.png";
Response addResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken);
@@ -229,6 +230,7 @@ public void test_006_ReplaceFileGood() {
// -------------------------
// Publish dataverse and dataset
// -------------------------
+ msg("Publish dataverse and dataset");
Response publishDataversetResp = UtilIT.publishDataverseViaSword(dataverseAlias, apiToken);
publishDataversetResp.then().assertThat()
.statusCode(OK.getStatusCode());
@@ -238,9 +240,29 @@ public void test_006_ReplaceFileGood() {
.statusCode(OK.getStatusCode());
+ // -------------------------
+ // Replace file - BAD/warning b/c different content-type
+ // -------------------------
+ msg("Replace file - BAD/warning b/c different content-type");
+
+ String pathToFileWrongCtype = "src/main/webapp/resources/images/ajax-loading.gif";
+ Response replaceRespWrongCtype = UtilIT.replaceFile(origFileId, pathToFileWrongCtype, apiToken);
+
+ msgt(replaceRespWrongCtype.prettyPrint());
+
+ String errMsgCtype = ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.replace.new_file_has_different_content_type");
+
+
+ replaceRespWrongCtype.then().assertThat()
+ .statusCode(BAD_REQUEST.getStatusCode())
+ .body("status", equalTo(AbstractApiBean.STATUS_ERROR))
+ .body("message", equalTo(errMsgCtype));
+ //.body("data.rootDataFileId", equalTo(origFileId))
+
// -------------------------
// Replace file
// -------------------------
+ msg("Replace file - 1st time");
String pathToFile2 = "src/main/webapp/resources/images/cc0.png";
Response replaceResp = UtilIT.replaceFile(origFileId, pathToFile2, apiToken);
@@ -266,6 +288,7 @@ public void test_006_ReplaceFileGood() {
// -------------------------
// Publish dataset (again)
// -------------------------
+ msg("Publish dataset (again)");
publishDatasetResp = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken);
publishDatasetResp.then().assertThat()
.statusCode(OK.getStatusCode());
@@ -274,6 +297,7 @@ public void test_006_ReplaceFileGood() {
// -------------------------
// Replace file (again)
// -------------------------
+ msg("Replace file (again)");
String pathToFile3 = "src/main/webapp/resources/images/favicondataverse.png";
Response replaceResp2 = UtilIT.replaceFile(newDataFileId, pathToFile3, apiToken);
From 07df4474fbfa17fe98ea119d808033a2b3a1ee16 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Mon, 17 Oct 2016 16:11:07 -0400
Subject: [PATCH 58/86] #2290 tests. remove unused imports
---
src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java | 3 ---
1 file changed, 3 deletions(-)
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
index 7cece5094cc..ef8db9a8469 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
@@ -14,10 +14,7 @@
import static javax.ws.rs.core.Response.Status.NOT_FOUND;
import static javax.ws.rs.core.Response.Status.OK;
import static junit.framework.Assert.assertEquals;
-import org.apache.poi.ss.usermodel.DataValidationConstraint;
import static org.hamcrest.CoreMatchers.equalTo;
-import static org.hamcrest.CoreMatchers.not;
-import static org.hamcrest.CoreMatchers.nullValue;
import org.hamcrest.Matchers;
import static org.junit.Assert.assertNotNull;
From de93ffabb9f1cd65ebc010ffdbc3ba03dc770c48 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Tue, 18 Oct 2016 10:51:56 -0400
Subject: [PATCH 59/86] #2290 signatures (but not implementation) for adding
optionalFileParams: description, file tags, tabular tags
---
.../harvard/iq/dataverse/api/Datasets.java | 3 +-
.../edu/harvard/iq/dataverse/api/Files.java | 6 +-
.../datasetutility/AddReplaceFileHelper.java | 56 +++++++---
.../datasetutility/FileUploadTestPage.java | 8 +-
.../datasetutility/OptionalFileParams.java | 103 ++++++++++++++++++
5 files changed, 157 insertions(+), 19 deletions(-)
create mode 100644 src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
index 625293fba13..4295d7ef0ff 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -851,7 +851,8 @@ public Response addFileToDataset(@PathParam("id") Long datasetId,
addFileHelper.runAddFileByDatasetId(datasetId,
newFilename,
newFileContentType,
- testFileInputStream);
+ testFileInputStream,
+ null);
if (addFileHelper.hasError()){
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
index 366b41a0c84..c5001fcd8bd 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
@@ -230,12 +230,14 @@ public Response replaceFileInDataset(
addFileHelper.runForceReplaceFile(fileToReplaceId,
newFilename,
newFileContentType,
- testFileInputStream);
+ testFileInputStream,
+ null);
}else{
addFileHelper.runReplaceFile(fileToReplaceId,
newFilename,
newFileContentType,
- testFileInputStream);
+ testFileInputStream,
+ null);
}
msg("we're back.....");
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index 182ac1fdc49..fdff0c64420 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -191,7 +191,22 @@ public AddReplaceFileHelper(DataverseRequest dvRequest,
}
- public boolean runAddFileByDatasetId(Long datasetId, String newFileName, String newFileContentType, InputStream newFileInputStream){
+ /**
+ *
+ * @param datasetId
+ * @param newFileName
+ * @param newFileContentType
+ * @param newFileInputStream
+ * @param optionalFileParams
+ * @param description optional
+ * @param
+ * @return
+ */
+ public boolean runAddFileByDatasetId(Long datasetId,
+ String newFileName,
+ String newFileContentType,
+ InputStream newFileInputStream,
+ OptionalFileParams optionalFileParams){
msgt(">> runAddFileByDatasetId");
@@ -203,7 +218,7 @@ public boolean runAddFileByDatasetId(Long datasetId, String newFileName, String
return false;
}
- return this.runAddFile(this.dataset, newFileName, newFileContentType, newFileInputStream);
+ return this.runAddFile(this.dataset, newFileName, newFileContentType, newFileInputStream, optionalFileParams);
}
@@ -216,7 +231,11 @@ public boolean runAddFileByDatasetId(Long datasetId, String newFileName, String
* @param newFileInputStream
* @return
*/
- public boolean runAddFile(Dataset dataset, String newFileName, String newFileContentType, InputStream newFileInputStream){
+ public boolean runAddFile(Dataset dataset,
+ String newFileName,
+ String newFileContentType,
+ InputStream newFileInputStream,
+ OptionalFileParams optionalFileParams){
msgt(">> runAddFile");
initErrorHandling();
@@ -226,7 +245,7 @@ public boolean runAddFile(Dataset dataset, String newFileName, String newFileCon
}
this.currentOperation = FILE_ADD_OPERATION;
- return this.runAddReplaceFile(dataset, newFileName, newFileContentType, newFileInputStream);
+ return this.runAddReplaceFile(dataset, newFileName, newFileContentType, newFileInputStream, optionalFileParams);
}
@@ -239,7 +258,11 @@ public boolean runAddFile(Dataset dataset, String newFileName, String newFileCon
* @param newFileInputStream
* @return
*/
- public boolean runForceReplaceFile(Long oldFileId, String newFileName, String newFileContentType, InputStream newFileInputStream){
+ public boolean runForceReplaceFile(Long oldFileId,
+ String newFileName,
+ String newFileContentType,
+ InputStream newFileInputStream,
+ OptionalFileParams optionalFileParams){
msgt(">> runForceReplaceFile");
initErrorHandling();
@@ -259,14 +282,18 @@ public boolean runForceReplaceFile(Long oldFileId, String newFileName, String ne
}
- return this.runAddReplaceFile(fileToReplace.getOwner(), newFileName, newFileContentType, newFileInputStream);
+ return this.runAddReplaceFile(fileToReplace.getOwner(), newFileName, newFileContentType, newFileInputStream, optionalFileParams);
}
- public boolean runReplaceFile(Long oldFileId, String newFileName, String newFileContentType, InputStream newFileInputStream){
-
+ public boolean runReplaceFile(Long oldFileId,
+ String newFileName,
+ String newFileContentType,
+ InputStream newFileInputStream,
+ OptionalFileParams optionalFileParams){
+
msgt(">> runReplaceFile");
initErrorHandling();
@@ -284,7 +311,7 @@ public boolean runReplaceFile(Long oldFileId, String newFileName, String newFile
return false;
}
- return this.runAddReplaceFile(fileToReplace.getOwner(), newFileName, newFileContentType, newFileInputStream);
+ return this.runAddReplaceFile(fileToReplace.getOwner(), newFileName, newFileContentType, newFileInputStream, optionalFileParams);
}
@@ -309,8 +336,8 @@ public boolean runReplaceFile(Long oldFileId, String newFileName, String newFile
*/
private boolean runAddReplaceFile(Dataset dataset,
String newFileName, String newFileContentType,
- InputStream newFileInputStream
- ){
+ InputStream newFileInputStream,
+ OptionalFileParams optionalFileParams){
// Run "Phase 1" - Initial ingest of file + error check
// But don't save the dataset version yet
@@ -318,7 +345,9 @@ private boolean runAddReplaceFile(Dataset dataset,
boolean phase1Success = runAddReplacePhase1(dataset,
newFileName,
newFileContentType,
- newFileInputStream);
+ newFileInputStream,
+ optionalFileParams
+ );
if (!phase1Success){
return false;
@@ -340,7 +369,8 @@ private boolean runAddReplaceFile(Dataset dataset,
public boolean runAddReplacePhase1(Dataset dataset,
String newFileName,
String newFileContentType,
- InputStream newFileInputStream){
+ InputStream newFileInputStream,
+ OptionalFileParams optionalFileParams){
if (this.hasError()){
return false; // possible to have errors already...
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileUploadTestPage.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileUploadTestPage.java
index 53da697471c..3c2a9bbc9ac 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileUploadTestPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileUploadTestPage.java
@@ -202,13 +202,15 @@ public void addReplaceFile(UploadedFile laFile){
addFileHelper.runReplaceFile( fileToReplace.getId(),
laFile.getFileName(),
laFile.getContentType(),
- inputStream
+ inputStream,
+ null
);
}else{
- addFileHelper.runAddFile(dataset,
+ addFileHelper.runAddFileByDatasetId(dataset.getId(),
laFile.getFileName(),
laFile.getContentType(),
- inputStream);
+ inputStream,
+ null);
}
if (addFileHelper.hasError()){
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
new file mode 100644
index 00000000000..e0e0e27944f
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
@@ -0,0 +1,103 @@
+/*
+ * To change this license header, choose License Headers in Project Properties.
+ * To change this template file, choose Tools | Templates
+ * and open the template in the editor.
+ */
+package edu.harvard.iq.dataverse.datasetutility;
+
+import java.util.List;
+
+/**
+ * This is used in conjunction with the AddReplaceFileHelper
+ *
+ * It encapsulates these optional parameters:
+ *
+ * - description
+ * - file tags (can be custom)
+ * - tabular tags (controlled vocabulary)
+ *
+ * Future params:
+ * - Provenance related information
+ *
+ * @author rmp553
+ */
+public class OptionalFileParams {
+
+ private String description;
+
+ private List tags;
+
+ private List tabularTags;
+
+
+ public OptionalFileParams(String description){
+
+ this.description = description;
+ }
+
+ public OptionalFileParams(String description,
+ List tags){
+
+ this.description = description;
+ this.tags = tags;
+ }
+
+ public OptionalFileParams(String description,
+ List tags,
+ List tabularTags){
+
+ this.description = description;
+ this.tags = tags;
+ this.tabularTags = tabularTags;
+ }
+
+ /**
+ * Set description
+ * @param description
+ */
+ public void setDescription(String description){
+ this.description = description;
+ }
+
+ /**
+ * Get for description
+ * @return String
+ */
+ public String getDescription(){
+ return this.description;
+ }
+
+
+ /**
+ * Set tags
+ * @param tags
+ */
+ public void setTags(List tags){
+ this.tags = tags;
+ }
+
+ /**
+ * Get for tags
+ * @return List
+ */
+ public List getTags(){
+ return this.tags;
+ }
+
+
+ /**
+ * Set tabularTags
+ * @param tabularTags
+ */
+ public void setTabularTags(List tabularTags){
+ this.tabularTags = tabularTags;
+ }
+
+ /**
+ * Get for tabularTags
+ * @return List
+ */
+ public List getTabularTags(){
+ return this.tabularTags;
+ }
+}
From 78de5f5ccb1b3f10e439df03c42f84ef1bf4dbdf Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Tue, 18 Oct 2016 10:56:33 -0400
Subject: [PATCH 60/86] #2290, remove currently un-needed method
---
.../iq/dataverse/datasetutility/AddReplaceFileHelper.java | 7 +++++--
1 file changed, 5 insertions(+), 2 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index fdff0c64420..cd994107dd7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -218,7 +218,9 @@ public boolean runAddFileByDatasetId(Long datasetId,
return false;
}
- return this.runAddFile(this.dataset, newFileName, newFileContentType, newFileInputStream, optionalFileParams);
+ //return this.runAddFile(this.dataset, newFileName, newFileContentType, newFileInputStream, optionalFileParams);
+ return this.runAddReplaceFile(dataset, newFileName, newFileContentType, newFileInputStream, optionalFileParams);
+
}
@@ -231,6 +233,7 @@ public boolean runAddFileByDatasetId(Long datasetId,
* @param newFileInputStream
* @return
*/
+ /*
public boolean runAddFile(Dataset dataset,
String newFileName,
String newFileContentType,
@@ -246,7 +249,7 @@ public boolean runAddFile(Dataset dataset,
this.currentOperation = FILE_ADD_OPERATION;
return this.runAddReplaceFile(dataset, newFileName, newFileContentType, newFileInputStream, optionalFileParams);
- }
+ }*/
/**
From ac288198f7d21bf0b5cae9361d9621e6f2c02f32 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Tue, 18 Oct 2016 14:20:09 -0400
Subject: [PATCH 61/86] #2290 OptionalFileParams object to use for API when
adding description, tags, etc
---
.../edu/harvard/iq/dataverse/DataFileTag.java | 32 +++++
.../datasetutility/DataFileTagException.java | 24 ++++
.../datasetutility/OptionalFileParams.java | 95 ++++++++++++--
.../OptionalFileParamsTest.java | 121 ++++++++++++++++++
4 files changed, 262 insertions(+), 10 deletions(-)
create mode 100644 src/main/java/edu/harvard/iq/dataverse/datasetutility/DataFileTagException.java
create mode 100644 src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java b/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java
index d1fc22008bc..ef6867becd3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java
@@ -20,6 +20,7 @@
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.Table;
+import org.apache.commons.lang.StringUtils;
/**
*
@@ -167,4 +168,35 @@ public String toString() {
return "edu.harvard.iq.dataverse.DataFileTag[ id=" + id + " ]";
}
+
+ /**
+ * Static method to check whether a string is a valid tag
+ *
+ * Used for API check
+ *
+ * @param tagString
+ * @return
+ */
+ public static boolean isDataFileTag(String tagString){
+
+ if (tagString == null){
+ throw new NullPointerException("tagString cannot be null");
+ }
+
+ if (TagLabelToTypes.containsKey(tagString)){
+ return true;
+ }
+
+ return false;
+ }
+
+ public static List getListofLabels(){
+
+ return new ArrayList<>(TagTypeToLabels.values());
+ }
+
+ public static String getListofLabelsAsString(){
+
+ return StringUtils.join(DataFileTag.getListofLabels(), ", ");
+ }
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/DataFileTagException.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/DataFileTagException.java
new file mode 100644
index 00000000000..8ae0bfd6b2f
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/DataFileTagException.java
@@ -0,0 +1,24 @@
+/*
+ * To change this license header, choose License Headers in Project Properties.
+ * To change this template file, choose Tools | Templates
+ * and open the template in the editor.
+ */
+package edu.harvard.iq.dataverse.datasetutility;
+
+/**
+ *
+ * @author rmp553
+ */
+public class DataFileTagException extends Exception {
+
+ public DataFileTagException(String message) {
+ super(message);
+ }
+
+ public DataFileTagException(String message, Throwable cause) {
+ super(message, cause);
+ }
+
+}
+
+
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
index e0e0e27944f..434b2280d42 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
@@ -5,6 +5,12 @@
*/
package edu.harvard.iq.dataverse.datasetutility;
+import com.google.gson.Gson;
+import com.google.gson.JsonObject;
+import com.google.gson.reflect.TypeToken;
+import edu.harvard.iq.dataverse.DataFileTag;
+import java.lang.reflect.Type;
+import java.util.ArrayList;
import java.util.List;
/**
@@ -24,24 +30,25 @@
public class OptionalFileParams {
private String description;
+ public static final String DESCRIPTION_ATTR_NAME = "description";
private List tags;
-
+ public static final String TAGS_ATTR_NAME = "tags";
+
private List tabularTags;
+ public static final String TABULAR_TAGS_ATTR_NAME = "tabularTags";
- public OptionalFileParams(String description){
-
- this.description = description;
- }
-
- public OptionalFileParams(String description,
- List tags){
+
+
+ public OptionalFileParams(String jsonData) throws DataFileTagException{
- this.description = description;
- this.tags = tags;
+ if (jsonData != null){
+ loadParamsFromJson(jsonData);
+ }
}
+
public OptionalFileParams(String description,
List tags,
List tabularTags){
@@ -100,4 +107,72 @@ public void setTabularTags(List tabularTags){
public List getTabularTags(){
return this.tabularTags;
}
+
+ private void loadParamsFromJson(String jsonData) throws DataFileTagException{
+
+ if (jsonData == null){
+// logger.log(Level.SEVERE, "jsonData is null");
+ }
+ JsonObject jsonObj = new Gson().fromJson(jsonData, JsonObject.class);
+
+
+ // -------------------------------
+ // get description as string
+ // -------------------------------
+ if ((jsonObj.has(DESCRIPTION_ATTR_NAME)) && (!jsonObj.get(DESCRIPTION_ATTR_NAME).isJsonNull())){
+
+ this.description = jsonObj.get(DESCRIPTION_ATTR_NAME).getAsString();
+ }
+
+
+ // -------------------------------
+ // get tags
+ // -------------------------------
+ Gson gson = new Gson();
+
+ //Type objType = new TypeToken>() {}.getType();
+ Type listType = new TypeToken>() {}.getType();
+
+ // Load tags
+ if ((jsonObj.has(TAGS_ATTR_NAME)) && (!jsonObj.get(TAGS_ATTR_NAME).isJsonNull())){
+
+ this.tags = gson.fromJson(jsonObj.get(TAGS_ATTR_NAME), listType);
+ }
+
+ // Load tabular tags
+ if ((jsonObj.has(TABULAR_TAGS_ATTR_NAME)) && (!jsonObj.get(TABULAR_TAGS_ATTR_NAME).isJsonNull())){
+
+ // Make a new list
+ this.tabularTags = new ArrayList<>();
+
+ // Get potential tags from JSON
+ List potentialTags = gson.fromJson(jsonObj.get(TABULAR_TAGS_ATTR_NAME), listType);
+
+ // Add valid potential tags to the list
+ for (String tagToCheck : potentialTags){
+ if (DataFileTag.isDataFileTag(tagToCheck)){
+ this.tabularTags.add(tagToCheck);
+ }else{
+ throw new DataFileTagException("Not a valid Tabular Tag: [" + tagToCheck + "]. Please use one of the following: " + DataFileTag.getListofLabelsAsString());
+ }
+ }
+
+ // Shouldn't happen....
+ if (tabularTags.isEmpty()){
+ tabularTags = null;
+ }
+ }
+
+ }
+
+ private void msg(String s){
+ System.out.println(s);
+ }
+
+ private void msgt(String s){
+ msg("-------------------------------");
+ msg(s);
+ msg("-------------------------------");
+ }
+
}
diff --git a/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java b/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java
new file mode 100644
index 00000000000..df44871b7b6
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java
@@ -0,0 +1,121 @@
+/*
+ * To change this license header, choose License Headers in Project Properties.
+ * To change this template file, choose Tools | Templates
+ * and open the template in the editor.
+ */
+package edu.harvard.iq.dataverse.datasetutility;
+
+import java.util.Arrays;
+import java.util.List;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import static org.junit.Assert.*;
+
+/**
+ *
+ * @author rmp553
+ */
+public class OptionalFileParamsTest {
+
+ public OptionalFileParamsTest() {
+ }
+
+ /**
+ * Good Json Description
+ */
+ @Test
+ public void test_01_jsonDescriptionGood() throws DataFileTagException {
+
+ msgt("test_01_jsonDescription");
+
+ String val = "A new file";
+ String jsonParams = "{\"description\": \"" + val + "\"}";
+
+ OptionalFileParams instance = new OptionalFileParams(jsonParams);
+
+ assertEquals(instance.getDescription(), val);
+ assertNull(instance.getTags());
+ assertNull(instance.getTabularTags());
+
+ }
+
+ /**
+ * Good Json Description
+ */
+ @Test
+ public void test_02_jsonDescriptionNumeric() throws DataFileTagException {
+
+ msgt("test_02_jsonDescriptionNumeric");
+
+ String jsonParams = "{\"description\": 250 }";
+
+ OptionalFileParams instance = new OptionalFileParams(jsonParams);
+
+ assertEquals(instance.getDescription(), "250");
+
+ }
+
+ /**
+ * Good Json Description
+ */
+ @Test
+ public void test_03_jsonNull() throws DataFileTagException {
+
+ msgt("test_03_jsonNull");
+
+ //String val = "A new file";
+ String jsonParams = null;
+
+ OptionalFileParams instance = new OptionalFileParams(jsonParams);
+
+ assertEquals(instance.getDescription(), null);
+
+ }
+
+ /**
+ * Good Json Description
+ */
+ @Test
+ public void test_04_jsonTagsGood() throws DataFileTagException {
+
+ msgt("test_04_jsonTagsGood");
+
+ String val = "A new file";
+ String jsonParams = "{\"description\": \"A new file\", \"tags\": [\"dog\", \"cat\", \"mouse\"]}";
+
+ OptionalFileParams instance = new OptionalFileParams(jsonParams);
+
+ assertEquals(instance.getDescription(), val);
+
+ List expectedTags = Arrays.asList("dog", "cat", "mouse");
+ assertEquals(expectedTags, instance.getTags());
+
+ assertNull(instance.getTabularTags());
+
+ }
+
+ private void msg(String s){
+ System.out.println(s);
+ }
+
+ private void msgt(String s){
+ msg("-------------------------------");
+ msg(s);
+ msg("-------------------------------");
+ }
+}
+
+/*
+Python for creating escaped JSON objects
+
+import json
+d = dict(description="A new file",
+ tags=["dog", "cat", "mouse"])
+print json.dumps(json.dumps(d))
+
+# result:
+# "{\"description\": \"A new file\", \"tags\": [\"dog\", \"cat\", \"mouse\"]}"
+*/
\ No newline at end of file
From ee5245baf3cbfe58ee2a8c8bcf3e031d7b3f7059 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Tue, 18 Oct 2016 16:17:11 -0400
Subject: [PATCH 62/86] #2290 changed tabularTags to fileDataTags
---
src/main/java/Bundle.properties | 3 +-
.../datasetutility/OptionalFileParams.java | 97 ++++++++++----
.../OptionalFileParamsTest.java | 124 +++++++++++++++++-
3 files changed, 190 insertions(+), 34 deletions(-)
diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties
index ef339afdefe..871f36cde4c 100755
--- a/src/main/java/Bundle.properties
+++ b/src/main/java/Bundle.properties
@@ -1460,4 +1460,5 @@ file.addreplace.error.remove_linked_file.file=file cannot be null in removeLinke
file.addreplace.error.phase2_called_early_no_new_files=There was an error saving the dataset--no new files found.
file.addreplace.success.add=File successfully added!
file.addreplace.success.replace=File successfully replaced!
-file.addreplace.error.auth=The API key is invalid.
\ No newline at end of file
+file.addreplace.error.auth=The API key is invalid.
+file.addreplace.error.invalid_datafile_tag=Not a valid Tabular Tag:
\ No newline at end of file
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
index 434b2280d42..7e81a225681 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
@@ -11,7 +11,9 @@
import edu.harvard.iq.dataverse.DataFileTag;
import java.lang.reflect.Type;
import java.util.ArrayList;
+import java.util.Collections;
import java.util.List;
+import java.util.ResourceBundle;
/**
* This is used in conjunction with the AddReplaceFileHelper
@@ -35,8 +37,8 @@ public class OptionalFileParams {
private List tags;
public static final String TAGS_ATTR_NAME = "tags";
- private List tabularTags;
- public static final String TABULAR_TAGS_ATTR_NAME = "tabularTags";
+ private List fileDataTags;
+ public static final String FILE_DATA_TAGS_ATTR_NAME = "fileDataTags";
@@ -51,11 +53,11 @@ public OptionalFileParams(String jsonData) throws DataFileTagException{
public OptionalFileParams(String description,
List tags,
- List tabularTags){
+ List potentialFileDataTags) throws DataFileTagException{
this.description = description;
this.tags = tags;
- this.tabularTags = tabularTags;
+ this.addFileDataTags(potentialFileDataTags);
}
/**
@@ -74,6 +76,26 @@ public String getDescription(){
return this.description;
}
+ public boolean hasTags(){
+ if ((tags == null)||(this.tags.isEmpty())){
+ return false;
+ }
+ return true;
+ }
+
+ public boolean hasFileDataTags(){
+ if ((fileDataTags == null)||(this.fileDataTags.isEmpty())){
+ return false;
+ }
+ return true;
+ }
+
+ public boolean hasDescription(){
+ if ((description == null)||(this.description.isEmpty())){
+ return false;
+ }
+ return true;
+ }
/**
* Set tags
@@ -93,19 +115,19 @@ public List getTags(){
/**
- * Set tabularTags
- * @param tabularTags
+ * Set fileDataTags
+ * @param fileDataTags
*/
- public void setTabularTags(List tabularTags){
- this.tabularTags = tabularTags;
+ public void setFileDataTags(List fileDataTags){
+ this.fileDataTags = fileDataTags;
}
/**
- * Get for tabularTags
+ * Getter for fileDataTags
* @return List
*/
- public List getTabularTags(){
- return this.tabularTags;
+ public List getFileDataTags(){
+ return this.fileDataTags;
}
private void loadParamsFromJson(String jsonData) throws DataFileTagException{
@@ -140,31 +162,52 @@ private void loadParamsFromJson(String jsonData) throws DataFileTagException{
}
// Load tabular tags
- if ((jsonObj.has(TABULAR_TAGS_ATTR_NAME)) && (!jsonObj.get(TABULAR_TAGS_ATTR_NAME).isJsonNull())){
+ if ((jsonObj.has(FILE_DATA_TAGS_ATTR_NAME)) && (!jsonObj.get(FILE_DATA_TAGS_ATTR_NAME).isJsonNull())){
- // Make a new list
- this.tabularTags = new ArrayList<>();
// Get potential tags from JSON
- List potentialTags = gson.fromJson(jsonObj.get(TABULAR_TAGS_ATTR_NAME), listType);
+ List potentialTags = gson.fromJson(jsonObj.get(FILE_DATA_TAGS_ATTR_NAME), listType);
// Add valid potential tags to the list
- for (String tagToCheck : potentialTags){
- if (DataFileTag.isDataFileTag(tagToCheck)){
- this.tabularTags.add(tagToCheck);
- }else{
- throw new DataFileTagException("Not a valid Tabular Tag: [" + tagToCheck + "]. Please use one of the following: " + DataFileTag.getListofLabelsAsString());
- }
- }
-
- // Shouldn't happen....
- if (tabularTags.isEmpty()){
- tabularTags = null;
- }
+ addFileDataTags(potentialTags);
+
}
}
+
+ private void addFileDataTags(List potentialTags) throws DataFileTagException{
+
+ if (potentialTags == null){
+ return;
+ }
+
+ potentialTags.removeAll(Collections.singleton(""));
+ potentialTags.removeAll(Collections.singleton(null));
+
+ if (potentialTags.isEmpty()){
+ return;
+ }
+
+ // Make a new list
+ this.fileDataTags = new ArrayList<>();
+
+ // Add valid potential tags to the list
+ for (String tagToCheck : potentialTags){
+ if (DataFileTag.isDataFileTag(tagToCheck)){
+ this.fileDataTags.add(tagToCheck);
+ }else{
+ String errMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.invalid_datafile_tag");
+ throw new DataFileTagException(errMsg + " [" + tagToCheck + "]. Please use one of the following: " + DataFileTag.getListofLabelsAsString());
+ }
+ }
+ // Shouldn't happen....
+ if (fileDataTags.isEmpty()){
+ fileDataTags = null;
+ }
+ }
+
+
private void msg(String s){
System.out.println(s);
}
diff --git a/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java b/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java
index df44871b7b6..d7942c9bd8b 100644
--- a/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java
@@ -7,6 +7,8 @@
import java.util.Arrays;
import java.util.List;
+import java.util.ResourceBundle;
+import org.hamcrest.Matchers;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
@@ -38,7 +40,7 @@ public void test_01_jsonDescriptionGood() throws DataFileTagException {
assertEquals(instance.getDescription(), val);
assertNull(instance.getTags());
- assertNull(instance.getTabularTags());
+ assertNull(instance.getFileDataTags());
}
@@ -93,10 +95,111 @@ public void test_04_jsonTagsGood() throws DataFileTagException {
List expectedTags = Arrays.asList("dog", "cat", "mouse");
assertEquals(expectedTags, instance.getTags());
- assertNull(instance.getTabularTags());
+ assertNull(instance.getFileDataTags());
+ assertTrue(instance.hasTags());
+ assertTrue(instance.hasDescription());
+ assertFalse(instance.hasFileDataTags());
+
+ }
+
+ @Test
+ public void test_05_jsonTabularTagsGood() throws DataFileTagException {
+
+ msgt("test_05_jsonTabularTagsGood");
+
+ String val = "A new file";
+ String jsonParams = "{\"fileDataTags\": [\"Survey\", \"Event\", \"Panel\"], \"description\": \"A new file\"}";
+
+ OptionalFileParams instance = new OptionalFileParams(jsonParams);
+
+ assertEquals(instance.getDescription(), val);
+
+ List expectedTags = Arrays.asList("Survey", "Event", "Panel");
+ assertEquals(expectedTags, instance.getFileDataTags());
+
+ assertNull(instance.getTags());
+ assertFalse(instance.hasTags());
+ assertTrue(instance.hasDescription());
+ assertTrue(instance.hasFileDataTags());
+ }
+
+ @Test
+ public void test_06_jsonTabularTagsBad() throws DataFileTagException {
+
+ msgt("test_06_jsonTabularTagsBad");
+
+ String val = "A new file";
+ String jsonParams = "{\"fileDataTags\": [\"Survey\", \"Event\", \"xPanel\"], \"description\": \"A new file\"}";
+
+ try{
+ OptionalFileParams instance = new OptionalFileParams(jsonParams);
+ }catch(DataFileTagException ex){
+ // msgt("ex: " + ex.getMessage());
+ String errMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.invalid_datafile_tag");
+ msgt("errMsg: " + errMsg);
+ assertTrue(ex.getMessage().startsWith(errMsg));
+ }
+ }
+
+
+ @Test
+ public void test_07_regularInstanceGood() throws DataFileTagException {
+
+ msgt("test_07_regularInstanceGood");
+ String val = "A new file";
+ List tags = Arrays.asList("dog", "cat", "mouse");
+ List fileDataTags = Arrays.asList("Survey", "Event", "Panel");
+
+ OptionalFileParams instance = new OptionalFileParams(val,
+ tags,
+ fileDataTags);
+
+ assertEquals(val, instance.getDescription());
+ assertEquals(tags, instance.getTags());
+ assertEquals(fileDataTags, instance.getFileDataTags());
+
+ }
+
+ @Test
+ public void test_08_regularInstanceGoodWithNulls() throws DataFileTagException {
+
+ msgt("test_08_regularInstanceGoodWithNulls");
+
+ String val = null;
+ List tags = null;//Arrays.asList("dog", "cat", "mouse");
+ List fileDataTags = Arrays.asList("Survey", "Event", "Panel");
+
+ OptionalFileParams instance = new OptionalFileParams(val,
+ tags,
+ fileDataTags);
+
+ assertEquals(val, instance.getDescription());
+ assertEquals(tags, instance.getTags());
+ assertEquals(fileDataTags, instance.getFileDataTags());
+
+ }
+
+ @Test
+ public void test_08_regularInstanceBadTabularTag() throws DataFileTagException {
+
+ msgt("test_08_regularInstanceGoodWithNulls");
+
+ String val = null;
+ List tags = null;//Arrays.asList("dog", "cat", "mouse");
+ List fileDataTags = Arrays.asList("Survey", "Event", "Panel");
+
+ OptionalFileParams instance = new OptionalFileParams(val,
+ tags,
+ fileDataTags);
+
+ assertEquals(val, instance.getDescription());
+ assertEquals(tags, instance.getTags());
+ assertEquals(fileDataTags, instance.getFileDataTags());
+
}
+
private void msg(String s){
System.out.println(s);
}
@@ -111,11 +214,20 @@ private void msgt(String s){
/*
Python for creating escaped JSON objects
-import json
-d = dict(description="A new file",
- tags=["dog", "cat", "mouse"])
-print json.dumps(json.dumps(d))
+import json; d = dict(description="A new file",tags=["dog", "cat", "mouse"]); print json.dumps(json.dumps(d))
# result:
# "{\"description\": \"A new file\", \"tags\": [\"dog\", \"cat\", \"mouse\"]}"
+
+
+
+d = dict(description="A new file",
+ tabular_tags=["Survey", "Event", "Panel"])
+print json.dumps(json.dumps(d))
+
+# "{\"fileDataTags\": [\"Survey\", \"Event\", \"Panel\"], \"description\": \"A new file\"}"
+
+
+#import json; d = dict(tags=["dog", "cat", "mouse"]); print json.dumps(json.dumps(d))
+
*/
\ No newline at end of file
From 8583c6cbc47fdcb145b77553602bbf89a99fc174 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Tue, 18 Oct 2016 17:17:03 -0400
Subject: [PATCH 63/86] #2290 Add description and tags via API for
add/replace.
---
.../edu/harvard/iq/dataverse/DataFileTag.java | 18 ++++-
.../harvard/iq/dataverse/api/Datasets.java | 16 +++-
.../edu/harvard/iq/dataverse/api/Files.java | 75 ++++---------------
.../datasetutility/AddReplaceFileHelper.java | 41 +++++++++-
.../datasetutility/OptionalFileParams.java | 42 +++++++++++
.../OptionalFileParamsTest.java | 43 +++++++----
6 files changed, 155 insertions(+), 80 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java b/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java
index ef6867becd3..c9b3f85888a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java
@@ -177,19 +177,29 @@ public String toString() {
* @param tagString
* @return
*/
- public static boolean isDataFileTag(String tagString){
+ public static boolean isDataFileTag(String label){
- if (tagString == null){
- throw new NullPointerException("tagString cannot be null");
+ if (label == null){
+ throw new NullPointerException("label cannot be null");
}
- if (TagLabelToTypes.containsKey(tagString)){
+ if (TagLabelToTypes.containsKey(label)){
return true;
}
return false;
}
+ public TagType getDataFileTagFromLabel(String label){
+
+ if (!TagLabelToTypes.containsKey(label)){
+ return null;
+ }
+
+ return TagLabelToTypes.get(label);
+ }
+
+
public static List getListofLabels(){
return new ArrayList<>(TagTypeToLabels.values());
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
index 4295d7ef0ff..32da5dc222a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -20,6 +20,8 @@
import edu.harvard.iq.dataverse.authorization.RoleAssignee;
import edu.harvard.iq.dataverse.authorization.users.User;
import edu.harvard.iq.dataverse.datasetutility.AddReplaceFileHelper;
+import edu.harvard.iq.dataverse.datasetutility.DataFileTagException;
+import edu.harvard.iq.dataverse.datasetutility.OptionalFileParams;
import edu.harvard.iq.dataverse.engine.command.Command;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.engine.command.impl.AssignRoleCommand;
@@ -831,6 +833,18 @@ public Response addFileToDataset(@PathParam("id") Long datasetId,
);
}
+ // (2a) Load up optional params via JSON
+ //---------------------------------------
+ OptionalFileParams optionalFileParams = null;
+ msgt("(api) jsonData: " + jsonData);
+
+ try {
+ optionalFileParams = new OptionalFileParams(jsonData);
+ } catch (DataFileTagException ex) {
+ return errorResponse( Response.Status.BAD_REQUEST, ex.getMessage());
+ }
+
+
//-------------------
// (3) Create the AddReplaceFileHelper object
//-------------------
@@ -852,7 +866,7 @@ public Response addFileToDataset(@PathParam("id") Long datasetId,
newFilename,
newFileContentType,
testFileInputStream,
- null);
+ optionalFileParams);
if (addFileHelper.hasError()){
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
index c5001fcd8bd..fcbc1932b65 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
@@ -19,6 +19,8 @@
import static edu.harvard.iq.dataverse.api.AbstractApiBean.errorResponse;
import edu.harvard.iq.dataverse.authorization.users.User;
import edu.harvard.iq.dataverse.datasetutility.AddReplaceFileHelper;
+import edu.harvard.iq.dataverse.datasetutility.DataFileTagException;
+import edu.harvard.iq.dataverse.datasetutility.OptionalFileParams;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
import java.io.FileInputStream;
@@ -65,63 +67,7 @@ public class Files extends AbstractApiBean {
private static final Logger logger = Logger.getLogger(Files.class.getName());
- // for testing
- private static final String SERVER_UPLOAD_LOCATION_FOLDER = "/Users/rmp553/Documents/iqss-git/dataverse-helper-scripts/src/api_scripts/output/";
-
-
-
- /**
- * get existing test file from this directory:
- * "scripts/search/data/replace_test/"
- *
- * @param existingFileName
- * @return
- */
- private InputStream getExistingFileInputStream(String existingFileName){
- if (existingFileName == null){
- return null;
- }
- InputStream inputStream = null;
-
- //System.out.println("Current path: " + Paths.get(".").toAbsolutePath().normalize().toString());
- String pathToFileName = "(some path)/scripts/search/data/replace_test/" + existingFileName;
-
- try {
- inputStream = new FileInputStream(pathToFileName);
- //is.close();
- } catch (FileNotFoundException e) {
- e.printStackTrace();
- return null;
- } catch (IOException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- return null;
- }
-
- return inputStream;
- }
-
- private InputStream getSampleFile(){
-
- InputStream inputStream = null;
- String testFileInputStreamName = "/Users/rmp553/Documents/iqss-git/dataverse-helper-scripts/src/api_scripts/input/howdy3.txt";
- //testFileInputStreamName = "/Users/rmp553/NetBeansProjects/dataverse/src/main/java/edu/harvard/iq/dataverse/datasetutility/howdy.txt";
- try {
- inputStream = new FileInputStream(testFileInputStreamName);
- //is.close();
- } catch (FileNotFoundException e) {
- e.printStackTrace();
- return null;
- } catch (IOException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- return null;
- }
-
- return inputStream;
-
- }
private void msg(String m){
System.out.println(m);
@@ -174,7 +120,7 @@ public Response replaceFileInDataset(
return errorResponse( Response.Status.BAD_REQUEST, "No JSON data");
}
JsonObject jsonObj = new Gson().fromJson(jsonData, JsonObject.class);
-
+
// (2a) Check for required "fileToReplaceId"
// -------------------------------------
if ((!jsonObj.has("fileToReplaceId")) || jsonObj.get("fileToReplaceId").isJsonNull()){
@@ -199,6 +145,17 @@ public Response replaceFileInDataset(
forceReplace = false;
}
}
+
+
+ // (2d) Load up optional params via JSON
+ // - Will skip extra attributes which includes fileToReplaceId and forceReplace
+ //---------------------------------------
+ OptionalFileParams optionalFileParams = null;
+ try {
+ optionalFileParams = new OptionalFileParams(jsonData);
+ } catch (DataFileTagException ex) {
+ return errorResponse( Response.Status.BAD_REQUEST, ex.getMessage());
+ }
// -------------------------------------
@@ -231,13 +188,13 @@ public Response replaceFileInDataset(
newFilename,
newFileContentType,
testFileInputStream,
- null);
+ optionalFileParams);
}else{
addFileHelper.runReplaceFile(fileToReplaceId,
newFilename,
newFileContentType,
testFileInputStream,
- null);
+ optionalFileParams);
}
msg("we're back.....");
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index cd994107dd7..87c4f583903 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -404,9 +404,14 @@ public boolean runAddReplacePhase1(Dataset dataset,
msgt("step_050_checkForConstraintViolations");
if (!this.step_050_checkForConstraintViolations()){
- return false;
-
+ return false;
}
+
+ msgt("step_055_loadOptionalFileParams");
+ if (!this.step_055_loadOptionalFileParams(optionalFileParams)){
+ return false;
+ }
+
return true;
}
@@ -1051,6 +1056,38 @@ private boolean step_050_checkForConstraintViolations(){
}
+ /**
+ * Load optional file params such as description, tags, fileDataTags, etc..
+ *
+ * @param optionalFileParams
+ * @return
+ */
+ private boolean step_055_loadOptionalFileParams(OptionalFileParams optionalFileParams){
+
+ if (hasError()){
+ return false;
+ }
+
+ // --------------------------------------------
+ // OK, the object may be null
+ // --------------------------------------------
+ if (optionalFileParams == null){
+ return true;
+ }
+
+
+ // --------------------------------------------
+ // Iterate through files (should only be 1 for now)
+ // Add tags, description, etc
+ // --------------------------------------------
+ for (DataFile df : finalFileList){
+ optionalFileParams.addOptionalParams(df);
+ }
+
+
+ return true;
+ }
+
private boolean step_060_addFilesViaIngestService(){
if (this.hasError()){
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
index 7e81a225681..96498a79f45 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
@@ -8,7 +8,9 @@
import com.google.gson.Gson;
import com.google.gson.JsonObject;
import com.google.gson.reflect.TypeToken;
+import edu.harvard.iq.dataverse.DataFile;
import edu.harvard.iq.dataverse.DataFileTag;
+import edu.harvard.iq.dataverse.FileMetadata;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Collections;
@@ -132,7 +134,9 @@ public List getFileDataTags(){
private void loadParamsFromJson(String jsonData) throws DataFileTagException{
+ msgt("jsonData: " + jsonData);
if (jsonData == null){
+ return;
// logger.log(Level.SEVERE, "jsonData is null");
}
JsonObject jsonObj = new Gson().fromJson(jsonData, JsonObject.class);
@@ -159,6 +163,10 @@ private void loadParamsFromJson(String jsonData) throws DataFileTagException{
if ((jsonObj.has(TAGS_ATTR_NAME)) && (!jsonObj.get(TAGS_ATTR_NAME).isJsonNull())){
this.tags = gson.fromJson(jsonObj.get(TAGS_ATTR_NAME), listType);
+
+ if (this.tags.isEmpty()){
+ this.tags = null;
+ }
}
// Load tabular tags
@@ -217,5 +225,39 @@ private void msgt(String s){
msg(s);
msg("-------------------------------");
}
+
+ /**
+ * Add parameters to the new DataFile file
+ *
+ */
+ public void addOptionalParams(DataFile df) {
+ if (df == null){
+ throw new NullPointerException("The datafile cannot be null!");
+ }
+
+ FileMetadata fm = df.getFileMetadata();
+
+ // Add description
+ //
+ if (hasDescription()){
+ fm.setDescription(this.getDescription());
+ }
+
+ // Add tags
+ //
+ if (hasTags()){
+ for (String tagText : this.getTags()){
+ fm.addCategoryByName(tagText);
+ }
+ }
+
+ // file data tags: TO DO!!!
+ //
+ if (hasFileDataTags()){
+ for (String tagLabel : this.getFileDataTags()){
+ }
+ }
+
+ }
}
diff --git a/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java b/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java
index d7942c9bd8b..226fe725e55 100644
--- a/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java
@@ -73,7 +73,7 @@ public void test_03_jsonNull() throws DataFileTagException {
OptionalFileParams instance = new OptionalFileParams(jsonParams);
- assertEquals(instance.getDescription(), null);
+ assertNull(instance.getDescription());
}
@@ -181,22 +181,23 @@ public void test_08_regularInstanceGoodWithNulls() throws DataFileTagException {
}
@Test
- public void test_08_regularInstanceBadTabularTag() throws DataFileTagException {
+ public void test_09_unusedParamsGood() throws DataFileTagException {
msgt("test_08_regularInstanceGoodWithNulls");
-
- String val = null;
- List tags = null;//Arrays.asList("dog", "cat", "mouse");
- List fileDataTags = Arrays.asList("Survey", "Event", "Panel");
+
+ String jsonParams = "{\"forceReplace\": \"unused within OptionalFileParams\", \"oldFileId\": \"unused within OptionalFileParams\", \"description\": null, \"unusedParam1\": \"haha\", \"tags\": []}";
- OptionalFileParams instance = new OptionalFileParams(val,
- tags,
- fileDataTags);
+ OptionalFileParams instance = new OptionalFileParams(jsonParams);
+
+ assertNull(instance.getDescription());
+ assertFalse(instance.hasDescription());
+
+ assertNull(instance.getTags());
+ assertFalse(instance.hasTags());
+
+ assertNull(instance.getFileDataTags());
+ assertFalse(instance.hasFileDataTags());
- assertEquals(val, instance.getDescription());
- assertEquals(tags, instance.getTags());
- assertEquals(fileDataTags, instance.getFileDataTags());
-
}
@@ -214,7 +215,10 @@ private void msgt(String s){
/*
Python for creating escaped JSON objects
-import json; d = dict(description="A new file",tags=["dog", "cat", "mouse"]); print json.dumps(json.dumps(d))
+import json
+d = dict(description="A new file"
+ ,tags=["dog", "cat", "mouse"])
+print json.dumps(json.dumps(d))
# result:
# "{\"description\": \"A new file\", \"tags\": [\"dog\", \"cat\", \"mouse\"]}"
@@ -230,4 +234,15 @@ private void msgt(String s){
#import json; d = dict(tags=["dog", "cat", "mouse"]); print json.dumps(json.dumps(d))
+
+import json
+d = dict(description="A new file",
+ tags=["dog", "cat", "mouse"],
+ unusedParam1="haha",
+ forceReplace="unused within OptionalFileParams",
+ oldFileId="unused within OptionalFileParams"
+)
+print json.dumps(json.dumps(d))
+
+
*/
\ No newline at end of file
From 8741a04f285948527697ff82ce1a37e71d4ae7ab Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Wed, 19 Oct 2016 10:53:51 -0400
Subject: [PATCH 64/86] #2290 Tag fix to avoid adding duplicates
---
.../datasetutility/OptionalFileParams.java | 63 +++++++++++++------
.../OptionalFileParamsTest.java | 8 +--
2 files changed, 49 insertions(+), 22 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
index 96498a79f45..4f74d4359f5 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
@@ -16,6 +16,7 @@
import java.util.Collections;
import java.util.List;
import java.util.ResourceBundle;
+import java.util.stream.Collectors;
/**
* This is used in conjunction with the AddReplaceFileHelper
@@ -54,11 +55,11 @@ public OptionalFileParams(String jsonData) throws DataFileTagException{
public OptionalFileParams(String description,
- List tags,
+ List newTags,
List potentialFileDataTags) throws DataFileTagException{
this.description = description;
- this.tags = tags;
+ setTags(newTags);
this.addFileDataTags(potentialFileDataTags);
}
@@ -103,8 +104,17 @@ public boolean hasDescription(){
* Set tags
* @param tags
*/
- public void setTags(List tags){
- this.tags = tags;
+ public void setTags(List newTags){
+
+ if (newTags != null){
+ newTags = removeDuplicatesNullsEmptyStrings(newTags);
+ if (newTags.isEmpty()){
+ newTags = null;
+ }
+ }
+
+
+ this.tags = newTags;
}
/**
@@ -162,11 +172,7 @@ private void loadParamsFromJson(String jsonData) throws DataFileTagException{
// Load tags
if ((jsonObj.has(TAGS_ATTR_NAME)) && (!jsonObj.get(TAGS_ATTR_NAME).isJsonNull())){
- this.tags = gson.fromJson(jsonObj.get(TAGS_ATTR_NAME), listType);
-
- if (this.tags.isEmpty()){
- this.tags = null;
- }
+ setTags(this.tags = gson.fromJson(jsonObj.get(TAGS_ATTR_NAME), listType));
}
// Load tabular tags
@@ -183,16 +189,31 @@ private void loadParamsFromJson(String jsonData) throws DataFileTagException{
}
+ private List removeDuplicatesNullsEmptyStrings(List tagsToCheck){
+
+ if (tagsToCheck == null){
+ throw new NullPointerException("tagsToCheck cannot be null");
+ }
+
+
+ return tagsToCheck.stream()
+ .filter(p -> p != null) // no nulls
+ .map(String :: trim) // strip strings
+ .filter(p -> p.length() > 0 ) // no empty strings
+ .distinct() // distinct
+ .collect(Collectors.toList());
+
+ }
+
private void addFileDataTags(List potentialTags) throws DataFileTagException{
if (potentialTags == null){
return;
}
-
- potentialTags.removeAll(Collections.singleton(""));
- potentialTags.removeAll(Collections.singleton(null));
-
+
+ potentialTags = removeDuplicatesNullsEmptyStrings(potentialTags);
+
if (potentialTags.isEmpty()){
return;
}
@@ -227,7 +248,7 @@ private void msgt(String s){
}
/**
- * Add parameters to the new DataFile file
+ * Add parameters to a DataFile object
*
*/
public void addOptionalParams(DataFile df) {
@@ -237,22 +258,28 @@ public void addOptionalParams(DataFile df) {
FileMetadata fm = df.getFileMetadata();
+ // ---------------------------
// Add description
- //
+ // ---------------------------
if (hasDescription()){
fm.setDescription(this.getDescription());
}
+ // ---------------------------
// Add tags
- //
+ // ---------------------------
if (hasTags()){
+ List currentCategories = fm.getCategoriesByName();
for (String tagText : this.getTags()){
- fm.addCategoryByName(tagText);
+ if (!currentCategories.contains(tagText)){
+ fm.addCategoryByName(tagText);
+ }
}
}
+ // ---------------------------
// file data tags: TO DO!!!
- //
+ // ---------------------------
if (hasFileDataTags()){
for (String tagLabel : this.getFileDataTags()){
}
diff --git a/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java b/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java
index 226fe725e55..bf34bd86cda 100644
--- a/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java
@@ -148,7 +148,7 @@ public void test_07_regularInstanceGood() throws DataFileTagException {
msgt("test_07_regularInstanceGood");
String val = "A new file";
- List tags = Arrays.asList("dog", "cat", "mouse");
+ List tags = Arrays.asList("dog", " dog ", "cat", "mouse", "dog ");
List fileDataTags = Arrays.asList("Survey", "Event", "Panel");
OptionalFileParams instance = new OptionalFileParams(val,
@@ -156,7 +156,7 @@ public void test_07_regularInstanceGood() throws DataFileTagException {
fileDataTags);
assertEquals(val, instance.getDescription());
- assertEquals(tags, instance.getTags());
+ assertEquals( Arrays.asList("dog", "cat", "mouse"),instance.getTags());
assertEquals(fileDataTags, instance.getFileDataTags());
}
@@ -168,7 +168,7 @@ public void test_08_regularInstanceGoodWithNulls() throws DataFileTagException {
String val = null;
List tags = null;//Arrays.asList("dog", "cat", "mouse");
- List fileDataTags = Arrays.asList("Survey", "Event", "Panel");
+ List fileDataTags = Arrays.asList("Survey", "Survey", "Event", "Panel", "Survey", " ");
OptionalFileParams instance = new OptionalFileParams(val,
tags,
@@ -176,7 +176,7 @@ public void test_08_regularInstanceGoodWithNulls() throws DataFileTagException {
assertEquals(val, instance.getDescription());
assertEquals(tags, instance.getTags());
- assertEquals(fileDataTags, instance.getFileDataTags());
+ assertEquals(Arrays.asList("Survey", "Event", "Panel"), instance.getFileDataTags());
}
From 007dc57318a8f7ccb5cfadffdd01766514549bd8 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Wed, 19 Oct 2016 13:42:37 -0400
Subject: [PATCH 65/86] Add tag labels #2290 to output json
---
.../edu/harvard/iq/dataverse/DataFile.java | 56 ++++++++++--
.../datasetutility/OptionalFileParams.java | 91 ++++++++++++++++---
2 files changed, 128 insertions(+), 19 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
index 580228ad0ed..c015e8e73c2 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
@@ -25,7 +25,9 @@
import java.nio.file.Files;
import java.util.Arrays;
import java.util.HashMap;
+import java.util.Iterator;
import java.util.Map;
+import java.util.stream.Collectors;
import javax.persistence.Entity;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
@@ -263,6 +265,45 @@ public List getTags() {
return dataFileTags;
}
+ public List getTagLabels(){
+
+ List currentDataTags = this.getTags();
+ List tagStrings = new ArrayList<>();
+
+ if (( currentDataTags != null)||(!currentDataTags.isEmpty())){
+
+ Iterator itr = currentDataTags.iterator();
+ while (itr.hasNext()){
+ DataFileTag element = (DataFileTag)itr.next();
+ tagStrings.add(element.getTypeLabel());
+ }
+ }
+ return tagStrings;
+ }
+
+ /**
+ * Return a list of Tag labels
+ *
+ * If there are none, return an empty list
+ *
+ * @return
+ */
+ /*
+ public List getTagsLabelsOnly() {
+
+ List tags = this.getTags();
+
+ if (tags == null){
+ return new ArrayList();
+ }
+
+ return tags.stream()
+ .map(x -> x.getTypeLabel())
+ .collect(Collectors.toList())
+ ;
+ }
+ */
+
public void setTags(List dataFileTags) {
this.dataFileTags = dataFileTags;
}
@@ -833,7 +874,12 @@ public JsonObject asGsonObject(boolean prettyPrint){
);
// ----------------------------------
- // Checksum map
+ // Tags
+ // ----------------------------------
+ jsonObj.getAsJsonObject().add("tags", gson.toJsonTree(getTagLabels()));
+
+ // ----------------------------------
+ // Checksum
// ----------------------------------
Map checkSumMap = new HashMap();
checkSumMap.put("type", getChecksumType().toString());
@@ -844,11 +890,9 @@ public JsonObject asGsonObject(boolean prettyPrint){
jsonObj.getAsJsonObject().add("checksum", checkSumJSONMap);
- //JsonObject fileMetadataGson = this.getFileMetadata().asGsonObject(prettyPrint);
-
- //jsonObj.getAsJsonObject().add("fileMetadata", fileMetadataGson);
-
- //JsonObject fileMetadataJSON = new JsonObject();
+ // ----------------------------------
+ // Overarching data key
+ // ----------------------------------
JsonObject fullFileJSON = new JsonObject();
fullFileJSON.add(overarchingKey, jsonObj);
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
index 4f74d4359f5..762b7865ea7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
@@ -194,8 +194,7 @@ private List removeDuplicatesNullsEmptyStrings(List tagsToCheck)
if (tagsToCheck == null){
throw new NullPointerException("tagsToCheck cannot be null");
}
-
-
+
return tagsToCheck.stream()
.filter(p -> p != null) // no nulls
.map(String :: trim) // strip strings
@@ -268,22 +267,88 @@ public void addOptionalParams(DataFile df) {
// ---------------------------
// Add tags
// ---------------------------
- if (hasTags()){
- List currentCategories = fm.getCategoriesByName();
- for (String tagText : this.getTags()){
- if (!currentCategories.contains(tagText)){
- fm.addCategoryByName(tagText);
- }
- }
- }
+ addTagsToDataFile(fm);
+
// ---------------------------
- // file data tags: TO DO!!!
+ // Add DataFileTags
// ---------------------------
- if (hasFileDataTags()){
- for (String tagLabel : this.getFileDataTags()){
+ addFileDataTagsToFile(df);
+
+ }
+
+
+ /**
+ * Add Tags to the DataFile
+ *
+ */
+ private void addTagsToDataFile(FileMetadata fileMetadata){
+
+ if (fileMetadata == null){
+ throw new NullPointerException("The fileMetadata cannot be null!");
+ }
+
+ // Is there anything to add?
+ //
+ if (!hasTags()){
+ return;
+ }
+
+ List currentCategories = fileMetadata.getCategoriesByName();
+ for (String tagText : this.getTags()){
+ if (!currentCategories.contains(tagText)){
+ fileMetadata.addCategoryByName(tagText);
}
}
+ }
+
+
+ private void addFileDataTagsToFile(DataFile df){
+ if (df == null){
+ throw new NullPointerException("The DataFile (df) cannot be null!");
+ }
+ msgt("addFileDataTagsToFile");
+
+ // Is there anything to add?
+ if (!hasFileDataTags()){
+ return;
+ }
+ msgt("addFileDataTagsToFile 2");
+
+ // Get existing tag list and convert it to list of strings
+ List existingDataFileTags = df.getTags();
+ List currentLabels;
+
+ if (existingDataFileTags == null){
+ // nothing, just make an empty list
+ currentLabels = new ArrayList<>();
+ }else{
+ // Yes, get the labels in a list
+ currentLabels = df.getTags().stream()
+ .map(x -> x.getTypeLabel())
+ .collect(Collectors.toList())
+ ;
+ }
+
+ // Iterate through and add any new labels
+ //
+ DataFileTag newTagObj;
+ for (String tagLabel : this.getFileDataTags()){
+
+ if (!currentLabels.contains(tagLabel)){ // not already there!
+
+ // redundant "if" check here. Also done in constructor
+ //
+ if (DataFileTag.isDataFileTag(tagLabel)){
+
+ newTagObj = new DataFileTag();
+ newTagObj.setDataFile(df);
+ newTagObj.setTypeByLabel(tagLabel);
+ df.addTag(newTagObj);
+
+ }
+ }
+ }
}
From dfee568cb02fb6ad2cdfc69356ab38c8bde2f671 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Wed, 19 Oct 2016 13:44:17 -0400
Subject: [PATCH 66/86] #2290 remove print statements
---
.../iq/dataverse/datasetutility/AddReplaceFileHelper.java | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index 87c4f583903..730cc965b79 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -1424,7 +1424,8 @@ private boolean step_100_startIngestJobs(){
private void msg(String m){
- System.out.println(m);
+ logger.fine(m);
+ //System.out.println(m);
}
private void dashes(){
msg("----------------");
From 3d9446ad9b49651ed706e60a92ee7864a94b57f1 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Wed, 19 Oct 2016 14:48:48 -0400
Subject: [PATCH 67/86] #1612 add dataset & dataverse info to API output
---
.../edu/harvard/iq/dataverse/DataFile.java | 37 ++++++++++++++++++-
1 file changed, 35 insertions(+), 2 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
index c015e8e73c2..83083cf21b7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
@@ -27,7 +27,6 @@
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
-import java.util.stream.Collectors;
import javax.persistence.Entity;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
@@ -500,6 +499,7 @@ public void setFilesize(long filesize) {
/**
* Converts the stored size of the file in bytes to
* a user-friendly value in KB, MB or GB.
+ * @return
*/
public String getFriendlySize() {
return FileUtil.getFriendlySize(filesize);
@@ -863,10 +863,43 @@ public JsonObject asGsonObject(boolean prettyPrint){
jsonObj.getAsJsonObject().addProperty("id", this.getId());
// ----------------------------------
- // Add label (filename), description, and categories from the FileMetadata object
+ // get the FileMetadata object
// ----------------------------------
FileMetadata thisFileMetadata = this.getFileMetadata();
+
+ // ----------------------------------
+ // Add dataset info
+ // ----------------------------------
+
+ Map datasetMap = new HashMap<>();
+ // expensive call.......bleh!!!
+ // https://github.com/IQSS/dataverse/issues/761, https://github.com/IQSS/dataverse/issues/2110, https://github.com/IQSS/dataverse/issues/3191
+ //
+ datasetMap.put("title", thisFileMetadata.getDatasetVersion().getTitle());
+ datasetMap.put("persistentId", getOwner().getGlobalId());
+ datasetMap.put("url", getOwner().getPersistentURL());
+ datasetMap.put("version", thisFileMetadata.getDatasetVersion().getSemanticVersion());
+ datasetMap.put("id", getOwner().getId());
+ datasetMap.put("isPublished", thisFileMetadata.getDatasetVersion().isReleased());
+
+ jsonObj.getAsJsonObject().add("dataset", gson.toJsonTree(datasetMap));
+
+ // ----------------------------------
+ // Add dataverse info
+ // ----------------------------------
+ Map dataverseMap = new HashMap<>();
+ Dataverse dv = this.getOwner().getOwner();
+
+ dataverseMap.put("name", dv.getName());
+ dataverseMap.put("alias", dv.getAlias());
+ dataverseMap.put("id", dv.getId());
+
+ jsonObj.getAsJsonObject().add("dataverse", gson.toJsonTree(dataverseMap));
+ // ----------------------------------
+ // Add label (filename), description, and categories from the FileMetadata object
+ // ----------------------------------
+
jsonObj.getAsJsonObject().addProperty("filename", thisFileMetadata.getLabel());
jsonObj.getAsJsonObject().addProperty("description", thisFileMetadata.getDescription());
jsonObj.getAsJsonObject().add("categories",
From 10abf738540884b28b2c82945eb30ded457d2861 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Wed, 19 Oct 2016 15:02:02 -0400
Subject: [PATCH 68/86] #1612 allow persistentId use for add file
---
.../edu/harvard/iq/dataverse/api/Datasets.java | 16 +++++++++++++---
1 file changed, 13 insertions(+), 3 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
index 32da5dc222a..4d60757dfe1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -790,8 +790,8 @@ public Response deletePrivateUrl(@PathParam("id") String idSupplied) {
}
}
-
-
+
+
/**
* Add a File to an existing Dataset
*
@@ -804,7 +804,7 @@ public Response deletePrivateUrl(@PathParam("id") String idSupplied) {
@POST
@Path("{id}/add")
@Consumes(MediaType.MULTIPART_FORM_DATA)
- public Response addFileToDataset(@PathParam("id") Long datasetId,
+ public Response addFileToDataset(@PathParam("id") String idSupplied,
@FormDataParam("jsonData") String jsonData,
@FormDataParam("file") InputStream testFileInputStream,
@FormDataParam("file") FormDataContentDisposition contentDispositionHeader,
@@ -814,6 +814,16 @@ public Response addFileToDataset(@PathParam("id") Long datasetId,
// TODO: Handle jsonData: description, tags, etc
+ Dataset dataset;
+
+ try{
+ dataset = findDatasetOrDie(idSupplied);
+ }catch (WrappedResponse wr) {
+ return wr.getResponse();
+ }
+
+ Long datasetId = dataset.getId();
+
// -------------------------------------
// (1) Get the file name and content type
From 84ce907362d39b20140c519628d1a1d8d2f8cd43 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Wed, 19 Oct 2016 15:45:14 -0400
Subject: [PATCH 69/86] #1612 documentation for adding a file
---
doc/sphinx-guides/source/api/native-api.rst | 90 +++++++++++++++++++++
1 file changed, 90 insertions(+)
diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst
index 8b686df66cd..aa1ecc4c612 100644
--- a/doc/sphinx-guides/source/api/native-api.rst
+++ b/doc/sphinx-guides/source/api/native-api.rst
@@ -184,6 +184,96 @@ Delete a Private URL from a dataset (if it exists)::
DELETE http://$SERVER/api/datasets/$id/privateUrl?key=$apiKey
+Add a file to an existing Dataset. Description and tags are optional::
+
+ PUT http://$SERVER/api/datasets/$id/add?key=$apiKey
+
+
+Example python code to add a file. This may be run by changing these parameters in the sample code:
+
+* ``dataverse_server`` - e.g. https://dataverse.harvard.edu
+* ``api_key`` - See the top of this document for a description
+* ``persistentId`` - Example: ``doi:10.5072/FK2/6XACVA``
+* ``dataset_id`` - Database id of the dataset
+
+In practice, you only need one of the ``dataset_id`` or the ``persistentId``. The example below shows both uses.
+
+.. code-block:: python
+
+ from datetime import datetime
+ import json
+ import requests # http://docs.python-requests.org/en/master/
+
+ # --------------------------------------------------
+ # Update the 4 params below to run this code
+ # --------------------------------------------------
+ dataverse_server = 'https://your dataverse server' # no trailing slash
+ api_key = 'api key'
+ dataset_id = 1 # database id of the dataset
+ persistentId = 'doi:10.5072/FK2/6XACVA' # doi or hdl of the dataset
+
+ # --------------------------------------------------
+ # Prepare "file"
+ # --------------------------------------------------
+ file_content = 'content: %s' % datetime.now()
+ files = {'file': ('sample_file.txt', file_content)}
+
+ # --------------------------------------------------
+ # Using a "jsonData" parameter, add optional description + file tags
+ # --------------------------------------------------
+ params = dict(description='Blue skies!',
+ tags=['Lily', 'Rosemary', 'Jack of Hearts'])
+
+ params_as_json_string = json.dumps(params)
+
+ payload = dict(jsonData=params_as_json_string)
+
+ # --------------------------------------------------
+ # Add file using the Dataset's id
+ # --------------------------------------------------
+ url_dataset_id = '%s/api/datasets/%s/add?key=%s' % (dataverse_server, dataset_id, api_key)
+
+ # -------------------
+ # Make the request
+ # -------------------
+ print '-' * 40
+ print 'making request: %s' % url_dataset_id
+ r = requests.post(url_dataset_id, data=payload, files=files)
+
+ # -------------------
+ # Print the response
+ # -------------------
+ print '-' * 40
+ print r.json()
+ print r.status_code
+
+ # --------------------------------------------------
+ # Add file using the Dataset's persistentId (e.g. doi, hdl, etc)
+ # --------------------------------------------------
+ url_persistent_id = '%s/api/datasets/:persistentId/add?persistentId=%s&key=%s' % (dataverse_server, persistentId, api_key)
+
+ # -------------------
+ # Update the file content to avoid a duplicate file error
+ # -------------------
+ file_content = 'content2: %s' % datetime.now()
+ files = {'file': ('sample_file2.txt', file_content)}
+
+
+ # -------------------
+ # Make the request
+ # -------------------
+ print '-' * 40
+ print 'making request: %s' % url_persistent_id
+ r = requests.post(url_persistent_id, data=payload, files=files)
+
+ # -------------------
+ # Print the response
+ # -------------------
+ print '-' * 40
+ print r.json()
+ print r.status_code
+
+
Builtin Users
~~~~~~~~~~~~~
From 3f4c3a93e14656997aeaafc2f1b7492e0bd769bb Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Wed, 19 Oct 2016 16:16:16 -0400
Subject: [PATCH 70/86] #1612 - allow adding file by persistent id + fixed
tests
---
.../harvard/iq/dataverse/api/Datasets.java | 46 ++++++++++++-------
.../edu/harvard/iq/dataverse/api/Files.java | 6 ++-
.../edu/harvard/iq/dataverse/api/FilesIT.java | 4 +-
.../edu/harvard/iq/dataverse/api/UtilIT.java | 3 +-
4 files changed, 38 insertions(+), 21 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
index 4d60757dfe1..fb3175127de 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -77,6 +77,7 @@
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
+import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
import org.glassfish.jersey.media.multipart.FormDataBodyPart;
import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
import org.glassfish.jersey.media.multipart.FormDataParam;
@@ -811,37 +812,50 @@ public Response addFileToDataset(@PathParam("id") String idSupplied,
@FormDataParam("file") final FormDataBodyPart formDataBodyPart
){
+
+ // -------------------------------------
+ // (1) Get the user from the API key
+ // -------------------------------------
+ User authUser;
+ try {
+ authUser = this.findUserOrDie();
+ } catch (WrappedResponse ex) {
+ return errorResponse(Response.Status.FORBIDDEN,
+ ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.auth")
+ );
+ }
- // TODO: Handle jsonData: description, tags, etc
-
+ // -------------------------------------
+ // (2) Get the Dataset Id
+ //
+ // -------------------------------------
Dataset dataset;
try{
dataset = findDatasetOrDie(idSupplied);
}catch (WrappedResponse wr) {
- return wr.getResponse();
+ String errMsg;
+ if (idSupplied==null){
+ errMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.dataset_id_is_null");
+ return errorResponse(Response.Status.BAD_REQUEST, errMsg);
+
+ }else if (idSupplied.equals(Datasets.PERSISTENT_ID_KEY)){
+ return wr.getResponse();
+ }else{
+ errMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.dataset_id_not_found") + " " + idSupplied;
+ return errorResponse(Response.Status.BAD_REQUEST, errMsg);
+ }
}
Long datasetId = dataset.getId();
// -------------------------------------
- // (1) Get the file name and content type
+ // (3) Get the file name and content type
// -------------------------------------
String newFilename = contentDispositionHeader.getFileName();
String newFileContentType = formDataBodyPart.getMediaType().toString();
-
- // -------------------------------------
- // (2) Get the user from the API key
- // -------------------------------------
- User authUser;
- try {
- authUser = this.findUserOrDie();
- } catch (WrappedResponse ex) {
- return errorResponse(Response.Status.FORBIDDEN,
- ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.auth")
- );
- }
+
// (2a) Load up optional params via JSON
//---------------------------------------
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
index fcbc1932b65..5fa50536117 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
@@ -35,6 +35,7 @@
import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.glassfish.jersey.media.multipart.FormDataBodyPart;
@@ -94,6 +95,7 @@ private void msgt(String m){
@Path("{id}/replace")
@Consumes(MediaType.MULTIPART_FORM_DATA)
public Response replaceFileInDataset(
+ @PathParam("id") Long fileToReplaceId,
@FormDataParam("jsonData") String jsonData,
@FormDataParam("file") InputStream testFileInputStream,
@FormDataParam("file") FormDataContentDisposition contentDispositionHeader,
@@ -123,7 +125,7 @@ public Response replaceFileInDataset(
// (2a) Check for required "fileToReplaceId"
// -------------------------------------
- if ((!jsonObj.has("fileToReplaceId")) || jsonObj.get("fileToReplaceId").isJsonNull()){
+ /*if ((!jsonObj.has("fileToReplaceId")) || jsonObj.get("fileToReplaceId").isJsonNull()){
return errorResponse( Response.Status.BAD_REQUEST, "'fileToReplaceId' NOT found in the JSON Request");
}
@@ -134,7 +136,7 @@ public Response replaceFileInDataset(
} catch (Exception e) {
return errorResponse( Response.Status.BAD_REQUEST, "'fileToReplaceId' in the JSON Request must be a number.");
}
-
+ */
// (2b) Check for optional "forceReplace"
// -------------------------------------
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
index ef8db9a8469..59488eb8119 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
@@ -117,7 +117,7 @@ public void test_001_AddFileGood() {
public void test_002_AddFileBadDatasetId() {
msgt("test_002_AddFileNullFileId");
// Create user
- String apiToken = "someToken";
+ String apiToken =createUserGetToken();
// Create Dataset
String datasetId = "cat"; //createDatasetGetId(dataverseAlias, apiToken);
@@ -129,7 +129,7 @@ public void test_002_AddFileBadDatasetId() {
// Adding a non-numeric id should result in a 404
addResponse.then().assertThat()
- .statusCode(NOT_FOUND.getStatusCode());
+ .statusCode(BAD_REQUEST.getStatusCode());
}
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
index 3dead822faa..9e5fae4ab92 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
@@ -278,10 +278,11 @@ static Response uploadFileViaNative(String datasetId, String pathToFile, String
}
static Response replaceFile(long fileId, String pathToFile, String apiToken) {
+ //.add("fileToReplaceId", fileId)
return given()
.header(API_TOKEN_HTTP_HEADER, apiToken)
.multiPart("file", new File(pathToFile))
- .multiPart("jsonData", Json.createObjectBuilder().add("fileToReplaceId", fileId).build().toString())
+ .multiPart("jsonData", Json.createObjectBuilder().build().toString())
.post("/api/files/" + fileId + "/replace");
}
From e199fbddacff670abe5ee470414df065054a350c Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Wed, 19 Oct 2016 17:00:17 -0400
Subject: [PATCH 71/86] #1612 - documentation for file replace via API
---
doc/sphinx-guides/source/api/native-api.rst | 74 +++++++++++++++++++++
src/main/java/Bundle.properties | 2 +-
2 files changed, 75 insertions(+), 1 deletion(-)
diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst
index aa1ecc4c612..54c262745f5 100644
--- a/doc/sphinx-guides/source/api/native-api.rst
+++ b/doc/sphinx-guides/source/api/native-api.rst
@@ -272,6 +272,80 @@ In practice, you only need one of the ``dataset_id`` or the ``persistentId``. The e
print '-' * 40
print r.json()
print r.status_code
+
+Files
+~~~~~~~~~~~
+
+Replace an existing file where ``id`` is the Database id of the file to replace::
+
+ POST http://$SERVER/api/files/{id}/replace?key=$apiKey
+
+Example python code to replace a file. This may be run by changing these parameters in the sample code:
+
+* ``dataverse_server`` - e.g. https://dataverse.harvard.edu
+* ``api_key`` - See the top of this document for a description
+* ``file_id`` - Database id of the file to replace (returned in the GET API for a Dataset)
+
+.. code-block:: python
+
+ from datetime import datetime
+ import json
+ import requests # http://docs.python-requests.org/en/master/
+
+ # --------------------------------------------------
+ # Update params below to run code
+ # --------------------------------------------------
+ dataverse_server = 'http://127.0.0.1:8080' # no trailing slash
+ api_key = 'some key'
+ file_id = 1401 # id of the file to replace
+
+ # --------------------------------------------------
+ # Prepare replacement "file"
+ # --------------------------------------------------
+ file_content = 'content: %s' % datetime.now()
+ files = {'file': ('replacement_file.txt', file_content)}
+
+ # --------------------------------------------------
+ # Using a "jsonData" parameter, add optional description + file tags
+ # --------------------------------------------------
+ params = dict(description='Sunset',
+ tags=['One', 'More', 'Cup of Coffee'])
+
+ # -------------------
+ # IMPORTANT: If the mimetype of the replacement file differs
+ # from the original file, the replace will fail
+ #
+ # e.g. if you try to replace a ".csv" with a ".png" or something similar
+ #
+ # You can override this with a "forceReplace" parameter
+ # -------------------
+ params['forceReplace'] = True
+
+
+ params_as_json_string = json.dumps(params)
+
+ payload = dict(jsonData=params_as_json_string)
+
+ print 'payload', payload
+ # --------------------------------------------------
+ # Replace file using the id of the file to replace
+ # --------------------------------------------------
+ url_replace = '%s/api/v1/files/%s/replace?key=%s' % (dataverse_server, file_id, api_key)
+
+ # -------------------
+ # Make the request
+ # -------------------
+ print '-' * 40
+ print 'making request: %s' % url_replace
+ r = requests.post(url_replace, data=payload, files=files)
+
+ # -------------------
+ # Print the response
+ # -------------------
+ print '-' * 40
+ print r.json()
+ print r.status_code
+
Builtin Users
diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties
index 871f36cde4c..16a7fc297d4 100755
--- a/src/main/java/Bundle.properties
+++ b/src/main/java/Bundle.properties
@@ -1441,7 +1441,7 @@ file.addreplace.error.existing_file_to_replace_id_is_null=The id of the existing
file.addreplace.error.existing_file_to_replace_not_found_by_id=Replacement file not found. There was no file found for id:
file.addreplace.error.existing_file_to_replace_is_null=The existing file to replace cannot be null
file.addreplace.error.existing_file_to_replace_not_in_dataset=The existing file to replace does not belong to this dataset
-file.addreplace.error.existing_file_not_in_latest_published_version=You cannot replace a file that is not in the most recently published Dataset. (It was deleted from a previous version.)
+file.addreplace.error.existing_file_not_in_latest_published_version=You cannot replace a file that is not in the most recently published Dataset. (The file is unpublished or was deleted from a previous version.)
file.addreplace.error.replace.new_file_same_as_replacement=The new file contains the same content as the file to be replaced.
file.addreplace.error.replace.new_file_has_different_content_type=Warning! The new and old file have different content types.
file.addreplace.error.unpublished_file_cannot_be_replaced=You cannot replace an unpublished file. Please delete it instead of replacing it.
From 78f3267bf9d59e249755f8e20681dff582dccabc Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Mon, 24 Oct 2016 13:34:30 -0400
Subject: [PATCH 72/86] Fix for #1612 multiple file upload
---
.../edu/harvard/iq/dataverse/DataFile.java | 8 +-
.../harvard/iq/dataverse/api/Datasets.java | 15 ++-
.../edu/harvard/iq/dataverse/api/Files.java | 14 +-
.../datasetutility/AddReplaceFileHelper.java | 124 ++++++++++++++----
.../datasetutility/FileUploadTestPage.java | 8 +-
.../datasetutility/NoFilesException.java | 24 ++++
.../edu/harvard/iq/dataverse/api/FilesIT.java | 39 +++---
7 files changed, 166 insertions(+), 66 deletions(-)
create mode 100644 src/main/java/edu/harvard/iq/dataverse/datasetutility/NoFilesException.java
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
index 83083cf21b7..0b37cf55932 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
@@ -922,14 +922,8 @@ public JsonObject asGsonObject(boolean prettyPrint){
jsonObj.getAsJsonObject().add("checksum", checkSumJSONMap);
+ return jsonObj.getAsJsonObject();
- // ----------------------------------
- // Overarching data key
- // ----------------------------------
- JsonObject fullFileJSON = new JsonObject();
- fullFileJSON.add(overarchingKey, jsonObj);
-
- return fullFileJSON;
}
/**
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
index fb3175127de..8aba68b7efc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -21,6 +21,7 @@
import edu.harvard.iq.dataverse.authorization.users.User;
import edu.harvard.iq.dataverse.datasetutility.AddReplaceFileHelper;
import edu.harvard.iq.dataverse.datasetutility.DataFileTagException;
+import edu.harvard.iq.dataverse.datasetutility.NoFilesException;
import edu.harvard.iq.dataverse.datasetutility.OptionalFileParams;
import edu.harvard.iq.dataverse.engine.command.Command;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
@@ -897,9 +898,17 @@ public Response addFileToDataset(@PathParam("id") String idSupplied,
return errorResponse(Response.Status.BAD_REQUEST, addFileHelper.getErrorMessagesAsString("\n"));
}else{
String successMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.success.add");
- return okResponseGsonObject(successMsg,
- addFileHelper.getSuccessResultAsGsonObject());
- //"Look at that! You added a file! (hey hey, it may have worked)");
+ try {
+ msgt("as String: " + addFileHelper.getSuccessResult());
+
+ return okResponseGsonObject(successMsg,
+ addFileHelper.getSuccessResultAsGsonObject());
+ //"Look at that! You added a file! (hey hey, it may have worked)");
+ } catch (NoFilesException ex) {
+ Logger.getLogger(Files.class.getName()).log(Level.SEVERE, null, ex);
+ return errorResponse(Response.Status.BAD_REQUEST, "NoFileException! Serious Error! See administrator!");
+
+ }
}
} // end: addFileToDataset
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
index 5fa50536117..fa04367cb15 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
@@ -20,6 +20,7 @@
import edu.harvard.iq.dataverse.authorization.users.User;
import edu.harvard.iq.dataverse.datasetutility.AddReplaceFileHelper;
import edu.harvard.iq.dataverse.datasetutility.DataFileTagException;
+import edu.harvard.iq.dataverse.datasetutility.NoFilesException;
import edu.harvard.iq.dataverse.datasetutility.OptionalFileParams;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
@@ -207,9 +208,16 @@ public Response replaceFileInDataset(
msg("no error");
String successMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.success.replace");
- return okResponseGsonObject(successMsg,
- addFileHelper.getSuccessResultAsGsonObject());
- //"Look at that! You added a file! (hey hey, it may have worked)");
+ try {
+ msgt("as String: " + addFileHelper.getSuccessResult());
+ return okResponseGsonObject(successMsg,
+ addFileHelper.getSuccessResultAsGsonObject());
+ //"Look at that! You added a file! (hey hey, it may have worked)");
+ } catch (NoFilesException ex) {
+ Logger.getLogger(Files.class.getName()).log(Level.SEVERE, null, ex);
+ return errorResponse(Response.Status.BAD_REQUEST, "NoFileException! Serious Error! See administrator!");
+
+ }
}
} // end: replaceFileInDataset
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index 730cc965b79..49fc8cdce50 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -5,6 +5,7 @@
*/
package edu.harvard.iq.dataverse.datasetutility;
+import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import edu.harvard.iq.dataverse.DataFile;
import edu.harvard.iq.dataverse.DataFileServiceBean;
@@ -117,7 +118,8 @@ public class AddReplaceFileHelper{
List finalFileList;
// Ingested file
- private DataFile newlyAddedFile;
+ //private DataFile newlyAddedFile;
+ private List newlyAddedFiles;
// For error handling
private boolean errorFound;
@@ -847,16 +849,24 @@ private boolean step_030_createNewFilesViaIngest(){
* (2) the new file (or new file unzipped) did not ingest via "createDataFiles"
*/
if (initialFileList.isEmpty()){
- this.addErrorSevere("initial_file_list_empty");
+ this.addErrorSevere(getBundleErr("initial_file_list_empty"));
this.runMajorCleanup();
return false;
}
- if (initialFileList.size() > 1){
- this.addError("initial_file_list_more_than_one");
- this.runMajorCleanup();
- return false;
-
+ /**
+ * REPLACE: File replacement is limited to a single file!!
+ *
+ * ADD: When adding files, some types of individual files
+ * are broken into several files--which is OK
+ */
+ if (isFileReplaceOperation()){
+ if (initialFileList.size() > 1){
+ this.addError(getBundleErr("initial_file_list_more_than_one"));
+ this.runMajorCleanup();
+ return false;
+
+ }
}
if (!this.step_040_auto_checkForDuplicates()){
@@ -885,7 +895,7 @@ private boolean step_040_auto_checkForDuplicates(){
// Double checked -- this check also happens in step 30
//
if (initialFileList.isEmpty()){
- this.addErrorSevere("initial_file_list_empty");
+ this.addErrorSevere(getBundleErr("initial_file_list_empty"));
return false;
}
@@ -938,11 +948,21 @@ private boolean step_040_auto_checkForDuplicates(){
return false;
}
- if (finalFileList.size() > 1){
- this.addErrorSevere("There is more than 1 file to add. (This error shouldn't happen b/c the initial file list should always have 1 item");
- return false;
- }
+ /**
+ * REPLACE: File replacement is limited to a single file!!
+ *
+ * ADD: When adding files, some types of individual files
+ * are broken into several files--which is OK
+ */
+
+ if (isFileReplaceOperation()){
+ if (finalFileList.size() > 1){
+ String errMsg = "(This shouldn't happen -- error should have been detected in 030_createNewFilesViaIngest)";
+ this.addErrorSevere(getBundleErr("initial_file_list_more_than_one") + " " + errMsg);
+ return false;
+ }
+ }
if (finalFileList.isEmpty()){
this.addErrorSevere("There are no files to add. (This error shouldn't happen if steps called in sequence....step_040_auto_checkForDuplicates)");
@@ -1131,7 +1151,7 @@ private boolean step_070_run_update_dataset_command(){
logger.severe(ex.getMessage());
return false;
}catch (EJBException ex) {
- this.addErrorSevere("add.ejb_exception");
+ this.addErrorSevere("add.ejb_exception (see logs)");
logger.severe(ex.getMessage());
return false;
}
@@ -1333,8 +1353,31 @@ private boolean step_080_run_update_dataset_command_for_replace(){
*
* @param df
*/
- private void setNewlyAddedFile(DataFile df){
+ private void setNewlyAddedFiles(List datafiles){
+
+ if (hasError()){
+ return;
+ }
+
+ // Init. newly added file list
+ newlyAddedFiles = new ArrayList<>();
+ // Loop of ugliness...but expect 1 to 4 files in final file list
+ List latestFileMetadatas = dataset.getEditVersion().getFileMetadatas();
+
+
+ for (DataFile newlyAddedFile : finalFileList){
+
+ for (FileMetadata fm : latestFileMetadatas){
+ if (newlyAddedFile.getChecksumValue().equals(fm.getDataFile().getChecksumValue())){
+ if (newlyAddedFile.getStorageIdentifier().equals(fm.getDataFile().getStorageIdentifier())){
+ newlyAddedFiles.add(fm.getDataFile());
+ }
+ }
+ }
+ }
+ /*
+
newlyAddedFile = df;
for (FileMetadata fm : dataset.getEditVersion().getFileMetadatas()){
@@ -1348,28 +1391,55 @@ private void setNewlyAddedFile(DataFile df){
}
}
}
+ */
}
- public DataFile getNewlyAddedFile(){
+ public List getNewlyAddedFiles(){
- return newlyAddedFile;
+ return newlyAddedFiles;
}
- public String getSuccessResult(){
- if (newlyAddedFile == null){
- return "Bad ERROR: Newly created file not found";
+ public String getSuccessResult() throws NoFilesException{
+ if (hasError()){
+ throw new NoFilesException("Don't call this method if an error exists!! First check 'hasError()'");
+ }
+
+ if (newlyAddedFiles == null){
+ throw new NullPointerException("newlyAddedFiles is null!");
}
- return newlyAddedFile.asJSON();
+
+ return getSuccessResultAsGsonObject().toString();
}
- public JsonObject getSuccessResultAsGsonObject(){
- if (newlyAddedFile == null){
- throw new NullPointerException("Bad error: newlyAddedFile is null!");
+ public JsonObject getSuccessResultAsGsonObject() throws NoFilesException{
+
+ if (hasError()){
+ throw new NoFilesException("Don't call this method if an error exists!! First check 'hasError()'");
+ }
+
+ if (newlyAddedFiles == null){
+ throw new NullPointerException("newlyAddedFiles is null!");
}
- return newlyAddedFile.asGsonObject(false);
+
+ if (newlyAddedFiles.isEmpty()){
+ throw new NoFilesException("newlyAddedFiles is empty!");
+ }
+
+
+ JsonArray jsonList = new JsonArray();
+
+ for (DataFile df : newlyAddedFiles){
+ jsonList.add(df.asGsonObject(false));
+ }
+
+ JsonObject fullFilesJSON = new JsonObject();
+ fullFilesJSON.add("files", jsonList);
+
+ return fullFilesJSON;
+ //return newlyAddedFile.asGsonObject(false);
}
@@ -1397,11 +1467,7 @@ private boolean step_100_startIngestJobs(){
}
// Should only be one file in the list
- for (DataFile df : finalFileList){
- setNewlyAddedFile(df);
- //df.getFileMetadata();
- break;
- }
+ setNewlyAddedFiles(finalFileList);
// clear old file list
//
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileUploadTestPage.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileUploadTestPage.java
index 3c2a9bbc9ac..fb3779b8d8a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileUploadTestPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileUploadTestPage.java
@@ -51,7 +51,7 @@ public class FileUploadTestPage implements java.io.Serializable {
private Long datasetId;
private Dataset dataset;
private DataFile fileToReplace;
- private DataFile newlyAddedFile;
+ private List newlyAddedFiles;
@EJB
IngestServiceBean ingestService;
@@ -217,7 +217,7 @@ public void addReplaceFile(UploadedFile laFile){
msgt("upload error");
msg(addFileHelper.getErrorMessagesAsString("\n"));
}else{
- newlyAddedFile = addFileHelper.getNewlyAddedFile();
+ newlyAddedFiles = addFileHelper.getNewlyAddedFiles();
msg("Look at that! You added a file! (hey hey, it may have worked)");
}
}
@@ -262,9 +262,9 @@ public String getPebbleTest() throws PebbleException, IOException{
}
*/
- public DataFile getNewlyAddedFile(){
+ public List getNewlyAddedFile(){
- return newlyAddedFile;
+ return newlyAddedFiles;
}
} // end class FileUploadTestPage
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/NoFilesException.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/NoFilesException.java
new file mode 100644
index 00000000000..32881fd25c4
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/NoFilesException.java
@@ -0,0 +1,24 @@
+/*
+ * To change this license header, choose License Headers in Project Properties.
+ * To change this template file, choose Tools | Templates
+ * and open the template in the editor.
+ */
+package edu.harvard.iq.dataverse.datasetutility;
+
+/**
+ *
+ * @author rmp553
+ */
+public class NoFilesException extends Exception {
+
+ public NoFilesException(String message) {
+ super(message);
+ }
+
+ public NoFilesException(String message, Throwable cause) {
+ super(message, cause);
+ }
+
+}
+
+
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
index 59488eb8119..ba3ea55e876 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
@@ -11,7 +11,6 @@
import static javax.ws.rs.core.Response.Status.CREATED;
import static javax.ws.rs.core.Response.Status.FORBIDDEN;
-import static javax.ws.rs.core.Response.Status.NOT_FOUND;
import static javax.ws.rs.core.Response.Status.OK;
import static junit.framework.Assert.assertEquals;
import static org.hamcrest.CoreMatchers.equalTo;
@@ -62,7 +61,7 @@ private Integer createDatasetGetId(String dataverseAlias, String apiToken){
Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
//createDatasetResponse.prettyPrint();
createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode());
- Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
+ Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("files[0].id");
return datasetId;
@@ -92,8 +91,8 @@ public void test_001_AddFileGood() {
addResponse.then().assertThat()
.body("message", equalTo(successMsg))
.body("status", equalTo(AbstractApiBean.STATUS_OK))
- .body("data.contentType", equalTo("image/png"))
- .body("data.filename", equalTo("dataverseproject.png"))
+ .body("files[0].contentType", equalTo("image/png"))
+ .body("files[0].filename", equalTo("dataverseproject.png"))
.statusCode(OK.getStatusCode());
@@ -214,12 +213,12 @@ public void test_006_ReplaceFileGood() {
addResponse.then().assertThat()
.body("message", equalTo(successMsgAdd))
- .body("data.contentType", equalTo("image/png"))
- .body("data.filename", equalTo("dataverseproject.png"))
+ .body("files[0].contentType", equalTo("image/png"))
+ .body("files[0].filename", equalTo("dataverseproject.png"))
.statusCode(OK.getStatusCode());
- long origFileId = JsonPath.from(addResponse.body().asString()).getLong("data.id");
+ long origFileId = JsonPath.from(addResponse.body().asString()).getLong("files[0].id");
msg("Orig file id: " + origFileId);
assertNotNull(origFileId); // If checkOut fails, display message
@@ -270,13 +269,13 @@ public void test_006_ReplaceFileGood() {
replaceResp.then().assertThat()
.statusCode(OK.getStatusCode())
.body("message", equalTo(successMsg2))
- .body("data.filename", equalTo("cc0.png"))
+ .body("files[0].filename", equalTo("cc0.png"))
//.body("data.rootDataFileId", equalTo(origFileId))
;
- long rootDataFileId = JsonPath.from(replaceResp.body().asString()).getLong("data.rootDataFileId");
- long previousDataFileId = JsonPath.from(replaceResp.body().asString()).getLong("data.previousDataFileId");
- long newDataFileId = JsonPath.from(replaceResp.body().asString()).getLong("data.id");
+ long rootDataFileId = JsonPath.from(replaceResp.body().asString()).getLong("files[0].rootDataFileId");
+ long previousDataFileId = JsonPath.from(replaceResp.body().asString()).getLong("files[0].previousDataFileId");
+ long newDataFileId = JsonPath.from(replaceResp.body().asString()).getLong("files[0].id");
assertEquals(origFileId, previousDataFileId);
assertEquals(rootDataFileId, previousDataFileId);
@@ -304,11 +303,11 @@ public void test_006_ReplaceFileGood() {
.statusCode(OK.getStatusCode())
.body("status", equalTo(AbstractApiBean.STATUS_OK))
.body("message", equalTo(successMsg2))
- .body("data.filename", equalTo("favicondataverse.png"))
+ .body("files[0].filename", equalTo("favicondataverse.png"))
;
- long rootDataFileId2 = JsonPath.from(replaceResp2.body().asString()).getLong("data.rootDataFileId");
- long previousDataFileId2 = JsonPath.from(replaceResp2.body().asString()).getLong("data.previousDataFileId");
+ long rootDataFileId2 = JsonPath.from(replaceResp2.body().asString()).getLong("files[0].rootDataFileId");
+ long previousDataFileId2 = JsonPath.from(replaceResp2.body().asString()).getLong("files[0].previousDataFileId");
msgt("newDataFileId: " + newDataFileId);
msgt("previousDataFileId2: " + previousDataFileId2);
@@ -343,12 +342,12 @@ public void test_007_ReplaceFileUnpublishedAndBadIds() {
addResponse.then().assertThat()
.body("message", equalTo(successMsgAdd))
- .body("data.contentType", equalTo("image/png"))
- .body("data.filename", equalTo("dataverseproject.png"))
+ .body("files[0].contentType", equalTo("image/png"))
+ .body("files[0].filename", equalTo("dataverseproject.png"))
.statusCode(OK.getStatusCode());
- long origFileId = JsonPath.from(addResponse.body().asString()).getLong("data.id");
+ long origFileId = JsonPath.from(addResponse.body().asString()).getLong("files[0].id");
msg("Orig file id: " + origFileId);
assertNotNull(origFileId); // If checkOut fails, display message
@@ -426,12 +425,12 @@ public void test_008_ReplaceFileAlreadyDeleted() {
addResponse.then().assertThat()
.body("message", equalTo(successMsgAdd))
- .body("data.contentType", equalTo("image/png"))
- .body("data.filename", equalTo("dataverseproject.png"))
+ .body("files[0].contentType", equalTo("image/png"))
+ .body("files[0].filename", equalTo("dataverseproject.png"))
.statusCode(OK.getStatusCode());
- long origFileId = JsonPath.from(addResponse.body().asString()).getLong("data.id");
+ long origFileId = JsonPath.from(addResponse.body().asString()).getLong("files[0].id");
msg("Orig file id: " + origFileId);
assertNotNull(origFileId); // If checkOut fails, display message
From b2dafc672027df176383310105a86bbbdabe1ba3 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Mon, 24 Oct 2016 16:02:56 -0400
Subject: [PATCH 73/86] #1612 - Test updated to reflect the API returning a
list of files
---
.../java/edu/harvard/iq/dataverse/api/FilesIT.java | 13 ++++++++-----
1 file changed, 8 insertions(+), 5 deletions(-)
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
index ba3ea55e876..d17381a8196 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
@@ -33,11 +33,14 @@ public static void setUpClass() {
* @return
*/
private String createUserGetToken(){
-
Response createUser = UtilIT.createRandomUser();
+ msg(createUser.toString());
+ msg(createUser.prettyPrint());
createUser.then().assertThat().statusCode(OK.getStatusCode());
- //createUser.prettyPrint();
+ msg(createUser.prettyPrint());
+
+
String username = UtilIT.getUsernameFromResponse(createUser);
String apiToken = UtilIT.getApiTokenFromResponse(createUser);
@@ -57,11 +60,11 @@ private String createDataverseGetAlias(String apiToken){
private Integer createDatasetGetId(String dataverseAlias, String apiToken){
-
Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
- //createDatasetResponse.prettyPrint();
+
+
createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode());
- Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("files[0].id");
+ Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
return datasetId;
From 078fbd4a500ada43c571237e8e03ed3511f5ba91 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Mon, 24 Oct 2016 16:23:52 -0400
Subject: [PATCH 74/86] #1612. In anticipation of API cleanup #3358, ability
to add alternative status http error codes to add/replace api calls
---
.../harvard/iq/dataverse/api/Datasets.java | 2 +-
.../edu/harvard/iq/dataverse/api/Files.java | 5 +-
.../datasetutility/AddReplaceFileHelper.java | 63 ++++++++++++++++---
3 files changed, 60 insertions(+), 10 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
index 8aba68b7efc..09e28661f12 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -895,7 +895,7 @@ public Response addFileToDataset(@PathParam("id") String idSupplied,
if (addFileHelper.hasError()){
- return errorResponse(Response.Status.BAD_REQUEST, addFileHelper.getErrorMessagesAsString("\n"));
+ return errorResponse(addFileHelper.getHttpErrorCode(), addFileHelper.getErrorMessagesAsString("\n"));
}else{
String successMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.success.add");
try {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
index fa04367cb15..7fd5ac1a88c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
@@ -202,8 +202,9 @@ public Response replaceFileInDataset(
msg("we're back.....");
if (addFileHelper.hasError()){
- msg("yes, has error");
- return errorResponse(Response.Status.BAD_REQUEST, addFileHelper.getErrorMessagesAsString("\n"));
+ msg("yes, has error");
+ return errorResponse(addFileHelper.getHttpErrorCode(), addFileHelper.getErrorMessagesAsString("\n"));
+
}else{
msg("no error");
String successMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.success.replace");
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index 49fc8cdce50..58d67ae0982 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -33,6 +33,7 @@
import java.util.logging.Logger;
import javax.ejb.EJBException;
import javax.validation.ConstraintViolation;
+import javax.ws.rs.core.Response;
/**
* Methods to add or replace a single file.
@@ -111,20 +112,26 @@ public class AddReplaceFileHelper{
private DataFile fileToReplace; // step 25
+ // -----------------------------------
// Instance variables derived from other input
+ // -----------------------------------
private User user;
private DatasetVersion workingVersion;
List initialFileList;
List finalFileList;
- // Ingested file
- //private DataFile newlyAddedFile;
+ // -----------------------------------
+ // Ingested files
+ // -----------------------------------
private List newlyAddedFiles;
+ // -----------------------------------
// For error handling
+ // -----------------------------------
+
private boolean errorFound;
private List errorMessages;
-
+ private Response.Status httpErrorCode; // optional
// public AddReplaceFileHelper(){
// throw new IllegalStateException("Must be called with a dataset and or user");
@@ -542,7 +549,29 @@ private void addError(String errMsg){
this.errorMessages.add(errMsg);
}
-
+ /**
+ * Add error message and, if it's known, the HTTP response code
+ *
+ * @param badHttpResponse, e.g. Response.Status.FORBIDDEN
+ * @param errMsg
+ */
+ private void addError(Response.Status badHttpResponse, String errMsg){
+
+ if (badHttpResponse == null){
+ throw new NullPointerException("badHttpResponse cannot be null");
+ }
+ if (errMsg == null){
+ throw new NullPointerException("errMsg cannot be null");
+ }
+
+ this.httpErrorCode = badHttpResponse;
+
+ this.addError(errMsg);
+
+
+ }
+
+
private void addErrorSevere(String errMsg){
if (errMsg == null){
@@ -588,7 +617,27 @@ public String getErrorMessagesAsString(String joinString){
}
-
+ /**
+ * For API use, return the HTTP error code
+ *
+ * Default is BAD_REQUEST
+ *
+ * @return
+ */
+ public Response.Status getHttpErrorCode(){
+
+ if (!hasError()){
+ logger.severe("Do not call this method unless there is an error! check '.hasError()'");
+ }
+
+ if (httpErrorCode == null){
+ return Response.Status.BAD_REQUEST;
+ }else{
+ return httpErrorCode;
+ }
+ }
+
+
/**
* Convenience method for getting bundle properties
*
@@ -690,7 +739,7 @@ private boolean step_010_VerifyUserAndPermissions(){
msg("permissionService:" + permissionService.toString());
if (!permissionService.request(dvRequest).on(dataset).has(Permission.EditDataset)){
- addError(getBundleErr("no_edit_dataset_permission"));
+ addError(Response.Status.FORBIDDEN,getBundleErr("no_edit_dataset_permission"));
return false;
}
return true;
@@ -761,7 +810,7 @@ private boolean step_005_loadFileToReplaceById(Long dataFileId){
// Do we have permission to replace this file? e.g. Edit the file's dataset
//
if (!permissionService.request(dvRequest).on(existingFile.getOwner()).has(Permission.EditDataset)){
- addError(getBundleErr("no_edit_dataset_permission"));
+ addError(Response.Status.FORBIDDEN, getBundleErr("no_edit_dataset_permission"));
return false;
}
From 634029efb0894ce837dc65b52a9dc574185cb1a0 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Tue, 25 Oct 2016 13:07:39 -0400
Subject: [PATCH 75/86] #1612 - remove commented out @Test decorators
---
.../java/edu/harvard/iq/dataverse/api/FilesIT.java | 14 +++++++-------
1 file changed, 7 insertions(+), 7 deletions(-)
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
index fad163924fd..292d3cda265 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
@@ -115,7 +115,7 @@ public void test_001_AddFileGood() {
}
- //@Test
+ @Test
public void test_002_AddFileBadDatasetId() {
msgt("test_002_AddFileNullFileId");
// Create user
@@ -135,7 +135,7 @@ public void test_002_AddFileBadDatasetId() {
}
- //@Test
+ @Test
public void test_003_AddFileNonExistentDatasetId() {
msgt("test_003_AddFileNonExistentDatasetId");
@@ -160,7 +160,7 @@ public void test_003_AddFileNonExistentDatasetId() {
.statusCode(BAD_REQUEST.getStatusCode());
}
- //@Test
+ @Test
public void test_004_AddFileBadToken() {
msgt("test_004_AddFileBadToken");
@@ -185,14 +185,14 @@ public void test_004_AddFileBadToken() {
}
- //@Test
+ @Test
public void test_005_AddFileBadPermissions() {
msgt("test_005_AddFileBadPerms");
// To do!!!
}
- //@Test
+ @Test
public void test_006_ReplaceFileGood() {
msgt("test_006_ReplaceFileGood");
@@ -322,7 +322,7 @@ public void test_006_ReplaceFileGood() {
}
- //@Test
+ @Test
public void test_007_ReplaceFileUnpublishedAndBadIds() {
msgt("test_007_ReplaceFileBadIds");
@@ -405,7 +405,7 @@ public void test_007_ReplaceFileUnpublishedAndBadIds() {
}
- //@Test
+ @Test
public void test_008_ReplaceFileAlreadyDeleted() {
msgt("test_008_ReplaceFileAlreadyDeleted");
From b1421cb30a83794b13c455c6f1b7a3de1c02814a Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Wed, 26 Oct 2016 09:17:52 -0400
Subject: [PATCH 76/86] #1641 - post code review. In addCategoryByName
already checks for existing categories, remove it from this class. rename
to
---
.../edu/harvard/iq/dataverse/api/Datasets.java | 13 ++++++++-----
.../datasetutility/AddReplaceFileHelper.java | 14 ++++----------
.../datasetutility/OptionalFileParams.java | 4 +---
3 files changed, 13 insertions(+), 18 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
index 5e0cdeeb55c..b8b790c9563 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -555,7 +555,9 @@ public Response deletePrivateUrl(@PathParam("id") String idSupplied) {
/**
* Add a File to an existing Dataset
*
+ * @param idSupplied
* @param datasetId
+ * @param jsonData
* @param testFileInputStream
* @param contentDispositionHeader
* @param formDataBodyPart
@@ -566,7 +568,7 @@ public Response deletePrivateUrl(@PathParam("id") String idSupplied) {
@Consumes(MediaType.MULTIPART_FORM_DATA)
public Response addFileToDataset(@PathParam("id") String idSupplied,
@FormDataParam("jsonData") String jsonData,
- @FormDataParam("file") InputStream testFileInputStream,
+ @FormDataParam("file") InputStream fileInputStream,
@FormDataParam("file") FormDataContentDisposition contentDispositionHeader,
@FormDataParam("file") final FormDataBodyPart formDataBodyPart
){
@@ -590,9 +592,12 @@ public Response addFileToDataset(@PathParam("id") String idSupplied,
// -------------------------------------
Dataset dataset;
+ Long datasetId;
try{
dataset = findDatasetOrDie(idSupplied);
+ datasetId = dataset.getId();
}catch (WrappedResponse wr) {
+
String errMsg;
if (idSupplied==null){
errMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.dataset_id_is_null");
@@ -606,8 +611,6 @@ public Response addFileToDataset(@PathParam("id") String idSupplied,
}
}
- Long datasetId = dataset.getId();
-
// -------------------------------------
// (3) Get the file name and content type
@@ -648,7 +651,7 @@ public Response addFileToDataset(@PathParam("id") String idSupplied,
addFileHelper.runAddFileByDatasetId(datasetId,
newFilename,
newFileContentType,
- testFileInputStream,
+ fileInputStream,
optionalFileParams);
@@ -657,7 +660,7 @@ public Response addFileToDataset(@PathParam("id") String idSupplied,
}else{
String successMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.success.add");
try {
- msgt("as String: " + addFileHelper.getSuccessResult());
+ //msgt("as String: " + addFileHelper.getSuccessResult());
return okResponseGsonObject(successMsg,
addFileHelper.getSuccessResultAsGsonObject());
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index 58d67ae0982..34489f6ff3b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -103,7 +103,7 @@ public class AddReplaceFileHelper{
// -----------------------------------
// Instance variables directly added
// -----------------------------------
- private Dataset dataset; // constructor
+ private Dataset dataset; // constructor (for add, not replace)
private DataverseRequest dvRequest; // constructor
private InputStream newFileInputStream; // step 20
private String newFileName; // step 20
@@ -133,10 +133,6 @@ public class AddReplaceFileHelper{
private List errorMessages;
private Response.Status httpErrorCode; // optional
- // public AddReplaceFileHelper(){
- // throw new IllegalStateException("Must be called with a dataset and or user");
- // }
-
/**
* MAIN CONSTRUCTOR -- minimal requirements
@@ -144,7 +140,6 @@ public class AddReplaceFileHelper{
* @param dataset
* @param dvRequest
*/
-
public AddReplaceFileHelper(DataverseRequest dvRequest,
IngestServiceBean ingestService,
DatasetServiceBean datasetService,
@@ -1168,7 +1163,7 @@ private boolean step_060_addFilesViaIngestService(){
this.addErrorSevere(getBundleErr("final_file_list_empty"));
return false;
}
-
+
ingestService.addFiles(workingVersion, finalFileList);
return true;
@@ -1333,8 +1328,6 @@ private boolean step_080_run_update_dataset_command_for_replace(){
if (!step_085_auto_remove_filemetadata_to_replace_from_working_version()){
return false;
}
-
-
// -----------------------------------------------------------
// Set the "root file ids" and "previous file ids"
@@ -1505,7 +1498,8 @@ private boolean step_090_notifyUser(){
// Create a notification!
- // skip for now
+ // skip for now, may be part of dataset update listening
+ //
return true;
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
index 762b7865ea7..f41e37d80f4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
@@ -296,9 +296,7 @@ private void addTagsToDataFile(FileMetadata fileMetadata){
List currentCategories = fileMetadata.getCategoriesByName();
for (String tagText : this.getTags()){
- if (!currentCategories.contains(tagText)){
- fileMetadata.addCategoryByName(tagText);
- }
+ fileMetadata.addCategoryByName(tagText);
}
}
From 62b98ad6bb14ce5fee2f87499f86b1b7a7368d86 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Wed, 26 Oct 2016 10:03:16 -0400
Subject: [PATCH 77/86] #1612 - post code review. For add file API endpoint, we
now have the dataset object--pass directly to AddReplaceHelper instead of the
id
---
.../harvard/iq/dataverse/api/Datasets.java | 2 +-
.../datasetutility/AddReplaceFileHelper.java | 30 ++-----------------
.../datasetutility/FileUploadTestPage.java | 2 +-
.../edu/harvard/iq/dataverse/api/FilesIT.java | 1 -
4 files changed, 5 insertions(+), 30 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
index b8b790c9563..4e0dd0598f8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -648,7 +648,7 @@ public Response addFileToDataset(@PathParam("id") String idSupplied,
//-------------------
// (4) Run "runAddFileByDatasetId"
//-------------------
- addFileHelper.runAddFileByDatasetId(datasetId,
+ addFileHelper.runAddFileByDataset(dataset,
newFilename,
newFileContentType,
fileInputStream,
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index 34489f6ff3b..caca5538289 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -197,16 +197,14 @@ public AddReplaceFileHelper(DataverseRequest dvRequest,
/**
*
- * @param datasetId
+ * @param chosenDataset
* @param newFileName
* @param newFileContentType
* @param newFileInputStream
* @param optionalFileParams
- * @param description optional
- * @param
* @return
*/
- public boolean runAddFileByDatasetId(Long datasetId,
+ public boolean runAddFileByDataset(Dataset chosenDataset,
String newFileName,
String newFileContentType,
InputStream newFileInputStream,
@@ -218,7 +216,7 @@ public boolean runAddFileByDatasetId(Long datasetId,
this.currentOperation = FILE_ADD_OPERATION;
- if (!this.step_001_loadDatasetById(datasetId)){
+ if (!this.step_001_loadDataset(chosenDataset)){
return false;
}
@@ -692,28 +690,6 @@ private boolean step_001_loadDataset(Dataset selectedDataset){
return true;
}
- /**
- *
- */
- private boolean step_001_loadDatasetById(Long datasetId){
-
- if (this.hasError()){
- return false;
- }
-
- if (datasetId == null){
- this.addErrorSevere(getBundleErr("dataset_id_is_null"));
- return false;
- }
-
- Dataset yeDataset = datasetService.find(datasetId);
- if (yeDataset == null){
- this.addError(getBundleErr("dataset_id_not_found") + " " + datasetId);
- return false;
- }
-
- return step_001_loadDataset(yeDataset);
- }
/**
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileUploadTestPage.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileUploadTestPage.java
index fb3779b8d8a..d879f80ea88 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileUploadTestPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileUploadTestPage.java
@@ -206,7 +206,7 @@ public void addReplaceFile(UploadedFile laFile){
null
);
}else{
- addFileHelper.runAddFileByDatasetId(dataset.getId(),
+ addFileHelper.runAddFileByDataset(dataset,
laFile.getFileName(),
laFile.getContentType(),
inputStream,
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
index 292d3cda265..47452bfb1dc 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
@@ -62,7 +62,6 @@ private String createDataverseGetAlias(String apiToken){
private Integer createDatasetGetId(String dataverseAlias, String apiToken){
Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
-
createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode());
Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
From 0ab12233ac18571af98ea403342a32de85a1e0ec Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Wed, 26 Oct 2016 11:36:49 -0400
Subject: [PATCH 78/86] #1612 - In OptionalFileParams, change 'tags' to
'categories'; also in tests
---
.../datasetutility/AddReplaceFileHelper.java | 2 +
.../datasetutility/OptionalFileParams.java | 57 +++++++++++--------
.../OptionalFileParamsTest.java | 38 ++++++-------
3 files changed, 54 insertions(+), 43 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index caca5538289..d86ba9b1b50 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -138,6 +138,8 @@ public class AddReplaceFileHelper{
* MAIN CONSTRUCTOR -- minimal requirements
*
* @param dataset
+ * @param ingestService
+ * @param datasetService
* @param dvRequest
*/
public AddReplaceFileHelper(DataverseRequest dvRequest,
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
index f41e37d80f4..7fb5ff4e957 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
@@ -37,8 +37,8 @@ public class OptionalFileParams {
private String description;
public static final String DESCRIPTION_ATTR_NAME = "description";
- private List tags;
- public static final String TAGS_ATTR_NAME = "tags";
+ private List categories;
+ public static final String CATEGORIES_ATTR_NAME = "categories";
private List fileDataTags;
public static final String FILE_DATA_TAGS_ATTR_NAME = "fileDataTags";
@@ -55,11 +55,11 @@ public OptionalFileParams(String jsonData) throws DataFileTagException{
public OptionalFileParams(String description,
- List newTags,
+ List newCategories,
List potentialFileDataTags) throws DataFileTagException{
this.description = description;
- setTags(newTags);
+ setCategories(newCategories);
this.addFileDataTags(potentialFileDataTags);
}
@@ -79,8 +79,8 @@ public String getDescription(){
return this.description;
}
- public boolean hasTags(){
- if ((tags == null)||(this.tags.isEmpty())){
+ public boolean hasCategories(){
+ if ((categories == null)||(this.categories.isEmpty())){
return false;
}
return true;
@@ -104,25 +104,24 @@ public boolean hasDescription(){
* Set tags
* @param tags
*/
- public void setTags(List newTags){
+ public void setCategories(List newCategories){
- if (newTags != null){
- newTags = removeDuplicatesNullsEmptyStrings(newTags);
- if (newTags.isEmpty()){
- newTags = null;
+ if (newCategories != null){
+ newCategories = removeDuplicatesNullsEmptyStrings(newCategories);
+ if (newCategories.isEmpty()){
+ newCategories = null;
}
}
-
-
- this.tags = newTags;
+
+ this.categories = newCategories;
}
/**
* Get for tags
* @return List
*/
- public List getTags(){
- return this.tags;
+ public List getCategories(){
+ return this.categories;
}
@@ -169,13 +168,17 @@ private void loadParamsFromJson(String jsonData) throws DataFileTagException{
//Type objType = new TypeToken>() {}.getType();
Type listType = new TypeToken>() {}.getType();
+ //----------------------
// Load tags
- if ((jsonObj.has(TAGS_ATTR_NAME)) && (!jsonObj.get(TAGS_ATTR_NAME).isJsonNull())){
+ //----------------------
+ if ((jsonObj.has(CATEGORIES_ATTR_NAME)) && (!jsonObj.get(CATEGORIES_ATTR_NAME).isJsonNull())){
- setTags(this.tags = gson.fromJson(jsonObj.get(TAGS_ATTR_NAME), listType));
+ setCategories(this.categories = gson.fromJson(jsonObj.get(CATEGORIES_ATTR_NAME), listType));
}
+ //----------------------
// Load tabular tags
+ //----------------------
if ((jsonObj.has(FILE_DATA_TAGS_ATTR_NAME)) && (!jsonObj.get(FILE_DATA_TAGS_ATTR_NAME).isJsonNull())){
@@ -290,14 +293,17 @@ private void addTagsToDataFile(FileMetadata fileMetadata){
// Is there anything to add?
//
- if (!hasTags()){
+ if (!hasCategories()){
return;
}
List currentCategories = fileMetadata.getCategoriesByName();
- for (String tagText : this.getTags()){
- fileMetadata.addCategoryByName(tagText);
- }
+
+ // Add categories to the file metadata object
+ //
+ this.getCategories().stream().forEach((catText) -> {
+ fileMetadata.addCategoryByName(catText); // fyi: "addCategoryByName" checks for dupes
+ });
}
@@ -313,7 +319,9 @@ private void addFileDataTagsToFile(DataFile df){
}
msgt("addFileDataTagsToFile 2");
- // Get existing tag list and convert it to list of strings
+ // --------------------------------------------------
+ // Get existing tag list and convert it to list of strings (labels)
+ // --------------------------------------------------
List existingDataFileTags = df.getTags();
List currentLabels;
@@ -328,8 +336,9 @@ private void addFileDataTagsToFile(DataFile df){
;
}
+ // --------------------------------------------------
// Iterate through and add any new labels
- //
+ // --------------------------------------------------
DataFileTag newTagObj;
for (String tagLabel : this.getFileDataTags()){
diff --git a/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java b/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java
index bf34bd86cda..9bf87fb2e6a 100644
--- a/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java
@@ -39,7 +39,7 @@ public void test_01_jsonDescriptionGood() throws DataFileTagException {
OptionalFileParams instance = new OptionalFileParams(jsonParams);
assertEquals(instance.getDescription(), val);
- assertNull(instance.getTags());
+ assertNull(instance.getCategories());
assertNull(instance.getFileDataTags());
}
@@ -86,17 +86,17 @@ public void test_04_jsonTagsGood() throws DataFileTagException {
msgt("test_04_jsonTagsGood");
String val = "A new file";
- String jsonParams = "{\"description\": \"A new file\", \"tags\": [\"dog\", \"cat\", \"mouse\"]}";
+ String jsonParams = "{\"description\": \"A new file\", \"categories\": [\"dog\", \"cat\", \"mouse\"]}";
OptionalFileParams instance = new OptionalFileParams(jsonParams);
assertEquals(instance.getDescription(), val);
- List expectedTags = Arrays.asList("dog", "cat", "mouse");
- assertEquals(expectedTags, instance.getTags());
+ List expectedCategories = Arrays.asList("dog", "cat", "mouse");
+ assertEquals(expectedCategories, instance.getCategories());
assertNull(instance.getFileDataTags());
- assertTrue(instance.hasTags());
+ assertTrue(instance.hasCategories());
assertTrue(instance.hasDescription());
assertFalse(instance.hasFileDataTags());
@@ -117,8 +117,8 @@ public void test_05_jsonTabularTagsGood() throws DataFileTagException {
List expectedTags = Arrays.asList("Survey", "Event", "Panel");
assertEquals(expectedTags, instance.getFileDataTags());
- assertNull(instance.getTags());
- assertFalse(instance.hasTags());
+ assertNull(instance.getCategories());
+ assertFalse(instance.hasCategories());
assertTrue(instance.hasDescription());
assertTrue(instance.hasFileDataTags());
}
@@ -148,15 +148,15 @@ public void test_07_regularInstanceGood() throws DataFileTagException {
msgt("test_07_regularInstanceGood");
String val = "A new file";
- List tags = Arrays.asList("dog", " dog ", "cat", "mouse", "dog ");
+ List categories = Arrays.asList("dog", " dog ", "cat", "mouse", "dog ");
List fileDataTags = Arrays.asList("Survey", "Event", "Panel");
OptionalFileParams instance = new OptionalFileParams(val,
- tags,
+ categories,
fileDataTags);
assertEquals(val, instance.getDescription());
- assertEquals( Arrays.asList("dog", "cat", "mouse"),instance.getTags());
+ assertEquals( Arrays.asList("dog", "cat", "mouse"), instance.getCategories());
assertEquals(fileDataTags, instance.getFileDataTags());
}
@@ -167,15 +167,15 @@ public void test_08_regularInstanceGoodWithNulls() throws DataFileTagException {
msgt("test_08_regularInstanceGoodWithNulls");
String val = null;
- List tags = null;//Arrays.asList("dog", "cat", "mouse");
+ List categories = null;//Arrays.asList("dog", "cat", "mouse");
List fileDataTags = Arrays.asList("Survey", "Survey", "Event", "Panel", "Survey", " ");
OptionalFileParams instance = new OptionalFileParams(val,
- tags,
+ categories,
fileDataTags);
assertEquals(val, instance.getDescription());
- assertEquals(tags, instance.getTags());
+ assertEquals(categories, instance.getCategories());
assertEquals(Arrays.asList("Survey", "Event", "Panel"), instance.getFileDataTags());
}
@@ -185,15 +185,15 @@ public void test_09_unusedParamsGood() throws DataFileTagException {
msgt("test_08_regularInstanceGoodWithNulls");
- String jsonParams = "{\"forceReplace\": \"unused within OptionalFileParams\", \"oldFileId\": \"unused within OptionalFileParams\", \"description\": null, \"unusedParam1\": \"haha\", \"tags\": []}";
+ String jsonParams = "{\"forceReplace\": \"unused within OptionalFileParams\", \"oldFileId\": \"unused within OptionalFileParams\", \"description\": null, \"unusedParam1\": \"haha\", \"categories\": []}";
OptionalFileParams instance = new OptionalFileParams(jsonParams);
assertNull(instance.getDescription());
assertFalse(instance.hasDescription());
- assertNull(instance.getTags());
- assertFalse(instance.hasTags());
+ assertNull(instance.getCategories());
+ assertFalse(instance.hasCategories());
assertNull(instance.getFileDataTags());
assertFalse(instance.hasFileDataTags());
@@ -217,11 +217,11 @@ private void msgt(String s){
import json
d = dict(description="A new file"
- ,tags=["dog", "cat", "mouse"])
+ ,categories=["dog", "cat", "mouse"])
print json.dumps(json.dumps(d))
# result:
-# "{\"description\": \"A new file\", \"tags\": [\"dog\", \"cat\", \"mouse\"]}"
+# "{\"description\": \"A new file\", \"categories\": [\"dog\", \"cat\", \"mouse\"]}"
@@ -237,7 +237,7 @@ private void msgt(String s){
import json
d = dict(description="A new file",
- tags=["dog", "cat", "mouse"],
+ categories=["dog", "cat", "mouse"],
unusedParam1="haha",
forceReplace="unused within OptionalFileParams",
oldFileId="unused within OptionalFileParams"
From 346387e52df96f7a9378ed7d2da7f2859ab0dc0f Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Wed, 26 Oct 2016 11:53:45 -0400
Subject: [PATCH 79/86] #1612 - Adding dataFileTags for a non-tabular file now
returns an error message
---
src/main/java/Bundle.properties | 4 ++
.../datasetutility/AddReplaceFileHelper.java | 9 ++-
.../datasetutility/OptionalFileParams.java | 63 ++++++++++++-------
.../OptionalFileParamsTest.java | 24 +++----
4 files changed, 65 insertions(+), 35 deletions(-)
diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties
index 16a7fc297d4..104a55b2a43 100755
--- a/src/main/java/Bundle.properties
+++ b/src/main/java/Bundle.properties
@@ -1428,6 +1428,10 @@ citationFrame.banner.closeIcon=Close this message, go to dataset
citationFrame.banner.countdownMessage= This message will close in
citationFrame.banner.countdownMessage.seconds=seconds
+
+# File metadata error
+file.metadata.datafiletag.not_tabular=You cannot add data file tags to a non-tabular file.
+
# File Add/Replace operation messages
file.addreplace.error.dataset_is_null=The dataset cannot be null.
file.addreplace.error.dataset_id_is_null=The dataset ID cannot be null.
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index d86ba9b1b50..7ff44de5482 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -30,6 +30,7 @@
import java.util.Objects;
import java.util.ResourceBundle;
import java.util.Set;
+import java.util.logging.Level;
import java.util.logging.Logger;
import javax.ejb.EJBException;
import javax.validation.ConstraintViolation;
@@ -1123,7 +1124,13 @@ private boolean step_055_loadOptionalFileParams(OptionalFileParams optionalFileP
// Add tags, description, etc
// --------------------------------------------
for (DataFile df : finalFileList){
- optionalFileParams.addOptionalParams(df);
+ try {
+ optionalFileParams.addOptionalParams(df);
+ } catch (DataFileTagException ex) {
+ Logger.getLogger(AddReplaceFileHelper.class.getName()).log(Level.SEVERE, null, ex);
+ addError(ex.getMessage());
+ return false;
+ }
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
index 7fb5ff4e957..53c755fcd74 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
@@ -40,8 +40,8 @@ public class OptionalFileParams {
private List categories;
public static final String CATEGORIES_ATTR_NAME = "categories";
- private List fileDataTags;
- public static final String FILE_DATA_TAGS_ATTR_NAME = "fileDataTags";
+ private List dataFileTags;
+ public static final String FILE_DATA_TAGS_ATTR_NAME = "dataFileTags";
@@ -87,7 +87,7 @@ public boolean hasCategories(){
}
public boolean hasFileDataTags(){
- if ((fileDataTags == null)||(this.fileDataTags.isEmpty())){
+ if ((dataFileTags == null)||(this.dataFileTags.isEmpty())){
return false;
}
return true;
@@ -126,19 +126,19 @@ public List getCategories(){
/**
- * Set fileDataTags
- * @param fileDataTags
+ * Set dataFileTags
+ * @param dataFileTags
*/
- public void setFileDataTags(List fileDataTags){
- this.fileDataTags = fileDataTags;
+ public void setDataFileTags(List dataFileTags){
+ this.dataFileTags = dataFileTags;
}
/**
* Get for dataFileTags
* @return List
*/
- public List getFileDataTags(){
- return this.fileDataTags;
+ public List getDataFileTags(){
+ return this.dataFileTags;
}
private void loadParamsFromJson(String jsonData) throws DataFileTagException{
@@ -221,20 +221,20 @@ private void addFileDataTags(List potentialTags) throws DataFileTagExcep
}
// Make a new list
- this.fileDataTags = new ArrayList<>();
+ this.dataFileTags = new ArrayList<>();
// Add valid potential tags to the list
for (String tagToCheck : potentialTags){
if (DataFileTag.isDataFileTag(tagToCheck)){
- this.fileDataTags.add(tagToCheck);
+ this.dataFileTags.add(tagToCheck);
}else{
String errMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.invalid_datafile_tag");
throw new DataFileTagException(errMsg + " [" + tagToCheck + "]. Please use one of the following: " + DataFileTag.getListofLabelsAsString());
}
}
// Shouldn't happen....
- if (fileDataTags.isEmpty()){
- fileDataTags = null;
+ if (dataFileTags.isEmpty()){
+ dataFileTags = null;
}
}
@@ -253,7 +253,7 @@ private void msgt(String s){
* Add parameters to a DataFile object
*
*/
- public void addOptionalParams(DataFile df) {
+ public void addOptionalParams(DataFile df) throws DataFileTagException{
if (df == null){
throw new NullPointerException("The datafile cannot be null!");
}
@@ -268,9 +268,9 @@ public void addOptionalParams(DataFile df) {
}
// ---------------------------
- // Add tags
+ // Add categories
// ---------------------------
- addTagsToDataFile(fm);
+ addCategoriesToDataFile(fm);
// ---------------------------
@@ -285,7 +285,7 @@ public void addOptionalParams(DataFile df) {
* Add Tags to the DataFile
*
*/
- private void addTagsToDataFile(FileMetadata fileMetadata){
+ private void addCategoriesToDataFile(FileMetadata fileMetadata){
if (fileMetadata == null){
throw new NullPointerException("The fileMetadata cannot be null!");
@@ -305,19 +305,38 @@ private void addTagsToDataFile(FileMetadata fileMetadata){
fileMetadata.addCategoryByName(catText); // fyi: "addCategoryByName" checks for dupes
});
}
+
-
- private void addFileDataTagsToFile(DataFile df){
+ /**
+ * NOTE: DataFile tags can only be added to tabular files
+ *
+ * - e.g. The file must already be ingested.
+ *
+ * Because of this, these tags cannot be used when "Adding" a file via
+ * the API--e.g. b/c the file will not yet be ingested
+ *
+ * @param df
+ */
+ private void addFileDataTagsToFile(DataFile df) throws DataFileTagException{
if (df == null){
throw new NullPointerException("The DataFile (df) cannot be null!");
}
- msgt("addFileDataTagsToFile");
+ // --------------------------------------------------
// Is there anything to add?
+ // --------------------------------------------------
if (!hasFileDataTags()){
return;
}
- msgt("addFileDataTagsToFile 2");
+
+ // --------------------------------------------------
+ // Is this a tabular file?
+ // --------------------------------------------------
+ if (!df.isTabularData()){
+ String errMsg = ResourceBundle.getBundle("Bundle").getString("file.metadata.datafiletag.not_tabular");
+
+ throw new DataFileTagException(errMsg);
+ }
// --------------------------------------------------
// Get existing tag list and convert it to list of strings (labels)
@@ -340,7 +359,7 @@ private void addFileDataTagsToFile(DataFile df){
// Iterate through and add any new labels
// --------------------------------------------------
DataFileTag newTagObj;
- for (String tagLabel : this.getFileDataTags()){
+ for (String tagLabel : this.getDataFileTags()){
if (!currentLabels.contains(tagLabel)){ // not already there!
diff --git a/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java b/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java
index 9bf87fb2e6a..02b435aa7fe 100644
--- a/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java
@@ -40,7 +40,7 @@ public void test_01_jsonDescriptionGood() throws DataFileTagException {
assertEquals(instance.getDescription(), val);
assertNull(instance.getCategories());
- assertNull(instance.getFileDataTags());
+ assertNull(instance.getDataFileTags());
}
@@ -95,7 +95,7 @@ public void test_04_jsonTagsGood() throws DataFileTagException {
List expectedCategories = Arrays.asList("dog", "cat", "mouse");
assertEquals(expectedCategories, instance.getCategories());
- assertNull(instance.getFileDataTags());
+ assertNull(instance.getDataFileTags());
assertTrue(instance.hasCategories());
assertTrue(instance.hasDescription());
assertFalse(instance.hasFileDataTags());
@@ -108,14 +108,14 @@ public void test_05_jsonTabularTagsGood() throws DataFileTagException {
msgt("test_05_jsonTabularTagsGood");
String val = "A new file";
- String jsonParams = "{\"fileDataTags\": [\"Survey\", \"Event\", \"Panel\"], \"description\": \"A new file\"}";
+ String jsonParams = "{\"dataFileTags\": [\"Survey\", \"Event\", \"Panel\"], \"description\": \"A new file\"}";
OptionalFileParams instance = new OptionalFileParams(jsonParams);
assertEquals(instance.getDescription(), val);
List expectedTags = Arrays.asList("Survey", "Event", "Panel");
- assertEquals(expectedTags, instance.getFileDataTags());
+ assertEquals(expectedTags, instance.getDataFileTags());
assertNull(instance.getCategories());
assertFalse(instance.hasCategories());
@@ -129,7 +129,7 @@ public void test_06_jsonTabularTagsBad() throws DataFileTagException {
msgt("test_06_jsonTabularTagsBad");
String val = "A new file";
- String jsonParams = "{\"fileDataTags\": [\"Survey\", \"Event\", \"xPanel\"], \"description\": \"A new file\"}";
+ String jsonParams = "{\"dataFileTags\": [\"Survey\", \"Event\", \"xPanel\"], \"description\": \"A new file\"}";
try{
OptionalFileParams instance = new OptionalFileParams(jsonParams);
@@ -149,15 +149,15 @@ public void test_07_regularInstanceGood() throws DataFileTagException {
String val = "A new file";
List categories = Arrays.asList("dog", " dog ", "cat", "mouse", "dog ");
- List fileDataTags = Arrays.asList("Survey", "Event", "Panel");
+ List dataFileTags = Arrays.asList("Survey", "Event", "Panel");
OptionalFileParams instance = new OptionalFileParams(val,
categories,
- fileDataTags);
+ dataFileTags);
assertEquals(val, instance.getDescription());
assertEquals( Arrays.asList("dog", "cat", "mouse"), instance.getCategories());
- assertEquals(fileDataTags, instance.getFileDataTags());
+ assertEquals(dataFileTags, instance.getDataFileTags());
}
@@ -168,15 +168,15 @@ public void test_08_regularInstanceGoodWithNulls() throws DataFileTagException {
String val = null;
List categories = null;//Arrays.asList("dog", "cat", "mouse");
- List fileDataTags = Arrays.asList("Survey", "Survey", "Event", "Panel", "Survey", " ");
+ List dataFileTags = Arrays.asList("Survey", "Survey", "Event", "Panel", "Survey", " ");
OptionalFileParams instance = new OptionalFileParams(val,
categories,
- fileDataTags);
+ dataFileTags);
assertEquals(val, instance.getDescription());
assertEquals(categories, instance.getCategories());
- assertEquals(Arrays.asList("Survey", "Event", "Panel"), instance.getFileDataTags());
+ assertEquals(Arrays.asList("Survey", "Event", "Panel"), instance.getDataFileTags());
}
@@ -195,7 +195,7 @@ public void test_09_unusedParamsGood() throws DataFileTagException {
assertNull(instance.getCategories());
assertFalse(instance.hasCategories());
- assertNull(instance.getFileDataTags());
+ assertNull(instance.getDataFileTags());
assertFalse(instance.hasFileDataTags());
}
From 4fcab0d5a1dbca2b108812b3b1b8a54bbaed0cc3 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Wed, 26 Oct 2016 12:02:13 -0400
Subject: [PATCH 80/86] #1641 Consolidate calls to UpdateDatasetCommand in
step_070_run_update_dataset_command and
step_080_run_update_dataset_command_for_replace
---
.../datasetutility/AddReplaceFileHelper.java | 27 +++----------------
1 file changed, 4 insertions(+), 23 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index 7ff44de5482..8bdb506c6f7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -1341,31 +1341,12 @@ private boolean step_080_run_update_dataset_command_for_replace(){
df.setRootDataFileId(fileToReplace.getRootDataFileId());
}
-
-
- Command update_cmd;
- update_cmd = new UpdateDatasetCommand(dataset, dvRequest);
-
- ((UpdateDatasetCommand) update_cmd).setValidateLenient(true);
+ // Call the update dataset command
+ //
+ return step_070_run_update_dataset_command();
-
- try {
- // Submit the update dataset command
- // and update the local dataset object
- //
- dataset = commandEngine.submit(update_cmd);
- } catch (CommandException ex) {
- this.addErrorSevere(getBundleErr("replace.command_engine_error"));
- logger.severe(ex.getMessage());
- return false;
- }catch (EJBException ex) {
- this.addErrorSevere(getBundleErr("replace.ejb_exception"));
- logger.severe(ex.getMessage());
- return false;
- }
-
- return true;
+
}
/**
From df804bcfacfd5978235ed77972664c3380600d52 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Wed, 26 Oct 2016 12:19:38 -0400
Subject: [PATCH 81/86] 1612 - switch to checking specific permission to
checking perms on a command. Adjusted test as necessary
---
.../datasetutility/AddReplaceFileHelper.java | 40 ++++++++++++++-----
.../edu/harvard/iq/dataverse/api/FilesIT.java | 28 ++++++++++++-
2 files changed, 56 insertions(+), 12 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index 8bdb506c6f7..9d02ffa227b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -20,6 +20,7 @@
import edu.harvard.iq.dataverse.engine.command.Command;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+import edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetCommand;
import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetCommand;
import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
import java.io.IOException;
@@ -706,18 +707,35 @@ private boolean step_010_VerifyUserAndPermissions(){
if (this.hasError()){
return false;
}
+
+ return step_015_auto_check_permissions(dataset);
+
+ }
+
+ private boolean step_015_auto_check_permissions(Dataset datasetToCheck){
+
+ if (this.hasError()){
+ return false;
+ }
+
+ if (datasetToCheck == null){
+ addError(getBundleErr("dataset_is_null"));
+ return false;
+ }
- msg("dataset:" + dataset.toString());
- msg("Permission.EditDataset:" + Permission.EditDataset.toString());
- msg("dvRequest:" + dvRequest.toString());
- msg("permissionService:" + permissionService.toString());
+ // Make a temp. command
+ //
+ CreateDatasetCommand createDatasetCommand = new CreateDatasetCommand(datasetToCheck, dvRequest, false);
- if (!permissionService.request(dvRequest).on(dataset).has(Permission.EditDataset)){
- addError(Response.Status.FORBIDDEN,getBundleErr("no_edit_dataset_permission"));
+ // Can this user run the command?
+ //
+ if (!permissionService.isUserAllowedOn(dvRequest.getUser(), createDatasetCommand, datasetToCheck)) {
+ addError(Response.Status.FORBIDDEN,getBundleErr("no_edit_dataset_permission"));
return false;
}
+
return true;
-
+
}
@@ -783,10 +801,10 @@ private boolean step_005_loadFileToReplaceById(Long dataFileId){
// Do we have permission to replace this file? e.g. Edit the file's dataset
//
- if (!permissionService.request(dvRequest).on(existingFile.getOwner()).has(Permission.EditDataset)){
- addError(Response.Status.FORBIDDEN, getBundleErr("no_edit_dataset_permission"));
- return false;
- }
+ if (!step_015_auto_check_permissions(existingFile.getOwner())){
+ return false;
+ };
+
// Is the file published?
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
index 47452bfb1dc..95dc9f6f659 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
@@ -188,7 +188,33 @@ public void test_004_AddFileBadToken() {
public void test_005_AddFileBadPermissions() {
msgt("test_005_AddFileBadPerms");
- // To do!!!
+ // Create user
+ String apiToken = createUserGetToken();
+
+ // Create Dataverse
+ String dataverseAlias = createDataverseGetAlias(apiToken);
+
+ // Create Dataset
+ Integer datasetId = createDatasetGetId(dataverseAlias, apiToken);
+
+ // Create another user
+ String apiTokenUnauthorizedUser = createUserGetToken();
+
+
+ String pathToFile = "src/main/webapp/resources/images/favicondataverse.png";
+ Response addResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiTokenUnauthorizedUser);
+
+ //addResponse.prettyPrint();
+ msgt("Here it is: " + addResponse.prettyPrint());
+
+
+ String errMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.no_edit_dataset_permission");
+
+
+ addResponse.then().assertThat()
+ .body("message", equalTo(errMsg))
+ .body("status", equalTo(AbstractApiBean.STATUS_ERROR))
+ .statusCode(FORBIDDEN.getStatusCode());
}
@Test
From 6cfc03635e8b878389d5cefe854d03f12bd976f4 Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Wed, 26 Oct 2016 14:05:05 -0400
Subject: [PATCH 82/86] part of #1612 post code review. update native file API
to include tags and categories. also hits #3067
---
.../edu/harvard/iq/dataverse/DataFile.java | 23 ++++++++++-
.../harvard/iq/dataverse/FileMetadata.java | 41 +++++++++++++++++--
.../iq/dataverse/util/json/JsonPrinter.java | 29 +++++++++----
3 files changed, 81 insertions(+), 12 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
index 0b37cf55932..98b3888c315 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
@@ -27,6 +27,8 @@
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
+import javax.json.Json;
+import javax.json.JsonArrayBuilder;
import javax.persistence.Entity;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
@@ -279,7 +281,26 @@ public List getTagLabels(){
}
return tagStrings;
}
-
+
+ public JsonArrayBuilder getTagLabelsAsJsonArrayBuilder(){
+
+ List currentDataTags = this.getTags();
+
+ JsonArrayBuilder builder = Json.createArrayBuilder();
+
+ if ( (currentDataTags == null)||(currentDataTags.isEmpty())){
+ return builder;
+ }
+
+
+ Iterator itr = currentDataTags.iterator();
+ while (itr.hasNext()){
+ DataFileTag element = (DataFileTag)itr.next();
+ builder.add(element.getTypeLabel());
+ }
+ return builder;
+ }
+
/**
* Return a list of Tag labels
*
diff --git a/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java b/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java
index 87043d6bf99..cee4378c852 100644
--- a/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java
+++ b/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java
@@ -13,6 +13,8 @@
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
+import javax.json.Json;
+import javax.json.JsonArrayBuilder;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
@@ -131,16 +133,47 @@ public void addCategory(DataFileCategory category) {
fileCategories.add(category);
}
+ /**
+ * Retrieve categories
+ * @return
+ */
public List getCategoriesByName() {
ArrayList ret = new ArrayList<>();
- if (fileCategories != null) {
- for (int i = 0; i < fileCategories.size(); i++) {
- ret.add(fileCategories.get(i).getName());
- }
+
+ if (fileCategories == null) {
+ return ret;
}
+
+ for (int idx=0; idx < fileCategories.size(); idx++){
+ ret.add(fileCategories.get(idx).getName());
+ }
+ // fileCategories.stream()
+ // .map(x -> ret.add(x.getName()));
+
return ret;
}
+
+ public JsonArrayBuilder getCategoryNamesAsJsonArrayBuilder() {
+
+ JsonArrayBuilder builder = Json.createArrayBuilder();
+
+ if (fileCategories == null) {
+ return builder;
+ }
+
+ for (int idx=0; idx < fileCategories.size(); idx++){
+ builder.add(fileCategories.get(idx).getName());
+ }
+
+ //fileCategories.stream()
+ // .map(x -> builder.add(x.getName()));
+
+ return builder;
+
+ }
+
+
// alternative, experimental method:
public void setCategoriesByName(List newCategoryNames) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
index 80c47e5364f..6108498b342 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
@@ -36,6 +36,7 @@
import edu.harvard.iq.dataverse.util.DatasetFieldWalker;
import edu.harvard.iq.dataverse.util.StringUtil;
import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder;
+import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Set;
import javax.json.Json;
@@ -503,25 +504,39 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata) {
fileName = df.getFileMetadata().getLabel();
}
+
return jsonObjectBuilder()
.add("id", df.getId())
.add("filename", fileName)
- .add("contentType", df.getContentType())
+ .add("contentType", df.getContentType())
+ .add("filesize", df.getFilesize())
+ .add("description", df.getDescription())
+ .add("released", df.isReleased())
+ .add("restricted", df.isRestricted())
.add("storageIdentifier", df.getStorageIdentifier())
.add("originalFileFormat", df.getOriginalFileFormat())
.add("originalFormatLabel", df.getOriginalFormatLabel())
.add("UNF", df.getUnf())
+ //---------------------------------------------
+ // For file replace: rootDataFileId, previousDataFileId
+ //---------------------------------------------
.add("rootDataFileId", df.getRootDataFileId())
.add("previousDataFileId", df.getPreviousDataFileId())
- /**
- * @todo Should we deprecate "md5" now that it's under
- * "checksum" (which may also be a SHA-1 rather than an MD5)?
- */
+ //---------------------------------------------
+ // Add categories + tags
+ //---------------------------------------------
+ .add("categories", fileMetadata.getCategoryNamesAsJsonArrayBuilder())
+ .add("tags", df.getTagLabelsAsJsonArrayBuilder())
+ //---------------------------------------------
+ // Checksum
+ // * @todo Should we deprecate "md5" now that it's under
+ // * "checksum" (which may also be a SHA-1 rather than an MD5)?
+ //---------------------------------------------
.add("md5", getMd5IfItExists(df.getChecksumType(), df.getChecksumValue()))
.add("checksum", getChecksumTypeAndValue(df.getChecksumType(), df.getChecksumValue()))
- .add("description", df.getDescription());
+ ;
}
-
+
public static String format(Date d) {
return (d == null) ? null : Util.getDateTimeFormat().format(d);
}
From aa70f067ffa0861ad4f78c81b02b9e222b4e68e8 Mon Sep 17 00:00:00 2001
From: Stephen Kraffmiller
Date: Wed, 26 Oct 2016 14:27:44 -0400
Subject: [PATCH 83/86] Bundle-ize Find Dataset Error Messages
---
src/main/java/Bundle.properties | 4 ++
.../harvard/iq/dataverse/api/Datasets.java | 57 +++++++++----------
2 files changed, 31 insertions(+), 30 deletions(-)
diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties
index 104a55b2a43..b91e49534ee 100755
--- a/src/main/java/Bundle.properties
+++ b/src/main/java/Bundle.properties
@@ -1435,6 +1435,10 @@ file.metadata.datafiletag.not_tabular=You cannot add data file tags to a non-tab
# File Add/Replace operation messages
file.addreplace.error.dataset_is_null=The dataset cannot be null.
file.addreplace.error.dataset_id_is_null=The dataset ID cannot be null.
+find.dataset.error.dataset_id_is_null=When accessing a dataset based on persistent id, a {0} query parameter must be present
+find.dataset.error.dataset.not.found.persistentId=Dataset with persistent id {0} not found
+find.dataset.error.dataset.not.found.id=Dataset with id {0} not found
+find.dataset.error.dataset.not.found.bad.id=Bad dataset id number: {0}
file.addreplace.error.dataset_id_not_found=There was no dataset found for id:
file.addreplace.error.no_edit_dataset_permission=You do not have permission to edit this dataset.
file.addreplace.error.filename_is_null=The fileName cannot be null.
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
index 4e0dd0598f8..921624053fc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -51,6 +51,7 @@
import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
+import edu.harvard.iq.dataverse.util.BundleUtil;
import edu.harvard.iq.dataverse.util.SystemConfig;
import edu.harvard.iq.dataverse.util.json.JsonParseException;
import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*;
@@ -58,6 +59,7 @@
import java.io.InputStream;
import java.io.OutputStream;
import java.io.StringReader;
+import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.ResourceBundle;
@@ -593,21 +595,19 @@ public Response addFileToDataset(@PathParam("id") String idSupplied,
Dataset dataset;
Long datasetId;
- try{
+ try {
dataset = findDatasetOrDie(idSupplied);
datasetId = dataset.getId();
- }catch (WrappedResponse wr) {
-
+ } catch (WrappedResponse wr) {
String errMsg;
- if (idSupplied==null){
+ if (idSupplied == null) {
errMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.dataset_id_is_null");
- return error(Response.Status.BAD_REQUEST, errMsg);
-
- }else if (idSupplied.equals(Datasets.PERSISTENT_ID_KEY)){
- return wr.getResponse();
- }else{
- errMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.dataset_id_not_found") + " " + idSupplied;
- return error(Response.Status.BAD_REQUEST, errMsg);
+ return error(Response.Status.BAD_REQUEST, errMsg);
+ } else if (idSupplied.equals(Datasets.PERSISTENT_ID_KEY)) {
+ return wr.getResponse();
+ } else {
+ errMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.dataset_id_not_found") + " " + idSupplied;
+ return error(Response.Status.BAD_REQUEST, errMsg);
}
}
@@ -685,38 +685,35 @@ private void dashes(){
private void msgt(String m){
dashes(); msg(m); dashes();
}
-
-
- private Dataset findDatasetOrDie( String id ) throws WrappedResponse {
+
+
+ private Dataset findDatasetOrDie(String id) throws WrappedResponse {
Dataset dataset;
- if ( id.equals(PERSISTENT_ID_KEY) ) {
+ if (id.equals(PERSISTENT_ID_KEY)) {
String persistentId = getRequestParameter(PERSISTENT_ID_KEY.substring(1));
- if ( persistentId == null ) {
- throw new WrappedResponse(
- badRequest("When accessing a dataset based on persistent id, "
- + "a " + PERSISTENT_ID_KEY.substring(1) + " query parameter "
- + "must be present"));
+ if (persistentId == null) {
+ throw new WrappedResponse(
+ badRequest(BundleUtil.getStringFromBundle("find.dataset.error.dataset_id_is_null", Collections.singletonList(PERSISTENT_ID_KEY.substring(1)))));
}
dataset = datasetService.findByGlobalId(persistentId);
if (dataset == null) {
- throw new WrappedResponse( notFound("dataset " + persistentId + " not found") );
- }
+ throw new WrappedResponse(notFound(BundleUtil.getStringFromBundle("find.dataset.error.dataset.not.found.persistentId", Collections.singletonList(persistentId))));
+ }
return dataset;
-
+
} else {
try {
- dataset = datasetService.find( Long.parseLong(id) );
+ dataset = datasetService.find(Long.parseLong(id));
if (dataset == null) {
- throw new WrappedResponse( notFound("dataset " + id + " not found") );
- }
+ throw new WrappedResponse(notFound(BundleUtil.getStringFromBundle("find.dataset.error.dataset.not.found.id", Collections.singletonList(id))));
+ }
return dataset;
- } catch ( NumberFormatException nfe ) {
- throw new WrappedResponse(
- badRequest("Bad dataset id number: '" + id + "'"));
+ } catch (NumberFormatException nfe) {
+ throw new WrappedResponse(
+ badRequest(BundleUtil.getStringFromBundle("find.dataset.error.dataset.not.found.bad.id", Collections.singletonList(id))));
}
}
-
}
From fe2248837088b8f084ff37c0d8d6dccc0b746920 Mon Sep 17 00:00:00 2001
From: Stephen Kraffmiller
Date: Wed, 26 Oct 2016 15:25:14 -0400
Subject: [PATCH 84/86] move removeDuplicates... method to Util
---
.../edu/harvard/iq/dataverse/api/Util.java | 24 +++++++++++--
.../datasetutility/OptionalFileParams.java | 36 ++++++-------------
2 files changed, 32 insertions(+), 28 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Util.java b/src/main/java/edu/harvard/iq/dataverse/api/Util.java
index 639e3cfa7e8..ce7cb34a280 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Util.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Util.java
@@ -2,9 +2,11 @@
import java.io.StringReader;
import java.text.SimpleDateFormat;
+import java.util.List;
import java.util.Set;
import java.util.TimeZone;
import java.util.TreeSet;
+import java.util.stream.Collectors;
import javax.json.Json;
import javax.json.JsonArray;
import javax.json.JsonReader;
@@ -107,6 +109,24 @@ public static SimpleDateFormat getDateFormat() {
return DATE_FORMAT_TL.get();
}
-
-
+ /**
+ * Takes in a list of strings and returns a list stripped of nulls, empty strings and duplicates
+ * @param stringsToCheck
+ * @return
+ */
+
+ public static List removeDuplicatesNullsEmptyStrings(List stringsToCheck){
+
+ if (stringsToCheck == null){
+ throw new NullPointerException("stringsToCheck cannot be null");
+ }
+
+ return stringsToCheck.stream()
+ .filter(p -> p != null) // no nulls
+ .map(String :: trim) // trim leading/trailing whitespace
+ .filter(p -> p.length() > 0 ) // no empty strings
+ .distinct() // distinct
+ .collect(Collectors.toList());
+ }
+
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
index 53c755fcd74..245e9e88915 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParams.java
@@ -11,9 +11,9 @@
import edu.harvard.iq.dataverse.DataFile;
import edu.harvard.iq.dataverse.DataFileTag;
import edu.harvard.iq.dataverse.FileMetadata;
+import edu.harvard.iq.dataverse.api.Util;
import java.lang.reflect.Type;
import java.util.ArrayList;
-import java.util.Collections;
import java.util.List;
import java.util.ResourceBundle;
import java.util.stream.Collectors;
@@ -104,15 +104,15 @@ public boolean hasDescription(){
* Set tags
* @param tags
*/
- public void setCategories(List newCategories){
-
- if (newCategories != null){
- newCategories = removeDuplicatesNullsEmptyStrings(newCategories);
- if (newCategories.isEmpty()){
+ public void setCategories(List newCategories) {
+
+ if (newCategories != null) {
+ newCategories = Util.removeDuplicatesNullsEmptyStrings(newCategories);
+ if (newCategories.isEmpty()) {
newCategories = null;
}
}
-
+
this.categories = newCategories;
}
@@ -191,30 +191,14 @@ private void loadParamsFromJson(String jsonData) throws DataFileTagException{
}
}
-
- private List removeDuplicatesNullsEmptyStrings(List tagsToCheck){
-
- if (tagsToCheck == null){
- throw new NullPointerException("tagsToCheck cannot be null");
- }
-
- return tagsToCheck.stream()
- .filter(p -> p != null) // no nulls
- .map(String :: trim) // strip strings
- .filter(p -> p.length() > 0 ) // no empty strings
- .distinct() // distinct
- .collect(Collectors.toList());
-
- }
-
-
+
private void addFileDataTags(List potentialTags) throws DataFileTagException{
if (potentialTags == null){
return;
}
-
- potentialTags = removeDuplicatesNullsEmptyStrings(potentialTags);
+
+ potentialTags = Util.removeDuplicatesNullsEmptyStrings(potentialTags);
if (potentialTags.isEmpty()){
return;
From e39d0495d40c9893bf066e292efbc68fa9c1d90c Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Wed, 26 Oct 2016 16:08:30 -0400
Subject: [PATCH 85/86] #1612 - Native JSON updated--tags and categories added.
New endpoints use native JSON. GSON results removed
---
.../iq/dataverse/api/AbstractApiBean.java | 26 +++++++-
.../harvard/iq/dataverse/api/Datasets.java | 8 ++-
.../edu/harvard/iq/dataverse/api/Files.java | 6 +-
.../datasetutility/AddReplaceFileHelper.java | 20 ++----
.../iq/dataverse/util/json/JsonPrinter.java | 26 +++++++-
.../edu/harvard/iq/dataverse/api/FilesIT.java | 62 ++++++++++---------
6 files changed, 96 insertions(+), 52 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java
index 8396e85c40b..bd8fc176440 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java
@@ -38,6 +38,7 @@
import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder;
import edu.harvard.iq.dataverse.validation.BeanValidationServiceBean;
import java.io.StringReader;
+import java.math.BigDecimal;
import java.net.URI;
import java.util.concurrent.Callable;
import java.util.function.Function;
@@ -491,7 +492,8 @@ protected Response ok( JsonObjectBuilder bld ) {
.type(MediaType.APPLICATION_JSON)
.build();
}
-
+
+
protected Response ok( String msg ) {
return Response.ok().entity(Json.createObjectBuilder()
.add("status", STATUS_OK)
@@ -501,6 +503,28 @@ protected Response ok( String msg ) {
}
+ protected Response ok(String message, JsonObjectBuilder jsonObjectBuilder ) {
+
+ if (message == null){
+ throw new NullPointerException("message cannot be null");
+ }
+ if (jsonObjectBuilder == null){
+ throw new NullPointerException("jsonObjectBuilder cannot be null");
+ }
+
+ jsonObjectBuilder.add("message", message);
+
+ //JsonObjectBuilder foo = Json.createObjectBuilder();
+ //foo.add("message", message);
+
+
+ return Response.ok( Json.createObjectBuilder()
+ .add("status", STATUS_OK)
+ .add("data", jsonObjectBuilder).build())
+ .type(MediaType.APPLICATION_JSON)
+ .build();
+ }
+
/**
* Added to accommodate a JSON String generated from gson
*
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
index 921624053fc..90c1d16429a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -599,6 +599,8 @@ public Response addFileToDataset(@PathParam("id") String idSupplied,
dataset = findDatasetOrDie(idSupplied);
datasetId = dataset.getId();
} catch (WrappedResponse wr) {
+ return wr.getResponse();
+ /*
String errMsg;
if (idSupplied == null) {
errMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.dataset_id_is_null");
@@ -608,7 +610,7 @@ public Response addFileToDataset(@PathParam("id") String idSupplied,
} else {
errMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.dataset_id_not_found") + " " + idSupplied;
return error(Response.Status.BAD_REQUEST, errMsg);
- }
+ }*/
}
@@ -662,8 +664,8 @@ public Response addFileToDataset(@PathParam("id") String idSupplied,
try {
//msgt("as String: " + addFileHelper.getSuccessResult());
- return okResponseGsonObject(successMsg,
- addFileHelper.getSuccessResultAsGsonObject());
+ return ok(successMsg,
+ addFileHelper.getSuccessResultAsJsonObjectBuilder());
//"Look at that! You added a file! (hey hey, it may have worked)");
} catch (NoFilesException ex) {
Logger.getLogger(Files.class.getName()).log(Level.SEVERE, null, ex);
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
index fd40bf7b30d..9c9c53885be 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
@@ -207,8 +207,10 @@ public Response replaceFileInDataset(
try {
msgt("as String: " + addFileHelper.getSuccessResult());
- return okResponseGsonObject(successMsg,
- addFileHelper.getSuccessResultAsGsonObject());
+ return ok(successMsg,
+ addFileHelper.getSuccessResultAsJsonObjectBuilder());
+ //return okResponseGsonObject(successMsg,
+ // addFileHelper.getSuccessResultAsGsonObject());
//"Look at that! You added a file! (hey hey, it may have worked)");
} catch (NoFilesException ex) {
Logger.getLogger(Files.class.getName()).log(Level.SEVERE, null, ex);
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index 9d02ffa227b..7ddcc847782 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -23,6 +23,7 @@
import edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetCommand;
import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetCommand;
import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
+import edu.harvard.iq.dataverse.util.json.JsonPrinter;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
@@ -34,6 +35,7 @@
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.ejb.EJBException;
+import javax.json.JsonObjectBuilder;
import javax.validation.ConstraintViolation;
import javax.ws.rs.core.Response;
@@ -1436,11 +1438,11 @@ public String getSuccessResult() throws NoFilesException{
throw new NullPointerException("newlyAddedFiles is null!");
}
- return getSuccessResultAsGsonObject().toString();
+ return getSuccessResultAsJsonObjectBuilder().toString();
}
- public JsonObject getSuccessResultAsGsonObject() throws NoFilesException{
+ public JsonObjectBuilder getSuccessResultAsJsonObjectBuilder() throws NoFilesException{
if (hasError()){
throw new NoFilesException("Don't call this method if an error exists!! First check 'hasError()'");
@@ -1454,19 +1456,7 @@ public JsonObject getSuccessResultAsGsonObject() throws NoFilesException{
throw new NoFilesException("newlyAddedFiles is empty!");
}
-
- JsonArray jsonList = new JsonArray();
-
- for (DataFile df : newlyAddedFiles){
- jsonList.add(df.asGsonObject(false));
- }
-
- JsonObject fullFilesJSON = new JsonObject();
- fullFilesJSON.add("files", jsonList);
-
- return fullFilesJSON;
- //return newlyAddedFile.asGsonObject(false);
-
+ return JsonPrinter.jsonDataFileList(newlyAddedFiles);
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
index 6108498b342..9104414724a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
@@ -58,6 +58,7 @@
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Collector;
+import java.util.stream.Collectors;
import static java.util.stream.Collectors.toList;
import javax.json.JsonArray;
import javax.json.JsonObject;
@@ -310,6 +311,26 @@ public static JsonObjectBuilder json(DatasetVersion dsv) {
return bld;
}
+
+ public static JsonObjectBuilder jsonDataFileList(List dataFiles){
+
+ if (dataFiles==null){
+ throw new NullPointerException("dataFiles cannot be null");
+ }
+
+ JsonObjectBuilder bld = jsonObjectBuilder();
+
+
+ List dataFileList = dataFiles.stream()
+ .map(x -> x.getFileMetadata())
+ .collect(Collectors.toList());
+
+
+ bld.add("files", jsonFileMetadatas(dataFileList));
+
+ return bld;
+ }
+
private static String getRootDataverseNameforCitation(Dataset dataset) {
Dataverse root = dataset.getOwner();
while (root.getOwner() != null) {
@@ -367,6 +388,7 @@ public static JsonArrayBuilder jsonFileMetadatas(Collection fmds)
for (FileMetadata fmd : fmds) {
filesArr.add(json(fmd));
}
+
return filesArr;
}
@@ -511,8 +533,8 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata) {
.add("contentType", df.getContentType())
.add("filesize", df.getFilesize())
.add("description", df.getDescription())
- .add("released", df.isReleased())
- .add("restricted", df.isRestricted())
+ //.add("released", df.isReleased())
+ //.add("restricted", df.isRestricted())
.add("storageIdentifier", df.getStorageIdentifier())
.add("originalFileFormat", df.getOriginalFileFormat())
.add("originalFormatLabel", df.getOriginalFormatLabel())
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
index 95dc9f6f659..342e54539a1 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
@@ -6,11 +6,14 @@
import org.junit.BeforeClass;
import org.junit.Test;
import com.jayway.restassured.path.json.JsonPath;
+import edu.harvard.iq.dataverse.util.BundleUtil;
+import java.util.Collections;
import java.util.ResourceBundle;
import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
import static javax.ws.rs.core.Response.Status.CREATED;
import static javax.ws.rs.core.Response.Status.FORBIDDEN;
+import static javax.ws.rs.core.Response.Status.NOT_FOUND;
import static javax.ws.rs.core.Response.Status.OK;
import static junit.framework.Assert.assertEquals;
import static org.hamcrest.CoreMatchers.equalTo;
@@ -91,10 +94,10 @@ public void test_001_AddFileGood() {
addResponse.then().assertThat()
- .body("message", equalTo(successMsg))
+ .body("data.message", equalTo(successMsg))
.body("status", equalTo(AbstractApiBean.STATUS_OK))
- .body("files[0].contentType", equalTo("image/png"))
- .body("files[0].filename", equalTo("dataverseproject.png"))
+ .body("data.files[0].dataFile.contentType", equalTo("image/png"))
+ .body("data.files[0].label", equalTo("dataverseproject.png"))
.statusCode(OK.getStatusCode());
@@ -149,14 +152,15 @@ public void test_003_AddFileNonExistentDatasetId() {
Response addResponse = UtilIT.uploadFileViaNative(datasetId, pathToFile, apiToken);
- //msgt("Here it is: " + addResponse.prettyPrint());
+ msgt("Here it is: " + addResponse.prettyPrint());
- String errMsgStart = ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.dataset_id_not_found");
-
+ //String errMsgStart = ResourceBundle.getBundle("Bundle").getString("find.dataset.error.dataset.not.found.id");
+ String errMsg = BundleUtil.getStringFromBundle("find.dataset.error.dataset.not.found.id", Collections.singletonList(datasetId));
+
addResponse.then().assertThat()
.body("status", equalTo(AbstractApiBean.STATUS_ERROR))
- .body("message", Matchers.startsWith(errMsgStart))
- .statusCode(BAD_REQUEST.getStatusCode());
+ .body("message", equalTo(errMsg))
+ .statusCode(NOT_FOUND.getStatusCode());
}
@Test
@@ -240,13 +244,13 @@ public void test_006_ReplaceFileGood() {
String successMsgAdd = ResourceBundle.getBundle("Bundle").getString("file.addreplace.success.add");
addResponse.then().assertThat()
- .body("message", equalTo(successMsgAdd))
- .body("files[0].contentType", equalTo("image/png"))
- .body("files[0].filename", equalTo("dataverseproject.png"))
+ .body("data.message", equalTo(successMsgAdd))
+ .body("data.files[0].dataFile.contentType", equalTo("image/png"))
+ .body("data.files[0].label", equalTo("dataverseproject.png"))
.statusCode(OK.getStatusCode());
- long origFileId = JsonPath.from(addResponse.body().asString()).getLong("files[0].id");
+ long origFileId = JsonPath.from(addResponse.body().asString()).getLong("data.files[0].dataFile.id");
msg("Orig file id: " + origFileId);
assertNotNull(origFileId); // If checkOut fails, display message
@@ -296,14 +300,14 @@ public void test_006_ReplaceFileGood() {
replaceResp.then().assertThat()
.statusCode(OK.getStatusCode())
- .body("message", equalTo(successMsg2))
- .body("files[0].filename", equalTo("cc0.png"))
+ .body("data.message", equalTo(successMsg2))
+ .body("data.files[0].label", equalTo("cc0.png"))
//.body("data.rootDataFileId", equalTo(origFileId))
;
- long rootDataFileId = JsonPath.from(replaceResp.body().asString()).getLong("files[0].rootDataFileId");
- long previousDataFileId = JsonPath.from(replaceResp.body().asString()).getLong("files[0].previousDataFileId");
- long newDataFileId = JsonPath.from(replaceResp.body().asString()).getLong("files[0].id");
+ long rootDataFileId = JsonPath.from(replaceResp.body().asString()).getLong("data.files[0].dataFile.rootDataFileId");
+ long previousDataFileId = JsonPath.from(replaceResp.body().asString()).getLong("data.files[0].dataFile.previousDataFileId");
+ long newDataFileId = JsonPath.from(replaceResp.body().asString()).getLong("data.files[0].dataFile.id");
assertEquals(origFileId, previousDataFileId);
assertEquals(rootDataFileId, previousDataFileId);
@@ -330,12 +334,12 @@ public void test_006_ReplaceFileGood() {
replaceResp2.then().assertThat()
.statusCode(OK.getStatusCode())
.body("status", equalTo(AbstractApiBean.STATUS_OK))
- .body("message", equalTo(successMsg2))
- .body("files[0].filename", equalTo("favicondataverse.png"))
+ .body("data.message", equalTo(successMsg2))
+ .body("data.files[0].label", equalTo("favicondataverse.png"))
;
- long rootDataFileId2 = JsonPath.from(replaceResp2.body().asString()).getLong("files[0].rootDataFileId");
- long previousDataFileId2 = JsonPath.from(replaceResp2.body().asString()).getLong("files[0].previousDataFileId");
+ long rootDataFileId2 = JsonPath.from(replaceResp2.body().asString()).getLong("data.files[0].dataFile.rootDataFileId");
+ long previousDataFileId2 = JsonPath.from(replaceResp2.body().asString()).getLong("data.files[0].dataFile.previousDataFileId");
msgt("newDataFileId: " + newDataFileId);
msgt("previousDataFileId2: " + previousDataFileId2);
@@ -369,13 +373,13 @@ public void test_007_ReplaceFileUnpublishedAndBadIds() {
String successMsgAdd = ResourceBundle.getBundle("Bundle").getString("file.addreplace.success.add");
addResponse.then().assertThat()
- .body("message", equalTo(successMsgAdd))
- .body("files[0].contentType", equalTo("image/png"))
- .body("files[0].filename", equalTo("dataverseproject.png"))
+ .body("data.message", equalTo(successMsgAdd))
+ .body("data.files[0].dataFile.contentType", equalTo("image/png"))
+ .body("data.files[0].label", equalTo("dataverseproject.png"))
.statusCode(OK.getStatusCode());
- long origFileId = JsonPath.from(addResponse.body().asString()).getLong("files[0].id");
+ long origFileId = JsonPath.from(addResponse.body().asString()).getLong("data.files[0].dataFile.id");
msg("Orig file id: " + origFileId);
assertNotNull(origFileId); // If checkOut fails, display message
@@ -452,13 +456,13 @@ public void test_008_ReplaceFileAlreadyDeleted() {
String successMsgAdd = ResourceBundle.getBundle("Bundle").getString("file.addreplace.success.add");
addResponse.then().assertThat()
- .body("message", equalTo(successMsgAdd))
- .body("files[0].contentType", equalTo("image/png"))
- .body("files[0].filename", equalTo("dataverseproject.png"))
+ .body("data.message", equalTo(successMsgAdd))
+ .body("data.files[0].dataFile.contentType", equalTo("image/png"))
+ .body("data.files[0].label", equalTo("dataverseproject.png"))
.statusCode(OK.getStatusCode());
- long origFileId = JsonPath.from(addResponse.body().asString()).getLong("files[0].id");
+ long origFileId = JsonPath.from(addResponse.body().asString()).getLong("data.files[0].dataFile.id");
msg("Orig file id: " + origFileId);
assertNotNull(origFileId); // If checkOut fails, display message
From ea21a06f0c21fd23a5aff75476ca5919c4f48f6b Mon Sep 17 00:00:00 2001
From: Raman Prasad
Date: Wed, 26 Oct 2016 16:14:00 -0400
Subject: [PATCH 86/86] #1612 - removed okResponseGsonObject
---
.../java/edu/harvard/iq/dataverse/api/AbstractApiBean.java | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java
index bd8fc176440..dfbcd6f1c11 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java
@@ -531,7 +531,8 @@ protected Response ok(String message, JsonObjectBuilder jsonObjectBuilder ) {
* @param gsonObject
* @return
*/
- protected Response okResponseGsonObject(String msg, com.google.gson.JsonObject gsonObject){
+ /*
+ protected Response ok(String msg, com.google.gson.JsonObject gsonObject){
if (gsonObject == null){
throw new NullPointerException("gsonObject cannot be null");
@@ -542,6 +543,8 @@ protected Response okResponseGsonObject(String msg, com.google.gson.JsonObject g
return Response.ok(gsonObject.toString(), MediaType.APPLICATION_JSON).build();
}
+ */
+
/**
* Returns an OK response (HTTP 200, status:OK) with the passed value
|