From 465c528c7da850f423f7dbb205f2f36f623fbd79 Mon Sep 17 00:00:00 2001 From: akio-sone Date: Tue, 1 May 2018 15:44:50 -0400 Subject: [PATCH 01/46] create a new API endpoint --- .../harvard/iq/dataverse/api/Datasets.java | 145 ++++++++++++ .../datasetutility/AddReplaceFileHelper.java | 222 ++++++++++++++++++ 2 files changed, 367 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index e38a9fd3ca5..cf0a28b1c19 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -879,6 +879,151 @@ public Response addFileToDataset(@PathParam("id") String idSupplied, + /* + * The following API is based on the above addFileToDataset + * + * + */ + /** + * Add DataFile-related Metadata to an existing Dataset without invoking + * an ingest request + * + * @param idSupplied +// * @param jsonData + * @param fileInputStream + * @param contentDispositionHeader + * @param formDataBodyPart + * @return + */ + @POST + @Path("{id}/addFileMetadata") + @Consumes(MediaType.MULTIPART_FORM_DATA) + public Response addFileMetadataToDataset(@PathParam("id") String idSupplied, + //@FormDataParam("jsonData") String jsonData, + @FormDataParam("file") InputStream fileInputStream, + @FormDataParam("file") FormDataContentDisposition contentDispositionHeader, + @FormDataParam("file") final FormDataBodyPart formDataBodyPart + ){ + + + // ------------------------------------- + // (1) Get the user from the API key + // ------------------------------------- + User authUser; + try { + authUser = findUserOrDie(); + } catch (WrappedResponse ex) { + return error(Response.Status.FORBIDDEN, + ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.auth") + ); + } + //--------------------------------------- + // (1A) Make sure that the upload type is not rsync + // ------------------------------------- + + if 
(DataCaptureModuleUtil.rsyncSupportEnabled(settingsSvc.getValueForKey(SettingsServiceBean.Key.UploadMethods))) { + return error(Response.Status.METHOD_NOT_ALLOWED, SettingsServiceBean.Key.UploadMethods + " contains " + SystemConfig.FileUploadMethods.RSYNC + ". Please use rsync file upload."); + } + + + // ------------------------------------- + // (2) Get the Dataset Id + // + // ------------------------------------- + Dataset dataset; + + Long datasetId; + try { + dataset = findDatasetOrDie(idSupplied); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + + + // ------------------------------------- + // (3) Get the file name and content type + // ------------------------------------- + String newFilename = contentDispositionHeader.getFileName(); + String newFileContentType = formDataBodyPart.getMediaType().toString(); + + + // (2a) Load up optional params via JSON + // this block is not used because no jsonData + //--------------------------------------- + + OptionalFileParams optionalFileParams = null; +/* + msgt("(api) jsonData: " + jsonData); + + try { + optionalFileParams = new OptionalFileParams(jsonData); + } catch (DataFileTagException ex) { + return error( Response.Status.BAD_REQUEST, ex.getMessage()); + } +*/ + + //------------------- + // (3) Create the AddReplaceFileHelper object + //------------------- + msg("ADD!"); + + DataverseRequest dvRequest2 = createDataverseRequest(authUser); + AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest2, + ingestService, + datasetService, + fileService, + permissionSvc, + commandEngine, + systemConfig); + + + //------------------- + // (4) Run "runAddFileByDatasetId" + //------------------- + addFileHelper.runAddFileWIByDataset(dataset, + newFilename, + newFileContentType, + fileInputStream, + optionalFileParams); + + + if (addFileHelper.hasError()){ + return error(addFileHelper.getHttpErrorCode(), addFileHelper.getErrorMessagesAsString("\n")); + }else{ + String successMsg = 
ResourceBundle.getBundle("Bundle").getString("file.addreplace.success.add"); + try { + //msgt("as String: " + addFileHelper.getSuccessResult()); + /** + * @todo We need a consistent, sane way to communicate a human + * readable message to an API client suitable for human + * consumption. Imagine if the UI were built in Angular or React + * and we want to return a message from the API as-is to the + * user. Human readable. + */ + logger.log(Level.FINE, "successMsg:{0} ", successMsg); + return ok(addFileHelper.getSuccessResultAsJsonObjectBuilder()); + //"Look at that! You added a file! (hey hey, it may have worked)"); + } catch (NoFilesException ex) { + logger.log(Level.SEVERE, "NoFilesException during addFileMetadata:{0}", ex); + return error(Response.Status.BAD_REQUEST, "NoFileException! Serious Error! See administrator!"); + + } + } + + } // end: addFileMetadataToDataset + + + + + + + + + + + + + private void msg(String m){ //System.out.println(m); logger.fine(m); diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java index 3d3848f71ea..874fab1d7f0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java @@ -95,6 +95,7 @@ public class AddReplaceFileHelper{ public static String FILE_ADD_OPERATION = "FILE_ADD_OPERATION"; + public static String FILE_ADD_WI_OPERATION = "FILE_ADD_WI_OPERATION"; public static String FILE_REPLACE_OPERATION = "FILE_REPLACE_OPERATION"; public static String FILE_REPLACE_FORCE_OPERATION = "FILE_REPLACE_FORCE_OPERATION"; @@ -273,6 +274,56 @@ public boolean runAddFileByDataset(Dataset chosenDataset, return this.runAddReplaceFile(dataset, newFileName, newFileContentType, newFileInputStream, optionalFileParams); } + + // optionalFileParams is always null, not used + /** + * + * @param chosenDataset + * 
@param newFileName + * @param newFileContentType + * @param newFileInputStream + * @param optionalFileParams + * @return + */ + public boolean runAddFileWIByDataset(Dataset chosenDataset, + String newFileName, + String newFileContentType, + InputStream newFileInputStream, + OptionalFileParams optionalFileParams){ + + msgt(">> runAddFileWithoutIngestByDataset"); + + initErrorHandling(); + + this.currentOperation = FILE_ADD_WI_OPERATION; + + if (!this.step_001_loadDataset(chosenDataset)){ + return false; + } + + //return this.runAddFile(this.dataset, newFileName, newFileContentType, newFileInputStream, optionalFileParams); + return this.runAddReplaceFileWI(dataset, newFileName, newFileContentType, newFileInputStream, optionalFileParams); + } + + + + + + + + + + + + + + + + + + + + /** @@ -411,6 +462,61 @@ private boolean runAddReplaceFile(Dataset dataset, return runAddReplacePhase2(); } + + + + + + /** + * Here we're going to run through the steps to ADD or REPLACE a file + * + * The difference between ADD and REPLACE (add/delete) is: + * + * oldFileId - For ADD, set to null + * oldFileId - For REPLACE, set to id of file to replace + * + * This has now been broken into Phase 1 and Phase 2 + * + * The APIs will use this method and call Phase 1 & Phase 2 consecutively + * + * The UI will call Phase 1 on initial upload and + * then run Phase 2 if the user chooses to save the changes. 
+ * + * + * @return + */ + private boolean runAddReplaceFileWI(Dataset dataset, + String newFileName, String newFileContentType, + InputStream newFileInputStream, + OptionalFileParams optionalFileParams){ + + // Run "Phase 1" - Initial ingest of file + error check + // But don't save the dataset version yet + // + boolean phase1Success = runAddReplacePhase1WI(dataset, + newFileName, + newFileContentType, + newFileInputStream, + optionalFileParams + ); + + if (!phase1Success){ + return false; + } + + + return runAddReplacePhase2WI(); + + } + + + + + + + + + /** * Note: UI replace is always a "force replace" which means @@ -510,6 +616,67 @@ private boolean runAddReplacePhase1(Dataset dataset, } + /** + * For the UI: File add/replace has been broken into 2 steps + * + * Phase 1 (here): Add/replace the file and make sure there are no errors + * But don't update the Dataset (yet) + * + * @return + */ + private boolean runAddReplacePhase1WI(Dataset dataset, + String newFileName, + String newFileContentType, + InputStream newFileInputStream, + OptionalFileParams optionalFileParams){ + + if (this.hasError()){ + return false; // possible to have errors already... 
+ } + + msgt("step_001_loadDataset"); + if (!this.step_001_loadDataset(dataset)){ + return false; + } + + msgt("step_010_VerifyUserAndPermissions"); + if (!this.step_010_VerifyUserAndPermissions()){ + return false; + + } + + msgt("step_020_loadNewFile"); + if (!this.step_020_loadNewFile(newFileName, newFileContentType, newFileInputStream)){ + return false; + + } + + msgt("step_030_createNewFilesViaIngest"); + if (!this.step_030_createNewFilesViaIngest()){ + return false; + + } + + msgt("step_050_checkForConstraintViolations"); + if (!this.step_050_checkForConstraintViolations()){ + return false; + } + + msgt("step_055_loadOptionalFileParams"); + if (!this.step_055_loadOptionalFileParams(optionalFileParams)){ + return false; + } + + return true; + } + + + + + + + + public boolean runReplaceFromUI_Phase2(){ return runAddReplacePhase2(); } @@ -646,6 +813,61 @@ private boolean runAddReplacePhase2(){ } + /** + * For the UI: File add/replace has been broken into 2 steps + * + * Phase 2 (here): Phase 1 has run ok, Update the Dataset -- issue the commands! + * + * @return + */ + private boolean runAddReplacePhase2WI(){ + + if (this.hasError()){ + return false; // possible to have errors already... 
+ } + + if ((finalFileList == null)||(finalFileList.isEmpty())){ + addError(getBundleErr("phase2_called_early_no_new_files")); + return false; + } + + msgt("step_060_addFilesViaIngestService"); + if (!this.step_060_addFilesViaIngestService()){ + return false; + + } + + if (this.isFileReplaceOperation()){ + msgt("step_080_run_update_dataset_command_for_replace"); + if (!this.step_080_run_update_dataset_command_for_replace()){ + return false; + } + + }else{ + msgt("step_070_run_update_dataset_command"); + if (!this.step_070_run_update_dataset_command()){ + return false; + } + } + + msgt("step_090_notifyUser"); + if (!this.step_090_notifyUser()){ + return false; + } + + msgt("step_100_startIngestJobs"); + if (!this.step_100_startIngestJobs()){ + return false; + } + + return true; + } + + + + + + /** * Get for currentOperation * @return String From a998e5d1bc0cf74c72cfec55a9f14f91ea041080 Mon Sep 17 00:00:00 2001 From: akio-sone Date: Fri, 4 May 2018 14:50:02 -0400 Subject: [PATCH 02/46] new methods for parsing variable-level metadata --- .../iq/dataverse/util/json/JsonParser.java | 38 +++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index 5da814c5b36..24eeee47db5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -4,6 +4,7 @@ import edu.harvard.iq.dataverse.ControlledVocabularyValue; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.DataFileCategory; +import edu.harvard.iq.dataverse.DataTable; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetField; import edu.harvard.iq.dataverse.DatasetFieldConstant; @@ -25,6 +26,9 @@ import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IpAddress; import 
edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IpAddressRange; import edu.harvard.iq.dataverse.datasetutility.OptionalFileParams; +import edu.harvard.iq.dataverse.datavariable.DataVariable; +import edu.harvard.iq.dataverse.datavariable.SummaryStatistic; +import edu.harvard.iq.dataverse.datavariable.VariableCategory; import edu.harvard.iq.dataverse.harvest.client.HarvestingClient; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.workflow.Workflow; @@ -458,6 +462,40 @@ public DataFile parseDataFile(JsonObject datafileJson) { return dataFile; } + + + + public List parseDataTables(JsonArray dataTablesJson){ + List dataTables = new LinkedList<>(); + if (dataTablesJson !=null){ + for (JsonObject dataTableJson : dataTablesJson.getValuesAs(JsonObject.class)){ + // capture scalar items + + // call the method for pasring dataVariables array + } + } + return dataTables; + } + + + public List parseDataVariables(JsonArray dataVariables){ + + + return null; + } + + + public List parseSummaryStatistics(JsonObject summaryStatisticsJson){ + return null; + } + + + public List parseVariableCategory(JsonArray variableCategoriesJson){ + return null; + } + + + /** * Special processing for GeographicCoverage compound field: * Handle parsing exceptions caused by invalid controlled vocabulary in the "country" field by From d319f7595407ec8b07cdc11335ed84a7758d7d4e Mon Sep 17 00:00:00 2001 From: akio-sone Date: Fri, 4 May 2018 15:59:32 -0400 Subject: [PATCH 03/46] working on parsing methods --- .../iq/dataverse/util/json/JsonParser.java | 51 ++++++++++++++++++- 1 file changed, 50 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index 24eeee47db5..ce0239c03d3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ 
-421,6 +421,17 @@ public DataFile parseDataFile(JsonObject datafileJson) { contentType = "application/octet-stream"; } String storageIdentifier = datafileJson.getString("storageIdentifier", " "); + + // available items + // filename + // filesize + // originalFileFormat + // originalFormatLabel + // UNF + // md5 + + + JsonObject checksum = datafileJson.getJsonObject("checksum"); if (checksum != null) { // newer style that allows for SHA-1 rather than MD5 @@ -469,8 +480,17 @@ public List parseDataTables(JsonArray dataTablesJson){ List dataTables = new LinkedList<>(); if (dataTablesJson !=null){ for (JsonObject dataTableJson : dataTablesJson.getValuesAs(JsonObject.class)){ + + // capture scalar items + // varQuantity + // dataTableJson.getString("varQuantity", null) + + // caseQuantity + // UNF + + // // call the method for pasring dataVariables array } } @@ -479,6 +499,15 @@ public List parseDataTables(JsonArray dataTablesJson){ public List parseDataVariables(JsonArray dataVariables){ + // capture scalar items + // name + // label + // weighted + // variableIntervalType + // variableFormatType + // orderedFactor + // fileOrder + // UNF return null; @@ -486,11 +515,31 @@ public List parseDataVariables(JsonArray dataVariables){ public List parseSummaryStatistics(JsonObject summaryStatisticsJson){ + // mean + + // medn + + // mode + + // vald + + // invd + + // min + + // max + + // stdev return null; } - public List parseVariableCategory(JsonArray variableCategoriesJson){ + public List parseVariableCategories(JsonArray variableCategoriesJson){ + + // label + + // value + return null; } From eddcb47ac2152d731201ca0a87a02ec0e0fd763b Mon Sep 17 00:00:00 2001 From: akio-sone Date: Sun, 6 May 2018 22:07:51 -0400 Subject: [PATCH 04/46] variable-level methods were coded --- .../iq/dataverse/util/json/JsonParser.java | 186 ++++++++++++++---- 1 file changed, 143 insertions(+), 43 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java 
b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index ce0239c03d3..8d0baadff07 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -48,11 +48,13 @@ import java.util.logging.Logger; import javax.json.Json; import javax.json.JsonArray; +import javax.json.JsonNumber; import javax.json.JsonObject; import javax.json.JsonReader; import javax.json.JsonString; import javax.json.JsonValue; import javax.json.JsonValue.ValueType; +import org.apache.commons.lang.StringUtils; /** * Parses JSON objects into domain objects. @@ -424,12 +426,17 @@ public DataFile parseDataFile(JsonObject datafileJson) { // available items // filename + String filename = datafileJson.getString("filename", null); // filesize + long filesize = datafileJson.getJsonNumber("filesize").longValue(); // originalFileFormat + String originalFileFormat = datafileJson.getString("originalFileFormat", null); // originalFormatLabel + String originalFormatLabel = datafileJson.getString("originalFormatLabel", null); // UNF + String UNF = datafileJson.getString("UNF", null); // md5 - + String MD5 = datafileJson.getString("md5", null); JsonObject checksum = datafileJson.getJsonObject("checksum"); @@ -466,10 +473,21 @@ public DataFile parseDataFile(JsonObject datafileJson) { } // TODO: + // the UNF of DataFile is called via DataTable and therefor no set method // unf (if available)... etc.? 
dataFile.setContentType(contentType); dataFile.setStorageIdentifier(storageIdentifier); + dataFile.setFilesize(filesize); + + // parse DataTable + JsonArray dataTablesJson = datafileJson.getJsonArray("dataTables"); + if ((dataTablesJson != null ) && (!dataTablesJson.isEmpty())){ + // get parsing results of a DataTable + List dataTables = parseDataTables(dataTablesJson); + dataFile.setDataTables(dataTables); + dataFile.setDataTable(dataTables.get(0)); + } return dataFile; } @@ -478,69 +496,151 @@ public DataFile parseDataFile(JsonObject datafileJson) { public List parseDataTables(JsonArray dataTablesJson){ List dataTables = new LinkedList<>(); - if (dataTablesJson !=null){ + if ((dataTablesJson !=null) && (!dataTablesJson.isEmpty())){ for (JsonObject dataTableJson : dataTablesJson.getValuesAs(JsonObject.class)){ - + DataTable dataTable = new DataTable(); // capture scalar items // varQuantity - // dataTableJson.getString("varQuantity", null) - + long varQuantity = dataTableJson.getJsonNumber("varQuantity").longValue(); + dataTable.setVarQuantity(varQuantity); // caseQuantity - + long caseQuantity = dataTableJson.getJsonNumber("varQuantity").longValue(); + dataTable.setCaseQuantity(caseQuantity); // UNF - - // + String UNF = dataTableJson.getString("UNF", null); + dataTable.setUnf(UNF); // call the method for pasring dataVariables array + List dataVariables = parseDataVariables(dataTableJson.getJsonArray("dataVariables")); + dataTable.setDataVariables(dataVariables); + dataTables.add(dataTable); } } return dataTables; } - public List parseDataVariables(JsonArray dataVariables){ - // capture scalar items - // name - // label - // weighted - // variableIntervalType - // variableFormatType - // orderedFactor - // fileOrder - // UNF - - - return null; + public List parseDataVariables(JsonArray dataVariablesJson){ + List dataVariables = new LinkedList<>(); + if ((dataVariablesJson != null) && (!dataVariablesJson.isEmpty())) { + for (JsonObject dataVariableJson: 
dataVariablesJson.getValuesAs(JsonObject.class)){ + DataVariable dataVariable = new DataVariable(); + // capture scalar itemse. + // name + dataVariable.setName(dataVariableJson.getString("name", null)); + // label + dataVariable.setLabel(dataVariableJson.getString("label", null)); + // weighted + dataVariable.setWeighted(dataVariableJson.getBoolean("weighted", false)); + // variableIntervalType + dataVariable.setInterval(DataVariable.VariableInterval.valueOf(dataVariableJson.getString("variableIntervalType", null))); + // variableFormatType + dataVariable.setType(DataVariable.VariableType.valueOf(dataVariableJson.getString("variableFormatType", null))); + // orderedFactor + dataVariable.setOrderedCategorical(dataVariableJson.getBoolean("orderedFactor", false)); + // fileOrder + dataVariable.setFileOrder(dataVariableJson.getInt("fileOrder")); + + // summaryStatistics + dataVariable.setSummaryStatistics(parseSummaryStatistics(dataVariableJson.getJsonObject("summaryStatistics"))); + // variableCategories + dataVariable.setCategories(parseVariableCategories(dataVariableJson.getJsonArray("variableCategories"))); + + // UNF + dataVariable.setUnf(dataVariableJson.getString("UNF", null)); + } + } + return dataVariables; } public List parseSummaryStatistics(JsonObject summaryStatisticsJson){ - // mean - - // medn - - // mode - - // vald - - // invd - - // min - - // max - - // stdev - return null; + List summaryStatistics = new LinkedList<>(); + if (summaryStatisticsJson !=null){ + // mean + String meanjsn = summaryStatisticsJson.getString("mean", null); + if (StringUtils.isNotBlank(meanjsn)){ + SummaryStatistic mean = new SummaryStatistic(); + mean.setType(SummaryStatistic.SummaryStatisticType.MEAN); + mean.setValue(meanjsn); + summaryStatistics.add(mean); + } + // medn + String mednjsn = summaryStatisticsJson.getString("medn", null); + if (StringUtils.isNotBlank(mednjsn)){ + SummaryStatistic medn = new SummaryStatistic(); + 
medn.setType(SummaryStatistic.SummaryStatisticType.MEDN); + medn.setValue(mednjsn); + summaryStatistics.add(medn); + } + // mode + String modejsn = summaryStatisticsJson.getString("mode", null); + if (StringUtils.isNotBlank(modejsn)){ + SummaryStatistic mode = new SummaryStatistic(); + mode.setType(SummaryStatistic.SummaryStatisticType.MODE); + mode.setValue(modejsn); + summaryStatistics.add(mode); + } + // vald + String valdjsn = summaryStatisticsJson.getString("vald", null); + if (StringUtils.isNotBlank(valdjsn)){ + SummaryStatistic vald = new SummaryStatistic(); + vald.setType(SummaryStatistic.SummaryStatisticType.VALD); + vald.setValue(valdjsn); + summaryStatistics.add(vald); + } + // invd + String invdjsn = summaryStatisticsJson.getString("invd", null); + if (StringUtils.isNotBlank(invdjsn)){ + SummaryStatistic invd = new SummaryStatistic(); + invd.setType(SummaryStatistic.SummaryStatisticType.INVD); + invd.setValue(invdjsn); + summaryStatistics.add(invd); + } + // min + String minjsn = summaryStatisticsJson.getString("min", null); + if (StringUtils.isNotBlank(minjsn)){ + SummaryStatistic min = new SummaryStatistic(); + min.setType(SummaryStatistic.SummaryStatisticType.MIN); + min.setValue(minjsn); + summaryStatistics.add(min); + } + // max + String maxjsn = summaryStatisticsJson.getString("max", null); + if (StringUtils.isNotBlank(maxjsn)){ + SummaryStatistic max = new SummaryStatistic(); + max.setType(SummaryStatistic.SummaryStatisticType.MAX); + max.setValue(maxjsn); + summaryStatistics.add(max); + } + // stdev + String stdevjsn = summaryStatisticsJson.getString("stdev", null); + if (StringUtils.isNotBlank(stdevjsn)){ + SummaryStatistic stdev = new SummaryStatistic(); + stdev.setType(SummaryStatistic.SummaryStatisticType.STDEV); + stdev.setValue(stdevjsn); + summaryStatistics.add(stdev); + } + } + return summaryStatistics; } public List parseVariableCategories(JsonArray variableCategoriesJson){ - - // label - - // value - - return null; + List 
variableCategories = new LinkedList<>(); + if ((variableCategoriesJson != null) && (!variableCategoriesJson.isEmpty())){ + for (JsonObject variableCategoryJson : variableCategoriesJson.getValuesAs(JsonObject.class)){ + VariableCategory vc = new VariableCategory(); + // label + String label = variableCategoryJson.getString("label", ""); + vc.setLabel(label); + // value + String value = variableCategoryJson.getString("value", ""); + vc.setValue(value); + variableCategories.add(vc); + } + } + return variableCategories; } From ef5ce657f89cea1f73503e86e11dc5b8ce684070 Mon Sep 17 00:00:00 2001 From: donsizemore Date: Mon, 7 May 2018 10:04:58 -0400 Subject: [PATCH 05/46] adding dummy file to test jenkins workflow --- tests/test.txt | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 tests/test.txt diff --git a/tests/test.txt b/tests/test.txt new file mode 100644 index 00000000000..e69de29bb2d From 1e7a055635aa2d7030cb75d34b2c3f7c76c30407 Mon Sep 17 00:00:00 2001 From: donsizemore Date: Mon, 7 May 2018 10:07:47 -0400 Subject: [PATCH 06/46] adding dummy file to test jenkins workflow --- tests/test.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test.txt b/tests/test.txt index e69de29bb2d..9daeafb9864 100644 --- a/tests/test.txt +++ b/tests/test.txt @@ -0,0 +1 @@ +test From 1ebb4ce702c743d272abd915f3f28d1ae439041d Mon Sep 17 00:00:00 2001 From: donsizemore Date: Mon, 7 May 2018 10:23:18 -0400 Subject: [PATCH 07/46] adding dummy file to test jenkins workflow --- tests/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test.txt b/tests/test.txt index 9daeafb9864..b33c5606734 100644 --- a/tests/test.txt +++ b/tests/test.txt @@ -1 +1 @@ -test +test test From d13e221188109a26e8b8887e8941e9912a8dbd94 Mon Sep 17 00:00:00 2001 From: akio-sone Date: Mon, 7 May 2018 10:42:04 -0400 Subject: [PATCH 08/46] add TRSA-related ascii doc --- doc/Architecture/TRSA.adoc | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 
100644 doc/Architecture/TRSA.adoc diff --git a/doc/Architecture/TRSA.adoc b/doc/Architecture/TRSA.adoc new file mode 100644 index 00000000000..e69de29bb2d From 0fc629f10e8cd4aa23100418d7366b44562d18b2 Mon Sep 17 00:00:00 2001 From: donsizemore Date: Mon, 7 May 2018 13:56:25 -0400 Subject: [PATCH 09/46] re-enabling unit tests --- tests/test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test.txt b/tests/test.txt index b33c5606734..d49aab150ca 100644 --- a/tests/test.txt +++ b/tests/test.txt @@ -1 +1 @@ -test test +test test test From 680ee37b54387b3f37c310a448e4abfd4625eb85 Mon Sep 17 00:00:00 2001 From: akio-sone Date: Mon, 7 May 2018 14:11:07 -0400 Subject: [PATCH 10/46] filesize-null case handling --- .../harvard/iq/dataverse/util/json/JsonParser.java | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index 8d0baadff07..006258d8317 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -428,7 +428,11 @@ public DataFile parseDataFile(JsonObject datafileJson) { // filename String filename = datafileJson.getString("filename", null); // filesize - long filesize = datafileJson.getJsonNumber("filesize").longValue(); + JsonNumber filesizejsn = datafileJson.getJsonNumber("filesize"); + Long filesize = null; + if (filesizejsn != null){ + filesize = datafileJson.getJsonNumber("filesize").longValue(); + } // originalFileFormat String originalFileFormat = datafileJson.getString("originalFileFormat", null); // originalFormatLabel @@ -478,8 +482,9 @@ public DataFile parseDataFile(JsonObject datafileJson) { dataFile.setContentType(contentType); dataFile.setStorageIdentifier(storageIdentifier); - dataFile.setFilesize(filesize); - + if (filesize != null){ + dataFile.setFilesize(filesize); + } // 
parse DataTable JsonArray dataTablesJson = datafileJson.getJsonArray("dataTables"); if ((dataTablesJson != null ) && (!dataTablesJson.isEmpty())){ From 2b5a1225b42cf1caba85e18abfeb952171c6754a Mon Sep 17 00:00:00 2001 From: akio-sone Date: Mon, 7 May 2018 23:02:31 -0400 Subject: [PATCH 11/46] add methods for printing variable-level metadata --- .../iq/dataverse/util/json/JsonPrinter.java | 76 +++++++++++++++++++ 1 file changed, 76 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 0119e645243..d1e3993f265 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -4,6 +4,7 @@ import edu.harvard.iq.dataverse.ControlledVocabularyValue; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.DataFileTag; +import edu.harvard.iq.dataverse.DataTable; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetDistributor; import edu.harvard.iq.dataverse.DatasetFieldType; @@ -32,6 +33,9 @@ import edu.harvard.iq.dataverse.authorization.providers.AuthenticationProviderRow; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; +import edu.harvard.iq.dataverse.datavariable.DataVariable; +import edu.harvard.iq.dataverse.datavariable.SummaryStatistic; +import edu.harvard.iq.dataverse.datavariable.VariableCategory; import edu.harvard.iq.dataverse.privateurl.PrivateUrl; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.DatasetFieldWalker; @@ -584,9 +588,81 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata) { .add("md5", getMd5IfItExists(df.getChecksumType(), df.getChecksumValue())) .add("checksum", getChecksumTypeAndValue(df.getChecksumType(), df.getChecksumValue())) 
.add("tabularTags", getTabularFileTags(df)) + .add("dataTables", !CollectionUtils.isEmpty(df.getDataTables()) ? JsonPrinter.jsonDT(df.getDataTables()) : null) ; } + + public static JsonArrayBuilder jsonDT(List ldt) { + JsonArrayBuilder ldtArr = Json.createArrayBuilder(); + for(DataTable dt: ldt){ + ldtArr.add(jsonObjectBuilder().add("dataTable", JsonPrinter.json(dt))); + } + return ldtArr; + } + + public static JsonObjectBuilder json(DataTable dt) { + return jsonObjectBuilder() + .add("varQuantity", dt.getVarQuantity()) + .add("caseQuantity", dt.getCaseQuantity()) + .add("UNF", dt.getUnf()) + .add("dataVariables", JsonPrinter.jsonDV(dt.getDataVariables())) + ; + } + + public static JsonArrayBuilder jsonDV(List dvl) { + JsonArrayBuilder varArr = Json.createArrayBuilder(); + for (DataVariable dv: dvl){ + varArr.add(JsonPrinter.json(dv)); + } + return varArr; + } + + // TODO: add sumstat and variable categories, check formats + public static JsonObjectBuilder json(DataVariable dv) { + return jsonObjectBuilder() + .add("name", dv.getName()) + .add("label", dv.getLabel()) + .add("weighted", dv.isWeighted()) + .add("variableIntervalType", dv.getIntervalLabel()) + .add("variableFormatType", dv.getType().name()) // varFormat + .add("formatCategory", dv.getFormatCategory()) + .add("orderedFactor", dv.isOrderedCategorical()) + .add("fileOrder", dv.getFileOrder()) + .add("UNF",dv.getUnf()) + .add("summaryStatistics", JsonPrinter.jsonSumStat(dv.getSummaryStatistics())) + .add("variableCategories", JsonPrinter.jsonCatStat(dv.getCategories())) + ; + } + + public static JsonObjectBuilder jsonSumStat(Collection sumStat){ + //JsonArrayBuilder sumStatArr = Json.createArrayBuilder(); + JsonObjectBuilder sumStatObj = Json.createObjectBuilder(); + for (SummaryStatistic stat: sumStat){ + sumStatObj.add(stat.getTypeLabel(), stat.getValue()); + } + return sumStatObj; + } + + + public static JsonArrayBuilder jsonCatStat(Collection catStat){ + JsonArrayBuilder catArr = 
Json.createArrayBuilder(); + + for (VariableCategory stat: catStat){ + JsonObjectBuilder catStatObj = Json.createObjectBuilder(); + catStatObj.add("label", stat.getLabel()) + .add("value", stat.getValue()) + //.add("frequency", stat.getFrequency()) // frequency is not calculated + ; + catArr.add(catStatObj); + } + return catArr; + } + + + + + public static String format(Date d) { return (d == null) ? null : Util.getDateTimeFormat().format(d); } From f73f4c20c20ef23b3baaee691e0d2a7dfaad7771 Mon Sep 17 00:00:00 2001 From: akio-sone Date: Tue, 8 May 2018 08:52:38 -0400 Subject: [PATCH 12/46] bug-fix: missing package name --- .../java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index d1e3993f265..02734bfc789 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -588,7 +588,7 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata) { .add("md5", getMd5IfItExists(df.getChecksumType(), df.getChecksumValue())) .add("checksum", getChecksumTypeAndValue(df.getChecksumType(), df.getChecksumValue())) .add("tabularTags", getTabularFileTags(df)) - .add("dataTables", !CollectionUtils.isEmpty(df.getDataTables()) ? JsonPrinter.jsonDT(df.getDataTables()) : null) + .add("dataTables", !org.apache.commons.collections.CollectionUtils.isEmpty(df.getDataTables()) ? 
JsonPrinter.jsonDT(df.getDataTables()) : null) ; } From 2de1ef663b3451e78f2e0a31e545c2b7d3849268 Mon Sep 17 00:00:00 2001 From: akio-sone Date: Tue, 8 May 2018 14:29:04 -0400 Subject: [PATCH 13/46] bug-fix: rendering empty sumstat and varcat objects --- .../edu/harvard/iq/dataverse/util/json/JsonPrinter.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 02734bfc789..2e29609180f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -588,7 +588,7 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata) { .add("md5", getMd5IfItExists(df.getChecksumType(), df.getChecksumValue())) .add("checksum", getChecksumTypeAndValue(df.getChecksumType(), df.getChecksumValue())) .add("tabularTags", getTabularFileTags(df)) - .add("dataTables", !org.apache.commons.collections.CollectionUtils.isEmpty(df.getDataTables()) ? JsonPrinter.jsonDT(df.getDataTables()) : null) + .add("dataTables", org.apache.commons.collections.CollectionUtils.isNotEmpty(df.getDataTables()) ? JsonPrinter.jsonDT(df.getDataTables()) : null) ; } @@ -630,8 +630,8 @@ public static JsonObjectBuilder json(DataVariable dv) { .add("orderedFactor", dv.isOrderedCategorical()) .add("fileOrder", dv.getFileOrder()) .add("UNF",dv.getUnf()) - .add("summaryStatistics", JsonPrinter.jsonSumStat(dv.getSummaryStatistics())) - .add("variableCategories", JsonPrinter.jsonCatStat(dv.getCategories())) + .add("summaryStatistics", org.apache.commons.collections.CollectionUtils.isNotEmpty(dv.getSummaryStatistics()) ? JsonPrinter.jsonSumStat(dv.getSummaryStatistics()) : null) + .add("variableCategories", org.apache.commons.collections.CollectionUtils.isNotEmpty(dv.getCategories()) ? 
JsonPrinter.jsonCatStat(dv.getCategories()) : null) ; } From 91ff3a42da1584811c743d07ba3ad754d97b0c80 Mon Sep 17 00:00:00 2001 From: akio-sone Date: Thu, 10 May 2018 16:03:20 -0400 Subject: [PATCH 14/46] add a comment --- src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index cf0a28b1c19..e2de1bdfbf4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -878,6 +878,11 @@ public Response addFileToDataset(@PathParam("id") String idSupplied, } // end: addFileToDataset + /* + * Note: + * The following method may be combined with the above addFiletoDataset method + * if there is a simple way to switch on/off calling the ingest process. + */ /* * The following API is based on the above addFileToDataset From 38417f41818140bd57e57cb9b792eb521722e7d2 Mon Sep 17 00:00:00 2001 From: akio-sone Date: Wed, 16 May 2018 15:57:57 -0400 Subject: [PATCH 15/46] add a new API related methods --- .../harvard/iq/dataverse/api/BatchImport.java | 52 +++++++ .../harvard/iq/dataverse/api/Datasets.java | 2 +- .../api/imports/ImportServiceBean.java | 131 ++++++++++++++++++ .../datasetutility/AddReplaceFileHelper.java | 18 +-- 4 files changed, 193 insertions(+), 10 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java b/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java index 7d29e9e2334..08821f28333 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java @@ -13,15 +13,19 @@ import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import java.io.IOException; +import java.io.InputStream; import java.io.PrintWriter; import javax.ejb.EJB; import 
javax.ejb.Stateless; import javax.json.JsonObjectBuilder; +import javax.ws.rs.Consumes; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.QueryParam; +import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; +import org.glassfish.jersey.media.multipart.FormDataParam; @Stateless @Path("batch") @@ -104,6 +108,54 @@ public Response postImport(String body, @QueryParam("dv") String parentIdtf, @Qu } } + /** + * Import a new Dataset with DDI xml data posted in the request + * + * + * @param parentIdtf the dataverse to import into (id or alias) + * @param apiKey user's api key + * @param fileInputStream InputStream of the uploaded Json File + * @return import status (including id of the dataset created) + */ + + @POST + @Path("importwoi") + @Consumes({MediaType.MULTIPART_FORM_DATA}) + public Response postImportWoI( + @FormDataParam("dv") String parentIdtf, + @FormDataParam("key") String apiKey, + @FormDataParam("file") InputStream fileInputStream) { + + DataverseRequest dataverseRequest; + + try { + dataverseRequest = createDataverseRequest(findAuthenticatedUserOrDie()); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + + if (parentIdtf == null) { + parentIdtf = "root"; + } + + Dataverse owner = findDataverse(parentIdtf); + + if (owner == null) { + return error(Response.Status.NOT_FOUND, "Can't find dataverse with identifier='" + parentIdtf + "'"); + } + + try { + PrintWriter cleanupLog = null; // Cleanup log isn't needed for ImportType == NEW. We don't do any data cleanup in this mode. 
+ + JsonObjectBuilder status = importService.doImportWoI(dataverseRequest, owner, fileInputStream, ImportType.NEW, cleanupLog); + return this.ok(status); + } catch (ImportException | IOException e) { + return this.error(Response.Status.BAD_REQUEST, e.getMessage()); + } + } + + + /** * Import single or multiple datasets that are in the local filesystem * diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index e2de1bdfbf4..ae61e8db8c7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -985,7 +985,7 @@ public Response addFileMetadataToDataset(@PathParam("id") String idSupplied, //------------------- // (4) Run "runAddFileByDatasetId" //------------------- - addFileHelper.runAddFileWIByDataset(dataset, + addFileHelper.runAddFileWOIByDataset(dataset, newFilename, newFileContentType, fileInputStream, diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java index 40e8a4a9f71..039cfbfa276 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java @@ -40,6 +40,7 @@ import java.io.File; import java.io.FileOutputStream; import java.io.IOException; +import java.io.InputStream; import java.io.PrintWriter; import java.io.StringReader; import java.nio.file.Files; @@ -526,6 +527,136 @@ public JsonObjectBuilder doImport(DataverseRequest dataverseRequest, Dataverse o return Json.createObjectBuilder().add("message", status); } +// + /** + creates a new Dataset with provided Json file without invoking ingest requests. 
+ + */ + public JsonObjectBuilder doImportWoI(DataverseRequest dataverseRequest, Dataverse owner, InputStream fileInputStream, ImportType importType, PrintWriter cleanupLog) throws ImportException, IOException { + String status = ""; + Long createdId = null; + String fileName = ""; + + try (JsonReader jsonReader = Json.createReader(fileInputStream);) { + JsonObject obj = jsonReader.readObject(); + + JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService); + parser.setLenient(false); + Dataset ds = parser.parseDataset(obj); + + // For ImportType.NEW, if the user supplies a global identifier, and it's not a protocol + // we support, it will be rejected. + if (importType.equals(ImportType.NEW)) { + if (ds.getGlobalId() != null && !ds.getProtocol().equals(settingsService.getValueForKey(SettingsServiceBean.Key.Protocol, ""))) { + throw new ImportException("Could not register id " + ds.getGlobalId() + ", protocol not supported"); + } + } + + ds.setOwner(owner); + ds.getLatestVersion().setDatasetFields(ds.getLatestVersion().initDatasetFields()); + + // Check data against required contraints + List> violations = ds.getVersions().get(0).validateRequired(); + if (!violations.isEmpty()) { + if (importType.equals(ImportType.MIGRATION) || importType.equals(ImportType.HARVEST)) { + // For migration and harvest, add NA for missing required values + for (ConstraintViolation v : violations) { + DatasetField f = v.getRootBean(); + f.setSingleValue(DatasetField.NA_VALUE); + } + } else { + // when importing a new dataset, the import will fail + // if required values are missing. 
+ String errMsg = "Error importing data:"; + for (ConstraintViolation v : violations) { + errMsg += " " + v.getMessage(); + } + throw new ImportException(errMsg); + } + } + + // Check data against validation constraints + // If we are migrating and "scrub migration data" is true we attempt to fix invalid data + // if the fix fails stop processing of this file by throwing exception + Set invalidViolations = ds.getVersions().get(0).validate(); + ValidatorFactory factory = Validation.buildDefaultValidatorFactory(); + Validator validator = factory.getValidator(); + if (!invalidViolations.isEmpty()) { + for (ConstraintViolation v : invalidViolations) { + DatasetFieldValue f = v.getRootBean(); + boolean fixed = false; + boolean converted = false; + if ((importType.equals(ImportType.MIGRATION) || importType.equals(ImportType.HARVEST)) && settingsService.isTrueForKey(SettingsServiceBean.Key.ScrubMigrationData, false)) { + fixed = processMigrationValidationError(f, cleanupLog, fileName); + converted = true; + if (fixed) { + Set> scrubbedViolations = validator.validate(f); + if (!scrubbedViolations.isEmpty()) { + fixed = false; + } + } + } + if (!fixed) { + if (importType.equals(ImportType.HARVEST)) { + String msg = "Data modified - File: " + fileName + "; Field: " + f.getDatasetField().getDatasetFieldType().getDisplayName() + "; " + + "Invalid value: '" + f.getValue() + "'" + " Converted Value:'" + DatasetField.NA_VALUE + "'"; + cleanupLog.println(msg); + f.setValue(DatasetField.NA_VALUE); + + } else { + String msg = " Validation error for "; + if (converted) { + msg += "converted "; + } + msg += "value: " + f.getValue() + ", " + f.getValidationMessage(); + throw new ImportException(msg); + } + } + } + } + + Dataset existingDs = datasetService.findByGlobalId(ds.getGlobalId()); + + if (existingDs != null) { + if (importType.equals(ImportType.HARVEST)) { + // For harvested datasets, there should always only be one version. 
+ // We will replace the current version with the imported version. + if (existingDs.getVersions().size() != 1) { + throw new ImportException("Error importing Harvested Dataset, existing dataset has " + existingDs.getVersions().size() + " versions"); + } + engineSvc.submit(new DestroyDatasetCommand(existingDs, dataverseRequest)); + Dataset managedDs = engineSvc.submit(new CreateDatasetCommand(ds, dataverseRequest, false, importType)); + status = " updated dataset, id=" + managedDs.getId() + "."; + } else { + // If we are adding a new version to an existing dataset, + // check that the version number isn't already in the dataset + for (DatasetVersion dsv : existingDs.getVersions()) { + if (dsv.getVersionNumber().equals(ds.getLatestVersion().getVersionNumber())) { + throw new ImportException("VersionNumber " + ds.getLatestVersion().getVersionNumber() + " already exists in dataset " + existingDs.getGlobalId()); + } + } + DatasetVersion dsv = engineSvc.submit(new CreateDatasetVersionCommand(dataverseRequest, existingDs, ds.getVersions().get(0))); + status = " created datasetVersion, for dataset " + dsv.getDataset().getGlobalId(); + createdId = dsv.getId(); + } + + } else { + Dataset managedDs = engineSvc.submit(new CreateDatasetCommand(ds, dataverseRequest, false, importType)); + status = " created dataset, id=" + managedDs.getId() + "."; + createdId = managedDs.getId(); + } + + } catch (JsonParseException ex) { + logger.log(Level.INFO, "Error parsing datasetVersion: {0}", ex.getMessage()); + throw new ImportException("Error parsing datasetVersion: " + ex.getMessage(), ex); + } catch (CommandException ex) { + logger.log(Level.INFO, "Error excuting Create dataset command: {0}", ex.getMessage()); + throw new ImportException("Error excuting dataverse command: " + ex.getMessage(), ex); + } + + return Json.createObjectBuilder().add("message", status); + } + private boolean processMigrationValidationError(DatasetFieldValue f, PrintWriter cleanupLog, String fileName) { if 
(f.getDatasetField().getDatasetFieldType().getName().equals(DatasetFieldConstant.datasetContactEmail)) { //Try to convert it based on the errors we've seen diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java index 874fab1d7f0..ca25a368d56 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java @@ -95,7 +95,7 @@ public class AddReplaceFileHelper{ public static String FILE_ADD_OPERATION = "FILE_ADD_OPERATION"; - public static String FILE_ADD_WI_OPERATION = "FILE_ADD_WI_OPERATION"; + public static String FILE_ADD_WOI_OPERATION = "FILE_ADD_WOI_OPERATION"; public static String FILE_REPLACE_OPERATION = "FILE_REPLACE_OPERATION"; public static String FILE_REPLACE_FORCE_OPERATION = "FILE_REPLACE_FORCE_OPERATION"; @@ -285,7 +285,7 @@ public boolean runAddFileByDataset(Dataset chosenDataset, * @param optionalFileParams * @return */ - public boolean runAddFileWIByDataset(Dataset chosenDataset, + public boolean runAddFileWOIByDataset(Dataset chosenDataset, String newFileName, String newFileContentType, InputStream newFileInputStream, @@ -295,14 +295,14 @@ public boolean runAddFileWIByDataset(Dataset chosenDataset, initErrorHandling(); - this.currentOperation = FILE_ADD_WI_OPERATION; + this.currentOperation = FILE_ADD_WOI_OPERATION; if (!this.step_001_loadDataset(chosenDataset)){ return false; } //return this.runAddFile(this.dataset, newFileName, newFileContentType, newFileInputStream, optionalFileParams); - return this.runAddReplaceFileWI(dataset, newFileName, newFileContentType, newFileInputStream, optionalFileParams); + return this.runAddReplaceFileWOI(dataset, newFileName, newFileContentType, newFileInputStream, optionalFileParams); } @@ -485,7 +485,7 @@ private boolean runAddReplaceFile(Dataset dataset, * * @return */ - private 
boolean runAddReplaceFileWI(Dataset dataset, + private boolean runAddReplaceFileWOI(Dataset dataset, String newFileName, String newFileContentType, InputStream newFileInputStream, OptionalFileParams optionalFileParams){ @@ -493,7 +493,7 @@ private boolean runAddReplaceFileWI(Dataset dataset, // Run "Phase 1" - Initial ingest of file + error check // But don't save the dataset version yet // - boolean phase1Success = runAddReplacePhase1WI(dataset, + boolean phase1Success = runAddReplacePhase1WOI(dataset, newFileName, newFileContentType, newFileInputStream, @@ -505,7 +505,7 @@ private boolean runAddReplaceFileWI(Dataset dataset, } - return runAddReplacePhase2WI(); + return runAddReplacePhase2WOI(); } @@ -624,7 +624,7 @@ private boolean runAddReplacePhase1(Dataset dataset, * * @return */ - private boolean runAddReplacePhase1WI(Dataset dataset, + private boolean runAddReplacePhase1WOI(Dataset dataset, String newFileName, String newFileContentType, InputStream newFileInputStream, @@ -820,7 +820,7 @@ private boolean runAddReplacePhase2(){ * * @return */ - private boolean runAddReplacePhase2WI(){ + private boolean runAddReplacePhase2WOI(){ if (this.hasError()){ return false; // possible to have errors already... 
From 5c6fe4d986538d972c303fad09b6473bb8b6499d Mon Sep 17 00:00:00 2001 From: akio-sone Date: Thu, 17 May 2018 12:45:17 -0400 Subject: [PATCH 16/46] logging lines are added --- pom.xml | 9 ++++ .../harvard/iq/dataverse/api/BatchImport.java | 20 ++++++-- .../api/imports/ImportServiceBean.java | 49 +++++++++++++++++-- 3 files changed, 70 insertions(+), 8 deletions(-) diff --git a/pom.xml b/pom.xml index 361ae85df7f..f0612bef3cb 100644 --- a/pom.xml +++ b/pom.xml @@ -461,6 +461,15 @@ unirest-java 1.4.9 + + + com.thoughtworks.xstream + xstream + 1.4.10 + + + + diff --git a/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java b/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java index 08821f28333..6a21813733a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java @@ -1,5 +1,7 @@ package edu.harvard.iq.dataverse.api; +import com.thoughtworks.xstream.XStream; +import com.thoughtworks.xstream.io.json.JsonHierarchicalStreamDriver; import edu.harvard.iq.dataverse.api.imports.ImportServiceBean; import edu.harvard.iq.dataverse.DatasetFieldServiceBean; import edu.harvard.iq.dataverse.DatasetServiceBean; @@ -15,6 +17,8 @@ import java.io.IOException; import java.io.InputStream; import java.io.PrintWriter; +import java.util.logging.Level; +import java.util.logging.Logger; import javax.ejb.EJB; import javax.ejb.Stateless; import javax.json.JsonObjectBuilder; @@ -45,7 +49,9 @@ public class BatchImport extends AbstractApiBean { ImportServiceBean importService; @EJB BatchServiceBean batchService; - + + private static final Logger logger = Logger.getLogger(BatchImport.class.getName()); + static XStream xstream = new XStream(new JsonHierarchicalStreamDriver()); /** * migrate - only needed for importing studies from old DVN installations * into Dataverse 4.0 read ddi files from the filesystem, and import them in @@ -114,6 +120,7 @@ public Response postImport(String body, @QueryParam("dv") 
String parentIdtf, @Qu * * @param parentIdtf the dataverse to import into (id or alias) * @param apiKey user's api key + * @param fileName Dataset Id string * @param fileInputStream InputStream of the uploaded Json File * @return import status (including id of the dataset created) */ @@ -124,7 +131,13 @@ public Response postImport(String body, @QueryParam("dv") String parentIdtf, @Qu public Response postImportWoI( @FormDataParam("dv") String parentIdtf, @FormDataParam("key") String apiKey, + @FormDataParam("filename") String fileName, @FormDataParam("file") InputStream fileInputStream) { + logger.log(Level.INFO, " ========= BatchImport#importwoi() is called ========="); + logger.log(Level.INFO, "datavarse Id: number or alias={0}", parentIdtf); + logger.log(Level.INFO, "api key={0}", apiKey); + logger.log(Level.INFO, "filename={0}", fileName); + DataverseRequest dataverseRequest; @@ -139,7 +152,7 @@ public Response postImportWoI( } Dataverse owner = findDataverse(parentIdtf); - + logger.log(Level.INFO, "dataverse:owner={0}", owner); if (owner == null) { return error(Response.Status.NOT_FOUND, "Can't find dataverse with identifier='" + parentIdtf + "'"); } @@ -147,7 +160,8 @@ public Response postImportWoI( try { PrintWriter cleanupLog = null; // Cleanup log isn't needed for ImportType == NEW. We don't do any data cleanup in this mode. 
- JsonObjectBuilder status = importService.doImportWoI(dataverseRequest, owner, fileInputStream, ImportType.NEW, cleanupLog); + JsonObjectBuilder status = importService.doImportWoI(dataverseRequest, owner, fileInputStream, fileName, ImportType.NEW, cleanupLog); + logger.log(Level.INFO, "returned status={0}", xstream.toXML(status)); return this.ok(status); } catch (ImportException | IOException e) { return this.error(Response.Status.BAD_REQUEST, e.getMessage()); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java index 039cfbfa276..3fa85999aa7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java @@ -7,6 +7,8 @@ import com.google.gson.Gson; import com.google.gson.GsonBuilder; +import com.thoughtworks.xstream.XStream; +import com.thoughtworks.xstream.io.json.JsonHierarchicalStreamDriver; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetField; @@ -84,7 +86,7 @@ public class ImportServiceBean { private EntityManager em; private static final Logger logger = Logger.getLogger(ImportServiceBean.class.getCanonicalName()); - + static XStream xstream = new XStream(new JsonHierarchicalStreamDriver()); @EJB protected EjbDataverseEngine engineSvc; @EJB @@ -530,12 +532,20 @@ public JsonObjectBuilder doImport(DataverseRequest dataverseRequest, Dataverse o // /** creates a new Dataset with provided Json file without invoking ingest requests. 
- + * @param dataverseRequest + * @param owner + * @param fileInputStream + * @param importType + * @param cleanupLog + * @return + * @throws edu.harvard.iq.dataverse.api.imports.ImportException + * @throws java.io.IOException */ - public JsonObjectBuilder doImportWoI(DataverseRequest dataverseRequest, Dataverse owner, InputStream fileInputStream, ImportType importType, PrintWriter cleanupLog) throws ImportException, IOException { + public JsonObjectBuilder doImportWoI(DataverseRequest dataverseRequest, Dataverse owner, InputStream fileInputStream, String fileName, ImportType importType, PrintWriter cleanupLog) throws ImportException, IOException { + + logger.log(Level.INFO, "========== ImportServiceBean#doImportWoI() is called ==========", cleanupLog); String status = ""; Long createdId = null; - String fileName = ""; try (JsonReader jsonReader = Json.createReader(fileInputStream);) { JsonObject obj = jsonReader.readObject(); @@ -543,6 +553,8 @@ public JsonObjectBuilder doImportWoI(DataverseRequest dataverseRequest, Datavers JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService); parser.setLenient(false); Dataset ds = parser.parseDataset(obj); + + logger.log(Level.INFO, "dataset={0}", xstream.toXML(ds)); // For ImportType.NEW, if the user supplies a global identifier, and it's not a protocol // we support, it will be rejected. 
@@ -554,6 +566,8 @@ public JsonObjectBuilder doImportWoI(DataverseRequest dataverseRequest, Datavers ds.setOwner(owner); ds.getLatestVersion().setDatasetFields(ds.getLatestVersion().initDatasetFields()); + + logger.log(Level.INFO, "dataset: after setting the owner={0}", xstream.toXML(ds)); // Check data against required contraints List> violations = ds.getVersions().get(0).validateRequired(); @@ -615,35 +629,60 @@ public JsonObjectBuilder doImportWoI(DataverseRequest dataverseRequest, Datavers } } + // check whether the imported dataset exists Dataset existingDs = datasetService.findByGlobalId(ds.getGlobalId()); - + if (existingDs != null) { + // case #1: the dataset exists + // case #1.1: harvesting case if (importType.equals(ImportType.HARVEST)) { + logger.log(Level.INFO, "case #1.1 harvested case"); // For harvested datasets, there should always only be one version. // We will replace the current version with the imported version. if (existingDs.getVersions().size() != 1) { throw new ImportException("Error importing Harvested Dataset, existing dataset has " + existingDs.getVersions().size() + " versions"); } + + logger.log(Level.INFO, "calling DestroyDatasetCommand"); engineSvc.submit(new DestroyDatasetCommand(existingDs, dataverseRequest)); Dataset managedDs = engineSvc.submit(new CreateDatasetCommand(ds, dataverseRequest, false, importType)); status = " updated dataset, id=" + managedDs.getId() + "."; + + logger.log(Level.INFO, "case #1.1: status={0}", status); } else { + // case #1.2 non-harvesting import // If we are adding a new version to an existing dataset, // check that the version number isn't already in the dataset + logger.log(Level.INFO, "case #1.2 non-harvesting import case"); + logger.log(Level.INFO, "datasetVersion from the uploaded data", ds.getLatestVersion().getVersionNumber()); for (DatasetVersion dsv : existingDs.getVersions()) { + logger.log(Level.INFO, "this datasetVersion={0}", dsv.getVersionNumber()); if 
(dsv.getVersionNumber().equals(ds.getLatestVersion().getVersionNumber())) { throw new ImportException("VersionNumber " + ds.getLatestVersion().getVersionNumber() + " already exists in dataset " + existingDs.getGlobalId()); } } + logger.log(Level.INFO, "existing versions do not have this one"); + + logger.log(Level.INFO, "calling CreateDatasetVersionCommand"); DatasetVersion dsv = engineSvc.submit(new CreateDatasetVersionCommand(dataverseRequest, existingDs, ds.getVersions().get(0))); + + status = " created datasetVersion, for dataset " + dsv.getDataset().getGlobalId(); + logger.log(Level.INFO, "case #1.2 status={0}", status); + createdId = dsv.getId(); + logger.log(Level.INFO, "createdId={0}", createdId); } } else { + // case #2 dataset does not exist + logger.log(Level.INFO, "case #2: dataset does not exist=> new dataset"); + logger.log(Level.INFO, "calling CreateDatasetCommand"); Dataset managedDs = engineSvc.submit(new CreateDatasetCommand(ds, dataverseRequest, false, importType)); status = " created dataset, id=" + managedDs.getId() + "."; + logger.log(Level.INFO, "case #2: new dataset: status={0}", status); createdId = managedDs.getId(); + logger.log(Level.INFO, "case #2: createdId={0}", createdId); } } catch (JsonParseException ex) { From c3da7dba80f6d617f9f1474a4b4f2df84480a4f8 Mon Sep 17 00:00:00 2001 From: akio-sone Date: Thu, 17 May 2018 15:47:57 -0400 Subject: [PATCH 17/46] typo corection and parsing-logic correction --- .../java/edu/harvard/iq/dataverse/util/json/JsonParser.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index 006258d8317..93bf5f604d5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -502,15 +502,15 @@ public DataFile parseDataFile(JsonObject datafileJson) { public List 
parseDataTables(JsonArray dataTablesJson){ List dataTables = new LinkedList<>(); if ((dataTablesJson !=null) && (!dataTablesJson.isEmpty())){ - for (JsonObject dataTableJson : dataTablesJson.getValuesAs(JsonObject.class)){ - + for (JsonObject dataTableJsonL : dataTablesJson.getValuesAs(JsonObject.class)){ + JsonObject dataTableJson = dataTableJsonL.getJsonObject("dataFile"); DataTable dataTable = new DataTable(); // capture scalar items // varQuantity long varQuantity = dataTableJson.getJsonNumber("varQuantity").longValue(); dataTable.setVarQuantity(varQuantity); // caseQuantity - long caseQuantity = dataTableJson.getJsonNumber("varQuantity").longValue(); + long caseQuantity = dataTableJson.getJsonNumber("caseQuantity").longValue(); dataTable.setCaseQuantity(caseQuantity); // UNF String UNF = dataTableJson.getString("UNF", null); From 1e6b80d5ec326d2bece4d41962905bf3a5793282 Mon Sep 17 00:00:00 2001 From: akio-sone Date: Thu, 17 May 2018 22:43:14 -0400 Subject: [PATCH 18/46] typo correction and enum handling dataFile => dataTable contin => CONTINUOUS, etc. 
--- .../iq/dataverse/util/json/JsonParser.java | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index 93bf5f604d5..e50fb93a5f5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -45,6 +45,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.logging.Level; import java.util.logging.Logger; import javax.json.Json; import javax.json.JsonArray; @@ -503,7 +504,7 @@ public List parseDataTables(JsonArray dataTablesJson){ List dataTables = new LinkedList<>(); if ((dataTablesJson !=null) && (!dataTablesJson.isEmpty())){ for (JsonObject dataTableJsonL : dataTablesJson.getValuesAs(JsonObject.class)){ - JsonObject dataTableJson = dataTableJsonL.getJsonObject("dataFile"); + JsonObject dataTableJson = dataTableJsonL.getJsonObject("dataTable"); DataTable dataTable = new DataTable(); // capture scalar items // varQuantity @@ -537,10 +538,19 @@ public List parseDataVariables(JsonArray dataVariablesJson){ dataVariable.setLabel(dataVariableJson.getString("label", null)); // weighted dataVariable.setWeighted(dataVariableJson.getBoolean("weighted", false)); - // variableIntervalType - dataVariable.setInterval(DataVariable.VariableInterval.valueOf(dataVariableJson.getString("variableIntervalType", null))); + String variableIntervalType= dataVariableJson.getString("variableIntervalType", null); + if (variableIntervalType!=null){ + String variableIntervalTypeFinal = variableIntervalType.toUpperCase(); + if (variableIntervalType.equals("contin")){ + variableIntervalTypeFinal="CONTINUOUS"; + } + dataVariable.setInterval(DataVariable.VariableInterval.valueOf(variableIntervalTypeFinal)); + } // variableFormatType - 
dataVariable.setType(DataVariable.VariableType.valueOf(dataVariableJson.getString("variableFormatType", null))); + String variableFormatType = dataVariableJson.getString("variableFormatType", null); + if (variableFormatType!=null){ + dataVariable.setType(DataVariable.VariableType.valueOf(variableFormatType)); + } // orderedFactor dataVariable.setOrderedCategorical(dataVariableJson.getBoolean("orderedFactor", false)); // fileOrder From cd0763ba7d965d5915bd0f2ce2d54af4c9c9c583 Mon Sep 17 00:00:00 2001 From: akio-sone Date: Fri, 18 May 2018 12:00:38 -0400 Subject: [PATCH 19/46] adding new api logic blocks --- .../harvard/iq/dataverse/api/BatchImport.java | 4 ++-- .../api/imports/ImportServiceBean.java | 9 +++++---- .../iq/dataverse/api/imports/ImportUtil.java | 2 +- .../command/impl/CreateDatasetCommand.java | 19 +++++++++++++++---- 4 files changed, 23 insertions(+), 11 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java b/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java index 6a21813733a..0e5704d69c0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java @@ -160,8 +160,8 @@ public Response postImportWoI( try { PrintWriter cleanupLog = null; // Cleanup log isn't needed for ImportType == NEW. We don't do any data cleanup in this mode. 
- JsonObjectBuilder status = importService.doImportWoI(dataverseRequest, owner, fileInputStream, fileName, ImportType.NEW, cleanupLog); - logger.log(Level.INFO, "returned status={0}", xstream.toXML(status)); + JsonObjectBuilder status = importService.doImportWoI(dataverseRequest, owner, fileInputStream, fileName, ImportType.IMPORT_METADATA_ONLY, cleanupLog); + logger.log(Level.INFO, "returned status={0}", status); return this.ok(status); } catch (ImportException | IOException e) { return this.error(Response.Status.BAD_REQUEST, e.getMessage()); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java index 3fa85999aa7..f63c7bc94fd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java @@ -554,20 +554,21 @@ public JsonObjectBuilder doImportWoI(DataverseRequest dataverseRequest, Datavers parser.setLenient(false); Dataset ds = parser.parseDataset(obj); - logger.log(Level.INFO, "dataset={0}", xstream.toXML(ds)); + logger.log(Level.INFO, "dataset={0}", ds); // For ImportType.NEW, if the user supplies a global identifier, and it's not a protocol // we support, it will be rejected. 
- if (importType.equals(ImportType.NEW)) { + if (importType.equals(ImportType.NEW) || importType.equals(ImportType.IMPORT_METADATA_ONLY)) { if (ds.getGlobalId() != null && !ds.getProtocol().equals(settingsService.getValueForKey(SettingsServiceBean.Key.Protocol, ""))) { throw new ImportException("Could not register id " + ds.getGlobalId() + ", protocol not supported"); } } ds.setOwner(owner); + logger.log(Level.INFO, "dataset owner alias={0}", ds.getOwner().getAlias()); ds.getLatestVersion().setDatasetFields(ds.getLatestVersion().initDatasetFields()); - logger.log(Level.INFO, "dataset: after setting the owner={0}", xstream.toXML(ds)); + logger.log(Level.INFO, "dataset version: after setting the owner={0}", ds.getLatestVersion()); // Check data against required contraints List> violations = ds.getVersions().get(0).validateRequired(); @@ -628,7 +629,7 @@ public JsonObjectBuilder doImportWoI(DataverseRequest dataverseRequest, Datavers } } } - + logger.log(Level.INFO, "check whether this dataset exists"); // check whether the imported dataset exists Dataset existingDs = datasetService.findByGlobalId(ds.getGlobalId()); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportUtil.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportUtil.java index c1165bbb7f1..5cb78b90b10 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportUtil.java @@ -10,6 +10,6 @@ * @author ellenk */ public interface ImportUtil { - public enum ImportType{ NEW, MIGRATION, HARVEST, HARVEST_WITH_FILES}; + public enum ImportType{ NEW, MIGRATION, HARVEST, HARVEST_WITH_FILES, IMPORT_METADATA_ONLY}; } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetCommand.java index 4fba6cf65d0..c278b4b71f1 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetCommand.java @@ -15,6 +15,7 @@ import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; +import edu.harvard.iq.dataverse.ingest.IngestUtil; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import java.io.IOException; import java.sql.Timestamp; @@ -93,6 +94,9 @@ public Dataset execute(CommandContext ctxt) throws CommandException { // If we are importing with the API, then we don't want to create an editable version, // just save the version is already in theDataset. DatasetVersion dsv = importType!=null? theDataset.getLatestVersion() : theDataset.getEditVersion(); + if (importType.equals(ImportType.IMPORT_METADATA_ONLY)){ + dsv = theDataset.getEditVersion(); + } // validate // @todo for now we run through an initFields method that creates empty fields for anything without a value // that way they can be checked for required @@ -167,10 +171,16 @@ by the Dataset page (in CREATE mode), it already has the persistent } logger.fine("Saving the files permanently."); - ctxt.ingest().addFiles(dsv, theDataset.getFiles()); + if (!importType.equals(ImportType.IMPORT_METADATA_ONLY)){ + ctxt.ingest().addFiles(dsv, theDataset.getFiles()); + } else { + IngestUtil.checkForDuplicateFileNamesFinal(dsv, theDataset.getFiles()); + } + + logger.log(Level.FINE,"doiProvider={0} protocol={1} importType={2} GlobalIdCreateTime=={3}", new Object[]{doiProvider, protocol, importType, theDataset.getGlobalIdCreateTime()}); // Attempt the registration if importing dataset through the API, or the app (but not harvest or migrate) - if ((importType == null || importType.equals(ImportType.NEW)) + if ((importType == null || importType.equals(ImportType.NEW) || 
importType.equals(ImportType.IMPORT_METADATA_ONLY)) && theDataset.getGlobalIdCreateTime() == null) { String doiRetString = ""; idServiceBean = IdServiceBean.getBean(ctxt); @@ -197,8 +207,9 @@ by the Dataset page (in CREATE mode), it already has the persistent logger.log(Level.FINE, "after doi {0}", formatter.format(new Date().getTime())); Dataset savedDataset = ctxt.em().merge(theDataset); logger.log(Level.FINE, "after db update {0}", formatter.format(new Date().getTime())); + // set the role to be default contributor role for its dataverse - if (importType==null || importType.equals(ImportType.NEW)) { + if (importType==null || importType.equals(ImportType.NEW) || importType.equals(ImportType.IMPORT_METADATA_ONLY)) { String privateUrlToken = null; ctxt.roles().save(new RoleAssignment(savedDataset.getOwner().getDefaultContributorRole(), getRequest().getUser(), savedDataset, privateUrlToken)); } @@ -250,7 +261,7 @@ by the Dataset page (in CREATE mode), it already has the persistent logger.log(Level.FINE, "after index {0}", formatter.format(new Date().getTime())); // if we are not migrating, assign the user to this version - if (importType==null || importType.equals(ImportType.NEW)) { + if (importType==null || importType.equals(ImportType.NEW) || importType.equals(ImportType.IMPORT_METADATA_ONLY)) { DatasetVersionUser datasetVersionDataverseUser = new DatasetVersionUser(); String id = getRequest().getUser().getIdentifier(); id = id.startsWith("@") ? 
id.substring(1) : id; From e8cd033761c4f0a8316d4a68afa1dc02e1204d13 Mon Sep 17 00:00:00 2001 From: akio-sone Date: Fri, 18 May 2018 12:24:59 -0400 Subject: [PATCH 20/46] add a comment --- doc/Architecture/TRSA.adoc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/Architecture/TRSA.adoc b/doc/Architecture/TRSA.adoc index e69de29bb2d..4928b2ff64c 100644 --- a/doc/Architecture/TRSA.adoc +++ b/doc/Architecture/TRSA.adoc @@ -0,0 +1,2 @@ +=== major changes to version 4.8.6 +* ImportType.IMPORT_METADATA_ONLY was added to ImportUtil \ No newline at end of file From fe3f2117d0d38d93da726d30cc2d88980b23c91c Mon Sep 17 00:00:00 2001 From: akio-sone Date: Fri, 18 May 2018 13:49:29 -0400 Subject: [PATCH 21/46] missing assignment: DataTable and DataFile --- .../java/edu/harvard/iq/dataverse/util/json/JsonParser.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index e50fb93a5f5..9bb630e7a86 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -493,6 +493,9 @@ public DataFile parseDataFile(JsonObject datafileJson) { List dataTables = parseDataTables(dataTablesJson); dataFile.setDataTables(dataTables); dataFile.setDataTable(dataTables.get(0)); + dataTables.get(0).setDataFile(dataFile); + dataTables.get(0).setOriginalFileFormat(originalFileFormat); + } return dataFile; From 2f52918ab2e1b64a626c2f1ed00fbf35abf74875 Mon Sep 17 00:00:00 2001 From: akio-sone Date: Fri, 18 May 2018 14:22:26 -0400 Subject: [PATCH 22/46] change to datasetversion handling --- .../dataverse/engine/command/impl/CreateDatasetCommand.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetCommand.java 
b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetCommand.java index c278b4b71f1..b56a7ac8f0f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetCommand.java @@ -94,9 +94,9 @@ public Dataset execute(CommandContext ctxt) throws CommandException { // If we are importing with the API, then we don't want to create an editable version, // just save the version is already in theDataset. DatasetVersion dsv = importType!=null? theDataset.getLatestVersion() : theDataset.getEditVersion(); - if (importType.equals(ImportType.IMPORT_METADATA_ONLY)){ - dsv = theDataset.getEditVersion(); - } +// if (importType.equals(ImportType.IMPORT_METADATA_ONLY)){ +// dsv = theDataset.getEditVersion(); +// } // validate // @todo for now we run through an initFields method that creates empty fields for anything without a value // that way they can be checked for required From fd7b42bb7eded1ce8849dce2950be15ed7ee1ae1 Mon Sep 17 00:00:00 2001 From: akio-sone Date: Fri, 18 May 2018 15:25:59 -0400 Subject: [PATCH 23/46] change to DataFile restriction setting --- .../java/edu/harvard/iq/dataverse/util/json/JsonParser.java | 3 +++ .../java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index 9bb630e7a86..3a2136afb36 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -383,11 +383,13 @@ public List parseFiles(JsonArray metadatasJson, DatasetVersion dsv if (metadatasJson != null) { for (JsonObject filemetadataJson : metadatasJson.getValuesAs(JsonObject.class)) { String label = filemetadataJson.getString("label"); + boolean restricted = 
filemetadataJson.getBoolean("restricted", false); String directoryLabel = filemetadataJson.getString("directoryLabel", null); String description = filemetadataJson.getString("description", null); FileMetadata fileMetadata = new FileMetadata(); fileMetadata.setLabel(label); + fileMetadata.setRestricted(restricted); fileMetadata.setDirectoryLabel(directoryLabel); fileMetadata.setDescription(description); fileMetadata.setDatasetVersion(dsv); @@ -397,6 +399,7 @@ public List parseFiles(JsonArray metadatasJson, DatasetVersion dsv fileMetadata.setDataFile(dataFile); dataFile.getFileMetadatas().add(fileMetadata); dataFile.setOwner(dsv.getDataset()); + dataFile.setRestricted(restricted); if (dsv.getDataset().getFiles() == null) { dsv.getDataset().setFiles(new ArrayList<>()); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 2e29609180f..f0eb1c01ee6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -570,7 +570,7 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata) { .add("filesize", df.getFilesize()) .add("description", df.getDescription()) //.add("released", df.isReleased()) - //.add("restricted", df.isRestricted()) + .add("restricted", df.isRestricted()) .add("storageIdentifier", df.getStorageIdentifier()) .add("originalFileFormat", df.getOriginalFileFormat()) .add("originalFormatLabel", df.getOriginalFormatLabel()) From 922d8c0eb3e8b7d11b495b9b705f4dc0c6e3638f Mon Sep 17 00:00:00 2001 From: akio-sone Date: Mon, 21 May 2018 12:59:26 -0400 Subject: [PATCH 24/46] bug-fix: handling of importType is null case --- .../engine/command/impl/CreateDatasetCommand.java | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetCommand.java 
b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetCommand.java index b56a7ac8f0f..48f96b858a8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetCommand.java @@ -171,10 +171,12 @@ by the Dataset page (in CREATE mode), it already has the persistent } logger.fine("Saving the files permanently."); - if (!importType.equals(ImportType.IMPORT_METADATA_ONLY)){ - ctxt.ingest().addFiles(dsv, theDataset.getFiles()); - } else { + if ((importType!=null) && importType.equals(ImportType.IMPORT_METADATA_ONLY)) { + logger.log(Level.INFO, "IMPORT_METADATA_ONLY special case"); IngestUtil.checkForDuplicateFileNamesFinal(dsv, theDataset.getFiles()); + } else { + logger.log(Level.INFO, "cases other than IMPORT_METADATA_ONLY"); + ctxt.ingest().addFiles(dsv, theDataset.getFiles()); } From fad057d56f49a15991f8dd253c6f9ad31b3158d8 Mon Sep 17 00:00:00 2001 From: Akio Sone Date: Tue, 12 Jun 2018 15:44:27 -0400 Subject: [PATCH 25/46] delete a test file --- tests/test.txt | 1 - 1 file changed, 1 deletion(-) delete mode 100644 tests/test.txt diff --git a/tests/test.txt b/tests/test.txt deleted file mode 100644 index d49aab150ca..00000000000 --- a/tests/test.txt +++ /dev/null @@ -1 +0,0 @@ -test test test From 4150fa6e4e38867f2423fa54a66359c5fc5d0457 Mon Sep 17 00:00:00 2001 From: Akio Sone Date: Wed, 13 Jun 2018 14:09:37 -0400 Subject: [PATCH 26/46] changes for a pull-request --- .../command/impl/CreateDatasetCommand.java | 35 ++++++++----------- 1 file changed, 15 insertions(+), 20 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetCommand.java index 48f96b858a8..5ac29daa59e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetCommand.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetCommand.java @@ -130,17 +130,21 @@ public Dataset execute(CommandContext ctxt) throws CommandException { dsv.setLastUpdateTime(createDate); theDataset.setModificationTime(createDate); for (DataFile dataFile: theDataset.getFiles() ){ + dataFile.setOwner(theDataset); dataFile.setCreator((AuthenticatedUser) getRequest().getUser()); dataFile.setCreateDate(theDataset.getCreateDate()); } String nonNullDefaultIfKeyNotFound = ""; - String protocol = ctxt.settings().getValueForKey(SettingsServiceBean.Key.Protocol, nonNullDefaultIfKeyNotFound); - String authority = ctxt.settings().getValueForKey(SettingsServiceBean.Key.Authority, nonNullDefaultIfKeyNotFound); - String doiSeparator = ctxt.settings().getValueForKey(SettingsServiceBean.Key.DoiSeparator, nonNullDefaultIfKeyNotFound); - String doiProvider = ctxt.settings().getValueForKey(SettingsServiceBean.Key.DoiProvider, nonNullDefaultIfKeyNotFound); - if (theDataset.getProtocol()==null) theDataset.setProtocol(protocol); - if (theDataset.getAuthority()==null) theDataset.setAuthority(authority); - if (theDataset.getDoiSeparator()==null) theDataset.setDoiSeparator(doiSeparator); + + String protocol = ctxt.settings().getValueForKey(SettingsServiceBean.Key.Protocol, nonNullDefaultIfKeyNotFound); + String authority = ctxt.settings().getValueForKey(SettingsServiceBean.Key.Authority, nonNullDefaultIfKeyNotFound); + String doiProvider = ctxt.settings().getValueForKey(SettingsServiceBean.Key.DoiProvider, nonNullDefaultIfKeyNotFound); + if (theDataset.getProtocol() == null) { + theDataset.setProtocol(protocol); + } + if (theDataset.getAuthority() == null) { + theDataset.setAuthority(authority); + } if (theDataset.getStorageIdentifier() == null) { try { DataAccess.createNewStorageIO(theDataset, "placeholder"); @@ -148,7 +152,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { // if setting the storage identifier through createNewStorageIO 
fails, dataset creation // does not have to fail. we just set the storage id to a default -SF String storageDriver = (System.getProperty("dataverse.files.storage-driver-id") != null) ? System.getProperty("dataverse.files.storage-driver-id") : "file"; - theDataset.setStorageIdentifier(storageDriver + "://" + theDataset.getAuthority()+theDataset.getDoiSeparator()+theDataset.getIdentifier()); + theDataset.setStorageIdentifier(storageDriver + "://" + theDataset.getAuthority() + "/" + theDataset.getIdentifier()); logger.info("Failed to create StorageIO. StorageIdentifier set to default. Not fatal." + "(" + ioex.getMessage() + ")"); } } @@ -170,17 +174,6 @@ by the Dataset page (in CREATE mode), it already has the persistent theDataset.setIdentifier(ctxt.datasets().generateDatasetIdentifier(theDataset, idServiceBean)); } - logger.fine("Saving the files permanently."); - if ((importType!=null) && importType.equals(ImportType.IMPORT_METADATA_ONLY)) { - logger.log(Level.INFO, "IMPORT_METADATA_ONLY special case"); - IngestUtil.checkForDuplicateFileNamesFinal(dsv, theDataset.getFiles()); - } else { - logger.log(Level.INFO, "cases other than IMPORT_METADATA_ONLY"); - ctxt.ingest().addFiles(dsv, theDataset.getFiles()); - } - - - logger.log(Level.FINE,"doiProvider={0} protocol={1} importType={2} GlobalIdCreateTime=={3}", new Object[]{doiProvider, protocol, importType, theDataset.getGlobalIdCreateTime()}); // Attempt the registration if importing dataset through the API, or the app (but not harvest or migrate) if ((importType == null || importType.equals(ImportType.NEW) || importType.equals(ImportType.IMPORT_METADATA_ONLY)) && theDataset.getGlobalIdCreateTime() == null) { @@ -196,13 +189,15 @@ by the Dataset page (in CREATE mode), it already has the persistent // Check return value to make sure registration succeeded if (!idServiceBean.registerWhenPublished() && doiRetString.contains(theDataset.getIdentifier())) { theDataset.setGlobalIdCreateTime(createDate); + 
//theDataset.setIdentifierRegistered(true); } } else // If harvest or migrate, and this is a released dataset, we don't need to register, // so set the globalIdCreateTime to now if (theDataset.getLatestVersion().getVersionState().equals(VersionState.RELEASED)) { theDataset.setGlobalIdCreateTime(new Date()); + //theDataset.setIdentifierRegistered(true); } - + // if (registrationRequired && !theDataset.isIdentifierRegistered()) { if (registrationRequired && theDataset.getGlobalIdCreateTime() == null) { throw new IllegalCommandException("Dataset could not be created. Registration failed", this); } From 572f784b81151bc7a863caf35ff251129c2824f8 Mon Sep 17 00:00:00 2001 From: Akio Sone Date: Wed, 13 Jun 2018 15:50:01 -0400 Subject: [PATCH 27/46] rever to the version when the pull request was submitted --- .../command/impl/CreateDatasetCommand.java | 35 +++++++++++-------- 1 file changed, 20 insertions(+), 15 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetCommand.java index 5ac29daa59e..48f96b858a8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetCommand.java @@ -130,21 +130,17 @@ public Dataset execute(CommandContext ctxt) throws CommandException { dsv.setLastUpdateTime(createDate); theDataset.setModificationTime(createDate); for (DataFile dataFile: theDataset.getFiles() ){ - dataFile.setOwner(theDataset); dataFile.setCreator((AuthenticatedUser) getRequest().getUser()); dataFile.setCreateDate(theDataset.getCreateDate()); } String nonNullDefaultIfKeyNotFound = ""; - - String protocol = ctxt.settings().getValueForKey(SettingsServiceBean.Key.Protocol, nonNullDefaultIfKeyNotFound); - String authority = ctxt.settings().getValueForKey(SettingsServiceBean.Key.Authority, nonNullDefaultIfKeyNotFound); - String 
doiProvider = ctxt.settings().getValueForKey(SettingsServiceBean.Key.DoiProvider, nonNullDefaultIfKeyNotFound); - if (theDataset.getProtocol() == null) { - theDataset.setProtocol(protocol); - } - if (theDataset.getAuthority() == null) { - theDataset.setAuthority(authority); - } + String protocol = ctxt.settings().getValueForKey(SettingsServiceBean.Key.Protocol, nonNullDefaultIfKeyNotFound); + String authority = ctxt.settings().getValueForKey(SettingsServiceBean.Key.Authority, nonNullDefaultIfKeyNotFound); + String doiSeparator = ctxt.settings().getValueForKey(SettingsServiceBean.Key.DoiSeparator, nonNullDefaultIfKeyNotFound); + String doiProvider = ctxt.settings().getValueForKey(SettingsServiceBean.Key.DoiProvider, nonNullDefaultIfKeyNotFound); + if (theDataset.getProtocol()==null) theDataset.setProtocol(protocol); + if (theDataset.getAuthority()==null) theDataset.setAuthority(authority); + if (theDataset.getDoiSeparator()==null) theDataset.setDoiSeparator(doiSeparator); if (theDataset.getStorageIdentifier() == null) { try { DataAccess.createNewStorageIO(theDataset, "placeholder"); @@ -152,7 +148,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { // if setting the storage identifier through createNewStorageIO fails, dataset creation // does not have to fail. we just set the storage id to a default -SF String storageDriver = (System.getProperty("dataverse.files.storage-driver-id") != null) ? System.getProperty("dataverse.files.storage-driver-id") : "file"; - theDataset.setStorageIdentifier(storageDriver + "://" + theDataset.getAuthority() + "/" + theDataset.getIdentifier()); + theDataset.setStorageIdentifier(storageDriver + "://" + theDataset.getAuthority()+theDataset.getDoiSeparator()+theDataset.getIdentifier()); logger.info("Failed to create StorageIO. StorageIdentifier set to default. Not fatal." 
+ "(" + ioex.getMessage() + ")"); } } @@ -174,6 +170,17 @@ by the Dataset page (in CREATE mode), it already has the persistent theDataset.setIdentifier(ctxt.datasets().generateDatasetIdentifier(theDataset, idServiceBean)); } + logger.fine("Saving the files permanently."); + if ((importType!=null) && importType.equals(ImportType.IMPORT_METADATA_ONLY)) { + logger.log(Level.INFO, "IMPORT_METADATA_ONLY special case"); + IngestUtil.checkForDuplicateFileNamesFinal(dsv, theDataset.getFiles()); + } else { + logger.log(Level.INFO, "cases other than IMPORT_METADATA_ONLY"); + ctxt.ingest().addFiles(dsv, theDataset.getFiles()); + } + + + logger.log(Level.FINE,"doiProvider={0} protocol={1} importType={2} GlobalIdCreateTime=={3}", new Object[]{doiProvider, protocol, importType, theDataset.getGlobalIdCreateTime()}); // Attempt the registration if importing dataset through the API, or the app (but not harvest or migrate) if ((importType == null || importType.equals(ImportType.NEW) || importType.equals(ImportType.IMPORT_METADATA_ONLY)) && theDataset.getGlobalIdCreateTime() == null) { @@ -189,15 +196,13 @@ by the Dataset page (in CREATE mode), it already has the persistent // Check return value to make sure registration succeeded if (!idServiceBean.registerWhenPublished() && doiRetString.contains(theDataset.getIdentifier())) { theDataset.setGlobalIdCreateTime(createDate); - //theDataset.setIdentifierRegistered(true); } } else // If harvest or migrate, and this is a released dataset, we don't need to register, // so set the globalIdCreateTime to now if (theDataset.getLatestVersion().getVersionState().equals(VersionState.RELEASED)) { theDataset.setGlobalIdCreateTime(new Date()); - //theDataset.setIdentifierRegistered(true); } - // if (registrationRequired && !theDataset.isIdentifierRegistered()) { + if (registrationRequired && theDataset.getGlobalIdCreateTime() == null) { throw new IllegalCommandException("Dataset could not be created. 
Registration failed", this); } From c2a089326d7c6b5aed5b455f9a768197c83745ca Mon Sep 17 00:00:00 2001 From: Akio Sone Date: Wed, 11 Jul 2018 13:01:18 -0400 Subject: [PATCH 28/46] add log lines to detect why dataVaraibles are not imported --- .../edu/harvard/iq/dataverse/util/json/JsonParser.java | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index 414ea3e6eeb..3580889c8be 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -488,11 +488,14 @@ public DataFile parseDataFile(JsonObject datafileJson) { if (filesize != null){ dataFile.setFilesize(filesize); } + logger.log(Level.INFO, "parsing DataTable"); // parse DataTable JsonArray dataTablesJson = datafileJson.getJsonArray("dataTables"); + logger.log(Level.INFO, "dataTablesJson=-{0}", dataTablesJson.size()); if ((dataTablesJson != null ) && (!dataTablesJson.isEmpty())){ // get parsing results of a DataTable List dataTables = parseDataTables(dataTablesJson); + logger.log(Level.INFO, "dataTables:size={0}", dataTables.size()); dataFile.setDataTables(dataTables); dataFile.setDataTable(dataTables.get(0)); dataTables.get(0).setDataFile(dataFile); @@ -506,8 +509,10 @@ public DataFile parseDataFile(JsonObject datafileJson) { public List parseDataTables(JsonArray dataTablesJson){ + logger.log(Level.INFO, "parseDataTables={0}", dataTablesJson.size()); List dataTables = new LinkedList<>(); if ((dataTablesJson !=null) && (!dataTablesJson.isEmpty())){ + logger.log(Level.INFO, "dataTables is not empty"); for (JsonObject dataTableJsonL : dataTablesJson.getValuesAs(JsonObject.class)){ JsonObject dataTableJson = dataTableJsonL.getJsonObject("dataTable"); DataTable dataTable = new DataTable(); @@ -527,6 +532,7 @@ public List parseDataTables(JsonArray dataTablesJson){ dataTables.add(dataTable); } } 
+ logger.log(Level.INFO, "dataTables: size(final)={0}", dataTables.size()); return dataTables; } @@ -534,6 +540,7 @@ public List parseDataTables(JsonArray dataTablesJson){ public List parseDataVariables(JsonArray dataVariablesJson){ List dataVariables = new LinkedList<>(); if ((dataVariablesJson != null) && (!dataVariablesJson.isEmpty())) { + logger.log(Level.INFO, "dataVariablesJson is not empty:size={0}", dataVariablesJson.size()); for (JsonObject dataVariableJson: dataVariablesJson.getValuesAs(JsonObject.class)){ DataVariable dataVariable = new DataVariable(); // capture scalar itemse. From 3594851a174f72c4c1458c13befe16b587f3bbd0 Mon Sep 17 00:00:00 2001 From: Akio Sone Date: Wed, 11 Jul 2018 13:26:09 -0400 Subject: [PATCH 29/46] another set of logging lines --- .../iq/dataverse/api/imports/ImportServiceBean.java | 5 ++++- .../edu/harvard/iq/dataverse/util/json/JsonParser.java | 10 ++++++++-- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java index f63c7bc94fd..75dc4b6399a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java @@ -552,9 +552,12 @@ public JsonObjectBuilder doImportWoI(DataverseRequest dataverseRequest, Datavers JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService); parser.setLenient(false); + + logger.log(Level.INFO, "dataset={0}",xstream.toXML(obj)); + logger.log(Level.INFO, "parsing the received dataset"); Dataset ds = parser.parseDataset(obj); - logger.log(Level.INFO, "dataset={0}", ds); + logger.log(Level.INFO, "dataset={0}",xstream.toXML(ds)); // For ImportType.NEW, if the user supplies a global identifier, and it's not a protocol // we support, it will be rejected. 
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index 3580889c8be..5d7b5b53276 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -258,6 +258,7 @@ public DatasetVersion parseDatasetVersion(JsonObject obj) throws JsonParseExcept } public Dataset parseDataset(JsonObject obj) throws JsonParseException { + logger.log(Level.INFO, "parseDataset is called"); Dataset dataset = new Dataset(); dataset.setAuthority(obj.getString("authority", null) == null ? settingsService.getValueForKey(SettingsServiceBean.Key.Authority) : obj.getString("authority")); @@ -275,6 +276,7 @@ public Dataset parseDataset(JsonObject obj) throws JsonParseException { } public DatasetVersion parseDatasetVersion(JsonObject obj, DatasetVersion dsv) throws JsonParseException { + logger.log(Level.INFO, "parseDatasetVersion is called"); try { String archiveNote = obj.getString("archiveNote", null); @@ -332,6 +334,7 @@ public DatasetVersion parseDatasetVersion(JsonObject obj, DatasetVersion dsv) th filesJson = obj.getJsonArray("fileMetadatas"); } if (filesJson != null) { + logger.log(Level.INFO, "parseFiles to be called"); dsv.setFileMetadatas(parseFiles(filesJson, dsv)); } return dsv; @@ -377,6 +380,7 @@ public List parseMetadataBlocks(JsonObject json) throws JsonParseE } public List parseFiles(JsonArray metadatasJson, DatasetVersion dsv) throws JsonParseException { + logger.log(Level.INFO, "parseFiles is called"); List fileMetadatas = new LinkedList<>(); if (metadatasJson != null) { @@ -392,7 +396,7 @@ public List parseFiles(JsonArray metadatasJson, DatasetVersion dsv fileMetadata.setDirectoryLabel(directoryLabel); fileMetadata.setDescription(description); fileMetadata.setDatasetVersion(dsv); - + logger.log(Level.INFO, "parseDataFile is to be called"); DataFile dataFile = 
parseDataFile(filemetadataJson.getJsonObject("dataFile")); fileMetadata.setDataFile(dataFile); @@ -414,6 +418,7 @@ public List parseFiles(JsonArray metadatasJson, DatasetVersion dsv } public DataFile parseDataFile(JsonObject datafileJson) { + logger.log(Level.INFO, "parseDataFile is called"); DataFile dataFile = new DataFile(); Timestamp timestamp = new Timestamp(new Date().getTime()); @@ -491,8 +496,9 @@ public DataFile parseDataFile(JsonObject datafileJson) { logger.log(Level.INFO, "parsing DataTable"); // parse DataTable JsonArray dataTablesJson = datafileJson.getJsonArray("dataTables"); - logger.log(Level.INFO, "dataTablesJson=-{0}", dataTablesJson.size()); + if ((dataTablesJson != null ) && (!dataTablesJson.isEmpty())){ + logger.log(Level.INFO, "dataTablesJson:size={0}", dataTablesJson.size()); // get parsing results of a DataTable List dataTables = parseDataTables(dataTablesJson); logger.log(Level.INFO, "dataTables:size={0}", dataTables.size()); From f5c46e607fbd9feec56b168e9a79c9a848e2778d Mon Sep 17 00:00:00 2001 From: Akio Sone Date: Wed, 11 Jul 2018 13:49:39 -0400 Subject: [PATCH 30/46] ditto --- src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java b/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java index 0e5704d69c0..c52883af7a8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java @@ -159,7 +159,7 @@ public Response postImportWoI( try { PrintWriter cleanupLog = null; // Cleanup log isn't needed for ImportType == NEW. We don't do any data cleanup in this mode. 
- + logger.log(Level.INFO, "calling importService"); JsonObjectBuilder status = importService.doImportWoI(dataverseRequest, owner, fileInputStream, fileName, ImportType.IMPORT_METADATA_ONLY, cleanupLog); logger.log(Level.INFO, "returned status={0}", status); return this.ok(status); From b2ea6718fcc7baf6c25b41e64ab31664331040aa Mon Sep 17 00:00:00 2001 From: Akio Sone Date: Wed, 11 Jul 2018 14:06:58 -0400 Subject: [PATCH 31/46] ditto2 --- src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java b/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java index c52883af7a8..67d7a4c0828 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java @@ -159,7 +159,7 @@ public Response postImportWoI( try { PrintWriter cleanupLog = null; // Cleanup log isn't needed for ImportType == NEW. We don't do any data cleanup in this mode. 
- logger.log(Level.INFO, "calling importService"); + logger.log(Level.INFO, "calling importService#doImportWoI()"); JsonObjectBuilder status = importService.doImportWoI(dataverseRequest, owner, fileInputStream, fileName, ImportType.IMPORT_METADATA_ONLY, cleanupLog); logger.log(Level.INFO, "returned status={0}", status); return this.ok(status); From 353b3e60e89e0b8f869bcb48f67d6bdffb503c5a Mon Sep 17 00:00:00 2001 From: Akio Sone Date: Wed, 11 Jul 2018 14:13:33 -0400 Subject: [PATCH 32/46] version update --- pom.xml | 76 ++++++++++++++++++++++++++++----------------------------- 1 file changed, 38 insertions(+), 38 deletions(-) diff --git a/pom.xml b/pom.xml index b8bd4f71443..da4b33c386d 100644 --- a/pom.xml +++ b/pom.xml @@ -4,15 +4,15 @@ edu.harvard.iq dataverse - 4.9 + 4.9.1 war dataverse ${project.build.directory}/endorsed - UTF-8 - -Xdoclint:none + UTF-8 + -Xdoclint:none 1.11.172 @@ -55,8 +55,8 @@ + we are pointing to an aws sdk with unique references to needed libraries. + If we update glassfish, we should return to the normal sdk as well. 
--> com.amazonaws @@ -340,7 +340,7 @@ org.slf4j slf4j-log4j12 1.7.7 - + org.mockito mockito-core @@ -377,7 +377,7 @@ commons-codec 1.9 - + org.javaswift joss @@ -409,27 +409,27 @@ scribejava-apis 3.1.0 - - - - - - - - com.lyncode - xoai-common - 4.1.0-header-patch - - - com.lyncode - xoai-data-provider - 4.1.0-header-patch - - - com.lyncode - xoai-service-provider - 4.1.0-header-patch - + + + + + + + + com.lyncode + xoai-common + 4.1.0-header-patch + + + com.lyncode + xoai-data-provider + 4.1.0-header-patch + + + com.lyncode + xoai-service-provider + 4.1.0-header-patch + com.google.auto.service @@ -440,16 +440,16 @@ - org.glassfish.jersey.containers - jersey-container-servlet - 2.23.2 - + org.glassfish.jersey.containers + jersey-container-servlet + 2.23.2 + - - org.glassfish.jersey.media - jersey-media-multipart - 2.23.2 - + + org.glassfish.jersey.media + jersey-media-multipart + 2.23.2 + com.mashape.unirest unirest-java @@ -516,7 +516,7 @@ maven-war-plugin 2.3 - true + true false From 2e5de0316f9bef1163fb5f630ccda2fc1083925b Mon Sep 17 00:00:00 2001 From: Akio Sone Date: Wed, 11 Jul 2018 15:12:21 -0400 Subject: [PATCH 33/46] more logging lines --- .../java/edu/harvard/iq/dataverse/util/json/JsonParser.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index 5d7b5b53276..ff0213253b6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -515,7 +515,7 @@ public DataFile parseDataFile(JsonObject datafileJson) { public List parseDataTables(JsonArray dataTablesJson){ - logger.log(Level.INFO, "parseDataTables={0}", dataTablesJson.size()); + logger.log(Level.INFO, "parseDataTables is called"); List dataTables = new LinkedList<>(); if ((dataTablesJson !=null) && (!dataTablesJson.isEmpty())){ logger.log(Level.INFO, 
"dataTables is not empty"); @@ -544,6 +544,7 @@ public List parseDataTables(JsonArray dataTablesJson){ public List parseDataVariables(JsonArray dataVariablesJson){ + logger.log(Level.INFO, "parseDataVariables is called"); List dataVariables = new LinkedList<>(); if ((dataVariablesJson != null) && (!dataVariablesJson.isEmpty())) { logger.log(Level.INFO, "dataVariablesJson is not empty:size={0}", dataVariablesJson.size()); @@ -588,6 +589,7 @@ public List parseDataVariables(JsonArray dataVariablesJson){ public List parseSummaryStatistics(JsonObject summaryStatisticsJson){ + logger.log(Level.INFO, "parseSummaryStatistics is called"); List summaryStatistics = new LinkedList<>(); if (summaryStatisticsJson !=null){ // mean @@ -660,6 +662,7 @@ public List parseSummaryStatistics(JsonObject summaryStatistic public List parseVariableCategories(JsonArray variableCategoriesJson){ + logger.log(Level.INFO, "parseVariableCategories is called"); List variableCategories = new LinkedList<>(); if ((variableCategoriesJson != null) && (!variableCategoriesJson.isEmpty())){ for (JsonObject variableCategoryJson : variableCategoriesJson.getValuesAs(JsonObject.class)){ From b865aa52fc419ba99da47db448261a3f4b7593eb Mon Sep 17 00:00:00 2001 From: Akio Sone Date: Thu, 25 Oct 2018 09:13:58 -0400 Subject: [PATCH 34/46] update classes to deal with post-version-4.9.X changes --- conf/docker-aio/c7.dockerfile | 1 + conf/docker-aio/dv/postgresql.conf | 185 ++++++++++++ conf/docker-aio/prep_it.bash | 5 +- .../harvard/iq/dataverse/api/Datasets.java | 272 +++++++++--------- .../harvard/iq/dataverse/api/Dataverses.java | 10 +- .../api/imports/ImportServiceBean.java | 4 +- .../datasetutility/AddReplaceFileHelper.java | 221 +------------- .../impl/UpdateDatasetVersionCommand.java | 16 +- .../iq/dataverse/util/json/JsonParser.java | 60 +++- 9 files changed, 399 insertions(+), 375 deletions(-) create mode 100644 conf/docker-aio/dv/postgresql.conf diff --git a/conf/docker-aio/c7.dockerfile 
b/conf/docker-aio/c7.dockerfile index 129ca4bc8e1..50dc60a5e60 100644 --- a/conf/docker-aio/c7.dockerfile +++ b/conf/docker-aio/c7.dockerfile @@ -24,6 +24,7 @@ RUN sudo -u postgres /usr/pgsql-9.6/bin/initdb -D /var/lib/pgsql/data # copy configuration related files RUN cp /tmp/dv/pg_hba.conf /var/lib/pgsql/data/ +RUN cp /tmp/dv/postgresql.conf /var/lib/pgsql/data/ RUN cp -r /opt/solr-7.3.0/server/solr/configsets/_default /opt/solr-7.3.0/server/solr/collection1 RUN cp /tmp/dv/schema.xml /opt/solr-7.3.0/server/solr/collection1/conf/schema.xml RUN cp /tmp/dv/solrconfig.xml /opt/solr-7.3.0/server/solr/collection1/conf/solrconfig.xml diff --git a/conf/docker-aio/dv/postgresql.conf b/conf/docker-aio/dv/postgresql.conf new file mode 100644 index 00000000000..22ab7ab85e8 --- /dev/null +++ b/conf/docker-aio/dv/postgresql.conf @@ -0,0 +1,185 @@ + # (change requires restart) + # (change requires restart) + # (change requires restart) + # (change requires restart) +listen_addresses = '*' # what IP address(es) to listen on; + # comma-separated list of addresses; + # defaults to 'localhost'; use '*' for all + # (change requires restart) +max_connections = 100 # (change requires restart) + # (change requires restart) + # (change requires restart) + # (change requires restart) + # (change requires restart) + # (change requires restart) + # 0 selects the system default + # 0 selects the system default + # 0 selects the system default +shared_buffers = 128MB # min 128kB + # (change requires restart) + # (change requires restart) + # (change requires restart) +dynamic_shared_memory_type = posix # the default is the first option + # supported by the operating system: + # posix + # sysv + # windows + # mmap + # use none to disable dynamic shared memory + # (change requires restart) + # in kB, or -1 for no limit + # (change requires restart) + # (change requires restart) + # (change requires restart) + # (turning this off can cause + # unrecoverable data corruption) + # off, local, 
remote_write, remote_apply, or on + # supported by the operating system: + # open_datasync + # fdatasync (default on Linux) + # fsync + # fsync_writethrough + # open_sync + # (change requires restart) + # (change requires restart) + # (change requires restart) + # placeholders: %p = path of file to archive + # %f = file name only + # e.g. 'test ! -f /mnt/server/archivedir/%f && cp %p /mnt/server/archivedir/%f' + # number of seconds; 0 disables + # (change requires restart) + # (change requires restart) + # (change requires restart) + # number of sync standbys and comma-separated list of application_name + # from standby(s); '*' = all + # (change requires restart) + # when reading WAL from archive; + # -1 allows indefinite delay + # when reading streaming WAL; + # -1 allows indefinite delay + # 0 disables + # query conflicts + # communication from master + # in milliseconds; 0 disables + # retrieve WAL after a failed attempt + # JOIN clauses +log_destination = 'stderr' # Valid values are combinations of + # stderr, csvlog, syslog, and eventlog, + # depending on platform. csvlog + # requires logging_collector to be on. +logging_collector = on # Enable capturing of stderr and csvlog + # into log files. Required to be on for + # csvlogs. + # (change requires restart) +log_directory = 'pg_log' # directory where log files are written, + # can be absolute or relative to PGDATA +log_filename = 'postgresql-%a.log' # log file name pattern, + # can include strftime() escapes + # begin with 0 to use octal notation +log_truncate_on_rotation = on # If on, an existing log file with the + # same name as the new log file will be + # truncated rather than appended to. + # But such truncation only occurs on + # time-driven rotation, not on restarts + # or size-driven rotation. Default is + # off, meaning append to existing files + # in all cases. +log_rotation_age = 1d # Automatic rotation of logfiles will + # happen after that time. 0 disables. 
+log_rotation_size = 0 # Automatic rotation of logfiles will + # happen after that much log output. + # 0 disables. + # debug5 + # debug4 + # debug3 + # debug2 + # debug1 + # log + # notice + # warning + # error + # debug5 + # debug4 + # debug3 + # debug2 + # debug1 + # info + # notice + # warning + # error + # log + # fatal + # panic + # debug5 + # debug4 + # debug3 + # debug2 + # debug1 + # info + # notice + # warning + # error + # log + # fatal + # panic (effectively off) + # and their durations, > 0 logs only + # statements running at least this number + # of milliseconds +log_line_prefix = '< %m > ' # special values: + # %a = application name + # %u = user name + # %d = database name + # %r = remote host and port + # %h = remote host + # %p = process ID + # %t = timestamp without milliseconds + # %m = timestamp with milliseconds + # %n = timestamp with milliseconds (as a Unix epoch) + # %i = command tag + # %e = SQL state + # %c = session ID + # %l = session line number + # %s = session start timestamp + # %v = virtual transaction ID + # %x = transaction ID (0 if none) + # %q = stop here in non-session + # processes + # %% = '%' + # e.g. '<%u%%%d> ' + # than the specified size in kilobytes; + # -1 disables, 0 logs all temp files +log_timezone = 'UTC' + # (change requires restart) + # requires track_counts to also be on. + # their durations, > 0 logs only + # actions running at least this number + # of milliseconds. + # (change requires restart) + # vacuum + # analyze + # (change requires restart) + # before forced vacuum + # (change requires restart) + # autovacuum, in milliseconds; + # -1 means use vacuum_cost_delay + # autovacuum, -1 means use + # vacuum_cost_limit + # only default tablespace +datestyle = 'iso, mdy' +timezone = 'UTC' + # abbreviations. Currently, there are + # Default + # Australia (historical usage) + # India + # You can create your own file in + # share/timezonesets/. 
+ # encoding +lc_messages = 'C' # locale for system error message + # strings +lc_monetary = 'C' # locale for monetary formatting +lc_numeric = 'C' # locale for number formatting +lc_time = 'C' # locale for time formatting +default_text_search_config = 'pg_catalog.english' + # (change requires restart) + # (change requires restart) + # directory 'conf.d' diff --git a/conf/docker-aio/prep_it.bash b/conf/docker-aio/prep_it.bash index f78e90484f4..17b4b89d094 100755 --- a/conf/docker-aio/prep_it.bash +++ b/conf/docker-aio/prep_it.bash @@ -21,7 +21,7 @@ do # cleanup from previous runs if necessary docker rm -f dv # start container - docker run -d -p 8084:80 -p 8083:8080 -p 9010:9009 --name dv dv0 + docker run -d -p 15432:5432 -p 8084:80 -p 8083:8080 -p 9010:9009 --name dv dv0 # wait for glassfish to be healthy i_wait=0 @@ -35,7 +35,7 @@ do sleep $d_wait fi i_wait=$(( $i_wait + 1 )) - + done # try setupIT.bash @@ -68,4 +68,3 @@ cd ../.. echo "docker-aio ready to run integration tests ($i_retry)" curl http://localhost:8084/api/info/version echo $? 
- diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index af44b819857..dc8ad61db36 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -1,5 +1,7 @@ package edu.harvard.iq.dataverse.api; +import com.thoughtworks.xstream.XStream; +import com.thoughtworks.xstream.io.json.JsonHierarchicalStreamDriver; import edu.harvard.iq.dataverse.ControlledVocabularyValue; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.DataFileServiceBean; @@ -75,6 +77,7 @@ import edu.harvard.iq.dataverse.util.EjbUtil; import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.util.json.JsonParseException; +import edu.harvard.iq.dataverse.util.json.JsonPrinter; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*; import java.io.InputStream; import java.io.StringReader; @@ -95,6 +98,7 @@ import javax.json.JsonArrayBuilder; import javax.json.JsonObject; import javax.json.JsonObjectBuilder; +import javax.json.stream.JsonParsingException; import javax.ws.rs.Consumes; import javax.ws.rs.DELETE; import javax.ws.rs.GET; @@ -114,7 +118,7 @@ public class Datasets extends AbstractApiBean { private static final Logger logger = Logger.getLogger(Datasets.class.getCanonicalName()); - + static XStream xstream = new XStream(new JsonHierarchicalStreamDriver()); @Inject DataverseSession session; @EJB @@ -371,7 +375,7 @@ public Response updateDatasetTargetURLAll() { @PUT @Path("{id}/versions/{versionId}") public Response updateDraftVersion( String jsonBody, @PathParam("id") String id, @PathParam("versionId") String versionId ){ - + logger.log(Level.INFO, "updateDraftVersion:jsonBody={0}", jsonBody); if ( ! 
":draft".equals(versionId) ) { return error( Response.Status.BAD_REQUEST, "Only the :draft version can be updated"); } @@ -379,9 +383,42 @@ public Response updateDraftVersion( String jsonBody, @PathParam("id") String id, try ( StringReader rdr = new StringReader(jsonBody) ) { DataverseRequest req = createDataverseRequest(findUserOrDie()); Dataset ds = findDatasetOrDie(id); + + if (ds.getCreateDate()!=null){ + logger.log(Level.INFO, "dataset:creationdate={0}", ds.getCreateDate()); + } else { + logger.log(Level.INFO, "dataset:creationdate is not set"); + } + + + + logger.log(Level.INFO, "target dataset is retrieved={0}", + xstream.toXML(ds)); + JsonObject json = Json.createReader(rdr).readObject(); + logger.log(Level.INFO, "calling 1-arg-jsonParser().parseDatasetVersion()"); DatasetVersion incomingVersion = jsonParser().parseDatasetVersion(json); + + + if (incomingVersion.getDataset() !=null){ + logger.log(Level.INFO, "this case has a dataset: copy datafile info"); + for (DataFile df: incomingVersion.getDataset().getFiles()){ + df.setOwner(ds); + } + + // datafile info was attached + ds.setFiles(incomingVersion.getDataset().getFiles()); + logger.log(Level.INFO, "updated dataset with datafiles={0}", + xstream.toXML(ds)); + } + logger.log(Level.INFO, "incomingVersion:updateDraftVersion={0}", + xstream.toXML(incomingVersion)); + + + + + // clear possibly stale fields from the incoming dataset version. // creation and modification dates are updated by the commands. 
incomingVersion.setId(null); @@ -389,18 +426,23 @@ public Response updateDraftVersion( String jsonBody, @PathParam("id") String id, incomingVersion.setMinorVersionNumber(null); incomingVersion.setVersionState(DatasetVersion.VersionState.DRAFT); incomingVersion.setDataset(ds); + + + incomingVersion.setCreateTime(null); incomingVersion.setLastUpdateTime(null); boolean updateDraft = ds.getLatestVersion().isDraft(); DatasetVersion managedVersion; if ( updateDraft ) { + logger.log(Level.INFO, "updateDraft: yes"); final DatasetVersion editVersion = ds.getEditVersion(); editVersion.setDatasetFields(incomingVersion.getDatasetFields()); editVersion.setTermsOfUseAndAccess( incomingVersion.getTermsOfUseAndAccess() ); Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req)); managedVersion = managedDataset.getEditVersion(); } else { + logger.log(Level.INFO, "updateDraft: no"); managedVersion = execCommand(new CreateDatasetVersionCommand(req, ds, incomingVersion)); } // DatasetVersion managedVersion = execCommand( updateDraft @@ -577,7 +619,7 @@ private String getCompoundDisplayValue (DatasetFieldCompoundValue dscv){ @PUT @Path("{id}/editMetadata") public Response editVersionMetadata(String jsonBody, @PathParam("id") String id, @QueryParam("replace") Boolean replace) throws WrappedResponse{ - + logger.log(Level.INFO, "jsonBody:editVersionMetadata ={0}", jsonBody); Boolean replaceData = replace != null; DataverseRequest req = createDataverseRequest(findUserOrDie()); @@ -591,8 +633,9 @@ private Response processDatasetUpdate(String jsonBody, String id, DataverseReque Dataset ds = findDatasetOrDie(id); JsonObject json = Json.createReader(rdr).readObject(); + logger.log(Level.INFO, "json:processDatasetUpdate={0}", xstream.toXML(json)); DatasetVersion dsv = ds.getEditVersion(); - + logger.log(Level.INFO, "DatasetVersion:processDatasetUpdate={0}", xstream.toXML(dsv)); List fields = new LinkedList<>(); DatasetField singleField = null; @@ -1232,157 +1275,108 @@ 
public Response addFileToDataset(@PathParam("id") String idSupplied, } // end: addFileToDataset - /* - * Note: - * The following method may be combined with the above addFiletoDataset method - * if there is a simple way to switch on/off calling the ingest process. - */ - - /* - * The following API is based on the above addFileToDataset - * - * - */ + /** * Add DataFile-related Metadata to an existing Dataset without invoking * an ingest request * - * @param idSupplied -// * @param jsonData - * @param fileInputStream - * @param contentDispositionHeader - * @param formDataBodyPart + * @param jsonBody + * @param datasetId * @return */ @POST @Path("{id}/addFileMetadata") - @Consumes(MediaType.MULTIPART_FORM_DATA) - public Response addFileMetadataToDataset(@PathParam("id") String idSupplied, - //@FormDataParam("jsonData") String jsonData, - @FormDataParam("file") InputStream fileInputStream, - @FormDataParam("file") FormDataContentDisposition contentDispositionHeader, - @FormDataParam("file") final FormDataBodyPart formDataBodyPart + public Response addFileMetadataToDataset(String jsonBody, @PathParam("id") String datasetId ){ - - - // ------------------------------------- - // (1) Get the user from the API key - // ------------------------------------- - User authUser; - try { - authUser = findUserOrDie(); - } catch (WrappedResponse ex) { - return error(Response.Status.FORBIDDEN, - ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.auth") - ); - } - //--------------------------------------- - // (1A) Make sure that the upload type is not rsync - // ------------------------------------- - - if (DataCaptureModuleUtil.rsyncSupportEnabled(settingsSvc.getValueForKey(SettingsServiceBean.Key.UploadMethods))) { - return error(Response.Status.METHOD_NOT_ALLOWED, SettingsServiceBean.Key.UploadMethods + " contains " + SystemConfig.FileUploadMethods.RSYNC + ". 
Please use rsync file upload."); - } - - - // ------------------------------------- - // (2) Get the Dataset Id - // - // ------------------------------------- - Dataset dataset; - - Long datasetId; - try { - dataset = findDatasetOrDie(idSupplied); - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - - - // ------------------------------------- - // (3) Get the file name and content type - // ------------------------------------- - String newFilename = contentDispositionHeader.getFileName(); - String newFileContentType = formDataBodyPart.getMediaType().toString(); - + try ( StringReader rdr = new StringReader(jsonBody) ) { + logger.log(Level.INFO, "addFileMetadataToDataset:received jsonBody={0}", jsonBody); + DataverseRequest req = createDataverseRequest(findUserOrDie()); + // retrieve dataset by id + logger.log(Level.INFO, "datasetId={0}", datasetId); + Dataset ds = findDatasetOrDie(datasetId); + logger.log(Level.INFO, "dataset display name={0}", ds.getDisplayName()); + // parse jsonbody and create metadata instances + JsonObject json = Json.createReader(rdr).readObject(); + + // step_060 + // set datafile's owner + // add each datafile to the list of datafile of dataset + logger.log(Level.INFO, "dataset: editversion={0}", ds.getEditVersion().getVersion()); + // step_070 + DatasetVersion incomingVersion = jsonParser().parseDatasetVersion(json, ds.getEditVersion()); + + // check datafile is owned by the dataset + List dataFiles = incomingVersion.getDataset().getFiles(); + if (dataFiles != null) { + - // (2a) Load up optional params via JSON - // this block is not used because no jsonData - //--------------------------------------- + if (dataFiles.isEmpty()){ + throw new NoFilesException("newlyAddedFiles is empty!"); + } + for (DataFile df : dataFiles) { + if (df.getOwner() == null){ + // df.setOwner(ds); + logger.log(Level.INFO, "df is not owned by any dataset"); + } else { + logger.log(Level.INFO, "df is owned by dataset whose id={0}", 
df.getOwner().getId()); + } + } + + + + } else { + logger.log(Level.INFO, "no datafile was parsed"); + throw new NullPointerException("newlyAddedFiles is null!"); - OptionalFileParams optionalFileParams = null; -/* - msgt("(api) jsonData: " + jsonData); + + } + // call UpdateDatasetVersion command + + ds = execCommand(new UpdateDatasetVersionCommand(ds, req)); + + // save datafiles + // from addFileToDataset case + dataFiles = ds.getFiles(); + + if (dataFiles != null) { + for (DataFile df : dataFiles) { + if (df.getOwner() == null){ + // df.setOwner(ds); + logger.log(Level.INFO, "df is not owned by any dataset"); + + df=fileService.save(df); + } else { + logger.log(Level.INFO, "df is owned by dataset whose id={0}", df.getOwner().getId()); + } + } + + + + } else { + logger.log(Level.INFO, "no datafile was parsed"); + dataFiles= new ArrayList<>(); + } + + logger.log(Level.INFO, "newlyAddedFiles: how many={0}", dataFiles.size()); + logger.log(Level.INFO, "newlyAddedFiles={0}", xstream.toXML(dataFiles)); + + return ok( JsonPrinter.jsonDataFileList(dataFiles)); + } catch (JsonParseException ex) { + logger.log(Level.SEVERE, "Semantic error parsing dataset version Json: " + ex.getMessage(), ex); + return error( Response.Status.BAD_REQUEST, "Error parsing dataset: " + ex.getMessage() ); + + } catch (NoFilesException ex) { + logger.log(Level.SEVERE, null, ex); + return error(Response.Status.BAD_REQUEST, "Datafile was not found: " + ex.getMessage() ); + } catch (WrappedResponse ex) { + logger.log(Level.SEVERE, null, ex); + return ex.getResponse(); - try { - optionalFileParams = new OptionalFileParams(jsonData); - } catch (DataFileTagException ex) { - return error( Response.Status.BAD_REQUEST, ex.getMessage()); } -*/ - - //------------------- - // (3) Create the AddReplaceFileHelper object - //------------------- - msg("ADD!"); - - DataverseRequest dvRequest2 = createDataverseRequest(authUser); - AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest2, - 
ingestService, - datasetService, - fileService, - permissionSvc, - commandEngine, - systemConfig); - - - //------------------- - // (4) Run "runAddFileByDatasetId" - //------------------- - addFileHelper.runAddFileWOIByDataset(dataset, - newFilename, - newFileContentType, - fileInputStream, - optionalFileParams); - - - if (addFileHelper.hasError()){ - return error(addFileHelper.getHttpErrorCode(), addFileHelper.getErrorMessagesAsString("\n")); - }else{ - String successMsg = ResourceBundle.getBundle("Bundle").getString("file.addreplace.success.add"); - try { - //msgt("as String: " + addFileHelper.getSuccessResult()); - /** - * @todo We need a consistent, sane way to communicate a human - * readable message to an API client suitable for human - * consumption. Imagine if the UI were built in Angular or React - * and we want to return a message from the API as-is to the - * user. Human readable. - */ - logger.log(Level.FINE, "successMsg:{0} ", successMsg); - return ok(addFileHelper.getSuccessResultAsJsonObjectBuilder()); - //"Look at that! You added a file! (hey hey, it may have worked)"); - } catch (NoFilesException ex) { - logger.log(Level.SEVERE, "NoFilesException during addFileMetadata:{0}", ex); - return error(Response.Status.BAD_REQUEST, "NoFileException! Serious Error! 
See administrator!"); + } - } - } - - } // end: addFileMetadataToDataset - - - - - - - - - - - private void msg(String m){ //System.out.println(m); logger.fine(m); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index fcfd84224af..d06d3955d90 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -1,5 +1,7 @@ package edu.harvard.iq.dataverse.api; +import com.thoughtworks.xstream.XStream; +import com.thoughtworks.xstream.io.json.JsonHierarchicalStreamDriver; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetFieldType; @@ -104,7 +106,7 @@ public class Dataverses extends AbstractApiBean { private static final Logger logger = Logger.getLogger(Dataverses.class.getCanonicalName()); - + static XStream xstream = new XStream(new JsonHierarchicalStreamDriver()); @EJB ExplicitGroupServiceBean explicitGroupSvc; // @EJB @@ -204,7 +206,9 @@ public Response createDataset( String jsonBody, @PathParam("identifier") String try { User u = findUserOrDie(); Dataverse owner = findDataverseOrDie(parentIdtf); + logger.log(Level.INFO, "jsonBody:createDataset={0}", jsonBody); Dataset ds = parseDataset(jsonBody); + logger.log(Level.INFO, "dataset={0}", xstream.toXML(ds)); ds.setOwner(owner); if ( ds.getVersions().isEmpty() ) { @@ -239,9 +243,11 @@ public Response createDataset( String jsonBody, @PathParam("identifier") String @Path("{identifier}/datasets/:import") public Response importDataset( String jsonBody, @PathParam("identifier") String parentIdtf, @QueryParam("pid") String pidParam, @QueryParam("release") String releaseParam ) { try { + logger.log(Level.INFO, "jsonBody:importDataset={0}", jsonBody); User u = findUserOrDie(); Dataverse owner = findDataverseOrDie(parentIdtf); Dataset ds = parseDataset(jsonBody); + logger.log(Level.INFO, 
"dataset:importDataset={0}", xstream.toXML(ds)); ds.setOwner(owner); if ( ds.getVersions().isEmpty() ) { @@ -300,7 +306,9 @@ public Response importDataset( String jsonBody, @PathParam("identifier") String } private Dataset parseDataset(String datasetJson ) throws WrappedResponse { + logger.log(Level.INFO, "parseDataset is called"); try ( StringReader rdr = new StringReader(datasetJson) ) { + logger.log(Level.INFO, "calling jsonParser().parseDataset()"); return jsonParser().parseDataset(Json.createReader(rdr).readObject()); } catch ( JsonParsingException | JsonParseException jpe ) { logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}", datasetJson); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java index 2bdd8617ee8..0b4fe999b08 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java @@ -544,7 +544,7 @@ public JsonObjectBuilder doImportWoI(DataverseRequest dataverseRequest, Datavers try (JsonReader jsonReader = Json.createReader(fileInputStream);) { JsonObject obj = jsonReader.readObject(); - + JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService); parser.setLenient(false); @@ -676,7 +676,7 @@ public JsonObjectBuilder doImportWoI(DataverseRequest dataverseRequest, Datavers } else { // case #2 dataset does not exist logger.log(Level.INFO, "case #2: dataset does not exist=> new dataset"); - logger.log(Level.INFO, "calling CreateDatasetCommand"); + logger.log(Level.INFO, "calling CreateNewDatasetCommand"); Dataset managedDs = engineSvc.submit(new CreateNewDatasetCommand(ds, dataverseRequest)); status = " created dataset, id=" + managedDs.getId() + "."; logger.log(Level.INFO, "case #2: new dataset: status={0}", status); diff --git 
a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java index 633352b8d46..87e31981167 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java @@ -95,7 +95,6 @@ public class AddReplaceFileHelper{ private static final Logger logger = Logger.getLogger(AddReplaceFileHelper.class.getCanonicalName()); public static String FILE_ADD_OPERATION = "FILE_ADD_OPERATION"; - public static String FILE_ADD_WOI_OPERATION = "FILE_ADD_WOI_OPERATION"; public static String FILE_REPLACE_OPERATION = "FILE_REPLACE_OPERATION"; public static String FILE_REPLACE_FORCE_OPERATION = "FILE_REPLACE_FORCE_OPERATION"; @@ -274,57 +273,6 @@ public boolean runAddFileByDataset(Dataset chosenDataset, return this.runAddReplaceFile(dataset, newFileName, newFileContentType, newFileInputStream, optionalFileParams); } - - // optionalFileParams is always null, not used - /** - * - * @param chosenDataset - * @param newFileName - * @param newFileContentType - * @param newFileInputStream - * @param optionalFileParams - * @return - */ - public boolean runAddFileWOIByDataset(Dataset chosenDataset, - String newFileName, - String newFileContentType, - InputStream newFileInputStream, - OptionalFileParams optionalFileParams){ - - msgt(">> runAddFileWithoutIngestByDataset"); - - initErrorHandling(); - - this.currentOperation = FILE_ADD_WOI_OPERATION; - - if (!this.step_001_loadDataset(chosenDataset)){ - return false; - } - - //return this.runAddFile(this.dataset, newFileName, newFileContentType, newFileInputStream, optionalFileParams); - return this.runAddReplaceFileWOI(dataset, newFileName, newFileContentType, newFileInputStream, optionalFileParams); - } - - - - - - - - - - - - - - - - - - - - - /** * After the constructor, this method is called to add a file @@ -464,59 +412,7 @@ private 
boolean runAddReplaceFile(Dataset dataset, } - - - - /** - * Here we're going to run through the steps to ADD or REPLACE a file - * - * The difference between ADD and REPLACE (add/delete) is: - * - * oldFileId - For ADD, set to null - * oldFileId - For REPLACE, set to id of file to replace - * - * This has now been broken into Phase 1 and Phase 2 - * - * The APIs will use this method and call Phase 1 & Phase 2 consecutively - * - * The UI will call Phase 1 on initial upload and - * then run Phase 2 if the user chooses to save the changes. - * - * - * @return - */ - private boolean runAddReplaceFileWOI(Dataset dataset, - String newFileName, String newFileContentType, - InputStream newFileInputStream, - OptionalFileParams optionalFileParams){ - - // Run "Phase 1" - Initial ingest of file + error check - // But don't save the dataset version yet - // - boolean phase1Success = runAddReplacePhase1WOI(dataset, - newFileName, - newFileContentType, - newFileInputStream, - optionalFileParams - ); - - if (!phase1Success){ - return false; - } - - - return runAddReplacePhase2WOI(); - - } - - - - - - - - - + /** * Note: UI replace is always a "force replace" which means @@ -615,68 +511,8 @@ private boolean runAddReplacePhase1(Dataset dataset, return true; } - - /** - * For the UI: File add/replace has been broken into 2 steps - * - * Phase 1 (here): Add/replace the file and make sure there are no errors - * But don't update the Dataset (yet) - * - * @return - */ - private boolean runAddReplacePhase1WOI(Dataset dataset, - String newFileName, - String newFileContentType, - InputStream newFileInputStream, - OptionalFileParams optionalFileParams){ - - if (this.hasError()){ - return false; // possible to have errors already... 
- } - - msgt("step_001_loadDataset"); - if (!this.step_001_loadDataset(dataset)){ - return false; - } - - msgt("step_010_VerifyUserAndPermissions"); - if (!this.step_010_VerifyUserAndPermissions()){ - return false; - - } - msgt("step_020_loadNewFile"); - if (!this.step_020_loadNewFile(newFileName, newFileContentType, newFileInputStream)){ - return false; - - } - - msgt("step_030_createNewFilesViaIngest"); - if (!this.step_030_createNewFilesViaIngest()){ - return false; - - } - msgt("step_050_checkForConstraintViolations"); - if (!this.step_050_checkForConstraintViolations()){ - return false; - } - - msgt("step_055_loadOptionalFileParams"); - if (!this.step_055_loadOptionalFileParams(optionalFileParams)){ - return false; - } - - return true; - } - - - - - - - - public boolean runReplaceFromUI_Phase2(){ return runAddReplacePhase2(); } @@ -813,61 +649,6 @@ private boolean runAddReplacePhase2(){ } - /** - * For the UI: File add/replace has been broken into 2 steps - * - * Phase 2 (here): Phase 1 has run ok, Update the Dataset -- issue the commands! - * - * @return - */ - private boolean runAddReplacePhase2WOI(){ - - if (this.hasError()){ - return false; // possible to have errors already... 
- } - - if ((finalFileList == null)||(finalFileList.isEmpty())){ - addError(getBundleErr("phase2_called_early_no_new_files")); - return false; - } - - msgt("step_060_addFilesViaIngestService"); - if (!this.step_060_addFilesViaIngestService()){ - return false; - - } - - if (this.isFileReplaceOperation()){ - msgt("step_080_run_update_dataset_command_for_replace"); - if (!this.step_080_run_update_dataset_command_for_replace()){ - return false; - } - - }else{ - msgt("step_070_run_update_dataset_command"); - if (!this.step_070_run_update_dataset_command()){ - return false; - } - } - - msgt("step_090_notifyUser"); - if (!this.step_090_notifyUser()){ - return false; - } - - msgt("step_100_startIngestJobs"); - if (!this.step_100_startIngestJobs()){ - return false; - } - - return true; - } - - - - - - /** * Get for currentOperation * @return String diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java index f54ce5d8be5..b43ce4ea7fa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java @@ -1,5 +1,7 @@ package edu.harvard.iq.dataverse.engine.command.impl; +import com.thoughtworks.xstream.XStream; +import com.thoughtworks.xstream.io.json.JsonHierarchicalStreamDriver; import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; @@ -10,6 +12,7 @@ import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import java.util.ArrayList; import java.util.List; +import java.util.logging.Level; import java.util.logging.Logger; /** @@ -20,6 +23,9 @@ public class UpdateDatasetVersionCommand extends AbstractDatasetCommand { private static final Logger logger = 
Logger.getLogger(UpdateDatasetVersionCommand.class.getCanonicalName()); + + static XStream xstream = new XStream(new JsonHierarchicalStreamDriver()); + private final List filesToDelete; private boolean validateLenient = false; @@ -71,11 +77,14 @@ public Dataset execute(CommandContext ctxt) throws CommandException { // Merge the new version into out JPA context, if needed. if ( editVersion.getId() == null || editVersion.getId() == 0L ) { + logger.log(Level.INFO, "editVersion Id is not set: persist data"); ctxt.em().persist(editVersion); } else { + logger.log(Level.INFO, "editVersion Id is already set: merge data"); ctxt.em().merge(editVersion); } + logger.log(Level.INFO, "datafiles attatched to ds:size={0}", getDataset().getFiles().size()); for (DataFile dataFile : getDataset().getFiles()) { if (dataFile.getCreateDate() == null) { dataFile.setCreateDate(getTimestamp()); @@ -108,10 +117,13 @@ public Dataset execute(CommandContext ctxt) throws CommandException { recalculateUNF = true; } } + logger.log(Level.INFO, "recalculateUNF:statsus={0}", recalculateUNF); // we have to merge to update the database but not flush because // we don't want to create two draft versions! Dataset tempDataset = ctxt.em().merge(getDataset()); + logger.log(Level.INFO, "tempDataset={0}", xstream.toXML(tempDataset)); + for (FileMetadata fmd : filesToDelete) { if (!fmd.getDataFile().isReleased()) { // if file is draft (ie. 
new to this version, delete; otherwise just remove filemetadata object) @@ -138,8 +150,10 @@ public Dataset execute(CommandContext ctxt) throws CommandException { tempDataset.getEditVersion().setLastUpdateTime(getTimestamp()); tempDataset.setModificationTime(getTimestamp()); - + Dataset savedDataset = ctxt.em().merge(tempDataset); + logger.log(Level.INFO, "savedDataset={0}", xstream.toXML(savedDataset)); + ctxt.em().flush(); updateDatasetUser(ctxt); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index 9ee00e06809..751ecff1617 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -1,6 +1,8 @@ package edu.harvard.iq.dataverse.util.json; import com.google.gson.Gson; +import com.thoughtworks.xstream.XStream; +import com.thoughtworks.xstream.io.json.JsonHierarchicalStreamDriver; import edu.harvard.iq.dataverse.ControlledVocabularyValue; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.DataFileCategory; @@ -65,7 +67,7 @@ public class JsonParser { private static final Logger logger = Logger.getLogger(JsonParser.class.getCanonicalName()); - + static XStream xstream = new XStream(new JsonHierarchicalStreamDriver()); DatasetFieldServiceBean datasetFieldSvc; MetadataBlockServiceBean blockService; SettingsServiceBean settingsService; @@ -254,11 +256,15 @@ public IpGroup parseIpGroup(JsonObject obj) { } public DatasetVersion parseDatasetVersion(JsonObject obj) throws JsonParseException { + logger.log(Level.INFO, "1-arg-parseDatasetVersion:obj is called"); + logger.log(Level.INFO, "obj:1-arg-parseDatasetVersion={0}",xstream.toXML(obj)); + logger.log(Level.INFO, "within 1-arg-parseDatasetVersion, 2-args-version is called"); return parseDatasetVersion(obj, new DatasetVersion()); } public Dataset parseDataset(JsonObject obj) throws JsonParseException { 
logger.log(Level.INFO, "parseDataset is called"); + logger.log(Level.INFO, "obj: parseDataset={0}",xstream.toXML(obj)); Dataset dataset = new Dataset(); dataset.setAuthority(obj.getString("authority", null) == null ? settingsService.getValueForKey(SettingsServiceBean.Key.Authority) : obj.getString("authority")); @@ -267,6 +273,7 @@ public Dataset parseDataset(JsonObject obj) throws JsonParseException { DatasetVersion dsv = new DatasetVersion(); dsv.setDataset(dataset); + logger.log(Level.INFO, "calling 2-args-parseDatasetVersion method"); dsv = parseDatasetVersion(obj.getJsonObject("datasetVersion"), dsv); List versions = new ArrayList<>(1); versions.add(dsv); @@ -276,7 +283,8 @@ public Dataset parseDataset(JsonObject obj) throws JsonParseException { } public DatasetVersion parseDatasetVersion(JsonObject obj, DatasetVersion dsv) throws JsonParseException { - logger.log(Level.INFO, "parseDatasetVersion is called"); + logger.log(Level.INFO, "2-args-parseDatasetVersion is called"); + logger.log(Level.INFO, "obj:2-args-parseDatasetVersion={0}",xstream.toXML(obj)); try { String archiveNote = obj.getString("archiveNote", null); @@ -354,6 +362,9 @@ private License parseLicense(String inString) { } public List parseMetadataBlocks(JsonObject json) throws JsonParseException { + logger.log(Level.INFO, "within parseMetadataBlocks"); + logger.log(Level.INFO, "json={0}",xstream.toXML(json)); + Set keys = json.keySet(); List fields = new LinkedList<>(); @@ -423,6 +434,14 @@ public List parseFiles(JsonArray metadatasJson, DatasetVersion dsv if ( filemetadataJson.containsKey("dataFile") ) { DataFile dataFile = parseDataFile(filemetadataJson.getJsonObject("dataFile")); dataFile.getFileMetadatas().add(fileMetadata); + if (dsv.getDataset() == null) { + logger.log(Level.INFO, "datasetVersion does not have a dataset; attach a dataset to dsv"); + Dataset dataset = new Dataset(); + dsv.setDataset(dataset); + } + // the following line may cause NullpointerException + // if a Dataset is not 
attached to dsv + // therefore a null test is necessary dataFile.setOwner(dsv.getDataset()); fileMetadata.setDataFile(dataFile); if (dsv.getDataset().getFiles() == null) { @@ -442,6 +461,7 @@ public List parseFiles(JsonArray metadatasJson, DatasetVersion dsv public DataFile parseDataFile(JsonObject datafileJson) { logger.log(Level.INFO, "parseDataFile is called"); + logger.log(Level.INFO, "datafileJson={0}",xstream.toXML(datafileJson)); DataFile dataFile = new DataFile(); Timestamp timestamp = new Timestamp(new Date().getTime()); @@ -529,13 +549,14 @@ public DataFile parseDataFile(JsonObject datafileJson) { // get parsing results of a DataTable List dataTables = parseDataTables(dataTablesJson); logger.log(Level.INFO, "dataTables:size={0}", dataTables.size()); + logger.log(Level.INFO, "returned dataTables={0}", xstream.toXML(dataTables)); dataFile.setDataTables(dataTables); dataFile.setDataTable(dataTables.get(0)); dataTables.get(0).setDataFile(dataFile); dataTables.get(0).setOriginalFileFormat(originalFileFormat); } - + logger.log(Level.INFO, "dataFile: parseDataFile={0}",xstream.toXML(dataFile)); return dataFile; } @@ -560,9 +581,12 @@ public List parseDataTables(JsonArray dataTablesJson){ String UNF = dataTableJson.getString("UNF", null); dataTable.setUnf(UNF); // call the method for pasring dataVariables array - List dataVariables = parseDataVariables(dataTableJson.getJsonArray("dataVariables")); + List dataVariables = parseDataVariables(dataTableJson.getJsonArray("dataVariables"), dataTable); + logger.log(Level.INFO, "returned dataVariables list: size={0}", dataVariables.size()); + dataTable.setDataVariables(dataVariables); dataTables.add(dataTable); + logger.log(Level.INFO, " dataTables: current size={0}", dataTables.size()); } } logger.log(Level.INFO, "dataTables: size(final)={0}", dataTables.size()); @@ -570,7 +594,10 @@ public List parseDataTables(JsonArray dataTablesJson){ } - public List parseDataVariables(JsonArray dataVariablesJson){ + // note: the 
following method has not yet implemented a few of processXXX- + // calls in DataTableImportDDI#processVar + + public List parseDataVariables(JsonArray dataVariablesJson, DataTable dataTable){ logger.log(Level.INFO, "parseDataVariables is called"); List dataVariables = new LinkedList<>(); if ((dataVariablesJson != null) && (!dataVariablesJson.isEmpty())) { @@ -603,19 +630,25 @@ public List parseDataVariables(JsonArray dataVariablesJson){ dataVariable.setFileOrder(dataVariableJson.getInt("fileOrder")); // summaryStatistics - dataVariable.setSummaryStatistics(parseSummaryStatistics(dataVariableJson.getJsonObject("summaryStatistics"))); + dataVariable.setSummaryStatistics(parseSummaryStatistics(dataVariableJson.getJsonObject("summaryStatistics"), dataVariable)); // variableCategories - dataVariable.setCategories(parseVariableCategories(dataVariableJson.getJsonArray("variableCategories"))); + dataVariable.setCategories(parseVariableCategories(dataVariableJson.getJsonArray("variableCategories"), dataVariable)); // UNF dataVariable.setUnf(dataVariableJson.getString("UNF", null)); + + // DataTable (do not forget this) + dataVariable.setDataTable(dataTable); + dataVariables.add(dataVariable); + logger.log(Level.INFO, "dataVariables: current size={0}", dataVariables.size()); } } + logger.log(Level.INFO, "dataVariables:final size={0}", dataVariables.size()); return dataVariables; } - public List parseSummaryStatistics(JsonObject summaryStatisticsJson){ + public List parseSummaryStatistics(JsonObject summaryStatisticsJson, DataVariable dataVariable){ logger.log(Level.INFO, "parseSummaryStatistics is called"); List summaryStatistics = new LinkedList<>(); if (summaryStatisticsJson !=null){ @@ -625,6 +658,7 @@ public List parseSummaryStatistics(JsonObject summaryStatistic SummaryStatistic mean = new SummaryStatistic(); mean.setType(SummaryStatistic.SummaryStatisticType.MEAN); mean.setValue(meanjsn); + mean.setDataVariable(dataVariable); summaryStatistics.add(mean); } // medn @@ 
-633,6 +667,7 @@ public List parseSummaryStatistics(JsonObject summaryStatistic SummaryStatistic medn = new SummaryStatistic(); medn.setType(SummaryStatistic.SummaryStatisticType.MEDN); medn.setValue(mednjsn); + medn.setDataVariable(dataVariable); summaryStatistics.add(medn); } // mode @@ -641,6 +676,7 @@ public List parseSummaryStatistics(JsonObject summaryStatistic SummaryStatistic mode = new SummaryStatistic(); mode.setType(SummaryStatistic.SummaryStatisticType.MODE); mode.setValue(modejsn); + mode.setDataVariable(dataVariable); summaryStatistics.add(mode); } // vald @@ -649,6 +685,7 @@ public List parseSummaryStatistics(JsonObject summaryStatistic SummaryStatistic vald = new SummaryStatistic(); vald.setType(SummaryStatistic.SummaryStatisticType.VALD); vald.setValue(valdjsn); + vald.setDataVariable(dataVariable); summaryStatistics.add(vald); } // invd @@ -657,6 +694,7 @@ public List parseSummaryStatistics(JsonObject summaryStatistic SummaryStatistic invd = new SummaryStatistic(); invd.setType(SummaryStatistic.SummaryStatisticType.INVD); invd.setValue(invdjsn); + invd.setDataVariable(dataVariable); summaryStatistics.add(invd); } // min @@ -665,6 +703,7 @@ public List parseSummaryStatistics(JsonObject summaryStatistic SummaryStatistic min = new SummaryStatistic(); min.setType(SummaryStatistic.SummaryStatisticType.MIN); min.setValue(minjsn); + min.setDataVariable(dataVariable); summaryStatistics.add(min); } // max @@ -673,6 +712,7 @@ public List parseSummaryStatistics(JsonObject summaryStatistic SummaryStatistic max = new SummaryStatistic(); max.setType(SummaryStatistic.SummaryStatisticType.MAX); max.setValue(maxjsn); + max.setDataVariable(dataVariable); summaryStatistics.add(max); } // stdev @@ -681,6 +721,7 @@ public List parseSummaryStatistics(JsonObject summaryStatistic SummaryStatistic stdev = new SummaryStatistic(); stdev.setType(SummaryStatistic.SummaryStatisticType.STDEV); stdev.setValue(stdevjsn); + stdev.setDataVariable(dataVariable); 
summaryStatistics.add(stdev); } } @@ -688,7 +729,7 @@ public List parseSummaryStatistics(JsonObject summaryStatistic } - public List parseVariableCategories(JsonArray variableCategoriesJson){ + public List parseVariableCategories(JsonArray variableCategoriesJson, DataVariable dataVariable){ logger.log(Level.INFO, "parseVariableCategories is called"); List variableCategories = new LinkedList<>(); if ((variableCategoriesJson != null) && (!variableCategoriesJson.isEmpty())){ @@ -700,6 +741,7 @@ public List parseVariableCategories(JsonArray variableCategori // value String value = variableCategoryJson.getString("value", ""); vc.setValue(value); + vc.setDataVariable(dataVariable); variableCategories.add(vc); } } From 205aae67f509e753fe552d7785487eb3d443ce99 Mon Sep 17 00:00:00 2001 From: Akio Sone Date: Fri, 26 Oct 2018 15:35:52 -0400 Subject: [PATCH 35/46] obsolete api was removed --- .../harvard/iq/dataverse/api/BatchImport.java | 52 ------ .../api/imports/ImportServiceBean.java | 169 ------------------ .../iq/dataverse/api/imports/ImportUtil.java | 4 +- .../iq/dataverse/util/json/JsonParser.java | 4 + src/main/webapp/WEB-INF/glassfish-web.xml | 1 - 5 files changed, 5 insertions(+), 225 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java b/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java index 3c01f702085..f6d4e4543b0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java @@ -95,59 +95,7 @@ public Response postImport(String body, @QueryParam("dv") String parentIdtf, @Qu } } - /** - * Import a new Dataset with DDI xml data posted in the request - * - * - * @param parentIdtf the dataverse to import into (id or alias) - * @param apiKey user's api key - * @param fileName Dataset Id string - * @param fileInputStream InputStream of the uploaded Json File - * @return import status (including id of the dataset created) - */ - - @POST - 
@Path("importwoi") - @Consumes({MediaType.MULTIPART_FORM_DATA}) - public Response postImportWoI( - @FormDataParam("dv") String parentIdtf, - @FormDataParam("key") String apiKey, - @FormDataParam("filename") String fileName, - @FormDataParam("file") InputStream fileInputStream) { - logger.log(Level.INFO, " ========= BatchImport#importwoi() is called ========="); - logger.log(Level.INFO, "datavarse Id: number or alias={0}", parentIdtf); - logger.log(Level.INFO, "api key={0}", apiKey); - logger.log(Level.INFO, "filename={0}", fileName); - - - DataverseRequest dataverseRequest; - - try { - dataverseRequest = createDataverseRequest(findAuthenticatedUserOrDie()); - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - if (parentIdtf == null) { - parentIdtf = "root"; - } - - Dataverse owner = findDataverse(parentIdtf); - logger.log(Level.INFO, "dataverse:owner={0}", owner); - if (owner == null) { - return error(Response.Status.NOT_FOUND, "Can't find dataverse with identifier='" + parentIdtf + "'"); - } - - try { - PrintWriter cleanupLog = null; // Cleanup log isn't needed for ImportType == NEW. We don't do any data cleanup in this mode. 
- logger.log(Level.INFO, "calling importService#doImportWoI()"); - JsonObjectBuilder status = importService.doImportWoI(dataverseRequest, owner, fileInputStream, fileName, ImportType.IMPORT_METADATA_ONLY, cleanupLog); - logger.log(Level.INFO, "returned status={0}", status); - return this.ok(status); - } catch (ImportException | IOException e) { - return this.error(Response.Status.BAD_REQUEST, e.getMessage()); - } - } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java index 0b4fe999b08..7f429bd8368 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java @@ -524,176 +524,7 @@ public JsonObjectBuilder doImport(DataverseRequest dataverseRequest, Dataverse o return Json.createObjectBuilder().add("message", status); } -// - /** - creates a new Dataset with provided Json file without invoking ingest requests. 
- * @param dataverseRequest - * @param owner - * @param fileInputStream - * @param importType - * @param cleanupLog - * @return - * @throws edu.harvard.iq.dataverse.api.imports.ImportException - * @throws java.io.IOException - */ - public JsonObjectBuilder doImportWoI(DataverseRequest dataverseRequest, Dataverse owner, InputStream fileInputStream, String fileName, ImportType importType, PrintWriter cleanupLog) throws ImportException, IOException { - - logger.log(Level.INFO, "========== ImportServiceBean#doImportWoI() is called ==========", cleanupLog); - String status = ""; - Long createdId = null; - - try (JsonReader jsonReader = Json.createReader(fileInputStream);) { - JsonObject obj = jsonReader.readObject(); - - JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService); - parser.setLenient(false); - - logger.log(Level.INFO, "dataset={0}",xstream.toXML(obj)); - logger.log(Level.INFO, "parsing the received dataset"); - Dataset ds = parser.parseDataset(obj); - - logger.log(Level.INFO, "dataset={0}",xstream.toXML(ds)); - - // For ImportType.NEW, if the user supplies a global identifier, and it's not a protocol - // we support, it will be rejected. 
- if (importType.equals(ImportType.NEW) || importType.equals(ImportType.IMPORT_METADATA_ONLY)) { - if (ds.getGlobalId() != null && !ds.getProtocol().equals(settingsService.getValueForKey(SettingsServiceBean.Key.Protocol, ""))) { - throw new ImportException("Could not register id " + ds.getGlobalId() + ", protocol not supported"); - } - } - ds.setOwner(owner); - logger.log(Level.INFO, "dataset owner alias={0}", ds.getOwner().getAlias()); - ds.getLatestVersion().setDatasetFields(ds.getLatestVersion().initDatasetFields()); - - logger.log(Level.INFO, "dataset version: after setting the owner={0}", ds.getLatestVersion()); - - // Check data against required contraints - List> violations = ds.getVersions().get(0).validateRequired(); - if (!violations.isEmpty()) { - if (importType.equals(ImportType.HARVEST)) { - // For migration and harvest, add NA for missing required values - for (ConstraintViolation v : violations) { - DatasetField f = v.getRootBean(); - f.setSingleValue(DatasetField.NA_VALUE); - } - } else { - // when importing a new dataset, the import will fail - // if required values are missing. 
- String errMsg = "Error importing data:"; - for (ConstraintViolation v : violations) { - errMsg += " " + v.getMessage(); - } - throw new ImportException(errMsg); - } - } - - // Check data against validation constraints - // If we are migrating and "scrub migration data" is true we attempt to fix invalid data - // if the fix fails stop processing of this file by throwing exception - Set invalidViolations = ds.getVersions().get(0).validate(); - ValidatorFactory factory = Validation.buildDefaultValidatorFactory(); - Validator validator = factory.getValidator(); - if (!invalidViolations.isEmpty()) { - for (ConstraintViolation v : invalidViolations) { - DatasetFieldValue f = v.getRootBean(); - boolean fixed = false; - boolean converted = false; - if ((importType.equals(ImportType.HARVEST)) && settingsService.isTrueForKey(SettingsServiceBean.Key.ScrubMigrationData, false)) { - fixed = processMigrationValidationError(f, cleanupLog, fileName); - converted = true; - if (fixed) { - Set> scrubbedViolations = validator.validate(f); - if (!scrubbedViolations.isEmpty()) { - fixed = false; - } - } - } - if (!fixed) { - if (importType.equals(ImportType.HARVEST)) { - String msg = "Data modified - File: " + fileName + "; Field: " + f.getDatasetField().getDatasetFieldType().getDisplayName() + "; " - + "Invalid value: '" + f.getValue() + "'" + " Converted Value:'" + DatasetField.NA_VALUE + "'"; - cleanupLog.println(msg); - f.setValue(DatasetField.NA_VALUE); - - } else { - String msg = " Validation error for "; - if (converted) { - msg += "converted "; - } - msg += "value: " + f.getValue() + ", " + f.getValidationMessage(); - throw new ImportException(msg); - } - } - } - } - logger.log(Level.INFO, "check whether this dataset exists"); - // check whether the imported dataset exists - Dataset existingDs = datasetService.findByGlobalId(ds.getGlobalIdString()); - - if (existingDs != null) { - // case #1: the dataset exists - // case #1.1: harvesting case - if 
(importType.equals(ImportType.HARVEST)) { - logger.log(Level.INFO, "case #1.1 harvested case"); - // For harvested datasets, there should always only be one version. - // We will replace the current version with the imported version. - if (existingDs.getVersions().size() != 1) { - throw new ImportException("Error importing Harvested Dataset, existing dataset has " + existingDs.getVersions().size() + " versions"); - } - - logger.log(Level.INFO, "calling DestroyDatasetCommand"); - engineSvc.submit(new DestroyDatasetCommand(existingDs, dataverseRequest)); - Dataset managedDs = engineSvc.submit(new CreateHarvestedDatasetCommand(ds, dataverseRequest)); - status = " updated dataset, id=" + managedDs.getId() + "."; - - logger.log(Level.INFO, "case #1.1: status={0}", status); - } else { - // case #1.2 non-harvesting import - // If we are adding a new version to an existing dataset, - // check that the version number isn't already in the dataset - logger.log(Level.INFO, "case #1.2 non-harvesting import case"); - logger.log(Level.INFO, "datasetVersion from the uploaded data", ds.getLatestVersion().getVersionNumber()); - for (DatasetVersion dsv : existingDs.getVersions()) { - logger.log(Level.INFO, "this datasetVersion={0}", dsv.getVersionNumber()); - if (dsv.getVersionNumber().equals(ds.getLatestVersion().getVersionNumber())) { - throw new ImportException("VersionNumber " + ds.getLatestVersion().getVersionNumber() + " already exists in dataset " + existingDs.getGlobalId()); - } - } - logger.log(Level.INFO, "existing versions do not have this one"); - - logger.log(Level.INFO, "calling CreateDatasetVersionCommand"); - DatasetVersion dsv = engineSvc.submit(new CreateDatasetVersionCommand(dataverseRequest, existingDs, ds.getVersions().get(0))); - - - status = " created datasetVersion, for dataset " + dsv.getDataset().getGlobalId(); - logger.log(Level.INFO, "case #1.2 status={0}", status); - - createdId = dsv.getId(); - logger.log(Level.INFO, "createdId={0}", createdId); - } - - 
} else { - // case #2 dataset does not exist - logger.log(Level.INFO, "case #2: dataset does not exist=> new dataset"); - logger.log(Level.INFO, "calling CreateNewDatasetCommand"); - Dataset managedDs = engineSvc.submit(new CreateNewDatasetCommand(ds, dataverseRequest)); - status = " created dataset, id=" + managedDs.getId() + "."; - logger.log(Level.INFO, "case #2: new dataset: status={0}", status); - createdId = managedDs.getId(); - logger.log(Level.INFO, "case #2: createdId={0}", createdId); - } - - } catch (JsonParseException ex) { - logger.log(Level.INFO, "Error parsing datasetVersion: {0}", ex.getMessage()); - throw new ImportException("Error parsing datasetVersion: " + ex.getMessage(), ex); - } catch (CommandException ex) { - logger.log(Level.INFO, "Error excuting Create dataset command: {0}", ex.getMessage()); - throw new ImportException("Error excuting dataverse command: " + ex.getMessage(), ex); - } - - return Json.createObjectBuilder().add("message", status); - } private boolean processMigrationValidationError(DatasetFieldValue f, PrintWriter cleanupLog, String fileName) { if (f.getDatasetField().getDatasetFieldType().getName().equals(DatasetFieldConstant.datasetContactEmail)) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportUtil.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportUtil.java index 89d8312e077..6c3e35c1d73 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportUtil.java @@ -15,9 +15,7 @@ public enum ImportType{ NEW, /** Data is harvested from another Dataverse instance */ - HARVEST, - /** after TRSA */ - IMPORT_METADATA_ONLY + HARVEST }; } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index 0fb27942b98..aad4ad142e1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -466,6 +466,10 @@ public DataFile parseDataFile(JsonObject datafileJson) { dataFile.setModificationTime(timestamp); dataFile.setPermissionModificationTime(timestamp); + // as of version 4.9.4, missing datafile-items that exist in JsonPrinter + // persistentId + // pidURL + if ( datafileJson.containsKey("filesize") ) { dataFile.setFilesize(datafileJson.getJsonNumber("filesize").longValueExact()); } diff --git a/src/main/webapp/WEB-INF/glassfish-web.xml b/src/main/webapp/WEB-INF/glassfish-web.xml index ecd3ba15c40..335d5bc9b94 100644 --- a/src/main/webapp/WEB-INF/glassfish-web.xml +++ b/src/main/webapp/WEB-INF/glassfish-web.xml @@ -1,7 +1,6 @@ - / From bc17b3517b9e60bfcacca56b94f3ae3dd910ada4 Mon Sep 17 00:00:00 2001 From: Akio Sone Date: Tue, 30 Oct 2018 21:14:26 -0400 Subject: [PATCH 36/46] import api changes and more logging lines --- .../harvard/iq/dataverse/api/BatchImport.java | 77 +++++++- .../iq/dataverse/api/BatchServiceBean.java | 13 +- .../api/imports/ImportDDIServiceBean.java | 4 +- .../api/imports/ImportServiceBean.java | 174 +++++++++++++++++- .../iq/dataverse/api/imports/ImportUtil.java | 6 + .../iq/dataverse/util/json/JsonParser.java | 11 +- 6 files changed, 267 insertions(+), 18 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java b/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java index f6d4e4543b0..57b8c13ef47 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java @@ -10,6 +10,7 @@ import edu.harvard.iq.dataverse.MetadataBlockServiceBean; import edu.harvard.iq.dataverse.api.imports.ImportException; +import edu.harvard.iq.dataverse.api.imports.ImportUtil.ImportFileType; import edu.harvard.iq.dataverse.api.imports.ImportUtil.ImportType; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import 
edu.harvard.iq.dataverse.settings.SettingsServiceBean; @@ -55,7 +56,7 @@ public class BatchImport extends AbstractApiBean { @GET @Path("harvest") public Response harvest(@QueryParam("path") String fileDir, @QueryParam("dv") String parentIdtf, @QueryParam("createDV") Boolean createDV, @QueryParam("key") String apiKey) throws IOException { - return startBatchJob(fileDir, parentIdtf, apiKey, ImportType.HARVEST, createDV); + return startBatchJob(fileDir, parentIdtf, apiKey, ImportType.HARVEST, createDV, ImportFileType.XML); } @@ -99,6 +100,51 @@ public Response postImport(String body, @QueryParam("dv") String parentIdtf, @Qu + /** + * Import a new Dataset with JSON data posted in the request + * + * @param body the JSON + * @param parentIdtf the dataverse to import into (id or alias) + * @param apiKey user's api key + * @return import status (including id of the dataset created) + */ + @POST + @Path("importJson") + public Response postImportJson(String body, @QueryParam("dv") String parentIdtf, @QueryParam("key") String apiKey) { + + + DataverseRequest dataverseRequest; + try { + dataverseRequest = createDataverseRequest(findAuthenticatedUserOrDie()); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + + if (parentIdtf == null) { + parentIdtf = "root"; + } else { + logger.log(Level.INFO, "dvId={0}", parentIdtf); + } + Dataverse owner = findDataverse(parentIdtf); + logger.log(Level.INFO, "Dataverse:alias={0}", owner.getAlias()); + if (owner == null) { + return error(Response.Status.NOT_FOUND, "Can't find dataverse with identifier='" + parentIdtf + "'"); + } + try { + PrintWriter cleanupLog = null; // Cleanup log isn't needed for ImportType == NEW. We don't do any data cleanup in this mode. + String filename = null; // Since this is a single input from a POST, there is no file that we are reading from. 
+ JsonObjectBuilder status = importService.doImportJson(dataverseRequest, owner, body, filename, ImportType.NEW, cleanupLog); + return this.ok(status); + } catch (ImportException | IOException e) { + return this.error(Response.Status.BAD_REQUEST, e.getMessage()); + } + } + + + + + + /** * Import single or multiple datasets that are in the local filesystem * @@ -112,11 +158,34 @@ public Response postImport(String body, @QueryParam("dv") String parentIdtf, @Qu @Path("import") public Response getImport(@QueryParam("path") String fileDir, @QueryParam("dv") String parentIdtf, @QueryParam("createDV") Boolean createDV, @QueryParam("key") String apiKey) { - return startBatchJob(fileDir, parentIdtf, apiKey, ImportType.NEW, createDV); + return startBatchJob(fileDir, parentIdtf, apiKey, ImportType.NEW, createDV, ImportFileType.XML); } - private Response startBatchJob(String fileDir, String parentIdtf, String apiKey, ImportType importType, Boolean createDV) { + + + + /** + * Import single or multiple datasets that are in the local filesystem + * + * @param fileDir the absolute path of the file or directory (all files + * within the directory will be imported + * @param parentIdtf the dataverse to import into (id or alias) + * @param apiKey user's api key + * @return import status (including id's of the datasets created) + */ + @GET + @Path("importJson") + public Response getImportJson(@QueryParam("path") String fileDir, @QueryParam("dv") String parentIdtf, @QueryParam("createDV") Boolean createDV, @QueryParam("key") String apiKey) { + + return startBatchJob(fileDir, parentIdtf, apiKey, ImportType.NEW, createDV, ImportFileType.JSON); + + } + + + + + private Response startBatchJob(String fileDir, String parentIdtf, String apiKey, ImportType importType, Boolean createDV, ImportFileType importFileType) { if (createDV == null) { createDV = Boolean.FALSE; } @@ -138,7 +207,7 @@ private Response startBatchJob(String fileDir, String parentIdtf, String apiKey, return 
error(Response.Status.NOT_FOUND, "Can't find dataverse with identifier='" + parentIdtf + "'"); } } - batchService.processFilePath(fileDir, parentIdtf, dataverseRequest, owner, importType, createDV); + batchService.processFilePath(fileDir, parentIdtf, dataverseRequest, owner, importType, createDV, importFileType); } catch (ImportException e) { return this.error(Response.Status.BAD_REQUEST, "Import Exception, " + e.getMessage()); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/BatchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/BatchServiceBean.java index 8fe58298481..e6e4365dc2d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/BatchServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/BatchServiceBean.java @@ -5,6 +5,7 @@ import edu.harvard.iq.dataverse.DataverseServiceBean; import edu.harvard.iq.dataverse.api.imports.ImportException; import edu.harvard.iq.dataverse.api.imports.ImportUtil; +import edu.harvard.iq.dataverse.api.imports.ImportUtil.ImportFileType; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import java.io.File; import java.io.FileWriter; @@ -36,7 +37,7 @@ public class BatchServiceBean { @Asynchronous - public void processFilePath(String fileDir, String parentIdtf, DataverseRequest dataverseRequest, Dataverse owner, ImportUtil.ImportType importType, Boolean createDV) { + public void processFilePath(String fileDir, String parentIdtf, DataverseRequest dataverseRequest, Dataverse owner, ImportUtil.ImportType importType, Boolean createDV, ImportFileType importFileType) { logger.info("BEGIN IMPORT"); PrintWriter validationLog = null; PrintWriter cleanupLog = null; @@ -54,13 +55,13 @@ public void processFilePath(String fileDir, String parentIdtf, DataverseRequest if (!file.isHidden()) { if (file.isDirectory()) { try { - status.add(handleDirectory(dataverseRequest, file, importType, validationLog, cleanupLog, createDV)); + status.add(handleDirectory(dataverseRequest, file, importType, validationLog, 
cleanupLog, createDV, importFileType)); } catch (ImportException e) { logger.log(Level.SEVERE, "Exception in handleDirectory() for "+ file.getName(),e); } } else { try { - status.add(importService.handleFile(dataverseRequest, owner, file, importType, validationLog, cleanupLog)); + status.add(importService.handleFile(dataverseRequest, owner, file, importType, validationLog, cleanupLog, importFileType)); } catch(ImportException e) { logger.log(Level.SEVERE, "Exception in handleFile() for "+ file.getName(),e); } @@ -69,7 +70,7 @@ public void processFilePath(String fileDir, String parentIdtf, DataverseRequest } } } else { - status.add(importService.handleFile(dataverseRequest, owner, dir, importType, validationLog, cleanupLog)); + status.add(importService.handleFile(dataverseRequest, owner, dir, importType, validationLog, cleanupLog, importFileType)); } } @@ -83,7 +84,7 @@ public void processFilePath(String fileDir, String parentIdtf, DataverseRequest } - public JsonArrayBuilder handleDirectory(DataverseRequest dataverseRequest, File dir, ImportUtil.ImportType importType, PrintWriter validationLog, PrintWriter cleanupLog, Boolean createDV) throws ImportException{ + public JsonArrayBuilder handleDirectory(DataverseRequest dataverseRequest, File dir, ImportUtil.ImportType importType, PrintWriter validationLog, PrintWriter cleanupLog, Boolean createDV, ImportFileType importFileType) throws ImportException{ JsonArrayBuilder status = Json.createArrayBuilder(); Dataverse owner = dataverseService.findByAlias(dir.getName()); if (owner == null ) { @@ -97,7 +98,7 @@ public JsonArrayBuilder handleDirectory(DataverseRequest dataverseRequest, File for (File file : dir.listFiles()) { if (!file.isHidden()) { try { - JsonObjectBuilder fileStatus = importService.handleFile(dataverseRequest, owner, file, importType, validationLog, cleanupLog); + JsonObjectBuilder fileStatus = importService.handleFile(dataverseRequest, owner, file, importType, validationLog, cleanupLog, importFileType); 
status.add(fileStatus); } catch (ImportException | IOException e) { status.add(Json.createObjectBuilder().add("importStatus", "Exception importing " + file.getName() + ", message = " + e.getMessage())); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java index 591998d117c..9968cd87302 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java @@ -40,6 +40,8 @@ // -- L.A. 4.5 @Stateless public class ImportDDIServiceBean { + + private static final Logger logger = Logger.getLogger(ImportDDIServiceBean.class.getName()); public static final String SOURCE_DVN_3_0 = "DVN_3_0"; public static final String NAMING_PROTOCOL_HANDLE = "hdl"; @@ -817,7 +819,7 @@ private void processCustomField(XMLStreamReader xmlr, DatasetVersionDTO dvDTO) t private void handleChildField(MetadataBlockDTO customBlock, DatasetFieldType dsfType, String fieldValue) throws ImportException { DatasetFieldType parent = dsfType.getParentDatasetFieldType(); - + logger.log(Level.INFO, "fieldValue={0}", fieldValue); // Create child Field FieldDTO child = null; if (dsfType.isAllowControlledVocabulary()) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java index 7f429bd8368..b4caafcd060 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java @@ -24,6 +24,7 @@ import edu.harvard.iq.dataverse.EjbDataverseEngine; import edu.harvard.iq.dataverse.MetadataBlockServiceBean; import edu.harvard.iq.dataverse.api.dto.DatasetDTO; +import edu.harvard.iq.dataverse.api.imports.ImportUtil.ImportFileType; import edu.harvard.iq.dataverse.api.imports.ImportUtil.ImportType; import 
edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; @@ -153,13 +154,18 @@ public Dataverse createDataverse(String dvName, DataverseRequest dataverseReques } @TransactionAttribute(REQUIRES_NEW) - public JsonObjectBuilder handleFile(DataverseRequest dataverseRequest, Dataverse owner, File file, ImportType importType, PrintWriter validationLog, PrintWriter cleanupLog) throws ImportException, IOException { + public JsonObjectBuilder handleFile(DataverseRequest dataverseRequest, Dataverse owner, File file, ImportType importType, PrintWriter validationLog, PrintWriter cleanupLog, ImportFileType importFileType) throws ImportException, IOException { System.out.println("handling file: " + file.getAbsolutePath()); - String ddiXMLToParse; + String fileToParse; + JsonObjectBuilder status; try { - ddiXMLToParse = new String(Files.readAllBytes(file.toPath())); - JsonObjectBuilder status = doImport(dataverseRequest, owner, ddiXMLToParse,file.getParentFile().getName() + "/" + file.getName(), importType, cleanupLog); + fileToParse = new String(Files.readAllBytes(file.toPath())); + if (importFileType == ImportUtil.ImportFileType.JSON) { + status = doImportJson(dataverseRequest, owner, fileToParse, file.getParentFile().getName() + "/" + file.getName(), importType, cleanupLog); + } else { + status = doImport(dataverseRequest, owner, fileToParse, file.getParentFile().getName() + "/" + file.getName(), importType, cleanupLog); + } status.add("file", file.getName()); logger.log(Level.INFO, "completed doImport {0}/{1}", new Object[]{file.getParentFile().getName(), file.getName()}); return status; @@ -386,7 +392,7 @@ public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, Harve } return importedDataset; } - +/* public JsonObjectBuilder doImport(DataverseRequest dataverseRequest, Dataverse owner, String xmlToParse, String fileName, ImportType importType, PrintWriter cleanupLog) throws 
ImportException, IOException { String status = ""; @@ -524,7 +530,165 @@ public JsonObjectBuilder doImport(DataverseRequest dataverseRequest, Dataverse o return Json.createObjectBuilder().add("message", status); } +*/ + + public JsonObjectBuilder doImport(DataverseRequest dataverseRequest, Dataverse owner, String xmlToParse, String fileName, ImportType importType, PrintWriter cleanupLog) throws ImportException, IOException { + String status = ""; + Long createdId = null; + DatasetDTO dsDTO = null; + try { + + dsDTO = importDDIService.doImport(importType, xmlToParse); + } catch (XMLStreamException e) { + throw new ImportException("XMLStreamException" + e); + } + // convert DTO to Json, + Gson gson = new GsonBuilder().setPrettyPrinting().create(); + String json = gson.toJson(dsDTO); + return doImportJson(dataverseRequest, owner, json, fileName, importType, cleanupLog); + } + + + public JsonObjectBuilder doImportJson(DataverseRequest dataverseRequest, Dataverse owner, String json, String fileName, ImportType importType, PrintWriter cleanupLog) throws ImportException, IOException { + String status = ""; + Long createdId = null; +// DatasetDTO dsDTO = null; +// try { +// +// dsDTO = importDDIService.doImport(importType, xmlToParse); +// } catch (XMLStreamException e) { +// throw new ImportException("XMLStreamException" + e); +// } +// // convert DTO to Json, +// Gson gson = new GsonBuilder().setPrettyPrinting().create(); +// String json = gson.toJson(dsDTO); + JsonReader jsonReader = Json.createReader(new StringReader(json)); + JsonObject obj = jsonReader.readObject(); + //and call parse Json to read it into a dataset + try { + JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService); + parser.setLenient(!importType.equals(ImportType.NEW)); + Dataset ds = parser.parseDataset(obj); + + // For ImportType.NEW, if the user supplies a global identifier, and it's not a protocol + // we support, it will be rejected. 
+ if (importType.equals(ImportType.NEW)) { + if (ds.getGlobalIdString() != null && !ds.getProtocol().equals(settingsService.getValueForKey(SettingsServiceBean.Key.Protocol, ""))) { + throw new ImportException("Could not register id " + ds.getGlobalIdString() + ", protocol not supported"); + } + } + + ds.setOwner(owner); + ds.getLatestVersion().setDatasetFields(ds.getLatestVersion().initDatasetFields()); + + // Check data against required contraints + List> violations = ds.getVersions().get(0).validateRequired(); + if (!violations.isEmpty()) { + if ( importType.equals(ImportType.HARVEST) ) { + // For migration and harvest, add NA for missing required values + for (ConstraintViolation v : violations) { + DatasetField f = v.getRootBean(); + f.setSingleValue(DatasetField.NA_VALUE); + } + } else { + // when importing a new dataset, the import will fail + // if required values are missing. + String errMsg = "Error importing data:"; + for (ConstraintViolation v : violations) { + errMsg += " " + v.getMessage(); + } + throw new ImportException(errMsg); + } + } + + // Check data against validation constraints + // If we are migrating and "scrub migration data" is true we attempt to fix invalid data + // if the fix fails stop processing of this file by throwing exception + Set invalidViolations = ds.getVersions().get(0).validate(); + ValidatorFactory factory = Validation.buildDefaultValidatorFactory(); + Validator validator = factory.getValidator(); + if (!invalidViolations.isEmpty()) { + for (ConstraintViolation v : invalidViolations) { + DatasetFieldValue f = v.getRootBean(); + boolean fixed = false; + boolean converted = false; + if ( importType.equals(ImportType.HARVEST) && + settingsService.isTrueForKey(SettingsServiceBean.Key.ScrubMigrationData, false)) { + fixed = processMigrationValidationError(f, cleanupLog, fileName); + converted = true; + if (fixed) { + Set> scrubbedViolations = validator.validate(f); + if (!scrubbedViolations.isEmpty()) { + fixed = false; + } + } 
+ } + if (!fixed) { + if (importType.equals(ImportType.HARVEST)) { + String msg = "Data modified - File: " + fileName + "; Field: " + f.getDatasetField().getDatasetFieldType().getDisplayName() + "; " + + "Invalid value: '" + f.getValue() + "'" + " Converted Value:'" + DatasetField.NA_VALUE + "'"; + cleanupLog.println(msg); + f.setValue(DatasetField.NA_VALUE); + + } else { + String msg = " Validation error for "; + if (converted) { + msg += "converted "; + } + msg += "value: " + f.getValue() + ", " + f.getValidationMessage(); + throw new ImportException(msg); + } + } + } + } + + + Dataset existingDs = datasetService.findByGlobalId(ds.getGlobalIdString()); + + if (existingDs != null) { + if (importType.equals(ImportType.HARVEST)) { + // For harvested datasets, there should always only be one version. + // We will replace the current version with the imported version. + if (existingDs.getVersions().size() != 1) { + throw new ImportException("Error importing Harvested Dataset, existing dataset has " + existingDs.getVersions().size() + " versions"); + } + engineSvc.submit(new DestroyDatasetCommand(existingDs, dataverseRequest)); + Dataset managedDs = engineSvc.submit(new CreateHarvestedDatasetCommand(ds, dataverseRequest)); + status = " updated dataset, id=" + managedDs.getId() + "."; + + } else { + // If we are adding a new version to an existing dataset, + // check that the version number isn't already in the dataset + for (DatasetVersion dsv : existingDs.getVersions()) { + if (dsv.getVersionNumber().equals(ds.getLatestVersion().getVersionNumber())) { + throw new ImportException("VersionNumber " + ds.getLatestVersion().getVersionNumber() + " already exists in dataset " + existingDs.getGlobalIdString()); + } + } + DatasetVersion dsv = engineSvc.submit(new CreateDatasetVersionCommand(dataverseRequest, existingDs, ds.getVersions().get(0))); + status = " created datasetVersion, for dataset "+ dsv.getDataset().getGlobalIdString(); + createdId = dsv.getId(); + } + + } else 
{ + Dataset managedDs = engineSvc.submit(new CreateNewDatasetCommand(ds, dataverseRequest)); + status = " created dataset, id=" + managedDs.getId() + "."; + createdId = managedDs.getId(); + } + + } catch (JsonParseException ex) { + logger.log(Level.INFO, "Error parsing datasetVersion: {0}", ex.getMessage()); + throw new ImportException("Error parsing datasetVersion: " + ex.getMessage(), ex); + } catch (CommandException ex) { + logger.log(Level.INFO, "Error excuting Create dataset command: {0}", ex.getMessage()); + throw new ImportException("Error excuting dataverse command: " + ex.getMessage(), ex); + } + return Json.createObjectBuilder().add("message", status); + } + + + + private boolean processMigrationValidationError(DatasetFieldValue f, PrintWriter cleanupLog, String fileName) { if (f.getDatasetField().getDatasetFieldType().getName().equals(DatasetFieldConstant.datasetContactEmail)) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportUtil.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportUtil.java index 6c3e35c1d73..ef46b0611d9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportUtil.java @@ -18,5 +18,11 @@ public enum ImportType{ HARVEST }; + + public enum ImportFileType{ + XML, + JSON + }; + } \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index aad4ad142e1..96fc7dc9d3f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -363,9 +363,10 @@ private License parseLicense(String inString) { public List parseMetadataBlocks(JsonObject json) throws JsonParseException { logger.log(Level.INFO, "within parseMetadataBlocks"); - logger.log(Level.INFO, "json={0}",xstream.toXML(json)); + logger.log(Level.FINE, 
"json={0}",xstream.toXML(json)); Set keys = json.keySet(); + logger.log(Level.INFO, "keys={0}", xstream.toXML(keys)); List fields = new LinkedList<>(); for (String blockName : keys) { @@ -391,6 +392,7 @@ public List parseMultipleFieldsForDelete(JsonObject json) throws J } private List parseFieldsFromArray(JsonArray fieldsArray, Boolean testType) throws JsonParseException { + logger.log(Level.INFO, "entering parseFieldsFromArray method:testType={0}", testType); List fields = new LinkedList<>(); for (JsonObject fieldJson : fieldsArray.getValuesAs(JsonObject.class)) { try { @@ -805,17 +807,22 @@ public DatasetField parseField(JsonObject json) throws JsonParseException{ public DatasetField parseField(JsonObject json, Boolean testType) throws JsonParseException { + logger.log(Level.INFO, "parseField: testType={0}", testType); if (json == null) { return null; } DatasetField ret = new DatasetField(); DatasetFieldType type = datasetFieldSvc.findByNameOpt(json.getString("typeName", "")); - + if (type == null) { throw new JsonParseException("Can't find type '" + json.getString("typeName", "") + "'"); } + logger.log(Level.INFO, "DatasetFieldType:name={0}", type.getName()); + logger.log(Level.INFO, "testType={0}", testType); + logger.log(Level.INFO, "type.isAllowMultiples()={0}", type.isAllowMultiples()); + logger.log(Level.INFO, "json={0}", xstream.toXML(json)); if (testType && type.isAllowMultiples() != json.getBoolean("multiple")) { throw new JsonParseException("incorrect multiple for field " + json.getString("typeName", "")); } From fc04841d60126f9e44127f8a83d1c6e0bd77c0c7 Mon Sep 17 00:00:00 2001 From: Akio Sone Date: Tue, 30 Oct 2018 22:35:01 -0400 Subject: [PATCH 37/46] missing context-root tag was reinstated --- src/main/webapp/WEB-INF/glassfish-web.xml | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/webapp/WEB-INF/glassfish-web.xml b/src/main/webapp/WEB-INF/glassfish-web.xml index 335d5bc9b94..ecd3ba15c40 100644 --- 
a/src/main/webapp/WEB-INF/glassfish-web.xml +++ b/src/main/webapp/WEB-INF/glassfish-web.xml @@ -1,6 +1,7 @@ + / From bac928200650585468d11641f3f1f8005fe5cdbf Mon Sep 17 00:00:00 2001 From: Akio Sone Date: Fri, 2 Nov 2018 15:51:16 -0400 Subject: [PATCH 38/46] ddi-import-related updates --- .../api/imports/ImportDDIServiceBean.java | 104 ++++++++++++++++-- .../api/imports/ImportServiceBean.java | 1 + .../iq/dataverse/util/json/JsonParser.java | 2 + 3 files changed, 95 insertions(+), 12 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java index 9968cd87302..3862909963b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java @@ -101,6 +101,17 @@ public class ImportDDIServiceBean { @EJB DatasetFieldServiceBean datasetFieldService; + // required field flags + // subject + boolean isSubjectAdded=false; + String subjectText; + String defaultSubjectText="Other"; + // email + boolean isContactEmailAdded = false; + String defaultEmailAddress = "emailAddressNotFound@dataverse.org"; + boolean isAbstractAdded = false; + String defaultAbsractText="Required description text(abstract tag) was not found"; + // TODO: stop passing the xml source as a string; (it could be huge!) -- L.A. 4.5 // TODO: what L.A. Said. 
public DatasetDTO doImport(ImportType importType, String xmlToParse) throws XMLStreamException, ImportException { @@ -191,15 +202,29 @@ private void processDDI(ImportType importType, XMLStreamReader xmlr, DatasetDTO processCodeBook(importType, xmlr, datasetDTO, filesMap); MetadataBlockDTO citationBlock = datasetDTO.getDatasetVersion().getMetadataBlocks().get("citation"); - if (codeBookLevelId != null && !codeBookLevelId.isEmpty()) { + if (StringUtils.isNotBlank(codeBookLevelId)) { if (citationBlock.getField("otherId")==null) { + logger.log(Level.INFO, "field:otherId does not exist"); + List> otherIds = new ArrayList<>(); + HashSet set = new HashSet<>(); + addToSet(set, "otherIdValue", codeBookLevelId); + if (!set.isEmpty()){ + otherIds.add(set); + } + citationBlock.addField(FieldDTO.createMultipleCompoundFieldDTO("otherId", otherIds)); // this means no ids were found during the parsing of the // study description section. we'll use the one we found in // the codeBook entry: - FieldDTO otherIdValue = FieldDTO.createPrimitiveFieldDTO("otherIdValue", codeBookLevelId); - FieldDTO otherId = FieldDTO.createCompoundFieldDTO("otherId", otherIdValue); - citationBlock.getFields().add(otherId); + // FieldDTO otherIdValue = FieldDTO.createPrimitiveFieldDTO("otherIdValue", codeBookLevelId); + // FieldDTO otherId = FieldDTO.createCompoundFieldDTO("otherId", otherIdValue); + // citationBlock.getFields().add(otherId); + + + + + } else { + logger.log(Level.INFO, "field:otherId already exits"); } } if (isHarvestImport(importType)) { @@ -482,21 +507,63 @@ private void processStdyInfo(XMLStreamReader xmlr, DatasetVersionDTO dvDTO) thro for (int event = xmlr.next(); event != XMLStreamConstants.END_DOCUMENT; event = xmlr.next()) { if (event == XMLStreamConstants.START_ELEMENT) { if (xmlr.getLocalName().equals("subject")) { - processSubject(xmlr, getCitation(dvDTO)); +// subjectText = parseText(xmlr); +// if (StringUtils.isBlank(subjectText)){ +// subjectText = defaultSubjectText; +// 
logger.log(Level.INFO, "subject is set to default"); +// } else { +// logger.log(Level.INFO, "subjectText={0}", subjectText); +// } + + processSubject(xmlr, getCitation(dvDTO)); } else if (xmlr.getLocalName().equals("abstract")) { HashSet set = new HashSet<>(); - addToSet(set,"dsDescriptionDate", xmlr.getAttributeValue(null, "date")); - addToSet(set,"dsDescriptionValue", parseText(xmlr, "abstract")); + addToSet(set, "dsDescriptionDate", xmlr.getAttributeValue(null, "date")); + addToSet(set, "dsDescriptionValue", parseText(xmlr, "abstract")); if (!set.isEmpty()) { + isAbstractAdded=true; descriptions.add(set); } + + } else if (xmlr.getLocalName().equals("sumDscr")) { + processSumDscr(xmlr, dvDTO); + } else if (xmlr.getLocalName().equals("notes")) { + processNotes(xmlr, dvDTO); - } else if (xmlr.getLocalName().equals("sumDscr")) processSumDscr(xmlr, dvDTO); - - else if (xmlr.getLocalName().equals("notes")) processNotes(xmlr,dvDTO); + } else if (xmlr.getLocalName().equals("contact")) { + HashSet set = new HashSet<>(); + + String emailText = xmlr.getAttributeValue(null, "email"); + if (StringUtils.isBlank(emailText)) { + emailText = defaultEmailAddress; + } + + addToSet(set, "datasetContactEmail", emailText); + addToSet(set, "datasetContactAffiliation", xmlr.getAttributeValue(null, "affiliation")); + addToSet(set, "datasetContactName", parseText(xmlr)); + if (!set.isEmpty()) { + + if (getCitation(dvDTO).getField("datasetContact") == null) { + List> datasetContacts = new ArrayList<>(); + datasetContacts.add(set); + getCitation(dvDTO).addField(FieldDTO.createMultipleCompoundFieldDTO("datasetContact", datasetContacts)); + } else { + getCitation(dvDTO).getField("datasetContact").getMultipleCompound().add(set); + } + } + } } else if (event == XMLStreamConstants.END_ELEMENT) { if (xmlr.getLocalName().equals("stdyInfo") ) { - if (descriptions.size()>0) { + if (!isAbstractAdded) { + logger.log(Level.INFO, "abstract tag was not found; default text is supplied because this is 
one of the required fields"); + HashSet set = new HashSet<>(); + addToSet(set, "dsDescriptionDate", null); + addToSet(set, "dsDescriptionValue", defaultAbsractText); + if (!set.isEmpty()) { + descriptions.add(set); + } + } + if (descriptions.size() > 0) { getCitation(dvDTO).getFields().add(FieldDTO.createMultipleCompoundFieldDTO("dsDescription", descriptions)); } return; @@ -505,6 +572,8 @@ private void processStdyInfo(XMLStreamReader xmlr, DatasetVersionDTO dvDTO) thro } } private void processSubject(XMLStreamReader xmlr, MetadataBlockDTO citation) throws XMLStreamException { + logger.log(Level.INFO, "processSubject is called"); + List subjects = new ArrayList<>(); List> keywords = new ArrayList<>(); List> topicClasses = new ArrayList<>(); for (int event = xmlr.next(); event != XMLStreamConstants.END_DOCUMENT; event = xmlr.next()) { @@ -530,6 +599,11 @@ private void processSubject(XMLStreamReader xmlr, MetadataBlockDTO citation) thr } } else if (event == XMLStreamConstants.END_ELEMENT) { if (xmlr.getLocalName().equals("subject")) { + subjectText=defaultSubjectText; + subjects.add(subjectText); + citation.getFields().add(FieldDTO.createMultipleVocabFieldDTO("subject", subjects)); + + if (keywords.size()>0) { citation.getFields().add(FieldDTO.createMultipleCompoundFieldDTO("keyword", keywords)); } @@ -1184,7 +1258,13 @@ private void processDistStmt(XMLStreamReader xmlr, MetadataBlockDTO citation) th } else if (xmlr.getLocalName().equals("contact")) { HashSet set = new HashSet<>(); - addToSet(set, "datasetContactEmail", xmlr.getAttributeValue(null, "email")); + + String emailText = xmlr.getAttributeValue(null, "email"); + if (StringUtils.isBlank(emailText)){ + emailText=defaultEmailAddress; + } + //addToSet(set, "datasetContactEmail", xmlr.getAttributeValue(null, "email")); + addToSet(set, "datasetContactEmail", emailText); addToSet(set, "datasetContactAffiliation", xmlr.getAttributeValue(null, "affiliation")); addToSet(set, "datasetContactName", parseText(xmlr)); 
datasetContacts.add(set); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java index b4caafcd060..ddbd78a9c03 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java @@ -584,6 +584,7 @@ public JsonObjectBuilder doImportJson(DataverseRequest dataverseRequest, Dataver // Check data against required contraints List> violations = ds.getVersions().get(0).validateRequired(); + logger.log(Level.INFO, "DatasetField: how many violations:{0}", violations.size()); if (!violations.isEmpty()) { if ( importType.equals(ImportType.HARVEST) ) { // For migration and harvest, add NA for missing required values diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index 96fc7dc9d3f..e0209282630 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -823,6 +823,8 @@ public DatasetField parseField(JsonObject json, Boolean testType) throws JsonPar logger.log(Level.INFO, "testType={0}", testType); logger.log(Level.INFO, "type.isAllowMultiples()={0}", type.isAllowMultiples()); logger.log(Level.INFO, "json={0}", xstream.toXML(json)); + + if (testType && type.isAllowMultiples() != json.getBoolean("multiple")) { throw new JsonParseException("incorrect multiple for field " + json.getString("typeName", "")); } From 4851f17b2d13ba9222b8a41e55e86d90d794ba6c Mon Sep 17 00:00:00 2001 From: Akio Sone Date: Wed, 14 Nov 2018 14:49:38 -0500 Subject: [PATCH 39/46] add trsa_registry table --- .../iq/dataverse/trsa/TrsaRegistry.java | 225 ++++++++++++++++++ 1 file changed, 225 insertions(+) create mode 100644 src/main/java/edu/harvard/iq/dataverse/trsa/TrsaRegistry.java diff --git 
a/src/main/java/edu/harvard/iq/dataverse/trsa/TrsaRegistry.java b/src/main/java/edu/harvard/iq/dataverse/trsa/TrsaRegistry.java new file mode 100644 index 00000000000..434569b3560 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/trsa/TrsaRegistry.java @@ -0,0 +1,225 @@ +package edu.harvard.iq.dataverse.trsa; + +import java.io.Serializable; +import java.util.Date; +import javax.persistence.Basic; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.Id; +import javax.persistence.NamedQueries; +import javax.persistence.NamedQuery; +import javax.persistence.Table; +import javax.persistence.Temporal; +import javax.persistence.TemporalType; +import javax.validation.constraints.NotNull; +import javax.validation.constraints.Size; +import javax.xml.bind.annotation.XmlRootElement; + +/** + * + * @author asone + */ +@Entity +@Table(name = "trsa_registry", catalog = "dvndb", schema = "public") +@XmlRootElement +@NamedQueries({ + @NamedQuery(name = "TrsaRegistry.findAll", query = "SELECT t FROM TrsaRegistry t"), + @NamedQuery(name = "TrsaRegistry.findByInstallation", query = "SELECT t FROM TrsaRegistry t WHERE t.installation = :installation"), + @NamedQuery(name = "TrsaRegistry.findByEmail", query = "SELECT t FROM TrsaRegistry t WHERE t.email = :email"), + @NamedQuery(name = "TrsaRegistry.findByDataverseurl", query = "SELECT t FROM TrsaRegistry t WHERE t.dataverseurl = :dataverseurl"), + @NamedQuery(name = "TrsaRegistry.findByApitoken", query = "SELECT t FROM TrsaRegistry t WHERE t.apitoken = :apitoken"), + @NamedQuery(name = "TrsaRegistry.findByDatastoragelocation", query = "SELECT t FROM TrsaRegistry t WHERE t.datastoragelocation = :datastoragelocation"), + @NamedQuery(name = "TrsaRegistry.findByDataaccessinfo", query = "SELECT t FROM TrsaRegistry t WHERE t.dataaccessinfo = :dataaccessinfo"), + @NamedQuery(name = "TrsaRegistry.findByNotaryserviceurl", query = "SELECT t FROM TrsaRegistry t WHERE t.notaryserviceurl = 
:notaryserviceurl"), + @NamedQuery(name = "TrsaRegistry.findBySafeserviceurl", query = "SELECT t FROM TrsaRegistry t WHERE t.safeserviceurl = :safeserviceurl"), + @NamedQuery(name = "TrsaRegistry.findByRegistertime", query = "SELECT t FROM TrsaRegistry t WHERE t.registertime = :registertime"), + @NamedQuery(name = "TrsaRegistry.findByDisabled", query = "SELECT t FROM TrsaRegistry t WHERE t.disabled = :disabled"), + @NamedQuery(name = "TrsaRegistry.findByExpiretime", query = "SELECT t FROM TrsaRegistry t WHERE t.expiretime = :expiretime"), + @NamedQuery(name = "TrsaRegistry.findById", query = "SELECT t FROM TrsaRegistry t WHERE t.id = :id")}) +public class TrsaRegistry implements Serializable { + + private static final long serialVersionUID = 1L; + @Basic(optional = false) + @NotNull + @Size(min = 1, max = 255) + @Column(nullable = false, length = 255) + private String installation; + // @Pattern(regexp="[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*@(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?", message="Invalid email")//if the field contains email address consider using this annotation to enforce field validation + @Basic(optional = false) + @NotNull + @Size(min = 1, max = 255) + @Column(nullable = false, length = 255) + private String email; + @Basic(optional = false) + @NotNull + @Size(min = 1, max = 255) + @Column(nullable = false, length = 255) + private String dataverseurl; + @Basic(optional = false) + @NotNull + @Size(min = 1, max = 12) + @Column(nullable = false, length = 12) + private String apitoken; + @Size(max = 255) + @Column(length = 255) + private String datastoragelocation; + @Basic(optional = false) + @NotNull + @Size(min = 1, max = 255) + @Column(nullable = false, length = 255) + private String dataaccessinfo; + @Size(max = 255) + @Column(length = 255) + private String notaryserviceurl; + @Size(max = 255) + @Column(length = 255) + private String safeserviceurl; + @Temporal(TemporalType.TIMESTAMP) + 
private Date registertime; + private Boolean disabled; + @Temporal(TemporalType.TIMESTAMP) + private Date expiretime; + @Id + @Basic(optional = false) + @NotNull + @Column(nullable = false) + private Long id; + + public TrsaRegistry() { + } + + public TrsaRegistry(Long id) { + this.id = id; + } + + public TrsaRegistry(Long id, String installation, String email, String dataverseurl, String apitoken, String dataaccessinfo) { + this.id = id; + this.installation = installation; + this.email = email; + this.dataverseurl = dataverseurl; + this.apitoken = apitoken; + this.dataaccessinfo = dataaccessinfo; + } + + public String getInstallation() { + return installation; + } + + public void setInstallation(String installation) { + this.installation = installation; + } + + public String getEmail() { + return email; + } + + public void setEmail(String email) { + this.email = email; + } + + public String getDataverseurl() { + return dataverseurl; + } + + public void setDataverseurl(String dataverseurl) { + this.dataverseurl = dataverseurl; + } + + public String getApitoken() { + return apitoken; + } + + public void setApitoken(String apitoken) { + this.apitoken = apitoken; + } + + public String getDatastoragelocation() { + return datastoragelocation; + } + + public void setDatastoragelocation(String datastoragelocation) { + this.datastoragelocation = datastoragelocation; + } + + public String getDataaccessinfo() { + return dataaccessinfo; + } + + public void setDataaccessinfo(String dataaccessinfo) { + this.dataaccessinfo = dataaccessinfo; + } + + public String getNotaryserviceurl() { + return notaryserviceurl; + } + + public void setNotaryserviceurl(String notaryserviceurl) { + this.notaryserviceurl = notaryserviceurl; + } + + public String getSafeserviceurl() { + return safeserviceurl; + } + + public void setSafeserviceurl(String safeserviceurl) { + this.safeserviceurl = safeserviceurl; + } + + public Date getRegistertime() { + return registertime; + } + + public void 
setRegistertime(Date registertime) { + this.registertime = registertime; + } + + public Boolean getDisabled() { + return disabled; + } + + public void setDisabled(Boolean disabled) { + this.disabled = disabled; + } + + public Date getExpiretime() { + return expiretime; + } + + public void setExpiretime(Date expiretime) { + this.expiretime = expiretime; + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + @Override + public int hashCode() { + int hash = 0; + hash += (id != null ? id.hashCode() : 0); + return hash; + } + + @Override + public boolean equals(Object object) { + // TODO: Warning - this method won't work in the case the id fields are not set + if (!(object instanceof TrsaRegistry)) { + return false; + } + TrsaRegistry other = (TrsaRegistry) object; + if ((this.id == null && other.id != null) || (this.id != null && !this.id.equals(other.id))) { + return false; + } + return true; + } + + @Override + public String toString() { + return "edu.harvard.iq.dataverse.trsa.TrsaRegistry[ id=" + id + " ]"; + } + +} From 96d42e764b306d55295f202542d39eced1cd8957 Mon Sep 17 00:00:00 2001 From: Akio Sone Date: Wed, 30 Jan 2019 13:50:33 -0500 Subject: [PATCH 40/46] typo-correction in pom.xml --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index f5883336c66..81fa0022509 100644 --- a/pom.xml +++ b/pom.xml @@ -586,7 +586,7 @@ 1.19 - + com.thoughtworks.xstream xstream From a7b42f415724aed836cd30b4abb53c893813d02c Mon Sep 17 00:00:00 2001 From: Akio Sone Date: Wed, 30 Jan 2019 13:59:38 -0500 Subject: [PATCH 41/46] duplicated loggers in ImportDDIServiceBean.java --- .../harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java | 1 - 1 file changed, 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java index b0bf5082382..f8dcf25a4d7 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java @@ -95,7 +95,6 @@ public class ImportDDIServiceBean { public static final String NOTE_TYPE_REPLICATION_FOR = "DVN:REPLICATION_FOR"; private static final String HARVESTED_FILE_STORAGE_PREFIX = "http://"; private XMLInputFactory xmlInputFactory = null; - private static final Logger logger = Logger.getLogger(ImportDDIServiceBean.class.getName()); @EJB CustomFieldServiceBean customFieldService; From 1e193f214c422e747e1c604be96e867c739b65e9 Mon Sep 17 00:00:00 2001 From: donsizemore Date: Wed, 6 Feb 2019 11:06:06 -0500 Subject: [PATCH 42/46] push changes to build initial image --- conf/docker-aio/entrypoint.bash | 4 ++-- conf/docker-aio/prep_it.bash | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/conf/docker-aio/entrypoint.bash b/conf/docker-aio/entrypoint.bash index da01ee56153..58fb3f9b78a 100755 --- a/conf/docker-aio/entrypoint.bash +++ b/conf/docker-aio/entrypoint.bash @@ -2,6 +2,7 @@ export LANG=en_US.UTF-8 #sudo -u postgres /usr/bin/postgres -D /var/lib/pgsql/data & sudo -u postgres /usr/pgsql-9.6/bin/postgres -D /var/lib/pgsql/data & +sleep 15 cd /opt/solr-7.3.1/ # TODO: Run Solr as non-root and remove "-force". bin/solr start -force @@ -11,7 +12,6 @@ bin/solr create_core -c collection1 -d server/solr/collection1/conf -force apachectl -DFOREGROUND & # TODO: Run Glassfish as non-root. -cd /opt/glassfish4 -bin/asadmin start-domain --debug +cd /opt/glassfish4/bin/asadmin start-domain sleep infinity diff --git a/conf/docker-aio/prep_it.bash b/conf/docker-aio/prep_it.bash index 60e6c3cb04e..85858a633a2 100755 --- a/conf/docker-aio/prep_it.bash +++ b/conf/docker-aio/prep_it.bash @@ -10,7 +10,7 @@ n_retries=10 # glassfish healthy/ready retries n_wait=5 -cd conf/docker-aio +#cd conf/docker-aio ./0prep_deps.sh ./1prep.sh docker build -t dv0 -f c7.dockerfile . 
@@ -21,7 +21,7 @@ do # cleanup from previous runs if necessary docker rm -f dv # start container - docker run -d -p 15432:5432 -p 8084:80 -p 8083:8080 -p 9010:9009 --name dv dv0 + docker run -d -p 5432:5432 -p 80:80 -p 8080:8080 --name dv dv0 # wait for glassfish to be healthy i_wait=0 @@ -69,5 +69,5 @@ docker exec -it dv /opt/dv/configure_doi.bash cd ../.. echo "docker-aio ready to run integration tests ($i_retry)" -curl http://localhost:8084/api/info/version +curl http://localhost:8080/api/info/version echo $? From 5c82fc412c95be89fd35fcfc9a7dca6610ed50c7 Mon Sep 17 00:00:00 2001 From: donsizemore Date: Wed, 6 Feb 2019 13:21:01 -0500 Subject: [PATCH 43/46] revert to pete's asadmin syntax --- conf/docker-aio/entrypoint.bash | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/conf/docker-aio/entrypoint.bash b/conf/docker-aio/entrypoint.bash index 58fb3f9b78a..baefee9e029 100755 --- a/conf/docker-aio/entrypoint.bash +++ b/conf/docker-aio/entrypoint.bash @@ -12,6 +12,7 @@ bin/solr create_core -c collection1 -d server/solr/collection1/conf -force apachectl -DFOREGROUND & # TODO: Run Glassfish as non-root. 
-cd /opt/glassfish4/bin/asadmin start-domain +cd /opt/glassfish4/ +bin/asadmin start-domain sleep infinity From 76abb71429ab87279f09b12e44f9940202976291 Mon Sep 17 00:00:00 2001 From: Akio Sone Date: Fri, 8 Feb 2019 15:55:33 -0500 Subject: [PATCH 44/46] jsf-web for trsa_registry table --- conf/docker-aio/configure_doi.bash | 2 +- conf/docker-aio/entrypoint.bash | 1 + conf/docker-aio/setJvmOptions.bash | 18 ++ scripts/database/trsa_registry.sql | 24 +++ src/main/java/Bundle_trsa_registry.properties | 139 +++++++++++++++ .../iq/dataverse/trsa/TrsaRegistries.java | 96 ++++++++++ .../iq/dataverse/trsa/TrsaRegistry.java | 107 ++++++++++-- .../trsa/TrsaRegistryServiceBean.java | 88 ++++++++++ .../trsa/registry/AbstractFacade.java | 64 +++++++ .../trsa/registry/TrsaRegistryController.java | 164 ++++++++++++++++++ .../trsa/registry/TrsaRegistryFacade.java | 32 ++++ .../dataverse/trsa/registry/util/JsfUtil.java | 69 ++++++++ src/main/webapp/WEB-INF/faces-config.xml | 4 + src/main/webapp/dataverse_template.xhtml | 5 + src/main/webapp/resources/css/jsfcrud.css | 82 +++++++++ src/main/webapp/resources/js/jsfcrud.js | 8 + src/main/webapp/trsaRegistry/Create.xhtml | 79 +++++++++ src/main/webapp/trsaRegistry/Edit.xhtml | 83 +++++++++ src/main/webapp/trsaRegistry/List.xhtml | 121 +++++++++++++ src/main/webapp/trsaRegistry/View.xhtml | 61 +++++++ 20 files changed, 1233 insertions(+), 14 deletions(-) create mode 100755 conf/docker-aio/setJvmOptions.bash create mode 100644 scripts/database/trsa_registry.sql create mode 100644 src/main/java/Bundle_trsa_registry.properties create mode 100644 src/main/java/edu/harvard/iq/dataverse/trsa/TrsaRegistries.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/trsa/TrsaRegistryServiceBean.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/trsa/registry/AbstractFacade.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/trsa/registry/TrsaRegistryController.java create mode 100644 
src/main/java/edu/harvard/iq/dataverse/trsa/registry/TrsaRegistryFacade.java create mode 100644 src/main/java/edu/harvard/iq/dataverse/trsa/registry/util/JsfUtil.java create mode 100644 src/main/webapp/resources/css/jsfcrud.css create mode 100644 src/main/webapp/resources/js/jsfcrud.js create mode 100644 src/main/webapp/trsaRegistry/Create.xhtml create mode 100644 src/main/webapp/trsaRegistry/Edit.xhtml create mode 100644 src/main/webapp/trsaRegistry/List.xhtml create mode 100644 src/main/webapp/trsaRegistry/View.xhtml diff --git a/conf/docker-aio/configure_doi.bash b/conf/docker-aio/configure_doi.bash index 24ed6005b95..c082fb9c6df 100755 --- a/conf/docker-aio/configure_doi.bash +++ b/conf/docker-aio/configure_doi.bash @@ -7,7 +7,7 @@ if [ ! -z "${DoiProvider}" ]; then curl -X PUT -d ${DoiProvider} http://localhost:8080/api/admin/settings/:DoiProvider fi if [ ! -z "${doi_username}" ]; then - bin/asadmin create-jvm-options "-Ddoi.username=${doi_password}" + bin/asadmin create-jvm-options "-Ddoi.username=${doi_username}" fi if [ ! -z "${doi_password}" ]; then bin/asadmin create-jvm-options "-Ddoi.password=${doi_password}" diff --git a/conf/docker-aio/entrypoint.bash b/conf/docker-aio/entrypoint.bash index da01ee56153..42b91121794 100755 --- a/conf/docker-aio/entrypoint.bash +++ b/conf/docker-aio/entrypoint.bash @@ -2,6 +2,7 @@ export LANG=en_US.UTF-8 #sudo -u postgres /usr/bin/postgres -D /var/lib/pgsql/data & sudo -u postgres /usr/pgsql-9.6/bin/postgres -D /var/lib/pgsql/data & +sleep 15 cd /opt/solr-7.3.1/ # TODO: Run Solr as non-root and remove "-force". bin/solr start -force diff --git a/conf/docker-aio/setJvmOptions.bash b/conf/docker-aio/setJvmOptions.bash new file mode 100755 index 00000000000..90940843ddf --- /dev/null +++ b/conf/docker-aio/setJvmOptions.bash @@ -0,0 +1,18 @@ +#!/usr/bin/env bash +set -a +. 
$1 +set +a + +echo "SITE_URL=${SITE_URL}" +echo "DOI_USERNAME=${DOI_USERNAME}" +echo "DOI_PASSWORD=${DOI_PASSWORD}" +echo "DOI_BASEURL=${DOI_BASEURL}" + + +docker exec -it dv /usr/local/glassfish4/bin/asadmin create-jvm-options "\"-Ddataverse.siteUrl=${SITE_URL}\"" +sleep 15 +docker exec -it dv /usr/local/glassfish4/bin/asadmin create-jvm-options "\"-Ddoi.username=${DOI_USERNAME}\"" +sleep 15 +docker exec -it dv /usr/local/glassfish4/bin/asadmin create-jvm-options "\"-Ddoi.password=${DOI_PASSWORD}\"" +sleep 15 +docker exec -it dv /usr/local/glassfish4/bin/asadmin create-jvm-options "\"-Ddoi.baseurlstring=${DOI_BASEURL}\"" diff --git a/scripts/database/trsa_registry.sql b/scripts/database/trsa_registry.sql new file mode 100644 index 00000000000..b6c60a56efd --- /dev/null +++ b/scripts/database/trsa_registry.sql @@ -0,0 +1,24 @@ +-- Drop table + +DROP TABLE IF EXISTS public.trsa_registry; + +CREATE TABLE public.trsa_registry ( + id serial NOT NULL, + installation varchar(255) NOT NULL, + email varchar(255) NOT NULL, + dataverseurl varchar(255) NOT NULL, + apitoken varchar(255) NOT NULL, + datastoragelocation varchar(255) NOT NULL, + dataaccessinfo varchar(255) NOT NULL, + notaryserviceurl varchar(255) NOT NULL, + safeserviceurl varchar(255) NOT NULL, + registertime timestamp NULL, + expiretime timestamp NULL, + disabled bool NULL, + CONSTRAINT trsa_registry_pkey PRIMARY KEY (id) +); + +-- Permissions + +ALTER TABLE public.trsa_registry OWNER TO dvnapp; +GRANT ALL ON TABLE public.trsa_registry TO dvnapp; diff --git a/src/main/java/Bundle_trsa_registry.properties b/src/main/java/Bundle_trsa_registry.properties new file mode 100644 index 00000000000..a101b3f559a --- /dev/null +++ b/src/main/java/Bundle_trsa_registry.properties @@ -0,0 +1,139 @@ +PersistenceErrorOccured=A persistence error occurred. +Create=Create +View=View +Edit=Edit +Delete=Delete +Close=Close +Cancel=Cancel +Save=Save +SelectOneMessage=Select One... 
+Home=Home +Maintenance=Maintenance +AppName=dataverse + +TrsaRegistryCreated=TrsaRegistry was successfully created. +TrsaRegistryUpdated=TrsaRegistry was successfully updated. +TrsaRegistryDeleted=TrsaRegistry was successfully deleted. +CreateTrsaRegistryTitle=Create New TrsaRegistry +CreateTrsaRegistrySaveLink=Save +CreateTrsaRegistryShowAllLink=Show All TrsaRegistry Items +CreateTrsaRegistryIndexLink=Index +CreateTrsaRegistryLabel_installation=Installation: +CreateTrsaRegistryRequiredMessage_installation=The Installation field is required. +CreateTrsaRegistryTitle_installation=Installation +CreateTrsaRegistryLabel_email=Email: +CreateTrsaRegistryRequiredMessage_email=The Email field is required. +CreateTrsaRegistryTitle_email=Email +CreateTrsaRegistryLabel_dataverseurl=Dataverseurl: +CreateTrsaRegistryRequiredMessage_dataverseurl=The Dataverseurl field is required. +CreateTrsaRegistryTitle_dataverseurl=Dataverseurl +CreateTrsaRegistryLabel_apitoken=Apitoken: +CreateTrsaRegistryRequiredMessage_apitoken=The Apitoken field is required. +CreateTrsaRegistryTitle_apitoken=Apitoken +CreateTrsaRegistryLabel_datastoragelocation=Datastoragelocation: +CreateTrsaRegistryRequiredMessage_datastoragelocation=The Datastoragelocation field is required. +CreateTrsaRegistryTitle_datastoragelocation=Datastoragelocation +CreateTrsaRegistryLabel_dataaccessinfo=Dataaccessinfo: +CreateTrsaRegistryRequiredMessage_dataaccessinfo=The Dataaccessinfo field is required. +CreateTrsaRegistryTitle_dataaccessinfo=Dataaccessinfo +CreateTrsaRegistryLabel_notaryserviceurl=Notaryserviceurl: +CreateTrsaRegistryRequiredMessage_notaryserviceurl=The Notaryserviceurl field is required. +CreateTrsaRegistryTitle_notaryserviceurl=Notaryserviceurl +CreateTrsaRegistryLabel_safeserviceurl=Safeserviceurl: +CreateTrsaRegistryRequiredMessage_safeserviceurl=The Safeserviceurl field is required. 
+CreateTrsaRegistryTitle_safeserviceurl=Safeserviceurl +CreateTrsaRegistryLabel_registertime=Registertime: +CreateTrsaRegistryTitle_registertime=Registertime +CreateTrsaRegistryLabel_disabled=Disabled: +CreateTrsaRegistryTitle_disabled=Disabled +CreateTrsaRegistryLabel_expiretime=Expiretime: +CreateTrsaRegistryTitle_expiretime=Expiretime +CreateTrsaRegistryLabel_id=Id: +CreateTrsaRegistryRequiredMessage_id=The Id field is required. +CreateTrsaRegistryTitle_id=Id + + +EditTrsaRegistryTitle=Edit TrsaRegistry +EditTrsaRegistrySaveLink=Save +EditTrsaRegistryViewLink=View +EditTrsaRegistryShowAllLink=Show All TrsaRegistry Items +EditTrsaRegistryIndexLink=Index +EditTrsaRegistryLabel_installation=Installation: +EditTrsaRegistryRequiredMessage_installation=The Installation field is required. +EditTrsaRegistryTitle_installation=Installation +EditTrsaRegistryLabel_email=Email: +EditTrsaRegistryRequiredMessage_email=The Email field is required. +EditTrsaRegistryTitle_email=Email +EditTrsaRegistryLabel_dataverseurl=Dataverseurl: +EditTrsaRegistryRequiredMessage_dataverseurl=The Dataverseurl field is required. +EditTrsaRegistryTitle_dataverseurl=Dataverseurl +EditTrsaRegistryLabel_apitoken=Apitoken: +EditTrsaRegistryRequiredMessage_apitoken=The Apitoken field is required. +EditTrsaRegistryTitle_apitoken=Apitoken +EditTrsaRegistryLabel_datastoragelocation=Datastoragelocation: +EditTrsaRegistryTitle_datastoragelocation=Datastoragelocation +EditTrsaRegistryLabel_dataaccessinfo=Dataaccessinfo: +EditTrsaRegistryRequiredMessage_dataaccessinfo=The Dataaccessinfo field is required. 
+EditTrsaRegistryTitle_dataaccessinfo=Dataaccessinfo +EditTrsaRegistryLabel_notaryserviceurl=Notaryserviceurl: +EditTrsaRegistryTitle_notaryserviceurl=Notaryserviceurl +EditTrsaRegistryLabel_safeserviceurl=Safeserviceurl: +EditTrsaRegistryTitle_safeserviceurl=Safeserviceurl +EditTrsaRegistryLabel_registertime=Registertime: +EditTrsaRegistryTitle_registertime=Registertime +EditTrsaRegistryLabel_disabled=Disabled: +EditTrsaRegistryTitle_disabled=Disabled +EditTrsaRegistryLabel_expiretime=Expiretime: +EditTrsaRegistryTitle_expiretime=Expiretime +EditTrsaRegistryLabel_id=Id: +EditTrsaRegistryRequiredMessage_id=The Id field is required. +EditTrsaRegistryTitle_id=Id +ViewTrsaRegistryTitle=View +ViewTrsaRegistryDestroyLink=Destroy +ViewTrsaRegistryEditLink=Edit +ViewTrsaRegistryCreateLink=Create New TrsaRegistry +ViewTrsaRegistryShowAllLink=Show All TrsaRegistry Items +ViewTrsaRegistryIndexLink=Index +ViewTrsaRegistryLabel_installation=Installation: +ViewTrsaRegistryTitle_installation=Installation +ViewTrsaRegistryLabel_email=Email: +ViewTrsaRegistryTitle_email=Email +ViewTrsaRegistryLabel_dataverseurl=Dataverseurl: +ViewTrsaRegistryTitle_dataverseurl=Dataverseurl +ViewTrsaRegistryLabel_apitoken=Apitoken: +ViewTrsaRegistryTitle_apitoken=Apitoken +ViewTrsaRegistryLabel_datastoragelocation=Datastoragelocation: +ViewTrsaRegistryTitle_datastoragelocation=Datastoragelocation +ViewTrsaRegistryLabel_dataaccessinfo=Dataaccessinfo: +ViewTrsaRegistryTitle_dataaccessinfo=Dataaccessinfo +ViewTrsaRegistryLabel_notaryserviceurl=Notaryserviceurl: +ViewTrsaRegistryTitle_notaryserviceurl=Notaryserviceurl +ViewTrsaRegistryLabel_safeserviceurl=Safeserviceurl: +ViewTrsaRegistryTitle_safeserviceurl=Safeserviceurl +ViewTrsaRegistryLabel_registertime=Registertime: +ViewTrsaRegistryTitle_registertime=Registertime +ViewTrsaRegistryLabel_disabled=Disabled: +ViewTrsaRegistryTitle_disabled=Disabled +ViewTrsaRegistryLabel_expiretime=Expiretime: +ViewTrsaRegistryTitle_expiretime=Expiretime 
+ViewTrsaRegistryLabel_id=Id: +ViewTrsaRegistryTitle_id=Id +ListTrsaRegistryTitle=List +ListTrsaRegistryEmpty=(No TrsaRegistry Items Found) +ListTrsaRegistryDestroyLink=Destroy +ListTrsaRegistryEditLink=Edit +ListTrsaRegistryViewLink=View +ListTrsaRegistryCreateLink=Create New TrsaRegistry +ListTrsaRegistryIndexLink=Index +ListTrsaRegistryTitle_installation=Installation +ListTrsaRegistryTitle_email=Email +ListTrsaRegistryTitle_dataverseurl=Dataverseurl +ListTrsaRegistryTitle_apitoken=Apitoken +ListTrsaRegistryTitle_datastoragelocation=Datastoragelocation +ListTrsaRegistryTitle_dataaccessinfo=Dataaccessinfo +ListTrsaRegistryTitle_notaryserviceurl=Notaryserviceurl +ListTrsaRegistryTitle_safeserviceurl=Safeserviceurl +ListTrsaRegistryTitle_registertime=Registertime +ListTrsaRegistryTitle_disabled=Disabled +ListTrsaRegistryTitle_expiretime=Expiretime +ListTrsaRegistryTitle_id=Id diff --git a/src/main/java/edu/harvard/iq/dataverse/trsa/TrsaRegistries.java b/src/main/java/edu/harvard/iq/dataverse/trsa/TrsaRegistries.java new file mode 100644 index 00000000000..14b14b94b0c --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/trsa/TrsaRegistries.java @@ -0,0 +1,96 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. 
+ */ +package edu.harvard.iq.dataverse.trsa; + +import edu.harvard.iq.dataverse.api.AbstractApiBean; +import edu.harvard.iq.dataverse.trsa.TrsaRegistry; +import edu.harvard.iq.dataverse.trsa.TrsaRegistryServiceBean; +import java.util.List; +import javax.ejb.EJB; +import javax.ejb.Stateless; +import javax.json.Json; +import javax.json.JsonArrayBuilder; +import javax.persistence.EntityManager; +import javax.persistence.PersistenceContext; +import javax.ws.rs.Consumes; +import javax.ws.rs.DELETE; +import javax.ws.rs.GET; +import javax.ws.rs.POST; +import javax.ws.rs.PUT; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; +import javax.ws.rs.Produces; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; + +/** + * + * @author asone + */ + +@Path("admin/trsaRegistries") +public class TrsaRegistries extends AbstractApiBean { + + @EJB + TrsaRegistryServiceBean trsaRegistryServiceBean; + + + @POST + @Consumes({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON}) + public void create(TrsaRegistry entity) { + trsaRegistryServiceBean.create(entity); + } + + @PUT + @Path("{id}") + @Consumes({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON}) + public void edit(@PathParam("id") Long id, TrsaRegistry entity) { + trsaRegistryServiceBean.edit(entity); + } + + @DELETE + @Path("{id}") + public void remove(@PathParam("id") Long id) { + trsaRegistryServiceBean.remove(trsaRegistryServiceBean.find(id)); + } + + @GET + @Path("{id}") + @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON}) + public TrsaRegistry find(@PathParam("id") Long id) { + return trsaRegistryServiceBean.find(id); + } + + @GET + @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON}) + public Response getTrsaRegistries() { + JsonArrayBuilder jab = Json.createArrayBuilder(); + trsaRegistryServiceBean.findAll().forEach((trsaRegistry)->{ + jab.add(trsaRegistry.toJson()); + }); + return ok(jab); + } + + + + + + @GET + @Path("{from}/{to}") + 
@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON}) + public List findRange(@PathParam("from") Integer from, @PathParam("to") Integer to) { + return trsaRegistryServiceBean.findRange(new int[]{from, to}); + } + + @GET + @Path("count") + @Produces(MediaType.TEXT_PLAIN) + public String countREST() { + return String.valueOf(trsaRegistryServiceBean.count()); + } + + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/trsa/TrsaRegistry.java b/src/main/java/edu/harvard/iq/dataverse/trsa/TrsaRegistry.java index 434569b3560..7ef5d7c02bf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/trsa/TrsaRegistry.java +++ b/src/main/java/edu/harvard/iq/dataverse/trsa/TrsaRegistry.java @@ -1,10 +1,19 @@ package edu.harvard.iq.dataverse.trsa; +import com.ibm.icu.util.Calendar; +import com.ibm.icu.util.ULocale; import java.io.Serializable; +import java.math.BigDecimal; +import java.sql.Timestamp; import java.util.Date; +import java.util.logging.Logger; +import javax.json.Json; +import javax.json.JsonObjectBuilder; import javax.persistence.Basic; import javax.persistence.Column; import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.NamedQueries; import javax.persistence.NamedQuery; @@ -37,53 +46,79 @@ @NamedQuery(name = "TrsaRegistry.findByExpiretime", query = "SELECT t FROM TrsaRegistry t WHERE t.expiretime = :expiretime"), @NamedQuery(name = "TrsaRegistry.findById", query = "SELECT t FROM TrsaRegistry t WHERE t.id = :id")}) public class TrsaRegistry implements Serializable { + + private static final Logger logger = Logger.getLogger(TrsaRegistry.class.getName()); + + public static Integer DEFAULT_VALID_PERIOD=1; private static final long serialVersionUID = 1L; + + + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + @Basic(optional = false) + @Column(name = "ID") + private Long id; + + @Basic(optional = false) @NotNull @Size(min = 1, max = 
255) @Column(nullable = false, length = 255) private String installation; // @Pattern(regexp="[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*@(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?", message="Invalid email")//if the field contains email address consider using this annotation to enforce field validation + @Basic(optional = false) @NotNull @Size(min = 1, max = 255) @Column(nullable = false, length = 255) private String email; + @Basic(optional = false) @NotNull @Size(min = 1, max = 255) @Column(nullable = false, length = 255) private String dataverseurl; + @Basic(optional = false) @NotNull - @Size(min = 1, max = 12) - @Column(nullable = false, length = 12) + @Size(min = 1, max = 255) + @Column(nullable = false, length = 255) private String apitoken; - @Size(max = 255) - @Column(length = 255) + + + @Basic(optional = false) + @NotNull + @Size(min=1, max = 255) + @Column(nullable = false, length = 255) private String datastoragelocation; + @Basic(optional = false) @NotNull @Size(min = 1, max = 255) @Column(nullable = false, length = 255) private String dataaccessinfo; - @Size(max = 255) - @Column(length = 255) + + @Basic(optional = false) + @NotNull + @Size(min = 1, max = 255) + @Column(nullable = false, length = 255) private String notaryserviceurl; + + @Basic(optional = false) + @NotNull @Size(max = 255) - @Column(length = 255) + @Column(nullable = false, length = 255) private String safeserviceurl; + @Temporal(TemporalType.TIMESTAMP) private Date registertime; + private Boolean disabled; + @Temporal(TemporalType.TIMESTAMP) private Date expiretime; - @Id - @Basic(optional = false) - @NotNull - @Column(nullable = false) - private Long id; + public TrsaRegistry() { } @@ -92,13 +127,21 @@ public TrsaRegistry(Long id) { this.id = id; } - public TrsaRegistry(Long id, String installation, String email, String dataverseurl, String apitoken, String dataaccessinfo) { + public TrsaRegistry(Long id, String installation, String 
email, + String dataverseurl, String apitoken, String datastoragelocation, + String dataaccessinfo, String notaryserviceurl, + String safeserviceurl) { this.id = id; this.installation = installation; this.email = email; this.dataverseurl = dataverseurl; this.apitoken = apitoken; + this.datastoragelocation = datastoragelocation; this.dataaccessinfo = dataaccessinfo; + this.notaryserviceurl = notaryserviceurl; + this.registertime = new Timestamp(new Date().getTime()); + this.expiretime = generateExpireTimestamp(); + this.disabled=false; } public String getInstallation() { @@ -222,4 +265,42 @@ public String toString() { return "edu.harvard.iq.dataverse.trsa.TrsaRegistry[ id=" + id + " ]"; } + + public JsonObjectBuilder toJson() { + JsonObjectBuilder jab = Json.createObjectBuilder(); + return jab.add("id", getId()) + .add("installation", getInstallation()) + .add("email", getEmail()) + .add("dataverseurl", getDataverseurl()) + .add("apitoken", getApitoken()) + .add("datastoragelocation", getDatastoragelocation()) + .add("dataaccessinfo", getDataaccessinfo()) + .add("notaryserviceurl", getNotaryserviceurl()) + .add("registertime", getRegistertime().toString()) + .add("expiretime", getExpiretime().toString()) + ; + +// jab.add(DISPLAY_NAME, getDisplayName()); +// jab.add(DESCRIPTION, getDescription()); +// jab.add(TYPE, getType().text); +// jab.add(TOOL_URL, getToolUrl()); +// jab.add(TOOL_PARAMETERS, getToolParameters()); + } + + private Date generateExpireTimestamp(){ + return generateExpireTimestamp(null); + } + + + private Date generateExpireTimestamp(Integer year){ + if (year==null){ + year = DEFAULT_VALID_PERIOD; + } + Date baseline= this.registertime; + Calendar cal = Calendar.getInstance(); + cal.setTimeInMillis(baseline.getTime()); + cal.add(Calendar.YEAR, year); + return new Date(cal.getTime().getTime()); + + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/trsa/TrsaRegistryServiceBean.java 
b/src/main/java/edu/harvard/iq/dataverse/trsa/TrsaRegistryServiceBean.java new file mode 100644 index 00000000000..a6b2d2ec703 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/trsa/TrsaRegistryServiceBean.java @@ -0,0 +1,88 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. + */ +package edu.harvard.iq.dataverse.trsa; + +import java.util.List; +import java.util.logging.Logger; +import javax.ejb.Stateless; +import javax.inject.Named; +import javax.persistence.EntityManager; +import javax.persistence.NoResultException; +import javax.persistence.NonUniqueResultException; +import javax.persistence.PersistenceContext; +import javax.persistence.TypedQuery; + +/** + * + * @author asone + */ +@Stateless +@Named +public class TrsaRegistryServiceBean { + + private static final Logger logger = Logger.getLogger(TrsaRegistryServiceBean.class.getName()); + + @PersistenceContext(unitName = "VDCNet-ejbPU") + private EntityManager em; + + + public void create(TrsaRegistry entity) { + em.persist(entity); + } + + + public void edit(TrsaRegistry entity) { + em.merge(entity); + } + + + public void remove(TrsaRegistry entity) { + em.remove(em.merge(entity)); + } + + + public TrsaRegistry find(long id) { + return em.find(TrsaRegistry.class, id); + } + + + public List findAll() { + javax.persistence.criteria.CriteriaQuery cq = em.getCriteriaBuilder().createQuery(); + cq.select(cq.from(TrsaRegistry.class)); + return em.createQuery(cq).getResultList(); + } + + + public List findRange(int[] range) { + javax.persistence.criteria.CriteriaQuery cq = em.getCriteriaBuilder().createQuery(); + cq.select(cq.from(TrsaRegistry.class)); + javax.persistence.Query q = em.createQuery(cq); + q.setMaxResults(range[1] - range[0] + 1); + q.setFirstResult(range[0]); + return q.getResultList(); + } + + public long count() { + javax.persistence.criteria.CriteriaQuery 
cq = em.getCriteriaBuilder().createQuery(); + javax.persistence.criteria.Root rt = cq.from(TrsaRegistry.class); + cq.select(em.getCriteriaBuilder().count(rt)); + javax.persistence.Query q = em.createQuery(cq); + return (long) q.getSingleResult(); + } + + + public TrsaRegistry findById(long id) { + TypedQuery typedQuery = em.createQuery("SELECT OBJECT(o) FROM TrsaRegistry AS o WHERE o.id = :id", TrsaRegistry.class); + typedQuery.setParameter("id", id); + try { + TrsaRegistry trsaRegistry = typedQuery.getSingleResult(); + return trsaRegistry; + } catch (NoResultException | NonUniqueResultException ex) { + return null; + } + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/trsa/registry/AbstractFacade.java b/src/main/java/edu/harvard/iq/dataverse/trsa/registry/AbstractFacade.java new file mode 100644 index 00000000000..bd0ce7b56c3 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/trsa/registry/AbstractFacade.java @@ -0,0 +1,64 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. 
+ */ +package edu.harvard.iq.dataverse.trsa.registry; + +import java.util.List; +import javax.persistence.EntityManager; + +/** + * + * @author asone + */ +public abstract class AbstractFacade { + + private Class entityClass; + + public AbstractFacade(Class entityClass) { + this.entityClass = entityClass; + } + + protected abstract EntityManager getEntityManager(); + + public void create(T entity) { + getEntityManager().persist(entity); + } + + public void edit(T entity) { + getEntityManager().merge(entity); + } + + public void remove(T entity) { + getEntityManager().remove(getEntityManager().merge(entity)); + } + + public T find(Object id) { + return getEntityManager().find(entityClass, id); + } + + public List findAll() { + javax.persistence.criteria.CriteriaQuery cq = getEntityManager().getCriteriaBuilder().createQuery(); + cq.select(cq.from(entityClass)); + return getEntityManager().createQuery(cq).getResultList(); + } + + public List findRange(int[] range) { + javax.persistence.criteria.CriteriaQuery cq = getEntityManager().getCriteriaBuilder().createQuery(); + cq.select(cq.from(entityClass)); + javax.persistence.Query q = getEntityManager().createQuery(cq); + q.setMaxResults(range[1] - range[0] + 1); + q.setFirstResult(range[0]); + return q.getResultList(); + } + + public int count() { + javax.persistence.criteria.CriteriaQuery cq = getEntityManager().getCriteriaBuilder().createQuery(); + javax.persistence.criteria.Root rt = cq.from(entityClass); + cq.select(getEntityManager().getCriteriaBuilder().count(rt)); + javax.persistence.Query q = getEntityManager().createQuery(cq); + return ((Long) q.getSingleResult()).intValue(); + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/trsa/registry/TrsaRegistryController.java b/src/main/java/edu/harvard/iq/dataverse/trsa/registry/TrsaRegistryController.java new file mode 100644 index 00000000000..f71b5dfd39d --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/trsa/registry/TrsaRegistryController.java @@ 
-0,0 +1,164 @@ +package edu.harvard.iq.dataverse.trsa.registry; + +import edu.harvard.iq.dataverse.trsa.TrsaRegistry; +import edu.harvard.iq.dataverse.trsa.registry.util.JsfUtil; +import edu.harvard.iq.dataverse.trsa.registry.util.JsfUtil.PersistAction; + +import java.io.Serializable; +import java.util.List; +import java.util.ResourceBundle; +import java.util.logging.Level; +import java.util.logging.Logger; +import javax.ejb.EJB; +import javax.ejb.EJBException; +import javax.inject.Named; +import javax.enterprise.context.SessionScoped; +import javax.faces.component.UIComponent; +import javax.faces.context.FacesContext; +import javax.faces.convert.Converter; +import javax.faces.convert.FacesConverter; + +@Named("trsaRegistryController") +@SessionScoped +public class TrsaRegistryController implements Serializable { + + @EJB + private edu.harvard.iq.dataverse.trsa.registry.TrsaRegistryFacade ejbFacade; + private List items = null; + private TrsaRegistry selected; + + public TrsaRegistryController() { + } + + public TrsaRegistry getSelected() { + return selected; + } + + public void setSelected(TrsaRegistry selected) { + this.selected = selected; + } + + protected void setEmbeddableKeys() { + } + + protected void initializeEmbeddableKey() { + } + + private TrsaRegistryFacade getFacade() { + return ejbFacade; + } + + public TrsaRegistry prepareCreate() { + selected = new TrsaRegistry(); + initializeEmbeddableKey(); + return selected; + } + + public void create() { + persist(PersistAction.CREATE, ResourceBundle.getBundle("/Bundle_trsa_registry").getString("TrsaRegistryCreated")); + if (!JsfUtil.isValidationFailed()) { + items = null; // Invalidate list of items to trigger re-query. 
+ } + } + + public void update() { + persist(PersistAction.UPDATE, ResourceBundle.getBundle("/Bundle_trsa_registry").getString("TrsaRegistryUpdated")); + } + + public void destroy() { + persist(PersistAction.DELETE, ResourceBundle.getBundle("/Bundle_trsa_registry").getString("TrsaRegistryDeleted")); + if (!JsfUtil.isValidationFailed()) { + selected = null; // Remove selection + items = null; // Invalidate list of items to trigger re-query. + } + } + + public List getItems() { + if (items == null) { + items = getFacade().findAll(); + } + return items; + } + + private void persist(PersistAction persistAction, String successMessage) { + if (selected != null) { + setEmbeddableKeys(); + try { + if (persistAction != PersistAction.DELETE) { + getFacade().edit(selected); + } else { + getFacade().remove(selected); + } + JsfUtil.addSuccessMessage(successMessage); + } catch (EJBException ex) { + String msg = ""; + Throwable cause = ex.getCause(); + if (cause != null) { + msg = cause.getLocalizedMessage(); + } + if (msg.length() > 0) { + JsfUtil.addErrorMessage(msg); + } else { + JsfUtil.addErrorMessage(ex, ResourceBundle.getBundle("/Bundle_trsa_registry").getString("PersistenceErrorOccured")); + } + } catch (Exception ex) { + Logger.getLogger(this.getClass().getName()).log(Level.SEVERE, null, ex); + JsfUtil.addErrorMessage(ex, ResourceBundle.getBundle("/Bundle_trsa_registry").getString("PersistenceErrorOccured")); + } + } + } + + public TrsaRegistry getTrsaRegistry(java.lang.Long id) { + return getFacade().find(id); + } + + public List getItemsAvailableSelectMany() { + return getFacade().findAll(); + } + + public List getItemsAvailableSelectOne() { + return getFacade().findAll(); + } + + @FacesConverter(forClass = TrsaRegistry.class) + public static class TrsaRegistryControllerConverter implements Converter { + + @Override + public Object getAsObject(FacesContext facesContext, UIComponent component, String value) { + if (value == null || value.length() == 0) { + return null; 
+ } + TrsaRegistryController controller = (TrsaRegistryController) facesContext.getApplication().getELResolver(). + getValue(facesContext.getELContext(), null, "trsaRegistryController"); + return controller.getTrsaRegistry(getKey(value)); + } + + java.lang.Long getKey(String value) { + java.lang.Long key; + key = Long.valueOf(value); + return key; + } + + String getStringKey(java.lang.Long value) { + StringBuilder sb = new StringBuilder(); + sb.append(value); + return sb.toString(); + } + + @Override + public String getAsString(FacesContext facesContext, UIComponent component, Object object) { + if (object == null) { + return null; + } + if (object instanceof TrsaRegistry) { + TrsaRegistry o = (TrsaRegistry) object; + return getStringKey(o.getId()); + } else { + Logger.getLogger(this.getClass().getName()).log(Level.SEVERE, "object {0} is of type {1}; expected type: {2}", new Object[]{object, object.getClass().getName(), TrsaRegistry.class.getName()}); + return null; + } + } + + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/trsa/registry/TrsaRegistryFacade.java b/src/main/java/edu/harvard/iq/dataverse/trsa/registry/TrsaRegistryFacade.java new file mode 100644 index 00000000000..60c721cdf32 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/trsa/registry/TrsaRegistryFacade.java @@ -0,0 +1,32 @@ +/* + * To change this license header, choose License Headers in Project Properties. + * To change this template file, choose Tools | Templates + * and open the template in the editor. 
+ */ +package edu.harvard.iq.dataverse.trsa.registry; + +import edu.harvard.iq.dataverse.trsa.TrsaRegistry; +import javax.ejb.Stateless; +import javax.persistence.EntityManager; +import javax.persistence.PersistenceContext; + +/** + * + * @author asone + */ +@Stateless +public class TrsaRegistryFacade extends AbstractFacade { + + @PersistenceContext(unitName = "VDCNet-ejbPU") + private EntityManager em; + + @Override + protected EntityManager getEntityManager() { + return em; + } + + public TrsaRegistryFacade() { + super(TrsaRegistry.class); + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/trsa/registry/util/JsfUtil.java b/src/main/java/edu/harvard/iq/dataverse/trsa/registry/util/JsfUtil.java new file mode 100644 index 00000000000..8881fe5bfa4 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/trsa/registry/util/JsfUtil.java @@ -0,0 +1,69 @@ +package edu.harvard.iq.dataverse.trsa.registry.util; + +import java.util.List; +import javax.faces.application.FacesMessage; +import javax.faces.component.UIComponent; +import javax.faces.context.FacesContext; +import javax.faces.convert.Converter; +import javax.faces.model.SelectItem; + +public class JsfUtil { + + public static SelectItem[] getSelectItems(List entities, boolean selectOne) { + int size = selectOne ? 
entities.size() + 1 : entities.size(); + SelectItem[] items = new SelectItem[size]; + int i = 0; + if (selectOne) { + items[0] = new SelectItem("", "---"); + i++; + } + for (Object x : entities) { + items[i++] = new SelectItem(x, x.toString()); + } + return items; + } + + public static boolean isValidationFailed() { + return FacesContext.getCurrentInstance().isValidationFailed(); + } + + public static void addErrorMessage(Exception ex, String defaultMsg) { + String msg = ex.getLocalizedMessage(); + if (msg != null && msg.length() > 0) { + addErrorMessage(msg); + } else { + addErrorMessage(defaultMsg); + } + } + + public static void addErrorMessages(List messages) { + for (String message : messages) { + addErrorMessage(message); + } + } + + public static void addErrorMessage(String msg) { + FacesMessage facesMsg = new FacesMessage(FacesMessage.SEVERITY_ERROR, msg, msg); + FacesContext.getCurrentInstance().addMessage(null, facesMsg); + } + + public static void addSuccessMessage(String msg) { + FacesMessage facesMsg = new FacesMessage(FacesMessage.SEVERITY_INFO, msg, msg); + FacesContext.getCurrentInstance().addMessage("successInfo", facesMsg); + } + + public static String getRequestParameter(String key) { + return FacesContext.getCurrentInstance().getExternalContext().getRequestParameterMap().get(key); + } + + public static Object getObjectFromRequestParameter(String requestParameterName, Converter converter, UIComponent component) { + String theId = JsfUtil.getRequestParameter(requestParameterName); + return converter.getAsObject(FacesContext.getCurrentInstance(), component, theId); + } + + public static enum PersistAction { + CREATE, + DELETE, + UPDATE + } +} diff --git a/src/main/webapp/WEB-INF/faces-config.xml b/src/main/webapp/WEB-INF/faces-config.xml index 2015ca55f5f..263d6e9f485 100644 --- a/src/main/webapp/WEB-INF/faces-config.xml +++ b/src/main/webapp/WEB-INF/faces-config.xml @@ -13,6 +13,10 @@ fr zh_CN + + /Bundle_trsa_registry + bundle_trsa_registry + 
diff --git a/src/main/webapp/dataverse_template.xhtml b/src/main/webapp/dataverse_template.xhtml index 43b8158d8df..46435704863 100644 --- a/src/main/webapp/dataverse_template.xhtml +++ b/src/main/webapp/dataverse_template.xhtml @@ -45,6 +45,11 @@ + + + + + diff --git a/src/main/webapp/resources/css/jsfcrud.css b/src/main/webapp/resources/css/jsfcrud.css new file mode 100644 index 00000000000..fa75f5b0d6a --- /dev/null +++ b/src/main/webapp/resources/css/jsfcrud.css @@ -0,0 +1,82 @@ +root { + display: block; +} + +body { + font-family: Arial, Helvetica, sans-serif; + color: #3a4f54; + background-color: #dfecf1; + font-size: small; +} + +a { + color: #e33b06; +} + +table { + empty-cells: show; +} + +form.jsfcrud_list_form th, td th { + font-size: x-small; + color: #4e6a71; + border-top-style: solid; + border-bottom-style: solid; + border-left-style: solid; + border-right-style: solid; + border-top-width: 1px; + border-bottom-width: 1px; + border-left-width: 1px; + border-right-width: 1px; + border-top-color: #b2d5d6; + border-bottom-color: #b2d5d6; + border-left-color: #90b4bd; + border-right-color: #90b4bd; + letter-spacing: 3px; + text-align: left; + padding-top: 6px; + padding-bottom: 6px; + padding-left: 6px; + padding-right: 6px; + background-color: #b2d5d6; +} + +td { + vertical-align: top; + padding-bottom: 8px; + font-size: small; +} + +form.jsfcrud_list_form td, td td { + border-top-style: solid; + border-bottom-style: solid; + border-left-style: solid; + border-right-style: solid; + border-top-width: 1px; + border-bottom-width: 1px; + border-left-width: 1px; + border-right-width: 1px; + border-top-color: #b2d5d6; + border-bottom-color: #b2d5d6; + border-left-color: #b2d5d6; + border-right-color: #b2d5d6; + vertical-align: baseline; + padding-bottom: 0px; +} + +tr.jsfcrud_odd_row { + background-color: #fefeff; + color: #4e6a71; +} + + +tr.jsfcrud_even_row { + background-color: #eff5fa; + color: #4e6a71; +} + +#busyImage { + position: absolute; + left: 50%; 
+ top: 50%; +} diff --git a/src/main/webapp/resources/js/jsfcrud.js b/src/main/webapp/resources/js/jsfcrud.js new file mode 100644 index 00000000000..fb1c8c63b01 --- /dev/null +++ b/src/main/webapp/resources/js/jsfcrud.js @@ -0,0 +1,8 @@ +function handleSubmit(args, dialog) { + var jqDialog = jQuery('#' + dialog); + if (args.validationFailed) { + jqDialog.effect('shake', {times: 3}, 100); + } else { + PF(dialog).hide(); + } +} diff --git a/src/main/webapp/trsaRegistry/Create.xhtml b/src/main/webapp/trsaRegistry/Create.xhtml new file mode 100644 index 00000000000..6b75b4ea05a --- /dev/null +++ b/src/main/webapp/trsaRegistry/Create.xhtml @@ -0,0 +1,79 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/main/webapp/trsaRegistry/Edit.xhtml b/src/main/webapp/trsaRegistry/Edit.xhtml new file mode 100644 index 00000000000..27f2b28dede --- /dev/null +++ b/src/main/webapp/trsaRegistry/Edit.xhtml @@ -0,0 +1,83 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/main/webapp/trsaRegistry/List.xhtml b/src/main/webapp/trsaRegistry/List.xhtml new file mode 100644 index 00000000000..b5abd0a1e86 --- /dev/null +++ b/src/main/webapp/trsaRegistry/List.xhtml @@ -0,0 +1,121 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/main/webapp/trsaRegistry/View.xhtml b/src/main/webapp/trsaRegistry/View.xhtml new file mode 100644 index 00000000000..e40bb6f8338 --- /dev/null +++ b/src/main/webapp/trsaRegistry/View.xhtml @@ -0,0 +1,61 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + From 697e07e6443d437da3d9b009fb7ed3eb03f62d0e Mon Sep 17 00:00:00 2001 From: Akio Sone Date: Mon, 11 Feb 2019 08:39:07 -0500 Subject: [PATCH 45/46] trsa page was linked to the dashboard 
--- src/main/java/Bundle_trsa_registry.properties | 4 ++++ src/main/webapp/dashboard.xhtml | 15 +++++++++++++++ 2 files changed, 19 insertions(+) diff --git a/src/main/java/Bundle_trsa_registry.properties b/src/main/java/Bundle_trsa_registry.properties index a101b3f559a..6eb1761fabc 100644 --- a/src/main/java/Bundle_trsa_registry.properties +++ b/src/main/java/Bundle_trsa_registry.properties @@ -137,3 +137,7 @@ ListTrsaRegistryTitle_registertime=Registertime ListTrsaRegistryTitle_disabled=Disabled ListTrsaRegistryTitle_expiretime=Expiretime ListTrsaRegistryTitle_id=Id + + +dashboardCardTrsaRegistryHeader=TRSA(Trusted Remote Storage Agent) Registry +dashboardCardTrsaRegistryManage=Manage TRSAs \ No newline at end of file diff --git a/src/main/webapp/dashboard.xhtml b/src/main/webapp/dashboard.xhtml index d43c591116f..0286fb1afb9 100644 --- a/src/main/webapp/dashboard.xhtml +++ b/src/main/webapp/dashboard.xhtml @@ -123,6 +123,21 @@ + + +
+
+

#{bundle_trsa_registry.dashboardCardTrsaRegistryHeader}

+ +
+
+ From 69bdfaafb01de01a7d319494a08cca1cb33b6c64 Mon Sep 17 00:00:00 2001 From: Akio Sone Date: Tue, 12 Feb 2019 11:31:33 -0500 Subject: [PATCH 46/46] changes to logging levels --- .../engine/command/impl/UpdateDatasetVersionCommand.java | 4 ++-- .../java/edu/harvard/iq/dataverse/util/json/JsonParser.java | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java index 8c76883b0a3..4e525eded3e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java @@ -138,7 +138,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { // we don't want to create two draft versions! Dataset tempDataset = ctxt.em().merge(getDataset()); - logger.log(Level.INFO, "tempDataset={0}", xstream.toXML(tempDataset)); + logger.log(Level.FINE, "UpdateDatasetVersionCommand:execute:tempDataset={0}", xstream.toXML(tempDataset)); for (FileMetadata fmd : filesToDelete) { if (!fmd.getDataFile().isReleased()) { @@ -168,7 +168,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { tempDataset.setModificationTime(getTimestamp()); Dataset savedDataset = ctxt.em().merge(tempDataset); - logger.log(Level.INFO, "savedDataset={0}", xstream.toXML(savedDataset)); + logger.log(Level.FINE, "UpdateDatasetVersionCommand:execute:savedDataset={0}", xstream.toXML(savedDataset)); ctxt.em().flush(); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index 75df0fa7fae..742b9c66050 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -553,7 +553,7 @@ public 
DataFile parseDataFile(JsonObject datafileJson) { // get parsing results of a DataTable List dataTables = parseDataTables(dataTablesJson); logger.log(Level.INFO, "dataTables:size={0}", dataTables.size()); - logger.log(Level.INFO, "returned dataTables={0}", xstream.toXML(dataTables)); + logger.log(Level.FINE, "returned dataTables={0}", xstream.toXML(dataTables)); dataFile.setDataTables(dataTables); dataFile.setDataTable(dataTables.get(0)); dataTables.get(0).setDataFile(dataFile);