diff --git a/src/main/java/org/breedinginsight/brapps/importer/controllers/ImportController.java b/src/main/java/org/breedinginsight/brapps/importer/controllers/ImportController.java index e0a61117a..69dcb3e98 100644 --- a/src/main/java/org/breedinginsight/brapps/importer/controllers/ImportController.java +++ b/src/main/java/org/breedinginsight/brapps/importer/controllers/ImportController.java @@ -37,6 +37,7 @@ import org.breedinginsight.api.model.v1.response.metadata.StatusCode; import org.breedinginsight.api.v1.controller.metadata.AddMetadata; import org.breedinginsight.brapps.importer.model.mapping.ImportMapping; +import org.breedinginsight.brapps.importer.model.workflow.ImportWorkflow; import org.breedinginsight.brapps.importer.services.ImportConfigManager; import org.breedinginsight.brapps.importer.model.config.ImportConfigResponse; import org.breedinginsight.brapps.importer.services.FileImportService; @@ -208,4 +209,27 @@ public HttpResponse>> getSystemMappings(@Nu Response> response = new Response(metadata, new DataResponse<>(result)); return HttpResponse.ok(response); } + + @Get("/import/mappings/{mappingId}/workflows") + @Produces(MediaType.APPLICATION_JSON) + @AddMetadata + @Secured(SecurityRule.IS_ANONYMOUS) + public HttpResponse>> getWorkflowsForSystemMapping(@PathVariable UUID mappingId) throws Exception { + + List workflows = null; + try { + workflows = fileImportService.getWorkflowsForSystemMapping(mappingId); + } catch (DoesNotExistException e) { + log.error(e.getMessage(), e); + return HttpResponse.status(HttpStatus.UNPROCESSABLE_ENTITY, e.getMessage()); + } + + List metadataStatus = new ArrayList<>(); + metadataStatus.add(new Status(StatusCode.INFO, "Successful Query")); + Pagination pagination = new Pagination(workflows.size(), workflows.size(), 1, 0); + Metadata metadata = new Metadata(pagination, metadataStatus); + + Response> response = new Response(metadata, new DataResponse<>(workflows)); + return HttpResponse.ok(response); + } } diff --git 
a/src/main/java/org/breedinginsight/brapps/importer/controllers/UploadController.java b/src/main/java/org/breedinginsight/brapps/importer/controllers/UploadController.java index f9d55bd20..eb2e2ec50 100644 --- a/src/main/java/org/breedinginsight/brapps/importer/controllers/UploadController.java +++ b/src/main/java/org/breedinginsight/brapps/importer/controllers/UploadController.java @@ -114,7 +114,7 @@ public HttpResponse> commitData(@PathVariable UUID prog @PathVariable UUID uploadId, @Body @Nullable Map userInput) { try { AuthenticatedUser actingUser = securityService.getUser(); - ImportResponse result = fileImportService.updateUpload(programId, uploadId, actingUser, userInput, true); + ImportResponse result = fileImportService.updateUpload(programId, uploadId, null, actingUser, userInput, true); Response response = new Response(result); return HttpResponse.ok(response).status(HttpStatus.ACCEPTED); } catch (DoesNotExistException e) { @@ -140,7 +140,60 @@ public HttpResponse> previewData(@PathVariable UUID pro @PathVariable UUID uploadId) { try { AuthenticatedUser actingUser = securityService.getUser(); - ImportResponse result = fileImportService.updateUpload(programId, uploadId, actingUser, null, false); + ImportResponse result = fileImportService.updateUpload(programId, uploadId, null, actingUser, null, false); + Response response = new Response(result); + return HttpResponse.ok(response).status(HttpStatus.ACCEPTED); + } catch (DoesNotExistException e) { + log.error(e.getMessage(), e); + return HttpResponse.notFound(); + } catch (AuthorizationException e) { + log.error(e.getMessage(), e); + return HttpResponse.status(HttpStatus.FORBIDDEN, e.getMessage()); + } catch (UnprocessableEntityException e) { + log.error(e.getMessage(), e); + return HttpResponse.status(HttpStatus.UNPROCESSABLE_ENTITY, e.getMessage()); + } catch (HttpStatusException e) { + log.error(e.getMessage(), e); + return HttpResponse.status(e.getStatus(), e.getMessage()); + } + } + + 
@Put("programs/{programId}/import/mappings/{mappingId}/workflows/{workflowId}/data/{uploadId}/preview") + @Produces(MediaType.APPLICATION_JSON) + @AddMetadata + @ProgramSecured(roles = {ProgramSecuredRole.BREEDER, ProgramSecuredRole.SYSTEM_ADMIN}) + public HttpResponse> previewData(@PathVariable UUID programId, @PathVariable UUID mappingId, + @PathVariable String workflowId, @PathVariable UUID uploadId) { + try { + AuthenticatedUser actingUser = securityService.getUser(); + ImportResponse result = fileImportService.updateUpload(programId, uploadId, workflowId, actingUser, null, false); + Response response = new Response(result); + return HttpResponse.ok(response).status(HttpStatus.ACCEPTED); + } catch (DoesNotExistException e) { + log.error(e.getMessage(), e); + return HttpResponse.notFound(); + } catch (AuthorizationException e) { + log.error(e.getMessage(), e); + return HttpResponse.status(HttpStatus.FORBIDDEN, e.getMessage()); + } catch (UnprocessableEntityException e) { + log.error(e.getMessage(), e); + return HttpResponse.status(HttpStatus.UNPROCESSABLE_ENTITY, e.getMessage()); + } catch (HttpStatusException e) { + log.error(e.getMessage(), e); + return HttpResponse.status(e.getStatus(), e.getMessage()); + } + } + + @Put("programs/{programId}/import/mappings/{mappingId}/workflows/{workflowId}/data/{uploadId}/commit") + @Produces(MediaType.APPLICATION_JSON) + @AddMetadata + @ProgramSecured(roles = {ProgramSecuredRole.BREEDER, ProgramSecuredRole.SYSTEM_ADMIN}) + public HttpResponse> commitData(@PathVariable UUID programId, @PathVariable UUID mappingId, + @PathVariable String workflowId, @PathVariable UUID uploadId, + @Body @Nullable Map userInput) { + try { + AuthenticatedUser actingUser = securityService.getUser(); + ImportResponse result = fileImportService.updateUpload(programId, uploadId, workflowId, actingUser, userInput, true); Response response = new Response(result); return HttpResponse.ok(response).status(HttpStatus.ACCEPTED); } catch 
(DoesNotExistException e) { diff --git a/src/main/java/org/breedinginsight/brapps/importer/model/imports/BrAPIImportService.java b/src/main/java/org/breedinginsight/brapps/importer/model/imports/BrAPIImportService.java index 1d520371c..4f6971b3c 100644 --- a/src/main/java/org/breedinginsight/brapps/importer/model/imports/BrAPIImportService.java +++ b/src/main/java/org/breedinginsight/brapps/importer/model/imports/BrAPIImportService.java @@ -17,22 +17,15 @@ package org.breedinginsight.brapps.importer.model.imports; -import org.brapi.client.v2.model.exceptions.ApiException; -import org.breedinginsight.brapps.importer.model.ImportUpload; import org.breedinginsight.brapps.importer.model.response.ImportPreviewResponse; -import org.breedinginsight.model.Program; -import org.breedinginsight.model.User; -import org.breedinginsight.services.exceptions.DoesNotExistException; -import org.breedinginsight.services.exceptions.MissingRequiredInfoException; -import org.breedinginsight.services.exceptions.UnprocessableEntityException; -import org.breedinginsight.services.exceptions.ValidatorException; -import tech.tablesaw.api.Table; +import org.breedinginsight.brapps.importer.model.workflow.ImportWorkflow; import java.util.List; public interface BrAPIImportService { String getImportTypeId(); BrAPIImport getImportClass(); + List getWorkflows() throws Exception; default String getInvalidIntegerMsg(String columnName) { return String.format("Column name \"%s\" must be integer type, but non-integer type provided.", columnName); } @@ -48,6 +41,6 @@ default String getMissingUserInputMsg(String fieldName) { default String getWrongUserInputDataTypeMsg(String fieldName, String typeName) { return String.format("User input, \"%s\" must be an %s", fieldName, typeName); } - ImportPreviewResponse process(List brAPIImports, Table data, Program program, ImportUpload upload, User user, Boolean commit) + ImportPreviewResponse process(ImportServiceContext context) throws Exception; } diff --git 
a/src/main/java/org/breedinginsight/brapps/importer/model/imports/DomainImportService.java b/src/main/java/org/breedinginsight/brapps/importer/model/imports/DomainImportService.java new file mode 100644 index 000000000..6cfffe73c --- /dev/null +++ b/src/main/java/org/breedinginsight/brapps/importer/model/imports/DomainImportService.java @@ -0,0 +1,96 @@ +/* + * See the NOTICE file distributed with this work for additional information + * regarding copyright ownership. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.breedinginsight.brapps.importer.model.imports; + +import lombok.extern.slf4j.Slf4j; +import org.breedinginsight.brapps.importer.model.imports.experimentObservation.ExperimentObservation; +import org.breedinginsight.brapps.importer.model.response.ImportPreviewResponse; +import org.breedinginsight.brapps.importer.model.workflow.ExperimentWorkflow; +import org.breedinginsight.brapps.importer.model.workflow.ImportWorkflow; +import org.breedinginsight.brapps.importer.model.workflow.ImportWorkflowResult; +import org.breedinginsight.brapps.importer.model.workflow.Workflow; +import org.breedinginsight.brapps.importer.services.processors.ExperimentProcessor; +import org.breedinginsight.brapps.importer.services.processors.ProcessorManager; +import org.breedinginsight.brapps.importer.services.processors.experiment.ExperimentWorkflowNavigator; + +import javax.inject.Inject; +import javax.inject.Provider; +import javax.inject.Singleton; +import java.util.List; +import java.util.Optional; + +@Singleton +@Slf4j +public abstract class DomainImportService implements BrAPIImportService { + + // TODO: delete processor fields once WorkflowNavigator is used + private final Provider experimentProcessorProvider; + private final Provider processorManagerProvider; + private final Workflow workflowNavigator; + + + public DomainImportService(Provider experimentProcessorProvider, + Provider processorManagerProvider, + Workflow workflowNavigator) + { + this.experimentProcessorProvider = experimentProcessorProvider; + this.processorManagerProvider = processorManagerProvider; + this.workflowNavigator = workflowNavigator; + } + + @Override + public String getMissingColumnMsg(String columnName) { + return "Column heading does not match template or ontology"; + } + @Override + public List getWorkflows() throws Exception{ + return workflowNavigator.getWorkflows(); + } + + @Override + public ImportPreviewResponse process(ImportServiceContext context) + throws Exception { + + 
Optional.ofNullable(context.getWorkflowId()) + .filter(workflowId -> !workflowId.isEmpty()) + .ifPresent(workflowId -> log.info("Workflow: " + workflowId)); + + // TODO: return results from WorkflowNavigator once processing logic is in separate workflows + // return workflowNavigator.process(context).flatMap(ImportWorkflowResult::getImportPreviewResponse).orElse(null); + if (ExperimentWorkflowNavigator.Workflow.NEW_OBSERVATION.getId().equals(context.getWorkflowId())) { + Optional result = workflowNavigator.process(context); + + // Throw any exceptions caught during workflow processing + if (result.flatMap(ImportWorkflowResult::getCaughtException).isPresent()) { + throw result.flatMap(ImportWorkflowResult::getCaughtException).get(); + } + + return result.flatMap(ImportWorkflowResult::getImportPreviewResponse).orElse(null); + + } else { + return processorManagerProvider.get().process(context.getBrAPIImports(), + List.of(experimentProcessorProvider.get()), + context.getData(), + context.getProgram(), + context.getUpload(), + context.getUser(), + context.isCommit()); + } + } +} + diff --git a/src/main/java/org/breedinginsight/brapps/importer/model/imports/ImportServiceContext.java b/src/main/java/org/breedinginsight/brapps/importer/model/imports/ImportServiceContext.java new file mode 100644 index 000000000..90e8915f9 --- /dev/null +++ b/src/main/java/org/breedinginsight/brapps/importer/model/imports/ImportServiceContext.java @@ -0,0 +1,42 @@ +/* + * See the NOTICE file distributed with this work for additional information + * regarding copyright ownership. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.breedinginsight.brapps.importer.model.imports; + +import lombok.*; +import org.breedinginsight.brapps.importer.model.ImportUpload; +import org.breedinginsight.model.Program; +import org.breedinginsight.model.User; +import tech.tablesaw.api.Table; + +import java.util.List; + +@Getter +@Setter +@Builder +@ToString +@AllArgsConstructor +@NoArgsConstructor +public class ImportServiceContext { + private String workflowId; + private List brAPIImports; + private Table data; + private Program program; + private ImportUpload upload; + private User user; + private boolean commit; +} diff --git a/src/main/java/org/breedinginsight/brapps/importer/model/imports/experimentObservation/ExperimentImportService.java b/src/main/java/org/breedinginsight/brapps/importer/model/imports/experimentObservation/ExperimentImportService.java index cd795564a..a9b6c62ac 100644 --- a/src/main/java/org/breedinginsight/brapps/importer/model/imports/experimentObservation/ExperimentImportService.java +++ b/src/main/java/org/breedinginsight/brapps/importer/model/imports/experimentObservation/ExperimentImportService.java @@ -18,36 +18,28 @@ package org.breedinginsight.brapps.importer.model.imports.experimentObservation; import lombok.extern.slf4j.Slf4j; -import org.breedinginsight.brapps.importer.model.ImportUpload; -import org.breedinginsight.brapps.importer.model.imports.BrAPIImport; -import org.breedinginsight.brapps.importer.model.imports.BrAPIImportService; -import org.breedinginsight.brapps.importer.model.response.ImportPreviewResponse; +import 
org.breedinginsight.brapps.importer.model.imports.DomainImportService; import org.breedinginsight.brapps.importer.services.processors.ExperimentProcessor; -import org.breedinginsight.brapps.importer.services.processors.Processor; import org.breedinginsight.brapps.importer.services.processors.ProcessorManager; -import org.breedinginsight.model.Program; -import org.breedinginsight.model.User; -import tech.tablesaw.api.Table; +import org.breedinginsight.brapps.importer.services.processors.experiment.ExperimentWorkflowNavigator; import javax.inject.Inject; import javax.inject.Provider; import javax.inject.Singleton; -import java.util.List; @Singleton @Slf4j -public class ExperimentImportService implements BrAPIImportService { +public class ExperimentImportService extends DomainImportService { private final String IMPORT_TYPE_ID = "ExperimentImport"; - private final Provider experimentProcessorProvider; - private final Provider processorManagerProvider; - + // TODO: delete processor fields once WorkflowNavigator is used @Inject - public ExperimentImportService(Provider experimentProcessorProvider, Provider processorManagerProvider) + public ExperimentImportService(Provider experimentProcessorProvider, + Provider processorManagerProvider, + ExperimentWorkflowNavigator workflowNavigator) { - this.experimentProcessorProvider = experimentProcessorProvider; - this.processorManagerProvider = processorManagerProvider; + super(experimentProcessorProvider, processorManagerProvider, workflowNavigator); } @Override @@ -60,20 +52,5 @@ public String getImportTypeId() { return IMPORT_TYPE_ID; } - @Override - public String getMissingColumnMsg(String columnName) { - return "Column heading does not match template or ontology"; - } - - @Override - public ImportPreviewResponse process(List brAPIImports, Table data, Program program, ImportUpload upload, User user, Boolean commit) - throws Exception { - - ImportPreviewResponse response = null; - List processors = 
List.of(experimentProcessorProvider.get()); - response = processorManagerProvider.get().process(brAPIImports, processors, data, program, upload, user, commit); - return response; - - } } diff --git a/src/main/java/org/breedinginsight/brapps/importer/model/imports/germplasm/GermplasmImportService.java b/src/main/java/org/breedinginsight/brapps/importer/model/imports/germplasm/GermplasmImportService.java index b4eac6b96..0caebe65e 100644 --- a/src/main/java/org/breedinginsight/brapps/importer/model/imports/germplasm/GermplasmImportService.java +++ b/src/main/java/org/breedinginsight/brapps/importer/model/imports/germplasm/GermplasmImportService.java @@ -21,7 +21,9 @@ import org.breedinginsight.brapps.importer.model.ImportUpload; import org.breedinginsight.brapps.importer.model.imports.BrAPIImport; import org.breedinginsight.brapps.importer.model.imports.BrAPIImportService; +import org.breedinginsight.brapps.importer.model.imports.ImportServiceContext; import org.breedinginsight.brapps.importer.model.response.ImportPreviewResponse; +import org.breedinginsight.brapps.importer.model.workflow.ImportWorkflow; import org.breedinginsight.brapps.importer.services.processors.GermplasmProcessor; import org.breedinginsight.brapps.importer.services.processors.Processor; import org.breedinginsight.brapps.importer.services.processors.ProcessorManager; @@ -32,6 +34,7 @@ import javax.inject.Inject; import javax.inject.Provider; import javax.inject.Singleton; +import java.util.ArrayList; import java.util.List; @Singleton @@ -56,18 +59,29 @@ public GermplasmImport getImportClass() { return new GermplasmImport(); } + @Override + public List getWorkflows() { + return new ArrayList<>(); + } + @Override public String getImportTypeId() { return IMPORT_TYPE_ID; } @Override - public ImportPreviewResponse process(List brAPIImports, Table data, Program program, ImportUpload upload, User user, Boolean commit) + public ImportPreviewResponse process(ImportServiceContext context) throws Exception 
{ ImportPreviewResponse response = null; List processors = List.of(germplasmProcessorProvider.get()); - response = processorManagerProvider.get().process(brAPIImports, processors, data, program, upload, user, commit); + response = processorManagerProvider.get().process(context.getBrAPIImports(), + processors, + context.getData(), + context.getProgram(), + context.getUpload(), + context.getUser(), + context.isCommit()); return response; } } diff --git a/src/main/java/org/breedinginsight/brapps/importer/model/imports/sample/SampleSubmissionImportService.java b/src/main/java/org/breedinginsight/brapps/importer/model/imports/sample/SampleSubmissionImportService.java index 434626e68..eb7328ecf 100644 --- a/src/main/java/org/breedinginsight/brapps/importer/model/imports/sample/SampleSubmissionImportService.java +++ b/src/main/java/org/breedinginsight/brapps/importer/model/imports/sample/SampleSubmissionImportService.java @@ -21,7 +21,9 @@ import org.breedinginsight.brapps.importer.model.ImportUpload; import org.breedinginsight.brapps.importer.model.imports.BrAPIImport; import org.breedinginsight.brapps.importer.model.imports.BrAPIImportService; +import org.breedinginsight.brapps.importer.model.imports.ImportServiceContext; import org.breedinginsight.brapps.importer.model.response.ImportPreviewResponse; +import org.breedinginsight.brapps.importer.model.workflow.ImportWorkflow; import org.breedinginsight.brapps.importer.services.processors.Processor; import org.breedinginsight.brapps.importer.services.processors.ProcessorManager; import org.breedinginsight.brapps.importer.services.processors.SampleSubmissionProcessor; @@ -32,6 +34,7 @@ import javax.inject.Inject; import javax.inject.Provider; import javax.inject.Singleton; +import java.util.ArrayList; import java.util.List; @Singleton @@ -59,13 +62,19 @@ public BrAPIImport getImportClass() { } @Override - public ImportPreviewResponse process(List brAPIImports, - Table data, - Program program, - ImportUpload upload, - User 
user, - Boolean commit) throws Exception { + public List getWorkflows() { + return new ArrayList<>(); + } + + @Override + public ImportPreviewResponse process(ImportServiceContext context) throws Exception { List processors = List.of(sampleProcessorProvider.get()); - return processorManagerProvider.get().process(brAPIImports, processors, data, program, upload, user, commit); + return processorManagerProvider.get().process(context.getBrAPIImports(), + processors, + context.getData(), + context.getProgram(), + context.getUpload(), + context.getUser(), + context.isCommit()); } } diff --git a/src/main/java/org/breedinginsight/brapps/importer/model/workflow/ExperimentWorkflow.java b/src/main/java/org/breedinginsight/brapps/importer/model/workflow/ExperimentWorkflow.java new file mode 100644 index 000000000..c34794bab --- /dev/null +++ b/src/main/java/org/breedinginsight/brapps/importer/model/workflow/ExperimentWorkflow.java @@ -0,0 +1,6 @@ +package org.breedinginsight.brapps.importer.model.workflow; + +@FunctionalInterface +public interface ExperimentWorkflow extends Workflow { + +} diff --git a/src/main/java/org/breedinginsight/brapps/importer/model/workflow/GermplasmWorkflow.java b/src/main/java/org/breedinginsight/brapps/importer/model/workflow/GermplasmWorkflow.java new file mode 100644 index 000000000..f26342d3b --- /dev/null +++ b/src/main/java/org/breedinginsight/brapps/importer/model/workflow/GermplasmWorkflow.java @@ -0,0 +1,6 @@ +package org.breedinginsight.brapps.importer.model.workflow; + +@FunctionalInterface +public interface GermplasmWorkflow extends Workflow { + +} diff --git a/src/main/java/org/breedinginsight/brapps/importer/model/workflow/ImportContext.java b/src/main/java/org/breedinginsight/brapps/importer/model/workflow/ImportContext.java new file mode 100644 index 000000000..a7b6f7dc3 --- /dev/null +++ b/src/main/java/org/breedinginsight/brapps/importer/model/workflow/ImportContext.java @@ -0,0 +1,58 @@ +/* + * See the NOTICE file distributed with 
this work for additional information + * regarding copyright ownership. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.breedinginsight.brapps.importer.model.workflow; + +import lombok.*; +import org.breedinginsight.brapps.importer.model.ImportUpload; +import org.breedinginsight.brapps.importer.model.imports.BrAPIImport; +import org.breedinginsight.brapps.importer.model.imports.ImportServiceContext; +import org.breedinginsight.brapps.importer.model.imports.PendingImport; +import org.breedinginsight.model.Program; +import org.breedinginsight.model.User; +import tech.tablesaw.api.Table; + +import java.util.List; +import java.util.Map; +import java.util.UUID; + +@Getter +@Setter +@Builder +@ToString +@AllArgsConstructor +@NoArgsConstructor +public class ImportContext { + private UUID workflowId; + private ImportUpload upload; + private List importRows; + private Table data; + private Program program; + private User user; + private boolean commit; + + public static ImportContext from(ImportServiceContext importServiceContext) { + return ImportContext.builder() + .program(importServiceContext.getProgram()) + .user(importServiceContext.getUser()) + .commit(importServiceContext.isCommit()) + .data(importServiceContext.getData()) + .importRows(importServiceContext.getBrAPIImports()) + .upload(importServiceContext.getUpload()) + .build(); + } +} \ No newline at end of file diff --git 
a/src/main/java/org/breedinginsight/brapps/importer/model/workflow/ImportWorkflow.java b/src/main/java/org/breedinginsight/brapps/importer/model/workflow/ImportWorkflow.java new file mode 100644 index 000000000..30dbac06a --- /dev/null +++ b/src/main/java/org/breedinginsight/brapps/importer/model/workflow/ImportWorkflow.java @@ -0,0 +1,31 @@ +///* +// * See the NOTICE file distributed with this work for additional information +// * regarding copyright ownership. +// * +// * Licensed under the Apache License, Version 2.0 (the "License"); +// * you may not use this file except in compliance with the License. +// * You may obtain a copy of the License at +// * +// * http://www.apache.org/licenses/LICENSE-2.0 +// * +// * Unless required by applicable law or agreed to in writing, software +// * distributed under the License is distributed on an "AS IS" BASIS, +// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// * See the License for the specific language governing permissions and +// * limitations under the License. +// */ + +package org.breedinginsight.brapps.importer.model.workflow; + +import lombok.*; + +@Getter +@Setter +@Builder +@ToString +@AllArgsConstructor +public class ImportWorkflow { + private String id; + private String name; + private int order; +} diff --git a/src/main/java/org/breedinginsight/brapps/importer/model/workflow/ImportWorkflowResult.java b/src/main/java/org/breedinginsight/brapps/importer/model/workflow/ImportWorkflowResult.java new file mode 100644 index 000000000..f59b83b55 --- /dev/null +++ b/src/main/java/org/breedinginsight/brapps/importer/model/workflow/ImportWorkflowResult.java @@ -0,0 +1,34 @@ +///* +// * See the NOTICE file distributed with this work for additional information +// * regarding copyright ownership. +// * +// * Licensed under the Apache License, Version 2.0 (the "License"); +// * you may not use this file except in compliance with the License. 
+// * You may obtain a copy of the License at +// * +// * http://www.apache.org/licenses/LICENSE-2.0 +// * +// * Unless required by applicable law or agreed to in writing, software +// * distributed under the License is distributed on an "AS IS" BASIS, +// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// * See the License for the specific language governing permissions and +// * limitations under the License. +// */ + +package org.breedinginsight.brapps.importer.model.workflow; + +import lombok.*; +import org.breedinginsight.brapps.importer.model.response.ImportPreviewResponse; + +import java.util.Optional; + +@Getter +@Setter +@Builder +@ToString +@AllArgsConstructor +public class ImportWorkflowResult { + private ImportWorkflow workflow; + private Optional importPreviewResponse; + private Optional caughtException; +} diff --git a/src/main/java/org/breedinginsight/brapps/importer/model/workflow/ProcessedData.java b/src/main/java/org/breedinginsight/brapps/importer/model/workflow/ProcessedData.java new file mode 100644 index 000000000..fc29774f0 --- /dev/null +++ b/src/main/java/org/breedinginsight/brapps/importer/model/workflow/ProcessedData.java @@ -0,0 +1,32 @@ +/* + * See the NOTICE file distributed with this work for additional information + * regarding copyright ownership. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.breedinginsight.brapps.importer.model.workflow; + +import lombok.*; +import org.breedinginsight.brapps.importer.model.imports.PendingImport; +import org.breedinginsight.brapps.importer.model.response.ImportPreviewResponse; +import org.breedinginsight.brapps.importer.model.response.ImportPreviewStatistics; + +import java.util.Map; + +@Data +@ToString +@NoArgsConstructor +public class ProcessedData { + private Map mappedBrAPIImport; +} \ No newline at end of file diff --git a/src/main/java/org/breedinginsight/brapps/importer/model/workflow/SampleSubmissionWorkflow.java b/src/main/java/org/breedinginsight/brapps/importer/model/workflow/SampleSubmissionWorkflow.java new file mode 100644 index 000000000..19be9bdcc --- /dev/null +++ b/src/main/java/org/breedinginsight/brapps/importer/model/workflow/SampleSubmissionWorkflow.java @@ -0,0 +1,6 @@ +package org.breedinginsight.brapps.importer.model.workflow; + +@FunctionalInterface +public interface SampleSubmissionWorkflow extends Workflow { + +} diff --git a/src/main/java/org/breedinginsight/brapps/importer/model/workflow/Workflow.java b/src/main/java/org/breedinginsight/brapps/importer/model/workflow/Workflow.java new file mode 100644 index 000000000..17f48ee57 --- /dev/null +++ b/src/main/java/org/breedinginsight/brapps/importer/model/workflow/Workflow.java @@ -0,0 +1,17 @@ +package org.breedinginsight.brapps.importer.model.workflow; + +import io.micronaut.core.order.Ordered; +import org.breedinginsight.brapps.importer.model.imports.ImportServiceContext; + +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; + +@FunctionalInterface +public interface Workflow extends Ordered { + Optional process(ImportServiceContext context); + default List getWorkflows() { + // Default implementation for getWorkflows method + return new ArrayList<>(); + } +} diff --git a/src/main/java/org/breedinginsight/brapps/importer/services/FileImportService.java 
b/src/main/java/org/breedinginsight/brapps/importer/services/FileImportService.java index 05c48601d..e24cfeef2 100644 --- a/src/main/java/org/breedinginsight/brapps/importer/services/FileImportService.java +++ b/src/main/java/org/breedinginsight/brapps/importer/services/FileImportService.java @@ -36,10 +36,12 @@ import org.breedinginsight.brapps.importer.model.ImportUpload; import org.breedinginsight.brapps.importer.model.config.ImportConfigResponse; import org.breedinginsight.brapps.importer.model.imports.BrAPIImportService; +import org.breedinginsight.brapps.importer.model.imports.ImportServiceContext; import org.breedinginsight.brapps.importer.model.mapping.ImportMapping; import org.breedinginsight.brapps.importer.model.imports.BrAPIImport; import org.breedinginsight.brapps.importer.model.response.ImportResponse; import org.breedinginsight.brapps.importer.daos.ImportMappingDAO; +import org.breedinginsight.brapps.importer.model.workflow.ImportWorkflow; import org.breedinginsight.dao.db.tables.pojos.ImporterMappingEntity; import org.breedinginsight.dao.db.tables.pojos.ImporterMappingProgramEntity; import org.breedinginsight.model.Program; @@ -322,7 +324,7 @@ public ImportResponse uploadData(UUID programId, UUID mappingId, AuthenticatedUs return response; } - public ImportResponse updateUpload(UUID programId, UUID uploadId, AuthenticatedUser actingUser, Map userInput, Boolean commit) throws + public ImportResponse updateUpload(UUID programId, UUID uploadId, String workflow, AuthenticatedUser actingUser, Map userInput, Boolean commit) throws DoesNotExistException, UnprocessableEntityException, AuthorizationException { Program program = validateRequest(programId, actingUser); @@ -372,7 +374,7 @@ public ImportResponse updateUpload(UUID programId, UUID uploadId, AuthenticatedU } else { brAPIImportList = mappingManager.map(mappingConfig, data); } - processFile(brAPIImportList, data, program, upload, user, commit, importService, actingUser); + processFile(workflow, 
brAPIImportList, data, program, upload, user, commit, importService, actingUser); } catch (UnprocessableEntityException e) { log.error(e.getMessage(), e); ImportProgress progress = upload.getProgress(); @@ -418,13 +420,22 @@ public ImportUpload setDynamicColumns(ImportUpload newUpload, Table data, Import return newUpload; } - private void processFile(List finalBrAPIImportList, Table data, Program program, - ImportUpload upload, User user, Boolean commit, BrAPIImportService importService, - AuthenticatedUser actingUser) { + private void processFile(String workflowId, List finalBrAPIImportList, Table data, Program program, + ImportUpload upload, User user, Boolean commit, BrAPIImportService importService, + AuthenticatedUser actingUser) { // Spin off new process for processing the file CompletableFuture.supplyAsync(() -> { try { - importService.process(finalBrAPIImportList, data, program, upload, user, commit); + ImportServiceContext context = ImportServiceContext.builder() + .workflowId(workflowId) + .brAPIImports(finalBrAPIImportList) + .data(data) + .program(program) + .upload(upload) + .user(user) + .commit(commit) + .build(); + importService.process(context); } catch (UnprocessableEntityException e) { log.error(e.getMessage(), e); ImportProgress progress = upload.getProgress(); @@ -559,4 +570,14 @@ public List getSystemMappingByName(String name) { List importMappings = importMappingDAO.getSystemMappingByName(name); return importMappings; } + + public List getWorkflowsForSystemMapping(UUID mappingId) throws DoesNotExistException, Exception { + ImportMapping mappingConfig = importMappingDAO.getMapping(mappingId) + .orElseThrow(() -> new DoesNotExistException("Cannot find mapping config associated with upload.")); + BrAPIImportService importService = configManager.getImportServiceById(mappingConfig.getImportTypeId()) + .orElseThrow(() -> new DoesNotExistException("Config with that id does not exist")); + return importService.getWorkflows(); + } + + } diff --git 
a/src/main/java/org/breedinginsight/brapps/importer/services/FileMappingUtil.java b/src/main/java/org/breedinginsight/brapps/importer/services/FileMappingUtil.java index cb93daf48..8d3ed8146 100644 --- a/src/main/java/org/breedinginsight/brapps/importer/services/FileMappingUtil.java +++ b/src/main/java/org/breedinginsight/brapps/importer/services/FileMappingUtil.java @@ -32,13 +32,7 @@ @Singleton public class FileMappingUtil { - public static final String EXPERIMENT_TEMPLATE_NAME = "ExperimentsTemplateMap"; - private FileImportService fileImportService; - - - @Inject - public FileMappingUtil(FileImportService fileImportService) { - this.fileImportService = fileImportService; + public FileMappingUtil() { } // Returns a list of integers to identify the target row of the relationship. -1 if no relationship was found diff --git a/src/main/java/org/breedinginsight/brapps/importer/services/ImportStatusService.java b/src/main/java/org/breedinginsight/brapps/importer/services/ImportStatusService.java index e1dcefc63..d69934ed0 100644 --- a/src/main/java/org/breedinginsight/brapps/importer/services/ImportStatusService.java +++ b/src/main/java/org/breedinginsight/brapps/importer/services/ImportStatusService.java @@ -42,17 +42,20 @@ public ImportStatusService(ImportDAO importDAO, ObjectMapper objMapper) { } public void updateMessage(ImportUpload upload, String message) { + log.debug(message); upload.getProgress().setMessage(message); importDAO.update(upload); } public void startUpload(ImportUpload upload, long numberObjects, String message) { + log.debug(message); upload.getProgress().setTotal(numberObjects); upload.getProgress().setMessage(message); importDAO.update(upload); } public void finishUpload(ImportUpload upload, long numberObjects, String message) { + log.debug(message); // Update progress to reflect final finished and inProgress counts. 
upload.updateProgress(Math.toIntExact(numberObjects), 0); upload.getProgress().setMessage(message); @@ -61,6 +64,7 @@ public void finishUpload(ImportUpload upload, long numberObjects, String message } public void updateMappedData(ImportUpload upload, ImportPreviewResponse response, String message) { + log.debug(message); // Save our results to the db JSON config = new JSON(); String json = config.getGson().toJson(response); diff --git a/src/main/java/org/breedinginsight/brapps/importer/services/processors/ProcessorData.java b/src/main/java/org/breedinginsight/brapps/importer/services/processors/ProcessorData.java index 41f432f72..1a4a806b9 100644 --- a/src/main/java/org/breedinginsight/brapps/importer/services/processors/ProcessorData.java +++ b/src/main/java/org/breedinginsight/brapps/importer/services/processors/ProcessorData.java @@ -26,27 +26,27 @@ public class ProcessorData { - static int getNumNewObjects(Map> objectsByName) { + public static int getNumNewObjects(Map> objectsByName) { long numNewObjects = objectsByName.values().stream() .filter(preview -> preview != null && preview.getState() == ImportObjectState.NEW) .count(); return Math.toIntExact(numNewObjects); } - static int getNumExistingObjects(Map> objectsByName) { + public static int getNumExistingObjects(Map> objectsByName) { long numExistingObjects = objectsByName.values().stream() .filter(preview -> preview != null && preview.getState() == ImportObjectState.EXISTING) .count(); return Math.toIntExact(numExistingObjects); } - static List getNewObjects(Map> objectsByName) { + public static List getNewObjects(Map> objectsByName) { return objectsByName.values().stream() .filter(preview -> preview != null && preview.getState() == ImportObjectState.NEW) .map(preview -> preview.getBrAPIObject()) .collect(Collectors.toList()); } - static Map getMutationsByObjectId(Map> objectsByName, Function dbIdFilter) { + public static Map getMutationsByObjectId(Map> objectsByName, Function dbIdFilter) { return 
/*
 * See the NOTICE file distributed with this work for additional information
 * regarding copyright ownership.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.breedinginsight.brapps.importer.services.processors.experiment;

import lombok.Getter;
import tech.tablesaw.api.Table;
import tech.tablesaw.columns.Column;

import java.util.ArrayList;
import java.util.List;

import static org.breedinginsight.brapps.importer.services.processors.experiment.ExperimentUtilities.TIMESTAMP_PREFIX;

/**
 * Splits the dynamic (unmapped) columns of an import table into phenotype columns
 * and timestamp columns, the latter identified by the {@code TS:} name prefix.
 */
public class DynamicColumnParser {

    /**
     * Parses dynamic columns from a table and separates them into phenotype and timestamp columns.
     *
     * @param data               the table containing the dynamic columns
     * @param dynamicColumnNames the dynamic column names to be parsed
     * @return the parsed phenotype and timestamp columns
     */
    public static DynamicColumnParseResult parse(Table data, String[] dynamicColumnNames) {
        // NOTE(review): generic parameters reconstructed — Table.columns(String...) returns List<Column<?>>.
        List<Column<?>> dynamicCols = data.columns(dynamicColumnNames);
        List<Column<?>> phenotypeCols = new ArrayList<>();
        List<Column<?>> timestampCols = new ArrayList<>();

        for (Column<?> dynamicCol : dynamicCols) {
            if (dynamicCol.name().startsWith(TIMESTAMP_PREFIX)) {
                timestampCols.add(dynamicCol);
            } else {
                phenotypeCols.add(dynamicCol);
            }
        }

        return new DynamicColumnParseResult(phenotypeCols, timestampCols);
    }

    /** Immutable pair of phenotype and timestamp column lists produced by {@link #parse}. */
    @Getter
    public static class DynamicColumnParseResult {
        private final List<Column<?>> phenotypeCols;
        private final List<Column<?>> timestampCols;

        public DynamicColumnParseResult(List<Column<?>> phenotypeCols, List<Column<?>> timestampCols) {
            this.phenotypeCols = phenotypeCols;
            this.timestampCols = timestampCols;
        }
    }
}
/*
 * See the NOTICE file distributed with this work for additional information
 * regarding copyright ownership.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.breedinginsight.brapps.importer.services.processors.experiment;

import com.google.gson.JsonObject;
import io.micronaut.http.HttpStatus;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.lang3.StringUtils;
import org.brapi.v2.model.core.BrAPIStudy;
import org.brapi.v2.model.germ.BrAPIGermplasm;
import org.brapi.v2.model.pheno.BrAPIObservation;
import org.brapi.v2.model.pheno.BrAPIScaleValidValuesCategories;
import org.breedinginsight.api.model.v1.response.ValidationError;
import org.breedinginsight.api.model.v1.response.ValidationErrors;
import org.breedinginsight.brapi.v2.constants.BrAPIAdditionalInfoFields;
import org.breedinginsight.brapps.importer.model.imports.BrAPIImport;
import org.breedinginsight.brapps.importer.model.imports.experimentObservation.ExperimentObservation;
import org.breedinginsight.brapps.importer.model.response.PendingImportObject;
import org.breedinginsight.brapps.importer.services.processors.experiment.create.model.PendingData;
import org.breedinginsight.brapps.importer.services.processors.experiment.create.model.ProcessedPhenotypeData;
import org.breedinginsight.model.Program;
import org.breedinginsight.model.Scale;
import org.breedinginsight.model.Trait;
import tech.tablesaw.columns.Column;

import java.math.BigDecimal;
import java.time.OffsetDateTime;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
import java.util.*;
import java.util.stream.Collectors;

/**
 * Static helpers shared by the experiment import workflows: import-row conversion,
 * observation hashing, study metadata population, and observation/timestamp validation.
 *
 * NOTE(review): generic type parameters in this file were reconstructed from the
 * visible imports and usages after being lost in extraction — verify against the
 * original source.
 */
@Slf4j
public class ExperimentUtilities {

    public static final CharSequence COMMA_DELIMITER = ",";
    public static final String TIMESTAMP_PREFIX = "TS:";
    public static final String TIMESTAMP_REGEX = "^" + TIMESTAMP_PREFIX + "\\s*";
    public static final String MIDNIGHT = "T00:00:00-00:00";
    public static final String MULTIPLE_EXP_TITLES = "File contains more than one Experiment Title";
    public static final String PREEXISTING_EXPERIMENT_TITLE = "Experiment Title already exists";
    public static final String MISSING_OBS_UNIT_ID_ERROR = "Experimental entities are missing ObsUnitIDs";

    /** Downcasts generic import rows to {@link ExperimentObservation} rows. */
    public static List<ExperimentObservation> importRowsToExperimentObservations(List<BrAPIImport> importRows) {
        return importRows.stream()
                .map(trialImport -> (ExperimentObservation) trialImport)
                .collect(Collectors.toList());
    }

    /** True when the value parses as an ISO-8601 date-time. */
    public static boolean validDateTimeValue(String value) {
        DateTimeFormatter formatter = DateTimeFormatter.ISO_DATE_TIME;
        try {
            formatter.parse(value);
        } catch (DateTimeParseException e) {
            return false;
        }
        return true;
    }

    /**
     * Stable hash identifying one observation: SHA-256 over the concatenated
     * per-part hashes of unit name, variable name, and study name (null-safe).
     */
    public static String getObservationHash(String observationUnitName, String variableName, String studyName) {
        String concat = DigestUtils.sha256Hex(observationUnitName) +
                DigestUtils.sha256Hex(variableName) +
                DigestUtils.sha256Hex(StringUtils.defaultString(studyName));
        return DigestUtils.sha256Hex(concat);
    }

    /*
     * this finds the YEAR from the season list on the BrAPIStudy and then
     * will add the year to the additionalInfo-field of the BrAPIStudy
     * */
    public static void addYearToStudyAdditionalInfo(Program program, BrAPIStudy study) {
        JsonObject additionalInfo = study.getAdditionalInfo();

        //if it is already there, don't add it.
        if (additionalInfo == null || additionalInfo.get(BrAPIAdditionalInfoFields.ENV_YEAR) == null) {
            String year = study.getSeasons().get(0);
            addYearToStudyAdditionalInfo(program, study, year);
        }
    }

    /*
     * this will add the given year to the additionalInfo field of the BrAPIStudy (if it does not already exist)
     * */
    public static void addYearToStudyAdditionalInfo(Program program, BrAPIStudy study, String year) {
        JsonObject additionalInfo = study.getAdditionalInfo();
        if (additionalInfo == null) {
            additionalInfo = new JsonObject();
            study.setAdditionalInfo(additionalInfo);
        }
        if (additionalInfo.get(BrAPIAdditionalInfoFields.ENV_YEAR) == null) {
            additionalInfo.addProperty(BrAPIAdditionalInfoFields.ENV_YEAR, year);
        }
    }

    /** Key identifying an observation unit within its environment (env + unit id). */
    public static String createObservationUnitKey(ExperimentObservation importRow) {
        return createObservationUnitKey(importRow.getEnv(), importRow.getExpUnitId());
    }

    /** Plain concatenation of study name and observation unit name. */
    public static String createObservationUnitKey(String studyName, String obsUnitName) {
        return studyName + obsUnitName;
    }

    /** Observation hash for an import row + variable, using the row's env as the study name. */
    public static String getImportObservationHash(ExperimentObservation importRow, String variableName) {
        return getObservationHash(createObservationUnitKey(importRow), variableName, importRow.getEnv());
    }

    public static String getVariableNameFromColumn(Column<?> column) {
        // TODO: timestamp stripping?
        return column.name();
    }

    // TODO: common validation stuff, could probably be moved somewhere more specific to validation
    public static void addRowError(String field, String errorMessage, ValidationErrors validationErrors, int rowNum) {
        ValidationError ve = new ValidationError(field, errorMessage, HttpStatus.UNPROCESSABLE_ENTITY);
        validationErrors.addError(rowNum + 2, ve);  // +2 instead of +1 to account for the column header row.
    }

    // TODO: will have different pending data objects between workflows so not totally reusable as-is
    // could probably just pass in actual underlying maps
    /**
     * True when the imported value (and its timestamp, when the phenotype column has
     * an associated timestamp column) matches the already-stored observation.
     */
    public static boolean isObservationMatched(ProcessedPhenotypeData phenotypeData,
                                               PendingData pendingData,
                                               String observationHash,
                                               String value,
                                               Column<?> phenoCol,
                                               Integer rowNum) {
        Map<String, Column<?>> timeStampColByPheno = phenotypeData.getTimeStampColByPheno();

        if (timeStampColByPheno.isEmpty() || !timeStampColByPheno.containsKey(phenoCol.name())) {
            return isValueMatched(pendingData, observationHash, value);
        } else {
            String importObsTimestamp = timeStampColByPheno.get(phenoCol.name()).getString(rowNum);
            return isTimestampMatched(pendingData, observationHash, importObsTimestamp)
                    && isValueMatched(pendingData, observationHash, value);
        }
    }

    // TODO: will have different pending data objects between workflows so not totally reusable as-is
    // could probably just pass in actual underlying maps
    /** True when the stored observation value equals the imported value (both-null counts as a match). */
    public static boolean isValueMatched(PendingData pendingData, String observationHash, String value) {
        Map<String, BrAPIObservation> existingObsByObsHash = pendingData.getExistingObsByObsHash();

        if (!existingObsByObsHash.containsKey(observationHash) || existingObsByObsHash.get(observationHash).getValue() == null) {
            return value == null;
        }
        return existingObsByObsHash.get(observationHash).getValue().equals(value);
    }

    // TODO: will have different pending data objects between workflows so not totally reusable as-is
    // could probably just pass in actual underlying maps
    /** True when the stored observation timestamp equals the imported timestamp (both-null counts as a match). */
    public static boolean isTimestampMatched(PendingData pendingData, String observationHash, String timeStamp) {
        OffsetDateTime priorStamp = null;
        Map<String, BrAPIObservation> existingObsByObsHash = pendingData.getExistingObsByObsHash();

        if (existingObsByObsHash.get(observationHash) != null) {
            priorStamp = existingObsByObsHash.get(observationHash).getObservationTimeStamp();
        }
        if (priorStamp == null) {
            return timeStamp == null;
        }
        boolean isMatched = false;
        try {
            isMatched = priorStamp.isEqual(OffsetDateTime.parse(timeStamp));
        } catch (DateTimeParseException e) {
            // if timestamp is invalid DateTime not equal to validated priorStamp
            log.error(e.getMessage(), e);
        }
        return isMatched;
    }

    /**
     * Validates a single observation value against its trait's scale (numeric range,
     * date format, ordinal/nominal category membership), adding row errors as needed.
     * Blank and "NA" values are skipped.
     */
    public static void validateObservationValue(Trait variable, String value,
                                                String columnHeader, ValidationErrors validationErrors, int row) {
        if (StringUtils.isBlank(value)) {
            log.debug(String.format("skipping validation of observation because there is no value.\n\tvariable: %s\n\trow: %d", variable.getObservationVariableName(), row));
            return;
        }

        if (isNAObservation(value)) {
            log.debug(String.format("skipping validation of observation because it is NA.\n\tvariable: %s\n\trow: %d", variable.getObservationVariableName(), row));
            return;
        }

        switch (variable.getScale().getDataType()) {
            case NUMERICAL:
                Optional<BigDecimal> number = validNumericValue(value);
                if (number.isEmpty()) {
                    // Fixed duplicated word in the original message ("detected detected").
                    addRowError(columnHeader, "Non-numeric text detected", validationErrors, row);
                } else if (!validNumericRange(number.get(), variable.getScale())) {
                    addRowError(columnHeader, "Value outside of min/max range detected", validationErrors, row);
                }
                break;
            case DATE:
                if (!validDateValue(value)) {
                    addRowError(columnHeader, "Incorrect date format detected. Expected YYYY-MM-DD", validationErrors, row);
                }
                break;
            case ORDINAL:
                if (!validCategory(variable.getScale().getCategories(), value)) {
                    addRowError(columnHeader, "Undefined ordinal category detected", validationErrors, row);
                }
                break;
            case NOMINAL:
                if (!validCategory(variable.getScale().getCategories(), value)) {
                    addRowError(columnHeader, "Undefined nominal category detected", validationErrors, row);
                }
                break;
            default:
                break;
        }

    }

    /** Parses the value as a decimal number, empty when it is not numeric. */
    public static Optional<BigDecimal> validNumericValue(String value) {
        BigDecimal number;
        try {
            number = new BigDecimal(value);
        } catch (NumberFormatException e) {
            return Optional.empty();
        }
        return Optional.of(number);
    }

    public static boolean validNumericRange(BigDecimal value, Scale validValues) {
        // account for empty min or max in valid determination
        return (validValues.getValidValueMin() == null || value.compareTo(BigDecimal.valueOf(validValues.getValidValueMin())) >= 0) &&
                (validValues.getValidValueMax() == null || value.compareTo(BigDecimal.valueOf(validValues.getValidValueMax())) <= 0);
    }

    /** True when the value parses as an ISO-8601 date. */
    public static boolean validDateValue(String value) {
        DateTimeFormatter formatter = DateTimeFormatter.ISO_DATE;
        try {
            formatter.parse(value);
        } catch (DateTimeParseException e) {
            return false;
        }
        return true;
    }

    /** Case-insensitive membership test of the value against the scale's categories. */
    public static boolean validCategory(List<BrAPIScaleValidValuesCategories> categories, String value) {
        Set<String> categoryValues = categories.stream()
                .map(category -> category.getValue().toLowerCase())
                .collect(Collectors.toSet());
        return categoryValues.contains(value.toLowerCase());
    }

    public static boolean isNAObservation(String value) {
        return value.equalsIgnoreCase("NA");
    }

    /** Adds a row error unless the value is blank or a valid date/date-time. */
    public static void validateTimeStampValue(String value,
                                              String columnHeader, ValidationErrors validationErrors, int row) {
        if (StringUtils.isBlank(value)) {
            log.debug(String.format("skipping validation of observation timestamp because there is no value.\n\tvariable: %s\n\trow: %d", columnHeader, row));
            return;
        }
        if (!validDateValue(value) && !validDateTimeValue(value)) {
            addRowError(columnHeader, "Incorrect datetime format detected. Expected YYYY-MM-DD or YYYY-MM-DDThh:mm:ss+hh:mm", validationErrors, row);
        }

    }

}
package org.breedinginsight.brapps.importer.services.processors.experiment;

import io.micronaut.context.annotation.Primary;
import org.breedinginsight.brapps.importer.model.imports.ImportServiceContext;
import org.breedinginsight.brapps.importer.model.workflow.ImportWorkflow;
import org.breedinginsight.brapps.importer.model.workflow.ExperimentWorkflow;
import org.breedinginsight.brapps.importer.model.workflow.ImportWorkflowResult;

import javax.inject.Singleton;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;

/**
 * Primary experiment workflow: delegates an import context to the injected list of
 * concrete workflows, in their declared order, and returns the first one that
 * handles the context.
 */
@Primary
@Singleton
public class ExperimentWorkflowNavigator implements ExperimentWorkflow {
    // Injected in Ordered order; each entry either handles a context or declines it.
    private final List<ExperimentWorkflow> workflows;

    public ExperimentWorkflowNavigator(List<ExperimentWorkflow> workflows) {
        this.workflows = workflows;
    }

    /**
     * Process the import service context by executing each workflow in order and
     * returning the first non-empty result.
     *
     * @param context the import service context containing the data to be processed
     * @return the first non-empty {@link ImportWorkflowResult}, or empty if no workflow handled the context
     */
    @Override
    public Optional<ImportWorkflowResult> process(ImportServiceContext context) {
        return workflows.stream()
                .map(workflow -> workflow.process(context))
                .filter(Optional::isPresent)
                .map(Optional::get)
                .findFirst();
    }

    /**
     * Retrieves metadata about each registered workflow. Each workflow is probed with a
     * null context, which (by convention) yields a metadata-only result rather than a preview.
     *
     * @return list of {@link ImportWorkflow} metadata, with {@code order} set by list position
     */
    public List<ImportWorkflow> getWorkflows() {
        List<ImportWorkflow> workflowSummaryList = workflows.stream()
                .map(workflow -> workflow.process(null))   // null context => metadata-only probe
                .filter(Optional::isPresent)               // drop workflows that return no result
                .map(Optional::get)
                .map(result -> result.getWorkflow())       // extract the workflow metadata
                .collect(Collectors.toList());

        // Order reflects position in the injected workflow list.
        for (int i = 0; i < workflowSummaryList.size(); i++) {
            workflowSummaryList.get(i).setOrder(i);
        }

        return workflowSummaryList;
    }

    /** Well-known experiment workflow identifiers exposed to clients. */
    public enum Workflow {
        NEW_OBSERVATION("new-experiment", "Create new experiment"),
        APPEND_OVERWRITE("append-dataset", "Append experimental dataset");

        private String id;
        private String name;

        Workflow(String id, String name) {
            this.id = id;
            this.name = name;
        }

        public String getId() {
            return id;
        }

        public String getName() {
            return name;
        }

        public boolean isEqual(String value) {
            // id is never null, so a direct equals suffices. The previous
            // Optional.ofNullable(id.equals(value)).orElse(false) wrapped a primitive
            // boolean that can never be null — a no-op box/unbox round-trip.
            return id.equals(value);
        }
    }
}
package org.breedinginsight.brapps.importer.services.processors.experiment.append.workflow;

import lombok.Getter;
import org.breedinginsight.brapps.importer.model.imports.ImportServiceContext;
import org.breedinginsight.brapps.importer.model.workflow.ImportWorkflow;
import org.breedinginsight.brapps.importer.model.workflow.ImportWorkflowResult;
import org.breedinginsight.brapps.importer.model.workflow.ExperimentWorkflow;
import org.breedinginsight.brapps.importer.services.processors.experiment.ExperimentWorkflowNavigator;

import javax.inject.Singleton;
import java.util.Optional;

/**
 * Workflow handling the append/overwrite-observations path of an experiment import.
 * Declines contexts addressed to other workflows; a null context yields a
 * metadata-only (no-preview) result used by the workflow listing endpoint.
 */
@Getter
@Singleton
public class AppendOverwritePhenotypesWorkflow implements ExperimentWorkflow {
    private final ExperimentWorkflowNavigator.Workflow workflow;

    public AppendOverwritePhenotypesWorkflow() {
        this.workflow = ExperimentWorkflowNavigator.Workflow.APPEND_OVERWRITE;
    }

    @Override
    public Optional<ImportWorkflowResult> process(ImportServiceContext context) {
        // Metadata describing this workflow. Renamed from "workflow" to avoid
        // shadowing the field of the same name.
        ImportWorkflow workflowSummary = ImportWorkflow.builder()
                .id(getWorkflow().getId())
                .name(getWorkflow().getName())
                .build();

        // No-preview result
        Optional<ImportWorkflowResult> result = Optional.of(ImportWorkflowResult.builder()
                .workflow(workflowSummary)
                .importPreviewResponse(Optional.empty())
                .build());

        // Skip this workflow unless appending or overwriting observation data
        if (context != null && !this.workflow.isEqual(context.getWorkflowId())) {
            return Optional.empty();
        }

        // Skip processing if no context, but return no-preview result for this workflow
        if (context == null) {
            return result;
        }

        // Start processing the import...
        return result;
    }

    @Override
    public int getOrder() {
        return 2;
    }

}

// --- PopulateModifiedPendingImportObjectsStep.java ---
package org.breedinginsight.brapps.importer.services.processors.experiment.append.workflow.steps;

// TODO: think this would be for other workflow only
public class PopulateModifiedPendingImportObjectsStep {
}
/*
 * See the NOTICE file distributed with this work for additional information
 * regarding copyright ownership.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.breedinginsight.brapps.importer.services.processors.experiment.create.model;

import lombok.*;
import org.brapi.v2.model.core.BrAPIStudy;
import org.brapi.v2.model.core.BrAPITrial;
import org.brapi.v2.model.core.response.BrAPIListDetails;
import org.brapi.v2.model.germ.BrAPIGermplasm;
import org.brapi.v2.model.pheno.BrAPIObservation;
import org.brapi.v2.model.pheno.BrAPIObservationUnit;
import org.breedinginsight.brapps.importer.model.response.PendingImportObject;
import org.breedinginsight.model.ProgramLocation;

import java.util.HashMap;
import java.util.Map;

/**
 * Mutable lookup state built up while resolving an experiment import against
 * existing BrAPI records. Keys are scope-stripped names unless noted otherwise.
 *
 * NOTE(review): generic parameters were lost in extraction and reconstructed here
 * from the imports and field names — confirm against the original source.
 */
@Getter
@Setter
@Builder
@ToString
@AllArgsConstructor
@NoArgsConstructor
public class PendingData {
    // NOTE: populated in populate existing and new steps depending on import
    private Map<String, PendingImportObject<BrAPIObservationUnit>> observationUnitByNameNoScope;
    private Map<String, PendingImportObject<BrAPITrial>> trialByNameNoScope;
    private Map<String, PendingImportObject<BrAPIStudy>> studyByNameNoScope;
    private Map<String, PendingImportObject<ProgramLocation>> locationByName;
    private Map<String, PendingImportObject<BrAPIListDetails>> obsVarDatasetByName;
    private Map<String, PendingImportObject<BrAPIGermplasm>> existingGermplasmByGID;
    // TODO: see if we can change this to match PendingImport<>
    private Map<String, BrAPIObservation> existingObsByObsHash;

    // NOTE: populated in populate new step
    private Map<String, PendingImportObject<BrAPIObservation>> observationByHash;
}
b/src/main/java/org/breedinginsight/brapps/importer/services/processors/experiment/create/model/PendingImportObjectData.java @@ -0,0 +1,36 @@ +/* + * See the NOTICE file distributed with this work for additional information + * regarding copyright ownership. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.breedinginsight.brapps.importer.services.processors.experiment.create.model; + +import lombok.*; +import org.brapi.v2.model.core.BrAPIStudy; +import org.brapi.v2.model.core.BrAPITrial; +import org.brapi.v2.model.pheno.BrAPIObservationUnit; +import org.breedinginsight.brapps.importer.model.response.PendingImportObject; + +@Getter +@Setter +@Builder +@ToString +@AllArgsConstructor +@NoArgsConstructor +public class PendingImportObjectData { + private PendingImportObject trialPIO; + private PendingImportObject studyPIO; + private PendingImportObject obsUnitPIO; + +} diff --git a/src/main/java/org/breedinginsight/brapps/importer/services/processors/experiment/create/model/ProcessContext.java b/src/main/java/org/breedinginsight/brapps/importer/services/processors/experiment/create/model/ProcessContext.java new file mode 100644 index 000000000..b66f7207c --- /dev/null +++ b/src/main/java/org/breedinginsight/brapps/importer/services/processors/experiment/create/model/ProcessContext.java @@ -0,0 +1,31 @@ +/* + * See the NOTICE file distributed with this work for additional information + * regarding copyright ownership. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.breedinginsight.brapps.importer.services.processors.experiment.create.model; + +import lombok.*; +import org.breedinginsight.brapps.importer.model.workflow.ImportContext; + +@Getter +@Setter +@Builder +@ToString +@AllArgsConstructor +@NoArgsConstructor +public class ProcessContext { + private PendingData pendingData; + private ImportContext importContext; +} \ No newline at end of file diff --git a/src/main/java/org/breedinginsight/brapps/importer/services/processors/experiment/create/model/ProcessedPhenotypeData.java b/src/main/java/org/breedinginsight/brapps/importer/services/processors/experiment/create/model/ProcessedPhenotypeData.java new file mode 100644 index 000000000..c81e265cd --- /dev/null +++ b/src/main/java/org/breedinginsight/brapps/importer/services/processors/experiment/create/model/ProcessedPhenotypeData.java @@ -0,0 +1,38 @@ +/* + * See the NOTICE file distributed with this work for additional information + * regarding copyright ownership. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
/*
 * See the NOTICE file distributed with this work for additional information
 * regarding copyright ownership.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.breedinginsight.brapps.importer.services.processors.experiment.create.model;

import lombok.*;
import org.breedinginsight.model.Trait;
import tech.tablesaw.columns.Column;

import java.util.List;
import java.util.Map;

// TODO: move to common higher level location, could be used by both append and create workflows so being located
// in the create namespace won't make sense if we decide to do that in the future.
/**
 * Result of extracting phenotype (observation variable) information from an import file:
 * the phenotype columns found, the traits they reference, and any timestamp columns paired
 * with a phenotype.
 */
@Getter
@Setter
@Builder
@ToString
@AllArgsConstructor
@NoArgsConstructor
public class ProcessedPhenotypeData {
    // Generic parameters restored from the imports (Trait, tablesaw Column);
    // NOTE(review): map key presumed to be the phenotype column name — confirm against producer.
    private Map<String, Column<?>> timeStampColByPheno;
    private List<Trait> referencedTraits;
    private List<Column<?>> phenotypeCols;
}
/*
 * See the NOTICE file distributed with this work for additional information
 * regarding copyright ownership.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.breedinginsight.brapps.importer.services.processors.experiment.create.workflow;

import io.micronaut.http.HttpStatus;
import io.micronaut.http.exceptions.HttpStatusException;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.brapi.v2.model.pheno.BrAPIObservation;
import org.breedinginsight.api.model.v1.response.ValidationErrors;
import org.breedinginsight.brapps.importer.model.ImportUpload;
import org.breedinginsight.brapps.importer.model.imports.BrAPIImport;
import org.breedinginsight.brapps.importer.model.imports.ImportServiceContext;
import org.breedinginsight.brapps.importer.model.imports.PendingImport;
import org.breedinginsight.brapps.importer.model.imports.experimentObservation.ExperimentObservation;
import org.breedinginsight.brapps.importer.model.response.ImportObjectState;
import org.breedinginsight.brapps.importer.model.response.ImportPreviewResponse;
import org.breedinginsight.brapps.importer.model.response.ImportPreviewStatistics;
import org.breedinginsight.brapps.importer.model.response.PendingImportObject;
import org.breedinginsight.brapps.importer.model.workflow.ExperimentWorkflow;
import org.breedinginsight.brapps.importer.model.workflow.ImportContext;
import org.breedinginsight.brapps.importer.model.workflow.ImportWorkflow;
import org.breedinginsight.brapps.importer.model.workflow.ImportWorkflowResult;
import org.breedinginsight.brapps.importer.model.workflow.ProcessedData;
import org.breedinginsight.brapps.importer.services.ImportStatusService;
import org.breedinginsight.brapps.importer.services.processors.experiment.ExperimentUtilities;
import org.breedinginsight.brapps.importer.services.processors.experiment.ExperimentWorkflowNavigator;
import org.breedinginsight.brapps.importer.services.processors.experiment.create.model.PendingData;
import org.breedinginsight.brapps.importer.services.processors.experiment.create.model.ProcessContext;
import org.breedinginsight.brapps.importer.services.processors.experiment.create.model.ProcessedPhenotypeData;
import org.breedinginsight.brapps.importer.services.processors.experiment.create.workflow.steps.CommitPendingImportObjectsStep;
import org.breedinginsight.brapps.importer.services.processors.experiment.create.workflow.steps.PopulateExistingPendingImportObjectsStep;
import org.breedinginsight.brapps.importer.services.processors.experiment.create.workflow.steps.PopulateNewPendingImportObjectsStep;
import org.breedinginsight.brapps.importer.services.processors.experiment.create.workflow.steps.ValidatePendingImportObjectsStep;
import org.breedinginsight.brapps.importer.services.processors.experiment.services.ExperimentPhenotypeService;
import org.breedinginsight.services.exceptions.ValidatorException;

import javax.inject.Inject;
import javax.inject.Singleton;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;

/**
 * Workflow for importing a brand-new experiment: the file must NOT contain observation unit IDs.
 * Orchestrates the populate-existing / populate-new / validate / commit steps and reports
 * progress through {@link ImportStatusService}.
 */
@Slf4j
@Getter
@Singleton
public class CreateNewExperimentWorkflow implements ExperimentWorkflow {
    private final ExperimentWorkflowNavigator.Workflow workflow;
    private final PopulateExistingPendingImportObjectsStep populateExistingPendingImportObjectsStep;
    private final PopulateNewPendingImportObjectsStep populateNewPendingImportObjectsStep;
    private final CommitPendingImportObjectsStep commitPendingImportObjectsStep;
    private final ValidatePendingImportObjectsStep validatePendingImportObjectsStep;
    private final ImportStatusService statusService;
    private final ExperimentPhenotypeService experimentPhenotypeService;

    @Inject
    public CreateNewExperimentWorkflow(PopulateExistingPendingImportObjectsStep populateExistingPendingImportObjectsStep,
                                       PopulateNewPendingImportObjectsStep populateNewPendingImportObjectsStep,
                                       CommitPendingImportObjectsStep commitPendingImportObjectsStep,
                                       ValidatePendingImportObjectsStep validatePendingImportObjectsStep,
                                       ImportStatusService statusService,
                                       ExperimentPhenotypeService experimentPhenotypeService) {
        this.populateExistingPendingImportObjectsStep = populateExistingPendingImportObjectsStep;
        this.populateNewPendingImportObjectsStep = populateNewPendingImportObjectsStep;
        this.commitPendingImportObjectsStep = commitPendingImportObjectsStep;
        this.validatePendingImportObjectsStep = validatePendingImportObjectsStep;
        this.statusService = statusService;
        this.experimentPhenotypeService = experimentPhenotypeService;
        this.workflow = ExperimentWorkflowNavigator.Workflow.NEW_OBSERVATION;
    }

    /**
     * Runs the create-experiment pipeline: extract phenotypes, populate existing and new pending
     * objects, validate, build the preview response, and (when commit is true) push everything to
     * the BrAPI service.
     *
     * @throws HttpStatusException 422 when the file contains ObsUnitIDs (those belong to the append workflow)
     * @throws ValidatorException when validation of the mapped data fails
     */
    private ImportPreviewResponse runWorkflow(ImportContext context) throws Exception {

        ImportUpload upload = context.getUpload();
        boolean commit = context.isCommit();
        List<BrAPIImport> importRows = context.getImportRows();
        ProcessedData processedData = new ProcessedData();

        // Make sure the file does not contain obs unit ids before proceeding
        if (containsObsUnitIDs(context)) {
            throw new HttpStatusException(HttpStatus.UNPROCESSABLE_ENTITY, "ObsUnitIDs are detected");
        }

        statusService.updateMessage(upload, "Checking existing experiment objects in brapi service and mapping data");

        ProcessedPhenotypeData phenotypeData = experimentPhenotypeService.extractPhenotypes(context);
        ProcessContext processContext = populateExistingPendingImportObjectsStep.process(context, phenotypeData);
        populateNewPendingImportObjectsStep.process(processContext, phenotypeData);
        ValidationErrors validationErrors = validatePendingImportObjectsStep.process(context, processContext.getPendingData(), phenotypeData, processedData);

        // short circuit if there were validation errors
        if (validationErrors.hasErrors()) {
            throw new ValidatorException(validationErrors);
        }

        // TODO: move to experiment import service
        ImportPreviewResponse response = buildImportPreviewResponse(importRows, processContext.getPendingData(), processedData, upload);

        statusService.updateMappedData(upload, response, "Finished mapping data to brapi objects");

        // preview data
        if (!commit) {
            statusService.updateOk(upload);
            return response;
        }

        // commit data
        long totalObjects = getNewObjectCount(response);
        statusService.startUpload(upload, totalObjects, "Starting upload to brapi service");
        statusService.updateMessage(upload, "Creating new experiment objects in brapi service");

        commitPendingImportObjectsStep.process(processContext, processedData);

        statusService.finishUpload(upload, totalObjects, "Completed upload to brapi service");
        return response;
    }

    /**
     * Process the import service context and returns an Optional ImportWorkflowResult.
     *
     * @param context The import service context to be processed. If null, then it skips processing but returns the result with no-preview.
     * @return An Optional ImportWorkflowResult which contains the workflow and import preview response (if available).
     *         If the context is null, it returns the result with no-preview.
     */
    public Optional<ImportWorkflowResult> process(ImportServiceContext context) {
        // Workflow processing the context
        ImportWorkflow workflow = ImportWorkflow.builder()
                .id(getWorkflow().getId())
                .name(getWorkflow().getName())
                .build();

        // No-preview result
        ImportWorkflowResult workflowResult = ImportWorkflowResult.builder()
                .workflow(workflow)
                .importPreviewResponse(Optional.empty())
                .caughtException(Optional.empty())
                .build();

        // Skip this workflow unless creating a new experiment
        if (context != null && !this.workflow.isEqual(context.getWorkflowId())) {
            return Optional.empty();
        }

        // Skip processing if no context, but return no-preview result for this workflow
        if (context == null) {
            return Optional.of(workflowResult);
        }

        // TODO: unify usage of single import context type throughout
        ImportContext importContext = ImportContext.from(context);

        // Start processing the import; any exception is captured on the result rather than thrown
        try {
            ImportPreviewResponse response = runWorkflow(importContext);
            workflowResult.setImportPreviewResponse(Optional.of(response));
        } catch (Exception e) {
            workflowResult.setCaughtException(Optional.of(e));
        }

        return Optional.of(workflowResult);
    }

    @Override
    public int getOrder() {
        return 1;
    }

    // TODO: move to shared area
    /** Assembles the preview response (statistics + mapped rows + dynamic columns) shown to the user. */
    private ImportPreviewResponse buildImportPreviewResponse(List<BrAPIImport> importRows, PendingData pendingData, ProcessedData processedData,
                                                             ImportUpload upload) {

        // NOTE(review): map key presumed to be the import row index — confirm against ProcessedData producer.
        Map<Integer, PendingImport> mappedBrAPIImport = processedData.getMappedBrAPIImport();
        Map<String, ImportPreviewStatistics> statistics = generateStatisticsMap(pendingData, importRows);

        ImportPreviewResponse response = new ImportPreviewResponse();
        response.setStatistics(statistics);
        List<PendingImport> mappedBrAPIImportList = new ArrayList<>(mappedBrAPIImport.values());
        response.setRows(mappedBrAPIImportList);
        response.setDynamicColumnNames(upload.getDynamicColumnNamesList());
        return response;
    }

    // TODO: move to shared area
    /** Sums newObjectCount over every statistics entry: total number of new brapi objects to create. */
    private long getNewObjectCount(ImportPreviewResponse response) {
        long totalObjects = 0;
        for (ImportPreviewStatistics stats : response.getStatistics().values()) {
            totalObjects += stats.getNewObjectCount();
        }
        return totalObjects;
    }

    /** True when any row in the file carries a non-blank ObsUnitID (disqualifies the create workflow). */
    private boolean containsObsUnitIDs(ImportContext importContext) {
        List<BrAPIImport> importRows = importContext.getImportRows();
        return importRows.stream()
                .anyMatch(row -> {
                    ExperimentObservation expRow = (ExperimentObservation) row;
                    return StringUtils.isNotBlank(expRow.getObsUnitID());
                });
    }

    // TODO: move to shared area: experiment import service
    /**
     * Counts distinct environments, observation units, and GIDs across the rows, plus
     * new/existing/mutated observations (with a non-blank value) staged in pendingData.
     */
    private Map<String, ImportPreviewStatistics> generateStatisticsMap(PendingData pendingData, List<BrAPIImport> importRows) {
        // Data for stats.
        HashSet<String> environmentNameCounter = new HashSet<>(); // set of unique environment names
        HashSet<String> obsUnitsIDCounter = new HashSet<>();      // set of unique observation unit ID's
        HashSet<String> gidCounter = new HashSet<>();             // set of unique GID's

        Map<String, PendingImportObject<BrAPIObservation>> observationByHash = pendingData.getObservationByHash();

        for (BrAPIImport row : importRows) {
            ExperimentObservation importRow = (ExperimentObservation) row;
            // Collect data for stats.
            addIfNotNull(environmentNameCounter, importRow.getEnv());
            addIfNotNull(obsUnitsIDCounter, ExperimentUtilities.createObservationUnitKey(importRow));
            addIfNotNull(gidCounter, importRow.getGid());
        }

        int numNewObservations = countObservationsInState(observationByHash, ImportObjectState.NEW);
        int numExistingObservations = countObservationsInState(observationByHash, ImportObjectState.EXISTING);
        int numMutatedObservations = countObservationsInState(observationByHash, ImportObjectState.MUTATED);

        ImportPreviewStatistics environmentStats = ImportPreviewStatistics.builder()
                .newObjectCount(environmentNameCounter.size())
                .build();
        ImportPreviewStatistics obsUnitStats = ImportPreviewStatistics.builder()
                .newObjectCount(obsUnitsIDCounter.size())
                .build();
        ImportPreviewStatistics gidStats = ImportPreviewStatistics.builder()
                .newObjectCount(gidCounter.size())
                .build();
        ImportPreviewStatistics observationStats = ImportPreviewStatistics.builder()
                .newObjectCount(numNewObservations)
                .build();
        ImportPreviewStatistics existingObservationStats = ImportPreviewStatistics.builder()
                .newObjectCount(numExistingObservations)
                .build();
        ImportPreviewStatistics mutatedObservationStats = ImportPreviewStatistics.builder()
                .newObjectCount(numMutatedObservations)
                .build();

        return Map.of(
                "Environments", environmentStats,
                "Observation_Units", obsUnitStats,
                "GIDs", gidStats,
                "Observations", observationStats,
                "Existing_Observations", existingObservationStats,
                "Mutated_Observations", mutatedObservationStats
        );
    }

    /** Counts pending observations in the given state that have a non-blank value (blank values are not saved). */
    private int countObservationsInState(Map<String, PendingImportObject<BrAPIObservation>> observationByHash,
                                         ImportObjectState state) {
        return Math.toIntExact(
                observationByHash.values()
                        .stream()
                        .filter(preview -> preview != null && preview.getState() == state &&
                                !StringUtils.isBlank(preview.getBrAPIObject().getValue()))
                        .count()
        );
    }

    // TODO: move to common area
    /** Adds the value to the set only when it is non-null. */
    private void addIfNotNull(HashSet<String> set, String setValue) {
        if (setValue != null) {
            set.add(setValue);
        }
    }
}
/*
 * See the NOTICE file distributed with this work for additional information
 * regarding copyright ownership.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.breedinginsight.brapps.importer.services.processors.experiment.create.workflow.steps;

import io.micronaut.http.server.exceptions.InternalServerException;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.map.CaseInsensitiveMap;
import org.apache.commons.lang3.StringUtils;
import org.brapi.client.v2.model.exceptions.ApiException;
import org.brapi.v2.model.core.BrAPIListSummary;
import org.brapi.v2.model.core.BrAPIStudy;
import org.brapi.v2.model.core.BrAPITrial;
import org.brapi.v2.model.core.request.BrAPIListNewRequest;
import org.brapi.v2.model.core.response.BrAPIListDetails;
import org.brapi.v2.model.germ.BrAPIGermplasm;
import org.brapi.v2.model.pheno.BrAPIObservation;
import org.brapi.v2.model.pheno.BrAPIObservationUnit;
import org.breedinginsight.api.auth.AuthenticatedUser;
import org.breedinginsight.api.model.v1.request.ProgramLocationRequest;
import org.breedinginsight.brapi.v2.constants.BrAPIAdditionalInfoFields;
import org.breedinginsight.brapi.v2.dao.*;
import org.breedinginsight.brapps.importer.model.ImportUpload;
import org.breedinginsight.brapps.importer.model.imports.PendingImport;
import org.breedinginsight.brapps.importer.model.response.PendingImportObject;
import org.breedinginsight.brapps.importer.model.workflow.ImportContext;
import org.breedinginsight.brapps.importer.model.workflow.ProcessedData;
import org.breedinginsight.brapps.importer.services.processors.ProcessorData;
import org.breedinginsight.brapps.importer.services.processors.experiment.ExperimentUtilities;
import org.breedinginsight.brapps.importer.services.processors.experiment.create.model.PendingData;
import org.breedinginsight.brapps.importer.services.processors.experiment.create.model.ProcessContext;
import org.breedinginsight.model.Program;
import org.breedinginsight.model.ProgramLocation;
import org.breedinginsight.model.Trait;
import org.breedinginsight.services.OntologyService;
import org.breedinginsight.services.ProgramLocationService;
import org.breedinginsight.services.exceptions.DoesNotExistException;
import org.breedinginsight.utilities.Utilities;

import javax.inject.Inject;
import javax.inject.Singleton;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;

/**
 * Final step of the create-experiment workflow: persists every NEW pending object (datasets,
 * trials, locations, studies, observation units, observations) to the BrAPI service, wiring up
 * the DbIds returned by each create call into the dependent objects before they are saved.
 */
@Singleton
@Slf4j
public class CommitPendingImportObjectsStep {

    private final BrAPIListDAO brAPIListDAO;
    private final BrAPITrialDAO brapiTrialDAO;
    private final BrAPIStudyDAO brAPIStudyDAO;
    private final BrAPIObservationDAO brAPIObservationDAO;
    private final BrAPIObservationUnitDAO brAPIObservationUnitDAO;
    private final ProgramLocationService locationService;
    private final OntologyService ontologyService;

    @Inject
    public CommitPendingImportObjectsStep(BrAPIListDAO brAPIListDAO,
                                          BrAPITrialDAO brapiTrialDAO,
                                          BrAPIStudyDAO brAPIStudyDAO,
                                          BrAPIObservationDAO brAPIObservationDAO,
                                          BrAPIObservationUnitDAO brAPIObservationUnitDAO,
                                          ProgramLocationService locationService,
                                          OntologyService ontologyService) {
        this.brAPIListDAO = brAPIListDAO;
        this.brapiTrialDAO = brapiTrialDAO;
        this.brAPIStudyDAO = brAPIStudyDAO;
        this.brAPIObservationDAO = brAPIObservationDAO;
        this.brAPIObservationUnitDAO = brAPIObservationUnitDAO;
        this.locationService = locationService;
        this.ontologyService = ontologyService;
    }

    // TODO: some common code between workflows here that could be broken out, removed append/update specific code
    /**
     * Creates all NEW pending objects in dependency order (datasets, trials, locations, studies,
     * observation units, observations), propagating DbIds returned by each create call into the
     * objects that reference them, then appends any new observation variable ids to existing datasets.
     *
     * @throws InternalServerException when any BrAPI call fails
     */
    public void process(ProcessContext processContext, ProcessedData processedData) {

        PendingData pendingData = processContext.getPendingData();
        ImportContext importContext = processContext.getImportContext();

        ImportUpload upload = importContext.getUpload();
        Program program = importContext.getProgram();
        // NOTE(review): map key presumed to be the import row index — confirm against ProcessedData producer.
        Map<Integer, PendingImport> mappedBrAPIImport = processedData.getMappedBrAPIImport();

        Map<String, PendingImportObject<BrAPITrial>> trialByNameNoScope = pendingData.getTrialByNameNoScope();
        Map<String, PendingImportObject<BrAPIStudy>> studyByNameNoScope = pendingData.getStudyByNameNoScope();
        Map<String, PendingImportObject<BrAPIListDetails>> obsVarDatasetByName = pendingData.getObsVarDatasetByName();
        Map<String, PendingImportObject<ProgramLocation>> locationByName = pendingData.getLocationByName();
        Map<String, PendingImportObject<BrAPIObservationUnit>> observationUnitByNameNoScope = pendingData.getObservationUnitByNameNoScope();
        Map<String, PendingImportObject<BrAPIObservation>> observationByHash = pendingData.getObservationByHash();

        List<BrAPITrial> newTrials = ProcessorData.getNewObjects(pendingData.getTrialByNameNoScope());

        List<ProgramLocationRequest> newLocations = ProcessorData.getNewObjects(pendingData.getLocationByName())
                .stream()
                .map(location -> ProgramLocationRequest.builder()
                        .name(location.getName())
                        .build())
                .collect(Collectors.toList());

        List<BrAPIStudy> newStudies = ProcessorData.getNewObjects(pendingData.getStudyByNameNoScope());

        List<BrAPIListNewRequest> newDatasetRequests = ProcessorData.getNewObjects(pendingData.getObsVarDatasetByName()).stream().map(details -> {
            BrAPIListNewRequest request = new BrAPIListNewRequest();
            request.setListName(details.getListName());
            request.setListType(details.getListType());
            request.setExternalReferences(details.getExternalReferences());
            request.setAdditionalInfo(details.getAdditionalInfo());
            request.data(details.getData());
            return request;
        }).collect(Collectors.toList());

        Map<String, BrAPIListDetails> datasetNewDataById = ProcessorData
                .getMutationsByObjectId(pendingData.getObsVarDatasetByName(), BrAPIListSummary::getListDbId);

        List<BrAPIObservationUnit> newObservationUnits = ProcessorData.getNewObjects(pendingData.getObservationUnitByNameNoScope());

        // filter out observations with no 'value' so they will not be saved
        List<BrAPIObservation> newObservations = ProcessorData.getNewObjects(observationByHash)
                .stream()
                .filter(obs -> !obs.getValue().isBlank())
                .collect(Collectors.toList());

        AuthenticatedUser actingUser = new AuthenticatedUser(upload.getUpdatedByUser().getName(), new ArrayList<>(), upload.getUpdatedByUser().getId(), new ArrayList<>());

        try {
            List<BrAPIListSummary> createdDatasets = new ArrayList<>(brAPIListDAO.createBrAPILists(newDatasetRequests, program.getId(), upload));
            createdDatasets.forEach(summary -> obsVarDatasetByName.get(summary.getListName()).getBrAPIObject().setListDbId(summary.getListDbId()));

            List<BrAPITrial> createdTrials = new ArrayList<>(brapiTrialDAO.createBrAPITrials(newTrials, program.getId(), upload));
            // set the DbId for each newly created trial
            for (BrAPITrial createdTrial : createdTrials) {
                String createdTrialName = Utilities.removeProgramKey(createdTrial.getTrialName(), program.getKey());
                trialByNameNoScope.get(createdTrialName)
                        .getBrAPIObject()
                        .setTrialDbId(createdTrial.getTrialDbId());
            }

            List<ProgramLocation> createdLocations = new ArrayList<>(locationService.create(actingUser, program.getId(), newLocations));
            // set the DbId for each newly created location
            for (ProgramLocation createdLocation : createdLocations) {
                String createdLocationName = createdLocation.getName();
                locationByName.get(createdLocationName)
                        .getBrAPIObject()
                        .setLocationDbId(createdLocation.getLocationDbId());
            }

            updateStudyDependencyValues(pendingData, mappedBrAPIImport, program.getKey());
            List<BrAPIStudy> createdStudies = brAPIStudyDAO.createBrAPIStudies(newStudies, program.getId(), upload);

            // set the DbId for each newly created study
            for (BrAPIStudy createdStudy : createdStudies) {
                String createdStudy_name_no_key = Utilities.removeProgramKeyAndUnknownAdditionalData(createdStudy.getStudyName(), program.getKey());
                studyByNameNoScope.get(createdStudy_name_no_key)
                        .getBrAPIObject()
                        .setStudyDbId(createdStudy.getStudyDbId());
            }

            updateObsUnitDependencyValues(pendingData, program.getKey());
            List<BrAPIObservationUnit> createdObservationUnits = brAPIObservationUnitDAO.createBrAPIObservationUnits(newObservationUnits, program.getId(), upload);

            // set the DbId for each newly created Observation Unit
            for (BrAPIObservationUnit createdObservationUnit : createdObservationUnits) {
                // retrieve the BrAPI ObservationUnit from observationUnitByNameNoScope by its study+unit key
                String createdObservationUnit_StripedStudyName = Utilities.removeProgramKeyAndUnknownAdditionalData(createdObservationUnit.getStudyName(), program.getKey());
                String createdObservationUnit_StripedObsUnitName = Utilities.removeProgramKeyAndUnknownAdditionalData(createdObservationUnit.getObservationUnitName(), program.getKey());
                String createdObsUnit_key = ExperimentUtilities.createObservationUnitKey(createdObservationUnit_StripedStudyName, createdObservationUnit_StripedObsUnitName);
                observationUnitByNameNoScope.get(createdObsUnit_key)
                        .getBrAPIObject()
                        .setObservationUnitDbId(createdObservationUnit.getObservationUnitDbId());
            }

            updateObservationDependencyValues(pendingData, program);
            brAPIObservationDAO.createBrAPIObservations(newObservations, program.getId(), upload);
        } catch (ApiException e) {
            log.error("Error saving experiment import: " + Utilities.generateApiExceptionLogMessage(e), e);
            throw new InternalServerException("Error saving experiment import", e);
        } catch (Exception e) {
            log.error("Error saving experiment import", e);
            throw new InternalServerException(e.getMessage(), e);
        }

        // NOTE: removed mutated trials code

        datasetNewDataById.forEach((id, dataset) -> {
            try {
                List<String> existingObsVarIds = brAPIListDAO.getListById(id, program.getId()).getResult().getData();
                List<String> newObsVarIds = dataset
                        .getData()
                        .stream()
                        .filter(obsVarId -> !existingObsVarIds.contains(obsVarId)).collect(Collectors.toList());
                List<String> obsVarIds = new ArrayList<>(existingObsVarIds);
                obsVarIds.addAll(newObsVarIds);
                dataset.setData(obsVarIds);
                brAPIListDAO.updateBrAPIList(id, dataset, program.getId());
            } catch (ApiException e) {
                log.error("Error updating dataset observation variables: " + Utilities.generateApiExceptionLogMessage(e), e);
                throw new InternalServerException("Error saving experiment import", e);
            } catch (Exception e) {
                log.error("Error updating dataset observation variables: ", e);
                throw new InternalServerException(e.getMessage(), e);
            }
        });

        // NOTE: removed mutated observations code

    }

    /** Propagates newly assigned location and trial DbIds into the pending studies that reference them. */
    private void updateStudyDependencyValues(PendingData pendingData, Map<Integer, PendingImport> mappedBrAPIImport, String programKey) {
        // update location DbIds in studies for all distinct locations
        Map<String, PendingImportObject<BrAPITrial>> trialByNameNoScope = pendingData.getTrialByNameNoScope();

        mappedBrAPIImport.values()
                .stream()
                .map(PendingImport::getLocation)
                .forEach(location -> updateStudyLocationDbId(pendingData, location));

        // update trial DbIds in studies for all distinct trials
        trialByNameNoScope.values()
                .stream()
                .filter(Objects::nonNull)
                .distinct()
                .map(PendingImportObject::getBrAPIObject)
                .forEach(trial -> updateTrialDbId(pendingData, trial, programKey));
    }

    /** Copies a trial's DbId onto every pending study whose (de-scoped) trial name matches. */
    private void updateTrialDbId(PendingData pendingData, BrAPITrial trial, String programKey) {
        Map<String, PendingImportObject<BrAPIStudy>> studyByNameNoScope = pendingData.getStudyByNameNoScope();

        studyByNameNoScope.values()
                .stream()
                .filter(study -> study.getBrAPIObject()
                        .getTrialName()
                        .equals(Utilities.removeProgramKey(trial.getTrialName(), programKey)))
                .forEach(study -> study.getBrAPIObject()
                        .setTrialDbId(trial.getTrialDbId()));
    }

    /**
     * Replaces the placeholder location id (the PendingImportObject's own id) stored on studies
     * with the real locationDbId assigned by the BrAPI service.
     */
    private void updateStudyLocationDbId(PendingData pendingData, PendingImportObject<ProgramLocation> location) {
        Map<String, PendingImportObject<BrAPIStudy>> studyByNameNoScope = pendingData.getStudyByNameNoScope();

        studyByNameNoScope.values()
                .stream()
                .filter(study -> location.getId().toString()
                        .equals(study.getBrAPIObject()
                                .getLocationDbId()))
                .forEach(study -> study.getBrAPIObject()
                        .setLocationDbId(location.getBrAPIObject().getLocationDbId()));
    }

    /** Propagates study and germplasm DbIds into the pending observation units that reference them. */
    private void updateObsUnitDependencyValues(PendingData pendingData, String programKey) {
        Map<String, PendingImportObject<BrAPIStudy>> studyByNameNoScope = pendingData.getStudyByNameNoScope();
        Map<String, PendingImportObject<BrAPIGermplasm>> existingGermplasmByGID = pendingData.getExistingGermplasmByGID();

        // update study DbIds
        studyByNameNoScope.values()
                .stream()
                .filter(Objects::nonNull)
                .distinct()
                .map(PendingImportObject::getBrAPIObject)
                .forEach(study -> updateStudyDbId(pendingData, study, programKey));

        // update germplasm DbIds
        existingGermplasmByGID.values()
                .stream()
                .filter(Objects::nonNull)
                .distinct()
                .map(PendingImportObject::getBrAPIObject)
                .forEach(germplasm -> updateGermplasmDbId(pendingData, germplasm));
    }

    /** Copies a germplasm's DbId onto every pending observation unit whose GID additionalInfo matches its accession number. */
    private void updateGermplasmDbId(PendingData pendingData, BrAPIGermplasm germplasm) {
        Map<String, PendingImportObject<BrAPIObservationUnit>> observationUnitByNameNoScope = pendingData.getObservationUnitByNameNoScope();

        observationUnitByNameNoScope.values()
                .stream()
                .filter(obsUnit -> germplasm.getAccessionNumber() != null &&
                        germplasm.getAccessionNumber().equals(obsUnit
                                .getBrAPIObject()
                                .getAdditionalInfo().getAsJsonObject()
                                .get(BrAPIAdditionalInfoFields.GID).getAsString()))
                .forEach(obsUnit -> obsUnit.getBrAPIObject()
                        .setGermplasmDbId(germplasm.getGermplasmDbId()));
    }

    /** Copies a study's DbId (and its trial DbId) onto every pending observation unit in that study. */
    private void updateStudyDbId(PendingData pendingData, BrAPIStudy study, String programKey) {
        Map<String, PendingImportObject<BrAPIObservationUnit>> observationUnitByNameNoScope = pendingData.getObservationUnitByNameNoScope();

        observationUnitByNameNoScope.values()
                .stream()
                .filter(obsUnit -> obsUnit.getBrAPIObject()
                        .getStudyName()
                        .equals(Utilities.removeProgramKeyAndUnknownAdditionalData(study.getStudyName(), programKey)))
                .forEach(obsUnit -> {
                    obsUnit.getBrAPIObject()
                            .setStudyDbId(study.getStudyDbId());
                    obsUnit.getBrAPIObject()
                            .setTrialDbId(study.getTrialDbId());
                });
    }

    /**
     * Propagates observation unit / study / germplasm DbIds into pending observations, and resolves
     * each observation's ObservationVariable DbId from the program's trait list (case-insensitive name match).
     */
    private void updateObservationDependencyValues(PendingData pendingData, Program program) {
        String programKey = program.getKey();
        Map<String, PendingImportObject<BrAPIObservationUnit>> observationUnitByNameNoScope = pendingData.getObservationUnitByNameNoScope();
        Map<String, PendingImportObject<BrAPIObservation>> observationByHash = pendingData.getObservationByHash();

        // update the observations study DbIds, Observation Unit DbIds and Germplasm DbIds
        observationUnitByNameNoScope.values().stream()
                .map(PendingImportObject::getBrAPIObject)
                .forEach(obsUnit -> updateObservationDbIds(pendingData, obsUnit, programKey));

        // Update ObservationVariable DbIds
        List<Trait> traits = getTraitList(program);
        CaseInsensitiveMap<String, Trait> traitMap = new CaseInsensitiveMap<>();
        for (Trait trait : traits) {
            traitMap.put(trait.getObservationVariableName(), trait);
        }
        for (PendingImportObject<BrAPIObservation> observation : observationByHash.values()) {
            String observationVariableName = observation.getBrAPIObject().getObservationVariableName();
            if (observationVariableName != null && traitMap.containsKey(observationVariableName)) {
                String observationVariableDbId = traitMap.get(observationVariableName).getObservationVariableDbId();
                observation.getBrAPIObject().setObservationVariableDbId(observationVariableDbId);
            }
        }
    }

    // Update each observation's observationUnit DbId, study DbId, and germplasm DbId
    private void updateObservationDbIds(PendingData pendingData, BrAPIObservationUnit obsUnit, String programKey) {
        Map<String, PendingImportObject<BrAPIObservation>> observationByHash = pendingData.getObservationByHash();

        // FILTER LOGIC: Match on Env and Exp Unit ID
        observationByHash.values()
                .stream()
                .filter(obs -> obs.getBrAPIObject()
                        .getAdditionalInfo() != null
                        && obs.getBrAPIObject()
                        .getAdditionalInfo()
                        .get(BrAPIAdditionalInfoFields.STUDY_NAME) != null
                        && obs.getBrAPIObject()
                        .getAdditionalInfo()
                        .get(BrAPIAdditionalInfoFields.STUDY_NAME)
                        .getAsString()
                        .equals(Utilities.removeProgramKeyAndUnknownAdditionalData(obsUnit.getStudyName(), programKey))
                        && Utilities.removeProgramKeyAndUnknownAdditionalData(obs.getBrAPIObject().getObservationUnitName(), programKey)
                        .equals(Utilities.removeProgramKeyAndUnknownAdditionalData(obsUnit.getObservationUnitName(), programKey))
                )
                .forEach(obs -> {
                    // only fill in the unit DbId if it has not already been set
                    if (StringUtils.isBlank(obs.getBrAPIObject().getObservationUnitDbId())) {
                        obs.getBrAPIObject().setObservationUnitDbId(obsUnit.getObservationUnitDbId());
                    }
                    obs.getBrAPIObject().setStudyDbId(obsUnit.getStudyDbId());
                    obs.getBrAPIObject().setGermplasmDbId(obsUnit.getGermplasmDbId());
                });
    }

    /** Fetches the program's active trait list, converting a missing program into an InternalServerException. */
    private List<Trait> getTraitList(Program program) {
        try {
            return ontologyService.getTraitsByProgramId(program.getId(), true);
        } catch (DoesNotExistException e) {
            log.error(e.getMessage(), e);
            throw new InternalServerException(e.toString(), e);
        }
    }

}
+ */ +package org.breedinginsight.brapps.importer.services.processors.experiment.create.workflow.steps; + +import io.micronaut.context.annotation.Property; +import io.micronaut.http.server.exceptions.InternalServerException; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.StringUtils; +import org.brapi.client.v2.model.exceptions.ApiException; +import org.brapi.v2.model.BrAPIExternalReference; +import org.brapi.v2.model.core.BrAPIListSummary; +import org.brapi.v2.model.core.BrAPIListTypes; +import org.brapi.v2.model.core.BrAPIStudy; +import org.brapi.v2.model.core.BrAPITrial; +import org.brapi.v2.model.core.response.BrAPIListDetails; +import org.brapi.v2.model.germ.BrAPIGermplasm; +import org.brapi.v2.model.pheno.BrAPIObservation; +import org.brapi.v2.model.pheno.BrAPIObservationUnit; +import org.breedinginsight.brapi.v2.constants.BrAPIAdditionalInfoFields; +import org.breedinginsight.brapi.v2.dao.*; +import org.breedinginsight.brapps.importer.model.imports.experimentObservation.ExperimentObservation; +import org.breedinginsight.brapps.importer.model.response.ImportObjectState; +import org.breedinginsight.brapps.importer.model.response.PendingImportObject; +import org.breedinginsight.brapps.importer.model.workflow.ImportContext; +import org.breedinginsight.brapps.importer.services.ExternalReferenceSource; +import org.breedinginsight.brapps.importer.services.processors.experiment.ExperimentUtilities; +import org.breedinginsight.brapps.importer.services.processors.experiment.create.model.PendingData; +import org.breedinginsight.brapps.importer.services.processors.experiment.create.model.ProcessContext; +import org.breedinginsight.brapps.importer.services.processors.experiment.create.model.ProcessedPhenotypeData; +import org.breedinginsight.brapps.importer.services.processors.experiment.services.ExperimentStudyService; +import org.breedinginsight.brapps.importer.services.processors.experiment.services.ExperimentTrialService; +import 
org.breedinginsight.model.Program; +import org.breedinginsight.model.ProgramLocation; +import org.breedinginsight.model.Trait; +import org.breedinginsight.services.ProgramLocationService; +import org.breedinginsight.utilities.Utilities; + +import javax.inject.Inject; +import javax.inject.Singleton; +import java.util.*; +import java.util.stream.Collectors; + +/** + * References code common between workflows in shared services. DAO access is done directly in the + * steps rather than another layer of services. + */ + +@Singleton +@Slf4j +public class PopulateExistingPendingImportObjectsStep { + + private final BrAPIObservationUnitDAO brAPIObservationUnitDAO; + private final BrAPITrialDAO brAPITrialDAO; + private final BrAPIStudyDAO brAPIStudyDAO; + private final ProgramLocationService locationService; + private final BrAPIListDAO brAPIListDAO; + private final BrAPIGermplasmDAO brAPIGermplasmDAO; + private final BrAPIObservationDAO brAPIObservationDAO; + private final ExperimentStudyService experimentStudyService; + private final ExperimentTrialService experimentTrialService; + + @Property(name = "brapi.server.reference-source") + private String BRAPI_REFERENCE_SOURCE; + + @Inject + public PopulateExistingPendingImportObjectsStep(BrAPIObservationUnitDAO brAPIObservationUnitDAO, + BrAPITrialDAO brAPITrialDAO, + BrAPIStudyDAO brAPIStudyDAO, + ProgramLocationService locationService, + BrAPIListDAO brAPIListDAO, + BrAPIGermplasmDAO brAPIGermplasmDAO, + BrAPIObservationDAO brAPIObservationDAO, + ExperimentStudyService experimentStudyService, + ExperimentTrialService experimentTrialService) { + this.brAPIObservationUnitDAO = brAPIObservationUnitDAO; + this.brAPITrialDAO = brAPITrialDAO; + this.brAPIStudyDAO = brAPIStudyDAO; + this.locationService = locationService; + this.brAPIListDAO = brAPIListDAO; + this.brAPIGermplasmDAO = brAPIGermplasmDAO; + this.brAPIObservationDAO = brAPIObservationDAO; + this.experimentStudyService = experimentStudyService; + 
this.experimentTrialService = experimentTrialService; + } + + public ProcessContext process(ImportContext input, ProcessedPhenotypeData phenotypeData) { + + List experimentImportRows = ExperimentUtilities.importRowsToExperimentObservations(input.getImportRows()); + Program program = input.getProgram(); + + // Populate pending objects with existing status + Map> observationUnitByNameNoScope = initializeObservationUnits(program, experimentImportRows); + Map> trialByNameNoScope = experimentTrialService.initializeTrialByNameNoScope(program, observationUnitByNameNoScope, experimentImportRows); + Map> studyByNameNoScope = initializeStudyByNameNoScope(program, trialByNameNoScope, observationUnitByNameNoScope, experimentImportRows); + // interesting we're using our data model instead of brapi for locations + Map> locationByName = initializeUniqueLocationNames(program, studyByNameNoScope, experimentImportRows); + Map> obsVarDatasetByName = initializeObsVarDatasetByName(program, trialByNameNoScope, experimentImportRows); + Map> existingGermplasmByGID = initializeExistingGermplasmByGID(program, observationUnitByNameNoScope, experimentImportRows); + Map existingObsByObsHash = fetchExistingObservations(phenotypeData.getReferencedTraits(), studyByNameNoScope, program); + + PendingData existing = PendingData.builder() + .observationUnitByNameNoScope(observationUnitByNameNoScope) + .trialByNameNoScope(trialByNameNoScope) + .studyByNameNoScope(studyByNameNoScope) + .locationByName(locationByName) + .obsVarDatasetByName(obsVarDatasetByName) + .existingGermplasmByGID(existingGermplasmByGID) + .existingObsByObsHash(existingObsByObsHash) + .observationByHash(new HashMap<>()) + .build(); + + return ProcessContext.builder() + .importContext(input) + .pendingData(existing) + .build(); + } + + /** + * Initializes the observation units for the given program and experimentImportRows. 
+ * + * @param program The program object + * @param experimentImportRows A list of ExperimentObservation objects + * @return A map of Observation Unit IDs to PendingImportObject objects + * + * @throws InternalServerException + * @throws IllegalStateException + */ + private Map> initializeObservationUnits(Program program, List experimentImportRows) { + Map> observationUnitByName = new HashMap<>(); + + Map rowByObsUnitId = new HashMap<>(); + experimentImportRows.forEach(row -> { + if (StringUtils.isNotBlank(row.getObsUnitID())) { + if(rowByObsUnitId.containsKey(row.getObsUnitID())) { + throw new IllegalStateException("ObsUnitId is repeated: " + row.getObsUnitID()); + } + rowByObsUnitId.put(row.getObsUnitID(), row); + } + }); + + try { + List existingObsUnits = brAPIObservationUnitDAO.getObservationUnitsById(rowByObsUnitId.keySet(), program); + + // TODO: grab from externalReferences + /* + observationUnitByObsUnitId = existingObsUnits.stream() + .collect(Collectors.toMap(BrAPIObservationUnit::getObservationUnitDbId, + (BrAPIObservationUnit unit) -> new PendingImportObject<>(unit, false))); + */ + + String refSource = String.format("%s/%s", BRAPI_REFERENCE_SOURCE, ExternalReferenceSource.OBSERVATION_UNITS.getName()); + if (existingObsUnits.size() == rowByObsUnitId.size()) { + existingObsUnits.forEach(brAPIObservationUnit -> { + processAndCacheObservationUnit(brAPIObservationUnit, refSource, program, observationUnitByName, rowByObsUnitId); + + BrAPIExternalReference idRef = Utilities.getExternalReference(brAPIObservationUnit.getExternalReferences(), refSource) + .orElseThrow(() -> new InternalServerException("An ObservationUnit ID was not found in any of the external references")); + + ExperimentObservation row = rowByObsUnitId.get(idRef.getReferenceId()); + row.setExpTitle(Utilities.removeProgramKey(brAPIObservationUnit.getTrialName(), program.getKey())); + row.setEnv(Utilities.removeProgramKeyAndUnknownAdditionalData(brAPIObservationUnit.getStudyName(), 
program.getKey())); + row.setEnvLocation(Utilities.removeProgramKey(brAPIObservationUnit.getLocationName(), program.getKey())); + }); + } else { + List missingIds = new ArrayList<>(rowByObsUnitId.keySet()); + missingIds.removeAll(existingObsUnits.stream().map(BrAPIObservationUnit::getObservationUnitDbId).collect(Collectors.toList())); + throw new IllegalStateException("Observation Units not found for ObsUnitId(s): " + String.join(ExperimentUtilities.COMMA_DELIMITER, missingIds)); + } + + return observationUnitByName; + } catch (ApiException e) { + log.error("Error fetching observation units: " + Utilities.generateApiExceptionLogMessage(e), e); + throw new InternalServerException(e.toString(), e); + } + } + + /** + * Adds a new map entry to observationUnitByName based on the brAPIObservationUnit passed in and sets the + * expUnitId in the rowsByObsUnitId map. + * + * @param brAPIObservationUnit the BrAPI observation unit object + * @param refSource the reference source + * @param program the program object + * @param observationUnitByName the map of observation units by name (will be modified in place) + * @param rowByObsUnitId the map of rows by observation unit ID (will be modified in place) + * + * @throws InternalServerException + */ + private void processAndCacheObservationUnit(BrAPIObservationUnit brAPIObservationUnit, String refSource, Program program, + Map> observationUnitByName, + Map rowByObsUnitId) { + BrAPIExternalReference idRef = Utilities.getExternalReference(brAPIObservationUnit.getExternalReferences(), refSource) + .orElseThrow(() -> new InternalServerException("An ObservationUnit ID was not found in any of the external references")); + + ExperimentObservation row = rowByObsUnitId.get(idRef.getReferenceId()); + row.setExpUnitId(Utilities.removeProgramKeyAndUnknownAdditionalData(brAPIObservationUnit.getObservationUnitName(), program.getKey())); + observationUnitByName.put(ExperimentUtilities.createObservationUnitKey(row), + new 
PendingImportObject<>(ImportObjectState.EXISTING, + brAPIObservationUnit, + UUID.fromString(idRef.getReferenceId()))); + } + + + + /** + * Initializes studies by name without scope. + * + * @param program The program object. + * @param trialByNameNoScope A map of trial names with their corresponding pending import objects. + * @param experimentImportRows A list of experiment observation objects. + * @return A map of study names with their corresponding pending import objects. + * @throws InternalServerException If there is an error while processing the method. + */ + private Map> initializeStudyByNameNoScope(Program program, + Map> trialByNameNoScope, + Map> observationUnitByNameNoScope, + List experimentImportRows) { + Map> studyByName = new HashMap<>(); + if (trialByNameNoScope.size() != 1) { + return studyByName; + } + + try { + initializeStudiesForExistingObservationUnits(program, studyByName, observationUnitByNameNoScope); + } catch (ApiException e) { + log.error("Error fetching studies: " + Utilities.generateApiExceptionLogMessage(e), e); + throw new InternalServerException(e.toString(), e); + } catch (Exception e) { + log.error("Error processing studies", e); + throw new InternalServerException(e.toString(), e); + } + + List existingStudies; + Optional> trial = getTrialPIO(experimentImportRows, trialByNameNoScope); + + try { + if (trial.isEmpty()) { + // TODO: throw ValidatorException and return 422 + } + UUID experimentId = trial.get().getId(); + existingStudies = brAPIStudyDAO.getStudiesByExperimentID(experimentId, program); + for (BrAPIStudy existingStudy : existingStudies) { + experimentStudyService.processAndCacheStudy(existingStudy, program, BrAPIStudy::getStudyName, studyByName); + } + } catch (ApiException e) { + log.error("Error fetching studies: " + Utilities.generateApiExceptionLogMessage(e), e); + throw new InternalServerException(e.toString(), e); + } catch (Exception e) { + log.error("Error processing studies: ", e); + throw new 
InternalServerException(e.toString(), e); + } + + return studyByName; + } + + /** + * Retrieves the PendingImportObject of a BrAPITrial based on the given list of ExperimentObservation and trialByNameNoScope map. + * + * @param experimentImportRows The list of ExperimentObservation objects. + * @param trialByNameNoScope The map of trial names to PendingImportObject of BrAPITrial. + * @return The Optional containing the PendingImportObject of BrAPITrial, or an empty Optional if no matching trial is found. + */ + private Optional> getTrialPIO(List experimentImportRows, + Map> trialByNameNoScope) { + Optional expTitle = experimentImportRows.stream() + .filter(row -> StringUtils.isBlank(row.getObsUnitID()) && StringUtils.isNotBlank(row.getExpTitle())) + .map(ExperimentObservation::getExpTitle) + .findFirst(); + + if (expTitle.isEmpty() && trialByNameNoScope.keySet().stream().findFirst().isEmpty()) { + return Optional.empty(); + } + if(expTitle.isEmpty()) { + expTitle = trialByNameNoScope.keySet().stream().findFirst(); + } + + return Optional.ofNullable(trialByNameNoScope.get(expTitle.get())); + } + + + private void initializeStudiesForExistingObservationUnits( + Program program, + Map> studyByName, + Map> observationUnitByNameNoScope + ) throws Exception { + Set studyDbIds = observationUnitByNameNoScope.values() + .stream() + .map(pio -> pio.getBrAPIObject() + .getStudyDbId()) + .collect(Collectors.toSet()); + + List studies = experimentStudyService.fetchStudiesByDbId(studyDbIds, program); + for (BrAPIStudy study : studies) { + experimentStudyService.processAndCacheStudy(study, program, BrAPIStudy::getStudyName, studyByName); + } + } + + /** + * Initializes unique location names for a program. + * + * @param program The program object. + * @param studyByNameNoScope A map of study names and corresponding BrAPI study objects. + * @param experimentImportRows A list of experiment observation objects for import. 
+ * @return A map of location names and their corresponding pending import objects. + * @throws InternalServerException If there is an error fetching locations. + */ + private Map> initializeUniqueLocationNames(Program program, + Map> studyByNameNoScope, + List experimentImportRows) { + Map> locationByName = new HashMap<>(); + + List existingLocations = new ArrayList<>(); + if(studyByNameNoScope.size() > 0) { + Set locationDbIds = studyByNameNoScope.values() + .stream() + .map(study -> study.getBrAPIObject() + .getLocationDbId()) + .collect(Collectors.toSet()); + try { + existingLocations.addAll(locationService.getLocationsByDbId(locationDbIds, program.getId())); + } catch (ApiException e) { + log.error("Error fetching locations: " + Utilities.generateApiExceptionLogMessage(e), e); + throw new InternalServerException(e.toString(), e); + } + } + + List uniqueLocationNames = experimentImportRows.stream() + .filter(experimentObservation -> StringUtils.isBlank(experimentObservation.getObsUnitID())) + .map(ExperimentObservation::getEnvLocation) + .distinct() + .filter(Objects::nonNull) + .collect(Collectors.toList()); + + try { + existingLocations.addAll(locationService.getLocationsByName(uniqueLocationNames, program.getId())); + } catch (ApiException e) { + log.error("Error fetching locations: " + Utilities.generateApiExceptionLogMessage(e), e); + throw new InternalServerException(e.toString(), e); + } + + existingLocations.forEach(existingLocation -> locationByName.put(existingLocation.getName(), new PendingImportObject<>(ImportObjectState.EXISTING, existingLocation, existingLocation.getId()))); + return locationByName; + } + + /** + * Initializes observation variable dataset by name. + * + * @param program The program associated with the dataset. + * @param trialByNameNoScope The map of trials identified by name without scope. + * @param experimentImportRows The list of experiment observation rows. + * @return The map of observation variable dataset indexed by name. 
+ * + * @throws InternalServerException + */ + private Map> initializeObsVarDatasetByName(Program program, + Map> trialByNameNoScope, + List experimentImportRows) { + Map> obsVarDatasetByName = new HashMap<>(); + + Optional> trialPIO = getTrialPIO(experimentImportRows, trialByNameNoScope); + + if (trialPIO.isPresent() && trialPIO.get().getBrAPIObject().getAdditionalInfo().has(BrAPIAdditionalInfoFields.OBSERVATION_DATASET_ID)) { + String datasetId = trialPIO.get().getBrAPIObject() + .getAdditionalInfo() + .get(BrAPIAdditionalInfoFields.OBSERVATION_DATASET_ID) + .getAsString(); + try { + List existingDatasets = brAPIListDAO + .getListByTypeAndExternalRef(BrAPIListTypes.OBSERVATIONVARIABLES, + program.getId(), + String.format("%s/%s", BRAPI_REFERENCE_SOURCE, ExternalReferenceSource.DATASET.getName()), + UUID.fromString(datasetId)); + if (existingDatasets == null || existingDatasets.isEmpty()) { + throw new InternalServerException("existing dataset summary not returned from brapi server"); + } + BrAPIListDetails dataSetDetails = brAPIListDAO + .getListById(existingDatasets.get(0).getListDbId(), program.getId()) + .getResult(); + processAndCacheObsVarDataset(dataSetDetails, obsVarDatasetByName); + } catch (ApiException e) { + log.error(Utilities.generateApiExceptionLogMessage(e), e); + throw new InternalServerException(e.toString(), e); + } + } + return obsVarDatasetByName; + } + + /** + * Process and cache an object of type BrAPIListDetails. 
+ * + * @param existingList The existing list to be processed and cached + * @param obsVarDatasetByName A map of ObsVarDatasets indexed by name (will be modified in place) + * + * @throws IllegalStateException + */ + private void processAndCacheObsVarDataset(BrAPIListDetails existingList, Map> obsVarDatasetByName) { + BrAPIExternalReference xref = Utilities.getExternalReference(existingList.getExternalReferences(), + String.format("%s/%s", BRAPI_REFERENCE_SOURCE, ExternalReferenceSource.DATASET.getName())) + .orElseThrow(() -> new IllegalStateException("External references wasn't found for list (dbid): " + existingList.getListDbId())); + obsVarDatasetByName.put(existingList.getListName(), + new PendingImportObject<>(ImportObjectState.EXISTING, existingList, UUID.fromString(xref.getReferenceId()))); + } + + /** + * Initializes existing germplasm objects by germplasm ID (GID). + * + * @param program The program object. + * @param observationUnitByNameNoScope A map of observation unit objects by name. + * @param experimentImportRows A list of experiment observation objects. + * @return A map of existing germplasm objects by germplasm ID. 
+ * + * @throws InternalServerException + */ + private Map> initializeExistingGermplasmByGID(Program program, + Map> observationUnitByNameNoScope, + List experimentImportRows) { + Map> existingGermplasmByGID = new HashMap<>(); + + List existingGermplasms = new ArrayList<>(); + if(observationUnitByNameNoScope.size() > 0) { + Set germplasmDbIds = observationUnitByNameNoScope.values().stream().map(ou -> ou.getBrAPIObject().getGermplasmDbId()).collect(Collectors.toSet()); + try { + existingGermplasms.addAll(brAPIGermplasmDAO.getGermplasmsByDBID(germplasmDbIds, program.getId())); + } catch (ApiException e) { + log.error("Error fetching germplasm: " + Utilities.generateApiExceptionLogMessage(e), e); + throw new InternalServerException(e.toString(), e); + } + } + + List uniqueGermplasmGIDs = experimentImportRows.stream() + .filter(experimentObservation -> StringUtils.isBlank(experimentObservation.getObsUnitID())) + .map(ExperimentObservation::getGid) + .distinct() + .collect(Collectors.toList()); + + try { + existingGermplasms.addAll(getGermplasmByAccessionNumber(uniqueGermplasmGIDs, program.getId())); + } catch (ApiException e) { + log.error("Error fetching germplasm: " + Utilities.generateApiExceptionLogMessage(e), e); + throw new InternalServerException(e.toString(), e); + } + + existingGermplasms.forEach(existingGermplasm -> { + BrAPIExternalReference xref = Utilities.getExternalReference(existingGermplasm.getExternalReferences(), String.format("%s", BRAPI_REFERENCE_SOURCE)) + .orElseThrow(() -> new IllegalStateException("External references wasn't found for germplasm (dbid): " + existingGermplasm.getGermplasmDbId())); + existingGermplasmByGID.put(existingGermplasm.getAccessionNumber(), new PendingImportObject<>(ImportObjectState.EXISTING, existingGermplasm, UUID.fromString(xref.getReferenceId()))); + }); + return existingGermplasmByGID; + } + + /** + * Retrieves a list of germplasm with the specified accession numbers. 
+ * + * @param germplasmAccessionNumbers The list of accession numbers to search for. + * @param programId The ID of the program. + * @return An ArrayList of BrAPIGermplasm objects that match the accession numbers. + * @throws ApiException if there is an error retrieving the germplasm. + */ + private ArrayList getGermplasmByAccessionNumber( + List germplasmAccessionNumbers, + UUID programId) throws ApiException { + List germplasmList = brAPIGermplasmDAO.getGermplasm(programId); + ArrayList resultGermplasm = new ArrayList<>(); + // Search for accession number matches + for (BrAPIGermplasm germplasm : germplasmList) { + for (String accessionNumber : germplasmAccessionNumbers) { + if (germplasm.getAccessionNumber() + .equals(accessionNumber)) { + resultGermplasm.add(germplasm); + break; + } + } + } + return resultGermplasm; + } + + /** + * Fetches existing observations based on the given referenced traits, studyByNameNoScope map, and program. + * + * @param referencedTraits The list of referenced traits. + * @param studyByNameNoScope The map of studies by name without scope. + * @param program The program. + * @return A map of existing observations with their unique keys. 
+ */ + private Map fetchExistingObservations(List referencedTraits, + Map> studyByNameNoScope, + Program program) { + Set ouDbIds = new HashSet<>(); + Set variableDbIds = new HashSet<>(); + Map variableNameByDbId = new HashMap<>(); + Map ouNameByDbId = new HashMap<>(); + Map studyNameByDbId = studyByNameNoScope.values() + .stream() + .filter(pio -> StringUtils.isNotBlank(pio.getBrAPIObject().getStudyDbId())) + .map(PendingImportObject::getBrAPIObject) + .collect(Collectors.toMap(BrAPIStudy::getStudyDbId, brAPIStudy -> Utilities.removeProgramKeyAndUnknownAdditionalData(brAPIStudy.getStudyName(), program.getKey()))); + + studyNameByDbId.keySet().forEach(studyDbId -> { + try { + brAPIObservationUnitDAO.getObservationUnitsForStudyDbId(studyDbId, program).forEach(ou -> { + if(StringUtils.isNotBlank(ou.getObservationUnitDbId())) { + ouDbIds.add(ou.getObservationUnitDbId()); + } + ouNameByDbId.put(ou.getObservationUnitDbId(), Utilities.removeProgramKeyAndUnknownAdditionalData(ou.getObservationUnitName(), program.getKey())); + }); + } catch (ApiException e) { + throw new RuntimeException(e); + } + }); + + for (Trait referencedTrait : referencedTraits) { + variableDbIds.add(referencedTrait.getObservationVariableDbId()); + variableNameByDbId.put(referencedTrait.getObservationVariableDbId(), referencedTrait.getObservationVariableName()); + } + + List existingObservations = new ArrayList<>(); + try { + existingObservations = brAPIObservationDAO.getObservationsByObservationUnitsAndVariables(ouDbIds, variableDbIds, program); + } catch (ApiException e) { + throw new RuntimeException(e); + } + + return existingObservations.stream() + .map(obs -> { + String studyName = studyNameByDbId.get(obs.getStudyDbId()); + String variableName = variableNameByDbId.get(obs.getObservationVariableDbId()); + String ouName = ouNameByDbId.get(obs.getObservationUnitDbId()); + + String key = ExperimentUtilities.getObservationHash(ExperimentUtilities.createObservationUnitKey(studyName, ouName), 
variableName, studyName); + + return Map.entry(key, obs); + }) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + +} \ No newline at end of file diff --git a/src/main/java/org/breedinginsight/brapps/importer/services/processors/experiment/create/workflow/steps/PopulateNewPendingImportObjectsStep.java b/src/main/java/org/breedinginsight/brapps/importer/services/processors/experiment/create/workflow/steps/PopulateNewPendingImportObjectsStep.java new file mode 100644 index 000000000..f20c8a1a1 --- /dev/null +++ b/src/main/java/org/breedinginsight/brapps/importer/services/processors/experiment/create/workflow/steps/PopulateNewPendingImportObjectsStep.java @@ -0,0 +1,616 @@ +/* + * See the NOTICE file distributed with this work for additional information + * regarding copyright ownership. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.breedinginsight.brapps.importer.services.processors.experiment.create.workflow.steps; + +import com.google.gson.Gson; +import io.micronaut.context.annotation.Property; +import io.micronaut.http.HttpStatus; +import io.micronaut.http.exceptions.HttpStatusException; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.StringUtils; +import org.brapi.client.v2.JSON; +import org.brapi.client.v2.model.exceptions.ApiException; +import org.brapi.v2.model.core.BrAPIStudy; +import org.brapi.v2.model.core.BrAPITrial; +import org.brapi.v2.model.core.response.BrAPIListDetails; +import org.brapi.v2.model.germ.BrAPIGermplasm; +import org.brapi.v2.model.pheno.BrAPIObservation; +import org.brapi.v2.model.pheno.BrAPIObservationUnit; +import org.breedinginsight.brapi.v2.constants.BrAPIAdditionalInfoFields; +import org.breedinginsight.brapi.v2.dao.BrAPIObservationDAO; +import org.breedinginsight.brapi.v2.dao.BrAPIObservationUnitDAO; +import org.breedinginsight.brapps.importer.model.ImportUpload; +import org.breedinginsight.brapps.importer.model.imports.BrAPIImport; +import org.breedinginsight.brapps.importer.model.imports.experimentObservation.ExperimentObservation; +import org.breedinginsight.brapps.importer.model.response.ImportObjectState; +import org.breedinginsight.brapps.importer.model.response.PendingImportObject; +import org.breedinginsight.brapps.importer.model.workflow.ImportContext; +import org.breedinginsight.brapps.importer.model.workflow.ProcessedData; +import org.breedinginsight.brapps.importer.services.processors.experiment.ExperimentUtilities; +import org.breedinginsight.brapps.importer.services.processors.experiment.create.model.PendingData; +import org.breedinginsight.brapps.importer.services.processors.experiment.create.model.PendingImportObjectData; +import org.breedinginsight.brapps.importer.services.processors.experiment.create.model.ProcessContext; +import 
org.breedinginsight.brapps.importer.services.processors.experiment.create.model.ProcessedPhenotypeData; +import org.breedinginsight.brapps.importer.services.processors.experiment.services.ExperimentSeasonService; +import org.breedinginsight.brapps.importer.services.processors.experiment.services.ExperimentValidateService; +import org.breedinginsight.model.Program; +import org.breedinginsight.model.ProgramLocation; +import org.breedinginsight.model.User; +import org.breedinginsight.services.exceptions.MissingRequiredInfoException; +import org.breedinginsight.services.exceptions.UnprocessableEntityException; +import org.breedinginsight.utilities.Utilities; +import org.jooq.DSLContext; +import tech.tablesaw.api.Table; +import org.breedinginsight.model.Trait; +import tech.tablesaw.columns.Column; + +import javax.inject.Inject; +import javax.inject.Singleton; +import java.math.BigInteger; +import java.time.OffsetDateTime; +import java.time.format.DateTimeFormatter; +import java.util.*; +import java.util.function.Supplier; +import java.util.stream.Collectors; + +import static org.breedinginsight.brapps.importer.services.processors.experiment.ExperimentUtilities.*; + +@Singleton +@Slf4j +public class PopulateNewPendingImportObjectsStep { + + private final ExperimentSeasonService experimentSeasonService; + private final BrAPIObservationUnitDAO brAPIObservationUnitDAO; + private final DSLContext dsl; + private final Gson gson; + + @Property(name = "brapi.server.reference-source") + private String BRAPI_REFERENCE_SOURCE; + + @Inject + public PopulateNewPendingImportObjectsStep(ExperimentSeasonService experimentSeasonService, + BrAPIObservationUnitDAO brAPIObservationUnitDAO, + DSLContext dsl) { + this.experimentSeasonService = experimentSeasonService; + this.brAPIObservationUnitDAO = brAPIObservationUnitDAO; + this.dsl = dsl; + this.gson = new JSON().getGson(); + } + + /** + * TODO: in the future returning ProcessedData rather than modifying in-place would be preferrable. 
+ * + * @param context (modified in-place) + * @param phenotypeData + * @return + * @throws MissingRequiredInfoException + * @throws UnprocessableEntityException + * @throws ApiException + */ + public void process(ProcessContext context, ProcessedPhenotypeData phenotypeData) + throws MissingRequiredInfoException, UnprocessableEntityException, ApiException { + populatePendingImportObjects(context, phenotypeData); + } + + + // NOTE: was called initNew + // initNewBrapiData(importRows, phenotypeCols, program, user, referencedTraits, commit); + // TODO: move to shared service + private void populatePendingImportObjects(ProcessContext processContext, + ProcessedPhenotypeData phenotypeData) + throws MissingRequiredInfoException, UnprocessableEntityException, ApiException { + + ImportContext importContext = processContext.getImportContext(); + List importRows = importContext.getImportRows(); + Program program = importContext.getProgram(); + boolean commit = importContext.isCommit(); + PendingData pendingData = processContext.getPendingData(); + + Supplier expNextVal = getNextExperimentSequenceNumber(program); + Supplier envNextVal = getNextEnvironmentSequenceNumber(program); + + // NOTE: this was moved to the get existing step and kept in PendingData + // existingObsByObsHash = fetchExistingObservations(referencedTraits, program); + + for (int rowNum = 0; rowNum < importRows.size(); rowNum++) { + ExperimentObservation importRow = (ExperimentObservation) importRows.get(rowNum); + + PendingImportObjectData pioData = populateIndependentVariablePIOsForRow(importContext, + phenotypeData, pendingData, importRow, expNextVal, envNextVal); + + processObservations(importContext, + phenotypeData, pendingData, importRow, rowNum, commit, pioData.getObsUnitPIO(), pioData.getStudyPIO()); + } + } + + // TODO: these sequence methods could be moved to common area + /** + * Returns a Supplier that generates the next experiment sequence number based on the given Program. 
+ * + * @param program the Program for which to generate the next experiment sequence number + * @return a Supplier that generates the next experiment sequence number + * @throws HttpStatusException if the program is not properly configured for observation unit import + */ + private Supplier getNextExperimentSequenceNumber(Program program) { + String expSequenceName = program.getExpSequence(); + if (expSequenceName == null) { + log.error(String.format("Program, %s, is missing a value in the exp sequence column.", program.getName())); + throw new HttpStatusException(HttpStatus.UNPROCESSABLE_ENTITY, "Program is not properly configured for observation unit import"); + } + return () -> dsl.nextval(expSequenceName.toLowerCase()); + } + + /** + * Retrieves the next environment sequence number for a given program. + * + * @param program The program for which to get the next environment sequence number. + * @return A Supplier representing a function that generates the next environment sequence number. + * @throws HttpStatusException If the program is not properly configured for environment import. + */ + private Supplier getNextEnvironmentSequenceNumber(Program program) { + String envSequenceName = program.getEnvSequence(); + if (envSequenceName == null) { + log.error(String.format("Program, %s, is missing a value in the env sequence column.", program.getName())); + throw new HttpStatusException(HttpStatus.UNPROCESSABLE_ENTITY, "Program is not properly configured for environment import"); + } + return () -> dsl.nextval(envSequenceName.toLowerCase()); + } + + /** + * Populates independent variable PendingImportObjectData for a given row of import data. + * + * @param importContext The import context. + * @param phenotypeData The processed phenotype data. + * @param pendingData The pending data. + * @param importRow The import row. + * @param expNextVal The supplier for generating experiment next value. 
+ * @param envNextVal The supplier for generating environment next value. + * @return The populated independent variable PendingImportObjectData. + * @throws MissingRequiredInfoException If any required information is missing. + * @throws UnprocessableEntityException If the entity is unprocessable. + * @throws ApiException If there is an API exception. + */ + // TODO: this could potentially be made reusable between workflows in the future + private PendingImportObjectData populateIndependentVariablePIOsForRow(ImportContext importContext, + ProcessedPhenotypeData phenotypeData, + PendingData pendingData, + ExperimentObservation importRow, + Supplier expNextVal, + Supplier envNextVal) + throws MissingRequiredInfoException, UnprocessableEntityException, ApiException { + + Program program = importContext.getProgram(); + User user = importContext.getUser(); + boolean commit = importContext.isCommit(); + List referencedTraits = phenotypeData.getReferencedTraits(); + + PendingImportObject trialPIO = null; + try { + trialPIO = populateTrial(importContext, pendingData, importRow, expNextVal); + + // NOTE: moved up a level + if (trialPIO.getState() == ImportObjectState.NEW) { + pendingData.getTrialByNameNoScope().put(importRow.getExpTitle(), trialPIO); + } + } catch (UnprocessableEntityException e) { + throw new HttpStatusException(HttpStatus.UNPROCESSABLE_ENTITY, e.getMessage()); + } + + String expSeqValue = null; + if (commit) { + expSeqValue = trialPIO.getBrAPIObject() + .getAdditionalInfo() + .get(BrAPIAdditionalInfoFields.EXPERIMENT_NUMBER) + .getAsString(); + + // updates pendingData obsVarDatasetByName PIO + fetchOrCreateDatasetPIO(importContext, pendingData, importRow, referencedTraits); + } + + // updates pendingData locationByName PIO + fetchOrCreateLocationPIO(pendingData, importRow); + + PendingImportObject studyPIO = fetchOrCreateStudyPIO(importContext, pendingData, expSeqValue, importRow, envNextVal); + + String envSeqValue = null; + if (commit) { + envSeqValue 
= studyPIO.getBrAPIObject() + .getAdditionalInfo() + .get(BrAPIAdditionalInfoFields.ENVIRONMENT_NUMBER) + .getAsString(); + } + + PendingImportObject obsUnitPIO = fetchOrCreateObsUnitPIO(importContext, pendingData, envSeqValue, importRow); + + return PendingImportObjectData.builder() + .trialPIO(trialPIO) + .studyPIO(studyPIO) + .obsUnitPIO(obsUnitPIO) + .build(); + } + + // TODO: some reusable stuff between workflows that could potentially be broken out + private void processObservations(ImportContext importContext, + ProcessedPhenotypeData phenotypeData, + PendingData pendingData, + ExperimentObservation importRow, + int rowNum, boolean commit, + PendingImportObject obsUnitPIO, + PendingImportObject studyPIO) + throws UnprocessableEntityException, ApiException, MissingRequiredInfoException { + Program program = importContext.getProgram(); + User user = importContext.getUser(); + List> phenotypeCols = phenotypeData.getPhenotypeCols(); + Map> timeStampColByPheno = phenotypeData.getTimeStampColByPheno(); + List referencedTraits = phenotypeData.getReferencedTraits(); + + for (Column column : phenotypeCols) { + //If associated timestamp column, add + String dateTimeValue = null; + if (timeStampColByPheno.containsKey(column.name())) { + dateTimeValue = timeStampColByPheno.get(column.name()).getString(rowNum); + //If no timestamp, set to midnight + if (!dateTimeValue.isBlank() && !validDateTimeValue(dateTimeValue)) { + dateTimeValue += MIDNIGHT; + } + } + + // get the study year either referenced from the observation unit or listed explicitly on the import row + // NOTE: removed append / update code + String studyYear = importRow.getEnvYear(); + String seasonDbId = experimentSeasonService.yearToSeasonDbId(studyYear, program.getId()); + fetchOrCreateObservationPIO( + phenotypeData, + pendingData, + program, + user, + importRow, + column, //column.name() gets phenotype name + rowNum, + dateTimeValue, + commit, + seasonDbId, + obsUnitPIO, + studyPIO, + referencedTraits + 
); + } + } + + public PendingImportObject populateTrial(ImportContext importContext, + PendingData pendingData, + ExperimentObservation importRow, + Supplier expNextVal) + throws UnprocessableEntityException { + + PendingImportObject trialPio; + Program program = importContext.getProgram(); + User user = importContext.getUser(); + boolean commit = importContext.isCommit(); + Map> trialByNameNoScope = pendingData.getTrialByNameNoScope(); + Map> studyByNameNoScope = pendingData.getStudyByNameNoScope(); + + if (trialByNameNoScope.containsKey(importRow.getExpTitle())) { + PendingImportObject envPio; + trialPio = trialByNameNoScope.get(importRow.getExpTitle()); + envPio = studyByNameNoScope.get(importRow.getEnv()); + + // creating new units for existing experiments and environments is not possible + if (trialPio!=null && ImportObjectState.EXISTING==trialPio.getState() && + (StringUtils.isBlank( importRow.getObsUnitID() )) && (envPio!=null && ImportObjectState.EXISTING==envPio.getState() ) ){ + throw new UnprocessableEntityException(PREEXISTING_EXPERIMENT_TITLE); + } + } else if (!trialByNameNoScope.isEmpty()) { + throw new UnprocessableEntityException(MULTIPLE_EXP_TITLES); + } else { + UUID id = UUID.randomUUID(); + String expSeqValue = null; + if (commit) { + expSeqValue = expNextVal.get().toString(); + } + BrAPITrial newTrial = importRow.constructBrAPITrial(program, user, commit, BRAPI_REFERENCE_SOURCE, id, expSeqValue); + trialPio = new PendingImportObject<>(ImportObjectState.NEW, newTrial, id); + // NOTE: moved up a level + //trialByNameNoScope.put(importRow.getExpTitle(), trialPio); + } + + return trialPio; + } + + /** + * Fetches or creates a dataset for import. 
+ * + * @param importContext The import context + * @param pendingData The pending data containing information about the import (modified in place) + * @param importRow The import row representing an observation + * @param referencedTraits The list of referenced Trait objects + * @throws UnprocessableEntityException if the import data is invalid + */ + public void fetchOrCreateDatasetPIO(ImportContext importContext, + PendingData pendingData, + ExperimentObservation importRow, + List referencedTraits) throws UnprocessableEntityException { + PendingImportObject pio; + Program program = importContext.getProgram(); + Map> trialByNameNoScope = pendingData.getTrialByNameNoScope(); + Map> obsVarDatasetByName = pendingData.getObsVarDatasetByName(); + + PendingImportObject trialPIO = trialByNameNoScope.get(importRow.getExpTitle()); + + // TODO: this is common to both workflows + String name = String.format("Observation Dataset [%s-%s]", + program.getKey(), + trialPIO.getBrAPIObject() + .getAdditionalInfo() + .get(BrAPIAdditionalInfoFields.EXPERIMENT_NUMBER) + .getAsString()); + if (obsVarDatasetByName.containsKey(name)) { + pio = obsVarDatasetByName.get(name); + } else { + UUID id = UUID.randomUUID(); + BrAPIListDetails newDataset = importRow.constructDatasetDetails( + name, + id, + BRAPI_REFERENCE_SOURCE, + program, + trialPIO.getId().toString()); + pio = new PendingImportObject(ImportObjectState.NEW, newDataset, id); + trialPIO.getBrAPIObject().putAdditionalInfoItem("observationDatasetId", id.toString()); + if (ImportObjectState.EXISTING == trialPIO.getState()) { + trialPIO.setState(ImportObjectState.MUTATED); + } + obsVarDatasetByName.put(name, pio); + } + addObsVarsToDatasetDetails(pio, referencedTraits, program); + } + + /** + * Add observation variable IDs to the dataset details of a pending import object. 
+ * + * @param pio The pending import object with BrAPIListDetails (modified in place) + * @param referencedTraits The list of referenced Trait objects + * @param program The Program object + */ + // TODO: common to both workflows + private void addObsVarsToDatasetDetails(PendingImportObject pio, List referencedTraits, Program program) { + BrAPIListDetails details = pio.getBrAPIObject(); + referencedTraits.forEach(trait -> { + String id = Utilities.appendProgramKey(trait.getObservationVariableName(), program.getKey()); + + // TODO - Don't append the key if connected to a brapi service operating with legacy data(no appended program key) + + if (!details.getData().contains(id) && ImportObjectState.EXISTING != pio.getState()) { + details.getData().add(id); + } + if (!details.getData().contains(id) && ImportObjectState.EXISTING == pio.getState()) { + details.getData().add(id); + pio.setState(ImportObjectState.MUTATED); + } + }); + } + + /** + * Fetches or creates a ProgramLocation object for import. + * + * @param pendingData The PendingData object containing information about the import (modified in place) + * @param importRow The ExperimentObservation object representing an observation + */ + public void fetchOrCreateLocationPIO(PendingData pendingData, ExperimentObservation importRow) { + PendingImportObject pio; + String envLocationName = importRow.getEnvLocation(); + // NOTE: other worklow referenced map specific to itself + Map> locationByName = pendingData.getLocationByName(); + + // TODO: common to both workflows + if (!locationByName.containsKey((importRow.getEnvLocation()))) { + ProgramLocation newLocation = new ProgramLocation(); + newLocation.setName(envLocationName); + pio = new PendingImportObject<>(ImportObjectState.NEW, newLocation, UUID.randomUUID()); + locationByName.put(envLocationName, pio); + } + } + + /** + * Fetches an existing study or creates a new study for the given import row. 
+ * + * @param importContext the import context + * @param pendingData the pending data + * @param expSequenceValue the experiment sequence value + * @param importRow the import row + * @param envNextVal the supplier for generating the next environment ID + * + * @return the pending import object containing the study + * + * @throws UnprocessableEntityException if the study is not processable + */ + private PendingImportObject fetchOrCreateStudyPIO( + ImportContext importContext, + PendingData pendingData, + String expSequenceValue, + ExperimentObservation importRow, + Supplier envNextVal + ) throws UnprocessableEntityException { + + Program program = importContext.getProgram(); + boolean commit = importContext.isCommit(); + + Map> studyByNameNoScope = pendingData.getStudyByNameNoScope(); + Map> locationByName = pendingData.getLocationByName(); + Map> trialByNameNoScope = pendingData.getTrialByNameNoScope(); + + PendingImportObject pio; + // TODO: multiple workflows + if (studyByNameNoScope.containsKey(importRow.getEnv())) { + pio = studyByNameNoScope.get(importRow.getEnv()); + if (!commit){ + ExperimentUtilities.addYearToStudyAdditionalInfo(program, pio.getBrAPIObject()); + } + } else { + // NOTE: specific to this workflow, rest common + PendingImportObject trialPIO = trialByNameNoScope.get(importRow.getExpTitle()); + UUID trialID = trialPIO.getId(); + UUID id = UUID.randomUUID(); + BrAPIStudy newStudy = importRow.constructBrAPIStudy(program, commit, BRAPI_REFERENCE_SOURCE, expSequenceValue, trialID, id, envNextVal); + newStudy.setLocationDbId(locationByName.get(importRow.getEnvLocation()).getId().toString()); //set as the BI ID to facilitate looking up locations when saving new studies + + // It is assumed that the study has only one season, And that the Years and not + // the dbId's are stored in getSeason() list. 
+ String year = newStudy.getSeasons().get(0); // It is assumed that the study has only one season + if (commit) { + if(StringUtils.isNotBlank(year)) { + // TODO: look at if this needs to be cleared across runs + String seasonID = experimentSeasonService.yearToSeasonDbId(year, program.getId()); + newStudy.setSeasons(Collections.singletonList(seasonID)); + } + } else { + ExperimentUtilities.addYearToStudyAdditionalInfo(program, newStudy, year); + } + + pio = new PendingImportObject<>(ImportObjectState.NEW, newStudy, id); + studyByNameNoScope.put(importRow.getEnv(), pio); + } + return pio; + } + + private PendingImportObject fetchOrCreateObsUnitPIO(ImportContext importContext, + PendingData pendingData, + String envSeqValue, + ExperimentObservation importRow) + throws ApiException, MissingRequiredInfoException, UnprocessableEntityException { + PendingImportObject pio; + + Program program = importContext.getProgram(); + boolean commit = importContext.isCommit(); + Map> studyByNameNoScope = pendingData.getStudyByNameNoScope(); + Map> trialByNameNoScope = pendingData.getTrialByNameNoScope(); + Map> observationUnitByNameNoScope = pendingData.getObservationUnitByNameNoScope(); + Map> existingGermplasmByGID = pendingData.getExistingGermplasmByGID(); + + String key = ExperimentUtilities.createObservationUnitKey(importRow); + // NOTE: removed other workflow + + if (observationUnitByNameNoScope.containsKey(key)) { + pio = observationUnitByNameNoScope.get(key); + } else { + String germplasmName = ""; + if (existingGermplasmByGID.get(importRow.getGid()) != null) { + germplasmName = existingGermplasmByGID.get(importRow.getGid()) + .getBrAPIObject() + .getGermplasmName(); + } + PendingImportObject trialPIO = trialByNameNoScope.get(importRow.getExpTitle());; + UUID trialID = trialPIO.getId(); + UUID datasetId = null; + if (commit) { + datasetId = UUID.fromString(trialPIO.getBrAPIObject() + .getAdditionalInfo().getAsJsonObject() + 
.get(BrAPIAdditionalInfoFields.OBSERVATION_DATASET_ID).getAsString()); + } + PendingImportObject studyPIO = studyByNameNoScope.get(importRow.getEnv()); + UUID studyID = studyPIO.getId(); + UUID id = UUID.randomUUID(); + BrAPIObservationUnit newObservationUnit = importRow.constructBrAPIObservationUnit(program, envSeqValue, commit, germplasmName, importRow.getGid(), BRAPI_REFERENCE_SOURCE, trialID, datasetId, studyID, id); + + // check for existing units if this is an existing study + if (studyPIO.getBrAPIObject().getStudyDbId() != null) { + List existingOUs = brAPIObservationUnitDAO.getObservationUnitsForStudyDbId(studyPIO.getBrAPIObject().getStudyDbId(), program); + List matchingOU = existingOUs.stream().filter(ou -> importRow.getExpUnitId().equals(Utilities.removeProgramKeyAndUnknownAdditionalData(ou.getObservationUnitName(), program.getKey()))).collect(Collectors.toList()); + if (matchingOU.isEmpty()) { + throw new MissingRequiredInfoException(ExperimentUtilities.MISSING_OBS_UNIT_ID_ERROR); + } else { + pio = new PendingImportObject<>(ImportObjectState.EXISTING, (BrAPIObservationUnit) Utilities.formatBrapiObjForDisplay(matchingOU.get(0), BrAPIObservationUnit.class, program)); + } + } else { + pio = new PendingImportObject<>(ImportObjectState.NEW, newObservationUnit, id); + } + observationUnitByNameNoScope.put(key, pio); + } + return pio; + } + + private void fetchOrCreateObservationPIO(ProcessedPhenotypeData phenotypeData, + PendingData pendingData, + Program program, + User user, + ExperimentObservation importRow, + Column column, + Integer rowNum, + String timeStampValue, + boolean commit, + String seasonDbId, + PendingImportObject obsUnitPIO, + PendingImportObject studyPIO, + List referencedTraits) throws ApiException, UnprocessableEntityException { + PendingImportObject pio; + BrAPIObservation newObservation; + String variableName = column.name(); + String value = column.getString(rowNum); + String key; + Map existingObsByObsHash = 
pendingData.getExistingObsByObsHash(); + Map> observationByHash = pendingData.getObservationByHash(); + Map> trialByNameNoScope = pendingData.getTrialByNameNoScope(); + + // NOTE: removed append / update + key = getImportObservationHash(importRow, variableName); + + if (existingObsByObsHash.containsKey(key)) { + // NOTE: BI-2128 change added after refactor branch + // Update observation value only if it is changed and new value is not blank. + if (!isObservationMatched(phenotypeData, pendingData, key, value, column, rowNum) && StringUtils.isNotBlank(value)){ + + // prior observation with updated value + newObservation = gson.fromJson(gson.toJson(existingObsByObsHash.get(key)), BrAPIObservation.class); + if (!isValueMatched(pendingData, key, value)){ + newObservation.setValue(value); + } else if (!StringUtils.isBlank(timeStampValue) && !isTimestampMatched(pendingData, key, timeStampValue)) { + DateTimeFormatter formatter = DateTimeFormatter.ISO_INSTANT; + String formattedTimeStampValue = formatter.format(OffsetDateTime.parse(timeStampValue)); + newObservation.setObservationTimeStamp(OffsetDateTime.parse(formattedTimeStampValue)); + } + pio = new PendingImportObject<>(ImportObjectState.MUTATED, (BrAPIObservation) Utilities.formatBrapiObjForDisplay(newObservation, BrAPIObservation.class, program)); + } else { + + // prior observation + pio = new PendingImportObject<>(ImportObjectState.EXISTING, (BrAPIObservation) Utilities.formatBrapiObjForDisplay(existingObsByObsHash.get(key), BrAPIObservation.class, program)); + } + + observationByHash.put(key, pio); + } else if (!observationByHash.containsKey(key)){ + + // new observation + // NOTE: removed append / update code + PendingImportObject trialPIO = trialByNameNoScope.get(importRow.getExpTitle()); + + UUID trialID = trialPIO.getId(); + UUID studyID = studyPIO.getId(); + UUID id = UUID.randomUUID(); + newObservation = importRow.constructBrAPIObservation(value, variableName, seasonDbId, obsUnitPIO.getBrAPIObject(), commit, 
program, user, BRAPI_REFERENCE_SOURCE, trialID, studyID, obsUnitPIO.getId(), id); + //NOTE: Can't parse invalid timestamp value, so have to skip if invalid. + // Validation error should be thrown for offending value, but that doesn't happen until later downstream + if (timeStampValue != null && !timeStampValue.isBlank() && (validDateValue(timeStampValue) || validDateTimeValue(timeStampValue))) { + newObservation.setObservationTimeStamp(OffsetDateTime.parse(timeStampValue)); + } + + newObservation.setStudyDbId(studyPIO.getId().toString()); //set as the BI ID to facilitate looking up studies when saving new observations + + pio = new PendingImportObject<>(ImportObjectState.NEW, newObservation); + observationByHash.put(key, pio); + } + } + + +} \ No newline at end of file diff --git a/src/main/java/org/breedinginsight/brapps/importer/services/processors/experiment/create/workflow/steps/ValidatePendingImportObjectsStep.java b/src/main/java/org/breedinginsight/brapps/importer/services/processors/experiment/create/workflow/steps/ValidatePendingImportObjectsStep.java new file mode 100644 index 000000000..99fbc12eb --- /dev/null +++ b/src/main/java/org/breedinginsight/brapps/importer/services/processors/experiment/create/workflow/steps/ValidatePendingImportObjectsStep.java @@ -0,0 +1,471 @@ +/* + * See the NOTICE file distributed with this work for additional information + * regarding copyright ownership. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.breedinginsight.brapps.importer.services.processors.experiment.create.workflow.steps; + +import com.google.gson.Gson; +import com.google.gson.JsonArray; +import com.google.gson.JsonObject; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.collections4.map.CaseInsensitiveMap; +import org.apache.commons.lang3.StringUtils; +import org.brapi.client.v2.JSON; +import org.brapi.v2.model.core.BrAPIStudy; +import org.brapi.v2.model.core.BrAPITrial; +import org.brapi.v2.model.core.response.BrAPIListDetails; +import org.brapi.v2.model.germ.BrAPIGermplasm; +import org.brapi.v2.model.pheno.BrAPIObservation; +import org.brapi.v2.model.pheno.BrAPIObservationUnit; +import org.breedinginsight.api.model.v1.response.ValidationErrors; +import org.breedinginsight.brapi.v2.constants.BrAPIAdditionalInfoFields; +import org.breedinginsight.brapps.importer.model.imports.BrAPIImport; +import org.breedinginsight.brapps.importer.model.imports.ChangeLogEntry; +import org.breedinginsight.brapps.importer.model.imports.PendingImport; +import org.breedinginsight.brapps.importer.model.imports.experimentObservation.ExperimentObservation; +import org.breedinginsight.brapps.importer.model.response.ImportObjectState; +import org.breedinginsight.brapps.importer.model.response.PendingImportObject; +import org.breedinginsight.brapps.importer.model.workflow.ImportContext; +import org.breedinginsight.brapps.importer.model.workflow.ProcessedData; +import org.breedinginsight.brapps.importer.services.processors.experiment.ExperimentUtilities; +import org.breedinginsight.brapps.importer.services.processors.experiment.create.model.PendingData; +import org.breedinginsight.brapps.importer.services.processors.experiment.create.model.ProcessedPhenotypeData; +import org.breedinginsight.brapps.importer.services.processors.experiment.services.ExperimentSeasonService; +import org.breedinginsight.model.Program; +import org.breedinginsight.model.ProgramLocation; +import 
org.breedinginsight.model.Trait; +import org.breedinginsight.model.User; +import org.breedinginsight.utilities.Utilities; +import tech.tablesaw.columns.Column; + +import javax.inject.Inject; +import javax.inject.Singleton; +import java.time.OffsetDateTime; +import java.time.format.DateTimeFormatter; +import java.util.*; + +@Singleton +@Slf4j +public class ValidatePendingImportObjectsStep { + + private static final String BLANK_FIELD_EXPERIMENT = "Field is blank when creating a new experiment"; + private static final String ENV_LOCATION_MISMATCH = "All locations must be the same for a given environment"; + private static final String BLANK_FIELD_ENV = "Field is blank when creating a new environment"; + private static final String BLANK_FIELD_OBS = "Field is blank when creating new observations"; + private static final String ENV_YEAR_MISMATCH = "All years must be the same for a given environment"; + + private final ExperimentSeasonService experimentSeasonService; + private final Gson gson; + + @Inject + public ValidatePendingImportObjectsStep(ExperimentSeasonService experimentSeasonService) { + this.experimentSeasonService = experimentSeasonService; + this.gson = new JSON().getGson(); + } + + public ValidationErrors process(ImportContext importContext, PendingData pendingData, ProcessedPhenotypeData phenotypeData, ProcessedData processedData) { + + //Map mappedBrAPIImport = processedData.getMappedBrAPIImport(); + List importRows = importContext.getImportRows(); + List> phenotypeCols = phenotypeData.getPhenotypeCols(); + Program program = importContext.getProgram(); + List referencedTraits = phenotypeData.getReferencedTraits(); + boolean commit = importContext.isCommit(); + User user = importContext.getUser(); + + Map mappedBrAPIImport = prepareDataForValidation(importRows, pendingData, phenotypeCols); + ValidationErrors validationErrors = validateFields(importRows, mappedBrAPIImport, referencedTraits, program, phenotypeCols, commit, user, pendingData, 
phenotypeData); + processedData.setMappedBrAPIImport(mappedBrAPIImport); + return validationErrors; + } + + private Map prepareDataForValidation(List importRows, + PendingData pendingData, + List> phenotypeCols) { + + Map mappedBrAPIImport = new HashMap<>(); + + Map> observationUnitByNameNoScope = pendingData.getObservationUnitByNameNoScope(); + Map> trialByNameNoScope = pendingData.getTrialByNameNoScope(); + Map> studyByNameNoScope = pendingData.getStudyByNameNoScope(); + Map> locationByName = pendingData.getLocationByName(); + Map> obsVarDatasetByName = pendingData.getObsVarDatasetByName(); + Map> existingGermplasmByGID = pendingData.getExistingGermplasmByGID(); + Map> observationByHash = pendingData.getObservationByHash(); + + for (int rowNum = 0; rowNum < importRows.size(); rowNum++) { + ExperimentObservation importRow = (ExperimentObservation) importRows.get(rowNum); + PendingImport mappedImportRow = mappedBrAPIImport.getOrDefault(rowNum, new PendingImport()); + List> observations = mappedImportRow.getObservations(); + String observationHash; + + // NOTE: Removed append/update workflow code + mappedImportRow.setTrial(trialByNameNoScope.get(importRow.getExpTitle())); + mappedImportRow.setLocation(locationByName.get(importRow.getEnvLocation())); + mappedImportRow.setStudy(studyByNameNoScope.get(importRow.getEnv())); + mappedImportRow.setObservationUnit(observationUnitByNameNoScope.get(ExperimentUtilities.createObservationUnitKey(importRow))); + mappedImportRow.setGermplasm(getGidPIO(pendingData, importRow)); + + // loop over phenotype column observation data for current row + for (Column column : phenotypeCols) { + + // if value was blank won't be entry in map for this observation + observations.add(observationByHash.get(ExperimentUtilities.getImportObservationHash(importRow, ExperimentUtilities.getVariableNameFromColumn(column)))); + } + + mappedBrAPIImport.put(rowNum, mappedImportRow); + } + + return mappedBrAPIImport; + } + + private PendingImportObject 
getGidPIO(PendingData pendingData, ExperimentObservation importRow) { + + Map> existingGermplasmByGID = pendingData.getExistingGermplasmByGID(); + + if (existingGermplasmByGID.containsKey(importRow.getGid())) { + return existingGermplasmByGID.get(importRow.getGid()); + } + + return null; + } + + private ValidationErrors validateFields(List importRows, Map mappedBrAPIImport, List referencedTraits, Program program, + List> phenotypeCols, boolean commit, User user, + PendingData pendingData, + ProcessedPhenotypeData phenotypeData) { + //fetching any existing observations for any OUs in the import + CaseInsensitiveMap colVarMap = new CaseInsensitiveMap<>(); + ValidationErrors validationErrors = new ValidationErrors(); + + for ( Trait trait: referencedTraits) { + colVarMap.put(trait.getObservationVariableName(),trait); + } + Set uniqueStudyAndObsUnit = new HashSet<>(); + for (int rowNum = 0; rowNum < importRows.size(); rowNum++) { + ExperimentObservation importRow = (ExperimentObservation) importRows.get(rowNum); + PendingImport mappedImportRow = mappedBrAPIImport.get(rowNum); + // NOTE: validate Observations used by both workflows + if (StringUtils.isNotBlank(importRow.getGid())) { // if GID is blank, don't bother to check if it is valid. 
+ validateGermplasm(importRow, validationErrors, rowNum, mappedImportRow.getGermplasm()); + } + validateTestOrCheck(importRow, validationErrors, rowNum); + validateConditionallyRequired(pendingData, validationErrors, rowNum, importRow, program, commit); + validateObservationUnits(pendingData, validationErrors, uniqueStudyAndObsUnit, rowNum, importRow); + validateObservations(pendingData, phenotypeData, validationErrors, rowNum, importRow, phenotypeCols, colVarMap, commit, user); + } + + return validationErrors; + } + + private void validateGermplasm(ExperimentObservation importRow, ValidationErrors validationErrors, int rowNum, PendingImportObject germplasmPIO) { + // error if GID is not blank but GID does not already exist + if (StringUtils.isNotBlank(importRow.getGid()) && germplasmPIO == null) { + ExperimentUtilities.addRowError(ExperimentObservation.Columns.GERMPLASM_GID, "A non-existing GID", validationErrors, rowNum); + } + } + + private void validateTestOrCheck(ExperimentObservation importRow, ValidationErrors validationErrors, int rowNum) { + String testOrCheck = importRow.getTestOrCheck(); + if ( ! 
( testOrCheck==null || testOrCheck.isBlank() + || "C".equalsIgnoreCase(testOrCheck) || "CHECK".equalsIgnoreCase(testOrCheck) + || "T".equalsIgnoreCase(testOrCheck) || "TEST".equalsIgnoreCase(testOrCheck) ) + ){ + ExperimentUtilities.addRowError(ExperimentObservation.Columns.TEST_CHECK, String.format("Invalid value (%s)", testOrCheck), validationErrors, rowNum) ; + } + } + + private void validateConditionallyRequired(PendingData pendingData, ValidationErrors validationErrors, int rowNum, ExperimentObservation importRow, Program program, boolean commit) { + Map> trialByNameNoScope = pendingData.getTrialByNameNoScope(); + Map> studyByNameNoScope = pendingData.getStudyByNameNoScope(); + + ImportObjectState expState = trialByNameNoScope.get(importRow.getExpTitle()) + .getState(); + ImportObjectState envState = studyByNameNoScope.get(importRow.getEnv()).getState(); + + String errorMessage = BLANK_FIELD_EXPERIMENT; + if (expState == ImportObjectState.EXISTING && envState == ImportObjectState.NEW) { + errorMessage = BLANK_FIELD_ENV; + } else if(expState == ImportObjectState.EXISTING && envState == ImportObjectState.EXISTING) { + errorMessage = BLANK_FIELD_OBS; + } + + if(expState == ImportObjectState.NEW || envState == ImportObjectState.NEW) { + validateRequiredCell(importRow.getGid(), ExperimentObservation.Columns.GERMPLASM_GID, errorMessage, validationErrors, rowNum); + validateRequiredCell(importRow.getExpTitle(), ExperimentObservation.Columns.EXP_TITLE,errorMessage, validationErrors, rowNum); + validateRequiredCell(importRow.getExpUnit(), ExperimentObservation.Columns.EXP_UNIT, errorMessage, validationErrors, rowNum); + validateRequiredCell(importRow.getExpType(), ExperimentObservation.Columns.EXP_TYPE, errorMessage, validationErrors, rowNum); + validateRequiredCell(importRow.getEnv(), ExperimentObservation.Columns.ENV, errorMessage, validationErrors, rowNum); + if(validateRequiredCell(importRow.getEnvLocation(), ExperimentObservation.Columns.ENV_LOCATION, 
errorMessage, validationErrors, rowNum)) { + if(!Utilities.removeProgramKeyAndUnknownAdditionalData(studyByNameNoScope.get(importRow.getEnv()).getBrAPIObject().getLocationName(), program.getKey()).equals(importRow.getEnvLocation())) { + ExperimentUtilities.addRowError(ExperimentObservation.Columns.ENV_LOCATION, ENV_LOCATION_MISMATCH, validationErrors, rowNum); + } + } + if(validateRequiredCell(importRow.getEnvYear(), ExperimentObservation.Columns.ENV_YEAR, errorMessage, validationErrors, rowNum)) { + String studyYear = StringUtils.defaultString(studyByNameNoScope.get(importRow.getEnv()).getBrAPIObject().getSeasons().get(0) ); + String rowYear = importRow.getEnvYear(); + if(commit) { + rowYear = experimentSeasonService.yearToSeasonDbId(importRow.getEnvYear(), program.getId()); + } + if(StringUtils.isNotBlank(studyYear) && !studyYear.equals(rowYear)) { + ExperimentUtilities.addRowError(ExperimentObservation.Columns.ENV_YEAR, ENV_YEAR_MISMATCH, validationErrors, rowNum); + } + } + validateRequiredCell(importRow.getExpUnitId(), ExperimentObservation.Columns.EXP_UNIT_ID, errorMessage, validationErrors, rowNum); + validateRequiredCell(importRow.getExpReplicateNo(), ExperimentObservation.Columns.REP_NUM, errorMessage, validationErrors, rowNum); + validateRequiredCell(importRow.getExpBlockNo(), ExperimentObservation.Columns.BLOCK_NUM, errorMessage, validationErrors, rowNum); + + if(StringUtils.isNotBlank(importRow.getObsUnitID())) { + ExperimentUtilities.addRowError(ExperimentObservation.Columns.OBS_UNIT_ID, "ObsUnitID cannot be specified when creating a new environment", validationErrors, rowNum); + } + } else { + //Check if existing environment. 
If so, ObsUnitId must be assigned + validateRequiredCell( + importRow.getObsUnitID(), + ExperimentObservation.Columns.OBS_UNIT_ID, + ExperimentUtilities.MISSING_OBS_UNIT_ID_ERROR, + validationErrors, + rowNum + ); + } + } + + private boolean validateRequiredCell(String value, String columnHeader, String errorMessage, ValidationErrors validationErrors, int rowNum) { + if (StringUtils.isBlank(value)) { + ExperimentUtilities.addRowError(columnHeader, errorMessage, validationErrors, rowNum); + return false; + } + return true; + } + + private void validateObservationUnits( + PendingData pendingData, + ValidationErrors validationErrors, + Set uniqueStudyAndObsUnit, + int rowNum, + ExperimentObservation importRow) { + Map> observationUnitByNameNoScope = pendingData.getObservationUnitByNameNoScope(); + + validateUniqueObsUnits(validationErrors, uniqueStudyAndObsUnit, rowNum, importRow); + + String key = ExperimentUtilities.createObservationUnitKey(importRow); + PendingImportObject ouPIO = observationUnitByNameNoScope.get(key); + if(ouPIO.getState() == ImportObjectState.NEW && StringUtils.isNotBlank(importRow.getObsUnitID())) { + ExperimentUtilities.addRowError(ExperimentObservation.Columns.OBS_UNIT_ID, "Could not find observation unit by ObsUnitDBID", validationErrors, rowNum); + } + + validateGeoCoordinates(validationErrors, rowNum, importRow); + } + + /** + * Validate that the observation unit is unique within a study. + *
+ * SIDE EFFECTS: validationErrors and uniqueStudyAndObsUnit can be modified. + * + * @param validationErrors can be modified as a side effect. + * @param uniqueStudyAndObsUnit can be modified as a side effect. + * @param rowNum counter that is always two less the file row being validated + * @param importRow the data row being validated + */ + private void validateUniqueObsUnits( + ValidationErrors validationErrors, + Set uniqueStudyAndObsUnit, + int rowNum, + ExperimentObservation importRow) { + String envIdPlusStudyId = ExperimentUtilities.createObservationUnitKey(importRow); + if (uniqueStudyAndObsUnit.contains(envIdPlusStudyId)) { + String errorMessage = String.format("The ID (%s) is not unique within the environment(%s)", importRow.getExpUnitId(), importRow.getEnv()); + ExperimentUtilities.addRowError(ExperimentObservation.Columns.EXP_UNIT_ID, errorMessage, validationErrors, rowNum); + } else { + uniqueStudyAndObsUnit.add(envIdPlusStudyId); + } + } + + private void validateGeoCoordinates(ValidationErrors validationErrors, int rowNum, ExperimentObservation importRow) { + + String lat = importRow.getLatitude(); + String lon = importRow.getLongitude(); + String elevation = importRow.getElevation(); + + // If any of Lat, Long, or Elevation are provided, Lat and Long must both be provided. 
+ if (StringUtils.isNotBlank(lat) || StringUtils.isNotBlank(lon) || StringUtils.isNotBlank(elevation)) { + if (StringUtils.isBlank(lat)) { + ExperimentUtilities.addRowError(ExperimentObservation.Columns.LAT, "Latitude must be provided for complete coordinate specification", validationErrors, rowNum); + } + if (StringUtils.isBlank(lon)) { + ExperimentUtilities.addRowError(ExperimentObservation.Columns.LONG, "Longitude must be provided for complete coordinate specification", validationErrors, rowNum); + } + } + + // Validate coordinate values + boolean latBadValue = false; + boolean lonBadValue = false; + boolean elevationBadValue = false; + double latDouble; + double lonDouble; + double elevationDouble; + + // Only check latitude format if not blank since already had previous error + if (StringUtils.isNotBlank(lat)) { + try { + latDouble = Double.parseDouble(lat); + if (latDouble < -90 || latDouble > 90) { + latBadValue = true; + } + } catch (NumberFormatException e) { + latBadValue = true; + } + } + + // Only check longitude format if not blank since already had previous error + if (StringUtils.isNotBlank(lon)) { + try { + lonDouble = Double.parseDouble(lon); + if (lonDouble < -180 || lonDouble > 180) { + lonBadValue = true; + } + } catch (NumberFormatException e) { + lonBadValue = true; + } + } + + if (StringUtils.isNotBlank(elevation)) { + try { + elevationDouble = Double.parseDouble(elevation); + } catch (NumberFormatException e) { + elevationBadValue = true; + } + } + + if (latBadValue) { + ExperimentUtilities.addRowError(ExperimentObservation.Columns.LAT, "Invalid Lat value (expected range -90 to 90)", validationErrors, rowNum); + } + + if (lonBadValue) { + ExperimentUtilities.addRowError(ExperimentObservation.Columns.LONG, "Invalid Long value (expected range -180 to 180)", validationErrors, rowNum); + } + + if (elevationBadValue) { + ExperimentUtilities.addRowError(ExperimentObservation.Columns.LONG, "Invalid Elevation value (numerals expected)", 
validationErrors, rowNum); + } + + } + + + private void validateObservations(PendingData pendingData, + ProcessedPhenotypeData phenotypeData, + ValidationErrors validationErrors, + int rowNum, + ExperimentObservation importRow, + List> phenotypeCols, + CaseInsensitiveMap colVarMap, + boolean commit, + User user) { + + Map existingObsByObsHash = pendingData.getExistingObsByObsHash(); + Map> timeStampColByPheno = phenotypeData.getTimeStampColByPheno(); + Map> observationByHash = pendingData.getObservationByHash(); + + phenotypeCols.forEach(phenoCol -> { + String importHash; + String importObsValue = phenoCol.getString(rowNum); + + // NOTE: removed append / update specifc code + importHash = ExperimentUtilities.getImportObservationHash(importRow, phenoCol.name()); + + // error if import observation data already exists and user has not selected to overwrite + if(commit && "false".equals(importRow.getOverwrite() == null ? "false" : importRow.getOverwrite()) && + existingObsByObsHash.containsKey(importHash) && + StringUtils.isNotBlank(phenoCol.getString(rowNum)) && + !existingObsByObsHash.get(importHash).getValue().equals(phenoCol.getString(rowNum))) { + ExperimentUtilities.addRowError( + phenoCol.name(), + String.format("Value already exists for ObsUnitId: %s, Phenotype: %s", importRow.getObsUnitID(), phenoCol.name()), + validationErrors, rowNum + ); + + // preview case where observation has already been committed and the import row ObsVar data differs from what + // had been saved prior to import + } else if (existingObsByObsHash.containsKey(importHash) && !ExperimentUtilities.isObservationMatched(phenotypeData, pendingData, importHash, importObsValue, phenoCol, rowNum)) { + + // different data means validations still need to happen + // TODO consider moving these two calls into a separate method since called twice together + ExperimentUtilities.validateObservationValue(colVarMap.get(phenoCol.name()), phenoCol.getString(rowNum), phenoCol.name(), validationErrors, 
rowNum); + + //Timestamp validation + if(timeStampColByPheno.containsKey(phenoCol.name())) { + Column timeStampCol = timeStampColByPheno.get(phenoCol.name()); + ExperimentUtilities.validateTimeStampValue(timeStampCol.getString(rowNum), timeStampCol.name(), validationErrors, rowNum); + } + + // add a change log entry when updating the value of an observation + // only will update and thereby need change log entry if no error + if (commit && (!validationErrors.hasErrors())) { + BrAPIObservation pendingObservation = observationByHash.get(importHash).getBrAPIObject(); + DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd:hh-mm-ssZ"); + String timestamp = formatter.format(OffsetDateTime.now()); + String reason = importRow.getOverwriteReason() != null ? importRow.getOverwriteReason() : ""; + String prior = ""; + if (ExperimentUtilities.isValueMatched(pendingData, importHash, importObsValue)) { + prior.concat(existingObsByObsHash.get(importHash).getValue()); + } + if (timeStampColByPheno.containsKey(phenoCol.name()) && ExperimentUtilities.isTimestampMatched(pendingData, importHash, timeStampColByPheno.get(phenoCol.name()).getString(rowNum))) { + prior = prior.isEmpty() ? 
prior : prior.concat(" "); + prior.concat(existingObsByObsHash.get(importHash).getObservationTimeStamp().toString()); + } + ChangeLogEntry change = new ChangeLogEntry(prior, + reason, + user.getId(), + timestamp + ); + + // create the changelog field in additional info if it does not already exist + if (pendingObservation.getAdditionalInfo().isJsonNull()) { + pendingObservation.setAdditionalInfo(new JsonObject()); + pendingObservation.getAdditionalInfo().add(BrAPIAdditionalInfoFields.CHANGELOG, new JsonArray()); + } + + if (pendingObservation.getAdditionalInfo() != null && !pendingObservation.getAdditionalInfo().has(BrAPIAdditionalInfoFields.CHANGELOG)) { + pendingObservation.getAdditionalInfo().add(BrAPIAdditionalInfoFields.CHANGELOG, new JsonArray()); + } + + // add a new entry to the changelog + pendingObservation.getAdditionalInfo().get(BrAPIAdditionalInfoFields.CHANGELOG).getAsJsonArray().add(gson.toJsonTree(change).getAsJsonObject()); + } + + // preview case where observation has already been committed and import ObsVar data is the + // same as has been committed prior to import + } else if(ExperimentUtilities.isObservationMatched(phenotypeData, pendingData, importHash, importObsValue, phenoCol, rowNum)) { + BrAPIObservation existingObs = existingObsByObsHash.get(importHash); + existingObs.setObservationVariableName(phenoCol.name()); + observationByHash.get(importHash).setState(ImportObjectState.EXISTING); + observationByHash.get(importHash).setBrAPIObject(existingObs); + + // preview case where observation has already been committed and import ObsVar data is empty prior to import + } else if(!existingObsByObsHash.containsKey(importHash) && (StringUtils.isBlank(phenoCol.getString(rowNum)))) { + observationByHash.get(importHash).setState(ImportObjectState.EXISTING); + } else { + ExperimentUtilities.validateObservationValue(colVarMap.get(phenoCol.name()), phenoCol.getString(rowNum), phenoCol.name(), validationErrors, rowNum); + + //Timestamp validation + 
if(timeStampColByPheno.containsKey(phenoCol.name())) { + Column timeStampCol = timeStampColByPheno.get(phenoCol.name()); + ExperimentUtilities.validateTimeStampValue(timeStampCol.getString(rowNum), timeStampCol.name(), validationErrors, rowNum); + } + } + }); + } +} diff --git a/src/main/java/org/breedinginsight/brapps/importer/services/processors/experiment/services/ExperimentPendingImportService.java b/src/main/java/org/breedinginsight/brapps/importer/services/processors/experiment/services/ExperimentPendingImportService.java new file mode 100644 index 000000000..9a1af1c37 --- /dev/null +++ b/src/main/java/org/breedinginsight/brapps/importer/services/processors/experiment/services/ExperimentPendingImportService.java @@ -0,0 +1,22 @@ +/* + * See the NOTICE file distributed with this work for additional information + * regarding copyright ownership. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.breedinginsight.brapps.importer.services.processors.experiment.services; + +public class ExperimentPendingImportService { + + +} diff --git a/src/main/java/org/breedinginsight/brapps/importer/services/processors/experiment/services/ExperimentPhenotypeService.java b/src/main/java/org/breedinginsight/brapps/importer/services/processors/experiment/services/ExperimentPhenotypeService.java new file mode 100644 index 000000000..ce744234d --- /dev/null +++ b/src/main/java/org/breedinginsight/brapps/importer/services/processors/experiment/services/ExperimentPhenotypeService.java @@ -0,0 +1,76 @@ +/* + * See the NOTICE file distributed with this work for additional information + * regarding copyright ownership. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.breedinginsight.brapps.importer.services.processors.experiment.services; + +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.StringUtils; +import org.breedinginsight.brapps.importer.model.ImportUpload; +import org.breedinginsight.brapps.importer.model.workflow.ImportContext; +import org.breedinginsight.brapps.importer.services.processors.experiment.DynamicColumnParser; +import org.breedinginsight.brapps.importer.services.processors.experiment.create.model.ProcessedPhenotypeData; +import org.breedinginsight.model.Program; +import org.breedinginsight.model.Trait; +import tech.tablesaw.api.Table; +import tech.tablesaw.columns.Column; + +import javax.inject.Inject; +import javax.inject.Singleton; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.breedinginsight.brapps.importer.services.processors.experiment.ExperimentUtilities.TIMESTAMP_REGEX; + +@Singleton +@Slf4j +public class ExperimentPhenotypeService { + + private final ExperimentValidateService experimentValidateService; + + @Inject + public ExperimentPhenotypeService(ExperimentValidateService experimentValidateService) { + this.experimentValidateService = experimentValidateService; + } + + /** + * Extracts phenotypes from the import context. + * + * @param importContext The import context containing the data, upload, and program information. + * @return A ProcessedPhenotypeData object with the extracted phenotypes. 
+ */ + public ProcessedPhenotypeData extractPhenotypes(ImportContext importContext) { + Table data = importContext.getData(); + ImportUpload upload = importContext.getUpload(); + Program program = importContext.getProgram(); + + DynamicColumnParser.DynamicColumnParseResult result = DynamicColumnParser.parse(data, upload.getDynamicColumnNames()); + List traits = experimentValidateService.verifyTraits(program.getId(), result); + + Map> timeStampColByPheno = new HashMap<>(); + //Now know timestamps all valid phenotypes, can associate with phenotype column name for easy retrieval + for (Column tsColumn : result.getTimestampCols()) { + timeStampColByPheno.put(tsColumn.name().replaceFirst(TIMESTAMP_REGEX, StringUtils.EMPTY), tsColumn); + } + + return ProcessedPhenotypeData.builder() + .referencedTraits(traits) + .phenotypeCols(result.getPhenotypeCols()) + .timeStampColByPheno(timeStampColByPheno) + .build(); + } + +} diff --git a/src/main/java/org/breedinginsight/brapps/importer/services/processors/experiment/services/ExperimentSeasonService.java b/src/main/java/org/breedinginsight/brapps/importer/services/processors/experiment/services/ExperimentSeasonService.java new file mode 100644 index 000000000..c365cbf6d --- /dev/null +++ b/src/main/java/org/breedinginsight/brapps/importer/services/processors/experiment/services/ExperimentSeasonService.java @@ -0,0 +1,100 @@ +/* + * See the NOTICE file distributed with this work for additional information + * regarding copyright ownership. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.breedinginsight.brapps.importer.services.processors.experiment.services; + +import io.micronaut.context.annotation.Property; +import io.micronaut.context.annotation.Prototype; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.StringUtils; +import org.brapi.client.v2.model.exceptions.ApiException; +import org.brapi.v2.model.core.BrAPISeason; +import org.breedinginsight.brapi.v2.dao.BrAPISeasonDAO; +import org.breedinginsight.utilities.Utilities; + +import javax.inject.Inject; +import javax.inject.Singleton; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.UUID; + +// reset cache across uses by creating new instance each time this service is injected +@Prototype +@Slf4j +public class ExperimentSeasonService { + + private final BrAPISeasonDAO brAPISeasonDAO; + + // TODO: move season to actual cache rather than cacheing at application layer + private Map yearToSeasonDbIdCache = new HashMap<>(); + + @Inject + public ExperimentSeasonService(BrAPISeasonDAO brAPISeasonDAO) { + this.brAPISeasonDAO = brAPISeasonDAO; + } + + /** + * Converts year String to SeasonDbId + *
+ * NOTE: This assumes that the only Season records of interest are ones + * with a blank name or a name that is the same as the year. + * + * @param year The year as a string + * @param programId the program ID. + * @return the DbId of the season-record associated with the year + */ + public String yearToSeasonDbId(String year, UUID programId) { + String dbID = null; + if (yearToSeasonDbIdCache.containsKey(year)) { // get it from cache if possible + dbID = yearToSeasonDbIdCache.get(year); + } else { + dbID = yearToSeasonDbIdFromDatabase(year, programId); + yearToSeasonDbIdCache.put(year, dbID); + } + return dbID; + } + + private String yearToSeasonDbIdFromDatabase(String year, UUID programId) { + BrAPISeason targetSeason = null; + List seasons; + try { + seasons = brAPISeasonDAO.getSeasonsByYear(year, programId); + for (BrAPISeason season : seasons) { + if (null == season.getSeasonName() || season.getSeasonName().isBlank() || season.getSeasonName().equals(year)) { + targetSeason = season; + break; + } + } + if (targetSeason == null) { + BrAPISeason newSeason = new BrAPISeason(); + Integer intYear = null; + if( StringUtils.isNotBlank(year) ){ + intYear = Integer.parseInt(year); + } + newSeason.setYear(intYear); + newSeason.setSeasonName(year); + targetSeason = brAPISeasonDAO.addOneSeason(newSeason, programId); + } + + } catch (ApiException e) { + log.warn(Utilities.generateApiExceptionLogMessage(e)); + log.error(e.getResponseBody(), e); + } + + return (targetSeason == null) ? 
null : targetSeason.getSeasonDbId(); + } +} diff --git a/src/main/java/org/breedinginsight/brapps/importer/services/processors/experiment/services/ExperimentStudyService.java b/src/main/java/org/breedinginsight/brapps/importer/services/processors/experiment/services/ExperimentStudyService.java new file mode 100644 index 000000000..c1bd4cd2c --- /dev/null +++ b/src/main/java/org/breedinginsight/brapps/importer/services/processors/experiment/services/ExperimentStudyService.java @@ -0,0 +1,136 @@ +/* + * See the NOTICE file distributed with this work for additional information + * regarding copyright ownership. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.breedinginsight.brapps.importer.services.processors.experiment.services; + +import io.micronaut.context.annotation.Property; +import io.reactivex.functions.Function; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.StringUtils; +import org.brapi.client.v2.model.exceptions.ApiException; +import org.brapi.v2.model.BrAPIExternalReference; +import org.brapi.v2.model.core.BrAPISeason; +import org.brapi.v2.model.core.BrAPIStudy; +import org.breedinginsight.brapi.v2.dao.BrAPISeasonDAO; +import org.breedinginsight.brapi.v2.dao.BrAPIStudyDAO; +import org.breedinginsight.brapps.importer.model.response.ImportObjectState; +import org.breedinginsight.brapps.importer.model.response.PendingImportObject; +import org.breedinginsight.brapps.importer.services.ExternalReferenceSource; +import org.breedinginsight.brapps.importer.services.processors.experiment.ExperimentUtilities; +import org.breedinginsight.model.Program; +import org.breedinginsight.utilities.Utilities; + +import javax.inject.Inject; +import javax.inject.Singleton; +import java.util.*; +import java.util.stream.Collectors; + +@Singleton +@Slf4j +public class ExperimentStudyService { + + private final BrAPISeasonDAO brAPISeasonDAO; + private final BrAPIStudyDAO brAPIStudyDAO; + + @Property(name = "brapi.server.reference-source") + private String BRAPI_REFERENCE_SOURCE; + + @Inject + public ExperimentStudyService(BrAPISeasonDAO brAPISeasonDAO, + BrAPIStudyDAO brAPIStudyDAO) { + this.brAPISeasonDAO = brAPISeasonDAO; + this.brAPIStudyDAO = brAPIStudyDAO; + } + + // TODO: used by both workflows + public PendingImportObject processAndCacheStudy( + BrAPIStudy existingStudy, + Program program, + Function getterFunction, + Map> studyMap) throws Exception { + PendingImportObject pendingStudy; + BrAPIExternalReference xref = Utilities.getExternalReference(existingStudy.getExternalReferences(), String.format("%s/%s", BRAPI_REFERENCE_SOURCE, ExternalReferenceSource.STUDIES.getName())) + 
.orElseThrow(() -> new IllegalStateException("External references wasn't found for study (dbid): " + existingStudy.getStudyDbId())); + // map season dbid to year + String seasonDbId = existingStudy.getSeasons().get(0); // It is assumed that the study has only one season + if(StringUtils.isNotBlank(seasonDbId)) { + String seasonYear = seasonDbIdToYear(seasonDbId, program.getId()); + existingStudy.setSeasons(Collections.singletonList(seasonYear)); + } + pendingStudy = new PendingImportObject<>( + ImportObjectState.EXISTING, + (BrAPIStudy) Utilities.formatBrapiObjForDisplay(existingStudy, BrAPIStudy.class, program), + UUID.fromString(xref.getReferenceId()) + ); + studyMap.put( + Utilities.removeProgramKeyAndUnknownAdditionalData(getterFunction.apply(existingStudy), program.getKey()), + pendingStudy + ); + return pendingStudy; + } + + // TODO: used by both workflows + private String seasonDbIdToYear(String seasonDbId, UUID programId) { + String year = null; + // TODO: add season objects to redis cache then just extract year from those + // removing this for now here + //if (this.seasonDbIdToYearCache.containsKey(seasonDbId)) { // get it from cache if possible + // year = this.seasonDbIdToYearCache.get(seasonDbId); + //} else { + year = seasonDbIdToYearFromDatabase(seasonDbId, programId); + // this.seasonDbIdToYearCache.put(seasonDbId, year); + //} + return year; + } + + // TODO: used by both workflows + private String seasonDbIdToYearFromDatabase(String seasonDbId, UUID programId) { + BrAPISeason season = null; + try { + season = this.brAPISeasonDAO.getSeasonById(seasonDbId, programId); + } catch (ApiException e) { + log.error(Utilities.generateApiExceptionLogMessage(e), e); + } + Integer yearInt = (season == null) ? null : season.getYear(); + return (yearInt == null) ? "" : yearInt.toString(); + } + + // TODO: used by both worflows + /** + * Fetches a list of BrAPI studies by their study database IDs for a given program. 
+ * + * This method queries the BrAPIStudyDAO to retrieve studies based on the provided study database IDs and the program. + * It ensures that all requested study database IDs are found in the result set, throwing an IllegalStateException if any are missing. + * + * @param studyDbIds a Set of Strings representing the study database IDs to fetch + * @param program the Program object representing the program context in which to fetch studies + * @return a List of BrAPIStudy objects matching the provided study database IDs + * + * @throws ApiException if there is an issue fetching the studies + * @throws IllegalStateException if any requested study database IDs are not found in the result set + */ + public List fetchStudiesByDbId(Set studyDbIds, Program program) throws ApiException { + List studies = brAPIStudyDAO.getStudiesByStudyDbId(studyDbIds, program); + if (studies.size() != studyDbIds.size()) { + List missingIds = new ArrayList<>(studyDbIds); + missingIds.removeAll(studies.stream().map(BrAPIStudy::getStudyDbId).collect(Collectors.toList())); + throw new IllegalStateException( + "Study not found for studyDbId(s): " + String.join(ExperimentUtilities.COMMA_DELIMITER, missingIds)); + } + return studies; + } + +} \ No newline at end of file diff --git a/src/main/java/org/breedinginsight/brapps/importer/services/processors/experiment/services/ExperimentTrialService.java b/src/main/java/org/breedinginsight/brapps/importer/services/processors/experiment/services/ExperimentTrialService.java new file mode 100644 index 000000000..9db927345 --- /dev/null +++ b/src/main/java/org/breedinginsight/brapps/importer/services/processors/experiment/services/ExperimentTrialService.java @@ -0,0 +1,196 @@ +/* + * See the NOTICE file distributed with this work for additional information + * regarding copyright ownership. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.breedinginsight.brapps.importer.services.processors.experiment.services; + +import io.micronaut.context.annotation.Property; +import io.micronaut.http.server.exceptions.InternalServerException; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.StringUtils; +import org.brapi.client.v2.model.exceptions.ApiException; +import org.brapi.v2.model.BrAPIExternalReference; +import org.brapi.v2.model.core.BrAPIStudy; +import org.brapi.v2.model.core.BrAPITrial; +import org.brapi.v2.model.pheno.BrAPIObservationUnit; +import org.breedinginsight.brapi.v2.dao.BrAPITrialDAO; +import org.breedinginsight.brapps.importer.model.imports.experimentObservation.ExperimentObservation; +import org.breedinginsight.brapps.importer.model.response.ImportObjectState; +import org.breedinginsight.brapps.importer.model.response.PendingImportObject; +import org.breedinginsight.brapps.importer.services.ExternalReferenceSource; +import org.breedinginsight.brapps.importer.services.processors.experiment.ExperimentUtilities; +import org.breedinginsight.model.Program; +import org.breedinginsight.utilities.Utilities; + +import javax.inject.Inject; +import javax.inject.Singleton; +import java.util.*; +import java.util.stream.Collectors; + +@Singleton +@Slf4j +public class ExperimentTrialService { + private final BrAPITrialDAO brAPITrialDAO; + + private final ExperimentStudyService studyService; + + @Property(name = "brapi.server.reference-source") + private String BRAPI_REFERENCE_SOURCE; + + @Inject + public ExperimentTrialService(BrAPITrialDAO 
brAPITrialDAO, + ExperimentStudyService studyService) { + this.brAPITrialDAO = brAPITrialDAO; + this.studyService = studyService; + } + + // TODO: also used in other workflow + /** + * Initializes trials for existing observation units. + * + * @param program The program object. + * @param observationUnitByNameNoScope A map containing observation units by name (without scope). + * @param trialByName A map containing trials by name. (will be modified in place) + * + */ + public void initializeTrialsForExistingObservationUnits(Program program, + Map> observationUnitByNameNoScope, + Map> trialByName) { + if(observationUnitByNameNoScope.size() > 0) { + Set trialDbIds = new HashSet<>(); + Set studyDbIds = new HashSet<>(); + + observationUnitByNameNoScope.values() + .forEach(pio -> { + BrAPIObservationUnit existingOu = pio.getBrAPIObject(); + if (StringUtils.isBlank(existingOu.getTrialDbId()) && StringUtils.isBlank(existingOu.getStudyDbId())) { + throw new IllegalStateException("TrialDbId and StudyDbId are not set for an existing ObservationUnit"); + } + + if (StringUtils.isNotBlank(existingOu.getTrialDbId())) { + trialDbIds.add(existingOu.getTrialDbId()); + } else { + studyDbIds.add(existingOu.getStudyDbId()); + } + }); + + //if the OU doesn't have the trialDbId set, then fetch the study to fetch the trialDbId + if(!studyDbIds.isEmpty()) { + try { + trialDbIds.addAll(fetchTrialDbidsForStudies(studyDbIds, program)); + } catch (ApiException e) { + log.error("Error fetching studies: " + Utilities.generateApiExceptionLogMessage(e), e); + throw new InternalServerException(e.toString(), e); + } + } + + try { + List trials = brAPITrialDAO.getTrialsByDbIds(trialDbIds, program); + if (trials.size() != trialDbIds.size()) { + List missingIds = new ArrayList<>(trialDbIds); + missingIds.removeAll(trials.stream().map(BrAPITrial::getTrialDbId).collect(Collectors.toList())); + throw new IllegalStateException("Trial not found for trialDbId(s): " + 
String.join(ExperimentUtilities.COMMA_DELIMITER, missingIds)); + } + + trials.forEach(trial -> processAndCacheTrial(trial, program, trialByName)); + } catch (ApiException e) { + log.error("Error fetching trials: " + Utilities.generateApiExceptionLogMessage(e), e); + throw new InternalServerException(e.toString(), e); + } + } + } + + /** + * Fetches trial DbIds for the given study DbIds by using the BrAPI studies API. + * + * @param studyDbIds The set of study DbIds for which to fetch trial DbIds. + * @param program The program associated with the studies. + * @return A set of trial DbIds corresponding to the provided study DbIds. + * @throws ApiException If there was an error while fetching the studies or if a study does not have a trial DbId. + * @throws IllegalStateException If the trial DbId is not set for an existing study. + */ + private Set fetchTrialDbidsForStudies(Set studyDbIds, Program program) throws ApiException { + Set trialDbIds = new HashSet<>(); + List studies = studyService.fetchStudiesByDbId(studyDbIds, program); + studies.forEach(study -> { + if (StringUtils.isBlank(study.getTrialDbId())) { + throw new IllegalStateException("TrialDbId is not set for an existing Study: " + study.getStudyDbId()); + } + trialDbIds.add(study.getTrialDbId()); + }); + + return trialDbIds; + } + + /** + * This method processes an existing trial, retrieves the experiment ID from the trial's external references, + * and caches the trial with the corresponding experiment ID in a map. + * + * @param existingTrial The existing BrAPITrial object to be processed and cached. + * @param program The Program object associated with the trial. + * @param trialByNameNoScope The map to cache the trial by its name without program scope. 
(will be modified in place) + * + * @throws InternalServerException + */ + private void processAndCacheTrial( + BrAPITrial existingTrial, + Program program, + Map> trialByNameNoScope) { + + //get TrialId from existingTrial + BrAPIExternalReference experimentIDRef = Utilities.getExternalReference(existingTrial.getExternalReferences(), + String.format("%s/%s", BRAPI_REFERENCE_SOURCE, ExternalReferenceSource.TRIALS.getName())) + .orElseThrow(() -> new InternalServerException("An Experiment ID was not found in any of the external references")); + UUID experimentId = UUID.fromString(experimentIDRef.getReferenceId()); + + trialByNameNoScope.put( + Utilities.removeProgramKey(existingTrial.getTrialName(), program.getKey()), + new PendingImportObject<>(ImportObjectState.EXISTING, existingTrial, experimentId)); + } + + /** + * Initializes trials by name without scope for the given program. + * + * @param program the program to initialize trials for + * @param observationUnitByNameNoScope a map of observation units by name without scope + * @param experimentImportRows a list of experiment observation rows + * @return a map of trials by name with pending import objects + * + * @throws InternalServerException + */ + public Map> initializeTrialByNameNoScope(Program program, Map> observationUnitByNameNoScope, + List experimentImportRows) { + Map> trialByName = new HashMap<>(); + + initializeTrialsForExistingObservationUnits(program, observationUnitByNameNoScope, trialByName); + + List uniqueTrialNames = experimentImportRows.stream() + .filter(row -> StringUtils.isBlank(row.getObsUnitID())) + .map(ExperimentObservation::getExpTitle) + .distinct() + .collect(Collectors.toList()); + try { + brAPITrialDAO.getTrialsByName(uniqueTrialNames, program).forEach(existingTrial -> + processAndCacheTrial(existingTrial, program, trialByName) + ); + } catch (ApiException e) { + log.error("Error fetching trials: " + Utilities.generateApiExceptionLogMessage(e), e); + throw new 
InternalServerException(e.toString(), e); + } + + return trialByName; + } + +} \ No newline at end of file diff --git a/src/main/java/org/breedinginsight/brapps/importer/services/processors/experiment/services/ExperimentValidateService.java b/src/main/java/org/breedinginsight/brapps/importer/services/processors/experiment/services/ExperimentValidateService.java new file mode 100644 index 000000000..d12ff2509 --- /dev/null +++ b/src/main/java/org/breedinginsight/brapps/importer/services/processors/experiment/services/ExperimentValidateService.java @@ -0,0 +1,118 @@ +/* + * See the NOTICE file distributed with this work for additional information + * regarding copyright ownership. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.breedinginsight.brapps.importer.services.processors.experiment.services; + +import io.micronaut.http.HttpStatus; +import io.micronaut.http.exceptions.HttpStatusException; +import io.micronaut.http.server.exceptions.InternalServerException; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.StringUtils; +import org.breedinginsight.brapps.importer.services.FileMappingUtil; +import org.breedinginsight.brapps.importer.services.processors.experiment.DynamicColumnParser.DynamicColumnParseResult; +import org.breedinginsight.dao.db.tables.pojos.TraitEntity; +import org.breedinginsight.model.Trait; +import org.breedinginsight.services.OntologyService; +import org.breedinginsight.services.exceptions.DoesNotExistException; +import tech.tablesaw.columns.Column; + +import javax.inject.Inject; +import javax.inject.Singleton; +import java.util.Collection; +import java.util.List; +import java.util.Set; +import java.util.UUID; +import java.util.stream.Collectors; + +import static org.breedinginsight.brapps.importer.services.processors.experiment.ExperimentUtilities.COMMA_DELIMITER; +import static org.breedinginsight.brapps.importer.services.processors.experiment.ExperimentUtilities.TIMESTAMP_REGEX; + +@Singleton +@Slf4j +public class ExperimentValidateService { + + private final OntologyService ontologyService; + private final FileMappingUtil fileMappingUtil; + + @Inject + public ExperimentValidateService(OntologyService ontologyService, FileMappingUtil fileMappingUtil) { + this.ontologyService = ontologyService; + this.fileMappingUtil = fileMappingUtil; + } + + /** + * Verifies traits based on program ID and dynamic column parse result. + * + * @param programId The UUID of the program. + * @param cols The dynamic column parse result object containing phenotype and timestamp columns. + * @return The list of verified traits. + * @throws HttpStatusException If ontology terms are not found or timestamp columns lack corresponding phenotype columns. 
+ */ + public List verifyTraits(UUID programId, DynamicColumnParseResult cols) { + Set varNames = cols.getPhenotypeCols().stream() + .map(Column::name) + .collect(Collectors.toSet()); + Set tsNames = cols.getTimestampCols().stream() + .map(Column::name) + .collect(Collectors.toSet()); + + // filter out just traits specified in file + List filteredTraits = fetchFileTraits(programId, varNames); + + // check that all specified ontology terms were found + if (filteredTraits.size() != varNames.size()) { + Set returnedVarNames = filteredTraits.stream() + .map(TraitEntity::getObservationVariableName) + .collect(Collectors.toSet()); + List differences = varNames.stream() + .filter(var -> !returnedVarNames.contains(var)) + .collect(Collectors.toList()); + //TODO convert this to a ValidationError + throw new HttpStatusException(HttpStatus.UNPROCESSABLE_ENTITY, + "Ontology term(s) not found: " + String.join(COMMA_DELIMITER, differences)); + } + + // Check that each ts column corresponds to a phenotype column + List unmatchedTimestamps = tsNames.stream() + .filter(e -> !(varNames.contains(e.replaceFirst(TIMESTAMP_REGEX, StringUtils.EMPTY)))) + .collect(Collectors.toList()); + if (unmatchedTimestamps.size() > 0) { + //TODO convert this to a ValidationError + throw new HttpStatusException(HttpStatus.UNPROCESSABLE_ENTITY, + "Timestamp column(s) lack corresponding phenotype column(s): " + String.join(COMMA_DELIMITER, unmatchedTimestamps)); + } + + // sort the verified traits to match the order of the trait columns + List phenotypeColNames = cols.getPhenotypeCols().stream().map(Column::name).collect(Collectors.toList()); + return fileMappingUtil.sortByField(phenotypeColNames, filteredTraits, TraitEntity::getObservationVariableName); + } + + private List fetchFileTraits(UUID programId, Collection varNames) { + try { + Collection upperCaseVarNames = varNames.stream().map(String::toUpperCase).collect(Collectors.toList()); + List traits = ontologyService.getTraitsByProgramId(programId, 
true); + // filter out just traits specified in file + return traits.stream() + .filter(e -> upperCaseVarNames.contains(e.getObservationVariableName().toUpperCase())) + .collect(Collectors.toList()); + } catch (DoesNotExistException e) { + log.error(e.getMessage(), e); + throw new InternalServerException(e.toString(), e); + } + } + + +} diff --git a/src/test/java/org/breedinginsight/brapps/importer/ExperimentFileImportTest.java b/src/test/java/org/breedinginsight/brapps/importer/ExperimentFileImportTest.java index 48bffd4c2..fbb02435e 100644 --- a/src/test/java/org/breedinginsight/brapps/importer/ExperimentFileImportTest.java +++ b/src/test/java/org/breedinginsight/brapps/importer/ExperimentFileImportTest.java @@ -24,6 +24,7 @@ import io.micronaut.http.HttpStatus; import io.micronaut.http.client.RxHttpClient; import io.micronaut.http.client.annotation.Client; +import io.micronaut.http.netty.cookies.NettyCookie; import io.micronaut.test.extensions.junit5.annotation.MicronautTest; import io.reactivex.Flowable; import lombok.SneakyThrows; @@ -77,6 +78,7 @@ import java.util.stream.Collectors; import java.util.stream.StreamSupport; +import static io.micronaut.http.HttpRequest.GET; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.junit.jupiter.api.Assertions.*; @@ -142,9 +144,11 @@ public class ExperimentFileImportTest extends BrAPITest { private BrAPISeasonDAO seasonDAO; private Gson gson = new GsonBuilder().registerTypeAdapter(OffsetDateTime.class, (JsonDeserializer) - (json, type, context) -> OffsetDateTime.parse(json.getAsString())) - .registerTypeAdapter(BrAPIPagination.class, new PaginationTypeAdapter()) - .create(); + (json, type, context) -> OffsetDateTime.parse(json.getAsString())) + .registerTypeAdapter(BrAPIPagination.class, new PaginationTypeAdapter()) + .create(); + + private String newExperimentWorkflowId; @BeforeAll public void setup() { @@ -153,7 +157,27 @@ public void setup() { 
mappingId = (String) setupObjects.get("mappingId"); testUser = (BiUserEntity) setupObjects.get("testUser"); securityFp = (FannyPack) setupObjects.get("securityFp"); + newExperimentWorkflowId = getNewExperimentWorkflowId(); + } + + /** + * TODO: assumes new workflow is first in list, doesn't look at position property, would be more robust to + * look at that instead of assuming order + * @return + */ + public String getNewExperimentWorkflowId() { + // GET /import/mappings{?importName} + Flowable> call = client.exchange( + GET("/import/mappings/"+mappingId+"/workflows").cookie(new NettyCookie("phylo-token", "test-registered-user")), String.class + ); + HttpResponse response = call.blockingFirst(); + JsonObject result = JsonParser.parseString(response.body()).getAsJsonObject().getAsJsonObject("result"); + + return JsonParser.parseString(response.body()).getAsJsonObject() + .getAsJsonObject("result") + .getAsJsonArray("data") + .get(0).getAsJsonObject().get("id").getAsString(); } /* @@ -193,16 +217,20 @@ public void importNewExpNewLocNoObsSuccess() { validRow.put(Columns.COLUMN, "1"); validRow.put(Columns.TREATMENT_FACTORS, "Test treatment factors"); - Flowable> call = importTestUtils.uploadDataFile(importTestUtils.writeExperimentDataToFile(List.of(validRow), null), null, true, client, program, mappingId); - HttpResponse response = call.blockingFirst(); - assertEquals(HttpStatus.ACCEPTED, response.getStatus()); - String importId = JsonParser.parseString(response.body()).getAsJsonObject().getAsJsonObject("result").get("importId").getAsString(); + //String workflowId = "new-experiment"; + JsonObject uploadResponse = importTestUtils.uploadAndFetchWorkflow(importTestUtils.writeExperimentDataToFile(List.of(validRow), null), null, true, client, program, mappingId, newExperimentWorkflowId); - HttpResponse upload = importTestUtils.getUploadedFile(importId, client, program, mappingId); - JsonObject result = 
JsonParser.parseString(upload.body()).getAsJsonObject().getAsJsonObject("result"); - assertEquals(200, result.getAsJsonObject("progress").get("statuscode").getAsInt(), "Returned data: " + result); + // TODO: remove this + //Flowable> call = importTestUtils.uploadDataFile(importTestUtils.writeExperimentDataToFile(List.of(validRow), null), null, true, client, program, mappingId); + //HttpResponse response = call.blockingFirst(); + //assertEquals(HttpStatus.ACCEPTED, response.getStatus()); + //String importId = JsonParser.parseString(response.body()).getAsJsonObject().getAsJsonObject("result").get("importId").getAsString(); - JsonArray previewRows = result.get("preview").getAsJsonObject().get("rows").getAsJsonArray(); + //HttpResponse upload = importTestUtils.getUploadedFile(importId, client, program, mappingId); + //JsonObject result = JsonParser.parseString(upload.body()).getAsJsonObject().getAsJsonObject("result"); + //assertEquals(200, result.getAsJsonObject("progress").get("statuscode").getAsInt(), "Returned data: " + result); + + JsonArray previewRows = uploadResponse.get("preview").getAsJsonObject().get("rows").getAsJsonArray(); assertEquals(1, previewRows.size()); JsonObject row = previewRows.get(0).getAsJsonObject(); @@ -251,16 +279,18 @@ public void importNewExpMultiNewEnvSuccess() { secondEnv.put(Columns.COLUMN, "1"); secondEnv.put(Columns.TREATMENT_FACTORS, "Test treatment factors"); - Flowable> call = importTestUtils.uploadDataFile(importTestUtils.writeExperimentDataToFile(List.of(firstEnv, secondEnv), null), null, true, client, program, mappingId); - HttpResponse response = call.blockingFirst(); - assertEquals(HttpStatus.ACCEPTED, response.getStatus()); - String importId = JsonParser.parseString(response.body()).getAsJsonObject().getAsJsonObject("result").get("importId").getAsString(); + JsonObject uploadResponse = importTestUtils.uploadAndFetchWorkflow(importTestUtils.writeExperimentDataToFile(List.of(firstEnv, secondEnv), null), null, true, client, 
program, mappingId, newExperimentWorkflowId); - HttpResponse upload = importTestUtils.getUploadedFile(importId, client, program, mappingId); - JsonObject result = JsonParser.parseString(upload.body()).getAsJsonObject().getAsJsonObject("result"); - assertEquals(200, result.getAsJsonObject("progress").get("statuscode").getAsInt(), "Returned data: " + result); + //Flowable> call = importTestUtils.uploadDataFile(importTestUtils.writeExperimentDataToFile(List.of(firstEnv, secondEnv), null), null, true, client, program, mappingId); + //HttpResponse response = call.blockingFirst(); + //assertEquals(HttpStatus.ACCEPTED, response.getStatus()); + //String importId = JsonParser.parseString(response.body()).getAsJsonObject().getAsJsonObject("result").get("importId").getAsString(); - JsonArray previewRows = result.get("preview").getAsJsonObject().get("rows").getAsJsonArray(); + //HttpResponse upload = importTestUtils.getUploadedFile(importId, client, program, mappingId); + //JsonObject result = JsonParser.parseString(upload.body()).getAsJsonObject().getAsJsonObject("result"); + //assertEquals(200, result.getAsJsonObject("progress").get("statuscode").getAsInt(), "Returned data: " + result); + + JsonArray previewRows = uploadResponse.get("preview").getAsJsonObject().get("rows").getAsJsonArray(); assertEquals(2, previewRows.size()); JsonObject firstRow = previewRows.get(0).getAsJsonObject(); @@ -298,7 +328,8 @@ public void importExistingExpAndEnvErrorMessage() { newExp.put(Columns.ROW, "1"); newExp.put(Columns.COLUMN, "1"); - JsonObject expResult = importTestUtils.uploadAndFetch(importTestUtils.writeExperimentDataToFile(List.of(newExp), null), null, true, client, program, mappingId); + JsonObject expResult = importTestUtils.uploadAndFetchWorkflow(importTestUtils.writeExperimentDataToFile(List.of(newExp), null), null, true, client, program, mappingId, newExperimentWorkflowId); + //JsonObject expResult = 
importTestUtils.uploadAndFetch(importTestUtils.writeExperimentDataToFile(List.of(newExp), null), null, true, client, program, mappingId); Map dupExp = new HashMap<>(); dupExp.put(Columns.GERMPLASM_GID, "1"); @@ -315,16 +346,17 @@ public void importExistingExpAndEnvErrorMessage() { dupExp.put(Columns.ROW, "1"); dupExp.put(Columns.COLUMN, "1"); - Flowable> call = importTestUtils.uploadDataFile(importTestUtils.writeExperimentDataToFile(List.of(dupExp), null), null, false, client, program, mappingId); - HttpResponse response = call.blockingFirst(); - assertEquals(HttpStatus.ACCEPTED, response.getStatus()); + expResult = importTestUtils.uploadAndFetchWorkflowNoStatusCheck(importTestUtils.writeExperimentDataToFile(List.of(dupExp), null), null, true, client, program, mappingId, newExperimentWorkflowId); + //Flowable> call = importTestUtils.uploadDataFile(importTestUtils.writeExperimentDataToFile(List.of(dupExp), null), null, false, client, program, mappingId); + //HttpResponse response = call.blockingFirst(); + //assertEquals(HttpStatus.ACCEPTED, response.getStatus()); - String importId = JsonParser.parseString(response.body()).getAsJsonObject().getAsJsonObject("result").get("importId").getAsString(); + //String importId = JsonParser.parseString(response.body()).getAsJsonObject().getAsJsonObject("result").get("importId").getAsString(); - HttpResponse upload = importTestUtils.getUploadedFile(importId, client, program, mappingId); - JsonObject result = JsonParser.parseString(upload.body()).getAsJsonObject().getAsJsonObject("result"); - assertEquals(422, result.getAsJsonObject("progress").get("statuscode").getAsInt(), "Returned data: " + result); - assertTrue(result.getAsJsonObject("progress").get("message").getAsString().startsWith("Experiment Title already exists")); + //HttpResponse upload = importTestUtils.getUploadedFile(importId, client, program, mappingId); + //JsonObject result = JsonParser.parseString(upload.body()).getAsJsonObject().getAsJsonObject("result"); + 
assertEquals(422, expResult.getAsJsonObject("progress").get("statuscode").getAsInt(), "Returned data: " + expResult); + assertTrue(expResult.getAsJsonObject("progress").get("message").getAsString().startsWith("Experiment Title already exists")); } @Test @@ -347,9 +379,10 @@ public void importNewEnvNoObsSuccess() { newEnv.put(Columns.ROW, "1"); newEnv.put(Columns.COLUMN, "1"); - JsonObject result = importTestUtils.uploadAndFetch(importTestUtils.writeExperimentDataToFile(List.of(newEnv), null), null, true, client, program, mappingId); + //JsonObject result = importTestUtils.uploadAndFetch(importTestUtils.writeExperimentDataToFile(List.of(newEnv), null), null, true, client, program, mappingId); + JsonObject uploadResponse = importTestUtils.uploadAndFetchWorkflow(importTestUtils.writeExperimentDataToFile(List.of(newEnv), null), null, true, client, program, mappingId, newExperimentWorkflowId); - JsonArray previewRows = result.get("preview").getAsJsonObject().get("rows").getAsJsonArray(); + JsonArray previewRows = uploadResponse.get("preview").getAsJsonObject().get("rows").getAsJsonArray(); assertEquals(1, previewRows.size()); JsonObject row = previewRows.get(0).getAsJsonObject(); @@ -381,43 +414,53 @@ public void verifyMissingDataThrowsError(boolean commit) { Map noGID = new HashMap<>(base); noGID.remove(Columns.GERMPLASM_GID); - uploadAndVerifyFailure(program, importTestUtils.writeExperimentDataToFile(List.of(noGID), null), Columns.GERMPLASM_GID, commit); + //uploadAndVerifyFailure(program, importTestUtils.writeExperimentDataToFile(List.of(noGID), null), Columns.GERMPLASM_GID, commit); + uploadAndVerifyWorkflowFailure(program, importTestUtils.writeExperimentDataToFile(List.of(noGID), null), Columns.GERMPLASM_GID, commit, newExperimentWorkflowId); Map noExpTitle = new HashMap<>(base); noExpTitle.remove(Columns.EXP_TITLE); - uploadAndVerifyFailure(program, importTestUtils.writeExperimentDataToFile(List.of(noExpTitle), null), Columns.EXP_TITLE, commit); + 
//uploadAndVerifyFailure(program, importTestUtils.writeExperimentDataToFile(List.of(noExpTitle), null), Columns.EXP_TITLE, commit); + uploadAndVerifyWorkflowFailure(program, importTestUtils.writeExperimentDataToFile(List.of(noExpTitle), null), Columns.EXP_TITLE, commit, newExperimentWorkflowId); Map noExpUnit = new HashMap<>(base); noExpUnit.remove(Columns.EXP_UNIT); - uploadAndVerifyFailure(program, importTestUtils.writeExperimentDataToFile(List.of(noExpUnit), null), Columns.EXP_UNIT, commit); + //uploadAndVerifyFailure(program, importTestUtils.writeExperimentDataToFile(List.of(noExpUnit), null), Columns.EXP_UNIT, commit); + uploadAndVerifyWorkflowFailure(program, importTestUtils.writeExperimentDataToFile(List.of(noExpUnit), null), Columns.EXP_UNIT, commit, newExperimentWorkflowId); Map noExpType = new HashMap<>(base); noExpType.remove(Columns.EXP_TYPE); - uploadAndVerifyFailure(program, importTestUtils.writeExperimentDataToFile(List.of(noExpType), null), Columns.EXP_TYPE, commit); + //uploadAndVerifyFailure(program, importTestUtils.writeExperimentDataToFile(List.of(noExpType), null), Columns.EXP_TYPE, commit); + uploadAndVerifyWorkflowFailure(program, importTestUtils.writeExperimentDataToFile(List.of(noExpType), null), Columns.EXP_TYPE, commit, newExperimentWorkflowId); Map noEnv = new HashMap<>(base); noEnv.remove(Columns.ENV); - uploadAndVerifyFailure(program, importTestUtils.writeExperimentDataToFile(List.of(noEnv), null), Columns.ENV, commit); + //uploadAndVerifyFailure(program, importTestUtils.writeExperimentDataToFile(List.of(noEnv), null), Columns.ENV, commit); + uploadAndVerifyWorkflowFailure(program, importTestUtils.writeExperimentDataToFile(List.of(noEnv), null), Columns.ENV, commit, newExperimentWorkflowId); Map noEnvLoc = new HashMap<>(base); noEnvLoc.remove(Columns.ENV_LOCATION); - uploadAndVerifyFailure(program, importTestUtils.writeExperimentDataToFile(List.of(noEnvLoc), null), Columns.ENV_LOCATION, commit); + //uploadAndVerifyFailure(program, 
importTestUtils.writeExperimentDataToFile(List.of(noEnvLoc), null), Columns.ENV_LOCATION, commit); + uploadAndVerifyWorkflowFailure(program, importTestUtils.writeExperimentDataToFile(List.of(noEnvLoc), null), Columns.ENV_LOCATION, commit, newExperimentWorkflowId); Map noExpUnitId = new HashMap<>(base); noExpUnitId.remove(Columns.EXP_UNIT_ID); - uploadAndVerifyFailure(program, importTestUtils.writeExperimentDataToFile(List.of(noExpUnitId), null), Columns.EXP_UNIT_ID, commit); + //uploadAndVerifyFailure(program, importTestUtils.writeExperimentDataToFile(List.of(noExpUnitId), null), Columns.EXP_UNIT_ID, commit); + uploadAndVerifyWorkflowFailure(program, importTestUtils.writeExperimentDataToFile(List.of(noExpUnitId), null), Columns.EXP_UNIT_ID, commit, newExperimentWorkflowId); Map noExpRep = new HashMap<>(base); noExpRep.remove(Columns.REP_NUM); - uploadAndVerifyFailure(program, importTestUtils.writeExperimentDataToFile(List.of(noExpRep), null), Columns.REP_NUM, commit); + //uploadAndVerifyFailure(program, importTestUtils.writeExperimentDataToFile(List.of(noExpRep), null), Columns.REP_NUM, commit); + uploadAndVerifyWorkflowFailure(program, importTestUtils.writeExperimentDataToFile(List.of(noExpRep), null), Columns.REP_NUM, commit, newExperimentWorkflowId); Map noExpBlock = new HashMap<>(base); noExpBlock.remove(Columns.BLOCK_NUM); - uploadAndVerifyFailure(program, importTestUtils.writeExperimentDataToFile(List.of(noExpBlock), null), Columns.BLOCK_NUM, commit); + //uploadAndVerifyFailure(program, importTestUtils.writeExperimentDataToFile(List.of(noExpBlock), null), Columns.BLOCK_NUM, commit); + uploadAndVerifyWorkflowFailure(program, importTestUtils.writeExperimentDataToFile(List.of(noExpBlock), null), Columns.BLOCK_NUM, commit, newExperimentWorkflowId); Map noEnvYear = new HashMap<>(base); noEnvYear.remove(Columns.ENV_YEAR); - uploadAndVerifyFailure(program, importTestUtils.writeExperimentDataToFile(List.of(noEnvYear), null), Columns.ENV_YEAR, commit); + 
//uploadAndVerifyFailure(program, importTestUtils.writeExperimentDataToFile(List.of(noEnvYear), null), Columns.ENV_YEAR, commit); + uploadAndVerifyWorkflowFailure(program, importTestUtils.writeExperimentDataToFile(List.of(noEnvYear), null), Columns.ENV_YEAR, commit, newExperimentWorkflowId); } @Test @@ -441,7 +484,8 @@ public void importNewExpWithObsVar() { newExp.put(Columns.COLUMN, "1"); newExp.put(traits.get(0).getObservationVariableName(), null); - JsonObject result = importTestUtils.uploadAndFetch(importTestUtils.writeExperimentDataToFile(List.of(newExp), traits), null, true, client, program, mappingId); + //JsonObject result = importTestUtils.uploadAndFetch(importTestUtils.writeExperimentDataToFile(List.of(newExp), traits), null, true, client, program, mappingId); + JsonObject result = importTestUtils.uploadAndFetchWorkflow(importTestUtils.writeExperimentDataToFile(List.of(newExp), null), null, true, client, program, mappingId, newExperimentWorkflowId); JsonArray previewRows = result.get("preview").getAsJsonObject().get("rows").getAsJsonArray(); assertEquals(1, previewRows.size()); @@ -491,7 +535,9 @@ public void verifyDiffYearSameEnvThrowsError(boolean commit) { row.put(Columns.BLOCK_NUM, "2"); rows.add(row); - uploadAndVerifyFailure(program, importTestUtils.writeExperimentDataToFile(rows, null), Columns.ENV_YEAR, commit); + //uploadAndVerifyFailure(program, importTestUtils.writeExperimentDataToFile(rows, null), Columns.ENV_YEAR, commit); + uploadAndVerifyWorkflowFailure(program, importTestUtils.writeExperimentDataToFile(rows, null), Columns.ENV_YEAR, commit, newExperimentWorkflowId); + } @ParameterizedTest @@ -529,7 +575,8 @@ public void verifyDiffLocSameEnvThrowsError(boolean commit) { row.put(Columns.BLOCK_NUM, "2"); rows.add(row); - uploadAndVerifyFailure(program, importTestUtils.writeExperimentDataToFile(rows, null), Columns.ENV_LOCATION, commit); + //uploadAndVerifyFailure(program, importTestUtils.writeExperimentDataToFile(rows, null), 
Columns.ENV_LOCATION, commit); + uploadAndVerifyWorkflowFailure(program, importTestUtils.writeExperimentDataToFile(rows, null), Columns.ENV_LOCATION, commit, newExperimentWorkflowId); } @ParameterizedTest @@ -554,7 +601,8 @@ public void importNewExpWithObs(boolean commit) { newExp.put(Columns.COLUMN, "1"); newExp.put(traits.get(0).getObservationVariableName(), "1"); - JsonObject result = importTestUtils.uploadAndFetch(importTestUtils.writeExperimentDataToFile(List.of(newExp), traits), null, commit, client, program, mappingId); + //JsonObject result = importTestUtils.uploadAndFetch(importTestUtils.writeExperimentDataToFile(List.of(newExp), traits), null, commit, client, program, mappingId); + JsonObject result = importTestUtils.uploadAndFetchWorkflow(importTestUtils.writeExperimentDataToFile(List.of(newExp), traits), null, true, client, program, mappingId, newExperimentWorkflowId); JsonArray previewRows = result.get("preview").getAsJsonObject().get("rows").getAsJsonArray(); assertEquals(1, previewRows.size()); @@ -594,7 +642,9 @@ public void verifyFailureImportNewExpWithInvalidObs(boolean commit) { newExp.put(Columns.COLUMN, "1"); newExp.put(traits.get(0).getObservationVariableName(), "Red"); - uploadAndVerifyFailure(program, importTestUtils.writeExperimentDataToFile(List.of(newExp), traits), traits.get(0).getObservationVariableName(), commit); + //uploadAndVerifyFailure(program, importTestUtils.writeExperimentDataToFile(List.of(newExp), traits), traits.get(0).getObservationVariableName(), commit); + uploadAndVerifyWorkflowFailure(program, importTestUtils.writeExperimentDataToFile(List.of(newExp), traits), traits.get(0).getObservationVariableName(), commit, newExperimentWorkflowId); + } @ParameterizedTest @@ -617,21 +667,24 @@ public void verifyFailureNewOuExistingEnv(boolean commit) { newExp.put(Columns.ROW, "1"); newExp.put(Columns.COLUMN, "1"); - importTestUtils.uploadAndFetch(importTestUtils.writeExperimentDataToFile(List.of(newExp), null), null, true, client, 
program, mappingId); + //importTestUtils.uploadAndFetch(importTestUtils.writeExperimentDataToFile(List.of(newExp), null), null, true, client, program, mappingId); + importTestUtils.uploadAndFetchWorkflow(importTestUtils.writeExperimentDataToFile(List.of(newExp), null), null, true, client, program, mappingId, newExperimentWorkflowId); Map newOU = new HashMap<>(newExp); newOU.put(Columns.EXP_UNIT_ID, "a-2"); newOU.put(Columns.ROW, "1"); newOU.put(Columns.COLUMN, "2"); - Flowable> call = importTestUtils.uploadDataFile(importTestUtils.writeExperimentDataToFile(List.of(newOU), null), null, commit, client, program, mappingId); - HttpResponse response = call.blockingFirst(); - assertEquals(HttpStatus.ACCEPTED, response.getStatus()); + //Flowable> call = importTestUtils.uploadDataFile(importTestUtils.writeExperimentDataToFile(List.of(newOU), null), null, commit, client, program, mappingId); + //HttpResponse response = call.blockingFirst(); + //assertEquals(HttpStatus.ACCEPTED, response.getStatus()); - String importId = JsonParser.parseString(response.body()).getAsJsonObject().getAsJsonObject("result").get("importId").getAsString(); + //String importId = JsonParser.parseString(response.body()).getAsJsonObject().getAsJsonObject("result").get("importId").getAsString(); - HttpResponse upload = importTestUtils.getUploadedFile(importId, client, program, mappingId); - JsonObject result = JsonParser.parseString(upload.body()).getAsJsonObject().getAsJsonObject("result"); + //HttpResponse upload = importTestUtils.getUploadedFile(importId, client, program, mappingId); + //JsonObject result = JsonParser.parseString(upload.body()).getAsJsonObject().getAsJsonObject("result"); + + JsonObject result = importTestUtils.uploadAndFetchWorkflowNoStatusCheck(importTestUtils.writeExperimentDataToFile(List.of(newOU), null), null, true, client, program, mappingId, newExperimentWorkflowId); assertEquals(422, result.getAsJsonObject("progress").get("statuscode").getAsInt(), "Returned data: " + 
result); assertTrue(result.getAsJsonObject("progress").get("message").getAsString().startsWith("Experiment Title already exists")); @@ -1022,6 +1075,7 @@ public void verifyFailureImportNewObsExistingOuWithExistingObs(boolean commit) { - a new experiment is created after the first experiment - verify the second experiment gets created successfully */ + //TODO: this one @Test @SneakyThrows public void importSecondExpAfterFirstExpWithObs() { @@ -1043,7 +1097,8 @@ public void importSecondExpAfterFirstExpWithObs() { newExpA.put(Columns.COLUMN, "1"); newExpA.put(traits.get(0).getObservationVariableName(), "1"); - JsonObject resultA = importTestUtils.uploadAndFetch(importTestUtils.writeExperimentDataToFile(List.of(newExpA), traits), null, true, client, program, mappingId); + //JsonObject resultA = importTestUtils.uploadAndFetch(importTestUtils.writeExperimentDataToFile(List.of(newExpA), traits), null, true, client, program, mappingId); + JsonObject resultA = importTestUtils.uploadAndFetchWorkflow(importTestUtils.writeExperimentDataToFile(List.of(newExpA), traits), null, true, client, program, mappingId, newExperimentWorkflowId); JsonArray previewRowsA = resultA.get("preview").getAsJsonObject().get("rows").getAsJsonArray(); assertEquals(1, previewRowsA.size()); @@ -1071,7 +1126,8 @@ public void importSecondExpAfterFirstExpWithObs() { newExpB.put(Columns.COLUMN, "1"); newExpB.put(traits.get(0).getObservationVariableName(), "1"); - JsonObject resultB = importTestUtils.uploadAndFetch(importTestUtils.writeExperimentDataToFile(List.of(newExpB), traits), null, true, client, program, mappingId); + //JsonObject resultB = importTestUtils.uploadAndFetch(importTestUtils.writeExperimentDataToFile(List.of(newExpB), traits), null, true, client, program, mappingId); + JsonObject resultB = importTestUtils.uploadAndFetchWorkflow(importTestUtils.writeExperimentDataToFile(List.of(newExpB), traits), null, true, client, program, mappingId, newExperimentWorkflowId); JsonArray previewRowsB = 
resultB.get("preview").getAsJsonObject().get("rows").getAsJsonArray(); assertEquals(1, previewRowsB.size()); @@ -1337,10 +1393,10 @@ private Map assertRowSaved(Map expected, Program assertEquals(expected.get(Columns.GERMPLASM_GID), germplasm.getAccessionNumber()); if(expected.containsKey(Columns.TEST_CHECK) && StringUtils.isNotBlank((String)expected.get(Columns.TEST_CHECK))) { assertEquals(expected.get(Columns.TEST_CHECK), - ou.getObservationUnitPosition() - .getEntryType() - .name() - .substring(0, 1)); + ou.getObservationUnitPosition() + .getEntryType() + .name() + .substring(0, 1)); } assertEquals(expected.get(Columns.EXP_TITLE), Utilities.removeProgramKey(trial.getTrialName(), program.getKey())); assertEquals(expected.get(Columns.EXP_TITLE), Utilities.removeProgramKey(study.getTrialName(), program.getKey())); @@ -1433,10 +1489,10 @@ private Map assertValidPreviewRow(Map expected, if(traits != null) { assertNotNull(actual.get("observations")); observations = StreamSupport.stream(actual.getAsJsonArray("observations") - .spliterator(), false) - .map(obs -> gson.fromJson(obs.getAsJsonObject() - .getAsJsonObject("brAPIObject"), BrAPIObservation.class)) - .collect(Collectors.toList()); + .spliterator(), false) + .map(obs -> gson.fromJson(obs.getAsJsonObject() + .getAsJsonObject("brAPIObject"), BrAPIObservation.class)) + .collect(Collectors.toList()); ret.put("observations", observations); } @@ -1444,10 +1500,10 @@ private Map assertValidPreviewRow(Map expected, assertEquals(expected.get(Columns.GERMPLASM_GID), germplasm.getAccessionNumber()); if(expected.containsKey(Columns.TEST_CHECK) && StringUtils.isNotBlank((String)expected.get(Columns.TEST_CHECK))) { assertEquals(expected.get(Columns.TEST_CHECK), - ou.getObservationUnitPosition() - .getEntryType() - .name() - .substring(0, 1)); + ou.getObservationUnitPosition() + .getEntryType() + .name() + .substring(0, 1)); } assertEquals(expected.get(Columns.EXP_TITLE), Utilities.removeProgramKey(trial.getTrialName(), 
program.getKey())); assertEquals(expected.get(Columns.EXP_TITLE), Utilities.removeProgramKey(study.getTrialName(), program.getKey())); @@ -1518,8 +1574,8 @@ private String yearToSeasonDbId(String year, UUID programId) throws ApiException for (BrAPISeason season : seasons) { if (null == season.getSeasonName() || season.getSeasonName() - .isBlank() || season.getSeasonName() - .equals(year)) { + .isBlank() || season.getSeasonName() + .equals(year)) { return season.getSeasonDbId(); } } @@ -1530,17 +1586,17 @@ private String yearToSeasonDbId(String year, UUID programId) throws ApiException private Program createProgram(String name, String abbv, String key, String referenceSource, List germplasm, List traits) throws ApiException, DoesNotExistException, ValidatorException, BadRequestException { SpeciesEntity validSpecies = speciesDAO.findAll().get(0); SpeciesRequest speciesRequest = SpeciesRequest.builder() - .commonName(validSpecies.getCommonName()) - .id(validSpecies.getId()) - .build(); + .commonName(validSpecies.getCommonName()) + .id(validSpecies.getId()) + .build(); ProgramRequest programRequest1 = ProgramRequest.builder() - .name(name) - .abbreviation(abbv) - .documentationUrl("localhost:8080") - .objective("To test things") - .species(speciesRequest) - .key(key) - .build(); + .name(name) + .abbreviation(abbv) + .documentationUrl("localhost:8080") + .objective("To test things") + .species(speciesRequest) + .key(key) + .build(); TestUtils.insertAndFetchTestProgram(gson, client, programRequest1); @@ -1609,6 +1665,33 @@ private JsonObject uploadAndVerifyFailure(Program program, File file, String exp JsonObject result = JsonParser.parseString(upload.body()).getAsJsonObject().getAsJsonObject("result"); assertEquals(422, result.getAsJsonObject("progress").get("statuscode").getAsInt(), "Returned data: " + result); + JsonArray rowErrors = result.getAsJsonObject("progress").getAsJsonArray("rowErrors"); + assertEquals(1, rowErrors.size()); + JsonArray fieldErrors = 
rowErrors.get(0).getAsJsonObject().getAsJsonArray("errors"); + assertEquals(1, fieldErrors.size()); + JsonObject error = fieldErrors.get(0).getAsJsonObject(); + assertEquals(expectedColumnError, error.get("field").getAsString()); + assertEquals(422, error.get("httpStatusCode").getAsInt()); + + return result; + } + + private JsonObject uploadAndVerifyWorkflowFailure(Program program, File file, String expectedColumnError, boolean commit, String workflowId) throws InterruptedException, IOException { + + //Flowable> call = importTestUtils.uploadDataFile(file, null, true, client, program, mappingId); + //HttpResponse response = call.blockingFirst(); + //assertEquals(HttpStatus.ACCEPTED, response.getStatus()); + + //String importId = JsonParser.parseString(response.body()).getAsJsonObject().getAsJsonObject("result").get("importId").getAsString(); + + //HttpResponse upload = importTestUtils.getUploadedFile(importId, client, program, mappingId); + + JsonObject result = importTestUtils.uploadAndFetchWorkflowNoStatusCheck(file, null, true, client, program, mappingId, newExperimentWorkflowId); + //JsonObject result = JsonParser.parseString(upload).getAsJsonObject().getAsJsonObject("result"); + assertEquals(422, result.getAsJsonObject("progress").get("statuscode").getAsInt(), "Returned data: " + result); + + + JsonArray rowErrors = result.getAsJsonObject("progress").getAsJsonArray("rowErrors"); assertEquals(1, rowErrors.size()); JsonArray fieldErrors = rowErrors.get(0).getAsJsonObject().getAsJsonArray("errors"); diff --git a/src/test/java/org/breedinginsight/brapps/importer/ImportTestUtils.java b/src/test/java/org/breedinginsight/brapps/importer/ImportTestUtils.java index 12b79ac15..f5dd37f51 100644 --- a/src/test/java/org/breedinginsight/brapps/importer/ImportTestUtils.java +++ b/src/test/java/org/breedinginsight/brapps/importer/ImportTestUtils.java @@ -97,6 +97,38 @@ public Flowable> uploadDataFile(File file, Map> uploadWorkflowDataFile(File file, + Map userData, + Boolean 
commit, + RxHttpClient client, + Program program, + String mappingId, + String workflowId) { + + MultipartBody requestBody = MultipartBody.builder().addPart("file", file).build(); + + // Upload file + String uploadUrl = String.format("/programs/%s/import/mappings/%s/data", program.getId(), mappingId); + Flowable> call = client.exchange( + POST(uploadUrl, requestBody) + .contentType(MediaType.MULTIPART_FORM_DATA_TYPE) + .cookie(new NettyCookie("phylo-token", "test-registered-user")), String.class + ); + HttpResponse response = call.blockingFirst(); + assertEquals(HttpStatus.OK, response.getStatus()); + JsonObject result = JsonParser.parseString(response.body()).getAsJsonObject().getAsJsonObject("result"); + String importId = result.get("importId").getAsString(); + + // Process data + String url = String.format("/programs/%s/import/mappings/%s/workflows/%s/data/%s/%s", program.getId(), mappingId, workflowId, importId, commit ? "commit" : "preview"); + Flowable> processCall = client.exchange( + PUT(url, userData) + .cookie(new NettyCookie("phylo-token", "test-registered-user")), String.class + ); + return processCall; + + } + public HttpResponse getUploadedFile(String importId, RxHttpClient client, Program program, String mappingId) throws InterruptedException { Flowable> call = client.exchange( GET(String.format("/programs/%s/import/mappings/%s/data/%s?mapping=true", program.getId(), mappingId, importId)) @@ -123,16 +155,16 @@ public Map setup(RxHttpClient client, Gson gson, DSLContext dsl, // Species Species validSpecies = speciesService.getAll().get(0); SpeciesRequest speciesRequest = SpeciesRequest.builder() - .commonName(validSpecies.getCommonName()) - .id(validSpecies.getId()) - .build(); + .commonName(validSpecies.getCommonName()) + .id(validSpecies.getId()) + .build(); // Insert program ProgramRequest program = ProgramRequest.builder() - .name("Test Program") - .species(speciesRequest) - .key("TEST") - .build(); + .name("Test Program") + 
.species(speciesRequest) + .key("TEST") + .build(); Program validProgram = this.insertAndFetchTestProgram(program, client, gson); // Get import @@ -141,18 +173,18 @@ public Map setup(RxHttpClient client, Gson gson, DSLContext dsl, ); HttpResponse response = call.blockingFirst(); String mappingId = JsonParser.parseString(response.body()).getAsJsonObject() - .getAsJsonObject("result") - .getAsJsonArray("data") - .get(0).getAsJsonObject().get("id").getAsString(); + .getAsJsonObject("result") + .getAsJsonArray("data") + .get(0).getAsJsonObject().get("id").getAsString(); BiUserEntity testUser = userDAO.getUserByOrcId(TestTokenValidator.TEST_USER_ORCID).get(); dsl.execute(securityFp.get("InsertProgramRolesBreeder"), testUser.getId().toString(), validProgram.getId()); dsl.execute(securityFp.get("InsertSystemRoleAdmin"), testUser.getId().toString()); return Map.of("program", validProgram, - "mappingId", mappingId, - "testUser", testUser, - "securityFp", securityFp); + "mappingId", mappingId, + "testUser", testUser, + "securityFp", securityFp); } @@ -170,6 +202,43 @@ public JsonObject uploadAndFetch(File file, Map userData, Boolea return result; } + public JsonObject uploadAndFetchWorkflow(File file, + Map userData, + Boolean commit, + RxHttpClient client, + Program program, + String mappingId, + String workflowId) throws InterruptedException { + Flowable> call = uploadWorkflowDataFile(file, userData, commit, client, program, mappingId, workflowId); + HttpResponse response = call.blockingFirst(); + assertEquals(HttpStatus.ACCEPTED, response.getStatus()); + + String importId = JsonParser.parseString(response.body()).getAsJsonObject().getAsJsonObject("result").get("importId").getAsString(); + + HttpResponse upload = getUploadedFile(importId, client, program, mappingId); + JsonObject result = JsonParser.parseString(upload.body()).getAsJsonObject().getAsJsonObject("result"); + assertEquals(200, result.getAsJsonObject("progress").get("statuscode").getAsInt(), "Returned data: " + 
result); + return result; + } + + public JsonObject uploadAndFetchWorkflowNoStatusCheck(File file, + Map userData, + Boolean commit, + RxHttpClient client, + Program program, + String mappingId, + String workflowId) throws InterruptedException { + Flowable> call = uploadWorkflowDataFile(file, userData, commit, client, program, mappingId, workflowId); + HttpResponse response = call.blockingFirst(); + assertEquals(HttpStatus.ACCEPTED, response.getStatus()); + + String importId = JsonParser.parseString(response.body()).getAsJsonObject().getAsJsonObject("result").get("importId").getAsString(); + + HttpResponse upload = getUploadedFile(importId, client, program, mappingId); + JsonObject result = JsonParser.parseString(upload.body()).getAsJsonObject().getAsJsonObject("result"); + return result; + } + public List createTraits(int numToCreate) { List traits = new ArrayList<>(); for (int i = 0; i < numToCreate; i++) {