diff --git a/doc/sphinx-guides/source/admin/monitoring.rst b/doc/sphinx-guides/source/admin/monitoring.rst index 8ce5f65c5ca..84d6f31e6d7 100644 --- a/doc/sphinx-guides/source/admin/monitoring.rst +++ b/doc/sphinx-guides/source/admin/monitoring.rst @@ -108,6 +108,11 @@ Edit Draft Versions Logging Changes made to draft versions of datasets are logged in a folder called logs/edit-drafts. See https://github.com/IQSS/dataverse/issues/5145 for more information on this logging. +Solr Indexing Failures Logging +------------------------------ + +Failures occurring during the indexing of dataverses and datasets are logged in a folder called logs/process-failures. This logging will include instructions for manually re-running the failed processes. It may be advantageous to set up an automatic job to monitor new entries into this log folder so that indexes can be re-run. + EJB Timers ---------- diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index 3d0112993c9..e4eb6aac88e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -12,6 +12,7 @@ import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.MarkupChecker; import edu.harvard.iq.dataverse.util.SystemConfig; +import java.io.IOException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; @@ -34,6 +35,7 @@ import javax.persistence.Query; import javax.persistence.TypedQuery; import org.apache.commons.lang.StringUtils; +import org.apache.solr.client.solrj.SolrServerException; /** * @@ -1093,8 +1095,13 @@ public JsonObjectBuilder fixMissingUnf(String datasetVersionId, boolean forceRec // reindexing the dataset, to make sure the new UNF is in SOLR: boolean doNormalSolrDocCleanUp = true; - Future indexingResult = 
indexService.indexDataset(datasetVersion.getDataset(), doNormalSolrDocCleanUp); - + try { + Future indexingResult = indexService.indexDataset(datasetVersion.getDataset(), doNormalSolrDocCleanUp); + } catch (IOException | SolrServerException e) { + String failureLogText = "Post UNF update indexing failed. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + datasetVersion.getDataset().getId().toString(); + failureLogText += "\r\n" + e.getLocalizedMessage(); + LoggingUtil.writeOnSuccessFailureLog(null, failureLogText, datasetVersion.getDataset()); + } return info; } diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java index ce90ff4b8c2..8dc85cb2d28 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java @@ -12,12 +12,14 @@ import edu.harvard.iq.dataverse.authorization.groups.GroupServiceBean; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; +import edu.harvard.iq.dataverse.batch.util.LoggingUtil; import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.search.IndexServiceBean; import edu.harvard.iq.dataverse.search.SolrSearchResult; import edu.harvard.iq.dataverse.util.SystemConfig; import java.io.File; +import java.io.IOException; import java.sql.Timestamp; import java.util.ArrayList; import java.util.Date; @@ -41,6 +43,7 @@ import javax.persistence.PersistenceContext; import javax.persistence.TypedQuery; import javax.ws.rs.core.Response; +import org.apache.solr.client.solrj.SolrServerException; /** * @@ -90,8 +93,16 @@ public Dataverse save(Dataverse dataverse) { Dataverse savedDataverse = em.merge(dataverse); /** * @todo check the result to see 
if indexing was successful or not + * added logging of exceptions */ - Future indexingResult = indexService.indexDataverse(savedDataverse); + try { + Future indexingResult = indexService.indexDataverse(savedDataverse); + } catch (IOException | SolrServerException e) { + String failureLogText = "Post-save indexing failed. You can kickoff a re-index of this dataverse with: \r\n curl http://localhost:8080/api/admin/index/dataverses/" + savedDataverse.getId().toString(); + failureLogText += "\r\n" + e.getLocalizedMessage(); + LoggingUtil.writeOnSuccessFailureLog(null, failureLogText, savedDataverse); + } + // logger.log(Level.INFO, "during dataverse save, indexing result was: {0}", indexingResult); return savedDataverse; } diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index dea0e35188e..025ac34fdc4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -1434,7 +1434,8 @@ public String save() { //if (newDraftVersion) { // return returnToDraftVersionById(); //} - indexService.indexDataset(dataset, true); + // indexService.indexDataset(dataset, true); + // indexing is handled by the commands logger.fine("Redirecting to the dataset page, from the edit/upload page."); return returnToDraftVersion(); } diff --git a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java index 4483c074e8f..36d0f550955 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java +++ b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java @@ -32,6 +32,7 @@ import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.workflow.WorkflowServiceBean; import java.util.EnumSet; +import java.util.Stack; import java.util.logging.Level; import java.util.logging.Logger; import javax.annotation.Resource; @@ 
-39,6 +40,7 @@ import javax.ejb.EJBException; import javax.ejb.TransactionAttribute; import static javax.ejb.TransactionAttributeType.REQUIRES_NEW; +import static javax.ejb.TransactionAttributeType.SUPPORTS; import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; import javax.validation.ConstraintViolation; @@ -177,6 +179,9 @@ public class EjbDataverseEngine { @EJB ConfirmEmailServiceBean confirmEmailService; + @EJB + EjbDataverseEngineInner innerEngine; + @Resource EJBContext ejbCtxt; @@ -187,11 +192,19 @@ public class EjbDataverseEngine { public R submitInNewTransaction(Command aCommand) throws CommandException { return submit(aCommand); } + + private DvObject getRetType(Object r){ + + return (DvObject) r; + + } + + @TransactionAttribute(SUPPORTS) public R submit(Command aCommand) throws CommandException { final ActionLogRecord logRec = new ActionLogRecord(ActionLogRecord.ActionType.Command, aCommand.getClass().getCanonicalName()); - + try { logRec.setUserIdentifier( aCommand.getRequest().getUser().getIdentifier() ); @@ -233,7 +246,22 @@ public R submit(Command aCommand) throws CommandException { } } try { - return aCommand.execute(getContext()); + if (getContext().getCommandsCalled() == null){ + getContext().beginCommandSequence(); + } + getContext().addCommand(aCommand); + //This list of commands is held by the outermost command's context + //to be run on completeCommand method when the outermost command is completed + Stack previouslyCalled = getContext().getCommandsCalled(); + R r = innerEngine.submit(aCommand, getContext()); + if (getContext().getCommandsCalled().empty() && !previouslyCalled.empty()){ + for (Command c: previouslyCalled){ + getContext().getCommandsCalled().add(c); + } + } + //This runs the onSuccess Methods for all commands in the stack when the outermost command completes + this.completeCommand(aCommand, r, getContext().getCommandsCalled()); + return r; } catch ( EJBException ejbe ) { throw new 
CommandException("Command " + aCommand.toString() + " failed: " + ejbe.getMessage(), ejbe.getCausedByException(), aCommand); @@ -267,20 +295,61 @@ public R submit(Command aCommand) throws CommandException { throw re; } finally { - if ( logRec.getActionResult() == null ) { - logRec.setActionResult( ActionLogRecord.Result.OK ); + //when we get here we need to wipe out the command list so that + //failed commands don't have their onSuccess methods run. + getContext().cancelCommandSequence(); + if (logRec.getActionResult() == null) { + logRec.setActionResult(ActionLogRecord.Result.OK); } else { - ejbCtxt.setRollbackOnly(); + try{ + ejbCtxt.setRollbackOnly(); + } catch (IllegalStateException isEx){ + //Not in a transaction nothing to rollback + } } - logRec.setEndTime( new java.util.Date() ); + logRec.setEndTime(new java.util.Date()); logSvc.log(logRec); } } + + protected void completeCommand(Command command, Object r, Stack called) { + + if (called.isEmpty()){ + return; + } + + Command test = called.get(0); + if (!test.equals(command)) { + //if it's not the first command on the stack it must be an "inner" command + //and we don't want to run its onSuccess until all commands have comepleted successfully + return; + } + + for (Command commandLoop : called) { + commandLoop.onSuccess(ctxt, r); + } + + } + public CommandContext getContext() { if (ctxt == null) { ctxt = new CommandContext() { + public Stack commandsCalled; + + @Override + public void addCommand (Command command){ + commandsCalled.push(command); + } + + + @Override + public Stack getCommandsCalled(){ + return commandsCalled; + } + + @Override public DatasetServiceBean datasets() { return datasetService; @@ -495,6 +564,22 @@ public ActionLogServiceBean actionLog() { return logSvc; } + @Override + public void beginCommandSequence() { + this.commandsCalled = new Stack(); + } + + @Override + public boolean completeCommandSequence(Command command) { + this.commandsCalled.clear(); + return true; + } + + @Override + 
public void cancelCommandSequence() { + this.commandsCalled = new Stack(); + } + }; } diff --git a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngineInner.java b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngineInner.java new file mode 100644 index 00000000000..1b311f7491f --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngineInner.java @@ -0,0 +1,34 @@ + +package edu.harvard.iq.dataverse; + +import edu.harvard.iq.dataverse.engine.command.Command; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import javax.ejb.Stateless; +import javax.ejb.TransactionAttribute; +import static javax.ejb.TransactionAttributeType.REQUIRED; + +import javax.inject.Named; + +/** + * + * @author skraffmi + * Inner class that does the actual execute action on a command + * Transaction attribute is required so that failures here cause a rollback + * the outer engine has a transaction attribute of "SUPPORTED" + * so that if there are failure in the onComplete method of the command + * the transaction will not be rolled back + * + */ +@Stateless +@Named +public class EjbDataverseEngineInner { + + @TransactionAttribute(REQUIRED) + public R submit(Command aCommand, CommandContext ctxt) throws CommandException { + + return aCommand.execute(ctxt); + + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java index 04084647584..0606a3a87f2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java @@ -241,6 +241,7 @@ public Boolean sendNotificationEmail(UserNotification notification, String comme logger.warning("Skipping " + notification.getType() + " notification, because email address is null"); } return retval; + } private String getDatasetManageFileAccessLink(DataFile datafile){ diff --git 
a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index f538d369610..bbcd33af323 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -77,6 +77,7 @@ import edu.harvard.iq.dataverse.privateurl.PrivateUrl; import edu.harvard.iq.dataverse.S3PackageImporter; import static edu.harvard.iq.dataverse.api.AbstractApiBean.error; +import edu.harvard.iq.dataverse.batch.util.LoggingUtil; import edu.harvard.iq.dataverse.dataaccess.StorageIO; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.PermissionException; @@ -145,6 +146,7 @@ import javax.ws.rs.core.Response; import static javax.ws.rs.core.Response.Status.BAD_REQUEST; import javax.ws.rs.core.UriInfo; +import org.apache.solr.client.solrj.SolrServerException; import org.glassfish.jersey.media.multipart.FormDataBodyPart; import org.glassfish.jersey.media.multipart.FormDataContentDisposition; import org.glassfish.jersey.media.multipart.FormDataParam; @@ -1681,7 +1683,14 @@ public Response deleteLocks(@PathParam("identifier") String id, @QueryParam("typ } // kick of dataset reindexing, in case the locks removed // affected the search card: - indexService.indexDataset(dataset, true); + try { + indexService.indexDataset(dataset, true); + } catch (IOException | SolrServerException e) { + String failureLogText = "Post lock removal indexing failed. 
You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString(); + failureLogText += "\r\n" + e.getLocalizedMessage(); + LoggingUtil.writeOnSuccessFailureLog(null, failureLogText, dataset); + + } return ok("locks removed"); } return ok("dataset not locked"); @@ -1694,7 +1703,14 @@ public Response deleteLocks(@PathParam("identifier") String id, @QueryParam("typ dataset = findDatasetOrDie(id); // ... and kick of dataset reindexing, in case the lock removed // affected the search card: - indexService.indexDataset(dataset, true); + try { + indexService.indexDataset(dataset, true); + } catch (IOException | SolrServerException e) { + String failureLogText = "Post lock removal indexing failed. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString(); + failureLogText += "\r\n" + e.getLocalizedMessage(); + LoggingUtil.writeOnSuccessFailureLog(null, failureLogText, dataset); + + } return ok("lock type " + lock.getReason() + " removed"); } return ok("no lock type " + lockType + " on the dataset"); @@ -1725,7 +1741,15 @@ public Response lockDataset(@PathParam("identifier") String id, @PathParam("type // refresh the dataset: dataset = findDatasetOrDie(id); // ... and kick of dataset reindexing: - indexService.indexDataset(dataset, true); + try { + indexService.indexDataset(dataset, true); + } catch (IOException | SolrServerException e) { + String failureLogText = "Post add lock indexing failed. 
You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString(); + failureLogText += "\r\n" + e.getLocalizedMessage(); + LoggingUtil.writeOnSuccessFailureLog(null, failureLogText, dataset); + + } + return ok("dataset locked with lock type " + lockType); } catch (WrappedResponse wr) { return wr.getResponse(); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Index.java b/src/main/java/edu/harvard/iq/dataverse/api/Index.java index b0225b26f78..c1f5f6957e6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Index.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Index.java @@ -22,6 +22,7 @@ import edu.harvard.iq.dataverse.search.SolrQueryResponse; import edu.harvard.iq.dataverse.search.SolrSearchResult; import edu.harvard.iq.dataverse.authorization.users.User; +import edu.harvard.iq.dataverse.batch.util.LoggingUtil; import edu.harvard.iq.dataverse.search.DvObjectSolrDoc; import edu.harvard.iq.dataverse.search.FacetCategory; import edu.harvard.iq.dataverse.search.FileView; @@ -223,7 +224,12 @@ public Response indexTypeById(@PathParam("type") String type, @PathParam("id") L /** * @todo Can we display the result of indexing to the user? 
*/ - Future indexDataverseFuture = indexService.indexDataverse(dataverse); + + try { + Future indexDataverseFuture = indexService.indexDataverse(dataverse); + } catch (IOException | SolrServerException e) { + return error(Status.BAD_REQUEST, writeFailureToLog(e.getLocalizedMessage(), dataverse)); + } return ok("starting reindex of dataverse " + id); } else { String response = indexService.removeSolrDocFromIndex(IndexServiceBean.solrDocIdentifierDataverse + id); @@ -233,7 +239,13 @@ public Response indexTypeById(@PathParam("type") String type, @PathParam("id") L Dataset dataset = datasetService.find(id); if (dataset != null) { boolean doNormalSolrDocCleanUp = true; - Future indexDatasetFuture = indexService.indexDataset(dataset, doNormalSolrDocCleanUp); + try { + Future indexDatasetFuture = indexService.indexDataset(dataset, doNormalSolrDocCleanUp); + } catch (IOException | SolrServerException e) { + // + return error(Status.BAD_REQUEST, writeFailureToLog(e.getLocalizedMessage(), dataset)); + } + return ok("starting reindex of dataset " + id); } else { /** @@ -250,7 +262,12 @@ public Response indexTypeById(@PathParam("type") String type, @PathParam("id") L * @todo How can we display the result to the user? 
*/ boolean doNormalSolrDocCleanUp = true; - Future indexDatasetFuture = indexService.indexDataset(datasetThatOwnsTheFile, doNormalSolrDocCleanUp); + try { + Future indexDatasetFuture = indexService.indexDataset(datasetThatOwnsTheFile, doNormalSolrDocCleanUp); + } catch (IOException | SolrServerException e) { + writeFailureToLog(e.getLocalizedMessage(), datasetThatOwnsTheFile); + } + return ok("started reindexing " + type + "/" + id); } else { return error(Status.BAD_REQUEST, "illegal type: " + type); @@ -300,7 +317,11 @@ public Response indexDatasetByPersistentId(@QueryParam("persistentId") String pe } if (dataset != null) { boolean doNormalSolrDocCleanUp = true; - Future indexDatasetFuture = indexService.indexDataset(dataset, doNormalSolrDocCleanUp); + try { + Future indexDatasetFuture = indexService.indexDataset(dataset, doNormalSolrDocCleanUp); + } catch (IOException | SolrServerException e) { + writeFailureToLog(e.getLocalizedMessage(), dataset); + } JsonObjectBuilder data = Json.createObjectBuilder(); data.add("message", "Reindexed dataset " + persistentId); data.add("id", dataset.getId()); @@ -739,5 +760,22 @@ public Response getFileMetadataByDatasetId( } return ok(data); } + + private String writeFailureToLog(String localizedMessage, DvObject dvo) { + String retVal = ""; + String logString = ""; + if(dvo.isInstanceofDataverse()){ + retVal = "Dataverse Indexing failed. " ; + logString += retVal + " You can kickoff a re-index of this dataverse with: \r\n curl http://localhost:8080/api/admin/index/dataverses/" + dvo.getId().toString(); + } + + if(dvo.isInstanceofDataset()){ + retVal += " Dataset Indexing failed. 
"; + logString += retVal + " You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dvo.getId().toString(); + } + retVal += " \r\n " + localizedMessage; + LoggingUtil.writeOnSuccessFailureLog(null, logString, dvo); + return retVal; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/batch/util/LoggingUtil.java b/src/main/java/edu/harvard/iq/dataverse/batch/util/LoggingUtil.java index 27e21094dca..4a778dc7abb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/batch/util/LoggingUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/batch/util/LoggingUtil.java @@ -19,7 +19,10 @@ package edu.harvard.iq.dataverse.batch.util; +import edu.harvard.iq.dataverse.DvObject; import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; +import static edu.harvard.iq.dataverse.batch.jobs.importer.filesystem.FileRecordJobListener.SEP; +import edu.harvard.iq.dataverse.engine.command.Command; import org.apache.commons.io.FileUtils; import javax.batch.runtime.JobExecution; @@ -37,6 +40,7 @@ public class LoggingUtil { private static final Logger logger = Logger.getLogger(LoggingUtil.class.getName()); + private static final SimpleDateFormat logFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH-mm-ss"); public static void saveJsonLog(String jobJson, String logDir, String jobId) { try { @@ -61,6 +65,25 @@ public static void saveLogFile(String fileContent, String logDir, String fileNam logger.log(Level.SEVERE, "Error saving log report: " + fileName + " " + e.getMessage()); } } + + public static void writeOnSuccessFailureLog(Command command, String failureNotes, DvObject dvo){ + String logDir = System.getProperty("com.sun.aas.instanceRoot") + SEP + "logs" + SEP + "process-failures" + SEP; + String identifier = dvo.getIdentifier(); + + if (identifier != null) { + identifier = identifier.substring(identifier.indexOf("/") + 1); + } else { + identifier = dvo.getId().toString(); + } + if (command != null){ + failureNotes = 
failureNotes + "\r\n Command: " + command.toString(); + } + + String logTimestamp = logFormatter.format(new Date()); + String fileName = "/process-failure" + "-" + identifier + "-" + logTimestamp + ".txt"; + LoggingUtil.saveLogFile(failureNotes, logDir, fileName); + + } public static void saveLogFileAppendWithHeader(String fileContent, String logDir, String fileName, String logHeader) { try { diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/AbstractCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/AbstractCommand.java index 1876d47fc07..991ffe329ba 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/AbstractCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/AbstractCommand.java @@ -18,7 +18,7 @@ public abstract class AbstractCommand implements Command { private final Map affectedDvObjects; private final DataverseRequest request; - + static protected class DvNamePair { final String name; @@ -44,7 +44,7 @@ protected static DvNamePair dv(String s, DvObject d) { public AbstractCommand(DataverseRequest aRequest, DvObject anAffectedDvObject) { this(aRequest, dv("", anAffectedDvObject)); } - + public AbstractCommand(DataverseRequest aRequest, DvNamePair dvp, DvNamePair... more) { request = aRequest; affectedDvObjects = new HashMap<>(); @@ -53,7 +53,7 @@ public AbstractCommand(DataverseRequest aRequest, DvNamePair dvp, DvNamePair... 
affectedDvObjects.put(p.name, p.dvObject); } } - + public AbstractCommand(DataverseRequest aRequest, Map someAffectedDvObjects) { request = aRequest; affectedDvObjects = someAffectedDvObjects; @@ -93,5 +93,10 @@ public String describe() { } return sb.toString(); } + + @Override + public boolean onSuccess(CommandContext ctxt, Object r) { + return true; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/Command.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/Command.java index c6093432092..9b8403d5fc6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/Command.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/Command.java @@ -43,4 +43,17 @@ public interface Command { Map> getRequiredPermissions(); public String describe(); + + /** + * + * @param ctxt + * @param r - return value of the command + * @return - boolean indicating if the onSuccess processes where themselves successful + * + * The purpose of the onSuccess method of a command is to + * run those processes (such as indexing) that are ancillary to the command and + * whose failure should not rollback the transaction at the heart of the command + * For Indexing we have implemented a process for logging each failed index + */ + public boolean onSuccess(CommandContext ctxt, Object r); } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java index 200f8e88676..cf0d6e781b0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java @@ -41,6 +41,8 @@ import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.workflow.WorkflowServiceBean; +import java.util.List; +import java.util.Stack; import javax.persistence.EntityManager; /** @@ -141,4 +143,14 @@ public 
interface CommandContext { public ConfirmEmailServiceBean confirmEmail(); public ActionLogServiceBean actionLog(); + + public void beginCommandSequence(); + + public boolean completeCommandSequence(Command command); + + public void cancelCommandSequence(); + + public Stack getCommandsCalled(); + + public void addCommand(Command command); } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java index 9ebc816a9cf..eb21f70f2cb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java @@ -3,6 +3,7 @@ import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.batch.util.LoggingUtil; import edu.harvard.iq.dataverse.dataaccess.DataAccess; import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleUtil; import edu.harvard.iq.dataverse.datacapturemodule.ScriptRequestResponse; @@ -17,6 +18,7 @@ import java.util.Objects; import java.util.logging.Level; import java.util.logging.Logger; +import org.apache.solr.client.solrj.SolrServerException; /**; * An abstract base class for commands that creates {@link Dataset}s. @@ -130,20 +132,16 @@ public Dataset execute(CommandContext ctxt) throws CommandException { // TODO: switch to asynchronous version when JPA sync works // ctxt.index().asyncIndexDataset(theDataset.getId(), true); - ctxt.index().indexDataset(theDataset, true); + try{ + ctxt.index().indexDataset(theDataset, true); + } catch (IOException | SolrServerException e) { + String failureLogText = "Post create dataset indexing failed. 
You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + theDataset.getId().toString(); + failureLogText += "\r\n" + e.getLocalizedMessage(); + LoggingUtil.writeOnSuccessFailureLog(null, failureLogText, theDataset); + } + ctxt.solrIndex().indexPermissionsOnSelfAndChildren(theDataset.getId()); - /* - if (DataCaptureModuleUtil.rsyncSupportEnabled(ctxt.settings().getValueForKey(SettingsServiceBean.Key.UploadMethods))) { - logger.fine("Requesting rsync support."); - try { - ScriptRequestResponse scriptRequestResponse = ctxt.engine().submit(new RequestRsyncScriptCommand(getRequest(), theDataset)); - logger.log(Level.FINE, "script: {0}", scriptRequestResponse.getScript()); - } catch (RuntimeException ex) { - logger.log(Level.WARNING, "Problem getting rsync script: {0}", ex.getLocalizedMessage()); - } - logger.fine("Done with rsync request."); - }*/ return theDataset; } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java index 7a352590aac..c32e2e67ae3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java @@ -11,6 +11,7 @@ import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupProvider; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; +import edu.harvard.iq.dataverse.batch.util.LoggingUtil; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; @@ -18,6 +19,7 @@ import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import 
edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; +import java.io.IOException; import java.sql.Timestamp; import java.util.ArrayList; @@ -25,6 +27,7 @@ import java.util.Date; import java.util.List; import java.util.logging.Logger; +import org.apache.solr.client.solrj.SolrServerException; /** * TODO make override the date and user more active, so prevent code errors. @@ -145,7 +148,7 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { managedDv.setPermissionModificationTime(new Timestamp(new Date().getTime())); managedDv = ctxt.dataverses().save(managedDv); - ctxt.index().indexDataverse(managedDv); + // ctxt.index().indexDataverse(managedDv); if (facetList != null) { ctxt.facets().deleteFacetsFor(managedDv); int i = 0; @@ -163,5 +166,19 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { } return managedDv; } + + @Override + public boolean onSuccess(CommandContext ctxt, Object r) { + try{ + ctxt.index().indexDataverse((Dataverse) r); + } catch (IOException | SolrServerException e){ + Dataverse dv = (Dataverse) r; + String failureLogText = "Indexing failed. 
You can kickoff a re-index of this dataverse with: \r\n curl http://localhost:8080/api/admin/index/dataverses/" + dv.getId().toString(); + failureLogText += "\r\n" + e.getLocalizedMessage(); + LoggingUtil.writeOnSuccessFailureLog(this, failureLogText, dv); + return false; + } + return true; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDatasetCommand.java index c39d9efb1b7..b5f53c51096 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDatasetCommand.java @@ -8,7 +8,6 @@ import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; -import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import static edu.harvard.iq.dataverse.util.StringUtil.nonEmpty; @@ -117,6 +116,5 @@ public Map> getRequiredPermissions() { dv.isReleased() ? 
Collections.singleton(Permission.AddDataset) : new HashSet<>(Arrays.asList(Permission.AddDataset,Permission.ViewUnpublishedDataverse))); } - - + } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeaccessionDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeaccessionDatasetVersionCommand.java index c161403faf7..6aa5e0e250a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeaccessionDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeaccessionDatasetVersionCommand.java @@ -21,11 +21,10 @@ import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; import java.io.IOException; -import java.util.ArrayList; -import java.util.ResourceBundle; import java.util.logging.Logger; -import java.util.logging.Level; -import edu.harvard.iq.dataverse.GlobalIdServiceBean; +import edu.harvard.iq.dataverse.batch.util.LoggingUtil; +import java.util.concurrent.Future; +import org.apache.solr.client.solrj.SolrServerException; /** * @@ -74,7 +73,6 @@ public DatasetVersion execute(CommandContext ctxt) throws CommandException { DatasetVersion managed = ctxt.em().merge(theVersion); boolean doNormalSolrDocCleanUp = true; - ctxt.index().indexDataset(managed.getDataset(), doNormalSolrDocCleanUp); ExportService instance = ExportService.getInstance(ctxt.settings()); @@ -101,5 +99,23 @@ public DatasetVersion execute(CommandContext ctxt) throws CommandException { return managed; } - + + @Override + public boolean onSuccess(CommandContext ctxt, Object r) { + boolean retVal = true; + DatasetVersion version = (DatasetVersion) r; + Dataset dataset = version.getDataset(); + + try { + ctxt.index().indexDataset(dataset, true); + Future indexString = ctxt.index().indexDataset(dataset, true); + } catch (IOException | SolrServerException e) { + String failureLogText = "Post-publication indexing failed. 
You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString(); + failureLogText += "\r\n" + e.getLocalizedMessage(); + LoggingUtil.writeOnSuccessFailureLog(this, failureLogText, dataset); + retVal = false; + } + return retVal; + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetLinkingDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetLinkingDataverseCommand.java index 055097fd492..aada2663bf6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetLinkingDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetLinkingDataverseCommand.java @@ -9,13 +9,17 @@ import edu.harvard.iq.dataverse.DatasetLinkingDataverse; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.batch.util.LoggingUtil; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.PermissionException; +import java.io.IOException; import java.util.Collections; +import java.util.concurrent.Future; +import org.apache.solr.client.solrj.SolrServerException; /** * @@ -45,10 +49,34 @@ public Dataset execute(CommandContext ctxt) throws CommandException { DatasetLinkingDataverse doomedAndMerged = ctxt.em().merge(doomed); ctxt.em().remove(doomedAndMerged); - if (index) { - ctxt.index().indexDataset(editedDs, true); + try { ctxt.index().indexDataverse(doomed.getLinkingDataverse()); + } catch (IOException | SolrServerException e) { + String 
failureLogText = "Post delete linking dataverse indexing failed for Dataverse. "; + failureLogText += "\r\n" + e.getLocalizedMessage(); + LoggingUtil.writeOnSuccessFailureLog(this, failureLogText, doomed.getLinkingDataverse()); } + return merged; - } + } + + @Override + public boolean onSuccess(CommandContext ctxt, Object r) { + boolean retVal = true; + Dataset dataset = (Dataset) r; + + if (index) { + try { + ctxt.index().indexDataset(dataset, true); + } catch (IOException | SolrServerException e) { + String failureLogText = "Post delete linked dataset indexing failed. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString(); + failureLogText += "\r\n" + e.getLocalizedMessage(); + LoggingUtil.writeOnSuccessFailureLog(this, failureLogText, dataset); + retVal = false; + } + + } + + return retVal; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetVersionCommand.java index 236a943e153..3f63c3c6d27 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetVersionCommand.java @@ -6,6 +6,7 @@ import edu.harvard.iq.dataverse.RoleAssignment; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser; +import edu.harvard.iq.dataverse.batch.util.LoggingUtil; import edu.harvard.iq.dataverse.engine.command.AbstractVoidCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; @@ -14,9 +15,11 @@ import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import edu.harvard.iq.dataverse.privateurl.PrivateUrl; +import java.io.IOException; import java.util.Iterator; import 
java.util.List; import java.util.logging.Logger; +import org.apache.solr.client.solrj.SolrServerException; /** * @@ -93,7 +96,14 @@ protected void executeImpl(CommandContext ctxt) throws CommandException { } } boolean doNormalSolrDocCleanUp = true; - ctxt.index().indexDataset(doomed, doNormalSolrDocCleanUp); + try { + ctxt.index().indexDataset(doomed, doNormalSolrDocCleanUp); + } catch (IOException | SolrServerException e) { + String failureLogText = "Post delete version indexing failed. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + doomed.getId().toString(); + failureLogText += "\r\n" + e.getLocalizedMessage(); + LoggingUtil.writeOnSuccessFailureLog(this, failureLogText, doomed); + } + return; } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDataverseLinkingDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDataverseLinkingDataverseCommand.java index 1331696711b..9eddfcd2b9c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDataverseLinkingDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDataverseLinkingDataverseCommand.java @@ -9,13 +9,17 @@ import edu.harvard.iq.dataverse.DataverseLinkingDataverse; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.batch.util.LoggingUtil; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.PermissionException; +import java.io.IOException; import java.util.Collections; +import 
java.util.concurrent.Future; +import org.apache.solr.client.solrj.SolrServerException; /** * @@ -47,9 +51,33 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { ctxt.em().remove(doomedAndMerged); if (index) { - ctxt.index().indexDataverse(editedDv); - ctxt.index().indexDataverse(doomed.getLinkingDataverse()); + //can only index merged in the onSuccess method so must index doomed linking dataverse here + try { + ctxt.index().indexDataverse(doomed.getLinkingDataverse()); + } catch (IOException | SolrServerException e) { + String failureLogText = "Indexing failed for Linked Dataverse. You can kickoff a re-index of this dataverse with: \r\n curl http://localhost:8080/api/admin/index/dataverses/" + doomed.getLinkingDataverse().getId().toString(); + failureLogText += "\r\n" + e.getLocalizedMessage(); + LoggingUtil.writeOnSuccessFailureLog(this, failureLogText, doomed.getLinkingDataverse()); + } } return merged; } + + @Override + public boolean onSuccess(CommandContext ctxt, Object r) { + + try { + Future retVal = ctxt.index().indexDataverse((Dataverse) r); + } catch (IOException | SolrServerException e) { + Dataverse dv = (Dataverse) r; + String failureLogText = "Indexing failed for Dataverse delinking. 
You can kickoff a re-index of this dataverse with: \r\n curl http://localhost:8080/api/admin/index/dataverses/" + dv.getId().toString(); + failureLogText += "\r\n" + e.getLocalizedMessage(); + LoggingUtil.writeOnSuccessFailureLog(this, failureLogText, (Dataverse) r); + return false; + } + + return true; + + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DestroyDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DestroyDatasetCommand.java index 6da6360f7ba..7f7fb0662a4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DestroyDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DestroyDatasetCommand.java @@ -23,6 +23,10 @@ import java.util.logging.Level; import java.util.logging.Logger; import edu.harvard.iq.dataverse.GlobalIdServiceBean; +import edu.harvard.iq.dataverse.batch.util.LoggingUtil; +import java.io.IOException; +import java.util.concurrent.Future; +import org.apache.solr.client.solrj.SolrServerException; /** * Same as {@link DeleteDatasetCommand}, but does not stop if the dataset is @@ -114,7 +118,15 @@ protected void executeImpl(CommandContext ctxt) throws CommandException { IndexResponse resultOfSolrDeletionAttempt = ctxt.solrIndex().deleteMultipleSolrIds(datasetAndFileSolrIdsToDelete); logger.log(Level.FINE, "Result of attempt to delete dataset and file IDs from the search index: {0}", resultOfSolrDeletionAttempt.getMessage()); - ctxt.index().indexDataverse(toReIndex); + + try { + ctxt.index().indexDataverse(toReIndex); + } catch (IOException | SolrServerException e) { + String failureLogText = "Post-destroy dataset indexing of the owning dataverse failed. 
You can kickoff a re-index of this dataverse with: \r\n curl http://localhost:8080/api/admin/index/dataverses/" + toReIndex.getId().toString(); + failureLogText += "\r\n" + e.getLocalizedMessage(); + LoggingUtil.writeOnSuccessFailureLog(this, failureLogText, toReIndex); + } + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java index adaadfe8b5c..e134c3dc1b3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java @@ -29,6 +29,10 @@ import java.util.logging.Level; import java.util.logging.Logger; import edu.harvard.iq.dataverse.GlobalIdServiceBean; +import edu.harvard.iq.dataverse.batch.util.LoggingUtil; +import edu.harvard.iq.dataverse.engine.command.Command; +import java.util.concurrent.Future; +import org.apache.solr.client.solrj.SolrServerException; /** * @@ -101,9 +105,22 @@ public Dataset execute(CommandContext ctxt) throws CommandException { ddu.setLastUpdateDate(getTimestamp()); ctxt.em().merge(ddu); - updateParentDataversesSubjectsField(theDataset, ctxt); + try { + updateParentDataversesSubjectsField(theDataset, ctxt); + } catch (IOException | SolrServerException e) { + String failureLogText = "Post-publication indexing failed for Dataverse subject update. 
"; + failureLogText += "\r\n" + e.getLocalizedMessage(); + LoggingUtil.writeOnSuccessFailureLog(this, failureLogText, theDataset); + + } + List previouslyCalled = ctxt.getCommandsCalled(); + PrivateUrl privateUrl = ctxt.engine().submit(new GetPrivateUrlCommand(getRequest(), theDataset)); + List afterSub = ctxt.getCommandsCalled(); + previouslyCalled.forEach((c) -> { + ctxt.getCommandsCalled().add(c); + }); if (privateUrl != null) { ctxt.engine().submit(new DeletePrivateUrlCommand(getRequest(), theDataset)); } @@ -116,10 +133,6 @@ public Dataset execute(CommandContext ctxt) throws CommandException { theDataset.getLatestVersion().setVersionState(RELEASED); } - exportMetadata(ctxt.settings()); - boolean doNormalSolrDocCleanUp = true; - ctxt.index().indexDataset(theDataset, doNormalSolrDocCleanUp); - ctxt.solrIndex().indexPermissionsForOneDvObject(theDataset); // Remove locks ctxt.engine().submit(new RemoveLockCommand(getRequest(), theDataset, DatasetLock.Reason.Workflow)); @@ -147,6 +160,30 @@ public Dataset execute(CommandContext ctxt) throws CommandException { return readyDataset; } + + @Override + public boolean onSuccess(CommandContext ctxt, Object r) { + boolean retVal = true; + Dataset dataset = null; + try{ + dataset = (Dataset) r; + } catch (ClassCastException e){ + dataset = ((PublishDatasetResult) r).getDataset(); + } + + try { + Future indexString = ctxt.index().indexDataset(dataset, true); + } catch (IOException | SolrServerException e) { + String failureLogText = "Post-publication indexing failed. 
You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString(); + failureLogText += "\r\n" + e.getLocalizedMessage(); + LoggingUtil.writeOnSuccessFailureLog(this, failureLogText, dataset); + retVal = false; + } + + ctxt.solrIndex().indexPermissionsForOneDvObject(dataset); + exportMetadata(ctxt.settings()); + return retVal; + } /** * Attempting to run metadata export, for all the formats for which we have @@ -170,7 +207,7 @@ private void exportMetadata(SettingsServiceBean settingsServiceBean) { /** * add the dataset subjects to all parent dataverses. */ - private void updateParentDataversesSubjectsField(Dataset savedDataset, CommandContext ctxt) { + private void updateParentDataversesSubjectsField(Dataset savedDataset, CommandContext ctxt) throws SolrServerException, IOException { for (DatasetField dsf : savedDataset.getLatestVersion().getDatasetFields()) { if (dsf.getDatasetFieldType().getName().equals(DatasetFieldConstant.subject)) { Dataverse dv = savedDataset.getOwner(); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDatasetCommand.java index 8e4f6370414..fa81abfd0e3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDatasetCommand.java @@ -9,6 +9,7 @@ import edu.harvard.iq.dataverse.DatasetLinkingDataverse; import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.batch.util.LoggingUtil; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; @@ -16,8 +17,10 @@ import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import 
edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import edu.harvard.iq.dataverse.util.BundleUtil; +import java.io.IOException; import java.sql.Timestamp; import java.util.Date; +import org.apache.solr.client.solrj.SolrServerException; /** * @@ -54,8 +57,24 @@ public DatasetLinkingDataverse execute(CommandContext ctxt) throws CommandExcept datasetLinkingDataverse.setLinkCreateTime(new Timestamp(new Date().getTime())); ctxt.dsLinking().save(datasetLinkingDataverse); ctxt.em().flush(); - boolean doNormalSolrDocCleanUp = true; - ctxt.index().indexDataset(linkedDataset, doNormalSolrDocCleanUp); + return datasetLinkingDataverse; - } + } + + @Override + public boolean onSuccess(CommandContext ctxt, Object r) { + boolean retVal = true; + DatasetLinkingDataverse dld = (DatasetLinkingDataverse) r; + + try { + ctxt.index().indexDataset(dld.getDataset(), true); + } catch (IOException | SolrServerException e) { + String failureLogText = "Post link dataset indexing failed. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dld.getDataset().getId().toString(); + failureLogText += "\r\n" + e.getLocalizedMessage(); + LoggingUtil.writeOnSuccessFailureLog(this, failureLogText, dld.getDataset()); + retVal = false; + } + + return retVal; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDataverseCommand.java index e552867016a..1120c8c3773 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDataverseCommand.java @@ -9,6 +9,7 @@ import edu.harvard.iq.dataverse.DataverseLinkingDataverse; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.batch.util.LoggingUtil; 
import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; @@ -16,9 +17,13 @@ import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import edu.harvard.iq.dataverse.engine.command.exception.PermissionException; +import java.io.IOException; import java.sql.Timestamp; import java.util.Collections; import java.util.Date; +import java.util.concurrent.Future; +import javax.ws.rs.core.Response; +import org.apache.solr.client.solrj.SolrServerException; /** * @@ -54,7 +59,23 @@ public DataverseLinkingDataverse execute(CommandContext ctxt) throws CommandExce dataverseLinkingDataverse.setLinkingDataverse(linkingDataverse); dataverseLinkingDataverse.setLinkCreateTime(new Timestamp(new Date().getTime())); ctxt.dvLinking().save(dataverseLinkingDataverse); - ctxt.index().indexDataverse(linkedDataverse); return dataverseLinkingDataverse; - } + } + + @Override + public boolean onSuccess(CommandContext ctxt, Object r) { + DataverseLinkingDataverse result = (DataverseLinkingDataverse) r; + + try { + ctxt.index().indexDataverse(result.getDataverse()); + } catch (IOException | SolrServerException e) { + String failureLogText = "Dataverse indexing failed. 
You can kickoff a re-index of this dataverse with: \r\n curl http://localhost:8080/api/admin/index/dataverses/" + result.getDataverse().getId().toString(); + failureLogText += "\r\n" + e.getLocalizedMessage(); + LoggingUtil.writeOnSuccessFailureLog(this, failureLogText, result.getDataverse()); + return false; + } + + return true; + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MergeInAccountCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MergeInAccountCommand.java index 7537d253aff..bf542138044 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MergeInAccountCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MergeInAccountCommand.java @@ -17,6 +17,7 @@ import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2TokenData; import edu.harvard.iq.dataverse.authorization.users.ApiToken; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.batch.util.LoggingUtil; import edu.harvard.iq.dataverse.confirmemail.ConfirmEmailData; import edu.harvard.iq.dataverse.engine.command.AbstractVoidCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; @@ -27,8 +28,10 @@ import edu.harvard.iq.dataverse.search.IndexResponse; import edu.harvard.iq.dataverse.search.savedsearch.SavedSearch; import edu.harvard.iq.dataverse.workflows.WorkflowComment; +import java.io.IOException; import java.util.List; import java.util.logging.Logger; +import org.apache.solr.client.solrj.SolrServerException; /** * Merges one account into another. 
@@ -73,7 +76,14 @@ protected void executeImpl(CommandContext ctxt) throws CommandException { cra.setAssigneeIdentifier(ongoingAU.getIdentifier()); ctxt.em().merge(cra); IndexResponse indexResponse = ctxt.solrIndex().indexPermissionsForOneDvObject(cra.getDefinitionPoint()); - ctxt.index().indexDvObject(cra.getDefinitionPoint()); + try { + ctxt.index().indexDvObject(cra.getDefinitionPoint()); + } catch (IOException | SolrServerException e) { + String failureLogText = "Post merge account dataset indexing failed. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + cra.getDefinitionPoint().getId().toString(); + failureLogText += "\r\n" + e.getLocalizedMessage(); + LoggingUtil.writeOnSuccessFailureLog(this, failureLogText, cra.getDefinitionPoint()); + + } } // no else here because the any willDelete == true will happen in the named query below. } else { throw new IllegalCommandException("Original userIdentifier provided does not seem to be an AuthenticatedUser", this); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDataverseCommand.java index cf96e16858c..bf18194966d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDataverseCommand.java @@ -10,6 +10,7 @@ import edu.harvard.iq.dataverse.Template; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.batch.util.LoggingUtil; import edu.harvard.iq.dataverse.engine.command.AbstractVoidCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; @@ -19,12 +20,14 @@ import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import 
edu.harvard.iq.dataverse.engine.command.exception.PermissionException; import edu.harvard.iq.dataverse.util.BundleUtil; +import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.logging.Logger; +import org.apache.solr.client.solrj.SolrServerException; /** * A command to move a {@link Dataverse} between two {@link Dataverse}s. @@ -295,10 +298,18 @@ public void executeImpl(CommandContext ctxt) throws CommandException { ctxt.indexBatch().indexDataverseRecursively(moved); //REindex datasets linked to moved dv - if (moved.getDatasetLinkingDataverses() != null && !moved.getDatasetLinkingDataverses().isEmpty()) { + if (moved.getDatasetLinkingDataverses() != null && !moved.getDatasetLinkingDataverses().isEmpty()) { for (DatasetLinkingDataverse dld : moved.getDatasetLinkingDataverses()) { Dataset linkedDS = ctxt.datasets().find(dld.getDataset().getId()); - ctxt.index().indexDataset(linkedDS, true); + try { + ctxt.index().indexDataset(linkedDS, true); + } catch (IOException | SolrServerException e) { + String failureLogText = "Post move dataverse dataset indexing failed. 
You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + linkedDS.getId().toString(); + failureLogText += "\r\n" + e.getLocalizedMessage(); + LoggingUtil.writeOnSuccessFailureLog(this, failureLogText, linkedDS); + + } + } } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java index 41622507f1b..f2584d6a153 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java @@ -4,15 +4,18 @@ import edu.harvard.iq.dataverse.DatasetLock; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; +import edu.harvard.iq.dataverse.privateurl.PrivateUrl; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.workflow.Workflow; import edu.harvard.iq.dataverse.workflow.WorkflowContext.TriggerType; import java.util.Date; +import java.util.List; import java.util.Optional; import java.util.logging.Logger; import static java.util.stream.Collectors.joining; diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDataverseCommand.java index 2065e4595a8..1b963c644e7 100644 --- 
a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDataverseCommand.java @@ -15,6 +15,7 @@ import java.sql.Timestamp; import java.util.Date; import java.util.List; +import java.util.concurrent.Future; @RequiredPermissions(Permission.PublishDataverse) public class PublishDataverseCommand extends AbstractCommand { @@ -53,16 +54,17 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { dataverse.setPublicationDate(new Timestamp(new Date().getTime())); dataverse.setReleaseUser((AuthenticatedUser) getUser()); Dataverse savedDataverse = ctxt.dataverses().save(dataverse); - /** - * @todo consider also - * ctxt.solrIndex().indexPermissionsOnSelfAndChildren(savedDataverse.getId()); - */ - /** - * @todo what should we do with the indexRespose? - */ - IndexResponse indexResponse = ctxt.solrIndex().indexPermissionsForOneDvObject(savedDataverse); + return savedDataverse; } + + @Override + public boolean onSuccess(CommandContext ctxt, Object r) { + Dataverse ret = (Dataverse) r; + IndexResponse permsResponse = ctxt.solrIndex().indexPermissionsForOneDvObject(ret); + ctxt.solrIndex().indexPermissionsOnSelfAndChildren(ret.getId()); + return true; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RegisterDvObjectCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RegisterDvObjectCommand.java index 09b1c34fe04..9169d6b4fe9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RegisterDvObjectCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RegisterDvObjectCommand.java @@ -14,6 +14,9 @@ import java.sql.Timestamp; import java.util.Date; import edu.harvard.iq.dataverse.GlobalIdServiceBean; +import edu.harvard.iq.dataverse.batch.util.LoggingUtil; +import java.io.IOException; +import org.apache.solr.client.solrj.SolrServerException; /** * @@ -132,8 +135,16 @@ 
protected void executeImpl(CommandContext ctxt) throws CommandException { if (this.migrateHandle) { //Only continue if you can successfully migrate the handle boolean doNormalSolrDocCleanUp = true; - ctxt.index().indexDataset((Dataset) target, doNormalSolrDocCleanUp); - ctxt.solrIndex().indexPermissionsForOneDvObject((Dataset) target); + Dataset dataset = (Dataset) target; + try { + ctxt.index().indexDataset(dataset, doNormalSolrDocCleanUp); + ctxt.solrIndex().indexPermissionsForOneDvObject( dataset); + } catch (IOException | SolrServerException e) { + String failureLogText = "Post migrate handle dataset indexing failed. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString(); + failureLogText += "\r\n" + e.getLocalizedMessage(); + LoggingUtil.writeOnSuccessFailureLog(this, failureLogText, dataset); + + } } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommand.java index 08768558d0c..169f6d790d3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommand.java @@ -5,6 +5,7 @@ import edu.harvard.iq.dataverse.UserNotification; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.batch.util.LoggingUtil; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; @@ -12,7 +13,10 @@ import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.workflows.WorkflowComment; 
+import java.io.IOException; import java.util.List; +import java.util.concurrent.Future; +import org.apache.solr.client.solrj.SolrServerException; @RequiredPermissions(Permission.PublishDataset) public class ReturnDatasetToAuthorCommand extends AbstractDatasetCommand { @@ -57,13 +61,27 @@ Finally send a notification to the remaining (non-reviewing) authors - Hey! your for (AuthenticatedUser au : authors) { ctxt.notifications().sendNotification(au, getTimestamp(), UserNotification.Type.RETURNEDDS, savedDataset.getLatestVersion().getId(), comment); } - - - ctxt.index().indexDataset(savedDataset, true); + return savedDataset; } + + @Override + public boolean onSuccess(CommandContext ctxt, Object r) { + boolean retVal = true; + Dataset dataset = (Dataset) r; + try { + Future indexString = ctxt.index().indexDataset(dataset, true); + } catch (IOException | SolrServerException e) { + String failureLogText = "Post return to author indexing failed. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString(); + failureLogText += "\r\n" + e.getLocalizedMessage(); + LoggingUtil.writeOnSuccessFailureLog(this, failureLogText, dataset); + retVal = false; + } + + return retVal; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetDatasetCitationDateCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetDatasetCitationDateCommand.java index 362b2ea29e4..fe14d56562d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetDatasetCitationDateCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetDatasetCitationDateCommand.java @@ -4,12 +4,16 @@ import edu.harvard.iq.dataverse.DatasetFieldType; import edu.harvard.iq.dataverse.DatasetFieldType.FieldType; import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.batch.util.LoggingUtil; import 
edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; +import java.io.IOException; +import java.util.concurrent.Future; +import org.apache.solr.client.solrj.SolrServerException; /** * @@ -37,9 +41,23 @@ public Dataset execute(CommandContext ctxt) throws CommandException { } Dataset savedDataset = ctxt.datasets().merge(dataset); - ctxt.index().indexDataset(savedDataset, false); return savedDataset; } - + + @Override + public boolean onSuccess(CommandContext ctxt, Object r) { + boolean retVal = true; + Dataset dataset = (Dataset) r; + + try { + Future indexString = ctxt.index().indexDataset(dataset, false); + } catch (IOException | SolrServerException e) { + String failureLogText = "Post set dataset citation date indexing failed. 
You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString(); + failureLogText += "\r\n" + e.getLocalizedMessage(); + LoggingUtil.writeOnSuccessFailureLog(this, failureLogText, dataset); + retVal = false; + } + return retVal; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SubmitDatasetForReviewCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SubmitDatasetForReviewCommand.java index 78734521a82..d1ea2aee89b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SubmitDatasetForReviewCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SubmitDatasetForReviewCommand.java @@ -6,6 +6,7 @@ import edu.harvard.iq.dataverse.UserNotification; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.batch.util.LoggingUtil; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; @@ -13,10 +14,12 @@ import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import edu.harvard.iq.dataverse.util.BundleUtil; +import java.io.IOException; import java.sql.Timestamp; import java.util.Date; import java.util.List; import java.util.concurrent.Future; +import org.apache.solr.client.solrj.SolrServerException; @RequiredPermissions(Permission.EditDataset) public class SubmitDatasetForReviewCommand extends AbstractDatasetCommand { @@ -62,9 +65,23 @@ public Dataset save(CommandContext ctxt) throws CommandException { } // TODO: What should we do with the indexing result? Print it to the log? 
- boolean doNormalSolrDocCleanUp = true; - Future indexingResult = ctxt.index().indexDataset(savedDataset, doNormalSolrDocCleanUp); return savedDataset; } + + @Override + public boolean onSuccess(CommandContext ctxt, Object r) { + boolean retVal = true; + Dataset dataset = (Dataset) r; + + try { + Future indexString = ctxt.index().indexDataset(dataset, true); + } catch (IOException | SolrServerException e) { + String failureLogText = "Post submit for review indexing failed. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString(); + failureLogText += "\r\n" + e.getLocalizedMessage(); + LoggingUtil.writeOnSuccessFailureLog(this, failureLogText, dataset); + retVal = false; + } + return retVal; + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java index 22ac5d116de..0bcf11d371d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java @@ -3,18 +3,19 @@ import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; -import edu.harvard.iq.dataverse.datavariable.VarGroup; -import edu.harvard.iq.dataverse.datavariable.VariableMetadata; +import edu.harvard.iq.dataverse.batch.util.LoggingUtil; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; +import java.io.IOException; import java.util.ArrayList; -import 
java.util.Collection; import java.util.List; +import java.util.concurrent.Future; import java.util.logging.Level; import java.util.logging.Logger; +import org.apache.solr.client.solrj.SolrServerException; /** * @@ -198,7 +199,6 @@ public Dataset execute(CommandContext ctxt) throws CommandException { ctxt.em().flush(); updateDatasetUser(ctxt); - ctxt.index().indexDataset(savedDataset, true); if (clone != null) { DatasetVersionDifference dvd = new DatasetVersionDifference(editVersion, clone); AuthenticatedUser au = (AuthenticatedUser) getUser(); @@ -213,7 +213,27 @@ public Dataset execute(CommandContext ctxt) throws CommandException { ctxt.datasets().removeDatasetLocks(theDataset, DatasetLock.Reason.EditInProgress); } } + return savedDataset; } + + @Override + public boolean onSuccess(CommandContext ctxt, Object r) { + + boolean retVal = true; + Dataset dataset = (Dataset) r; + + try { + Future indexString = ctxt.index().indexDataset(dataset, true); + } catch (IOException | SolrServerException e) { + String failureLogText = "Post update dataset indexing failed. 
You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString(); + failureLogText += "\r\n" + e.getLocalizedMessage(); + LoggingUtil.writeOnSuccessFailureLog(this, failureLogText, dataset); + retVal = false; + } + + return retVal; + + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java index cf410d24008..0713cecec6b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java @@ -6,14 +6,19 @@ import edu.harvard.iq.dataverse.Dataverse.DataverseType; import edu.harvard.iq.dataverse.DataverseFieldTypeInputLevel; import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.batch.util.LoggingUtil; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.search.IndexResponse; +import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.concurrent.Future; import javax.persistence.TypedQuery; +import org.apache.solr.client.solrj.SolrServerException; /** * Update an existing dataverse. 
@@ -79,19 +84,26 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { } } - ctxt.index().indexDataverse(result); - - //When these values are changed we need to reindex all children datasets - //This check is not recursive as all the values just report the immediate parent - // - //This runs async to not slow down editing --MAD 4.9.4 - if(!oldDvType.equals(editedDv.getDataverseType()) - || !oldDvName.equals(editedDv.getName()) - || !oldDvAlias.equals(editedDv.getAlias())) { - List datasets = ctxt.datasets().findByOwnerId(editedDv.getId()); - ctxt.index().asyncIndexDatasetList(datasets, true); - } return result; } + + @Override + public boolean onSuccess(CommandContext ctxt, Object r) { + Dataverse result = (Dataverse) r; + + List datasets = ctxt.datasets().findByOwnerId(result.getId()); + try { + Future indResponse = ctxt.index().indexDataverse(result); + ctxt.index().asyncIndexDatasetList(datasets, true); + } catch (IOException | SolrServerException e) { + String failureLogText = "Indexing failed for Updated Dataverse. 
You can kickoff a re-index of this dataverse with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + result.getId().toString(); + failureLogText += "\r\n" + e.getLocalizedMessage(); + LoggingUtil.writeOnSuccessFailureLog(this, failureLogText, result); + return false; + } + return true; + } + } + diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index 5a829e80019..4ca53ad9fa4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -54,12 +54,15 @@ import javax.ejb.Stateless; import javax.ejb.TransactionAttribute; import static javax.ejb.TransactionAttributeType.REQUIRES_NEW; +import javax.inject.Inject; import javax.inject.Named; import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; import org.apache.commons.lang.StringUtils; +import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrServerException; +import org.apache.solr.client.solrj.impl.HttpSolrClient; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.client.solrj.response.UpdateResponse; import org.apache.solr.common.SolrDocument; @@ -126,13 +129,34 @@ public class IndexServiceBean { public static final String HARVESTED = "Harvested"; private String rootDataverseName; private Dataverse rootDataverseCached; + private SolrClient solrServer; + @PostConstruct + public void init() { + String urlString = "http://" + systemConfig.getSolrHostColonPort() + "/solr/collection1"; + solrServer = new HttpSolrClient.Builder(urlString).build(); + + rootDataverseName = findRootDataverseCached().getName(); + } + + @PreDestroy + public void close() { + if (solrServer != null) { + try { + solrServer.close(); + } catch (IOException e) { + logger.warning("Solr closing error: " + e); + 
} + solrServer = null; + } + } + @TransactionAttribute(REQUIRES_NEW) - public Future indexDataverseInNewTransaction(Dataverse dataverse) { + public Future indexDataverseInNewTransaction(Dataverse dataverse) throws SolrServerException, IOException{ return indexDataverse(dataverse); } - public Future indexDataverse(Dataverse dataverse) { + public Future indexDataverse(Dataverse dataverse) throws SolrServerException, IOException { logger.fine("indexDataverse called on dataverse id " + dataverse.getId() + "(" + dataverse.getAlias() + ")"); if (dataverse.getId() == null) { String msg = "unable to index dataverse. id was null (alias: " + dataverse.getAlias() + ")"; @@ -258,7 +282,7 @@ public Future indexDataverse(Dataverse dataverse) { } @TransactionAttribute(REQUIRES_NEW) - public Future indexDatasetInNewTransaction(Long datasetId) { //Dataset dataset) { + public Future indexDatasetInNewTransaction(Long datasetId) throws SolrServerException, IOException{ //Dataset dataset) { boolean doNormalSolrDocCleanUp = false; Dataset dataset = em.find(Dataset.class, datasetId); // return indexDataset(dataset, doNormalSolrDocCleanUp); @@ -268,18 +292,18 @@ public Future indexDatasetInNewTransaction(Long datasetId) { //Dataset d } @Asynchronous - public Future asyncIndexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) { + public Future asyncIndexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) throws SolrServerException, IOException { return indexDataset(dataset, doNormalSolrDocCleanUp); } @Asynchronous - public void asyncIndexDatasetList(List datasets, boolean doNormalSolrDocCleanUp) { + public void asyncIndexDatasetList(List datasets, boolean doNormalSolrDocCleanUp) throws SolrServerException, IOException { for(Dataset dataset : datasets) { indexDataset(dataset, true); } } - public Future indexDvObject(DvObject objectIn){ + public Future indexDvObject(DvObject objectIn) throws SolrServerException, IOException { if (objectIn.isInstanceofDataset() ){ return 
(indexDataset((Dataset)objectIn, true)); @@ -290,7 +314,7 @@ public Future indexDvObject(DvObject objectIn){ return null; } - public Future indexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) { + public Future indexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) throws SolrServerException, IOException { logger.fine("indexing dataset " + dataset.getId()); /** * @todo should we use solrDocIdentifierDataset or @@ -617,7 +641,7 @@ public Future indexDataset(Dataset dataset, boolean doNormalSolrDocClean return new AsyncResult<>(result); } } - + private String deleteDraftFiles(List solrDocIdsForDraftFilesToDelete) { String deleteDraftFilesResults = ""; IndexResponse indexResponse = solrIndexService.deleteMultipleSolrIds(solrDocIdsForDraftFilesToDelete); @@ -639,11 +663,11 @@ private IndexResponse indexDatasetPermissions(Dataset dataset) { return indexResponse; } - private String addOrUpdateDataset(IndexableDataset indexableDataset) { + private String addOrUpdateDataset(IndexableDataset indexableDataset) throws SolrServerException, IOException { return addOrUpdateDataset(indexableDataset, null); } - private String addOrUpdateDataset(IndexableDataset indexableDataset, Set datafilesInDraftVersion) { + private String addOrUpdateDataset(IndexableDataset indexableDataset, Set datafilesInDraftVersion) throws SolrServerException, IOException { IndexableDataset.DatasetState state = indexableDataset.getDatasetState(); Dataset dataset = indexableDataset.getDatasetVersion().getDataset(); logger.fine("adding or updating Solr document for dataset id " + dataset.getId()); @@ -1144,13 +1168,13 @@ private String addOrUpdateDataset(IndexableDataset indexableDataset, Set d try { solrClientService.getSolrClient().add(docs); - } catch (SolrServerException | IOException ex) { - return ex.toString(); - } - try { solrClientService.getSolrClient().commit(); } catch (SolrServerException | IOException ex) { - return ex.toString(); + if (ex.getCause() instanceof 
SolrServerException) { + throw new SolrServerException(ex); + } else if (ex.getCause() instanceof IOException) { + throw new IOException(ex); + } } Long dsId = dataset.getId(); diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/TestCommandContext.java b/src/test/java/edu/harvard/iq/dataverse/engine/TestCommandContext.java index 4fdc93f9a98..329f8509ec6 100644 --- a/src/test/java/edu/harvard/iq/dataverse/engine/TestCommandContext.java +++ b/src/test/java/edu/harvard/iq/dataverse/engine/TestCommandContext.java @@ -8,6 +8,7 @@ import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupServiceBean; import edu.harvard.iq.dataverse.confirmemail.ConfirmEmailServiceBean; import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleServiceBean; +import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.pidproviders.FakePidProviderServiceBean; @@ -20,6 +21,8 @@ import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.workflow.WorkflowServiceBean; +import java.util.List; +import java.util.Stack; import javax.persistence.EntityManager; /** @@ -241,4 +244,29 @@ public ConfirmEmailServiceBean confirmEmail() { public ActionLogServiceBean actionLog() { return null; } + + @Override + public void beginCommandSequence() { + throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates. + } + + @Override + public boolean completeCommandSequence(Command command) { + throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates. 
+ } + + @Override + public void cancelCommandSequence() { + throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates. + } + + @Override + public Stack getCommandsCalled() { + throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates. + } + + @Override + public void addCommand(Command command) { + throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates. + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/TestDataverseEngine.java b/src/test/java/edu/harvard/iq/dataverse/engine/TestDataverseEngine.java index b4289f55ea7..35350427af6 100644 --- a/src/test/java/edu/harvard/iq/dataverse/engine/TestDataverseEngine.java +++ b/src/test/java/edu/harvard/iq/dataverse/engine/TestDataverseEngine.java @@ -14,25 +14,28 @@ */ public class TestDataverseEngine implements DataverseEngine { - private final TestCommandContext ctxt; - + private final TestCommandContext ctxt; + private final Map> requiredPermissionsForObjects = new HashMap<>(); - - public TestDataverseEngine(TestCommandContext ctxt) { - this.ctxt = ctxt; - } + + public TestDataverseEngine(TestCommandContext ctxt) { + this.ctxt = ctxt; + } - @Override - public R submit(Command aCommand) throws CommandException { + @Override + public R submit(Command aCommand) throws CommandException { Map affectedDvs = aCommand.getAffectedDvObjects(); final Map> requiredPermissions = aCommand.getRequiredPermissions(); aCommand.getRequest(); - for ( String dvObjKey : affectedDvs.keySet() ) { - requiredPermissionsForObjects.put( affectedDvs.get(dvObjKey), requiredPermissions.get(dvObjKey) ); + for (String dvObjKey : affectedDvs.keySet()) { + requiredPermissionsForObjects.put(affectedDvs.get(dvObjKey), requiredPermissions.get(dvObjKey)); } - return aCommand.execute(ctxt); - } - + + R r = aCommand.execute(ctxt); + 
aCommand.onSuccess(ctxt, r); + return r; + } + public Map> getReqiredPermissionsForObjects() { return requiredPermissionsForObjects; }