Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
39 commits
Select commit Hold shift + click to select a range
6401735
Merge branch 'develop' into 4425-add-onSuccess-method-to-commands
sekmiller May 1, 2019
abd65a3
Merge branch 'develop' into 4425-add-onSuccess-method-to-commands
sekmiller May 10, 2019
bed2acf
Merge branch 'develop' into 4425-add-onSuccess-method-to-commands
sekmiller May 13, 2019
7e934ea
#4425 POC add onSuccess method outside transaction
sekmiller May 13, 2019
0ef3f03
Merge branch 'develop' into 4425-add-onSuccess-method-to-commands
sekmiller May 15, 2019
ce3f8eb
#4425 remove debug timing code
sekmiller May 15, 2019
a275908
#4425 only trigger rollback if the inner command is submitted
sekmiller May 15, 2019
8ca4fce
#4425 test for existing transaction before rollback
sekmiller May 15, 2019
2db1270
Merge branch 'develop' into 4425-add-onSuccess-method-to-commands
sekmiller May 15, 2019
b665536
Merge branch 'develop' into 4425-add-onSuccess-method-to-commands
sekmiller May 15, 2019
8b3e5c7
Merge branch 'develop' into 4425-add-onSuccess-method-to-commands
sekmiller May 16, 2019
4eceecc
Merge branch 'develop' into 4425-add-onSuccess-method-to-commands
sekmiller May 20, 2019
5e23173
#4425 remove extra debug code
sekmiller May 20, 2019
d60a14e
#4425 Add onSuccess to other commands fix createDataverse error
sekmiller May 22, 2019
1b94342
Merge branch 'develop' into 4425-add-onSuccess-method-to-commands
sekmiller May 22, 2019
995d459
#4425 fix formatting
sekmiller May 23, 2019
a621ab8
Merge branch 'develop' into 4425-add-onSuccess-method-to-commands
sekmiller May 23, 2019
14a28a5
#4425 Implement OnSuccess for nested commands
sekmiller Jun 5, 2019
f20e109
Merge branch 'develop' into 4425-add-onSuccess-method-to-commands
sekmiller Jun 5, 2019
87632c6
#4425 clean up debug code
sekmiller Jun 6, 2019
70c174a
#4425 test for empty command list
sekmiller Jun 6, 2019
48276da
Merge branch 'develop' into 4425-add-onSuccess-method-to-commands
sekmiller Jun 6, 2019
4078c96
#4425 add indexing error handling to commands beans
sekmiller Jun 28, 2019
fcf54ec
Merge branch 'develop' into 4425-add-onSuccess-method-to-commands
sekmiller Jun 28, 2019
32a65fa
#4425 remove debug code
sekmiller Jul 1, 2019
aa8ad6f
#4425 add comments for onSuccess processing
sekmiller Jul 1, 2019
3941ade
#4425 code cleanup make completeCommand method void
sekmiller Jul 2, 2019
14b2d53
#4425 add documentation of index failure logging
sekmiller Jul 3, 2019
51da132
#4425 return notification email functionality
sekmiller Jul 3, 2019
4ee57c0
#4425 remove dead code
sekmiller Jul 9, 2019
63f4d89
#4425 more dead code
sekmiller Jul 9, 2019
1777aad
Merge branch 'develop' into 4425-add-onSuccess-method-to-commands
sekmiller Jul 11, 2019
9ff59b6
#4425 remove unused imports
sekmiller Jul 11, 2019
6dcdd90
Merge branch 'develop' into 4425-add-onSuccess-method-to-commands
sekmiller Jul 16, 2019
cd105d3
Merge branch 'develop' into 4425-add-onSuccess-method-to-commands
sekmiller Jul 22, 2019
86a9738
#4425 modifications suggested by Michael B-S.
sekmiller Jul 24, 2019
9560a28
#4425 fix typo in comment. add some detail
sekmiller Jul 24, 2019
6e1fc4a
Merge branch 'develop' into 4425-add-onSuccess-method-to-commands
sekmiller Aug 1, 2019
c117f2d
#4425 fix logging of index errors
sekmiller Aug 1, 2019
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions doc/sphinx-guides/source/admin/monitoring.rst
Original file line number Diff line number Diff line change
Expand Up @@ -108,6 +108,11 @@ Edit Draft Versions Logging

Changes made to draft versions of datasets are logged in a folder called logs/edit-drafts. See https://github.com/IQSS/dataverse/issues/5145 for more information on this logging.

Solr Indexing Failures Logging
------------------------------

Failures occurring during the indexing of dataverses and datasets are logged in a folder called logs/process-failures. Each log entry includes instructions for manually re-running the failed process. It may be advantageous to set up an automatic job that monitors this log folder for new entries so that failed indexing jobs can be re-run promptly.

EJB Timers
----------

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
import edu.harvard.iq.dataverse.util.BundleUtil;
import edu.harvard.iq.dataverse.util.MarkupChecker;
import edu.harvard.iq.dataverse.util.SystemConfig;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
Expand All @@ -34,6 +35,7 @@
import javax.persistence.Query;
import javax.persistence.TypedQuery;
import org.apache.commons.lang.StringUtils;
import org.apache.solr.client.solrj.SolrServerException;

/**
*
Expand Down Expand Up @@ -1093,8 +1095,13 @@ public JsonObjectBuilder fixMissingUnf(String datasetVersionId, boolean forceRec

// reindexing the dataset, to make sure the new UNF is in SOLR:
boolean doNormalSolrDocCleanUp = true;
Future<String> indexingResult = indexService.indexDataset(datasetVersion.getDataset(), doNormalSolrDocCleanUp);

try {
Future<String> indexingResult = indexService.indexDataset(datasetVersion.getDataset(), doNormalSolrDocCleanUp);
} catch (IOException | SolrServerException e) {
String failureLogText = "Post UNF update indexing failed. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + datasetVersion.getDataset().getId().toString();
failureLogText += "\r\n" + e.getLocalizedMessage();
LoggingUtil.writeOnSuccessFailureLog(null, failureLogText, datasetVersion.getDataset());
}
return info;
}

Expand Down
13 changes: 12 additions & 1 deletion src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java
Original file line number Diff line number Diff line change
Expand Up @@ -12,12 +12,14 @@
import edu.harvard.iq.dataverse.authorization.groups.GroupServiceBean;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
import edu.harvard.iq.dataverse.authorization.users.User;
import edu.harvard.iq.dataverse.batch.util.LoggingUtil;
import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.search.IndexServiceBean;
import edu.harvard.iq.dataverse.search.SolrSearchResult;
import edu.harvard.iq.dataverse.util.SystemConfig;
import java.io.File;
import java.io.IOException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Date;
Expand All @@ -41,6 +43,7 @@
import javax.persistence.PersistenceContext;
import javax.persistence.TypedQuery;
import javax.ws.rs.core.Response;
import org.apache.solr.client.solrj.SolrServerException;

/**
*
Expand Down Expand Up @@ -90,8 +93,16 @@ public Dataverse save(Dataverse dataverse) {
Dataverse savedDataverse = em.merge(dataverse);
/**
* @todo check the result to see if indexing was successful or not
* added logging of exceptions
*/
Future<String> indexingResult = indexService.indexDataverse(savedDataverse);
try {
Future<String> indexingResult = indexService.indexDataverse(savedDataverse);
} catch (IOException | SolrServerException e) {
String failureLogText = "Post-save indexing failed. You can kickoff a re-index of this dataverse with: \r\n curl http://localhost:8080/api/admin/index/dataverses/" + savedDataverse.getId().toString();
failureLogText += "\r\n" + e.getLocalizedMessage();
LoggingUtil.writeOnSuccessFailureLog(null, failureLogText, savedDataverse);
}

// logger.log(Level.INFO, "during dataverse save, indexing result was: {0}", indexingResult);
return savedDataverse;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1434,7 +1434,8 @@ public String save() {
//if (newDraftVersion) {
// return returnToDraftVersionById();
//}
indexService.indexDataset(dataset, true);
// indexService.indexDataset(dataset, true);
// indexing is handled by the commands
logger.fine("Redirecting to the dataset page, from the edit/upload page.");
return returnToDraftVersion();
}
Expand Down
97 changes: 91 additions & 6 deletions src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java
Original file line number Diff line number Diff line change
Expand Up @@ -32,13 +32,15 @@
import edu.harvard.iq.dataverse.util.SystemConfig;
import edu.harvard.iq.dataverse.workflow.WorkflowServiceBean;
import java.util.EnumSet;
import java.util.Stack;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.annotation.Resource;
import javax.ejb.EJBContext;
import javax.ejb.EJBException;
import javax.ejb.TransactionAttribute;
import static javax.ejb.TransactionAttributeType.REQUIRES_NEW;
import static javax.ejb.TransactionAttributeType.SUPPORTS;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.validation.ConstraintViolation;
Expand Down Expand Up @@ -177,6 +179,9 @@ public class EjbDataverseEngine {
@EJB
ConfirmEmailServiceBean confirmEmailService;

@EJB
EjbDataverseEngineInner innerEngine;


@Resource
EJBContext ejbCtxt;
Expand All @@ -187,11 +192,19 @@ public class EjbDataverseEngine {
/**
 * Submits a command for execution; delegates to {@code submit(aCommand)}.
 *
 * NOTE(review): despite the name, this method simply delegates — the
 * new-transaction semantics presumably come from a transaction attribute
 * annotation not visible in this chunk (e.g. REQUIRES_NEW); confirm the
 * annotation is present on this method in the full source.
 *
 * @param aCommand the command to execute
 * @return the result produced by the command
 * @throws CommandException if command execution fails
 */
public <R> R submitInNewTransaction(Command<R> aCommand) throws CommandException {
return submit(aCommand);
}

// Narrows a generic command result to a DvObject via an unchecked cast.
// Throws ClassCastException at runtime if the result is not a DvObject.
// NOTE(review): no callers are visible in this chunk — confirm this helper
// is actually used, or remove it as dead code.
private DvObject getRetType(Object r){

return (DvObject) r;

}


@TransactionAttribute(SUPPORTS)
public <R> R submit(Command<R> aCommand) throws CommandException {

final ActionLogRecord logRec = new ActionLogRecord(ActionLogRecord.ActionType.Command, aCommand.getClass().getCanonicalName());

try {
logRec.setUserIdentifier( aCommand.getRequest().getUser().getIdentifier() );

Expand Down Expand Up @@ -233,7 +246,22 @@ public <R> R submit(Command<R> aCommand) throws CommandException {
}
}
try {
return aCommand.execute(getContext());
if (getContext().getCommandsCalled() == null){
getContext().beginCommandSequence();
}
getContext().addCommand(aCommand);
//This list of commands is held by the outermost command's context
//to be run on completeCommand method when the outermost command is completed
Stack<Command> previouslyCalled = getContext().getCommandsCalled();
R r = innerEngine.submit(aCommand, getContext());
if (getContext().getCommandsCalled().empty() && !previouslyCalled.empty()){
for (Command c: previouslyCalled){
getContext().getCommandsCalled().add(c);
}
}
//This runs the onSuccess Methods for all commands in the stack when the outermost command completes
this.completeCommand(aCommand, r, getContext().getCommandsCalled());
return r;

} catch ( EJBException ejbe ) {
throw new CommandException("Command " + aCommand.toString() + " failed: " + ejbe.getMessage(), ejbe.getCausedByException(), aCommand);
Expand Down Expand Up @@ -267,20 +295,61 @@ public <R> R submit(Command<R> aCommand) throws CommandException {
throw re;

} finally {
if ( logRec.getActionResult() == null ) {
logRec.setActionResult( ActionLogRecord.Result.OK );
//when we get here we need to wipe out the command list so that
//failed commands don't have their onSuccess methods run.
getContext().cancelCommandSequence();
if (logRec.getActionResult() == null) {
logRec.setActionResult(ActionLogRecord.Result.OK);
} else {
ejbCtxt.setRollbackOnly();
try{
ejbCtxt.setRollbackOnly();
} catch (IllegalStateException isEx){
//Not in a transaction nothing to rollback
}
}
logRec.setEndTime( new java.util.Date() );
logRec.setEndTime(new java.util.Date());
logSvc.log(logRec);
}
}

/**
 * Runs the onSuccess handlers for a sequence of (possibly nested) commands,
 * but only once the OUTERMOST command of the sequence has completed.
 *
 * @param command the command that just finished executing
 * @param r       the result returned by that command's execute()
 * @param called  stack of all commands submitted during this sequence,
 *                in submission order (outermost command at the bottom)
 */
protected void completeCommand(Command command, Object r, Stack<Command> called) {

// empty stack: nothing was submitted, or the sequence was cancelled
if (called.isEmpty()){
return;
}

// get(0) is the bottom of the stack, i.e. the first (outermost) command submitted
Command test = called.get(0);
if (!test.equals(command)) {
//if it's not the first command on the stack it must be an "inner" command
//and we don't want to run its onSuccess until all commands have completed successfully
return;
}

// Stack extends Vector, so for-each iterates bottom-to-top: the outermost
// command's onSuccess runs first, then each nested command in submission order.
// NOTE(review): every onSuccess receives the OUTERMOST command's result `r`,
// not its own result — confirm this is the intended contract.
for (Command commandLoop : called) {
commandLoop.onSuccess(ctxt, r);
}

}


public CommandContext getContext() {
if (ctxt == null) {
ctxt = new CommandContext() {

public Stack<Command> commandsCalled;

@Override
public void addCommand (Command command){
commandsCalled.push(command);
}


@Override
public Stack<Command> getCommandsCalled(){
return commandsCalled;
}


@Override
public DatasetServiceBean datasets() {
return datasetService;
Expand Down Expand Up @@ -495,6 +564,22 @@ public ActionLogServiceBean actionLog() {
return logSvc;
}

@Override
public void beginCommandSequence() {
this.commandsCalled = new Stack();
}

@Override
public boolean completeCommandSequence(Command command) {
this.commandsCalled.clear();
return true;
}

@Override
public void cancelCommandSequence() {
this.commandsCalled = new Stack();
}

};
}

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@

package edu.harvard.iq.dataverse;

import edu.harvard.iq.dataverse.engine.command.Command;
import edu.harvard.iq.dataverse.engine.command.CommandContext;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import javax.ejb.Stateless;
import javax.ejb.TransactionAttribute;
import static javax.ejb.TransactionAttributeType.REQUIRED;

import javax.inject.Named;

/**
 * Helper bean that performs the actual execute() call for a command.
 *
 * <p>The submit method here runs with a transaction attribute of REQUIRED so
 * that a failure during command execution causes a rollback, while the outer
 * engine (EjbDataverseEngine) runs with SUPPORTS. This split keeps a failure
 * in a command's post-completion (onSuccess) processing from rolling back the
 * transaction of the command that already completed.
 *
 * @author skraffmi
 */
@Stateless
@Named
public class EjbDataverseEngineInner {

    /**
     * Executes the given command within a container-managed transaction.
     *
     * @param aCommand the command to run
     * @param ctxt     the context the command executes against
     * @return the value produced by the command's execute()
     * @throws CommandException if the command reports a failure
     */
    @TransactionAttribute(REQUIRED)
    public <R> R submit(Command<R> aCommand, CommandContext ctxt) throws CommandException {
        final R result = aCommand.execute(ctxt);
        return result;
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -241,6 +241,7 @@ public Boolean sendNotificationEmail(UserNotification notification, String comme
logger.warning("Skipping " + notification.getType() + " notification, because email address is null");
}
return retval;

}

private String getDatasetManageFileAccessLink(DataFile datafile){
Expand Down
30 changes: 27 additions & 3 deletions src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,7 @@
import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
import edu.harvard.iq.dataverse.S3PackageImporter;
import static edu.harvard.iq.dataverse.api.AbstractApiBean.error;
import edu.harvard.iq.dataverse.batch.util.LoggingUtil;
import edu.harvard.iq.dataverse.dataaccess.StorageIO;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import edu.harvard.iq.dataverse.engine.command.exception.PermissionException;
Expand Down Expand Up @@ -145,6 +146,7 @@
import javax.ws.rs.core.Response;
import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
import javax.ws.rs.core.UriInfo;
import org.apache.solr.client.solrj.SolrServerException;
import org.glassfish.jersey.media.multipart.FormDataBodyPart;
import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
import org.glassfish.jersey.media.multipart.FormDataParam;
Expand Down Expand Up @@ -1681,7 +1683,14 @@ public Response deleteLocks(@PathParam("identifier") String id, @QueryParam("typ
}
// kick off dataset reindexing, in case the locks removed
// affected the search card:
indexService.indexDataset(dataset, true);
try {
indexService.indexDataset(dataset, true);
} catch (IOException | SolrServerException e) {
String failureLogText = "Post lock removal indexing failed. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString();
failureLogText += "\r\n" + e.getLocalizedMessage();
LoggingUtil.writeOnSuccessFailureLog(null, failureLogText, dataset);

}
return ok("locks removed");
}
return ok("dataset not locked");
Expand All @@ -1694,7 +1703,14 @@ public Response deleteLocks(@PathParam("identifier") String id, @QueryParam("typ
dataset = findDatasetOrDie(id);
// ... and kick off dataset reindexing, in case the lock removed
// affected the search card:
indexService.indexDataset(dataset, true);
try {
indexService.indexDataset(dataset, true);
} catch (IOException | SolrServerException e) {
String failureLogText = "Post lock removal indexing failed. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString();
failureLogText += "\r\n" + e.getLocalizedMessage();
LoggingUtil.writeOnSuccessFailureLog(null, failureLogText, dataset);

}
return ok("lock type " + lock.getReason() + " removed");
}
return ok("no lock type " + lockType + " on the dataset");
Expand Down Expand Up @@ -1725,7 +1741,15 @@ public Response lockDataset(@PathParam("identifier") String id, @PathParam("type
// refresh the dataset:
dataset = findDatasetOrDie(id);
// ... and kick off dataset reindexing:
indexService.indexDataset(dataset, true);
try {
indexService.indexDataset(dataset, true);
} catch (IOException | SolrServerException e) {
String failureLogText = "Post add lock indexing failed. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString();
failureLogText += "\r\n" + e.getLocalizedMessage();
LoggingUtil.writeOnSuccessFailureLog(null, failureLogText, dataset);

}

return ok("dataset locked with lock type " + lockType);
} catch (WrappedResponse wr) {
return wr.getResponse();
Expand Down
Loading