Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
46 commits
Select commit Hold shift + click to select a range
0c73722
Improve error message
bbimber Jan 2, 2024
0a65541
Update Scatter/Gather logic to sort on contig size
bbimber Jan 4, 2024
59df489
Add server-side action to facilitate readset file archival
bbimber Jan 6, 2024
4c36b98
- Add additional fields to VCF/lucene indexing
bbimber Jan 7, 2024
06d625a
Update params on query URLs
bbimber Jan 8, 2024
002550f
Force inclusion of retainAmbiguousFeatures param
bbimber Jan 8, 2024
98cfa80
Fix loading lifecycle flow when getting session (#259)
bbimber Jan 9, 2024
fa471aa
Support maxLibrarySizeRatio for nimble
bbimber Jan 10, 2024
42d8c75
Bump follow-redirects from 1.15.3 to 1.15.4 in /jbrowse (#260)
dependabot[bot] Jan 11, 2024
f97bddb
Update defaults for TCR and update repseqio dependencies
bbimber Jan 13, 2024
d7df3ca
Allow alternate action name for MergeVcfsAndGenotypesHandler
bbimber Jan 16, 2024
5cdc941
Add column URL serialization (#261)
hextraza Jan 17, 2024
b2dc955
Add support for DeepVariant
bbimber Jan 19, 2024
ae8e9e4
Allow archived readsets for cell hashing
bbimber Jan 19, 2024
078b7ee
Add missing arguments to DeepVariant
bbimber Jan 19, 2024
089ccec
Second instance of allowing hashing and archived readsets
bbimber Jan 19, 2024
cd85ff2
Allow DeepVariant to run on CRAMs
bbimber Jan 19, 2024
8ea5625
Add new pipeline option to perform more frequent deletion of intermed…
bbimber Jan 20, 2024
3d7f838
Bugfix to DeepVariant docker command
bbimber Jan 20, 2024
a62258d
Bugfix to DeepVariant docker command
bbimber Jan 20, 2024
207b83e
Bugfix to DeepVariant docker command
bbimber Jan 22, 2024
c522d20
Bugfix to DeepVariant docker command
bbimber Jan 22, 2024
97ace2f
Dont set tmpdir for DeepVariant/docker
bbimber Jan 22, 2024
e4ddb61
Ensure DeepVariant/docker has FAI index
bbimber Jan 22, 2024
9419bbf
Support performCleanupAfterEachStep in more places of the UI
bbimber Jan 22, 2024
fd99e2a
Add --memory to deepvariant docker command
bbimber Jan 23, 2024
300300a
Add external TMPDIR to deepvariant docker command
bbimber Jan 23, 2024
f978134
Improve resume behavior for CRAM conversion
bbimber Jan 24, 2024
f50e5d8
Additional place to allow archived readsets
bbimber Jan 24, 2024
961ab8c
Correct arguments to BcftoolsFillTagsStep
bbimber Jan 24, 2024
9b6e109
Fix typo in CRAM conversion/resume logic
bbimber Jan 30, 2024
92b1f80
Add support for glnexus
bbimber Jan 31, 2024
c181b87
Allow folder-level setting of 10x defaults
bbimber Jan 31, 2024
abaee3d
Support ensureSamplesShareAllGenomes for AppendNimble
bbimber Jan 31, 2024
8c7216b
Bugfix to 10x setting panel when values are null
bbimber Jan 31, 2024
e3db53e
Correct name of GLNexus step
bbimber Jan 31, 2024
45d7f14
Update gVCF index locator in GLNexus step
bbimber Jan 31, 2024
8119303
Add calculated field and code to catch HTO libraries with single HTO
bbimber Jan 31, 2024
04a31f4
Update URLs for hashing/cite-seq feature counts
bbimber Jan 31, 2024
db93852
Add feature to auto-split cell/lane for 10x import
bbimber Jan 31, 2024
2407a00
Fix docker args in GLNexusHandler
bbimber Feb 1, 2024
00885c7
Fix docker args in GLNexusHandler
bbimber Feb 1, 2024
594ad2c
Update location of genome symlinks in mGAP
bbimber Feb 1, 2024
26471ac
Update glnexus arguments to match newest version
bbimber Feb 1, 2024
fcc8b13
Set workDir for DeepVariant/GLNexus
bbimber Feb 1, 2024
bb660ae
Merge discvr-23.11 to develop
bbimber Feb 2, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ public BcftoolsRunner(@Nullable Logger logger)
super(logger);
}

public File getBcfToolsPath()
/**
 * Resolves the bcftools executable for this deployment.
 * Delegates to SequencePipelineService, which honors the BCFTOOLSPATH
 * environment/config override before falling back to the "bcftools" default.
 * Static (no instance state is read), so callers can resolve the path without
 * constructing a runner.
 *
 * @return the bcftools binary to invoke
 */
public static File getBcfToolsPath()
{
return SequencePipelineService.get().getExeForPackage("BCFTOOLSPATH", "bcftools");
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,8 @@ public interface TaskFileManager extends PipelineOutputTracker

boolean isDeleteIntermediateFiles();

/**
 * If true, intermediate files are deleted after each pipeline step completes,
 * rather than once at the end of the job (reduces working-directory size).
 */
boolean performCleanupAfterEachStep();

boolean isCopyInputsLocally();

void addPicardMetricsFiles(List<PipelineStepOutput.PicardMetricsOutput> files) throws PipelineJobException;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,8 @@
<column name="workbook" />
<column name="sraRuns" />
<column name="isArchived" />
<column name="totalFiles" />
<column name="readdataWithoutSra" />
<column name="files" />
</columns>
<sorts>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -94,6 +94,14 @@ Ext4.define('SequenceAnalysis.panel.AlignmentImportPanel', {
inputValue: true,
checked: true,
xtype: 'checkbox'
},{
fieldLabel: 'Perform Cleanup After Each Step',
helpPopup: 'Is selected, intermediate files from this job will be deleted after each step, instead of once at the end of the job. This can reduce the working directory size. Note: this will only apply if deleteIntermediateFiles is selected, and this is not supported across every possible pipeline type.',
name: 'performCleanupAfterEachStep',
inputValue: true,
uncheckedValue: false,
checked: true,
xtype: 'checkbox'
},{
fieldLabel: 'Treatment of Input Files',
xtype: 'combo',
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -309,6 +309,14 @@ Ext4.define('SequenceAnalysis.panel.SequenceAnalysisPanel', {
uncheckedValue: false,
checked: true,
xtype: 'checkbox'
},{
fieldLabel: 'Perform Cleanup After Each Step',
helpPopup: 'Is selected, intermediate files from this job will be deleted after each step, instead of once at the end of the job. This can reduce the working directory size. Note: this will only apply if deleteIntermediateFiles is selected, and this is not supported across every possible pipeline type.',
name: 'performCleanupAfterEachStep',
inputValue: true,
uncheckedValue: false,
checked: true,
xtype: 'checkbox'
},{
fieldLabel: 'Copy Inputs To Working Directory?',
helpPopup: 'Check to copy the input files to the working directory. Depending on your environment, this may or may not help performance.',
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,14 @@ Ext4.define('SequenceAnalysis.panel.VariantProcessingPanel', {
inputValue: true,
checked: true,
xtype: 'checkbox'
},{
fieldLabel: 'Perform Cleanup After Each Step',
helpPopup: 'Is selected, intermediate files from this job will be deleted after each step, instead of once at the end of the job. This can reduce the working directory size. Note: this will only apply if deleteIntermediateFiles is selected, and this is not supported across every possible pipeline type.',
name: 'performCleanupAfterEachStep',
inputValue: true,
uncheckedValue: false,
checked: true,
xtype: 'checkbox'
}, this.getSaveTemplateCfg()]
};
},
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
Ext4.define('SequenceAnalysis.window.ArchiveReadsetsWindow', {
    extend: 'Ext.window.Window',

    statics: {
        /**
         * Entry point for the grid button: opens this window over the checked rows
         * of the named DataRegion.
         */
        buttonHandler: function(dataRegionName){
            Ext4.create('SequenceAnalysis.window.ArchiveReadsetsWindow', {
                dataRegionName: dataRegionName,
                readsetIds: LABKEY.DataRegions[dataRegionName].getChecked()
            }).show();
        }
    },

    initComponent: function() {
        Ext4.apply(this, {
            modal: true,
            title: 'Archive Readsets',
            width: 600,
            bodyStyle: 'padding: 5px;',
            defaults: {
                border: false
            },
            items: [{
                html: 'This helper will delete the actual FASTQ files associated with the selected readsets. It will error unless each readdata row has an SRA accession listed. You selected ' + this.readsetIds.length + ' readsets.',
                style: 'padding-bottom: 10px;'
            }],
            buttons: [{
                text: 'Submit',
                scope: this,
                handler: this.onSubmit
            },{
                text: 'Cancel',
                handler: function(btn){
                    btn.up('window').close();
                }
            }]
        });

        this.callParent(arguments);
    },

    /**
     * Posts the selected readset IDs to the archiveReadsets server action.
     */
    onSubmit: function(btn){
        if (!this.readsetIds.length) {
            Ext4.Msg.alert('Error', 'No readsets selected!');
            return;
        }

        Ext4.Msg.wait('Saving...');
        LABKEY.Ajax.request({
            url: LABKEY.ActionURL.buildURL('sequenceanalysis', 'archiveReadsets', null),
            method: 'POST',
            jsonData: {
                readsetIds: this.readsetIds
            },
            scope: this,
            success: function(){
                Ext4.Msg.hide();
                this.close();
                Ext4.Msg.alert('Success', 'Readsets archived!', function(){
                    // Fix: refresh() was previously called outside this guard, which
                    // would throw on LABKEY.DataRegions[undefined] when no region name
                    // was supplied. All DataRegion access now sits inside the check.
                    if (this.dataRegionName){
                        LABKEY.DataRegions[this.dataRegionName].clearSelected();
                        LABKEY.DataRegions[this.dataRegionName].refresh();
                    }
                }, this);
            },
            failure: LABKEY.Utils.getCallbackWrapper(LDK.Utils.getErrorCallback())
        });
    }
});
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,7 @@
import org.labkey.api.exp.api.ExperimentService;
import org.labkey.api.files.FileContentService;
import org.labkey.api.laboratory.NavItem;
import org.labkey.api.laboratory.security.LaboratoryAdminPermission;
import org.labkey.api.module.Module;
import org.labkey.api.module.ModuleHtmlView;
import org.labkey.api.module.ModuleLoader;
Expand Down Expand Up @@ -3297,6 +3298,15 @@ public ApiResponse execute(CheckFileStatusForm form, BindException errors)

toolArr.put(intermediateFiles);

// Expose the performCleanupAfterEachStep job option to the client-side pipeline UI.
// NOTE: description previously began "Is selected" — corrected to "If selected".
JSONObject performCleanupAfterEachStep = new JSONObject();
performCleanupAfterEachStep.put("name", "performCleanupAfterEachStep");
performCleanupAfterEachStep.put("defaultValue", true);
performCleanupAfterEachStep.put("label", "Perform Cleanup After Each Step");
performCleanupAfterEachStep.put("description", "If selected, intermediate files from this job will be deleted after each step, instead of once at the end of the job. This can reduce the working directory size. Note: this will only apply if deleteIntermediateFiles is selected, and this is not supported across every possible pipeline type.");
performCleanupAfterEachStep.put("fieldXtype", "checkbox");

toolArr.put(performCleanupAfterEachStep);

ret.put("toolParameters", toolArr);

ret.put("description", handler.getDescription());
Expand Down Expand Up @@ -5128,4 +5138,157 @@ public void setDataFileUrl(String dataFileUrl)
_dataFileUrl = dataFileUrl;
}
}

@RequiresPermission(UpdatePermission.class)
public static class ArchiveReadsetsAction extends MutatingApiAction<ArchiveReadsetsForm>
{
    /**
     * Marks the readdata rows of the selected readsets as archived and deletes their
     * underlying FASTQ files. Unless the form's doNotRequireSra flag is set, every
     * readdata row must carry an SRA accession, so the data remains recoverable.
     * Other readdata rows sharing the same physical file are archived as well.
     *
     * Fixes vs. prior version: the doNotRequireSra form field was accepted but never
     * consulted; File.delete() failures were silently ignored.
     */
    @Override
    public ApiResponse execute(ArchiveReadsetsForm form, BindException errors) throws Exception
    {
        if (form.getReadsetIds() == null || form.getReadsetIds().length == 0)
        {
            errors.reject(ERROR_MSG, "No readset Ids provided");
            return null;
        }

        TableInfo readData = QueryService.get().getUserSchema(getUser(), getContainer(), SequenceAnalysisSchema.SCHEMA_NAME).getTable(SequenceAnalysisSchema.TABLE_READ_DATA);
        for (int readsetId : form.getReadsetIds())
        {
            Readset rs = SequenceAnalysisService.get().getReadset(readsetId, getUser());
            Container c = ContainerManager.getForId(rs.getContainer());
            if (!getContainer().equals(c))
            {
                // Readsets living in a workbook child of the current folder are allowed
                Container toTest = c.isWorkbook() ? c.getParent() : c;
                if (!getContainer().equals(toTest))
                {
                    errors.reject(ERROR_MSG, "Readset is not from this container: " + readsetId);
                    return null;
                }
            }

            if (!c.hasPermission(getUser(), LaboratoryAdminPermission.class))
            {
                errors.reject(ERROR_MSG, "Insufficient permissions to archive readsets in the folder: " + c.getPath());
                return null;
            }

            Set<File> toDelete = new HashSet<>();
            List<Map<String, Object>> toUpdate = new ArrayList<>();
            for (ReadData rd : rs.getReadData())
            {
                // Honor the form's escape hatch; previously this flag was never read
                if (rd.getSra_accession() == null && !form.isDoNotRequireSra())
                {
                    errors.reject(ERROR_MSG, "Cannot mark a readdata as archived that does not have an SRA accession: " + readsetId + " / " + rd.getRowid());
                    return null;
                }

                toUpdate.add(new CaseInsensitiveHashMap<>(Map.of("rowid", rd.getRowid(), "archived", true, "container", rd.getContainer())));

                // File 1:
                ExpData d1 = ExperimentService.get().getExpData(rd.getFileId1());
                if (d1 != null)
                {
                    File file1 = d1.getFile();
                    if (file1 != null && file1.exists())
                    {
                        toDelete.add(file1);
                    }

                    // Any other readdata rows pointing at the same file must be archived too
                    archiveReadDataSharingFile(readData, "fileid1/dataFileUrl", d1.getDataFileUrl(), rd.getRowid(), toUpdate);
                }

                if (rd.getFileId2() != null)
                {
                    ExpData d2 = ExperimentService.get().getExpData(rd.getFileId2());
                    if (d2 != null)
                    {
                        File file2 = d2.getFile();
                        if (file2 != null)
                        {
                            if (file2.exists())
                            {
                                toDelete.add(file2);
                            }

                            archiveReadDataSharingFile(readData, "fileid2/dataFileUrl", d2.getDataFileUrl(), rd.getRowid(), toUpdate);
                        }
                    }
                }
            }

            if (!toUpdate.isEmpty())
            {
                List<Map<String, Object>> keys = new ArrayList<>();
                toUpdate.forEach(row -> keys.add(new CaseInsensitiveHashMap<>(Map.of("rowid", row.get("rowid")))));

                try
                {
                    readData.getUpdateService().updateRows(getUser(), getContainer(), toUpdate, keys, null, null);
                }
                catch (Exception e)
                {
                    // Preserve the cause and context in the log, not just the message
                    _log.error("Error archiving readset: " + readsetId, e);
                    errors.reject(ERROR_MSG, "Error archiving readset: " + readsetId + ", " + e.getMessage());
                    return null;
                }
            }

            // Only delete files after the rows were successfully flagged as archived
            for (File f : toDelete)
            {
                _log.info("Deleting archived file: " + f.getPath());
                if (!f.delete())
                {
                    // Previously the boolean result was dropped; surface failures
                    _log.error("Unable to delete file: " + f.getPath());
                }
            }
        }

        return new ApiSimpleResponse("Success", true);
    }

    /**
     * Adds an "archived" update row for every other readdata record whose file URL
     * matches the given one (excluding the row already being processed). The prior
     * ts.exists() pre-check was redundant — forEachResults is a no-op on empty results.
     */
    private void archiveReadDataSharingFile(TableInfo readData, String fileUrlField, String dataFileUrl, int rowIdToSkip, List<Map<String, Object>> toUpdate)
    {
        SimpleFilter filter = new SimpleFilter(FieldKey.fromString(fileUrlField), dataFileUrl).addCondition(FieldKey.fromString("rowid"), rowIdToSkip, CompareType.NEQ);
        TableSelector ts = new TableSelector(readData, PageFlowUtil.set("rowid", "container"), filter, null);
        ts.forEachResults(r -> {
            toUpdate.add(new CaseInsensitiveHashMap<>(Map.of("rowid", r.getInt(FieldKey.fromString("rowid")), "archived", true, "container", r.getString(FieldKey.fromString("container")))));
        });
    }
}

public static class ArchiveReadsetsForm
{
    // IDs of the readsets whose FASTQ files should be archived; bound from posted JSON
    private int[] _readsetIds;

    // When true, readdata rows lacking an SRA accession are still accepted
    private boolean _doNotRequireSra;

    public int[] getReadsetIds()
    {
        return _readsetIds;
    }

    public void setReadsetIds(int... ids)
    {
        _readsetIds = ids;
    }

    public boolean isDoNotRequireSra()
    {
        return _doNotRequireSra;
    }

    public void setDoNotRequireSra(boolean skipSraCheck)
    {
        _doNotRequireSra = skipSraCheck;
    }
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,8 @@
import org.labkey.sequenceanalysis.analysis.BamHaplotypeHandler;
import org.labkey.sequenceanalysis.analysis.CombineStarGeneCountsHandler;
import org.labkey.sequenceanalysis.analysis.CombineSubreadGeneCountsHandler;
import org.labkey.sequenceanalysis.analysis.DeepVariantHandler;
import org.labkey.sequenceanalysis.analysis.GLNexusHandler;
import org.labkey.sequenceanalysis.analysis.GenotypeGVCFHandler;
import org.labkey.sequenceanalysis.analysis.HaplotypeCallerHandler;
import org.labkey.sequenceanalysis.analysis.LiftoverHandler;
Expand All @@ -58,6 +60,7 @@
import org.labkey.sequenceanalysis.analysis.SbtGeneCountHandler;
import org.labkey.sequenceanalysis.analysis.UnmappedSequenceBasedGenotypeHandler;
import org.labkey.sequenceanalysis.button.AddSraRunButton;
import org.labkey.sequenceanalysis.button.ArchiveReadsetsButton;
import org.labkey.sequenceanalysis.button.ChangeReadsetStatusButton;
import org.labkey.sequenceanalysis.button.ChangeReadsetStatusForAnalysesButton;
import org.labkey.sequenceanalysis.button.DownloadSraButton;
Expand All @@ -77,25 +80,7 @@
import org.labkey.sequenceanalysis.run.alignment.Pbmm2Wrapper;
import org.labkey.sequenceanalysis.run.alignment.StarWrapper;
import org.labkey.sequenceanalysis.run.alignment.VulcanWrapper;
import org.labkey.sequenceanalysis.run.analysis.BamIterator;
import org.labkey.sequenceanalysis.run.analysis.BcftoolsFillTagsStep;
import org.labkey.sequenceanalysis.run.analysis.DeepVariantAnalysis;
import org.labkey.sequenceanalysis.run.analysis.ExportOverlappingReadsAnalysis;
import org.labkey.sequenceanalysis.run.analysis.GenrichStep;
import org.labkey.sequenceanalysis.run.analysis.HaplotypeCallerAnalysis;
import org.labkey.sequenceanalysis.run.analysis.ImmunoGenotypingAnalysis;
import org.labkey.sequenceanalysis.run.analysis.LofreqAnalysis;
import org.labkey.sequenceanalysis.run.analysis.MergeLoFreqVcfHandler;
import org.labkey.sequenceanalysis.run.analysis.NextCladeHandler;
import org.labkey.sequenceanalysis.run.analysis.PARalyzerAnalysis;
import org.labkey.sequenceanalysis.run.analysis.PangolinHandler;
import org.labkey.sequenceanalysis.run.analysis.PbsvAnalysis;
import org.labkey.sequenceanalysis.run.analysis.PbsvJointCallingHandler;
import org.labkey.sequenceanalysis.run.analysis.PindelAnalysis;
import org.labkey.sequenceanalysis.run.analysis.SequenceBasedTypingAnalysis;
import org.labkey.sequenceanalysis.run.analysis.SnpCountAnalysis;
import org.labkey.sequenceanalysis.run.analysis.SubreadAnalysis;
import org.labkey.sequenceanalysis.run.analysis.UnmappedReadExportHandler;
import org.labkey.sequenceanalysis.run.analysis.ViralAnalysis;
import org.labkey.sequenceanalysis.run.assembly.TrinityRunner;
import org.labkey.sequenceanalysis.run.bampostprocessing.AddOrReplaceReadGroupsStep;
import org.labkey.sequenceanalysis.run.bampostprocessing.BaseQualityScoreRecalibrator;
Expand Down Expand Up @@ -280,6 +265,7 @@ public static void registerPipelineSteps()
SequencePipelineService.get().registerPipelineStep(new ImmunoGenotypingAnalysis.Provider());
SequencePipelineService.get().registerPipelineStep(new ViralAnalysis.Provider());
SequencePipelineService.get().registerPipelineStep(new HaplotypeCallerAnalysis.Provider());
SequencePipelineService.get().registerPipelineStep(new DeepVariantAnalysis.Provider());
SequencePipelineService.get().registerPipelineStep(new SnpCountAnalysis.Provider());
SequencePipelineService.get().registerPipelineStep(new ExportOverlappingReadsAnalysis.Provider());
SequencePipelineService.get().registerPipelineStep(new SubreadAnalysis.Provider());
Expand Down Expand Up @@ -346,6 +332,8 @@ public static void registerPipelineSteps()
SequenceAnalysisService.get().registerFileHandler(new NextCladeHandler());
SequenceAnalysisService.get().registerFileHandler(new ConvertToCramHandler());
SequenceAnalysisService.get().registerFileHandler(new PbsvJointCallingHandler());
SequenceAnalysisService.get().registerFileHandler(new DeepVariantHandler());
SequenceAnalysisService.get().registerFileHandler(new GLNexusHandler());

SequenceAnalysisService.get().registerReadsetHandler(new MultiQCHandler());
SequenceAnalysisService.get().registerReadsetHandler(new RestoreSraDataHandler());
Expand Down Expand Up @@ -396,9 +384,10 @@ public void doStartupAfterSpringConfig(ModuleContext moduleContext)
LDKService.get().registerQueryButton(new AddSraRunButton(), SequenceAnalysisSchema.SCHEMA_NAME, SequenceAnalysisSchema.TABLE_READSETS);
LDKService.get().registerQueryButton(new RunMultiQCButton(), SequenceAnalysisSchema.SCHEMA_NAME, SequenceAnalysisSchema.TABLE_READSETS);
LDKService.get().registerQueryButton(new DownloadSraButton(), SequenceAnalysisSchema.SCHEMA_NAME, SequenceAnalysisSchema.TABLE_READSETS);
LDKService.get().registerQueryButton(new ArchiveReadsetsButton(), SequenceAnalysisSchema.SCHEMA_NAME, SequenceAnalysisSchema.TABLE_READSETS);

LDKService.get().registerQueryButton(new ChangeReadsetStatusForAnalysesButton(), "sequenceanalysis", "sequence_analyses");
LDKService.get().registerQueryButton(new ChangeReadsetStatusButton(), "sequenceanalysis", "sequence_readsets");
LDKService.get().registerQueryButton(new ChangeReadsetStatusForAnalysesButton(), SequenceAnalysisSchema.SCHEMA_NAME, SequenceAnalysisSchema.TABLE_ANALYSES);
LDKService.get().registerQueryButton(new ChangeReadsetStatusButton(), SequenceAnalysisSchema.SCHEMA_NAME, SequenceAnalysisSchema.TABLE_READSETS);

ExperimentService.get().registerExperimentRunTypeSource(new ExperimentRunTypeSource()
{
Expand Down
Loading