Skip to content

Commit d9a46d2

Browse files
committed
merge discvr19.1 r64469-64543
SVN r64545 | 2019-09-25 00:01:30 +0000
1 parent 87689ca commit d9a46d2

File tree

10 files changed

+82
-49
lines changed

10 files changed

+82
-49
lines changed

SequenceAnalysis/api-src/org/labkey/api/sequenceanalysis/pipeline/SequenceOutputHandler.java

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -77,6 +77,16 @@ default boolean requiresSingleGenome()
7777

7878
public boolean canProcess(SequenceOutputFile o);
7979

80+
/**
81+
* If false, this handler will not be returned with the list of available handlers for a given set of files.
82+
* This allows the developer to register handlers that feed into the pipeline but that can only be invoked through specific code/UI
83+
* @return Whether to show this handler in user-facing UI
84+
*/
85+
default boolean isVisible()
86+
{
87+
return true;
88+
}
89+
8090
/**
8191
* This should be a JS function that will be called after we have verified that the output files selected
8292
* can be processed by this handler. The handler should provide either a JS handler or a successURL. If both are provided,

SequenceAnalysis/resources/external/scRNAseq/Seurat3.rmd

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -124,6 +124,15 @@ if (length(unique(seuratObj$BarcodePrefix)) > 1) {
124124
125125
```
126126

127+
## Activation
128+
129+
```{r ActivationScore}
130+
131+
seuratObj <- ClassifySGSAndApply(seuratObj = seuratObj, geneSetName = 'HighlyActivated', geneList = OOSAP::Phenotyping_GeneList()$HighlyActivated, positivityThreshold = 0.5, saveFilePath = paste0(outPrefix, '.ha.txt'))
132+
saveRDS(seuratObj, file = saveFile)
133+
134+
```
135+
127136
## Write Summary
128137

129138
```{r Summary}

SequenceAnalysis/resources/web/SequenceAnalysis/field/IntervalField.js

Lines changed: 8 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -51,15 +51,17 @@ Ext4.define('SequenceAnalysis.field.IntervalField', {
5151

5252
Ext4.Array.forEach(val, function(v, idx){
5353
var toTest = val[idx].split(':');
54-
if (toTest.length != 2){
54+
if (toTest.length > 2){
5555
msgs.push('Invalid interval: ' + v);
5656
return;
5757
}
58-
59-
var coords = toTest[1].split('-');
60-
if (coords.length != 2){
61-
msgs.push('Invalid interval: ' + v);
62-
return;
58+
//NOTE: an interval with just a contig name is valid
59+
else if (toTest.length === 2) {
60+
var coords = toTest[1].split('-');
61+
if (coords.length !== 2) {
62+
msgs.push('Invalid interval: ' + v);
63+
return;
64+
}
6365
}
6466
}, this);
6567

SequenceAnalysis/resources/web/SequenceAnalysis/panel/SequenceAnalysisPanel.js

Lines changed: 7 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -28,15 +28,15 @@ Ext4.define('SequenceAnalysis.panel.SequenceAnalysisPanel', {
2828

2929
this.callParent(arguments);
3030

31+
this.addEvents('sectiontoggle', 'dataload');
32+
3133
LABKEY.Ajax.request({
3234
url: LABKEY.ActionURL.buildURL('sequenceanalysis', 'getAnalysisToolDetails'),
3335
method: 'POST',
3436
scope: this,
3537
success: LABKEY.Utils.getCallbackWrapper(this.onDataLoad, this),
3638
failure: LDK.Utils.getErrorCallback()
3739
});
38-
39-
this.addEvents('sectiontoggle');
4040
},
4141

4242
onDataLoad: function(results){
@@ -95,6 +95,8 @@ Ext4.define('SequenceAnalysis.panel.SequenceAnalysisPanel', {
9595

9696
var btn = this.down('#copyPrevious');
9797
btn.handler.call(this, btn);
98+
99+
this.fireEvent('dataload', this);
98100
},
99101

100102
//loads the exp.RowId for each file
@@ -213,9 +215,10 @@ Ext4.define('SequenceAnalysis.panel.SequenceAnalysisPanel', {
213215

214216
afterStoreLoad: function(){
215217
var dv = this.down('dataview');
218+
219+
//this will occur if the stores return before onDataLoad
216220
if (!dv){
217-
console.log('deferring dataview refresh');
218-
Ext4.defer(this.afterStoreLoad, 100, this);
221+
this.on('dataload', this.afterStoreLoad, this, {single: true, delay: 100});
219222
return;
220223
}
221224

SequenceAnalysis/src/org/labkey/sequenceanalysis/SequenceAnalysisController.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4244,7 +4244,7 @@ public ApiResponse execute(GetAvailableHandlersForm form, BindException errors)
42444244
List<Integer> availableIds = new ArrayList<>();
42454245
for (SequenceOutputFile o : outputFiles)
42464246
{
4247-
if (handler.canProcess(o))
4247+
if (handler.isVisible() && handler.canProcess(o))
42484248
{
42494249
availableIds.add(o.getRowid());
42504250
}

SequenceAnalysis/src/org/labkey/sequenceanalysis/analysis/CellRangerSeuratHandler.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -48,7 +48,7 @@ public CellRangerSeuratHandler()
4848

4949
}}, null),
5050
ToolParameterDescriptor.create("doSplitJobs", "Run Separately", "If checked, each input dataset will be run separately. Otherwise they will be merged", "checkbox", new JSONObject(){{
51-
put("checked", false);
51+
put("checked", true);
5252
}}, false),
5353
ToolParameterDescriptor.create("useOutputFileContainer", "Submit to Source File Workbook", "If checked, each job will be submitted to the same workbook as the input file, as opposed to submitting all jobs to the same workbook. This is primarily useful if submitting a large batch of files to process separately. This only applies if 'Run Separately' is selected.", "checkbox", new JSONObject(){{
5454
put("checked", false);

SequenceAnalysis/src/org/labkey/sequenceanalysis/run/analysis/SubreadAnalysis.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -61,8 +61,8 @@ public Provider()
6161
put("checked", true);
6262
}}, true),
6363
ToolParameterDescriptor.createCommandLineParam(CommandLineParam.createSwitch("--ignoreDup"), "ignoreDup", "Ignore Duplicates", "If specified, reads flagged as duplicated will be ignored.", "checkbox", new JSONObject(){{
64-
put("checked", true);
65-
}}, true),
64+
put("checked", false);
65+
}}, false),
6666
ToolParameterDescriptor.create("strandSpecific", "Strand Specific", "If reads are stranded, specify that here.", "ldk-simplecombo", new JSONObject(){{
6767
put("storeValues", "Unstranded;Stranded;Reversely Stranded");
6868
put("value", "Unstranded");

SequenceAnalysis/src/org/labkey/sequenceanalysis/run/preprocessing/SummarizeAlignmentsStep.java

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,7 @@
2121

2222
import java.io.File;
2323
import java.io.IOException;
24+
import java.text.DecimalFormat;
2425
import java.text.NumberFormat;
2526

2627
public class SummarizeAlignmentsStep extends AbstractPipelineStep implements AnalysisStep
@@ -54,22 +55,23 @@ public Output performAnalysisPerSampleRemote(Readset rs, File inputBam, Referenc
5455
SamReaderFactory fact = SamReaderFactory.makeDefault();
5556
fact.validationStringency(ValidationStringency.SILENT);
5657
fact.referenceSequence(referenceGenome.getWorkingFastaFile());
58+
long numRecords = 0L;
5759
try (SamReader bamReader = fact.open(inputBam); SAMRecordIterator it = bamReader.iterator(); CSVWriter writer = new CSVWriter(PrintWriters.getPrintWriter(tsv), '\t', CSVWriter.NO_QUOTE_CHARACTER))
5860
{
5961
writer.writeNext(new String[]{"Chr", "Start", "Strand", "ReadLength", "RefLength", "Ratio", "Cigar", "MAPQ"});
60-
NumberFormat pctFormat = NumberFormat.getPercentInstance();
61-
pctFormat.setMaximumFractionDigits(1);
62+
NumberFormat fmt = new DecimalFormat("0.##");
6263

63-
while(it.hasNext())
64+
while (it.hasNext())
6465
{
6566
SAMRecord rec = it.next();
6667
if (rec.isSecondaryAlignment() || rec.getReadUnmappedFlag())
6768
{
6869
continue;
6970
}
7071

72+
numRecords++;
7173
Double ratio = Double.valueOf(rec.getLengthOnReference()) / rec.getReadLength();
72-
String[] vals = new String[]{rec.getContig(), String.valueOf(rec.getReadNegativeStrandFlag() ? rec.getEnd() : rec.getStart()), (rec.getReadNegativeStrandFlag() ? "-" : "+"), String.valueOf(rec.getReadLength()), String.valueOf(rec.getLengthOnReference()), pctFormat.format(ratio), rec.getCigarString(), String.valueOf(rec.getMappingQuality())};
74+
String[] vals = new String[]{rec.getContig(), String.valueOf(rec.getReadNegativeStrandFlag() ? rec.getEnd() : rec.getStart()), (rec.getReadNegativeStrandFlag() ? "-" : "+"), String.valueOf(rec.getReadLength()), String.valueOf(rec.getLengthOnReference()), fmt.format(ratio), rec.getCigarString(), String.valueOf(rec.getMappingQuality())};
7375
writer.writeNext(vals);
7476
}
7577
}
@@ -79,7 +81,7 @@ public Output performAnalysisPerSampleRemote(Readset rs, File inputBam, Referenc
7981
}
8082

8183
output.addOutput(tsv, "Alignment Summary Table");
82-
output.addSequenceOutput(tsv, "Alignment Summary Table: " + rs.getName(), "Alignment Start Table", rs.getReadsetId(), null, referenceGenome.getGenomeId(), null);
84+
output.addSequenceOutput(tsv, "Alignment Summary Table: " + rs.getName(), "Alignment Start Table", rs.getReadsetId(), null, referenceGenome.getGenomeId(), "Records: " + numRecords);
8385

8486
return output;
8587
}

SequenceAnalysis/src/org/labkey/sequenceanalysis/run/variant/DepthOfCoverageHandler.java

Lines changed: 20 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -110,10 +110,12 @@ public void processFilesRemote(List<SequenceOutputFile> inputFiles, JobContext c
110110
String intervalString = StringUtils.trimToNull(ctx.getParams().optString("intervals"));
111111
if (intervalString != null)
112112
{
113-
for (Interval i : splitIntervals(intervalString))
113+
String[] intervals = intervalString.split(";");
114+
validateIntervals(intervals);
115+
for (String i : intervals)
114116
{
115117
extraArgs.add("-L");
116-
extraArgs.add(i.getContig() + ":" + i.getStart() + "-" + i.getEnd());
118+
extraArgs.add(i);
117119
}
118120
}
119121

@@ -140,6 +142,9 @@ public void processFilesRemote(List<SequenceOutputFile> inputFiles, JobContext c
140142
extraArgs.add(SequencePipelineService.get().getMaxThreads(ctx.getLogger()).toString());
141143
}
142144

145+
extraArgs.add("-U");
146+
extraArgs.add("ALLOW_N_CIGAR_READS");
147+
143148
List<File> inputBams = new ArrayList<>();
144149
Set<Integer> libraryIds = new HashSet<>();
145150
for (SequenceOutputFile so : inputFiles)
@@ -230,22 +235,24 @@ public void processFilesRemote(List<SequenceOutputFile> inputFiles, JobContext c
230235
}
231236
}
232237

233-
public static List<Interval> splitIntervals(String intervalString) throws PipelineJobException
238+
public static void validateIntervals(String[] intervals) throws PipelineJobException
234239
{
235-
List<Interval> intervals = new ArrayList<>();
236-
for (String i : intervalString.split(";"))
240+
for (String i : intervals)
237241
{
238-
String[] tokens = i.split(":|-");
239-
if (tokens.length != 3)
242+
//NOTE: the contig name can contain a hyphen.
243+
String[] tokens = i.split(":");
244+
if (tokens.length > 2)
240245
{
241246
throw new PipelineJobException("Invalid interval: " + i);
242247
}
243-
244-
intervals.add(new Interval(tokens[0], Integer.parseInt(tokens[1]), Integer.parseInt(tokens[2])));
248+
else if (tokens.length == 2)
249+
{
250+
String[] coords = tokens[1].split("-");
251+
if (coords.length != 2)
252+
{
253+
throw new PipelineJobException("Invalid interval: " + i);
254+
}
255+
}
245256
}
246-
247-
Collections.sort(intervals);
248-
249-
return intervals;
250257
}
251258
}

cluster/src/org/labkey/cluster/pipeline/AbstractClusterExecutionEngine.java

Lines changed: 17 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -177,25 +177,25 @@ public void runTestJob(Container c, User u) throws PipelineJobException
177177

178178
abstract protected Pair<String, String> getStatusForJob(ClusterJob job, Container c);
179179

180-
private File getSerializedJobFile(File statusFile)
180+
private File getSerializedJobFile(File jobLogFile)
181181
{
182-
if (statusFile == null)
182+
if (jobLogFile == null)
183183
{
184184
return null;
185185
}
186186

187-
String name = FileUtil.getBaseName(statusFile.getName());
187+
String name = FileUtil.getBaseName(jobLogFile.getName());
188188

189-
return new File(statusFile.getParentFile(), name + ".job.xml");
189+
return new File(jobLogFile.getParentFile(), name + ".job.json.txt");
190190
}
191191

192192
protected File writeJobToFile(PipelineJob job) throws IOException
193193
{
194-
//next, serialize job to XML. deleting any existing file which might be from a previous task
194+
//next, serialize job to JSON. deleting any existing file which might be from a previous task
195195
File serializedJobFile = getSerializedJobFile(job.getLogFile());
196196
if (NetworkDrive.exists(serializedJobFile))
197197
{
198-
_log.info("job XML already exists, deleting");
198+
_log.info("job JSON already exists, deleting");
199199
serializedJobFile.delete();
200200
}
201201

@@ -479,24 +479,24 @@ protected void updateJobStatus(@Nullable String status, ClusterJob j, @Nullable
479479
PipelineJob pj = null;
480480
if (sf != null && status != null)
481481
{
482-
File xml = getSerializedJobFile(new File(sf.getFilePath()));
483-
if (!xml.exists())
482+
File json = getSerializedJobFile(new File(sf.getFilePath()));
483+
if (!json.exists())
484484
{
485-
throw new PipelineJobException("unable to find pipeline XML file, expected: " + xml.getPath());
485+
throw new PipelineJobException("unable to find pipeline JSON file, expected: " + json.getPath());
486486
}
487487

488-
//NOTE: this should read from serialized XML file, not rely on the DB
489-
pj = PipelineJob.readFromFile(xml);
488+
//NOTE: this should read from serialized JSON file, not rely on the DB
489+
pj = PipelineJob.readFromFile(json);
490490
if (pj == null)
491491
{
492-
_log.error("unable to create PipelineJob from xml file: " + sf.getRowId());
492+
_log.error("unable to create PipelineJob from json file: " + sf.getRowId());
493493
return;
494494
}
495495

496496
String jobTaskId = pj.getActiveTaskId() == null ? "" : pj.getActiveTaskId().toString();
497497
if (!jobTaskId.equals(j.getActiveTaskId()))
498498
{
499-
pj.getLogger().debug("pipeline XML activeTaskId (" + jobTaskId + ") does not match submission record (" + j.getActiveTaskId() + "). this probably means it progressed tasks. will not update status");
499+
pj.getLogger().debug("pipeline json activeTaskId (" + jobTaskId + ") does not match submission record (" + j.getActiveTaskId() + "). this probably means it progressed tasks. will not update status");
500500
return;
501501
}
502502

@@ -513,7 +513,7 @@ protected void updateJobStatus(@Nullable String status, ClusterJob j, @Nullable
513513
if (taskStatus != null)
514514
{
515515
//if the remote job exits w/ a non-zero exit code, cluster might still count this as complete.
516-
//to differentiate completed w/ error from successful completion, test activeTaskStatus as recorded in the job XML
516+
//to differentiate completed w/ error from successful completion, test activeTaskStatus as recorded in the job json
517517
if (taskStatus == PipelineJob.TaskStatus.complete)
518518
{
519519
if (pj.getActiveTaskStatus() == PipelineJob.TaskStatus.error)
@@ -523,17 +523,17 @@ protected void updateJobStatus(@Nullable String status, ClusterJob j, @Nullable
523523
else if (pj.getActiveTaskStatus() == PipelineJob.TaskStatus.running)
524524
{
525525
//this might indicate the job aborted mid-task without properly marking itself as complete
526-
pj.getLogger().warn("marking job as complete, even though XML indicates task status is running. this might indicate the job aborted improperly?");
526+
pj.getLogger().warn("marking job as complete, even though JSON indicates task status is running. this might indicate the job aborted improperly?");
527527
}
528528
else if (pj.getActiveTaskStatus() != PipelineJob.TaskStatus.complete)
529529
{
530530
//this might indicate the job aborted mid-task without properly marking itself as complete
531-
pj.getLogger().warn("Cluster indicates job status is complete, but the job XML is not marked complete. this probably indicates the java process aborted improperly.");
531+
pj.getLogger().warn("Cluster indicates job status is complete, but the job JSON is not marked complete. this probably indicates the java process aborted improperly.");
532532
taskStatus = PipelineJob.TaskStatus.error;
533533
}
534534
else if (pj.getErrors() > 0)
535535
{
536-
pj.getLogger().warn("marking job as complete, even though XML indicates task has errors. this might indicate the job aborted improperly?");
536+
pj.getLogger().warn("marking job as complete, even though JSON indicates task has errors. this might indicate the job aborted improperly?");
537537
}
538538
}
539539

0 commit comments

Comments (0)