
Commit 78934ba

Merge pull request #128 from LabKey/fb_merge_21.11_to_develop
Merge discvr-21.11 to develop
2 parents 218fbf2 + 765db05 commit 78934ba


71 files changed, 963 additions and 195 deletions


SequenceAnalysis/api-src/org/labkey/api/sequenceanalysis/pipeline/PipelineStepProvider.java

Lines changed: 5 additions & 0 deletions
@@ -83,6 +83,11 @@ public interface PipelineStepProvider<StepType extends PipelineStep>
      */
     public ToolParameterDescriptor getParameterByName(String name);
 
+    default boolean hasParameter(String name)
+    {
+        return getParameterByName(name) != null;
+    }
+
     /**
      * Creates the JSON object sent to the client that is used to build the client UI
      */
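
The new hasParameter() default is just a null check on getParameterByName(). A minimal usage sketch, not part of this commit (the provider variable and the "minMapQ" parameter name are illustrative assumptions):

// Hypothetical caller: guard an optional tool parameter before reading it,
// rather than null-checking getParameterByName() directly.
PipelineStepProvider<?> provider = getProvider();   // assumed to be available in scope
if (provider.hasParameter("minMapQ"))               // illustrative parameter name
{
    ToolParameterDescriptor descriptor = provider.getParameterByName("minMapQ");
    // ... read the configured value from the descriptor as before
}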

SequenceAnalysis/api-src/org/labkey/api/sequenceanalysis/pipeline/SequenceOutputHandler.java

Lines changed: 5 additions & 0 deletions
@@ -65,6 +65,11 @@ public Class getProcessorClass()
 
     public String getName();
 
+    default String getAnalysisType(PipelineJob job)
+    {
+        return getName();
+    }
+
     public String getDescription();
 
     /**
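
The new getAnalysisType() default falls back to getName(), and SequenceOutputHandlerFinalTask (further down in this commit) now records that value on the analysis row. A hypothetical override, not taken from this commit, showing how a handler could report a more specific type:

// Hypothetical override: report a job-specific analysis type instead of the
// handler's display name; the default simply returns getName().
@Override
public String getAnalysisType(PipelineJob job)
{
    // could also inspect the job's parameters here to vary the label
    return "GenotypeGVCFs";
}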
Lines changed: 19 additions & 0 deletions
@@ -0,0 +1,19 @@
+CREATE INDEX IDX_asj_status_container_alignment_id_ref_nt_id ON sequenceanalysis.alignment_summary_junction (
+  status ASC,
+  container ASC,
+  alignment_id ASC,
+  ref_nt_id ASC
+);
+
+CREATE INDEX IDX_readData_readset ON sequenceanalysis.readData (
+  readset ASC
+)
+INCLUDE(fileid1, fileid2, runid);
+
+CREATE INDEX IDX_quality_metrics_metricname_dataid_readset ON sequenceanalysis.quality_metrics (
+  metricName ASC,
+  dataId ASC,
+  readset ASC
+)
+INCLUDE(metricValue);
+
Lines changed: 19 additions & 0 deletions
@@ -0,0 +1,19 @@
+CREATE NONCLUSTERED INDEX IDX_asj_status_container_alignment_id_ref_nt_id ON sequenceanalysis.alignment_summary_junction (
+  status ASC,
+  container ASC,
+  alignment_id ASC,
+  ref_nt_id ASC
+);
+
+CREATE NONCLUSTERED INDEX IDX_readData_readset ON sequenceanalysis.readData (
+  readset ASC
+)
+INCLUDE(fileid1, fileid2, runid);
+
+CREATE NONCLUSTERED INDEX IDX_quality_metrics_metricname_dataid_readset ON sequenceanalysis.quality_metrics (
+  metricName ASC,
+  dataId ASC,
+  readset ASC
+)
+INCLUDE(metricValue);
+

SequenceAnalysis/resources/web/SequenceAnalysis/Utils.js

Lines changed: 17 additions & 1 deletion
@@ -137,7 +137,7 @@ SequenceAnalysis.Utils = new function(){
                 exonPosition = nt_position - startNT + exon[0];
                 nt_positions.push(exonPosition);
                 exons.push(idx);
-                if(nt_positions.length == 3)
+                if(nt_positions.length === 3)
                     return false; //jump to next exon
 
                 nt_position++;
@@ -152,6 +152,22 @@ SequenceAnalysis.Utils = new function(){
             nt_positions: nt_positions,
             exons: exons
         };
+    },
+
+    // https://github.com/overset/javascript-natural-sort/blob/master/naturalSort.js
+    naturalSortFn: function(o1, o2){
+        o1 = SequenceAnalysis.Utils.getNaturalSortValue(o1);
+        o2 = SequenceAnalysis.Utils.getNaturalSortValue(o2);
+
+        if (o1 === o2) {
+            return 0;
+        }
+
+        return o1 < o2 ? -1 : 1;
+    },
+
+    getNaturalSortValue: function(value) {
+        return value ? value.replace(/(\d+)/g, "0000000000$1").replace(/0*(\d{10,})/g, "$1") : value;
     }
 }
 }
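
getNaturalSortValue() makes plain string comparison behave numerically by left-padding every run of digits to ten characters, so 'Sample2' sorts ahead of 'Sample10'. A Java sketch of the same padding trick, for illustration only (the JavaScript above is what the module actually uses):

// Illustrative Java version of the zero-padding trick in getNaturalSortValue():
//   "Sample2"  -> "Sample0000000002"
//   "Sample10" -> "Sample0000000010"
// so ordinary lexicographic comparison of the keys yields natural (numeric) order.
static String naturalSortKey(String value)
{
    if (value == null)
        return null;

    return value
            .replaceAll("(\\d+)", "0000000000$1")  // prepend ten zeros to every digit run
            .replaceAll("0*(\\d{10,})", "$1");     // trim each run back to a zero-padded block of at least ten digits
}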

SequenceAnalysis/resources/web/SequenceAnalysis/panel/SequenceImportPanel.js

Lines changed: 30 additions & 13 deletions
@@ -33,8 +33,8 @@ Ext4.define('SequenceAnalysis.panel.SequenceImportPanel', {
         sequenceImportPanel: this,
         extend: 'Ext.data.Model',
         fields: [
-            {name: 'fileGroupId', allowBlank: false},
-            {name: 'readset', allowBlank: false},
+            {name: 'fileGroupId', allowBlank: false, sortType: SequenceAnalysis.Utils.getNaturalSortValue},
+            {name: 'readset', allowBlank: false, sortType: SequenceAnalysis.Utils.getNaturalSortValue},
             {name: 'readsetname', useNull: true},
             {name: 'importType', useNull: true},
             {name: 'barcode5', useNull: true},
@@ -128,8 +128,8 @@ Ext4.define('SequenceAnalysis.panel.SequenceImportPanel', {
         model: Ext4.define('SequenceAnalysis.model.ReadsetDataModel', {
             extend: 'Ext.data.Model',
             fields: [
-                {name: 'id'},
-                {name: 'fileGroupId', allowBlank: false},
+                {name: 'id', sortType: SequenceAnalysis.Utils.getNaturalSortValue},
+                {name: 'fileGroupId', allowBlank: false, sortType: SequenceAnalysis.Utils.getNaturalSortValue},
                 {name: 'fileRecord1'},
                 {name: 'fileRecord2'},
                 {name: 'platformUnit'},
@@ -332,7 +332,7 @@ Ext4.define('SequenceAnalysis.panel.SequenceImportPanel', {
                 distinctGroups.push(r.get('fileGroupId'));
             }
         }, this);
-        distinctGroups = Ext4.unique(distinctGroups);
+        distinctGroups = Ext4.unique(distinctGroups).sort(SequenceAnalysis.Utils.naturalSortFn);
 
         var found = [];
         Ext4.Array.forEach(this.fileGroupStore.getRange(), function(fg){
@@ -424,7 +424,13 @@ Ext4.define('SequenceAnalysis.panel.SequenceImportPanel', {
     TENX_REGEX: /^(.+?)(_[0-9]+){0,1}_S(.+)_L(.+)_(R){0,1}([0-9])(_[0-9]+){0,1}(\.f(ast){0,1}q)(\.gz)?$/i,
 
     populateSamples: function(orderType, isPaired){
-        this.fileNameStore.sort('displayName', 'ASC');
+        this.fileNameStore.sort([{
+            sorterFn: function(o1, o2){
+                o1 = o1.get('displayName');
+                o2 = o2.get('displayName');
+                return o1 = SequenceAnalysis.Utils.naturalSortFn(o1, o2);
+            }
+        }]);
         this.readDataStore.removeAll();
         var errorMsgs = [];
 
@@ -483,9 +489,7 @@ Ext4.define('SequenceAnalysis.panel.SequenceImportPanel', {
             }
         }, this);
 
-        var keys = Ext4.Object.getKeys(map);
-        keys.sort();
-
+        var keys = Ext4.Object.getKeys(map).sort(SequenceAnalysis.Utils.naturalSortFn);
         Ext4.Array.forEach(keys, function(key){
             if (Ext4.isArray(map[key])){
                 Ext4.Array.forEach(map[key], function(r){
@@ -563,14 +567,15 @@ Ext4.define('SequenceAnalysis.panel.SequenceImportPanel', {
             }
         }
 
+        this.readDataStore.sort('fileGroupId');
         this.down('#readDataGrid').getView().refresh();
 
         //populate readsets
         var distinctNames = [];
        this.readDataStore.each(function(r){
             distinctNames.push(r.get('fileGroupId'));
         }, this);
-        distinctNames = Ext4.unique(distinctNames);
+        distinctNames = Ext4.unique(distinctNames).sort(SequenceAnalysis.Utils.naturalSortFn);
 
         //update fileGroupIds
         Ext4.Array.forEach(distinctNames, function(name){
@@ -947,7 +952,7 @@ Ext4.define('SequenceAnalysis.panel.SequenceImportPanel', {
             model: this.fileNameStore.model
         });
 
-        this.fileNames.sort();
+        this.fileNames.sort(SequenceAnalysis.Utils.naturalSortFn);
         Ext4.Msg.wait('Loading...');
         var multi = new LABKEY.MultiRequest();
         multi.add(LABKEY.Ajax.request, {
@@ -1073,8 +1078,20 @@ Ext4.define('SequenceAnalysis.panel.SequenceImportPanel', {
             }
         }, this);
 
-        this.fileNameStore.sort('displayName');
-        this.fileNameStoreCopy.sort('displayName');
+        this.fileNameStore.sort([{
+            sorterFn: function(o1, o2){
+                o1 = o1.get('displayName');
+                o2 = o2.get('displayName');
+                return SequenceAnalysis.Utils.naturalSortFn(o1, o2);
+            }
+        }]);
+        this.fileNameStoreCopy.sort([{
+            sorterFn: function(o1, o2){
+                o1 = o1.get('displayName');
+                o2 = o2.get('displayName');
+                return SequenceAnalysis.Utils.naturalSortFn(o1, o2);
+            }
+        }]);
 
         this.down('#fileListView').refresh();
         this.down('#totalFiles').update('Total files: ' + this.fileNameStore.getCount());

SequenceAnalysis/resources/web/SequenceAnalysis/panel/VariantProcessingPanel.js

Lines changed: 6 additions & 0 deletions
@@ -243,6 +243,12 @@ Ext4.define('SequenceAnalysis.panel.VariantProcessingPanel', {
             label: 'Allow Old RMS Mapping Data',
             description: 'This must be checked to allow processing of gVCFs generated by GATK3.',
             defaultValue: false
+        },{
+            fieldXtype: 'ldk-integerfield',
+            name: 'nativeMemoryBuffer',
+            label: 'C++ Memory Buffer',
+            description: 'By default, the pipeline java processes are allocated nearly all of the requested RAM. GenomicsDB requires memory for the C++ layer - this value (in GB) will be reserved for this. We recommend about 15-25% of the total job RAM',
+            defaultValue: null
         },{
             fieldXtype: 'checkbox',
             name: 'disableFileLocking',
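
As a rough worked example of the sizing guidance above (all numbers hypothetical, not from this commit): a job requesting 64 GB would reserve roughly 10-16 GB for GenomicsDB's C++ layer, and the GenotypeGVCFHandler change below subtracts that buffer from the RAM handed to the java process.

// Hypothetical sizing only: reserve ~20% of a 64 GB job for the GenomicsDB C++ layer.
int totalJobRamGb = 64;
int nativeMemoryBufferGb = (int) Math.round(totalJobRamGb * 0.20); // ~13 GB reserved
int javaRamGb = totalJobRamGb - nativeMemoryBufferGb;              // ~51 GB left for the java process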

SequenceAnalysis/src/org/labkey/sequenceanalysis/SequenceAnalysisModule.java

Lines changed: 1 addition & 1 deletion
@@ -186,7 +186,7 @@ public String getName()
     @Override
     public Double getSchemaVersion()
     {
-        return 12.328;
+        return 12.329;
     }
 
     @Override

SequenceAnalysis/src/org/labkey/sequenceanalysis/analysis/GenotypeGVCFHandler.java

Lines changed: 8 additions & 1 deletion
@@ -366,6 +366,13 @@ private File runGenotypeGVCFs(PipelineJob job, JobContext ctx, ProcessVariantsHa
         {
             toolParams.add("--max-alternate-alleles");
             toolParams.add(ctx.getParams().get("variantCalling.GenotypeGVCFs.max_alternate_alleles").toString());
+
+            toolParams.add("--genomicsdb-max-alternate-alleles");
+
+            // See: https://gatk.broadinstitute.org/hc/en-us/articles/4418054384027-GenotypeGVCFs#--genomicsdb-max-alternate-alleles
+            // "A typical value is 3 more than the --max-alternate-alleles value that's used by GenotypeGVCFs and larger differences result in more robustness to PCR-related indel errors"
+            Integer maxAlt = ctx.getParams().getInt("variantCalling.GenotypeGVCFs.max_alternate_alleles") + 3;
+            toolParams.add(maxAlt.toString());
         }
 
         if (ctx.getParams().optBoolean("variantCalling.GenotypeGVCFs.includeNonVariantSites"))
@@ -411,7 +418,7 @@ private File runGenotypeGVCFs(PipelineJob job, JobContext ctx, ProcessVariantsHa
         int nativeMemoryBuffer = ctx.getParams().optInt("variantCalling.GenotypeGVCFs.nativeMemoryBuffer", 0);
         if (maxRam != null && nativeMemoryBuffer > 0)
         {
-            ctx.getLogger().info("Adjusting RAM based on memory buffer (" + nativeMemoryBuffer + ")");
+            ctx.getLogger().info("Adjusting RAM (" + maxRam + ") based on memory buffer (" + nativeMemoryBuffer + ")");
             maxRam = maxRam - nativeMemoryBuffer;
 
             if (maxRam < 1)
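
To make the new flag concrete (values are hypothetical, not from the commit): with max_alternate_alleles set to 6, the job now passes --max-alternate-alleles 6 and --genomicsdb-max-alternate-alleles 9, following the GATK guidance quoted above.

// Hypothetical values: max_alternate_alleles = 6
int maxAlternateAlleles = 6;
int genomicsdbMaxAlternateAlleles = maxAlternateAlleles + 3; // 9, per the quoted GATK guidance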

SequenceAnalysis/src/org/labkey/sequenceanalysis/pipeline/SequenceOutputHandlerFinalTask.java

Lines changed: 1 addition & 1 deletion
@@ -110,7 +110,7 @@ public RecordedActionSet run() throws PipelineJobException
         am.setModified(new Date());
         am.setCreatedby(getJob().getUser().getUserId());
         am.setModifiedby(getJob().getUser().getUserId());
-        am.setType(getPipelineJob().getHandler().getName());
+        am.setType(getPipelineJob().getHandler().getAnalysisType(getJob()));
         TableInfo analysisTable = SequenceAnalysisSchema.getTable(SequenceAnalysisSchema.TABLE_ANALYSES);
 
         Set<Integer> readsetIds = new HashSet<>();
