Skip to content

Commit eb37a58

Browse files
authored
Merge pull request #196 from BimberLab/23.3_fb_merge
Merge discvr-22.11 into 23.3
2 parents bbae51f + bc1b2cf commit eb37a58

File tree

29 files changed

+368
-67
lines changed

29 files changed

+368
-67
lines changed

OpenLdapSync/src/org/labkey/openldapsync/ldap/LdapScheduler.java

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,11 @@
11
package org.labkey.openldapsync.ldap;
22

33
import org.apache.directory.api.ldap.model.exception.LdapException;
4-
import org.apache.logging.log4j.Logger;
54
import org.apache.logging.log4j.LogManager;
6-
import org.quartz.DailyTimeIntervalScheduleBuilder;
5+
import org.apache.logging.log4j.Logger;
76
import org.quartz.JobBuilder;
87
import org.quartz.JobDetail;
8+
import org.quartz.SimpleScheduleBuilder;
99
import org.quartz.Trigger;
1010
import org.quartz.TriggerBuilder;
1111
import org.quartz.impl.StdSchedulerFactory;
@@ -62,7 +62,7 @@ public synchronized void schedule()
6262

6363
Trigger trigger = TriggerBuilder.newTrigger()
6464
.withIdentity(LdapScheduler.class.getCanonicalName(), LdapScheduler.class.getCanonicalName())
65-
.withSchedule(DailyTimeIntervalScheduleBuilder.dailyTimeIntervalSchedule().withIntervalInHours(_frequency))
65+
.withSchedule(SimpleScheduleBuilder.simpleSchedule().withIntervalInHours(_frequency).repeatForever())
6666
.forJob(_job)
6767
.build();
6868

SequenceAnalysis/api-src/org/labkey/api/sequenceanalysis/pipeline/AlignerIndexUtil.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -60,7 +60,7 @@ public static File getIndexDir(ReferenceGenome genome, String name, boolean useW
6060
}
6161

6262
/**
63-
* If WorkDirectory is null, files will not be copied. Otherwise files be be copied to this destination.
63+
* If WorkDirectory is null, files will not be copied. Otherwise, files will be copied to this destination.
6464
*/
6565
private static boolean verifyOrCreateCachedIndex(PipelineContext ctx, @Nullable WorkDirectory wd, @Nullable AlignmentOutputImpl output, String localName, String webserverName, ReferenceGenome genome, boolean forceCopyLocal) throws PipelineJobException
6666
{
@@ -78,7 +78,7 @@ private static boolean verifyOrCreateCachedIndex(PipelineContext ctx, @Nullable
7878
File lockFile = new File(webserverIndexDir.getPath() + ".copyLock");
7979
if (lockFile.exists())
8080
{
81-
ctx.getLogger().error("Another job is actively saving this cached index. This job will skip that step; however, if this job tries to start alignment and use the index before copy is complete this might cause issues.");
81+
throw new PipelineJobException("Another job is actively saving this cached index. This error is being thrown as a precaution to avoid duplicate rsync jobs, and to prevent this job from progressing while that copy is in-progress. This job can be restarted after the copy is complete, and should resume normally.");
8282
}
8383

8484
hasCachedIndex = true;

SequenceAnalysis/src/org/labkey/sequenceanalysis/pipeline/CacheGenomePipelineJob.java

Lines changed: 13 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
package org.labkey.sequenceanalysis.pipeline;
22

3+
import org.apache.commons.io.FileUtils;
34
import org.apache.commons.lang3.StringUtils;
45
import org.jetbrains.annotations.NotNull;
56
import org.labkey.api.data.Container;
@@ -28,6 +29,7 @@
2829
import org.labkey.api.view.ViewContext;
2930

3031
import java.io.File;
32+
import java.io.IOException;
3133
import java.util.Arrays;
3234
import java.util.Collections;
3335
import java.util.List;
@@ -190,7 +192,17 @@ public boolean isJobComplete(PipelineJob job)
190192
if (toDelete != null && toDelete.length > 0)
191193
{
192194
getJob().getLogger().info("Folders will be deleted: " + StringUtils.join(toDelete, ", "));
193-
//TODO: verify
195+
for (File x : toDelete)
196+
{
197+
try
198+
{
199+
FileUtils.deleteDirectory(x);
200+
}
201+
catch (IOException e)
202+
{
203+
throw new PipelineJobException(e);
204+
}
205+
}
194206
}
195207
}
196208

SequenceAnalysis/src/org/labkey/sequenceanalysis/run/variant/VariantAnnotatorStep.java

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -43,6 +43,9 @@ public Provider()
4343
}}, null),
4444
ToolParameterDescriptor.create("maf", "Minor Allele Frequency", "If selected, MAF will be annotated.", "checkbox", new JSONObject(){{
4545
put("checked", true);
46+
}}, null),
47+
ToolParameterDescriptor.create("chromosomeCounts", "Chromosome Counts", "If selected, GATK ChromosomeCounts annotations, including AF and AC, will be annotated.", "checkbox", new JSONObject(){{
48+
put("checked", false);
4649
}}, null)
4750
), null, "");
4851
}
@@ -88,6 +91,12 @@ public Output processVariants(File inputVCF, File outputDirectory, ReferenceGeno
8891
options.add("MinorAlleleFrequency");
8992
}
9093

94+
if (getProvider().getParameterByName("chromosomeCounts").extractValue(getPipelineCtx().getJob(), getProvider(), getStepIdx(), Boolean.class, false))
95+
{
96+
options.add("-A");
97+
options.add("ChromosomeCounts");
98+
}
99+
91100
if (intervals != null)
92101
{
93102
intervals.forEach(interval -> {

cluster/src/org/labkey/cluster/pipeline/AbstractClusterExecutionEngine.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -101,8 +101,8 @@ public void submitJob(PipelineJob job) throws PipelineJobException
101101
}
102102
else
103103
{
104-
job.getLogger().error("duplicate submission attempt, skipping. original cluster id: " + existingSubmission.getClusterId());
105-
_log.error("duplicate submission attempt, skipping. original cluster id: " + existingSubmission.getClusterId() + ", job id: " + job.getJobGUID());
104+
job.getLogger().error("duplicate submission attempt, skipping. original cluster id: " + existingSubmission.getClusterId() + ", with status: " + existingSubmission.getStatus());
105+
_log.error("duplicate submission attempt, skipping. original cluster id: " + existingSubmission.getClusterId() + ", job id: " + job.getJobGUID() + ", with status: " + existingSubmission.getStatus());
106106
return;
107107
}
108108
}

jbrowse/resources/web/jbrowse/window/ModifyJsonConfigWindow.js

Lines changed: 46 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -176,6 +176,52 @@ Ext4.define('JBrowse.window.ModifyJsonConfigWindow', {
176176
});
177177

178178
this.callParent(arguments);
179+
180+
if (this.jsonFiles.length === 1) {
181+
Ext4.Msg.wait('Loading...');
182+
LABKEY.Query.selectRows({
183+
schemaName: 'jbrowse',
184+
queryName: 'jsonfiles',
185+
filterArray: [
186+
LABKEY.Filter.create('objectid', this.jsonFiles.join(';'), LABKEY.Filter.Types.EQUALS_ONE_OF)
187+
],
188+
columns: 'objectid,trackjson',
189+
scope: this,
190+
success: this.onSelectLoad,
191+
failure: LDK.Utils.getErrorCallback()
192+
});
193+
}
194+
},
195+
196+
onSelectLoad: function(results){
197+
Ext4.Msg.hide();
198+
if (results.rows && results.rows.length) {
199+
if (!results.rows[0].trackJson) {
200+
return;
201+
}
202+
203+
var json = JSON.parse(results.rows[0].trackJson);
204+
var store = this.down('ldk-gridpanel').store;
205+
for (var attribute in json) {
206+
var val = json[attribute];
207+
var dataType = 'STRING';
208+
if (typeof val === 'number' && !isNaN(val)){
209+
dataType = Number.isInteger(val) ? 'INT' : 'FLOAT';
210+
}
211+
else if (typeof val === 'string') {
212+
dataType = 'STRING';
213+
}
214+
else if (typeof val === 'boolean') {
215+
dataType = 'BOOLEAN';
216+
}
217+
218+
store.add(store.createModel({
219+
attribute: attribute,
220+
value: val,
221+
dataType: dataType
222+
}));
223+
}
224+
}
179225
},
180226

181227
addAttribute: function(attribute, value, dataType){

jbrowse/src/org/labkey/jbrowse/model/JsonFile.java

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -680,6 +680,12 @@ private JSONObject getGxfTrack(Logger log, ExpData targetFile, ReferenceGenome r
680680

681681
public String getTrackType()
682682
{
683+
JSONObject extraConfig = getExtraTrackConfig();
684+
if (extraConfig != null && extraConfig.has("type"))
685+
{
686+
return extraConfig.getString("type");
687+
}
688+
683689
ExpData targetFile = getExpData();
684690
if (TRACK_TYPES.vcf.getFileType().isType(targetFile.getFile()))
685691
{
@@ -831,7 +837,7 @@ public File prepareResource(Logger log, boolean throwIfNotPrepared, boolean forc
831837
{
832838
if (!targetFile.getParentFile().equals(finalLocation.getParentFile()))
833839
{
834-
// Make local copy so we dont delete the original
840+
log.debug("Creating local copy of: " + targetFile.getPath());
835841
File local = new File(finalLocation.getParentFile(), targetFile.getName());
836842
if (local.exists())
837843
{

jbrowse/src/org/labkey/jbrowse/pipeline/JBrowseSessionTask.java

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -256,6 +256,8 @@ private void createOrAddToSession() throws PipelineJobException
256256
trackRecord.put("modifiedby", getJob().getUser().getUserId());
257257
trackRecord = Table.insert(getJob().getUser(), databaseMembers, trackRecord);
258258
databaseMemberRecordsCreated.add((Integer)trackRecord.get("rowid"));
259+
260+
json.prepareResource(getJob().getLogger(), false, false);
259261
}
260262
}
261263
else
@@ -305,6 +307,8 @@ private void createOrAddToSession() throws PipelineJobException
305307
trackRecord.put("modifiedby", getJob().getUser().getUserId());
306308
trackRecord = Table.insert(getJob().getUser(), databaseMembers, trackRecord);
307309
databaseMemberRecordsCreated.add((Integer) trackRecord.get("rowid"));
310+
311+
json.prepareResource(getJob().getLogger(), false, false);
308312
}
309313
}
310314

@@ -336,6 +340,7 @@ private void createOrAddToSession() throws PipelineJobException
336340
}
337341
}
338342
}
343+
339344
}
340345

341346
private JBrowseSessionPipelineJob getPipelineJob()

singlecell/api-src/org/labkey/api/singlecell/pipeline/AbstractSingleCellPipelineStep.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -479,7 +479,7 @@ protected Chunk createParamChunk(SequenceOutputHandler.JobContext ctx, List<Seur
479479

480480
body.addAll(loadChunkFromFile("singlecell", "chunks/Functions.R"));
481481

482-
return new Chunk("parameters", null, null, body);
482+
return new Chunk("parameters", null, null, body, "cache=FALSE");
483483
}
484484

485485
protected String printInputFile(SeuratObjectWrapper so)
Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
1+
totalPassed <- 0
2+
for (datasetId in names(seuratObjects)) {
3+
printName(datasetId)
4+
seuratObj <- readRDS(seuratObjects[[datasetId]])
5+
6+
cellsToKeep <- colnames(seuratObj)
7+
if (!all(is.null(cellbarcodesToDrop))) {
8+
cellsToKeep <- cellsToKeep[!cellsToKeep %in% cellbarcodesToDrop]
9+
}
10+
11+
if (length(cellsToKeep) == 0) {
12+
print('There were no matching cells')
13+
} else {
14+
print(paste0('Total passing cells: ', length(cellsToKeep)))
15+
seuratObj <- subset(seuratObj, cells = cellsToKeep)
16+
saveData(seuratObj, datasetId)
17+
totalPassed <- totalPassed + 1
18+
}
19+
20+
# Cleanup
21+
rm(seuratObj)
22+
gc()
23+
}
24+
25+
if (totalPassed == 0) {
26+
addErrorMessage('No cells remained in any seurat objects after subsetting')
27+
}

0 commit comments

Comments
 (0)