Skip to content

Commit 8bc4c3f

Browse files
committed
Enforce cell hashing discordance in TCR import
1 parent 669cb6e commit 8bc4c3f

File tree

6 files changed

+55
-48
lines changed

6 files changed

+55
-48
lines changed

SequenceAnalysis/resources/web/SequenceAnalysis/field/SequenceOutputFileSelectorField.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,7 @@ Ext4.define('SequenceAnalysis.field.SequenceOutputFileSelectorField', {
4949
}
5050
}
5151
else {
52-
LDK.Utils.logError('unable to find library field in GenomeFileSelectorField');
52+
LDK.Utils.logError('unable to find library field in SequenceOutputFileSelectorField');
5353
}
5454
}
5555
else if (window && window.libraryId){

singlecell/api-src/org/labkey/api/singlecell/CellHashingService.java

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -32,6 +32,9 @@
3232

3333
abstract public class CellHashingService
3434
{
35+
public static final String MAX_HASHING_PCT_FAIL = "maxHashingPctFail";
36+
public static final String MAX_HASHING_PCT_DISCORDANT = "maxHashingPctDiscordant";
37+
3538
private static CellHashingService _instance;
3639

3740
public static CellHashingService get()
@@ -74,6 +77,8 @@ static public void setInstance(CellHashingService instance)
7477

7578
abstract public File getExistingFeatureBarcodeCountDir(Readset parentReadset, BARCODE_TYPE type, SequenceAnalysisJobSupport support) throws PipelineJobException;
7679

80+
abstract public void copyHtmlLocally(SequenceOutputHandler.JobContext ctx) throws PipelineJobException;
81+
7782
public static class CellHashingParameters
7883
{
7984
public BARCODE_TYPE type;

singlecell/src/org/labkey/singlecell/CellHashingServiceImpl.java

Lines changed: 29 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -51,6 +51,7 @@
5151
import java.io.File;
5252
import java.io.IOException;
5353
import java.io.PrintWriter;
54+
import java.nio.file.Files;
5455
import java.text.DecimalFormat;
5556
import java.util.ArrayList;
5657
import java.util.Arrays;
@@ -911,13 +912,13 @@ public List<ToolParameterDescriptor> getHashingCallingParams(boolean allowDemuxE
911912
put("joinReturnValue", true);
912913
}}, null));
913914

914-
ret.add(SeuratToolParameter.create("maxHashingPctFail", "Hashing Max Fraction Failed", "The maximum fraction of cells that can have no call (i.e. not singlet or doublet). Otherwise it will fail the job. This is a number 0-1.", "ldk-numberfield", new JSONObject(){{
915+
ret.add(SeuratToolParameter.create(MAX_HASHING_PCT_FAIL, "Hashing Max Fraction Failed", "The maximum fraction of cells that can have no call (i.e. not singlet or doublet). Otherwise it will fail the job. This is a number 0-1.", "ldk-numberfield", new JSONObject(){{
915916
put("minValue", 0);
916917
put("maxValue", 1);
917918
put("decimalPrecision", 2);
918919
}}, null));
919920

920-
ret.add(SeuratToolParameter.create("maxHashingPctDiscordant", "Hashing Max Fraction Discordant", "The maximum fraction of cells that can have discordant calls. High discordance is usually an indication of either poor quality data, or one caller performing badly. This is a number 0-1.", "ldk-numberfield", new JSONObject(){{
921+
ret.add(SeuratToolParameter.create(MAX_HASHING_PCT_DISCORDANT, "Hashing Max Fraction Discordant", "The maximum fraction of cells that can have discordant calls. High discordance is usually an indication of either poor quality data, or one caller performing badly. This is a number 0-1.", "ldk-numberfield", new JSONObject(){{
921922
put("minValue", 0);
922923
put("maxValue", 1);
923924
put("decimalPrecision", 2);
@@ -1339,6 +1340,32 @@ public File getExistingFeatureBarcodeCountDir(Readset parentReadset, BARCODE_TYP
13391340
return ret;
13401341
}
13411342

1343+
@Override
1344+
public void copyHtmlLocally(SequenceOutputHandler.JobContext ctx) throws PipelineJobException
1345+
{
1346+
try
1347+
{
1348+
for (File f : ctx.getOutputDir().listFiles())
1349+
{
1350+
if (f.getName().endsWith(".hashing.html"))
1351+
{
1352+
ctx.getLogger().info("Copying hashing HTML locally for debugging: " + f.getName());
1353+
File target = new File(ctx.getSourceDirectory(), f.getName());
1354+
if (target.exists())
1355+
{
1356+
target.delete();
1357+
}
1358+
1359+
Files.copy(f.toPath(), target.toPath());
1360+
}
1361+
}
1362+
}
1363+
catch (IOException e)
1364+
{
1365+
throw new PipelineJobException(e);
1366+
}
1367+
}
1368+
13421369
@Override
13431370
public Set<String> getHtosForParentReadset(Integer parentReadsetId, File webserverJobDir, SequenceAnalysisJobSupport support, boolean throwIfNotFound) throws PipelineJobException
13441371
{

singlecell/src/org/labkey/singlecell/SingleCellModule.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,7 @@
4545
import org.labkey.singlecell.run.NimbleAligner;
4646
import org.labkey.singlecell.run.NimbleAlignmentStep;
4747
import org.labkey.singlecell.run.VelocytoAlignmentStep;
48-
import org.labkey.singlecell.run.VelocytoPostProcessingStep;
48+
import org.labkey.singlecell.run.VelocytoAnalysisStep;
4949

5050
import java.util.Collection;
5151
import java.util.Collections;
@@ -135,7 +135,7 @@ public static void registerPipelineSteps()
135135
SequencePipelineService.get().registerPipelineStep(new CellRangerVDJWrapper.VDJProvider());
136136
SequencePipelineService.get().registerPipelineStep(new NimbleAligner.Provider());
137137
SequencePipelineService.get().registerPipelineStep(new VelocytoAlignmentStep.Provider());
138-
SequencePipelineService.get().registerPipelineStep(new VelocytoPostProcessingStep.Provider());
138+
SequencePipelineService.get().registerPipelineStep(new VelocytoAnalysisStep.Provider());
139139

140140
SequenceAnalysisService.get().registerReadsetHandler(new CellRangerFeatureBarcodeHandler());
141141

singlecell/src/org/labkey/singlecell/pipeline/singlecell/RunCellHashing.java

Lines changed: 1 addition & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,6 @@
2121
import java.io.File;
2222
import java.io.IOException;
2323
import java.io.PrintWriter;
24-
import java.nio.file.Files;
2524
import java.util.ArrayList;
2625
import java.util.Collection;
2726
import java.util.Collections;
@@ -74,7 +73,7 @@ public String getDockerContainerName()
7473
@Override
7574
protected void onFailure(SequenceOutputHandler.JobContext ctx, String outputPrefix) throws PipelineJobException
7675
{
77-
copyHtmlLocally(ctx);
76+
CellHashingService.get().copyHtmlLocally(ctx);
7877

7978
// Also delete the .done files, so hashing will repeat if we change params:
8079
for (File f : ctx.getOutputDir().listFiles())
@@ -87,31 +86,6 @@ protected void onFailure(SequenceOutputHandler.JobContext ctx, String outputPref
8786
}
8887
}
8988

90-
private void copyHtmlLocally(SequenceOutputHandler.JobContext ctx) throws PipelineJobException
91-
{
92-
try
93-
{
94-
for (File f : ctx.getOutputDir().listFiles())
95-
{
96-
if (f.getName().endsWith(".hashing.html"))
97-
{
98-
ctx.getLogger().info("Copying hashing HTML locally for debugging: " + f.getName());
99-
File target = new File(ctx.getSourceDirectory(), f.getName());
100-
if (target.exists())
101-
{
102-
target.delete();
103-
}
104-
105-
Files.copy(f.toPath(), target.toPath());
106-
}
107-
}
108-
}
109-
catch (IOException e)
110-
{
111-
throw new PipelineJobException(e);
112-
}
113-
}
114-
11589
@Override
11690
protected Map<Integer, File> prepareCountData(SingleCellOutput output, SequenceOutputHandler.JobContext ctx, List<SeuratObjectWrapper> inputObjects, String outputPrefix) throws PipelineJobException
11791
{

singlecell/src/org/labkey/singlecell/run/VelocytoPostProcessingStep.java renamed to singlecell/src/org/labkey/singlecell/run/VelocytoAnalysisStep.java

Lines changed: 17 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -2,10 +2,11 @@
22

33
import org.json.JSONObject;
44
import org.labkey.api.pipeline.PipelineJobException;
5+
import org.labkey.api.sequenceanalysis.model.AnalysisModel;
56
import org.labkey.api.sequenceanalysis.model.Readset;
67
import org.labkey.api.sequenceanalysis.pipeline.AbstractPipelineStepProvider;
7-
import org.labkey.api.sequenceanalysis.pipeline.BamProcessingOutputImpl;
8-
import org.labkey.api.sequenceanalysis.pipeline.BamProcessingStep;
8+
import org.labkey.api.sequenceanalysis.pipeline.AnalysisOutputImpl;
9+
import org.labkey.api.sequenceanalysis.pipeline.AnalysisStep;
910
import org.labkey.api.sequenceanalysis.pipeline.PipelineContext;
1011
import org.labkey.api.sequenceanalysis.pipeline.PipelineStepProvider;
1112
import org.labkey.api.sequenceanalysis.pipeline.ReferenceGenome;
@@ -17,14 +18,14 @@
1718
import java.util.Arrays;
1819
import java.util.LinkedHashSet;
1920

20-
public class VelocytoPostProcessingStep extends AbstractCommandPipelineStep<VelocytoAlignmentStep.VelocytoWrapper> implements BamProcessingStep
21+
public class VelocytoAnalysisStep extends AbstractCommandPipelineStep<VelocytoAlignmentStep.VelocytoWrapper> implements AnalysisStep
2122
{
22-
public VelocytoPostProcessingStep(PipelineStepProvider provider, PipelineContext ctx)
23+
public VelocytoAnalysisStep(PipelineStepProvider provider, PipelineContext ctx)
2324
{
2425
super(provider, ctx, new VelocytoAlignmentStep.VelocytoWrapper(ctx.getLogger()));
2526
}
2627

27-
public static class Provider extends AbstractPipelineStepProvider<VelocytoPostProcessingStep>
28+
public static class Provider extends AbstractPipelineStepProvider<VelocytoAnalysisStep>
2829
{
2930
public Provider()
3031
{
@@ -45,16 +46,22 @@ public Provider()
4546
}
4647

4748
@Override
48-
public VelocytoPostProcessingStep create(PipelineContext ctx)
49+
public VelocytoAnalysisStep create(PipelineContext ctx)
4950
{
50-
return new VelocytoPostProcessingStep(this, ctx);
51+
return new VelocytoAnalysisStep(this, ctx);
5152
}
5253
}
5354

5455
@Override
55-
public Output processBam(Readset rs, File inputBam, ReferenceGenome referenceGenome, File outputDirectory) throws PipelineJobException
56+
public Output performAnalysisPerSampleLocal(AnalysisModel model, File inputBam, File referenceFasta, File outDir) throws PipelineJobException
5657
{
57-
BamProcessingOutputImpl output = new BamProcessingOutputImpl();
58+
return null;
59+
}
60+
61+
@Override
62+
public Output performAnalysisPerSampleRemote(Readset rs, File inputBam, ReferenceGenome referenceGenome, File outputDir) throws PipelineJobException
63+
{
64+
AnalysisOutputImpl output = new AnalysisOutputImpl();
5865
File gtf = getPipelineCtx().getSequenceSupport().getCachedData(getProvider().getParameterByName("gtf").extractValue(getPipelineCtx().getJob(), getProvider(), getStepIdx(), Integer.class));
5966
if (gtf == null)
6067
{
@@ -75,15 +82,9 @@ else if (!gtf.exists())
7582
}
7683
}
7784

78-
File loom = getWrapper().runVelocytoFor10x(inputBam, gtf, outputDirectory, mask);
85+
File loom = getWrapper().runVelocytoFor10x(inputBam, gtf, outputDir, mask);
7986
output.addSequenceOutput(loom, rs.getName() + ": velocyto", "Velocyto Counts", rs.getReadsetId(), null, referenceGenome.getGenomeId(), null);
8087

8188
return output;
8289
}
83-
84-
@Override
85-
public boolean expectToCreateNewBam()
86-
{
87-
return false;
88-
}
8990
}

0 commit comments

Comments (0)