Skip to content

Commit 8bdf720

Browse files
committed
Bugfix to dual hashing/cite-seq readset handling
1 parent 5a20ef0 commit 8bdf720

File tree

1 file changed

+9
-6
lines changed

1 file changed

+9
-6
lines changed

singlecell/src/org/labkey/singlecell/CellHashingServiceImpl.java

Lines changed: 9 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -285,7 +285,7 @@ public void prepareHashingAndCiteSeqFilesIfNeeded(File sourceDir, PipelineJob jo
285 285
}
286 286

287 287
// if distinct HTOs is 1, no point in running hashing. note: presence of hashing readsets is a trigger downstream
288-
HashMap<Integer, File> readsetToCountMap = new HashMap<>();
288+
HashMap<String, File> readsetToCountMap = new HashMap<>();
289 289
if (distinctHTOs.size() > 1)
290 290
{
291 291
Set<Integer> hashingToRemove = new HashSet<>();
@@ -317,7 +317,7 @@ public void prepareHashingAndCiteSeqFilesIfNeeded(File sourceDir, PipelineJob jo
317 317
}
318 318

319 319
SequenceOutputFile so = ts.getArrayList(SequenceOutputFile.class).get(0);
320-
readsetToCountMap.put(hashingReadsetId, so.getFile().getParentFile()); //this is the umi_counts dir
320+
readsetToCountMap.put(BARCODE_TYPE.hashing.name() + "-" + hashingReadsetId, so.getFile().getParentFile()); //this is the umi_counts dir
321 321
}
322 322
}
323 323

@@ -379,7 +379,7 @@ else if (distinctHTOs.size() == 1)
379 379
job.getLogger().info("Multiple CITE-seq count matrices found, using most recent: " + sos.get(0).getRowid());
380 380
}
381 381
SequenceOutputFile so = sos.get(0);
382-
readsetToCountMap.put(citeseqReadsetId, so.getFile().getParentFile()); //this is the umi_count dir
382+
readsetToCountMap.put(BARCODE_TYPE.citeseq.name() + "-" + citeseqReadsetId, so.getFile().getParentFile()); //this is the umi_count dir
383 383
}
384 384
}
385 385

@@ -724,9 +724,12 @@ public Map<Integer, Integer> getCachedHashingReadsetMap(SequenceAnalysisJobSuppo
724 724
return support.getCachedObject(READSET_TO_HASHING_MAP, PipelineJob.createObjectMapper().getTypeFactory().constructParametricType(Map.class, Integer.class, Integer.class));
725 725
}
726 726

727-
public Map<Integer, File> getCachedReadsetToCountMatrixMap(SequenceAnalysisJobSupport support) throws PipelineJobException
727+
public File getCachedReadsetToCountMatrix(SequenceAnalysisJobSupport support, int readsetId, CellHashingService.BARCODE_TYPE type) throws PipelineJobException
728 728
{
729-
return support.getCachedObject(READSET_TO_COUNTS_MAP, PipelineJob.createObjectMapper().getTypeFactory().constructParametricType(Map.class, Integer.class, File.class));
729+
Map<String, File> map = support.getCachedObject(READSET_TO_COUNTS_MAP, PipelineJob.createObjectMapper().getTypeFactory().constructParametricType(Map.class, String.class, File.class));
730+
String key = type.name() + "-" + readsetId;
731+
732+
return(map.get(key));
730 733
}
731 734

732 735
@Override
@@ -1331,7 +1334,7 @@ public File getExistingFeatureBarcodeCountDir(Readset parentReadset, BARCODE_TYP
1331 1334
throw new PipelineJobException("Unable to find cached readset of type " + type.name() + " for parent: " + parentReadset.getReadsetId());
1332 1335
}
1333 1336

1334-
File ret = getCachedReadsetToCountMatrixMap(support).get(childId);
1337+
File ret = getCachedReadsetToCountMatrix(support, childId, type);
1335 1338
if (ret == null)
1336 1339
{
1337 1340
throw new PipelineJobException("Unable to find cached count matrix of type " + type.name() + " for parent: " + parentReadset.getReadsetId());

0 commit comments

Comments (0)