
Commit ea80eb0

Add column for RQG_alt_ID

1 parent f217864

File tree

1 file changed (0 additions, 24 deletions)

SequenceAnalysis/src/org/labkey/sequenceanalysis/run/RestoreSraDataHandler.java

Lines changed: 0 additions & 24 deletions
@@ -126,7 +126,6 @@ public void init(PipelineJob job, SequenceAnalysisJobSupport support, List<Reads
         int totalArchivedPairs = 0;

         Map<String, List<ReadData>> readdataToSra = new HashMap<>();
-        HashSet<Integer> filesIdsDeleted = new HashSet<>();
         for (ReadData rd : rs.getReadData())
         {
             String accession = rd.getSra_accession();
@@ -237,12 +236,6 @@ public void init(PipelineJob job, SequenceAnalysisJobSupport support, List<Reads
             writer.println("Condensing/merging readdata: " + r.getRowid() + ", " + r.getFileId1() + ", " + d1.getFile().getPath() + ", " + (r.getFileId2() == null ? "N/A" : r.getFileId2()) + ", " + (r.getFileId2() == null ? "N/A" : d2.getFile().getPath()));

             List<Map<String, Object>> toDelete = Arrays.asList(Map.of("rowid", r.getRowid()));
-            filesIdsDeleted.add(r.getFileId1());
-            if (r.getFileId2() != null)
-            {
-                filesIdsDeleted.add(r.getFileId2());
-            }
-
             QueryService.get().getUserSchema(job.getUser(), ContainerManager.getForId(r.getContainer()), SequenceAnalysisSchema.SCHEMA_NAME).getTable(SequenceAnalysisSchema.TABLE_READ_DATA).getUpdateService().deleteRows(job.getUser(), ContainerManager.getForId(r.getContainer()), toDelete, null, null);
         }

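The surviving deleteRows call in this hunk is a single dense line. Unpacked, it is the standard LabKey pattern of resolving a table through QueryService and deleting rows by primary key via its QueryUpdateService. A minimal sketch under the same assumptions as the diff (r, job, and the sequenceanalysis schema in scope; exception handling omitted; this is not the file's code):

// Unpacked form of the one-line deleteRows call above (sketch only).
Container c = ContainerManager.getForId(r.getContainer());
TableInfo readData = QueryService.get()
        .getUserSchema(job.getUser(), c, SequenceAnalysisSchema.SCHEMA_NAME)
        .getTable(SequenceAnalysisSchema.TABLE_READ_DATA);

// deleteRows() identifies each row to remove by a map of primary-key values:
List<Map<String, Object>> toDelete = Arrays.asList(Map.of("rowid", r.getRowid()));
readData.getUpdateService().deleteRows(job.getUser(), c, toDelete, null, null);
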
@@ -263,20 +256,13 @@ public void init(PipelineJob job, SequenceAnalysisJobSupport support, List<Reads
         support.cacheReadset(rs.getReadsetId(), job.getUser(), true);
         support.cacheObject(UPDATED_ACCESSIONS, StringUtils.join(updatedAccessions, ";"));
         support.cacheObject(ACCESSION_TO_READS, accessionToReads);
-        support.cacheObject(FILE_IDS_DELETED, filesIdsDeleted);
     }

     private Map<String, Integer> getCachedReadCounts(SequenceAnalysisJobSupport support) throws PipelineJobException
     {
         return support.getCachedObject(ACCESSION_TO_READS, PipelineJob.createObjectMapper().getTypeFactory().constructParametricType(Map.class, String.class, Integer.class));
     }

-    private Set<Integer> getFilesIdsDeleted(SequenceAnalysisJobSupport support) throws PipelineJobException
-    {
-        return support.getCachedObject(FILE_IDS_DELETED, PipelineJob.createObjectMapper().getTypeFactory().constructParametricType(Set.class, Integer.class));
-    }
-
-    private static final String FILE_IDS_DELETED = "filesIdsDeleted";
     private static final String UPDATED_ACCESSIONS = "updatedAccessions";
     private static final String ACCESSION_TO_READS = "accessionToReads";

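The removed getFilesIdsDeleted() helper shows the cache round-trip this handler uses to pass state between pipeline phases: cacheObject() serializes a value during init(), and getCachedObject() deserializes it later from a Jackson JavaType. A minimal sketch of the same pattern ("exampleIds" is an illustrative cache key, not one the file defines):

// In init(): store a typed value in the job's support cache.
support.cacheObject("exampleIds", new HashSet<>(Arrays.asList(1, 2, 3)));

// Later, e.g. in complete(): rebuild the parameterized type so Jackson can
// deserialize the cached JSON back into a Set<Integer>.
Set<Integer> ids = support.getCachedObject("exampleIds",
        PipelineJob.createObjectMapper().getTypeFactory().constructParametricType(Set.class, Integer.class));
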
@@ -349,16 +335,6 @@ public void complete(PipelineJob job, List<Readset> readsets, List<SequenceOutpu
             {
                 throw new PipelineJobException(e);
             }
-
-            // Delete pre-existing metrics:
-            for (int dataId : getFilesIdsDeleted(((SequenceReadsetHandlerJob)job).getSequenceSupport()))
-            {
-                SimpleFilter filter = new SimpleFilter(FieldKey.fromString("readset"), rs.getRowId());
-                filter.addCondition(FieldKey.fromString("container"), rs.getContainer());
-                filter.addCondition(FieldKey.fromString("dataId"), dataId);
-                int deleted = Table.delete(SequenceAnalysisManager.get().getTable(SequenceAnalysisSchema.TABLE_QUALITY_METRICS), filter);
-                job.getLogger().debug("existing metrics deleted: " + deleted);
-            }
         }

     @Override
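
The block deleted from complete() was the only consumer of that cached set: for each removed file ID it purged matching rows from the quality-metrics table with a SimpleFilter and a direct Table.delete(). Reformatted for readability (a sketch mirroring the removed code; rs, dataId, and job as in the diff):

// Filtered hard delete against the quality-metrics table, as in the removed block.
SimpleFilter filter = new SimpleFilter(FieldKey.fromString("readset"), rs.getRowId());
filter.addCondition(FieldKey.fromString("container"), rs.getContainer());
filter.addCondition(FieldKey.fromString("dataId"), dataId);

int deleted = Table.delete(SequenceAnalysisManager.get().getTable(SequenceAnalysisSchema.TABLE_QUALITY_METRICS), filter);
job.getLogger().debug("existing metrics deleted: " + deleted);

With both the helper and this block removed, condensing readdata no longer purges previously computed quality metrics for the deleted files through this handler.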
