 import org.labkey.sequenceanalysis.SequenceAnalysisSchema;
 import org.labkey.sequenceanalysis.pipeline.ReadsetCreationTask;
 import org.labkey.sequenceanalysis.pipeline.SequenceNormalizationTask;
+import org.labkey.sequenceanalysis.pipeline.SequenceReadsetHandlerJob;
 import org.labkey.sequenceanalysis.util.SequenceUtil;
 
 import java.io.File;
@@ -125,6 +126,7 @@ public void init(PipelineJob job, SequenceAnalysisJobSupport support, List<Reads
         int totalArchivedPairs = 0;
 
         Map<String, List<ReadData>> readdataToSra = new HashMap<>();
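+        // Track the file IDs behind any readdata rows deleted below, so their quality metrics can be purged in complete()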
+        HashSet<Integer> filesIdsDeleted = new HashSet<>();
         for (ReadData rd : rs.getReadData())
         {
             String accession = rd.getSra_accession();
@@ -223,6 +225,12 @@ public void init(PipelineJob job, SequenceAnalysisJobSupport support, List<Reads
                 writer.println("Condensing/merging readdata: " + r.getRowid() + ", " + r.getFile1() + ", " + r.getFile1().getPath() + ", " + (r.getFileId2() == null ? "N/A" : r.getFileId2()) + ", " + (r.getFileId2() == null ? "N/A" : r.getFile2().getPath()));
 
                 List<Map<String, Object>> toDelete = Arrays.asList(Map.of("rowid", r.getRowid()));
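+                // Record the underlying file IDs before this readdata row is deleted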
+                filesIdsDeleted.add(r.getFileId1());
+                if (r.getFileId2() != null)
+                {
+                    filesIdsDeleted.add(r.getFileId2());
+                }
+
                 QueryService.get().getUserSchema(job.getUser(), ContainerManager.getForId(r.getContainer()), SequenceAnalysisSchema.SCHEMA_NAME).getTable(SequenceAnalysisSchema.TABLE_READ_DATA).getUpdateService().deleteRows(job.getUser(), ContainerManager.getForId(r.getContainer()), toDelete, null, null);
             }
 
@@ -243,14 +251,21 @@ public void init(PipelineJob job, SequenceAnalysisJobSupport support, List<Reads
         support.cacheReadset(rs.getReadsetId(), job.getUser(), true);
         support.cacheObject(UPDATED_ACCESSIONS, StringUtils.join(updatedAccessions, ";"));
         support.cacheObject(ACCESSION_TO_READS, accessionToReads);
+        support.cacheObject(FILE_IDS_DELETED, filesIdsDeleted);
     }
 
     private Map<String, Integer> getCachedReadCounts(SequenceAnalysisJobSupport support) throws PipelineJobException
     {
         return support.getCachedObject(ACCESSION_TO_READS, PipelineJob.createObjectMapper().getTypeFactory().constructParametricType(Map.class, String.class, Integer.class));
     }
 
-    private static final String UPDATED_ACCESSIONS = "updatedAccessons";
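+    // Deserialize the cached set as Set<Integer>; Jackson needs the explicit parametrized type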
+    private Set<Integer> getFilesIdsDeleted(SequenceAnalysisJobSupport support) throws PipelineJobException
+    {
+        return support.getCachedObject(FILE_IDS_DELETED, PipelineJob.createObjectMapper().getTypeFactory().constructParametricType(Set.class, Integer.class));
+    }
+
+    private static final String FILE_IDS_DELETED = "filesIdsDeleted";
+    private static final String UPDATED_ACCESSIONS = "updatedAccessions";
     private static final String ACCESSION_TO_READS = "accessionToReads";
 
     @Override
@@ -322,6 +337,16 @@ public void complete(PipelineJob job, List<Readset> readsets, List<SequenceOutpu
         {
             throw new PipelineJobException(e);
         }
+
+        // Delete pre-existing metrics:
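+        // The handler runs within a SequenceReadsetHandlerJob, so cast to reach the cached job support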
+        for (int dataId : getFilesIdsDeleted(((SequenceReadsetHandlerJob)job).getSequenceSupport()))
+        {
+            SimpleFilter filter = new SimpleFilter(FieldKey.fromString("readset"), rs.getRowId());
+            filter.addCondition(FieldKey.fromString("container"), rs.getContainer());
+            filter.addCondition(FieldKey.fromString("dataId"), dataId);
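+            // Table.delete() issues a direct delete against the quality metrics table and returns the number of rows removed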
+            int deleted = Table.delete(SequenceAnalysisManager.get().getTable(SequenceAnalysisSchema.TABLE_QUALITY_METRICS), filter);
+            job.getLogger().debug("existing metrics deleted: " + deleted);
+        }
     }
 
     @Override