@@ -475,7 +475,16 @@ public boolean alwaysCopyIndexToWorkingDir()
     @Override
     public void complete(SequenceAnalysisJobSupport support, AnalysisModel model, Collection<SequenceOutputFile> outputFilesCreated) throws PipelineJobException
     {
-        File metrics = new File(model.getAlignmentFileObject().getParentFile(), "metrics_summary.csv");
+        SequenceOutputFile outputForData = outputFilesCreated.stream().filter(x -> LOUPE_CATEGORY.equals(x.getCategory())).findFirst().orElse(null);
+        if (outputForData == null)
+        {
+            outputForData = outputFilesCreated.stream().filter(x -> "10x Run Summary".equals(x.getCategory())).findFirst().orElseThrow();
+        }
+
+        File outsDir = outputForData.getFile().getParentFile();
+        Integer dataId = outputForData.getDataId();
+
+        File metrics = new File(outsDir, "metrics_summary.csv");
         if (metrics.exists())
         {
             getPipelineCtx().getLogger().debug("adding 10x metrics");
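
The hunk above replaces the model.getAlignmentFile()-derived path with a category lookup over the job's SequenceOutputFile records, preferring the Loupe file and falling back to the run summary, so the outs directory and dataid come from the job's own outputs rather than from the alignment record, whose null checks are deleted below. A standalone sketch of that fallback lookup (the nested record and the LOUPE_CATEGORY value here are simplified stand-ins for illustration, not the real LabKey API):

    import java.util.List;

    public class OutputLookupSketch
    {
        // Simplified stand-in for LabKey's SequenceOutputFile, which exposes
        // getCategory(), getFile() and getDataId() in the diff above.
        record OutputFile(String category, String path, Integer dataId) {}

        // Assumed value for illustration; the real constant is defined elsewhere in the class.
        static final String LOUPE_CATEGORY = "10x Loupe File";

        // Prefer the Loupe output, fall back to the run summary, and fail if neither
        // exists, mirroring the orElse(null) / orElseThrow() chain in the hunk.
        static OutputFile resolveOutput(List<OutputFile> outputs)
        {
            return outputs.stream()
                    .filter(x -> LOUPE_CATEGORY.equals(x.category()))
                    .findFirst()
                    .or(() -> outputs.stream().filter(x -> "10x Run Summary".equals(x.category())).findFirst())
                    .orElseThrow(() -> new IllegalStateException("No 10x output file found for this job"));
        }

        public static void main(String[] args)
        {
            List<OutputFile> outputs = List.of(new OutputFile("10x Run Summary", "/work/outs/web_summary.html", 101));
            System.out.println(resolveOutput(outputs)); // falls back to the run summary
        }
    }

Optional.or keeps the preference order explicit and fails fast when neither output category was produced.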
@@ -501,17 +510,12 @@ public void complete(SequenceAnalysisJobSupport support, AnalysisModel model, Co
                     i++;
                 }
 
-                if (model.getAlignmentFile() == null)
-                {
-                    throw new PipelineJobException("model.getAlignmentFile() was null");
-                }
-
                 TableInfo ti = DbSchema.get("sequenceanalysis", DbSchemaType.Module).getTable("quality_metrics");
 
                 //NOTE: if this job errored and restarted, we may have duplicate records:
                 SimpleFilter filter = new SimpleFilter(FieldKey.fromString("readset"), model.getReadset());
                 filter.addCondition(FieldKey.fromString("analysis_id"), model.getRowId(), CompareType.EQUAL);
-                filter.addCondition(FieldKey.fromString("dataid"), model.getAlignmentFile(), CompareType.EQUAL);
+                filter.addCondition(FieldKey.fromString("dataid"), dataId, CompareType.EQUAL);
                 filter.addCondition(FieldKey.fromString("category"), "Cell Ranger", CompareType.EQUAL);
                 filter.addCondition(FieldKey.fromString("container"), getPipelineCtx().getJob().getContainer().getId(), CompareType.EQUAL);
                 TableSelector ts = new TableSelector(ti, PageFlowUtil.set("rowid"), filter, null);
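
As the NOTE says, a job that errored and restarted re-enters complete() and would insert the same metrics twice, so existing rows matching the (readset, analysis_id, dataid, category, container) key are deleted before re-inserting; the dataid condition now uses the resolved dataId. A toy in-memory sketch of this delete-then-insert idempotency pattern (stand-in record types only; the real code goes through LabKey's SimpleFilter, TableSelector and Table.delete):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;

    public class IdempotentMetricStore
    {
        // Stand-ins for a quality_metrics row and its dedupe key; the real table is
        // sequenceanalysis.quality_metrics, queried via LabKey's Table layer.
        record MetricKey(int readset, int analysisId, int dataId, String category, String container) {}
        record MetricRow(MetricKey key, String name, Object value) {}

        private final List<MetricRow> rows = new ArrayList<>();

        // Delete-then-insert, like the ts.exists() / Table.delete() block above:
        // re-running the import for the same key never leaves duplicate rows.
        void replaceMetrics(MetricKey key, Map<String, Object> metrics)
        {
            rows.removeIf(r -> r.key().equals(key));
            metrics.forEach((name, value) -> rows.add(new MetricRow(key, name, value)));
        }

        public static void main(String[] args)
        {
            IdempotentMetricStore store = new IdempotentMetricStore();
            MetricKey key = new MetricKey(1, 10, 101, "Cell Ranger", "container-guid");
            store.replaceMetrics(key, Map.of("Estimated Number of Cells", 5000));
            store.replaceMetrics(key, Map.of("Estimated Number of Cells", 5000)); // simulated restart
            System.out.println(store.rows.size()); // 1, not 2
        }
    }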
@@ -531,7 +535,7 @@ public void complete(SequenceAnalysisJobSupport support, AnalysisModel model, Co
                     toInsert.put("created", new Date());
                     toInsert.put("readset", model.getReadset());
                     toInsert.put("analysis_id", model.getRowId());
-                    toInsert.put("dataid", model.getAlignmentFile());
+                    toInsert.put("dataid", dataId);
 
                     toInsert.put("category", "Cell Ranger");
                     toInsert.put("metricname", header[j]);
@@ -593,97 +597,4 @@ public void complete(SequenceAnalysisJobSupport support, AnalysisModel model, Co
             }
         }
     }
-
-    private void addMetrics(File outDir, AnalysisModel model) throws PipelineJobException
-    {
-        getPipelineCtx().getLogger().debug("adding 10x metrics");
-
-        File metrics = new File(outDir, "metrics_summary.csv");
-        if (!metrics.exists())
-        {
-            throw new PipelineJobException("Unable to find file: " + metrics.getPath());
-        }
-
-        if (model.getAlignmentFile() == null)
-        {
-            throw new PipelineJobException("model.getAlignmentFile() was null");
-        }
-
-        try (CSVReader reader = new CSVReader(Readers.getReader(metrics)))
-        {
-            String[] line;
-            List<String[]> metricValues = new ArrayList<>();
-
-            int i = 0;
-            while ((line = reader.readNext()) != null)
-            {
-                i++;
-                if (i == 1)
-                {
-                    continue;
-                }
-
-                metricValues.add(line);
-            }
-
-            int totalAdded = 0;
-            TableInfo ti = DbSchema.get("sequenceanalysis", DbSchemaType.Module).getTable("quality_metrics");
-
-            //NOTE: if this job errored and restarted, we may have duplicate records:
-            SimpleFilter filter = new SimpleFilter(FieldKey.fromString("readset"), model.getReadset());
-            filter.addCondition(FieldKey.fromString("analysis_id"), model.getRowId(), CompareType.EQUAL);
-            filter.addCondition(FieldKey.fromString("dataid"), model.getAlignmentFile(), CompareType.EQUAL);
-            filter.addCondition(FieldKey.fromString("category"), "Cell Ranger VDJ", CompareType.EQUAL);
-            filter.addCondition(FieldKey.fromString("container"), getPipelineCtx().getJob().getContainer().getId(), CompareType.EQUAL);
-            TableSelector ts = new TableSelector(ti, PageFlowUtil.set("rowid"), filter, null);
-            if (ts.exists())
-            {
-                getPipelineCtx().getLogger().info("Deleting existing QC metrics (probably from prior restarted job)");
-                ts.getArrayList(Integer.class).forEach(rowid -> {
-                    Table.delete(ti, rowid);
-                });
-            }
-
-            for (String[] row : metricValues)
-            {
-                //TODO
-                if ("Fastq ID".equals(row[2]) || "Physical library ID".equals(row[2]))
-                {
-                    continue;
-                }
-
-                Map<String, Object> toInsert = new CaseInsensitiveHashMap<>();
-                toInsert.put("container", getPipelineCtx().getJob().getContainer().getId());
-                toInsert.put("createdby", getPipelineCtx().getJob().getUser().getUserId());
-                toInsert.put("created", new Date());
-                toInsert.put("readset", model.getReadset());
-                toInsert.put("analysis_id", model.getRowId());
-                toInsert.put("dataid", model.getAlignmentFile());
-
-                toInsert.put("category", "Cell Ranger");
-                toInsert.put("metricname", row[4]);
-
-                row[5] = row[5].replaceAll(",", ""); //remove commas
-                Object val = row[5];
-                if (row[5].contains("%"))
-                {
-                    row[5] = row[5].replaceAll("%", "");
-                    Double d = ConvertHelper.convert(row[5], Double.class);
-                    d = d / 100.0;
-                    val = d;
-                }
-
-                toInsert.put("metricvalue", val);
-
-                Table.insert(getPipelineCtx().getJob().getUser(), ti, toInsert);
-                totalAdded++;
-            }
-
-            getPipelineCtx().getLogger().info("total metrics added: " + totalAdded);
-        }
-        catch (IOException e)
-        {
-            throw new PipelineJobException(e);
-        }
-    }
 }
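
The removed addMetrics() helper was a near-duplicate of the CSV import that complete() performs above, and still keyed rows on model.getAlignmentFile(), so this change leaves a single code path. One detail from its parsing loop worth noting: metric values have thousands-separator commas stripped, and percent strings are stored as fractional doubles. A minimal standalone sketch of that normalization (Double.parseDouble stands in for LabKey's ConvertHelper.convert):

    public class MetricValueSketch
    {
        // Normalize a raw metrics_summary.csv value the way the import loop does:
        // strip thousands separators, then convert percent strings to fractions.
        static Object normalize(String raw)
        {
            String s = raw.replaceAll(",", ""); // "1,234" -> "1234"
            if (s.contains("%"))
            {
                return Double.parseDouble(s.replaceAll("%", "")) / 100.0; // "97.5%" -> 0.975
            }

            return s; // non-percent values are inserted as the raw string
        }

        public static void main(String[] args)
        {
            for (String raw : new String[]{"1,234", "97.5%", "GRCh38"})
            {
                System.out.println(raw + " -> " + normalize(raw));
            }
        }
    }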