@@ -482,82 +482,80 @@ public void complete(PipelineJob job, List<Readset> readsets, List<SequenceOutpu
482482 Readset rs = readsets .get (0 );
483483
484484 File metrics = new File (so .getFile ().getParentFile ().getParentFile (), "metrics_summary.csv" );
485- if (metrics .exists ())
485+ if (! metrics .exists ())
486486 {
487- job .getLogger ().debug ("adding 10x metrics" );
488- try (CSVReader reader = new CSVReader (Readers .getReader (metrics )))
489- {
490- String [] line ;
491- String [] header = null ;
492- String [] metricValues = null ;
493-
494- int i = 0 ;
495- while ((line = reader .readNext ()) != null )
496- {
497- if (i == 0 )
498- {
499- header = line ;
500- }
501- else
502- {
503- metricValues = line ;
504- break ;
505- }
506-
507- i ++;
508- }
487+ throw new PipelineJobException ("unable to find metrics file: " + metrics .getPath ());
488+ }
509489
510- TableInfo ti = DbSchema .get ("sequenceanalysis" , DbSchemaType .Module ).getTable ("quality_metrics" );
490+ job .getLogger ().debug ("adding 10x metrics" );
491+ try (CSVReader reader = new CSVReader (Readers .getReader (metrics )))
492+ {
493+ String [] line ;
494+ String [] header = null ;
495+ String [] metricValues = null ;
511496
512- //NOTE: if this job errored and restarted, we may have duplicate records:
513- SimpleFilter filter = new SimpleFilter (FieldKey .fromString ("readset" ), so .getReadset ());
514- filter .addCondition (FieldKey .fromString ("dataid" ), so .getDataId (), CompareType .EQUAL );
515- filter .addCondition (FieldKey .fromString ("category" ), rs .getApplication (), CompareType .EQUAL );
516- filter .addCondition (FieldKey .fromString ("container" ), job .getContainer ().getId (), CompareType .EQUAL );
517- TableSelector ts = new TableSelector (ti , PageFlowUtil .set ("rowid" ), filter , null );
518- if (ts .exists ())
497+ int i = 0 ;
498+ while ((line = reader .readNext ()) != null )
499+ {
500+ if (i == 0 )
519501 {
520- job .getLogger ().info ("Deleting existing QC metrics (probably from prior restarted job)" );
521- ts .getArrayList (Integer .class ).forEach (rowid -> {
522- Table .delete (ti , rowid );
523- });
502+ header = line ;
524503 }
525-
526- for (int j = 0 ; j < header .length ; j ++)
504+ else
527505 {
528- Map <String , Object > toInsert = new CaseInsensitiveHashMap <>();
529- toInsert .put ("container" , job .getContainer ().getId ());
530- toInsert .put ("createdby" , job .getUser ().getUserId ());
531- toInsert .put ("created" , new Date ());
532- toInsert .put ("readset" , rs .getReadsetId ());
533- toInsert .put ("dataid" , so .getDataId ());
534-
535- toInsert .put ("category" , "Cell Ranger" );
536- toInsert .put ("metricname" , header [j ]);
537-
538- metricValues [j ] = metricValues [j ].replaceAll ("," , "" );
539- Object val = metricValues [j ];
540- if (metricValues [j ].contains ("%" ))
541- {
542- metricValues [j ] = metricValues [j ].replaceAll ("%" , "" );
543- Double d = ConvertHelper .convert (metricValues [j ], Double .class );
544- d = d / 100.0 ;
545- val = d ;
546- }
547-
548- toInsert .put ("metricvalue" , val );
549-
550- Table .insert (job .getUser (), ti , toInsert );
506+ metricValues = line ;
507+ break ;
551508 }
509+
510+ i ++;
552511 }
553- catch (IOException e )
512+
513+ TableInfo ti = DbSchema .get ("sequenceanalysis" , DbSchemaType .Module ).getTable ("quality_metrics" );
514+
515+ //NOTE: if this job errored and restarted, we may have duplicate records:
516+ SimpleFilter filter = new SimpleFilter (FieldKey .fromString ("readset" ), so .getReadset ());
517+ filter .addCondition (FieldKey .fromString ("dataid" ), so .getDataId (), CompareType .EQUAL );
518+ filter .addCondition (FieldKey .fromString ("category" ), rs .getApplication (), CompareType .EQUAL );
519+ filter .addCondition (FieldKey .fromString ("container" ), job .getContainer ().getId (), CompareType .EQUAL );
520+ TableSelector ts = new TableSelector (ti , PageFlowUtil .set ("rowid" ), filter , null );
521+ if (ts .exists ())
554522 {
555- throw new PipelineJobException (e );
523+ job .getLogger ().info ("Deleting existing QC metrics (probably from prior restarted job)" );
524+ ts .getArrayList (Integer .class ).forEach (rowid -> {
525+ Table .delete (ti , rowid );
526+ });
527+ }
528+
529+ for (int j = 0 ; j < header .length ; j ++)
530+ {
531+ Map <String , Object > toInsert = new CaseInsensitiveHashMap <>();
532+ toInsert .put ("container" , job .getContainer ().getId ());
533+ toInsert .put ("createdby" , job .getUser ().getUserId ());
534+ toInsert .put ("created" , new Date ());
535+ toInsert .put ("readset" , rs .getReadsetId ());
536+ toInsert .put ("dataid" , so .getDataId ());
537+
538+ toInsert .put ("category" , "Cell Ranger" );
539+ toInsert .put ("metricname" , header [j ]);
540+
541+ metricValues [j ] = metricValues [j ].replaceAll ("," , "" );
542+ Object val = metricValues [j ];
543+ if (metricValues [j ].contains ("%" ))
544+ {
545+ metricValues [j ] = metricValues [j ].replaceAll ("%" , "" );
546+ Double d = ConvertHelper .convert (metricValues [j ], Double .class );
547+ d = d / 100.0 ;
548+ val = d ;
549+ }
550+
551+ toInsert .put ("metricvalue" , val );
552+
553+ Table .insert (job .getUser (), ti , toInsert );
556554 }
557555 }
558- else
556+ catch ( IOException e )
559557 {
560- job . getLogger (). warn ( "unable to find metrics file: " + metrics . getPath () );
558+ throw new PipelineJobException ( e );
561559 }
562560 }
563561 }