@@ -331,9 +331,6 @@ private Map<NimbleGenome, File> doAlignment(List<NimbleGenome> genomes, List<Fil
             alignArgs.add(String.valueOf(maxThreads));
         }
 
-        alignArgs.add("--log");
-        alignArgs.add("/work/" + getNimbleLogFile(getPipelineCtx().getWorkingDirectory(), genomes.size() == 1 ? genomes.get(0).genomeId : null).getName());
-
         boolean alignOutput = getProvider().getParameterByName(ALIGN_OUTPUT).extractValue(getPipelineCtx().getJob(), getProvider(), getStepIdx(), Boolean.class, false);
         File alignmentOutputFile = new File(getPipelineCtx().getWorkingDirectory(), "nimbleAlignment." + (genomes.size() == 1 ? genomes.get(0).genomeId + "." : "") + "bam");
         if (alignOutput)
@@ -394,26 +391,6 @@ else if (!alignResultsGz.exists())
             throw new PipelineJobException("Expected to find gz file: " + alignResultsGz.getPath());
         }
 
-        File log = getNimbleLogFile(alignResultsGz.getParentFile(), genome.genomeId);
-        if (!log.exists())
-        {
-            throw new PipelineJobException("Expected to find file: " + log.getPath());
-        }
-
-        getPipelineCtx().getLogger().info("Nimble alignment stats for genome :" + genome.getGenomeId());
-        try (BufferedReader reader = Readers.getReader(log))
-        {
-            String line;
-            while ((line = reader.readLine()) != null)
-            {
-                getPipelineCtx().getLogger().info(line);
-            }
-        }
-        catch (IOException e)
-        {
-            throw new PipelineJobException(e);
-        }
-
         // Now run nimble report. Always re-run since this is fast:
         List<String> reportArgs = new ArrayList<>();
         reportArgs.add("python3");
@@ -446,11 +423,6 @@ else if (!alignResultsGz.exists())
         return resultMap;
     }
 
-    public static File getNimbleLogFile(File baseDir, @Nullable Integer genomeId)
-    {
-        return new File(baseDir, "nimbleStats." + (genomeId == null ? "" : genomeId + ".") + "txt");
-    }
-
     private File getNimbleDoneFile(File parentDir, String resumeString)
     {
         return new File(parentDir, "nimble." + resumeString + ".done");
@@ -636,74 +608,4 @@ public double getScorePercent()
             return scorePercent;
         }
     }
 
-    public static void importQualityMetrics(SequenceOutputFile so, PipelineJob job) throws PipelineJobException
-    {
-        try
-        {
-            if (so.getDataId() == null)
-            {
-                throw new PipelineJobException("DataId is null for SequenceOutputFile");
-            }
-
-            ExpData d = ExperimentService.get().getExpData(so.getDataId());
-            File cachedMetrics = getNimbleLogFile(so.getFile().getParentFile(), so.getLibrary_id());
-
-            Map<String, Object> metricsMap;
-            if (cachedMetrics.exists())
-            {
-                job.getLogger().debug("reading previously calculated metrics from file: " + cachedMetrics.getPath());
-                metricsMap = new HashMap<>();
-                try (CSVReader reader = new CSVReader(Readers.getReader(cachedMetrics), ':'))
-                {
-                    String[] line;
-                    while ((line = reader.readNext()) != null)
-                    {
-                        if (metricsMap.containsKey(StringUtils.trim(line[0])))
-                        {
-                            throw new PipelineJobException("Unexpected duplicate metric names: " + StringUtils.trim(line[0]));
-                        }
-
-                        String value = StringUtils.trim(line[1]);
-                        if (value == null)
-                        {
-                            continue;
-                        }
-
-                        metricsMap.put(StringUtils.trim(line[0]), value.split(" ")[0]);
-                    }
-                }
-
-                job.getLogger().debug("Total metrics: " + metricsMap.size());
-            }
-            else
-            {
-                throw new PipelineJobException("Unable to find metrics file: " + cachedMetrics.getPath());
-            }
-
-            TableInfo metricsTable = DbSchema.get(SingleCellSchema.SEQUENCE_SCHEMA_NAME, DbSchemaType.Module).getTable(SingleCellSchema.TABLE_QUALITY_METRICS);
-            for (String metricName : metricsMap.keySet())
-            {
-                Map<String, Object> r = new HashMap<>();
-                r.put("category", "Nimble");
-                r.put("metricname", metricName);
-                r.put("metricvalue", metricsMap.get(metricName));
-                r.put("dataid", d.getRowId());
-                r.put("readset", so.getReadset());
-                r.put("container", so.getContainer());
-                r.put("createdby", job.getUser().getUserId());
-
-                Table.insert(job.getUser(), metricsTable, r);
-            }
-
-            if (cachedMetrics.exists())
-            {
-                cachedMetrics.delete();
-            }
-        }
-        catch (Exception e)
-        {
-            throw new PipelineJobException(e);
-        }
-    }
 }