@@ -117,7 +117,7 @@ public void complete(PipelineJob job, List<SequenceOutputFile> inputFiles, List<
117 117             throw new PipelineJobException("Unable to find parent for output: " + so.getRowid());
118 118         }
119 119
120     -       processAndImportNextCladeAa(job, so, parent.getAnalysis_id(), parent.getFile(), true);
    120 +       processAndImportNextCladeAa(job, so.getFile(), parent.getAnalysis_id(), so.getLibrary_id(), so.getDataId(), so.getReadset(), parent.getFile(), true);
121 121     }
122 122 }
123123
@@ -134,6 +134,11 @@ public void processFilesRemote(List<SequenceOutputFile> inputFiles, JobContext c
134 134         }
135 135     }
136 136
    137 +   public static File getJsonFile(File outputDir, File consensusFasta)
    138 +   {
    139 +       return new File(outputDir, FileUtil.getBaseName(consensusFasta) + ".json");
    140 +   }
    141 +
137 142     public static File runNextClade(File consensusFasta, Logger log, PipelineOutputTracker tracker, File outputDir) throws PipelineJobException
138 143     {
139 144         if (!consensusFasta.getParentFile().equals(outputDir))
@@ -151,7 +156,7 @@ public static File runNextClade(File consensusFasta, Logger log, PipelineOutputT
151 156         }
152 157     }
153 158
154     -       File jsonFile = new File(outputDir, FileUtil.getBaseName(consensusFasta) + ".json");
    159 +       File jsonFile = getJsonFile(outputDir, consensusFasta);
155 160
156 161         File localBashScript = new File(outputDir, "dockerWrapper.sh");
157 162         try (PrintWriter writer = PrintWriters.getPrintWriter(localBashScript))
@@ -224,17 +229,18 @@ private static JSONObject parseNextClade(File jsonFile) throws PipelineJobExcept
224 229         }
225 230     }
226 231
227     -   public static void processAndImportNextCladeAa(PipelineJob job, SequenceOutputFile so, int analysisId, File consensusVCF, boolean dbImport) throws PipelineJobException
    232 +   public static void processAndImportNextCladeAa(PipelineJob job, File jsonFile, int analysisId, int libraryId, int alignmentId, int readsetId, File consensusVCF, boolean dbImport) throws PipelineJobException
228 233     {
229     -       JSONObject sample = parseNextClade(so.getFile());
    234 +       JSONObject sample = parseNextClade(jsonFile);
230 235
231     -       ReferenceGenome genome = SequenceAnalysisService.get().getReferenceGenome(so.getLibrary_id(), job.getUser());
    236 +       ReferenceGenome genome = SequenceAnalysisService.get().getReferenceGenome(libraryId, job.getUser());
232 237         String clade = sample.getString("clade");
233     -       saveClade(so, clade, analysisId, job);
    238 +       saveClade(clade, analysisId, alignmentId, readsetId, job);
234 239
235 240         if (!dbImport)
236 241         {
237 242             job.getLogger().info("DB Import not selected, will not import AA SNPs");
    243 +           return;
238 244         }
239 245
240 246         JSONArray aaSubstitutions = sample.getJSONArray("aaSubstitutions");
@@ -334,17 +340,17 @@ public static void processAndImportNextCladeAa(PipelineJob job, SequenceOutputFi
334 340         }
335 341     }
336 342
337     -   private static void saveClade(SequenceOutputFile so, String clade, int analysisId, PipelineJob job) throws PipelineJobException
    343 +   private static void saveClade(String clade, int analysisId, int alignmentId, int readsetId, PipelineJob job) throws PipelineJobException
338 344     {
339 345         List<Map<String, Object>> toInsert = new ArrayList<>();
340 346         Map<String, Object> row1 = new CaseInsensitiveHashMap<>();
341     -       row1.put("dataid", so.getDataId());
342     -       row1.put("readset", so.getReadset());
    347 +       row1.put("dataid", alignmentId);
    348 +       row1.put("readset", readsetId);
343 349         row1.put("analysis_id", analysisId);
344 350         row1.put("category", "NextClade");
345 351         row1.put("metricName", "NextCladeClade");
346 352         row1.put("qualvalue", clade);
347     -       row1.put("container", so.getContainer());
    353 +       row1.put("container", job.getContainer().getId());
348 354         toInsert.add(row1);
349 355
350 356         Container targetContainer = job.getContainer().isWorkbook() ? job.getContainer().getParent() : job.getContainer();