@@ -188,7 +188,8 @@ public void init(PipelineJob job, SequenceAnalysisJobSupport support, List<Reads
         rd.setArchived(true);
         rd.setSra_accession(accession);
 
-        File expectedFastq1 = new File(toMerge.get(0).getFile1().getParentFile(), accession + "_1.fastq.gz");
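+        // Resolve the first fastq via its ExpData record (by fileId) to locate the target directory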
+        ExpData data1 = ExperimentService.get().getExpData(toMerge.get(0).getFileId1());
+        File expectedFastq1 = new File(data1.getFile().getParentFile(), accession + "_1.fastq.gz");
         ExpData expData1 = ExperimentService.get().createData(ContainerManager.getForId(rs.getContainer()), new DataType("SequenceData"), accession);
         expData1.setDataFileURI(expectedFastq1.toURI());
         expData1.save(job.getUser());
@@ -197,7 +198,8 @@ public void init(PipelineJob job, SequenceAnalysisJobSupport support, List<Reads
 
         if (toMerge.get(0).getFileId2() != null)
         {
-            File expectedFastq2 = new File(toMerge.get(0).getFile1().getParentFile(), accession + "_2.fastq.gz");
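+            // Resolve the second fastq via its own ExpData record instead of file1's path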
+            ExpData data2 = ExperimentService.get().getExpData(toMerge.get(0).getFileId2());
+            File expectedFastq2 = new File(data2.getFile().getParentFile(), accession + "_2.fastq.gz");
             ExpData expData2 = ExperimentService.get().createData(ContainerManager.getForId(rs.getContainer()), new DataType("SequenceData"), accession);
             expData2.setDataFileURI(expectedFastq2.toURI());
             expData2.save(job.getUser());
@@ -217,12 +219,14 @@ public void init(PipelineJob job, SequenceAnalysisJobSupport support, List<Reads
         job.getLogger().debug("Total reads from prior data: " + totalReads);
 
         job.getLogger().debug("Merging readdata for accession: " + accession);
-        File sraLog = new File(toMerge.get(0).getFile1().getParentFile(), FileUtil.makeLegalName("sraDownload.txt"));
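+        // Place the download log beside the first fastq, reusing the ExpData resolved above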
+        File sraLog = new File(data1.getFile().getParentFile(), FileUtil.makeLegalName("sraDownload.txt"));
         try (PrintWriter writer = PrintWriters.getPrintWriter(IOUtil.openFileForWriting(sraLog, sraLog.exists())))
         {
             for (ReadData r : toMerge)
             {
-                writer.println("Condensing/merging readdata: " + r.getRowid() + ", " + r.getFile1() + ", " + r.getFile1().getPath() + ", " + (r.getFileId2() == null ? "N/A" : r.getFileId2()) + ", " + (r.getFileId2() == null ? "N/A" : r.getFile2().getPath()));
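+                // Look up each file's ExpData by id; d2 remains null when there is no paired file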
+                ExpData d1 = ExperimentService.get().getExpData(r.getFileId1());
+                ExpData d2 = r.getFileId2() == null ? null : ExperimentService.get().getExpData(r.getFileId2());
+                writer.println("Condensing/merging readdata: " + r.getRowid() + ", " + r.getFileId1() + ", " + d1.getFile().getPath() + ", " + (r.getFileId2() == null ? "N/A" : r.getFileId2()) + ", " + (r.getFileId2() == null ? "N/A" : d2.getFile().getPath()));
 
                 List<Map<String, Object>> toDelete = Arrays.asList(Map.of("rowid", r.getRowid()));
                 filesIdsDeleted.add(r.getFileId1());
@@ -235,7 +239,7 @@ public void init(PipelineJob job, SequenceAnalysisJobSupport support, List<Reads
             }
 
             rd = Table.insert(job.getUser(), SequenceAnalysisSchema.getTable(SequenceAnalysisSchema.TABLE_READ_DATA), rd);
-            writer.println("Adding merged readdata: " + rd.getRowid() + ", " + rd.getFile1() + ", " + rd.getFile1().getPath() + ", " + (rd.getFileId2() == null ? "N/A" : rd.getFileId2()) + ", " + (rd.getFileId2() == null ? "N/A" : rd.getFile2().getPath()));
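+            // Resolve paths for the newly inserted row through ExperimentService by fileId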
+            writer.println("Adding merged readdata: " + rd.getRowid() + ", " + rd.getFileId1() + ", " + ExperimentService.get().getExpData(rd.getFileId1()).getFile().getPath() + ", " + (rd.getFileId2() == null ? "N/A" : rd.getFileId2()) + ", " + (rd.getFileId2() == null ? "N/A" : ExperimentService.get().getExpData(rd.getFileId2()).getFile().getPath()));
         }
         catch (QueryUpdateServiceException | SQLException | InvalidKeyException | BatchValidationException e)
         {