Skip to content

Commit e9e589f

Browse files
committed
Merge discvr-21.3 to discvr-21.7
2 parents f62a9f5 + a34e24b commit e9e589f

File tree

2 files changed

+41
-0
lines changed

2 files changed

+41
-0
lines changed

SequenceAnalysis/src/org/labkey/sequenceanalysis/pipeline/ReadsetCreationTask.java

Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@
 import org.jetbrains.annotations.NotNull;
 import org.labkey.api.collections.CaseInsensitiveHashMap;
 import org.labkey.api.data.CompareType;
+import org.labkey.api.data.Container;
 import org.labkey.api.data.DbSchema;
 import org.labkey.api.data.DbScope;
 import org.labkey.api.data.SimpleFilter;
@@ -35,7 +36,11 @@
 import org.labkey.api.pipeline.PipelineJobException;
 import org.labkey.api.pipeline.RecordedAction;
 import org.labkey.api.pipeline.RecordedActionSet;
+import org.labkey.api.query.BatchValidationException;
 import org.labkey.api.query.FieldKey;
+import org.labkey.api.query.InvalidKeyException;
+import org.labkey.api.query.QueryService;
+import org.labkey.api.query.QueryUpdateServiceException;
 import org.labkey.api.reader.Readers;
 import org.labkey.api.sequenceanalysis.SequenceAnalysisService;
 import org.labkey.api.sequenceanalysis.model.Readset;
@@ -159,6 +164,7 @@ private void importReadsets() throws PipelineJobException
         Set<Integer> fileIdsWithExistingMetrics = new HashSet<>();
         try (DbScope.Transaction transaction = schema.getScope().ensureTransaction())
         {
+            Set<Integer> readsetsToDeactivate = new HashSet<>();
             TableInfo readsetTable = schema.getTable(SequenceAnalysisSchema.TABLE_READSETS);
             TableInfo readDataTable = schema.getTable(SequenceAnalysisSchema.TABLE_READ_DATA);
 
@@ -171,6 +177,7 @@ private void importReadsets() throws PipelineJobException
             List<ReadDataImpl> preexistingReadData;
             if (readsetExists)
             {
+                readsetsToDeactivate.add(r.getReadsetId());
                 preexistingReadData = ((SequenceReadsetImpl)SequenceAnalysisService.get().getReadset(r.getReadsetId(), getJob().getUser())).getReadDataImpl();
             }
             else
@@ -425,8 +432,33 @@ private void importReadsets() throws PipelineJobException
                 }
             }
 
+            if (!readsetsToDeactivate.isEmpty())
+            {
+                getJob().getLogger().info("Setting " + readsetsToDeactivate.size() + " readsets to status=replaced");
+                List<Map<String, Object>> toUpdate = new ArrayList<>();
+                List<Map<String, Object>> toUpdateKeys = new ArrayList<>();
+                readsetsToDeactivate.forEach(rs -> {
+                    Map<String, Object> row = new CaseInsensitiveHashMap<>();
+                    row.put("rowid", rs);
+                    row.put("status", "Replaced");
+                    toUpdate.add(row);
+
+                    row = new CaseInsensitiveHashMap<>();
+                    row.put("rowid", rs);
+                    toUpdateKeys.add(row);
+                });
+
+                Container targetContainer = getJob().getContainer().isWorkbook() ? getJob().getContainer().getParent() : getJob().getContainer();
+                TableInfo ti = QueryService.get().getUserSchema(getJob().getUser(), targetContainer, SequenceAnalysisSchema.SCHEMA_NAME).getTable(SequenceAnalysisSchema.TABLE_READSETS);
+                ti.getUpdateService().updateRows(getJob().getUser(), targetContainer, toUpdate, toUpdateKeys, null, null);
+            }
+
             transaction.commit();
         }
+        catch (SQLException | InvalidKeyException | BatchValidationException | QueryUpdateServiceException e)
+        {
+            throw new PipelineJobException(e);
+        }
 
         //NOTE: this is outside the transaction because it can take a long time.
         int idx = 0;

SequenceAnalysis/src/org/labkey/sequenceanalysis/run/preprocessing/TagPcrSummaryStep.java

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -165,12 +165,21 @@ public Output performAnalysisPerSampleRemote(Readset rs, File inputBam, ReferenceGenome referenceGenome, File outputDir)
         {
             primerTable = new File(outputDir, basename + ".primers.txt");
         }
+        else
+        {
+            getPipelineCtx().getLogger().info("will not design primers");
+        }
 
         File genbank = null;
         if (outputGenbank)
         {
             genbank = new File(outputDir, basename + ".sites.gb");
         }
+        else
+        {
+            getPipelineCtx().getLogger().info("will not output genbank file");
+        }
+
         File metrics = getMetricsFile(inputBam, outputDir);
 
         List<String> extraArgs = new ArrayList<>(getClientCommandArgs());

0 commit comments

Comments (0)