@@ -138,7 +138,9 @@ public static void solveCombineAndSaveBlocks(final IntensityCorrectionSetup cmdL
throws IOException, ExecutionException, InterruptedException, NoninvertibleModelException {

final Worker<ArrayList<AffineModel1D>, ?> worker = block.createWorker(solverSetup.distributedSolve.threadsWorker);
return new ArrayList<>(worker.call());
final List<BlockData<ArrayList<AffineModel1D>, ?>> resultList = new ArrayList<>(worker.call());
LOG.info("createAndRunWorker: returning resultList with {} entries", resultList.size());
return resultList;
}

public ResultContainer<ArrayList<AffineModel1D>> assembleBlocks(final List<BlockData<ArrayList<AffineModel1D>, ?>> allItems) {
@@ -32,6 +32,8 @@ public class FIBSEMIntensityCorrectionParameters<M>
private final double maxAllowedError;
private final int maxIterations;
private final int maxPlateauWidth;
private final boolean useRansacMatching;


public FIBSEMIntensityCorrectionParameters(
final M blockSolveModel,
@@ -53,7 +55,8 @@ public FIBSEMIntensityCorrectionParameters(
intensityAdjust.equilibrationWeight,
intensityAdjust.maxAllowedError,
intensityAdjust.maxIterations,
intensityAdjust.maxPlateauWidth);
intensityAdjust.maxPlateauWidth,
intensityAdjust.useRansacMatching);
}

public FIBSEMIntensityCorrectionParameters(
@@ -72,7 +75,8 @@ public FIBSEMIntensityCorrectionParameters(
final double equilibrationWeight,
final double maxAllowedError,
final int maxIterations,
final int maxPlateauWidth) {
final int maxPlateauWidth,
final boolean useRansacMatching) {
// TODO: properly copy blockSolveModel
super(baseDataUrl, owner, project, stack, blockSolveModel);

@@ -87,6 +91,7 @@ public FIBSEMIntensityCorrectionParameters(
this.maxAllowedError = maxAllowedError;
this.maxIterations = maxIterations;
this.maxPlateauWidth = maxPlateauWidth;
this.useRansacMatching = useRansacMatching;
}

public long maxPixelCacheGb() { return maxPixelCacheGb; }
@@ -100,6 +105,7 @@ public FIBSEMIntensityCorrectionParameters(
public double maxAllowedError() { return maxAllowedError; }
public int maxIterations() { return maxIterations; }
public int maxPlateauWidth() { return maxPlateauWidth; }
public boolean useRansacMatching() { return useRansacMatching; }

@Override
public Worker<ArrayList<AffineModel1D>, FIBSEMIntensityCorrectionParameters<M>> createWorker(
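The new constructor argument above is read from intensityAdjust.useRansacMatching, whose declaration is not part of this diff. Assuming the intensity-adjust parameter object follows the JCommander @Parameter style used elsewhere in the render clients, the field might look roughly like this (hypothetical sketch; flag name and wording are illustrative):

// Hypothetical declaration on the intensity-adjust parameter object (not in this diff)
@Parameter(
        names = "--useRansacMatching",
        description = "Use RANSAC-based filtering of intensity matches instead of histogram matching")
public boolean useRansacMatching = false;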
@@ -15,8 +15,6 @@
import org.janelia.alignment.spec.TileSpec;
import org.janelia.alignment.util.ImageProcessorCache;
import org.janelia.render.client.intensityadjust.AdjustBlock;
import org.janelia.render.client.intensityadjust.intensity.PointMatchFilter;
import org.janelia.render.client.intensityadjust.intensity.RansacRegressionReduceFilter;
import org.janelia.render.client.newsolver.BlockData;
import org.janelia.render.client.newsolver.assembly.ResultContainer;
import org.janelia.render.client.newsolver.blocksolveparameters.FIBSEMIntensityCorrectionParameters;
@@ -86,9 +84,9 @@ public List<BlockData<ArrayList<AffineModel1D>, FIBSEMIntensityCorrectionParamet
});

LOG.info("call: exit, renderStack={}, blockData={}, processing took {} seconds",
renderStack, blockData, (System.currentTimeMillis() - startTime) / 1000.0);
renderStack, blockData.toDetailsString(), (System.currentTimeMillis() - startTime) / 1000.0);

return new ArrayList<>(List.of(blockData));
return List.of(blockData);
}

private void fetchResolvedTiles()
@@ -146,7 +144,7 @@ private HashMap<String, IntensityTile> splitIntoCoefficientTiles(

final List<ValuePair<TileSpec, TileSpec>> patchPairs = findOverlappingPatches(tiles, parameters.zDistance());

LOG.info("splitIntoCoefficientTiles: matching intensities, renderStack={}, blockData={}, patchPairs.size={}, numThreads={}",
LOG.info("splitIntoCoefficientTiles: matching intensities, renderStack={}, blockData={}, patchPairs.size={}, numThreads={}",
renderStack, blockData, patchPairs.size(), numThreads);

// for all pairs of images that do overlap, extract matching intensity values (intensity values that should be the same)
@@ -157,7 +155,7 @@
for (final ValuePair<TileSpec, TileSpec> patchPair : patchPairs) {
final TileSpec p1 = patchPair.getA();
final TileSpec p2 = patchPair.getB();
final Runnable matchJob = () -> matcher.match(p1, p2, coefficientTiles);
final Runnable matchJob = () -> matcher.match(renderStack, p1, p2, coefficientTiles);
matchTasks.add(exec.submit(matchJob));
}

@@ -194,7 +192,12 @@ private IntensityMatcher getIntensityMatcher(
final List<TileSpec> tiles,
final ImageProcessorCache imageProcessorCache
) {
final PointMatchFilter filter = new RansacRegressionReduceFilter(new AffineModel1D());
final MatchFilter filter;
if (this.parameters.useRansacMatching()) {
filter = new RansacMatchFilter();
} else {
filter = new HistogramMatchFilter();
}
final int meshResolution = (int) tiles.get(0).getMeshCellSize();
return new IntensityMatcher(filter, parameters, meshResolution, imageProcessorCache);
}
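The method above now selects one of two MatchFilter implementations based on useRansacMatching. The interface itself is not included in this diff; a minimal sketch of what it presumably looks like, inferred from the HistogramMatchFilter implementation further down, is:

package org.janelia.render.client.newsolver.solvers.intensity;

import java.io.IOException;
import java.util.List;

import mpicbg.models.PointMatch;

/**
 * Sketch of the MatchFilter interface (its source is not part of this diff);
 * the signature is inferred from the HistogramMatchFilter implementation below.
 */
public interface MatchFilter {

    /** Reduce a flat collection of raw intensity correspondences to a small list of point matches. */
    List<PointMatch> filter(FlatIntensityMatches matches) throws IOException;
}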
@@ -244,8 +247,9 @@ private static ArrayList<ValuePair<TileSpec, TileSpec>> findOverlappingPatches(
@SuppressWarnings("SameParameterValue")
private void solveForGlobalCoefficients(final Map<String, IntensityTile> coefficientTiles) {

LOG.info("solveForGlobalCoefficients: entry, renderStack={}, blockData={}",
renderStack, blockData);
final String stackAndBlockForLog = "stack=" + renderStack + ", blockData=" + blockData;
LOG.info("solveForGlobalCoefficients: entry, {}, coefficientTiles.size={}, numThreads={}",
stackAndBlockForLog, coefficientTiles.size(), numThreads);

final IntensityTile equilibrationTile = new IntensityTile(IdentityModel::new, 1, 1);

@@ -263,16 +267,13 @@ private void solveForGlobalCoefficients(final Map<String, IntensityTile> coeffic
fixedTile = tiles.get(0);
}

LOG.info("solveForGlobalCoefficients: optimize tiles, renderStack={}, blockData={}, tiles.size={}, numThreads={}",
renderStack, blockData, tiles.size(), numThreads);

final IntensityTileOptimizer optimizer = new IntensityTileOptimizer(
blockData.solveTypeParameters().maxAllowedError(),
blockData.solveTypeParameters().maxIterations(),
blockData.solveTypeParameters().maxPlateauWidth(),
1.0,
numThreads);
optimizer.optimize(tiles, fixedTile);
optimizer.optimize(tiles, fixedTile, stackAndBlockForLog);

// TODO: this is not the right error measure, what is idToBlockErrorMap supposed to be exactly?
coefficientTiles.forEach((tileId, tile) -> {
@@ -283,8 +284,8 @@ private void solveForGlobalCoefficients(final Map<String, IntensityTile> coeffic
blockData.getResults().recordAllErrors(tileId, errorMap);
});

LOG.info("solveForGlobalCoefficients: exit, renderStack={}, blockData={}, returning intensity coefficients for {} tiles",
renderStack, blockData, coefficientTiles.size());
LOG.info("solveForGlobalCoefficients: exit, {}, returning intensity coefficients for {} tiles",
stackAndBlockForLog, coefficientTiles.size());
}

private void connectTilesWithinPatches(
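RansacMatchFilter is referenced in getIntensityMatcher above, but its source is also not part of this diff. A hypothetical sketch, assuming it mirrors the removed RansacRegressionReduceFilter (RANSAC-filter the raw matches with a 1D affine model, refit on the inliers, reduce to two surrogate matches), and using the FlatIntensityMatches and Point1D types that appear below:

package org.janelia.render.client.newsolver.solvers.intensity;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import mpicbg.models.AffineModel1D;
import mpicbg.models.PointMatch;

/**
 * Hypothetical sketch of RansacMatchFilter (not part of this diff), assuming it
 * mirrors the removed RansacRegressionReduceFilter.
 */
public class RansacMatchFilter implements MatchFilter {

    @Override
    public List<PointMatch> filter(final FlatIntensityMatches matches) throws IOException {
        final AffineModel1D model = new AffineModel1D();

        // wrap the flat arrays as mpicbg point matches
        final List<PointMatch> candidates = new ArrayList<>(matches.size());
        for (int i = 0; i < matches.size(); i++) {
            candidates.add(new PointMatch(new Point1D(matches.p[i]), new Point1D(matches.q[i]), matches.w[i]));
        }

        final List<PointMatch> inliers = new ArrayList<>();
        try {
            // iterations, max error, and min inlier ratio are illustrative values
            model.filterRansac(candidates, inliers, 1000, 0.1, 0.1);
            model.fit(inliers); // throws if there are too few inliers
        } catch (final Exception e) {
            throw new IOException("RANSAC filtering of intensity matches failed", e);
        }

        // reduce to two surrogate matches that reproduce the fitted model
        double min = Double.MAX_VALUE;
        double max = -Double.MAX_VALUE;
        for (final PointMatch pm : inliers) {
            final double value = pm.getP1().getL()[0];
            min = Math.min(min, value);
            max = Math.max(max, value);
        }
        final Point1D p1 = new Point1D(min);
        final Point1D p2 = new Point1D(max);
        final List<PointMatch> reduced = new ArrayList<>(2);
        reduced.add(new PointMatch(p1, new Point1D(model.apply(p1.getL())[0])));
        reduced.add(new PointMatch(p2, new Point1D(model.apply(p2.getL())[0])));
        return reduced;
    }
}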
@@ -0,0 +1,57 @@
package org.janelia.render.client.newsolver.solvers.intensity;


/**
* A flat collection of intensity matches with weights.
*/
public class FlatIntensityMatches {
// Package-private fields for convenient access within this package
final double[] p;
final double[] q;
final double[] w;
int nItems;


/**
* Create a new intensity match collection with the specified capacity.
* @param capacity the number of matches to allocate space for
*/
public FlatIntensityMatches(final int capacity) {
this.p = new double[capacity];
this.q = new double[capacity];
this.w = new double[capacity];
this.nItems = 0;
}

/**
* Add a new intensity match to the collection.
* @param p the intensity value in the first tile
* @param q the intensity value in the second tile
* @param w the weight of the match
*/
public void put(final double p, final double q, final double w) {
if (nItems >= this.p.length) {
throw new IllegalStateException("Cannot add more items than allocated: " + this.p.length);
}
this.p[nItems] = p;
this.q[nItems] = q;
this.w[nItems] = w;
this.nItems++;
}

/**
* Get the number of matches in the collection.
* @return the number of matches
*/
public int size() {
return nItems;
}

/**
* Check whether the collection is empty.
* @return true if the collection is empty, false otherwise
*/
public boolean isEmpty() {
return nItems == 0;
}
}
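A short usage sketch for the class above; the values are made up for illustration:

// Hypothetical usage: collect corresponding intensity samples from two
// overlapping tiles, then hand the collection to a MatchFilter.
final FlatIntensityMatches matches = new FlatIntensityMatches(3);
matches.put(0.25, 0.30, 1.0); // intensity in tile p, intensity in tile q, weight
matches.put(0.50, 0.55, 1.0);
matches.put(0.75, 0.80, 1.0);
// matches.size() == 3, matches.isEmpty() == false;
// a fourth put(...) would throw an IllegalStateException because the capacity is 3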
@@ -0,0 +1,79 @@
package org.janelia.render.client.newsolver.solvers.intensity;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import mpicbg.models.AffineModel1D;
import mpicbg.models.IllDefinedDataPointsException;
import mpicbg.models.PointMatch;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;


/**
* A match filter that
* - creates matches based on percentiles of the intensity distributions
* - reduces the matches to two surrogate matches that yield the same affine model when fitted
* Note: this ignores match weights!
*/
public class HistogramMatchFilter implements MatchFilter {
final AffineModel1D model = new AffineModel1D();

@Override
public List<PointMatch> filter(final FlatIntensityMatches matches) throws IOException {
// Sort values to easily get percentiles
final double[] sortedPValues = Arrays.copyOf(matches.p, matches.size());
Arrays.sort(sortedPValues);
final double[] sortedQValues = Arrays.copyOf(matches.q, matches.size());
Arrays.sort(sortedQValues);

// Create percentile matches
final int nValues = sortedPValues.length;
final int nSamples = Math.max(nValues, 100);
final List<PointMatch> candidates = new ArrayList<>(nSamples);
for (int i = 0; i < nSamples; i++) {
final double percentile = (i + 0.5) / nSamples;
final int index = Math.min((int) (percentile * nValues), nValues - 1);
final Point1D p = new Point1D(sortedPValues[index]);
final Point1D q = new Point1D(sortedQValues[index]);
candidates.add(new PointMatch(p, q, 1.0));
}

// Fit an affine model to the percentile matches
try {
model.fit(candidates);
} catch (final IllDefinedDataPointsException e) {
// All intensity values are identical -> assume translation only and
// return a single match, hoping that there are more matches with other tiles
final List<PointMatch> reducedCandidates = new ArrayList<>(2);
reducedCandidates.add(new PointMatch(new Point1D(sortedPValues[0]), new Point1D(sortedQValues[0]), 2.0));
return reducedCandidates;
} catch (final Exception e) {
final int modelIdentityHashCode = System.identityHashCode(model);
final String sortedPValuesString = Arrays.toString(sortedPValues);
final String sortedQValuesString = Arrays.toString(sortedQValues);
LOG.error("failed to fit candidates to affine model {}, sortedPValues={}, sortedQValues={}",
modelIdentityHashCode, sortedPValuesString, sortedQValuesString);
throw new IOException("error fitting affine model " + modelIdentityHashCode + " to histogram matches", e);
}

// Reduce to two surrogate matches that yield the same model
final double min = sortedPValues[0];
final double max = sortedPValues[nValues - 1];
final Point1D p1 = new Point1D(min);
final Point1D q1 = new Point1D(model.apply(p1.getL())[0]);
final Point1D p2 = new Point1D(max);
final Point1D q2 = new Point1D(model.apply(p2.getL())[0]);

final List<PointMatch> reducedCandidates = new ArrayList<>(2);
reducedCandidates.add(new PointMatch(p1, q1, 1.0));
reducedCandidates.add(new PointMatch(p2, q2, 1.0));

return reducedCandidates;
}

private static final Logger LOG = LoggerFactory.getLogger(HistogramMatchFilter.class);
}
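For orientation, a hedged usage sketch of the filter above (illustrative values only): the samples are related by q = 2p + 0.1, so the fitted AffineModel1D should recover approximately that mapping, and the filter reduces everything to two surrogate matches spanning the observed range of p.

package org.janelia.render.client.newsolver.solvers.intensity;

import java.io.IOException;
import java.util.List;

import mpicbg.models.PointMatch;

// Hypothetical demo, assuming the classes from this diff are on the classpath.
public class HistogramMatchFilterDemo {
    public static void main(final String[] args) throws IOException {
        final FlatIntensityMatches matches = new FlatIntensityMatches(5);
        for (int i = 0; i < 5; i++) {
            final double p = 0.1 * (i + 1);
            matches.put(p, 2.0 * p + 0.1, 1.0); // q = 2p + 0.1
        }
        final List<PointMatch> reduced = new HistogramMatchFilter().filter(matches);
        // two surrogate matches at min(p) and max(p), mapped through the fitted model
        System.out.println("surrogate matches: " + reduced.size());
    }
}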