Changes from all commits
28 commits
767632e  with current channel and timepoint in the viewIdpath loop (cgoina, Aug 21, 2025)
34e1138  Merge branch 'main' into zarr-group-arg (cgoina, Aug 21, 2025)
4374240  Restored group arg changes (cgoina, Aug 21, 2025)
0d7cf0e  added some comments (cgoina, Aug 21, 2025)
a3ed0ea  factored out Compressions in its own class file (cgoina, Sep 15, 2025)
cdf29fb  removed commented code (cgoina, Sep 15, 2025)
96a0ed1  create the dataset.xml for the original czi and resave it as OME-ZARR (cgoina, Sep 15, 2025)
91a7f54  removed resave from CreateStitchingDataset (cgoina, Sep 15, 2025)
cdaa430  renamed createstitchingdataset to createdataset (cgoina, Sep 15, 2025)
1559eda  chain command (cgoina, Sep 16, 2025)
4dbff7c  create the parent directory if missing (cgoina, Sep 16, 2025)
3662ce2  added a mechanism to create a project for an N5 container that has th… (cgoina, Sep 18, 2025)
b7df061  fixes for tile index (cgoina, Sep 20, 2025)
6625354  Merge branch 'main' into zarr-group-arg (cgoina, Sep 20, 2025)
8bbb5b5  removed Takashi's code (cgoina, Sep 20, 2025)
c1f88e1  print voxel dimensions (cgoina, Sep 21, 2025)
530b006  try again Takashi's fix (cgoina, Sep 22, 2025)
00bdcee  downgraded multiview-reconstruction (cgoina, Sep 22, 2025)
fc5cad3  Merge branch 'main' into zarr-group-arg (cgoina, Sep 27, 2025)
79f2b90  merges from main (cgoina, Sep 27, 2025)
722402b  Merge branch 'main' into zarr-group-arg (cgoina, Oct 8, 2025)
24def9b  Merge branch 'main' into zarr-group-arg (cgoina, Oct 27, 2025)
210c86a  use multiview reconstruction 8.0.1-snapshot to test OME scale values (cgoina, Oct 28, 2025)
e1f2d26  changes based on multiview reconstruction scale fixes (cgoina, Oct 28, 2025)
fa6bd3b  use the average calibration (cgoina, Oct 31, 2025)
272fb08  Merge branch 'main' into fix-downsampled-resolutions (cgoina, Dec 12, 2025)
7daab28  getting the package to compile (cgoina, Dec 12, 2025)
f3a2a67  merge the downsampled changes (cgoina, Dec 12, 2025)
2 changes: 2 additions & 0 deletions install
@@ -129,6 +129,8 @@ install_command solve-intensities "net.preibisch.bigstitcher.spark.IntensitySolv
 install_command create-fusion-container "net.preibisch.bigstitcher.spark.CreateFusionContainer"
 install_command affine-fusion "net.preibisch.bigstitcher.spark.SparkAffineFusion"
 install_command nonrigid-fusion "net.preibisch.bigstitcher.spark.SparkNonRigidFusion"
+install_command create-dataset "net.preibisch.bigstitcher.spark.CreateDataset"
+install_command chain-commands "net.preibisch.bigstitcher.spark.ChainCommands"
 
 echo 'Installing utils ...'
 
2 changes: 1 addition & 1 deletion pom.xml
@@ -103,7 +103,7 @@
 <bigdataviewer-n5.version>1.0.2</bigdataviewer-n5.version>
 
 <spim_data.version>2.3.5</spim_data.version>
-<multiview-reconstruction.version>8.0.0</multiview-reconstruction.version>
+<multiview-reconstruction.version>8.1.2-SNAPSHOT</multiview-reconstruction.version>
 <BigStitcher.version>2.5.0</BigStitcher.version>
 
 <n5-universe.version>2.3.0</n5-universe.version>
129 changes: 129 additions & 0 deletions src/main/java/net/preibisch/bigstitcher/spark/ChainCommands.java
@@ -0,0 +1,129 @@
package net.preibisch.bigstitcher.spark;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Stack;
import java.util.concurrent.Callable;

import mpicbg.spim.data.SpimDataException;
import net.preibisch.bigstitcher.spark.abstractcmdline.AbstractInfrastructure;
import picocli.CommandLine;

public class ChainCommands extends AbstractInfrastructure implements Callable<Void>, Serializable
{
private static final long serialVersionUID = 1584686229152127469L;

static class CommandWithArguments {
final List<String> cmdArgs;

CommandWithArguments(List<String> cmdArgs) {
this.cmdArgs = cmdArgs;
}
}

/**
* Custom converter to collect all arguments after --command until a separator
* (either ';' or '+') or the end of the input.
*/
static class CommandArgsConverter implements CommandLine.IParameterConsumer {
@Override
public void consumeParameters(Stack<String> args, CommandLine.Model.ArgSpec argSpec, CommandLine.Model.CommandSpec commandSpec) {
List<CommandWithArguments> currentCommands = argSpec.getValue();
List<String> commandArgs = new ArrayList<>();
while (!args.isEmpty()) {
String arg = args.pop();

if (";".equals(arg) || "+".equals(arg)) {
break;
}
if (arg.equals("-h") || arg.equals("--help")) {
// push the help flag back at the bottom of the stack so it is handled by the top-level command;
// if nothing else was left to consume, stop right after this
boolean done = args.isEmpty();
args.add(0, arg);
if (done) break;
} else
commandArgs.add(arg);
}
currentCommands.add(new CommandWithArguments(commandArgs));
}
}

@CommandLine.Option(names = { "-h", "--help" }, description = "display this help message", usageHelp = true)
boolean helpFlag;

@CommandLine.Option(names = { "--command" }, parameterConsumer = CommandArgsConverter.class,
description = "Command to execute with its arguments. Multiple commands can be chained using ';' or '+'.\n"
+ "Example: --command create-dataset --input-path /data/images/ --input-pattern '*.tif' ; "
+ "--command detect-interestpoints --detector SIFT --descriptor SIFT ; "
+ "--command match-interestpoints --matcher FLANN ; stitching --stitchingModel Affine")
List<CommandWithArguments> commands = new ArrayList<>();

@Override
public Void call() throws Exception {
for (CommandWithArguments commandArgs : commands) {
if (commandArgs.cmdArgs.isEmpty())
continue;

String cmdName = commandArgs.cmdArgs.get(0);
List<String> cmdArgs = new ArrayList<>(commandArgs.cmdArgs.subList(1, commandArgs.cmdArgs.size()));
addCommonOptions(cmdArgs);

AbstractInfrastructure cmdInstance = getCmdInstance(cmdName);
CommandLine currentCmdLine = new CommandLine(cmdInstance);
System.out.println("Execute command: " + cmdName + " with args: " + cmdArgs);
int exitCode = currentCmdLine.execute(cmdArgs.toArray(new String[0]));
if (exitCode != 0) {
System.err.println("Command " + cmdName + " failed with exit code " + exitCode);
System.exit(exitCode);
}
}
return null;
}

private AbstractInfrastructure getCmdInstance(String name) {
switch (name) {
case "clear-interestpoints": return new ClearInterestPoints();
case "clear-registrations": return new ClearRegistrations();
case "create-container": return new CreateFusionContainer();
case "detect-interestpoints": return new SparkInterestPointDetection();
case "match-interestpoints": return new SparkGeometricDescriptorMatching();
case "nonrigid-fusion": return new SparkNonRigidFusion();
case "create-dataset": return new CreateDataset();
case "stitching": return new SparkPairwiseStitching();
case "resave": return new SparkResaveN5();
case "downsample": return new SparkDownsample();
case "affine-fusion": return new SparkAffineFusion();
case "solver": return new Solver();
default: throw new IllegalArgumentException("Unknown command: " + name);
}
}

private void addCommonOptions(List<String> cmdArgs) {
if (this.dryRun) {
cmdArgs.add("--dryRun");
}
if (this.localSparkBindAddress) {
cmdArgs.add("--localSparkBindAddress");
}
if (this.s3Region != null && !this.s3Region.isEmpty()) {
cmdArgs.add("--s3Region");
cmdArgs.add(this.s3Region);
}
}

public static void main(final String... args) throws SpimDataException {
System.out.println(Arrays.toString(args));

ChainCommands chainedCommands = new ChainCommands();
CommandLine commandLine = new CommandLine(chainedCommands)
    .setUnmatchedOptionsArePositionalParams(true);

System.exit(commandLine.execute(args));
}

}
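The --command option description above shows how a single invocation is split into per-command argument lists at each ';' or '+'. The same grouping can be exercised from Java; a minimal sketch, assuming '-x' is the xml option inherited from AbstractBasic/AbstractInfrastructure (not shown in this diff) and using placeholder paths and subcommand arguments rather than a verified pipeline:

package net.preibisch.bigstitcher.spark;

// Illustrative sketch only: shows how the ';' separator splits one invocation into
// per-command argument lists. Each chained command still needs its own real
// required options at runtime; the values below are placeholders.
public class ChainCommandsUsage {
    public static void main(String[] args) throws Exception {
        ChainCommands.main(
            "--command", "create-dataset",
                "-x", "/tmp/dataset.xml",        // assumed xml option from AbstractBasic
                "--input-path", "/data/images/",
                "--input-pattern", "*.tif",
            ";",
            "--command", "resave",
                "-x", "/tmp/dataset.xml"         // remaining resave options omitted for brevity
        );
    }
}

From a shell, the same chain would be passed to the chain-commands wrapper installed above, with the separator quoted (';') so the shell does not interpret it as its own command separator.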
5 changes: 5 additions & 0 deletions src/main/java/net/preibisch/bigstitcher/spark/Compressions.java
@@ -0,0 +1,5 @@
package net.preibisch.bigstitcher.spark;

public enum Compressions {
Lz4, Gzip, Zstandard, Blosc, Bzip2, Xz, Raw
}
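The enum itself carries no behavior; presumably it is used to select the block compression when resaving to N5/OME-Zarr. A hypothetical mapping onto the core org.janelia.saalfeldlab.n5 compression classes (Zstandard and Blosc live in separate artifacts and are left out of this sketch):

package net.preibisch.bigstitcher.spark;

import org.janelia.saalfeldlab.n5.Bzip2Compression;
import org.janelia.saalfeldlab.n5.Compression;
import org.janelia.saalfeldlab.n5.GzipCompression;
import org.janelia.saalfeldlab.n5.Lz4Compression;
import org.janelia.saalfeldlab.n5.RawCompression;
import org.janelia.saalfeldlab.n5.XzCompression;

// Hypothetical helper, not part of this PR: maps the enum onto n5 Compression instances.
public class CompressionFactory {
    public static Compression create(Compressions c) {
        switch (c) {
            case Lz4:   return new Lz4Compression();
            case Gzip:  return new GzipCompression();
            case Bzip2: return new Bzip2Compression();
            case Xz:    return new XzCompression();
            case Raw:   return new RawCompression();
            default:
                throw new IllegalArgumentException("No mapping in this sketch for: " + c);
        }
    }
}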
68 changes: 68 additions & 0 deletions src/main/java/net/preibisch/bigstitcher/spark/CreateDataset.java
@@ -0,0 +1,68 @@
package net.preibisch.bigstitcher.spark;

import java.io.Serializable;
import java.net.URI;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.concurrent.Callable;

import mpicbg.spim.data.SpimDataException;
import net.preibisch.bigstitcher.spark.abstractcmdline.AbstractBasic;
import net.preibisch.mvrecon.dataset.SpimDatasetBuilder;
import net.preibisch.mvrecon.fiji.spimdata.SpimData2;
import net.preibisch.mvrecon.fiji.spimdata.XmlIoSpimData2;
import picocli.CommandLine;
import picocli.CommandLine.Option;
import util.URITools;

public class CreateDataset extends AbstractBasic implements Callable<Void>, Serializable
{
private static final long serialVersionUID = -5155338208494730656L;

@Option(names = {"--input-path"}, required = true, description = "Path to the input images, e.g. /data/images/")
private String inputPath = null;

@Option(names = {"--input-pattern"}, description = "Glob pattern for input images, e.g. /data/images/*.tif")
private String inputPattern = "*";

@Override
public Void call() throws Exception {
this.setRegion();

SpimData2 spimData = createDataset();

URI xmlURI = URITools.toURI(xmlURIString);

System.out.println("Save spimData with original tiles to " + xmlURI);
prepareSaveLocation(xmlURI);
new XmlIoSpimData2().save(spimData, xmlURI);

return null;
}

private SpimData2 createDataset() {
SpimDatasetBuilder spimDatasetBuilder = new SpimDatasetBuilder(inputPattern);
return spimDatasetBuilder.createDataset(URITools.toURI(inputPath));
}

private void prepareSaveLocation(URI xmlURI) {
if (URITools.isFile( xmlURI )) {
Path xmlPath = Paths.get(xmlURI);
// create parent directories if necessary
if ( !xmlPath.getParent().toFile().exists() ) {
if (!xmlPath.getParent().toFile().mkdirs()) {
// log the error but continue;
// if the directory wasn't created, writing the file will fail later anyway
System.out.println("Failed to create parent directory for " + xmlURI);
}
}
}
}

public static void main(final String... args) throws SpimDataException {
System.out.println(Arrays.toString(args));

System.exit(new CommandLine(new CreateDataset()).execute(args));
}
}
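For reference, a sketch of driving the new command programmatically, the same way the 'create-dataset' wrapper registered in install would; the '-x' output-XML option is assumed to come from AbstractBasic (not part of this diff) and the paths are placeholders:

package net.preibisch.bigstitcher.spark;

// Illustrative sketch only: builds a dataset.xml from a directory of input images.
public class CreateDatasetUsage {
    public static void main(String[] args) throws Exception {
        CreateDataset.main(
            "-x", "/data/project/dataset.xml",   // assumed xml option from AbstractBasic
            "--input-path", "/data/images/",
            "--input-pattern", "*.tif"
        );
    }
}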