Skip to content

Commit

Permalink
add --surfaceBlockSize parameter that allows overriding the default 128,128 value
Browse files Browse the repository at this point in the history
  • Loading branch information
trautmane committed Apr 5, 2024
1 parent 6315f00 commit 30b370f
Show file tree
Hide file tree
Showing 2 changed files with 20 additions and 2 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -151,6 +151,13 @@ public String getCostDatasetName(final int index) {
@Option(name = "--surfaceMaxDistance", usage = "maximum distance between the both surfaces, e.g. 30")
private double surfaceMaxDistance = 30;

// Comma-separated surface block size supplied on the command line; kept as a raw
// String so the CLI parser stays simple and parsing happens on demand.
@Option(name = "--surfaceBlockSize", usage = "surface block size in pixels, e.g. 128,128")
private String surfaceBlockSizeString = "128,128";

// Parses the --surfaceBlockSize option into a long[] (one entry per dimension).
// NOTE(review): parseCSLongArray is defined elsewhere in this project — presumably
// splits on ',' and parses each token as a long; confirm its failure behavior for
// malformed input (e.g. "128,abc").
private long[] getSurfaceBlockSize() {
return parseCSLongArray(surfaceBlockSizeString);
}

@Option(name = "--localSparkBindAddress", usage = "specify Spark bind address as localhost")
private boolean localSparkBindAddress = false;

Expand Down Expand Up @@ -195,6 +202,9 @@ public static void computeCost(
System.out.println("median Z: " + options.median );
System.out.println("smooth cost Z: " + options.smoothCost );

final long[] surfaceBlockSize = options.getSurfaceBlockSize();
System.out.println("surfaceBlockSize: " + Util.printCoordinates(surfaceBlockSize) );

final N5Reader n5 = new N5FSReader(n5Path);
final N5Writer n5w = new N5FSWriter(costN5Path);

Expand Down Expand Up @@ -321,7 +331,8 @@ public static void computeCost(
options.surfaceMaxDistance,
true, // no need to permute with multi-sem
false);
sparkSurfaceFit.callWithSparkContext(sparkContext);
sparkSurfaceFit.callWithSparkContext(sparkContext,
options.getSurfaceBlockSize());
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,8 @@
*/
package org.janelia.saalfeldlab.hotknife;

import com.beust.jcommander.Parameter;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
Expand Down Expand Up @@ -1408,6 +1410,12 @@ else if ( IntegerType.class.isInstance( t ) )

// Backwards-compatible entry point: delegates to the two-argument overload using
// the previously hard-coded default block size of 128x128, so existing callers
// that predate the --surfaceBlockSize option keep their old behavior.
public void callWithSparkContext(final JavaSparkContext sc)
throws IOException {
callWithSparkContext(sc, new long[] {128, 128});
}

public void callWithSparkContext(final JavaSparkContext sc,
final long[] blockSize)
throws IOException {

final N5Reader n5 = isZarr( n5Path ) ? new N5ZarrReader( n5Path ) : new N5FSReader(n5Path);

Expand Down Expand Up @@ -1491,7 +1499,6 @@ public void callWithSparkContext(final JavaSparkContext sc)

System.out.println( "Processing scale: " + s );

final long[] blockSize = new long[] {128, 128};
final long[] blockPadding = new long[] {32, 32};

final double maxDeltaZ = (s == lastScaleIndex ) ? this.finalMaxDeltaZ : this.maxDeltaZ;
Expand Down

0 comments on commit 30b370f

Please sign in to comment.