code for SparkNormalizeLayerIntensityN5 is fully working again ...
StephanPreibisch committed Nov 23, 2024
1 parent 1952296 commit 20f5a05
Showing 1 changed file with 14 additions and 13 deletions.
@@ -5,6 +5,7 @@
 import java.io.Serializable;
 import java.net.URISyntaxException;
 import java.util.ArrayList;
+import java.util.Date;
 import java.util.List;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.atomic.AtomicLong;
@@ -119,9 +120,9 @@ public static void main(final String... args) throws IOException, InterruptedExc
 			throw new IllegalArgumentException("Options were not parsed successfully");
 		}

-		//final SparkConf conf = new SparkConf().setAppName("SparkNormalizeN5");
-		//final JavaSparkContext sparkContext = new JavaSparkContext(conf);
-		//sparkContext.setLogLevel("ERROR");
+		final SparkConf conf = new SparkConf().setAppName("SparkNormalizeN5");
+		final JavaSparkContext sparkContext = new JavaSparkContext(conf);
+		sparkContext.setLogLevel("ERROR");

 		final N5Reader n5Input = new N5Factory().openReader( StorageFormat.N5, options.n5PathInput );//new N5FSReader(options.n5PathInput);

@@ -141,21 +142,20 @@ public static void main(final String... args) throws IOException, InterruptedExc
 			throw new IllegalArgumentException("Normalized data set exists: " + fullPath);
 		}

-		/*
 		final String downScaledDataset = options.n5DatasetInput + "/s5";
 		final Img<UnsignedByteType> downScaledImg = N5Utils.open(n5Input, downScaledDataset);

-		System.out.println( "Computing shifts ... " );
+		System.out.println( new Date( System.currentTimeMillis() ) + ": Computing shifts ... " );

 		final List<Double> shifts = computeShifts(downScaledImg);
-		*/

-		System.out.println( "Creating " + fullScaleOutputDataset );
+		shifts.forEach( d -> System.out.println( "\t" + d ) );
+
+		System.out.println( new Date( System.currentTimeMillis() ) + ": Creating " + fullScaleOutputDataset );

 		n5Output.createDataset(fullScaleOutputDataset, dimensions, blockSize, DataType.UINT8, new GzipCompression());

-		/*
-		System.out.println( "Kicking off Spark for re-saving ... " );
+		System.out.println( new Date( System.currentTimeMillis() ) + ": Kicking off Spark for re-saving ... " );

 		final JavaRDD<long[][]> pGrid = sparkContext.parallelize(grid);
 		pGrid.foreach(
@@ -167,11 +167,10 @@ public static void main(final String... args) throws IOException, InterruptedExc
 						dimensions,
 						blockSize,
 						gridBlock ));
-		*/

 		n5Output.close();
 		n5Input.close();
-		/*
+
 		final int[] downsampleFactors = parseCSIntArray(options.factors);
 		if (downsampleFactors != null) {
 			downsampleScalePyramid(sparkContext,
@@ -180,8 +179,10 @@ public static void main(final String... args) throws IOException, InterruptedExc
 					outputDataset,
 					downsampleFactors);
 		}
-		*/
-		//sparkContext.close();

+		sparkContext.close();
+		System.out.println( new Date( System.currentTimeMillis() ) + ": Done." );

 	}

 	private static List<Double> computeShifts(RandomAccessibleInterval<UnsignedByteType> rai) {
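For context, here is a minimal, self-contained sketch of the driver-side Spark lifecycle that this commit re-enables: configure and create the context, parallelize the grid of blocks, process each block on the executors, and close the context once all jobs are done. Only the SparkConf / JavaSparkContext / parallelize / foreach / close calls and the timestamped println pattern mirror the diff; the class name SparkDriverSketch, the placeholder grid, and the per-block print body are illustrative assumptions, not the repository's code.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

public class SparkDriverSketch {

	public static void main(final String... args) {

		// driver-side setup, matching the lines this commit un-comments
		final SparkConf conf = new SparkConf().setAppName("SparkNormalizeN5");
		final JavaSparkContext sparkContext = new JavaSparkContext(conf);
		sparkContext.setLogLevel("ERROR");

		// placeholder grid of { offset, size } block descriptors (an assumption);
		// the real grid is derived from the N5 dataset geometry
		final List<long[][]> grid = new ArrayList<>();
		grid.add(new long[][] { { 0, 0, 0 }, { 128, 128, 64 } });

		System.out.println(new Date(System.currentTimeMillis()) + ": Kicking off Spark for re-saving ... ");

		// distribute the blocks; each executor processes one gridBlock
		final JavaRDD<long[][]> pGrid = sparkContext.parallelize(grid);
		pGrid.foreach(gridBlock ->
				System.out.println("processing block at " + Arrays.toString(gridBlock[0])));

		// close the context only after every Spark job (re-save and downsampling) has run
		sparkContext.close();
		System.out.println(new Date(System.currentTimeMillis()) + ": Done.");
	}
}

When launched through spark-submit, the master URL comes from the submit command; for a quick local test one would add .setMaster("local[*]") to the SparkConf.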
