Commit

Merge pull request #226 from melissalinkert/picocli-default-reset
Update setters to accept default values
sbesson authored Nov 29, 2023
2 parents e5a2a2e + ec390b6 commit 082fbd8
Showing 3 changed files with 140 additions and 57 deletions.
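
The whole diff applies one picocli pattern: each field initializer in Converter.java is removed and the default moves onto the corresponding @Option or @Parameters annotation, either as a literal defaultValue or as Option.NULL_VALUE when "unset" should arrive as null. Picocli then re-applies those defaults on every parse and invokes the setter even when the option is omitted, which is why the setters now have to accept the default value (including null). The following minimal sketch is a hypothetical command, not part of this repository, showing the pattern under that assumption; a second sketch after the diff applies the same idea to Converter itself.

import java.util.concurrent.Callable;

import picocli.CommandLine;
import picocli.CommandLine.Command;
import picocli.CommandLine.Option;

// Hypothetical miniature command illustrating the pattern in this commit:
// no field initializers, defaults declared on the annotations, and setters
// prepared to receive those defaults (including null via Option.NULL_VALUE).
@Command(name = "tile-demo")
public class TileCommand implements Callable<Integer> {

  private volatile int tileWidth;        // previously would have been "= 1024"
  private volatile Integer resolutions;  // null means "calculate automatically"

  @Option(
    names = "--tile-width",
    description = "Tile width (default: ${DEFAULT-VALUE})",
    defaultValue = "1024"
  )
  public void setTileWidth(int width) {
    tileWidth = width;                   // called with 1024 if the flag is omitted
  }

  @Option(
    names = "--resolutions",
    description = "Number of pyramid resolutions to generate",
    defaultValue = Option.NULL_VALUE
  )
  public void setResolutions(Integer resolutions) {
    this.resolutions = resolutions;      // called with null if the flag is omitted
  }

  @Override
  public Integer call() {
    System.out.println(tileWidth + " / " + resolutions);
    return 0;
  }

  public static void main(String[] args) {
    System.exit(new CommandLine(new TileCommand()).execute(args));
  }
}
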
build.gradle: 2 changes (1 addition, 1 deletion)
@@ -38,7 +38,7 @@ configurations.all {
dependencies {
implementation 'net.java.dev.jna:jna:5.10.0'
implementation 'ome:formats-gpl:7.0.1'
implementation 'info.picocli:picocli:4.6.1'
implementation 'info.picocli:picocli:4.7.5'
implementation 'com.univocity:univocity-parsers:2.8.4'
implementation 'dev.zarr:jzarr:0.4.2'
// implementation 'org.carlspring.cloud.aws:s3fs-nio:1.0-SNAPSHOT'
src/main/java/com/glencoesoftware/bioformats2raw/Converter.java: 150 changes (94 additions, 56 deletions)
@@ -130,49 +130,41 @@ public class Converter implements Callable<Integer> {

private Map<String, String> outputOptions;
private volatile Integer pyramidResolutions;
private volatile List<Integer> seriesList = new ArrayList<Integer>();
private volatile List<Integer> seriesList;

private volatile int tileWidth = 1024;
private volatile int tileHeight = 1024;
private volatile int chunkDepth = 1;
private volatile String logLevel = "WARN";
private volatile int tileWidth;
private volatile int tileHeight;
private volatile int chunkDepth;
private volatile String logLevel;
private volatile boolean progressBars = false;
private volatile boolean printVersion = false;

// cap the default worker count at 4, to prevent problems with
// large images that are not tiled
private volatile int maxWorkers =
(int) Math.min(4, Runtime.getRuntime().availableProcessors());

private volatile int maxCachedTiles = 64;
private volatile ZarrCompression compressionType = ZarrCompression.blosc;
private volatile Map<String, Object> compressionProperties =
new HashMap<String, Object>();;
private volatile Class<?>[] extraReaders = new Class[] {
PyramidTiffReader.class, MiraxReader.class,
BioTekReader.class, ND2PlateReader.class
};
private volatile int maxWorkers;
private volatile int maxCachedTiles;
private volatile ZarrCompression compressionType;
private volatile Map<String, Object> compressionProperties;
private volatile Class<?>[] extraReaders;
private volatile boolean omeroMetadata = true;
private volatile boolean nested = true;
private volatile String pyramidName = null;
private volatile String scaleFormatString = "%d/%d";
private volatile String scaleFormatString;
private volatile Path additionalScaleFormatStringArgsCsv;

/** Additional scale format string arguments after parsing. */
private volatile List<String[]> additionalScaleFormatStringArgs;

private volatile DimensionOrder dimensionOrder = DimensionOrder.XYZCT;
private volatile DimensionOrder dimensionOrder;
private volatile File memoDirectory;
private volatile boolean keepMemoFiles = false;
private volatile Downsampling downsampling = Downsampling.SIMPLE;
private volatile Downsampling downsampling;
private volatile boolean overwrite = false;
private volatile Short fillValue = null;
private volatile List<String> readerOptions = new ArrayList<String>();
private volatile List<String> readerOptions;
private volatile boolean noHCS = false;
private volatile boolean noOMEMeta = false;
private volatile boolean noRootGroup = false;
private volatile boolean reuseExistingResolutions = false;
private volatile int minSize = MIN_SIZE;
private volatile int minSize;

/** Scaling implementation that will be used during downsampling. */
private volatile IImageScaler scaler = new SimpleImageScaler();
@@ -219,11 +211,17 @@ public class Converter implements Callable<Integer> {
@Parameters(
index = "0",
arity = "1",
description = "file to convert"
description = "file to convert",
defaultValue = Option.NULL_VALUE
)
public void setInputPath(String input) {
// this could be expanded later to support files not on disk
inputPath = Paths.get(input);
if (input != null) {
// this could be expanded later to support files not on disk
inputPath = Paths.get(input);
}
else {
inputPath = null;
}
}

/**
@@ -238,7 +236,8 @@ public void setInputPath(String input) {
"Filesystems. For example, if the output path given " +
"is 's3://my-bucket/some-path' *and* you have an "+
"S3FileSystem implementation in your classpath, then " +
"all files will be written to S3."
"all files will be written to S3.",
defaultValue = Option.NULL_VALUE
)
public void setOutputPath(String output) {
outputLocation = output;
@@ -256,7 +255,8 @@ public void setOutputPath(String output) {
"to be used as an additional argument to Filesystem " +
"implementations if used. For example, " +
"--output-options=s3fs_path_style_access=true|... " +
"might be useful for connecting to minio."
"might be useful for connecting to minio.",
defaultValue = Option.NULL_VALUE
)
public void setOutputOptions(Map<String, String> options) {
outputOptions = options;
@@ -266,16 +266,19 @@ public void setOutputOptions(Map<String, String> options) {
* Define the number of resolutions in the generated pyramid.
* By default, the resolution count is calculated based upon
* the input image size. The resolution count includes the
* full-resolution image, so must be greater than 0.
* full-resolution image. If the resolution count is null,
* then the number of resolutions will be calculated automatically.
* If not null, it must be greater than 0.
*
* @param resolutions pyramid resolution count
*/
@Option(
names = {"-r", "--resolutions"},
description = "Number of pyramid resolutions to generate"
description = "Number of pyramid resolutions to generate",
defaultValue = Option.NULL_VALUE
)
public void setResolutions(int resolutions) {
if (resolutions > 0) {
public void setResolutions(Integer resolutions) {
if (resolutions == null || resolutions > 0) {
pyramidResolutions = resolutions;
}
else {
@@ -294,12 +297,16 @@ public void setResolutions(int resolutions) {
names = {"-s", "--series"},
arity = "0..1",
split = ",",
description = "Comma-separated list of series indexes to convert"
description = "Comma-separated list of series indexes to convert",
defaultValue = Option.NULL_VALUE
)
public void setSeriesList(List<Integer> seriesToConvert) {
if (seriesToConvert != null) {
seriesList = seriesToConvert;
}
else {
seriesList = new ArrayList<Integer>();
}
}

/**
@@ -394,7 +401,8 @@ public void setLogLevel(String level) {
@Option(
names = {"-p", "--progress"},
description = "Print progress bars during conversion",
help = true
help = true,
defaultValue = "false"
)
public void setProgressBars(boolean useProgressBars) {
progressBars = useProgressBars;
@@ -409,7 +417,8 @@ public void setProgressBars(boolean useProgressBars) {
@Option(
names = "--version",
description = "Print version information and exit",
help = true
help = true,
defaultValue = "false"
)
public void setPrintVersionOnly(boolean versionOnly) {
printVersion = versionOnly;
@@ -423,10 +432,15 @@ public void setPrintVersionOnly(boolean versionOnly) {
*/
@Option(
names = {"--max-workers", "--max_workers"},
description = "Maximum number of workers (default: ${DEFAULT-VALUE})"
description = "Maximum number of workers (default: ${DEFAULT-VALUE})",
defaultValue = "4"
)
public void setMaxWorkers(int workers) {
if (workers > 0) {
int availableProcessors = Runtime.getRuntime().availableProcessors();
if (workers > availableProcessors) {
maxWorkers = availableProcessors;
}
else if (workers > 0) {
maxWorkers = workers;
}
else {
@@ -478,12 +492,16 @@ public void setCompression(ZarrCompression compression) {
names = {"--compression-properties"},
description = "Properties for the chosen compression (see " +
"https://jzarr.readthedocs.io/en/latest/tutorial.html#compressors" +
" )"
" )",
defaultValue = Option.NULL_VALUE
)
public void setCompressionProperties(Map<String, Object> properties) {
if (properties != null) {
compressionProperties = properties;
}
else {
compressionProperties = new HashMap<String, Object>();
}
}

/**
@@ -498,7 +516,12 @@ public void setCompressionProperties(Map<String, Object> properties) {
arity = "0..1",
split = ",",
description = "Separate set of readers to include; " +
"(default: ${DEFAULT-VALUE})"
"(default: ${DEFAULT-VALUE})",
defaultValue =
"com.glencoesoftware.bioformats2raw.PyramidTiffReader," +
"com.glencoesoftware.bioformats2raw.MiraxReader," +
"com.glencoesoftware.bioformats2raw.BioTekReader," +
"com.glencoesoftware.bioformats2raw.ND2PlateReader"
)
public void setExtraReaders(Class<?>[] extraReaderList) {
if (extraReaderList != null) {
@@ -518,7 +541,8 @@ public void setExtraReaders(Class<?>[] extraReaderList) {
description = "Whether to calculate minimum and maximum pixel " +
"values. Min/max calculation can result in slower " +
"conversions. If true, min/max values are saved as " +
"OMERO rendering metadata (true by default)"
"OMERO rendering metadata (true by default)",
defaultValue = "false"
)
public void setCalculateOMEROMetadata(boolean noMinMax) {
omeroMetadata = !noMinMax;
@@ -533,7 +557,8 @@ public void setCalculateOMEROMetadata(boolean noMinMax) {
@Option(
names = "--no-nested", negatable=true,
description = "Whether to use '/' as the chunk path separator " +
"(true by default)"
"(true by default)",
defaultValue = "false"
)
public void setUnnested(boolean unnested) {
nested = !unnested;
@@ -550,7 +575,8 @@ public void setUnnested(boolean unnested) {
@Option(
names = "--pyramid-name",
description = "Name of pyramid (default: ${DEFAULT-VALUE}) " +
"[Can break compatibility with raw2ometiff]"
"[Can break compatibility with raw2ometiff]",
defaultValue = Option.NULL_VALUE
)
public void setPyramidName(String pyramid) {
pyramidName = pyramid;
@@ -568,7 +594,8 @@ public void setPyramidName(String pyramid) {
"by any additional arguments brought in from " +
"`--additional-scale-format-string-args` " +
"[Can break compatibility with raw2ometiff] " +
"(default: ${DEFAULT-VALUE})"
"(default: ${DEFAULT-VALUE})",
defaultValue = "%d/%d"
)
public void setScaleFormat(String formatString) {
if (formatString != null) {
@@ -588,7 +615,8 @@ public void setScaleFormat(String formatString) {
"scale format string mapping the at the corresponding CSV " +
"row index. It is expected that the CSV file contain " +
"exactly the same number of rows as the input file has " +
"series"
"series",
defaultValue = Option.NULL_VALUE
)
public void setAdditionalScaleFormatCSV(Path scaleFormatCSV) {
additionalScaleFormatStringArgsCsv = scaleFormatCSV;
@@ -601,7 +629,8 @@ public void setAdditionalScaleFormatCSV(Path scaleFormatCSV) {
*/
@Option(
names = "--memo-directory",
description = "Directory used to store .bfmemo cache files"
description = "Directory used to store .bfmemo cache files",
defaultValue = Option.NULL_VALUE
)
public void setMemoDirectory(File memoDir) {
memoDirectory = memoDir;
@@ -615,7 +644,8 @@ public void setMemoDirectory(File memoDir) {
*/
@Option(
names = "--keep-memo-files",
description = "Do not delete .bfmemo files created during conversion"
description = "Do not delete .bfmemo files created during conversion",
defaultValue = "false"
)
public void setKeepMemoFiles(boolean keepMemos) {
keepMemoFiles = keepMemos;
@@ -645,7 +675,8 @@ public void setKeepMemoFiles(boolean keepMemos) {
*/
@Option(
names = "--overwrite",
description = "Overwrite the output directory if it exists"
description = "Overwrite the output directory if it exists",
defaultValue = "false"
)
public void setOverwrite(boolean canOverwrite) {
overwrite = canOverwrite;
@@ -660,7 +691,8 @@ public void setOverwrite(boolean canOverwrite) {
@Option(
names = "--fill-value",
description = "Default value to fill in for missing tiles (0-255)" +
" (currently .mrxs only)"
" (currently .mrxs only)",
defaultValue = Option.NULL_VALUE
)
public void setFillValue(Short tileFill) {
if (tileFill == null || (tileFill >= 0 && tileFill <= 255)) {
@@ -682,12 +714,16 @@ public void setFillValue(Short tileFill) {
names = "--options",
split = ",",
description =
"Reader-specific options, in format key=value[,key2=value2]"
"Reader-specific options, in format key=value[,key2=value2]",
defaultValue = Option.NULL_VALUE
)
public void setReaderOptions(List<String> readerOpts) {
if (readerOpts != null) {
readerOptions = readerOpts;
}
else {
readerOptions = new ArrayList<String>();
}
}

/**
@@ -700,7 +736,8 @@ public void setReaderOptions(List<String> readerOpts) {
*/
@Option(
names = "--no-hcs",
description = "Turn off HCS writing"
description = "Turn off HCS writing",
defaultValue = "false"
)
public void setNoHCS(boolean noHCSWriting) {
noHCS = noHCSWriting;
@@ -716,7 +753,8 @@ public void setNoHCS(boolean noHCSWriting) {
@Option(
names = "--no-ome-meta-export",
description = "Turn off OME metadata exporting " +
"[Will break compatibility with raw2ometiff]"
"[Will break compatibility with raw2ometiff]",
defaultValue = "false"
)
public void setNoOMEMeta(boolean noOMEMetaWriting) {
noOMEMeta = noOMEMetaWriting;
@@ -731,8 +769,8 @@ public void setNoOMEMeta(boolean noOMEMetaWriting) {
@Option(
names = "--no-root-group",
description = "Turn off creation of root group and corresponding " +
"metadata [Will break compatibility with raw2ometiff]"

"metadata [Will break compatibility with raw2ometiff]",
defaultValue = "false"
)
public void setNoRootGroup(boolean noRootGroupWriting) {
noRootGroup = noRootGroupWriting;
@@ -749,8 +787,8 @@ public void setNoRootGroup(boolean noRootGroupWriting) {
@Option(
names = "--use-existing-resolutions",
description = "Use existing sub resolutions from original input format" +
"[Will break compatibility with raw2ometiff]"

"[Will break compatibility with raw2ometiff]",
defaultValue = "false"
)
public void setReuseExistingResolutions(boolean reuse) {
reuseExistingResolutions = reuse;
@@ -806,7 +844,7 @@ public void setDimensionOrder(DimensionOrder order) {
* @return path to input data
*/
public String getInputPath() {
return inputPath.toString();
return inputPath == null ? null : inputPath.toString();
}

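With the defaults living on the annotations, a single Converter instance can be run repeatedly and returns to a known state between invocations, which is the point of the picocli-default-reset branch. The sketch below is a hypothetical driver: it assumes the converter is launched through picocli's CommandLine.execute() with the usual positional input and output arguments, that annotation defaults are re-applied on each parse (the behavior this commit relies on), and that the file names are placeholders.

import picocli.CommandLine;

import com.glencoesoftware.bioformats2raw.Converter;

public class ConvertTwice {
  public static void main(String[] args) {
    // One Converter and one CommandLine, reused for two conversions.
    CommandLine cmd = new CommandLine(new Converter());

    // First run overrides a couple of defaults (placeholder file names).
    int first = cmd.execute("input-a.mrxs", "output-a.zarr",
      "--max-workers", "2", "--resolutions", "3");

    // Second run omits those options; the annotation defaults are re-applied,
    // so --max-workers falls back to 4 (capped at the available processor
    // count by the setter) and --resolutions to null (automatic).
    int second = cmd.execute("input-b.mrxs", "output-b.zarr");

    System.exit(first != 0 ? first : second);
  }
}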