This plugin is available on Docker Hub from the WIPP organization:
docker pull wipp/wipp-unet-cnn-inference-plugin
#!/bin/bash
# Build the WIPP U-Net CNN inference plugin image, tagged both as
# 'latest' and with the pinned release version.
set -euo pipefail

readonly version=0.0.7

# Single build with both tags: avoids a redundant second build and
# guarantees 'latest' and the versioned tag reference the same image.
docker build . \
  -t wipp/wipp-unet-cnn-inference-plugin:latest \
  -t "wipp/wipp-unet-cnn-inference-plugin:${version}"
# Run inference: mount input images, output folder, and the saved model,
# then point the plugin's flags at the corresponding container paths.
# NOTE: --imageDir must match the input mount target (/data/inputs);
# the original example pointed at /data/images, which is never mounted.
docker run --gpus device=all \
  -v "path/to/input/data/folder":/data/inputs \
  -v "path/to/output/folder":/data/outputs \
  -v "path/to/model/folder":/data/model \
  wipp/wipp-unet-cnn-inference-plugin \
  --imageDir /data/inputs \
  --savedModel /data/model \
  --outputDir /data/outputs
usage: inference [-h]
--savedModel SAVED_MODEL_FILEPATH
--imageDir IMAGE_DIR
--outputDir OUTPUT_DIR
[--useIntensityScaling USE_INTENSITY_SCALING]
[--useTiling USE_TILING]
[--tileSize TILE_SIZE]