From d15eeadc4e25fe6614338531d4f4733876cd22b9 Mon Sep 17 00:00:00 2001 From: Wenqi Li Date: Fri, 2 Oct 2020 14:09:30 +0100 Subject: [PATCH 1/8] fixes typos Signed-off-by: Wenqi Li --- modules/dynunet_tutorial.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/dynunet_tutorial.ipynb b/modules/dynunet_tutorial.ipynb index 3826710348..a96a37272c 100644 --- a/modules/dynunet_tutorial.ipynb +++ b/modules/dynunet_tutorial.ipynb @@ -454,7 +454,7 @@ " out_channels=n_class,\n", " kernel_size=kernels,\n", " strides=strides,\n", - " upsamle_kernel_size=strides[1:],\n", + " upsample_kernel_size=strides[1:],\n", " norm_name=\"instance\",\n", " deep_supervision=True,\n", " deep_supr_num=2,\n", From cc7b248201f0b75729ed562c4df845c67f16f84b Mon Sep 17 00:00:00 2001 From: Wenqi Li Date: Fri, 2 Oct 2020 15:12:10 +0100 Subject: [PATCH 2/8] update install commands Signed-off-by: Wenqi Li --- 2d_classification/mednist_tutorial.ipynb | 4 ++-- 3d_segmentation/brats_segmentation_3d.ipynb | 4 ++-- 3d_segmentation/spleen_segmentation_3d.ipynb | 4 ++-- .../spleen_segmentation_3d_lightning.ipynb | 2 +- .../unet_segmentation_3d_catalyst.ipynb | 4 ++-- .../unet_segmentation_3d_ignite.ipynb | 2 +- README.md | 3 +-- acceleration/automatic_mixed_precision.ipynb | 14 ++--------- acceleration/dataset_type_performance.ipynb | 4 ++-- acceleration/fast_training_tutorial.ipynb | 14 ++--------- acceleration/multi_gpu_test.ipynb | 2 +- acceleration/transform_speed.ipynb | 2 +- modules/3d_image_transforms.ipynb | 4 ++-- modules/dynunet_tutorial.ipynb | 14 ++--------- modules/integrate_3rd_party_transforms.ipynb | 4 ++-- modules/load_medical_images.ipynb | 22 +---------------- modules/mednist_GAN_tutorial.ipynb | 4 ++-- modules/mednist_GAN_workflow_array.ipynb | 24 ++----------------- modules/mednist_GAN_workflow_dict.ipynb | 16 ++----------- modules/models_ensemble.ipynb | 14 +---------- modules/nifti_read_example.ipynb | 2 +- modules/post_transforms.ipynb | 4 ++-- modules/public_datasets.ipynb | 4 ++-- modules/transforms_demo_2d.ipynb | 4 ++-- 24 files changed, 40 insertions(+), 135 deletions(-) diff --git a/2d_classification/mednist_tutorial.ipynb b/2d_classification/mednist_tutorial.ipynb index d26558f41c..9670e28c0b 100644 --- a/2d_classification/mednist_tutorial.ipynb +++ b/2d_classification/mednist_tutorial.ipynb @@ -41,7 +41,7 @@ } ], "source": [ - "%pip install -qU \"monai[pillow]\"" + "%pip install -q \"monai[pillow, tqdm]\"" ] }, { @@ -60,7 +60,7 @@ } ], "source": [ - "%pip install -qU matplotlib\n", + "%pip install -q matplotlib\n", "%matplotlib inline" ] }, diff --git a/3d_segmentation/brats_segmentation_3d.ipynb b/3d_segmentation/brats_segmentation_3d.ipynb index 6fc25cc825..62488536f6 100644 --- a/3d_segmentation/brats_segmentation_3d.ipynb +++ b/3d_segmentation/brats_segmentation_3d.ipynb @@ -53,7 +53,7 @@ }, "outputs": [], "source": [ - "%pip install -qU \"monai[nibabel]\"" + "%pip install -q \"monai[nibabel, tqdm]\"" ] }, { @@ -64,7 +64,7 @@ }, "outputs": [], "source": [ - "%pip install -qU matplotlib\n", + "%pip install -q matplotlib\n", "%matplotlib inline" ] }, diff --git a/3d_segmentation/spleen_segmentation_3d.ipynb b/3d_segmentation/spleen_segmentation_3d.ipynb index a9b151140d..eb69663189 100644 --- a/3d_segmentation/spleen_segmentation_3d.ipynb +++ b/3d_segmentation/spleen_segmentation_3d.ipynb @@ -55,7 +55,7 @@ } ], "source": [ - "%pip install -qU \"monai[gdown, nibabel]\"" + "%pip install -q \"monai[gdown, nibabel, tqdm]\"" ] }, { @@ -74,7 +74,7 @@ } ], "source": [ - "%pip 
install -qU matplotlib\n", + "%pip install -q matplotlib\n", "%matplotlib inline" ] }, diff --git a/3d_segmentation/spleen_segmentation_3d_lightning.ipynb b/3d_segmentation/spleen_segmentation_3d_lightning.ipynb index 8108c2e2cd..5f6cc9ac78 100644 --- a/3d_segmentation/spleen_segmentation_3d_lightning.ipynb +++ b/3d_segmentation/spleen_segmentation_3d_lightning.ipynb @@ -79,7 +79,7 @@ } ], "source": [ - "%pip install -qU matplotlib\n", + "%pip install -q matplotlib\n", "%matplotlib inline" ] }, diff --git a/3d_segmentation/unet_segmentation_3d_catalyst.ipynb b/3d_segmentation/unet_segmentation_3d_catalyst.ipynb index 20af719853..056501f956 100644 --- a/3d_segmentation/unet_segmentation_3d_catalyst.ipynb +++ b/3d_segmentation/unet_segmentation_3d_catalyst.ipynb @@ -45,7 +45,7 @@ }, "outputs": [], "source": [ - "%pip install -qU \"monai[nibabel, tensorboard]\"" + "%pip install -q \"monai[nibabel, tensorboard]\"" ] }, { @@ -56,7 +56,7 @@ }, "outputs": [], "source": [ - "%pip install -qU matplotlib\n", + "%pip install -q matplotlib\n", "%matplotlib inline" ] }, diff --git a/3d_segmentation/unet_segmentation_3d_ignite.ipynb b/3d_segmentation/unet_segmentation_3d_ignite.ipynb index 52d7d2f0a3..d2ad9ac57e 100644 --- a/3d_segmentation/unet_segmentation_3d_ignite.ipynb +++ b/3d_segmentation/unet_segmentation_3d_ignite.ipynb @@ -24,7 +24,7 @@ }, "outputs": [], "source": [ - "%pip install -qU \"monai[ignite, nibabel, tensorboard]\"" + "%pip install -q \"monai[ignite, nibabel, tensorboard]\"" ] }, { diff --git a/README.md b/README.md index 5cc9c3eed3..88960ef08e 100644 --- a/README.md +++ b/README.md @@ -74,8 +74,7 @@ The examples show how to execute distributed training and evaluation based on 3 - PyTorch ignite and MONAI workflows. They can run on several distributed nodes with multiple GPU devices on every node. -#### [automatic_mixed_precision](./acceleration/accautomatic_mixed_precision.ipynb) -This tutorial shows how to apply the automatic mixed precision(AMP) feature of PyTorch into training and evaluation programs. +#### [automatic_mixed_precision](./acceleration/automatic_mixed_precision.ipynb) And compares the training speed and memory usage with/without AMP. #### [dataset_type_performance](./acceleration/dataset_type_performance.ipynb) This notebook compares the performance of `Dataset`, `CacheDataset` and `PersistentDataset`. These classes differ in how data is stored (in memory or on disk), and at which moment transforms are applied. 
diff --git a/acceleration/automatic_mixed_precision.ipynb b/acceleration/automatic_mixed_precision.ipynb index 043e169ec1..6338bc4fa4 100644 --- a/acceleration/automatic_mixed_precision.ipynb +++ b/acceleration/automatic_mixed_precision.ipynb @@ -40,17 +40,7 @@ } ], "source": [ - "%pip install -qU \"monai[gdown, nibabel]\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# temporarily need this, FIXME: remove when MONAI v0.3 released\n", - "%pip install -qU git+https://github.com/Project-MONAI/MONAI#egg=MONAI" + "%pip install -q \"monai[gdown, nibabel, tqdm]\"" ] }, { @@ -67,7 +57,7 @@ } ], "source": [ - "%pip install -qU matplotlib\n", + "%pip install -q matplotlib\n", "%matplotlib inline" ] }, diff --git a/acceleration/dataset_type_performance.ipynb b/acceleration/dataset_type_performance.ipynb index 7108080722..647e745b8d 100644 --- a/acceleration/dataset_type_performance.ipynb +++ b/acceleration/dataset_type_performance.ipynb @@ -35,7 +35,7 @@ }, "outputs": [], "source": [ - "%pip install -qU \"monai[gdown, nibabel]\"" + "%pip install -q \"monai[gdown, nibabel, tqdm]\"" ] }, { @@ -46,7 +46,7 @@ }, "outputs": [], "source": [ - "%pip install -qU matplotlib\n", + "%pip install -q matplotlib\n", "%matplotlib inline" ] }, diff --git a/acceleration/fast_training_tutorial.ipynb b/acceleration/fast_training_tutorial.ipynb index 27cff5def3..4a116f7889 100644 --- a/acceleration/fast_training_tutorial.ipynb +++ b/acceleration/fast_training_tutorial.ipynb @@ -42,17 +42,7 @@ } ], "source": [ - "%pip install -qU \"monai[nibabel]\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# temporarily need this, FIXME: remove when MONAI v0.3 released\n", - "%pip install -qU git+https://github.com/Project-MONAI/MONAI#egg=MONAI" + "%pip install -q \"monai[nibabel, tqdm]\"" ] }, { @@ -69,7 +59,7 @@ } ], "source": [ - "%pip install -qU matplotlib\n", + "%pip install -q matplotlib\n", "%matplotlib inline" ] }, diff --git a/acceleration/multi_gpu_test.ipynb b/acceleration/multi_gpu_test.ipynb index d627d22826..a058a5596f 100644 --- a/acceleration/multi_gpu_test.ipynb +++ b/acceleration/multi_gpu_test.ipynb @@ -32,7 +32,7 @@ } ], "source": [ - "%pip install -qU \"monai[ignite]\"" + "%pip install -q \"monai[ignite]\"" ] }, { diff --git a/acceleration/transform_speed.ipynb b/acceleration/transform_speed.ipynb index f723da0dbb..cd5414d0f1 100644 --- a/acceleration/transform_speed.ipynb +++ b/acceleration/transform_speed.ipynb @@ -34,7 +34,7 @@ } ], "source": [ - "%pip install -qU \"monai[nibabel]\"" + "%pip install -q \"monai[nibabel]\"" ] }, { diff --git a/modules/3d_image_transforms.ipynb b/modules/3d_image_transforms.ipynb index 3d1856a558..d42f481724 100644 --- a/modules/3d_image_transforms.ipynb +++ b/modules/3d_image_transforms.ipynb @@ -34,7 +34,7 @@ } ], "source": [ - "%pip install -qU \"monai[gdown, nibabel]\"" + "%pip install -q \"monai[gdown, nibabel]\"" ] }, { @@ -53,7 +53,7 @@ } ], "source": [ - "%pip install -qU matplotlib\n", + "%pip install -q matplotlib\n", "%matplotlib inline" ] }, diff --git a/modules/dynunet_tutorial.ipynb b/modules/dynunet_tutorial.ipynb index a96a37272c..23ab1dc255 100644 --- a/modules/dynunet_tutorial.ipynb +++ b/modules/dynunet_tutorial.ipynb @@ -29,7 +29,7 @@ "metadata": {}, "outputs": [], "source": [ - "%pip install -qU \"monai[itk, ignite]\"" + "%pip install -q \"monai[itk, ignite, tqdm]\"" ] }, { @@ -38,17 +38,7 @@ "metadata": {}, "outputs": 
[], "source": [ - "# temporarily need this, FIXME: remove when MONAI v0.3 released\n", - "%pip install -qU git+https://github.com/Project-MONAI/MONAI#egg=MONAI" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "%pip install -qU matplotlib\n", + "%pip install -q matplotlib\n", "%matplotlib inline" ] }, diff --git a/modules/integrate_3rd_party_transforms.ipynb b/modules/integrate_3rd_party_transforms.ipynb index b79b66798f..3716fb140d 100644 --- a/modules/integrate_3rd_party_transforms.ipynb +++ b/modules/integrate_3rd_party_transforms.ipynb @@ -35,7 +35,7 @@ } ], "source": [ - "%pip install -qU \"monai[gdown]\"" + "%pip install -q \"monai[gdown]\"" ] }, { @@ -54,7 +54,7 @@ } ], "source": [ - "%pip install -qU matplotlib\n", + "%pip install -q matplotlib\n", "%matplotlib inline" ] }, diff --git a/modules/load_medical_images.ipynb b/modules/load_medical_images.ipynb index 8383b54190..fb0ada177e 100644 --- a/modules/load_medical_images.ipynb +++ b/modules/load_medical_images.ipynb @@ -34,27 +34,7 @@ } ], "source": [ - "%pip install -qU \"monai[itk, nibabel, pillow]\"" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": { - "tags": [] - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Note: you may need to restart the kernel to use updated packages.\n" - ] - } - ], - "source": [ - "# temporarily need this, FIXME: remove when MONAI v0.3 released\n", - "%pip install -qU git+https://github.com/Project-MONAI/MONAI#egg=MONAI" + "%pip install -q \"monai[itk, nibabel, pillow]\"" ] }, { diff --git a/modules/mednist_GAN_tutorial.ipynb b/modules/mednist_GAN_tutorial.ipynb index d89b361c7d..b1b9419965 100644 --- a/modules/mednist_GAN_tutorial.ipynb +++ b/modules/mednist_GAN_tutorial.ipynb @@ -40,7 +40,7 @@ } ], "source": [ - "%pip install -qU monai" + "%pip install -q monai" ] }, { @@ -59,7 +59,7 @@ } ], "source": [ - "%pip install -qU matplotlib\n", + "%pip install -q matplotlib\n", "%matplotlib inline" ] }, diff --git a/modules/mednist_GAN_workflow_array.ipynb b/modules/mednist_GAN_workflow_array.ipynb index a996ff32b9..dbe9b1d41b 100644 --- a/modules/mednist_GAN_workflow_array.ipynb +++ b/modules/mednist_GAN_workflow_array.ipynb @@ -35,26 +35,6 @@ "### Setup environment" ] }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": { - "tags": [] - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Note: you may need to restart the kernel to use updated packages.\n" - ] - } - ], - "source": [ - "# temporarily need this, FIXME: remove when MONAI v0.3 released\n", - "%pip install -qU git+https://github.com/Project-MONAI/MONAI#egg=MONAI" - ] - }, { "cell_type": "code", "execution_count": 2, @@ -69,7 +49,7 @@ } ], "source": [ - "%pip install -qU \"monai[ignite]\"" + "%pip install -q \"monai[ignite, tqdm]\"" ] }, { @@ -88,7 +68,7 @@ } ], "source": [ - "%pip install -qU matplotlib\n", + "%pip install -q matplotlib\n", "%matplotlib inline" ] }, diff --git a/modules/mednist_GAN_workflow_dict.ipynb b/modules/mednist_GAN_workflow_dict.ipynb index 8eb1229fca..d0a90c7c6e 100644 --- a/modules/mednist_GAN_workflow_dict.ipynb +++ b/modules/mednist_GAN_workflow_dict.ipynb @@ -51,19 +51,7 @@ } ], "source": [ - "%pip install -qU \"monai[ignite]\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# temporarily need this, FIXME: remove when MONAI v0.3 released\n", - "%pip install 
-qU git+https://github.com/Project-MONAI/MONAI#egg=MONAI" + "%pip install -q \"monai[ignite, tqdm]\"" ] }, { @@ -82,7 +70,7 @@ } ], "source": [ - "%pip install -qU matplotlib\n", + "%pip install -q matplotlib\n", "%matplotlib inline" ] }, diff --git a/modules/models_ensemble.ipynb b/modules/models_ensemble.ipynb index f01bc2fd62..1bafb0ded9 100644 --- a/modules/models_ensemble.ipynb +++ b/modules/models_ensemble.ipynb @@ -45,19 +45,7 @@ } ], "source": [ - "%pip install -qU \"monai[ignite, nibabel]\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# temporarily need this, FIXME remove when MONAI v0.3 released\n", - "%pip install -qU git+https://github.com/Project-MONAI/MONAI#egg=MONAI" + "%pip install -q \"monai[ignite, nibabel, tqdm]\"" ] }, { diff --git a/modules/nifti_read_example.ipynb b/modules/nifti_read_example.ipynb index 38c4c44b6a..7cb405468d 100644 --- a/modules/nifti_read_example.ipynb +++ b/modules/nifti_read_example.ipynb @@ -34,7 +34,7 @@ } ], "source": [ - "%pip install -qU \"monai[nibabel]\"" + "%pip install -q \"monai[nibabel]\"" ] }, { diff --git a/modules/post_transforms.ipynb b/modules/post_transforms.ipynb index b7f99a10a7..56b0dc7694 100644 --- a/modules/post_transforms.ipynb +++ b/modules/post_transforms.ipynb @@ -48,7 +48,7 @@ } ], "source": [ - "%pip install -qU \"monai[gdown, nibabel, skimage]\"" + "%pip install -q \"monai[gdown, nibabel, skimage, tqdm]\"" ] }, { @@ -67,7 +67,7 @@ } ], "source": [ - "%pip install -qU matplotlib\n", + "%pip install -q matplotlib\n", "%matplotlib inline" ] }, diff --git a/modules/public_datasets.ipynb b/modules/public_datasets.ipynb index 9a89cef891..2b99634003 100644 --- a/modules/public_datasets.ipynb +++ b/modules/public_datasets.ipynb @@ -41,7 +41,7 @@ } ], "source": [ - "%pip install -qU \"monai[nibabel, ignite]\"" + "%pip install -q \"monai[nibabel, ignite, tqdm]\"" ] }, { @@ -60,7 +60,7 @@ } ], "source": [ - "%pip install -qU matplotlib\n", + "%pip install -q matplotlib\n", "%matplotlib inline" ] }, diff --git a/modules/transforms_demo_2d.ipynb b/modules/transforms_demo_2d.ipynb index ccb049f868..dd0fea5a27 100644 --- a/modules/transforms_demo_2d.ipynb +++ b/modules/transforms_demo_2d.ipynb @@ -39,7 +39,7 @@ } ], "source": [ - "%pip install -qU \"monai[pillow]\"" + "%pip install -q \"monai[pillow, tqdm]\"" ] }, { @@ -58,7 +58,7 @@ } ], "source": [ - "%pip install -qU matplotlib\n", + "%pip install -q matplotlib\n", "%matplotlib inline" ] }, From acfa8b41983b4bbed7d0f6589c1664126aed72a9 Mon Sep 17 00:00:00 2001 From: Wenqi Li Date: Sat, 3 Oct 2020 18:22:56 +0100 Subject: [PATCH 3/8] update readme for colab instructions Signed-off-by: Wenqi Li --- README.md | 27 +++++++++++++++++++++++++-- 1 file changed, 25 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 88960ef08e..302302cb78 100644 --- a/README.md +++ b/README.md @@ -6,6 +6,7 @@ Most of the examples and tutorials require [matplotlib](https://matplotlib.org/) and [Jupyter Notebook](https://jupyter.org/). These could be installed by: + ```bash python -m pip install -U pip python -m pip install -U matplotlib @@ -13,11 +14,33 @@ python -m pip install -U notebook ``` Some of the examples may require optional dependencies. In case of any optional import errors, -please install the relevant packages according to the error message. +please install the relevant packages according to MONAI's [installation guide](https://docs.monai.io/en/latest/installation.html). 
Or install all optional requirements by: -``` + +```bash pip install -r https://raw.githubusercontent.com/Project-MONAI/MONAI/master/requirements-dev.txt ``` + +#### Run the notebooks from Colab + +Most of the Jupyter Notebooks have an "Open in Colab" button. +Please right-click on the button, and select "Open Link in New Tab" to start a Colab page with the corresponding notebook content. + +To use GPU resources through Colab, please remember to change the runtime type to `GPU`: + +1. From the `Runtime` menu select `Change runtime type` +1. Choose `GPU` from the drop-down menu +1. Click `SAVE` +This will reset the notebook and may ask you if you are a robot (these instructions assume you are not). + +Running: + +```bash +!nvidia-smi +``` + +in a cell will verify this has worked and show you what kind of hardware you have access to. + ### 2. List of notebooks and examples **2D classification** #### [mednist_tutorial](./2d_classification/mednist_tutorial.ipynb) From a0492faa8b41a0a305d27ea0495161b28afbbdfa Mon Sep 17 00:00:00 2001 From: Wenqi Li Date: Sun, 4 Oct 2020 16:27:55 +0100 Subject: [PATCH 4/8] fixes amp imports Signed-off-by: Wenqi Li --- acceleration/automatic_mixed_precision.ipynb | 3 +++ 1 file changed, 3 insertions(+) diff --git a/acceleration/automatic_mixed_precision.ipynb b/acceleration/automatic_mixed_precision.ipynb index 6338bc4fa4..96ee4c51a2 100644 --- a/acceleration/automatic_mixed_precision.ipynb +++ b/acceleration/automatic_mixed_precision.ipynb @@ -105,6 +105,7 @@ "from monai.metrics import compute_meandice\n", "from monai.utils import set_determinism\n", "from monai.config import print_config\n", + "from monai.apps import download_and_extract\n", "\n", "print_config()\n", "\n", @@ -137,6 +138,8 @@ } ], "source": [ + "import tempfile\n", + "\n", "directory = os.environ.get(\"MONAI_DATA_DIRECTORY\")\n", "root_dir = tempfile.mkdtemp() if directory is None else directory\n", "print(f\"root dir is: {root_dir}\")" From e7bd591ecf1615f2f8d47f651e1195602b58616f Mon Sep 17 00:00:00 2001 From: Wenqi Li Date: Sun, 4 Oct 2020 16:57:34 +0100 Subject: [PATCH 5/8] fixes dataset notebook typos Signed-off-by: Wenqi Li --- acceleration/dataset_type_performance.ipynb | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/acceleration/dataset_type_performance.ipynb b/acceleration/dataset_type_performance.ipynb index 647e745b8d..14af9b63ec 100644 --- a/acceleration/dataset_type_performance.ipynb +++ b/acceleration/dataset_type_performance.ipynb @@ -11,9 +11,9 @@ "\n", "`Dataset` provides the simplest model of data loading. Each time a dataset is needed, it is reloaded from the original datasources, and processed through the all non-random and random transforms to generate analyzable tensors. This mechanism has the smallest memory footprint, and the smallest temporary disk footprint.\n", "\n", - "`CacheDataset` provides a mechanism to pre-load all original data and apply non-random transforms into analyzable tensors loaded in memory prior to starting analysis. The `CacheDataset` requires all tensor representations of data requested to be loaded into memory at once. The subset of random transforms are applied to the cached components before use. This is the highest performance dataset if all data fits in core memory.\n", + "`CacheDataset` provides a mechanism to pre-load all original data and apply non-random transforms into analyzable tensors loaded in memory prior to starting analysis. 
The `CacheDataset` requires all tensor representations of data requested to be loaded into memory at once. The subset of random transforms is applied to the cached components before use. This is the highest performance dataset if all data fit in core memory.\n", "\n", - "`PersistentDataset` processes original data sources through the non-random transforms on first use, and stores these intermediate tensor values to an on-disk persistence representation. The intermediate processed tensors are loaded from disk on each use for processing by the random-transforms for each analysis request. The `PersistentDataset` has a similar memory footprint to the simple `Dataset`, with performance characterisics close to the `CacheDataset` at the expense of disk storage. Additially, the cost of first time processing of data is distributed across each first use.\n", + "`PersistentDataset` processes original data sources through the non-random transforms on first use, and stores these intermediate tensor values to an on-disk persistence representation. The intermediate processed tensors are loaded from disk on each use for processing by the random-transforms for each analysis request. The `PersistentDataset` has a similar memory footprint to the simple `Dataset`, with performance characteristics close to the `CacheDataset` at the expense of disk storage. Additionally, the cost of first time processing of data is distributed across each first use.\n", "\n", "It's modified from the [Spleen 3D segmentation tutorial notebook](https://github.com/Project-MONAI/tutorials/blob/master/3d_segmentation/spleen_segmentation_3d.ipynb).\n", "\n", @@ -270,7 +270,7 @@ " f\"current epoch: {epoch + 1} current mean dice: {metric:.4f}\"\n", " f\" best mean dice: {best_metric:.4f} at epoch: {best_metric_epoch}\"\n", " )\n", - " print(f\"time consuming of epoch {epoch + 1} is: {(time.time() - epoch_start):.4f}\")\n", + " print(f\"time of epoch {epoch + 1} is: {(time.time() - epoch_start):.4f}\")\n", " epoch_times.append(time.time() - epoch_start)\n", "\n", " print(\n", From 7791df1ddd0d6d3f3066600c3408938904057cc2 Mon Sep 17 00:00:00 2001 From: Wenqi Li Date: Sun, 4 Oct 2020 17:01:00 +0100 Subject: [PATCH 6/8] fixes fast training demo Signed-off-by: Wenqi Li --- acceleration/fast_training_tutorial.ipynb | 3 +++ 1 file changed, 3 insertions(+) diff --git a/acceleration/fast_training_tutorial.ipynb b/acceleration/fast_training_tutorial.ipynb index 4a116f7889..e36e19dded 100644 --- a/acceleration/fast_training_tutorial.ipynb +++ b/acceleration/fast_training_tutorial.ipynb @@ -133,6 +133,7 @@ "from monai.metrics import compute_meandice\n", "from monai.utils import set_determinism\n", "from monai.config import print_config\n", + "from monai.apps import download_and_extract\n", "\n", "print_config()\n", "\n", @@ -165,6 +166,8 @@ } ], "source": [ + "import tempfile\n", + "\n", "directory = os.environ.get(\"MONAI_DATA_DIRECTORY\")\n", "root_dir = tempfile.mkdtemp() if directory is None else directory\n", "print(f\"root dir is: {root_dir}\")" From 4be988c7d43dd36ace7c66ba93c9f62e40595e33 Mon Sep 17 00:00:00 2001 From: Wenqi Li Date: Sun, 4 Oct 2020 17:04:24 +0100 Subject: [PATCH 7/8] fixes fast training demo typos Signed-off-by: Wenqi Li --- acceleration/fast_training_tutorial.ipynb | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/acceleration/fast_training_tutorial.ipynb b/acceleration/fast_training_tutorial.ipynb index e36e19dded..6216844460 100644 --- a/acceleration/fast_training_tutorial.ipynb +++ 
b/acceleration/fast_training_tutorial.ipynb @@ -264,12 +264,12 @@ "metadata": {}, "source": [ "## Define the training progress\n", - "For typical PyTorch regular training progress, use regular `Dataset`, `Adam` optimizer, and train the model.\n", + "For a typical PyTorch regular training procedure, use regular `Dataset`, `Adam` optimizer, and train the model.\n", "\n", - "For MONAI fast training progress, we mainly introduce below features:\n", - "1. `CacheDataset`: Dataset with cache mechanism that can load data and cache deterministic transforms' result during training.\n", - "2. `Novograd` optimizer: Novograd is based on paper \"Stochastic Gradient Methods with Layer-wise Adaptive Moments for Training of Deep Networks\" ``.\n", - "3. `AMP` (auto mixed precision): AMP is an important feature released in PyTorch v1.6, NVIDIA CUDA 11 added strong support for AMP and obviously improved training speed." + "For MONAI fast training progress, we mainly introduce the following features:\n", + "1. `CacheDataset`: Dataset with the cache mechanism that can load data and cache deterministic transforms' result during training.\n", + "2. `Novograd` optimizer: Novograd is based on the paper \"Stochastic Gradient Methods with Layer-wise Adaptive Moments for Training of Deep Networks\" ``.\n", + "3. `AMP` (auto mixed precision): AMP is an important feature released in PyTorch v1.6, NVIDIA CUDA 11 added strong support for AMP and significantly improved training speed." ] }, { From 90b6e77e760fb0849ba169eab36844d7a4763256 Mon Sep 17 00:00:00 2001 From: Wenqi Li Date: Sun, 4 Oct 2020 17:09:41 +0100 Subject: [PATCH 8/8] fixes transform speed notebook Signed-off-by: Wenqi Li --- acceleration/transform_speed.ipynb | 2 -- 1 file changed, 2 deletions(-) diff --git a/acceleration/transform_speed.ipynb b/acceleration/transform_speed.ipynb index cd5414d0f1..8635c72d42 100644 --- a/acceleration/transform_speed.ipynb +++ b/acceleration/transform_speed.ipynb @@ -164,8 +164,6 @@ "metadata": {}, "outputs": [], "source": [ - "root_dir = tempfile.mkdtemp()\n", - "\n", "for i in range(5):\n", " im, seg = create_test_image_3d(256, 256, 256)\n", "\n",
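
The dataset wording corrected in [PATCH 5/8] contrasts `Dataset`, `CacheDataset` and `PersistentDataset`. The sketch below illustrates that contrast; it is not taken from the patched notebooks, it uses small in-memory arrays in place of the Decathlon files the tutorial downloads, and it assumes the MONAI 0.3-era transform names such as `AddChanneld`. The three classes accept the same `data`/`transform` pair and differ only in where the results of the deterministic transforms are kept:

```python
# Illustrative sketch (not from the patched notebooks): the three dataset types
# compared in dataset_type_performance.ipynb. Small random arrays stand in for
# the tutorial's image/label files so the snippet runs without any downloads.
import tempfile

import numpy as np
from monai.data import CacheDataset, Dataset, PersistentDataset
from monai.transforms import AddChanneld, Compose, RandFlipd, ScaleIntensityd

data_dicts = [
    {
        "image": np.random.rand(32, 32, 32).astype(np.float32),
        "label": (np.random.rand(32, 32, 32) > 0.5).astype(np.float32),
    }
    for _ in range(4)
]

transforms = Compose([
    AddChanneld(keys=["image", "label"]),   # deterministic: cacheable
    ScaleIntensityd(keys="image"),          # deterministic: cacheable
    RandFlipd(keys=["image", "label"], prob=0.5, spatial_axis=0),  # random: re-applied on every access
])

# Re-runs every transform on each access; smallest memory and temporary disk footprint.
plain_ds = Dataset(data=data_dicts, transform=transforms)

# Applies the deterministic transforms once up front and keeps the results in memory;
# only the random transforms run per access.
cache_ds = CacheDataset(data=data_dicts, transform=transforms, cache_rate=1.0)

# Same idea, but the deterministic results are persisted to disk instead of RAM.
persistent_ds = PersistentDataset(data=data_dicts, transform=transforms, cache_dir=tempfile.mkdtemp())

print(plain_ds[0]["image"].shape, cache_ds[0]["image"].shape, persistent_ds[0]["image"].shape)
```

With this setup, indexing `cache_ds` or `persistent_ds` a second time re-runs only `RandFlipd`, which is why they trade memory or disk space for per-epoch speed.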
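
[PATCH 4/8], [PATCH 6/8] and [PATCH 7/8] concern the AMP and fast-training notebooks, whose markdown lists `CacheDataset`, the `Novograd` optimizer and PyTorch-native AMP as the main speed-ups. The sketch below shows the `Novograd` plus AMP training step on its own; it is an illustration rather than the tutorial's code, a one-layer `Conv3d` and random tensors stand in for the tutorial's `UNet` and spleen data, and `use_amp` guards the AMP calls so the snippet also runs on a CPU-only machine:

```python
# Illustrative sketch (not the tutorial's code): one training step with the
# Novograd optimizer and PyTorch-native AMP, the ingredients described in
# PATCH 7/8. A toy Conv3d model and random tensors replace the real UNet/data.
import torch
from monai.losses import DiceLoss
from monai.optimizers import Novograd

use_amp = torch.cuda.is_available()  # AMP here relies on CUDA (PyTorch >= 1.6)
device = torch.device("cuda" if use_amp else "cpu")

model = torch.nn.Conv3d(1, 2, kernel_size=3, padding=1).to(device)
loss_function = DiceLoss(to_onehot_y=True, softmax=True)
optimizer = Novograd(model.parameters(), lr=1e-2)
scaler = torch.cuda.amp.GradScaler(enabled=use_amp)  # scales the loss so fp16 gradients do not underflow

images = torch.rand(1, 1, 64, 64, 64, device=device)
labels = torch.randint(0, 2, (1, 1, 64, 64, 64), device=device)

optimizer.zero_grad()
with torch.cuda.amp.autocast(enabled=use_amp):  # forward pass and loss in mixed precision
    loss = loss_function(model(images), labels)
scaler.scale(loss).backward()   # backward pass on the scaled loss
scaler.step(optimizer)          # unscales gradients, then runs the optimizer step
scaler.update()
print(f"loss: {loss.item():.4f}")
```

In the patched notebooks this step is combined with cached data loading to compare training speed and memory usage with and without AMP.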