From 236ad5062c7268877d8db3c5110da69ea07a4b02 Mon Sep 17 00:00:00 2001 From: Nic Ma Date: Mon, 18 Jan 2021 18:51:13 +0800 Subject: [PATCH 1/5] [DLMED] change NiftiDataset to ImageDataset Signed-off-by: Nic Ma --- docs/source/data.rst | 10 +++++----- monai/data/__init__.py | 2 +- monai/data/{nifti_reader.py => image_dataset.py} | 4 ++-- 3 files changed, 8 insertions(+), 8 deletions(-) rename monai/data/{nifti_reader.py => image_dataset.py} (96%) diff --git a/docs/source/data.rst b/docs/source/data.rst index f6ed71c266..11609964c3 100644 --- a/docs/source/data.rst +++ b/docs/source/data.rst @@ -63,6 +63,11 @@ Generic Interfaces :members: :special-members: __getitem__ +`ImageDataset` +~~~~~~~~~~~~~~ +.. autoclass:: ImageDataset + :members: + :special-members: __getitem__ Patch-based dataset ------------------- @@ -104,11 +109,6 @@ PILReader Nifti format handling --------------------- -Reading -~~~~~~~ -.. autoclass:: monai.data.NiftiDataset - :members: - Writing Nifti ~~~~~~~~~~~~~ .. autoclass:: monai.data.NiftiSaver diff --git a/monai/data/__init__.py b/monai/data/__init__.py index e2bd32861c..302ca5ea00 100644 --- a/monai/data/__init__.py +++ b/monai/data/__init__.py @@ -25,7 +25,7 @@ from .grid_dataset import GridPatchDataset, PatchDataset from .image_reader import ImageReader, ITKReader, NibabelReader, NumpyReader, PILReader from .iterable_dataset import IterableDataset -from .nifti_reader import NiftiDataset +from .image_dataset import ImageDataset from .nifti_saver import NiftiSaver from .nifti_writer import write_nifti from .png_saver import PNGSaver diff --git a/monai/data/nifti_reader.py b/monai/data/image_dataset.py similarity index 96% rename from monai/data/nifti_reader.py rename to monai/data/image_dataset.py index 1378fb25a0..95a21673fb 100644 --- a/monai/data/nifti_reader.py +++ b/monai/data/image_dataset.py @@ -18,9 +18,9 @@ from monai.utils import MAX_SEED, get_seed -class NiftiDataset(Dataset, Randomizable): +class ImageDataset(Dataset, Randomizable): """ - Loads image/segmentation pairs of Nifti files from the given filename lists. Transformations can be specified + Loads image/segmentation pairs of files from the given filename lists. Transformations can be specified for the image and segmentation arrays separately. """ From aafbbbcf21a0bf6cfcac5578466b8dde2adbc886 Mon Sep 17 00:00:00 2001 From: Nic Ma Date: Mon, 18 Jan 2021 21:52:39 +0800 Subject: [PATCH 2/5] [DLMED] update CI tests Signed-off-by: Nic Ma --- monai/data/image_dataset.py | 47 ++++++++++--------- ...nifti_dataset.py => test_image_dataset.py} | 24 +++++----- tests/test_integration_sliding_window.py | 4 +- 3 files changed, 40 insertions(+), 35 deletions(-) rename tests/{test_nifti_dataset.py => test_image_dataset.py} (88%) diff --git a/monai/data/image_dataset.py b/monai/data/image_dataset.py index 95a21673fb..e66580844b 100644 --- a/monai/data/image_dataset.py +++ b/monai/data/image_dataset.py @@ -9,11 +9,12 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Any, Callable, Optional, Sequence +from typing import Any, Callable, Optional, Sequence, Union import numpy as np from torch.utils.data import Dataset +from monai.data.image_reader import ImageReader from monai.transforms import LoadImage, Randomizable, apply_transform from monai.utils import MAX_SEED, get_seed @@ -22,18 +23,22 @@ class ImageDataset(Dataset, Randomizable): """ Loads image/segmentation pairs of files from the given filename lists. 
Transformations can be specified for the image and segmentation arrays separately. - """ + The difference between this dataset and `ArrayDataset` is that this dataset can apply transform chain to images + and segs and return both the images and metadata, and no need to specify transform to load images from files. + """ def __init__( self, image_files: Sequence[str], seg_files: Optional[Sequence[str]] = None, labels: Optional[Sequence[float]] = None, - as_closest_canonical: bool = False, transform: Optional[Callable] = None, seg_transform: Optional[Callable] = None, image_only: bool = True, dtype: Optional[np.dtype] = np.float32, + reader: Optional[Union[ImageReader, str]] = None, + *args, + **kwargs, ) -> None: """ Initializes the dataset with the image and segmentation filename lists. The transform `transform` is applied @@ -43,11 +48,16 @@ def __init__( image_files: list of image filenames seg_files: if in segmentation task, list of segmentation filenames labels: if in classification task, list of classification labels - as_closest_canonical: if True, load the image as closest to canonical orientation transform: transform to apply to image arrays seg_transform: transform to apply to segmentation arrays - image_only: if True return only the image volume, other return image volume and header dict + image_only: if True return only the image volume, otherwise, return image volume and the metadata dtype: if not None convert the loaded image to this data type + reader: register reader to load image file and meta data, if None, still can register readers + at runtime or use the default readers. If a string of reader name provided, will construct + a reader object with the `*args` and `**kwargs` parameters, supported reader name: "NibabelReader", + "PILReader", "ITKReader", "NumpyReader" + args: additional parameters for reader if providing a reader name. + kwargs: additional parameters for reader if providing a reader name. Raises: ValueError: When ``seg_files`` length differs from ``image_files``. 
@@ -63,13 +73,11 @@ def __init__( self.image_files = image_files self.seg_files = seg_files self.labels = labels - self.as_closest_canonical = as_closest_canonical self.transform = transform self.seg_transform = seg_transform self.image_only = image_only - self.dtype = dtype + self.loader = LoadImage(reader=reader, image_only=image_only, dtype=dtype, *args, **kwargs) self.set_random_state(seed=get_seed()) - self._seed = 0 # transform synchronization seed def __len__(self) -> int: @@ -81,21 +89,18 @@ def randomize(self, data: Optional[Any] = None) -> None: def __getitem__(self, index: int): self.randomize() meta_data = None - img_loader = LoadImage( - reader="NibabelReader", - image_only=self.image_only, - dtype=self.dtype, - as_closest_canonical=self.as_closest_canonical, - ) - if self.image_only: - img = img_loader(self.image_files[index]) - else: - img, meta_data = img_loader(self.image_files[index]) seg = None - if self.seg_files is not None: - seg_loader = LoadImage(image_only=True) - seg = seg_loader(self.seg_files[index]) label = None + + if self.image_only: + img = self.loader(self.image_files[index]) + if self.seg_files is not None: + seg = self.loader(self.seg_files[index]) + else: + img, meta_data = self.loader(self.image_files[index]) + if self.seg_files is not None: + seg, _ = self.loader(self.seg_files[index]) + if self.labels is not None: label = self.labels[index] diff --git a/tests/test_nifti_dataset.py b/tests/test_image_dataset.py similarity index 88% rename from tests/test_nifti_dataset.py rename to tests/test_image_dataset.py index f5d6e11290..d79a7d884c 100644 --- a/tests/test_nifti_dataset.py +++ b/tests/test_image_dataset.py @@ -16,7 +16,7 @@ import nibabel as nib import numpy as np -from monai.data import NiftiDataset +from monai.data import ImageDataset from monai.transforms import Randomizable FILENAMES = ["test1.nii.gz", "test2.nii", "test3.nii.gz"] @@ -35,7 +35,7 @@ def __call__(self, data): return data + self._a -class TestNiftiDataset(unittest.TestCase): +class TestImageDataset(unittest.TestCase): def test_dataset(self): with tempfile.TemporaryDirectory() as tempdir: full_names, ref_data = [], [] @@ -47,46 +47,46 @@ def test_dataset(self): nib.save(nib.Nifti1Image(test_image, np.eye(4)), save_path) # default loading no meta - dataset = NiftiDataset(full_names) + dataset = ImageDataset(full_names) for d, ref in zip(dataset, ref_data): np.testing.assert_allclose(d, ref, atol=1e-3) # loading no meta, int - dataset = NiftiDataset(full_names, dtype=np.float16) + dataset = ImageDataset(full_names, dtype=np.float16) for d, _ in zip(dataset, ref_data): self.assertEqual(d.dtype, np.float16) # loading with meta, no transform - dataset = NiftiDataset(full_names, image_only=False) + dataset = ImageDataset(full_names, image_only=False) for d_tuple, ref in zip(dataset, ref_data): d, meta = d_tuple np.testing.assert_allclose(d, ref, atol=1e-3) np.testing.assert_allclose(meta["original_affine"], np.eye(4)) # loading image/label, no meta - dataset = NiftiDataset(full_names, seg_files=full_names, image_only=True) + dataset = ImageDataset(full_names, seg_files=full_names, image_only=True) for d_tuple, ref in zip(dataset, ref_data): img, seg = d_tuple np.testing.assert_allclose(img, ref, atol=1e-3) np.testing.assert_allclose(seg, ref, atol=1e-3) # loading image/label, no meta - dataset = NiftiDataset(full_names, transform=lambda x: x + 1, image_only=True) + dataset = ImageDataset(full_names, transform=lambda x: x + 1, image_only=True) for d, ref in zip(dataset, ref_data): 
np.testing.assert_allclose(d, ref + 1, atol=1e-3) # set seg transform, but no seg_files with self.assertRaises(RuntimeError): - dataset = NiftiDataset(full_names, seg_transform=lambda x: x + 1, image_only=True) + dataset = ImageDataset(full_names, seg_transform=lambda x: x + 1, image_only=True) _ = dataset[0] # set seg transform, but no seg_files with self.assertRaises(RuntimeError): - dataset = NiftiDataset(full_names, seg_transform=lambda x: x + 1, image_only=True) + dataset = ImageDataset(full_names, seg_transform=lambda x: x + 1, image_only=True) _ = dataset[0] # loading image/label, with meta - dataset = NiftiDataset( + dataset = ImageDataset( full_names, transform=lambda x: x + 1, seg_files=full_names, @@ -100,7 +100,7 @@ def test_dataset(self): np.testing.assert_allclose(meta["original_affine"], np.eye(4), atol=1e-3) # loading image/label, with meta - dataset = NiftiDataset( + dataset = ImageDataset( full_names, transform=lambda x: x + 1, seg_files=full_names, labels=[1, 2, 3], image_only=False ) for idx, (d_tuple, ref) in enumerate(zip(dataset, ref_data)): @@ -111,7 +111,7 @@ def test_dataset(self): np.testing.assert_allclose(meta["original_affine"], np.eye(4), atol=1e-3) # loading image/label, with sync. transform - dataset = NiftiDataset( + dataset = ImageDataset( full_names, transform=RandTest(), seg_files=full_names, seg_transform=RandTest(), image_only=False ) for d_tuple, ref in zip(dataset, ref_data): diff --git a/tests/test_integration_sliding_window.py b/tests/test_integration_sliding_window.py index 92cc9397cb..c4d020276e 100644 --- a/tests/test_integration_sliding_window.py +++ b/tests/test_integration_sliding_window.py @@ -19,7 +19,7 @@ from ignite.engine import Engine from torch.utils.data import DataLoader -from monai.data import NiftiDataset, create_test_image_3d +from monai.data import ImageDataset, create_test_image_3d from monai.handlers import SegmentationSaver from monai.inferers import sliding_window_inference from monai.networks import eval_mode, predict_segmentation @@ -30,7 +30,7 @@ def run_test(batch_size, img_name, seg_name, output_dir, device="cuda:0"): - ds = NiftiDataset([img_name], [seg_name], transform=AddChannel(), seg_transform=AddChannel(), image_only=False) + ds = ImageDataset([img_name], [seg_name], transform=AddChannel(), seg_transform=AddChannel(), image_only=False) loader = DataLoader(ds, batch_size=1, pin_memory=torch.cuda.is_available()) net = UNet( From bf5deb9d5fb42b6e324e232255cd43c6e0d7595a Mon Sep 17 00:00:00 2001 From: monai-bot Date: Mon, 18 Jan 2021 13:57:30 +0000 Subject: [PATCH 3/5] [MONAI] python code formatting Signed-off-by: monai-bot --- monai/data/__init__.py | 2 +- monai/data/image_dataset.py | 1 + tests/test_savitzky_golay_filter.py | 304 ++++++++++++++-------------- tests/test_savitzky_golay_smooth.py | 140 ++++++------- 4 files changed, 224 insertions(+), 223 deletions(-) diff --git a/monai/data/__init__.py b/monai/data/__init__.py index 302ca5ea00..e0db1e17ae 100644 --- a/monai/data/__init__.py +++ b/monai/data/__init__.py @@ -23,9 +23,9 @@ ) from .decathlon_datalist import load_decathlon_datalist, load_decathlon_properties from .grid_dataset import GridPatchDataset, PatchDataset +from .image_dataset import ImageDataset from .image_reader import ImageReader, ITKReader, NibabelReader, NumpyReader, PILReader from .iterable_dataset import IterableDataset -from .image_dataset import ImageDataset from .nifti_saver import NiftiSaver from .nifti_writer import write_nifti from .png_saver import PNGSaver diff --git 
a/monai/data/image_dataset.py b/monai/data/image_dataset.py index e66580844b..9a024d5e63 100644 --- a/monai/data/image_dataset.py +++ b/monai/data/image_dataset.py @@ -27,6 +27,7 @@ class ImageDataset(Dataset, Randomizable): and segs and return both the images and metadata, and no need to specify transform to load images from files. """ + def __init__( self, image_files: Sequence[str], diff --git a/tests/test_savitzky_golay_filter.py b/tests/test_savitzky_golay_filter.py index d76c42c15f..9163204810 100644 --- a/tests/test_savitzky_golay_filter.py +++ b/tests/test_savitzky_golay_filter.py @@ -1,152 +1,152 @@ -# Copyright 2020 MONAI Consortium -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - -import numpy as np -import torch -from parameterized import parameterized - -from monai.networks.layers import SavitzkyGolayFilter -from tests.utils import skip_if_no_cuda - -# Zero-padding trivial tests - -TEST_CASE_SINGLE_VALUE = [ - {"window_length": 3, "order": 1}, - torch.Tensor([1.0]).unsqueeze(0).unsqueeze(0), # Input data: Single value - torch.Tensor([1 / 3]).unsqueeze(0).unsqueeze(0), # Expected output: With a window length of 3 and polyorder 1 - # output should be equal to mean of 0, 1 and 0 = 1/3 (because input will be zero-padded and a linear fit performed) - 1e-15, # absolute tolerance -] - -TEST_CASE_1D = [ - {"window_length": 3, "order": 1}, - torch.Tensor([1.0, 1.0, 1.0]).unsqueeze(0).unsqueeze(0), # Input data - torch.Tensor([2 / 3, 1.0, 2 / 3]) - .unsqueeze(0) - .unsqueeze(0), # Expected output: zero padded, so linear interpolation - # over length-3 windows will result in output of [2/3, 1, 2/3]. - 1e-15, # absolute tolerance -] - -TEST_CASE_2D_AXIS_2 = [ - {"window_length": 3, "order": 1}, # along default axis (2, first spatial dim) - torch.ones((3, 2)).unsqueeze(0).unsqueeze(0), - torch.Tensor([[2 / 3, 2 / 3], [1.0, 1.0], [2 / 3, 2 / 3]]).unsqueeze(0).unsqueeze(0), - 1e-15, # absolute tolerance -] - -TEST_CASE_2D_AXIS_3 = [ - {"window_length": 3, "order": 1, "axis": 3}, # along axis 3 (second spatial dim) - torch.ones((2, 3)).unsqueeze(0).unsqueeze(0), - torch.Tensor([[2 / 3, 1.0, 2 / 3], [2 / 3, 1.0, 2 / 3]]).unsqueeze(0).unsqueeze(0), - 1e-15, # absolute tolerance -] - -# Replicated-padding trivial tests - -TEST_CASE_SINGLE_VALUE_REP = [ - {"window_length": 3, "order": 1, "mode": "replicate"}, - torch.Tensor([1.0]).unsqueeze(0).unsqueeze(0), # Input data: Single value - torch.Tensor([1.0]).unsqueeze(0).unsqueeze(0), # Expected output: With a window length of 3 and polyorder 1 - # output will be equal to mean of [1, 1, 1] = 1 (input will be nearest-neighbour-padded and a linear fit performed) - 1e-15, # absolute tolerance -] - -TEST_CASE_1D_REP = [ - {"window_length": 3, "order": 1, "mode": "replicate"}, - torch.Tensor([1.0, 1.0, 1.0]).unsqueeze(0).unsqueeze(0), # Input data - torch.Tensor([1.0, 1.0, 1.0]).unsqueeze(0).unsqueeze(0), # Expected output: zero padded, so linear interpolation - # over length-3 windows will result in output of [2/3, 1, 2/3]. 
- 1e-15, # absolute tolerance -] - -TEST_CASE_2D_AXIS_2_REP = [ - {"window_length": 3, "order": 1, "mode": "replicate"}, # along default axis (2, first spatial dim) - torch.ones((3, 2)).unsqueeze(0).unsqueeze(0), - torch.Tensor([[1.0, 1.0], [1.0, 1.0], [1.0, 1.0]]).unsqueeze(0).unsqueeze(0), - 1e-15, # absolute tolerance -] - -TEST_CASE_2D_AXIS_3_REP = [ - {"window_length": 3, "order": 1, "axis": 3, "mode": "replicate"}, # along axis 3 (second spatial dim) - torch.ones((2, 3)).unsqueeze(0).unsqueeze(0), - torch.Tensor([[1.0, 1.0, 1.0], [1.0, 1.0, 1.0]]).unsqueeze(0).unsqueeze(0), - 1e-15, # absolute tolerance -] - -# Sine smoothing - -TEST_CASE_SINE_SMOOTH = [ - {"window_length": 3, "order": 1}, - # Sine wave with period equal to savgol window length (windowed to reduce edge effects). - torch.as_tensor(np.sin(2 * np.pi * 1 / 3 * np.arange(100)) * np.hanning(100)).unsqueeze(0).unsqueeze(0), - # Should be smoothed out to zeros - torch.zeros(100).unsqueeze(0).unsqueeze(0), - # tolerance chosen by examining output of SciPy.signal.savgol_filter when provided the above input - 2e-2, # absolute tolerance -] - - -class TestSavitzkyGolayCPU(unittest.TestCase): - @parameterized.expand( - [ - TEST_CASE_SINGLE_VALUE, - TEST_CASE_1D, - TEST_CASE_2D_AXIS_2, - TEST_CASE_2D_AXIS_3, - TEST_CASE_SINE_SMOOTH, - ] - ) - def test_value(self, arguments, image, expected_data, atol): - result = SavitzkyGolayFilter(**arguments)(image) - np.testing.assert_allclose(result, expected_data, atol=atol) - - -class TestSavitzkyGolayCPUREP(unittest.TestCase): - @parameterized.expand( - [TEST_CASE_SINGLE_VALUE_REP, TEST_CASE_1D_REP, TEST_CASE_2D_AXIS_2_REP, TEST_CASE_2D_AXIS_3_REP] - ) - def test_value(self, arguments, image, expected_data, atol): - result = SavitzkyGolayFilter(**arguments)(image) - np.testing.assert_allclose(result, expected_data, atol=atol) - - -@skip_if_no_cuda -class TestSavitzkyGolayGPU(unittest.TestCase): - @parameterized.expand( - [ - TEST_CASE_SINGLE_VALUE, - TEST_CASE_1D, - TEST_CASE_2D_AXIS_2, - TEST_CASE_2D_AXIS_3, - TEST_CASE_SINE_SMOOTH, - ] - ) - def test_value(self, arguments, image, expected_data, atol): - result = SavitzkyGolayFilter(**arguments)(image.to(device="cuda")) - np.testing.assert_allclose(result.cpu(), expected_data, atol=atol) - - -@skip_if_no_cuda -class TestSavitzkyGolayGPUREP(unittest.TestCase): - @parameterized.expand( - [ - TEST_CASE_SINGLE_VALUE_REP, - TEST_CASE_1D_REP, - TEST_CASE_2D_AXIS_2_REP, - TEST_CASE_2D_AXIS_3_REP, - ] - ) - def test_value(self, arguments, image, expected_data, atol): - result = SavitzkyGolayFilter(**arguments)(image.to(device="cuda")) - np.testing.assert_allclose(result.cpu(), expected_data, atol=atol) +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest + +import numpy as np +import torch +from parameterized import parameterized + +from monai.networks.layers import SavitzkyGolayFilter +from tests.utils import skip_if_no_cuda + +# Zero-padding trivial tests + +TEST_CASE_SINGLE_VALUE = [ + {"window_length": 3, "order": 1}, + torch.Tensor([1.0]).unsqueeze(0).unsqueeze(0), # Input data: Single value + torch.Tensor([1 / 3]).unsqueeze(0).unsqueeze(0), # Expected output: With a window length of 3 and polyorder 1 + # output should be equal to mean of 0, 1 and 0 = 1/3 (because input will be zero-padded and a linear fit performed) + 1e-15, # absolute tolerance +] + +TEST_CASE_1D = [ + {"window_length": 3, "order": 1}, + torch.Tensor([1.0, 1.0, 1.0]).unsqueeze(0).unsqueeze(0), # Input data + torch.Tensor([2 / 3, 1.0, 2 / 3]) + .unsqueeze(0) + .unsqueeze(0), # Expected output: zero padded, so linear interpolation + # over length-3 windows will result in output of [2/3, 1, 2/3]. + 1e-15, # absolute tolerance +] + +TEST_CASE_2D_AXIS_2 = [ + {"window_length": 3, "order": 1}, # along default axis (2, first spatial dim) + torch.ones((3, 2)).unsqueeze(0).unsqueeze(0), + torch.Tensor([[2 / 3, 2 / 3], [1.0, 1.0], [2 / 3, 2 / 3]]).unsqueeze(0).unsqueeze(0), + 1e-15, # absolute tolerance +] + +TEST_CASE_2D_AXIS_3 = [ + {"window_length": 3, "order": 1, "axis": 3}, # along axis 3 (second spatial dim) + torch.ones((2, 3)).unsqueeze(0).unsqueeze(0), + torch.Tensor([[2 / 3, 1.0, 2 / 3], [2 / 3, 1.0, 2 / 3]]).unsqueeze(0).unsqueeze(0), + 1e-15, # absolute tolerance +] + +# Replicated-padding trivial tests + +TEST_CASE_SINGLE_VALUE_REP = [ + {"window_length": 3, "order": 1, "mode": "replicate"}, + torch.Tensor([1.0]).unsqueeze(0).unsqueeze(0), # Input data: Single value + torch.Tensor([1.0]).unsqueeze(0).unsqueeze(0), # Expected output: With a window length of 3 and polyorder 1 + # output will be equal to mean of [1, 1, 1] = 1 (input will be nearest-neighbour-padded and a linear fit performed) + 1e-15, # absolute tolerance +] + +TEST_CASE_1D_REP = [ + {"window_length": 3, "order": 1, "mode": "replicate"}, + torch.Tensor([1.0, 1.0, 1.0]).unsqueeze(0).unsqueeze(0), # Input data + torch.Tensor([1.0, 1.0, 1.0]).unsqueeze(0).unsqueeze(0), # Expected output: zero padded, so linear interpolation + # over length-3 windows will result in output of [2/3, 1, 2/3]. + 1e-15, # absolute tolerance +] + +TEST_CASE_2D_AXIS_2_REP = [ + {"window_length": 3, "order": 1, "mode": "replicate"}, # along default axis (2, first spatial dim) + torch.ones((3, 2)).unsqueeze(0).unsqueeze(0), + torch.Tensor([[1.0, 1.0], [1.0, 1.0], [1.0, 1.0]]).unsqueeze(0).unsqueeze(0), + 1e-15, # absolute tolerance +] + +TEST_CASE_2D_AXIS_3_REP = [ + {"window_length": 3, "order": 1, "axis": 3, "mode": "replicate"}, # along axis 3 (second spatial dim) + torch.ones((2, 3)).unsqueeze(0).unsqueeze(0), + torch.Tensor([[1.0, 1.0, 1.0], [1.0, 1.0, 1.0]]).unsqueeze(0).unsqueeze(0), + 1e-15, # absolute tolerance +] + +# Sine smoothing + +TEST_CASE_SINE_SMOOTH = [ + {"window_length": 3, "order": 1}, + # Sine wave with period equal to savgol window length (windowed to reduce edge effects). 
+ torch.as_tensor(np.sin(2 * np.pi * 1 / 3 * np.arange(100)) * np.hanning(100)).unsqueeze(0).unsqueeze(0), + # Should be smoothed out to zeros + torch.zeros(100).unsqueeze(0).unsqueeze(0), + # tolerance chosen by examining output of SciPy.signal.savgol_filter when provided the above input + 2e-2, # absolute tolerance +] + + +class TestSavitzkyGolayCPU(unittest.TestCase): + @parameterized.expand( + [ + TEST_CASE_SINGLE_VALUE, + TEST_CASE_1D, + TEST_CASE_2D_AXIS_2, + TEST_CASE_2D_AXIS_3, + TEST_CASE_SINE_SMOOTH, + ] + ) + def test_value(self, arguments, image, expected_data, atol): + result = SavitzkyGolayFilter(**arguments)(image) + np.testing.assert_allclose(result, expected_data, atol=atol) + + +class TestSavitzkyGolayCPUREP(unittest.TestCase): + @parameterized.expand( + [TEST_CASE_SINGLE_VALUE_REP, TEST_CASE_1D_REP, TEST_CASE_2D_AXIS_2_REP, TEST_CASE_2D_AXIS_3_REP] + ) + def test_value(self, arguments, image, expected_data, atol): + result = SavitzkyGolayFilter(**arguments)(image) + np.testing.assert_allclose(result, expected_data, atol=atol) + + +@skip_if_no_cuda +class TestSavitzkyGolayGPU(unittest.TestCase): + @parameterized.expand( + [ + TEST_CASE_SINGLE_VALUE, + TEST_CASE_1D, + TEST_CASE_2D_AXIS_2, + TEST_CASE_2D_AXIS_3, + TEST_CASE_SINE_SMOOTH, + ] + ) + def test_value(self, arguments, image, expected_data, atol): + result = SavitzkyGolayFilter(**arguments)(image.to(device="cuda")) + np.testing.assert_allclose(result.cpu(), expected_data, atol=atol) + + +@skip_if_no_cuda +class TestSavitzkyGolayGPUREP(unittest.TestCase): + @parameterized.expand( + [ + TEST_CASE_SINGLE_VALUE_REP, + TEST_CASE_1D_REP, + TEST_CASE_2D_AXIS_2_REP, + TEST_CASE_2D_AXIS_3_REP, + ] + ) + def test_value(self, arguments, image, expected_data, atol): + result = SavitzkyGolayFilter(**arguments)(image.to(device="cuda")) + np.testing.assert_allclose(result.cpu(), expected_data, atol=atol) diff --git a/tests/test_savitzky_golay_smooth.py b/tests/test_savitzky_golay_smooth.py index 2be0da1360..63dcce1b05 100644 --- a/tests/test_savitzky_golay_smooth.py +++ b/tests/test_savitzky_golay_smooth.py @@ -1,70 +1,70 @@ -# Copyright 2020 MONAI Consortium -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest - -import numpy as np -from parameterized import parameterized - -from monai.transforms import SavitzkyGolaySmooth - -# Zero-padding trivial tests - -TEST_CASE_SINGLE_VALUE = [ - {"window_length": 3, "order": 1}, - np.expand_dims(np.array([1.0]), 0), # Input data: Single value - np.expand_dims(np.array([1 / 3]), 0), # Expected output: With a window length of 3 and polyorder 1 - # output should be equal to mean of 0, 1 and 0 = 1/3 (because input will be zero-padded and a linear fit performed) - 1e-15, # absolute tolerance -] - -TEST_CASE_2D_AXIS_2 = [ - {"window_length": 3, "order": 1, "axis": 2}, # along axis 2 (second spatial dim) - np.expand_dims(np.ones((2, 3)), 0), - np.expand_dims(np.array([[2 / 3, 1.0, 2 / 3], [2 / 3, 1.0, 2 / 3]]), 0), - 1e-15, # absolute tolerance -] - -# Replicated-padding trivial tests - -TEST_CASE_SINGLE_VALUE_REP = [ - {"window_length": 3, "order": 1, "mode": "replicate"}, - np.expand_dims(np.array([1.0]), 0), # Input data: Single value - np.expand_dims(np.array([1.0]), 0), # Expected output: With a window length of 3 and polyorder 1 - # output will be equal to mean of [1, 1, 1] = 1 (input will be nearest-neighbour-padded and a linear fit performed) - 1e-15, # absolute tolerance -] - -# Sine smoothing - -TEST_CASE_SINE_SMOOTH = [ - {"window_length": 3, "order": 1}, - # Sine wave with period equal to savgol window length (windowed to reduce edge effects). - np.expand_dims(np.sin(2 * np.pi * 1 / 3 * np.arange(100)) * np.hanning(100), 0), - # Should be smoothed out to zeros - np.expand_dims(np.zeros(100), 0), - # tolerance chosen by examining output of SciPy.signal.savgol_filter() when provided the above input - 2e-2, # absolute tolerance -] - - -class TestSavitzkyGolaySmooth(unittest.TestCase): - @parameterized.expand([TEST_CASE_SINGLE_VALUE, TEST_CASE_2D_AXIS_2, TEST_CASE_SINE_SMOOTH]) - def test_value(self, arguments, image, expected_data, atol): - result = SavitzkyGolaySmooth(**arguments)(image) - np.testing.assert_allclose(result, expected_data, atol=atol) - - -class TestSavitzkyGolaySmoothREP(unittest.TestCase): - @parameterized.expand([TEST_CASE_SINGLE_VALUE_REP]) - def test_value(self, arguments, image, expected_data, atol): - result = SavitzkyGolaySmooth(**arguments)(image) - np.testing.assert_allclose(result, expected_data, atol=atol) +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest + +import numpy as np +from parameterized import parameterized + +from monai.transforms import SavitzkyGolaySmooth + +# Zero-padding trivial tests + +TEST_CASE_SINGLE_VALUE = [ + {"window_length": 3, "order": 1}, + np.expand_dims(np.array([1.0]), 0), # Input data: Single value + np.expand_dims(np.array([1 / 3]), 0), # Expected output: With a window length of 3 and polyorder 1 + # output should be equal to mean of 0, 1 and 0 = 1/3 (because input will be zero-padded and a linear fit performed) + 1e-15, # absolute tolerance +] + +TEST_CASE_2D_AXIS_2 = [ + {"window_length": 3, "order": 1, "axis": 2}, # along axis 2 (second spatial dim) + np.expand_dims(np.ones((2, 3)), 0), + np.expand_dims(np.array([[2 / 3, 1.0, 2 / 3], [2 / 3, 1.0, 2 / 3]]), 0), + 1e-15, # absolute tolerance +] + +# Replicated-padding trivial tests + +TEST_CASE_SINGLE_VALUE_REP = [ + {"window_length": 3, "order": 1, "mode": "replicate"}, + np.expand_dims(np.array([1.0]), 0), # Input data: Single value + np.expand_dims(np.array([1.0]), 0), # Expected output: With a window length of 3 and polyorder 1 + # output will be equal to mean of [1, 1, 1] = 1 (input will be nearest-neighbour-padded and a linear fit performed) + 1e-15, # absolute tolerance +] + +# Sine smoothing + +TEST_CASE_SINE_SMOOTH = [ + {"window_length": 3, "order": 1}, + # Sine wave with period equal to savgol window length (windowed to reduce edge effects). + np.expand_dims(np.sin(2 * np.pi * 1 / 3 * np.arange(100)) * np.hanning(100), 0), + # Should be smoothed out to zeros + np.expand_dims(np.zeros(100), 0), + # tolerance chosen by examining output of SciPy.signal.savgol_filter() when provided the above input + 2e-2, # absolute tolerance +] + + +class TestSavitzkyGolaySmooth(unittest.TestCase): + @parameterized.expand([TEST_CASE_SINGLE_VALUE, TEST_CASE_2D_AXIS_2, TEST_CASE_SINE_SMOOTH]) + def test_value(self, arguments, image, expected_data, atol): + result = SavitzkyGolaySmooth(**arguments)(image) + np.testing.assert_allclose(result, expected_data, atol=atol) + + +class TestSavitzkyGolaySmoothREP(unittest.TestCase): + @parameterized.expand([TEST_CASE_SINGLE_VALUE_REP]) + def test_value(self, arguments, image, expected_data, atol): + result = SavitzkyGolaySmooth(**arguments)(image) + np.testing.assert_allclose(result, expected_data, atol=atol) From 65b27b2270dcd84c4d21b643b1bdd8bfe9b7e4cf Mon Sep 17 00:00:00 2001 From: Nic Ma Date: Mon, 18 Jan 2021 21:59:42 +0800 Subject: [PATCH 4/5] [DLMED] fix typo Signed-off-by: Nic Ma --- monai/data/image_dataset.py | 13 ++++++------- tests/min_tests.py | 2 +- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/monai/data/image_dataset.py b/monai/data/image_dataset.py index 9a024d5e63..dacf990088 100644 --- a/monai/data/image_dataset.py +++ b/monai/data/image_dataset.py @@ -53,15 +53,14 @@ def __init__( seg_transform: transform to apply to segmentation arrays image_only: if True return only the image volume, otherwise, return image volume and the metadata dtype: if not None convert the loaded image to this data type - reader: register reader to load image file and meta data, if None, still can register readers - at runtime or use the default readers. If a string of reader name provided, will construct - a reader object with the `*args` and `**kwargs` parameters, supported reader name: "NibabelReader", - "PILReader", "ITKReader", "NumpyReader" - args: additional parameters for reader if providing a reader name. - kwargs: additional parameters for reader if providing a reader name. 
+ reader: register reader to load image file and meta data, if None, will use the default readers. + If a string of reader name provided, will construct a reader object with the `*args` and `**kwargs` + parameters, supported reader name: "NibabelReader", "PILReader", "ITKReader", "NumpyReader" + args: additional parameters for reader if providing a reader name + kwargs: additional parameters for reader if providing a reader name Raises: - ValueError: When ``seg_files`` length differs from ``image_files``. + ValueError: When ``seg_files`` length differs from ``image_files`` """ diff --git a/tests/min_tests.py b/tests/min_tests.py index daf238a154..9a2dc0f05f 100644 --- a/tests/min_tests.py +++ b/tests/min_tests.py @@ -69,7 +69,7 @@ def run_testsuit(): "test_load_imaged", "test_load_spacing_orientation", "test_mednistdataset", - "test_nifti_dataset", + "test_image_dataset", "test_nifti_header_revise", "test_nifti_rw", "test_nifti_saver", From f9b47f08691f53d9704dd62b01dbb77f5cae0ed6 Mon Sep 17 00:00:00 2001 From: Nic Ma Date: Mon, 18 Jan 2021 22:17:21 +0800 Subject: [PATCH 5/5] [DLMED] fix flake8 issue Signed-off-by: Nic Ma --- monai/data/image_dataset.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/monai/data/image_dataset.py b/monai/data/image_dataset.py index dacf990088..7dd55431af 100644 --- a/monai/data/image_dataset.py +++ b/monai/data/image_dataset.py @@ -76,7 +76,7 @@ def __init__( self.transform = transform self.seg_transform = seg_transform self.image_only = image_only - self.loader = LoadImage(reader=reader, image_only=image_only, dtype=dtype, *args, **kwargs) + self.loader = LoadImage(reader, image_only, dtype, *args, **kwargs) self.set_random_state(seed=get_seed()) self._seed = 0 # transform synchronization seed
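
Illustrative usage (a minimal sketch, assuming NIfTI files exist at the hypothetical paths below): this mirrors the updated tests in this series and shows the renamed `ImageDataset` with the new `reader` argument. Reader-specific options such as `as_closest_canonical` are assumed to be forwarded to the named reader through the new `*args`/`**kwargs` parameters, as described in the updated docstring, and the output tuple layout shown is an assumption based on the tests.

    import numpy as np
    import torch
    from torch.utils.data import DataLoader

    from monai.data import ImageDataset
    from monai.transforms import AddChannel

    # hypothetical input file lists
    images = ["img1.nii.gz", "img2.nii.gz"]
    segs = ["seg1.nii.gz", "seg2.nii.gz"]

    # roughly equivalent to the removed NiftiDataset(..., as_closest_canonical=True):
    # the reader is now selected via the `reader` argument, and reader-specific
    # options are passed through *args/**kwargs to the constructed reader
    ds = ImageDataset(
        image_files=images,
        seg_files=segs,
        transform=AddChannel(),
        seg_transform=AddChannel(),
        image_only=False,           # also return the image metadata dict
        dtype=np.float32,
        reader="NibabelReader",     # or an ImageReader instance; None uses the default readers
        as_closest_canonical=True,  # forwarded to NibabelReader when a reader name is given
    )

    # assumed output layout with image_only=False and seg_files set: (image, seg, metadata)
    img, seg, meta = ds[0]
    print(img.shape, seg.shape, meta["original_affine"])

    # the dataset plugs into a standard DataLoader, as in test_integration_sliding_window.py
    loader = DataLoader(ds, batch_size=1, pin_memory=torch.cuda.is_available())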