From f5ccf2a22736c6ecdd99b32698abb924e435ce27 Mon Sep 17 00:00:00 2001 From: Mohammad Adil Date: Wed, 4 Mar 2020 20:07:40 -0800 Subject: [PATCH 01/40] Adding rotation transform. (#126) --- monai/transforms/transforms.py | 37 ++++++++++++++++++++++++++++++ requirements.txt | 1 + tests/test_rotate.py | 41 ++++++++++++++++++++++++++++++++++ 3 files changed, 79 insertions(+) create mode 100644 tests/test_rotate.py diff --git a/monai/transforms/transforms.py b/monai/transforms/transforms.py index dc6f571106..454c3aa7f6 100644 --- a/monai/transforms/transforms.py +++ b/monai/transforms/transforms.py @@ -15,6 +15,7 @@ import numpy as np import torch +import scipy.ndimage import monai from monai.data.utils import get_random_patch, get_valid_patch_size @@ -80,6 +81,42 @@ def __call__(self, img): return np.flip(img, self.axis) +@export +class Rotate: + """ + Rotates an input image by given angle. Uses scipy.ndimage.rotate. For more details, see + http://lagrange.univ-lyon1.fr/docs/scipy/0.17.1/generated/scipy.ndimage.rotate.html. + + Args: + angle (float): Rotation angle in degrees. + axes (tuple of 2 ints): Axes of rotation. Default: (1, 2). This is the first two + axis in spatial dimensions according to MONAI channel first shape assumption. + reshape (bool): If true, output shape is made same as input. Default: True. + order (int): Order of spline interpolation. Range 0-5. Default: 1. This is + different from scipy where default interpolation is 3. + mode (str): Points outside boundary filled according to this mode. Options are + 'constant', 'nearest', 'reflect', 'wrap'. Default: 'constant'. + cval (scalar): Values to fill outside boundary. Default: 0. + prefiter (bool): Apply spline_filter before interpolation. Default: True. 
+ """ + + def __init__(self, angle, axes=(1, 2), reshape=True, order=1, + mode='constant', cval=0, prefilter=True): + self.angle = angle + self.reshape = reshape + self.order = order + self.mode = mode + self.cval = cval + self.prefilter = prefilter + self.axes = axes + + def __call__(self, img): + return scipy.ndimage.rotate(img, self.angle, self.axes, + reshape=self.reshape, order=self.order, + mode=self.mode, cval=self.cval, + prefilter=self.prefilter) + + @export class ToTensor: """ diff --git a/requirements.txt b/requirements.txt index 8325300968..91985396c3 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,3 +7,4 @@ coverage nibabel parameterized tensorboard +scipy \ No newline at end of file diff --git a/tests/test_rotate.py b/tests/test_rotate.py new file mode 100644 index 0000000000..98e25f587f --- /dev/null +++ b/tests/test_rotate.py @@ -0,0 +1,41 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest +import numpy as np + +import scipy.ndimage +from parameterized import parameterized + +from monai.transforms import Rotate +from tests.utils import NumpyImageTestCase2D + + +class RotateTest(NumpyImageTestCase2D): + + @parameterized.expand([ + (90, (1, 2), True, 1, 'reflect', 0, True), + (-90, (2, 1), True, 3, 'constant', 0, True), + (180, (2, 3), False, 2, 'constant', 4, False), + ]) + def test_correct_results(self, angle, axes, reshape, + order, mode, cval, prefilter): + rotate_fn = Rotate(angle, axes, reshape, + order, mode, cval, prefilter) + rotated = rotate_fn(self.imt) + + expected = scipy.ndimage.rotate(self.imt, angle, axes, reshape, order=order, + mode=mode, cval=cval, prefilter=prefilter) + self.assertTrue(np.allclose(expected, rotated)) + + +if __name__ == '__main__': + unittest.main() From f39e8c1cdb3489df8e5b1f884e817d1438131259 Mon Sep 17 00:00:00 2001 From: Wenqi Li Date: Thu, 5 Mar 2020 12:03:54 +0000 Subject: [PATCH 02/40] 108-resize (#125) * Add Resize transform (spatial scaling). * Adding tests. --- monai/transforms/transforms.py | 42 +++++++++++++++++++++++++++ tests/test_resize.py | 53 ++++++++++++++++++++++++++++++++++ 2 files changed, 95 insertions(+) create mode 100644 tests/test_resize.py diff --git a/monai/transforms/transforms.py b/monai/transforms/transforms.py index 454c3aa7f6..4447cdd282 100644 --- a/monai/transforms/transforms.py +++ b/monai/transforms/transforms.py @@ -15,6 +15,7 @@ import numpy as np import torch +from skimage.transform import resize import scipy.ndimage import monai @@ -81,6 +82,47 @@ def __call__(self, img): return np.flip(img, self.axis) +@export +class Resize: + """ + Resize the input image to given resolution. Uses skimage.transform.resize underneath. + For additional details, see https://scikit-image.org/docs/dev/api/skimage.transform.html#skimage.transform.resize. + + Args: + order (int): Order of spline interpolation. Default=1. 
+ mode (str): Points outside boundaries are filled according to given mode. + Options are 'constant', 'edge', 'symmetric', 'reflect', 'wrap'. + cval (float): Used with mode 'constant', the value outside image boundaries. + clip (bool): Wheter to clip range of output values after interpolation. Default: True. + preserve_range (bool): Whether to keep original range of values. Default is True. + If False, input is converted according to conventions of img_as_float. See + https://scikit-image.org/docs/dev/user_guide/data_types.html. + anti_aliasing (bool): Whether to apply a gaussian filter to image before down-scaling. + Default is True. + anti_aliasing_sigma (float, tuple of floats): Standard deviation for gaussian filtering. + """ + + def __init__(self, output_shape, order=1, mode='reflect', cval=0, + clip=True, preserve_range=True, + anti_aliasing=True, anti_aliasing_sigma=None): + assert isinstance(order, int), "order must be integer." + self.output_shape = output_shape + self.order = order + self.mode = mode + self.cval = cval + self.clip = clip + self.preserve_range = preserve_range + self.anti_aliasing = anti_aliasing + self.anti_aliasing_sigma = anti_aliasing_sigma + + def __call__(self, img): + return resize(img, self.output_shape, order=self.order, + mode=self.mode, cval=self.cval, + clip=self.clip, preserve_range=self.preserve_range, + anti_aliasing=self.anti_aliasing, + anti_aliasing_sigma=self.anti_aliasing_sigma) + + @export class Rotate: """ diff --git a/tests/test_resize.py b/tests/test_resize.py new file mode 100644 index 0000000000..7feaf9f634 --- /dev/null +++ b/tests/test_resize.py @@ -0,0 +1,53 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + +import numpy as np +import skimage +from parameterized import parameterized + +from monai.transforms import Resize +from tests.utils import NumpyImageTestCase2D + + +class ResizeTest(NumpyImageTestCase2D): + + @parameterized.expand([ + ("invalid_order", "order", AssertionError) + ]) + def test_invalid_inputs(self, _, order, raises): + with self.assertRaises(raises): + resize = Resize(output_shape=(128, 128, 3), order=order) + resize(self.imt) + + @parameterized.expand([ + ((1, 1, 64, 64), 1, 'reflect', 0, True, True, True, None), + ((1, 1, 32, 32), 2, 'constant', 3, False, False, False, None), + ((1, 1, 256, 256), 3, 'constant', 3, False, False, False, None), + ]) + def test_correct_results(self, output_shape, order, mode, + cval, clip, preserve_range, + anti_aliasing, anti_aliasing_sigma): + resize = Resize(output_shape, order, mode, cval, clip, + preserve_range, anti_aliasing, + anti_aliasing_sigma) + expected = skimage.transform.resize(self.imt, output_shape, + order=order, mode=mode, + cval=cval, clip=clip, + preserve_range=preserve_range, + anti_aliasing=anti_aliasing, + anti_aliasing_sigma=anti_aliasing_sigma) + self.assertTrue(np.allclose(resize(self.imt), expected)) + + +if __name__ == '__main__': + unittest.main() From 28ad305da5b39b59adf3bffa81d597f63b09272c Mon Sep 17 00:00:00 2001 From: Wenqi Li Date: Thu, 5 Mar 2020 12:13:06 +0000 Subject: [PATCH 03/40] 113-gaussian-noise (#139) --- monai/transforms/transforms.py | 18 ++++++++++++++++ tests/test_gaussian_noise.py | 38 ++++++++++++++++++++++++++++++++++ 2 files changed, 56 
insertions(+) create mode 100644 tests/test_gaussian_noise.py diff --git a/monai/transforms/transforms.py b/monai/transforms/transforms.py index 4447cdd282..7f8961b4e6 100644 --- a/monai/transforms/transforms.py +++ b/monai/transforms/transforms.py @@ -64,6 +64,24 @@ def __call__(self, img): return rescale_array(img, self.minv, self.maxv, self.dtype) +@export +class GaussianNoise(Randomizable): + """Add gaussian noise to image. + + Args: + mean (float or array of floats): Mean or “centre” of the distribution. + scale (float): Standard deviation (spread) of distribution. + size (int or tuple of ints): Output shape. Default: None (single value is returned). + """ + + def __init__(self, mean=0.0, std=0.1): + self.mean = mean + self.std = std + + def __call__(self, img): + return img + self.R.normal(self.mean, self.R.uniform(0, self.std), size=img.shape) + + @export class Flip: """Reverses the order of elements along the given axis. Preserves shape. diff --git a/tests/test_gaussian_noise.py b/tests/test_gaussian_noise.py new file mode 100644 index 0000000000..400ce4ad73 --- /dev/null +++ b/tests/test_gaussian_noise.py @@ -0,0 +1,38 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest +import numpy as np + +from parameterized import parameterized + +from monai.transforms import GaussianNoise +from tests.utils import NumpyImageTestCase2D + + +class GaussianNoiseTest(NumpyImageTestCase2D): + + @parameterized.expand([ + ("test_zero_mean", 0, 0.1), + ("test_non_zero_mean", 1, 0.5) + ]) + def test_correct_results(self, _, mean, std): + seed = 42 + gaussian_fn = GaussianNoise(mean=mean, std=std) + gaussian_fn.set_random_state(seed) + noised = gaussian_fn(self.imt) + np.random.seed(seed) + expected = self.imt + np.random.normal(mean, np.random.uniform(0, std), size=self.imt.shape) + assert np.allclose(expected, noised) + + +if __name__ == '__main__': + unittest.main() From 7f2947758d5355f6cde653527992267b409f2087 Mon Sep 17 00:00:00 2001 From: Wenqi Li Date: Thu, 5 Mar 2020 13:36:44 +0000 Subject: [PATCH 04/40] 133 dataset and example with dict transforms (#134) * [DLMED] add UNet example with dict based transforms * [DLMED] temporarily remove constants, will discuss after GTC * [DLMED] adjust default behavior according to comments --- ...on_3d.py => unet_segmentation_3d_array.py} | 0 examples/unet_segmentation_3d_dict.py | 185 ++++++++++++++++++ monai/data/nifti_reader.py | 75 ++++++- monai/transforms/composables.py | 24 ++- monai/transforms/transforms.py | 2 +- tests/test_add_channeld.py | 37 ++++ 6 files changed, 320 insertions(+), 3 deletions(-) rename examples/{unet_segmentation_3d.py => unet_segmentation_3d_array.py} (100%) create mode 100644 examples/unet_segmentation_3d_dict.py create mode 100644 tests/test_add_channeld.py diff --git a/examples/unet_segmentation_3d.py b/examples/unet_segmentation_3d_array.py similarity index 100% rename from examples/unet_segmentation_3d.py rename to examples/unet_segmentation_3d_array.py diff --git a/examples/unet_segmentation_3d_dict.py b/examples/unet_segmentation_3d_dict.py new file mode 100644 index 0000000000..8c6955d87b --- /dev/null +++ b/examples/unet_segmentation_3d_dict.py @@ -0,0 
+1,185 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import sys +import tempfile +from glob import glob +import logging + +import nibabel as nib +import numpy as np +import torch +from torch.utils.tensorboard import SummaryWriter +from ignite.engine import Events, create_supervised_trainer, create_supervised_evaluator, _prepare_batch +from ignite.handlers import ModelCheckpoint, EarlyStopping +from torch.utils.data import DataLoader + +# assumes the framework is found here, change as necessary +sys.path.append("..") + +import monai +import monai.transforms.compose as transforms +from monai.data.nifti_reader import NiftiDatasetd +from monai.transforms.composables import AddChanneld, RandRotate90d +from monai.handlers.stats_handler import StatsHandler +from monai.handlers.mean_dice import MeanDice +from monai.visualize import img2tensorboard +from monai.data.synthetic import create_test_image_3d +from monai.handlers.utils import stopping_fn_from_metric + +monai.config.print_config() + +# Create a temporary directory and 50 random image, mask paris +tempdir = tempfile.mkdtemp() + +for i in range(50): + im, seg = create_test_image_3d(128, 128, 128) + + n = nib.Nifti1Image(im, np.eye(4)) + nib.save(n, os.path.join(tempdir, 'im%i.nii.gz' % i)) + + n = nib.Nifti1Image(seg, np.eye(4)) + nib.save(n, os.path.join(tempdir, 'seg%i.nii.gz' % i)) + +images = sorted(glob(os.path.join(tempdir, 'im*.nii.gz'))) +segs = 
sorted(glob(os.path.join(tempdir, 'seg*.nii.gz'))) + +# Define transforms for image and segmentation +transforms = transforms.Compose([ + AddChanneld(keys=['image', 'seg']), + RandRotate90d(keys=['image', 'seg'], prob=0.8, axes=[1, 3]) +]) + +# Define nifti dataset, dataloader. +ds = NiftiDatasetd(images, segs, transform=transforms) +loader = DataLoader(ds, batch_size=10, num_workers=2, pin_memory=torch.cuda.is_available()) +check_data = monai.utils.misc.first(loader) +print(check_data['image'].shape, check_data['seg'].shape) + +lr = 1e-5 + +# Create UNet, DiceLoss and Adam optimizer. +net = monai.networks.nets.UNet( + dimensions=3, + in_channels=1, + num_classes=1, + channels=(16, 32, 64, 128, 256), + strides=(2, 2, 2, 2), + num_res_units=2, +) + +loss = monai.losses.DiceLoss(do_sigmoid=True) +opt = torch.optim.Adam(net.parameters(), lr) + + +# Since network outputs logits and segmentation, we need a custom function. +def _loss_fn(i, j): + return loss(i[0], j) + + +# Create trainer +def prepare_batch(batch, device=None, non_blocking=False): + return _prepare_batch((batch['image'], batch['seg']), device, non_blocking) + + +device = torch.device("cuda:0") +trainer = create_supervised_trainer(net, opt, _loss_fn, device, False, + prepare_batch=prepare_batch, + output_transform=lambda x, y, y_pred, loss: [y_pred, loss.item(), y]) + +# adding checkpoint handler to save models (network params and optimizer stats) during training +checkpoint_handler = ModelCheckpoint('./runs/', 'net', n_saved=10, require_empty=False) +trainer.add_event_handler(event_name=Events.EPOCH_COMPLETED, + handler=checkpoint_handler, + to_save={'net': net, 'opt': opt}) +train_stats_handler = StatsHandler() +train_stats_handler.attach(trainer) + + +@trainer.on(Events.EPOCH_COMPLETED) +def log_training_loss(engine): + # log loss to tensorboard with second item of engine.state.output, loss.item() from output_transform + writer.add_scalar('Loss/train', engine.state.output[1], engine.state.epoch) + + # 
tensor of ones to use where for converting labels to zero and ones + ones = torch.ones(engine.state.batch['seg'][0].shape, dtype=torch.int32) + first_output_tensor = engine.state.output[0][1][0].detach().cpu() + # log model output to tensorboard, as three dimensional tensor with no channels dimension + img2tensorboard.add_animated_gif_no_channels(writer, "first_output_final_batch", first_output_tensor, 64, + 255, engine.state.epoch) + # get label tensor and convert to single class + first_label_tensor = torch.where(engine.state.batch['seg'][0] > 0, ones, engine.state.batch['seg'][0]) + # log label tensor to tensorboard, there is a channel dimension when getting label from batch + img2tensorboard.add_animated_gif(writer, "first_label_final_batch", first_label_tensor, 64, + 255, engine.state.epoch) + second_output_tensor = engine.state.output[0][1][1].detach().cpu() + img2tensorboard.add_animated_gif_no_channels(writer, "second_output_final_batch", second_output_tensor, 64, + 255, engine.state.epoch) + second_label_tensor = torch.where(engine.state.batch['seg'][1] > 0, ones, engine.state.batch['seg'][1]) + img2tensorboard.add_animated_gif(writer, "second_label_final_batch", second_label_tensor, 64, + 255, engine.state.epoch) + third_output_tensor = engine.state.output[0][1][2].detach().cpu() + img2tensorboard.add_animated_gif_no_channels(writer, "third_output_final_batch", third_output_tensor, 64, + 255, engine.state.epoch) + third_label_tensor = torch.where(engine.state.batch['seg'][2] > 0, ones, engine.state.batch['seg'][2]) + img2tensorboard.add_animated_gif(writer, "third_label_final_batch", third_label_tensor, 64, + 255, engine.state.epoch) + engine.logger.info("Epoch[%s] Loss: %s", engine.state.epoch, engine.state.output[1]) + + +writer = SummaryWriter() + +# Set parameters for validation +validation_every_n_epochs = 1 +metric_name = 'Mean_Dice' + +# add evaluation metric to the evaluator engine +val_metrics = {metric_name: MeanDice(add_sigmoid=True, 
to_onehot_y=False)} +evaluator = create_supervised_evaluator(net, val_metrics, device, True, + prepare_batch=prepare_batch, + output_transform=lambda x, y, y_pred: (y_pred[0], y)) + +# Add stats event handler to print validation stats via evaluator +logging.basicConfig(stream=sys.stdout, level=logging.INFO) +val_stats_handler = StatsHandler() +val_stats_handler.attach(evaluator) + +# Add early stopping handler to evaluator. +early_stopper = EarlyStopping(patience=4, + score_function=stopping_fn_from_metric(metric_name), + trainer=trainer) +evaluator.add_event_handler(event_name=Events.EPOCH_COMPLETED, handler=early_stopper) + +# create a validation data loader +val_ds = NiftiDatasetd(images[-20:], segs[-20:], transform=transforms) +val_loader = DataLoader(ds, batch_size=5, num_workers=8, pin_memory=torch.cuda.is_available()) + + +@trainer.on(Events.EPOCH_COMPLETED(every=validation_every_n_epochs)) +def run_validation(engine): + evaluator.run(val_loader) + + +@evaluator.on(Events.EPOCH_COMPLETED) +def log_metrics_to_tensorboard(engine): + for _, value in engine.state.metrics.items(): + writer.add_scalar('Metrics/' + metric_name, value, trainer.state.epoch) + + +# create a training data loader +logging.basicConfig(stream=sys.stdout, level=logging.INFO) + +train_ds = NiftiDatasetd(images[:20], segs[:20], transform=transforms) +train_loader = DataLoader(train_ds, batch_size=5, num_workers=8, pin_memory=torch.cuda.is_available()) + +train_epochs = 30 +state = trainer.run(train_loader, train_epochs) diff --git a/monai/data/nifti_reader.py b/monai/data/nifti_reader.py index 287c97fdde..753691de20 100644 --- a/monai/data/nifti_reader.py +++ b/monai/data/nifti_reader.py @@ -14,7 +14,6 @@ from torch.utils.data import Dataset from torch.utils.data._utils.collate import np_str_obj_array_pattern - from monai.utils.module import export from monai.transforms.compose import Randomizable @@ -107,6 +106,7 @@ def __getitem__(self, index): else: img, meta_data = 
load_nifti(self.image_files[index], as_closest_canonical=self.as_closest_canonical, image_only=self.image_only, dtype=self.dtype) + target = None if self.seg_files is not None: target = load_nifti(self.seg_files[index]) elif self.labels is not None: @@ -135,3 +135,76 @@ def __getitem__(self, index): continue compatible_meta[meta_key] = meta_datum return img, target, compatible_meta + + +@export("monai.data") +class NiftiDatasetd(Dataset): + """ + Loads image/segmentation pairs of Nifti files from the given filename lists. Dict level transformations can be + specified for the dictionary data which is constructed by image, label and other metadata. + """ + + def __init__(self, image_files, seg_files=None, labels=None, as_closest_canonical=False, transform=None, dtype=None): + """ + Initializes the dataset with the image and segmentation filename lists. The transform `transform` is applied + to the images and `seg_transform` to the segmentations. + + Args: + image_files (list of str): list of image filenames. + seg_files (list of str): if in segmentation task, list of segmentation filenames. + labels (list or array): if in classification task, list of classification labels. + as_closest_canonical (bool): if True, load the image as closest to canonical orientation. + transform (Callable, optional): dict transforms to excute operations on dictionary data. + dtype (np.dtype, optional): if not None convert the loaded image to this data type. 
+ """ + + if len(image_files) != len(seg_files): + raise ValueError('Must have same number of image and segmentation files') + + self.image_files = image_files + self.seg_files = seg_files + self.labels = labels + self.as_closest_canonical = as_closest_canonical + self.transform = transform + self.dtype = dtype + + def __len__(self): + return len(self.image_files) + + def __getitem__(self, index): + meta_data = None + img, meta_data = load_nifti( + filename_or_obj=self.image_files[index], + as_closest_canonical=self.as_closest_canonical, + image_only=False, + dtype=self.dtype + ) + + seg = None + if self.seg_files is not None: + seg = load_nifti(self.seg_files[index]) + label = None + if self.labels is not None: + label = self.labels[index] + + compatible_meta = {} + assert isinstance(meta_data, dict), 'meta_data must be in dictionary format.' + for meta_key in meta_data: + meta_datum = meta_data[meta_key] + if type(meta_datum).__name__ == 'ndarray' \ + and np_str_obj_array_pattern.search(meta_datum.dtype.str) is not None: + continue + compatible_meta[meta_key] = meta_datum + + data = {'image': img} + if seg is not None: + data['seg'] = seg + if label is not None: + data['label'] = label + if len(compatible_meta) > 0: + data.update(compatible_meta) + + if self.transform is not None: + data = self.transform(data) + + return data diff --git a/monai/transforms/composables.py b/monai/transforms/composables.py index 4c13e105ca..d177bc98e1 100644 --- a/monai/transforms/composables.py +++ b/monai/transforms/composables.py @@ -18,7 +18,7 @@ import monai from monai.data.utils import get_random_patch, get_valid_patch_size from monai.transforms.compose import Randomizable, Transform -from monai.transforms.transforms import Rotate90, SpatialCrop +from monai.transforms.transforms import Rotate90, SpatialCrop, AddChannel from monai.utils.misc import ensure_tuple from monai.transforms.utils import generate_pos_neg_label_crop_centers @@ -149,6 +149,28 @@ def __call__(self, data): 
return d +@export +class AddChanneld(MapTransform): + """ + dictionary-based wrapper of AddChannel. + """ + + def __init__(self, keys): + """ + Args: + keys (hashable items): keys of the corresponding items to be transformed. + See also: monai.transform.composables.MapTransform + """ + MapTransform.__init__(self, keys) + self.adder = AddChannel() + + def __call__(self, data): + d = dict(data) + for key in self.keys: + d[key] = self.adder(d[key]) + return d + + @export class RandCropByPosNegLabeld(Randomizable, MapTransform): """ diff --git a/monai/transforms/transforms.py b/monai/transforms/transforms.py index 7f8961b4e6..3cca4a01c3 100644 --- a/monai/transforms/transforms.py +++ b/monai/transforms/transforms.py @@ -286,7 +286,7 @@ def __init__(self, k=1, axes=(1, 2)): self.plane_axes = axes def __call__(self, img): - return np.rot90(img, self.k, self.plane_axes) + return np.ascontiguousarray(np.rot90(img, self.k, self.plane_axes)) @export diff --git a/tests/test_add_channeld.py b/tests/test_add_channeld.py new file mode 100644 index 0000000000..a2940ffffb --- /dev/null +++ b/tests/test_add_channeld.py @@ -0,0 +1,37 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest +import numpy as np +from parameterized import parameterized +from monai.transforms.composables import AddChanneld + +TEST_CASE_1 = [ + {'keys': ['img', 'seg']}, + { + 'img': np.array([[0, 1], [1, 2]]), + 'seg': np.array([[0, 1], [1, 2]]) + }, + (1, 2, 2), +] + + +class TestAddChanneld(unittest.TestCase): + + @parameterized.expand([TEST_CASE_1]) + def test_shape(self, input_param, input_data, expected_shape): + result = AddChanneld(**input_param)(input_data) + self.assertEqual(result['img'].shape, expected_shape) + self.assertEqual(result['seg'].shape, expected_shape) + + +if __name__ == '__main__': + unittest.main() From 445a568e3ae74cae8b5c99f19602c6e0e98409dc Mon Sep 17 00:00:00 2001 From: Mohammad Adil Date: Thu, 5 Mar 2020 11:54:38 -0800 Subject: [PATCH 05/40] 107-zoom (#138) * Adding zoom transform and tests. * Fix import error. --- monai/transforms/transforms.py | 61 ++++++++++++++++++++++++++++++++ tests/test_zoom.py | 64 ++++++++++++++++++++++++++++++++++ 2 files changed, 125 insertions(+) create mode 100644 tests/test_zoom.py diff --git a/monai/transforms/transforms.py b/monai/transforms/transforms.py index 3cca4a01c3..e24e5c84ad 100644 --- a/monai/transforms/transforms.py +++ b/monai/transforms/transforms.py @@ -177,6 +177,67 @@ def __call__(self, img): prefilter=self.prefilter) +@export +class Zoom: + """ Zooms a nd image. Uses scipy.ndimage.zoom or cupyx.scipy.ndimage.zoom in case of gpu. + For details, please see https://docs.scipy.org/doc/scipy/reference/generated/scipy.ndimage.zoom.html. + + Args: + zoom (float or sequence): The zoom factor along the axes. If a float, zoom is the same for each axis. + If a sequence, zoom should contain one value for each axis. + order (int): order of interpolation. Default=3. + mode (str): Determines how input is extended beyond boundaries. Default is 'constant'. + cval (scalar, optional): Value to fill past edges. Default is 0. + use_gpu (bool): Should use cpu or gpu. 
+ keep_size (bool): Should keep original size (pad if needed). + """ + def __init__(self, zoom, order=3, mode='constant', cval=0, prefilter=True, use_gpu=False, keep_size=False): + assert isinstance(order, int), "Order must be integer." + self.zoom = zoom + self.order = order + self.mode = mode + self.cval = cval + self.prefilter = prefilter + self.use_gpu = use_gpu + self.keep_size = keep_size + + def __call__(self, img): + zoomed = None + if self.use_gpu: + try: + import cupy + from cupyx.scipy.ndimage import zoom as zoom_gpu + + zoomed_gpu = zoom_gpu(cupy.array(img), zoom=self.zoom, order=self.order, + mode=self.mode, cval=self.cval, prefilter=self.prefilter) + zoomed = cupy.asnumpy() + except ModuleNotFoundError: + print('For GPU zoom, please install cupy. Defaulting to cpu.') + except Exception: + print('Warning: Zoom gpu failed. Defaulting to cpu.') + + if not zoomed or not self.use_gpu: + zoomed = scipy.ndimage.zoom(img, zoom=self.zoom, order=self.order, + mode=self.mode, cval=self.cval, prefilter=self.prefilter) + + # Crops to original size or pads. + if self.keep_size: + shape = img.shape + pad_vec = [[0, 0]] * len(shape) + crop_vec = list(zoomed.shape) + for d in range(len(shape)): + if zoomed.shape[d] > shape[d]: + crop_vec[d] = shape[d] + elif zoomed.shape[d] < shape[d]: + # pad_vec[d] = [0, shape[d] - zoomed.shape[d]] + pad_h = (float(shape[d]) - float(zoomed.shape[d])) / 2 + pad_vec[d] = [int(np.floor(pad_h)), int(np.ceil(pad_h))] + zoomed = zoomed[0:crop_vec[0], 0:crop_vec[1], 0:crop_vec[2]] + zoomed = np.pad(zoomed, pad_vec, mode='constant', constant_values=self.cval) + + return zoomed + + @export class ToTensor: """ diff --git a/tests/test_zoom.py b/tests/test_zoom.py new file mode 100644 index 0000000000..727565d368 --- /dev/null +++ b/tests/test_zoom.py @@ -0,0 +1,64 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + +import numpy as np +from scipy.ndimage import zoom as zoom_scipy +from parameterized import parameterized + +from monai.transforms import Zoom +from tests.utils import NumpyImageTestCase2D + + +class ZoomTest(NumpyImageTestCase2D): + + @parameterized.expand([ + (1.1, 3, 'constant', 0, True, False, False), + (0.9, 3, 'constant', 0, True, False, False), + (0.8, 1, 'reflect', 0, False, False, False) + ]) + def test_correct_results(self, zoom, order, mode, cval, prefilter, use_gpu, keep_size): + zoom_fn = Zoom(zoom=zoom, order=order, mode=mode, cval=cval, + prefilter=prefilter, use_gpu=use_gpu, keep_size=keep_size) + zoomed = zoom_fn(self.imt) + expected = zoom_scipy(self.imt, zoom=zoom, mode=mode, order=order, + cval=cval, prefilter=prefilter) + self.assertTrue(np.allclose(expected, zoomed)) + + @parameterized.expand([ + ("gpu_zoom", 0.6, 3, 'constant', 0, True) + ]) + def test_gpu_zoom(self, _, zoom, order, mode, cval, prefilter): + zoom_fn = Zoom(zoom=zoom, order=order, mode=mode, cval=cval, + prefilter=prefilter, use_gpu=True, keep_size=False) + zoomed = zoom_fn(self.imt) + expected = zoom_scipy(self.imt, zoom=zoom, mode=mode, order=order, + cval=cval, prefilter=prefilter) + self.assertTrue(np.allclose(expected, zoomed)) + + def test_keep_size(self): + zoom_fn = Zoom(zoom=0.6, keep_size=True) + zoomed = zoom_fn(self.imt) + self.assertTrue(np.array_equal(zoomed.shape, self.imt.shape)) + + @parameterized.expand([ + ("no_zoom", None, 1, TypeError), + ("invalid_order", 0.9, 's', AssertionError) + ]) + def test_invalid_inputs(self, _, zoom, 
order, raises): + with self.assertRaises(raises): + zoom_fn = Zoom(zoom=zoom, order=order) + zoomed = zoom_fn(self.imt) + + +if __name__ == '__main__': + unittest.main() From db059424c569b6a0e4cbf6184d3eeb6012b3b1fc Mon Sep 17 00:00:00 2001 From: Mohammad Adil Date: Thu, 5 Mar 2020 17:41:23 -0800 Subject: [PATCH 06/40] Fix gpu zoom errors. (#149) --- monai/transforms/transforms.py | 11 ++++++----- tests/test_zoom.py | 17 ++++++++++------- 2 files changed, 16 insertions(+), 12 deletions(-) diff --git a/monai/transforms/transforms.py b/monai/transforms/transforms.py index e24e5c84ad..fb1bf468d6 100644 --- a/monai/transforms/transforms.py +++ b/monai/transforms/transforms.py @@ -188,7 +188,8 @@ class Zoom: order (int): order of interpolation. Default=3. mode (str): Determines how input is extended beyond boundaries. Default is 'constant'. cval (scalar, optional): Value to fill past edges. Default is 0. - use_gpu (bool): Should use cpu or gpu. + use_gpu (bool): Should use cpu or gpu. Uses cupyx which doesn't support order > 1 and modes + 'wrap' and 'reflect'. Defaults to cpu for these cases or if cupyx not found. keep_size (bool): Should keep original size (pad if needed). """ def __init__(self, zoom, order=3, mode='constant', cval=0, prefilter=True, use_gpu=False, keep_size=False): @@ -210,13 +211,13 @@ def __call__(self, img): zoomed_gpu = zoom_gpu(cupy.array(img), zoom=self.zoom, order=self.order, mode=self.mode, cval=self.cval, prefilter=self.prefilter) - zoomed = cupy.asnumpy() + zoomed = cupy.asnumpy(zoomed_gpu) except ModuleNotFoundError: print('For GPU zoom, please install cupy. Defaulting to cpu.') - except Exception: - print('Warning: Zoom gpu failed. Defaulting to cpu.') + except NotImplementedError: + print("Defaulting to CPU. 
cupyx doesn't support order > 1 and modes 'wrap' or 'reflect'.") - if not zoomed or not self.use_gpu: + if zoomed is None: zoomed = scipy.ndimage.zoom(img, zoom=self.zoom, order=self.order, mode=self.mode, cval=self.cval, prefilter=self.prefilter) diff --git a/tests/test_zoom.py b/tests/test_zoom.py index 727565d368..f445895bfc 100644 --- a/tests/test_zoom.py +++ b/tests/test_zoom.py @@ -12,6 +12,8 @@ import unittest import numpy as np +import importlib + from scipy.ndimage import zoom as zoom_scipy from parameterized import parameterized @@ -35,15 +37,16 @@ def test_correct_results(self, zoom, order, mode, cval, prefilter, use_gpu, keep self.assertTrue(np.allclose(expected, zoomed)) @parameterized.expand([ - ("gpu_zoom", 0.6, 3, 'constant', 0, True) + ("gpu_zoom", 0.6, 1, 'constant', 0, True) ]) def test_gpu_zoom(self, _, zoom, order, mode, cval, prefilter): - zoom_fn = Zoom(zoom=zoom, order=order, mode=mode, cval=cval, - prefilter=prefilter, use_gpu=True, keep_size=False) - zoomed = zoom_fn(self.imt) - expected = zoom_scipy(self.imt, zoom=zoom, mode=mode, order=order, - cval=cval, prefilter=prefilter) - self.assertTrue(np.allclose(expected, zoomed)) + if importlib.util.find_spec('cupy'): + zoom_fn = Zoom(zoom=zoom, order=order, mode=mode, cval=cval, + prefilter=prefilter, use_gpu=True, keep_size=False) + zoomed = zoom_fn(self.imt) + expected = zoom_scipy(self.imt, zoom=zoom, mode=mode, order=order, + cval=cval, prefilter=prefilter) + self.assertTrue(np.allclose(expected, zoomed)) def test_keep_size(self): zoom_fn = Zoom(zoom=0.6, keep_size=True) From 2bda58672dbd5622f1ce6597100f70806c4d8aed Mon Sep 17 00:00:00 2001 From: Ben Murray Date: Fri, 6 Mar 2020 01:57:04 +0000 Subject: [PATCH 07/40] =?UTF-8?q?58-implement-transform-adaptor-2:=20Re-ad?= =?UTF-8?q?ding=20work=20via=20new=20branch=20as=20th=E2=80=A6=20(#143)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- monai/transforms/adaptors.py | 237 
+++++++++++++++++++++++++++++++++++ tests/test_adaptors.py | 164 ++++++++++++++++++++++++ 2 files changed, 401 insertions(+) create mode 100644 monai/transforms/adaptors.py create mode 100644 tests/test_adaptors.py diff --git a/monai/transforms/adaptors.py b/monai/transforms/adaptors.py new file mode 100644 index 0000000000..b0a8571574 --- /dev/null +++ b/monai/transforms/adaptors.py @@ -0,0 +1,237 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import monai + +""" +How to use the adaptor function + +The key to using 'adaptor' lies in understanding the function that want to +adapt. The 'inputs' and 'outputs' parameters take either strings, lists/tuples +of strings or a dictionary mapping strings, depending on call signature of the +function being called. + +The adaptor function is written to minimise the cognitive load on the caller. +There should be a minimal number of cases where the caller has to set anything +on the input parameter, and for functions that return a single value, it is +only necessary to name the dictionary keyword to which that value is assigned. + +Use of `outputs` + +`outputs` can take either a string, a list/tuple of string or a dict of string +to string, depending on what the transform being adapted returns: +. If the transform returns a single argument, then outputs can be supplied a + string that indicates what key to assign the return value to in the + dictionary +. 
If the transform returns a list/tuple of values, then outputs can be supplied + a list/tuple of the same length. The strings in outputs map the return value + at the corresponding position to a key in the dictionary +. If the transform returns a dictionary of values, then outputs must be supplied + a dictionary that maps keys in the function's return dictionary to the + dictionary being passed between functions + +Note, the caller is free to use a more complex way of specifying the outputs +parameter than is required. The following are synonymous and will be treated +identically: +``` + # single argument + adaptor(MyTransform(), 'image') + adaptor(MyTransform(), ['image']) + adaptor(MyTransform(), {'image': 'image'}) + + # multiple arguments + adaptor(MyTransform(), ['image', 'label']) + adaptor(MyTransform(), {'image': 'image', 'label': 'label'}) +``` + +Use of `inputs` + +`inputs` can usually be omitted when using `adaptor`. It is only required when a +the function's parameter names do not match the names in the dictionary that is +used to chain transform calls. + +``` +class MyTransform1: + ... + def __call__(image): + return '''do stuff to image''' + +class MyTransform2: + ... 
+ def __call__(img): + return '''do stuff to image''' + +d = {'image': i} + +Compose([ + adaptor(MyTransform1(), 'image'), + adaptor(MyTransform2(), 'image', {'img':'image'}) +]) +``` + +Inputs: +dictionary in: None | Name maps +params in (match): None | Name list | Name maps +params in (mismatch): Name maps +params & **kwargs (match) : None | Name maps +params & **kwargs (mismatch) : Name maps + +Outputs: +dictionary out: None | Name maps +list/tuple out: list/tuple +variable out: string + +""" + + +@monai.utils.export('monai.transforms') +def adaptor(function, outputs, inputs=None): + + def must_be_types_or_none(variable_name, variable, types): + if variable is not None: + if not isinstance(variable, types): + raise ValueError( + "'{}' must be None or {} but is {}".format( + variable_name, types, type(variable))) + + def must_be_types(variable_name, variable, types): + if not isinstance(variable, types): + raise ValueError( + "'{}' must be one of {} but is {}".format( + variable_name, types, type(variable))) + + def map_names(ditems, input_map): + return {input_map(k, k): v for k, v in ditems.items()} + + def map_only_names(ditems, input_map): + return {v: ditems[k] for k, v in input_map.items()} + + def _inner(ditems): + + sig = FunctionSignature(function) + + if sig.found_kwargs: + must_be_types_or_none('inputs', inputs, (dict,)) + # we just forward all arguments unless we have been provided an input map + if inputs is None: + dinputs = dict(ditems) + else: + # dict + dinputs = map_names(ditems, inputs) + + else: + # no **kwargs + # select only items from the method signature + dinputs = dict((k, v) for k, v in ditems.items() if k in sig.non_var_parameters) + must_be_types_or_none('inputs', inputs, (str, list, tuple, dict)) + if inputs is None: + pass + elif isinstance(inputs, str): + if len(sig.non_var_parameters) != 1: + raise ValueError("if 'inputs' is a string, function may only have a single non-variadic parameter") + dinputs = {inputs: ditems[inputs]} + 
elif isinstance(inputs, (list, tuple)): + dinputs = dict((k, dinputs[k]) for k in inputs) + else: + # dict + dinputs = map_only_names(ditems, inputs) + + ret = function(**dinputs) + + # now the mapping back to the output dictionary depends on outputs and what was returned from the function + op = outputs + if isinstance(ret, dict): + must_be_types_or_none('outputs', op, (dict,)) + if op is not None: + ret = {v: ret[k] for k, v in op.items()} + elif isinstance(ret, (list, tuple)): + if len(ret) == 1: + must_be_types('outputs', op, (str, list, tuple)) + else: + must_be_types('outputs', op, (list, tuple)) + + if isinstance(op, str): + op = [op] + + if len(ret) != len(outputs): + raise ValueError("'outputs' must have the same length as the number of elements that were returned") + + ret = dict((k, v) for k, v in zip(op, ret)) + else: + must_be_types('outputs', op, (str, list, tuple)) + if isinstance(op, (list, tuple)): + if len(op) != 1: + raise ValueError("'outputs' must be of length one if it is a list or tuple") + op = op[0] + ret = {op: ret} + + ditems = dict(ditems) + for k, v in ret.items(): + ditems[k] = v + + return ditems + + return _inner + + +@monai.utils.export('monai.transforms') +def apply_alias(fn, name_map): + + def _inner(data): + + # map names + pre_call = dict(data) + for _from, _to in name_map.items(): + pre_call[_to] = pre_call.pop(_from) + + # execute + post_call = fn(pre_call) + + # map names back + for _from, _to in name_map.items(): + post_call[_from] = post_call.pop(_to) + + return post_call + + return _inner + + +@monai.utils.export('monai.transforms') +def to_kwargs(fn): + def _inner(data): + return fn(**data) + + return _inner + + +class FunctionSignature: + def __init__(self, function): + import inspect + sfn = inspect.signature(function) + self.found_args = False + self.found_kwargs = False + self.defaults = {} + self.non_var_parameters = set() + for p in sfn.parameters.values(): + if p.kind is inspect.Parameter.VAR_POSITIONAL: + 
self.found_args = True + if p.kind is inspect.Parameter.VAR_KEYWORD: + self.found_kwargs = True + else: + self.non_var_parameters.add(p.name) + self.defaults[p.name] = p.default is not p.empty + + def __repr__(self): + s = " Date: Sun, 8 Mar 2020 16:39:17 +0000 Subject: [PATCH 08/40] update doc api deps (#158) --- docs/requirements.txt | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/docs/requirements.txt b/docs/requirements.txt index e3a16afce8..65d0917d7c 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,8 +1,11 @@ --f https://download.pytorch.org/whl/cpu/torch-1.4.0%2Bcpu-cp37-cp37m-linux_x86_64.whl +-f https://download.pytorch.org/whl/cpu/torch-1.4.0%2Bcpu-cp37-cp37m-linux_x86_64.whl torch>=1.4.0 pytorch-ignite==0.3.0 numpy nibabel +parameterized +scipy +scikit-image tensorboard commonmark==0.9.1 recommonmark==0.6.0 From 03bebf5e8fc0544ac0e6a9884c3dbae3b1768583 Mon Sep 17 00:00:00 2001 From: Mohammad Adil Date: Mon, 9 Mar 2020 03:20:27 -0700 Subject: [PATCH 09/40] 152-random-zoom (#153) --- monai/transforms/transforms.py | 56 +++++++++++++++++++++---- tests/test_random_zoom.py | 76 ++++++++++++++++++++++++++++++++++ 2 files changed, 125 insertions(+), 7 deletions(-) create mode 100644 tests/test_random_zoom.py diff --git a/monai/transforms/transforms.py b/monai/transforms/transforms.py index fb1bf468d6..0cef9dd783 100644 --- a/monai/transforms/transforms.py +++ b/monai/transforms/transforms.py @@ -441,10 +441,52 @@ def __call__(self, img): return data -# if __name__ == "__main__": -# img = np.array((1, 2, 3, 4)).reshape((1, 2, 2)) -# rotator = RandRotate90(prob=0.0, max_k=3, axes=(1, 2)) -# # rotator.set_random_state(1234) -# img_result = rotator(img) -# print(type(img)) -# print(img_result) +@export +class RandZoom(Randomizable): + """Randomly zooms input arrays with given probability within given zoom range. + + Args: + prob (float): Probability of zooming. + min_zoom (float or sequence): Min zoom factor. 
Can be float or sequence same size as image. + max_zoom (float or sequence): Max zoom factor. Can be float or sequence same size as image. + order (int): order of interpolation. Default=3. + mode ('reflect', 'constant', 'nearest', 'mirror', 'wrap'): Determines how input is + extended beyond boundaries. Default: 'constant'. + cval (scalar, optional): Value to fill past edges. Default is 0. + use_gpu (bool): Should use cpu or gpu. Uses cupyx which doesn't support order > 1 and modes + 'wrap' and 'reflect'. Defaults to cpu for these cases or if cupyx not found. + keep_size (bool): Should keep original size (pad if needed). + """ + + def __init__(self, prob=0.1, min_zoom=0.9, max_zoom=1.1, order=3, + mode='constant', cval=0, prefilter=True, + use_gpu=False, keep_size=False): + if hasattr(min_zoom, '__iter__') and \ + hasattr(max_zoom, '__iter__'): + assert len(min_zoom) == len(max_zoom), "min_zoom and max_zoom must have same length." + self.min_zoom = min_zoom + self.max_zoom = max_zoom + self.prob = prob + self.order = order + self.mode = mode + self.cval = cval + self.prefilter = prefilter + self.use_gpu = use_gpu + self.keep_size = keep_size + + self._do_transform = False + self._zoom = None + + def randomize(self): + self._do_transform = self.R.random_sample() < self.prob + if hasattr(self.min_zoom, '__iter__'): + self._zoom = (self.R.uniform(l, h) for l, h in zip(self.min_zoom, self.max_zoom)) + else: + self._zoom = self.R.uniform(self.min_zoom, self.max_zoom) + + def __call__(self, img): + self.randomize() + if not self._do_transform: + return img + zoomer = Zoom(self._zoom, self.order, self.mode, self.cval, self.prefilter, self.use_gpu, self.keep_size) + return zoomer(img) diff --git a/tests/test_random_zoom.py b/tests/test_random_zoom.py new file mode 100644 index 0000000000..d193a16dd2 --- /dev/null +++ b/tests/test_random_zoom.py @@ -0,0 +1,76 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may 
not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + +import numpy as np +import importlib + +from scipy.ndimage import zoom as zoom_scipy +from parameterized import parameterized + +from monai.transforms import RandZoom +from tests.utils import NumpyImageTestCase2D + + +class ZoomTest(NumpyImageTestCase2D): + + @parameterized.expand([ + (0.9, 1.1, 3, 'constant', 0, True, False, False), + ]) + def test_correct_results(self, min_zoom, max_zoom, order, mode, + cval, prefilter, use_gpu, keep_size): + random_zoom = RandZoom(prob=1.0, min_zoom=min_zoom, max_zoom=max_zoom, order=order, + mode=mode, cval=cval, prefilter=prefilter, use_gpu=use_gpu, + keep_size=keep_size) + random_zoom.set_random_state(234) + + zoomed = random_zoom(self.imt) + expected = zoom_scipy(self.imt, zoom=random_zoom._zoom, mode=mode, + order=order, cval=cval, prefilter=prefilter) + + self.assertTrue(np.allclose(expected, zoomed)) + + @parameterized.expand([ + (0.8, 1.2, 1, 'constant', 0, True) + ]) + def test_gpu_zoom(self, min_zoom, max_zoom, order, mode, cval, prefilter): + if importlib.util.find_spec('cupy'): + random_zoom = RandZoom( + prob=1.0, min_zoom=min_zoom, max_zoom=max_zoom, order=order, + mode=mode, cval=cval, prefilter=prefilter, use_gpu=True, + keep_size=False) + random_zoom.set_random_state(234) + + zoomed = random_zoom(self.imt) + expected = zoom_scipy(self.imt, zoom=random_zoom._zoom, mode=mode, order=order, + cval=cval, prefilter=prefilter) + + self.assertTrue(np.allclose(expected, zoomed)) + + def test_keep_size(self): + random_zoom = 
RandZoom(prob=1.0, min_zoom=0.6, + max_zoom=0.7, keep_size=True) + zoomed = random_zoom(self.imt) + self.assertTrue(np.array_equal(zoomed.shape, self.imt.shape)) + + @parameterized.expand([ + ("no_min_zoom", None, 1.1, 1, TypeError), + ("invalid_order", 0.9, 1.1 , 's', AssertionError) + ]) + def test_invalid_inputs(self, _, min_zoom, max_zoom, order, raises): + with self.assertRaises(raises): + random_zoom = RandZoom(prob=1.0, min_zoom=min_zoom, max_zoom=max_zoom, order=order) + zoomed = random_zoom(self.imt) + + +if __name__ == '__main__': + unittest.main() From 1e230ed78e250a876fb44f326dc57b3176652e27 Mon Sep 17 00:00:00 2001 From: Mohammad Adil Date: Mon, 9 Mar 2020 03:55:26 -0700 Subject: [PATCH 10/40] Adding RandomFlip. (#154) --- monai/transforms/transforms.py | 48 ++++++++++++++++++++++++++-------- tests/test_random_flip.py | 44 +++++++++++++++++++++++++++++++ 2 files changed, 81 insertions(+), 11 deletions(-) create mode 100644 tests/test_random_flip.py diff --git a/monai/transforms/transforms.py b/monai/transforms/transforms.py index 0cef9dd783..1098c23fab 100644 --- a/monai/transforms/transforms.py +++ b/monai/transforms/transforms.py @@ -108,12 +108,12 @@ class Resize: Args: order (int): Order of spline interpolation. Default=1. - mode (str): Points outside boundaries are filled according to given mode. + mode (str): Points outside boundaries are filled according to given mode. Options are 'constant', 'edge', 'symmetric', 'reflect', 'wrap'. cval (float): Used with mode 'constant', the value outside image boundaries. clip (bool): Wheter to clip range of output values after interpolation. Default: True. preserve_range (bool): Whether to keep original range of values. Default is True. - If False, input is converted according to conventions of img_as_float. See + If False, input is converted according to conventions of img_as_float. See https://scikit-image.org/docs/dev/user_guide/data_types.html. 
anti_aliasing (bool): Whether to apply a gaussian filter to image before down-scaling. Default is True. @@ -121,7 +121,7 @@ class Resize: """ def __init__(self, output_shape, order=1, mode='reflect', cval=0, - clip=True, preserve_range=True, + clip=True, preserve_range=True, anti_aliasing=True, anti_aliasing_sigma=None): assert isinstance(order, int), "order must be integer." self.output_shape = output_shape @@ -137,7 +137,7 @@ def __call__(self, img): return resize(img, self.output_shape, order=self.order, mode=self.mode, cval=self.cval, clip=self.clip, preserve_range=self.preserve_range, - anti_aliasing=self.anti_aliasing, + anti_aliasing=self.anti_aliasing, anti_aliasing_sigma=self.anti_aliasing_sigma) @@ -154,13 +154,13 @@ class Rotate: reshape (bool): If true, output shape is made same as input. Default: True. order (int): Order of spline interpolation. Range 0-5. Default: 1. This is different from scipy where default interpolation is 3. - mode (str): Points outside boundary filled according to this mode. Options are + mode (str): Points outside boundary filled according to this mode. Options are 'constant', 'nearest', 'reflect', 'wrap'. Default: 'constant'. cval (scalar): Values to fill outside boundary. Default: 0. prefiter (bool): Apply spline_filter before interpolation. Default: True. """ - def __init__(self, angle, axes=(1, 2), reshape=True, order=1, + def __init__(self, angle, axes=(1, 2), reshape=True, order=1, mode='constant', cval=0, prefilter=True): self.angle = angle self.reshape = reshape @@ -172,18 +172,18 @@ def __init__(self, angle, axes=(1, 2), reshape=True, order=1, def __call__(self, img): return scipy.ndimage.rotate(img, self.angle, self.axes, - reshape=self.reshape, order=self.order, - mode=self.mode, cval=self.cval, + reshape=self.reshape, order=self.order, + mode=self.mode, cval=self.cval, prefilter=self.prefilter) @export class Zoom: - """ Zooms a nd image. Uses scipy.ndimage.zoom or cupyx.scipy.ndimage.zoom in case of gpu. 
+ """ Zooms a nd image. Uses scipy.ndimage.zoom or cupyx.scipy.ndimage.zoom in case of gpu. For details, please see https://docs.scipy.org/doc/scipy/reference/generated/scipy.ndimage.zoom.html. Args: - zoom (float or sequence): The zoom factor along the axes. If a float, zoom is the same for each axis. + zoom (float or sequence): The zoom factor along the axes. If a float, zoom is the same for each axis. If a sequence, zoom should contain one value for each axis. order (int): order of interpolation. Default=3. mode (str): Determines how input is extended beyond boundaries. Default is 'constant'. @@ -441,6 +441,32 @@ def __call__(self, img): return data +@export +class RandomFlip(Randomizable): + """Randomly flips the image along axes. + + Args: + prob (float): Probability of flipping. + axes (None, int or tuple of ints): Axes along which to flip over. Default is None. + """ + + def __init__(self, prob=0.1, axis=None): + self.axis = axis + self.prob = prob + + self._do_transform = False + + def randomize(self): + self._do_transform = self.R.random_sample() < self.prob + + def __call__(self, img): + self.randomize() + if not self._do_transform: + return img + flipper = Flip(axis=self.axis) + return flipper(img) + + @export class RandZoom(Randomizable): """Randomly zooms input arrays with given probability within given zoom range. @@ -450,7 +476,7 @@ class RandZoom(Randomizable): min_zoom (float or sequence): Min zoom factor. Can be float or sequence same size as image. max_zoom (float or sequence): Max zoom factor. Can be float or sequence same size as image. order (int): order of interpolation. Default=3. - mode ('reflect', 'constant', 'nearest', 'mirror', 'wrap'): Determines how input is + mode ('reflect', 'constant', 'nearest', 'mirror', 'wrap'): Determines how input is extended beyond boundaries. Default: 'constant'. cval (scalar, optional): Value to fill past edges. Default is 0. use_gpu (bool): Should use cpu or gpu. 
Uses cupyx which doesn't support order > 1 and modes diff --git a/tests/test_random_flip.py b/tests/test_random_flip.py new file mode 100644 index 0000000000..ec95485f20 --- /dev/null +++ b/tests/test_random_flip.py @@ -0,0 +1,44 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + +import numpy as np +from parameterized import parameterized + +from monai.transforms import RandomFlip +from tests.utils import NumpyImageTestCase2D + + +class RandomFlipTest(NumpyImageTestCase2D): + + @parameterized.expand([ + ("wrong_axis", ['s', 1], TypeError), + ("not_numbers", 's', AssertionError) + ]) + def test_invalid_inputs(self, _, axis, raises): + with self.assertRaises(raises): + flip = RandomFlip(prob=1.0, axis=axis) + flip(self.imt) + + @parameterized.expand([ + ("no_axis", None), + ("one_axis", 1), + ("many_axis", [0, 1, 2]) + ]) + def test_correct_results(self, _, axis): + flip = RandomFlip(prob=1.0, axis=axis) + expected = np.flip(self.imt, axis) + self.assertTrue(np.allclose(expected, flip(self.imt))) + + +if __name__ == '__main__': + unittest.main() From 7304182ef4a7b468fa213bb91e0d76959c8d148d Mon Sep 17 00:00:00 2001 From: Wenqi Li Date: Mon, 9 Mar 2020 17:17:39 +0000 Subject: [PATCH 11/40] 112 combining spatial transforms (#131) * affine transforms * randomised affine and elastic deformation * add unit tests * add a 2D notebook demo * add speed demo --- examples/transform_speed.ipynb | 372 +++++++++++++++++++ 
examples/transforms_demo_2d.ipynb | 271 ++++++++++++++ monai/networks/layers/convutils.py | 12 + monai/networks/layers/simplelayers.py | 44 +++ monai/transforms/transforms.py | 505 +++++++++++++++++++++++++- monai/transforms/utils.py | 136 ++++++- tests/test_affine.py | 64 ++++ tests/test_affine_grid.py | 75 ++++ tests/test_create_grid_and_affine.py | 176 +++++++++ tests/test_gaussian_filter.py | 58 +++ tests/test_random_affine.py | 67 ++++ tests/test_random_affine_grid.py | 96 +++++ tests/test_random_deform_grid.py | 94 +++++ tests/test_random_elastic_2d.py | 66 ++++ tests/test_random_elastic_3d.py | 55 +++ tests/test_resampler.py | 75 ++++ 16 files changed, 2164 insertions(+), 2 deletions(-) create mode 100644 examples/transform_speed.ipynb create mode 100644 examples/transforms_demo_2d.ipynb create mode 100644 tests/test_affine.py create mode 100644 tests/test_affine_grid.py create mode 100644 tests/test_create_grid_and_affine.py create mode 100644 tests/test_gaussian_filter.py create mode 100644 tests/test_random_affine.py create mode 100644 tests/test_random_affine_grid.py create mode 100644 tests/test_random_deform_grid.py create mode 100644 tests/test_random_elastic_2d.py create mode 100644 tests/test_random_elastic_3d.py create mode 100644 tests/test_resampler.py diff --git a/examples/transform_speed.ipynb b/examples/transform_speed.ipynb new file mode 100644 index 0000000000..ca9779e5ba --- /dev/null +++ b/examples/transform_speed.ipynb @@ -0,0 +1,372 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Data loading pipeline examples\n", + "\n", + "The purpose of this notebook is to illustrate reading Nifti files and test speed of different methods." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "MONAI version: 0.0.1\n", + "Python version: 3.5.6 |Anaconda, Inc.| (default, Aug 26 2018, 16:30:03) [GCC 4.2.1 Compatible Clang 4.0.1 (tags/RELEASE_401/final)]\n", + "Numpy version: 1.18.1\n", + "Pytorch version: 1.4.0\n", + "Ignite version: 0.3.0\n" + ] + } + ], + "source": [ + "%matplotlib inline\n", + "\n", + "import os\n", + "import sys\n", + "from glob import glob\n", + "import tempfile\n", + "\n", + "import numpy as np\n", + "import nibabel as nib\n", + "\n", + "\n", + "import torch\n", + "from torch.utils.data import DataLoader\n", + "from torch.multiprocessing import Pool, Process, set_start_method\n", + "try:\n", + " set_start_method('spawn')\n", + "except RuntimeError:\n", + " pass\n", + "\n", + "sys.path.append('..') # assumes this is where MONAI is\n", + "\n", + "import monai\n", + "from monai.transforms.compose import Compose\n", + "from monai.data.nifti_reader import NiftiDataset\n", + "from monai.transforms import (AddChannel, Rescale, ToTensor, \n", + " UniformRandomPatch, Rotate, RandAffine)\n", + "\n", + "monai.config.print_config()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 0. Preparing input data (nifti images)\n", + "\n", + "Create a number of test Nifti files, 3d single channel images with spatial size (256, 256, 256) voxels." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "tempdir = tempfile.mkdtemp()\n", + "\n", + "for i in range(5):\n", + " im, seg = monai.data.synthetic.create_test_image_3d(256,256,256)\n", + " \n", + " n = nib.Nifti1Image(im, np.eye(4))\n", + " nib.save(n, os.path.join(tempdir, 'im%i.nii.gz'%i))\n", + " \n", + " n = nib.Nifti1Image(seg, np.eye(4))\n", + " nib.save(n, os.path.join(tempdir, 'seg%i.nii.gz'%i))" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "# prepare list of image names and segmentation names\n", + "images = sorted(glob(os.path.join(tempdir,'im*.nii.gz')))\n", + "segs = sorted(glob(os.path.join(tempdir,'seg*.nii.gz')))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 1. Test image loading with minimal preprocessing" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([3, 1, 256, 256, 256]) torch.Size([3, 1, 256, 256, 256])\n" + ] + } + ], + "source": [ + "imtrans = Compose([\n", + " AddChannel(),\n", + " ToTensor()\n", + "]) \n", + "\n", + "segtrans = Compose([\n", + " AddChannel(),\n", + " ToTensor()\n", + "]) \n", + " \n", + "ds = NiftiDataset(images, segs, transform=imtrans, seg_transform=segtrans)\n", + "loader = DataLoader(ds, batch_size=3, num_workers=8)\n", + "\n", + "im, seg = monai.utils.misc.first(loader)\n", + "print(im.shape, seg.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "5.11 s ± 207 ms per loop (mean ± std. dev. of 7 runs, 1 loop each)\n" + ] + } + ], + "source": [ + "%timeit data = next(iter(loader))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 2. 
Test image-patch loading with CPU multi-processing:\n", + "\n", + "- rotate (256, 256, 256)-voxel in the plane axes=(1, 2)\n", + "- extract random (64, 64, 64) patches\n", + "- implemented in MONAI using ` scipy.ndimage.rotate`" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([3, 1, 64, 64, 64]) torch.Size([3, 1, 64, 64, 64])\n" + ] + } + ], + "source": [ + "images = sorted(glob(os.path.join(tempdir,'im*.nii.gz')))\n", + "segs = sorted(glob(os.path.join(tempdir,'seg*.nii.gz')))\n", + "\n", + "imtrans = Compose([\n", + " Rescale(),\n", + " AddChannel(),\n", + " Rotate(angle=45.),\n", + " UniformRandomPatch((64, 64, 64)),\n", + " ToTensor()\n", + "]) \n", + "\n", + "segtrans = Compose([\n", + " AddChannel(),\n", + " Rotate(angle=45.),\n", + " UniformRandomPatch((64, 64, 64)),\n", + " ToTensor()\n", + "]) \n", + " \n", + "ds = NiftiDataset(images, segs, transform=imtrans, seg_transform=segtrans)\n", + "loader = DataLoader(ds, batch_size=3, num_workers=8, pin_memory=torch.cuda.is_available())\n", + "\n", + "im, seg = monai.utils.misc.first(loader)\n", + "print(im.shape, seg.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "10.3 s ± 175 ms per loop (mean ± std. dev. of 7 runs, 3 loops each)\n" + ] + } + ], + "source": [ + "%timeit -n 3 data = next(iter(loader))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "(the above results were based on a 2.9 GHz 6-Core Intel Core i9)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 3. 
Test image-patch loading with preprocessing on GPU:\n", + "\n", + "- random rotate (256, 256, 256)-voxel in the plane axes=(1, 2)\n", + "- extract random (64, 64, 64) patches\n", + "- implemented in MONAI using native pytorch resampling" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "torch.Size([3, 1, 64, 64, 64]) torch.Size([3, 1, 64, 64, 64])\n" + ] + } + ], + "source": [ + "images = sorted(glob(os.path.join(tempdir,'im*.nii.gz')))\n", + "segs = sorted(glob(os.path.join(tempdir,'seg*.nii.gz')))\n", + "\n", + "# same parameter with different interpolation mode for image and segmentation\n", + "rand_affine_img = RandAffine(prob=1.0, rotate_range=np.pi/4, translate_range=(96, 96, 96),\n", + " spatial_size=(64, 64, 64), mode='bilinear',\n", + " as_tensor_output=True, device=torch.device('cuda:0'))\n", + "rand_affine_seg = RandAffine(prob=1.0, rotate_range=np.pi/4, translate_range=(96, 96, 96),\n", + " spatial_size=(64, 64, 64), mode='nearest',\n", + " as_tensor_output=True, device=torch.device('cuda:0'))\n", + " \n", + "imtrans = Compose([\n", + " Rescale(),\n", + " AddChannel(),\n", + " rand_affine_img,\n", + "]) \n", + "\n", + "segtrans = Compose([\n", + " AddChannel(),\n", + " rand_affine_seg,\n", + "]) \n", + " \n", + "ds = NiftiDataset(images, segs, transform=imtrans, seg_transform=segtrans)\n", + "loader = DataLoader(ds, batch_size=3, num_workers=0)\n", + "\n", + "im, seg = monai.utils.misc.first(loader)\n", + "\n", + "print(im.shape, seg.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "1.42 s ± 1.72 ms per loop (mean ± std. dev. 
of 7 runs, 3 loops each)\n" + ] + } + ], + "source": [ + "%timeit -n 3 data = next(iter(loader))" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "TITAN Xp COLLECTORS EDITION\n", + "|===========================================================================|\n", + "| PyTorch CUDA memory summary, device ID 0 |\n", + "|---------------------------------------------------------------------------|\n", + "| CUDA OOMs: 0 | cudaMalloc retries: 0 |\n", + "|===========================================================================|\n", + "| Metric | Cur Usage | Peak Usage | Tot Alloc | Tot Freed |\n", + "|---------------------------------------------------------------------------|\n", + "| Allocated memory | 6144 KB | 156672 KB | 16680 MB | 16674 MB |\n", + "|---------------------------------------------------------------------------|\n", + "| Active memory | 6144 KB | 156672 KB | 16680 MB | 16674 MB |\n", + "|---------------------------------------------------------------------------|\n", + "| GPU reserved memory | 225280 KB | 225280 KB | 225280 KB | 0 B |\n", + "|---------------------------------------------------------------------------|\n", + "| Non-releasable memory | 14336 KB | 77824 KB | 11219 MB | 11205 MB |\n", + "|---------------------------------------------------------------------------|\n", + "| Allocations | 2 | 14 | 2222 | 2220 |\n", + "|---------------------------------------------------------------------------|\n", + "| Active allocs | 2 | 14 | 2222 | 2220 |\n", + "|---------------------------------------------------------------------------|\n", + "| GPU reserved segments | 8 | 8 | 8 | 0 |\n", + "|---------------------------------------------------------------------------|\n", + "| Non-releasable allocs | 1 | 6 | 1460 | 1459 |\n", + "|===========================================================================|\n", + "\n" + ] + } + ], + "source": [ 
+ "print(torch.cuda.get_device_name(0))\n", + "print(torch.cuda.memory_summary(0, abbreviated=True))" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [], + "source": [ + "!rm -rf {tempdir}" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.5.6" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/examples/transforms_demo_2d.ipynb b/examples/transforms_demo_2d.ipynb new file mode 100644 index 0000000000..ff4b5cea39 --- /dev/null +++ b/examples/transforms_demo_2d.ipynb @@ -0,0 +1,271 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# 2D image transformation demo\n", + "\n", + "This demo shows how to apply 2D transforms in MONAI.\n", + "Main features:\n", + " - Random elastic transforms implemented in native Pytorch\n", + " - Easy-to-use interfaces that are designed and implemented in the pythonic way\n", + " \n", + "Find out more in MONAI's wiki page: https://github.com/Project-MONAI/MONAI/wiki" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Before running this demo\n", + "**please download the GLAS (gland segmentation in histology images) challenge data from:**\n", + "https://warwick.ac.uk/fac/sci/dcs/research/tia/glascontest/download/\n", + "\n", + "The dataset used in this competition is provided for research purposes only. Commercial uses are not allowed.\n", + "\n", + "If you intend to publish research work that uses this dataset, you must cite our review paper to be published after the competition\n", + "\n", + "K. Sirinukunwattana, J. P. W. Pluim, H. Chen, X Qi, P. Heng, Y. Guo, L. Wang, B. J. Matuszewski, E. Bruni, U. 
Sanchez, A. Böhm, O. Ronneberger, B. Ben Cheikh, D. Racoceanu, P. Kainz, M. Pfeiffer, M. Urschler, D. R. J. Snead, N. M. Rajpoot, \"Gland Segmentation in Colon Histology Images: The GlaS Challenge Contest\" http://arxiv.org/abs/1603.00275 [Preprint]" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "MONAI version: 0.0.1\n", + "Python version: 3.6.9 |Anaconda, Inc.| (default, Jul 30 2019, 19:07:31) [GCC 7.3.0]\n", + "Numpy version: 1.18.1\n", + "Pytorch version: 1.4.0\n", + "Ignite version: 0.3.0\n" + ] + } + ], + "source": [ + "%matplotlib inline\n", + "\n", + "import sys\n", + "import torch\n", + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "from PIL import Image\n", + "\n", + "sys.path.append('..') # assumes this is where MONAI is\n", + "\n", + "import monai\n", + "from monai.transforms import Affine, Rand2DElastic\n", + "\n", + "monai.config.print_config()" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "img_name = './Warwick QU Dataset (Released 2016_07_08)/train_22.bmp'\n", + "seg_name = './Warwick QU Dataset (Released 2016_07_08)/train_22_anno.bmp'\n", + "im = np.array(Image.open(img_name))\n", + "seg = np.array(Image.open(seg_name))" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(522, 775, 3) (522, 775)\n" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXcAAACJCAYAAADXL3gjAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nOy9Z5ikZZ23fd6pcq7OOU33dPeknp7MDDPkIQgiKNGEruii7iqrG9x9Nrg+uIuPaQ2IihFEBCSHCczA5NgTO+fc1d2V853eD74+r6+LCjjA6Pb5qauPK9SH/3HW7/hfV90lmKbJAgsssMACf16Ib/cbWGCBBRZY4NyzIPcFFlhggT9DFuS+wAILLPBnyILcF1hggQX+DFmQ+wILLLDAnyELcl9ggQUW+DPkTZG7IAhbBUHoEQShXxCEv3sz9lhggbeDhdpe4E8F4VzfcxcEQQJ6gcuAceAIcItpmp3ndKMFFniLWajtBf6UeDOS+xqg3zTNQdM088DDwHVvwj4LLPBWs1DbC/zJIL8Ja5YDY7/xehxY+9uDBEH4CPARAKfF3t5Y3oxoMzAFmBnK4XVkSWgCbqsEukw8mUAVDYqDEpmUnXwui8spo2ZMsoaIaNXIJlS8DiemCIJokkplEAQT1TCQRQFNM5BlGdVQwRRx2ewosoxhiuRzWYLFPlQth5rV0fMqM6k4ZR4/U/E4FtmC02rBZbeSzuaIpFJYZQsKBpggiwKiLGFqJpIkYFEU0pqKqhsU+F1MhVPYBAFBEPAE3IiCQT5tIIgmuq6TyeeIZzL4rU5i+RwmIhbBRBYlsloeSRQRkDHMPJoBdsWChIjVKiNLFtRcFqvTjqmDoGjkcuD02BCtIqHpJPlMktJCL4gCkiIzOjGPauQoL/CTyyewKV4isTwOh4xVlNA0nVRWRcAgEPCQSWaIZ7MYpoFTcZLWkgScBaRys1hlF7pu4HQHEC15BElEkH5VWtGZKIagks/nKfCVMBefQhJEdF3A5/ZgaDpWi410MoeOSFZNIiHgtFoRBAUEAxMD2SqDKaDm8riCTkyLgCBKCBok5pIoFhHZKiOIEtlklkQyh98vIyl2RudGCafiwttR2xJSuwPPG9pMDzjf0DwAKZx6w3MXOLdULE2+KeuOn3aRJUXezL1qbb8Zcn9NmKZ5P3A/QLG12NzxyW8Ro4SfPLCdXMtZSo1WbrhlEZ+5/2XaRAt3P7SGnQ/LbLmtlciEjs+vcOzxAZpXFbDrzB5efOQIi5p8nAxluLB5CT0jvSTtGTySlZHYHC0llfTPHGdj1SZUMUdRoJULrwzy5YcO8OFV69g3PMBUROGjn2wnMR9HG9fJiAq2mhwvPPUDPvz+jzE0oJPvMdl+9BBui4GoCZQUefjCqScJUkmRJFNitXDXh65GmNX41ol9DA/N8OP7Pkdypp+Hfhzh6f7dvHvpDXj0ES6/ZT2jkxE8SYmAEkFvKIJUMc5GK8ODJ+l5tofW8iDPb8+gWqcZCCVw27NIqp2/ePd1RIw4JWXljJ08SWWrE0fZBTz+yBEe6/gSj9z3dYSAyD9/5FsIRXVUu85w5c3vo2htCb2H7fzwwQf54hevRbAnmdhhoOkmA+NjvHLgGd534Z30jx6nxFlMCguTqRmqNRsd84OE8xkuqW3n0GQX77hiCflAnOrm1YgVAcT0NO+96VFaKxr5zPsvZnYgQi4zRd6U2HYmyiLPKYo86zClUvYN/5TLr9pCPqZgTZUxOnyAJ4/3MJebZUNhFRdUN9OPnXqfQF5Ns3jTUmb6w4RGwiy+sBTv+go0QSIfSjC2c5KO/b08O/Yyi0ubWVnXynV31iA0FPLCv3Xz13s+/LbVtkcImGuFS97YQhGI37rudU/zPHQQzsVH2QLnhHufOXjO1/xMzTrKBThk7vydY96Mnvt64F9M07zi/3399wCmad7zu+Z4pALz04s/wV9cVsKJ0QqKvF2svvODnH52hIL
yBIGaWgZP9lDYUImcdXF473Fy4zr7Tg3T2lbNBlsx8/EYL/TuYzqho0gmxT4nw9E5/JITu01gOpxkiddCa2Ed5StqmJxzUlic4JWX01x7VQFu0aSvO03J6koUrxV/pZujDw3RNxWi2OXlVHiA2264Hl+5gJGPk5gFbTaPxjhnT+So2FhKNJLGawnxnUdGuPWGFaxoKkIu9HDo4X4OdB4lkTGpCzjIaRkal61n8XI3z//0CG0eJ96Elc+N7qXC7WZ5VQP7z3bwD399E5ZELencJLKYpKbFx5kdnVjKnBRcsIiXHh6j6+Q+brtqCZk5Ky+emKF5VZxyfxkt65fxrc/vwOuHLeuvYLB/Nxf/03Xk+xQOPPYEbevqcW6qQhItJHbNYrOW0X9ginDoKAFbGc5gFfncMMF6P3v2DiAJ8xS5l/Hjww8wkYLPvvMSUK5gKD3MTZ+u4uf3DjLWf4wbL99A8RITfa6ZgePj+JQ039mxi7rKehxT+2lctobOsJOiMg8XX7aIe7/+GDXBYkoDAV48+BS6abC8zkfHkEIqOY5fqWRluZO1K1ehJUw6hmdRzARymY+NFyxiajTK0VPzTA4dRRdLGMp18I7VVSy98d3kzoikxyLc9Mwt9MeH/mjdvaHa/mPk/ir8Idl7HnrtIpn8zIb/+3fZvfvf8Hta4A9z7/C5E/xnav6/Gjhk7iRuhl+1tt8Mucv86tDpEmCCXx063Wqa5tnfNaelaIl5e8H1tNqKWXtjKwe6Knjo+QfYWpfk/Z/8W0KJXoK1i3np5zH6Tm9HscZBKECLKOzLpnjgv25DceeYeKwfdTrH4VNHKAvUMBWdxGl3oKopGpobaP3IBaSjUaLdeTo65sgkemloXYmSneflQ1OsKiri6bHjfKB5E+lcGiknMxUfJpMSQVE4HZ5HkBT8VgdLiyX8TglPqZ9EysUrx47gDkJdYwOnzg5w8fo1vPDyAV4c6uWvLrqCwan9vPdzf42WSrF3ZxeDJ+dYWldCfbGb0hIJPezj4UM/JugtoH9U4q573omq57nv84+xcc1KbHMyviI7J07M4SiXKS9eRH5qgkRCpzd1ltZFi3HJZbgdOh3xAToOxrn+0kqmh2coC7rQXFtQY90Y4SnqliwlGk7TP9HL6cEQjkwOm2KC5EIkw0QygsUawIOIS9bxWfwYdhupTIRD8xPUBRaztqmckegsM/Hj3HDJlRx8+RR1RRXULQ4SnXPw0NHTXLlCJB5yEvS6aFi+nJn+LrI5Nx2xM0zN7uLqzZ9Fi0dx25wYosjpQ0cZCxtEjCwD8504RB8YAu9f0cKBgRCl/iqOT/SgyVEwnQgkUTQfBUE/cVVlhVsmWFLG2r9sY+hshn/48neZTZ4kYp3h9GzXuZD7667tcy333+TXon89Qof/v9RfjT8X0adeqHtd451bB9+kd/Ir/ljB/6bUf81bKncAQRCuAr4KSMADpml+4feNbw02mo/fvB1DOkxfaCnbjnyTTZVlLGtso3CVwXMP6Yj5fi64qJKK+s0c3zfCi4c6kJQMycQsd1x/NUXVHib3D/HIoSO0NV1EwKlxomeAd3+oHVeVE/IK2aECItFeAlaVwyMwNjSJ2+OiIuDkS09/kxvrL6VYNjk0e5aJlAOvw4YWP0NGq0C2+TDlGB6LjVAmS0wLsdK1li11HvxFBWjWCnTrWTLF1bjzCrKaQYiMEYuU0nI1PPeDURqXlCHn4UBnHHewmM7JKa7eUMPBA2e4cH0NqRkNQQyy/cR+KgtLGYz3szc0yD1br6Ltola++qNj5BITjE+G+eDWy/GYAqqYo6IxT3LEJC8HCFbmeOH5aRymzqKWGlrW1TL4yhBVK0uJZdwYkSiBjVXs/vZzdHaPMpkcQNVMKpxeKjxO3NZmRtJTTEUiZMUw9a4gmbQdp9PGvDpPvexhKJekqFBHVBdTV1fCLzqeZiqR5NOb3kFhfSU7DxwlkRC5cI0LVD/FfgdKaSO7dwzQZFXomDr
KfNTJus01CMkQ66+pJ5ZSyCdSnHopxOHeo4RSs4j42eAzkUU7E4kkhl1hYmqEssICToXTSEKOeu8qanx2lrdWUFgWJJSbJZ/2cLrzGLlEloHkBL+cf4yuyOA5aVS83tp+M+X+RvhDYv9t/pRE/3pl/vt4M0X/eiX/alL/NW+53F8v9a5682sr/4as1U9tsICoaGF4oJ/VdWU4Su1875cvYxVjfPx9F/Cvj88wEznJPVe8B1cwwFxUx+ETkMIZBCOLLShhE13YnCUkymQe2JFh0wW9ZPZ4cHsUFi0PMLiriyfO7KdKKOamz1+HOiLy6Pd+hsvq4mQmjCsPsqCzurISQwxSf3kx+VEY7RxlLjLNcDaJW7FyYG6cSiSKC6qZS3SzNzzJf93578zPzhJUvAiCg9YNGgdeCWHOyiSkNB1dE1xxpYcnX0ohWOJUFRlcuXkZRQ2LSE1n+cJXniTPDLe3rmBn5zFaK1ezdkM7uw93cmRoNxcvvxBZM9BUlQ2XlZPKFzMdGUZLugjlh/CYQRTdyrqPbwFznFynyJMP7ObiNet4/uhpMokZ9Nw8ZdYAWdmKoGWodRajallS+SzVy5oJzUxCNMKeoVGWVlUwM5ehJz9LgdNLPKGiGnGWlS1Gy+Y4Hj3G6rq1lEk1FCwp4Z8fvIePXfAe3EVJtndMcmjgMN++5RPsHe7idP9h7th0O/+565ssb72O53Y+xjf+5q8Ij+UZjI7T0X2alqJGtl7Wwo9+Mc7NawqYHO9GNnw83jPIrctW0nPmKDV15TzaJ9BW56ImWIo36CMaijCeijAXHmH90jb2nxknKycZmuxjZ+Z5Tod63pYu9Pki99cr9d/kfBb8uRT6b/NmJ3l4ddH/Ppn/Nr9P7m/bgepvktOzdM1pxLUwNQXVqFlw+fuZiEJj2VZa2x7l1us/w+grYS6ucREuKsfuN8mkVfp7JxB1lS1b28hn02Rn88xGEhhqCs+gyfiubzDRdwN+/yz37H2Zu+ZuYtU6D7u3D9He5MHlEzn96E7mMjJ9yXkKlCTlvgbec9dmpvuSpEoFguuKsLtEIvckEWWd+uAiAh43JScFTkQizERmGE6b/POGmxFCEprRw5df7qOlpJQvPp3hlpoy2ta3kY4FaL9zOaQkPn5HnLQ5Q/eOEQ4+NUVNk5PR7m584gwTOR3ZV8p73/M+CuuDPPaTH7NnZICucJoNMQ8zQhoplUCzypS2y7jj1Rx5eJpdB3bx2Y/cjVLsRLCmSeWLueszH2FV0ZV86+c/45KGejSHFZu9BV3M4fUXUFxXiWbVmR7uQZ1VcNd5EUoMUiMFrDZE/BUlzCX7WeOooj87TzyToMjuYN/oGSR0ajy1zIXmmZZy3B4sJpVRmIl5uOKT12Ixd3Db5ov48faTDEydYU1ZG8EGF/+59Esc6rifv7/janKayfKtRUSfz3Bh3XIEShjsTrH1wjr6JkQOTUn47HFuaNvKsZkEYuFa4m7IynvpChtMhP10HP8J/3b5B6hw+IhE/Lh8hTTVZUnHAywvKOTF/dvf7hJ/W/ljxP7r+W9U8P1f+e+iavjUuek/v5li//X6b7bgX4/IXy/nRXKvc1SY37/8ZwzE9rGtK4LFGOBTt2yiqHAjozGFbz32M+y6wX9+6GoefHSEVetTFNkvJNRzirGMk+VOK2Kxg9RcBMXrxWqKVLW6Gerq50RXkqxNp21ZI5US7Oh+nvKmG/HZFBZd5+T+u55gMB4nnYmx1F/M+sWtKN4glhqBIr+PeT1P8Uof2pxKx/e70NU87RtbsNRaiJ/ReOn4QY50dpLFpNReRdY+xgfu+AC15S4e+t6TNHtW8eXdD1BlkfmnL34YXbfBTIrvf/cY/fETJHQnRVYZhySgW0UUJFyCg8l8GFHTSAgCN9RsIGTJ8q6/u4wzu09x/08e4s6Wqzg0OUqdqxyn34aZFpg2ewjrOuvar6W1XeHz/3I/ecOCYVG51F2OxWGnpKKZZDZMbXMlE6F
pglVVOJdIhAcE4gcnCK4rxBv0MtU9hb/ci63ZgjqkMb57AL/PzdmzI8hqjtPzsySMDEHZQ31JJb1zE1T5i5jL2oin4zTXpygrW8HOjhNctn49xY1zlF2whqm9ab76nccod9hZtkhg6Zar+MI39vEXV7TRtNJJKhan9/A02ajJL86+jJQOU1++BYdk4LenWLelguLWRoYPS/QfPoXTq1FVILLjcDdBdzWhWIqWLYtZt3ERTz51FFcqw127/4rec9SWeb283cn9jxX7b/JaBf9qQv9dvFHRv9lifzXeTNH/ZGzfq/7/vZUX/N55531y10WT6kUTLPbcxsjcw3zm7kuQbU10H9IYPfY833vwDpRoMSd/1E+Z02T/Pg1BfRxEKxlT43BsAoutjPWVi1FD0xR7FYozMJc0qW9ZgTswypHDE8wVpvjKkUN8a8sN5Gd1dtz9DMemu1EAQ3NR6yjEWenDEbBhq/QxfiKEw6Ez8lQEfVqjrNZNxcZmxnoieEYznDo2SIPVy7jbx0RkjrYKHxZLI645gVe278WuF2MRRrjuwsu48YtXYNEN/tftD5KM9DAdSvHOxjWYOLFiYJg6R2dPUuAsRE1kyEkpPLZSHOkRook+bKaTvh+dIjbj4dLF6zDdMvVFrTSWV6FmB9gRGuepoQk+vekdLLnJy757hwm6YszH3GytvBCHJcuKT12EUSLRv2OQYE0NFgoIn5lAttWAM4Sv3o7T72e8K0RqUkWMx1AjdoKNThouricbztMsWDl7Yph5dYhSf4DO+RlOdA3jdmQpEaF1QytP7/wZQxMrWVTtREvGefHgET5adzUjj40z3Zemb7KDuz5+I4KrHqdQSC7+EgdezmPL+QguDVDTGMRfGGFfr0jToias1gSOpJdDw+Nkn/cjvriH/vkwHiXNwd4plpWaRMNF5OL9zGUTTL54kv59hYi+zZTX+nAVvLF75m8l6uWr/uAYZdvR17XmuRT7a+H1SP2357weyb8dYj/X/C6Zv9Zxf0j6cJ7I3SqKZFLVKK45PvuRtVir19D55HOIaZ2bv3QLZx+I8MLBp0jGO2kMVNMfm8Fh5HAJCklZZTYrs8kbJJ6ZZyg6znDCjygIFN29ieqAxsQ2meuuzSM4RP7JNkdDSwWxnaOMmwKKaeB1FXNt+XKSkoyuihhxk9RAhNBEAqeQw1HpInhtIaJiJ+PI4V0ikevRWNxegRkK0xIpxTRV+sIZGooEEuFRnho9w41l7Sy7pZW2xRa+86GdGLETpFUT0RCoD5aw4tJllNe5CY3E0SdFFkW8DE/HWbKyiW0ndtGbnEDFyp65KZrt9Yx3nGRRSTOX3nApk/1nKBQD5IM6TgpoT3nYeE0p9qJKcHsYHjzKQAxKRAU1E2Y+U8DZp6eouixI3doGkr0G3vogk/4JpECcKns5+cUGzoAFW5WFbD6OPeMgNHaWyRknxRWLkPMzqBo4rR42V7Wg5VX8hS4ipo5PtlAfqMKMJPnHr3yRh760jeGhFHd/8hY++e//St/+Rn7a8RR1JTlaCrcy0hMjrSUZHXqYm1Z+nO1nv067sIaa9g3sfDxFz4+fZW1BHUbeyva+PnSzG6fVRTQ8x9HIXhRRZmlwI0VKhnxMRxAzWEQbJXYJi7UZuyTykxP3sbhbQ81n3u4S/528Fqn/9tjXIvm3UuxvROq/a40/JPk/dbG/Vqm/1nWuuOp3f0HqvJC7JNp4alsnf/tIG1N7ytEfPMGy268gocd4+d4Mew89hUUQmcrlcGizeCUfVmuMRquP2XSI6kAJDeVB+qYHkbHgNE1e6e6h8ksp/qsnxGf+fiVD3Srz83HsygqGfnaUr+3eSVQV2bq2hcKZcqLZGIVWLzPdPdgLlpP3zHLhP6xAlQQEUUTHRDRNwiNxXEEHcpNG4lieidEwpjjCSDxBc4HJQz3H+eEtd7D0uJuNtzaw5wdD/ODwdpJignsu/hiBqSOs2HQlp3d14vRI5D0BSjfbGTmTZXF
hG+EnOtAqBNYJF1PadYyj0REMVWYyP4agi0wPjTPygx72TXdz7223cvpsD6MpkbPJEJ9YeR3ZHQInevejSh7WlLyDNp+VmDXN8vWLKWjy0bHjFCv/ahUZLUvWZ1JVVYaZlbHWWVF0E0HQcQcsZE9JWErslASXIKoQ7VGZPjqGkRQoK7ARSwrEDCejsyMkNZ1AWTk21zyLlrTz+b87wKLCEg4Ovkz2mTqu2/ARnur4CZuaN5GJ5LDaQ7TWXMbw6CRZX5TGojxbPv55Ro+l6PpplrGXnmdpeSMdo0ki2TNIFitqLk0iP0eRL01xppSImaTcqdJgaSaj5RC9CsfnTnNJ83pyeoThkSnW+xsZzMwxn4i93SX+qrwesb/avNeb5M8150Lqr7bmuerJn2veih78ueS8kPtoIk3d5u+TTT7PgV3drK6ZQhcbefLfuugbfYXT6TD1jjRriipR8gpr69uQk5PkBY0WRwN5wcRpsxAoXMNEcors3DSj+SSlso9Pv6eMkvp2UoPP41D9mILA3MQAuprDkCRGerO0rvFTVlpHPjLN2JSTljvKsFRVkppKo/gcqHoOq9WDZuQprfNw8KdnsYzD6NgwzgzkzDJc3jESqodCIc4T3xhm00VtnH1lisdP7+dda5ZTFKxiPDvMOz92JYlyC++61MXhn6ZpXCEzO2xS3liMGMxTu7IFU9SRzFk8IxaucpVxbH4cp1bCqVQYxevCZhh8YOmFnDo7S9GSRTS5izj53a/Q6CvkJIc59nI3SW2UBv+lrLizEL1pMbpVRc+btC5eimGRsDudzPdPMvH0MGs/thlVUEEyERQd1RSwBd3oShZbgQsjp2MdnyWfiOCrakDWRbIxEQMFt60YTc+TFezsH44RM0+weZmfk2cOIxmV3Hfo29zSfgU3bryJ1iaFp/d0saahkkRWQM3EaVvezlSsmBe/+DTXX7OOZ7c/yVgyzOGpQbyOQiLpORTFQr1FZm3T1aTjc6xaHEQUZXQ1SzZjIkkarqDEh6+5knBPEkVvoszrRdQ8HO0d5pnZn73dJf7feKNiP194M8T+m2u/muAXUvvr47x4nnuV38FNt/2YZx/oZNepn1P9zgY6Hwwhx6cwNSetgSI0yUezK8iS+pXUVQdRiorI5gVyqQyyzYWwyEnFxXU0rm2kZGkNsprjwFgnh3YkUEqzVK9bxnRompgJY7FZnDYfK90lbAjUkQ2lSE7kmY7pbP7H5bjqvUiiSFY1sIkG+V6BvJrDTBjMTEtUOl1Y8wbNi2qprCzFaZfQ5jNk9BgfaN7ClhsX0XTbMpJWkZyaxlm0hOV3NdF+aROqrOJyiYTzzUQS89gKbciyghzMIsgWJoeGkHVITKewW6ykM3ZaXE20VDZQ5fEwNzPEgYmznB0a5+p3b2bviZ08/uhL/PXGd8BMP6WYFMp27rjpRuoucHL8+RBH7zmMPq6jWezs/coQQiiNzSUTP5vBknRx6r5elAkFUbSAJqGYNlzVVpw+B7piQQnkcS52ghBES6UIjc5jESVcboO8oeJxWBBFg4aiChLhDDW1y7jrL9/N5YvL+adLPklp9Ub+4/EvMzueYNOGVp7p7mcoFEIJBile2UQsPkxLqYeUEcDtbsRqtRG0y2SzE6yvrKZAlFkUWEQmHqWkpAyL24m/0EU+ryPLIoUFCk3XLsYsr8BVXowjoDIaVtHNCTa3NSMJ0ttd4m8Kv+sD4s1oyZzP1yEXeHXOC7nHMkl6ux7n6lsb+dpPPs7IcDPLNxj0x0MoZo5WxcrnNr8Lb3MLpev9iA1Q2lJAS3MtqqzgrPXSsKkYpUrBV2bh1KFeLE43kXCUY5FBjn2jm9mJEDDP4jUmqmnDQYrh/AjPDh4iEc2hx2ZwWF3kJRuJ6Cy5vgxKTqb/x5OEnx1BDUNKS+K2Zpkbi+OrcmK1wHw8yZHRCTRLisX2Ggob5gmu9vP4J55h9c1VWCwKlS0uBNlDQhEYPp1ATRm4nXl8Hg/Jril
GjswQmTLZ/fXjBGQHfWf6UXI2RE8ZrmI/xXVVmBaD9cXVfLDlCopsNUylZuh44Syf+pu/ZGODkx5DYXh7D6o7yHvufw+FNzZjK7RTVN9COBGi/4WjHL9vgJbmYoZ+2cPRfzjG5PFZslkHpe2QU+bIR1RMU0Ufz5N6agwEA8Vqw8SDZHXjLStEcsqIUhwzA2eHe5lPDdM/M8ZMWCWWmaWmsYq7f/gFth09Q1W7l0iinmDewhc++Fle6YmipxzctPWdXPy3a1EqbJw+PsoFf1lG7UVLeOaR/WTVBG7JgUvQuL65neXl1Vxcuxqn3UKhz4NoaGBYOdE3R3FNEf5FFhqvXUtWcmH06MSmp5mdjbB2TR1xpYxXeg7g81nf7hL/s+LNTO1/rrzVqR3Ok7ZMWXURizZ/lFwmQWrWweAvjrFnqgcDGzkhRrHVg3+dAy3qxZAz2J0yoYk0pVcFkV6yEc7liE25CUcGMU6qmNkMRaKGIhnkjBjGaJQHD2R4/9aLyEwN89J4BwFnCZeXrKfe6yJrZJjP5Vn3kU1YnQ6M8CyjOyKU3FJD5W1O1HEPFOTwOdwMbeskOpzC12DBW+NCiMLSpIxdXE5VnUIo20jvL4+wY2gfrR1tfOqOO5jqGqNshRNbiUxp1k2qOwzeLGY2TGSmnKJWK6WrfJSuvgB1PkXZmRQzoyEcY2mkknKsTgtir4X0VAo1N0XQaRLNepidjqGnVcpqivjiD7/Fp9Zcz+ZP1HLnjd/kkg1NBLUy6pv6WHL7BQRsOrpTwoyqFF/QSNG6EyjLV5IfSGJv8GOmNBTZJDdhcOh7L7F+QxOzXfN4alUEyUHHI92U2kQE3KgWkxCzlDn8iJJJRyKCxzqOkq3CJ+X58PorMTIZhkYTpKUxUmkXwYIy1i4q5PtP76fEm+J6/wf5yvceZ3FpMXbPVs6+fAozrTOWHCbgSLC8aDF2xYlpyriDOSSxmMi8QesGK4ELF1Eab8QqRtE1F3kFrJZJ4nMGrZc0ER2epm90Ao+3kuv+9hq+/LkH3u4SR3S7ATASiTd1n7f6hswC5y/nhdznxzJ84kPfpdKvUqDbuKB9FfKcBkEvL490kbT6OPj8CWyuMiSphHQkw9IWldMAACAASURBVMpPVJCUdMrvrsX2y7N0PzSJTUqik2JNbRsHJ7vpmwtR4bSTms0hOvO4q2IMP2dQ7SmhxlmGkk3yw5l9qGojN12whO7dsyx/j4pY6sG7Mo/VKZGdtWEr9mDm0kz1z9D1izmWXFqKsrgUwS3jKohz7F472cwp/s9ffYTjT0QJn5kipqZ57r7dXH7VhTQsr6Dz6TPULmkgK2UgJNPdPc2KK5djWRoEqw9TFtFyMqbHQAiYeJwlSKVZ0l155kcSjI1MoOYiOBQP+WwOq10hkZ9Fd+RxXtxK6Y8CiF4R3DLu1ByPbuvmUyvfR2BzC2ZeIBHXEaNJwgMapsuJvXYNgm4gFfgwMgIWq51UIo+zyMqG911J2jdHUAyQTWY58IuDrF5fSLbTwuldZ8hmc8i2AEOZBLNJFS8KPiWImNfoHchzaiaMoZ9mJFjB9KRGzCzDeujbrGu7irVL7Sy94EpO7A/RGojwwfddi8tVwKxpYTzdxXQuTChpMi+P0mZqFDoDlFVV0Tc/wRX/sZSuV7L4JRsFlSZ5ZxkWSSM9pSGYVXib04jlfgSbhTVrFjO5bx7LEg/57NtX24IoIrrc//e16HafU8Grl696Sw9W/yen9j+lw1Q4T+QuoFNid9M59TLV4hKMHpNkbAJ0NzZRRNY1yEgUlEp4C5xECywopQ58eRumBoF1RZR16piCRGgsST6TJZtIkBMijKhpUralbGh08PCzaazjncyl8xTbJGwuJ5eLy2leXY23uQZdyjP04gwVW+pxVspISRFXoZVUPEW8I4rabbD180vpfDhJSWMCQXPjdoqMTRzjlo3Xc9PfPMK7m9tIZmGNbwmz6RD6XByhyk5BhR9BzxENJaluL2D
yiIp9sYOsCKJhIBqAFEGMa4iSQsamESz3IIhJFMNK5TWFaNMJRp4dZSgfx5kGV0E9siBy4Mcn+daT/5sPffATlN2zhmpfM1X0Ud7mJ9qv4feLJEIaNkXHV2xHshgobiumZkFTkkg2KyAhGXkM1UrOGcUclJnTp+h6MobHaSJbFmFYx6lcXMN07yCmI0V4LIpp1XCaCpF4lsWVlRSUa1RtaaW4bgkju3Q8q+PIhRVM97TSP/0cS7dcx//5+lepkFto9l+CrzjIwLYIWaaJqhroEuWuUtpKC6mvbCc81UM4lWHzPetJxTQCpRmkMgkBKxYphaFZsQVFtFgeay6AJoHVKtN3eAJbgYGkFSDbzr/n3yrbjv7JH6oucH5zXsg9nkvRP3OAzZVrSZoCWmwPM2mJoDiLXSpCT+do3bqCXEAjHxDJ9YUwchXE++L467zkwlGmBmaQZR3FYsHqcxDtS6AJVla4l1HbWodlqU7ipTFyZS2UhOMYJHAUN1PbUkEmPYdJkvy4ib1EYOBnR9FtEnVXNqKlYfCRMVwxlaqPNSHaLTS9H05/ewbFyHN2coLaonpWrC/hqlAtz5w6jNUcp4TFXLliJaMDc6hIeOoCSE1B1FAMWwG0Xl+NanNgFfLkTRPT0EhnBOyyjik7CPrSyHoOdcKJ7DawLHKhFppUzi5lY2iAs5Fx9o+GCP5ghsz8FNlZhU9d/9domR7agjZELuXsvhQNpQpq1E5RXZBMNIkTCJ+IIDklLD4dPW+SS+XJZ7OgwkxsjkChF2dAoef5YdbfXUdmuphtX9hOaYWH6OgUw8k0smgh6CpCECRKPAEsWp6Mx0vGM8fiJcV88992ctW6ZThECzkRGtt9vPD9UjZOjKLkajmZOoIRsFN5YCNaYphljXVMZOx4MxHCsXm8ziUkYv1IikgmPoKYasVmd9M9dha/WoFsyaKpEoaRJB8XkDw2fMV5zIwVpUikrL0QzdQwpLf/G9i/zblM729Fav/1YerbndqdWwffthszf2qpHc6TA1WLIHBZ1XrI54nGJqlwepjT4VCkB6cmcTalYLGnEdwuBGsOxetADufACeIMPPe1cTBTCLIFU3KiyTpFgQKaXV7K3QbNTV5Gdg5TVCFR5XCxuKiKvCkzMJumb7iXdNxg/Ewax1Iftgo31ZeVsmhTPePbRkjsGSEQsKJ5ZEb3TZGdNchnrdjdEk/s+SlHB45AXmPnrjnq/CUErCII1bRvWEXL9StZf0cbNS1uJs90cujbZ1h5zQo0pQBdkUidiaBpEhbRghbKMbUvjkUuxSjVMN0gGQ7Gh0ZRjTSyDEZKoaezG5tUhMtwUGlV6e7upK21ktjZ0xDTcQbypLMy49EpxiZforqlFDMokp2MYY7HiA7EmD45hi3lxoiJONx25jszDL4wQPzIHK6Im2P39XLg/mPYHU6mdoeZ2D6FV9GZGxxCknXm8xoH548yF8nRFTqN0xRoWFLCv+1+BGdhM9//7klOzpwgGomR9hRQUecnNHiW1RUKiWScf/ybW3lfwwba1ZV0d+3EXpDkhf2TnBnpIpmKUuBwE/TmSCWiSAEHNn85kkcimgxT37gYQQBD51fpXXExsyeJU9LQNCtZPYeRlzFydrJTObKRHOJ5EWH+O2/3PfUFXht/imKH8yS5O2Q7mYyFigI7TdXLmZ2fJaDIWDwBerOTlIg5IolVSF4/8ZE8sx1zjDwzjWiWYZNnWGT3YA9WoosJEBVi0RRRHRp9jSQVFWFphu/fc4B3tNejzgeYiITIZlJMCz0YchVltT5iehabKCEVesnraax2E92IEJ7x4fZasbXlYMpk5NkB5hN5wpNJaoPLaG5MYk/VM22JEZ2NEs1G8FvLabuyHktbAJusMNKfZY1nDYN9eQioKCrYhULSixKo8TSGrJCfn6O4wE5+JIG9WEJXA0x1TlFRVUgslCXZmWD0uTmM+STZ9DyJvEDV4ioSuomez/HE9mH0tIfyBokp4kzkhtl
avJ59Xz1CSW0JdquJze5hbnwcl+Kk4xe7EVIZklmd9ps34t1aDQMCwy8eoW1RLaePjlDk93B82yHyeRGv8qt77dv6T2JxOHBKXnQtwuqiWopLVQ6fdnPPdbdy8MgpZiem+Ny7bsdb7iWV15k8EkfxKjhaV/LMnlk2ze/DYSmiLGBg8zkJ1GUoO6WQNSpIWDQKlAI0VDz2Quy5OJrNSfJIFLFaQalyIRoGiL9K7YYq0HBtIYl4BpfHRJTdZEMRXH4n0bCO3ZQQ9POvLXMu0vtbmdr/p/KnKnY4T5K73WGjMiBzxd+vYumHVtF8wZXcsGEDKa2ApBamSDJR5xOcefoEkROT5MMhFEPETI2j5hXcwQA5OwiKkyO9vbxwphvRMJhP5gjPynzuzp188Zb3srSoiuXVBqFUHtNeSFaNkJjJkZMkKlv85BQbplPAtEukxtIoaT8uw4pgKhRWV+Fa7EbORti560VePLmdU5O9WIVL+FH/MPs7ejg4NEyhq4LVNXUU1EiY+Sz5+QQ20U3WruKsUdE0MDDIqSkki4jilzDUBIOveIj0RwgNhQmfiJBJRDEkyJsictJk8qlOMmODhMIZTMOCaqZoqlmOiMJnn/oRO3qeZ806LwcPddBQUoWQc9MT7UOfzxB0iqTEBFOTQxRXBZFc4Mg7ES0OGpcsgWIRwVCZ7p2k+ppGbBXldHX2MDE8TPtfXkuwYhFpn4cT0XlEJUg8naXF48Zhc3Fx+4W8cFJnKH4fNl1lUZOVtmWr+M6JTkb6FYa7R7FXBjgzLDPWtZOeqe8yn57E8Oh05bIM940zPFTM4fFBMkIcNWelf6aPkYkELrefnO7ESBn0PnEK7XQaNamRieYxMyaZqIGZV1C1HK4CDxqgG0lEl4WTLw1R2VoCIpiG8XaX+FvGuZTxb671Vrdkftd+b6Vsz+Ver+VZMOea80LuBjoVy2sYO5ogNysSnw6wdKNIeUELDsmOw9VIqC+PnI2iaGl0i0Y4OweyiE2M0N3dy/x8mJd7u5m0OljWWk0oDcuWlVEamOPymgz3Pv4IanM9puymtaaQxQEfhRY3/qIs39l1lnTERXR8Bn1aY3rvPD1PjZOfiSOZc0zs6yCXyzN3MslLZxI4ZYXlxZWsKqpjLHaSK0oCmE4BUUsRT4yyrWsPX/vEk8wdiyI73OQiGsJUAGeliGFoGGIeUdEQJA3DNMgJsPx2hZrrCijcUox7dRlmTCQzGUV2GWTnE7isNiTFSl41yYs26txl7HhpO9fcvIUiagnKWZ58ah/v2rQOh0OgxmUnZOr0hEY5vXcAMaRSVuohNpEnNpFmfHiCxKjGxJkZZvcMMPTLecb2jfHK987y3P2/pMTrxaJbyNk0pmKzjM6HcPpcWBUVq2IQlK18+F3tbDs7wcY6Fx+4fAvBNT2QLKZ9eYJPbymhqtRN64p1nH32GLNDkzRUCnz9F/dw1ZffS6i+ilPzArbAKKH+CLdeeilFtkLymQhup4IhCWjiNEFPKZmMSc0iF4lcAikjkJ5LkUxmEfM5BHsWIWMBLQ2ShJpRCE/NU1rrAFVhtn8OLXd+yv3X1yPfCL8vtf9PT9vngj/lxP5rzou2jLXITssdDbzytTEKJocRFZX4fDGFtgKmrAZOLUxOUnhhMMaW9rV0DxzBr6QpKCnHUJyMTfQyNDzKxW3FWOdGGZk2uPMdm3lq93GmwnEe0qcpFMqJd6R4ePchVtaVcMvDN3P2By/z8H27WBZo5d+/9zi3bNxI33PHSaaLKfKYCJYsullAJKvzxD/uJsMUneOzNDqrWVRWxtmZCbwZhUvu3Ir60xj3j5wlJwhscjez1u/h1I8O0f1gB3afFeuiOqbNGO7WYhTRjqmImDmd5PM6x/ccY8t/XIKEE11PY4vamdg7QUEixak9g5AVUNUoBb4qJvUIffMH8LqaWBts4Cfff4YPX1TJynd+CLnch+BwsvPz+7joija2vThCaUBHLHbzi/0HkdNZPE4
n5YUNZNMqDmme6fEZBvpSmJILRdIQ5QB5NcGOqWH2Pn6GD3jdOESBTFbGsNqptjbyQvwQR8Z7sAw2cMmFFRQ1e7G1eLnzhl/y0fek6djlRhHAoexjf8dLrChoR3ZYEWQP4z+bIuwbo31pHdufOMhEv8l/PlhDOmyhbCxEpW8pObmTmfk0z8+EWFORRTRE8FfgyClM7xpA0W1MW2M0tZcQm01i9amkJ7L4i4IYaQ0FH4KWYM+/HEBMRtFT2ttd4ueU19KOKbt3/xu+834+fDj8vufL/Fq85/pw9c0W+nsrL3hLv8x0XshdwATZj+iPooajlLfUIjkN2uvsHB6NU2BpYkefSjQbZm/fcZLZF9G0RcyPzlHryTBvRogbEQ73y6xzOVjXWkFovg/RGWTLEjdLJstYv7KeyVODkDvArp4W2r/Wx8qPrOPqQTvTs0ep1pI8e+IlIvlxVniXMJKQGU6GWF3WwIHhw7jEQrYsvoaA8BRXXl7Gs6/o3PXTW5g7nCUuZ1h38yp+dPIlLrevpCpQTTKbo6G4HG9bIUqhiewz0V6U0a8CPZvHZgctZeDakKS6z092fhhyldjCGv07OrHIIinNSVmJk1hII1joJTMfZTzei929mgohwTs/cwn7XpigbaObmZTMx67/Zx76+j+yp2sXje13c9udQTp3zDE12otPs+KyQUpzsKPnNOUuCafpJ67GcSheJiKnKLSWomohljsDtHgUvK4Coke7sBge6ooV2v/XNaAY3By+jOhQEmk6gd0ZpHPvBJ6DGVYuqaTG6yNkZCiSQmgJO2sKN9C4KUhTtcHEcZl/f2kvDe4ROkN+1pVKXHz5RtJdCaK6HUdlCbaIydmRNGnVYCaX5um+U3zqoosY7phDlMNUVLYiXupCSahs/+oOVl/TylyBg/KyAizk6NkzRXWrn8SYQKETDLEIUzv/bsz8mtd7JfL19Nlfr+DPB6m/Hn5Txn+M6N/KlP5WCv68kDsm5MwU3sQUJVUFxCOTVJXV4g84WFd0G00rJ5CztTy3Z45cNE5aq2VCUxHNYWZTKmnZQsDup1gQKWraiCZM8+VtO7CYNi5c1c6LR2KIQoplW2q5e/0XiZ3uZXjXUVZeUUNlrQ9nvom4e4JZKYJNk1ns9TChx1CjOqVikBXeChRKqCqfpHj9Fj52/yM89PC/oztN9v68g3fcu5UHPrSLS6Q1TGkmY0N9XFhajhpQyTt1XJU+zEiW8dF9FP4giPdiN9SKpFJJPIqbRR9dTfTlUTqPDWFJDeC0FmCpDjKyv5d8LEtOFLHZFb7TtZ8Kl591zTU4LBkGk508efwE9Us2kplVed9mN2Z4CFEXGOk6QOknNrKuopBUn5vT20NE8tMcG+/HQwKvWYpgSeNCx1AzlFoVKl12crqL+8Z6CFgyvLNwNQdGTnBB61pqr2lHDxoYqoG1WiHxRJJEbgqXkUUghdVt4pEVth3t4rED+7lxdTm3f/mzGPM5cCqcfPhR6pct5ZPpG8lEh7j9XVaq37OS7ECYB//3w9xy+6082nGModg+Pt12EzPTcabmd+J2FfLY0T7aiitIppy0bpjHtBQxl4jj9biJnZ4gZbdhK4oh5iF0fAS104Hb50WSFVKpHLp2fif31yr4N3KA+pvC/n2iP5/E/kaeCvl6Rf/n0Hb5Q5wXv8S0tHqZ+c3V/0TV0hJCB+cwXE6QDWbmDU4PHOVda0qRqjfz0vP7mEiOMxIfx6JDtdvLcC6JHw9LqirIpC2cSE7T5CqgpbGM2oYqZkZOkY5naW2/mIolIp//+y+R0H2cjM/yy91fZ+75Pfzr116k2rmCW2+ooG6DlcSASceBME5XmomZLFdsqMK61MVVd/wXRVIxf/u+m7EW2/jh/a+wubGMiz+2gYc+8wS9qR7OxGe4oKCZq1vW4motRFQEtEwWLSwSn5/GbvVRvMHLXELDkA3kiEF0OIWYyiIIOpZiO8GiEjQjQ9/REfRwnJ/PnaTU0Yg/Eee
jH3of23Zu553fW88PPz7K6tU59GgFew4c464vbuXZ7+5hcmySa25Yj5HN8/ijfRwKnaa8oIK733U1331iG8tL63mpt4cSv4N3rKwhUORhaipGVrAyNNlHky3Ao6e3UVXt5p0XX0Hx8mJymo+JyQR6yMY3fr6d6cQJyh12PnbVJfRGK/nZzp/z/7D33mFyneXh9n1m5kzvuzO7M9t70a5677Ikd8sNGxfATsAm7YshkABJCIRAIISYACaAfwQwxTa2ce+WrS6rraTdlbS9zdaZ2el95pw53x/G1+cfcZGLLCVf7uvaa/Y6df545r6e87zP+56bNtdy5SevQ9FrkPUmlEwcZRLyGi0J/xyP/Of9nAkP8vmVt6Oqa+TkcBqTosJZmKLbP8Ylm7dwoOsppIiWEtGCq1TP0yP91DkbmAjNoeRD3Lj+SmzLqgifDhLsH8PhtJEnR7m7AlOFgbkzKSbHzpBEptRowuOt5Kpnb2cwMXxeWmZs6lJltXnH2x7zxq6ZN5P8hdAy+WENqF6oy/1+0HxQ2fsll8/T3Z2/cN/EVIgkMGVc7H2ynxKNhBKzMpeCQHoandqKWCti9ZhYt6KcY/0OnFo7JmGWZTVr0HtNFEMZ9k9N4dHHKCmzs769lumElaGxIvOjAmuWNPPM6CBz3Ta++q2/5dXHnkezM4BR0VPe4aJB72BRi0D1siqKczJ7jsfon3gRreMSdnx+DYWKUjL+NN/7zPXYrIuwVrjxPfMUNyxpQdbY+NE/vYxYTJOWjHh1OlqMHoKJEKkeDalsBG0R8vk0Jp2FYpWKjCRiq9Bj1unwzUSwOdOozHZCkyE0aRWJ+Qhas52G9eUcezyNOpUnUDjMssZt7H71cS77+Q0gWVhUOUnbpk386LuP88uRB6j/uYlXzviwaxTQOYlMxbj5M+u4ZMLAGd8ZjHYNV11Vzr7nu8kWurEbWtDV3Ug+d4qcbOHl7gH6Qz6injm+8vlvM7BvFv8ZP93751Dl+ylvrsCd8XL3n25kYGAJj+16hideHuWKTTX85YYV1Gxez9En45zumWc+/gotDZvQyCpWLC3H09KIUaqg01jBdw/tofz0GQbmT2IUbLj0NhRTGU++uBO9UMOh2CPcvvpOSot5trvbOBaJkMnFseisHOmfoC0pUcwXEcU8yUQMrcXCzOwMubE88XQBi1FPKlFgz9w86fFeMoX8+Q7xs+ZCEPn54lyJXdztOavjCptnP5D7bTv15i2uOzv+vwH0D6M8c0Fk7k2mGuVbLX9HdakZdFZ2D4wzlZxGBuJyiI+01NJx8VocFSs59ttn0aky9PjTHJ3tp6h1s642R+9ghh2Ll1GCnuHYIX7Z08tP/+zPGTgiMx7zkcOKkjhDY+0iFMFENj/PZE7NxZvqqLmolUBPgty8j3S6yMPPHiOc8XHp8pv58d5/xWKo49LKxSzZfgUVngnCA2qS2TlePRFhUV09Y/6n2T+YZbHNSXfMhwYLXq2TulIb/myUcmMVbm2Guqoy8hoHlvUW9E4nmqLE4ONnUKMhOh9DJSkUpQwWg4PKTY2E8gmmd43RPdNPg9GIqaaJi+/eQUEbQB03M/NElInxeb7/6A/QGA1c23gtfVE//eGXWepdzaGRw3xyzdWoTSL3v/oIK9rquPKiK4n1g6zK8YUH78Op7ueev/8Vg/uH+PnhF7ACbZ5G/MkUK1qq+d2xPdRZvaDyctV1a5g91MVLE71sal/PhosXEiNG2WoDj/7Ni5CH7rCfkcgIf7bhUkZmp9i8eRGquJpnDwxxOjBEPOPHbhaRUyKNFY10zw1gN1QiF7XYDDJyosDaumauuraT7977FJ+4aCkHj43ji08Sy5Zhc7oIJieQMGJWZDrqbWTjZQzHR6kzlXMseBi9Vk+ZXkdIsuGPx3h8/h5mcv7/Fpn72ZC+dtXb7jc+dvhdXe9sONeZ+7kQ+9lK/Q95L5J/K6G/FW8UPbz3TP7tMvcLQu61xmrldtef0WK341d
KCUsj6FRpbGoPkdhJ/vRPrse8eRGHvvkqy5eV82KfjhcO/Jw7L76C7z76OJc1VHHl9u2INgPD+/qYyxTwheIcCo6w1FbKS2NHKTVW0mivYD49QyCrUGbREpLU3LJgDVaXCvcCC5+951EuXbARTSKHVpZYu6iOjDaDkkvyo1e6GM9nuPfrf4Ipp0VTFefbf/UDPvPtv8eqsZBNjLLz7h76Z06T0ImY1HoMiprxRBC5qGZxzUpOz07wsWVL0VmLJCOQkLI4BROJTJh8voDJakSn0pOPzWNv7ODxQztJ5IKsLG1l+baLUNVJuJcZ0CQtHL7vKJJiI+6b4ODkQfRWIzsW3oqiCTA+eYRs2oEJhcFQhLVL13B6YoCLVnsITEu01rXxs5d60MknGI45iCXn+dzH7iQzP8zRY12kUjCUDLLc5abcWkMuGeHRmePUGSswqgykNBlySpYlpQ143NVc9K3NdD/STWZqmoHBGGdGT3HL1Vvp+OgqsgE9L/74GZ4/dYrmcoFwJMhVS1bQumU1U74gYycn0OcK7JrysaDcwU7fcRwso8oi02C3UVfiRXAYmR4bpmjUMBwUseuCDM4HySsS7U4rIzkLFklmcbOd3d0nkEU1C8xecoqKOXmGX049yGDMd0HK/d2I/Z2k/od80JI/V4K/kMT+Rs5G8u9W6m/kDwX/Rs5G9h+vWve2L8i+IOTeYKlTVqvXoNWaKTN7MWhEVjbCisWXoFTHMa9dzMgvVSjJU7Rf1Eghr+X4M/upbbJilgUSIRue6yv5yVdephCZQKW1MxQ4jpxRs7W2nWB8gpLKFl7uG6YMHQajgsOmZj5npT86QYt5IX/zD/V85+4X+Mgll7LzmVMYSp1kC0Xu+MoWXr13D+WtDdhXJSnkCrg7F3H9xi+xpf02Fi/JoI2pWHnXQuZ3zfCP330MWyFGucZElcPJ8UQUJVkk71ChD+foqGqn1OEiGVdRoo+A4GQwMUYgI6HJvTYJZz4fprqyisl4EFMizbal2/Guc+PZLpI75uLPvvNjMoFjfO+Oz2JztSGUxZDQY7W76dpznNFTSTas8pAUyohODnNoKIxK0fDRi2Rc29dz5J49jE878ee6OeYfRINAqb4aszrLcm81FaUejp5+FatJTxYrgUSW2YiPBnMzY4Ve5tISbt0i0oUo9VY929qrWPxPVzB5Ms3Uy9PsOXqIP//7q7AsreDrVz3MlPQya1ylmEw1uPRa7M5q3GvaUdmTlIpuXvze41gcVio6XERH5nno4DRhzTjbXG1ojWEOz2e4qKaJRZ2tRFPDhIZNjIUi5PMxXGUikfwsAX+GLU3LKEoQzqmxlSoI+TTFoos7j9/Fydm+C07u51Lsr3OhC/5CFfvrvJ3g34/YX+ftBH82XPByr7dWKdcat2Az1OO1y2xYuxKVtoFUZJDqyxeQOOrkvmeeo8Qxw62Xt9O1X8OWW5rJmGWmdkVR64u88PhBuuI+jBorlzXVEvTHqPK0sXxjA73HDpL0q9CoC+RyEhFJTSgXxGTWYSj18uzxET6xaTGX7FhB/8l5KhdYUQo65gcOYKltRPH7cN26jMQo7HzgFLWZAA8N/BKXuIxyWweXrS7jwQP9CKYFNEh7OBFIsEkSyGtL6FHyFPImjk6f4fNX3sz3X36OJms5avx4Na0MZ+cZlIbRSHpMWTN1Li3JVBZJX8SlynHFpu3Ur28mELKw97kXcEla9gQO0ObZyC13udn5syB19S7GAkaeOfYSvvg+PrfuJlqX1zJ6cgiruwRvh50jz+8i67yV48/vZHVTJacmB9m6pAljzUJy4VHkpIzF6yIWSGK0SISHZBRNElkxkUwmcNttqDXTzEfUHPH3onJOsmXptZTam3nu6T3MJ5LIpjkUbScLjcOs+tRfMvHEQU4N+eiNTHBpawdtKxuJT0aY6s/QfHEnjgU6bBkz+3/xMnULazA31jF2yMeZvjnqlpXT05NnRbWD+vUhTLUr+PpXfsNlLW2
gyuILhvGUmlmyoQGz1kPXoTke3vsCg/FePrVFYtXarxM4NER5lciKn9zGhkC3CAAAIABJREFUVGbigpD7e1ly4L2K/XUu1DLNhS7213krwX8Qcof3J/i3k/sFMaBq1hu49fqbaesE7eYN5I7GmZ4Psq/Xy+DXf8s17dvIpwO8Gp3hWtnE+j/ezlRQ4chPD7BlnZO//fErqLUGSpQi+UIEJVaGXimlfmMZSn0BZcJJXbWNqjodwdMx8imBvgmF8flZ1AyztCrDTECm//A+tIUWkroCzz52hN5wlH9e7GJqTst3b/o3Ll+xGCFdy2/O9JNJrefqRauwt1Tg2Khm9dEoFdIEM/qtNHh7mVBlsUgVlEcjtDVYUUthjvSfRk2cRF6k3uzmSLgHRVVEm5UokKAgZlju2cLJoI8dbWsYUEU46U9i7J8kkchy1RUXMRnJ8ql4GxUfKWf/gQTjgTkixPjO7u/ztS03EzxTisaY4x9+FuS2xQv47f4T6A7s4S8/9U3+8dt3k2SG4bkkrU0rqdrWQGQ2jai3Y3AX0Rm1VGndRLrToAsgq1ToLXmaLm5C7zaQHnCR6x3h0vqtNG1dT3R4mKODIu1rlrP/lQNoaeCaK2tpuu1qZp7wY4hpqJZNYKsmF8+Qn05hdtQhF3uQ56LE0wamglE85XUU1RqiQwFy6VmWd7QwNDpAOtbP8ISD0fEMC6+q4dTsK0jxLv6fW/6OusZhNA2lqM0eDj11HLPKxA1rm0gmk3hMC3ngvlc5FOjh85s2UOY0nLfYVorF97WGzPsV+7mi8bOH3rPg/7tI/Y3X/kPBf1BiP5dcEHLXOg10fHobsZFepN0C/twUf/Vv3+S2qhoi4QZ6Dj/E1k13sGdkHG2LlVy1nvlnQjR57YT0Li5Z2MrR0SnsKiudZRXoVTpc5U5iIxmGT/ixqMG5pZpCtYCj3EHPA8M0tyzCUWngkUNHCQlp1huH6epvZ8V1fj752d/ytT+/gdsubWbo4SkmTnZxddV6pkdOMJ05zJKaag7N5jG5iqjiMr0PT/BiWmKdx80Lk73c/ulV1DQa0VjrKfR3M/VyhK1LljMxkaCkupnJZIJFravp33Mv9eWVuDSNlFpsOFwmAtkMtcVaNF41P3v0BW5uWUlJaTOPPPtjVk9KjARO4XUtJvsrgcD4KKf9QyzLuVig0bP3+BnMuiJTBwRmojsRDX/NZctLsGtuIZEO0VSp0GBdjUmoRTRm6XnsNFWNLlQlRUw2F1qbGpVbj02nY7p3FM9iBxXXtgIm9LYiudk0aq2BTDzP/HgvE6eS7Dr6M9q8C6i3uSl16Gm+fR0jv/Vz4nfdnAiE0atDdJRUIeUURk/PYbdLrFixkJ5+H4svyiDYMwR8eoQJUAsS1dVt5BQfyb4R0kqIqYQfnd5BrnuIL6z4BmpLP77EGYTULM8+P4RJeJiP3fAnGC0p/COVlBsNvNo7zXCoj8n0CR45PEoqeWEuP/BOfFBiT1+76pxk72+U9B+K/u0EPvSLZW+5r+n2rnf9Pc6l2P87c0HIfWY8xM8+/e/ceMNSXFs0dP3UhCW7hKIEtRXgKdroaEiRis2SO+Ng3mfCXOzhpZMFfMd3U2/y8icXX0QqWSSbi6I2uai6sQxtqZ4F7nYEJc7kI340fh0pXxKHU0MiHKS8pI6ORgeHJno4MDzL17c3cPKRIVaa27BmnJz8kY9oWmDvdI52+zy3/OTTCFoVJ/9zJyOzcbr6u2iol3jmZB9qTYJnx0XEdITIU83YV5lJKYNE+3zYPdU80z3Nq1Oz/O2aTmzxNDZDkDt3XE3jLeu4/7k5hvvmueOmJp667yhWk4PfvDjEMtNifIFZnnr4KSZj0HPsFbaUNTEvhVD6fQzPTFCIB2lc1ITFuga3zsuDp49xSXULX/jh33Lg354mLbp5tvtJElo9bZVNVFd40NY70aq1lKvizB5NIEcUDFY9OmcZeo2Gkd4AXq8bU4sbQWVEkGXiMzAzBXJ
SRpLjzB6KImUtmNQL6J0ZZHWpnTK9mx/8yUFWm2aQ1RPU2VV87LZbUbsKjO+O4x8boECBvGMvFZvWo17swajJULtcxoOeU78cQeuQKKtvw1pSjWnPIK9MPcJSm4Oiyk5IPM6P97zCZe5qrtu6me1Ns+SkStyWNKcGY5w4tI/Opmo8FQZWSE2YTRYane1oC73nO8TfNRdqxv5WnE02/nZSf7Nj3ovoPww+6Kx926nE+669vxkXhNxNoh5v5WK+/etRjL96iWvWXc2/3NpCPNnKH91Wg+IqkohKdJapiB6TKFkvMmuuRFJ2oslEGMxnWB6sRzTkSEt21ny8lozJgsqgAAXyOSOVl9WQnI9DvEhaSqGKQWRigPmZ01gKAdZUXcJUCKazXu64DRy19QjbQF/eiPBNiaWb2hm4r4/hk36Ojg6iysMTszJrw2NcXbsYkyGPXqPn22eex94u07drhkdHTuJQObhsfY4tNUYqTEVS2SzHg5MULPV4O6tQ1Vq5fsVhfLoq9v6mh5OjCWJSF8VUipxiwEQOOaugUmnZUO9h59AId3/iNkq3l/PS5d+lrXwD7spy3HUdBKZmuOv2FmxCO4/e+wpl+kkuunoFveM9OIsaxqe6mSjksUsK7lYPBpuZeFGiohrOvDSOIRrHpFFh0SqILSIxvxqDQyYei0A6SamYJltvRSzYmZqZQZ2L02Atw2J1kswWyBVTTA3swdp+KTt9I9Tp48zpHJTXF6lxleA8YGf3y3tZfdNNaI7mcC9ykJWMaHVZikmB+fg8p5+dJpaXKNEX8KcLWFVNDAZV+AMvcvXFl7PJt48br1lJfE5LzaJWRMFCz7FZqutsNFy/lVQyDdksi9eX43/8AXRV/cSHzt+4kiBq0LjKkWbnztt3eLdMfvm/zmSt+qf3N4P1bMT+due8lej/N2t/ay4IuUdzMXpPPEGdpZMaez3P9EV4tudlPt7so/+rTlo6l7LndJwWeYytVxkwL6riZ3f3cDI0TV62cFVFI2P+SSzWcrb/QytpkwAhEW2ZgKCSUYsSeUlB0WowtJsQj0NOHyCWFokkFQSpkgVN5bw0/gJB+Xp0uiyPPvlrbvz0Vez+5C46b+zkBz/cQ2eDjiMzwyhyiKxSj7W4k5xmM/v8B3GrygllsywTFzDTX+TA9DhawwmsWiPuWpnjpztYt7GDElcK84CIGNfgUOI8d+evycpurMIgPz3zIu12L9Z8mGX1K3hl/DAL7C2UWXQ4rDZ2jU7g1oBNU8Y/3/wbyqx2alsW8vWHf02dsYabt60iMevB3Gjmou1V9L5o5/vfe5hEPkSV1cXmppUUikbUKYlUZB5NtJSqdifRwCgX/fki5BoTBVlBbxUppgRUiobwYBCD0Uh6KotzSS2ZQh45o1DjtREYiLBYnuF7+/u4o62UbMHBJ9q2E8lbMavSXLamjZISBXQaUtEg8ahE28JOtFoT3m1q5GIBvaglXxAQbSKNK1wsqqlFSEn07enBd3yKjBRmRUMj6qyDjis7Ge3fRv+pIdqbr2J2cAglO43TVmDu9Biiu47hCQmTbpL1ujxXlm3B49Xx5dwPz3eIo/GUA5yV5D/orP1sSjJvJvQ32/9eJP9exP5m1/hDwZ8Psf93qLW/zjvKXRCEKuCXQBmgAPcqivI9QRCcwG+BWmAcuFFRlIggCALwPeByIA3crijK8be7h1pQo9WvYFaOE5/3c8VSJ1WuEi7ZUo5r/c1cf+3fcLNLJKnfwUToJOH/OIk/PIpNCLPQoafVoENlsmO1OMjMlRNNjVPaoSKTBpNNhyCoEAQFu1PHU/ecwBbNYQHmI1mMFhvt5iWYTCZUhRybak7gXHs9VfuiHPunAzwz9wqP/WSaQr6b6rlaqtGwsf1ajoQPkUytZbgQYbOnE3U2wMHRY4gmN1rVDPXlAr84GuKO6z/GI89PMON7kVcPOBjL+bhpiZlrbvoE+w+PMxrqI65J45JjLDTVceOWtdRfu4DQqSB
LYrUM7ClQYtWjIsbFCzo5OjbI0NFRVjtKOT5T5J6Xv0mDxYDbUsFwTx/fGHmYPT+4i6mjZZzJRMilJBaaaymxlRGWilS1lpKLhMn1ZRhMTdG0Q0fllatIyRLqrAqVVUtWyaESC0z3zFNe40A1r0aoNpCXC6RTObQ5I9H5Av65NA5LBRtKh9CJZYhaie7ZEXQ6G1bBxr5jcQ52/4Zydzuiyo9TL+J0lZOOpDGbBCRJQioU0GhNvPAfJ2hxerGWAjWlGMZKsY8l6QoUMGlrWLSqEv/oBC1LvLy010EotYdKWx2+WRWFyXEMxjL2Hukiq5nl1vZGHn3iEZ4Ipfl4QIvNYDxvsf2HaDwXRhb/TjJ/p/POVvIfhNjPNx/UzNW34lyUZODsMncJ+JyiKMcFQbAAXYIgvATcDrysKMq3BEH4IvBF4AvAZUDT7/9WAT/6/edbYha1DKaDTCT7aFZH8HXXcfGl2wnu82N1zmBIm2jwuPna/qdpdW6jfGkNcv453DqBpU1ODDo1uqoKTOo8Ac04FpcZ2aJFKBYpyhKiVkuhIJCT03QsbaZESTJ7KohLNU1gZpQdzTYEt5v9mc1c4rTx4zsfZjwVo6JEi0HU0GGC9u13YivNkBrL0fiZTpo0jUSOhLj/P55mXUsn02Mp1jaPsnLDDnIqDV/+zwf4wQ13UXfHJsQv3UdTWZQb1jWTjFVhaCtBaIujeilJPu0mrwowljWwo60VY10JRXsaVaeJEpox9QewiGEEvYdsIcnaunr29xwnHS9jUeks80IrTm2WQCpLqU7i7k1XEomtQRk5iBw/xXRhjgVGM0Uhh1NvQectYF7WilDMYp0NUHbzasgk0MgminKRbLqALqtw+Lc+KhfqUecNaEtVRE+GCIzlCE2NoZVyGMwaFFHL3Yfuoc5g4vYvX8z4fpkTZ7ppMpcim6a46ysfx9+XRhZryAfsxOcNaMR5wk+OMWwSWXB5E6LWyDOfHaJCexpH22JGJ00UigGEtJXuYB+VWgcEBOo3dhDr34t5+3JyB79MiXYlCSnMbOYJOpzXMJ2Is73Gw3PjgzzXP0pFySK+vaEDRYiiGnjlvMX2u+VcZ+3vVervhXMp9v+/lGNemDn5lvsu8S5+23PfUe6KoswCs7//PyEIQh9QAVwNbP79YfcBu3ntB3A18EvltQb6Q4Ig2AVB8Pz+Om+KyW7h1kWbOTOW5vislX3R00w8PsaazsX07e2j3aPnyQkJm0qiod7H/S+oQAWzaTuPnAxx1x3bUetkwlNpqqMV6BbJiMUiiq5ILqlDUcXRFDWku2XE6TyzVhXFphZqF9Ty/U9so+ehOJZ4gisMp+nprqOl2s2BrqNsqNnCBk872pUOzIv1lHhqSPcH8PfMo3WqMNfYaFK10ntimEQmQOlNnyITLpAfTrO6fIKeMzGqfVPctHUh2UwdsmQiI+cp3ejkhR+FeeVIN1r0VNs1dNR50Jps6K0mJLUZi1vN3IlZnDUCqpCedDrLnH8Wq06HV2Wg6MhxOlKgRGNAp+5kPnmCRWsX4m32MrxvAn84Dhknt3R4cLqdWKtLUDQiGqceg1GLhIJYV0omFUUnaJBURQqZFKJRjaTRULVWj1G0IVYqjO8NkcvOYVtmp2J5K8N7DzI3I9I918/asuWsqqtCzFYxOXYEnTlJszNEVf0t/J8f+Ni2rQUpocFencLSYWD0iIqa1Rp8j/no+7cIwWCUMoeWotBAKgnkY/Sfmmb3WBcGg0y5kmfDlaWMHztJxaoSbDaZW7ffReyMn/HQLLFwJeNKikgugWhNsMS9kUOzexiP7mbrkkU8vn+GWDxz3mL7zfiwsvf/qWL/n8SbZe1vJ/Q/PG7lJem33P+u3sQkCEItsAQ4DJS9IajneO3RFl77cUy+4bSp3297S4oySIqEXeOh2a3FoqvFaYPB4GlKazZxZl6NrjDMnaudjJyxMjziI6Nk8Rh1XL/kcsikKETy5KJqQgdGyQxJpOIychL
kXIpiBHIDKg7/5wDT4zP4jo9y/wNP8tCvnmH+lIjVqhDWW3CUrkJWxzg27eNLF9/A+utWYK9z0LC6hRKvm5wQRNDrKEZzmAUNZ16axu4EvTNPaaWTR77177y8b4qnDj1CkQUcHu0i0h3jiZfHSc9VE5+aIzwXIX3cwCsH7mcg9CrJ9ATVohWD1kEkOsfgwTBaOUd2Ik0+pWP4ZICZRIFU2I+iaLAYVHjLSqgoV1FmtJLKZRmJdlHQqNEXFTKygMM0wnD4NIKYRlHqMFaUE0kWkU1aLKVW1EWZmD9GqiAhIKAURUSNGp1Bj0qlQskXmfWnsHtLmOvO46jV4FnZhnN9NcZ1DhZ+8hJqKqpxGk3MJw1MzyXZ+Yu9bP6rFXxsxx0UzV4SEz5yRT2x/jDFbBhjVSvJqQJly+1kzeWs+8wG4pKMs7QES7mIvlxLV/ckeZ1Ilz9Mha2MZDpLZ0UDOZ2KyqsbGd3rJJ5TMdk1zb6+MULJeTR6LaOBoywsdeJQG9AWI2xxlnFb5yYiCYjEZlGp3rkV8lzF9oXC/4r9wuP9iP1sOOsBVUEQzMDvgM8oihJ/rfz4GoqiKIIgvKuWBEEQ7gTuBCg3lpH3T9FZY2dmqIKiKoiQc7NsVR5Hp4a+6C5urPGy2nExfeMRHJYsqoRISlWHQJqpkyoKjjDeZid6p4XYM5Ow2IquzoBer3Di29243EbcBpiem2VWUlBLYS7qaOFM1wDVDQ7CvTMcH+gnkIizrqqagtNBvkLBjJapMz4868sQ5RLy2hRqlYax55K0NNeQuciM2Wwl9PQUf6y+ksC0n1mjkYZGB1ue+jT6wS7GHnyWucnnafFewrq1pQSPddGoq6LF28iWRY1kXCKu5iqqxVoIRnn+rw7iqTOw6HMbqN3qIeUPMvhDmY5WC7oSM6HBJJlwntnscYYSfvSCRJ3ZhaLVkZHjhAJmTKYWcoUIM1k/2ZM69EUFbUFkcmYW0SJTKGoob6lHLQHaPIWcBiknoS4KqFQCizZVk4hE0FoFUuE0BpsNRa0ik9KiFgrIehG9WsfC5gye2izpkIaiYuTxxw5xRZsJq7kes3icE1NwbUc144eHESQ9ogaESpnUuIC+KGMwgX1VM3qPFp3Xxs4H9uLSZ3BaPDR5tOTUFaSSIvPHk6QT/Rz7doqJqThDqXEWGD2s87SRMqXQqdLIRRUNDg01qzZiNbsY7g2xrXYpDwTffnLquYxtvdr8bk79wDgXfe1nw7kS+xsHU89nSWZnh+WcLTvwQYodzlLugiCIvBb8v1EU5dHfb/a//kgqCIIHCPx++zRQ9YbTK3+/7f9CUZR7gXsBWqyNSkaZYz7bSlKeQpcvIS6Yqb96FUV7jEptKVdeew2B4yLz/gF2tLXwrT0aNMU+HuyfoU3dzievbKLrUARNRiCtUeE1aRADGmZ6h8hG/UykXBiNBfbMhDAXJ6iySDzTbeGrv/w43d/bx+DIPOlihHKHmWq7HTJJNKZ68o4EZkmh6EsyO5NBLmoYfnoAr6MM/S2lCLpyBKGAe52D2Z5JhhITSCoT9x2dZ/KLp/Hmsvzp9r/hZ4N9vNR3hgqNHp8ty8GUwEp3HNfFS4hH55BkNYJRhWI0gU1N65eWUzRIFJQCWocBsd6BoNIRmMwxMxJHVJJUlZiZlxchi2ncGi0Z2cKZA3O0LW5CP32KyjIdXeNnGFO06LUiK0WZMruD9Fyc5GyMpo82Ep8X0JolRJ0KjahCAbQqLaHRJMaMivGuNA01NmSvgF6lIyRHOPN/fARmZqny1IPsp4QmTk+MsHCqyKG5e1CSa1hQ08pwcIaAFGByRsPAYAYpLmMwQmwsw/TQFI1NOoLBJNUlC5DlHLm8Fikrk1MyFDSjGD3XIyR20bh1Dft/tJvVX/0IR//6MUosGS6rbaDcWQkaHYKSxdhswVnhJDA4h/PKDoxaDa5sAV1ai1FUn7fYtmndH3of5vksx/xP41wMpp6rAdQ/5Gy6ZQTgP4E
+RVHufsOuJ4HbgG/9/vOJN2z/C0EQHuS1wabYO9UkNVo1nR21dJ8RkdI55lVhOu0+qLkeJSXy3cuu5SeP6FhXlcC7wILxsjKWnChHpYmycUEtksvLo49O0eQoMj0hYY4UGUZBb8zSu28Cj0aDLxvl2OQgDY4qlmlX0qtV+ItbNnPvP+5iscPOifQJdIqJ7RVtBNMZdL40cm8KjUkma1FIKgXqV5SRlWXK6pZStEQ58/M53FstqFUik3uCCEWJxaZmfMlJlhrSjO5/jpoljezsi4IqyLaFDh6YOol3eo7VpTZqta0MdA1ht+oQiBH15VFCcTZ9bCmi2ohSVKFBRjQIZNNzmNtbKaYkXBUeRg8NkvHLZFLjZKQ8wyobdn+MK7Z2MuSLYFFZMcgG1jZasKk12MxektE46kIeQdLjqjRz8Mt7WfOFTcxHUrgryygoBQSpSGwijiwIFDCjDRyh56iW8rUtBKNFIv1n0GcT7JvsRpw0cOfFi/BNH2TDFRU8cd9JLq35DAW7hkXrWzEdVPDHIzx/LE7/5BHKDEbaDF6COYlsPkFy0IJWsPDIPx/GaBM5evoUDiFPuTXNLbft4C/ufoEv3NXC7EASq13LsZ/M0jMVoqZcT6W2GVVpmkLORDGrwbuykqhkoPfpbpQBDzrBSNe+CeyqGGr1m4f5hxHbb8bb1duNjx3+QAdV/7cc88HzQWXv55qzydzXAR8HegVBeP254W95LfAfEgThk8AEcOPv9z3La61iw7zWLvZH73SDfFbixa5Jjk1OYNUWqdPb8Nphz6/nePb++/nnL+9ghbuXbzwwyvLSblz1neyN7GWHZxk5jYNHnnyZ6nIbXl05KinPiD/IwrJOJmcm2e/34zbBXHCUzTVV/GpsjFRFLZ+95zoO/nAYMSPwgu8YFqMRXVYhZ5apV9sxes1MdR0nLjpZ98UW0qRQaYyIqjymCjO5OZHWOzSM/GIWvS2POSOQFIxI5gD6QhnB1BRWg0yddQFfP/zv/PDmLYjurUTvf4p98Sl0+Sy1DUYMoQSzE3GMege5YoxUuEBLvYNMJoPepiU5l8WYN1JRWUnOqMVmKSEaV1O9oYGY0sSBwBytjloWumx858D9GNXX4NVDLCvy5OxhSlCx3dvKqYk4brNAhWiiIFgJhVJodFlGHu/C0rIUWQoSUzSUlFlJzqRRFCO56Wn0nkqsqTTB/ceQU1kk1JwMZOnUVjGen8O7sIYnTkTY80gvHW0NvNzzLKvkVvTmChYsbOHp3z2OJA0h6ozMpoawmKqI+ydQKyrEkgSCWMmeoV/SYr0KjTqFVhzntq9fzFzQy6oagRce07OpPY/eXcpo/7MMpAYpj9Zg6lCTCGYJxqaYnvKT94zgXdHJ2utXUVJqIRHzc81XL0Ma9KOc+Np5i+0Pk/NVijnXXIgzVV/Pvt/vOu6v80GXZODsumX2A29VtNz6JscrwJ+/my+hFjXkc2qKQgatRqbNXsqST2znk/8+gUeT4pHvvIrO0MJ17TG2/9EiEvIiDpQ5GIqq2GhPY1aFmJ4X2FLVxH3H9pKX1fTui5KVJTKZMRLqMj696gpMlQaO5vZT19LKY59/CYtdR0dZgQqrlbrF7RiFHKFACRUNRgppiZlIgdVfaienUaMXdchKDtGgIhfLopJU5AoymXAYo2hGq9FTUuFAF9ExHD+DxeBkgWMx0dgAX9+4gud6klSbZzEZhviry5azYPmVHN89gjqfwSzqKGkpYKxqQ+9QM/X4OK4byxAGcqhlC7sff4XOmmrS2SDZnIhod2I2O4kEdlHvyhMM+/C0X8qVlYtx2uJYDQ1MZg9iVRux6I14GjwsbPcSC+aJBcPUrKnEuMBGXtIQP5YiNXoaOaGif1cIs0WiqsmNYHejqTKgSughr0GrQLQ4y+Bokpn0KAZR5Ja1dcyPZdDnCiysWk5xOketbRE2rYn4dAKTsYbrVmxkeGKCjJRgZC6LsQiiWcCig5bqOgy6GvrnW9A
LYS5fVcvir32a8UMK997zKte1L+Pk5F7qV97FV77xBM3mHJu9K8gKEvMzE2gdFtovbsAr1eBe6sZk1pDMSmQ1CorOTlyVQlIUFN68LPNhxPYfcjZdMu8le38zsf9vOeb9cTYlmbOV/IdVinkjF8QM1aIsI6krKaqPoaOSEmsVI4eN3LXMya9fKqe6Zpq2S1bxr/fMEP9mhLVrX+AvL/0yGd0IqWENqxs6icaL7B49QkyKYjc7UBckFnoMWPSr6WysJxAvUHNpPVfpxjnVk6BOitFafQVZV5q9kwnmlTJKpwMwGSXq8+O0VoA6S2wugb2sFFk0oCKOlAO5IJMpZlFTQCNqUGSFUCxEaj6JKOjY1r6YwWCO4fEjrFpzLb/bGWYksZflGxZTveHveezBX/PzB++h0dFAh9NFzVILisaAwSWTKzFSskxP4OdRiuYUDVutbN6wnOfu28vyixahGFUU1SLP/66H2o4GHCEjmek4/vQkf3rvzbzywwnqltkxHLQSTvhZ4CqnZsMS5gJBJL2J8jVG8gYrar0GvVGPo1kktGcYk72OtlY9kgTT/UHKW2yoNCJGmwk5HSI1ZsAXzeAuszA4o2dLWyd6Yz0zviwJIc2ydXWEhnvRWTy4SvX4Tk2QKMzy3LifbH6CRa42rli9lnAmR4PDh6e2gV8/+TJeWxSz0YW7QqTx4m389NMTjM36cep28pveUozKOh66/0EaXe3YtDlGE+M4RT1YmynWOMkVRLydVejLFKJzKaLhKJW1ZSgUIG4gnY+gUp+X1X7/C++m/fF1Wb+d5P+nZuoXAu+21n4+5P1OXBByj+dyTKcPUW8sxaNR2B0Modv7NFcub+dz12zFWKPnsYN6nKphNnaq6Z1u5aGuf+XmjhspbdZx6drNBF+dJ6QI1BqrWLWwgrGggb2js9x4UQe7uo+i11fSmZbofiJK0OxnslhHVWKE4e4xNCmJXeF+FjsaWFTbSDquRWXNU1pahl7lTSalAAAgAElEQVRrQtJmUUkFVKIOpQiiugBGG/ND0zTdVA5pI2qfCWUoTiIYo38yyuC8j6JkIJVLM5v18ZkV69FX2PjNfd8hkUqjVpnYuFqgob6UcFQkE5QZ2RPCVq3Q91IvNiVP2yc2kLQZUQsSS1fW0vXkIdSiBcFiwRcboH82Q6engrZNTVRf1440LaBRJpGNpXRHT/HRi1ajyVYy0DWEyWtCYxTJ5nKIlgSZlIVMGFylJiRFg22tlaKsJvK8hC6nYuywn/I6Gyp9nly6iCIlmUml0IlF6ox2KgwGdI16Hr33AK3eFAZLCQMBAb0avBt0nHyoSEZS+PTaBlCXkdXU0FDhYufeJxnM+nng6ASH/K+ysWwLjWaZm2+5jgOPPc7uU4dwiiJ/fNVNhDMOhsdHuHbTZaSSp+gdcSNnTrN92yJ8YT3pE2G0C2SsGhN50YzZY8JU5SLtm8LiNrP7J/updlnI5qXzHeLvmTfL4j+I5QT+u3A+SjLnekbqh8UFIXezXkQj5zDpJGo8LVQ3pfjVSQdD2U4Wlw8RHklzfftiVB+5i72v9qKKDnJLzaXs6f45weNeflK6hC3GCQ7Ou7n3X65jYO806++ysnTchDhfSad5hvJKL+nhADdf5eL2R3fyk5uXEBnVUyh1MjE3QbWhhjqtiBxLIAsmlGgGvaQw/1QE7yc9FPIyweMRRFQYm/SM743QsKQSORUhk0mDWUCsFSn40rwy1svGilo0YgyjO8/mxvXEMjpi/WfY0FjPZFgipJSwf1hiKDBJQ5mHitoq1IrE9J5JykQdDau8dD04TsMaF0azkYKkp6gzMRUOMh08ij8ZJVpQs2XtQiztNZz8rQ8hHWXl2nryfjUNnlL2H/WRKO5nQ8VGrLFlFOOjNC3yoi41oUpr0aoEhl8OkBwFXcFAPJNjpHcOu6yCdIZYf4yElCeUSVNQJPJimkUuByX2Vp4MH2dDVylLambYsnEHU8PdXP3F9RQsNtR2NSV
7H6WuPIBBVUUhFceYLOLrPcXqxmY8y67gWilGb+9m6le5MLe7CfWFmA3XcX27BrOpmpqlTiyJDGs/vo3xM2HSPpH5uUki+QaOHC7QslTGdGMregX6903RdnUtWr0OOR3FUuVk/KkgzUuaMQF63buaznFOeD+Tlv43Q/+/KWye/VBfyvFOLDz+X58Me5aefZPUuai3wwUi90g6zi1LluCsFrGu0DFweBOrbI/x9P7fsHaBB41zIeNjs+z60TxrKkI8OOSntSTJbVuvZWh8J4tXxClpuoFtcoT57hkaPy6SyKe46/tjfHGxTE4pYihESYgigyovGyqX4nQ7+M5DTxCXsuRIEhLUzMfLWFIhsn55PZlUFlWdhWI2y/GvHGPBxjoMnTImSkjuiRJ4to+KXCe5Ugm1WsBq05COOAkbzlCQpumbltncfjkOe5ZLbqtB8JiZPuDHmalifM8uBmaHuby5gboKF+7lXkpXGAgeTmD1mxgcnkUfraBcmOPEk37KXEZIQyI9zanwHCa9iQW2chZWG1j56U2EpofRZ2cxFZ38Yv9xjp6e52u33srd//EdmqxV7PWdIDLcRZ25gvGZKTr62sERI+LPYsgJuN16Ju8fZ2LUh04lEYlnedUfQFQXWFjeysjcLHFlno+t30Q8nGY8OMtFC2VMcoBr1tzM2ITEglobSnkJ3b+KMLCrn2Iig2/YhN3Qh0ZVRk2NgwPTh8gNyGyJ6XGtbWbJTXr2PT3OT7/xEJc22WivLWfJNR/jlef38Y3vHuKP7/o4Q3smSQ6GKaRixJIxTNoIy5dVU2x2IWqNaEU1za0gn0yRX1RELxuIHk8gxGQkTY6+qSKSdH7LMhfCWjIfJO93hciz5cPM2s9W7G8m8rc77t1I/oPmgpC7Sa1GXTJD2Uf/iL+7p5873IfwrlrB5hs1CA0NONV6spoA4T0v4lNG+esbv4xztYb8sI6FGxczE9Qy1TeCpyiQjxcZ+0Uap5Rii7MXxdJCm5THl1OoyZaRGz/NZCDIrx96BnUxjSiArJTQVFJGZ4WZ2qoO0tk4ugorpVvLQSOh3W9hPp+gSmokPj2MVK6mssPC6Z2vULR5sJY6cZSZCAbD7D4qsMTupdreyqbb3UhqBy8+cpJCLMf22xqZ3DmJQw03NXdwMhpncPYEDZMZVkhriYen6DvRT5m1gaJfTzin5cWxHiwTWew6HbG8ihaHifl4kks21FBz/Wae+voR9r7axR9vbyZlV7hydQO+4ztxey/j8x+/k9PHBpFnJqjI2RiJnUKldhE6EaC21I7N6CaiFjlxoo+LO8xEcwKpLPT5R0nm8xj0IvlMgvqyOA3eDqaDKuRMiPWfu4nJV2aQ85OoEhKaMRsnJue4/54fkFIGWGFpZSARIyWFWV5SzWwqyfDsHJtaoD/g5KmjxyicOM0Crx6n6GFHTRKtLU7rjk4mZgaZnxmhorLIA/d+kb/7yveI6E+it1ZiH2xjdGwnkwEwFFPUVdWSSASY3DOLJhRH3iXgdmqYD6qZD+aJpAIMBcKkcm++/MCHgVKQ3nrI9n8YTbd3fWDtkO8k9tdl/GG9CPtspf5W550PyV8Q71DtdNcoJ3bt4/kn0vQ8/i986rKPsOtYlJnpJGrFzKnMLDfUtbFv7BgRxUiFQc2OhZ2I5nKMq0QMyjzyES0nT/ZQnciSlEVe0il0bulkcM9JbljdjNheirZe5NSDM3z+d/9InfajXFKWIq5W+KMvXEc4FCI9KKMvMyIlJUx1ThRvkZJ6I0oApo4XKWk3EpgZxWurINYXJDToJx2cJyVbicoZnu3fh0ZOcFHNRtZft4OhuQSne36HKVPBaNbPwooWevxzfGrBJozqSb790qOYTDamc2BCQ621jkDKT5XOTntTLXG5QDw+xcNndtNoqidbSOGxuvnkju3cfyTKDWuthCdnELWlWKsVSrxexgehtm6UA3szvHK0C4+5heXVIpOT03R6qnnkdDclzmocmjihtJ6
cNkcsHqTFXk9AzhJNR5lKzNOor0CriXJpSzuOpmrK25xkAkaCkRzkwuSkKPGojt2nhvFnE6TS09TYvXTYNKQLMusXd3DgSC+KtQqvKUwu6UWtTtNQ1Urv9AEO+BRORw6x2L2WNGEuqehk+zcvRxGjSLIWeU5DbPwkxNJgz6PkOpgeDLHr+BPkYi4+duVGRiZnUKeK2PWQkQXSmTx6vYl8NkQwYyaQCZFOBbgvdg+nAr7zolir4FRWCa813qib6t/xeHlo9AO577mquZ9N1v5+Bf9uM/b3Kvh3kvp7Ffpb8XaCf6+lGbsndGG/IHt5xzJl36+O8vSXTlCnH+fpIR2nQy9z5+JOXg2JyKkD3Hbl7Rw9GkAohpALAo02Jw5bCcNGgXWXFUgfs1CMQWoyRE6YZDJaIF9IsaJhOftiEa7/yloe+9ILzBUFaoUiRdGBXknz/7J3n3Fy3uW9/z9zT++97e5s0VatVlr1YhVLcpFlS4ALzaaYHgOhJIFDOCEhkAOEECChJIBpBncwLgg32VaX1Ver1Wp779P7zD13OQ/+f86LOAZcZEsk+T6bmbs8ueb9ul7X3PP7bblhJff/5gQ9Q1O8+/orCXbaGHkqhmuFgYbrl6JqNSTO9TD6RJyGjnZkrYDGVCI/lsdsldm35wQFjZ/z4/sIe1pYFYjS1LiSnnEnukKKE/Fn8DoCvOVD6/CHmxk8H2PfT08wkyvR4DYylZvn1OwA62rW0BU9j98axudzE8tEmUpnWG5rYCB7jM3ezVgNCrv/6loyZTc1a/KM31tEqy1y7FCK6cwCtTUiQqaW9btXc/cz+1gWjtAQVMlm4pw92c+JGbjjilryYpEj58ao9ViJl82MxvtoDnQyEh/HqQNJl2JL4xUsWuHE3h4kOukgP9aDJ9xEKT5LKl/mxIkcF/KzII6wob4Dm1jA5w+iN+vJSwobP7IRtSzwzHf3Y65U8PhNiBUd7lo7rjUerG4bE4cy7Hmoi6JpH412H2s7biMai2OLKERjWprXL+Pr3/oJ79v9Zu6483/xjrpdRKpCzEWnSZZM6AWFWpuVdL6Az2mgayTJ8pZqZopTxNISNmGWK69cxm3PfpmTPV2XDPcrWj7wss97tci/Fri/nHHMKwH+1Y5hXgryr2WX/lLzapH/3dUg/9AG2ZcF7p2NS9UvNH2fY30H8Fnc2ASRZWEtZVc10aSVgYUYAUsCs2zgQqLEomorYqqMQaNhbZMBnauB+fQ0XUMpdm7cwbnZQ/T2Ftm1bjVCIMmxZ4e45f/chsPRy+hDJwg5ryDtKmLUuBg7Nkf3wEm8wSq2v+tavvTpn9FRXcuWd3USbApSmE0yfWAKMVfBEa6jlE8jlQvYbCaePThFKJzh2PAUFqVCxGxn85YavvTLIUJGC9etr6OpYQPOkJG5rn729J3l/EKKD1y9hnufeoyr2zrQyi4M5TTPjI5TkosUhDzzZROBSgKD2UXEbGV9y2p0Ni3+9iaqdoUoG+BvbtnDOzZ5OX8uQU4uYjdXmEyIlE0yt2++hp8//QPWR94MFi9XfLoOWzDF5APPcPJJL0vaM2g1bsyODA/tyVLttWMVtZjNAiuur0IfcaLRm9E1BvnQjo/xjx+8mTO9BjbfsILRE0MM9Q/wbN8MQaeFrQ0GOq9u5viBBULhEMHltfR39bHk5mWoZiOZrhmGnxtl1cc2IM5WSE+nCG70IMsCBkXH6IN96BUTv953mqHUDIn8DJ+69V14lptZmE4xdcHPvz/zVT69azWJ6Aq0lT5mxwqMZebx2GrIFlM4LWXSZTsZZYE6l522YAOLWwP4V0TQrqpm09u3crL3Nf7G/p44TSF1Q927X9G5lxPwr3TO/lKQfy1m678L/cv5ofS1hh3++Ijm9wH/Ykv8Xva4tzjb1Hc3fY6usf2EDBWubKyl2rcCt9dGNF1kdGIUq2Jh/eoWHuzqw1SIY7CG6JkZYc2Sa7jlqytJns6xf88FDu/vxVUls8xnIlM
Abc5MpN2OoIZp3hHmUPcU+jOTDMetLO80Mt0fZ1KQ+Mg1q/hfd91LxKwjWfRw203XcO7MGMOJJBtdCmaHDXcwxMLUDL1z8yiKiUJpClmwsC2yEpsrwbJdYe7+bg/bNpooZy2IZZWMtQmhMonL0MgXHvoukpjj67d+iLPnZmlta4WgAyUxhDFXIZVN8+NTF0hWSvzgrzYxNVomsLERwWxH8NpQ4jp++JVzPNX3LB32GG0tm5megW1LnEwvTHNgppc6Y4nrN70DjdZKcr6HodkKxaKFd3xzA08+UOT6GzzMHB7H6gyTlMvUb9KSH+nBam5CcpnRN7hQtTILgzkqZ88jjwVJILNsY5Djv4mhr8QZHZ+iIA9R7bSw/t1vQ6kSkJIKo0emWf2R5UzevYB9sY6iWiA1BC5kKr4KJouFxHAF/zIwWpxMdSVxlvPkJkX2Hn6enlgvWpPCNW3Lufpjt/Ldz+1n1/XNzHZ30dm8EvdWBw98+TgxOcqaGhOjMQcGSzfTMzVUcpN8+PPtzC9sxWI34HTGmStpeOKRKH/9m5uIlef/5HCHSw/8xfrx9IXIX47/On09YIeLO3//Q7hfFj+oJgtJctkEDgsssZtZsugKtE4DjpARUyGCSwnQuDrJvOpmu7CFUj7Jnc8eYlnIxfG+HjYOdrLnvn9jf+8YiUKRj4ffxYV0AmNJ4cZ3rcApSHzzu3fz+T1xPnXzZ1iz1sH62nmM2gBPjBZoy1xg8JDMCpOH83NnCDtj/OY3ZeL5GMuql3B4rI8sGiS1gk2oxmwtUC5JeJUibU31hFar1F+9kel941zz/is4/NgzBEU3WYOOWk2exjVhZvsU/v3WN3PPk6M8cX6cG1e2MZ48zBOPKZh1ARqarNQYzYRdBZSSg6GpCA27/ei1Oe76XgypMEz32TPctqUGhzHFtva3MxIbR1a09MS1aPMF1nhWccN1jXzn192sq2niyje2cfwnT1KSXNz9N6cx6o7SXViPy95G9/wws+cLHH98lELRTcidwNBixqTRMTOaZe+579FUWcSuzSorrl2Lok+SmRrk0YF9tNksvHH3dWgbApRUDeV4mUhjNam+FFI6T2SzjVjPEOENEZx1RjQoqHNF4rEYYixPfsFLvDRFdVM1x799AYuliiVVTUh0kpZhZkog1lVEyo/z7BE9gtqINZFH+kWSLRsaMNa1cOCUlrU1MRKF3bzp4zk01usZvxCnuiZPSSeRtwjUtHpwPzCI02i61CX+iqNtXvSqgP9dnF8O9Bf7iZjLEfPfzesF++uZywJ3nUZLMJ/EbgrT7PRgNNgwuC3oAlaYV7BusaC4fXhrnUx/d5bh2Cw7F5V5uGeA29s3cfC7Z9h19W3c8XGJe792nntP7eH9u7ey8kPXoC1l0VlMXHfhbby5GEMuDJBKeHG3BKjMy1zo/QZjhg3kxXo2La/ikWfGserCLDXKtBmrWSgOo7e6ucptYywxjl2TYllDiIUStLd24l7ppDCno//eQQqlCr+4+xluuHINNSu8FE8vcGQsxbPni7gr81TbtDTZBH4xPIS7qFAwykQXJtnRKbJ1w2L2P63nxvU30jc8Stivok3D4B4RS+IkD184Tae/QinVgtewgrIokXOEcWsrXP+Pmzj673qWmaqYGUtRLoo83n+EC193MVGSsch51lb5qL3iz8i4ijz86wGePvlj/EKAkM/H8emH+MTKd7PS28TRp75FU+0Glu9aid9bxcKwRH5igmdPLDAZnafTX0dbuAHfta3IGpnMnIgxLpEOl6jd3sqBb/yMDZ95A6ZV9eQqFfSChnisQvUyO/KME615DiQTNp2FqUNjtK6qYWz8AkpZj0k4z0xRwmO1oy0X2LRjNd3nojx2/qsUo+tprQ6gRlbx6R90EZ38GTuC27CHm1DvyeCr05IreLggJuga0JOrTFBlGyMs1aFivNQl/rol/r4NAHh/ePQ/ffZSoH8lqI9+acN/eN3w2f9874uZ0fuW/af
3Gt7W/Zre808xl8VYpslSq/5y81fJKxai8Wm8dV78AQ+mRh+Z8RyetS489UYyM05mDl/AMKknVUmSGz/N92fGWWSsgyVXstUrMTg6j+yc4CN//17SssJvfj1D7JlZiqkhPKGVbKrO8u9H9lLnbMNezJM0mthQG6apFroHEvziwtNkhCB/uXgL9e0eXK44x87KrLkpghhNMFmwsO/Zo1Q17qAmFad9S4WRc37isSka3UZ+PdjH5JSCy2lgvdvEwelTxAsCS53NaIQCunKJ+6KDbLAGUQ02jNZNvH19E4bqNO/7l29yZeB6Pnj9Uk7ZtEw838/b71iK1Wlhfs9etLpG9vcVMYVC+HQWVi8to1tSxfHv7ePE+SLnU2NohQzNVj8XopO4dR60tiSFUoilTh3744MUxDxvb9rK0Pw8E9IYFW01YZ2D7c0iWBbT0VqDY10IbcDG2AGRR39+hnCNjszYaaaVcVQxxCc/sh3D1naMlgr5GFAoIKXM2Dq0aASRbMqExaqgZCFXyuBuERC0NqbPpDAqBkoxCbGcpzQURe8qY9F6GDsf5+5DB8nnYyyyW/n8t/6O4/vO0eIxoTU7mBkfxYgfrT5KOVtFNHmedbva6D3qwlYdpbouRN/hUYYnVR7tPkDAVuDacCMVq5X37/tLJnNTf5JjGXhpo5nfov778mLYv9y8EPGXkosJ/Yuh/p/u9wqQvxRd+8UazVz2YxmNRqCi9yFlkvjtXobHy6xcU0ReYsWy0sHU/lE01JOZnMacAzmcxGr08eiZKA6dCatBQRo6y5ODMlcvW8INf7ud2f3D/Pz7e9hS3YCtcIFEcwOnn/8llVQIsZzGmo/h9Oc5MzqNTd3IY4ML1Ch+rl/5HtY2epmNVxjPVjA1dtLWUeHYkRiT50z0TT3OyVQX/9TgofrWm7nns7+gc904+pyeU8kFUokCHq0RXWaCqo07ec8b2gjVBCn05+g9tkDrxhoCzxxFVxA4MDLCRO5evvnsGt65agV3ffB9+JqCfPkr/8ZseQ6bex3f+oc0N6wLYHRsJz1zBmcljjQxiamqht5jdmqSA+ztmqNSFtFJUXxGWBIOU2V2cHz6KDtrNnN4tI+tHdfSfSBJyBZgVl1ANHnZYHGRzE9wJj1GU/Pf8sBv7qV/aJ63m3YxbZ/h9GMHcTonmZx0sMwVwl02Ud3YwL6HJ9i2JILsMqPaFfKTKbwbnCxMpCAr4V8hMXtSoHaDF30KSnkFbVFPuNNEeRgyJyZxXuHGXxWhkKwgpc3k57R4NTkaI00sdsmILjPiTJa8muWpE9Pc/mdXcOr4OZYvc/G5v/8R7Q3Xc/+X76HBVc07Oq6iq2uIYL2V6fPdtNfo0ItRxos3MBYtUKn86S4/8Mfyx1B/seNeLvSvBPUXnvtKkX8poL/w+JcD/H/Fccxvc1l07m32OvUby7+JRlNArZToXKEj+K7dSOYMquhCnIGu+/rwkcIcriY1E+N09zn6UkPoBC1+NQxhhby+jzve+jmMGi3GFvjqh+4naDyDUWkmobOxKhgimk/QuKmFpW9ayky0RGhklvd8/RG2BfxM5lRCRpkFycSbrlqPxS3xFz95hreu2U0sMcK2Jh8TgymWv6sTJSfyyIEeGoI+mpMJcqKB+84+iV2n4qGaKreXq//mLRjrCySmc4RqfDz8wUM0ODRYqqsQbBMUYyrFvMzj54fpj8bZ0bKSnX+1BbPRwmP/ugdZlGiIVFOy+hBiFdyWLLO5OF2xOTZWr2Hp+kUMDUrsO/gUfcUhnBj57Btv40JsiujoELUmI5LWgKCXieUqDKcnWBJpZyyhcHziAnpVw/ZFjWzYvpRnnhhmaVDgjoPf51Ot14KuGa9BwRJQuedQN3V2I83+WlZd1Q6KRGxWwRypwrulgmwx4IrYkUoCpVgBOW7E5ZOQzCawltDq9ChlBUGjku4qU04kyUk6jFqIHxGZH50nkR1k3+Q8K0INLAqVeGzMz6duXsfRvqc5Oyr
w1hUwVIyw644Qe382h0u6gDPcjLehhN8dYfhghlNRmZHe/Xz8vbt44Kdd+MwZQM+nzn2N/szwf7nO/aXC/mJ5qcC/GthfmJcD/MtF/UXv90eQv1Sw/7f6QVVv0tK21oxW24BckimHYhSTMnpBRzqdxhuoplBJki4LlEbGyCY0hD0hBGuCZDzEskY329+5irLHTvyxKezX1ZE5PUl/YYF5NUiztkB9TYTFdyxHf6SLwMrlzCXKOP0uhAYLt+9t4a7TPbjNAomKhvlygaqT1UQaq/jGu5p48ngX17f7CS81U7tVB3V27DYJ98/PsmbJbu4734ehmKcjuJi1G9dgEFIsjJTJRrVoQ2aCi7wU5iRcAQmtyUHZJOKpXYrkydLaYMVCiPvPPkbfQjcNd2bZP23AYTSzukHgwQMnGVRPsKt2LWtXvJlNG2qQHp5l7+kxvnfol3iMejodc3SYGthQXc2JU2fwe2xUhesgI2J2FClmfVAaZWV4FYnYDIXsHB51no7GbdQu383xvm6q6g3YTS62Oy0MFJo5vbCHH3z2oxzYk6TGbcSn0+MOeCkLOow2PbpEBkP4eQw121BEFSoCCjKWgBFVLxBPpdDm9SQns9S1RNCgQSzpsfhEKjMCWrVMcjCF2S0S1qtM94aoCKeIL8zi9m9GrRzhb358gN11W6iS7mV85gM4vH0kjrjZcvsyzg10UGfNMfJED32Jbo5NxjEjoLEs5v1feIiNQR8diyMMTlopKv+1OvdXg/rvXuMPAX8xUf/da74U4C8G7L+9zn/nWfxlgbvB68K7dQlKFlI9UZwNizE3OSj0DeAWHMSHLuBXBHQeA139w0zFkxRkKyJO6iwCjf5q1KVmvvjek3yyTUXXG+AbP7qLcjnOOn8DZv0iHO4gmrwTn7+OUiyPp0qLIsLg01Moso1bGzp4dqaXel+IbXWrkOUkiYlJnku7ufHaOpQpiUzQQLhtKcNDRX7yhVHeefVu7vn1YdKxMXZtuAVHfQ0Wr0SmoKNmhYHZPadIDVhJZirEh6eoqtgx2P1UNGkGTg3j9tnomZhhNpllqW850zonkwktf/1ePT/9jZGvHvk+n1i/lkDVVUT1V/HTx+7mPdYbWMhGSAh9eI0SAcHAiradNGxai2e9m8k9vcS7EsjJLBr0aBUvRlOOuto6stkSNsFFh1OHItp4snc/z/UdoMZdTaWcZWv1UoLGrXTUF/Dp2rG0uBj+3gmiZZGyWqIqmaGUtlMqaBka72HHx9+CrK2gs5uQJRGj3kRFrqBYBdSkEaPDjEk1se8HPfjdEaqWyCSGKpgDBmxGHRqtgFAsYBktkloYx2N28YGbbuSZrgyTsWkaHRV0jimK6VompvZzsGuBwNEs/rvcnIk/y+Z1H+OTn13NA18+jtOr0CRo2Dd2lKsbxnjv3/4Dp85X0SEcoXT89+8Qf7nnhV37xYD9d6/1YsC/FrC/1Fws2H/3epcT8K/nMgSXBe4go7UYSEdzLKRFsseS2AsVKmU7DrNCzzNZltYUiOvCrGhfS9PsCCPJcYScQrysxxox8b9v/w032V383ZlJmifSTMQL3LR4J7f89SpyRiOpR0YYPxBDr8iUhFFKGT8m8sjZHB6DlxmpxKZwO1kxz/PHHyGhLbA2uJKdjYuY7J7h8ePH6d2T4W3NG3hi9gxXudfx6JESh2fP88mtN9K0yYkk5KkkKpRjUeIJKzqDgjeSwpXXE2rwIFQiRAfiVNdX4VsjYWt1ojW7WFJJcubbZ3h+/1kGZJUV526kmHuWf/7otdS86RrUMQHpyGG+eMdbeOyhPHPZScrlKcDKlobNVNCiVJtJpWX0tUFiz4/ReU0bjoiRuTNxZL2F7IRIRdKht6vEYj5qwhC0rOKR+R4m0jP4nFpmCzKG1iXEC1Ns2bmF3gPTzBVy1Hk17LiiFYu9DqYRyNIAACAASURBVJPbiXW1g+AOL4Jeg6A1QkWgmNWicSnoRB0VWYfeZSY6XqR6sYp
hfQidoKGsljCWFCpzoHWWWTiXozhVZGBuiKBHQy5dRXRcpbP5NG988zswr12HqlNJZ1XiXUU6u/aSjIbpHk/Q6l/C1NAxHv2qiqkwy5ama7nzwGFs2iZCS9bwj/+qsNi4B59BQ8Bdc6kL/H/yEnKxYf/d614OwL/e68tcJrgL5IYy9B+YYuNGLdNxEW2sADhI9k/hN2iYyrgw+fMo6JgoiEwmrWxw62mri5BYSGJcGGHQ2cDWSJAT/YfZ6A2xblcziZQDX4cJaWs1pV9NkSkmkXUGxNk8ikFDJS7jDpqo6WghU5A5cvwYAZeNeDTBibEBap0elmxoQjk9R0j2cmXEjllVqatLcNeTvfzd7bfh2bIIa7WLQqmEPlWkZtrNwMNdLP3CJvKTIm6HFzmYJTNZIhjII5cr6E1O5IqCYEySFfX46v14ni8wk5eZnRzhLdftwLqimhN3TnHnUyMsE6a59a0beWbin/EZq+hwLGFFpBpBFRmbKiD+fAhz0Iq7TmL9F69F0JSQCiZqVzegynrUxBxjhxZw6O2sdFsQnRosszKth1UeOn4UsWQjWpnguvUegk0dnB0fQjO/iE/cWot7xUYy0zKJmX60iyJIOhWd2Y8iSSAZQQOCQaFSKSGX9GiEIgadGUGbQtG78CzVkBktUBkx4l2kI5FOI01bcDvh0MEBkpkyG2qtrAzBgrmPKz7yafTpMk99eYqxyWFCdoWQw4ResxG5OMDaGjv6ShUNyxdx17PdWA1mkl0PstoUwWJTOXG6QKb8c1YuW8q8zkcikbrUBf6K8lp27f+T1zcvBvttfVP/4fXdbRe3CbkscFdElTPPqlx7UxF50QY8yQyF4Rguf5A5krR3LOW57x8nlM1Q0gY4OxdjemEIn24D62tM/Pz4Udrf8CZ2VxX4P3c9hkFy0eCvZ3xEZUmnhDifZe5kmqKikMwVCTQYcFk1YNVRrrPjcduRozpM6TSLQx0cnD1H2Wogl5MYSBbZ0O7lfdIH2HvoENHzU/TlYGh+Cq3JQNvudvLOMtmEhFanp6wzUnYl6fjcFkxGAcmvQdXLmLUa8DuQ5AJKfxFjRUtpOM3EbBm728nI/kGuCrVz78Bp8uUKsXSUX395HxFjLa3Ko6yJXMNdDz+Ixxhid/smGtod5LUKPo+TSNyAGk1Qf5OFSiiCrM0hGBwUNTK5fAmrRaGos1KzIULvIzM0+A1UkjaShgx5qYUb1zTwxJkDCEjUd7QgL/ayfY2P5/+xm3v2BlAe+SU7l7SQFWWq14Gs16GKMhoVVKGCRqdDI6moFYXydAFjnRWtrojD5UQrGyhMK+TOa1ASUVIpJ+n5EnImz6mzo6Sz/azxuZFFI+diMT7zzk/zndsfJxU/RVoo49JqcRPgZ73dXLdoK2dFE85klMV+K4OHR2muqmNi6B4s7lWEw0Xalm4m+sQj3HbLTcyV3ByZWCArJy91ib/sXKwFxF5uLtVI5rXq2i+HvBD2F6L+wvcvFvKXBe6ZaJ4tu2fI1a5DWchjsJiJJrWYTQksi4PkjXMEr2xl350PsTwiIpVkVKNI2GMhLeZR0pM4xmP8+GCC+ayGkFkgr2gxn+vnub5enN4AQb+e8EYftZ5adIJARXaTGR7D21hFLpckkUgzeXoKS0Vl2bJFTA+fxKOxUi6nECbsZPRd3HzzdqxL3Hy428M//eSH7LxiBYNHJCQV0ETxB03kxgu4d3rR6TRIBi36sIAiSIiiiVQ+ga8+QFbJYDY7MHp1mHUiMyeG0aInXhKottexqCpAXrRiEkwY6xRu8V/Lo+cXiGU0OB1xItV6FKeB2iV+cjGglCfQ1IRaq0GLikY2o8gKGp2EVjahGgR0Zg2i0cb8dIz5oXlaV9RiVi1YNGlO9oPF5MMka8gi4ZK1PPdghiq7HfPsc4yV5jnZL7GmaRndP+7FVRNB78vS+M7FqDoJjUZH7kKJspjGGzBSKRdZGBdxerRkLihUShrS/bOYLBKlaBI5Dol
SFFmYISEWWb1yA9/ZP8Ftq6r57Jc/R6lsJVNMUJZyLFgt5FMlRMnE6Yl+ZuUsijHA4QURnVph96J2mmpvwNC4FGMxxowSwu5fScoe4bmD02iVGVymy6LMX1Vei679Yjz7fjHy3wX234f6C3OxkL8sqr4oFjD4liEIBVSbkUrRQGrWQLBJRl/UYK2tQjBlmc0lGeo/wo7qlUyqTlqrQ4T/3Ib1J9dhnrXzTOpJNNix61woiBQkCTdm1t/YhHGRBcHqRfHEUcoWpFQOS9DO2J5JHCYHYa8dW0uQ5FSU6GwSfRlKcoGtVWvQdxpYLF7JyIUo1dM6njz8S/pip6jvNmMvJyBUhb/BBRqVpE5DwK4i5lXE6Swmnx2dSUCuSDhMFlIzRZx+GxPPDRDaWIu5MY2nEESHnvRklkOx5zk1YMSmuqh1N3DFRzcw9+xh5FNdFDQiZDz09c1Q7pnBcThOoCWITpAxX6enIOsxGxVks0wlB1anhfxcASmmxeo0IGZUVt5URaDWxanvHmZuUMLj9YBuknAwRGw4Q7YvwXzMxFjPGUwWI16ti7hRZHnbYuwRI8mePFK0n9RYHFE0ElgfYHLwOB23dIAYpBzNIFAh4HQxdWyM5IwWXXQARzCE0eRhbnQGozXL+WED69cF2Ni2hoWSiFpM8+DZAgtSEqFYZLnbhqUiEFdUzATQGbNojCEadBK5nIhiMZISZzhz/hDVzgDWuQQe2zzzc0b2nrsHAzpmE+cxF6dxmPyXusRfVi5V1/4/ubj5LewvFfUX5ra+qVcF/GWBu6DKnP3WM1Tf3IqIjYWzMRYujLN0+2IqDhfZTImxM3EKqhG7Ls2F3DiV8hzN37yGb39mnKHJOYzJX7LI04re4GB1TRXOhjAm0wRjKR90eijrC5htZTSyHZkMZoOLSsRKVZMByZxCIxvR202UFIHjM3EcDhvlfAlXjZ5v/NMhlmyuprNZzx2f+iduqKuj1daOK+Ki6o5VSEIFq9ZIWanQ1uklncrg1NswBXSIJQVJ1aBRoCCWcZrdKLoYZjzMHZ5j5GiM1rZ6FBkEh4d1odVsqmuiHJAxh2xockXkmEJUVkhkRlkb2YRFp6AXJerDAgZPmUQyRk4Twmb2IxvKlOZFLG4r5VIFg9tKIVkgn9dgrCgQqSGrU+j8xCYanoxz8LHzDCdmMUYLbGlZxeBUH5s2+lnyvm3MPFCh/UKUldkkc8U0peEi9pAOV6AKXcKHMDNJ/LEFtE4bk0+UcHpVZg5mwVJmYnaKgLhAMNCAJtjGSNcIucwQepuDhaiIU0lx4cgQjSEDF2J56sMeMvkEkbJAZ4MJu+4K5tLn2N7QiaLMkC+EWba+kdmpNo5emOD0/BRLQgIX4hPkUjqurQdDeAfqmbO8u2MXXqeBRcYQrYtW87279lyy2lbLIvLgyEtayx1eHPb/6dr/9PJqYf9tXg3wlwXuikbDw2fHqBmeZl9yhDWeNowaNz2PTlC1zs+PHxzBXo7yhpYwpoqbSl07ba3jTByaZrU1jsZQpluoRivLrLEH8G1qxhI0U0xbCCzyopiLWDQWFI2AqhTQ6RyU9SIaWYu1ykh2wUiuUiE+l8CiEXHas5yIZ9gebKLuDctQ/+Er5Hq3ELptK7sX19AadjKZF6mpqcdgA3nGSF7MojeZmRiJ0bg9iJovIyW0qIKCXClhNpkxWTWgSZIfLWNdpMOU0tNSY2N68DQBi8ojp8bI5WdpCddQvWolaqqHcjxNSuegmNfR2WRiVe12Yvk+/MFqtD4dhaSCVjahXFApbFCwqgYsbhnUCoJOANmAzSyglQwkByepeLUYbEUkUU8iLzNQqFDrdxPNjGK0LWU6rmJf56c0JxJLzXKkr5uZ6Dx5xcEVkQ7cXiu5+SLotRREAZNexaGXcWrySLEymsQ0bY3NeMIJyv0RinMJbE12DAicTIwSjgssrW4gJZfYGFnJ0QUtR6b2cZW3ibZAAFNVPRanl6JUQK+xY7cZyZR
CtF1dTc4so5eM+IZHeXPLdewbP06yNInVFufAGQuzZx8hP5/CqHNw7UYnDw9nCPZNkEtnLllta8wmKPGSgH+9YL+c0vC27tcc+Nf7SZmLBfurzWWBu9ko0ODJcS6W5KZIE90Lg5QFUE67MZ9PU0jsQ6vT4mjuxLbIicUm4bxyK/HHe1mYGaE7ncJiFXljWzueRj/FDDiqgZKO1HyUiM5LqaRgsunRCiYKUzJat4g8L3H0OycIN/qp5MuIeRlEgRxurgwYWdvaQKIo86m7Pk054eSBv+5l8/YrMSXitNg0DI+mmP9qjkiznertfoxeKzqnhcywTKFQxOYXkAt6RKlIMSEhaHToctNk5uvQm2cwaZ2YzEae7h4jpTdTYzXRbK/iXHyeJk+UabGRL332Hm5fuQ6P3UQlV8VjA8/SZLOw+K0N6CxGSAkEjTLpRAHLQA7FIaANCugEDVpZizg+h2gw0vvgALqyFpPXjBx30Td0jImUQMDtxlYpcuvt70Vjs/P8D2fp/kmJxbd4yGSPs/KKbUw99CPUcorIpgb2/bpMh7NCSTZg1C7gDdQiu/QYfSEKvSlKFQNx0qglO6opx/x0mVImx4/Od7MkWM1GJyg+H6a8wl3nUxi1U1znrsXrsmNwLyFYp0HvN2HQK/TvS5DKxqnf4ka2VHB4PEgzFZqrqzg9MkE6P0XIWMKttSELUULlaRZqzATs+8hNVzE0dRCrP4/bYb+k9S0sb0fp6n1R4C/FCOZy6dpfj/wh2LtXqq/JbksXG/VX2r1fFrhb7Hau+sBN+A6P8NTBvaxxRmiuq2MsITNYAL1hCZ/8bBX3HNmM+9wPueHLO8kXFT73rYfY0m5AFPR8ZPEWjE1WJIsGXVlk6nAREyJ+WWD8kEL98gpqOkfeWEGvNzLzfJbCQoL65gAu0cDpgWH0djugUlbzzMgWvnZ0nm21IjcvD/GXH/0+/kIWn2EtaqbI48kM2ypparesoeaqAGrFSLmoUMnKdD2TZPWuIIVCFLtTQCtb0RYtpCsqzsAiLPVZxp/KMXwsTcisx2WD3qmDSIkQN27axsm8gWTKjTQ5zvGFI7y51Eyzycw5cRytamDLR27B1lyHyaTH4yiTm68n8eRJpD0JZhYchHZaEQsq4lwacS6BX5IxImIzOdCoGtD2Ei8s8PzkAFtLK1jcaSGdimE0O2msn+fAo79g6foPs+l929FmLAQPBZmzePnynefwGTIIkp8WSyNOn5l8UcRcsDBzZpqJo0MYDWb6Dszh9IR5/PAFRgpddFi8rA6V8CpTOBZv49jZAWKZDE5Vwqst01C3mJU7F9PfP09R1mL12NFoyszPmQg5RMRKA6WFIqWJNCPHxrBRobrKwowskE/ZQG/AY2qmoaaeRX+2jeEhleH7z/H+zUVUzye495G9l7rEEZa3A6ACSlfvSzrnv3rX/lrnUnTsl7pb/91cFrhrDTp8q7xsrfYwNjZDtbFEvJBg/Q4jOxvSqFs/wKfePMinbn0I/1tuJTGpUM6nWWzR0tZ+B9nKcc4mZK6Qg8TOzmHXF5F0Ovw7q5GqBewZFfm8lqIsUohGmZxWyA1P0xAJojgFEvkMqtXMs7OD2A0m3MYAzYYKOmWKgw8/ym2f/BCeyihOg4/EXBKplCWWOMP5SjPNoyrpGZlMKkP+QpEVb/HStjHJxP4YzW9o4/zDk0hRmWC7gHdLNTqTQCHlInCtFYd5gMnHZhmNTtDpa2eFvYaMyYlHznP0rqf41YwWs86B0S2xaXUn5w5mcNhd6Hw2KqKIWCpgK5tZ6BlAnoOCoMMcmkIz7SJ5dpi6tcswrQ1SGRARTqQYGY3isC0wnTcxV7Rx2/obMGQGWXvzzRw7fJbGJSW0usXsuLKeJx4+TkewhshbVRx6MwvjA4QNKRTVxlxawq2dJahEGO4v4xydYLIg4dYoxAtacuMzVPnTyGqe1UYjQZtCZ8NyLswIfP033ZQ1U7gVleubOtF7K1RED/EqHbV
VbTz1lX1sqHUhBkxs+GAIS7CJ+YEolZECxWyZhtowJSWFlBSYnLcjCkkcZT9nJs6zLNzEg3//OOOxCm/bsoI+/99z+u5/IJmNX+oS/w/5bSd/KXIpu/bft/TA6zGa+X25GN37az2G+ZOeuQuKRHHIgtaXYjylpyBe4OqGG5jqFwjsfBfHP3eBL9/Wg+7K9zJ9rsB8/3lcksI7b34H33n0fiRFJBRoJ7swRbDeQiqpwRy2ksmIOBJOUolJunorbNwiMD9YxKVaiLQFGBudpKa2hYJWpawZ4lxikmazkw57BEd7E3/WeQ0/vOdOHvn4ad62cRdyyoSgFtCW8wwnPCgVlYFz00xMRdn5scWUl9UhCxWsrjqoVzBJHqoj/SiqkwJ6NKqGYiaHTjAipnQY9A72zR+g3uai1hVk21+uZuwJHen4UTZc1YBt/zH0jR04l7UyfvQCy5w2TsxP8i//6zCf+sbNmLwFLuydQshY8VlExiamaV4VpJCwo1Ht5BNFzFKQghTDYDOQTic4MJ6jyqcwNTeIX7eWN21zc+j+syiVJHPIFPOLGD17jnvPPEtGEHnXwG6G8mlW1y5hb/8AZn2ChZJIKZmmNy+xyO6iN5FiLDGO1uSlUk5iNHrIFVLsqFmMx2XHYHazEMszkxvCo89Sa5FoDLRS195I6J01RE8skHoezs33s+uLLaQrHqwuPbJkwmzQUhuppxweJXFYh73Fw0h3mXIqidssM51PklP6KCJhNkik0n1sq6ujuiHL8z0z6G1xrIn/Puu5/6FcjrD/v88vMvAvp2v/Lc4vF/lX8pjj65nLAvdiVuLIz45gkku8t9PCgLiCY0MnuP1Nt6FLptn45yFOnlhD+Bd9lHUKZ47HecutDaT2jbO2wc3ZjI5CQYt3cRg1nyM+UKa+QYMj5KFYLDM/JrDqtkWUvRbiiV7CGguSvkSV30t/Vy92k4BB1qNRozRY6ujsbMa21oWUMRK0dCDO9fO3hwZw6QMYjVX8xRtW0TTUT8VpIl6cYfXGWyj5DeTiCcx6C0arhnK+SGx4Er2+HskURZvTU54TEQUJu92MU6vjti8cZHe9jbU1W2n9SCOSuUzDGpmWHTfzb1/8IbUuO+lohlBTiYHxLKsaF+NzyGx5/2Ym+oexFKDv2UEMUgBNWaG9NUKhAJnoHEVRJDeYIhsHgwInuqew250szD6PPl6F3hLmxhUShZKDudkeTP4An//Jk7T5XPzF7pX86M8/R+xEPyV/gJGRccYyBZoWLSI572O1A0qSl6ZQlLNRmZH8DKuDVsplGYvNSo8o4jNnMFZpsHjD9HZr+Neee7FqVN4UqaMlcDU57JhrnMQndEQnHRQmz7HpY1spKPPEzibw6pxk5otkTAVsDheSO8JEeQhPV5rKQhKtnKLDaiOr1FApT7JjSR2qEGbLruvpqOrmxw9PcM3aBIu2f43Hv7T9Upf4Jc/lDPv/O+4iAf9KxzEvFfmX+qekS53LAneDyUBzWxB9ycCvToywbLGX1toAp5/eS8NkHff1+GlV9xLe0MlXfikT0T9IdvwveOLUJAcXRlnd0MbRicO0DjeSHIyyY0szZZOO9GSR4kQWoajBVGsFVSIQMmIuWFC8bmyuPLmeUUbnM5zL5rk+3MyKcCOSqifbm0fRq+yd7qY1uIurQ1M0moz8auI+ntp/C5ubN3Bq4BQGVzVPHTlC2byBpVcZEHRaxESZs3sGcNeEEEwWqoJGpvcN0HPEgb8ziDw/wtf2PMB6R4nVS7fR+rE1FPQz/NPtUyTifWyvb+RCUWCZ14TProHBIWqDNjReF2ZDJ4/d+S+cG4uxUHbhs3gImC1sjixF8tg4sn+EaCzOpo427j98CklQafT6ODHbxQpfC2ur29i2Jsi5GSfd/QrPxM8ym0/y8cgavnLDGEb9Rg6M2fj1Py+wolGi0S6zbpGXyKJGjh2vILrSdEXH+ez/3sBDP1cxmm28N1JDJZjl+GwQQ+Is71wiYbV
cz6l5MwfPPUwst5LN/jVsrKuhblWYUlxGEyswdmwY7YiVUq7I2r+9nqKUwqoJUduew+yy4q/1kClW0CkF+vfOMXX+NPZgCCoVNHof5YqJJreL6cpqlJSeQV2AuYVfsXj7dewfPcTZ4Ryfv7WFclG81CX+J5OGzx69ZMv8wqsH/mLM2V/OGjCXK+xwmeCuImAWNHQPzyEYDDx28nEifj+L6m7miQPneMOSERrW3MSppyf4TKsHQ+DtHJo1s8zjoyhu5N2+IySzQX69fx9/dc06ojkbjlo95YEecgsVGrcsIz6QQGdxYHeGkBNGTFYzw89NcmJmkFXV7Wjmz7K69Vr6DSpHu+a5faWfrlwPa51WPvx3DorSh7DoiqzsbeT+e0ZZcvN2KndJLGgEjp2/j/35JOefb0JDETEep1GnJZmJ0ra2k65Dk5RzGfb1n0I5lWeLfwmf7LgSwWpCo7Fz7muzTE/ejzE7TaMtxPL6Wk5mhzlcXMO733s1Tz87zA1r63CvreWTd3yHSnwF7+ww0j/Zh0GO49OY+WbXST5oXU3emKOlVaZ6lQXl0BTZskhA72FrtYUjCSO3d1RhEiPMzezjvZ9oIXSkkUR8jKrqHuwNW/nFz/vomu1m52I/33y8j5ZjAm7TekZOPE2d08+NK/y07d5ASY2weoeWH/7kIepbV/DA06NMJ3/MDz7zUY7M1HH40Gm08ecoaKspqY9gVk0Uy1bGxvyYJBW5BJFwDTToMazSk0vEMTrMyIqIzuokkyqg9Wixm/VU0g6WbLeSfOIQhayFQimLRkiSVRfIZHKIpeOUzLU82pPkmm21DD/cxx3XNuBUeyn7JVT1v+6GDC8ll6JrfzU7ML0S4C/FwmCvB+yv5k9Ml8VmHYvdjeqP132LaHyeYSmPKiVp98IZYwsX+u7nmz++k/mnEzQ3mzixP8VPe05zlX0Ivbaa+MQYj6aW4hT28fblN/HouRE+evNWhib6CGtV0rKd2s4qwkstGHxORvbHiB+Z5oezCWxyiUruCG/teCPNVVM8P9LEI71fZVlNCx/95Jf49rf3oXPosDW1sNyo4JVtPN99nHwhy86dVxCwztN3rJ+R2RTpBQ2j5RJzhWlaQ04mkwVqrH6mpDLr7G4SUhKfO0KDC5ZuqeWL3z/LLR8I0fVLLRtqAxRS4xgtNuaFDOvWhCgXejCVFY4ru1nZPM34wBxTsw62bazD2FHkwv2jSKlq1NgTHEtZmYmN8pa3v4e6jdXoq+303jVO5Ww3VqueBW0Tk5lDzI0cZl3jm+gui9x643ry01as+S5MV69h9rDAD/b8AIPoZsdVFk52FyiLJc7Gh3hL8zoy5SxZg5Prr9/G1InzyFYrVV47U9Ee5BUr+OqX/o77f/4FpsZb+fk/foXVbi0zahXjC/3YtSYa7QYaXGsxOiScBgWNWWAhOcvGL2wnZ9ChK2rQ2k1IlTKFHFgkiWxBQB6fx7Q0wrHvTOOYHsDg0oKoJV+Ex4ZH8biHsalVbG5fSqBN4KHTRmYXjnHHh69g7miGvp4Qf3P27YznL9FmHZYqdX3bB/7T+6/XEzN/DPaFj/5/e6kGvv0f9059Nd37a7GH6u/D/lKt9vh6dex/DPc/tFnHS8Zdo9FogZPAtKqquzQaTQNwH+AFTgHvVFVV1Gg0RuAuYBUQB96qqurYH7r2slCb+oMVX2M0nuZUbJSknGFncwsZPERqUrjNq+iZKzI+PsQ7N63H5DzDsaEKzx3q5bbVSxkcmiImiZxZGMJvqqEp7EcqWfDYdfRly2yta6Tj2mZ6uoeYmymxZpuFX37zXmYUG+9oasFQ3YxvqQ5SAX75xBPsaN/J2EIPe0eOUmPykVe0lB21LDZnseGh8yov/VMVUikbyw0iX+8ZZGx+mvXVQWqVGepDYc4Pz1PU6+jNxKjW2fj8T25Ca/PTe3+CI8ef5+pQhGkpRlVTLaZWBwFthecP5TnaJ/PG3RHG+xK4jBLeqhC/+ulehsR
+7ILKu7feiGjxoEgj6PQeSkkrK9a0MNizB7IeBIsHm9OAzBwWi4PE0CRHup7AZDNQlqqZLVqw6vVMq4PMZEqYRR/LnS6yooGYQWBnYxWRcBUZ+SzTM/VM5PcS8V9DMhGnZ76H4cQQX/nzv6S7t8zUyDxX3byRz377DpZHfPzD3d/hzo89T2bu11yxfDcPHjvBwuwJ3rN2M8uXbWdsaBJUGZ1Gxe33YG11IK0N4HaZkGUoFvNIxQJyUYssZfCqXtRQgdy0iZEfPYeU1iKZrYjFPIOJAqJRQYOZgH2KtcuvZl50sZD/KdvfuJtssob06VEeP3aEf+77F2ZK0y/+BXgN6xpePe7wyoF/Iey/hfz35YXAw8tD/rXeGPv3Ze7hxS/puNCbLlyU+72eo5jXC/e/AFYDjv//S/AA8JCqqvdpNJp/B86qqvpvGo3mw8AyVVX/TKPRvA24UVXVt/6hay+rW6Le/+bH+dUj+xCVODl1ij/fEObJ8k6Gzx/kxrV1xAclnpreS0vTcnJ5O++/aSXaaJxz87Ocn60g/F/2zjPMsbM82PfRUe91pBlN73XbzPa+3l17jXujGAzGFJsSEnAooX0kJECooRnsgBvYGBvjju21t3p7m9nZ2em9SCNp1Lt0pO9HTC5CDG5re8333b+OdM55peua59zz6HnbeC89yTwBNHQJeaqaK3m47xmWal2I8gbOhLyUWmGRvQqLQomYV9DebiZm1lCztpK+3izP3PUMLoMJf3ia6XyaHc4agqkQ9wwcp7EErtv2Dp44sICYdvDBT2xCuXCSPDWEQgdIpefRq2UYUtfivlrk4N1xvrbvO3xhxyVs+9Rl4JXR+8gRHnihh2u2Xc4LL9xJU5Ub/WWyJgAAIABJREFUd30bNcubOPnoHKUWNdORO1i5/B184Xu7+MxFH+X+sSlqtHG2d3iZT5mZGs/waI+fxQYVGbWRd2y2EQnEiGeLtLdVkQ9JyAopntkdxWqP8sSZSWSSQFRWxEaKaMFPqaKWzkUuiok8v+75LXqFC1FQUCiCXF5Kk1qgVNOO0yrD3OzANzrFj3p2cmHDdtZVLFC5Yx2zc6Xc/P2r+MaH70LkCZo7lmB1LONXX/kDVinKaFyOJz7E4oo11Cnz2CpLSHjnqFnRgD+ZJR2ScHaosF9aTj6kQhLShD1JjC4jgraAGNEiL+aY60sz/OQhXCUyPANJiuTxJyN0BwJkkOMwGmkwVrHyAjkybSt3PDLMnOc0X7rFhqN+MwceDfDBR69gKvrSG2S/kXENLy331zIM8vVsgP1yUv9TXkrwf+Qvif7NlPorFfnL8XpEf75k7XAO5C4IQjlwN/CvwKeBSwE/4CoWi3lBEFYD/6dYLF4oCMIzLx4fEgRBDngBR/GvfFCTs7V4+4Yf8NjRcQwqL5sq8hhtlcQNZay+ooMXvv0Ey8nh71rHT49MYY4+xy1XbkV+4SIWnpomoaghNp/DuFmBdughhHg7TPrZe2QCuVbAYq5h0WIRi2ghr24kVAhQ1Qnfu/Ms8eEkqPuIRZsQZQHmBB3t6gyD/hFu3nYT4/1naXeruGNogbHoYVqc9VTkiwTSTtTaFgwdWm75l1Yy8RTf+ORxPrChkemzftwmOdPzvyFDF66OakL903THNQwOPsGyjquZmeynqVLF8sZqoj41uaKbAyd3UV9SQtrk5KL355jfa+W+3f/BJz50JZNTEqaqOkpXy0kfnsETMHDrz3/EzWuWsvGayxjvWcCVt3B2wseugZPE0wtEMyks6jRpQYUYybOuppKxWAADeioqi3RUb6Z3LM4L4zspUUuU64z0JwNEQxJOUx159GxdJtK1yc3jj04xNjXHVatu5pkTB5Hy4zRUNLCovZW82Uswp+Nff93Dg99azwPf7aFvthe1RoeqWKDNsQiFKsvGtYvxKNNo5EWIy8lbFNivcCLm5USi8yyciuFqL0fUpBGKOqaf8iIPjDF+OklWyuN0WUiGc7ww3c90XIZ
FmceoyLK6wsSqj7+TQ/sH2P/sM3zhX5aSdm3m6393FGPiDL+Y/SVDkcH/9QC80XEN507ur4VXI/U/5a8J/rUw9LMV/+N1481HX3Ub50rqL8WrFf3bRe6vtEP1B8BngT/O47YB4WKx+MfNKWcA94vHbmAa4MUHJPLi9YE/bVAQhI8AHwHQixaODR2hRDfK5mo3rtVXU3FJNfl4Gs8+BRMsJp7z0ez1MuYJ8J7yFYRjWhS7ipzcP8VQ9AS7RrrZcGILN16+HpldxdOPzvGz4Cy/XHoZDZfWcd0XHmWNU8fNl0Upsxi47VvHmAyPs7WkiXhCxmn5IMtcHVxjDxMKubh4xUqmx8awWlxkRGhTnGZ9zQd5eupRcrpSItkRqtRRDj0/jXJ8G+//8CK26UcIHjax+l0uctU2+n5yNft6+9GHYjQ5avHM7+SDO77NsktnobqTk7fP8+M/7CWRnGWZM0mlQcWp4GEcSTu7f7CSnCVFR/0HEaQu+g7fS+z5AVR31tG6eRWN7gL/ftGtKO0C6lIDVUUld37/IMP+Kaoscmq1GRSaDO+94eMsePxkMkqyyTxrMiITCwvUu4r4BJHx+eN88t0bcGxpJ6dV8Z6Qnp2/6OGOg08hFU8SObiOp3oX+PoXVlEUjJwZDrFttomk1k5ea0epTzLtb+aRR26nED1GwHc5nsAcWpWD6WiABpUBizJHVWszh44NU72kkmACMiE/yrkp5BU70JhAnlGidVuQSQUC3ToG/nCAVnc5ho5mpufGKPgjHBiaJ51J4UuHsWk02JQi9To7lZvWE4sucOyon4JMQVBcicEK05FnqRMCzCf/4nru5zyu/zy21QrT/zj3Rov9tQr9L7XxWkT/5zJ/qfOvRvBvpNj/2P65KtmcK87Fmu4vK3dBEC4BfMVi8YQgCJte9ye+SLFYvB24HaBKW1Gc8XvZ0X4Zpe0N3PJYD4Xb7+dT29bQtbGTnuDTlOgC1JS8l3J1kIn0PipCV3Lnfb9kLJfk01su5+PvbcDg6mLfCT8nD42zc26cj9dn2DmdZeS3AyyV9XDZqpVMTIXpeXiGNvspjs0kUVU10rD0GupHRqjdtopoPEGV0ortkhJKjvTTf88ZfF4567ouZmqij+1OFw0bLkYdOcyX/3CMlc5GFFKAvmMp9LXbeLB7nIO/y3HJFh13n/wDueRBbt6wkb5jeXSuNSQKB/jeD9ykQ3s4PL2HTnc7X/2HD5CpKUWUJVg7I9H30O+590QfBdUCVzob+eldg6x1xtl06+W877N7+PV9X6JFbOU/vvNhDv1qCnOTkx99/9sUJZFI1ode0UJn42JK1pRTbCmhgBq5lEch5pDHC1SKGtQ1BXw7Paxe0goddSQEHZnJLAWXiK7awr83tmK+8J+YfyjCw7t+zW33iKxas5KzTz6HPRpF2bCEJ7r3UdNn5GSohJRUYGvFxRhKFNQaNPT5hynVKpnMFWhJ+tEMKqFQwHtoFLlWhsVdQShRirS3B6vNwejpOGaLSECWIr8QoM5VTv/wFLqhGpTyHChKEKRepnNR7KIZIxHiRSMykxZp6jC799pZX54hre8g55/k9AEHH1zUwZINTu7+4t43La7/PLZN2rIivD2k/pfafaWCfzmp/6VrX0smf645HwX/enklmfta4DJBEC4G1IAR+A/ALAiC/MUspxyYffH6WaACmHnx56uJ/+qA+os4HFq++P7vYutaoHdBxdW7esg5a0gk/Xz2W7/jxi4fKt2tmJQLfGbLCs4EbYTic5Q46mi0SjjNJcTTMoYfGkDnSPCeS2I0l66h0WXFF52n60uN8K0uDp88y/HpQbJ5I0qhmi7zPGOpNGZfFIOxlcBEFrk8wdhImlgiTcgfwmouR1GvwHlhNY3mVfh7DpI/YiAjNPIud4An/SEWKwuU6OV89bf38u6Oek5NjfLVHx7ll3e/D6X+H5F8KmozPXz96E6qhWVMj/2AHR0buXHTRXiSKoy1DoZPeSlrF9E
49LRt3k5F3w9w6xvplTtZiB5mQlPHoV+eJR/fxZ6n7+S7f7+fH/5wN5/55jXc9eXfE0xNE0jLqdLrUac9zE6omJ0L0uJpRqVWUBQklBormZSfWKrA3G4fylwa17JmXPUVJOJpZAaQiZBP9+A1bOehr/yWhpoKrr1mI+7lLfz43x+lwbKFJ733cZXRy4U7oqy77GqmvvAC5rRAUR0jnxRw1C4j4Bkjl0vhMGo4OBOm3qYgmIQLlrWRTocZn59k0doKZDVWYsNK8qkBNLVVJOYVzM/nmJ8exGi341voIVKAhfwgomBke7mW054gcnmci5u3UVEp4glNYCkvZfG7DUTnmph9YZCcFOGIT4d5LIhWa3qpsHvD4xqgmEq/bcX+Snk1Uv9r97+U5N/orP185FztxCR7uQuKxeIXisViebFYrAbeBewqFovXA7uBa1687P3Aoy8eP/bia148v+vl6pKJRJ6w7CmmsgpmHnuMJUuz1K1o4Nf7R9lmG2N8ZBPNpWfIy+IU1GbcSzTYSlbSqk6xpqSZ2nUlJHwajIvrmJ62cfj3AfLRGUbmUqz8uw3c8/MU9JowpCJUFib56Jo2Vm5sZ+sVF3FReyXpRJZ8JkRszs+CN01NpRplOkXKkySnzJOPKwn3hhg+EmR8WEU46GF8LIo/paZZlaKo1tF/eBCHeQ2HA+1ce+EtzOeHERrdHPp+iMe/9yQZKY4UjdE7O8i3vvUfdLxzObbta3FYrPQdHMCgt+KZyDN/ehS5Vs6Htl+KVecmML+PCkcjQ8kAh8eLNNa2cfCuGZpNAtX6CH3fP4Bl4STNWg1/39hBs9aATJTISHnsSgORgQi+swv4zgY5vW+eeX8UhWOa9q0lZORqZCE5c0f8zJ2MEB+Sc+iOYTa86yqWXSTRWCKw6YYyFgql/PLfeinVNPCjA9+h3LoMbWoxM9Gb+diNjyCLpZjBxENjHrQV8yz+JyspuY6tZfUYtWFkYhZvdgqrushEMMHhoXkSC0kUcjtC2sDQwXHKTVZUMg2hhTSiIoPBqGLWv8BcMs6kfwylrIBNp8RlKmNVVYYlri5spgL+vBxjRRdnBnaRm69k/MgAGo2f6sZqLqyvpNRdj/IlVh94M+L6zeDtLva/xv9rYv91c/k53Uf19Uxi+hzwG0EQvg6cAn7x4vu/AO4VBGEECPJfD85fJZ1Xo8mpkR06xfZtixkZM/GTx86wxQQrWq6hdlkpKYWSKa8SrbeXL//KzkdK7qdzzcXMpURm957hYMCBffA4i9ztBCsrWbaqlUyVlt5vP41x5gx+SeTo/DQo1rLFXkZYb2fRRhueE340qRQyX5JcuIi52UXeJEeZFzDqRfQykXA4iSImorUnQStgqFERGUhRrhf5zXgag2KaLlsrNuUfaNLWsL+3lpsX72D4C0dYiAfxBePMp1UsKXNSKdciuPI4bU5CowlKF1eRO/sc/n4RvVWN3m6lIIlEEbBbtdzo2MD+mXEWUkbS+bO02tp48OkH+OLNl3Dg9yGmioOMaFQc8waZjfdzWdMS2i7vwLrKTD6Uw7/Lx5EjQdaului6eSnIlBSjUTBJXHh5NSf+/SD2tAO9GkrboWllDcHRNJm5DGLQyc8/M8ZZ/x60uTgXt23hwsom9nn30eBaTpnSwo3LdAhOkYPd49zyziUUfDoy4xOsqbNycCRNpcGITBYnEBUoK1NzbPIsKyscLKpoIBMWOPjAUxSlKPGyRuK+Ar74LL3zSsyKIDOREDqFgpwgIU8XcNvs9KUydJidINZR1EBVfQFVs5t14WVksj7qGhb4P79ZoH/8k/zii/+IaV09wp3KtySu32j+1sT+p7X4N1vsr6Yk8+vm8nPaqXquN8b+I69K7sVicQ+w58XjMeB//XWLxWIauPbVtGvS5VDWmPjuL/Xw9DN8vFPP3y+po8a9iILOgG9mAf+CjK71ahSrWlg37KF7DA6Oj7G8XCBjmeP5Y3B9ZxPlq424atZTLJEYfyL
H8HSC358NIhhW0mQqUCUa8JwYpnSxgKfHTDwcxdnkQLIV0KfTRJISgk2BplqDUpCTT1mwVQlYVroRM3nEERvJiTgljfUUx8zUB4McCU4RKmb5u59/ivHdKR758X0s7lhM/XslBn6mIRIfIp1MkEuN0trYiffAPNY1bvR6B3LDAo5YJwt9E8gNOnR6LbH4NLqqSpL+WYJpNyOBs6jkYQKZLJOBXr774C3kfdDS5+H2nQGmhGlKZUXet3QVxvIqaKoiI5eTVSagzsTKyhnsVy4im09RREJuMSHlJOSCRMv7NiNKCeKzIkVlgcGDEgOPnSaeP8b+mShJIYxb48RqLqPGUqBrw8XoH78fZ0MXYz17aei6iLsefpDW0hqCvnKevP0EP+3t4yvv/RgXde3lmd4plHMapmJ+JuN+LDqJofkUg/ODrAwkODGTRK0voIqkmZjpISODydghtCU1iIo8MlmMEpWOOnsZNrWdeHQYQ309ubSAF5Ej+wSqxg5gNTnZv3OGCpOV9dohSt1rSc/6+eEnQ4yN/vXqyRsV1/+fv13OheDfKKn/kfNi+QEpr2Rsj4wa2TFQR3jyVBinWWTD319PIpah0H0GpZSkoFnMkSfirJRnWbLZQsa5FuNGOwce62fHxG62fb4CoQixEYnd/zzArtFdhCVoc9XTpPVSrawkYVcjiqDxhumf3IXe5UK7qhTjGhXyUQ+pfTLQylFbVTRdvxy/b5bYwSxZrQy1TkRepkTlUzLf50GQyTmVyOEyWPGEIyjzdmTeITavW8uKT3eBUo1n4HuozDJOTXq5eu0HqG1U4OufwyckqOksJR6Vo1TKCaeU1C2pIxZP46xq4sRvBrAIOQI+DyYVGBVpKlUqguksU48HcVSZONw3wywDNAOfuvEmvLE8amcJwlySQLRIeqrI1P4hNn6lhUKiAHIVMkWGolCAvJJkMoLGoSQ1laP7nm5kxBDJU0gVAS03tFbx/Jifo8F+blmxhQc9ArtfuI17v/Q1fviz+/nM9av4zq9OU64uYdPicow2B/6RBT7YKOfeh25ho/sanKVV9OQGuLjFyUJMoDc4TyY5hVah5dEzM8QL40gJM5HxCLFigkZbOWZlng57CwFhL3p5CyHg2clhLu30sfZDl3Py8QjtlXnGBsbQ6PYi8mEeOWjgaP8vuecrV7H4gqs4tGs/R3rzZDO96GQvW3182/G3lrW/lbzWjtSXkvMrEf4bLfU/cl7IPRKLcs+hhzGkXTjK1+KsUlNTCPOjDz+CXeugzj1L1/aVzJ0IU+U8iGRfyRP3qTgQP0j7r6Z43jvDrZddB9pKdt96HL06xp6JgxRzei53FMnIspRWLEHpNtKxxElw1sfEmQDuhkqKo150ohvBowJtHZIwj6nFhGBXkSsUKKZclCzKEx3Kk5eDdkJgrNdLzOdnIKXEJUsQTWdoatUyvV+JPxWkc3M9RaWcuz60k+HgPNnEEj7bJSCp+hHFJbiaGpDHRfzPe9HbdGCSY5BLZPoTFMQAfbsErLEwgXiWkChh1NeSS4epFBVsqtGzcHYaY6ucRErBBbYQay/9HJJLiaNe4vhve2ndoMVsrESwy3A26ghNyzA3FigIErlsDpUokMsmUWlVFJJ6hg4dwWYT0WvsyFVqjOogSCZ+2d2DDvhC52L0eRfRlJeuRZVoZL18/v3X8c3fPEiDK4BOK/Hxx8M45TtJFqKssNVz65abkCHwm0MjTIZzeCOnuPWdF+DyNhKaG2JsIcds2kOD0c7pYBSzTo+COEtLVGyuuYzjoz46qrfwwuAo7Y216NQRGi+4mG9+sxev73d86WMf5OSwRF60MHL8cT533UVcf937ETpbKGjkmCeXEDz5MFLGgFz2FpbG9RqKS5b8r7eFA92vucm/dbG/nWvtb5a4XwnnhdzT+RSpnIlkYYCUN8Tg3BTazZ9gLvkoo4lRMjk5x2/rx+Y0seXyVRzumUWpzKKJHmXVVc0k9pzkkeFJPJ/PYle
n2D94ltl0Gpeopap8KbpyB5GxMDJHGdnSEsRMEWeFRCIhULe+neeenmDZlk48wzOgGkVWZiXkSWIwWrGU5/EPZog87CWvK6A2ZqloMRCOy+mZ2k1MCqBBi5Sdx7VuBt+Ak6Ayw8S90zwx/gw6mYQ9eRJz7bvRK3Vk1Go0thQ6mZ3Rk2HyC0oq1yRYepWNnufnIJBCj4Kp4AJSLouukCaa8JHIFYhp5cQjS1BlJTq1Tja1ubC+8wF+82+PcM0HljPbPcu6a9ah6BJBFFBGVPi7RZRxBeEz8yirzQj5DBjVZNNRon0KbLUSTnsdsvACcxMJ5KoU+YISIS/DJY+i02cY9jWwYZmTDvMZam68lMfv8rFEcYKrW8qxlbnJRWu4vzGGoHYyFJCI5Q7TF82R1zYzmNjJZe1VjE8ZiebbyWWnqW00Uj6jpEHMMR5xckN9jOXLHRwZWsrO4xM4FEM4bKXsGRjk0m0b2XXiEO/+0Homk27U2TvZ1L6D79yzhzWOapxKiaaqOP/50D202i/gI201xIMyxvePYDKaSApl5P572Pr5Q3Ht/xT+K5X9my32cz2h6ZXguqL/TRP839rwxz/lvJC7XBAoV2qI5dPUa2UMLyTYdSjAfDrAMkMdA2ENovoYk+NaXHcbufADS3h2VI3TrGDfWIRT2S2syJ/gyms/wWM/34vK5MCZDCMl58kpq0mrg5hrrfTvncVhdyDP5dFVmjFICeaTStrMJjKBBHq3GvpdJPu1RLJRjMgIzycI3HuGmCRSXleGymxDY1QxJ5vFE89Rpq+kS29k7drL8Z1JccYXwTpVwuTzu9nqMDMWT1NnXUz14na8/lkMOi0GUcmp7iQbrqqkYK8nFg2gchhoc4dQTI7iP+rAsMiIsVxNbiJFdE8387E4SSFPthhBp9CTXBjluM/HnTddQrO4mOn9JagdILoVZDNpcgWJ9FySmlVmIr4o+eQ8tho1ok6DmJOjKKowdAgIsixj3QvYdBkKJjlT42E8oQjjcR9+yUJzZoamBpET46e54l+34fMoaS8LU/2hFmSDRpLjGew1eYKhKWrUcOf9T1Kh0bOmZop81M/GMpHWmk6y6jgLnmGSeTOT/UY00SmKhnJsTXFMlUa+9huRzRuX4S/Mks1E0RWhYBSpWK3k/e+/lP3fi/F03zeosJvJaVtxqB6kvspJR/v1HD6a4L3Ln2fnwADfvXWKzUsXsf5KPbffrmPWN4tRq3urQ/xlKa5d8rKCf6szdvjbKsf8rXNeyF0pKugyGWhdtJnJXClqpYbx6C6WW0zMpxdwKwycCduxqkdIYScypaeibRn3dR/j4yVNVGRL2H7TNo7d+ywHxscJIFFa1sByUcBgT2PSVhKVJ7hgXYY9D7yAe4OZjo8uJ5NWMPuLXho6ZUx1HycogU5jRpeUE5wOMvWUD7m9CHYVlXYjwV4vYZkSk7samS6HohhHnlDRvnwD44lB0kNyllXEKd3UhWvEzsyQwIhvgq0dah79z/vRaBtwVEKFXcBRliZmbkFlSiKTZIhpAblFiULsJDcyhrGihGQgTSaZIZQtMpkKolUpWG13E603kNSU09I6w+WeSzkT76Ov7zSVFTU0GkSUBQWJ+Ty6mIqwN0o8myafc+HK6lAbZCCXsXA2itvmZM93BqmtV6AymjApckTG04gqL9tq8swEq5AyZvaMzbFlqQJkBiyFLK5r2zj9+yS/f+D7fGjDZcynJJREOZnO8S8Xvot+z50k03VMTflYXL+S/micg32/Qr7ma+RiU7SWi7RVGMk1ruQrX7sRW/k7mIuMYZRHqK800GqWsXL9EigMQKmG8B/mOTvTR1GSE/CpWWR4lu2rK2i9eBtf/fp+bM4q7t19ltWuWjxRP6d6u3mu96OEZxeocfZC4vzL3N8OvBVZ+x95M7P3v1XOC7krZCo6ljaT02ZxpJTEZ8vpqKhA0uYQIyZGfOM41INECiq86TgnTg1RXWvikmtvJJLVs2KHiDA
bQMo5MGuimBRpmoszqLRLmYnnUGkSxLMxRgJO1MI8muoOiGVQiiksZjO5lRWYW8yUPJ8hpVISVgRR6lUoWrTEEgkM9RZEjZ6GajuTxyY4duBx9i2MoFQaqLPUU3Qt4NatI6CcpPpDq3junwZ4sEfJ+7Yt58oVVci1TmJtOVqbNYSmJRZ8k5S1rEMoihAroDabKU5HyaVznH5wiIXxOQrHRKRYgpQsx1BoimA+RUgq0LZGxX3PHWRIpabRWkHGEKHRso37p3ezSFbGwr92I9eAUVRQZhXR2TVkUxKl1mm8B4LEYi48JxbQOUOktTPUuVV4JoLIlWoURQU5Ic6IP85k0kyZQkFv5Gm2Va5m0cU3EDpjY/yZE4SlFOFAnEzayK+OnEUnujEa9Zwa34VbrsdmvoqbPlTOd+/Yz8Pjx9nYsZXm6rXc8+wnKVdt4aRumhvcN5Hdn6ek7AqSwT5u2biJ2x4LMJ/YR7x2AzvvGKHKruajOQMTp49jKMqxaMqwqgscnwpgNG2mKWLFbslgTx7h5mWbyEmtBCdHaG+tYE9Pmg0VUXLt38Df+9ZvkP1K+GvZ+/mQtb/RnA8zVV+KssOG/z6eWxV7C7/Jq+P8kLtKQNNkR8yJjO8bpaOyQEpfzYDHT335CBlxEbunNZilEwTTYda7XGi2dvHcJ/6OirJOykyXUne5gp4H+5mRLRCLiOgUS6kOy1ny8WpKTAp0Q0pSvRFU9UZKRjwc6pWjk8BgyaHMJEjK8qhqBAbOJGnXa5FjJO7zolYYkIQERqOGaCpL0SFjNO7DLAcDEm1GFbpiKfc98TA3fPMaUGWQ+Y9QIe/m7kfy1JkWsbUjTc076glq9Mgb5JS2m4imMhgUMrAUyYYyyI0Kov1hWi6sItQXIXU2wHQqRi6TwKZMky1EMIl6SipXYFHO8MCuPj77+VZMJ0u5/czvcOk1VBk9dNRlsZZV450WMBR0eE+OoygRka1vQREQqLInkAI5qppMBCbS5NIC6YQKWVrGpG8CUWugwykyHQywIEszF1Wzdl0ngWMpDvc8ytCCiLKQQkmaqXgf9qIHszDNmE+GSISIzIYlfjd3/7yGSrmC7miKZ0/9gY9d8j5ayx14R7wkddfz9fu+zrcWb+Wi1jKeGG7neN8JLqwoYrWmKaGD056DdI9lue3eaTa01xKLn2be72FJ8xLq9TJCk91Ehm2896oVjGYq+Ocf38s2x5Mo8j2Mju4gn+xlx02f4b77f4tSSLzVIf62463M2v/IG529v1S9/U9F/lKcS9F/ZqTvv4+/W9/2utp6Kc4Puas1JNMKyCdprtRSsG0iMRDGbohTyK6B7BA2IQUyKzEpS2x6ntOP+9m+/ApOj8docFtAGURIZ6iQ+6hsXU6ZKUYg4EKlBa83gkojI681YNFVMCKNIU6rqL6qHGl2gf7fe1FaC+QjED87SbrdgaCS8E1GcTtBrjezEEgxPRIm4g+SkSKkRTvry6po3taOLx7GJxWQlxnIxPykZRqmolbyoha1Uk6hYKB/ZwC9RYXCIpFPKghMn6LM3kbNNXkEmZFTu2ZprbWSjmYpa1hMQD1N4ugE+oKFM6E5NGo91SobX739+4R8Olz2KIfu0mIzh1iq0eHLWlBZq8nLnHz+jp0sMltYZDdRiMuwW0sI9CxgM+iJKoqoMhAYlYiFIen3cGp2jDPhKRqMTcwFznKB7WIuWybgSYf52IUriC4UefzIKU4G+nHrZXjTMeyigmX2JoLBAJsayvHmfSSzLiZTs+B+D4cHnmdRyWLWlGY4Pn+SQOgYK27oQi+tJBpKsn0OWozmAAAYTElEQVRmNeVr1nO8v4SZE//Aco0LeXYZHv/HmEgNkcmrscizhBJ9PHJ4FKO1mbAwy+GZbq7tWEl5bi2BkIRn1E8wl8Kcn+eoJ8w/LL8OoehgTmXkQ9/4Mbcu+xhqxflfc/9/nTc7a/9zsb+c1F+KssOGVyX4P5X5y507F7I/L+ReoIgtZyN
QjCGo82Rj05SVJtBVlTLYHaA/EoFiP26VmTqdBSxujPI4zwx7+ex7tnFoPMfSliCRdIKxhJXpZA8mWR2rbDlOPeagdrGd/qcnMRRnWfb+rbTI6jl8dhDRJCJpbJQb5ljoFpkZ7ycfUTD0dB92lxyFWoe8Rsf0832clBx0til54oU5Wl3lWJUyLvnyFgLH1Hzuzp/xzqYq5IKK3JyenFJPNDfNMkOW5bbtWFRarC4lnr5eChojzjU2qitryMfm6f+VEVl+luBwH90OJxX1SuaSpRSjCxyfSTIUHaPRXMsym5Z3fHQl44/v5o7nTvOJLR/i4RN7ee7wBO+sX8yx4AzzwTipgo4qjZdQUkE0KRGJJshNJnDIGknmgliMRdKpLKGhJBpNnt6ISCKvolpjoj82ylaLA411mkgaTs0osCl0PHi8H0n00WIsYpKZcBkUNOtdZLVzVLddjKPVgGHQgmc6TXNtJ7cffgCVlODw3BQ3VndxzXs/T66pguknYlR/IMfTP+8nX9zGyQdOs0Y4xbvL38GR4RmUpoNMztiptLmYDM9glFtQSHJkygLE09zQsBhfLMlPjvWx1LSP3EwnDXobNXojHeYZLqzawLfOxKmQ5FzQ7uKK9ioE+Rh53vJVAt5WnA9Z+x8519n7uZD6n9//coL/a1L/a/e8XsGfF7M75EqBpNVDeDaJokSFrcGEekUl2sXllKxoYGPNElRCC7OJFKFMiEWrXLS16fjidSvpOfo43/rd/ZiqK7BqHFh1MjSijgqtkr6CDON8L9nnT1OiSFC2eSlCvQxJlUfl1JImjarUjORWY2yV6PZOsmvuDFa5iCIv0t8/AgoLIWOeyEgPRmOOJkMpVSVNLGpr5JuffhDv8Gm2Na2ntXUT99z0Qw4/8ChLV1ew2N3E0agJTXU9WbeKaF6Bdmktzm3VmNa4kBqN5I1WpOg4ROepqa3GIIoExvLMD08wNTGGrDCHVEgzGfGw/MqlTA5FoGwll6+9mOGZCZY67Cwtb6PEXUqDoYxEPkI47cWqq0GrzPC7wTMcWBimtr4cSzHLwpyfRw+d5bnTQ5xNBdg904s/1M9o7DROm5tFWgf1pa10ba/nhVCCFqfAb089iEzei1kl0FXZSUKUMMt1KFChpxGlTc3cTAqzyYlSLjI3NYpbLkOJAbeunKPRMLlomF99/8c07DCSfKZIJjyETT/KP6yrJZCr5He9ewmlpklJWuRihHAsjEunw6DMUhDllKrrSMqCHJ6aYCDso1QZw27ailufZSY0xkjgCJ++6qtEdU0YciPMSwd4bvBZKtqW0PZ3KzHa9W91iL9teDmxvxEZ9su1ea6GK55rsb8SXovY//Te13P/eSH3YrrIwcf2oZYCmLvKUDbakDu0yG1Q4oCneg4yn5/DorSTyOsIeQokzyYY7I1R617EtqZK4r2zGHQq5Eo9WUnFYCKIKRXhheFxnuwdA7sKvR1EQYHWXYJNNGO3laFWFiDlIF1Y4Gg0xGxB4tB4N2fOjJLLxEnJ5Sx+1zIWhHmcrY28719XU9dg4okeL9lCgcCcn0jmOCcmRrCarUxPiORzWYSQnq11rZw5PURsJkAs4ENUSQgFPbK8hCJaYLJ3nKQ3DVKBkbFp1BYlQlHCXZVgJJLjVGQal9rBTR97D2UrNZhzOuSFIo7qSkYnovz4yB52es7ij0pUOctYU1HG1tWljIR8zKRjfOrq9yDI9Bwb7uf44BBPThzBlw4y5O2lrkXLQDAAmSIulQVvcI4LOtqQDCosaKmRFzg20sdoMMp4Ik6jwYDdqGBJSQXNlTVojHr0Oh1iIQthiag3QKnDhEESWFeyGD1qUtIMNm0Kvz/JKf8Eh/f+DvslVqqVnejmdYz2jpAs5MkIMuIyOcFIkRU17awsE1lauowGWztGlZ517S3sqOpik8vNBWX1XLJiO56oD3k+iyc6x2holLv37CSjhBvXXobbZiCd1qKrqUNhVBD1Jd/qEH9F/LWhkG9
0Nl3y44NvScb+Sv9ZvF7Bv93Efi7aOS/KMjkpR1fDMmxbVMRiMgxWOdlEktyYyK4f9VKtE5jNShhlEpvrWlEKTrr37qV98WKe7TlLMKhh7HkDmZSSckoZFUZBIXAiPkyTuZmF2AS7ekRqvNPU9LuQm03YSoN87aaneNeHl6OXCdz2wz20iWoO46NIFUd8k8iMHVy/WkF0LstHLtqKf0iO1jPGxMAQygi4LWV0LOvE92gQv6RG59Dxvi+vpu/wPqqa7Tx+6ihbqtr48e5H0GXNbA93sWaLlvBpGTFvCF1CiWDWMOLxoZfLeaH7ELG8Et/ZAiUGORbJwtWXLKOupYYzv9hFw8pWHvjFfobjWWSSjBvWvIfnhgcIl48R9JcQGg9imjNwwaJ1LK6ykyJBo66Wbt8kcklGUa1HIVdRVlFJ2L+AUW6gxlyCoMqj19Yyk5Rx6Sc7ePqnQ/R7/aRTabbXlOMQ3Tg0KhLBJC6rDrO7gukpD9OeMMhSCEkHWpWcSDyEqFFh0Gi4wXEBj08MMp+MIEph3r1jI+svuJbhu/exctsG9j3xJKdnJ7GV1LJIbqazsYTnj59EFolRUVpHTsoRK2ZxlFaSSHkRijIUGhNavYkys5YdTRVMz81Q63RydG6MhYleyq0mPEkT8lQ1Mt0AhWCI5PMiykLurQ7x85bXIvTGm4+ek/Hur/ZXwJ8K+vXsm/pGi/1cSf3P23y1ZZrzQu5yjZr5oJ/EUzb8MR+ZYgRFUSQVn2N8TsGM5COTS5Mw6mlqbWHqWA+XuWyMpfK01Jdzes9uUqUWwo4os2OgEh2IxTSpghuTw0SVWsVEPEpIrSF2YI4Ltlv45SNPMZMs8MP/mMRuqEWv0lFX0kYqOoytuga32826G7sozqQ4eocfu1WkJDTNJ+/+NQ2aNkRFiitXt5KVwnSsaeIzj97HO+Ur2PWtDP50kAabla+uv4J4fJZur54dtfWEYtP079ShVEbpnQnQXFnOw929LDc76c6cod5QyVDEQ4moQy2pWO1uw+WoJTQ7iEah4PNf+Tbvu/hqNhr0LExmKEizKJlictjCpnoXVreVvtEipwemiAXGeWKsh0XOFpaoOlHkRXqiA2ypWQoECEoSl9Y00NpRxeNHx/jPvnv4yiU38chth/EH4gz7g0iKPLmAlSsaChRFNbF8DrvMiHIxNK5rodNpJjtboO+pM5TZNMjDMpQLOlLFFGMhH2IxT0qScJY08fTZBxl84reUVq/i2z/9PkaZSE4m4jZmMdktWBUWrlu+ktHhCVIZFQXZAjZDFdliHK1RIqtRY19URdKTZ3JyGJfLRZ2xBbUvjEorZ+fgMEf6T/G5z3yJ8J2/45BXJDmn5vTuF8gWCm91iL8sr2SGasmPD56zIZGvN0t/vYJ/veWd15rJvx3F/lo5L+Su0MiJhLzYjVU4bVFiSTP5dJbBeT3dmePoRB0mLawsMRPMCNhkDUQzo+RK1FRui9Dz8BDl+5w83d2LS93ExbU25n15BFmK3rMD2OxWGgylKPRR+udHUO5PMuDP4lKbUMrUdNormYuHaW224Eg0o3AasIgmUmejFMwi69sEgnEJlz7JPV/7LP4zI0Q9aRKzafRbq8j2qFhd3kqFrZSMlOXhs0dRaXX89KrrCU84aLVW8eTUGeL5CBfX1tE9dYol9e0cPLGXXK5ARFSiFErxRiJ8cMlFxPILTKOkfZWB79x/GzqhlsUVZVRYVvGtxx/mk+s7icYCTEWMLDNWU0jkGR04yqmCmjJ7HQXRwKnZbsJpH6s7r6PBWIY8GWa52ESYWZZ1thGYCyBl3KQIE8+PcvPyqzFWaViyoQqNzsqOyHpkqRRGXRj/hAldnQytzEFoKkRIilLdaoVUAVEXoeGGcrzPBnBcZMdpNVDMGPDvH+Gxu55nQVJRsIEqpyE6v4yd+5/Dm5xiTumkS6fFIhTJBHMk8ZFKpViyvJmcxseZAT3ulQaMlSYCY1HSHjlxv8SCJ87KW5a
SzGopzuRJHY8QiJWyocmERZKQCXLy0hx3fvcfeOEXA0RlKgpIb3WInzNej+DPddnltQr+rRrP/nYX+6vN3s8LuReLAu6yanL40agdaHRKwokIWW2E6pJqfOEkpqJIb6JAeWqOY4GjbNt8Aa0rKzg7YKRa14Y8qcWmdtPubiOU9RBEwSpXA+pqNc9OHIOigZinQJ22Aymq5cb67aSVCcI5NavftYKSpRaKE5McuX2BToORvD6KrtaJPpbg6CEfAVmcf+47ylKrjkBCZCGVA0WYRYOLWdGl4cLqxbR1VnLrbT9gU+dK3OZW/ONeKo1q1rQ0UxxWM+WfpNZuomdGRff4KBvqViHOzLKqoZ5fHTvI1eu6uOPYPhosej76ruvwDg5zU9clTE4niIXm+OTHFqH4bYo/DI+ztbGS4HiY8XySWp2DRFGHUZLwzE7x4SubeOpQNc0ZC9JsmrGZfh4afJBqYzVucyX/9sijfPniVRw43E9GlLNz5gQ/XL2Nluut/OzdD9M77yGcyXNp42I2XtiJ6MigtVjwjE+QNgvYLRZSnjQyRZ5IWMJWYiGuTVOi0qLUagkkMzi6nJQ83kRFqsCCT+DalTv4/eH9NFdUU+aqwjc5S41Fh0FuQ2/KolTpyckFIlUxZBXVrL3cTHLORiQ+gui2YVujw1LQIfSH8Q+EUajySN4iUgamgl7iyThWdzU//NFDOEuyZGYk9kwcQxRk5KXzomvpL/JqFxH7o6T/muTfrPr5KxX8+TpB6fVyPk9qOi/kHvFEcJTpKBjNzE5FUBXixOISyVSI0eAMJYoUDbZmaha189AJGTkreI7vJrarlH+++zJ++Z5/4cmbH2Lxuhpy4RxnF+KMpCfxRO187wNLqB5qJq+JYYzAwqwHQ3UZle4iOx85RTArMf7CCL5RgchkkWXVDvb0P0GdoYU9z/dQEFLsmz1Bi7YaA3ZKbcvwhk4yGfZSqSvlQPY0O38np9IW4vjwAFe1XUalSU0GBe42F57dR6jfsRmPv0CjWcPytXZqNl9L974JGoxa2tyVPHDotywzV+LS1fKBTgUxk5x0appf7NvHDeub6FzdyuHDZ/n29x6lq0yLU2zgzNkRapR2iiYbSiFFvU5Lx3XVmFUVfOSLP0CUJ1DkJKLVXvaORzAq9dRZLaQNShR5GQ/tDfLkzLNc03Ytm5YtRafOc+xfxsgXBVrKmxELWUo1GoaPRSjrKjDnLdJw1RLypmlkikrykSyCIoih2ohcn0CYn0BhayOVTWO2SmTHCrS7GpjwzXGgr5f1y1fQabKxd+QMA5Ex3t9+OeU2BYKkJJ2OMef30vHpFkyL6qAoR0rk0FSAIKsgGUqTni6isiooW9PM2buOos+5KeTDhP1xTDITcTmU2rJorY1U6KsYOOJFJRaYSxYoFjNvdYi/IZwvQxbfDuJ+u2ftr4XzQu4alcD0aBop24uupIRgRM4zx48xH5lEpyhQriqnfkkJj58YoMUSZ++EnYH4AD+59SIu2341//auf6djYwdSxodTrKW2qEV+rJLrNrXyk5/sZlG7Hpu7hoPHj/OYd4KtM6u4oNzEkqQKWTaK3zfGo0e83LD2KhZC/Sy1NBKIJ+iNePDHg7Rr63FYDbS72lj9niY6042kDsxy1/OPIEi1rG/Q8fvRg4zLFExlptkmX8qvRx7lJytvxrVxHflqFUu2WTmzc4EHnxhgMjLGYMjDlppqRsMpatzLufK6FUyfnOf46REsej37enyMez0cPK3ikWPzaDISCrWSqaiCsflTGCxmwtl54qk51pe5yeR9PHDbCP50nM9ftJ5H+nz8YfRRcpMr+d4n/h5DQ47Pfeo+Wlub+fClFzA7nGB104dZ1NTOg/uPU9Cbkaam8Cd9FLJz1NsdiPJShJSPTK+WeDaEcFMZctGJlImRGM5hLpOTzkgkZdDfncC1eA5VTSmZyQx7f36G0Ug3WxqWcmhinMDIPH2hIRbSGbY5G5ElQHKpMJbpUWY
r0a8/jb6pgkKuiKyQRpAXEOUF0skiKp0WzSKByRNenKUGTDVuBK+C7IISlVwgXlQwFpzm+RH4wncuJdo9Rd/eWaotLTRYkzzgUbzVIf4XeT1L//5/zg/eLLG/LTtURbWG0roy+g5Oo1ekGZ2ewUgBhcGB3GhgY/kWTPI0E6EjDPs9hLIzPHHvP9H3ZA+3rP8qZaUFnth7kEAsjYvTHEj6sBWNFBe2sXXHdo7s/jVzPR4uv3gtNalWYs+NYZI5STSX4izqWfaxcurvHUB06Fl9/QVkJ7185eY7uKSpE71gJBpbICUpWHGhDUNZhviExFQswMbyTiT0HJztplG/lFhmighFeqbm+NSln2D45BxLrmqj79Qo3/jxf7LK1cCi6tVsXFOJyaSmp3+O/WNPs9JVz30/fYSzwRkqS0tIYkKpreL9G1w8d+wMCuUsPopoiiJ5MUVCgEn/BOVaB23mcp4ZOcY/veOd2D1azs6P8M0nH+Kj2z7AxbVf5/OHb+Mff/gFvr1lB+/ZtIHmJe0MHx/g1ORJwrk8TVVaPvmlyxnqOczwRACNWolFXU8yFqSiVmRiAmYDc1jLjEj+LGKJhsE/jFO/2U1RVUCaTyF6siyq0NHz61FyijMkZAlyRQeRZIp0ysoVl5rw9oSYD8kw51XUlNShK1cgVhiR5HKS+SyVyzeQz2SQKxTkyCMIAvOzMYwmG8X/294ZvMZVRWH899mYVIw2aa1hMMUmIEJXNlRo0IUogpTiqouKYDdudKO4kIT+BboQFcQqirhQrFbREpRSa9fRSmvFxrQjCo201qaYpg0ixuPinaRDaWIdMu/eeZwfPHLvuZfcby7fnHn3vvdm/oLLs7P0rl/HpbkZOm/tQDf8Sf+9t1M/Osf2gZvoqf/D/IVp5ge74VgfByc+Z9oucmlunu6u1aktHlSQnLdkAJTBb/wiaRaYTK1jCW4DzqcWsQw568tF251mtj7FwOHtpslZG+Sjb0lvZ3HmDkya2ZbUIq6FpCO5aoO89eWsrUTC202QszbIXx9k8oRqEARBsLJEcg+CIKgguST3N1MLWIactUHe+nLWVhY5z0Foa57c9eVxQTUIgiBYWXI5cw+CIAhWkEjuQRAEFSR5cpf0iKRJSXVJIwnG3yDpsKQTkn6Q9IzH10o6KOmU/+31uCS96nqPSxoqQeMqSUcljXl9QNK4a9grqdPjXV6ve/vGFuvqkbRP0o+SJiQN5zRvKQlfX5fGLH3tY7a/t80s2QGsAn4CBoFO4DtgU8kaasCQl28BTgKbgBeBEY+PAC94eRvwBSBgKzBegsbngPeBMa9/COz08h7gKS8/Dezx8k5gb4t1vQs86eVOoCeneUt1hK/b29c+Ttt7O+3gMAwcaKiPAqOJNX0GPEzxVGHNYzWKh1EA3gAea+i/2K9FevqBQ8CDwJgb6DzQcfUcAgeAYS93eD+1SNca4Oer/38u85bYQ+Hr/9aTpa99jEp4O/W2zB3A6Yb6lMeS4Mu9zcA40GdmZ7zpLNDn5bI1vww8Dyz84sQ64A8z+/sa4y9q8/YZ798KBoDfgXd8af2WpJvJZ95SktVrDV//byrh7dTJPRskdQMfA8+a2cXGNis+jku/Z1TSduCcmX1b9tjXQQcwBLxuZpuByxRL1UVSzVtwhfB1U1TC26mT+6/AhoZ6v8dKRdKNFG+A98zsEw//Jqnm7TXgnMfL1Hwf8KikX4APKJawrwA9kha+F6hx/EVt3r4GmG6RtilgyszGvb6P4g2Rw7ylJovXGr5umkp4O3Vy/wa4y6+Sd1JcLNlfpgBJAt4GJszspYam/cAuL++i2LNciD/hV8i3AjMNS7UVxcxGzazfzDZSzM1XZvY4cBjYsYS2Bc07vH9Lzi7M7CxwWtLdHnoIOEEG85YB4etlyNnXrq8a3k696U9xpfkkxd0FuxOMfz/F8uo4cMyPbRR7eoeAU8CXwFrvL+A11/s9sKUknQ9w5a6CQeBroA58BHR5fLX
X694+2GJN9wBHfO4+BXpzm7dUR/i6fX3tY7a9t+PrB4IgCCpI6m2ZIAiCoAVEcg+CIKggkdyDIAgqSCT3IAiCChLJPQiCoIJEcg+CIKggkdyDIAgqyL+i7xC6L14pXAAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "f, axes = plt.subplots(1,2)\n", + "axes[0].imshow(im)\n", + "axes[1].imshow(seg)\n", + "print(im.shape, seg.shape)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Affine transformation" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(3, 300, 400) (1, 300, 400)\n" + ] + } + ], + "source": [ + "from monai.transforms import Affine\n", + "\n", + "# MONAI transforms always take channel-first data: [channel x H x W]\n", + "im_data = np.moveaxis(im, -1, 0) # make them channel first\n", + "seg_data = np.expand_dims(seg, 0) # make a channel for the segmentation\n", + "\n", + "# create an Affine transform\n", + "affine = Affine(rotate_params=np.pi/4, scale_params=(1.2, 1.2), translate_params=(200, 40), \n", + " padding_mode='zeros', device=torch.device('cuda:0'))\n", + "# convert both image and segmentation using different interpolation mode\n", + "new_img = affine(im_data, (300, 400), mode='bilinear')\n", + "new_seg = affine(seg_data, (300, 400), mode='nearest')\n", + "print(new_img.shape, new_seg.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXcAAACVCAYAAACjO7rDAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nOy9aZBd532n95x9ufva+4YGuhsLsXABuIGrqJUyLWs8ojwzthPH8nji+EuqUhMllVR5KpOaSTxxylWxLTtexuOy7JFHsmTJIilKIkWRBEFiBwig0ft29/3es5+TD7YmGpclURJBgHQ/X7rvPee871vVv3ruv/737XOEKIrYZZdddtnlvYV4qxewyy677LLL28+u3HfZZZdd3oPsyn2XXXbZ5T3Irtx32WWXXd6D7Mp9l1122eU9yK7cd9lll13eg9wUuQuC8EFBEK4JgnBDEIR/eTPm2GWXW8Futnd5tyC83fvcBUGQgOvAE8AmcBr4ZBRFV97WiXbZ5R1mN9u7vJu4GZX7ceBGFEXLURS5wGeBp27CPLvs8k6zm+1d3jXcDLmPARvf9Xrzb9/bZZd3O7vZ3uVdg3yrJhYE4VPApwAMybhrIp1jp+mQUAwSKZNme0AY+iBIpBQJWVaQTAXFkBElAVEUQIQodLDKffp2gCCAIigEYUA/EkCUIYKYJCEJAbbnEkQBQRiACF7gE6Fgyhqa6NH3AyRkDCWOLnt0vZAgCtAVhaSu07FsBDFCjCS8KMRBJTukoIsRrVqPbt8lJKSQyqAoEq4d0HP7iKFKTBfxPA9R1lH1AM+1CTGQQg8vBEkQEQQf2wE3DJAFECUFIRRI5wwIQ4giIkJESSYMQiq1Bl4EKhKCIJEwDSRNQtIkQi9EFCAKIXRCosjD90GWJUQhIIpkfM9H0BXkpIKiiAiKCKKAgEAUAUIIERBGRGGEIIoI4nfG9PCtgEHfQ4lAUiUEAQI/IApDwiAkAiRBxvN9+l4PBIkICUUSiGsphNBClDR0E2qNLl3fIUJiOF1EUYK/+TvLEoIUEXoCvu/iWR6CIBIGEZIkoWgyQShiux6qrOILAZ5lI0kSsZiCY4dstNbp+33hVmRbQrrLJPmWr3XHYjdrWf8Zdat/0+d4OxDmlVu9hB+b6Jp3U8e36eNGzt+b7Zsh9y1g4rtej//te/8FURR9BvgMwEJqOro/vI+tTMjDE4/zM//9E5z97d/hc0ubrPdD7h/5MEfvPsSJwgZrSzZSbIpsUkU1TZJ5gWQqwJfa3Hh2CcfRsbpNtq1TvLndp4fGgaFHGdZbbDbb2CFca69T1JMIco/NQRZDNvnA2B04vVUcSeXw6D5agzNUggy5wiESUgtfNhmZtFhZtQnrCqvdl/mPN84w7/4T9g1L3HMiRyy/gWbuQUlM43R6XHvtDGfqdXTSfOLkCS4vncFMT3LvyaMYe3wuPPsyd/ziY4Q3rvE7/8Pz2OoQUwWVv770OgEq09kMR0b2c/LBu8jdr7L5Qo03zlygG8R45HiC57/4DG0HOqHDuHmQhelx0tMFhLjL6Eya5rJHNIiwGju4bYi8AbIeQ5YUwmiAk1Q49Kl53MgkFG2UnIkiyPhRhCArhIGL7IPvB4hiSORrCBL0ql1SwxnCdovWyyXaLQvVNNDzGrIiYgzLUJbpLbdYOX0d0RLZqq1TGpToBWNICQ2vWUWM69QsmfcffoD5vQF/8Fef5+xOjQdn7uL+6b2osWFyswGOkCaREcgeVAgrHo3VJaxwmUEtRbkiU6t73FivkTBktkvrdDWZw5ki73voTuJJl8d/9xduWbaTQjY6ITz+1mfY/v9/XfnX9/1oq/w+zHz6FXjHPuZ+dJRvjtzqJbxteI/s3LSxT0XPf89jN0Pup4F9giDM8DfBfxr4me93wSBQ+IXHnuTgz3yI2B06cj9k/Ff+Mce2rvHil7/KjfIb3DjXIrZ/hI2yRypepeVarG71KGQyZBU
NR7A5ta2hhzaxgsHJ4X/K8YMe9VqIodlEQZ6RbJOdRpm0Ns90YhJLqDPb6xDXIkbyKpl9C5zbafBar83Hf+EpdCeGvXaZle0UsyeGwd9GXfYwFQ/ZmOMTMxqTuSwTB3L0+2la1QxB02N5+zV0SSQXMxn1ZURTp9Nq88TP7kcWe3zrL5ZIvSRhhTG6rwdY0jAffOIAG6tNOp0mdxYLbDUqZMMEpt/g0pVVBk0doT1gs7KC1VU41ZlhavgO3txeptdSWLa38XWZ46ZBIqlR3uijaCJKxkefHiIWhayfrVLdrIJoks/GsL2IaKDiSgKxXAxRjhCRkfAJQxdFFnBdF1lREIjotzoYcYX4iIZHGzWho0xlcNoKyUIObVJE1iVC3UfLx8gcTGBOxth5sUIsF5LZGOJ85SLXagMyQQzJ10mqPW4sv8a9j72fmBrn5NgMJycKYMgYsQHbizaBvYJljrPybZdnV8s4RpY9Uyc4lN5gYmiB/Og28USH8eweWptJNusNJrNjRJ0BjiAhKdIty/aPw8ynX/m+x38Y+f+gsX4QK589/IPnePrCjzXHd+M9svOeEvyt4G2XexRFviAIvwI8A0jA70dRdPn7XSPhUx7sIfa1debmcohCkig/xcTCHJ+45246F17j937P4wtnLpMKA6RmksbiBlGY5qGxLHMFmzPXbnBEDBlILXLaURS6yHGdQw+OoeoBvcVt+u0ERSH3N4VLo8pOZYgjh44yPBLjCy9eIMob+EGD6X17GQwGdHwZ30wSOgGFw0l8bI4rk5QvrOBveUyG+yl1u6y9XmGj5uOIdVTfxKDLWk9mekjlSm0dIRIwEj/PVD9L6l6DE5uneObzNjuDHoGjEpdNtOIE+x+S2bmQIp2qcveeMXLjOQr5UZ49e4Fn/mqV8fgejk6FnLto07QtRooCUWTjewMCOcWVjUU0OceCmCEzpiAXRAJVxEPGJiJ/MkNhM8HauR2q9RqxmRkkW0YrSvihhyrq+EGAKAhIkkAYhAR4SKKKIOiEToSr+uh6gtDv4gYB+kie4No2ICDJOQIxQA00enaHWNYkyKsk8zH6wQgBS2TlEUx/G1uOYdV3iOsmghri+SYPHZ3j62+scHU7z098YoSG7ULNpLLVojloI0R9ktYZTl/dpnxD5qP/9b+g1dri4hsBZSHOoQM5EqbIUHccARczP0O1sYEoarcs2zeTmU+/wsq/vu8///x+5/2wvBWZf79r3k7R7/KjcVN67lEUfQX4yls9P5mQyaT6CEHAK/+mBoOQ2T17EOMeoahTPivywWMmI6ujzM+OM5rVuX45xkZ1BbMs8I3NGjvRNA8koVTbYblWRJlPktN8aksa45+IEz8yjCmrqJZK581zxPIn0K5ts/OtMr1Wg5QckRsd59CH5tEmcwSlJl65g35klvGjMv3A5/pzNaQrO5zfuMziThlZGUOUamhSgdbgOsl4nH7Poyvv0PUHXNnMY8QEOvUaL179MuV/N0peEDHkFpdbFQJJp9SsUswMcTx/kAnlDmb399hZLbDWcRjN6MiGjzTosje+yUjWZX7uaUZSL7G96KFpQ+SNIm6ixIpVQo10atU3kQpHsPoBykgKJAlFV0AJSUxPIh0K2DOSp3b2Go1ule3tNJlCAVkE0QFPcAkjmSgK8TwHRTYh9EB00Ysi1o5F0NBQ4yFqWseLfEAnEBWcjoeq6DiShTkaJ/Q8xEFEfyCzU2ugmxqaDTFHZbPzJo4o4vkGw2qM9aUl8pOH+OTeOPV1m61yiz/96y9xuZLgUx98gGJK5n/786/z5In7eczoMjqXJhzx2HvvHLnpLf7lv/0cm3ee4K47s3hWFykaw5FuMLIJnfbb12P+YbN9s/mOuN+JyvyHHW9X8LeWW/aF6nejZEwOf8Sn8e0+G1s1tpoVSo06I7kkRVyIjfPGRoWNapOqO0Q2liBhJem4h2moIkXDxmyf5ax9hP17Rsltn2Lr6iQjEzkarTI9DlBLJtl
3KGIo56EPD9G5uIy1JGGkC9Rba1R6EK1oZO/JooRprq85vP7VLR59VMc3auREieBCQMcJWdvZIVBU6naZ4YRG1e9hKBlSgUBf6iEJGnk5ohNaxCQDM5OhbK+x2amz5KYYSyRw3C66NkTN3aRTabH17IDDNxYYjsvUWossNR3q9iE+dLyAqeznUOoesok+qlonP3yCoVyV5pqNGFogDSNELnbkEopxttoDFqZTIIokCgpiUkM2BZS8huM1UPIKSjZDKu5SmMzg2QFCQiESI8QQgjBEiCIUUUORIlwxRBZkJE1EDVWWX11nZD7LoFRGcBT23TlKr+Fi13u0rB5a2iAMFRQhoHu1RbveRxHBCWUkzSDAI53YC5HBUELBFAR6A4HxOw2SI/di2RvUyi7jeYml0govns7x/uOHCINNvvXmq/zKiUe5cLXOlUGW4HALVVL42eNH8bYusS0dQZBAVFZp9RMsHJ3AMG59zK//9vEfeM7cP3/tHVjJO8d3PjB2JX9ruPWpBwJfZBDs57lL38KqNjBVjWLkktfh9GKD18vn0KU0j9w/y6EHJhiZnUZR57FKF3jx82f51iUDgbt4aDZgIAvsf/gh7jUdqldE/KiKfek5Li/W+I+2hKLO85MnRjl5cpp+okK3t4SpxcmnIZnqIDULnHn2FH/0lS+hCHk0L85Hnj7Amb++wZlLb7Ay2CEQQohsQl9GiescFX360Sgn9o1zafVNGq0AL9jmWt8nE6QZSgYcKejElAKrjee5WDfxFJFpJUKNIvzAoe9e4PJak6VIRFVSiFKNjNVle6XC9FCNl1fPsVFP8MSTH2Y63+LVv+jRrUsUE1n6nUtsuhqR6NPuVdjYsZiaKZLJaASqji8Ef9NKGQjocoKBYhNPD9Fav8LSS+uMPDSDHkEQ+QiRhCRHeLaIREAoxJCiCM8TkOyQzTdajBcMOl6LdG4I5AjX82hutijuSRNPCHitPttn1mlthGA10ASNmam9rFTWWEgPkZlvsrFlcX51k5W6QFwdpXBxlTufnEcY10jPBDRWbe6c/AD7D12HoMaBvQY/13mCaifiUtWmUCwyIpa5/MqA1VrEE4/cQ2pMxLWu02v3cFrjGL7P5W+cQ7iFMXcmTa7/Tz9Y7PBffgC8k6J/u6v27zX+Dyv590rfXfnmyE39UvV7cVvIvbxW5w/+1z/Ck/tEvoRnm0TmEGuVFmdXzrERtRkxjtJe6/KvXniJfHqNn3ryENPH9vHwrx5i37Nf5dsvbWEP9nL4qIEy6hDExkilttj8WpwxOckDezTWL5+nba/w3KmjSI0GuYVphKFZoiQcU0d56eKb/NavV5ibfYmlXpn5RIIDaYcXP/uXPHNxBV+s4SHiOE1UNcNoPEnCHmJ63wLJWBM1bfLY9D2sLm4he/McKG0hCTG0WI5kUSSe1JlrfJzwjbN0ghZW1OD900Wu1lq4bhrbqqNqo+x0ryNocfYZKaaOJ2ks3kFmyGJzY4Nv/NUW/9WvjXPP08f49//z53GMLE/MPY289gIbTpGYLhH4CjsrddILSWJKCiXmELS6kFMJOj5BU6Ky0yKeToHnYq23cQYiuYU4ogiuHSApElEAeAM61R6qqNJfdxi/I8ZAgpF0jMgVsYUQKXIR+iHuZh8pkAmbLvFBROB3sAcSkqlw8doV8lMZCvuG2Tv+KbqDFg/XS3zmN/8YWZHwVI9AShNZA9pNkdeurKCoFo8+9Rjj94yjiXVyazG+/fwzXN+yOT55ksJYmpXKWfaNDPPnXy7z1Efm8Buz5IYFnIxKYkjGrPdACm51xH9oviP691I1v9uTf2e5LeQeRgJDwzaLZY1YrEfkFen1L7O4KaCqPkYQJ5UMuLZeomS9Rs3J85WvhKSfNXj4+AGG7j7GQyeXsXdyxJVhUgUNNePjBwZ52ScIQ7ZbXcYSSUbCFJWgTt0dJlVqcebqNklNRT+Y5+rpZ9kobVGtHuFILmK/kWStvMip5RUUrY8mWnR
siaw5wrBkMGVkyGVlgn6TKJcmtbeAKltku0X0QAEvYrFUIyt12buwHyVpkOimeVKJ2NnYZLsnc8eh4xS3z7HaaCP6oxBl2C5ViSyXK5e+Tax2hGWxjWCL9NUuE34TTx9Hn8tSbjfYrvo8WhhC0GXqjQGW7WKg0uxlqd7YgUgnoycRZAuv7xOUQpbPbCLTIFWYwgk6DFa65FM5wgGEsoAsREQCiJKIiEh6WCNqqZCz8Y0UcRnkvIdT9RnUWwRNleRYHknpIhEgyQncABJIeE6TbreOEOjYNgy8Jhk1Qf9yn6H3zfLTH/1JLp36JkNGhtpriyTvlFg6v4Ek1UkrCtE2rH19keEjQ9zz+IeQRwd85c9eIHJu0PdPYAQJWuWQyL7Kxa8K7JvNstPKsN3vkJgx2Nmx8cJ37/3xbrbkb3bVvsut47aQux16uNIjyNJf0nKL0N+h59qktUPsNRIczU1w/11FfKocfMllM5CQhYucXlvjYqmA9uwwCaHAo3s7OL0OziDNZtVkeHgCaaZOYFkcKe6jfzFOpMOTe3L82cUqL26tsqAUMSSFV1caJLWT/PIdZ/n84hofPvgR4qJCu7uBmRKRXZ2Ech/HR8c4MS2xsr5OdxAgDAIa1jaCpjNyp4g6EidpqshNl+FQYrNeQ00moGggzqgksib5Y8NoX76I92aTWsNCHT3MQrFLecdmY2ORslOjqBSZTpqsbF1hSwpRdZmJ9F0cu3MaGimCnMfeIwHXT9fxxB54AnMpD5sCk4kEllPDW0myuH6e4sYoUj6JqkWsnFpjbI9OurgHx/WIxfJsLL3JhVMVnvq1O4hSMmGUxO+18AYhYgRiIo5ZCLCrKmLPotP3GYknsJoWm89ZTE3IeHqZwM+jKR6+BrFJATuWoOjC9qKIlu1TbbUobid45eIyK6sOwzsqR++Y5Y2Xr9BpB6QOJogVYywcrZJxDlG1XH7jL/+MrCHy4PIHePCf7OPoiZ/k6OMfRnQCvK0+R8+UuPZiGUPSkYU+3XbE5qBCoMP26yLESrQ7745/2vl+XP/t4+/JKv4fQgV/K1oycJvIXSZEFBocTCT4SkPhI0ce5r60yuXN68QK8zzw84dxJZdOY4IPP3AX9maH7umXWCqvsdkK+dQhk63tM3zu/Bh7MiN8wC+SKS5Df4JSs8SpxS4LB1N87L95mMHmDZ57tspgcJVqt0MuoXLEhFxjhWsUEYaO8bu/NMPKyxZCL6Re7ZDUVVa6LSKpR3WwwpXtOLFYnumsQrVSI6maRL0m66/HKe5LIqZFBnhEaZ190yk2KwM6mzb5YYNODRKGi1wwsM9t0Wk3yEQml7bXCf0Iy/co2C53DI9QqvaoB22iKOLh2TspFgW+dm6F+VBjbCHiIz/7Udy1ywxNjqIGMV5eWWOjVSenReQTGdr9HhEROxeW0BWDobzC1BxMnJwlTEhEWx7tSw2GslkK6TaXv7BK09ZYOJ7DyGhEmoDnBKiNNiXFJ5XTUTWIOSKd7RaVizvk4jBoBtj1JOnhBq6hoGRMJMNASoSQcwk3e2xvBRzYu8CbF5YZBH1eWPoWzmKBmPZxjh99nNMXX+APf3uZ40/uYyiR5NiTh3j+z77JqCHT9uN845VnOXXlm5z8wId49OkhopiEvm+UKPJIWFvEtufYKPvUA4++WCccJFiYD8gOvw/ljV+/1RF/W3i7q/jboWr/QZJ/L/Td/0H33LMxg2HZJ2VOMd4uU2oYDH8ki3umRF3cQ0sO0HyZoNOn7PRIJeMo4wscn6lztyZw6OQdTGxksV/7Nm/UlvjsaxVGUpM8fKxCbXuDdl0moR/GN1OsVcaYnljjuW2RvmtzYGgIxakh+nnG4jYZ7Qg7V+LkVYGWI/N6qcmmX8JQAgahTKlrMDucRDFkXGTShTTewEVOxvEHHTwrCaFAKpViEG+RSo/QcStYJRd3IyS+IGKvO/htiUImhq6JpIcyjPa26dQiZE1itjjFkJHlamMLQ0mh6w6
+N0QmH2KsXOaNlx12Nif5+D/bx2NPObRLFkouwl2pEgkV3GCOsm+TxEUWwXE8UnoK3zMxczm0uIljhKgTKv0NB8m3kcMhts/skFooIocS1sAmdEKSRY3utRa+qKCNZmludZFCkdD1yA0VMHIejWqb9tIyViuBmdIwswGMxLDtHoIuk8tmiXoVzi1d5kajia/o7MlIdMISp194hXx6lhutbdqlEmu/+ypPfuAwg1DmyuYazShJzd1gKOZRCLI0zr5Id+GThMk+YsImtjfHVP4IalVCfvk6m2WNsGejCRMcPHmQ5B0p4n9q3uqIv62816r49zr/oCt3I65zx9FxRCT+hTLPWnXAzqZEW0iyMG7hDFTMPWmSeyRa5QipBqV+gqNHHiQ3v5f8MZFEM8svfHCGj124yqc/8zJ3SjHK15scmj/B/r0dQukiL7+a4ti+AwgLEyxcXmU2/xHe//RhwsVrXP3KMhnjGK3A4tIzVxjL5xlLaAhBmyIykZnhvoU97BkxsT0F8NGUNIolk4gr2HToOgHdUoXkVI5eRWV7w8GUffLTQ6y/uUFps8ro4h5CemjWAJME/sBndXEHgTQJ02YqOUOu2aPSK0HYZcfpMSqkaFeWWVod4YNP3cHZFyp0d3bYPD2EnB0iaXVpuSITySymMKBq3UALVbbtCN1MITg+KB5Hp2P0t10GfRfQCIWQvt8jciS6m1XGDxRIzcSxNzcgoWL7STrLDYT1Dktnm1yb2+LYR2dA8ZFNBVE3cejj+y7Td8VYfPY8/ZJOqpCgX84jZWSymQjfSBKpfezuDm17k2bHIZtaIB9KpASTrco5xEDBdrbJxOaJqiGvrZ/GlCRm1Dpj6TQLuRmyQ4e4uHqGP/zDl3jygwcpjus4fsigp/GNLywxOpNiYt5n3DpEOhWHnVU4tA8jkbi1Ab8JvNcE//32xb+bq/dbJXa4TeQeRgLp8Qx2GYRUREGwyQ4r+E6AJ+sUhnI4jkW/LuC3U8iiSzrR5fp2yL7RkPLagEqpycFH9xLXND795hZ/fnYdqX4YJXmdx35mL1HrLj7/ey/S3BK578gYv/Kze7m2FqPScJBnkmizOeR4jeWdZQSliF3rcb21gRSCHeX5+PseZuakRkKLI4sm7WaDygUPJe7QG7gIuowSaviRS3mjQWYoz52fnEAyBALVYG+UpXnZwSxUWT/dZWe7g9cGSY9Q/QDNjIOq0bIkEokkLdtlWE+xR5NIKnlKSgvrWo7jP3eYh3Iyl77ZxNm4jDmh0vRTjM63wZ/CXE6jRlX6okCTLuV6k4ym0gtEWp0UvfIA9fUEmUN9AkMgJiTYLC0TU3RkXWXgShjJAigQBRZBt4WuBJhph6JpoqkakhnH9hyMlISupdElCbtSRRANfEegX+9hKjJxO0GvqtDsDKg7beLJkHFrGlNs0LWW6MoFNEdgPCbTR4PIRVN0nr+wimsFxGWHBw/MkRsxEaQYaztnGLTWsKiwdjmFrMVIypPopszxe0JEyyHqTtMXS/QHJvValQPrAsLuA8d2uQXcSrHD7SJ3D/zQJJYW8IcUxiZCGksWqt6h1rKg18QdtFAsE03ScbsuVmgQN9dobXSplSREXaO+7BNPTTPxEyeRX/sDmt0OL60e5dFsAaf5BoP+EuWaR6WRwClMkxzLUsiJ/P5nKrz/0aN0V1cZys9TSJs49R4rWzV27D4bboWV+oMcnhklcF3stoAwrjGRiNFaaeCvqcSVEC/sExuNc+SpaXxbxpNCJMkDCUJlmNR9Mp7rMTYSMXL/AH+lTvvcIm++KZAWJSJdRhFdIltkrGCi6vtJJ/IM/JBTV7/Elr2I9n8oPPBLR1l4rEL5nEYq6xKJOtpYhoUjO1T+nUPSk6m0AmpeGTMSmUkuMKKOEHQcigmD9uvLLH1bQJVUFLlPUo3R9z0k14dOF99XSI4n0XUdbBO76pBSTDQzj2iE+HpAvKAThhA6FtpoDFmXmTgccePUtxHCSbx6n34
/YrvZID+isi8/z2tLa/T8DmPpCVqOCUKI4zQZGR1nbL6IOXoSTROpLJ9mqzxgYmE/e+54EHVGIbJ9ZuvTnNiaonVJwe+rqFYM2CSoZti4JjE7WyA5avBbv/M5tl2dRsfnTz9wFNf2b3XEbwrvter9+/Fuq95vtdjhdpF7ECF4HpYpINohjuMDHWolEUm0UT0dTRkhlu/gdXr0yj3sfpnIK1Jveex5cAotX8Xa9hA8H9fLcGA6zalVj16vw+ILVUamDZ46ei+FzASZYZWlkk+nbFOfCHj6Y3nevFRn/6N5FgKDMy9XUUybXDrGXOgQbqW4454xeg2XoBuQmFCRpQK+LGD2PDyhQ/NKh66lMnEwheuqSGIfJBO76yFFEWJCIBA8JMVHUCMEQQNpiKl9efSXr7N8ysX0PcJQoteHKBAQxD6L5XOcq7aIQp2cXOVrV16g929WWFM06s2QAzNzfOgeEcudYDg5jrF3g8XXEiTdFaYYQ48lCD2BxcoO3cEA3SgwllCZLCZxBj3iskK7XMVM6vSqIkPpAlomjuf7RHaIZhZo9q8hOrB+bo2xh6aQdRsCCVX2scMskeoimT5hGCM3McfS1QqNcpt4coSUHtEpifRyFV5cfxZfibg3mmVfbhJP8LjzvocY+sQU4p4E7qaFPRDZ/9gBFpoSi29sEEYCjqsQdUVK1+PQ2k+jfo6EKOA146j1UXpra1SWXuZ//2KDufQhfv74U5Q7F3ljdYVv/4c6+G/bjcNuO3YFf/txO4gdbhO5u65PvRSQHVcQ/IDQNCmO6HTKK5jxBNZGBasnoR7PI8oSui5S2d4kX4hIpuNU1zqoPZG4odCt9RjOGAhhnrZXYi7WZfG1Hv1GlrEZnzUbXn3D5sBoj/XlFVa3Z+kc2WIsd5CeN0J6j0vsbIuBnUWJHeQT799Ha/Ma7XYHPZkkQsG3AyQTzIKL4sWolfrEigpuWyZREBHMPmAiOwJSXsXtBAx6A3RDxXFCtLRIgIvnKIQjCvJ4lti1Teolm6QiIEoBfcdC0ExEsUPPq5PV8hjCNEZSJ6tqLJYusNpbxbYOk+w/wD13b1Dd2MPQ3jHyVy8wkd3HIAjZaDjs2BYtyyWhS/T8Tdfejw0AACAASURBVAJvhqXmgFw0YBApyJL2N/diH0T4loDsh2jJGHbUw6v08HsOdt9FDD1e/5NzzH1kD/oUKHEJvD5eyaO56oNnY/c0WoJLZdBhpTtgw64wmbmDfW2Nu8Z1CvlpRiiSSKZwvB6pY0PcWILt0w2K/Qaz9+9h8Gaf+laZ6itLeFeyvHihRRR6SFaDsdQUhUmVTk3k8sZFeMUgoQvgHCDN/0ujvs3KytMUEgdIxgK+eO5P6LTe/Vshvxf/UMT+HW53wd8uYofbRO6K7FLbHJDWJRxfpeFFzB8yCdU4vhjhNmViRZ/GyjZqS6WyuEMiMHAbAyo3NhESKsKmgB2PoWge1zcsnttcpRnqnPEqjJhV/LJKR+7xzNef4fWWyifv+iCGblEqvcr/tawyXEjwy4JDfxDw5+e3eXJOp1b/c379j+9n/7EHOJ4TCZ04ydEAexAROjaOrxPEVaZPinSudBFbEfpYFtAJJQcSAYHgoBRVZF9DCgREXQVBAFUiNi0iaiGxwxmmfR/llTqDmsho0aZU0gmFEDM2z+OCghEL2eo1aVohkRySURJooknLWsJ39nLlbJLctsd9n5qls1Kmfj2g1mjSsDq4ThdRFqlbGqbs8fzONbIKHC3uYau6RjvwOZybpWCEdBMpIkGhbzeRBRuhFxKFIpoRILREgrUSlz9TRcoOM7w3x8B20CUZyXdplupYDtR6TQZ+n+F0goTpslz/Elp4F4+OPoiupIipMq4RsuZO8Zlf/w1GApGFkTFGDtzD5T+6RGlzEUmBsZTMtfMb5FSPC81tal6d06UVgjcdMnGDDDIlu4uqSOyP7+Gnhn+SPaM62aKJXmwjN/IMD/f54l+3b3XEbxrvpcr9rd5
s7HYV/A8j9vdd6r6l87526EffDHBbyN0wY8heicX1PFrcZmRWpNNuIbhd6o048ShATsZRNIvKSofyssXcHSNIokh6Lke5VEGWVYh0ZEmku7EJtoTg7DBq7uWLl84giSafnruHDz4+wdxag8l4nsm5Jr//pS006xp+28DujjMSNDm79gon5t/PiDxPq/WnfPMbJzh8ZB5ro4DkgOWBLkGlWkdTIkxZprk6wMMj8MaQ9C4qcSLfJxQl8EEgIBAlJAE8P8SyByiqQmiF+KgwFEMtVmlsWPRDEcf3sO0+gW/jySMUjDhHMxpfPf8yFCTShoEx6GEIeWqDHleaXeaFNsYL0xz/iXlq32qwfkZErvpcbVpI7oCCBmVLxA3WUPQilytbDMQWjhNgJboYaZXQ6SEMDLS4hC4XqVbXScdj2KGPnpNo1HdQ3BRxu0H1cgOrbaLEfTJJk9XlCh0RCCOKeRPDzzBkprlr/wKFkSR78ndy5oU38CKZxWqbhtXk/SMmw+lRTA6ydL5CKarRcBuMhlPE7IiUaFLzWnSCgGQQIxACukKVgZUinywSugEdu0NJ6FAZeAwNfxDEPvvvPsSdRY0DtU1yr/3BrY74Lm8z3xHp7SL5tyr2tyr1v3v+jyL520LuoiGRH0uSO5wmfSCk7RcJwz5jd8/iby6hlWJcPz/AUCzK17qM3p0ieyKJJuvU1/uIUgzPs+nVttHlBDEbUkqfpHwA1a+StJqktEmWrgnc875/Rkrtw04fu3uUD93dZb40w4NHD1NrRijDs/zqx4q8/K0+/2h/wIHYDOvdG/zJb+n8/AcKrOsRw8UEfioibA0IApWtVhun3UdQQyrnO4yYBh2nh6aL4EbIMRk/DBAFmUgKQHQxVJ0gCgARVQUrLlE8kKN6tYrjRRiGR+QKxBIFrq2d4WJFJRJCcorK9Y0qezMik/pebF+n2V3mm63TuLwP9flvk8s9zOzDGYyhJJlFneRpaA5kluo9mk4dP4owtCJrnatEnko+FkcVZZKxOKubVdZLPYppE0USKDc95Aiyhk6nNaDcNelELYreEMVkwDeXvwVyihOzcxhJHS3o0et0sa0saS3i8Z+4C8lJYekm11YuIacSWEGE4FncM76X6UKM+iCkb2lcrC0xNB2ntlFmxa2yPRhlPqZjByB7NlP5SXJaESiw0rHIJmQ22nUiwaEXiNwYXKB2zWJ6O4sdmhz+pXHih/cix/RbHfGbynupev9hudWSfytS/2GF/v3G+GEkf1vIPYwge2If2aMJAs3C8CT0WBxB0hmYBxikGuwp9tj5SpNsMY+mQKdjoGdU+moVXZMJupCZ1vG2A4xwiDuG9lF2fPbnHmGgioRBn0bpBq8/77G+WcIbLXAiJzIz9z4SzQab2wMES8eydB751C8SLD+D5QuIQciwKrPjrPK55wQMVeeR/cNocR9EgZieQJUgnVRZveaSGnfBNTFTKgIWkSrhhz6ioNDrdUim4oRWiBCI6AmD0PMInQgtkSIcTZIeL7NxocNQOoWmD9hpdphKp2nV1ljqNegbs0zGDVwvjyiusNi5wrB5B/tNgaykkI2H+JsrWHNHMB9QmL57H5NPVpHKZb78/7wOlT46w/h+mZgskE5kGI6SuG6P19daCIJF36ni9vPIqkS5WUczJPwwTbm5zY1ehTCSaQ762N4IPa+GoQZs1K8zXxwjSuRItGeR6DGaHmH9so+618RvLaH6Hje6Hdpth596cJxuU+WvXrvCwBN5rXWZ0FPIXo345Z94nKvXzrKx1aUVQEwW2ZccZXZ0lLGxOcycyFyzwva1GmGijU2IL1epeQmcwQorA4sbz/0njlw5zM/82od5VzxXbpcfi3da8u+U1P++Md+q4G8Lubt2hFgIaNTamEmBUAXPjyNGMlLQQFcUqp7GnkcmuHSqhiaMIwwCKvUKwzMx7GafQNBIyNBvl2nVBwy6I1weNFi3SjhsU286fHTyEK3tDUzB4vR1lblMijs
fNFj9zTbXdmpMpFOULxis76Qojk4htdtMxnVWq03GxD7dANo9DUeeYGaqSLMyQFQEmn0BJaFgBzXqGy5Zv4fmx5DkBGAjSiJCGBCPxbB6AV7bQzEDgiBEkWUQAnAgcB1ye8YxvDKLi3Viiko6I6N0EkwkiqiSQaiVafgyahgxpma4IlWZSk0Q0zWI4owX8yi6yJ/9/hk+9ov7Se33EBI6ZCc49pEqkxdzdBoOVzdKKIFCQjAYSyfo2TLLzTVKTp2YlERRJQJPp+Q2yQUFCoZMlwjbjyCyscQ2tZ5IwozTsWTcnMj42BSlyg3uPTrG0LTC2pk6jdp1BGsJmSRDI0NoesBmqcaVpYCVxg0st00U9UiKNkOFiMeOPURyaA8fOrzAlZdeo7Lq4DoDxrU8KX2YvlVHDcfRJANF8hlPTXPo7hGWd1YRlrqEpFltNWi4DitVk1d/Z5XQvtUJ3+Wt8uM+5OO7pXuzRH+rxP53x/5Bkr8t5B55AWFNJXVvHMF3kIIApBa+o6P4BqFmM/rQNINXl5CbEhu9CkORRSaXoF0JiZsFUkYPe10griRY6XWx3PPkA5tqB0xZZz6dJROLk8ttcX0x5JFilfrmGMstgfmfknjj/27zl5cs7vcNTuYGhDNJ2isR+W5IUOvjCDHagoqZLlJdPUNnO4EYM8kkFSJJpL49zMH70jS3SlTfnGb4rgBJcgjEAFH42614gYBhgOomCKMQTZZxOg6q5hHqGtZAJBANpNlpDg0nWHx5iZ1+SErROVcKGUpMcfd8isDpkomPojZ61MNRnrp3L6GTpd8USQ+Pc2P5HOcurjH6hSnuVyRicxJocQoPHSezr0ZUj1hY3+HsqR1mRvI0KlVGdYlYSyTckRm4XXLaOJt2Ay2UcJUmWz2BHadFQTXIZuK0myGa6GGKOebn4Z4TY6QlHbs3hidJXFuWaDvw2Mc+inZSJ1IDvv4b3+Tr159HcwVOLQnU+3WIZMYTw3xy7/3sf98oxkgMYUhDjsU4MvEgL/7mNwhcA8+PMGMeyXwe1x0gewHNesDdD84hTcLC3BOMTy9w5cIql3deBlGk3iuxsfgydmtwawP+DvCjtGZmnr5wW9xf5mbxdyX848r+ZvXVf1Ted6nLqYPf+/htIffAjWhea5E4qkC8j6SkCOwYghDg5wNal0N0OSQxkwO9gtjps3q2SzI7imgK9FIuihQy2Khh9VyUbpf54YPQqiD3QuLxBJLv0+zrjI7dxcd/VePsc1folLd57fdUPvrL+9mr38CKb+NU4kjxO+mWB8RzPfS+xuZWnCIGjTBOPpUgoxVIDo+AEJEaj9EZ9DDTGno6hjno0rxaYrWXYf7xMcLARSAgigQETca3XCIBBMEn8EFRVDotiYTh09qu4jYE9JhGZmyYA8d9wlPX6LkSTrDBZidgzj3GoSMSk+M67eU4J90dCvFxvn65xomDh6httpkyTYakPs+88FXk8FGMvUmmj6rkjRBjchIm+yhTWY7qbcpvjpKeEVAilVEjgWWvUhu0mZxKYy+2mRzJMhg4WPSJRwHFTI7hXIo9qR7prIyoRIzddZDs1J2snHqTrZ1VnEqKjrXNzNx+5MMC7VodRRsm6rc4GssRxYqsVbaw1DIdX6fUHjCemSRwc6jDCRzZQ5J95OEEM9MHudR/HaEXYvU9BL1FMhan2g9ImCbZWQMxNkp30EOMJ8kWR0mpCdqRSzca0As7ROJuW+Z78V4X/Hfz98n+re68ud3E/h2SB7/3swpuC7nLGmD2EXppIjeNLVsoqozQ61P5psjI4ypW6OP0BMKgjeso5NU4QbVKgEXL1VASA1wbsEySYwex2l10pY2gW9R6MJEvoNllGuUWv/qvFulFWQR/h2N7c7Rf9Lj/nz7Ohf9whvPbHcKvXeenPzrB6qURpo7luG/4Bn/0/CXOd6/z5o04Tz8wgTgko0kGg8hCzMYINZFeM8KXBJrnlyi/rBA1LYz5iGyiiGr2qVUURuf
jREafMDRo71iocYO46vLqH1+kvRJw5HiB5IRJqErI+QnmVI3SGysciE/RCTq8dOVFthsGR+YfYTbRpYHG2maFenuDl65l+bn/NsXyN4r89AMn8XrbPP/qn/LSF3scLB7nv/sf76XetvjSl2/w+vnXeOqxj/PUh0dw15MIcpukHaMf9kl2FrAcn5H8FKOzeVp1m3qtxkJuD65YptFWOXzXSfRCkna7xfXzLTZf+ALLy23mp4Z54KkC2bED1Cs5/s//5SUSSpp7JpdJ9GMc3L+fwr3zaJ5MIp1l9fJf8/rpZer9VW4sjnD/8RhCXECS47jlAUo8YHZ4P1Z5g0HTRxEjPFFk48YOswtD1Noa8VQDY0RHy8YQoj77L03QtHaoRRqZyTE6a86tjvgutyHfEfbbtTf9nRb7D+K2kDtBhNwdYtDxSeYCPEki7AvYLZDGbVBSaMKA1jmL7NAUckZm0NmhulXD8UBXDKxmDM9qE9Hn4uXzlCwRW5RQ1CSmZnN1u8bQ9Cg1OaTUqJA018nlLF5cfAPZ0/nnB8fQW2e4Ut5GCJ/g5I5GpSWw1zxKYcHn0Nmz1NsCmcQmnjtCq57i6o014oUio0KL0ckC/V6V8laTkfFR5nJx6PdY+3qIcucQiQmdXNamsRagmCq+U0UmjRoJNGstjh0YYqm2Q3ndI79fpdeMEDIew3cnufBql3PtLRLSEPlYQLXS5IXO64j7E4hhgO5XUaQifu8af/HZE3zw5+cRKmWwMtyfAPncIhudq3z23yo8/bF9/ORMyPty+0kMXSOs97l8fh0hlmNqfpqpIYkdLlDa8nDFFs3rbRbrbXrKgKH0XfzjJ57Co4Gys8yViy06/SaL21UqzSUibRPHf4SdrWN89XNrjGRanF78XSRvhrz3BPOFgPWmyM7z50mGJittj0fvOczj9+2jfK3C4qUz8O8l7v2FcWr1ClEtIJ5MU9c26SoOkRvQ7wskYjK5dBolliBb0FD11P/H3pvGWnKeh5lP7cs5dfbl7lv37e7be7NJNldRokTJshTJki2PHCd2PEacZMZjTH4EE2QGmD+DwAMDg3ESwJogsRPZ8RbLWqzFEimRokixuTSbvfftu2/n3nv2tU7tNT9EDTgKLZESyW4tD3Bx635Vp6ou8NaD97zfW1XEpoAmx/TlkFEtT9lMcrG2zvM3dvD84e2O8Duan6bs/e3iThM73CFyD6OQ2s5N9KVjNJf28C2N0ZKGnhSxBZ3Vb1SRPIH+Xo38RBnBbyG1csypAosrNbphh3RCIm0YVGoNPMFBNjq4wyGhI9MayOR0i41eltX6BrlMl//1t38TuX2Jf/b/3GJ3P8Gtryd5+Ng4Y1rES8192o1plKDP6souhcQYh6d+iY73HGcfuRtN1Li2WsHut1nZqSAemGFalthtdRARSJazSHkNSUgzK7WoPXONJXWMw3clsNJD+lUV0VDQJ2z8UMbKZJHTfabDNM9+qc10LUcgBqiOiufKZJI6otJn13bpehkO5hK0mttgH+fIhMXkXIG/uP4F7G6B4+oNNi4qHP3EDP6LeaZms0iRzvFak8WVG1xaHmX+sEJhcg6FgFtbcPLd43R2OnS2ltD1NBNH72Zq3iF7pIwg93j8C3/FN19x2O98m8WLEo2Kz44domlr7DTbSLLCoekcJw88ysHTB/iPn32e62trPDRzF+8+Ps6LVz36jTrS1EH69T0iR2MoQialEUYpkuUkx8sTOE98lXblAhc+DZMLGSRXpN+ziQUFz3MIJOj2I1y+84YoQZYZViOaex1Kx1SaFZ/r51dJWxpurOJKWTbbF0CIbneI/4yf8Y5zR8idMEb3Ipa/9i1CJ4+USlD8YAk3kybjr6Nu21x/eQuxWMQwmiTKBpqhopQSnJB7vHJNoFJv0vAFpKjNIPToDAb4GEiqTkkWGbXKjGe3qbaS/POP/w+M33WMr301x7/7rRySU2TkkMhe8ySnxj/K+0ohzeotekmFMKwhCT6
B2GciP0Hp+BxBc5GNbyzTH3okFRNT11m7scmg2SOVKxPJEmEiwpyQiGMDtemSrVyl8rhBrGokynmycyZOziKRiZFMICqROp7A+7PHaVwskZq0EPSQjWYDKzXJe0cP8Y2tZcQohNDkXQdGkcOIoTvkC890uFJ7hUDxOOV/kqjZwN84hDQRkB4zkUZPIyx1GJmwoTeks9snM5ojmTCZLUn0ehHDTkC/V4dQwW520RSd7W/dRMuM8a6f+8ccGW9QW7/J+tJ1XmlcotJzyVsnODqiMVPSSYoL1DZ6dJZeobr/BBudNVJbGp94+NeZ179FOEixvFNBFR16vsDazgqnxkeRrBpKIkOrGXL0zDn6NYdmc431b1qkUjl6To/xsxp3/fqHEfQM+xf3UZJ9Btub1NaamCkQ3JjVJ2H3+iaq0Ec2Uwx9l2M5C1NUeWmo3O4If9v5Ufvcf5a9//DciVk73ClyB5rNBmlLozvYota2aO4XSCe7DOIs8mxIbjdFvSGCbyCFKcy8gNcLEUcPUdy6QmOvxWZ/iKmWUaUeC5kMG/0eXTfEMzLs2iLeoEBGNbHjPDeefJnnPnsd/aOPcmrCY7ebYLjbIlvaQy7mSZknmSnHLD++Ts3tEhsGeVPBXY0xizne++ABbD1JWkjS70ssXtklg4gmRUSBBLHEsKMSOz1UVUMwTOQ4xjBkojBk2HGIuiJGPsId6sgMEeUeCTVg+5bLSASS7tHZ22NiqkRIkqwp0HckMtkS+WQWg4i0nqPaeJ7HDp1Ajve4+8hBjp89zNVvXSeZGWPQklheqjGeSFIyC+TudjBKJ7CDCFmLiP0YI9LIjxmMVVPsrNdotx0Gjs3AsUkGMesvaBDvMH1qhtETBiNbEWuVTcbSGqeOvgt9Yoyl8y+w+EqFkmWgiVkOJz0s1eOLl3f4nV88yfKLIavdJpGgMz0ice7IKfyhgutm2LwaEEs+CcFC1z3GRqbYbSwzdHyOf2Ae49AoQSGAtkxH6pBR8mQfzJKZrvPNv9ijoKWxxB7jRQPVyKBaKmnHI9EKSRvvRqg8ebvD+8eCnwn+J4s7Ru4DN2SsXMIgS1bziHod7KUkWiomcNPoIz28jR1uXXAYPzhKIh3T7zuULYvS9BSr7RbioEXHXyUb55nKlBhRbV7obhKKMady+2wHPUbKx7D8Bs9evEY5GXLt689gTOUpH1SYOH6MxNkUBHWkapv+bkC1JTNzANzIxdccGuv7uG2DQ/e9D+kMVF/aYumLV3lpc5Gx1Bx1p87RjEEmSODIbYRd8Hs2vYaPaSRJjlp0u13sago9IxCOuEiBhJwW2bsEE5kU69sruLspQpIIYpJBT2Gv3SV0S2SlkH59m7aiMzk2yuWdK9iighyWGXCarywvc3l5j8mswM3eBS7VV4kR8OZPcmjhCI1tFWdxnemHs6w957O5XiOfURCCiFgsYaoGkdgnbyZI6Cq6ahK5+8TmKJ4N2uj9PHjy/SS/9DLhUGBpuUW5G+K1TI6OibTaHXxbZzZ9L1OpIXqyRq0ziZvcZ7C6QT+EG9sSd0+f4eiJNGLsMXIoiZpoUbm8T7XeIXbLJBM5iodn0Y/lUOQknt/A8apMHTXByuAPQT6d532Gwov/ZY1YSmEWJeSUgmKaZEyVXC9L7boHsX+7w/tt5a28O/V2C/6n4Z2q7xR3hNyDICCTSBIINr4mEMUaghDQrw8xCwaCEJDIJRnPWdzY2mH3qkM6nyVXSLFZ22O7VmGISVaVaQ8iItPHcSOKVoaJQYdH736Eox+5B/XUENeTufQHX6O9P+RoqczCmWkKR+dQxlO88vkLLH8mIFHKEw18smWdXLJFZWOMudk8sddj220jKwOWngtwHh+ytbHMxu4Ktt/kpd06B1PzFCtZnHaEZGkYwZBup8qg7UMQ0W2JWKM5OtVNNp4KMfeyZCc6+MgUCQhGxohrm2y3PCJlnRuVNsEVEVmRSIg+uqZz7/gU85MJPr1cZaU94J/fs8B
ip8JzV7+IoSQ5feJdmMME0yM5FNMlFMZBHOHPH++CYpOKTZqLVW609lnyBMatJMcP5tm++Txp/TDV7gp+NMrBskVC6zE5U0YODBJCn9r5q2yej5AzGtN3p1FDnRcev0aj7lLMT5JSdyn32pwYk1i4/2PUwwaD9T5OM2QQGuhKguFghVwqIKWlkA7kMKyQ+lYMkw3Gp6eobVaQfINmvwetJOnRKoElkcwWEQWJMAjR5AD6Pt7EHGd/xeD8H11jaqSIGFt0Gm3UxCSofWS1hyr8ZD9+4Pux97mFN7TdyC/c+P+Wb7fgfxROvvzm2l4v3xX/yMd84rh1R5Zm7gi5R8QISoAnKgiiT+SG+C2JnlunNJwhIiKOFGJTw1QhDHrsVh0WGzb4LdY6dVQSDH0ZXVfx8VC0JAk55uT0CfRMgqazS34rjyy4VPdlJq0xZrLTSC2Nq3+1QQebxb1bhK7H/rKPqYB73eRUaRRFvUmzcYCZ8RzTIzaxVkCxW9SrLcqygJrNstpvU1BBFGzW9qr4fkhKT3CgIOG5MYqsoEgSsQ/dvS66qNFx2yx/a5sz904xMlOksdlma7/CfrNGxw0wkgaR0CWXSNN1JObzo7hBxJHpObCSDKp/QEpOcvi+T7L+xSrHywu8++89SEFJkpJUrl9cJu0ucG17Gd9dR5RcDEWnMkhhJ0J0uUmvvceFXY2+ex8j0oBnt79GSrLoBousDzJYYoiytkjZmGNhRMYyHAw1wcxjY+ilCYia3JuaZfWbdfaqHVwzxamDE8Rqma3dNQ4dyuBMC9zaNMhoKpqmcsTMM3Ygh2BJNJY2sXMWoR1j70GsiuxtOMzMlkkmRWJ3gGSMIiITRxGB4BELClEkERuwe3UPua+AJKCg4QxthFCnvr5D6Gp0OwJR/Hf3Av+483pZ+xsV+ut95ruSvx2CfzNZ+8mXBS7fFb9pmb/efuCtkfydxh0h9zCO2XOrCNstJnI5UlaGnfUqUeCwHlSwEgqSEGH3XbKpBHutJtf7Tba6PVQ9jRC2gAIjaQ3fFyiLKh1Bpz2sMVMuUJgvkSwl6S+53HjuBpNqQKOQpG5vcfPaSwhiRCXW2BiENNwBmmQhiFmq9k2qdpOkouIHy5TWShTMEUqmy+RchukDSbxuivRugg/QZH8oE8QJVro32Ol0GUlNIsbj7Dn7bDUHTJkz3CdL9GKNYj7Pjf0XeHmvxc3WkBNjFbb2BwSeQ2/Ywxds1GaKoZxlIqvgDIdYWon3Hi+QPTuC13L4x7/8D5l6zzTbz/a4/7GDlGd/HlIRTi3kqb+scGNpk812nbyepKCmyAg+Na9PN16k1x1jJj3KL0wYLHV7dDsvIYxM0xgusifkOFFU2anv0hJlkpS51X2Ga505To5mKGVyNP54yNnfFtCmE6ipoxwaq1J8vkkY77J8voRrd2ivmhSKp+h1XmRx93karoAu+yxMHWNtRcDu3eLo0Tn6630KB3OoI1k0TWbqocPc/NYqe4sSB+cOIkoSfhyiiMGrD2ADUQCkkNHDY+j9Jtf/q09zt4Fkqgz9GG/oUmvu02yb+D+hcn+t2H8Yob8er5X8d2X7Tkj+jYr9tTL/UcX+vfv9SRP8HSF3L/S5vNYgrxVxhz0mxlx0KcV2dwcqMU4qz/pejViKyAkuy60+btQmjIc0HQFdipGjBt6gRN6EWEyz626Q0jJMZ+bI3D1C55lbXH9uj1u7q1xuR2TjBpraR5dNEp6GnhgSdPsomsjAa+APQhzBRYr2MUKNMcOkaq+x312jXhzBXy1RuV5keqLN5k6DG8I5Hio3CfwsnbCCGxqUNYmEJdJqbFJ1B0i0eOqWydWOzULxJIdyHar9Cm7QYqo8jSq6tCTIZkOWKy55XWbU7NFujTJfOsBEMYP1vjkS9xVQ2h6qkKezqtLcW2X2I3dz6/OLbCz1WF1cJxcFeAzQpZgw7KEaBopgIdlDjDCFEwaEsU1sTfPwSR1JCEnm5zlZMri+u0wmeYj
7x2y+uXiL6XyKrVqD/eAir6zIJMQSHRQe/51bvPf9p3n4n07D3AjOis/i7uEWmQAAIABJREFUNxxadpspS2HQ6rN6/tsoRQNTV9m3V2m5El+9ehNTzjKeGMFteeimipaXSWWTmKU0oRly4sMneOZTz1F5oktmuoA6qRAKPiIaQQjIMmKQRJYaOLtlBM8gldMQ5Qjdz+LJMsXsOOmyy+//SfZ2h/hbztsh9tey97mF/18WD2+95N9sff2tlPkb2f+bkf2dWJr5gXIXBOEPgQ8D1TiOj786lgP+ApgB1oFfjuO4JQiCAPw+8POADfyjOI5f/kHHiBHYHy5iKklCbcDlNZFSAq5Xuxwuiqxt3GA3CMkrKRqqh6t5eJ6OHLfwfRdXlknLGdImlJWDxOI6ppVAJMVgok/3mT2a17pc37zCK7UKnjSgF6gEXh5T9vC8Onn/APcUdDZ6NWqE+EGAGweIcYJxNctMtkhl6xJDEmzu1bCHClmqeDsjjGRtnrz4ZYwDH2U+u4Gvn+SAsM7PnZjl8aUNLDXDRM5n0i/RDpoEYZ1OuMTM+IPkN2qUNBPLl4kkF82NqNkmSX1AIWGwMDWKqa8wWngAX/WxFiywY6RQRLOhcvkmGWWEvfMVNi5ucW2tym6/Q00TCGIHU4J8fpIokEkkk4yqNhOWzL495K77ppiaHMfP6CiWSq/WZ+T+w0xKd2GWU0T9IcUXZK5eNzicLaE0m9QlcMIBVTZYr1xn9MmIQb3LkY+V0YyQMHCIvD3afha7H7ApNDibPUgpkSZiCsHpstz1yJkibjRgMKzQt3WG7QnSioyUiVF1CbFjESUs5HCf5S8ZnPonkwTxd17JKCkSUiCCYRNez/Lc33yDvAXoGnraIhy0cPodNtYEsu0ivf7rPznsnYjtt4O3W+yv3ff31uLhR5P8Dzth+naL/e865o9zNv9GMvf/BPw74NOvGfuXwNfjOP5dQRD+5at//y/AB4H5V3/OAX/w6u/viyoppNTj1Jx9NlYFxg2LncYVPNXjxb0tdCFLHAsEYR9Z0YgdkX7oks6MofR7DMKAvBixUDzJuXvL7JgBa5ddQqPP0YWjvPynn6G606Dj7+AINo4jocsO8rAKCQldFrD9DQbBGc6O5/nW9iaqWmRUjHG9KRZGjjObjRk6l3l2v4YmZenbN2mFKg1P4C6y/KNcgy+s/BkvW0c5oD/NrY6OsXGQ9548gj9Y5q+uhdy0XfIazBYmOJc6hu32OZc0KaUNDDmNmh7jgYcD7J2YzU0F1dDpOjKhdYZCeoLmvkNxJ4E9HOB0faI1j+3NJej2meyKjJgtVoR1dBVajse4mWehOEEqA6gy4zOjxIMkXldHbPaZ+uA8iWKRfmCTtCw6F7YRKBDpPk5OwSn6TI5/go3tL5EMJ5koTfPC8isogk7WTtNQPQSnz+L5vyFaP83oiXsoHNxlsWLT2Mly/9E0e9e+yR++2OHuySmShPRED9utEdBnzJxnNjNFzWkQ2x6ddRvDSiMXJJobDQwvIhyIDO1tnvu9iOwDeXKTOprssb9bZffLKyTpMaGqrHRDdDsglRcYPVFi5IDJi//2iwzCKaLw77xD9W2P7beS762vv51if+0xXit4eOc7Wm6H2F/v2D9I9Hda9v4D5R7H8dOCIMx8z/BHgXe/uvyfgaf4zgXwUeDTcRzHwHlBEDKCIIzGcfx9H96gKQofPrDAheYGrW6bRCZNc79N15GRJYPZRJbQ6ZK2skSCTsGo4jsuuwOVcUVF0gv8/L1HGTlp8dLVLs8sy/yDX1jg0P1pejdrjM0c4ORHjvNziR67V2/x4iv7KG6GIO7hDGC/OUATY3QCgjjNmGGy2x8wlijhqz6m3oTyBA8ffJRjzQrrN1yqgybXGhJde5tv7wZ8aOwxPqxfZLO7S06dp5SPOTuZZth0SZ26l79fuM61l3b41r6D05PYlJq47SOcnDlKPi0iySkWtyE2F5h7qIN4UWRol7i2tYg
0tAmCmEKixct/u0/d8Yn6MWNKRNGdYctdY+TeBZTU/ZTPlNi4sc3j51cQEFEtnVgymTs0h1YQcFs6otRD7kWopkWAgaIOEeIIVQkQxAHpzChiHjJaksHykPLMKAkXHKfPkZEpdF3DdcbZGuzQ7GwiGgVaQoR9/Smy+jQfe/8s29f7yPIo86PH2W69xHJ9wJmREQb1iISRxRJ8BGHIteoOlqyxu9mmpMvg95BLEhvPbJMKFQLJw3MrKI0dVv4QltUZYkRkucGD787hyFMYpolhVHj6+V0ezmewkgmU5Ayj5WdouC4xr39RvhOx/VZwu1/E8b2TrT+t/Lhl8j9szb38mqDeA8qvLo8DW6/ZbvvVse97AXhhgJtTuLB0k4fGJ3nwSJnPuT3kRoJkQkBAZC5nMhhCTtQwkuPcd3iCaj3mxsBlfPQhjGM5qmvPce3GS6jhCAfPfYCo4/Jnf/qnPLc9yfhqkQlL4tzpj3Dw8B7lsoQY+DS3Xaxsm+5qRBD7+P0hmlAiqVTR4xKmGeO2I3pDlfJDB8mnznD0vR7V81fZ/so3qHg+DXubz6zNs5A8SEIbEikZzo1OIGUN1rYChk+u0womODJ9nE8ejsmZNqKvs+3vcOqBB8DSCNpbFP08jcBFNqYZPW2yeH6RvDnkvkceI3c8jTvoMbpziyuX29TrSVRPZq+9z9MbO1S+dor7z1kUs3cx9+gCv3PvaZ79y0uosUgqr2PHHYgtpKSBqIgU/QSiKRHKfSRFxlNdYjmFoqr4sYs81PB8By9oIRcSeF5MMZFE1wo0Oluops6clieT2mapssHXby2jShY5tYLnnWBizKLbqvOu0/NkRwzsZp+um+DE6CGG9iWy+gLbwzr1uEsv1Eh6bRpXuzSkCpNjJvrQR86opBJlLGcMe9AgNRojRkMqDZ/x8RSdOEOmlEbUAhILs0ysrPHVr77Cve2jBIMVTGa4VH+WKH5Tmd9bGtvaps2hf/oCtz517w888O2W+A/i9bL4t5vbmbW/Hj9Ogv+RJ1TjOI4FQXjT/60gCL8F/BZAXs2TK+r85qOP8fQ1iRubEY/OzWLdU+DQY4fRDlmEvRiiNqKbQggcqlcHVF56HLsn8NTlL3L1hooj7tNmjv/+7x2ge2GPyy9fpLa5jCV+nZefk2mnFxgJH+SeB6dIFi1AwAtXadZKZA9pGAWZ9LEk4U6ftSdvsrjYJ6WD4w/IlSMCP0sqaTL0XJQD8/zSqQZXtm6x09HZdTts+CFeKJF1HJ7oi0xu7xLJNRJagSm9wbUbDtPpcZbVHCOJGDsUqVzY4NaOiyN7PPJLk6QTIte+tMjVpW32mm1mUgcofLBAaIJBCk7dhbq6ihUvUe+4XNizMZQuu7f+lpc3Zzh8coS7HnyExWsbnPrAPbRWOyTHMyBBZEOoalhihCA6+F2XIKkjBQp6DE6vhVWQ6O/2yI2kUSOLZnMbLZVn+uE8YdDCeaJPvF7CMH2UyCdlH0MSx+kvP46iWThuFcN0OfDIYforVRYXdZqNOqcfuItb61us3qry7oVHeOLaBWQJRFEhncpQ6fUYGAkOF3LYokCMgxirhIUhSVFHrZepbO9RzqYZNxwKIzn6fQ8vCBEUSJZVRg8eZGv/CttXL6OQph4MWbQDoji4bbGtYwJ3vrjfKO9kFn+nif27fL/2yTupNPPDyn3/u19JBUEYBaqvju8Ak6/ZbuLVsf+GOI7/PfDvAY4WFuKDE5NI1gSnj1dp7soUsll8yaXbCLDoo2k+YgzD7YjdrSruRp+DZgZ9JuY/bV4gYR2nP6yRSs1z7PQYw0s13BtVZs0pNCWL5K0znS0xPzWNbKZRTLB7HcJ+CgkbSTPQpi0GsQiFmIWPHUJ+4go7l2R01aBVCzDGHAZ1Aafh4A9jckdO8tjkPOs31nlqfZHVbh1BkRlKBQrxgFqc5rFHP8zcu1TEcJdwscvv/9E+G71tJqxR5hMNnsNnRlPwhh615QHagQ5H3zWBG7/
I2lMBQTEmVmMYhLiOiJIKGVa7dJsikQAd7wKGVkIJk3iElDWDxRevoRdVrOkU6Yk0+xt9wMO3I9yeS3/o0BgOSNwMGH9PkgibTqeHno4QUpDM5Ah9j1CO8NsKhaMqpATEOI2vNtFzMXNn5sCMWPzKMsGwxZnCYfp4xMkc/bbI4391k3NHLQo5H9lp0lxt0Q+63OjtMz96hPGkQT+O2XcHtPdC8paBEAaEYodhAxIpmcx9abRcioQVsv3NBr1+j9FyHkUIcQOBOJZobtmUD+cJZRlJ0kioAq1hRMet0pe65Mw6Qvf2xXZKyP14pHlvkrc7i79Txf5a7vQsXvwhP/cF4NdfXf514POvGf814TvcB3TeSE1SNiJ2urvYG33+458s8tfPPMH6oEJ6Vmd/o8m137/BK3++QvsZm83zV+leb6DnTcqHz3Jr80UeGj3LmYmYqamP84kPvpdmt0tvt4IuqRhyg6VqBVE6zIhVImjZ7CzWuPrNdfauV9lYWWFrY4jrh7h2jGT0kbM63ugYMx99CC0rIBrgd3r41QHNq7tIQ5ANGe2ghTZapDA9xbumZ5BVjSQJSnoZ195it3+V//wnTb7wB7D97RmeXTnI2bmQPfdZrjSfx8odRRclXHmM6ZkRvN4eo8Ux/ubPr/DsKz7ptIoSmzhVh/qKzdKLq1z+4zWS8YCTB5OsO7ss5A4hBTBmSeSTAU6vg92qk5ss4FsWTCuMPFqmPD+K5w6J1E2mHslw7EMec/e18Qf7DNtDLDlJry7S24vBiNDSJoogU6/YKFkBFAG/7qAlLPILI3AgRTxtceKjJzl6YAZNT9F0fJp2wOWdRV7ZWOXqZpJItHGiPHGvw4SkUdD6fHFlhYdPz3F2eoxDaZWxrEdKdcmmFOp9iWqjzsSZIyTGi6h5Bc9IUj49T9/z8CIRI6sxHDg0d5uIw5jeZoO1p7ew2wHpdIFKr89Kf5uWMySpzCJLbyqHeUtj++3inZhMvV3n8OMg9u9y8mXhvznfN/MS67eTN9IK+Wd8Z4KpIAjCNvC/A78L/KUgCL8JbAC//OrmX+Y7rWLLfKdd7DfeyEm0mwM++43zvO/IGT50+hjf2izxxBMbDNtVDpfmUE9P4fVbxGoPTImsMUq21Key0mI8VebIzCH29m8y2Fpi/YWQieQcXldHUUxmtWnORgIdd4gSa4TBgNh1KRYyyFqWiftT7Gwu89x5hfcfzCBEOrKk0293MSSViaMzLF6sILX6uJ0MyaJJu+tjlBTkwKLt7JBIW4yNjzCyvIiARsoSKGeOoGcDvnDlb3j68lEOyaNYkctACLlvJMdOF3TF4+lbS6z0tnhg8jgzOZOZkwNOT/d477vPsvjCNh2xg+hG1Hf3uHn+FqVIQ/RGOXSszAONJq7b5tTkCHHgEZGn0wkJ9AHGWAIv5eIFIoYpE054pBcUJs89CgmThDhNJMUYQxevNURMicw9VEA0JaJIwPNDPM+m3RgSejqR57P6lSGFAzLCSAxyTBAIqCmd3UEPYo3RVIiKSM2RWelt8+zieRrFDGemR7G7Afg6DyyU+OqFLVLjD5Ip7VMs5dja7VDvuIwWpjh/6zJSVGTqRoO8pJBZyKBqEY7TR1S79Lou06ctjH2RjZVNZEVj2NeRBY1mLWS/O2QQuwzCbcrKKepeQBS9fpi/E7H9k85bncH/OIn9TueNdMv8yt+x6r2vs20M/I9v+iwiEWkQcnO5wtHJUY5mYTuWuPAy6HNwYfVJTE3gkU88wJYxZLo0ZNCNqWzUySWPoWdkDsrTXNjssNvvUXrxIts7Q9pRm2u1Go4SMSbOgV9h0EkzfmSW4vEkQ2GAXkwxzxillRWqN+soGwUyZ7qkLROnY6OPueTXFEwnid10kQURUhphz4GUSDptEYkS9p7D8ckpkmKZVecqi41RThlT1NvPgCBzYT3JmZPHue/caYq3pvCaTaJOzAOTbZYWV/iTlUt8oH6Y/qdOMjWSYW0rYv74Oc6
e1VAyClmrw4FSTGJQ5umLz7JWKVPOjqLHMrERM52cxprOsHplC40CoaMSyh5WNoVrh0jpBJM/fxyHFlKoIMo+USgjJxOkdR1BjQkFDTEOsZs2pqzjhjaT95ioeYkw8FHHtmnWy0xNlLDbNrEgEbR6CLFHQgfVnyaRKJGRG8heHzm9zF41yzfaA3LmAT72q5OsXOzywRN5/uhvLzNbmKHebWBlRrj/kMAzS6usdAfoUZPW7hhiroA5pqJlFGRBoNvR2Pav079wL0vrmwRVnwuVq+SsPL4bUq33caIWba9NWT9AJqGQNB2iyuv3ub8jsf1TwFtVh/9xFvv31uHvhNr7HXGHahiF5GSDj59bYEiPrXWBUnKEnNViq7rNV1a+TiSrPLh/hOf/9hKf72/z8QcfoNUQ0WigiQFaUuPb1a8ihkXuzx6h0r7G4rANsUBCzCHILSaOPII1bzL2YJa+NCSRMbG7IpqlkTg6w1zJY39li+GLBltxH6FvYHddhHjARneboJFAG2SZncqQtFJEHZ9IVWi2quwPB6R0iTErYMI6Tb8jUyiVyasB6+1NiCe4fuFZEM4xefQEnjUgjD3G6p/k6IHzrNY26LYKTE3JNPd3eHpzn08vNvjl3hwfUPOMH7qPslrh07/7ArX2kG5/ke3GNhPpOc5mxml0JNpLPcJQIpIbVC8XmJgu0m320TMGqq7iDV1kNYUkRUSApGmEkY8YhsRxjCQEQIRfH9KVfBJ5k9HjRZAkIKJ05CC97RXqewH5o6O4VRdlXybwIoplE2cg0enWECKJ8ew8qqbS0/aoNC/iuT1EfY7533mY9u99kxvdz/L0Tsh/N/EAQmeTb++XudasEse7WEaeS1uXqN0Q0L50gEw65tRIgk6/je1OcOX5z+O7NrKQY8dpsFPfZdKScegQhypy5DJdOEBBjYnUDCnduM0R/tZyJ5RkXo/b0U1zJ/Pd8sztkvwdIXcESCdEvnZpjYnkNEhttGGHZLHIjtNlGIWIgovf15lOjdP1Va5cuYwZTmFGbZ68uM4vnTuJpWYYV1KkvAyypFFKjCGFHqqYJKWItPcdrLEsXdkm7osEooZugigKhKJIbOokU3nsfhf74hqpssjCQ5OYI7NErk0cr9C7EnDpQhtBlcnnBDxXR0bEcQecfehhMnMFhhXYWalT69YwAomMpXKt7jMi+0SXlvB6k6wNOhQnQGsXwD3CzOkHyWWHiIJOe6vOFy9vULG3GGznkMIeXqRwfamKEdSwwx6doIMep0kIPpFko+ng9FzwJYRIobbSRl/U0YoCajkmDAJiMUaUBILIByEmGHbRVINIERFEjVh08doRVk6nXulj5hNYszn2N9pkc0mUtEIhewyFIb1Wn+52C3ZMxo6OMnd6HL/bZOdZgeX1XRKkyRt1Mu2IwJpktlCgsgOTD2gsnC7x8MoY317ZZCCcYFTdwJMTTDkd9p0EGhKqKGMKLYLoKltbPdL+YezYheEymgiafoCIFoNGSKQ16fszEKfJpSMMr4+XzLNwxETTiljbqdsd4T/jp4R3epK1e036O9fdEXKPgPM7+xyfVRF7AoP+MqPWASYOWswdz7PX/SjbtT32az3e/+F57mke5q+/+CVOH8zytxe32exV+NTjHf7h6UfRpZjusI0vZ0mJMgXLIGMWMRM9vPYG579R5z1Tj1A6DpHvQaBBHCKEGnEvxu26NPf7zH3oGMVDCcK0iSYCqogozmKe9Xno5BLXn9xld2Magn3kIMnsTJ7lZQdjY4ft3iKVnS7yMIluzVGwB3issNZPUY09dDXBrebneGnxGCfzSW70HFJbafTMHL/262XGSwf4B92rxE81uGtBI05mufqZl9l6+goHZI1mYsC8NU5Zm0Q2xhHlHpGmkstmaOz0SSVkqnsNVh/vcfJXDxAJAoIoIWgygRODKBJ4AZIsI8QyQeigmh4RMaoZ4XiQnzCxlQ7mMM1ouYjnDgkTGrEbMEDBHE9ATiE0HUI
pjVNKYC0UKMd5Gp0uq5st2mKJktbmpcY6jWiazJVdIlNm7PgE71n+HX7xgzt86j+8QKMr0FdbRF6ejOYwn83QHfogi3T6LdJqidDzEGMJQ03gSR4FfYyVzg5Z1UOSbeYzdQrFI1iywnJjyExql8l3PYYkGMRf+mH7Bn7GO8WPc0nmB/HaCda3Iot/oxO2d4bcowBFDLmxvcakaTL0YzbbNxGelRl5dB5Td/nk/ffhJOuEUYrtRYdxqYDtWHTcBh4xZkJnbCxGdDPcalVQNRHDsNDNSXJ5lWE3z73nfKqrLhe/dpMH0/MIuogydIiCGN/xad3ax68rmIUCqXwG2TCACESBECD2IIpJHjtKeqNBtLXG2isSKWOIa6uEgc+e7bDftHH8HlHQQ3dtqpFEGChAn9aggbZX4kw5yze3+4iJBpt7a2SG02TdHk98TubBuy2mR9/Hb5wTwfe4+lQPeX8bRchS0zT0uTOcOnMPYneD3WtbeF6RQjlLLEBhPGbYl1D1Oka5hFUaxxE6yJKCJAp4oociaQiKgOeJiGKInpCJEIhjHwEVRRky2ErQH9g0nDbjZxKEQUhvzyZRzKBEIYOhiylrDM0B/U5MShHx7QBPFxg9OUlupMripX1sJU9KtWk31riybEMoMj4/ieRvoeZm+fsfv8nVmy6rlQF6KsH4eJaTh9JsXo/JN3ZYqnUxVRNVSFJMemhCmmZ3m8ykwkHZYt3tkDLn+eWPfRw/ctlrbnLXRz6Gkptk+bpNJtnBG3q3N8B/ivhZaeb784Nq8d9d/1Z03NwRcq/6VSJJwPZj9uwlsobJAxNzFHIxL375GZYGbfa8DvfM3M+ff+ZJZEnm2MQcTrjFZPk46cEeeU3B6xYgriPKSYxIxHEUZiZVxmYmGDgdotI0I7kd1KUaVz67ztixDPmJLFIU4g4GjI7AUIlIz1h02gFC2iMxJhCjIUohMQZR7BBFAYfed4reJZex8SqXn+oQ+z1qvS65XIL3nDuFNZJEVQSaNxdZurXBkzeX2EMk4UGfFXzxXn7zdAplqk+5NILRS3OxtkOlssPu8ii5QobCXTI3Lu5zdfWvSflFStEel1r7bLl1nru6h6GkmM+MURqNGdSG33kXqyAiyTJaKoFk5Gh3emRKAogQugKiKIEMEjGaLIIYEccKUewhxBr2YIg8VLB7dUpFGXlWIwoiWot9rFKCYcVBSMpEkYObiFFTMkFTRvZUwtYQezBAmrM4+MBhRh7e4uW/uAKNSTzNYaW5SUqPEf5CYsMfMLEqcuZ9v8rs+zy8tsKNK/scPCSilwrMfMIkTrWJt1tIvS6DXQ+vZZPNWmxvvo+l9QCn8xwPnJhhrGCxXKtRnM6QTh9m9XKAHVxjfDREdEpI4k9uVviTwlvxbPY7hR9UmvlB4n6rWinvCLkD/F/r/yf/+uD/Qcd32OqvkK5qZFoGCTPFSBxDGNLuV1jc2ePU+GGknsRTWys89sgDBKKFOEji2tuoQYmJMtzactH0mMV9iY3GZda7NXrdSxwpzPHwiTx3HRJwOj67z6+SKaaRUh7doYac1HGcCDPloCYEPMdAMWLiwCcSQhAkBA08UcV6IMJfMSku7+I2DU6cKmPMJkjMpuh0BoSpFLljd3NX9wBHnz7GZ77wVa77Bl6ocmtvg7Ozj/LcK/v0lBIFHUzF4LMr/xU59xv8/GyZm8/ts1a5Rb1fY0KxUJQsklglLxZA3UUIW6QdE6ceMlA9csU0iiYj4iBFCtWXdCLhGoJ8ErUgI0UuXuiQyKYJFQVZivBsh9geIusJ/NBF1SCKA4wyNDoB6Y6CoquMnBjBHdpkShqy0sPtSmhJla6joyc6BPsCSzc7WKaIXtRgTMYYmeGhf5Kk8m8+R/36ZdpejiiZYfXmJb5YeRFfyPFrlXs5cvJevvynf8xH7ruXz/+Hayy2pji9MMbZB4rIYzksfRQ5k2Dx5g7P/vVl9jvPk5ObnJooI8oSA0nmyLlJrIVpBDm
icI9Pb61Pe2+afilFvTu43eH9U8XPsvc7gztG7gD/avl/438a+58xfYueYTMcDAibDbJJhZymUVlrs99v8Znlr3D4wV+j4Qs89+0L/OLHP8DedoAT2yRKPvGwQBx3CQQQ3BfYrfcZ2F2cQKVRA9U5SXs3xTBuk7ZktJxKoMp09locnB1l4+YmxWM5nJ5EHDkIsoaiiRCriIJHKEoEUYwiZ1AmJFLTLep2jJwooBeyBGZEypQhNHEHPnJ+Ev2Qz0NnzvLyU9/CiSUyokK13qKc7uLXXRLFSa5tX8MQUzQq12jXDNJanUJCoW/nMK0EjfoWtifiOD1MMYEZpolilXa/g2GaBD0P33NQJJHQ8TBTq6j2AfprW+yebzN96CDWlEgwcJESQ/yeSRjriIqDH3gIkoVAG78bkcqkkMUhkasipkQiAgRZQVYlYtlE0EMc20MykiRSAdefvsbUYZE4mSCQDCJBxfe7SKksR2YP4zUjVvsugVYklfA5ZJlc7+/T3d2nr+yhSyEDzSBTCFha/C9E0Rlq+/ew5PSJtYAPTWd5+cIiovEyx2YVZsZGmD9yhuKkRexm8GWVWJQQ+klqqx0KR08hxWt8+TMvEvnu7Q7tn/FTxp1w9+odJXeAf1v5v/m9I/+a3W6HyBBIyApqqNGo2BzM6dxfOs7FxhbL9R0eG7V4ccNB7kFlfYdae5fZbo9zx+/mF96/QGl+lKfPb3Lt4iVyssUsKkczKXZ3d4h2GihCl0F6hERkM3a0gCvUaVYbzLx/HEf0cLsdUlqa0PEhkBGkgFgSQYxRZBXCiMA1MGZMas92SMgusdRHiA3snoKmOPRrDu6eQ2FilIWfS/PBnVW+tlxHE03avSHv/ZVjLL/co7K3z9APSeoy98xMsLjhMTd1mDOTfbJrm9iVJsl0RKLTpCeLyIioqgSqQ0ZPIootBu0eiUQOKSGgZjN4gwFDb4/xI5OMPTCKN/CIeiKtzTYSKnbcoXxERhC4eQPUAAAgAElEQVSSIEQIQoP6jo8Su2iihajGSJpPjEaED4pIGPngyGiqxPZSg3xJpD90GJ9J0x+AlkqTKUKrZqMbEpIkYqXyGGqJ/c4F2m6N2cT9fPKRx3hx9ZucPZyjvz/gyHiR889cZuh5jKXy7HfWqQ138IbXUFSDwsK/IJ9V2XFPcXj+Xk7clUQai3jhb/q0N1aR5Bql2YNMHSpRulcG00HKlXjkPovMt83bHdY/dfww2ftPUmnmTuCOkzvAv7j5r/hnhd+m74Y4CY3aoMNIMonMNF3vFhNWgrDRYnz6BMJUAyvs8IFzB/mrxzepNnosL29x3yeOs9fbhb1tnF4LPwiJpSSNYcQEIX7UAMnEkhLEzR43nurh2ENKocqYphMLGqri4kURuhDjdCKUlII3CNG0FH5gMwxtpIGGXRtiGQK9vQgMHY2IRFIiCj10XUQJTRRTxtYNjkyd4lb1GaTAZrO9yd7qx6kMn2GxERD5O6jCIfymzHbjGlV7nvuOlRFCg+WeSqs74FBqgk63gytIaEKSRmufnpoknVAJA5lAAdF0kEQJRVMQBIvYyRBEMqIGYaePIIokpBAtIeEOVQwtBEJ8V6eQ04lsFTHI4IcNJFlDimREOSJWZby6S+jFJEoKqWKG0PEZxE1GpybRnCGN7h5qYw6UId1WTDoDkhQzNz7J87UNdlybpL1G1B7jA+/7ELIqoaoeWTvB5AGXZmuK3JJDL3LIKBG51HvoyjrQoeUvM/Bk7GGb5ef/X/beM0qW8zzMfCp3dY4z3ZNn7ty5c3NGzgQBkBRNUiRNUcESqZVJRVuW90jHK1u7tnRWXktem4omj1YSJQZZJkWJBEmQAIhwkS5uzmly6J7Osbpy7Q8APrBEiACIcC+I55w53ae6urvO9NtPvfV+3/d2GC1h45cXKSSqbLl3N62VVYy2wPlDefSeRfniZdJhGRnlzQ7pt3mb15QH1k9w79Cef3Qf4bmFd28uL9V576dTv4C
neYQFCVlQuH1wC2uGS8NYJRPJ0PHSePZlbtuxl1JfYsd4mm88eo6xrEY7E/Dto+fwfZcP37CfesnCsEF1RGS/xnh8mng0SnRSZfTgEJ4C5uol1lfjbPvJXaihAEERsHwb35QQAgUiPWRBJ5BNBEHGq7n4jojoG/QvGJTOFmkHfbIz28lMxLE0A12zkXyw+1Fo9vjKp5/l9NIytt1lLJZh//AoamSFvqYhi30Gk7MsnKlTcRd4omgxntrGXZMa37p8FtsRuC6eYtUo0XBtLEdnT3II1+9yYGQrQSiEPhBDiQt4UZmQCr1ek7YZZvzWGcKTMp7ZYeNSnbFdo/S7NkLCAVVFDmSMholn2nQbAorWI5osoERVFN3G8XxUXcIzA7zAAk+gvyAh6g79RoNIUsUsy+hhn2qxycBQBqM5R3NtEK+8AbaOIC/yuQe+ymjoADsmZjj487dgSy2CSoPy3DCV7hJrV0qEvQ65vMtgfIpmV+SLR59B9uLcvnkAMVxl/137ULwap77pkx+QSE7upjfgUPzmaXol6HhdVNfF6csIYYtfOvKrXGhfeVNSwriQDq4X/sGC1++bq3Uh09/n1dTe3yrZ+2tZlnlg/cR33X7dvSscOWl+13/YVZm5v8CfNH6fP93+KZqORb1bZr5TY19hGtvLUOm3uO2OCf7gc1WW1xoEEZt+I+COvVs5vHCMSW+K3alNzHfW2HXgek48fJqHFh4hgsBNqVlEycIWFdKTQ/SRUGIh9E27idHAKIsom1wESURydDyjhSqrBP0wG8tdCnuSBJ6NlgmD2ae2IaEV4mzfm6X8xDJHHznLanaELXencBIR+lhc+focaxdKrK09Rdfs0hVUvK7PcDuBzmYK+TxG+QpXigrZoQDNLLDbLlPqtpke3sfFchlHV6mVLLZnpxnKBjy+tM5q3+K26R3IcRnD94kmoshxkSAaoGYkkpFNxFaKdM5t4NRjqGNxshMSBiZmIKB3QxByMX0LbIFoJkxyBBASNK/UmPtamfxt06QKHr4s4/kGgeWw9EgDPZ+l/NQ8QwNpqnWdgXGH4mkLwUlTaYDbmaRXXiKwAryghxS9jk9+9DoWHz1COpOgsVgnPGPSkwa5tPQsa/MrDBemaHdGueW2NOuXGrTKHW4by/PoyXUev9xmZniY4toApy9KmI5NXB7myW98G7dYZiyXo2OWqHWibHTq2JpKtGHRc757+4Frmfz7z18zgn+bV89LSf3lcFXLHeBjZ3+Jz2z9L/iRLAu9DQ6KEyiuzdRwgZldo+x9Yg3bbCIaHmYQJp+K4V2ME9ht3nvdXrI7bkBN6FiBTeAZiJLCWEZDUQQiKYV+AxynQ7BhoMgCguOycqiGHE8QzUoIrkut1qEwHQckZMvHWuwhKGH6moFQNjj70DHGUgcI3e6TvC7NPtfi1FOXOf2NTcQGLGyng9KtseNGmTvf/Q7OP3mGv37mBD1PYmQcwrEyZ8+02JoNcXJtAzuuk4hOYEbhzgmL+JY01jGD3obPDdsLuOs1rsxnySsbnLG7nF9qIWo+BybHMbtdCEeI6DqyLuOYNoIm4pabXHy8ztCOHIO3qAiBjL1hYMkioq+RHIlTrKwyNjSELwmAjx7TSGQT9M73MMoqw/tkAlvG77vkNw3SqzbI6xKg4RpLVObzpCI2q8fbbCwtIjkBAyMygpTgytw6bXuJle+o7B5Ikw1arD0bZvvsKNJAl1iqTSoS4uKSTb31NB/e+eMkustI7R7rXR8t1EboKQwrNSpnzvD044fRNAnFSHD8WIVsOIUnKwyKAwRCBUc2aPVqDKXHUMSXXsX3Nq8vb8+cefV8P2KHa0DuAD9z/l/yU6mf4Oax/RhShL7bpb2+yrknw3zsX93D53/rfhTX4Eq1zaFLG9TNCkPpPE+dOsZAc5DcwAZnVxeZ0kNMZ6bJjYwgqz6tpky1XGFUzRIadvEcjVA2Qm9lnSO/02Hy/SNI2Sajw8MgO3iiQG6nhl0JcGs
2G3MXaZ6xsFcc1vQLJJO7SG5Jkrsrwzuv30b55NPkpm7CdHu0KgGVkMJy4LHtp6/j1/cd54//5Fn88F3s/0mP6GeO8uRim4eLT3CvdDtX1h/jqaYB2j5Gl30+9sn3ow+5CPoY3UfPs37oAg5ZCgmXBaOPFOgUm8uIlQBpZYjBzUPoaQPR9WmUygS2i1bfYPUxH9dMk7suRTKVptnsIkd8OtV1hiaHsRwHJZCQEelYYQZmRGSxR2mti9cFzVfptjxa620SURV7ehAtZ5PrjXL5iUV6CGRG82BW8BoWRitHZhj6Rp3T1SJRTeThdYW+fzNbx6qsnRyhsDvOrrv2s55sceFvHsHwTFaOOkhKisn7NpFbanH9vXcw9/gFXFPiq09f5IaREI7jsji/TlSTqLprDLouibFNjKUVJlsxLl65TFJxkHhrXOb/oPCDPrD6/Ur9Ba4JuQP8WeMvCIULvPfGW1hZ6vL44jzC6WUGhwboSVXUVIaZ4QiG3aPa7/GHx79BXJXJ1JJMKyMkJRE3PIJNwJnLc1w3sxVVruIXZUpmirAXIppWsC2fSlFh83iX5mOnSV+/j15EQuhJ6IMOfhDgtVTql2p0rphIhsvwYAbbMjj2tcNYD0yy894kqaEQXmI7lY1lZCGGEhM58oX/gdUWuDQ4zbbcTmYyc3zt0J/hix9iMJSn73wVxffIpwTarodcW6VbSmBXXJrLs2iZSXrFMudKDpf6IjkvQaPVZNGuIAkq880Yvugx2lrEaSyj6hnCmoYc9QiHdVJTIwSCitPocuEhnx23ZOhdbJDbncTTwqxcWiEWGUSTBVpujbQaplpvEc+pyLLK6S+tMrk5jtlx0cPgqQl6jQ2UdB5TdEmOpUmE4mwsX8AUQ4jRDuX5FcyuQjYmMth3ObnWxNZUDnuPEQptQb18mK88HOGmu7ciR/pMT8j0rABz4RLYOsuLXaSkwslDT7Nrd4Ze1WTXqE+7P0DPCTMatslFW5huh8JwjopRY3h8E2osz0AiRbvsIYrXTJi/It4uzVzdvNLpkK+V1F/gmor6P177f3jP0iaGwweoGueoWJdInRlnqd3jXLVNpychyhJtJ4IliJStGmF/ACUvYdnQcg06zSrbs9PUOx1qVYeZTcMIvktzroNYDCGpGqoUEJ8IYRUluqePsnJcodNIkd81SCTuUrx4hs6awXQ+RtvyQPQIhSKk7AblxrMc/ps8iaFhZrcphBIJJCvg2SuXMHomycR1mM3LlPwMa90ujuSSHNugYEQoxBR2hXPs3pmhvVym40ZI5rLsuGea5obGyYcvs3V7lkp5lWfW1nlvbhRRqFF02qiBht3vktUlOshIsTEEx0XUXcx+FCHkEBtJE40qNKprpNclLn6pimXLKJKMTZ1oNk481adVLKL7KWq9JrHBKF7TQOg7DMQ9lKSEkldor/ahVkaXA+xKl1DIR40o9F2T6NAoKjUkO0NlbQ2PHJ4UYlNOQVLP8dhqkVCQQk/HyeycYsQ+x/knLnDw7hE2j+YZm7idSDLM/NEyxlKbSmuJpJ5keXWcyakU9vk2i+vzyFEBQVLY6EtcP1ZADSW5bvsoZb+G4sYobJvG84vwEj+Q/TZXL2+F7P3liv21lvoLXFNyB3jvNz7BH2//TabzI3x5/jh3WKvkMxMcmn+C3kqHG8d2c8P0KLGyRyyiYtQ3GM5s4eTiGbRIEsHQuNircvrMEvtSOUrkOL20yA07R7DbDp16k8HxFK2WBraBpMUZSjgsLBzFvTyEklfYkkvRjKnIeoDSlgnFFVzRQQ3iZBwPv7GBWm5zZB2SsVFyUQ+t3OId128nMmljNQYxFjuc0wPSukU6uY/QpMN7Bt7NxPEqsrub3R/KssU4QELYw7lLc9RXLqCHMjRWm1xc7qCzwql+lDu3bMc5dwpH6nEpMFGULIuCilGscWAoR6tlo6ouiX2TiIKMaYWIR6awxQ3CxhLJSAhz/hKikKB4ukSrMIJvBRi10wy
OpZBEDzssoOYBUadZN0hkUiSGNDS7w4Uny/S7DTQ5TiwjEZ9QESwfgRwb1YDR8RxPnTrMqXqNzZlBQso+3jt2ATES4+b3TdPs9ogkQ3hem4XFHrY/RVCzObtqkdQ65Ka3kNwY4Jlnn0KrnGZlySYcVMhFbEr9Pm4khSj3iCZHqVQdrKBENJkhSAqsrTSxPRdZeevW3N/K2ftbQfDfi9dL7HANyh3gk2d/nf9r5D8Q8WUagscnfvZurD8N420sIFsWquzxL+67jk5riIWNEpm4SaUxSKm3Qdft0G97RGWXvqbznaNn6dBguDjIhNKg35VxLBGr3ECTFGy7S78rkEkVELUEoh7HC/Xx2mAFIWyvCR2BWD5O0GshpsKUNnqsdxzatsOppeOkoiqj+hTxZo6B+DDpQZt+qsgHkztZPN9idEJgreojpEXimxMUEh5PPGAyODuFvKvH459bQrGT7Nls4/s+m3WT4eEog0MqjhQwu3mIlZUG4aCMRBOv00OKZLhQyxASmuSTY8TWfMIxE68j0K53qa7UycST6IkwSiKJ2fAIJJ+gX0dRFaJxCVOKITgRIikPzCT6QBV73SXo92l2VTKZODvvDfP4Hxyh55fpriZp1UUiySGcShGvbXF6KaBlKaREh9WWS0LoMp5JcdvtB1AzaSKqSWDV0JU0jz51iYgus/fjU6iPrCIlRllcncNs1ahaLcxeGVEI8aPvvY4deypcPreMJySYioYoGgGWX6VTn8ClhE6KwOkQioko+jUZ5i+btwV/9fFysvbXU+xwjcod4DdW/y0/m/9lzFZA+UqPwxefRApMDhXX+fEbP8TyBQ9PvMxQOMrEns04DQXjElS8FgIOcSXJqeIqoq4R9BU2DYk8fqLLkCdw+ZxJJJ5FD7togkCr0yEZTtLtVKAYIj6gkExGWLvSQvMCzE4fVVGxnQDRkRhKZ2iblzG6Dcq2xWLFZ0Pvc+IhDf3kNFuGhtl6fYzw2Hairku52eHEg1+nXY4w6GaJ7p+nevYwF585w5mhWb548jMMh24Ca5JtMZE9d44RcoewexkePNrlqQ2fD2zfwrsKo6wVzxOTI1xuVxnRTfRoHs/tUl9YZLkSJx8folMtIRodxJSGh4nb1ZFkl/Vij9FsjGgqSWDJ9MwuQlOhfNohmVUQJRXVDagu+iRTJu116E+G2XL7NnBllNoSpbU+Tz35DWIhiZVGiiu9U3Rtk4yWJSvLiHKD/dNjKMMFupbM+jwoUpKZm6c5tvAU5loG1xsmiBlYXY/9P7cH40Kb0//xi+hKmFpnne88sUa+EGJ8egdWX6Zuu0xNa8jJKfBtGkt1Kss1rG7A8JYR/ODak8Mr5a0ueLh25r+/2W0HXuCalTvAH5X+X/IDv8Ta/BgxNc9K8wiCJLJWXaat1BgmSnQ8zNoTDSYmba6UE5hekuFYjPONJl2/z4QeZf+uvVxeWuHcynmMSI6d8iCB2UQRYL1lcarZ4eBojEQIKhvryEIYAQXV7yNIAhtVC6NXQQQkfOyQiO/PMpGBovkI5f4yG5ZC1O2yeOkox5eG2H5qO5tyfRabJdKPTXLfVo2z55aIeg06F3exf2oXj188TX3jOJlAIRwtcdPHfoQvfPGPqTS2kEqN47NEaeM485Xz/P6Tu9k/dpAJcRZRqTISWabUVylaVe4Yj1NzbBaWqjzaOMxMeoZNWo+5pT75QGN1ZYVISCUwKrRXIvSqXdS4TK9rg1VEEgXcSodIOoSkSIwk45x87CKi7iPN5SiMipi1Dq4XpmeFqLseRidOOnkGxZAJgjohNUZYEEgrOv2QTsTT+Op/e5jWWomDB26hdeYkn364xkQmyZXf/ToD8iDvuiVO47TD6e9UODC7mU7HIqLn6fSa+O0Ya65Mz5ZQ6ZHZnENXNSQtTHI2C/oSrfUqK5USovbWLcu8mLey4OHazeLfLK5puQP8xqlP8b/d+EF+Yv8NPHYmzGqnSa3l8lTzNLfl9/KnD3yGn93
1UeqByI03RMheGidKng1rEV0sMZ6dZbCg85XzHVp+E1GN0XAF5lYrpEODWP4ay50GsY0suwZ9PFHFbIWw+xt02qDqAh2zhh2kUT2LwVScJ8/NUZFMFC9Al7OkVJG87NIxwyghF8Opojqn8dQ7WbJXcJoGJT/NsljBdVYYC02x1rPZN6JSNTMc2DeAH7mH9IBOqTLHRmmO4noEnR6WpHDfqMul4gkqZYtMZICWp9ETwghCg6SqMTU4zqXKMmulMh2/xPGaTSozTSFmsr6wQlRUaTdNJC9EuXQeVcqRTIuk9AiEVRqGgbMRIBPBDGzMqgtCD6OSRpPmsBhhqexwYr1Iz/BZa1eJCwET2j7GU/NEm3HigkQsHGHH5hRTN81y4swczYUlcukcrd45wtI4UvcETWuFfnSAUuw006d+nNLcJZbWl+n6Fvcd3IuWNOlvdOn0OozsTpAcy1I/V6K00cN2krh+h3hBJ7F7gKEbE1w5eQb3sPdmh+kbxtuCf3N5JVn7vUN73q65fy+G/9vt/N093+Ibpb9B93x+7ROf4Q//8kucaa9QRuCby4+wvzFFsT7AgTvHGJjJofx1m6HB6zi+/CRfvn+JY62TzIhxImKI8815VvsNmnaFpGrhWGU2uhe5xCiN5hp788N4skfVh3axgmnWuWKtMhKKslRrY6lF1hpVckqOzdEwD1Xm2Jy5gd1hKLkpZuI6gZLl0MLnWah0MZN1Vot5Dq2XEGQNwRmkXHqKfHonew/mMfpjyGKYylmXD71zktXVRVKRHLe+5+dITg4jRpN0j1/COrbM6fN1slEHUY9wadEkKU5g9VrofY8DWY+ypVPqtmk4FuX1Ds9Wq2xKFRhK9JmvVpjQd3JgNktoSML1fFo1Fbtf4sJCB2mljOFYdNsWnX4bJbRBRJLJFS9TM03mO2fx3QA7cJn3i6R6CfL5CHuGNhOP5cnsTDN1y23UFtsYlQtMjuepOBEmhyXSis3d+UFiSZFsocT5hSHqxcMY2jBtb5mQrzK32mWgA/mdQ6SHJaKTeeqNNrG7p7AfaVBqLbH/vj30Ql2wdXzJY+bu96B84bff7BB9Q3lh0dCbIfk3YsHSiwV6NYn+lZZj3q65v0z+ybfu4Rc3/e8cqx5FlmV++EPv4cnHLiEFTbbkpxgp7ODcwiqHjl7mx7ZG2b5rF9V2nbk5lba7QE6UySTTrDc6rNh1UBwMq0cuHAFBw/ECqkaVitCh5qZQgSvlFmHNY3I6g7e0xFwzTiLeIKUobNfCdPsWQ4kYB80+K+Y6o2MHobRGNBilgcVgJMeg3yUW9BmPFbhr81k27Cm+PvcdEm6biFzk/KlVrr95H/3QGnInxc7pD7LtpgrxyGbiW8bwPAWn06TNZvr5Scb9FbymRTSlsJsVJD+FF3SRRJ1ku0Co3UYJqkTlPkvmGj2vihXIZOQcNU8g8OdZL4WwuyMsWWW8usdISmV8tMf9F6+QkcJYRo+G0yUpjaFLHRCSVK0umqjiywGDgkK1L9LsrxKuZdj6wRsY+MA+OufbNJpNLp1ZY3NG5XRb4ZHTx2i37+Qjd2d514HbmNq9CyPco/PZM2giWFaLZCTPWtNiixigigGmExBPFOj2TeKpEJZhYkswtH0EhlWiQgKrY9FaN0gXAqS38GyZq4E3cwXq1VKPfzV19rcz91fA7839J/79jt/BPneaVmiMhZVD3LdzL1Pb9qC22pw6tsD6ygIjWppUSsVY85jQ4niKzlhoG4VYimWpQapvUO+bjKSj9F0fT5AhcFnoLhCSFFYNj5iwgazIVE2RaLPA6OAsI6nHOF40qbUsxOg2tkQVktow7xjbgmunMfQuy7ECVkJlR8bhTGuQfl8hFBGxxBg/9IH/m3Ljr/jr+w/TcbOU+ytMRHejJGPcf38COdLg5ls3kdJmUTUJZ9Fh/VKfv/vKeRZXH0TwNNLRaRLxGFs6CUaHNxNP+MTsEGVNYGQ0S2bDYDYo0TVdvLKLnNPYlhw
hq0g4kTRpXcLqlOkYl4koMnVRww1XuO2un+GB87+JJibQIzpDUog1q8HW9ChxfROV/jHassatSYmJ7dtZn1/GVrNcam7n/gtjOP/xIunOOlvSCfZcP83aRRPZW6FqH+L4MnzA+AiuMIypDuAqNaLJEoo6wageYqHcZ2vSY3LEoVpXCAUevicj4RN4Mn4ALbfLxI4RLMlDkySUpE60L2MGffw3OzDfJF7PDP5qaynw9+X6Rsn+ahk8/W68peQO8O/O/Gv2p7+IGC4TSo9wpu7wPz73WZquyUd3X0elNUyn22NqahNx3SOaMNgZSrOyWuTUyhJTiSg9K8VkPEIUAVGNgd5hQFeIBUl6rsdEagDZ7WN2W4zFxmj2SzT6czTZy3R4nlRYIZmEdEwilpZZqDYxLJ3ypcs8sP4og6nbmP2nN9N/+ghqUGMsMUOjWeehR55h954f5pd/ZS+i2OPMtzW+fazIV89dRurU6SsWf/X0AAOxKe7dPklMDXj46HdYs1a4PTJOV+wSOGe4vNzi3FyUZMhBFYa5azSJKuaJKVE82aHdC+PJI+waGuB6PYETFhC6KtuSSey+xam1dRxXQZdUov4VDsz+HH05xMF0mKF8nlxYpF2PM9pcR4oN41qL7Bp2SUub0dUd1NsORiLD4tolLtb/gmxPZP/MOLPbh1H9YVavPEM0mef6gzN80p7k1Oo6jxw+xcxYnORKB1HtEpbH8LwExxca6FKIcDxFdV2i41j45ywUuYY2LiAOKqiuhNYXCBQZDRnH8RFF0AcVmms2373n6A8OLxbxayH6q03s342XKt1czTJ+rXnLyR3gPY/9CMu/8CCmnkLV+3itBM+2uqzVfeJ+FE1PEpsQMCpdRsai0BnGJMJkXyAdkdgw6mhBl5FcHs/2mcimyAgqVmSODdtHsbuMxEcRZI/VYol538D0s/SNQ7xz8x6CQOeSDeO9AEmTaDYsau1LZCMxJpNt4qEy+WSG92zLYMZlQgzSrue5XDvC+QcSTM8N8857Mhxe/yOOV4pYnsDeWAbbMGi2v8lUaBNu+SYsxWUsXmS7H2WiUCA/0qJnTBA6fgmBMktNjY7s0lJHiPg+D5yfZ2ywwOxghPPzF+kLOvGIzt6ZMJkBm2MXPNIqjKdi1D2HqAjjqS305jcwlufZVTjIei/J4VoDw23Q7oq07QopNYLq7mDPJhkjOMffnRDwFQnX7dGyB9BDQxTL15MKuqRTVYZmbqTUXSYeVbhp90+R0k5RbDVptmya0STxeJdmXaHtdokHBiWzTr0TodO3mYrJ1DsORtdgOjRBq9ZBEmVcz0FCea43jiYQ+CKoHq5rchV0tb5q+G5ifiXCf7PF/mMXVl/Wfp+bHfmf93+QhP5irup+7t8v7d88TGlBwHaWWGpITI7m+YMHHuFo6Vt8dOhGEupmMruG0enx6NMOZ5qfxdNk3pm/nrA8wKAuMpwcIJ0UMNsR0pt9Hnlsg8VOg7ge4uDMJj7/6Ne5YJZwgPfmZxEEh41+nZCkcd3sNvZtT+PFRHw/jNnr4XYNyq0CanaGo/NrmO0V/F4Dz4ehgRJ/c2IBVROYCt3JqcbXSDoqXQTGktMMywbRwRYf/ZXfIjJoc+FzJ1k+3USLpFguLeG4UUyhj9lssu7W6dtNfMFGUDQG5BRGcIVLpklS2Mf2tMdqyyAQA3SiSEqK7VGbvhDB9UKMxmRGomFcYLltENJieK7HlfZZDB9igUEirDA6HMJL22TGtrLrHXsRUfAuL/GpT3+WYwtVwnoYz1UYV0cY3/RBrlSe5t79m3ArRaZ376HTWiMeSlBeWgUtxMy2SSz6fOFvH8U3VSJKimrtMsl4HBGRodQAo6EEjghKJELXtxEJUAo6sx8ZRUxHkSWVABPkGLLdY/8H7uLExVNvqX7uPyi8XJm/lrz4xPB68/3W3G5bW28AACAASURBVK/Zfu7fL/Ffv44jH36YB46XWXPn+MT4zfzCJ+/jY//nMzRQ8EI
mR599gH5PJhEexhFiOFYfy5UIBxGaWocQJqaZxDOg3AqT2x9ikhyy4LM612d60GFxo0vMTpJLRRhKxBnetgXbajN6442oeR1Ji+B7Bla1gd2Kkexc4Mj9T7Fw8VmmBmMUMgFRZRqZWd49ssKheoa2tcbWhIMuTLNWKyHbfaamCtz93g/Tnmtw/psOl0+uYTQNmtQx7TXajkvTFfCdPgnZx6BGLpRgw2rT8lVm4rOk5IusdM5R6e0hL7do+n3KTpeo32ZNSOIHDVacNmvtQRLhHAoBJaNDJtICp4umePhmQLJQ4J7dm1gpyQyltxOddHHFBGq8hT89wSc//l5OPXqSbz17hUDxGYim+c7cZ5iv1xmPhLl+a5LC5hjmyRimY5JIFRBia3gRiZqR5n337qW5fAHf6HJG0qi2BURVoG341LEJAoeRiES720PQJCJeFIkInuXhSD6KKqOIIvVlAyEQ3+xQfJtXwZsh9hfe940S/Os5qPqWztxf4FdHfploZpjxgQijyWEGBqL89WMX2agtsu722ZrNcPueWS6eaRIIfe649QYKUxKxTBgtEqN5ZYNwT+U7h5c5V1/AsnTiQpjt03G2FjLIwjq2OUkQ7yBFIwzsnYUZG/dKH1sMaBW7iJ0I5StVnjpxnJxaYOfucVKDXSr1KJa7Sq0scXhuldnpWRbn/ozDCy75zE7qHZs9KZ07biuwa/cmelKSr//ed6h0OjQDB6dfQdIUTAdEySYk1ai0m1jqZkYkBdcPSCsSatgmKY9w3c7NIG1w8fwS7X4IXTI5Xp3HI85QokBadJjv1FjquQwnsrhml5pgkRJzDERVRDdO1V8mkLKk5c3UuseJaFOE9SZhdZgdezax6z3jCGGDUDaKXbrA+S9f4G8fbBBWL7AlI7Fj/33YvkMoNUK9Z6EjIvSbpAdSrHdD/KvPfp5kJMM/u/H9jEdMVlfP8fSlIr7TBdFmMJxnPJYhny6A1EUNKYRjMk2iJHZEGLs5hyoJyOkw819b40c+/WGOnD/5duZ+DfFmif278XqL/vuR+z+Wuf9AyB3gX0/8H2iCxS0zt7B58zTnFo8hxwSS0TSpIEcwaJGbTdCpOiwdq1CvtNFDCQTfx7caNBsCiRDsmJrgyNwitukxGBklkdCRYg75jM/FBZuBZIZAFgjCceJjIpLQBtvCtVwEXGK6ytrhJRaaQzyx8hi2aFLv6Ny1ZZRTl57AFTXesfM+Ti08wKrf51i5z8/dvI077/wkaqLKwpeXWVw+wolOC8mPoUkBruSiyi6CE2FbfhO5VI+w7CGLLeaXDRRyJGJ90uNbiQ5n8ZwA7HV6czKO18L2+3i2TywRo15r4Doyxd5lapbFsD5Cy2wyoEfpBDJNw6TodZmrOwRaEd3dgqpb2NYSKblAIZIgnsgzkN/H+A1xdv3QEP31BdafeIpPf24eQjL/ZMcdeNEwq/USuyclNt8Q4y/+qMTUQJx0TOc3vvG77NHGGIiO8b537sAQhuk1H+ehQyU8QUZWIS9lGcqkEV2Bni0wOJgkU9BwNRc/P0I4JOLrIkFg8YH/9KOcWDjxttyvEa4msb8Ur7XwX63gv6+yjCAIo8BngUGe65366SAI/qsgCGngr4AJYBH4p0EQNARBEID/CrwbMICfCoLg2Ks68teQ31n8Lf5g+x8wMwWGu8qx+T4TmwbYFIuiRfqM3bkNL20T78todphHvvUsllljY8MjqrkcmJrgxJUSf/HYt2lYDqrgkYk49Nc05HCKzQMpGo05js032D4kEpHj9FbiJAs2wzvG0W/VEEQB2zUZz0isfvU0IwWFdrNGYFisFUdIxRUCW0Iy69wxM82Kv8hGtcXdd+0iOWbg1+rIpokoSqSCLgtOh77jMqCk0USNhNJjUDcQzBhCXCOX3wz+PK1mlGRYpVmOI0caaKE4jplAS3tYXY+wHCGkiXi6TE7zWV5zKMSn2KRLmJ5M1FIZyuYwbYNW2cBrN6goDXwpiSit0Oh
b2K7NQMKi0Y+w4Z1DFyOc+WYMWxDZcmeWTfddx8FDVzi9odCV13G6G3hmwPCdt6KN7CMQ/jNG7wBZvcYPj0+wWBtF8iUsU6ZuVInEbyatP0rREQgJHnXHxmzIFDRIJwJwAnrdEJ1Gm4zXQUqkcLQ+6WmJlxpRfavE9qth6OnYy953/YbO63gk/yvXgtjhtS/dvPBj169liebl1Nxd4FeCIDgmCEIMOCoIwreBnwIeCoLgtwVB+DXg14BfBd4FbH7+73rgj56/fdP5+bM/z7nbv8n49Qf44HCW5aMlhJ5HtyNw7ltrlBZLVDaaTE3FiaZGeeL4tzAtqBTXOVub5/rh3dAvsGYcAVoEgo3ghXG9gDlLomTKdCwfy4zT9zKM52pMiQO4skvWgXJIoRtW2LtnG/dM+RS+cJJmZRP9qSZPz18gE9zE7TelSEUVriyFuefu9/KBd53l6TMRbrwnTbDap2Kc53TbxpEGycrr+IFPWg2wHfCEDIEQI5PNIesuy0UbTRoinmjhOD0k16Q2rxDJRgirAe22hev7TOydQMwrRGdlZDtK+DvnsBZlEtMBEVnjyCNn6BGnMJ5DFirsCSuMJUpUzS7njIAZKU5TMDC8NEXrAtEgQrl3hXZNRLi/i1SdZWzPILkDdzN6vM6XDy3z0+/axw3XR0lOTVG+2Ga+WKYav4Tj55nZ9XHeNXWF5kqdQ2c9up7Az/9yB2vDp3m2zWrHp2d5JMJNqmqYTUKBrSM61fICkidQ7/dY9qpsPVCgsyLiuy/ZfuAtE9svxSuR+Mt9jddL9teK2F/g9ajNv5aS/54jTUEQFF/IToIg6ADngWHgfcCfP7/bnwPvf/7++4DPBs/xNJAUBKHwfR/pa8S2P7yP2LTF6JDIoJbHcwLaXYNnv/EQYtei16ny4BNz7Lo+YN/+j+CIFqKcpWab1Np1BmIuETWEFeiogo+iyIhOh3CggtMgojZQNJOx6BJ9z6WyVmT9zArPPrLEg3/yDF/9vYs8+JdlOs4UozftZvuuQQQhwpWuSsf1UCIdUkMxwiGVpSNFjp6eYstmg/Vv9lk6InLBrRESq9S7dcwgiSQNoYXH2Tvqs2tYxjF7VCtFKuU+QbuL2+ujhWJIqkZIFXA6bcorCxitLiHdJ3AbVLsOYiaKLem0XQknrCFNZgjvzxPZO8TYpnE8oYjjakTTOno4Ri66jenMTu4aSHIgv5d8JI7fnUfXuhimQq0bISa2yNLHWS/RPFUm4jgY1nma0klOXQxwxTh210CLlRlPdkipNuVul2cvLuD4NzA4cwdDsSsE7hFWl3ez64N38I6bRjk4DIPpBoZVxfJUPNHhzEaXYi+EnhxEk5PEPYNLp5ZobFgEwXdfxvRWi+2/z2sh9jfyda9FXq8T0guS/354RbNlBEGYAPYCzwCDQRAUn3+oxHOXtvDcl2PlRU9bfX5bkasE4ZZhjn30IeqrDZ6sL1AYGuR8p0rxioKqSLSdNT7zRy1+5MM3YjbeTXntAhcq57lsLXOjMs5sdIQqG2S1DPtmpzk5t8qB0TGcM0dwkOkbPpVApucsEYQLXKicBEziOtyUgdhimaWvHWTqo5tZO36EkajCltAcD5VOsPj4T/CxHS28PnTMeR48LfEvbrmbpaef5pEnlyi0wfclRuMKgtvFsR1828fyBjk4uhspI7C4UaQQVpGzKpKvsLZew/JEwqEoAwUF1wkw+yKFlEpITGMYFvXLDaRSnLDUo7HWZtuBabpmGUFP4acHoFRFjomER4aJDJq013u0aw6tfp6q02KbsotwRudCr0cxKGP4Boo5yd8snuZWRyMWJNh06xYQi+T1JqXVZ/jSf58kXdnPnr1pPvG7/57jX3qQ02fnOL7QYvpQiGyuwf5b7+HuXIOzpx4nrO9n90/kKJw2+cgOnfWnHuPJB8/Ra2nIYZG15iIrTYWQMMxURqJR91isztPruN87Jt4isf0Cr7eAX3j9N7Jkc7Xyes2uebHgX00
m/7LlLghCFPgS8C+DIGg/V358jiAIglc6KCoIwj8H/vkrec5ryb4vvIOv7P8SK80+TW8diYCVxgKFRJ5BLUfJafPwN4/z0Q//EIaV5OLjeWTHQlQ0wr5JfHyIaCiMadvctWcnF5eLmFqCmBzBcOqEnB4SSQKxxWrnMoanojRFcpE95JQh7OJhzv/ZOIVCCuIRPj74Y7yn+jS/df/9/LviHfzMriyVZp2zSw/z2S9sYk+iTrd3AV07SLUPvtfEUQvEtRSzMZF4EGWhXicbzmK5IsLsKJ4S0G/3SG0Sifc9xF6A2U0iSk02ehuk5B3IWY2UGKa24lAtn2Qgs4mFK0UkaRkllSAxYFIv17D6ErHxGC2jTSQTQ7XD6KKDvVGh5y2gSmNkU3kOal0WFZ2Kr9GyG5hOHd+vIBsZnnxgGSk8iidGSeYzlGsP89nPfpUrx7bzb/7DJ4gE09x29xaGzv0F2RRMTm6lZ5Ux1gaIpTSMYgmnkaIvr3PuUI7hoXu58yOztOeXufisR11bxndlaq3LBERwPY92w8Jx7H80Fl7P2A4RfsWxeS3xWkn+WivJvNG8GtG/rAnAgiAoPBf8nwuC4MvPb9544ZL0+dvy89vXgNEXPX3k+W3/C0EQfDoIggNBEBx4WUf6OvD+ox8kkUrh4xAQR9c0mk6LaDhOUoozoMXoFruYVYGp2REMz0ZybdS4j2SrGB2QRIWQplNxBfq2ie8ZDEZU0qEwimpimga+0MBVGshonGtX6ZgmiYFZZLFLu2khuFGC0Ay69g6uywVI/sM8OLfAeNxFExLUFs8zkYwxFnVo+QbxhEEs0cY0L9HoLGKaClpIAq9Pu9zCNsANejhOi0Bwcfwwpi/hhzXMwKRphYiP1LEzKUbekUQetIklDWZuDJEszKO6Nol0GbOxyOJTZ1g5s4quJLB6DrqWgohOKCnRaSkMxAVmC7vYPJ0noUaQJR3TF8Gr4YhFRmIpWlZAK8gRtFfRLAHPaXNmZY6mKJALXPIZEcE36dtLCE2BbXvvRBVS2EEIFJ1oIk40HKHdgGYzQA0ibJkN4bsdXGcINTfL8ESUW/fvIZM0yCdswr6HIrh4koAXvHTL39c7thW01yBSXzlvdEb9g16qeSNPTvcO7XlZZZuXM1tGAP4EOB8EwX9+0UN/B/wk8NvP3/7ti7b/giAIX+S5wabWiy5xrzp+/dwv8ovj/5apSBqsBPkBGXouM/lhNk1orLRXGds0SbNrMHNwBq/hUVlewwy6JPQEASLNRpPtA1k0q0laixAVFLSIwKnuKXqWxg3Z6zjVWsYNOXSMImV7goTvkBbCNKp9kps1vvH4SUrlJnff/s9Yf/BTRNUig8nb2Z1/lImoSyGzlXftLXL/kzb1rk/JzlBQ4/g+LDsWSlcmKYm0+zXCoQj9mkB0QCdQXHzbxDRN1hoG4dEU++6GfvBBIqMa9T5EkwlCngGhCNGwxPvfN0O3GaUQKGhql41D55HlEIKmo2cjtBbXqC0byOIqE7v306padNodNF1gQI6zU+2yUIdBKUHf9anYNn9++i+Z0GaZds6QjWeYGDjDaivKQHiE9sIExhUJsy9j9DcIxWfYfLDK0tkO7X6bbCSg0+5g10q4ShjXiRIankLPtfAaHqKoMXbDDMh1JveO41Q3aJsmQzty4Aj8+S//l+/62b/VY3v9hs4bKt2hp2Nvl2neQO4d2sOloPaSj3/Pee6CINwCPA6chv/ZYO/f8Fxt8r8DY8ASz00Xqz//hfl94D6emy72sSAIjnyP93jTJ9t/fPAXec/YDqYKIWo1DUWJYlg9dt+WQxwq0BUsBofSWI0+/pUW50/2yEUsXM9GCOvossbSlSVUUcL3RWwhIKyZtJ02YTHKIyvHaFsK2dAQI8MHKDZWiaVTxDyHbZtmOdb+Wx58vMuewo1MjB+l7yS4fnA7lxdOUKtJ/Ogn72P9vMOXH38ax6jTdxuEgjSq4mOLfbDT3L41h9EJkBUfVU4
RzmdQwgrxeBjL6BAaE9G2RiGs40kxknkB13DxLR+zZxPJiahCHF/ycGoySG3slowakWg3anh9H6fhMf/EGuZCj3wmihu0ECQBQVXQRB/LtXFNgWKjz+VGmbLdp2V30ALIqWmmUllmYnn0UYf13ipPLpi8c/cN3PzxPVROLjF/uEVItTi7ILB3RiG7LY5hKqytW6yfr+K5PqYk4zkhNt2a4Ia7NQTPw2iGSQ2nISHQrEmkMhYA0rYUAEHwD39r742I7athnvubkVW/Gsm/FUozb2TrAoBngodoB/VXN889CIJDwEstAPkHURs8d7b4+Vd0hFcB/9/G7/HuzX+CJKUZKHSprPcQBY+2KzGUExG6YQzXQYvJuOkwuUEfr2XjhURiUQ3Jc4lEEoj4dPotFFknpOWIxwZQRY87nP0YbpfzvYAn5h/Hty20hsxUpMASIZKFOio1TKtIczVHYWKIZD7JcGc7B/a2OXHWIa008MwauD6+H6XiNggFUfLaAKP5DpIWZzjt4hoRbNPA67YJLJn1VgcfiQM/ugeSfQRkSldcxEwEXX9O0DIqnhXgp0WsTg81pWJVBNrVBqODA7hCEskzcQKLodEq1YaA0fUZmMgQHwnw5QiSJKBHY3TWW+gLq3QNGVnw6JgbaFqcuCYSRqcrdJC8LNu37OfYwmlOnWsQ+0qDctUgaNZxC2lWuydIr17HuiJg2z065TIrzSo1q41gG+haDOnpAvnpe5nZa6AHMkfvr7Ba9YmnDd7xY0N86vOf+Uc/8x+U2H6jM3h4O4u/GnhL95Z5pXzo0E/zyC2fRyVJROkSiqTpXGqy3hPRR5NotkJHdPBlDz2fYsGosHlfBiSBzskcjnOYgVwOUckhaTJaRCGSTKLHfaRcnE5R5v9v776DJrnv+86/O09Pjk8Ou/tsTljsgosFQFAkAWbCJBV8FH0mHc68O9tVDiq7FK7uSmWrfMoqlWXJss+maMkiQZEAGIRIEEQggMVisTk+++Q8OfR07r4/9oEKxQIDgN1nHqz6VTW1PT3PTn9r+vt85je/7unn/MUnyApdbMlDR8V2mrRbdd5/z3amT1mMD7u4hg3LDa7ltvP0bJP8bBNFOc+H972PfO4M7eVlDDHB9lyWuJQmr2dI9B3C9206DQdX8sjnEmjxAqoeYtmzVBsgx2VCPY0QQCbXZuVMmdTWHHpaRFQCJDFGYFmoukbQDnEtm5gKzaqFVtTxbR+KGqMf/BDjdzRpXGyCrKPk4/gBtNsV4uMpQlySzjC3KTmeunic0dheunaXFTp0abJXzZKzbLxwJ4F6CUGbZ/GiTVLVWGuu0uzfwe5Bm3Mzz5Ov9tG0LWzXQxRbOF6DhbLFrr4mrYrLpa98Bev8B7E6Tb757BMsNARu3znOq/Vv88t//Gu9bqlN490Q8H+xe+SWGL1vFtEVlX7I+5//HM2wjJ6O0zXbyMSpLNWYujDF8tVlnJaAUgzJ3x5ncP8WtOEihR3bUAdaqJksruuC6JBI6IiqgO2bVGoOekbDsMrsLyQoaRqZWIGYomEJXSqWgxg7yi98bBzZ34lMnrie57GXTvKD2UfRdZmZtQZfOfUsKb+AFVNIaDEafpyyrTJtmJyYusK5hRUCRSOXz5AdSdOxulTrbdr1IYpjW7EFA99z8UUBJSXTty2GsdIg8EIsExRNJvQdgq4HvkKsX0GJi1htk6BlIokxQtHEdcEMZcJ8ilB1sGwTTROQE0kkRUSM+ajpBB5d+sM4mVSGVF5G1zLU7EUW6jqCr3NtcZbjU68xU4dAKDA0kOK9h9LYZ89QroSMlOIMl1qMjTm8716Jo/sHOTSWoZSuMCIPsC3vMpjrw1+YpTN7jbTs06df44W5/xIF+5voxUj6rb6hbPS0xo22md6conB/Ex974QvYiogQC6jbDYREwL3//B72f/EQw58cRskWoSjRd0hHHYvRFbvkD5eYuHMHNR90NUZttYzXbeEaPn43xKirlIaSdIIMFjk
c1aHh+tS7VSxjhsqSSCr5UZabL7PaqTFbq9JsPspEZpBCsh+8SZZar/BSbY1j4/1sLZg41jKWN8dC5Sr5sIUorWF2fKrLVcoLddL5kFRJITsaYjsK9QsSZksk9C1WF1axbJVkUadWbRO6Poun5hGCGAIqrmcg+EliYylC0UOKSTQuWYSiR7w/IJ7VCQyX2qRDd17EWmsjBhLduofXUVm9DE1jmMt1n6vtBtc6LkvNOnboMThSITU4wrkrVxmJLzOkCcwuTnNhqsulqe1MHChx5549lGtJvvSKz5PnR6gt7iOX2I1VnuD+7Xdw9PCH2NG3F9wAs7rCtnyMTx07hp2Y5uGpp3rdQpvWu2Gq5N0e8JvF35oLh70dp79wCiFeZ/8X92BnZOSOgpAW8E0Tcw5C2SNWUhBtAdsIWP7+EqoZUH2tgm9XQNWQ1BgxzaVuetQqDTQE5GTAxfI8ekxjb24vK2aTbHKQ4V1buHD1HJNX5xjL6xw5MIRhTpPS9/AnT5wjpXW4bYtOwttJrTbLNdfD9SQ0P2QinSab1Bgfy9MwQkbvHEMbSxLUTfAFRAuW5rqkD2XxpRjJYpfylE1+LEmyFIc4CIKI13LwnYDq8ir5UgYll0FNhXhOAL5CUA2wJAdNDDEurbDwcgtVkAglCYhTazbprNTJag6+EOBK00y2WiyVbXw7iyjW2NM3xkBiC27QYu/hHAs1kVdfnePgUInXyh0u1Fb46P6jOMEFvnH2GSRL5rbSISbiEyAvUmmImAQMTBzAqrYR5ZDzVZPfvvz5H7kv3+yA6kbYDAdU38xGT9G81TeVzTQCfqs28s3pxx1QjUbuP8Ztf3aI4t0FwoSDaGm4uoNj++Bn8LwuiVgGyZKwqi6yEmfk9gH69vbR8jvIWgFNSuM5Fq2qRdjNIAQpXD9Dyx/Ajt/Ogv8ZLrXTtDoKZs3m5Il5zk1ahPE0Kb0fvbCfGXsPC4HD7fmQIT3JRz99Hwf2BxT7coh2A9euEyoKhpSlNDSEnxygf7eOEJMJtBAnKxBmZcK0Qr0yS9CVGdrqkOgvMrwnhdl2ML2AIIDQ82kuGwhhSHGkj9RgFiSJQFSQ4xKIIs1WDb8ToCZiZLZmCFoOghMQNC3aS/NQW0EJWmiyBgE0WodIBh9hUD+KnMiSz+zHsAZJ6YOU+hXIHqCY8xgsKFRaChknRok6Lcsgw1b+yZEdfGxbCs2P4SkGxcwoW/slhvtEnr18hVeWFgmT1zhb/7OfvEMjfyM6D/7WFx1Q/QmG/v5Bmo/Noo22kVc0bNHC8+vo6RjteA3d1HCbFoLtIwoyjAok0kmGikXatSbVNQPXCxnqlxga3sn5mbO8fOUUZX8Nx32Qa/oQhxNDjJR8zMZllq1V/CAkN/YAjYuneeH0CbKpUe4du4c9/RYvvQL7dmxHnZpiMJZjsVOm48yzYricnpTZOyQjy0VMSaaUzBC6HcozFnYzwKi0mfneD7BrR0kerpAslUhKbSQdZFUjCGy0UpLQNlibE8g2AqrXuoweziL2QyB5iKFIZ81Bi6VxWjb5vjgiSaxuBVGM44giKUVmsWKwaAvMtK/hYZPUBCRMLL9KV/To2APkU0WW59uM9B8lnnmBjASlMR9tZgJBcViLwX0/96scGwuIlU8y/YKF2VAIg37aVy6wI3yR2NAY37k8x2Nr3+11q7zr9OIg608rOrj6zkXh/lPIfHSclYcmWX1pBb/osnPnDoJUm6QgIeg+shZH0FzqzZDhgSSJVAZiHrbkoElJcDpMz00RE2XW2gFIJglfIRR1mp7FlbbAeCLEJ0fTXiHPIILZwvIvUDV99o81Uf0mtRWZWnOatLMFy2mSjGvkxEEwmmiCA0ELx9KwygpTp6aojYxiOg6OVUO024yP5Mn1pylP1+iUSmRyJvFkCtezECULryOghyp+CKWYg7FsERhtJLVA6AUEjoyiyZgVFzHtMH3cJkWC3LiC1CzRKVdwwxBIUal0qFo1unaFDi5
NS2Vb7hCueYXBRJIVu0YmvoVu4zw1d4j+dJLSgEjL2MZU7TTXluqY+Cx+eZGtfTZHju3DdGdZWnGYq7m0m3GO7fowz0rH+avj/6PXLfKuFQX8rSsK95/SwGe2475i4DhtauVFCk6a2lWR5IBKbJuBYKpkNYGZF6fR+2O0mi7xRBJtKMbyikPckIhLAhnNxGxL2KGGJkok3SahuoAb5pm3Zlgyy4xkRxjpizO3UkLXyyw5H+buiVnCUId0yMH3Jjj3rELcT2O70FQLrFlLLJk2p5ZP0q+McqQksHDhJeJqkq1DJZKDfegDBZZrDSQtoPvKJE9+S2bk4Dg7HpBpLmkoeoBrtckkUxg+FAYS5LbE8GICWhjiCD71pQbleY+r3zlOX3aEsJhk4Vwbny6qJ+GEKXynhRpaoNTRJAdd8rBCi7ZzlmTgU1AGyOXnmaqtcbC0BV+JES/m8ZQYzcUl5mcrGM4UvpqltvRlsvZ+nl65ysGMjh/PMrnyJOOZER63L/Lvvvefet0a73pRwN+aonB/C5T3JAiudOjTZMymg54EWQ8IpBjGlIvctREMDWm7SDohUz8J9Us1fDGGGgtxfBvLd1F9GUfQiWs+D0zcQ7dRZWtWwZdTTDZ8xvsTFLMDuMIU3vkqy2tf4pX8A9wWj6M3JZ59sMnE6Cgha+wYNLl8dZnp+jUSikompmK6GqdaGkeGBknpMaRCBleGTqWBjo5pGHTqHfoxkOctLv7XOLPTS2zZu5PB23N0fBtZVXFKElgWp5+YIkmGpckmGXuVsDvFiLiVwFmlMd8FxcXsOjiejyDqvLB4lZrrIAUqGUVCj8cJQ0iqAZIlIBUKvO+BI6yuZenMG+RTa/zVqXmmZww+h5AElAAAGzZJREFUsjvPP/zMIHryEFPXKpSNJp1qnHrbZOv7S1iTV1iqLvKieZnnyw/3uiVuGZs54N9NNtOZPlG4v0XiziTmS210HWwUgtBH9UOmn6mQGjIRdZ+YksEKJGL7VHKtbbjTV7DCFhNjw6w2LMzQwQ9byEKcmWWX0NG4a8+d2H1N+qt5MvE+0kWZejvGlmwCA43vvvoCM4OH+MJ4nMXpEzSNMQbHkpTlgFQ4w0hMRRBjCMToak00K4XjScwYFkNNj2JJRFczVJeaeJ6Lb4XkilkEScCxymwriQzHfLoXyqzUHZbKNVLaEAMjCYRrDVYr0ySVLrqYIJ4oIflg+gJCYBA4PproEwqwZqziej4tbwFd0BiLD6GrGknFQxHyOLLAlXLAyZkSjctXyAcGliBSb5/B85s4ylH0LYeQU1kGg1WyNR9hosLVKwHXZnO89xfexx8/83WeLz/W61a45WzWgI9G729PFO5vg34sxeKXLxMrycTWZJYnA8RwjeLEALGRBH4o4Tkhnm+T+7CC8vx2pi5dY2G5TCKuU9AzGI5AVstiB5NM5LYyWb3E0O77+X9/+27kRB7L6FCb1dmiH8Kjw3L9PJWKT+Ge+2gu2Cy15uhWdzIr+pRyt9MfX+N8LUbDLCMrAcmkyJVui76kzEq1QRCO0q7OQuDiuwaKlKbezhBPSjiBS4iKF5MojueILazQutaiL91HzJgnr1axww4ZKY8Yh64PZscglEJUKUZMzyL5Hdp2A9+20aji+yGW4CGJKrYl0zFDhtMGXctl2TZ4/JGvoostkvJ2JmyNREWmUEgT8/MszQ1QqV1Bdl0ywykeeSpAtWuMVC/zP3/lHF+bjYL9ZrlZAf9Oz855NwT8Zhq1QxTub9vw53cx+xsXOH9iBqtpsu1wmtR4ASEl024YKKqMQAwxriCMJJDOCxjYLLcbiFIckxai1+Cu/jvYumeMv75UZeXsEvXUBAc+kkfIKYzuLdGud0lpGSqdVRzJR+nfxe77TOZPT3PVbGMr8yyGRQ6lxknYrxDKHkllOy1zGUnMYXUdYlJIvXqOYkkkq+s4JpjdOnKlSbOtIygBXVtmbMswlhEQhDoxRaftVlD8DJVyHEkWkFU
FQVWRww5e18RyQrxAoyG2wJXxPKj5DulEBsWsk9BiGKaEEFZwJAkcibF0jER8iivVw6xVBQ4mZwm8Me7Yq7BcHmVtucPJ2ecQHYGE4lC+VuHlKy8SaD7ewiJPVr7a611/y7vRAX+jTrvczAG/2YIdovPc35HxX9tLQVFJqyaO16DVMKgsNBBDkUA0MTsGiBaxeJeFjsnMWoe5Vp3BNOzOlTg8sov4cB/fuFDj2dnHEJRzDIkX6JyqYc5I9N+5kw/+o3t4YbLNbHIYpXQXp2dXEQ7fifyRj/EfL1/l2e5e9oxephoI/LMH7uKO9A4U/yqOO8jV6gVmK9M0hSoLTovvX63x4hxM1bpkNRlTSpNQkuhBgOq1uHr6MtMvz9NeriIJAVLgYNdtUnqASJdqx6TTMel0JLR4gbiq4Dp11qorTFaWWG5YdLw2FzoGoDGgZ4AaC12ReGjQnxpkdPtRPv7AP6BuvszZ7iXmbJmCKjGQOkxe9lhpNaisPc/syuPIUpszUw/hugtM1Z6Kgn0D3ahAvtHn02+2EP2L3SObrqbXRSP3d2jfg/fy3PsfZOpSwI5P+xieTrvaITeYJKHqWIttlq4KnF6YouHNk1LySL7EgXg/W4ezPDV9jZcnzxFIVUStQuHw36ORVGinG8ydEhmJZ/nAPQK1x5+g0J6lXt3Cc8tH6IYenxh6hWtz52mN/+90wjkuLu5lz+1ptlZiPHXuGv2ySiJZ4IWFefSYixY4dOwGgprjSt1EFx18R8X2RTx0Dm0psLjaRc2ksVyHUMljdJvkdAm/lQbJRPBEun5IrdpETyeoSSKGWMPzJBY9DT0+yl3paVxLpm571MQuybjBXTs+yMidt3F+Xmb33Rl+rfgJzj3/BN3JOIviMJ32Igd2jeJ5F9k9MMTiqkUx5vFv/87d/OW5p3niuTO93tV/67zTEfzN+qLUZhnBb9ZQf110+YEb5Lv3/Rna+BC33TuGl5LQshDWk5x84iytKjz64sNYTkArkPnXP7OXjLAVR89ysnqBS/UrDJRC7r/3I0zcvodT32pwdaGFEiwxvLuPfXtc7AtTnHnaoBSE1LUUr1gtDDuJprsgdpgtD3Go3+aO7VuJxwwa1TlaawKnKz7TzWfpTwyghWkENYnviwi2QDqmEpcVukKAGPiMjt9GTDDRLJW6scxiu814tkQpFWOltkzge8TTQ6x1KlimRzqpcHa5jCvqjCgBC1aNULQYHDzK3YUmFWuFC3Nt7t1aoDR+gJdWW9iNKrGdH+BTnx7Gm57l5W/NIKghK94+RvpOsUvMkkjDpdmL6JnbeKjzdf79I7/3k3fATxBdfuDtezsBv1HfgO1FyG+mUP9xlx+Iwv0Geuijf0TLSFCQi2iKhhl00RMqFxaWqNQnkQSF3RNJju3/eY7Pz1ErP8cX//CXcNckJKGBUFAIOxbXvn2Wq1fXMFoqa90s7/+7h9h2TEGvT1F+ehrX28Lawlm+feoUa05ArRNj1T7D0b2f5LPvvZuLr13k7JzBe0eHSElLpJIBnj/My9NXmDUsfNchDAPEUCAWixF2bXKJFGXXoi+bZr7RYTyRom6apNQEDc8i9HVwVsgkxynb88hSmmIyxWjSYLqzjOP0IQWrNNwAWfD4mfEHyOqr5NIlBnYNcWqqwomrMyy1AnJqnfeMHqTRhe8uzFI3HazWKYZTB7j/wBH2f2iCs8dX+NKJ/8m3J3/rhuybKNzfmbcS8L26ONnNDvrNFOqve0d/rCPy0/ul536fX931qyyHc5xfmSOrpxjLJHBrUPMt+nSLB+79PBdbFo88/zS3j+p0KzXc8ybLi3Fsr4nkSqTEPvaO7eLS8rMkBjs88Z0rzH07wSc+uZV7PzXMy19d4MraEEOFJnsSML08w5KRY0+hyZ89OUXoV5mQ6jwzJ3KkWARLpZQJUcQuohfD9dooqobvdAiEEC8u4sZF+v2ApKqT0ldRkyVWG2uUEhqW08bzDCRVQxMaiLJH2y4
T9xLU3D4CQSWwmwwnRoh7NXJJmUx8BWJpqrEUxYkCR4aaTF2dB9rsH9rK+IhLdbaC2T4JbpOY5jGsrnB6/jxrf53gu3O/x7cnv9zrXRpZ99NM0fT6ipOvh+/NCPnNGOw/STRyvwn+45GHOL3wDLtL4+wpjhA4BqaQwFZtZloLfPzAYRabHQ5nR1g1F8mMZenfOYxWysFMmYsvztBuWVi2y8TenTz8/Yepmg2G4wPEt32Ch179bXyjxEdGshSyA6S37OC1WZ8PvG+QB7/yVRxXIRZm2BGHdEJDEGFQG8Q0F2h5VfzAptx1WDV9NC2GEAgUE3kEuUUuuZUTVya5e2KEZ+dnyEhJEEzqTpORfIalaoiqxvDCKkltC9uK0OgY5CWVHaU8lu9S7qZ4zTBpdwUGch5Ht+wgmfDodJYJuwKDIwWyQ0muGi7DIyCsteleWiLQU6STDn9w+jn+00s3NtijkfuN8WYB3+tQ/3HeSdC/GwI9mpbpgT899luUlBLvObqLp19apkMNTW5z36GPcerUi2BqDG3v58LCHEpcp7xaZjzVYSB+mIQjkNoScO1ag4W2TVaOUXM8Rvu3k9DPc37++xhGnDtG7qNvJM//9ehjrHplRmL93LPtNnz3DDG7hJwaxnE6NA0PWcmTlJqodp1CfJDpWhlR8iklFETXItC2YAdVFD1OTvaYbtt47TVUFJq+RKW7yHjxACv1Cv1ZidAX8RyX4eEjHNvrYLlxPCOH5Z5l0azwwqVXGRsc58CBu3nPRw9iaBpf/4NnaLTy/OMv7uJrz6/x3R88RaE+QBgP2ZdL8HP3bOMPX/0mv/n8b9zw/RGF+43zesBv5lB/Mz8p6N8NYf7DommZHvjiS/+W2X81z/PPncdghZ/9yG3ITo6vP/YaC+1LbMlvZfHCKlVDIAwX8YIAx5BZVqco6nH6Z/NoSh5VrHNy9RwrjsRCs8UnD98GlkUmkSOZLeBYIYpbQffWMCSfQuoIQ/n70JUVXr1qYOKjSl1UzyebcsmlRnBCi5Yk0DI9fDHL3oE+rIxN0FZJhjtIaItcmm0SC8YIaFDSBUrxFHFVpj9lkVIGUSUFR6+SCKaYa91JIeXiZtdA2MPB0S6JPX1UmxOM37aL2YZDxfJpmwKjmWmee7bI5OlzeGun0dMmHbuDqd3B75/5Gr/z/O/0etdFfoKfFOq/NHn+bT3v727f97b+30/r3Rje70QU7jfR+O+PYv7pEpVzO1i+coWZays4/hIfHjzImWqTvpEiltVixXJZaswwHO/DSSjMdmcotPvJaUk8QaPmKcTkeVTR49HX2lihT7exjN11Gesf5r//q3/Ei8e/w5JhMdPIMzK8nfF9u2n53+PUxSay7vOeO0Y591qXpi2zPTeC434Fx81h+BIGedbm6wyMZBA7dZpVjb+zQyWndCm3WzhWBsPLgO2we9sB0gWRUi6BaeXo2z6Mo9fxm3002iKTa/M8+lieyqqBErzAsC7Rdiz++/e/TDI8zMDBIxyamCHl+VxUR5lbyaHQ5M/P/GfOtp/r9S6LvA1vN8x/3PPc7KD/2yCaltkAL3zmIRr1gJmWRyFZ41x5Aath0/BX+dx9/5QTrzxJJ/AInCqiNogamIyO7KDbcQnFgGu1BpY5Qyk2StmfYSy1HyWUadktYoJLIb6P0bwKepenrpyl6ub5+JGjfPpTKnWrRXJwlJg+QGP2PA//5RUGEhkGMiItr0y2P05+QCFbb3Hu0hLDwx9AUVxcZxVRCPFcnWQqju3Xmb8istZaJejv48j9E+BKDIxnMJa6LNRNat0RBGkOv/4oZ67MMNA3wMff838wWy7z5Pf+mOcvrrJ/5C7+wec/idMA1TrHmVNX+drk0/z57Pdv6j6IpmVujhsV7D9OFPQ/WjQt02P3PPQZfnP3v8e0bOZWdURVAVEhEJq0KpMMFMaZX54np28h9DQ6ss94rkAzbmBYCtuskJfsOBmlCvYQqQRIssfCrImk5uhY86x
VsgSaT19MI9RWmFlt0Lb3Eh/dzspUBS1jkD8wwkfFOrowTLO7StK/l3QKDEnDzckMe5N4jo3tJgjM3WRyAaFTxzDTpJIpxkbLKEaafF8SLYyjZGxaQZzleovXXvKYXnoGT8lw+LZP8Q+/WCOZyFNrVsi7CscO3MHu4av0p/sRuzqK0qE0eoRXTxy/6cEeuXl+d/u+mx7wrz9/FPJvTTRy30C/t++3ibldbttxBwvLl/FEndHCMGPFQS5cO8d4JocdOJRbBnoMgliJpeYKQ/k8j5+fYqF7grHUKB86ej8vnz6FFwpIfoy6U0WVUhQViSDocq0dMFGU+Ce/+L/RZoFu06Am5YiLEn/++DS+muDTD+zgwQfPYHdd1roXmEjt5RffP4i5ZtJfspEkD7wUNUckFgvBcTHtJoMxl7mKwEIrBCnDituhXmmQEpapuR5r3QBV6ePnPvEBxrcnScQFZl85SapYotHUSA0ME1hziO0uX73yHX79r39zQ177aOR+c23ECB6igP9h0ch9k/jX5/8NXzvyp2zryzOSv5Ol5UsYjSbycIJSJkXba5PLDBL6CULFxxddtudHkUKFbEym4oxhdzs0K1U0T8MJbWx8BD+g69VpOCmqbhdB9BDVYdYWy4iawVy1yHx7GVluIXVPUq7KfO0vV1hefYaY2MW3A5pyldWlD5NKJjBEhcE+HddyyTabLCy4FDOgjuxmpr5MIefSrM5h22WMWpOGWWdkdIz/5f4kzeZZXp1r0VhOk/AKrHRipHJbWZpu43Yv4bRN8imR/3HuEf7D936317skcoNsxAgerr+JRAH/04lG7j1w4ucfAVEln4xRXnEw3TZFNY7jeMgC+KGH4VgkUkm6jsVgsciJc9cwfBNR9MhpRcDCTCRZXJ6larRBFQmDHEKwTN1vEVd28Lm7dyL6/ZydP0PHWsJR9lD1Z3A6bZJyhu05i7lmmoud4xyb6OOOwv3MdyxWjAZhapSthVEOvrdA0ltDyYb8we88hUSRD9+9i/FdGkHgM3l2itcuvMK2fUf54OfuJkjLdCodtLZAc65FJhbjz790gmq7Ql4dp1DQeSV4lN96+vc39DWPRu4bIxrBb6xo5L7J3PFXn2Lx189gWh47dkjY1SLzV5fA7aKIOpIskI/34zgGupRkYWWNvBZnUImhx2OEoU+oCJQ7GXzfQpNlHDek5swwIPcjCSqKv8zS1D46XOB4y0INZPpyPt1yF10ZYO/WEuHw7RjnnqHTHuWpy8PEdhdYrS+Qz5UpL5xiqjyCEt7J+z53mOp0lXt3ZtE0nX33DuPk00CTQ8MqQ9sV7OQ26Lv+B0MuvSIy++pZBtMFDMFlYMRj7oyDzmX+4uLzPLz4zV7vgshNspEj+Ne3F3lzUbj3yPD/c5CZ//AqHV9FSAmk0zoNy8VzJcLAwhfbhKGF6SdRxRBUCT/wiGkiliWg+mn0oM6B/E4uNaeZ6ZgQysTjKoq3RtrOIPcFUAswm01crYJvbiMRTyBhc3K+zNqlE6wYP8CxRXYXHE7NG3QMly3i7fzsPbtwACGms3BuEd+Ns+89+5lfUqi107QXVrE6BVQ1jpa2SKUSOC0dRzD48l99g/Kqwu35MtuLCtqwhRNqPLL4NM80omC/1W1UwEd+vOh67j205VeOkFR9Qs/Hz49SlWQaapsl18FCwmCESrvM2PZxSoP9DBQHaZoOsgK+3yURSxI4BoYnUdJCDg7FSSpJ+sUhBocP8Nzlq3zn6ipb0w3yJNkan0IToe2k+ZmtIWPyeY7lRjlWDNk3sAurvYYTC5gsz/DvnqzT9kpYHZeu3aB5bY4wX8Sq1Ljw+GPMvriMO38cc+o8RT/N4vFpTv3ROR77jeMYq0/TtR9GdS4zUZJJaW1eq3ydZxoP9folj2yQjRpRR28iP1o0cu+xD/7B/8qX/t5/YehoSPa2NClhAt9zMRMW9pJCoZHBE5NoogamQw7oGjWEUMa02siqz1hiJ2ZwkaTXT1t
pong6K7U1EObx5C4L3QFGNZFifD8z3WlMZxI58Vk+c9f3WF1II+UHSCcHKTLPdNvm8YXjZOLbUGPbqDZUvvVIh/5SBuFknYndMQwjRbfaxLPjiPEuF890WVlukx1wGNZWOJrJgljEFbPMVgO+NzvNU/Vnev1SRzZYdJC1t6IDqptE++l5BC+Fq0Ky5CMKIaESR7AdVs5XKGQV5r9f5wffep7bB8cQiNE2Wzh+m7hSRI2rxNMe3z6zRNurM5zLktcVzs+8Rs232ZaaYE9xmNnuNJebdT50+H7e99lRAmROnSwjGDLt6jyZ4QQPPvsD/HaMnznyaVT7Yf7b03WKiskDd/0CxdIQp6aXuFRxGOzfwt89Eqdcm2ZqscLUnIYmx/n7ny7wXx+5wBMXn6LprHLS+HavX97ogGoPRQdZb57ogOq7QOqDo7S/u4qaCfFFjVCQCQMPFOjb0UfQcUncIfKJwWMsnzAQOj6xUKaQmKDeXcLDpNvSOJjvZ84XWVheZF4ZJ5GZ4Ehaoy83gNYV+OTB7WROXkOxZJxKH8+89gonn1+g7Sf4zB19fOOb56m2FkHWuTZ1mc/+7Of5o6PPQBinuewxdf5VpPoyrbUXOTclMqL9G3YnJbbEs2y57Tiy3MfUeZEB8QfUvRc5ZVzo9Usb6bFoDr43onDfRFL39dM+UUbExg9BAULFxpFiCAmXQlxiZj7DwF0yjWkPJn1kxSUlJPE9j9DV6C8JpLpZ9K7MmfYcC6sWM63txBca7E/1cejOrWw/6KOHLqdfXqFec1k1ZhhIpXG1Yd6/12V+JcH55SRmrUWgVEjt+QChHJBaVWktn8LHI1XZSn9qkkRsnuL4II4fYNgfQJU0gliF185YnGpHwR65Lgr4jbdZpmXawOVe1/EmikCl10W8iaiut2Y8DMNSLzYc9fZbFtX11vzI3t4sI/fLYRje0esifpggCCeiun56m7WuHot6+y2I6rpxolMhI5FI5BYUhXskEoncgjZLuP9prwv4EaK63prNWlcvbdbXJKrrrdmsdf1Im+KAaiQSiURurM0yco9EIpHIDdTzcBcE4aOCIFwWBGFSEIRf3uBt/zdBENYEQTj3hnV5QRCeFATh6vq/ufX1giAIf7he5xlBEA7fxLpGBUH4niAIFwRBOC8Iwr/YDLUJghATBOG4IAin1+v69fX1WwVBeHl9+18VBEFdX6+t359cf3zLzahrM+plX69vf9P1dtTXGywMw57dAAm4BmwDVOA0sHcDt/8+4DBw7g3rfgv45fXlXwZ+c33548CjgAAcA16+iXUNAofXl1PAFWBvr2tbf/7k+rICvLy+vQeBz66v/xPg/1xf/qfAn6wvfxb4ai/7bQP7qqd9vV7DpuvtqK83uA97unG4C3j8Dfd/BfiVDa5hyw/9AlwGBt/QjJfXl/8z8Itv9nMbUOMjwIc2U21AHDgJ3Mn1L3fIP7xPgceBu9aX5fWfE3rZcxu0v3re1+vb3dS9HfX1zb31elpmGJh/w/2F9XW91B+G4fL68grQv77ck1rXP/LdzvXRRM9rEwRBEgThFLAGPMn1EWojDEPvTbb9N3WtP94ECjejrk1mM/Y1bIL+eV3U1zdfr8N9UwuvvzX37HQiQRCSwNeBfxmGYeuNj/WqtjAM/TAMDwEjwFFg90bXEHnnetnbUV9vjF6H+yIw+ob7I+vremlVEIRBgPV/19bXb2itgiAoXP8F+IswDL+xmWoDCMOwAXyP6x9Xs4IgvH4pizdu+2/qWn88A1RvZl2bxGbsa9gE/RP19cbpdbi/AuxYPyqtcv3gRK//Dts3gS+sL3+B6/OCr6///PoR/GNA8w0fJW8oQRAE4P8DLoZh+HubpTZBEEqCIGTXl3Wuz5de5Povw8//iLper/fngafXR2a3us3Y19D7/on6eiP1etKf60fEr3B9juvXNnjbfwksAy7X59T+Mdfnzr4LXAWeAvLrPysAf7Re51ngjptY13u5/tH0DHB
q/fbxXtcGHAReW6/rHPB/r6/fBhwHJoGvAdr6+tj6/cn1x7f1ut82sLd61tfr2990vR319cbeom+oRiKRyC2o19MykUgkErkJonCPRCKRW1AU7pFIJHILisI9EolEbkFRuEcikcgtKAr3SCQSuQVF4R6JRCK3oCjcI5FI5Bb0/wOY6EimUMAdsAAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "f, axes = plt.subplots(1,2)\n", + "axes[0].imshow(np.moveaxis(new_img.astype(int), 0, -1))\n", + "axes[1].imshow(new_seg[0].astype(int))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Elastic deformation" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(3, 224, 224) (1, 224, 224)\n" + ] + } + ], + "source": [ + "from monai.transforms import Rand2DElastic\n", + "\n", + "# create an elsatic deformation transform\n", + "deform = Rand2DElastic(prob=1.0, spacing=(30, 30), magnitude_range=(5, 6),\n", + " rotate_range=(np.pi/4,), scale_range=(0.2, 0.2), translate_range=(100, 100), \n", + " padding_mode='zeros', device=torch.device('cuda:0'))\n", + "# transform both image and segmentation using different interpolation mode\n", + "deform.set_random_state(seed=123)\n", + "new_img = deform(im_data, (224, 224), mode='bilinear')\n", + "deform.set_random_state(seed=123)\n", + "new_seg = deform(seg_data, (224, 224), mode='nearest')\n", + "print(new_img.shape, new_seg.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXcAAAC7CAYAAACend6FAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nOy9aaxl2Xme96y19rzPfOehbg1d1UOxq5vdzSabU0uiyFiDKYa0JCgRZMBCINiwflg2AiuJoV+JESQQEuRX7MAS5FgKI5m2hWgwRVLN5twkex6rusZbdW/d8Zx7pj2uIT9ukyJl0hLFru4i+z7AAc4+d2Hvtfd58Z7vfvvb6xPOOY444ogjjvjhQr7ZEzjiiCOOOOL158jcjzjiiCN+CDky9yOOOOKIH0KOzP2II4444oeQI3M/4ogjjvgh5MjcjzjiiCN+CLll5i6E+AkhxHkhxEUhxK/fquMcccQbyZGuj/hBQdyKOnchhAIuAB8CbgBfA/4r59xLr/vBjjjiDeJI10f8IHGrIvd3Ahedc5edcxXwceAjt+hYRxzxRnGk6yN+YLhV5r4CXP+W7RuvfXbEET/IHOn6iB8YvDfrwEKIXwF+BSAU/kNL0RwA30gTKQdSCJSUCBWgLUhfUDtLfzJCCp/ZVgclNPuTAgGkYUQYKaKGQhqYjqcMhiOQPmnUJAxD0rkQW1UMNkZo59BWE3opzZ6PkB4qAKkUzjqcACEsTlucUYhAIJQAIRGAsQaBQ3zLvJ1z4MBUBlM6pFGoQKBSR50bbCkRHkhP4IwD6xiNS2pd4aHodRNqYxhPpnjCR2uLUhbjwBiHkJJQecgwRAWK0KvQhcJqh5ACGXtoA5PRFOkcvqgpdEVhJZHySMMAoSTWaCAgSiR+I8IYd3jOzqGkh5AglANxeB2klCAcNrPgBHVegVIIZ7BGIl3NNCsotMYJicOBszhn8b2ITprgZMHeaMpUl/giZibuEvgG5TmcFQghcVicC1AeeIGgzCzOWpRncXiAxNiavdEIgaURxAShRxgFSCepdY3WFhwIBAA3p9sclEPxZmhbquChuDX/HcfJwfSWHL88lt6S/b4ehNf/4pzrO6L/7Fj/UvFt23/V+DeDvzzHN5qCKZUrv6O2b5W5bwDHvmV79bXPvolz7l8C/xLgRLzsfuPMPzj8g9U4bWk5RSgEM2kT0V1lkFviuZibdsLvfeFTxHKBX/nJ/5KOv8fvfPEC0kjefeouTt/b5q53tgjGjic/+xX+4I8/jYiXeec9P8apO07yzn9wkuz6dT7xP3yafmnYzbc5OfcuPvCzi3itBTrLlrDXpsorrO9QssAOMuqDJt5ahN+SuCDGt4JJNkRiUcKitQHnsNpgtePg2gHjqxVxv0N3zaP1DsPNF/pMrrWIu45gwccMasSk5jOff5XN/evMuDY//9EHuDkc8/kvPMFMuEB/kNNqlYxKQ39siaKY4705GieO0zrZ4Uz7BluvtMgHJWES0Dg3w/YQvvypJ4l0yaK3zYWdq5wvI+5sLvKuU8vIbkoxHIBd4u53NFl+790M+xpdVZRlTSvt4AceQbfCqBobCMJmhOdpps9MIffZfHkd1ejiVSPycUJsbvLlJ1/hwt4+tRdh0Gg7RZcTlnr38rH33EcWvMJvf+rrfG3vVRajt/OL9/4dTi4N6fYqskwShinaFej6GO15R+dYzJUnM/R0QnehoHZtnGkyzLb5V5/+j3gi570n3sax4/PcefcasU25sbPJYGcKFoQ7lPcvf+pX3zBd/2VtN3rH3Ln/4h99x501fv8rr9e8vo2L/+SRW7Lf14PTv/YX57z5m2f/s2OXP/rttzL+qvFvBn95jm80T7jPfNe/3Spz/xpwRghxkkPx/wLwX3+3wVIIInGYIZIyQPqCVEMSBMRRk4NS0k4ChkZwfXOAwyCdJVRQGI21GuMUfqgQxmGdwBhD4AVEUuIHPrWusM6CcBhtQEq
0LRFIBAKlApACh8QKixMWgcA5iQo9aiGoJxVeI8SYGl8C1qCUwGmDqSustrhCUk0146sF1a6lNytIlyK82JI0W0ysxmaKelQjxpBnU8qqoK4L4igiUClFuUfkNSmLnLzOaBqFtoKKCl05smofu9vFRT4jZQkiyU4hyeuS3Rf7bI0V+/0bLMUtGk2JkYpAQSe0JG0P/BR/ZszBToUUKUaHODmlLjW6NiipkJ7FCk1tDYEfgbBgObw+2uB5AhVKlPFQEjwkjSQiCUKmzmKMQRuLAwpdU+mSxtwC3Y5E7CpKM8WaHOnVhA2BESlYH8+r8JsOJyQ4D0eNkgZhS1zooycToMY3FUEgsJVhNMgYjAZMzZR8MiRQCqTFU9/Q1+tWNPA96fqIb+dbjf2IW88tMXfnnBZC/CrwSUABv+Wce/G7jVdC0lCH/3J5QuJJSUNYkiRFeV28WtNOW1yfZFzb2cJTMcIaYr9mb6KxpqbGESY+EtAFCOOQIiTxA8IwxOgaQ4VFYyoLEmpXUSFQQuEpH3M4m8MZS8BZkB6oECcceqrBBlhTIqSPMBXOCGxp0VlNnRncxGPcn1Js1XgiJJwPiVYSZORQUYUwNXVhoA/kiuFohHE5ptak3QZVrRiPLKGf0p/2mVQZ83UL7SRa1Oi6YFSAKIbYLcdBqvDI2J0O8EXMJNvm0u6EwfgGS/Gd2GaLSihmYo92OyDoBMRBiku77G0YpGhgRQZUlHmFkD5SgfQtRVkiIoUXBAhZ4TRY59C1IQgk+A7fD5naAmcFrTTGC32CqiQ3CkeAcQXGGgqtmW2eYbbzRaSLUE7j7BCED77CixOyUU0USeJZSzWOqWtHbQckvkdZS1QnRGd9lFTEGHwifBFQ5469zSlO7EHh0QgUnrIEwaG+xOuUkPledf3duFUR+w8bm//+LMsffYnNf3/7Rew/CNyynLtz7k+AP/nrjA2DkDuO3QE4fM/h+5ZmL2U01ewODMe6kqsHe/z+U49ToTkezTAf+QTJhM8/+QK1VkhVs7gA3a7P8EqBJyD0ZnjbsTP46Sy74xJrCjKzjxdKLDDKB9w0lr+9EoHvkyQKJxR+U2CcQFQC68CoBqozYnzFACXJqZCqqhE3HLs3Rmxd2cSUDlf4iLxJkoTMH5tFdhTJ+2PKpCIUDdS8h/MKskHB7mWPrYMbrG/dZGJK5ptLrJ28g5eu9ulvbrPcbPDFjZfYz0ecmFsirLYYZaDVPtezjP3astRvs1vcxXUx4cLGKyi22CkCAjdmiCRNIuLacnquSSN6EK9haRyfZe3cEtb0uPj0C4iFEZWIaIgGOi0xxiDjnMyUyIZP1G6A72GLinIsCUiYTgZ4jR6lG2KNoK4Nvuqy0NXYqxsYKUD6VHmOURDUY3YvT7m0t8va8k9z19yTmHxKf1yzMVjjhe1d0nRKWyasxcfpzqRcyvcZbh+weTDhkbf1uLR3BkZD5pWPMzFrC10iO0OsWojaYfZCfBkSJBVOWEpXUZT6UIu8fuW+34uu4TCvfmTm/2nU/tc17Nvd2L/xA3Q78qbdUP1WhPKRzdnX3nsYIRhMLUYXKLnH+UHJxas3Ca3ieFdx35xPKz7NaDplMi1peB186Wh4Gdo08SYTTJzgrGK+s4gIEvaHFc4JhPGJZjzAIaqaXtCml0qyfEQy52GQWBMgQ8gnJb70QEmUikAP6V85QEZdMj1BbVp2z+8xuKaJVEDowdyiIeoavDti/JaPlAIxEehY0+hBVpRMbuY8f73PVn2VPJ8Seh6jqWFiNXlpCaUlFApXZwQC+sbQiz0KdqirCi0UTaXx5xSNZcmaMLx49fDGZMQUCczGHmt+kyrwmF1LiJsBZ+5bpbOaopuChm5RCYHfjvCDmNIM8T2fJIkoXUXtNN1uBxlIhDDoqYS8YLRr0DonbXXw4ybG5kxlyWa/wOY1orYUZoy1PklQY40l9Vs4lVOWu3SCJT78oY9y/qkrDKcbHFyaUsWaebsIYkhz9gS
VvUGUjnjppYy41aOiyXPXvkY1nfCRB98BYc7azCqBS2lELWRsWFkIMSZkONrG1rNI5SGkBAS44M2U9xFHvCncFubunMHqMQBGSJQvEVLQH+4zHE4YKw1+xiNryxzrRqwuP4yIIoy+zlqrSXGg6c50CYRlMNJ0WhLnaqy2RH7CuDAoJLEIkaVEJQZnNIEfMNOZJy8NwUQhA4vBYbXDGEtVV/ihhxcIXO6RNjx0UbD31C6Toma42YeBIfUgTSFJIzpLs9RdQdRLCGdCnLYMhzk9EYCL8LTioNjg0vAZpApIw5haW4R1XL6+h/AUp+KEqoR2NEOgd1kf7KIbIYFXkdsKa5ukwRIriys8/OEe558NyMpNtOrw0GpIZj3mkmVmj/nc2KyZm2vx0C8+gIkjCl2Dc+ggI2msEHghTu6j8RHKo7Y1fijodjsQGFAGxg7fOcwkYH/zCkEgaCxXRKFPNsjwQsv+7jae1HhSEtiAA1chrGWutcpKKhkNc+I5SFqG97z3o/TyZ3j8iQuMq4sU1kfVEWsrXapqE1Pu0Wgk9PcOWDvV5uqliude/AKP3PswaTNlKnLmu23mZ+dRnkV2HSsrKwgvIM+HjNYtdW4xh4E73m2h8iN+WLldo/fbQvYboyH/3Wf+FABPSnwEJ8Iec2mbllWE7YTFmbO885H7SXsj/J5ERB5mZ4lux8ONDaXXQHvHaKUTLu3XFP1NklrTpmZ8UJCKJl7m0P0auyzpJo5pY5GDquYLF2/wc/et0L/haC8G2KJA+R5Js0FgPMpsiO8cB9s10uvSmJ0yl7bImyHmoCTPDFYoaufY2j+g3CtJ8xTVDUhnG7S6DcpsSr41JJSaRq9By81jvDEN0+L4SsSDx05wbWOX7mJKXC7y+CvPsG8KOukie9k+06HP2dYJsrriZjlGiwOC+gzRPaeYcztYFeKbgF/6jb9LSYjMPK49/hLn3jPLiQdW6OcZZpyjjaEz08PDkvYskdcj2xpRjGuStiaaETRPNdFSo7IIho58kLH/1AE3Ll9mZe4YKoJ8r2Q6HTC8MWK4s0/qR3gi5p7eCpNin89vbzAi5/zuRbrmNCuh5J7FNVbTNjKd4B3PufCnL4KoubtxBw/0ApZURId5kplF9m3Ih39uiX/xL/4AG6zxyF0P8Y47jmO8kmcvjvipf/gh6AiqakpQRTiXowcHmOIcN4evUE5HVFkJQK31mynvN5zTv/YVLv5vt0fFzHe6iXq7p1r+JtyOBn9bmHuE4C53WNrgERCoBmutc3SjGj8p6K4tcPz0Gl57iGiEJMeXMJRcuyKpcFgstVZU44q335PyxNMb5GVGU+U0bYQnPQLh4WpDnTVxZYanWsymJRdHrzIe34VRAqkSqqLGDCRhWiGUR4VBZAHT8ZB6Igk664z6LbAClVgCLyYyDoM9TANMQ0pXUVYaNZSMsgF6PEHYjPJqQn+YcePGDg+fcsyurjEbhYz7i2yXi6x7BV4Vs3dwkefLa0xtiTGCU3MJg0HOyGk8IeklKUVVcvnaOj9y8ABNL0RrwcriMeRcG98Dc21CsLBM3KrJhxrrSRoLHp6xZJMIXe3T7DQQ8YBq5DCloLEQIrs+zjsM2OubOcObGfn+gItPX2eh08Z5GY4Go61NAhMw2Z5QZzXK1Rhh8SIf6To0411kAUkYQl7T6nb52vO7bO1WxDt3sDsqacQJvaDNoyfOEtSGSgis8yjHMHP/AD2IuLk7prumOHvqGFHSZVLtE4YKuRxQNT3CAsSkILsm0QdNtje3WV8foosSU1UAVLV5M+X9hnO7GPtbjTfK4P/e+WvffH/pY+V3HXdbmHsYRNx9/G0AaMAKWDdbPD/VyKkkzlI6NzZoao3OKoriKSILrRTuOnkfKr1B2EjwGx42niHMXuXlrYJ2GJIkHhkeM2GXraDmbASjS1NikVJ199m8OqAbFNRZRrU/ZBQYkrkueJJ0TmGsxp/W7F7LsJlP3JxDWMFgfcD82hzefEw
eDimZkiYpnvWIbcD00gEqtwwuaV64+irnB1tUpsR3DRbDRd71wP1MygihDvh3l59kd/gYD/VOcXz1If785efpNWAuXMJkgsX2EvcdO04x2AU95crOVQYYrlXr/If/9Ys88r5j/Pe/8EuM6gllLagmBXpsSXohgx2NSUe0VltURYUIOsh8i/2XDjh9qsN0KFh/4SZJ0sT4AjeOePVT6wwvjcj3Mnzrk8SO2bSF70vGexXN1gFJGTLaMni1R6p8sFBXEg/LnF9xIpzjsrmBqWuuqQmDmy+QRrMkg4AP36+5eaNDvf0Ac80Or1zepK/3OSPOEL14jWNeh41PNrj2uedoJAe8f6XB/e8/yaScsn9llXt+ZIoJM4IqwAURUyH5vd/+LM+++BSB3uDOhWO00yVarSUcDod9cwX+BnM7RO7frezxhzFqv9V8q5l/L9wW5g4FrXgdgEE+YXd8wJfWhyg/RFjQ0pKrRd7WPsXD6YS7Z+eZbYdoqWioPbw0QfuOxkoLooDTZ2OeujGhEk32OGBSBCTpnRw7KXG5o971CIUkKw0BCmczir0+k6xg2K9pnezjB23ygwA8j6iaUu+BtJaqEEjnY8aWSjtcUJD2EhLr44SP47C0M+kFuN2CyXCPG+Ndbg53qY0ltEMWTixS18vMLfW5vrXPfrFFpByP3HMv13c3uTYesjZ/gvVsQiR8knab5qrljt4KB5MhLU8xGE/YdCUNBzdeHdJbSjC7juloj3Z7FtXzGRYZE5uRxA1sbQm9mLqYsH1tAzmeoz+ZErkOc80Gu8MtLjwu8fR1tl/KyCcTWklK2grwFGRlxt7eiKVuG3LNcKvEacPsrMWamPGoAlExntQYG9BOauaFRI6XEWaKH/ns1ENaegi6zYl3NvGzhBtXB9ysJmChGmkyf0p/PWJ/6zLn+69wdvUkd54+y/ZkCrZN91hGkQbYgY8rAy6u7/HMky/yxFc/TyAPaMi70KMWg4lmIK8AoOvbpwl8/5ffTe+3vvzN9wC93/ryt73/Qeeonv314W9q6t/glqwK+b1yqnHK/fNz/xwAjcE6gzAF0hTk5R59M4cnZul1NGMdI+MendTwNiS4iu58g/Vsi09XAQ8/fIpHzuzz7/6PJ7m4XVLpPTYGNT/5oQ/x4Z8/xehVj64ouPbUZb76/Mt8bfAykbfAXG+JjlAsx/NENPADAe2aKGhCXmMLTW82JZ2PqTJLdlDQuXuR+HSEnJe0O22Ey6gz0AcHXHtywuUXtnGDDV4dTREmJzeaR0+9m96i5OJWQS+EVugo05Ivnd8gGFgkA14aDdE25L2nztEOAh79lfdhFDQbAXpvyuYz1yg3xuxu14hAcuL0AmI+hQDS3iz0DJHvEFPBS09eoLfU5fiPnEKUmtG1IYOX9hj2c7zeGjMrjmkxZGYtoXdylfH+Pu1GiNEV+aig6JcUByU3ntwiDXqQSWJVoMKIvf0CXR2AA8oaq2uc1tS1w1JTUxMlbbLa8PjGq4zQzAURs+05Lk1Lfv1n/w7ZcBNUQSOdsvFMxiRrIBiyLUrOb1ziH//6P2Wn3mV5pUsx2WHnuiHC4+rWc2xszvLElx4jYIcZdTcFFZkumegMg4XXSiA/sf+v2am33rDlB76VdO6Yu/sjv/Z97+d7Nf03I3L/q0z9rRC1vx5pme/F1P/Zx17k8vPTN3T5ge+JcTHhsxe/CMDh0iyCIIpJhE8ofGw4ZFpsslP7VLpECR9hQ2ZWT4FUsF9h6pxXzj9DnsX86Lk1ZjuOV7Yydia7jFzM0tkmXqxwecbesELXliBQ+DbEuYj9g012ZUw/Ezyw0KIT5Xgip5oosumUUPq4OsFkFWlLQK0YXN2k8DqsHF+idhkuU1z9zE02r+3zx088gRAej55OcPsZd8+vMdtKuK5HHFxMOX024vjZVVrHuvxf//snyMeO4+kqpRa05A6Zc7y81eed996N6FoiQsL5FNWCXrHGTrjBQiPn1Zf7jAvDXOQxzSt
2r+4S7qVUxzxMaZhvzzPagY0/X0f5HlUe0e/nuKLFyqymeWeLpeUuXsundJJoZh4tLcpBsFdQuD51XnH3B+4kjAw3vzrGDFvsDvtYm5PGEmvBb7TAOmxlscbRHx/QVk3CsMOouMrxzhwvj2+iq5rrg2tsT2rI9iimEe3jPrvXGizMXyTZ9hi5nN3tKWeOneKFG68QxDUNZdjb36PDaYzcpyhWuXbpOUI7BmY4Pz2P8gxSCExVIR34UuEA595aN1TfDI6M/fbjtjB3T8J8cBhlWQQGx6QcUXshViiGwxEoSUiCFAmNJMHXjksHF7gw2sK5RVqNNm+L+7z6/OcJDv4bTp9rsnVzxPZuQeEmPPi3HqAdVjz78ifZ2GrzyD0x4c0my+0OI1tyZTSm5ykKOwE5QAgfaomnLLOtlFTVHJgp+bagJ7ssH4/4yheuUl7dJTjwaM0ZJlXC737idxkNC/B8wgjK3Tt5cPEUs62YzSzjeDfk+EMJSw/cz1TBZz/7dR46OQMaPv65L7Nn9knVGo04YlSu89lnD3j3q2+jea9Hnk+xvqLxzh7+3ZKgDDhxfZfrf77DzsVd0sUFTr1vmagx5uWv7LBwbo10NSZ+dcDujZLls10SX9O59xzJrCJaDbG2RAQhWjgCa9CugZAZTmt2buxTDWpmF5fw5wJCKRg9cYAnS07dvYAf+KiWoK4rQIIT2ExTTKeIdUOdOfIsJ/GbTPsX2Bv3iaM1VpuzjOprWK9gWu9y8akmi3M+g+shV7M9rvczHnnbGi9du8r8/hZa30+e7TDTibm+k/HlL7zIlSvPU9Yt0tAQCOjFKcoKAhUQ+T6RFxCpw/r2aBq+iep+ffhe0zZvVN79r5OCeSsZ+/dzU/X7TcP8ZW4Lc49lxL3tuwGoraE0mjqxSCWwWMLmGk4pSs9Ql4I4ifEoubl3E/DYGW6R2TH3t2dYCzJG646ZxgnOrJV8+Uqfys3SaHggK7aGYzZHmo2+o/YzPGKqbBeFJMKn4fm0kxRtBaWLqZwiNjmFMdwcGxbammyUsl46DvKcYZYzffIC7aRNkgz50VOzXNkd0h9DJzjJzzx6ks0RrD5ygoeON+hfypHtGXIvY7BxnbvvaBMOPJ784k2m5oBhYRiodVbpkcoGHo6LT6+zXM2x9L4ZhKkpp4YgbhO0PHILJz/W5cu/8xSiE1DSI4gb6I1d9qvrdH56jnDYBrON1wloLLaoYkPUMpR1jZASKSTWOpwOEMEEW1t8FPlWm8XFAqkUXuST7UyRoSINJZVnmNYTGiYBT2DqkkAFBI0QJyXtTkHh5cR+QLMQfLD9EJeefpxBfY33d3+Kt58MuXKlIm0lrO/3GZYJM3rAUxsv8MJwj4uDEfffOUFxjqh5kZk7TuC3O3z84x+n39+kEQ7oxvv0p4tMXIFykCoPJQWptEReTSAd4JBvSkLmzedWGvxRXv314/U29W9wW5i7F/i0l2YAEEoifZ+4m9KaSWn0GgQWHBP0RFPs52xdmiC8EXuTKYwqvLAkq2PyrMGd7ZO8+tg1Tpxd473vmeOFlyacUft4VLha88TFV7g2zNnL3smP33WaMw3DIM+YTveJRJPEl+xsXWFiDEUYI5RiWg4xVcnmZILe8nlkSbOxv8eV0RWa8Rx2vElV5pxon+anH/0p3v1oSnzS5/997BX+ly++yK/+ow8iz8Q89ok+7uY2977bp/u2OZI7Iy49tsG/+e3HkVYSp4pz/iydaJbED2jIGG1h67mLVNsTooaHtwTtlS6eg2zrgKTXZBpPePiD93L5qS2ufHKLUx9a5d5/sopQbXAZzZblLnMKJ3zwIqJJjh07vIaPCAw48BzUwuJZgZtAf8dw4u0S7R2ms+o9jdf1OX7vHOWVIS73iTyP6uYEnKIqJJM6oxzmOO3IxvvUWROhxsRdx2B3SMNvgGvgJLzjAz/FH/3heX7iJ8/x0pXPcc9dPqvRwwx1yB3HXuEXPvhBRpXh5E+cou5rPvO
bX6Hcm9IKDQeqwbXxNoqCWdViNQ55x/I76DU7BMKjrizG/kXOvbH1f7+J6n59+ZtE8PD65OD/Job+Vora4XvPud8qY4fbxNz9QDG31AHn8CIf4XsMSgekaB0SzdfQnqPXsKhRDd4Ntq/DmfQuEn2Z50YDpLDgSpwcc624QDL0OLgYsja3wLQ+gdIFciII5RihMiblNa6OjvF3/3GD8fMDnn18hkLnOGNoeI7c5lgxZZxNOSiG1LVmr3SoGrpJxoX1fcBSj/dYmVmh2+yw2PE48+Ndnn7+Gp/4Hx8n9hT/0+/9faoo5+nffpmNr6zzgV9+H7P39Rjt3+QL/+IVblzeJvAmuGSGt8t3sbAo2RyMaTYanG7MEYQVg0nNZLjH+pdCTrzrGN6Sh5GGsNNgsD9G1ZLShwmGu3/Mo3WfRJsmdT8jMxPavQg9rvB8qOsahIcfKIQSCCRSVhjrIUWNLSoGV2o8r4C5FsIWyMrDSyKqNOdgeIAdVlD72KqmLgKsFkyzMVWVM52OKMucZiLRcoxnE65fG+IImbhdEtGgnfYoqXj+6tOcem6G+86mfOaKY/WE5Gf+6QcJ7KPsXqk58aEOw69OefY/PEcqBGWa8uTuqxR6wGI0w1zaYSXosdKaodUKqVzFgcnIKFG+QqlDeWvx1qpz/058vyZ/FKm//txKY4fbpFrmwTPn3GO/+W8BUFIirUAOJJde3qW/XlDmBWFXsfaOFq20iQhj/EnGs3/4HJ1ghgsXL5Alli9feZWluMvzBwNOto8xl8yQRBOmJubH33kPc/c1iX1JbQsi4+Oljv62o5EsUMprZLseSsaM11+hP1JcOH+VvLaUrsDamuEkp9UJaTTuRLiC1XZCHLc597cf4E8+9xR/8Nn/h0Y5y4l4iV/4mfdy6uwyn/7zC1y98Bznjj1IIx1x8tH7WDgr+cPf+Crj/QFeXTJiwEbu0F6AL0rmOit0lGIpDA+frjQGTyqslOQGRGuWpbWUudVV0k6FU4br6yXHz80w++gcta2pKoGvDDZTZNOcMBAEkYJEgI6xXkWxD0ECKhDYQBK5CrOdMF3v4/d8RDeg1hopPFTtcKVh+MIevVWPve0KzwmGgz2UEnRaPTibQcIAACAASURBVDzfp5H4OB2y98IOWX+K0yOef36DbX3AE7u7pJHjHz7yY8RnjvFv/uzjbO0K/t4H/xbr488wf+ZeHv7Rn2Zw/iYz72uwe1XyqX/2uzgPSpvTDjuszK2xOxnRwBCHASZQNGcj1t47T7jQQrUbhL0uwvMQr+VjHv7Zd/P1F5/8ga6W+U68XmWT383wXw9DP4rc/1NeT1O/7atlnBK4mQYAVh52//EWNCfWWixdVrz86QnjLdh+2jDpjlg5G1L4O6yeWmTz0gG9jkQPxzgnsdWEtl+yPd1AyjF9rbhZwkN7jtbLNekHFpCuZv8pS14MOP3gKnv7l/naE4Kev4+e7vJHT26SFxXKFxhPg2jS8Lv83E/MYPoB5w+2WZ1Ned/7l1HHUv7oK5/k9z/5p+QVLLXHHF+4k7t//ATXv7hFLxsi2j3aLUF3JmL9iRJz0Ke/dQGjFVoPyEPJ1ORs6CmJSNjbkXREwB13rYFToBtEcYpqKUIlmYynjDb22F2/ycLaGokf4HdT1OLh0rpSKOIU0A7TVKRJgjMW64N0Eu1ylJR4ShN5bWxVoKTAGInWFhPUOC8lDSzCkzhz+GCUh8PJkOkUvCjCak1jaY60HRG0BSLM0KaNMIJotIjxBtSbhivDXfZNxYmZhCybUEcJK8ttHly9l4PGAc3CcXfzZ1i67x76FwZMtq+w+fEWo/Xr+J0z9KfXmW8tokjY3t2gnYb4YZPGfMDdP3MKf66Nlk2kVCilcCOLcfq1Hlkcdrz6IeRba+a/H25VVP5WM/bbjdvC3IWSBI3DigYrHSiHSFO8oomcW+Lhdx1HWRg+cYkbT0954V+PMHJEJaEWObYQBN4cH1jyKPKa/MAi4wRRana
mI7rRmGzQ5MK4w0I94NiZBV659BKPnR/ycwcjnn7261w9cBSixjlHK22wvNQkdEMORgOubN2k8OZpve+XOHVPxYPCoLoBoszRU5/3vlfT2nJI7fPof/sT7N8YsP5nm3Rnl4jf3WHn5hbH7w0Y7A74oz/+M/InDKcJsXJMEfiMXIJQjnK8Q60MgXC0WosYkxBGIclqm6jlkywlhM0281VBWZbkuqRRJUyHU5gZ05u/j4oSEVRUOkDgY6sK5SlE5KGnBa5q4icgKsvetZLMbSBac0RLmmZDU1qFPxMTRwKLoa4cZa5pNWp2XwSkIYzaFEWfqBMRLjYBS02Fs7P4qkAPLNPygE6rw0G/wZbeZ19PmZmk3NtZ5vR77sIuKu5cm4PVk3zhxRHHZycM/+iApcYsYjTLM698kfViSliO6SUzzIRNPFnzro88THBXl8J22b1ywKXPa/LJFqa4hBcqZCTwO5IgjAjCEHBUoze3Fdqt5PUy+CO+f/6qqP1Wp2H+MreFuQMYKwCHlArnQKoI4R0ADqMDRGRpP7iKH63z0vUxZhwx0wrIa8eBFJSVwK8Vib/JyfYChVAEMdiJx4m5BS5VlvUrF3gP76TTnvDsKxe4MdjmfHGCmXZAX4CYlAgp2O+vU9U9lud+nOMnh5y990mmhaUV14i44usXK+6IAqrNw/z18spdPPzhLuFxn+EFzYXPGObiGdY3rqLihFcvDzCcIqTL2DzP1JR4YQNPldRGktcO7UJQCokiET69IKLdbhKkISYwEAdMhcQKgx8VqFDSCruYwZRQBBRJjfFqMA6jfXxlcQ7w1GFfVCVQnkQiERiqsUBRYQcC50+QtHA4hF/iKos1Gp1rHArlC6pSs3OlZu1ETFGWOE8RNGKUp3De4VOqUiqqYYYtJaayZNoga0Xs+VSFJowbdOIGomMwXsbiPaf4/J+/xCdf+GN+/tzHOH2mTV5vY+oALUpSF7LcmqUdN7jj/g7NlYh6donJDcXmSxuYzOKqEb6QOOczHVmqkaNTtymVA3W47ICp36LlMke8pbk9zN2B1YdlawDKkzgN2gRIz+ALH4TFpB7J3cdorV5AX69RixHLjYDZvSlb62MmwxpjuyzNKm6MpwgvJNUVVzZG3NBXMVnGtZv38f73rUC3pr0b8Mkvf4FffMdZkkkNYkQcz9CLQvqjIeV0g70sxTv2KHPLgnihZILh3/7OK7zvPXciNvc5fscKy4sCfzmhzn02nhiTb+9gV0KqwS7SNdnc2mKtvUja9NFuiq6m6LBLK/TIC01e5ThpEBYCz6eTtmgHMUnDJ2hG1LHGBhblxYRpjHA5GIkfJyhRYPEpdYn1BUiLEBIpA4wpAHG4toqTqLhC1gZbwKRf40yNyQOUKJFIsAnCGyJqQVWVqEghpENJQT4q0brEuRita6wnkdFhC8JKVEjPRxiDnUAxnEIu8DxH7jStMIaRRTiBH8Z4CdSRIpqf5/LgayhRESZDpnIeV1su7zhmVY87T84wsxwyvzxL6Vu293PKy8+jsxDPQKsZYmOfrJxwcGCpAeELdkbXkR54/mGgoE39Zqr7lvPDtHTBEa8ft4e5A+obfS6txWlwlcX3fZxVGFWC9pBSQddy/H0nyL50hfapFNX2aaqTrOQHPP3xZ9BGcGF3n8JqhNasNiV7eU4zCjl7z1leHO3xf378Kr/00Z8nf++LmLqDb0Ke29rkjpn7mA9rdnYadNUccZoTB5pi7zpVNs/60wmfeOz/4+uPnye98hEaccByV7P71Bms9Wg2J2hzmZljGZ5d4W33LvLpF9a5nl3hZ8/ehx4PON2Z4epYUqiANJolr7cpTQ3ygFgJUuWx2pphvj2DSEKIgEZCayYm7Ka4Rg1lhDUBVmm0duxvClqn5jCVQSQ+SjiMtghx+ONorUNVKbWosNJysK6pS0hMzETVIALC1GF1jXWgPEVRFCRBA6zGU4KDscMPJuTTDl5T0+rNEjQ8RKjAKJSucdriCkt
d1/h4TCcF+6MB3bBBO4oZlH2C+BzCh7TVYLAxZb7hc3x+hVNz8whvn+V3nWL1gy2i5bPUSEyu2Hq5T7w1oj17jPapEmcLsgPB7mbF9ctb4Cpml32EAu3A4r/WW++1xom3jcpvLUcpmiO+ldtP9ofPi+Os+5aPDEIphFU4UROthnhRE+cLhOfjtR30eiytLLC5vcNKmlDaHOMJtnON1R7+fMxj1zbZHBZEUrO7Peb0nfdT2j3iJObchYrrNzXXtc9DZxrMTmKMgzANGdWWRlSx/uwGl566gTI5HhZnMgKtKEaQtlICv0CGU4QOib2ApLeAlucZ5zXJbJfC36cVBMQywOBABgRKgrBYZwikR+R5JJ5P4Ico38MJwHdYKRCRw9gaEGhTE4ceZe4YHQxYTGdxzh42GFcSrUukCMFppPRwRuAcCFExGRakaQDaI2iBC3ycrMA4rAVpD01RyMOnTgUCjETIGqMFgQLpSxASIUAJddhAW3D4cmCtoyw1ZVkRSB9f+kxrd6i4SuFqn8lkxOlOj5eNQPk1JCnNtRSx7OGkBB1x8LkB+WbN2l1d1HzMcLcgHwdsXMgZ9Q9IUk1nJqS7kFAbyzSzZIUBoQ5fvDanI454i3F7mLs7XFPmmxsWbG2RgcBx6BvOt9hSIq3E9jRr9yfsbjSJY6gahmhRsvDe48hnNHowT13kXNofMB6sk3iOKCtZ8YfMzZSMrWKU7/Lil3M6iwG+0Dz6sfvQk5ThzhU+99kNZGC5785jNIOaurlIZUL6468TyIIg6CHMTVa7pyhGHcaxQ6Y5Ld/hxwlmHJAuhTCXstpbJVVP4eKQuYVVluY22BvmTLUB1STwYhAjDDWp16QXNWn5Ib7nIz2fgpxGK8F4Bi1LpICyAt8X1MUIvRcxPrjOsXaMcR1MmeOJGItG4eMcKOkdNrY2mkjBdFzRaUM9VXgzIdFcA6dG1GNJXWl85w670+EQQiI4zNfHSUJZVjSVQ4UOUCBASomVFiEESiokEm1qsqygNhalAiQKpMdBWTBen5CQ8sqTL3F8/hin51d49nKfhx5uYT2wvsevfeRfUdDmf/77P8KxuzyKmYjB8wOe/tJ5rC1ZPqG4993HSWbmcUKQVVAXhjB2qCLAGIl5rUpGqrdOm72j6P2IbyDf7AnANzLth2HfYeD+WuTuxDcHCOuwOJyT4Eni5Sa+51EVhsAobCWoEolKO+TGkTkQ0vJTdzX5sdPHiaJ5anmGPLiLSPR47Ks7XLg5wPMdnoWxS5CrkubC/bzr3BItal65sMn5G0PiZo8wFPh+xUzi0Wk0CayhGTSZTCsGkyGairoWKCKs9ZENhQ4tcRyRBFDnOUHSJU1D0kBRWot1Ein9w3MUDl96hMrHVx5CCISUWAHKV3i+AOtQUqEkhIGE2mEyD2NKfBuDObx21jpQCussAnl4gYVDOIkU4AxIaahrjXMSP5IIHKY2OOMOr/9r34kQh9+J9ARBEGJdjZAK8drNSiH4Zj35N4xeCoGzltpYrHNIIREOAi8ky2uG22NkUTHaKxlP9nj4Rx6gNPtcvnjAYMsgJEz6+7j953HSQhnw5H+8xtc+/yqCbZbWAk7ee4rWcgeXhIhGE+1FyMAjSSOEVDhnX1uF7tsihyOOeMtwW0TuAl7LkcJh6t1hjcHWAuEfbgtj0T5gBL6foFYc+onLjAYltObxhhH+oGBaaYysCeI2q82AUs4yGgx5det5RqamriOcPODEfA1xk6vrNXPJHmG/h201MTMlKz96ksYdkmc++XWuXN+hPxKsHlvgwbvfznBnTHrNpxtZro4mlJNLzJsW6cwxEk/h24AorHFhE5o1KrIcX2pRHwhcMyTthiy0Q9azDO08PL+BEjtY4ZMEMa0wIfZ8HAInQChFURa0Ww0EBrQgEIZ6UJP1Dbu7N8nGA6rtgGhJowKfyWhKMpdSVxlKJEjPUdUjhFaY3COJJZ7nEIEhjEL8SBxG6UqhFNR1icYglYdzFlN
p/n/23jvYtus87Puttfs+vdxz+73v3dfxADw8dIIEC0iBEkmJqo4Vy7GcyEoy1nhGY4+lTJxMJhqXZCZOJpNkIsWJR7FiSZRkhpREmmInCIAAiPp6vb2fXvbZda38cQGNokISwCPxAOH3z2l773PunXV+851vfWt9jmfjVg2yZIjQLsJ8NcUiNVqp1/YNO4j0tSCOEkajkDjLSBKFTjUTeU27O2KwbbISr6Ijxc7WFvf9nU/iHinyD371n/Cj+z/DDz18kp+86zhPnF/h7//z/4FjHOfxRw7z8KOzzDx0BqVgp2MysAUk+9jSojhlI7UmGQZIqYkGkjg4iF2k8VaM6reOdydYf/B8L4uX/vWJxR9oOeRtIffXUgAHkeKrzymFyhTGa4GtEJhakugUU9skcYKUgixUxN2YKHMJNrZpbWZUHZNivUqkm/Q3eiRjwUzeZVY6TExNYTl30xtHrGxewpd57rwvz+WV8zTXKjz60AxBX5JrJMws+mwHu/RGHfxunUc+OsOR9UdJ+y9QqOZ5YuMKStdxapIkFOjMwmGEna+hpIdZChiNU6qFImlioUyF4WhyvnnQFFxLhLAxhCKTBrZhYQoToUELSZplYEASJ6j0oPpEJQYqDFGBJhmljEY9bMMlG0iyUGEUBEmYobIMKW10JkErkClprJCZRqIxpcTMK5ycdRC1KwPTswGFkBlSmkjLQGUQRwmmYWC4EtMErTRKH6xHOIiQ1cFzaXYQ/aeQxClhGKIy9er5NonOSLKMoDuk3YmwjIThforEoVaZZro6y1PXv8KDR8+SExMcbaQ0d/axctscOf4BCicLxK6DKQVilKKtFM9yMVIbq+CRxiZGZqLTEVEUMByO4NVfEe/yLn/duC3kLoTAtA/CKykEUoBMU5QW6MwENNIQpOMYS9h0exGWoZl8b4n5/gQbr6zhBiYyKGOV+nT3Y6Jwh2LexvEnaUYjVsIKcRbwzMaL5K0CA5ExbZZ4cFYTr+X4X/7g9xhLzad/f5Z//o9/mMvf3EMtLfHTj9zF//jPvkgu30fMVli4K2E+/wn+7VP/L5f2byDyS3w4naFRDQmFwnNncf0a9tGMxIerly2miydRmc9wqJk+fRJpR1R290C7SCHwpYdrRJjCI0slpoIUg0wlGKaJEZsknYxRMgbVp2AXGHdCsr5GBQFT9RLp3gCSCtiSEjbZIMWwDaRQZMkAkzKyOkbtj3CkxrQVWApltYn2bMj5aEPjlG18ywWpUUJjKUmpUmTYa7G1NsTJBHFfYYYW0k5xpEQoCXFG0k/o7vQRHWjvdhl3+wglacchlm2DCrAI2dnbwLMHDKOAoF9m9YmbzJ0q88s/9h+SDQYY7ire1RYTRsDjUwvUK6cwF2foGn3ywcH2wraVYRgGI13AzwssMyaLNTo1SGSAV1EUJvKgQd4Wo/yvHzM/cfHdVap/jh9k9H7bDHuhDhoqSNtCGAZaKYRhHFR6cBDFSylIkoh80UNHAUq6jJyAcJBDDWPyNR+ZyzClSbc9Zme4x0vLN9lsdoiMhCQJyPs2kU4oJ2XumzuMJEc7bfPQbJ2nt5ZppTe4tpKR8wN+/4svs3H2MI8/WGJtI2Dj6THNaxaOuY1hdbDMlHK6zR1nZyiWHVrtDrXiFMNEMVkfcO4ph+XNb1Py7wOZkqWS+tFptLND6Vs3MZRApiaeNMhElSTRDFWG5zj0VYIEhIgwM4tslJCGKbY0SYMRQVPQ73extU/eySMZsn0hZPHQUZR0UGGEzNTBtgOmi7ZGB/nwksnuzZh8w0JmFnubCYXKQSoIP0LJDGkpkjRBaAOpJNlYMdqP6dwMmV0y0UZMHEQgDExhE6sESwvsDIxYkQ0FQX9EmiQgbSKR4qCpWRWOL+bYbY2p+R5LDY/tjQ5i3CPZ8xCjCtXpGbJsh67sUvaKRNEeZRcGqy2cyERMKeIsQhgmKQrPtmAMoYyIhwrXzlGuz2JZEvVqkw7T+uszofou7/Iab0r
uQogVYMBBQXGqtb5fCFEFfhc4BKwAf0Nr3flO19EaUnUQuasYhATDlki7D8JCxf7BJBkZtitJgxE6yUjRYNkUD8eMl0dEQUI2iInCGN9VRO0EM5YUCyU6oz424DvTTOYdHj9+llhpPnP+SXZHdcpWniPeLJ53mEYvIyuf5X7v83z1CyuEDz7Iz/3S3Wz/yTfo77WhWuDv/+P/gp8dvEJt6iSZP2btmS2SvTL2R02GQ81//KO/w2rzHDI0ud6U1GojFjlJXxlMPvowv1A/zNY3LnPj20OOJ4LdQcqFwQ62WWTsHsNKBoQ9AxFU0c6YLEkJBhESg3gcMh4HxGHM1JSH7wiGQ427Acn1GOtYzGiTg8ncZMj5r9zAnp3n+CM2Rj7P1B1tvv1/b3Hs4SqGl6M/vokWdfQoh+UYWHmBabnEg5jebgfVVuw+t8/UZAV/0iVRkuhan7EB3qEyyW7I2gsD4mZC0u8SjFvI4ZCim2OluY+WGsMs8tiD72OgI4KoSbFQZ+HeSY6fGfG/fnqLkn+FOyaqLNkTVOseI8vAkPt8c3sbsbPCf/7hGTIUwV7AIFLkfEmKQpBimBp73yYaKVZ31sjVLaycJlexQEOSvv5FTLdqbL8VvJtrfxe4NZH7h7TWzT/z+FeBL2ut/4UQ4ldfffwr3+kCQmoM++ALKA2BEAeleFliYNgmSmcY2kZITRQPsSIHkZiEUYjQEjohkhQSkyTOaLVC9tpN1npNmmmPOIpITYWbyzHhFTh96DjoFrVjHk+90OWDxyb5jz7+EBdeeIVzl4aU5iqYxjJLv/STfFI3uXGujTWOmPrYBzlVTcF3SRKLxuLDCCNl49lrDG6Wma5lfO3fvMzm5XXE4CpOIoiNXXrJDXprC7Qu+lhzPcg09l1FplWdjY0AsztFweySml1SNeDC5i7HJkrsLUfYuR6GA8QmWTpAqYNepQiBZUiyIEc3EjiLNv4hH70V0++MsRdc0sGYpBFT9MpsfP0qRWuRqQcMZh5YwMkCVp7tUl3w6QxT5FhRO60xYpNsEDNWASKAQmYSqZTJRpnSRJF+t0caZfS2hoz7Cctf2yYNIuj1kIDnmxRcCysrshsMGcdDelFC0bLZ2R5RPJwhpWI76UCvQnsnZHv1WXLTFt/cc/j2coEf/9j7udRrstMMKSCZrZSZfmSe/c0ujlHAjYaAxCGg1daEY43UMfE4ZHa+SmnWJxMRr9X8vJbyeyvG9ru8y1vF9yMt80ngg6/e/03ga3yXL4DWAqVfbYWm5Ks5eFBakSiNsDSKMVKBLfLonMYwMzrPjuheGzI818ZSmjiIKFkGy1uXiQ1JZqYYY4OSU+b0zCHyfpnpkoPjZHSzEb/52etoO8fL65Lr10oc+9hHuevvBax/ts+oVWF37TJ+o0LOrvLcZ1Yo1SS/u3qDG/su05VDfPieEiIeYwc5ikbKeGUTfzTG8gSnq0d5/5REpDaemyeIErovX+DGt02OJg1OPDSFe/cpHirX+ff/1fMk8QBPmSgjZr23xm4/x5FihYpyyYU+ih5hZGEYAqUz4kRhWUX2ojbKtLnrYY+xylBJn85OwGRHMWhK/MdKHPqIR7rbof1sBxVETP3UDNVHj1N/T4FR2GRel1BpRtJThLEii0f4noW0XcZZl9QTVN9Xp7W/TzXzGO0E6J7AHiS0m10MIZCuQ5IqNvfbpJlCWDkQNsIy6PZafPTYGWpnLJ758gpPt1fojAQ89ynSTHPv9Ac5NDPJ1s5VRr0uTjCCJCJK+5xqTHKoUOWJf/kyh+5fwDgUYtqSIBKoXJWCk5GMY+TIxKsUUA7sDkYYUmBaBmhNduu2c3/dY/sHye0Ysf91yLu/3gYdP6i8+5uVuwb+RAihgV/XWv8GMKm13n719R1g8rteRWikmwECrTMMYaIVGKZASFDpwXNZppAqRUkfpSOyzSadc1uUswLSACeLiYXP2DAZxDFxqqlXJynbNdLMxVE5irU8k+4+X7h8g1G
4y3jcYelIhdn3CHJzNZ7+4iZPn1M8ulhHtTK6+7s0KnnkOGDl4jY1x+BqZ51GQ3IsZ7ITNxCBwSDKaOkcz1zbYKAi0jhjtj7PEdtB2SZ7icYtCKxrbdaezHP4ZB+zZmKUyyR2jJOvMjtqsxz2SdMhiAzhVhhnCcpK6EceJSFwDJuDbIFCqwDXyTNKAzItkCRo28H3XMSkgaVbkJUYA40PTNG7HuLncjB0yVxBJrpIpRi0LHLlHMILMfQYOTIZdhP21ndJhhH1SgWjlKfu1RlcaxO3U0QGvp8jLowQQBpCMBihUo1pGfRHTbRhMAwTfCPjjpPH+dYrF5Ba04r3iJKETJcxjR1MQoTI+MCZ+9jb2cbWAp8QX48IElhuh9SzEde/3mcxO46wBdKPyEYmtuswjgZYkY8KY3AVlakaSvCn5bXSeEPLOW7N2P4BcDtK/c/yThf8m+mb+v3kzcr9fVrrTSFEA/iiEOLyn31Ra61f/XL8BYQQvwj8IsD8zDymtF89x0BriTBCNCFoSZpkaClQWqG1ichApIp6vYozmxB3XcJRh80w4Xxzha1hm8xycKRPLnII4j6HFg9Rm4iZPeSws1mgPOFjbWkeWZjn4594iMLdU+w9P+Drn3+Sln2a+lIFs67YvukSJmMa0yUKhYx2t8m9xw8zCDSfv5KHsUDHewyikO39LdqDPsq00anL3tDk9Ewdke8zmSm0U2GiPGB3BXZvpEw6CuEpSvMG+8s2lZxNJ7HwTUmUxfSAnd0h9aJBmPaw6/N0R2MqrsCzHUQqmKx6LG/1SFLwCga4Brm6ZLAT4xbK7DwVUTmTkZ/36A1MKECWJAhPITOJ65dwk5D9q13KRy10LaHXHDO6mdB8cZO859DeDgg2Q1zLJukMkalCxymaFIRFkqWEyYB8ySDtx0jLIItTOmlKc9zj5MQU7WYfM8vI5zSWSskVFUG6iW/PkI3H+EUHkVOEWtAfKny3QSNvshH1qVkpDaNCveIz3uwjPJPyTAlPOljaIMpy5KsW/V5IlkWYjgBLoF91unxjS/Vuydi2c5U39ObfK7e72P86cDuKHd6k3LXWm6/e7gkhPg08COwKIaa11ttCiGlg76849zeA3wC47857NCoAOMihawnaAuGTZiloE5RGKIESIdFun62Lm7gdQe3oIq2L2/R3Mp7bOsf1bpeRkTEZ2SwUZrhjYZbFQ0WO3V1hrEaMbMn08TpFdR9H37tA9dTD+F6VZ//NKzz91WcpkueXfu39lGYKKDQLwxaf/29+my88u4/DaX7sgQVOZBGD3B6fe+YpxspmpAaQlgCB4fQxgbO1ezhUzBFLyVRtntlFh529iMrsDMXuK3ztd2D2hMPRMzke/MAjfLn9JZqbVeZqPtfaXaQG2usoFK3RHoGSdDZ3yGKbRm6CIxOT5CyHa72MXHmSpK/IzciD5fhpjOtEdLZ67K5cY+UlzdJ9S7hVF6sGza+u4R0rEY4FBm3SpiBoDumvJTDyKCUOlTTCny6Qy+eJwoSNG01aUY94JLGloBuESBUgDJP+YEicabqpJlBgSsnhiTninevk7YjD+TmO39dg8K1NPrW6zMmJHFEW4ycfZbohOfPeKmc/cidSuxRe2GHtlW1O3DnHyy8bFKM9JIpYG2AalCeKmEZK1OrTb2YYNkgvZRRZeKZPcz+j1e9jegama6CBdPz669xv1djOTcx/X5bHvt2k/poA32kR/BsV+w8iNfOG5S6EyAFSaz149f7jwH8LfBb4O8C/ePX2M9/b9Q4+iso00jDJEgtpCKQ82JBKa4FghIOms97C6blMzNuMw4g0pyjX89jrHTz61I3DHK3N0sgtcnipyNyxWZrDFlq41Bc9jEqOnB7i5h7ANlxUJ6F99RJ5z2B6pkqhnhG3Q7Juj1GcMXGoztxKxF66wahd48THjlE7cZalixOsXx9y5fwFttsJcWZjGzM42mSm0mCuWsTwI+K4SLlYxo7OQ1ZgIGp0928yHPv
EowqP/9BJjp64g83ml1nbgcksZcsQOEjyfo4gCVEypRfHZFlMWZWJlCIK+kp4RwAAIABJREFURnQTi3reYzqUjDoRlPrkch5WTmBaNqbhUs6l7J3fRpmSWJkIS2O/1GPhwRNYRsbVl19BDitMT+QwrBBtRSRCk8ky3b4kTS1m5iVJ6NPZHtBu7ZCXMAqGTE4tkYQhg6hHL43ZDQOkNqhVS2iriKdT/IqHeDDP9pd6ZKlkqnaMflvx8L0lpC2456c/iJ330E1FbXKKNdnBdTMmSgXGrSZre32OFCv09nsUZxukvsJzC6RxhkrH6JGP0CBMjQhgv9XGyZu4Ofcg556kb+nYvtW83cT+Lm8NbyZynwQ+LQ7ymibwb7XW/14I8RzwKSHEfwKsAn/ju11IKU04PKiWSdMMIWNMO0YqQZpEuNIgbYdsvthjuB/htKBWKLIxiHFKErfuk6ZjfvzDn+CLX/km7z9zP7Mzc5SPFLm0OeCbN1ex/SP4hSFup4yMhtjWEYw0ZfO5PZoX1zEGJosTp5k3yjz1axdI2zFrPUVsdnn47Gk+9uETLN4xgxmGbPRKiDjl8Ec/yPwH9vjjX7xILEqE6YC9TkbZWuCeEwV0LmCsJFmvTdXJc/jOI3TObxI3DWbkdXZ2BzyzWmC4rTl95wIP3/EIvn6edDRHKe1SMl1u7O1SnZjEDIdYcYaWOQI9ZrvbpBsNaI4VJ1SdqS0bX+UoehpVHhJjomNBnIaU8nl8Q5PEIWYwIpfWGfa2WfvcMxg5QWNiGlUOGaYRxaqFtiAexIxbIYSaNEnppiPCKCGLNNXKBEpnuNY0f3TpK7QQTFqTZFmXdrxHYoC/pakVK4go4b0/fJYLf7DFaDTkpx76BKW5S8yeOELx2BLGdBmRmUTtHuf+ZIOnv/4ihkhYuON+nOaAfDfiZL3OYBzSGxlUd3rYrkM320UpjeMUsK0I3bcYyz6mMKEfQQTZ6NUOTK+/zd4tG9u3kneC1N/p+ffbiTcsd631TeDMX/J8C/jw67pWpkl741fPlyAtYp1hyINNr2IxJmrC3vkdWjdSFmcL7EYjtjsJA2ly7K5Z/JqP21acnbmT8sQ08YTLxauaT1+5Qb9XZOnQNvc0SiSlGIYO2gzJel12V7YZDGJ8itj9lOGoy/ZokzB2sLXBoTsnKcwIdjYUf/iNbU7fOcnW+Zt4m3ly3oBSIWZejmglGWPhMpXvENDh8mYPkwKlYorIjzGVZjwShIMEz9IURR3hQJ4h1y9fZtBLefzxJU6lq3zmy+ucLM8ySjbJScGU5zHud0l9l+EwROEzUBEZKVEWYrqSNIoQAUTNCAODVrPHcK+FRwVbgEKDlGgjwLA1BVllFPRZODLPRrOHX/RIRgHtbYNCqUSvkzAYBISdPhKBDjskCVgyj12z2W5FlP2Mgm2yMwhInZTZmQU6128QqAJ5zycZtzgyeYz2ceg9t8bS7N24RcX8e+6nMDUPDYssldjDIeOdPs9/6xp7zXVm6hVyjSKTnQJqv063nWKZEd0I0iTGQBCnI2zXZTTYItQeynCxhIWwxyRZRBJEJL2DgCF7neUyt3Jsv8tf5F3BH/D9Ts3cFitURabIRgc/nY1Eo8dDhhf6dHYyxrpE0N7EEBn+IKWWDhnspphuxBPnLmPlKlxavsjho4/w6F15KuVjRNEeV559if/jiScYZpMUvBo/c/Ix7OE+g0s+gWoR9ruI7ohQK6oiYxi32LEHLK/t8kRvl4KT52/9zU9w19kqT3zmczx/IaIxVeVK9xKLk8fYvvktLm2P6Mcxe1EJpUo0bJgq5xlnin56lZc2pji1ZHJm+n5uPt3CaYRsP1mmUTCpJhHFqIKUimljk876V/n878xxx3vu5ez8Fs/vdDnl+5hmH3ZvcGdumss6ZZ9dZJAQWwmRnWLriHLepuznsdKMG09cpheH5D0bQwhyRQ+FS7Pdx7RttBbs91o4pkUaarYv7yELOcbtEX7ZIskyouE
YUyuKJQNTesTDIWFfsJuN+PrOVe4YnKbf36HtbDFvzpO3Eq60ltkZV6jkjlIRATNWnkPz89z/2Ht58n/7NisDg7/7t48jj0p27QJPfnWdGdtnsu7jjDpcekZx9eY2lu1Tyx9n8p46tVqB1pWMmUnBk+vLuB5cuNxidtbiyF1zpFKQc6ZJo5S0m6EymygOMY3k1X3wD8prDXFbbH76prhVUfvVX3/gez72+H/63C15z7+Mt7vgb9Uk6vdT8LeF3INOxMVP3QABKs7QqcLe14yGIyJvg8WZMiaSnWFIlGlMRgwHmn01JGx2OTNxmGq6xtZ5m7qRY75ynFc2LnCo5CPdIh84dRY7GpMRs9Z7FiUd0lGKF49wCh62MtjDZHXrBpuDAY4DOZlnHMH6tR6XlgUFq8hwb4hbnOSacZ2bm210rEljm1DuoHQfHdYo+Ic54ZdIjSatNGSqMIFbSjF3FTvXtuhZHp5VxDIElpViaoOyv8Ss12FlsMnu099mZuEO1ruXSSnSKB7FizdxDZO65bMue1ieTy8aMogTZmyHar5CmGaMwgFlx2Y4CMHK8OwctpaMhj2SMCQYjMkyhWtJnIImjCJcodlcHjM97RNkGgxJqeFBwUbIAWY/IOqV0J0hSV8QGz2Wh8+RiQziHIHeZ8qZp2Yv00u3KcqjlClwduEopz92ip7tMTZiao6LnjVIih7/0z/6BuNWnx/9yALzuTlWX5B0ggtgrHO0OkulAEpIjPkC+XKGTjUF1WJ7P6VplRDGIoePH+zxP7YSbMcmcgKiocK2BhSKPkpI1KvNOqT59pb7GxX76xH593L+rZb921Xwt2t1zJ/ntpA7cUayfrCKW2ZApuiPRtiOgxkK1jYDPDvF9yOETDh3Y5fVKCAvyhyqFHjsjuP0gxwLiwZ+2UAcDviRQz/Me++EZ59ZYbAxIDP2kTqmmSZs9RJSFVPN1bhv1mNtvcXzO9cYjLuEqoCrLIZRh6e+8FmeSiMc06boF6hIk5Kxw+qVHu3WEMMaMlGYojOsUXZcTh5e5Pev/Ta7vSIPzz3MiYk5tq8GXHjlKZpGgTjYYzNVVNszHHFcql6Cr8ZMZz4TLDI7WyJWDp2kwCOnJJdaexS8o6TrA6Rj0vBdKr5JnGaMdEK32+KnH/wRMmnyyuYW+UzQCjocrU6QRiahNuh0WkgSkBEKSDPFSjNCdnyqpTIqiPDMAfFujn5m49dtKqfmwA8ZtU26HYuSrHKlf5O2GuImBkPR5eNzp3GdPI++90fYa2murn2J/lBz/5kS+eJJcg9E/Hf/89dgPOLjdx3m5MfP8kefvcH/+enfwM9y3Lc0wZwxS3N9g2cvXcISY/7WI58kPzXGL81y/Q93yecLzD1QpHctoWCVGVsBg3jApfVnaX3uMEV3gruOTiGNIX7JQCSK1MrhSY3UKUIfpGUEb9/93L+b2N+swF8P32/Z3658v2X+r08sAtzyCP62kLvKYLR3kBc1DYltmhhYGIBpZEzbmvVRxFf2Wqgkwyak5gyRqaZmT/OlS21+4hMCR+aJuint8zEvPt+n7ggGoz26o4S5qTlKtuLpK8+zGUTUnApSpWSRZpyOGKQxnTRFiYAsM/EsgTnOUcsXOHvoOKWSTbEkIPHxsyZHKmPKvodOu3RXrmAyxX5LkTMspL7Ji90ZfLdAo5ZyStb4o72XuRQELNoesdTsZA1aaAq2RFRC2tkaU7aPNiJCVWd6osq17jJ72Q0WGg26wQhHOhwvzrCz36LmZJAa5BrTfOmFJ9hoDTlemkSLIYEqkZcZCQHXYoEeDdkItzEV2JaL4XrUU0FvP2SyVGW7lTJTMIn0NrPhafZfusggCnnpxYCxpZkr9/HtkF4SUrQz5v0ZMGdp9eAz3/oS9UqBuZlD+MemOf6hBkqO+e3fuIQxztgfZEw0jrG2vc3W2nVmHJsf/+AiD9z7KPtNzaULLWwdsFBbonHCxhAOO+sjTDvk8qV1zh6ZpmC0KTsOoEj
VmAiLKN5hpb+BZ59gplxDkEPqEJ0ZDHsuWsiDeQYgS95+ffa+k9R/kEL/Tlz99QfetOBv1xLJt0t0/p0Q+jboUrPoL+hfPvwP0YBjGqgspdnrs6PbICosGAGtJAa3RjmzeHz2MM8reM+RlCP3P8CO0lz841coRhlrzXXOhy1SZVHyi9hpjCEklu9iZGNe2blCIiSkkqpX5J7GUSYd+PLKFTKVkjM87l26m5nJCv50Eatqkp/xcHyB7VjI0GD12W3Sssddn5xj3B0xfGqf1ZdWubKyQT82SZ2ItXbGQm2e04tVrlzt8qFDk+DAIG1Sm6jwtRdfppMpxqbFKEsYBUPGsYF2Y+riCHcs3snf/S+PsPvyAHq7BK0e3c0UraCE5MWrLUxf8tzyBboEuIZHtVBg0q/RGnRpjVNSPWZsJoRRRJREWJaFYUiScUw+V6BkFREGDPodCp6HmVYplG1u7K2Sy1lURMQgMTCl4KQ3w1S1xpmzd9FKu9zx8ycQ1Sm0mRJnIbZjIsaSr/33T7O9fIXFqTqHz5SpHz/O2pMB61trPPSTd2Pmxnz5N2+wuaYYjXdwrQE/8pH3kZ8xMbwhaXuWtavXMMwBT13Y5/cvfoU57yj/9Mc/RHs35ukLF8kwiNIM2zKpFUpYtkmxUKZgm9gyJo1HHLTgPUjL/PyTv8Kl3o23xPC5iXl98pO//D0f/1dJ/XYR+nfizYr+rRb87SL01xPB/5OfvMDNc6O/dGzfFpF7lCXc7G0B4BsmjmEg9AgzEZQsg7lSjsF+xF7k8h98+CGeePIl3vPoUY4+cobe+g7PPnOV+cYMzz/3JAXLJBpH+HZGNopJzRIFS8LYpJar8PHD97A3CuiF4HoGlh4jjDzHc2Usy6c+McXU4RnyUzmMSQOzZODOFDEsB8Nz0UFI445ptrdGjDOJUS9T/TGbqQ/Mcfiba3z1D17hXL+LcDWjtE+7Y7McnKe8kVG1oFqaIMXlk2cfwjKatMYBL27us2NK9jvL9AcGqSuYqc2x//KQ+lkHM38Xo40+3rOb7F6IeGFlDc/NmCr6XM2BSGyk4ZNzHdqjPqvdbTBMXFuSRxClEfmcSxjHhMGYkuej0wDfzjNfrLNOxHY4YsJWtAYmOROs2GXpkM/ebkTeXOBQsYDK57GOT5A3FGHZxzQChBwjTQOZmuhhStLucNeR05z6eB1jZpb2RouXr5/j2fMdGidOYEUjdndajNI2GRH5YhW7YdEfGZx/so+jBbVGjfmzZU6wS3oxYnfUBqdKaX6H7JJFc7RLM4mIMkUtmMa1oNTtUfYKOIZJo5DHsW1s86AXwGuSv515O0Tq3403G8m/VTn420Xqt5rbQu6uYXKsWALA8zxKvsc9E0dI2ynWQpHQbNG+uII1kLy01ee+hx9me23EZLbK7114hunE4evXrzJUITcHI475h2kUBMMgo5Kv0zUNzkyVMfIFSmWDYBzRHY/wdIYjXOJBQqFYw6g62BWf8qlJvJqPcMF0JTnfRWmNadtkrom7ELJQLdG51MXJ+WRFKFZ9yh89wUPBgMLXBZ/ZvAIipVE5xdJuiU62wdPtNjO9WWw8FptVyk6CJ0N0O4AI1mSR2fIZHrvjJBNmHloa1dJ0mkPGOxHnn884894qjTMm55/aZZSk3D99miAKYcLigx86xn/9L3+LWb9EyVJU3CWkkUE5xbUkljQOOiqJjFDAxMQSm+2QDx1xkMPDzNQM+pnCTsEVkLp5olNbHL+7wNiqEw4jCqcCytVp2DWIe9DaHKNjzY0nNrCtFqdOnKHntVm7abD8hy/w5PmbPLn8DdLxkHufX+L82iW2Ots0ChZaGATDST71qee4tn+T7V6X6VyDj939Pmr1o5QmHuPX/9FxdGuVgehx4VKLPT1GyxnmvS12gzbtaAU3kWhRpTfYw9UmemoBwzCxXm3jFWevbxHTrcRojv70/huZGH27iP01/uznfSOi/8tE+5rw/6z836lChluXg78t5F7
wPD52790HzbGVSZoYBIkJUxPUFsu8dOU62uwTDENEt0C/00IVHF643uFEpcrF1UtsjyxKjqTul8HWVP1JarJJed5iYmCQqAxXOaQueI6NP1vFUjHd1Sax0piVPLogIW/hFjy0THFyEsMyiMgASRqOMdwU0y0TdocII8V1LdApWS/ELuWZOjXF5qWUh+MhaSaYq5UxD59ku73K5eYam/EKeTNPc3dAza0zk5+jWhzjpH22Nmso2WVqOiPa26XbNAguuWyd6yCSLmgTaZf5/LPr7DZHzJVKzJkWbmLw/sfu5MbyTU5W6hzyFmmbBgs1i2JZY1k2KimSJIJMaUzTIRorGpOS5niH1YuTHDudsKOaXNnOCDBolCe5dy5PMXectNIgXxtidl12LigmPI9Xrp3npau7RB2DvAX3LuUpNWpo8yqj1SK7axssd26yvH0Jg5Q7qjM0t64h9B6dYMA4dJGWy8R4jV1jyHjQxXci7j1cpz/a4vJ5iyubCUt3V7jnWIPOxjKt8Do5J0+hYFKlSMmxaYcZqRKQxDjCwHI8hmGA0BqhD7YdeL117read8LiozfCrcjJw/9f5O9kqf953myZ5G0h936s+fxqChps28C14UMfqVKeKGJP+My6RVb2PAx/gmEmmTY7RM0h73nvD1Fu5Pmhn3kIchnrL9xk+cVNum2FsqFWnGe3eTARGewPMR2bcnGJrDrEDm2U1jQmK2zGXUZSU2vUKEwX8Asu0hOYOYk0BVEUIU2BaQg0FrIq8VUK+w69jT6z95QYoBi0ulheHmdWMbc7g1fJsXruJlNTmq2ozLR/nBuj60RZQGls0IsS4lGHjjdBtXSaX/h5g4o9TTTcY99uc/XKiJtfj7lrqcrcrMmRpQqtlYDVqxtsD7f59rrHr7zvUWbMjG9+7iYzU5KHz7yHmcOzTPzoIdbHY77xezfYuN4kh8Eji2XuesCDgkHSNrj4yj7T8yf5yA/VaK5G/OZXvsmFboDthZRWHWr+xzn7QINwbY1v/bvLrO4OCPom8yWb8/1XOLd/g5QxNbOMF3ycamGLNBgi3RxetUrJqLE4sjhe8Lln4gQ6kdxbabCxL0mIuaukqNuSMCgjZ+uouE2n3+ToPWf53NOfZXcY4tTOcHz+/SycOcHQ2Of/+e2rVO0+taX7WKxIGs0Nwixhp9fGcj3CLKMT9BHwp71TM/3Wyv2N8HaL2P8qXvs7fpCVNYUn6t/x9cGjze/4+u3EmxH8bSF3pCDIDn6+urkCtZl5ElMRY+PUIz71u1t0Wz0mcjaWdAnSSXKlOuPekO54yFBVOP2xCkvlu2jMVzn3mTZxOkSYgoKVEkQBnplj0OnRkCG24+HVLOKuRCqDidSgvbtFHCUIJLbhIQVEwRiEwjBNhDbRXoxKDQSa1LRJQpdC3iIY21hhgNIOeys9xgMHIYekoWYgFL2OTb2Ssty7iSKhIIqMRA/96mZouWxAr3uZ69+ax/Fj0qyI7y1ycfslNruSxXgK12uQjENWr98kittII6JqZPgFnyTsUq64TB9qkBQiikc8tjeHfOFPbtI8d4H2UPCzP/kw5XttRs0iG+da9DZ67K+vo4WNGXnst5tM2HDYTQCDMwtnuetUCcfVDFPJ4aVZpuua7dXrpJFEmpoJr0LemiPnmswcm2G2ZjFhjzEMxUpPkWWSR064PHFtiVH+JPPzLpapuX/qc/RDmxMzR+n0m9Qyn1LD5tvLHeYbVcbDfcJQUS8anDzmMjGZ4OaqzB2aJuetEWQJ1/Z20IbFrGVjC03OtBiNh9imQJsOIHmtVuDtVyvzLm+U7yb21455Own+jXJbyN1zLM4cOYwGCkUf0+6RjVO8hsHFr93A3eqSWkPKZpmn9m7w2ORRPnRqhq0bHQLD4slze3z1y3V+6Z/di3+npP5Sl6gzASojb2VYRkCaKNLYQYcZKZLYSJFlC8Py8EIDq2WShBHD7hAzb2KYgkwkaBTSsJCWJmfnQIakSqANGy8
XI5WmdXEPq+iTjSO2rzSJeyGGlEhzhDl0KNgRH/x7H+D0s8dpyhhDDti63Gd7sw2BRehEmMKhub+JEjapzFGrWbzcOkeqJ9lrNWi1CnhpB5m1yBkapTKEIRmPUjqdhJnpU5gTY4xJH3OhzoXff4n0RpfWaMzHT9eYvK/M1uWE7av7hM01smGfRr6ElSmef+U6PRXhmdMslWocm6kzM30cz5hBqCGDfpv8XI38CPKVHmo/4tqLiyyWpylZdfK2RbBvshIn1M6exrRCji3FTPd3sKo/gfWNK8TdDYSqUGr43Hf8I+SdIZdXO1hpnekjBr1UUsn5TNeL9FsRXm5AFuc4cmgB1zExEouJRpVTU5LLuyY39i8RhSZrxTITnsspv0jNKGKhiJWFxua1vSCtdztkv+XcqhTNd+J7EfufP/btIPk3moO/LUoh7545qT/3C//Xnz5OtMHaZowbgxVGrC2fo2vv8a/OX8UVY/7pJ/4zCoZmZTlE2hkXt8+TqDI3ByNqhVP88j94mN7mCuOWTbDRxBcmSRQQhhpnssHkPXXKD1aI+gGJUBBFBC8pgnCE4dsIz0EjEIaBYQosJ8PwNHg2qVQU5n2k1KhmihwZXHvxBiWngAolRmyBGNNo5Bh2hlx9ZRWjYfO5l/eYf/9DzK/tcvKeKqurO/QSiScDykPN3l6LCM3q8CJf3Oxx2H+EzfgLTBpHOT1xilOLVabMClkWsBveIOibFHMF9oOMrUGTE/OLGMLiA794B4WTkyQ3NxkuXyNTdcJBAb3fwtUG7VHE08+0GRsJzVAzUw1Y21whIKVkTTJVqOBaAqIyM4frCCvk+vILNGpHmC7Nshyt8NiDJ7j5whC/rrjnp6ZR5QYy7EMv4PpTy2zdgIXFGTrjJksfPEv+hEANRzz3W8s40sRFI8w+E4aNO+ERaklzbZ9BO8YwPS4v77Ix3qVozvCxx++g3VWU84coztvEo5dZvf4c//tnv01LWMgEpDZxjTLHanPcNTnHoVoNifjTnPvf/vovcbFz9S0J4Iuiqh8Sr287mndKSuYv4/sh+Ncj9b+Kt4Pk4S8K/rYvhUzjjN3tNgBGJrGFRS0JiayYMM4olwyu7rgoJ6OR1KjlfcYdi1KhRz9OuefuEzxzeQXd36fXyXjuC2Ua0wVcFdFYqjDcjbAth1Q3iYYpw/WQ0r0SmbOwhoKRTpE5F8OwSNIMI7RBGEgDhNbkKgd9XE1pE6fgWB6GqYmtAZiKdAQySem3NccerJIVLQpLeeQNE+/qKoNml364yu6XN0jdRWqOzzdeuECMQRg1cQvHuHuiyIxpcWW8T2oMscn4yNQ8pjcJwwQjU0gX4niMGS5S8EbYdple63lsJ0WOIYo6XHhuwMlqgVG3Qr9VJ2vZpH3NhFGjNU55ebXNetQm0SnDuIva8UhVhmc5GH7IbrBJWULFD1jfyigIjSchJxTDTpssHbJ2uUP1VJ/5B46hJupgKTIKSFHFLYcY/irNnS6+7RA8tU04ajBYN/mjpy9BrFkqzHLnSZNj782hnTl62wNct0Hm32Q4bOFYISfqRaZzHioc4yibuN8m2Cvil6ZZOPp+7jt8mZW9MVd6EqEyPCNBZhnCyPCrJoZpI98h2w+8U7hdxf7add4Ogn8tin+Npr7+Vx57W8gdaeJY0yAOlscLD/KqR1UVkE6dX3vmq4RpxsfqH+HnfuJudi832eoGvLTeZaB6pBcdLMfivskpgkjS316nHM3SGcVMHa1ROeGT7ku8qsXeRpPhesLw5SJy0cKwQnzTxJlJiAKLKJJkWYow0oP/jpAkkYPQKamh0I5NfGOfkWkgCgoih966ojqh0MEQlhbwajkS38Y4BUtnFrn67WV+9vQSTjbi3714lQthyt/88Yd44snf5OkbIUG8RTi6k3/4oR9j9lCMEV7g8WMnuPNnP4mVh5WXNzn3kmTs9Tl2aoliBQwXOs2Y4vJpXKtAMhiQyy9y7stPc/mPJZWyoGxOslB
boDna5ktrK2yH22wPdkmtKm7WZmcMoWNRdH3GsWJrt8mEVeLIzDHSOGJjuEyaaCzTw9ApU1MthhuKrhPzwE89wMg3+cJv3eDG8jbL1/expclhz2VhqsHkTEwzHPOv/mQb68tf5/G5CTY3nwATHp74IA1vhktPOOy2X2LmCFy8mGJWM6YK07zn2Enqsx4rV9aRSYgQI3SW0F4b0C+CkoIH7v05HtAJYrgOUuF4ZWpTk+RzORzHP+gFYCSAwPnc7THMvxfeqVH7rRb7rZL6O5nbYtQLMhI2QWsM00OnHhWvQhgm7GYr+AwhLPP+x+6lMTXH00+8xIWdMbu9m0hhYRkBpHVaowa2jBmFJrEMmKwImhstwrhArV5FApWJAoNOzGgzopKzkHM14t4+qaUQfoZtS7JMgBAIaQAKIeKDCdYkBi0x8haDlQC36hOPRlhmiF2w8XSdXE0SC4XOhlhOjtz/x96bxkqWnvd9v/c9+zm116279za9Tc8MZyMpmkOJoihZCxUtDqAkhAAbSSQbgQMoQABH/hIDDoI4iGMgiQEJMhJHEgIrhqHEEqTEIimRw52cIWffeu+++6296uzvkg+3aTCWKHEoidMj9g+4qDpvnbp1Pjz1P08929vqEzoj3ri1RDgBRWDZrV7jN/5fj62Hfo5f+MkD2i+d4e5sSmoWNJp9fvzxj1Hbkq9+IiTcVLzw/Keply3OX3iY2bLk5UOPrNaohSRSDlFY8+SliJ3ZHKfVJ/OmPBRvcXrN461rt7g2znm9epPJdI6ULmdkhOe2WFRDWm5IVZUsFERG0Us8co6Y1jnjPCNXJb4X481DCjdmox0QRx7p3YBRMefGl+7wxt3bTOtd4jAgqDaZLGYE4sO8NbzOC3u/S82Yn7r4d2k4PksSLj31fvyGZXrrDmk+wfU6TLMhhe4wPXSZzZc84kpU6pAbTdT2GI4XLGcp2dJDCcPBTKGsodk8QxT0kNpwvKdpNBTr/QDfD/G21s6OAAAgAElEQVT8k6kyytz/TUwP+Pb5yxL2d4v3/u1yX4i740o2Tw8AKHNNWdS8dOMq48ldbg2HZHoLEzT51HNvMpsc82+u3qbrK2pbYIFFWYGdMKwrtuNtvq+dMx9bsiDisTNr7I2POV5qkkFI0g9JVprsX7/LdDRg64OCcM1D5QHW1eBYnG+MmhIGKQVCeGhtcAuJmpaMp4pGFNA+7VJHTbYeez+mKFgtlrzyRwc89PhZSl+jq5xXn30Dr1jw0vAacbzOIytPcfF8xvmHzvL6F7/GF35zlzvpAQ93L/HUR1qsv3edtVWFuz/j5//m36Mfn+FUP2Eyqvj1wz9EWwe3tjRCw6AR8zd//kdpbW3ypU/u0xCaz7z1Lzjfc9hfxPzhm3uM6xFVPQK3YMXpspGs8eNPP4kVNYvFElvDb736eQI8nuhu0WxtcW15RFm5GL0kMBbHaAI7oVcK1jdPk6ucNz/9VZalw3C0QLsF0voo5TNONV3f58adF/D1lB9rn+NLo5z/68tf5qNrH+X28i6T0Q329jc50xuwZgbE/pyf/fgP8g/+x9+k5Cv8ZOthdHmR/kZCc/08aZnw6oufYDIfM14YhtWQg8VbZCzQ1lJrC66g1jWBTtjuXqIbt+jETay1HAxH76R5f0/zbvPY/yoJ/H0h7q7nEPghAM2Wh5/4rPY0r31lxHjcRJpjqlqxHjUYHR7QE4IYj9SAdR1cL8IgaXqSTB9zzXkvl0JNXQlu7Bf0W5ssJ2Mq7dB6xMdozWBdMBnvIsZd6vKkckY4EiSIe+kJoy2mttRKI4VEOxa3L4kDgd8M8Bo+DpK61rQ2OmRFhfxKxt4Lt2hsBuR7U8Iyp64yHmp6zOyYL+xn0P0RfvCXH+Psx5/gvV94nt/8x6/jhRXWjFHTBtWuxx9+YUrLCTjVFthM4Hp3meYKQ8jHP/AYj19u0lxpMLNN3vii5crDOdlxyeMrl9hLcybzI9bbJY7SiHCTVtRgvdXn9MY
mK+cHJGs+5TTn+NqYx+646KrF6f5ZhFcw3x+j/YpQtmk1Bb0kph1us8xLJmR0bZPd0ZQb8zlXpy9SYTFijqNClNnH+oLTnSfRStKJVliNV3GNZk/tILyEf/7ZfXbz1/l7f+1HONvSHM4THnm6yYVGiQh6/MRPP0l4ZRs7E3ztd67xwvVX2d2XqHpKXmf4SrIWhgRuxGo3AGGopsfUyqfCIUlmBH5F4KaAxZXvXIfq9yr3c3z9fuW/vvG1t/2eX/zp9Fu+dl9Uy7z3oUftV/+H3wbAFIp6UfHis6/ziRde4dr+He7KNnlV0ncHPNXb4EqYkyvD3nLO2mqfJx5ZQ0QG8jbj6YLjgzGFdQiMoBM0aTW7RE1B2OxAYRGUKKHxogbTWYnrgfdQi2YzxnMsQimwFmMFRgi8RozXcgm2BZ4f4rgGpxkhvBpEiHZyqlmJu4D6zoLj5zIcU1EtLdXkmKpKeX3vmDvFFL/UZL6Lqtd55Ice48rlNo8/7FJOb3PnS+sksSZqNDi+nXLlZ9c5vnqVva+VVDrj8g89zNIsaBufoDNH9nxMc4AdNvncr3yS1w8OuDG5yVLXXGxf5vLKKdw4o9McsLIe47ZiKjeh/YRP2HWJjI9eLvn0P/ksnWiAIyuWS2g0coaTgrt1zlGuOF7c5Lg4Yqt3hf/mf/1bhELyr/7brzEZLZjOhliGdKMGsZfQi1tEToeVXhOvKYj7NccHGWZp+eSbr/HISswX9kfMyh1+4twznL/S4Z9/8SWazPk7P/YzHI3g2o2M/YnkVHNIFJ7DCSPeuP4Ghcp54lwXL27y/Fv7TBZzZnJELTWomEbQoOX3ONfvEfmS0D2x7V/68i9xfXH1XVEt81ch5v69Iux/Xg//OxHzf5df/Okd3nipvH+rZfLc8Nqri5OEampQmWacjRHcJasWFNwG26SyMVVlUFFEoVNW1gc8/MR7efjfu0jkl+w+d4eVSYsgakNt6Pe7iMWcxXCOrDrIssRfCTFVzfJ2zcIrOfvDq3TPb+PJAlsG1MpQG4HVEqklQoCMSkyYkQxaCCEpZYYjXITjg1NT5xZXOShpOFjMyWYZnUjgBy5eO0IvDdPapdJzMnwmqeGRToz8+mt8/ouWzV/8ANNbK3zm1d/m5UOf77v8JO9bTzj+csjyOKG37bN26Sncy4ZAtrA9iRttQyl4/tdusP/8CyTCkriaTnOASHM8UaPtgu1kwGBzDds36DgijiXNTkA61YgGaN9Hyxrh1kR+i6qeM5poZBAymeyT1mManoPLaX7oQ5eRbgfRLLlxeBW9yEj8BlI2SPzLOMJBWkMU+TQ3LfOsIPAeonXmNpNdh8pYmo0Ojw9SLj/6ETrNp+iuCey/eZZOs0H3zAWG6Q6z8ha785v0mpeYjq5zUJVM8rs04h55tUozcdD2DpU7YlLsoUxFpWP2M0sgI25Pt/BkQiBjwLIo8nfaxB/w5+B+FHb4zkM4fxGi/u1wX4h7ucx484/eQABKZUgJDhGPrJ3hdOeQF27vk5tN+m6f7ZUuDrucbbf4wA9/lJHK+b9/9SWmpeGxVYd+7HHm0goq04gc5lIj2j36gybpaMFoL2dlbcD2D/jEqzH2tEOwFWM8F+lKQhd8rbDWIu79qDE4IJqgfbASP2+hpMILLVWq8AqHcpQhvQj5oiQUDstxjS81aWnQWOb5nEpLHuq3uWgcWt6cbn/ABxor6OtD1LHHDzz6QZrOp/nUl36X+vG/zo/qhGYseWPH4G+MWPW76EP45D99meuvHXIwvstQTAmcFq4q8WTEEz2Pra01qFbpbwYMzm2S2ZLuuTXCfsCiXLCsDPFKBDLFXUjayYBOM6aymt5KSDsOKbWiQU0Y9NF+xAeurPPI+x8luzbi958dcrwY05Rd6nqBZ3IGnSW9VpO1M5sczRf85ievoYuK7eaQj/+NDzNRt/nYkxfoJj2e+uEPcCR77P7hl9DBgI9//89Q1oplWrH6SJPpF4fs53s0j1y
evniB7hKuVz49d4FSFbcnRxyVR4xUSi27eE6fruzRT0raQU1eViCXCCcDwJPvvvED71b+Ir32+1XUv5m3K/DfLWGH+0TcEQYtT5Je1tNoLNZ2iP0tEnedD17e5q29CSutbda6Dl69TbsdEXddbry1YLa4y3CeseucouuvUhmHKImo6wLX8/Ct4Ph4Cg4EVc1wd0S3M2B1q8/IzhHaYgkQWmIrC46DlIA8aYJx8cEKHOujrcZQ4TY9MAZHgbRgtMB3PbLpDBF6FHXBPAUv0NS1wIoJUdjH0ytc2e6z1l7luCrQzQRnG5J4SZmu8uG/8XOsbt+mUVuCc3vM9vqsrHusnHP5/D+7y2jnmOevPctwmbHQNXM7peU1wTq4NqQfPIKr2pSmRtgmGk1rpUXYCbGBJAoS8rKmNppmkDCzGUnQR4qUqmzQ6GnSakIStEiCgOmyIvZ7uE6In+bcPsgZHtRgU4xIcIym4Vu2tjdor6xwq8h4Ze+ASbGPtJqdmWWySIljzYUnn2T36wfUpsGrL90iMhI7nzOXHkJ5zO7mdM5HnN/e4us7ryGE5s7OlAsbXbZ668SOodeWyKjDsPLguGYyOSa3B7STjxAE25RmioiWGFthOBF1Kx8MIPhu8L0m7G+X76aww30i7soKpic7omFxsBZ0XePOHTajM1TKpxP0ubzRI3Agaq3QXrfc2Jlx9/aIo+NdhpMxZ1bOEjdi2ishwih06aLSiKgbU+wO6SQtkpUAHMv82pSb6S5r/1GXSlT4NoLaoNSJ126tAQFCiHsVMwITlVhX4w8ialviZBJlFemooNvqMNk7xO/WhF6Ch4v1C46Pa1YHHZ569DLXd6fo2qOcWG5ylc+99TWeHe8xCB+m3y9Y7XyMZ1YE/9Mnfou2PcM/bv/75GXBtWuH3P3agjcOvspRDodqRitJCStLUyck0jAIVjnfX6URxORFRm0dilwQdxO8VQe7WhPFPrYwNKIWaMXuSyMYj5EixXN8HFlT1hB3OviOy+nNLeTxkErXLKYOOzckpe2QHn6Oto3QekaQbBC3PY5o8/qtik++8SzzNCXQBZ4AI9/ihRdP89STG7iXVjm1EZDezGi7cLsY05i2WR5ULLKUr7x2nSt3zvDMez/Ep154nlhmWF3y5u6UH3/mUbJiQdyCIgt5X/R9bCQ3mM1fZKlq0vIGQ7skDhJssaRWFqVPbs5GmT/F+h5wP/BuFvNvx3v/bgs73CfibpHUJjo5kBKkg9Y5ptTowGJsSRAWgMb4Ln4EfhRwNK1JM40nFM3IIQ4DrG+RoaJeGIysaaw2MNawcqZDGDbwraHOLP2NNulsjp+fRrRqcCyiBkeAFRKEPBlBbC0Gg+NKrC9xHIPBIpWHqUusMmAt1im5/qUJq60Ym2qSWGAdOHY9lJFUtkGaD7H1FNVyGQwS/Fs1sdTMzZS+WaeY3ObLX61RShJ5CfPxkrIUPH42IZ8dMRwDFCSiBybA9S2x7CGtwCOkH7VptlvItGKWLTDSYiKJG/tESYyxmlKAaypcbdFFiW89POniuj6ea/BcD6TGSodG3CQIp6jcUFUwnyoaqwbHSIQM8GWFRIJ1mWSWu0cHLLIJyjh4VmEBKRV1VlKmFfN9SWvLJVkt2ViD0R0XlStccoQRLNNdXr6eE7QSmoGL67u0g5Bx5hG1OxQs8dsr4CvKyYDQn9GP21TLEZVd4otVDIq68lGVotIlAMY+8NzvV97Non6/c3+IuxXUxr935CCkgzEz0AJjBH5QIqip3Zog8HECF8+PKPMZZVniWUj8Bp12TNCWeKHD/GCJrkL83jeakUrcpkugHMwio1hogn4CVuM7PlZYjGOQjo8tNQhOBk8JkEGA8ByMLbBaI6zFFT5ptcAVHnEE8/ERo5sp/SseYahxap+qEITujEpVXN3dobQhfR+cMOTcmS5PHq2xN5tzqGvOFn3G7k2q4zbnvNOcPb3BaFSw9libvcKgVcDUrOK4hq4tcajpRZJ
+o0dWW4LEZ3O9jRP7uHPJYmeJsRYR+NiGAVFTF4qyKMhrS8M4OEbjSR/XdaiUxnHBkS6VrjHaEvohoRuwZElVlVSFxnUKAi9ESUMkY6TRWOUxnBxzPDlCmRRLiECd5E6EixA5KlekL4/wmiE4Llk6ZK2bQH7Aem+FfDHEEzWz2SFffvFFtptdsnzKVqvDeD4iq8CLWsSdhKhrqFSbcDZmJWlT2pKqDJGyQkqfrE7Rqjy52Vuwy3dPzP3S3/nqX4mKmQe889wX4m4sLCsAi5CAFHj1ErSPcQzdVog1a7RbHqvrTXB9hGqSTY6o1ZTECYi8iDPnY84+0eJwvyQdatRsSRg06W94mFaIaNY0VvvImYv3xpR6Zrj++R02P9ijc7oLVmGtgBiMUAjPA+WgK3Bqi3QFwvMQFlRZEjgB6TTF5hmv/quMRy+HiMAl7sQs93NcP8aNNIfzHIxkWo758MoFNrcHPPcWrF78af7h9yvefP6ItArZuvQo61cGTO/u8U9+47d4zv8Ap8OCT1x9C60ERT3Elilne2fYanQpZMLFj76HtdNN3nzpLnGvhxc6+MdLhsMpZWkoq5qm8CnmkirTNGTCcn/OLK9xTcTw9ojAdUizHL8BxrqAJC1yEC7NsEVeVCyzlEZjcRLX73rcOnJohAkigML63Nzb4yC9SVVluM4MnzaBdPFFiGCEKa7wledv8n0bWxR1k6J0eebHz7A47pLvNbn86Cb170muzm4xn9/hF37xP+OFP/gMq81Vvq8n+PSzX+H7P/I0TssnKzNkq6C7dorLyzGNqc94bPDigGk2QbkVraRF5HcBcObvng7Vd6uwv914+/eax/4PH3r6ux6a+TMnKgkh/jchxJEQ4pVvWusJIT4hhLh677F7b10IIf5nIcQ1IcRLQoinv52LMNaQVRlZlZNWGWmZooxFixIZaRphSCuKabQikhUP4UCRpqhqjkARJ02CwCVoCPAFy6llOa2pS0XsOBgrKIsSKRy8UCJCh+R0m7gXU908ZvzskJ03RyhrwTNYI0EHqLJEqxIPF11rsAKtLUaBqmqyZY4jHUYvTek3FWErwUtCgkYH6UmqumJZVowWC3RRUhlFu9Ulbge88cZ1vvjcTZ5/vaDbsbRbhqNRxHNfnlGYFuf7MZ7UvPTWVQ4nu4yyMdoOCS2sN2PCwKW/GbBxuUM9CKmdEJmEGANeW+JHDrWpSadL6pmgGFWQO1RLQzavsdpFLRT1dIF0wPVdJAJtK4QQOFKi6pNxx4EXooyhrOeUuSbyNGhzMm8HS1bDLF9SMsMog2M1jhA4NsCXAXleUZY1dyZTxmNDK0yJC4PsOXQun6esSmR4lkZrlYHbxXctv/MH/w9r62dYaomjerSCBXdv3kYbgZaCWmukY+l1+qw1VunGDRq+xDMlDd/BdS2uyHHEty6D/G7Y9gMeAPdvzP1/B/4p8BvftPbLwKestf9ICPHL947/K+AngIv3/j4A/Mq9xz8VaTWxmgEn3aFSWvqRoNntcGG9TaOh0aLF+tNrxAPF8PdAzXfIZ0c4tqYzWKe/3mDtoTUmheHzn7zJVjtgsNJAOwVFLdl4dICVEUcHhwR+RHAmwq7krB3G3LxRcLazZLGfo01I8/w6YnWI32ieePJS4Z5E4BHaJU8LPOPgyYh5usCpHTqXAtzAo7IhS1JUXWJrwfXREcP5krHSVOWC7oVV0qpiZ3aHMIt5NS0ZN3OOp1P+aP86Z5rn+cELF2lUj3MkXbqNs3x/s0XsjMjGq6xv9rl84RTbp1YIrrRZNlzK2ZK11QZYi2xHCCs5fXnA1dcP2Hs+REwz9GAN4VYUWUkxLZC+S373gCSLqMMCKS2qEhhR4ccxnuuilSL0QppxlziY4VFSzUpawqPlFHhVRB25HGYzjuobZPqQhtOgIXwiL6Lr+jy61iNbSCaLu3z28Dlu/+4O//0vPUPj/dsc3ekT9AV3peHqJ69yKHyqKKZvz/Ly1VdYjjVP98/
QWmnTbV6kqQP2X1uy+v5V3ONDRFizsXWG1fYGXX+HsprT1DU5FiE9XP8kjxPufksf5i/dtt8u3wthme81r/2d4s8Ud2vts0KIs//O8s8AH7n3/NeBT3PyBfgZ4DfsSdvrl4QQHSHEhrV2/0/7jMDVnO3NgZN8qu/7rHbWaccbrK2EiMaAgoi420GYinI2RBagygJXniRYL145iw4sL315l/FwznZvDeMqnEjhRSGKGtwC4bo4vqRC4YRNUjtl63ROt7+NjSdUjZB8dIN816XzqMGPE4QrqVSF5/sYbQhEiPRqqqVBHNasnutR1iV+0KCYL0FrfD9iUc4Zz1McR1BUc/qeRhEh7BFNT1CrgGluWQlSdmb7eK4DviG1+0xLwyuLV4gdie9ss9Y5y1NbFs84dNdjxr4hWVTYssDVEieMsNIgySmVwA1d+t0mO/tTdm9o6r0lXkPiCx8HwSJP0RMfY2oiT2CURjoBSpcomxLGMb5bYVxJ7TiIsEHkCxwkgWmwGfdAhuwXDrNyQmlLsD6JlDQCQyhdus2QIIgpM40ULuPyFnI2w8gfJK1DRteuUuUDqr2Swhru3n0ZT3pstlxay5B5MeLsxcfIZUm0YjBzSXq8w+Q1iRfGaKYI30VHJa2kxVJYTg1Oo2WJdN2T/gQg8P5kM/9u2PYDHvBO8Z3G3Ne+yagPgLV7z7eAu9903s69tT/1C9A7vcrP/8p/CYAVJ8m83bcmjN/wGVlwy5Csrhk/f8Du64bu/Bin1qSuj3AS3v/w40jT4df/5ae4+tm7XIq3ORyO2XrqDH6viRt6OJ6H1+7iJiVB4JEuQMuaVt+lVD5KLDg6HNMSPcJTCW0S3EJTj3KKBWjl4kuFXVHIyMVRDsU4w2m4pFmNwUGVKdK4SOtQV3NMveD18W3aToMfOXWJupii1F1Wz3dYaTrcmh1glEt/ULM1aTKsQUrBj/3Cx0jvfI7tzzVZDAOW2ZzQMdRmC2sEe68OSZXAbyesX2jieA54BmkV8XqMLw0L1SCIHXord+muagbnmvj9Fsmmj4m6CF1T7u6x/+oB86sBOp+hiUBIRF5RzpcoLE4g6CQOWqyhxILDVKFtxXp8kb2s4sbsc0zKGatOiO/FhMlpysqyGWqEbPLaWBMUOa28pC92+WvnG/grXcrD63SbA9af1tx5TWLKmoOp4nix4MbUY721Adaw89Yxj/31hxmXJf6ZiN3n7jJ68YDWpZgw9kklhOsDtk8JZvubeDKnmBvQFnOvFNLzonfMtr8XeDvx9gde+3ePP3dC1VprhRBve0CNEOJvA38bYL27wY2v5yBgcjxhdDji9ks38LIOrahDmh8zmg4wzYpLqy6iXnJ76XBn0aTV92lEPrNFxStf2WcQWVxp8cOA3mCTUqa4YQSOpSrHGOHhWo+wKzFFzdwCXshSafqnNk/+12yGn9TkeoRxAuJOG1UUsFkS2DWK5RC1EDRaIVI1qFVOUc6ppMRzXLxQIN2QYy2IpGShF7iqwVOPr1E2AvbHLS4OPsBGdJthabnw0DY9bvPhtSsk62sc3bzLmff8DB+6/EE4rOmbOYvhDDX3WIxLFsMp3TCiFBGO9vFjjaoLTK0pDhyElIyHY6bDgic+fIXOYx3ElkuFREuFVhpfRkStM6zFgmvPThjEgsooXCmpqEAZXDdAaEHpSZxYYa3hq68+hxeGdFkwrAUDavp+gIwGaJocp3MwGTLsAx7aVpR2gTIDfurDF/nYx34YmSSEUYjO1zm6XfGFm7e5fmNMJ0u42O+RlzM8ChpuE5ecm8/vsfHEFtLCpSsBrz0/I9J9bGFpnApw3Rjt1kQbJflUE3UihFli9UkFlnS+s/lJfxG2HRK/7c/9XgjNPOAvn+9U3A+/8ZNUCLEBHN1b3wVOfdN52/fW/hjW2l8Dfg3gfPOC/cqvfg0seJ4m8Eqe6GwQ9lKU2eVQJ5y+sGTr0StsPO4z25sy/+w1Jl8a4egL/Pbvv8j+8VW2vZp
F5XOtXDA6CjhzLWfzXMDw5pRe1SJqhZS2QkQ5xta4qmB4rcLrlGytrpHVGSIW+ITkuwVuAXUlWAZTvBR2fusu5Ltsf6RH+2KL7CDFkQrrSpyWwM5LqkqwvKOZHN3kM2+8jpI5p8JVRJRx/aDNY2dDXnvhZcaVYbDWZtNvcnRDUrLBF79Y8OjDHteHz/HKr/1rprWDLzLev/4Mp2IPGXi0Wg36/XW62w1m4znl8ZR8T1BU0GyHDIs7BGhOPT7g3A9fIDkXQstFOQLXkQhhcH0wUiI8j+TSJTY/9FnGXwtwWVIoiTUSYwzTYkGhBYlt4NoRAsPlVoBRU7xojYGVXLcBhypjXs0JzYiPntoC67K/CDhWYxaLMR9bu4LX9Xjyff8Fq1unOfijMeMdRTD9Aos7muM3XyJUCyZ5g3QcsenEnFrfoN31abYSRoc1+bTA7bUIz5zjycuK67+/g0Kzuh4iEkspDI2tHv5Wi9BrUZUFRlcAyN8J3jHbboned3Rn+YY3fL+L/F/2vqjvdt6JROo3+E7F/XeAvwX8o3uP//qb1v9zIcRvcZJsmn07MclG4vOhDw0AiyciJAFZWnK4OEQkCR/+icsYa5mnOS/vhfz2/+nR9jZ4Txt6gaasd/FFwdHCMLeCgCMmeZOr1w9wbYQqKyIvQOeKRT2nFQfMpjWynLE8gm7DooqSMDL4jgFb4ocWr53g4xAsl9R5SaImLHMB41PMXi4xfYtEEfsBZa2wtUaMFCIvGB+XCLeko0M2kphW4NPd0qw8GvDUuctMXss4uLYkywVx27LZXSVd3mR8dJNuUONbl8gcMdeWq5ObpNkKjww8WGhK2txKR7ieSxzGLLMZjiqYHkvm9ZTNlT622SVYcSnR+NrBdz2MNlhHgjAYrfA8D2Msq5sbqFtTYrfGdXxU5eO7DovxmDR1qOyEosyQCLzQRaoN9rOazKQM1QhUxfl2h6YfovSAaW45KnbRakETl0ceaXNtz2ft0goikhwfH+DoJfOqx6TQFLbJ1AXpKZqeoNQV7VbEYlKydqGL1zhG2QpbznCcFayF7Usxo50SM1pCnNBaD3AcjXA8XK/CjcGYk0SqdN9WE9NfqG3/eXngxT/gO+XPFHchxL/gJMG0IoTYAf4BJ4b/L4UQ/ylwG/gP7p3++8DHgGtABvzH385FGGVQU0BIVL3ElCPMQx4PPbpFZ1NiPIdqd8qv/i+/xhf3rqFtH133+dmnf4hzawnh5E2oesznNzjWSwLbAnfM9PodDtyLOMJy6+YdrCvYuCBY+E3SnSXt0ICyRCJCjWt0OQdtyFMIfJ+qAoyhShUmN8ioS8dVPPcHX2b19CpnHjlNVlVUXoVXh9Sp4dWdO7x2c59sNOXy1ipudUwSJFx4tEn/qYvYfpu61oizGY982CPpW8xxCUqjezWDZsJzn6nYigpy7yFqOcGPjkmXR9wYX+FcMoJY0k4SvFgSbsPWe1fQtYteTHnu9xVpR3HlvWvUQYnnePhxQGmqe2MULAiBIx3KsoJCY1sF4+aci5ceRkuPInMInQzvOCSZK3y2GI0XWGPAzMkXglcnN0h1jbYuJZbXJlMcR+Lam0gEbTz6cYutdo8Xry94+PR5wl6H5e0xdjJltC/YnV9jZzqh0ZO065BuM2ZrsEHDCZgsC5LI5/jOkrDVw+Qp+WGBLC1+X5xMnjznMr2bM93ZZ+WpHrbp0Nr0MK5C1SDEicdu+JOd5++GbdfrCTv/yTP/v7Xt/+4L385b/y33q8A/8NpPuF839/h2qmU+/i1e+mNDqu9VEvzdt3sRlYLbtxVYIPGQjYRB1kAdhqRzF5VOWBzOqGcLGo7muJjTCAIm4ymTwONyu0sjPyaaSAbaEKgQ3wFjC/aHR0SBS5pWKCuh0aCfcJI0rDSO9BHCxaoCmQqmxZyg00GSUSkHXRnqtNLySmcAACAASURBVMAXHnGzgZPlxL5gsjdGLWvCdkIYuTTNHO0IPvX61ymmFUHdYFm
GBLJLO5SIsAOhg600gW1QLypGb5bI8x7ah3xo8aMYPMvKoEfz1pj+SsjK+Q5X1hssjgTTXUma1VRSoKQk9DyiKKYAiATWb7F5LkDQxgbgRC66tuhagQQrQFgX0IBFIKjqGqqQprNOYTOsDSDw0b5H//wak/0F9cTSbkV4AcwXlr3RMcLzsHWJtQVQE3knm4lrBLU2LADfCLLKpetHuG6ErkrKDOqyxugAvIy4KXDUgNWG4uGzA4oSPBkTmzGOFxBsGALPZToVuNogZEZJE6kDhDA0V2vGByWTN4c0tmNSIUi2GziuxglOOlPFt3Dcvxu2/Sex8/f/fGL/gHcH72RIBu6TDtVlseSPrn4SC4RBjOv4UPpckAF1EnCznJCEDj/1xIf4iWLGyzduMjcdbGEJpCVzfO6UlpX2No94XSZqwNL6VFazM9zF93x84VLXkmo/IGvN2KxyxkVKKxlQ5SDiCt9xMWmBlSVGnDQseTIgnXlkeUm77ZPWgkwK1sOIr+/s00ljVqzH9XzJ8wdXuZvvkdSWzB3w+sEWP7q5zelOzqjq4u2B5xmcRU1eZBhjeeMzMxrdBqIQRKHk4K5idXULN7nL0uuR3omQrafodqe857JFVhnXXtmnzDXLvEIkPm7Hwe+G4ISc+8nHOHw+oziQRJdztA5QSuGGLhaLMTWe52ONwfcl+JqXvpKz2K84M+hTUaO1JEuXjCofUwUsphPQOa7j8ZUbOxwuJ9yc75yMZsDBERJVFwgJgQxwOWleMmVOGY7ZWD9F4cyZD5ccvjZhOnOpHc3cKLT0MOYIK8/SunCG1cghmyyJdAsvFHQeXQXf0HyyDcdjPv1/XMeNj2ifsji2S9yG45lDnU+IZhEPVTG71QI/iYjDE1VX1Ttr338W3yz230ro71fv/QF/Mu+0sMN9Iu5SQCtsnBwIgVYVUpa8bpbMD3IyP8HJHVp+m4f7azxzPqBQBS8e5TRCh8AVHE9T+o0eK62Eo5ECR2BqQ64LDBbHsWhzsiVeWVms0ScxaKMx2gIOAgFW4vnqZOQAPjqtoYA4iChNyTBdcDhfkgw8htmEYV6iOz1UWeC7mkCU4BjC2kP5JbH0iBPBqNRQO6jSRRvLzesHdFoNrA44PhzSjlp0I4kCQtkhcgMO0kNsFXPn2g2K1QGNEDY7faJ2ijAGZSxlqrEBxKs+VhS4nQHtc8dM3iqIL4c40qesUlzrnDSIOQJjak52RtGEYUjkOiyUYjJdYhyF1QV64bMcTVB1DSZHippKGdpyThWlzCrFvKqprQArkE6IRaCtT+T4NKVL04NW7OGFgrKqyUcaWSoq5VAwQvouVVHhCsuizlC5REQSP4gI2xbcgKJYkoR9rGdQvS69zRAvMDz0gU0sGulHXAwqhtc9hlnKSO1g9mBYlNT3SiGrPHunTPsBD3jHuC/EvZ2EfOyDFwEQxiK0ZTQZ8freVRbVHgdTiRAdekGflU6Ly84pmmsx/+GTfcbZnDeu3+CGGrGziIgtDLMcHdQIPLIipdYKL2qgtYMoNXUIQtqTyY9GYbXGagdra3TukR8IVCWp8gWWjG6/xcHhjKM7Q8aFw93MJ04L9ufXCf2Ayq4QCUkjSHhftMXBfEaaHTLwUoyzges16MRzbLRK4hTMPEF6ZBjv7xMHMYIGeuWIzV6HM+9Z4+7n5/TbbfaPc3JZUE6vs7PcI99bIz3VZb07QKmKUFiW5RSxbGJqjdcHv+fTdQVv/t4Q/VyTjScbuF6I1QqlNX540mtr7t3cTF3S3XLxpKS/3iLNM1Q2p3lK02mDzhpMh4Iqn2D9nJdGS5bSpRU8RLPRYtNajCxI6zlYg0ZjLfS9mEFnwNntcxhVsXn6NMs9n0gsydQhQgTIOqDhtxH1Lc50XKrxEfNFzOHhknMfOoVMXKKuR14UeIGLTCQXf+o92HDG2nu3sV4D3JryqGbzfT6nDBilMGWFcMS/TaQ2PpW8g9b99viGF/8nefD3k/f+IN5+wv0
ab4f7RNzdRsD6Ry8BYGuNrTX9uy2aiUOCxyw9pNSQqgVqdEAebzNdLFjdNEhH8NL4iFm1ZMM3LI1DiaKqUgKvgdb1iUcZhBhjUcqitQEp70WdAQxWC6SwUEvKtKZSGVWeE8Y+t25OuXlrysHiiI3VgNLdobfyNMEtyXGRoXwFsoGLw6VmQguXG3lFEoaIMERpQRy4OOECx42xJsPKiiqtKfIZvSimHivCRoT0Ic/nhIGPS4DSFUZXRJ5HUcyZzBMipyKOY7SxdDrrZHqJrWs8L0S6PlnoI80x2bHEIjGyheOAg4PWGtdxsEagSkMgHKTj0V1v4KxBw8Y4pUcUBSyPNLODOU6zYH9syBaaxBeMygP2C5dYNBFBg6bn0w066LpCWE0UuHTikDOnBqyvNimykmAA8705s9kcKUdI+TTav0E1n3O2u4n22swyQ6/jIoVB5Qon8KgLie8obCmpTInTCAjaA0SSoISHFBn+KmT7JdJ38RwPEbkIAfreZh0n0+ge8BfBA1F/93BfiLt1JHVy0uxhjcEagy4z1vwtfL/m+YN9ZnlNExfH8bhRHaGWDtnUYzhN2ZscU1Q5ylZkjRgwKFUhhIPS5cn2fbrCGhdT19SVC544GYkrBFqddMVaU2MqD7Ia7Somsxo3t4znh0xmxyjT5OJmk8PjFS6cafPs15ustRd4ZYZnNUGjQTMZ0Ek8RF4TNArcQDMu22xGPp6fsyxcwFLoCqtTjBGk+T5CeIjCw0iJ0jlRIKm1YlmWeALQBuO7zLNDOmGE50oMAp0WGBkitEArDyUctGMQMkeUAU7dRLsKbQM8z2JshTYGYQSmAun5WAVR0qR2LVES4fseQizIDgOiFYsl4v1WkacOyzcqlGpwt6pY6imD0KMyhkxJGl5E23VoRDHtlo+LRlVzwk4DPAV1yWweYp0OpbLoMsGTE5SOubN/zMqiTTatQdUsDpfIvKLl9knaLosspTAF7VUHP/HRWoBMkTJAiZLGIGZxvMALJVqdDHez9/bosO/CvTp2/v4z91Wi9YGo/3HuZ68d7hNxd3xBsinBgpASrCVtbOJWsHV5iw9dv8sXru2RmyX7WcaXj95g4J/HtzXtxoj10GF3ZsndgtBrE+QW6cCinGOsQRjFsgShQCiXWjUovSamrvCSGJOVlFmJNgX5sqSeL7k9nbAzyYmcEFUPOc6OqOuUnZ0e7+uvMn19jw+diXHkRfr9HpEX4VifPBtjOc8z75+RxCGz+YLUlNgAdm/BrTsl5y8vOdhb4MglTddDCcGsguu3a8KhptXto8uUwoxwUVC2CTsVm62Y2uYUlcQvO8SJh/RqGm3L3o2cTgVyvYWbRJz/0Yu8+ekDzF2XcBUy56QaydYlN1/IiFuKtYf62BSEq3HjGK8boooKXZeovEVuhnTjBoPtLv/sszd5Ze8agVyy1Wxyrk6IKYiSJm0/5FKngfQS+iunSbwQIwuq0CUvNUnY4PnPv0lHCaLAIR37zMuXsEh8P+DFw5s4wuc4W+K5AkfAe5ShPUjwlSWPLGVTs/XwKY6XE+JoBSsVeBajFMIG6ECSbPXQVY6jBVYpsPdKIL/DDtV3mndC4P+yRXzxA8N3zQiC71S874dkKtwn4i5xiNzByYGpsDYjaBqMqjFjjXAt2nWorIOSAT1ZY4Tk2u6Cx89v88TGHV463KO0FYn0wPNxrMZSUQmJsgJtKoS1eHpCvigouhG+tSAMQoOpa/LKoy5SFlnG0XTGMqsoWVLJGVldc7oV4WSK1UdjktUOFxuXiVo5i6JmOZmiRhFhu4tlj36yShi0mNuCXtBg77pmOplD2WR2GHBnltMONI12k1ILFlnBZJ7TMZZC5Ky1Q9aDkKkp6TfbbG6s0U1qxjMfx5aU5RLHbeIDda1o9jJmRzUbehUpYwwzHC/k4Osl/qmc3iMNqlxS5YpWlcBhhn/Rok0KpSLLK+QsxHNqZB0jNbRWFE7W5trLQ27sfxbXrfHEJvulw8+
dO4u2CQetPmVRk4cNVhODtDlzpdB4pHnKzICZHmIyxWBrhdQVsH8NlUdEyYLlwoCuCUKHMFBYbQgQbK3GCCtQ05qi0ISyQRS06EXgRS7KKETtIYWLERXWKbHWOZnB74D3/7H3ZrGWZedh3rfWnvc+83DnGm7dquqq6oE9skl2N0mTlERSkg1KsKRIcGQ7gfwQBwGSvCQIkCBO8mQnCBLAhhM4dhJRiiPLicTIoSaSYpMiu5s9VtfUNd2685mnPe+9Vh6qKbQUSmKTTXax1R9wgXP22Xfv+/CfD//997/Wb5p3+/J5M2H4EeWHJfgfZmZ+rwv++8nI7xWxwz0i92QS88ZvvgLw5hdSI4q7ZYMoiukEW1xo53zl8Bp/vOhyrLqGzKaIasJgz6V7+ml+5WF4tldns7qO6ZVMwpBJ4mIRoQ2LDAO0wrU0tspRRsFBKhjt7nD/sQ2mgynjyRhHKnqDPoUKScoJjlXFyeZ0q10+9zOP4q9sEByvs/3qhHg4ZL1ao26EqKjKld6cl65e4zDLyZyUhlNhnsXUjTmnKx6+kRPURnzxD+6wU75CPruPWhCQqB2GUUz06ginssQwUzx8bIN1/yQn2iZDq8qdyYKZsug2HGahYrlRo3ZWsrbVQU2rDK8PyRly7bdvUFtqMJjcJropqDtDrJsm5nwNbzXAq0LRPiQrLIa3Ghxc3acdWPiBJo4nZHiYzRKrWhK+7HPxm7sYxoJnNp6m3lmif3iVerVFKurszS+zc/glxnnKb8xyUAXL9eOc6p7mQtOjSAeYwuH06bO07nd49VWY7h5ystnhvnqbZ2+9TulB06rh64KnTp3AwiQtBGlYYkhwozFlTzLtDcge62IvW4zHPTq1DklZooVGCJM8y5FCIYVAWhIQd/fl58/vc/9R4a2Cf6e3JXi/3PKnuddLLW+He0LuQguMN4cYCwyUgmyUk4YJSRQxHWqW3A1OGLeYiZjhtED7Nkv5CsI74OvXfT6y9VF+fC2jMoVQrWPYI9zZFEyTUBXkwgIlMQyDWq1NLxkwmMwwrSqWkOyMEmxb0/AsRmXIaBFSpcHp1hK1oMGZD68il5dISbj9f05xlUHr9HFKX+KutWjdt+Dp3SEdp+DO7g6/t3ebw2gPkw6zQNNofhKPPlmmCIw+TWOZqq+o+S5nV7uMRoA2uLazy8nlGjWR0a34XJ+NeLl/DSENVmarzKqaojBYPd2htrxCuZKQ12ZYA0k1q3Lt1dscXethZhkUJUPLp+ZW6L28wN6e3x1xN9dMRwui7Dnq9QUrpx7AXTXxyi4JEzBzhO2w99qQ8w+aDAYV7HqbLISHTp8nS1IwJ5jOcRpel53JIZbYJSpSFuFNbmcHtMRTbK0c5/7zy9QvnME/vuDyV/6Qs50lpHd3Rm27XScKE+5rmnTabXyrzTSO0VlOtWIgbQHKxkCjagHKKEnnBYbjo7WBbUKhFIYES9oIrdFvlmKUVrw5KBH956xQ/VHmneiceV/sf5rvV+z3UtYO94jcy7RkfmPK3W5ziVawmM1JsgilqjQshyiSfGz1DDMZ8/LBLaZ5weXJbY7SFk3zFb7y8io/+9nHMTsJK5FFs9sliQS7t4eoNGE+npKWmjvZjKPpDvuLOVXPQ0d7fGvcY8EuVTZwKxU2GgWukfHxsx/igQ9X+ea1hOH8DEveguntEqlypmmfX/+Xv02UayrmFqs1l+WKRHm7RH7Gj33wGTqtDlXrCtGdklAp3MDizOojnHHr/JzTxhA+SVSyvLnKjJzX7oxYNXM6BwteeGkHw2tyvrHM129fQpsOI31AvtjDLM/Qe3WfdDdn669foHYsx93K2JlMqAiLOJ6zsrlFrZYSDwsW8xlHwz1OLG3Rvn8F92yBaTZRcUhxveSrv/4ypxebZMaESrPBzss3SQYer4wv89qghlAZj20co7asOfuzF3D9BG2tsLN9g2vPTmnLJ/k7jzdp+Av+1T/+Igf9grXlBrZnMYsMWiW8/Gu
7yFgTi4hxXGWYJdzuZXRtB7OsMJ0ajIs7KF1iGDnSDJBllUxZ2K6FmGdc/tXrfOCXzxNlC6JRBWGUuNWCUgiEvjv0VmsJSAQCrQ0Q3F2a+yPOdyrPvFXOb0f094LU77XSzHtN7HCPyN0wBfXlOkJAkaeUeYprN8iTKpPxEYsyQIkSoaBiOXiWT5hpcpURphPmuc1KNYK84GB7TKvRJu/B6jGDRuQzn1s4ysTKMjZyQZkOaHg1bNvidKXO7fkONbVERaQcq3VYO9GkmS+47+Mn2Ss0AVPWT/QYXDWoNGLGgz4yNmhSgPbRumCSFliBxRn1EHFZouMxxeyQsThNv5xzwla0anWUHVHzPoDpleSzEiuZ0vvWgJ3xNl++9AaWtcpnH3Zxi5ib05w4r1CVFrNsRqxhIgOENJgc9FhPEha/aXDhMxbF0EQfOmhDoW0T4WqioEF3eYZ3e535gaQoJGlU4KkqwhTEmcatZKSTjO2Lu9SWuxzcGlKRBlNzSJbMkQX4pGjnBJ2tNgQ2WZoxeWOXgxcPufnKdQI/oOyfwQwsPvPv/gTl/hFf+Z0BpiPIZwdc+f2QfJxRWE1emyRcHn6NoijYDAK2GstMwgKZCrotB9uVmKbGKA1SrZlGIa6yUTonNTXxIMPZ8NHEaG2QRSaGIRCmolQlwhRoIdCaP2l05T2Yuf9ZvttM/l4Q+73Ge1HscI/IHUdSnLj7BTTxcUQdM7RIRjOMdo2KCjk6jDgaVKmYBmveiFiN0LkgqJS83t8hUhnX9hd8/OEurz8/oG/7DArBg09ssFlPmN0JKaIY16rQH7b56nM7OLbJ1voKxr7H1toSlcBinrjEZsKnPvYh/vh39vErHWSnyhd+t0d32eKVP36DJ09ucHHRY+quIGywAhe3ZmA3Ldr3+3QbDvWkQRpreqMp2zs3yE+f5ShSRNMhm2uC8OYIAxsnMPn1l64zSsYcLQaE2Tafq/w9zPZFrt8qWUzqPFyrcrNs8KWjF7G0ScceoRKbed9i0rvEtYsr+NVVHL2gZrZo1jRu26M0ZuztDMgnFuGsJLkaYggXp1rB6YCDTxRmWColHuZksxjhOLyQp+yOdolDxZK/x9kLj/LkT9+PrkP/+Ssk4yXi+Ap1u8JTD53BD2LyYsJ0oTl6uY0rWpzealBrSEx5xPUXFBcPr3NrcUCsMkwtaDlwofYwgRyzfqaJ5VhU2y3iaUiRCMKwZJ6OWFqrUmaKwikIdMzNF/bYFMcJtkyyHGRNgFGirbt9/GjQJUghEH9SbP/Rz9zhL3+4+k6J+8avPvKn3m/90kvvyHX/LPda9v69cK+KHe4RuQtDUK3VAFC6pFQluApDegiRYxsBy9YSYXRAlhtsBB3KrKRvGnhC0ZY9kjxGLQqmc4Pzj3nEh4K9YczhrQrVpTqdM5IsSbANk1NnI6JZnVRptj4W0LmesnZynTvDgt//f3b4zM+c5gvPJaxqSbsreP7qiGyc8tpwF0sX7PZTbo0jdJJhaYGeHDHre+ybTY5GgtZSxv3nAmqOwhNzHjm7TjwX1P2C5aUlgnqB41i4RoPRsMd8+hoH0zFpKch0SmBpGm6F4WKHjU6TRqdF4+B18iLC0Q5r7RPYpUWz0eagPyWRfQbDNzhbX+eBYy6lsJkPxrQ3TRrHN0m9BcOjbcrEYDZI6M4LElliWII8BNMwmKcJhYbxPOFWNiNMUrJoztnmCcyaQWRnZKOC6a2AMD5gbeMkpjPn9lWII5v1Y4JGzWB/oMjqmo3jNVSU4/kb7I4vMormZEmK6ztIrWj4FpXahFrbp76xAragSHK0WVJIjd+1cM0KtU6DdFZSKogyTb1ionOQbortOqgiAxsMVaFUBUootCowTeNPOiH/KmTu7xR/VuxvPfaDkvz7/GC4J+RuWhbt9VUAiiKnLAqyOCRYU9Rjl9HrKUrCuYcdFuMFt27AsWUf3dtnXmnyiyc2eWX3MovFhHBYxz93HCn22NpqsDfYZnjdYHS
7hunadE4aOLbJ2qlNTMND+hm1J07yxX/zAs8+d4WbvYSv/6NDzruC//Tfvw93dcZv/+FtTi516L+xz4lmiyu7t9BMWLKHGFJhmy00krLYJrx5kWjb5ugFScXwWKus4VQ2uJP08R2f83FObVhDOhGJjDGVwd/90JNc7+3yzZu3OL9xP4eXr6CSE/zHP7lKxXO5di3E8Uyeaq5SdZvUvQa3B32ePxwhVIwqCzy5Qi8JeXFnxLLv8+GKz+HMohAxgefSbVaYTGL6d7ZxqhqjZtFaqVEsckAyTSJMR3O7t0+/DCnykrpvsLVW4eM/9zTUPK5/6VsMdhXLK4rOsS7zoYvPLlmasndUwzAcahUT39E46y5R6HH14gEHsyGOZRGLuxOjTvodXCGpnVrCa7fwlx1mkxQ9d8lFQm3dwV93kfUKltlgupdRzkzIIkajgvnlESt+FaumyecRTqWCt1wibY1hafKypNAa+e2Vqfq9I/cfVGvkd5L6d3PO9yv8dzt7f6+0PX4n7gm5oyGcJKDBqVg4gU+WZwg0dkvj1QvmQ4Pchmq3jXV7hFvrUJkPmeYLpvZxPvroI7x+bUoUjZj0KpxYcnjl2Zf4g8M646RgpV1DWDZr/Q7Nls3DqxbTJOWl/6vHZJgT9efMDnsoNaWQA842T1EstdG1AEdC4AsqpiIwDDK1oCJCfEMgpE+IT5QWRJkk0SZIk7qpKe2CdfoY5W0u7yZMY9hvnuBMt0IgLbIyw7FanNnwCcoWtbqL63bAAOYxfuUkZjXDd3I2m6c5u9JCOi5fvfgKozKhUDktWcGzTMZGSZokzOUepl5CiS6OFvSGGcov8QwT2/bJkpx8lmPbDvkspUxBeopiVIDKyFkgdIJQMb7RYOvsGqohkLkBcUachfj+EgtTM8fHawWYiwzDUGRWjN/sUKl6FGrK0aTH169c5E68T6FzKA3qts3J1SVObKxQW+ti2BZZUkAmKchxaiaVlkUicirVAMdykcOCdF5geSUqm5HHKWlPY5Ue7SWHyWzGaDfGqXl4VRukgUZRUNwNrx/BFap/EffS6tV3ooTzbgv+vco9Ifc8LhlemaK1xnAdDFvQOd9AyYJSKOoPSJw9g+G1klhDd3OZyeEA26/QXIz5wuUXuLD6EOcaBmXpsfPKIfnWEg9+5sO0r/whL756jS+9tMMktzhUFko7/PyDP0+vvMQ5b4VPPeZx5/kBLQSvDKf8jUfPEpw8TSQ26D9/wH3Ha1j+mKWqT8UywIxoyBzHaBIWHofTIVqGGMacNpKOucKxtRN4QUC12kZqk196tMfNO6/yrf2bpJHLg+3O3X1Q1Jgrrxr4FYmXDbnZE9x3pkWZ+1y7NKSzfJyzH4Jxz+flr18GYdC0Sx5UBlPLoypb7CyOsMuUmfTI4gWZLtgetRmOY06uLrNzNEJZJTorKcKC7ABWsoxs6lLmKXsTxayU2GWKMEsCHWI4Bg+crHP8p86TeSaqNyGdxNTqAqcp+N0vvcF0otlYliwvrbO10aHiKww1oyhC+sOcnZt9DuZ91oMxp7tnCWrHWV5aRdZiaqstCkNAqkBbmIaN0ZliNypoP6C055itKkUmKVQBUiGEgS1zhJDoRYVZWHJ0Lcarlji1NvtXEuzagJMXKghTII278SXeQ5n7t7mXBP9WvlfZfzuD/mFK/r3U0/6duCfkrsqSZDIFAUZkYQc+s4MYb9m+u/d4JcHq+Di3E7K8JKhV0Upx7fCITAuKcI/nDwTdE1tYYQSBYtwbcScIWD71BM9070Naz3N9d0BznJCplEk0Y8MXSLqoeIm8FtENlvlbj59k7fjDXDtqspjEBIVgbb3KcNKjYjtYpokpCxxDIvBYZArXibBERFVUeHTlMerNJpVOFekXeGsByiho6Ps5vrVK5ctv4NolUVQgDAudl9zKKpwoIqI8RcUR1fomXnPOC5cy1KygMVdI7XDu5AYqzRmby8TFmLTM2I1nlIZJGscUOqdbaaLJuXE
Y0R9PMT2XjYrJ7eGMw8mcaab4gGUiywxtGnS9NqNoSKgiityF0qJqBWgF5+/bQDUdyhR6t2OKHFr+JtpIuHXjiEE/JM/apHkVv1XQsAWdehVLFnA0IIozolHOZveTFHbArUGEaUzZXOkgsVFliu/b5Ilkkcxpd2yUZZAqid+q3R0akpSUSmEZAVka4js2RWmQhyOULrEwSIdw87UDcCJOne/AtE4hTco37a6LeyLM/0ryduv172fx7xz3RNQbhqB7xgMBybAkmS+Y70FpVAmWHKTrYFZd3EaMtSjJREalboBKmCQZS8JiJ9zl1b0N0saURlLgYZLeStHKpiJzLqyfoiE30LPrTEVMqwqPbz3M4a0p+Rjmk4Cj+ZD2+WfwLixTtQT9g+uc2OyQj3yG44yq5WObJrZUuKZNUjhE5QBfKjrWKsfam6xsXKAUHizv4HUbeKurVBs1DvfusPnEBVbfSOnvTRHOhN5sgrICbkyv8HIocVTBurtPrs7TatWQ4g6D2XVWpqcx1YzV46vovCSozEkKQZEbTHcd2oYGJXCyBSofk6uAy9nLhKnCOqjT3mwSWCVpNiXSmu2JQ1Uu4ToQFXPCLCRSJWmUEDg1fFOSKTj/wQfANhALxc7lhEoBdtVHK8Wof8BocYNsd5MwE1SbHmlWpep5CCOm0e3i+7dxArg52qUyCckLh7q5RHvs0Wo2sHKF6ZskYYkSOdJx0IZFUggqjol0HYrhAikFtlVjNl5gOgWWIxBygVXWmc32mI0Mohmsn6xgRgk3LvYROkPoDBBk8T0+reN75F7N3r8TN371kXtK8O9E1v5fnnr0nq673xNyNysGwYNVAIIcRCrY/vIdZlenTEY1guWAoKHRSWiCXwAAIABJREFUqzb2tMQ8KlkUFqdPbhJfv8aw8HDSklnyR7zWP8/Tx1eYD8bE+3Bw5OD6BvVqE3fJ4lf+nUdRkcGrLw24fmnKp/+DcxhZQH1yyMH1Oq0zJap1QGY8y5X9MxRUWR8pXL1O1XWZiyGGtIhVg91sh1JEPLbySdY2fNKiSu0Bk2AN7I1Hsdsl2pYYhY+va0xtj9OfapH8ZkT39Ef51OkZg+0exbNv8I39G/TKKqZ4iIsvTBBtzSuTkEU0ZDQzQJmcPlOnLg0qboPDccrOeIfXBvuU0qRr+3zm6SfpXbvDS8NtdKaYlIdsz55ndLnOIyfPYVgFMpnw3NEeo+w0J606qda4MmdWZETKwCs91oJ1Vpox/tPniY7gG1+8yKVvjHl8y0fV9vDUccpwn5RrXL99SG/6HK73OBvj43jxBQLHJerNcKYNTnhLvDC+Tq6q2A7oQtO/FWOqGZ0PuCRxSZrmtNeriFqNLEwIAolRr+C0fPIbIZ5n0987wq6YGAFIR2MkHkl2yHBb4Nk2p9dDijzk6OqUSW+KVC5S2QDk8/jdDO97nu/mYeo7dZ+3W6aBH26p5r3EPSF3BBiuCQJKWVBQUFlaZnLYw8kt3NKgmBSoQqENgZY5juNT9SzWApfb8yOybMGjZx5ChGMc4yQr6x5+1cZra5zAoNZuIF0frxGAKGjsh9QCiQht8pWczplHCM4M0EGXoF6jt3+JV16Yc0Ve5pfPtanqFUJrQT8qGKSS3NAUcoFXVKk3XaZRRnNd0Nyqo4JlRL0PMkDEitKaUllq4bowjST9VKH398iNNWpLLZ68cJXeZMrXFxnjeIfbsU1tYZDMjjAjm4k1wwti4nGEXzFIS4EtNa2KRW1SoReNWLp/k6f/5sPcedaifn2Jq1deomauEOsaLbNO0zM5LI27IwWNjN58RLWyYJSmnKpuYBR9Ur1AFBEV2eLJz3wAFSVcem7A9RcUw/EeyewUhkwx7YS6aRPmK7TsOWYSY47nuG6EkSaAptMGx3SYJxUa+xaVKnTaK8zjnPUWRJMpUc/Gsh2kmaItk0LfXdCW6Zx6xUaVMbkoETrDCUykJbHrLo5tEiVTdOqz1DTxggKsBuWiIJvn2NR
QRoHpqbvbEbw32ty/I99v9v7DEvtb7/d2H7q+G/X49wL3htwNA1m9m7lrH0wklXMzWuvrDF4pSNUcvyoZ96dkYUk005RhiDWxaQdnub9T46H2hN+6+nWs/BSrFyCegqU9itgh90qSOxN0cYQpJfMw5vdevE2cTvg3l6+zSGv8Z//Nz5P4Lkd3ppyYSdZnMzrzG/ziZz9Lun3ETpwzihP+6I3rRGpMw5XUjQar9SbKhQsfbuF3lrGXV8iUiei1yZKY6VhT8WqE4YB4MKH3XA8jzsiKAamWHG4XWOUWT53dZGn7axj6fvZnd9hPanzs7DEqFRPPlmyeWievrzDfz6Gf06gkbNortLzrjBbH0UmDl79wi0c/9xAnP+XyE/LjZP19rv1On9kgZxTdZKNhM1cZk7DEMseERYtMF4yzmK3lY5jFARfOnOWRT/8E/UXEl/+rq4x7h9jpTX7swjrLqwZhVoO9A4SjOSuO8bc/7HD89CmM9ocYDTNeuXRIlg+QZk67EnB87QN8IvS5enSDG3uXULrBa19+iaa7xDOzR2l1qtTPNEi1RsdT/E4Vf0li2JoyzpCpyXzUwbb38Gs1tA2GtkjHc2zt4a6USBmQjHPKeYFKS9yqjV3xsTwbNJiW8e7G9w+IHyWp/9l7f69dNW/le5H9e/0h6lu5N+QuNIIc0AhpUKoSVUtIVYzVVahDzWJsIKSmmCqivUOiUtKuVWiJiN+8fURqVvlbD32cmzsHmGSQO5iJJDBNBArtmyigzAvKsUQXuxzFIVF8i6rV5Rv/osn/e+kSc7HEzz10kq55jv/wb68TLnx6qWAwNsmtGRV/waZbZdlrsz89YtltUF+RFMUaqaoQ3s4Q5QwVxogkYTAIuTG6jp5NMDNBEYbUqxVWNj3yMkGIGhWrixcukPEjvN7fB53QT3MunHoC4S2wKmvIZsC//lKfqzeu8WMnz9AxK+RFRNWyqB6vYgrB0rk2g1lCpZviyIByeZkLf7NGehDSe7XJ4GBIjQBb9Qh0ihEo1ExhyYiK5fHwhz7NzId/8RvP88Byg6qcoGsZ59xNzKqLXbNZPSGZjKp84vEK0cRlbeuXMHzFrcvXONg/orc/J0tLpICJITlyPR68z2cWe8RJyLwI0ZaNFqClxFaKIk8wioDl0w2UJ7ACiyIEo+JRlorb169w/myDRZhiFgKdRnjCQImSvACpYXo4wJQWnulgugGmYSO/vSskP7pb/r7TvJtSf6d5uxn9D0Ls93Ld/Z6Qu9aCNBeAACHQWuLaPmVqkqcpRjvCKSWOCgiCnI7TQc8jDvZm7OVzPrhms71zi//7dQ8TB69SoWy1Wf6QRb3pY6gm5aQkmc4oxR7Ht0qejB6gc7DLEyvnOOZW+ScX/2fycouTQZsnP3CMF67u87/962vsHvw+u5nBo8stzm9U2cxO8sxHHkOnM86MTpBmmuHthOHh69iOJM8N8kVBs1JjdQlaQUJjraSy3iVSC9LtGlqY2JUOQs1QMqKoKPympKNqbOQx/cUe9xkbfO3FERc2Vtn8kMV//8+/xFdvfgXbr1JPQp5Za3PiZJVbxYP0wpKt+3Nu3RjyxLl14jciLj03YHuxT6+s0V1a45OfO8NG9T4e5iP8bJQyvXnE6HKCt+xTe6JDMlBYlTZb1UuYwxMIOeXcY0+hvR2ObtXI6NGPFgyv1Jgd9fno536SZNpGOhNGOwWGsaC9LLD8kDKfYqgMkTmQVLi132W9s8kk8hmP7xDGCZk14fmrr9EJAp5ZegKnNSfMA2rtgMUwxPbAs3Je/+J1ltZ9/HOrVNwCI7PJZgULdUARpRBaTIZDHDvAlDZxmhDPY4y0QBryzXmx751G9+81W7/XpP69Zu/fiXc7G79XBX9PyB0NUrgAlHmOLgt0oSkSQaYKmssBZIqol6Eik0IZVB0bacaYhcXzR1NMx2PdEEgNqiwQ7pjAP4FlmWRxgrvk0dhc58Zlh939kjvzV5BuC7On6PuSn/n
YL1OEQ+Z5zO/+wet85cZzlGabbr3K06Lgya1TRNWA840xlW6FIi+xXJsiXTDYi5AllDMLOwdTlWx9RKBch1I0cTsCZibmNKR3fYhZdjFUH1UaVD2LxPVwbY9WnrGYxsg7gtKzGYYjVGEyG3nEHGCZ4CQRzzxqsbJqsydX+PxXv8lU7vPx4cc41szoH8Q02y1S73VWHcXVb32FF19UvHbt45y9/zgPfbDKmXN13HMWFZnRecxHiRSv3aasFCzGZ3n1+q/xkSce4ujgNtN0zOVr27huzkE/p1UrqVenhP2TVE9BbBik04TDm3UW0ypx5OC7MfedquBKQZ6HNGSL2Z6iom9yLki5qU0KqalWG/gVl3RgkaqSk5tN4uEcKTU6T9D7DQwvYu1sA9FS5FaJXUjycMFkvsA3msymfcq0wJYmSRoTJymmG1BmkvLNvWW0eg8X3b8L7jWxf5t3UvDv8//n3pC7KFGM77407g5psIce/eEYv1PBqEtUUeDGilwX6LnLMMkplCJOZqhIM1ElkeFRky5LdsSm+TByVDIjwXENjvpvMLs8JIpsOt0OP3/2UZQCb2bwwitf4uqrTcJin8Us4GjxLTaXbDaXaljKYav1FLN0nzwt6W58AGnaaGz8VYNoDsfcFpPBjDjKufALLYxqi4gUWwf4pxw0HvmRwnIUTsOm27QYjQUlGclUUDUTMreK0Vxi9T6XzZuXGJVDnh8vsPY9PlGd88kNh4dq67StdZbXTtHPNV9/7reQ2XWWTQFpneffKDicm3z6s0uc//QmZgobZzaYTY44uPIsNy/2+d+/EZPq86A6NFMHlY+wzDpHasL2YAe/pnmgbnPl0nGKcMg0OaK0C24lMDm6xd5gmZOr67z0R9tUvt5gVss5Kmf87rNfo0gjHjwmOVF7Cs/yqLcFCIvtq3Mu3RxybuM+2uaHMe58i53IpzceMuyn6MyiWXPRMsBtaurHHLKJ4uWvvcCTP3UMeW6FwtJYoqSclWirQtVbZjIcoaMSV3oki5S8UIBBEsYIQyGMN/dzV+W7GNzvHD8qbY9/FbkXs/e/tBgphPhnQoieEOLiW479F0KIPSHEy2/+fPYtn/0nQojrQoirQoif+O7+CIGjDRxt4AobI7a4ebHPYiDRiaRMNHkmKJXCNiVpPKcoS+ZJxDDLyfOINEuRakbd0jx4/jgXPlyQ16bUViSBVyG+ZRPu1JCqTqW5StS0kRs+3K85/5FnWMynJJHgaDRitb3EqROnOf/Aac4+8TDHPmpw9vEmFavBdNQjzwqKvCQOU8rcYRjGHAxDNh5qIpprxK5LnlTxGl3Gw4LFLCfXBVGcY9c1sqpx3RJRSspUIbSJTnMKrdG2y3q3im0IonxKb1FSxHO2Om0e2jxNt9Whc7KLaSQ4csHJZpf1SpsiLwnjjP7RiG99fY/bFzOOxorOheMsnX+QpSf+Oh944qd55sJPM5rc5vLOi/ze7peZqiMePwMdZpjmADs55P7VVeIiY3veozBCOoFPk5Kq7+DX5owmB9zcGxCHA1QoSScDkniG1kOaXkKFKrPDJtJaovv4CU6eaaDllNvTMUJmCEsxjQ5wrZDVeg2zzJC5yfDOVfav7jB4ecDBH+/jpHN0uw2FQJYlMnfQWGgK5vMps8EEAxuVQVEUKF0ihLp7D5kjpUZKDeI7r1D9YcT2u829mrW/zw+e7yZz/+fA/wj8r3/m+H+ntf6Hbz0ghLgA/AJwP7AG/L4Q4qzW+i9MnaJBzuVfOwAB2RzyVFGxfWxXk8UxbuaQJwkIk6xMmM1CesMFmQ65Pk/ITIFpGZwIVnls9T7WnzlOuSY4HjRQL1VI5imO1WClswZrCkvn+HaNMtK8/JVdbt4+YiAiTlQCHtpYZf3CBU48VcNeq7BIxxjmMnGyYOP4nJoIuP1Cj3mkKHOBKQTZfMojn+3ibDSJSojDkkbdRCdTRtcTamuK9lpA4kn8lVWcmiIMxwRakOaSojCwc43wPfJ
Ec/r8GkfzAxjssDMdE5ZLrDfOkC406xd8qidqdOeax+4/zbXbKWEk8bTkdEOzVDmkIzV2r8F0X8PuGrbtsOq2YCXgzEbGY6cV22/c4FrP4InVj9KfH3Bm6TgPrH2QM6d9HOkSlQfkMkPlXW4eDvjIfR8gXjrD6zeu8uo8YyxukMddPvHJJWp5k+1mTtUNWHc+y3SmCI8OsNunsR5ssvxkydblBr/97ISz3WUWSciKM0Lap6kGNo5IsMyUIHTo9yZk+2Mu3LfO/q6LYbsoI6WIbYwS4r0eo6szejcG+IaPTiFOFggjw7Ys0iIFQwASKb7dJfPnlmV+4LH9TvFezdrfL8384PhL5a61/iMhxMnv8np/A/h1rXUK3BJCXAc+CPzxX3iPAkY3EgBc18c0JKUckcYebtEmDUtQgmKeMBlPiaMUrWCealpizh1ASoO1zjpr6wbGaoDsxGQLG7NaUOQ5flOCk1CmGitokk4ltw+mvHTzOt+6cws/WEM0ztBc9+ludbDXXGLbpNY8SUlBO20wwcfAIDg2oRIbzHfG9HYyTjxRxT7uIqoOxkLg2Aopc8o5bKwtM9K3yBML07UQpgWmwKqaCCUoFyVZnuGWNqZhoNFUGwFVy8cRBVG+YJEpktymVAq3GSAc8CqC5c4y2/tjoqnCdku6rWXOnfTw6w3sSoBVJBiBJi9jsjTBtExM26Oy8WEePPkox15/gzLTHGCyQhPPG7BY2BzIDiutGse66wzCIVnU4Gg6pNuG1RXJvh4zD01m3ozlriQoT3Du+BqOOUFFPsP5AK01R3dCeMOm+ZEGy+vrxNkhmTDxzRrCLEgcSVE4NFsmYZJg2iZ5GuPYNsP+jE7DJJ2VSLskjaYYqcX4csL4MMGVDqY2SKI5RZnjewZFkb85qENgGBJp/MUtkD+M2H43eT9r/6vN91Nz//tCiH8beAH4j7TWY2Ad+MZbztl989hfjC5w9RwQqHSK4VoQu5Q6YprlWMMAVeZE4xEqjCmyjMFsxLVFQs0JsNMBOjY4v1bHqh9HGAF7F1MCQxBN+kidYDS2UOEOZcPnS1+7xbeuTvnU8Yyd3i0kY1bSDeqLhO03SvyTksXrBisf9Cm1QTRVFCS4vqS0JM2tZfrbQxJ8lJFw7JP3QcdmNooJag7KttCpoqwUuLWYIGxCLtCxpJAFCBevDobhk+cho/kC225Q0QVKSIx2l81TCWePFN+Y3+Jg9iC1kUmtonGXa8i6pr21gVcbI15dYIvi7j4tQuBWT5DgsHtrBEqh8hFaaxzAkgJbmix8hzgbMIsGFOmIy6/vMDYUjbWnOHaqwSd+aQvp9rn4D19DzFs88sgKxWTIpdsRk7zJhc4ye3yLJUsw6oX4zQp19z7SLGeaTAmzHnmZUO4qrn1hifM/fpZTnzxP9Ku/xzzN2agEpN4GX96/yFzkLFeXSAyBURbUmh0Oh2Pa7Tpmo8alL16h2m1jmoLxYJ9oO8cxY4wiQukOWRZhOBngkitBWhS4vo9p2ZiWBZq3DO14F2L7ff7KcK/V3b9Xuf9j4B9wdwrCPwD+EfB3384FhBC/AvwKwIq3hFm82ZNsWBilTa5ShABLa4q5RhUCnZgIZeOaPWbJlMIwmYYZtumgLUnLaGJVa6iJRTIZ4bYsdKlwgzX04hZuzeV/+a0bbB/e5KHmOovpgjzPOFvdYsWv4ns5rigZ7x2RpwHVVR/cEMvM8WwPYYESGUFXME9dnN05a5hQdcgLgcSjIMN0HBQWZhAhLHDKKjLLEKUkLxNUBra0wBYYpqDMMvJCI3QJhkR6AZ2VKsu2ga1DZlFBb5yz2hZIz0K4ErfVIl3MAI00SwppMlUllwY5vdmEXv8OaRrhuh5SmXgEuKZNxbfYtH1cYxmZlPi+yYMfbFIUEePdFxlfNPjD/2HM+rE2y6t95mXIS6/3+MW/8zjnh1P+j8+/zjzyONE6TbMyoUwzsnjOqWPH2N+bEosZjhDYliJMh6SzBtpQeGs
WuVaI0qFZr7M7TgizQ7ygYJp5LJRJpCRFkpGUmsFihhMIGE3pDSLyUiKiFNcyMIoCSwSgU6SZYdnyzSEvJnlW4AV3SzJSGG/W29+W3N/R2DZrzbfzq9+R76Uk86OUtb9fmvnB8D3JXWt99O3XQoj/CfjCm2/3gGNvOXXjzWPf6Rr/FPinAOfqZ3Wef/sDiZUJhGHi2jZlklHoCRQWsgwxrYCi9MhlSpoUmEpjmRWE1ChXkFkGV7ePmM8UJ0+tMaDPYpqwXHe5s6O5dP2bOKVLtTJgGM7o1o/TcGr4QQtZtai5klxNKeKcgxcFQWcJb9Og7mpUXpBr0L6NWw/wN2YsH+tQWJIiKzCkQtgOSZbiVz0Mz0QlAmkZlGmJ1BaqTMjTGMt2UJZA2hKtSvKiQGrz7oIu18NpBnSCOt7oiFkaMpgGONW7M0w1GuHYSMdBCo20SmZAOI/Yu7TP7mCfKLqFqRU16WILSdUxcB2LoOJRWXuAk8ebHOuaxEPJjUGPKDWgdowVR7GYvcGVy3v81E88QvloyWJ0kyRp0vjsFltfv03vcE6ndgzHMVCGSapLltZr5IlBOD+kattM5wnDpIeRNaBUEIAnTJYDQTWoIOfbCF2gtaA/hYIZWVFi+BVG0wGrDcn4sKBVdfC0wY3DAxo1hSM7aCVwfJskScEocZwaSZpSluXd8XoIBPLNGR3iban9nY5tb/XYD32/4R8lsb/PD47vSe5CiFWt9cGbbz8HfLvb4LeAzwsh/lvuPnQ6Azz3l13PELAUVABIspTFdE6mFKltYDgmZQGWoZDK5nZ/hmEX1B2b/uAO3coybZVz9tgxijWDf/bF36QXNfmFf+sjOKfBuwFrbZN/9fmLfOObX+OTa38NW+fk+RBtOSy11ljrVjj/48eY64yloIPqRRQjxVGSIxcjkos5qeWgjQxtSXTWgm7IyofOIAJQJUhbYFccVK7xqhUwFLp0EI5AL1IyUvyqz+h6iLukKXDRlDgrLq2FTxSF2LaPm8zA3cReKdHmE7Qa17k6f41AHIfuCXIMxlcnFKVk57pPmAzIlcXuKEUpi6vxb2MaBnVd54Tv8fSFYzS7azi1NXJlkBbw+vaAFy5eQYWaj5zf4KlPnGHRL/m1b1ym7XZ4uLtFvujz1T/cobLR5cz6M9SebvHa51/h/C9+kpOvHtCsLNCLFZ67VnLt1RnnT7uc2+pyomyhihCxXzIupxh6H+PAoFhx+MnTNn07pFZrsbzU4vzhcYQxJcpGmEZJqWwOJzcxtMLnHFFmcrgzI1cGXcen09mgv3cLpUoWRUJRCHRZIS8gcMERoIWBLgS5LpD5Xa/qt7Gf+zsd298vbzdrf1/s7y73UmnmL5W7EOLXgI8DHSHELvCfAx8XQjzM3X9dbwN/D0Br/boQ4l8Cl4AC+Pe+m24CrTTzRYgGJBpbSKI4RJQmqrQQloFne8TRhCQ5ZFEYGK6PY2iSUnOitcaDZ09x9cY2s4XFbPEqlv40qTZodRroQcTR3jWONww6nkQaY+4MfSZZzKrhcOKDZ6mesiDXzEtFa9XAmbi0ruXkcYlVlRi+hfIsHNuk9+IOy6fqFEaCWTgIRyJMCZa+e55ZIkqTUpdIkSNtgWsHFGaELEwMBUKaKFuDI6kvV7GGKeNZSSWw0VpCaoMVYhUmcy2ZhSXxTDB9eUZ/O2KaVbnzxhRZClxSLGNAllWwDDC1y4NLS3zw9Bmqxx9jkQiu3Owxmo5ZxBNG0yOKMqNVseicPsNCLTOYDlhPDhld2WZ3/Wla7Rb3fQCqsuD1S6+gyodI+4LJNyL6ewnrn24jXEH4zVukyYLXr4fYRU6NOp4fMC9yQqFw8gLTb1MOU5766TUuPuuRyzmtjSordyx8q8V46iHshNeGBYURYaeC/mzEIlGExCgsJD57N67SMTx8oUgKxWShMYTCJKfMoOobuAYUeYksQYjyzfh692L7h8n7Yn+ftyLeTlbzg+Jc9ZT+J4/91wAYWkO
pmCc5VtBCGh6emjPPBMPkkN35kGGmQUuWWRCaLj/7+DNETUFlJeLGK9/Aaj/CX/v7H0P5NvlgRv/ZbcoXCiypmKQjdndDdhYTllZWefTnztM628CqOcjMppQpOSZxnGHpDN+xWCQhOlP4wiPp56TCYOWhClmmifIM17cwhUa6mtwNsJCQZQgtiEYJjmUiDEE5Sbj+G7fJLehcWKK5VCUTKbVAkQ5h+Oou47Gi9H2WjnWZf+XG/8fem8dYmp33ec853/59d69be3VV792zcaZnpoezcBF3SxYZUqIsIYoBIrYlA7YTQAkgB4izKEaUGAEMI0FsJAhgSaEDxRIt0xZNiRIXDYdDcmY4a093T++1b3e/3/6dc/JHcxyGCCWSGnKa5DxAoavQ99aCevHcX73nnPfw6rVv8JntKzT9GT547AFKnXKxD4fZa0ij+dgTT3Bh2yes9iiyHqFzjfPvej+Hos3FSxuUeyPmwoijJ7oIO8TxI1q1AM9VjIYxvY2Ir7/yLFtxwZHFkGS4x/ODISERf/ev/SpHP1ARHPWZ/Kuv8ul/02OQlHTbEccePE8n7/PVF69SygmT6S7vfeABTr9zlckg5nd//+vgCOykwbs+/A4Ghcvj72lQjffoPQdhM8F2JJPBs7zwVERWJeyv3yCXOY3aArmsceVwEykElrAYFYqUMcfDWWZr8xTFIVmVMi4Emcg5Ea0xF9VpBzahdTuzvF7an3j2P+fi5Oqbckw1WDxijv7Hv/Y9P+8nMbH/IPrup57xvuvHXjmfv6Ff+4eR4P/WRza59FL+/1vbd8QJ1UpDnFkIoCxzlCo5KMbsjPfIc4d7FhYZTw9IFShVoAFjFCJssSY0OAErKw7h6S4rZ5cI71tmsJlz8cKAU50RHavDeuMGw74F1QSlLc49cAzPDsmmFpMDm/zGGOmAEYBdEIQgO20Ia4SuS5nn5KqknClpuCETUeK7knoQEI9ySqXwtIvnOJQqx5QaoxWWJbBkgPA0eVHSXqhhsBn0psjAortUJ1Ylsitonlsm2hlw7ZkNbm5tEQmfuXmfxb6m5iUINWKpFnJx/zKeLlnpBHzm5ZxXN/+Ev3LXUd7z0Bm659+PqLd48ff/iJaqWHvwLKqQhNKj7kX4fsiN7ZL9Qcqz1y8RD4c0vIJRvMvG9gx2VeDbQ4bxLX7nc1/gg3vLzCwuYrIZYnWLCWP21qe8tPMFTnc7vLB1g5laHd/M8uQlzcG0xEoLMu0ymYypK4unnr5IkiuOzz9KPm0Tj7YwTkB43Gbm/p9iTVzmtWdymstnOPHNNtcL+/vMOjZ9XVK3DbFWmGzMOJVEIYxiCy1sBmpMUWk21CZGzCNEHTv0cW0L+Xpu+RGaPvDjvnj6w+B7Efq3P++NFPxvHH/w37//ZrRq7gi5D7MJn770JAJIdEFqKjLXwpU+vigY3egT1TxUZtHyupxyC1KTcf7Ug1wdDfi36+u859TD3HeySSU1n/z1W2xt3OAgN/z1c6vUvQrVaOM6FuOex5Z9hb2Le8y5AefmIwIrpTFfx7ZDEldRTSWqb0iGEwaqR1D3EbZAZeDgY91nocsKVdjgl/i12xMrjdKkowTpF0h8hDJkeUoyHNLs2sggxlttU+VD5u0mVdrjta9uc/z8CbSTE5zwKJeO8dA7TpHt73Hxf71Iu71Eu7nI3vACu8MajcY8f+vnH+DLl2/x9MXnOBz8W95/1xy/9JsfZ/+rmmAhZbwlKbYVZ5cXWWwdY6tX8fnnLzMdv0ZWDtnKLlOUBae7Z6myEftFiu/Z5OmQcdZjqb2EZxfc2P5jPtmo6aXIAAAgAElEQVQPmfPbeOUsM01DlvrgbDPOC17cLmnpOcin7LKD3t4m3n6eowurHOxt4hFxbLnD204rxmPY+MrLzM7Ocfo+l8k4o9rIWH8pxJt9kMf/I4/AfgWrryizkPYLTQ57GQfbW+QI0t46qSU4qEb4kxmQmlE2xegCWyr285xyqBlnEyq9hC+
df1/clbqzB4f9uB5Q+mHx/cr8jfhc3+2LwbeK/tv5QYn/jpC7EQotDxBA4RgUtxO8kx/BdqDTELgiBdelKno0a4u0bMEgm/JKb4fIC/j6F7e4/5dmcLOYl248j8eIhmVQZplUO2xcu8Xa0ip/8uJVdpIR72zaLC92UfM++nSdtHN7L7hnWTiFQkiBhYeoHCph0Llgsl+yOxxw+twakReijUVRKcpJgWWBwEE4AmEkVQGukWAZvNAGJVDCI2iEKAEqrcindVotOLgyoLHSoD7jMRlOQNUwDYfuuTaTdU1c/CEt26anhgzzBtNkgjM9IFQu/8kvPcpdH3sfm68cMOeusV84fP7fXOZnzp2icfQY/+4LO+xubTBJN5mUQwqV46k686FHQ5bImoUWc6R5SsI6YdggUBbNqIGxC5QUxNk2l8yEbq+NKxXdukNgp+SZwHVyal6HUTJBWApb+5QqRZclwsu50c857y9w/ztrjHdHDF5LeOo5n1F/SDks8fQWnncRY4e4a2d42xOz1FdLjq7MslIUVNkZnN4I/UnFXO6wkaTIaogjuwS2Ra/MMKXE9W1KEialpB+PqLsBge0AoL5T0/1N5i2p/+V4I6X+ZvK6+N9oyd8Rcg8sm3PdZUAwLGIGRcIGPoaEmutwJmrT9C2Kqk4Q1jm5MMNekXD8SJv5xoB+5bJbrfPZT6/yob/qUUyvcVjEWETcurFLp2kItWB3fZ+97BZlPgXvCbKwS3iig6jZ2LlE2oY8rxC2i+t4KNtGa4WUErth6LQ0zkAz7SuimQBVTSEXVENNZlU05gIs7TDtp9QaApSFbdnYgYspKoavjGksGNyOi6lgbqFFPPJgkjG9NSYfKoKOj9XMiHMbrxlytX+F3niKERaLZo+2tcR+3mZmdpmPn1lj9YPnSPctrn5tyv9y4Rny7Uu87+wRntkM2PnKl+hPdxmXUxItCLCYrS9y7swa7UbI5v51tJEE+PiWz0G/hnY85vxVhDehNJI8TciNzZ9c/T85LFbxvBpHuYuyPGRGzLHSsWh6AZd7B0xcwYnWPIGZoxGNKHTFKLnEl/+ow8qNOcy9SxRs8z//3j9ClwX3zryTeT/k64fPslGO+djce9h7VhI6dc5+7DHmjizSePvtKZ8//+7jpFf3eOlfvkbaH3F584CWU8NOYno6xqaBL32MpdlP9xgXNoHlY4BSl39RCf7AcHbjtyT+BnMnSf2NbOW80W2cO0LulrSwvBYgaDsekRdRTnpgHBa9GRrhAq3AA69ktt3BtiCZVFjtJrPOObqdLcTWiKefOuQDHz+FMil1V1KWMSM5ZqW5wvAwZXM4YJyOCQykuUYZgeXZaKPx7NvpT+SKKnOoSoH0SjAGyze4nkWBorM0w6RXMt7OqTcEYLAtC9/3UUWJMRppNNrklKWN0JIKgzAVOo/JJh5OQ6IDjZEVoamTJQWNmYydzRFJUjDrNwnrms31CVbm0Kk1mBSKVtQmqwybey/z4NpRVt/Z4dLlnP1LfS5eu8L19c9ydzhDwzrDlfV1qjhnUibkCtpexHytw2KtxUyjQVqOWVlaIUmmzASzpP2UfadBgARb4tdCasqj9DPKvM5MtIQyMBUJCM2M38RSFjU7YK5TI9r0GGUJrrTR5OQlFMbFs1NupNeJb7YQ19bZnO4iUEhPMcqvULMXabqS3WybF+IXqdWewPcF+1c3+PTvXefv/U/34y566FaD8J4W906mDK820TjsHIxo2R4ygKGeICyHQIQISiqlSdXti7HvgD0Db/EW3xNvxJbKO0LurhUy45/EAIFt40jBY2sTSg1g4ZsSYVWMjceN/gZjPUOuYeJIjj5msXNhhZ96xwr/6jf+bz71W3+Dv/nXH+O3f+uLdJwO43HF+sGIOcfmxt4VjJrQCFpgFVimgEzhNiWJkvR3SzzLhtyiTCXTcUpZaoaTCZGccrA+Jc9DHv3wIq1FiXEskniKF3kYpaCwwCowqgLhABYOkulBie2
XKOVR9KeITohdc6kcgdYJ4ZyHHNfoen3KfsHLv/My8UjSGG7RtDxqus6wmhBom9Gkz8zyvSjL5+l/scG13UvMRSHd4pC3RRbvPvYgw0nCaFigZEEvLVhuzfLoyikaUYCxMrprDjBHNi1o15v0tw/YGg0xOET1iKW3dXEDGyszTA9diiSjG6yhLEMZ99Em48GFRYY5LMytEkYuc16HoiiQuiBWI/aLfRwhmbVbbI43efbwZc53H+fBloMpltnLNkFIDtMpWsyz4kU8PusSV+vc2DO8+2fOc+ml3+fv/PLLHJu7j//0v78Pbz6i9vh9BPdULB7dZrAz4oUvXmW93yPKD8l0jBE5vlPHNja2vm11+SO0oPqTyne7U+ZOSu2v80YvxL7OX1bwd4TcQ8/iiVMtAAQWQkiKagXl9MnKnDKZxbduHzxSVUTbypgWGlk3iGAOxQZkM7TDjMufu8bP/Pw871jssDMOCYXAFDmOq1EqxRYaWwioDEJL4lFOvVlnvBMz3imQ2iXZnZAmU5pBhuPHrHSaNMMas8s1mEwo4gCokeU9tIG8hCh0gBIjBfWmT6IqKBR4FpYWVGlGPDU4IsYbTPGaHrZqgAbtFwgVYxwHy+rTVoagGmGcLoI+jl0hTUat1sBxBHOdmHpzimwJ9keLeFZJJ1hirXkD1wT0kl2UhImJaQQ15r0mjVqA9EtaMxHG8RBGIo0iT3JGwwFJJrj77jVay23Cox4Gge4LrLSCMsaqDIEBz27RsVt0ZzQi8cF2ifOCwPGIbBuJIfBdtMrAsakHkrqShCIm5xqZOUlbzFJrtIiLLXayGgf5q1SWw8nGrzK/1Gc0usbG13d47z0Pcdj7Aw56e3zjX64RHlWc+2gdKxKUCzUaruTo2Tb+DZeLmymOur1rydg5RtoYeXukhXlL7nc8380IgjtR7D9o/jKCvyPkPingyd3bv7hSF+QqJpETDuIe01GBHW3RDhd45L6THFsqsYxDo63IM4fezphgwWO/3+Phxv1spq+C+HlOn7UIrq+T4yPYJ6p3WYg0+/iU2qVm59jG0H9lwt7LhmrSx/EEzVVYeETiNhoE7WW0lWFsRVU6NMY9xtcbqN6UdMlg+QF+JNFJQlk4xGmM7QRYdol2JX5QUdmGylY4nsEqKqJml3GvZG5mBeUUlHpIpgosu4njjMmtNrOzPpUluXhpg8IzJKrJWn2Wk/Mz7FYlvX1JOZas1DpE9hZR1ye75fDA/Ad4ZmOLVI8Z5oLYVJyfO8La7BytZUljpklueVSTjGpSYVeCNBkQO7M88r6T1M5FVAEESz6UJWyOceo2+WaLew+PMo6HtPolS90W3eMdZh2b6xcS9nuHSGOIQpuGaDEbCdaCGu2oxulGl8NY0hIpo1RxQe9y18oqpDkNvcReaRikKdemPS6MP83Juz5OVD/Jhf0dHpuXfOTh/5AgHDDZ+wM+/1TFJ//5SR5/3z189JcX0XMTVryzzJye0vhqxPbhHqN4wrCfAuB8c0FV3pnrqW/xLfyoz5Z5/YXnB5Xgv53Xhb/9cvQdn3dHyD30Sx655xYAWSpJY8nXt3ZJhyP2+j0G4xmEtcn6YcapxRV++j0RlCH5tGRyyWL5VICa+EyNR2j5vPrKAaKc4vsWqkgRhPiuS8fz8bICgwVCI7Ug3SsQckrnRELQCaivdHHnImzfQhuFqQSUDirNCP2IIJiimz5R4FPIBF0JhO9isgrXlXi+Q5aVSGwychxlIUOJjsF36oR1i1JVjAcJYTMjqtUBlzybIloaRxiqcUpZuNQaEc/s3qSXTpB+k8PemNJL6B0Oac2uEMwX3H1/jUrOMpzcYqbtsHUhxnJ8pE5olAUtPyQMLeqzITpw0ZlCIhGVRZaP6Y1yTt51F/acjWlZhDUfLRXSBRkI/MhH+wrX8YiE5PjsPLpMSXWIKQuME5KVGYHtUFaCwPaoREzDDak5daygQVrt44gQnVZErZJr+1MoJpyodVFK0wpP8r5
2yCA/xt6NVzl+5kGuv/oMcfoYH/nFOS5/NsP1n+BnT13gHzz9JF/4bJ/3fvxj1J0aci4ntB0WzgxwwwUODjyM3qfMDZjbI3/lW32Zt/gx4/8V/p9+x8fcEXLPS83NwwohYDwqiPMQLY5z+njI6TO7JPtPszPu8dzuRS7uN3jh2gM8uHqC96x6uJbL4SVJJXMqMizh8eTzz9LTTRbtgNOLsFhvE0SGMwurPD+8QFblJE7F1d0d1jpHOHFeI0/N4zTauI6PFAYhQWgHjUJUElFClVokSmNiRZ5PUAcet270OfZYC+OAK2sURUnguUx7Y7xQYywLR6cMLk7xAptx30K4mqy/xSTXBH6IMiW2VdIIbdKBS9oHFwtHjSFPWPTh3pkGXz+8wqjUvOv+t2EEfOP5HU6cP83qT6+R37/I1d+7jvJ79ErDWtfmA3c/Aspn6fRRnDlFmhd4gUPZ1/iWy+bBVRQdxALYqy6qyCkKiFoWKIPTbGKEJIgt/MinP2pQC2eIk4y9m4eMk4ThYAdfOszWGgQdH6ULSu2x2q7j2rMUecR+cUAgKz58/hxad/jKlZfJjeCrkx1a7pR33/Ugdz/0HuJNxY1L23z+2dd4+MhdvHjjGX7vH2fMnV/i8XNDqvFJfo2Mf3fhAv/1J8acvPthfuXX5rGXOsy3H6Y96nMsjSk2B+QTRTYpQMA//tR3Tjdv8ebz3aT2H5WWzLd/nz+IJP/dckfIPS00z7xSAgJEASKmJg6IEajCwau/i9Nru6zMXeL6rsX1w20u7wb8lbN30Wy5zEQlW4MmV4pNTkcR+c5NLo822A5dznYfYHQo6bQUVlCntAR5VVJmJakas3iqIDg+Sx6ESNsHx0KZFIlFqQx5obCVxBWSw/UeaJewm1ILV7n8wg2uP7NJnsTMn17Aa8b4tTpYCmNc3FTzzJOK048YwnoDcossnlAOKg5v9CnL292PqOlRi+rEaOLpLp2mRX/Yp7V2nHOOIFQ22tGMNnocm7+LWlinLANmFvZIin1UugQtH62gKi1UleFyD63Zu5lOb2A1JE5bMumneG5EoseEloe0Q8IgpN6q49k2QVTHcTyKgwSjK4pxAbFACwcnCAiilDjOEQhcrZmve1QxuIVP3Td0O7PsT2IcPAwheSEo7D5tvySgRSBPcGs8YC9XYARzQU7Xdagtu5iopPnAhLvmDNFrNYq9PueOrvDMtYukz77Cu3/ub1OrDlnOBB+wSv7RFy7gvgaD/Q/TmOvhdI/gtGqY1MedadKocqhub4G0/9h502q7WIq4+Xce+47/f/S/vGPv+niLN4Af1GLrd8MdIXepDTKZAuDYAoOmKiXdukPk+biug+/ey5EHHkfZr/Llp2+RFRIjWhzsj7iFhW9PubV/mVvVEifDIegDRvFxXpx6TIY9fuHkEYw7oW75uChy4YOZsnD/EszaoCykVZEVCYya2I0J0nFxjSDUCtW3CU1IaWtm51dI4j4iuchavY4zhO2v7RPMhyw9mGG5DkHdIByf3ecusrpygtpCjmUqupVDOVV4kUOhC6qqZJJlHO71mV/y8KwJ+7e63BhN6McHSN3iA/eusTvc5NG1VU6eWqTTdfFcGzecY3vfIR857G4MUFJQlBVVmZHngpefX+foiSbj6ZSOrCM9C43BCm3G+z2MMFQG4n6CtBqgY6bxFFlMqbTBKInKUnRSIS0H23bJ8imz7TYLc20m1YjlYplkqMAu2BuVuF4LS4AQc+RThePlPLh0kio1XNzdZmu8xSgb0vYL7p5/mM6yRvAwF15IScY2rWCedmMfXSiOLM7yz164SLus8dynvsL9H32E9qKLHzo88Y1Xear3Ip/5p/fyVz9xF67jYDU0wq9RuDEoG5QLgP4LbmR6M7n5D2+L/y3J//jyZgn+jpB7aiTfKAMwUNcWDcvibEsxKDMuZwltG+xpxYWbI06053n87CMsrHbwlkBXi1Tpa4z7U4JS4FhXefy+86yuT3D0kIv
7n+L5g3VOvvo3eceRlPfNr9Ab2lyvcs4udhk3ariFRBeGUASouCJqV1RCU05GTHcEycAw7U9J+pKobrNVbdBszRKYe7GP92mfnGFaZpShwm9JLEcQJwrhZvz0ry3x6uc30TTQXsbCiRblVoozjVCxxMSGJJ2SS7i+WdFoNbm0/wrZZEBVZSw1j/B8H8rS4f4PfZj5FY+DddD6Gotth9MPneTW3pSZaszVccFuNsB3YJk9/LJi+0ad5mAO6Tq49ZBc5dgzNuXUIG2PpL/DwaaNZMr1r4FSPo3GFM/JsZRBaAeBRZlIXBOw2LEJvID1zT5GK6g0nu/geyFaFJSUVGXAeDrBclICUWc+aHCYHLA3eJWmAw+1Aoy1xPVpxbPPahLzW4gqo+naWI7NcFjwC+fehdyOOTJf47kbT/H7n1HcfHmf9/6N99J99xF+1fov+FjvIr/9e3/Af/dffZmfe+yjLK0FhHPPs73fZtSvGE4UQsCk9+b9afzd8rrkX+cnRfY/Ti2ZP48f5ILrd+KOkHtVThG9pwDY03Dg1dhLVnAsj1kpOEgPcUxFI2hx8SDiZh6zeq3J2vIJwrmUranH1y6O+W9/853UZIjahHse3Efli7SelRDvkU8ydLWMkQWJ2qJV6zCsAnRcIuu3L5zYHezgVAYLCIiwsopganHpzzZJ9nYRUUTUrDFnheR2Qm2pIEltikJjLIllSVzfw7iGUPhYyiFtQPdYip3VcWSdclwyiRWFrtCF5Mqu4EvXbjLJY8Jai/Z+k6anmVnsMJsFLJ9a43NXXqXTbrO6lhE0NFFYIFgim+R88ZM7PPKRRSZjm8FwD0cqQBP6PlZlk6Y9RnlFsO7RXjYgI9JRRn/scPd7j9M4ZpNlAfacontin9HeHoObFtNxiKgqHLvEtiq8IMRoiRqnUFWQjDHa0GjVKcoSLSyE8LANjNIRzYaLkDZl4TId9TnS7qLkIb5ss164XBseMhlcoOYKfCDwmuSJpFn3WJktKRiT5pozzgJxq8KYrzBI3ovYHLAXdwiPl6zWH+SXP9xn61aPp555npmrEd3WCg89lHN6WRL4txP7//jUm1ba3zffS6I/8cvP/0gOD/tR3yHz/fDDTPF3hNyXmnX+m4/8ByAE6bhHkcfc3N2jN02Icw9d67LgC9L4gMv9AcVok8uui77wZVp+QKzGFDrm5Sf/LtuBz8d/waEdH8UZdJldOs5R/xSf30ioVgRyUGdB1Plqb8pnr75E6/9q88AHzuPNTak5IXmhufW5PbZePaDau4HnW+jYp5AWtaKAyYjDvs1oZp6Z5YBiVDHcO6B7qk1Ycxj1SmpthzTVuDYY5XPkofvY+ZMrXPjjmIXFDoNJghkOuLm/wc14l83xdZQQDHsOB1ZE12rgH6/jNdbQmcev/MpPIZoad2kepQMuPHWDV57eY3v9Inv9Pl9+xuUXHj7HxYN1zi7aLNWbrIR1HMeHfolrOTDeJzV1SrtgkAjueqem9tA8hVth6zoWGidqM3NW0HkiBS0Q+QiTTdHJhGQzQycOlq0RRR2VpEx3K9K+D4SMy5KyyAhDm/ZslyKR2DrDcmLCSJLnJUfqR2j5EWmvh3Ji7HCeVPu8NNpGlT08YO/gFp2ow6MnIib9W6z4dQbtnyNoSuqyz3A948yqYJxHVHMlp46/n1Mi4sRnv8H+xh6YS1z5xpQkU5T6ttyn4/hNre+/DD+uif57EfuV8/mPRXr/YXNHyF1YLpXVBMBr1qlLi8hNiKcO/fiQmqUp84xbjkUQTYlzQ5wllKXGs1vMuCEzrsfWxSfZmuxw1flFmmFAFGxS5iHuimBmK6TlzIEVE9eOMbn1aSJnCnmTjZfXcU8GlMOS4XrFrWcvMc2m6MrgjDVSH+LZUKqAqedy9swaaVaR7qVU04yN/gHBvEP7xBLGCKpJQZYogq6LzjISY7FwvMaNr28wXI+J84qv3lqnFIfkIsUPIspK0XF9mrrD6Y7Lu1bv48mdmDm
9ynA8S62bY5HgRJK95w4pdw4ZJpfYzwSBWGBvFJMWihnOI9M5VNPDs6He0XgSXKdBqSogxrFtnHCZyq2QhUH4FUqDcFKMNmAchCVBthB+G9lxieYEVApRjJj2psx13kZ9r2D/lV28UYbp5fjGIxkpkvEES1jYdoHvedQ8jRtaTDLJxPaZbbahjImrOgf5PrEYUhQFDbuJkAGpVlzb67EsbBqejz/toSqbiVyiPy65/mzOaGmLeWuOzpkAN6pYedcR5jYc9q90CZsVghLb0hgD/vXgzS3wt3iLN4E7Qu7aSOLi9nY1IW/fVl9bnWOuIznSmGWyL+nvpDx42OTsdJ1PX7uJrQLO3/Uuzh5r8tprl+hPYw4PDpkWNawbt7hYDjjsV4TukKrqcP4dZ/FWDfKqTz5aZ8leYLads37hWZ550WMjGeJKiSs0bjnBQ+FGLllVEloNmkGdPC3RacWNpzeYdV1atkFZgmma8epnbzA49LnrZ+pgS7odj2Q0QUqPcqzZ/HIffWjxzN4VBvmUg/QQy7LwLJe7nRYr3YimV8NRNc498QhxvMXpTo3d5CWe+/xRlq60OHO+ieiMOPJYjdj+Krw4YFRV7OgMTx1jIfAYZDfJ3Qy7N8PKrEujPkPotPGbGmWn2JGD2ckQUQWFR6Vd7FGGMuA1muRVDhKEkNiBjTaKyhR4DYnQkGxZ+K0uelHgLEhmT4a42FSHGVk/Zni1RzHMGGyUqDxkMCkwssAQkCUCne3T8StOLc8wVBX5bsXEsqkKw7RMGZcCu5pwcfsyZXSa993t085TtmIXbaa8kDeIevvcN47ouRv0dzoUThNNRpJYjHcilhvgigolbh9mMvrOXVD9XvnWJP+tKf5HqTXzk9iO+VZ+WK2ZO0LulmXhuB5gCGoh9UYN1cmx6gpcB21iIlcRWEfw/SaPFSGXdwfcd/I0w2wdozImchusVXrpTa5OTpKOd4jcGda6NbYOEjrLsyT9HoIpkeezutTi4CDHs0qy5ABHGowqsKjoei6+63IzneJIm1bDBpNTuILeNKbpAHnAYZlRCcVwopG5ZvdLr9F92124EXSWmlihSy0LKe2EqRJsFPv0syG+rFOpFMdv0a51Oem2aYchZQ7dtkt/MkFmNqnySOMdbEdhSougFmH8McN0jPE8Gsd8upOKRXtK4AQcmY24MPLYHPaoOS3CiUTpCHd5hKMaCDuiyAqsoCC+6VCfk1iNHtnUpcxKlNEEoU+WpaR5ge16SCEwUqIsiRAK2XCoConxLKQSCCcC28NqudhA62iFiQNQh8SDBN91sJ2IOCuY7VRYMsC1HSopiCcZc80WiZpQWhk7OiGWFWVlKE3BJM+wpE1kexQmxxUlU5UzjcfM7Tl0ZyJMtsHOeMxhOcKQYZclVTJP4Nr4TgUGiurH74jqj2p75vsV+1utme+dO0Luwpa0ltu3P3AEuV3hN0OcBYugEzJbs6myEdatIdWtgJd+d5+j8/Nsb32GLGtzoz/Gs6FyLXKzSX9zwLy3gucWHDm/inM95fNfv06jNsddnS6hbVM6KR0n5YX1IZNsjBGClm/TsAK8Wov+ZAKFoW4LpHEZ5xkHeY+DwZgTbshKo87TBzuMc+gXPdw8Qkjo/HPJXLPBl6pDCp3z4uYtdvc3+cXjZ6iphNOdGsakzHsnicI6dQuOzvkEbWicXqVh2bz6XEE9muHa1svYNjS7GUt3G7ZSePZpiy9+UVCkS0xLgfEvk+tjpO6QxdkOVwZbeE7MzdEtKFocudtGBBqrPUOVpuRxQlTrIIcptpNh5zOYuqG+JMFoBBZBVMMzFSYvqcoSaTtY0sGyPVI5xvEtihjKIsdxJJZVUfjgLnTw5ucgVyT+Fs4wpY5DlRTkBQRRRVivE8cZFhJ3c8jMqIslciqdk29vEpcFk1KSVRUDfYBSR1hozHJruI6WDgfZPpka8WIBq9OIIzM2c/aI9Z1vsB2PGKeCuqcJfUPktsBAXAzf3AL/IXE
np/ef9LT+7fww0vudIXdLYNW++a1Yt2Vvty3syMF2PbQG40eU0YjcmhJ5DpNcEchjzC3FJD0f20uZzjQo0oiOV6LcmF5aQzZm6Z4a8Gf/+lUmTsF9Z7vkg4KvxF3cMkVbN7FlSikbeHYT1wupqhIhDL4pmKsvEWc5ldQUVYFr26jQZsvEBJYiDCWuMEjpgDboYko5nJL0rvOVgwP2xAZdr2Ccz7DWtVgNl7FsH+k0qXcFfjBl8Vgd1bCxWieYXjtEhntU4ZTA9sGeMDPrUG86PHOlz0sX9tk/vIopY8qyRAQtJvkc7VqLIldIe5fR8ICWbZg53qUV1bGWPKppgvYaNKIO460DvLbPzT8YMPuegmg5RJUuds1QqRz07THI0qtu/0xliecFFHlJzathtECRIz0HR9oYZXA9j6IoqLISq7SI2nVsY2FXAsvySGOJ18nQNYMTODjao1VIEp1TD25ffOLbDq4wgEFpyEx2exyz9HClTWoMFjlZlbJtenQ9QVW08W2XlVBAnhJ4gnpN4kofR5QgDPZb4wfe4ieQO0LuCIERBmMMrushLIuqD7qQlNOcoCUwZU5yA8Z7NpEnkaWFVgHarPGh9z/Av3jyKa6/2qNw7+Fg+5DWbE429XG/PMu7PtTlPW9rsLFT0Dpq8+yLGzz9zOcYy5J7ug/wyLEB6bRkttWkKgz90RTbaGbXTlBvNNk6OCTPM5TUGMfh672SkyrhnedO4UQhe+t7JFqTarDZZzArKSUAACAASURBVCQUD66d5j2nztOeWaaMxqx+6AxqtI88ugCBBUUG5ZjksMeNnTU6bp3s+ga9KxlSN5imN2hENRxPsTTfZDpI+OKfPcPL11+mzG/imSYSm0k6oumV+M3zVPGYOBMIv0ZsmtQth859dcLZiJiA0PHIkzFlPSKoOew+eYtX/5nP4795P9KSlLpCaBsjNJZwbr9geRXKkhRKYXuGopqihy7+jItSikrd3nppIRHKwq/dvoIwKB1UFnK4M8HREr9m4XbaUK9QEwvPCwjDMY4dMTOawSZjZq/PIEvolzlG3J53n5UZvh3QqkW48T6yXWO4ndBL+hx6Bcdsl7IwrM3fw/LSMcZJSqxbFKXm9dv1bOsnZ/zAm5He30rldyZ3hNyNMpTx7f4oRQVSUzrg5hZOYZgkCSbPiAc9dKJRlUE4ICrFZPsWX+rt8NreNkMxYhoPOOoss9XL0H7J4aUH2T/uMDfboIx9CrsgN7s0rBxTHXC9N+GJM6s0/UNc2yc2E8Kog+c7SJlghKYwFdooSgMTozkmx7zzwWOcfbyBqcGpUZf9XkU+KPFUg1iHvLJfkpsDTpUFS7KLWZXgLmFMTmUJ7CREhC5B4GNvDphsxshxTGTZZM0J+70J9WgJ7BZFWZIfKgbDaxTZIXllI4XCIudEMMNP3yWo3BhZ+dTtEf3C0K5NqYcuVdNFuSFuJMkzhWn6BHMl46Gie+8S/tUR2fYQa7aN6zloo5CVRaUrjCewHAdtKowuARc9lViOQukKy7JRaYFBI02OVhYlLrZlIaWkKBNGg5ym5+B6GlVFWKJAWzmVlMhII5uCoBZhKUPDrxE5HlIkYAxGKLKqIHJqWJ5Fo3IRNQ9PQOSmCGfMymoL1/g0my5JHsNejBnbUGXk1e0FVfkTdlnH67L9QUr+LaG/OfzR9gv/n48f+VDyHR97R8hdl4r84Pb4gdxIlIbSgLE1yJy5bp0yz9m/FjM6GCKnt3dwaCw838LulbxjXuBF57iQJRSTIfeEM7g2bPQv8rnPDHn8XJfO0YDmOYsPPvoYj//scdSuRMUHvPy1KY9+4m04ecFgfY/1jQ43dhP+9JZhuNNDmwJXG+bFIaeO1vh7v/HzqMVV3CJEDB12np5guftsVTn9m9d47dbL3HXUZunY3aTzHa5tltz89adotjt8+oV1Eh/a4TFmmy4zHcVD97exKFg47TPcsdi9vEmrtsCRszaltnCaK+zubXNWJizWCrr
LXSJ/hrYSPPz4OYJT9+LmkG/HPLSyinNjg93Y4wvXxnRerrP8YI7X7GDcEZaIcFcD7IWKlqu54duoJw3jmT1mPjiPtA2T3pioXocclFUhpaEoBbLQ+HZInlaQGixXYwsPbUrGYwnSwQ9dHBfstqKZNhhcLZHJFBME5PkIdhtEHUlWxBhTx+5k+J1l9LjHfUfWKNXTHOYvUlY5SihGac5s5DP0a6yF86ycctgZbrBkHecd7zzHsVMncWx46WtDhrsV+wcDXt57Ca00St22elymb2Z5/0C4+Q8f+wsXVd/IFH8nyPwndVH124X+3XJHyN0ohUpuHzRR2qC0odCayhQ4jiAuSpLhmORghJNochWAbbB1DlrjhwuEZZevDab0Jxs0m6eZyBClBuxXAwaDAX/4tcfo3Grytz96FK+mkOUsTmPA3jOzPPz2CcKfpywzlPJIYofXrm5x5fAypSmw7ZgZP+RtZ+b52Z99FNqnsZWiuNGnvLHE/t510kObbHRIrg554uwxru3Arf2MUl5hPnK5q6XRA8Gj7Vs8v1tw6/AbvCZDPHGCaf8oq10HK5mlPiNZ7ChaSxF5sIIa7FGNhth5wtFWg7TRIjFrSBWRuDk7w1N0L81xmG7S3+3Rrq8wG8ZM0ymj3oQ//qPXuPugwbs/0aYQEVZVYLkWUdMHV3H07TXyw4Tsac3gYkJnLcCyQozS4GksIVDG4LoWujKIQONoF6wMLSp0JdCWxq8JhClxjYHcZnwwYbw5JZsmTPsjZn2X+nKI7eQMNwO8KKBwJjgYZOiRDCPcKKRbW2Mx2OYgzUBAWUqEpbl8MKXsNPjF06tcu7jEwuIsJ04cxVtokowydm5lTIc36U0PgBJpgePI1yvsTavtHyTfzSnWv6zg7wSpfys/qYL/frgj5I7RUGbA7RRfVYq80GilKYVBUmGUoCYiyrCkcFLCuke3OQfKoqgqbu1tMd2/gr3QwoxTLh1cxYnAcz2qUclm+SS702Ve+LNF7n0kwpkpqHpzPPXCOh95f5fqqkWRa/S4w4hnOVBPA/sYbaHyGSynxuLiB8msFeqmgkOf3gsWg/4VdvcGzLY9TviCrLFMYM3iWhuUOqcmHJwgwioUTuVS65yg4V7nUzd2sVRMLK/xuecfZb67BCNFWHc5cv4E/tmI5/51n/7BiAdbdYJwmUPrOP2RRGRdjJdhezmtGxMaJ/YY743ZvFlRehWWr7AKC2GGPHPpj9nqneWBdxzHWzWk2ofthPqCRRrnBJGPsiC2b5G9OmSyucjC2220HWAh0UbcPtCkDVJK0iyjzHJCr854b4oxJVL6GBMjKVHaQaeSvRf7bFy6RDoocCc+cVIQdM8QtC0sOyEdChzfQlselTkgVxLbSbB8Sd1dYZxvooxCKY2xxxz2XmGSHvDRwQInZ49y5P4VnIZPVdoc3poiBvvYWUFZGppuDWlbiG/exGTLH5997t8P3yro70X0d5rYf9L4fhP76/yFchdCHAF+G5jndgT634wx/0QI0QF+FzgK3AT+mjFmIIQQwD8BfgZIgE8YY/7ce6K0UqSjEQBlWaAqhRQ+kR9iy9uHhGzHJmxGWL5LIzS012ZonGhgdV2MLlkddVn532vsjlP8e+eYq80xPZzwOy9dZy8f0ZAlEbtc+i1J+/l7OPr2JfauPMXZxSNceGqP4cGAphvQnulwunmCMx/oIss98qzPzsEA7SzSyDMG+wX9T5VMt3YRTo/X1vd56uo11jrHuGf+KP29p8jsF5lvt3BqLYIjx2gdtVhuLpP1LnHzDz06zbfx7qWIRDlM1YBBtkeRbPKHlyUL3jyndudZ+kqbV159hlB0eK2RIOhzNB1wEoWRu5jSgFIgHHrXDK/2vkTkRTRm72aWk1gbVxlKcPJtbIb8H//Aobvc5omPnGXxYYPKFV4UImJI9zPmW0uML22QHV5nY7zC6k9J7IYN2BRVgkg9BjsjikyysNphtHuArVz8yEEzplLVN+vFwo4
0q0/UWXn07WRpRdtRxFcP+OwnX6TVqXH2nmUs18YOHWyRM7yZoOJD6vNznF5bJgwmvPKVTWwbXKfEi2a4O3I5eaZLol1otIjOdLn15CYvf/kFru9ss1/cRAgLgcCUFZaQuPbt8i6VetNq+4fB9zqH5ked17cQvpXg/3y+m+ReAf+ZMeYbQog68JwQ4nPAJ4A/Ncb8D0KIvw/8feDXgZ8GTn3z7e3AP/3mv98ZA0Vxe/a2EBZhEBK5dbSGSle06g2KqgInwPICwpkQJS1KEUDlIEWDaN7i1HtyBv9Pe28eK9l133d+zjl3q317S7/X/Xrl0t0U1aRIUaQoyaYNy5YcQGPA49jBZIzAAwOJDIxnkplx4kmQv4IsiP9IJpgZBwlgB84YdmwnwkzssSUPlUgiJe5sskn2+l7325d6td39nnPmj3qkWjI3Rc1+j836AIW6de6t+/vVqV9963d+99x7/3CRdGODVmcBIxeIk6epY7infhzPBExPHaIbjWjpgOY98+x8dYejM1Xue2KO1e42zU6DcHkTbBUzrBI4p6ls75Ikkm5hca5skNvXsHmDl0abvHD1ZepS4CaXsFHAjmkzTKZZXZ5ibtZw98mEumnjzVZxZ+7iZKtHshZz/2snWF5fZyupo0WFVFlOuWAcQWN+gVd211gpajR9j5MqIshLyNjHlYJSsIOxgjx3wS0ol7qcO3YC7bk064KdVYcTp06Spkf53PwZynVBeUYSLg9YfPol6vOfoj7ngZaEAxhuRNgti5WC9olpXn1ykSKZpf1Am858gDEVhmvb1B2FM2VYemmdiq0QZj2iw9CabiEBaxVSelhrx4LqCErtgliFVB+6lwdeG/LKM0u88Yym6repzEjSfIg0Ho6F7e0Byu0QFse4t3WdG7tbgCKfSfmRzz3MvZ86w+ZGD0yM7fsMexFLy1cp3E3agYc2CVJAYS2eFDh7R1KVeMeyzAcf27eR91OHn/DR4T3F3Vq7BqztLQ+FEK8Bh4EvAT+6t9lvAU8y/gF8Cfhta60FnhZCNIUQc3v7eVuU8phqLOwZFOOZEiZBKoOSAm1BKJc4SknynFog8U3A1otb4IDxHFStwsbzK2hhOXT2KN68z3D5El+av4vLYcSF/oCGH2EuX+WLn3iCf/IvfhshDuMwYPNbmo9fPM50dYp7Pt5gYUpyaanPV79xnm4vZGQ0YWo51TpFsTnix051eOrqNq30Or/6wF2c32mz1d1kK7zI44+WcAOBW2SEI3Cik2yc7xMvX6a32qPWqVPrRHxjo8dav0S7dZrHH94lsD4UG/hFm+df2SDPFtmJr3B5N6FT/XHuO3mIT366hSgXJOUdBkPFMJ7h4jM7nGpPcbyVU+kYlPZADdnajQhDw/JL6zimwXPdgFAovKDEA//5Go+cAsfOEu9GxOsDAjJC0yBMNnjsC0dJopQXf/MF+oOcZt2jPNNEGMPGuqFUUvgipjHVoLYZsNseYFsW64CqBji+i191QIUY6WDSMpHKOPEzD+JMH2LnqddIo11WX3ORClqtCkZbLvevc3HjO0QyZ6bcYa4ScGJ+hul7Z5n/2FnCpYIF5bNz4SXO/953OHa35n/6jb+KkZbtjRSpCnQck2yNr7hp9bgs8zv/5t/vW2xP+OC4kzP4H7YkAz9gzV0IcRx4EPg2MHtTUK8zHtrC+Mdx46a3Le+1veMPoNCa3eFwvFxoJIJaJUApn6zI0EqijUEELtV2BeEbUjOiNtvAWovj5xRmm7poUq5ndC+ssPvGLElSZ7vo0w930cWIkmowXZJcW38DxTrXhxs8NuUR+gMWFx2K9jYzNYeOE7J6LeTSzjZxEmGLXVKb8eLmMi6SVuPTXO29wi99+iHi1mlmoxd4+PMBaqFJ58z9KFeQhSHRqubqt4cUXYm0XWbmBUlu6W/PcLgtGeUXWN2+xJ+ff4z7z9X5zIN3Y3TBudIGu8sxl3eHpMllMt3BU1P0aNC7sc2ll0r0hoad3et
UGbLhZjz+yDShzTn8yF0sXnyVFy4N6fjw4+fm6S/vMsqe5lLPsBL1+fN//yivnXiEX7g/YnMQMqMiyqUma+ur1BsOw8TilBXVckpQRFSadZozLoEQVPrL7MaG5lQDk8TkAx9rc+puDeEK8jQjkynlw21K9WmydES/b5g+5KGHiszX1GammJYOVy6vUyC5ur5LrjWF8lDCpW7LlJXEeJIUD9mqITNNdUqw/OwA+tNcXb1EVP8UW+clWdTFTTc51DlGu3OYG+llwlFIlo2niRn93pcf+KBi+04i/n9P/IW20k9e2wdPPvy829mpt0LY4QcQdyFEFfgD4FettYNx+XGMtdYK8c5j33fY3y8DvwwwXzvE/IPzAGRZQZ7nxGmKrPhUyhWcqkT4gvpMHc91sY6DNYY8zlBY4nXIdhXNusKdbqMvDhkOlxAmYSUq8BpzlPqC1FbRQmPDnL/86CN89dVV7po5wn/zuRZ/4/f+mBvDJhfXVrjn/IN89qfP8A8fvkq4ErC0WUKnI0qHBacfeZj/8x8/zeePO3ztckKp+Bb/8//2U1hVwcRVdrc2WbswZPjGLEsrq7ywso7ONGdn5pBOznAYY72Cx++DR4/M0h+0udz9U84/eZxK+ji1ahunU3D8xBP8Dw+eo314irzYYbgriHo9lFHMHXOY1gVHkgrazoDcZa2osHi+x4OrSzx8xPBS/kc8tS558rVPcqoxxy/c/6Ms+KusdOtciK6y/frv8PvLnyAzA37igc8QrnY5fW8Lp1Yic0LitQBMFVNymTl+nMq9LrkyVKIF/FFKEsdUAkXJRsSbCevXu+RWEDRqlOsletdHDDJDUhpQPV2jf82w9Mo6rUoTZ05SKTVxRxH9zT6DuE+oU7bTXRyhyPI+g36dx44GTB+rsHp1By4Inn1xmc6uR0306A2vcOlr1+A/l6i7NT57973srF9gS1yh8KBcL9OenQI7vhDdfsW202j9IG89ULydmL+fbW634H/YMvgDdT13IYTLOPh/x1r7h3vNG28OSYUQc8DmXvsKsHDT24/stX0P1trfBH4T4ONHzljn0Di7MqFBxwa37yDxSFJNfbpK0PJJg/FsEL/pIKzADT2Esbi6QGWGIqmBNfjT1fFt7OKU+8w1Xt3ocz0elx7OzixweAYye4TLu0s4lVnmwhrKW+TarsOGvY9mfIkwPEz1Rx6m/THJ4a0Ruu3iNY/gTrf5/LldZHCCxedfZL6lyNZrLK5vk6Q3WL+QsbOcUQxLbG1vM8g38UTOy+s5kciZkYqaK3jqmaPMT4ccm2nx6L2fIspDRC8k19e4fKPCq1de5EcW6uxe14jZPocWfKrFPVx9bYsbV3sMepDEEGZXMeR8dXudkuhy+okT5MMTfPl//K95+ZmX+bdfucJuoumJKYKgRdXdoeOleNYhK5ZxZEF/a5VSdYtK7TNQSimEokh6mCzHlQprwcgSqhojyxYbSuK+xcFFlQRuuUyplpOlMdEwptdVZFLhOCVMVOH802tUg5yHHy8hrOaV1ZhRHHNjpUQWF+ykV9A6JcslvSKm5kk+f/oYndI8fV0hem6F9SRmuDriULvB6s4aW3kdU+7hWYm1KUIlyELRSwb49Ta5tiTWjEt89p21+YOO7eDwwodyHub7EfZ3e+8ko99/3s9sGQH8K+A1a+1v3LTqK8AvAv9w7/k/3NT+K0KI32V8sKn/XjVJ5Skq8zUQEKSGPMrZudYjiQdUgxqlVoDX9vE647scKSWw2mBLBoQlqLo47ZzB+YykB36pRSESpM1oB5L7Z3fo5xfY0SPWdMJ8v8K9JxVHSkuMVq6zZH6Cx6fuIRssovyL/K1/8DdpnzyGqXooUzCai3CWHOj5rFzYpuSe5D8tXefQXIOzD57jn/0f32D5qqbkF1g8qmLESvcihZPjZSMWAoeBXsaxiquJwCqPM2VDreeyqYdsrJTxyjU6JzNmz57l3uMhM3/wEs9fW+MTjU9yOG+yctHj1ZU
rRCOJb3KC1ib1ZsT6Ykw33OG4WcF4GdNTn2K06+CHd/P4f/cIj/1sl92nbvDcn+zSqisOOQ16l7fwnJSKtXSBphdy8uRDJLKPTRSjzYy2X0UFXYrQMtpIkEc9KrUGopJR7ksWBz2yLKeeBeAYyo0qjtuh2vYodI7uxxTpJlGSk20anupt8/WXXOrKYWE6QxUj8qgg0hmZ8nD9gGpWwy+WaHkVsD5fX7rAzz/U4eRnPs4D53z09YQ/+3tPUiuO05Zfo0eJ09UKp+YqNKY8oqGmJJvkA0mWZdjhWFdt8fb6ejti+8PGDyPq77af2yH2B3kO/PvN1m9VSQbeX+b+OPBXgfNCiDct/x3Ggf97QohfApaAn9tb9x8ZTxW7zHi62F97LwPGWqJ078NbEB6UZwPKOgAhsSKDXKLcMonNSbKCihNQFAlaZygUSZFhfPAdhziNGfZzwr5mGBtWuyGpKZEWKYvry5w54VNqWj556hSMAg7Pl2hFp+hMh4j+NKZ3jK21lNK9OdVwyPorJQbnt5itu6SDCBeXbJQjyz7rl2F3ZYt+OGQQFjjCI5MKX1l2kyGecHFVwMemJUMkp4oB61HEYrLOdvcQ90kokoLBhkelf41X3gj5hb/5MPf/dMr6P79KJdvm29caXLy+yPNr3ybPIj7W6XDv0dNU7An+0heuY5IyT37TpfCbpIOYZJCgbixQW6nQG3pUT9coff0SVsc0y5qKHOBqKIuQwp2hXlOYUobOFEooRJKSqQJrJWmWkSU5MpXoVKOliykywiwjywyusriBxJ/2KIxh0B0hTEHdy/DcOrVqzOsr69zoXWVoBD4lEIfpNAJKfgM3S6lxhXpllpnqgHrjLNUpg6eOsCocgiMt1LkGWvRRxwOapTUKuc1saYp7yjXmj50lcEtU2wpNH+Hl6DihMA42ccchZd+x5v6Bx/aEMW+K/Ucxo9+Pm2PD+5st8w3gnYqWP/4221vgyz+IE8pVlKfKAEgcJA6Ol9DdiIh6MF2aRuOxez1EOym1mSpaC7JQoEOJGGnyIWSbhnQ7ZXSjxzAejrPhVs6m1cyagKqXEpqIUZxTPXaUH/vZw2w8PeJPfv8C/e2ASvBTzNYSXv7jS1QCTb0WMEwTBsmQLMq4EmZIY3HymMFoh0sbAcPtC7TsOpk7YDHcIjU9OpV5FlQd15Ncjrf5Zjfgf3n0k5RlhfY9BToTuDvbhN2IwajgxeUddvQq69emcGY0v/4rl3noyH10Zu8mSWKOVp6B5nlmvJxDU4d58MHT5FmJJCrRvXKMvGr5kc89gJktcfHp11nsZZzIQobXN8hKMbLqcPpHhyx+p4bwyviOxc1zSuU5otQl96ZIHMWlazukosyCiGlVaoTDmDwu6G0OKN9ooIcZ5UCSlwWtSpne5jaxEuSp4sbwCsp1CSptCgsrVpAbQX8wRcm5wSOHHZ5e6mO9CCeuY2nTLBeUWy5SPkR3ENE++RBOWdI+PM3VtetY2gzjgopTsPmcYevCZQbex3n4UydpXLxOb3cV121grMP28pB4BFYLypUyeZ6RJeOznsU7lGVuR2x/mLhVWfv7sXGnivwPI+S3MmuHA3KGqpSKcrU5XsbBGkGymxP2E9LQMtrpkUQuKrYEFY/EFmBz8jBFxBqzFeNqh7g7IhtJtCiYbdeRmeaNDc3ituFGllLxpmlJQ5gUGDFCyJOE3TdY68YcrvZpeTBKLexcQVXKBH6dWs1jcSNjsz+AKKLNHL4XooWhn2+yGXkcnQ4oJZqyt0icRKRZl1z5zLVbLC2vkdqcwm1CVeM3O5RaFXIzjRrkTBWW+Stdrl2e5T++eIGlkUdQukpvoCmVzpIEOaXaw9x/5gRHz9WJUo8irNPdSgh7sB4sMl2vUj98lrx6DU2J4WCHC7LGXU7MPS1FLhTDnWO0jqyQrAp832U388nLgpU4xqkHJLGltxsT2Yx62XJI1zHaYnVBnsWMNkPqXhPjQVxE1Jp14mgXtEUWikQ
alMmplSxZkrDSz9kYDNgc5Rwt1zl+/Divbz2H1HMcbYQ4bsFC5ySe4xA3PVpHHZoNQ/nQLPX5gJ1v9Vm5/gqvfucnmPtsi9Uby2xcXqW3vc5XvvYGYdKiU3FpRwmOn5DFIUpZkBIpIPAhCJy9+Jpc8ve9uB3CfjvYz9LMfmXo78SBEHesRdjxWYTWaIQFxxganmQn2mXtxhbSeFSCFko6BJ3xjTHyQqOkpdA5RRFjsoL+bhdnYFjuxyyFmn6+jhGbSDbYDhVBtcNGZshqlkaaEpgG99W36LTPEBYFc62CI0crRIVPEUr8LMfGQ0rZDqO8gfA2uefMFIvfWmKUr7LhZJyQJykrxRExhfAKpDWkKsWajIXAYQvDty9bfubzR1CNEnmjRXW2QhblhIOUqU8K9PUbfPmJT5Gs9Phf//BZUq9Mb+Uivz98nep3OizUPM48dYJKdYr6VEyaarqbXbZHWzwbLXPlpMfP/r1P86jusfMVeP7iiwzKRyj593DfQ1NsL/ZxnRnKc0t8ovUpLrzwAr+9+AY9W2XQ71ORDZL+gEESsTs1T7dX4EoXLXPQmv7aGkUW0zzeRHhV/FJEpz3H9aVlUjwiU0VrzWs3FqlUSqyNrrE2Wmfd5CReC9Yf4Mtf+DSXrq5wxDlCa9Zl5rPnKGRA2MuIRppBscOh4zWGueTH//LnePrvbrD2+tfZemqOw7MGd2HE8GLK9lrMjrSQpjgLLaTv0Oi0KZcCiiwjjROEraDE+N6pQh2MMD+I7Jeo32z3Vmfx+yHwB03Y4YCIuzGWLBlnVyYvyKOMjas7dBd3yUY5Ki6jjIdJBTiwW4QoaRE2RwqD7+XILCGQAqdTobu1jecq+uEq13VOpF2kqKHdnGnHo4yhc/c8ukgowi1mpqpIBCdP1elMD2h8bAbcmHxQ4Kk2N7ZvcHnHIhmxUwRsxS6qZlBdB60TpPBo1QXSVNFFiTi3WCtAOhyu1Qm726yvPMtW7/McakwxsgVRnCOlD1IyGIxwgznqRz3E9Cryj66wur3GkZm7Mf2MmWCXe6bOMdz12Nm+xHQiccURhv2MmtLMlQZsLm/zzX+3yf2PWc7e22RjcwlfbLJ+o4IKIs7dM8/maoGVMzQOuQSH1smX1rF5TCByoqhHko5wRE4Yp+SpRkiFkA6FBakF0SCkGlUpcoPjBCAHqMClP4xY6e+iHIdAQUxOWvTRdkhVuXTKimmhuboxh0pqHP38DGbOJ8zLOLt1ll5fYWt7h4QdGHaYuQdWtce544oHTx5GiRKVo9M0PMO3/mCTpBiR6B1OzB8n0QVlITH45MLBKSkqThlbwF6+8JHJ3H/Qs1MPSrb+YS/VHERhhwMi7nEv5cL/fRUEZGFKOkxId4Z4SlEpBczMKpRb4HoJSJcC0MaAC9IRqEoN5TUJjMP21jZBLLl05TqHO4ZLq5fZSQxtx6MkJDgNTnWarP0/V8inPoZfvYEun+P+n3SpzHuo+mlsG6wzhatz8iXLhfVX6SUOUvQYJCUGFw0DXeWw4zEqJEWsuPt4h3QmZ3ezQ6wtRebhuC4PHXuAU5tX+ObaBv3tFWTjLtzEsPx8l0qjhhEaIXJKfpWl7R5uPsMXjz7Mi8vX0Jnkx9pz3H36CTbp8efPPEkRNXmiejf3HNJ4QtMfRNSyKidmNpEvPc+K+xgf/yspR9uHeHa5VnynHwAADoBJREFU4I+fWuHwy13uvX8B3x2xslrj5P2WhfQwZ1+9iJQdZo7O8OSrVxnlPaqBh1U5yrXERrGVCmyWUdMKGxXkV7rMdjqcv7yM1gmdahWZDdmVOwxGI0puSDNpcmq2gwC2w5SdkeV1sc3PVT2iqYRLV0oMn+lyuD1iMNgltZKyTDk6XyEfXWL1pRozpxZ48N7jvLYZwAs7fOH4HMWpNtPVp4m0yy889jESHMJBQsW0yEfj4wOOAM8NEMLCXq393aZCftQ4KIL+dtxKkb9d2ftBFXY4IOJuM0N8bYgQoHONMBo
PhbSglEELg7UKnTs4no/FYtEgLFYq0kygcLEiR3gV/OY69RmPi4uShiNwXJdEpShdJrU+1lRYezVm+uwWO3EbE6ZU5g9Bu4nxMhAOShSEyyErzw+JeiN0kVHYjCzTBLlhuJ0yU3YgAkdFOKpFfaZF1a8yGvXRfYkt+yi3zP3Hj/FcN6VICgqj0UYQbXQpIotfF0zPlTHGw9ce+bDL0flp8kHM1b7D7NQMV/sDrg82GcQDPJUySKfYiqApq9x/pknaVwQzHbrDLW5ceIMzZ46hSz6XblznytZ10sYhtnYHSBtjrAPpKWrNDR473eHqWo2SK4i6GWkW0Sg5HJqpgyfJMSQ6J0xTChTCWnIR0aq1yBNDmGa4hU/Jr2G5TiFihqnB9TNapkVWTklNxlK/z+ycSyQ+zfrKiCC7SCYKviUkJIogc6n7FexGh1Q4DEZDnnlplfvncl5+/RIvPFXiic8+ges3KMsSP9pyyY9+jDTLcOJrpHmIyS24FoISNo1RUrx1TZl3mS0z4QDyYcnkD7KwwwERd6xFJuMxtMJgAcfxcD0Hz/WIIwVCIj2ByDOUK0CCKyyOqwCBKwXKlagAhlEJrXLCwuDYgLLjYe0uGBcpAN1HlRbwWxtsXxvRqNaRfgvt1hDOGko2MOk2K68a3vjODcIkJnAtcapwnPEZj77VzLbrpMkQTRkpFbXqNM1mhWS4TrQZIZ0ypVaZWlBivr6KpyVpH1KdU/JcHCfm0HSLVBmsibC5pVJXREGD+44cYzV5jYEzw3OXXifRA5rGI3AkvnTY2BoRC5g91KZy9BhqNqV3fsTFjR5nXpmjNV9jyvaIWWNzGNDbGtKupMS5w2hXYunQqd/F1jAlqAgqMobaFIGvmWr5CM+SmZwoj4nylCgvqPhlilHEKEqwjiIe5XTznEangi8EgZLkWmP8jCSx5F6ZRMSccAf89LnTlOWQ5qkRX3/GkuopVBiSjgpkVTMsJTy/fIWVLqS6y5lSTtF+iKXwPDsDh/Wdx1mYLfBdlw2mWP4z2Cli5o3PidkBjXoNz3EQ1oDy0NaSpeneXJg7vyzzfkoyBzlr/7Bx0IUdDoq4YxHEe0sWKQRSSqy1RKFBGA+pFImb43iSUqUMOOTCI8w0jVlNlhsc30f4Q8JRjRvrq0T0iY1H2Wvi5imu41MtJGm6wv1f+jnUVJfLL/ap1h0KkSP1DlI12HwjpKIafOtPnqN7bYV8mEHDYmWZoLRGktVAKQK/yfFanVy5hLrKIafF9MIMUi3Q375GYEqUGjNYHfL4qbuJ3SqluElRpORZzqEzIfVTLqvXIN4YYMMMv1pH+U1aRxRnRjM8dS0hjK9SUpJD9SlqlSpGF+giIvZyVhdPcmi+wu8+22V9KwQRIr49xV//a4d48IFpxHe+wcCr0N2oomoj0mHE2naXmprGtwWN8hp6SrNwqMPVtWvMHp1i7kidOC3AScltRGpTtochgRcw53fY3h2yE/bo6ZhUp1S1pOEJTJ7jNV0absDLoy20DBDC4a88fBdu0CKlT7jRJjDX8VhhKigRND1WwhqXVy1/evFJrDegEGXOTT0EpT5n/Wm+Kb5FulGQTYUMBnU2dML53r+jGy3Smv5L0MoZDTwaxkHblCSLkFKiHAUWdPH2l/z9KPFhFPYf9kzXD6o082EQdjgg4i4EOM7eXXP2kiwrLFpnmELhSheLpRAGKwVqaLAipzABKoDCjJC2Sq4tUlo0Gak2pMaSGQiEpOIH2BB8AeWKg6qM7yAkBRhyEOMyj04tuxsJsYH+sE+cprhOSJp5COvjizJ5Xozt6waBG2GlxaIwQiFcgXJL+PUyPmXcUkCaZpTKFVLhoaxEWYnODVK5WONhiwxVuBQmIYkzXN/HxCkOPkaHeFJTcjxc6WG0RPge0qRYYgbxEp08Z9DbYpjuIIViqz8Cm9OcbjLrl9g0KTp3scZHkZPGEUo4GJvhCA+36lOp+JRLLp3ZNkHNQQuQ0iKlAaHJbY4
wktwWaC1IsmR8Mw2jMcbgKQdXSAIlKRmXZVIqODSVRpZbmNRlkMSkkcRxQxwCKpU2uXGIRiNIQiQDUAOKXIGVSGE4OR/gX9eE8Qb99Xm0zomzLph1siLBE5AXBUhNkeZAgbQCYSxi7zZ7TGruEz6CyPfe5INHCIHjyPFDSZQjERjQOUIbHCOw2oLRiMySDgvyOCcOY7IkI0sk1hq0NQipkcqQG0tsITMSjSDwAjypKDmCIHDBtxhp0VqjybHCIoRFJ5rdtZTuekIYDclNiuclSDSFFkhRIs81VmRkuoHnlFHCYJEYIbEyRXkBTuDh18t45QAjJU6pjBKKPM+R1gAZcu9yuBQgtUIIgdYZju9jdIU8U2AzKp5H2fWRuBgjQCiEUwaRUuiIPN8lSzaJs12SfMDuKCKPDJVKiVnfxTcJWVoghI+ShiKJMGYE1mB1hlMOKJUdarWA5nQLtypxPYWQ4z9LMFhbkNmM3GhynRNlMRpNYQ3GGlzp4jk+ChehXZTMKcuClmcQfpMis0RhQJYbUBav1MENZii0j68EFSclUBIlE6rG4iuB68DMrE9TOgxHG3S3PSwJaT5EGAdXOPgqwZrxTdOLLMcUFolECoWAvcedX5aZMOH7ORDi/hd4x0RL8NYv9rsLfO9F/8T3LL31yn7P07vfVlO8eSXBt4YR37f+5tf2XXZlv9eQfRuheTfducn5t5/xsff539xQfLd/3nT/TQ/ecvntdmNv6pz3yHLFW89v47j9XgPvtCfx5uex9i3HrH1z++/d75vuCMTepjd/kHfr+wkTPtocCHG31lJo89ZDm3EmjHKwUqKlRSiBlRIcgVdRuIFDUA5wfRfHMwghUUJijcQUAkcIfAGONEgsSZ6SG0OiLWlaQC6QViClQuKCFVgrUL6iOePRmPYplyo4wiPPfYyVOMpibIKrJFgXVw3IigRjxyMNaS3CeJg8pUgzsmFCHqdIYyiScabruA5GSMDDGI10Y1BglMFai1QeRZYiVYTrGhAeUV6Mr51DgRTjE75sEYN1USrAceq4/hSBU8d3ajTKJZySJIoStrOcXPq4rou1GdpKHL+EFBVAIpSLTjKSuGAUpgy6A/LQUGQaa8R4pIBECAdPuCipcJVL4PpIxn0uhSQ3OZnJ0BSgcgrjEBtFP5PYdIDjCUrlFM+RWC0o0l2KbAdHpqSFJSpcUq3RxiMUglRb8gK6Wxl9U1Apd2i0cgQenlPFypzcFuTaR0iBUgLlOQglMBiM1W/9sU3+AiZ8FBEHYQ6wEGILCIHt/fYFmGL//TgIPsCd48cxa+30rXLmB0EIMQTe2A/b38ed8l3eKu4UP94xtg+EuAMIIZ611j488eNg+DDx49ZwUHyf+PHR8+NAlGUmTJgwYcKtZSLuEyZMmHAHcpDE/Tf324E9DoIfB8EHmPhxKzgovk/8+F7ueD8OTM19woQJEybcOg5S5j5hwoQJE24R+y7uQoifEkK8IYS4LIT4tdtse1EIcV4I8aIQ4tm9trYQ4s+EEJf2nlsfgN1/LYTYFEK8clPb29oVY/7ZXv+8LIT4xAfsx98XQqzs9cmLQogv3rTub+/58YYQ4idvoR8LQoj/TwhxQQjxqhDiv99rv+19ciuZxPZbbR/J2N73uLbW7tsDUMAV4CTgAS8BZ2+j/UVg6vva/jHwa3vLvwb8ow/A7ueATwCvvJddxjdk/mPGp2Y+Cnz7A/bj7wN/6222Pbv3/fjAib3vTd0iP+aAT+wt14CLe/Zue5/cwr6dxPZ72L3TY3u/43q/M/dHgMvW2qvW2gz4XeBL++zTl4Df2lv+LeC/utUGrLX/Cei+T7tfAn7bjnkaaAoh5j5AP96JLwG/a61NrbXXgMuMv79b4ceatfb5veUh8BpwmH3ok1vIJLbf2+4dHdv7Hdf7Le6HgRs3vV7ea7tdWOBPhRDPCSF+ea9t1lq7tre8DszeJl/eye5+9NGv7A0L//VNQ/fb4ocQ4jjwIPBtDla
f/KDst4+T2H579iW29yOu91vc95vPWGs/AXwB+LIQ4nM3r7TjsdJtn060X3b3+N+BU8ADwBrwT2+XYSFEFfgD4FettYOb1+1zn3wYmcT2X2RfYnu/4nq/xX0FWLjp9ZG9ttuCtXZl73kT+CPGQ7GNN4dCe8+bt8mdd7J7W/vIWrthrdV2fG+6f8l3h6cfqB9CCJfxD+B3rLV/uNd8IPrkv5BJbH+XA/E97kds72dc77e4PwPcLYQ4IYTwgJ8HvnI7DAshKkKI2pvLwOeBV/bs/+LeZr8I/Ifb4c+72P0K8N/uHUl/FOjfNKS75Xxfje9nGPfJm378vBDCF0KcAO4GvnOLbArgXwGvWWt/46ZVB6JP/guZxPZ3ORDf4+2O7X2P6x/2iPAtOKL8RcZHka8Av34b7Z5kfIT8JeDVN20DHeBrwCXgq0D7A7D9fzEeFuaM62q/9E52GR85/xd7/XMeePgD9uPf7Nl5eS/Y5m7a/tf3/HgD+MIt9OMzjIemLwMv7j2+uB99MontSWzfqtje77ienKE6YcKECXcg+12WmTBhwoQJHwATcZ8wYcKEO5CJuE+YMGHCHchE3CdMmDDhDmQi7hMmTJhwBzIR9wkTJky4A5mI+4QJEybcgUzEfcKECRPuQP5/Y8rSGUjPc28AAAAASUVORK5CYII=\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "f, axes = plt.subplots(1,2)\n", + "axes[0].imshow(np.moveaxis(new_img.astype(int), 0, -1))\n", + "axes[1].imshow(new_seg[0].astype(int))" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.9" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/monai/networks/layers/convutils.py b/monai/networks/layers/convutils.py index 009d828e95..0a1f8ff0b2 100644 --- a/monai/networks/layers/convutils.py +++ b/monai/networks/layers/convutils.py @@ -35,3 +35,15 @@ def calculate_out_shape(in_shape, kernel_size, stride, padding): out_shape = tuple(int(s) for s in out_shape) return tuple(out_shape) if len(out_shape) > 1 else out_shape[0] + + +def gaussian_1d(sigma, truncated=4.): + if sigma <= 0: + raise ValueError('sigma must be positive') + + tail = int(sigma * truncated + .5) + sigma2 = sigma * sigma + x = np.arange(-tail, tail + 1) + out = np.exp(-.5 / sigma2 * x ** 2) + out /= out.sum() + return out diff --git a/monai/networks/layers/simplelayers.py b/monai/networks/layers/simplelayers.py index 716c9291b3..5ed491354b 100644 --- a/monai/networks/layers/simplelayers.py +++ b/monai/networks/layers/simplelayers.py @@ -11,6 +11,9 @@ import torch import torch.nn as nn +import torch.nn.functional as F + +from monai.networks.layers.convutils import gaussian_1d, same_padding class SkipConnection(nn.Module): @@ -30,3 +33,44 @@ class Flatten(nn.Module): def forward(self, x): return x.view(x.size(0), -1) + + +class GaussianFilter: + + def __init__(self, spatial_dims, sigma, truncated=4., device=None): + """ + Args: + sigma (float): std. 
+ truncated (float): spreads how many stds. + """ + self.kernel = torch.nn.Parameter(torch.tensor(gaussian_1d(sigma, truncated)), False) + self.spatial_dims = spatial_dims + self.conv_n = [F.conv1d, F.conv2d, F.conv3d][spatial_dims - 1] + self.padding = same_padding(self.kernel.size()[0]) + self.device = device + + self.kernel = self.kernel.to(self.device) + + def __call__(self, x): + """ + Args: + x (tensor): in shape [Batch, chns, H, W, D]. + """ + if not torch.is_tensor(x): + x = torch.Tensor(x) + chns = x.shape[1] + sp_dim = self.spatial_dims + x = x.to(self.device) + + def _conv(input_, d): + if d < 0: + return input_ + s = [1] * (sp_dim + 2) + s[d + 2] = -1 + kernel = self.kernel.reshape(s).float() + kernel = kernel.repeat([chns, 1] + [1] * sp_dim) + padding = [0] * sp_dim + padding[d] = self.padding + return self.conv_n(input=_conv(input_, d - 1), weight=kernel, padding=padding, groups=chns) + + return _conv(x, sp_dim - 1) diff --git a/monai/transforms/transforms.py b/monai/transforms/transforms.py index 1098c23fab..bec727e8bb 100644 --- a/monai/transforms/transforms.py +++ b/monai/transforms/transforms.py @@ -20,8 +20,11 @@ import monai from monai.data.utils import get_random_patch, get_valid_patch_size +from monai.networks.layers.simplelayers import GaussianFilter from monai.transforms.compose import Randomizable -from monai.transforms.utils import rescale_array +from monai.transforms.utils import (create_control_grid, create_grid, create_rotate, create_scale, create_shear, + create_translate, rescale_array) +from monai.utils.misc import ensure_tuple export = monai.utils.export("monai.transforms") @@ -516,3 +519,503 @@ def __call__(self, img): return img zoomer = Zoom(self._zoom, self.order, self.mode, self.cval, self.prefilter, self.use_gpu, self.keep_size) return zoomer(img) + + +class AffineGrid: + """ + Affine transforms on the coordinates. 
+ """ + + def __init__(self, + rotate_params=None, + shear_params=None, + translate_params=None, + scale_params=None, + as_tensor_output=True, + device=None): + self.rotate_params = rotate_params + self.shear_params = shear_params + self.translate_params = translate_params + self.scale_params = scale_params + + self.as_tensor_output = as_tensor_output + self.device = device + + def __call__(self, spatial_size=None, grid=None): + """ + Args: + spatial_size (list or tuple of int): output grid size. + grid (ndarray): grid to be transformed. Shape must be (3, H, W) for 2D or (4, H, W, D) for 3D. + """ + if grid is None: + if spatial_size is not None: + grid = create_grid(spatial_size) + else: + raise ValueError('Either specify a grid or a spatial size to create a grid from.') + + spatial_dims = len(grid.shape) - 1 + affine = np.eye(spatial_dims + 1) + if self.rotate_params: + affine = affine @ create_rotate(spatial_dims, self.rotate_params) + if self.shear_params: + affine = affine @ create_shear(spatial_dims, self.shear_params) + if self.translate_params: + affine = affine @ create_translate(spatial_dims, self.translate_params) + if self.scale_params: + affine = affine @ create_scale(spatial_dims, self.scale_params) + affine = torch.tensor(affine, device=self.device) + + if not torch.is_tensor(grid): + grid = torch.tensor(grid) + if self.device: + grid = grid.to(self.device) + grid = (affine.float() @ grid.reshape((grid.shape[0], -1)).float()).reshape([-1] + list(grid.shape[1:])) + if self.as_tensor_output: + return grid + return grid.cpu().numpy() + + +class RandAffineGrid(Randomizable): + """ + generate randomised affine grid + """ + + def __init__(self, + rotate_range=None, + shear_range=None, + translate_range=None, + scale_range=None, + as_tensor_output=True, + device=None): + """ + Args: + rotate_range (a sequence of positive floats): rotate_range[0] with be used to generate the 1st rotation + parameter from `uniform[-rotate_range[0], rotate_range[0])`. 
Similarly, `rotate_range[2]` and + `rotate_range[3]` are used in 3D affine for the range of 2nd and 3rd axes. + shear_range (a sequence of positive floats): shear_range[0] with be used to generate the 1st shearing + parameter from `uniform[-shear_range[0], shear_range[0])`. Similarly, `shear_range[1]` to + `shear_range[N]` controls the range of the uniform distribution used to generate the 2nd to + N-th parameter. + translate_range (a sequence of positive floats): translate_range[0] with be used to generate the 1st + shift parameter from `uniform[-translate_range[0], translate_range[0])`. Similarly, `translate_range[1]` + to `translate_range[N]` controls the range of the uniform distribution used to generate + the 2nd to N-th parameter. + scale_range (a sequence of positive floats): scaling_range[0] with be used to generate the 1st scaling + factor from `uniform[-scale_range[0], scale_range[0]) + 1.0`. Similarly, `scale_range[1]` to + `scale_range[N]` controls the range of the uniform distribution used to generate the 2nd to + N-th parameter. 
+ + See also: + `from monai.transforms.utils import (create_rotate, create_shear, create_translate, create_scale)` + """ + self.rotate_range = ensure_tuple(rotate_range) + self.shear_range = ensure_tuple(shear_range) + self.translate_range = ensure_tuple(translate_range) + self.scale_range = ensure_tuple(scale_range) + + self.rotate_params = None + self.shear_params = None + self.translate_params = None + self.scale_params = None + + self.as_tensor_output = as_tensor_output + self.device = device + + def randomize(self): + if self.rotate_range: + self.rotate_params = [self.R.uniform(-f, f) for f in self.rotate_range if f is not None] + if self.shear_range: + self.shear_params = [self.R.uniform(-f, f) for f in self.shear_range if f is not None] + if self.translate_range: + self.translate_params = [self.R.uniform(-f, f) for f in self.translate_range if f is not None] + if self.scale_range: + self.scale_params = [self.R.uniform(-f, f) + 1.0 for f in self.scale_range if f is not None] + + def __call__(self, spatial_size=None, grid=None): + """ + Returns: + a 2D (3xHxW) or 3D (4xHxWxD) grid. + """ + self.randomize() + affine_grid = AffineGrid(self.rotate_params, self.shear_params, self.translate_params, self.scale_params, + self.as_tensor_output, self.device) + return affine_grid(spatial_size, grid) + + +class RandDeformGrid(Randomizable): + """ + generate random deformation grid + """ + + def __init__(self, spacing, magnitude_range, as_tensor_output=True, device=None): + """ + Args: + spacing (2 or 3 ints): spacing of the grid in 2D or 3D. + e.g., spacing=(1, 1) indicates pixel-wise deformation in 2D, + spacing=(1, 1, 1) indicates voxel-wise deformation in 3D, + spacing=(2, 2) indicates deformation field defined on every other pixel in 2D. + magnitude_range (2 ints): the random offsets will be generated from + `uniform[magnitude[0], magnitude[1])`. + as_tensor_output (bool): whether to output tensor instead of numpy array. + defaults to True. 
+ device (torch device): device to store the output grid data. + """ + self.spacing = spacing + self.magnitude = magnitude_range + + self.rand_mag = 1.0 + self.as_tensor_output = as_tensor_output + self.random_offset = 0.0 + self.device = device + + def randomize(self, grid_size): + self.random_offset = self.R.normal(size=([len(grid_size)] + list(grid_size))) + self.rand_mag = self.R.uniform(self.magnitude[0], self.magnitude[1]) + + def __call__(self, spatial_size): + control_grid = create_control_grid(spatial_size, self.spacing) + self.randomize(control_grid.shape[1:]) + control_grid[:len(spatial_size)] += self.rand_mag * self.random_offset + if self.as_tensor_output: + control_grid = torch.tensor(control_grid, device=self.device) + return control_grid + + +class Resample: + + def __init__(self, padding_mode='zeros', as_tensor_output=False, device=None): + """ + computes output image using values from `img`, locations from `grid` using pytorch. + supports spatially 2D or 3D (num_channels, H, W[, D]). + + Args: + padding_mode ('zeros'|'border'|'reflection'): mode of handling out of range indices. Defaults to 'zeros'. + as_tensor_output(bool): whether to return a torch tensor. Defaults to False. + device (torch.device): device on which the tensor will be allocated. + """ + self.padding_mode = padding_mode + self.as_tensor_output = as_tensor_output + self.device = device + + def __call__(self, img, grid, mode='bilinear'): + """ + Args: + img (ndarray or tensor): shape must be (num_channels, H, W[, D]). + grid (ndarray or tensor): shape must be (3, H, W) for 2D or (4, H, W, D) for 3D. + mode ('nearest'|'bilinear'): interpolation order. Defaults to 'bilinear'. + """ + if not torch.is_tensor(img): + img = torch.tensor(img) + if not torch.is_tensor(grid): + grid = torch.tensor(grid) + if self.device: + img = img.to(self.device) + grid = grid.to(self.device) + + for i, dim in enumerate(img.shape[1:]): + grid[i] = 2. * grid[i] / (dim - 1.) 
+ grid = grid[:-1] / grid[-1:] + grid = grid[range(img.ndim - 2, -1, -1)] + grid = grid.permute(list(range(grid.ndim))[1:] + [0]) + out = torch.nn.functional.grid_sample(img[None].float(), + grid[None].float(), + mode=mode, + padding_mode=self.padding_mode, + align_corners=False)[0] + if not self.as_tensor_output: + return out.cpu().numpy() + return out + + +@export +class Affine: + """ + transform ``img`` given the affine parameters. + """ + + def __init__(self, + rotate_params=None, + shear_params=None, + translate_params=None, + scale_params=None, + spatial_size=None, + mode='bilinear', + padding_mode='zeros', + as_tensor_output=False, + device=None): + """ + The affines are applied in rotate, shear, translate, scale order. + + Args: + rotate_params (float, list of floats): a rotation angle in radians, + a scalar for 2D image, a tuple of 2 floats for 3D. Defaults to no rotation. + shear_params (list of floats): + a tuple of 2 floats for 2D, a tuple of 6 floats for 3D. Defaults to no shearing. + translate_params (list of floats): + a tuple of 2 floats for 2D, a tuple of 3 floats for 3D. Translation is in pixel/voxel + relative to the center of the input image. Defaults to no translation. + scale_params (list of floats): + a tuple of 2 floats for 2D, a tuple of 3 floats for 3D. Defaults to no scaling. + spatial_size (list or tuple of int): output image spatial size. + if `img` has two spatial dimensions, `spatial_size` should have 2 elements [h, w]. + if `img` has three spatial dimensions, `spatial_size` should have 3 elements [h, w, d]. + mode ('nearest'|'bilinear'): interpolation order. Defaults to 'bilinear'. + padding_mode ('zeros'|'border'|'reflection'): mode of handling out of range indices. Defaults to 'zeros'. + as_tensor_output (bool): the computation is implemented using pytorch tensors, this option specifies + whether to convert it back to numpy arrays. + device (torch.device): device on which the tensor will be allocated. 
+ """ + self.affine_grid = AffineGrid(rotate_params, + shear_params, + translate_params, + scale_params, + as_tensor_output=True, + device=device) + self.resampler = Resample(padding_mode, as_tensor_output=as_tensor_output, device=device) + self.spatial_size = spatial_size + self.mode = mode + + def __call__(self, img, spatial_size=None, mode=None): + """ + Args: + img (ndarray or tensor): shape must be (num_channels, H, W[, D]), + spatial_size (list or tuple of int): output image spatial size. + if `img` has two spatial dimensions, `spatial_size` should have 2 elements [h, w]. + if `img` has three spatial dimensions, `spatial_size` should have 3 elements [h, w, d]. + mode ('nearest'|'bilinear'): interpolation order. Defaults to 'bilinear'. + """ + spatial_size = spatial_size or self.spatial_size + mode = mode or self.mode + grid = self.affine_grid(spatial_size) + return self.resampler(img, grid, mode) + + +@export +class RandAffine(Randomizable): + """ + Random affine transform. + """ + + def __init__(self, + prob=0.1, + rotate_range=None, + shear_range=None, + translate_range=None, + scale_range=None, + spatial_size=None, + mode='bilinear', + padding_mode='zeros', + as_tensor_output=True, + device=None): + """ + Args: + prob (float): probability of returning a randomized affine grid. + defaults to 0.1, with 10% chance returns a randomized grid. + spatial_size (list or tuple of int): output image spatial size. + if `img` has two spatial dimensions, `spatial_size` should have 2 elements [h, w]. + if `img` has three spatial dimensions, `spatial_size` should have 3 elements [h, w, d]. + mode ('nearest'|'bilinear'): interpolation order. Defaults to 'bilinear'. + padding_mode ('zeros'|'border'|'reflection'): mode of handling out of range indices. Defaults to 'zeros'. + as_tensor_output (bool): the computation is implemented using pytorch tensors, this option specifies + whether to convert it back to numpy arrays. 
+ device (torch.device): device on which the tensor will be allocated. + + See also: + RandAffineGrid for the random affine paramters configurations. + Affine for the affine transformation parameters configurations. + """ + + self.rand_affine_grid = RandAffineGrid(rotate_range, shear_range, translate_range, scale_range, True, device) + self.resampler = Resample(padding_mode=padding_mode, as_tensor_output=as_tensor_output, device=device) + + self.spatial_size = spatial_size + self.mode = mode + + self.do_transform = False + self.prob = prob + + def set_random_state(self, seed=None, state=None): + self.rand_affine_grid.set_random_state(seed, state) + Randomizable.set_random_state(self, seed, state) + return self + + def randomize(self): + self.do_transform = self.R.rand() < self.prob + + def __call__(self, img, spatial_size=None, mode=None): + """ + Args: + img (ndarray or tensor): shape must be (num_channels, H, W[, D]), + spatial_size (list or tuple of int): output image spatial size. + if `img` has two spatial dimensions, `spatial_size` should have 2 elements [h, w]. + if `img` has three spatial dimensions, `spatial_size` should have 3 elements [h, w, d]. + mode ('nearest'|'bilinear'): interpolation order. Defaults to 'bilinear'. + """ + self.randomize() + spatial_size = spatial_size or self.spatial_size + mode = mode or self.mode + if self.do_transform: + grid = self.rand_affine_grid(spatial_size=spatial_size) + else: + grid = create_grid(spatial_size) + return self.resampler(img, grid, mode) + + +@export +class Rand2DElastic(Randomizable): + """ + Random elastic deformation and affine in 2D + """ + + def __init__(self, + spacing, + magnitude_range, + prob=0.1, + rotate_range=None, + shear_range=None, + translate_range=None, + scale_range=None, + spatial_size=None, + mode='bilinear', + padding_mode='zeros', + as_tensor_output=False, + device=None): + """ + Args: + spacing (2 ints): distance in between the control points. 
+ magnitude_range (2 ints): the random offsets will be generated from + `uniform[magnitude[0], magnitude[1])`. + prob (float): probability of returning a randomized affine grid. + defaults to 0.1, with 10% chance returns a randomized grid, + otherwise returns a `spatial_size` centered area centered extracted from the input image. + spatial_size (2 ints): specifying output image spatial size [h, w]. + mode ('nearest'|'bilinear'): interpolation order. Defaults to 'bilinear'. + padding_mode ('zeros'|'border'|'reflection'): mode of handling out of range indices. Defaults to 'zeros'. + as_tensor_output (bool): the computation is implemented using pytorch tensors, this option specifies + whether to convert it back to numpy arrays. + device (torch.device): device on which the tensor will be allocated. + + See also: + RandAffineGrid for the random affine paramters configurations. + Affine for the affine transformation parameters configurations. + """ + self.deform_grid = RandDeformGrid(spacing, magnitude_range, as_tensor_output=True, device=device) + self.rand_affine_grid = RandAffineGrid(rotate_range, shear_range, translate_range, scale_range, True, device) + self.resampler = Resample(padding_mode=padding_mode, as_tensor_output=as_tensor_output, device=device) + + self.spatial_size = spatial_size + self.mode = mode + self.prob = prob + self.do_transform = False + + def set_random_state(self, seed=None, state=None): + self.deform_grid.set_random_state(seed, state) + self.rand_affine_grid.set_random_state(seed, state) + Randomizable.set_random_state(self, seed, state) + return self + + def randomize(self): + self.do_transform = self.R.rand() < self.prob + + def __call__(self, img, spatial_size=None, mode=None): + """ + Args: + img (ndarray or tensor): shape must be (num_channels, H, W), + spatial_size (2 ints): specifying output image spatial size [h, w]. + mode ('nearest'|'bilinear'): interpolation order. Defaults to 'self.mode'. 
+ """ + self.randomize() + spatial_size = spatial_size or self.spatial_size + mode = mode or self.mode + if self.do_transform: + grid = self.deform_grid(spatial_size) + grid = self.rand_affine_grid(grid=grid) + grid = torch.nn.functional.interpolate(grid[None], spatial_size, mode='bicubic', align_corners=False)[0] + else: + grid = create_grid(spatial_size) + return self.resampler(img, grid, mode) + + +@export +class Rand3DElastic(Randomizable): + """ + Random elastic deformation and affine in 3D + """ + + def __init__(self, + sigma_range, + magnitude_range, + prob=0.1, + rotate_range=None, + shear_range=None, + translate_range=None, + scale_range=None, + spatial_size=None, + mode='bilinear', + padding_mode='zeros', + as_tensor_output=False, + device=None): + """ + Args: + sigma_range (2 ints): a Gaussian kernel with standard deviation sampled + from `uniform[sigma_range[0], sigma_range[1])` will be used to smooth the random offset grid. + magnitude_range (2 ints): the random offsets on the grid will be generated from + `uniform[magnitude[0], magnitude[1])`. + prob (float): probability of returning a randomized affine grid. + defaults to 0.1, with 10% chance returns a randomized grid, + otherwise returns a `spatial_size` centered area centered extracted from the input image. + spatial_size (2 ints): specifying output image spatial size [h, w, d]. + mode ('nearest'|'bilinear'): interpolation order. Defaults to 'bilinear'. + padding_mode ('zeros'|'border'|'reflection'): mode of handling out of range indices. Defaults to 'zeros'. + as_tensor_output (bool): the computation is implemented using pytorch tensors, this option specifies + whether to convert it back to numpy arrays. + device (torch.device): device on which the tensor will be allocated. + + See also: + - ``RandAffineGrid`` for the random affine paramters configurations. + - ``Affine`` for the affine transformation parameters configurations. 
+ """ + self.rand_affine_grid = RandAffineGrid(rotate_range, shear_range, translate_range, scale_range, True, device) + self.resampler = Resample(padding_mode=padding_mode, as_tensor_output=as_tensor_output, device=device) + + self.sigma_range = sigma_range + self.magnitude_range = magnitude_range + self.spatial_size = spatial_size + self.mode = mode + self.device = device + + self.prob = prob + self.do_transform = False + self.rand_offset = None + self.magnitude = 1.0 + self.sigma = 1.0 + + def set_random_state(self, seed=None, state=None): + self.rand_affine_grid.set_random_state(seed, state) + Randomizable.set_random_state(self, seed, state) + return self + + def randomize(self, grid_size): + self.do_transform = self.R.rand() < self.prob + if self.do_transform: + self.rand_offset = self.R.uniform(-1., 1., [3] + list(grid_size)) + self.magnitude = self.R.uniform(self.magnitude_range[0], self.magnitude_range[1]) + self.sigma = self.R.uniform(self.sigma_range[0], self.sigma_range[1]) + + def __call__(self, img, spatial_size=None, mode=None): + """ + Args: + img (ndarray or tensor): shape must be (num_channels, H, W, D), + spatial_size (2 ints): specifying output image spatial size [h, w, d]. + mode ('nearest'|'bilinear'): interpolation order. Defaults to 'self.mode'. 
+ """ + spatial_size = spatial_size or self.spatial_size + mode = mode or self.mode + self.randomize(spatial_size) + grid = create_grid(spatial_size) + if self.do_transform: + grid = torch.tensor(grid).to(self.device) + gaussian = GaussianFilter(3, self.sigma, 3., device=self.device) + grid[:3] += gaussian(self.rand_offset[None])[0] * self.magnitude + grid = self.rand_affine_grid(grid=grid) + return self.resampler(img, grid, mode) diff --git a/monai/transforms/utils.py b/monai/transforms/utils.py index f477a24754..cc1de277fb 100644 --- a/monai/transforms/utils.py +++ b/monai/transforms/utils.py @@ -9,11 +9,12 @@ # See the License for the specific language governing permissions and # limitations under the License. - import random import numpy as np +from monai.utils.misc import ensure_tuple + def rand_choice(prob=0.5): """Returns True if a randomly chosen number is less than or equal to `prob', by default this is a 50/50 chance.""" @@ -208,3 +209,136 @@ def generate_pos_neg_label_crop_centers(label, size, num_samples, pos_ratio, ran centers.append(center_ori) return centers + + +def create_grid(spatial_size, spacing=None, homogeneous=True, dtype=float): + """ + compute a `spatial_size` mesh. + + Args: + spatial_size (sequence of ints): spatial size of the grid. + spacing (sequence of ints): same len as ``spatial_size``, defaults to 1.0 (dense grid). + homogeneous (bool): whether to make homogeneous coordinates. + dtype (type): output grid data type. + """ + spacing = spacing or tuple(1.0 for _ in spatial_size) + ranges = [np.linspace(-(d - 1.) / 2. * s, (d - 1.) / 2. 
* s, int(d)) for d, s in zip(spatial_size, spacing)] + coords = np.asarray(np.meshgrid(*ranges, indexing='ij'), dtype=dtype) + if not homogeneous: + return coords + return np.concatenate([coords, np.ones_like(coords[0:1, ...])]) + + +def create_control_grid(spatial_shape, spacing, homogeneous=True, dtype=float): + """ + control grid with two additional point in each direction + """ + grid_shape = [] + for d, s in zip(spatial_shape, spacing): + d = int(d) + if d % 2 == 0: + grid_shape.append(np.ceil((d - 1.) / (2. * s) + 0.5) * 2. + 2.) + else: + grid_shape.append(np.ceil((d - 1.) / (2. * s)) * 2. + 3.) + return create_grid(grid_shape, spacing, homogeneous, dtype) + + +def create_rotate(spatial_dims, radians): + """ + create a 2D or 3D rotation matrix + Args: + spatial_dims (2|3): spatial rank + radians (float or a sequence of floats): rotation radians + when spatial_dims == 3, the `radians` sequence corresponds to + rotation in the 1st, 2nd, and 3rd dim respectively. + """ + radians = ensure_tuple(radians) + if spatial_dims == 2: + if len(radians) >= 1: + sin_, cos_ = np.sin(radians[0]), np.cos(radians[0]) + return np.array([[cos_, -sin_, 0.], [sin_, cos_, 0.], [0., 0., 1.]]) + + if spatial_dims == 3: + affine = None + if len(radians) >= 1: + sin_, cos_ = np.sin(radians[0]), np.cos(radians[0]) + affine = np.array([ + [1., 0., 0., 0.], + [0., cos_, -sin_, 0.], + [0., sin_, cos_, 0.], + [0., 0., 0., 1.], + ]) + if len(radians) >= 2: + sin_, cos_ = np.sin(radians[1]), np.cos(radians[1]) + affine = affine @ np.array([ + [cos_, 0.0, sin_, 0.], + [0., 1., 0., 0.], + [-sin_, 0., cos_, 0.], + [0., 0., 0., 1.], + ]) + if len(radians) >= 3: + sin_, cos_ = np.sin(radians[2]), np.cos(radians[2]) + affine = affine @ np.array([ + [cos_, -sin_, 0., 0.], + [sin_, cos_, 0., 0.], + [0., 0., 1., 0.], + [0., 0., 0., 1.], + ]) + return affine + + raise ValueError('create_rotate got spatial_dims={}, radians={}.'.format(spatial_dims, radians)) + + +def create_shear(spatial_dims, coefs): 
+ """ + create a shearing matrix + Args: + spatial_dims (int): spatial rank + coefs (floats): shearing factors, defaults to 0. + """ + coefs = list(ensure_tuple(coefs)) + if spatial_dims == 2: + while len(coefs) < 2: + coefs.append(0.0) + return np.array([ + [1, coefs[0], 0.], + [coefs[1], 1., 0.], + [0., 0., 1.], + ]) + if spatial_dims == 3: + while len(coefs) < 6: + coefs.append(0.0) + return np.array([ + [1., coefs[0], coefs[1], 0.], + [coefs[2], 1., coefs[3], 0.], + [coefs[4], coefs[5], 1., 0.], + [0., 0., 0., 1.], + ]) + raise NotImplementedError + + +def create_scale(spatial_dims, scaling_factor): + """ + create a scaling matrix + Args: + spatial_dims (int): spatial rank + scaling_factor (floats): scaling factors, defaults to 1. + """ + scaling_factor = list(ensure_tuple(scaling_factor)) + while len(scaling_factor) < spatial_dims: + scaling_factor.append(1.) + return np.diag(scaling_factor[:spatial_dims] + [1.]) + + +def create_translate(spatial_dims, shift): + """ + create a translation matrix + Args: + spatial_dims (int): spatial rank + shift (floats): translate factors, defaults to 0. + """ + shift = ensure_tuple(shift) + affine = np.eye(spatial_dims + 1) + for i, a in enumerate(shift[:spatial_dims]): + affine[i, spatial_dims] = a + return affine diff --git a/tests/test_affine.py b/tests/test_affine.py new file mode 100644 index 0000000000..e179be1fc1 --- /dev/null +++ b/tests/test_affine.py @@ -0,0 +1,64 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + +import numpy as np +import torch +from parameterized import parameterized + +from monai.transforms.transforms import Affine + +TEST_CASES = [ + [ + dict(padding_mode='zeros', as_tensor_output=False, device=None), + {'img': np.arange(4).reshape((1, 2, 2)), 'spatial_size': (4, 4)}, + np.array([[[0., 0., 0., 0.], [0., 0., 0.25, 0.], [0., 0.5, 0.75, 0.], [0., 0., 0., 0.]]]) + ], + [ + dict(rotate_params=[np.pi / 2], padding_mode='zeros', as_tensor_output=False, device=None), + {'img': np.arange(4).reshape((1, 2, 2)), 'spatial_size': (4, 4)}, + np.array([[[0., 0., 0., 0.], [0., 0.5, 0., 0.], [0., 0.75, 0.25, 0.], [0., 0., 0., 0.]]]) + ], + [ + dict(padding_mode='zeros', as_tensor_output=False, device=None), + {'img': np.arange(8).reshape((1, 2, 2, 2)), 'spatial_size': (4, 4, 4)}, + np.array([[[[0., 0., 0., 0.], [0., 0., 0., 0.], [0., 0., 0., 0.], [0., 0., 0., 0.]], + [[0., 0., 0., 0.], [0., 0., 0.125, 0.], [0., 0.25, 0.375, 0.], [0., 0., 0., 0.]], + [[0., 0., 0., 0.], [0., 0.5, 0.625, 0.], [0., 0.75, 0.875, 0.], [0., 0., 0., 0.]], + [[0., 0., 0., 0.], [0., 0., 0., 0.], [0., 0., 0., 0.], [0., 0., 0., 0.]]]]) + ], + [ + dict(rotate_params=[np.pi / 2], padding_mode='zeros', as_tensor_output=False, device=None), + {'img': np.arange(8).reshape((1, 2, 2, 2)), 'spatial_size': (4, 4, 4)}, + np.array([[[[0., 0., 0., 0.], [0., 0., 0., 0.], [0., 0., 0., 0.], [0., 0., 0., 0.]], + [[0., 0., 0., 0.], [0., 0.25, 0., 0.], [0., 0.375, 0.125, 0.], [0., 0., 0., 0.]], + [[0., 0., 0., 0.], [0., 0.75, 0.5, 0.], [0., 0.875, 0.625, 0.], [0., 0., 0., 0.]], + [[0., 0., 0., 0.], [0., 0., 0., 0.], [0., 0., 0., 0.], [0., 0., 0., 0.]]]]) + ], +] + + +class TestAffine(unittest.TestCase): + + @parameterized.expand(TEST_CASES) + def test_affine(self, input_param, input_data, expected_val): + g = Affine(**input_param) + result = g(**input_data) + 
self.assertEqual(torch.is_tensor(result), torch.is_tensor(expected_val)) + if torch.is_tensor(result): + np.testing.assert_allclose(result.cpu().numpy(), expected_val.cpu().numpy(), rtol=1e-4, atol=1e-4) + else: + np.testing.assert_allclose(result, expected_val, rtol=1e-4, atol=1e-4) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_affine_grid.py b/tests/test_affine_grid.py new file mode 100644 index 0000000000..759f1f10af --- /dev/null +++ b/tests/test_affine_grid.py @@ -0,0 +1,75 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest + +import numpy as np +import torch +from parameterized import parameterized + +from monai.transforms.transforms import AffineGrid + +TEST_CASES = [ + [{'as_tensor_output': False, 'device': torch.device('cpu:0')}, {'spatial_size': (2, 2)}, + np.array([[[-0.5, -0.5], [0.5, 0.5]], [[-0.5, 0.5], [-0.5, 0.5]], [[1., 1.], [1., 1.]]])], + [{'as_tensor_output': True, 'device': None}, {'spatial_size': (2, 2)}, + torch.tensor([[[-0.5, -0.5], [0.5, 0.5]], [[-0.5, 0.5], [-0.5, 0.5]], [[1., 1.], [1., 1.]]])], + [{'as_tensor_output': False, 'device': None}, {'grid': np.ones((3, 3, 3))}, + np.ones((3, 3, 3))], + [{'as_tensor_output': True, 'device': torch.device('cpu:0')}, {'grid': np.ones((3, 3, 3))}, + torch.ones((3, 3, 3))], + [{'as_tensor_output': False, 'device': None}, {'grid': torch.ones((3, 3, 3))}, + np.ones((3, 3, 3))], + [{'as_tensor_output': True, 'device': torch.device('cpu:0')}, {'grid': torch.ones((3, 3, 3))}, + torch.ones((3, 3, 3))], + [{'rotate_params': (1., 1.), 'scale_params': (-20, 10), 'as_tensor_output': True, 'device': torch.device('cpu:0')}, + {'grid': torch.ones((3, 3, 3))}, + torch.tensor([[[-19.2208, -19.2208, -19.2208], [-19.2208, -19.2208, -19.2208], [-19.2208, -19.2208, -19.2208]], + [[-11.4264, -11.4264, -11.4264], [-11.4264, -11.4264, -11.4264], [-11.4264, -11.4264, -11.4264]], + [[1., 1., 1.], [1., 1., 1.], [1., 1., 1.]]])], + [ + { + 'rotate_params': (1., 1., 1.), 'scale_params': (-20, 10), 'as_tensor_output': True, 'device': + torch.device('cpu:0') + }, + {'grid': torch.ones((4, 3, 3, 3))}, + torch.tensor([[[[-9.5435, -9.5435, -9.5435], [-9.5435, -9.5435, -9.5435], [-9.5435, -9.5435, -9.5435]], + [[-9.5435, -9.5435, -9.5435], [-9.5435, -9.5435, -9.5435], [-9.5435, -9.5435, -9.5435]], + [[-9.5435, -9.5435, -9.5435], [-9.5435, -9.5435, -9.5435], [-9.5435, -9.5435, -9.5435]]], + [[[-20.2381, -20.2381, -20.2381], [-20.2381, -20.2381, -20.2381], [-20.2381, -20.2381, -20.2381]], + [[-20.2381, -20.2381, -20.2381], [-20.2381, 
-20.2381, -20.2381], [-20.2381, -20.2381, -20.2381]], + [[-20.2381, -20.2381, -20.2381], [-20.2381, -20.2381, -20.2381], [-20.2381, -20.2381, + -20.2381]]], + [[[-0.5844, -0.5844, -0.5844], [-0.5844, -0.5844, -0.5844], [-0.5844, -0.5844, -0.5844]], + [[-0.5844, -0.5844, -0.5844], [-0.5844, -0.5844, -0.5844], [-0.5844, -0.5844, -0.5844]], + [[-0.5844, -0.5844, -0.5844], [-0.5844, -0.5844, -0.5844], [-0.5844, -0.5844, -0.5844]]], + [[[1.0000, 1.0000, 1.0000], [1.0000, 1.0000, 1.0000], [1.0000, 1.0000, 1.0000]], + [[1.0000, 1.0000, 1.0000], [1.0000, 1.0000, 1.0000], [1.0000, 1.0000, 1.0000]], + [[1.0000, 1.0000, 1.0000], [1.0000, 1.0000, 1.0000], [1.0000, 1.0000, 1.0000]]]]), + ], +] + + +class TestAffineGrid(unittest.TestCase): + + @parameterized.expand(TEST_CASES) + def test_affine_grid(self, input_param, input_data, expected_val): + g = AffineGrid(**input_param) + result = g(**input_data) + self.assertEqual(torch.is_tensor(result), torch.is_tensor(expected_val)) + if torch.is_tensor(result): + np.testing.assert_allclose(result.cpu().numpy(), expected_val.cpu().numpy(), rtol=1e-4, atol=1e-4) + else: + np.testing.assert_allclose(result, expected_val, rtol=1e-4, atol=1e-4) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_create_grid_and_affine.py b/tests/test_create_grid_and_affine.py new file mode 100644 index 0000000000..7359485b2f --- /dev/null +++ b/tests/test_create_grid_and_affine.py @@ -0,0 +1,176 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + +import numpy as np + +from monai.transforms.utils import (create_control_grid, create_grid, create_rotate, create_scale, create_shear, + create_translate) + + +class TestCreateGrid(unittest.TestCase): + + def test_create_grid(self): + with self.assertRaisesRegex(TypeError, ''): + create_grid(None) + with self.assertRaisesRegex(TypeError, ''): + create_grid((1, 1), spacing=2.) + with self.assertRaisesRegex(TypeError, ''): + create_grid((1, 1), spacing=2.) + + g = create_grid((1, 1)) + expected = np.array([[[0.]], [[0.]], [[1.]]]) + np.testing.assert_allclose(g, expected) + + g = create_grid((1, 1), homogeneous=False) + expected = np.array([[[0.]], [[0.]]]) + np.testing.assert_allclose(g, expected) + + g = create_grid((1, 1), spacing=(1.2, 1.3)) + expected = np.array([[[0.]], [[0.]], [[1.]]]) + np.testing.assert_allclose(g, expected) + + g = create_grid((1, 1, 1), spacing=(1.2, 1.3, 1.0)) + expected = np.array([[[[0.]]], [[[0.]]], [[[0.]]], [[[1.]]]]) + np.testing.assert_allclose(g, expected) + + g = create_grid((1, 1, 1), spacing=(1.2, 1.3, 1.0), homogeneous=False) + expected = np.array([[[[0.]]], [[[0.]]], [[[0.]]]]) + np.testing.assert_allclose(g, expected) + + g = create_grid((1, 1, 1), spacing=(1.2, 1.3, 1.0), dtype=int) + np.testing.assert_equal(g.dtype, np.int64) + + g = create_grid((2, 2, 2)) + expected = np.array([[[[-0.5, -0.5], [-0.5, -0.5]], [[0.5, 0.5], [0.5, 0.5]]], + [[[-0.5, -0.5], [0.5, 0.5]], [[-0.5, -0.5], [0.5, 0.5]]], + [[[-0.5, 0.5], [-0.5, 0.5]], [[-0.5, 0.5], [-0.5, 0.5]]], + [[[1., 1.], [1., 1.]], [[1., 1.], [1., 1.]]]]) + np.testing.assert_allclose(g, expected) + + g = create_grid((2, 2, 2), spacing=(1.2, 1.3, 1.0)) + expected = np.array([[[[-0.6, -0.6], [-0.6, -0.6]], [[0.6, 0.6], [0.6, 0.6]]], + [[[-0.65, -0.65], [0.65, 0.65]], [[-0.65, -0.65], [0.65, 0.65]]], + [[[-0.5, 0.5], [-0.5, 0.5]], [[-0.5, 0.5], [-0.5, 
0.5]]], + [[[1., 1.], [1., 1.]], [[1., 1.], [1., 1.]]]]) + np.testing.assert_allclose(g, expected) + + def test_create_control_grid(self): + with self.assertRaisesRegex(TypeError, ''): + create_control_grid(None, None) + with self.assertRaisesRegex(TypeError, ''): + create_control_grid((1, 1), 2.) + + g = create_control_grid((1., 1.), (1., 1.)) + expected = np.array([ + [[-1., -1., -1.], [0., 0., 0.], [1., 1., 1.]], + [[-1., 0., 1.], [-1., 0., 1.], [-1., 0., 1.]], + [[1., 1., 1.], [1., 1., 1.], [1., 1., 1.]], + ]) + np.testing.assert_allclose(g, expected) + + g = create_control_grid((1., 1.), (2., 2.)) + expected = np.array([ + [[-2., -2., -2.], [0., 0., 0.], [2., 2., 2.]], + [[-2., 0., 2.], [-2., 0., 2.], [-2., 0., 2.]], + [[1., 1., 1.], [1., 1., 1.], [1., 1., 1.]], + ]) + np.testing.assert_allclose(g, expected) + + g = create_control_grid((2., 2.), (1., 1.)) + expected = np.array([ + [[-1.5, -1.5, -1.5, -1.5], [-0.5, -0.5, -0.5, -0.5], [0.5, 0.5, 0.5, 0.5], [1.5, 1.5, 1.5, 1.5]], + [[-1.5, -0.5, 0.5, 1.5], [-1.5, -0.5, 0.5, 1.5], [-1.5, -0.5, 0.5, 1.5], [-1.5, -0.5, 0.5, 1.5]], + [[1., 1., 1., 1.], [1., 1., 1., 1.], [1., 1., 1., 1.], [1., 1., 1., 1.]], + ]) + np.testing.assert_allclose(g, expected) + + g = create_control_grid((2., 2.), (2., 2.)) + expected = np.array([ + [[-3., -3., -3., -3.], [-1., -1., -1., -1.], [1., 1., 1., 1.], [3., 3., 3., 3.]], + [[-3., -1., 1., 3.], [-3., -1., 1., 3.], [-3., -1., 1., 3.], [-3., -1., 1., 3.]], + [[1., 1., 1., 1.], [1., 1., 1., 1.], [1., 1., 1., 1.], [1., 1., 1., 1.]], + ]) + np.testing.assert_allclose(g, expected) + + g = create_control_grid((1., 1., 1.), (2., 2., 2.), homogeneous=False) + expected = np.array([[[[-2., -2., -2.], [-2., -2., -2.], [-2., -2., -2.]], + [[0., 0., 0.], [0., 0., 0.], [0., 0., 0.]], [[2., 2., 2.], [2., 2., 2.], [2., 2., 2.]]], + [[[-2., -2., -2.], [0., 0., 0.], [2., 2., 2.]], + [[-2., -2., -2.], [0., 0., 0.], [2., 2., 2.]], + [[-2., -2., -2.], [0., 0., 0.], [2., 2., 2.]]], + [[[-2., 0., 2.], [-2., 
0., 2.], [-2., 0., 2.]], + [[-2., 0., 2.], [-2., 0., 2.], [-2., 0., 2.]], + [[-2., 0., 2.], [-2., 0., 2.], [-2., 0., 2.]]]]) + np.testing.assert_allclose(g, expected) + + +def test_assert(func, params, expected): + m = func(*params) + np.testing.assert_allclose(m, expected, atol=1e-7) + + +class TestCreateAffine(unittest.TestCase): + + def test_create_rotate(self): + with self.assertRaisesRegex(TypeError, ''): + create_rotate(2, None) + + with self.assertRaisesRegex(ValueError, ''): + create_rotate(5, 1) + + test_assert(create_rotate, (2, 1.1), + np.array([[0.45359612, -0.89120736, 0.], [0.89120736, 0.45359612, 0.], [0., 0., 1.]])) + test_assert( + create_rotate, (3, 1.1), + np.array([[1., 0., 0., 0.], [0., 0.45359612, -0.89120736, 0.], [0., 0.89120736, 0.45359612, 0.], + [0., 0., 0., 1.]])) + test_assert( + create_rotate, (3, (1.1, 1)), + np.array([[0.54030231, 0., 0.84147098, 0.], [0.74992513, 0.45359612, -0.48152139, 0.], + [-0.38168798, 0.89120736, 0.24507903, 0.], [0., 0., 0., 1.]])) + test_assert( + create_rotate, (3, (1, 1, 1.1)), + np.array([[0.24507903, -0.48152139, 0.84147098, 0.], [0.80270075, -0.38596121, -0.45464871, 0.], + [0.54369824, 0.78687425, 0.29192658, 0.], [0., 0., 0., 1.]])) + test_assert(create_rotate, (3, (0, 0, np.pi / 2)), + np.array([[0., -1., 0., 0.], [1., 0., 0., 0.], [0., 0., 1., 0.], [0., 0., 0., 1.]])) + + def test_create_shear(self): + test_assert(create_shear, (2, 1.), np.array([[1., 1., 0.], [0., 1., 0.], [0., 0., 1.]])) + test_assert(create_shear, (2, (2., 3.)), np.array([[1., 2., 0.], [3., 1., 0.], [0., 0., 1.]])) + test_assert(create_shear, (3, 1.), + np.array([[1., 1., 0., 0.], [0., 1., 0., 0.], [0., 0., 1., 0.], [0., 0., 0., 1.]])) + + def test_create_scale(self): + test_assert(create_scale, (2, 2), np.array([[2., 0., 0.], [0., 1., 0.], [0., 0., 1.]])) + test_assert(create_scale, (2, [2, 2, 2]), np.array([[2., 0., 0.], [0., 2., 0.], [0., 0., 1.]])) + test_assert(create_scale, (3, [1.5, 2.4]), + np.array([[1.5, 0., 0., 0.], 
[0., 2.4, 0., 0.], [0., 0., 1., 0.], [0., 0., 0., 1.]])) + test_assert(create_scale, (3, 1.5), + np.array([[1.5, 0., 0., 0.], [0., 1., 0., 0.], [0., 0., 1., 0.], [0., 0., 0., 1.]])) + test_assert(create_scale, (3, [1, 2, 3, 4, 5]), + np.array([[1., 0., 0., 0.], [0., 2., 0., 0.], [0., 0., 3., 0.], [0., 0., 0., 1.]])) + + def test_create_translate(self): + test_assert(create_translate, (2, 2), np.array([[1., 0., 2.], [0., 1., 0.], [0., 0., 1.]])) + test_assert(create_translate, (2, [2, 2, 2]), np.array([[1., 0., 2.], [0., 1., 2.], [0., 0., 1.]])) + test_assert(create_translate, (3, [1.5, 2.4]), + np.array([[1., 0., 0., 1.5], [0., 1., 0., 2.4], [0., 0., 1., 0.], [0., 0., 0., 1.]])) + test_assert(create_translate, (3, 1.5), + np.array([[1., 0., 0., 1.5], [0., 1., 0., 0.], [0., 0., 1., 0.], [0., 0., 0., 1.]])) + test_assert(create_translate, (3, [1, 2, 3, 4, 5]), + np.array([[1., 0., 0., 1.], [0., 1., 0., 2.], [0., 0., 1., 3.], [0., 0., 0., 1.]])) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_gaussian_filter.py b/tests/test_gaussian_filter.py new file mode 100644 index 0000000000..ade658e74c --- /dev/null +++ b/tests/test_gaussian_filter.py @@ -0,0 +1,58 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest + +import numpy as np +import torch + +from monai.networks.layers.simplelayers import GaussianFilter + + +class GaussianFilterTestCase(unittest.TestCase): + + def test_1d(self): + a = torch.ones(1, 8, 10) + g = GaussianFilter(1, 3, 3, torch.device('cpu:0')) + expected = np.array([[ + [ + 0.56658804, 0.69108766, 0.79392236, 0.86594427, 0.90267116, 0.9026711, 0.8659443, 0.7939224, 0.6910876, + 0.56658804 + ], + ]]) + expected = np.tile(expected, (1, 8, 1)) + np.testing.assert_allclose(g(a).cpu().numpy(), expected) + + def test_2d(self): + a = torch.ones(1, 1, 3, 3) + g = GaussianFilter(2, 3, 3, torch.device('cpu:0')) + expected = np.array([[[[0.13380532, 0.14087981, 0.13380532], [0.14087981, 0.14832835, 0.14087981], + [0.13380532, 0.14087981, 0.13380532]]]]) + + np.testing.assert_allclose(g(a).cpu().numpy(), expected) + + def test_3d(self): + a = torch.ones(1, 1, 4, 3, 4) + g = GaussianFilter(3, 3, 3, torch.device('cpu:0')) + expected = np.array( + [[[[[0.07294822, 0.08033235, 0.08033235, 0.07294822], [0.07680509, 0.08457965, 0.08457965, 0.07680509], + [0.07294822, 0.08033235, 0.08033235, 0.07294822]], + [[0.08033235, 0.08846395, 0.08846395, 0.08033235], [0.08457965, 0.09314119, 0.09314119, 0.08457966], + [0.08033235, 0.08846396, 0.08846396, 0.08033236]], + [[0.08033235, 0.08846395, 0.08846395, 0.08033235], [0.08457965, 0.09314119, 0.09314119, 0.08457966], + [0.08033235, 0.08846396, 0.08846396, 0.08033236]], + [[0.07294822, 0.08033235, 0.08033235, 0.07294822], [0.07680509, 0.08457965, 0.08457965, 0.07680509], + [0.07294822, 0.08033235, 0.08033235, 0.07294822]]]]],) + np.testing.assert_allclose(g(a).cpu().numpy(), expected) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_random_affine.py b/tests/test_random_affine.py new file mode 100644 index 0000000000..5149a5a80d --- /dev/null +++ b/tests/test_random_affine.py @@ -0,0 +1,67 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + +import numpy as np +import torch +from parameterized import parameterized + +from monai.transforms.transforms import RandAffine + +TEST_CASES = [ + [ + dict(as_tensor_output=False, device=None), {'img': torch.ones((3, 3, 3)), 'spatial_size': (2, 2)}, + np.ones((3, 2, 2)) + ], + [ + dict(as_tensor_output=True, device=None), {'img': torch.ones((1, 3, 3, 3)), 'spatial_size': (2, 2, 2)}, + torch.ones((1, 2, 2, 2)) + ], + [ + dict(prob=0.9, + rotate_range=(np.pi / 2,), + shear_range=[1, 2], + translate_range=[2, 1], + as_tensor_output=True, + spatial_size=(2, 2, 2), + device=None), {'img': torch.ones((1, 3, 3, 3)), 'mode': 'bilinear'}, + torch.tensor([[[[1.0000, 0.7776], [0.4174, 0.0780]], [[0.0835, 1.0000], [0.3026, 0.5732]]]],) + ], + [ + dict(prob=0.9, + rotate_range=(np.pi / 2,), + shear_range=[1, 2], + translate_range=[2, 1], + scale_range=[.1, .2], + as_tensor_output=True, + device=None), {'img': torch.arange(64).reshape((1, 8, 8)), 'spatial_size': (3, 3)}, + torch.tensor([[[27.3614, 18.0237, 8.6860], [40.0440, 30.7063, 21.3686], [52.7266, 43.3889, 34.0512]]]) + ], +] + + +class TestRandAffine(unittest.TestCase): + + @parameterized.expand(TEST_CASES) + def test_rand_affine(self, input_param, input_data, expected_val): + g = RandAffine(**input_param) + g.set_random_state(123) + result = g(**input_data) + self.assertEqual(torch.is_tensor(result), torch.is_tensor(expected_val)) + if torch.is_tensor(result): + np.testing.assert_allclose(result.cpu().numpy(), 
expected_val.cpu().numpy(), rtol=1e-4, atol=1e-4) + else: + np.testing.assert_allclose(result, expected_val, rtol=1e-4, atol=1e-4) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_random_affine_grid.py b/tests/test_random_affine_grid.py new file mode 100644 index 0000000000..b5c51e394e --- /dev/null +++ b/tests/test_random_affine_grid.py @@ -0,0 +1,96 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + +import numpy as np +import torch +from parameterized import parameterized + +from monai.transforms.transforms import RandAffineGrid + +TEST_CASES = [ + [{'as_tensor_output': False, 'device': None}, {'grid': torch.ones((3, 3, 3))}, + np.ones((3, 3, 3))], + [{'rotate_range': (1, 2), 'translate_range': (3, 3, 3)}, {'grid': torch.arange(0, 27).reshape((3, 3, 3))}, + torch.tensor( + np.array([[[-32.81998, -33.910976, -35.001972], [-36.092968, -37.183964, -38.27496], + [-39.36596, -40.456955, -41.54795]], + [[2.1380205, 3.1015975, 4.0651755], [5.028752, 5.9923296, 6.955907], [7.919484, 8.883063, 9.84664]], + [[18., 19., 20.], [21., 22., 23.], [24., 25., 26.]]]))], + [{'translate_range': (3, 3, 3), 'as_tensor_output': False, 'device': torch.device('cpu:0')}, + {'spatial_size': (3, 3, 3)}, + np.array([[[[0.17881513, 0.17881513, 0.17881513], [0.17881513, 0.17881513, 0.17881513], + [0.17881513, 0.17881513, 0.17881513]], + [[1.1788151, 1.1788151, 1.1788151], [1.1788151, 1.1788151, 1.1788151], + [1.1788151, 
1.1788151, 1.1788151]], + [[2.1788151, 2.1788151, 2.1788151], [2.1788151, 2.1788151, 2.1788151], + [2.1788151, 2.1788151, 2.1788151]]], + [[[-2.283164, -2.283164, -2.283164], [-1.283164, -1.283164, -1.283164], + [-0.28316402, -0.28316402, -0.28316402]], + [[-2.283164, -2.283164, -2.283164], [-1.283164, -1.283164, -1.283164], + [-0.28316402, -0.28316402, -0.28316402]], + [[-2.283164, -2.283164, -2.283164], [-1.283164, -1.283164, -1.283164], + [-0.28316402, -0.28316402, -0.28316402]]], + [[[-2.6388912, -1.6388912, -0.6388912], [-2.6388912, -1.6388912, -0.6388912], + [-2.6388912, -1.6388912, -0.6388912]], + [[-2.6388912, -1.6388912, -0.6388912], [-2.6388912, -1.6388912, -0.6388912], + [-2.6388912, -1.6388912, -0.6388912]], + [[-2.6388912, -1.6388912, -0.6388912], [-2.6388912, -1.6388912, -0.6388912], + [-2.6388912, -1.6388912, -0.6388912]]], + [[[1., 1., 1.], [1., 1., 1.], [1., 1., 1.]], [[1., 1., 1.], [1., 1., 1.], [1., 1., 1.]], + [[1., 1., 1.], [1., 1., 1.], [1., 1., 1.]]]])], + [{'rotate_range': (1., 1., 1.), 'shear_range': (0.1,), 'scale_range': (1.2,)}, + {'grid': torch.arange(0, 108).reshape((4, 3, 3, 3))}, + torch.tensor( + np.array([[[[-9.4201e+00, -8.1672e+00, -6.9143e+00], [-5.6614e+00, -4.4085e+00, -3.1556e+00], + [-1.9027e+00, -6.4980e-01, 6.0310e-01]], + [[1.8560e+00, 3.1089e+00, 4.3618e+00], [5.6147e+00, 6.8676e+00, 8.1205e+00], + [9.3734e+00, 1.0626e+01, 1.1879e+01]], + [[1.3132e+01, 1.4385e+01, 1.5638e+01], [1.6891e+01, 1.8144e+01, 1.9397e+01], + [2.0650e+01, 2.1902e+01, 2.3155e+01]]], + [[[9.9383e-02, -4.8845e-01, -1.0763e+00], [-1.6641e+00, -2.2519e+00, -2.8398e+00], + [-3.4276e+00, -4.0154e+00, -4.6032e+00]], + [[-5.1911e+00, -5.7789e+00, -6.3667e+00], [-6.9546e+00, -7.5424e+00, -8.1302e+00], + [-8.7180e+00, -9.3059e+00, -9.8937e+00]], + [[-1.0482e+01, -1.1069e+01, -1.1657e+01], [-1.2245e+01, -1.2833e+01, -1.3421e+01], + [-1.4009e+01, -1.4596e+01, -1.5184e+01]]], + [[[5.9635e+01, 6.1199e+01, 6.2764e+01], [6.4328e+01, 6.5892e+01, 6.7456e+01], + 
[6.9021e+01, 7.0585e+01, 7.2149e+01]], + [[7.3714e+01, 7.5278e+01, 7.6842e+01], [7.8407e+01, 7.9971e+01, 8.1535e+01], + [8.3099e+01, 8.4664e+01, 8.6228e+01]], + [[8.7792e+01, 8.9357e+01, 9.0921e+01], [9.2485e+01, 9.4049e+01, 9.5614e+01], + [9.7178e+01, 9.8742e+01, 1.0031e+02]]], + [[[8.1000e+01, 8.2000e+01, 8.3000e+01], [8.4000e+01, 8.5000e+01, 8.6000e+01], + [8.7000e+01, 8.8000e+01, 8.9000e+01]], + [[9.0000e+01, 9.1000e+01, 9.2000e+01], [9.3000e+01, 9.4000e+01, 9.5000e+01], + [9.6000e+01, 9.7000e+01, 9.8000e+01]], + [[9.9000e+01, 1.0000e+02, 1.0100e+02], [1.0200e+02, 1.0300e+02, 1.0400e+02], + [1.0500e+02, 1.0600e+02, 1.0700e+02]]]]))], +] + + +class TestRandAffineGrid(unittest.TestCase): + + @parameterized.expand(TEST_CASES) + def test_rand_affine_grid(self, input_param, input_data, expected_val): + g = RandAffineGrid(**input_param) + g.set_random_state(123) + result = g(**input_data) + self.assertEqual(torch.is_tensor(result), torch.is_tensor(expected_val)) + if torch.is_tensor(result): + np.testing.assert_allclose(result.cpu().numpy(), expected_val.cpu().numpy(), rtol=1e-4, atol=1e-4) + else: + np.testing.assert_allclose(result, expected_val, rtol=1e-4, atol=1e-4) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_random_deform_grid.py b/tests/test_random_deform_grid.py new file mode 100644 index 0000000000..390672ab98 --- /dev/null +++ b/tests/test_random_deform_grid.py @@ -0,0 +1,94 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + +import numpy as np +import torch +from parameterized import parameterized + +from monai.transforms.transforms import RandDeformGrid + +TEST_CASES = [ + [ + dict(spacing=(1, 2), magnitude_range=(1., 2.), as_tensor_output=False, device=None), + {'spatial_size': (3, 3)}, + np.array([[[-3.45774551, -0.6608006, -1.62002671, -4.02259806, -2.77692349], + [1.21748926, -4.25845712, -1.57592837, 0.69985342, -2.16382767], + [-0.91158377, -0.12717178, 2.00258405, -0.85789449, -0.59616292], + [0.41676882, 3.96204313, 3.93633727, 2.34820726, 1.51855713], + [2.99011186, 4.00170105, 0.74339613, 3.57886072, 0.31633439]], + [[-4.85634965, -0.78197195, -1.91838077, 1.81192079, 2.84286669], + [-4.34323645, -5.75784424, -2.37875058, 1.06023016, 5.24536301], + [-4.23315172, -1.99617861, 0.92412057, 0.81899041, 4.38084451], + [-5.08141703, -4.31985211, -0.52488611, 2.77048576, 4.45464513], + [-4.01588556, 1.21238156, 0.55444352, 3.31421131, 7.00529793]], + [[1., 1., 1., 1., 1.], [1., 1., 1., 1., 1.], [1., 1., 1., 1., 1.], [1., 1., 1., 1., 1.], + [1., 1., 1., 1., 1.]]]) + ], + [ + dict(spacing=(1, 2, 2), magnitude_range=(1., 3.), as_tensor_output=False, device=None), + {'spatial_size': (1, 2, 2)}, + np.array([[[[-2.81748977, 0.66968869, -0.52625642, -3.52173734], + [-1.96865364, 1.76472402, -5.06258324, -1.71805669], + [1.11934537, -2.45103851, -2.13654555, -1.15855539], + [1.49678424, -2.06960677, -1.74328475, -1.7271617]], + [[3.69301983, 3.66097025, 1.68091953, 0.6465273], [1.23445289, 2.49568333, -1.56671014, 1.96849393], + [-2.09916271, -1.06768069, 1.51861453, -2.39180117], + [-0.23449363, -1.44269211, -0.42794076, -4.68520972]], + [[-1.96578162, -0.17168741, 2.55269525, 0.70931081], + [1.00476444, 2.15217619, -0.47246061, 1.4748298], [-0.34829048, -1.89234811, 0.34558185, 1.9606272], + [1.56684302, 0.98019418, 5.00513708, 1.69126978]]], + [[[-1.36146598, 
0.7469491, -5.16647064, -4.73906938], + [1.91920577, -2.33606298, -0.95030633, 0.7901769], [2.49116076, 3.93791246, 3.50390686, 2.79030531], + [1.70638302, 4.33070564, 3.52613304, 0.77965554]], + [[-0.62725323, -1.64857887, -2.92384357, -3.39022706], + [-3.00611521, -0.66597021, -0.21577072, -2.39146379], + [2.94568388, -0.83686357, -2.55435186, 2.74064119], [2.3247117, 2.78900974, 1.59788581, + 0.31140512]], + [[-0.89856598, -4.15325814, -0.21934502, -1.64845891], + [-1.52694693, -2.81794479, -2.22623861, -3.0299247], + [4.49410486, 1.27529645, 2.92559679, -1.12171559], [3.30307684, 4.97189727, 2.43914751, + 4.7262225]]], + [[[-4.81571068, -3.28263239, 1.635167, 2.36520831], [-1.92511521, -4.311247, 2.19242556, 7.34990574], + [-3.04122716, -0.94284154, 1.30058968, -0.11719455], + [-2.28657395, -3.68766906, 0.28400757, 5.08072864]], + [[-4.2308508, -0.16084264, 2.69545963, 3.4666492], + [-5.29514976, -1.55660775, 4.28031473, -0.39019547], + [-3.4617024, -1.92430221, 1.20214712, + 4.25261228], [-0.30683774, -1.4524049, 2.35996724, 3.83663135]], + [[-2.20587965, -1.94408353, -0.66964855, 1.15838178], + [-4.26637632, -0.46145396, 2.27393031, + 3.5415298], [-3.91902371, 2.02343374, 3.54278271, 2.40735681], + [-4.3785335, -0.78200288, 3.12162619, 3.55709275]]], + [[[1., 1., 1., 1.], [1., 1., 1., 1.], [1., 1., 1., 1.], [1., 1., 1., 1.]], + [[1., 1., 1., 1.], [1., 1., 1., 1.], [1., 1., 1., 1.], [1., 1., 1., 1.]], + [[1., 1., 1., 1.], [1., 1., 1., 1.], [1., 1., 1., 1.], [1., 1., 1., 1.]]]]) + ], +] + + +class TestRandDeformGrid(unittest.TestCase): + + @parameterized.expand(TEST_CASES) + def test_rand_deform_grid(self, input_param, input_data, expected_val): + g = RandDeformGrid(**input_param) + g.set_random_state(123) + result = g(**input_data) + self.assertEqual(torch.is_tensor(result), torch.is_tensor(expected_val)) + if torch.is_tensor(result): + np.testing.assert_allclose(result.cpu().numpy(), expected_val.cpu().numpy(), rtol=1e-4, atol=1e-4) + else: + 
np.testing.assert_allclose(result, expected_val, rtol=1e-4, atol=1e-4) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_random_elastic_2d.py b/tests/test_random_elastic_2d.py new file mode 100644 index 0000000000..53f768bf36 --- /dev/null +++ b/tests/test_random_elastic_2d.py @@ -0,0 +1,66 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + +import numpy as np +import torch +from parameterized import parameterized + +from monai.transforms.transforms import Rand2DElastic + +TEST_CASES = [ + [{'spacing': (.3, .3), 'magnitude_range': (1., 2.), 'prob': 0.0, 'as_tensor_output': False, 'device': None}, + {'img': torch.ones((3, 3, 3)), 'spatial_size': (2, 2)}, + np.ones((3, 2, 2))], + [ + {'spacing': (.3, .3), 'magnitude_range': (1., 2.), 'prob': 0.9, 'as_tensor_output': False, 'device': None}, + {'img': torch.ones((3, 3, 3)), 'spatial_size': (2, 2), 'mode': 'bilinear'}, + np.array([[[0., 0.608901], [1., 0.5702355]], [[0., 0.608901], [1., 0.5702355]], [[0., 0.608901], + [1., 0.5702355]]]), + ], + [ + { + 'spacing': (1., 1.), 'magnitude_range': (1., 1.), 'scale_range': [1.2, 2.2], 'prob': 0.9, 'padding_mode': + 'border', 'as_tensor_output': True, 'device': None, 'spatial_size': (2, 2) + }, + {'img': torch.arange(27).reshape((3, 3, 3))}, + torch.tensor([[[1.0849, 1.1180], [6.8100, 7.0265]], [[10.0849, 10.1180], [15.8100, 16.0265]], + [[19.0849, 19.1180], [24.8100, 25.0265]]]), + ], + [ + { + 'spacing': (.3, 
.3), 'magnitude_range': (1., 2.), 'translate_range': [-.2, .4], 'scale_range': [1.2, 2.2], + 'prob': 0.9, 'as_tensor_output': False, 'device': None + }, + {'img': torch.arange(27).reshape((3, 3, 3)), 'spatial_size': (2, 2)}, + np.array([[[0., 1.1731534], [3.8834658, 6.0565934]], [[0., 9.907095], [12.883466, 15.056594]], + [[0., 18.641037], [21.883465, 24.056593]]]), + ], +] + + +class TestRand2DElastic(unittest.TestCase): + + @parameterized.expand(TEST_CASES) + def test_rand_2d_elastic(self, input_param, input_data, expected_val): + g = Rand2DElastic(**input_param) + g.set_random_state(123) + result = g(**input_data) + self.assertEqual(torch.is_tensor(result), torch.is_tensor(expected_val)) + if torch.is_tensor(result): + np.testing.assert_allclose(result.cpu().numpy(), expected_val.cpu().numpy(), rtol=1e-4, atol=1e-4) + else: + np.testing.assert_allclose(result, expected_val, rtol=1e-4, atol=1e-4) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_random_elastic_3d.py b/tests/test_random_elastic_3d.py new file mode 100644 index 0000000000..5fb3a3130a --- /dev/null +++ b/tests/test_random_elastic_3d.py @@ -0,0 +1,55 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest + +import numpy as np +import torch +from parameterized import parameterized + +from monai.transforms.transforms import Rand3DElastic + +TEST_CASES = [ + [{'magnitude_range': (.3, 2.3), 'sigma_range': (1., 20.), 'prob': 0.0, 'as_tensor_output': False, 'device': None}, + {'img': torch.ones((2, 3, 3, 3)), 'spatial_size': (2, 2, 2)}, + np.ones((2, 2, 2, 2))], + [ + {'magnitude_range': (.3, .3), 'sigma_range': (1., 2.), 'prob': 0.9, 'as_tensor_output': False, 'device': None}, + {'img': torch.arange(27).reshape((1, 3, 3, 3)), 'spatial_size': (2, 2, 2)}, + np.array([[[[3.2385552, 4.753422], [7.779232, 9.286472]], [[16.769115, 18.287868], [21.300673, 22.808704]]]]), + ], + [ + { + 'magnitude_range': (.3, .3), 'sigma_range': (1., 2.), 'prob': 0.9, 'rotate_range': [1, 1, 1], + 'as_tensor_output': False, 'device': None, 'spatial_size': (2, 2, 2) + }, + {'img': torch.arange(27).reshape((1, 3, 3, 3)), 'mode': 'bilinear'}, + np.array([[[[6.016205, 2.3112855], [12.412318, 11.182229]], [[14.619441, 6.9230556], [17.23721, 16.506298]]]]), + ], +] + + +class TestRand3DElastic(unittest.TestCase): + + @parameterized.expand(TEST_CASES) + def test_rand_3d_elastic(self, input_param, input_data, expected_val): + g = Rand3DElastic(**input_param) + g.set_random_state(123) + result = g(**input_data) + self.assertEqual(torch.is_tensor(result), torch.is_tensor(expected_val)) + if torch.is_tensor(result): + np.testing.assert_allclose(result.cpu().numpy(), expected_val.cpu().numpy(), rtol=1e-4, atol=1e-4) + else: + np.testing.assert_allclose(result, expected_val, rtol=1e-4, atol=1e-4) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_resampler.py b/tests/test_resampler.py new file mode 100644 index 0000000000..fa62e126c6 --- /dev/null +++ b/tests/test_resampler.py @@ -0,0 +1,75 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + +import numpy as np +import torch +from parameterized import parameterized + +from monai.transforms.transforms import Resample +from monai.transforms.utils import create_grid + +TEST_CASES = [ + [ + dict(padding_mode='zeros', as_tensor_output=False, device=None), + {'grid': create_grid((2, 2)), 'img': np.arange(4).reshape((1, 2, 2))}, + np.array([[[0., 0.25], [0.5, 0.75]]]) + ], + [ + dict(padding_mode='zeros', as_tensor_output=False, device=None), + {'grid': create_grid((4, 4)), 'img': np.arange(4).reshape((1, 2, 2))}, + np.array([[[0., 0., 0., 0.], [0., 0., 0.25, 0.], [0., 0.5, 0.75, 0.], [0., 0., 0., 0.]]]) + ], + [ + dict(padding_mode='border', as_tensor_output=False, device=None), + {'grid': create_grid((4, 4)), 'img': np.arange(4).reshape((1, 2, 2))}, + np.array([[[0., 0., 1., 1.], [0., 0., 1., 1.], [2., 2., 3, 3.], [2., 2., 3., 3.]]]) + ], + [ + dict(padding_mode='reflection', as_tensor_output=False, device=None), + {'grid': create_grid((4, 4)), 'img': np.arange(4).reshape((1, 2, 2)), 'mode': 'nearest'}, + np.array([[[3., 2., 3., 2.], [1., 0., 1., 0.], [3., 2., 3., 2.], [1., 0., 1., 0.]]]) + ], + [ + dict(padding_mode='zeros', as_tensor_output=False, device=None), + {'grid': create_grid((4, 4, 4)), 'img': np.arange(8).reshape((1, 2, 2, 2)), 'mode': 'bilinear'}, + np.array([[[[0., 0., 0., 0.], [0., 0., 0., 0.], [0., 0., 0., 0.], [0., 0., 0., 0.]], + [[0., 0., 0., 0.], [0., 0., 0.125, 0.], [0., 0.25, 0.375, 0.], [0., 0., 0., 0.]], + [[0., 0., 0., 0.], [0., 0.5, 0.625, 0.], [0., 0.75, 0.875, 0.], [0., 0., 0., 0.]], + [[0., 0., 0., 0.], 
[0., 0., 0., 0.], [0., 0., 0., 0.], [0., 0., 0., 0.]]]]) + ], + [ + dict(padding_mode='border', as_tensor_output=False, device=None), + {'grid': create_grid((4, 4, 4)), 'img': np.arange(8).reshape((1, 2, 2, 2)), 'mode': 'bilinear'}, + np.array([[[[0., 0., 1., 1.], [0., 0., 1., 1.], [2., 2., 3., 3.], [2., 2., 3., 3.]], + [[0., 0., 1., 1.], [0., 0., 1., 1.], [2., 2., 3., 3.], [2., 2., 3., 3.]], + [[4., 4., 5., 5.], [4., 4., 5., 5.], [6., 6., 7., 7.], [6., 6., 7., 7.]], + [[4., 4., 5., 5.], [4., 4., 5., 5.], [6., 6., 7., 7.], [6., 6., 7., 7.]]]]) + ], +] + + +class TestResample(unittest.TestCase): + + @parameterized.expand(TEST_CASES) + def test_resample(self, input_param, input_data, expected_val): + g = Resample(**input_param) + result = g(**input_data) + self.assertEqual(torch.is_tensor(result), torch.is_tensor(expected_val)) + if torch.is_tensor(result): + np.testing.assert_allclose(result.cpu().numpy(), expected_val.cpu().numpy(), rtol=1e-4, atol=1e-4) + else: + np.testing.assert_allclose(result, expected_val, rtol=1e-4, atol=1e-4) + + +if __name__ == '__main__': + unittest.main() From 39b2cb245118f0833cbb25663d827da6ac281341 Mon Sep 17 00:00:00 2001 From: Nic Ma Date: Tue, 10 Mar 2020 05:10:52 +0800 Subject: [PATCH 12/40] 144 loadnifti transform common dataset (#145) * [DLMED] add LoadNifti transform and common Dataset * [DLMED] update according to comments * [DLMED] fix typo * [DLMED] update example to use latest API * [DLMED] update code to add more features * [DLMED] update to AsChannelFirst * [DLMED] update to use np.moveaxis API instead * [DLMED] update according to comments * update generalized dice to be consistent with the changes in dice loss Co-authored-by: Wenqi Li --- examples/unet_segmentation_3d_dict.py | 44 +++++++---- monai/data/dataset.py | 45 +++++++++++ monai/data/nifti_reader.py | 73 ------------------ monai/data/synthetic.py | 18 ++++- monai/losses/dice.py | 4 +- monai/transforms/composables.py | 107 ++++++++++++++++++++------ 
monai/transforms/transforms.py | 75 ++++++++++++++++++ tests/test_as_channel_first.py | 49 ++++++++++++ tests/test_as_channel_firstd.py | 58 ++++++++++++++ tests/test_dataset.py | 64 +++++++++++++++ tests/test_dice_loss.py | 6 +- tests/test_generalized_dice_loss.py | 8 +- tests/test_load_nifti.py | 54 +++++++++++++ tests/test_load_niftid.py | 49 ++++++++++++ tests/test_spatial_crop.py | 5 +- 15 files changed, 533 insertions(+), 126 deletions(-) create mode 100644 monai/data/dataset.py create mode 100644 tests/test_as_channel_first.py create mode 100644 tests/test_as_channel_firstd.py create mode 100644 tests/test_dataset.py create mode 100644 tests/test_load_nifti.py create mode 100644 tests/test_load_niftid.py diff --git a/examples/unet_segmentation_3d_dict.py b/examples/unet_segmentation_3d_dict.py index 8c6955d87b..d7ea3795ea 100644 --- a/examples/unet_segmentation_3d_dict.py +++ b/examples/unet_segmentation_3d_dict.py @@ -28,13 +28,14 @@ import monai import monai.transforms.compose as transforms -from monai.data.nifti_reader import NiftiDatasetd -from monai.transforms.composables import AddChanneld, RandRotate90d +from monai.transforms.composables import \ + LoadNiftid, AsChannelFirstd, RandCropByPosNegLabeld, RandRotate90d from monai.handlers.stats_handler import StatsHandler from monai.handlers.mean_dice import MeanDice from monai.visualize import img2tensorboard from monai.data.synthetic import create_test_image_3d from monai.handlers.utils import stopping_fn_from_metric +from monai.data.utils import list_data_collate monai.config.print_config() @@ -42,28 +43,37 @@ tempdir = tempfile.mkdtemp() for i in range(50): - im, seg = create_test_image_3d(128, 128, 128) + im, seg = create_test_image_3d(128, 128, 128, channel_dim=-1) n = nib.Nifti1Image(im, np.eye(4)) - nib.save(n, os.path.join(tempdir, 'im%i.nii.gz' % i)) + nib.save(n, os.path.join(tempdir, 'img%i.nii.gz' % i)) n = nib.Nifti1Image(seg, np.eye(4)) nib.save(n, os.path.join(tempdir, 'seg%i.nii.gz' % i)) 
-images = sorted(glob(os.path.join(tempdir, 'im*.nii.gz'))) +images = sorted(glob(os.path.join(tempdir, 'img*.nii.gz'))) segs = sorted(glob(os.path.join(tempdir, 'seg*.nii.gz'))) +train_files = [{'img': img, 'seg': seg} for img, seg in zip(images[:40], segs[:40])] +val_files = [{'img': img, 'seg': seg} for img, seg in zip(images[-10:], segs[-10:])] # Define transforms for image and segmentation -transforms = transforms.Compose([ - AddChanneld(keys=['image', 'seg']), - RandRotate90d(keys=['image', 'seg'], prob=0.8, axes=[1, 3]) +train_transforms = transforms.Compose([ + LoadNiftid(keys=['img', 'seg']), + AsChannelFirstd(keys=['img', 'seg'], channel_dim=-1), + RandCropByPosNegLabeld(keys=['img', 'seg'], label_key='seg', size=[96, 96, 96], pos=1, neg=1, num_samples=4), + RandRotate90d(keys=['img', 'seg'], prob=0.8, axes=[1, 3]) +]) +val_transforms = transforms.Compose([ + LoadNiftid(keys=['img', 'seg']), + AsChannelFirstd(keys=['img', 'seg'], channel_dim=-1) ]) # Define nifti dataset, dataloader. 
-ds = NiftiDatasetd(images, segs, transform=transforms) -loader = DataLoader(ds, batch_size=10, num_workers=2, pin_memory=torch.cuda.is_available()) +ds = monai.data.Dataset(data=train_files, transform=train_transforms) +loader = DataLoader(ds, batch_size=2, num_workers=2, collate_fn=list_data_collate, + pin_memory=torch.cuda.is_available()) check_data = monai.utils.misc.first(loader) -print(check_data['image'].shape, check_data['seg'].shape) +print(check_data['img'].shape, check_data['seg'].shape) lr = 1e-5 @@ -88,7 +98,7 @@ def _loss_fn(i, j): # Create trainer def prepare_batch(batch, device=None, non_blocking=False): - return _prepare_batch((batch['image'], batch['seg']), device, non_blocking) + return _prepare_batch((batch['img'], batch['seg']), device, non_blocking) device = torch.device("cuda:0") @@ -160,8 +170,9 @@ def log_training_loss(engine): evaluator.add_event_handler(event_name=Events.EPOCH_COMPLETED, handler=early_stopper) # create a validation data loader -val_ds = NiftiDatasetd(images[-20:], segs[-20:], transform=transforms) -val_loader = DataLoader(ds, batch_size=5, num_workers=8, pin_memory=torch.cuda.is_available()) +val_ds = monai.data.Dataset(data=val_files, transform=val_transforms) +val_loader = DataLoader(ds, batch_size=5, num_workers=8, collate_fn=list_data_collate, + pin_memory=torch.cuda.is_available()) @trainer.on(Events.EPOCH_COMPLETED(every=validation_every_n_epochs)) @@ -178,8 +189,9 @@ def log_metrics_to_tensorboard(engine): # create a training data loader logging.basicConfig(stream=sys.stdout, level=logging.INFO) -train_ds = NiftiDatasetd(images[:20], segs[:20], transform=transforms) -train_loader = DataLoader(train_ds, batch_size=5, num_workers=8, pin_memory=torch.cuda.is_available()) +train_ds = monai.data.Dataset(data=train_files, transform=train_transforms) +train_loader = DataLoader(train_ds, batch_size=2, num_workers=8, collate_fn=list_data_collate, + pin_memory=torch.cuda.is_available()) train_epochs = 30 state = 
trainer.run(train_loader, train_epochs) diff --git a/monai/data/dataset.py b/monai/data/dataset.py new file mode 100644 index 0000000000..4b3221d19d --- /dev/null +++ b/monai/data/dataset.py @@ -0,0 +1,45 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import torch +from monai.utils.module import export + + +@export("monai.data") +class Dataset(torch.utils.data.Dataset): + """ + General Dataset to handle dictionary format data, it can operate transforms for specific fields. + For example, typical input data can be a list of dictionaries: + [{ { { + 'img': 'image1.nii.gz', 'img': 'image2.nii.gz', 'img': 'image3.nii.gz', + 'seg': 'label1.nii.gz', 'seg': 'label2.nii.gz', 'seg': 'label3.nii.gz', + 'extra': 123 'extra': 456 'extra': 789 + }, }, }] + """ + + def __init__(self, data, transform=None): + """ + Args: + data (Iterable): input data to load and transform to generate dataset for model. + transform (Callable, optional): transforms to execute operations on input data. 
+ """ + self.data = data + self.transform = transform + + def __len__(self): + return len(self.data) + + def __getitem__(self, index): + data = self.data[index] + if self.transform is not None: + data = self.transform(data) + + return data diff --git a/monai/data/nifti_reader.py b/monai/data/nifti_reader.py index 753691de20..3ece2264d9 100644 --- a/monai/data/nifti_reader.py +++ b/monai/data/nifti_reader.py @@ -135,76 +135,3 @@ def __getitem__(self, index): continue compatible_meta[meta_key] = meta_datum return img, target, compatible_meta - - -@export("monai.data") -class NiftiDatasetd(Dataset): - """ - Loads image/segmentation pairs of Nifti files from the given filename lists. Dict level transformations can be - specified for the dictionary data which is constructed by image, label and other metadata. - """ - - def __init__(self, image_files, seg_files=None, labels=None, as_closest_canonical=False, transform=None, dtype=None): - """ - Initializes the dataset with the image and segmentation filename lists. The transform `transform` is applied - to the images and `seg_transform` to the segmentations. - - Args: - image_files (list of str): list of image filenames. - seg_files (list of str): if in segmentation task, list of segmentation filenames. - labels (list or array): if in classification task, list of classification labels. - as_closest_canonical (bool): if True, load the image as closest to canonical orientation. - transform (Callable, optional): dict transforms to excute operations on dictionary data. - dtype (np.dtype, optional): if not None convert the loaded image to this data type. 
- """ - - if len(image_files) != len(seg_files): - raise ValueError('Must have same number of image and segmentation files') - - self.image_files = image_files - self.seg_files = seg_files - self.labels = labels - self.as_closest_canonical = as_closest_canonical - self.transform = transform - self.dtype = dtype - - def __len__(self): - return len(self.image_files) - - def __getitem__(self, index): - meta_data = None - img, meta_data = load_nifti( - filename_or_obj=self.image_files[index], - as_closest_canonical=self.as_closest_canonical, - image_only=False, - dtype=self.dtype - ) - - seg = None - if self.seg_files is not None: - seg = load_nifti(self.seg_files[index]) - label = None - if self.labels is not None: - label = self.labels[index] - - compatible_meta = {} - assert isinstance(meta_data, dict), 'meta_data must be in dictionary format.' - for meta_key in meta_data: - meta_datum = meta_data[meta_key] - if type(meta_datum).__name__ == 'ndarray' \ - and np_str_obj_array_pattern.search(meta_datum.dtype.str) is not None: - continue - compatible_meta[meta_key] = meta_datum - - data = {'image': img} - if seg is not None: - data['seg'] = seg - if label is not None: - data['label'] = label - if len(compatible_meta) > 0: - data.update(compatible_meta) - - if self.transform is not None: - data = self.transform(data) - - return data diff --git a/monai/data/synthetic.py b/monai/data/synthetic.py index a51d730357..4efd4fe393 100644 --- a/monai/data/synthetic.py +++ b/monai/data/synthetic.py @@ -14,12 +14,13 @@ from monai.transforms.utils import rescale_array -def create_test_image_2d(width, height, num_objs=12, rad_max=30, noise_max=0.0, num_seg_classes=5): +def create_test_image_2d(width, height, num_objs=12, rad_max=30, noise_max=0.0, num_seg_classes=5, channel_dim=None): """ Return a noisy 2D image with `numObj' circles and a 2D mask image. The maximum radius of the circles is given as `radMax'. 
The mask will have `numSegClasses' number of classes for segmentations labeled sequentially from 1, plus a background class represented as 0. If `noiseMax' is greater than 0 then noise will be added to the image taken from - the uniform distribution on range [0,noiseMax). + the uniform distribution on range [0,noiseMax). If `channel_dim' is None, will create an image without channel + dimension, otherwise create an image with channel dimension as first dim or last dim. """ image = np.zeros((width, height)) @@ -40,10 +41,16 @@ def create_test_image_2d(width, height, num_objs=12, rad_max=30, noise_max=0.0, norm = np.random.uniform(0, num_seg_classes * noise_max, size=image.shape) noisyimage = rescale_array(np.maximum(image, norm)) + if channel_dim is not None: + assert isinstance(channel_dim, int) and channel_dim in (-1, 0, 2), 'invalid channel dim.' + noisyimage, labels = (noisyimage[None], labels[None]) \ + if channel_dim == 0 else (noisyimage[..., None], labels[..., None]) + return noisyimage, labels -def create_test_image_3d(height, width, depth, num_objs=12, rad_max=30, noise_max=0.0, num_seg_classes=5): +def create_test_image_3d(height, width, depth, num_objs=12, rad_max=30, + noise_max=0.0, num_seg_classes=5, channel_dim=None): """ Return a noisy 3D image and segmentation. @@ -69,4 +76,9 @@ def create_test_image_3d(height, width, depth, num_objs=12, rad_max=30, noise_ma norm = np.random.uniform(0, num_seg_classes * noise_max, size=image.shape) noisyimage = rescale_array(np.maximum(image, norm)) + if channel_dim is not None: + assert isinstance(channel_dim, int) and channel_dim in (-1, 0, 3), 'invalid channel dim.' 
+ noisyimage, labels = (noisyimage[None], labels[None]) \ + if channel_dim == 0 else (noisyimage[..., None], labels[..., None]) + return noisyimage, labels diff --git a/monai/losses/dice.py b/monai/losses/dice.py index 46792a4714..808c3c65d3 100644 --- a/monai/losses/dice.py +++ b/monai/losses/dice.py @@ -78,7 +78,7 @@ def forward(self, pred, ground, smooth=1e-5): intersection = psum * tsum sums = psum + tsum - score = 2.0 * (intersection.sum(2) + smooth) / (sums.sum(2) + smooth) + score = (2.0 * intersection.sum(2) + smooth) / (sums.sum(2) + smooth) return 1 - score.mean() @@ -159,5 +159,5 @@ def forward(self, pred, ground, smooth=1e-5): b[infs] = 0.0 b[infs] = torch.max(b) - score = 2.0 * (intersection.sum(2) * w) / (sums.sum(2) * w + smooth) + score = (2.0 * intersection.sum(2) * w + smooth) / (sums.sum(2) * w + smooth) return 1 - score.mean() diff --git a/monai/transforms/composables.py b/monai/transforms/composables.py index d177bc98e1..af2a973a4a 100644 --- a/monai/transforms/composables.py +++ b/monai/transforms/composables.py @@ -18,7 +18,7 @@ import monai from monai.data.utils import get_random_patch, get_valid_patch_size from monai.transforms.compose import Randomizable, Transform -from monai.transforms.transforms import Rotate90, SpatialCrop, AddChannel +from monai.transforms.transforms import LoadNifti, AsChannelFirst, AddChannel, Rotate90, SpatialCrop from monai.utils.misc import ensure_tuple from monai.transforms.utils import generate_pos_neg_label_crop_centers @@ -53,6 +53,89 @@ def __init__(self, keys): raise ValueError('keys should be a hashable or a sequence of hashables, got {}'.format(type(key))) +@export +class LoadNiftid(MapTransform): + """ + dictionary-based wrapper of LoadNifti, must load image and metadata together. + """ + + def __init__(self, keys, as_closest_canonical=False, dtype=None, meta_key_format='{}.{}', overwriting_keys=False): + """ + Args: + keys (hashable items): keys of the corresponding items to be transformed. 
+ See also: monai.transform.composables.MapTransform + as_closest_canonical (bool): if True, load the image as closest to canonical axis format. + dtype (np.dtype, optional): if not None convert the loaded image to this data type. + meta_key_format (str): key format to store meta data of the nifti image. + it must contain 2 fields for the key of this image and the key of every meta data item. + overwriting_keys (bool): whether to allow overwriting existing keys of meta data. + default is False, which will raise exception if encountering existing key. + """ + MapTransform.__init__(self, keys) + self.loader = LoadNifti(as_closest_canonical, False, dtype) + self.meta_key_format = meta_key_format + self.overwriting_keys = overwriting_keys + + def __call__(self, data): + d = dict(data) + for key in self.keys: + data = self.loader(d[key]) + assert isinstance(data, (tuple, list)), 'if data contains metadata, must be tuple or list.' + d[key] = data[0] + assert isinstance(data[1], dict), 'metadata must be in dict format.' + for k in sorted(data[1].keys()): + key_to_add = self.meta_key_format.format(key, k) + if key_to_add in d and self.overwriting_keys is False: + raise KeyError('meta data key is already existing.') + d[key_to_add] = data[1][k] + return d + + +@export +class AsChannelFirstd(MapTransform): + """ + dictionary-based wrapper of AsChannelFirst. + """ + + def __init__(self, keys, channel_dim=-1): + """ + Args: + keys (hashable items): keys of the corresponding items to be transformed. + See also: monai.transform.composables.MapTransform + channel_dim (int): which dimension of input image is the channel, default is the last dimension. + """ + MapTransform.__init__(self, keys) + self.converter = AsChannelFirst(channel_dim=channel_dim) + + def __call__(self, data): + d = dict(data) + for key in self.keys: + d[key] = self.converter(d[key]) + return d + + +@export +class AddChanneld(MapTransform): + """ + dictionary-based wrapper of AddChannel. 
+ """ + + def __init__(self, keys): + """ + Args: + keys (hashable items): keys of the corresponding items to be transformed. + See also: monai.transform.composables.MapTransform + """ + MapTransform.__init__(self, keys) + self.adder = AddChannel() + + def __call__(self, data): + d = dict(data) + for key in self.keys: + d[key] = self.adder(d[key]) + return d + + @export class Rotate90d(MapTransform): """ @@ -149,28 +232,6 @@ def __call__(self, data): return d -@export -class AddChanneld(MapTransform): - """ - dictionary-based wrapper of AddChannel. - """ - - def __init__(self, keys): - """ - Args: - keys (hashable items): keys of the corresponding items to be transformed. - See also: monai.transform.composables.MapTransform - """ - MapTransform.__init__(self, keys) - self.adder = AddChannel() - - def __call__(self, data): - d = dict(data) - for key in self.keys: - d[key] = self.adder(d[key]) - return d - - @export class RandCropByPosNegLabeld(Randomizable, MapTransform): """ diff --git a/monai/transforms/transforms.py b/monai/transforms/transforms.py index bec727e8bb..81e5b6dc7e 100644 --- a/monai/transforms/transforms.py +++ b/monai/transforms/transforms.py @@ -14,7 +14,9 @@ """ import numpy as np +import nibabel as nib import torch +from torch.utils.data._utils.collate import np_str_obj_array_pattern from skimage.transform import resize import scipy.ndimage @@ -29,6 +31,79 @@ export = monai.utils.export("monai.transforms") +@export +class LoadNifti: + """ + Load Nifti format file from provided path. + """ + + def __init__(self, as_closest_canonical=False, image_only=False, dtype=None): + """ + Args: + as_closest_canonical (bool): if True, load the image as closest to canonical axis format. + image_only (bool): if True return only the image volume, other return image volume and header dict. + dtype (np.dtype, optional): if not None convert the loaded image to this data type. 
+ + Note: + The loaded image volume if `image_only` is True, or a tuple containing the volume and the Nifti + header in dict format otherwise. + header['original_affine'] stores the original affine loaded from `filename_or_obj`. + header['affine'] stores the affine after the optional `as_closest_canonical` transform. + """ + self.as_closest_canonical = as_closest_canonical + self.image_only = image_only + self.dtype = dtype + + def __call__(self, filename): + """ + Args: + filename (str or file): path to file or file-like object. + """ + img = nib.load(filename) + + header = dict(img.header) + header['filename_or_obj'] = filename + header['original_affine'] = img.affine + header['affine'] = img.affine + header['as_closest_canonical'] = self.as_closest_canonical + + if self.as_closest_canonical: + img = nib.as_closest_canonical(img) + header['affine'] = img.affine + + if self.dtype is not None: + img = img.get_fdata(dtype=self.dtype) + else: + img = np.asanyarray(img.dataobj) + + if self.image_only: + return img + compatible_meta = dict() + for meta_key in header: + meta_datum = header[meta_key] + if type(meta_datum).__name__ == 'ndarray' \ + and np_str_obj_array_pattern.search(meta_datum.dtype.str) is not None: + continue + compatible_meta[meta_key] = meta_datum + return img, compatible_meta + + +@export +class AsChannelFirst: + """ + Change the channel dimension of the image to the first dimension. + Args: + channel_dim (int): which dimension of input image is the channel, default is the last dimension. + """ + + def __init__(self, channel_dim=-1): + assert isinstance(channel_dim, int) and channel_dim >= -1, 'invalid channel dimension.' 
+ self.channel_dim = channel_dim + + def __call__(self, img): + return np.moveaxis(img, self.channel_dim, 0) + + @export class AddChannel: """ diff --git a/tests/test_as_channel_first.py b/tests/test_as_channel_first.py new file mode 100644 index 0000000000..ccd0f3765a --- /dev/null +++ b/tests/test_as_channel_first.py @@ -0,0 +1,49 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest +import numpy as np +from parameterized import parameterized +from monai.transforms.transforms import AsChannelFirst + +TEST_CASE_1 = [ + { + 'channel_dim': -1 + }, + (4, 1, 2, 3) +] + +TEST_CASE_2 = [ + { + 'channel_dim': 3 + }, + (4, 1, 2, 3) +] + +TEST_CASE_3 = [ + { + 'channel_dim': 2 + }, + (3, 1, 2, 4) +] + + +class TestAsChannelFirst(unittest.TestCase): + + @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3]) + def test_shape(self, input_param, expected_shape): + test_data = np.random.randint(0, 2, size=[1, 2, 3, 4]) + result = AsChannelFirst(**input_param)(test_data) + self.assertTupleEqual(result.shape, expected_shape) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_as_channel_firstd.py b/tests/test_as_channel_firstd.py new file mode 100644 index 0000000000..6f9b450c4f --- /dev/null +++ b/tests/test_as_channel_firstd.py @@ -0,0 +1,58 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the 
License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest +import numpy as np +from parameterized import parameterized +from monai.transforms.composables import AsChannelFirstd + +TEST_CASE_1 = [ + { + 'keys': ['image', 'label', 'extra'], + 'channel_dim': -1 + }, + (4, 1, 2, 3) +] + +TEST_CASE_2 = [ + { + 'keys': ['image', 'label', 'extra'], + 'channel_dim': 3 + }, + (4, 1, 2, 3) +] + +TEST_CASE_3 = [ + { + 'keys': ['image', 'label', 'extra'], + 'channel_dim': 2 + }, + (3, 1, 2, 4) +] + + +class TestAsChannelFirstd(unittest.TestCase): + + @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3]) + def test_shape(self, input_param, expected_shape): + test_data = { + 'image': np.random.randint(0, 2, size=[1, 2, 3, 4]), + 'label': np.random.randint(0, 2, size=[1, 2, 3, 4]), + 'extra': np.random.randint(0, 2, size=[1, 2, 3, 4]) + } + result = AsChannelFirstd(**input_param)(test_data) + self.assertTupleEqual(result['image'].shape, expected_shape) + self.assertTupleEqual(result['label'].shape, expected_shape) + self.assertTupleEqual(result['extra'].shape, expected_shape) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_dataset.py b/tests/test_dataset.py new file mode 100644 index 0000000000..6829812dbc --- /dev/null +++ b/tests/test_dataset.py @@ -0,0 +1,64 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest +import os +import shutil +import numpy as np +import tempfile +import nibabel as nib +from parameterized import parameterized +from monai.data.dataset import Dataset +from monai.transforms.composables import LoadNiftid + +TEST_CASE_1 = [ + (128, 128, 128) +] + + +class TestDataset(unittest.TestCase): + + @parameterized.expand([TEST_CASE_1]) + def test_shape(self, expected_shape): + test_image = nib.Nifti1Image(np.random.randint(0, 2, size=[128, 128, 128]), np.eye(4)) + tempdir = tempfile.mkdtemp() + nib.save(test_image, os.path.join(tempdir, 'test_image1.nii.gz')) + nib.save(test_image, os.path.join(tempdir, 'test_label1.nii.gz')) + nib.save(test_image, os.path.join(tempdir, 'test_extra1.nii.gz')) + nib.save(test_image, os.path.join(tempdir, 'test_image2.nii.gz')) + nib.save(test_image, os.path.join(tempdir, 'test_label2.nii.gz')) + nib.save(test_image, os.path.join(tempdir, 'test_extra2.nii.gz')) + test_data = [ + { + 'image': os.path.join(tempdir, 'test_image1.nii.gz'), + 'label': os.path.join(tempdir, 'test_label1.nii.gz'), + 'extra': os.path.join(tempdir, 'test_extra1.nii.gz') + }, + { + 'image': os.path.join(tempdir, 'test_image2.nii.gz'), + 'label': os.path.join(tempdir, 'test_label2.nii.gz'), + 'extra': os.path.join(tempdir, 'test_extra2.nii.gz') + } + ] + dataset = Dataset(data=test_data, transform=LoadNiftid(keys=['image', 'label', 'extra'])) + data1 = dataset[0] + data2 = dataset[1] + shutil.rmtree(tempdir) + self.assertTupleEqual(data1['image'].shape, expected_shape) + self.assertTupleEqual(data1['label'].shape, expected_shape) + 
self.assertTupleEqual(data1['extra'].shape, expected_shape) + self.assertTupleEqual(data2['image'].shape, expected_shape) + self.assertTupleEqual(data2['label'].shape, expected_shape) + self.assertTupleEqual(data2['extra'].shape, expected_shape) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_dice_loss.py b/tests/test_dice_loss.py index a7ad9171b9..c5640a5660 100644 --- a/tests/test_dice_loss.py +++ b/tests/test_dice_loss.py @@ -39,7 +39,7 @@ 'ground': torch.tensor([[[[1., 1.], [1., 1.]]], [[[1., 0.], [1., 0.]]]]), 'smooth': 1e-4, }, - 0.416636, + 0.416657, ] TEST_CASE_3 = [ # shape: (2, 2, 3), (2, 1, 3) @@ -64,7 +64,7 @@ 'ground': torch.tensor([[[1., 0., 0.]], [[1., 1., 0.]]]), 'smooth': 1e-4, }, - 0.435015, + 0.435050, ] TEST_CASE_5 = [ # shape: (2, 2, 3), (2, 1, 3) @@ -77,7 +77,7 @@ 'ground': torch.tensor([[[1., 0., 0.]], [[1., 1., 0.]]]), 'smooth': 1e-4, }, - 0.383678, + 0.383713, ] TEST_CASE_6 = [ # shape: (1, 1, 2, 2), (1, 1, 2, 2) diff --git a/tests/test_generalized_dice_loss.py b/tests/test_generalized_dice_loss.py index fe29bc2d11..e08ff1d296 100644 --- a/tests/test_generalized_dice_loss.py +++ b/tests/test_generalized_dice_loss.py @@ -39,7 +39,7 @@ 'ground': torch.tensor([[[[1., 1.], [1., 1.]]], [[[1., 0.], [1., 0.]]]]), 'smooth': 1e-4, }, - 0.41678, + 0.416597, ] TEST_CASE_2 = [ # shape: (2, 2, 3), (2, 1, 3) @@ -64,7 +64,7 @@ 'ground': torch.tensor([[[1., 0., 0.]], [[1., 1., 0.]]]), 'smooth': 1e-4, }, - 0.435111, + 0.435034, ] TEST_CASE_4 = [ # shape: (2, 2, 3), (2, 1, 3) @@ -77,7 +77,7 @@ 'ground': torch.tensor([[[1., 0., 0.]], [[1., 1., 0.]]]), 'smooth': 1e-4, }, - 0.383776, + 0.383699, ] TEST_CASE_5 = [ # shape: (2, 2, 3), (2, 1, 3) @@ -89,7 +89,7 @@ 'ground': torch.tensor([[[0., 0., 0.]], [[0., 0., 0.]]]), 'smooth': 1e-8, }, - 1.0, + 0.0, ] TEST_CASE_6 = [ # shape: (1, 1, 2, 2), (1, 1, 2, 2) diff --git a/tests/test_load_nifti.py b/tests/test_load_nifti.py new file mode 100644 index 0000000000..de0660ccb3 --- /dev/null +++ 
b/tests/test_load_nifti.py @@ -0,0 +1,54 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest +import os +import shutil +import numpy as np +import tempfile +import nibabel as nib +from parameterized import parameterized +from monai.transforms.transforms import LoadNifti + +TEST_CASE_IMAGE_ONLY = [ + { + 'as_closest_canonical': False, + 'image_only': True + }, + (128, 128, 128) +] + +TEST_CASE_IMAGE_METADATA = [ + { + 'as_closest_canonical': False, + 'image_only': False + }, + (128, 128, 128) +] + + +class TestLoadNifti(unittest.TestCase): + + @parameterized.expand([TEST_CASE_IMAGE_ONLY, TEST_CASE_IMAGE_METADATA]) + def test_shape(self, input_param, expected_shape): + test_image = np.random.randint(0, 2, size=[128, 128, 128]) + tempdir = tempfile.mkdtemp() + nib.save(nib.Nifti1Image(test_image, np.eye(4)), os.path.join(tempdir, 'test_image.nii.gz')) + test_data = os.path.join(tempdir, 'test_image.nii.gz') + result = LoadNifti(**input_param)(test_data) + shutil.rmtree(tempdir) + if isinstance(result, tuple): + result = result[0] + self.assertTupleEqual(result.shape, expected_shape) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_load_niftid.py b/tests/test_load_niftid.py new file mode 100644 index 0000000000..071972f03f --- /dev/null +++ b/tests/test_load_niftid.py @@ -0,0 +1,49 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use 
this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest +import os +import shutil +import numpy as np +import tempfile +import nibabel as nib +from parameterized import parameterized +from monai.transforms.composables import LoadNiftid + +KEYS = ['image', 'label', 'extra'] + +TEST_CASE_1 = [ + { + 'keys': KEYS, + 'as_closest_canonical': False + }, + (128, 128, 128) +] + + +class TestLoadNiftid(unittest.TestCase): + + @parameterized.expand([TEST_CASE_1]) + def test_shape(self, input_param, expected_shape): + test_image = nib.Nifti1Image(np.random.randint(0, 2, size=[128, 128, 128]), np.eye(4)) + tempdir = tempfile.mkdtemp() + test_data = dict() + for key in KEYS: + nib.save(test_image, os.path.join(tempdir, key + '.nii.gz')) + test_data.update({key: os.path.join(tempdir, key + '.nii.gz')}) + result = LoadNiftid(**input_param)(test_data) + shutil.rmtree(tempdir) + for key in KEYS: + self.assertTupleEqual(result[key].shape, expected_shape) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_spatial_crop.py b/tests/test_spatial_crop.py index 2a3c2e7f9c..8a99d90f63 100644 --- a/tests/test_spatial_crop.py +++ b/tests/test_spatial_crop.py @@ -20,7 +20,7 @@ 'roi_size': [2, 2, 2] }, np.random.randint(0, 2, size=[3, 3, 3, 3]), - (3, 2, 2, 2), + (3, 2, 2, 2) ] TEST_CASE_2 = [ @@ -29,9 +29,10 @@ 'roi_end': [2, 2, 2] }, np.random.randint(0, 2, size=[3, 3, 3, 3]), - (3, 2, 2, 2), + (3, 2, 2, 2) ] + class TestSpatialCrop(unittest.TestCase): @parameterized.expand([TEST_CASE_1, TEST_CASE_2]) From 35da65c1e42fbe0cd638e9e2c3fdfe4fbdd60c9e Mon Sep 
17 00:00:00 2001 From: Wenqi Li Date: Tue, 10 Mar 2020 11:28:00 +0000 Subject: [PATCH 13/40] spacing and orientation; revise transforms cropping and zooming (#162) --- monai/data/nifti_reader.py | 8 +- monai/data/utils.py | 62 +++++++++ monai/transforms/composables.py | 106 +++++++++++--- monai/transforms/transforms.py | 240 +++++++++++++++++++++++--------- tests/test_header_correct.py | 36 +++++ tests/test_orientation.py | 38 +++++ tests/test_orientationd.py | 55 ++++++++ tests/test_spacing.py | 47 +++++++ tests/test_spacingd.py | 63 +++++++++ tests/test_spatial_crop.py | 20 ++- tests/test_zoom.py | 14 +- 11 files changed, 599 insertions(+), 90 deletions(-) create mode 100644 tests/test_header_correct.py create mode 100644 tests/test_orientation.py create mode 100644 tests/test_orientationd.py create mode 100644 tests/test_spacing.py create mode 100644 tests/test_spacingd.py diff --git a/monai/data/nifti_reader.py b/monai/data/nifti_reader.py index 3ece2264d9..01a2b86045 100644 --- a/monai/data/nifti_reader.py +++ b/monai/data/nifti_reader.py @@ -9,13 +9,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import numpy as np import nibabel as nib - +import numpy as np from torch.utils.data import Dataset from torch.utils.data._utils.collate import np_str_obj_array_pattern -from monai.utils.module import export + +from monai.data.utils import correct_nifti_header_if_necessary from monai.transforms.compose import Randomizable +from monai.utils.module import export def load_nifti(filename_or_obj, as_closest_canonical=False, image_only=True, dtype=None): @@ -38,6 +39,7 @@ def load_nifti(filename_or_obj, as_closest_canonical=False, image_only=True, dty """ img = nib.load(filename_or_obj) + img = correct_nifti_header_if_necessary(img) header = dict(img.header) header['filename_or_obj'] = filename_or_obj diff --git a/monai/data/utils.py b/monai/data/utils.py index 1e7de42141..81f9ac8c56 100644 --- a/monai/data/utils.py +++ b/monai/data/utils.py @@ -9,6 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import warnings import math from itertools import starmap, product from torch.utils.data._utils.collate import default_collate @@ -191,3 +192,64 @@ def list_data_collate(batch): elem = batch[0] data = [i for k in batch for i in k] if isinstance(elem, list) else batch return default_collate(data) + + +def correct_nifti_header_if_necessary(img_nii): + """ + check nifti object header's format, update the header if needed. + in the updated image pixdim matches the affine. 
+ + Args: + img (nifti image object) + """ + dim = img_nii.header['dim'][0] + if dim >= 5: + return img_nii # do nothing for high-dimensional array + # check that affine matches zooms + pixdim = np.asarray(img_nii.header.get_zooms())[:dim] + norm_affine = np.sqrt(np.sum(np.square(img_nii.affine[:dim, :dim]), 0)) + if np.allclose(pixdim, norm_affine): + return img_nii + if hasattr(img_nii, 'get_sform'): + return rectify_header_sform_qform(img_nii) + return img_nii + + +def rectify_header_sform_qform(img_nii): + """ + Look at the sform and qform of the nifti object and correct it if any + incompatibilities with pixel dimensions + + Adapted from https://github.com/NifTK/NiftyNet/blob/v0.6.0/niftynet/io/misc_io.py + """ + d = img_nii.header['dim'][0] + pixdim = np.asarray(img_nii.header.get_zooms())[:d] + sform, qform = img_nii.get_sform(), img_nii.get_qform() + norm_sform = np.sqrt(np.sum(np.square(sform[:d, :d]), 0)) + norm_qform = np.sqrt(np.sum(np.square(qform[:d, :d]), 0)) + sform_mismatch = not np.allclose(norm_sform, pixdim) + qform_mismatch = not np.allclose(norm_qform, pixdim) + + if img_nii.header['sform_code'] != 0: + if not sform_mismatch: + return img_nii + if not qform_mismatch: + img_nii.set_sform(img_nii.get_qform()) + return img_nii + if img_nii.header['qform_code'] != 0: + if not qform_mismatch: + return img_nii + if not sform_mismatch: + img_nii.set_qform(img_nii.get_sform()) + return img_nii + + norm_affine = np.sqrt(np.sum(np.square(img_nii.affine[:, :3]), 0)) + to_divide = np.tile(np.expand_dims(np.append(norm_affine, 1), axis=1), [1, 4]) + pixdim = np.append(pixdim, [1.] 
* (4 - len(pixdim))) + to_multiply = np.tile(np.expand_dims(pixdim, axis=1), [1, 4]) + affine = img_nii.affine / to_divide.T * to_multiply.T + warnings.warn('Modifying image affine from {} to {}'.format(img_nii.affine, affine)) + + img_nii.set_sform(affine) + img_nii.set_qform(affine) + return img_nii diff --git a/monai/transforms/composables.py b/monai/transforms/composables.py index af2a973a4a..4d05d6fc9a 100644 --- a/monai/transforms/composables.py +++ b/monai/transforms/composables.py @@ -18,13 +18,16 @@ import monai from monai.data.utils import get_random_patch, get_valid_patch_size from monai.transforms.compose import Randomizable, Transform -from monai.transforms.transforms import LoadNifti, AsChannelFirst, AddChannel, Rotate90, SpatialCrop +from monai.transforms.transforms import (LoadNifti, AsChannelFirst, Orientation, + AddChannel, Spacing, Rotate90, SpatialCrop) from monai.utils.misc import ensure_tuple from monai.transforms.utils import generate_pos_neg_label_crop_centers +from monai.utils.aliases import alias export = monai.utils.export("monai.transforms") +@export class MapTransform(Transform): """ A subclass of ``monai.transforms.compose.Transform`` with an assumption @@ -54,6 +57,85 @@ def __init__(self, keys): @export +@alias('SpacingD', 'SpacingDict') +class Spacingd(MapTransform): + """ + dictionary-based wrapper of :class: `monai.transforms.transforms.Spacing`. + """ + + def __init__(self, keys, affine_key, pixdim, interp_order=2, keep_shape=False, output_key='spacing'): + """ + Args: + affine_key (hashable): the key to the original affine. + The affine will be used to compute input data's pixdim. + pixdim (sequence of floats): output voxel spacing. + interp_order (int or sequence of ints): int: the same interpolation order + for all data indexed by `self,keys`; sequence of ints, should + correspond to an interpolation order for each data item indexed + by `self.keys` respectively. 
+ keep_shape (bool): whether to maintain the original spatial shape + after resampling. Defaults to False. + output_key (hashable): key to be added to the output dictionary to track + the pixdim status. + + """ + MapTransform.__init__(self, keys) + self.affine_key = affine_key + self.spacing_transform = Spacing(pixdim, keep_shape=keep_shape) + interp_order = ensure_tuple(interp_order) + self.interp_order = interp_order \ + if len(interp_order) == len(self.keys) else interp_order * len(self.keys) + print(self.interp_order) + self.output_key = output_key + + def __call__(self, data): + d = dict(data) + affine = d[self.affine_key] + original_pixdim, new_pixdim = None, None + for key, interp in zip(self.keys, self.interp_order): + d[key], original_pixdim, new_pixdim = self.spacing_transform(d[key], affine, interp_order=interp) + d[self.output_key] = {'original_pixdim': original_pixdim, 'current_pixdim': new_pixdim} + return d + + +@export +@alias('OrientationD', 'OrientationDict') +class Orientationd(MapTransform): + """ + dictionary-based wrapper of :class: `monai.transforms.transforms.Orientation`. + """ + + def __init__(self, keys, affine_key, axcodes, labels=None, output_key='orientation'): + """ + Args: + affine_key (hashable): the key to the original affine. + The affine will be used to compute input data's orientation. + axcodes (N elements sequence): for spatial ND input's orientation. + e.g. axcodes='RAS' represents 3D orientation: + (Left, Right), (Posterior, Anterior), (Inferior, Superior). + default orientation labels options are: 'L' and 'R' for the first dimension, + 'P' and 'A' for the second, 'I' and 'S' for the third. + labels : optional, None or sequence of (2,) sequences + (2,) sequences are labels for (beginning, end) of output axis. + see: ``nibabel.orientations.ornt2axcodes``. 
+ """ + MapTransform.__init__(self, keys) + self.affine_key = affine_key + self.orientation_transform = Orientation(axcodes=axcodes, labels=labels) + self.output_key = output_key + + def __call__(self, data): + d = dict(data) + affine = d[self.affine_key] + original_ornt, new_ornt = None, None + for key in self.keys: + d[key], original_ornt, new_ornt = self.orientation_transform(d[key], affine) + d[self.output_key] = {'original_ornt': original_ornt, 'current_ornt': new_ornt} + return d + + +@export +@alias('LoadNiftiD', 'LoadNiftiDict') class LoadNiftid(MapTransform): """ dictionary-based wrapper of LoadNifti, must load image and metadata together. @@ -92,6 +174,7 @@ def __call__(self, data): @export +@alias('AsChannelFirstD', 'AsChannelFirstDict') class AsChannelFirstd(MapTransform): """ dictionary-based wrapper of AsChannelFirst. @@ -115,6 +198,7 @@ def __call__(self, data): @export +@alias('AddChannelD', 'AddChannelDict') class AddChanneld(MapTransform): """ dictionary-based wrapper of AddChannel. @@ -137,6 +221,7 @@ def __call__(self, data): @export +@alias('Rotate90D', 'Rotate90Dict') class Rotate90d(MapTransform): """ dictionary-based wrapper of Rotate90. @@ -162,6 +247,7 @@ def __call__(self, data): @export +@alias('UniformRandomPatchD', 'UniformRandomPatchDict') class UniformRandomPatchd(Randomizable, MapTransform): """ Selects a patch of the given size chosen at a uniformly random position in the image. 
@@ -189,6 +275,7 @@ def __call__(self, data): @export +@alias('RandRotate90D', 'RandRotate90Dict') class RandRotate90d(Randomizable, MapTransform): """ With probability `prob`, input arrays are rotated by 90 degrees @@ -233,6 +320,7 @@ def __call__(self, data): @export +@alias('RandCropByPosNegLabelD', 'RandCropByPosNegLabelDict') class RandCropByPosNegLabeld(Randomizable, MapTransform): """ Crop random fixed sized regions with the center being a foreground or background voxel @@ -285,19 +373,3 @@ def __call__(self, data): results[i][key] = data[key] return results - - -# if __name__ == "__main__": -# import numpy as np -# data = { -# 'img': np.array((1, 2, 3, 4)).reshape((1, 2, 2)), -# 'seg': np.array((1, 2, 3, 4)).reshape((1, 2, 2)), -# 'affine': 3, -# 'dtype': 4, -# 'unused': 5, -# } -# rotator = RandRotate90d(keys=['img', 'seg'], prob=0.8) -# # rotator.set_random_state(1234) -# data_result = rotator(data) -# print(data_result.keys()) -# print(data_result['img'], data_result['seg']) diff --git a/monai/transforms/transforms.py b/monai/transforms/transforms.py index 81e5b6dc7e..e7ec89af5a 100644 --- a/monai/transforms/transforms.py +++ b/monai/transforms/transforms.py @@ -14,14 +14,14 @@ """ import numpy as np +import scipy.ndimage import nibabel as nib import torch from torch.utils.data._utils.collate import np_str_obj_array_pattern from skimage.transform import resize -import scipy.ndimage import monai -from monai.data.utils import get_random_patch, get_valid_patch_size +from monai.data.utils import get_random_patch, get_valid_patch_size, correct_nifti_header_if_necessary from monai.networks.layers.simplelayers import GaussianFilter from monai.transforms.compose import Randomizable from monai.transforms.utils import (create_control_grid, create_grid, create_rotate, create_scale, create_shear, @@ -31,6 +31,110 @@ export = monai.utils.export("monai.transforms") +@export +class Spacing: + """ + Resample input image into the specified `pixdim`. 
+ """ + + def __init__(self, pixdim, keep_shape=False): + """ + Args: + pixdim (sequence of floats): output voxel spacing. + keep_shape (bool): whether to maintain the original spatial shape + after resampling. Defaults to False. + """ + self.pixdim = pixdim + self.keep_shape = keep_shape + self.original_pixdim = pixdim + + def __call__(self, data_array, original_affine=None, original_pixdim=None, interp_order=1): + """ + Args: + data_array (ndarray): in shape (num_channels, H[, W, ...]). + original_affine (4x4 matrix): original affine. + original_pixdim (sequence of floats): original voxel spacing. + interp_order (int): The order of the spline interpolation, default is 3. + The order has to be in the range 0-5. + https://docs.scipy.org/doc/scipy/reference/generated/scipy.ndimage.zoom.html + Returns: + resampled array (in spacing: `self.pixdim`), original pixdim, current pixdim. + """ + if original_affine is None and original_pixdim is None: + raise ValueError('please provide either original_affine or original_pixdim.') + spatial_rank = data_array.ndim - 1 + if original_affine is not None: + affine = np.array(original_affine, dtype=np.float64, copy=True) + if not affine.shape == (4, 4): + raise ValueError('`original_affine` must be 4 x 4.') + original_pixdim = np.sqrt(np.sum(np.square(affine[:spatial_rank, :spatial_rank]), 1)) + + inp_d = np.asarray(original_pixdim)[:spatial_rank] + if inp_d.size < spatial_rank: + inp_d = np.append(inp_d, [1.] * (inp_d.size - spatial_rank)) + out_d = np.asarray(self.pixdim)[:spatial_rank] + if out_d.size < spatial_rank: + out_d = np.append(out_d, [1.] 
* (out_d.size - spatial_rank)) + + self.original_pixdim, self.pixdim = inp_d, out_d + scale = inp_d / out_d + if not np.isfinite(scale).all(): + raise ValueError('Unknown pixdims: source {}, target {}'.format(inp_d, out_d)) + zoom_ = monai.transforms.Zoom(scale, order=interp_order, mode='nearest', keep_size=self.keep_shape) + return zoom_(data_array), self.original_pixdim, self.pixdim + + +@export +class Orientation: + """ + Change the input image's orientation into the specified based on `axcodes`. + """ + + def __init__(self, axcodes, labels=None): + """ + Args: + axcodes (N elements sequence): for spatial ND input's orientation. + e.g. axcodes='RAS' represents 3D orientation: + (Left, Right), (Posterior, Anterior), (Inferior, Superior). + default orientation labels options are: 'L' and 'R' for the first dimension, + 'P' and 'A' for the second, 'I' and 'S' for the third. + labels : optional, None or sequence of (2,) sequences + (2,) sequences are labels for (beginning, end) of output axis. + see: ``nibabel.orientations.ornt2axcodes``. + """ + self.axcodes = axcodes + self.labels = labels + + def __call__(self, data_array, original_affine=None, original_axcodes=None): + """ + if `original_affine` is provided, the orientation is computed from the affine. + + Args: + data_array (ndarray): in shape (num_channels, H[, W, ...]). + original_affine (4x4 matrix): original affine. + original_axcodes (N elements sequence): for spatial ND input's orientation. + Returns: + data_array (reoriented in `self.axcodes`), original axcodes, current axcodes. 
+ """ + if original_affine is None and original_axcodes is None: + raise ValueError('please provide either original_affine or original_axcodes.') + spatial_rank = len(data_array.shape) - 1 + if original_affine is not None: + affine = np.array(original_affine, dtype=np.float64, copy=True) + if not affine.shape == (4, 4): + raise ValueError('`original_affine` must be 4 x 4.') + original_axcodes = nib.aff2axcodes(original_affine, labels=self.labels) + original_axcodes = original_axcodes[:spatial_rank] + self.axcodes = self.axcodes[:spatial_rank] + src = nib.orientations.axcodes2ornt(original_axcodes, labels=self.labels) + dst = nib.orientations.axcodes2ornt(self.axcodes) + spatial_ornt = nib.orientations.ornt_transform(src, dst) + spatial_ornt[:, 0] += 1 # skip channel dim + ornt = np.concatenate([np.array([[0, 1]]), spatial_ornt]) + data_array = nib.orientations.apply_orientation(data_array, ornt) + return data_array, original_axcodes, self.axcodes + + @export class LoadNifti: """ @@ -60,6 +164,7 @@ def __call__(self, filename): filename (str or file): path to file or file-like object. """ img = nib.load(filename) + img = correct_nifti_header_if_necessary(img) header = dict(img.header) header['filename_or_obj'] = filename @@ -199,8 +304,7 @@ class Resize: """ def __init__(self, output_shape, order=1, mode='reflect', cval=0, - clip=True, preserve_range=True, - anti_aliasing=True, anti_aliasing_sigma=None): + clip=True, preserve_range=True, anti_aliasing=True, anti_aliasing_sigma=None): assert isinstance(order, int), "order must be integer." self.output_shape = output_shape self.order = order @@ -223,7 +327,7 @@ def __call__(self, img): class Rotate: """ Rotates an input image by given angle. Uses scipy.ndimage.rotate. For more details, see - http://lagrange.univ-lyon1.fr/docs/scipy/0.17.1/generated/scipy.ndimage.rotate.html. + https://docs.scipy.org/doc/scipy/reference/generated/scipy.ndimage.rotate.html Args: angle (float): Rotation angle in degrees. 
@@ -238,8 +342,7 @@ class Rotate: prefiter (bool): Apply spline_filter before interpolation. Default: True. """ - def __init__(self, angle, axes=(1, 2), reshape=True, order=1, - mode='constant', cval=0, prefilter=True): + def __init__(self, angle, axes=(1, 2), reshape=True, order=1, mode='constant', cval=0, prefilter=True): self.angle = angle self.reshape = reshape self.order = order @@ -250,8 +353,7 @@ def __init__(self, angle, axes=(1, 2), reshape=True, order=1, def __call__(self, img): return scipy.ndimage.rotate(img, self.angle, self.axes, - reshape=self.reshape, order=self.order, - mode=self.mode, cval=self.cval, + reshape=self.reshape, order=self.order, mode=self.mode, cval=self.cval, prefilter=self.prefilter) @@ -261,8 +363,9 @@ class Zoom: For details, please see https://docs.scipy.org/doc/scipy/reference/generated/scipy.ndimage.zoom.html. Args: - zoom (float or sequence): The zoom factor along the axes. If a float, zoom is the same for each axis. - If a sequence, zoom should contain one value for each axis. + zoom (float or sequence): The zoom factor along the spatial axes. + If a float, zoom is the same for each spatial axis. + If a sequence, zoom should contain one value for each spatial axis. order (int): order of interpolation. Default=3. mode (str): Determines how input is extended beyond boundaries. Default is 'constant'. cval (scalar, optional): Value to fill past edges. Default is 0. @@ -270,6 +373,7 @@ class Zoom: 'wrap' and 'reflect'. Defaults to cpu for these cases or if cupyx not found. keep_size (bool): Should keep original size (pad if needed). """ + def __init__(self, zoom, order=3, mode='constant', cval=0, prefilter=True, use_gpu=False, keep_size=False): assert isinstance(order, int), "Order must be integer." 
self.zoom = zoom @@ -280,41 +384,59 @@ def __init__(self, zoom, order=3, mode='constant', cval=0, prefilter=True, use_g self.use_gpu = use_gpu self.keep_size = keep_size - def __call__(self, img): - zoomed = None if self.use_gpu: try: - import cupy from cupyx.scipy.ndimage import zoom as zoom_gpu - zoomed_gpu = zoom_gpu(cupy.array(img), zoom=self.zoom, order=self.order, - mode=self.mode, cval=self.cval, prefilter=self.prefilter) - zoomed = cupy.asnumpy(zoomed_gpu) - except ModuleNotFoundError: + self._zoom = zoom_gpu + except ImportError: print('For GPU zoom, please install cupy. Defaulting to cpu.') - except NotImplementedError: - print("Defaulting to CPU. cupyx doesn't support order > 1 and modes 'wrap' or 'reflect'.") - - if zoomed is None: - zoomed = scipy.ndimage.zoom(img, zoom=self.zoom, order=self.order, - mode=self.mode, cval=self.cval, prefilter=self.prefilter) - - # Crops to original size or pads. - if self.keep_size: - shape = img.shape - pad_vec = [[0, 0]] * len(shape) - crop_vec = list(zoomed.shape) - for d in range(len(shape)): - if zoomed.shape[d] > shape[d]: - crop_vec[d] = shape[d] - elif zoomed.shape[d] < shape[d]: - # pad_vec[d] = [0, shape[d] - zoomed.shape[d]] - pad_h = (float(shape[d]) - float(zoomed.shape[d])) / 2 - pad_vec[d] = [int(np.floor(pad_h)), int(np.ceil(pad_h))] - zoomed = zoomed[0:crop_vec[0], 0:crop_vec[1], 0:crop_vec[2]] - zoomed = np.pad(zoomed, pad_vec, mode='constant', constant_values=self.cval) - - return zoomed + self._zoom = scipy.ndimage.zoom + self.use_gpu = False + else: + self._zoom = scipy.ndimage.zoom + + def __call__(self, img): + """ + Args: + img (ndarray): channel first array, must have shape: (num_channels, H[, W, ..., ]), + """ + zoomed = [] + if self.use_gpu: + import cupy + for channel in cupy.array(img): + zoom_channel = self._zoom(channel, + zoom=self.zoom, + order=self.order, + mode=self.mode, + cval=self.cval, + prefilter=self.prefilter) + zoomed.append(cupy.asnumpy(zoom_channel)) + else: + for channel in 
img: + zoomed.append( + self._zoom(channel, + zoom=self.zoom, + order=self.order, + mode=self.mode, + cval=self.cval, + prefilter=self.prefilter)) + zoomed = np.stack(zoomed) + + if not self.keep_size or np.allclose(img.shape, zoomed.shape): + return zoomed + + pad_vec = [[0, 0]] * len(img.shape) + slice_vec = [slice(None)] * len(img.shape) + for idx, (od, zd) in enumerate(zip(img.shape, zoomed.shape)): + diff = od - zd + half = abs(diff) // 2 + if diff > 0: # need padding + pad_vec[idx] = [half, diff - half] + elif diff < 0: # need slicing + slice_vec[idx] = slice(half, half + od) + zoomed = np.pad(zoomed, pad_vec) + return zoomed[tuple(slice_vec)] @export @@ -468,7 +590,7 @@ def __call__(self, img): @export class SpatialCrop: """General purpose cropper to produce sub-volume region of interest (ROI). - It can support to crop 1, 2 or 3 dimensions spatial data. + It can support to crop ND spatial (channel-first) data. Either a center and size must be provided, or alternatively if center and size are not provided, the start and end coordinates of the ROI must be provided. The sub-volume must sit the within original image. @@ -485,37 +607,27 @@ def __init__(self, roi_center=None, roi_size=None, roi_start=None, roi_end=None) roi_end (list or tuple): voxel coordinates for end of the crop ROI. """ if roi_center is not None and roi_size is not None: - assert isinstance(roi_center, (list, tuple)), 'roi_center must be list or tuple.' - assert isinstance(roi_size, (list, tuple)), 'roi_size must be list or tuple.' - assert all(x > 0 for x in roi_center), 'all elements of roi_center must be positive.' - assert all(x > 0 for x in roi_size), 'all elements of roi_size must be positive.' 
roi_center = np.asarray(roi_center, dtype=np.uint16) roi_size = np.asarray(roi_size, dtype=np.uint16) self.roi_start = np.subtract(roi_center, np.floor_divide(roi_size, 2)) self.roi_end = np.add(self.roi_start, roi_size) else: assert roi_start is not None and roi_end is not None, 'roi_start and roi_end must be provided.' - assert isinstance(roi_start, (list, tuple)), 'roi_start must be list or tuple.' - assert isinstance(roi_end, (list, tuple)), 'roi_end must be list or tuple.' - assert all(x >= 0 for x in roi_start), 'all elements of roi_start must be greater than or equal to 0.' - assert all(x > 0 for x in roi_end), 'all elements of roi_end must be positive.' - self.roi_start = roi_start - self.roi_end = roi_end + self.roi_start = np.asarray(roi_start, dtype=np.uint16) + self.roi_end = np.asarray(roi_end, dtype=np.uint16) + + assert np.all(self.roi_start >= 0), 'all elements of roi_start must be greater than or equal to 0.' + assert np.all(self.roi_end > 0), 'all elements of roi_end must be positive.' + assert np.all(self.roi_end >= self.roi_start), 'invalid roi range.' def __call__(self, img): max_end = img.shape[1:] - assert (np.subtract(max_end, self.roi_start) >= 0).all(), 'roi start out of image space.' - assert (np.subtract(max_end, self.roi_end) >= 0).all(), 'roi end out of image space.' - assert (np.subtract(self.roi_end, self.roi_start) >= 0).all(), 'invalid roi range.' - if len(self.roi_start) == 1: - data = img[:, self.roi_start[0]:self.roi_end[0]].copy() - elif len(self.roi_start) == 2: - data = img[:, self.roi_start[0]:self.roi_end[0], self.roi_start[1]:self.roi_end[1]].copy() - elif len(self.roi_start) == 3: - data = img[:, self.roi_start[0]:self.roi_end[0], self.roi_start[1]:self.roi_end[1], - self.roi_start[2]:self.roi_end[2]].copy() - else: - raise ValueError('unsupported image shape.') + sd = min(len(self.roi_start), len(max_end)) + assert np.all(max_end[:sd] >= self.roi_start[:sd]), 'roi start out of image space.' 
+ assert np.all(max_end[:sd] >= self.roi_end[:sd]), 'roi end out of image space.' + + slices = [slice(None)] + [slice(s, e) for s, e in zip(self.roi_start[:sd], self.roi_end[:sd])] + data = img[tuple(slices)].copy() return data diff --git a/tests/test_header_correct.py b/tests/test_header_correct.py new file mode 100644 index 0000000000..b4d38b6dbf --- /dev/null +++ b/tests/test_header_correct.py @@ -0,0 +1,36 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + +import nibabel as nib +import numpy as np + +from monai.data.utils import correct_nifti_header_if_necessary + + +class TestCorrection(unittest.TestCase): + + def test_correct(self): + test_img = nib.Nifti1Image(np.zeros((1, 2, 3)), np.eye(4)) + test_img.header.set_zooms((100, 100, 100)) + test_img = correct_nifti_header_if_necessary(test_img) + np.testing.assert_allclose( + test_img.affine, np.array([[100., 0., 0., 0.], [0., 100., 0., 0.], [0., 0., 100., 0.], [0., 0., 0., 1.]])) + + def test_affine(self): + test_img = nib.Nifti1Image(np.zeros((1, 2, 3)), np.eye(4) * 20.) 
+ test_img = correct_nifti_header_if_necessary(test_img) + np.testing.assert_allclose( + test_img.affine, np.array([[20., 0., 0., 0.], [0., 20., 0., 0.], [0., 0., 20., 0.], [0., 0., 0., 20.]])) + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_orientation.py b/tests/test_orientation.py new file mode 100644 index 0000000000..8cd1c55f79 --- /dev/null +++ b/tests/test_orientation.py @@ -0,0 +1,38 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest + +import numpy as np +from parameterized import parameterized + +from monai.transforms.transforms import Orientation + +TEST_CASES = [ + [{'axcodes': 'RAS'}, + np.ones((2, 10, 15, 20)), {'original_axcodes': 'ALS'}, (2, 15, 10, 20)], + [{'axcodes': 'AL'}, + np.ones((2, 10, 15)), {'original_axcodes': 'AR'}, (2, 10, 15)], + [{'axcodes': 'L'}, + np.ones((2, 10)), {'original_axcodes': 'R'}, (2, 10)], +] + + +class TestOrientationCase(unittest.TestCase): + + @parameterized.expand(TEST_CASES) + def test_ornt(self, init_param, img, data_param, expected_shape): + res = Orientation(**init_param)(img, **data_param) + np.testing.assert_allclose(res[0].shape, expected_shape) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_orientationd.py b/tests/test_orientationd.py new file mode 100644 index 0000000000..999f31efe2 --- /dev/null +++ b/tests/test_orientationd.py @@ -0,0 +1,55 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest + +import numpy as np + +from monai.transforms.composables import Orientationd + + +class TestOrientationdCase(unittest.TestCase): + + def test_orntd(self): + data = {'seg': np.ones((2, 1, 2, 3)), 'affine': np.eye(4)} + ornt = Orientationd(keys='seg', affine_key='affine', axcodes='RAS') + res = ornt(data) + np.testing.assert_allclose(res['seg'].shape, (2, 1, 2, 3)) + self.assertEqual(res['orientation']['original_ornt'], ('R', 'A', 'S')) + self.assertEqual(res['orientation']['current_ornt'], 'RAS') + + def test_orntd_3d(self): + data = {'seg': np.ones((2, 1, 2, 3)), 'img': np.ones((2, 1, 2, 3)), 'affine': np.eye(4)} + ornt = Orientationd(keys=('img', 'seg'), affine_key='affine', axcodes='PLI') + res = ornt(data) + np.testing.assert_allclose(res['img'].shape, (2, 2, 1, 3)) + self.assertEqual(res['orientation']['original_ornt'], ('R', 'A', 'S')) + self.assertEqual(res['orientation']['current_ornt'], 'PLI') + + def test_orntd_2d(self): + data = {'seg': np.ones((2, 1, 3)), 'img': np.ones((2, 1, 3)), 'affine': np.eye(4)} + ornt = Orientationd(keys=('img', 'seg'), affine_key='affine', axcodes='PLI') + res = ornt(data) + np.testing.assert_allclose(res['img'].shape, (2, 3, 1)) + self.assertEqual(res['orientation']['original_ornt'], ('R', 'A')) + self.assertEqual(res['orientation']['current_ornt'], 'PL') + + def test_orntd_1d(self): + data = {'seg': np.ones((2, 3)), 'img': np.ones((2, 3)), 'affine': np.eye(4)} + ornt = Orientationd(keys=('img', 'seg'), affine_key='affine', axcodes='L') + res = ornt(data) + np.testing.assert_allclose(res['img'].shape, (2, 3)) + self.assertEqual(res['orientation']['original_ornt'], ('R',)) + self.assertEqual(res['orientation']['current_ornt'], 'L') + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_spacing.py b/tests/test_spacing.py new file mode 100644 index 0000000000..ceaff9a9e6 --- /dev/null +++ b/tests/test_spacing.py @@ -0,0 +1,47 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache 
License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + +import numpy as np +from parameterized import parameterized + +from monai.transforms.transforms import Spacing + +TEST_CASES = [ + [{'pixdim': (1.0, 2.0, 1.5)}, + np.ones((2, 10, 15, 20)), {'original_pixdim': (0.5, 0.5, 1.0)}, (2, 5, 4, 13)], + [{'pixdim': (1.0, 2.0, 1.5), 'keep_shape': True}, + np.ones((1, 2, 1, 2)), {'original_pixdim': (0.5, 0.5, 1.0)}, (1, 2, 1, 2)], + [{'pixdim': (1.0, 0.2, 1.5), 'keep_shape': False}, + np.ones((1, 2, 1, 2)), {'original_affine': np.eye(4)}, (1, 2, 5, 1)], + [{'pixdim': (1.0, 2.0), 'keep_shape': True}, + np.ones((3, 2, 2)), {'original_pixdim': (1.5, 0.5)}, (3, 2, 2)], + [{'pixdim': (1.0, 0.2), 'keep_shape': False}, + np.ones((5, 2, 1)), {'original_pixdim': (1.5, 0.5)}, (5, 3, 2)], + [{'pixdim': (1.0,), 'keep_shape': False}, + np.ones((1, 2)), {'original_pixdim': (1.5,), 'interp_order': 0}, (1, 3)], +] + + +class TestSpacingCase(unittest.TestCase): + + @parameterized.expand(TEST_CASES) + def test_spacing(self, init_param, img, data_param, expected_shape): + res = Spacing(**init_param)(img, **data_param) + np.testing.assert_allclose(res[0].shape, expected_shape) + if 'original_pixdim' in data_param: + np.testing.assert_allclose(res[1], data_param['original_pixdim']) + np.testing.assert_allclose(res[2], init_param['pixdim']) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_spacingd.py b/tests/test_spacingd.py new file mode 100644 index 0000000000..3ee0b66ae1 --- /dev/null +++ 
b/tests/test_spacingd.py @@ -0,0 +1,63 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + +import numpy as np + +from monai.transforms.composables import Spacingd + + +class TestSpacingDCase(unittest.TestCase): + + def test_spacingd_3d(self): + data = {'image': np.ones((2, 10, 15, 20)), 'affine': np.eye(4)} + spacing = Spacingd(keys='image', affine_key='affine', pixdim=(1, 2, 1.4)) + res = spacing(data) + np.testing.assert_allclose(res['image'].shape, (2, 10, 8, 14)) + np.testing.assert_allclose(res['spacing']['current_pixdim'], (1, 2, 1.4)) + np.testing.assert_allclose(res['spacing']['original_pixdim'], (1, 1, 1)) + + def test_spacingd_2d(self): + data = {'image': np.ones((2, 10, 20)), 'affine': np.eye(4)} + spacing = Spacingd(keys='image', affine_key='affine', pixdim=(1, 2, 1.4)) + res = spacing(data) + np.testing.assert_allclose(res['image'].shape, (2, 10, 10)) + np.testing.assert_allclose(res['spacing']['current_pixdim'], (1, 2)) + np.testing.assert_allclose(res['spacing']['original_pixdim'], (1, 1)) + + def test_spacingd_1d(self): + data = {'image': np.ones((2, 10)), 'affine': np.eye(4)} + spacing = Spacingd(keys='image', affine_key='affine', pixdim=(0.2,)) + res = spacing(data) + np.testing.assert_allclose(res['image'].shape, (2, 50)) + np.testing.assert_allclose(res['spacing']['current_pixdim'], (0.2,)) + np.testing.assert_allclose(res['spacing']['original_pixdim'], (1,)) + + def test_interp_all(self): + data = 
{'image': np.ones((2, 10)), 'seg': np.ones((2, 10)), 'affine': np.eye(4)} + spacing = Spacingd(keys=('image', 'seg'), affine_key='affine', interp_order=0, pixdim=(0.2,)) + res = spacing(data) + np.testing.assert_allclose(res['image'].shape, (2, 50)) + np.testing.assert_allclose(res['spacing']['current_pixdim'], (0.2,)) + np.testing.assert_allclose(res['spacing']['original_pixdim'], (1,)) + + def test_interp_sep(self): + data = {'image': np.ones((2, 10)), 'seg': np.ones((2, 10)), 'affine': np.eye(4)} + spacing = Spacingd(keys=('image', 'seg'), affine_key='affine', interp_order=(2, 0), pixdim=(0.2,)) + res = spacing(data) + np.testing.assert_allclose(res['image'].shape, (2, 50)) + np.testing.assert_allclose(res['spacing']['current_pixdim'], (0.2,)) + np.testing.assert_allclose(res['spacing']['original_pixdim'], (1,)) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_spatial_crop.py b/tests/test_spatial_crop.py index 8a99d90f63..3d8862cad9 100644 --- a/tests/test_spatial_crop.py +++ b/tests/test_spatial_crop.py @@ -32,10 +32,28 @@ (3, 2, 2, 2) ] +TEST_CASE_3 = [ + { + 'roi_start': [0, 0], + 'roi_end': [2, 2] + }, + np.random.randint(0, 2, size=[3, 3, 3, 3]), + (3, 2, 2, 3), +] + +TEST_CASE_4 = [ + { + 'roi_start': [0, 0, 0, 0, 0], + 'roi_end': [2, 2, 2, 2, 2] + }, + np.random.randint(0, 2, size=[3, 3, 3, 3]), + (3, 2, 2, 2), +] + class TestSpatialCrop(unittest.TestCase): - @parameterized.expand([TEST_CASE_1, TEST_CASE_2]) + @parameterized.expand([TEST_CASE_1, TEST_CASE_2, TEST_CASE_3, TEST_CASE_4]) def test_shape(self, input_param, input_data, expected_shape): result = SpatialCrop(**input_param)(input_data) self.assertTupleEqual(result.shape, expected_shape) diff --git a/tests/test_zoom.py b/tests/test_zoom.py index f445895bfc..874e587a98 100644 --- a/tests/test_zoom.py +++ b/tests/test_zoom.py @@ -29,9 +29,9 @@ class ZoomTest(NumpyImageTestCase2D): (0.8, 1, 'reflect', 0, False, False, False) ]) def test_correct_results(self, zoom, order, mode, 
cval, prefilter, use_gpu, keep_size): - zoom_fn = Zoom(zoom=zoom, order=order, mode=mode, cval=cval, + zoom_fn = Zoom(zoom=zoom, order=order, mode=mode, cval=cval, prefilter=prefilter, use_gpu=use_gpu, keep_size=keep_size) - zoomed = zoom_fn(self.imt) + zoomed = zoom_fn(self.imt[0]) expected = zoom_scipy(self.imt, zoom=zoom, mode=mode, order=order, cval=cval, prefilter=prefilter) self.assertTrue(np.allclose(expected, zoomed)) @@ -43,15 +43,19 @@ def test_gpu_zoom(self, _, zoom, order, mode, cval, prefilter): if importlib.util.find_spec('cupy'): zoom_fn = Zoom(zoom=zoom, order=order, mode=mode, cval=cval, prefilter=prefilter, use_gpu=True, keep_size=False) - zoomed = zoom_fn(self.imt) + zoomed = zoom_fn(self.imt[0]) expected = zoom_scipy(self.imt, zoom=zoom, mode=mode, order=order, cval=cval, prefilter=prefilter) self.assertTrue(np.allclose(expected, zoomed)) def test_keep_size(self): zoom_fn = Zoom(zoom=0.6, keep_size=True) - zoomed = zoom_fn(self.imt) - self.assertTrue(np.array_equal(zoomed.shape, self.imt.shape)) + zoomed = zoom_fn(self.imt[0]) + self.assertTrue(np.array_equal(zoomed.shape, self.imt.shape[1:])) + + zoom_fn = Zoom(zoom=1.3, keep_size=True) + zoomed = zoom_fn(self.imt[0]) + self.assertTrue(np.array_equal(zoomed.shape, self.imt.shape[1:])) @parameterized.expand([ ("no_zoom", None, 1, TypeError), From 5c49f8f1b65ebca45dafaeacfffd3a112ad53bc5 Mon Sep 17 00:00:00 2001 From: Nic Ma Date: Tue, 10 Mar 2020 20:05:38 +0800 Subject: [PATCH 14/40] 156 event handlers support arbitrary format (#159) --- ...rence_3d.py => unet_inference_3d_array.py} | 21 ++-- examples/unet_inference_3d_dict.py | 95 +++++++++++++++++++ examples/unet_segmentation_3d_array.py | 4 +- examples/unet_segmentation_3d_dict.py | 6 +- monai/handlers/segmentation_saver.py | 27 ++++-- monai/handlers/stats_handler.py | 36 ++++--- monai/utils/sliding_window_inference.py | 5 +- tests/integration_sliding_window.py | 3 +- tests/test_handler_stats.py | 38 +++++++- 
tests/test_sliding_window_inference.py | 2 +- 10 files changed, 188 insertions(+), 49 deletions(-) rename examples/{unet_inference_3d.py => unet_inference_3d_array.py} (80%) create mode 100644 examples/unet_inference_3d_dict.py diff --git a/examples/unet_inference_3d.py b/examples/unet_inference_3d_array.py similarity index 80% rename from examples/unet_inference_3d.py rename to examples/unet_inference_3d_array.py index aa84a6560d..8fe417c7dd 100644 --- a/examples/unet_inference_3d.py +++ b/examples/unet_inference_3d_array.py @@ -17,13 +17,16 @@ import nibabel as nib import numpy as np import torch -import torchvision.transforms as transforms from ignite.engine import Engine from torch.utils.data import DataLoader +# assumes the framework is found here, change as necessary +sys.path.append("..") + from monai import config from monai.handlers.checkpoint_loader import CheckpointLoader from monai.handlers.segmentation_saver import SegmentationSaver +import monai.transforms.compose as transforms from monai.data.nifti_reader import NiftiDataset from monai.transforms import AddChannel, Rescale, ToTensor from monai.networks.nets.unet import UNet @@ -31,11 +34,11 @@ from monai.data.synthetic import create_test_image_3d from monai.utils.sliding_window_inference import sliding_window_inference -sys.path.append("..") # assumes the framework is found here, change as necessary config.print_config() tempdir = tempfile.mkdtemp() # tempdir = './temp' +print('generating synthetic data to {} (this may take a while)'.format(tempdir)) for i in range(50): im, seg = create_test_image_3d(256, 256, 256) @@ -51,7 +54,7 @@ segtrans = transforms.Compose([AddChannel(), ToTensor()]) ds = NiftiDataset(images, segs, transform=imtrans, seg_transform=segtrans, image_only=False) -device = torch.device("cpu:0") +device = torch.device("cuda:0") roi_size = (64, 64, 64) sw_batch_size = 4 net = UNet( @@ -65,7 +68,7 @@ net.to(device) -def _sliding_window_processor(_engine, batch): +def 
_sliding_window_processor(engine, batch): net.eval() img, seg, meta_data = batch with torch.no_grad(): @@ -75,11 +78,11 @@ def _sliding_window_processor(_engine, batch): infer_engine = Engine(_sliding_window_processor) -# checkpoint_handler = ModelCheckpoint('./', 'net', n_saved=10, save_interval=3, require_empty=False) -# infer_engine.add_event_handler(event_name=Events.EPOCH_COMPLETED, handler=checkpoint_handler, to_save={'net': net}) - -SegmentationSaver(output_path='tempdir', output_ext='.nii.gz', output_postfix='seg').attach(infer_engine) -CheckpointLoader(load_path='./net_checkpoint_9.pth', load_dict={'net': net}).attach(infer_engine) +# for the arrary data format, assume the 3rd item of batch data is the meta_data +SegmentationSaver(output_path='tempdir', output_ext='.nii.gz', output_postfix='seg', + batch_transform=lambda x: x[2]).attach(infer_engine) +# the model was trained by "unet_segmentation_3d_array" exmple +CheckpointLoader(load_path='./runs/net_checkpoint_120.pth', load_dict={'net': net}).attach(infer_engine) loader = DataLoader(ds, batch_size=1, num_workers=1, pin_memory=torch.cuda.is_available()) state = infer_engine.run(loader) diff --git a/examples/unet_inference_3d_dict.py b/examples/unet_inference_3d_dict.py new file mode 100644 index 0000000000..405b49aa8d --- /dev/null +++ b/examples/unet_inference_3d_dict.py @@ -0,0 +1,95 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import sys +import tempfile +from glob import glob + +import nibabel as nib +import numpy as np +import torch +from ignite.engine import Engine +from torch.utils.data import DataLoader + +# assumes the framework is found here, change as necessary +sys.path.append("..") + +import monai +from monai.data.utils import list_data_collate +from monai.utils.sliding_window_inference import sliding_window_inference +from monai.data.synthetic import create_test_image_3d +from monai.networks.utils import predict_segmentation +from monai.networks.nets.unet import UNet +from monai.transforms.composables import LoadNiftid, AsChannelFirstd +import monai.transforms.compose as transforms +from monai.handlers.segmentation_saver import SegmentationSaver +from monai.handlers.checkpoint_loader import CheckpointLoader +from monai import config + +config.print_config() + +tempdir = tempfile.mkdtemp() +# tempdir = './temp' +print('generating synthetic data to {} (this may take a while)'.format(tempdir)) +for i in range(50): + im, seg = create_test_image_3d(256, 256, 256, channel_dim=-1) + + n = nib.Nifti1Image(im, np.eye(4)) + nib.save(n, os.path.join(tempdir, 'im%i.nii.gz' % i)) + + n = nib.Nifti1Image(seg, np.eye(4)) + nib.save(n, os.path.join(tempdir, 'seg%i.nii.gz' % i)) + +images = sorted(glob(os.path.join(tempdir, 'im*.nii.gz'))) +segs = sorted(glob(os.path.join(tempdir, 'seg*.nii.gz'))) +val_files = [{'img': img, 'seg': seg} for img, seg in zip(images, segs)] +val_transforms = transforms.Compose([ + LoadNiftid(keys=['img', 'seg']), + AsChannelFirstd(keys=['img', 'seg'], channel_dim=-1) +]) +val_ds = monai.data.Dataset(data=val_files, transform=val_transforms) + +device = torch.device("cuda:0") +roi_size = (64, 64, 64) +sw_batch_size = 4 +net = UNet( + dimensions=3, + in_channels=1, + num_classes=1, + channels=(16, 32, 64, 128, 256), + strides=(2, 2, 2, 2), + num_res_units=2, +) +net.to(device) + + +def _sliding_window_processor(engine, batch): + net.eval() + with 
torch.no_grad(): + seg_probs = sliding_window_inference(batch['img'], roi_size, sw_batch_size, lambda x: net(x)[0], device) + return predict_segmentation(seg_probs) + + +infer_engine = Engine(_sliding_window_processor) + +# for the arrary data format, assume the 3rd item of batch data is the meta_data +SegmentationSaver(output_path='tempdir', output_ext='.nii.gz', output_postfix='seg', + batch_transform=lambda batch: {'filename_or_obj': batch['img.filename_or_obj'], + 'original_affine': batch['img.original_affine'], + 'affine': batch['img.affine'], + }).attach(infer_engine) +# the model was trained by "unet_segmentation_3d_array" exmple +CheckpointLoader(load_path='./runs/net_checkpoint_120.pth', load_dict={'net': net}).attach(infer_engine) + +val_loader = DataLoader(val_ds, batch_size=1, num_workers=4, collate_fn=list_data_collate, + pin_memory=torch.cuda.is_available()) +state = infer_engine.run(val_loader) diff --git a/examples/unet_segmentation_3d_array.py b/examples/unet_segmentation_3d_array.py index 0e68eb67c8..de5996470d 100644 --- a/examples/unet_segmentation_3d_array.py +++ b/examples/unet_segmentation_3d_array.py @@ -30,7 +30,7 @@ import monai.transforms.compose as transforms from monai.data.nifti_reader import NiftiDataset -from monai.transforms import (AddChannel, Rescale, ToTensor, UniformRandomPatch) +from monai.transforms import AddChannel, Rescale, ToTensor, UniformRandomPatch from monai.handlers.stats_handler import StatsHandler from monai.handlers.mean_dice import MeanDice from monai.visualize import img2tensorboard @@ -41,7 +41,7 @@ # Create a temporary directory and 50 random image, mask paris tempdir = tempfile.mkdtemp() - +print('generating synthetic data to {} (this may take a while)'.format(tempdir)) for i in range(50): im, seg = create_test_image_3d(128, 128, 128) diff --git a/examples/unet_segmentation_3d_dict.py b/examples/unet_segmentation_3d_dict.py index d7ea3795ea..640bed21c0 100644 --- a/examples/unet_segmentation_3d_dict.py +++ 
b/examples/unet_segmentation_3d_dict.py @@ -41,7 +41,7 @@ # Create a temporary directory and 50 random image, mask paris tempdir = tempfile.mkdtemp() - +print('generating synthetic data to {} (this may take a while)'.format(tempdir)) for i in range(50): im, seg = create_test_image_3d(128, 128, 128, channel_dim=-1) @@ -70,7 +70,7 @@ # Define nifti dataset, dataloader. ds = monai.data.Dataset(data=train_files, transform=train_transforms) -loader = DataLoader(ds, batch_size=2, num_workers=2, collate_fn=list_data_collate, +loader = DataLoader(ds, batch_size=2, num_workers=4, collate_fn=list_data_collate, pin_memory=torch.cuda.is_available()) check_data = monai.utils.misc.first(loader) print(check_data['img'].shape, check_data['seg'].shape) @@ -190,7 +190,7 @@ def log_metrics_to_tensorboard(engine): logging.basicConfig(stream=sys.stdout, level=logging.INFO) train_ds = monai.data.Dataset(data=train_files, transform=train_transforms) -train_loader = DataLoader(train_ds, batch_size=2, num_workers=8, collate_fn=list_data_collate, +train_loader = DataLoader(train_ds, batch_size=2, num_workers=4, collate_fn=list_data_collate, pin_memory=torch.cuda.is_available()) train_epochs = 30 diff --git a/monai/handlers/segmentation_saver.py b/monai/handlers/segmentation_saver.py index a87e517f81..e0fa50310a 100644 --- a/monai/handlers/segmentation_saver.py +++ b/monai/handlers/segmentation_saver.py @@ -23,13 +23,15 @@ class SegmentationSaver: """ def __init__(self, output_path='./', dtype='float32', output_postfix='seg', output_ext='.nii.gz', - output_transform=lambda x: x, name=None): + batch_transform=lambda x: x, output_transform=lambda x: x, name=None): """ Args: output_path (str): output image directory. dtype (str): to convert the image to save to this datatype. output_postfix (str): a string appended to all output file names. output_ext (str): output file extension name. 
+ batch_transform (Callable): a callable that is used to transform the + ignite.engine.batch into expected format to extract the meta_data dictionary. output_transform (Callable): a callable that is used to transform the ignite.engine.output into the form expected nifti image data. The first dimension of this transform's output will be treated as the @@ -40,6 +42,7 @@ def __init__(self, output_path='./', dtype='float32', output_postfix='seg', outp self.dtype = dtype self.output_postfix = output_postfix self.output_ext = output_ext + self.batch_transform = batch_transform self.output_transform = output_transform self.logger = None if name is None else logging.getLogger(name) @@ -88,24 +91,28 @@ def _create_file_basename(postfix, input_file_name, folder_path, data_root_dir=" def __call__(self, engine): """ - This method assumes: - - 3rd output of engine.state.batch is a meta data dict, and have the keys: - 'filename_or_obj' -- for output file name creation - and optionally 'original_affine', 'affine' for data orientation handling. - - output file datatype from `engine.state.output.dtype`. + This method assumes self.batch_transform will extract Metadata from the input batch. + Metadata should have the following keys: + + - ``'filename_or_obj'`` -- for output file name creation + - ``'original_affine'`` (optional) for data orientation handling + - ``'affine'`` (optional) for data output affine. + + output file datatype is determined from ``engine.state.output.dtype``. 
""" - meta_data = engine.state.batch[2] # assuming 3rd output of input dataset is a meta data dict + meta_data = self.batch_transform(engine.state.batch) filenames = meta_data['filename_or_obj'] original_affine = meta_data.get('original_affine', None) affine = meta_data.get('affine', None) + engine_output = self.output_transform(engine.state.output) for batch_id, filename in enumerate(filenames): # save a batch of files seg_output = engine_output[batch_id] - _affine = affine[batch_id] - _original_affine = original_affine[batch_id] + affine_ = affine[batch_id] + original_affine_ = original_affine[batch_id] if isinstance(seg_output, torch.Tensor): seg_output = seg_output.detach().cpu().numpy() output_filename = self._create_file_basename(self.output_postfix, filename, self.output_path) output_filename = '{}{}'.format(output_filename, self.output_ext) - write_nifti(seg_output, _affine, output_filename, _original_affine, dtype=seg_output.dtype) + write_nifti(seg_output, affine_, output_filename, original_affine_, dtype=seg_output.dtype) self.logger.info('saved: {}'.format(output_filename)) diff --git a/monai/handlers/stats_handler.py b/monai/handlers/stats_handler.py index b1ba3563d5..47709543fe 100644 --- a/monai/handlers/stats_handler.py +++ b/monai/handlers/stats_handler.py @@ -10,7 +10,7 @@ # limitations under the License. import logging - +import torch from ignite.engine import Engine, Events KEY_VAL_FORMAT = '{}: {:.4f} ' @@ -19,29 +19,37 @@ class StatsHandler(object): """StatsHandler defines a set of Ignite Event-handlers for all the log printing logics. It's can be used for any Ignite Engine(trainer, validator and evaluator). - And it can support logging for epoch level and iteration level with pre-defined StatsLoggers. - By default, this class logs the dictionary of `engine.state.metrics`. + And it can support logging for epoch level and iteration level with pre-defined loggers. + By default: + (1) epoch_print_logger logs `engine.state.metrics`. 
+ (2) iteration_print_logger logs loss value, expected output format is (y_pred, loss). """ def __init__(self, epoch_print_logger=None, iteration_print_logger=None, + batch_transform=lambda x: x, + output_transform=lambda x: x, name=None): """ Args: epoch_print_logger (Callable): customized callable printer for epoch level logging. - must accept parameter "engine", use default printer if None. + must accept parameter "engine", use default printer if None. iteration_print_logger (Callable): custimized callable printer for iteration level logging. - must accept parameter "engine", use default printer if None. + must accept parameter "engine", use default printer if None. + batch_transform (Callable): a callable that is used to transform the + ignite.engine.batch into expected format to extract input data. + output_transform (Callable): a callable that is used to transform the + ignite.engine.output into expected format to extract several output data. name (str): identifier of logging.logger to use, defaulting to `engine.logger`. """ self.epoch_print_logger = epoch_print_logger self.iteration_print_logger = iteration_print_logger - + self.batch_transform = batch_transform + self.output_transform = output_transform self.logger = None if name is None else logging.getLogger(name) - def attach(self, engine: Engine): """Register a set of Ignite Event-Handlers to a specified Ignite engine. @@ -119,15 +127,16 @@ def _default_epoch_print(self, engine: Engine): def _default_iteration_print(self, engine: Engine): """Execute iteration log operation based on Ignite engine.state data. - print the values from ignite state.logs dict. + Print the values from ignite state.logs dict. + Default behaivor is to print loss from output[1], skip if output[1] is not loss. Args: engine (ignite.engine): Ignite Engine, it can be a trainer, validator or evaluator. 
""" - prints_dict = engine.state.metrics - if not prints_dict: - return + loss = self.output_transform(engine.state.output)[1] + if loss is None or (torch.is_tensor(loss) and len(loss.shape) > 0): + return # not printing multi dimensional output num_iterations = engine.state.epoch_length current_iteration = (engine.state.iteration - 1) % num_iterations + 1 current_epoch = engine.state.epoch @@ -138,9 +147,6 @@ def _default_iteration_print(self, engine: Engine): num_epochs, current_iteration, num_iterations) - - for name in sorted(prints_dict): - value = prints_dict[name] - out_str += KEY_VAL_FORMAT.format(name, value) + out_str += KEY_VAL_FORMAT.format('Loss', loss.item() if torch.is_tensor(loss) else loss) self.logger.info(out_str) diff --git a/monai/utils/sliding_window_inference.py b/monai/utils/sliding_window_inference.py index 2efc7a7481..0125a19d06 100644 --- a/monai/utils/sliding_window_inference.py +++ b/monai/utils/sliding_window_inference.py @@ -10,9 +10,8 @@ # limitations under the License. 
import torch - +from ignite.utils import convert_tensor from monai.transforms.transforms import ImageEndPadder -from monai.transforms.transforms import ToTensor from monai.data.utils import dense_patch_slices @@ -49,7 +48,7 @@ def sliding_window_inference(inputs, roi_size, sw_batch_size, predictor, device) # in case that image size is smaller than roi size image_size = tuple(max(image_size[i], roi_size[i]) for i in range(num_spatial_dims)) inputs = ImageEndPadder(roi_size, 'constant')(inputs) # in np array - inputs = ToTensor()(inputs) + inputs = convert_tensor(torch.from_numpy(inputs), device, False) # TODO: interval from user's specification scan_interval = _get_scan_interval(image_size, roi_size, num_spatial_dims) diff --git a/tests/integration_sliding_window.py b/tests/integration_sliding_window.py index 32abbfff5c..db10d7cc49 100644 --- a/tests/integration_sliding_window.py +++ b/tests/integration_sliding_window.py @@ -58,7 +58,8 @@ def _sliding_window_processor(_engine, batch): infer_engine = Engine(_sliding_window_processor) with tempfile.TemporaryDirectory() as temp_dir: - SegmentationSaver(output_path=temp_dir, output_ext='.nii.gz', output_postfix='seg').attach(infer_engine) + SegmentationSaver(output_path=temp_dir, output_ext='.nii.gz', output_postfix='seg', + batch_transform=lambda x: x[2]).attach(infer_engine) infer_engine.run(loader) diff --git a/tests/test_handler_stats.py b/tests/test_handler_stats.py index 5bbe17d1c2..58a62133d2 100644 --- a/tests/test_handler_stats.py +++ b/tests/test_handler_stats.py @@ -9,6 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import torch import logging import re import unittest @@ -29,12 +30,12 @@ def test_metrics_print(self): # set up engine def _train_func(engine, batch): - pass + return None, torch.tensor(0.0) engine = Engine(_train_func) # set up dummy metric - @engine.on(Events.ITERATION_COMPLETED) + @engine.on(Events.EPOCH_COMPLETED) def _update_metric(engine): current_metric = engine.state.metrics.get(key_to_print, 0.1) engine.state.metrics[key_to_print] = current_metric + 0.1 @@ -52,9 +53,36 @@ def _update_metric(engine): matched = [] for idx, line in enumerate(output_str.split('\n')): if grep.match(line): - self.assertTrue(has_key_word.match(line)) - matched.append(idx) - self.assertEqual(matched, [1, 2, 3, 5, 6, 7, 8, 10]) + if idx in [5, 10]: + self.assertTrue(has_key_word.match(line)) + + def test_loss_print(self): + log_stream = StringIO() + logging.basicConfig(stream=log_stream, level=logging.INFO) + key_to_handler = 'test_logging' + key_to_print = 'Loss' + + # set up engine + def _train_func(engine, batch): + return None, torch.tensor(0.0) + + engine = Engine(_train_func) + + # set up testing handler + stats_handler = StatsHandler(name=key_to_handler) + stats_handler.attach(engine) + + engine.run(range(3), max_epochs=2) + + # check logging output + output_str = log_stream.getvalue() + grep = re.compile('.*{}.*'.format(key_to_handler)) + has_key_word = re.compile('.*{}.*'.format(key_to_print)) + matched = [] + for idx, line in enumerate(output_str.split('\n')): + if grep.match(line): + if idx in [1, 2, 3, 6, 7, 8]: + self.assertTrue(has_key_word.match(line)) if __name__ == '__main__': diff --git a/tests/test_sliding_window_inference.py b/tests/test_sliding_window_inference.py index e0d727c407..142d3e31e5 100644 --- a/tests/test_sliding_window_inference.py +++ b/tests/test_sliding_window_inference.py @@ -34,7 +34,7 @@ def test_sliding_window_default(self, image_shape, roi_shape, sw_batch_size): device = torch.device("cpu:0") def compute(data): - return data.to(device) + 1 
+ return data + 1 result = sliding_window_inference(inputs, roi_shape, sw_batch_size, compute, device) expected_val = np.ones(image_shape, dtype=np.float32) + 1 From d166f6a5c4b93970068788d0052afb1dd2084341 Mon Sep 17 00:00:00 2001 From: Mohammad Adil Date: Tue, 10 Mar 2020 05:24:42 -0700 Subject: [PATCH 15/40] 152-random-rotate (#155) * Add RandomRotate. --- monai/transforms/composables.py | 1 - monai/transforms/transforms.py | 50 +++++++++++++++++++++++++++++++++ tests/test_random_rotate.py | 43 ++++++++++++++++++++++++++++ 3 files changed, 93 insertions(+), 1 deletion(-) create mode 100644 tests/test_random_rotate.py diff --git a/monai/transforms/composables.py b/monai/transforms/composables.py index 4d05d6fc9a..0e404c5233 100644 --- a/monai/transforms/composables.py +++ b/monai/transforms/composables.py @@ -85,7 +85,6 @@ def __init__(self, keys, affine_key, pixdim, interp_order=2, keep_shape=False, o interp_order = ensure_tuple(interp_order) self.interp_order = interp_order \ if len(interp_order) == len(self.keys) else interp_order * len(self.keys) - print(self.interp_order) self.output_key = output_key def __call__(self, data): diff --git a/monai/transforms/transforms.py b/monai/transforms/transforms.py index e7ec89af5a..a2352e5db8 100644 --- a/monai/transforms/transforms.py +++ b/monai/transforms/transforms.py @@ -631,6 +631,56 @@ def __call__(self, img): return data +@export +class RandRotate(Randomizable): + """Randomly rotates the input arrays. + + Args: + prob (float): Probability of rotation. + degrees (tuple of float or float): Range of rotation in degrees. If single number, + angle is picked from (-degrees, degrees). + axes (tuple of 2 ints): Axes of rotation. Default: (1, 2). This is the first two + axis in spatial dimensions according to MONAI channel first shape assumption. + reshape (bool): If true, output shape is made same as input. Default: True. + order (int): Order of spline interpolation. Range 0-5. Default: 1. 
This is + different from scipy where default interpolation is 3. + mode (str): Points outside boundary filled according to this mode. Options are + 'constant', 'nearest', 'reflect', 'wrap'. Default: 'constant'. + cval (scalar): Value to fill outside boundary. Default: 0. + prefiter (bool): Apply spline_filter before interpolation. Default: True. + """ + + def __init__(self, degrees, prob=0.1, axes=(1, 2), reshape=True, order=1, + mode='constant', cval=0, prefilter=True): + self.prob = prob + self.degrees = degrees + self.reshape = reshape + self.order = order + self.mode = mode + self.cval = cval + self.prefilter = prefilter + self.axes = axes + + if not hasattr(self.degrees, '__iter__'): + self.degrees = (-self.degrees, self.degrees) + assert len(self.degrees) == 2, "degrees should be a number or pair of numbers." + + self._do_transform = False + self.angle = None + + def randomize(self): + self._do_transform = self.R.random_sample() < self.prob + self.angle = self.R.uniform(low=self.degrees[0], high=self.degrees[1]) + + def __call__(self, img): + self.randomize() + if not self._do_transform: + return img + rotator = Rotate(self.angle, self.axes, self.reshape, self.order, + self.mode, self.cval, self.prefilter) + return rotator(img) + + @export class RandomFlip(Randomizable): """Randomly flips the image along axes. diff --git a/tests/test_random_rotate.py b/tests/test_random_rotate.py new file mode 100644 index 0000000000..29036663af --- /dev/null +++ b/tests/test_random_rotate.py @@ -0,0 +1,43 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest +import numpy as np + +import scipy.ndimage +from parameterized import parameterized + +from monai.transforms import RandRotate +from tests.utils import NumpyImageTestCase2D + + +class RandomRotateTest(NumpyImageTestCase2D): + + @parameterized.expand([ + (90, (1, 2), True, 1, 'reflect', 0, True), + ((-45, 45), (2, 1), True, 3, 'constant', 0, True), + (180, (2, 3), False, 2, 'constant', 4, False), + ]) + def test_correct_results(self, degrees, axes, reshape, + order, mode, cval, prefilter): + rotate_fn = RandRotate(degrees, prob=1.0, axes=axes, reshape=reshape, + order=order, mode=mode, cval=cval, prefilter=prefilter) + rotate_fn.set_random_state(243) + rotated = rotate_fn(self.imt) + + angle = rotate_fn.angle + expected = scipy.ndimage.rotate(self.imt, angle, axes, reshape, order=order, + mode=mode, cval=cval, prefilter=prefilter) + self.assertTrue(np.allclose(expected, rotated)) + + +if __name__ == '__main__': + unittest.main() From 5b1f258238f984def1729f61a739cea3f8000929 Mon Sep 17 00:00:00 2001 From: Wenqi Li Date: Wed, 11 Mar 2020 10:07:50 +0000 Subject: [PATCH 16/40] add dictionary-based wrapper random spatial transforms (#166) --- monai/networks/layers/simplelayers.py | 3 + monai/transforms/composables.py | 234 +++++++++++++++++++++++++- monai/transforms/transforms.py | 100 ++++++----- tests/test_flip.py | 2 +- tests/test_random_affine.py | 4 +- tests/test_random_affined.py | 90 ++++++++++ tests/test_random_elastic_2d.py | 17 +- tests/test_random_elastic_3d.py | 3 +- tests/test_random_elasticd_2d.py | 88 ++++++++++ tests/test_random_elasticd_3d.py | 72 ++++++++ tests/test_random_flip.py | 8 +- 11 files changed, 552 insertions(+), 69 deletions(-) create mode 100644 tests/test_random_affined.py create mode 100644 tests/test_random_elasticd_2d.py create mode 100644 tests/test_random_elasticd_3d.py diff --git 
a/monai/networks/layers/simplelayers.py b/monai/networks/layers/simplelayers.py index 5ed491354b..c41ff93a0f 100644 --- a/monai/networks/layers/simplelayers.py +++ b/monai/networks/layers/simplelayers.py @@ -40,8 +40,11 @@ class GaussianFilter: def __init__(self, spatial_dims, sigma, truncated=4., device=None): """ Args: + spatial_dims (int): number of spatial dimensions of the input image. + must have shape (Batch, channels, H[, W, ...]). sigma (float): std. truncated (float): spreads how many stds. + device (torch.device): device on which the tensor will be allocated. """ self.kernel = torch.nn.Parameter(torch.tensor(gaussian_1d(sigma, truncated)), False) self.spatial_dims = spatial_dims diff --git a/monai/transforms/composables.py b/monai/transforms/composables.py index 0e404c5233..8ccd5a747a 100644 --- a/monai/transforms/composables.py +++ b/monai/transforms/composables.py @@ -13,15 +13,18 @@ defined in `monai.transforms.transforms`. """ +import torch from collections.abc import Hashable import monai from monai.data.utils import get_random_patch, get_valid_patch_size +from monai.networks.layers.simplelayers import GaussianFilter from monai.transforms.compose import Randomizable, Transform from monai.transforms.transforms import (LoadNifti, AsChannelFirst, Orientation, - AddChannel, Spacing, Rotate90, SpatialCrop) + AddChannel, Spacing, Rotate90, SpatialCrop, + RandAffine, Rand2DElastic, Rand3DElastic) from monai.utils.misc import ensure_tuple -from monai.transforms.utils import generate_pos_neg_label_crop_centers +from monai.transforms.utils import generate_pos_neg_label_crop_centers, create_grid from monai.utils.aliases import alias export = monai.utils.export("monai.transforms") @@ -36,15 +39,15 @@ class MapTransform(Transform): The ``keys`` parameter will be used to get and set the actual data item to transform. That is, the callable of this transform should follow the pattern: - ``` + .. 
code-block:: python + def __call__(self, data): for key in self.keys: if key in data: - update output data with some_transform_function(data[key]). + # update output data with some_transform_function(data[key]). else: - do nothing or some exceptions handling. + # do nothing or some exceptions handling. return data - ``` """ def __init__(self, keys): @@ -372,3 +375,222 @@ def __call__(self, data): results[i][key] = data[key] return results + + +@export +@alias('RandAffineD', 'RandAffineDict') +class RandAffined(Randomizable, MapTransform): + """ + A dictionary-based wrapper of ``monai.transforms.transforms.RandAffine``. + """ + + def __init__(self, keys, + spatial_size, prob=0.1, + rotate_range=None, shear_range=None, translate_range=None, scale_range=None, + mode='bilinear', padding_mode='zeros', as_tensor_output=True, device=None): + """ + Args: + keys (Hashable items): keys of the corresponding items to be transformed. + spatial_size (list or tuple of int): output image spatial size. + if ``data`` component has two spatial dimensions, ``spatial_size`` should have 2 elements [h, w]. + if ``data`` component has three spatial dimensions, ``spatial_size`` should have 3 elements [h, w, d]. + prob (float): probability of returning a randomized affine grid. + defaults to 0.1, with 10% chance returns a randomized grid. + mode ('nearest'|'bilinear'): interpolation order. Defaults to ``'bilinear'``. + if mode is a tuple of interpolation mode strings, each string corresponds to a key in ``keys``. + this is useful to set different modes for different data items. + padding_mode ('zeros'|'border'|'reflection'): mode of handling out of range indices. + Defaults to ``'zeros'``. + as_tensor_output (bool): the computation is implemented using pytorch tensors, this option specifies + whether to convert it back to numpy arrays. + device (torch.device): device on which the tensor will be allocated. 
+ + See also: + - ``monai.transform.composables.MapTransform`` + - ``RandAffineGrid`` for the random affine paramters configurations. + """ + MapTransform.__init__(self, keys) + default_mode = 'bilinear' if isinstance(mode, (tuple, list)) else mode + self.rand_affine = RandAffine(prob=prob, + rotate_range=rotate_range, shear_range=shear_range, + translate_range=translate_range, scale_range=scale_range, + spatial_size=spatial_size, + mode=default_mode, padding_mode=padding_mode, + as_tensor_output=as_tensor_output, device=device) + self.mode = mode + + def set_random_state(self, seed=None, state=None): + self.rand_affine.set_random_state(seed, state) + Randomizable.set_random_state(self, seed, state) + return self + + def randomize(self): + self.rand_affine.randomize() + + def __call__(self, data): + d = dict(data) + self.randomize() + + spatial_size = self.rand_affine.spatial_size + if self.rand_affine.do_transform: + grid = self.rand_affine.rand_affine_grid(spatial_size=spatial_size) + else: + grid = create_grid(spatial_size) + + if isinstance(self.mode, (tuple, list)): + for key, m in zip(self.keys, self.mode): + d[key] = self.rand_affine.resampler(d[key], grid, mode=m) + return d + + for key in self.keys: # same interpolation mode + d[key] = self.rand_affine.resampler(d[key], grid, self.rand_affine.mode) + return d + + +@export +@alias('Rand2DElasticD', 'Rand2DElasticDict') +class Rand2DElasticd(Randomizable, MapTransform): + """ + A dictionary-based wrapper of ``monai.transforms.transforms.Rand2DElastic``. + """ + + def __init__(self, keys, + spatial_size, spacing, magnitude_range, prob=0.1, + rotate_range=None, shear_range=None, translate_range=None, scale_range=None, + mode='bilinear', padding_mode='zeros', as_tensor_output=False, device=None): + """ + Args: + keys (Hashable items): keys of the corresponding items to be transformed. + spatial_size (2 ints): specifying output image spatial size [h, w]. 
+ spacing (2 ints): distance in between the control points. + magnitude_range (2 ints): the random offsets will be generated from + ``uniform[magnitude[0], magnitude[1])``. + prob (float): probability of returning a randomized affine grid. + defaults to 0.1, with 10% chance returns a randomized grid, + otherwise returns a ``spatial_size`` centered area extracted from the input image. + mode ('nearest'|'bilinear'): interpolation order. Defaults to ``'bilinear'``. + if mode is a tuple of interpolation mode strings, each string corresponds to a key in ``keys``. + this is useful to set different modes for different data items. + padding_mode ('zeros'|'border'|'reflection'): mode of handling out of range indices. + Defaults to ``'zeros'``. + as_tensor_output (bool): the computation is implemented using pytorch tensors, this option specifies + whether to convert it back to numpy arrays. + device (torch.device): device on which the tensor will be allocated. + + See also: + - ``RandAffineGrid`` for the random affine paramters configurations. + - ``Affine`` for the affine transformation parameters configurations. 
+ """ + MapTransform.__init__(self, keys) + default_mode = 'bilinear' if isinstance(mode, (tuple, list)) else mode + self.rand_2d_elastic = Rand2DElastic(spacing=spacing, magnitude_range=magnitude_range, prob=prob, + rotate_range=rotate_range, shear_range=shear_range, + translate_range=translate_range, scale_range=scale_range, + spatial_size=spatial_size, + mode=default_mode, padding_mode=padding_mode, + as_tensor_output=as_tensor_output, device=device) + self.mode = mode + + def set_random_state(self, seed=None, state=None): + self.rand_2d_elastic.set_random_state(seed, state) + Randomizable.set_random_state(self, seed, state) + return self + + def randomize(self, spatial_size): + self.rand_2d_elastic.randomize(spatial_size) + + def __call__(self, data): + d = dict(data) + spatial_size = self.rand_2d_elastic.spatial_size + self.randomize(spatial_size) + + if self.rand_2d_elastic.do_transform: + grid = self.rand_2d_elastic.deform_grid(spatial_size) + grid = self.rand_2d_elastic.rand_affine_grid(grid=grid) + grid = torch.nn.functional.interpolate(grid[None], spatial_size, mode='bicubic', align_corners=False)[0] + else: + grid = create_grid(spatial_size) + + if isinstance(self.mode, (tuple, list)): + for key, m in zip(self.keys, self.mode): + d[key] = self.rand_2d_elastic.resampler(d[key], grid, mode=m) + return d + + for key in self.keys: # same interpolation mode + d[key] = self.rand_2d_elastic.resampler(d[key], grid, mode=self.rand_2d_elastic.mode) + return d + + +@export +@alias('Rand3DElasticD', 'Rand3DElasticDict') +class Rand3DElasticd(Randomizable, MapTransform): + """ + A dictionary-based wrapper of ``monai.transforms.transforms.Rand3DElastic``. 
+ """ + + def __init__(self, keys, + spatial_size, sigma_range, magnitude_range, prob=0.1, + rotate_range=None, shear_range=None, translate_range=None, scale_range=None, + mode='bilinear', padding_mode='zeros', as_tensor_output=False, device=None): + """ + Args: + keys (Hashable items): keys of the corresponding items to be transformed. + spatial_size (3 ints): specifying output image spatial size [h, w, d]. + sigma_range (2 ints): a Gaussian kernel with standard deviation sampled + from ``uniform[sigma_range[0], sigma_range[1])`` will be used to smooth the random offset grid. + magnitude_range (2 ints): the random offsets on the grid will be generated from + ``uniform[magnitude[0], magnitude[1])``. + prob (float): probability of returning a randomized affine grid. + defaults to 0.1, with 10% chance returns a randomized grid, + otherwise returns a ``spatial_size`` centered area extracted from the input image. + mode ('nearest'|'bilinear'): interpolation order. Defaults to ``'bilinear'``. + if mode is a tuple of interpolation mode strings, each string corresponds to a key in ``keys``. + this is useful to set different modes for different data items. + padding_mode ('zeros'|'border'|'reflection'): mode of handling out of range indices. + Defaults to ``'zeros'``. + as_tensor_output (bool): the computation is implemented using pytorch tensors, this option specifies + whether to convert it back to numpy arrays. + device (torch.device): device on which the tensor will be allocated. + + See also: + - ``RandAffineGrid`` for the random affine paramters configurations. + - ``Affine`` for the affine transformation parameters configurations. 
+ """ + MapTransform.__init__(self, keys) + default_mode = 'bilinear' if isinstance(mode, (tuple, list)) else mode + self.rand_3d_elastic = Rand3DElastic(sigma_range=sigma_range, magnitude_range=magnitude_range, prob=prob, + rotate_range=rotate_range, shear_range=shear_range, + translate_range=translate_range, scale_range=scale_range, + spatial_size=spatial_size, + mode=default_mode, padding_mode=padding_mode, + as_tensor_output=as_tensor_output, device=device) + self.mode = mode + + def set_random_state(self, seed=None, state=None): + self.rand_3d_elastic.set_random_state(seed, state) + Randomizable.set_random_state(self, seed, state) + return self + + def randomize(self, grid_size): + self.rand_3d_elastic.randomize(grid_size) + + def __call__(self, data): + d = dict(data) + spatial_size = self.rand_3d_elastic.spatial_size + self.randomize(spatial_size) + grid = create_grid(spatial_size) + if self.rand_3d_elastic.do_transform: + device = self.rand_3d_elastic.device + grid = torch.tensor(grid).to(device) + gaussian = GaussianFilter(spatial_dims=3, sigma=self.rand_3d_elastic.sigma, truncated=3., device=device) + grid[:3] += gaussian(self.rand_3d_elastic.rand_offset[None])[0] * self.rand_3d_elastic.magnitude + grid = self.rand_3d_elastic.rand_affine_grid(grid=grid) + + if isinstance(self.mode, (tuple, list)): + for key, m in zip(self.keys, self.mode): + d[key] = self.rand_3d_elastic.resampler(d[key], grid, mode=m) + return d + + for key in self.keys: # same interpolation mode + d[key] = self.rand_3d_elastic.resampler(d[key], grid, mode=self.rand_3d_elastic.mode) + return d diff --git a/monai/transforms/transforms.py b/monai/transforms/transforms.py index a2352e5db8..370a7fb305 100644 --- a/monai/transforms/transforms.py +++ b/monai/transforms/transforms.py @@ -268,15 +268,14 @@ def __call__(self, img): @export class Flip: """Reverses the order of elements along the given axis. Preserves shape. - Uses np.flip in practice. See numpy.flip for additional details. 
+ Uses ``np.flip`` in practice. See numpy.flip for additional details. + https://docs.scipy.org/doc/numpy/reference/generated/numpy.flip.html Args: - axes (None, int or tuple of ints): Axes along which to flip over. Default is None. + axis (None, int or tuple of ints): Axes along which to flip over. Default is None. """ def __init__(self, axis=None): - assert axis is None or isinstance(axis, (int, list, tuple)), \ - "axis must be None, int or tuple of ints." self.axis = axis def __call__(self, img): @@ -638,19 +637,19 @@ class RandRotate(Randomizable): Args: prob (float): Probability of rotation. degrees (tuple of float or float): Range of rotation in degrees. If single number, - angle is picked from (-degrees, degrees). + angle is picked from (-degrees, degrees). axes (tuple of 2 ints): Axes of rotation. Default: (1, 2). This is the first two axis in spatial dimensions according to MONAI channel first shape assumption. reshape (bool): If true, output shape is made same as input. Default: True. order (int): Order of spline interpolation. Range 0-5. Default: 1. This is different from scipy where default interpolation is 3. - mode (str): Points outside boundary filled according to this mode. Options are + mode (str): Points outside boundary filled according to this mode. Options are 'constant', 'nearest', 'reflect', 'wrap'. Default: 'constant'. cval (scalar): Value to fill outside boundary. Default: 0. prefiter (bool): Apply spline_filter before interpolation. Default: True. """ - def __init__(self, degrees, prob=0.1, axes=(1, 2), reshape=True, order=1, + def __init__(self, degrees, prob=0.1, axes=(1, 2), reshape=True, order=1, mode='constant', cval=0, prefilter=True): self.prob = prob self.degrees = degrees @@ -682,17 +681,19 @@ def __call__(self, img): @export -class RandomFlip(Randomizable): - """Randomly flips the image along axes. +class RandFlip(Randomizable): + """Randomly flips the image along axes. Preserves shape. + See numpy.flip for additional details. 
+ https://docs.scipy.org/doc/numpy/reference/generated/numpy.flip.html Args: prob (float): Probability of flipping. - axes (None, int or tuple of ints): Axes along which to flip over. Default is None. + axis (None, int or tuple of ints): Axes along which to flip over. Default is None. """ def __init__(self, prob=0.1, axis=None): - self.axis = axis self.prob = prob + self.flipper = Flip(axis=axis) self._do_transform = False @@ -703,8 +704,7 @@ def __call__(self, img): self.randomize() if not self._do_transform: return img - flipper = Flip(axis=self.axis) - return flipper(img) + return self.flipper(img) @export @@ -802,8 +802,7 @@ def __call__(self, spatial_size=None, grid=None): affine = affine @ create_scale(spatial_dims, self.scale_params) affine = torch.tensor(affine, device=self.device) - if not torch.is_tensor(grid): - grid = torch.tensor(grid) + grid = torch.tensor(grid) if not torch.is_tensor(grid) else grid.clone().detach() if self.device: grid = grid.to(self.device) grid = (affine.float() @ grid.reshape((grid.shape[0], -1)).float()).reshape([-1] + list(grid.shape[1:])) @@ -874,8 +873,9 @@ def __call__(self, spatial_size=None, grid=None): a 2D (3xHxW) or 3D (4xHxWxD) grid. 
""" self.randomize() - affine_grid = AffineGrid(self.rotate_params, self.shear_params, self.translate_params, self.scale_params, - self.as_tensor_output, self.device) + affine_grid = AffineGrid(rotate_params=self.rotate_params, shear_params=self.shear_params, + translate_params=self.translate_params, scale_params=self.scale_params, + as_tensor_output=self.as_tensor_output, device=self.device) return affine_grid(spatial_size, grid) @@ -943,8 +943,7 @@ def __call__(self, img, grid, mode='bilinear'): """ if not torch.is_tensor(img): img = torch.tensor(img) - if not torch.is_tensor(grid): - grid = torch.tensor(grid) + grid = torch.tensor(grid) if not torch.is_tensor(grid) else grid.clone().detach() if self.device: img = img.to(self.device) grid = grid.to(self.device) @@ -1002,13 +1001,13 @@ def __init__(self, whether to convert it back to numpy arrays. device (torch.device): device on which the tensor will be allocated. """ - self.affine_grid = AffineGrid(rotate_params, - shear_params, - translate_params, - scale_params, + self.affine_grid = AffineGrid(rotate_params=rotate_params, + shear_params=shear_params, + translate_params=translate_params, + scale_params=scale_params, as_tensor_output=True, device=device) - self.resampler = Resample(padding_mode, as_tensor_output=as_tensor_output, device=device) + self.resampler = Resample(padding_mode=padding_mode, as_tensor_output=as_tensor_output, device=device) self.spatial_size = spatial_size self.mode = mode @@ -1023,8 +1022,8 @@ def __call__(self, img, spatial_size=None, mode=None): """ spatial_size = spatial_size or self.spatial_size mode = mode or self.mode - grid = self.affine_grid(spatial_size) - return self.resampler(img, grid, mode) + grid = self.affine_grid(spatial_size=spatial_size) + return self.resampler(img=img, grid=grid, mode=mode) @export @@ -1062,7 +1061,9 @@ def __init__(self, Affine for the affine transformation parameters configurations. 
""" - self.rand_affine_grid = RandAffineGrid(rotate_range, shear_range, translate_range, scale_range, True, device) + self.rand_affine_grid = RandAffineGrid(rotate_range=rotate_range, shear_range=shear_range, + translate_range=translate_range, scale_range=scale_range, + as_tensor_output=True, device=device) self.resampler = Resample(padding_mode=padding_mode, as_tensor_output=as_tensor_output, device=device) self.spatial_size = spatial_size @@ -1078,6 +1079,7 @@ def set_random_state(self, seed=None, state=None): def randomize(self): self.do_transform = self.R.rand() < self.prob + self.rand_affine_grid.randomize() def __call__(self, img, spatial_size=None, mode=None): """ @@ -1095,7 +1097,7 @@ def __call__(self, img, spatial_size=None, mode=None): grid = self.rand_affine_grid(spatial_size=spatial_size) else: grid = create_grid(spatial_size) - return self.resampler(img, grid, mode) + return self.resampler(img=img, grid=grid, mode=mode) @export @@ -1121,13 +1123,14 @@ def __init__(self, Args: spacing (2 ints): distance in between the control points. magnitude_range (2 ints): the random offsets will be generated from - `uniform[magnitude[0], magnitude[1])`. + ``uniform[magnitude[0], magnitude[1])``. prob (float): probability of returning a randomized affine grid. defaults to 0.1, with 10% chance returns a randomized grid, - otherwise returns a `spatial_size` centered area centered extracted from the input image. + otherwise returns a ``spatial_size`` centered area extracted from the input image. spatial_size (2 ints): specifying output image spatial size [h, w]. - mode ('nearest'|'bilinear'): interpolation order. Defaults to 'bilinear'. - padding_mode ('zeros'|'border'|'reflection'): mode of handling out of range indices. Defaults to 'zeros'. + mode ('nearest'|'bilinear'): interpolation order. Defaults to ``'bilinear'``. + padding_mode ('zeros'|'border'|'reflection'): mode of handling out of range indices. + Defaults to ``'zeros'``. 
as_tensor_output (bool): the computation is implemented using pytorch tensors, this option specifies whether to convert it back to numpy arrays. device (torch.device): device on which the tensor will be allocated. @@ -1136,8 +1139,11 @@ def __init__(self, RandAffineGrid for the random affine paramters configurations. Affine for the affine transformation parameters configurations. """ - self.deform_grid = RandDeformGrid(spacing, magnitude_range, as_tensor_output=True, device=device) - self.rand_affine_grid = RandAffineGrid(rotate_range, shear_range, translate_range, scale_range, True, device) + self.deform_grid = RandDeformGrid(spacing=spacing, magnitude_range=magnitude_range, + as_tensor_output=True, device=device) + self.rand_affine_grid = RandAffineGrid(rotate_range=rotate_range, shear_range=shear_range, + translate_range=translate_range, scale_range=scale_range, + as_tensor_output=True, device=device) self.resampler = Resample(padding_mode=padding_mode, as_tensor_output=as_tensor_output, device=device) self.spatial_size = spatial_size @@ -1151,21 +1157,23 @@ def set_random_state(self, seed=None, state=None): Randomizable.set_random_state(self, seed, state) return self - def randomize(self): + def randomize(self, spatial_size): self.do_transform = self.R.rand() < self.prob + self.deform_grid.randomize(spatial_size) + self.rand_affine_grid.randomize() def __call__(self, img, spatial_size=None, mode=None): """ Args: img (ndarray or tensor): shape must be (num_channels, H, W), spatial_size (2 ints): specifying output image spatial size [h, w]. - mode ('nearest'|'bilinear'): interpolation order. Defaults to 'self.mode'. + mode ('nearest'|'bilinear'): interpolation order. Defaults to ``self.mode``. 
""" - self.randomize() spatial_size = spatial_size or self.spatial_size + self.randomize(spatial_size) mode = mode or self.mode if self.do_transform: - grid = self.deform_grid(spatial_size) + grid = self.deform_grid(spatial_size=spatial_size) grid = self.rand_affine_grid(grid=grid) grid = torch.nn.functional.interpolate(grid[None], spatial_size, mode='bicubic', align_corners=False)[0] else: @@ -1195,15 +1203,16 @@ def __init__(self, """ Args: sigma_range (2 ints): a Gaussian kernel with standard deviation sampled - from `uniform[sigma_range[0], sigma_range[1])` will be used to smooth the random offset grid. + from ``uniform[sigma_range[0], sigma_range[1])`` will be used to smooth the random offset grid. magnitude_range (2 ints): the random offsets on the grid will be generated from - `uniform[magnitude[0], magnitude[1])`. + ``uniform[magnitude[0], magnitude[1])``. prob (float): probability of returning a randomized affine grid. defaults to 0.1, with 10% chance returns a randomized grid, - otherwise returns a `spatial_size` centered area centered extracted from the input image. - spatial_size (2 ints): specifying output image spatial size [h, w, d]. - mode ('nearest'|'bilinear'): interpolation order. Defaults to 'bilinear'. - padding_mode ('zeros'|'border'|'reflection'): mode of handling out of range indices. Defaults to 'zeros'. + otherwise returns a ``spatial_size`` centered area extracted from the input image. + spatial_size (3 ints): specifying output image spatial size [h, w, d]. + mode ('nearest'|'bilinear'): interpolation order. Defaults to ``'bilinear'``. + padding_mode ('zeros'|'border'|'reflection'): mode of handling out of range indices. + Defaults to ``'zeros'``. as_tensor_output (bool): the computation is implemented using pytorch tensors, this option specifies whether to convert it back to numpy arrays. device (torch.device): device on which the tensor will be allocated. 
@@ -1238,12 +1247,13 @@ def randomize(self, grid_size): self.rand_offset = self.R.uniform(-1., 1., [3] + list(grid_size)) self.magnitude = self.R.uniform(self.magnitude_range[0], self.magnitude_range[1]) self.sigma = self.R.uniform(self.sigma_range[0], self.sigma_range[1]) + self.rand_affine_grid.randomize() def __call__(self, img, spatial_size=None, mode=None): """ Args: img (ndarray or tensor): shape must be (num_channels, H, W, D), - spatial_size (2 ints): specifying output image spatial size [h, w, d]. + spatial_size (3 ints): specifying spatial 3D output image spatial size [h, w, d]. mode ('nearest'|'bilinear'): interpolation order. Defaults to 'self.mode'. """ spatial_size = spatial_size or self.spatial_size diff --git a/tests/test_flip.py b/tests/test_flip.py index a70b9c92c5..3b027ec2c8 100644 --- a/tests/test_flip.py +++ b/tests/test_flip.py @@ -22,7 +22,7 @@ class FlipTest(NumpyImageTestCase2D): @parameterized.expand([ ("wrong_axis", ['s', 1], TypeError), - ("not_numbers", 's', AssertionError) + ("not_numbers", 's', TypeError) ]) def test_invalid_inputs(self, _, axis, raises): with self.assertRaises(raises): diff --git a/tests/test_random_affine.py b/tests/test_random_affine.py index 5149a5a80d..60c436cc6d 100644 --- a/tests/test_random_affine.py +++ b/tests/test_random_affine.py @@ -34,7 +34,7 @@ as_tensor_output=True, spatial_size=(2, 2, 2), device=None), {'img': torch.ones((1, 3, 3, 3)), 'mode': 'bilinear'}, - torch.tensor([[[[1.0000, 0.7776], [0.4174, 0.0780]], [[0.0835, 1.0000], [0.3026, 0.5732]]]],) + torch.tensor([[[[0.0000, 0.6577], [0.9911, 1.0000]], [[0.7781, 1.0000], [1.0000, 0.4000]]]]) ], [ dict(prob=0.9, @@ -44,7 +44,7 @@ scale_range=[.1, .2], as_tensor_output=True, device=None), {'img': torch.arange(64).reshape((1, 8, 8)), 'spatial_size': (3, 3)}, - torch.tensor([[[27.3614, 18.0237, 8.6860], [40.0440, 30.7063, 21.3686], [52.7266, 43.3889, 34.0512]]]) + torch.tensor([[[16.9127, 13.3079, 9.7031], [26.8129, 23.2081, 19.6033], [36.7131, 
33.1083, 29.5035]]]) ], ] diff --git a/tests/test_random_affined.py b/tests/test_random_affined.py new file mode 100644 index 0000000000..b07f7015e5 --- /dev/null +++ b/tests/test_random_affined.py @@ -0,0 +1,90 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + +import numpy as np +import torch +from parameterized import parameterized + +from monai.transforms.composables import RandAffined + +TEST_CASES = [ + [ + dict(as_tensor_output=False, device=None, spatial_size=(2, 2), keys=('img', 'seg')), + {'img': torch.ones((3, 3, 3)), 'seg': torch.ones((3, 3, 3))}, + np.ones((3, 2, 2)) + ], + [ + dict(as_tensor_output=True, device=None, spatial_size=(2, 2, 2), keys=('img', 'seg')), + {'img': torch.ones((1, 3, 3, 3)), 'seg': torch.ones((1, 3, 3, 3))}, + torch.ones((1, 2, 2, 2)) + ], + [ + dict(prob=0.9, + rotate_range=(np.pi / 2,), + shear_range=[1, 2], + translate_range=[2, 1], + as_tensor_output=True, + spatial_size=(2, 2, 2), + device=None, + keys=('img', 'seg'), + mode='bilinear'), {'img': torch.ones((1, 3, 3, 3)), 'seg': torch.ones((1, 3, 3, 3))}, + torch.tensor([[[[0.0000, 0.6577], [0.9911, 1.0000]], [[0.7781, 1.0000], [1.0000, 0.4000]]]]) + ], + [ + dict(prob=0.9, + rotate_range=(np.pi / 2,), + shear_range=[1, 2], + translate_range=[2, 1], + scale_range=[.1, .2], + as_tensor_output=True, + spatial_size=(3, 3), + keys=('img', 'seg'), + device=None), {'img': torch.arange(64).reshape((1, 8, 8)), 'seg': 
torch.arange(64).reshape((1, 8, 8))}, + torch.tensor([[[16.9127, 13.3079, 9.7031], [26.8129, 23.2081, 19.6033], [36.7131, 33.1083, 29.5035]]]) + ], + [ + dict(prob=0.9, + mode=('bilinear', 'nearest'), + rotate_range=(np.pi / 2,), + shear_range=[1, 2], + translate_range=[2, 1], + scale_range=[.1, .2], + as_tensor_output=False, + spatial_size=(3, 3), + keys=('img', 'seg'), + device=torch.device('cpu:0')), + {'img': torch.arange(64).reshape((1, 8, 8)), 'seg': torch.arange(64).reshape((1, 8, 8))}, + {'img': np.array([[[16.9127, 13.3079, 9.7031], [26.8129, 23.2081, 19.6033], [36.7131, 33.1083, 29.5035]]]), + 'seg': np.array([[[19., 12., 12.], [27., 20., 21.], [35., 36., 29.]]])} + ], +] + + +class TestRandAffined(unittest.TestCase): + + @parameterized.expand(TEST_CASES) + def test_rand_affined(self, input_param, input_data, expected_val): + g = RandAffined(**input_param).set_random_state(123) + res = g(input_data) + for key in res: + result = res[key] + expected = expected_val[key] if isinstance(expected_val, dict) else expected_val + self.assertEqual(torch.is_tensor(result), torch.is_tensor(expected)) + if torch.is_tensor(result): + np.testing.assert_allclose(result.cpu().numpy(), expected.cpu().numpy(), rtol=1e-4, atol=1e-4) + else: + np.testing.assert_allclose(result, expected, rtol=1e-4, atol=1e-4) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_random_elastic_2d.py b/tests/test_random_elastic_2d.py index 53f768bf36..d01fd5c556 100644 --- a/tests/test_random_elastic_2d.py +++ b/tests/test_random_elastic_2d.py @@ -24,8 +24,7 @@ [ {'spacing': (.3, .3), 'magnitude_range': (1., 2.), 'prob': 0.9, 'as_tensor_output': False, 'device': None}, {'img': torch.ones((3, 3, 3)), 'spatial_size': (2, 2), 'mode': 'bilinear'}, - np.array([[[0., 0.608901], [1., 0.5702355]], [[0., 0.608901], [1., 0.5702355]], [[0., 0.608901], - [1., 0.5702355]]]), + np.array([[[0., 0.], [0., 0.04970419]], [[0., 0.], [0., 0.04970419]], [[0., 0.], [0., 0.04970419]]]), ], [ { @@ 
-33,17 +32,17 @@ 'border', 'as_tensor_output': True, 'device': None, 'spatial_size': (2, 2) }, {'img': torch.arange(27).reshape((3, 3, 3))}, - torch.tensor([[[1.0849, 1.1180], [6.8100, 7.0265]], [[10.0849, 10.1180], [15.8100, 16.0265]], - [[19.0849, 19.1180], [24.8100, 25.0265]]]), + torch.tensor([[[1.6605, 1.0083], [6.0000, 6.2224]], [[10.6605, 10.0084], [15.0000, 15.2224]], + [[19.6605, 19.0083], [24.0000, 24.2224]]]), ], [ { - 'spacing': (.3, .3), 'magnitude_range': (1., 2.), 'translate_range': [-.2, .4], 'scale_range': [1.2, 2.2], - 'prob': 0.9, 'as_tensor_output': False, 'device': None + 'spacing': (.3, .3), 'magnitude_range': (.1, .2), 'translate_range': [-.01, .01], + 'scale_range': [0.01, 0.02], 'prob': 0.9, 'as_tensor_output': False, 'device': None, 'spatial_size': (2, 2), }, - {'img': torch.arange(27).reshape((3, 3, 3)), 'spatial_size': (2, 2)}, - np.array([[[0., 1.1731534], [3.8834658, 6.0565934]], [[0., 9.907095], [12.883466, 15.056594]], - [[0., 18.641037], [21.883465, 24.056593]]]), + {'img': torch.arange(27).reshape((3, 3, 3))}, + np.array([[[0.2001334, 1.2563337], [5.2274017, 7.90148]], [[8.675412, 6.9098353], [13.019891, 16.850012]], + [[17.15069, 12.563337], [20.81238, 25.798544]]]) ], ] diff --git a/tests/test_random_elastic_3d.py b/tests/test_random_elastic_3d.py index 5fb3a3130a..065d260de7 100644 --- a/tests/test_random_elastic_3d.py +++ b/tests/test_random_elastic_3d.py @@ -32,8 +32,7 @@ 'as_tensor_output': False, 'device': None, 'spatial_size': (2, 2, 2) }, {'img': torch.arange(27).reshape((1, 3, 3, 3)), 'mode': 'bilinear'}, - np.array([[[[6.016205, 2.3112855], [12.412318, 11.182229]], [[14.619441, 6.9230556], [17.23721, 16.506298]]]]), - ], + np.array([[[[1.6566806, 7.695548], [7.4342523, 13.580086]], [[11.776854, 18.669481], [18.396517, 21.551771]]]])], ] diff --git a/tests/test_random_elasticd_2d.py b/tests/test_random_elasticd_2d.py new file mode 100644 index 0000000000..1f560651ea --- /dev/null +++ b/tests/test_random_elasticd_2d.py @@ 
-0,0 +1,88 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + +import numpy as np +import torch +from parameterized import parameterized + +from monai.transforms.composables import Rand2DElasticd + +TEST_CASES = [ + [ + { + 'keys': ('img', 'seg'), 'spacing': (.3, .3), 'magnitude_range': (1., 2.), 'prob': 0.0, 'as_tensor_output': + False, 'device': None, 'spatial_size': (2, 2) + }, + {'img': torch.ones((3, 3, 3)), 'seg': torch.ones((3, 3, 3))}, + np.ones((3, 2, 2)), + ], + [ + { + 'keys': ('img', 'seg'), 'spacing': (.3, .3), 'magnitude_range': (1., 2.), 'prob': 0.9, 'as_tensor_output': + False, 'device': None, 'spatial_size': (2, 2), 'mode': 'bilinear' + }, + {'img': torch.ones((3, 3, 3)), 'seg': torch.ones((3, 3, 3))}, + np.array([[[0., 0.], [0., 0.04970419]], [[0., 0.], [0., 0.04970419]], [[0., 0.], [0., 0.04970419]]]), + ], + [ + { + 'keys': ('img', 'seg'), 'spacing': (1., 1.), 'magnitude_range': (1., 1.), 'scale_range': [1.2, 2.2], 'prob': + 0.9, 'padding_mode': 'border', 'as_tensor_output': True, 'device': None, 'spatial_size': (2, 2) + }, + {'img': torch.arange(27).reshape((3, 3, 3)), 'seg': torch.arange(27).reshape((3, 3, 3))}, + torch.tensor([[[1.6605, 1.0083], [6.0000, 6.2224]], [[10.6605, 10.0084], [15.0000, 15.2224]], + [[19.6605, 19.0083], [24.0000, 24.2224]]]), + ], + [ + { + 'keys': ('img', 'seg'), 'spacing': (.3, .3), 'magnitude_range': (.1, .2), 'translate_range': [-.01, .01], + 'scale_range': [0.01, 0.02], 
'prob': 0.9, 'as_tensor_output': False, 'device': None, 'spatial_size': (2, 2), + }, + {'img': torch.arange(27).reshape((3, 3, 3)), 'seg': torch.arange(27).reshape((3, 3, 3))}, + np.array([[[0.2001334, 1.2563337], [5.2274017, 7.90148]], [[8.675412, 6.9098353], [13.019891, 16.850012]], + [[17.15069, 12.563337], [20.81238, 25.798544]]]) + ], + [ + { + 'keys': ('img', 'seg'), 'mode': ('bilinear', 'nearest'), 'spacing': (.3, .3), 'magnitude_range': (.1, .2), + 'translate_range': [-.01, .01], + 'scale_range': [0.01, 0.02], 'prob': 0.9, 'as_tensor_output': True, 'device': None, 'spatial_size': (2, 2), + }, + {'img': torch.arange(27).reshape((3, 3, 3)), 'seg': torch.arange(27).reshape((3, 3, 3))}, + {'img': torch.tensor([[[0.2001334, 1.2563337], [5.2274017, 7.90148]], + [[8.675412, 6.9098353], [13.019891, 16.850012]], + [[17.15069, 12.563337], [20.81238, 25.798544]]]), + 'seg': torch.tensor([[[0., 2.], [6., 8.]], [[9., 11.], [15., 17.]], [[18., 20.], [24., 26.]]])} + ], +] + + +class TestRand2DElasticd(unittest.TestCase): + + @parameterized.expand(TEST_CASES) + def test_rand_2d_elasticd(self, input_param, input_data, expected_val): + g = Rand2DElasticd(**input_param) + g.set_random_state(123) + res = g(input_data) + for key in res: + result = res[key] + expected = expected_val[key] if isinstance(expected_val, dict) else expected_val + self.assertEqual(torch.is_tensor(result), torch.is_tensor(expected)) + if torch.is_tensor(result): + np.testing.assert_allclose(result.cpu().numpy(), expected.cpu().numpy(), rtol=1e-4, atol=1e-4) + else: + np.testing.assert_allclose(result, expected, rtol=1e-4, atol=1e-4) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_random_elasticd_3d.py b/tests/test_random_elasticd_3d.py new file mode 100644 index 0000000000..a72aa3bbb9 --- /dev/null +++ b/tests/test_random_elasticd_3d.py @@ -0,0 +1,72 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this 
file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + +import numpy as np +import torch +from parameterized import parameterized + +from monai.transforms.composables import Rand3DElasticd + +TEST_CASES = [ + [{'keys': ('img', 'seg'), 'magnitude_range': (.3, 2.3), 'sigma_range': (1., 20.), + 'prob': 0.0, 'as_tensor_output': False, 'device': None, 'spatial_size': (2, 2, 2)}, + {'img': torch.ones((2, 3, 3, 3)), 'seg': torch.ones((2, 3, 3, 3))}, + np.ones((2, 2, 2, 2))], + [ + {'keys': ('img', 'seg'), 'magnitude_range': (.3, .3), 'sigma_range': (1., 2.), + 'prob': 0.9, 'as_tensor_output': False, 'device': None, 'spatial_size': (2, 2, 2)}, + {'img': torch.arange(27).reshape((1, 3, 3, 3)), 'seg': torch.arange(27).reshape((1, 3, 3, 3))}, + np.array([[[[3.2385552, 4.753422], [7.779232, 9.286472]], [[16.769115, 18.287868], [21.300673, 22.808704]]]]), + ], + [ + { + 'keys': ('img', 'seg'), 'magnitude_range': (.3, .3), 'sigma_range': (1., 2.), 'prob': 0.9, + 'rotate_range': [1, 1, 1], 'as_tensor_output': False, 'device': None, + 'spatial_size': (2, 2, 2), 'mode': 'bilinear' + }, + {'img': torch.arange(27).reshape((1, 3, 3, 3)), 'seg': torch.arange(27).reshape((1, 3, 3, 3))}, + np.array([[[[1.6566806, 7.695548], [7.4342523, 13.580086]], [[11.776854, 18.669481], [18.396517, 21.551771]]]]), + ], + [ + { + 'keys': ('img', 'seg'), 'mode': ('bilinear', 'nearest'), 'magnitude_range': (.3, .3), + 'sigma_range': (1., 2.), 'prob': 0.9, 'rotate_range': [1, 1, 1], + 'as_tensor_output': True, 'device': torch.device('cpu:0'), 'spatial_size': (2, 2, 2) + }, + {'img': 
torch.arange(27).reshape((1, 3, 3, 3)), 'seg': torch.arange(27).reshape((1, 3, 3, 3))}, + {'img': torch.tensor([[[[1.6566806, 7.695548], [7.4342523, 13.580086]], + [[11.776854, 18.669481], [18.396517, 21.551771]]]]), + 'seg': torch.tensor([[[[1., 11.], [7., 17.]], [[9., 19.], [15., 25.]]]])} + ], +] + + +class TestRand3DElasticd(unittest.TestCase): + + @parameterized.expand(TEST_CASES) + def test_rand_3d_elasticd(self, input_param, input_data, expected_val): + g = Rand3DElasticd(**input_param) + g.set_random_state(123) + res = g(input_data) + for key in res: + result = res[key] + expected = expected_val[key] if isinstance(expected_val, dict) else expected_val + self.assertEqual(torch.is_tensor(result), torch.is_tensor(expected)) + if torch.is_tensor(result): + np.testing.assert_allclose(result.cpu().numpy(), expected.cpu().numpy(), rtol=1e-4, atol=1e-4) + else: + np.testing.assert_allclose(result, expected, rtol=1e-4, atol=1e-4) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_random_flip.py b/tests/test_random_flip.py index ec95485f20..ee89a133d9 100644 --- a/tests/test_random_flip.py +++ b/tests/test_random_flip.py @@ -14,7 +14,7 @@ import numpy as np from parameterized import parameterized -from monai.transforms import RandomFlip +from monai.transforms import RandFlip from tests.utils import NumpyImageTestCase2D @@ -22,11 +22,11 @@ class RandomFlipTest(NumpyImageTestCase2D): @parameterized.expand([ ("wrong_axis", ['s', 1], TypeError), - ("not_numbers", 's', AssertionError) + ("not_numbers", 's', TypeError) ]) def test_invalid_inputs(self, _, axis, raises): with self.assertRaises(raises): - flip = RandomFlip(prob=1.0, axis=axis) + flip = RandFlip(prob=1.0, axis=axis) flip(self.imt) @parameterized.expand([ @@ -35,7 +35,7 @@ def test_invalid_inputs(self, _, axis, raises): ("many_axis", [0, 1, 2]) ]) def test_correct_results(self, _, axis): - flip = RandomFlip(prob=1.0, axis=axis) + flip = RandFlip(prob=1.0, axis=axis) expected = 
np.flip(self.imt, axis) self.assertTrue(np.allclose(expected, flip(self.imt))) From 3a8f99c03598307ecc7b34b2080e38add6227ba8 Mon Sep 17 00:00:00 2001 From: Nic Ma Date: Thu, 12 Mar 2020 09:26:24 +0800 Subject: [PATCH 17/40] 160 develop TensorBoard event handler (#161) Co-authored-by: Kevin Lu Co-authored-by: Wenqi Li --- examples/densenet_classification_3d.py | 4 +- examples/unet_segmentation_3d.ipynb | 4 +- examples/unet_segmentation_3d_array.py | 80 ++++---- examples/unet_segmentation_3d_dict.py | 4 +- monai/handlers/segmentation_saver.py | 5 +- monai/handlers/stats_handler.py | 77 ++++++-- monai/handlers/tensorboard_handlers.py | 249 +++++++++++++++++++++++++ monai/handlers/utils.py | 4 + monai/networks/utils.py | 4 +- monai/utils/misc.py | 15 ++ monai/visualize/img2tensorboard.py | 10 +- tests/integration_sliding_window.py | 10 +- tests/integration_unet2d.py | 9 +- tests/test_handler_stats.py | 38 +++- tests/test_handler_tb_image.py | 60 ++++++ tests/test_handler_tb_stats.py | 81 ++++++++ tests/test_unet.py | 6 +- 17 files changed, 567 insertions(+), 93 deletions(-) create mode 100644 monai/handlers/tensorboard_handlers.py create mode 100644 tests/test_handler_tb_image.py create mode 100644 tests/test_handler_tb_stats.py diff --git a/examples/densenet_classification_3d.py b/examples/densenet_classification_3d.py index 07ac3ffe04..753097a2a9 100644 --- a/examples/densenet_classification_3d.py +++ b/examples/densenet_classification_3d.py @@ -91,7 +91,7 @@ trainer.add_event_handler(event_name=Events.EPOCH_COMPLETED, handler=checkpoint_handler, to_save={'net': net, 'opt': opt}) -train_stats_handler = StatsHandler() +train_stats_handler = StatsHandler(output_transform=lambda x: x[3]) train_stats_handler.attach(trainer) @trainer.on(Events.EPOCH_COMPLETED) @@ -108,7 +108,7 @@ def log_training_loss(engine): # Add stats event handler to print validation stats via evaluator logging.basicConfig(stream=sys.stdout, level=logging.INFO) -val_stats_handler = StatsHandler() 
+val_stats_handler = StatsHandler(output_transform=lambda x: None) val_stats_handler.attach(evaluator) # Add early stopping handler to evaluator. diff --git a/examples/unet_segmentation_3d.ipynb b/examples/unet_segmentation_3d.ipynb index bfba0a70bf..0d49742f10 100644 --- a/examples/unet_segmentation_3d.ipynb +++ b/examples/unet_segmentation_3d.ipynb @@ -197,7 +197,7 @@ "trainer.add_event_handler(event_name=Events.EPOCH_COMPLETED,\n", " handler=checkpoint_handler,\n", " to_save={'net': net, 'opt': opt})\n", - "train_stats_handler = StatsHandler()\n", + "train_stats_handler = StatsHandler(output_transform=lambda x: x[1])\n", "train_stats_handler.attach(trainer)\n", "\n", "writer = SummaryWriter()\n", @@ -260,7 +260,7 @@ "\n", "# Add stats event handler to print validation stats via evaluator\n", "logging.basicConfig(stream=sys.stdout, level=logging.INFO)\n", - "val_stats_handler = StatsHandler()\n", + "val_stats_handler = StatsHandler(lambda x: None)\n", "val_stats_handler.attach(evaluator)\n", "\n", "# Add early stopping handler to evaluator.\n", diff --git a/examples/unet_segmentation_3d_array.py b/examples/unet_segmentation_3d_array.py index de5996470d..3b0d880f10 100644 --- a/examples/unet_segmentation_3d_array.py +++ b/examples/unet_segmentation_3d_array.py @@ -18,7 +18,6 @@ import nibabel as nib import numpy as np import torch -from torch.utils.tensorboard import SummaryWriter from ignite.engine import Events, create_supervised_trainer, create_supervised_evaluator from ignite.handlers import ModelCheckpoint, EarlyStopping from torch.utils.data import DataLoader @@ -32,8 +31,8 @@ from monai.data.nifti_reader import NiftiDataset from monai.transforms import AddChannel, Rescale, ToTensor, UniformRandomPatch from monai.handlers.stats_handler import StatsHandler +from monai.handlers.tensorboard_handlers import TensorBoardStatsHandler, TensorBoardImageHandler from monai.handlers.mean_dice import MeanDice -from monai.visualize import img2tensorboard from 
monai.data.synthetic import create_test_image_3d from monai.handlers.utils import stopping_fn_from_metric @@ -88,70 +87,71 @@ loss = monai.losses.DiceLoss(do_sigmoid=True) opt = torch.optim.Adam(net.parameters(), lr) + # Since network outputs logits and segmentation, we need a custom function. def _loss_fn(i, j): return loss(i[0], j) + # Create trainer -device = torch.device("cuda:0") +device = torch.device("cpu:0") trainer = create_supervised_trainer(net, opt, _loss_fn, device, False, - output_transform=lambda x, y, y_pred, loss: [y_pred, loss.item(), y]) + output_transform=lambda x, y, y_pred, loss: [y_pred[1], loss.item(), y]) # adding checkpoint handler to save models (network params and optimizer stats) during training checkpoint_handler = ModelCheckpoint('./runs/', 'net', n_saved=10, require_empty=False) trainer.add_event_handler(event_name=Events.EPOCH_COMPLETED, handler=checkpoint_handler, to_save={'net': net, 'opt': opt}) -train_stats_handler = StatsHandler() +logging.basicConfig(stream=sys.stdout, level=logging.INFO) + +# print training loss to commandline +train_stats_handler = StatsHandler(output_transform=lambda x: x[1]) train_stats_handler.attach(trainer) +# record training loss to TensorBoard at every iteration +train_tensorboard_stats_handler = TensorBoardStatsHandler( + output_transform=lambda x: {'training_dice_loss': x[1]}, # plot under tag name taining_dice_loss + global_epoch_transform=lambda x: trainer.state.epoch) +train_tensorboard_stats_handler.attach(trainer) + + @trainer.on(Events.EPOCH_COMPLETED) def log_training_loss(engine): - # log loss to tensorboard with second item of engine.state.output, loss.item() from output_transform - writer.add_scalar('Loss/train', engine.state.output[1], engine.state.epoch) - - # tensor of ones to use where for converting labels to zero and ones - ones = torch.ones(engine.state.batch[1][0].shape, dtype=torch.int32) - first_output_tensor = engine.state.output[0][1][0].detach().cpu() - # log model output to 
tensorboard, as three dimensional tensor with no channels dimension - img2tensorboard.add_animated_gif_no_channels(writer, "first_output_final_batch", first_output_tensor, 64, - 255, engine.state.epoch) - # get label tensor and convert to single class - first_label_tensor = torch.where(engine.state.batch[1][0] > 0, ones, engine.state.batch[1][0]) - # log label tensor to tensorboard, there is a channel dimension when getting label from batch - img2tensorboard.add_animated_gif(writer, "first_label_final_batch", first_label_tensor, 64, - 255, engine.state.epoch) - second_output_tensor = engine.state.output[0][1][1].detach().cpu() - img2tensorboard.add_animated_gif_no_channels(writer, "second_output_final_batch", second_output_tensor, 64, - 255, engine.state.epoch) - second_label_tensor = torch.where(engine.state.batch[1][1] > 0, ones, engine.state.batch[1][1]) - img2tensorboard.add_animated_gif(writer, "second_label_final_batch", second_label_tensor, 64, - 255, engine.state.epoch) - third_output_tensor = engine.state.output[0][1][2].detach().cpu() - img2tensorboard.add_animated_gif_no_channels(writer, "third_output_final_batch", third_output_tensor, 64, - 255, engine.state.epoch) - third_label_tensor = torch.where(engine.state.batch[1][2] > 0, ones, engine.state.batch[1][2]) - img2tensorboard.add_animated_gif(writer, "third_label_final_batch", third_label_tensor, 64, - 255, engine.state.epoch) engine.logger.info("Epoch[%s] Loss: %s", engine.state.epoch, engine.state.output[1]) -writer = SummaryWriter() # Set parameters for validation validation_every_n_epochs = 1 metric_name = 'Mean_Dice' # add evaluation metric to the evaluator engine -val_metrics = {metric_name: MeanDice(add_sigmoid=True)} -evaluator = create_supervised_evaluator(net, val_metrics, device, True, - output_transform=lambda x, y, y_pred: (y_pred[0], y)) +val_metrics = {metric_name: MeanDice( + add_sigmoid=True, to_onehot_y=False, output_transform=lambda output: (output[0][0], output[1])) +} +evaluator = 
create_supervised_evaluator(net, val_metrics, device, True) # Add stats event handler to print validation stats via evaluator -logging.basicConfig(stream=sys.stdout, level=logging.INFO) -val_stats_handler = StatsHandler() +val_stats_handler = StatsHandler( + output_transform=lambda x: None, # disable per iteration output + global_epoch_transform=lambda x: trainer.state.epoch) val_stats_handler.attach(evaluator) -# Add early stopping handler to evaluator. +# add handler to record metrics to TensorBoard at every epoch +val_tensorboard_stats_handler = TensorBoardStatsHandler( + output_transform=lambda x: None, # no iteration plot + global_epoch_transform=lambda x: trainer.state.epoch) # use epoch number from trainer +val_tensorboard_stats_handler.attach(evaluator) +# add handler to draw several images and the corresponding labels and model outputs +# here we draw the first 3 images(draw the first channel) as GIF format along Depth axis +val_tensorboard_image_handler = TensorBoardImageHandler( + batch_transform=lambda batch: (batch[0], batch[1]), + output_transform=lambda output: output[0][1], + global_iter_transform=lambda x: trainer.state.epoch +) +evaluator.add_event_handler(event_name=Events.EPOCH_COMPLETED, handler=val_tensorboard_image_handler) + +# Add early stopping handler to evaluator early_stopper = EarlyStopping(patience=4, score_function=stopping_fn_from_metric(metric_name), trainer=trainer) @@ -166,10 +166,6 @@ def log_training_loss(engine): def run_validation(engine): evaluator.run(val_loader) -@evaluator.on(Events.EPOCH_COMPLETED) -def log_metrics_to_tensorboard(engine): - for name, value in engine.state.metrics.items(): - writer.add_scalar('Metrics/{name}', value, trainer.state.epoch) # create a training data loader logging.basicConfig(stream=sys.stdout, level=logging.INFO) diff --git a/examples/unet_segmentation_3d_dict.py b/examples/unet_segmentation_3d_dict.py index 640bed21c0..0e1e78811b 100644 --- a/examples/unet_segmentation_3d_dict.py +++ 
b/examples/unet_segmentation_3d_dict.py @@ -111,7 +111,7 @@ def prepare_batch(batch, device=None, non_blocking=False): trainer.add_event_handler(event_name=Events.EPOCH_COMPLETED, handler=checkpoint_handler, to_save={'net': net, 'opt': opt}) -train_stats_handler = StatsHandler() +train_stats_handler = StatsHandler(output_transform=lambda x: x[1]) train_stats_handler.attach(trainer) @@ -160,7 +160,7 @@ def log_training_loss(engine): # Add stats event handler to print validation stats via evaluator logging.basicConfig(stream=sys.stdout, level=logging.INFO) -val_stats_handler = StatsHandler() +val_stats_handler = StatsHandler(output_transform=lambda x: None) val_stats_handler.attach(evaluator) # Add early stopping handler to evaluator. diff --git a/monai/handlers/segmentation_saver.py b/monai/handlers/segmentation_saver.py index e0fa50310a..1f3fe2615d 100644 --- a/monai/handlers/segmentation_saver.py +++ b/monai/handlers/segmentation_saver.py @@ -10,7 +10,7 @@ # limitations under the License. import os - +import numpy as np import torch from ignite.engine import Events @@ -114,5 +114,6 @@ def __call__(self, engine): seg_output = seg_output.detach().cpu().numpy() output_filename = self._create_file_basename(self.output_postfix, filename, self.output_path) output_filename = '{}{}'.format(output_filename, self.output_ext) - write_nifti(seg_output, affine_, output_filename, original_affine_, dtype=seg_output.dtype) + # change output to "channel last" format and write to nifti format file + write_nifti(np.moveaxis(seg_output, 0, -1), affine_, output_filename, original_affine_, dtype=seg_output.dtype) self.logger.info('saved: {}'.format(output_filename)) diff --git a/monai/handlers/stats_handler.py b/monai/handlers/stats_handler.py index 47709543fe..9d2e518919 100644 --- a/monai/handlers/stats_handler.py +++ b/monai/handlers/stats_handler.py @@ -9,47 +9,63 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import warnings import logging import torch from ignite.engine import Engine, Events +from monai.utils.misc import is_scalar -KEY_VAL_FORMAT = '{}: {:.4f} ' +DEFAULT_KEY_VAL_FORMAT = '{}: {:.4f} ' +DEFAULT_TAG = 'Loss' class StatsHandler(object): """StatsHandler defines a set of Ignite Event-handlers for all the log printing logics. It's can be used for any Ignite Engine(trainer, validator and evaluator). And it can support logging for epoch level and iteration level with pre-defined loggers. - By default: - (1) epoch_print_logger logs `engine.state.metrics`. - (2) iteration_print_logger logs loss value, expected output format is (y_pred, loss). + + Default behaviors: + - When EPOCH_COMPLETED, logs ``engine.state.metrics`` using ``self.logger``. + - When ITERATION_COMPLETED, logs + ``self.output_transform(engine.state.output)`` using ``self.logger``. """ def __init__(self, epoch_print_logger=None, iteration_print_logger=None, - batch_transform=lambda x: x, output_transform=lambda x: x, - name=None): + global_epoch_transform=lambda x: x, + name=None, + tag_name=DEFAULT_TAG, + key_var_format=DEFAULT_KEY_VAL_FORMAT): """ Args: epoch_print_logger (Callable): customized callable printer for epoch level logging. must accept parameter "engine", use default printer if None. iteration_print_logger (Callable): custimized callable printer for iteration level logging. must accept parameter "engine", use default printer if None. - batch_transform (Callable): a callable that is used to transform the - ignite.engine.batch into expected format to extract input data. output_transform (Callable): a callable that is used to transform the - ignite.engine.output into expected format to extract several output data. - name (str): identifier of logging.logger to use, defaulting to `engine.logger`. + ``ignite.engine.output`` into a scalar to print, or a dictionary of {key: scalar}. + in the latter case, the output string will be formatted as key: value. 
+ by default this value logging happens when every iteration completed. + global_epoch_transform (Callable): a callable that is used to customize global epoch number. + For example, in evaluation, the evaluator engine might want to print synced epoch number + with the trainer engine. + name (str): identifier of logging.logger to use, defaulting to ``engine.logger``. + tag_name (string): when iteration output is a scalar, tag_name is used to print + tag_name: scalar_value to logger. Defaults to ``'Loss'``. + key_var_format (string): a formatting string to control the output string format of key: value. """ self.epoch_print_logger = epoch_print_logger self.iteration_print_logger = iteration_print_logger - self.batch_transform = batch_transform self.output_transform = output_transform + self.global_epoch_transform = global_epoch_transform self.logger = None if name is None else logging.getLogger(name) + self.tag_name = tag_name + self.key_var_format = key_var_format + def attach(self, engine: Engine): """Register a set of Ignite Event-Handlers to a specified Ignite engine. @@ -116,12 +132,12 @@ def _default_epoch_print(self, engine: Engine): prints_dict = engine.state.metrics if not prints_dict: return - current_epoch = engine.state.epoch + current_epoch = self.global_epoch_transform(engine.state.epoch) out_str = "Epoch[{}] Metrics -- ".format(current_epoch) for name in sorted(prints_dict): value = prints_dict[name] - out_str += KEY_VAL_FORMAT.format(name, value) + out_str += self.key_var_format.format(name, value) self.logger.info(out_str) @@ -134,19 +150,42 @@ def _default_iteration_print(self, engine: Engine): engine (ignite.engine): Ignite Engine, it can be a trainer, validator or evaluator. 
""" - loss = self.output_transform(engine.state.output)[1] - if loss is None or (torch.is_tensor(loss) and len(loss.shape) > 0): - return # not printing multi dimensional output + loss = self.output_transform(engine.state.output) + if loss is None: + return # no printing if the output is empty + + out_str = '' + if isinstance(loss, dict): # print dictionary items + for name in sorted(loss): + value = loss[name] + if not is_scalar(value): + warnings.warn('ignoring non-scalar output in StatsHandler,' + ' make sure `output_transform(engine.state.output)` returns' + ' a scalar or dictionary of key and scalar pairs to avoid this warning.' + ' {}:{}'.format(name, type(value))) + continue # not printing multi dimensional output + out_str += self.key_var_format.format(name, value.item() if torch.is_tensor(value) else value) + else: + if is_scalar(loss): # not printing multi dimensional output + out_str += self.key_var_format.format(self.tag_name, loss.item() if torch.is_tensor(loss) else loss) + else: + warnings.warn('ignoring non-scalar output in StatsHandler,' + ' make sure `output_transform(engine.state.output)` returns' + ' a scalar or a dictionary of key and scalar pairs to avoid this warning.' 
+ ' {}'.format(type(loss))) + + if not out_str: + return # no value to print + num_iterations = engine.state.epoch_length current_iteration = (engine.state.iteration - 1) % num_iterations + 1 current_epoch = engine.state.epoch num_epochs = engine.state.max_epochs - out_str = "Epoch: {}/{}, Iter: {}/{} -- ".format( + base_str = "Epoch: {}/{}, Iter: {}/{} --".format( current_epoch, num_epochs, current_iteration, num_iterations) - out_str += KEY_VAL_FORMAT.format('Loss', loss.item() if torch.is_tensor(loss) else loss) - self.logger.info(out_str) + self.logger.info(' '.join([base_str, out_str])) diff --git a/monai/handlers/tensorboard_handlers.py b/monai/handlers/tensorboard_handlers.py new file mode 100644 index 0000000000..2d5116bd59 --- /dev/null +++ b/monai/handlers/tensorboard_handlers.py @@ -0,0 +1,249 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import numpy as np +import warnings +import torch +from torch.utils.tensorboard import SummaryWriter +from ignite.engine import Engine, Events +from monai.visualize import img2tensorboard +from monai.utils.misc import is_scalar +from monai.transforms.utils import rescale_array + + +class TensorBoardStatsHandler(object): + """TensorBoardStatsHandler defines a set of Ignite Event-handlers for all the TensorBoard logics. + It's can be used for any Ignite Engine(trainer, validator and evaluator). 
+ And it can support both epoch level and iteration level with pre-defined TensorBoard event writer. + The expected data source is ignite ``engine.state.output`` and ``engine.state.metrics``. + + Default behaviors: + - When EPOCH_COMPLETED, write each dictionary item in + ``engine.state.metrics`` to TensorBoard. + - When ITERATION_COMPELTED, write each dictionary item in + ``self.output_transform(engine.state.output)`` to TensorBoard. + """ + + def __init__(self, + summary_writer=None, + epoch_event_writer=None, + iteration_event_writer=None, + output_transform=lambda x: {'Loss': x}, + global_epoch_transform=lambda x: x): + """ + Args: + summary_writer (SummaryWriter): user can specify TensorBoard SummaryWriter, + default to create a new writer. + epoch_event_writer (Callable): customized callable TensorBoard writer for epoch level. + must accept parameter "engine" and "summary_writer", use default event writer if None. + iteration_event_writer (Callable): custimized callable TensorBoard writer for iteration level. + must accept parameter "engine" and "summary_writer", use default event writer if None. + output_transform (Callable): a callable that is used to transform the + ``ignite.engine.output`` into a dictionary of (tag_name: scalar) pairs to be plotted onto tensorboard. + by default this scalar plotting happens when every iteration completed. + global_epoch_transform (Callable): a callable that is used to customize global epoch number. + For example, in evaluation, the evaluator engine might want to use trainer engines epoch number + when plotting epoch vs metric curves. 
+ """ + self._writer = SummaryWriter() if summary_writer is None else summary_writer + self.epoch_event_writer = epoch_event_writer + self.iteration_event_writer = iteration_event_writer + self.output_transform = output_transform + self.global_epoch_transform = global_epoch_transform + + def attach(self, engine: Engine): + """Register a set of Ignite Event-Handlers to a specified Ignite engine. + + Args: + engine (ignite.engine): Ignite Engine, it can be a trainer, validator or evaluator. + + """ + if not engine.has_event_handler(self.iteration_completed, Events.ITERATION_COMPLETED): + engine.add_event_handler(Events.ITERATION_COMPLETED, self.iteration_completed) + if not engine.has_event_handler(self.epoch_completed, Events.EPOCH_COMPLETED): + engine.add_event_handler(Events.EPOCH_COMPLETED, self.epoch_completed) + + def epoch_completed(self, engine: Engine): + """handler for train or validation/evaluation epoch completed Event. + Write epoch level events, default values are from ignite state.metrics dict. + + Args: + engine (ignite.engine): Ignite Engine, it can be a trainer, validator or evaluator. + + """ + if self.epoch_event_writer is not None: + self.epoch_event_writer(engine, self._writer) + else: + self._default_epoch_writer(engine, self._writer) + + def iteration_completed(self, engine: Engine): + """handler for train or validation/evaluation iteration completed Event. + Write iteration level events, default values are from ignite state.logs dict. + + Args: + engine (ignite.engine): Ignite Engine, it can be a trainer, validator or evaluator. + + """ + if self.iteration_event_writer is not None: + self.iteration_event_writer(engine, self._writer) + else: + self._default_iteration_writer(engine, self._writer) + + def _default_epoch_writer(self, engine: Engine, writer: SummaryWriter): + """Execute epoch level event write operation based on Ignite engine.state data. + Default is to write the values from ignite state.metrics dict. 
+ + Args: + engine (ignite.engine): Ignite Engine, it can be a trainer, validator or evaluator. + writer (SummaryWriter): TensorBoard writer, created in TensorBoardHandler. + + """ + current_epoch = self.global_epoch_transform(engine.state.epoch) + summary_dict = engine.state.metrics + for name, value in summary_dict.items(): + writer.add_scalar(name, value, current_epoch) + writer.flush() + + def _default_iteration_writer(self, engine: Engine, writer: SummaryWriter): + """Execute iteration level event write operation based on Ignite engine.state data. + Default is to write the loss value of current iteration. + + Args: + engine (ignite.engine): Ignite Engine, it can be a trainer, validator or evaluator. + writer (SummaryWriter): TensorBoard writer, created in TensorBoardHandler. + + """ + loss_dict = self.output_transform(engine.state.output) + if loss_dict is None: + return # do nothing if output is empty + if not isinstance(loss_dict, dict): + raise ValueError('TensorBoardStatsHandler requires' + ' output_transform(engine.state.output) returning a dictionary' + ' of key and scalar pairs to plot' + ' got {}.'.format(type(loss_dict))) + for name, value in loss_dict.items(): + if not is_scalar(value): + warnings.warn('ignoring non-scalar output in tensorboard curve plotting,' + ' make sure `output_transform(engine.state.output)` returns' + ' a dictionary of key and scalar pairs to avoid this warning.' + ' Got {}:{}'.format(name, type(value))) + continue + plot_value = value.item() if torch.is_tensor(value) else value + writer.add_scalar(name, plot_value, engine.state.iteration) + writer.flush() + + +class TensorBoardImageHandler(object): + """TensorBoardImageHandler is an ignite Event handler that can visualise images, labels and outputs as 2D/3D images. 
+ 2D output (shape in Batch, channel, H, W) will be shown as simple image using the first element in the batch, + for 3D to ND output (shape in Batch, channel, H, W, D) input, + the last three dimensions will be shown as GIF image along the last axis (typically Depth). + + It's can be used for any Ignite Engine (trainer, validator and evaluator). + User can easily added it to engine for any expected Event, for example: ``EPOCH_COMPLETED``, + ``ITERATION_COMPLETED``. The expected data source is ignite's ``engine.state.batch`` and ``engine.state.output``. + + Default behavior: + - Show y_pred as images (GIF for 3D) on TensorBoard when Event triggered, + - need to use ``batch_transform`` and ``output_transform`` to specify + how many images to show and show which channel. + - Expects ``batch_transform(engine.state.batch)`` to return data + format: (image[N, channel, ...], label[N, channel, ...]). + - Expects ``output_transform(engine.state.output)`` to return a torch + tensor in format (y_pred[N, channel, ...], loss). + """ + + def __init__(self, + summary_writer=None, + batch_transform=lambda x: x, + output_transform=lambda x: x, + global_iter_transform=lambda x: x, + max_channels=1, + max_frames=64): + """ + Args: + summary_writer (SummaryWriter): user can specify TensorBoard SummaryWriter, + default to create a new writer. + batch_transform (Callable): a callable that is used to transform the + ``ignite.engine.batch`` into expected format to extract several label data. + output_transform (Callable): a callable that is used to transform the + ``ignite.engine.output`` into expected format to extract several output data. + global_iter_transform (Callable): a callable that is used to customize global step number for TensorBoard. + For example, in evaluation, the evaluator engine needs to know current epoch from trainer. + max_channels (int): number of channels to plot. + max_frames (int): number of frames for 2D-t plot. 
+ + """ + self._writer = SummaryWriter() if summary_writer is None else summary_writer + self.batch_transform = batch_transform + self.output_transform = output_transform + self.global_iter_transform = global_iter_transform + + self.max_frames = max_frames + self.max_channels = max_channels + + def __call__(self, engine): + step = self.global_iter_transform(engine.state.iteration) + + show_images = self.batch_transform(engine.state.batch)[0] + if torch.is_tensor(show_images): + show_images = show_images.detach().cpu().numpy() + if show_images is not None: + if not isinstance(show_images, np.ndarray): + raise ValueError('output_transform(engine.state.output)[0] must be an ndarray or tensor.') + self._add_2_or_3_d(show_images, step, 'input_0') + + show_labels = self.batch_transform(engine.state.batch)[1] + if torch.is_tensor(show_labels): + show_labels = show_labels.detach().cpu().numpy() + if show_labels is not None: + if not isinstance(show_labels, np.ndarray): + raise ValueError('batch_transform(engine.state.batch)[1] must be an ndarray or tensor.') + self._add_2_or_3_d(show_labels, step, 'input_1') + + show_outputs = self.output_transform(engine.state.output) + if torch.is_tensor(show_outputs): + show_outputs = show_outputs.detach().cpu().numpy() + if show_outputs is not None: + if not isinstance(show_outputs, np.ndarray): + raise ValueError('output_transform(engine.state.output) must be an ndarray or tensor.') + self._add_2_or_3_d(show_outputs, step, 'output') + + self._writer.flush() + + def _add_2_or_3_d(self, data, step, tag='output'): + # for i, d in enumerate(data): # go through a batch of images + d = data[0] # show the first element in a batch + + if d.ndim == 2: + d = rescale_array(d, 0, 1) + dataformats = 'HW' + self._writer.add_image('{}_{}'.format(tag, dataformats), d, step, dataformats=dataformats) + return + + if d.ndim == 3: + if d.shape[0] == 3 and self.max_channels == 3: # rgb? 
+ dataformats = 'CHW' + self._writer.add_image('{}_{}'.format(tag, dataformats), d, step, dataformats='CHW') + return + for j, d2 in enumerate(d[:self.max_channels]): + d2 = rescale_array(d2, 0, 1) + dataformats = 'HW' + self._writer.add_image('{}_{}_{}'.format(tag, dataformats, j), d2, step, dataformats=dataformats) + return + + if d.ndim >= 4: + spatial = d.shape[-3:] + for j, d3 in enumerate(d.reshape([-1] + list(spatial))[:self.max_channels]): + d3 = rescale_array(d3, 0, 255) + img2tensorboard.add_animated_gif( + self._writer, '{}_HWD_{}'.format(tag, j), d3[None], self.max_frames, 1.0, step) + return diff --git a/monai/handlers/utils.py b/monai/handlers/utils.py index 377d4d0073..1cd849d18e 100644 --- a/monai/handlers/utils.py +++ b/monai/handlers/utils.py @@ -12,13 +12,17 @@ def stopping_fn_from_metric(metric_name): """Returns a stopping function for ignite.handlers.EarlyStopping using the given metric name.""" + def stopping_fn(engine): return engine.state.metrics[metric_name] + return stopping_fn def stopping_fn_from_loss(): """Returns a stopping function for ignite.handlers.EarlyStopping using the loss value.""" + def stopping_fn(engine): return -engine.state.output + return stopping_fn diff --git a/monai/networks/utils.py b/monai/networks/utils.py index 5a22884846..bca9922374 100644 --- a/monai/networks/utils.py +++ b/monai/networks/utils.py @@ -52,6 +52,6 @@ def predict_segmentation(logits): """ # generate prediction outputs, logits has shape BCHW[D] if logits.shape[1] == 1: - return (logits[:, 0] >= 0).int() # for binary segmentation threshold on channel 0 + return (logits >= 0).int() # for binary segmentation threshold on channel 0 else: - return logits.max(1)[1] # take the index of the max value along dimension 1 + return logits.argmax(1).unsqueeze(1) # take the index of the max value along dimension 1 diff --git a/monai/utils/misc.py b/monai/utils/misc.py index 775b8f2ebd..261e521adb 100644 --- a/monai/utils/misc.py +++ b/monai/utils/misc.py @@ -11,6 
+11,9 @@ import itertools +import numpy as np +import torch + def zip_with(op, *vals, mapfunc=map): """ @@ -40,3 +43,15 @@ def ensure_tuple(vals): vals = (vals,) return tuple(vals) + + +def is_scalar_tensor(val): + if torch.is_tensor(val) and val.ndim == 0: + return True + return False + + +def is_scalar(val): + if torch.is_tensor(val) and val.ndim == 0: + return True + return np.isscalar(val) diff --git a/monai/visualize/img2tensorboard.py b/monai/visualize/img2tensorboard.py index 82211a8ecd..8fce996685 100644 --- a/monai/visualize/img2tensorboard.py +++ b/monai/visualize/img2tensorboard.py @@ -27,8 +27,8 @@ def _image3_animated_gif(imp, scale_factor=1): # x=numpy.random.randint(0,256,[10,10,10],numpy.uint8) (tag, ims) = imp ims = [ - (np.asarray((ims[i, :, :])) * scale_factor).astype(np.uint8) - for i in range(ims.shape[0]) + (np.asarray((ims[:, :, i])) * scale_factor).astype(np.uint8) + for i in range(ims.shape[2]) ] ims = [GifImage.fromarray(im) for im in ims] img_str = b'' @@ -49,8 +49,8 @@ def _image3_animated_gif(imp, scale_factor=1): def make_animated_gif_summary(tag, tensor, max_out=3, - animation_axes=(1,), - image_axes=(2, 3), + animation_axes=(3,), + image_axes=(1, 2), other_indices=None, scale_factor=1): """ @@ -58,7 +58,7 @@ def make_animated_gif_summary(tag, Args: tag: Data identifier - tensor: tensor for the image, expected to be in CDHW format + tensor: tensor for the image, expected to be in CHWD format max_out: maximum number of slices to animate through animation_axes: axis to animate on (not currently used) image_axes: axes of image (not currently used) diff --git a/tests/integration_sliding_window.py b/tests/integration_sliding_window.py index db10d7cc49..31c7a1248a 100644 --- a/tests/integration_sliding_window.py +++ b/tests/integration_sliding_window.py @@ -65,17 +65,19 @@ def _sliding_window_processor(_engine, batch): basename = os.path.basename(img_name)[:-len('.nii.gz')] saved_name = os.path.join(temp_dir, basename, 
'{}_seg.nii.gz'.format(basename)) - testing_shape = nib.load(saved_name).get_fdata().shape + # get spatial dimensions shape, the saved nifti image format: HWDC + testing_shape = nib.load(saved_name).get_fdata().shape[:-1] if os.path.exists(img_name): os.remove(img_name) if os.path.exists(seg_name): os.remove(seg_name) - - return testing_shape == input_shape + if testing_shape != input_shape: + print('testing shape: {} does not match input shape: {}.'.format(testing_shape, input_shape)) + return False + return True if __name__ == "__main__": result = run_test() - sys.exit(0 if result else 1) diff --git a/tests/integration_unet2d.py b/tests/integration_unet2d.py index 1fd9074c66..7b0f116b77 100644 --- a/tests/integration_unet2d.py +++ b/tests/integration_unet2d.py @@ -51,12 +51,13 @@ def loss_fn(pred, grnd): trainer = create_supervised_trainer(net, opt, loss_fn, device, False) trainer.run(src, 1) - - return trainer.state.output + loss = trainer.state.output + print('Loss:', loss) + if loss >= 1: + print('Loss value is wrong, expect to be < 1.') + return loss if __name__ == "__main__": result = run_test() - print(result) - sys.exit(0 if result < 1 else 1) diff --git a/tests/test_handler_stats.py b/tests/test_handler_stats.py index 58a62133d2..fdb0600e04 100644 --- a/tests/test_handler_stats.py +++ b/tests/test_handler_stats.py @@ -30,7 +30,7 @@ def test_metrics_print(self): # set up engine def _train_func(engine, batch): - return None, torch.tensor(0.0) + return torch.tensor(0.0) engine = Engine(_train_func) @@ -50,7 +50,6 @@ def _update_metric(engine): output_str = log_stream.getvalue() grep = re.compile('.*{}.*'.format(key_to_handler)) has_key_word = re.compile('.*{}.*'.format(key_to_print)) - matched = [] for idx, line in enumerate(output_str.split('\n')): if grep.match(line): if idx in [5, 10]: @@ -60,16 +59,44 @@ def test_loss_print(self): log_stream = StringIO() logging.basicConfig(stream=log_stream, level=logging.INFO) key_to_handler = 'test_logging' - 
key_to_print = 'Loss' + key_to_print = 'myLoss' # set up engine def _train_func(engine, batch): - return None, torch.tensor(0.0) + return torch.tensor(0.0) engine = Engine(_train_func) # set up testing handler - stats_handler = StatsHandler(name=key_to_handler) + stats_handler = StatsHandler(name=key_to_handler, tag_name=key_to_print) + stats_handler.attach(engine) + + engine.run(range(3), max_epochs=2) + + # check logging output + output_str = log_stream.getvalue() + grep = re.compile('.*{}.*'.format(key_to_handler)) + has_key_word = re.compile('.*{}.*'.format(key_to_print)) + for idx, line in enumerate(output_str.split('\n')): + if grep.match(line): + if idx in [1, 2, 3, 6, 7, 8]: + self.assertTrue(has_key_word.match(line)) + + def test_loss_dict(self): + log_stream = StringIO() + logging.basicConfig(stream=log_stream, level=logging.INFO) + key_to_handler = 'test_logging' + key_to_print = 'myLoss1' + + # set up engine + def _train_func(engine, batch): + return torch.tensor(0.0) + + engine = Engine(_train_func) + + # set up testing handler + stats_handler = StatsHandler(name=key_to_handler, + output_transform=lambda x: {key_to_print: x}) stats_handler.attach(engine) engine.run(range(3), max_epochs=2) @@ -78,7 +105,6 @@ def _train_func(engine, batch): output_str = log_stream.getvalue() grep = re.compile('.*{}.*'.format(key_to_handler)) has_key_word = re.compile('.*{}.*'.format(key_to_print)) - matched = [] for idx, line in enumerate(output_str.split('\n')): if grep.match(line): if idx in [1, 2, 3, 6, 7, 8]: diff --git a/tests/test_handler_tb_image.py b/tests/test_handler_tb_image.py new file mode 100644 index 0000000000..9bf55e162b --- /dev/null +++ b/tests/test_handler_tb_image.py @@ -0,0 +1,60 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import glob +import os +import shutil +import unittest + +import numpy as np +import torch +from ignite.engine import Engine, Events +from parameterized import parameterized + +from monai.handlers.tensorboard_handlers import TensorBoardImageHandler + +TEST_CASES = [ + [[20, 20]], + [[2, 20, 20]], + [[3, 20, 20]], + [[20, 20, 20]], + [[2, 20, 20, 20]], + [[2, 2, 20, 20, 20]], +] + + +class TestHandlerTBImage(unittest.TestCase): + + @parameterized.expand(TEST_CASES) + def test_tb_image_shape(self, shape): + default_dir = os.path.join('.', 'runs') + shutil.rmtree(default_dir, ignore_errors=True) + + # set up engine + def _train_func(engine, batch): + return torch.zeros((1, 1, 10, 10)) + + engine = Engine(_train_func) + + # set up testing handler + stats_handler = TensorBoardImageHandler() + engine.add_event_handler(Events.ITERATION_COMPLETED, stats_handler) + + data = zip(np.random.normal(size=(10, 4, *shape)), np.random.normal(size=(10, 4, *shape))) + engine.run(data, epoch_length=10, max_epochs=1) + + self.assertTrue(os.path.exists(default_dir)) + self.assertTrue(len(glob.glob(default_dir)) > 0) + shutil.rmtree(default_dir) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_handler_tb_stats.py b/tests/test_handler_tb_stats.py new file mode 100644 index 0000000000..53a691701f --- /dev/null +++ b/tests/test_handler_tb_stats.py @@ -0,0 +1,81 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import shutil +import tempfile +import unittest +import glob + +from ignite.engine import Engine, Events +from torch.utils.tensorboard import SummaryWriter + +from monai.handlers.tensorboard_handlers import TensorBoardStatsHandler + + +class TestHandlerTBStats(unittest.TestCase): + + def test_metrics_print(self): + default_dir = os.path.join('.', 'runs') + shutil.rmtree(default_dir, ignore_errors=True) + + # set up engine + def _train_func(engine, batch): + return batch + 1.0 + + engine = Engine(_train_func) + + # set up dummy metric + @engine.on(Events.EPOCH_COMPLETED) + def _update_metric(engine): + current_metric = engine.state.metrics.get('acc', 0.1) + engine.state.metrics['acc'] = current_metric + 0.1 + + # set up testing handler + stats_handler = TensorBoardStatsHandler() + stats_handler.attach(engine) + engine.run(range(3), max_epochs=2) + # check logging output + + self.assertTrue(os.path.exists(default_dir)) + shutil.rmtree(default_dir) + + def test_metrics_writer(self): + default_dir = os.path.join('.', 'runs') + shutil.rmtree(default_dir, ignore_errors=True) + with tempfile.TemporaryDirectory() as temp_dir: + + # set up engine + def _train_func(engine, batch): + return batch + 1.0 + + engine = Engine(_train_func) + + # set up dummy metric + @engine.on(Events.EPOCH_COMPLETED) + def _update_metric(engine): + current_metric = engine.state.metrics.get('acc', 0.1) + engine.state.metrics['acc'] = current_metric + 0.1 + + # set up testing handler + writer = SummaryWriter(log_dir=temp_dir) + stats_handler = TensorBoardStatsHandler( + writer, 
output_transform=lambda x: {'loss': x * 2.0}, + global_epoch_transform=lambda x: x * 3.0) + stats_handler.attach(engine) + engine.run(range(3), max_epochs=2) + # check logging output + self.assertTrue(len(glob.glob(temp_dir)) > 0) + self.assertTrue(not os.path.exists(default_dir)) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_unet.py b/tests/test_unet.py index 98102375a6..c1e838c284 100644 --- a/tests/test_unet.py +++ b/tests/test_unet.py @@ -26,7 +26,7 @@ 'num_res_units': 1, }, torch.randn(16, 1, 32, 32), - (16, 32, 32), + (16, 1, 32, 32), ] TEST_CASE_2 = [ # single channel 3D, batch 16 @@ -39,7 +39,7 @@ 'num_res_units': 1, }, torch.randn(16, 1, 32, 24, 48), - (16, 32, 24, 48), + (16, 1, 32, 24, 48), ] TEST_CASE_3 = [ # 4-channel 3D, batch 16 @@ -52,7 +52,7 @@ 'num_res_units': 1, }, torch.randn(16, 4, 32, 64, 48), - (16, 32, 64, 48), + (16, 1, 32, 64, 48), ] From e40917fe79992fb21e2c36640b684642bd506391 Mon Sep 17 00:00:00 2001 From: Mohammad Adil Date: Wed, 11 Mar 2020 18:49:14 -0700 Subject: [PATCH 18/40] Adding dict-based and random spatial transforms. 
(#163) Co-authored-by: Nic Ma --- monai/transforms/composables.py | 230 +++++++++++++++++- monai/transforms/transforms.py | 3 +- tests/test_flip.py | 33 ++- ...{test_random_flip.py => test_rand_flip.py} | 29 ++- ...t_random_rotate.py => test_rand_rotate.py} | 0 ...{test_random_zoom.py => test_rand_zoom.py} | 22 +- tests/test_rotate.py | 23 +- tests/test_zoom.py | 38 ++- 8 files changed, 330 insertions(+), 48 deletions(-) rename tests/{test_random_flip.py => test_rand_flip.py} (62%) rename tests/{test_random_rotate.py => test_rand_rotate.py} (100%) rename tests/{test_random_zoom.py => test_rand_zoom.py} (76%) diff --git a/monai/transforms/composables.py b/monai/transforms/composables.py index 8ccd5a747a..c19c3f7df9 100644 --- a/monai/transforms/composables.py +++ b/monai/transforms/composables.py @@ -22,7 +22,8 @@ from monai.transforms.compose import Randomizable, Transform from monai.transforms.transforms import (LoadNifti, AsChannelFirst, Orientation, AddChannel, Spacing, Rotate90, SpatialCrop, - RandAffine, Rand2DElastic, Rand3DElastic) + RandAffine, Rand2DElastic, Rand3DElastic, + Flip, Rotate, Zoom) from monai.utils.misc import ensure_tuple from monai.transforms.utils import generate_pos_neg_label_crop_centers, create_grid from monai.utils.aliases import alias @@ -476,7 +477,6 @@ def __init__(self, keys, as_tensor_output (bool): the computation is implemented using pytorch tensors, this option specifies whether to convert it back to numpy arrays. device (torch.device): device on which the tensor will be allocated. - See also: - ``RandAffineGrid`` for the random affine paramters configurations. - ``Affine`` for the affine transformation parameters configurations. @@ -551,7 +551,6 @@ def __init__(self, keys, as_tensor_output (bool): the computation is implemented using pytorch tensors, this option specifies whether to convert it back to numpy arrays. device (torch.device): device on which the tensor will be allocated. 
- See also: - ``RandAffineGrid`` for the random affine paramters configurations. - ``Affine`` for the affine transformation parameters configurations. @@ -594,3 +593,228 @@ def __call__(self, data): for key in self.keys: # same interpolation mode d[key] = self.rand_3d_elastic.resampler(d[key], grid, mode=self.rand_3d_elastic.mode) return d + + +@export +@alias('FlipD', 'FlipDict') +class Flipd(MapTransform): + """Dictionary-based wrapper of Flip. + + Args: + keys (dict): Keys to pick data for transformation. + axis (None, int or tuple of ints): Axes along which to flip over. Default is None. + """ + + def __init__(self, keys, axis=None): + MapTransform.__init__(self, keys) + self.flipper = Flip(axis=axis) + + def __call__(self, data): + d = dict(data) + for key in self.keys: + d[key] = self.flipper(d[key]) + return d + + +@export +@alias('RandFlipD', 'RandFlipDict') +class RandFlipd(Randomizable, MapTransform): + """Dict-based wrapper of RandFlip. + + Args: + prob (float): Probability of flipping. + axis (None, int or tuple of ints): Axes along which to flip over. Default is None. + """ + + def __init__(self, keys, prob=0.1, axis=None): + MapTransform.__init__(self, keys) + self.axis = axis + self.prob = prob + + self._do_transform = False + self.flipper = Flip(axis=axis) + + def randomize(self): + self._do_transform = self.R.random_sample() < self.prob + + def __call__(self, data): + self.randomize() + d = dict(data) + if not self._do_transform: + return d + for key in self.keys: + d[key] = self.flipper(d[key]) + return d + + +@export +@alias('RotateD', 'RotateDict') +class Rotated(MapTransform): + """Dictionary-based wrapper of Rotate. + + Args: + keys (dict): Keys to pick data for transformation. + angle (float): Rotation angle in degrees. + axes (tuple of 2 ints): Axes of rotation. Default: (1, 2). This is the first two + axis in spatial dimensions according to MONAI channel first shape assumption. + reshape (bool): If true, output shape is made same as input. 
Default: True. + order (int): Order of spline interpolation. Range 0-5. Default: 1. This is + different from scipy where default interpolation is 3. + mode (str): Points outside boundary filled according to this mode. Options are + 'constant', 'nearest', 'reflect', 'wrap'. Default: 'constant'. + cval (scalar): Values to fill outside boundary. Default: 0. + prefiter (bool): Apply spline_filter before interpolation. Default: True. + """ + + def __init__(self, keys, angle, axes=(1, 2), reshape=True, order=1, + mode='constant', cval=0, prefilter=True): + MapTransform.__init__(self, keys) + self.rotator = Rotate(angle=angle, axes=axes, reshape=reshape, + order=order, mode=mode, cval=cval, prefilter=prefilter) + + def __call__(self, data): + d = dict(data) + for key in self.keys: + d[key] = self.rotator(d[key]) + return d + + +@export +@alias('RandRotateD', 'RandRotateDict') +class RandRotated(Randomizable, MapTransform): + """Randomly rotates the input arrays. + + Args: + prob (float): Probability of rotation. + degrees (tuple of float or float): Range of rotation in degrees. If single number, + angle is picked from (-degrees, degrees). + axes (tuple of 2 ints): Axes of rotation. Default: (1, 2). This is the first two + axis in spatial dimensions according to MONAI channel first shape assumption. + reshape (bool): If true, output shape is made same as input. Default: True. + order (int): Order of spline interpolation. Range 0-5. Default: 1. This is + different from scipy where default interpolation is 3. + mode (str): Points outside boundary filled according to this mode. Options are + 'constant', 'nearest', 'reflect', 'wrap'. Default: 'constant'. + cval (scalar): Value to fill outside boundary. Default: 0. + prefiter (bool): Apply spline_filter before interpolation. Default: True. 
+ """ + def __init__(self, keys, degrees, prob=0.1, axes=(1, 2), reshape=True, order=1, + mode='constant', cval=0, prefilter=True): + MapTransform.__init__(self, keys) + self.prob = prob + self.degrees = degrees + self.reshape = reshape + self.order = order + self.mode = mode + self.cval = cval + self.prefilter = prefilter + self.axes = axes + + if not hasattr(self.degrees, '__iter__'): + self.degrees = (-self.degrees, self.degrees) + assert len(self.degrees) == 2, "degrees should be a number or pair of numbers." + + self._do_transform = False + self.angle = None + + def randomize(self): + self._do_transform = self.R.random_sample() < self.prob + self.angle = self.R.uniform(low=self.degrees[0], high=self.degrees[1]) + + def __call__(self, data): + self.randomize() + d = dict(data) + if not self._do_transform: + return d + rotator = Rotate(self.angle, self.axes, self.reshape, self.order, + self.mode, self.cval, self.prefilter) + for key in self.keys: + d[key] = self.flipper(d[key]) + return d + + +@export +@alias('ZoomD', 'ZoomDict') +class Zoomd(MapTransform): + """Dictionary-based wrapper of Zoom transform. + + Args: + zoom (float or sequence): The zoom factor along the spatial axes. + If a float, zoom is the same for each spatial axis. + If a sequence, zoom should contain one value for each spatial axis. + order (int): order of interpolation. Default=3. + mode (str): Determines how input is extended beyond boundaries. Default is 'constant'. + cval (scalar, optional): Value to fill past edges. Default is 0. + use_gpu (bool): Should use cpu or gpu. Uses cupyx which doesn't support order > 1 and modes + 'wrap' and 'reflect'. Defaults to cpu for these cases or if cupyx not found. + keep_size (bool): Should keep original size (pad if needed). 
+ """ + + def __init__(self, keys, zoom, order=3, mode='constant', cval=0, + prefilter=True, use_gpu=False, keep_size=False): + MapTransform.__init__(self, keys) + self.zoomer = Zoom(zoom=zoom, order=order, mode=mode, cval=cval, + prefilter=prefilter, use_gpu=use_gpu, keep_size=keep_size) + + def __call__(self, data): + d = dict(data) + for key in self.keys: + d[key] = self.zoomer(d[key]) + return d + + +@export +@alias('RandZoomD', 'RandZoomDict') +class RandZoomd(Randomizable, MapTransform): + """Dict-based wrapper of RandZoom. + + Args: + keys (dict): Keys to pick data for transformation. + prob (float): Probability of zooming. + min_zoom (float or sequence): Min zoom factor. Can be float or sequence same size as image. + max_zoom (float or sequence): Max zoom factor. Can be float or sequence same size as image. + order (int): order of interpolation. Default=3. + mode ('reflect', 'constant', 'nearest', 'mirror', 'wrap'): Determines how input is + extended beyond boundaries. Default: 'constant'. + cval (scalar, optional): Value to fill past edges. Default is 0. + use_gpu (bool): Should use cpu or gpu. Uses cupyx which doesn't support order > 1 and modes + 'wrap' and 'reflect'. Defaults to cpu for these cases or if cupyx not found. + keep_size (bool): Should keep original size (pad if needed). + """ + + def __init__(self, keys, prob=0.1, min_zoom=0.9, + max_zoom=1.1, order=3, mode='constant', + cval=0, prefilter=True, use_gpu=False, keep_size=False): + MapTransform.__init__(self, keys) + if hasattr(min_zoom, '__iter__') and \ + hasattr(max_zoom, '__iter__'): + assert len(min_zoom) == len(max_zoom), "min_zoom and max_zoom must have same length." 
+ self.min_zoom = min_zoom + self.max_zoom = max_zoom + self.prob = prob + self.order = order + self.mode = mode + self.cval = cval + self.prefilter = prefilter + self.use_gpu = use_gpu + self.keep_size = keep_size + + self._do_transform = False + self._zoom = None + + def randomize(self): + self._do_transform = self.R.random_sample() < self.prob + if hasattr(self.min_zoom, '__iter__'): + self._zoom = (self.R.uniform(l, h) for l, h in zip(self.min_zoom, self.max_zoom)) + else: + self._zoom = self.R.uniform(self.min_zoom, self.max_zoom) + + def __call__(self, data): + self.randomize() + d = dict(data) + if not self._do_transform: + return d + zoomer = Zoom(self._zoom, self.order, self.mode, self.cval, self.prefilter, self.use_gpu, self.keep_size) + for key in self.keys: + d[key] = zoomer(d[key]) + return d diff --git a/monai/transforms/transforms.py b/monai/transforms/transforms.py index 370a7fb305..8f140972f6 100644 --- a/monai/transforms/transforms.py +++ b/monai/transforms/transforms.py @@ -434,7 +434,7 @@ def __call__(self, img): pad_vec[idx] = [half, diff - half] elif diff < 0: # need slicing slice_vec[idx] = slice(half, half + od) - zoomed = np.pad(zoomed, pad_vec) + zoomed = np.pad(zoomed, pad_vec, mode='constant') return zoomed[tuple(slice_vec)] @@ -696,6 +696,7 @@ def __init__(self, prob=0.1, axis=None): self.flipper = Flip(axis=axis) self._do_transform = False + self.flipper = Flip(axis=axis) def randomize(self): self._do_transform = self.R.random_sample() < self.prob diff --git a/tests/test_flip.py b/tests/test_flip.py index 3b027ec2c8..a261c315e2 100644 --- a/tests/test_flip.py +++ b/tests/test_flip.py @@ -14,31 +14,44 @@ import numpy as np from parameterized import parameterized -from monai.transforms import Flip +from monai.transforms import Flip, Flipd from tests.utils import NumpyImageTestCase2D +INVALID_CASES = [("wrong_axis", ['s', 1], TypeError), + ("not_numbers", 's', TypeError)] + +VALID_CASES = [("no_axis", None), + ("one_axis", 1), + 
("many_axis", [0, 1, 2])] + class FlipTest(NumpyImageTestCase2D): - @parameterized.expand([ - ("wrong_axis", ['s', 1], TypeError), - ("not_numbers", 's', TypeError) - ]) + @parameterized.expand(INVALID_CASES) def test_invalid_inputs(self, _, axis, raises): with self.assertRaises(raises): flip = Flip(axis) flip(self.imt) - @parameterized.expand([ - ("no_axis", None), - ("one_axis", 1), - ("many_axis", [0, 1, 2]) - ]) + @parameterized.expand(INVALID_CASES) + def test_invalid_cases_dict(self, _, axis, raises): + with self.assertRaises(raises): + flip = Flipd(keys='img', axis=axis) + flip({'img': self.imt}) + + @parameterized.expand(VALID_CASES) def test_correct_results(self, _, axis): flip = Flip(axis=axis) expected = np.flip(self.imt, axis) self.assertTrue(np.allclose(expected, flip(self.imt))) + @parameterized.expand(VALID_CASES) + def test_correct_results_dict(self, _, axis): + flip = Flipd(keys='img', axis=axis) + expected = np.flip(self.imt, axis) + res = flip({'img': self.imt}) + assert np.allclose(expected, res['img']) + if __name__ == '__main__': unittest.main() diff --git a/tests/test_random_flip.py b/tests/test_rand_flip.py similarity index 62% rename from tests/test_random_flip.py rename to tests/test_rand_flip.py index ee89a133d9..be03ff5a28 100644 --- a/tests/test_random_flip.py +++ b/tests/test_rand_flip.py @@ -14,31 +14,38 @@ import numpy as np from parameterized import parameterized -from monai.transforms import RandFlip +from monai.transforms import RandFlip, RandFlipd from tests.utils import NumpyImageTestCase2D +INVALID_CASES = [("wrong_axis", ['s', 1], TypeError), + ("not_numbers", 's', TypeError)] -class RandomFlipTest(NumpyImageTestCase2D): +VALID_CASES = [("no_axis", None), + ("one_axis", 1), + ("many_axis", [0, 1, 2])] - @parameterized.expand([ - ("wrong_axis", ['s', 1], TypeError), - ("not_numbers", 's', TypeError) - ]) +class RandFlipTest(NumpyImageTestCase2D): + + @parameterized.expand(INVALID_CASES) def test_invalid_inputs(self, _, axis, 
raises): with self.assertRaises(raises): flip = RandFlip(prob=1.0, axis=axis) flip(self.imt) - @parameterized.expand([ - ("no_axis", None), - ("one_axis", 1), - ("many_axis", [0, 1, 2]) - ]) + @parameterized.expand(VALID_CASES) def test_correct_results(self, _, axis): flip = RandFlip(prob=1.0, axis=axis) expected = np.flip(self.imt, axis) self.assertTrue(np.allclose(expected, flip(self.imt))) + @parameterized.expand(VALID_CASES) + def test_correct_results_dict(self, _, axis): + flip = RandFlipd(keys='img', prob=1.0, axis=axis) + res = flip({'img': self.imt}) + + expected = np.flip(self.imt, axis) + self.assertTrue(np.allclose(expected, res['img'])) + if __name__ == '__main__': unittest.main() diff --git a/tests/test_random_rotate.py b/tests/test_rand_rotate.py similarity index 100% rename from tests/test_random_rotate.py rename to tests/test_rand_rotate.py diff --git a/tests/test_random_zoom.py b/tests/test_rand_zoom.py similarity index 76% rename from tests/test_random_zoom.py rename to tests/test_rand_zoom.py index d193a16dd2..530504b887 100644 --- a/tests/test_random_zoom.py +++ b/tests/test_rand_zoom.py @@ -17,15 +17,14 @@ from scipy.ndimage import zoom as zoom_scipy from parameterized import parameterized -from monai.transforms import RandZoom +from monai.transforms import RandZoom, RandZoomd from tests.utils import NumpyImageTestCase2D +VALID_CASES = [(0.9, 1.1, 3, 'constant', 0, True, False, False)] class ZoomTest(NumpyImageTestCase2D): - @parameterized.expand([ - (0.9, 1.1, 3, 'constant', 0, True, False, False), - ]) + @parameterized.expand(VALID_CASES) def test_correct_results(self, min_zoom, max_zoom, order, mode, cval, prefilter, use_gpu, keep_size): random_zoom = RandZoom(prob=1.0, min_zoom=min_zoom, max_zoom=max_zoom, order=order, @@ -39,6 +38,21 @@ def test_correct_results(self, min_zoom, max_zoom, order, mode, self.assertTrue(np.allclose(expected, zoomed)) + @parameterized.expand(VALID_CASES) + def test_correct_results_dict(self, min_zoom, max_zoom, 
order, mode, + cval, prefilter, use_gpu, keep_size): + keys = 'img' + random_zoom = RandZoomd(keys, prob=1.0, min_zoom=min_zoom, max_zoom=max_zoom, order=order, + mode=mode, cval=cval, prefilter=prefilter, use_gpu=use_gpu, + keep_size=keep_size) + random_zoom.set_random_state(234) + + zoomed = random_zoom({keys: self.imt}) + expected = zoom_scipy(self.imt, zoom=random_zoom._zoom, mode=mode, + order=order, cval=cval, prefilter=prefilter) + + self.assertTrue(np.allclose(expected, zoomed[keys])) + @parameterized.expand([ (0.8, 1.2, 1, 'constant', 0, True) ]) diff --git a/tests/test_rotate.py b/tests/test_rotate.py index 98e25f587f..0c34f5809e 100644 --- a/tests/test_rotate.py +++ b/tests/test_rotate.py @@ -15,17 +15,16 @@ import scipy.ndimage from parameterized import parameterized -from monai.transforms import Rotate +from monai.transforms import Rotate, Rotated from tests.utils import NumpyImageTestCase2D +TEST_CASES = [(90, (1, 2), True, 1, 'reflect', 0, True), + (-90, (2, 1), True, 3, 'constant', 0, True), + (180, (2, 3), False, 2, 'constant', 4, False)] class RotateTest(NumpyImageTestCase2D): - @parameterized.expand([ - (90, (1, 2), True, 1, 'reflect', 0, True), - (-90, (2, 1), True, 3, 'constant', 0, True), - (180, (2, 3), False, 2, 'constant', 4, False), - ]) + @parameterized.expand(TEST_CASES) def test_correct_results(self, angle, axes, reshape, order, mode, cval, prefilter): rotate_fn = Rotate(angle, axes, reshape, @@ -36,6 +35,18 @@ def test_correct_results(self, angle, axes, reshape, mode=mode, cval=cval, prefilter=prefilter) self.assertTrue(np.allclose(expected, rotated)) + @parameterized.expand(TEST_CASES) + def test_correct_results_dict(self, angle, axes, reshape, + order, mode, cval, prefilter): + key = 'img' + rotate_fn = Rotated(key, angle, axes, reshape, order, + mode, cval, prefilter) + rotated = rotate_fn({key: self.imt}) + + expected = scipy.ndimage.rotate(self.imt, angle, axes, reshape, order=order, + mode=mode, cval=cval, prefilter=prefilter) + 
self.assertTrue(np.allclose(expected, rotated[key])) + if __name__ == '__main__': unittest.main() diff --git a/tests/test_zoom.py b/tests/test_zoom.py index 874e587a98..83795542bc 100644 --- a/tests/test_zoom.py +++ b/tests/test_zoom.py @@ -17,17 +17,22 @@ from scipy.ndimage import zoom as zoom_scipy from parameterized import parameterized -from monai.transforms import Zoom +from monai.transforms import Zoom, Zoomd from tests.utils import NumpyImageTestCase2D +VALID_CASES = [(1.1, 3, 'constant', 0, True, False, False), + (0.9, 3, 'constant', 0, True, False, False), + (0.8, 1, 'reflect', 0, False, False, False)] + +GPU_CASES = [("gpu_zoom", 0.6, 1, 'constant', 0, True)] + +INVALID_CASES = [("no_zoom", None, 1, TypeError), + ("invalid_order", 0.9, 's', AssertionError)] + class ZoomTest(NumpyImageTestCase2D): - @parameterized.expand([ - (1.1, 3, 'constant', 0, True, False, False), - (0.9, 3, 'constant', 0, True, False, False), - (0.8, 1, 'reflect', 0, False, False, False) - ]) + @parameterized.expand(VALID_CASES) def test_correct_results(self, zoom, order, mode, cval, prefilter, use_gpu, keep_size): zoom_fn = Zoom(zoom=zoom, order=order, mode=mode, cval=cval, prefilter=prefilter, use_gpu=use_gpu, keep_size=keep_size) @@ -36,9 +41,19 @@ def test_correct_results(self, zoom, order, mode, cval, prefilter, use_gpu, keep cval=cval, prefilter=prefilter) self.assertTrue(np.allclose(expected, zoomed)) - @parameterized.expand([ - ("gpu_zoom", 0.6, 1, 'constant', 0, True) - ]) + @parameterized.expand(VALID_CASES) + def test_correct_results_dict(self, zoom, order, mode, cval, prefilter, use_gpu, keep_size): + key = 'img' + zoom_fn = Zoomd(key, zoom=zoom, order=order, mode=mode, cval=cval, + prefilter=prefilter, use_gpu=use_gpu, keep_size=keep_size) + zoomed = zoom_fn({key: self.imt[0]}) + + expected = zoom_scipy(self.imt, zoom=zoom, mode=mode, order=order, + cval=cval, prefilter=prefilter) + self.assertTrue(np.allclose(expected, zoomed[key])) + + + 
@parameterized.expand(GPU_CASES) def test_gpu_zoom(self, _, zoom, order, mode, cval, prefilter): if importlib.util.find_spec('cupy'): zoom_fn = Zoom(zoom=zoom, order=order, mode=mode, cval=cval, @@ -57,10 +72,7 @@ def test_keep_size(self): zoomed = zoom_fn(self.imt[0]) self.assertTrue(np.array_equal(zoomed.shape, self.imt.shape[1:])) - @parameterized.expand([ - ("no_zoom", None, 1, TypeError), - ("invalid_order", 0.9, 's', AssertionError) - ]) + @parameterized.expand(INVALID_CASES) def test_invalid_inputs(self, _, zoom, order, raises): with self.assertRaises(raises): zoom_fn = Zoom(zoom=zoom, order=order) From 204e42dc0fa4f10c8560c6fc11142f33e4cab3fd Mon Sep 17 00:00:00 2001 From: Wenqi Li Date: Thu, 12 Mar 2020 19:08:15 +0000 Subject: [PATCH 19/40] two stages ci (#172) --- .gitlab-ci.yml | 48 +++++++++++++++++++++++++++++++----------------- 1 file changed, 31 insertions(+), 17 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 77d37a5c6b..e6bb748b47 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1,20 +1,34 @@ stages: - - build + - build + - coverage -.base_template : &BASE - script: - - nvidia-smi - - export CUDA_DEVICE_ORDER=PCI_BUS_ID - - export CUDA_VISIBLE_DEVICES=0,1 - - python -m pip install --upgrade pip - - pip uninstall -y torch torchvision - - pip install -r requirements.txt - # - pip list - - ./runtests.sh --net - - echo "Done with runtests.sh" +full integration: + stage: build + script: + - nvidia-smi + - export CUDA_DEVICE_ORDER=PCI_BUS_ID + - export CUDA_VISIBLE_DEVICES=0,1 + - python -m pip install --upgrade pip + - pip uninstall -y torch torchvision + - pip install -q -r requirements.txt + - ./runtests.sh --net + - echo "Done with runtests.sh --net" + tags: + - test -build-ci-test: - stage: build - tags: - - test - <<: *BASE +coverage test: + stage: coverage + only: + - master + - ci-stages + script: + - nvidia-smi + - export CUDA_DEVICE_ORDER=PCI_BUS_ID + - export CUDA_VISIBLE_DEVICES=0,1 + - python -m pip install --upgrade pip 
+ - pip uninstall -y torch torchvision + - pip install -q -r requirements.txt + - pip list + - ./runtests.sh --coverage + tags: + - test From 895e0f2dc9491c9b39329d820f1496a6f05f2abe Mon Sep 17 00:00:00 2001 From: Nic Ma Date: Fri, 13 Mar 2020 10:23:28 +0800 Subject: [PATCH 20/40] 165 update check all examples (#171) * [DLMED] add 3D classification inference examples * [DLMED] change UNet to 1 output * [DLMED] add 3D classification inference examples * [DLMED] update and clear all the examples also added unit tests for ClassificationSaver and Resized * [DLMED] fix transforms spatial axis issue and update unit tests * update demos: - unet: num_classes -> out_channels (signature consistency) - segmentation demo changed to binary ground truth - changed segmentation training dict to have validation every n iterations - segmentation demo image sizes to 128, window size to 96, lr 1e-3 (good results) - attach classification saver finalize() to Events.COMPLETE * [DLMED] fix tag_name bug, fix DenseNet3D bug, add comments Co-authored-by: root Co-authored-by: Wenqi Li --- .../densenet_evaluation_array.py | 97 ++++++++++ .../densenet_evaluation_dict.py | 98 ++++++++++ .../densenet_training_array.py} | 81 ++++---- .../densenet_training_dict.py | 164 +++++++++++++++++ examples/multi_gpu_test.ipynb | 2 +- .../unet_evaluation_array.py} | 59 ++++-- .../unet_evaluation_dict.py} | 57 ++++-- .../unet_training_array.py} | 146 +++++++-------- .../unet_training_dict.py} | 173 ++++++++---------- examples/unet_segmentation_3d.ipynb | 2 +- monai/data/synthetic.py | 7 +- monai/handlers/classification_saver.py | 95 ++++++++++ monai/handlers/segmentation_saver.py | 8 +- monai/handlers/tensorboard_handlers.py | 57 +++--- monai/networks/nets/densenet3d.py | 1 - monai/networks/nets/unet.py | 9 +- monai/transforms/composables.py | 131 +++++++++---- monai/transforms/transforms.py | 172 +++++++++++------ tests/integration_sliding_window.py | 4 +- tests/integration_unet2d.py | 7 +- 
tests/test_dice_loss.py | 2 +- tests/test_flip.py | 36 ++-- tests/test_flipd.py | 48 +++++ tests/test_generalized_dice_loss.py | 2 +- tests/test_handler_classification_saver.py | 55 ++++++ ...t_random_affine.py => test_rand_affine.py} | 0 ...ffine_grid.py => test_rand_affine_grid.py} | 0 ...random_affined.py => test_rand_affined.py} | 0 ...eform_grid.py => test_rand_deform_grid.py} | 0 ..._elastic_2d.py => test_rand_elastic_2d.py} | 0 ..._elastic_3d.py => test_rand_elastic_3d.py} | 0 ...lasticd_2d.py => test_rand_elasticd_2d.py} | 0 ...lasticd_3d.py => test_rand_elasticd_3d.py} | 0 tests/test_rand_flip.py | 31 ++-- tests/test_rand_flipd.py | 38 ++++ tests/test_rand_rotate.py | 21 ++- tests/test_rand_rotate90.py | 38 ++-- tests/test_rand_rotate90d.py | 50 +++-- tests/test_rand_rotated.py | 46 +++++ tests/test_rand_zoom.py | 45 ++--- tests/test_rand_zoomd.py | 83 +++++++++ tests/test_resize.py | 33 ++-- tests/test_resized.py | 56 ++++++ tests/test_rotate.py | 37 ++-- tests/test_rotate90.py | 38 ++-- tests/test_rotate90d.py | 48 +++-- tests/test_rotated.py | 43 +++++ tests/test_unet.py | 14 +- tests/test_uniform_rand_patch.py | 10 +- tests/test_uniform_rand_patchd.py | 14 +- tests/test_zoom.py | 32 ++-- tests/test_zoomd.py | 82 +++++++++ 52 files changed, 1678 insertions(+), 594 deletions(-) create mode 100644 examples/classification_3d/densenet_evaluation_array.py create mode 100644 examples/classification_3d/densenet_evaluation_dict.py rename examples/{densenet_classification_3d.py => classification_3d/densenet_training_array.py} (62%) create mode 100644 examples/classification_3d/densenet_training_dict.py rename examples/{unet_inference_3d_array.py => segmentation_3d/unet_evaluation_array.py} (58%) rename examples/{unet_inference_3d_dict.py => segmentation_3d/unet_evaluation_dict.py} (63%) rename examples/{unet_segmentation_3d_array.py => segmentation_3d/unet_training_array.py} (57%) rename examples/{unet_segmentation_3d_dict.py => 
segmentation_3d/unet_training_dict.py} (50%) create mode 100644 monai/handlers/classification_saver.py create mode 100644 tests/test_flipd.py create mode 100644 tests/test_handler_classification_saver.py rename tests/{test_random_affine.py => test_rand_affine.py} (100%) rename tests/{test_random_affine_grid.py => test_rand_affine_grid.py} (100%) rename tests/{test_random_affined.py => test_rand_affined.py} (100%) rename tests/{test_random_deform_grid.py => test_rand_deform_grid.py} (100%) rename tests/{test_random_elastic_2d.py => test_rand_elastic_2d.py} (100%) rename tests/{test_random_elastic_3d.py => test_rand_elastic_3d.py} (100%) rename tests/{test_random_elasticd_2d.py => test_rand_elasticd_2d.py} (100%) rename tests/{test_random_elasticd_3d.py => test_rand_elasticd_3d.py} (100%) create mode 100644 tests/test_rand_flipd.py create mode 100644 tests/test_rand_rotated.py create mode 100644 tests/test_rand_zoomd.py create mode 100644 tests/test_resized.py create mode 100644 tests/test_rotated.py create mode 100644 tests/test_zoomd.py diff --git a/examples/classification_3d/densenet_evaluation_array.py b/examples/classification_3d/densenet_evaluation_array.py new file mode 100644 index 0000000000..9785605aad --- /dev/null +++ b/examples/classification_3d/densenet_evaluation_array.py @@ -0,0 +1,97 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys +import logging +import numpy as np +import torch +from ignite.engine import create_supervised_evaluator, _prepare_batch +from torch.utils.data import DataLoader + +# assumes the framework is found here, change as necessary +sys.path.append("../..") +import monai +import monai.transforms.compose as transforms +from monai.data.nifti_reader import NiftiDataset +from monai.transforms import (AddChannel, Rescale, Resize) +from monai.handlers.stats_handler import StatsHandler +from monai.handlers.classification_saver import ClassificationSaver +from monai.handlers.checkpoint_loader import CheckpointLoader +from ignite.metrics import Accuracy + +monai.config.print_config() +logging.basicConfig(stream=sys.stdout, level=logging.INFO) + +# IXI dataset as a demo, dowloadable from https://brain-development.org/ixi-dataset/ +images = [ + "/workspace/data/medical/ixi/IXI-T1/IXI607-Guys-1097-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI175-HH-1570-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI385-HH-2078-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI344-Guys-0905-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI409-Guys-0960-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI584-Guys-1129-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI253-HH-1694-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI092-HH-1436-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI574-IOP-1156-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI585-Guys-1130-T1.nii.gz" +] +# 2 binary labels for gender classification: man and woman +labels = np.array([ + 0, 0, 1, 0, 1, 0, 1, 0, 1, 0 +]) + +# Define transforms for image +val_transforms = transforms.Compose([ + Rescale(), + AddChannel(), + Resize((96, 96, 96)) +]) +# Define nifti dataset +val_ds = NiftiDataset(image_files=images, labels=labels, transform=val_transforms, image_only=False) +# Create DenseNet121 +net = monai.networks.nets.densenet3d.densenet121( + in_channels=1, + out_channels=2, +) 
+device = torch.device("cuda:0") + +metric_name = 'Accuracy' +# add evaluation metric to the evaluator engine +val_metrics = {metric_name: Accuracy()} + + +def prepare_batch(batch, device=None, non_blocking=False): + return _prepare_batch((batch[0], batch[1]), device, non_blocking) + + +# ignite evaluator expects batch=(img, label) and returns output=(y_pred, y) at every iteration, +# user can add output_transform to return other values +evaluator = create_supervised_evaluator(net, val_metrics, device, True, prepare_batch=prepare_batch) + +# Add stats event handler to print validation stats via evaluator +val_stats_handler = StatsHandler( + name='evaluator', + output_transform=lambda x: None # no need to print loss value, so disable per iteration output +) +val_stats_handler.attach(evaluator) + +# for the arrary data format, assume the 3rd item of batch data is the meta_data +prediction_saver = ClassificationSaver(output_dir='tempdir', batch_transform=lambda batch: batch[2], + output_transform=lambda output: output[0].argmax(1)) +prediction_saver.attach(evaluator) + +# the model was trained by "densenet_training_array" exmple +CheckpointLoader(load_path='./runs/net_checkpoint_40.pth', load_dict={'net': net}).attach(evaluator) + +# create a validation data loader +val_loader = DataLoader(val_ds, batch_size=2, num_workers=4, pin_memory=torch.cuda.is_available()) + +state = evaluator.run(val_loader) diff --git a/examples/classification_3d/densenet_evaluation_dict.py b/examples/classification_3d/densenet_evaluation_dict.py new file mode 100644 index 0000000000..d2e33cfe5e --- /dev/null +++ b/examples/classification_3d/densenet_evaluation_dict.py @@ -0,0 +1,98 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ignite.metrics import Accuracy +import sys +import logging +import numpy as np +import torch +from ignite.engine import create_supervised_evaluator, _prepare_batch +from torch.utils.data import DataLoader + +# assumes the framework is found here, change as necessary +sys.path.append("../..") +from monai.handlers.classification_saver import ClassificationSaver +from monai.handlers.checkpoint_loader import CheckpointLoader +from monai.handlers.stats_handler import StatsHandler +from monai.transforms.composables import LoadNiftid, AddChanneld, Rescaled, Resized +import monai.transforms.compose as transforms +import monai + +monai.config.print_config() +logging.basicConfig(stream=sys.stdout, level=logging.INFO) + +# IXI dataset as a demo, dowloadable from https://brain-development.org/ixi-dataset/ +images = [ + "/workspace/data/medical/ixi/IXI-T1/IXI607-Guys-1097-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI175-HH-1570-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI385-HH-2078-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI344-Guys-0905-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI409-Guys-0960-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI584-Guys-1129-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI253-HH-1694-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI092-HH-1436-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI574-IOP-1156-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI585-Guys-1130-T1.nii.gz" +] +# 2 binary labels for gender classification: man and woman +labels = np.array([ + 0, 0, 1, 0, 1, 0, 
1, 0, 1, 0 +]) +val_files = [{'img': img, 'label': label} for img, label in zip(images, labels)] + +# Define transforms for image +val_transforms = transforms.Compose([ + LoadNiftid(keys=['img']), + AddChanneld(keys=['img']), + Rescaled(keys=['img']), + Resized(keys=['img'], output_spatial_shape=(96, 96, 96)) +]) + +# Create DenseNet121 +net = monai.networks.nets.densenet3d.densenet121( + in_channels=1, + out_channels=2, +) +device = torch.device("cuda:0") + + +def prepare_batch(batch, device=None, non_blocking=False): + return _prepare_batch((batch['img'], batch['label']), device, non_blocking) + + +metric_name = 'Accuracy' +# add evaluation metric to the evaluator engine +val_metrics = {metric_name: Accuracy()} +# ignite evaluator expects batch=(img, label) and returns output=(y_pred, y) at every iteration, +# user can add output_transform to return other values +evaluator = create_supervised_evaluator(net, val_metrics, device, True, prepare_batch=prepare_batch) + +# Add stats event handler to print validation stats via evaluator +val_stats_handler = StatsHandler( + name='evaluator', + output_transform=lambda x: None # no need to print loss value, so disable per iteration output +) +val_stats_handler.attach(evaluator) + +# for the arrary data format, assume the 3rd item of batch data is the meta_data +prediction_saver = ClassificationSaver(output_dir='tempdir', name='evaluator', + batch_transform=lambda batch: {'filename_or_obj': batch['img.filename_or_obj']}, + output_transform=lambda output: output[0].argmax(1)) +prediction_saver.attach(evaluator) + +# the model was trained by "densenet_training_dict" exmple +CheckpointLoader(load_path='./runs/net_checkpoint_40.pth', load_dict={'net': net}).attach(evaluator) + +# create a validation data loader +val_ds = monai.data.Dataset(data=val_files, transform=val_transforms) +val_loader = DataLoader(val_ds, batch_size=2, num_workers=4, pin_memory=torch.cuda.is_available()) + +state = evaluator.run(val_loader) diff --git 
a/examples/densenet_classification_3d.py b/examples/classification_3d/densenet_training_array.py similarity index 62% rename from examples/densenet_classification_3d.py rename to examples/classification_3d/densenet_training_array.py index 753097a2a9..4993556404 100644 --- a/examples/densenet_classification_3d.py +++ b/examples/classification_3d/densenet_training_array.py @@ -18,19 +18,20 @@ from torch.utils.data import DataLoader # assumes the framework is found here, change as necessary -sys.path.append("..") +sys.path.append("../..") import monai import monai.transforms.compose as transforms - from monai.data.nifti_reader import NiftiDataset -from monai.transforms import (AddChannel, Rescale, ToTensor, UniformRandomPatch) +from monai.transforms import (AddChannel, Rescale, Resize, RandRotate90) from monai.handlers.stats_handler import StatsHandler +from monai.handlers.tensorboard_handlers import TensorBoardStatsHandler from ignite.metrics import Accuracy from monai.handlers.utils import stopping_fn_from_metric monai.config.print_config() +logging.basicConfig(stream=sys.stdout, level=logging.INFO) -# FIXME: temp test dataset, Wenqi will replace later +# IXI dataset as a demo, dowloadable from https://brain-development.org/ixi-dataset/ images = [ "/workspace/data/medical/ixi/IXI-T1/IXI314-IOP-0889-T1.nii.gz", "/workspace/data/medical/ixi/IXI-T1/IXI249-Guys-1072-T1.nii.gz", @@ -53,37 +54,42 @@ "/workspace/data/medical/ixi/IXI-T1/IXI574-IOP-1156-T1.nii.gz", "/workspace/data/medical/ixi/IXI-T1/IXI585-Guys-1130-T1.nii.gz" ] +# 2 binary labels for gender classification: man and woman labels = np.array([ 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0 ]) -# Define transforms for image and segmentation -imtrans = transforms.Compose([ +# Define transforms +train_transforms = transforms.Compose([ + Rescale(), + AddChannel(), + Resize((96, 96, 96)), + RandRotate90() +]) +val_transforms = transforms.Compose([ Rescale(), AddChannel(), - UniformRandomPatch((96, 96, 
96)), - ToTensor() + Resize((96, 96, 96)) ]) -# Define nifti dataset, dataloader. -ds = NiftiDataset(image_files=images, labels=labels, transform=imtrans) -loader = DataLoader(ds, batch_size=2, num_workers=2, pin_memory=torch.cuda.is_available()) -im, label = monai.utils.misc.first(loader) +# Define nifti dataset, dataloader +check_ds = NiftiDataset(image_files=images, labels=labels, transform=train_transforms) +check_loader = DataLoader(check_ds, batch_size=2, num_workers=2, pin_memory=torch.cuda.is_available()) +im, label = monai.utils.misc.first(check_loader) print(type(im), im.shape, label) -lr = 1e-5 - -# Create DenseNet121, CrossEntropyLoss and Adam optimizer. +# Create DenseNet121, CrossEntropyLoss and Adam optimizer net = monai.networks.nets.densenet3d.densenet121( in_channels=1, out_channels=2, ) - loss = torch.nn.CrossEntropyLoss() +lr = 1e-5 opt = torch.optim.Adam(net.parameters(), lr) - -# Create trainer device = torch.device("cuda:0") + +# ignite trainer expects batch=(img, label) and returns output=loss at every iteration, +# user can add output_transform to return other values, like: y_pred, y, etc. 
trainer = create_supervised_trainer(net, opt, loss, device, False) # adding checkpoint handler to save models (network params and optimizer stats) during training @@ -91,45 +97,58 @@ trainer.add_event_handler(event_name=Events.EPOCH_COMPLETED, handler=checkpoint_handler, to_save={'net': net, 'opt': opt}) -train_stats_handler = StatsHandler(output_transform=lambda x: x[3]) + +# StatsHandler prints loss at every iteration and print metrics at every epoch, +# we don't set metrics for trainer here, so just print loss, user can also customize print functions +# and can use output_transform to convert engine.state.output if it's not loss value +train_stats_handler = StatsHandler(name='trainer') train_stats_handler.attach(trainer) -@trainer.on(Events.EPOCH_COMPLETED) -def log_training_loss(engine): - engine.logger.info("Epoch[%s] Loss: %s", engine.state.epoch, engine.state.output) +# TensorBoardStatsHandler plots loss at every iteration and plots metrics at every epoch, same as StatsHandler +train_tensorboard_stats_handler = TensorBoardStatsHandler() +train_tensorboard_stats_handler.attach(trainer) # Set parameters for validation validation_every_n_epochs = 1 -metric_name = 'Accuracy' +metric_name = 'Accuracy' # add evaluation metric to the evaluator engine val_metrics = {metric_name: Accuracy()} +# ignite evaluator expects batch=(img, label) and returns output=(y_pred, y) at every iteration, +# user can add output_transform to return other values evaluator = create_supervised_evaluator(net, val_metrics, device, True) # Add stats event handler to print validation stats via evaluator -logging.basicConfig(stream=sys.stdout, level=logging.INFO) -val_stats_handler = StatsHandler(output_transform=lambda x: None) +val_stats_handler = StatsHandler( + name='evaluator', + output_transform=lambda x: None, # no need to print loss value, so disable per iteration output + global_epoch_transform=lambda x: trainer.state.epoch) # fetch global epoch number from trainer 
val_stats_handler.attach(evaluator) -# Add early stopping handler to evaluator. +# add handler to record metrics to TensorBoard at every epoch +val_tensorboard_stats_handler = TensorBoardStatsHandler( + output_transform=lambda x: None, # no need to plot loss value, so disable per iteration output + global_epoch_transform=lambda x: trainer.state.epoch) # fetch global epoch number from trainer +val_tensorboard_stats_handler.attach(evaluator) + +# Add early stopping handler to evaluator early_stopper = EarlyStopping(patience=4, score_function=stopping_fn_from_metric(metric_name), trainer=trainer) evaluator.add_event_handler(event_name=Events.EPOCH_COMPLETED, handler=early_stopper) # create a validation data loader -val_ds = NiftiDataset(image_files=images[-5:], labels=labels[-5:], transform=imtrans) -val_loader = DataLoader(ds, batch_size=2, num_workers=2, pin_memory=torch.cuda.is_available()) +val_ds = NiftiDataset(image_files=images[-10:], labels=labels[-10:], transform=val_transforms) +val_loader = DataLoader(val_ds, batch_size=2, num_workers=2, pin_memory=torch.cuda.is_available()) @trainer.on(Events.EPOCH_COMPLETED(every=validation_every_n_epochs)) def run_validation(engine): evaluator.run(val_loader) -# create a training data loader -logging.basicConfig(stream=sys.stdout, level=logging.INFO) -train_ds = NiftiDataset(image_files=images[:15], labels=labels[:15], transform=imtrans) +# create a training data loader +train_ds = NiftiDataset(image_files=images[:10], labels=labels[:10], transform=train_transforms) train_loader = DataLoader(train_ds, batch_size=2, num_workers=2, pin_memory=torch.cuda.is_available()) train_epochs = 30 diff --git a/examples/classification_3d/densenet_training_dict.py b/examples/classification_3d/densenet_training_dict.py new file mode 100644 index 0000000000..c0017df969 --- /dev/null +++ b/examples/classification_3d/densenet_training_dict.py @@ -0,0 +1,164 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 
2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys +import logging +import numpy as np +import torch +from ignite.engine import Events, create_supervised_trainer, create_supervised_evaluator, _prepare_batch +from ignite.handlers import ModelCheckpoint, EarlyStopping +from torch.utils.data import DataLoader + +# assumes the framework is found here, change as necessary +sys.path.append("../..") +import monai +import monai.transforms.compose as transforms +from monai.transforms.composables import \ + LoadNiftid, AddChanneld, Rescaled, Resized, RandRotate90d +from monai.handlers.stats_handler import StatsHandler +from monai.handlers.tensorboard_handlers import TensorBoardStatsHandler +from ignite.metrics import Accuracy +from monai.handlers.utils import stopping_fn_from_metric + +monai.config.print_config() +logging.basicConfig(stream=sys.stdout, level=logging.INFO) + +# IXI dataset as a demo, dowloadable from https://brain-development.org/ixi-dataset/ +images = [ + "/workspace/data/medical/ixi/IXI-T1/IXI314-IOP-0889-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI249-Guys-1072-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI609-HH-2600-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI173-HH-1590-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI020-Guys-0700-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI342-Guys-0909-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI134-Guys-0780-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI577-HH-2661-T1.nii.gz", + 
"/workspace/data/medical/ixi/IXI-T1/IXI066-Guys-0731-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI130-HH-1528-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI607-Guys-1097-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI175-HH-1570-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI385-HH-2078-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI344-Guys-0905-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI409-Guys-0960-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI584-Guys-1129-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI253-HH-1694-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI092-HH-1436-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI574-IOP-1156-T1.nii.gz", + "/workspace/data/medical/ixi/IXI-T1/IXI585-Guys-1130-T1.nii.gz" +] +# 2 binary labels for gender classification: man and woman +labels = np.array([ + 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0 +]) +train_files = [{'img': img, 'label': label} for img, label in zip(images[:10], labels[:10])] +val_files = [{'img': img, 'label': label} for img, label in zip(images[-10:], labels[-10:])] + +# Define transforms for image +train_transforms = transforms.Compose([ + LoadNiftid(keys=['img']), + AddChanneld(keys=['img']), + Rescaled(keys=['img']), + Resized(keys=['img'], output_spatial_shape=(96, 96, 96)), + RandRotate90d(keys=['img'], prob=0.8, spatial_axes=[0, 2]) +]) +val_transforms = transforms.Compose([ + LoadNiftid(keys=['img']), + AddChanneld(keys=['img']), + Rescaled(keys=['img']), + Resized(keys=['img'], output_spatial_shape=(96, 96, 96)) +]) + +# Define dataset, dataloader +check_ds = monai.data.Dataset(data=train_files, transform=train_transforms) +check_loader = DataLoader(check_ds, batch_size=2, num_workers=4, pin_memory=torch.cuda.is_available()) +check_data = monai.utils.misc.first(check_loader) +print(check_data['img'].shape, check_data['label']) + +# Create DenseNet121, CrossEntropyLoss and Adam optimizer +net = 
monai.networks.nets.densenet3d.densenet121( + in_channels=1, + out_channels=2, +) +loss = torch.nn.CrossEntropyLoss() +lr = 1e-5 +opt = torch.optim.Adam(net.parameters(), lr) +device = torch.device("cuda:0") + + +# ignite trainer expects batch=(img, label) and returns output=loss at every iteration, +# user can add output_transform to return other values, like: y_pred, y, etc. +def prepare_batch(batch, device=None, non_blocking=False): + return _prepare_batch((batch['img'], batch['label']), device, non_blocking) + + +trainer = create_supervised_trainer(net, opt, loss, device, False, prepare_batch=prepare_batch) + +# adding checkpoint handler to save models (network params and optimizer stats) during training +checkpoint_handler = ModelCheckpoint('./runs/', 'net', n_saved=10, require_empty=False) +trainer.add_event_handler(event_name=Events.EPOCH_COMPLETED, + handler=checkpoint_handler, + to_save={'net': net, 'opt': opt}) + +# StatsHandler prints loss at every iteration and print metrics at every epoch, +# we don't set metrics for trainer here, so just print loss, user can also customize print functions +# and can use output_transform to convert engine.state.output if it's not loss value +train_stats_handler = StatsHandler(name='trainer') +train_stats_handler.attach(trainer) + +# TensorBoardStatsHandler plots loss at every iteration and plots metrics at every epoch, same as StatsHandler +train_tensorboard_stats_handler = TensorBoardStatsHandler() +train_tensorboard_stats_handler.attach(trainer) + +# Set parameters for validation +validation_every_n_epochs = 1 + +metric_name = 'Accuracy' +# add evaluation metric to the evaluator engine +val_metrics = {metric_name: Accuracy()} +# ignite evaluator expects batch=(img, label) and returns output=(y_pred, y) at every iteration, +# user can add output_transform to return other values +evaluator = create_supervised_evaluator(net, val_metrics, device, True, prepare_batch=prepare_batch) + +# Add stats event handler to print 
validation stats via evaluator +val_stats_handler = StatsHandler( + name='evaluator', + output_transform=lambda x: None, # no need to print loss value, so disable per iteration output + global_epoch_transform=lambda x: trainer.state.epoch) # fetch global epoch number from trainer +val_stats_handler.attach(evaluator) + +# add handler to record metrics to TensorBoard at every epoch +val_tensorboard_stats_handler = TensorBoardStatsHandler( + output_transform=lambda x: None, # no need to plot loss value, so disable per iteration output + global_epoch_transform=lambda x: trainer.state.epoch) # fetch global epoch number from trainer +val_tensorboard_stats_handler.attach(evaluator) + +# Add early stopping handler to evaluator +early_stopper = EarlyStopping(patience=4, + score_function=stopping_fn_from_metric(metric_name), + trainer=trainer) +evaluator.add_event_handler(event_name=Events.EPOCH_COMPLETED, handler=early_stopper) + +# create a validation data loader +val_ds = monai.data.Dataset(data=val_files, transform=val_transforms) +val_loader = DataLoader(val_ds, batch_size=2, num_workers=4, pin_memory=torch.cuda.is_available()) + + +@trainer.on(Events.EPOCH_COMPLETED(every=validation_every_n_epochs)) +def run_validation(engine): + evaluator.run(val_loader) + + +# create a training data loader +train_ds = monai.data.Dataset(data=train_files, transform=train_transforms) +train_loader = DataLoader(train_ds, batch_size=2, num_workers=4, pin_memory=torch.cuda.is_available()) + +train_epochs = 30 +state = trainer.run(train_loader, train_epochs) diff --git a/examples/multi_gpu_test.ipynb b/examples/multi_gpu_test.ipynb index 8f1827f15b..98911e12f2 100644 --- a/examples/multi_gpu_test.ipynb +++ b/examples/multi_gpu_test.ipynb @@ -53,7 +53,7 @@ "net = monai.networks.nets.UNet(\n", " dimensions=2,\n", " in_channels=1,\n", - " num_classes=1,\n", + " out_channels=1,\n", " channels=(16, 32, 64, 128, 256),\n", " strides=(2, 2, 2, 2),\n", " num_res_units=2,\n", diff --git 
a/examples/unet_inference_3d_array.py b/examples/segmentation_3d/unet_evaluation_array.py similarity index 58% rename from examples/unet_inference_3d_array.py rename to examples/segmentation_3d/unet_evaluation_array.py index 8fe417c7dd..f5c039a39e 100644 --- a/examples/unet_inference_3d_array.py +++ b/examples/segmentation_3d/unet_evaluation_array.py @@ -13,7 +13,7 @@ import sys import tempfile from glob import glob - +import logging import nibabel as nib import numpy as np import torch @@ -21,26 +21,29 @@ from torch.utils.data import DataLoader # assumes the framework is found here, change as necessary -sys.path.append("..") +sys.path.append("../..") from monai import config from monai.handlers.checkpoint_loader import CheckpointLoader from monai.handlers.segmentation_saver import SegmentationSaver import monai.transforms.compose as transforms from monai.data.nifti_reader import NiftiDataset -from monai.transforms import AddChannel, Rescale, ToTensor +from monai.transforms import AddChannel, Rescale from monai.networks.nets.unet import UNet from monai.networks.utils import predict_segmentation from monai.data.synthetic import create_test_image_3d from monai.utils.sliding_window_inference import sliding_window_inference +from monai.handlers.stats_handler import StatsHandler +from monai.handlers.mean_dice import MeanDice config.print_config() +logging.basicConfig(stream=sys.stdout, level=logging.INFO) tempdir = tempfile.mkdtemp() # tempdir = './temp' print('generating synthetic data to {} (this may take a while)'.format(tempdir)) -for i in range(50): - im, seg = create_test_image_3d(256, 256, 256) +for i in range(5): + im, seg = create_test_image_3d(128, 128, 128, num_seg_classes=1) n = nib.Nifti1Image(im, np.eye(4)) nib.save(n, os.path.join(tempdir, 'im%i.nii.gz' % i)) @@ -50,39 +53,59 @@ images = sorted(glob(os.path.join(tempdir, 'im*.nii.gz'))) segs = sorted(glob(os.path.join(tempdir, 'seg*.nii.gz'))) -imtrans = transforms.Compose([Rescale(), AddChannel(), 
ToTensor()]) -segtrans = transforms.Compose([AddChannel(), ToTensor()]) + +# Define transforms for image and segmentation +imtrans = transforms.Compose([Rescale(), AddChannel()]) +segtrans = transforms.Compose([AddChannel()]) ds = NiftiDataset(images, segs, transform=imtrans, seg_transform=segtrans, image_only=False) device = torch.device("cuda:0") -roi_size = (64, 64, 64) -sw_batch_size = 4 net = UNet( dimensions=3, in_channels=1, - num_classes=1, + out_channels=1, channels=(16, 32, 64, 128, 256), strides=(2, 2, 2, 2), num_res_units=2, ) net.to(device) +# define sliding window size and batch size for windows inference +roi_size = (96, 96, 96) +sw_batch_size = 4 + def _sliding_window_processor(engine, batch): net.eval() img, seg, meta_data = batch with torch.no_grad(): - seg_probs = sliding_window_inference(img, roi_size, sw_batch_size, lambda x: net(x)[0], device) - return predict_segmentation(seg_probs) + seg_probs = sliding_window_inference(img, roi_size, sw_batch_size, net, device) + return seg_probs, seg.to(device) -infer_engine = Engine(_sliding_window_processor) +evaluator = Engine(_sliding_window_processor) + +# add evaluation metric to the evaluator engine +MeanDice(add_sigmoid=True, to_onehot_y=False).attach(evaluator, 'Mean_Dice') + +# StatsHandler prints loss at every iteration and print metrics at every epoch, +# we don't need to print loss for evaluator, so just print metrics, user can also customize print functions +val_stats_handler = StatsHandler( + name='evaluator', + output_transform=lambda x: None # no need to print loss value, so disable per iteration output +) +val_stats_handler.attach(evaluator) # for the arrary data format, assume the 3rd item of batch data is the meta_data -SegmentationSaver(output_path='tempdir', output_ext='.nii.gz', output_postfix='seg', - batch_transform=lambda x: x[2]).attach(infer_engine) -# the model was trained by "unet_segmentation_3d_array" exmple -CheckpointLoader(load_path='./runs/net_checkpoint_120.pth', 
load_dict={'net': net}).attach(infer_engine) +file_saver = SegmentationSaver( + output_path='tempdir', output_ext='.nii.gz', output_postfix='seg', name='evaluator', + batch_transform=lambda x: x[2], output_transform=lambda output: predict_segmentation(output[0])) +file_saver.attach(evaluator) + +# the model was trained by "unet_training_array" exmple +ckpt_saver = CheckpointLoader(load_path='./runs/net_checkpoint_50.pth', load_dict={'net': net}) +ckpt_saver.attach(evaluator) +# sliding window inferene need to input 1 image in every iteration loader = DataLoader(ds, batch_size=1, num_workers=1, pin_memory=torch.cuda.is_available()) -state = infer_engine.run(loader) +state = evaluator.run(loader) diff --git a/examples/unet_inference_3d_dict.py b/examples/segmentation_3d/unet_evaluation_dict.py similarity index 63% rename from examples/unet_inference_3d_dict.py rename to examples/segmentation_3d/unet_evaluation_dict.py index 405b49aa8d..e78abeac3f 100644 --- a/examples/unet_inference_3d_dict.py +++ b/examples/segmentation_3d/unet_evaluation_dict.py @@ -13,7 +13,7 @@ import sys import tempfile from glob import glob - +import logging import nibabel as nib import numpy as np import torch @@ -21,7 +21,7 @@ from torch.utils.data import DataLoader # assumes the framework is found here, change as necessary -sys.path.append("..") +sys.path.append("../..") import monai from monai.data.utils import list_data_collate @@ -29,19 +29,22 @@ from monai.data.synthetic import create_test_image_3d from monai.networks.utils import predict_segmentation from monai.networks.nets.unet import UNet -from monai.transforms.composables import LoadNiftid, AsChannelFirstd +from monai.transforms.composables import LoadNiftid, AsChannelFirstd, Rescaled import monai.transforms.compose as transforms from monai.handlers.segmentation_saver import SegmentationSaver from monai.handlers.checkpoint_loader import CheckpointLoader +from monai.handlers.stats_handler import StatsHandler +from 
monai.handlers.mean_dice import MeanDice from monai import config config.print_config() +logging.basicConfig(stream=sys.stdout, level=logging.INFO) tempdir = tempfile.mkdtemp() # tempdir = './temp' print('generating synthetic data to {} (this may take a while)'.format(tempdir)) -for i in range(50): - im, seg = create_test_image_3d(256, 256, 256, channel_dim=-1) +for i in range(5): + im, seg = create_test_image_3d(128, 128, 128, num_seg_classes=1, channel_dim=-1) n = nib.Nifti1Image(im, np.eye(4)) nib.save(n, os.path.join(tempdir, 'im%i.nii.gz' % i)) @@ -52,44 +55,62 @@ images = sorted(glob(os.path.join(tempdir, 'im*.nii.gz'))) segs = sorted(glob(os.path.join(tempdir, 'seg*.nii.gz'))) val_files = [{'img': img, 'seg': seg} for img, seg in zip(images, segs)] + +# Define transforms for image and segmentation val_transforms = transforms.Compose([ LoadNiftid(keys=['img', 'seg']), - AsChannelFirstd(keys=['img', 'seg'], channel_dim=-1) + AsChannelFirstd(keys=['img', 'seg'], channel_dim=-1), + Rescaled(keys=['img', 'seg']) ]) val_ds = monai.data.Dataset(data=val_files, transform=val_transforms) device = torch.device("cuda:0") -roi_size = (64, 64, 64) -sw_batch_size = 4 net = UNet( dimensions=3, in_channels=1, - num_classes=1, + out_channels=1, channels=(16, 32, 64, 128, 256), strides=(2, 2, 2, 2), num_res_units=2, ) net.to(device) +# define sliding window size and batch size for windows inference +roi_size = (96, 96, 96) +sw_batch_size = 4 + def _sliding_window_processor(engine, batch): net.eval() with torch.no_grad(): - seg_probs = sliding_window_inference(batch['img'], roi_size, sw_batch_size, lambda x: net(x)[0], device) - return predict_segmentation(seg_probs) + seg_probs = sliding_window_inference(batch['img'], roi_size, sw_batch_size, net, device) + return seg_probs, batch['seg'].to(device) + +evaluator = Engine(_sliding_window_processor) -infer_engine = Engine(_sliding_window_processor) +# add evaluation metric to the evaluator engine +MeanDice(add_sigmoid=True, 
to_onehot_y=False).attach(evaluator, 'Mean_Dice') + +# StatsHandler prints loss at every iteration and print metrics at every epoch, +# we don't need to print loss for evaluator, so just print metrics, user can also customize print functions +val_stats_handler = StatsHandler( + name='evaluator', + output_transform=lambda x: None # no need to print loss value, so disable per iteration output +) +val_stats_handler.attach(evaluator) -# for the arrary data format, assume the 3rd item of batch data is the meta_data -SegmentationSaver(output_path='tempdir', output_ext='.nii.gz', output_postfix='seg', +# convert the necessary metadata from batch data +SegmentationSaver(output_path='tempdir', output_ext='.nii.gz', output_postfix='seg', name='evaluator', batch_transform=lambda batch: {'filename_or_obj': batch['img.filename_or_obj'], 'original_affine': batch['img.original_affine'], 'affine': batch['img.affine'], - }).attach(infer_engine) -# the model was trained by "unet_segmentation_3d_array" exmple -CheckpointLoader(load_path='./runs/net_checkpoint_120.pth', load_dict={'net': net}).attach(infer_engine) + }, + output_transform=lambda output: predict_segmentation(output[0])).attach(evaluator) +# the model was trained by "unet_training_dict" exmple +CheckpointLoader(load_path='./runs/net_checkpoint_50.pth', load_dict={'net': net}).attach(evaluator) +# sliding window inferene need to input 1 image in every iteration val_loader = DataLoader(val_ds, batch_size=1, num_workers=4, collate_fn=list_data_collate, pin_memory=torch.cuda.is_available()) -state = infer_engine.run(val_loader) +state = evaluator.run(val_loader) diff --git a/examples/unet_segmentation_3d_array.py b/examples/segmentation_3d/unet_training_array.py similarity index 57% rename from examples/unet_segmentation_3d_array.py rename to examples/segmentation_3d/unet_training_array.py index 3b0d880f10..c9cb70875b 100644 --- a/examples/unet_segmentation_3d_array.py +++ b/examples/segmentation_3d/unet_training_array.py @@ 
-14,7 +14,6 @@ import tempfile from glob import glob import logging - import nibabel as nib import numpy as np import torch @@ -23,26 +22,28 @@ from torch.utils.data import DataLoader # assumes the framework is found here, change as necessary -sys.path.append("..") +sys.path.append("../..") import monai import monai.transforms.compose as transforms from monai.data.nifti_reader import NiftiDataset -from monai.transforms import AddChannel, Rescale, ToTensor, UniformRandomPatch +from monai.transforms import AddChannel, Rescale, UniformRandomPatch, Resize from monai.handlers.stats_handler import StatsHandler from monai.handlers.tensorboard_handlers import TensorBoardStatsHandler, TensorBoardImageHandler from monai.handlers.mean_dice import MeanDice from monai.data.synthetic import create_test_image_3d from monai.handlers.utils import stopping_fn_from_metric +from monai.networks.utils import predict_segmentation monai.config.print_config() +logging.basicConfig(stream=sys.stdout, level=logging.INFO) -# Create a temporary directory and 50 random image, mask paris +# Create a temporary directory and 40 random image, mask paris tempdir = tempfile.mkdtemp() print('generating synthetic data to {} (this may take a while)'.format(tempdir)) -for i in range(50): - im, seg = create_test_image_3d(128, 128, 128) +for i in range(40): + im, seg = create_test_image_3d(128, 128, 128, num_seg_classes=1) n = nib.Nifti1Image(im, np.eye(4)) nib.save(n, os.path.join(tempdir, 'im%i.nii.gz' % i)) @@ -54,124 +55,119 @@ segs = sorted(glob(os.path.join(tempdir, 'seg*.nii.gz'))) # Define transforms for image and segmentation -imtrans = transforms.Compose([ +train_imtrans = transforms.Compose([ + Rescale(), + AddChannel(), + UniformRandomPatch((96, 96, 96)) +]) +train_segtrans = transforms.Compose([ + AddChannel(), + UniformRandomPatch((96, 96, 96)) +]) +val_imtrans = transforms.Compose([ Rescale(), AddChannel(), - UniformRandomPatch((96, 96, 96)), - ToTensor() + Resize((96, 96, 96)) ]) -segtrans = 
transforms.Compose([ +val_segtrans = transforms.Compose([ AddChannel(), - UniformRandomPatch((96, 96, 96)), - ToTensor() + Resize((96, 96, 96)) ]) -# Define nifti dataset, dataloader. -ds = NiftiDataset(images, segs, transform=imtrans, seg_transform=segtrans) -loader = DataLoader(ds, batch_size=10, num_workers=2, pin_memory=torch.cuda.is_available()) -im, seg = monai.utils.misc.first(loader) +# Define nifti dataset, dataloader +check_ds = NiftiDataset(images, segs, transform=train_imtrans, seg_transform=train_segtrans) +check_loader = DataLoader(check_ds, batch_size=10, num_workers=2, pin_memory=torch.cuda.is_available()) +im, seg = monai.utils.misc.first(check_loader) print(im.shape, seg.shape) -lr = 1e-5 +# create a training data loader +train_ds = NiftiDataset(images[:20], segs[:20], transform=train_imtrans, seg_transform=train_segtrans) +train_loader = DataLoader(train_ds, batch_size=5, num_workers=8, pin_memory=torch.cuda.is_available()) +# create a validation data loader +val_ds = NiftiDataset(images[-20:], segs[-20:], transform=val_imtrans, seg_transform=val_segtrans) +val_loader = DataLoader(val_ds, batch_size=5, num_workers=8, pin_memory=torch.cuda.is_available()) -# Create UNet, DiceLoss and Adam optimizer. + +# Create UNet, DiceLoss and Adam optimizer net = monai.networks.nets.UNet( dimensions=3, in_channels=1, - num_classes=1, + out_channels=1, channels=(16, 32, 64, 128, 256), strides=(2, 2, 2, 2), num_res_units=2, ) - loss = monai.losses.DiceLoss(do_sigmoid=True) +lr = 1e-3 opt = torch.optim.Adam(net.parameters(), lr) +device = torch.device("cuda:0") -# Since network outputs logits and segmentation, we need a custom function. 
-def _loss_fn(i, j): - return loss(i[0], j) - - -# Create trainer -device = torch.device("cpu:0") -trainer = create_supervised_trainer(net, opt, _loss_fn, device, False, - output_transform=lambda x, y, y_pred, loss: [y_pred[1], loss.item(), y]) +# ignite trainer expects batch=(img, seg) and returns output=loss at every iteration, +# user can add output_transform to return other values, like: y_pred, y, etc. +trainer = create_supervised_trainer(net, opt, loss, device, False) # adding checkpoint handler to save models (network params and optimizer stats) during training checkpoint_handler = ModelCheckpoint('./runs/', 'net', n_saved=10, require_empty=False) trainer.add_event_handler(event_name=Events.EPOCH_COMPLETED, handler=checkpoint_handler, to_save={'net': net, 'opt': opt}) -logging.basicConfig(stream=sys.stdout, level=logging.INFO) -# print training loss to commandline -train_stats_handler = StatsHandler(output_transform=lambda x: x[1]) +# StatsHandler prints loss at every iteration and print metrics at every epoch, +# we don't set metrics for trainer here, so just print loss, user can also customize print functions +# and can use output_transform to convert engine.state.output if it's not a loss value +train_stats_handler = StatsHandler(name='trainer') train_stats_handler.attach(trainer) -# record training loss to TensorBoard at every iteration -train_tensorboard_stats_handler = TensorBoardStatsHandler( - output_transform=lambda x: {'training_dice_loss': x[1]}, # plot under tag name taining_dice_loss - global_epoch_transform=lambda x: trainer.state.epoch) +# TensorBoardStatsHandler plots loss at every iteration and plots metrics at every epoch, same as StatsHandler +train_tensorboard_stats_handler = TensorBoardStatsHandler() train_tensorboard_stats_handler.attach(trainer) -@trainer.on(Events.EPOCH_COMPLETED) -def log_training_loss(engine): - engine.logger.info("Epoch[%s] Loss: %s", engine.state.epoch, engine.state.output[1]) - - -# Set parameters for validation 
validation_every_n_epochs = 1 +# Set parameters for validation metric_name = 'Mean_Dice' - # add evaluation metric to the evaluator engine -val_metrics = {metric_name: MeanDice( - add_sigmoid=True, to_onehot_y=False, output_transform=lambda output: (output[0][0], output[1])) -} +val_metrics = {metric_name: MeanDice(add_sigmoid=True, to_onehot_y=False)} + +# ignite evaluator expects batch=(img, seg) and returns output=(y_pred, y) at every iteration, +# user can add output_transform to return other values evaluator = create_supervised_evaluator(net, val_metrics, device, True) + +@trainer.on(Events.EPOCH_COMPLETED(every=validation_every_n_epochs)) +def run_validation(engine): + evaluator.run(val_loader) + + +# Add early stopping handler to evaluator +early_stopper = EarlyStopping(patience=4, + score_function=stopping_fn_from_metric(metric_name), + trainer=trainer) +evaluator.add_event_handler(event_name=Events.EPOCH_COMPLETED, handler=early_stopper) + # Add stats event handler to print validation stats via evaluator val_stats_handler = StatsHandler( - output_transform=lambda x: None, # disable per iteration output - global_epoch_transform=lambda x: trainer.state.epoch) + name='evaluator', + output_transform=lambda x: None, # no need to print loss value, so disable per iteration output + global_epoch_transform=lambda x: trainer.state.epoch) # fetch global epoch number from trainer val_stats_handler.attach(evaluator) -# add handler to record metrics to TensorBoard at every epoch +# add handler to record metrics to TensorBoard at every validation epoch val_tensorboard_stats_handler = TensorBoardStatsHandler( - output_transform=lambda x: None, # no iteration plot - global_epoch_transform=lambda x: trainer.state.epoch) # use epoch number from trainer + output_transform=lambda x: None, # no need to plot loss value, so disable per iteration output + global_epoch_transform=lambda x: trainer.state.epoch) # fetch global epoch number from trainer 
val_tensorboard_stats_handler.attach(evaluator) -# add handler to draw several images and the corresponding labels and model outputs -# here we draw the first 3 images(draw the first channel) as GIF format along Depth axis + +# add handler to draw the first image and the corresponding label and model output in the last batch +# here we draw the 3D output as GIF format along Depth axis, at every validation epoch val_tensorboard_image_handler = TensorBoardImageHandler( batch_transform=lambda batch: (batch[0], batch[1]), - output_transform=lambda output: output[0][1], + output_transform=lambda output: predict_segmentation(output[0]), global_iter_transform=lambda x: trainer.state.epoch ) evaluator.add_event_handler(event_name=Events.EPOCH_COMPLETED, handler=val_tensorboard_image_handler) -# Add early stopping handler to evaluator -early_stopper = EarlyStopping(patience=4, - score_function=stopping_fn_from_metric(metric_name), - trainer=trainer) -evaluator.add_event_handler(event_name=Events.EPOCH_COMPLETED, handler=early_stopper) - -# create a validation data loader -val_ds = NiftiDataset(images[-20:], segs[-20:], transform=imtrans, seg_transform=segtrans) -val_loader = DataLoader(ds, batch_size=5, num_workers=8, pin_memory=torch.cuda.is_available()) - - -@trainer.on(Events.EPOCH_COMPLETED(every=validation_every_n_epochs)) -def run_validation(engine): - evaluator.run(val_loader) - - -# create a training data loader -logging.basicConfig(stream=sys.stdout, level=logging.INFO) - -train_ds = NiftiDataset(images[:20], segs[:20], transform=imtrans, seg_transform=segtrans) -train_loader = DataLoader(train_ds, batch_size=5, num_workers=8, pin_memory=torch.cuda.is_available()) train_epochs = 30 state = trainer.run(train_loader, train_epochs) diff --git a/examples/unet_segmentation_3d_dict.py b/examples/segmentation_3d/unet_training_dict.py similarity index 50% rename from examples/unet_segmentation_3d_dict.py rename to examples/segmentation_3d/unet_training_dict.py index 
0e1e78811b..018f7076a7 100644 --- a/examples/unet_segmentation_3d_dict.py +++ b/examples/segmentation_3d/unet_training_dict.py @@ -14,36 +14,36 @@ import tempfile from glob import glob import logging - import nibabel as nib import numpy as np import torch -from torch.utils.tensorboard import SummaryWriter from ignite.engine import Events, create_supervised_trainer, create_supervised_evaluator, _prepare_batch from ignite.handlers import ModelCheckpoint, EarlyStopping from torch.utils.data import DataLoader # assumes the framework is found here, change as necessary -sys.path.append("..") +sys.path.append("../..") import monai import monai.transforms.compose as transforms from monai.transforms.composables import \ - LoadNiftid, AsChannelFirstd, RandCropByPosNegLabeld, RandRotate90d + LoadNiftid, AsChannelFirstd, Rescaled, RandCropByPosNegLabeld, RandRotate90d from monai.handlers.stats_handler import StatsHandler +from monai.handlers.tensorboard_handlers import TensorBoardStatsHandler, TensorBoardImageHandler from monai.handlers.mean_dice import MeanDice -from monai.visualize import img2tensorboard from monai.data.synthetic import create_test_image_3d from monai.handlers.utils import stopping_fn_from_metric from monai.data.utils import list_data_collate +from monai.networks.utils import predict_segmentation monai.config.print_config() +logging.basicConfig(stream=sys.stdout, level=logging.INFO) -# Create a temporary directory and 50 random image, mask paris +# Create a temporary directory and 40 random image, mask paris tempdir = tempfile.mkdtemp() print('generating synthetic data to {} (this may take a while)'.format(tempdir)) -for i in range(50): - im, seg = create_test_image_3d(128, 128, 128, channel_dim=-1) +for i in range(40): + im, seg = create_test_image_3d(128, 128, 128, num_seg_classes=1, channel_dim=-1) n = nib.Nifti1Image(im, np.eye(4)) nib.save(n, os.path.join(tempdir, 'img%i.nii.gz' % i)) @@ -53,145 +53,128 @@ images = sorted(glob(os.path.join(tempdir, 
'img*.nii.gz'))) segs = sorted(glob(os.path.join(tempdir, 'seg*.nii.gz'))) -train_files = [{'img': img, 'seg': seg} for img, seg in zip(images[:40], segs[:40])] -val_files = [{'img': img, 'seg': seg} for img, seg in zip(images[-10:], segs[-10:])] +train_files = [{'img': img, 'seg': seg} for img, seg in zip(images[:20], segs[:20])] +val_files = [{'img': img, 'seg': seg} for img, seg in zip(images[-20:], segs[-20:])] # Define transforms for image and segmentation train_transforms = transforms.Compose([ LoadNiftid(keys=['img', 'seg']), AsChannelFirstd(keys=['img', 'seg'], channel_dim=-1), + Rescaled(keys=['img', 'seg']), RandCropByPosNegLabeld(keys=['img', 'seg'], label_key='seg', size=[96, 96, 96], pos=1, neg=1, num_samples=4), - RandRotate90d(keys=['img', 'seg'], prob=0.8, axes=[1, 3]) + RandRotate90d(keys=['img', 'seg'], prob=0.8, spatial_axes=[0, 2]) ]) val_transforms = transforms.Compose([ LoadNiftid(keys=['img', 'seg']), - AsChannelFirstd(keys=['img', 'seg'], channel_dim=-1) + AsChannelFirstd(keys=['img', 'seg'], channel_dim=-1), + Rescaled(keys=['img', 'seg']) ]) -# Define nifti dataset, dataloader. -ds = monai.data.Dataset(data=train_files, transform=train_transforms) -loader = DataLoader(ds, batch_size=2, num_workers=4, collate_fn=list_data_collate, - pin_memory=torch.cuda.is_available()) -check_data = monai.utils.misc.first(loader) +# Define dataset, dataloader +check_ds = monai.data.Dataset(data=train_files, transform=train_transforms) +# use batch_size=2 to load images and use RandCropByPosNegLabeld to generate 2 x 4 images for network training +check_loader = DataLoader(check_ds, batch_size=2, num_workers=4, collate_fn=list_data_collate, + pin_memory=torch.cuda.is_available()) +check_data = monai.utils.misc.first(check_loader) print(check_data['img'].shape, check_data['seg'].shape) -lr = 1e-5 -# Create UNet, DiceLoss and Adam optimizer. 
+# create a training data loader +train_ds = monai.data.Dataset(data=train_files, transform=train_transforms) +# use batch_size=2 to load images and use RandCropByPosNegLabeld to generate 2 x 4 images for network training +train_loader = DataLoader(train_ds, batch_size=2, num_workers=4, collate_fn=list_data_collate, + pin_memory=torch.cuda.is_available()) +# create a validation data loader +val_ds = monai.data.Dataset(data=val_files, transform=val_transforms) +val_loader = DataLoader(val_ds, batch_size=5, num_workers=8, collate_fn=list_data_collate, + pin_memory=torch.cuda.is_available()) + + +# Create UNet, DiceLoss and Adam optimizer net = monai.networks.nets.UNet( dimensions=3, in_channels=1, - num_classes=1, + out_channels=1, channels=(16, 32, 64, 128, 256), strides=(2, 2, 2, 2), num_res_units=2, ) - loss = monai.losses.DiceLoss(do_sigmoid=True) +lr = 1e-3 opt = torch.optim.Adam(net.parameters(), lr) +device = torch.device("cuda:0") -# Since network outputs logits and segmentation, we need a custom function. -def _loss_fn(i, j): - return loss(i[0], j) - - -# Create trainer +# ignite trainer expects batch=(img, seg) and returns output=loss at every iteration, +# user can add output_transform to return other values, like: y_pred, y, etc. 
def prepare_batch(batch, device=None, non_blocking=False): return _prepare_batch((batch['img'], batch['seg']), device, non_blocking) -device = torch.device("cuda:0") -trainer = create_supervised_trainer(net, opt, _loss_fn, device, False, - prepare_batch=prepare_batch, - output_transform=lambda x, y, y_pred, loss: [y_pred, loss.item(), y]) +trainer = create_supervised_trainer(net, opt, loss, device, False, prepare_batch=prepare_batch) # adding checkpoint handler to save models (network params and optimizer stats) during training checkpoint_handler = ModelCheckpoint('./runs/', 'net', n_saved=10, require_empty=False) trainer.add_event_handler(event_name=Events.EPOCH_COMPLETED, handler=checkpoint_handler, to_save={'net': net, 'opt': opt}) -train_stats_handler = StatsHandler(output_transform=lambda x: x[1]) + +# StatsHandler prints loss at every iteration and print metrics at every epoch, +# we don't set metrics for trainer here, so just print loss, user can also customize print functions +# and can use output_transform to convert engine.state.output if it's not loss value +train_stats_handler = StatsHandler(name='trainer') train_stats_handler.attach(trainer) +# TensorBoardStatsHandler plots loss at every iteration and plots metrics at every epoch, same as StatsHandler +train_tensorboard_stats_handler = TensorBoardStatsHandler() +train_tensorboard_stats_handler.attach(trainer) -@trainer.on(Events.EPOCH_COMPLETED) -def log_training_loss(engine): - # log loss to tensorboard with second item of engine.state.output, loss.item() from output_transform - writer.add_scalar('Loss/train', engine.state.output[1], engine.state.epoch) - - # tensor of ones to use where for converting labels to zero and ones - ones = torch.ones(engine.state.batch['seg'][0].shape, dtype=torch.int32) - first_output_tensor = engine.state.output[0][1][0].detach().cpu() - # log model output to tensorboard, as three dimensional tensor with no channels dimension - 
img2tensorboard.add_animated_gif_no_channels(writer, "first_output_final_batch", first_output_tensor, 64, - 255, engine.state.epoch) - # get label tensor and convert to single class - first_label_tensor = torch.where(engine.state.batch['seg'][0] > 0, ones, engine.state.batch['seg'][0]) - # log label tensor to tensorboard, there is a channel dimension when getting label from batch - img2tensorboard.add_animated_gif(writer, "first_label_final_batch", first_label_tensor, 64, - 255, engine.state.epoch) - second_output_tensor = engine.state.output[0][1][1].detach().cpu() - img2tensorboard.add_animated_gif_no_channels(writer, "second_output_final_batch", second_output_tensor, 64, - 255, engine.state.epoch) - second_label_tensor = torch.where(engine.state.batch['seg'][1] > 0, ones, engine.state.batch['seg'][1]) - img2tensorboard.add_animated_gif(writer, "second_label_final_batch", second_label_tensor, 64, - 255, engine.state.epoch) - third_output_tensor = engine.state.output[0][1][2].detach().cpu() - img2tensorboard.add_animated_gif_no_channels(writer, "third_output_final_batch", third_output_tensor, 64, - 255, engine.state.epoch) - third_label_tensor = torch.where(engine.state.batch['seg'][2] > 0, ones, engine.state.batch['seg'][2]) - img2tensorboard.add_animated_gif(writer, "third_label_final_batch", third_label_tensor, 64, - 255, engine.state.epoch) - engine.logger.info("Epoch[%s] Loss: %s", engine.state.epoch, engine.state.output[1]) - - -writer = SummaryWriter() +validation_every_n_iters = 5 # Set parameters for validation -validation_every_n_epochs = 1 metric_name = 'Mean_Dice' - # add evaluation metric to the evaluator engine val_metrics = {metric_name: MeanDice(add_sigmoid=True, to_onehot_y=False)} -evaluator = create_supervised_evaluator(net, val_metrics, device, True, - prepare_batch=prepare_batch, - output_transform=lambda x, y, y_pred: (y_pred[0], y)) - -# Add stats event handler to print validation stats via evaluator -logging.basicConfig(stream=sys.stdout, 
level=logging.INFO) -val_stats_handler = StatsHandler(output_transform=lambda x: None) -val_stats_handler.attach(evaluator) -# Add early stopping handler to evaluator. -early_stopper = EarlyStopping(patience=4, - score_function=stopping_fn_from_metric(metric_name), - trainer=trainer) -evaluator.add_event_handler(event_name=Events.EPOCH_COMPLETED, handler=early_stopper) +# ignite evaluator expects batch=(img, seg) and returns output=(y_pred, y) at every iteration, +# user can add output_transform to return other values +evaluator = create_supervised_evaluator(net, val_metrics, device, True, prepare_batch=prepare_batch) -# create a validation data loader -val_ds = monai.data.Dataset(data=val_files, transform=val_transforms) -val_loader = DataLoader(ds, batch_size=5, num_workers=8, collate_fn=list_data_collate, - pin_memory=torch.cuda.is_available()) - -@trainer.on(Events.EPOCH_COMPLETED(every=validation_every_n_epochs)) +@trainer.on(Events.ITERATION_COMPLETED(every=validation_every_n_iters)) def run_validation(engine): evaluator.run(val_loader) -@evaluator.on(Events.EPOCH_COMPLETED) -def log_metrics_to_tensorboard(engine): - for _, value in engine.state.metrics.items(): - writer.add_scalar('Metrics/' + metric_name, value, trainer.state.epoch) +# Add early stopping handler to evaluator +early_stopper = EarlyStopping(patience=4, + score_function=stopping_fn_from_metric(metric_name), + trainer=trainer) +evaluator.add_event_handler(event_name=Events.EPOCH_COMPLETED, handler=early_stopper) +# Add stats event handler to print validation stats via evaluator +val_stats_handler = StatsHandler( + name='evaluator', + output_transform=lambda x: None, # no need to print loss value, so disable per iteration output + global_epoch_transform=lambda x: trainer.state.epoch) # fetch global epoch number from trainer +val_stats_handler.attach(evaluator) -# create a training data loader -logging.basicConfig(stream=sys.stdout, level=logging.INFO) +# add handler to record metrics to 
TensorBoard at every validation epoch +val_tensorboard_stats_handler = TensorBoardStatsHandler( + output_transform=lambda x: None, # no need to plot loss value, so disable per iteration output + global_epoch_transform=lambda x: trainer.state.iteration) # fetch global iteration number from trainer +val_tensorboard_stats_handler.attach(evaluator) + +# add handler to draw the first image and the corresponding label and model output in the last batch +# here we draw the 3D output as GIF format along the depth axis, every 2 validation iterations. +val_tensorboard_image_handler = TensorBoardImageHandler( + batch_transform=lambda batch: (batch['img'], batch['seg']), + output_transform=lambda output: predict_segmentation(output[0]), + global_iter_transform=lambda x: trainer.state.epoch +) +evaluator.add_event_handler( + event_name=Events.ITERATION_COMPLETED(every=2), handler=val_tensorboard_image_handler) -train_ds = monai.data.Dataset(data=train_files, transform=train_transforms) -train_loader = DataLoader(train_ds, batch_size=2, num_workers=4, collate_fn=list_data_collate, - pin_memory=torch.cuda.is_available()) -train_epochs = 30 +train_epochs = 5 state = trainer.run(train_loader, train_epochs) diff --git a/examples/unet_segmentation_3d.ipynb b/examples/unet_segmentation_3d.ipynb index 0d49742f10..f42c33a2a5 100644 --- a/examples/unet_segmentation_3d.ipynb +++ b/examples/unet_segmentation_3d.ipynb @@ -145,7 +145,7 @@ "net = monai.networks.nets.UNet(\n", " dimensions=3,\n", " in_channels=1,\n", - " num_classes=1,\n", + " out_channels=1,\n", " channels=(16, 32, 64, 128, 256),\n", " strides=(2, 2, 2, 2),\n", " num_res_units=2,\n", diff --git a/monai/data/synthetic.py b/monai/data/synthetic.py index 4efd4fe393..063c16a965 100644 --- a/monai/data/synthetic.py +++ b/monai/data/synthetic.py @@ -20,7 +20,7 @@ def create_test_image_2d(width, height, num_objs=12, rad_max=30, noise_max=0.0, `radMax'. 
The mask will have `numSegClasses' number of classes for segmentations labeled sequentially from 1, plus a background class represented as 0. If `noiseMax' is greater than 0 then noise will be added to the image taken from the uniform distribution on range [0,noiseMax). If `channel_dim' is None, will create an image without channel - dimemsion, otherwise create an image with channel dimension as first dim or last dim. + dimension, otherwise create an image with channel dimension as first dim or last dim. """ image = np.zeros((width, height)) @@ -44,7 +44,7 @@ def create_test_image_2d(width, height, num_objs=12, rad_max=30, noise_max=0.0, if channel_dim is not None: assert isinstance(channel_dim, int) and channel_dim in (-1, 0, 2), 'invalid channel dim.' noisyimage, labels = noisyimage[None], labels[None] \ - if channel_dim == 0 else noisyimage[..., None], labels[..., None] + if channel_dim == 0 else (noisyimage[..., None], labels[..., None]) return noisyimage, labels @@ -54,7 +54,8 @@ def create_test_image_3d(height, width, depth, num_objs=12, rad_max=30, """ Return a noisy 3D image and segmentation. - See also: create_test_image_2d + See also: + ``create_test_image_2d`` """ image = np.zeros((width, height, depth)) diff --git a/monai/handlers/classification_saver.py b/monai/handlers/classification_saver.py new file mode 100644 index 0000000000..501dce816f --- /dev/null +++ b/monai/handlers/classification_saver.py @@ -0,0 +1,95 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import csv +import numpy as np +import torch +from ignite.engine import Events +import logging + + +class ClassificationSaver: + """ + Event handler triggered on completing every iteration to save the classification predictions as CSV file. + """ + + def __init__(self, output_dir='./', overwrite=True, + batch_transform=lambda x: x, output_transform=lambda x: x, name=None): + """ + Args: + output_dir (str): output CSV file directory. + overwrite (bool): whether to overwriting existing CSV file content. If we are not overwriting, + then we check if the results have been previously saved, and load them to the prediction_dict. + batch_transform (Callable): a callable that is used to transform the + ignite.engine.batch into expected format to extract the meta_data dictionary. + output_transform (Callable): a callable that is used to transform the + ignite.engine.output into the form expected model prediction data. + The first dimension of this transform's output will be treated as the + batch dimension. Each item in the batch will be saved individually. + name (str): identifier of logging.logger to use, defaulting to `engine.logger`. 
+ + """ + self.output_dir = output_dir + self._prediction_dict = {} + self._preds_filepath = os.path.join(output_dir, 'predictions.csv') + self.overwrite = overwrite + self.batch_transform = batch_transform + self.output_transform = output_transform + + self.logger = None if name is None else logging.getLogger(name) + + def attach(self, engine): + if self.logger is None: + self.logger = engine.logger + if not engine.has_event_handler(self, Events.ITERATION_COMPLETED): + engine.add_event_handler(Events.ITERATION_COMPLETED, self) + if not engine.has_event_handler(self.finalize, Events.COMPLETED): + engine.add_event_handler(Events.COMPLETED, self.finalize) + + def finalize(self, _engine=None): + """ + Writes the prediction dict to a csv + + """ + if not self.overwrite and os.path.exists(self._preds_filepath): + with open(self._preds_filepath, 'r') as f: + reader = csv.reader(f) + for row in reader: + self._prediction_dict[row[0]] = np.array(row[1:]).astype(np.float32) + + if not os.path.exists(self.output_dir): + os.makedirs(self.output_dir) + with open(self._preds_filepath, 'w') as f: + for k, v in sorted(self._prediction_dict.items()): + f.write(k) + for result in v.flatten(): + f.write("," + str(result)) + f.write("\n") + self.logger.info('saved classification predictions into: {}'.format(self._preds_filepath)) + + def __call__(self, engine): + """ + This method assumes self.batch_transform will extract Metadata from the input batch. + Metadata should have the following keys: + + - ``'filename_or_obj'`` -- save the prediction corresponding to file name. 
+ + """ + meta_data = self.batch_transform(engine.state.batch) + filenames = meta_data['filename_or_obj'] + + engine_output = self.output_transform(engine.state.output) + for batch_id, filename in enumerate(filenames): # save a batch of files + output = engine_output[batch_id] + if isinstance(output, torch.Tensor): + output = output.detach().cpu().numpy() + self._prediction_dict[filename] = output.astype(np.float32) diff --git a/monai/handlers/segmentation_saver.py b/monai/handlers/segmentation_saver.py index 1f3fe2615d..98eb972d3a 100644 --- a/monai/handlers/segmentation_saver.py +++ b/monai/handlers/segmentation_saver.py @@ -13,7 +13,7 @@ import numpy as np import torch from ignite.engine import Events - +import logging from monai.data.nifti_writer import write_nifti @@ -50,7 +50,8 @@ def __init__(self, output_path='./', dtype='float32', output_postfix='seg', outp def attach(self, engine): if self.logger is None: self.logger = engine.logger - return engine.add_event_handler(Events.ITERATION_COMPLETED, self) + if not engine.has_event_handler(self, Events.ITERATION_COMPLETED): + engine.add_event_handler(Events.ITERATION_COMPLETED, self) @staticmethod def _create_file_basename(postfix, input_file_name, folder_path, data_root_dir=""): @@ -115,5 +116,6 @@ def __call__(self, engine): output_filename = self._create_file_basename(self.output_postfix, filename, self.output_path) output_filename = '{}{}'.format(output_filename, self.output_ext) # change output to "channel last" format and write to nifti format file - write_nifti(np.moveaxis(seg_output, 0, -1), affine_, output_filename, original_affine_, dtype=seg_output.dtype) + to_save = np.moveaxis(seg_output, 0, -1) + write_nifti(to_save, affine_, output_filename, original_affine_, dtype=seg_output.dtype) self.logger.info('saved: {}'.format(output_filename)) diff --git a/monai/handlers/tensorboard_handlers.py b/monai/handlers/tensorboard_handlers.py index 2d5116bd59..411a53084b 100644 --- 
a/monai/handlers/tensorboard_handlers.py +++ b/monai/handlers/tensorboard_handlers.py @@ -18,6 +18,8 @@ from monai.utils.misc import is_scalar from monai.transforms.utils import rescale_array +DEFAULT_TAG = 'Loss' + class TensorBoardStatsHandler(object): """TensorBoardStatsHandler defines a set of Ignite Event-handlers for all the TensorBoard logics. @@ -36,8 +38,9 @@ def __init__(self, summary_writer=None, epoch_event_writer=None, iteration_event_writer=None, - output_transform=lambda x: {'Loss': x}, - global_epoch_transform=lambda x: x): + output_transform=lambda x: x, + global_epoch_transform=lambda x: x, + tag_name=DEFAULT_TAG): """ Args: summary_writer (SummaryWriter): user can specify TensorBoard SummaryWriter, @@ -47,17 +50,20 @@ def __init__(self, iteration_event_writer (Callable): custimized callable TensorBoard writer for iteration level. must accept parameter "engine" and "summary_writer", use default event writer if None. output_transform (Callable): a callable that is used to transform the - ``ignite.engine.output`` into a dictionary of (tag_name: scalar) pairs to be plotted onto tensorboard. - by default this scalar plotting happens when every iteration completed. + ``ignite.engine.output`` into a scalar to plot, or a dictionary of {key: scalar}. + in the latter case, the output string will be formated as key: value. + by default this value plotting happens when every iteration completed. global_epoch_transform (Callable): a callable that is used to customize global epoch number. For example, in evaluation, the evaluator engine might want to use trainer engines epoch number when plotting epoch vs metric curves. + tag_name (string): when iteration output is a scalar, tag_name is used to plot, defaults to ``'Loss'``. 
""" self._writer = SummaryWriter() if summary_writer is None else summary_writer self.epoch_event_writer = epoch_event_writer self.iteration_event_writer = iteration_event_writer self.output_transform = output_transform self.global_epoch_transform = global_epoch_transform + self.tag_name = tag_name def attach(self, engine: Engine): """Register a set of Ignite Event-Handlers to a specified Ignite engine. @@ -121,31 +127,34 @@ def _default_iteration_writer(self, engine: Engine, writer: SummaryWriter): writer (SummaryWriter): TensorBoard writer, created in TensorBoardHandler. """ - loss_dict = self.output_transform(engine.state.output) - if loss_dict is None: + loss = self.output_transform(engine.state.output) + if loss is None: return # do nothing if output is empty - if not isinstance(loss_dict, dict): - raise ValueError('TensorBoardStatsHandler requires' - ' output_transform(engine.state.output) returning a dictionary' - ' of key and scalar pairs to plot' - ' got {}.'.format(type(loss_dict))) - for name, value in loss_dict.items(): - if not is_scalar(value): - warnings.warn('ignoring non-scalar output in tensorboard curve plotting,' - ' make sure `output_transform(engine.state.output)` returns' - ' a dictionary of key and scalar pairs to avoid this warning.' - ' Got {}:{}'.format(name, type(value))) - continue - plot_value = value.item() if torch.is_tensor(value) else value - writer.add_scalar(name, plot_value, engine.state.iteration) + if isinstance(loss, dict): + for name in sorted(loss): + value = loss[name] + if not is_scalar(value): + warnings.warn('ignoring non-scalar output in TensorBoardStatsHandler,' + ' make sure `output_transform(engine.state.output)` returns' + ' a scalar or dictionary of key and scalar pairs to avoid this warning.' 
+ ' {}:{}'.format(name, type(value))) + continue # not plot multi dimensional output + writer.add_scalar(name, value.item() if torch.is_tensor(value) else value, engine.state.iteration) + elif is_scalar(loss): # not printing multi dimensional output + writer.add_scalar(self.tag_name, loss.item() if torch.is_tensor(loss) else loss, engine.state.iteration) + else: + warnings.warn('ignoring non-scalar output in TensorBoardStatsHandler,' + ' make sure `output_transform(engine.state.output)` returns' + ' a scalar or a dictionary of key and scalar pairs to avoid this warning.' + ' {}'.format(type(loss))) writer.flush() class TensorBoardImageHandler(object): """TensorBoardImageHandler is an ignite Event handler that can visualise images, labels and outputs as 2D/3D images. 2D output (shape in Batch, channel, H, W) will be shown as simple image using the first element in the batch, - for 3D to ND output (shape in Batch, channel, H, W, D) input, - the last three dimensions will be shown as GIF image along the last axis (typically Depth). + for 3D to ND output (shape in Batch, channel, H, W, D) input, each of ``self.max_channels`` number of images' + last three dimensions will be shown as animated GIF along the last axis (typically Depth). It's can be used for any Ignite Engine (trainer, validator and evaluator). User can easily added it to engine for any expected Event, for example: ``EPOCH_COMPLETED``, @@ -230,9 +239,9 @@ def _add_2_or_3_d(self, data, step, tag='output'): return if d.ndim == 3: - if d.shape[0] == 3 and self.max_channels == 3: # rgb? 
+ if d.shape[0] == 3 and self.max_channels == 3: # RGB dataformats = 'CHW' - self._writer.add_image('{}_{}'.format(tag, dataformats), d, step, dataformats='CHW') + self._writer.add_image('{}_{}'.format(tag, dataformats), d, step, dataformats=dataformats) return for j, d2 in enumerate(d[:self.max_channels]): d2 = rescale_array(d2, 0, 1) diff --git a/monai/networks/nets/densenet3d.py b/monai/networks/nets/densenet3d.py index f5493f6c04..78fab167c4 100644 --- a/monai/networks/nets/densenet3d.py +++ b/monai/networks/nets/densenet3d.py @@ -146,7 +146,6 @@ def __init__(self, OrderedDict([ ('relu', nn.ReLU(inplace=True)), ('norm', get_avgpooling_type(spatial_dims, is_adaptive=True)(1)), - ('relu', nn.ReLU(inplace=True)), ('flatten', nn.Flatten(1)), ('class', nn.Linear(in_channels, out_channels)), ])) diff --git a/monai/networks/nets/unet.py b/monai/networks/nets/unet.py index b0d42612eb..ad9b3ddbf4 100644 --- a/monai/networks/nets/unet.py +++ b/monai/networks/nets/unet.py @@ -13,7 +13,6 @@ from monai.networks.blocks.convolutions import Convolution, ResidualUnit from monai.networks.layers.simplelayers import SkipConnection -from monai.networks.utils import predict_segmentation from monai.utils import export from monai.utils.aliases import alias @@ -22,13 +21,13 @@ @alias("Unet", "unet") class UNet(nn.Module): - def __init__(self, dimensions, in_channels, num_classes, channels, strides, kernel_size=3, up_kernel_size=3, + def __init__(self, dimensions, in_channels, out_channels, channels, strides, kernel_size=3, up_kernel_size=3, num_res_units=0, instance_norm=True, dropout=0): super().__init__() assert len(channels) == (len(strides) + 1) self.dimensions = dimensions self.in_channels = in_channels - self.num_classes = num_classes + self.out_channels = out_channels self.channels = channels self.strides = strides self.kernel_size = kernel_size @@ -58,7 +57,7 @@ def _create_block(inc, outc, channels, strides, is_top): return nn.Sequential(down, SkipConnection(subblock), up) - 
self.model = _create_block(in_channels, num_classes, self.channels, self.strides, True) + self.model = _create_block(in_channels, out_channels, self.channels, self.strides, True) def _get_down_layer(self, in_channels, out_channels, strides, is_top): if self.num_res_units > 0: @@ -98,4 +97,4 @@ def _get_up_layer(self, in_channels, out_channels, strides, is_top): def forward(self, x): x = self.model(x) - return x, predict_segmentation(x) + return x diff --git a/monai/transforms/composables.py b/monai/transforms/composables.py index c19c3f7df9..f86afd546e 100644 --- a/monai/transforms/composables.py +++ b/monai/transforms/composables.py @@ -14,8 +14,8 @@ """ import torch +import numpy as np from collections.abc import Hashable - import monai from monai.data.utils import get_random_patch, get_valid_patch_size from monai.networks.layers.simplelayers import GaussianFilter @@ -23,7 +23,7 @@ from monai.transforms.transforms import (LoadNifti, AsChannelFirst, Orientation, AddChannel, Spacing, Rotate90, SpatialCrop, RandAffine, Rand2DElastic, Rand3DElastic, - Flip, Rotate, Zoom) + Rescale, Resize, Flip, Rotate, Zoom) from monai.utils.misc import ensure_tuple from monai.transforms.utils import generate_pos_neg_label_crop_centers, create_grid from monai.utils.aliases import alias @@ -230,17 +230,18 @@ class Rotate90d(MapTransform): dictionary-based wrapper of Rotate90. """ - def __init__(self, keys, k=1, axes=(1, 2)): + def __init__(self, keys, k=1, spatial_axes=(0, 1)): """ Args: k (int): number of times to rotate by 90 degrees. - axes (2 ints): defines the plane to rotate with 2 axes. + spatial_axes (2 ints): defines the plane to rotate with 2 spatial axes. + Default: (0, 1), this is the first two axis in spatial dimensions. 
""" MapTransform.__init__(self, keys) self.k = k - self.plane_axes = axes + self.spatial_axes = spatial_axes - self.rotator = Rotate90(self.k, self.plane_axes) + self.rotator = Rotate90(self.k, self.spatial_axes) def __call__(self, data): d = dict(data) @@ -249,17 +250,73 @@ def __call__(self, data): return d +@export +@alias('RescaleD', 'RescaleDict') +class Rescaled(MapTransform): + """ + dictionary-based wrapper of Rescale. + """ + + def __init__(self, keys, minv=0.0, maxv=1.0, dtype=np.float32): + MapTransform.__init__(self, keys) + self.rescaler = Rescale(minv, maxv, dtype) + + def __call__(self, data): + d = dict(data) + for key in self.keys: + d[key] = self.rescaler(d[key]) + return d + + +@export +@alias('ResizeD', 'ResizeDict') +class Resized(MapTransform): + """ + dictionary-based wrapper of Resize. + Args: + keys (hashable items): keys of the corresponding items to be transformed. + See also: monai.transform.composables.MapTransform + output_spatial_shape (tuple or list): expected shape of spatial dimensions after resize operation. + order (int): Order of spline interpolation. Default=1. + mode (str): Points outside boundaries are filled according to given mode. + Options are 'constant', 'edge', 'symmetric', 'reflect', 'wrap'. + cval (float): Used with mode 'constant', the value outside image boundaries. + clip (bool): Wheter to clip range of output values after interpolation. Default: True. + preserve_range (bool): Whether to keep original range of values. Default is True. + If False, input is converted according to conventions of img_as_float. See + https://scikit-image.org/docs/dev/user_guide/data_types.html. + anti_aliasing (bool): Whether to apply a gaussian filter to image before down-scaling. + Default is True. + anti_aliasing_sigma (float, tuple of floats): Standard deviation for gaussian filtering. 
+ """ + + def __init__(self, keys, output_spatial_shape, order=1, mode='reflect', cval=0, + clip=True, preserve_range=True, anti_aliasing=True, anti_aliasing_sigma=None): + MapTransform.__init__(self, keys) + self.resizer = Resize(output_spatial_shape, order, mode, cval, clip, preserve_range, + anti_aliasing, anti_aliasing_sigma) + + def __call__(self, data): + d = dict(data) + for key in self.keys: + d[key] = self.resizer(d[key]) + return d + + @export @alias('UniformRandomPatchD', 'UniformRandomPatchDict') class UniformRandomPatchd(Randomizable, MapTransform): """ Selects a patch of the given size chosen at a uniformly random position in the image. + + Args: + patch_spatial_size (tuple or list): Expected patch size of spatial dimensions. """ - def __init__(self, keys, patch_size): + def __init__(self, keys, patch_spatial_size): MapTransform.__init__(self, keys) - self.patch_size = (None,) + tuple(patch_size) + self.patch_spatial_size = (None,) + tuple(patch_spatial_size) self._slices = None @@ -270,8 +327,8 @@ def __call__(self, data): d = dict(data) image_shape = d[self.keys[0]].shape # image shape from the first data key - patch_size = get_valid_patch_size(image_shape, self.patch_size) - self.randomize(image_shape, patch_size) + patch_spatial_size = get_valid_patch_size(image_shape, self.patch_spatial_size) + self.randomize(image_shape, patch_spatial_size) for key in self.keys: d[key] = d[key][self._slices] return d @@ -282,10 +339,10 @@ def __call__(self, data): class RandRotate90d(Randomizable, MapTransform): """ With probability `prob`, input arrays are rotated by 90 degrees - in the plane specified by `axes`. + in the plane specified by `spatial_axes`. """ - def __init__(self, keys, prob=0.1, max_k=3, axes=(1, 2)): + def __init__(self, keys, prob=0.1, max_k=3, spatial_axes=(0, 1)): """ Args: keys (hashable items): keys of the corresponding items to be transformed. 
@@ -294,14 +351,14 @@ def __init__(self, keys, prob=0.1, max_k=3, axes=(1, 2)): (Default 0.1, with 10% probability it returns a rotated array.) max_k (int): number of rotations will be sampled from `np.random.randint(max_k) + 1`. (Default 3) - axes (2 ints): defines the plane to rotate with 2 axes. - (Default to (1, 2)) + spatial_axes (2 ints): defines the plane to rotate with 2 spatial axes. + Default: (0, 1), this is the first two axis in spatial dimensions. """ MapTransform.__init__(self, keys) self.prob = min(max(prob, 0.0), 1.0) self.max_k = max_k - self.axes = axes + self.spatial_axes = spatial_axes self._do_transform = False self._rand_k = 0 @@ -315,7 +372,7 @@ def __call__(self, data): if not self._do_transform: return data - rotator = Rotate90(self._rand_k, self.axes) + rotator = Rotate90(self._rand_k, self.spatial_axes) d = dict(data) for key in self.keys: d[key] = rotator(d[key]) @@ -599,15 +656,17 @@ def __call__(self, data): @alias('FlipD', 'FlipDict') class Flipd(MapTransform): """Dictionary-based wrapper of Flip. + See numpy.flip for additional details. + https://docs.scipy.org/doc/numpy/reference/generated/numpy.flip.html Args: keys (dict): Keys to pick data for transformation. - axis (None, int or tuple of ints): Axes along which to flip over. Default is None. + spatial_axis (None, int or tuple of ints): Spatial axes along which to flip over. Default is None. """ - def __init__(self, keys, axis=None): + def __init__(self, keys, spatial_axis=None): MapTransform.__init__(self, keys) - self.flipper = Flip(axis=axis) + self.flipper = Flip(spatial_axis=spatial_axis) def __call__(self, data): d = dict(data) @@ -620,19 +679,21 @@ def __call__(self, data): @alias('RandFlipD', 'RandFlipDict') class RandFlipd(Randomizable, MapTransform): """Dict-based wrapper of RandFlip. + See numpy.flip for additional details. + https://docs.scipy.org/doc/numpy/reference/generated/numpy.flip.html Args: prob (float): Probability of flipping. 
- axis (None, int or tuple of ints): Axes along which to flip over. Default is None. + spatial_axis (None, int or tuple of ints): Spatial axes along which to flip over. Default is None. """ - def __init__(self, keys, prob=0.1, axis=None): + def __init__(self, keys, prob=0.1, spatial_axis=None): MapTransform.__init__(self, keys) - self.axis = axis + self.spatial_axis = spatial_axis self.prob = prob self._do_transform = False - self.flipper = Flip(axis=axis) + self.flipper = Flip(spatial_axis=spatial_axis) def randomize(self): self._do_transform = self.R.random_sample() < self.prob @@ -655,8 +716,8 @@ class Rotated(MapTransform): Args: keys (dict): Keys to pick data for transformation. angle (float): Rotation angle in degrees. - axes (tuple of 2 ints): Axes of rotation. Default: (1, 2). This is the first two - axis in spatial dimensions according to MONAI channel first shape assumption. + spatial_axes (tuple of 2 ints): Spatial axes of rotation. Default: (0, 1). + This is the first two axis in spatial dimensions. reshape (bool): If true, output shape is made same as input. Default: True. order (int): Order of spline interpolation. Range 0-5. Default: 1. This is different from scipy where default interpolation is 3. @@ -666,10 +727,10 @@ class Rotated(MapTransform): prefiter (bool): Apply spline_filter before interpolation. Default: True. """ - def __init__(self, keys, angle, axes=(1, 2), reshape=True, order=1, + def __init__(self, keys, angle, spatial_axes=(0, 1), reshape=True, order=1, mode='constant', cval=0, prefilter=True): MapTransform.__init__(self, keys) - self.rotator = Rotate(angle=angle, axes=axes, reshape=reshape, + self.rotator = Rotate(angle=angle, spatial_axes=spatial_axes, reshape=reshape, order=order, mode=mode, cval=cval, prefilter=prefilter) def __call__(self, data): @@ -688,8 +749,8 @@ class RandRotated(Randomizable, MapTransform): prob (float): Probability of rotation. degrees (tuple of float or float): Range of rotation in degrees. 
If single number, angle is picked from (-degrees, degrees). - axes (tuple of 2 ints): Axes of rotation. Default: (1, 2). This is the first two - axis in spatial dimensions according to MONAI channel first shape assumption. + spatial_axes (tuple of 2 ints): Spatial axes of rotation. Default: (0, 1). + This is the first two axis in spatial dimensions. reshape (bool): If true, output shape is made same as input. Default: True. order (int): Order of spline interpolation. Range 0-5. Default: 1. This is different from scipy where default interpolation is 3. @@ -698,7 +759,7 @@ class RandRotated(Randomizable, MapTransform): cval (scalar): Value to fill outside boundary. Default: 0. prefiter (bool): Apply spline_filter before interpolation. Default: True. """ - def __init__(self, keys, degrees, prob=0.1, axes=(1, 2), reshape=True, order=1, + def __init__(self, keys, degrees, prob=0.1, spatial_axes=(0, 1), reshape=True, order=1, mode='constant', cval=0, prefilter=True): MapTransform.__init__(self, keys) self.prob = prob @@ -708,7 +769,7 @@ def __init__(self, keys, degrees, prob=0.1, axes=(1, 2), reshape=True, order=1, self.mode = mode self.cval = cval self.prefilter = prefilter - self.axes = axes + self.spatial_axes = spatial_axes if not hasattr(self.degrees, '__iter__'): self.degrees = (-self.degrees, self.degrees) @@ -726,10 +787,10 @@ def __call__(self, data): d = dict(data) if not self._do_transform: return d - rotator = Rotate(self.angle, self.axes, self.reshape, self.order, + rotator = Rotate(self.angle, self.spatial_axes, self.reshape, self.order, self.mode, self.cval, self.prefilter) for key in self.keys: - d[key] = self.flipper(d[key]) + d[key] = rotator(d[key]) return d @@ -772,7 +833,11 @@ class RandZoomd(Randomizable, MapTransform): keys (dict): Keys to pick data for transformation. prob (float): Probability of zooming. min_zoom (float or sequence): Min zoom factor. Can be float or sequence same size as image. 
+ If a float, min_zoom is the same for each spatial axis. + If a sequence, min_zoom should contain one value for each spatial axis. max_zoom (float or sequence): Max zoom factor. Can be float or sequence same size as image. + If a float, max_zoom is the same for each spatial axis. + If a sequence, max_zoom should contain one value for each spatial axis. order (int): order of interpolation. Default=3. mode ('reflect', 'constant', 'nearest', 'mirror', 'wrap'): Determines how input is extended beyond boundaries. Default: 'constant'. diff --git a/monai/transforms/transforms.py b/monai/transforms/transforms.py index 8f140972f6..0e326c6f73 100644 --- a/monai/transforms/transforms.py +++ b/monai/transforms/transforms.py @@ -197,6 +197,16 @@ def __call__(self, filename): class AsChannelFirst: """ Change the channel dimension of the image to the first dimension. + + Most of the image transformations in ``monai.transforms`` + assumes the input image is in the channel-first format, which has the shape + (num_channels, spatial_dim_1[, spatial_dim_2, ...]). + + This transform could be used to convert, for example, a channel-last image array in shape + (spatial_dim_1[, spatial_dim_2, ...], num_channels) into the channel-first format, + so that the multidimensional image array can be correctly interpreted by the other + transforms. + Args: channel_dim (int): which dimension of input image is the channel, default is the last dimension. """ @@ -213,6 +223,15 @@ def __call__(self, img): class AddChannel: """ Adds a 1-length channel dimension to the input image. + + Most of the image transformations in ``monai.transforms`` + assumes the input image is in the channel-first format, which has the shape + (num_channels, spatial_dim_1[, spatial_dim_2, ...]). 
+ + This transform could be used, for example, to convert a (spatial_dim_1[, spatial_dim_2, ...]) + spatial image into the channel-first format so that the + multidimensional image array can be correctly interpreted by the other + transforms. """ def __call__(self, img): @@ -253,8 +272,7 @@ class GaussianNoise(Randomizable): Args: mean (float or array of floats): Mean or “centre” of the distribution. - scale (float): Standard deviation (spread) of distribution. - size (int or tuple of ints): Output shape. Default: None (single value is returned). + std (float): Standard deviation (spread) of distribution. """ def __init__(self, mean=0.0, std=0.1): @@ -267,19 +285,28 @@ def __call__(self, img): @export class Flip: - """Reverses the order of elements along the given axis. Preserves shape. + """Reverses the order of elements along the given spatial axis. Preserves shape. Uses ``np.flip`` in practice. See numpy.flip for additional details. https://docs.scipy.org/doc/numpy/reference/generated/numpy.flip.html Args: - axis (None, int or tuple of ints): Axes along which to flip over. Default is None. + spatial_axis (None, int or tuple of ints): spatial axes along which to flip over. Default is None. """ - def __init__(self, axis=None): - self.axis = axis + def __init__(self, spatial_axis=None): + self.spatial_axis = spatial_axis def __call__(self, img): - return np.flip(img, self.axis) + """ + Args: + img (ndarray): channel first array, must have shape: (num_channels, H[, W, ..., ]), + """ + flipped = list() + for channel in img: + flipped.append( + np.flip(channel, self.spatial_axis) + ) + return np.stack(flipped) @export @@ -289,6 +316,7 @@ class Resize: For additional details, see https://scikit-image.org/docs/dev/api/skimage.transform.html#skimage.transform.resize. Args: + output_spatial_shape (tuple or list): expected shape of spatial dimensions after resize operation. order (int): Order of spline interpolation. Default=1. 
mode (str): Points outside boundaries are filled according to given mode. Options are 'constant', 'edge', 'symmetric', 'reflect', 'wrap'. @@ -302,10 +330,10 @@ class Resize: anti_aliasing_sigma (float, tuple of floats): Standard deviation for gaussian filtering. """ - def __init__(self, output_shape, order=1, mode='reflect', cval=0, + def __init__(self, output_spatial_shape, order=1, mode='reflect', cval=0, clip=True, preserve_range=True, anti_aliasing=True, anti_aliasing_sigma=None): assert isinstance(order, int), "order must be integer." - self.output_shape = output_shape + self.output_spatial_shape = output_spatial_shape self.order = order self.mode = mode self.cval = cval @@ -315,11 +343,20 @@ def __init__(self, output_shape, order=1, mode='reflect', cval=0, self.anti_aliasing_sigma = anti_aliasing_sigma def __call__(self, img): - return resize(img, self.output_shape, order=self.order, - mode=self.mode, cval=self.cval, - clip=self.clip, preserve_range=self.preserve_range, - anti_aliasing=self.anti_aliasing, - anti_aliasing_sigma=self.anti_aliasing_sigma) + """ + Args: + img (ndarray): channel first array, must have shape: (num_channels, H[, W, ..., ]), + """ + resized = list() + for channel in img: + resized.append( + resize(channel, self.output_spatial_shape, order=self.order, + mode=self.mode, cval=self.cval, + clip=self.clip, preserve_range=self.preserve_range, + anti_aliasing=self.anti_aliasing, + anti_aliasing_sigma=self.anti_aliasing_sigma) + ) + return np.stack(resized).astype(np.float32) @export @@ -330,8 +367,8 @@ class Rotate: Args: angle (float): Rotation angle in degrees. - axes (tuple of 2 ints): Axes of rotation. Default: (1, 2). This is the first two - axis in spatial dimensions according to MONAI channel first shape assumption. + spatial_axes (tuple of 2 ints): Spatial axes of rotation. Default: (0, 1). + This is the first two axis in spatial dimensions. reshape (bool): If true, output shape is made same as input. Default: True. 
order (int): Order of spline interpolation. Range 0-5. Default: 1. This is different from scipy where default interpolation is 3. @@ -341,19 +378,27 @@ class Rotate: prefiter (bool): Apply spline_filter before interpolation. Default: True. """ - def __init__(self, angle, axes=(1, 2), reshape=True, order=1, mode='constant', cval=0, prefilter=True): + def __init__(self, angle, spatial_axes=(0, 1), reshape=True, order=1, mode='constant', cval=0, prefilter=True): self.angle = angle self.reshape = reshape self.order = order self.mode = mode self.cval = cval self.prefilter = prefilter - self.axes = axes + self.spatial_axes = spatial_axes def __call__(self, img): - return scipy.ndimage.rotate(img, self.angle, self.axes, - reshape=self.reshape, order=self.order, mode=self.mode, cval=self.cval, - prefilter=self.prefilter) + """ + Args: + img (ndarray): channel first array, must have shape: (num_channels, H[, W, ..., ]), + """ + rotated = list() + for channel in img: + rotated.append( + scipy.ndimage.rotate(channel, self.angle, self.spatial_axes, reshape=self.reshape, + order=self.order, mode=self.mode, cval=self.cval, prefilter=self.prefilter) + ) + return np.stack(rotated).astype(np.float32) @export @@ -400,7 +445,7 @@ def __call__(self, img): Args: img (ndarray): channel first array, must have shape: (num_channels, H[, W, ..., ]), """ - zoomed = [] + zoomed = list() if self.use_gpu: import cupy for channel in cupy.array(img): @@ -420,7 +465,7 @@ def __call__(self, img): mode=self.mode, cval=self.cval, prefilter=self.prefilter)) - zoomed = np.stack(zoomed) + zoomed = np.stack(zoomed).astype(np.float32) if not self.keep_size or np.allclose(img.shape, zoomed.shape): return zoomed @@ -452,10 +497,13 @@ def __call__(self, img): class UniformRandomPatch(Randomizable): """ Selects a patch of the given size chosen at a uniformly random position in the image. + + Args: + patch_spatial_size (tuple or list): Expected patch size of spatial dimensions. 
""" - def __init__(self, patch_size): - self.patch_size = (None,) + tuple(patch_size) + def __init__(self, patch_spatial_size): + self.patch_spatial_size = (None,) + tuple(patch_spatial_size) self._slices = None @@ -463,8 +511,8 @@ def randomize(self, image_shape, patch_shape): self._slices = get_random_patch(image_shape, patch_shape, self.R) def __call__(self, img): - patch_size = get_valid_patch_size(img.shape, self.patch_size) - self.randomize(img.shape, patch_size) + patch_spatial_size = get_valid_patch_size(img.shape, self.patch_spatial_size) + self.randomize(img.shape, patch_spatial_size) return img[self._slices] @@ -478,16 +526,14 @@ class IntensityNormalizer: Args: subtrahend (ndarray): the amount to subtract by (usually the mean) divisor (ndarray): the amount to divide by (usually the standard deviation) - dtype: output data format """ - def __init__(self, subtrahend=None, divisor=None, dtype=np.float32): + def __init__(self, subtrahend=None, divisor=None): if subtrahend is not None or divisor is not None: assert isinstance(subtrahend, np.ndarray) and isinstance(divisor, np.ndarray), \ 'subtrahend and divisor must be set in pair and in numpy array.' self.subtrahend = subtrahend self.divisor = divisor - self.dtype = dtype def __call__(self, img): if self.subtrahend is not None and self.divisor is not None: @@ -497,8 +543,6 @@ def __call__(self, img): img -= np.mean(img) img /= np.std(img) - if self.dtype != img.dtype: - img = img.astype(self.dtype) return img @@ -511,15 +555,13 @@ class ImageEndPadder: Args: out_size (list): the size of region of interest at the end of the operation. mode (string): a portion from numpy.lib.arraypad.pad is copied below. - dtype: output data format. 
""" - def __init__(self, out_size, mode, dtype=np.float32): - assert out_size is not None and isinstance(out_size, (list, tuple)), 'out_size must be list or tuple' + def __init__(self, out_size, mode): + assert out_size is not None and isinstance(out_size, (list, tuple)), 'out_size must be list or tuple.' self.out_size = out_size - assert isinstance(mode, str), 'mode must be str' + assert isinstance(mode, str), 'mode must be str.' self.mode = mode - self.dtype = dtype def _determine_data_pad_width(self, data_shape): return [(0, max(self.out_size[i] - data_shape[i], 0)) for i in range(len(self.out_size))] @@ -537,39 +579,49 @@ class Rotate90: Rotate an array by 90 degrees in the plane specified by `axes`. """ - def __init__(self, k=1, axes=(1, 2)): + def __init__(self, k=1, spatial_axes=(0, 1)): """ Args: k (int): number of times to rotate by 90 degrees. - axes (2 ints): defines the plane to rotate with 2 axes. + spatial_axes (2 ints): defines the plane to rotate with 2 spatial axes. + Default: (0, 1), this is the first two axis in spatial dimensions. """ self.k = k - self.plane_axes = axes + self.spatial_axes = spatial_axes def __call__(self, img): - return np.ascontiguousarray(np.rot90(img, self.k, self.plane_axes)) + """ + Args: + img (ndarray): channel first array, must have shape: (num_channels, H[, W, ..., ]), + """ + rotated = list() + for channel in img: + rotated.append( + np.rot90(channel, self.k, self.spatial_axes) + ) + return np.stack(rotated) @export class RandRotate90(Randomizable): """ With probability `prob`, input arrays are rotated by 90 degrees - in the plane specified by `axes`. + in the plane specified by `spatial_axes`. """ - def __init__(self, prob=0.1, max_k=3, axes=(1, 2)): + def __init__(self, prob=0.1, max_k=3, spatial_axes=(0, 1)): """ Args: prob (float): probability of rotating. (Default 0.1, with 10% probability it returns a rotated array) max_k (int): number of rotations will be sampled from `np.random.randint(max_k) + 1`. 
(Default 3) - axes (2 ints): defines the plane to rotate with 2 axes. - (Default (1, 2)) + spatial_axes (2 ints): defines the plane to rotate with 2 spatial axes. + Default: (0, 1), this is the first two axis in spatial dimensions. """ self.prob = min(max(prob, 0.0), 1.0) self.max_k = max_k - self.axes = axes + self.spatial_axes = spatial_axes self._do_transform = False self._rand_k = 0 @@ -582,7 +634,7 @@ def __call__(self, img): self.randomize() if not self._do_transform: return img - rotator = Rotate90(self._rand_k, self.axes) + rotator = Rotate90(self._rand_k, self.spatial_axes) return rotator(img) @@ -590,7 +642,7 @@ def __call__(self, img): class SpatialCrop: """General purpose cropper to produce sub-volume region of interest (ROI). It can support to crop ND spatial (channel-first) data. - Either a center and size must be provided, or alternatively if center and size + Either a spatial center and size must be provided, or alternatively if center and size are not provided, the start and end coordinates of the ROI must be provided. The sub-volume must sit the within original image. @@ -638,8 +690,8 @@ class RandRotate(Randomizable): prob (float): Probability of rotation. degrees (tuple of float or float): Range of rotation in degrees. If single number, angle is picked from (-degrees, degrees). - axes (tuple of 2 ints): Axes of rotation. Default: (1, 2). This is the first two - axis in spatial dimensions according to MONAI channel first shape assumption. + spatial_axes (tuple of 2 ints): Spatial axes of rotation. Default: (0, 1). + This is the first two axis in spatial dimensions. reshape (bool): If true, output shape is made same as input. Default: True. order (int): Order of spline interpolation. Range 0-5. Default: 1. This is different from scipy where default interpolation is 3. @@ -649,7 +701,7 @@ class RandRotate(Randomizable): prefiter (bool): Apply spline_filter before interpolation. Default: True. 
""" - def __init__(self, degrees, prob=0.1, axes=(1, 2), reshape=True, order=1, + def __init__(self, degrees, prob=0.1, spatial_axes=(0, 1), reshape=True, order=1, mode='constant', cval=0, prefilter=True): self.prob = prob self.degrees = degrees @@ -658,7 +710,7 @@ def __init__(self, degrees, prob=0.1, axes=(1, 2), reshape=True, order=1, self.mode = mode self.cval = cval self.prefilter = prefilter - self.axes = axes + self.spatial_axes = spatial_axes if not hasattr(self.degrees, '__iter__'): self.degrees = (-self.degrees, self.degrees) @@ -675,7 +727,7 @@ def __call__(self, img): self.randomize() if not self._do_transform: return img - rotator = Rotate(self.angle, self.axes, self.reshape, self.order, + rotator = Rotate(self.angle, self.spatial_axes, self.reshape, self.order, self.mode, self.cval, self.prefilter) return rotator(img) @@ -688,15 +740,13 @@ class RandFlip(Randomizable): Args: prob (float): Probability of flipping. - axis (None, int or tuple of ints): Axes along which to flip over. Default is None. + spatial_axis (None, int or tuple of ints): Spatial axes along which to flip over. Default is None. """ - def __init__(self, prob=0.1, axis=None): + def __init__(self, prob=0.1, spatial_axis=None): self.prob = prob - self.flipper = Flip(axis=axis) - + self.flipper = Flip(spatial_axis=spatial_axis) self._do_transform = False - self.flipper = Flip(axis=axis) def randomize(self): self._do_transform = self.R.random_sample() < self.prob @@ -715,7 +765,11 @@ class RandZoom(Randomizable): Args: prob (float): Probability of zooming. min_zoom (float or sequence): Min zoom factor. Can be float or sequence same size as image. + If a float, min_zoom is the same for each spatial axis. + If a sequence, min_zoom should contain one value for each spatial axis. max_zoom (float or sequence): Max zoom factor. Can be float or sequence same size as image. + If a float, max_zoom is the same for each spatial axis. 
+ If a sequence, max_zoom should contain one value for each spatial axis. order (int): order of interpolation. Default=3. mode ('reflect', 'constant', 'nearest', 'mirror', 'wrap'): Determines how input is extended beyond boundaries. Default: 'constant'. @@ -985,7 +1039,7 @@ def __init__(self, Args: rotate_params (float, list of floats): a rotation angle in radians, - a scalar for 2D image, a tuple of 2 floats for 3D. Defaults to no rotation. + a scalar for 2D image, a tuple of 3 floats for 3D. Defaults to no rotation. shear_params (list of floats): a tuple of 2 floats for 2D, a tuple of 6 floats for 3D. Defaults to no shearing. translate_params (list of floats): diff --git a/tests/integration_sliding_window.py b/tests/integration_sliding_window.py index 31c7a1248a..b99bb5c681 100644 --- a/tests/integration_sliding_window.py +++ b/tests/integration_sliding_window.py @@ -40,7 +40,7 @@ def run_test(batch_size=2, device=torch.device("cpu:0")): net = UNet( dimensions=3, in_channels=1, - num_classes=1, + out_channels=1, channels=(4, 8, 16, 32), strides=(2, 2, 2), num_res_units=2, @@ -52,7 +52,7 @@ def _sliding_window_processor(_engine, batch): net.eval() img, seg, meta_data = batch with torch.no_grad(): - seg_probs = sliding_window_inference(img, roi_size, sw_batch_size, lambda x: net(x)[0], device) + seg_probs = sliding_window_inference(img, roi_size, sw_batch_size, net, device) return predict_segmentation(seg_probs) infer_engine = Engine(_sliding_window_processor) diff --git a/tests/integration_unet2d.py b/tests/integration_unet2d.py index 7b0f116b77..d437258c10 100644 --- a/tests/integration_unet2d.py +++ b/tests/integration_unet2d.py @@ -35,7 +35,7 @@ def __len__(self): net = UNet( dimensions=2, in_channels=1, - num_classes=1, + out_channels=1, channels=(4, 8, 16, 32), strides=(2, 2, 2), num_res_units=2, @@ -45,10 +45,7 @@ def __len__(self): opt = torch.optim.Adam(net.parameters(), 1e-4) src = DataLoader(_TestBatch(), batch_size=batch_size) - def loss_fn(pred, grnd): 
- return loss(pred[0], grnd) - - trainer = create_supervised_trainer(net, opt, loss_fn, device, False) + trainer = create_supervised_trainer(net, opt, loss, device, False) trainer.run(src, 1) loss = trainer.state.output diff --git a/tests/test_dice_loss.py b/tests/test_dice_loss.py index c5640a5660..e937185f91 100644 --- a/tests/test_dice_loss.py +++ b/tests/test_dice_loss.py @@ -82,7 +82,7 @@ TEST_CASE_6 = [ # shape: (1, 1, 2, 2), (1, 1, 2, 2) { - 'include_background': False, + 'include_background': True, 'do_sigmoid': True, }, { diff --git a/tests/test_flip.py b/tests/test_flip.py index a261c315e2..050d66d8db 100644 --- a/tests/test_flip.py +++ b/tests/test_flip.py @@ -14,7 +14,7 @@ import numpy as np from parameterized import parameterized -from monai.transforms import Flip, Flipd +from monai.transforms import Flip from tests.utils import NumpyImageTestCase2D INVALID_CASES = [("wrong_axis", ['s', 1], TypeError), @@ -22,35 +22,25 @@ VALID_CASES = [("no_axis", None), ("one_axis", 1), - ("many_axis", [0, 1, 2])] + ("many_axis", [0, 1])] -class FlipTest(NumpyImageTestCase2D): +class TestFlip(NumpyImageTestCase2D): @parameterized.expand(INVALID_CASES) - def test_invalid_inputs(self, _, axis, raises): + def test_invalid_inputs(self, _, spatial_axis, raises): with self.assertRaises(raises): - flip = Flip(axis) - flip(self.imt) - - @parameterized.expand(INVALID_CASES) - def test_invalid_cases_dict(self, _, axis, raises): - with self.assertRaises(raises): - flip = Flipd(keys='img', axis=axis) - flip({'img': self.imt}) - - @parameterized.expand(VALID_CASES) - def test_correct_results(self, _, axis): - flip = Flip(axis=axis) - expected = np.flip(self.imt, axis) - self.assertTrue(np.allclose(expected, flip(self.imt))) + flip = Flip(spatial_axis) + flip(self.imt[0]) @parameterized.expand(VALID_CASES) - def test_correct_results_dict(self, _, axis): - flip = Flipd(keys='img', axis=axis) - expected = np.flip(self.imt, axis) - res = flip({'img': self.imt}) - assert 
np.allclose(expected, res['img']) + def test_correct_results(self, _, spatial_axis): + flip = Flip(spatial_axis=spatial_axis) + expected = list() + for channel in self.imt[0]: + expected.append(np.flip(channel, spatial_axis)) + expected = np.stack(expected) + self.assertTrue(np.allclose(expected, flip(self.imt[0]))) if __name__ == '__main__': diff --git a/tests/test_flipd.py b/tests/test_flipd.py new file mode 100644 index 0000000000..e2fcb6b915 --- /dev/null +++ b/tests/test_flipd.py @@ -0,0 +1,48 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest + +import numpy as np +from parameterized import parameterized + +from monai.transforms import Flipd +from tests.utils import NumpyImageTestCase2D + +INVALID_CASES = [("wrong_axis", ['s', 1], TypeError), + ("not_numbers", 's', TypeError)] + +VALID_CASES = [("no_axis", None), + ("one_axis", 1), + ("many_axis", [0, 1])] + + +class TestFlipd(NumpyImageTestCase2D): + + @parameterized.expand(INVALID_CASES) + def test_invalid_cases(self, _, spatial_axis, raises): + with self.assertRaises(raises): + flip = Flipd(keys='img', spatial_axis=spatial_axis) + flip({'img': self.imt[0]}) + + @parameterized.expand(VALID_CASES) + def test_correct_results(self, _, spatial_axis): + flip = Flipd(keys='img', spatial_axis=spatial_axis) + expected = list() + for channel in self.imt[0]: + expected.append(np.flip(channel, spatial_axis)) + expected = np.stack(expected) + res = flip({'img': self.imt[0]}) + assert np.allclose(expected, res['img']) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_generalized_dice_loss.py b/tests/test_generalized_dice_loss.py index e08ff1d296..b2ce96169e 100644 --- a/tests/test_generalized_dice_loss.py +++ b/tests/test_generalized_dice_loss.py @@ -94,7 +94,7 @@ TEST_CASE_6 = [ # shape: (1, 1, 2, 2), (1, 1, 2, 2) { - 'include_background': False, + 'include_background': True, 'do_sigmoid': True, }, { diff --git a/tests/test_handler_classification_saver.py b/tests/test_handler_classification_saver.py new file mode 100644 index 0000000000..3eea9d86ed --- /dev/null +++ b/tests/test_handler_classification_saver.py @@ -0,0 +1,55 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import csv +import shutil +import unittest +import numpy as np +import torch +from ignite.engine import Engine + +from monai.handlers.classification_saver import ClassificationSaver + + +class TestHandlerClassificationSaver(unittest.TestCase): + + def test_saved_content(self): + default_dir = os.path.join('.', 'tempdir') + shutil.rmtree(default_dir, ignore_errors=True) + + # set up engine + def _train_func(engine, batch): + return torch.zeros(8) + + engine = Engine(_train_func) + + # set up testing handler + saver = ClassificationSaver(output_dir=default_dir) + saver.attach(engine) + + data = [{'filename_or_obj': ['testfile' + str(i) for i in range(8)]}] + engine.run(data, epoch_length=2, max_epochs=1) + filepath = os.path.join(default_dir, 'predictions.csv') + self.assertTrue(os.path.exists(filepath)) + with open(filepath, 'r') as f: + reader = csv.reader(f) + i = 0 + for row in reader: + self.assertEqual(row[0], 'testfile' + str(i)) + self.assertEqual(np.array(row[1:]).astype(np.float32), 0.0) + i += 1 + self.assertEqual(i, 8) + shutil.rmtree(default_dir) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_random_affine.py b/tests/test_rand_affine.py similarity index 100% rename from tests/test_random_affine.py rename to tests/test_rand_affine.py diff --git a/tests/test_random_affine_grid.py b/tests/test_rand_affine_grid.py similarity index 100% rename from tests/test_random_affine_grid.py rename to tests/test_rand_affine_grid.py diff --git a/tests/test_random_affined.py b/tests/test_rand_affined.py similarity index 100% 
rename from tests/test_random_affined.py rename to tests/test_rand_affined.py diff --git a/tests/test_random_deform_grid.py b/tests/test_rand_deform_grid.py similarity index 100% rename from tests/test_random_deform_grid.py rename to tests/test_rand_deform_grid.py diff --git a/tests/test_random_elastic_2d.py b/tests/test_rand_elastic_2d.py similarity index 100% rename from tests/test_random_elastic_2d.py rename to tests/test_rand_elastic_2d.py diff --git a/tests/test_random_elastic_3d.py b/tests/test_rand_elastic_3d.py similarity index 100% rename from tests/test_random_elastic_3d.py rename to tests/test_rand_elastic_3d.py diff --git a/tests/test_random_elasticd_2d.py b/tests/test_rand_elasticd_2d.py similarity index 100% rename from tests/test_random_elasticd_2d.py rename to tests/test_rand_elasticd_2d.py diff --git a/tests/test_random_elasticd_3d.py b/tests/test_rand_elasticd_3d.py similarity index 100% rename from tests/test_random_elasticd_3d.py rename to tests/test_rand_elasticd_3d.py diff --git a/tests/test_rand_flip.py b/tests/test_rand_flip.py index be03ff5a28..1206c85571 100644 --- a/tests/test_rand_flip.py +++ b/tests/test_rand_flip.py @@ -14,7 +14,7 @@ import numpy as np from parameterized import parameterized -from monai.transforms import RandFlip, RandFlipd +from monai.transforms import RandFlip from tests.utils import NumpyImageTestCase2D INVALID_CASES = [("wrong_axis", ['s', 1], TypeError), @@ -22,29 +22,24 @@ VALID_CASES = [("no_axis", None), ("one_axis", 1), - ("many_axis", [0, 1, 2])] + ("many_axis", [0, 1])] -class RandFlipTest(NumpyImageTestCase2D): +class TestRandFlip(NumpyImageTestCase2D): @parameterized.expand(INVALID_CASES) - def test_invalid_inputs(self, _, axis, raises): + def test_invalid_inputs(self, _, spatial_axis, raises): with self.assertRaises(raises): - flip = RandFlip(prob=1.0, axis=axis) - flip(self.imt) + flip = RandFlip(prob=1.0, spatial_axis=spatial_axis) + flip(self.imt[0]) @parameterized.expand(VALID_CASES) - def 
test_correct_results(self, _, axis): - flip = RandFlip(prob=1.0, axis=axis) - expected = np.flip(self.imt, axis) - self.assertTrue(np.allclose(expected, flip(self.imt))) - - @parameterized.expand(VALID_CASES) - def test_correct_results_dict(self, _, axis): - flip = RandFlipd(keys='img', prob=1.0, axis=axis) - res = flip({'img': self.imt}) - - expected = np.flip(self.imt, axis) - self.assertTrue(np.allclose(expected, res['img'])) + def test_correct_results(self, _, spatial_axis): + flip = RandFlip(prob=1.0, spatial_axis=spatial_axis) + expected = list() + for channel in self.imt[0]: + expected.append(np.flip(channel, spatial_axis)) + expected = np.stack(expected) + self.assertTrue(np.allclose(expected, flip(self.imt[0]))) if __name__ == '__main__': diff --git a/tests/test_rand_flipd.py b/tests/test_rand_flipd.py new file mode 100644 index 0000000000..bcda54eecd --- /dev/null +++ b/tests/test_rand_flipd.py @@ -0,0 +1,38 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest + +import numpy as np +from parameterized import parameterized + +from monai.transforms import RandFlipd +from tests.utils import NumpyImageTestCase2D + +VALID_CASES = [("no_axis", None), + ("one_axis", 1), + ("many_axis", [0, 1])] + +class TestRandFlipd(NumpyImageTestCase2D): + + @parameterized.expand(VALID_CASES) + def test_correct_results(self, _, spatial_axis): + flip = RandFlipd(keys='img', prob=1.0, spatial_axis=spatial_axis) + res = flip({'img': self.imt[0]}) + expected = list() + for channel in self.imt[0]: + expected.append(np.flip(channel, spatial_axis)) + expected = np.stack(expected) + self.assertTrue(np.allclose(expected, res['img'])) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_rand_rotate.py b/tests/test_rand_rotate.py index 29036663af..1e5a18bfc8 100644 --- a/tests/test_rand_rotate.py +++ b/tests/test_rand_rotate.py @@ -19,23 +19,26 @@ from tests.utils import NumpyImageTestCase2D -class RandomRotateTest(NumpyImageTestCase2D): +class TestRandRotate(NumpyImageTestCase2D): @parameterized.expand([ - (90, (1, 2), True, 1, 'reflect', 0, True), - ((-45, 45), (2, 1), True, 3, 'constant', 0, True), - (180, (2, 3), False, 2, 'constant', 4, False), + (90, (0, 1), True, 1, 'reflect', 0, True), + ((-45, 45), (1, 0), True, 3, 'constant', 0, True), + (180, (1, 0), False, 2, 'constant', 4, False), ]) - def test_correct_results(self, degrees, axes, reshape, + def test_correct_results(self, degrees, spatial_axes, reshape, order, mode, cval, prefilter): - rotate_fn = RandRotate(degrees, prob=1.0, axes=axes, reshape=reshape, + rotate_fn = RandRotate(degrees, prob=1.0, spatial_axes=spatial_axes, reshape=reshape, order=order, mode=mode, cval=cval, prefilter=prefilter) rotate_fn.set_random_state(243) - rotated = rotate_fn(self.imt) + rotated = rotate_fn(self.imt[0]) angle = rotate_fn.angle - expected = scipy.ndimage.rotate(self.imt, angle, axes, reshape, order=order, - mode=mode, cval=cval, prefilter=prefilter) + expected = 
list() + for channel in self.imt[0]: + expected.append(scipy.ndimage.rotate(channel, angle, spatial_axes, reshape, order=order, + mode=mode, cval=cval, prefilter=prefilter)) + expected = np.stack(expected).astype(np.float32) self.assertTrue(np.allclose(expected, rotated)) diff --git a/tests/test_rand_rotate90.py b/tests/test_rand_rotate90.py index 4b291d8cf0..e50c3e0c67 100644 --- a/tests/test_rand_rotate90.py +++ b/tests/test_rand_rotate90.py @@ -17,34 +17,46 @@ from tests.utils import NumpyImageTestCase2D -class Rotate90Test(NumpyImageTestCase2D): +class TestRandRotate90(NumpyImageTestCase2D): def test_default(self): rotate = RandRotate90() rotate.set_random_state(123) - rotated = rotate(self.imt) - expected = np.rot90(self.imt, 0, (1, 2)) + rotated = rotate(self.imt[0]) + expected = list() + for channel in self.imt[0]: + expected.append(np.rot90(channel, 0, (0, 1))) + expected = np.stack(expected) self.assertTrue(np.allclose(rotated, expected)) def test_k(self): rotate = RandRotate90(max_k=2) rotate.set_random_state(234) - rotated = rotate(self.imt) - expected = np.rot90(self.imt, 0, (1, 2)) + rotated = rotate(self.imt[0]) + expected = list() + for channel in self.imt[0]: + expected.append(np.rot90(channel, 0, (0, 1))) + expected = np.stack(expected) self.assertTrue(np.allclose(rotated, expected)) - def test_axes(self): - rotate = RandRotate90(axes=(1, 2)) + def test_spatial_axes(self): + rotate = RandRotate90(spatial_axes=(0, 1)) rotate.set_random_state(234) - rotated = rotate(self.imt) - expected = np.rot90(self.imt, 0, (1, 2)) + rotated = rotate(self.imt[0]) + expected = list() + for channel in self.imt[0]: + expected.append(np.rot90(channel, 0, (0, 1))) + expected = np.stack(expected) self.assertTrue(np.allclose(rotated, expected)) - def test_prob_k_axes(self): - rotate = RandRotate90(prob=1.0, max_k=2, axes=(2, 3)) + def test_prob_k_spatial_axes(self): + rotate = RandRotate90(prob=1.0, max_k=2, spatial_axes=(0, 1)) rotate.set_random_state(234) - rotated = 
rotate(self.imt) - expected = np.rot90(self.imt, 1, (2, 3)) + rotated = rotate(self.imt[0]) + expected = list() + for channel in self.imt[0]: + expected.append(np.rot90(channel, 1, (0, 1))) + expected = np.stack(expected) self.assertTrue(np.allclose(rotated, expected)) diff --git a/tests/test_rand_rotate90d.py b/tests/test_rand_rotate90d.py index c52a82389f..193627fef1 100644 --- a/tests/test_rand_rotate90d.py +++ b/tests/test_rand_rotate90d.py @@ -17,45 +17,57 @@ from tests.utils import NumpyImageTestCase2D -class Rotate90Test(NumpyImageTestCase2D): +class TestRandRotate90d(NumpyImageTestCase2D): def test_default(self): key = None rotate = RandRotate90d(keys=key) rotate.set_random_state(123) - rotated = rotate({key: self.imt}) - expected = np.rot90(self.imt, 0, (1, 2)) + rotated = rotate({key: self.imt[0]}) + expected = list() + for channel in self.imt[0]: + expected.append(np.rot90(channel, 0, (0, 1))) + expected = np.stack(expected) self.assertTrue(np.allclose(rotated[key], expected)) def test_k(self): key = 'test' rotate = RandRotate90d(keys=key, max_k=2) rotate.set_random_state(234) - rotated = rotate({key: self.imt}) - expected = np.rot90(self.imt, 0, (1, 2)) + rotated = rotate({key: self.imt[0]}) + expected = list() + for channel in self.imt[0]: + expected.append(np.rot90(channel, 0, (0, 1))) + expected = np.stack(expected) self.assertTrue(np.allclose(rotated[key], expected)) - def test_axes(self): - key = ['test'] - rotate = RandRotate90d(keys=key, axes=(1, 2)) + def test_spatial_axes(self): + key = 'test' + rotate = RandRotate90d(keys=key, spatial_axes=(0, 1)) rotate.set_random_state(234) - rotated = rotate({key[0]: self.imt}) - expected = np.rot90(self.imt, 0, (1, 2)) - self.assertTrue(np.allclose(rotated[key[0]], expected)) + rotated = rotate({key: self.imt[0]}) + expected = list() + for channel in self.imt[0]: + expected.append(np.rot90(channel, 0, (0, 1))) + expected = np.stack(expected) + self.assertTrue(np.allclose(rotated[key], expected)) - def 
test_prob_k_axes(self): - key = ('test',) - rotate = RandRotate90d(keys=key, prob=1.0, max_k=2, axes=(2, 3)) + def test_prob_k_spatial_axes(self): + key = 'test' + rotate = RandRotate90d(keys=key, prob=1.0, max_k=2, spatial_axes=(0, 1)) rotate.set_random_state(234) - rotated = rotate({key[0]: self.imt}) - expected = np.rot90(self.imt, 1, (2, 3)) - self.assertTrue(np.allclose(rotated[key[0]], expected)) + rotated = rotate({key: self.imt[0]}) + expected = list() + for channel in self.imt[0]: + expected.append(np.rot90(channel, 1, (0, 1))) + expected = np.stack(expected) + self.assertTrue(np.allclose(rotated[key], expected)) def test_no_key(self): key = 'unknown' - rotate = RandRotate90d(keys=key, prob=1.0, max_k=2, axes=(2, 3)) + rotate = RandRotate90d(keys=key, prob=1.0, max_k=2, spatial_axes=(0, 1)) with self.assertRaisesRegex(KeyError, ''): - rotated = rotate({'test': self.imt}) + rotated = rotate({'test': self.imt[0]}) if __name__ == '__main__': diff --git a/tests/test_rand_rotated.py b/tests/test_rand_rotated.py new file mode 100644 index 0000000000..1c9d98e83e --- /dev/null +++ b/tests/test_rand_rotated.py @@ -0,0 +1,46 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest +import numpy as np + +import scipy.ndimage +from parameterized import parameterized + +from monai.transforms import RandRotated +from tests.utils import NumpyImageTestCase2D + + +class TestRandRotated(NumpyImageTestCase2D): + + @parameterized.expand([ + (90, (0, 1), True, 1, 'reflect', 0, True), + ((-45, 45), (1, 0), True, 3, 'constant', 0, True), + (180, (1, 0), False, 2, 'constant', 4, False), + ]) + def test_correct_results(self, degrees, spatial_axes, reshape, + order, mode, cval, prefilter): + rotate_fn = RandRotated('img', degrees, prob=1.0, spatial_axes=spatial_axes, reshape=reshape, + order=order, mode=mode, cval=cval, prefilter=prefilter) + rotate_fn.set_random_state(243) + rotated = rotate_fn({'img': self.imt[0]}) + + angle = rotate_fn.angle + expected = list() + for channel in self.imt[0]: + expected.append(scipy.ndimage.rotate(channel, angle, spatial_axes, reshape, order=order, + mode=mode, cval=cval, prefilter=prefilter)) + expected = np.stack(expected).astype(np.float32) + self.assertTrue(np.allclose(expected, rotated['img'])) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_rand_zoom.py b/tests/test_rand_zoom.py index 530504b887..7dfdb7a522 100644 --- a/tests/test_rand_zoom.py +++ b/tests/test_rand_zoom.py @@ -17,12 +17,12 @@ from scipy.ndimage import zoom as zoom_scipy from parameterized import parameterized -from monai.transforms import RandZoom, RandZoomd +from monai.transforms import RandZoom from tests.utils import NumpyImageTestCase2D VALID_CASES = [(0.9, 1.1, 3, 'constant', 0, True, False, False)] -class ZoomTest(NumpyImageTestCase2D): +class TestRandZoom(NumpyImageTestCase2D): @parameterized.expand(VALID_CASES) def test_correct_results(self, min_zoom, max_zoom, order, mode, @@ -31,28 +31,14 @@ def test_correct_results(self, min_zoom, max_zoom, order, mode, mode=mode, cval=cval, prefilter=prefilter, use_gpu=use_gpu, keep_size=keep_size) random_zoom.set_random_state(234) - - zoomed = 
random_zoom(self.imt) - expected = zoom_scipy(self.imt, zoom=random_zoom._zoom, mode=mode, - order=order, cval=cval, prefilter=prefilter) - + zoomed = random_zoom(self.imt[0]) + expected = list() + for channel in self.imt[0]: + expected.append(zoom_scipy(channel, zoom=random_zoom._zoom, mode=mode, order=order, + cval=cval, prefilter=prefilter)) + expected = np.stack(expected).astype(np.float32) self.assertTrue(np.allclose(expected, zoomed)) - @parameterized.expand(VALID_CASES) - def test_correct_results_dict(self, min_zoom, max_zoom, order, mode, - cval, prefilter, use_gpu, keep_size): - keys = 'img' - random_zoom = RandZoomd(keys, prob=1.0, min_zoom=min_zoom, max_zoom=max_zoom, order=order, - mode=mode, cval=cval, prefilter=prefilter, use_gpu=use_gpu, - keep_size=keep_size) - random_zoom.set_random_state(234) - - zoomed = random_zoom({keys: self.imt}) - expected = zoom_scipy(self.imt, zoom=random_zoom._zoom, mode=mode, - order=order, cval=cval, prefilter=prefilter) - - self.assertTrue(np.allclose(expected, zoomed[keys])) - @parameterized.expand([ (0.8, 1.2, 1, 'constant', 0, True) ]) @@ -64,17 +50,20 @@ def test_gpu_zoom(self, min_zoom, max_zoom, order, mode, cval, prefilter): keep_size=False) random_zoom.set_random_state(234) - zoomed = random_zoom(self.imt) - expected = zoom_scipy(self.imt, zoom=random_zoom._zoom, mode=mode, order=order, - cval=cval, prefilter=prefilter) + zoomed = random_zoom(self.imt[0]) + expected = list() + for channel in self.imt[0]: + expected.append(zoom_scipy(channel, zoom=random_zoom._zoom, mode=mode, order=order, + cval=cval, prefilter=prefilter)) + expected = np.stack(expected).astype(np.float32) self.assertTrue(np.allclose(expected, zoomed)) def test_keep_size(self): random_zoom = RandZoom(prob=1.0, min_zoom=0.6, max_zoom=0.7, keep_size=True) - zoomed = random_zoom(self.imt) - self.assertTrue(np.array_equal(zoomed.shape, self.imt.shape)) + zoomed = random_zoom(self.imt[0]) + self.assertTrue(np.array_equal(zoomed.shape, 
self.imt.shape[1:])) @parameterized.expand([ ("no_min_zoom", None, 1.1, 1, TypeError), @@ -83,7 +72,7 @@ def test_keep_size(self): def test_invalid_inputs(self, _, min_zoom, max_zoom, order, raises): with self.assertRaises(raises): random_zoom = RandZoom(prob=1.0, min_zoom=min_zoom, max_zoom=max_zoom, order=order) - zoomed = random_zoom(self.imt) + zoomed = random_zoom(self.imt[0]) if __name__ == '__main__': diff --git a/tests/test_rand_zoomd.py b/tests/test_rand_zoomd.py new file mode 100644 index 0000000000..9a5838da4b --- /dev/null +++ b/tests/test_rand_zoomd.py @@ -0,0 +1,83 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest + +import numpy as np +import importlib + +from scipy.ndimage import zoom as zoom_scipy +from parameterized import parameterized + +from monai.transforms import RandZoomd +from tests.utils import NumpyImageTestCase2D + +VALID_CASES = [(0.9, 1.1, 3, 'constant', 0, True, False, False)] + +class TestRandZoomd(NumpyImageTestCase2D): + + @parameterized.expand(VALID_CASES) + def test_correct_results(self, min_zoom, max_zoom, order, mode, + cval, prefilter, use_gpu, keep_size): + key = 'img' + random_zoom = RandZoomd(key, prob=1.0, min_zoom=min_zoom, max_zoom=max_zoom, order=order, + mode=mode, cval=cval, prefilter=prefilter, use_gpu=use_gpu, + keep_size=keep_size) + random_zoom.set_random_state(234) + + zoomed = random_zoom({key: self.imt[0]}) + expected = list() + for channel in self.imt[0]: + expected.append(zoom_scipy(channel, zoom=random_zoom._zoom, mode=mode, order=order, + cval=cval, prefilter=prefilter)) + expected = np.stack(expected).astype(np.float32) + self.assertTrue(np.allclose(expected, zoomed[key])) + + @parameterized.expand([ + (0.8, 1.2, 1, 'constant', 0, True) + ]) + def test_gpu_zoom(self, min_zoom, max_zoom, order, mode, cval, prefilter): + key = 'img' + if importlib.util.find_spec('cupy'): + random_zoom = RandZoomd( + key, prob=1.0, min_zoom=min_zoom, max_zoom=max_zoom, order=order, + mode=mode, cval=cval, prefilter=prefilter, use_gpu=True, + keep_size=False) + random_zoom.set_random_state(234) + + zoomed = random_zoom({key: self.imt[0]}) + expected = list() + for channel in self.imt[0]: + expected.append(zoom_scipy(channel, zoom=random_zoom._zoom, mode=mode, order=order, + cval=cval, prefilter=prefilter)) + expected = np.stack(expected).astype(np.float32) + self.assertTrue(np.allclose(expected, zoomed)) + + def test_keep_size(self): + key = 'img' + random_zoom = RandZoomd(key, prob=1.0, min_zoom=0.6, + max_zoom=0.7, keep_size=True) + zoomed = random_zoom({key: self.imt[0]}) + self.assertTrue(np.array_equal(zoomed[key].shape, 
self.imt.shape[1:])) + + @parameterized.expand([ + ("no_min_zoom", None, 1.1, 1, TypeError), + ("invalid_order", 0.9, 1.1 , 's', AssertionError) + ]) + def test_invalid_inputs(self, _, min_zoom, max_zoom, order, raises): + key = 'img' + with self.assertRaises(raises): + random_zoom = RandZoomd(key, prob=1.0, min_zoom=min_zoom, max_zoom=max_zoom, order=order) + zoomed = random_zoom({key: self.imt[0]}) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_resize.py b/tests/test_resize.py index 7feaf9f634..30f8101baa 100644 --- a/tests/test_resize.py +++ b/tests/test_resize.py @@ -19,34 +19,37 @@ from tests.utils import NumpyImageTestCase2D -class ResizeTest(NumpyImageTestCase2D): +class TestResize(NumpyImageTestCase2D): @parameterized.expand([ ("invalid_order", "order", AssertionError) ]) def test_invalid_inputs(self, _, order, raises): with self.assertRaises(raises): - resize = Resize(output_shape=(128, 128, 3), order=order) - resize(self.imt) + resize = Resize(output_spatial_shape=(128, 128, 3), order=order) + resize(self.imt[0]) @parameterized.expand([ - ((1, 1, 64, 64), 1, 'reflect', 0, True, True, True, None), - ((1, 1, 32, 32), 2, 'constant', 3, False, False, False, None), - ((1, 1, 256, 256), 3, 'constant', 3, False, False, False, None), + ((64, 64), 1, 'reflect', 0, True, True, True, None), + ((32, 32), 2, 'constant', 3, False, False, False, None), + ((256, 256), 3, 'constant', 3, False, False, False, None), ]) - def test_correct_results(self, output_shape, order, mode, + def test_correct_results(self, output_spatial_shape, order, mode, cval, clip, preserve_range, anti_aliasing, anti_aliasing_sigma): - resize = Resize(output_shape, order, mode, cval, clip, + resize = Resize(output_spatial_shape, order, mode, cval, clip, preserve_range, anti_aliasing, anti_aliasing_sigma) - expected = skimage.transform.resize(self.imt, output_shape, - order=order, mode=mode, - cval=cval, clip=clip, - preserve_range=preserve_range, - 
anti_aliasing=anti_aliasing, - anti_aliasing_sigma=anti_aliasing_sigma) - self.assertTrue(np.allclose(resize(self.imt), expected)) + expected = list() + for channel in self.imt[0]: + expected.append(skimage.transform.resize(channel, output_spatial_shape, + order=order, mode=mode, + cval=cval, clip=clip, + preserve_range=preserve_range, + anti_aliasing=anti_aliasing, + anti_aliasing_sigma=anti_aliasing_sigma)) + expected = np.stack(expected).astype(np.float32) + self.assertTrue(np.allclose(resize(self.imt[0]), expected)) if __name__ == '__main__': diff --git a/tests/test_resized.py b/tests/test_resized.py new file mode 100644 index 0000000000..d7830d3e1d --- /dev/null +++ b/tests/test_resized.py @@ -0,0 +1,56 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest + +import numpy as np +import skimage +from parameterized import parameterized + +from monai.transforms import Resized +from tests.utils import NumpyImageTestCase2D + + +class TestResized(NumpyImageTestCase2D): + + @parameterized.expand([ + ("invalid_order", "order", AssertionError) + ]) + def test_invalid_inputs(self, _, order, raises): + with self.assertRaises(raises): + resize = Resized(keys='img', output_spatial_shape=(128, 128, 3), order=order) + resize({'img': self.imt[0]}) + + @parameterized.expand([ + ((64, 64), 1, 'reflect', 0, True, True, True, None), + ((32, 32), 2, 'constant', 3, False, False, False, None), + ((256, 256), 3, 'constant', 3, False, False, False, None), + ]) + def test_correct_results(self, output_spatial_shape, order, mode, + cval, clip, preserve_range, + anti_aliasing, anti_aliasing_sigma): + resize = Resized('img', output_spatial_shape, order, mode, cval, clip, + preserve_range, anti_aliasing, + anti_aliasing_sigma) + expected = list() + for channel in self.imt[0]: + expected.append(skimage.transform.resize(channel, output_spatial_shape, + order=order, mode=mode, + cval=cval, clip=clip, + preserve_range=preserve_range, + anti_aliasing=anti_aliasing, + anti_aliasing_sigma=anti_aliasing_sigma)) + expected = np.stack(expected).astype(np.float32) + self.assertTrue(np.allclose(resize({'img': self.imt[0]})['img'], expected)) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_rotate.py b/tests/test_rotate.py index 0c34f5809e..7d6d1b531b 100644 --- a/tests/test_rotate.py +++ b/tests/test_rotate.py @@ -15,38 +15,27 @@ import scipy.ndimage from parameterized import parameterized -from monai.transforms import Rotate, Rotated +from monai.transforms import Rotate from tests.utils import NumpyImageTestCase2D -TEST_CASES = [(90, (1, 2), True, 1, 'reflect', 0, True), - (-90, (2, 1), True, 3, 'constant', 0, True), - (180, (2, 3), False, 2, 'constant', 4, False)] +TEST_CASES = [(90, (0, 1), True, 1, 'reflect', 0, 
True), + (-90, (1, 0), True, 3, 'constant', 0, True), + (180, (1, 0), False, 2, 'constant', 4, False)] -class RotateTest(NumpyImageTestCase2D): +class TestRotate(NumpyImageTestCase2D): @parameterized.expand(TEST_CASES) - def test_correct_results(self, angle, axes, reshape, + def test_correct_results(self, angle, spatial_axes, reshape, order, mode, cval, prefilter): - rotate_fn = Rotate(angle, axes, reshape, + rotate_fn = Rotate(angle, spatial_axes, reshape, order, mode, cval, prefilter) - rotated = rotate_fn(self.imt) - - expected = scipy.ndimage.rotate(self.imt, angle, axes, reshape, order=order, - mode=mode, cval=cval, prefilter=prefilter) + rotated = rotate_fn(self.imt[0]) + expected = list() + for channel in self.imt[0]: + expected.append(scipy.ndimage.rotate(channel, angle, spatial_axes, reshape, order=order, + mode=mode, cval=cval, prefilter=prefilter)) + expected = np.stack(expected).astype(np.float32) self.assertTrue(np.allclose(expected, rotated)) - @parameterized.expand(TEST_CASES) - def test_correct_results_dict(self, angle, axes, reshape, - order, mode, cval, prefilter): - key = 'img' - rotate_fn = Rotated(key, angle, axes, reshape, order, - mode, cval, prefilter) - rotated = rotate_fn({key: self.imt}) - - expected = scipy.ndimage.rotate(self.imt, angle, axes, reshape, order=order, - mode=mode, cval=cval, prefilter=prefilter) - self.assertTrue(np.allclose(expected, rotated[key])) - - if __name__ == '__main__': unittest.main() diff --git a/tests/test_rotate90.py b/tests/test_rotate90.py index 1b1aca78df..990e489cd9 100644 --- a/tests/test_rotate90.py +++ b/tests/test_rotate90.py @@ -17,30 +17,42 @@ from tests.utils import NumpyImageTestCase2D -class Rotate90Test(NumpyImageTestCase2D): +class TestRotate90(NumpyImageTestCase2D): def test_rotate90_default(self): rotate = Rotate90() - rotated = rotate(self.imt) - expected = np.rot90(self.imt, 1, (1, 2)) + rotated = rotate(self.imt[0]) + expected = list() + for channel in self.imt[0]: + 
expected.append(np.rot90(channel, 1, (0, 1))) + expected = np.stack(expected) self.assertTrue(np.allclose(rotated, expected)) def test_k(self): rotate = Rotate90(k=2) - rotated = rotate(self.imt) - expected = np.rot90(self.imt, 2, (1, 2)) + rotated = rotate(self.imt[0]) + expected = list() + for channel in self.imt[0]: + expected.append(np.rot90(channel, 2, (0, 1))) + expected = np.stack(expected) self.assertTrue(np.allclose(rotated, expected)) - def test_axes(self): - rotate = Rotate90(axes=(1, 2)) - rotated = rotate(self.imt) - expected = np.rot90(self.imt, 1, (1, 2)) + def test_spatial_axes(self): + rotate = Rotate90(spatial_axes=(0, 1)) + rotated = rotate(self.imt[0]) + expected = list() + for channel in self.imt[0]: + expected.append(np.rot90(channel, 1, (0, 1))) + expected = np.stack(expected) self.assertTrue(np.allclose(rotated, expected)) - def test_k_axes(self): - rotate = Rotate90(k=2, axes=(2, 3)) - rotated = rotate(self.imt) - expected = np.rot90(self.imt, 2, (2, 3)) + def test_prob_k_spatial_axes(self): + rotate = Rotate90(k=2, spatial_axes=(0, 1)) + rotated = rotate(self.imt[0]) + expected = list() + for channel in self.imt[0]: + expected.append(np.rot90(channel, 2, (0, 1))) + expected = np.stack(expected) self.assertTrue(np.allclose(rotated, expected)) diff --git a/tests/test_rotate90d.py b/tests/test_rotate90d.py index ccfb2380f0..4b54a9a296 100644 --- a/tests/test_rotate90d.py +++ b/tests/test_rotate90d.py @@ -17,41 +17,53 @@ from tests.utils import NumpyImageTestCase2D -class Rotate90Test(NumpyImageTestCase2D): +class TestRotate90d(NumpyImageTestCase2D): def test_rotate90_default(self): key = 'test' rotate = Rotate90d(keys=key) - rotated = rotate({key: self.imt}) - expected = np.rot90(self.imt, 1, (1, 2)) + rotated = rotate({key: self.imt[0]}) + expected = list() + for channel in self.imt[0]: + expected.append(np.rot90(channel, 1, (0, 1))) + expected = np.stack(expected) self.assertTrue(np.allclose(rotated[key], expected)) def test_k(self): key = 
None rotate = Rotate90d(keys=key, k=2) - rotated = rotate({key: self.imt}) - expected = np.rot90(self.imt, 2, (1, 2)) + rotated = rotate({key: self.imt[0]}) + expected = list() + for channel in self.imt[0]: + expected.append(np.rot90(channel, 2, (0, 1))) + expected = np.stack(expected) self.assertTrue(np.allclose(rotated[key], expected)) - def test_axes(self): - key = ['test'] - rotate = Rotate90d(keys=key, axes=(1, 2)) - rotated = rotate({key[0]: self.imt}) - expected = np.rot90(self.imt, 1, (1, 2)) - self.assertTrue(np.allclose(rotated[key[0]], expected)) + def test_spatial_axes(self): + key = 'test' + rotate = Rotate90d(keys=key, spatial_axes=(0, 1)) + rotated = rotate({key: self.imt[0]}) + expected = list() + for channel in self.imt[0]: + expected.append(np.rot90(channel, 1, (0, 1))) + expected = np.stack(expected) + self.assertTrue(np.allclose(rotated[key], expected)) - def test_k_axes(self): - key = ('test',) - rotate = Rotate90d(keys=key, k=2, axes=(2, 3)) - rotated = rotate({key[0]: self.imt}) - expected = np.rot90(self.imt, 2, (2, 3)) - self.assertTrue(np.allclose(rotated[key[0]], expected)) + def test_prob_k_spatial_axes(self): + key = 'test' + rotate = Rotate90d(keys=key, k=2, spatial_axes=(0, 1)) + rotated = rotate({key: self.imt[0]}) + expected = list() + for channel in self.imt[0]: + expected.append(np.rot90(channel, 2, (0, 1))) + expected = np.stack(expected) + self.assertTrue(np.allclose(rotated[key], expected)) def test_no_key(self): key = 'unknown' rotate = Rotate90d(keys=key) with self.assertRaisesRegex(KeyError, ''): - rotate({'test': self.imt}) + rotate({'test': self.imt[0]}) if __name__ == '__main__': diff --git a/tests/test_rotated.py b/tests/test_rotated.py new file mode 100644 index 0000000000..af7a758d8d --- /dev/null +++ b/tests/test_rotated.py @@ -0,0 +1,43 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest +import numpy as np + +import scipy.ndimage +from parameterized import parameterized + +from monai.transforms import Rotated +from tests.utils import NumpyImageTestCase2D + +TEST_CASES = [(90, (0, 1), True, 1, 'reflect', 0, True), + (-90, (1, 0), True, 3, 'constant', 0, True), + (180, (1, 0), False, 2, 'constant', 4, False)] + +class TestRotated(NumpyImageTestCase2D): + + @parameterized.expand(TEST_CASES) + def test_correct_results(self, angle, spatial_axes, reshape, + order, mode, cval, prefilter): + key = 'img' + rotate_fn = Rotated(key, angle, spatial_axes, reshape, order, + mode, cval, prefilter) + rotated = rotate_fn({key: self.imt[0]}) + expected = list() + for channel in self.imt[0]: + expected.append(scipy.ndimage.rotate(channel, angle, spatial_axes, reshape, order=order, + mode=mode, cval=cval, prefilter=prefilter)) + expected = np.stack(expected).astype(np.float32) + self.assertTrue(np.allclose(expected, rotated[key])) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_unet.py b/tests/test_unet.py index c1e838c284..5b8e85f915 100644 --- a/tests/test_unet.py +++ b/tests/test_unet.py @@ -20,39 +20,39 @@ { 'dimensions': 2, 'in_channels': 1, - 'num_classes': 3, + 'out_channels': 3, 'channels': (16, 32, 64), 'strides': (2, 2), 'num_res_units': 1, }, torch.randn(16, 1, 32, 32), - (16, 1, 32, 32), + (16, 3, 32, 32), ] TEST_CASE_2 = [ # single channel 3D, batch 16 { 'dimensions': 3, 'in_channels': 1, - 'num_classes': 3, + 'out_channels': 3, 'channels': (16, 32, 64), 'strides': (2, 2), 'num_res_units': 1, }, 
torch.randn(16, 1, 32, 24, 48), - (16, 1, 32, 24, 48), + (16, 3, 32, 24, 48), ] TEST_CASE_3 = [ # 4-channel 3D, batch 16 { 'dimensions': 3, 'in_channels': 4, - 'num_classes': 3, + 'out_channels': 3, 'channels': (16, 32, 64), 'strides': (2, 2), 'num_res_units': 1, }, torch.randn(16, 4, 32, 64, 48), - (16, 1, 32, 64, 48), + (16, 3, 32, 64, 48), ] @@ -63,7 +63,7 @@ def test_shape(self, input_param, input_data, expected_shape): net = UNet(**input_param) net.eval() with torch.no_grad(): - result = net.forward(input_data)[1] + result = net.forward(input_data) self.assertEqual(result.shape, expected_shape) diff --git a/tests/test_uniform_rand_patch.py b/tests/test_uniform_rand_patch.py index f11c4b43f4..32b1077ef7 100644 --- a/tests/test_uniform_rand_patch.py +++ b/tests/test_uniform_rand_patch.py @@ -17,13 +17,13 @@ from tests.utils import NumpyImageTestCase2D -class UniformRandomPatchTest(NumpyImageTestCase2D): +class TestUniformRandomPatch(NumpyImageTestCase2D): def test_2d(self): - patch_size = (1, 10, 10) - patch_transform = UniformRandomPatch(patch_size=patch_size) - patch = patch_transform(self.imt) - self.assertTrue(np.allclose(patch.shape[:-2], patch_size[:-2])) + patch_spatial_size = (10, 10) + patch_transform = UniformRandomPatch(patch_spatial_size=patch_spatial_size) + patch = patch_transform(self.imt[0]) + self.assertTrue(np.allclose(patch.shape[1:], patch_spatial_size)) if __name__ == '__main__': diff --git a/tests/test_uniform_rand_patchd.py b/tests/test_uniform_rand_patchd.py index 1ab03b4b6f..a87438acce 100644 --- a/tests/test_uniform_rand_patchd.py +++ b/tests/test_uniform_rand_patchd.py @@ -17,20 +17,20 @@ from tests.utils import NumpyImageTestCase2D -class UniformRandomPatchdTest(NumpyImageTestCase2D): +class TestUniformRandomPatchd(NumpyImageTestCase2D): def test_2d(self): - patch_size = (1, 10, 10) + patch_spatial_size = (10, 10) key = 'test' - patch_transform = UniformRandomPatchd(keys='test', patch_size=patch_size) - patch = patch_transform({key: 
self.imt}) - self.assertTrue(np.allclose(patch[key].shape[:-2], patch_size[:-2])) + patch_transform = UniformRandomPatchd(keys='test', patch_spatial_size=patch_spatial_size) + patch = patch_transform({key: self.imt[0]}) + self.assertTrue(np.allclose(patch[key].shape[1:], patch_spatial_size)) def test_sync(self): - patch_size = (1, 4, 4) + patch_spatial_size = (4, 4) key_1, key_2 = 'foo', 'bar' rand_image = np.random.rand(3, 10, 10) - patch_transform = UniformRandomPatchd(keys=(key_1, key_2), patch_size=patch_size) + patch_transform = UniformRandomPatchd(keys=(key_1, key_2), patch_spatial_size=patch_spatial_size) patch = patch_transform({key_1: rand_image, key_2: rand_image}) self.assertTrue(np.allclose(patch[key_1], patch[key_2])) diff --git a/tests/test_zoom.py b/tests/test_zoom.py index 83795542bc..cb0af47fef 100644 --- a/tests/test_zoom.py +++ b/tests/test_zoom.py @@ -17,7 +17,7 @@ from scipy.ndimage import zoom as zoom_scipy from parameterized import parameterized -from monai.transforms import Zoom, Zoomd +from monai.transforms import Zoom from tests.utils import NumpyImageTestCase2D VALID_CASES = [(1.1, 3, 'constant', 0, True, False, False), @@ -30,37 +30,31 @@ ("invalid_order", 0.9, 's', AssertionError)] -class ZoomTest(NumpyImageTestCase2D): +class TestZoom(NumpyImageTestCase2D): @parameterized.expand(VALID_CASES) def test_correct_results(self, zoom, order, mode, cval, prefilter, use_gpu, keep_size): zoom_fn = Zoom(zoom=zoom, order=order, mode=mode, cval=cval, prefilter=prefilter, use_gpu=use_gpu, keep_size=keep_size) zoomed = zoom_fn(self.imt[0]) - expected = zoom_scipy(self.imt, zoom=zoom, mode=mode, order=order, - cval=cval, prefilter=prefilter) + expected = list() + for channel in self.imt[0]: + expected.append(zoom_scipy(channel, zoom=zoom, mode=mode, order=order, + cval=cval, prefilter=prefilter)) + expected = np.stack(expected).astype(np.float32) self.assertTrue(np.allclose(expected, zoomed)) - @parameterized.expand(VALID_CASES) - def 
test_correct_results_dict(self, zoom, order, mode, cval, prefilter, use_gpu, keep_size): - key = 'img' - zoom_fn = Zoomd(key, zoom=zoom, order=order, mode=mode, cval=cval, - prefilter=prefilter, use_gpu=use_gpu, keep_size=keep_size) - zoomed = zoom_fn({key: self.imt[0]}) - - expected = zoom_scipy(self.imt, zoom=zoom, mode=mode, order=order, - cval=cval, prefilter=prefilter) - self.assertTrue(np.allclose(expected, zoomed[key])) - - @parameterized.expand(GPU_CASES) def test_gpu_zoom(self, _, zoom, order, mode, cval, prefilter): if importlib.util.find_spec('cupy'): zoom_fn = Zoom(zoom=zoom, order=order, mode=mode, cval=cval, prefilter=prefilter, use_gpu=True, keep_size=False) zoomed = zoom_fn(self.imt[0]) - expected = zoom_scipy(self.imt, zoom=zoom, mode=mode, order=order, - cval=cval, prefilter=prefilter) + expected = list() + for channel in self.imt[0]: + expected.append(zoom_scipy(channel, zoom=zoom, mode=mode, order=order, + cval=cval, prefilter=prefilter)) + expected = np.stack(expected).astype(np.float32) self.assertTrue(np.allclose(expected, zoomed)) def test_keep_size(self): @@ -76,7 +70,7 @@ def test_keep_size(self): def test_invalid_inputs(self, _, zoom, order, raises): with self.assertRaises(raises): zoom_fn = Zoom(zoom=zoom, order=order) - zoomed = zoom_fn(self.imt) + zoomed = zoom_fn(self.imt[0]) if __name__ == '__main__': diff --git a/tests/test_zoomd.py b/tests/test_zoomd.py new file mode 100644 index 0000000000..6ef85cb1fe --- /dev/null +++ b/tests/test_zoomd.py @@ -0,0 +1,82 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + +import numpy as np +import importlib + +from scipy.ndimage import zoom as zoom_scipy +from parameterized import parameterized + +from monai.transforms import Zoomd +from tests.utils import NumpyImageTestCase2D + +VALID_CASES = [(1.1, 3, 'constant', 0, True, False, False), + (0.9, 3, 'constant', 0, True, False, False), + (0.8, 1, 'reflect', 0, False, False, False)] + +GPU_CASES = [("gpu_zoom", 0.6, 1, 'constant', 0, True)] + +INVALID_CASES = [("no_zoom", None, 1, TypeError), + ("invalid_order", 0.9, 's', AssertionError)] + + +class TestZoomd(NumpyImageTestCase2D): + + @parameterized.expand(VALID_CASES) + def test_correct_results(self, zoom, order, mode, cval, prefilter, use_gpu, keep_size): + key = 'img' + zoom_fn = Zoomd(key, zoom=zoom, order=order, mode=mode, cval=cval, + prefilter=prefilter, use_gpu=use_gpu, keep_size=keep_size) + zoomed = zoom_fn({key: self.imt[0]}) + expected = list() + for channel in self.imt[0]: + expected.append(zoom_scipy(channel, zoom=zoom, mode=mode, order=order, + cval=cval, prefilter=prefilter)) + expected = np.stack(expected).astype(np.float32) + self.assertTrue(np.allclose(expected, zoomed[key])) + + + @parameterized.expand(GPU_CASES) + def test_gpu_zoom(self, _, zoom, order, mode, cval, prefilter): + key = 'img' + if importlib.util.find_spec('cupy'): + zoom_fn = Zoomd(key, zoom=zoom, order=order, mode=mode, cval=cval, + prefilter=prefilter, use_gpu=True, keep_size=False) + zoomed = zoom_fn({key: self.imt[0]}) + expected = list() + for channel in self.imt[0]: + expected.append(zoom_scipy(channel, zoom=zoom, mode=mode, order=order, + cval=cval, prefilter=prefilter)) + expected = np.stack(expected).astype(np.float32) + self.assertTrue(np.allclose(expected, zoomed[key])) + + def test_keep_size(self): + key = 'img' + zoom_fn = Zoomd(key, zoom=0.6, keep_size=True) + zoomed = zoom_fn({key: self.imt[0]}) + 
self.assertTrue(np.array_equal(zoomed[key].shape, self.imt.shape[1:])) + + zoom_fn = Zoomd(key, zoom=1.3, keep_size=True) + zoomed = zoom_fn({key: self.imt[0]}) + self.assertTrue(np.array_equal(zoomed[key].shape, self.imt.shape[1:])) + + @parameterized.expand(INVALID_CASES) + def test_invalid_inputs(self, _, zoom, order, raises): + key = 'img' + with self.assertRaises(raises): + zoom_fn = Zoomd(key, zoom=zoom, order=order) + zoomed = zoom_fn({key: self.imt[0]}) + + +if __name__ == '__main__': + unittest.main() From c40f7399c5ea6359536663295200cd449c505352 Mon Sep 17 00:00:00 2001 From: Nic Ma Date: Mon, 16 Mar 2020 18:29:36 +0800 Subject: [PATCH 21/40] 179 add DeleteKeys transform (#180) * [DLMED] add DeleteKeys transform * [DLMED] remove unnecessary copy --- monai/transforms/composables.py | 22 +++++++++++++++++++ tests/test_delete_keys.py | 38 +++++++++++++++++++++++++++++++++ 2 files changed, 60 insertions(+) create mode 100644 tests/test_delete_keys.py diff --git a/monai/transforms/composables.py b/monai/transforms/composables.py index f86afd546e..d8cc708af7 100644 --- a/monai/transforms/composables.py +++ b/monai/transforms/composables.py @@ -883,3 +883,25 @@ def __call__(self, data): for key in self.keys: d[key] = zoomer(d[key]) return d + + +@export +@alias('DeleteKeysD', 'DeleteKeysDict') +class DeleteKeysd(MapTransform): + """ + Delete specified keys from data dictionary to release memory. + It will remove the key-values and copy the others to construct a new dictionary. + """ + + def __init__(self, keys): + """ + Args: + keys (hashable items): keys of the corresponding items to be transformed. 
+ See also: monai.transform.composables.MapTransform + """ + MapTransform.__init__(self, keys) + + def __call__(self, data): + for key in self.keys: + del data[key] + return dict(data) diff --git a/tests/test_delete_keys.py b/tests/test_delete_keys.py new file mode 100644 index 0000000000..35917e36f5 --- /dev/null +++ b/tests/test_delete_keys.py @@ -0,0 +1,38 @@ +# Copyright 2020 MONAI Consortium +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest +import time +import sys +from parameterized import parameterized +from monai.transforms.composables import DeleteKeysd + +TEST_CASE_1 = [ + {'keys': [str(i) for i in range(30)]}, + 20, + 648, +] + + +class TestDeleteKeysd(unittest.TestCase): + + @parameterized.expand([TEST_CASE_1]) + def test_memory(self, input_param, expected_key_size, expected_mem_size): + input_data = dict() + for i in range(50): + input_data[str(i)] = [time.time()] * 100000 + result = DeleteKeysd(**input_param)(input_data) + self.assertEqual(len(result.keys()), expected_key_size) + self.assertEqual(sys.getsizeof(result), expected_mem_size) + + +if __name__ == '__main__': + unittest.main() From 216298cc4318611a17aabe1ea97bd69b613bb153 Mon Sep 17 00:00:00 2001 From: Wenqi Li Date: Mon, 16 Mar 2020 14:48:33 +0000 Subject: [PATCH 22/40] 176 revise docs (#177) * [DLMED] add highlight features to README * [DLMED] add temp images path to highlight features * documentation updates - update readmet - update docs/ structure - merged highlights.md - update 
docs/Makefile commands * [DLMED] update end-to-end process chart * revise docstring for transforms/losses * update docs * update readthedocs * 179 add DeleteKeys transform (#180) * [DLMED] add DeleteKeys transform * [DLMED] remove unnecessary copy * update docs * fixes github urls * fixes sys.getsizeof test on mac Co-authored-by: Nic Ma --- .readthedocs.yml | 2 +- CONTRIBUTING.md | 9 + README.md | 257 ++------------ docs/Makefile | 8 +- docs/images/end_to_end_process.png | Bin 0 -> 534556 bytes docs/images/sliding_window.png | Bin 0 -> 579278 bytes docs/index.rst | 21 -- docs/{ => source}/conf.py | 48 ++- docs/source/data.rst | 55 +++ docs/source/engines.rst | 12 + docs/source/handlers.rst | 41 +++ docs/source/highlights.md | 79 +++++ docs/source/index.rst | 80 +++++ docs/source/losses.rst | 23 ++ docs/source/metrics.rst | 17 + docs/source/networks.rst | 94 +++++ docs/source/transform_api.rst | 354 +++++++++++++++++++ docs/source/utils.rst | 20 ++ docs/source/visualize.rst | 12 + monai/data/dataset.py | 15 +- monai/data/synthetic.py | 10 +- monai/data/utils.py | 12 +- monai/engine/multi_gpu_supervised_trainer.py | 20 +- monai/handlers/checkpoint_loader.py | 5 +- monai/handlers/stats_handler.py | 4 +- monai/handlers/tensorboard_handlers.py | 8 +- monai/losses/dice.py | 11 +- monai/metrics/compute_meandice.py | 8 +- monai/networks/layers/convutils.py | 14 +- monai/networks/layers/factories.py | 4 + monai/networks/utils.py | 12 +- monai/transforms/adaptors.py | 79 +++-- monai/transforms/composables.py | 99 ++---- monai/transforms/compose.py | 145 +++++--- monai/transforms/transforms.py | 30 +- monai/transforms/utils.py | 56 +-- monai/utils/misc.py | 2 +- monai/visualize/img2tensorboard.py | 17 +- tests/test_delete_keys.py | 7 +- tests/test_map_transform.py | 2 +- 40 files changed, 1172 insertions(+), 520 deletions(-) create mode 100644 docs/images/end_to_end_process.png create mode 100644 docs/images/sliding_window.png delete mode 100644 docs/index.rst rename docs/{ 
=> source}/conf.py (73%) create mode 100644 docs/source/data.rst create mode 100644 docs/source/engines.rst create mode 100644 docs/source/handlers.rst create mode 100644 docs/source/highlights.md create mode 100644 docs/source/index.rst create mode 100644 docs/source/losses.rst create mode 100644 docs/source/metrics.rst create mode 100644 docs/source/networks.rst create mode 100644 docs/source/transform_api.rst create mode 100644 docs/source/utils.rst create mode 100644 docs/source/visualize.rst diff --git a/.readthedocs.yml b/.readthedocs.yml index 0fa357b9ca..9a68f0626e 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -7,7 +7,7 @@ version: 2 # Build documentation in the docs/ directory with Sphinx sphinx: - configuration: docs/conf.py + configuration: docs/source/conf.py # Build documentation with MkDocs #mkdocs: diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 043d6c20dd..fa7527db87 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -57,6 +57,15 @@ License information: all source code files should start with this paragraph: ``` +### Building the documentation +To build documentation via Sphinx in`docs/` folder: +```bash +cd docs/ +make html +``` +The above commands build html documentation. Type `make help` for all supported formats, +type `make clean` to remove the current build files. + ## Unit testing MONAI tests are located under `tests/`. 
diff --git a/README.md b/README.md index ac077c5ebd..3fc9c31f87 100644 --- a/README.md +++ b/README.md @@ -1,243 +1,38 @@ # Project MONAI -**M**edical **O**pen **N**etwork for **AI** - _Toolkit for Healthcare Imaging_ +**M**edical **O**pen **N**etwork for **AI** -_Contact: _ +[![License](https://img.shields.io/badge/License-Apache%202.0-green.svg)](https://opensource.org/licenses/Apache-2.0) [![pipeline status](https://gitlab.com/project-monai/MONAI/badges/master/pipeline.svg)](https://github.com/Project-MONAI/MONAI/commits/master) [![Documentation Status](https://readthedocs.org/projects/monai/badge/?version=latest)](https://monai.readthedocs.io/en/latest/?badge=latest) [![coverage report](https://gitlab.com/project-monai/MONAI/badges/master/coverage.svg)](https://gitlab.com/project-monai/MONAI/pipelines/) -This document identifies key concepts of project MONAI at a high level, the goal is to facilitate further technical discussions of requirements,roadmap, feasibility and trade-offs. -## Vision - * Develop a community of academic, industrial and clinical researchers collaborating and working on a common foundation of standardized tools. - * Create a state-of-the-art, end-to-end training toolkit for healthcare imaging. - * Provide academic and industrial researchers with the optimized and standardized way to create and evaluate models +MONAI is a [PyTorch](https://pytorch.org/)-based, [open-source](https://github.com/Project-MONAI/MONAI/blob/master/LICENSE) platform for deep learning in healthcare imaging. Its ambitions are: +- developing a community of academic, industrial and clinical researchers collaborating on a common foundation; +- creating state-of-the-art, end-to-end training workflows for healthcare imaging; +- providing researchers with the optimized and standardized way to create and evaluate deep learning models. 
-## Targeted users - * Primarily focused on the healthcare researchers who develop DL models for medical imaging -## Goals - * Deliver domain-specific workflow capabilities - * Address the end-end “Pain points” when creating medical imaging deep learning workflows. - * Provide a robust foundation with a performance optimized system software stack that allows researchers to focus on the research and not worry about software development principles. +## Features +> _The codebase is currently under active development._ -## Guiding principles -### Modularity - * Pythonic -- object oriented components - * Compositional -- can combine components to create workflows - * Extensible -- easy to create new components and extend existing components - * Easy to debug -- loosely coupled, easy to follow code (e.g. in eager or graph mode) - * Flexible -- interfaces for easy integration of external modules -### User friendly - * Portable -- use components/workflows via Python “import” - * Run well-known baseline workflows in a few commands - * Access to the well-known public datasets in a few lines of code -### Standardisation - * Unified/consistent component APIs with documentation specifications - * Unified/consistent data and model formats, compatible with other existing standards -### High quality - * Consistent coding style - extensive documentation - tutorials - contributors’ guidelines - * Reproducibility -- e.g. system-specific deterministic training -### Future proof - * Task scalability -- both in datasets and computational resources - * Support for advanced data structures -- e.g. graphs/structured text documents -### Leverage existing high-quality software packages whenever possible - * E.g. low-level medical image format reader, image preprocessing with external packages - * Rigorous risk analysis of choice of foundational software dependencies -### Compatible with external software - * E.g. 
data visualisation, experiments tracking, management, orchestration +- flexible pre-processing for multi-dimensional medical imaging data; +- compositional & portable APIs for ease of integration in existing workflows; +- domain-specific implementations for networks, losses, evaluation metrics and more; +- customizable design for varying user expertise; +- multi-GPU data parallelism support. -## Key capabilities +## Getting Started - - - - - - - - - - - - - - - - - - - - - - - - - - -
-Basic features - Example - Notes -
Ready-to-use workflows - Volumetric image segmentation - “Bring your own dataset” -
Baseline/reference network architectures - Provide an option to use “U-Net” - -
Intuitive command-line interfaces - - -
Multi-gpu training - Configure the workflow to run data parallel training - -
+Tutorials & examples are located at [monai/examples](https://github.com/Project-MONAI/MONAI/tree/master/examples). +Technical documentation is available via [Read the Docs](https://monai.readthedocs.io/en/latest/). +## Contributing +For guidance on making a contribution to MONAI, see the [contributing guidelines](https://github.com/Project-MONAI/MONAI/blob/master/CONTRIBUTING.md). - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Customisable Python interfaces - Example - Notes -
Training/validation strategies - Schedule a strategy of alternating between generator and discriminator model training - -
Network architectures - Define new networks w/ the recent “Squeeze-and-Excitation” blocks - “Bring your own model” -
Data preprocessors - Define a new reader to read training data from a database system - -
Adaptive training schedule - Stop training when the loss becomes “NaN” - “Callbacks” -
Configuration-driven workflow assembly - Making workflow instances from configuration file - Convenient for managing hyperparameters -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Model sharing & transfer learning - Example - Notes -
Sharing model parameters, hyperparameter configurations - Standardisation of model archiving format - -
Model optimisation for deployment - - -
Fine-tuning from pre-trained models - Model compression, TensorRT - -
Model interpretability - Visualising feature maps of a trained model - -
Experiment tracking & management - - https://polyaxon.com/ -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Advanced features - Example - Notes -
Compatibility with external toolkits - XNAT as data source, ITK as preprocessor - -
Advanced learning strategies - Semi-supervised, active learning - -
High performance preprocessors - Smart caching, multi-process - -
Multi-node distributed training - - -
- +## Links +- Website: _(coming soon)_ +- API documentation: https://monai.readthedocs.io/en/latest/ +- Code: https://github.com/Project-MONAI/MONAI +- Project tracker: https://github.com/Project-MONAI/MONAI/projects +- Issue tracker: https://github.com/Project-MONAI/MONAI/issues +- Wiki: https://github.com/Project-MONAI/MONAI/wiki +- Test status: https://gitlab.com/project-monai/MONAI/pipelines diff --git a/docs/Makefile b/docs/Makefile index e3e3658fe5..bea205e654 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -6,7 +6,7 @@ SPHINXOPTS ?= SPHINXBUILD ?= sphinx-build SOURCEDIR = source -BUILDDIR = ../docs +BUILDDIR = build # Put it first so that "make" without argument is like "make help". help: @@ -17,12 +17,8 @@ help: # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: Makefile - sphinx-apidoc -f -o "$(SOURCEDIR)"/apidocs ../monai - rm -rf ../docs/* @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) - mv ../docs/html/* ../docs/ - rm -rf ../docs/html ../docs/doctrees clean: - rm -rf ../docs/* + rm -rf build/ rm -rf source/apidocs diff --git a/docs/images/end_to_end_process.png b/docs/images/end_to_end_process.png new file mode 100644 index 0000000000000000000000000000000000000000..e837f64a9308575f5363439f9dd29139a2891a1f GIT binary patch literal 534556 zcmd3OcT^Ky_pYLdqKKl1h!hJVBB1o%rASlhH7dRLP?Ly?Sdd;+I?`L{odiYc0@8a> z2rYz?0wf`Dhxhy5;(M3BwJyJZ?wUnbU@@b5ZB19Hs)90z_A|>tPl@w zRtrxAzfyqcof8^=-4vejtm@%5fBns1k{X{*Mcmx?llvLALX*Z51qct5kWNU`t0mLX zx^n%h++BNpTe7oUxX|b5f>zqk zy7!W4klW*omKL!J5coTZT<*hS&iwJKMk}$aM$2c3b=;g9>!+imm|Q3^DNNj~?EzAE zJ0p%Vy0l-rC%zx+d7lEpSGw)3>+#9L+(O_+%aLFCd^^fS>(Ogb`A+p|^SsEcc|*f|iw<@o@g=ILxLWEF#rxbg8?1SrG257 z3*g$+DsS)__{i(~)M`E>!52e*%;66HiLo*F26MjyVua z{^VV4T3dwTM_sxh0isG8nqY~8_0}zI4ze-m3g9Lcy05LXRRbXcbeH#amrnu;Y0whX zrvue6Oz{IJ_Z_iOM12=#%uDF;Y~iE#Laz2N2z*)uvdl@9eWmcRL|_tFY`9pt{P6^r 
z1XeGjPG9-uwY(R?p*Hph$#x8#kuTTorFNqq?hD7#Wa&@kqBvIb86l=6)&*Tj^{NO^ zX+Nf%-b;G)DgL;pls|-HpQd3~1XeiQJwcniMcg#2I6&nt0Kbowvb%HVMTbd_o#R2W z6 zvA}8P3%IBw3%C9k7k2`zVljT$racEg)Db4>WKE5K9D(H=5pF-1#`$yI{^ULjwKY{B zmAb_~oqh*c1aN7#>Nr1V?8}3BLXVuolZ0jZXG$;bD>RRCP45K{!oRZiBhsW!JNujQ z4JUEz=15ifi@n(SsH2JZyGge{_&O@03wZZjZ8EMLjFr9GZ+kPQXRZi8691;fy7PJI zs8Fs^S*w-gzGD8ALX$jTY%1P~Sk89gLM1yP2il+&ZeUp70M)n7tqMfY_pZjJPU9r# z_mgsgG12e=vGY*YtfGqarrJ@FjHy0b-=OnA={diGydYmU+T-j#tx^#NSe$?6La(Xs zS@r=Mg55XNbk&rTz|xJFp4A(R?e|?HKAzfF?o&uZ16;&S(hTWTTvU+YAROOb5s|Q$ z`vfp-x48_mwXR2gYK*bqP(HXnJW;>^bOR;TEeEoh%J^m)xzpmpEzqN_86M4h28Zsz zjXD#J26naVtStKDF^X^3t*`SdX*&%RfgeiUErkVX!zy zQ67x-SXh=2cE-jtqizW8L8U)q)A5CdRa#;aeS5zvnj{6dqHb~{w!@3XweKlGeD!)I zniZs?7Sc{UgRA7Cmh_&oy6v#9>~kPk?6N}I4sXM0gFznjS8J(9>%D#AU@w>!2%W6Ja?fTC|8>9=kDOjDGAk?JaZR1jE3PdmaFBez&y@D*5`ZLp z6FIvOgwnVMvZ`u|;Mj;eGaKG6Yi*A@+wUoKdVhK0sTzQ<=NgQd;|6h`6Y=PN_)u*_ z2e;i^8#X3TUCI1;r`7fSUHQ~e4q*Mv7A*NeMw!?Qq(9|gKi?OX5Ql^0nC@?){5)s& z2T)uRdsYsnY^>yT0?s_I34wfcz#a;`*3J%$cy~o38*RPMAU_NTg8X2EFPhl^_cQVM zEFL~wUr6j&uk~!5O<`g^y@Wfz-&F9_2(a^k$$A?naOZJ1fyg3hO>HNw*AbE9Je#q{ ztje`J+xHoO_MAWf(oM@DWWC;>%M^S&7neLGZ?N)iLOEdeYa*wDz&>%h{xsJ;RF4^{ zry3EJ$0_Jz(&@u8-wAj{&CQ&B=G!#Lu~WILt<<2UxLP5|#*O|9LV6!74^Z){*6-tE zUhS_Q>y_K{hdBsji)13?v=h)@-g-hOLArS9lD~<^{(YeW1b8o`p?XlX64&dM+PVGJ zOuMIngmd-8qeHT7_UmF5u)WLHw4&7tW+QaJ#TsBXKcx5!$69)#{r&fUS??3TWg8oM zC?_vQ)pL*)HsUVcmYoSUqjZu!N+rfe?bo&*KryiS)6#9+;F>&@s-IU>gCDmomY|WM5&HwmgH01 zuS||bN3sgOGFgXOzU}|lS^p?=Z?OQ&GmRq@fZFL&;Ucr9(JT?|@f6rlPes_@Rtyg3 zy$2~&?Eqiwqrp`+ZAa7RU6;IOpr+%kbt>TPFY$ah>IwbQ@SzRj{J{2EjZWQT8J4~J zzgv#rs_THkCoM?LXN0XkMHhi3wFTjbl7j7@ODWo{4N8CE&q5@PquqA2cGiQ2JpDkq z8jO3WcUu21Nme27l)bZkMbLU(&F6IK`yNrzgM7-Am{GpuCI}rn(=Y$~&k;`nb*h^| z9pwk3b$h-=wr#cfs0CE0wCR`mZwQvSS-j$wfZ-4FP6ph1$bRz8(S+W$TgQ&iTlqKL z%RBnd94FPFU}g1jV0chN*`-lrU|78KrPCsJ0VC(OCHtN)A2j{cLj-9^rmTo4P)D+MZ z&_+498XCkgjX#h|wP1GRoBlke)THi z4B-AiyXbzsL|wqpjo~Zqgj0~d_?H)DZi0S1l)1&B|3wTROiDxaeFf(E=UnL;I7KMb 
zd;IC?Lm=UDD&?l=_eGRb<)G*TJ%1&gi?Y|^Y}96rO=<^Hay>|^pCL|f?ods($Pf69 zLA1-h?4Psc1wQ?@T9Qgr2t7FfBPEC6ek>bgS5rq(3~}j@2U?wNe`kJs5KZ9la)LI< zEg>T=P!@bsOw#grSA?$a_mn{$lJ-H;P%@{1Le|DT#i|X)zKsj0vU$rS`N*8vTUkeKgGe!efsiRhooVE z@eRhhlM`;v_Oa}5e4St!XEL&eYh3&08&a4O&sw3*TVDBHE$poje3C79cZEm3jg9lB zSR=;jXLKV54gw^0tIvN-n#Z>mjkj*n2RyaE6H z+Ily@m$?RYeNZi;^M@?=1H|T&fr|x>7wU$L2AK!T$o&jssfc;vSt8FRzNtkCp??6| zuf?p?3x&`V1G_;x0sbp*N!h+z;ROM0{#VL>T@+55aU_bLZO%Eqj6KvGd|iNBG1{vf zklNN3)Q=rI_MJvpQH?lY7de8XB3=~-&%084`sKI=Sg4mcg zK}jix$j{vax;f+d4hgJWS)6>3+|l-{LH}XtqXNOV#k)Nm*D@A2rO+W#=!%&z(Zr3~ z6MdZGBG0&};GosT3;%^HZ|C5jZ|P?H)#lnh568bWiG%{{Jj;N+LgfjI5;zoi*V(h- zd9K72m*$|(<9nVm@S>@U&gEi>oc(c{J+ulsB_~+IQO1Guf8V1PBsVY=JS)KZ;FjkT zLjCj!4Zh_JH0$mdv6+K3n=WY6KF*g}mECQub>W}z{Ewk4q_NX`zhfu*0-96x#BP;7 z2i&9XtuiIMt$dTgr-SFvU*~bv&^m^v#-jU;Is~>7ugQh z5ayWkSCn}eB2(c17ZDma6|$d6D9i^i&$oop;!(uye*oTLRIoX83K zGhU~3IL!at3jn12r$%Ys6h)JrPZxNn7Pd)(LSm+6blwTE6NT9fiUwa|$IeIDT|7_M zb!h0||8)ePGp1X@ELrc?VINevL@dfzQNmYwuN1RhzLf5G0QXxbw#U#+t>mIs4iQ%a z7GP!GMAsZj-1`gsaCwmjOg}IBmU&1;p7^Vb2~In0EFKth6XKiY5GWjo)&%b=nN2Id ze6IfST)LZNk_don%u!#0?$PX_!5>Fx1LqpFiE?Q#!}cM+(Pjt9^8Uyok9GFlRj*LZ?xqzcrpJ@XCHGyJ@~Acr(tEBDpR6|+_E7E? 
z=Q{ukWOY)BPyRe$f@`E(X{ix=Ik&auQnYwQgo=uqX-fiewfPPj*WL{3cu{M15vTFn zwLc73@LLm`gmmui;LP7sCT(+`eQs8o4K_QX{>ZgKkM$pr_lf6T!}^n;%@(0N3HPq_ z^&1P9>OsC5*!an_57F+2kNsu(|IW}9#8qS#es$jV?mC(ML0x5A`oT9j0twHgPqwp) zCkgzxkKKU%kRZH2{AXrMgI3Kp0NHFwrMwhF!S+<9>KO#?7ok(^h6HD)XS-tv@pJkkkgB}JqF=&Flg zziHBV{L3S{X4Oj0jLDPa%=L#dBH!5mC>{QpCDL57zs;-3$&Vj7jJblb;wy|eef$0c ziGWD&mGN(41Om&RhsSZk`(tE&Tdp0BQt&%fIq0H?!LU_uQLixbB!yE(GnL*4L5=rag@K zp!#eK$;v46;qZCJuF&l7P2}FFxo7kP5t#6ejIad1gL)?n7h^hNMz!AJwjj1pIqTKfzUfsdX}X5mnd#hnaUr)86x~FoSkop zto?`~I~hdL@Vrx?EU^{kX%%QHg>?&*O-A)$H!Gn4Ug)o2h&sge6!J{owth8g7!$Y@ zk>d%15BIeOI>M1?{z)hZ^9M?7R?6~28ge$sW>bHhyazEspz;3yhkv9q3p5*RHb#DL zRhE*up%f6tcDda`h<%5l0rFkoJ-&-3Pu^rTQkUk>#PavF|MM-)h5j%kZHHX+H3Qr) z9ysA)psS(zT8Qs5<6mcRsUO~Br-C~>m4KQdRUrZMgZ)vdnQR1Cq7L?X+V_XQ923g} zOTSEv#YpFwo~#@ySiR-s+0Yz5TXYWz#|m?}Im1gQP!&03vpN7xDFM(xOhnEOWgMhS ztgiGVV`?k|aQ1GS5j#>-u$9$3;+hBtm4AN66E-M@89j;`8Cqi9Fz0800axL2zGhw)WRkt3xutF4LMzBPV} z8_$GRLqhuQ4Nw1%%X93oPa0xO1NN;2qLVRyq9+!^b4~&s^LL$jN`@w2IvDci%Hxd8 z)|yYzk?-?u#2wk5IGUY~Hk7{mGaXytFnRtQBfu~fwU2bFw*Qy({6VJYZR7;;{#-BR z8Lc;01tbsEjql=(PdfAWKP9pY{k|}$;0;b=7}#~+xItQYGpO^f)}$1W-Z^wg)_>O$ zha;N4N7M69r>$5w7JZK@{oZFaaq5+(MwyGn`3#rE^GaiNmE0VTHMckjMR-=mJtB!Y zhzUkp9mZVx>qxI+IzmEn3J3bcHC%$+6z?bde$R*Cu&{H|vr)faS7{nW)?B3HjOG3M z#Y9>CNT{J{n8|7Xzw0tZu6q{u`SF%pI{6*CBc^QUI0PnD8DevFN|%pzg*W~2F*epD zsDAwjxNDx}&`r0vJ+lD=+#B28sMf>RD^T?ybxlk>zoRdN(mE6(ZPPBzw_NV`tDyF` zMYwuZ4?h*Sw>z?a!T$#mpEYNleCEfu1mBoaWuadY>16N=@{#wOKYanl{8IVZan+NN zBeRE{QhX?W5>v&s*W_Er{V3GQlzuizrT6^Uw zKf|jL61KX|c6NVXa_g-6lvy;M{qxbJB?qI+)Sk8xPdo5)FDPzt#2&ya>P_QgY@}{+ zh|o=O`oT)Hjt;dxwB+ zL@21#;jziO(+PBx{yj!MyTX_Z=0NrfNzygdnX|U9xvR9W10&ZjUAkcmVpqKY7zrN? 
zl*+x7&1b3(v9ez&v-iH9mh_%2QMsdf*E1+OlRHP<(DBHsU;`3Iy8I*+jxZ$`4I~wo zLM;2`r+qNdf{plkVbnXfuIdBb;&P1OiJoWv$`o~oOY+6hVS>%(DvGSwhts>-c*_73 z;IH}@>io-XB|>Nv&*bhlk4^r8z`0!1i9+M@%eX&_IUv<5JnUJbmf^0n3R>F&`Ln`R za)^t=g%481qs{htNm{{F+MYk{A|oaE^B>kWmN#-Xzmgh+&kCydER8j~SgAwbniKo{ zD(gVtlO!*)eeFV+*K+7v4r(hU3hlzeDCg!UD$fC%p_L4(nDEGW)#k&Z-A6<8hgJ2} zoYxiQ(d|I=F)c<%ai2aNRVZI5AAfM>al@_DiJqK|6}?*-NM|ZIOFv$;sP@fHy_NvA zla?t1@%7W+TJo$xdsyCB)v0v zYg!*ErTqLdsplFetizVg5Hu%`M*+mADgA-JzSH?U@$@L{E#yl7B zZ%K2VLBioI2{AhCBLuOpLBd_t`5(^Ux&{(vXGrB)>FJ6}%F5_ykNM8VVU)n_@3;L} z8wiZhmUXoc#LiSA-$IM7`b%%wM!55ilr=beRKG{DD z9E2|Rf(Q3hIc1LDbFLv~dweh%*!d*Nd|#1FE1CSOHc6qO44vzI%{VL}B4YQD@s*|~ zQ)P0gjh#B?WM$@hN%&b8^IIII&N32OZB^0J(CQJYYXp8#g?NnAd_3Z=&7ep16$!~p z_sL)kx5w?$su=-UJk1mgKp$~T*qSe-MEhp*aXFNULRZkBU~o;K^F(dPx&y5ceg?(3 zO?gGUbG07k`>IF1il6%}!`rfjw>DKGf`3EIzrF$Nyn!gVNM%N))M^>JP0V|Lj_B@; z!>jp#e-YLE4nwydRw`S?c$HuMsFuPlsDfJk9`I7bls%o!O8t1Ow1~i$vO`(*7}f~+ zL5GhKx3O*%8jDmUcAXfci`}fA7~ZHLtWObI(-~ZN9|*iEdkv?;8mOp)^#aP|=Emn$=`HegA)!r0H(I3Pf^~X*3N=SdrYLpB0WJvt;r3maJq12~! 
z$Zrb~-;`9CX3i|f=)hxso?*vp70pNY1)O=n8l~H45 za#&@Ts?*V;L_6%ub2r7Sn)``it*J{DrHw5WK%wjn*{NzB#>-ZsP@exG7=xTer(M7x+HPWf)j7X^6a=IZUboa$3o3%!5?Nr_h61mxm0+bb)E_Pji zB~NxIWi=1lF(_UyYSVA`5qaB%fdAXjdb?HK>=T*=I=fDTT|S=HyZ2?=g@0S&@X_yC zIH%kA8qnKMx}L4ol~9p+{a55so;Ehgx|xb-NiQ9x)@j)Ly;Dw!zcotg{lV42E;05~ zr}reoEL76HB8x)o#Uf*3DtY*^dIz;PHanC}lYLi(VIIqgqJ#W71rt8l9!S3K@!Ya{ zD7-i%ery?6aiTAL&aaIQ1i8V$zz{BQndt)4W!>GmON+^F2=o_Y#|#oj55o# zA0#=Vy@b)mi;pBL>PHO6af?pq2~c=xmjBUV5e^L41bEoeqlHW3nL1Cs;4GSS|-qs6#vzNF=&Npd}RX=fTSo1s@UXke5 zOir;q_R#Q?LI5J6M^toBVEFY`jgDuFLyp-~p~k`<0(ps^Od=1eOCPEKc`5bgmJ%oX z$(}PU;R#b@7C8b_!+H6QPZ43EO%ZgE;`g#M*uKE+Heqh0SMkQzkFwm9Oif$6+U?~x znQqmV+8~k7Jz1$@6KkJBi57|U$rR?hq^U!F_uNQKGb`vo>-oORKu6QAN+>4uufpQr zvse0K_P};Qok@n{3_!@8{Wr&|PgsoudT-Jq#@Oth|CTne!y5_gk!3jLY=a3ol4^g@ zp1KC~#Cxy66?WY5@DGTzY?>bbJq930ukuAjV;(VSjT&>Q5;h8|iC2Im9{C*v2M?V- z>XO{)2aEHSw-DePUiFcB)bS-~(dgYmtnS}iITY58=1vl@oHx|vZwRPf{2XzCO=JAZ z51HddXZl!(L>LGe2gM*06;KMMa)ER*I(IldwO@a4wI#LcVQRDL(=d z-}Y3MT>`szf9H0$#RsOX>UYQ&VS)(l@^5&)mg6&Dp|N}1A}sV;@PVGtZpiV(rE^hn z!b9!ff71U?%z>Y$H=Bl-18jN@?TC;jZjg`BGM!4)u)Q1SF2}3rQoA+Dzqrh0v){C-g>lVqojuj*n zq&0txYn`>X2^kn39$)GwW9Nc&H+%GISBfYBF~x;a0ID1(XHtD4ScDANxB0uGwzbJRs>=)k3TAqqU)scs53Sb8zEVAPrjZ)JNT?`nxYH-Fl3JYWno_V~PvYnpq$GR*n$$yLZlX#|)&iTermhXbRYIb<&Ng#oWqOXD-%y#U*@L<5~3b8(%n_hB_kT#sFffieJW?quDQ;@ zzz7*Ml2wp2gZq`e+p{;{P}90-c!Qrl$k1n zj72vq_{CM8Yva(-W;EG(&3!4NGH7tFvg{%0@~DyN8>TT$g5&Q`!Q@*P{xYEz9@-TEOY00MPZ*BU(L4gELYL?nd|w|@I-)f4l4L`P@l z>kl#qIdFRVph?N&(wpYBDjOF^ctXRCp9UHzD=8~oP1k#RmFMbRq-*1BSiep})S(x5 zgUoqCH+e%bqi-3lcCr1!^F7fwoO)}fIEfPlPU$CBWSbmhmU&07toD#phMXt4zS1$O z`+vP(R0r(M(a=%vn^Wj%aP{no9aljwZo*vp)o9moCNgoj0U_8{5}U17TsMyZjt zZ&=z2@8GTH5tm&oa|N4o$@-~!OwmA^P7wmHsY_Z--+J}-mCrlI4Qf9e3HL?@mHHuo z8R$E0re6uo`b6fB0cy#`_RJP#q$|Kutg|oc3iS7#~LXH z9;{18c&BkVXz=!N3!4G$KOS8+r2qTT__^^(c`9P5C;rffIlDKR76i3TeZ@6O$6f2i zy0|jyNqVeYo}hCsD64n4r0u18QG{&$Imb z90G{2`kVks6^s>(26BA{Vm6&)iNjZ?Gxq+WP{$ct`_$knusxmMchC%xP_Z#Fa-HrZ z=fDu&hRv`YNV}Dof=WGDZe^LCR+6_)XYWuaG^EkS52Y9#^h%C1{Fa?+ 
z*SDTz0_}@#UAO8T=LSRyT%Ka;Dub}de2ZwQNQRdp>pt!j#*HWMMSKe$qKzRg^t52! z57`ZKSovFPRb;Y&qsp*>{C6&wJ?cIuNzGUMlTJ>|8Oi;*7vS%N_V+haRj$nYD7g}w zrOo`IT}YKuioZ)rf2ptKpmK5Tbrf&vKf3hOr|;Dh`N?km2|{Ot^fJfonHN5WUR^w> zk;%{GXH4?|C>?}2@^y#W-X#i(TEyyLMs;`aP5lQ`S8uq3`x?dHTjJa6cW>G;vb!+& zpC#F%nMm^#cxRQYw01;wPs8pUsFQi#e7UUg4mV}C**~?N2i5f*++8g{oH%@=4c|H= z7{}L*F#c&ZQ=#`kcuKGa@3^Osf!hhQ`lMM-x4VAEHKM)3Dmp_$ZC@lMb-D_I$?{af(tKA0Yd~i5Ibw!h|?{K-GUU113GDH0O$L_urP>wNk z6Sje)j?Ha@=uaUxBazqqO)Q*ez9vvg13Cjpt86r!c&i2rIQEAA%+?j#*4eR28n>C+s=-$KWs z-p{_MsyV4G67y2v;|PtE6ms;&R~i-r37dk4KS7p6y~PwKlAFc`G4ENU1`ALV?*_?e z%b7_GpQ~B%z7%tU7!re5OXi+YOUk454h(D@BFkjbpyX5J&8FT{({n4khSgG^a*R>y z+y_R1{rl<3E8X~@W~nUD>e!oW?Xts@`P_^K7o%9FAO^OAAMB2I4M%tpWlGdQq;Ij~ znV?64@Rb}s5c;;;sM346Rbi7-kGBP#wOzyW>!K`{m!1U@hr_+*(@&NN8c7eOdi1L# z5ZR6oBq7L8`DBV&Z1F?{+e&QvjG#{q!Zc@>yi{K11&dmLAkcp(fDly=Z!|cvvG9@I z3dI?xXbQRit_L0;1?cK;@IS24$41xCn^lq==T2W6mIJWc$jJC(pP1^Z^@UM6+rQgMvA z%$#{Yanb$jBY#{G%%NVEg16ZG`78@JA-x;rYYV#?vGWtE>2Bv0L_nCP4zBcj#Ru`R zBzsg1?Gy!q+o*za`Pf(&i2)a!igslK?uS0f_UmK+9fTS;_oeRx%1T|Uc6V$-CU!qt zjR{P;3R*wN1>-C$=IF3JR;PP9V&A}HrJ+F|Nkr{#H*ox<-~AtPQ-$(2Y9~8{-HD{T#@NrfCSr)u@guxKI|37#oa{82j{BKIZGxjY7ssf8un}X2%-z zndol*2S9Ib9JVG;=sHW!2i{KxxA_*tJ4%0)4{BYVU4Hbju`abP=fil9R-ndIs&IeK z0Z5}uBG}~&ah~K0Vy?tckgKrIIhgSVi!S(Fytw+aF5_EfmXs}*`%d?~IG2WbIRZ$$ z{u@}M*%nQBypv#!xsr0T%BFBt&!o@5mOj!wqcGgSpiX)qPm6l_%Ui}B)0Ow01?G?) 
zW&yaHa|DF^W_8|alRO>OtV0Y;Oz*K`UWOy4i9o|M;pLh-e0&D$b1@u0NB#Wcu%;5^ z)m-Zs{OfadWvj-9?H~I7E_2cuTOUCrp{h5D6~nKREs{?nm) zJDn&PUgSpFsIQs&2$$vSL2l+7DU$PFYxdOL%ee9}X{-Ruw)5Q$cFpUY;98x4RZN%z zvoUP^<7RspsXV!O9#6I}+MX1Y>$7C$l;NtDBb%i@BVfmC{D;MA(NQSlr}d3V5o+P= zab_GOmvTVjg%?>CBxlde{Nx`^DRz??9R@nl4CwmTn{5oR9fNY<ejwGRIh?#gVR@raJyxh{gFItZ>uUHZJ50x_oag;wJ=Xcu=;@|?p6 zm5&IKpFPThS~8(E`l%aF}p#`GX9o=#|>DDK#Fg7!|i4Zxq|g2L8B)j@7{%2pr2s{`;(KWQYvOLFSk zBmYMYpiK7QHT%;M1SKM$l6C}xSGBrwBU$SBZ{Ncc9M2??6-?GNT8&>UH0&7tACsaMb~evM$J!ABZe`=8Mg+m@y!(+n#nan)@w~&v-StnUixyq zY=5g#4%q@O&jxmlN|g_a$Zf(Y$PKxmuFC5X<2eaq<1x*}kic0uTo%*cy=G+L{vLx# z^xi`CqhbS70u%;xFgtSk?o1|pumvT7CoYEh39&4c~u z*n@5OyV$LiEa<4>aRpx=$Qjg5NFxLp$zqoa(%b6P)HOx)qFn+Ta)*DMirKM8w_qfb ztTh^BDOa$^RhNuz(yaRd4Y0D!Ca9)C3nr$j&iK+=lkzb?X_%Q`!2}{eW7N@qkl67wVy$?X~n!Ee( zqx&#|nzQHAt8{1>E*zi_oVZk>O%encgz3ADPTko9tCcl$>I$W^ph~FNNX> z!Ob?fH%O}Djd1DR%W2Tjtq~p1m95-xkpAP-|ie$X{Ou%aS{KPGjv$U0bynz8Y_FgBS7Y;Ny)7smuv_Q*6O>PtV1 zYxl!RIL28;dFFJcrb}g zdVqZA4CLpCg|2jANPp;!4)h#yf_rILdP4Sc^Oae`&a*6EsIP{$R{Yk8pQNFWmKloy z$`D}xJdXLv`RB#0W6eD_?o`$2H70f=baL+AZ@0|RBHHBrpoXdg5_@z)+?Cb>-L|0& z_IUX~a2@q?yE0sce^qv;?CKY=rz{lPz!N5^Yni+rzRbrDb`0&fw+Z($&A$4TNByz+ zQxOrdmt<}RA}xDm46?peobO!c?i}qht-ok`ni{Z>8eJhT_e0cp-%|mAglv>GX~|$-~(cf)sSjUGN@Mw^tjC8`R|&iz_Vx)Hh(~Cv|ECUZ$_%nZBSoy z#=oST;ABm(w1@<^pyt5IrnjbRA@tt#dru3cTXfNd=D9}OzaqJMvN{bRb9Rl6Hw;GH zQzS%8tOv3R+dt=iQBr#U_!s^4_`yn1+)jdU#k7qtrvLf(`3@&eB!3{5M%qib+@`Pg zjE!+>a33GC{&}vb_N1;8m4V^y1J4>QTmB@zU6S92fuX@gZBIV|N&OqCg4=hmB@2HF zB~9N8;eWLI^#JoN>I_~W3Mjb;X+V`*59&Aayh)`Waq}?wN25_=hMN7ynPbVHCUkEG zlU;`jMqR;|fj2iLP~3JZ&1J|k#~6-Rk`}NIx$p#k`5+#kU~8|^qe?K(F8>U;9~D(b zC>o7F@Th)gf>bTvRz~DYyx;~AZ9qNr|FIb%$qomhHiVxE6z5#%>%ER^LZ-O0X2NL# zfl|w})(fZ2#C+O;4YV<%IEgB|5<2@z+%l5X$s$>)XHz&@cYlYn80d+&8LnAI8a{;V ziWI4+Q0^RK4l0({r??_qn@KKAXDW358S#yw^8m7k7FZ?~Aj)&Y{@= zc7UXoffLcgx5+*j?!05|L!7equZTTIW>oKo^&AaMGp1ZV zm%17kCcfF~fpA=Noq}C&)nq78=z-T%TwRQfzZypuVXsGazh8TC!e70Qek;U^A{svY z_eQZ_zA2;~cHY&My2l(~*24oT@{uBhDLQyAzPI=_{^Ww$HD6!6kUYVytm)*@Zc50= 
zdRO6Da$ThHvI{GF-3bntab>sUg8BG*n5|I-k`-;JYMF&sAczrfW7rxY3F-;K6(B6>hmP zqK!YM*w-K~h}!I1#idQz$vZq)nk~tK>FMcJGejXDsJ;EWD!p1< z!+Laq3hhR!&8{E#_-|Ch9FhXd>Q}YRP?-&xt(c3M-ZtwZ%k*?cAjl~KA(vq zd5ZNkZv^!xw05!2>FQ-I>|pQJEX$DR&pZ6Dv^apy-X3z(s;{#I#F%q6Ma46&4at($ zu?-vZFm;rxtc8J1r*61zEDW{#e49|_#4RSJ+9_VeBXd*BHJNb6pBS8T2YbS8L&DI2 zm-E~11RS-SsVF$x} zzCQp_U`MpcG1^^aC1G_}A_Z8JEry2SB?(%Av#A;2bzkB=(Dkxqu$x1ORQCu6g>c6% z$%*v%$xHJ+31?vy-q~D@;@Q)VA$iTq+cgX4u|_6SerqOJ&67)zPs{1xDI-iamDshU zhfco~Pt*iOC@INO?MyX`ikJEWU4{f?buBP2Ud2>yaud;qMPU|$InLQ~xQyIDn~i&O zsr*SZxFE`g-^{1=DHKR1QPriNZ=2x1^leMMqjY=RT;8trK`X2tL;C$r_ASBb#oYrW z7;Vf2UU5h2-wo_RCRB2+g<~ss1bPZkm($29#$j|J9gu3 z{G^$Gi=vBMhs05w$HT-W0n5WsgjuyG*h5wmtfOk@Kl5omvqW?F$yUBatwC=FeVsgP zc9iMDg`Yl+Ar|zri}jpmnMGVSMoQu8VyjbgJ`h`<1^+{VguNKK4+zU^`Eo?|XQ`mu zcaC_kel+^A7L<`~3LPmK!^-gU%g?%pr`)Rt7fgO=G}*Mu$e8VJ7w-&&1)7kVH^70i zcru|tNi^_`;I#cBBL+~Zx zq9y@7+UXt2PAhui$4oNDvKtExk!^HTGuH~+pgmYgL5_ahdYD%@ktud~-PJNQB&4G##x;`S zTBAq#%1q1>a9e#t_WZ6ZSg%?vL3v!BZSP-x4ubZ9v|#EA2gcWbl+h5sJ~>4ihwtb@ zJqEuG9f0&$E_I3{8-M5|R6+qFJ-jkC#Tkwd0)@@IsJM_(LlQa}7EG2tHsD*^eg9wW z`oDhN;&Z=CL;MEr=NuEi)572}(f$?H?IHJeI$O7^? z>hR_eM_A}|3ggK$Q&&$fP@E?7M`rJ8n;`4RcF`YV3vCYnB;SzG!)B&NMGqAc`GiH4 zuSFm;^MYR(IaRyh0w5wuj+%2e=(he%nd>(eCWnc{vEw|WOScgt4T1HYWc`Pv_OX~2 zy$Wwz$GYdw7cdPw%g{zWY4OmIeDR~J;YKWd!}t4DM>-%yJ)YaGc)L;P{U&21Iqtc} zy!k}oy8Cct_1?A^t2k3~=hx$z8D)J#f<`mL1nYXOK`|oj38Q(8D@P}GNd(bADs$Qs z{^cg&VX^v>ji`r!&k(*PSn$(ITF7po5L9tX&q|>0HNI2=E6uS^9p4z;r3t3`FICb{ zZ!B1C`m0Xrb=LJatY;T5b^?2bkYacXD;8%5q9z;TlqWow%Lg@|kR^YkcN{oVz}YW- zD9;XxkIbD8B(7D-5z!|r&omUY0(}lh{%Xc|yUvK7`Y#xa+1Tj7lv#02H~O8~d~<;7 zGO}Q2aW$}ICb>_5?^JDIhl^+T;?kQtWQg-pL@8`z@&2I~8j9c)hi}pw*!VhfK*2{} zA}Ih8zKMnY_&=1r2T;>p(>`oL0b4{+M1)AMii(KT(0eZ;f&>+$gMxIDh=@qWaO78*bp_c@bkmUcv`?+s@?{}W}n{P6cIhjd2d-m*Jd+naHfr;dOcQN)a znVd?nBn!l4h!GD^OSi24@5>Z^CmD|Lf5-X*SEnVmE9_L}MSggv90%7rYO zq_mtPS*(|E+dB&hpTXJM-*2W!QLtDb-y>r4xp0q<5k9^`WRz+GZf&8omaiqpirw5! 
z8DVb;SZW*8s@UG^DSq3#f|n>%b-wm|A}}^tY?5eKGnZoGM|o4`X8|%ich0(ViXr>_ z1vWi@VsQ&}sPJ0$LSE-YlW)oZ#B<1D&1c9sDUz$kOJ|lz?Pp?nSw)(e#PPbPE+j3UX?+ofZ;lMpAS8Y}#P&qQbaUdqwWFHZEj z9ZV>|^6DYsb3R61~~#F&krVk7qIwjWi=+=h4%qM z;mV;jY!%z$&%m7RuJ!?)DeokSY_T;Fp=Yh)Y0zOFOoq8`ZfZMv9}X`-?GkY~o5#L& z+NjY|3ym}J*nvCJ2MNFJ*O7f&wMDupIS6H6-v(={(-V4^YT3~jIV2ipCxf97nl+1K zX7`k?c6imI?MTbE=(m>0j8))H)rTH3CeXE~(tR;Er7)B`b>qgV=czrD-VN$^jY3H~ zojbGTHKHkQuXt?qH3EE{&1=46iDh-TszPCDlC@F8Q$ z_DXklu#}xA1)cA^oLyh#)}y71Adjv3>INDbV`SX19=a2K6A#`~QW6DRX8B3R1Vr_E z1Va_ZlG+il{}x!4n<7|z?evWYFNh;*Qv1; z;zib_@5(p4YFdZ4Mf$kj_joRbe_O(dkLa%v{1D23JUYyv90$5~(2HPNJX8u}OK5x}HB1gKm zUHVqEDdHUAd?n>^fr(PYOs0!BVs`H|dVg2ERLVoeL;8tU+J*d21vz71(x4vZP@Cq$`Q~Jxk6Yq8?(dWVk z2~1g;cyLo;57t8{fUHA^p5J9LCg(XvNUocL7P+8k@fySS0<{H@_)X<$SBl?}b=kdGiyNN69s*1BTk zu}}3GtHZ%=IbD~;d@r$I7^y>R4?276u={!h?k|6&wHgUgwOXBD_%^!29?McRof7OD zv2*WZYu{A7<9>B#&S3D>jQojk;xwKx7)sGjX1%1iye9PtN+IX>ya~~9d54&u_qAm% ztqjU>M}Bp$-w1QtzL}Q$W*j?2cYA&ONz_Ox-4XapSWM8C|diQ;we3UiUWf>nMS+}n9TUFLnCNVTwI%c;|5FH?p#W0#t>XqV85vJ9krwont<*}pw;i`)YshN)_$JJCM}_Q+dtrEiOn-Qe*E*}GX#SnG z5vB*_{-_|6*Xz+>m1A>C8%_>=TYwc!C0=|Skn>SzFG~uVoI_gh81vaJtIcR*_%GY( zV9i{)^rFphI}YpiQkU7ak=n#1#VqcBw%)}VnlsK2VO7b9J9ULFb{<@g`Keq!u_;q^JEAyB0v}k$|_dpgxPlMsacB3FD&M;e0bAdL|-Z z?`_iTGMXZ9ORhXhnvFrh?pxq%ncHbCc0Ek5J6IUNOh+_rJMO-Zm`ce&v&h~SF1TGt zKXCuY?cY~i;(GHIOeGX+&aEa=gL1ycEEg}I6?6F@oPhHpk!WYr_}Jt4 z2Boj|INldB^IVU5NPeaAC^j~kw^f2K?OHqdveZ0Ceo_557C8#_x6@6=)9==h4%#uabpek{z#SJX~EaCq) zq_cqyZ=Cwkb7?kNIi3weOA>w9S+NC1Z4B+P!~bXs(+&ShoG+{n)yInKI>RIMw&46p zgY~y%Uln3pR`~-WCr#{cCkkpBWcxd=U%AvF?bj!JzjYTEW^32opLAEJer$VD>bA77 zr>rY#+;7)utKY3hW#=2R@b)29r~SJ;@!gp%5$ejO-ah2r0^K-i@<5VZ>wxhzE6$yN zlL-gieQ(nGAaX*a!vv+Z5}N8dl-Z#~MoY-V*F66@PO~bttv`S(vvXQ3F?SQc}ccO*}N!rVn2R zRX|!tCyzxws-ng8@6sPjS+BoqF7NdIT9%B{Xoa?{*Bswm2ZVfgG-YRE@`I$Nr>xR) zTcBsPx7ro0y1SPr&^LqI#E{hn7Czm_9#+8YKoIh${T6Wu-#P^q*)F3i9fw?=bKu~& zl0{3_e155M*&CbHhNm>!vM&*p<`gBQ%%oa{-NqiUejG@@7PMx5_!O(BW3*5)DWZ1Ga6JKoc~*An6n>$L z{qm&Uj)|CkJ9rHncA41O)~Aw(h;wTe{O*J?7#M+g#Yax6|$Fm 
zeE+TctY_V?C#BdZ0CTc^aY|1Z{?zaThG?l_197q z;dSDc>TFW@@|Tn@MSGvx_pBNy_^n9WuDd!n$?@l(5t=_hqpHw9sUHk7w}+fZt{_R% zLkBii<-il?@bsZhWVM4_?)cQ|6D55O9Q&(SL-JjS(Sqn=NlA?gtYSVfn~nM-|3k0A zK+i1;SsQYUBE~wgU1GeyT^8| zIsYyDKOm-DGmYTQ;^>Eg-D~w5w#0>=sOHc8h`r7di4eu#Xf*7cKfa4L3*9oYXr|)V%TuC3x(>8J@3ltL+T3a?UxsJ#nVxUDn9{HhEK<%7w0;i&P&iFU=P!uo{e8 zy6M*b9Oh_m8xz1rYYhY)rOwMLBZxgq1Gc3NjqEF_#I||p%?Uw~czAHrY^f()ts8>7 zOqe_ki4B*k%r?kFBu&Yf_$gQr_L)1ZnGyGrv8?YT^F~k3OPz=D<;USpBt*v4`1oqY zznYm1zN&NI2xfT2818Nj011FU>95sRtk1L`@cCd5N!_>F?y?sk;k6*-7Qbb;iX73& z(6n6z3HTak9X$d7U*qizHoo}vcoE88%2tpRl^2OvZ?XQdQmxbWd{px`xUUEkE%?5E zBJ(*e>G14daX}^|UMBmiTg&8x>(T&{xxy4OqDlU&u>4AWG*Jj$%^?*kl6dv>Z*x7a?e?Rh1EYW<*zpkf(L^Qx) zRp#9uAHA;}1J-mfign@DZVzczr=Duf4aiKpvAxEgtiOjjbA; zUwZQcT{%zn(-JBq~;%qQ`Hwupl z>^|U|PTcjJ9mQU%-ZOTI^Q_%JZD3-yyyr60()G;NR?T7y?2<59&!zKVU0sr_es?k@ zr(1L&(SdB~D-V;GxW_)COUJ&kaqglfUhl5{HFw68tOd)UwV(b@L0PB*Xg+Ix?SuLM zZUqm~lyd7OSl5;3<3Oj{uI@_5>fyzhJEI2a{eGkzdQJ0BNT~OXMsjo?WHYF3+fRDi zKURW!U05t4T@ti;2mId8xUpvu6L0O!`n54e_K#bl7D-obYHZd*KvVcu$&NUYxI52yy%z&~J=zRj{ z1r>0#XuRD>wTrIG>M`8jtnf-kM)73M7oiz;^&2K5U=9bw|>Z4BD*Y_iPiC&oEGWX*jzIexu^kR#LcgW0`zVvGw+ z9%P=)FpjjbLXY5k!g4;=-^kZ2by#1k9VxIADWa(0)iT?Hb>px`WU1SJTE+W>T~6_( z79qjeRS_y;YiONv_nJ4k$-vC;@vi7NH9j#>)6{gwq#%#Tm<*QrG)rj^#k@DZ7b{e> zyzs-OU3F<-iDOQ&#m1c@XW4}qKp8LQ+n=&2A|)US9PGBVHx;EoB&*-j|Gm%tjuTtc z<>euGfCO~ysrZPjDN_POqyi01wzoGysz zz-qFxc$#+xabGLco6+pGn%vi>HoPz=gsumN;Mwt#cGJ(c#)RPf3#lF+$0^seVp9?> zh*WufF};tA-J78a^mrM(pBzppEqzm>SwBiDmYAu8wg*Yw@hf zzMofljNe)TK5T%OkUJDj;bJjgpQXUp8_D`zYKjj6lyE)fk-6=>5M%Q5?aVQC*?J?Q zBJRnvx=t@eoHu854p!HQqJA~)#XaWBANKMw^2Kf>A>Li=Txh&0gSmT@tnN1jD}<7f zSHsscHg=USNu2L4gu=L==?7lYjp3c3Yn9ll*$t+(?xECWXlpwp31bMe=g&p9h&qZG zrN+L!Ad>U^r}}-Iw$jX5+2Bh>aegBUn%%Ox&cykiIYt+>lF=<}?hsI#JX_BP))|8+ zE&W(hZ)R=Mbc%`eS(W*oR)up~_vdE7EgxaPsZRT_$m$Dg!7K3$`Qo9=)}x?-oN7~v zYW(h}>XYuT$m@uChnpFVVA$}O2SyIP--Dq>Rg#k_He+Zj2ApU42m?-Wg>MYsyNXH- z-j$vp>XWyzTWPBC;;R!+Te>EH)^A7F5@1_jM7z7bBU!s-jU%G+AsGan)o-!iw>Y+W zK4!(7+Zy}v*JUH|=uP`#zKWGD%ME8eW|2>Ri 
z@P7#eSqOrPw{sa4)IB$M!@KKP8`pw$zwe1ZAqD-Bqjg7JXEzvfM2vBR0ZF`%@`KqT z9M76^PaOt@Htdzqx`{DH2C_gONm1hbI#Fie{X}Tq)h1xiCwCOb1&4R(6t3-zoituj z*NRNcQJA__NBluPom3X^_ZVv?`79ja4CFMzN{JsQE)Qw z^|Kmaj2^J?ZB+JXxrtlAONaJe{y{aGi8M&;1rM*z-G`JSqC-~0gj-lg*on99?ag}1 z1lZi^3GoUvGHOaSog5ildgt~VPyOu*{+9TmBh?K2tt#ZBLB2}L{h<15(r&KT=~;e9 zK$e%!F4FCDV8UB%T3$R?kmp?PI1L)Zehk{Q1~GIOTH-B+@uMKNxW^1B?7Dp^kK8IS z<|L#8`3*%i*ZmQ3p5RlzSGYYp9^3uK(Y-~M@vzi;v<`yPc$g&*%Quf?`lt^T045om zV39-qM*J)nv)^I@;TglPl#>=9!pUN~_WAj2E8XgPNOZDldoAR%s~(!m*T`2lbzgW4 zFc+zj3^0La@gnz-+I4rAZQQufCx-E|%eom2eZu%@TuD;Lth<6*Js6qn676$R z9NJ^S0mzvYN z-v8!~x-obBJy?!ap&crmxz3~UXfVXZb%waV`p>DKa*64P2eoyPo)8X zFUt=EA3*4#HRre7Q4VV-p$rV4aSQv^hZGLZE{qX9< znAWg&1YXRc{Zt;n688xM?0H{wwWPcl#A!+=xvHPpDMLwm5N5-al)8lKE zuTgRz6Pe}GrI-kIU&Ffm*!Q*Kh>qgbY#>VhrsC`7BnYDyJ4&C2Rh>l>KA=wXmwwC0 zRCHa{rq0YO_j{h8*+L*lU3Xu(<-G89ZQkVT_>$yiNTjV&X*Erk5pj7S*|#6X>!+TC z-F};|8_7`meIX@MB~l_gV_mLA-cVAMG*Q=8#C#_us+gSp1A^I8Xh+uC=DmZDtNXKm z`dJ}Px$hIPbDy3AI3^`dP>p2Q4vidhZ8bf!8f2@fb>auw7GL|#m9x}(*MmhiTV>%! zdlc~@0ll|;`dZ%$eAvEsN!^QUJg+O(nKU!&4F?Spk^uM`KxnPr3!%&G&zGNKi*pitg8=RB^bI+n_dWQ`(irHDh2lS_~6XXNG5^6agpZa zV|G#{Js9=K@@FjF z>lVUBF;S|?l5Ma>NW83W#rHlV+}ABAz%AbVC?NfaCc(2ROhgfR;P<*eFfMAFQ6{>LOWvTtCh14>Ayq<;r+Ew1hpw0ge(0Ti?C#~_Q zaSU@Ug3$cx$O2MUf+PG-w6m$$KvLbG0`CO{$uMXbE9qHb0)uGr(_(LanXtm`FBX_@ z_=^R(WdYSK=^2{$n18@PW z+{-;hpm84ivh5)R`KWi#d1MsrU0L3i2)6A!x&ms=m~)I+3UWJMRKdwT{9Uq zSf}w#Yze+j`RkUD(9f+APbl{4ycBhP86k}LOu9Q!RYIKnB1m1|#Rx#D1EAWr`fcWS zt_ih^VGGOUM#S2~o-hqZ4r8*`_Seyk?C{EZOVVV0-nM1E31uO3SK}Z;Si?iwQe=1K zt2d#D#Ci%GdBVjzbA9Qw!z^_zj1nK_rYqU^gW)cwP3s4w&mwmY< zm=lc;@DVN21&${%X#$a$RE|mqd;4(idflmae*_Ev0syV7{2HV}yE! 
z>Ne&K<_UpkQTX+JO3qeU3M#QU&6!VPc#`OpGTRP?>&jA6JCv3Cb#WSRa*8o5#p{anT%>TMt^QLX%-OX>^dFpN}!dU<4z*-a`4l_OPZP7A{w`< z+arFyiPgfX=01xN9bgt-=P%WLNw6SP(o*M`Oc)l>+cn5`G;F?ZD>enrGgy3v9rf=-mMBfezodUGuvAL5s5^3w{!D^EvO-o6rQju#M0NF70f_*rzErY z(E&+%dbb3UH~ZrAw>Q+o2)2IaLU7!H76DkQ4=(^6CJF5&@M20sPGI`kv7QLAo<#pY zK+gXfvM_8QNXhyG-@fdMR8hQ^x+JJOca!?f=d^$RLE2fqV*oTsoE4|dEf;b3mD-&K zd2Q0BJ?)QcMRG$j%KV#t&txA`&Up_NOmd^^aLunc@SszzC@KZvI`aUL7|YBiVrZ7# zTCN^zZE34FieHa)AG?h`T|$3encG62BS3NOwX@+9z>Heq=5g;wjZwMJ-fq#WO8K}M z)fp`<0ZNP|v~cadj6@-sfaJlwJA6q>#iAV<@Yw2qk;$Gq$ z*#xd}#-07lauxZc_n;Sa^WS5PfddQcGJ({mRS&W>u0=| zJ1AWOj4Pfc5nKz4IgZ;N{l|1)k~i9+K2*NhBIth9`e7kMC}A?vZ}}vVBgnhYrlUWh zt=9;{PXe(;Jy><}VMMrK7!1cHR0wXyQ#pvw7WMF@1za}51!80ox59tsU6z!{2{EWP z;^>5{Qf$vhxm}s+ZdR;=S(9w+n2l*QBs+Fjv|e;X!vBx-$-jpFVf3?3nQ#+N_)SDdkru?G|jLB!#YSfm0=`hsfwN;_-NZ2Lkii)c3@(Y46~&O zjhlB^82B2ulG+*UQ)kob$JlGf+CnAuSzpIGT{<&J+A{-KbCrS}OW%jCCG{p}giwPy zS}Xca4OBv6J(u81Vl$nYo>M;b-o1}&EH2vuJp->PF7Vk$dm9mV1`)paI=Mi?=#1_w_JNpbwRpG_JaH`%NcN z-BSdBS%X@5bRUzH7`Di;TF0gSkTiyG$FDEj?&;TejMg+)dH|qk<0gOdRqD&2+ zw0c4a=Lu8B$b{~+;#Y#y`*3u*)?5C>x*0_~QVI=^$IGO_w&tABawV|F@rXh%4&-}MUH!LcpjU*hiyuO_yT5rSLrX7R^A>Hdjd|A7Dh8747q z1gW>x)*C(^*$Q{f%^ZpzXbTe%d(Vm8e)(I=E~05(N1pEZ;h))ks6((2vf(TKAWw@c zMJ-q5_W81SO>JnDNqq~rAF08sHcFQ?`NmPrAUAx7JM1rm7itqm#c{1bvpA4TTW0 zMTG9KL!Z1kD~!y${JuAFcA%=JPWzW7kbwdx^qZDuTuAA)KrG>I?>%N1oM)Bu9(K-H z*3j2Te+xzWiRcgrO*o#?djBry(EcI>z2?|!67rttaEX3C<@ti`@h5z5w|GZdv;71m zCg>)XuqP{Rut}N0m)BbDx=jpa2W|{gCOW`s#ypw(%&D7xgwY_#azA~g3BQCFY(V5t zZV=q!Y5Rx{bj$mu&8QaHQX_wpyWEb`ckF;8ndRf(;p2`24fk{)cR9Vup%&AqxWHR3 z&5qS;p_qi~KAW~Cl+WX=*9YO$RzrKsJ7XbZ>U*YRD3kzmzj*KVz|Pmp+6aJT6+o^N ztL035ya$Q!hO14ROBr*{k1c_M0F26@EmS9Pu8@MNqfCK{}C$ zQ7Zuv!yAvi`n%9R|I?uTKIH@dj|20r~B*d$fj>gMTT?PKrUz9tBE!1V=+h0wfr|*LkOIz z>!whqc-?%z9J^paIBp2GBy|8EcZL^vwS^|M zM?!YB$uHVoHP8FyUVg>~^~3PZvVsXOm1M5R7b=Pw4{T+K*J{Fjn?TCSizX<~`k&$N z*$=((VOv{!)6U5T5N8c&mkFa1qB_gY|0MD`^nAP8T}DmP!(@q1QD&xn zXOjURo7=kEW&yM{avkh7oQv;4>s^!HmhJ%OpST^_cLB+duHEGo`sO&Xtw)WVUZJa*gbc;+vCENEyrw7uEl6a>wS 
zjw^&@*LRUV*g&tegP`OwhdF;>1~-hw+Z?7ltGD)n$yYtt@v*Z@UaUcfCz`azgDHi= z@5D?fe(P+dHdG5VZ>D4_h2TK^;MZ?*scde2UHg5U4ppULvWy)}22*znhM`!(dg@5C zr+43-p#Qqr4gntd+v@H3E7I4<&}5-8+7qwp>Qb(LO;voN;!xNz;0* zd|ydS#KYoo9Ti|p;_GO~tgA|=@wZ1R(Aog5WIUf?F@|!gX^$uiZkHXM0bjfn7r&eq z?d0pxE^m!W4FZaaOzaB^lx3`ppoV<0e3h*~M^0Kbc1jqqpf0aoV=gBHx$aWn*y3sr z61dIAWmKF}>;8pXV&+q#|2rA_uVwN-$Mba3cU|Km;j1`;m*c7k0}kxv_xR<{ zZC&KMD3w1^Q}EdAvMhXY>F0lbOLpB1)A zV;*dA13jzidTtcEB6_>uq$^9}a_84*PDLYhbUvwE-GF zil5?nA>(B?1umgoY1BOAyV2VKv9brjeboMMqU)VF7Im+VD+4E*pc6OJhdtM;{)dy| zKYiq@*n-CiB_Xpn9IP0?-~-8;O02!06i0{qv@$0MNJ`prME$sb?wYv{j! zuGKq}(*7o#NLb|^{R8rZ1iuI#VrE=EHS*bKqk%n+=b%MX0^7m!-qC#y%2`D2{vn{D zz`>(&_#UXxV&ng+jwf(O^{J5k8#zoV{e!P4@1E^{`=1P}#;b;skT|=#=ik(_~mhzsL{rez539ZQKeOf%Q zg!Gmmi%Wa7zZREnG(bGk?VrMr-#+Yb0k(qt-`&Qd%z*`Dwn5$z_@6}AIqoT?_KKM~ z&avo#=?t5rz}H_?)Kdk%x`DkgxWVsWo|{2?iu)NdCqF@MM$Z%Wt;Sy}zke$IT&ngD z6!I_1<%OKZ!JMhl@n+{F-I_NU8GF_KSa5#rBX?iPit9iPB%W}ZXPa8^BjarHwppBf zFRkKF!pNace${yKA!1-XyFB^VsFA{SzqQamgAPbb6JCeKNX=mb} z!4QUyL*he}`TLMQLR@~GIZz#?Ti=m`MP#qe9i&H-@ScTTrCt)Db*?wAex;Ml(_FaLWUJX#IRn?1qdcB+R?wKvk0Hu_m1 zUYUAvFWeZ#-x^(I9+eIG|2Yxu`uc@-&+=vFFbL$26@CZ)UoV7>oV zy1)O(6_X>TKssZuO`F~cvYwUjuZ7eN(k8U7;FwToB(yQ^T}h#L5{$G!L3y6D-YA20oT zj{`s5Vl(;3qgKO8XiH4Ga1R4a4xs3O-_z^hq8$3Q$4lAo6o0=M@GQJ_Tx9rK46aw} z#J=)Fva%h_z0vl+PdkjxzwkB|vf1guGN`XNA$u*Y?@FJU!#GX%AM(+Nc|o7Bu5zgS zr7~9v-N%g$Rk}c0>9_(b_%PkA6M)?r4v;#3(2DM$J5~?#xQGy>!RPvsWqEaYKx6uU z|8b@Sxx1{mukV+TzOI@NeM3Bc>o}0aPY+=8@d;V+fA~_>H)d+a|1elcA)A&SKke@i zvjoH4faUk2*+`VV+zEo50T@=*Lx}0Gjr$uj{NC(`FTeS38V2ZH6qkR9BNnnVoBhgu zA?=IG|4a#<0&GrL>}eWdU@sDkH&ub$3w=qaZ?a<@#N^%d4yx zg&t&p|N4XeFVFI$iiO`;#3@*o(^>8RaPGrJBS}Krv`|<6lFPdZ*|wudXIGMmV#qtVCxNTd=GE zr4-#3ELiNbZsTOOO%BCXrvG4ACI;LW;=~Pl;>-5>u|bk5x|4 zVU~FKxwY3$|AQJZr)EKa3jS+vg>%aDBFr@zGkn;9bFau>8r;x&?jrjjs5mcZD5eg} z+!gbh2TOI#d)LZX*mf^k{4>qX$y~I@V>5+F-3K>0e!00TQb!yHo!W~zD*&;+=Y6`N zr>eh;>{h%IE!P>5<>DGu)S96%tUVPG|?E%x^{+45-Xc8oX!dIQ1P4o&ROD1SP(z%bbT2 ze0Jv)_{ifpxpjOUFryk^M!Ya5+7?@pB|yy%9JS>N*5`&nyex=4f|M 
zTGQS}mxYv?6U3u@Wg9rhcTOBDk~X1d@cp7NjY>>IZLIgG1GjhIg@YFf#_9^`VOFX@ zQ`lo1mYiSmA^3vU+}69V?g1c&o=UPPC$PZ#? zaY8&om1b1qKxMRt+l)uvGfn#T^I5R* zw-3S=56Q&0wdJdT`gctK;r9PS}F!wM3dO_ft&#nA>u95`wE)O*rF;os=>)LzJ? z(fMtdwJfMJ+5`XRqta9wW}WFaHu8gz0`!D@1F;r0S;SmQ+y{YHfS$#4zpRboL0{Tm z55+awPv9xIrQ-zXZAB6njVsT0sVlAvm!fNivImV!^?40134O@!9Sw82J!fJPx70<* z6Vp2acwafYvwD%9`CUXT0yCvw=M8oGxxUC22TMH~efIXsd41}-naEb@2To?;5Mhnu zfK;y>sguHMkYKv%L!X@l%=l4x(OX0xfzTG^pU(seO2p?)nw8r-iq;3ZR5Z6`q)ICo zfLhR?6Ra(O(fqap_nZHECk5oXlY7~a6(96^NB=nO-0;+kQ$TN#j}*#xoO?;8>X~cM z0;+gwt=xU6SH6*MD%1T0CAu&g|1ba(PQ1OSjc6?N5fJf2>#+yx^a20rZvfeFk?*?j z%|aK3Rl4X_0Cn$(Xf9s${!hmH!|o1mlyH-WHOu0r^G&P=`;*+a4w;_3Ah`VBS&c^v&i-F zuC`5X>c$1(gw1AN0T;)(QmlPl-<=Dg-kkHG&}c$Uib5NP4VQWaler;t0+Trf(r5+e z2Mx6Mi*y*7kOo_@ETVI^>kNQ-jmzjhBQe^?_<7!=aFiQM{Z^KUJx$u2Cw_P3R!EPc zxjB}L7I>CP`8jera0UGN4<8&7{et4!(D%#=bV{l)Nc@ zUdy4lcdX)!JE+~lw-0WDiPG&#I}^POX7IKQXjRr!)_FJGp#p5&ExD=1Xn37Gf*Mfp zrK0JP!H;47^Woa(TpXG7^|$L7(s7T&1~Jk7s>}flM8_yp!O!MTBWXI`7L z{MY?UwRTV^gLBbdI*)k;`A_~Pvh|M8^y{9)Cd!_vLHbAGhPXExW#585(oZ%*{_^?z z{*&tdkXd*%$;Lr`YWoP1c_S!u0kk@H^UT@SR}Fnq%D@ehoYX3FZpT zzH(RTQ;pPHcN5L9K|!l#S9}jEy$&ipwSAby_u^x`Wz#he6sXj_!+w8gc;-fi?6nr{ zg4mN0Df|n8Atllf){dx0Syb2BLNp=lMz(ZE2rK_V`jxA+~hw}$}bU)cB(IorymUnWy_unImPw$s4xFZ!r6D| zRyb9x(p$?uH!k6|hTZaFqPDN^+|%{+InR|{F<+&As!1yMRy!li6e#AEJ5Cd^GR`OE z@>!R-3qDh1EM^_@Ek`T9wDjgeHJ`g^7C9I`1U?pxgPXab>A)+BE;SYNOxwq(FA!7A zM{hn5d2DSU=AQq}?7o#H#)4(Is;5c?ybO-3^SqWAVI$trDPE_RpQE)i$0c_KLvs$y z5^S51<0)6UE*lr6sYIsX{>^&$$M8+8uh;&gmGKKxK zIz%>485;o=H15mR6u$ceF*s1*UyrHkjuLa^oPP{%KHJX#=tc?ccGKBy4pqciAyicU zl4#c2rx_kuyO>Jhbu-Kwxs>o3t-g@_MvZ3j%R9v@v`6e*`k^&>S2L254DCPg&IXt( z$p2XqrV;XjZT$;-*?FF>V=qpDhb|duF#*^eV0*5E%^eybf3>tuJOuP&pSpi^`JgAe zUMpd0pi6@09DSif+3h8lk@X&j+~O$1g&j0-*_2{TGy{Agb|9(@&w zUpk18=&yrS9?5pTy~o9TSzJA#+cqly6QZDXt0*DTWxY~6<|wdnV&q6LA?p_KdcySM z7C-Bmt}cU`**kejVw&xO1%=X5oS)9~O@^mOIwHOVM3Sp+dM(li03hH$!r8I}SsM=+ zh&ojEU$fw}PsP_80iAoq#|21WAkhw-j`8N1qVB{i;YP!QS#iugc4`Iz!hZxg20WJU 
zUo2(@Cs=a^a7yW)otg=L9|*hW(YS93Ukv0rYaIpjch~dqM>IgDz|WaD){sD~p-7VT zCmme_ZTTV66Imv&M7?SVU{!|uq0~%}=y zr1FWm&HKDOW!n*5Da9T?f$;r)&MQbRjeho%KKe$_ggo0HuG{O1Ws~o9{U|NzvsOZa z=cG$OVjW2U8bmOi1=ND|{PABZxR&?{`V$^}U_Mlda7$@F=EjRBRHsn7!TUeZAZ>Av zt(D`Z(iPpGy|=|&imaT^m3AnkHXdcSI5HfNczpZu?20`3ZBw3Gveh)xT+2kF&y{7q z-C*gGM(vqXN6tQ_KjrbfBLO8NRCE#5t>%>WZ((?XV6weEvY%ZL) znr|6S0f*)c1p90V?tR{*kA|R5WpgT~m(%Y!j$BPaky3UXH>oR1wbiEf%-0g$POjMa z`fWd`s#a9MV{Uvo%08NU9eV0GhV9690O8>&MYfMYFGm7=T6Kyr=e6;Q9?3OoyUnSk zgat(6q*E~I!z$(2gOHcSeEdjPN?s5Ct)yEGeq{jN)5UkVo%f0OV07!OF{PQ z2`pT5ObB*=_VWwrN5fHe*(35Mp(|g%`fbi!bY~akGwP@gQEqBD()?m5&thga(Ek8-Ct#5*M z&bgg=&jkM1rIVVZ-ahh$!+@8(5~1t zphkgk+WYWZ!W%5hjyY4^uvWB2gpfu2eY>7B=mGuAUdh58T)-G&VMih`P0g0g- zK|n%c=tjCb6andO5a||>P#UCLy1PT^9J;$1V0bs4bDrls@B4i}{?VDeVeh@xz1Fp^ zb*+2XZvm&2{&mX3<3HTZ1kP@0W)!4o0=fuz+86&mMI?$f4>~0dobBVK<|*R-$HIHS zDTGPD1)ZU&-!xMLUwSuRU)L|;N(4^15=UWXDA1!R7~l$0Gzu7Cr$wioTsNnh$fsc@ zDIA^SW&A|Rlj^s;YcqZ~nQg&q=3S}UH87v&OzONGA|7YCj&`Foec~_7MMCjCB4`R%!a-en2!~4uHLX36z7~(%%=F$JlWyTA*4%q7# zARcJZowPo^-%5}xOu#kcs(m?;sEiP)og>Lwfv=`$fQ=jRT=1)3dk5_K6CjS_`SA}1 z?S!)PdI(nzrmC8}s)E|+Q=II*{NuBszO&iTmcwX{jfc4ZQj}~6#?k>_+LMi*?)y@_yf8BhvbbIyz_72 zQbxhitWsHHWx9KuHm6O6N;{<;vcZ%FBCE29@-`ib3P{|_IJ+#NGtPd3u`zYV#T)#oq z#q-W|@9~YT&F?>~rs%K1pir1ZeyTuE&4EkFBBj;6>mWeO`N^SVS1I`@cu0O)G-rY5 z=HWj3S7)AFu^!Rb8S#b;<jNV? 
z?QAH1$AKMmlyLNbQjU?_{g!}KP4kem*Sy^v0mQHF#%ct*6QykN5ECOEDr~M7d7HYB zCw_w)Mv9_@I8*OZdAPJ>r1O-&Xr==|KJ>Em7uQ&&^kbI3mqfk$NaCudz#szs6A)u4 zuvgKcSC@`hFuqvHwkdXyv;H-+lbM z(yiuUer6j-$}$U{>P10(9+H>#(xm@CO(DPEG+1FtX7MafdykHWpZ@t>x*&i-@V7*N zb5eX*V_t^&;12+zOvEgCy~GMYlpxPxq|%qevR{C8Q%bq>O~Xw~?g4cl+dG(9kVa&8 zsMf!Nedjnq7j|X90lwdIK!PZKam44B2ZvvbaND{ki&B$Q+4JATI{!~qYmDePh=_|l!Os!-5%S7f zQWY3e`qvuQvS=1b-)-0uWjPK3_ z;^yqnsjk73Ci|1lnG&I(;;Z4LxvA?ktr0N}|B?~3#XPK)EOdtKheF;Lag`m| zhCjX!fiNrkXmWUz#Hp!kH_M4VPNb}_Q+OB_cuppGzRdDod|Yz!G8|y;#_teMMe8tb9|KJ-HnKZm zH?Jljr!si)Eu>L?G`NW4Ol2E!8S#DOXNst7J#I7CBXMW?sLx;M2H4nT!UirkUXE4D zQi!u@NHHr+2UK^-zR6K$&BpA+=PFs5V)?kiS_;I4{);AtY2**_3kuc2_5ah>VFN!D zR!&)+?yY}*3`r0kE@2j)KHkz#KXDA+lMesUVknd1EN|f-F#CNN8>7n_$pW`xD)K>a zRqM?%c3oy@UtBc1{Io3k;7puh!t)nOhe4dR;xZc^Ifr^cl`l?!R+uzQ0ZV+Y6F zVFy1TK%+#~S(U$v-aDm_x`Rkg5H{N-UlKlhF@s5}hS~Me*xh&SM6yLUh>H}_pMbk%%`f?=!Z{g{tljW4leDx|j3b#EK1&c~n19^|(|%f|fM8!7`O zWXzSD5LBQm2wvV0BYk*6Y>+)IK#BmV5S&I`sB zzx4(WnHA^GA3|X6i92nlxRwzCe^nsW7vkL3c&q|My}wtzr1^BmZ6CaelChxq?qdd& zB$`~&6md48I1cD*D5dW21zb<*3c|zDWo1e-!Nm40fIqhs!?stkQ4rb`b)r`}r;-)$xLbA?OPQR`?GHutY5i`&04~yPrKS$L_1yMD-k0Etdii z=5vHSVLQUZQzLto_@A7r{qPb5uNlnC(7YiSL5kr9f+R8?*e@fs`SS-DqeaQ?gz;or z{TZ2Pd}z@UT1UqP49H>+GK;*bsPhcG$sVArS?hg%*(Qju=Ci^pOQW9AtN@V1A3)|} z8EAfJc58MR}CxUiTTfEMoFG~+Be5J?d{Y^UTN`p3X}WJ7#t zsowe}p?jvbgTsp;wk%PVc5;S#-+EQ}<`LB6x=h>f@a3Ui+xeTJ`lUrGeV47rBg$8; zCL|*SmKzPHU@iG8<+WEW6D{xOtJJfnW@`>zBBikCaX>eKSxx*!-`cDW;R3VFmu|e2 z-5sJj!TaZb;-Kx}r-%9;Cm9o8J_2B%*XB9UoWL;`?-U+VO)zD&V-_4Py1>rqIH*yR zNlD8xr1n4B7zAWj6itYer6fpgBx<*es@M4nALwqWt3Da>yY}-@T*SQ(_W7x<1l%YB z(6)bQa#hi-c_-au{i=oh`~g0s5m{-v~el%AM~KZ4=|wc@(d+0z6AOHqR_K`3=~;duqR*L51Jo7f?A~u4&$t`0drJ4W!c!zE^))l9c9;9RYB- zTsP=g1KdL5s#cn^=R~|D5BjQ&O;uyN4=7~ zHDM^m#CLMSNn=w|@Hyw=cT}yPx;do#F87w)5Er;90I360?CM-QCIbH|!}?I-l+Vb; zKON%;$jV2azj1#{;K3r?A4Bh{?{3K7r4kzo;)NmR&4rsvv1zloM|6iM^%W zGQ^$k>QG68i{LgdE>sPvv-iSB*dxA~$tZB}4WW1aRWVVN3`Ah;TW-TSBQB>pnRD%&dnvpzss 
z0aVpnQlJt3dC6P2;R(=9Y=yCb;H}A4jR_z>Gl5@c6RtW#5?%tXbewq+pc$?3WfM0_ z2M3^D$?J<@^%!;Mheh9Aybg4`n!>tgt)50jK|&691L>tA*OrGqV<@#LgYDJQ%*{~T zK#~9g$^8c#1np@UA`q;bB09*rD+S`FxMhkObuocbuZN|?gH5w9W@_*qgBt|#yqW&$WP0X`k@!*J;uzX-uPi^I=XnojQ4~H*i!QGo!9S$ z2?`|4z1WLty$Z8U^_e=WtdGzNeT?eE_>4ATt597`x;P5{?wi}Ogjt8Cm^=7;w9^k( zwp2TXA@#IJY?=&3h2iD3D0%WuJ+NXYjfm{b^1iztvv#rXB8RN@p z?R&meds^ZeeW+|ZvbVXj?CdKTo%lP-cz>hu(@+D7;c z%8>@hb_)7)GU`R@VS${f=r>~ZFtHZ}O+R^pTBm8S!o78VO>cQNs~Ze3%7Uv~(!>Li zE|!La^pG%nZ(8m{V9*wM%sWAdEN7Wgd6V`37tq zfM(e@LFl*MmVQ(`_X{q&`#mcAx1Jb3Yx?zlCO*ESX^nO=cek;;Duf5%dS+1W%HHk0U{Zg)5i9L)9I&9E0U@9TF zDEJt19oF*OnJpAvU-iq&SL!?<54P><)*p@!(%Y?W&km8(_>8ku(}78t1oT_vA`nyvSDG(@hqmBDTzdqaB;VhdYrLp*nyN~3$g%{rFr z?(5s`>+VhYQ3CEhZdsrAGOF5Ilzh%p`FZ$Ng$xmq&#a+L{-g_UWi2Z88!`2sJ|1(KT{@vFbHsj5%A1~xgscUWs)O(}~k3Akpi3yxG_hsfEML@B2 z+LnX?qzbCt$GcgpSejteX2CZ_yhJsK)udRwO9$eyK$W9xeJJX1Yei03Np9)Un;do7 z{Bq=L)pecOdcjDeB>eQtRDY`89P>VneUrOK!&=Ncm%KFJ-hG&2%ht7(ZjDT}mA--U zMOqbvF}+D4FHBY~pkbf-YL{j5$qFgRB+8dHxj9&slWT^l=8jj+=c&@2#H0o$pq6-r zIP!B;+Q?=a^#Z4iz+uz+OAJ2BWWuo_=6b}=lhK+G;8$#I3LjrtsD=!TXPmbYxyd{T>x*5?uBtVH1UcU?%{0BtlZ~kg#M7A z?A#q#hdlvw(uRIPl*_ZCzh{6=N^e7wc&caEd~Q#X$X0`bVB%zlg&rLkrMO}sMzu(3je8&IWFdNmmiTXSJ;(ZoQkSzOpHSSM`s8GF-E)QeBmA0r0-hMq@L%lzisJp2q z&N-AS6(>janr?tqR9|h}lrI};U2m~pU#x?_UN&EFUU$8|r_tdmD&J4P>Nc6lxQod} zE|t{0r^|cYb^3-x(}7>>yNKZ+7U53C6fQPHVNsl>8}Ne4oZ|T=R(5djV7>oGJyF-V z#wO~N!D-39xuA9F<6QStHgNa3Cw*bj?@R1%It=HtM3qRaBVY*F_~cy(8XArg#24{j zyGE0ff}Dk(sBzN1B8r>=clMNdZkh}Ldl`ej0ONs<5>iBeIgijJqQ2(_>KgFIWO}En zs>n!VEmrxUu#A>_UGKDc;Qh%(`x}{mjfDAq!glRT+UyhGY*YlM5W-eYOA(ffHV-77 zW4r7~aJG;?CKr{IfgJKnZ39gX2zYz1*RkKpq+Oj{zFW-9`901ry+iyhr=mdHCjH=$ zk1{1CNX)7kT5X=ff4U}Gu@e%M?R8i}ulD9{Yc%?Hi#RQ&%@4tCofx_r$8xcI9c198 z06(6B@~)z!;@wLI-)OBJg+0%InVs|)IsDbGJ!DRMS>F{0~ulAo@40wLGO9kup z(9j?UV(zNH#W?0zn&ogJbE{Npg72`#OdZxRb1$E*?H;YUu`W3(T{1`uWYlM zOnQbjtXQ=aSXks#)k|pS4n|*xy^TgSxTf_=nJm_)@VV-8dAEM@?#G9%;{?c6=%9gp zCNu9bS_i8 z#da|ub|d;_HF>e~uRVRqqC$K=QQVM9X5ri!4HW*_iI~!K9Kgh8b{Y7#I9a%ed-YvY 
zMArh#bkEbEH9+yhP?|5ZL)*tJ08+0F54OWZqqZK$^zIGKlL4et|BYv7W=u>HG3cU_ zie+%Dbd~PtqW{j1hU596^HtZ%M`WBUx>$Ta+Z@LGmN<|4&z9NbE+As(kCQF*n;#gf z&&RiU=vL!}B-8lV;gg#c^j|Ai-|~x=T*1-p&z*eY^T*62ojpF-DmI_zU`qD@19k6u zA7q7qh)ZmIgj?whKiQW%x!Hp02j%k4jSGv`5Y?_L>5K4)F4%7B9k<|B96$KMWlX>{ zHsCANIlC2ztg+Vw=B?X2h0?hI8;=t*H8c;hB`9oRq1rt##31>fFb&+S7!Kj&vgCN> z(i^TQr|~w#o*yK&q7vc}Z$ddM~&&FJ6y4j>?{l&4I5f2+B6!4d|F2qvVPgK+0^QbWh`>*1h;A z=@M^``=ktdKX4SJ(b(i_?L<`w@M=)re%|HipEA>2!GEW`5i+u>=g`jjA|H!T4 z)Qr(7C+3x6QOX8`>(h`EGe1WGI`%2glf|crPkSy%1_?I8UK<>x_2YCOdX8RVlNk_t zl8?rAnWNwWk~#{+_EtkD z>>nX7H!G;B`_X~QOw7;Lm z4O%=G-kr0)E zgx!6*XwH&jko}ZsvF3$xsg`H4TZ>yPwjmZXKF|scl9T0(`^6A}t?k_gY}BMt;3=V= zb}iDhxo6V)+Y_z2d9kRp46eFiD?uOos|5W4x*6E#vFdhs|K6ZYaTZZdm+O=pTK+QO zV#4_Bk1MdPAbww>n3OIrN-*&fz`U^OEM-Cm_+UT=C5=meao(8Cy2u0e?7_Z&qe?g`bHjU*6JA`TEp3I3jAAq~11Sc=}H zV{C`%q+g`O^ceqk$(Dox`#woiji!!On>7M8W^&K3tbl4HgWZsJ`UvPY^1dY=Y)bnh zJtLXvp|5B#E^;_8s14n5F&L&kZ@>*=uehP;c_TXc)Jih=<+~fHH|0G%X;~dTYl<2P zXBTr}6ZGdbS(m|R`8Rpk76uVDH7!1!vrOyPel4nGnU0gFUf^ZBQ2&&RO-_XuxmbBp zfrhPcb9S2~ne%m0R%GBE06Nw!3eC>vBkO&+lX#|hF3>XBaUWoxuhwaD+m&nF_4`T} zvLhut1Cslk%g<+6pp45!VZ~5ReBoq0v98ecJfHF`O2&&%dO}5bIa1h&h+_3phb&m+ z6e(x$`@y|~g~f$~1%2sS`9+KA6!-?8jq}C9Vx!dpABYqtULPbx{RBJjH6o*u7K8WI z%FE~T5M9f_z&A38!H=-XK?lM@bh}xLlMX6w3nqEe&nSf+KFVz=(I5WunpTWOuIE#_8Tr`GK^=>gzt&F-%u<3&d=hhH6zB@yGF{5 z0*g4mf01ZZ?|5}}MM+PMhJya>1kPqDm#AD+Bw*H+TpMq)#UHfO8^oaEMOKFLNi4Pr zoJ#9hNiPm^ydrGG$s(xwI|pDV!xK5_i|);PfxJ&yJFHwZ)sO~navfS}ez3$j%~i%nn1 zwjYs@i!|*MKN#)n_Nk;O+ZO+bEywzW@$L08wGXC(0veGU!G_nHj-Lpx&Y+P)^Ru5z}L!O*`W|}x$_5x*P;S5q0YNg zHW1H14!Vzgv&N}+E!Ru$ELVTnK%lS-FR#%kMI4ahH=rGw+&4 zbj0EB?aXF!?#CV3ab$-{UakMQ;l@S~tiqbo(_RN&E}aOI=aTMyGSFX(cHv?QeM=@_ zb3y#!cP1%J*N5XiEV6iW{B~GIp=-pTCFs2Hd3xhut&-@=#|`gohz)%bfZuXZyz9ok z3fU=MG#SM<{Kw*iPlrnhbH0|BB^N@FM0we2#a;Ghz z{7speBo2aZfM1WOydJHgUCTE?YK3q3aK(C5``nEMd?Ilkbh)_rEPg83{RIq>w2P^? 
z`}UewnKiXzv+{@0@;v78(wI%UB$OiARdu30`^WvpEd%o+(*7s`R~Zhs=@#iWi;?`EY}+K$(76_PXex{LQjb|`#VpwZm{6b|lD7c1Nb&Ba!_~t@zz3>8?5>wRVQ zv@UKof%q~RDvbU_&oN`%TT6@K$6LZshuo;TJp3bhT!Y3N{J;7+fxKC zbW-LtlV}t26_qolPIlrFuo~UofR~0)R$u*au^C#8iBAifGiwT4C3yM4O6IkJ zv=&kOCs6S)-Nh}}`L0cwXV6g~%|t&Zb=hXmBZxuae!1B4H^h3VU3RDcnP0Q|n5|0& zvuPgirsrK0{v+o?Z9r#$<(DkA6kya3ks0r7R!$4{<-;)quLyK%xKwIz?NaPTl1S73 zjZtP0&&mXMtOODUbSkTY+$H}8G%`X_yA2H$)O7`Zk1ib7m% zT@<;;p_TY1-H_qSHTrYbW?>V=ph>&@48h$ur@ioNzA?oXJBObi#LQKSS*WvPP08#s_XkSLdURkAJbyDMUrNc`FfE?0`>)M;RfUROEnnly2(+J zx2}v}*i>3QF~LaME9$zHPsLHFn_iz$Hx*X5rv!1s&x(h*H8>ed{dW`OIyg_*RE4jt zpx{F1r^JDuUIq)rV=tY4pBO&CUsE#BBnU>@_J?_vy&))V_XITZt*6kTo=lc|P5hz7 zPGEbwTbp5i)6Ml&xbT!^J*4rrBkJX=-wCEo&7zWa@oHpIXf1saq{1Fr;Fw?{f9$YB48b&s%Z zgMFt40&_0RPnl{*ppHZ=*&jz@c5Alf@rnAXLyZu$t~ z?$@7fd_$myX7T5K;ujBil6c?stsg(6JlrP@p`(pU^Ydhg$jQtqkJH|x`+Q#aXVc((`zhinBAT76w?a%hobo0RF!!+?H@4XG zE3>;pDXEYB0=wU8SOcgscQak?iP3XpWJ1et(wqXzZZowCF?7k?zW;WTTR@x|wn695 zf!n%S$a~Yk@FWo(MSS@wVm=*XyVe1WFFyIwythCi;8-SF3jbJ(U5YMoN~(Gqx&QxgXCV5 z#{2$rtqsDNQ18kigei2QfEIPuFUdlRw)9V<8c|a`&P2niQsA4%~!>p!@jJWqah^L z)#)}Lw_opz`6&4*lMQr;ecp$t>FYww(!Gs0D~k2qIB>vs09#9C>8UM%QOvj76mTk}HPgU#^>AxUv`9D%IZ_byfS5i!LWt*V!T2kMBcG|Ji zsorZ44?7#lpt^d$pH@Ig)7`nWa6^|a8%ui|arBXtfcaMdcPHllJkeNpWZReGIX06v z5!3Z`hRO52MmNpV*}^iO#VH$ zk3*^VwP*}l?Hsq~qbz=Ef-)R>kQb&%ncG|d)#sdHVL8O2ZhD^$7ZodJ0mTI zBl#-22N)4ADBo~~TMBMNhJ3274dPFe7}5@QG*Zsvgb>3K`6Ch>#s3C;f5Ms&qOPc~ zj|O?6jEzy!mZS9F-=+5>Z&`Vbe`{3ZvQ>Tc5G@~$Wwb* z>Gudto#pSQ z4}D1?yx%&VCvGlHYf&Q8^2l(a{N*x=?0K^KRF}iBHZ;4nx<&oC2I~l|;$&mFse!9- z5?BBpw$L?=+8Dus>~VwlOYq4A*;m&Amu=nU9#D$FaGC4%{rju+xhtC<+O7+K(RQ}& zI+x>FI@H>jX6+m1MON}hmkLRdL}@Q2K6*axf+cFq9=;#JS~N5aKw|xfrCZK8bjP8Z zRW8uadQzQ`(9fpCi9PxR;g{&o+iprZgjKP0He%_Ow)3R3F4)f!3Bf_C+cAfYJZr4I z*&+t@w?0IA-?*ysTWZdB0`N;4KiAr>o9+Jm&Xfor8}WRRw}1&3bkvL=y19f#y_!1P zkbudoH}N;sZr(WAG)78V6LzEDAUW-PXlm-%*t_#bkc}se+m0659f=cvlSp0}v9gvb$VjNyg%n;-oU*kSGY4s)US*|pIcb{`FSa3iJSNNQ;bT5!sA976GRLSs?Z*8bqLV;rkODu4{_|nkH{Zo6g$6#kCl+`Jn8JSOUYTVGPg-oh2ANxZr7+ty 
zTTdczh1@?H=osG3`G@_n9%rW|>8YCt2e$k#YE2b*dk!_J)_`+$sxd1olMw zkTiS4l_zL7SA*cIMjmq<`C=4?HePB;ExWI$fXXiNnKGycbMC;W>8U*4Kt*rZ=G#+F zjR7A9UKHZ{E4A{4HmU=$LpXO*_RO4jkoD9t_v=Je%BOEhA-+uS_06m%Z1kgwZ{w_G ziVEBkt0gEX(>XwE2+9104)N=0alKi!f!BskftK{F(aw>VU-ork;uCV|Wn$~?YMVtp zD25Q+$X;;awITG6RH5E4ep+FFA8o_fQj71W&rg&?cj6bAfLLzje5}t2X4fy;Fb`bAOYq`(f zl22*Xcxssn4MKqQ8yf) z&jsaI#Vvn(NSU@r(V_uY*ofBK@xvld)kRfEu%!G(n`FZOuOEuHE~QGI(2}R-2y`8M zJy12vXZt5jAfeRGxoeKC+1YC55&rXG5AxR9Y3Yvpp=N1*92ttLh@!uYKb_FCN;}8b zm`)3}uMa~*W$>P_6>sQ{$IO;-*otH_6=W&MS{iJPpnaeF&kg@%H#E&g@YPsq>8lc#d(>#1XVR+1tt_JUT# z^bpiab{iHre&04xD1TLN*0g@6yGK~{t-m}HX8lirw+%14r$#>;JRW$@dkxNw!83lnHb zOS#kC;q>*T_ILhz%qQ1wx@pF$XcMR>_WP6ERd8hdI`_t(6A39Xk(G3q%dM$a)6I6T ztF1LdvTj@`nW*HqTKXlPOcHea?k4r$TF#Xt%{tXdl+UfNzjHmExhb>mV$F;R;c_dk zO7Ni}&Q%~${#Yt{X}M3vYV^9ivE>SOo!@LuLC4gwaVLc*ru5eGP9?Ty;xuZ;wc3^FzkOP-jl~5+9%gO$}I}Q>d3~spqzd)GyX% ztk%$1@$LzXOI4 zVu7!3Ez^u*WkV49r{7Qhc6&by>ZJpqVp}il&dC1p4=5r;9)+0;2@_b$2yOV)f4S2f zZg+GRKssOMc0F>&zP#0a|ER>%)y<=0inVpwPS6ELUk$H%)`M}-ecW*$(!KH3%FWH~ zAr5~fW8f8R z=c%4soyNz=j?Vnj)jp4>{zMz!zyLd+c(=i!`M{hWw~lR%SGEkninh<2^>kFZU9Vs; zqmc{wM(?(~Kw*Ld+{o|#buQkgWC$Ae>=9(gY8(D*x4$bm!!mA?Ca+Z|u8X2#Pqvhp zv2B?^gQA=@6nKX&c(i2MnBAL?0)Vg%XIyhRzcubYu#oh0{ank;%bm6>@tLX@3ceN> ze=ffd@lcOZ&@?1ZC3^jAWF*Z-DNtN@aImwsX0cm-p~2{P1b4c)2*~5Rokg>J%boK) z+npL)hv%t(lbhje(y zI`$PvKT^l%&n{2rz#gSS_?6|DlhgxrFfO%LsH`=p$UNP{2vu|4UvJ6Ey#H!(dK`oO zlDM?HyqqUG&h(f1Qz#(q!dxxc_m6LISx)PaWbT*71`sOf#w73*HwZ)c@GHym*4+=S zCRe#SH=adAbCr zA)ZuqT|cvsDGfzh$Ed~r4MhQaEKwuCd9y3NSZzFaUOTP42};@H!l(PI%%n6VKxisS zsMm(>zrgO7`+G6Ax`FETp(%%g6X{GKT^)4j!V#;jm``+t)C|qKSqGB<>Ri`As6LNs zT6uKHl7qSOKUx5Ovq-S2RsLI3P6dwV>l(D{>vw!E8-zau_;J9Eu-f-jUK8CzxZxss z)i&)@jg8D7i~8Mtz*GQ1yZ7GJ) z>nY=FoT`8sWGFz^3chYXH$Zb!|3IVIlx_LmwA84hbnnDioW;E6WAV>xtvPQ0iPtv zPl>gDc|^3M!=6F-57hiq`_myGrA6`cUky8(2l092R$;O3xw`a};p+X!{PR6<@E&dY z)cNm)DF@u^Ju-a8KcxjZz_arHG2HH(_(@WPlH$Nai%uI$`oV4`_jJxW$4?3P3v68L zVt2+gxy6J#+*UJwvKcu|>n)xExwL3q$D$rZQU_vRl#q!mnC@zR$CWcOtw$trR@K}? 
zSrQ#3eQvAV%j3VM$p5B21g6Q1cM8+mdwQnc`xBWnBwlXs0*638YTfgV*zDE2d+=Ht z=79n5LF+a_k$0)UwN)Y8KcZ>z3sZ}BCWpm)hf175taTqHnCx55CgqPS_J~b{XO&8L z^jP2!vHf);r?(n_QTj$S@P$wESR=l0z+P+}=FYy2{HZ0B(c-tEgk`v%JEWNbQh*bbybt)%wow+__^ z1ix%TqU2}qmXSuf(#{`J_t}$&)~zcHy6%vznda!2;~$+|NG!l2M(fOc=6T+O4)3=b z=$4cHI&qTnx4dK9*eS08eHCT#Wj=FF33QZ=x>sz41nXg=wZhT9rRi$EyL& z@|N@Tdf8lj`~~*GZG&bT-ql(QgRcvDeO(oO^#JT@hdJB=ZbI2|>E-yPz*o%biQqrb zZYGqaL~D4c^gGBY3TEfj-j%d>d-ViXK)kd0Jd{1}ETM^HJ7>Qaq(;ontAkC%@8tf_)hg=5+E@!<7Fo#HUC_?m9{U(LW)De%!3& z3z~2jIR@;fOA3m|U+*306Vi8naSQr{jYEbChTIoYp;gHs7KHo4CuLm>M&#EEEBvxT z0oK^o_fhdahENve8=CZ<^MxyOz1&ghkCcjqC~F(0%Yz}J1!&1(fv^IiN4G~^I#=g6 z3dQe)cQF?1raHz(E4;YN_((Z;G~QIYYkIlAfiKjPRi^r20idqHxM#9BYK~6O-?9XZ zVZ#cZfP?wYSbqrch*Xhz;O@|Jb~@O2h56rGb!57MjqVRvxC(jP4Y~DQ9r5%%H~9qh zEuP4PhE062g~n~qe}m{yWRGa6*&U1)mlmJ_93+sS^VC6^l4jNQ$NudrcWJKCg2~n5W}x?) zR;nE_#=!8biYSYbiK*Olj*H}t3kDQ&PMz7&zLy*Y=r^t`ew$2tpYaIlm4og{c#Vs6 z*LR??xoijfT8SL**y}khEi^osE7emQ-TD<8YZrn~PsgmzLYmc8>M9j~eg^{Kfcfa_ zzaihB6y_U-OxOYv&N=#}d&U0QD&J+KEbPtO2_B*JYixuOiT`^vL zg!m$MkQt^`&oIQhwXca-NlD-OcchOZy|y#)=Wk@+jc^ozwyOaMro+16CBPl8(2|_t z(r0cpb%R{5?yWtiHxO$9OiSGc@d9lC>*4rL2LXg_$+d#&)*M)IR7%*@N39x?^>_VG zHw2r#r9RHfM2qG$`Fm6YZ2SAd{$zk#JMg~Zbv$_O>k(!aD4O{$WMxK!T~oPGNgU%~ zS@ddlRI^l@lzB1LeQ-BEpw!fCB{oSR|HXDtfTYo0lM36_1|LA^qb~X|?cZGGdFZV# z*eM=xk$PnAOE%f=jgsBM7U-tWN75#@(#cr~#?opZ-+QbwIc~oeiqZIxf;{mik^BCr z_rszACT0|)z}#9UV-9nha7oE;ssc%_=K$S&CL;v9mPsDPd+t3 zL@N^~y`tLZ+e~A`YmOd;mB*iOPjveSm96NQ*@UEk4rXpHoy6rqfKro((L{;Q@l1gl zk3GE6Vu5-2Dgc%6W@9c9axTpo98%o?5zyuo9r>zq49(|AY_g}BJe)JDYMO5snZJYI zT%@nOTDMUm7VJam5ao|7k z7XUGeG>D&}4QM(m5quq8;P!czCK+-J!IUNOuo8gmeo?rywBR zNQZ=kba!|65OWW2{Odf3wLIrM&u8yv@BJJt4kZl9cHw`K>hBwl!?fkZ z>Z5Z8Y{5G3nSp2EOW3PP1|&O}ht>ehAU@b1mg|r39@A~)m**SwQtiimx<^X5Ht+zD zjps=TLT*Dv(b#IV)IWYvF;{*~k(D+%(X3?N{J8rL!1>OgGa;QE>Uc=C0DC}Pe_Daw z2O8<*ZD;QL%R4SQW;XrG#Mlal=G#s6iPC9RFF4~{B5N1lrPuc37{kM<5%IlO>vzLu zx|bEhJ=Yne;7(GkdOCJ>7j4=<+lbmc6T?G8Yr!x(=PcFr7u^G}?(Zz@u|;;cV~~*) 
z^qn}r)opgiHPxA$CTdERHF2Q^@?uB8YqK?(&1LMr7YuvW-3_49h?JCY{mZKh$=EiF zLVFpgu&^IxlY2Kn4L)s_EkfM?5Tq-$l+2(;2qk?BmNWDY+vI97@{Xv8gD$OEA230>W8&s0ik@|>N4 zm=PuxMe`pLh%f&xg9b@}QOvqY87=y^TeROzSGvfw%66l2zN@sK2g_%=@n8LK3oV0- zZ?c44iI{PZHLOv6P@^gY{?7B20z~dl_$HvSo-E8ew+sEAXDt6@`z}xaQWjREnU!$# z`|ZP-l+9wryUJ(wV6dXsIpyzeW(wFaPygI|EH+IRDJvE(NA=>Ye16L%+;ZOwECX+M zk$3x&^0j*D5fPY`7LUDadHfmI#+v=6`T6Lszhn&%b1mp|(U zA^mLoA&%NR;pjUtelBhCc4R~(a-sY}z+JlmR(c0c${FCm2uO(XSUd9v6_X;h?2Hl5 zBEpI0F;*#`MGAdkyX)k4ISg>+G3Y&C@97+Pzxd_cNS$En{yOVqr8Cbl)_(ps>doqI z5zu`x0XHAfT{j(;f;_j^CTqbwkC2}L3s+*jN>=FfG*M(mN3nlO_6TFFvQ0jwO783(?&W ze94*R`#O+;841P&+@{vvm^}APgwm|PJb4s9zxgT3+%g8?H0uv_saEy}RUZ!Bo|q=4 zBEtP>F2DjkH(Mxw&S8N|wSP?cX6_+?o#Fw&WBfFgu47DHo z%a959mYDEIx?np;`}MDHt-hA+lycS7?c4tT$i-E9SY|H&dN0UNLRqx;B&D&DDZTvw+*Ej-{f9y-BTC{j z%2)EgpjBr-580-Eb}A_N@ZHGtPIful4|Nk>e|&PjMNAFID>y>n$`@3;>Ui%;%Jrv3 zNIddGCE&L1&2YYrT7@j^SjIn!!ya16h~&c?B4>?_aL=E^E)W(;lnW_85AQ9$==+{Q zdk!Kq30a|rQ?}iZ?)wUX4&Xi)Fe!bIdXwp`cIC3Ndg3kOwi;kXnUTipwtK&`_>*{v z0t%vjxX0?|i!p2t9e6t%TKc|s^|5c;+50m?YbGy7mqAMO{o1DpK81n)iv8b?3k%;v z6z0C)y^v769;fgUtxXmu5=UVN#KBfZ!f@LE@bQ1$XNPdVkQzh1lAxdq9I%iyXBg3h!JF~XKGxyTy~6Jb1eAInJQtl1)bYO7f8c2`fS3)DfmM~jt{Ou{!(`F=K9 zP_gf!P02p4f*f!yakw4_3@ZVCVpfWXP5Ij3o88-1fbA8Q6vl48G^jh887EuO`nFkm z{)){G-=(Y-uo@7N5_fXZsFutw!M@yE7WMaNDe1ZS0ZOu>mDhd}u|{)Ey~zBn$sUu@ z;LV?#)d+rKy=IrkNOprlF1}X8wLM0<&39kfl^TScj7Z1cbJ4H{dlc6$wz~Q_NO@r4 zdq`^bnDWP|+}*o^GBsY5pJ_i0$-Izq(N^VmNjWhi+@WqDQoGvD#q`eK5+6OE4XHlX zQ*5bxpCS8#hDOiEHT*94X?!dRZG-P~MKtDzaw?@!h&L3rNi?~1)Y{G)op(s6^Du2! 
z7bQKD@FA^D!XpmqwQIz$_I()2qk7@MlqhUIdT-t@c`bTXqjC+MyrqHGkeJ~-D4?*T zLv(+uqWlxV0^A{7jM06Ba{*XupYbX{5`tvCc{8%rTT z?{92tmahBPk6a0}Yg-z_-g_NMv_DMC><@#FB)&2f;UaCN@v_*QLF_8Sx_w2aWJVuh z47Y#*>7+kFFD@hH$DCx+5^~%~;V<^Zs+Nlyg({cvfhnrSG@ErJGjjF2cglLk<1luK zx_+$PA3bM3YCk(6+rCiN^HH~($_nq7|NJAL?E7b}(uF*@bX?74xqgy$q|{&;Cu3G( zH;~gAIn>o@KK5qalC7G*T^UHpF`2A>;w^^X1 zI9)a=(jfAiB#q|pfVm@9gpB`+70__s>j4_>VENHd+KX^UT6$!(uQSq;z1!!lJm!^m zzva4NEkeShzh0d0977(gggonQie{K16dTzo{x-$hA8Ss|Qy-^eZ5F^#UVj*8UVES6 z{l@l5gCYPlC=K3!6QBR~%@s0WJ7Xy2&!96;EWs&jR^Sf!t)s|CU@BqW-Q0GGa{p$C zerkXOBiEk0kNaLq6Xbh@_#;?|y7QN>)ENr)F&f}7i-+5;OUXiMcc%6w1X@v+11oB$ z#&6Tnr+uJ8|`4TL(GHQ7|^L)8N0RN{ew**MW<3s8Zy1J!=9-NSZ zTP43=re`yv3dQ!491GGu-i1u-+8L#GRxICpXTQ}UX;6GH=j%}$DU%V(qC@#3>6cd3 zXZ)Tkv_?6xI_q0XGo$)1M`;E(p~nQHA8DT30Av-+D- zvuyl!0AEd*-_Fa~3M5r+ZxTE$geO?x{RAs^^_5^w)`punA&Pak`}|~C7F4n3f3u)J z-5ga5qC?Sf-G2J|=fxEeMPp)V*z&V+8Ol0mi>f~~Bk9~0j_lbEH~U~>S>^GG{FYJi zzOfh;l@v<^$Xju44hr2rA$u>nEH%odn_Ef!hj0MVZDSPhg9ni(Ck{{grXP{l!m)k4<+$i!?Yw1IiVCvVyarFcKUMLKi7;k z!k(|RIuB$eJ53{`3%IpgO~!~^b9U0_K7(g!&K0GNp!+@|hP!5n`V5+RFxu_D_;cg4 zS|9FO)t5fV+q73|f2N5rZpHt2%9gCw{Pnr+Iwha2t!@#SfO}8nW70RG^*gR*^^$n0 ztOKn%sv0^@KOcr_wUNqY!=MI*L+AB4IZ-2c_-T1aq81DSg!$D*! 
zR0SpFG!%kPZaPbYiUX!K;$$4ZJUN^~5j-FyNBL`Q2fvRQiAWNXA=*2Ele79t&1Kf} z6k#v>({6p|8*yafhXr|k|5%c1|FW*PBJBl{S#(H<820z~$0ZI&?geSPeJ#+#%4#Tl zoGR{99qmYb=VPf=*3jI-!ggqR2ns2r;R1xXgK5y3P6qYaAr*L15A_L1w5PoR7}P)S zjTT|7n~xU+9c*-g@AmrH&vGWbKDoBggld@^JvzE|N_x6FL?BGV>t0&ZfKHzDH=CO) z4W5|vt&@wJ>`~mfkWe3#6X%$^Uj-yFBZ)p2=!+RlnI0jqG$`}tu2Ir+e^}JZ6uwL% zA*Exxd|stK{Jka*-Jf>#o{KdHquFP|P{vM$^6wT08;5jHH_!d>_yCrFP8@oSoS3X+ zNwtPvLOBbU#95e6-;koX#Rnj^#4j%9kSrylhrgN6_|eem6=zNZP0RoVi3X`_E};$a z=B|xm)i@`eC^J>b^Kl(MK9>eAM4ytFL7s}tDd+FaM}lVzvy%(}5`E>PJFoO>WX`TX z@ErvPI#yH1KyK#Lusl!OEQOcitc#*wF`HKl(tPDAu-xlZ(U|nS;s^cMK8MYTxyo8E z?!j8m@%*E%WvO%~pUVWq<(MZlrnvO<;^ac|)B3kJzBi%~p4W#kO8(`?SbbPrVrID3 zkA>ii2`rLJIec>!u#}`rI@4|P{S@{k+@J}?UG(gE=bofh+3?3@%GcI`ErNV$C6SmvY3+83LF`*OUGC14Dn?>DN62qc%&gg)yLTv7v|g zHm`_@gzmSjb(wpRC6QC|gBvd#RyJ1letmh7!0CdeAUUq)m@mOYyyvm|%R%P_r;|;? z>qXNd$X@Hn@Z6!lcPc=d!$B+uN=&?|U3sIY2)QSoe0)2W`O%lP+_9x%ggP7`UGy}b z$$UbT?~|eOG}JvH&x3FXnTK}9>Lf38Wi`wT6}x@EjlBY|Ah(JjzU`hOBGZe+SX?~y z@@-ooI%p^}$|-1@Zc4Tob~*ap)VcW5RtSPO{5K;69z`9bcXx%Ksm2SyMaGK59kqy7 zS^QJ{HG6L}@9ML2hsEmQ)g#1*mz!Iw=qKmZtYC$Ni`%TOko-N4zo+tW3ns! 
z!Kd7w&E=EVGNV71KM|;*;Kx206UecF$*$yo=q4}~bHG8o()o=Xz%2gQ6p!0kw)=K6 zmmjGKeoyWN*ZqvwbUj2k+PKY~Ktyb`$rCN zaLxEC@Fdf<%?Q3c;IKPLD?z>6+0{jOnd&Om>-&9lwNbCV^9p!zks$nr8tu^__+uTS z*s75pxl{Qqbus4}j9&?P0yCDG%ie2sl8a_Xq&Wkg^pI-KEdC0P zescpA#h7H+(5P!)#tvlBqCyGnEHLH{_H3rw7jf*XN`L%~tC$0o3)l_h34$UREQyfp z5;?kJ>^QbejKh3gWs-E;bWtp9rX!RedVt&QN zC%Vr9Lu~Czh6R{CSqdt6i=M?C;-0IjB@bEqN_>jf3=IwUHx$G3_VKlvqF=PP?>mU?>rL*7-WQQ_FX;kuIam!Wj&*{hf{`Cu53uj_1FhW2Ey)?5Fy zKfY=^*&QrsSeW{(DBwYs;dwzA*`gy&2_tz<5DaAex;+&X6i&KJER!!f(a zU}yC;!_7_TGS|$?5TD?ErHcs3&_nF?ZZFqzPZ!)v@BBvL?4^%g_L^E2#xJ|u;T-_7 zx$Mp^Ga0rjk3Xo8=~1pTZx3V5^xKb+H9ied+JS^1<>%Ef;H5u+PV^Px5N|ey7IB-+ zH;xsq;6N)ZQG-(cB_sq*PyHi7G!9l^Og+Drf~D^gPqwJ+7HP}&0E(c;F2T{m z3jLt(hVgHiSM=`tuynv2@WK!pMUUxPX$9c9m+MD@N^@FcF^2G4G#w8Q$WhbnjpewR zBDF6es*A;NR8!)a!gX5t;|=oNaC+ipJA{1WIEGWHdl`S|!g2PBiiQ0}hZeM?ZAR(Rm*UlITm zmGr~DOe|D={!f7Jr+^;nON{OM%WH;)MSp?eWWJeiAX&?w!b18zDM_{ROh@Wq_N z-yy~BEYaquiXG1dvc*`06DKIgjUS~VkNuG@?$+gF8%{*Ub-Cf_CQR?;O(rQjcIDM> zLw7K7>#?3rJ?`K$`^d8sH!T4i@zf2i~7d_!0Hnw?rpgP-{zM z!KQgZ+C*Gd{GavNp72R$gJ+a_wSl%H+LuFYZz|2{rB!^h>~)Pis>i>c?#rmGjqL_n zN!_MB=i`8ph18Y(?(1C=|JbGZ)8obJ9=^l%rP}eCeoI{bw{lbfIM=oW1wGYrdCz@` zt?i!eLy7IW#6u!}tdN(28qrqIAOUV5v+avc0}=JN)#re;k<$H!wApe2xxsx*;a)IE z{ttSJhNZ+3VwZhaBXOIb2P3~|>%)m@UOt1#jV*|dLDJhgUB?H=>;CTVc0QSA{Z5E- z?zypAT@bn4aRRA^C2UZ6Y||&-ofe;2VNVP4!4b{wMNa=rkCrHzfB6zb*%L#42A-?N z0cN_`W+(9W333vl#tVu*;d?gQ;F)GO46*^oYvC}@OG+%;)IVu)-#Ynu*&lYD?LzJF z6gLBGTe;s3?wLF+H+WXkV#V*Lfebp=jzv2CDD5T6ANjK#&)tGuzGZFRJ|vY_nC)JN zulK#;v4;)mxmWG9rC%?`h{SB<#m`jR23GpcYUo(LAQ|)k!)v zeL4OC(FB1uwzpRLTB(feO^tkB^bNnB=yb6?>1!I1+zWgMf2JT(8Oq*roE>L$&a{5U zDB&|ErDiN*Cc;C_AKe6&kX{G^jB_U!XM7jeXZfFPW-FfC1P-28Z_zR-dz*cm9Ny2Z zFn)aVTY>^lfDo~TfCe^am%D*eJqx&Al5xG4@>T1{&)N z0;$g29N}*-Uo%rWPd$c#ALj~TTL@b%Z6+qWeP9@MXY@sU|AZtGl;d7(RMv3MTFikg)pd{g z+3hcGF0{5mPjpgcfvZh=o?LTkKZS5C$MY`x)CpU*_Q;0q7AG|M@H1P}*LggFkH?1K zm^(9eN!7xeJ$Z?ryU%#emRdhWTjm9iBn}d>KRRFri(a74849XPVDwgTBVTAZ8%R0& zpN1H6ysoTgviW#}&zZguqsP=z3=2l7ZPbQ;^AQ>9lr_w3^I66}%kI9MT5jtI&RVVx 
z)rxpixy{9XcmFmr^9=D7S4Q`OmL2lAFoz5*=>5%Md=UgPb=(;_ZSMZSZREM+J9u z*_PVm-0J3L6xxs0Xu!O>rZme21x9`^*P=rzH$rAfa=#Ig8lSp@O(x5oN2_eLBzqZY z1j?x8735>TrAChT;yTfLLn!WVy4548+DCK$qWblS`rURdakNATr@0XG0I+OJeiGY1!?dup*eW< zqkGNX2vz0jB|cRe0@%$7Wr4%Ew~p9d4I2##!pM=@TyWk`(& zyfo+mIRHpd>_WD?LpC%h%5-Llv07K5kr))fiq9{w@`*FZInrYOh!peMJaN zfSO$-potkJp&n<9*_;w`ikGpd`}xz0z5I`N+24<(S)T^Ctj}*eA-)UR&9L#|@K3}< zr0W@mNh^!X-*P=#AirfK>0a@o{CXB-WN-n>5Da<66Qa09>}ore;MKEzYIZmvJOyuc zE?5ip2P`d5`I<*ka)N+HeEdER_TPXik-%cZ9b12VydPLZKYMKR4d!;=DfL`YfO+_D z`FQ!qj_qR2)3y~q1O`bUhj0(bPQbE&sy=(_m$nCF5P53!l8gGfJ<&yQN{{xQN$Dio zIfkS$UA7odgT&dYME`S8SEGDUIl4D&HT?)SHuqB6yi{3P+eLnJa_v-1WjVf&3XwBl z`<)&knr>s9cf7k9M52$D<9n`*jvIq|-kF&d-_ep?kS-klU2s|#0yVe2ZrQ~D(#u!D z8kmp_WkkkW@|gK8D+Zjz=z|I^_fO<_(Hh$6YC@ePllJj= z@rLcu;Ov)Nx}g|1e{+nIO<1hJ=*cPQot+ux7*}Qwl#+ywOK>qZ%T`SL^3a9XW}Ywy z-!b@Rq!8jl4JoEgcY8$n3tl!s_qx5w?W(OO>pTePP60Dt+x1ipXVr3lyT-y6|5R)j zYQI3@h@(f(#hl@lAquqt&Gew&c0?|dHs7HidHUi49DPGhvs`P~W9Unhhnc3)yQuCvuBA)R!wql}SM=-$m)Nf>N+>*TN@jou? zm^|>hbT4kSVRp$YHG+YIU#a)Yl-qmiGoVd4QMep(Bi6Ar&U+ThQIF1atVX{V7baNQ zV@)QO6D=Q~?H2l}%O&8_M&hpMjS7@4k%-6}7I^8kN_U&y^yPJ9mAK68NU9uFMzpU=TEFj=sNE@=1#Bb!b*aTPF;c;mAaZ5!+6uArL zXa^Fdp@17PWis#iVAI#Ac&(~LLgG&`0Gy}ya?jT2r6zf(cQxFbJVtdnnqB@OQl&&^ zm4>73on(N_ds@K=%BLKxexnk!`h$fon+s`!R=1g}x+(2Mm3Cnpu-PPBimN8+7kM0I7L-v!@Ja>^%pmkq6) zrC&{y4}Dkh`L9C|m>U5BV!XPkbDs&|1jOys=m}t*BJ_VqOQQJwL?{iqz+xXiDG-rv zIj);;T`nyM-me}O$GJcXa-KI8&KKhJC_fP8#7VEa#A71>jPTQtyZy>R&S5Zv*-Wbm zUe;EnbXbnFIORsjQkc}Q(N?X!jX`(wkFK7O>m++W7R^JMLYXq+l| z<#3&=L^+%9b;2HP-VcF_Yx(W3`Bi?W-twF-qppJnO!K>E8V}gwMPAnNFW6tS)>$9M zFa^U}ef^ns)gR8B&EyGuA9gUX=pVjpWpG=a-7!%jYyXOVkUn1%)Ee#u5V^S2uT$be zn|nU?8w6x~sP592Jy&r9zGQzXXf027~q^`x$F~h{BHl5#8Idg<#o#B6!xq`V@ z&;5PQ>#H-cG{Sw*s>ObhUy_H;*5>gHtuEP)`gU_s28WVsrYACuIyDTHqba#gRVQqy zZK!9p4i?{|m5W4|`_B812z2V|{rJMv`OmN3T8Y!&Y*`jtgBwQn zjF8LqAR|i5DfWjYMkm=s?(!Ru$O9_>n7N~R+u>UkZ!0DP$Y%zqb-;&m;caL0Xu*lH z?`Km#oUS5~HyM%a zOL{Z0B?R0%LkPjDwlQ01i{NCZ8Lzg&)7!(8&k6NY1Sci6!UfU)69fZRKa76&&w17o 
zgcC3>#leWDQI2s0HF-IAP351A%AwjOMGD++5Vemx@mg99e@+#^{Fqi_PskQvgWDw1 z(@ZG{z^0d%D!w+xgDo+(BrAqjDM_jz9{UH-kS-_Lcdz7Oeh1JYJ=kado!iz!CV3(o z)vuHyrlo}xnY))N-q^3{eKbP=aoDcmnD}VOz+n3nTI;nGpH%JEVmaE&!ok)P2HT7NU)L-|{VLIEv7)H`dEgvTI5hPrr>k z)tx*PKc+d#@x7=T2d9dK*N_s~b;8*X+#cr}3459b<(lTuQQ0;-2}N(~lzbo-dbsLu zEu->o-=IsepM-d0P2TLM-1BE;Oo->V^2Xz%{|dVi{age_L1-4X^{<}(KnnSKz`Sw* zbFCeiUp8UQF+tMOTm0GRLj{WZ=wc?j+-L%d%s4?=%BZH`YCg{Zwh{=Hf1i#@t?iHz zEs+`CR}_`7DlIo{vV)utm@dd>B^sm70;VZzlxHEVnvRUU7dQ^#DT8#!-a9(n_7BvV z(GSf&kCA6UFW^D+?||$VZ<2W(6G7xM)raQlNg|^a+T+JXyC<~Fo#}nPxsN#?(^kqU zbbrcc>}FwQuaHf<9Ad3dLUzQ!;TSszleHPc9B#eE$@Exf3lzFaFsH-;JmO3Sp2ujs zmZN_&bHhCXCH9Rb7UNI;45v9P;&sFYe3HhAv#IVL6azNDbDnQ&+OFs zahH{Sp94PFILh7P+TLQ~ZJCEcixTL%>w+i*Zx7{#VDlJjPdhGE#eF{MOUFLEGB>Nk zYN6p6%81{|WThHxTh=MDeq7XEL|LLR^yZcp+2wqHQ}af6oQTC3T6d9DoMgLLd!_v= z?C#xwr_Bxjug1WFk0(r#tzNgDx#@gep>D`hR)-G|(aunrrGWaFEYsN_(hXhDbe=z-Np{@#3DGf$n=4Wn$pZ#=ye<+z zIL5nlw=>kcV;ku%zb}Zl)lQ>+b+pMb1^I;(kOD@CE{6QV70j~FdC9-u63PlfkwWUq z`HJX->JU*Eg>h`|ZKy6&>v{n+$3LX06IR-mX4l9)WJGQ?>PoXbXq-Yv*3Z^6KsS2> zB8LZP9A6#SqGg~($0xorz-`>uxw*Neq$esXqqjY=`n{7y0P>jidZ(9(P=Q>ZkbSiycuUUzaBEv8pA5DvLcVCMi`^ zxB%^trd6D<#$x`Nz9)`kTaF3$)e`f`UN06m^pY0vmVH|AT>ijowKTO7yf6u5a-|%) zj$Uy2*qmJ0m4Xn(JB+C3vr>&gm>xw6M1q@Q)wW1HP73lG$E?2`x^Xd z>}K-8{h}LFmd@{=UanU&lJVZ-cH>}e9acNYYQM2-y%hI`otrgZJjQshqgc!AtF@^) z3q8O{A@)lYF8e$)lGOQEkQU7?XfO4&zb&;9DqO$(7q#!Bzpj+H;vx)KU2_O=g4uXq z&MPmRuE6o|iLF=GLxsLJ+nD?obk+>CT|&pk9Azh*=T>F@=n%Z3cvm9M%Jm}JZ@FFK z+(K2FZY3BcImC$bn_An8K|ef@9EU7e@zUS9=n7i1Yx2;?@-&uYPrVXdn5zc?u`^z| zz3uft-^01J($n?RUhDE)sF!0`F4c92bY&&Pc;StW*D=*op4-J{LH7N;I@8ov6{%tS zbUbs8QXuSIP2yEoVBl%{O2-VhGwPO#50ZqSdvJa5@}In`)ZSiaO}2uz$c_vFicGNF zGC8d+tfvZ}YmIT}1;E!{Bh87<7Eg*j*^vKHFJg&)Xv@YNzLA@He`lki*Eq#)UkI%5 z{c)$40ve;{t@d7?%MN2V0!kYF{+UNYQew8xa$IV^d9cK)B^u-wf(E>YF|O<4Cg%*q`bb1~NVTYiY{;@)0<@I!W=P6;ZUZq;}#@ zxjK;z*N7ZO+Jyi>>+R$Qgg8AaOWXRPP$zBr?3hcnhx3NoRyExydYZSH34Gt*Cxc${ zSD9&Xu04SSzlUFY4z=D^m->PmBo%EF5!(*18N_rEUp!!InT&%h*}E?KU-(c!iSvH% 
zzYzMdNW_c=zEJY3Z(yini7G8NB{P6chkPM1MBUez2IZ60Zj@e4mvt8B+N;C4Gv*{7 zW;PrH0}PT2(wxGXO;Y+$0Sh z-y*n;)Gn}n=26SQAUJX$2_icCCeJ`1q$UBY+1q9jHMA@Z!OblAbZ}&OO|>vYYgA#Imgm5mOm67t7Jduc$j)C_%tt;J4aD2syY^!D9pYM1~7@^bL? z81&p0WDIZ9bGT^d{G4wn?Wl$bCasHOU|~tvfA_H^*kY#UHJ!;Gg=AF$yaaDE2~}CF zu)j+znKRT67YJv>v*GZ+TaEMU?71B-3q?CMs%|p=i2c|5?ltsnO^r(2z6>LDmJ3eR z@U!#&DA%#o&2kP~Kkc=Ilqn-FFi!yXWyGeWMF5Ky`uYc?{IbBg5)%0M6yd?6qdTLy z&jRir6gTP%gm3Rpu1`C5mReqJr7p3syyytJaA7DgU-9-7fGZ6A75{9(0;G~@X<0EZ zztgF4z3Q*=5j>K~JOTE-*^U4VC_&@Hin% zv>lwgT#MkHN0w>0ZULx=Yu@DsL6T036!B0Kdy)~q7C2u2vl|Ae$!sXnK^OWR3e)`o zllE4Vmo|K&2#u2tFW$nEs8sXy`Jjl~UP16iXY+Fu6zlUrT4Ky7KpSM~)<{T_OX3^G z(RB?B3Aswjf4T1QE2nkAPj5*Z61fZd2}BOG?;k%$LJn*v!jxmpYa6akvBa80`1O7_ zv91Aph)F|=nO7=pi9h-GEGD;#$8@yEa|LrzlRjNFgJ@tQZ?0rAE1Dz&^ z&U^2`Z*vAM6a>b%{pzJ@a>;f#&rrX!_gWNYC1w1O6Y2kegL2XEuTv?hoFX_jbp#FI z4?&K(wTq3dV{>*HpnNM@8qbO8by!)xS3-dqJ|0xkj5;=@lF_+j7{>-+pPymv*rg`x zl-cHOlp2?TO4uB*uE_gx@;3|l-YHs_l)Xy@MLy!ncNfAn$_xURJ0a1FcW)P=fN$1i zZx)dgq+sJrp-^mL~8o-))s*Vu==5m@%|_8x7uHgwsUO7cpmXH;>`^YXaMb{>B8 zF0rZAn)J4*H5?x&Hxvl07&&zenQ)HV2NOjb5gn^w0@mUx_ zGS}Kn21MYp#=c|~<{ju98Pb2$1{luO|uK2s*oLE|Dx z=W$=hP}Y}1wCPf78YITx?TSBQH>9RdpeFb~jTy$=-quXe*umvWG$rc2=U2_8RAtCzrl<*3e$m*-h;|;a<<$-Jo z%uPV~u^uD8JBVp_65q(yK**;(uL#D`I8&=5&bpATp|v~ape)ch;9z@5NB7rJIBu_W zUfbLDCsB@*=2@d5^N&nQ?n9A5!B-F3ZsgzJ}L)NxzU(XIu)cnLsAxU)V4ezAFonVT_@Tq51on zoQne?5xQ&zq}&j@u8i2uwN#oiY#<&qa0NUMFHsy+H!ZrW#`f3nc@64KAoDNq^}cIxW)$ z>nMxNV{)s;-hSWZ_QJV*KvIPDHuteniTFP5c&wuU^X)h}Cpk5ds}jR@cr)o;HlIQ#=Sxw zAa1ry?)l-{|Dpi|xJTeI*|pzEV3twIu((Ry+-4&WXaq_e_D+X zYYL&-XH5KQiSmwqtsNWo{4J(ab%obq%uLG_bH)CifH!i$&$1UF(32$MR+1T`6d@{Y z=@>Uz=QPH(y6oC^(iR)4ebKt>>q6;SKl$p4iDXNMVoz{SEvr_anW^KqY8#7TT23pE zZIxTG%fdmB{B1eGljrED)p!2uF)I$|>wRf}%kooV>&_4Pkp!yP)Riuji=LCo-REM4Y&f?w?=oq*~_ov z05^TQ-`9|jcJZu#q4LZzYfo`-iBu3T@bHaz$BJ-`J&4`&#by?S`nduD8hfC>6a15s zy-^Z_(JK7xTkQIp?ot+oA|y=3GbY;X^ZFa~R4AF>ha)7Z9(4U?xx^WIgBJX0c6-?B zg`3711mc;q9Yr`&xoq)#p^@N@pR2q3{he+9*R+)I*XQPRx4#V*E>t=&>&t)b*MHT087@v`Zw-_1i*b{#4yWNrEl5sCh10#F%q9w-kK 
zpa$e_W|We*0xxe@`h&vv=#VBA+qqk2*S2ZXj{dzqU^C|5eTWwW-&NKxvB7+Br>O(OwV#GuCrBB=X`!PQXx#AUE+4dCqK-W9v6zuPc z!?qQ6t5~rQ&9bn*j7V5|IuVq-4KJGwp+NKl; z{M@mO%o$iuW(gc&BH8$Cw~g-}oOA0f?-u%?V8i+^rdH_Q-&2Ql<_@R=tH8co_+NfIpAO?Cy4EGR`B#EwdOr`@r@fXD%^#&dgdm0X>ZmUL1DRB|%+zQbHOKo6$>xMt$OuxT~AS?!DO+&DGCJd)nhDpU^z2 zC4Vl1mM$zLq*iX}^-<81fT_6-A*Q*Bpwtt)GWH&@jYQhdRLD+J7fY4>X^P6(38yuS}%cUk#La`wn z+4p==dR^xw`__s8aqIQBOBq;5Cn`|@mOecNCRRUf5#}ikwM|v5$jHNZHVP9=HJ(I% zPob2aX16MPVa$nH$qkbbtj2SLDGL(=KJBC!sUQMWf0Wb0MIo!jctY11<)@y)%-yg2 zPq6&2fsYXiD_g(g=UxgzvO%xMP?_bY36ESPin`8!y#UR%VrIaggoa6I0UGdV`1+#^fi>6eGejZWmzGZ~lq0|yyL zRqC{DoIl;aAxCx(0f2WrEG%vRyZO;SRZQ#x&2k4b2#4k78SwvgLdpX3wH z2W{zmUgD-dAIqq+4GDNaHG;CpIY$O_l z|9owe{7U00N7rnx`?F1#7*sV~zEbR2zt*^VE@S}7nPlK2G^U5lmljoa@ck)2-3ur1 zgb!X@yv==wXBM~j5i%3#F%V8QF*<8i3!KF@%#cy!BoCkXMc zY<1V-&|N1J-EQLr^5*kv8H`761`fhGQZ5_yQ;Wa&P?Eks^0!T8*9q)Vll)M^goBTF z!ie~NSLXYl{Q>Kc#(5^h9k=Oe;>N|-;OEZ}Q~X9R@=KeY1S{<4QFxcYjA(xS@CB#m z_39PDCC82Jz4F}@NMN%LN?1r@!0RuMv;s1C7ocAmGl71z80{v_nj!vZK<;2Rw`&wU z0(=sf-yUJ7n*09l=!yl2;LRqsrPMNiXzGf{Ch}4c#|iIeCZsu5be2YsxKx-?R?Zc>JkPVAQXh(C9Q-j493{eeCzi44pwQrIg~q_qOY<7PpWs?`DeCA^GI`?5Y|PIDp# zZGZ5)6UV+J<1)n_AtmX?n}CU5px&b)DPy#&F2A67j(~{we_sm@L_MJPV(RN!Siqlw zr$T;zPHTLpIdI;hE1;a$GMdij!(+#LEp?AmIhl|@alN`Bd>CkdDw6Kn>s$D}hGyz#p4!KKJgQ0v~`)dD(6mjW2pOYA1sK4 z6^RH4S7tI2~OE-Chey$NtKA7 z?tU=TlQ0eP_~7uFt2C28F#OCp;nj25y^)~c%m*|jU2A%v#f;BRD;9Jcw&WIBb=`A? 
zY4I6#-6`wLPE_iNt^q5saBZ10X)0ZJ-6tky3SY#}wDRbC_d2KYN{Y2-pOqxVr?Zk|&rLHnIdz(OdJS*9w!T*hl4dNa!ivdd~Q11&0V} zj*ocC&dlw4 zjyv)Dckd)#h`f!m`q^Jp_w(Z)dbCdj>N{0lhIvs~t+_y-8TgY>?aYBAdacVyUD&Rp zB}^kUe5}K7NZ;8S6%h(OvRUZaf4oNwl9B!uzxZ9qgtH~%C!4&*Wjg5fCT8>j?c=ZN ze6x!m#Xs7_)n1++eU_odJHbF~+3l)hic#UDV70AA$(~PZs~I^Q;&~Un!eHd`K%TC} z(V#cv^DXMKd+GBmJudYE*9bVz*Y6JT8ujEKjc^**`in@bD?N|NWkQBKfIQ*}k{* zD=iAr5n;nloq3_xRSGUv#rD$vtj_LBPT8MF=0lqO@%K$j7k5FMd{a_A;+s4u+vnc%ltXdi~RTe zS)n2F;_doKp5r05O4RP<$Y=h3iU^JROvY^?cJ<}~M0l|nWctwl!*&q}dY_NxK@g9} z?!c=xl4004cWvMl?hIss1(H2?6kYLka21s`FbkB4$<4Qo^>c~P6WUV{yxr}4h0Z#M z;CK?(BXNX5vafgnUI3r98|{*ii8?@yywU4U)v%FnKq%=1elq{x6YF0`S1L0eIhD+` z=q>)?a?KYK-%|qOEwGy|3p-u7CpNXrI%iXafT=0lEpSnazq}n$HB@Mt9%ANW0A2KC zo)n0juo4@1glgX3a2|eq9RooMs2(CBkSA{m8Huef{~^Bq122W0nlGLBBOG0p_&uiC%rDaQz|1?r`1E8GdtU-%qoBm`^=S+<- z(pRoG)=hX?*sLf-CkUVje+*dQhRAz9SrpJo#SOva9>^M-7LWTmq71cD5P99^17GyK z7al6ORK^texnolM`Wutj$MDXZ7cR7rnuo{i$M{hPkQm;KA&R0|^{k~9x%1|Ln^Vcn zjZhOWgP&gTC4R5D*#=pxHSi?7L838YK&nl2z3BF-CXOQgF`|%Ry3wPY!zRPD1Ih-&izO{#V1(J0`jG4 za`%;?lE&M_bRp%ZzUC>A)sLy7Dr-U3%~)G-v+<$-fIr~VT0AoA-5--Xmg5z-pX|a) z8k^lj#PMQbRAVnHea!dL6O~ruMiCQ1%pheX8Mo5!!;ikzV>k0$zk=;rKBul;`q+`g^E&r+KjZ$Z z3c?IH)cl1bN~DU>5N$43Nx4E1)bn{hqonMEhG_^VjBPDkUjU-LwZwxS(Wd>96zuqP zvflg|=I4X0jr?I1(AB2LA(Ong36doD_mL{>3qU}hA$W7PTmGT-y6|f{Hzf445j(6l zMUjaRLxJ^on7AJr4CnzKzSKu(k!WNfZt9sBG&5&H4V!r@pR%3hs8{8phGFz0br_LN zrvCcKT0_1QoH*2%hp{w>|7Z~HY$yq&m*4!B)(tFqSuxy@4jlP_@;$XKX3zb!R%BA| zyH+sG_rvUh>CqS&(TC8eo26~!{j-ZQE*!?JFJ$mQF>7?O`ugC^>;8B3UNMYTXRi?> za(NS4c@@fzyrWPr_hS&eE(uYmr+g#c7&4vU@eq&vt32BwBe|!l&^=rNf4_N@mD7ep zFQMRGFGw?K%SZLovG=0zm^UVfphq_J2gGX$tX|}S&u*4t9dMeJWUzNwMafmjlu39KxiI1C;D1!0XZ%x~=~ovc3W;$}Z}f4h1O@1nCkG5Glz4=`Imz z5b2W6A*H*eyGy#eySsDf&Y}JX_5J=@!(y@4FwfjPcb|RsId|#%+C$4dS(if)#$Yf8 zl7crTF2X))ZkLlKrlsCKGR8qUtOF1Od~;e(4uyYu9U>d?U}nHRz{BP|LB*HoFaCE6 zfo&Y(&bqho;c8*cL+l_|)}mW&S~UvkA9j*O!50PXN^3dOh0-V^I{s^Usx3lL@v_>q zQxrQk?5J`P(U(&~zZxslR#C@bnqTh74n0G{wSuXj9?VQKc%BNLD36!0nD5D914WHC 
z>0hnItZD_>zy-eiq;6wzjsZYjieut?Sm4zPmBX*N^4doDYaP>?6j?w1=g~YJuOr{! z+9KWiV%=fa7|iLP{Tpij=O|wrM_x;6^(A|vles-Fejf0zf4~~R{{*VLzHMML6IId0 z3cJk@Q@uo@<&Al!?C_D%(Hnsz+Ft4$V~4u?p4>51*p1&!8?NfgO9=7N%iR*S2Rye% zqqegScqlJj-to4kBecTvbR>B^km`ns0lmyk#`2PJli0-u{+k8v{e-;;($4>h0vKpW z^(6{iOz=sUPP@`6|E&uL8Gb`?_-)b1IlPe3jf=aoRD^%;R0Oz>UN{#Jgtm22X~Gune$zU#njn_MKrvIxGX{;7LX;pPZSE z5cd<);7gXyvJWC(nr7ZEO*XtfX zJ}a_fy57K##+`D$cvB2qs%`GP%lEdGvEPCvL_t{GD}p%or3~IeOj-xllh@#(P1eHV zJD9)I?i7mLZ!5t4cihf}`H!HZ+owBw#z3aSzkk#gx_|v{8tI^ds-;n>k*qjd=LiqO zrA1U4=3YP={z~%r64|`=wX0BNHT<_OO6?QBf^&n8&Lni=m=hvk)e96IJ&nW&N#U?t z4N1@UxOV*kWKyvMCSXZj;Ta_&Bk+*Qg(*rWyh-e*40XXsD1*V7l~wwz5|GpmF9-F>$gWG*%9?|O!krMdxLJ-9ep z8Kg`Q@ppS6;J{MsUTN3y85RifU;Z?vW@$FWdpNr0eX;fj4tjL|IRF-?k;j%-@9wNWe1%)Ucf32>2Gs?CWDBvja*^7+c z0+f*dZ1HvYUmx66$XgLH%zcwq7JsK4-!Cu_mNF;a zRGGQ8+&l%y|Jn546sf1F6kES`K<3j3l>e-g`!FidSUSZ{^z`A%Q|KMs8gPB(pG}rZ zr7Z@|b0QwCLMmmKZaJO!9+Q%ci5E-A^RDs9QhE@f=(>cNqby(~FJi>#AaFk!*UMa{ zjhG|nj#e*j1gMvb+7nH(;@aW>up|o4L%Ib8kRLtV72$MbLRWxcac4w*2YnX{4Ux#6 z7%=H7`U6Nf~={7=|Uy>qBWc zrY<~I_xg;W#uz8$Y3$vvP`>I1S$n^UntF`LpLSkw#+1YPXVzE+TUwwBM^JU|s<=S+ z_J4g`;e`_!34QU%SM6eYNsKy?=?oX+OSKt?YH8Wq(mrPW#B~(D5SUCB$a>-V;PJ4U z@AlOPYoEWx4{$ri2HAx*5AxoZunvK3h4hE?GZ!|%_Q#eP70s#b6Uk7 z5~vz$dh>yOUt*^>uvPVDEpO*aj`;;& zfJ>9u7#0tV`vdn5&@N_=-A)@97mDFu|1|@NINEM5)F&q?8ezYBP%I%utyurQ`xqUT zVp}w0$GWnisn$3b_aCHD$Hdh9h<<(bp6_j4O|IE_Sf?WUkrcIEq8W8$FXbCUz|H!* z4YYYo{ap<+2yh0UQNxWv-UTAty7EVWbi3WCveo}hl9DELBxe2mDV_6Y3YGMEhZX-D+UN%=I@NX$U?==ea z{`}ASAo#f#>3>k=;h*`T`sdLeq!fQyQm;bpwA#NT%|)io3hq5^lhL9{LjLy8MErRI zLWp}OMe*#KZ>;@TdD$lF^0cK=Kc0(S5}gm}zb)qOP7eoJyw2MCt#!cxB3bE-_W_ZI zp#JJp9yLe zYRQF;y@m4rf@04BhcRH6QsLp&G7>Ex9x5jxT%;&icT)V0uPPJqEKz^nfr)rpya15$ z73zVA;Nb3C!<2ZA?EeCwZ{cyYL!qBsOlYo57FxDRx&D1AP*FtdqWbhMl?E; zCEW#Zar`Z?9=S7eb>Ey~7Q;KW-sE^D8rR$WXD|6zP=57La^qt;uXn(AqZ$0gs3;WgR2@#oF-s3zjmYN-14V`+ z>$6wTuIv`>pTXb;Qw;()NT4Ocf)P2ZG%blZ<(D7@jJ;>^Hto^6#qYsMfK2Z z$2RKl&Wq0oM%RzO-v#dRt{B*#dLxYR@3U|MFvJM(0z?pmKX@7dY@L!h*VaQdP(@?k 
zhYb$RQ1_1i*=Rq&=7yJ;O~MEj!|C74xyaX6C~WC}z#vn*ySlrs$bjTxQp0|%0$eEY z6pG@`!?tD1bu6!cpC<(*7ZKD~)qK-b&rq@`0O&A1+Ufq5bmfId;Nd`K<^+oL)WUsq zh0T}bC%*a*F}jZBu;(i=XO+6-;5je;?iM@ZJ_Mh}eEQ{oBkoRuV%XsLR`XeK2IzvZ znS_x{=06np3a)E%x#s5}PHqv$_`q@j?Ujx0SzK^b`5xf~g*1u#_-@ z_b_eUc><|a2;{n-b^?6M_5&DiKE%qrDK3)~h5Ks(Em1yC{o1iei5_1pi5@X{p1&>E zBoDe%)u)M^T@&hlYq@@oEB1H&j>MqgOKTBGFI7z_kG%Rj0{1xY`<9z8q3VyO7i6mg zRFmf|30ddoqI&1^P}Th0n37t)jWeldk|4Qw_ZofxESUd%0t?0GoAQWH{z~Kn=@sF= zK#MlQ-GRXJr3+F!qUC?Kt^^?yr2EcoU}je>rSn1p-fG7i1_+E=QIR5}IPj-JehU#; z%I9Yb>3YTKH(U9|6GT_*$FKYS-TC;wExi%cP*-7eUZ@N$?}O&+`-*&@XZ=8KBaE;@ zMlG5G0zgpwjda|JJ;hZFPoXCYYoGtFCel>Q_wh{?(aJ_Ba55OfSd# z%jpa+in(YwFA_~75_#!kNi03!c-|$wdIF*cSWoBz=vAu0BJwPM3nFZa<4h%wI;(Q? z^U)tD|GtmE1q>IlWvlqeo|Cf3ci808^Ewfs&9i*gkPQVtrA63Yf9xSR>$6qa7bB^q zRtYFrpE$`{qb?%5y5&JS>$9f%GF>T(Y%c@&k=1@Sbqq94*9TQqWsg8xNpS0iPm3H3Shsub~G z#_})}c?uW*ys(mt5T%B+h1d>Av#^0!r(9|+c&mT>?`R=i!GgU>LI(9hg;Sg(WVMK?Y!Ix5y&$HjdsruS=W1c;}@FUq9yN#q_SmHnB#34g}yvGq)(=!ahLGM zY3D5)hvWj&*W!S{epo>B(8!V#aGk+Kc!06<+PuMs|J(l@K>ybthPnuS`2*+ubp$8< z+n97qi^sPK=aSRFW>1XmHik>=%EOAQ`w7gt$L+cRq2j)w%rVw) zhXFQ8lNbKQOh)iKz5bD40t~)2L*}QQbe+Vnn-M4T>Wjid?=uO%z|0{d%a=Il&d-~* zP)Jfna-eggXE2Yw?&t{dWWdJMhgp4(vK_}Cn&!`X8u3|`<0z9{|L;N$J9MN-_|*Q+ zjg?OHk0%9-Y4{w$cyF-6!Mf-=i`x5Z&?QBlGY#0~I{M*-yn3*otZIHpT*7Aq$t-XXxe-?CeoZuEj1gs58%n22OQzJDX_f-MSQXoAxrk zw%no!iWG+7X@Lj0ANa?EkdrbM@T#p@z%zlCVT7n@n~1ITTVmpiqG|1&c#88I4DLcy z^Pg?)pJ?gmg3>eE-ZD~q*4c_@CNC=Fx|LkMeBS<*17(+^e{?~?>0uC;05&Cw&tr_c z%VZ1_7S;m$>+iewgYngpc@gOQ%MM#EREKoOx4?6!atOC$sgU#1O2s4DIGF<&xl1@2 zZ-)c(*dQaH1H|}Ie^bZOXx<8^AXij=td&Or!eMWfpILdjTx9}8isrV?h(PQ{H=@tEph($8< z`k}$dLBCvOT(r+Va8+4ElFK=Ku22-bPfRi~TJGwUSen~?rtx|gvac?9VaqmQvaJ#j z6avTnWr!?BV~NY|2m>gigLDZ?rfvRVd`fTGWN6WPTS$Xo_pGs8e@T?}Bp;delza^p z7JgTNIl{6I<*n9XZ-+gY? 
zeM`5Mhu;me_Ddu-sdJ=oC>1{Lr})FN7h?=1Jj=Hfj7dt>k4K!Jg;HFr7wJz+Ee|>k zu2PL+U!(>O9SZ2AdE932L?a>UPzh(T+1;TFL7z(Myk%=$k2CtxMLm{Jj7@fA!KHK%nN^-GBM@nWuETvaN(nA ztQV{ApxeER-@kxmRk&C}p`qewfLVDVfyhW(t4k@|x!t~Qz4CTt;K};05W?MB6Z*N4 zJbn>2IM=Z-3uk5I2ISGMk_w__1z49{)c3TMwZx;A3mmbCmC!hTAmpifeG^z=eKBm) zDlH4W+5bp{f`H*cL=PEjV|tKAzo>jaVKJ%4k=pdKxWlG!nC-N3cP;h(lijEhCOukV zUsH-d#W>0YH644Y>4oJTV87pAZqL8!GrQW|&d{T`pTK&={w57HLjnKrsvoT-hszZW z*L;%_V>H5-GyLuAy}<6z9a%Tev5{E_Y0nzJ{k36FtiViAC{(U83=Tn6#=XAc_bA>o zt!?mQry64wmgEG5jN5mqzV{rL7h9%GsSSTT6!We*^ONVSGzfgD;HWikbVQtgpB z{lJ8zsSPQ*+_-MrSL`il(#O0WFb(%-;EW$gm?-RUfJ-~l6F6>o&;XaU^9RcTULhpd z^cgyin><}N-0NR1vK+fCA85nKR~DWI8S^Cq`+gB%xw*S39cPC_Qd@c3?#JwkrkH#6 zp)uBbyoY!~AN@m73U4nIq2MXE+~VvpQ3L);;Io}Hp`!~|51#;f5Oz&W3|kF=Vp@_-RF}D-=m@{u_2TOgeMZk|iqk--QA+VUkV>|- z*l+^6Hw;08A*n+LcN-fbC-?<6%4Uz9mdB(ZB`Xa1w4+E}N(J3-breVqi5p+>u{-B;z0w?gZin+n)lNKEHsrijcMpnLnQPDfp(_ z8R7}<7@d4p)s#fcp-1(BzF>aMF7Kpyk-_H+kJf&P>-Ox0AlPlJtCqjT7lE;X*Xz>k zZQ7cr>gKx}-~8>X5-X#zV2vo&e;~p$kY73}?v+_SZga&3X5gqRrS0B!Jx24|5vHSl z*{qa)q;Ymx$a?D}%bdQ6^4&3G(E0pUmT-ZM(8|gbZa{cbNXLibw@4GQ5$#T1+xF|1 zXkssb2oDTgR*tpN_?ik**h_L~FwCAUUmVs&{W9sQPzcH!?pZM{hT>Pv`-iImqpqe= zn;B&ppNga-ZwpLa=&#f|DzBKIm~)dF&^zL6v-3BH7z-rqSE43LIDKCHEvRqTc|nr} zO(y>Ji_<6W)lI^1>K{vyZTBL!D@>)$;G>nN{IzuCam|*U19o=_4^F2!SVwgqRfQ}q zv25liy$(Vs{zN!bKWQJOed3B&%cq?ySNi}y?R&|kt4$-#MQ#@8{+;7U6ueY*oW{mS z4g{7-JcKKsjy0^mMj-wKve;1$w|Q_C=Qv?u^TGD;H9d_E#ve?CnThZ$AYr?HlWlFf zUh>zP5r~to<5uXqEQM+2a02YwlJ7{B$s5;0)?H?YhOr(%)O+2Y9Y;7>rfiyO{z zi>BQ67gP_U;`4)^4O1YE0n{(p71k2&9}JPh3Ny9$VZ4>O zMZK=r2%ZOH6882WgmDCAl9NFbM-%~}L3(AMS%dAKk#4MVb}!j0=RoLsdflL&b?oZ= zjU5fYCnJg<#L8{Q!qv6KnN368zVZ30Q^#2Sc^C!)KffTU$q%^T2Fl>6AqKgz1cJfy z1A5=>CXRAi)*vM;_J92+owIKcxZ{IX28f)a=9JEcx9O_+RhwSw4O8$q4|HWTX|8YM zC1c4E?l^x@(5m%Kw9RTl@MHxxQmb%_J5=E^w?r9S9L`kZgroZW@1z;Ror-b-G%A02 z`wbEH*V>Gb>sWmHk}!M$Fw2R`Fxk6Cs#Ay5hc~*ZHTZp>PsziSE)qTYe65ith7cR^EPezge z%rke|pqZ;&Ve}lW_7skVO_poLVf*dj4njrSkCeExwx=)1E$`>j`JSy2&(%6^o!d`d 
zNCLOUxtIb~2TA)M#nmuW8w2|tem0vbV)qkoWxB=iuC3@?V23<-{MBBh>2wsWbltCHLj~9|M;Ku968iV_ zzNUmbyx$m{?zVm2W$#%E*~QRo=r}|m0hdQxg~ZNe{k1tfWZG4ZqBunUxVt>71{)0# zTbGd!Uy?C`q|nN4yDB`2i4HhD0-xD>-gBmsQDcsz;PNJx!DZ!F4j!Ze{HSk0n}`_>OqHprHU9ccC9j&7G};UoT2KfQhy;2q z8Gb>GK7@B&%NrBfd>9oLA#yP8EN7#x(9+4u!KR=xDo)3n zB&z3q^O7hhqN`cuXUGoQ&wbbOfbGoiP#cnBi>}7=MkXF zvC2!o&irwX6FcECXFrtw>*veRUCZdW5_uih7fLa>^0vaLlS;}8LN@P&JE`T=zd2j- z)vKfz)7h+lZ4KuI;{59Axu7%+Q4Wb6kR%Y($HjYt+dMMzW6jQXdn~ez+-1Erqp8a0 z*%?OQUe|PSX;U(}?%G>A_V5U<%)qlf57pkh_0RGy5)tKPL2YdmKZBSp#XV&KkpOYW zc@x?z17e9zr|D3UC|Qp{cTH4$`R<9&G4e~T2OYOjGK;xj$RTm?y2$GU@S?%py^Hb7 zLxBPu@Z##7N;B3))^5A4p2IscGY)s)dOyP*w1%9r11{+pi-xKlj{pmhy6l~^feck} zf6UT+&@QC4oCOpbPc>~tZT^R>B|U^+z<3JASIL18B*@kpNLLu(UWL}{Goy&TUU_`dIZnaE6DOMM zluIeJi^3<_F;X($gn$ZOyrE6kb=0$2`%Gmk=ARZcggTL7^1<7e?a}OFV@G1nE%R3h zBv!sY(~!(0Rt8n`_Pi~*^NNTuJw3vhjxH~4t`4b{oR&GDE{g^`g?>a$-UywDM?uOF zakB#`FCCg?t`YH;p5^!Q2l|4&osrRRTs9Ry9Xfq4dq*c2Ipd)hrn2&U@9(z6={UP3 zg+UTBLD&aJ@yTLXt4sI#s*3ac^_MBxk>1gtzp6{Sq-0iwX7Q=e@||#L0f6bCs?rE-)y=iYywi8>y@gaD=B-IwwUOquuTMp97Hxyb%IXQ_1!Ia;7^h-a?SVT4Ucj2F{jPep10eUXO z$C#|SUTmQ@l&0<#RMx}OdfG+^X(-{x$8xvP(Z#)Jt3{+lafs$)4Ymbin41HQnI>zX zU17YaE!$Eziu-%FQaes znQ8QuO_#@8+^B-R2ShI0In|Gu6O&n2a&^ZsIl>L4Y02yickxvWkKw;E4!R1*oYAw% z3^c^OXf#*DlFVMuI|yo^o2fu~iDvG&iRSD5jSJ{a%io@C*9i6PJ0m^G-}(qxzlhu8 z{yIBU1eAl-5u1HG7IId`NVr=>=_yM$ESto?o1!36Np%>h%!-GZW$7++O?Na|Z;*?f zu0c@~X@S&(Nj-qdX>4fnEUJ*G?mWH!5G?QSOg}O)BL);FdIzPD<^R11h-8IQ2;DZp zFH;hYooCiRF5?m6xk~%m7ryezlnx$BVVe2ceA~jzx|^QRxj|$=qXs=lsBjy{wyeo7 z{2EzQRTshMV3kj8Hf9?tf3-gv&nn$vnhj2PHD1itJQl98mzziv|8W+u4Z?0i#MZRv z@jp1deJa!New_w#XKagS^zCF?ka7M%IM3IGhVaO4OHBzh4>){*?O5N)G$Fw~0hE=} z1zRrswro=dzQ7ed1WuQeth;M!Z%e^6S&3XLfu%PyOTp*!G>>vAK3$rZqPrY@XQ4Ad z2hxPK9@O&aDOrQe9d;=K`~sB58gP#{&+>|bnpTFOVHe~U=ohwCIhnJMjz8Nd3Q6v7 zLz{keNj0@IrbNIb<)#|4z-H`9s+g101xQgFQ}%(@?l{W^)l1kX!f?5F{oMRZ$pXr~ zn_dsPz`Y#K*oO{M-gQWj!~jg&IWn^a;+1cwE$dT&)zo3b$JJ2K30E3b0ZxG|$BHLiqr zl`;SDe#WdFLUb!LdJsCS^z$|7L!_pwRfJ!x(!`jvmU%XYff<(1f&{gsJ9^4dBKyz7 
z`W-YKcME5ip|<<;OFTlOll(Wq!z@GHT@A5G(+Qf;GfL*smMT;06Pz461dL+m_9~Y( zRLmiEb_9^UslC(j6xKwwD7V3E)On-tvz=_wie=*_mvt*oTjd108OOW!2R_o#!+ppryEP22+syK+ehdcEXb9Jin)mA>jA zS$*2a70+pp2mWaNoYNR@x<3-D#!PmRzsJW@Yfumj4Q_(s9wguWX>j$ z_+cK7$~C_S`XUxbvd_eDVv~fnn-h}=SEShuoDng#z!?=J1IIJFm0?ls&m7!dML4f) zBNLbd(&UaJuF*s2Y%^Sb6|TItHq5yr?dx`4tDGr9WtPOxVzD#lbT-O|X;_ayp8m;j zw_W9YHO0<^csmvxx!jo&!Kyyq!Br39+=CT}(5t>RrB_T>9! zE2q9tN+)h|JK8XTAEz=ZPP?zR7YLJ63q5pM9cMmDFnDc|VnyR7VS+f5&=<@&_`)og`n4;Fum-OXq}_U)7? zntJZx)e7Hm#$U2cOk(GNFRv1al4EHoT^^?%M=QFQ$D?#_>Yq! zq!^=h%wjG8vcOg|*4tp;&H${T-K~deqmy@BzinSjJKt#-FW35(7c!2|D!#w!l@8R$ zcBZv6Sf9jaXL|6ddKV|%KHTUt)VxFMORsVqfH!o=cIjj09MmYqJ#S@KoQ9avyj_M- z&~K$S^DD(|fyuE<8bX{b%cwA&SN|Kxa2VZ~Ttvh$_c$-qC3Z5Q?WC^4!9kf^Cy+f~ z5AH6m1WL}?!>SmR%jhgxwGPS}h1^=1Zk2|08%(vIc5$Ur`9Kq52gZUnd3|Nh>%yBr z$+CJcnl?0**ywQGsX?=SfC zN1XN5s-Ho?c3!;UkO zxA4PvB3gI9+pGMr4N6W;S?}h@yw0I4ka4!yOXZN-XFqClv@|40TpGP+OzODj!xt2o zXIFqN^$dPZ`N4{@_QQz&@ToDiONcQZWUj8>-FS~u5Ip;w*LkEFaF4MGL+a-)AumyV zcu;k_0%ON+aLUy6EG|kxjWs_XZg-IMzIl{Ci#s?n3GrKQUe=H<;RPrE`nviKltaR! 
zTDbZ}UPW?XbxIU6QX+}L#V|Kx_$u+&}}G9wA57v6R-M7>@j8M91mAf zUy|(AnC+)&E*_aSp2R9o7=;uJx9e=>G7cM6`J4_WwO7e-4&mqkPx!<q#Ey5e44Z7S%GlNoGlmN8`~0-yVAWMc+MvY{M>3aofOZMzf6dd`sNkH?N1` zmA>NC(sT8&kpE|l1rIBoqf!W%SzQ96~5H{o_A>)ERqh+7Y_zW)^^){W*=oRBy2EX}=tZgyKHn^BFo0jTZ<-OZ2i*j6&F<-3 zR=6%*Xt4rIfRC9_PRsVllzH@1(&6B-wZ*%fX+HJ&!tka8&e>z9Ir!yqgJp|q-i;5M zN{jrli_HC5;Z0_U4bR#UIb4OksV1v@6T2XcbFvWiD@3-ZmbW&w3)}RXvFwZ=Em+`{ zB(~tZKXl!V)d$)RS@(a#Bd}KI>xzkVv8(HZan_N3W`6x?fx9^9+-$xT3|)objXbv~ zDdYX=K#zK@slXyl)~R-DAJSr{9uWs9kz>MSgUCuS96u zO_M|Cs7CPdx8Cdf>r0#t;>Ss~?@~Or2OOSU26};ZpQDnq?*zHGXO^Tp%#KG|bpG_tho$9g~BXCJCMA^)R|{N}TwU zOz|`y7UwH0SC#E{21>7vt^IB~sccj0b|t=f)u-c#8g}>9#^a*a{1($ZD*IWyh))dEpWEewBCaaG?)D2rCDtVT!CF;I+8tQNV- z7^vXoqmaTflo7NfNXu9Y-`PQ|I|(r+^$0<1|bvNQ!TR>)e6#%$Gc`!TY@51Ze0S=G0r(?8iyn9#t>0y86E+ z)YBP;Ns*E-2$7*vfAIe5obPe~)Sl;QqWO)Cd=9^WjE}6!{i4SS`H$So&3UAnH-#+C zQFeQ@y>3RStLwO!NBZ$%GV2@~WNWwifs1D38tXMHPd(1XVe%(u3|wsIW(ag9RdvbX z)70`qdEzc1!OrXs&c=rRRrguH;oSYQQBbWYhGY(luU*=C#YZlehA4K^{)|esQ&!uP z4H)*+<+9tKVWNC9GzCQHBs1Yi=ezs>>tM9n_vTx>>qS@BFmRFWFMzJi;+ka~u$oPd zQcOoGNk*cAr4R*{=AEBkMo*3sU^kxfh6`LkypW4g;IOmFD_1| zeh%uhXgrQ!ROl|#X2rjtWoG^M!+>C3;tyeV^ut(@B!%_2?9}S#WQU3*0}G!JbLgV)OjE&+T5cKE zBK8^QAzvVN5=h7%q7O}BRv~K2-&dO6n_0OL08W^bopd(({Hf7q*E*~2)GIoM>Nyun zx2;~{g63v#@-_{(`TNZ)FPjVS@~p=C#APY#O`;}it`^^%UBVhT65V^$Qh0?=7hUFjrv`#1&f-+saxEeD2)SR-<0H}Ezu63ZMB_U4E{-n*#jLU>`$ zTaAn@pvo4mh|p|~i>LwvVGhQ_FBdZ&D)tI|-C-VRkjOsrmi2E^3+W2N9Mw<1&GBwM zQ8^I>9I)lGPO#LSRsZ*h1`B`CPc+PF)|S-ptmTL9A+eO;+Q;MV$6T-zMwSwpj*6be(N4?s~ONqlYJpIi?(VZk@E*D8&|ogL$V>q2FoQf{_3 zi7{3-YvJz?v&&<$HI&-A&Qu*Kn5mgBqRng?>P-11UkPN2+{RJ z>!V(QFUou%m$=sZXq3^<%;221x##}V0yLDhE}=hX&GVwE1Lg56jtKlbqR@~_Ak1So2c->9tyrfLH>nkc`bSNk-ASv|zX5&be z0vzejSa(svYB~nU_^lddmY?!ig9G)SGKQ(yM`AgWNv$qPvTxKRn4!SpKf_r!sb^i0Zd&cn(Oj@C^#p)6x1+XMvFA}j`&o*e_RFbYY=? 
zUWZ;Wq437MUhh%x!+`TakB#(hJE3F9%@Y3fw5*4SyJK$hm)n@}XA9@!J*8AmUyfNSMs<*x-#)R9;N5yst z33FnP5!Oy)IVCb_HTZis)G70U*YI4D!;(C(_?C^j!$+pi_2aB%RWjNpzTwl^|5(>4%dn$u3#C1 z_>&JmE(b&I zym#MfHE1~(D5O!fyDri71tHnZ+8ZzfvOG82S-mz(4!tLd>i3zYiya#oBwI&WYQ#wt zUuITx?}+_rJ_d192uh)ZpXn}J!F>sZx{g5@@NlWTfGZJ8Rs|4Xq!B%|0ar{LE{U`A zq}}O#1&P^Tt>^IzkKc0X{$3P2d3)Pt2a{B_xFUs;Z0nE-)?1W&RDdbS?uiESoqE+I6oC+FrG!( zpE77JV;PHO@%Hh?A@soDy`=;8tSs@Oq;ZDrpa6+zjO!Ume#s;w73bRB-cEK7sGMa? zIq<`2dcyXWK@%zaZ)FRB3CBco-L2SaGbMGHZgwMQMs;6go-s8=aJ^|{Qh*S9v5L}3 z$c!J_p0OG2_S9*CCh5GGr%vv0s;g)e? zXJ03D?t~d`%c&OTZwhVTl81bJ^e*a(jATJ- zE_6a65M4&_=mwklu%TiEp?G%<9LXyzVGhtOVsO*Box~BRpF`Vf%ZcK4SF_8f45m@1 z-&c?s(|S*7AbMt}TU>5;@{ChdsUY)5lK^p{-+G}Y=i)M8k~SEtN5^?>(sMe3qL+U| z6|jIiB*=<%;fNGJ?c}45o^bXjL{k2NU*=9 zhiWBDJ%S%TJ*^l;DVvYr<29=DOAzB2K3aMBKG^MEX`M_Z11B%$O=@nNVU$@`I_Y8@FE=|!10Leg@WHQ} zDcmT17L~Q~qpI75*V_xPr$+!GjihcSQ?cx}YVOL%8_&9~7;Dx;7QdpI8C6*}=278bD;gY!CG%Un;vgv!MTuOKhQs z$!&xd^#Az9dx;lgc$5*Fk|@OT0M1JST;~`NFQ4nQVf?#y>43Kpt+Wc zhU^vSEP*n-MYgv7635MLv* z;ytCDAXe*-EZI_hjLtJedYH$4%_)+@9p^W+2v&d6HISZ#7r^fLpRK?Z;kbP?p49g(sz*I&$y`nEu>1 zejfVWVrCf2InXS(nlWKJKPaag(1^xmi#dE})%bCy7_eHol`6gAsrEpQHZYse)KHr9 zl|AGP(&eMiJ$;wXsb{51on*hB@|JSqqAWt)4;@Oac>#0(VV%>2NS~!B{)|tGrMFj zCd&dQuKa%aIpc(A)bw?1BOrcM$0`PKir}(I1FDONEH^kcXBZP@m&O`93Zrqt%cA*9 zS`9~{mBha{9b+w}^Hfi!$@mg;2O#2Su57#2Sv_)8zBE#( zAm>CkeQ;VgWQcHY_~Er=O2OvjMJERFYEZUrqDGu`Hm}qHmr)}rt)r4t-?Fat<&5OP zm&4hnH5x9++@oTL^{W+JqX{i_M?`8_o9c3>O&d496lg;If{5(Zh<`FR%NN(#jHbem z491#u!xK>PP^gwSS0(hNaJQ(!?Yn6=``iEo>EFFl6^?F=hjWBPM%YJYGu96+?+WlF zrae{f`u%Y+85r-*mt@IOcpC6^)mtyxF50fiUwiDzAKh+0*c93Sc!_|<4?}zb?-V|h z8r!I${CSer>LbfU3bP44(IA7;spU!?E&TQzEj+8y5P7u*@2^)S18EknF<%n4Txx0h zb^tDNj6vBbiIlUzYh242%43}J=78?`qI3HX=mSTQE(v2x7v$cl%3kHNc+Y3-%z`lo zhyhFRRWjE`h*3J^^4_17B3rx`0DwEV@!-d>1Ne2>qG|4Dc}t7!O$9L^&6{=I6|P+f$8 zkqQIRFq4U$=TCK#v-_*aNI#l{nz;y^jP2Q-^_^YkX2r77lRIuLbKpv-F|P47v;}2X z_206+{jgi$$3cpx9`mUDq>;Eg08LrA?K8UpLbGptRr8@|SEHpod)4ZEu2<;oI|FGu 
zRVoK-VCRYq&@#7AvdfPB=FFX*ZwIB9op|vPa~3}g-2RPLvI|AZgH~wDL5%X<9wQ+? zz-Q+bGPZ6uLyhowZT7@!>Gsut1eXgnSPWsX2#gPuPcSe*fL{xtjY4p{Pqb{?tJ^-W zxph}kObi}QKumi&07IzY(Qz7?)lxl{l+vBL0JY)T7O`m23=Esaw{`U6N+t~uiAKzP z1hMsoLn^l@%LWv`EYadqEU6dRI4_uH!>h#_b?nW#eFqlvCfvTfbB~Im5x`__P?j9X zLWnch;P#%=6+3hwa@3e1ym>_|4lknhi4aestTg8e$JgFTn$Mnot`=$#Ohy>1+wOdM zeQ?NrzF%N;&=8s{)~O50ceGg@bhF$w($KJ1oo=$DN6QT@Xg57ij)=+QR0f2ywy&tv z6_sRm8j%7`+NSgC(uGcEN;VAnR(fz;%xv|}5S2KGnznQTg6xj6EQiSq{867r>v>C{ zo0=N#&1!KPqDJ8+rzUEcu=N7LZ-5V_Zo6;n=yG=fTC5-x<;+xeiv|*Pr~}#FxvPCU zmOup${SxX#?9RR3U^X_*%@A1UH3?leQC_{|-3>`?IQ=y3O7SyMuM;tfcS|9ZTJ#tk zGeMRVAoDeL>7v4|mcl^$6kiaeY8DnuZJ}LEH)KquEkvXny+k0fGZ8GJ`6Gj^Rk#P( z=Te`ZT@cv4I?%AVFzhS{Nn|;|8msZOCOxy$6c(;^qTr38`E`=&gO(pEiHRIi$1QpA&m0A2; z``QX^*s2S<$X$?S;GI_R5HCit^mxWI0(wUTB(+u&b|D*~bY5>mUBli!VHkId#s9g>Ii)HisXl3?y$&)%VQ6rwacRx9>-Ok+z{g>#v;4t!__Pj zmzm06;qT%l7H0wx?uBIS^-F)x_ZferEMF}uNi&0~ z>m^&j1=#i`LVSScaS?kvhDbJpX-}-c^<$daYqCFeU)+I2yvz>Yks1%IwvWxqP9`Kc zHzin?Ehbei9n7D2e(b9`Z{4+X`|}1ARFB3k6^F1B5(5H~xeA9@Y;5f83Dc2ix9p5l z-!agi=K$|#i<>Z7ZDmy}OaTXJu_DJerb~6-=4v3BfHh}-B#QcvGnDY%#8;>;94aj%*SUltY7+0Xx0 z2UKDc0)_8V4?)4e6&mqU{rmLF)BDLP`k@I@CTTl5dC>eE)SI&c&hmZYc&oM$!S`PTB^l}!8q0=cv*K)ogJYA34JVE#|_ zGIq7`1cduzs*x@9&w2>21j~0fe269%B4*S*y zuz@6ce9>t}wha4nkt3^G{-8lYYMDv=-LsAgpvV}G?1D`d(Q>o!Sz=2h8FEO<+PnJh z9e^T5N3ah~!}M3O+prq(|KML$&Y>2xNR?q<$@jst$%1=2t*AOvR^->W#FUrQ*xk>t z$!eSXj?8|?E@^qdfBenN^~bjt=s)zTD)t&}A|-_hh1S*gn4fm&NvBwHgD1VoKVa73H4y5-I0ez2NR9BvDEu+?BzYxXFl$5s!WF zIxCnXIo)`FawkGU^LI&R)nlkHq#}cZAz$JIT&cN`ygwPX?p0Lu@yRt(6P(Dt-WcP$ zSm0dA#b|jua=h(gKA72g&V-8b;{B0u{i}NU6yX0A27!1KdPAHmH-`QvkTz2^LG^^E z0>-k4e?qoA%pRk|sPUbko)}~#Bq=+d*W+Dkh8RO-ANMp=;gU+~oq5EuNMFzL`BB8! 
zGW2n|;Dvf(VACV4!AdrH+N?=T^u}>}%-3v4z{JmZ(zEf)UHpoh>PQo6c-58cpz`u*G@%QqsObu5f)euU;X~Ddtp{!$j1V zM%oVrI=Xw^J3MLQ!*af2pO(SU8xTLOICeT-+&SHhkkpcD^!r7ZYO4~9=d!$>54bK_ zZ|=BtL13;|am{UAfx1$Vn_iz46_U8{KXx0R_i}df5Mi)H2A3ixO6TgFZw8mvj4mM~ zWULE+Hx0+nYGa>(KAw8TOCT#hk zXi8!hHg3hX&f`>!lYES&TeW4S25s&gCRht3f~{(a^^w7(@E46PvpM?Gn2w-$*R>9Fq&1fvBM=tdm-Y`xBXFTaP|oqpX*E zfVF$NSI-Gp2WPLHiIx1C989X`K6Is!GxZGQUQPg9 zIiP^Jc-C0w<2E}*(mN99V*(qhHe9l1(wPFYW`WOv43nf5gx?0@`t5-+vlM=LHi7m2U2@X2Wl_q^o&_E<< z+!IFu_5V`gKY;=oF#x1`K}WJyKv_xF-rx+T+ey6L> z7>p7(Pd;!Fdb|F57D*(w(aM7{H~BfGazzOMe1r*iLvXE)*eQqeO$`g&0kWSR)`It}Xkuc&u;R+={P)&_PcVf{j z@UhtN%!|AIR4jM&urM-W7UbNM>HO11NRi8>R_0oAP0NV6l7-p+)uhh!{EJ8h&*{Oe zNaK5=y9?%O6i~>u;$0&iVE($F>TX5a;>#N%Ykjsa;!y~xu2+JDBjZO;yPzCyL#JHzi)HWVjbauKjov-FdXi&D-fQ#p@zx zm4aJuS*=2E<3^J9WPHJ^^*P0c<0nsrhlf$+>kA_KOh>xmiqj=a`I>$D5;b?F{#GSz zZ7q+N+F?URRaZP!TXVl~*zE19=nQiu?X2?Xz~O~oyT~gEsi4K!<(C$F!=#k4=}OSH z&5a~s%^5dlu4OsLA?hkwNCVC#@y&ZrRpo_girPjG8dKc>S9EAoze{mOgmbgEcbA<6 zHY8x*G#ml}PPgZ&TkDCFqd^78jPML^SKPCe~k)i_3=(EoRE zKxV)Ru*Da$(00vxoJvdj^M=VC?UB;w_PMk=aAP-GEBj$?q((3*9G24Jexr@CbJ@1;~a$FQAz(8gVK0A3YjsXcaIy zggX@x9;DQ9Pz;fe@KlA!;;+*GEYvsvIiyaWI_3&H4kmfDVFMgW2IN1cqUU?k7`)IM z)~$J<%@FW}01iEAX>aXIf$z3VD4V??9=p~z9NbpIjp|f)!=B@|US(~2;oHWR)Dn#O z!&1RQCnPx1*Lbl9xLogZy?nrQcL{^Hi{<)U=^G#yJa;QUkbXiwFB z?$GmAAr|dvVpoa!_Thbk zbiU#LIpYW+!n3$H<(Vm*p!0S{L3@NT=WiKLEd0`SRFi%PEn`h!ddDCZ9t8aywBvyC z05Jf~7ZWpsMThh(y6IIBD-RCoEYuYnrhVtWb-LREhJq!qM6|Nt<)zs6c2XunHt!1V zD)lgSq*amSh50fM#Gj`>RJ%aM8!F&Ery9##eXCYkWSQDg;jA<8;yvS0{{FKRX%7wF z?&zJPv((A?ZYsB? zy}y%5LCs5OZ7Gg#WWp$wY}xnvdh+VUvCU0=ElmF2TQ`KR$RBOhiAAaeThDN^R+Ez! 
zzY)pN+;`VJi+A&6(zpxz8;g*R@_B4z5XUzkIu+44Y?$tqxO+uaj%=-S( z<*zUV>^&myL)cx=`TE|X`u!>bz?|^dZu!cql(1$-%UyYM3fTI3w5&Y8u=E4G?{xwE2LXP%1*zi+1q9eCzi>2O3W|NVecJ)P3o zkath3l}+R|Mpn0VgBiYmFupb?4q7{z8;iM-hmz4gyXuKZ0N93;$y^Y{yL(95Q1V9d zvMkb5(UrzM@Dl+2NZ>7;x|+XGX)-?u?f6vLjr=#QVgA9F5j$f~jaJCDD&7FWylcGe z&@ShU;n#?oJVqcB)4}ZQcaUXdahe#!fVQB)_B~-I>upjAIZGSms^^N2>%;E5ONw_Z~Dj3ScJhp z=$?bT-n#v%ut)eKv)XP}sVj{le&S7{~pDz(dH+jq0 z1Ua7XFnK|k?PpPSh0b`c%VMg{t^)1y+;}&|@fLP5Fi+PXAf%aag%4$fMtb; z8|tzu^LKQmJV|Pz7e4hGuS?=D)7)ayKI|FWH9yY;Jjk+Bt@V6qRC*>MjvFIwF zte)x`8x{GL$e(_vVwn@p;2xm9uJ7*}Cb3Cl=3cFIp2AqQNnn%M+7`p!arN|h0x_-e82azDRF=CCv8GPp zl7g&c(h!fZqZ_{s(S4D7`ab;35#yX_ofpN6?L+xW1GCvMzvh;D_mvdn65f3V=YjqMWDTCP(L0g7zxfsu zeYU#iszGXuc5$a1cmh&KDe$7}LPAJ)g>#8U(R}N3;z19u_PFe$W(j>R`*7a}6|bdc zoNR`CRVKwP9xOwn@rzB@{5(LYz*eXNT$CeWb|P$!NRT|{_G+eU3W18N()oL(=MXi~ zv!9aohz5_D7FShWuLC^RCCUNJUo~u;4%`6r=m;tneMm-}jo)P_T`#sk*C-^K%V4`! z&6(13GvCgPM$r97ecG8?3O-{|Eq+X>=o$CpWfgLDO{0z|``0m3W>!{<50Z!Th)FgeHe(Yr=~AekHr@+$Flz^L z7}etpH=S4)0s3`jEZB|OHME0NF1JIV31aGa^X)kweL;%(!?S{*_9!{c+`Lt%R%n>T zM}hOhGy5t71-82j*qy3MZpIw4Ars2S$~9=qXe{a20Z!e118%mB@BHTLYnBR`C_f}N z1b>zZ1^^5ht?*x?mA~biy^OwnjQzHcjkYVKhzR6+jNa1)%6Eu)3F3naC+1TNNdNL3i_YbFTiY*C~EAyx2M!v(Y`%) zQiM|saIEaaamlCdr{=pPR_MIwDawEOmkThV@lB46v&O4E*HBkxcbKkrGv$0v95@J@ zG)!ftCprE{rW#f?MuOVUbF!4_IGZW=NUZ(z;$UyKw;s_I`)8Svrso+rD_gDE>t|p) zKS-`7=)8ac;yp?BlI!HMgs7!#Cz|d>-I;h!3OLSLz3>BoP!X*ZQ(34y*(PIcAP&T5OO4?iG=u3b*xV;x~rK_8q|_WHY)g0n_94f806$nxvjbLkG-Tad5-9 z2lzX--t@)-0he$t)Sc_sg1BF_`%7d7&p=i(zuw@+hkq!fdhaK?^lh{&S0qg>cI;=I>f$%1+Pe#qNP}_- zoFFDA7ax#taxqEjG)pIzn^M3FXm0{+cg1#@6nE6Dq=5Exp65!Fj9AR9?8yEBG86gw z(A>~MMs7}_v7{T{uwt8ACpA;%J9j7w2Ehp-2?C74`eXh#8=k^AOTp(?Ly!hzvfU39 z6w0I)k5w|uwCUIVahGg#;Hkb*#$fAHu>(m1dukhp_cVa!#R}EWYYp2+OOPe<*!aol z^(aDbO7|N&)~n|Xwrufd!Vyc4T{tI;iwiJvhYgE-Y}NQt8P{Ul(*RD|Sp%oNzLr!; zFT3ZNMQ_f?$$JfmL3u$*F~x`Sj8eaBShWp`iQ$^F&DYk0*FO$Rn7Esfe~Q&NtbdOj zkDsm6x}7|NJs%{#oc+A@_&PikX`4d;+6W*Wc>1oQrei=Kom{y-NysboXI`WIN;p6P 
zb5r_FRF@yW^{((hRi&stR*5Z8Y0;vN;Z$tnX90cGx60{fVXcJ4Bd!9n#c#hrEd@&by7L!DT)i&b|k9=trr17K;PWtngl331xHgnqOKn*NquakcLsi? zs&V6`;ZuS>6uPjFbypL}RXkdIU*FaTsEblx~E%j8O*2u6fEw=+H&we^>b zjM(?TJd3Vz(xbMk^*9-B{TY$Ht^oiv!~nD8X*?HWn#@n zmD7|+4VzV`INc|G+V;mnr&JNQS7DD@4{PRhx)Kttn0x$HD}+0I8Epfk2U0v~-|F&C zva4^7yTbE5C9C^-ps&9MG*2d{17=4pLeb3w^VnF!Lwg`a*wxB)(NgKX@-Ey$|0|Dv(T&gQMu zoKmm!S;mexzr#64>-wm5!`M>L)*<|`TTA38`ql&qf4%?`URMhix3;#OQO~{1uQpyU zAQa6gYQoitSMMmU>3oI3*g4;HGIcXGb2X#Y_$@I3kgHj|Xzuhn+*n?An!MBR>xhDP zAm06z6WX^>+;l%Ihm3x)HhMNga*DaKp_o^IK)D_!)A#%Yl(ONdb7{^i^0Qv#`h4?v zVDoE^S4Q$wR!rC$oGzV<2K~6Kr#%4-xeRZVy>V!lW*ry98&>M`y{*uI(ONq_w~wK} z<>v4}i%CYsv(u!R{$Yt8`m!LYq5b&ohfpcq8~XM=z$D3NEQ)ERbTQtchbJfKPT};l zQRfM;9H8Bx1bN#rz%ef#Dcq5ncASUZoZ~ytZS$RW1Y*3s|7m#$(4rNmt}YZN`|I`G z+5P2dZgDNcV9+XuIpG#4+IZS(uBr2(ll2>8>IJ7JT@K^ch?F%qgMQb@Q%xuo9dh)z zC#ZOO3jBpE^4VFAnQ(R?Q!}0zk$@1uD@`GL`PO|CwEy~D%Xn!R@>Zhjk;6FsP(ov~ z)Ypf^DR~Y!zG9oN?|Y_F3tfSf^5-XTMom>eOj6#9T^wayp;$zN%*}ED(DY zEK(iCE`1a+soSoYn42Q{{ySce>H+Q?>Jn$8MCK0m@`gFfPZNIoqFF3-XX?tj?eSf6 zOvh1P;m=Q?+jf+n9@T4IQFzs5I#+JpPdk+Ilkuvm0=XZWERo82Hp^?zd6)PvliR7) z8R*Ar`x3SlWx$bffLc+@@_=qeD`NU!ENZiP242(xt z+?)NkZ#K_GWo4qLdsub0*iqqCL}D5o8?fLxsu7E_Lk@!EvOSPxK+6@E z@-C2t$N|hFw*G5CqZh-_*R%ur?(6U?Xn=q!{v+$;T(m!Bu}`_7#MKI!$lHqMuj^l} zm3fFf4hKsF#Y?Yc(%sLE)DidJ7B7H2w21?yZ!yI(0Og_n-MIDiXL%}nBi&K;|-X3bVm*ha{C0QEL(o%|o zNc&I@j=y{Tfz$z}CObQ%(0s9OL1y+*&<}?_%>0@frlUAR5xEx9jET12q$DcRy>Wk} z?}J7Q0dIawUfK8(e_Bg2g}i`OD!L3i)w3o&DONXKLA2@~fiet=Ne&n%&x%Ks-~^T(HcH$+ zWPh7bwyB|^AT#L<7PMzTIv}R#+7*!Pb*D=Q1V0T~Fc6X<+%tmdpde@NO7Ly6RC=sd zQmdb%-fOF(fV6w9Dglks=LYDYQ__nErP1-hY+ygYSfHa?dv4(;^Am?N02;!JEI`)9-r2Nn82;ly6$azJ|F+Izr>EbLMM_??1#`E5vX`Ppq;M9PH%8a_`i6kTSd|EX`G%=6 zKRHYv-*e1cJSqbBU`B_Q?V`x?b!T2K{b-YUcxD4TI(^8Yx#hv^HA>^=LEJ#_Vk@A8kKfl^S)!LnFrAX~tt3LiMjjfV@59YD$CjjJH` za3KdfMs$O0&HHb(e)>=7roi5sOyJ#BuK{24*nmpTJF%3~#gLD!O7o%*wmTtaHJWQcgKq}|AC*CSRz zg<#Uj;kUG41m0_BR#)SM!r3-0=+j(tY<2x&b`Ms*PdFX^NlyL*i&$!&!)i{o+M=4L zo?kjO34(>RRAGEa$E|omXx+ 
ztoSdmTuTlE}Htsc!l9&52+-TK#?yGGU!6ed1naqdq3aS;}g?>+$4S&BA?%|roeH9fOwnu&J zMfq2j_h*@ufTcROspFYRhV64;wSM!{p0U_j-Z-+-V1Xvt=m>2!ToCLUC+2M)g# z&UNMs-)0Skv8j$>)uh8?xAbnGc=TgcJITRrqYAq(?vEjSjJK(FQ;t*6;m22cw*>6# zC$~^5Xj9cu*SD9Gero|EREL8|Z*z6tZxSJ6xnT_okM(=lOS75^A0VE5%QuocXq(_) zdbwfdjpL4-_2+CAdmFlw^|B%RH>_Z)a}O>^_HX}}aX+C}5lxl+=Cj2?kB}pX>5gm^ zF?0s%2SaICifS6qWU0xgCjBUP>4ZYzqL1tLpLi3co+%_;3XFeYqwKNvw6v-|NRF`< zU8KnIsYFp-=(9i-ka4MZ>R(te+=R;Sf4Jun>*AYwKH^i{O4gfCOmwtsU*idtsT=uN zoV?HqS%xRtZyL*OMVDB)$BM{8e!*Zx!v<_sD;FU}{d6hDSlOz!y3;NN%&VO5+Nm^2 z`se4r3TY*i?oPQTVC+j3uR0U%F^+aCOL#{%m>W6>_8f?8W|b{9?@GZtO&Gn50LeVg zIB8y@W?EL<23;$Amr+emzZv?>&o{i#F^GH2nI{JMHYgUcVs3)IEVE{P8N3T5MY&Qqrz&rsD#LPwvn$0&k~V3H~X>;WnOCM zKD)y887YxmuGgspSQznmR*$!Cg+3!L!{ZXCzrg`8LO(4Dvc(_~5ITYb2=-XqeXH=0 zB)R?=fwz$8*JRPJFK*nQ!l}N6ko8}C71H*lb|x0y#kpL%WvaYym}snytGTH`#>TyK zV~mVOKvHEk@@|IbeBF0@q*QZOl_-8AVl{bwRCgIYY|yN=K7+O1v;t%(WESMi3~yfV z{dPWxj$!QBjJ0!BZ<_J$X<#0wq*9G08=0k|T=`w*zTfL2o1ZJpN56(tvqINNuDhUc zp18&?cS7DgEFgIk(@_M-*NEls7gBx1d^0gs&{QR}PWTZ~#cQ+me5Hd%W^8<5wD8&< z!QhoSpEet!1hZ#hT`!Y4pk~WOYmnD30p)~&5AvW{pio+hbGRoQQCcdN)=EpTPVC-3 z0c{H@DmYZGEI&M{vHy;W3|p1cAs)#0`@bm}R84Fdfx}Mwy6`S8;8a;oFA$;jS4$;YoS5ka^qAfi!CB0XN~bf>M&vESwbQ_o}_@;M|p z`*4Ud;%E)Nu*#-iE`mWTlUB-)e?TDrGq{$d37CMS$lDJ1BCXI;T&~HWuK99yA<6UY zHvZDpBK_ZAjtj&qdp*`EJtF8WXaA&5c5TmJt12l)h{=Q)B+F}mkb{8P>G%Fq5L