From 62178aac5c70a5c8fa0c0d80d85b3bb1377456ce Mon Sep 17 00:00:00 2001
From: "deepsource-autofix[bot]" <62050782+deepsource-autofix[bot]@users.noreply.github.com>
Date: Tue, 12 Jan 2021 10:34:18 +0000
Subject: [PATCH 1/2] Refactor unnecessary `else` / `elif` when `if` block has a `return` statement

---
 monai/apps/datasets.py                   |  5 +-
 monai/data/dataset.py                    |  3 +-
 monai/data/decathlon_datalist.py         |  5 +-
 monai/engines/utils.py                   |  5 +-
 monai/handlers/confusion_matrix.py       |  5 +-
 monai/metrics/confusion_matrix.py        | 57 +++++++++++------------
 monai/metrics/hausdorff_distance.py      |  5 +-
 monai/metrics/rocauc.py                  | 59 ++++++++++++------------
 monai/networks/layers/factories.py       | 11 ++---
 monai/networks/nets/autoencoder.py       | 23 +++++----
 monai/networks/nets/dynunet.py           |  2 +-
 monai/networks/nets/fullyconnectednet.py |  3 +-
 monai/networks/nets/unet.py              | 21 ++++-----
 monai/transforms/croppad/array.py        | 13 ++----
 monai/transforms/intensity/array.py      |  5 +-
 monai/transforms/post/array.py           |  5 +-
 monai/transforms/utility/array.py        |  8 ++--
 monai/transforms/utils.py                |  2 +-
 monai/utils/misc.py                      |  2 +-
 monai/utils/module.py                    |  3 +-
 setup.py                                 |  4 +-
 tests/test_zipdataset.py                 |  3 +-
 22 files changed, 113 insertions(+), 136 deletions(-)
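
An illustrative aside on the pattern this commit applies throughout (hypothetical code, not taken from the diff): when every branch of an if/elif/else chain ends in a `return`, the `elif`/`else` keywords add nesting without changing control flow, so they can be dropped.

    # before: each branch returns, so the elif/else are redundant
    def sign(x: float) -> str:
        if x > 0:
            return "positive"
        elif x < 0:
            return "negative"
        else:
            return "zero"

    # after: identical behaviour, one less level of branching to read
    def sign(x: float) -> str:
        if x > 0:
            return "positive"
        if x < 0:
            return "negative"
        return "zero"
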
diff --git a/monai/apps/datasets.py b/monai/apps/datasets.py
index f5a9bcdcbc..b4bc40ae1f 100644
--- a/monai/apps/datasets.py
+++ b/monai/apps/datasets.py
@@ -291,10 +291,9 @@ def get_properties(self, keys: Optional[Union[Sequence[str], str]] = None):
         """
         if keys is None:
             return self._properties
-        elif self._properties is not None:
+        if self._properties is not None:
             return {key: self._properties[key] for key in ensure_tuple(keys)}
-        else:
-            return {}
+        return {}
 
     def _generate_data_list(self, dataset_dir: str) -> List[Dict]:
         section = "training" if self.section in ["training", "validation"] else "test"
diff --git a/monai/data/dataset.py b/monai/data/dataset.py
index 71aedc7413..ed0d590bf7 100644
--- a/monai/data/dataset.py
+++ b/monai/data/dataset.py
@@ -699,8 +699,7 @@ def _try_shutdown(self):
                 self._round = 0
                 self._replace_done = False
                 return True
-            else:
-                return False
+            return False
 
     def shutdown(self):
         """
diff --git a/monai/data/decathlon_datalist.py b/monai/data/decathlon_datalist.py
index 38e217f19b..6167e83e47 100644
--- a/monai/data/decathlon_datalist.py
+++ b/monai/data/decathlon_datalist.py
@@ -39,13 +39,12 @@ def _compute_path(base_dir, element):
     """
     if isinstance(element, str):
         return os.path.normpath(os.path.join(base_dir, element))
-    elif isinstance(element, list):
+    if isinstance(element, list):
        for e in element:
            if not isinstance(e, str):
                raise TypeError(f"Every file path in element must be a str but got {type(element).__name__}.")
        return [os.path.normpath(os.path.join(base_dir, e)) for e in element]
-    else:
-        raise TypeError(f"element must be one of (str, list) but is {type(element).__name__}.")
+    raise TypeError(f"element must be one of (str, list) but is {type(element).__name__}.")
 
 
 def _append_paths(base_dir: str, is_segmentation: bool, items: List[Dict]) -> List[Dict]:
diff --git a/monai/engines/utils.py b/monai/engines/utils.py
index d3d4d62c0b..028051883f 100644
--- a/monai/engines/utils.py
+++ b/monai/engines/utils.py
@@ -95,10 +95,9 @@ def default_prepare_batch(
             batchdata[CommonKeys.IMAGE].to(device=device, non_blocking=non_blocking),
             batchdata[CommonKeys.LABEL].to(device=device, non_blocking=non_blocking),
         )
-    elif GanKeys.REALS in batchdata:
+    if GanKeys.REALS in batchdata:
         return batchdata[GanKeys.REALS].to(device=device, non_blocking=non_blocking)
-    else:
-        return batchdata[CommonKeys.IMAGE].to(device=device, non_blocking=non_blocking), None
+    return batchdata[CommonKeys.IMAGE].to(device=device, non_blocking=non_blocking), None
 
 
 def default_make_latent(
diff --git a/monai/handlers/confusion_matrix.py b/monai/handlers/confusion_matrix.py
index 28b52f8609..fe60b964a7 100644
--- a/monai/handlers/confusion_matrix.py
+++ b/monai/handlers/confusion_matrix.py
@@ -122,6 +122,5 @@ def compute(self):
                     "ConfusionMatrix metric must have at least one example before it can be computed."
                 )
             return self._sum / self._num_examples
-        else:
-            confusion_matrix = torch.tensor([self._total_tp, self._total_fp, self._total_tn, self._total_fn])
-            return compute_confusion_matrix_metric(self.metric_name, confusion_matrix)
+        confusion_matrix = torch.tensor([self._total_tp, self._total_fp, self._total_tn, self._total_fn])
+        return compute_confusion_matrix_metric(self.metric_name, confusion_matrix)
diff --git a/monai/metrics/confusion_matrix.py b/monai/metrics/confusion_matrix.py
index b9cc92ceeb..bc06fe5855 100644
--- a/monai/metrics/confusion_matrix.py
+++ b/monai/metrics/confusion_matrix.py
@@ -103,16 +103,15 @@ def __call__(self, y_pred: torch.Tensor, y: torch.Tensor):
                 confusion_matrix = compute_confusion_matrix_metric(self.metric_name, confusion_matrix)
                 f, not_nans = do_metric_reduction(confusion_matrix, self.reduction)
                 return f, not_nans
-            else:
-                if len(self.metric_name) < 1:
-                    raise ValueError("the sequence should at least has on metric name.")
-                results = []
-                for metric_name in self.metric_name:
-                    sub_confusion_matrix = compute_confusion_matrix_metric(metric_name, confusion_matrix)
-                    f, not_nans = do_metric_reduction(sub_confusion_matrix, self.reduction)
-                    results.append(f)
-                    results.append(not_nans)
-                return results
+            if len(self.metric_name) < 1:
+                raise ValueError("the sequence should at least has on metric name.")
+            results = []
+            for metric_name in self.metric_name:
+                sub_confusion_matrix = compute_confusion_matrix_metric(metric_name, confusion_matrix)
+                f, not_nans = do_metric_reduction(sub_confusion_matrix, self.reduction)
+                results.append(f)
+                results.append(not_nans)
+            return results
         else:
             return confusion_matrix
 
@@ -264,8 +263,7 @@ def compute_confusion_matrix_metric(metric_name: str, confusion_matrix: torch.Te
 
     if isinstance(denominator, torch.Tensor):
         return torch.where(denominator != 0, numerator / denominator, nan_tensor)
-    else:
-        return numerator / denominator
+    return numerator / denominator
 
 
 def check_confusion_matrix_metric_name(metric_name: str):
@@ -284,37 +282,36 @@ def check_confusion_matrix_metric_name(metric_name: str):
     metric_name = metric_name.lower()
     if metric_name in ["sensitivity", "recall", "hit_rate", "true_positive_rate", "tpr"]:
         return "tpr"
-    elif metric_name in ["specificity", "selectivity", "true_negative_rate", "tnr"]:
+    if metric_name in ["specificity", "selectivity", "true_negative_rate", "tnr"]:
         return "tnr"
-    elif metric_name in ["precision", "positive_predictive_value", "ppv"]:
+    if metric_name in ["precision", "positive_predictive_value", "ppv"]:
         return "ppv"
-    elif metric_name in ["negative_predictive_value", "npv"]:
+    if metric_name in ["negative_predictive_value", "npv"]:
         return "npv"
-    elif metric_name in ["miss_rate", "false_negative_rate", "fnr"]:
+    if metric_name in ["miss_rate", "false_negative_rate", "fnr"]:
         return "fnr"
-    elif metric_name in ["fall_out", "false_positive_rate", "fpr"]:
+    if metric_name in ["fall_out", "false_positive_rate", "fpr"]:
         return "fpr"
-    elif metric_name in ["false_discovery_rate", "fdr"]:
+    if metric_name in ["false_discovery_rate", "fdr"]:
         return "fdr"
-    elif metric_name in ["false_omission_rate", "for"]:
+    if metric_name in ["false_omission_rate", "for"]:
         return "for"
-    elif metric_name in ["prevalence_threshold", "pt"]:
+    if metric_name in ["prevalence_threshold", "pt"]:
         return "pt"
-    elif metric_name in ["threat_score", "critical_success_index", "ts", "csi"]:
+    if metric_name in ["threat_score", "critical_success_index", "ts", "csi"]:
         return "ts"
-    elif metric_name in ["accuracy", "acc"]:
+    if metric_name in ["accuracy", "acc"]:
         return "acc"
-    elif metric_name in ["balanced_accuracy", "ba"]:
+    if metric_name in ["balanced_accuracy", "ba"]:
         return "ba"
-    elif metric_name in ["f1_score", "f1"]:
+    if metric_name in ["f1_score", "f1"]:
         return "f1"
-    elif metric_name in ["matthews_correlation_coefficient", "mcc"]:
+    if metric_name in ["matthews_correlation_coefficient", "mcc"]:
         return "mcc"
-    elif metric_name in ["fowlkes_mallows_index", "fm"]:
+    if metric_name in ["fowlkes_mallows_index", "fm"]:
         return "fm"
-    elif metric_name in ["informedness", "bookmaker_informedness", "bm"]:
+    if metric_name in ["informedness", "bookmaker_informedness", "bm"]:
         return "bm"
-    elif metric_name in ["markedness", "deltap", "mk"]:
+    if metric_name in ["markedness", "deltap", "mk"]:
         return "mk"
-    else:
-        raise NotImplementedError("the metric is not implemented.")
+    raise NotImplementedError("the metric is not implemented.")
diff --git a/monai/metrics/hausdorff_distance.py b/monai/metrics/hausdorff_distance.py
index b88cc77c99..8ecc19ec46 100644
--- a/monai/metrics/hausdorff_distance.py
+++ b/monai/metrics/hausdorff_distance.py
@@ -166,7 +166,6 @@ def compute_percent_hausdorff_distance(
 
     if not percentile:
         return surface_distance.max()
-    elif 0 <= percentile <= 100:
+    if 0 <= percentile <= 100:
         return np.percentile(surface_distance, percentile)
-    else:
-        raise ValueError(f"percentile should be a value between 0 and 100, get {percentile}.")
+    raise ValueError(f"percentile should be a value between 0 and 100, get {percentile}.")
diff --git a/monai/metrics/rocauc.py b/monai/metrics/rocauc.py
index e17330c35d..8d12f90867 100644
--- a/monai/metrics/rocauc.py
+++ b/monai/metrics/rocauc.py
@@ -114,33 +114,32 @@ def compute_roc_auc(
         if softmax:
             warnings.warn("y_pred has only one channel, softmax=True ignored.")
         return _calculate(y, y_pred)
-    else:
-        n_classes = y_pred.shape[1]
-        if to_onehot_y:
-            y = one_hot(y, n_classes)
-        if softmax and other_act is not None:
-            raise ValueError("Incompatible values: softmax=True and other_act is not None.")
-        if softmax:
-            y_pred = y_pred.float().softmax(dim=1)
-        if other_act is not None:
-            if not callable(other_act):
-                raise TypeError(f"other_act must be None or callable but is {type(other_act).__name__}.")
-            y_pred = other_act(y_pred)
-
-        assert y.shape == y_pred.shape, "data shapes of y_pred and y do not match."
-
-        average = Average(average)
-        if average == Average.MICRO:
-            return _calculate(y.flatten(), y_pred.flatten())
-        y, y_pred = y.transpose(0, 1), y_pred.transpose(0, 1)
-        auc_values = [_calculate(y_, y_pred_) for y_, y_pred_ in zip(y, y_pred)]
-        if average == Average.NONE:
-            return auc_values
-        if average == Average.MACRO:
-            return np.mean(auc_values)
-        if average == Average.WEIGHTED:
-            weights = [sum(y_) for y_ in y]
-            return np.average(auc_values, weights=weights)
-        raise ValueError(
-            f'Unsupported average: {average}, available options are ["macro", "weighted", "micro", "none"].'
-        )
+    n_classes = y_pred.shape[1]
+    if to_onehot_y:
+        y = one_hot(y, n_classes)
+    if softmax and other_act is not None:
+        raise ValueError("Incompatible values: softmax=True and other_act is not None.")
+    if softmax:
+        y_pred = y_pred.float().softmax(dim=1)
+    if other_act is not None:
+        if not callable(other_act):
+            raise TypeError(f"other_act must be None or callable but is {type(other_act).__name__}.")
+        y_pred = other_act(y_pred)
+
+    assert y.shape == y_pred.shape, "data shapes of y_pred and y do not match."
+
+    average = Average(average)
+    if average == Average.MICRO:
+        return _calculate(y.flatten(), y_pred.flatten())
+    y, y_pred = y.transpose(0, 1), y_pred.transpose(0, 1)
+    auc_values = [_calculate(y_, y_pred_) for y_, y_pred_ in zip(y, y_pred)]
+    if average == Average.NONE:
+        return auc_values
+    if average == Average.MACRO:
+        return np.mean(auc_values)
+    if average == Average.WEIGHTED:
+        weights = [sum(y_) for y_ in y]
+        return np.average(auc_values, weights=weights)
+    raise ValueError(
+        f'Unsupported average: {average}, available options are ["macro", "weighted", "micro", "none"].'
+    )
diff --git a/monai/networks/layers/factories.py b/monai/networks/layers/factories.py
index 64de9896a5..ec36b2ed95 100644
--- a/monai/networks/layers/factories.py
+++ b/monai/networks/layers/factories.py
@@ -178,14 +178,13 @@ def split_args(args):
 
     if isinstance(args, str):
         return args, {}
-    else:
-        name_obj, name_args = args
+    name_obj, name_args = args
 
-        if not isinstance(name_obj, (str, Callable)) or not isinstance(name_args, dict):
-            msg = "Layer specifiers must be single strings or pairs of the form (name/object-types, argument dict)"
-            raise TypeError(msg)
+    if not isinstance(name_obj, (str, Callable)) or not isinstance(name_args, dict):
+        msg = "Layer specifiers must be single strings or pairs of the form (name/object-types, argument dict)"
+        raise TypeError(msg)
 
-        return name_obj, name_args
+    return name_obj, name_args
 
 
 # Define factories for these layer types
diff --git a/monai/networks/nets/autoencoder.py b/monai/networks/nets/autoencoder.py
index e97502da8f..53e96b0841 100644
--- a/monai/networks/nets/autoencoder.py
+++ b/monai/networks/nets/autoencoder.py
@@ -148,18 +148,17 @@ def _get_encode_layer(self, in_channels: int, out_channels: int, strides: int, i
                 dropout=self.dropout,
                 last_conv_only=is_last,
             )
-        else:
-            return Convolution(
-                dimensions=self.dimensions,
-                in_channels=in_channels,
-                out_channels=out_channels,
-                strides=strides,
-                kernel_size=self.kernel_size,
-                act=self.act,
-                norm=self.norm,
-                dropout=self.dropout,
-                conv_only=is_last,
-            )
+        return Convolution(
+            dimensions=self.dimensions,
+            in_channels=in_channels,
+            out_channels=out_channels,
+            strides=strides,
+            kernel_size=self.kernel_size,
+            act=self.act,
+            norm=self.norm,
+            dropout=self.dropout,
+            conv_only=is_last,
+        )
 
     def _get_decode_layer(self, in_channels: int, out_channels: int, strides: int, is_last: bool) -> nn.Sequential:
 
diff --git a/monai/networks/nets/dynunet.py b/monai/networks/nets/dynunet.py
index e8c3b517d9..8958199d3f 100644
--- a/monai/networks/nets/dynunet.py
+++ b/monai/networks/nets/dynunet.py
@@ -136,7 +136,7 @@ def create_skips(index, downsamples, upsamples, superheads, bottleneck):
 
             if len(downsamples) == 0:  # bottom of the network, pass the bottleneck block
                 return bottleneck
-            elif index == 0:  # don't associate a supervision head with self.input_block
+            if index == 0:  # don't associate a supervision head with self.input_block
                 current_head, rest_heads = nn.Identity(), superheads
             else:
                 current_head, rest_heads = superheads[0], superheads[1:]
diff --git a/monai/networks/nets/fullyconnectednet.py b/monai/networks/nets/fullyconnectednet.py
index 91377c410c..b906bab015 100644
--- a/monai/networks/nets/fullyconnectednet.py
+++ b/monai/networks/nets/fullyconnectednet.py
@@ -25,8 +25,7 @@ def _get_adn_layer(
 ) -> ADN:
     if ordering:
         return ADN(act=act, dropout=dropout, dropout_dim=1, ordering=ordering)
-    else:
-        return ADN(act=act, dropout=dropout, dropout_dim=1)
+    return ADN(act=act, dropout=dropout, dropout_dim=1)
 
 
 class FullyConnectedNet(nn.Sequential):
diff --git a/monai/networks/nets/unet.py b/monai/networks/nets/unet.py
index 3626b21a64..f3742d05b5 100644
--- a/monai/networks/nets/unet.py
+++ b/monai/networks/nets/unet.py
@@ -126,17 +126,16 @@ def _get_down_layer(self, in_channels: int, out_channels: int, strides: int, is_
                 norm=self.norm,
                 dropout=self.dropout,
             )
-        else:
-            return Convolution(
-                self.dimensions,
-                in_channels,
-                out_channels,
-                strides=strides,
-                kernel_size=self.kernel_size,
-                act=self.act,
-                norm=self.norm,
-                dropout=self.dropout,
-            )
+        return Convolution(
+            self.dimensions,
+            in_channels,
+            out_channels,
+            strides=strides,
+            kernel_size=self.kernel_size,
+            act=self.act,
+            norm=self.norm,
+            dropout=self.dropout,
+        )
 
     def _get_bottom_layer(self, in_channels: int, out_channels: int) -> nn.Module:
         """
diff --git a/monai/transforms/croppad/array.py b/monai/transforms/croppad/array.py
index 9dace83c4d..073d581ee1 100644
--- a/monai/transforms/croppad/array.py
+++ b/monai/transforms/croppad/array.py
@@ -79,8 +79,7 @@ def _determine_data_pad_width(self, data_shape: Sequence[int]) -> List[Tuple[int
                 width = max(self.spatial_size[i] - data_shape[i], 0)
                 pad_width.append((width // 2, width - (width // 2)))
             return pad_width
-        else:
-            return [(0, max(self.spatial_size[i] - data_shape[i], 0)) for i in range(len(self.spatial_size))]
+        return [(0, max(self.spatial_size[i] - data_shape[i], 0)) for i in range(len(self.spatial_size))]
 
     def __call__(self, img: np.ndarray, mode: Optional[Union[NumpyPadMode, str]] = None) -> np.ndarray:
         """
@@ -97,9 +96,8 @@ def __call__(self, img: np.ndarray, mode: Optional[Union[NumpyPadMode, str]] = N
         if not np.asarray(all_pad_width).any():
             # all zeros, skip padding
             return img
-        else:
-            img = np.pad(img, all_pad_width, mode=self.mode.value if mode is None else NumpyPadMode(mode).value)
-            return img
+        img = np.pad(img, all_pad_width, mode=self.mode.value if mode is None else NumpyPadMode(mode).value)
+        return img
 
 
 class BorderPad(Transform):
@@ -317,9 +315,8 @@ def __call__(self, img: np.ndarray) -> np.ndarray:
         assert self._size is not None
         if self.random_center:
             return img[self._slices]
-        else:
-            cropper = CenterSpatialCrop(self._size)
-            return cropper(img)
+        cropper = CenterSpatialCrop(self._size)
+        return cropper(img)
 
 
 class RandSpatialCropSamples(Randomizable, Transform):
diff --git a/monai/transforms/intensity/array.py b/monai/transforms/intensity/array.py
index 84d25c663f..d1ba8ce7fa 100644
--- a/monai/transforms/intensity/array.py
+++ b/monai/transforms/intensity/array.py
@@ -163,10 +163,9 @@ def __call__(self, img: np.ndarray) -> np.ndarray:
         """
         if self.minv is not None and self.maxv is not None:
             return rescale_array(img, self.minv, self.maxv, img.dtype)
-        elif self.factor is not None:
+        if self.factor is not None:
             return (img * (1 + self.factor)).astype(img.dtype)
-        else:
-            raise ValueError("Incompatible values: minv=None or maxv=None and factor=None.")
+        raise ValueError("Incompatible values: minv=None or maxv=None and factor=None.")
 
 
 class RandScaleIntensity(Randomizable, Transform):
diff --git a/monai/transforms/post/array.py b/monai/transforms/post/array.py
index bf6a431b72..3249fbaf71 100644
--- a/monai/transforms/post/array.py
+++ b/monai/transforms/post/array.py
@@ -414,6 +414,5 @@ def __call__(self, img: Union[Sequence[torch.Tensor], torch.Tensor]) -> torch.Te
         if self.num_classes is not None:
             # if not One-Hot, use "argmax" to vote the most common class
             return torch.argmax(img_, dim=1, keepdim=has_ch_dim)
-        else:
-            # for One-Hot data, round the float number to 0 or 1
-            return torch.round(img_)
+        # for One-Hot data, round the float number to 0 or 1
+        return torch.round(img_)
diff --git a/monai/transforms/utility/array.py b/monai/transforms/utility/array.py
index bbb78fb7bb..4268df1e25 100644
--- a/monai/transforms/utility/array.py
+++ b/monai/transforms/utility/array.py
@@ -226,10 +226,9 @@ def __call__(
         """
         if isinstance(img, np.ndarray):
             return img.astype(self.dtype if dtype is None else dtype)
-        elif torch.is_tensor(img):
+        if torch.is_tensor(img):
             return torch.as_tensor(img, dtype=self.dtype if dtype is None else dtype)
-        else:
-            raise TypeError(f"img must be one of (numpy.ndarray, torch.Tensor) but is {type(img).__name__}.")
+        raise TypeError(f"img must be one of (numpy.ndarray, torch.Tensor) but is {type(img).__name__}.")
 
 
 class ToTensor(Transform):
@@ -460,8 +459,7 @@ def __call__(self, img: Union[np.ndarray, torch.Tensor], func: Optional[Callable
             return func(img)
         if self.func is not None:
             return self.func(img)
-        else:
-            raise ValueError("Incompatible values: func=None and self.func=None.")
+        raise ValueError("Incompatible values: func=None and self.func=None.")
 
 
 class LabelToMask(Transform):
diff --git a/monai/transforms/utils.py b/monai/transforms/utils.py
index f28f95688d..6f94660ccd 100644
--- a/monai/transforms/utils.py
+++ b/monai/transforms/utils.py
@@ -490,7 +490,7 @@ def create_shear(spatial_dims: int, coefs: Union[Sequence[float], float]) -> np.
     if spatial_dims == 2:
         coefs = ensure_tuple_size(coefs, dim=2, pad_val=0.0)
         return np.array([[1, coefs[0], 0.0], [coefs[1], 1.0, 0.0], [0.0, 0.0, 1.0]])
-    elif spatial_dims == 3:
+    if spatial_dims == 3:
         coefs = ensure_tuple_size(coefs, dim=6, pad_val=0.0)
         return np.array(
             [
diff --git a/monai/utils/misc.py b/monai/utils/misc.py
index 2b4d078ad7..343e809f70 100644
--- a/monai/utils/misc.py
+++ b/monai/utils/misc.py
@@ -120,7 +120,7 @@ def ensure_tuple_rep(tup: Any, dim: int) -> Tuple[Any, ...]:
     """
     if not issequenceiterable(tup):
         return (tup,) * dim
-    elif len(tup) == dim:
+    if len(tup) == dim:
         return tuple(tup)
 
     raise ValueError(f"Sequence must have length {dim}, got {len(tup)}.")
diff --git a/monai/utils/module.py b/monai/utils/module.py
index 7e6ca83b67..1d06585e3b 100644
--- a/monai/utils/module.py
+++ b/monai/utils/module.py
@@ -85,8 +85,7 @@ def get_full_type_name(typeobj):
     module = typeobj.__module__
     if module is None or module == str.__class__.__module__:
         return typeobj.__name__  # Avoid reporting __builtin__
-    else:
-        return module + "." + typeobj.__name__
+    return module + "." + typeobj.__name__
 
 
 def min_version(the_module, min_version_str: str = "") -> bool:
diff --git a/setup.py b/setup.py
index d62a0398d2..e8a54cc9fc 100644
--- a/setup.py
+++ b/setup.py
@@ -62,9 +62,9 @@ def torch_parallel_backend():
         backend = match.group("backend")
         if backend == "OpenMP":
             return "AT_PARALLEL_OPENMP"
-        elif backend == "native thread pool":
+        if backend == "native thread pool":
             return "AT_PARALLEL_NATIVE"
-        elif backend == "native thread pool and TBB":
+        if backend == "native thread pool and TBB":
             return "AT_PARALLEL_NATIVE_TBB"
     except (NameError, AttributeError):  # no torch or no binaries
         warnings.warn("Could not determine torch parallel_info.")
diff --git a/tests/test_zipdataset.py b/tests/test_zipdataset.py
index afd6ab11ab..1bdb6458d3 100644
--- a/tests/test_zipdataset.py
+++ b/tests/test_zipdataset.py
@@ -28,8 +28,7 @@ def __len__(self):
     def __getitem__(self, index):
         if self.index_only:
             return index
-        else:
-            return 1, 2, index
+        return 1, 2, index
 
 
 TEST_CASE_1 = [[Dataset_(5), Dataset_(5), Dataset_(5)], None, (0, 0, 0), 5]

From e78237a0dc839fbfb3bd045e0a5a23ebef1864a2 Mon Sep 17 00:00:00 2001
From: monai-bot
Date: Tue, 12 Jan 2021 12:35:22 +0000
Subject: [PATCH 2/2] [MONAI] python code formatting

Signed-off-by: monai-bot
---
 monai/csrc/resample/pushpull_cpu.cpp | 44 ++++++++++++++++++----------
 monai/csrc/resample/pushpull_cuda.cu | 44 ++++++++++++++++++----------
 monai/metrics/rocauc.py              |  4 +--
 3 files changed, 57 insertions(+), 35 deletions(-)
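
The C++/CUDA hunks below only re-indent chained ternary operators (each fallback branch moves onto its own line); the Python hunk re-joins a previously wrapped `raise` onto a single line. A minimal sketch of that Python change, assuming a black-style formatter with a long line-length limit (the function name here is hypothetical):

    # before the formatter ran, the call was split across three lines:
    #     raise ValueError(
    #         f"Unsupported average: {average}."
    #     )
    # after, the call fits within the line limit and is joined onto one line:
    def check_average(average: str) -> None:
        raise ValueError(f"Unsupported average: {average}.")
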
diff --git a/monai/csrc/resample/pushpull_cpu.cpp b/monai/csrc/resample/pushpull_cpu.cpp
index 204fa846d3..40743a6cf1 100644
--- a/monai/csrc/resample/pushpull_cpu.cpp
+++ b/monai/csrc/resample/pushpull_cpu.cpp
@@ -97,19 +97,25 @@ MONAI_NAMESPACE_DEVICE { // cpu
       bool do_sgrad)
       : dim(dim),
         bound0(bound.size() > 0 ? bound[0] : BoundType::Replicate),
-        bound1(bound.size() > 1 ? bound[1] : bound.size() > 0 ? bound[0] : BoundType::Replicate),
+        bound1(
+            bound.size() > 1 ? bound[1]
+            : bound.size() > 0 ? bound[0]
+                               : BoundType::Replicate),
         bound2(
-            bound.size() > 2 ? bound[2]
-                             : bound.size() > 1 ? bound[1] : bound.size() > 0 ? bound[0] : BoundType::Replicate),
+            bound.size() > 2 ? bound[2]
+            : bound.size() > 1 ? bound[1]
+            : bound.size() > 0 ? bound[0]
+                               : BoundType::Replicate),
         interpolation0(interpolation.size() > 0 ? interpolation[0] : InterpolationType::Linear),
         interpolation1(
-            interpolation.size() > 1 ? interpolation[1]
-                                     : interpolation.size() > 0 ? interpolation[0] : InterpolationType::Linear),
+            interpolation.size() > 1 ? interpolation[1]
+            : interpolation.size() > 0 ? interpolation[0]
+                                       : InterpolationType::Linear),
         interpolation2(
-            interpolation.size() > 2
-                ? interpolation[2]
+            interpolation.size() > 2 ? interpolation[2]
             : interpolation.size() > 1 ? interpolation[1]
-                                       : interpolation.size() > 0 ? interpolation[0] : InterpolationType::Linear),
+            : interpolation.size() > 0 ? interpolation[0]
+                                       : InterpolationType::Linear),
         extrapolate(extrapolate),
         do_pull(do_pull),
         do_push(do_push),
@@ -136,13 +142,14 @@ MONAI_NAMESPACE_DEVICE { // cpu
         bound2(bound),
         interpolation0(interpolation.size() > 0 ? interpolation[0] : InterpolationType::Linear),
         interpolation1(
-            interpolation.size() > 1 ? interpolation[1]
-                                     : interpolation.size() > 0 ? interpolation[0] : InterpolationType::Linear),
+            interpolation.size() > 1 ? interpolation[1]
+            : interpolation.size() > 0 ? interpolation[0]
+                                       : InterpolationType::Linear),
         interpolation2(
-            interpolation.size() > 2
-                ? interpolation[2]
+            interpolation.size() > 2 ? interpolation[2]
             : interpolation.size() > 1 ? interpolation[1]
-                                       : interpolation.size() > 0 ? interpolation[0] : InterpolationType::Linear),
+            : interpolation.size() > 0 ? interpolation[0]
+                                       : InterpolationType::Linear),
         extrapolate(extrapolate),
         do_pull(do_pull),
         do_push(do_push),
@@ -165,10 +172,15 @@ MONAI_NAMESPACE_DEVICE { // cpu
       bool do_sgrad)
       : dim(dim),
         bound0(bound.size() > 0 ? bound[0] : BoundType::Replicate),
-        bound1(bound.size() > 1 ? bound[1] : bound.size() > 0 ? bound[0] : BoundType::Replicate),
+        bound1(
+            bound.size() > 1 ? bound[1]
+            : bound.size() > 0 ? bound[0]
+                               : BoundType::Replicate),
         bound2(
-            bound.size() > 2 ? bound[2]
-                             : bound.size() > 1 ? bound[1] : bound.size() > 0 ? bound[0] : BoundType::Replicate),
+            bound.size() > 2 ? bound[2]
+            : bound.size() > 1 ? bound[1]
+            : bound.size() > 0 ? bound[0]
+                               : BoundType::Replicate),
         interpolation0(interpolation),
         interpolation1(interpolation),
         interpolation2(interpolation),
diff --git a/monai/csrc/resample/pushpull_cuda.cu b/monai/csrc/resample/pushpull_cuda.cu
index cbb0a36a80..ecfeb562ab 100644
--- a/monai/csrc/resample/pushpull_cuda.cu
+++ b/monai/csrc/resample/pushpull_cuda.cu
@@ -94,19 +94,25 @@ MONAI_NAMESPACE_DEVICE { // cuda
       bool do_sgrad)
      : dim(dim),
         bound0(bound.size() > 0 ? bound[0] : BoundType::Replicate),
-        bound1(bound.size() > 1 ? bound[1] : bound.size() > 0 ? bound[0] : BoundType::Replicate),
+        bound1(
+            bound.size() > 1 ? bound[1]
+            : bound.size() > 0 ? bound[0]
+                               : BoundType::Replicate),
         bound2(
-            bound.size() > 2 ? bound[2]
-                             : bound.size() > 1 ? bound[1] : bound.size() > 0 ? bound[0] : BoundType::Replicate),
+            bound.size() > 2 ? bound[2]
+            : bound.size() > 1 ? bound[1]
+            : bound.size() > 0 ? bound[0]
+                               : BoundType::Replicate),
         interpolation0(interpolation.size() > 0 ? interpolation[0] : InterpolationType::Linear),
         interpolation1(
-            interpolation.size() > 1 ? interpolation[1]
-                                     : interpolation.size() > 0 ? interpolation[0] : InterpolationType::Linear),
+            interpolation.size() > 1 ? interpolation[1]
+            : interpolation.size() > 0 ? interpolation[0]
+                                       : InterpolationType::Linear),
         interpolation2(
-            interpolation.size() > 2
-                ? interpolation[2]
+            interpolation.size() > 2 ? interpolation[2]
             : interpolation.size() > 1 ? interpolation[1]
-                                       : interpolation.size() > 0 ? interpolation[0] : InterpolationType::Linear),
+            : interpolation.size() > 0 ? interpolation[0]
+                                       : InterpolationType::Linear),
         extrapolate(extrapolate),
         do_pull(do_pull),
         do_push(do_push),
@@ -133,13 +139,14 @@ MONAI_NAMESPACE_DEVICE { // cuda
         bound2(bound),
         interpolation0(interpolation.size() > 0 ? interpolation[0] : InterpolationType::Linear),
         interpolation1(
-            interpolation.size() > 1 ? interpolation[1]
-                                     : interpolation.size() > 0 ? interpolation[0] : InterpolationType::Linear),
+            interpolation.size() > 1 ? interpolation[1]
+            : interpolation.size() > 0 ? interpolation[0]
+                                       : InterpolationType::Linear),
         interpolation2(
-            interpolation.size() > 2
-                ? interpolation[2]
+            interpolation.size() > 2 ? interpolation[2]
             : interpolation.size() > 1 ? interpolation[1]
-                                       : interpolation.size() > 0 ? interpolation[0] : InterpolationType::Linear),
+            : interpolation.size() > 0 ? interpolation[0]
+                                       : InterpolationType::Linear),
         extrapolate(extrapolate),
         do_pull(do_pull),
         do_push(do_push),
@@ -162,10 +169,15 @@ MONAI_NAMESPACE_DEVICE { // cuda
       bool do_sgrad)
       : dim(dim),
         bound0(bound.size() > 0 ? bound[0] : BoundType::Replicate),
-        bound1(bound.size() > 1 ? bound[1] : bound.size() > 0 ? bound[0] : BoundType::Replicate),
+        bound1(
+            bound.size() > 1 ? bound[1]
+            : bound.size() > 0 ? bound[0]
+                               : BoundType::Replicate),
         bound2(
-            bound.size() > 2 ? bound[2]
-                             : bound.size() > 1 ? bound[1] : bound.size() > 0 ? bound[0] : BoundType::Replicate),
+            bound.size() > 2 ? bound[2]
+            : bound.size() > 1 ? bound[1]
+            : bound.size() > 0 ? bound[0]
+                               : BoundType::Replicate),
         interpolation0(interpolation),
         interpolation1(interpolation),
         interpolation2(interpolation),
diff --git a/monai/metrics/rocauc.py b/monai/metrics/rocauc.py
index 8d12f90867..164b32ddb7 100644
--- a/monai/metrics/rocauc.py
+++ b/monai/metrics/rocauc.py
@@ -140,6 +140,4 @@ def compute_roc_auc(
     if average == Average.WEIGHTED:
         weights = [sum(y_) for y_ in y]
         return np.average(auc_values, weights=weights)
-    raise ValueError(
-        f'Unsupported average: {average}, available options are ["macro", "weighted", "micro", "none"].'
-    )
+    raise ValueError(f'Unsupported average: {average}, available options are ["macro", "weighted", "micro", "none"].')