2 changes: 1 addition & 1 deletion monai/apps/auto3dseg/bundle_gen.py
@@ -35,7 +35,7 @@
from monai.utils import ensure_tuple

logger = get_logger(module_name=__name__)
-ALGO_HASH = os.environ.get("MONAI_ALGO_HASH", "d8bec42")
+ALGO_HASH = os.environ.get("MONAI_ALGO_HASH", "4af80e1")

__all__ = ["BundleAlgo", "BundleGen"]

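For context, ALGO_HASH pins the default commit of the Auto3DSeg algorithm templates, and the MONAI_ALGO_HASH environment variable overrides it. A minimal sketch of that resolution, reproducing only the assignment shown above; the surrounding print is illustrative and not part of MONAI:

import os

# The pinned default moves from "d8bec42" to "4af80e1" in this diff; a value
# set in the MONAI_ALGO_HASH environment variable takes precedence.
ALGO_HASH = os.environ.get("MONAI_ALGO_HASH", "4af80e1")
print(f"Auto3DSeg algorithm templates pinned at commit: {ALGO_HASH}")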
2 changes: 0 additions & 2 deletions monai/apps/nnunet/__main__.py
@@ -8,7 +8,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
"""
Examples:
- User can use the one-liner to start the nnU-Net workflow
@@ -85,7 +84,6 @@

"""

-
from __future__ import annotations

from monai.apps.nnunet.nnunetv2_runner import nnUNetV2Runner
1 change: 0 additions & 1 deletion monai/apps/nnunet/utils.py
@@ -25,7 +25,6 @@

logger = monai.apps.utils.get_logger(__name__)

-
__all__ = ["analyze_data", "create_new_data_copy", "create_new_dataset_json", "NNUNETMode"]


2 changes: 1 addition & 1 deletion monai/metrics/meandice.py
@@ -205,7 +205,7 @@ def compute_channel(self, y_pred: torch.Tensor, y: torch.Tensor) -> torch.Tensor
denorm = y_o + torch.sum(y_pred)
if denorm <= 0:
return torch.tensor(1.0, device=y_o.device)
-return (2.0 * torch.sum(torch.masked_select(y, y_pred))) / denorm
+return torch.tensor(0.0, device=y_o.device)

def __call__(self, y_pred: torch.Tensor, y: torch.Tensor) -> torch.Tensor | tuple[torch.Tensor, torch.Tensor]:
"""
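A hedged sketch of the meandice change: the final fallback now returns 0.0 explicitly instead of evaluating the masked-select expression. The helper below reproduces only the branch visible in this hunk; the name fallback_dice and the example tensors are illustrative, and the note about when the branch is reached is an assumption about the surrounding method, which is not shown here.

import torch

def fallback_dice(y_pred: torch.Tensor, y: torch.Tensor) -> torch.Tensor:
    # Mirrors the hunk above: 1.0 when both ground truth and prediction are
    # empty, otherwise 0.0. In the full compute_channel this branch appears to
    # be reached only with an empty ground truth (assumption; earlier branches
    # are not in this hunk), so the removed masked_select expression evaluated
    # to 0 there as well; the new line makes that result explicit.
    y_o = torch.sum(y)
    denorm = y_o + torch.sum(y_pred)
    if denorm <= 0:
        return torch.tensor(1.0, device=y_o.device)
    return torch.tensor(0.0, device=y_o.device)

empty_gt = torch.zeros(4, 4, dtype=torch.bool)
print(fallback_dice(torch.ones(4, 4, dtype=torch.bool), empty_gt))   # tensor(0.)
print(fallback_dice(torch.zeros(4, 4, dtype=torch.bool), empty_gt))  # tensor(1.)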
7 changes: 4 additions & 3 deletions monai/networks/nets/basic_unet.py
@@ -12,6 +12,7 @@
from __future__ import annotations

from collections.abc import Sequence
+from typing import Any

import torch
import torch.nn as nn
@@ -149,16 +150,16 @@ def __init__(
self.convs = TwoConv(spatial_dims, cat_chns + up_chns, out_chns, act, norm, bias, dropout)
self.is_pad = is_pad

-def forward(self, x: torch.Tensor, x_e: torch.Tensor | None):
+def forward(self, x: torch.Tensor, x_e: Any):
"""

Args:
x: features to be upsampled.
-x_e: features from the encoder.
+x_e: optional features from the encoder, if None, this branch is not in use.
"""
x_0 = self.upsample(x)

-if x_e is not None:
+if torch.jit.isinstance(x_e, torch.Tensor):
if self.is_pad:
# handling spatial shapes due to the 2x maxpooling with odd edge lengths.
dimensions = len(x.shape) - 2
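The basic_unet change replaces an "x_e is not None" test with torch.jit.isinstance and widens the annotation to Any; that is the usual pattern when an optional input has to survive torch.jit.script, since torch.jit.isinstance performs type refinement on an Any-typed value under TorchScript and behaves like plain isinstance in eager mode. A minimal self-contained sketch of the pattern; the OptionalSkip module below is illustrative and not part of MONAI.

from typing import Any

import torch
import torch.nn as nn


class OptionalSkip(nn.Module):
    def forward(self, x: torch.Tensor, x_e: Any) -> torch.Tensor:
        # Refines x_e from Any to Tensor under TorchScript as well as eager mode.
        if torch.jit.isinstance(x_e, torch.Tensor):
            return torch.cat([x, x_e], dim=1)  # encoder features present
        return x  # decoder branch used without the skip connection


module = torch.jit.script(OptionalSkip())
print(module(torch.randn(1, 2, 8, 8), torch.randn(1, 2, 8, 8)).shape)  # (1, 4, 8, 8)
print(module(torch.randn(1, 2, 8, 8), None).shape)                     # (1, 2, 8, 8)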
5 changes: 4 additions & 1 deletion monai/networks/nets/flexible_unet.py
@@ -232,6 +232,7 @@ def __init__(
dropout: float | tuple = 0.0,
decoder_bias: bool = False,
upsample: str = "nontrainable",
+pre_conv: str = "default",
interp_mode: str = "nearest",
is_pad: bool = True,
) -> None:
@@ -262,6 +263,8 @@ def __init__(
decoder_bias: whether to have a bias term in decoder's convolution blocks.
upsample: upsampling mode, available options are``"deconv"``, ``"pixelshuffle"``,
``"nontrainable"``.
+pre_conv: a conv block applied before upsampling. Only used in the "nontrainable" or
+"pixelshuffle" mode, default to `default`.
interp_mode: {``"nearest"``, ``"linear"``, ``"bilinear"``, ``"bicubic"``, ``"trilinear"``}
Only used in the "nontrainable" mode.
is_pad: whether to pad upsampling features to fit features from encoder. Default to True.
@@ -309,7 +312,7 @@ def __init__(
bias=decoder_bias,
upsample=upsample,
interp_mode=interp_mode,
-pre_conv="default",
+pre_conv=pre_conv,
align_corners=None,
is_pad=is_pad,
)
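With pre_conv now surfaced on the FlexibleUNet constructor instead of being hard-coded, callers can pass it through to the decoder's upsampling blocks. A hedged usage sketch; the backbone name and the other argument values are illustrative choices rather than requirements of this change, so check the full FlexibleUNet signature before reusing them.

from monai.networks.nets import FlexibleUNet

# pre_conv is only consulted for the "nontrainable" and "pixelshuffle"
# upsample modes, per the docstring added in this diff.
net = FlexibleUNet(
    in_channels=1,
    out_channels=2,
    backbone="efficientnet-b0",
    upsample="nontrainable",
    pre_conv="default",  # previously fixed to "default" inside the decoder
    interp_mode="nearest",
)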