From b76d04b7c1cc9bb7b860657c1510ab12aa6300a7 Mon Sep 17 00:00:00 2001
From: YunLiu <55491388+KumoLiu@users.noreply.github.com>
Date: Wed, 14 Aug 2024 00:25:42 +0800
Subject: [PATCH 1/4] fix #8014

Signed-off-by: YunLiu <55491388+KumoLiu@users.noreply.github.com>
---
 monai/utils/module.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/monai/utils/module.py b/monai/utils/module.py
index 251232d62f..5bd8fdd530 100644
--- a/monai/utils/module.py
+++ b/monai/utils/module.py
@@ -209,11 +209,13 @@ def load_submodules(
     ):
         if (is_pkg or load_all) and name not in sys.modules and match(exclude_pattern, name) is None:
             try:
+                mod = import_module(name)
                 mod_spec = importer.find_spec(name)  # type: ignore
                 if mod_spec and mod_spec.loader:
-                    mod = importlib.util.module_from_spec(mod_spec)
-                    mod_spec.loader.exec_module(mod)
+                    loader = mod_spec.loader
+                    loader.exec_module(mod)
                     submodules.append(mod)
+
             except OptionalImportError:
                 pass  # could not import the optional deps., they are ignored
             except ImportError as e:

From c322f575c961512cfc977bd3044483ac0f167ccd Mon Sep 17 00:00:00 2001
From: YunLiu <55491388+KumoLiu@users.noreply.github.com>
Date: Wed, 14 Aug 2024 13:52:54 +0800
Subject: [PATCH 2/4] part of #8017

Signed-off-by: YunLiu <55491388+KumoLiu@users.noreply.github.com>
---
 monai/networks/utils.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/monai/networks/utils.py b/monai/networks/utils.py
index 6a97434215..4302f6c3db 100644
--- a/monai/networks/utils.py
+++ b/monai/networks/utils.py
@@ -931,7 +931,7 @@ def convert_to_trt(
         warnings.warn(f"The dynamic batch range sequence should have 3 elements, but got {dynamic_batchsize} elements.")
 
     device = device if device else 0
-    target_device = torch.device(f"cuda:{device}") if device else torch.device("cuda:0")
+    target_device = torch.device(f"cuda:{device}")
     convert_precision = torch.float32 if precision == "fp32" else torch.half
     inputs = [torch.rand(ensure_tuple(input_shape)).to(target_device)]
 
@@ -986,7 +986,7 @@ def scale_batch_size(input_shape: Sequence[int], scale_num: int):
             ir_model,
             inputs=input_placeholder,
             enabled_precisions=convert_precision,
-            device=target_device,
+            device=torch_tensorrt.Device(f"cuda:{device}"),
             ir="torchscript",
             **kwargs,
         )

From b23a42d608649b50b2222b8fc47aa0cea1a1b072 Mon Sep 17 00:00:00 2001
From: YunLiu <55491388+KumoLiu@users.noreply.github.com>
Date: Wed, 14 Aug 2024 13:58:01 +0800
Subject: [PATCH 3/4] minor fix

Signed-off-by: YunLiu <55491388+KumoLiu@users.noreply.github.com>
---
 monai/utils/module.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/monai/utils/module.py b/monai/utils/module.py
index 0dce3bd5ea..78087aef84 100644
--- a/monai/utils/module.py
+++ b/monai/utils/module.py
@@ -218,7 +218,7 @@ def load_submodules(
                 pass  # could not import the optional deps., they are ignored
             except ImportError as e:
                 msg = (
-                    "\nMultßiple versions of MONAI may have been installed?\n"
+                    "\nMultiple versions of MONAI may have been installed?\n"
                     "Please see the installation guide: https://docs.monai.io/en/stable/installation.html\n"
                 )  # issue project-monai/monai#5193
                 raise type(e)(f"{e}\n{msg}").with_traceback(e.__traceback__) from e  # raise with modified message

From 5e21a00dd20778101e8cc3670d0aefc80c5b9d6c Mon Sep 17 00:00:00 2001
From: YunLiu <55491388+KumoLiu@users.noreply.github.com>
Date: Thu, 15 Aug 2024 11:24:37 +0800
Subject: [PATCH 4/4] use cuda set device

Signed-off-by: YunLiu <55491388+KumoLiu@users.noreply.github.com>
---
 monai/networks/utils.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/monai/networks/utils.py b/monai/networks/utils.py
index 4302f6c3db..f301c2dd5c 100644
--- a/monai/networks/utils.py
+++ b/monai/networks/utils.py
@@ -822,7 +822,7 @@ def _onnx_trt_compile(
     output_names = [] if not output_names else output_names
 
     # set up the TensorRT builder
-    torch_tensorrt.set_device(device)
+    torch.cuda.set_device(device)
     logger = trt.Logger(trt.Logger.WARNING)
     builder = trt.Builder(logger)
     network = builder.create_network(1 << int(trt.NetworkDefinitionCreationFlag.EXPLICIT_BATCH))