Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions monai/utils/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,3 +15,4 @@
from .enums import *
from .misc import *
from .module import *
from .profiling import *
19 changes: 0 additions & 19 deletions monai/utils/decorators.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,28 +9,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import time
from functools import wraps


def timing(func):
    """
    Time the execution of the decorated function.

    Prints (flushed, so it appears immediately in stdout/log files) the wrapped
    function's name before the call, then the name again with the elapsed
    wall-clock time in seconds after the call. Timing uses ``time.perf_counter``,
    the highest-resolution monotonic clock available. The wrapped function's
    return value is passed through unchanged.
    """

    @wraps(func)
    def timingwrap(*args, **kwargs):
        # Announce the call before starting the clock so long-running calls
        # are visible in the log while still in progress.
        print(func.__name__, flush=True)
        start = time.perf_counter()
        res = func(*args, **kwargs)
        end = time.perf_counter()
        print(func.__name__, "dT (s) =", (end - start), flush=True)
        return res

    return timingwrap


class RestartGenerator:
"""
Wraps a generator callable which will be called whenever this class is iterated and its result returned. This is
Expand Down
90 changes: 90 additions & 0 deletions monai/utils/profiling.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,90 @@
# Copyright 2020 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import time
from functools import wraps

import torch

__all__ = ["torch_profiler_full", "torch_profiler_time_cpu_gpu", "torch_profiler_time_end_to_end"]


def torch_profiler_full(func):
    """
    A decorator which will run the torch profiler for the decorated function,
    printing the results in full.
    Note: Enforces a gpu sync point which could slow down pipelines.
    """

    @wraps(func)
    def wrapper(*args, **kwargs):
        # Profile the wrapped call, including CUDA kernel timings.
        profiler_ctx = torch.autograd.profiler.profile(use_cuda=True)
        with profiler_ctx as prof:
            output = func(*args, **kwargs)

        # Emit the complete profiler report immediately.
        print(prof, flush=True)
        return output

    return wrapper


def torch_profiler_time_cpu_gpu(func):
    """
    A decorator which measures the execution time of both the CPU and GPU components
    of the decorated function, printing both results.
    Note: Enforces a gpu sync point which could slow down pipelines.
    """

    @wraps(func)
    def wrapper(*args, **kwargs):
        with torch.autograd.profiler.profile(use_cuda=True) as prof:
            result = func(*args, **kwargs)

        # Total GPU time is accumulated across all recorded events.
        total_gpu = 0
        for event in prof.function_events:
            total_gpu += event.self_cuda_time_total

        fmt = torch.autograd.profiler.format_time
        print(
            "cpu time: {}, gpu time: {}".format(fmt(prof.self_cpu_time_total), fmt(total_gpu)),
            flush=True,
        )

        return result

    return wrapper


def torch_profiler_time_end_to_end(func):
    """
    A decorator which measures the total execution time from when the decorated
    function is called to when the last cuda operation finishes, printing the result.
    Note: Enforces a gpu sync point which could slow down pipelines.
    """

    @wraps(func)
    def wrapper(*args, **kwargs):
        # Drain any pending GPU work so the clock starts from a clean state.
        torch.cuda.synchronize()
        t0 = time.perf_counter()

        result = func(*args, **kwargs)

        # Wait for all launched kernels to complete before stopping the clock.
        torch.cuda.synchronize()
        # perf_counter is in seconds; format_time expects microseconds.
        elapsed_us = (time.perf_counter() - t0) * 1e6

        print("end to end time: {}".format(torch.autograd.profiler.format_time(elapsed_us)), flush=True)
        return result

    return wrapper