Merged
4 changes: 2 additions & 2 deletions colossalai/amp/apex_amp/apex_amp.py
@@ -10,11 +10,11 @@
 
 from torch import Tensor
 
-from colossalai.nn.optimizer import ColossalaiOptimizer
+from colossalai.interface import OptimizerWrapper
 from colossalai.utils import clip_grad_norm_fp32
 
 
-class ApexAMPOptimizer(ColossalaiOptimizer):
+class ApexAMPOptimizer(OptimizerWrapper):
     """ A wrapper class for APEX optimizer and it implements apex-specific backward and clip_grad_norm
     methods
     """
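The same migration pattern repeats in each AMP wrapper below: subclasses now derive from colossalai.interface.OptimizerWrapper instead of the removed colossalai.nn.optimizer.ColossalaiOptimizer. A minimal sketch of that pattern, assuming OptimizerWrapper takes the inner optimizer as its constructor argument and stores it as self.optim (the class name MyAMPOptimizer and the loss-scaling logic are hypothetical, not part of this PR):

from torch import Tensor
from torch.optim import Optimizer

from colossalai.interface import OptimizerWrapper


class MyAMPOptimizer(OptimizerWrapper):
    """Hypothetical wrapper illustrating the new base class used in this PR."""

    def __init__(self, optim: Optimizer, loss_scale: float = 1024.0):
        super().__init__(optim)  # the wrapped optimizer is kept as self.optim
        self.loss_scale = loss_scale

    def backward(self, loss: Tensor):
        # Scale the loss before backpropagation, as AMP-style wrappers typically do.
        (loss * self.loss_scale).backward()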
4 changes: 2 additions & 2 deletions colossalai/amp/naive_amp/naive_amp.py
@@ -13,12 +13,12 @@
 
 from colossalai.context import ParallelMode
 from colossalai.core import global_context as gpc
-from colossalai.nn.optimizer import ColossalaiOptimizer
+from colossalai.interface import OptimizerWrapper
 
 from ._fp16_optimizer import FP16Optimizer
 
 
-class NaiveAMPOptimizer(ColossalaiOptimizer):
+class NaiveAMPOptimizer(OptimizerWrapper):
     """A wrapper class for optimizer to cast all parameters to fp16
 
     Args:
4 changes: 2 additions & 2 deletions colossalai/amp/torch_amp/torch_amp.py
@@ -7,13 +7,13 @@
 from torch.nn.modules.loss import _Loss
 from torch.optim import Optimizer
 
-from colossalai.nn.optimizer import ColossalaiOptimizer
+from colossalai.interface import OptimizerWrapper
 from colossalai.utils import clip_grad_norm_fp32
 
 from ._grad_scaler import GradScaler
 
 
-class TorchAMPOptimizer(ColossalaiOptimizer):
+class TorchAMPOptimizer(OptimizerWrapper):
     """A wrapper class which integrate Pytorch AMP with an optimizer
 
     Args:
4 changes: 2 additions & 2 deletions colossalai/auto_parallel/offload/amp_optimizer.py
@@ -5,8 +5,8 @@
 from torch.optim import Optimizer
 
 from colossalai.amp.naive_amp.grad_scaler import DynamicGradScaler
+from colossalai.interface import OptimizerWrapper
 from colossalai.logging import get_dist_logger
-from colossalai.nn.optimizer import ColossalaiOptimizer
 from colossalai.utils import get_current_device
 
 from .base_offload_module import BaseOffloadModule
@@ -19,7 +19,7 @@ class OptimState(Enum):
     UNSCALED = 1
 
 
-class AMPOptimizer(ColossalaiOptimizer):
+class AMPOptimizer(OptimizerWrapper):
     """
     A wrapper for Optimizer.
     Code reference: https://github.com/hpcaitech/ColossalAI/blob/main/colossalai/nn/optimizer/zero_optimizer.py
4 changes: 0 additions & 4 deletions colossalai/checkpoint_io/utils.py
@@ -13,7 +13,6 @@
 from torch.optim import Optimizer
 
 from colossalai.interface import ModelWrapper, OptimizerWrapper
-from colossalai.nn.optimizer import ColossalaiOptimizer
 from colossalai.tensor.d_tensor import (
     is_customized_distributed_tensor,
     is_distributed_tensor,
@@ -130,10 +129,7 @@ def unwrap_optimizer(optimizer: OptimizerWrapper):
     This method should be used before saving/loading it to/from sharded checkpoints.
     '''
 
-    # TODO(Baizhou): ColossalaiOptimizer will be replaced with OptimizerWrapper in the future
     unwrapped_optim = optimizer.optim
-    if isinstance(unwrapped_optim, ColossalaiOptimizer):
-        unwrapped_optim = unwrapped_optim.optim
     return unwrapped_optim


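With ColossalaiOptimizer gone from the wrapping chain, unwrap_optimizer only needs to peel off the single OptimizerWrapper layer. A rough usage sketch under that assumption; the toy model and SGD optimizer are illustrative only, and the import path follows where the function is defined in this diff:

import torch.nn as nn
from torch.optim import SGD

from colossalai.checkpoint_io.utils import unwrap_optimizer
from colossalai.interface import OptimizerWrapper

model = nn.Linear(4, 4)
wrapped = OptimizerWrapper(SGD(model.parameters(), lr=0.1))

inner = unwrap_optimizer(wrapped)  # returns the underlying torch optimizer
assert isinstance(inner, SGD)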
28 changes: 0 additions & 28 deletions colossalai/cli/benchmark/__init__.py

This file was deleted.

105 changes: 0 additions & 105 deletions colossalai/cli/benchmark/benchmark.py

This file was deleted.

18 changes: 0 additions & 18 deletions colossalai/cli/benchmark/models.py

This file was deleted.

159 changes: 0 additions & 159 deletions colossalai/cli/benchmark/utils.py

This file was deleted.
