11 changes: 3 additions & 8 deletions colossalai/__init__.py
@@ -1,11 +1,4 @@
-from .initialize import (
-    get_default_parser,
-    initialize,
-    launch,
-    launch_from_openmpi,
-    launch_from_slurm,
-    launch_from_torch,
-)
+from .initialize import launch, launch_from_openmpi, launch_from_slurm, launch_from_torch
 
 try:
     # .version will be created by setup.py
@@ -15,3 +8,5 @@
     # and directly set PYTHONPATH to use Colossal-AI which is a bad practice
     __version__ = '0.0.0'
     print('please install Colossal-AI from https://www.colossalai.org/download or from source')
+
+__all__ = ['launch', 'launch_from_openmpi', 'launch_from_slurm', 'launch_from_torch', '__version__']
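
With this change the top-level package exports only the launch helpers and __version__. A minimal usage sketch, assuming a job started with torchrun and the config-dict signature that launch_from_torch takes in this version of the codebase:

import colossalai

# torchrun sets the rank/world-size environment variables that launch_from_torch reads;
# an empty config dict keeps all parallel settings at their defaults
colossalai.launch_from_torch(config={})
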
12 changes: 7 additions & 5 deletions colossalai/context/__init__.py
@@ -1,6 +1,8 @@
 from .config import Config, ConfigException
-from .parallel_context import ParallelContext
-from .parallel_mode import ParallelMode
-from .moe_context import MOE_CONTEXT
-from .process_group_initializer import *
-from .random import *
+
+# from .moe_context import MOE_CONTEXT
+
+__all__ = [
+    'Config',
+    'ConfigException',
+]
5 changes: 2 additions & 3 deletions colossalai/context/moe_context.py
@@ -3,13 +3,12 @@
 import torch
 import torch.distributed as dist
 
-from colossalai.context.parallel_mode import ParallelMode
 from colossalai.context.singleton_meta import SingletonMeta
 from colossalai.legacy.tensor import ProcessGroup
 
 
 def _check_sanity():
-    from colossalai.core import global_context as gpc
+    from colossalai.legacy.core import global_context as gpc
     if gpc.tensor_parallel_size > 1 or gpc.pipeline_parallel_size > 1:
         raise NotImplementedError("Moe is not compatible with tensor or "
                                   "pipeline parallel at present.")
@@ -61,7 +60,7 @@ def setup(self, seed: int, use_kernel_optim: bool = True):
 
         self.world_size = dist.get_world_size()
 
-        from colossalai.core import global_context as gpc
+        from colossalai.legacy.core import global_context as gpc
         self.max_ep_size = gpc.config.get('max_ep_size', self.world_size)
         assert self.world_size % self.max_ep_size == 0, \
             "Maximum expert parallel size must be a factor of the number of GPUs"
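
For context, the setup() hunk above reads max_ep_size from the global context's config and asserts that it divides the world size. A hypothetical config module handed to the launcher (only this field shown, everything else omitted) would declare it as a plain attribute:

# config.py -- hypothetical training config; with 8 GPUs, max_ep_size must divide 8
max_ep_size = 4
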
6 changes: 0 additions & 6 deletions colossalai/core.py

This file was deleted.
