colossalai/_analyzer/fx/codegen.py (2 changes: 1 addition & 1 deletion)
@@ -246,7 +246,7 @@ def emit_code_with_activation_checkpoint(body, ckpt_func, nodes, emit_node_func,

 @compatibility(is_backward_compatible=True)
 class ActivationCheckpointCodeGen(CodeGen):
-    def _gen_python_code(self, nodes, root_module: str, namespace: _Namespace) -> PythonCode:
+    def _gen_python_code(self, nodes, root_module: str, namespace: _Namespace, verbose=None) -> PythonCode:
         free_vars: List[str] = []
         body: List[str] = []
         globals_: Dict[str, Any] = {}
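Note on the added parameter (inferred, not stated in the diff): recent torch.fx releases forward a verbose keyword from Graph.python_code into CodeGen._gen_python_code, so an override that omits it raises a TypeError. A minimal sketch of a compatible override, assuming a PyTorch version whose torch.fx.graph exposes these names:

# Sketch only: exact torch.fx signatures vary across PyTorch releases.
from torch.fx.graph import CodeGen, PythonCode, _Namespace

class MyCodeGen(CodeGen):
    def _gen_python_code(self, nodes, root_module: str, namespace: _Namespace, verbose=None) -> PythonCode:
        # Accept `verbose` so torch.fx can pass it through, then defer to the
        # base implementation (None coerces to False, the base default).
        return super()._gen_python_code(nodes, root_module, namespace, verbose=bool(verbose))

The same signature change is applied to the other two codegen classes further down in this diff.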
colossalai/auto_parallel/offload/base_offload_module.py (2 changes: 1 addition & 1 deletion)
@@ -5,7 +5,7 @@
 import torch.nn as nn

 from colossalai.utils import _cast_float
-from colossalai.zero.legacy.gemini.tensor_utils import free_storage
+from colossalai.utils.common import free_storage

 from .region_manager import RegionManager
 from .util import GlobalRuntimeInfo
colossalai/auto_parallel/offload/region.py (3 changes: 2 additions & 1 deletion)
@@ -3,7 +3,8 @@
 import torch
 from torch.fx import Node

-from colossalai.zero.legacy.gemini.tensor_utils import alloc_storage, free_storage
+from colossalai.utils.common import free_storage
+from colossalai.zero.gemini.chunk.chunk import alloc_storage


 class Region:
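For context on the relocated helpers (this hunk and the one above): in Colossal-AI these utilities resize a tensor's underlying storage in place, which is what lets a Region release and re-materialize parameter memory without touching tensor metadata. A rough sketch of their behavior, approximated from the surrounding usage rather than copied from the library:

import torch

def free_storage(data: torch.Tensor) -> None:
    # Drop the backing storage; the tensor object and its metadata survive.
    if data.storage().size() > 0:
        data.storage().resize_(0)

def alloc_storage(data: torch.Tensor) -> None:
    # Re-grow the backing storage to hold every element of the tensor.
    if data.storage().size() == 0:
        data.storage().resize_(data.numel())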
colossalai/autochunk/autochunk_codegen.py (2 changes: 1 addition & 1 deletion)
@@ -372,7 +372,7 @@ def __init__(
         if print_progress:
             get_logger().info("AutoChunk start codegen")

-    def _gen_python_code(self, nodes, root_module: str, namespace: _Namespace) -> PythonCode:
+    def _gen_python_code(self, nodes, root_module: str, namespace: _Namespace, verbose=None) -> PythonCode:
         free_vars: List[str] = []
         body: List[str] = []
         globals_: Dict[str, Any] = {}
colossalai/fx/codegen/activation_checkpoint_codegen.py (2 changes: 1 addition & 1 deletion)
@@ -625,7 +625,7 @@ def emit_code_with_activation_checkpoint(body, ckpt_func, nodes, emit_node_func,
 if CODEGEN_AVAILABLE:

     class ActivationCheckpointCodeGen(CodeGen):
-        def _gen_python_code(self, nodes, root_module: str, namespace: _Namespace) -> PythonCode:
+        def _gen_python_code(self, nodes, root_module: str, namespace: _Namespace, verbose=None) -> PythonCode:
             free_vars: List[str] = []
             body: List[str] = []
             globals_: Dict[str, Any] = {}
examples/language/gpt/hybridparallelism/data.py (2 changes: 2 additions & 0 deletions)
@@ -62,6 +62,8 @@ def __init__(
         self.text_fields = self.task_text_field_map[task_name]
         self.num_labels = self.glue_task_num_labels[task_name]
         self.tokenizer: PreTrainedTokenizer = AutoTokenizer.from_pretrained(self.model_name_or_path, use_fast=True)
+        if not getattr(self.tokenizer, "pad_token", None):
+            self.tokenizer.pad_token = self.tokenizer._eos_token
+        self.setup()

     def setup(self):
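One reviewer-style caveat on this hunk: _eos_token is a private attribute of Hugging Face tokenizers and typically holds an AddedToken object rather than a plain string; the public accessor is the safer choice. A hedged alternative for the same fallback, assuming a standard transformers PreTrainedTokenizer:

# Equivalent fallback via the public API:
if tokenizer.pad_token is None:
    tokenizer.pad_token = tokenizer.eos_token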