# Mirrored from https://github.com/hpcaitech/ColossalAI.git (synced 2026-04-26 17:53:08 +00:00).
# Upstream change log for this module (chunk-based ZeRO work): impl chunk manager; impl param
# op hook; add reduce_chunk; add zero hook v2; add zero dp; fix TensorInfo; impl load balancing
# when using zero without chunk; fix zero hook; polish chunk; fix bugs; ddp ok; zero ok;
# fix bugs about load balancing; add end-to-end test; fix typo; add test_chunk; polish code.
"""Package init: re-exports the public colo-tensor API.

NOTE(review): presumably the ``colossalai.tensor`` package of ColossalAI —
confirm against the upstream repository layout. This module only aggregates
names from its submodules and declares the public surface via ``__all__``.
"""

from .spec import ComputePattern, ParallelAction, TensorSpec
# Single-name import; the original used a one-element parenthesized form.
from .op_wrapper import colo_op_impl
from .colo_tensor import ColoTensor
from .colo_parameter import ColoParameter
from .utils import convert_parameter, named_params_with_colotensor
# Star-import is deliberate here: ``_ops`` registers tensor ops as a side
# effect of import (contents not visible from this file).
from ._ops import *
from .optim.colo_optimizer import ColoOptimizer
from . import distspec
from .dist_spec_mgr import DistSpecManager
from .param_op_hook import ParamOpHook, use_param_op_hooks
from .chunk import ChunkManager, TensorState
from .module_utils import register_colo_module, is_colo_module, get_colo_module, init_colo_module, check_colo_module
from .modules import ColoLinear, ColoEmbedding

# Explicit public API of this package; keep in sync with the imports above.
__all__ = [
    'ColoTensor', 'convert_parameter', 'colo_op_impl', 'ComputePattern', 'TensorSpec', 'ParallelAction',
    'named_params_with_colotensor', 'ColoOptimizer', 'ColoParameter', 'distspec', 'DistSpecManager',
    'register_colo_module', 'is_colo_module', 'get_colo_module', 'init_colo_module', 'check_colo_module', 'ColoLinear',
    'ColoEmbedding', 'ParamOpHook', 'use_param_op_hooks', 'ChunkManager', 'TensorState'
]