Added MoE parallel (#127)

HELSON authored on 2022-01-07 15:08:36 +08:00, committed by GitHub
parent 42741dd4a3
commit dceae85195
26 changed files with 858 additions and 18 deletions


@@ -15,6 +15,7 @@ from colossalai.registry import DIST_GROUP_INITIALIZER
 
 from .parallel_mode import ParallelMode
 from .random import add_seed, get_seeds, set_mode
+from colossalai.global_variables import moe_env
 
 
 class ParallelContext:
@@ -412,6 +413,13 @@ class ParallelContext:
             # add this config to initialize later
             pg_init.append(dict(type=INITIALIZER_MAPPING[tensor_parallel_mode.lower()], **tensor_parallel_cfg))
 
+        # initialization for moe environment
+        if parallel_config is not None and 'moe' in parallel_config:
+            param = parallel_config['moe']
+            assert 'size' in param, "Moe model parallel size should be given"
+            moe_env.setup(param['size'])
+            pg_init.append(dict(type=INITIALIZER_MAPPING['moe']))
+
         # run initialization of different process groups
         for initializer_cfg in pg_init:
            cfg = initializer_cfg.copy()
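
The new branch is driven by the user's parallel configuration: when a 'moe' entry with a 'size' field is present, moe_env is set up with that size and the MoE process-group initializer is appended to pg_init. A minimal sketch of a config that would take this branch, assuming the usual Colossal-AI dict-style config file (the pipeline/tensor values and the size of 4 are illustrative, not part of this commit):

    # config.py (hypothetical example)
    parallel = dict(
        pipeline=1,
        tensor=dict(size=2, mode='1d'),
        # 'size' is required: ParallelContext calls moe_env.setup(4) and
        # schedules INITIALIZER_MAPPING['moe'] when this entry is present.
        moe=dict(size=4),
    )

Without a 'moe' entry the branch is skipped entirely, since the guard is 'moe' in parallel_config.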