[chore] docstring
commit f7c5485ed6
parent 7e737df5ad
@@ -115,8 +115,10 @@ class MoeHybridParallelPlugin(HybridParallelPlugin):
     """
     Modified from colossalai.booster.plugin.hybrid_parallel_plugin.HybridParallelPlugin
     Extra Args:
-        ep_size (int): The size of tensor parallelism. Tensor parallelism will not be used when tp_size is set to 1.
-        force_overlap_comm (bool): For LowLevelZeroOptimizer, it might causes program hang when some experts are routed and overlap_communication is True during training. This flag is used to force overlap_communication=True.
+        ep_size (int): The size of expert parallelism.
+        force_overlap_comm (bool):
+            For LowLevelZeroOptimizer, it might cause the program to hang when some experts are not routed and overlap_communication is True during training.
+            This flag is used to force overlap_communication=True. Make sure every expert is routed when you use this.
     """
 
     def __init__(
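For context, here is a minimal usage sketch of the two arguments this docstring documents. The import path, the launch call, and the tp_size/pp_size/zero_stage values are assumptions for illustration and are not taken from this commit; the exact constructor signature may differ between versions.

# Minimal usage sketch (assumptions noted above; not part of this commit).
import colossalai
from colossalai.booster import Booster
from colossalai.booster.plugin.moe_hybrid_parallel_plugin import MoeHybridParallelPlugin

# Initialize the distributed environment (newer releases take no config argument;
# older ones may require launch_from_torch(config={})).
colossalai.launch_from_torch()

plugin = MoeHybridParallelPlugin(
    tp_size=1,                # no tensor parallelism (assumed parameter)
    pp_size=1,                # no pipeline parallelism (assumed parameter)
    ep_size=4,                # expert-parallel degree, as documented above
    zero_stage=1,             # LowLevelZeroOptimizer path (assumed parameter)
    force_overlap_comm=True,  # safe only if every expert receives tokens each step
)

booster = Booster(plugin=plugin)
# model, optimizer, dataloader are user-defined objects; booster.boost wraps them
# for MoE hybrid-parallel training:
# model, optimizer, _, dataloader, _ = booster.boost(model, optimizer, dataloader=dataloader)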