Mirror of https://github.com/hpcaitech/ColossalAI.git
fix some typo (#5307)
@@ -451,7 +451,7 @@ class CommSpec:
         elif self.comm_pattern == CollectiveCommPattern.MIXGATHER_FWD_SPLIT_BWD:
             res_list.append(f"comm_pattern:MIXGATHER_FWD_SPLIT_BWD, ")
             res_list.append(f"gather_dim:{self.gather_dim}, ")
-            res_list.append(f"logical_process_asex:{self.logical_process_axes})")
+            res_list.append(f"logical_process_axes:{self.logical_process_axes})")

         return "".join(res_list)
@@ -96,9 +96,9 @@ def _apply_layout(tensor, layout):
     """
     Apply the layout to the local tensor during initializing process.
     """
-    # layout converter requires a source and target laytout
+    # layout converter requires a source and target layout
     # we construct the source layer for an unsharded tensor
-    # and use self.dist_layer as the targer layout for the sharded tensor
+    # and use self.dist_layer as the target layout for the sharded tensor
     source_spec = _construct_default_sharding_spec(tensor)
     source_layout = Layout(device_mesh=layout.device_mesh, sharding_spec=source_spec, global_shape=tensor.shape)
     sharded_tensor = layout_converter.apply(tensor=tensor, source_layout=source_layout, target_layout=layout)
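The comments corrected in this hunk describe the conversion flow: build a default (unsharded) sharding spec as the source layout, then let the layout converter reshard the full tensor into the target layout. Below is a minimal, self-contained sketch of that idea only; it does not use ColossalAI's Layout/LayoutConverter API, and shard_dim, world_size, and rank are hypothetical stand-ins for the information a real device mesh and sharding spec would carry.

import torch

def apply_target_layout(tensor: torch.Tensor, shard_dim: int, world_size: int, rank: int) -> torch.Tensor:
    # Source "layout": the tensor is fully replicated (unsharded) on every rank.
    # Target "layout": the tensor is split into world_size pieces along shard_dim
    # and each rank keeps only its own piece, mirroring what a source-to-target
    # layout conversion does when moving from a default spec to a sharded one.
    chunks = torch.chunk(tensor, world_size, dim=shard_dim)
    return chunks[rank].contiguous()

# Example: a 4x4 tensor sharded across 2 ranks along dim 0; rank 1 keeps rows 2-3.
full = torch.arange(16.0).reshape(4, 4)
local = apply_target_layout(full, shard_dim=0, world_size=2, rank=1)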
@@ -40,7 +40,7 @@ def get_moe_info(ep_size: int, dp_size: int, pp_size: int, ep_inside: bool) -> M
         ep_size (int): The expert parallel size.
         dp_size (int): The data parallel size.
         pp_size (int): The pipeline parallel size.
-        ep_inside (bool, optional): Use ep inside dp if True, dp inside ep if Fasle.
+        ep_inside (bool, optional): Use ep inside dp if True, dp inside ep if False.

     Returns:
         dict: The moe info of the given tensor.
@@ -12,7 +12,7 @@ class MoeParallelInfo:
             ep_size (int): expert parallel size
             dp_size (int): data parallel (zero) size
             pp_size (int, optional): pipeline parallel size. Defaults to 1.
-            ep_inside (bool, optional): Use ep inside dp if True, dp inside ep if Fasle. Defaults to True.
+            ep_inside (bool, optional): Use ep inside dp if True, dp inside ep if False. Defaults to True.
         """
         self.pp_size, self.dp_size, self.ep_size = pp_size, dp_size, ep_size
         if ep_inside:
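Both corrected docstrings define ep_inside the same way: expert-parallel groups sit inside each data-parallel group when True, and data-parallel groups sit inside each expert-parallel group when False. The sketch below illustrates that assumed rank-grouping semantics only; moe_mesh is a hypothetical helper and this is not ColossalAI's actual process-group construction.

import numpy as np

def moe_mesh(pp_size: int, dp_size: int, ep_size: int, ep_inside: bool = True) -> np.ndarray:
    # Arrange all ranks into a 3D mesh. The innermost axis varies fastest, so
    # ep_inside=True makes expert-parallel ranks contiguous within each dp group,
    # while ep_inside=False makes data-parallel ranks contiguous within each ep group.
    ranks = np.arange(pp_size * dp_size * ep_size)
    if ep_inside:
        return ranks.reshape(pp_size, dp_size, ep_size)  # axes: (pp, dp, ep)
    return ranks.reshape(pp_size, ep_size, dp_size)      # axes: (pp, ep, dp)

# 8 ranks with pp=1, dp=2, ep=4: ep groups are [0 1 2 3] and [4 5 6 7] when ep is inside dp.
print(moe_mesh(1, 2, 4, ep_inside=True)[0])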