Mirror of https://github.com/hpcaitech/ColossalAI.git (synced 2025-09-04 18:40:28 +00:00)
[moe] support optimizer checkpoint (#5015)
* Refactor MoE Manager setup method
* unshard optim ckpt
* optim io
* update transformer version
* update requirements
* update ckpt
* update ckpt
* update ckpt
* fix engine
* fix engine
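The "unshard optim ckpt" and "optim io" bullets refer to the checkpoint IO path for MoE optimizer state. As a rough sketch of the user-facing flow, assuming the standard `Booster.save_optimizer`/`load_optimizer` API; the plugin choice and training loop are illustrative stand-ins, not code from this PR:

```python
# Illustrative sketch, not code from this PR: saving an optimizer checkpoint
# through the Booster checkpoint IO. `shard=False` requests a single
# unsharded state file, which is what "unshard optim ckpt" refers to.
import torch
import colossalai
from colossalai.booster import Booster
from colossalai.booster.plugin import LowLevelZeroPlugin  # stand-in plugin choice

colossalai.launch_from_torch(config={})

model = torch.nn.Linear(8, 8).cuda()
optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)

booster = Booster(plugin=LowLevelZeroPlugin())
model, optimizer, *_ = booster.boost(model, optimizer)

# ... run training steps here ...

booster.save_optimizer(optimizer, "optim.pt", shard=False)  # gather to one file
booster.load_optimizer(optimizer, "optim.pt")               # restore state
```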
```diff
@@ -72,6 +72,19 @@ def get_ep_size(tensor: torch.Tensor) -> int:
     return tensor.moe_info.ep_size
 
 
+def get_dp_size(tensor: torch.Tensor) -> int:
+    """
+    Get the data parallel size of the given tensor.
+
+    Args:
+        tensor (torch.Tensor): The tensor to be checked.
+
+    Returns:
+        int: The data parallel size of the given tensor.
+    """
+    return tensor.moe_info.dp_size
+
+
 def get_dp_group(tensor: torch.Tensor) -> ProcessGroup:
     """
     Get the data parallel group of the given tensor.
```
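For orientation, a minimal sketch of what the new helper reads. `DummyMoeInfo` and the attribute assignment are hypothetical stand-ins for the real `moe_info` object that ColossalAI's MoE setup attaches to expert-parallel tensors; the helper body is copied from the diff above:

```python
# Hypothetical sketch: fake the `moe_info` attribute that ColossalAI's MoE
# setup attaches to expert-parallel tensors, to show what the helper returns.
from dataclasses import dataclass
from typing import Optional

import torch
from torch.distributed import ProcessGroup


@dataclass
class DummyMoeInfo:
    """Stand-in for the real moe_info object (field names match the diff)."""
    ep_size: int                      # expert parallel world size
    dp_size: int                      # data parallel world size
    dp_group: Optional[ProcessGroup]  # data parallel process group


def get_dp_size(tensor: torch.Tensor) -> int:
    """Same body as the helper added in this commit."""
    return tensor.moe_info.dp_size


param = torch.nn.Parameter(torch.zeros(4))
param.moe_info = DummyMoeInfo(ep_size=4, dp_size=2, dp_group=None)

assert get_dp_size(param) == 2  # the helper just reads the cached layout
```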