[zero] support extra dp (#6123)
* [zero] support extra dp
* [zero] update checkpoint
* fix bugs
* fix bugs
@@ -2,12 +2,14 @@ import copy
 
 import pytest
 import torch
 import torch.distributed as dist
 import torch.nn as nn
 from torch.nn.parallel import DistributedDataParallel as DDP
 from torch.testing import assert_close
 
 import colossalai
-from colossalai.testing import rerun_if_address_is_in_use, spawn
+from colossalai.cluster import ProcessGroupMesh
+from colossalai.testing import parameterize, rerun_if_address_is_in_use, spawn
 from colossalai.testing.random import seed_all
 from colossalai.zero import LowLevelZeroOptimizer
@@ -40,11 +42,19 @@ def loose_close(a, b, dtype: torch.dtype = torch.float32):
     assert_close(a, b, rtol=rtol, atol=atol)
 
 
-def exam_zero_1_torch_ddp_ckpt():
+@parameterize("extra_dp_size", [1, 2])
+def exam_zero_1_torch_ddp_ckpt(extra_dp_size: int):
     """
     We examine the state_dict of zero and DDP.
     Moreover, we examine the zero's loading checkpoint of a torch ckpt.
     """
+    if extra_dp_size > 1:
+        pg_mesh = ProcessGroupMesh(extra_dp_size, dist.get_world_size() // extra_dp_size)
+        extra_dp_group = pg_mesh.get_group_along_axis(0)
+        dp_group = pg_mesh.get_group_along_axis(1)
+    else:
+        dp_group = None
+        extra_dp_group = None
     local_rank = torch.distributed.get_rank()
     seed_all(1453)
 
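In the hunk above, ProcessGroupMesh carves the world into a 2-D grid: axis 0 becomes the extra data-parallel dimension and axis 1 the ZeRO data-parallel dimension, and get_group_along_axis returns one process group per axis. A minimal sketch of that partitioning in plain Python, assuming the row-major rank layout this test's usage implies (the layout is an assumption, not taken from the ProcessGroupMesh source):

```python
# Sketch: how a (extra_dp_size, dp_size) mesh partitions ranks into groups.
# Assumes row-major rank layout; no distributed init needed to see the mapping.
def mesh_groups(world_size: int, extra_dp_size: int):
    dp_size = world_size // extra_dp_size
    grid = [[i * dp_size + j for j in range(dp_size)] for i in range(extra_dp_size)]
    # Axis 0 (extra dp): ranks in the same column, varying along axis 0.
    extra_dp_groups = [[grid[i][j] for i in range(extra_dp_size)] for j in range(dp_size)]
    # Axis 1 (dp): ranks in the same row, varying along axis 1.
    dp_groups = grid
    return extra_dp_groups, dp_groups

extra_dp, dp = mesh_groups(world_size=4, extra_dp_size=2)
print(extra_dp)  # [[0, 2], [1, 3]] -> what get_group_along_axis(0) would group
print(dp)        # [[0, 1], [2, 3]] -> what get_group_along_axis(1) would group
```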
@@ -60,7 +70,12 @@ def exam_zero_1_torch_ddp_ckpt():
     # we only test stage 1 here
     # the state dicts of stage 1 and stage 2 are the same
     zero_optimizer = LowLevelZeroOptimizer(
-        zero_optimizer, overlap_communication=True, initial_scale=1, reduce_bucket_size=262144
+        zero_optimizer,
+        overlap_communication=True,
+        initial_scale=1,
+        reduce_bucket_size=262144,
+        dp_process_group=dp_group,
+        extra_dp_group=extra_dp_group,
     )
 
     torch_optimizer = torch.optim.Adam(torch_model.parameters(), lr=1)
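The two new keyword arguments, dp_process_group and extra_dp_group, are the whole user-facing surface of the feature as this test exercises it. A standalone usage sketch mirroring the hunk above; the model, learning rate, and the torchrun launch line are illustrative, and launch_from_torch taking no arguments is an assumption about the colossalai version in use:

```python
# Hypothetical standalone usage, e.g. `torchrun --nproc_per_node 4 demo.py`.
import torch
import torch.distributed as dist
import torch.nn as nn

import colossalai
from colossalai.cluster import ProcessGroupMesh
from colossalai.zero import LowLevelZeroOptimizer

colossalai.launch_from_torch()  # picks up rank/world size from torchrun env vars

extra_dp_size = 2
pg_mesh = ProcessGroupMesh(extra_dp_size, dist.get_world_size() // extra_dp_size)

model = nn.Linear(128, 128).cuda()  # placeholder model; requires a GPU per rank
optimizer = LowLevelZeroOptimizer(
    torch.optim.Adam(model.parameters(), lr=1e-3),
    overlap_communication=True,
    initial_scale=1,
    reduce_bucket_size=262144,
    dp_process_group=pg_mesh.get_group_along_axis(1),  # inner ZeRO dp axis
    extra_dp_group=pg_mesh.get_group_along_axis(0),    # outer "extra" dp axis
)
```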
@@ -111,7 +126,7 @@ def run_dist(rank, world_size, port):
 @pytest.mark.dist
 @rerun_if_address_is_in_use()
 def test_zero_ckpt():
-    spawn(run_dist, 2)
+    spawn(run_dist, 4)
 
 
 if __name__ == "__main__":
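The final hunk raises the spawned world size from 2 to 4 so the extra_dp_size=2 parameterization can form a 2x2 mesh, while extra_dp_size=1 still covers plain ZeRO. The docstring states what is checked: parity between the ZeRO and DDP state dicts, plus loading a torch checkpoint into the ZeRO optimizer. Continuing the sketch above (the file name and the zero-step save are illustrative):

```python
# Round trip the docstring describes: a checkpoint saved from a plain torch
# optimizer is loaded back into the ZeRO-wrapped optimizer.
plain_optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)
torch.save(plain_optimizer.state_dict(), "torch_ckpt.pt")
optimizer.load_state_dict(torch.load("torch_ckpt.pt"))
```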