[shardformer] Add layernorm (#4072)

* add layernorm to bert

* add layernorm test

* add layernorm test with load state dict

* add use_mixedfusedLN in shard config

* refactor policy to support fused_layernorm
Authored by FoolPlayer on 2023-06-23 18:00:22 +08:00
Committed by Frank Lee
parent 70c58cfd4f
commit 92f6791095
7 changed files with 252 additions and 17 deletions
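
For readers skimming the diff: the option added by this commit surfaces as a fused_layernorm flag on ShardConfig, as the test-helper change at the bottom of this page shows. A minimal usage sketch, assuming the colossalai.shardformer import path and stopping where the shown hunk stops:

import copy

from colossalai.shardformer import ShardConfig, ShardFormer   # assumed import path

def build_sharded_model(world_size, model_fn):
    org_model = model_fn().cuda()
    # new in this commit: request the fused layernorm substitution during sharding
    shard_config = ShardConfig(tensor_parallel_size=world_size, fused_layernorm=True)
    model_copy = copy.deepcopy(org_model)
    shard_former = ShardFormer(shard_config=shard_config)
    shard_former.init_distributed()
    # the real helper goes on to shard model_copy; that step is outside the shown hunk
    return org_model, shard_former, model_copy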

View File

@@ -0,0 +1,45 @@
import torch
import torch.distributed as dist
import torch.nn as nn
from torch.testing import assert_close

import colossalai
from colossalai.shardformer.layer import LayerNorm1D
from colossalai.testing import rerun_if_address_is_in_use, spawn


def check_layernorm_1d():
    norm = nn.LayerNorm(128, 0.00001).cuda()
    norm1d = LayerNorm1D.from_native_module(norm, process_group=None)

    assert norm1d.weight.shape == torch.Size([128])

    # ensure state dict is reversibly loadable
    norm.load_state_dict(norm1d.state_dict())
    norm1d.load_state_dict(norm.state_dict())

    # check computation correctness
    x = torch.rand(4, 128).cuda()
    out = norm(x)
    gather_out = norm1d(x)
    assert_close(out, gather_out)

    # check backward correctness
    out.sum().backward()
    gather_out.sum().backward()
    assert_close(norm.weight.grad, norm1d.weight.grad)


def run_dist(rank, world_size, port):
    colossalai.launch(config={}, rank=rank, world_size=world_size, host='localhost', port=port, backend='nccl')
    check_layernorm_1d()


@rerun_if_address_is_in_use()
def test_layernorm_1d():
    spawn(run_dist, nprocs=2)


if __name__ == '__main__':
    test_layernorm_1d()
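
For reference, the conversion API exercised above can also be applied model-wide. A hypothetical helper (not part of this commit) that recursively swaps every nn.LayerNorm for the tested wrapper:

import torch.nn as nn

from colossalai.shardformer.layer import LayerNorm1D

def convert_layernorms(module: nn.Module, process_group=None) -> nn.Module:
    # recursively replace nn.LayerNorm children with LayerNorm1D (illustrative helper)
    for name, child in module.named_children():
        if isinstance(child, nn.LayerNorm):
            setattr(module, name, LayerNorm1D.from_native_module(child, process_group=process_group))
        else:
            convert_layernorms(child, process_group)
    return module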

View File

@@ -77,7 +77,7 @@ def check_linear_conv_1d_col():
    assert_close(target_grad, linear_conv_col.weight.grad)

-def check_linear_1d_row():
+def check_linear_conv_1d_row():
    linear = Conv1D(192, 48).cuda()
    linear_row = LinearConv1D_Row.from_native_module(linear, process_group=None, parallel_input=False)
@@ -103,7 +103,7 @@ def check_linear_1d_row():
def run_dist(rank, world_size, port):
    colossalai.launch(config={}, rank=rank, world_size=world_size, host='localhost', port=port, backend='nccl')
    check_linear_conv_1d_col()
-    check_linear_1d_row()
+    check_linear_conv_1d_row()

@rerun_if_address_is_in_use()

View File

@@ -8,7 +8,7 @@ def build_model(world_size, model_fn):
    org_model = model_fn().cuda()

    # shard model
-    shard_config = ShardConfig(tensor_parallel_size=world_size)
+    shard_config = ShardConfig(tensor_parallel_size=world_size, fused_layernorm=True)
    model_copy = copy.deepcopy(org_model)
    shard_former = ShardFormer(shard_config=shard_config)
    shard_former.init_distributed()
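
Only the config change is visible in this hunk; the policy refactor named in the commit message lives in files not shown here. As a rough illustration of what fused_layernorm=True (the commit message's use_mixedfusedLN) presumably enables, here is a hedged sketch that swaps an nn.LayerNorm for Apex's fused implementation while keeping the state dict compatible; the Apex backend and the helper name are assumptions, not code from this diff:

import torch.nn as nn

try:
    from apex.normalization import FusedLayerNorm   # assumed backend for the fused path
except ImportError:
    FusedLayerNorm = None

def maybe_fuse_layernorm(module: nn.LayerNorm, use_fused: bool) -> nn.Module:
    # return a fused LayerNorm carrying the same parameters, or the original module (illustrative)
    if not use_fused or FusedLayerNorm is None:
        return module
    fused = FusedLayerNorm(module.normalized_shape, eps=module.eps,
                           elementwise_affine=module.elementwise_affine)
    if module.elementwise_affine:
        # copy weight/bias so state dicts round-trip, mirroring the layernorm test above
        fused.load_state_dict(module.state_dict())
    return fused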