Mirror of https://github.com/hpcaitech/ColossalAI.git (synced 2025-09-21 17:40:33 +00:00)
[shardformer] support DDP in HybridPlugin/add tp+dp tests (#4446)
* support DDP for HybridPlugin / add tp+dp tests
* add docstring for HybridParallelPlugin
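For context on what the new behavior looks like from the user side, here is a minimal, hypothetical sketch (not part of this commit) of driving HybridParallelPlugin so that the leftover ranks form a DDP group. The tp_size/pp_size/precision knobs mirror the test configs in the diff below; the launch and boost calls are ColossalAI's standard Booster API, and the 4-GPU launch is an assumption:

    import torch.nn as nn

    import colossalai
    from colossalai.booster import Booster
    from colossalai.booster.plugin import HybridParallelPlugin

    # Sketch only: with an assumed 4-GPU launch, tp_size=2 and pp_size=1
    # leave a data-parallel group of size 2, which this commit wraps in DDP.
    colossalai.launch_from_torch(config={})
    plugin = HybridParallelPlugin(tp_size=2, pp_size=1, precision='fp32')
    booster = Booster(plugin=plugin)
    model, *_ = booster.boost(nn.Linear(8, 8))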
@@ -13,6 +13,7 @@ from tests.test_shardformer.test_model._utils import (
     check_output_hidden_state,
     check_weight,
     run_forward_backward_with_hybrid_plugin,
+    unwrap_model,
 )
@@ -46,12 +47,8 @@ def check_forward_backward(model_fn, data_gen_fn, output_transform_fn, loss_fn,
     check_loss(org_loss, sharded_loss, atol=atol, rtol=rtol)

     # unwrap model
-    if org_model.__class__.__name__ == 'BloomModel':
-        bloom = org_model
-        sharded_bloom = sharded_model.unwrap()
-    else:
-        bloom = org_model.transformer
-        sharded_bloom = sharded_model.unwrap().transformer
+    bloom = unwrap_model(org_model, 'BloomModel', 'transformer')
+    sharded_bloom = unwrap_model(sharded_model, 'BloomModel', 'transformer')

     # check grad
     row_layer_for_check = ['h[0].self_attention.query_key_value', 'word_embeddings']
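The hunk above collapses the per-model if/else into the shared unwrap_model helper imported in the first hunk. A rough sketch of what such a helper might do, inferred from its call sites here (the real implementation lives in tests/test_shardformer/test_model/_utils.py and may differ):

    def unwrap_model(module, base_model_class_name, base_model_attribute_name):
        # Hypothetical reconstruction from the call sites above.
        # Peel off the booster wrapper if present (sharded models expose .unwrap()).
        if hasattr(module, 'unwrap'):
            module = module.unwrap()
        # Return the base transformer itself, or descend into the attribute
        # that holds it (e.g. BloomForCausalLM.transformer).
        if module.__class__.__name__ == base_model_class_name:
            return module
        return getattr(module, base_model_attribute_name)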
@@ -97,12 +94,16 @@ def check_forward_backward(model_fn, data_gen_fn, output_transform_fn, loss_fn,
     'pp_size': 1,
     'enable_all_optimization': True,
     'use_lazy_init': False,
-    'precision': 'fp32',
+    'precision': 'fp32'
+}, {
+    'tp_size': 2,
+    'pp_size': 1,
+    'enable_all_optimization': True,
+    'use_lazy_init': False,
+    'precision': 'fp32'
 }])
 def run_bloom_test(test_config):

-    # TODO(baizhou): add test_config for TP+DP after supporting & debugging it
-
     sub_model_zoo = model_zoo.get_sub_registry('transformers_bloom')

     for name, (model_fn, data_gen_fn, output_transform_fn, loss_fn, _) in sub_model_zoo.items():
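The removed TODO and the extra config belong together: once TP+DP is supported, a tensor-parallel config exercises the DDP path whenever spare ranks remain. Assuming the suite's usual 4-process launch (an assumption; the actual derivation happens inside HybridParallelPlugin), the group arithmetic would be:

    # Assumed group arithmetic for the new 'tp_size': 2 config.
    world_size = 4
    tp_size, pp_size = 2, 1
    dp_size = world_size // (tp_size * pp_size)   # 4 // 2 = 2 -> DDP across 2 replicas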