Mirror of https://github.com/hpcaitech/ColossalAI.git, synced 2025-09-08 20:40:34 +00:00
[shardformer] Pytree fix (#4533)
* pytree test
* test bert
* test bert
* test bert
* revise
* add register
* add register
@@ -191,15 +191,10 @@ def check_output_hidden_state(org_output: Tensor,
     org_hidden_state = org_output.last_hidden_state
 
-    if stage_manager is None:
-        sharded_hidden_state = sharded_output.last_hidden_state
-
     if stage_manager and stage_manager.is_last_stage():
-        pipeline_output = sharded_output['outputs']
-        if isinstance(pipeline_output, List):
-            sharded_hidden_state = torch.cat([output.last_hidden_state for output in pipeline_output], dim=dim)
-        else:
-            sharded_hidden_state = pipeline_output.last_hidden_state
+        sharded_hidden_state = sharded_output['outputs']['last_hidden_state']
+    else:
+        sharded_hidden_state = sharded_output.last_hidden_state
 
     assert torch.allclose(org_hidden_state.float(), sharded_hidden_state.float(), atol=atol, rtol=rtol), \
         f"shard model's output hidden state is not equal to origin model's last hidden state\n{org_hidden_state}\n{sharded_hidden_state}"
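For reference, the new branch assumes that on the last pipeline stage the sharded forward pass returns a plain dict whose 'outputs' entry exposes 'last_hidden_state', while the non-pipeline path keeps reading the attribute from a regular model output object. A minimal, self-contained sketch of that selection and comparison with toy stand-ins (ToyOutput and compare_hidden_state are illustrative names, not part of the repository):

import torch
from dataclasses import dataclass

@dataclass
class ToyOutput:
    # stand-in for a transformers-style model output with last_hidden_state
    last_hidden_state: torch.Tensor

def compare_hidden_state(org_output, sharded_output, stage_manager=None,
                         atol=1e-5, rtol=1e-3):
    # same selection logic as the check above: nested dict on the last
    # pipeline stage, plain output object otherwise
    org_hidden_state = org_output.last_hidden_state
    if stage_manager and stage_manager.is_last_stage():
        sharded_hidden_state = sharded_output['outputs']['last_hidden_state']
    else:
        sharded_hidden_state = sharded_output.last_hidden_state
    assert torch.allclose(org_hidden_state.float(), sharded_hidden_state.float(),
                          atol=atol, rtol=rtol)

# toy usage without a pipeline: both sides are plain output objects
hidden = torch.randn(2, 4, 8)
compare_hidden_state(ToyOutput(hidden), ToyOutput(hidden.clone()))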
@@ -179,6 +179,7 @@ def run_bert_3d_test(test_config):
     sub_model_zoo = model_zoo.get_sub_registry('transformers_bert')
 
     for name, (model_fn, data_gen_fn, output_transform_fn, loss_fn, _) in sub_model_zoo.items():
         check_forward_backward(model_fn, data_gen_fn, output_transform_fn, loss_fn, test_config)
 
+    clear_layout_converter()
 
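The loop above pulls every entry registered under 'transformers_bert' in the model zoo and runs a forward/backward check on each; the added clear_layout_converter() call drops cached layout-conversion state once the loop finishes. A rough sketch of the same loop shape with hypothetical stand-ins (ToyModel, toy_registry and toy_check are illustrative, not the repository's API):

import torch
import torch.nn as nn

class ToyModel(nn.Module):
    # trivial stand-in for a model produced by a model_fn
    def __init__(self):
        super().__init__()
        self.proj = nn.Linear(8, 8)

    def forward(self, x):
        return self.proj(x)

# hypothetical registry mirroring the (model_fn, data_gen_fn,
# output_transform_fn, loss_fn, _) tuples returned by the model zoo
toy_registry = {
    'toy_model': (
        ToyModel,                                  # model_fn
        lambda: {'x': torch.randn(4, 8)},          # data_gen_fn
        lambda out: {'logits': out},               # output_transform_fn
        lambda out: out['logits'].sum(),           # loss_fn
        None,
    ),
}

def toy_check(model_fn, data_gen_fn, output_transform_fn, loss_fn):
    # illustrative forward/backward pass in the spirit of check_forward_backward
    model = model_fn()
    output = output_transform_fn(model(**data_gen_fn()))
    loss_fn(output).backward()

for name, (model_fn, data_gen_fn, output_transform_fn, loss_fn, _) in toy_registry.items():
    toy_check(model_fn, data_gen_fn, output_transform_fn, loss_fn)
# the real test clears cached layout-converter state here between configurations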