[test] mixtral pp shard test

hxwang
2024-07-04 06:39:01 +00:00
committed by Hongxin Liu
parent 8ae8525bdf
commit a249e71946
3 changed files with 49 additions and 46 deletions

@@ -114,37 +114,37 @@ def check_forward_backward(model_fn, data_gen_fn, output_transform_fn, loss_fn,
     [
         {
             "tp_size": 1,
-            "pp_size": 1,
-            "ep_size": 4,
-            "num_microbatches": 2,
+            "pp_size": 4,
+            "ep_size": 1,
+            "num_microbatches": 4,
             "zero_stage": 0,
             "enable_all_optimization": True,
             "use_lazy_init": False,
             "precision": "fp16",
             "initial_scale": 1,
         },
-        {
-            "tp_size": 1,
-            "pp_size": 1,
-            "ep_size": 4,
-            "num_microbatches": 2,
-            "zero_stage": 1,
-            "enable_all_optimization": True,
-            "use_lazy_init": False,
-            "precision": "fp16",
-            "initial_scale": 1,
-        },
-        {
-            "tp_size": 1,
-            "pp_size": 1,
-            "ep_size": 4,
-            "num_microbatches": 2,
-            "zero_stage": 2,
-            "enable_all_optimization": True,
-            "use_lazy_init": False,
-            "precision": "fp16",
-            "initial_scale": 1,
-        },
+        # {
+        #     "tp_size": 1,
+        #     "pp_size": 1,
+        #     "ep_size": 4,
+        #     "num_microbatches": 2,
+        #     "zero_stage": 1,
+        #     "enable_all_optimization": True,
+        #     "use_lazy_init": False,
+        #     "precision": "fp16",
+        #     "initial_scale": 1,
+        # },
+        # {
+        #     "tp_size": 1,
+        #     "pp_size": 1,
+        #     "ep_size": 4,
+        #     "num_microbatches": 2,
+        #     "zero_stage": 2,
+        #     "enable_all_optimization": True,
+        #     "use_lazy_init": False,
+        #     "precision": "fp16",
+        #     "initial_scale": 1,
+        # },
     ],
 )
 def run_mixtral_test(test_config):
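
For orientation, the sketch below shows how a parameterized config list like the one in this hunk typically drives the test function. It is a hedged outline, not this file's actual body: it assumes ColossalAI's colossalai.testing.parameterize decorator, a 4-process launch inferred from the sizes above (ep_size=4 before the change, pp_size=4 after), and a hypothetical check_config helper standing in for the real booster setup and the check_forward_backward call visible in the hunk header.

from colossalai.testing import parameterize


def check_config(cfg: dict, world_size: int = 4) -> None:
    # Hypothetical helper for illustration only; the real test presumably
    # builds a plugin/booster from the config instead. world_size=4 is an
    # assumption drawn from the parallel sizes used in this hunk.
    tp, pp, ep = cfg["tp_size"], cfg["pp_size"], cfg["ep_size"]
    assert world_size % (tp * pp) == 0, "tp * pp must divide the world size"
    dp = world_size // (tp * pp)
    assert dp % ep == 0, "expert parallelism must fit the data-parallel group"


@parameterize(
    "test_config",
    [
        {
            "tp_size": 1,
            "pp_size": 4,  # the new pipeline-parallel shard case
            "ep_size": 1,
            "num_microbatches": 4,
            "zero_stage": 0,
            "enable_all_optimization": True,
            "use_lazy_init": False,
            "precision": "fp16",
            "initial_scale": 1,
        },
    ],
)
def run_mixtral_test(test_config):
    # parameterize invokes this once per dict above, passing it as
    # test_config; the real body (elided here) presumably forwards it to
    # the Mixtral model checks via check_forward_backward.
    check_config(test_config)

Calling run_mixtral_test() with no arguments then executes the body once per config dict, which is why commenting out the ZeRO-1 and ZeRO-2 entries in this commit removes those cases from the run without touching the test body itself.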