[upgrade] upgrade gpt2 (#6291)

* fix

* fix

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* fix

* fix

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
flybird11111 authored on 2025-05-08 14:10:21 +08:00, committed by GitHub
parent 8497ecc3e5
commit a4c6e189fa
3 changed files with 15 additions and 16 deletions


@@ -180,7 +180,7 @@ def check_forward_backward(model_fn, data_gen_fn, output_transform_fn, loss_fn,
"enable_sequence_parallelism": True,
"sequence_parallelism_mode": "split_gather",
"enable_flash_attention": True,
"use_lazy_init": True,
"use_lazy_init": False,
"precision": "fp16",
"initial_scale": 1,
},
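The first hunk turns off use_lazy_init for the split_gather sequence-parallel config. In these shardformer tests the flag decides whether the model under test is built inside ColossalAI's lazy initialization context. The sketch below is illustrative only: the helper name build_model and the use of a default GPT-2 config are assumptions, not part of this diff.

# Illustrative sketch of what a use_lazy_init toggle typically gates in a
# test harness. Assumes colossalai.lazy.LazyInitContext and transformers' GPT-2.
from contextlib import nullcontext

from colossalai.lazy import LazyInitContext
from transformers import GPT2Config, GPT2LMHeadModel

def build_model(use_lazy_init: bool):
    # Under lazy init, parameters are created lazily and materialized later
    # (e.g. when the booster shards the model); otherwise they are allocated eagerly.
    ctx = LazyInitContext() if use_lazy_init else nullcontext()
    with ctx:
        model = GPT2LMHeadModel(GPT2Config())
    return model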
@@ -238,7 +238,7 @@ def run_gpt2_test(test_config):
"tp_size": 2,
"pp_size": 2,
"num_microbatches": 4,
"enable_all_optimization": False,
"enable_all_optimization": True,
"use_lazy_init": False,
"precision": "fp32",
"initial_scale": 1,
@@ -247,7 +247,7 @@ def run_gpt2_test(test_config):
"tp_size": 2,
"pp_size": 2,
"num_microbatches": 4,
"enable_all_optimization": False,
"enable_all_optimization": True,
"use_lazy_init": False,
"precision": "fp16",
"zero_stage": 1,