Merge branch 'main' of github.com:hpcaitech/ColossalAI into prefetch

hxwang committed 2024-05-24 04:05:07 +00:00
16 changed files with 192 additions and 82 deletions


@@ -53,7 +53,10 @@ def check_param(model: GeminiDDP, torch_model: torch.nn.Module):
 @parameterize("model_name", ["transformers_gpt_lm"])
 @parameterize("master_weights", [True, False])
 @parameterize("max_prefetch", [0, 1, 4])
-def exam_grad_clipping(placement_config, model_name: str, master_weights: bool, max_prefetch: int):
+@parameterize("enable_async_reduce", [False, True])
+def exam_grad_clipping(
+    placement_config, model_name: str, master_weights: bool, max_prefetch: int, enable_async_reduce: bool
+):
     set_seed(1912)
     model_builder, data_gen_fn, output_transform_fn, loss_fn, *_ = next(
         iter(model_zoo.get_sub_registry(model_name).values())
@@ -86,6 +89,7 @@ def exam_grad_clipping(placement_config, model_name: str, master_weights: bool,
         pin_memory=True,
         master_weights=master_weights,
         max_prefetch=max_prefetch,
+        enable_async_reduce=enable_async_reduce,
         **placement_config,
     )
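
For context, the added decorator widens the test matrix: @parameterize from colossalai.testing calls the decorated function once per listed value, and stacked decorators expand to the cartesian product of their values. The snippet below is a minimal sketch of that mechanism under those assumptions; run_combo is a hypothetical stand-in, not the real exam_grad_clipping test, and the GeminiDDP construction is elided.

from colossalai.testing import parameterize

@parameterize("max_prefetch", [0, 1, 4])
@parameterize("enable_async_reduce", [False, True])
def run_combo(max_prefetch: int, enable_async_reduce: bool):
    # In the real test these values are forwarded to GeminiDDP(...,
    # max_prefetch=max_prefetch, enable_async_reduce=enable_async_reduce, ...)
    # as shown in the second hunk above; here we only print the combination.
    print(f"max_prefetch={max_prefetch}, enable_async_reduce={enable_async_reduce}")

if __name__ == "__main__":
    run_combo()  # expands into 3 x 2 = 6 invocations of the test body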