Mirror of https://github.com/hpcaitech/ColossalAI.git, synced 2025-09-11 22:10:37 +00:00
[misc] Accelerate CI for zero and dist optim (#5758)
* remove fp16 from lamb
* remove d2h copy in checking states

Co-authored-by: Edenzzzz <wtan45@wisc.edu>
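The second bullet refers to the state-comparison helper the test imports (check_optim_states): rather than copying every sharded state tensor to the CPU before comparing, the comparison can stay on the device the tensors already occupy, avoiding a blocking device-to-host copy per tensor. A minimal sketch of that idea, with names and tolerances that are illustrative rather than the repository's actual code:

    import torch

    def check_optim_states(org_optim, sharded_optim, rtol=1e-4, atol=1e-4):
        # Compare optimizer states tensor-by-tensor on the device they
        # already live on; a .cpu() call here would force a blocking
        # device-to-host copy for every state tensor.
        for org_state, sharded_state in zip(
            org_optim.state.values(), sharded_optim.state.values()
        ):
            for key, org_val in org_state.items():
                sharded_val = sharded_state[key]
                if isinstance(org_val, torch.Tensor):
                    # .to() is a no-op when the devices already match.
                    sharded_val = sharded_val.to(org_val.device)
                    assert torch.allclose(org_val, sharded_val, rtol=rtol, atol=atol)
                else:
                    assert org_val == sharded_val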
@@ -18,7 +18,6 @@ from tests.test_optimizer._utils import check_optim_states, run_bert_test
 
 _ALLOWED_P_G_TYPES = [
     (torch.float, torch.float),  # pure fp32
-    (torch.float, torch.half),  # fp16 amp
     (torch.float, torch.bfloat16),  # bfloat16 amp
 ]
 
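This hunk drops the fp16 AMP configuration from the (param dtype, grad dtype) sweep, so every Lamb test case now runs one fewer mixed-precision variant. A rough, hypothetical illustration of how such dtype pairs typically drive a test (the loop body is not the repository's code):

    import torch

    _ALLOWED_P_G_TYPES = [
        (torch.float, torch.float),     # pure fp32
        (torch.float, torch.bfloat16),  # bfloat16 amp
    ]

    for p_dtype, g_dtype in _ALLOWED_P_G_TYPES:
        # Master weights live in p_dtype; backward produces grads in g_dtype.
        master = torch.zeros(4, 4, dtype=p_dtype, requires_grad=True)
        working_grad = torch.randn(4, 4, dtype=g_dtype)
        # A mixed-precision optimizer casts grads up to the param dtype
        # before applying the update.
        master.grad = working_grad.to(p_dtype)
        torch.optim.SGD([master], lr=0.1).step()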
@@ -264,7 +263,6 @@ def run_dist_lamb_fwd_bwd(
 
     torch_optim.step()
     optim.step()
     dist.barrier()
     torch_optim.zero_grad()
     optim.zero_grad()
     try:
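For context, the hunk above sits in the step-and-verify loop that both changes speed up. A condensed, hypothetical version of that pattern, with check_fn standing in for the state comparison sketched earlier:

    import torch.distributed as dist

    def step_and_check(torch_optim, optim, check_fn):
        # Step both optimizers on identical gradients, fence the ranks so
        # every process has finished stepping, then reset grads and compare
        # optimizer states before the next iteration overwrites them.
        torch_optim.step()
        optim.step()
        if dist.is_initialized():
            dist.barrier()
        torch_optim.zero_grad()
        optim.zero_grad()
        check_fn(torch_optim, optim)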