Mirror of https://github.com/hpcaitech/ColossalAI.git, synced 2025-09-08 20:40:34 +00:00
[pipeline] support fp32 for HybridPlugin/merge shardformer test and pipeline test into one file (#4354)
* add naive optimizer for 3DPlugin / refactor gpt2 shardformer test
* merge tests of PP/DP/TP combinations into one test file
* fix bug when syncing grad for dp in HybridPlugin
* update supported precisions for 3DPlugin / fix bug when shifting tp_degree
* improve the passing of lazy_init
* modify lazy_init / use sync_shared_params
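For context on the fp32 / precision items above, here is a minimal usage sketch of how the full-precision mode might be selected through the booster API. The class name `HybridParallelPlugin` and the `tp_size` / `pp_size` / `precision` keyword names are assumptions inferred from the commit title, not taken from this diff; treat it as an illustration, not the plugin's definitive interface.

```python
# Hypothetical sketch: requesting full fp32 through the hybrid (3D) plugin.
# The plugin class and keyword names are assumptions inferred from the commit
# message; run under torchrun so the distributed environment exists.
import torch
import colossalai
from colossalai.booster import Booster
from colossalai.booster.plugin import HybridParallelPlugin

colossalai.launch_from_torch(config={})

# precision='fp32' is the mode this commit adds; 'fp16'/'bf16' would keep
# going through the mixed-precision optimizer wrapper patched in the diff below.
plugin = HybridParallelPlugin(tp_size=2, pp_size=1, precision='fp32')
booster = Booster(plugin=plugin)

model = torch.nn.Linear(16, 16).cuda()
optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)
criterion = torch.nn.MSELoss()

model, optimizer, criterion, _, _ = booster.boost(model, optimizer, criterion)
```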
Committed by: Hongxin Liu
Parent: f13954cd58
Commit: 0ceec8f9a9
@@ -134,7 +134,7 @@ class MixedPrecisionOptimizer(OptimizerWrapper):
                 working_param = self.master_to_working_map[p]
                 if p is working_param:
                     continue
-                if working_param.grad is None:
+                if working_param.grad is not None:
                     p.grad = working_param.grad.data.float()
                     working_param.grad = None
         total_norm = self._compute_grad_norm()
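In plain terms, the one-line change above makes the fp32 master parameter pull a gradient only when the half-precision working parameter actually has one; under data parallelism the working grad can legitimately be None (for example, a parameter that received no gradient in the last backward), and the old `is None` guard would then dereference `None.data`. A self-contained sketch of that hand-off, using made-up tensors in place of the optimizer's parameter maps:

```python
# Minimal sketch of the grad hand-off the corrected guard protects. The
# working/master pair here is fabricated for illustration; it is not the
# MixedPrecisionOptimizer code itself.
import torch

working_param = torch.nn.Parameter(torch.randn(4, dtype=torch.float16))
master_param = torch.nn.Parameter(working_param.data.float())

# Case 1: backward left a half-precision grad on the working param.
working_param.grad = torch.randn(4, dtype=torch.float16)
# Case 2 (the DP bug trigger): no grad at all, i.e. working_param.grad is None.

if working_param.grad is not None:                        # corrected check
    master_param.grad = working_param.grad.data.float()   # upcast for the fp32 step
    working_param.grad = None                             # release the fp16 grad
# With the old `is None` guard, the no-grad case would have evaluated
# None.data and raised an AttributeError.
```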