Mirror of https://github.com/hpcaitech/ColossalAI.git, synced 2025-09-08 20:40:34 +00:00
[hotfix] fix grad accumulation plus clipping for gemini (#5002)
@@ -88,7 +88,7 @@ def exam_grad_clipping(placement_config, model_name: str, master_weights: bool):
     )

     optimizer = HybridAdam(model.parameters(), lr=1e-3)
-    zero_optim = GeminiOptimizer(optimizer, model, initial_scale=32, clipping_norm=1.0)
+    zero_optim = GeminiOptimizer(optimizer, model, initial_scale=32, max_norm=1.0)

     model.train()
     torch_model.train()
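
For context, `max_norm` (rather than `clipping_norm`) is the keyword that enables gradient clipping in `GeminiOptimizer`, which is what this test exercises together with gradient accumulation. Below is a minimal sketch of such a training step under this API; the `model`, `loader`, `criterion`, and `accum_steps` names are illustrative assumptions, not part of the commit, and the surrounding ColossalAI launch/wrapping setup is omitted.

    # Sketch only: assumes a ColossalAI environment has been launched and
    # `model` is already wrapped for Gemini (e.g. via GeminiDDP), with
    # gradient accumulation support enabled at wrapping time if required.
    from colossalai.nn.optimizer import HybridAdam
    from colossalai.zero import GeminiOptimizer

    optimizer = HybridAdam(model.parameters(), lr=1e-3)
    # `max_norm` turns on gradient clipping inside the Gemini optimizer.
    zero_optim = GeminiOptimizer(optimizer, model, initial_scale=32, max_norm=1.0)

    accum_steps = 4  # illustrative accumulation window
    for step, (data, label) in enumerate(loader):  # `loader` is assumed
        # Scale the loss so accumulated gradients average over the window.
        loss = criterion(model(data), label) / accum_steps
        zero_optim.backward(loss)  # Gemini-managed backward with loss scaling
        if (step + 1) % accum_steps == 0:
            zero_optim.step()       # clips accumulated grads to max_norm, then updates
            zero_optim.zero_grad()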