Mirror of https://github.com/hpcaitech/ColossalAI.git
[hotfix] adapt ProcessGroup and Optimizer to ColoTensor (#1388)
@@ -162,9 +162,9 @@ class FusedLAMB(torch.optim.Optimizer):
                 # State initialization
                 if len(state) == 0:
                     # Exponential moving average of gradient values
-                    state['exp_avg'] = torch.zeros_like(p.data)
+                    state['exp_avg'] = torch.zeros_like(p)
                     # Exponential moving average of gradient values
-                    state['exp_avg_sq'] = torch.zeros_like(p.data)
+                    state['exp_avg_sq'] = torch.zeros_like(p)

                 if p.dtype == torch.float16:
                     g_16.append(p.grad.data)
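
Why the hunk drops .data: allocating the optimizer state with torch.zeros_like(p) lets the call dispatch through the parameter's own tensor subclass (here, presumably ColoTensor), so the freshly created exp_avg / exp_avg_sq buffers can inherit the subclass type and whatever metadata it carries, whereas torch.zeros_like on a plain torch.Tensor only ever yields a plain torch.Tensor. Below is a minimal, self-contained sketch of that general __torch_function__ mechanism; TaggedTensor and its pg attribute are hypothetical stand-ins and do not reflect ColoTensor's actual implementation.

import torch


class TaggedTensor(torch.Tensor):
    """Hypothetical stand-in for a metadata-carrying tensor subclass."""

    pg = None  # hypothetical tag, e.g. a process-group handle

    @classmethod
    def __torch_function__(cls, func, types, args=(), kwargs=None):
        if kwargs is None:
            kwargs = {}
        # Run the op with the default behaviour, which wraps tensor results in cls.
        out = super().__torch_function__(func, types, args, kwargs)
        # Copy the tag from the first tagged input onto the wrapped output.
        if isinstance(out, TaggedTensor):
            src = next((a for a in args if isinstance(a, TaggedTensor)), None)
            if src is not None:
                out.pg = src.pg
        return out


p = torch.randn(4).as_subclass(TaggedTensor)
p.pg = "tp_process_group_0"  # hypothetical value

exp_avg = torch.zeros_like(p)              # dispatches through TaggedTensor.__torch_function__
print(type(exp_avg).__name__, exp_avg.pg)  # TaggedTensor tp_process_group_0

plain = torch.zeros_like(torch.randn(4))   # plain-tensor path for comparison
print(type(plain).__name__)                # Tensor

The same reasoning applies to both state buffers in the hunk above; only the argument passed to torch.zeros_like changes.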