Mirror of https://github.com/hpcaitech/ColossalAI.git (synced 2025-09-07 20:10:17 +00:00)
[legacy] move communication and nn to legacy and refactor logger (#4671)
* [legacy] move communication to legacy (#4640)
* [legacy] refactor logger and clean up legacy codes (#4654)
* [legacy] make logger independent of gpc
* [legacy] make optim independent of registry
* [legacy] move test engine to legacy
* [legacy] move nn to legacy (#4656)
* [legacy] move nn to legacy
* [checkpointio] fix save hf config
* [test] remove useless rpc pp test
* [legacy] fix nn init
* [example] skip tutorial hybrid parallel example
* [devops] test doc check
* [devops] test doc check
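For context, the `@OPTIMIZERS.register_module` decorator in the diff below follows the common registry pattern: a decorator records a class in a name-to-class map so it can later be looked up and instantiated from a config string. The following is a minimal self-contained sketch of that pattern; the class and method names here are illustrative only and are not ColossalAI's actual registry API.

import torch


class Registry:
    """Minimal name -> class registry, sketching the pattern behind
    colossalai.legacy.registry (names here are illustrative only)."""

    def __init__(self, name: str):
        self.name = name
        self._modules: dict[str, type] = {}

    def register_module(self, cls: type) -> type:
        # Used as a bare decorator: @OPTIMIZERS.register_module
        self._modules[cls.__name__] = cls
        return cls

    def get(self, name: str) -> type:
        return self._modules[name]


OPTIMIZERS = Registry("optimizers")


@OPTIMIZERS.register_module
class DummySGD(torch.optim.SGD):
    pass


# Build an optimizer from a config-style string:
opt_cls = OPTIMIZERS.get("DummySGD")
opt = opt_cls(torch.nn.Linear(4, 2).parameters(), lr=0.01)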
@@ -2,11 +2,9 @@
import torch
from torch.optim.optimizer import Optimizer, required

from colossalai.legacy.registry import OPTIMIZERS
from colossalai.utils import multi_tensor_applier


@OPTIMIZERS.register_module
class FusedSGD(Optimizer):
    r"""Implements stochastic gradient descent (optionally with momentum).
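A minimal usage sketch for the optimizer shown in the diff. It assumes `FusedSGD` is exported from `colossalai.nn.optimizer` (the module path is inferred from the file above, not stated on this page) and that ColossalAI was built with its fused CUDA kernels, since the class relies on `multi_tensor_applier`; the constructor mirrors `torch.optim.SGD`.

import torch

# Assumed import path; FusedSGD is defined in colossalai/nn/optimizer/fused_sgd.py.
from colossalai.nn.optimizer import FusedSGD

# FusedSGD requires CUDA tensors: it batches parameter updates through
# fused multi-tensor CUDA kernels via multi_tensor_applier.
model = torch.nn.Linear(16, 4).cuda()
opt = FusedSGD(model.parameters(), lr=0.1, momentum=0.9, weight_decay=1e-4)

x = torch.randn(8, 16, device="cuda")
loss = model(x).sum()
loss.backward()
opt.step()
opt.zero_grad()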