[legacy] clean up legacy code (#4743)
* [legacy] remove outdated codes of pipeline (#4692)
* [legacy] remove cli of benchmark and update optim (#4690)
* [legacy] remove cli of benchmark and update optim
* [doc] fix cli doc test
* [legacy] fix engine clip grad norm
* [legacy] remove outdated colo tensor (#4694)
* [legacy] remove outdated colo tensor
* [test] fix test import
* [legacy] move outdated zero to legacy (#4696)
* [legacy] clean up utils (#4700)
* [legacy] clean up utils
* [example] update examples
* [legacy] clean up amp
* [legacy] fix amp module
* [legacy] clean up gpc (#4742)
* [legacy] clean up context
* [legacy] clean core, constants and global vars
* [legacy] refactor initialize
* [example] fix examples ci
* [example] fix examples ci
* [legacy] fix tests
* [example] fix gpt example
* [example] fix examples ci
* [devops] fix ci installation
* [example] fix examples ci
@@ -13,7 +13,6 @@ import torch.nn as nn
 from torch.optim import Optimizer
 
 from colossalai.interface import ModelWrapper, OptimizerWrapper
-from colossalai.nn.optimizer import ColossalaiOptimizer
 from colossalai.tensor.d_tensor import (
     is_customized_distributed_tensor,
     is_distributed_tensor,
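
The deleted import above is what enables the second hunk below: once ColossalaiOptimizer is gone, OptimizerWrapper from colossalai.interface is the only wrapper type left, and it keeps the raw torch optimizer on its .optim attribute. A minimal sketch of that relationship, using a hypothetical stand-in class rather than the real colossalai.interface.OptimizerWrapper:

import torch
from torch.optim import Optimizer

class OptimizerWrapper:
    # Hypothetical stand-in: the real colossalai.interface.OptimizerWrapper
    # also forwards step(), zero_grad(), etc.; this sketch only models the
    # .optim attribute that unwrap_optimizer relies on.
    def __init__(self, optim: Optimizer):
        self.optim = optim

params = [torch.nn.Parameter(torch.zeros(1))]
wrapper = OptimizerWrapper(torch.optim.SGD(params, lr=0.1))
assert isinstance(wrapper.optim, Optimizer)  # a single unwrap step suffices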
@@ -130,10 +129,7 @@ def unwrap_optimizer(optimizer: OptimizerWrapper):
     This method should be used before saving/loading it to/from sharded checkpoints.
     '''
 
-    # TODO(Baizhou): ColossalaiOptimizer will be replaced with OptimizerWrapper in the future
     unwrapped_optim = optimizer.optim
-    if isinstance(unwrapped_optim, ColossalaiOptimizer):
-        unwrapped_optim = unwrapped_optim.optim
     return unwrapped_optim
 
 
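
Putting the hunk together, the function after this change reduces to a single unwrap step. A sketch of the resulting code, reconstructed from the context lines of the diff (only the last line of the docstring is visible in the hunk, so the rest is omitted here):

from colossalai.interface import OptimizerWrapper

def unwrap_optimizer(optimizer: OptimizerWrapper):
    '''
    This method should be used before saving/loading it to/from sharded checkpoints.
    '''
    unwrapped_optim = optimizer.optim
    return unwrapped_optim

Before the cleanup, a ColossalaiOptimizer could itself wrap another optimizer, hence the removed isinstance check and the second .optim dereference; with the legacy wrapper moved out, a single level of wrapping is the invariant.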