Mirror of https://github.com/hpcaitech/ColossalAI.git, synced 2025-10-27 11:31:25 +00:00.
[zero] add zero optimizer for ColoTensor (#1046)
* add zero optimizer * torch ok * unit test ok * polish code * fix bugs * polish unit test * polish zero optim * polish colo ddp v2 * refactor folder structure * add comment * polish unit test * polish zero optim * polish unit test
This commit is contained in:
@@ -1,5 +1,6 @@
 import torch
-from colossalai.tensor import ParamOpHook, ChunkManager, TensorState
+from colossalai.tensor.param_op_hook import ParamOpHook
+from colossalai.tensor.chunk import ChunkManager, TensorState
 from enum import Enum
 from typing import List
 from contextlib import contextmanager
Reference in New Issue
Block a user