Mirror of https://github.com/hpcaitech/ColossalAI.git, synced 2025-09-11 13:59:08 +00:00
[misc] fit torch api upgrade and remove legacy import (#6093)
* [amp] fit torch's new api
* [amp] fix api call
* [amp] fix api call
* [misc] fit torch pytree api upgrade
* [misc] remove legacy import
* [misc] fit torch amp api
* [misc] fit torch amp api
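The amp and pytree entries above track PyTorch's move of the torch.cuda.amp helpers into the device-generic torch.amp namespace (and the matching rename of the private pytree registration helper). The snippet below is a minimal sketch of the amp side of that migration, assuming typical autocast/GradScaler call sites; it is not a reproduction of the call sites actually touched by this commit.

import torch

# Deprecated, CUDA-specific spelling (emits a deprecation warning on recent PyTorch):
#     with torch.cuda.amp.autocast(dtype=torch.float16):
#         ...
# Device-generic spelling expected after the upgrade:
with torch.amp.autocast(device_type="cuda", dtype=torch.float16):
    pass  # run the mixed-precision forward pass here

# GradScaler moved the same way:
#     scaler = torch.cuda.amp.GradScaler()   # deprecated
scaler = torch.amp.GradScaler("cuda")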
@@ -1,10 +1,5 @@
 import torch.nn
 
-from colossalai.legacy.zero.gemini.ophooks.runtime_mem_tracer_hook import (
-    GradMemStats,
-    GradMemTracerHook,
-    ParamMemTracerHook,
-)
 from colossalai.tensor.param_op_hook import ColoParamOpHookManager
 from colossalai.utils import _cast_float
 
@@ -27,6 +22,12 @@ class RuntimeMemTracer:
 
     def __init__(self, module: torch.nn.Module, dtype: torch.dtype = torch.half):
         super().__init__()
+        from colossalai.legacy.zero.gemini.ophooks.runtime_mem_tracer_hook import (
+            GradMemStats,
+            GradMemTracerHook,
+            ParamMemTracerHook,
+        )
+
         self.module = module
         self.dtype = dtype
         self._gradstat = GradMemStats()
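Net effect of the two hunks: the import of the legacy ophooks module moves from module scope into RuntimeMemTracer.__init__, so colossalai.legacy is only loaded when a tracer is actually constructed rather than whenever this file is imported. A minimal runnable sketch of that deferred-import pattern, using a hypothetical LazyTracer and a standard-library class as stand-ins for the legacy hook classes:

class LazyTracer:
    """Hypothetical stand-in for RuntimeMemTracer (illustration only)."""

    def __init__(self):
        # Deferred import: resolved at construction time instead of at module
        # import time, so the dependency stays off the import path for code
        # that never builds a tracer. collections.Counter stands in for
        # GradMemStats/GradMemTracerHook/ParamMemTracerHook here.
        from collections import Counter

        self._gradstat = Counter()


tracer = LazyTracer()  # the deferred import runs here, not at file load

The trade-off is that the import cost (and any import-time failure of the legacy package) surfaces at construction time instead of at program start.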