Mirror of https://github.com/hpcaitech/ColossalAI.git, synced 2025-09-07 12:01:39 +00:00
[legacy] clean up legacy code (#4743)
* [legacy] remove outdated codes of pipeline (#4692)
* [legacy] remove cli of benchmark and update optim (#4690)
* [legacy] remove cli of benchmark and update optim
* [doc] fix cli doc test
* [legacy] fix engine clip grad norm
* [legacy] remove outdated colo tensor (#4694)
* [legacy] remove outdated colo tensor
* [test] fix test import
* [legacy] move outdated zero to legacy (#4696)
* [legacy] clean up utils (#4700)
* [legacy] clean up utils
* [example] update examples
* [legacy] clean up amp
* [legacy] fix amp module
* [legacy] clean up gpc (#4742)
* [legacy] clean up context
* [legacy] clean core, constants and global vars
* [legacy] refactor initialize
* [example] fix examples ci
* [example] fix examples ci
* [legacy] fix tests
* [example] fix gpt example
* [example] fix examples ci
* [devops] fix ci installation
* [example] fix examples ci
colossalai/legacy/zero/gemini/paramhooks/__init__.py (new file, 3 lines)
@@ -0,0 +1,3 @@
from ._param_hookmgr import BaseParamHookMgr

__all__ = ["BaseParamHookMgr"]
colossalai/legacy/zero/gemini/paramhooks/_param_hookmgr.py (new file, 39 lines)
@@ -0,0 +1,39 @@
import functools
from typing import Callable, List

import torch


class BaseParamHookMgr(object):

    def __init__(self, param_list: List[torch.nn.Parameter]) -> None:
        r"""
        Registers a backward hook on every parameter in ``param_list``.
        """
        self._param_list = param_list
        self._hook_list = []  # unused; handles are stored on the parameters themselves

    def register_backward_hooks(self, hook_call: Callable) -> None:
        r"""
        ``hook_call`` will be called every time a gradient with respect to a
        parameter in ``self._param_list`` is computed.
        The hook should have the following signature:
        ```
        hook(param, grad) -> Tensor or None
        ```
        """
        if not torch.is_grad_enabled():
            return  # don't register grad hooks if grad isn't enabled
        for p in self._param_list:
            if p.requires_grad and not hasattr(p, '_base_param_hook'):
                # Bind the parameter as the first argument so the hook
                # receives (param, grad) rather than just (grad).
                handle = p.register_hook(functools.partial(hook_call, p))
                p._base_param_hook = handle

    def remove_hooks(self) -> None:
        """
        Remove the registered hooks from the model parameters.
        """
        for p in self._param_list:
            if p.requires_grad and hasattr(p, '_base_param_hook'):
                p._base_param_hook.remove()
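A minimal usage sketch (not part of the commit): the toy `model`, the `scale_grad_hook` function, and the 0.5 scaling factor below are all hypothetical, chosen only to show how a hook with the `hook(param, grad) -> Tensor or None` signature is attached and removed through `BaseParamHookMgr`.

```
import torch

from colossalai.legacy.zero.gemini.paramhooks import BaseParamHookMgr

# Hypothetical toy model for illustration.
model = torch.nn.Linear(4, 2)

def scale_grad_hook(param: torch.nn.Parameter, grad: torch.Tensor) -> torch.Tensor:
    # The manager binds the parameter via functools.partial, so the hook
    # receives (param, grad). Returning a tensor replaces the gradient;
    # returning None leaves it unchanged.
    return grad * 0.5  # assumption: halve every gradient, purely as a demo

mgr = BaseParamHookMgr(list(model.parameters()))
mgr.register_backward_hooks(scale_grad_hook)

loss = model(torch.randn(8, 4)).sum()
loss.backward()  # scale_grad_hook fires once per parameter gradient

mgr.remove_hooks()  # detach the hooks once they are no longer needed
```

One design detail worth noting: `remove_hooks()` removes each handle but leaves the `_base_param_hook` attribute on the parameter, so a later call to `register_backward_hooks` will skip those parameters rather than re-register them.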