Mirror of https://github.com/hpcaitech/ColossalAI.git
Fix/format colossalai/engine/paramhooks/ (#350)
parent e83970e3dc
commit 54ee8d1254
colossalai/engine/paramhooks/__init__.py
@@ -1,2 +1,3 @@
 from ._param_hookmgr import BaseParamHookMgr
+
 __all__ = ["BaseParamHookMgr"]
colossalai/engine/paramhooks/_param_hookmgr.py
@@ -2,7 +2,9 @@ from typing import Callable, List
 import torch
 import functools
 
+
 class BaseParamHookMgr(object):
+
     def __init__(self, param_list: List[torch.nn.Parameter]) -> None:
         r"""
         register backward hook on every parameters of module
@@ -10,7 +12,7 @@ class BaseParamHookMgr(object):
         self._param_list = param_list
         self._hook_list = []
 
-    def register_backward_hooks(self, hook_call : Callable) -> None:
+    def register_backward_hooks(self, hook_call: Callable) -> None:
         r"""
         The hook_call will be called every time a gradient with respect to the a param in self.param_list
         is computed.
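For context on the class this formatting commit touches: the docstring says the hook manager calls `hook_call` every time a gradient with respect to a parameter in `self._param_list` is computed. Below is a minimal runnable sketch of that pattern using `torch.Tensor.register_hook` and `functools.partial` (which matches the imports visible in the diff). It is an illustration, not a verbatim copy of the ColossalAI implementation, and the `remove_hooks` cleanup helper is a hypothetical addition.

```python
# Sketch of a BaseParamHookMgr-style manager, assuming per-parameter
# gradient hooks via torch.Tensor.register_hook. Illustrative only.
import functools
from typing import Callable, List

import torch


class BaseParamHookMgr(object):

    def __init__(self, param_list: List[torch.nn.Parameter]) -> None:
        self._param_list = param_list
        self._hook_list = []

    def register_backward_hooks(self, hook_call: Callable) -> None:
        # register_hook fires whenever a gradient w.r.t. the tensor is
        # computed; partial binds the owning parameter so the callback
        # is invoked as hook_call(param, grad).
        for p in self._param_list:
            if p.requires_grad:
                handle = p.register_hook(functools.partial(hook_call, p))
                self._hook_list.append(handle)

    def remove_hooks(self) -> None:
        # Hypothetical helper: detach every registered hook.
        for handle in self._hook_list:
            handle.remove()
        self._hook_list.clear()


# Usage example: print each parameter's gradient norm as it is produced.
if __name__ == "__main__":
    model = torch.nn.Linear(4, 2)
    mgr = BaseParamHookMgr(list(model.parameters()))
    mgr.register_backward_hooks(
        lambda param, grad: print(tuple(param.shape), grad.norm().item()))
    model(torch.randn(3, 4)).sum().backward()
```

Returning `None` from the hook (as the lambda above does) leaves the gradient unchanged; a hook may instead return a tensor to replace the gradient before it is accumulated into `param.grad`.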