Mirror of https://github.com/hpcaitech/ColossalAI.git
[hotfix] fix memory leak in zero (#781)
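As part of this hotfix, the type annotations on DynamicGradScaler.__init__ are corrected: initial_scale and growth_factor are annotated as float instead of int, min_scale and max_scale become Optional[float], and the corresponding typing import is added.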
@@ -3,6 +3,7 @@
 
 import torch
 from .base_grad_scaler import BaseGradScaler
+from typing import Optional
 
 __all__ = ['DynamicGradScaler']
 
@@ -10,12 +11,12 @@ __all__ = ['DynamicGradScaler']
 class DynamicGradScaler(BaseGradScaler):
 
     def __init__(self,
-                 initial_scale: int = 2**16,
-                 growth_factor: int = 2,
+                 initial_scale: float = 2**16,
+                 growth_factor: float = 2,
                  backoff_factor: float = 0.5,
                  growth_interval: int = 1000,
-                 min_scale: int = None,
-                 max_scale: int = None,
+                 min_scale: Optional[float] = None,
+                 max_scale: Optional[float] = None,
                  hysteresis: int = 2,
                  verbose: bool = False):
         super().__init__(initial_scale, verbose)
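For context, a minimal sketch of how the updated constructor might be called. The import path and the concrete keyword values are assumptions for illustration (only the module-relative import and the signature appear in this diff), and the bound semantics of min_scale/max_scale are inferred from the parameter names.

# Assumed import path; the diff only shows the module's relative import.
from colossalai.amp.naive_amp.grad_scaler import DynamicGradScaler

# Keyword arguments mirror the annotations in the hunk above; the two
# Optional[float] bounds may also be left as None.
scaler = DynamicGradScaler(
    initial_scale=2**16,    # float (previously annotated as int)
    growth_factor=2.0,      # float (previously annotated as int)
    backoff_factor=0.5,
    growth_interval=1000,
    min_scale=1.0,          # Optional[float]: assumed lower bound on the scale
    max_scale=2**32,        # Optional[float]: assumed upper bound on the scale
    hysteresis=2,
    verbose=False,
)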