[hotfix] fix memory leak in zero (#781)

HELSON authored 2022-04-18 13:57:03 +08:00, committed by GitHub
parent 4b01da24cd
commit 4c4388c46e
6 changed files with 32 additions and 36 deletions


@@ -3,6 +3,7 @@
 import torch
 from .base_grad_scaler import BaseGradScaler
+from typing import Optional
 __all__ = ['DynamicGradScaler']
@@ -10,12 +11,12 @@ __all__ = ['DynamicGradScaler']
 class DynamicGradScaler(BaseGradScaler):
     def __init__(self,
-                 initial_scale: int = 2**16,
-                 growth_factor: int = 2,
+                 initial_scale: float = 2**16,
+                 growth_factor: float = 2,
                  backoff_factor: float = 0.5,
                  growth_interval: int = 1000,
-                 min_scale: int = None,
-                 max_scale: int = None,
+                 min_scale: Optional[float] = None,
+                 max_scale: Optional[float] = None,
                  hysteresis: int = 2,
                  verbose: bool = False):
         super().__init__(initial_scale, verbose)
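
For reference, a minimal sketch of how the updated constructor would be called after this change; the import path is assumed from the repository layout and is not part of the diff, and only the parameter names and types above are taken from the changed file.

# Usage sketch (assumed import path; constructor signature taken from the diff above).
from colossalai.amp.naive_amp.grad_scaler import DynamicGradScaler

# Scale-related arguments are now typed as float, and min_scale/max_scale as Optional[float].
scaler = DynamicGradScaler(initial_scale=2**16,
                           growth_factor=2.0,
                           backoff_factor=0.5,
                           growth_interval=1000,
                           min_scale=1.0,
                           max_scale=2**32,
                           hysteresis=2,
                           verbose=False)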