[zero] test gradient accumulation (#1964)

* [zero] fix memory leak for zero2

* [zero] test gradient accumulation

* [zero] remove grad clip test
HELSON
2022-11-29 13:00:30 +08:00
committed by GitHub
parent b0936e4a44
commit a1ce02d740
6 changed files with 317 additions and 268 deletions
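For context, the gradient accumulation test added here checks that accumulating gradients over several micro-batches matches a single backward pass over the full batch. The sketch below is an illustrative plain-PyTorch version of that equivalence check, not the ColossalAI zero test from this commit; the model, batch shapes, and tolerance are assumptions.

import copy

import torch
import torch.nn as nn


def check_grad_accumulation(accum_steps: int = 4):
    torch.manual_seed(0)
    model_ref = nn.Linear(8, 8)
    model_acc = copy.deepcopy(model_ref)
    data = torch.randn(accum_steps, 2, 8)

    # Reference: one backward over the full batch.
    loss_ref = model_ref(data.reshape(-1, 8)).sum() / accum_steps
    loss_ref.backward()

    # Accumulated: one backward per micro-batch, gradients summed in .grad.
    for micro_batch in data:
        (model_acc(micro_batch).sum() / accum_steps).backward()

    # The accumulated gradients should match the full-batch gradients.
    for p_ref, p_acc in zip(model_ref.parameters(), model_acc.parameters()):
        assert torch.allclose(p_ref.grad, p_acc.grad, atol=1e-6)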


@@ -0,0 +1,19 @@
import random

import numpy as np
import torch


def seed_all(seed, cuda_deterministic=False):
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    if torch.cuda.is_available():
        torch.cuda.manual_seed(seed)
        torch.cuda.manual_seed_all(seed)
        if cuda_deterministic:  # slower, more reproducible
            torch.backends.cudnn.deterministic = True
            torch.backends.cudnn.benchmark = False
        else:
            torch.backends.cudnn.deterministic = False
            torch.backends.cudnn.benchmark = True
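
One possible way to use this helper in a reproducibility-sensitive test is to re-seed before each model construction so both copies start from identical parameters; this usage is an assumption for illustration, not taken from the diff.

import torch
import torch.nn as nn

seed_all(1024)                 # make parameter initialization reproducible
layer_a = nn.Linear(4, 4)

seed_all(1024)                 # re-seed: identical initialization again
layer_b = nn.Linear(4, 4)

assert torch.equal(layer_a.weight, layer_b.weight)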