Add gradient accumulation, fix lr scheduler

commit 8aa21d6bc5
parent 0aa07e600c
Author: 1SAA
Date:   2021-11-08 15:48:27 +08:00

9 changed files with 93 additions and 70 deletions


@@ -17,7 +17,8 @@ def run_trainer():
         criterion=criterion,
         optimizer=optimizer,
         lr_scheduler=lr_scheduler,
-        schedule=schedule
+        schedule=schedule,
+        gradient_accumulation=5,
     )
     logger.info("engine is built", ranks=[0])
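For context, the hunk wires gradient_accumulation=5 into the engine setup. Below is a minimal sketch of what gradient accumulation does, written in plain PyTorch rather than ColossalAI's engine internals (ACCUM_STEPS and train_epoch are illustrative names, not part of this commit):

ACCUM_STEPS = 5  # mirrors gradient_accumulation=5 in the diff above

def train_epoch(model, criterion, optimizer, lr_scheduler, dataloader):
    model.train()
    optimizer.zero_grad()
    for step, (inputs, targets) in enumerate(dataloader):
        outputs = model(inputs)
        # Scale the loss so the summed gradients equal the average
        # over the effective (ACCUM_STEPS x larger) batch.
        loss = criterion(outputs, targets) / ACCUM_STEPS
        loss.backward()  # gradients accumulate in .grad across iterations
        if (step + 1) % ACCUM_STEPS == 0:
            optimizer.step()
            optimizer.zero_grad()
            # Step the scheduler once per effective batch, not per
            # micro-batch; stepping it every micro-batch would decay the
            # learning rate ACCUM_STEPS times too fast.
            lr_scheduler.step()

The scheduler-stepping cadence is the usual coupling point between gradient accumulation and an lr scheduler, which is plausibly the kind of interaction this commit's "fix lr scheduler" addresses.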