[booster] fix no_sync method (#3709)

* [booster] fix no_sync method

* [booster] add test for ddp no_sync

* [booster] fix merge

* [booster] update unit test

* [booster] update unit test

* [booster] update unit test
Hongxin Liu committed 2023-05-09 11:10:02 +08:00 (committed by GitHub)
parent 3bf09efe74
commit 6552cbf8e1
6 changed files with 85 additions and 5 deletions

@@ -1,5 +1,5 @@
 from abc import ABC, abstractmethod
-from typing import Callable, List, Tuple, Union
+from typing import Callable, Iterator, List, Tuple, Union

 import torch.nn as nn
 from torch.optim import Optimizer
@@ -60,6 +60,13 @@ class Plugin(ABC):
         """
         pass

+    @abstractmethod
+    def no_sync(self, model: nn.Module) -> Iterator[None]:
+        """
+        Context manager to disable gradient synchronization.
+        """
+        pass
+
     @abstractmethod
     def prepare_dataloader(self,
                            dataset: Dataset,
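
For context, the new abstract method requires every plugin to expose no_sync as a context manager that suspends gradient synchronization for backward passes run inside it. A minimal sketch of how a DDP-backed plugin might satisfy this contract is shown below; DDPPlugin is a hypothetical name used for illustration, while DistributedDataParallel.no_sync() is the real PyTorch API being delegated to:

from contextlib import contextmanager
from typing import Iterator

import torch.nn as nn
from torch.nn.parallel import DistributedDataParallel as DDP

class DDPPlugin(Plugin):
    # Hypothetical concrete plugin; the other abstract methods of
    # Plugin are omitted here for brevity.
    @contextmanager
    def no_sync(self, model: nn.Module) -> Iterator[None]:
        # Delegate to DDP's built-in no_sync(), which skips the
        # gradient all-reduce for backward passes inside the block.
        assert isinstance(model, DDP), "no_sync expects a DDP-wrapped model"
        with model.no_sync():
            yield

The typical use of such a context manager is gradient accumulation: backward passes for all but the last micro-batch run inside no_sync, so gradients are all-reduced only once per optimizer step.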