Mirror of https://github.com/hpcaitech/ColossalAI.git, synced 2025-09-03 10:06:44 +00:00
[zero]support no_sync method for zero1 plugin (#4138)
* support no sync for zero1 plugin
* polish
* polish
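For context, `no_sync` is the usual hook for gradient accumulation: gradient synchronization is skipped on intermediate micro-batches and only the final backward pass of each accumulation window triggers the reduction. The loop below is a minimal, hypothetical sketch written against the plugin interface this PR touches (`support_no_sync()` and `no_sync(model, optimizer)`); `plugin`, `model`, `optimizer`, `criterion`, and `dataloader` are assumed to exist, and the plain `loss.backward()` call stands in for whatever backward path the concrete plugin or optimizer wrapper actually requires.

```python
from contextlib import nullcontext

accumulation_steps = 4  # micro-batches per optimizer step (assumed)

for step, (inputs, targets) in enumerate(dataloader):
    sync_step = (step + 1) % accumulation_steps == 0

    # Skip gradient synchronization on intermediate micro-batches,
    # but only if the plugin actually supports no_sync.
    if plugin.support_no_sync() and not sync_step:
        ctx = plugin.no_sync(model, optimizer)
    else:
        ctx = nullcontext()

    with ctx:
        loss = criterion(model(inputs), targets) / accumulation_steps
        loss.backward()

    if sync_step:
        optimizer.step()
        optimizer.zero_grad()
```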
@@ -177,7 +177,7 @@ class TorchFSDPPlugin(DPPluginBase):
     def support_no_sync(self) -> bool:
         False

-    def no_sync(self, model: nn.Module) -> Iterator[None]:
+    def no_sync(self, model: nn.Module, optimizer: OptimizerWrapper) -> Iterator[None]:
         raise NotImplementedError("Torch fsdp no_sync func not supported yet.")

     def control_precision(self) -> bool:
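The hunk above only brings `TorchFSDPPlugin` in line with the new signature; it still raises `NotImplementedError`. For contrast, the sketch below shows how a plugin that does support the feature might implement the same interface. It is a hypothetical example, not the ColossalAI zero1 implementation: it assumes the model has been wrapped in `torch.nn.parallel.DistributedDataParallel`, whose built-in `no_sync()` context manager disables gradient all-reduce, and the `optimizer` argument is accepted only to match the new signature.

```python
from contextlib import contextmanager
from typing import Iterator

import torch.nn as nn
from torch.nn.parallel import DistributedDataParallel as DDP


class HypotheticalDDPPlugin:
    """Illustrative only: mirrors the updated plugin interface from the diff,
    implemented on top of DDP's own no_sync(); not ColossalAI's zero1 code."""

    def support_no_sync(self) -> bool:
        # Advertise that gradient synchronization can be deferred.
        return True

    @contextmanager
    def no_sync(self, model: nn.Module, optimizer) -> Iterator[None]:
        # `optimizer` matches the new signature but is not needed for DDP;
        # DDP's no_sync() suppresses the gradient all-reduce hooks inside the block.
        assert isinstance(model, DDP), "expected a DDP-wrapped model"
        with model.no_sync():
            yield
```

Inside such a block, gradients from micro-batches accumulate locally and are reduced on the first backward pass executed outside it, which is the behavior a gradient-accumulation loop relies on.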