mirror of
https://github.com/hpcaitech/ColossalAI.git
synced 2025-09-17 07:00:37 +00:00
[zero] hotfix master param sync (#4618)
* [zero] add method to update master params
* [zero] update zero plugin
* [plugin] update low level zero plugin
This commit is contained in:
@@ -1,4 +1,4 @@
|
||||
from .model import ModelWrapper
|
||||
from .model import AMPModelMixin, ModelWrapper
|
||||
from .optimizer import OptimizerWrapper
|
||||
|
||||
__all__ = ['OptimizerWrapper', 'ModelWrapper']
|
||||
__all__ = ['OptimizerWrapper', 'ModelWrapper', 'AMPModelMixin']
|
||||
|
@@ -23,3 +23,14 @@ class ModelWrapper(nn.Module):
|
||||
|
||||
def forward(self, *args, **kwargs):
    """Delegate the forward pass to the wrapped module.

    All positional and keyword arguments are passed through unchanged,
    and the wrapped module's return value is returned as-is.
    """
    wrapped = self.module
    return wrapped(*args, **kwargs)
|
||||
|
||||
|
||||
class AMPModelMixin:
    """Mixin defining the interface a model wrapper exposes for AMP training."""

    def update_master_params(self):
        """Update the master parameters for AMP training.

        This base implementation is a deliberate no-op; AMP-capable
        wrappers are expected to override it.
        """
|
||||
|
Reference in New Issue
Block a user