[misc] update pre-commit and run all files (#4752)
* [misc] update pre-commit
* [misc] run pre-commit
* [misc] remove useless configuration files
* [misc] ignore cuda for clang-format
@@ -34,9 +34,10 @@ def convert_to_apex_amp(model: nn.Module, optimizer: Optimizer, amp_config):
     More details about ``amp_config`` refer to `amp_config <https://nvidia.github.io/apex/amp.html?highlight=apex%20amp>`_.
     """
     import apex.amp as apex_amp

     model, optimizer = apex_amp.initialize(model, optimizer, **amp_config)
     optimizer = ApexAMPOptimizer(optimizer)
     return model, optimizer


-__all__ = ['convert_to_apex_amp', 'ApexAMPOptimizer']
+__all__ = ["convert_to_apex_amp", "ApexAMPOptimizer"]
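The only change in this hunk is the quote style on ``__all__`` (single to double quotes, presumably from the updated pre-commit formatter); the body of ``convert_to_apex_amp`` is unchanged context. That context shows the whole function: it forwards ``amp_config`` to ``apex.amp.initialize`` and wraps the returned optimizer in ``ApexAMPOptimizer``. As a minimal usage sketch, assuming the ``colossalai.legacy.amp.apex_amp`` package this diff touches, with an illustrative toy model, optimizer, and ``opt_level`` that are not part of the commit (NVIDIA Apex and CUDA required):

    import torch
    import torch.nn as nn

    from colossalai.legacy.amp.apex_amp import convert_to_apex_amp

    # Hypothetical toy model and optimizer for illustration only.
    model = nn.Linear(16, 4).cuda()
    optimizer = torch.optim.SGD(model.parameters(), lr=1e-3)

    # ``amp_config`` is unpacked verbatim into ``apex.amp.initialize``;
    # ``opt_level="O1"`` is Apex's standard mixed-precision mode.
    model, optimizer = convert_to_apex_amp(model, optimizer, amp_config=dict(opt_level="O1"))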
@@ -15,7 +15,7 @@ from colossalai.legacy.utils import clip_grad_norm_fp32


 class ApexAMPOptimizer(OptimizerWrapper):
-    """ A wrapper class for APEX optimizer and it implements apex-specific backward and clip_grad_norm
+    """A wrapper class for APEX optimizer and it implements apex-specific backward and clip_grad_norm
     methods
     """
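This hunk only drops the stray space after the opening ``"""`` of the docstring, again a formatter fix. The docstring itself describes ``ApexAMPOptimizer`` as adding apex-specific ``backward`` and ``clip_grad_norm`` methods. A plausible shape for those methods, reconstructed from Apex's public API (``apex.amp.scale_loss``, ``apex.amp.master_params``) and the ``clip_grad_norm_fp32`` import visible in the hunk header; the ``OptimizerWrapper`` import path and the ``self.optim`` attribute are assumptions, and the actual bodies in the commit may differ:

    import apex.amp as apex_amp
    from torch import Tensor

    from colossalai.interface import OptimizerWrapper  # assumed import path
    from colossalai.legacy.utils import clip_grad_norm_fp32

    class ApexAMPOptimizer(OptimizerWrapper):
        """A wrapper class for APEX optimizer and it implements apex-specific backward and clip_grad_norm
        methods
        """

        def backward(self, loss: Tensor):
            # Apex scales the loss before backward to avoid fp16 gradient underflow.
            # ``self.optim`` is assumed to be the wrapped optimizer attribute.
            with apex_amp.scale_loss(loss, self.optim) as scaled_loss:
                scaled_loss.backward()

        def clip_grad_norm(self, model: object, max_norm: float):
            # Clip the fp32 master copies of the parameters, not the fp16 shadows.
            if max_norm > 0:
                clip_grad_norm_fp32(apex_amp.master_params(self.optim), max_norm)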