mirror of
https://github.com/hpcaitech/ColossalAI.git
synced 2025-09-06 19:40:28 +00:00
[npu] add npu support for hybrid plugin and llama (#5090)
* llama 3d
* update
* fix autocast
This commit is contained in:
@@ -6,6 +6,7 @@ from torch import Tensor
 from torch.optim import Optimizer
 
 from colossalai.interface import ModelWrapper, OptimizerWrapper
+from colossalai.utils.device import autocast
 
 from .mixed_precision_base import MixedPrecision
 
@@ -88,7 +89,7 @@ class TorchAMPModule(ModelWrapper):
         super().__init__(module)
 
     def forward(self, *args, **kwargs):
-        with torch.cuda.amp.autocast():
+        with autocast():
             return self.module(*args, **kwargs)
Reference in New Issue
Block a user