[Tensor] init a tp network training unittest (#849)
@@ -1,7 +1,9 @@
-from numpy import product
-from .op_wrapper import _COLOSSAL_OPS
+import torch
+from typing import Tuple, Optional
+from .op_wrapper import _COLOSSAL_OPS
+from numpy import product
 
 
 class ColoTensor(object):
     """ Data Structure for Tensor in Colossal-AI
@@ -52,7 +54,6 @@ class ColoTensor(object):
         return product(self._size)
 
     @staticmethod
     def init_from_torch_tensor(tensor: torch.Tensor, save_payload=True) -> 'ColoTensor':
         colo_t = ColoTensor(*tensor.size(),
                             dtype=tensor.dtype,
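For context, here is a minimal, self-contained sketch of what the factory method in the hunk above does: record the tensor's shape and dtype, and keep the torch payload only when save_payload is set. MiniColoTensor and its fields (_size, _dtype, _torch_tensor) are hypothetical simplifications for illustration, not the actual ColoTensor implementation.

    import math
    from typing import Optional

    import torch


    class MiniColoTensor:
        """Simplified stand-in for ColoTensor: shape/dtype metadata plus an optional payload."""

        def __init__(self, *size, dtype: Optional[torch.dtype] = None,
                     torch_tensor: Optional[torch.Tensor] = None):
            self._size = size
            self._dtype = dtype
            self._torch_tensor = torch_tensor  # None when the payload is allocated lazily

        def numel(self) -> int:
            # the real file uses numpy's product over the stored size; math.prod is equivalent here
            return math.prod(self._size)

        @staticmethod
        def init_from_torch_tensor(tensor: torch.Tensor,
                                   save_payload: bool = True) -> 'MiniColoTensor':
            # mirror the signature shown in the diff: shape and dtype always recorded,
            # the torch tensor itself kept only when save_payload is True
            return MiniColoTensor(*tensor.size(),
                                  dtype=tensor.dtype,
                                  torch_tensor=tensor if save_payload else None)


    t = MiniColoTensor.init_from_torch_tensor(torch.randn(4, 8))
    assert t.numel() == 32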
@@ -26,4 +26,4 @@ class ColoInitContext(InsertPostInitMethodToModuleSubClasses):
         save_torch_payload = True if not self._lazy_memory_allocate else False
         for name, param in name_list:
             delattr(module, name)
-            setattr(module, name, ColoTensor.init_from_torch_tensor(tensor=param.data, save_payload=save_torch_payload))
+            setattr(module, name, ColoTensor.init_from_torch_tensor(tensor=param, save_payload=save_torch_payload))
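The hunk above swaps each nn.Parameter attribute of a module for a wrapped tensor via delattr/setattr, and switches the argument from param.data to param. Passing the Parameter object rather than param.data hands the wrapper the parameter itself, so attributes such as requires_grad stay visible instead of only the detached payload. Below is a rough, hypothetical sketch of that pattern, reusing the MiniColoTensor stand-in from the previous sketch; replace_params_with_wrappers is an illustrative name, not ColoInitContext's actual hook.

    import torch.nn as nn


    def replace_params_with_wrappers(module: nn.Module, save_torch_payload: bool = True) -> None:
        # Snapshot the module's own parameters first: mutating attributes while
        # iterating named_parameters() directly would break the iterator.
        name_list = list(module.named_parameters(recurse=False))
        for name, param in name_list:
            delattr(module, name)  # drop the nn.Parameter entry from the module
            # stand-in for:
            #   ColoTensor.init_from_torch_tensor(tensor=param, save_payload=save_torch_payload)
            setattr(module, name,
                    MiniColoTensor.init_from_torch_tensor(param, save_payload=save_torch_payload))


    layer = nn.Linear(4, 4)
    replace_params_with_wrappers(layer)
    print(type(layer.weight).__name__)  # MiniColoTensor

A lazy-allocation context would pass save_torch_payload=False, as in the save_torch_payload flag above, so only shape and dtype are kept until the payload is materialized later.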