mirror of
https://github.com/hpcaitech/ColossalAI.git
synced 2025-09-09 04:50:17 +00:00
Init Context supports lazy allocation of model memory (#842)
This commit is contained in:
27
tests/test_tensor/test_context.py
Normal file
27
tests/test_tensor/test_context.py
Normal file
@@ -0,0 +1,27 @@
|
||||
from colossalai.utils import ColoInitContext
|
||||
|
||||
from numpy import allclose, require
|
||||
import torch
|
||||
from colossalai.tensor import ColoTensor
|
||||
from copy import deepcopy
|
||||
|
||||
|
||||
def test_linear():
    """Check lazy memory allocation of a ``torch.nn.Linear`` built under ColoInitContext.

    With ``lazy_memory_allocate=True`` the context is expected to leave the
    parameter payload empty (numel == 0) until ``torch_tensor()`` is called,
    at which point the full-sized tensor is materialized.
    """
    in_dim = 4
    out_dim = 5

    # The context object itself is never used, so don't bind it to a name.
    with ColoInitContext(lazy_memory_allocate=True):
        fc = torch.nn.Linear(in_dim, out_dim, bias=True)

    print(fc.weight.numel())
    print(fc.bias.numel())

    # lazy_memory_allocate=True, no payload is maintained
    assert fc.weight._torch_tensor.numel() == 0

    # NOTE(review): presumably torch_tensor() triggers the deferred
    # allocation — confirmed only indirectly by the assertion below.
    fc.weight.torch_tensor()
    assert fc.weight._torch_tensor.numel() == in_dim * out_dim
|
||||
|
||||
|
||||
# Allow invoking this test file directly as a script (outside pytest).
if __name__ == '__main__':
    test_linear()
|
Reference in New Issue
Block a user