[tensor] customized op returns ColoTensor (#875)

* [tensor] customized op returns ColoTensor

* polish

* polish code
This commit is contained in:
Jiarui Fang
2022-04-26 13:23:59 +08:00
committed by GitHub
parent 26d4ab8b03
commit 96211c2cc8
7 changed files with 33 additions and 45 deletions

View File

@@ -1,19 +1,13 @@
from cProfile import label
from statistics import mode
from colossalai.tensor.colo_tensor import ColoTensor
from tests.components_to_test.registry import non_distributed_component_funcs
import colossalai
import pytest
import torch
import torch.multiprocessing as mp
from colossalai.testing import parameterize, rerun_if_address_is_in_use
from colossalai.utils.cuda import get_current_device
from colossalai.utils import free_port
from colossalai.core import global_context as gpc
from colossalai.utils import ColoInitContext
import torch.distributed as dist
from functools import partial

View File

@@ -53,11 +53,11 @@ def test_linear():
# torch.nn.functional.linear(torch.randn(1, in_dim), sharded_weight, sharded_bias)
out = fc(input_tensor)
-loss = out.sum()
+loss = torch.sum(out)
loss.backward()
out_ref = fc_ref(input_ref)
-loss_ref = out_ref.sum()
+loss_ref = torch.sum(out_ref)
loss_ref.backward()
assert (loss_ref == loss)