mirror of
https://github.com/hpcaitech/ColossalAI.git
synced 2025-09-02 17:46:42 +00:00
[tensor] fix kwargs in colo_tensor torch_function (#825)
This commit is contained in:
@@ -59,14 +59,13 @@ def test_no_wrap_op():
|
||||
t_ref = torch.randn(3, 5)
|
||||
t = ColoTensor.init_from_torch_tensor(t_ref.clone())
|
||||
assert torch.sum(t) == torch.sum(t_ref)
|
||||
|
||||
assert torch.sum(input=t) == torch.sum(input=t_ref)
|
||||
|
||||
def test_lazy_init_tensor():
    """A lazily-constructed ColoTensor holds no torch tensor until first access.

    Constructing with a shape (no data) must leave the backing tensor unset;
    calling torch_tensor() materializes a tensor of the requested shape
    (2 * 3 == 6 elements).
    """
    lazy_t = ColoTensor((2, 3), dtype=torch.float32, requires_grad=True)
    # Use 'is None' — '==' on a tensor-like attribute may broadcast instead
    # of testing identity, and PEP 8 mandates identity checks for None.
    assert lazy_t._torch_tensor is None
    assert lazy_t.torch_tensor().numel() == 6
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Run the test functions directly when invoked as a script.
    test_lazy_init_tensor()
    test_no_wrap_op()
    # test_element_wise()
|
||||
|
Reference in New Issue
Block a user