[utils] integrated colotensor with lazy init context (#1324)

* [utils] integrated colotensor with lazy init context

* polish code

* polish code

* polish code
This commit is contained in:
Frank Lee
2022-07-15 17:47:12 +08:00
committed by GitHub
parent 659a740738
commit 250be4d31e
2 changed files with 103 additions and 108 deletions

View File

@@ -10,27 +10,42 @@ np.random.seed(MANUAL_SEED)
torch.manual_seed(MANUAL_SEED)
def test_lazy_init():
cpu_rng_state = torch.get_rng_state()
origin_model = resnet34(num_classes=10)
origin_param_dict = dict(origin_model.named_parameters())
torch.set_rng_state(cpu_rng_state)
ctx = LazyInitContext()
def test_lazy_init_with_meta():
    """Build a model under ``LazyInitContext(to_meta=True)`` and verify that
    every parameter/buffer starts on the meta device, then is materialized
    (moved off meta) by ``lazy_init_parameters``.
    """
    ctx = LazyInitContext(to_meta=True)
    with ctx:
        model = resnet34(num_classes=10)

    # Before materialization, everything must live on the meta device.
    assert all(p.is_meta for p in model.parameters())
    assert all(b.is_meta for b in model.buffers())

    ctx.lazy_init_parameters(model)

    # After materialization, nothing may remain on the meta device.
    for name, p in model.named_parameters():
        assert not p.is_meta, name
    assert not any(b.is_meta for b in model.buffers())
def test_lazy_init_without_meta():
    """Build a model under ``LazyInitContext(to_meta=False)``: tensors are
    allocated on a real device from the start, and ``lazy_init_parameters``
    re-initializes their values in place.
    """
    ctx = LazyInitContext(to_meta=False)
    with ctx:
        model = resnet34(num_classes=10)

    # With to_meta disabled, no tensor should be on the meta device.
    assert not any(p.is_meta for p in model.parameters())
    assert not any(b.is_meta for b in model.buffers())

    weight_before = model.conv1.weight.clone()
    ctx.lazy_init_parameters(model)
    weight_after = model.conv1.weight.clone()
    # Materialization re-initializes the weights, so the values must change.
    assert not torch.allclose(weight_after, weight_before)
if __name__ == '__main__':
    # Run both lazy-init variants when executed as a script.
    for test_fn in (test_lazy_init_with_meta, test_lazy_init_without_meta):
        test_fn()