[gemini] gemini mgr supports "cpu" placement policy (#1118)

* update gemini mgr

* update chunk

* add docstr

* polish placement policy

* update test chunk

* update test zero

* polish unit test

* remove useless unit test
ver217 authored 2022-06-15 15:05:19 +08:00, committed by GitHub
parent f99f56dff4
commit 7d14b473f0
7 changed files with 124 additions and 129 deletions
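
Per the title, the Gemini manager now accepts 'cpu' as a placement policy, i.e. it keeps relocatable chunks resident in host memory instead of on the accelerator. A minimal usage sketch follows; the import paths and the constructor signatures of ChunkManager and GeminiManager are assumptions based on the ColossalAI API of this era, not taken from the diff:

import torch
import torch.nn as nn
from colossalai.gemini import ChunkManager, GeminiManager
from colossalai.nn.parallel import ColoDDPV2

# ColoDDPV2 asserts half-precision parameters (see the hunk below),
# so the module is cast to fp16 before wrapping.
model = nn.Linear(1024, 1024).half().cuda()

# placement_policy='cpu' keeps movable chunks (e.g. fp32 master weights)
# on the host; chunk_size is an arbitrary illustrative value.
chunk_manager = ChunkManager(chunk_size=64 * 1024, init_device=torch.device('cpu'))
gemini_manager = GeminiManager('cpu', chunk_manager)
model = ColoDDPV2(model, gemini_manager)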

@@ -100,6 +100,8 @@ class ColoDDPV2(ColoDDP):
         self.fp32_params = []
         self.overflow_counter = 0
         self.grads_device: Dict[torch.Tensor, torch.device] = {}
+        self.chunk_manager.create_group('fp16_param', force_data_on_cuda=True)
+        self.chunk_manager.create_group('fp32_param')
         # TODO: get param order and filter unused params
         for p in module.parameters():
             assert p.dtype == torch.half
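
The two create_group calls added above encode the placement split: the fp16 working copy is pinned to the GPU via force_data_on_cuda=True (it must be device-resident for compute), while the fp32 master copy joins an unconstrained group that the placement policy, such as the new 'cpu' one, is free to relocate. A toy sketch of that contract, with a hypothetical ToyChunkManager standing in for the real chunk manager internals:

from dataclasses import dataclass
from typing import Dict
import torch

@dataclass
class ChunkGroup:
    name: str
    force_data_on_cuda: bool = False  # pinned groups ignore the placement policy

class ToyChunkManager:
    """Hypothetical stand-in; the real ChunkManager manages chunked tensors."""

    def __init__(self) -> None:
        self.groups: Dict[str, ChunkGroup] = {}

    def create_group(self, name: str, force_data_on_cuda: bool = False) -> None:
        self.groups[name] = ChunkGroup(name, force_data_on_cuda)

    def device_for(self, name: str, placement_policy: str) -> torch.device:
        # Pinned groups always live on the accelerator; the rest follow policy.
        if self.groups[name].force_data_on_cuda:
            return torch.device('cuda')
        return torch.device('cpu' if placement_policy == 'cpu' else 'cuda')

mgr = ToyChunkManager()
mgr.create_group('fp16_param', force_data_on_cuda=True)
mgr.create_group('fp32_param')
assert mgr.device_for('fp16_param', 'cpu').type == 'cuda'  # always on GPU
assert mgr.device_for('fp32_param', 'cpu').type == 'cpu'   # follows 'cpu' policy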