Mirror of https://github.com/hpcaitech/ColossalAI.git
[gemini] gemini mgr supports "cpu" placement policy (#1118)
* update gemini mgr
* update chunk
* add docstr
* polish placement policy
* update test chunk
* update test zero
* polish unit test
* remove useless unit test
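For context, the "cpu" placement policy tells the Gemini manager to keep chunk data in host memory and move it to the GPU only while it is in use, rather than keeping everything device-resident. A minimal usage sketch follows; the import paths, constructor signatures, and the chunk_size value are assumptions made for illustration and are not confirmed by this commit.

    # Hypothetical sketch of selecting the new "cpu" placement policy.
    # Import paths and constructor signatures are assumptions, not taken from this commit.
    import torch
    from colossalai.gemini import ChunkManager, GeminiManager
    from colossalai.nn.parallel import ColoDDPV2

    model = torch.nn.Linear(32, 32).half().cuda()         # ColoDDPV2 asserts fp16 parameters (see hunk below)
    chunk_manager = ChunkManager(chunk_size=64)           # assumed constructor
    gemini_manager = GeminiManager('cpu', chunk_manager)  # the new "cpu" placement policy
    ddp_model = ColoDDPV2(model, gemini_manager)          # assumed to take (module, gemini_manager)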
@@ -100,6 +100,8 @@ class ColoDDPV2(ColoDDP):
        self.fp32_params = []
        self.overflow_counter = 0
        self.grads_device: Dict[torch.Tensor, torch.device] = {}
        self.chunk_manager.create_group('fp16_param', force_data_on_cuda=True)
        self.chunk_manager.create_group('fp32_param')
        # TODO: get param order and filter unused params
        for p in module.parameters():
            assert p.dtype == torch.half