Mirror of https://github.com/hpcaitech/ColossalAI.git
[ColoTensor] throw error when ColoInitContext meets meta parameter. (#2105)
commit 05545bfee9
parent d87baa85d9
@@ -36,6 +36,11 @@ def _convert_to_coloparam(param: torch.nn.Parameter,
         return param
     # detaching tensor is necessary for optimizers.
     requires_grad = param.requires_grad
-    # param is the global tensor.
-    colo_param = ColoParameter(param.to(device=device, dtype=dtype), requires_grad=requires_grad)
+
+    if param.device.type == 'meta':
+        raise NotImplemented(
+            "ColoInitContext is initializing a model with meta parameters! This is not allowed right now!")
+    else:
+        # param is the global tensor.
+        colo_param = ColoParameter(param.to(device=device, dtype=dtype), requires_grad=requires_grad)
 
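Below is a minimal sketch of the situation this commit guards against; it is not part of the commit itself. It assumes colossalai is installed and that ColoInitContext is importable from colossalai.utils.model.colo_init_context (the module containing _convert_to_coloparam); the layer shape and device choices are purely illustrative. Constructing a module whose parameters live on the 'meta' device inside the context now trips the new check instead of silently wrapping an unmaterialized tensor. Note that NotImplemented is a constant rather than an exception class, so the failure surfaces as a TypeError in practice, but initialization still aborts.

# Hypothetical reproduction sketch (assumes colossalai with this commit applied).
import torch
import torch.nn as nn

from colossalai.utils.model.colo_init_context import ColoInitContext

# ColoInitContext converts each freshly constructed parameter through
# _convert_to_coloparam after a module's __init__ finishes.
with ColoInitContext(device=torch.device('cpu')):
    # device='meta' gives nn.Linear meta parameters, so the new
    # param.device.type == 'meta' branch fires and initialization stops
    # (as a TypeError in practice, because NotImplemented is not callable).
    layer = nn.Linear(8, 8, device='meta')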