1
0
mirror of https://github.com/hpcaitech/ColossalAI.git synced 2025-05-02 05:35:29 +00:00

[gemini] fix colo_init_context

This commit is contained in:
ver217 2023-02-13 17:53:15 +08:00 committed by GitHub
parent 5cd8cae0c9
commit f0aa191f51
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

View File

@@ -32,7 +32,7 @@ def _convert_to_coloparam(param: torch.nn.Parameter,
default_pg: Optional[ProcessGroup] = None,
default_dist_spec: Optional[Any] = None) -> ColoParameter:
if isinstance(param, ColoParameter):
if type(param) is ColoParameter:
return param
# detaching tensor is necessary for optimizers.
requires_grad = param.requires_grad
@@ -102,7 +102,7 @@ class ColoInitContext(InsertPostInitMethodToModuleSubClasses):
"""
name_list = []
for name, param in _named_params_with_replica(module):
if isinstance(param, ColoTensor):
if type(param) is ColoParameter:
continue
split = name.rfind('.')