Mirror of https://github.com/hpcaitech/ColossalAI.git (synced 2025-09-07 20:10:17 +00:00)
[LowLevelZero] low level zero support lora (#5153)
* low level zero support lora
* add checkpoint test
* fix (multiple follow-up fixes, squashed)
* test ci
* Update low_level_zero_plugin.py (combination of 3 commits)
* fix naming
Committed by: Hongxin Liu
Parent: 14b0d4c7e5
Commit: 8954a0c2e2
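For context, a rough usage sketch of what this commit targets: training a LoRA-wrapped model under LowLevelZeroPlugin through the Booster API. The enable_lora entry point, the peft LoraConfig arguments, and the launch call are assumptions about ColossalAI's API of this period and may differ between versions; nothing below is taken from this diff.

# Hypothetical usage sketch (assumed APIs, illustrative only).
# Run under torchrun with at least one GPU.
import torch
import colossalai
from colossalai.booster import Booster
from colossalai.booster.plugin import LowLevelZeroPlugin
from peft import LoraConfig  # assumed dependency used for the LoRA config


class TinyModel(torch.nn.Module):
    def __init__(self):
        super().__init__()
        self.q_proj = torch.nn.Linear(16, 16)
        self.v_proj = torch.nn.Linear(16, 16)

    def forward(self, x):
        return self.v_proj(torch.relu(self.q_proj(x)))


colossalai.launch_from_torch(config={})  # older launch API; newer versions drop `config`

plugin = LowLevelZeroPlugin(stage=1, precision="fp16")
booster = Booster(plugin=plugin)

model = TinyModel()
# Attach LoRA adapters before boosting (assumed Booster.enable_lora signature).
lora_config = LoraConfig(r=8, lora_alpha=16, target_modules=["q_proj", "v_proj"])
model = booster.enable_lora(model, lora_config=lora_config)

optimizer = torch.optim.AdamW(model.parameters(), lr=1e-4)
model, optimizer, *_ = booster.boost(model, optimizer)

x = torch.randn(4, 16).cuda()
loss = model(x).sum()
booster.backward(loss, optimizer)
optimizer.step()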
@@ -82,6 +82,9 @@ class GradientStore(BaseStore):
         """
 
         grad_list = []
+        # When using LoRa and the user sets multiple param_groups, it is possible that some param_groups have no parameters with gradients.
+        if group_id not in self._grads_of_params.keys():
+            return grad_list
         for param_grads in self._grads_of_params[group_id].values():
             grad_list.append(param_grads[self._working_index])
 
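A minimal sketch of how the patched method reads in full context. The guard, the loop body, and the attribute names (_grads_of_params, _working_index) come from the hunk above; the method name and the class skeleton around them are inferred and assumed, not ColossalAI's actual GradientStore/BaseStore hierarchy.

# Sketch only: skeleton around the patched method is assumed.
from typing import Dict, List

import torch


class GradientStoreSketch:
    def __init__(self, working_index: int = 0):
        # group_id -> {param_id -> list of gradient shards (one per partition)}
        self._grads_of_params: Dict[int, Dict[int, List[torch.Tensor]]] = {}
        self._working_index = working_index

    def get_working_grads_by_group_id(self, group_id: int) -> List:
        """Return the working gradients of the given parameter group."""
        grad_list = []
        # When using LoRA with multiple param_groups, some groups may contain no
        # parameters with gradients; returning an empty list avoids a KeyError.
        if group_id not in self._grads_of_params:
            return grad_list
        for param_grads in self._grads_of_params[group_id].values():
            grad_list.append(param_grads[self._working_index])
        return grad_list

With the guard in place, asking for the gradients of a group that never registered any (e.g. a LoRA-only param_group whose base weights are frozen) now yields an empty list instead of failing on the dictionary lookup.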