mirror of https://github.com/hpcaitech/ColossalAI.git
[Feature] qlora support (#5586)
* [feature] qlora support
* [pre-commit.ci] auto fixes from pre-commit.com hooks (for more information, see https://pre-commit.ci)
* qlora follow-up commit
* migrate quantization folder to colossalai/
* [pre-commit.ci] auto fixes from pre-commit.com hooks (for more information, see https://pre-commit.ci)
* minor fixes
---------
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
@@ -187,9 +187,10 @@ class LowLevelZeroOptimizer(OptimizerWrapper):
         for param_group in self.optim.param_groups:
             group_params = param_group["params"]
             for param in group_params:
-                assert (
-                    param.dtype == self._dtype
-                ), f"Parameters are expected to have the same dtype `{self._dtype}`, but got `{param.dtype}`"
+                if not hasattr(param, "skip_zero_check") or param.skip_zero_check is False:
+                    assert (
+                        param.dtype == self._dtype
+                    ), f"Parameters are expected to have the same dtype `{self._dtype}`, but got `{param.dtype}`"

     def _create_master_param_current_rank(self, param_list):
         # split each param evenly by world size
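For context, a minimal sketch of how a caller might use the new skip_zero_check flag that this diff makes LowLevelZeroOptimizer honor. The FakeQuantLinear module name, the uint8 storage dtype, and the LoRA adapter shapes below are illustrative assumptions, not ColossalAI's actual quantization API; the only piece taken from the commit is the skip_zero_check attribute on a parameter.

import torch
import torch.nn as nn

class FakeQuantLinear(nn.Module):
    """Hypothetical QLoRA-style layer: frozen low-bit base weight plus fp16 LoRA adapters."""

    def __init__(self, in_features: int, out_features: int, rank: int = 8):
        super().__init__()
        # The frozen base weight is stored in a low-bit dtype (uint8 as a stand-in),
        # which would normally trip the optimizer's uniform-dtype assertion.
        self.weight = nn.Parameter(
            torch.zeros(out_features, in_features, dtype=torch.uint8),
            requires_grad=False,
        )
        # Mark the parameter so LowLevelZeroOptimizer skips its dtype check for it.
        self.weight.skip_zero_check = True
        # The trainable LoRA adapters stay in the optimizer's working dtype
        # and are still validated as before.
        self.lora_A = nn.Parameter(torch.zeros(rank, in_features, dtype=torch.float16))
        self.lora_B = nn.Parameter(torch.zeros(out_features, rank, dtype=torch.float16))

With the flag set, the dtype assertion is bypassed for the quantized base weight, while the fp16 LoRA parameters are still checked against the optimizer's working dtype as before.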