[NFC] polish <colossalai/nn/lr_scheduler/poly.py> code style (#1267)

This commit is contained in:
oahzxl 2022-07-12 18:18:14 +08:00 committed by GitHub
parent c92f84fcdb
commit 0cf8e8e91c
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

View File

@@ -17,7 +17,12 @@ class PolynomialLR(_LRScheduler):
        the schedule is started from the beginning or When last_epoch=-1, sets initial lr as lr.
    """
def __init__(self, optimizer, total_steps: int, end_lr: float = 0.0001, power: float = 1.0, last_epoch: int = -1, def __init__(self,
optimizer,
total_steps: int,
end_lr: float = 0.0001,
power: float = 1.0,
last_epoch: int = -1,
**kwargs): **kwargs):
if end_lr < 0: if end_lr < 0:
raise ValueError(f'end_lr must >= 0, got {end_lr}') raise ValueError(f'end_lr must >= 0, got {end_lr}')
@@ -30,11 +35,9 @@ class PolynomialLR(_LRScheduler):
        return self._get_closed_form_lr()
def _get_closed_form_lr(self): def _get_closed_form_lr(self):
return [ return [(base_lr - self.end_lr) *
(base_lr - self.end_lr) * ((1 - min(self.last_epoch, self.total_steps) / ((1 - min(self.last_epoch, self.total_steps) / self.total_steps)**self.power) + self.end_lr
self.total_steps) ** self.power) + self.end_lr for base_lr in self.base_lrs]
for base_lr in self.base_lrs
]
@LR_SCHEDULERS.register_module
@@ -51,8 +54,13 @@ class PolynomialWarmupLR(WarmupScheduler):
        the schedule is started from the beginning or When last_epoch=-1, sets initial lr as lr.
    """
def __init__(self,
             optimizer,
             total_steps: int,
             warmup_steps: int = 0,
             end_lr: float = 0.0001,
             power: float = 1.0,
             last_epoch: int = -1,
             **kwargs):
    """Wrap a polynomial decay schedule with a linear warmup phase.

    Builds a :class:`PolynomialLR` covering only the steps that remain after
    warmup, then delegates to the warmup wrapper's constructor.
    """
    # The decay schedule runs for total_steps - warmup_steps; warmup owns the rest.
    decay_schedule = PolynomialLR(optimizer, total_steps - warmup_steps, end_lr=end_lr, power=power)
    super().__init__(optimizer, warmup_steps, decay_schedule, last_epoch=last_epoch)