Fixed docstring in colossalai (#171)

Author: HELSON
Date: 2022-01-21 10:44:30 +08:00
Committed by: GitHub
Parent: e2089c5c15
Commit: 0f8c7f9804
77 changed files with 983 additions and 603 deletions

@@ -7,13 +7,14 @@ from .delayed import WarmupScheduler
 @LR_SCHEDULERS.register_module
 class PolynomialLR(_LRScheduler):
     """Polynomial learning rate scheduler.
     :param optimizer: Wrapped optimizer
     :type optimizer: torch.optim.Optimizer
-    :param total_steps: number of total training steps
+    :param total_steps: Number of total training steps
     :type total_steps: int
     :param end_lr: Minimum learning rate, defaults to 0.0001
     :type end_lr: float, optional
-    :param power: the power of polynomial, defaults to 1.0
+    :param power: The power of polynomial, defaults to 1.0
     :type power: float, optional
     :param last_epoch: The index of last epoch, defaults to -1
     :type last_epoch: int, optional
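
To make the parameters concrete, here is a minimal sketch of the decay rule these docstrings describe, assuming the standard polynomial schedule; it is an illustration, not the library's own implementation:

# Sketch only, assuming the standard polynomial decay rule: the LR falls
# from the optimizer's base LR to end_lr over total_steps.
def poly_lr(base_lr: float, step: int, total_steps: int,
            end_lr: float = 0.0001, power: float = 1.0) -> float:
    step = min(step, total_steps)               # clamp once training is done
    factor = (1 - step / total_steps) ** power  # 1.0 at step 0, 0.0 at the end
    return (base_lr - end_lr) * factor + end_lr

poly_lr(0.1, 0, 1000)     # -> 0.1 (base LR at the first step)
poly_lr(0.1, 1000, 1000)  # -> 0.0001 (end_lr at the last step)
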
@@ -42,15 +43,16 @@ class PolynomialLR(_LRScheduler):
 @LR_SCHEDULERS.register_module
 class PolynomialWarmupLR(WarmupScheduler):
     """Polynomial learning rate scheduler with warmup.
     :param optimizer: Wrapped optimizer
     :type optimizer: torch.optim.Optimizer
-    :param total_steps: number of total training steps
+    :param total_steps: Number of total training steps
     :type total_steps: int
-    :param warmup_steps: number of warmup steps, defaults to 0
+    :param warmup_steps: Number of warmup steps, defaults to 0
     :type warmup_steps: int, optional
     :param end_lr: Minimum learning rate, defaults to 0.0001
     :type end_lr: float, optional
-    :param power: the power of polynomial, defaults to 1.0
+    :param power: The power of polynomial, defaults to 1.0
     :type power: float, optional
     :param last_epoch: The index of last epoch, defaults to -1
     :type last_epoch: int, optional
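
A hedged usage sketch of the warmup variant follows; the import path colossalai.nn.lr_scheduler is assumed, while the constructor arguments come straight from the docstring above:

# Usage sketch. The import path is an assumption; the arguments follow
# the docstring above (total_steps counts training steps, not epochs).
import torch
from colossalai.nn.lr_scheduler import PolynomialWarmupLR

model = torch.nn.Linear(16, 16)
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)

# Warm up for the first 100 steps, then decay polynomially toward end_lr.
scheduler = PolynomialWarmupLR(optimizer, total_steps=1000,
                               warmup_steps=100, end_lr=0.0001, power=2.0)

for _ in range(1000):
    optimizer.step()
    scheduler.step()  # stepped once per training step, since total_steps counts steps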