import math

from torch.optim import Optimizer
from torch.optim.lr_scheduler import _LRScheduler


class CosineLRScheduler(_LRScheduler):
    """Cosine LR scheduler with linear warmup.

    The learning rate ramps up linearly for `warmup_steps`, follows a cosine
    decay until `total_steps`, then stays at `lr_min_ratio` times the base LR.

    Args:
        optimizer (Optimizer): Torch optimizer.
        total_steps (int): Total number of steps.
        warmup_steps (int): Number of warmup steps.
        lr_min_ratio (float): Minimum learning rate, expressed as a ratio of
            the base learning rate.
        cycle_length (float): Length of the cosine cycle, as a fraction of the
            decay phase (1.0 yields a single half-cosine from the base LR down
            to `lr_min_ratio` times the base LR).
    """
    def __init__(self, optimizer: Optimizer, total_steps: int, warmup_steps: int,
                 lr_min_ratio: float = 0.0, cycle_length: float = 1.0):
        self.warmup_steps = warmup_steps
        assert self.warmup_steps >= 0
        self.total_steps = total_steps
        # Guards against a zero division in the decay branch of `_get_sched_lr`.
        assert self.total_steps >= self.warmup_steps
        self.lr_min_ratio = lr_min_ratio
        self.cycle_length = cycle_length
        super().__init__(optimizer)

    def _get_sched_lr(self, lr: float, step: int):
        if step < self.warmup_steps:
            # Linear warmup from 0 up to the base LR.
            lr_ratio = step / self.warmup_steps
            lr = lr_ratio * lr
        elif step <= self.total_steps:
            # Cosine decay from the base LR down to `lr_min_ratio` * base LR.
            s = (step - self.warmup_steps) / (self.total_steps - self.warmup_steps)
            lr_ratio = self.lr_min_ratio + 0.5 * (1 - self.lr_min_ratio) * \
                (1. + math.cos(math.pi * s / self.cycle_length))
            lr = lr_ratio * lr
        else:
            # Past `total_steps`: hold the LR at its floor.
            lr_ratio = self.lr_min_ratio
            lr = lr_ratio * lr
        return lr
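
    # Worked example of the decay formula: with lr_min_ratio=0.1 and
    # cycle_length=1.0, halfway through the decay phase (s = 0.5) the ratio is
    # 0.1 + 0.5 * 0.9 * (1 + cos(pi / 2)) = 0.55, i.e. 55% of the base LR.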

    def get_lr(self):
        # `last_epoch` is the current step index maintained by `_LRScheduler`.
        return [self._get_sched_lr(lr, self.last_epoch) for lr in self.base_lrs]
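

# A minimal usage sketch, assuming a toy model and illustrative hyperparameters
# (the step counts and base LR below are not from this module). `_LRScheduler`
# advances `last_epoch` on each `step()` call, which this scheduler reads as
# the current step index.
if __name__ == "__main__":
    import torch

    model = torch.nn.Linear(4, 4)
    optimizer = torch.optim.AdamW(model.parameters(), lr=1e-3)
    scheduler = CosineLRScheduler(optimizer, total_steps=1000, warmup_steps=100,
                                  lr_min_ratio=0.1, cycle_length=1.0)
    for _ in range(5):
        optimizer.step()
        scheduler.step()
    # After 5 of 100 warmup steps, the LR is 5% of the base LR: 5e-05.
    print(scheduler.get_last_lr())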