import math
from typing import List

import torch


class WarmupLambdaLR(torch.optim.lr_scheduler.LambdaLR):
    """Linearly warm the learning rate up to its base value, then hold it there."""

    def __init__(self, optimizer, warmup, last_epoch=-1, verbose=False):
        self.warmup = warmup

        def lr_lambda(epoch):
            # Multiplicative factor on each base LR: ramps from 1/warmup up to 1
            # over the first `warmup` epochs, then stays at 1 afterwards.
            if epoch < warmup:
                return float(epoch + 1) / warmup
            return 1.0

        super().__init__(optimizer, lr_lambda, last_epoch, verbose)
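# Usage sketch (illustrative only; the optimizer, the dummy parameter, and
# `warmup=5` below are placeholder choices, not part of this module):
#
#   optimizer = torch.optim.SGD([torch.nn.Parameter(torch.zeros(1))], lr=0.1)
#   scheduler = WarmupLambdaLR(optimizer, warmup=5)
#   for epoch in range(20):
#       ...  # one epoch of training
#       scheduler.step()

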
class WarmupCosineLR(torch.optim.lr_scheduler.LRScheduler):
    """Linear warmup followed by cosine decay from each base LR down to ``min_lr``.

    Note: ``torch.optim.lr_scheduler.LRScheduler`` is the public base class in
    PyTorch >= 2.0; earlier releases expose it as ``_LRScheduler``.
    """

    def __init__(
        self,
        optimizer: torch.optim.Optimizer,
        warmup_iters: int,
        lr_decay_iters: int,
        min_lr: float,
        last_epoch: int = -1,
    ):
        self.warmup_iters = warmup_iters
        self.lr_decay_iters = lr_decay_iters
        self.min_lr = min_lr
        super().__init__(optimizer, last_epoch)

    def get_lr(self) -> List[float]:
        # 1) Linear warmup: scale each base LR by last_epoch / warmup_iters.
        if self.last_epoch < self.warmup_iters:
            return [base_lr * self.last_epoch / self.warmup_iters for base_lr in self.base_lrs]

        # 2) Past the decay horizon: hold the LR at its floor.
        if self.last_epoch > self.lr_decay_iters:
            return [self.min_lr for _ in self.base_lrs]

        # 3) Cosine decay in between: coeff falls smoothly from 1 to 0,
        #    interpolating between each base LR and min_lr.
        decay_ratio = (self.last_epoch - self.warmup_iters) / (self.lr_decay_iters - self.warmup_iters)
        assert 0 <= decay_ratio <= 1
        coeff = 0.5 * (1.0 + math.cos(math.pi * decay_ratio))
        return [self.min_lr + coeff * (base_lr - self.min_lr) for base_lr in self.base_lrs]
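
if __name__ == "__main__":
    # Smoke test (an illustrative sketch, not part of the original module):
    # one dummy parameter is enough to drive the schedule, and the
    # hyperparameters below are arbitrary values chosen to hit all three phases.
    param = torch.nn.Parameter(torch.zeros(1))
    optimizer = torch.optim.SGD([param], lr=1.0)
    scheduler = WarmupCosineLR(optimizer, warmup_iters=10, lr_decay_iters=100, min_lr=0.1)
    for _ in range(120):
        optimizer.step()
        scheduler.step()
    # Past lr_decay_iters the LR should sit at the floor.
    print(optimizer.param_groups[0]["lr"])  # expected: 0.1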