File size: 691 Bytes
52a9452 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 |
import torch
from concern.config import Configurable, State
class OptimizerScheduler(Configurable):
    """Build a ``torch.optim`` optimizer from declarative configuration.

    States:
        optimizer: Name of an optimizer class in ``torch.optim``
            (e.g. ``"SGD"``, ``"Adam"``).
        optimizer_args: Keyword arguments forwarded to the optimizer
            constructor (may include ``lr``; a command-line ``lr``
            overrides it).
        learning_rate: Learning-rate object loaded from config; if it
            exposes a ``prepare`` method it is attached to the created
            optimizer.
    """
    optimizer = State()
    optimizer_args = State(default={})
    learning_rate = State(autoload=False)

    def __init__(self, cmd=None, **kwargs):
        """Load states from *kwargs*, letting *cmd* override ``lr``.

        ``cmd`` defaults to ``None`` instead of ``{}`` to avoid the
        shared-mutable-default-argument pitfall.
        """
        cmd = {} if cmd is None else cmd
        self.load_all(**kwargs)
        self.load('learning_rate', cmd=cmd, **kwargs)
        if 'lr' in cmd:
            # Copy before mutating: State(default={}) presumably hands
            # every instance the same dict object, so writing through it
            # directly would leak this override into other instances
            # (and later instantiations) — TODO confirm State semantics.
            self.optimizer_args = dict(self.optimizer_args)
            self.optimizer_args['lr'] = cmd['lr']

    def create_optimizer(self, parameters):
        """Instantiate the configured ``torch.optim`` optimizer.

        Args:
            parameters: Iterable of model parameters (or param groups)
                passed straight to the optimizer constructor.

        Returns:
            The constructed optimizer, with ``learning_rate.prepare``
            hooked to it when that method exists.
        """
        optimizer = getattr(torch.optim, self.optimizer)(
            parameters, **self.optimizer_args)
        if hasattr(self.learning_rate, 'prepare'):
            # Give schedule-like objects a chance to bind themselves
            # to the optimizer they will drive.
            self.learning_rate.prepare(optimizer)
        return optimizer
|