PeteBleackley committed
Commit ae82fc7 · 1 Parent(s): a5b7b8e

Learning rate scheduler

Files changed (1)
  1. scripts.py +1 -1
scripts.py CHANGED
@@ -128,7 +128,7 @@ def train_models(path):
                                        tokenizer)
     loss_fn = CombinedLoss()
     optimizer = torch.optim.NAdam(trainer.parameters(),lr=5.0e-5)
-    scheduler = torch.optim.ExponentialDecay(optimizer,gamma=0.9)
+    scheduler = torch.optim.lr_scheduler.ExponentialLR(optimizer,gamma=0.9)
     training_data = qarac.corpora.CombinedCorpus.CombinedCorpus(tokenizer,
                                                                 all_text='corpora/all_text.csv',
                                                                 question_answering='corpora/question_answering.csv',
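For reference, `torch.optim.ExponentialDecay` is not a PyTorch class (the name resembles TensorFlow's `tf.keras.optimizers.schedules.ExponentialDecay`); the correct PyTorch scheduler is `torch.optim.lr_scheduler.ExponentialLR`, which multiplies each parameter group's learning rate by `gamma` on every call to `scheduler.step()`. The sketch below shows typical usage with a hypothetical model and data, since the actual training loop in scripts.py is not part of this diff; only the optimizer and scheduler construction mirror the committed change.

import torch

# Hypothetical stand-ins for the trainer, loss, and corpora used in scripts.py
# (none of these appear in the diff above).
model = torch.nn.Linear(4, 1)
loss_fn = torch.nn.MSELoss()
inputs = torch.randn(8, 4)
targets = torch.randn(8, 1)

# Same optimizer and scheduler construction as the committed change.
optimizer = torch.optim.NAdam(model.parameters(), lr=5.0e-5)
scheduler = torch.optim.lr_scheduler.ExponentialLR(optimizer, gamma=0.9)

for epoch in range(3):
    optimizer.zero_grad()
    loss = loss_fn(model(inputs), targets)
    loss.backward()
    optimizer.step()
    # Decay the learning rate by gamma=0.9 at the end of each epoch.
    scheduler.step()
    print(epoch, scheduler.get_last_lr())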