aditeyabaral committed on
Commit
dbe04a0
·
verified ·
1 Parent(s): 1d5f9b9

Training Step 1575

Browse files
Files changed (3) hide show
  1. model.safetensors +1 -1
  2. trainer_state.pt +1 -1
  3. training_config.json +1 -1
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:32d67c0e94a4b16f5bbf7b91ee1441b54d6ac09164664e11e1aeecfb7377402b
3
  size 374952416
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0072844ad0ca96ac4ecf9050728b0b1a0708847d40d96bbe1dc0a84d6f1e7112
3
  size 374952416
trainer_state.pt CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:bd8232469eadd6851202575e7e978c23c153ee1877d71e8cba0f50f1b995a34d
3
  size 733099875
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:98ce6dae0ec9ba7576e75379b6bc81484c36105b7df91270a5ebba7495f48ddb
3
  size 733099875
training_config.json CHANGED
@@ -61,7 +61,7 @@
61
  "hf_api": "<huggingface_hub.hf_api.HfApi object at 0x1549f71da0f0>",
62
  "wandb_writer": "<wandb.sdk.wandb_run.Run object at 0x1549ff67a750>",
63
  "wandb_table": null,
64
- "optimizer": "AdamW (\nParameter Group 0\n amsgrad: False\n betas: (0.9, 0.999)\n capturable: False\n decoupled_weight_decay: True\n differentiable: False\n eps: 1e-08\n foreach: None\n fused: None\n initial_lr: 0.0002\n lr: 8.387096774193496e-06\n maximize: False\n weight_decay: 0.04000922838120785\n)",
65
  "lr_scheduler": "<torch.optim.lr_scheduler.SequentialLR object at 0x1549f703bbf0>",
66
  "wd_scheduler": "<trainers.schedulers.weight_decay.WeightDecayScheduler object at 0x1549ffc515e0>",
67
  "momentum_scheduler": "<trainers.schedulers.momentum.MomentumScheduler object at 0x1549ffc51ee0>",
 
61
  "hf_api": "<huggingface_hub.hf_api.HfApi object at 0x1549f71da0f0>",
62
  "wandb_writer": "<wandb.sdk.wandb_run.Run object at 0x1549ff67a750>",
63
  "wandb_table": null,
64
+ "optimizer": "AdamW (\nParameter Group 0\n amsgrad: False\n betas: (0.9, 0.999)\n capturable: False\n decoupled_weight_decay: True\n differentiable: False\n eps: 1e-08\n foreach: None\n fused: None\n initial_lr: 0.0002\n lr: 9.983870967741894e-06\n maximize: False\n weight_decay: 0.04001442385781745\n)",
65
  "lr_scheduler": "<torch.optim.lr_scheduler.SequentialLR object at 0x1549f703bbf0>",
66
  "wd_scheduler": "<trainers.schedulers.weight_decay.WeightDecayScheduler object at 0x1549ffc515e0>",
67
  "momentum_scheduler": "<trainers.schedulers.momentum.MomentumScheduler object at 0x1549ffc51ee0>",