{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 25.0,
  "global_step": 9450,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.0,
      "learning_rate": 1.665198237885463e-05,
      "loss": 9.873,
      "step": 378
    },
    {
      "epoch": 2.0,
      "learning_rate": 1.9979082546575355e-05,
      "loss": 2.5441,
      "step": 756
    },
    {
      "epoch": 3.0,
      "learning_rate": 1.989409994368087e-05,
      "loss": 1.9226,
      "step": 1134
    },
    {
      "epoch": 4.0,
      "learning_rate": 1.9744283913135673e-05,
      "loss": 1.6855,
      "step": 1512
    },
    {
      "epoch": 5.0,
      "learning_rate": 1.953061615987832e-05,
      "loss": 1.5525,
      "step": 1890
    },
    {
      "epoch": 6.0,
      "learning_rate": 1.9254496792347266e-05,
      "loss": 1.4545,
      "step": 2268
    },
    {
      "epoch": 7.0,
      "learning_rate": 1.8917735147939518e-05,
      "loss": 1.3937,
      "step": 2646
    },
    {
      "epoch": 8.0,
      "learning_rate": 1.852253793689982e-05,
      "loss": 1.3153,
      "step": 3024
    },
    {
      "epoch": 9.0,
      "learning_rate": 1.8071494782330375e-05,
      "loss": 1.2177,
      "step": 3402
    },
    {
      "epoch": 10.0,
      "learning_rate": 1.7567561251073607e-05,
      "loss": 1.1404,
      "step": 3780
    },
    {
      "epoch": 11.0,
      "learning_rate": 1.7014039486662075e-05,
      "loss": 1.0829,
      "step": 4158
    },
    {
      "epoch": 12.0,
      "learning_rate": 1.6414556571242747e-05,
      "loss": 0.9988,
      "step": 4536
    },
    {
      "epoch": 13.0,
      "learning_rate": 1.5773040758264232e-05,
      "loss": 0.9398,
      "step": 4914
    },
    {
      "epoch": 14.0,
      "learning_rate": 1.50936957316679e-05,
      "loss": 0.8877,
      "step": 5292
    },
    {
      "epoch": 15.0,
      "learning_rate": 1.4380973060255608e-05,
      "loss": 0.8544,
      "step": 5670
    },
    {
      "epoch": 16.0,
      "learning_rate": 1.36395430277333e-05,
      "loss": 0.7904,
      "step": 6048
    },
    {
      "epoch": 17.0,
      "learning_rate": 1.2874264029573555e-05,
      "loss": 0.7554,
      "step": 6426
    },
    {
      "epoch": 18.0,
      "learning_rate": 1.2090150737231307e-05,
      "loss": 0.7142,
      "step": 6804
    },
    {
      "epoch": 19.0,
      "learning_rate": 1.1292341238324266e-05,
      "loss": 0.6749,
      "step": 7182
    },
    {
      "epoch": 20.0,
      "learning_rate": 1.0486063368099783e-05,
      "loss": 0.6421,
      "step": 7560
    },
    {
      "epoch": 21.0,
      "learning_rate": 9.67660045280918e-06,
      "loss": 0.6075,
      "step": 7938
    },
    {
      "epoch": 22.0,
      "learning_rate": 8.86925668946412e-06,
      "loss": 0.5757,
      "step": 8316
    },
    {
      "epoch": 23.0,
      "learning_rate": 8.069322388832444e-06,
      "loss": 0.5459,
      "step": 8694
    },
    {
      "epoch": 24.0,
      "learning_rate": 7.282039309426844e-06,
      "loss": 0.5311,
      "step": 9072
    },
    {
      "epoch": 25.0,
      "learning_rate": 6.51256630964373e-06,
      "loss": 0.516,
      "step": 9450
    }
  ],
  "max_steps": 15120,
  "num_train_epochs": 40,
  "total_flos": 1794560967573504.0,
  "trial_name": null,
  "trial_params": null
}