{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "global_step": 28500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.05,
      "learning_rate": 4.912280701754386e-05,
      "loss": 2.1706,
      "step": 500
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.824561403508772e-05,
      "loss": 1.4815,
      "step": 1000
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.736842105263158e-05,
      "loss": 1.3855,
      "step": 1500
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.649122807017544e-05,
      "loss": 1.3281,
      "step": 2000
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.56140350877193e-05,
      "loss": 1.289,
      "step": 2500
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.473684210526316e-05,
      "loss": 1.2496,
      "step": 3000
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.3859649122807014e-05,
      "loss": 1.2333,
      "step": 3500
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.298245614035088e-05,
      "loss": 1.2021,
      "step": 4000
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.210526315789474e-05,
      "loss": 1.1839,
      "step": 4500
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.12280701754386e-05,
      "loss": 1.1753,
      "step": 5000
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.0350877192982455e-05,
      "loss": 1.1603,
      "step": 5500
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.9473684210526316e-05,
      "loss": 1.1491,
      "step": 6000
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.859649122807018e-05,
      "loss": 1.1346,
      "step": 6500
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.771929824561404e-05,
      "loss": 1.1201,
      "step": 7000
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.6842105263157895e-05,
      "loss": 1.1135,
      "step": 7500
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.5964912280701756e-05,
      "loss": 1.1096,
      "step": 8000
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.508771929824561e-05,
      "loss": 1.0949,
      "step": 8500
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.421052631578947e-05,
      "loss": 1.0927,
      "step": 9000
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 1.0762,
      "step": 9500
    },
    {
      "epoch": 1.05,
      "learning_rate": 3.24561403508772e-05,
      "loss": 1.0578,
      "step": 10000
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.157894736842105e-05,
      "loss": 1.045,
      "step": 10500
    },
    {
      "epoch": 1.16,
      "learning_rate": 3.0701754385964913e-05,
      "loss": 1.0412,
      "step": 11000
    },
    {
      "epoch": 1.21,
      "learning_rate": 2.9824561403508772e-05,
      "loss": 1.0424,
      "step": 11500
    },
    {
      "epoch": 1.26,
      "learning_rate": 2.8947368421052634e-05,
      "loss": 1.0348,
      "step": 12000
    },
    {
      "epoch": 1.32,
      "learning_rate": 2.8070175438596492e-05,
      "loss": 1.0283,
      "step": 12500
    },
    {
      "epoch": 1.37,
      "learning_rate": 2.7192982456140354e-05,
      "loss": 1.0286,
      "step": 13000
    },
    {
      "epoch": 1.42,
      "learning_rate": 2.6315789473684212e-05,
      "loss": 1.0263,
      "step": 13500
    },
    {
      "epoch": 1.47,
      "learning_rate": 2.5438596491228074e-05,
      "loss": 1.025,
      "step": 14000
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.456140350877193e-05,
      "loss": 1.019,
      "step": 14500
    },
    {
      "epoch": 1.58,
      "learning_rate": 2.368421052631579e-05,
      "loss": 1.0152,
      "step": 15000
    },
    {
      "epoch": 1.63,
      "learning_rate": 2.280701754385965e-05,
      "loss": 1.0116,
      "step": 15500
    },
    {
      "epoch": 1.68,
      "learning_rate": 2.1929824561403507e-05,
      "loss": 1.0123,
      "step": 16000
    },
    {
      "epoch": 1.74,
      "learning_rate": 2.105263157894737e-05,
      "loss": 1.0018,
      "step": 16500
    },
    {
      "epoch": 1.79,
      "learning_rate": 2.0175438596491227e-05,
      "loss": 1.0069,
      "step": 17000
    },
    {
      "epoch": 1.84,
      "learning_rate": 1.929824561403509e-05,
      "loss": 0.9927,
      "step": 17500
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.8421052631578947e-05,
      "loss": 0.9961,
      "step": 18000
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.7543859649122806e-05,
      "loss": 0.9895,
      "step": 18500
    },
    {
      "epoch": 2.0,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 0.9959,
      "step": 19000
    },
    {
      "epoch": 2.05,
      "learning_rate": 1.5789473684210526e-05,
      "loss": 0.9769,
      "step": 19500
    },
    {
      "epoch": 2.11,
      "learning_rate": 1.4912280701754386e-05,
      "loss": 0.9681,
      "step": 20000
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.4035087719298246e-05,
      "loss": 0.973,
      "step": 20500
    },
    {
      "epoch": 2.21,
      "learning_rate": 1.3157894736842106e-05,
      "loss": 0.9714,
      "step": 21000
    },
    {
      "epoch": 2.26,
      "learning_rate": 1.2280701754385964e-05,
      "loss": 0.9678,
      "step": 21500
    },
    {
      "epoch": 2.32,
      "learning_rate": 1.1403508771929824e-05,
      "loss": 0.9586,
      "step": 22000
    },
    {
      "epoch": 2.37,
      "learning_rate": 1.0526315789473684e-05,
      "loss": 0.9594,
      "step": 22500
    },
    {
      "epoch": 2.42,
      "learning_rate": 9.649122807017545e-06,
      "loss": 0.9624,
      "step": 23000
    },
    {
      "epoch": 2.47,
      "learning_rate": 8.771929824561403e-06,
      "loss": 0.9592,
      "step": 23500
    },
    {
      "epoch": 2.53,
      "learning_rate": 7.894736842105263e-06,
      "loss": 0.9573,
      "step": 24000
    },
    {
      "epoch": 2.58,
      "learning_rate": 7.017543859649123e-06,
      "loss": 0.9614,
      "step": 24500
    },
    {
      "epoch": 2.63,
      "learning_rate": 6.140350877192982e-06,
      "loss": 0.9534,
      "step": 25000
    },
    {
      "epoch": 2.68,
      "learning_rate": 5.263157894736842e-06,
      "loss": 0.9518,
      "step": 25500
    },
    {
      "epoch": 2.74,
      "learning_rate": 4.3859649122807014e-06,
      "loss": 0.9523,
      "step": 26000
    },
    {
      "epoch": 2.79,
      "learning_rate": 3.5087719298245615e-06,
      "loss": 0.9492,
      "step": 26500
    },
    {
      "epoch": 2.84,
      "learning_rate": 2.631578947368421e-06,
      "loss": 0.9512,
      "step": 27000
    },
    {
      "epoch": 2.89,
      "learning_rate": 1.7543859649122807e-06,
      "loss": 0.952,
      "step": 27500
    },
    {
      "epoch": 2.95,
      "learning_rate": 8.771929824561404e-07,
      "loss": 0.9526,
      "step": 28000
    },
    {
      "epoch": 3.0,
      "learning_rate": 0.0,
      "loss": 0.9482,
      "step": 28500
    },
    {
      "epoch": 3.0,
      "step": 28500,
      "total_flos": 2.38277952405504e+17,
      "train_loss": 1.076262406198602,
      "train_runtime": 7529.011,
      "train_samples_per_second": 121.121,
      "train_steps_per_second": 3.785
    }
  ],
  "max_steps": 28500,
  "num_train_epochs": 3,
  "total_flos": 2.38277952405504e+17,
  "trial_name": null,
  "trial_params": null
}