{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 69.38659543467703,
  "global_step": 250000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.14, "learning_rate": 2.0000000000000003e-06, "loss": 5.442, "step": 500 },
    { "epoch": 0.28, "learning_rate": 4.000000000000001e-06, "loss": 4.4503, "step": 1000 },
    { "epoch": 0.42, "learning_rate": 6e-06, "loss": 3.9029, "step": 1500 },
    { "epoch": 0.56, "learning_rate": 8.000000000000001e-06, "loss": 3.5541, "step": 2000 },
    { "epoch": 0.69, "learning_rate": 1e-05, "loss": 3.3022, "step": 2500 },
    { "epoch": 0.83, "learning_rate": 1.2e-05, "loss": 3.1008, "step": 3000 },
    { "epoch": 0.97, "learning_rate": 1.4000000000000001e-05, "loss": 2.9456, "step": 3500 },
    { "epoch": 1.11, "learning_rate": 1.6000000000000003e-05, "loss": 2.816, "step": 4000 },
    { "epoch": 1.25, "learning_rate": 1.8e-05, "loss": 2.7046, "step": 4500 },
    { "epoch": 1.39, "learning_rate": 2e-05, "loss": 2.6054, "step": 5000 },
    { "epoch": 1.53, "learning_rate": 2.2000000000000003e-05, "loss": 2.528, "step": 5500 },
    { "epoch": 1.67, "learning_rate": 2.4e-05, "loss": 2.4576, "step": 6000 },
    { "epoch": 1.8, "learning_rate": 2.6000000000000002e-05, "loss": 2.3952, "step": 6500 },
    { "epoch": 1.94, "learning_rate": 2.8000000000000003e-05, "loss": 2.3405, "step": 7000 },
    { "epoch": 2.08, "learning_rate": 3e-05, "loss": 2.2901, "step": 7500 },
    { "epoch": 2.22, "learning_rate": 3.2000000000000005e-05, "loss": 2.2414, "step": 8000 },
    { "epoch": 2.36, "learning_rate": 3.4000000000000007e-05, "loss": 2.2004, "step": 8500 },
    { "epoch": 2.5, "learning_rate": 3.6e-05, "loss": 2.1586, "step": 9000 },
    { "epoch": 2.64, "learning_rate": 3.8e-05, "loss": 2.123, "step": 9500 },
    { "epoch": 2.78, "learning_rate": 4e-05, "loss": 2.0858, "step": 10000 },
    { "epoch": 2.91, "learning_rate": 4.2e-05, "loss": 2.0479, "step": 10500 },
    { "epoch": 3.05, "learning_rate": 4.4000000000000006e-05, "loss": 2.0195, "step": 11000 },
    { "epoch": 3.19, "learning_rate": 4.600000000000001e-05, "loss": 1.9842, "step": 11500 },
    { "epoch": 3.33, "learning_rate": 4.8e-05, "loss": 1.9575, "step": 12000 },
    { "epoch": 3.47, "learning_rate": 5e-05, "loss": 1.9303, "step": 12500 },
    { "epoch": 3.47, "eval_loss": 1.8335806131362915, "eval_runtime": 405.093, "eval_samples_per_second": 457.117, "eval_steps_per_second": 3.572, "step": 12500 },
    { "epoch": 3.61, "learning_rate": 5.2000000000000004e-05, "loss": 1.906, "step": 13000 },
    { "epoch": 3.75, "learning_rate": 5.4000000000000005e-05, "loss": 1.8833, "step": 13500 },
    { "epoch": 3.89, "learning_rate": 5.6000000000000006e-05, "loss": 1.8601, "step": 14000 },
    { "epoch": 4.02, "learning_rate": 5.8e-05, "loss": 1.8405, "step": 14500 },
    { "epoch": 4.16, "learning_rate": 6e-05, "loss": 1.822, "step": 15000 },
    { "epoch": 4.3, "learning_rate": 6.2e-05, "loss": 1.8035, "step": 15500 },
    { "epoch": 4.44, "learning_rate": 6.400000000000001e-05, "loss": 1.7869, "step": 16000 },
    { "epoch": 4.58, "learning_rate": 6.6e-05, "loss": 1.7719, "step": 16500 },
    { "epoch": 4.72, "learning_rate": 6.800000000000001e-05, "loss": 1.7585, "step": 17000 },
    { "epoch": 4.86, "learning_rate": 7e-05, "loss": 1.746, "step": 17500 },
    { "epoch": 5.0, "learning_rate": 7.2e-05, "loss": 1.7321, "step": 18000 },
    { "epoch": 5.13, "learning_rate": 7.4e-05, "loss": 1.7189, "step": 18500 },
    { "epoch": 5.27, "learning_rate": 7.6e-05, "loss": 1.7075, "step": 19000 },
    { "epoch": 5.41, "learning_rate": 7.800000000000001e-05, "loss": 1.6968, "step": 19500 },
    { "epoch": 5.55, "learning_rate": 8e-05, "loss": 1.6874, "step": 20000 },
    { "epoch": 5.69, "learning_rate": 8.2e-05, "loss": 1.6807, "step": 20500 },
    { "epoch": 5.83, "learning_rate": 8.4e-05, "loss": 1.6724, "step": 21000 },
    { "epoch": 5.97, "learning_rate": 8.6e-05, "loss": 1.6606, "step": 21500 },
    { "epoch": 6.11, "learning_rate": 8.800000000000001e-05, "loss": 1.6524, "step": 22000 },
    { "epoch": 6.24, "learning_rate": 9e-05, "loss": 1.6434, "step": 22500 },
    { "epoch": 6.38, "learning_rate": 9.200000000000001e-05, "loss": 1.6354, "step": 23000 },
    { "epoch": 6.52, "learning_rate": 9.4e-05, "loss": 1.6275, "step": 23500 },
    { "epoch": 6.66, "learning_rate": 9.6e-05, "loss": 1.6204, "step": 24000 },
    { "epoch": 6.8, "learning_rate": 9.8e-05, "loss": 1.6143, "step": 24500 },
    { "epoch": 6.94, "learning_rate": 0.0001, "loss": 1.6099, "step": 25000 },
    { "epoch": 6.94, "eval_loss": 1.537390947341919, "eval_runtime": 389.453, "eval_samples_per_second": 475.475, "eval_steps_per_second": 3.715, "step": 25000 },
    { "epoch": 7.08, "learning_rate": 9.977777777777779e-05, "loss": 1.6084, "step": 25500 },
    { "epoch": 7.22, "learning_rate": 9.955555555555556e-05, "loss": 1.596, "step": 26000 },
    { "epoch": 7.35, "learning_rate": 9.933333333333334e-05, "loss": 1.5903, "step": 26500 },
    { "epoch": 7.49, "learning_rate": 9.911111111111112e-05, "loss": 1.5825, "step": 27000 },
    { "epoch": 7.63, "learning_rate": 9.888888888888889e-05, "loss": 1.5759, "step": 27500 },
    { "epoch": 7.77, "learning_rate": 9.866666666666668e-05, "loss": 1.5692, "step": 28000 },
    { "epoch": 7.91, "learning_rate": 9.844444444444444e-05, "loss": 1.5673, "step": 28500 },
    { "epoch": 8.05, "learning_rate": 9.822222222222223e-05, "loss": 1.5617, "step": 29000 },
    { "epoch": 8.19, "learning_rate": 9.8e-05, "loss": 1.5554, "step": 29500 },
    { "epoch": 8.33, "learning_rate": 9.777777777777778e-05, "loss": 1.551, "step": 30000 },
    { "epoch": 8.47, "learning_rate": 9.755555555555555e-05, "loss": 1.5474, "step": 30500 },
    { "epoch": 8.6, "learning_rate": 9.733333333333335e-05, "loss": 1.543, "step": 31000 },
    { "epoch": 8.74, "learning_rate": 9.711111111111111e-05, "loss": 1.538, "step": 31500 },
    { "epoch": 8.88, "learning_rate": 9.68888888888889e-05, "loss": 1.5341, "step": 32000 },
    { "epoch": 9.02, "learning_rate": 9.666666666666667e-05, "loss": 1.5333, "step": 32500 },
    { "epoch": 9.16, "learning_rate": 9.644444444444445e-05, "loss": 1.5245, "step": 33000 },
    { "epoch": 9.3, "learning_rate": 9.622222222222222e-05, "loss": 1.5229, "step": 33500 },
    { "epoch": 9.44, "learning_rate": 9.6e-05, "loss": 1.5181, "step": 34000 },
    { "epoch": 9.58, "learning_rate": 9.577777777777777e-05, "loss": 1.5163, "step": 34500 },
    { "epoch": 9.71, "learning_rate": 9.555555555555557e-05, "loss": 1.5131, "step": 35000 },
    { "epoch": 9.85, "learning_rate": 9.533333333333334e-05, "loss": 1.5106, "step": 35500 },
    { "epoch": 9.99, "learning_rate": 9.511111111111112e-05, "loss": 1.5067, "step": 36000 },
    { "epoch": 10.13, "learning_rate": 9.488888888888889e-05, "loss": 1.5016, "step": 36500 },
    { "epoch": 10.27, "learning_rate": 9.466666666666667e-05, "loss": 1.4968, "step": 37000 },
    { "epoch": 10.41, "learning_rate": 9.444444444444444e-05, "loss": 1.4956, "step": 37500 },
    { "epoch": 10.41, "eval_loss": 1.436057448387146, "eval_runtime": 389.7996, "eval_samples_per_second": 475.052, "eval_steps_per_second": 3.712, "step": 37500 },
    { "epoch": 10.55, "learning_rate": 9.422222222222223e-05, "loss": 1.4937, "step": 38000 },
    { "epoch": 10.69, "learning_rate": 9.4e-05, "loss": 1.4894, "step": 38500 },
    { "epoch": 10.82, "learning_rate": 9.377777777777779e-05, "loss": 1.4884, "step": 39000 },
    { "epoch": 10.96, "learning_rate": 9.355555555555556e-05, "loss": 1.4848, "step": 39500 },
    { "epoch": 11.1, "learning_rate": 9.333333333333334e-05, "loss": 1.4803, "step": 40000 },
    { "epoch": 11.24, "learning_rate": 9.311111111111111e-05, "loss": 1.4785, "step": 40500 },
    { "epoch": 11.38, "learning_rate": 9.28888888888889e-05, "loss": 1.4757, "step": 41000 },
    { "epoch": 11.52, "learning_rate": 9.266666666666666e-05, "loss": 1.4761, "step": 41500 },
    { "epoch": 11.66, "learning_rate": 9.244444444444445e-05, "loss": 1.4715, "step": 42000 },
    { "epoch": 11.8, "learning_rate": 9.222222222222223e-05, "loss": 1.4719, "step": 42500 },
    { "epoch": 11.93, "learning_rate": 9.200000000000001e-05, "loss": 1.4686, "step": 43000 },
    { "epoch": 12.07, "learning_rate": 9.177777777777778e-05, "loss": 1.4649, "step": 43500 },
    { "epoch": 12.21, "learning_rate": 9.155555555555557e-05, "loss": 1.4614, "step": 44000 },
    { "epoch": 12.35, "learning_rate": 9.133333333333334e-05, "loss": 1.4597, "step": 44500 },
    { "epoch": 12.49, "learning_rate": 9.111111111111112e-05, "loss": 1.4596, "step": 45000 },
    { "epoch": 12.63, "learning_rate": 9.088888888888889e-05, "loss": 1.4563, "step": 45500 },
    { "epoch": 12.77, "learning_rate": 9.066666666666667e-05, "loss": 1.4544, "step": 46000 },
    { "epoch": 12.91, "learning_rate": 9.044444444444445e-05, "loss": 1.4542, "step": 46500 },
    { "epoch": 13.04, "learning_rate": 9.022222222222224e-05, "loss": 1.4527, "step": 47000 },
    { "epoch": 13.18, "learning_rate": 9e-05, "loss": 1.4485, "step": 47500 },
    { "epoch": 13.32, "learning_rate": 8.977777777777779e-05, "loss": 1.4472, "step": 48000 },
    { "epoch": 13.46, "learning_rate": 8.955555555555556e-05, "loss": 1.4451, "step": 48500 },
    { "epoch": 13.6, "learning_rate": 8.933333333333334e-05, "loss": 1.4423, "step": 49000 },
    { "epoch": 13.74, "learning_rate": 8.911111111111111e-05, "loss": 1.4434, "step": 49500 },
    { "epoch": 13.88, "learning_rate": 8.888888888888889e-05, "loss": 1.4436, "step": 50000 },
    { "epoch": 13.88, "eval_loss": 1.388383150100708, "eval_runtime": 389.6407, "eval_samples_per_second": 475.245, "eval_steps_per_second": 3.714, "step": 50000 },
    { "epoch": 14.02, "learning_rate": 8.866666666666668e-05, "loss": 1.4407, "step": 50500 },
    { "epoch": 14.15, "learning_rate": 8.844444444444445e-05, "loss": 1.4361, "step": 51000 },
    { "epoch": 14.29, "learning_rate": 8.822222222222223e-05, "loss": 1.435, "step": 51500 },
    { "epoch": 14.43, "learning_rate": 8.800000000000001e-05, "loss": 1.4321, "step": 52000 },
    { "epoch": 14.57, "learning_rate": 8.777777777777778e-05, "loss": 1.4341, "step": 52500 },
    { "epoch": 14.71, "learning_rate": 8.755555555555556e-05, "loss": 1.432, "step": 53000 },
    { "epoch": 14.85, "learning_rate": 8.733333333333333e-05, "loss": 1.4303, "step": 53500 },
    { "epoch": 14.99, "learning_rate": 8.711111111111112e-05, "loss": 1.4267, "step": 54000 },
    { "epoch": 15.13, "learning_rate": 8.68888888888889e-05, "loss": 1.4254, "step": 54500 },
    { "epoch": 15.27, "learning_rate": 8.666666666666667e-05, "loss": 1.4246, "step": 55000 },
    { "epoch": 15.4, "learning_rate": 8.644444444444445e-05, "loss": 1.4249, "step": 55500 },
    { "epoch": 15.54, "learning_rate": 8.622222222222222e-05, "loss": 1.4246, "step": 56000 },
    { "epoch": 15.68, "learning_rate": 8.6e-05, "loss": 1.4218, "step": 56500 },
    { "epoch": 15.82, "learning_rate": 8.577777777777777e-05, "loss": 1.4183, "step": 57000 },
    { "epoch": 15.96, "learning_rate": 8.555555555555556e-05, "loss": 1.4215, "step": 57500 },
    { "epoch": 16.1, "learning_rate": 8.533333333333334e-05, "loss": 1.4148, "step": 58000 },
    { "epoch": 16.24, "learning_rate": 8.511111111111112e-05, "loss": 1.4134, "step": 58500 },
    { "epoch": 16.38, "learning_rate": 8.488888888888889e-05, "loss": 1.4138, "step": 59000 },
    { "epoch": 16.51, "learning_rate": 8.466666666666667e-05, "loss": 1.414, "step": 59500 },
    { "epoch": 16.65, "learning_rate": 8.444444444444444e-05, "loss": 1.4126, "step": 60000 },
    { "epoch": 16.79, "learning_rate": 8.422222222222223e-05, "loss": 1.4101, "step": 60500 },
    { "epoch": 16.93, "learning_rate": 8.4e-05, "loss": 1.4109, "step": 61000 },
    { "epoch": 17.07, "learning_rate": 8.377777777777778e-05, "loss": 1.4063, "step": 61500 },
    { "epoch": 17.21, "learning_rate": 8.355555555555556e-05, "loss": 1.4065, "step": 62000 },
    { "epoch": 17.35, "learning_rate": 8.333333333333334e-05, "loss": 1.4048, "step": 62500 },
    { "epoch": 17.35, "eval_loss": 1.3567780256271362, "eval_runtime": 389.8272, "eval_samples_per_second": 475.018, "eval_steps_per_second": 3.712, "step": 62500 },
    { "epoch": 17.49, "learning_rate": 8.311111111111111e-05, "loss": 1.404, "step": 63000 },
    { "epoch": 17.62, "learning_rate": 8.28888888888889e-05, "loss": 1.4021, "step": 63500 },
    { "epoch": 17.76, "learning_rate": 8.266666666666667e-05, "loss": 1.4007, "step": 64000 },
    { "epoch": 17.9, "learning_rate": 8.244444444444445e-05, "loss": 1.4021, "step": 64500 },
    { "epoch": 18.04, "learning_rate": 8.222222222222222e-05, "loss": 1.4005, "step": 65000 },
    { "epoch": 18.18, "learning_rate": 8.2e-05, "loss": 1.3965, "step": 65500 },
    { "epoch": 18.32, "learning_rate": 8.177777777777778e-05, "loss": 1.3971, "step": 66000 },
    { "epoch": 18.46, "learning_rate": 8.155555555555557e-05, "loss": 1.3976, "step": 66500 },
    { "epoch": 18.6, "learning_rate": 8.133333333333334e-05, "loss": 1.3968, "step": 67000 },
    { "epoch": 18.73, "learning_rate": 8.111111111111112e-05, "loss": 1.395, "step": 67500 },
    { "epoch": 18.87, "learning_rate": 8.088888888888889e-05, "loss": 1.3936, "step": 68000 },
    { "epoch": 19.01, "learning_rate": 8.066666666666667e-05, "loss": 1.3941, "step": 68500 },
    { "epoch": 19.15, "learning_rate": 8.044444444444444e-05, "loss": 1.3874, "step": 69000 },
    { "epoch": 19.29, "learning_rate": 8.022222222222222e-05, "loss": 1.3901, "step": 69500 },
    { "epoch": 19.43, "learning_rate": 8e-05, "loss": 1.3888, "step": 70000 },
    { "epoch": 19.57, "learning_rate": 7.977777777777779e-05, "loss": 1.3905, "step": 70500 },
    { "epoch": 19.71, "learning_rate": 7.955555555555556e-05, "loss": 1.3899, "step": 71000 },
    { "epoch": 19.84, "learning_rate": 7.933333333333334e-05, "loss": 1.3872, "step": 71500 },
    { "epoch": 19.98, "learning_rate": 7.911111111111111e-05, "loss": 1.3857, "step": 72000 },
    { "epoch": 20.12, "learning_rate": 7.88888888888889e-05, "loss": 1.3831, "step": 72500 },
    { "epoch": 20.26, "learning_rate": 7.866666666666666e-05, "loss": 1.3827, "step": 73000 },
    { "epoch": 20.4, "learning_rate": 7.844444444444446e-05, "loss": 1.3824, "step": 73500 },
    { "epoch": 20.54, "learning_rate": 7.822222222222223e-05, "loss": 1.3832, "step": 74000 },
    { "epoch": 20.68, "learning_rate": 7.800000000000001e-05, "loss": 1.3837, "step": 74500 },
    { "epoch": 20.82, "learning_rate": 7.777777777777778e-05, "loss": 1.3789, "step": 75000 },
    { "epoch": 20.82, "eval_loss": 1.3401681184768677, "eval_runtime": 389.9204, "eval_samples_per_second": 474.905, "eval_steps_per_second": 3.711, "step": 75000 },
    { "epoch": 20.95, "learning_rate": 7.755555555555556e-05, "loss": 1.3813, "step": 75500 },
    { "epoch": 21.09, "learning_rate": 7.733333333333333e-05, "loss": 1.3808, "step": 76000 },
    { "epoch": 21.23, "learning_rate": 7.711111111111112e-05, "loss": 1.3783, "step": 76500 },
    { "epoch": 21.37, "learning_rate": 7.688888888888889e-05, "loss": 1.3778, "step": 77000 },
    { "epoch": 21.51, "learning_rate": 7.666666666666667e-05, "loss": 1.3785, "step": 77500 },
    { "epoch": 21.65, "learning_rate": 7.644444444444445e-05, "loss": 1.3773, "step": 78000 },
    { "epoch": 21.79, "learning_rate": 7.622222222222223e-05, "loss": 1.3774, "step": 78500 },
    { "epoch": 21.93, "learning_rate": 7.6e-05, "loss": 1.3774, "step": 79000 },
    { "epoch": 22.06, "learning_rate": 7.577777777777779e-05, "loss": 1.3757, "step": 79500 },
    { "epoch": 22.2, "learning_rate": 7.555555555555556e-05, "loss": 1.3721, "step": 80000 },
    { "epoch": 22.34, "learning_rate": 7.533333333333334e-05, "loss": 1.371, "step": 80500 },
    { "epoch": 22.48, "learning_rate": 7.511111111111111e-05, "loss": 1.3718, "step": 81000 },
    { "epoch": 22.62, "learning_rate": 7.488888888888889e-05, "loss": 1.3706, "step": 81500 },
    { "epoch": 22.76, "learning_rate": 7.466666666666667e-05, "loss": 1.3726, "step": 82000 },
    { "epoch": 22.9, "learning_rate": 7.444444444444444e-05, "loss": 1.3759, "step": 82500 },
    { "epoch": 23.04, "learning_rate": 7.422222222222223e-05, "loss": 1.3727, "step": 83000 },
    { "epoch": 23.18, "learning_rate": 7.4e-05, "loss": 1.3689, "step": 83500 },
    { "epoch": 23.31, "learning_rate": 7.377777777777778e-05, "loss": 1.3677, "step": 84000 },
    { "epoch": 23.45, "learning_rate": 7.355555555555556e-05, "loss": 1.3688, "step": 84500 },
    { "epoch": 23.59, "learning_rate": 7.333333333333333e-05, "loss": 1.3663, "step": 85000 },
    { "epoch": 23.73, "learning_rate": 7.311111111111111e-05, "loss": 1.3662, "step": 85500 },
    { "epoch": 23.87, "learning_rate": 7.28888888888889e-05, "loss": 1.3647, "step": 86000 },
    { "epoch": 24.01, "learning_rate": 7.266666666666667e-05, "loss": 1.3661, "step": 86500 },
    { "epoch": 24.15, "learning_rate": 7.244444444444445e-05, "loss": 1.3619, "step": 87000 },
    { "epoch": 24.29, "learning_rate": 7.222222222222222e-05, "loss": 1.363, "step": 87500 },
    { "epoch": 24.29, "eval_loss": 1.3217395544052124, "eval_runtime": 389.4524, "eval_samples_per_second": 475.475, "eval_steps_per_second": 3.715, "step": 87500 },
    { "epoch": 24.42, "learning_rate": 7.2e-05, "loss": 1.362, "step": 88000 },
    { "epoch": 24.56, "learning_rate": 7.177777777777777e-05, "loss": 1.3594, "step": 88500 },
    { "epoch": 24.7, "learning_rate": 7.155555555555555e-05, "loss": 1.3598, "step": 89000 },
    { "epoch": 24.84, "learning_rate": 7.133333333333334e-05, "loss": 1.3605, "step": 89500 },
    { "epoch": 24.98, "learning_rate": 7.111111111111112e-05, "loss": 1.3589, "step": 90000 },
    { "epoch": 25.12, "learning_rate": 7.088888888888889e-05, "loss": 1.3574, "step": 90500 },
    { "epoch": 25.26, "learning_rate": 7.066666666666667e-05, "loss": 1.3562, "step": 91000 },
    { "epoch": 25.4, "learning_rate": 7.044444444444444e-05, "loss": 1.3569, "step": 91500 },
    { "epoch": 25.53, "learning_rate": 7.022222222222222e-05, "loss": 1.3568, "step": 92000 },
    { "epoch": 25.67, "learning_rate": 7e-05, "loss": 1.3556, "step": 92500 },
    { "epoch": 25.81, "learning_rate": 6.977777777777779e-05, "loss": 1.3581, "step": 93000 },
    { "epoch": 25.95, "learning_rate": 6.955555555555556e-05, "loss": 1.3583, "step": 93500 },
    { "epoch": 26.09, "learning_rate": 6.933333333333334e-05, "loss": 1.3546, "step": 94000 },
    { "epoch": 26.23, "learning_rate": 6.911111111111111e-05, "loss": 1.3535, "step": 94500 },
    { "epoch": 26.37, "learning_rate": 6.88888888888889e-05, "loss": 1.3526, "step": 95000 },
    { "epoch": 26.51, "learning_rate": 6.866666666666666e-05, "loss": 1.3531, "step": 95500 },
    { "epoch": 26.64, "learning_rate": 6.844444444444445e-05, "loss": 1.351, "step": 96000 },
    { "epoch": 26.78, "learning_rate": 6.822222222222222e-05, "loss": 1.3525, "step": 96500 },
    { "epoch": 26.92, "learning_rate": 6.800000000000001e-05, "loss": 1.3529, "step": 97000 },
    { "epoch": 27.06, "learning_rate": 6.777777777777778e-05, "loss": 1.3519, "step": 97500 },
    { "epoch": 27.2, "learning_rate": 6.755555555555557e-05, "loss": 1.3482, "step": 98000 },
    { "epoch": 27.34, "learning_rate": 6.733333333333333e-05, "loss": 1.349, "step": 98500 },
    { "epoch": 27.48, "learning_rate": 6.711111111111112e-05, "loss": 1.3478, "step": 99000 },
    { "epoch": 27.62, "learning_rate": 6.688888888888889e-05, "loss": 1.3488, "step": 99500 },
    { "epoch": 27.75, "learning_rate": 6.666666666666667e-05, "loss": 1.3481, "step": 100000 },
    { "epoch": 27.75, "eval_loss": 1.3101322650909424, "eval_runtime": 389.668, "eval_samples_per_second": 475.212, "eval_steps_per_second": 3.713, "step": 100000 },
    { "epoch": 27.89, "learning_rate": 6.644444444444444e-05, "loss": 1.347, "step": 100500 },
    { "epoch": 28.03, "learning_rate": 6.622222222222224e-05, "loss": 1.3488, "step": 101000 },
    { "epoch": 28.17, "learning_rate": 6.6e-05, "loss": 1.3447, "step": 101500 },
    { "epoch": 28.31, "learning_rate": 6.577777777777779e-05, "loss": 1.3465, "step": 102000 },
    { "epoch": 28.45, "learning_rate": 6.555555555555556e-05, "loss": 1.3428, "step": 102500 },
    { "epoch": 28.59, "learning_rate": 6.533333333333334e-05, "loss": 1.3442, "step": 103000 },
    { "epoch": 28.73, "learning_rate": 6.511111111111111e-05, "loss": 1.3478, "step": 103500 },
    { "epoch": 28.86, "learning_rate": 6.488888888888889e-05, "loss": 1.3437, "step": 104000 },
    { "epoch": 29.0, "learning_rate": 6.466666666666666e-05, "loss": 1.346, "step": 104500 },
    { "epoch": 29.14, "learning_rate": 6.444444444444446e-05, "loss": 1.3398, "step": 105000 },
    { "epoch": 29.28, "learning_rate": 6.422222222222223e-05, "loss": 1.3419, "step": 105500 },
    { "epoch": 29.42, "learning_rate": 6.400000000000001e-05, "loss": 1.3422, "step": 106000 },
    { "epoch": 29.56, "learning_rate": 6.377777777777778e-05, "loss": 1.3419, "step": 106500 },
    { "epoch": 29.7, "learning_rate": 6.355555555555556e-05, "loss": 1.3432, "step": 107000 },
    { "epoch": 29.84, "learning_rate": 6.333333333333333e-05, "loss": 1.3386, "step": 107500 },
    { "epoch": 29.97, "learning_rate": 6.311111111111112e-05, "loss": 1.3415, "step": 108000 },
    { "epoch": 30.11, "learning_rate": 6.28888888888889e-05, "loss": 1.3408, "step": 108500 },
    { "epoch": 30.25, "learning_rate": 6.266666666666667e-05, "loss": 1.3376, "step": 109000 },
    { "epoch": 30.39, "learning_rate": 6.244444444444445e-05, "loss": 1.3383, "step": 109500 },
    { "epoch": 30.53, "learning_rate": 6.222222222222222e-05, "loss": 1.3389, "step": 110000 },
    { "epoch": 30.67, "learning_rate": 6.2e-05, "loss": 1.3393, "step": 110500 },
    { "epoch": 30.81, "learning_rate": 6.177777777777779e-05, "loss": 1.3379, "step": 111000 },
    { "epoch": 30.95, "learning_rate": 6.155555555555555e-05, "loss": 1.3352, "step": 111500 },
    { "epoch": 31.09, "learning_rate": 6.133333333333334e-05, "loss": 1.3364, "step": 112000 },
    { "epoch": 31.22, "learning_rate": 6.111111111111112e-05, "loss": 1.3361, "step": 112500 },
    { "epoch": 31.22, "eval_loss": 1.2985332012176514, "eval_runtime": 389.5056, "eval_samples_per_second": 475.41, "eval_steps_per_second": 3.715, "step": 112500 },
    { "epoch": 31.36, "learning_rate": 6.08888888888889e-05, "loss": 1.3346, "step": 113000 },
    { "epoch": 31.5, "learning_rate": 6.066666666666667e-05, "loss": 1.334, "step": 113500 },
    { "epoch": 31.64, "learning_rate": 6.044444444444445e-05, "loss": 1.3318, "step": 114000 },
    { "epoch": 31.78, "learning_rate": 6.0222222222222225e-05, "loss": 1.3338, "step": 114500 },
    { "epoch": 31.92, "learning_rate": 6e-05, "loss": 1.3335, "step": 115000 },
    { "epoch": 32.06, "learning_rate": 5.977777777777778e-05, "loss": 1.3335, "step": 115500 },
    { "epoch": 32.2, "learning_rate": 5.9555555555555554e-05, "loss": 1.3305, "step": 116000 },
    { "epoch": 32.33, "learning_rate": 5.9333333333333343e-05, "loss": 1.3643, "step": 116500 },
    { "epoch": 32.47, "learning_rate": 5.911111111111112e-05, "loss": 1.411, "step": 117000 },
    { "epoch": 32.61, "learning_rate": 5.8888888888888896e-05, "loss": 1.3549, "step": 117500 },
    { "epoch": 32.75, "learning_rate": 5.866666666666667e-05, "loss": 1.3419, "step": 118000 },
    { "epoch": 32.89, "learning_rate": 5.844444444444445e-05, "loss": 1.3362, "step": 118500 },
    { "epoch": 33.03, "learning_rate": 5.8222222222222224e-05, "loss": 1.3355, "step": 119000 },
    { "epoch": 33.17, "learning_rate": 5.8e-05, "loss": 1.3308, "step": 119500 },
    { "epoch": 33.31, "learning_rate": 5.7777777777777776e-05, "loss": 1.33, "step": 120000 },
    { "epoch": 33.44, "learning_rate": 5.755555555555556e-05, "loss": 1.3318, "step": 120500 },
    { "epoch": 33.58, "learning_rate": 5.7333333333333336e-05, "loss": 1.3311, "step": 121000 },
    { "epoch": 33.72, "learning_rate": 5.711111111111112e-05, "loss": 1.3266, "step": 121500 },
    { "epoch": 33.86, "learning_rate": 5.6888888888888895e-05, "loss": 1.3258, "step": 122000 },
    { "epoch": 34.0, "learning_rate": 5.666666666666667e-05, "loss": 1.3266, "step": 122500 },
    { "epoch": 34.14, "learning_rate": 5.644444444444445e-05, "loss": 1.3279, "step": 123000 },
    { "epoch": 34.28, "learning_rate": 5.622222222222222e-05, "loss": 1.3238, "step": 123500 },
    { "epoch": 34.42, "learning_rate": 5.6000000000000006e-05, "loss": 1.3252, "step": 124000 },
    { "epoch": 34.55, "learning_rate": 5.577777777777778e-05, "loss": 1.3222, "step": 124500 },
    { "epoch": 34.69, "learning_rate": 5.555555555555556e-05, "loss": 1.3239, "step": 125000 },
    { "epoch": 34.69, "eval_loss": 1.2895334959030151, "eval_runtime": 389.8228, "eval_samples_per_second": 475.023, "eval_steps_per_second": 3.712, "step": 125000 },
    { "epoch": 34.83, "learning_rate": 5.5333333333333334e-05, "loss": 1.3251, "step": 125500 },
    { "epoch": 34.97, "learning_rate": 5.511111111111111e-05, "loss": 1.3228, "step": 126000 },
    { "epoch": 35.11, "learning_rate": 5.488888888888889e-05, "loss": 1.3226, "step": 126500 },
    { "epoch": 35.25, "learning_rate": 5.466666666666666e-05, "loss": 1.3197, "step": 127000 },
    { "epoch": 35.39, "learning_rate": 5.4444444444444446e-05, "loss": 1.3211, "step": 127500 },
    { "epoch": 35.53, "learning_rate": 5.422222222222223e-05, "loss": 1.3199, "step": 128000 },
    { "epoch": 35.66, "learning_rate": 5.4000000000000005e-05, "loss": 1.3192, "step": 128500 },
    { "epoch": 35.8, "learning_rate": 5.377777777777778e-05, "loss": 1.3211, "step": 129000 },
    { "epoch": 35.94, "learning_rate": 5.355555555555556e-05, "loss": 1.3209, "step": 129500 },
    { "epoch": 36.08, "learning_rate": 5.333333333333333e-05, "loss": 1.3198, "step": 130000 },
    { "epoch": 36.22, "learning_rate": 5.311111111111111e-05, "loss": 1.3151, "step": 130500 },
    { "epoch": 36.36, "learning_rate": 5.2888888888888885e-05, "loss": 1.3166, "step": 131000 },
    { "epoch": 36.5, "learning_rate": 5.266666666666666e-05, "loss": 1.315, "step": 131500 },
    { "epoch": 36.64, "learning_rate": 5.244444444444445e-05, "loss": 1.318, "step": 132000 },
    { "epoch": 36.77, "learning_rate": 5.222222222222223e-05, "loss": 1.3169, "step": 132500 },
    { "epoch": 36.91, "learning_rate": 5.2000000000000004e-05, "loss": 1.3195, "step": 133000 },
    { "epoch": 37.05, "learning_rate": 5.177777777777778e-05, "loss": 1.3146, "step": 133500 },
    { "epoch": 37.19, "learning_rate": 5.1555555555555556e-05, "loss": 1.3143, "step": 134000 },
    { "epoch": 37.33, "learning_rate": 5.133333333333333e-05, "loss": 1.3128, "step": 134500 },
    { "epoch": 37.47, "learning_rate": 5.111111111111111e-05, "loss": 1.3145, "step": 135000 },
    { "epoch": 37.61, "learning_rate": 5.0888888888888884e-05, "loss": 1.3125, "step": 135500 },
    { "epoch": 37.75, "learning_rate": 5.0666666666666674e-05, "loss": 1.3133, "step": 136000 },
    { "epoch": 37.89, "learning_rate": 5.044444444444445e-05, "loss": 1.3138, "step": 136500 },
    { "epoch": 38.02, "learning_rate": 5.0222222222222226e-05, "loss": 1.3132, "step": 137000 },
    { "epoch": 38.16, "learning_rate": 5e-05, "loss": 1.311, "step": 137500 },
    { "epoch": 38.16, "eval_loss": 1.2769075632095337, "eval_runtime": 389.676, "eval_samples_per_second": 475.202, "eval_steps_per_second": 3.713, "step": 137500 },
    { "epoch": 38.3, "learning_rate": 4.977777777777778e-05, "loss": 1.3099, "step": 138000 },
    { "epoch": 38.44, "learning_rate": 4.955555555555556e-05, "loss": 1.3104, "step": 138500 },
    { "epoch": 38.58, "learning_rate": 4.933333333333334e-05, "loss": 1.3105, "step": 139000 },
    { "epoch": 38.72, "learning_rate": 4.9111111111111114e-05, "loss": 1.3094, "step": 139500 },
    { "epoch": 38.86, "learning_rate": 4.888888888888889e-05, "loss": 1.3105, "step": 140000 },
    { "epoch": 39.0, "learning_rate": 4.866666666666667e-05, "loss": 1.312, "step": 140500 },
    { "epoch": 39.13, "learning_rate": 4.844444444444445e-05, "loss": 1.307, "step": 141000 },
    { "epoch": 39.27, "learning_rate": 4.8222222222222225e-05, "loss": 1.3071, "step": 141500 },
    { "epoch": 39.41, "learning_rate": 4.8e-05, "loss": 1.3073, "step": 142000 },
    { "epoch": 39.55, "learning_rate": 4.7777777777777784e-05, "loss": 1.309, "step": 142500 },
    { "epoch": 39.69, "learning_rate": 4.755555555555556e-05, "loss": 1.3068, "step": 143000 },
    { "epoch": 39.83, "learning_rate": 4.7333333333333336e-05, "loss": 1.3094, "step": 143500 },
    { "epoch": 39.97, "learning_rate": 4.711111111111111e-05, "loss": 1.3096, "step": 144000 },
    { "epoch": 40.11, "learning_rate": 4.6888888888888895e-05, "loss": 1.3067, "step": 144500 },
    { "epoch": 40.24, "learning_rate": 4.666666666666667e-05, "loss": 1.3065, "step": 145000 },
    { "epoch": 40.38, "learning_rate": 4.644444444444445e-05, "loss": 1.3088, "step": 145500 },
    { "epoch": 40.52, "learning_rate": 4.6222222222222224e-05, "loss": 1.3087, "step": 146000 },
    { "epoch": 40.66, "learning_rate": 4.600000000000001e-05, "loss": 1.3036, "step": 146500 },
    { "epoch": 40.8, "learning_rate": 4.577777777777778e-05, "loss": 1.3062, "step": 147000 },
    { "epoch": 40.94, "learning_rate": 4.555555555555556e-05, "loss": 1.3038, "step": 147500 },
    { "epoch": 41.08, "learning_rate": 4.5333333333333335e-05, "loss": 1.3024, "step": 148000 },
    { "epoch": 41.22, "learning_rate": 4.511111111111112e-05, "loss": 1.3034, "step": 148500 },
    { "epoch": 41.35, "learning_rate": 4.4888888888888894e-05, "loss": 1.3009, "step": 149000 },
    { "epoch": 41.49, "learning_rate": 4.466666666666667e-05, "loss": 1.3055, "step": 149500 },
    { "epoch": 41.63, "learning_rate": 4.4444444444444447e-05, "loss": 1.3053, "step": 150000 },
    { "epoch": 41.63, "eval_loss": 1.2693954706192017, "eval_runtime": 389.9924, "eval_samples_per_second": 474.817, "eval_steps_per_second": 3.71, "step": 150000 },
    { "epoch": 41.77, "learning_rate": 4.422222222222222e-05, "loss": 1.3025, "step": 150500 },
    { "epoch": 41.91, "learning_rate": 4.4000000000000006e-05, "loss": 1.3022, "step": 151000 },
    { "epoch": 42.05, "learning_rate": 4.377777777777778e-05, "loss": 1.301, "step": 151500 },
    { "epoch": 42.19, "learning_rate": 4.355555555555556e-05, "loss": 1.3016, "step": 152000 },
    { "epoch": 42.33, "learning_rate": 4.3333333333333334e-05, "loss": 1.3006, "step": 152500 },
    { "epoch": 42.46, "learning_rate": 4.311111111111111e-05, "loss": 1.3004, "step": 153000 },
    { "epoch": 42.6, "learning_rate": 4.2888888888888886e-05, "loss": 1.3038, "step": 153500 },
    { "epoch": 42.74, "learning_rate": 4.266666666666667e-05, "loss": 1.3015, "step": 154000 },
    { "epoch": 42.88, "learning_rate": 4.2444444444444445e-05, "loss": 1.3019, "step": 154500 },
    { "epoch": 43.02, "learning_rate": 4.222222222222222e-05, "loss": 1.2987, "step": 155000 },
    { "epoch": 43.16, "learning_rate": 4.2e-05, "loss": 1.2987, "step": 155500 },
    { "epoch": 43.3, "learning_rate": 4.177777777777778e-05, "loss": 1.2986, "step": 156000 },
    { "epoch": 43.44, "learning_rate": 4.155555555555556e-05, "loss": 1.2987, "step": 156500 },
    { "epoch": 43.57, "learning_rate": 4.133333333333333e-05, "loss": 1.2983, "step": 157000 },
    { "epoch": 43.71, "learning_rate": 4.111111111111111e-05, "loss": 1.2973, "step": 157500 },
    { "epoch": 43.85, "learning_rate": 4.088888888888889e-05, "loss": 1.2981, "step": 158000 },
    { "epoch": 43.99, "learning_rate": 4.066666666666667e-05, "loss": 1.299, "step": 158500 },
    { "epoch": 44.13, "learning_rate": 4.0444444444444444e-05, "loss": 1.2954, "step": 159000 },
    { "epoch": 44.27, "learning_rate": 4.022222222222222e-05, "loss": 1.2956, "step": 159500 },
    { "epoch": 44.41, "learning_rate": 4e-05, "loss": 1.297, "step": 160000 },
    { "epoch": 44.55, "learning_rate": 3.977777777777778e-05, "loss": 1.296, "step": 160500 },
    { "epoch": 44.68, "learning_rate": 3.9555555555555556e-05, "loss": 1.2948, "step": 161000 },
    { "epoch": 44.82, "learning_rate": 3.933333333333333e-05, "loss": 1.2971, "step": 161500 },
    { "epoch": 44.96, "learning_rate": 3.9111111111111115e-05, "loss": 1.2961, "step": 162000 },
    { "epoch": 45.1, "learning_rate": 3.888888888888889e-05, "loss": 1.2938, "step": 162500 },
    { "epoch": 45.1, "eval_loss": 1.2620575428009033, "eval_runtime": 389.8387, "eval_samples_per_second": 475.004, "eval_steps_per_second": 3.712, "step": 162500 },
    { "epoch": 45.24, "learning_rate": 3.866666666666667e-05, "loss": 1.2942, "step": 163000 },
    { "epoch": 45.38, "learning_rate": 3.844444444444444e-05, "loss": 1.2935, "step": 163500 },
    { "epoch": 45.52, "learning_rate": 3.8222222222222226e-05, "loss": 1.2943, "step": 164000 },
    { "epoch": 45.66, "learning_rate": 3.8e-05, "loss": 1.2943, "step": 164500 },
    { "epoch": 45.8, "learning_rate": 3.777777777777778e-05, "loss": 1.295, "step": 165000 },
    { "epoch": 45.93, "learning_rate": 3.7555555555555554e-05, "loss": 1.2935, "step": 165500 },
    { "epoch": 46.07, "learning_rate": 3.733333333333334e-05, "loss": 1.2928, "step": 166000 },
    { "epoch": 46.21, "learning_rate": 3.7111111111111113e-05, "loss": 1.2914, "step": 166500 },
    { "epoch": 46.35, "learning_rate": 3.688888888888889e-05, "loss": 1.2915, "step": 167000 },
    { "epoch": 46.49, "learning_rate": 3.6666666666666666e-05, "loss": 1.2893, "step": 167500 },
    { "epoch": 46.63, "learning_rate": 3.644444444444445e-05, "loss": 1.2918, "step": 168000 },
    { "epoch": 46.77, "learning_rate": 3.6222222222222225e-05, "loss": 1.2902, "step": 168500 },
    { "epoch": 46.91, "learning_rate": 3.6e-05, "loss": 1.2912, "step": 169000 },
    { "epoch": 47.04, "learning_rate": 3.577777777777778e-05, "loss": 1.2905, "step": 169500 },
    { "epoch": 47.18, "learning_rate": 3.555555555555556e-05, "loss": 1.2895, "step": 170000 },
    { "epoch": 47.32, "learning_rate": 3.5333333333333336e-05, "loss": 1.2896, "step": 170500 },
    { "epoch": 47.46, "learning_rate": 3.511111111111111e-05, "loss": 1.2887, "step": 171000 },
    { "epoch": 47.6, "learning_rate": 3.4888888888888895e-05, "loss": 1.2902, "step": 171500 },
    { "epoch": 47.74, "learning_rate": 3.466666666666667e-05, "loss": 1.2887, "step": 172000 },
    { "epoch": 47.88, "learning_rate": 3.444444444444445e-05, "loss": 1.2896, "step": 172500 },
    { "epoch": 48.02, "learning_rate": 3.4222222222222224e-05, "loss": 1.2886, "step": 173000 },
    { "epoch": 48.15, "learning_rate": 3.4000000000000007e-05, "loss": 1.288, "step": 173500 },
    { "epoch": 48.29, "learning_rate": 3.377777777777778e-05, "loss": 1.2859, "step": 174000 },
    { "epoch": 48.43, "learning_rate": 3.355555555555556e-05, "loss": 1.2868, "step": 174500 },
    { "epoch": 48.57, "learning_rate": 3.3333333333333335e-05, "loss": 1.2881, "step": 175000 },
    { "epoch": 48.57, "eval_loss": 1.256798505783081, "eval_runtime": 389.7693, "eval_samples_per_second": 475.089, "eval_steps_per_second": 3.712, "step": 175000 },
    { "epoch": 48.71, "learning_rate": 3.311111111111112e-05, "loss": 1.2844, "step": 175500 },
    { "epoch": 48.85, "learning_rate": 3.2888888888888894e-05, "loss": 1.2895, "step": 176000 },
    { "epoch": 48.99, "learning_rate": 3.266666666666667e-05, "loss": 1.2865, "step": 176500 },
    { "epoch": 49.13, "learning_rate": 3.2444444444444446e-05, "loss": 1.2867, "step": 177000 },
    { "epoch": 49.26, "learning_rate": 3.222222222222223e-05, "loss": 1.2841, "step": 177500 },
    { "epoch": 49.4, "learning_rate": 3.2000000000000005e-05, "loss": 1.2861, "step": 178000 },
    { "epoch": 49.54, "learning_rate": 3.177777777777778e-05, "loss": 1.2844, "step": 178500 },
    { "epoch": 49.68, "learning_rate": 3.155555555555556e-05, "loss": 1.285, "step": 179000 },
    { "epoch": 49.82, "learning_rate": 3.1333333333333334e-05, "loss": 1.2813, "step": 179500 },
    { "epoch": 49.96, "learning_rate": 3.111111111111111e-05, "loss": 1.2845, "step": 180000 },
    { "epoch": 50.1, "learning_rate": 3.088888888888889e-05, "loss": 1.2847, "step": 180500 },
    { "epoch": 50.24, "learning_rate": 3.066666666666667e-05, "loss": 1.2833, "step": 181000 },
    { "epoch": 50.37, "learning_rate": 3.044444444444445e-05, "loss": 1.2814, "step": 181500 },
    { "epoch": 50.51, "learning_rate": 3.0222222222222225e-05, "loss": 1.2822, "step": 182000 },
    { "epoch": 50.65, "learning_rate": 3e-05, "loss": 1.2817, "step": 182500 },
    { "epoch": 50.79, "learning_rate": 2.9777777777777777e-05, "loss": 1.2839, "step": 183000 },
    { "epoch": 50.93, "learning_rate": 2.955555555555556e-05, "loss": 1.2827, "step": 183500 },
    { "epoch": 51.07, "learning_rate": 2.9333333333333336e-05, "loss": 1.2821, "step": 184000 },
    { "epoch": 51.21, "learning_rate": 2.9111111111111112e-05, "loss": 1.2804, "step": 184500 },
    { "epoch": 51.35, "learning_rate": 2.8888888888888888e-05, "loss": 1.2796, "step": 185000 },
    { "epoch": 51.48, "learning_rate": 2.8666666666666668e-05, "loss": 1.2791, "step": 185500 },
    { "epoch": 51.62, "learning_rate": 2.8444444444444447e-05, "loss": 1.2813, "step": 186000 },
    { "epoch": 51.76, "learning_rate": 2.8222222222222223e-05, "loss": 1.2825, "step": 186500 },
    { "epoch": 51.9, "learning_rate": 2.8000000000000003e-05, "loss": 1.2804, "step": 187000 },
    { "epoch": 52.04, "learning_rate": 2.777777777777778e-05, "loss": 1.2805, "step": 187500 },
    { "epoch": 52.04, "eval_loss": 1.2479686737060547, "eval_runtime": 389.4325, "eval_samples_per_second": 475.5, "eval_steps_per_second": 3.716, "step": 187500 },
    { "epoch": 52.18, "learning_rate": 2.7555555555555555e-05, "loss": 1.2787, "step": 188000 },
    { "epoch": 52.32, "learning_rate": 2.733333333333333e-05, "loss": 1.279, "step": 188500 },
    { "epoch": 52.46, "learning_rate": 2.7111111111111114e-05, "loss": 1.2779, "step": 189000 },
    { "epoch": 52.6, "learning_rate": 2.688888888888889e-05, "loss": 1.2805, "step": 189500 },
    { "epoch": 52.73, "learning_rate": 2.6666666666666667e-05, "loss": 1.2775, "step": 190000 },
    { "epoch": 52.87, "learning_rate": 2.6444444444444443e-05, "loss": 1.2778, "step": 190500 },
    { "epoch": 53.01, "learning_rate": 2.6222222222222226e-05, "loss": 1.2809, "step": 191000 },
    { "epoch": 53.15, "learning_rate": 2.6000000000000002e-05, "loss": 1.2754, "step": 191500 },
    { "epoch": 53.29, "learning_rate": 2.5777777777777778e-05, "loss": 1.2774, "step": 192000 },
    { "epoch": 53.43, "learning_rate": 2.5555555555555554e-05, "loss": 1.2753, "step": 192500 },
    { "epoch": 53.57, "learning_rate": 2.5333333333333337e-05, "loss": 1.2766, "step": 193000 },
    { "epoch": 53.71, "learning_rate": 2.5111111111111113e-05, "loss": 1.2777, "step": 193500 },
    { "epoch": 53.84, "learning_rate": 2.488888888888889e-05, "loss": 1.2758, "step": 194000 },
    { "epoch": 53.98, "learning_rate": 2.466666666666667e-05, "loss": 1.2771, "step": 194500 },
    { "epoch": 54.12, "learning_rate": 2.4444444444444445e-05, "loss": 1.2758, "step": 195000 },
    { "epoch": 54.26, "learning_rate": 2.4222222222222224e-05, "loss": 1.2764, "step": 195500 },
    { "epoch": 54.4, "learning_rate": 2.4e-05, "loss": 1.2743, "step": 196000 },
    { "epoch": 54.54, "learning_rate": 2.377777777777778e-05, "loss": 1.2718, "step": 196500 },
    { "epoch": 54.68, "learning_rate": 2.3555555555555556e-05, "loss": 1.2748, "step": 197000 },
    { "epoch": 54.82, "learning_rate": 2.3333333333333336e-05, "loss": 1.2754, "step": 197500 },
    { "epoch": 54.95, "learning_rate": 2.3111111111111112e-05, "loss": 1.2737, "step": 198000 },
    { "epoch": 55.09, "learning_rate": 2.288888888888889e-05, "loss": 1.2753, "step": 198500 },
    { "epoch": 55.23, "learning_rate": 2.2666666666666668e-05, "loss": 1.2731, "step": 199000 },
    { "epoch": 55.37, "learning_rate": 2.2444444444444447e-05, "loss": 1.2736, "step": 199500 },
    { "epoch": 55.51, "learning_rate": 2.2222222222222223e-05, "loss": 1.2741, "step": 200000 },
    { "epoch": 55.51, "eval_loss": 1.2437978982925415, "eval_runtime": 390.05, "eval_samples_per_second": 474.747, "eval_steps_per_second": 3.71, "step": 200000 },
    { "epoch": 55.65, "learning_rate": 2.2000000000000003e-05, "loss": 1.2728, "step": 200500 },
    { "epoch": 55.79, "learning_rate": 2.177777777777778e-05, "loss": 1.2728, "step": 201000 },
    { "epoch": 55.93, "learning_rate": 2.1555555555555555e-05, "loss": 1.2725, "step": 201500 },
    { "epoch": 56.06, "learning_rate": 2.1333333333333335e-05, "loss": 1.2728, "step": 202000 },
    { "epoch": 56.2, "learning_rate": 2.111111111111111e-05, "loss": 1.2701, "step": 202500 },
    { "epoch": 56.34, "learning_rate": 2.088888888888889e-05, "loss": 1.2703, "step": 203000 },
    { "epoch": 56.48, "learning_rate": 2.0666666666666666e-05, "loss": 1.2709, "step": 203500 },
    { "epoch": 56.62, "learning_rate": 2.0444444444444446e-05, "loss": 1.2699, "step": 204000 },
    { "epoch": 56.76, "learning_rate": 2.0222222222222222e-05, "loss": 1.2702, "step": 204500 },
    { "epoch": 56.9, "learning_rate": 2e-05, "loss": 1.273, "step": 205000 },
    { "epoch": 57.04, "learning_rate": 1.9777777777777778e-05, "loss": 1.2715, "step": 205500 },
    { "epoch": 57.17, "learning_rate": 1.9555555555555557e-05, "loss": 1.2683, "step": 206000 },
    { "epoch": 57.31, "learning_rate": 1.9333333333333333e-05, "loss": 1.2681, "step": 206500 },
    { "epoch": 57.45, "learning_rate": 1.9111111111111113e-05, "loss": 1.2697, "step": 207000 },
    { "epoch": 57.59, "learning_rate": 1.888888888888889e-05, "loss": 1.2682, "step": 207500 },
    { "epoch": 57.73, "learning_rate": 1.866666666666667e-05, "loss": 1.2686, "step": 208000 },
    { "epoch": 57.87, "learning_rate": 1.8444444444444445e-05, "loss": 1.2701, "step": 208500 },
    { "epoch": 58.01, "learning_rate": 1.8222222222222224e-05, "loss": 1.2676, "step": 209000 },
    { "epoch": 58.15, "learning_rate": 1.8e-05, "loss": 1.2668, "step": 209500 },
    { "epoch": 58.28, "learning_rate": 1.777777777777778e-05, "loss": 1.2681, "step": 210000 },
    { "epoch": 58.42, "learning_rate": 1.7555555555555556e-05, "loss": 1.2676, "step": 210500 },
    { "epoch": 58.56, "learning_rate": 1.7333333333333336e-05, "loss": 1.2676, "step": 211000 },
    { "epoch": 58.7, "learning_rate": 1.7111111111111112e-05, "loss": 1.2697, "step": 211500 },
    { "epoch": 58.84, "learning_rate": 1.688888888888889e-05, "loss": 1.2666, "step": 212000 },
    { "epoch": 58.98, "learning_rate": 1.6666666666666667e-05, "loss": 1.2664, "step": 212500 },
    { "epoch": 58.98, "eval_loss": 1.239187240600586, "eval_runtime": 389.678, "eval_samples_per_second": 475.2, "eval_steps_per_second": 3.713, "step": 212500 },
    { "epoch": 59.12, "learning_rate": 1.6444444444444447e-05, "loss": 1.2661, "step": 213000 },
    { "epoch": 59.26, "learning_rate": 1.6222222222222223e-05, "loss": 1.2643, "step": 213500 },
    { "epoch": 59.39, "learning_rate": 1.6000000000000003e-05, "loss": 1.2661, "step": 214000 },
    { "epoch": 59.53, "learning_rate": 1.577777777777778e-05, "loss": 1.2664, "step": 214500 },
    { "epoch": 59.67, "learning_rate": 1.5555555555555555e-05, "loss": 1.2662, "step": 215000 },
    { "epoch": 59.81, "learning_rate": 1.5333333333333334e-05, "loss": 1.2672, "step": 215500 },
    { "epoch": 59.95, "learning_rate": 1.5111111111111112e-05, "loss": 1.264, "step": 216000 },
    { "epoch": 60.09, "learning_rate": 1.4888888888888888e-05, "loss": 1.2655, "step": 216500 },
    { "epoch": 60.23, "learning_rate": 1.4666666666666668e-05, "loss": 1.2644, "step": 217000 },
    { "epoch": 60.37, "learning_rate": 1.4444444444444444e-05, "loss": 1.2661, "step": 217500 },
    { "epoch": 60.51, "learning_rate": 1.4222222222222224e-05, "loss": 1.2652, "step": 218000 },
    { "epoch": 60.64, "learning_rate": 1.4000000000000001e-05, "loss": 1.2637, "step": 218500 },
    { "epoch": 60.78, "learning_rate": 1.3777777777777778e-05, "loss": 1.2657, "step": 219000 },
    { "epoch": 60.92, "learning_rate": 1.3555555555555557e-05, "loss": 1.2656, "step": 219500 },
    { "epoch": 61.06, "learning_rate": 1.3333333333333333e-05, "loss": 1.2626, "step": 220000 },
    { "epoch": 61.2, "learning_rate": 1.3111111111111113e-05, "loss": 1.2646, "step": 220500 },
    { "epoch": 61.34, "learning_rate": 1.2888888888888889e-05, "loss": 1.2627, "step": 221000 },
    { "epoch": 61.48, "learning_rate": 1.2666666666666668e-05, "loss": 1.2633, "step": 221500 },
    { "epoch": 61.62, "learning_rate": 1.2444444444444445e-05, "loss": 1.2641, "step": 222000 },
    { "epoch": 61.75, "learning_rate": 1.2222222222222222e-05, "loss": 1.2616, "step": 222500 },
    { "epoch": 61.89, "learning_rate": 1.2e-05, "loss": 1.2614, "step": 223000 },
    { "epoch": 62.03, "learning_rate": 1.1777777777777778e-05, "loss": 1.2615, "step": 223500 },
    { "epoch": 62.17, "learning_rate": 1.1555555555555556e-05, "loss": 1.2621, "step": 224000 },
    { "epoch": 62.31, "learning_rate": 1.1333333333333334e-05, "loss": 1.2613, "step": 224500 },
    { "epoch": 62.45, "learning_rate": 1.1111111111111112e-05, "loss": 1.261, "step": 225000 },
| { | |
| "epoch": 62.45, | |
| "eval_loss": 1.2334364652633667, | |
| "eval_runtime": 389.7373, | |
| "eval_samples_per_second": 475.128, | |
| "eval_steps_per_second": 3.713, | |
| "step": 225000 | |
| }, | |
| { | |
| "epoch": 62.59, | |
| "learning_rate": 1.088888888888889e-05, | |
| "loss": 1.2617, | |
| "step": 225500 | |
| }, | |
| { | |
| "epoch": 62.73, | |
| "learning_rate": 1.0666666666666667e-05, | |
| "loss": 1.2586, | |
| "step": 226000 | |
| }, | |
| { | |
| "epoch": 62.86, | |
| "learning_rate": 1.0444444444444445e-05, | |
| "loss": 1.2615, | |
| "step": 226500 | |
| }, | |
| { | |
| "epoch": 63.0, | |
| "learning_rate": 1.0222222222222223e-05, | |
| "loss": 1.2628, | |
| "step": 227000 | |
| }, | |
| { | |
| "epoch": 63.14, | |
| "learning_rate": 1e-05, | |
| "loss": 1.2611, | |
| "step": 227500 | |
| }, | |
| { | |
| "epoch": 63.28, | |
| "learning_rate": 9.777777777777779e-06, | |
| "loss": 1.2602, | |
| "step": 228000 | |
| }, | |
| { | |
| "epoch": 63.42, | |
| "learning_rate": 9.555555555555556e-06, | |
| "loss": 1.2596, | |
| "step": 228500 | |
| }, | |
| { | |
| "epoch": 63.56, | |
| "learning_rate": 9.333333333333334e-06, | |
| "loss": 1.2619, | |
| "step": 229000 | |
| }, | |
| { | |
| "epoch": 63.7, | |
| "learning_rate": 9.111111111111112e-06, | |
| "loss": 1.2614, | |
| "step": 229500 | |
| }, | |
| { | |
| "epoch": 63.84, | |
| "learning_rate": 8.88888888888889e-06, | |
| "loss": 1.2612, | |
| "step": 230000 | |
| }, | |
| { | |
| "epoch": 63.97, | |
| "learning_rate": 8.666666666666668e-06, | |
| "loss": 1.2606, | |
| "step": 230500 | |
| }, | |
| { | |
| "epoch": 64.11, | |
| "learning_rate": 8.444444444444446e-06, | |
| "loss": 1.2582, | |
| "step": 231000 | |
| }, | |
| { | |
| "epoch": 64.25, | |
| "learning_rate": 8.222222222222223e-06, | |
| "loss": 1.2594, | |
| "step": 231500 | |
| }, | |
| { | |
| "epoch": 64.39, | |
| "learning_rate": 8.000000000000001e-06, | |
| "loss": 1.2587, | |
| "step": 232000 | |
| }, | |
| { | |
| "epoch": 64.53, | |
| "learning_rate": 7.777777777777777e-06, | |
| "loss": 1.2576, | |
| "step": 232500 | |
| }, | |
| { | |
| "epoch": 64.67, | |
| "learning_rate": 7.555555555555556e-06, | |
| "loss": 1.2588, | |
| "step": 233000 | |
| }, | |
| { | |
| "epoch": 64.81, | |
| "learning_rate": 7.333333333333334e-06, | |
| "loss": 1.2588, | |
| "step": 233500 | |
| }, | |
| { | |
| "epoch": 64.95, | |
| "learning_rate": 7.111111111111112e-06, | |
| "loss": 1.2573, | |
| "step": 234000 | |
| }, | |
| { | |
| "epoch": 65.08, | |
| "learning_rate": 6.888888888888889e-06, | |
| "loss": 1.2585, | |
| "step": 234500 | |
| }, | |
| { | |
| "epoch": 65.22, | |
| "learning_rate": 6.666666666666667e-06, | |
| "loss": 1.2576, | |
| "step": 235000 | |
| }, | |
| { | |
| "epoch": 65.36, | |
| "learning_rate": 6.4444444444444445e-06, | |
| "loss": 1.2566, | |
| "step": 235500 | |
| }, | |
| { | |
| "epoch": 65.5, | |
| "learning_rate": 6.222222222222222e-06, | |
| "loss": 1.2569, | |
| "step": 236000 | |
| }, | |
| { | |
| "epoch": 65.64, | |
| "learning_rate": 6e-06, | |
| "loss": 1.258, | |
| "step": 236500 | |
| }, | |
| { | |
| "epoch": 65.78, | |
| "learning_rate": 5.777777777777778e-06, | |
| "loss": 1.2567, | |
| "step": 237000 | |
| }, | |
| { | |
| "epoch": 65.92, | |
| "learning_rate": 5.555555555555556e-06, | |
| "loss": 1.2568, | |
| "step": 237500 | |
| }, | |
| { | |
| "epoch": 65.92, | |
| "eval_loss": 1.2293188571929932, | |
| "eval_runtime": 389.8442, | |
| "eval_samples_per_second": 474.997, | |
| "eval_steps_per_second": 3.712, | |
| "step": 237500 | |
| }, | |
| { | |
| "epoch": 66.06, | |
| "learning_rate": 5.333333333333334e-06, | |
| "loss": 1.2559, | |
| "step": 238000 | |
| }, | |
| { | |
| "epoch": 66.19, | |
| "learning_rate": 5.1111111111111115e-06, | |
| "loss": 1.2555, | |
| "step": 238500 | |
| }, | |
| { | |
| "epoch": 66.33, | |
| "learning_rate": 4.888888888888889e-06, | |
| "loss": 1.2585, | |
| "step": 239000 | |
| }, | |
| { | |
| "epoch": 66.47, | |
| "learning_rate": 4.666666666666667e-06, | |
| "loss": 1.257, | |
| "step": 239500 | |
| }, | |
| { | |
| "epoch": 66.61, | |
| "learning_rate": 4.444444444444445e-06, | |
| "loss": 1.255, | |
| "step": 240000 | |
| }, | |
| { | |
| "epoch": 66.75, | |
| "learning_rate": 4.222222222222223e-06, | |
| "loss": 1.256, | |
| "step": 240500 | |
| }, | |
| { | |
| "epoch": 66.89, | |
| "learning_rate": 4.000000000000001e-06, | |
| "loss": 1.2555, | |
| "step": 241000 | |
| }, | |
| { | |
| "epoch": 67.03, | |
| "learning_rate": 3.777777777777778e-06, | |
| "loss": 1.2563, | |
| "step": 241500 | |
| }, | |
| { | |
| "epoch": 67.17, | |
| "learning_rate": 3.555555555555556e-06, | |
| "loss": 1.2528, | |
| "step": 242000 | |
| }, | |
| { | |
| "epoch": 67.31, | |
| "learning_rate": 3.3333333333333333e-06, | |
| "loss": 1.2566, | |
| "step": 242500 | |
| }, | |
| { | |
| "epoch": 67.44, | |
| "learning_rate": 3.111111111111111e-06, | |
| "loss": 1.2557, | |
| "step": 243000 | |
| }, | |
| { | |
| "epoch": 67.58, | |
| "learning_rate": 2.888888888888889e-06, | |
| "loss": 1.2521, | |
| "step": 243500 | |
| }, | |
| { | |
| "epoch": 67.72, | |
| "learning_rate": 2.666666666666667e-06, | |
| "loss": 1.2535, | |
| "step": 244000 | |
| }, | |
| { | |
| "epoch": 67.86, | |
| "learning_rate": 2.4444444444444447e-06, | |
| "loss": 1.2559, | |
| "step": 244500 | |
| }, | |
| { | |
| "epoch": 68.0, | |
| "learning_rate": 2.2222222222222225e-06, | |
| "loss": 1.2534, | |
| "step": 245000 | |
| }, | |
| { | |
| "epoch": 68.14, | |
| "learning_rate": 2.0000000000000003e-06, | |
| "loss": 1.2562, | |
| "step": 245500 | |
| }, | |
| { | |
| "epoch": 68.28, | |
| "learning_rate": 1.777777777777778e-06, | |
| "loss": 1.2532, | |
| "step": 246000 | |
| }, | |
| { | |
| "epoch": 68.42, | |
| "learning_rate": 1.5555555555555556e-06, | |
| "loss": 1.2523, | |
| "step": 246500 | |
| }, | |
| { | |
| "epoch": 68.55, | |
| "learning_rate": 1.3333333333333334e-06, | |
| "loss": 1.2564, | |
| "step": 247000 | |
| }, | |
| { | |
| "epoch": 68.69, | |
| "learning_rate": 1.1111111111111112e-06, | |
| "loss": 1.2539, | |
| "step": 247500 | |
| }, | |
| { | |
| "epoch": 68.83, | |
| "learning_rate": 8.88888888888889e-07, | |
| "loss": 1.2529, | |
| "step": 248000 | |
| }, | |
| { | |
| "epoch": 68.97, | |
| "learning_rate": 6.666666666666667e-07, | |
| "loss": 1.2523, | |
| "step": 248500 | |
| }, | |
| { | |
| "epoch": 69.11, | |
| "learning_rate": 4.444444444444445e-07, | |
| "loss": 1.2537, | |
| "step": 249000 | |
| }, | |
| { | |
| "epoch": 69.25, | |
| "learning_rate": 2.2222222222222224e-07, | |
| "loss": 1.2548, | |
| "step": 249500 | |
| }, | |
| { | |
| "epoch": 69.39, | |
| "learning_rate": 0.0, | |
| "loss": 1.2535, | |
| "step": 250000 | |
| }, | |
| { | |
| "epoch": 69.39, | |
| "eval_loss": 1.226332664489746, | |
| "eval_runtime": 389.7782, | |
| "eval_samples_per_second": 475.078, | |
| "eval_steps_per_second": 3.712, | |
| "step": 250000 | |
| } | |
| ], | |
| "max_steps": 250000, | |
| "num_train_epochs": 70, | |
| "total_flos": 4.212538057807479e+18, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
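
The file above is a Hugging Face Transformers `trainer_state.json`: `log_history` interleaves training records (keyed by `loss`) with periodic evaluation records (keyed by `eval_loss`), and the trailing fields summarize the run's endpoint (250,000 of 250,000 steps, roughly 69.39 of 70 epochs). Below is a minimal sketch for inspecting such a file offline; it is not part of the Transformers API. The flat file path and the matplotlib dependency are assumptions for illustration, though `trainer_state.json` is the Trainer's default filename inside each checkpoint directory.

```python
import json

# Load the state file (Trainer writes it inside each checkpoint directory;
# the flat path used here is an assumption for illustration).
with open("trainer_state.json") as f:
    state = json.load(f)

# log_history mixes two record shapes: training logs carry "loss",
# evaluation logs carry "eval_loss"; both carry "step" and "epoch".
train = [r for r in state["log_history"] if "loss" in r]
evals = [r for r in state["log_history"] if "eval_loss" in r]

print(f"trained {state['global_step']}/{state['max_steps']} steps "
      f"({state['epoch']:.2f} of {state['num_train_epochs']} epochs)")
print(f"final train loss: {train[-1]['loss']:.4f}")
print(f"final eval loss:  {evals[-1]['eval_loss']:.4f}")

# Optional: plot both curves against the optimizer step
# (assumes matplotlib is installed).
import matplotlib.pyplot as plt

plt.plot([r["step"] for r in train], [r["loss"] for r in train], label="train")
plt.plot([r["step"] for r in evals], [r["eval_loss"] for r in evals],
         marker="o", label="eval")
plt.xlabel("step")
plt.ylabel("loss")
plt.legend()
plt.savefig("loss_curve.png")
```

On this log, the script would report a final training loss of 1.2535 and a final eval loss of 1.2263; the linear learning-rate decay to 0.0 at `max_steps` is visible in the `learning_rate` field of each training record.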