{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9465648854961832,
  "eval_steps": 500,
  "global_step": 25500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 2.5e-06,
      "loss": 31.9742,
      "step": 50
    },
    {
      "epoch": 0.01,
      "learning_rate": 5e-06,
      "loss": 32.5191,
      "step": 100
    },
    {
      "epoch": 0.01,
      "learning_rate": 7.5e-06,
      "loss": 31.2118,
      "step": 150
    },
    {
      "epoch": 0.02,
      "learning_rate": 1e-05,
      "loss": 28.9569,
      "step": 200
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.25e-05,
      "loss": 27.1361,
      "step": 250
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.5e-05,
      "loss": 24.7898,
      "step": 300
    },
    {
      "epoch": 0.03,
      "learning_rate": 1.75e-05,
      "loss": 22.545,
      "step": 350
    },
    {
      "epoch": 0.03,
      "learning_rate": 2e-05,
      "loss": 21.4837,
      "step": 400
    },
    {
      "epoch": 0.03,
      "learning_rate": 2.25e-05,
      "loss": 18.8854,
      "step": 450
    },
    {
      "epoch": 0.04,
      "learning_rate": 2.5e-05,
      "loss": 16.722,
      "step": 500
    },
    {
      "epoch": 0.04,
      "learning_rate": 2.7500000000000004e-05,
      "loss": 14.4161,
      "step": 550
    },
    {
      "epoch": 0.05,
      "learning_rate": 3e-05,
      "loss": 11.8861,
      "step": 600
    },
    {
      "epoch": 0.05,
      "learning_rate": 3.2500000000000004e-05,
      "loss": 9.5285,
      "step": 650
    },
    {
      "epoch": 0.05,
      "learning_rate": 3.5e-05,
      "loss": 7.503,
      "step": 700
    },
    {
      "epoch": 0.06,
      "learning_rate": 3.7500000000000003e-05,
      "loss": 6.355,
      "step": 750
    },
    {
      "epoch": 0.06,
      "learning_rate": 4e-05,
      "loss": 5.2049,
      "step": 800
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.25e-05,
      "loss": 4.4318,
      "step": 850
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.5e-05,
      "loss": 3.852,
      "step": 900
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.75e-05,
      "loss": 3.4899,
      "step": 950
    },
    {
      "epoch": 0.08,
      "learning_rate": 5e-05,
      "loss": 3.3489,
      "step": 1000
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.990079365079365e-05,
      "loss": 3.1161,
      "step": 1050
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.9801587301587306e-05,
      "loss": 2.9771,
      "step": 1100
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.9702380952380955e-05,
      "loss": 2.9968,
      "step": 1150
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.960317460317461e-05,
      "loss": 2.9209,
      "step": 1200
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.950396825396826e-05,
      "loss": 2.8767,
      "step": 1250
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.940476190476191e-05,
      "loss": 2.8769,
      "step": 1300
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.930555555555556e-05,
      "loss": 2.8317,
      "step": 1350
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.9206349206349204e-05,
      "loss": 2.8225,
      "step": 1400
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.910714285714286e-05,
      "loss": 2.6981,
      "step": 1450
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.900793650793651e-05,
      "loss": 2.7759,
      "step": 1500
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.8908730158730156e-05,
      "loss": 2.631,
      "step": 1550
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.880952380952381e-05,
      "loss": 2.693,
      "step": 1600
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.871031746031746e-05,
      "loss": 2.6437,
      "step": 1650
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.8611111111111115e-05,
      "loss": 2.5367,
      "step": 1700
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.8511904761904764e-05,
      "loss": 2.6009,
      "step": 1750
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.841269841269841e-05,
      "loss": 2.6043,
      "step": 1800
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.831349206349207e-05,
      "loss": 2.4894,
      "step": 1850
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.8214285714285716e-05,
      "loss": 2.5902,
      "step": 1900
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.811507936507937e-05,
      "loss": 2.5836,
      "step": 1950
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.801587301587302e-05,
      "loss": 2.604,
      "step": 2000
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.791666666666667e-05,
      "loss": 2.4979,
      "step": 2050
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.781746031746032e-05,
      "loss": 2.6036,
      "step": 2100
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.771825396825397e-05,
      "loss": 2.4106,
      "step": 2150
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.761904761904762e-05,
      "loss": 2.4927,
      "step": 2200
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.751984126984127e-05,
      "loss": 2.6297,
      "step": 2250
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.7420634920634924e-05,
      "loss": 2.4696,
      "step": 2300
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.732142857142857e-05,
      "loss": 2.5137,
      "step": 2350
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.722222222222222e-05,
      "loss": 2.4031,
      "step": 2400
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.7123015873015876e-05,
      "loss": 2.5041,
      "step": 2450
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.7023809523809525e-05,
      "loss": 2.4883,
      "step": 2500
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.692460317460317e-05,
      "loss": 2.4844,
      "step": 2550
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.682539682539683e-05,
      "loss": 2.4495,
      "step": 2600
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.672619047619048e-05,
      "loss": 2.4946,
      "step": 2650
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.662698412698413e-05,
      "loss": 2.5235,
      "step": 2700
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.652777777777778e-05,
      "loss": 2.443,
      "step": 2750
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.642857142857143e-05,
      "loss": 2.5334,
      "step": 2800
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.6329365079365085e-05,
      "loss": 2.4367,
      "step": 2850
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.623015873015873e-05,
      "loss": 2.5123,
      "step": 2900
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.613095238095239e-05,
      "loss": 2.3548,
      "step": 2950
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.603174603174603e-05,
      "loss": 2.4387,
      "step": 3000
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.5932539682539685e-05,
      "loss": 2.3441,
      "step": 3050
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.5833333333333334e-05,
      "loss": 2.3135,
      "step": 3100
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.573412698412698e-05,
      "loss": 2.3535,
      "step": 3150
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.563492063492064e-05,
      "loss": 2.3418,
      "step": 3200
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.5535714285714286e-05,
      "loss": 2.466,
      "step": 3250
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.543650793650794e-05,
      "loss": 2.3942,
      "step": 3300
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.533730158730159e-05,
      "loss": 2.3981,
      "step": 3350
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.523809523809524e-05,
      "loss": 2.4964,
      "step": 3400
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.5138888888888894e-05,
      "loss": 2.4041,
      "step": 3450
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.503968253968254e-05,
      "loss": 2.3344,
      "step": 3500
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.494047619047619e-05,
      "loss": 2.4342,
      "step": 3550
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.4841269841269846e-05,
      "loss": 2.3132,
      "step": 3600
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.4742063492063494e-05,
      "loss": 2.3755,
      "step": 3650
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.464285714285715e-05,
      "loss": 2.3009,
      "step": 3700
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.45436507936508e-05,
      "loss": 2.3669,
      "step": 3750
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.4444444444444447e-05,
      "loss": 2.2948,
      "step": 3800
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.4345238095238095e-05,
      "loss": 2.3885,
      "step": 3850
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.4246031746031744e-05,
      "loss": 2.3917,
      "step": 3900
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.41468253968254e-05,
      "loss": 2.3107,
      "step": 3950
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.404761904761905e-05,
      "loss": 2.2456,
      "step": 4000
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.39484126984127e-05,
      "loss": 2.4836,
      "step": 4050
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.384920634920635e-05,
      "loss": 2.348,
      "step": 4100
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.375e-05,
      "loss": 2.2756,
      "step": 4150
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.3650793650793655e-05,
      "loss": 2.3311,
      "step": 4200
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.35515873015873e-05,
      "loss": 2.3488,
      "step": 4250
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.345238095238096e-05,
      "loss": 2.3493,
      "step": 4300
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.335317460317461e-05,
      "loss": 2.374,
      "step": 4350
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.3253968253968256e-05,
      "loss": 2.3459,
      "step": 4400
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.315476190476191e-05,
      "loss": 2.3194,
      "step": 4450
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.305555555555556e-05,
      "loss": 2.3001,
      "step": 4500
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.295634920634921e-05,
      "loss": 2.3023,
      "step": 4550
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.2857142857142856e-05,
      "loss": 2.3394,
      "step": 4600
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.2757936507936505e-05,
      "loss": 2.3859,
      "step": 4650
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.265873015873016e-05,
      "loss": 2.2853,
      "step": 4700
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.255952380952381e-05,
      "loss": 2.3349,
      "step": 4750
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.2460317460317464e-05,
      "loss": 2.2621,
      "step": 4800
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.236111111111111e-05,
      "loss": 2.3776,
      "step": 4850
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.226190476190476e-05,
      "loss": 2.2746,
      "step": 4900
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.2162698412698416e-05,
      "loss": 2.3579,
      "step": 4950
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.2063492063492065e-05,
      "loss": 2.3256,
      "step": 5000
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.196428571428572e-05,
      "loss": 2.3321,
      "step": 5050
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.186507936507937e-05,
      "loss": 2.2774,
      "step": 5100
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.176587301587302e-05,
      "loss": 2.1933,
      "step": 5150
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.166666666666667e-05,
      "loss": 2.3089,
      "step": 5200
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.156746031746032e-05,
      "loss": 2.3038,
      "step": 5250
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.1468253968253976e-05,
      "loss": 2.327,
      "step": 5300
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.136904761904762e-05,
      "loss": 2.2387,
      "step": 5350
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.126984126984127e-05,
      "loss": 2.2992,
      "step": 5400
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.117063492063492e-05,
      "loss": 2.3167,
      "step": 5450
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.107142857142857e-05,
      "loss": 2.2835,
      "step": 5500
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.0972222222222225e-05,
      "loss": 2.2788,
      "step": 5550
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.0873015873015874e-05,
      "loss": 2.2814,
      "step": 5600
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.077380952380952e-05,
      "loss": 2.2687,
      "step": 5650
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.067460317460318e-05,
      "loss": 2.2743,
      "step": 5700
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.0575396825396826e-05,
      "loss": 2.3038,
      "step": 5750
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.047619047619048e-05,
      "loss": 2.2646,
      "step": 5800
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.037698412698413e-05,
      "loss": 2.2602,
      "step": 5850
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.027777777777778e-05,
      "loss": 2.3499,
      "step": 5900
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.017857142857143e-05,
      "loss": 2.2881,
      "step": 5950
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.007936507936508e-05,
      "loss": 2.277,
      "step": 6000
    },
    {
      "epoch": 0.46,
      "learning_rate": 3.998015873015874e-05,
      "loss": 2.1779,
      "step": 6050
    },
    {
      "epoch": 0.47,
      "learning_rate": 3.9880952380952386e-05,
      "loss": 2.1701,
      "step": 6100
    },
    {
      "epoch": 0.47,
      "learning_rate": 3.9781746031746034e-05,
      "loss": 2.2704,
      "step": 6150
    },
    {
      "epoch": 0.47,
      "learning_rate": 3.968253968253968e-05,
      "loss": 2.2207,
      "step": 6200
    },
    {
      "epoch": 0.48,
      "learning_rate": 3.958333333333333e-05,
      "loss": 2.2198,
      "step": 6250
    },
    {
      "epoch": 0.48,
      "learning_rate": 3.9484126984126986e-05,
      "loss": 2.223,
      "step": 6300
    },
    {
      "epoch": 0.48,
      "learning_rate": 3.9384920634920635e-05,
      "loss": 2.3311,
      "step": 6350
    },
    {
      "epoch": 0.49,
      "learning_rate": 3.928571428571429e-05,
      "loss": 2.1598,
      "step": 6400
    },
    {
      "epoch": 0.49,
      "learning_rate": 3.918650793650794e-05,
      "loss": 2.2098,
      "step": 6450
    },
    {
      "epoch": 0.5,
      "learning_rate": 3.908730158730159e-05,
      "loss": 2.2645,
      "step": 6500
    },
    {
      "epoch": 0.5,
      "learning_rate": 3.898809523809524e-05,
      "loss": 2.2462,
      "step": 6550
    },
    {
      "epoch": 0.5,
      "learning_rate": 3.888888888888889e-05,
      "loss": 2.3286,
      "step": 6600
    },
    {
      "epoch": 0.51,
      "learning_rate": 3.878968253968254e-05,
      "loss": 2.2331,
      "step": 6650
    },
    {
      "epoch": 0.51,
      "learning_rate": 3.8690476190476195e-05,
      "loss": 2.2508,
      "step": 6700
    },
    {
      "epoch": 0.52,
      "learning_rate": 3.859126984126984e-05,
      "loss": 2.2066,
      "step": 6750
    },
    {
      "epoch": 0.52,
      "learning_rate": 3.84920634920635e-05,
      "loss": 2.2877,
      "step": 6800
    },
    {
      "epoch": 0.52,
      "learning_rate": 3.839285714285715e-05,
      "loss": 2.2437,
      "step": 6850
    },
    {
      "epoch": 0.53,
      "learning_rate": 3.8293650793650795e-05,
      "loss": 2.2342,
      "step": 6900
    },
    {
      "epoch": 0.53,
      "learning_rate": 3.8194444444444444e-05,
      "loss": 2.3143,
      "step": 6950
    },
    {
      "epoch": 0.53,
      "learning_rate": 3.809523809523809e-05,
      "loss": 2.2252,
      "step": 7000
    },
    {
      "epoch": 0.54,
      "learning_rate": 3.799603174603175e-05,
      "loss": 2.2422,
      "step": 7050
    },
    {
      "epoch": 0.54,
      "learning_rate": 3.7896825396825396e-05,
      "loss": 2.1808,
      "step": 7100
    },
    {
      "epoch": 0.55,
      "learning_rate": 3.779761904761905e-05,
      "loss": 2.1898,
      "step": 7150
    },
    {
      "epoch": 0.55,
      "learning_rate": 3.76984126984127e-05,
      "loss": 2.1189,
      "step": 7200
    },
    {
      "epoch": 0.55,
      "learning_rate": 3.759920634920635e-05,
      "loss": 2.2667,
      "step": 7250
    },
    {
      "epoch": 0.56,
      "learning_rate": 3.7500000000000003e-05,
      "loss": 2.1627,
      "step": 7300
    },
    {
      "epoch": 0.56,
      "learning_rate": 3.740079365079365e-05,
      "loss": 2.2402,
      "step": 7350
    },
    {
      "epoch": 0.56,
      "learning_rate": 3.730158730158731e-05,
      "loss": 2.203,
      "step": 7400
    },
    {
      "epoch": 0.57,
      "learning_rate": 3.7202380952380956e-05,
      "loss": 2.2202,
      "step": 7450
    },
    {
      "epoch": 0.57,
      "learning_rate": 3.7103174603174604e-05,
      "loss": 2.2406,
      "step": 7500
    },
    {
      "epoch": 0.58,
      "learning_rate": 3.700396825396826e-05,
      "loss": 2.2132,
      "step": 7550
    },
    {
      "epoch": 0.58,
      "learning_rate": 3.690476190476191e-05,
      "loss": 2.1831,
      "step": 7600
    },
    {
      "epoch": 0.58,
      "learning_rate": 3.6805555555555556e-05,
      "loss": 2.1939,
      "step": 7650
    },
    {
      "epoch": 0.59,
      "learning_rate": 3.6706349206349205e-05,
      "loss": 2.1544,
      "step": 7700
    },
    {
      "epoch": 0.59,
      "learning_rate": 3.6607142857142853e-05,
      "loss": 2.2235,
      "step": 7750
    },
    {
      "epoch": 0.6,
      "learning_rate": 3.650793650793651e-05,
      "loss": 2.2568,
      "step": 7800
    },
    {
      "epoch": 0.6,
      "learning_rate": 3.640873015873016e-05,
      "loss": 2.2168,
      "step": 7850
    },
    {
      "epoch": 0.6,
      "learning_rate": 3.630952380952381e-05,
      "loss": 2.1741,
      "step": 7900
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.621031746031746e-05,
      "loss": 2.2061,
      "step": 7950
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.611111111111111e-05,
      "loss": 2.2602,
      "step": 8000
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.6011904761904765e-05,
      "loss": 2.2055,
      "step": 8050
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.591269841269841e-05,
      "loss": 2.2411,
      "step": 8100
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.581349206349207e-05,
      "loss": 2.2783,
      "step": 8150
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.571428571428572e-05,
      "loss": 2.1591,
      "step": 8200
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.5615079365079365e-05,
      "loss": 2.1479,
      "step": 8250
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.551587301587302e-05,
      "loss": 2.1781,
      "step": 8300
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.541666666666667e-05,
      "loss": 2.1612,
      "step": 8350
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.5317460317460324e-05,
      "loss": 2.1134,
      "step": 8400
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.521825396825397e-05,
      "loss": 2.1386,
      "step": 8450
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.511904761904762e-05,
      "loss": 2.0929,
      "step": 8500
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.501984126984127e-05,
      "loss": 2.2324,
      "step": 8550
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.492063492063492e-05,
      "loss": 2.1589,
      "step": 8600
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.4821428571428574e-05,
      "loss": 2.1681,
      "step": 8650
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.472222222222222e-05,
      "loss": 2.1765,
      "step": 8700
    },
    {
      "epoch": 0.67,
      "learning_rate": 3.462301587301587e-05,
      "loss": 2.1753,
      "step": 8750
    },
    {
      "epoch": 0.67,
      "learning_rate": 3.4523809523809526e-05,
      "loss": 2.249,
      "step": 8800
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.4424603174603174e-05,
      "loss": 2.1518,
      "step": 8850
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.432539682539683e-05,
      "loss": 2.2355,
      "step": 8900
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.422619047619048e-05,
      "loss": 2.1606,
      "step": 8950
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.412698412698413e-05,
      "loss": 2.2142,
      "step": 9000
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.402777777777778e-05,
      "loss": 2.1308,
      "step": 9050
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.392857142857143e-05,
      "loss": 2.1826,
      "step": 9100
    },
    {
      "epoch": 0.7,
      "learning_rate": 3.3829365079365086e-05,
      "loss": 2.2073,
      "step": 9150
    },
    {
      "epoch": 0.7,
      "learning_rate": 3.3730158730158734e-05,
      "loss": 2.1768,
      "step": 9200
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.363095238095238e-05,
      "loss": 2.2085,
      "step": 9250
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.353174603174603e-05,
      "loss": 2.1844,
      "step": 9300
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.343253968253968e-05,
      "loss": 2.1136,
      "step": 9350
    },
    {
      "epoch": 0.72,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 2.1827,
      "step": 9400
    },
    {
      "epoch": 0.72,
      "learning_rate": 3.3234126984126983e-05,
      "loss": 2.084,
      "step": 9450
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.313492063492064e-05,
      "loss": 2.1826,
      "step": 9500
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.303571428571429e-05,
      "loss": 2.0812,
      "step": 9550
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.2936507936507936e-05,
      "loss": 2.3111,
      "step": 9600
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.283730158730159e-05,
      "loss": 2.2154,
      "step": 9650
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.273809523809524e-05,
      "loss": 2.1717,
      "step": 9700
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.263888888888889e-05,
      "loss": 2.1668,
      "step": 9750
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.253968253968254e-05,
      "loss": 2.1209,
      "step": 9800
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.244047619047619e-05,
      "loss": 2.0419,
      "step": 9850
    },
    {
      "epoch": 0.76,
      "learning_rate": 3.234126984126985e-05,
      "loss": 2.2324,
      "step": 9900
    },
    {
      "epoch": 0.76,
      "learning_rate": 3.2242063492063495e-05,
      "loss": 2.2163,
      "step": 9950
    },
    {
      "epoch": 0.76,
      "learning_rate": 3.2142857142857144e-05,
      "loss": 2.2406,
      "step": 10000
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.20436507936508e-05,
      "loss": 2.1455,
      "step": 10050
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.194444444444444e-05,
      "loss": 2.0851,
      "step": 10100
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.1845238095238096e-05,
      "loss": 2.1666,
      "step": 10150
    },
    {
      "epoch": 0.78,
      "learning_rate": 3.1746031746031745e-05,
      "loss": 2.2034,
      "step": 10200
    },
    {
      "epoch": 0.78,
      "learning_rate": 3.16468253968254e-05,
      "loss": 2.0919,
      "step": 10250
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.154761904761905e-05,
      "loss": 2.0807,
      "step": 10300
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.14484126984127e-05,
      "loss": 2.1648,
      "step": 10350
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.134920634920635e-05,
      "loss": 2.1216,
      "step": 10400
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.125e-05,
      "loss": 2.1722,
      "step": 10450
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.1150793650793656e-05,
      "loss": 2.2017,
      "step": 10500
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.1051587301587304e-05,
      "loss": 2.076,
      "step": 10550
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.095238095238095e-05,
      "loss": 2.0688,
      "step": 10600
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.085317460317461e-05,
      "loss": 2.0956,
      "step": 10650
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.075396825396826e-05,
      "loss": 2.1461,
      "step": 10700
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.0654761904761905e-05,
      "loss": 2.1046,
      "step": 10750
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.055555555555556e-05,
      "loss": 2.163,
      "step": 10800
    },
    {
      "epoch": 0.83,
      "learning_rate": 3.0456349206349206e-05,
      "loss": 2.1368,
      "step": 10850
    },
    {
      "epoch": 0.83,
      "learning_rate": 3.0357142857142857e-05,
      "loss": 2.1615,
      "step": 10900
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.0257936507936506e-05,
      "loss": 2.1717,
      "step": 10950
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.0158730158730158e-05,
      "loss": 2.0984,
      "step": 11000
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.005952380952381e-05,
      "loss": 2.1134,
      "step": 11050
    },
    {
      "epoch": 0.85,
      "learning_rate": 2.996031746031746e-05,
      "loss": 2.1184,
      "step": 11100
    },
    {
      "epoch": 0.85,
      "learning_rate": 2.9861111111111113e-05,
      "loss": 2.1654,
      "step": 11150
    },
    {
      "epoch": 0.85,
      "learning_rate": 2.9761904761904762e-05,
      "loss": 2.1137,
      "step": 11200
    },
    {
      "epoch": 0.86,
      "learning_rate": 2.9662698412698414e-05,
      "loss": 2.1224,
      "step": 11250
    },
    {
      "epoch": 0.86,
      "learning_rate": 2.9563492063492066e-05,
      "loss": 2.1394,
      "step": 11300
    },
    {
      "epoch": 0.87,
      "learning_rate": 2.9464285714285718e-05,
      "loss": 2.1456,
      "step": 11350
    },
    {
      "epoch": 0.87,
      "learning_rate": 2.9365079365079366e-05,
      "loss": 2.0706,
      "step": 11400
    },
    {
      "epoch": 0.87,
      "learning_rate": 2.9265873015873018e-05,
      "loss": 2.1334,
      "step": 11450
    },
    {
      "epoch": 0.88,
      "learning_rate": 2.916666666666667e-05,
      "loss": 2.0187,
      "step": 11500
    },
    {
      "epoch": 0.88,
      "learning_rate": 2.906746031746032e-05,
      "loss": 2.1122,
      "step": 11550
    },
    {
      "epoch": 0.89,
      "learning_rate": 2.8968253968253974e-05,
      "loss": 2.2484,
      "step": 11600
    },
    {
      "epoch": 0.89,
      "learning_rate": 2.886904761904762e-05,
      "loss": 2.1894,
      "step": 11650
    },
    {
      "epoch": 0.89,
      "learning_rate": 2.876984126984127e-05,
      "loss": 2.2025,
      "step": 11700
    },
    {
      "epoch": 0.9,
      "learning_rate": 2.867063492063492e-05,
      "loss": 2.183,
      "step": 11750
    },
    {
      "epoch": 0.9,
      "learning_rate": 2.857142857142857e-05,
      "loss": 2.0762,
      "step": 11800
    },
    {
      "epoch": 0.9,
      "learning_rate": 2.8472222222222223e-05,
      "loss": 2.1949,
      "step": 11850
    },
    {
      "epoch": 0.91,
      "learning_rate": 2.8373015873015875e-05,
      "loss": 2.0965,
      "step": 11900
    },
    {
      "epoch": 0.91,
      "learning_rate": 2.8273809523809523e-05,
      "loss": 2.0698,
      "step": 11950
    },
    {
      "epoch": 0.92,
      "learning_rate": 2.8174603174603175e-05,
      "loss": 2.1163,
      "step": 12000
    },
    {
      "epoch": 0.92,
      "learning_rate": 2.8075396825396827e-05,
      "loss": 2.1259,
      "step": 12050
    },
    {
      "epoch": 0.92,
      "learning_rate": 2.797619047619048e-05,
      "loss": 2.1668,
      "step": 12100
    },
    {
      "epoch": 0.93,
      "learning_rate": 2.787698412698413e-05,
      "loss": 2.1295,
      "step": 12150
    },
    {
      "epoch": 0.93,
      "learning_rate": 2.777777777777778e-05,
      "loss": 2.1924,
      "step": 12200
    },
    {
      "epoch": 0.94,
      "learning_rate": 2.767857142857143e-05,
      "loss": 2.2458,
      "step": 12250
    },
    {
      "epoch": 0.94,
      "learning_rate": 2.7579365079365083e-05,
      "loss": 2.0815,
      "step": 12300
    },
    {
      "epoch": 0.94,
      "learning_rate": 2.7480158730158735e-05,
      "loss": 2.0327,
      "step": 12350
    },
    {
      "epoch": 0.95,
      "learning_rate": 2.7380952380952383e-05,
      "loss": 2.1338,
      "step": 12400
    },
    {
      "epoch": 0.95,
      "learning_rate": 2.7281746031746032e-05,
      "loss": 2.1107,
      "step": 12450
    },
    {
      "epoch": 0.95,
      "learning_rate": 2.718253968253968e-05,
      "loss": 2.0855,
      "step": 12500
    },
    {
      "epoch": 0.96,
      "learning_rate": 2.7083333333333332e-05,
      "loss": 2.1685,
      "step": 12550
    },
    {
      "epoch": 0.96,
      "learning_rate": 2.6984126984126984e-05,
      "loss": 2.1238,
      "step": 12600
    },
    {
      "epoch": 0.97,
      "learning_rate": 2.6884920634920636e-05,
      "loss": 2.1076,
      "step": 12650
    },
    {
      "epoch": 0.97,
      "learning_rate": 2.6785714285714288e-05,
      "loss": 2.1635,
      "step": 12700
    },
    {
      "epoch": 0.97,
      "learning_rate": 2.6686507936507936e-05,
      "loss": 2.1277,
      "step": 12750
    },
    {
      "epoch": 0.98,
      "learning_rate": 2.6587301587301588e-05,
      "loss": 2.1048,
      "step": 12800
    },
    {
      "epoch": 0.98,
      "learning_rate": 2.648809523809524e-05,
      "loss": 2.1246,
      "step": 12850
    },
    {
      "epoch": 0.98,
      "learning_rate": 2.6388888888888892e-05,
      "loss": 2.1518,
      "step": 12900
    },
    {
      "epoch": 0.99,
      "learning_rate": 2.628968253968254e-05,
      "loss": 2.0936,
      "step": 12950
    },
    {
      "epoch": 0.99,
      "learning_rate": 2.6190476190476192e-05,
      "loss": 2.096,
      "step": 13000
    },
    {
      "epoch": 1.0,
      "learning_rate": 2.6091269841269844e-05,
      "loss": 2.1602,
      "step": 13050
    },
    {
      "epoch": 1.0,
      "learning_rate": 2.5992063492063496e-05,
      "loss": 2.1038,
      "step": 13100
    },
    {
      "epoch": 1.0,
      "learning_rate": 2.5892857142857148e-05,
      "loss": 2.1308,
      "step": 13150
    },
    {
      "epoch": 1.01,
      "learning_rate": 2.5793650793650796e-05,
      "loss": 2.1161,
      "step": 13200
    },
    {
      "epoch": 1.01,
      "learning_rate": 2.5694444444444445e-05,
      "loss": 2.0994,
      "step": 13250
    },
    {
      "epoch": 1.02,
      "learning_rate": 2.5595238095238093e-05,
      "loss": 2.1299,
      "step": 13300
    },
    {
      "epoch": 1.02,
      "learning_rate": 2.5496031746031745e-05,
      "loss": 2.0676,
      "step": 13350
    },
    {
      "epoch": 1.02,
      "learning_rate": 2.5396825396825397e-05,
      "loss": 2.0956,
      "step": 13400
    },
    {
      "epoch": 1.03,
      "learning_rate": 2.529761904761905e-05,
      "loss": 2.1986,
      "step": 13450
    },
    {
      "epoch": 1.03,
      "learning_rate": 2.5198412698412697e-05,
      "loss": 2.0667,
      "step": 13500
    },
    {
      "epoch": 1.03,
      "learning_rate": 2.509920634920635e-05,
      "loss": 1.989,
      "step": 13550
    },
    {
| "epoch": 1.04, | |
| "learning_rate": 2.5e-05, | |
| "loss": 2.0445, | |
| "step": 13600 | |
| }, | |
| { | |
| "epoch": 1.04, | |
| "learning_rate": 2.4900793650793653e-05, | |
| "loss": 2.0726, | |
| "step": 13650 | |
| }, | |
| { | |
| "epoch": 1.05, | |
| "learning_rate": 2.4801587301587305e-05, | |
| "loss": 2.1361, | |
| "step": 13700 | |
| }, | |
| { | |
| "epoch": 1.05, | |
| "learning_rate": 2.4702380952380953e-05, | |
| "loss": 2.0984, | |
| "step": 13750 | |
| }, | |
| { | |
| "epoch": 1.05, | |
| "learning_rate": 2.4603174603174602e-05, | |
| "loss": 2.1363, | |
| "step": 13800 | |
| }, | |
| { | |
| "epoch": 1.06, | |
| "learning_rate": 2.4503968253968254e-05, | |
| "loss": 2.0652, | |
| "step": 13850 | |
| }, | |
| { | |
| "epoch": 1.06, | |
| "learning_rate": 2.4404761904761906e-05, | |
| "loss": 2.087, | |
| "step": 13900 | |
| }, | |
| { | |
| "epoch": 1.06, | |
| "learning_rate": 2.4305555555555558e-05, | |
| "loss": 2.1069, | |
| "step": 13950 | |
| }, | |
| { | |
| "epoch": 1.07, | |
| "learning_rate": 2.4206349206349206e-05, | |
| "loss": 2.1175, | |
| "step": 14000 | |
| }, | |
| { | |
| "epoch": 1.07, | |
| "learning_rate": 2.4107142857142858e-05, | |
| "loss": 2.071, | |
| "step": 14050 | |
| }, | |
| { | |
| "epoch": 1.08, | |
| "learning_rate": 2.400793650793651e-05, | |
| "loss": 2.0809, | |
| "step": 14100 | |
| }, | |
| { | |
| "epoch": 1.08, | |
| "learning_rate": 2.390873015873016e-05, | |
| "loss": 2.0943, | |
| "step": 14150 | |
| }, | |
| { | |
| "epoch": 1.08, | |
| "learning_rate": 2.380952380952381e-05, | |
| "loss": 2.1503, | |
| "step": 14200 | |
| }, | |
| { | |
| "epoch": 1.09, | |
| "learning_rate": 2.3710317460317462e-05, | |
| "loss": 2.1241, | |
| "step": 14250 | |
| }, | |
| { | |
| "epoch": 1.09, | |
| "learning_rate": 2.361111111111111e-05, | |
| "loss": 2.1259, | |
| "step": 14300 | |
| }, | |
| { | |
| "epoch": 1.1, | |
| "learning_rate": 2.3511904761904762e-05, | |
| "loss": 2.0531, | |
| "step": 14350 | |
| }, | |
| { | |
| "epoch": 1.1, | |
| "learning_rate": 2.3412698412698414e-05, | |
| "loss": 2.2189, | |
| "step": 14400 | |
| }, | |
| { | |
| "epoch": 1.1, | |
| "learning_rate": 2.3313492063492066e-05, | |
| "loss": 2.0976, | |
| "step": 14450 | |
| }, | |
| { | |
| "epoch": 1.11, | |
| "learning_rate": 2.3214285714285715e-05, | |
| "loss": 2.1033, | |
| "step": 14500 | |
| }, | |
| { | |
| "epoch": 1.11, | |
| "learning_rate": 2.3115079365079367e-05, | |
| "loss": 2.1572, | |
| "step": 14550 | |
| }, | |
| { | |
| "epoch": 1.11, | |
| "learning_rate": 2.3015873015873015e-05, | |
| "loss": 2.0534, | |
| "step": 14600 | |
| }, | |
| { | |
| "epoch": 1.12, | |
| "learning_rate": 2.2916666666666667e-05, | |
| "loss": 2.1607, | |
| "step": 14650 | |
| }, | |
| { | |
| "epoch": 1.12, | |
| "learning_rate": 2.281746031746032e-05, | |
| "loss": 1.9848, | |
| "step": 14700 | |
| }, | |
| { | |
| "epoch": 1.13, | |
| "learning_rate": 2.271825396825397e-05, | |
| "loss": 2.0368, | |
| "step": 14750 | |
| }, | |
| { | |
| "epoch": 1.13, | |
| "learning_rate": 2.261904761904762e-05, | |
| "loss": 2.1026, | |
| "step": 14800 | |
| }, | |
| { | |
| "epoch": 1.13, | |
| "learning_rate": 2.251984126984127e-05, | |
| "loss": 2.1793, | |
| "step": 14850 | |
| }, | |
| { | |
| "epoch": 1.14, | |
| "learning_rate": 2.2420634920634923e-05, | |
| "loss": 2.0969, | |
| "step": 14900 | |
| }, | |
| { | |
| "epoch": 1.14, | |
| "learning_rate": 2.2321428571428575e-05, | |
| "loss": 2.0451, | |
| "step": 14950 | |
| }, | |
| { | |
| "epoch": 1.15, | |
| "learning_rate": 2.2222222222222223e-05, | |
| "loss": 1.9796, | |
| "step": 15000 | |
| }, | |
| { | |
| "epoch": 1.15, | |
| "learning_rate": 2.2123015873015872e-05, | |
| "loss": 2.0302, | |
| "step": 15050 | |
| }, | |
| { | |
| "epoch": 1.15, | |
| "learning_rate": 2.2023809523809524e-05, | |
| "loss": 2.06, | |
| "step": 15100 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "learning_rate": 2.1924603174603176e-05, | |
| "loss": 2.1026, | |
| "step": 15150 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "learning_rate": 2.1825396825396827e-05, | |
| "loss": 2.0849, | |
| "step": 15200 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "learning_rate": 2.172619047619048e-05, | |
| "loss": 2.1208, | |
| "step": 15250 | |
| }, | |
| { | |
| "epoch": 1.17, | |
| "learning_rate": 2.1626984126984128e-05, | |
| "loss": 2.0993, | |
| "step": 15300 | |
| }, | |
| { | |
| "epoch": 1.17, | |
| "learning_rate": 2.152777777777778e-05, | |
| "loss": 2.0567, | |
| "step": 15350 | |
| }, | |
| { | |
| "epoch": 1.18, | |
| "learning_rate": 2.1428571428571428e-05, | |
| "loss": 2.0269, | |
| "step": 15400 | |
| }, | |
| { | |
| "epoch": 1.18, | |
| "learning_rate": 2.132936507936508e-05, | |
| "loss": 2.0395, | |
| "step": 15450 | |
| }, | |
| { | |
| "epoch": 1.18, | |
| "learning_rate": 2.1230158730158732e-05, | |
| "loss": 2.0267, | |
| "step": 15500 | |
| }, | |
| { | |
| "epoch": 1.19, | |
| "learning_rate": 2.113095238095238e-05, | |
| "loss": 2.0488, | |
| "step": 15550 | |
| }, | |
| { | |
| "epoch": 1.19, | |
| "learning_rate": 2.1031746031746032e-05, | |
| "loss": 2.0858, | |
| "step": 15600 | |
| }, | |
| { | |
| "epoch": 1.19, | |
| "learning_rate": 2.0932539682539684e-05, | |
| "loss": 2.0541, | |
| "step": 15650 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "learning_rate": 2.0833333333333336e-05, | |
| "loss": 2.046, | |
| "step": 15700 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "learning_rate": 2.0734126984126988e-05, | |
| "loss": 1.9805, | |
| "step": 15750 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "learning_rate": 2.0634920634920636e-05, | |
| "loss": 2.0991, | |
| "step": 15800 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "learning_rate": 2.0535714285714285e-05, | |
| "loss": 2.1884, | |
| "step": 15850 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "learning_rate": 2.0436507936507937e-05, | |
| "loss": 2.1337, | |
| "step": 15900 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "learning_rate": 2.033730158730159e-05, | |
| "loss": 2.0913, | |
| "step": 15950 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "learning_rate": 2.023809523809524e-05, | |
| "loss": 2.1773, | |
| "step": 16000 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "learning_rate": 2.013888888888889e-05, | |
| "loss": 2.1711, | |
| "step": 16050 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "learning_rate": 2.003968253968254e-05, | |
| "loss": 2.0521, | |
| "step": 16100 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "learning_rate": 1.9940476190476193e-05, | |
| "loss": 2.0473, | |
| "step": 16150 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "learning_rate": 1.984126984126984e-05, | |
| "loss": 2.058, | |
| "step": 16200 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "learning_rate": 1.9742063492063493e-05, | |
| "loss": 2.0332, | |
| "step": 16250 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "learning_rate": 1.9642857142857145e-05, | |
| "loss": 2.049, | |
| "step": 16300 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "learning_rate": 1.9543650793650793e-05, | |
| "loss": 2.106, | |
| "step": 16350 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "learning_rate": 1.9444444444444445e-05, | |
| "loss": 2.0819, | |
| "step": 16400 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "learning_rate": 1.9345238095238097e-05, | |
| "loss": 2.0966, | |
| "step": 16450 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "learning_rate": 1.924603174603175e-05, | |
| "loss": 2.1137, | |
| "step": 16500 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "learning_rate": 1.9146825396825398e-05, | |
| "loss": 2.0342, | |
| "step": 16550 | |
| }, | |
| { | |
| "epoch": 1.27, | |
| "learning_rate": 1.9047619047619046e-05, | |
| "loss": 2.0423, | |
| "step": 16600 | |
| }, | |
| { | |
| "epoch": 1.27, | |
| "learning_rate": 1.8948412698412698e-05, | |
| "loss": 2.0843, | |
| "step": 16650 | |
| }, | |
| { | |
| "epoch": 1.27, | |
| "learning_rate": 1.884920634920635e-05, | |
| "loss": 2.0687, | |
| "step": 16700 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "learning_rate": 1.8750000000000002e-05, | |
| "loss": 2.0647, | |
| "step": 16750 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "learning_rate": 1.8650793650793654e-05, | |
| "loss": 2.0359, | |
| "step": 16800 | |
| }, | |
| { | |
| "epoch": 1.29, | |
| "learning_rate": 1.8551587301587302e-05, | |
| "loss": 2.0275, | |
| "step": 16850 | |
| }, | |
| { | |
| "epoch": 1.29, | |
| "learning_rate": 1.8452380952380954e-05, | |
| "loss": 2.1197, | |
| "step": 16900 | |
| }, | |
| { | |
| "epoch": 1.29, | |
| "learning_rate": 1.8353174603174602e-05, | |
| "loss": 2.054, | |
| "step": 16950 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "learning_rate": 1.8253968253968254e-05, | |
| "loss": 2.0854, | |
| "step": 17000 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "learning_rate": 1.8154761904761906e-05, | |
| "loss": 2.0493, | |
| "step": 17050 | |
| }, | |
| { | |
| "epoch": 1.31, | |
| "learning_rate": 1.8055555555555555e-05, | |
| "loss": 2.1042, | |
| "step": 17100 | |
| }, | |
| { | |
| "epoch": 1.31, | |
| "learning_rate": 1.7956349206349207e-05, | |
| "loss": 1.913, | |
| "step": 17150 | |
| }, | |
| { | |
| "epoch": 1.31, | |
| "learning_rate": 1.785714285714286e-05, | |
| "loss": 2.1167, | |
| "step": 17200 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "learning_rate": 1.775793650793651e-05, | |
| "loss": 2.0636, | |
| "step": 17250 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "learning_rate": 1.7658730158730162e-05, | |
| "loss": 2.0994, | |
| "step": 17300 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "learning_rate": 1.755952380952381e-05, | |
| "loss": 2.116, | |
| "step": 17350 | |
| }, | |
| { | |
| "epoch": 1.33, | |
| "learning_rate": 1.746031746031746e-05, | |
| "loss": 2.0227, | |
| "step": 17400 | |
| }, | |
| { | |
| "epoch": 1.33, | |
| "learning_rate": 1.736111111111111e-05, | |
| "loss": 2.102, | |
| "step": 17450 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "learning_rate": 1.7261904761904763e-05, | |
| "loss": 2.1564, | |
| "step": 17500 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "learning_rate": 1.7162698412698415e-05, | |
| "loss": 2.0394, | |
| "step": 17550 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "learning_rate": 1.7063492063492063e-05, | |
| "loss": 2.0377, | |
| "step": 17600 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "learning_rate": 1.6964285714285715e-05, | |
| "loss": 2.119, | |
| "step": 17650 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "learning_rate": 1.6865079365079367e-05, | |
| "loss": 2.0541, | |
| "step": 17700 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "learning_rate": 1.6765873015873016e-05, | |
| "loss": 2.1053, | |
| "step": 17750 | |
| }, | |
| { | |
| "epoch": 1.36, | |
| "learning_rate": 1.6666666666666667e-05, | |
| "loss": 2.114, | |
| "step": 17800 | |
| }, | |
| { | |
| "epoch": 1.36, | |
| "learning_rate": 1.656746031746032e-05, | |
| "loss": 2.0926, | |
| "step": 17850 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "learning_rate": 1.6468253968253968e-05, | |
| "loss": 2.1273, | |
| "step": 17900 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "learning_rate": 1.636904761904762e-05, | |
| "loss": 2.027, | |
| "step": 17950 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "learning_rate": 1.626984126984127e-05, | |
| "loss": 2.1071, | |
| "step": 18000 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "learning_rate": 1.6170634920634923e-05, | |
| "loss": 2.0269, | |
| "step": 18050 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "learning_rate": 1.6071428571428572e-05, | |
| "loss": 2.0236, | |
| "step": 18100 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "learning_rate": 1.597222222222222e-05, | |
| "loss": 2.0724, | |
| "step": 18150 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "learning_rate": 1.5873015873015872e-05, | |
| "loss": 2.0522, | |
| "step": 18200 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "learning_rate": 1.5773809523809524e-05, | |
| "loss": 2.0657, | |
| "step": 18250 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "learning_rate": 1.5674603174603176e-05, | |
| "loss": 2.0205, | |
| "step": 18300 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "learning_rate": 1.5575396825396828e-05, | |
| "loss": 1.9731, | |
| "step": 18350 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "learning_rate": 1.5476190476190476e-05, | |
| "loss": 2.0794, | |
| "step": 18400 | |
| }, | |
| { | |
| "epoch": 1.41, | |
| "learning_rate": 1.537698412698413e-05, | |
| "loss": 2.1129, | |
| "step": 18450 | |
| }, | |
| { | |
| "epoch": 1.41, | |
| "learning_rate": 1.527777777777778e-05, | |
| "loss": 2.0713, | |
| "step": 18500 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "learning_rate": 1.5178571428571429e-05, | |
| "loss": 2.0227, | |
| "step": 18550 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "learning_rate": 1.5079365079365079e-05, | |
| "loss": 2.0003, | |
| "step": 18600 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "learning_rate": 1.498015873015873e-05, | |
| "loss": 1.961, | |
| "step": 18650 | |
| }, | |
| { | |
| "epoch": 1.43, | |
| "learning_rate": 1.4880952380952381e-05, | |
| "loss": 1.9847, | |
| "step": 18700 | |
| }, | |
| { | |
| "epoch": 1.43, | |
| "learning_rate": 1.4781746031746033e-05, | |
| "loss": 2.0144, | |
| "step": 18750 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "learning_rate": 1.4682539682539683e-05, | |
| "loss": 2.0327, | |
| "step": 18800 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "learning_rate": 1.4583333333333335e-05, | |
| "loss": 1.9285, | |
| "step": 18850 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "learning_rate": 1.4484126984126987e-05, | |
| "loss": 2.0534, | |
| "step": 18900 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "learning_rate": 1.4384920634920635e-05, | |
| "loss": 2.123, | |
| "step": 18950 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "learning_rate": 1.4285714285714285e-05, | |
| "loss": 2.0127, | |
| "step": 19000 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "learning_rate": 1.4186507936507937e-05, | |
| "loss": 2.1508, | |
| "step": 19050 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "learning_rate": 1.4087301587301587e-05, | |
| "loss": 2.1037, | |
| "step": 19100 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "learning_rate": 1.398809523809524e-05, | |
| "loss": 2.0962, | |
| "step": 19150 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "learning_rate": 1.388888888888889e-05, | |
| "loss": 2.0785, | |
| "step": 19200 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "learning_rate": 1.3789682539682541e-05, | |
| "loss": 2.1065, | |
| "step": 19250 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "learning_rate": 1.3690476190476192e-05, | |
| "loss": 2.1285, | |
| "step": 19300 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "learning_rate": 1.359126984126984e-05, | |
| "loss": 2.0568, | |
| "step": 19350 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "learning_rate": 1.3492063492063492e-05, | |
| "loss": 2.1088, | |
| "step": 19400 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "learning_rate": 1.3392857142857144e-05, | |
| "loss": 2.0946, | |
| "step": 19450 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "learning_rate": 1.3293650793650794e-05, | |
| "loss": 2.0457, | |
| "step": 19500 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "learning_rate": 1.3194444444444446e-05, | |
| "loss": 2.0999, | |
| "step": 19550 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "learning_rate": 1.3095238095238096e-05, | |
| "loss": 2.0653, | |
| "step": 19600 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "learning_rate": 1.2996031746031748e-05, | |
| "loss": 2.0811, | |
| "step": 19650 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "learning_rate": 1.2896825396825398e-05, | |
| "loss": 2.0284, | |
| "step": 19700 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "learning_rate": 1.2797619047619047e-05, | |
| "loss": 2.0423, | |
| "step": 19750 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "learning_rate": 1.2698412698412699e-05, | |
| "loss": 1.9918, | |
| "step": 19800 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "learning_rate": 1.2599206349206349e-05, | |
| "loss": 2.0226, | |
| "step": 19850 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "learning_rate": 1.25e-05, | |
| "loss": 2.0711, | |
| "step": 19900 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "learning_rate": 1.2400793650793652e-05, | |
| "loss": 2.0474, | |
| "step": 19950 | |
| }, | |
| { | |
| "epoch": 1.53, | |
| "learning_rate": 1.2301587301587301e-05, | |
| "loss": 2.1422, | |
| "step": 20000 | |
| }, | |
| { | |
| "epoch": 1.53, | |
| "learning_rate": 1.2202380952380953e-05, | |
| "loss": 2.0376, | |
| "step": 20050 | |
| }, | |
| { | |
| "epoch": 1.53, | |
| "learning_rate": 1.2103174603174603e-05, | |
| "loss": 2.056, | |
| "step": 20100 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "learning_rate": 1.2003968253968255e-05, | |
| "loss": 2.1421, | |
| "step": 20150 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "learning_rate": 1.1904761904761905e-05, | |
| "loss": 2.1107, | |
| "step": 20200 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "learning_rate": 1.1805555555555555e-05, | |
| "loss": 2.0642, | |
| "step": 20250 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "learning_rate": 1.1706349206349207e-05, | |
| "loss": 2.0525, | |
| "step": 20300 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "learning_rate": 1.1607142857142857e-05, | |
| "loss": 2.0263, | |
| "step": 20350 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "learning_rate": 1.1507936507936508e-05, | |
| "loss": 2.1769, | |
| "step": 20400 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "learning_rate": 1.140873015873016e-05, | |
| "loss": 2.0337, | |
| "step": 20450 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "learning_rate": 1.130952380952381e-05, | |
| "loss": 1.9124, | |
| "step": 20500 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "learning_rate": 1.1210317460317461e-05, | |
| "loss": 2.03, | |
| "step": 20550 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "learning_rate": 1.1111111111111112e-05, | |
| "loss": 2.0838, | |
| "step": 20600 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "learning_rate": 1.1011904761904762e-05, | |
| "loss": 2.0309, | |
| "step": 20650 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "learning_rate": 1.0912698412698414e-05, | |
| "loss": 2.1253, | |
| "step": 20700 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "learning_rate": 1.0813492063492064e-05, | |
| "loss": 2.0027, | |
| "step": 20750 | |
| }, | |
| { | |
| "epoch": 1.59, | |
| "learning_rate": 1.0714285714285714e-05, | |
| "loss": 2.1645, | |
| "step": 20800 | |
| }, | |
| { | |
| "epoch": 1.59, | |
| "learning_rate": 1.0615079365079366e-05, | |
| "loss": 1.9856, | |
| "step": 20850 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "learning_rate": 1.0515873015873016e-05, | |
| "loss": 2.0447, | |
| "step": 20900 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "learning_rate": 1.0416666666666668e-05, | |
| "loss": 2.0944, | |
| "step": 20950 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "learning_rate": 1.0317460317460318e-05, | |
| "loss": 2.0401, | |
| "step": 21000 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "learning_rate": 1.0218253968253968e-05, | |
| "loss": 2.0073, | |
| "step": 21050 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "learning_rate": 1.011904761904762e-05, | |
| "loss": 1.947, | |
| "step": 21100 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "learning_rate": 1.001984126984127e-05, | |
| "loss": 2.049, | |
| "step": 21150 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "learning_rate": 9.92063492063492e-06, | |
| "loss": 2.0946, | |
| "step": 21200 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "learning_rate": 9.821428571428573e-06, | |
| "loss": 2.1091, | |
| "step": 21250 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "learning_rate": 9.722222222222223e-06, | |
| "loss": 2.0092, | |
| "step": 21300 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "learning_rate": 9.623015873015875e-06, | |
| "loss": 1.972, | |
| "step": 21350 | |
| }, | |
| { | |
| "epoch": 1.63, | |
| "learning_rate": 9.523809523809523e-06, | |
| "loss": 2.0306, | |
| "step": 21400 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "learning_rate": 9.424603174603175e-06, | |
| "loss": 2.0901, | |
| "step": 21450 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "learning_rate": 9.325396825396827e-06, | |
| "loss": 2.0199, | |
| "step": 21500 | |
| }, | |
| { | |
| "epoch": 1.65, | |
| "learning_rate": 9.226190476190477e-06, | |
| "loss": 1.9885, | |
| "step": 21550 | |
| }, | |
| { | |
| "epoch": 1.65, | |
| "learning_rate": 9.126984126984127e-06, | |
| "loss": 2.0567, | |
| "step": 21600 | |
| }, | |
| { | |
| "epoch": 1.65, | |
| "learning_rate": 9.027777777777777e-06, | |
| "loss": 2.1078, | |
| "step": 21650 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "learning_rate": 8.92857142857143e-06, | |
| "loss": 2.0263, | |
| "step": 21700 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "learning_rate": 8.829365079365081e-06, | |
| "loss": 2.0338, | |
| "step": 21750 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "learning_rate": 8.73015873015873e-06, | |
| "loss": 2.024, | |
| "step": 21800 | |
| }, | |
| { | |
| "epoch": 1.67, | |
| "learning_rate": 8.630952380952381e-06, | |
| "loss": 2.0244, | |
| "step": 21850 | |
| }, | |
| { | |
| "epoch": 1.67, | |
| "learning_rate": 8.531746031746032e-06, | |
| "loss": 2.0738, | |
| "step": 21900 | |
| }, | |
| { | |
| "epoch": 1.68, | |
| "learning_rate": 8.432539682539684e-06, | |
| "loss": 2.0932, | |
| "step": 21950 | |
| }, | |
| { | |
| "epoch": 1.68, | |
| "learning_rate": 8.333333333333334e-06, | |
| "loss": 2.0231, | |
| "step": 22000 | |
| }, | |
| { | |
| "epoch": 1.68, | |
| "learning_rate": 8.234126984126984e-06, | |
| "loss": 2.0454, | |
| "step": 22050 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "learning_rate": 8.134920634920636e-06, | |
| "loss": 2.0424, | |
| "step": 22100 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "learning_rate": 8.035714285714286e-06, | |
| "loss": 2.0762, | |
| "step": 22150 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "learning_rate": 7.936507936507936e-06, | |
| "loss": 2.0292, | |
| "step": 22200 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "learning_rate": 7.837301587301588e-06, | |
| "loss": 2.0349, | |
| "step": 22250 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "learning_rate": 7.738095238095238e-06, | |
| "loss": 2.0433, | |
| "step": 22300 | |
| }, | |
| { | |
| "epoch": 1.71, | |
| "learning_rate": 7.63888888888889e-06, | |
| "loss": 2.0813, | |
| "step": 22350 | |
| }, | |
| { | |
| "epoch": 1.71, | |
| "learning_rate": 7.5396825396825394e-06, | |
| "loss": 2.0036, | |
| "step": 22400 | |
| }, | |
| { | |
| "epoch": 1.71, | |
| "learning_rate": 7.4404761904761905e-06, | |
| "loss": 1.9523, | |
| "step": 22450 | |
| }, | |
| { | |
| "epoch": 1.72, | |
| "learning_rate": 7.3412698412698415e-06, | |
| "loss": 2.0789, | |
| "step": 22500 | |
| }, | |
| { | |
| "epoch": 1.72, | |
| "learning_rate": 7.242063492063493e-06, | |
| "loss": 1.9412, | |
| "step": 22550 | |
| }, | |
| { | |
| "epoch": 1.73, | |
| "learning_rate": 7.142857142857143e-06, | |
| "loss": 1.9535, | |
| "step": 22600 | |
| }, | |
| { | |
| "epoch": 1.73, | |
| "learning_rate": 7.043650793650794e-06, | |
| "loss": 2.0737, | |
| "step": 22650 | |
| }, | |
| { | |
| "epoch": 1.73, | |
| "learning_rate": 6.944444444444445e-06, | |
| "loss": 2.0372, | |
| "step": 22700 | |
| }, | |
| { | |
| "epoch": 1.74, | |
| "learning_rate": 6.845238095238096e-06, | |
| "loss": 1.9616, | |
| "step": 22750 | |
| }, | |
| { | |
| "epoch": 1.74, | |
| "learning_rate": 6.746031746031746e-06, | |
| "loss": 2.0529, | |
| "step": 22800 | |
| }, | |
| { | |
| "epoch": 1.74, | |
| "learning_rate": 6.646825396825397e-06, | |
| "loss": 1.9972, | |
| "step": 22850 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "learning_rate": 6.547619047619048e-06, | |
| "loss": 2.1145, | |
| "step": 22900 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "learning_rate": 6.448412698412699e-06, | |
| "loss": 1.9787, | |
| "step": 22950 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "learning_rate": 6.349206349206349e-06, | |
| "loss": 2.0294, | |
| "step": 23000 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "learning_rate": 6.25e-06, | |
| "loss": 1.9645, | |
| "step": 23050 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "learning_rate": 6.1507936507936505e-06, | |
| "loss": 1.9659, | |
| "step": 23100 | |
| }, | |
| { | |
| "epoch": 1.77, | |
| "learning_rate": 6.0515873015873015e-06, | |
| "loss": 1.9515, | |
| "step": 23150 | |
| }, | |
| { | |
| "epoch": 1.77, | |
| "learning_rate": 5.9523809523809525e-06, | |
| "loss": 2.0043, | |
| "step": 23200 | |
| }, | |
| { | |
| "epoch": 1.77, | |
| "learning_rate": 5.8531746031746036e-06, | |
| "loss": 1.9432, | |
| "step": 23250 | |
| }, | |
| { | |
| "epoch": 1.78, | |
| "learning_rate": 5.753968253968254e-06, | |
| "loss": 1.9595, | |
| "step": 23300 | |
| }, | |
| { | |
| "epoch": 1.78, | |
| "learning_rate": 5.654761904761905e-06, | |
| "loss": 2.172, | |
| "step": 23350 | |
| }, | |
| { | |
| "epoch": 1.79, | |
| "learning_rate": 5.555555555555556e-06, | |
| "loss": 2.0694, | |
| "step": 23400 | |
| }, | |
| { | |
| "epoch": 1.79, | |
| "learning_rate": 5.456349206349207e-06, | |
| "loss": 1.9371, | |
| "step": 23450 | |
| }, | |
| { | |
| "epoch": 1.79, | |
| "learning_rate": 5.357142857142857e-06, | |
| "loss": 1.9168, | |
| "step": 23500 | |
| }, | |
| { | |
| "epoch": 1.8, | |
| "learning_rate": 5.257936507936508e-06, | |
| "loss": 2.0086, | |
| "step": 23550 | |
| }, | |
| { | |
| "epoch": 1.8, | |
| "learning_rate": 5.158730158730159e-06, | |
| "loss": 2.0677, | |
| "step": 23600 | |
| }, | |
| { | |
| "epoch": 1.81, | |
| "learning_rate": 5.05952380952381e-06, | |
| "loss": 1.9763, | |
| "step": 23650 | |
| }, | |
| { | |
| "epoch": 1.81, | |
| "learning_rate": 4.96031746031746e-06, | |
| "loss": 2.016, | |
| "step": 23700 | |
| }, | |
| { | |
| "epoch": 1.81, | |
| "learning_rate": 4.861111111111111e-06, | |
| "loss": 2.0365, | |
| "step": 23750 | |
| }, | |
| { | |
| "epoch": 1.82, | |
| "learning_rate": 4.7619047619047615e-06, | |
| "loss": 1.9687, | |
| "step": 23800 | |
| }, | |
| { | |
| "epoch": 1.82, | |
| "learning_rate": 4.662698412698413e-06, | |
| "loss": 1.9616, | |
| "step": 23850 | |
| }, | |
| { | |
| "epoch": 1.82, | |
| "learning_rate": 4.563492063492064e-06, | |
| "loss": 2.0307, | |
| "step": 23900 | |
| }, | |
| { | |
| "epoch": 1.83, | |
| "learning_rate": 4.464285714285715e-06, | |
| "loss": 2.0172, | |
| "step": 23950 | |
| }, | |
| { | |
| "epoch": 1.83, | |
| "learning_rate": 4.365079365079365e-06, | |
| "loss": 2.0068, | |
| "step": 24000 | |
| }, | |
| { | |
| "epoch": 1.84, | |
| "learning_rate": 4.265873015873016e-06, | |
| "loss": 2.042, | |
| "step": 24050 | |
| }, | |
| { | |
| "epoch": 1.84, | |
| "learning_rate": 4.166666666666667e-06, | |
| "loss": 1.9672, | |
| "step": 24100 | |
| }, | |
| { | |
| "epoch": 1.84, | |
| "learning_rate": 4.067460317460318e-06, | |
| "loss": 2.1522, | |
| "step": 24150 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "learning_rate": 3.968253968253968e-06, | |
| "loss": 1.9359, | |
| "step": 24200 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "learning_rate": 3.869047619047619e-06, | |
| "loss": 2.0953, | |
| "step": 24250 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "learning_rate": 3.7698412698412697e-06, | |
| "loss": 2.036, | |
| "step": 24300 | |
| }, | |
| { | |
| "epoch": 1.86, | |
| "learning_rate": 3.6706349206349208e-06, | |
| "loss": 1.8848, | |
| "step": 24350 | |
| }, | |
| { | |
| "epoch": 1.86, | |
| "learning_rate": 3.5714285714285714e-06, | |
| "loss": 2.0225, | |
| "step": 24400 | |
| }, | |
| { | |
| "epoch": 1.87, | |
| "learning_rate": 3.4722222222222224e-06, | |
| "loss": 1.994, | |
| "step": 24450 | |
| }, | |
| { | |
| "epoch": 1.87, | |
| "learning_rate": 3.373015873015873e-06, | |
| "loss": 2.0587, | |
| "step": 24500 | |
| }, | |
| { | |
| "epoch": 1.87, | |
| "learning_rate": 3.273809523809524e-06, | |
| "loss": 2.0663, | |
| "step": 24550 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "learning_rate": 3.1746031746031746e-06, | |
| "loss": 1.9933, | |
| "step": 24600 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "learning_rate": 3.0753968253968252e-06, | |
| "loss": 2.0767, | |
| "step": 24650 | |
| }, | |
| { | |
| "epoch": 1.89, | |
| "learning_rate": 2.9761904761904763e-06, | |
| "loss": 2.0258, | |
| "step": 24700 | |
| }, | |
| { | |
| "epoch": 1.89, | |
| "learning_rate": 2.876984126984127e-06, | |
| "loss": 1.9668, | |
| "step": 24750 | |
| }, | |
| { | |
| "epoch": 1.89, | |
| "learning_rate": 2.777777777777778e-06, | |
| "loss": 2.1309, | |
| "step": 24800 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "learning_rate": 2.6785714285714285e-06, | |
| "loss": 1.9892, | |
| "step": 24850 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "learning_rate": 2.5793650793650795e-06, | |
| "loss": 2.0322, | |
| "step": 24900 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "learning_rate": 2.48015873015873e-06, | |
| "loss": 2.0309, | |
| "step": 24950 | |
| }, | |
| { | |
| "epoch": 1.91, | |
| "learning_rate": 2.3809523809523808e-06, | |
| "loss": 2.0231, | |
| "step": 25000 | |
| }, | |
| { | |
| "epoch": 1.91, | |
| "learning_rate": 2.281746031746032e-06, | |
| "loss": 2.0231, | |
| "step": 25050 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "learning_rate": 2.1825396825396824e-06, | |
| "loss": 1.9826, | |
| "step": 25100 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "learning_rate": 2.0833333333333334e-06, | |
| "loss": 1.9962, | |
| "step": 25150 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "learning_rate": 1.984126984126984e-06, | |
| "loss": 2.1486, | |
| "step": 25200 | |
| }, | |
| { | |
| "epoch": 1.93, | |
| "learning_rate": 1.8849206349206349e-06, | |
| "loss": 2.0243, | |
| "step": 25250 | |
| }, | |
| { | |
| "epoch": 1.93, | |
| "learning_rate": 1.7857142857142857e-06, | |
| "loss": 2.1495, | |
| "step": 25300 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "learning_rate": 1.6865079365079365e-06, | |
| "loss": 2.0119, | |
| "step": 25350 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "learning_rate": 1.5873015873015873e-06, | |
| "loss": 2.0651, | |
| "step": 25400 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "learning_rate": 1.4880952380952381e-06, | |
| "loss": 1.987, | |
| "step": 25450 | |
| }, | |
| { | |
| "epoch": 1.95, | |
| "learning_rate": 1.388888888888889e-06, | |
| "loss": 2.077, | |
| "step": 25500 | |
| } | |
| ], | |
| "logging_steps": 50, | |
| "max_steps": 26200, | |
| "num_train_epochs": 2, | |
| "save_steps": 1500, | |
| "total_flos": 5.393246846976e+16, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |