{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 32.52244048393391,
  "global_step": 1000000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02,
      "learning_rate": 5.000000000000001e-07,
      "loss": 10.4451,
      "step": 500
    },
    {
      "epoch": 0.03,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 10.1476,
      "step": 1000
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.5e-06,
      "loss": 9.8078,
      "step": 1500
    },
    {
      "epoch": 0.07,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 9.4816,
      "step": 2000
    },
    {
      "epoch": 0.08,
      "learning_rate": 2.5e-06,
      "loss": 9.1839,
      "step": 2500
    },
    {
      "epoch": 0.1,
      "learning_rate": 3e-06,
      "loss": 8.9761,
      "step": 3000
    },
    {
      "epoch": 0.11,
      "learning_rate": 3.5e-06,
      "loss": 8.863,
      "step": 3500
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.000000000000001e-06,
      "loss": 8.8057,
      "step": 4000
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.5e-06,
      "loss": 8.7534,
      "step": 4500
    },
    {
      "epoch": 0.16,
      "learning_rate": 5e-06,
      "loss": 8.6688,
      "step": 5000
    },
    {
      "epoch": 0.18,
      "learning_rate": 5.500000000000001e-06,
      "loss": 8.5886,
      "step": 5500
    },
    {
      "epoch": 0.2,
      "learning_rate": 6e-06,
      "loss": 8.5122,
      "step": 6000
    },
    {
      "epoch": 0.21,
      "learning_rate": 6.5000000000000004e-06,
      "loss": 8.4138,
      "step": 6500
    },
    {
      "epoch": 0.23,
      "learning_rate": 7e-06,
      "loss": 8.2859,
      "step": 7000
    },
    {
      "epoch": 0.24,
      "learning_rate": 7.500000000000001e-06,
      "loss": 8.1421,
      "step": 7500
    },
    {
      "epoch": 0.26,
      "learning_rate": 8.000000000000001e-06,
      "loss": 7.9556,
      "step": 8000
    },
    {
      "epoch": 0.28,
      "learning_rate": 8.5e-06,
      "loss": 7.7768,
      "step": 8500
    },
    {
      "epoch": 0.29,
      "learning_rate": 9e-06,
      "loss": 7.6378,
      "step": 9000
    },
    {
      "epoch": 0.31,
      "learning_rate": 9.5e-06,
      "loss": 7.5256,
      "step": 9500
    },
    {
      "epoch": 0.33,
      "learning_rate": 1e-05,
      "loss": 7.4265,
      "step": 10000
    },
    {
      "epoch": 0.34,
      "learning_rate": 9.994949494949497e-06,
      "loss": 7.3388,
      "step": 10500
    },
    {
      "epoch": 0.36,
      "learning_rate": 9.989898989898991e-06,
      "loss": 7.2598,
      "step": 11000
    },
    {
      "epoch": 0.37,
      "learning_rate": 9.984848484848485e-06,
      "loss": 7.1871,
      "step": 11500
    },
    {
      "epoch": 0.39,
      "learning_rate": 9.97979797979798e-06,
      "loss": 7.118,
      "step": 12000
    },
    {
      "epoch": 0.41,
      "learning_rate": 9.974747474747476e-06,
      "loss": 7.053,
      "step": 12500
    },
    {
      "epoch": 0.42,
      "learning_rate": 9.96969696969697e-06,
      "loss": 6.9952,
      "step": 13000
    },
    {
      "epoch": 0.44,
      "learning_rate": 9.964646464646466e-06,
      "loss": 6.937,
      "step": 13500
    },
    {
      "epoch": 0.46,
      "learning_rate": 9.95959595959596e-06,
      "loss": 6.8813,
      "step": 14000
    },
    {
      "epoch": 0.47,
      "learning_rate": 9.954545454545456e-06,
      "loss": 6.8295,
      "step": 14500
    },
    {
      "epoch": 0.49,
      "learning_rate": 9.94949494949495e-06,
      "loss": 6.7739,
      "step": 15000
    },
    {
      "epoch": 0.5,
      "learning_rate": 9.944444444444445e-06,
      "loss": 6.7107,
      "step": 15500
    },
    {
      "epoch": 0.52,
      "learning_rate": 9.939393939393939e-06,
      "loss": 6.6159,
      "step": 16000
    },
    {
      "epoch": 0.54,
      "learning_rate": 9.934343434343435e-06,
      "loss": 6.4972,
      "step": 16500
    },
    {
      "epoch": 0.55,
      "learning_rate": 9.92929292929293e-06,
      "loss": 6.3819,
      "step": 17000
    },
    {
      "epoch": 0.57,
      "learning_rate": 9.924242424242425e-06,
      "loss": 6.2707,
      "step": 17500
    },
    {
      "epoch": 0.59,
      "learning_rate": 9.91919191919192e-06,
      "loss": 6.1748,
      "step": 18000
    },
    {
      "epoch": 0.6,
      "learning_rate": 9.914141414141416e-06,
      "loss": 6.0899,
      "step": 18500
    },
    {
      "epoch": 0.62,
      "learning_rate": 9.90909090909091e-06,
      "loss": 6.0077,
      "step": 19000
    },
    {
      "epoch": 0.63,
      "learning_rate": 9.904040404040404e-06,
      "loss": 5.93,
      "step": 19500
    },
    {
      "epoch": 0.65,
      "learning_rate": 9.8989898989899e-06,
      "loss": 5.8585,
      "step": 20000
    },
    {
      "epoch": 0.67,
      "learning_rate": 9.893939393939395e-06,
      "loss": 5.788,
      "step": 20500
    },
    {
      "epoch": 0.68,
      "learning_rate": 9.88888888888889e-06,
      "loss": 5.7231,
      "step": 21000
    },
    {
      "epoch": 0.7,
      "learning_rate": 9.883838383838385e-06,
      "loss": 5.6584,
      "step": 21500
    },
    {
      "epoch": 0.72,
      "learning_rate": 9.87878787878788e-06,
      "loss": 5.5957,
      "step": 22000
    },
    {
      "epoch": 0.73,
      "learning_rate": 9.873737373737373e-06,
      "loss": 5.5366,
      "step": 22500
    },
    {
      "epoch": 0.75,
      "learning_rate": 9.86868686868687e-06,
      "loss": 5.4811,
      "step": 23000
    },
    {
      "epoch": 0.76,
      "learning_rate": 9.863636363636364e-06,
      "loss": 5.4286,
      "step": 23500
    },
    {
      "epoch": 0.78,
      "learning_rate": 9.85858585858586e-06,
      "loss": 5.3739,
      "step": 24000
    },
    {
      "epoch": 0.8,
      "learning_rate": 9.853535353535354e-06,
      "loss": 5.3187,
      "step": 24500
    },
    {
      "epoch": 0.81,
      "learning_rate": 9.84848484848485e-06,
      "loss": 5.2669,
      "step": 25000
    },
    {
      "epoch": 0.83,
      "learning_rate": 9.843434343434344e-06,
      "loss": 5.2168,
      "step": 25500
    },
    {
      "epoch": 0.85,
      "learning_rate": 9.838383838383839e-06,
      "loss": 5.1705,
      "step": 26000
    },
    {
      "epoch": 0.86,
      "learning_rate": 9.833333333333333e-06,
      "loss": 5.1234,
      "step": 26500
    },
    {
      "epoch": 0.88,
      "learning_rate": 9.828282828282829e-06,
      "loss": 5.0796,
      "step": 27000
    },
    {
      "epoch": 0.89,
      "learning_rate": 9.823232323232325e-06,
      "loss": 5.0307,
      "step": 27500
    },
    {
      "epoch": 0.91,
      "learning_rate": 9.81818181818182e-06,
      "loss": 4.9889,
      "step": 28000
    },
    {
      "epoch": 0.93,
      "learning_rate": 9.813131313131315e-06,
      "loss": 4.9411,
      "step": 28500
    },
    {
      "epoch": 0.94,
      "learning_rate": 9.80808080808081e-06,
      "loss": 4.9048,
      "step": 29000
    },
    {
      "epoch": 0.96,
      "learning_rate": 9.803030303030304e-06,
      "loss": 4.865,
      "step": 29500
    },
    {
      "epoch": 0.98,
      "learning_rate": 9.797979797979798e-06,
      "loss": 4.8235,
      "step": 30000
    },
    {
      "epoch": 0.99,
      "learning_rate": 9.792929292929294e-06,
      "loss": 4.7869,
      "step": 30500
    },
    {
      "epoch": 1.01,
      "learning_rate": 9.787878787878788e-06,
      "loss": 4.7453,
      "step": 31000
    },
    {
      "epoch": 1.02,
      "learning_rate": 9.782828282828284e-06,
      "loss": 4.7062,
      "step": 31500
    },
    {
      "epoch": 1.04,
      "learning_rate": 9.777777777777779e-06,
      "loss": 4.6705,
      "step": 32000
    },
    {
      "epoch": 1.06,
      "learning_rate": 9.772727272727273e-06,
      "loss": 4.636,
      "step": 32500
    },
    {
      "epoch": 1.07,
      "learning_rate": 9.767676767676767e-06,
      "loss": 4.5991,
      "step": 33000
    },
    {
      "epoch": 1.09,
      "learning_rate": 9.762626262626263e-06,
      "loss": 4.5671,
      "step": 33500
    },
    {
      "epoch": 1.11,
      "learning_rate": 9.757575757575758e-06,
      "loss": 4.5312,
      "step": 34000
    },
    {
      "epoch": 1.12,
      "learning_rate": 9.752525252525254e-06,
      "loss": 4.4905,
      "step": 34500
    },
    {
      "epoch": 1.14,
      "learning_rate": 9.747474747474748e-06,
      "loss": 4.4666,
      "step": 35000
    },
    {
      "epoch": 1.15,
      "learning_rate": 9.742424242424244e-06,
      "loss": 4.4342,
      "step": 35500
    },
    {
      "epoch": 1.17,
      "learning_rate": 9.737373737373738e-06,
      "loss": 4.4024,
      "step": 36000
    },
    {
      "epoch": 1.19,
      "learning_rate": 9.732323232323232e-06,
      "loss": 4.3676,
      "step": 36500
    },
    {
      "epoch": 1.2,
      "learning_rate": 9.727272727272728e-06,
      "loss": 4.3405,
      "step": 37000
    },
    {
      "epoch": 1.22,
      "learning_rate": 9.722222222222223e-06,
      "loss": 4.3077,
      "step": 37500
    },
    {
      "epoch": 1.24,
      "learning_rate": 9.717171717171719e-06,
      "loss": 4.2787,
      "step": 38000
    },
    {
      "epoch": 1.25,
      "learning_rate": 9.712121212121213e-06,
      "loss": 4.2466,
      "step": 38500
    },
    {
      "epoch": 1.27,
      "learning_rate": 9.707070707070709e-06,
      "loss": 4.2114,
      "step": 39000
    },
    {
      "epoch": 1.28,
      "learning_rate": 9.702020202020203e-06,
      "loss": 4.1992,
      "step": 39500
    },
    {
      "epoch": 1.3,
      "learning_rate": 9.696969696969698e-06,
      "loss": 4.1587,
      "step": 40000
    },
    {
      "epoch": 1.32,
      "learning_rate": 9.691919191919192e-06,
      "loss": 4.1242,
      "step": 40500
    },
    {
      "epoch": 1.33,
      "learning_rate": 9.686868686868688e-06,
      "loss": 4.0931,
      "step": 41000
    },
    {
      "epoch": 1.35,
      "learning_rate": 9.681818181818182e-06,
      "loss": 4.0625,
      "step": 41500
    },
    {
      "epoch": 1.37,
      "learning_rate": 9.676767676767678e-06,
      "loss": 4.0334,
      "step": 42000
    },
    {
      "epoch": 1.38,
      "learning_rate": 9.671717171717172e-06,
      "loss": 3.9965,
      "step": 42500
    },
    {
      "epoch": 1.4,
      "learning_rate": 9.666666666666667e-06,
      "loss": 3.9718,
      "step": 43000
    },
    {
      "epoch": 1.41,
      "learning_rate": 9.661616161616163e-06,
      "loss": 3.9466,
      "step": 43500
    },
    {
      "epoch": 1.43,
      "learning_rate": 9.656565656565657e-06,
      "loss": 3.9127,
      "step": 44000
    },
    {
      "epoch": 1.45,
      "learning_rate": 9.651515151515153e-06,
      "loss": 3.8866,
      "step": 44500
    },
    {
      "epoch": 1.46,
      "learning_rate": 9.646464646464647e-06,
      "loss": 3.856,
      "step": 45000
    },
    {
      "epoch": 1.48,
      "learning_rate": 9.641414141414143e-06,
      "loss": 3.8336,
      "step": 45500
    },
    {
      "epoch": 1.5,
      "learning_rate": 9.636363636363638e-06,
      "loss": 3.8032,
      "step": 46000
    },
    {
      "epoch": 1.51,
      "learning_rate": 9.631313131313132e-06,
      "loss": 3.7802,
      "step": 46500
    },
    {
      "epoch": 1.53,
      "learning_rate": 9.626262626262626e-06,
      "loss": 3.7482,
      "step": 47000
    },
    {
      "epoch": 1.54,
      "learning_rate": 9.621212121212122e-06,
      "loss": 3.7237,
      "step": 47500
    },
    {
      "epoch": 1.56,
      "learning_rate": 9.616161616161616e-06,
      "loss": 3.701,
      "step": 48000
    },
    {
      "epoch": 1.58,
      "learning_rate": 9.611111111111112e-06,
      "loss": 3.6887,
      "step": 48500
    },
    {
      "epoch": 1.59,
      "learning_rate": 9.606060606060607e-06,
      "loss": 3.6558,
      "step": 49000
    },
    {
      "epoch": 1.61,
      "learning_rate": 9.601010101010103e-06,
      "loss": 3.625,
      "step": 49500
    },
    {
      "epoch": 1.63,
      "learning_rate": 9.595959595959597e-06,
      "loss": 3.6123,
      "step": 50000
    },
    {
      "epoch": 1.64,
      "learning_rate": 9.590909090909091e-06,
      "loss": 3.5798,
      "step": 50500
    },
    {
      "epoch": 1.66,
      "learning_rate": 9.585858585858586e-06,
      "loss": 3.5625,
      "step": 51000
    },
    {
      "epoch": 1.67,
      "learning_rate": 9.580808080808082e-06,
      "loss": 3.5352,
      "step": 51500
    },
    {
      "epoch": 1.69,
      "learning_rate": 9.575757575757576e-06,
      "loss": 3.5153,
      "step": 52000
    },
    {
      "epoch": 1.71,
      "learning_rate": 9.570707070707072e-06,
      "loss": 3.492,
      "step": 52500
    },
    {
      "epoch": 1.72,
      "learning_rate": 9.565656565656566e-06,
      "loss": 3.4653,
      "step": 53000
    },
    {
      "epoch": 1.74,
      "learning_rate": 9.56060606060606e-06,
      "loss": 3.4461,
      "step": 53500
    },
    {
      "epoch": 1.76,
      "learning_rate": 9.555555555555556e-06,
      "loss": 3.4219,
      "step": 54000
    },
    {
      "epoch": 1.77,
      "learning_rate": 9.55050505050505e-06,
      "loss": 3.4078,
      "step": 54500
    },
    {
      "epoch": 1.79,
      "learning_rate": 9.545454545454547e-06,
      "loss": 3.3775,
      "step": 55000
    },
    {
      "epoch": 1.8,
      "learning_rate": 9.540404040404041e-06,
      "loss": 3.3512,
      "step": 55500
    },
    {
      "epoch": 1.82,
      "learning_rate": 9.535353535353537e-06,
      "loss": 3.3344,
      "step": 56000
    },
    {
      "epoch": 1.84,
      "learning_rate": 9.530303030303031e-06,
      "loss": 3.3091,
      "step": 56500
    },
    {
      "epoch": 1.85,
      "learning_rate": 9.525252525252526e-06,
      "loss": 3.2932,
      "step": 57000
    },
    {
      "epoch": 1.87,
      "learning_rate": 9.52020202020202e-06,
      "loss": 3.2704,
      "step": 57500
    },
    {
      "epoch": 1.89,
      "learning_rate": 9.515151515151516e-06,
      "loss": 3.249,
      "step": 58000
    },
    {
      "epoch": 1.9,
      "learning_rate": 9.51010101010101e-06,
      "loss": 3.2267,
      "step": 58500
    },
    {
      "epoch": 1.92,
      "learning_rate": 9.505050505050506e-06,
      "loss": 3.2072,
      "step": 59000
    },
    {
      "epoch": 1.94,
      "learning_rate": 9.5e-06,
      "loss": 3.1906,
      "step": 59500
    },
    {
      "epoch": 1.95,
      "learning_rate": 9.494949494949497e-06,
      "loss": 3.1637,
      "step": 60000
    },
    {
      "epoch": 1.97,
      "learning_rate": 9.48989898989899e-06,
      "loss": 3.1467,
      "step": 60500
    },
    {
      "epoch": 1.98,
      "learning_rate": 9.484848484848485e-06,
      "loss": 3.1268,
      "step": 61000
    },
    {
      "epoch": 2.0,
      "learning_rate": 9.479797979797981e-06,
      "loss": 3.1122,
      "step": 61500
    },
    {
      "epoch": 2.02,
      "learning_rate": 9.474747474747475e-06,
      "loss": 3.0908,
      "step": 62000
    },
    {
      "epoch": 2.03,
      "learning_rate": 9.469696969696971e-06,
      "loss": 3.0709,
      "step": 62500
    },
    {
      "epoch": 2.05,
      "learning_rate": 9.464646464646466e-06,
      "loss": 3.0544,
      "step": 63000
    },
    {
      "epoch": 2.07,
      "learning_rate": 9.45959595959596e-06,
      "loss": 3.0413,
      "step": 63500
    },
    {
      "epoch": 2.08,
      "learning_rate": 9.454545454545456e-06,
      "loss": 3.0243,
      "step": 64000
    },
    {
      "epoch": 2.1,
      "learning_rate": 9.44949494949495e-06,
      "loss": 2.9992,
      "step": 64500
    },
    {
      "epoch": 2.11,
      "learning_rate": 9.444444444444445e-06,
      "loss": 2.9798,
      "step": 65000
    },
    {
      "epoch": 2.13,
      "learning_rate": 9.43939393939394e-06,
      "loss": 2.9626,
      "step": 65500
    },
    {
      "epoch": 2.15,
      "learning_rate": 9.434343434343435e-06,
      "loss": 2.9465,
      "step": 66000
    },
    {
      "epoch": 2.16,
      "learning_rate": 9.42929292929293e-06,
      "loss": 2.927,
      "step": 66500
    },
    {
      "epoch": 2.18,
      "learning_rate": 9.424242424242425e-06,
      "loss": 2.9166,
      "step": 67000
    },
    {
      "epoch": 2.2,
      "learning_rate": 9.41919191919192e-06,
      "loss": 2.9035,
      "step": 67500
    },
    {
      "epoch": 2.21,
      "learning_rate": 9.414141414141414e-06,
      "loss": 2.8826,
      "step": 68000
    },
    {
      "epoch": 2.23,
      "learning_rate": 9.40909090909091e-06,
      "loss": 2.8696,
      "step": 68500
    },
    {
      "epoch": 2.24,
      "learning_rate": 9.404040404040404e-06,
      "loss": 2.8516,
      "step": 69000
    },
    {
      "epoch": 2.26,
      "learning_rate": 9.3989898989899e-06,
      "loss": 2.8337,
      "step": 69500
    },
    {
      "epoch": 2.28,
      "learning_rate": 9.393939393939396e-06,
      "loss": 2.82,
      "step": 70000
    },
    {
      "epoch": 2.29,
      "learning_rate": 9.38888888888889e-06,
      "loss": 2.8056,
      "step": 70500
    },
    {
      "epoch": 2.31,
      "learning_rate": 9.383838383838385e-06,
      "loss": 2.7897,
      "step": 71000
    },
    {
      "epoch": 2.33,
      "learning_rate": 9.378787878787879e-06,
      "loss": 2.7751,
      "step": 71500
    },
    {
      "epoch": 2.34,
      "learning_rate": 9.373737373737375e-06,
      "loss": 2.7571,
      "step": 72000
    },
    {
      "epoch": 2.36,
      "learning_rate": 9.36868686868687e-06,
      "loss": 2.7505,
      "step": 72500
    },
    {
      "epoch": 2.37,
      "learning_rate": 9.363636363636365e-06,
      "loss": 2.736,
      "step": 73000
    },
    {
      "epoch": 2.39,
      "learning_rate": 9.35858585858586e-06,
      "loss": 2.7176,
      "step": 73500
    },
    {
      "epoch": 2.41,
      "learning_rate": 9.353535353535354e-06,
      "loss": 2.7094,
      "step": 74000
    },
    {
      "epoch": 2.42,
      "learning_rate": 9.34848484848485e-06,
      "loss": 2.6899,
      "step": 74500
    },
    {
      "epoch": 2.44,
      "learning_rate": 9.343434343434344e-06,
      "loss": 2.6751,
      "step": 75000
    },
    {
      "epoch": 2.46,
      "learning_rate": 9.338383838383838e-06,
      "loss": 2.6588,
      "step": 75500
    },
    {
      "epoch": 2.47,
      "learning_rate": 9.333333333333334e-06,
      "loss": 2.6447,
      "step": 76000
    },
    {
      "epoch": 2.49,
      "learning_rate": 9.328282828282829e-06,
      "loss": 2.6373,
      "step": 76500
    },
    {
      "epoch": 2.5,
      "learning_rate": 9.323232323232325e-06,
      "loss": 2.6218,
      "step": 77000
    },
    {
      "epoch": 2.52,
      "learning_rate": 9.318181818181819e-06,
      "loss": 2.6093,
      "step": 77500
    },
    {
      "epoch": 2.54,
      "learning_rate": 9.313131313131313e-06,
      "loss": 2.5918,
      "step": 78000
    },
    {
      "epoch": 2.55,
      "learning_rate": 9.30808080808081e-06,
      "loss": 2.5866,
      "step": 78500
    },
    {
      "epoch": 2.57,
      "learning_rate": 9.303030303030303e-06,
      "loss": 2.5693,
      "step": 79000
    },
    {
      "epoch": 2.59,
      "learning_rate": 9.2979797979798e-06,
      "loss": 2.5579,
      "step": 79500
    },
    {
      "epoch": 2.6,
      "learning_rate": 9.292929292929294e-06,
      "loss": 2.5494,
      "step": 80000
    },
    {
      "epoch": 2.62,
      "learning_rate": 9.28787878787879e-06,
      "loss": 2.5327,
      "step": 80500
    },
    {
      "epoch": 2.63,
      "learning_rate": 9.282828282828284e-06,
      "loss": 2.5212,
      "step": 81000
    },
    {
      "epoch": 2.65,
      "learning_rate": 9.277777777777778e-06,
      "loss": 2.5063,
      "step": 81500
    },
    {
      "epoch": 2.67,
      "learning_rate": 9.272727272727273e-06,
      "loss": 2.4969,
      "step": 82000
    },
    {
      "epoch": 2.68,
      "learning_rate": 9.267676767676769e-06,
      "loss": 2.4869,
      "step": 82500
    },
    {
      "epoch": 2.7,
      "learning_rate": 9.262626262626263e-06,
      "loss": 2.4717,
      "step": 83000
    },
    {
      "epoch": 2.72,
      "learning_rate": 9.257575757575759e-06,
      "loss": 2.4601,
      "step": 83500
    },
    {
      "epoch": 2.73,
      "learning_rate": 9.252525252525253e-06,
      "loss": 2.4501,
      "step": 84000
    },
    {
      "epoch": 2.75,
      "learning_rate": 9.24747474747475e-06,
      "loss": 2.4375,
      "step": 84500
    },
    {
      "epoch": 2.76,
      "learning_rate": 9.242424242424244e-06,
      "loss": 2.4225,
      "step": 85000
    },
    {
      "epoch": 2.78,
      "learning_rate": 9.237373737373738e-06,
      "loss": 2.4121,
      "step": 85500
    },
    {
      "epoch": 2.8,
      "learning_rate": 9.232323232323232e-06,
      "loss": 2.4009,
      "step": 86000
    },
    {
      "epoch": 2.81,
      "learning_rate": 9.227272727272728e-06,
      "loss": 2.3894,
      "step": 86500
    },
    {
      "epoch": 2.83,
      "learning_rate": 9.222222222222224e-06,
      "loss": 2.3715,
      "step": 87000
    },
    {
      "epoch": 2.85,
      "learning_rate": 9.217171717171718e-06,
      "loss": 2.3563,
      "step": 87500
    },
    {
      "epoch": 2.86,
      "learning_rate": 9.212121212121213e-06,
      "loss": 2.3484,
      "step": 88000
    },
    {
      "epoch": 2.88,
      "learning_rate": 9.207070707070707e-06,
      "loss": 2.3344,
      "step": 88500
    },
    {
      "epoch": 2.89,
      "learning_rate": 9.202020202020203e-06,
      "loss": 2.3213,
      "step": 89000
    },
    {
      "epoch": 2.91,
      "learning_rate": 9.196969696969697e-06,
      "loss": 2.3136,
      "step": 89500
    },
    {
      "epoch": 2.93,
      "learning_rate": 9.191919191919193e-06,
      "loss": 2.2937,
      "step": 90000
    },
    {
      "epoch": 2.94,
      "learning_rate": 9.186868686868688e-06,
      "loss": 2.2817,
      "step": 90500
    },
    {
      "epoch": 2.96,
      "learning_rate": 9.181818181818184e-06,
      "loss": 2.2733,
      "step": 91000
    },
    {
      "epoch": 2.98,
      "learning_rate": 9.176767676767678e-06,
      "loss": 2.2603,
      "step": 91500
    },
    {
      "epoch": 2.99,
      "learning_rate": 9.171717171717172e-06,
      "loss": 2.2498,
      "step": 92000
    },
    {
      "epoch": 3.01,
      "learning_rate": 9.166666666666666e-06,
      "loss": 2.2374,
      "step": 92500
    },
    {
      "epoch": 3.02,
      "learning_rate": 9.161616161616162e-06,
      "loss": 2.2276,
      "step": 93000
    },
    {
      "epoch": 3.04,
      "learning_rate": 9.156565656565657e-06,
      "loss": 2.2111,
      "step": 93500
    },
    {
      "epoch": 3.06,
      "learning_rate": 9.151515151515153e-06,
      "loss": 2.2021,
      "step": 94000
    },
    {
      "epoch": 3.07,
      "learning_rate": 9.146464646464647e-06,
      "loss": 2.1931,
      "step": 94500
    },
    {
      "epoch": 3.09,
      "learning_rate": 9.141414141414143e-06,
      "loss": 2.1768,
      "step": 95000
    },
    {
      "epoch": 3.11,
      "learning_rate": 9.136363636363637e-06,
      "loss": 2.1694,
      "step": 95500
    },
    {
      "epoch": 3.12,
      "learning_rate": 9.131313131313132e-06,
      "loss": 2.158,
      "step": 96000
    },
    {
      "epoch": 3.14,
      "learning_rate": 9.126262626262628e-06,
      "loss": 2.1448,
      "step": 96500
    },
    {
      "epoch": 3.15,
      "learning_rate": 9.121212121212122e-06,
      "loss": 2.1369,
      "step": 97000
    },
    {
      "epoch": 3.17,
      "learning_rate": 9.116161616161618e-06,
      "loss": 2.1274,
      "step": 97500
    },
    {
      "epoch": 3.19,
      "learning_rate": 9.111111111111112e-06,
      "loss": 2.1111,
      "step": 98000
    },
    {
      "epoch": 3.2,
      "learning_rate": 9.106060606060606e-06,
      "loss": 2.1003,
      "step": 98500
    },
    {
      "epoch": 3.22,
      "learning_rate": 9.1010101010101e-06,
      "loss": 2.0863,
      "step": 99000
    },
    {
      "epoch": 3.24,
      "learning_rate": 9.095959595959597e-06,
      "loss": 2.079,
      "step": 99500
    },
    {
      "epoch": 3.25,
      "learning_rate": 9.090909090909091e-06,
      "loss": 2.0716,
      "step": 100000
    },
    {
      "epoch": 3.27,
      "learning_rate": 9.085858585858587e-06,
      "loss": 2.0595,
      "step": 100500
    },
    {
      "epoch": 3.28,
      "learning_rate": 9.080808080808081e-06,
      "loss": 2.0456,
      "step": 101000
    },
    {
      "epoch": 3.3,
      "learning_rate": 9.075757575757577e-06,
      "loss": 2.0385,
      "step": 101500
    },
    {
      "epoch": 3.32,
      "learning_rate": 9.070707070707072e-06,
      "loss": 2.0254,
      "step": 102000
    },
    {
      "epoch": 3.33,
      "learning_rate": 9.065656565656566e-06,
      "loss": 2.0141,
      "step": 102500
    },
    {
      "epoch": 3.35,
      "learning_rate": 9.06060606060606e-06,
      "loss": 2.0025,
      "step": 103000
    },
    {
      "epoch": 3.37,
      "learning_rate": 9.055555555555556e-06,
      "loss": 1.993,
      "step": 103500
    },
    {
      "epoch": 3.38,
      "learning_rate": 9.050505050505052e-06,
      "loss": 1.9813,
      "step": 104000
    },
    {
      "epoch": 3.4,
      "learning_rate": 9.045454545454546e-06,
      "loss": 1.9709,
      "step": 104500
    },
    {
      "epoch": 3.41,
      "learning_rate": 9.040404040404042e-06,
      "loss": 1.9632,
      "step": 105000
    },
    {
      "epoch": 3.43,
      "learning_rate": 9.035353535353537e-06,
      "loss": 1.9532,
      "step": 105500
    },
    {
      "epoch": 3.45,
      "learning_rate": 9.030303030303031e-06,
      "loss": 1.9382,
      "step": 106000
    },
    {
      "epoch": 3.46,
      "learning_rate": 9.025252525252525e-06,
      "loss": 1.9306,
      "step": 106500
    },
    {
      "epoch": 3.48,
      "learning_rate": 9.020202020202021e-06,
      "loss": 1.919,
      "step": 107000
    },
    {
      "epoch": 3.5,
      "learning_rate": 9.015151515151516e-06,
      "loss": 1.9093,
      "step": 107500
    },
    {
      "epoch": 3.51,
      "learning_rate": 9.010101010101012e-06,
      "loss": 1.9006,
      "step": 108000
    },
    {
      "epoch": 3.53,
      "learning_rate": 9.005050505050506e-06,
      "loss": 1.8927,
      "step": 108500
    },
    {
      "epoch": 3.54,
      "learning_rate": 9e-06,
      "loss": 1.88,
      "step": 109000
    },
    {
      "epoch": 3.56,
      "learning_rate": 8.994949494949495e-06,
      "loss": 1.8681,
      "step": 109500
    },
    {
      "epoch": 3.58,
      "learning_rate": 8.98989898989899e-06,
      "loss": 1.8623,
      "step": 110000
    },
    {
      "epoch": 3.59,
      "learning_rate": 8.984848484848485e-06,
      "loss": 1.8515,
      "step": 110500
    },
    {
      "epoch": 3.61,
      "learning_rate": 8.97979797979798e-06,
      "loss": 1.8424,
      "step": 111000
    },
    {
      "epoch": 3.63,
      "learning_rate": 8.974747474747475e-06,
      "loss": 1.8322,
      "step": 111500
    },
    {
      "epoch": 3.64,
      "learning_rate": 8.969696969696971e-06,
      "loss": 1.8241,
      "step": 112000
    },
    {
      "epoch": 3.66,
      "learning_rate": 8.964646464646465e-06,
      "loss": 1.8149,
      "step": 112500
    },
    {
      "epoch": 3.68,
      "learning_rate": 8.95959595959596e-06,
      "loss": 1.8053,
      "step": 113000
    },
    {
      "epoch": 3.69,
      "learning_rate": 8.954545454545456e-06,
      "loss": 1.7971,
      "step": 113500
    },
    {
      "epoch": 3.71,
      "learning_rate": 8.94949494949495e-06,
      "loss": 1.789,
      "step": 114000
    },
    {
      "epoch": 3.72,
      "learning_rate": 8.944444444444446e-06,
      "loss": 1.7804,
      "step": 114500
    },
    {
      "epoch": 3.74,
      "learning_rate": 8.93939393939394e-06,
      "loss": 1.7719,
      "step": 115000
    },
    {
      "epoch": 3.76,
      "learning_rate": 8.934343434343436e-06,
      "loss": 1.7608,
      "step": 115500
    },
    {
      "epoch": 3.77,
      "learning_rate": 8.92929292929293e-06,
      "loss": 1.7512,
      "step": 116000
    },
    {
      "epoch": 3.79,
      "learning_rate": 8.924242424242425e-06,
      "loss": 1.7448,
      "step": 116500
    },
    {
      "epoch": 3.81,
      "learning_rate": 8.919191919191919e-06,
      "loss": 1.7366,
      "step": 117000
    },
    {
      "epoch": 3.82,
      "learning_rate": 8.914141414141415e-06,
      "loss": 1.7252,
      "step": 117500
    },
    {
      "epoch": 3.84,
      "learning_rate": 8.90909090909091e-06,
      "loss": 1.7187,
      "step": 118000
    },
    {
      "epoch": 3.85,
      "learning_rate": 8.904040404040405e-06,
      "loss": 1.7151,
      "step": 118500
    },
    {
      "epoch": 3.87,
      "learning_rate": 8.8989898989899e-06,
      "loss": 1.7092,
      "step": 119000
    },
    {
      "epoch": 3.89,
      "learning_rate": 8.893939393939394e-06,
      "loss": 1.6961,
      "step": 119500
    },
    {
      "epoch": 3.9,
      "learning_rate": 8.888888888888888e-06,
      "loss": 1.6896,
      "step": 120000
    },
    {
      "epoch": 3.92,
      "learning_rate": 8.883838383838384e-06,
      "loss": 1.6813,
      "step": 120500
    },
    {
      "epoch": 3.94,
      "learning_rate": 8.87878787878788e-06,
      "loss": 1.6744,
      "step": 121000
    },
    {
      "epoch": 3.95,
      "learning_rate": 8.873737373737375e-06,
      "loss": 1.6657,
      "step": 121500
    },
    {
      "epoch": 3.97,
      "learning_rate": 8.86868686868687e-06,
      "loss": 1.659,
      "step": 122000
    },
    {
      "epoch": 3.98,
      "learning_rate": 8.863636363636365e-06,
      "loss": 1.6521,
      "step": 122500
    },
    {
      "epoch": 4.0,
      "learning_rate": 8.85858585858586e-06,
      "loss": 1.6431,
      "step": 123000
    },
    {
      "epoch": 4.02,
      "learning_rate": 8.853535353535353e-06,
      "loss": 1.6365,
      "step": 123500
    },
    {
      "epoch": 4.03,
      "learning_rate": 8.84848484848485e-06,
      "loss": 1.6282,
      "step": 124000
    },
    {
      "epoch": 4.05,
      "learning_rate": 8.843434343434344e-06,
      "loss": 1.622,
      "step": 124500
    },
    {
      "epoch": 4.07,
      "learning_rate": 8.83838383838384e-06,
      "loss": 1.6115,
      "step": 125000
    },
    {
      "epoch": 4.08,
      "learning_rate": 8.833333333333334e-06,
      "loss": 1.6057,
      "step": 125500
    },
    {
      "epoch": 4.1,
      "learning_rate": 8.82828282828283e-06,
      "loss": 1.5967,
      "step": 126000
    },
    {
      "epoch": 4.11,
      "learning_rate": 8.823232323232324e-06,
      "loss": 1.5929,
      "step": 126500
    },
    {
      "epoch": 4.13,
      "learning_rate": 8.818181818181819e-06,
      "loss": 1.585,
      "step": 127000
    },
    {
      "epoch": 4.15,
      "learning_rate": 8.813131313131313e-06,
      "loss": 1.5804,
      "step": 127500
    },
    {
      "epoch": 4.16,
      "learning_rate": 8.808080808080809e-06,
      "loss": 1.572,
      "step": 128000
    },
    {
      "epoch": 4.18,
      "learning_rate": 8.803030303030303e-06,
      "loss": 1.569,
      "step": 128500
    },
    {
      "epoch": 4.2,
      "learning_rate": 8.7979797979798e-06,
      "loss": 1.5566,
      "step": 129000
    },
    {
      "epoch": 4.21,
      "learning_rate": 8.792929292929293e-06,
      "loss": 1.5525,
      "step": 129500
    },
    {
      "epoch": 4.23,
      "learning_rate": 8.787878787878788e-06,
      "loss": 1.544,
      "step": 130000
    },
    {
      "epoch": 4.24,
      "learning_rate": 8.782828282828284e-06,
      "loss": 1.5399,
      "step": 130500
    },
    {
      "epoch": 4.26,
      "learning_rate": 8.777777777777778e-06,
      "loss": 1.534,
      "step": 131000
    },
    {
      "epoch": 4.28,
      "learning_rate": 8.772727272727274e-06,
      "loss": 1.5271,
      "step": 131500
    },
    {
      "epoch": 4.29,
      "learning_rate": 8.767676767676768e-06,
      "loss": 1.5183,
      "step": 132000
    },
    {
      "epoch": 4.31,
      "learning_rate": 8.762626262626264e-06,
      "loss": 1.513,
      "step": 132500
    },
    {
      "epoch": 4.33,
      "learning_rate": 8.757575757575759e-06,
      "loss": 1.5109,
      "step": 133000
    },
    {
      "epoch": 4.34,
      "learning_rate": 8.752525252525253e-06,
      "loss": 1.5012,
      "step": 133500
    },
    {
      "epoch": 4.36,
      "learning_rate": 8.747474747474747e-06,
      "loss": 1.4959,
      "step": 134000
    },
    {
      "epoch": 4.37,
      "learning_rate": 8.742424242424243e-06,
      "loss": 1.4882,
      "step": 134500
    },
    {
      "epoch": 4.39,
      "learning_rate": 8.737373737373738e-06,
      "loss": 1.4823,
      "step": 135000
    },
    {
      "epoch": 4.41,
      "learning_rate": 8.732323232323234e-06,
      "loss": 1.4805,
      "step": 135500
    },
    {
      "epoch": 4.42,
      "learning_rate": 8.727272727272728e-06,
      "loss": 1.4717,
      "step": 136000
    },
    {
      "epoch": 4.44,
      "learning_rate": 8.722222222222224e-06,
      "loss": 1.4684,
      "step": 136500
    },
    {
      "epoch": 4.46,
      "learning_rate": 8.717171717171718e-06,
      "loss": 1.4615,
      "step": 137000
    },
    {
      "epoch": 4.47,
      "learning_rate": 8.712121212121212e-06,
      "loss": 1.4562,
      "step": 137500
    },
    {
      "epoch": 4.49,
      "learning_rate": 8.707070707070707e-06,
      "loss": 1.4514,
      "step": 138000
    },
    {
      "epoch": 4.5,
      "learning_rate": 8.702020202020203e-06,
      "loss": 1.4456,
      "step": 138500
    },
    {
      "epoch": 4.52,
      "learning_rate": 8.696969696969699e-06,
      "loss": 1.4383,
      "step": 139000
    },
    {
      "epoch": 4.54,
      "learning_rate": 8.691919191919193e-06,
      "loss": 1.4344,
      "step": 139500
    },
    {
      "epoch": 4.55,
      "learning_rate": 8.686868686868687e-06,
      "loss": 1.431,
      "step": 140000
    },
    {
      "epoch": 4.57,
      "learning_rate": 8.681818181818182e-06,
      "loss": 1.4245,
      "step": 140500
    },
    {
      "epoch": 4.59,
      "learning_rate": 8.676767676767678e-06,
      "loss": 1.4198,
      "step": 141000
    },
    {
      "epoch": 4.6,
      "learning_rate": 8.671717171717172e-06,
      "loss": 1.4127,
      "step": 141500
    },
    {
      "epoch": 4.62,
      "learning_rate": 8.666666666666668e-06,
      "loss": 1.4075,
      "step": 142000
    },
    {
      "epoch": 4.63,
      "learning_rate": 8.661616161616162e-06,
      "loss": 1.405,
      "step": 142500
    },
    {
      "epoch": 4.65,
      "learning_rate": 8.656565656565658e-06,
      "loss": 1.4003,
      "step": 143000
    },
    {
      "epoch": 4.67,
      "learning_rate": 8.651515151515152e-06,
      "loss": 1.394,
      "step": 143500
    },
    {
      "epoch": 4.68,
      "learning_rate": 8.646464646464647e-06,
      "loss": 1.39,
      "step": 144000
    },
    {
      "epoch": 4.7,
      "learning_rate": 8.641414141414141e-06,
      "loss": 1.3857,
      "step": 144500
    },
    {
      "epoch": 4.72,
      "learning_rate": 8.636363636363637e-06,
      "loss": 1.3816,
      "step": 145000
    },
    {
      "epoch": 4.73,
      "learning_rate": 8.631313131313131e-06,
      "loss": 1.3764,
      "step": 145500
    },
    {
      "epoch": 4.75,
      "learning_rate": 8.626262626262627e-06,
      "loss": 1.3719,
      "step": 146000
    },
    {
      "epoch": 4.76,
      "learning_rate": 8.621212121212122e-06,
      "loss": 1.3675,
      "step": 146500
    },
    {
      "epoch": 4.78,
      "learning_rate": 8.616161616161618e-06,
      "loss": 1.3663,
      "step": 147000
    },
    {
      "epoch": 4.8,
      "learning_rate": 8.611111111111112e-06,
      "loss": 1.3599,
      "step": 147500
    },
    {
      "epoch": 4.81,
      "learning_rate": 8.606060606060606e-06,
      "loss": 1.3566,
      "step": 148000
    },
    {
      "epoch": 4.83,
      "learning_rate": 8.601010101010102e-06,
      "loss": 1.3528,
      "step": 148500
    },
    {
      "epoch": 4.85,
      "learning_rate": 8.595959595959596e-06,
      "loss": 1.3466,
      "step": 149000
    },
    {
      "epoch": 4.86,
      "learning_rate": 8.590909090909092e-06,
      "loss": 1.344,
      "step": 149500
    },
    {
      "epoch": 4.88,
      "learning_rate": 8.585858585858587e-06,
      "loss": 1.3414,
      "step": 150000
    },
    {
      "epoch": 4.89,
      "learning_rate": 8.580808080808081e-06,
      "loss": 1.3367,
      "step": 150500
    },
    {
      "epoch": 4.91,
      "learning_rate": 8.575757575757575e-06,
      "loss": 1.3321,
      "step": 151000
    },
    {
      "epoch": 4.93,
      "learning_rate": 8.570707070707071e-06,
      "loss": 1.3288,
      "step": 151500
    },
    {
      "epoch": 4.94,
      "learning_rate": 8.565656565656566e-06,
      "loss": 1.3233,
      "step": 152000
    },
    {
      "epoch": 4.96,
      "learning_rate": 8.560606060606062e-06,
      "loss": 1.3211,
      "step": 152500
    },
    {
      "epoch": 4.98,
      "learning_rate": 8.555555555555556e-06,
      "loss": 1.3145,
      "step": 153000
    },
    {
      "epoch": 4.99,
      "learning_rate": 8.550505050505052e-06,
      "loss": 1.3113,
      "step": 153500
    },
    {
      "epoch": 5.01,
      "learning_rate": 8.545454545454546e-06,
      "loss": 1.3084,
      "step": 154000
    },
    {
      "epoch": 5.02,
      "learning_rate": 8.54040404040404e-06,
      "loss": 1.3065,
      "step": 154500
    },
    {
      "epoch": 5.04,
      "learning_rate": 8.535353535353535e-06,
      "loss": 1.3017,
      "step": 155000
    },
    {
      "epoch": 5.06,
      "learning_rate": 8.53030303030303e-06,
      "loss": 1.2981,
      "step": 155500
    },
    {
      "epoch": 5.07,
      "learning_rate": 8.525252525252527e-06,
      "loss": 1.2935,
      "step": 156000
    },
    {
      "epoch": 5.09,
      "learning_rate": 8.520202020202021e-06,
      "loss": 1.2921,
      "step": 156500
    },
    {
      "epoch": 5.11,
      "learning_rate": 8.515151515151517e-06,
      "loss": 1.2873,
      "step": 157000
    },
    {
      "epoch": 5.12,
      "learning_rate": 8.510101010101011e-06,
      "loss": 1.2847,
      "step": 157500
    },
    {
      "epoch": 5.14,
      "learning_rate": 8.505050505050506e-06,
      "loss": 1.2801,
      "step": 158000
    },
    {
      "epoch": 5.15,
      "learning_rate": 8.5e-06,
      "loss": 1.2767,
      "step": 158500
    },
    {
      "epoch": 5.17,
      "learning_rate": 8.494949494949496e-06,
      "loss": 1.2743,
      "step": 159000
    },
    {
      "epoch": 5.19,
      "learning_rate": 8.48989898989899e-06,
      "loss": 1.2707,
      "step": 159500
    },
    {
      "epoch": 5.2,
      "learning_rate": 8.484848484848486e-06,
      "loss": 1.2664,
      "step": 160000
    },
    {
      "epoch": 5.22,
      "learning_rate": 8.47979797979798e-06,
      "loss": 1.2654,
      "step": 160500
    },
    {
      "epoch": 5.24,
      "learning_rate": 8.474747474747475e-06,
      "loss": 1.2622,
      "step": 161000
    },
    {
      "epoch": 5.25,
      "learning_rate": 8.46969696969697e-06,
      "loss": 1.258,
      "step": 161500
    },
    {
      "epoch": 5.27,
      "learning_rate": 8.464646464646465e-06,
      "loss": 1.254,
      "step": 162000
    },
    {
      "epoch": 5.28,
      "learning_rate": 8.45959595959596e-06,
      "loss": 1.2513,
      "step": 162500
    },
    {
      "epoch": 5.3,
      "learning_rate": 8.454545454545455e-06,
      "loss": 1.2492,
      "step": 163000
    },
    {
      "epoch": 5.32,
      "learning_rate": 8.44949494949495e-06,
      "loss": 1.2449,
      "step": 163500
    },
    {
      "epoch": 5.33,
      "learning_rate": 8.444444444444446e-06,
      "loss": 1.2428,
      "step": 164000
    },
    {
      "epoch": 5.35,
      "learning_rate": 8.43939393939394e-06,
      "loss": 1.2373,
      "step": 164500
    },
    {
      "epoch": 5.37,
      "learning_rate": 8.434343434343434e-06,
      "loss": 1.2362,
      "step": 165000
    },
    {
      "epoch": 5.38,
      "learning_rate": 8.42929292929293e-06,
      "loss": 1.2341,
      "step": 165500
    },
    {
      "epoch": 5.4,
      "learning_rate": 8.424242424242425e-06,
      "loss": 1.2298,
      "step": 166000
    },
    {
      "epoch": 5.41,
      "learning_rate": 8.41919191919192e-06,
      "loss": 1.2279,
      "step": 166500
    },
    {
      "epoch": 5.43,
      "learning_rate": 8.414141414141415e-06,
      "loss": 1.2244,
      "step": 167000
    },
    {
      "epoch": 5.45,
      "learning_rate": 8.40909090909091e-06,
      "loss": 1.221,
      "step": 167500
    },
    {
      "epoch": 5.46,
      "learning_rate": 8.404040404040405e-06,
      "loss": 1.2183,
      "step": 168000
    },
    {
      "epoch": 5.48,
      "learning_rate": 8.3989898989899e-06,
      "loss": 1.2159,
      "step": 168500
    },
    {
      "epoch": 5.5,
      "learning_rate": 8.393939393939394e-06,
      "loss": 1.2118,
      "step": 169000
    },
    {
      "epoch": 5.51,
      "learning_rate": 8.38888888888889e-06,
      "loss": 1.2101,
      "step": 169500
    },
    {
      "epoch": 5.53,
      "learning_rate": 8.383838383838384e-06,
      "loss": 1.2067,
      "step": 170000
    },
    {
      "epoch": 5.55,
      "learning_rate": 8.37878787878788e-06,
      "loss": 1.204,
      "step": 170500
    },
    {
      "epoch": 5.56,
      "learning_rate": 8.373737373737374e-06,
      "loss": 1.2018,
      "step": 171000
    },
    {
      "epoch": 5.58,
      "learning_rate": 8.368686868686869e-06,
      "loss": 1.1997,
      "step": 171500
    },
    {
      "epoch": 5.59,
      "learning_rate": 8.363636363636365e-06,
      "loss": 1.1964,
      "step": 172000
    },
    {
      "epoch": 5.61,
      "learning_rate": 8.358585858585859e-06,
      "loss": 1.1936,
      "step": 172500
    },
    {
      "epoch": 5.63,
      "learning_rate": 8.353535353535355e-06,
      "loss": 1.1923,
      "step": 173000
    },
    {
      "epoch": 5.64,
      "learning_rate": 8.348484848484849e-06,
      "loss": 1.189,
      "step": 173500
    },
    {
      "epoch": 5.66,
      "learning_rate": 8.343434343434345e-06,
      "loss": 1.1857,
      "step": 174000
    },
    {
      "epoch": 5.68,
      "learning_rate": 8.33838383838384e-06,
      "loss": 1.1831,
      "step": 174500
    },
    {
      "epoch": 5.69,
      "learning_rate": 8.333333333333334e-06,
      "loss": 1.1813,
      "step": 175000
    },
    {
      "epoch": 5.71,
      "learning_rate": 8.328282828282828e-06,
      "loss": 1.1794,
      "step": 175500
    },
    {
      "epoch": 5.72,
      "learning_rate": 8.323232323232324e-06,
      "loss": 1.1751,
      "step": 176000
    },
    {
      "epoch": 5.74,
      "learning_rate": 8.318181818181818e-06,
      "loss": 1.1736,
      "step": 176500
    },
    {
      "epoch": 5.76,
      "learning_rate": 8.313131313131314e-06,
      "loss": 1.1716,
      "step": 177000
    },
    {
      "epoch": 5.77,
      "learning_rate": 8.308080808080809e-06,
      "loss": 1.1689,
      "step": 177500
    },
    {
      "epoch": 5.79,
      "learning_rate": 8.303030303030305e-06,
      "loss": 1.1675,
      "step": 178000
    },
    {
      "epoch": 5.81,
      "learning_rate": 8.297979797979799e-06,
      "loss": 1.1652,
      "step": 178500
    },
    {
      "epoch": 5.82,
      "learning_rate": 8.292929292929293e-06,
      "loss": 1.163,
      "step": 179000
    },
    {
      "epoch": 5.84,
      "learning_rate": 8.287878787878787e-06,
      "loss": 1.1602,
      "step": 179500
    },
    {
      "epoch": 5.85,
      "learning_rate": 8.282828282828283e-06,
      "loss": 1.1577,
      "step": 180000
    },
    {
      "epoch": 5.87,
      "learning_rate": 8.277777777777778e-06,
      "loss": 1.1565,
      "step": 180500
    },
    {
      "epoch": 5.89,
      "learning_rate": 8.272727272727274e-06,
      "loss": 1.1525,
      "step": 181000
    },
    {
      "epoch": 5.9,
      "learning_rate": 8.267676767676768e-06,
      "loss": 1.152,
      "step": 181500
    },
    {
      "epoch": 5.92,
      "learning_rate": 8.262626262626264e-06,
      "loss": 1.1506,
      "step": 182000
    },
    {
      "epoch": 5.94,
      "learning_rate": 8.257575757575758e-06,
      "loss": 1.1469,
      "step": 182500
    },
    {
      "epoch": 5.95,
      "learning_rate": 8.252525252525253e-06,
      "loss": 1.1449,
      "step": 183000
    },
    {
      "epoch": 5.97,
      "learning_rate": 8.247474747474749e-06,
      "loss": 1.1422,
      "step": 183500
    },
    {
      "epoch": 5.98,
      "learning_rate": 8.242424242424243e-06,
      "loss": 1.1404,
      "step": 184000
    },
    {
      "epoch": 6.0,
      "learning_rate": 8.237373737373739e-06,
      "loss": 1.1403,
      "step": 184500
    },
    {
      "epoch": 6.02,
      "learning_rate": 8.232323232323233e-06,
      "loss": 1.1371,
      "step": 185000
    },
    {
      "epoch": 6.03,
      "learning_rate": 8.227272727272728e-06,
      "loss": 1.1372,
      "step": 185500
    },
    {
      "epoch": 6.05,
      "learning_rate": 8.222222222222222e-06,
      "loss": 1.1341,
      "step": 186000
    },
    {
      "epoch": 6.07,
      "learning_rate": 8.217171717171718e-06,
      "loss": 1.1302,
      "step": 186500
    },
    {
      "epoch": 6.08,
      "learning_rate": 8.212121212121212e-06,
      "loss": 1.1303,
      "step": 187000
    },
    {
      "epoch": 6.1,
      "learning_rate": 8.207070707070708e-06,
      "loss": 1.1276,
      "step": 187500
    },
    {
      "epoch": 6.11,
      "learning_rate": 8.202020202020202e-06,
      "loss": 1.1267,
      "step": 188000
    },
    {
      "epoch": 6.13,
      "learning_rate": 8.196969696969698e-06,
      "loss": 1.123,
      "step": 188500
    },
    {
      "epoch": 6.15,
      "learning_rate": 8.191919191919193e-06,
      "loss": 1.1219,
      "step": 189000
    },
    {
      "epoch": 6.16,
      "learning_rate": 8.186868686868687e-06,
      "loss": 1.119,
      "step": 189500
    },
    {
      "epoch": 6.18,
      "learning_rate": 8.181818181818183e-06,
      "loss": 1.1191,
      "step": 190000
    },
    {
      "epoch": 6.2,
      "learning_rate": 8.176767676767677e-06,
      "loss": 1.1166,
      "step": 190500
    },
    {
      "epoch": 6.21,
      "learning_rate": 8.171717171717173e-06,
      "loss": 1.114,
      "step": 191000
    },
    {
      "epoch": 6.23,
      "learning_rate": 8.166666666666668e-06,
      "loss": 1.113,
      "step": 191500
    },
    {
      "epoch": 6.24,
      "learning_rate": 8.161616161616162e-06,
      "loss": 1.1112,
      "step": 192000
    },
    {
      "epoch": 6.26,
      "learning_rate": 8.156565656565658e-06,
      "loss": 1.1104,
      "step": 192500
    },
    {
      "epoch": 6.28,
      "learning_rate": 8.151515151515152e-06,
      "loss": 1.1083,
      "step": 193000
    },
    {
      "epoch": 6.29,
      "learning_rate": 8.146464646464646e-06,
      "loss": 1.1073,
      "step": 193500
    },
    {
      "epoch": 6.31,
      "learning_rate": 8.141414141414142e-06,
      "loss": 1.1051,
      "step": 194000
    },
    {
      "epoch": 6.33,
      "learning_rate": 8.136363636363637e-06,
      "loss": 1.1036,
      "step": 194500
    },
    {
      "epoch": 6.34,
      "learning_rate": 8.131313131313133e-06,
      "loss": 1.102,
      "step": 195000
    },
    {
      "epoch": 6.36,
      "learning_rate": 8.126262626262627e-06,
      "loss": 1.0998,
      "step": 195500
    },
    {
      "epoch": 6.37,
      "learning_rate": 8.121212121212121e-06,
      "loss": 1.0982,
      "step": 196000
    },
    {
      "epoch": 6.39,
      "learning_rate": 8.116161616161616e-06,
      "loss": 1.0972,
      "step": 196500
    },
    {
      "epoch": 6.41,
      "learning_rate": 8.111111111111112e-06,
      "loss": 1.0951,
      "step": 197000
    },
    {
      "epoch": 6.42,
      "learning_rate": 8.106060606060606e-06,
      "loss": 1.0942,
      "step": 197500
    },
    {
      "epoch": 6.44,
      "learning_rate": 8.101010101010102e-06,
      "loss": 1.0922,
      "step": 198000
    },
    {
      "epoch": 6.46,
      "learning_rate": 8.095959595959598e-06,
      "loss": 1.0919,
      "step": 198500
    },
    {
      "epoch": 6.47,
      "learning_rate": 8.090909090909092e-06,
      "loss": 1.0898,
      "step": 199000
    },
    {
      "epoch": 6.49,
      "learning_rate": 8.085858585858586e-06,
      "loss": 1.0885,
      "step": 199500
    },
    {
      "epoch": 6.5,
      "learning_rate": 8.08080808080808e-06,
      "loss": 1.0876,
      "step": 200000
    },
    {
      "epoch": 6.52,
      "learning_rate": 8.075757575757577e-06,
      "loss": 1.0854,
      "step": 200500
    },
    {
      "epoch": 6.54,
      "learning_rate": 8.070707070707071e-06,
      "loss": 1.0846,
      "step": 201000
    },
    {
      "epoch": 6.55,
      "learning_rate": 8.065656565656567e-06,
      "loss": 1.0831,
      "step": 201500
    },
    {
      "epoch": 6.57,
      "learning_rate": 8.060606060606061e-06,
      "loss": 1.0814,
      "step": 202000
    },
    {
      "epoch": 6.59,
      "learning_rate": 8.055555555555557e-06,
      "loss": 1.0799,
      "step": 202500
    },
    {
      "epoch": 6.6,
      "learning_rate": 8.050505050505052e-06,
      "loss": 1.0787,
      "step": 203000
    },
    {
      "epoch": 6.62,
      "learning_rate": 8.045454545454546e-06,
      "loss": 1.0768,
      "step": 203500
    },
    {
      "epoch": 6.63,
      "learning_rate": 8.04040404040404e-06,
      "loss": 1.0763,
      "step": 204000
    },
    {
      "epoch": 6.65,
      "learning_rate": 8.035353535353536e-06,
      "loss": 1.0751,
      "step": 204500
    },
    {
      "epoch": 6.67,
      "learning_rate": 8.03030303030303e-06,
      "loss": 1.0738,
      "step": 205000
    },
    {
      "epoch": 6.68,
      "learning_rate": 8.025252525252526e-06,
      "loss": 1.0708,
      "step": 205500
    },
    {
      "epoch": 6.7,
      "learning_rate": 8.02020202020202e-06,
      "loss": 1.0701,
      "step": 206000
    },
    {
      "epoch": 6.72,
      "learning_rate": 8.015151515151515e-06,
      "loss": 1.0708,
      "step": 206500
    },
    {
      "epoch": 6.73,
      "learning_rate": 8.010101010101011e-06,
      "loss": 1.0684,
      "step": 207000
    },
    {
      "epoch": 6.75,
      "learning_rate": 8.005050505050505e-06,
      "loss": 1.0668,
      "step": 207500
    },
    {
      "epoch": 6.76,
      "learning_rate": 8.000000000000001e-06,
      "loss": 1.0668,
      "step": 208000
    },
    {
      "epoch": 6.78,
      "learning_rate": 7.994949494949496e-06,
      "loss": 1.0646,
      "step": 208500
    },
    {
      "epoch": 6.8,
      "learning_rate": 7.989898989898992e-06,
      "loss": 1.0641,
      "step": 209000
    },
    {
      "epoch": 6.81,
      "learning_rate": 7.984848484848486e-06,
      "loss": 1.0613,
      "step": 209500
    },
    {
      "epoch": 6.83,
      "learning_rate": 7.97979797979798e-06,
      "loss": 1.0613,
      "step": 210000
    },
    {
      "epoch": 6.85,
      "learning_rate": 7.974747474747475e-06,
      "loss": 1.0604,
      "step": 210500
    },
    {
      "epoch": 6.86,
      "learning_rate": 7.96969696969697e-06,
      "loss": 1.0593,
      "step": 211000
    },
    {
      "epoch": 6.88,
      "learning_rate": 7.964646464646465e-06,
      "loss": 1.0575,
      "step": 211500
    },
    {
      "epoch": 6.89,
      "learning_rate": 7.95959595959596e-06,
      "loss": 1.0564,
      "step": 212000
    },
    {
      "epoch": 6.91,
      "learning_rate": 7.954545454545455e-06,
      "loss": 1.0554,
      "step": 212500
    },
    {
      "epoch": 6.93,
      "learning_rate": 7.949494949494951e-06,
      "loss": 1.0544,
      "step": 213000
    },
    {
      "epoch": 6.94,
      "learning_rate": 7.944444444444445e-06,
      "loss": 1.053,
      "step": 213500
    },
    {
      "epoch": 6.96,
      "learning_rate": 7.93939393939394e-06,
      "loss": 1.0526,
      "step": 214000
    },
    {
      "epoch": 6.98,
      "learning_rate": 7.934343434343434e-06,
      "loss": 1.0503,
      "step": 214500
    },
    {
      "epoch": 6.99,
      "learning_rate": 7.92929292929293e-06,
      "loss": 1.0505,
      "step": 215000
    },
    {
      "epoch": 7.01,
      "learning_rate": 7.924242424242426e-06,
      "loss": 1.0482,
      "step": 215500
    },
    {
      "epoch": 7.02,
      "learning_rate": 7.91919191919192e-06,
      "loss": 1.0478,
      "step": 216000
    },
    {
      "epoch": 7.04,
      "learning_rate": 7.914141414141415e-06,
      "loss": 1.0466,
      "step": 216500
    },
    {
      "epoch": 7.06,
      "learning_rate": 7.909090909090909e-06,
      "loss": 1.0461,
      "step": 217000
    },
    {
      "epoch": 7.07,
      "learning_rate": 7.904040404040405e-06,
      "loss": 1.0442,
      "step": 217500
    },
    {
      "epoch": 7.09,
      "learning_rate": 7.898989898989899e-06,
      "loss": 1.0436,
      "step": 218000
    },
    {
      "epoch": 7.11,
      "learning_rate": 7.893939393939395e-06,
      "loss": 1.0426,
      "step": 218500
    },
    {
      "epoch": 7.12,
      "learning_rate": 7.88888888888889e-06,
      "loss": 1.043,
      "step": 219000
    },
    {
      "epoch": 7.14,
      "learning_rate": 7.883838383838385e-06,
      "loss": 1.0406,
      "step": 219500
    },
    {
      "epoch": 7.15,
      "learning_rate": 7.87878787878788e-06,
      "loss": 1.0398,
      "step": 220000
    },
    {
      "epoch": 7.17,
      "learning_rate": 7.873737373737374e-06,
      "loss": 1.0386,
      "step": 220500
    },
    {
      "epoch": 7.19,
      "learning_rate": 7.868686868686868e-06,
      "loss": 1.0379,
      "step": 221000
    },
    {
      "epoch": 7.2,
      "learning_rate": 7.863636363636364e-06,
      "loss": 1.0377,
      "step": 221500
    },
    {
      "epoch": 7.22,
      "learning_rate": 7.858585858585859e-06,
      "loss": 1.0362,
      "step": 222000
    },
    {
      "epoch": 7.24,
      "learning_rate": 7.853535353535355e-06,
      "loss": 1.0357,
      "step": 222500
    },
    {
      "epoch": 7.25,
      "learning_rate": 7.848484848484849e-06,
      "loss": 1.0342,
      "step": 223000
    },
    {
      "epoch": 7.27,
      "learning_rate": 7.843434343434345e-06,
      "loss": 1.0333,
      "step": 223500
    },
    {
      "epoch": 7.29,
      "learning_rate": 7.838383838383839e-06,
      "loss": 1.0331,
      "step": 224000
    },
    {
      "epoch": 7.3,
      "learning_rate": 7.833333333333333e-06,
      "loss": 1.0314,
      "step": 224500
    },
    {
      "epoch": 7.32,
      "learning_rate": 7.82828282828283e-06,
      "loss": 1.0313,
      "step": 225000
    },
    {
      "epoch": 7.33,
      "learning_rate": 7.823232323232324e-06,
      "loss": 1.0294,
      "step": 225500
    },
    {
      "epoch": 7.35,
      "learning_rate": 7.81818181818182e-06,
      "loss": 1.0285,
      "step": 226000
    },
    {
      "epoch": 7.37,
      "learning_rate": 7.813131313131314e-06,
      "loss": 1.0283,
      "step": 226500
    },
    {
      "epoch": 7.38,
      "learning_rate": 7.808080808080808e-06,
      "loss": 1.0278,
      "step": 227000
    },
    {
      "epoch": 7.4,
      "learning_rate": 7.803030303030303e-06,
      "loss": 1.0265,
      "step": 227500
    },
    {
      "epoch": 7.42,
      "learning_rate": 7.797979797979799e-06,
      "loss": 1.0255,
      "step": 228000
    },
    {
      "epoch": 7.43,
      "learning_rate": 7.792929292929293e-06,
      "loss": 1.0243,
      "step": 228500
    },
    {
      "epoch": 7.45,
      "learning_rate": 7.787878787878789e-06,
      "loss": 1.0229,
      "step": 229000
    },
    {
      "epoch": 7.46,
      "learning_rate": 7.782828282828283e-06,
      "loss": 1.0222,
      "step": 229500
    },
    {
      "epoch": 7.48,
      "learning_rate": 7.77777777777778e-06,
      "loss": 1.0219,
      "step": 230000
    },
    {
      "epoch": 7.5,
      "learning_rate": 7.772727272727273e-06,
      "loss": 1.0224,
      "step": 230500
    },
    {
      "epoch": 7.51,
      "learning_rate": 7.767676767676768e-06,
      "loss": 1.0212,
      "step": 231000
    },
    {
      "epoch": 7.53,
      "learning_rate": 7.762626262626262e-06,
      "loss": 1.0203,
      "step": 231500
    },
    {
      "epoch": 7.55,
      "learning_rate": 7.757575757575758e-06,
      "loss": 1.0185,
      "step": 232000
    },
    {
      "epoch": 7.56,
      "learning_rate": 7.752525252525254e-06,
      "loss": 1.0183,
      "step": 232500
    },
    {
      "epoch": 7.58,
      "learning_rate": 7.747474747474748e-06,
      "loss": 1.0165,
      "step": 233000
    },
    {
      "epoch": 7.59,
      "learning_rate": 7.742424242424244e-06,
      "loss": 1.0161,
      "step": 233500
    },
    {
      "epoch": 7.61,
      "learning_rate": 7.737373737373739e-06,
      "loss": 1.0157,
      "step": 234000
    },
    {
      "epoch": 7.63,
      "learning_rate": 7.732323232323233e-06,
      "loss": 1.0154,
      "step": 234500
    },
    {
      "epoch": 7.64,
      "learning_rate": 7.727272727272727e-06,
      "loss": 1.0134,
      "step": 235000
    },
    {
      "epoch": 7.66,
      "learning_rate": 7.722222222222223e-06,
      "loss": 1.0125,
      "step": 235500
    },
    {
      "epoch": 7.68,
      "learning_rate": 7.717171717171717e-06,
      "loss": 1.0127,
      "step": 236000
    },
    {
      "epoch": 7.69,
      "learning_rate": 7.712121212121213e-06,
      "loss": 1.0119,
      "step": 236500
    },
    {
      "epoch": 7.71,
      "learning_rate": 7.707070707070708e-06,
      "loss": 1.0101,
      "step": 237000
    },
    {
      "epoch": 7.72,
      "learning_rate": 7.702020202020202e-06,
      "loss": 1.0104,
      "step": 237500
    },
    {
      "epoch": 7.74,
      "learning_rate": 7.696969696969696e-06,
      "loss": 1.0098,
      "step": 238000
    },
    {
      "epoch": 7.76,
      "learning_rate": 7.691919191919192e-06,
      "loss": 1.0094,
      "step": 238500
    },
    {
      "epoch": 7.77,
      "learning_rate": 7.686868686868687e-06,
      "loss": 1.0075,
      "step": 239000
    },
    {
      "epoch": 7.79,
      "learning_rate": 7.681818181818183e-06,
      "loss": 1.0078,
      "step": 239500
| }, | |
| { | |
| "epoch": 7.81, | |
| "learning_rate": 7.676767676767677e-06, | |
| "loss": 1.0068, | |
| "step": 240000 | |
| }, | |
| { | |
| "epoch": 7.82, | |
| "learning_rate": 7.671717171717173e-06, | |
| "loss": 1.0074, | |
| "step": 240500 | |
| }, | |
| { | |
| "epoch": 7.84, | |
| "learning_rate": 7.666666666666667e-06, | |
| "loss": 1.0056, | |
| "step": 241000 | |
| }, | |
| { | |
| "epoch": 7.85, | |
| "learning_rate": 7.661616161616162e-06, | |
| "loss": 1.0044, | |
| "step": 241500 | |
| }, | |
| { | |
| "epoch": 7.87, | |
| "learning_rate": 7.656565656565658e-06, | |
| "loss": 1.0039, | |
| "step": 242000 | |
| }, | |
| { | |
| "epoch": 7.89, | |
| "learning_rate": 7.651515151515152e-06, | |
| "loss": 1.0028, | |
| "step": 242500 | |
| }, | |
| { | |
| "epoch": 7.9, | |
| "learning_rate": 7.646464646464648e-06, | |
| "loss": 1.0032, | |
| "step": 243000 | |
| }, | |
| { | |
| "epoch": 7.92, | |
| "learning_rate": 7.641414141414142e-06, | |
| "loss": 1.0012, | |
| "step": 243500 | |
| }, | |
| { | |
| "epoch": 7.94, | |
| "learning_rate": 7.636363636363638e-06, | |
| "loss": 1.0012, | |
| "step": 244000 | |
| }, | |
| { | |
| "epoch": 7.95, | |
| "learning_rate": 7.631313131313132e-06, | |
| "loss": 1.0008, | |
| "step": 244500 | |
| }, | |
| { | |
| "epoch": 7.97, | |
| "learning_rate": 7.6262626262626275e-06, | |
| "loss": 0.9994, | |
| "step": 245000 | |
| }, | |
| { | |
| "epoch": 7.98, | |
| "learning_rate": 7.621212121212122e-06, | |
| "loss": 0.9993, | |
| "step": 245500 | |
| }, | |
| { | |
| "epoch": 8.0, | |
| "learning_rate": 7.616161616161617e-06, | |
| "loss": 0.9978, | |
| "step": 246000 | |
| }, | |
| { | |
| "epoch": 8.02, | |
| "learning_rate": 7.611111111111111e-06, | |
| "loss": 0.9977, | |
| "step": 246500 | |
| }, | |
| { | |
| "epoch": 8.03, | |
| "learning_rate": 7.606060606060606e-06, | |
| "loss": 0.9968, | |
| "step": 247000 | |
| }, | |
| { | |
| "epoch": 8.05, | |
| "learning_rate": 7.6010101010101016e-06, | |
| "loss": 0.9956, | |
| "step": 247500 | |
| }, | |
| { | |
| "epoch": 8.07, | |
| "learning_rate": 7.595959595959597e-06, | |
| "loss": 0.9955, | |
| "step": 248000 | |
| }, | |
| { | |
| "epoch": 8.08, | |
| "learning_rate": 7.590909090909091e-06, | |
| "loss": 0.9951, | |
| "step": 248500 | |
| }, | |
| { | |
| "epoch": 8.1, | |
| "learning_rate": 7.585858585858586e-06, | |
| "loss": 0.9949, | |
| "step": 249000 | |
| }, | |
| { | |
| "epoch": 8.11, | |
| "learning_rate": 7.580808080808082e-06, | |
| "loss": 0.9926, | |
| "step": 249500 | |
| }, | |
| { | |
| "epoch": 8.13, | |
| "learning_rate": 7.5757575757575764e-06, | |
| "loss": 0.9937, | |
| "step": 250000 | |
| }, | |
| { | |
| "epoch": 8.15, | |
| "learning_rate": 7.5707070707070716e-06, | |
| "loss": 0.9924, | |
| "step": 250500 | |
| }, | |
| { | |
| "epoch": 8.16, | |
| "learning_rate": 7.565656565656566e-06, | |
| "loss": 0.9919, | |
| "step": 251000 | |
| }, | |
| { | |
| "epoch": 8.18, | |
| "learning_rate": 7.560606060606062e-06, | |
| "loss": 0.9913, | |
| "step": 251500 | |
| }, | |
| { | |
| "epoch": 8.2, | |
| "learning_rate": 7.555555555555556e-06, | |
| "loss": 0.9907, | |
| "step": 252000 | |
| }, | |
| { | |
| "epoch": 8.21, | |
| "learning_rate": 7.550505050505051e-06, | |
| "loss": 0.9898, | |
| "step": 252500 | |
| }, | |
| { | |
| "epoch": 8.23, | |
| "learning_rate": 7.545454545454546e-06, | |
| "loss": 0.9891, | |
| "step": 253000 | |
| }, | |
| { | |
| "epoch": 8.24, | |
| "learning_rate": 7.540404040404042e-06, | |
| "loss": 0.9891, | |
| "step": 253500 | |
| }, | |
| { | |
| "epoch": 8.26, | |
| "learning_rate": 7.535353535353536e-06, | |
| "loss": 0.9882, | |
| "step": 254000 | |
| }, | |
| { | |
| "epoch": 8.28, | |
| "learning_rate": 7.530303030303031e-06, | |
| "loss": 0.987, | |
| "step": 254500 | |
| }, | |
| { | |
| "epoch": 8.29, | |
| "learning_rate": 7.525252525252525e-06, | |
| "loss": 0.987, | |
| "step": 255000 | |
| }, | |
| { | |
| "epoch": 8.31, | |
| "learning_rate": 7.520202020202021e-06, | |
| "loss": 0.9869, | |
| "step": 255500 | |
| }, | |
| { | |
| "epoch": 8.33, | |
| "learning_rate": 7.515151515151516e-06, | |
| "loss": 0.9846, | |
| "step": 256000 | |
| }, | |
| { | |
| "epoch": 8.34, | |
| "learning_rate": 7.510101010101011e-06, | |
| "loss": 0.9855, | |
| "step": 256500 | |
| }, | |
| { | |
| "epoch": 8.36, | |
| "learning_rate": 7.505050505050505e-06, | |
| "loss": 0.9838, | |
| "step": 257000 | |
| }, | |
| { | |
| "epoch": 8.37, | |
| "learning_rate": 7.500000000000001e-06, | |
| "loss": 0.9844, | |
| "step": 257500 | |
| }, | |
| { | |
| "epoch": 8.39, | |
| "learning_rate": 7.494949494949496e-06, | |
| "loss": 0.9836, | |
| "step": 258000 | |
| }, | |
| { | |
| "epoch": 8.41, | |
| "learning_rate": 7.4898989898989905e-06, | |
| "loss": 0.9824, | |
| "step": 258500 | |
| }, | |
| { | |
| "epoch": 8.42, | |
| "learning_rate": 7.484848484848486e-06, | |
| "loss": 0.9816, | |
| "step": 259000 | |
| }, | |
| { | |
| "epoch": 8.44, | |
| "learning_rate": 7.47979797979798e-06, | |
| "loss": 0.9821, | |
| "step": 259500 | |
| }, | |
| { | |
| "epoch": 8.46, | |
| "learning_rate": 7.474747474747476e-06, | |
| "loss": 0.981, | |
| "step": 260000 | |
| }, | |
| { | |
| "epoch": 8.47, | |
| "learning_rate": 7.46969696969697e-06, | |
| "loss": 0.9804, | |
| "step": 260500 | |
| }, | |
| { | |
| "epoch": 8.49, | |
| "learning_rate": 7.464646464646465e-06, | |
| "loss": 0.9799, | |
| "step": 261000 | |
| }, | |
| { | |
| "epoch": 8.5, | |
| "learning_rate": 7.45959595959596e-06, | |
| "loss": 0.9785, | |
| "step": 261500 | |
| }, | |
| { | |
| "epoch": 8.52, | |
| "learning_rate": 7.454545454545456e-06, | |
| "loss": 0.9784, | |
| "step": 262000 | |
| }, | |
| { | |
| "epoch": 8.54, | |
| "learning_rate": 7.44949494949495e-06, | |
| "loss": 0.9782, | |
| "step": 262500 | |
| }, | |
| { | |
| "epoch": 8.55, | |
| "learning_rate": 7.444444444444445e-06, | |
| "loss": 0.977, | |
| "step": 263000 | |
| }, | |
| { | |
| "epoch": 8.57, | |
| "learning_rate": 7.439393939393939e-06, | |
| "loss": 0.9768, | |
| "step": 263500 | |
| }, | |
| { | |
| "epoch": 8.59, | |
| "learning_rate": 7.434343434343435e-06, | |
| "loss": 0.9761, | |
| "step": 264000 | |
| }, | |
| { | |
| "epoch": 8.6, | |
| "learning_rate": 7.42929292929293e-06, | |
| "loss": 0.9753, | |
| "step": 264500 | |
| }, | |
| { | |
| "epoch": 8.62, | |
| "learning_rate": 7.424242424242425e-06, | |
| "loss": 0.9747, | |
| "step": 265000 | |
| }, | |
| { | |
| "epoch": 8.63, | |
| "learning_rate": 7.419191919191919e-06, | |
| "loss": 0.9745, | |
| "step": 265500 | |
| }, | |
| { | |
| "epoch": 8.65, | |
| "learning_rate": 7.414141414141415e-06, | |
| "loss": 0.9743, | |
| "step": 266000 | |
| }, | |
| { | |
| "epoch": 8.67, | |
| "learning_rate": 7.40909090909091e-06, | |
| "loss": 0.9732, | |
| "step": 266500 | |
| }, | |
| { | |
| "epoch": 8.68, | |
| "learning_rate": 7.4040404040404045e-06, | |
| "loss": 0.9734, | |
| "step": 267000 | |
| }, | |
| { | |
| "epoch": 8.7, | |
| "learning_rate": 7.3989898989899e-06, | |
| "loss": 0.9727, | |
| "step": 267500 | |
| }, | |
| { | |
| "epoch": 8.72, | |
| "learning_rate": 7.393939393939395e-06, | |
| "loss": 0.9712, | |
| "step": 268000 | |
| }, | |
| { | |
| "epoch": 8.73, | |
| "learning_rate": 7.38888888888889e-06, | |
| "loss": 0.9707, | |
| "step": 268500 | |
| }, | |
| { | |
| "epoch": 8.75, | |
| "learning_rate": 7.383838383838384e-06, | |
| "loss": 0.9708, | |
| "step": 269000 | |
| }, | |
| { | |
| "epoch": 8.76, | |
| "learning_rate": 7.378787878787879e-06, | |
| "loss": 0.9704, | |
| "step": 269500 | |
| }, | |
| { | |
| "epoch": 8.78, | |
| "learning_rate": 7.373737373737374e-06, | |
| "loss": 0.9695, | |
| "step": 270000 | |
| }, | |
| { | |
| "epoch": 8.8, | |
| "learning_rate": 7.36868686868687e-06, | |
| "loss": 0.9694, | |
| "step": 270500 | |
| }, | |
| { | |
| "epoch": 8.81, | |
| "learning_rate": 7.363636363636364e-06, | |
| "loss": 0.9689, | |
| "step": 271000 | |
| }, | |
| { | |
| "epoch": 8.83, | |
| "learning_rate": 7.358585858585859e-06, | |
| "loss": 0.9683, | |
| "step": 271500 | |
| }, | |
| { | |
| "epoch": 8.85, | |
| "learning_rate": 7.353535353535353e-06, | |
| "loss": 0.9667, | |
| "step": 272000 | |
| }, | |
| { | |
| "epoch": 8.86, | |
| "learning_rate": 7.348484848484849e-06, | |
| "loss": 0.9673, | |
| "step": 272500 | |
| }, | |
| { | |
| "epoch": 8.88, | |
| "learning_rate": 7.343434343434344e-06, | |
| "loss": 0.9663, | |
| "step": 273000 | |
| }, | |
| { | |
| "epoch": 8.89, | |
| "learning_rate": 7.338383838383839e-06, | |
| "loss": 0.9662, | |
| "step": 273500 | |
| }, | |
| { | |
| "epoch": 8.91, | |
| "learning_rate": 7.333333333333333e-06, | |
| "loss": 0.9654, | |
| "step": 274000 | |
| }, | |
| { | |
| "epoch": 8.93, | |
| "learning_rate": 7.328282828282829e-06, | |
| "loss": 0.9646, | |
| "step": 274500 | |
| }, | |
| { | |
| "epoch": 8.94, | |
| "learning_rate": 7.323232323232324e-06, | |
| "loss": 0.9645, | |
| "step": 275000 | |
| }, | |
| { | |
| "epoch": 8.96, | |
| "learning_rate": 7.3181818181818186e-06, | |
| "loss": 0.9641, | |
| "step": 275500 | |
| }, | |
| { | |
| "epoch": 8.98, | |
| "learning_rate": 7.3131313131313146e-06, | |
| "loss": 0.9641, | |
| "step": 276000 | |
| }, | |
| { | |
| "epoch": 8.99, | |
| "learning_rate": 7.308080808080809e-06, | |
| "loss": 0.9626, | |
| "step": 276500 | |
| }, | |
| { | |
| "epoch": 9.01, | |
| "learning_rate": 7.303030303030304e-06, | |
| "loss": 0.9622, | |
| "step": 277000 | |
| }, | |
| { | |
| "epoch": 9.02, | |
| "learning_rate": 7.297979797979798e-06, | |
| "loss": 0.9629, | |
| "step": 277500 | |
| }, | |
| { | |
| "epoch": 9.04, | |
| "learning_rate": 7.2929292929292934e-06, | |
| "loss": 0.9619, | |
| "step": 278000 | |
| }, | |
| { | |
| "epoch": 9.06, | |
| "learning_rate": 7.287878787878789e-06, | |
| "loss": 0.9613, | |
| "step": 278500 | |
| }, | |
| { | |
| "epoch": 9.07, | |
| "learning_rate": 7.282828282828284e-06, | |
| "loss": 0.9609, | |
| "step": 279000 | |
| }, | |
| { | |
| "epoch": 9.09, | |
| "learning_rate": 7.277777777777778e-06, | |
| "loss": 0.9594, | |
| "step": 279500 | |
| }, | |
| { | |
| "epoch": 9.11, | |
| "learning_rate": 7.272727272727273e-06, | |
| "loss": 0.9596, | |
| "step": 280000 | |
| }, | |
| { | |
| "epoch": 9.12, | |
| "learning_rate": 7.2676767676767675e-06, | |
| "loss": 0.9599, | |
| "step": 280500 | |
| }, | |
| { | |
| "epoch": 9.14, | |
| "learning_rate": 7.2626262626262635e-06, | |
| "loss": 0.9588, | |
| "step": 281000 | |
| }, | |
| { | |
| "epoch": 9.16, | |
| "learning_rate": 7.257575757575758e-06, | |
| "loss": 0.9583, | |
| "step": 281500 | |
| }, | |
| { | |
| "epoch": 9.17, | |
| "learning_rate": 7.252525252525253e-06, | |
| "loss": 0.9579, | |
| "step": 282000 | |
| }, | |
| { | |
| "epoch": 9.19, | |
| "learning_rate": 7.247474747474747e-06, | |
| "loss": 0.957, | |
| "step": 282500 | |
| }, | |
| { | |
| "epoch": 9.2, | |
| "learning_rate": 7.242424242424243e-06, | |
| "loss": 0.958, | |
| "step": 283000 | |
| }, | |
| { | |
| "epoch": 9.22, | |
| "learning_rate": 7.237373737373738e-06, | |
| "loss": 0.9561, | |
| "step": 283500 | |
| }, | |
| { | |
| "epoch": 9.24, | |
| "learning_rate": 7.232323232323233e-06, | |
| "loss": 0.9555, | |
| "step": 284000 | |
| }, | |
| { | |
| "epoch": 9.25, | |
| "learning_rate": 7.227272727272729e-06, | |
| "loss": 0.9558, | |
| "step": 284500 | |
| }, | |
| { | |
| "epoch": 9.27, | |
| "learning_rate": 7.222222222222223e-06, | |
| "loss": 0.9542, | |
| "step": 285000 | |
| }, | |
| { | |
| "epoch": 9.29, | |
| "learning_rate": 7.217171717171718e-06, | |
| "loss": 0.9543, | |
| "step": 285500 | |
| }, | |
| { | |
| "epoch": 9.3, | |
| "learning_rate": 7.212121212121212e-06, | |
| "loss": 0.9542, | |
| "step": 286000 | |
| }, | |
| { | |
| "epoch": 9.32, | |
| "learning_rate": 7.207070707070708e-06, | |
| "loss": 0.9538, | |
| "step": 286500 | |
| }, | |
| { | |
| "epoch": 9.33, | |
| "learning_rate": 7.202020202020203e-06, | |
| "loss": 0.9521, | |
| "step": 287000 | |
| }, | |
| { | |
| "epoch": 9.35, | |
| "learning_rate": 7.196969696969698e-06, | |
| "loss": 0.9522, | |
| "step": 287500 | |
| }, | |
| { | |
| "epoch": 9.37, | |
| "learning_rate": 7.191919191919192e-06, | |
| "loss": 0.9519, | |
| "step": 288000 | |
| }, | |
| { | |
| "epoch": 9.38, | |
| "learning_rate": 7.186868686868688e-06, | |
| "loss": 0.953, | |
| "step": 288500 | |
| }, | |
| { | |
| "epoch": 9.4, | |
| "learning_rate": 7.181818181818182e-06, | |
| "loss": 0.9514, | |
| "step": 289000 | |
| }, | |
| { | |
| "epoch": 9.42, | |
| "learning_rate": 7.1767676767676775e-06, | |
| "loss": 0.9514, | |
| "step": 289500 | |
| }, | |
| { | |
| "epoch": 9.43, | |
| "learning_rate": 7.171717171717172e-06, | |
| "loss": 0.9502, | |
| "step": 290000 | |
| }, | |
| { | |
| "epoch": 9.45, | |
| "learning_rate": 7.166666666666667e-06, | |
| "loss": 0.9501, | |
| "step": 290500 | |
| }, | |
| { | |
| "epoch": 9.46, | |
| "learning_rate": 7.161616161616162e-06, | |
| "loss": 0.9494, | |
| "step": 291000 | |
| }, | |
| { | |
| "epoch": 9.48, | |
| "learning_rate": 7.156565656565657e-06, | |
| "loss": 0.9492, | |
| "step": 291500 | |
| }, | |
| { | |
| "epoch": 9.5, | |
| "learning_rate": 7.151515151515152e-06, | |
| "loss": 0.9492, | |
| "step": 292000 | |
| }, | |
| { | |
| "epoch": 9.51, | |
| "learning_rate": 7.146464646464647e-06, | |
| "loss": 0.948, | |
| "step": 292500 | |
| }, | |
| { | |
| "epoch": 9.53, | |
| "learning_rate": 7.141414141414143e-06, | |
| "loss": 0.9473, | |
| "step": 293000 | |
| }, | |
| { | |
| "epoch": 9.55, | |
| "learning_rate": 7.136363636363637e-06, | |
| "loss": 0.9472, | |
| "step": 293500 | |
| }, | |
| { | |
| "epoch": 9.56, | |
| "learning_rate": 7.131313131313132e-06, | |
| "loss": 0.9468, | |
| "step": 294000 | |
| }, | |
| { | |
| "epoch": 9.58, | |
| "learning_rate": 7.126262626262626e-06, | |
| "loss": 0.9464, | |
| "step": 294500 | |
| }, | |
| { | |
| "epoch": 9.59, | |
| "learning_rate": 7.121212121212122e-06, | |
| "loss": 0.9457, | |
| "step": 295000 | |
| }, | |
| { | |
| "epoch": 9.61, | |
| "learning_rate": 7.116161616161617e-06, | |
| "loss": 0.9456, | |
| "step": 295500 | |
| }, | |
| { | |
| "epoch": 9.63, | |
| "learning_rate": 7.111111111111112e-06, | |
| "loss": 0.9454, | |
| "step": 296000 | |
| }, | |
| { | |
| "epoch": 9.64, | |
| "learning_rate": 7.106060606060606e-06, | |
| "loss": 0.9451, | |
| "step": 296500 | |
| }, | |
| { | |
| "epoch": 9.66, | |
| "learning_rate": 7.101010101010102e-06, | |
| "loss": 0.9441, | |
| "step": 297000 | |
| }, | |
| { | |
| "epoch": 9.68, | |
| "learning_rate": 7.095959595959596e-06, | |
| "loss": 0.9443, | |
| "step": 297500 | |
| }, | |
| { | |
| "epoch": 9.69, | |
| "learning_rate": 7.0909090909090916e-06, | |
| "loss": 0.9437, | |
| "step": 298000 | |
| }, | |
| { | |
| "epoch": 9.71, | |
| "learning_rate": 7.085858585858586e-06, | |
| "loss": 0.9422, | |
| "step": 298500 | |
| }, | |
| { | |
| "epoch": 9.72, | |
| "learning_rate": 7.080808080808082e-06, | |
| "loss": 0.9429, | |
| "step": 299000 | |
| }, | |
| { | |
| "epoch": 9.74, | |
| "learning_rate": 7.075757575757576e-06, | |
| "loss": 0.9428, | |
| "step": 299500 | |
| }, | |
| { | |
| "epoch": 9.76, | |
| "learning_rate": 7.070707070707071e-06, | |
| "loss": 0.9421, | |
| "step": 300000 | |
| }, | |
| { | |
| "epoch": 9.77, | |
| "learning_rate": 7.065656565656566e-06, | |
| "loss": 0.9414, | |
| "step": 300500 | |
| }, | |
| { | |
| "epoch": 9.79, | |
| "learning_rate": 7.060606060606061e-06, | |
| "loss": 0.9416, | |
| "step": 301000 | |
| }, | |
| { | |
| "epoch": 9.81, | |
| "learning_rate": 7.055555555555557e-06, | |
| "loss": 0.9408, | |
| "step": 301500 | |
| }, | |
| { | |
| "epoch": 9.82, | |
| "learning_rate": 7.050505050505051e-06, | |
| "loss": 0.9401, | |
| "step": 302000 | |
| }, | |
| { | |
| "epoch": 9.84, | |
| "learning_rate": 7.045454545454546e-06, | |
| "loss": 0.9405, | |
| "step": 302500 | |
| }, | |
| { | |
| "epoch": 9.85, | |
| "learning_rate": 7.0404040404040404e-06, | |
| "loss": 0.9399, | |
| "step": 303000 | |
| }, | |
| { | |
| "epoch": 9.87, | |
| "learning_rate": 7.0353535353535364e-06, | |
| "loss": 0.9386, | |
| "step": 303500 | |
| }, | |
| { | |
| "epoch": 9.89, | |
| "learning_rate": 7.030303030303031e-06, | |
| "loss": 0.939, | |
| "step": 304000 | |
| }, | |
| { | |
| "epoch": 9.9, | |
| "learning_rate": 7.025252525252526e-06, | |
| "loss": 0.9384, | |
| "step": 304500 | |
| }, | |
| { | |
| "epoch": 9.92, | |
| "learning_rate": 7.02020202020202e-06, | |
| "loss": 0.937, | |
| "step": 305000 | |
| }, | |
| { | |
| "epoch": 9.94, | |
| "learning_rate": 7.015151515151516e-06, | |
| "loss": 0.9371, | |
| "step": 305500 | |
| }, | |
| { | |
| "epoch": 9.95, | |
| "learning_rate": 7.0101010101010105e-06, | |
| "loss": 0.9369, | |
| "step": 306000 | |
| }, | |
| { | |
| "epoch": 9.97, | |
| "learning_rate": 7.005050505050506e-06, | |
| "loss": 0.9363, | |
| "step": 306500 | |
| }, | |
| { | |
| "epoch": 9.98, | |
| "learning_rate": 7e-06, | |
| "loss": 0.9357, | |
| "step": 307000 | |
| }, | |
| { | |
| "epoch": 10.0, | |
| "learning_rate": 6.994949494949496e-06, | |
| "loss": 0.9361, | |
| "step": 307500 | |
| }, | |
| { | |
| "epoch": 10.02, | |
| "learning_rate": 6.98989898989899e-06, | |
| "loss": 0.9351, | |
| "step": 308000 | |
| }, | |
| { | |
| "epoch": 10.03, | |
| "learning_rate": 6.984848484848485e-06, | |
| "loss": 0.9361, | |
| "step": 308500 | |
| }, | |
| { | |
| "epoch": 10.05, | |
| "learning_rate": 6.979797979797981e-06, | |
| "loss": 0.9351, | |
| "step": 309000 | |
| }, | |
| { | |
| "epoch": 10.07, | |
| "learning_rate": 6.974747474747476e-06, | |
| "loss": 0.9343, | |
| "step": 309500 | |
| }, | |
| { | |
| "epoch": 10.08, | |
| "learning_rate": 6.969696969696971e-06, | |
| "loss": 0.9339, | |
| "step": 310000 | |
| }, | |
| { | |
| "epoch": 10.1, | |
| "learning_rate": 6.964646464646465e-06, | |
| "loss": 0.9335, | |
| "step": 310500 | |
| }, | |
| { | |
| "epoch": 10.11, | |
| "learning_rate": 6.95959595959596e-06, | |
| "loss": 0.9338, | |
| "step": 311000 | |
| }, | |
| { | |
| "epoch": 10.13, | |
| "learning_rate": 6.954545454545455e-06, | |
| "loss": 0.9328, | |
| "step": 311500 | |
| }, | |
| { | |
| "epoch": 10.15, | |
| "learning_rate": 6.9494949494949505e-06, | |
| "loss": 0.9331, | |
| "step": 312000 | |
| }, | |
| { | |
| "epoch": 10.16, | |
| "learning_rate": 6.944444444444445e-06, | |
| "loss": 0.9319, | |
| "step": 312500 | |
| }, | |
| { | |
| "epoch": 10.18, | |
| "learning_rate": 6.93939393939394e-06, | |
| "loss": 0.9313, | |
| "step": 313000 | |
| }, | |
| { | |
| "epoch": 10.2, | |
| "learning_rate": 6.934343434343434e-06, | |
| "loss": 0.9313, | |
| "step": 313500 | |
| }, | |
| { | |
| "epoch": 10.21, | |
| "learning_rate": 6.92929292929293e-06, | |
| "loss": 0.931, | |
| "step": 314000 | |
| }, | |
| { | |
| "epoch": 10.23, | |
| "learning_rate": 6.9242424242424245e-06, | |
| "loss": 0.9298, | |
| "step": 314500 | |
| }, | |
| { | |
| "epoch": 10.24, | |
| "learning_rate": 6.91919191919192e-06, | |
| "loss": 0.931, | |
| "step": 315000 | |
| }, | |
| { | |
| "epoch": 10.26, | |
| "learning_rate": 6.914141414141414e-06, | |
| "loss": 0.9302, | |
| "step": 315500 | |
| }, | |
| { | |
| "epoch": 10.28, | |
| "learning_rate": 6.90909090909091e-06, | |
| "loss": 0.929, | |
| "step": 316000 | |
| }, | |
| { | |
| "epoch": 10.29, | |
| "learning_rate": 6.904040404040404e-06, | |
| "loss": 0.9292, | |
| "step": 316500 | |
| }, | |
| { | |
| "epoch": 10.31, | |
| "learning_rate": 6.898989898989899e-06, | |
| "loss": 0.9284, | |
| "step": 317000 | |
| }, | |
| { | |
| "epoch": 10.33, | |
| "learning_rate": 6.893939393939395e-06, | |
| "loss": 0.9292, | |
| "step": 317500 | |
| }, | |
| { | |
| "epoch": 10.34, | |
| "learning_rate": 6.88888888888889e-06, | |
| "loss": 0.9282, | |
| "step": 318000 | |
| }, | |
| { | |
| "epoch": 10.36, | |
| "learning_rate": 6.883838383838385e-06, | |
| "loss": 0.9279, | |
| "step": 318500 | |
| }, | |
| { | |
| "epoch": 10.37, | |
| "learning_rate": 6.878787878787879e-06, | |
| "loss": 0.9272, | |
| "step": 319000 | |
| }, | |
| { | |
| "epoch": 10.39, | |
| "learning_rate": 6.873737373737375e-06, | |
| "loss": 0.927, | |
| "step": 319500 | |
| }, | |
| { | |
| "epoch": 10.41, | |
| "learning_rate": 6.868686868686869e-06, | |
| "loss": 0.9271, | |
| "step": 320000 | |
| }, | |
| { | |
| "epoch": 10.42, | |
| "learning_rate": 6.8636363636363645e-06, | |
| "loss": 0.9266, | |
| "step": 320500 | |
| }, | |
| { | |
| "epoch": 10.44, | |
| "learning_rate": 6.858585858585859e-06, | |
| "loss": 0.9256, | |
| "step": 321000 | |
| }, | |
| { | |
| "epoch": 10.46, | |
| "learning_rate": 6.853535353535354e-06, | |
| "loss": 0.9257, | |
| "step": 321500 | |
| }, | |
| { | |
| "epoch": 10.47, | |
| "learning_rate": 6.848484848484849e-06, | |
| "loss": 0.9259, | |
| "step": 322000 | |
| }, | |
| { | |
| "epoch": 10.49, | |
| "learning_rate": 6.843434343434344e-06, | |
| "loss": 0.9252, | |
| "step": 322500 | |
| }, | |
| { | |
| "epoch": 10.5, | |
| "learning_rate": 6.8383838383838386e-06, | |
| "loss": 0.9246, | |
| "step": 323000 | |
| }, | |
| { | |
| "epoch": 10.52, | |
| "learning_rate": 6.833333333333334e-06, | |
| "loss": 0.9241, | |
| "step": 323500 | |
| }, | |
| { | |
| "epoch": 10.54, | |
| "learning_rate": 6.828282828282828e-06, | |
| "loss": 0.9239, | |
| "step": 324000 | |
| }, | |
| { | |
| "epoch": 10.55, | |
| "learning_rate": 6.823232323232324e-06, | |
| "loss": 0.9237, | |
| "step": 324500 | |
| }, | |
| { | |
| "epoch": 10.57, | |
| "learning_rate": 6.818181818181818e-06, | |
| "loss": 0.9231, | |
| "step": 325000 | |
| }, | |
| { | |
| "epoch": 10.59, | |
| "learning_rate": 6.813131313131313e-06, | |
| "loss": 0.9231, | |
| "step": 325500 | |
| }, | |
| { | |
| "epoch": 10.6, | |
| "learning_rate": 6.808080808080809e-06, | |
| "loss": 0.9227, | |
| "step": 326000 | |
| }, | |
| { | |
| "epoch": 10.62, | |
| "learning_rate": 6.803030303030304e-06, | |
| "loss": 0.9222, | |
| "step": 326500 | |
| }, | |
| { | |
| "epoch": 10.63, | |
| "learning_rate": 6.797979797979799e-06, | |
| "loss": 0.9215, | |
| "step": 327000 | |
| }, | |
| { | |
| "epoch": 10.65, | |
| "learning_rate": 6.792929292929293e-06, | |
| "loss": 0.9217, | |
| "step": 327500 | |
| }, | |
| { | |
| "epoch": 10.67, | |
| "learning_rate": 6.787878787878789e-06, | |
| "loss": 0.9211, | |
| "step": 328000 | |
| }, | |
| { | |
| "epoch": 10.68, | |
| "learning_rate": 6.7828282828282834e-06, | |
| "loss": 0.9204, | |
| "step": 328500 | |
| }, | |
| { | |
| "epoch": 10.7, | |
| "learning_rate": 6.777777777777779e-06, | |
| "loss": 0.9211, | |
| "step": 329000 | |
| }, | |
| { | |
| "epoch": 10.72, | |
| "learning_rate": 6.772727272727273e-06, | |
| "loss": 0.9195, | |
| "step": 329500 | |
| }, | |
| { | |
| "epoch": 10.73, | |
| "learning_rate": 6.767676767676769e-06, | |
| "loss": 0.9195, | |
| "step": 330000 | |
| }, | |
| { | |
| "epoch": 10.75, | |
| "learning_rate": 6.762626262626263e-06, | |
| "loss": 0.9195, | |
| "step": 330500 | |
| }, | |
| { | |
| "epoch": 10.76, | |
| "learning_rate": 6.757575757575758e-06, | |
| "loss": 0.9188, | |
| "step": 331000 | |
| }, | |
| { | |
| "epoch": 10.78, | |
| "learning_rate": 6.752525252525253e-06, | |
| "loss": 0.9187, | |
| "step": 331500 | |
| }, | |
| { | |
| "epoch": 10.8, | |
| "learning_rate": 6.747474747474749e-06, | |
| "loss": 0.9193, | |
| "step": 332000 | |
| }, | |
| { | |
| "epoch": 10.81, | |
| "learning_rate": 6.742424242424243e-06, | |
| "loss": 0.9183, | |
| "step": 332500 | |
| }, | |
| { | |
| "epoch": 10.83, | |
| "learning_rate": 6.737373737373738e-06, | |
| "loss": 0.9176, | |
| "step": 333000 | |
| }, | |
| { | |
| "epoch": 10.85, | |
| "learning_rate": 6.732323232323232e-06, | |
| "loss": 0.9173, | |
| "step": 333500 | |
| }, | |
| { | |
| "epoch": 10.86, | |
| "learning_rate": 6.7272727272727275e-06, | |
| "loss": 0.9164, | |
| "step": 334000 | |
| }, | |
| { | |
| "epoch": 10.88, | |
| "learning_rate": 6.7222222222222235e-06, | |
| "loss": 0.9165, | |
| "step": 334500 | |
| }, | |
| { | |
| "epoch": 10.9, | |
| "learning_rate": 6.717171717171718e-06, | |
| "loss": 0.9169, | |
| "step": 335000 | |
| }, | |
| { | |
| "epoch": 10.91, | |
| "learning_rate": 6.712121212121213e-06, | |
| "loss": 0.9156, | |
| "step": 335500 | |
| }, | |
| { | |
| "epoch": 10.93, | |
| "learning_rate": 6.707070707070707e-06, | |
| "loss": 0.916, | |
| "step": 336000 | |
| }, | |
| { | |
| "epoch": 10.94, | |
| "learning_rate": 6.702020202020203e-06, | |
| "loss": 0.9148, | |
| "step": 336500 | |
| }, | |
| { | |
| "epoch": 10.96, | |
| "learning_rate": 6.6969696969696975e-06, | |
| "loss": 0.9154, | |
| "step": 337000 | |
| }, | |
| { | |
| "epoch": 10.98, | |
| "learning_rate": 6.691919191919193e-06, | |
| "loss": 0.9147, | |
| "step": 337500 | |
| }, | |
| { | |
| "epoch": 10.99, | |
| "learning_rate": 6.686868686868687e-06, | |
| "loss": 0.9145, | |
| "step": 338000 | |
| }, | |
| { | |
| "epoch": 11.01, | |
| "learning_rate": 6.681818181818183e-06, | |
| "loss": 0.9144, | |
| "step": 338500 | |
| }, | |
| { | |
| "epoch": 11.03, | |
| "learning_rate": 6.676767676767677e-06, | |
| "loss": 0.9144, | |
| "step": 339000 | |
| }, | |
| { | |
| "epoch": 11.04, | |
| "learning_rate": 6.671717171717172e-06, | |
| "loss": 0.9127, | |
| "step": 339500 | |
| }, | |
| { | |
| "epoch": 11.06, | |
| "learning_rate": 6.666666666666667e-06, | |
| "loss": 0.9133, | |
| "step": 340000 | |
| }, | |
| { | |
| "epoch": 11.07, | |
| "learning_rate": 6.661616161616163e-06, | |
| "loss": 0.9129, | |
| "step": 340500 | |
| }, | |
| { | |
| "epoch": 11.09, | |
| "learning_rate": 6.656565656565657e-06, | |
| "loss": 0.9133, | |
| "step": 341000 | |
| }, | |
| { | |
| "epoch": 11.11, | |
| "learning_rate": 6.651515151515152e-06, | |
| "loss": 0.9128, | |
| "step": 341500 | |
| }, | |
| { | |
| "epoch": 11.12, | |
| "learning_rate": 6.646464646464646e-06, | |
| "loss": 0.9123, | |
| "step": 342000 | |
| }, | |
| { | |
| "epoch": 11.14, | |
| "learning_rate": 6.641414141414142e-06, | |
| "loss": 0.9114, | |
| "step": 342500 | |
| }, | |
| { | |
| "epoch": 11.16, | |
| "learning_rate": 6.6363636363636375e-06, | |
| "loss": 0.9113, | |
| "step": 343000 | |
| }, | |
| { | |
| "epoch": 11.17, | |
| "learning_rate": 6.631313131313132e-06, | |
| "loss": 0.9117, | |
| "step": 343500 | |
| }, | |
| { | |
| "epoch": 11.19, | |
| "learning_rate": 6.626262626262627e-06, | |
| "loss": 0.9111, | |
| "step": 344000 | |
| }, | |
| { | |
| "epoch": 11.2, | |
| "learning_rate": 6.621212121212121e-06, | |
| "loss": 0.91, | |
| "step": 344500 | |
| }, | |
| { | |
| "epoch": 11.22, | |
| "learning_rate": 6.616161616161617e-06, | |
| "loss": 0.9098, | |
| "step": 345000 | |
| }, | |
| { | |
| "epoch": 11.24, | |
| "learning_rate": 6.6111111111111115e-06, | |
| "loss": 0.9109, | |
| "step": 345500 | |
| }, | |
| { | |
| "epoch": 11.25, | |
| "learning_rate": 6.606060606060607e-06, | |
| "loss": 0.9105, | |
| "step": 346000 | |
| }, | |
| { | |
| "epoch": 11.27, | |
| "learning_rate": 6.601010101010101e-06, | |
| "loss": 0.9095, | |
| "step": 346500 | |
| }, | |
| { | |
| "epoch": 11.29, | |
| "learning_rate": 6.595959595959597e-06, | |
| "loss": 0.9081, | |
| "step": 347000 | |
| }, | |
| { | |
| "epoch": 11.3, | |
| "learning_rate": 6.590909090909091e-06, | |
| "loss": 0.9081, | |
| "step": 347500 | |
| }, | |
| { | |
| "epoch": 11.32, | |
| "learning_rate": 6.585858585858586e-06, | |
| "loss": 0.9084, | |
| "step": 348000 | |
| }, | |
| { | |
| "epoch": 11.33, | |
| "learning_rate": 6.580808080808081e-06, | |
| "loss": 0.9084, | |
| "step": 348500 | |
| }, | |
| { | |
| "epoch": 11.35, | |
| "learning_rate": 6.575757575757577e-06, | |
| "loss": 0.9075, | |
| "step": 349000 | |
| }, | |
| { | |
| "epoch": 11.37, | |
| "learning_rate": 6.570707070707071e-06, | |
| "loss": 0.9076, | |
| "step": 349500 | |
| }, | |
| { | |
| "epoch": 11.38, | |
| "learning_rate": 6.565656565656566e-06, | |
| "loss": 0.9079, | |
| "step": 350000 | |
| }, | |
| { | |
| "epoch": 11.4, | |
| "learning_rate": 6.56060606060606e-06, | |
| "loss": 0.9069, | |
| "step": 350500 | |
| }, | |
| { | |
| "epoch": 11.42, | |
| "learning_rate": 6.555555555555556e-06, | |
| "loss": 0.9071, | |
| "step": 351000 | |
| }, | |
| { | |
| "epoch": 11.43, | |
| "learning_rate": 6.5505050505050516e-06, | |
| "loss": 0.9054, | |
| "step": 351500 | |
| }, | |
| { | |
| "epoch": 11.45, | |
| "learning_rate": 6.545454545454546e-06, | |
| "loss": 0.9064, | |
| "step": 352000 | |
| }, | |
| { | |
| "epoch": 11.46, | |
| "learning_rate": 6.540404040404042e-06, | |
| "loss": 0.9053, | |
| "step": 352500 | |
| }, | |
| { | |
| "epoch": 11.48, | |
| "learning_rate": 6.535353535353536e-06, | |
| "loss": 0.9053, | |
| "step": 353000 | |
| }, | |
| { | |
| "epoch": 11.5, | |
| "learning_rate": 6.530303030303031e-06, | |
| "loss": 0.9046, | |
| "step": 353500 | |
| }, | |
| { | |
| "epoch": 11.51, | |
| "learning_rate": 6.525252525252526e-06, | |
| "loss": 0.9054, | |
| "step": 354000 | |
| }, | |
| { | |
| "epoch": 11.53, | |
| "learning_rate": 6.520202020202021e-06, | |
| "loss": 0.9042, | |
| "step": 354500 | |
| }, | |
| { | |
| "epoch": 11.55, | |
| "learning_rate": 6.515151515151516e-06, | |
| "loss": 0.9043, | |
| "step": 355000 | |
| }, | |
| { | |
| "epoch": 11.56, | |
| "learning_rate": 6.510101010101011e-06, | |
| "loss": 0.904, | |
| "step": 355500 | |
| }, | |
| { | |
| "epoch": 11.58, | |
| "learning_rate": 6.505050505050505e-06, | |
| "loss": 0.9029, | |
| "step": 356000 | |
| }, | |
| { | |
| "epoch": 11.59, | |
| "learning_rate": 6.5000000000000004e-06, | |
| "loss": 0.9035, | |
| "step": 356500 | |
| }, | |
| { | |
| "epoch": 11.61, | |
| "learning_rate": 6.494949494949495e-06, | |
| "loss": 0.9036, | |
| "step": 357000 | |
| }, | |
| { | |
| "epoch": 11.63, | |
| "learning_rate": 6.489898989898991e-06, | |
| "loss": 0.9025, | |
| "step": 357500 | |
| }, | |
| { | |
| "epoch": 11.64, | |
| "learning_rate": 6.484848484848485e-06, | |
| "loss": 0.903, | |
| "step": 358000 | |
| }, | |
| { | |
| "epoch": 11.66, | |
| "learning_rate": 6.47979797979798e-06, | |
| "loss": 0.9022, | |
| "step": 358500 | |
| }, | |
| { | |
| "epoch": 11.68, | |
| "learning_rate": 6.4747474747474745e-06, | |
| "loss": 0.9018, | |
| "step": 359000 | |
| }, | |
| { | |
| "epoch": 11.69, | |
| "learning_rate": 6.4696969696969705e-06, | |
| "loss": 0.9017, | |
| "step": 359500 | |
| }, | |
| { | |
| "epoch": 11.71, | |
| "learning_rate": 6.464646464646466e-06, | |
| "loss": 0.9003, | |
| "step": 360000 | |
| }, | |
| { | |
| "epoch": 11.72, | |
| "learning_rate": 6.45959595959596e-06, | |
| "loss": 0.9013, | |
| "step": 360500 | |
| }, | |
| { | |
| "epoch": 11.74, | |
| "learning_rate": 6.454545454545456e-06, | |
| "loss": 0.9001, | |
| "step": 361000 | |
| }, | |
| { | |
| "epoch": 11.76, | |
| "learning_rate": 6.44949494949495e-06, | |
| "loss": 0.8999, | |
| "step": 361500 | |
| }, | |
| { | |
| "epoch": 11.77, | |
| "learning_rate": 6.444444444444445e-06, | |
| "loss": 0.9004, | |
| "step": 362000 | |
| }, | |
| { | |
| "epoch": 11.79, | |
| "learning_rate": 6.43939393939394e-06, | |
| "loss": 0.8995, | |
| "step": 362500 | |
| }, | |
| { | |
| "epoch": 11.81, | |
| "learning_rate": 6.434343434343436e-06, | |
| "loss": 0.8995, | |
| "step": 363000 | |
| }, | |
| { | |
| "epoch": 11.82, | |
| "learning_rate": 6.42929292929293e-06, | |
| "loss": 0.8992, | |
| "step": 363500 | |
| }, | |
| { | |
| "epoch": 11.84, | |
| "learning_rate": 6.424242424242425e-06, | |
| "loss": 0.8985, | |
| "step": 364000 | |
| }, | |
| { | |
| "epoch": 11.85, | |
| "learning_rate": 6.419191919191919e-06, | |
| "loss": 0.8987, | |
| "step": 364500 | |
| }, | |
| { | |
| "epoch": 11.87, | |
| "learning_rate": 6.4141414141414145e-06, | |
| "loss": 0.898, | |
| "step": 365000 | |
| }, | |
| { | |
| "epoch": 11.89, | |
| "learning_rate": 6.40909090909091e-06, | |
| "loss": 0.8981, | |
| "step": 365500 | |
| }, | |
| { | |
| "epoch": 11.9, | |
| "learning_rate": 6.404040404040405e-06, | |
| "loss": 0.8977, | |
| "step": 366000 | |
| }, | |
| { | |
| "epoch": 11.92, | |
| "learning_rate": 6.398989898989899e-06, | |
| "loss": 0.8975, | |
| "step": 366500 | |
| }, | |
| { | |
| "epoch": 11.94, | |
| "learning_rate": 6.393939393939394e-06, | |
| "loss": 0.8977, | |
| "step": 367000 | |
| }, | |
| { | |
| "epoch": 11.95, | |
| "learning_rate": 6.3888888888888885e-06, | |
| "loss": 0.8966, | |
| "step": 367500 | |
| }, | |
| { | |
| "epoch": 11.97, | |
| "learning_rate": 6.3838383838383845e-06, | |
| "loss": 0.8967, | |
| "step": 368000 | |
| }, | |
| { | |
| "epoch": 11.98, | |
| "learning_rate": 6.37878787878788e-06, | |
| "loss": 0.8961, | |
| "step": 368500 | |
| }, | |
| { | |
| "epoch": 12.0, | |
| "learning_rate": 6.373737373737374e-06, | |
| "loss": 0.8963, | |
| "step": 369000 | |
| }, | |
| { | |
| "epoch": 12.02, | |
| "learning_rate": 6.36868686868687e-06, | |
| "loss": 0.8957, | |
| "step": 369500 | |
| }, | |
| { | |
| "epoch": 12.03, | |
| "learning_rate": 6.363636363636364e-06, | |
| "loss": 0.8962, | |
| "step": 370000 | |
| }, | |
| { | |
| "epoch": 12.05, | |
| "learning_rate": 6.358585858585859e-06, | |
| "loss": 0.8947, | |
| "step": 370500 | |
| }, | |
| { | |
| "epoch": 12.07, | |
| "learning_rate": 6.353535353535354e-06, | |
| "loss": 0.8947, | |
| "step": 371000 | |
| }, | |
| { | |
| "epoch": 12.08, | |
| "learning_rate": 6.34848484848485e-06, | |
| "loss": 0.8938, | |
| "step": 371500 | |
| }, | |
| { | |
| "epoch": 12.1, | |
| "learning_rate": 6.343434343434344e-06, | |
| "loss": 0.8951, | |
| "step": 372000 | |
| }, | |
| { | |
| "epoch": 12.11, | |
| "learning_rate": 6.338383838383839e-06, | |
| "loss": 0.8942, | |
| "step": 372500 | |
| }, | |
| { | |
| "epoch": 12.13, | |
| "learning_rate": 6.333333333333333e-06, | |
| "loss": 0.8938, | |
| "step": 373000 | |
| }, | |
| { | |
| "epoch": 12.15, | |
| "learning_rate": 6.328282828282829e-06, | |
| "loss": 0.8934, | |
| "step": 373500 | |
| }, | |
| { | |
| "epoch": 12.16, | |
| "learning_rate": 6.323232323232324e-06, | |
| "loss": 0.8931, | |
| "step": 374000 | |
| }, | |
| { | |
| "epoch": 12.18, | |
| "learning_rate": 6.318181818181819e-06, | |
| "loss": 0.893, | |
| "step": 374500 | |
| }, | |
| { | |
| "epoch": 12.2, | |
| "learning_rate": 6.313131313131313e-06, | |
| "loss": 0.8923, | |
| "step": 375000 | |
| }, | |
| { | |
| "epoch": 12.21, | |
| "learning_rate": 6.308080808080809e-06, | |
| "loss": 0.8934, | |
| "step": 375500 | |
| }, | |
| { | |
| "epoch": 12.23, | |
| "learning_rate": 6.303030303030303e-06, | |
| "loss": 0.8923, | |
| "step": 376000 | |
| }, | |
| { | |
| "epoch": 12.24, | |
| "learning_rate": 6.2979797979797986e-06, | |
| "loss": 0.8921, | |
| "step": 376500 | |
| }, | |
| { | |
| "epoch": 12.26, | |
| "learning_rate": 6.292929292929294e-06, | |
| "loss": 0.8915, | |
| "step": 377000 | |
| }, | |
| { | |
| "epoch": 12.28, | |
| "learning_rate": 6.287878787878788e-06, | |
| "loss": 0.8915, | |
| "step": 377500 | |
| }, | |
| { | |
| "epoch": 12.29, | |
| "learning_rate": 6.282828282828284e-06, | |
| "loss": 0.8909, | |
| "step": 378000 | |
| }, | |
| { | |
| "epoch": 12.31, | |
| "learning_rate": 6.277777777777778e-06, | |
| "loss": 0.8911, | |
| "step": 378500 | |
| }, | |
| { | |
| "epoch": 12.33, | |
| "learning_rate": 6.2727272727272734e-06, | |
| "loss": 0.8907, | |
| "step": 379000 | |
| }, | |
| { | |
| "epoch": 12.34, | |
| "learning_rate": 6.267676767676768e-06, | |
| "loss": 0.8904, | |
| "step": 379500 | |
| }, | |
| { | |
| "epoch": 12.36, | |
| "learning_rate": 6.262626262626264e-06, | |
| "loss": 0.8905, | |
| "step": 380000 | |
| }, | |
| { | |
| "epoch": 12.37, | |
| "learning_rate": 6.257575757575758e-06, | |
| "loss": 0.89, | |
| "step": 380500 | |
| }, | |
| { | |
| "epoch": 12.39, | |
| "learning_rate": 6.252525252525253e-06, | |
| "loss": 0.8896, | |
| "step": 381000 | |
| }, | |
| { | |
| "epoch": 12.41, | |
| "learning_rate": 6.2474747474747474e-06, | |
| "loss": 0.8889, | |
| "step": 381500 | |
| }, | |
| { | |
| "epoch": 12.42, | |
| "learning_rate": 6.2424242424242434e-06, | |
| "loss": 0.8885, | |
| "step": 382000 | |
| }, | |
| { | |
| "epoch": 12.44, | |
| "learning_rate": 6.237373737373738e-06, | |
| "loss": 0.8887, | |
| "step": 382500 | |
| }, | |
| { | |
| "epoch": 12.46, | |
| "learning_rate": 6.232323232323233e-06, | |
| "loss": 0.8885, | |
| "step": 383000 | |
| }, | |
| { | |
| "epoch": 12.47, | |
| "learning_rate": 6.227272727272727e-06, | |
| "loss": 0.8887, | |
| "step": 383500 | |
| }, | |
| { | |
| "epoch": 12.49, | |
| "learning_rate": 6.222222222222223e-06, | |
| "loss": 0.888, | |
| "step": 384000 | |
| }, | |
| { | |
| "epoch": 12.5, | |
| "learning_rate": 6.2171717171717175e-06, | |
| "loss": 0.8884, | |
| "step": 384500 | |
| }, | |
| { | |
| "epoch": 12.52, | |
| "learning_rate": 6.212121212121213e-06, | |
| "loss": 0.888, | |
| "step": 385000 | |
| }, | |
| { | |
| "epoch": 12.54, | |
| "learning_rate": 6.207070707070707e-06, | |
| "loss": 0.8869, | |
| "step": 385500 | |
| }, | |
| { | |
| "epoch": 12.55, | |
| "learning_rate": 6.202020202020203e-06, | |
| "loss": 0.8869, | |
| "step": 386000 | |
| }, | |
| { | |
| "epoch": 12.57, | |
| "learning_rate": 6.196969696969698e-06, | |
| "loss": 0.8865, | |
| "step": 386500 | |
| }, | |
| { | |
| "epoch": 12.59, | |
| "learning_rate": 6.191919191919192e-06, | |
| "loss": 0.8863, | |
| "step": 387000 | |
| }, | |
| { | |
| "epoch": 12.6, | |
| "learning_rate": 6.1868686868686875e-06, | |
| "loss": 0.8865, | |
| "step": 387500 | |
| }, | |
| { | |
| "epoch": 12.62, | |
| "learning_rate": 6.181818181818182e-06, | |
| "loss": 0.8856, | |
| "step": 388000 | |
| }, | |
| { | |
| "epoch": 12.63, | |
| "learning_rate": 6.176767676767678e-06, | |
| "loss": 0.885, | |
| "step": 388500 | |
| }, | |
| { | |
| "epoch": 12.65, | |
| "learning_rate": 6.171717171717172e-06, | |
| "loss": 0.8857, | |
| "step": 389000 | |
| }, | |
| { | |
| "epoch": 12.67, | |
| "learning_rate": 6.166666666666667e-06, | |
| "loss": 0.8857, | |
| "step": 389500 | |
| }, | |
| { | |
| "epoch": 12.68, | |
| "learning_rate": 6.1616161616161615e-06, | |
| "loss": 0.8857, | |
| "step": 390000 | |
| }, | |
| { | |
| "epoch": 12.7, | |
| "learning_rate": 6.1565656565656575e-06, | |
| "loss": 0.885, | |
| "step": 390500 | |
| }, | |
| { | |
| "epoch": 12.72, | |
| "learning_rate": 6.151515151515152e-06, | |
| "loss": 0.8848, | |
| "step": 391000 | |
| }, | |
| { | |
| "epoch": 12.73, | |
| "learning_rate": 6.146464646464647e-06, | |
| "loss": 0.8839, | |
| "step": 391500 | |
| }, | |
| { | |
| "epoch": 12.75, | |
| "learning_rate": 6.141414141414141e-06, | |
| "loss": 0.8841, | |
| "step": 392000 | |
| }, | |
| { | |
| "epoch": 12.77, | |
| "learning_rate": 6.136363636363637e-06, | |
| "loss": 0.8837, | |
| "step": 392500 | |
| }, | |
| { | |
| "epoch": 12.78, | |
| "learning_rate": 6.1313131313131315e-06, | |
| "loss": 0.8839, | |
| "step": 393000 | |
| }, | |
| { | |
| "epoch": 12.8, | |
| "learning_rate": 6.126262626262627e-06, | |
| "loss": 0.8832, | |
| "step": 393500 | |
| }, | |
| { | |
| "epoch": 12.81, | |
| "learning_rate": 6.121212121212121e-06, | |
| "loss": 0.8831, | |
| "step": 394000 | |
| }, | |
| { | |
| "epoch": 12.83, | |
| "learning_rate": 6.116161616161617e-06, | |
| "loss": 0.8832, | |
| "step": 394500 | |
| }, | |
| { | |
| "epoch": 12.85, | |
| "learning_rate": 6.111111111111112e-06, | |
| "loss": 0.8829, | |
| "step": 395000 | |
| }, | |
| { | |
| "epoch": 12.86, | |
| "learning_rate": 6.106060606060606e-06, | |
| "loss": 0.8823, | |
| "step": 395500 | |
| }, | |
| { | |
| "epoch": 12.88, | |
| "learning_rate": 6.1010101010101015e-06, | |
| "loss": 0.8816, | |
| "step": 396000 | |
| }, | |
| { | |
| "epoch": 12.9, | |
| "learning_rate": 6.095959595959597e-06, | |
| "loss": 0.8818, | |
| "step": 396500 | |
| }, | |
| { | |
| "epoch": 12.91, | |
| "learning_rate": 6.090909090909092e-06, | |
| "loss": 0.8808, | |
| "step": 397000 | |
| }, | |
| { | |
| "epoch": 12.93, | |
| "learning_rate": 6.085858585858586e-06, | |
| "loss": 0.8813, | |
| "step": 397500 | |
| }, | |
| { | |
| "epoch": 12.94, | |
| "learning_rate": 6.080808080808081e-06, | |
| "loss": 0.8815, | |
| "step": 398000 | |
| }, | |
| { | |
| "epoch": 12.96, | |
| "learning_rate": 6.0757575757575755e-06, | |
| "loss": 0.8807, | |
| "step": 398500 | |
| }, | |
| { | |
| "epoch": 12.98, | |
| "learning_rate": 6.0707070707070715e-06, | |
| "loss": 0.8805, | |
| "step": 399000 | |
| }, | |
| { | |
| "epoch": 12.99, | |
| "learning_rate": 6.065656565656566e-06, | |
| "loss": 0.88, | |
| "step": 399500 | |
| }, | |
| { | |
| "epoch": 13.01, | |
| "learning_rate": 6.060606060606061e-06, | |
| "loss": 0.8797, | |
| "step": 400000 | |
| }, | |
| { | |
| "epoch": 13.03, | |
| "learning_rate": 6.055555555555555e-06, | |
| "loss": 0.8797, | |
| "step": 400500 | |
| }, | |
| { | |
| "epoch": 13.04, | |
| "learning_rate": 6.050505050505051e-06, | |
| "loss": 0.8797, | |
| "step": 401000 | |
| }, | |
| { | |
| "epoch": 13.06, | |
| "learning_rate": 6.0454545454545456e-06, | |
| "loss": 0.8792, | |
| "step": 401500 | |
| }, | |
| { | |
| "epoch": 13.07, | |
| "learning_rate": 6.040404040404041e-06, | |
| "loss": 0.8785, | |
| "step": 402000 | |
| }, | |
| { | |
| "epoch": 13.09, | |
| "learning_rate": 6.035353535353535e-06, | |
| "loss": 0.8788, | |
| "step": 402500 | |
| }, | |
| { | |
| "epoch": 13.11, | |
| "learning_rate": 6.030303030303031e-06, | |
| "loss": 0.8789, | |
| "step": 403000 | |
| }, | |
| { | |
| "epoch": 13.12, | |
| "learning_rate": 6.025252525252526e-06, | |
| "loss": 0.8788, | |
| "step": 403500 | |
| }, | |
| { | |
| "epoch": 13.14, | |
| "learning_rate": 6.0202020202020204e-06, | |
| "loss": 0.8779, | |
| "step": 404000 | |
| }, | |
| { | |
| "epoch": 13.16, | |
| "learning_rate": 6.015151515151516e-06, | |
| "loss": 0.8779, | |
| "step": 404500 | |
| }, | |
| { | |
| "epoch": 13.17, | |
| "learning_rate": 6.010101010101011e-06, | |
| "loss": 0.8768, | |
| "step": 405000 | |
| }, | |
| { | |
| "epoch": 13.19, | |
| "learning_rate": 6.005050505050506e-06, | |
| "loss": 0.8775, | |
| "step": 405500 | |
| }, | |
| { | |
| "epoch": 13.2, | |
| "learning_rate": 6e-06, | |
| "loss": 0.8773, | |
| "step": 406000 | |
| }, | |
| { | |
| "epoch": 13.22, | |
| "learning_rate": 5.994949494949496e-06, | |
| "loss": 0.8766, | |
| "step": 406500 | |
| }, | |
| { | |
| "epoch": 13.24, | |
| "learning_rate": 5.9898989898989904e-06, | |
| "loss": 0.8764, | |
| "step": 407000 | |
| }, | |
| { | |
| "epoch": 13.25, | |
| "learning_rate": 5.984848484848486e-06, | |
| "loss": 0.8772, | |
| "step": 407500 | |
| }, | |
| { | |
| "epoch": 13.27, | |
| "learning_rate": 5.97979797979798e-06, | |
| "loss": 0.876, | |
| "step": 408000 | |
| }, | |
| { | |
| "epoch": 13.29, | |
| "learning_rate": 5.974747474747475e-06, | |
| "loss": 0.8756, | |
| "step": 408500 | |
| }, | |
| { | |
| "epoch": 13.3, | |
| "learning_rate": 5.96969696969697e-06, | |
| "loss": 0.8755, | |
| "step": 409000 | |
| }, | |
| { | |
| "epoch": 13.32, | |
| "learning_rate": 5.964646464646465e-06, | |
| "loss": 0.8751, | |
| "step": 409500 | |
| }, | |
| { | |
| "epoch": 13.33, | |
| "learning_rate": 5.95959595959596e-06, | |
| "loss": 0.8749, | |
| "step": 410000 | |
| }, | |
| { | |
| "epoch": 13.35, | |
| "learning_rate": 5.954545454545455e-06, | |
| "loss": 0.8748, | |
| "step": 410500 | |
| }, | |
| { | |
| "epoch": 13.37, | |
| "learning_rate": 5.949494949494949e-06, | |
| "loss": 0.875, | |
| "step": 411000 | |
| }, | |
| { | |
| "epoch": 13.38, | |
| "learning_rate": 5.944444444444445e-06, | |
| "loss": 0.8751, | |
| "step": 411500 | |
| }, | |
| { | |
| "epoch": 13.4, | |
| "learning_rate": 5.93939393939394e-06, | |
| "loss": 0.8737, | |
| "step": 412000 | |
| }, | |
| { | |
| "epoch": 13.42, | |
| "learning_rate": 5.9343434343434345e-06, | |
| "loss": 0.8743, | |
| "step": 412500 | |
| }, | |
| { | |
| "epoch": 13.43, | |
| "learning_rate": 5.9292929292929305e-06, | |
| "loss": 0.8733, | |
| "step": 413000 | |
| }, | |
| { | |
| "epoch": 13.45, | |
| "learning_rate": 5.924242424242425e-06, | |
| "loss": 0.8735, | |
| "step": 413500 | |
| }, | |
| { | |
| "epoch": 13.46, | |
| "learning_rate": 5.91919191919192e-06, | |
| "loss": 0.8732, | |
| "step": 414000 | |
| }, | |
| { | |
| "epoch": 13.48, | |
| "learning_rate": 5.914141414141414e-06, | |
| "loss": 0.8734, | |
| "step": 414500 | |
| }, | |
| { | |
| "epoch": 13.5, | |
| "learning_rate": 5.90909090909091e-06, | |
| "loss": 0.8724, | |
| "step": 415000 | |
| }, | |
| { | |
| "epoch": 13.51, | |
| "learning_rate": 5.9040404040404045e-06, | |
| "loss": 0.8724, | |
| "step": 415500 | |
| }, | |
| { | |
| "epoch": 13.53, | |
| "learning_rate": 5.8989898989899e-06, | |
| "loss": 0.8719, | |
| "step": 416000 | |
| }, | |
| { | |
| "epoch": 13.55, | |
| "learning_rate": 5.893939393939394e-06, | |
| "loss": 0.8714, | |
| "step": 416500 | |
| }, | |
| { | |
| "epoch": 13.56, | |
| "learning_rate": 5.88888888888889e-06, | |
| "loss": 0.8723, | |
| "step": 417000 | |
| }, | |
| { | |
| "epoch": 13.58, | |
| "learning_rate": 5.883838383838384e-06, | |
| "loss": 0.8719, | |
| "step": 417500 | |
| }, | |
| { | |
| "epoch": 13.59, | |
| "learning_rate": 5.878787878787879e-06, | |
| "loss": 0.8709, | |
| "step": 418000 | |
| }, | |
| { | |
| "epoch": 13.61, | |
| "learning_rate": 5.873737373737374e-06, | |
| "loss": 0.8709, | |
| "step": 418500 | |
| }, | |
| { | |
| "epoch": 13.63, | |
| "learning_rate": 5.868686868686869e-06, | |
| "loss": 0.8707, | |
| "step": 419000 | |
| }, | |
| { | |
| "epoch": 13.64, | |
| "learning_rate": 5.863636363636364e-06, | |
| "loss": 0.871, | |
| "step": 419500 | |
| }, | |
| { | |
| "epoch": 13.66, | |
| "learning_rate": 5.858585858585859e-06, | |
| "loss": 0.8703, | |
| "step": 420000 | |
| }, | |
| { | |
| "epoch": 13.68, | |
| "learning_rate": 5.853535353535354e-06, | |
| "loss": 0.8698, | |
| "step": 420500 | |
| }, | |
| { | |
| "epoch": 13.69, | |
| "learning_rate": 5.8484848484848485e-06, | |
| "loss": 0.8698, | |
| "step": 421000 | |
| }, | |
| { | |
| "epoch": 13.71, | |
| "learning_rate": 5.8434343434343445e-06, | |
| "loss": 0.8696, | |
| "step": 421500 | |
| }, | |
| { | |
| "epoch": 13.72, | |
| "learning_rate": 5.838383838383839e-06, | |
| "loss": 0.869, | |
| "step": 422000 | |
| }, | |
| { | |
| "epoch": 13.74, | |
| "learning_rate": 5.833333333333334e-06, | |
| "loss": 0.8695, | |
| "step": 422500 | |
| }, | |
| { | |
| "epoch": 13.76, | |
| "learning_rate": 5.828282828282828e-06, | |
| "loss": 0.8691, | |
| "step": 423000 | |
| }, | |
| { | |
| "epoch": 13.77, | |
| "learning_rate": 5.823232323232324e-06, | |
| "loss": 0.8693, | |
| "step": 423500 | |
| }, | |
| { | |
| "epoch": 13.79, | |
| "learning_rate": 5.8181818181818185e-06, | |
| "loss": 0.8686, | |
| "step": 424000 | |
| }, | |
| { | |
| "epoch": 13.81, | |
| "learning_rate": 5.813131313131314e-06, | |
| "loss": 0.8684, | |
| "step": 424500 | |
| }, | |
| { | |
| "epoch": 13.82, | |
| "learning_rate": 5.808080808080808e-06, | |
| "loss": 0.8679, | |
| "step": 425000 | |
| }, | |
| { | |
| "epoch": 13.84, | |
| "learning_rate": 5.803030303030304e-06, | |
| "loss": 0.8679, | |
| "step": 425500 | |
| }, | |
| { | |
| "epoch": 13.85, | |
| "learning_rate": 5.797979797979798e-06, | |
| "loss": 0.8673, | |
| "step": 426000 | |
| }, | |
| { | |
| "epoch": 13.87, | |
| "learning_rate": 5.792929292929293e-06, | |
| "loss": 0.8678, | |
| "step": 426500 | |
| }, | |
| { | |
| "epoch": 13.89, | |
| "learning_rate": 5.787878787878788e-06, | |
| "loss": 0.8677, | |
| "step": 427000 | |
| }, | |
| { | |
| "epoch": 13.9, | |
| "learning_rate": 5.782828282828284e-06, | |
| "loss": 0.867, | |
| "step": 427500 | |
| }, | |
| { | |
| "epoch": 13.92, | |
| "learning_rate": 5.777777777777778e-06, | |
| "loss": 0.8667, | |
| "step": 428000 | |
| }, | |
| { | |
| "epoch": 13.94, | |
| "learning_rate": 5.772727272727273e-06, | |
| "loss": 0.8662, | |
| "step": 428500 | |
| }, | |
| { | |
| "epoch": 13.95, | |
| "learning_rate": 5.767676767676768e-06, | |
| "loss": 0.8665, | |
| "step": 429000 | |
| }, | |
| { | |
| "epoch": 13.97, | |
| "learning_rate": 5.762626262626263e-06, | |
| "loss": 0.8656, | |
| "step": 429500 | |
| }, | |
| { | |
| "epoch": 13.98, | |
| "learning_rate": 5.7575757575757586e-06, | |
| "loss": 0.8659, | |
| "step": 430000 | |
| }, | |
| { | |
| "epoch": 14.0, | |
| "learning_rate": 5.752525252525253e-06, | |
| "loss": 0.8656, | |
| "step": 430500 | |
| }, | |
| { | |
| "epoch": 14.02, | |
| "learning_rate": 5.747474747474748e-06, | |
| "loss": 0.8647, | |
| "step": 431000 | |
| }, | |
| { | |
| "epoch": 14.03, | |
| "learning_rate": 5.742424242424242e-06, | |
| "loss": 0.865, | |
| "step": 431500 | |
| }, | |
| { | |
| "epoch": 14.05, | |
| "learning_rate": 5.737373737373738e-06, | |
| "loss": 0.8646, | |
| "step": 432000 | |
| }, | |
| { | |
| "epoch": 14.07, | |
| "learning_rate": 5.732323232323233e-06, | |
| "loss": 0.8644, | |
| "step": 432500 | |
| }, | |
| { | |
| "epoch": 14.08, | |
| "learning_rate": 5.727272727272728e-06, | |
| "loss": 0.8645, | |
| "step": 433000 | |
| }, | |
| { | |
| "epoch": 14.1, | |
| "learning_rate": 5.722222222222222e-06, | |
| "loss": 0.8641, | |
| "step": 433500 | |
| }, | |
| { | |
| "epoch": 14.11, | |
| "learning_rate": 5.717171717171718e-06, | |
| "loss": 0.8643, | |
| "step": 434000 | |
| }, | |
| { | |
| "epoch": 14.13, | |
| "learning_rate": 5.712121212121212e-06, | |
| "loss": 0.8635, | |
| "step": 434500 | |
| }, | |
| { | |
| "epoch": 14.15, | |
| "learning_rate": 5.7070707070707075e-06, | |
| "loss": 0.8631, | |
| "step": 435000 | |
| }, | |
| { | |
| "epoch": 14.16, | |
| "learning_rate": 5.702020202020202e-06, | |
| "loss": 0.8635, | |
| "step": 435500 | |
| }, | |
| { | |
| "epoch": 14.18, | |
| "learning_rate": 5.696969696969698e-06, | |
| "loss": 0.8627, | |
| "step": 436000 | |
| }, | |
| { | |
| "epoch": 14.2, | |
| "learning_rate": 5.691919191919192e-06, | |
| "loss": 0.8623, | |
| "step": 436500 | |
| }, | |
| { | |
| "epoch": 14.21, | |
| "learning_rate": 5.686868686868687e-06, | |
| "loss": 0.8622, | |
| "step": 437000 | |
| }, | |
| { | |
| "epoch": 14.23, | |
| "learning_rate": 5.681818181818183e-06, | |
| "loss": 0.8619, | |
| "step": 437500 | |
| }, | |
| { | |
| "epoch": 14.24, | |
| "learning_rate": 5.6767676767676775e-06, | |
| "loss": 0.8616, | |
| "step": 438000 | |
| }, | |
| { | |
| "epoch": 14.26, | |
| "learning_rate": 5.671717171717173e-06, | |
| "loss": 0.8621, | |
| "step": 438500 | |
| }, | |
| { | |
| "epoch": 14.28, | |
| "learning_rate": 5.666666666666667e-06, | |
| "loss": 0.8622, | |
| "step": 439000 | |
| }, | |
| { | |
| "epoch": 14.29, | |
| "learning_rate": 5.661616161616162e-06, | |
| "loss": 0.8616, | |
| "step": 439500 | |
| }, | |
| { | |
| "epoch": 14.31, | |
| "learning_rate": 5.656565656565657e-06, | |
| "loss": 0.8612, | |
| "step": 440000 | |
| }, | |
| { | |
| "epoch": 14.33, | |
| "learning_rate": 5.651515151515152e-06, | |
| "loss": 0.8612, | |
| "step": 440500 | |
| }, | |
| { | |
| "epoch": 14.34, | |
| "learning_rate": 5.646464646464647e-06, | |
| "loss": 0.8607, | |
| "step": 441000 | |
| }, | |
| { | |
| "epoch": 14.36, | |
| "learning_rate": 5.641414141414142e-06, | |
| "loss": 0.8607, | |
| "step": 441500 | |
| }, | |
| { | |
| "epoch": 14.37, | |
| "learning_rate": 5.636363636363636e-06, | |
| "loss": 0.8594, | |
| "step": 442000 | |
| }, | |
| { | |
| "epoch": 14.39, | |
| "learning_rate": 5.631313131313132e-06, | |
| "loss": 0.8602, | |
| "step": 442500 | |
| }, | |
| { | |
| "epoch": 14.41, | |
| "learning_rate": 5.626262626262626e-06, | |
| "loss": 0.8597, | |
| "step": 443000 | |
| }, | |
| { | |
| "epoch": 14.42, | |
| "learning_rate": 5.6212121212121215e-06, | |
| "loss": 0.8594, | |
| "step": 443500 | |
| }, | |
| { | |
| "epoch": 14.44, | |
| "learning_rate": 5.616161616161616e-06, | |
| "loss": 0.8598, | |
| "step": 444000 | |
| }, | |
| { | |
| "epoch": 14.46, | |
| "learning_rate": 5.611111111111112e-06, | |
| "loss": 0.8588, | |
| "step": 444500 | |
| }, | |
| { | |
| "epoch": 14.47, | |
| "learning_rate": 5.606060606060606e-06, | |
| "loss": 0.8587, | |
| "step": 445000 | |
| }, | |
| { | |
| "epoch": 14.49, | |
| "learning_rate": 5.601010101010101e-06, | |
| "loss": 0.8583, | |
| "step": 445500 | |
| }, | |
| { | |
| "epoch": 14.51, | |
| "learning_rate": 5.595959595959597e-06, | |
| "loss": 0.8589, | |
| "step": 446000 | |
| }, | |
| { | |
| "epoch": 14.52, | |
| "learning_rate": 5.5909090909090915e-06, | |
| "loss": 0.8582, | |
| "step": 446500 | |
| }, | |
| { | |
| "epoch": 14.54, | |
| "learning_rate": 5.585858585858587e-06, | |
| "loss": 0.8573, | |
| "step": 447000 | |
| }, | |
| { | |
| "epoch": 14.55, | |
| "learning_rate": 5.580808080808081e-06, | |
| "loss": 0.8581, | |
| "step": 447500 | |
| }, | |
| { | |
| "epoch": 14.57, | |
| "learning_rate": 5.575757575757577e-06, | |
| "loss": 0.8578, | |
| "step": 448000 | |
| }, | |
| { | |
| "epoch": 14.59, | |
| "learning_rate": 5.570707070707071e-06, | |
| "loss": 0.858, | |
| "step": 448500 | |
| }, | |
| { | |
| "epoch": 14.6, | |
| "learning_rate": 5.565656565656566e-06, | |
| "loss": 0.8566, | |
| "step": 449000 | |
| }, | |
| { | |
| "epoch": 14.62, | |
| "learning_rate": 5.560606060606061e-06, | |
| "loss": 0.8567, | |
| "step": 449500 | |
| }, | |
| { | |
| "epoch": 14.64, | |
| "learning_rate": 5.555555555555557e-06, | |
| "loss": 0.8566, | |
| "step": 450000 | |
| }, | |
| { | |
| "epoch": 14.65, | |
| "learning_rate": 5.550505050505051e-06, | |
| "loss": 0.8566, | |
| "step": 450500 | |
| }, | |
| { | |
| "epoch": 14.67, | |
| "learning_rate": 5.545454545454546e-06, | |
| "loss": 0.8566, | |
| "step": 451000 | |
| }, | |
| { | |
| "epoch": 14.68, | |
| "learning_rate": 5.54040404040404e-06, | |
| "loss": 0.8563, | |
| "step": 451500 | |
| }, | |
| { | |
| "epoch": 14.7, | |
| "learning_rate": 5.5353535353535355e-06, | |
| "loss": 0.8565, | |
| "step": 452000 | |
| }, | |
| { | |
| "epoch": 14.72, | |
| "learning_rate": 5.530303030303031e-06, | |
| "loss": 0.8566, | |
| "step": 452500 | |
| }, | |
| { | |
| "epoch": 14.73, | |
| "learning_rate": 5.525252525252526e-06, | |
| "loss": 0.8555, | |
| "step": 453000 | |
| }, | |
| { | |
| "epoch": 14.75, | |
| "learning_rate": 5.52020202020202e-06, | |
| "loss": 0.8557, | |
| "step": 453500 | |
| }, | |
| { | |
| "epoch": 14.77, | |
| "learning_rate": 5.515151515151515e-06, | |
| "loss": 0.8556, | |
| "step": 454000 | |
| }, | |
| { | |
| "epoch": 14.78, | |
| "learning_rate": 5.510101010101011e-06, | |
| "loss": 0.8553, | |
| "step": 454500 | |
| }, | |
| { | |
| "epoch": 14.8, | |
| "learning_rate": 5.5050505050505056e-06, | |
| "loss": 0.8544, | |
| "step": 455000 | |
| }, | |
| { | |
| "epoch": 14.81, | |
| "learning_rate": 5.500000000000001e-06, | |
| "loss": 0.8542, | |
| "step": 455500 | |
| }, | |
| { | |
| "epoch": 14.83, | |
| "learning_rate": 5.494949494949495e-06, | |
| "loss": 0.8538, | |
| "step": 456000 | |
| }, | |
| { | |
| "epoch": 14.85, | |
| "learning_rate": 5.489898989898991e-06, | |
| "loss": 0.8538, | |
| "step": 456500 | |
| }, | |
| { | |
| "epoch": 14.86, | |
| "learning_rate": 5.484848484848485e-06, | |
| "loss": 0.8543, | |
| "step": 457000 | |
| }, | |
| { | |
| "epoch": 14.88, | |
| "learning_rate": 5.4797979797979804e-06, | |
| "loss": 0.8538, | |
| "step": 457500 | |
| }, | |
| { | |
| "epoch": 14.9, | |
| "learning_rate": 5.474747474747475e-06, | |
| "loss": 0.8535, | |
| "step": 458000 | |
| }, | |
| { | |
| "epoch": 14.91, | |
| "learning_rate": 5.469696969696971e-06, | |
| "loss": 0.8531, | |
| "step": 458500 | |
| }, | |
| { | |
| "epoch": 14.93, | |
| "learning_rate": 5.464646464646465e-06, | |
| "loss": 0.8533, | |
| "step": 459000 | |
| }, | |
| { | |
| "epoch": 14.94, | |
| "learning_rate": 5.45959595959596e-06, | |
| "loss": 0.8527, | |
| "step": 459500 | |
| }, | |
| { | |
| "epoch": 14.96, | |
| "learning_rate": 5.4545454545454545e-06, | |
| "loss": 0.8525, | |
| "step": 460000 | |
| }, | |
| { | |
| "epoch": 14.98, | |
| "learning_rate": 5.4494949494949504e-06, | |
| "loss": 0.8522, | |
| "step": 460500 | |
| }, | |
| { | |
| "epoch": 14.99, | |
| "learning_rate": 5.444444444444445e-06, | |
| "loss": 0.8525, | |
| "step": 461000 | |
| }, | |
| { | |
| "epoch": 15.01, | |
| "learning_rate": 5.43939393939394e-06, | |
| "loss": 0.8517, | |
| "step": 461500 | |
| }, | |
| { | |
| "epoch": 15.03, | |
| "learning_rate": 5.434343434343434e-06, | |
| "loss": 0.852, | |
| "step": 462000 | |
| }, | |
| { | |
| "epoch": 15.04, | |
| "learning_rate": 5.429292929292929e-06, | |
| "loss": 0.852, | |
| "step": 462500 | |
| }, | |
| { | |
| "epoch": 15.06, | |
| "learning_rate": 5.424242424242425e-06, | |
| "loss": 0.8509, | |
| "step": 463000 | |
| }, | |
| { | |
| "epoch": 15.07, | |
| "learning_rate": 5.41919191919192e-06, | |
| "loss": 0.8512, | |
| "step": 463500 | |
| }, | |
| { | |
| "epoch": 15.09, | |
| "learning_rate": 5.414141414141415e-06, | |
| "loss": 0.8511, | |
| "step": 464000 | |
| }, | |
| { | |
| "epoch": 15.11, | |
| "learning_rate": 5.409090909090909e-06, | |
| "loss": 0.8509, | |
| "step": 464500 | |
| }, | |
| { | |
| "epoch": 15.12, | |
| "learning_rate": 5.404040404040405e-06, | |
| "loss": 0.8508, | |
| "step": 465000 | |
| }, | |
| { | |
| "epoch": 15.14, | |
| "learning_rate": 5.398989898989899e-06, | |
| "loss": 0.8512, | |
| "step": 465500 | |
| }, | |
| { | |
| "epoch": 15.16, | |
| "learning_rate": 5.3939393939393945e-06, | |
| "loss": 0.8497, | |
| "step": 466000 | |
| }, | |
| { | |
| "epoch": 15.17, | |
| "learning_rate": 5.388888888888889e-06, | |
| "loss": 0.8502, | |
| "step": 466500 | |
| }, | |
| { | |
| "epoch": 15.19, | |
| "learning_rate": 5.383838383838385e-06, | |
| "loss": 0.8493, | |
| "step": 467000 | |
| }, | |
| { | |
| "epoch": 15.2, | |
| "learning_rate": 5.378787878787879e-06, | |
| "loss": 0.8501, | |
| "step": 467500 | |
| }, | |
| { | |
| "epoch": 15.22, | |
| "learning_rate": 5.373737373737374e-06, | |
| "loss": 0.8488, | |
| "step": 468000 | |
| }, | |
| { | |
| "epoch": 15.24, | |
| "learning_rate": 5.3686868686868685e-06, | |
| "loss": 0.8491, | |
| "step": 468500 | |
| }, | |
| { | |
| "epoch": 15.25, | |
| "learning_rate": 5.3636363636363645e-06, | |
| "loss": 0.8496, | |
| "step": 469000 | |
| }, | |
| { | |
| "epoch": 15.27, | |
| "learning_rate": 5.358585858585859e-06, | |
| "loss": 0.8484, | |
| "step": 469500 | |
| }, | |
| { | |
| "epoch": 15.29, | |
| "learning_rate": 5.353535353535354e-06, | |
| "loss": 0.8489, | |
| "step": 470000 | |
| }, | |
| { | |
| "epoch": 15.3, | |
| "learning_rate": 5.348484848484848e-06, | |
| "loss": 0.849, | |
| "step": 470500 | |
| }, | |
| { | |
| "epoch": 15.32, | |
| "learning_rate": 5.343434343434344e-06, | |
| "loss": 0.8483, | |
| "step": 471000 | |
| }, | |
| { | |
| "epoch": 15.33, | |
| "learning_rate": 5.338383838383839e-06, | |
| "loss": 0.8481, | |
| "step": 471500 | |
| }, | |
| { | |
| "epoch": 15.35, | |
| "learning_rate": 5.333333333333334e-06, | |
| "loss": 0.8478, | |
| "step": 472000 | |
| }, | |
| { | |
| "epoch": 15.37, | |
| "learning_rate": 5.328282828282829e-06, | |
| "loss": 0.8474, | |
| "step": 472500 | |
| }, | |
| { | |
| "epoch": 15.38, | |
| "learning_rate": 5.323232323232324e-06, | |
| "loss": 0.8474, | |
| "step": 473000 | |
| }, | |
| { | |
| "epoch": 15.4, | |
| "learning_rate": 5.318181818181819e-06, | |
| "loss": 0.8468, | |
| "step": 473500 | |
| }, | |
| { | |
| "epoch": 15.42, | |
| "learning_rate": 5.313131313131313e-06, | |
| "loss": 0.8468, | |
| "step": 474000 | |
| }, | |
| { | |
| "epoch": 15.43, | |
| "learning_rate": 5.3080808080808085e-06, | |
| "loss": 0.8466, | |
| "step": 474500 | |
| }, | |
| { | |
| "epoch": 15.45, | |
| "learning_rate": 5.303030303030303e-06, | |
| "loss": 0.8468, | |
| "step": 475000 | |
| }, | |
| { | |
| "epoch": 15.46, | |
| "learning_rate": 5.297979797979799e-06, | |
| "loss": 0.8463, | |
| "step": 475500 | |
| }, | |
| { | |
| "epoch": 15.48, | |
| "learning_rate": 5.292929292929293e-06, | |
| "loss": 0.8461, | |
| "step": 476000 | |
| }, | |
| { | |
| "epoch": 15.5, | |
| "learning_rate": 5.287878787878788e-06, | |
| "loss": 0.8461, | |
| "step": 476500 | |
| }, | |
| { | |
| "epoch": 15.51, | |
| "learning_rate": 5.2828282828282825e-06, | |
| "loss": 0.8461, | |
| "step": 477000 | |
| }, | |
| { | |
| "epoch": 15.53, | |
| "learning_rate": 5.2777777777777785e-06, | |
| "loss": 0.8453, | |
| "step": 477500 | |
| }, | |
| { | |
| "epoch": 15.55, | |
| "learning_rate": 5.272727272727273e-06, | |
| "loss": 0.8454, | |
| "step": 478000 | |
| }, | |
| { | |
| "epoch": 15.56, | |
| "learning_rate": 5.267676767676768e-06, | |
| "loss": 0.8452, | |
| "step": 478500 | |
| }, | |
| { | |
| "epoch": 15.58, | |
| "learning_rate": 5.262626262626262e-06, | |
| "loss": 0.8456, | |
| "step": 479000 | |
| }, | |
| { | |
| "epoch": 15.59, | |
| "learning_rate": 5.257575757575758e-06, | |
| "loss": 0.8445, | |
| "step": 479500 | |
| }, | |
| { | |
| "epoch": 15.61, | |
| "learning_rate": 5.252525252525253e-06, | |
| "loss": 0.8446, | |
| "step": 480000 | |
| }, | |
| { | |
| "epoch": 15.63, | |
| "learning_rate": 5.247474747474748e-06, | |
| "loss": 0.8444, | |
| "step": 480500 | |
| }, | |
| { | |
| "epoch": 15.64, | |
| "learning_rate": 5.242424242424244e-06, | |
| "loss": 0.8442, | |
| "step": 481000 | |
| }, | |
| { | |
| "epoch": 15.66, | |
| "learning_rate": 5.237373737373738e-06, | |
| "loss": 0.8442, | |
| "step": 481500 | |
| }, | |
| { | |
| "epoch": 15.68, | |
| "learning_rate": 5.232323232323233e-06, | |
| "loss": 0.8438, | |
| "step": 482000 | |
| }, | |
| { | |
| "epoch": 15.69, | |
| "learning_rate": 5.2272727272727274e-06, | |
| "loss": 0.8439, | |
| "step": 482500 | |
| }, | |
| { | |
| "epoch": 15.71, | |
| "learning_rate": 5.2222222222222226e-06, | |
| "loss": 0.8443, | |
| "step": 483000 | |
| }, | |
| { | |
| "epoch": 15.72, | |
| "learning_rate": 5.217171717171718e-06, | |
| "loss": 0.8432, | |
| "step": 483500 | |
| }, | |
| { | |
| "epoch": 15.74, | |
| "learning_rate": 5.212121212121213e-06, | |
| "loss": 0.8439, | |
| "step": 484000 | |
| }, | |
| { | |
| "epoch": 15.76, | |
| "learning_rate": 5.207070707070707e-06, | |
| "loss": 0.8428, | |
| "step": 484500 | |
| }, | |
| { | |
| "epoch": 15.77, | |
| "learning_rate": 5.202020202020202e-06, | |
| "loss": 0.8434, | |
| "step": 485000 | |
| }, | |
| { | |
| "epoch": 15.79, | |
| "learning_rate": 5.196969696969697e-06, | |
| "loss": 0.8428, | |
| "step": 485500 | |
| }, | |
| { | |
| "epoch": 15.81, | |
| "learning_rate": 5.191919191919193e-06, | |
| "loss": 0.8425, | |
| "step": 486000 | |
| }, | |
| { | |
| "epoch": 15.82, | |
| "learning_rate": 5.186868686868687e-06, | |
| "loss": 0.8431, | |
| "step": 486500 | |
| }, | |
| { | |
| "epoch": 15.84, | |
| "learning_rate": 5.181818181818182e-06, | |
| "loss": 0.8422, | |
| "step": 487000 | |
| }, | |
| { | |
| "epoch": 15.85, | |
| "learning_rate": 5.176767676767676e-06, | |
| "loss": 0.8421, | |
| "step": 487500 | |
| }, | |
| { | |
| "epoch": 15.87, | |
| "learning_rate": 5.171717171717172e-06, | |
| "loss": 0.842, | |
| "step": 488000 | |
| }, | |
| { | |
| "epoch": 15.89, | |
| "learning_rate": 5.1666666666666675e-06, | |
| "loss": 0.8418, | |
| "step": 488500 | |
| }, | |
| { | |
| "epoch": 15.9, | |
| "learning_rate": 5.161616161616162e-06, | |
| "loss": 0.8412, | |
| "step": 489000 | |
| }, | |
| { | |
| "epoch": 15.92, | |
| "learning_rate": 5.156565656565658e-06, | |
| "loss": 0.8414, | |
| "step": 489500 | |
| }, | |
| { | |
| "epoch": 15.94, | |
| "learning_rate": 5.151515151515152e-06, | |
| "loss": 0.8409, | |
| "step": 490000 | |
| }, | |
| { | |
| "epoch": 15.95, | |
| "learning_rate": 5.146464646464647e-06, | |
| "loss": 0.8407, | |
| "step": 490500 | |
| }, | |
| { | |
| "epoch": 15.97, | |
| "learning_rate": 5.1414141414141415e-06, | |
| "loss": 0.8408, | |
| "step": 491000 | |
| }, | |
| { | |
| "epoch": 15.98, | |
| "learning_rate": 5.1363636363636375e-06, | |
| "loss": 0.8407, | |
| "step": 491500 | |
| }, | |
| { | |
| "epoch": 16.0, | |
| "learning_rate": 5.131313131313132e-06, | |
| "loss": 0.8407, | |
| "step": 492000 | |
| }, | |
| { | |
| "epoch": 16.02, | |
| "learning_rate": 5.126262626262627e-06, | |
| "loss": 0.8407, | |
| "step": 492500 | |
| }, | |
| { | |
| "epoch": 16.03, | |
| "learning_rate": 5.121212121212121e-06, | |
| "loss": 0.8395, | |
| "step": 493000 | |
| }, | |
| { | |
| "epoch": 16.05, | |
| "learning_rate": 5.116161616161617e-06, | |
| "loss": 0.84, | |
| "step": 493500 | |
| }, | |
| { | |
| "epoch": 16.07, | |
| "learning_rate": 5.1111111111111115e-06, | |
| "loss": 0.8394, | |
| "step": 494000 | |
| }, | |
| { | |
| "epoch": 16.08, | |
| "learning_rate": 5.106060606060607e-06, | |
| "loss": 0.8395, | |
| "step": 494500 | |
| }, | |
| { | |
| "epoch": 16.1, | |
| "learning_rate": 5.101010101010101e-06, | |
| "loss": 0.8391, | |
| "step": 495000 | |
| }, | |
| { | |
| "epoch": 16.11, | |
| "learning_rate": 5.095959595959596e-06, | |
| "loss": 0.8396, | |
| "step": 495500 | |
| }, | |
| { | |
| "epoch": 16.13, | |
| "learning_rate": 5.090909090909091e-06, | |
| "loss": 0.839, | |
| "step": 496000 | |
| }, | |
| { | |
| "epoch": 16.15, | |
| "learning_rate": 5.085858585858586e-06, | |
| "loss": 0.8383, | |
| "step": 496500 | |
| }, | |
| { | |
| "epoch": 16.16, | |
| "learning_rate": 5.0808080808080815e-06, | |
| "loss": 0.8389, | |
| "step": 497000 | |
| }, | |
| { | |
| "epoch": 16.18, | |
| "learning_rate": 5.075757575757576e-06, | |
| "loss": 0.8385, | |
| "step": 497500 | |
| }, | |
| { | |
| "epoch": 16.2, | |
| "learning_rate": 5.070707070707072e-06, | |
| "loss": 0.838, | |
| "step": 498000 | |
| }, | |
| { | |
| "epoch": 16.21, | |
| "learning_rate": 5.065656565656566e-06, | |
| "loss": 0.8377, | |
| "step": 498500 | |
| }, | |
| { | |
| "epoch": 16.23, | |
| "learning_rate": 5.060606060606061e-06, | |
| "loss": 0.8379, | |
| "step": 499000 | |
| }, | |
| { | |
| "epoch": 16.24, | |
| "learning_rate": 5.0555555555555555e-06, | |
| "loss": 0.8376, | |
| "step": 499500 | |
| }, | |
| { | |
| "epoch": 16.26, | |
| "learning_rate": 5.0505050505050515e-06, | |
| "loss": 0.8377, | |
| "step": 500000 | |
| }, | |
| { | |
| "epoch": 16.28, | |
| "learning_rate": 5.045454545454546e-06, | |
| "loss": 0.8374, | |
| "step": 500500 | |
| }, | |
| { | |
| "epoch": 16.29, | |
| "learning_rate": 5.040404040404041e-06, | |
| "loss": 0.8372, | |
| "step": 501000 | |
| }, | |
| { | |
| "epoch": 16.31, | |
| "learning_rate": 5.035353535353535e-06, | |
| "loss": 0.8362, | |
| "step": 501500 | |
| }, | |
| { | |
| "epoch": 16.33, | |
| "learning_rate": 5.030303030303031e-06, | |
| "loss": 0.8366, | |
| "step": 502000 | |
| }, | |
| { | |
| "epoch": 16.34, | |
| "learning_rate": 5.0252525252525255e-06, | |
| "loss": 0.837, | |
| "step": 502500 | |
| }, | |
| { | |
| "epoch": 16.36, | |
| "learning_rate": 5.020202020202021e-06, | |
| "loss": 0.8366, | |
| "step": 503000 | |
| }, | |
| { | |
| "epoch": 16.38, | |
| "learning_rate": 5.015151515151515e-06, | |
| "loss": 0.8363, | |
| "step": 503500 | |
| }, | |
| { | |
| "epoch": 16.39, | |
| "learning_rate": 5.010101010101011e-06, | |
| "loss": 0.8367, | |
| "step": 504000 | |
| }, | |
| { | |
| "epoch": 16.41, | |
| "learning_rate": 5.005050505050505e-06, | |
| "loss": 0.8362, | |
| "step": 504500 | |
| }, | |
| { | |
| "epoch": 16.42, | |
| "learning_rate": 5e-06, | |
| "loss": 0.8354, | |
| "step": 505000 | |
| }, | |
| { | |
| "epoch": 16.44, | |
| "learning_rate": 4.9949494949494956e-06, | |
| "loss": 0.8354, | |
| "step": 505500 | |
| }, | |
| { | |
| "epoch": 16.46, | |
| "learning_rate": 4.98989898989899e-06, | |
| "loss": 0.8359, | |
| "step": 506000 | |
| }, | |
| { | |
| "epoch": 16.47, | |
| "learning_rate": 4.984848484848485e-06, | |
| "loss": 0.8354, | |
| "step": 506500 | |
| }, | |
| { | |
| "epoch": 16.49, | |
| "learning_rate": 4.97979797979798e-06, | |
| "loss": 0.8358, | |
| "step": 507000 | |
| }, | |
| { | |
| "epoch": 16.51, | |
| "learning_rate": 4.974747474747475e-06, | |
| "loss": 0.8346, | |
| "step": 507500 | |
| }, | |
| { | |
| "epoch": 16.52, | |
| "learning_rate": 4.9696969696969696e-06, | |
| "loss": 0.8345, | |
| "step": 508000 | |
| }, | |
| { | |
| "epoch": 16.54, | |
| "learning_rate": 4.964646464646465e-06, | |
| "loss": 0.8346, | |
| "step": 508500 | |
| }, | |
| { | |
| "epoch": 16.55, | |
| "learning_rate": 4.95959595959596e-06, | |
| "loss": 0.834, | |
| "step": 509000 | |
| }, | |
| { | |
| "epoch": 16.57, | |
| "learning_rate": 4.954545454545455e-06, | |
| "loss": 0.8336, | |
| "step": 509500 | |
| }, | |
| { | |
| "epoch": 16.59, | |
| "learning_rate": 4.94949494949495e-06, | |
| "loss": 0.8342, | |
| "step": 510000 | |
| }, | |
| { | |
| "epoch": 16.6, | |
| "learning_rate": 4.944444444444445e-06, | |
| "loss": 0.8342, | |
| "step": 510500 | |
| }, | |
| { | |
| "epoch": 16.62, | |
| "learning_rate": 4.93939393939394e-06, | |
| "loss": 0.8339, | |
| "step": 511000 | |
| }, | |
| { | |
| "epoch": 16.64, | |
| "learning_rate": 4.934343434343435e-06, | |
| "loss": 0.8332, | |
| "step": 511500 | |
| }, | |
| { | |
| "epoch": 16.65, | |
| "learning_rate": 4.92929292929293e-06, | |
| "loss": 0.8334, | |
| "step": 512000 | |
| }, | |
| { | |
| "epoch": 16.67, | |
| "learning_rate": 4.924242424242425e-06, | |
| "loss": 0.834, | |
| "step": 512500 | |
| }, | |
| { | |
| "epoch": 16.68, | |
| "learning_rate": 4.919191919191919e-06, | |
| "loss": 0.8329, | |
| "step": 513000 | |
| }, | |
| { | |
| "epoch": 16.7, | |
| "learning_rate": 4.9141414141414145e-06, | |
| "loss": 0.833, | |
| "step": 513500 | |
| }, | |
| { | |
| "epoch": 16.72, | |
| "learning_rate": 4.90909090909091e-06, | |
| "loss": 0.8321, | |
| "step": 514000 | |
| }, | |
| { | |
| "epoch": 16.73, | |
| "learning_rate": 4.904040404040405e-06, | |
| "loss": 0.8329, | |
| "step": 514500 | |
| }, | |
| { | |
| "epoch": 16.75, | |
| "learning_rate": 4.898989898989899e-06, | |
| "loss": 0.8331, | |
| "step": 515000 | |
| }, | |
| { | |
| "epoch": 16.77, | |
| "learning_rate": 4.893939393939394e-06, | |
| "loss": 0.8321, | |
| "step": 515500 | |
| }, | |
| { | |
| "epoch": 16.78, | |
| "learning_rate": 4.888888888888889e-06, | |
| "loss": 0.8318, | |
| "step": 516000 | |
| }, | |
| { | |
| "epoch": 16.8, | |
| "learning_rate": 4.883838383838384e-06, | |
| "loss": 0.8313, | |
| "step": 516500 | |
| }, | |
| { | |
| "epoch": 16.81, | |
| "learning_rate": 4.878787878787879e-06, | |
| "loss": 0.8314, | |
| "step": 517000 | |
| }, | |
| { | |
| "epoch": 16.83, | |
| "learning_rate": 4.873737373737374e-06, | |
| "loss": 0.8314, | |
| "step": 517500 | |
| }, | |
| { | |
| "epoch": 16.85, | |
| "learning_rate": 4.868686868686869e-06, | |
| "loss": 0.831, | |
| "step": 518000 | |
| }, | |
| { | |
| "epoch": 16.86, | |
| "learning_rate": 4.863636363636364e-06, | |
| "loss": 0.8312, | |
| "step": 518500 | |
| }, | |
| { | |
| "epoch": 16.88, | |
| "learning_rate": 4.858585858585859e-06, | |
| "loss": 0.8313, | |
| "step": 519000 | |
| }, | |
| { | |
| "epoch": 16.9, | |
| "learning_rate": 4.8535353535353545e-06, | |
| "loss": 0.8309, | |
| "step": 519500 | |
| }, | |
| { | |
| "epoch": 16.91, | |
| "learning_rate": 4.848484848484849e-06, | |
| "loss": 0.8306, | |
| "step": 520000 | |
| }, | |
| { | |
| "epoch": 16.93, | |
| "learning_rate": 4.843434343434344e-06, | |
| "loss": 0.8304, | |
| "step": 520500 | |
| }, | |
| { | |
| "epoch": 16.94, | |
| "learning_rate": 4.838383838383839e-06, | |
| "loss": 0.83, | |
| "step": 521000 | |
| }, | |
| { | |
| "epoch": 16.96, | |
| "learning_rate": 4.833333333333333e-06, | |
| "loss": 0.8299, | |
| "step": 521500 | |
| }, | |
| { | |
| "epoch": 16.98, | |
| "learning_rate": 4.8282828282828285e-06, | |
| "loss": 0.8296, | |
| "step": 522000 | |
| }, | |
| { | |
| "epoch": 16.99, | |
| "learning_rate": 4.823232323232324e-06, | |
| "loss": 0.8294, | |
| "step": 522500 | |
| }, | |
| { | |
| "epoch": 17.01, | |
| "learning_rate": 4.818181818181819e-06, | |
| "loss": 0.8299, | |
| "step": 523000 | |
| }, | |
| { | |
| "epoch": 17.03, | |
| "learning_rate": 4.813131313131313e-06, | |
| "loss": 0.8294, | |
| "step": 523500 | |
| }, | |
| { | |
| "epoch": 17.04, | |
| "learning_rate": 4.808080808080808e-06, | |
| "loss": 0.8291, | |
| "step": 524000 | |
| }, | |
| { | |
| "epoch": 17.06, | |
| "learning_rate": 4.803030303030303e-06, | |
| "loss": 0.8286, | |
| "step": 524500 | |
| }, | |
| { | |
| "epoch": 17.07, | |
| "learning_rate": 4.7979797979797985e-06, | |
| "loss": 0.8286, | |
| "step": 525000 | |
| }, | |
| { | |
| "epoch": 17.09, | |
| "learning_rate": 4.792929292929293e-06, | |
| "loss": 0.829, | |
| "step": 525500 | |
| }, | |
| { | |
| "epoch": 17.11, | |
| "learning_rate": 4.787878787878788e-06, | |
| "loss": 0.8281, | |
| "step": 526000 | |
| }, | |
| { | |
| "epoch": 17.12, | |
| "learning_rate": 4.782828282828283e-06, | |
| "loss": 0.8277, | |
| "step": 526500 | |
| }, | |
| { | |
| "epoch": 17.14, | |
| "learning_rate": 4.777777777777778e-06, | |
| "loss": 0.8277, | |
| "step": 527000 | |
| }, | |
| { | |
| "epoch": 17.16, | |
| "learning_rate": 4.772727272727273e-06, | |
| "loss": 0.8269, | |
| "step": 527500 | |
| }, | |
| { | |
| "epoch": 17.17, | |
| "learning_rate": 4.7676767676767685e-06, | |
| "loss": 0.828, | |
| "step": 528000 | |
| }, | |
| { | |
| "epoch": 17.19, | |
| "learning_rate": 4.762626262626263e-06, | |
| "loss": 0.8279, | |
| "step": 528500 | |
| }, | |
| { | |
| "epoch": 17.2, | |
| "learning_rate": 4.757575757575758e-06, | |
| "loss": 0.8272, | |
| "step": 529000 | |
| }, | |
| { | |
| "epoch": 17.22, | |
| "learning_rate": 4.752525252525253e-06, | |
| "loss": 0.8272, | |
| "step": 529500 | |
| }, | |
| { | |
| "epoch": 17.24, | |
| "learning_rate": 4.747474747474748e-06, | |
| "loss": 0.8269, | |
| "step": 530000 | |
| }, | |
| { | |
| "epoch": 17.25, | |
| "learning_rate": 4.7424242424242426e-06, | |
| "loss": 0.8267, | |
| "step": 530500 | |
| }, | |
| { | |
| "epoch": 17.27, | |
| "learning_rate": 4.737373737373738e-06, | |
| "loss": 0.8268, | |
| "step": 531000 | |
| }, | |
| { | |
| "epoch": 17.29, | |
| "learning_rate": 4.732323232323233e-06, | |
| "loss": 0.8265, | |
| "step": 531500 | |
| }, | |
| { | |
| "epoch": 17.3, | |
| "learning_rate": 4.727272727272728e-06, | |
| "loss": 0.8263, | |
| "step": 532000 | |
| }, | |
| { | |
| "epoch": 17.32, | |
| "learning_rate": 4.722222222222222e-06, | |
| "loss": 0.8261, | |
| "step": 532500 | |
| }, | |
| { | |
| "epoch": 17.33, | |
| "learning_rate": 4.717171717171717e-06, | |
| "loss": 0.827, | |
| "step": 533000 | |
| }, | |
| { | |
| "epoch": 17.35, | |
| "learning_rate": 4.7121212121212126e-06, | |
| "loss": 0.8258, | |
| "step": 533500 | |
| }, | |
| { | |
| "epoch": 17.37, | |
| "learning_rate": 4.707070707070707e-06, | |
| "loss": 0.8255, | |
| "step": 534000 | |
| }, | |
| { | |
| "epoch": 17.38, | |
| "learning_rate": 4.702020202020202e-06, | |
| "loss": 0.8257, | |
| "step": 534500 | |
| }, | |
| { | |
| "epoch": 17.4, | |
| "learning_rate": 4.696969696969698e-06, | |
| "loss": 0.8253, | |
| "step": 535000 | |
| }, | |
| { | |
| "epoch": 17.42, | |
| "learning_rate": 4.691919191919192e-06, | |
| "loss": 0.8253, | |
| "step": 535500 | |
| }, | |
| { | |
| "epoch": 17.43, | |
| "learning_rate": 4.6868686868686874e-06, | |
| "loss": 0.8251, | |
| "step": 536000 | |
| }, | |
| { | |
| "epoch": 17.45, | |
| "learning_rate": 4.681818181818183e-06, | |
| "loss": 0.8246, | |
| "step": 536500 | |
| }, | |
| { | |
| "epoch": 17.46, | |
| "learning_rate": 4.676767676767677e-06, | |
| "loss": 0.8249, | |
| "step": 537000 | |
| }, | |
| { | |
| "epoch": 17.48, | |
| "learning_rate": 4.671717171717172e-06, | |
| "loss": 0.8245, | |
| "step": 537500 | |
| }, | |
| { | |
| "epoch": 17.5, | |
| "learning_rate": 4.666666666666667e-06, | |
| "loss": 0.825, | |
| "step": 538000 | |
| }, | |
| { | |
| "epoch": 17.51, | |
| "learning_rate": 4.661616161616162e-06, | |
| "loss": 0.8242, | |
| "step": 538500 | |
| }, | |
| { | |
| "epoch": 17.53, | |
| "learning_rate": 4.656565656565657e-06, | |
| "loss": 0.8234, | |
| "step": 539000 | |
| }, | |
| { | |
| "epoch": 17.55, | |
| "learning_rate": 4.651515151515152e-06, | |
| "loss": 0.8242, | |
| "step": 539500 | |
| }, | |
| { | |
| "epoch": 17.56, | |
| "learning_rate": 4.646464646464647e-06, | |
| "loss": 0.8236, | |
| "step": 540000 | |
| }, | |
| { | |
| "epoch": 17.58, | |
| "learning_rate": 4.641414141414142e-06, | |
| "loss": 0.8235, | |
| "step": 540500 | |
| }, | |
| { | |
| "epoch": 17.59, | |
| "learning_rate": 4.636363636363636e-06, | |
| "loss": 0.8235, | |
| "step": 541000 | |
| }, | |
| { | |
| "epoch": 17.61, | |
| "learning_rate": 4.6313131313131315e-06, | |
| "loss": 0.8239, | |
| "step": 541500 | |
| }, | |
| { | |
| "epoch": 17.63, | |
| "learning_rate": 4.626262626262627e-06, | |
| "loss": 0.8229, | |
| "step": 542000 | |
| }, | |
| { | |
| "epoch": 17.64, | |
| "learning_rate": 4.621212121212122e-06, | |
| "loss": 0.823, | |
| "step": 542500 | |
| }, | |
| { | |
| "epoch": 17.66, | |
| "learning_rate": 4.616161616161616e-06, | |
| "loss": 0.8229, | |
| "step": 543000 | |
| }, | |
| { | |
| "epoch": 17.68, | |
| "learning_rate": 4.611111111111112e-06, | |
| "loss": 0.8225, | |
| "step": 543500 | |
| }, | |
| { | |
| "epoch": 17.69, | |
| "learning_rate": 4.606060606060606e-06, | |
| "loss": 0.8231, | |
| "step": 544000 | |
| }, | |
| { | |
| "epoch": 17.71, | |
| "learning_rate": 4.6010101010101015e-06, | |
| "loss": 0.8214, | |
| "step": 544500 | |
| }, | |
| { | |
| "epoch": 17.72, | |
| "learning_rate": 4.595959595959597e-06, | |
| "loss": 0.8226, | |
| "step": 545000 | |
| }, | |
| { | |
| "epoch": 17.74, | |
| "learning_rate": 4.590909090909092e-06, | |
| "loss": 0.8219, | |
| "step": 545500 | |
| }, | |
| { | |
| "epoch": 17.76, | |
| "learning_rate": 4.585858585858586e-06, | |
| "loss": 0.8215, | |
| "step": 546000 | |
| }, | |
| { | |
| "epoch": 17.77, | |
| "learning_rate": 4.580808080808081e-06, | |
| "loss": 0.8221, | |
| "step": 546500 | |
| }, | |
| { | |
| "epoch": 17.79, | |
| "learning_rate": 4.575757575757576e-06, | |
| "loss": 0.8219, | |
| "step": 547000 | |
| }, | |
| { | |
| "epoch": 17.81, | |
| "learning_rate": 4.5707070707070715e-06, | |
| "loss": 0.8214, | |
| "step": 547500 | |
| }, | |
| { | |
| "epoch": 17.82, | |
| "learning_rate": 4.565656565656566e-06, | |
| "loss": 0.8215, | |
| "step": 548000 | |
| }, | |
| { | |
| "epoch": 17.84, | |
| "learning_rate": 4.560606060606061e-06, | |
| "loss": 0.8215, | |
| "step": 548500 | |
| }, | |
| { | |
| "epoch": 17.85, | |
| "learning_rate": 4.555555555555556e-06, | |
| "loss": 0.8204, | |
| "step": 549000 | |
| }, | |
| { | |
| "epoch": 17.87, | |
| "learning_rate": 4.55050505050505e-06, | |
| "loss": 0.8205, | |
| "step": 549500 | |
| }, | |
| { | |
| "epoch": 17.89, | |
| "learning_rate": 4.5454545454545455e-06, | |
| "loss": 0.8211, | |
| "step": 550000 | |
| }, | |
| { | |
| "epoch": 17.9, | |
| "learning_rate": 4.540404040404041e-06, | |
| "loss": 0.8205, | |
| "step": 550500 | |
| }, | |
| { | |
| "epoch": 17.92, | |
| "learning_rate": 4.535353535353536e-06, | |
| "loss": 0.8207, | |
| "step": 551000 | |
| }, | |
| { | |
| "epoch": 17.94, | |
| "learning_rate": 4.53030303030303e-06, | |
| "loss": 0.8197, | |
| "step": 551500 | |
| }, | |
| { | |
| "epoch": 17.95, | |
| "learning_rate": 4.525252525252526e-06, | |
| "loss": 0.8198, | |
| "step": 552000 | |
| }, | |
| { | |
| "epoch": 17.97, | |
| "learning_rate": 4.520202020202021e-06, | |
| "loss": 0.82, | |
| "step": 552500 | |
| }, | |
| { | |
| "epoch": 17.98, | |
| "learning_rate": 4.5151515151515155e-06, | |
| "loss": 0.8198, | |
| "step": 553000 | |
| }, | |
| { | |
| "epoch": 18.0, | |
| "learning_rate": 4.510101010101011e-06, | |
| "loss": 0.8198, | |
| "step": 553500 | |
| }, | |
| { | |
| "epoch": 18.02, | |
| "learning_rate": 4.505050505050506e-06, | |
| "loss": 0.8194, | |
| "step": 554000 | |
| }, | |
| { | |
| "epoch": 18.03, | |
| "learning_rate": 4.5e-06, | |
| "loss": 0.8195, | |
| "step": 554500 | |
| }, | |
| { | |
| "epoch": 18.05, | |
| "learning_rate": 4.494949494949495e-06, | |
| "loss": 0.8192, | |
| "step": 555000 | |
| }, | |
| { | |
| "epoch": 18.07, | |
| "learning_rate": 4.48989898989899e-06, | |
| "loss": 0.8188, | |
| "step": 555500 | |
| }, | |
| { | |
| "epoch": 18.08, | |
| "learning_rate": 4.4848484848484855e-06, | |
| "loss": 0.8188, | |
| "step": 556000 | |
| }, | |
| { | |
| "epoch": 18.1, | |
| "learning_rate": 4.47979797979798e-06, | |
| "loss": 0.8183, | |
| "step": 556500 | |
| }, | |
| { | |
| "epoch": 18.11, | |
| "learning_rate": 4.474747474747475e-06, | |
| "loss": 0.8184, | |
| "step": 557000 | |
| }, | |
| { | |
| "epoch": 18.13, | |
| "learning_rate": 4.46969696969697e-06, | |
| "loss": 0.8182, | |
| "step": 557500 | |
| }, | |
| { | |
| "epoch": 18.15, | |
| "learning_rate": 4.464646464646465e-06, | |
| "loss": 0.8185, | |
| "step": 558000 | |
| }, | |
| { | |
| "epoch": 18.16, | |
| "learning_rate": 4.4595959595959596e-06, | |
| "loss": 0.818, | |
| "step": 558500 | |
| }, | |
| { | |
| "epoch": 18.18, | |
| "learning_rate": 4.454545454545455e-06, | |
| "loss": 0.8176, | |
| "step": 559000 | |
| }, | |
| { | |
| "epoch": 18.2, | |
| "learning_rate": 4.44949494949495e-06, | |
| "loss": 0.8177, | |
| "step": 559500 | |
| }, | |
| { | |
| "epoch": 18.21, | |
| "learning_rate": 4.444444444444444e-06, | |
| "loss": 0.8175, | |
| "step": 560000 | |
| }, | |
| { | |
| "epoch": 18.23, | |
| "learning_rate": 4.43939393939394e-06, | |
| "loss": 0.8176, | |
| "step": 560500 | |
| }, | |
| { | |
| "epoch": 18.25, | |
| "learning_rate": 4.434343434343435e-06, | |
| "loss": 0.817, | |
| "step": 561000 | |
| }, | |
| { | |
| "epoch": 18.26, | |
| "learning_rate": 4.42929292929293e-06, | |
| "loss": 0.8165, | |
| "step": 561500 | |
| }, | |
| { | |
| "epoch": 18.28, | |
| "learning_rate": 4.424242424242425e-06, | |
| "loss": 0.8167, | |
| "step": 562000 | |
| }, | |
| { | |
| "epoch": 18.29, | |
| "learning_rate": 4.41919191919192e-06, | |
| "loss": 0.8175, | |
| "step": 562500 | |
| }, | |
| { | |
| "epoch": 18.31, | |
| "learning_rate": 4.414141414141415e-06, | |
| "loss": 0.8166, | |
| "step": 563000 | |
| }, | |
| { | |
| "epoch": 18.33, | |
| "learning_rate": 4.409090909090909e-06, | |
| "loss": 0.8167, | |
| "step": 563500 | |
| }, | |
| { | |
| "epoch": 18.34, | |
| "learning_rate": 4.4040404040404044e-06, | |
| "loss": 0.8167, | |
| "step": 564000 | |
| }, | |
| { | |
| "epoch": 18.36, | |
| "learning_rate": 4.3989898989899e-06, | |
| "loss": 0.8166, | |
| "step": 564500 | |
| }, | |
| { | |
| "epoch": 18.38, | |
| "learning_rate": 4.393939393939394e-06, | |
| "loss": 0.8163, | |
| "step": 565000 | |
| }, | |
| { | |
| "epoch": 18.39, | |
| "learning_rate": 4.388888888888889e-06, | |
| "loss": 0.8156, | |
| "step": 565500 | |
| }, | |
| { | |
| "epoch": 18.41, | |
| "learning_rate": 4.383838383838384e-06, | |
| "loss": 0.8155, | |
| "step": 566000 | |
| }, | |
| { | |
| "epoch": 18.42, | |
| "learning_rate": 4.378787878787879e-06, | |
| "loss": 0.8156, | |
| "step": 566500 | |
| }, | |
| { | |
| "epoch": 18.44, | |
| "learning_rate": 4.373737373737374e-06, | |
| "loss": 0.8152, | |
| "step": 567000 | |
| }, | |
| { | |
| "epoch": 18.46, | |
| "learning_rate": 4.368686868686869e-06, | |
| "loss": 0.8155, | |
| "step": 567500 | |
| }, | |
| { | |
| "epoch": 18.47, | |
| "learning_rate": 4.363636363636364e-06, | |
| "loss": 0.8149, | |
| "step": 568000 | |
| }, | |
| { | |
| "epoch": 18.49, | |
| "learning_rate": 4.358585858585859e-06, | |
| "loss": 0.8152, | |
| "step": 568500 | |
| }, | |
| { | |
| "epoch": 18.51, | |
| "learning_rate": 4.353535353535353e-06, | |
| "loss": 0.8145, | |
| "step": 569000 | |
| }, | |
| { | |
| "epoch": 18.52, | |
| "learning_rate": 4.348484848484849e-06, | |
| "loss": 0.8148, | |
| "step": 569500 | |
| }, | |
| { | |
| "epoch": 18.54, | |
| "learning_rate": 4.343434343434344e-06, | |
| "loss": 0.8147, | |
| "step": 570000 | |
| }, | |
| { | |
| "epoch": 18.55, | |
| "learning_rate": 4.338383838383839e-06, | |
| "loss": 0.8143, | |
| "step": 570500 | |
| }, | |
| { | |
| "epoch": 18.57, | |
| "learning_rate": 4.333333333333334e-06, | |
| "loss": 0.8143, | |
| "step": 571000 | |
| }, | |
| { | |
| "epoch": 18.59, | |
| "learning_rate": 4.328282828282829e-06, | |
| "loss": 0.8144, | |
| "step": 571500 | |
| }, | |
| { | |
| "epoch": 18.6, | |
| "learning_rate": 4.323232323232323e-06, | |
| "loss": 0.8138, | |
| "step": 572000 | |
| }, | |
| { | |
| "epoch": 18.62, | |
| "learning_rate": 4.3181818181818185e-06, | |
| "loss": 0.8138, | |
| "step": 572500 | |
| }, | |
| { | |
| "epoch": 18.64, | |
| "learning_rate": 4.313131313131314e-06, | |
| "loss": 0.8141, | |
| "step": 573000 | |
| }, | |
| { | |
| "epoch": 18.65, | |
| "learning_rate": 4.308080808080809e-06, | |
| "loss": 0.8134, | |
| "step": 573500 | |
| }, | |
| { | |
| "epoch": 18.67, | |
| "learning_rate": 4.303030303030303e-06, | |
| "loss": 0.8137, | |
| "step": 574000 | |
| }, | |
| { | |
| "epoch": 18.68, | |
| "learning_rate": 4.297979797979798e-06, | |
| "loss": 0.8135, | |
| "step": 574500 | |
| }, | |
| { | |
| "epoch": 18.7, | |
| "learning_rate": 4.292929292929293e-06, | |
| "loss": 0.8127, | |
| "step": 575000 | |
| }, | |
| { | |
| "epoch": 18.72, | |
| "learning_rate": 4.287878787878788e-06, | |
| "loss": 0.8135, | |
| "step": 575500 | |
| }, | |
| { | |
| "epoch": 18.73, | |
| "learning_rate": 4.282828282828283e-06, | |
| "loss": 0.8134, | |
| "step": 576000 | |
| }, | |
| { | |
| "epoch": 18.75, | |
| "learning_rate": 4.277777777777778e-06, | |
| "loss": 0.8127, | |
| "step": 576500 | |
| }, | |
| { | |
| "epoch": 18.77, | |
| "learning_rate": 4.272727272727273e-06, | |
| "loss": 0.812, | |
| "step": 577000 | |
| }, | |
| { | |
| "epoch": 18.78, | |
| "learning_rate": 4.267676767676767e-06, | |
| "loss": 0.8122, | |
| "step": 577500 | |
| }, | |
| { | |
| "epoch": 18.8, | |
| "learning_rate": 4.262626262626263e-06, | |
| "loss": 0.8124, | |
| "step": 578000 | |
| }, | |
| { | |
| "epoch": 18.81, | |
| "learning_rate": 4.2575757575757585e-06, | |
| "loss": 0.8125, | |
| "step": 578500 | |
| }, | |
| { | |
| "epoch": 18.83, | |
| "learning_rate": 4.252525252525253e-06, | |
| "loss": 0.8117, | |
| "step": 579000 | |
| }, | |
| { | |
| "epoch": 18.85, | |
| "learning_rate": 4.247474747474748e-06, | |
| "loss": 0.8114, | |
| "step": 579500 | |
| }, | |
| { | |
| "epoch": 18.86, | |
| "learning_rate": 4.242424242424243e-06, | |
| "loss": 0.8118, | |
| "step": 580000 | |
| }, | |
| { | |
| "epoch": 18.88, | |
| "learning_rate": 4.237373737373737e-06, | |
| "loss": 0.8115, | |
| "step": 580500 | |
| }, | |
| { | |
| "epoch": 18.9, | |
| "learning_rate": 4.2323232323232325e-06, | |
| "loss": 0.8118, | |
| "step": 581000 | |
| }, | |
| { | |
| "epoch": 18.91, | |
| "learning_rate": 4.227272727272728e-06, | |
| "loss": 0.8113, | |
| "step": 581500 | |
| }, | |
| { | |
| "epoch": 18.93, | |
| "learning_rate": 4.222222222222223e-06, | |
| "loss": 0.8115, | |
| "step": 582000 | |
| }, | |
| { | |
| "epoch": 18.94, | |
| "learning_rate": 4.217171717171717e-06, | |
| "loss": 0.8107, | |
| "step": 582500 | |
| }, | |
| { | |
| "epoch": 18.96, | |
| "learning_rate": 4.212121212121212e-06, | |
| "loss": 0.811, | |
| "step": 583000 | |
| }, | |
| { | |
| "epoch": 18.98, | |
| "learning_rate": 4.207070707070707e-06, | |
| "loss": 0.811, | |
| "step": 583500 | |
| }, | |
| { | |
| "epoch": 18.99, | |
| "learning_rate": 4.2020202020202026e-06, | |
| "loss": 0.8107, | |
| "step": 584000 | |
| }, | |
| { | |
| "epoch": 19.01, | |
| "learning_rate": 4.196969696969697e-06, | |
| "loss": 0.8102, | |
| "step": 584500 | |
| }, | |
| { | |
| "epoch": 19.03, | |
| "learning_rate": 4.191919191919192e-06, | |
| "loss": 0.8105, | |
| "step": 585000 | |
| }, | |
| { | |
| "epoch": 19.04, | |
| "learning_rate": 4.186868686868687e-06, | |
| "loss": 0.8094, | |
| "step": 585500 | |
| }, | |
| { | |
| "epoch": 19.06, | |
| "learning_rate": 4.181818181818182e-06, | |
| "loss": 0.8096, | |
| "step": 586000 | |
| }, | |
| { | |
| "epoch": 19.07, | |
| "learning_rate": 4.1767676767676774e-06, | |
| "loss": 0.8095, | |
| "step": 586500 | |
| }, | |
| { | |
| "epoch": 19.09, | |
| "learning_rate": 4.1717171717171726e-06, | |
| "loss": 0.8097, | |
| "step": 587000 | |
| }, | |
| { | |
| "epoch": 19.11, | |
| "learning_rate": 4.166666666666667e-06, | |
| "loss": 0.8101, | |
| "step": 587500 | |
| }, | |
| { | |
| "epoch": 19.12, | |
| "learning_rate": 4.161616161616162e-06, | |
| "loss": 0.8092, | |
| "step": 588000 | |
| }, | |
| { | |
| "epoch": 19.14, | |
| "learning_rate": 4.156565656565657e-06, | |
| "loss": 0.8089, | |
| "step": 588500 | |
| }, | |
| { | |
| "epoch": 19.16, | |
| "learning_rate": 4.151515151515152e-06, | |
| "loss": 0.8092, | |
| "step": 589000 | |
| }, | |
| { | |
| "epoch": 19.17, | |
| "learning_rate": 4.146464646464647e-06, | |
| "loss": 0.809, | |
| "step": 589500 | |
| }, | |
| { | |
| "epoch": 19.19, | |
| "learning_rate": 4.141414141414142e-06, | |
| "loss": 0.8097, | |
| "step": 590000 | |
| }, | |
| { | |
| "epoch": 19.2, | |
| "learning_rate": 4.136363636363637e-06, | |
| "loss": 0.8085, | |
| "step": 590500 | |
| }, | |
| { | |
| "epoch": 19.22, | |
| "learning_rate": 4.131313131313132e-06, | |
| "loss": 0.8091, | |
| "step": 591000 | |
| }, | |
| { | |
| "epoch": 19.24, | |
| "learning_rate": 4.126262626262626e-06, | |
| "loss": 0.8085, | |
| "step": 591500 | |
| }, | |
| { | |
| "epoch": 19.25, | |
| "learning_rate": 4.1212121212121215e-06, | |
| "loss": 0.8084, | |
| "step": 592000 | |
| }, | |
| { | |
| "epoch": 19.27, | |
| "learning_rate": 4.116161616161617e-06, | |
| "loss": 0.8078, | |
| "step": 592500 | |
| }, | |
| { | |
| "epoch": 19.29, | |
| "learning_rate": 4.111111111111111e-06, | |
| "loss": 0.8082, | |
| "step": 593000 | |
| }, | |
| { | |
| "epoch": 19.3, | |
| "learning_rate": 4.106060606060606e-06, | |
| "loss": 0.8083, | |
| "step": 593500 | |
| }, | |
| { | |
| "epoch": 19.32, | |
| "learning_rate": 4.101010101010101e-06, | |
| "loss": 0.8081, | |
| "step": 594000 | |
| }, | |
| { | |
| "epoch": 19.33, | |
| "learning_rate": 4.095959595959596e-06, | |
| "loss": 0.8076, | |
| "step": 594500 | |
| }, | |
| { | |
| "epoch": 19.35, | |
| "learning_rate": 4.0909090909090915e-06, | |
| "loss": 0.8077, | |
| "step": 595000 | |
| }, | |
| { | |
| "epoch": 19.37, | |
| "learning_rate": 4.085858585858587e-06, | |
| "loss": 0.8072, | |
| "step": 595500 | |
| }, | |
| { | |
| "epoch": 19.38, | |
| "learning_rate": 4.080808080808081e-06, | |
| "loss": 0.8069, | |
| "step": 596000 | |
| }, | |
| { | |
| "epoch": 19.4, | |
| "learning_rate": 4.075757575757576e-06, | |
| "loss": 0.807, | |
| "step": 596500 | |
| }, | |
| { | |
| "epoch": 19.42, | |
| "learning_rate": 4.070707070707071e-06, | |
| "loss": 0.8069, | |
| "step": 597000 | |
| }, | |
| { | |
| "epoch": 19.43, | |
| "learning_rate": 4.065656565656566e-06, | |
| "loss": 0.8068, | |
| "step": 597500 | |
| }, | |
| { | |
| "epoch": 19.45, | |
| "learning_rate": 4.060606060606061e-06, | |
| "loss": 0.8061, | |
| "step": 598000 | |
| }, | |
| { | |
| "epoch": 19.46, | |
| "learning_rate": 4.055555555555556e-06, | |
| "loss": 0.8072, | |
| "step": 598500 | |
| }, | |
| { | |
| "epoch": 19.48, | |
| "learning_rate": 4.050505050505051e-06, | |
| "loss": 0.8064, | |
| "step": 599000 | |
| }, | |
| { | |
| "epoch": 19.5, | |
| "learning_rate": 4.045454545454546e-06, | |
| "loss": 0.8063, | |
| "step": 599500 | |
| }, | |
| { | |
| "epoch": 19.51, | |
| "learning_rate": 4.04040404040404e-06, | |
| "loss": 0.8064, | |
| "step": 600000 | |
| }, | |
| { | |
| "epoch": 19.53, | |
| "learning_rate": 4.0353535353535355e-06, | |
| "loss": 0.806, | |
| "step": 600500 | |
| }, | |
| { | |
| "epoch": 19.55, | |
| "learning_rate": 4.030303030303031e-06, | |
| "loss": 0.805, | |
| "step": 601000 | |
| }, | |
| { | |
| "epoch": 19.56, | |
| "learning_rate": 4.025252525252526e-06, | |
| "loss": 0.806, | |
| "step": 601500 | |
| }, | |
| { | |
| "epoch": 19.58, | |
| "learning_rate": 4.02020202020202e-06, | |
| "loss": 0.806, | |
| "step": 602000 | |
| }, | |
| { | |
| "epoch": 19.59, | |
| "learning_rate": 4.015151515151515e-06, | |
| "loss": 0.8049, | |
| "step": 602500 | |
| }, | |
| { | |
| "epoch": 19.61, | |
| "learning_rate": 4.01010101010101e-06, | |
| "loss": 0.8054, | |
| "step": 603000 | |
| }, | |
| { | |
| "epoch": 19.63, | |
| "learning_rate": 4.0050505050505055e-06, | |
| "loss": 0.8054, | |
| "step": 603500 | |
| }, | |
| { | |
| "epoch": 19.64, | |
| "learning_rate": 4.000000000000001e-06, | |
| "loss": 0.8051, | |
| "step": 604000 | |
| }, | |
| { | |
| "epoch": 19.66, | |
| "learning_rate": 3.994949494949496e-06, | |
| "loss": 0.8047, | |
| "step": 604500 | |
| }, | |
| { | |
| "epoch": 19.68, | |
| "learning_rate": 3.98989898989899e-06, | |
| "loss": 0.8048, | |
| "step": 605000 | |
| }, | |
| { | |
| "epoch": 19.69, | |
| "learning_rate": 3.984848484848485e-06, | |
| "loss": 0.8046, | |
| "step": 605500 | |
| }, | |
| { | |
| "epoch": 19.71, | |
| "learning_rate": 3.97979797979798e-06, | |
| "loss": 0.8048, | |
| "step": 606000 | |
| }, | |
| { | |
| "epoch": 19.72, | |
| "learning_rate": 3.9747474747474755e-06, | |
| "loss": 0.8044, | |
| "step": 606500 | |
| }, | |
| { | |
| "epoch": 19.74, | |
| "learning_rate": 3.96969696969697e-06, | |
| "loss": 0.8045, | |
| "step": 607000 | |
| }, | |
| { | |
| "epoch": 19.76, | |
| "learning_rate": 3.964646464646465e-06, | |
| "loss": 0.8049, | |
| "step": 607500 | |
| }, | |
| { | |
| "epoch": 19.77, | |
| "learning_rate": 3.95959595959596e-06, | |
| "loss": 0.8036, | |
| "step": 608000 | |
| }, | |
| { | |
| "epoch": 19.79, | |
| "learning_rate": 3.954545454545454e-06, | |
| "loss": 0.8035, | |
| "step": 608500 | |
| }, | |
| { | |
| "epoch": 19.81, | |
| "learning_rate": 3.9494949494949496e-06, | |
| "loss": 0.8034, | |
| "step": 609000 | |
| }, | |
| { | |
| "epoch": 19.82, | |
| "learning_rate": 3.944444444444445e-06, | |
| "loss": 0.8033, | |
| "step": 609500 | |
| }, | |
| { | |
| "epoch": 19.84, | |
| "learning_rate": 3.93939393939394e-06, | |
| "loss": 0.8026, | |
| "step": 610000 | |
| }, | |
| { | |
| "epoch": 19.85, | |
| "learning_rate": 3.934343434343434e-06, | |
| "loss": 0.8033, | |
| "step": 610500 | |
| }, | |
| { | |
| "epoch": 19.87, | |
| "learning_rate": 3.929292929292929e-06, | |
| "loss": 0.8036, | |
| "step": 611000 | |
| }, | |
| { | |
| "epoch": 19.89, | |
| "learning_rate": 3.9242424242424244e-06, | |
| "loss": 0.8033, | |
| "step": 611500 | |
| }, | |
| { | |
| "epoch": 19.9, | |
| "learning_rate": 3.9191919191919196e-06, | |
| "loss": 0.803, | |
| "step": 612000 | |
| }, | |
| { | |
| "epoch": 19.92, | |
| "learning_rate": 3.914141414141415e-06, | |
| "loss": 0.8024, | |
| "step": 612500 | |
| }, | |
| { | |
| "epoch": 19.94, | |
| "learning_rate": 3.90909090909091e-06, | |
| "loss": 0.8028, | |
| "step": 613000 | |
| }, | |
| { | |
| "epoch": 19.95, | |
| "learning_rate": 3.904040404040404e-06, | |
| "loss": 0.8027, | |
| "step": 613500 | |
| }, | |
| { | |
| "epoch": 19.97, | |
| "learning_rate": 3.898989898989899e-06, | |
| "loss": 0.8025, | |
| "step": 614000 | |
| }, | |
| { | |
| "epoch": 19.99, | |
| "learning_rate": 3.8939393939393944e-06, | |
| "loss": 0.802, | |
| "step": 614500 | |
| }, | |
| { | |
| "epoch": 20.0, | |
| "learning_rate": 3.88888888888889e-06, | |
| "loss": 0.8017, | |
| "step": 615000 | |
| }, | |
| { | |
| "epoch": 20.02, | |
| "learning_rate": 3.883838383838384e-06, | |
| "loss": 0.802, | |
| "step": 615500 | |
| }, | |
| { | |
| "epoch": 20.03, | |
| "learning_rate": 3.878787878787879e-06, | |
| "loss": 0.8021, | |
| "step": 616000 | |
| }, | |
| { | |
| "epoch": 20.05, | |
| "learning_rate": 3.873737373737374e-06, | |
| "loss": 0.8017, | |
| "step": 616500 | |
| }, | |
| { | |
| "epoch": 20.07, | |
| "learning_rate": 3.868686868686869e-06, | |
| "loss": 0.8017, | |
| "step": 617000 | |
| }, | |
| { | |
| "epoch": 20.08, | |
| "learning_rate": 3.863636363636364e-06, | |
| "loss": 0.8013, | |
| "step": 617500 | |
| }, | |
| { | |
| "epoch": 20.1, | |
| "learning_rate": 3.858585858585859e-06, | |
| "loss": 0.8011, | |
| "step": 618000 | |
| }, | |
| { | |
| "epoch": 20.12, | |
| "learning_rate": 3.853535353535354e-06, | |
| "loss": 0.801, | |
| "step": 618500 | |
| }, | |
| { | |
| "epoch": 20.13, | |
| "learning_rate": 3.848484848484848e-06, | |
| "loss": 0.8009, | |
| "step": 619000 | |
| }, | |
| { | |
| "epoch": 20.15, | |
| "learning_rate": 3.843434343434343e-06, | |
| "loss": 0.8008, | |
| "step": 619500 | |
| }, | |
| { | |
| "epoch": 20.16, | |
| "learning_rate": 3.8383838383838385e-06, | |
| "loss": 0.8013, | |
| "step": 620000 | |
| }, | |
| { | |
| "epoch": 20.18, | |
| "learning_rate": 3.833333333333334e-06, | |
| "loss": 0.8004, | |
| "step": 620500 | |
| }, | |
| { | |
| "epoch": 20.2, | |
| "learning_rate": 3.828282828282829e-06, | |
| "loss": 0.8003, | |
| "step": 621000 | |
| }, | |
| { | |
| "epoch": 20.21, | |
| "learning_rate": 3.823232323232324e-06, | |
| "loss": 0.8002, | |
| "step": 621500 | |
| }, | |
| { | |
| "epoch": 20.23, | |
| "learning_rate": 3.818181818181819e-06, | |
| "loss": 0.8004, | |
| "step": 622000 | |
| }, | |
| { | |
| "epoch": 20.25, | |
| "learning_rate": 3.8131313131313138e-06, | |
| "loss": 0.8002, | |
| "step": 622500 | |
| }, | |
| { | |
| "epoch": 20.26, | |
| "learning_rate": 3.8080808080808085e-06, | |
| "loss": 0.8, | |
| "step": 623000 | |
| }, | |
| { | |
| "epoch": 20.28, | |
| "learning_rate": 3.803030303030303e-06, | |
| "loss": 0.7997, | |
| "step": 623500 | |
| }, | |
| { | |
| "epoch": 20.29, | |
| "learning_rate": 3.7979797979797984e-06, | |
| "loss": 0.7992, | |
| "step": 624000 | |
| }, | |
| { | |
| "epoch": 20.31, | |
| "learning_rate": 3.792929292929293e-06, | |
| "loss": 0.7993, | |
| "step": 624500 | |
| }, | |
| { | |
| "epoch": 20.33, | |
| "learning_rate": 3.7878787878787882e-06, | |
| "loss": 0.7995, | |
| "step": 625000 | |
| }, | |
| { | |
| "epoch": 20.34, | |
| "learning_rate": 3.782828282828283e-06, | |
| "loss": 0.7992, | |
| "step": 625500 | |
| }, | |
| { | |
| "epoch": 20.36, | |
| "learning_rate": 3.777777777777778e-06, | |
| "loss": 0.7988, | |
| "step": 626000 | |
| }, | |
| { | |
| "epoch": 20.38, | |
| "learning_rate": 3.772727272727273e-06, | |
| "loss": 0.7995, | |
| "step": 626500 | |
| }, | |
| { | |
| "epoch": 20.39, | |
| "learning_rate": 3.767676767676768e-06, | |
| "loss": 0.7983, | |
| "step": 627000 | |
| }, | |
| { | |
| "epoch": 20.41, | |
| "learning_rate": 3.7626262626262627e-06, | |
| "loss": 0.7985, | |
| "step": 627500 | |
| }, | |
| { | |
| "epoch": 20.42, | |
| "learning_rate": 3.757575757575758e-06, | |
| "loss": 0.7979, | |
| "step": 628000 | |
| }, | |
| { | |
| "epoch": 20.44, | |
| "learning_rate": 3.7525252525252525e-06, | |
| "loss": 0.7981, | |
| "step": 628500 | |
| }, | |
| { | |
| "epoch": 20.46, | |
| "learning_rate": 3.747474747474748e-06, | |
| "loss": 0.7981, | |
| "step": 629000 | |
| }, | |
| { | |
| "epoch": 20.47, | |
| "learning_rate": 3.742424242424243e-06, | |
| "loss": 0.7981, | |
| "step": 629500 | |
| }, | |
| { | |
| "epoch": 20.49, | |
| "learning_rate": 3.737373737373738e-06, | |
| "loss": 0.7975, | |
| "step": 630000 | |
| }, | |
| { | |
| "epoch": 20.51, | |
| "learning_rate": 3.7323232323232327e-06, | |
| "loss": 0.7974, | |
| "step": 630500 | |
| }, | |
| { | |
| "epoch": 20.52, | |
| "learning_rate": 3.727272727272728e-06, | |
| "loss": 0.7975, | |
| "step": 631000 | |
| }, | |
| { | |
| "epoch": 20.54, | |
| "learning_rate": 3.7222222222222225e-06, | |
| "loss": 0.7972, | |
| "step": 631500 | |
| }, | |
| { | |
| "epoch": 20.55, | |
| "learning_rate": 3.7171717171717177e-06, | |
| "loss": 0.7977, | |
| "step": 632000 | |
| }, | |
| { | |
| "epoch": 20.57, | |
| "learning_rate": 3.7121212121212124e-06, | |
| "loss": 0.7972, | |
| "step": 632500 | |
| }, | |
| { | |
| "epoch": 20.59, | |
| "learning_rate": 3.7070707070707075e-06, | |
| "loss": 0.7966, | |
| "step": 633000 | |
| }, | |
| { | |
| "epoch": 20.6, | |
| "learning_rate": 3.7020202020202023e-06, | |
| "loss": 0.7968, | |
| "step": 633500 | |
| }, | |
| { | |
| "epoch": 20.62, | |
| "learning_rate": 3.6969696969696974e-06, | |
| "loss": 0.7971, | |
| "step": 634000 | |
| }, | |
| { | |
| "epoch": 20.64, | |
| "learning_rate": 3.691919191919192e-06, | |
| "loss": 0.7968, | |
| "step": 634500 | |
| }, | |
| { | |
| "epoch": 20.65, | |
| "learning_rate": 3.686868686868687e-06, | |
| "loss": 0.7963, | |
| "step": 635000 | |
| }, | |
| { | |
| "epoch": 20.67, | |
| "learning_rate": 3.681818181818182e-06, | |
| "loss": 0.7959, | |
| "step": 635500 | |
| }, | |
| { | |
| "epoch": 20.68, | |
| "learning_rate": 3.6767676767676767e-06, | |
| "loss": 0.796, | |
| "step": 636000 | |
| }, | |
| { | |
| "epoch": 20.7, | |
| "learning_rate": 3.671717171717172e-06, | |
| "loss": 0.7956, | |
| "step": 636500 | |
| }, | |
| { | |
| "epoch": 20.72, | |
| "learning_rate": 3.6666666666666666e-06, | |
| "loss": 0.7961, | |
| "step": 637000 | |
| }, | |
| { | |
| "epoch": 20.73, | |
| "learning_rate": 3.661616161616162e-06, | |
| "loss": 0.7957, | |
| "step": 637500 | |
| }, | |
| { | |
| "epoch": 20.75, | |
| "learning_rate": 3.6565656565656573e-06, | |
| "loss": 0.7957, | |
| "step": 638000 | |
| }, | |
| { | |
| "epoch": 20.77, | |
| "learning_rate": 3.651515151515152e-06, | |
| "loss": 0.7955, | |
| "step": 638500 | |
| }, | |
| { | |
| "epoch": 20.78, | |
| "learning_rate": 3.6464646464646467e-06, | |
| "loss": 0.7953, | |
| "step": 639000 | |
| }, | |
| { | |
| "epoch": 20.8, | |
| "learning_rate": 3.641414141414142e-06, | |
| "loss": 0.7947, | |
| "step": 639500 | |
| }, | |
| { | |
| "epoch": 20.81, | |
| "learning_rate": 3.6363636363636366e-06, | |
| "loss": 0.7946, | |
| "step": 640000 | |
| }, | |
| { | |
| "epoch": 20.83, | |
| "learning_rate": 3.6313131313131317e-06, | |
| "loss": 0.795, | |
| "step": 640500 | |
| }, | |
| { | |
| "epoch": 20.85, | |
| "learning_rate": 3.6262626262626264e-06, | |
| "loss": 0.795, | |
| "step": 641000 | |
| }, | |
| { | |
| "epoch": 20.86, | |
| "learning_rate": 3.6212121212121216e-06, | |
| "loss": 0.7944, | |
| "step": 641500 | |
| }, | |
| { | |
| "epoch": 20.88, | |
| "learning_rate": 3.6161616161616163e-06, | |
| "loss": 0.7939, | |
| "step": 642000 | |
| }, | |
| { | |
| "epoch": 20.9, | |
| "learning_rate": 3.6111111111111115e-06, | |
| "loss": 0.7944, | |
| "step": 642500 | |
| }, | |
| { | |
| "epoch": 20.91, | |
| "learning_rate": 3.606060606060606e-06, | |
| "loss": 0.7939, | |
| "step": 643000 | |
| }, | |
| { | |
| "epoch": 20.93, | |
| "learning_rate": 3.6010101010101013e-06, | |
| "loss": 0.794, | |
| "step": 643500 | |
| }, | |
| { | |
| "epoch": 20.94, | |
| "learning_rate": 3.595959595959596e-06, | |
| "loss": 0.7935, | |
| "step": 644000 | |
| }, | |
| { | |
| "epoch": 20.96, | |
| "learning_rate": 3.590909090909091e-06, | |
| "loss": 0.7937, | |
| "step": 644500 | |
| }, | |
| { | |
| "epoch": 20.98, | |
| "learning_rate": 3.585858585858586e-06, | |
| "loss": 0.7936, | |
| "step": 645000 | |
| }, | |
| { | |
| "epoch": 20.99, | |
| "learning_rate": 3.580808080808081e-06, | |
| "loss": 0.793, | |
| "step": 645500 | |
| }, | |
| { | |
| "epoch": 21.01, | |
| "learning_rate": 3.575757575757576e-06, | |
| "loss": 0.7927, | |
| "step": 646000 | |
| }, | |
| { | |
| "epoch": 21.03, | |
| "learning_rate": 3.5707070707070713e-06, | |
| "loss": 0.7927, | |
| "step": 646500 | |
| }, | |
| { | |
| "epoch": 21.04, | |
| "learning_rate": 3.565656565656566e-06, | |
| "loss": 0.7928, | |
| "step": 647000 | |
| }, | |
| { | |
| "epoch": 21.06, | |
| "learning_rate": 3.560606060606061e-06, | |
| "loss": 0.7925, | |
| "step": 647500 | |
| }, | |
| { | |
| "epoch": 21.07, | |
| "learning_rate": 3.555555555555556e-06, | |
| "loss": 0.7924, | |
| "step": 648000 | |
| }, | |
| { | |
| "epoch": 21.09, | |
| "learning_rate": 3.550505050505051e-06, | |
| "loss": 0.7921, | |
| "step": 648500 | |
| }, | |
| { | |
| "epoch": 21.11, | |
| "learning_rate": 3.5454545454545458e-06, | |
| "loss": 0.7915, | |
| "step": 649000 | |
| }, | |
| { | |
| "epoch": 21.12, | |
| "learning_rate": 3.540404040404041e-06, | |
| "loss": 0.7922, | |
| "step": 649500 | |
| }, | |
| { | |
| "epoch": 21.14, | |
| "learning_rate": 3.5353535353535356e-06, | |
| "loss": 0.7916, | |
| "step": 650000 | |
| }, | |
| { | |
| "epoch": 21.16, | |
| "learning_rate": 3.5303030303030304e-06, | |
| "loss": 0.7914, | |
| "step": 650500 | |
| }, | |
| { | |
| "epoch": 21.17, | |
| "learning_rate": 3.5252525252525255e-06, | |
| "loss": 0.7914, | |
| "step": 651000 | |
| }, | |
| { | |
| "epoch": 21.19, | |
| "learning_rate": 3.5202020202020202e-06, | |
| "loss": 0.7912, | |
| "step": 651500 | |
| }, | |
| { | |
| "epoch": 21.2, | |
| "learning_rate": 3.5151515151515154e-06, | |
| "loss": 0.7904, | |
| "step": 652000 | |
| }, | |
| { | |
| "epoch": 21.22, | |
| "learning_rate": 3.51010101010101e-06, | |
| "loss": 0.7909, | |
| "step": 652500 | |
| }, | |
| { | |
| "epoch": 21.24, | |
| "learning_rate": 3.5050505050505052e-06, | |
| "loss": 0.7908, | |
| "step": 653000 | |
| }, | |
| { | |
| "epoch": 21.25, | |
| "learning_rate": 3.5e-06, | |
| "loss": 0.7907, | |
| "step": 653500 | |
| }, | |
| { | |
| "epoch": 21.27, | |
| "learning_rate": 3.494949494949495e-06, | |
| "loss": 0.7903, | |
| "step": 654000 | |
| }, | |
| { | |
| "epoch": 21.29, | |
| "learning_rate": 3.4898989898989907e-06, | |
| "loss": 0.7902, | |
| "step": 654500 | |
| }, | |
| { | |
| "epoch": 21.3, | |
| "learning_rate": 3.4848484848484854e-06, | |
| "loss": 0.7908, | |
| "step": 655000 | |
| }, | |
| { | |
| "epoch": 21.32, | |
| "learning_rate": 3.47979797979798e-06, | |
| "loss": 0.7897, | |
| "step": 655500 | |
| }, | |
| { | |
| "epoch": 21.33, | |
| "learning_rate": 3.4747474747474752e-06, | |
| "loss": 0.7897, | |
| "step": 656000 | |
| }, | |
| { | |
| "epoch": 21.35, | |
| "learning_rate": 3.46969696969697e-06, | |
| "loss": 0.7897, | |
| "step": 656500 | |
| }, | |
| { | |
| "epoch": 21.37, | |
| "learning_rate": 3.464646464646465e-06, | |
| "loss": 0.7891, | |
| "step": 657000 | |
| }, | |
| { | |
| "epoch": 21.38, | |
| "learning_rate": 3.45959595959596e-06, | |
| "loss": 0.7894, | |
| "step": 657500 | |
| }, | |
| { | |
| "epoch": 21.4, | |
| "learning_rate": 3.454545454545455e-06, | |
| "loss": 0.7893, | |
| "step": 658000 | |
| }, | |
| { | |
| "epoch": 21.42, | |
| "learning_rate": 3.4494949494949497e-06, | |
| "loss": 0.7893, | |
| "step": 658500 | |
| }, | |
| { | |
| "epoch": 21.43, | |
| "learning_rate": 3.444444444444445e-06, | |
| "loss": 0.789, | |
| "step": 659000 | |
| }, | |
| { | |
| "epoch": 21.45, | |
| "learning_rate": 3.4393939393939395e-06, | |
| "loss": 0.7888, | |
| "step": 659500 | |
| }, | |
| { | |
| "epoch": 21.46, | |
| "learning_rate": 3.4343434343434347e-06, | |
| "loss": 0.7885, | |
| "step": 660000 | |
| }, | |
| { | |
| "epoch": 21.48, | |
| "learning_rate": 3.4292929292929294e-06, | |
| "loss": 0.788, | |
| "step": 660500 | |
| }, | |
| { | |
| "epoch": 21.5, | |
| "learning_rate": 3.4242424242424246e-06, | |
| "loss": 0.7885, | |
| "step": 661000 | |
| }, | |
| { | |
| "epoch": 21.51, | |
| "learning_rate": 3.4191919191919193e-06, | |
| "loss": 0.7874, | |
| "step": 661500 | |
| }, | |
| { | |
| "epoch": 21.53, | |
| "learning_rate": 3.414141414141414e-06, | |
| "loss": 0.7878, | |
| "step": 662000 | |
| }, | |
| { | |
| "epoch": 21.55, | |
| "learning_rate": 3.409090909090909e-06, | |
| "loss": 0.7879, | |
| "step": 662500 | |
| }, | |
| { | |
| "epoch": 21.56, | |
| "learning_rate": 3.4040404040404047e-06, | |
| "loss": 0.7874, | |
| "step": 663000 | |
| }, | |
| { | |
| "epoch": 21.58, | |
| "learning_rate": 3.3989898989898994e-06, | |
| "loss": 0.7872, | |
| "step": 663500 | |
| }, | |
| { | |
| "epoch": 21.59, | |
| "learning_rate": 3.3939393939393946e-06, | |
| "loss": 0.7869, | |
| "step": 664000 | |
| }, | |
| { | |
| "epoch": 21.61, | |
| "learning_rate": 3.3888888888888893e-06, | |
| "loss": 0.787, | |
| "step": 664500 | |
| }, | |
| { | |
| "epoch": 21.63, | |
| "learning_rate": 3.3838383838383844e-06, | |
| "loss": 0.787, | |
| "step": 665000 | |
| }, | |
| { | |
| "epoch": 21.64, | |
| "learning_rate": 3.378787878787879e-06, | |
| "loss": 0.787, | |
| "step": 665500 | |
| }, | |
| { | |
| "epoch": 21.66, | |
| "learning_rate": 3.3737373737373743e-06, | |
| "loss": 0.7864, | |
| "step": 666000 | |
| }, | |
| { | |
| "epoch": 21.68, | |
| "learning_rate": 3.368686868686869e-06, | |
| "loss": 0.7861, | |
| "step": 666500 | |
| }, | |
| { | |
| "epoch": 21.69, | |
| "learning_rate": 3.3636363636363637e-06, | |
| "loss": 0.7861, | |
| "step": 667000 | |
| }, | |
| { | |
| "epoch": 21.71, | |
| "learning_rate": 3.358585858585859e-06, | |
| "loss": 0.7856, | |
| "step": 667500 | |
| }, | |
| { | |
| "epoch": 21.72, | |
| "learning_rate": 3.3535353535353536e-06, | |
| "loss": 0.7854, | |
| "step": 668000 | |
| }, | |
| { | |
| "epoch": 21.74, | |
| "learning_rate": 3.3484848484848487e-06, | |
| "loss": 0.786, | |
| "step": 668500 | |
| }, | |
| { | |
| "epoch": 21.76, | |
| "learning_rate": 3.3434343434343435e-06, | |
| "loss": 0.7851, | |
| "step": 669000 | |
| }, | |
| { | |
| "epoch": 21.77, | |
| "learning_rate": 3.3383838383838386e-06, | |
| "loss": 0.7858, | |
| "step": 669500 | |
| }, | |
| { | |
| "epoch": 21.79, | |
| "learning_rate": 3.3333333333333333e-06, | |
| "loss": 0.7851, | |
| "step": 670000 | |
| }, | |
| { | |
| "epoch": 21.81, | |
| "learning_rate": 3.3282828282828285e-06, | |
| "loss": 0.7853, | |
| "step": 670500 | |
| }, | |
| { | |
| "epoch": 21.82, | |
| "learning_rate": 3.323232323232323e-06, | |
| "loss": 0.7847, | |
| "step": 671000 | |
| }, | |
| { | |
| "epoch": 21.84, | |
| "learning_rate": 3.3181818181818188e-06, | |
| "loss": 0.7843, | |
| "step": 671500 | |
| }, | |
| { | |
| "epoch": 21.86, | |
| "learning_rate": 3.3131313131313135e-06, | |
| "loss": 0.7842, | |
| "step": 672000 | |
| }, | |
| { | |
| "epoch": 21.87, | |
| "learning_rate": 3.3080808080808086e-06, | |
| "loss": 0.7843, | |
| "step": 672500 | |
| }, | |
| { | |
| "epoch": 21.89, | |
| "learning_rate": 3.3030303030303033e-06, | |
| "loss": 0.7841, | |
| "step": 673000 | |
| }, | |
| { | |
| "epoch": 21.9, | |
| "learning_rate": 3.2979797979797985e-06, | |
| "loss": 0.7837, | |
| "step": 673500 | |
| }, | |
| { | |
| "epoch": 21.92, | |
| "learning_rate": 3.292929292929293e-06, | |
| "loss": 0.7839, | |
| "step": 674000 | |
| }, | |
| { | |
| "epoch": 21.94, | |
| "learning_rate": 3.2878787878787883e-06, | |
| "loss": 0.7832, | |
| "step": 674500 | |
| }, | |
| { | |
| "epoch": 21.95, | |
| "learning_rate": 3.282828282828283e-06, | |
| "loss": 0.7832, | |
| "step": 675000 | |
| }, | |
| { | |
| "epoch": 21.97, | |
| "learning_rate": 3.277777777777778e-06, | |
| "loss": 0.7829, | |
| "step": 675500 | |
| }, | |
| { | |
| "epoch": 21.99, | |
| "learning_rate": 3.272727272727273e-06, | |
| "loss": 0.7829, | |
| "step": 676000 | |
| }, | |
| { | |
| "epoch": 22.0, | |
| "learning_rate": 3.267676767676768e-06, | |
| "loss": 0.7821, | |
| "step": 676500 | |
| }, | |
| { | |
| "epoch": 22.02, | |
| "learning_rate": 3.262626262626263e-06, | |
| "loss": 0.7827, | |
| "step": 677000 | |
| }, | |
| { | |
| "epoch": 22.03, | |
| "learning_rate": 3.257575757575758e-06, | |
| "loss": 0.7831, | |
| "step": 677500 | |
| }, | |
| { | |
| "epoch": 22.05, | |
| "learning_rate": 3.2525252525252527e-06, | |
| "loss": 0.7816, | |
| "step": 678000 | |
| }, | |
| { | |
| "epoch": 22.07, | |
| "learning_rate": 3.2474747474747474e-06, | |
| "loss": 0.7816, | |
| "step": 678500 | |
| }, | |
| { | |
| "epoch": 22.08, | |
| "learning_rate": 3.2424242424242425e-06, | |
| "loss": 0.7818, | |
| "step": 679000 | |
| }, | |
| { | |
| "epoch": 22.1, | |
| "learning_rate": 3.2373737373737372e-06, | |
| "loss": 0.7822, | |
| "step": 679500 | |
| }, | |
| { | |
| "epoch": 22.12, | |
| "learning_rate": 3.232323232323233e-06, | |
| "loss": 0.7815, | |
| "step": 680000 | |
| }, | |
| { | |
| "epoch": 22.13, | |
| "learning_rate": 3.227272727272728e-06, | |
| "loss": 0.7815, | |
| "step": 680500 | |
| }, | |
| { | |
| "epoch": 22.15, | |
| "learning_rate": 3.2222222222222227e-06, | |
| "loss": 0.7808, | |
| "step": 681000 | |
| }, | |
| { | |
| "epoch": 22.16, | |
| "learning_rate": 3.217171717171718e-06, | |
| "loss": 0.781, | |
| "step": 681500 | |
| }, | |
| { | |
| "epoch": 22.18, | |
| "learning_rate": 3.2121212121212125e-06, | |
| "loss": 0.781, | |
| "step": 682000 | |
| }, | |
| { | |
| "epoch": 22.2, | |
| "learning_rate": 3.2070707070707072e-06, | |
| "loss": 0.7803, | |
| "step": 682500 | |
| }, | |
| { | |
| "epoch": 22.21, | |
| "learning_rate": 3.2020202020202024e-06, | |
| "loss": 0.7797, | |
| "step": 683000 | |
| }, | |
| { | |
| "epoch": 22.23, | |
| "learning_rate": 3.196969696969697e-06, | |
| "loss": 0.7802, | |
| "step": 683500 | |
| }, | |
| { | |
| "epoch": 22.25, | |
| "learning_rate": 3.1919191919191923e-06, | |
| "loss": 0.7798, | |
| "step": 684000 | |
| }, | |
| { | |
| "epoch": 22.26, | |
| "learning_rate": 3.186868686868687e-06, | |
| "loss": 0.7799, | |
| "step": 684500 | |
| }, | |
| { | |
| "epoch": 22.28, | |
| "learning_rate": 3.181818181818182e-06, | |
| "loss": 0.7797, | |
| "step": 685000 | |
| }, | |
| { | |
| "epoch": 22.29, | |
| "learning_rate": 3.176767676767677e-06, | |
| "loss": 0.7794, | |
| "step": 685500 | |
| }, | |
| { | |
| "epoch": 22.31, | |
| "learning_rate": 3.171717171717172e-06, | |
| "loss": 0.779, | |
| "step": 686000 | |
| }, | |
| { | |
| "epoch": 22.33, | |
| "learning_rate": 3.1666666666666667e-06, | |
| "loss": 0.779, | |
| "step": 686500 | |
| }, | |
| { | |
| "epoch": 22.34, | |
| "learning_rate": 3.161616161616162e-06, | |
| "loss": 0.7788, | |
| "step": 687000 | |
| }, | |
| { | |
| "epoch": 22.36, | |
| "learning_rate": 3.1565656565656566e-06, | |
| "loss": 0.7787, | |
| "step": 687500 | |
| }, | |
| { | |
| "epoch": 22.38, | |
| "learning_rate": 3.1515151515151517e-06, | |
| "loss": 0.7785, | |
| "step": 688000 | |
| }, | |
| { | |
| "epoch": 22.39, | |
| "learning_rate": 3.146464646464647e-06, | |
| "loss": 0.7787, | |
| "step": 688500 | |
| }, | |
| { | |
| "epoch": 22.41, | |
| "learning_rate": 3.141414141414142e-06, | |
| "loss": 0.7783, | |
| "step": 689000 | |
| }, | |
| { | |
| "epoch": 22.42, | |
| "learning_rate": 3.1363636363636367e-06, | |
| "loss": 0.7781, | |
| "step": 689500 | |
| }, | |
| { | |
| "epoch": 22.44, | |
| "learning_rate": 3.131313131313132e-06, | |
| "loss": 0.7777, | |
| "step": 690000 | |
| }, | |
| { | |
| "epoch": 22.46, | |
| "learning_rate": 3.1262626262626266e-06, | |
| "loss": 0.7778, | |
| "step": 690500 | |
| }, | |
| { | |
| "epoch": 22.47, | |
| "learning_rate": 3.1212121212121217e-06, | |
| "loss": 0.7771, | |
| "step": 691000 | |
| }, | |
| { | |
| "epoch": 22.49, | |
| "learning_rate": 3.1161616161616164e-06, | |
| "loss": 0.7774, | |
| "step": 691500 | |
| }, | |
| { | |
| "epoch": 22.51, | |
| "learning_rate": 3.1111111111111116e-06, | |
| "loss": 0.7769, | |
| "step": 692000 | |
| }, | |
| { | |
| "epoch": 22.52, | |
| "learning_rate": 3.1060606060606063e-06, | |
| "loss": 0.7769, | |
| "step": 692500 | |
| }, | |
| { | |
| "epoch": 22.54, | |
| "learning_rate": 3.1010101010101014e-06, | |
| "loss": 0.7762, | |
| "step": 693000 | |
| }, | |
| { | |
| "epoch": 22.55, | |
| "learning_rate": 3.095959595959596e-06, | |
| "loss": 0.7763, | |
| "step": 693500 | |
| }, | |
| { | |
| "epoch": 22.57, | |
| "learning_rate": 3.090909090909091e-06, | |
| "loss": 0.7759, | |
| "step": 694000 | |
| }, | |
| { | |
| "epoch": 22.59, | |
| "learning_rate": 3.085858585858586e-06, | |
| "loss": 0.7763, | |
| "step": 694500 | |
| }, | |
| { | |
| "epoch": 22.6, | |
| "learning_rate": 3.0808080808080807e-06, | |
| "loss": 0.7759, | |
| "step": 695000 | |
| }, | |
| { | |
| "epoch": 22.62, | |
| "learning_rate": 3.075757575757576e-06, | |
| "loss": 0.7756, | |
| "step": 695500 | |
| }, | |
| { | |
| "epoch": 22.64, | |
| "learning_rate": 3.0707070707070706e-06, | |
| "loss": 0.7751, | |
| "step": 696000 | |
| }, | |
| { | |
| "epoch": 22.65, | |
| "learning_rate": 3.0656565656565658e-06, | |
| "loss": 0.7751, | |
| "step": 696500 | |
| }, | |
| { | |
| "epoch": 22.67, | |
| "learning_rate": 3.0606060606060605e-06, | |
| "loss": 0.7749, | |
| "step": 697000 | |
| }, | |
| { | |
| "epoch": 22.68, | |
| "learning_rate": 3.055555555555556e-06, | |
| "loss": 0.7747, | |
| "step": 697500 | |
| }, | |
| { | |
| "epoch": 22.7, | |
| "learning_rate": 3.0505050505050508e-06, | |
| "loss": 0.7752, | |
| "step": 698000 | |
| }, | |
| { | |
| "epoch": 22.72, | |
| "learning_rate": 3.045454545454546e-06, | |
| "loss": 0.7747, | |
| "step": 698500 | |
| }, | |
| { | |
| "epoch": 22.73, | |
| "learning_rate": 3.0404040404040406e-06, | |
| "loss": 0.7744, | |
| "step": 699000 | |
| }, | |
| { | |
| "epoch": 22.75, | |
| "learning_rate": 3.0353535353535358e-06, | |
| "loss": 0.7746, | |
| "step": 699500 | |
| }, | |
| { | |
| "epoch": 22.77, | |
| "learning_rate": 3.0303030303030305e-06, | |
| "loss": 0.7746, | |
| "step": 700000 | |
| }, | |
| { | |
| "epoch": 22.78, | |
| "learning_rate": 3.0252525252525256e-06, | |
| "loss": 0.7737, | |
| "step": 700500 | |
| }, | |
| { | |
| "epoch": 22.8, | |
| "learning_rate": 3.0202020202020203e-06, | |
| "loss": 0.7739, | |
| "step": 701000 | |
| }, | |
| { | |
| "epoch": 22.81, | |
| "learning_rate": 3.0151515151515155e-06, | |
| "loss": 0.7731, | |
| "step": 701500 | |
| }, | |
| { | |
| "epoch": 22.83, | |
| "learning_rate": 3.0101010101010102e-06, | |
| "loss": 0.7734, | |
| "step": 702000 | |
| }, | |
| { | |
| "epoch": 22.85, | |
| "learning_rate": 3.0050505050505054e-06, | |
| "loss": 0.7733, | |
| "step": 702500 | |
| }, | |
| { | |
| "epoch": 22.86, | |
| "learning_rate": 3e-06, | |
| "loss": 0.7729, | |
| "step": 703000 | |
| }, | |
| { | |
| "epoch": 22.88, | |
| "learning_rate": 2.9949494949494952e-06, | |
| "loss": 0.7729, | |
| "step": 703500 | |
| }, | |
| { | |
| "epoch": 22.9, | |
| "learning_rate": 2.98989898989899e-06, | |
| "loss": 0.7729, | |
| "step": 704000 | |
| }, | |
| { | |
| "epoch": 22.91, | |
| "learning_rate": 2.984848484848485e-06, | |
| "loss": 0.7724, | |
| "step": 704500 | |
| }, | |
| { | |
| "epoch": 22.93, | |
| "learning_rate": 2.97979797979798e-06, | |
| "loss": 0.7724, | |
| "step": 705000 | |
| }, | |
| { | |
| "epoch": 22.94, | |
| "learning_rate": 2.9747474747474745e-06, | |
| "loss": 0.7727, | |
| "step": 705500 | |
| }, | |
| { | |
| "epoch": 22.96, | |
| "learning_rate": 2.96969696969697e-06, | |
| "loss": 0.7721, | |
| "step": 706000 | |
| }, | |
| { | |
| "epoch": 22.98, | |
| "learning_rate": 2.9646464646464652e-06, | |
| "loss": 0.7719, | |
| "step": 706500 | |
| }, | |
| { | |
| "epoch": 22.99, | |
| "learning_rate": 2.95959595959596e-06, | |
| "loss": 0.7724, | |
| "step": 707000 | |
| }, | |
| { | |
| "epoch": 23.01, | |
| "learning_rate": 2.954545454545455e-06, | |
| "loss": 0.7717, | |
| "step": 707500 | |
| }, | |
| { | |
| "epoch": 23.03, | |
| "learning_rate": 2.94949494949495e-06, | |
| "loss": 0.7714, | |
| "step": 708000 | |
| }, | |
| { | |
| "epoch": 23.04, | |
| "learning_rate": 2.944444444444445e-06, | |
| "loss": 0.7715, | |
| "step": 708500 | |
| }, | |
| { | |
| "epoch": 23.06, | |
| "learning_rate": 2.9393939393939397e-06, | |
| "loss": 0.7705, | |
| "step": 709000 | |
| }, | |
| { | |
| "epoch": 23.07, | |
| "learning_rate": 2.9343434343434344e-06, | |
| "loss": 0.7708, | |
| "step": 709500 | |
| }, | |
| { | |
| "epoch": 23.09, | |
| "learning_rate": 2.9292929292929295e-06, | |
| "loss": 0.7707, | |
| "step": 710000 | |
| }, | |
| { | |
| "epoch": 23.11, | |
| "learning_rate": 2.9242424242424243e-06, | |
| "loss": 0.7705, | |
| "step": 710500 | |
| }, | |
| { | |
| "epoch": 23.12, | |
| "learning_rate": 2.9191919191919194e-06, | |
| "loss": 0.77, | |
| "step": 711000 | |
| }, | |
| { | |
| "epoch": 23.14, | |
| "learning_rate": 2.914141414141414e-06, | |
| "loss": 0.7698, | |
| "step": 711500 | |
| }, | |
| { | |
| "epoch": 23.16, | |
| "learning_rate": 2.9090909090909093e-06, | |
| "loss": 0.7696, | |
| "step": 712000 | |
| }, | |
| { | |
| "epoch": 23.17, | |
| "learning_rate": 2.904040404040404e-06, | |
| "loss": 0.7698, | |
| "step": 712500 | |
| }, | |
| { | |
| "epoch": 23.19, | |
| "learning_rate": 2.898989898989899e-06, | |
| "loss": 0.7695, | |
| "step": 713000 | |
| }, | |
| { | |
| "epoch": 23.2, | |
| "learning_rate": 2.893939393939394e-06, | |
| "loss": 0.7695, | |
| "step": 713500 | |
| }, | |
| { | |
| "epoch": 23.22, | |
| "learning_rate": 2.888888888888889e-06, | |
| "loss": 0.7692, | |
| "step": 714000 | |
| }, | |
| { | |
| "epoch": 23.24, | |
| "learning_rate": 2.883838383838384e-06, | |
| "loss": 0.7686, | |
| "step": 714500 | |
| }, | |
| { | |
| "epoch": 23.25, | |
| "learning_rate": 2.8787878787878793e-06, | |
| "loss": 0.7681, | |
| "step": 715000 | |
| }, | |
| { | |
| "epoch": 23.27, | |
| "learning_rate": 2.873737373737374e-06, | |
| "loss": 0.7681, | |
| "step": 715500 | |
| }, | |
| { | |
| "epoch": 23.29, | |
| "learning_rate": 2.868686868686869e-06, | |
| "loss": 0.7677, | |
| "step": 716000 | |
| }, | |
| { | |
| "epoch": 23.3, | |
| "learning_rate": 2.863636363636364e-06, | |
| "loss": 0.768, | |
| "step": 716500 | |
| }, | |
| { | |
| "epoch": 23.32, | |
| "learning_rate": 2.858585858585859e-06, | |
| "loss": 0.7676, | |
| "step": 717000 | |
| }, | |
| { | |
| "epoch": 23.33, | |
| "learning_rate": 2.8535353535353537e-06, | |
| "loss": 0.7683, | |
| "step": 717500 | |
| }, | |
| { | |
| "epoch": 23.35, | |
| "learning_rate": 2.848484848484849e-06, | |
| "loss": 0.7679, | |
| "step": 718000 | |
| }, | |
| { | |
| "epoch": 23.37, | |
| "learning_rate": 2.8434343434343436e-06, | |
| "loss": 0.7675, | |
| "step": 718500 | |
| }, | |
| { | |
| "epoch": 23.38, | |
| "learning_rate": 2.8383838383838387e-06, | |
| "loss": 0.767, | |
| "step": 719000 | |
| }, | |
| { | |
| "epoch": 23.4, | |
| "learning_rate": 2.8333333333333335e-06, | |
| "loss": 0.7678, | |
| "step": 719500 | |
| }, | |
| { | |
| "epoch": 23.42, | |
| "learning_rate": 2.8282828282828286e-06, | |
| "loss": 0.7666, | |
| "step": 720000 | |
| }, | |
| { | |
| "epoch": 23.43, | |
| "learning_rate": 2.8232323232323233e-06, | |
| "loss": 0.7663, | |
| "step": 720500 | |
| }, | |
| { | |
| "epoch": 23.45, | |
| "learning_rate": 2.818181818181818e-06, | |
| "loss": 0.767, | |
| "step": 721000 | |
| }, | |
| { | |
| "epoch": 23.46, | |
| "learning_rate": 2.813131313131313e-06, | |
| "loss": 0.7664, | |
| "step": 721500 | |
| }, | |
| { | |
| "epoch": 23.48, | |
| "learning_rate": 2.808080808080808e-06, | |
| "loss": 0.7665, | |
| "step": 722000 | |
| }, | |
| { | |
| "epoch": 23.5, | |
| "learning_rate": 2.803030303030303e-06, | |
| "loss": 0.7664, | |
| "step": 722500 | |
| }, | |
| { | |
| "epoch": 23.51, | |
| "learning_rate": 2.7979797979797986e-06, | |
| "loss": 0.7659, | |
| "step": 723000 | |
| }, | |
| { | |
| "epoch": 23.53, | |
| "learning_rate": 2.7929292929292933e-06, | |
| "loss": 0.7659, | |
| "step": 723500 | |
| }, | |
| { | |
| "epoch": 23.55, | |
| "learning_rate": 2.7878787878787885e-06, | |
| "loss": 0.7656, | |
| "step": 724000 | |
| }, | |
| { | |
| "epoch": 23.56, | |
| "learning_rate": 2.782828282828283e-06, | |
| "loss": 0.7651, | |
| "step": 724500 | |
| }, | |
| { | |
| "epoch": 23.58, | |
| "learning_rate": 2.7777777777777783e-06, | |
| "loss": 0.7648, | |
| "step": 725000 | |
| }, | |
| { | |
| "epoch": 23.6, | |
| "learning_rate": 2.772727272727273e-06, | |
| "loss": 0.7655, | |
| "step": 725500 | |
| }, | |
| { | |
| "epoch": 23.61, | |
| "learning_rate": 2.7676767676767678e-06, | |
| "loss": 0.7651, | |
| "step": 726000 | |
| }, | |
| { | |
| "epoch": 23.63, | |
| "learning_rate": 2.762626262626263e-06, | |
| "loss": 0.7643, | |
| "step": 726500 | |
| }, | |
| { | |
| "epoch": 23.64, | |
| "learning_rate": 2.7575757575757576e-06, | |
| "loss": 0.7637, | |
| "step": 727000 | |
| }, | |
| { | |
| "epoch": 23.66, | |
| "learning_rate": 2.7525252525252528e-06, | |
| "loss": 0.7646, | |
| "step": 727500 | |
| }, | |
| { | |
| "epoch": 23.68, | |
| "learning_rate": 2.7474747474747475e-06, | |
| "loss": 0.7648, | |
| "step": 728000 | |
| }, | |
| { | |
| "epoch": 23.69, | |
| "learning_rate": 2.7424242424242426e-06, | |
| "loss": 0.7643, | |
| "step": 728500 | |
| }, | |
| { | |
| "epoch": 23.71, | |
| "learning_rate": 2.7373737373737374e-06, | |
| "loss": 0.7643, | |
| "step": 729000 | |
| }, | |
| { | |
| "epoch": 23.73, | |
| "learning_rate": 2.7323232323232325e-06, | |
| "loss": 0.7641, | |
| "step": 729500 | |
| }, | |
| { | |
| "epoch": 23.74, | |
| "learning_rate": 2.7272727272727272e-06, | |
| "loss": 0.7633, | |
| "step": 730000 | |
| }, | |
| { | |
| "epoch": 23.76, | |
| "learning_rate": 2.7222222222222224e-06, | |
| "loss": 0.764, | |
| "step": 730500 | |
| }, | |
| { | |
| "epoch": 23.77, | |
| "learning_rate": 2.717171717171717e-06, | |
| "loss": 0.7634, | |
| "step": 731000 | |
| }, | |
| { | |
| "epoch": 23.79, | |
| "learning_rate": 2.7121212121212127e-06, | |
| "loss": 0.7629, | |
| "step": 731500 | |
| }, | |
| { | |
| "epoch": 23.81, | |
| "learning_rate": 2.7070707070707074e-06, | |
| "loss": 0.7633, | |
| "step": 732000 | |
| }, | |
| { | |
| "epoch": 23.82, | |
| "learning_rate": 2.7020202020202025e-06, | |
| "loss": 0.7622, | |
| "step": 732500 | |
| }, | |
| { | |
| "epoch": 23.84, | |
| "learning_rate": 2.6969696969696972e-06, | |
| "loss": 0.7628, | |
| "step": 733000 | |
| }, | |
| { | |
| "epoch": 23.86, | |
| "learning_rate": 2.6919191919191924e-06, | |
| "loss": 0.762, | |
| "step": 733500 | |
| }, | |
| { | |
| "epoch": 23.87, | |
| "learning_rate": 2.686868686868687e-06, | |
| "loss": 0.7631, | |
| "step": 734000 | |
| }, | |
| { | |
| "epoch": 23.89, | |
| "learning_rate": 2.6818181818181822e-06, | |
| "loss": 0.7623, | |
| "step": 734500 | |
| }, | |
| { | |
| "epoch": 23.9, | |
| "learning_rate": 2.676767676767677e-06, | |
| "loss": 0.7622, | |
| "step": 735000 | |
| }, | |
| { | |
| "epoch": 23.92, | |
| "learning_rate": 2.671717171717172e-06, | |
| "loss": 0.7621, | |
| "step": 735500 | |
| }, | |
| { | |
| "epoch": 23.94, | |
| "learning_rate": 2.666666666666667e-06, | |
| "loss": 0.7625, | |
| "step": 736000 | |
| }, | |
| { | |
| "epoch": 23.95, | |
| "learning_rate": 2.661616161616162e-06, | |
| "loss": 0.7619, | |
| "step": 736500 | |
| }, | |
| { | |
| "epoch": 23.97, | |
| "learning_rate": 2.6565656565656567e-06, | |
| "loss": 0.761, | |
| "step": 737000 | |
| }, | |
| { | |
| "epoch": 23.99, | |
| "learning_rate": 2.6515151515151514e-06, | |
| "loss": 0.761, | |
| "step": 737500 | |
| }, | |
| { | |
| "epoch": 24.0, | |
| "learning_rate": 2.6464646464646466e-06, | |
| "loss": 0.7614, | |
| "step": 738000 | |
| }, | |
| { | |
| "epoch": 24.02, | |
| "learning_rate": 2.6414141414141413e-06, | |
| "loss": 0.7612, | |
| "step": 738500 | |
| }, | |
| { | |
| "epoch": 24.03, | |
| "learning_rate": 2.6363636363636364e-06, | |
| "loss": 0.7612, | |
| "step": 739000 | |
| }, | |
| { | |
| "epoch": 24.05, | |
| "learning_rate": 2.631313131313131e-06, | |
| "loss": 0.7606, | |
| "step": 739500 | |
| }, | |
| { | |
| "epoch": 24.07, | |
| "learning_rate": 2.6262626262626267e-06, | |
| "loss": 0.7606, | |
| "step": 740000 | |
| }, | |
| { | |
| "epoch": 24.08, | |
| "learning_rate": 2.621212121212122e-06, | |
| "loss": 0.7604, | |
| "step": 740500 | |
| }, | |
| { | |
| "epoch": 24.1, | |
| "learning_rate": 2.6161616161616166e-06, | |
| "loss": 0.7607, | |
| "step": 741000 | |
| }, | |
| { | |
| "epoch": 24.12, | |
| "learning_rate": 2.6111111111111113e-06, | |
| "loss": 0.76, | |
| "step": 741500 | |
| }, | |
| { | |
| "epoch": 24.13, | |
| "learning_rate": 2.6060606060606064e-06, | |
| "loss": 0.7601, | |
| "step": 742000 | |
| }, | |
| { | |
| "epoch": 24.15, | |
| "learning_rate": 2.601010101010101e-06, | |
| "loss": 0.7592, | |
| "step": 742500 | |
| }, | |
| { | |
| "epoch": 24.16, | |
| "learning_rate": 2.5959595959595963e-06, | |
| "loss": 0.7591, | |
| "step": 743000 | |
| }, | |
| { | |
| "epoch": 24.18, | |
| "learning_rate": 2.590909090909091e-06, | |
| "loss": 0.7597, | |
| "step": 743500 | |
| }, | |
| { | |
| "epoch": 24.2, | |
| "learning_rate": 2.585858585858586e-06, | |
| "loss": 0.7592, | |
| "step": 744000 | |
| }, | |
| { | |
| "epoch": 24.21, | |
| "learning_rate": 2.580808080808081e-06, | |
| "loss": 0.7587, | |
| "step": 744500 | |
| }, | |
| { | |
| "epoch": 24.23, | |
| "learning_rate": 2.575757575757576e-06, | |
| "loss": 0.7591, | |
| "step": 745000 | |
| }, | |
| { | |
| "epoch": 24.25, | |
| "learning_rate": 2.5707070707070707e-06, | |
| "loss": 0.7591, | |
| "step": 745500 | |
| }, | |
| { | |
| "epoch": 24.26, | |
| "learning_rate": 2.565656565656566e-06, | |
| "loss": 0.7586, | |
| "step": 746000 | |
| }, | |
| { | |
| "epoch": 24.28, | |
| "learning_rate": 2.5606060606060606e-06, | |
| "loss": 0.7583, | |
| "step": 746500 | |
| }, | |
| { | |
| "epoch": 24.29, | |
| "learning_rate": 2.5555555555555557e-06, | |
| "loss": 0.7586, | |
| "step": 747000 | |
| }, | |
| { | |
| "epoch": 24.31, | |
| "learning_rate": 2.5505050505050505e-06, | |
| "loss": 0.7583, | |
| "step": 747500 | |
| }, | |
| { | |
| "epoch": 24.33, | |
| "learning_rate": 2.5454545454545456e-06, | |
| "loss": 0.7584, | |
| "step": 748000 | |
| }, | |
| { | |
| "epoch": 24.34, | |
| "learning_rate": 2.5404040404040408e-06, | |
| "loss": 0.7578, | |
| "step": 748500 | |
| }, | |
| { | |
| "epoch": 24.36, | |
| "learning_rate": 2.535353535353536e-06, | |
| "loss": 0.7582, | |
| "step": 749000 | |
| }, | |
| { | |
| "epoch": 24.38, | |
| "learning_rate": 2.5303030303030306e-06, | |
| "loss": 0.7573, | |
| "step": 749500 | |
| }, | |
| { | |
| "epoch": 24.39, | |
| "learning_rate": 2.5252525252525258e-06, | |
| "loss": 0.7573, | |
| "step": 750000 | |
| }, | |
| { | |
| "epoch": 24.41, | |
| "learning_rate": 2.5202020202020205e-06, | |
| "loss": 0.7573, | |
| "step": 750500 | |
| }, | |
| { | |
| "epoch": 24.42, | |
| "learning_rate": 2.5151515151515156e-06, | |
| "loss": 0.7574, | |
| "step": 751000 | |
| }, | |
| { | |
| "epoch": 24.44, | |
| "learning_rate": 2.5101010101010103e-06, | |
| "loss": 0.7567, | |
| "step": 751500 | |
| }, | |
| { | |
| "epoch": 24.46, | |
| "learning_rate": 2.5050505050505055e-06, | |
| "loss": 0.7566, | |
| "step": 752000 | |
| }, | |
| { | |
| "epoch": 24.47, | |
| "learning_rate": 2.5e-06, | |
| "loss": 0.7567, | |
| "step": 752500 | |
| }, | |
| { | |
| "epoch": 24.49, | |
| "learning_rate": 2.494949494949495e-06, | |
| "loss": 0.7561, | |
| "step": 753000 | |
| }, | |
| { | |
| "epoch": 24.51, | |
| "learning_rate": 2.48989898989899e-06, | |
| "loss": 0.7563, | |
| "step": 753500 | |
| }, | |
| { | |
| "epoch": 24.52, | |
| "learning_rate": 2.4848484848484848e-06, | |
| "loss": 0.7557, | |
| "step": 754000 | |
| }, | |
| { | |
| "epoch": 24.54, | |
| "learning_rate": 2.47979797979798e-06, | |
| "loss": 0.7566, | |
| "step": 754500 | |
| }, | |
| { | |
| "epoch": 24.55, | |
| "learning_rate": 2.474747474747475e-06, | |
| "loss": 0.7557, | |
| "step": 755000 | |
| }, | |
| { | |
| "epoch": 24.57, | |
| "learning_rate": 2.46969696969697e-06, | |
| "loss": 0.7557, | |
| "step": 755500 | |
| }, | |
| { | |
| "epoch": 24.59, | |
| "learning_rate": 2.464646464646465e-06, | |
| "loss": 0.7554, | |
| "step": 756000 | |
| }, | |
| { | |
| "epoch": 24.6, | |
| "learning_rate": 2.4595959595959597e-06, | |
| "loss": 0.7552, | |
| "step": 756500 | |
| }, | |
| { | |
| "epoch": 24.62, | |
| "learning_rate": 2.454545454545455e-06, | |
| "loss": 0.7556, | |
| "step": 757000 | |
| }, | |
| { | |
| "epoch": 24.64, | |
| "learning_rate": 2.4494949494949495e-06, | |
| "loss": 0.7551, | |
| "step": 757500 | |
| }, | |
| { | |
| "epoch": 24.65, | |
| "learning_rate": 2.4444444444444447e-06, | |
| "loss": 0.7551, | |
| "step": 758000 | |
| }, | |
| { | |
| "epoch": 24.67, | |
| "learning_rate": 2.4393939393939394e-06, | |
| "loss": 0.755, | |
| "step": 758500 | |
| }, | |
| { | |
| "epoch": 24.68, | |
| "learning_rate": 2.4343434343434345e-06, | |
| "loss": 0.7551, | |
| "step": 759000 | |
| }, | |
| { | |
| "epoch": 24.7, | |
| "learning_rate": 2.4292929292929297e-06, | |
| "loss": 0.7543, | |
| "step": 759500 | |
| }, | |
| { | |
| "epoch": 24.72, | |
| "learning_rate": 2.4242424242424244e-06, | |
| "loss": 0.7547, | |
| "step": 760000 | |
| }, | |
| { | |
| "epoch": 24.73, | |
| "learning_rate": 2.4191919191919195e-06, | |
| "loss": 0.7545, | |
| "step": 760500 | |
| }, | |
| { | |
| "epoch": 24.75, | |
| "learning_rate": 2.4141414141414143e-06, | |
| "loss": 0.7544, | |
| "step": 761000 | |
| }, | |
| { | |
| "epoch": 24.77, | |
| "learning_rate": 2.4090909090909094e-06, | |
| "loss": 0.7542, | |
| "step": 761500 | |
| }, | |
| { | |
| "epoch": 24.78, | |
| "learning_rate": 2.404040404040404e-06, | |
| "loss": 0.7543, | |
| "step": 762000 | |
| }, | |
| { | |
| "epoch": 24.8, | |
| "learning_rate": 2.3989898989898993e-06, | |
| "loss": 0.754, | |
| "step": 762500 | |
| }, | |
| { | |
| "epoch": 24.81, | |
| "learning_rate": 2.393939393939394e-06, | |
| "loss": 0.754, | |
| "step": 763000 | |
| }, | |
| { | |
| "epoch": 24.83, | |
| "learning_rate": 2.388888888888889e-06, | |
| "loss": 0.7535, | |
| "step": 763500 | |
| }, | |
| { | |
| "epoch": 24.85, | |
| "learning_rate": 2.3838383838383843e-06, | |
| "loss": 0.7537, | |
| "step": 764000 | |
| }, | |
| { | |
| "epoch": 24.86, | |
| "learning_rate": 2.378787878787879e-06, | |
| "loss": 0.7539, | |
| "step": 764500 | |
| }, | |
| { | |
| "epoch": 24.88, | |
| "learning_rate": 2.373737373737374e-06, | |
| "loss": 0.7533, | |
| "step": 765000 | |
| }, | |
| { | |
| "epoch": 24.9, | |
| "learning_rate": 2.368686868686869e-06, | |
| "loss": 0.7532, | |
| "step": 765500 | |
| }, | |
| { | |
| "epoch": 24.91, | |
| "learning_rate": 2.363636363636364e-06, | |
| "loss": 0.7531, | |
| "step": 766000 | |
| }, | |
| { | |
| "epoch": 24.93, | |
| "learning_rate": 2.3585858585858587e-06, | |
| "loss": 0.7529, | |
| "step": 766500 | |
| }, | |
| { | |
| "epoch": 24.94, | |
| "learning_rate": 2.3535353535353534e-06, | |
| "loss": 0.7525, | |
| "step": 767000 | |
| }, | |
| { | |
| "epoch": 24.96, | |
| "learning_rate": 2.348484848484849e-06, | |
| "loss": 0.7524, | |
| "step": 767500 | |
| }, | |
| { | |
| "epoch": 24.98, | |
| "learning_rate": 2.3434343434343437e-06, | |
| "loss": 0.7523, | |
| "step": 768000 | |
| }, | |
| { | |
| "epoch": 24.99, | |
| "learning_rate": 2.3383838383838384e-06, | |
| "loss": 0.7522, | |
| "step": 768500 | |
| }, | |
| { | |
| "epoch": 25.01, | |
| "learning_rate": 2.3333333333333336e-06, | |
| "loss": 0.7521, | |
| "step": 769000 | |
| }, | |
| { | |
| "epoch": 25.03, | |
| "learning_rate": 2.3282828282828283e-06, | |
| "loss": 0.7519, | |
| "step": 769500 | |
| }, | |
| { | |
| "epoch": 25.04, | |
| "learning_rate": 2.3232323232323234e-06, | |
| "loss": 0.7519, | |
| "step": 770000 | |
| }, | |
| { | |
| "epoch": 25.06, | |
| "learning_rate": 2.318181818181818e-06, | |
| "loss": 0.7517, | |
| "step": 770500 | |
| }, | |
| { | |
| "epoch": 25.07, | |
| "learning_rate": 2.3131313131313133e-06, | |
| "loss": 0.7516, | |
| "step": 771000 | |
| }, | |
| { | |
| "epoch": 25.09, | |
| "learning_rate": 2.308080808080808e-06, | |
| "loss": 0.7515, | |
| "step": 771500 | |
| }, | |
| { | |
| "epoch": 25.11, | |
| "learning_rate": 2.303030303030303e-06, | |
| "loss": 0.7508, | |
| "step": 772000 | |
| }, | |
| { | |
| "epoch": 25.12, | |
| "learning_rate": 2.2979797979797983e-06, | |
| "loss": 0.7518, | |
| "step": 772500 | |
| }, | |
| { | |
| "epoch": 25.14, | |
| "learning_rate": 2.292929292929293e-06, | |
| "loss": 0.7511, | |
| "step": 773000 | |
| }, | |
| { | |
| "epoch": 25.16, | |
| "learning_rate": 2.287878787878788e-06, | |
| "loss": 0.751, | |
| "step": 773500 | |
| }, | |
| { | |
| "epoch": 25.17, | |
| "learning_rate": 2.282828282828283e-06, | |
| "loss": 0.7509, | |
| "step": 774000 | |
| }, | |
| { | |
| "epoch": 25.19, | |
| "learning_rate": 2.277777777777778e-06, | |
| "loss": 0.7506, | |
| "step": 774500 | |
| }, | |
| { | |
| "epoch": 25.2, | |
| "learning_rate": 2.2727272727272728e-06, | |
| "loss": 0.7505, | |
| "step": 775000 | |
| }, | |
| { | |
| "epoch": 25.22, | |
| "learning_rate": 2.267676767676768e-06, | |
| "loss": 0.7505, | |
| "step": 775500 | |
| }, | |
| { | |
| "epoch": 25.24, | |
| "learning_rate": 2.262626262626263e-06, | |
| "loss": 0.7498, | |
| "step": 776000 | |
| }, | |
| { | |
| "epoch": 25.25, | |
| "learning_rate": 2.2575757575757578e-06, | |
| "loss": 0.7503, | |
| "step": 776500 | |
| }, | |
| { | |
| "epoch": 25.27, | |
| "learning_rate": 2.252525252525253e-06, | |
| "loss": 0.7497, | |
| "step": 777000 | |
| }, | |
| { | |
| "epoch": 25.29, | |
| "learning_rate": 2.2474747474747476e-06, | |
| "loss": 0.7504, | |
| "step": 777500 | |
| }, | |
| { | |
| "epoch": 25.3, | |
| "learning_rate": 2.2424242424242428e-06, | |
| "loss": 0.7497, | |
| "step": 778000 | |
| }, | |
| { | |
| "epoch": 25.32, | |
| "learning_rate": 2.2373737373737375e-06, | |
| "loss": 0.7499, | |
| "step": 778500 | |
| }, | |
| { | |
| "epoch": 25.33, | |
| "learning_rate": 2.2323232323232326e-06, | |
| "loss": 0.749, | |
| "step": 779000 | |
| }, | |
| { | |
| "epoch": 25.35, | |
| "learning_rate": 2.2272727272727274e-06, | |
| "loss": 0.7496, | |
| "step": 779500 | |
| }, | |
| { | |
| "epoch": 25.37, | |
| "learning_rate": 2.222222222222222e-06, | |
| "loss": 0.7496, | |
| "step": 780000 | |
| }, | |
| { | |
| "epoch": 25.38, | |
| "learning_rate": 2.2171717171717176e-06, | |
| "loss": 0.7491, | |
| "step": 780500 | |
| }, | |
| { | |
| "epoch": 25.4, | |
| "learning_rate": 2.2121212121212124e-06, | |
| "loss": 0.7491, | |
| "step": 781000 | |
| }, | |
| { | |
| "epoch": 25.42, | |
| "learning_rate": 2.2070707070707075e-06, | |
| "loss": 0.7494, | |
| "step": 781500 | |
| }, | |
| { | |
| "epoch": 25.43, | |
| "learning_rate": 2.2020202020202022e-06, | |
| "loss": 0.7489, | |
| "step": 782000 | |
| }, | |
| { | |
| "epoch": 25.45, | |
| "learning_rate": 2.196969696969697e-06, | |
| "loss": 0.7487, | |
| "step": 782500 | |
| }, | |
| { | |
| "epoch": 25.47, | |
| "learning_rate": 2.191919191919192e-06, | |
| "loss": 0.7484, | |
| "step": 783000 | |
| }, | |
| { | |
| "epoch": 25.48, | |
| "learning_rate": 2.186868686868687e-06, | |
| "loss": 0.7488, | |
| "step": 783500 | |
| }, | |
| { | |
| "epoch": 25.5, | |
| "learning_rate": 2.181818181818182e-06, | |
| "loss": 0.7488, | |
| "step": 784000 | |
| }, | |
| { | |
| "epoch": 25.51, | |
| "learning_rate": 2.1767676767676767e-06, | |
| "loss": 0.7486, | |
| "step": 784500 | |
| }, | |
| { | |
| "epoch": 25.53, | |
| "learning_rate": 2.171717171717172e-06, | |
| "loss": 0.748, | |
| "step": 785000 | |
| }, | |
| { | |
| "epoch": 25.55, | |
| "learning_rate": 2.166666666666667e-06, | |
| "loss": 0.7481, | |
| "step": 785500 | |
| }, | |
| { | |
| "epoch": 25.56, | |
| "learning_rate": 2.1616161616161617e-06, | |
| "loss": 0.7478, | |
| "step": 786000 | |
| }, | |
| { | |
| "epoch": 25.58, | |
| "learning_rate": 2.156565656565657e-06, | |
| "loss": 0.7484, | |
| "step": 786500 | |
| }, | |
| { | |
| "epoch": 25.6, | |
| "learning_rate": 2.1515151515151515e-06, | |
| "loss": 0.7477, | |
| "step": 787000 | |
| }, | |
| { | |
| "epoch": 25.61, | |
| "learning_rate": 2.1464646464646467e-06, | |
| "loss": 0.7474, | |
| "step": 787500 | |
| }, | |
| { | |
| "epoch": 25.63, | |
| "learning_rate": 2.1414141414141414e-06, | |
| "loss": 0.7472, | |
| "step": 788000 | |
| }, | |
| { | |
| "epoch": 25.64, | |
| "learning_rate": 2.1363636363636365e-06, | |
| "loss": 0.7473, | |
| "step": 788500 | |
| }, | |
| { | |
| "epoch": 25.66, | |
| "learning_rate": 2.1313131313131317e-06, | |
| "loss": 0.7471, | |
| "step": 789000 | |
| }, | |
| { | |
| "epoch": 25.68, | |
| "learning_rate": 2.1262626262626264e-06, | |
| "loss": 0.7471, | |
| "step": 789500 | |
| }, | |
| { | |
| "epoch": 25.69, | |
| "learning_rate": 2.1212121212121216e-06, | |
| "loss": 0.7471, | |
| "step": 790000 | |
| }, | |
| { | |
| "epoch": 25.71, | |
| "learning_rate": 2.1161616161616163e-06, | |
| "loss": 0.7471, | |
| "step": 790500 | |
| }, | |
| { | |
| "epoch": 25.73, | |
| "learning_rate": 2.1111111111111114e-06, | |
| "loss": 0.7467, | |
| "step": 791000 | |
| }, | |
| { | |
| "epoch": 25.74, | |
| "learning_rate": 2.106060606060606e-06, | |
| "loss": 0.7469, | |
| "step": 791500 | |
| }, | |
| { | |
| "epoch": 25.76, | |
| "learning_rate": 2.1010101010101013e-06, | |
| "loss": 0.746, | |
| "step": 792000 | |
| }, | |
| { | |
| "epoch": 25.77, | |
| "learning_rate": 2.095959595959596e-06, | |
| "loss": 0.746, | |
| "step": 792500 | |
| }, | |
| { | |
| "epoch": 25.79, | |
| "learning_rate": 2.090909090909091e-06, | |
| "loss": 0.7464, | |
| "step": 793000 | |
| }, | |
| { | |
| "epoch": 25.81, | |
| "learning_rate": 2.0858585858585863e-06, | |
| "loss": 0.7459, | |
| "step": 793500 | |
| }, | |
| { | |
| "epoch": 25.82, | |
| "learning_rate": 2.080808080808081e-06, | |
| "loss": 0.746, | |
| "step": 794000 | |
| }, | |
| { | |
| "epoch": 25.84, | |
| "learning_rate": 2.075757575757576e-06, | |
| "loss": 0.7459, | |
| "step": 794500 | |
| }, | |
| { | |
| "epoch": 25.86, | |
| "learning_rate": 2.070707070707071e-06, | |
| "loss": 0.7458, | |
| "step": 795000 | |
| }, | |
| { | |
| "epoch": 25.87, | |
| "learning_rate": 2.065656565656566e-06, | |
| "loss": 0.745, | |
| "step": 795500 | |
| }, | |
| { | |
| "epoch": 25.89, | |
| "learning_rate": 2.0606060606060607e-06, | |
| "loss": 0.7456, | |
| "step": 796000 | |
| }, | |
| { | |
| "epoch": 25.9, | |
| "learning_rate": 2.0555555555555555e-06, | |
| "loss": 0.7457, | |
| "step": 796500 | |
| }, | |
| { | |
| "epoch": 25.92, | |
| "learning_rate": 2.0505050505050506e-06, | |
| "loss": 0.7448, | |
| "step": 797000 | |
| }, | |
| { | |
| "epoch": 25.94, | |
| "learning_rate": 2.0454545454545457e-06, | |
| "loss": 0.7452, | |
| "step": 797500 | |
| }, | |
| { | |
| "epoch": 25.95, | |
| "learning_rate": 2.0404040404040405e-06, | |
| "loss": 0.7459, | |
| "step": 798000 | |
| }, | |
| { | |
| "epoch": 25.97, | |
| "learning_rate": 2.0353535353535356e-06, | |
| "loss": 0.7447, | |
| "step": 798500 | |
| }, | |
| { | |
| "epoch": 25.99, | |
| "learning_rate": 2.0303030303030303e-06, | |
| "loss": 0.7451, | |
| "step": 799000 | |
| }, | |
| { | |
| "epoch": 26.0, | |
| "learning_rate": 2.0252525252525255e-06, | |
| "loss": 0.7446, | |
| "step": 799500 | |
| }, | |
| { | |
| "epoch": 26.02, | |
| "learning_rate": 2.02020202020202e-06, | |
| "loss": 0.7443, | |
| "step": 800000 | |
| }, | |
| { | |
| "epoch": 26.03, | |
| "learning_rate": 2.0151515151515153e-06, | |
| "loss": 0.7446, | |
| "step": 800500 | |
| }, | |
| { | |
| "epoch": 26.05, | |
| "learning_rate": 2.01010101010101e-06, | |
| "loss": 0.7445, | |
| "step": 801000 | |
| }, | |
| { | |
| "epoch": 26.07, | |
| "learning_rate": 2.005050505050505e-06, | |
| "loss": 0.7444, | |
| "step": 801500 | |
| }, | |
| { | |
| "epoch": 26.08, | |
| "learning_rate": 2.0000000000000003e-06, | |
| "loss": 0.7443, | |
| "step": 802000 | |
| }, | |
| { | |
| "epoch": 26.1, | |
| "learning_rate": 1.994949494949495e-06, | |
| "loss": 0.744, | |
| "step": 802500 | |
| }, | |
| { | |
| "epoch": 26.12, | |
| "learning_rate": 1.98989898989899e-06, | |
| "loss": 0.7439, | |
| "step": 803000 | |
| }, | |
| { | |
| "epoch": 26.13, | |
| "learning_rate": 1.984848484848485e-06, | |
| "loss": 0.7432, | |
| "step": 803500 | |
| }, | |
| { | |
| "epoch": 26.15, | |
| "learning_rate": 1.97979797979798e-06, | |
| "loss": 0.7442, | |
| "step": 804000 | |
| }, | |
| { | |
| "epoch": 26.16, | |
| "learning_rate": 1.9747474747474748e-06, | |
| "loss": 0.7434, | |
| "step": 804500 | |
| }, | |
| { | |
| "epoch": 26.18, | |
| "learning_rate": 1.96969696969697e-06, | |
| "loss": 0.7433, | |
| "step": 805000 | |
| }, | |
| { | |
| "epoch": 26.2, | |
| "learning_rate": 1.9646464646464646e-06, | |
| "loss": 0.7433, | |
| "step": 805500 | |
| }, | |
| { | |
| "epoch": 26.21, | |
| "learning_rate": 1.9595959595959598e-06, | |
| "loss": 0.7432, | |
| "step": 806000 | |
| }, | |
| { | |
| "epoch": 26.23, | |
| "learning_rate": 1.954545454545455e-06, | |
| "loss": 0.7428, | |
| "step": 806500 | |
| }, | |
| { | |
| "epoch": 26.25, | |
| "learning_rate": 1.9494949494949496e-06, | |
| "loss": 0.7429, | |
| "step": 807000 | |
| }, | |
| { | |
| "epoch": 26.26, | |
| "learning_rate": 1.944444444444445e-06, | |
| "loss": 0.7432, | |
| "step": 807500 | |
| }, | |
| { | |
| "epoch": 26.28, | |
| "learning_rate": 1.9393939393939395e-06, | |
| "loss": 0.7434, | |
| "step": 808000 | |
| }, | |
| { | |
| "epoch": 26.29, | |
| "learning_rate": 1.9343434343434347e-06, | |
| "loss": 0.7423, | |
| "step": 808500 | |
| }, | |
| { | |
| "epoch": 26.31, | |
| "learning_rate": 1.9292929292929294e-06, | |
| "loss": 0.743, | |
| "step": 809000 | |
| }, | |
| { | |
| "epoch": 26.33, | |
| "learning_rate": 1.924242424242424e-06, | |
| "loss": 0.7426, | |
| "step": 809500 | |
| }, | |
| { | |
| "epoch": 26.34, | |
| "learning_rate": 1.9191919191919192e-06, | |
| "loss": 0.7424, | |
| "step": 810000 | |
| }, | |
| { | |
| "epoch": 26.36, | |
| "learning_rate": 1.9141414141414144e-06, | |
| "loss": 0.7424, | |
| "step": 810500 | |
| }, | |
| { | |
| "epoch": 26.38, | |
| "learning_rate": 1.9090909090909095e-06, | |
| "loss": 0.7422, | |
| "step": 811000 | |
| }, | |
| { | |
| "epoch": 26.39, | |
| "learning_rate": 1.9040404040404042e-06, | |
| "loss": 0.7417, | |
| "step": 811500 | |
| }, | |
| { | |
| "epoch": 26.41, | |
| "learning_rate": 1.8989898989898992e-06, | |
| "loss": 0.7419, | |
| "step": 812000 | |
| }, | |
| { | |
| "epoch": 26.42, | |
| "learning_rate": 1.8939393939393941e-06, | |
| "loss": 0.7422, | |
| "step": 812500 | |
| }, | |
| { | |
| "epoch": 26.44, | |
| "learning_rate": 1.888888888888889e-06, | |
| "loss": 0.742, | |
| "step": 813000 | |
| }, | |
| { | |
| "epoch": 26.46, | |
| "learning_rate": 1.883838383838384e-06, | |
| "loss": 0.7416, | |
| "step": 813500 | |
| }, | |
| { | |
| "epoch": 26.47, | |
| "learning_rate": 1.878787878787879e-06, | |
| "loss": 0.7416, | |
| "step": 814000 | |
| }, | |
| { | |
| "epoch": 26.49, | |
| "learning_rate": 1.873737373737374e-06, | |
| "loss": 0.7417, | |
| "step": 814500 | |
| }, | |
| { | |
| "epoch": 26.51, | |
| "learning_rate": 1.868686868686869e-06, | |
| "loss": 0.7412, | |
| "step": 815000 | |
| }, | |
| { | |
| "epoch": 26.52, | |
| "learning_rate": 1.863636363636364e-06, | |
| "loss": 0.7417, | |
| "step": 815500 | |
| }, | |
| { | |
| "epoch": 26.54, | |
| "learning_rate": 1.8585858585858588e-06, | |
| "loss": 0.7409, | |
| "step": 816000 | |
| }, | |
| { | |
| "epoch": 26.55, | |
| "learning_rate": 1.8535353535353538e-06, | |
| "loss": 0.7412, | |
| "step": 816500 | |
| }, | |
| { | |
| "epoch": 26.57, | |
| "learning_rate": 1.8484848484848487e-06, | |
| "loss": 0.7414, | |
| "step": 817000 | |
| }, | |
| { | |
| "epoch": 26.59, | |
| "learning_rate": 1.8434343434343434e-06, | |
| "loss": 0.7409, | |
| "step": 817500 | |
| }, | |
| { | |
| "epoch": 26.6, | |
| "learning_rate": 1.8383838383838384e-06, | |
| "loss": 0.7404, | |
| "step": 818000 | |
| }, | |
| { | |
| "epoch": 26.62, | |
| "learning_rate": 1.8333333333333333e-06, | |
| "loss": 0.7405, | |
| "step": 818500 | |
| }, | |
| { | |
| "epoch": 26.64, | |
| "learning_rate": 1.8282828282828286e-06, | |
| "loss": 0.7403, | |
| "step": 819000 | |
| }, | |
| { | |
| "epoch": 26.65, | |
| "learning_rate": 1.8232323232323234e-06, | |
| "loss": 0.7404, | |
| "step": 819500 | |
| }, | |
| { | |
| "epoch": 26.67, | |
| "learning_rate": 1.8181818181818183e-06, | |
| "loss": 0.74, | |
| "step": 820000 | |
| }, | |
| { | |
| "epoch": 26.68, | |
| "learning_rate": 1.8131313131313132e-06, | |
| "loss": 0.74, | |
| "step": 820500 | |
| }, | |
| { | |
| "epoch": 26.7, | |
| "learning_rate": 1.8080808080808082e-06, | |
| "loss": 0.7399, | |
| "step": 821000 | |
| }, | |
| { | |
| "epoch": 26.72, | |
| "learning_rate": 1.803030303030303e-06, | |
| "loss": 0.74, | |
| "step": 821500 | |
| }, | |
| { | |
| "epoch": 26.73, | |
| "learning_rate": 1.797979797979798e-06, | |
| "loss": 0.7403, | |
| "step": 822000 | |
| }, | |
| { | |
| "epoch": 26.75, | |
| "learning_rate": 1.792929292929293e-06, | |
| "loss": 0.7403, | |
| "step": 822500 | |
| }, | |
| { | |
| "epoch": 26.77, | |
| "learning_rate": 1.787878787878788e-06, | |
| "loss": 0.7401, | |
| "step": 823000 | |
| }, | |
| { | |
| "epoch": 26.78, | |
| "learning_rate": 1.782828282828283e-06, | |
| "loss": 0.7394, | |
| "step": 823500 | |
| }, | |
| { | |
| "epoch": 26.8, | |
| "learning_rate": 1.777777777777778e-06, | |
| "loss": 0.7397, | |
| "step": 824000 | |
| }, | |
| { | |
| "epoch": 26.81, | |
| "learning_rate": 1.7727272727272729e-06, | |
| "loss": 0.7399, | |
| "step": 824500 | |
| }, | |
| { | |
| "epoch": 26.83, | |
| "learning_rate": 1.7676767676767678e-06, | |
| "loss": 0.7395, | |
| "step": 825000 | |
| }, | |
| { | |
| "epoch": 26.85, | |
| "learning_rate": 1.7626262626262628e-06, | |
| "loss": 0.7392, | |
| "step": 825500 | |
| }, | |
| { | |
| "epoch": 26.86, | |
| "learning_rate": 1.7575757575757577e-06, | |
| "loss": 0.739, | |
| "step": 826000 | |
| }, | |
| { | |
| "epoch": 26.88, | |
| "learning_rate": 1.7525252525252526e-06, | |
| "loss": 0.7388, | |
| "step": 826500 | |
| }, | |
| { | |
| "epoch": 26.9, | |
| "learning_rate": 1.7474747474747475e-06, | |
| "loss": 0.739, | |
| "step": 827000 | |
| }, | |
| { | |
| "epoch": 26.91, | |
| "learning_rate": 1.7424242424242427e-06, | |
| "loss": 0.739, | |
| "step": 827500 | |
| }, | |
| { | |
| "epoch": 26.93, | |
| "learning_rate": 1.7373737373737376e-06, | |
| "loss": 0.7385, | |
| "step": 828000 | |
| }, | |
| { | |
| "epoch": 26.94, | |
| "learning_rate": 1.7323232323232326e-06, | |
| "loss": 0.7385, | |
| "step": 828500 | |
| }, | |
| { | |
| "epoch": 26.96, | |
| "learning_rate": 1.7272727272727275e-06, | |
| "loss": 0.7386, | |
| "step": 829000 | |
| }, | |
| { | |
| "epoch": 26.98, | |
| "learning_rate": 1.7222222222222224e-06, | |
| "loss": 0.7383, | |
| "step": 829500 | |
| }, | |
| { | |
| "epoch": 26.99, | |
| "learning_rate": 1.7171717171717173e-06, | |
| "loss": 0.7378, | |
| "step": 830000 | |
| }, | |
| { | |
| "epoch": 27.01, | |
| "learning_rate": 1.7121212121212123e-06, | |
| "loss": 0.7385, | |
| "step": 830500 | |
| }, | |
| { | |
| "epoch": 27.03, | |
| "learning_rate": 1.707070707070707e-06, | |
| "loss": 0.738, | |
| "step": 831000 | |
| }, | |
| { | |
| "epoch": 27.04, | |
| "learning_rate": 1.7020202020202024e-06, | |
| "loss": 0.7381, | |
| "step": 831500 | |
| }, | |
| { | |
| "epoch": 27.06, | |
| "learning_rate": 1.6969696969696973e-06, | |
| "loss": 0.7384, | |
| "step": 832000 | |
| }, | |
| { | |
| "epoch": 27.07, | |
| "learning_rate": 1.6919191919191922e-06, | |
| "loss": 0.7379, | |
| "step": 832500 | |
| }, | |
| { | |
| "epoch": 27.09, | |
| "learning_rate": 1.6868686868686871e-06, | |
| "loss": 0.7377, | |
| "step": 833000 | |
| }, | |
| { | |
| "epoch": 27.11, | |
| "learning_rate": 1.6818181818181819e-06, | |
| "loss": 0.7379, | |
| "step": 833500 | |
| }, | |
| { | |
| "epoch": 27.12, | |
| "learning_rate": 1.6767676767676768e-06, | |
| "loss": 0.7377, | |
| "step": 834000 | |
| }, | |
| { | |
| "epoch": 27.14, | |
| "learning_rate": 1.6717171717171717e-06, | |
| "loss": 0.7375, | |
| "step": 834500 | |
| }, | |
| { | |
| "epoch": 27.16, | |
| "learning_rate": 1.6666666666666667e-06, | |
| "loss": 0.7375, | |
| "step": 835000 | |
| }, | |
| { | |
| "epoch": 27.17, | |
| "learning_rate": 1.6616161616161616e-06, | |
| "loss": 0.7369, | |
| "step": 835500 | |
| }, | |
| { | |
| "epoch": 27.19, | |
| "learning_rate": 1.6565656565656567e-06, | |
| "loss": 0.738, | |
| "step": 836000 | |
| }, | |
| { | |
| "epoch": 27.21, | |
| "learning_rate": 1.6515151515151517e-06, | |
| "loss": 0.7377, | |
| "step": 836500 | |
| }, | |
| { | |
| "epoch": 27.22, | |
| "learning_rate": 1.6464646464646466e-06, | |
| "loss": 0.7368, | |
| "step": 837000 | |
| }, | |
| { | |
| "epoch": 27.24, | |
| "learning_rate": 1.6414141414141415e-06, | |
| "loss": 0.7369, | |
| "step": 837500 | |
| }, | |
| { | |
| "epoch": 27.25, | |
| "learning_rate": 1.6363636363636365e-06, | |
| "loss": 0.7372, | |
| "step": 838000 | |
| }, | |
| { | |
| "epoch": 27.27, | |
| "learning_rate": 1.6313131313131314e-06, | |
| "loss": 0.7366, | |
| "step": 838500 | |
| }, | |
| { | |
| "epoch": 27.29, | |
| "learning_rate": 1.6262626262626263e-06, | |
| "loss": 0.7366, | |
| "step": 839000 | |
| }, | |
| { | |
| "epoch": 27.3, | |
| "learning_rate": 1.6212121212121213e-06, | |
| "loss": 0.7363, | |
| "step": 839500 | |
| }, | |
| { | |
| "epoch": 27.32, | |
| "learning_rate": 1.6161616161616164e-06, | |
| "loss": 0.7364, | |
| "step": 840000 | |
| }, | |
| { | |
| "epoch": 27.34, | |
| "learning_rate": 1.6111111111111113e-06, | |
| "loss": 0.7364, | |
| "step": 840500 | |
| }, | |
| { | |
| "epoch": 27.35, | |
| "learning_rate": 1.6060606060606063e-06, | |
| "loss": 0.7365, | |
| "step": 841000 | |
| }, | |
| { | |
| "epoch": 27.37, | |
| "learning_rate": 1.6010101010101012e-06, | |
| "loss": 0.7367, | |
| "step": 841500 | |
| }, | |
| { | |
| "epoch": 27.38, | |
| "learning_rate": 1.5959595959595961e-06, | |
| "loss": 0.7358, | |
| "step": 842000 | |
| }, | |
| { | |
| "epoch": 27.4, | |
| "learning_rate": 1.590909090909091e-06, | |
| "loss": 0.736, | |
| "step": 842500 | |
| }, | |
| { | |
| "epoch": 27.42, | |
| "learning_rate": 1.585858585858586e-06, | |
| "loss": 0.7366, | |
| "step": 843000 | |
| }, | |
| { | |
| "epoch": 27.43, | |
| "learning_rate": 1.580808080808081e-06, | |
| "loss": 0.7358, | |
| "step": 843500 | |
| }, | |
| { | |
| "epoch": 27.45, | |
| "learning_rate": 1.5757575757575759e-06, | |
| "loss": 0.7359, | |
| "step": 844000 | |
| }, | |
| { | |
| "epoch": 27.47, | |
| "learning_rate": 1.570707070707071e-06, | |
| "loss": 0.7362, | |
| "step": 844500 | |
| }, | |
| { | |
| "epoch": 27.48, | |
| "learning_rate": 1.565656565656566e-06, | |
| "loss": 0.7359, | |
| "step": 845000 | |
| }, | |
| { | |
| "epoch": 27.5, | |
| "learning_rate": 1.5606060606060609e-06, | |
| "loss": 0.7352, | |
| "step": 845500 | |
| }, | |
| { | |
| "epoch": 27.51, | |
| "learning_rate": 1.5555555555555558e-06, | |
| "loss": 0.7354, | |
| "step": 846000 | |
| }, | |
| { | |
| "epoch": 27.53, | |
| "learning_rate": 1.5505050505050507e-06, | |
| "loss": 0.7358, | |
| "step": 846500 | |
| }, | |
| { | |
| "epoch": 27.55, | |
| "learning_rate": 1.5454545454545454e-06, | |
| "loss": 0.7356, | |
| "step": 847000 | |
| }, | |
| { | |
| "epoch": 27.56, | |
| "learning_rate": 1.5404040404040404e-06, | |
| "loss": 0.7353, | |
| "step": 847500 | |
| }, | |
| { | |
| "epoch": 27.58, | |
| "learning_rate": 1.5353535353535353e-06, | |
| "loss": 0.7354, | |
| "step": 848000 | |
| }, | |
| { | |
| "epoch": 27.6, | |
| "learning_rate": 1.5303030303030302e-06, | |
| "loss": 0.7354, | |
| "step": 848500 | |
| }, | |
| { | |
| "epoch": 27.61, | |
| "learning_rate": 1.5252525252525254e-06, | |
| "loss": 0.7348, | |
| "step": 849000 | |
| }, | |
| { | |
| "epoch": 27.63, | |
| "learning_rate": 1.5202020202020203e-06, | |
| "loss": 0.7354, | |
| "step": 849500 | |
| }, | |
| { | |
| "epoch": 27.64, | |
| "learning_rate": 1.5151515151515152e-06, | |
| "loss": 0.7349, | |
| "step": 850000 | |
| }, | |
| { | |
| "epoch": 27.66, | |
| "learning_rate": 1.5101010101010102e-06, | |
| "loss": 0.7347, | |
| "step": 850500 | |
| }, | |
| { | |
| "epoch": 27.68, | |
| "learning_rate": 1.5050505050505051e-06, | |
| "loss": 0.7348, | |
| "step": 851000 | |
| }, | |
| { | |
| "epoch": 27.69, | |
| "learning_rate": 1.5e-06, | |
| "loss": 0.7346, | |
| "step": 851500 | |
| }, | |
| { | |
| "epoch": 27.71, | |
| "learning_rate": 1.494949494949495e-06, | |
| "loss": 0.7346, | |
| "step": 852000 | |
| }, | |
| { | |
| "epoch": 27.73, | |
| "learning_rate": 1.48989898989899e-06, | |
| "loss": 0.7343, | |
| "step": 852500 | |
| }, | |
| { | |
| "epoch": 27.74, | |
| "learning_rate": 1.484848484848485e-06, | |
| "loss": 0.7347, | |
| "step": 853000 | |
| }, | |
| { | |
| "epoch": 27.76, | |
| "learning_rate": 1.47979797979798e-06, | |
| "loss": 0.7347, | |
| "step": 853500 | |
| }, | |
| { | |
| "epoch": 27.77, | |
| "learning_rate": 1.474747474747475e-06, | |
| "loss": 0.7347, | |
| "step": 854000 | |
| }, | |
| { | |
| "epoch": 27.79, | |
| "learning_rate": 1.4696969696969698e-06, | |
| "loss": 0.7343, | |
| "step": 854500 | |
| }, | |
| { | |
| "epoch": 27.81, | |
| "learning_rate": 1.4646464646464648e-06, | |
| "loss": 0.7344, | |
| "step": 855000 | |
| }, | |
| { | |
| "epoch": 27.82, | |
| "learning_rate": 1.4595959595959597e-06, | |
| "loss": 0.7338, | |
| "step": 855500 | |
| }, | |
| { | |
| "epoch": 27.84, | |
| "learning_rate": 1.4545454545454546e-06, | |
| "loss": 0.7339, | |
| "step": 856000 | |
| }, | |
| { | |
| "epoch": 27.86, | |
| "learning_rate": 1.4494949494949496e-06, | |
| "loss": 0.7336, | |
| "step": 856500 | |
| }, | |
| { | |
| "epoch": 27.87, | |
| "learning_rate": 1.4444444444444445e-06, | |
| "loss": 0.734, | |
| "step": 857000 | |
| }, | |
| { | |
| "epoch": 27.89, | |
| "learning_rate": 1.4393939393939396e-06, | |
| "loss": 0.7339, | |
| "step": 857500 | |
| }, | |
| { | |
| "epoch": 27.9, | |
| "learning_rate": 1.4343434343434346e-06, | |
| "loss": 0.7335, | |
| "step": 858000 | |
| }, | |
| { | |
| "epoch": 27.92, | |
| "learning_rate": 1.4292929292929295e-06, | |
| "loss": 0.7337, | |
| "step": 858500 | |
| }, | |
| { | |
| "epoch": 27.94, | |
| "learning_rate": 1.4242424242424244e-06, | |
| "loss": 0.7334, | |
| "step": 859000 | |
| }, | |
| { | |
| "epoch": 27.95, | |
| "learning_rate": 1.4191919191919194e-06, | |
| "loss": 0.7335, | |
| "step": 859500 | |
| }, | |
| { | |
| "epoch": 27.97, | |
| "learning_rate": 1.4141414141414143e-06, | |
| "loss": 0.7335, | |
| "step": 860000 | |
| }, | |
| { | |
| "epoch": 27.99, | |
| "learning_rate": 1.409090909090909e-06, | |
| "loss": 0.7331, | |
| "step": 860500 | |
| }, | |
| { | |
| "epoch": 28.0, | |
| "learning_rate": 1.404040404040404e-06, | |
| "loss": 0.7332, | |
| "step": 861000 | |
| }, | |
| { | |
| "epoch": 28.02, | |
| "learning_rate": 1.3989898989898993e-06, | |
| "loss": 0.7329, | |
| "step": 861500 | |
| }, | |
| { | |
| "epoch": 28.03, | |
| "learning_rate": 1.3939393939393942e-06, | |
| "loss": 0.733, | |
| "step": 862000 | |
| }, | |
| { | |
| "epoch": 28.05, | |
| "learning_rate": 1.3888888888888892e-06, | |
| "loss": 0.7332, | |
| "step": 862500 | |
| }, | |
| { | |
| "epoch": 28.07, | |
| "learning_rate": 1.3838383838383839e-06, | |
| "loss": 0.7336, | |
| "step": 863000 | |
| }, | |
| { | |
| "epoch": 28.08, | |
| "learning_rate": 1.3787878787878788e-06, | |
| "loss": 0.7325, | |
| "step": 863500 | |
| }, | |
| { | |
| "epoch": 28.1, | |
| "learning_rate": 1.3737373737373738e-06, | |
| "loss": 0.7331, | |
| "step": 864000 | |
| }, | |
| { | |
| "epoch": 28.12, | |
| "learning_rate": 1.3686868686868687e-06, | |
| "loss": 0.7329, | |
| "step": 864500 | |
| }, | |
| { | |
| "epoch": 28.13, | |
| "learning_rate": 1.3636363636363636e-06, | |
| "loss": 0.733, | |
| "step": 865000 | |
| }, | |
| { | |
| "epoch": 28.15, | |
| "learning_rate": 1.3585858585858585e-06, | |
| "loss": 0.7318, | |
| "step": 865500 | |
| }, | |
| { | |
| "epoch": 28.16, | |
| "learning_rate": 1.3535353535353537e-06, | |
| "loss": 0.7324, | |
| "step": 866000 | |
| }, | |
| { | |
| "epoch": 28.18, | |
| "learning_rate": 1.3484848484848486e-06, | |
| "loss": 0.7327, | |
| "step": 866500 | |
| }, | |
| { | |
| "epoch": 28.2, | |
| "learning_rate": 1.3434343434343436e-06, | |
| "loss": 0.7319, | |
| "step": 867000 | |
| }, | |
| { | |
| "epoch": 28.21, | |
| "learning_rate": 1.3383838383838385e-06, | |
| "loss": 0.7326, | |
| "step": 867500 | |
| }, | |
| { | |
| "epoch": 28.23, | |
| "learning_rate": 1.3333333333333334e-06, | |
| "loss": 0.732, | |
| "step": 868000 | |
| }, | |
| { | |
| "epoch": 28.25, | |
| "learning_rate": 1.3282828282828283e-06, | |
| "loss": 0.7321, | |
| "step": 868500 | |
| }, | |
| { | |
| "epoch": 28.26, | |
| "learning_rate": 1.3232323232323233e-06, | |
| "loss": 0.7319, | |
| "step": 869000 | |
| }, | |
| { | |
| "epoch": 28.28, | |
| "learning_rate": 1.3181818181818182e-06, | |
| "loss": 0.7328, | |
| "step": 869500 | |
| }, | |
| { | |
| "epoch": 28.29, | |
| "learning_rate": 1.3131313131313134e-06, | |
| "loss": 0.7317, | |
| "step": 870000 | |
| }, | |
| { | |
| "epoch": 28.31, | |
| "learning_rate": 1.3080808080808083e-06, | |
| "loss": 0.7326, | |
| "step": 870500 | |
| }, | |
| { | |
| "epoch": 28.33, | |
| "learning_rate": 1.3030303030303032e-06, | |
| "loss": 0.7322, | |
| "step": 871000 | |
| }, | |
| { | |
| "epoch": 28.34, | |
| "learning_rate": 1.2979797979797981e-06, | |
| "loss": 0.7318, | |
| "step": 871500 | |
| }, | |
| { | |
| "epoch": 28.36, | |
| "learning_rate": 1.292929292929293e-06, | |
| "loss": 0.7319, | |
| "step": 872000 | |
| }, | |
| { | |
| "epoch": 28.38, | |
| "learning_rate": 1.287878787878788e-06, | |
| "loss": 0.7317, | |
| "step": 872500 | |
| }, | |
| { | |
| "epoch": 28.39, | |
| "learning_rate": 1.282828282828283e-06, | |
| "loss": 0.7317, | |
| "step": 873000 | |
| }, | |
| { | |
| "epoch": 28.41, | |
| "learning_rate": 1.2777777777777779e-06, | |
| "loss": 0.732, | |
| "step": 873500 | |
| }, | |
| { | |
| "epoch": 28.42, | |
| "learning_rate": 1.2727272727272728e-06, | |
| "loss": 0.7315, | |
| "step": 874000 | |
| }, | |
| { | |
| "epoch": 28.44, | |
| "learning_rate": 1.267676767676768e-06, | |
| "loss": 0.7317, | |
| "step": 874500 | |
| }, | |
| { | |
| "epoch": 28.46, | |
| "learning_rate": 1.2626262626262629e-06, | |
| "loss": 0.7317, | |
| "step": 875000 | |
| }, | |
| { | |
| "epoch": 28.47, | |
| "learning_rate": 1.2575757575757578e-06, | |
| "loss": 0.7311, | |
| "step": 875500 | |
| }, | |
| { | |
| "epoch": 28.49, | |
| "learning_rate": 1.2525252525252527e-06, | |
| "loss": 0.7312, | |
| "step": 876000 | |
| }, | |
| { | |
| "epoch": 28.51, | |
| "learning_rate": 1.2474747474747475e-06, | |
| "loss": 0.731, | |
| "step": 876500 | |
| }, | |
| { | |
| "epoch": 28.52, | |
| "learning_rate": 1.2424242424242424e-06, | |
| "loss": 0.7316, | |
| "step": 877000 | |
| }, | |
| { | |
| "epoch": 28.54, | |
| "learning_rate": 1.2373737373737375e-06, | |
| "loss": 0.7311, | |
| "step": 877500 | |
| }, | |
| { | |
| "epoch": 28.55, | |
| "learning_rate": 1.2323232323232325e-06, | |
| "loss": 0.7313, | |
| "step": 878000 | |
| }, | |
| { | |
| "epoch": 28.57, | |
| "learning_rate": 1.2272727272727274e-06, | |
| "loss": 0.7317, | |
| "step": 878500 | |
| }, | |
| { | |
| "epoch": 28.59, | |
| "learning_rate": 1.2222222222222223e-06, | |
| "loss": 0.7309, | |
| "step": 879000 | |
| }, | |
| { | |
| "epoch": 28.6, | |
| "learning_rate": 1.2171717171717173e-06, | |
| "loss": 0.7308, | |
| "step": 879500 | |
| }, | |
| { | |
| "epoch": 28.62, | |
| "learning_rate": 1.2121212121212122e-06, | |
| "loss": 0.7302, | |
| "step": 880000 | |
| }, | |
| { | |
| "epoch": 28.64, | |
| "learning_rate": 1.2070707070707071e-06, | |
| "loss": 0.731, | |
| "step": 880500 | |
| }, | |
| { | |
| "epoch": 28.65, | |
| "learning_rate": 1.202020202020202e-06, | |
| "loss": 0.7309, | |
| "step": 881000 | |
| }, | |
| { | |
| "epoch": 28.67, | |
| "learning_rate": 1.196969696969697e-06, | |
| "loss": 0.7312, | |
| "step": 881500 | |
| }, | |
| { | |
| "epoch": 28.68, | |
| "learning_rate": 1.1919191919191921e-06, | |
| "loss": 0.7308, | |
| "step": 882000 | |
| }, | |
| { | |
| "epoch": 28.7, | |
| "learning_rate": 1.186868686868687e-06, | |
| "loss": 0.7306, | |
| "step": 882500 | |
| }, | |
| { | |
| "epoch": 28.72, | |
| "learning_rate": 1.181818181818182e-06, | |
| "loss": 0.7308, | |
| "step": 883000 | |
| }, | |
| { | |
| "epoch": 28.73, | |
| "learning_rate": 1.1767676767676767e-06, | |
| "loss": 0.7303, | |
| "step": 883500 | |
| }, | |
| { | |
| "epoch": 28.75, | |
| "learning_rate": 1.1717171717171719e-06, | |
| "loss": 0.7307, | |
| "step": 884000 | |
| }, | |
| { | |
| "epoch": 28.77, | |
| "learning_rate": 1.1666666666666668e-06, | |
| "loss": 0.7306, | |
| "step": 884500 | |
| }, | |
| { | |
| "epoch": 28.78, | |
| "learning_rate": 1.1616161616161617e-06, | |
| "loss": 0.7307, | |
| "step": 885000 | |
| }, | |
| { | |
| "epoch": 28.8, | |
| "learning_rate": 1.1565656565656567e-06, | |
| "loss": 0.7302, | |
| "step": 885500 | |
| }, | |
| { | |
| "epoch": 28.81, | |
| "learning_rate": 1.1515151515151516e-06, | |
| "loss": 0.7302, | |
| "step": 886000 | |
| }, | |
| { | |
| "epoch": 28.83, | |
| "learning_rate": 1.1464646464646465e-06, | |
| "loss": 0.7304, | |
| "step": 886500 | |
| }, | |
| { | |
| "epoch": 28.85, | |
| "learning_rate": 1.1414141414141414e-06, | |
| "loss": 0.73, | |
| "step": 887000 | |
| }, | |
| { | |
| "epoch": 28.86, | |
| "learning_rate": 1.1363636363636364e-06, | |
| "loss": 0.7298, | |
| "step": 887500 | |
| }, | |
| { | |
| "epoch": 28.88, | |
| "learning_rate": 1.1313131313131315e-06, | |
| "loss": 0.7299, | |
| "step": 888000 | |
| }, | |
| { | |
| "epoch": 28.9, | |
| "learning_rate": 1.1262626262626265e-06, | |
| "loss": 0.7302, | |
| "step": 888500 | |
| }, | |
| { | |
| "epoch": 28.91, | |
| "learning_rate": 1.1212121212121214e-06, | |
| "loss": 0.7303, | |
| "step": 889000 | |
| }, | |
| { | |
| "epoch": 28.93, | |
| "learning_rate": 1.1161616161616163e-06, | |
| "loss": 0.7299, | |
| "step": 889500 | |
| }, | |
| { | |
| "epoch": 28.94, | |
| "learning_rate": 1.111111111111111e-06, | |
| "loss": 0.7296, | |
| "step": 890000 | |
| }, | |
| { | |
| "epoch": 28.96, | |
| "learning_rate": 1.1060606060606062e-06, | |
| "loss": 0.7298, | |
| "step": 890500 | |
| }, | |
| { | |
| "epoch": 28.98, | |
| "learning_rate": 1.1010101010101011e-06, | |
| "loss": 0.7298, | |
| "step": 891000 | |
| }, | |
| { | |
| "epoch": 28.99, | |
| "learning_rate": 1.095959595959596e-06, | |
| "loss": 0.73, | |
| "step": 891500 | |
| }, | |
| { | |
| "epoch": 29.01, | |
| "learning_rate": 1.090909090909091e-06, | |
| "loss": 0.7298, | |
| "step": 892000 | |
| }, | |
| { | |
| "epoch": 29.03, | |
| "learning_rate": 1.085858585858586e-06, | |
| "loss": 0.7295, | |
| "step": 892500 | |
| }, | |
| { | |
| "epoch": 29.04, | |
| "learning_rate": 1.0808080808080808e-06, | |
| "loss": 0.7292, | |
| "step": 893000 | |
| }, | |
| { | |
| "epoch": 29.06, | |
| "learning_rate": 1.0757575757575758e-06, | |
| "loss": 0.7294, | |
| "step": 893500 | |
| }, | |
| { | |
| "epoch": 29.08, | |
| "learning_rate": 1.0707070707070707e-06, | |
| "loss": 0.7294, | |
| "step": 894000 | |
| }, | |
| { | |
| "epoch": 29.09, | |
| "learning_rate": 1.0656565656565658e-06, | |
| "loss": 0.7298, | |
| "step": 894500 | |
| }, | |
| { | |
| "epoch": 29.11, | |
| "learning_rate": 1.0606060606060608e-06, | |
| "loss": 0.7296, | |
| "step": 895000 | |
| }, | |
| { | |
| "epoch": 29.12, | |
| "learning_rate": 1.0555555555555557e-06, | |
| "loss": 0.7298, | |
| "step": 895500 | |
| }, | |
| { | |
| "epoch": 29.14, | |
| "learning_rate": 1.0505050505050506e-06, | |
| "loss": 0.7298, | |
| "step": 896000 | |
| }, | |
| { | |
| "epoch": 29.16, | |
| "learning_rate": 1.0454545454545456e-06, | |
| "loss": 0.7293, | |
| "step": 896500 | |
| }, | |
| { | |
| "epoch": 29.17, | |
| "learning_rate": 1.0404040404040405e-06, | |
| "loss": 0.7288, | |
| "step": 897000 | |
| }, | |
| { | |
| "epoch": 29.19, | |
| "learning_rate": 1.0353535353535354e-06, | |
| "loss": 0.7295, | |
| "step": 897500 | |
| }, | |
| { | |
| "epoch": 29.21, | |
| "learning_rate": 1.0303030303030304e-06, | |
| "loss": 0.7294, | |
| "step": 898000 | |
| }, | |
| { | |
| "epoch": 29.22, | |
| "learning_rate": 1.0252525252525253e-06, | |
| "loss": 0.7293, | |
| "step": 898500 | |
| }, | |
| { | |
| "epoch": 29.24, | |
| "learning_rate": 1.0202020202020202e-06, | |
| "loss": 0.7295, | |
| "step": 899000 | |
| }, | |
| { | |
| "epoch": 29.25, | |
| "learning_rate": 1.0151515151515152e-06, | |
| "loss": 0.7291, | |
| "step": 899500 | |
| }, | |
| { | |
| "epoch": 29.27, | |
| "learning_rate": 1.01010101010101e-06, | |
| "loss": 0.7292, | |
| "step": 900000 | |
| }, | |
| { | |
| "epoch": 29.29, | |
| "learning_rate": 1.005050505050505e-06, | |
| "loss": 0.7289, | |
| "step": 900500 | |
| }, | |
| { | |
| "epoch": 29.3, | |
| "learning_rate": 1.0000000000000002e-06, | |
| "loss": 0.7289, | |
| "step": 901000 | |
| }, | |
| { | |
| "epoch": 29.32, | |
| "learning_rate": 9.94949494949495e-07, | |
| "loss": 0.7292, | |
| "step": 901500 | |
| }, | |
| { | |
| "epoch": 29.34, | |
| "learning_rate": 9.8989898989899e-07, | |
| "loss": 0.7291, | |
| "step": 902000 | |
| }, | |
| { | |
| "epoch": 29.35, | |
| "learning_rate": 9.84848484848485e-07, | |
| "loss": 0.7288, | |
| "step": 902500 | |
| }, | |
| { | |
| "epoch": 29.37, | |
| "learning_rate": 9.797979797979799e-07, | |
| "loss": 0.7287, | |
| "step": 903000 | |
| }, | |
| { | |
| "epoch": 29.38, | |
| "learning_rate": 9.747474747474748e-07, | |
| "loss": 0.7287, | |
| "step": 903500 | |
| }, | |
| { | |
| "epoch": 29.4, | |
| "learning_rate": 9.696969696969698e-07, | |
| "loss": 0.7286, | |
| "step": 904000 | |
| }, | |
| { | |
| "epoch": 29.42, | |
| "learning_rate": 9.646464646464647e-07, | |
| "loss": 0.729, | |
| "step": 904500 | |
| }, | |
| { | |
| "epoch": 29.43, | |
| "learning_rate": 9.595959595959596e-07, | |
| "loss": 0.7284, | |
| "step": 905000 | |
| }, | |
| { | |
| "epoch": 29.45, | |
| "learning_rate": 9.545454545454548e-07, | |
| "loss": 0.7285, | |
| "step": 905500 | |
| }, | |
| { | |
| "epoch": 29.47, | |
| "learning_rate": 9.494949494949496e-07, | |
| "loss": 0.7286, | |
| "step": 906000 | |
| }, | |
| { | |
| "epoch": 29.48, | |
| "learning_rate": 9.444444444444445e-07, | |
| "loss": 0.7281, | |
| "step": 906500 | |
| }, | |
| { | |
| "epoch": 29.5, | |
| "learning_rate": 9.393939393939395e-07, | |
| "loss": 0.7286, | |
| "step": 907000 | |
| }, | |
| { | |
| "epoch": 29.51, | |
| "learning_rate": 9.343434343434345e-07, | |
| "loss": 0.7283, | |
| "step": 907500 | |
| }, | |
| { | |
| "epoch": 29.53, | |
| "learning_rate": 9.292929292929294e-07, | |
| "loss": 0.7284, | |
| "step": 908000 | |
| }, | |
| { | |
| "epoch": 29.55, | |
| "learning_rate": 9.242424242424244e-07, | |
| "loss": 0.7282, | |
| "step": 908500 | |
| }, | |
| { | |
| "epoch": 29.56, | |
| "learning_rate": 9.191919191919192e-07, | |
| "loss": 0.7281, | |
| "step": 909000 | |
| }, | |
| { | |
| "epoch": 29.58, | |
| "learning_rate": 9.141414141414143e-07, | |
| "loss": 0.7287, | |
| "step": 909500 | |
| }, | |
| { | |
| "epoch": 29.6, | |
| "learning_rate": 9.090909090909091e-07, | |
| "loss": 0.7282, | |
| "step": 910000 | |
| }, | |
| { | |
| "epoch": 29.61, | |
| "learning_rate": 9.040404040404041e-07, | |
| "loss": 0.728, | |
| "step": 910500 | |
| }, | |
| { | |
| "epoch": 29.63, | |
| "learning_rate": 8.98989898989899e-07, | |
| "loss": 0.7278, | |
| "step": 911000 | |
| }, | |
| { | |
| "epoch": 29.64, | |
| "learning_rate": 8.93939393939394e-07, | |
| "loss": 0.7287, | |
| "step": 911500 | |
| }, | |
| { | |
| "epoch": 29.66, | |
| "learning_rate": 8.88888888888889e-07, | |
| "loss": 0.7282, | |
| "step": 912000 | |
| }, | |
| { | |
| "epoch": 29.68, | |
| "learning_rate": 8.838383838383839e-07, | |
| "loss": 0.7282, | |
| "step": 912500 | |
| }, | |
| { | |
| "epoch": 29.69, | |
| "learning_rate": 8.787878787878788e-07, | |
| "loss": 0.7281, | |
| "step": 913000 | |
| }, | |
| { | |
| "epoch": 29.71, | |
| "learning_rate": 8.737373737373738e-07, | |
| "loss": 0.7279, | |
| "step": 913500 | |
| }, | |
| { | |
| "epoch": 29.73, | |
| "learning_rate": 8.686868686868688e-07, | |
| "loss": 0.7285, | |
| "step": 914000 | |
| }, | |
| { | |
| "epoch": 29.74, | |
| "learning_rate": 8.636363636363637e-07, | |
| "loss": 0.7277, | |
| "step": 914500 | |
| }, | |
| { | |
| "epoch": 29.76, | |
| "learning_rate": 8.585858585858587e-07, | |
| "loss": 0.7281, | |
| "step": 915000 | |
| }, | |
| { | |
| "epoch": 29.77, | |
| "learning_rate": 8.535353535353535e-07, | |
| "loss": 0.7276, | |
| "step": 915500 | |
| }, | |
| { | |
| "epoch": 29.79, | |
| "learning_rate": 8.484848484848486e-07, | |
| "loss": 0.7278, | |
| "step": 916000 | |
| }, | |
| { | |
| "epoch": 29.81, | |
| "learning_rate": 8.434343434343436e-07, | |
| "loss": 0.7277, | |
| "step": 916500 | |
| }, | |
| { | |
| "epoch": 29.82, | |
| "learning_rate": 8.383838383838384e-07, | |
| "loss": 0.7279, | |
| "step": 917000 | |
| }, | |
| { | |
| "epoch": 29.84, | |
| "learning_rate": 8.333333333333333e-07, | |
| "loss": 0.7276, | |
| "step": 917500 | |
| }, | |
| { | |
| "epoch": 29.86, | |
| "learning_rate": 8.282828282828284e-07, | |
| "loss": 0.7276, | |
| "step": 918000 | |
| }, | |
| { | |
| "epoch": 29.87, | |
| "learning_rate": 8.232323232323233e-07, | |
| "loss": 0.7277, | |
| "step": 918500 | |
| }, | |
| { | |
| "epoch": 29.89, | |
| "learning_rate": 8.181818181818182e-07, | |
| "loss": 0.7276, | |
| "step": 919000 | |
| }, | |
| { | |
| "epoch": 29.9, | |
| "learning_rate": 8.131313131313132e-07, | |
| "loss": 0.7273, | |
| "step": 919500 | |
| }, | |
| { | |
| "epoch": 29.92, | |
| "learning_rate": 8.080808080808082e-07, | |
| "loss": 0.7274, | |
| "step": 920000 | |
| }, | |
| { | |
| "epoch": 29.94, | |
| "learning_rate": 8.030303030303031e-07, | |
| "loss": 0.7277, | |
| "step": 920500 | |
| }, | |
| { | |
| "epoch": 29.95, | |
| "learning_rate": 7.979797979797981e-07, | |
| "loss": 0.7275, | |
| "step": 921000 | |
| }, | |
| { | |
| "epoch": 29.97, | |
| "learning_rate": 7.92929292929293e-07, | |
| "loss": 0.7274, | |
| "step": 921500 | |
| }, | |
| { | |
| "epoch": 29.99, | |
| "learning_rate": 7.878787878787879e-07, | |
| "loss": 0.7269, | |
| "step": 922000 | |
| }, | |
| { | |
| "epoch": 30.0, | |
| "learning_rate": 7.82828282828283e-07, | |
| "loss": 0.7276, | |
| "step": 922500 | |
| }, | |
| { | |
| "epoch": 30.02, | |
| "learning_rate": 7.777777777777779e-07, | |
| "loss": 0.7273, | |
| "step": 923000 | |
| }, | |
| { | |
| "epoch": 30.03, | |
| "learning_rate": 7.727272727272727e-07, | |
| "loss": 0.7275, | |
| "step": 923500 | |
| }, | |
| { | |
| "epoch": 30.05, | |
| "learning_rate": 7.676767676767677e-07, | |
| "loss": 0.7276, | |
| "step": 924000 | |
| }, | |
| { | |
| "epoch": 30.07, | |
| "learning_rate": 7.626262626262627e-07, | |
| "loss": 0.7272, | |
| "step": 924500 | |
| }, | |
| { | |
| "epoch": 30.08, | |
| "learning_rate": 7.575757575757576e-07, | |
| "loss": 0.7269, | |
| "step": 925000 | |
| }, | |
| { | |
| "epoch": 30.1, | |
| "learning_rate": 7.525252525252526e-07, | |
| "loss": 0.7269, | |
| "step": 925500 | |
| }, | |
| { | |
| "epoch": 30.12, | |
| "learning_rate": 7.474747474747475e-07, | |
| "loss": 0.7274, | |
| "step": 926000 | |
| }, | |
| { | |
| "epoch": 30.13, | |
| "learning_rate": 7.424242424242425e-07, | |
| "loss": 0.7272, | |
| "step": 926500 | |
| }, | |
| { | |
| "epoch": 30.15, | |
| "learning_rate": 7.373737373737375e-07, | |
| "loss": 0.7269, | |
| "step": 927000 | |
| }, | |
| { | |
| "epoch": 30.16, | |
| "learning_rate": 7.323232323232324e-07, | |
| "loss": 0.727, | |
| "step": 927500 | |
| }, | |
| { | |
| "epoch": 30.18, | |
| "learning_rate": 7.272727272727273e-07, | |
| "loss": 0.7265, | |
| "step": 928000 | |
| }, | |
| { | |
| "epoch": 30.2, | |
| "learning_rate": 7.222222222222222e-07, | |
| "loss": 0.727, | |
| "step": 928500 | |
| }, | |
| { | |
| "epoch": 30.21, | |
| "learning_rate": 7.171717171717173e-07, | |
| "loss": 0.7272, | |
| "step": 929000 | |
| }, | |
| { | |
| "epoch": 30.23, | |
| "learning_rate": 7.121212121212122e-07, | |
| "loss": 0.7275, | |
| "step": 929500 | |
| }, | |
| { | |
| "epoch": 30.25, | |
| "learning_rate": 7.070707070707071e-07, | |
| "loss": 0.7272, | |
| "step": 930000 | |
| }, | |
| { | |
| "epoch": 30.26, | |
| "learning_rate": 7.02020202020202e-07, | |
| "loss": 0.7266, | |
| "step": 930500 | |
| }, | |
| { | |
| "epoch": 30.28, | |
| "learning_rate": 6.969696969696971e-07, | |
| "loss": 0.7271, | |
| "step": 931000 | |
| }, | |
| { | |
| "epoch": 30.29, | |
| "learning_rate": 6.919191919191919e-07, | |
| "loss": 0.7265, | |
| "step": 931500 | |
| }, | |
| { | |
| "epoch": 30.31, | |
| "learning_rate": 6.868686868686869e-07, | |
| "loss": 0.7268, | |
| "step": 932000 | |
| }, | |
| { | |
| "epoch": 30.33, | |
| "learning_rate": 6.818181818181818e-07, | |
| "loss": 0.7268, | |
| "step": 932500 | |
| }, | |
| { | |
| "epoch": 30.34, | |
| "learning_rate": 6.767676767676768e-07, | |
| "loss": 0.7266, | |
| "step": 933000 | |
| }, | |
| { | |
| "epoch": 30.36, | |
| "learning_rate": 6.717171717171718e-07, | |
| "loss": 0.7267, | |
| "step": 933500 | |
| }, | |
| { | |
| "epoch": 30.38, | |
| "learning_rate": 6.666666666666667e-07, | |
| "loss": 0.7264, | |
| "step": 934000 | |
| }, | |
| { | |
| "epoch": 30.39, | |
| "learning_rate": 6.616161616161616e-07, | |
| "loss": 0.7264, | |
| "step": 934500 | |
| }, | |
| { | |
| "epoch": 30.41, | |
| "learning_rate": 6.565656565656567e-07, | |
| "loss": 0.726, | |
| "step": 935000 | |
| }, | |
| { | |
| "epoch": 30.42, | |
| "learning_rate": 6.515151515151516e-07, | |
| "loss": 0.7263, | |
| "step": 935500 | |
| }, | |
| { | |
| "epoch": 30.44, | |
| "learning_rate": 6.464646464646465e-07, | |
| "loss": 0.7265, | |
| "step": 936000 | |
| }, | |
| { | |
| "epoch": 30.46, | |
| "learning_rate": 6.414141414141415e-07, | |
| "loss": 0.7267, | |
| "step": 936500 | |
| }, | |
| { | |
| "epoch": 30.47, | |
| "learning_rate": 6.363636363636364e-07, | |
| "loss": 0.7266, | |
| "step": 937000 | |
| }, | |
| { | |
| "epoch": 30.49, | |
| "learning_rate": 6.313131313131314e-07, | |
| "loss": 0.7266, | |
| "step": 937500 | |
| }, | |
| { | |
| "epoch": 30.51, | |
| "learning_rate": 6.262626262626264e-07, | |
| "loss": 0.7265, | |
| "step": 938000 | |
| }, | |
| { | |
| "epoch": 30.52, | |
| "learning_rate": 6.212121212121212e-07, | |
| "loss": 0.7266, | |
| "step": 938500 | |
| }, | |
| { | |
| "epoch": 30.54, | |
| "learning_rate": 6.161616161616162e-07, | |
| "loss": 0.7263, | |
| "step": 939000 | |
| }, | |
| { | |
| "epoch": 30.55, | |
| "learning_rate": 6.111111111111112e-07, | |
| "loss": 0.7269, | |
| "step": 939500 | |
| }, | |
| { | |
| "epoch": 30.57, | |
| "learning_rate": 6.060606060606061e-07, | |
| "loss": 0.7264, | |
| "step": 940000 | |
| }, | |
| { | |
| "epoch": 30.59, | |
| "learning_rate": 6.01010101010101e-07, | |
| "loss": 0.7259, | |
| "step": 940500 | |
| }, | |
| { | |
| "epoch": 30.6, | |
| "learning_rate": 5.959595959595961e-07, | |
| "loss": 0.727, | |
| "step": 941000 | |
| }, | |
| { | |
| "epoch": 30.62, | |
| "learning_rate": 5.90909090909091e-07, | |
| "loss": 0.7261, | |
| "step": 941500 | |
| }, | |
| { | |
| "epoch": 30.64, | |
| "learning_rate": 5.858585858585859e-07, | |
| "loss": 0.7261, | |
| "step": 942000 | |
| }, | |
| { | |
| "epoch": 30.65, | |
| "learning_rate": 5.808080808080809e-07, | |
| "loss": 0.7262, | |
| "step": 942500 | |
| }, | |
| { | |
| "epoch": 30.67, | |
| "learning_rate": 5.757575757575758e-07, | |
| "loss": 0.7271, | |
| "step": 943000 | |
| }, | |
| { | |
| "epoch": 30.68, | |
| "learning_rate": 5.707070707070707e-07, | |
| "loss": 0.726, | |
| "step": 943500 | |
| }, | |
| { | |
| "epoch": 30.7, | |
| "learning_rate": 5.656565656565658e-07, | |
| "loss": 0.7262, | |
| "step": 944000 | |
| }, | |
| { | |
| "epoch": 30.72, | |
| "learning_rate": 5.606060606060607e-07, | |
| "loss": 0.7261, | |
| "step": 944500 | |
| }, | |
| { | |
| "epoch": 30.73, | |
| "learning_rate": 5.555555555555555e-07, | |
| "loss": 0.7267, | |
| "step": 945000 | |
| }, | |
| { | |
| "epoch": 30.75, | |
| "learning_rate": 5.505050505050506e-07, | |
| "loss": 0.7259, | |
| "step": 945500 | |
| }, | |
| { | |
| "epoch": 30.77, | |
| "learning_rate": 5.454545454545455e-07, | |
| "loss": 0.7263, | |
| "step": 946000 | |
| }, | |
| { | |
| "epoch": 30.78, | |
| "learning_rate": 5.404040404040404e-07, | |
| "loss": 0.726, | |
| "step": 946500 | |
| }, | |
| { | |
| "epoch": 30.8, | |
| "learning_rate": 5.353535353535354e-07, | |
| "loss": 0.7261, | |
| "step": 947000 | |
| }, | |
| { | |
| "epoch": 30.82, | |
| "learning_rate": 5.303030303030304e-07, | |
| "loss": 0.7256, | |
| "step": 947500 | |
| }, | |
| { | |
| "epoch": 30.83, | |
| "learning_rate": 5.252525252525253e-07, | |
| "loss": 0.7256, | |
| "step": 948000 | |
| }, | |
| { | |
| "epoch": 30.85, | |
| "learning_rate": 5.202020202020203e-07, | |
| "loss": 0.7262, | |
| "step": 948500 | |
| }, | |
| { | |
| "epoch": 30.86, | |
| "learning_rate": 5.151515151515152e-07, | |
| "loss": 0.7256, | |
| "step": 949000 | |
| }, | |
| { | |
| "epoch": 30.88, | |
| "learning_rate": 5.101010101010101e-07, | |
| "loss": 0.726, | |
| "step": 949500 | |
| }, | |
| { | |
| "epoch": 30.9, | |
| "learning_rate": 5.05050505050505e-07, | |
| "loss": 0.7259, | |
| "step": 950000 | |
| }, | |
| { | |
| "epoch": 30.91, | |
| "learning_rate": 5.000000000000001e-07, | |
| "loss": 0.7258, | |
| "step": 950500 | |
| }, | |
| { | |
| "epoch": 30.93, | |
| "learning_rate": 4.94949494949495e-07, | |
| "loss": 0.726, | |
| "step": 951000 | |
| }, | |
| { | |
| "epoch": 30.95, | |
| "learning_rate": 4.898989898989899e-07, | |
| "loss": 0.7255, | |
| "step": 951500 | |
| }, | |
| { | |
| "epoch": 30.96, | |
| "learning_rate": 4.848484848484849e-07, | |
| "loss": 0.7261, | |
| "step": 952000 | |
| }, | |
| { | |
| "epoch": 30.98, | |
| "learning_rate": 4.797979797979798e-07, | |
| "loss": 0.7259, | |
| "step": 952500 | |
| }, | |
| { | |
| "epoch": 30.99, | |
| "learning_rate": 4.747474747474748e-07, | |
| "loss": 0.7255, | |
| "step": 953000 | |
| }, | |
| { | |
| "epoch": 31.01, | |
| "learning_rate": 4.696969696969697e-07, | |
| "loss": 0.7256, | |
| "step": 953500 | |
| }, | |
| { | |
| "epoch": 31.03, | |
| "learning_rate": 4.646464646464647e-07, | |
| "loss": 0.7252, | |
| "step": 954000 | |
| }, | |
| { | |
| "epoch": 31.04, | |
| "learning_rate": 4.595959595959596e-07, | |
| "loss": 0.7259, | |
| "step": 954500 | |
| }, | |
| { | |
| "epoch": 31.06, | |
| "learning_rate": 4.5454545454545457e-07, | |
| "loss": 0.7257, | |
| "step": 955000 | |
| }, | |
| { | |
| "epoch": 31.08, | |
| "learning_rate": 4.494949494949495e-07, | |
| "loss": 0.7259, | |
| "step": 955500 | |
| }, | |
| { | |
| "epoch": 31.09, | |
| "learning_rate": 4.444444444444445e-07, | |
| "loss": 0.7258, | |
| "step": 956000 | |
| }, | |
| { | |
| "epoch": 31.11, | |
| "learning_rate": 4.393939393939394e-07, | |
| "loss": 0.7261, | |
| "step": 956500 | |
| }, | |
| { | |
| "epoch": 31.12, | |
| "learning_rate": 4.343434343434344e-07, | |
| "loss": 0.7258, | |
| "step": 957000 | |
| }, | |
| { | |
| "epoch": 31.14, | |
| "learning_rate": 4.2929292929292934e-07, | |
| "loss": 0.7256, | |
| "step": 957500 | |
| }, | |
| { | |
| "epoch": 31.16, | |
| "learning_rate": 4.242424242424243e-07, | |
| "loss": 0.7256, | |
| "step": 958000 | |
| }, | |
| { | |
| "epoch": 31.17, | |
| "learning_rate": 4.191919191919192e-07, | |
| "loss": 0.7262, | |
| "step": 958500 | |
| }, | |
| { | |
| "epoch": 31.19, | |
| "learning_rate": 4.141414141414142e-07, | |
| "loss": 0.7258, | |
| "step": 959000 | |
| }, | |
| { | |
| "epoch": 31.21, | |
| "learning_rate": 4.090909090909091e-07, | |
| "loss": 0.7248, | |
| "step": 959500 | |
| }, | |
| { | |
| "epoch": 31.22, | |
| "learning_rate": 4.040404040404041e-07, | |
| "loss": 0.7253, | |
| "step": 960000 | |
| }, | |
| { | |
| "epoch": 31.24, | |
| "learning_rate": 3.9898989898989903e-07, | |
| "loss": 0.7253, | |
| "step": 960500 | |
| }, | |
| { | |
| "epoch": 31.25, | |
| "learning_rate": 3.9393939393939396e-07, | |
| "loss": 0.7258, | |
| "step": 961000 | |
| }, | |
| { | |
| "epoch": 31.27, | |
| "learning_rate": 3.8888888888888895e-07, | |
| "loss": 0.7255, | |
| "step": 961500 | |
| }, | |
| { | |
| "epoch": 31.29, | |
| "learning_rate": 3.838383838383838e-07, | |
| "loss": 0.7255, | |
| "step": 962000 | |
| }, | |
| { | |
| "epoch": 31.3, | |
| "learning_rate": 3.787878787878788e-07, | |
| "loss": 0.7256, | |
| "step": 962500 | |
| }, | |
| { | |
| "epoch": 31.32, | |
| "learning_rate": 3.7373737373737374e-07, | |
| "loss": 0.7252, | |
| "step": 963000 | |
| }, | |
| { | |
| "epoch": 31.34, | |
| "learning_rate": 3.6868686868686873e-07, | |
| "loss": 0.7257, | |
| "step": 963500 | |
| }, | |
| { | |
| "epoch": 31.35, | |
| "learning_rate": 3.6363636363636366e-07, | |
| "loss": 0.7254, | |
| "step": 964000 | |
| }, | |
| { | |
| "epoch": 31.37, | |
| "learning_rate": 3.5858585858585864e-07, | |
| "loss": 0.7256, | |
| "step": 964500 | |
| }, | |
| { | |
| "epoch": 31.38, | |
| "learning_rate": 3.535353535353536e-07, | |
| "loss": 0.7257, | |
| "step": 965000 | |
| }, | |
| { | |
| "epoch": 31.4, | |
| "learning_rate": 3.4848484848484856e-07, | |
| "loss": 0.7256, | |
| "step": 965500 | |
| }, | |
| { | |
| "epoch": 31.42, | |
| "learning_rate": 3.4343434343434344e-07, | |
| "loss": 0.7255, | |
| "step": 966000 | |
| }, | |
| { | |
| "epoch": 31.43, | |
| "learning_rate": 3.383838383838384e-07, | |
| "loss": 0.7255, | |
| "step": 966500 | |
| }, | |
| { | |
| "epoch": 31.45, | |
| "learning_rate": 3.3333333333333335e-07, | |
| "loss": 0.7252, | |
| "step": 967000 | |
| }, | |
| { | |
| "epoch": 31.47, | |
| "learning_rate": 3.2828282828282834e-07, | |
| "loss": 0.7256, | |
| "step": 967500 | |
| }, | |
| { | |
| "epoch": 31.48, | |
| "learning_rate": 3.2323232323232327e-07, | |
| "loss": 0.7257, | |
| "step": 968000 | |
| }, | |
| { | |
| "epoch": 31.5, | |
| "learning_rate": 3.181818181818182e-07, | |
| "loss": 0.7259, | |
| "step": 968500 | |
| }, | |
| { | |
| "epoch": 31.51, | |
| "learning_rate": 3.131313131313132e-07, | |
| "loss": 0.7252, | |
| "step": 969000 | |
| }, | |
| { | |
| "epoch": 31.53, | |
| "learning_rate": 3.080808080808081e-07, | |
| "loss": 0.7257, | |
| "step": 969500 | |
| }, | |
| { | |
| "epoch": 31.55, | |
| "learning_rate": 3.0303030303030305e-07, | |
| "loss": 0.725, | |
| "step": 970000 | |
| }, | |
| { | |
| "epoch": 31.56, | |
| "learning_rate": 2.9797979797979803e-07, | |
| "loss": 0.7252, | |
| "step": 970500 | |
| }, | |
| { | |
| "epoch": 31.58, | |
| "learning_rate": 2.9292929292929296e-07, | |
| "loss": 0.7253, | |
| "step": 971000 | |
| }, | |
| { | |
| "epoch": 31.6, | |
| "learning_rate": 2.878787878787879e-07, | |
| "loss": 0.7251, | |
| "step": 971500 | |
| }, | |
| { | |
| "epoch": 31.61, | |
| "learning_rate": 2.828282828282829e-07, | |
| "loss": 0.725, | |
| "step": 972000 | |
| }, | |
| { | |
| "epoch": 31.63, | |
| "learning_rate": 2.7777777777777776e-07, | |
| "loss": 0.7256, | |
| "step": 972500 | |
| }, | |
| { | |
| "epoch": 31.64, | |
| "learning_rate": 2.7272727272727274e-07, | |
| "loss": 0.7257, | |
| "step": 973000 | |
| }, | |
| { | |
| "epoch": 31.66, | |
| "learning_rate": 2.676767676767677e-07, | |
| "loss": 0.7248, | |
| "step": 973500 | |
| }, | |
| { | |
| "epoch": 31.68, | |
| "learning_rate": 2.6262626262626266e-07, | |
| "loss": 0.7248, | |
| "step": 974000 | |
| }, | |
| { | |
| "epoch": 31.69, | |
| "learning_rate": 2.575757575757576e-07, | |
| "loss": 0.7246, | |
| "step": 974500 | |
| }, | |
| { | |
| "epoch": 31.71, | |
| "learning_rate": 2.525252525252525e-07, | |
| "loss": 0.7255, | |
| "step": 975000 | |
| }, | |
| { | |
| "epoch": 31.73, | |
| "learning_rate": 2.474747474747475e-07, | |
| "loss": 0.7248, | |
| "step": 975500 | |
| }, | |
| { | |
| "epoch": 31.74, | |
| "learning_rate": 2.4242424242424244e-07, | |
| "loss": 0.7249, | |
| "step": 976000 | |
| }, | |
| { | |
| "epoch": 31.76, | |
| "learning_rate": 2.373737373737374e-07, | |
| "loss": 0.7253, | |
| "step": 976500 | |
| }, | |
| { | |
| "epoch": 31.77, | |
| "learning_rate": 2.3232323232323235e-07, | |
| "loss": 0.7248, | |
| "step": 977000 | |
| }, | |
| { | |
| "epoch": 31.79, | |
| "learning_rate": 2.2727272727272729e-07, | |
| "loss": 0.725, | |
| "step": 977500 | |
| }, | |
| { | |
| "epoch": 31.81, | |
| "learning_rate": 2.2222222222222224e-07, | |
| "loss": 0.7255, | |
| "step": 978000 | |
| }, | |
| { | |
| "epoch": 31.82, | |
| "learning_rate": 2.171717171717172e-07, | |
| "loss": 0.7251, | |
| "step": 978500 | |
| }, | |
| { | |
| "epoch": 31.84, | |
| "learning_rate": 2.1212121212121216e-07, | |
| "loss": 0.7252, | |
| "step": 979000 | |
| }, | |
| { | |
| "epoch": 31.86, | |
| "learning_rate": 2.070707070707071e-07, | |
| "loss": 0.7252, | |
| "step": 979500 | |
| }, | |
| { | |
| "epoch": 31.87, | |
| "learning_rate": 2.0202020202020205e-07, | |
| "loss": 0.7246, | |
| "step": 980000 | |
| }, | |
| { | |
| "epoch": 31.89, | |
| "learning_rate": 1.9696969696969698e-07, | |
| "loss": 0.725, | |
| "step": 980500 | |
| }, | |
| { | |
| "epoch": 31.9, | |
| "learning_rate": 1.919191919191919e-07, | |
| "loss": 0.7248, | |
| "step": 981000 | |
| }, | |
| { | |
| "epoch": 31.92, | |
| "learning_rate": 1.8686868686868687e-07, | |
| "loss": 0.7255, | |
| "step": 981500 | |
| }, | |
| { | |
| "epoch": 31.94, | |
| "learning_rate": 1.8181818181818183e-07, | |
| "loss": 0.7248, | |
| "step": 982000 | |
| }, | |
| { | |
| "epoch": 31.95, | |
| "learning_rate": 1.767676767676768e-07, | |
| "loss": 0.7252, | |
| "step": 982500 | |
| }, | |
| { | |
| "epoch": 31.97, | |
| "learning_rate": 1.7171717171717172e-07, | |
| "loss": 0.7248, | |
| "step": 983000 | |
| }, | |
| { | |
| "epoch": 31.99, | |
| "learning_rate": 1.6666666666666668e-07, | |
| "loss": 0.7254, | |
| "step": 983500 | |
| }, | |
| { | |
| "epoch": 32.0, | |
| "learning_rate": 1.6161616161616163e-07, | |
| "loss": 0.725, | |
| "step": 984000 | |
| }, | |
| { | |
| "epoch": 32.02, | |
| "learning_rate": 1.565656565656566e-07, | |
| "loss": 0.7259, | |
| "step": 984500 | |
| }, | |
| { | |
| "epoch": 32.03, | |
| "learning_rate": 1.5151515151515152e-07, | |
| "loss": 0.7246, | |
| "step": 985000 | |
| }, | |
| { | |
| "epoch": 32.05, | |
| "learning_rate": 1.4646464646464648e-07, | |
| "loss": 0.7247, | |
| "step": 985500 | |
| }, | |
| { | |
| "epoch": 32.07, | |
| "learning_rate": 1.4141414141414144e-07, | |
| "loss": 0.7248, | |
| "step": 986000 | |
| }, | |
| { | |
| "epoch": 32.08, | |
| "learning_rate": 1.3636363636363637e-07, | |
| "loss": 0.7252, | |
| "step": 986500 | |
| }, | |
| { | |
| "epoch": 32.1, | |
| "learning_rate": 1.3131313131313133e-07, | |
| "loss": 0.7255, | |
| "step": 987000 | |
| }, | |
| { | |
| "epoch": 32.12, | |
| "learning_rate": 1.2626262626262626e-07, | |
| "loss": 0.7247, | |
| "step": 987500 | |
| }, | |
| { | |
| "epoch": 32.13, | |
| "learning_rate": 1.2121212121212122e-07, | |
| "loss": 0.7244, | |
| "step": 988000 | |
| }, | |
| { | |
| "epoch": 32.15, | |
| "learning_rate": 1.1616161616161618e-07, | |
| "loss": 0.7251, | |
| "step": 988500 | |
| }, | |
| { | |
| "epoch": 32.16, | |
| "learning_rate": 1.1111111111111112e-07, | |
| "loss": 0.7248, | |
| "step": 989000 | |
| }, | |
| { | |
| "epoch": 32.18, | |
| "learning_rate": 1.0606060606060608e-07, | |
| "loss": 0.7248, | |
| "step": 989500 | |
| }, | |
| { | |
| "epoch": 32.2, | |
| "learning_rate": 1.0101010101010103e-07, | |
| "loss": 0.7251, | |
| "step": 990000 | |
| }, | |
| { | |
| "epoch": 32.21, | |
| "learning_rate": 9.595959595959596e-08, | |
| "loss": 0.7248, | |
| "step": 990500 | |
| }, | |
| { | |
| "epoch": 32.23, | |
| "learning_rate": 9.090909090909091e-08, | |
| "loss": 0.7247, | |
| "step": 991000 | |
| }, | |
| { | |
| "epoch": 32.25, | |
| "learning_rate": 8.585858585858586e-08, | |
| "loss": 0.7249, | |
| "step": 991500 | |
| }, | |
| { | |
| "epoch": 32.26, | |
| "learning_rate": 8.080808080808082e-08, | |
| "loss": 0.7252, | |
| "step": 992000 | |
| }, | |
| { | |
| "epoch": 32.28, | |
| "learning_rate": 7.575757575757576e-08, | |
| "loss": 0.725, | |
| "step": 992500 | |
| }, | |
| { | |
| "epoch": 32.29, | |
| "learning_rate": 7.070707070707072e-08, | |
| "loss": 0.7253, | |
| "step": 993000 | |
| }, | |
| { | |
| "epoch": 32.31, | |
| "learning_rate": 6.565656565656566e-08, | |
| "loss": 0.7255, | |
| "step": 993500 | |
| }, | |
| { | |
| "epoch": 32.33, | |
| "learning_rate": 6.060606060606061e-08, | |
| "loss": 0.7248, | |
| "step": 994000 | |
| }, | |
| { | |
| "epoch": 32.34, | |
| "learning_rate": 5.555555555555556e-08, | |
| "loss": 0.7249, | |
| "step": 994500 | |
| }, | |
| { | |
| "epoch": 32.36, | |
| "learning_rate": 5.050505050505051e-08, | |
| "loss": 0.724, | |
| "step": 995000 | |
| }, | |
| { | |
| "epoch": 32.38, | |
| "learning_rate": 4.545454545454546e-08, | |
| "loss": 0.725, | |
| "step": 995500 | |
| }, | |
| { | |
| "epoch": 32.39, | |
| "learning_rate": 4.040404040404041e-08, | |
| "loss": 0.7244, | |
| "step": 996000 | |
| }, | |
| { | |
| "epoch": 32.41, | |
| "learning_rate": 3.535353535353536e-08, | |
| "loss": 0.725, | |
| "step": 996500 | |
| }, | |
| { | |
| "epoch": 32.42, | |
| "learning_rate": 3.0303030303030305e-08, | |
| "loss": 0.7254, | |
| "step": 997000 | |
| }, | |
| { | |
| "epoch": 32.44, | |
| "learning_rate": 2.5252525252525256e-08, | |
| "loss": 0.7251, | |
| "step": 997500 | |
| }, | |
| { | |
| "epoch": 32.46, | |
| "learning_rate": 2.0202020202020204e-08, | |
| "loss": 0.7244, | |
| "step": 998000 | |
| }, | |
| { | |
| "epoch": 32.47, | |
| "learning_rate": 1.5151515151515152e-08, | |
| "loss": 0.7251, | |
| "step": 998500 | |
| }, | |
| { | |
| "epoch": 32.49, | |
| "learning_rate": 1.0101010101010102e-08, | |
| "loss": 0.7251, | |
| "step": 999000 | |
| }, | |
| { | |
| "epoch": 32.51, | |
| "learning_rate": 5.050505050505051e-09, | |
| "loss": 0.725, | |
| "step": 999500 | |
| }, | |
| { | |
| "epoch": 32.52, | |
| "learning_rate": 0.0, | |
| "loss": 0.7246, | |
| "step": 1000000 | |
| }, | |
| { | |
| "epoch": 32.52, | |
| "step": 1000000, | |
| "total_flos": 1.1571044548608e+19, | |
| "train_loss": 1.2262278026428222, | |
| "train_runtime": 587774.1428, | |
| "train_samples_per_second": 435.541, | |
| "train_steps_per_second": 1.701 | |
| } | |
| ], | |
| "max_steps": 1000000, | |
| "num_train_epochs": 33, | |
| "total_flos": 1.1571044548608e+19, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
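
The structure above appears to be a Hugging Face `transformers` Trainer `trainer_state.json`: `log_history` holds one entry per logging interval (here every 500 optimizer steps, each with `epoch`, `learning_rate`, `loss`, and `step`), while the final entry carries run-level summary statistics (`train_loss`, `train_runtime`, throughput) instead of a periodic loss. Below is a minimal sketch of how such a file can be inspected; the local filename is a hypothetical assumption, not part of the original log.

```python
import json

# Load the Trainer state dump (hypothetical local path; assumes the JSON
# above was saved as-is to this file).
with open("trainer_state.json") as f:
    state = json.load(f)

# Periodic log entries carry a "loss" key; the trailing run-summary entry
# (train_loss, train_runtime, ...) does not, so this filter drops it.
logs = [entry for entry in state["log_history"] if "loss" in entry]
steps = [entry["step"] for entry in logs]
losses = [entry["loss"] for entry in logs]

print(f"logged points : {len(logs)}")
print(f"first loss    : {losses[0]:.4f} at step {steps[0]}")
print(f"final loss    : {losses[-1]:.4f} at step {steps[-1]}")
print(f"total epochs  : {state['epoch']:.2f} over {state['global_step']} steps")
```

Consistent with the tail of the log, such a script would show the loss plateauing near 0.725 while the learning rate decays linearly to 0.0 at step 1,000,000, the expected behavior at the end of a linear-decay schedule.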