| { | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 10.0, | |
| "global_step": 530, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.9905660377358493e-05, | |
| "loss": 9.8227, | |
| "step": 1 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.9811320754716985e-05, | |
| "loss": 5.3632, | |
| "step": 2 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.9716981132075476e-05, | |
| "loss": 4.5065, | |
| "step": 3 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.962264150943397e-05, | |
| "loss": 3.6359, | |
| "step": 4 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.952830188679246e-05, | |
| "loss": 3.3431, | |
| "step": 5 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.943396226415095e-05, | |
| "loss": 3.0381, | |
| "step": 6 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.933962264150943e-05, | |
| "loss": 3.0548, | |
| "step": 7 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.9245283018867924e-05, | |
| "loss": 2.7514, | |
| "step": 8 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.9150943396226415e-05, | |
| "loss": 2.8738, | |
| "step": 9 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.9056603773584906e-05, | |
| "loss": 2.5376, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.89622641509434e-05, | |
| "loss": 2.4207, | |
| "step": 11 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.886792452830189e-05, | |
| "loss": 2.4471, | |
| "step": 12 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.877358490566038e-05, | |
| "loss": 2.3879, | |
| "step": 13 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.867924528301887e-05, | |
| "loss": 2.328, | |
| "step": 14 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.858490566037736e-05, | |
| "loss": 2.1208, | |
| "step": 15 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.849056603773585e-05, | |
| "loss": 2.2425, | |
| "step": 16 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 4.8396226415094344e-05, | |
| "loss": 2.2906, | |
| "step": 17 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 4.8301886792452835e-05, | |
| "loss": 2.1438, | |
| "step": 18 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 4.8207547169811326e-05, | |
| "loss": 2.2857, | |
| "step": 19 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 4.811320754716982e-05, | |
| "loss": 2.1923, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 4.80188679245283e-05, | |
| "loss": 2.2866, | |
| "step": 21 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 4.792452830188679e-05, | |
| "loss": 2.174, | |
| "step": 22 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 4.7830188679245284e-05, | |
| "loss": 2.2098, | |
| "step": 23 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 4.7735849056603775e-05, | |
| "loss": 1.9968, | |
| "step": 24 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 4.7641509433962266e-05, | |
| "loss": 2.0575, | |
| "step": 25 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 4.754716981132076e-05, | |
| "loss": 2.0604, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 4.745283018867925e-05, | |
| "loss": 2.2124, | |
| "step": 27 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 4.735849056603774e-05, | |
| "loss": 2.002, | |
| "step": 28 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 4.726415094339623e-05, | |
| "loss": 1.9484, | |
| "step": 29 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 4.716981132075472e-05, | |
| "loss": 1.9954, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 4.707547169811321e-05, | |
| "loss": 1.917, | |
| "step": 31 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 4.6981132075471704e-05, | |
| "loss": 2.0466, | |
| "step": 32 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 4.6886792452830195e-05, | |
| "loss": 1.8827, | |
| "step": 33 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 4.679245283018868e-05, | |
| "loss": 2.1251, | |
| "step": 34 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 4.669811320754717e-05, | |
| "loss": 2.1592, | |
| "step": 35 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 4.660377358490566e-05, | |
| "loss": 2.2133, | |
| "step": 36 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 4.650943396226415e-05, | |
| "loss": 2.1562, | |
| "step": 37 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 4.641509433962264e-05, | |
| "loss": 1.9496, | |
| "step": 38 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 4.6320754716981134e-05, | |
| "loss": 2.0698, | |
| "step": 39 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 4.6226415094339625e-05, | |
| "loss": 2.0431, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 4.6132075471698117e-05, | |
| "loss": 2.0038, | |
| "step": 41 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 4.603773584905661e-05, | |
| "loss": 2.0959, | |
| "step": 42 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.59433962264151e-05, | |
| "loss": 1.9298, | |
| "step": 43 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 4.584905660377359e-05, | |
| "loss": 1.9915, | |
| "step": 44 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 4.575471698113208e-05, | |
| "loss": 1.8004, | |
| "step": 45 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 4.566037735849057e-05, | |
| "loss": 1.9061, | |
| "step": 46 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 4.556603773584906e-05, | |
| "loss": 1.8904, | |
| "step": 47 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 4.547169811320755e-05, | |
| "loss": 2.0976, | |
| "step": 48 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 4.537735849056604e-05, | |
| "loss": 1.7822, | |
| "step": 49 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 4.528301886792453e-05, | |
| "loss": 1.8883, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 4.518867924528302e-05, | |
| "loss": 1.6874, | |
| "step": 51 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 4.509433962264151e-05, | |
| "loss": 1.7476, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.5e-05, | |
| "loss": 1.6606, | |
| "step": 53 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 4.4905660377358494e-05, | |
| "loss": 1.701, | |
| "step": 54 | |
| }, | |
| { | |
| "epoch": 1.04, | |
| "learning_rate": 4.4811320754716985e-05, | |
| "loss": 1.6084, | |
| "step": 55 | |
| }, | |
| { | |
| "epoch": 1.06, | |
| "learning_rate": 4.4716981132075476e-05, | |
| "loss": 1.6217, | |
| "step": 56 | |
| }, | |
| { | |
| "epoch": 1.08, | |
| "learning_rate": 4.462264150943397e-05, | |
| "loss": 1.5683, | |
| "step": 57 | |
| }, | |
| { | |
| "epoch": 1.09, | |
| "learning_rate": 4.452830188679246e-05, | |
| "loss": 1.6248, | |
| "step": 58 | |
| }, | |
| { | |
| "epoch": 1.11, | |
| "learning_rate": 4.443396226415095e-05, | |
| "loss": 1.6011, | |
| "step": 59 | |
| }, | |
| { | |
| "epoch": 1.13, | |
| "learning_rate": 4.433962264150944e-05, | |
| "loss": 1.6206, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 1.15, | |
| "learning_rate": 4.4245283018867925e-05, | |
| "loss": 1.5233, | |
| "step": 61 | |
| }, | |
| { | |
| "epoch": 1.17, | |
| "learning_rate": 4.4150943396226416e-05, | |
| "loss": 1.6248, | |
| "step": 62 | |
| }, | |
| { | |
| "epoch": 1.19, | |
| "learning_rate": 4.405660377358491e-05, | |
| "loss": 1.5203, | |
| "step": 63 | |
| }, | |
| { | |
| "epoch": 1.21, | |
| "learning_rate": 4.39622641509434e-05, | |
| "loss": 1.4521, | |
| "step": 64 | |
| }, | |
| { | |
| "epoch": 1.23, | |
| "learning_rate": 4.386792452830189e-05, | |
| "loss": 1.4511, | |
| "step": 65 | |
| }, | |
| { | |
| "epoch": 1.25, | |
| "learning_rate": 4.377358490566038e-05, | |
| "loss": 1.4902, | |
| "step": 66 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "learning_rate": 4.367924528301887e-05, | |
| "loss": 1.6299, | |
| "step": 67 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "learning_rate": 4.358490566037736e-05, | |
| "loss": 1.4494, | |
| "step": 68 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "learning_rate": 4.3490566037735853e-05, | |
| "loss": 1.4915, | |
| "step": 69 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "learning_rate": 4.3396226415094345e-05, | |
| "loss": 1.5363, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 1.34, | |
| "learning_rate": 4.3301886792452836e-05, | |
| "loss": 1.4763, | |
| "step": 71 | |
| }, | |
| { | |
| "epoch": 1.36, | |
| "learning_rate": 4.320754716981133e-05, | |
| "loss": 1.4828, | |
| "step": 72 | |
| }, | |
| { | |
| "epoch": 1.38, | |
| "learning_rate": 4.311320754716982e-05, | |
| "loss": 1.4703, | |
| "step": 73 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "learning_rate": 4.301886792452831e-05, | |
| "loss": 1.3921, | |
| "step": 74 | |
| }, | |
| { | |
| "epoch": 1.42, | |
| "learning_rate": 4.292452830188679e-05, | |
| "loss": 1.5084, | |
| "step": 75 | |
| }, | |
| { | |
| "epoch": 1.43, | |
| "learning_rate": 4.2830188679245284e-05, | |
| "loss": 1.4489, | |
| "step": 76 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "learning_rate": 4.2735849056603775e-05, | |
| "loss": 1.479, | |
| "step": 77 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "learning_rate": 4.2641509433962266e-05, | |
| "loss": 1.4791, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "learning_rate": 4.254716981132076e-05, | |
| "loss": 1.3819, | |
| "step": 79 | |
| }, | |
| { | |
| "epoch": 1.51, | |
| "learning_rate": 4.245283018867925e-05, | |
| "loss": 1.4143, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 1.53, | |
| "learning_rate": 4.235849056603774e-05, | |
| "loss": 1.4606, | |
| "step": 81 | |
| }, | |
| { | |
| "epoch": 1.55, | |
| "learning_rate": 4.226415094339623e-05, | |
| "loss": 1.4457, | |
| "step": 82 | |
| }, | |
| { | |
| "epoch": 1.57, | |
| "learning_rate": 4.216981132075472e-05, | |
| "loss": 1.3819, | |
| "step": 83 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "learning_rate": 4.207547169811321e-05, | |
| "loss": 1.3984, | |
| "step": 84 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "learning_rate": 4.1981132075471704e-05, | |
| "loss": 1.4533, | |
| "step": 85 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "learning_rate": 4.1886792452830195e-05, | |
| "loss": 1.4731, | |
| "step": 86 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "learning_rate": 4.1792452830188686e-05, | |
| "loss": 1.4723, | |
| "step": 87 | |
| }, | |
| { | |
| "epoch": 1.66, | |
| "learning_rate": 4.169811320754717e-05, | |
| "loss": 1.4317, | |
| "step": 88 | |
| }, | |
| { | |
| "epoch": 1.68, | |
| "learning_rate": 4.160377358490566e-05, | |
| "loss": 1.441, | |
| "step": 89 | |
| }, | |
| { | |
| "epoch": 1.7, | |
| "learning_rate": 4.150943396226415e-05, | |
| "loss": 1.3809, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 1.72, | |
| "learning_rate": 4.1415094339622644e-05, | |
| "loss": 1.2942, | |
| "step": 91 | |
| }, | |
| { | |
| "epoch": 1.74, | |
| "learning_rate": 4.1320754716981135e-05, | |
| "loss": 1.4409, | |
| "step": 92 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "learning_rate": 4.1226415094339626e-05, | |
| "loss": 1.5428, | |
| "step": 93 | |
| }, | |
| { | |
| "epoch": 1.77, | |
| "learning_rate": 4.113207547169812e-05, | |
| "loss": 1.3872, | |
| "step": 94 | |
| }, | |
| { | |
| "epoch": 1.79, | |
| "learning_rate": 4.103773584905661e-05, | |
| "loss": 1.4422, | |
| "step": 95 | |
| }, | |
| { | |
| "epoch": 1.81, | |
| "learning_rate": 4.09433962264151e-05, | |
| "loss": 1.5333, | |
| "step": 96 | |
| }, | |
| { | |
| "epoch": 1.83, | |
| "learning_rate": 4.084905660377359e-05, | |
| "loss": 1.229, | |
| "step": 97 | |
| }, | |
| { | |
| "epoch": 1.85, | |
| "learning_rate": 4.075471698113208e-05, | |
| "loss": 1.3927, | |
| "step": 98 | |
| }, | |
| { | |
| "epoch": 1.87, | |
| "learning_rate": 4.066037735849057e-05, | |
| "loss": 1.2784, | |
| "step": 99 | |
| }, | |
| { | |
| "epoch": 1.89, | |
| "learning_rate": 4.0566037735849064e-05, | |
| "loss": 1.3527, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 1.91, | |
| "learning_rate": 4.047169811320755e-05, | |
| "loss": 1.422, | |
| "step": 101 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "learning_rate": 4.037735849056604e-05, | |
| "loss": 1.3688, | |
| "step": 102 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "learning_rate": 4.028301886792453e-05, | |
| "loss": 1.3664, | |
| "step": 103 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "learning_rate": 4.018867924528302e-05, | |
| "loss": 1.3683, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 1.98, | |
| "learning_rate": 4.009433962264151e-05, | |
| "loss": 1.3948, | |
| "step": 105 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "learning_rate": 4e-05, | |
| "loss": 1.258, | |
| "step": 106 | |
| }, | |
| { | |
| "epoch": 2.02, | |
| "learning_rate": 3.9905660377358494e-05, | |
| "loss": 1.3721, | |
| "step": 107 | |
| }, | |
| { | |
| "epoch": 2.04, | |
| "learning_rate": 3.9811320754716985e-05, | |
| "loss": 1.2634, | |
| "step": 108 | |
| }, | |
| { | |
| "epoch": 2.06, | |
| "learning_rate": 3.9716981132075477e-05, | |
| "loss": 1.3369, | |
| "step": 109 | |
| }, | |
| { | |
| "epoch": 2.08, | |
| "learning_rate": 3.962264150943397e-05, | |
| "loss": 1.2034, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 2.09, | |
| "learning_rate": 3.952830188679246e-05, | |
| "loss": 1.2868, | |
| "step": 111 | |
| }, | |
| { | |
| "epoch": 2.11, | |
| "learning_rate": 3.943396226415095e-05, | |
| "loss": 1.2322, | |
| "step": 112 | |
| }, | |
| { | |
| "epoch": 2.13, | |
| "learning_rate": 3.933962264150944e-05, | |
| "loss": 1.2472, | |
| "step": 113 | |
| }, | |
| { | |
| "epoch": 2.15, | |
| "learning_rate": 3.924528301886793e-05, | |
| "loss": 1.1504, | |
| "step": 114 | |
| }, | |
| { | |
| "epoch": 2.17, | |
| "learning_rate": 3.9150943396226416e-05, | |
| "loss": 1.2511, | |
| "step": 115 | |
| }, | |
| { | |
| "epoch": 2.19, | |
| "learning_rate": 3.905660377358491e-05, | |
| "loss": 1.1804, | |
| "step": 116 | |
| }, | |
| { | |
| "epoch": 2.21, | |
| "learning_rate": 3.89622641509434e-05, | |
| "loss": 1.0368, | |
| "step": 117 | |
| }, | |
| { | |
| "epoch": 2.23, | |
| "learning_rate": 3.886792452830189e-05, | |
| "loss": 1.168, | |
| "step": 118 | |
| }, | |
| { | |
| "epoch": 2.25, | |
| "learning_rate": 3.877358490566038e-05, | |
| "loss": 1.2403, | |
| "step": 119 | |
| }, | |
| { | |
| "epoch": 2.26, | |
| "learning_rate": 3.867924528301887e-05, | |
| "loss": 1.2023, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 2.28, | |
| "learning_rate": 3.858490566037736e-05, | |
| "loss": 1.2519, | |
| "step": 121 | |
| }, | |
| { | |
| "epoch": 2.3, | |
| "learning_rate": 3.8490566037735854e-05, | |
| "loss": 1.1993, | |
| "step": 122 | |
| }, | |
| { | |
| "epoch": 2.32, | |
| "learning_rate": 3.8396226415094345e-05, | |
| "loss": 1.1572, | |
| "step": 123 | |
| }, | |
| { | |
| "epoch": 2.34, | |
| "learning_rate": 3.8301886792452836e-05, | |
| "loss": 1.2227, | |
| "step": 124 | |
| }, | |
| { | |
| "epoch": 2.36, | |
| "learning_rate": 3.820754716981133e-05, | |
| "loss": 1.1286, | |
| "step": 125 | |
| }, | |
| { | |
| "epoch": 2.38, | |
| "learning_rate": 3.811320754716982e-05, | |
| "loss": 1.2267, | |
| "step": 126 | |
| }, | |
| { | |
| "epoch": 2.4, | |
| "learning_rate": 3.801886792452831e-05, | |
| "loss": 1.1177, | |
| "step": 127 | |
| }, | |
| { | |
| "epoch": 2.42, | |
| "learning_rate": 3.7924528301886794e-05, | |
| "loss": 1.2352, | |
| "step": 128 | |
| }, | |
| { | |
| "epoch": 2.43, | |
| "learning_rate": 3.7830188679245285e-05, | |
| "loss": 1.1686, | |
| "step": 129 | |
| }, | |
| { | |
| "epoch": 2.45, | |
| "learning_rate": 3.7735849056603776e-05, | |
| "loss": 1.1913, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 2.47, | |
| "learning_rate": 3.764150943396227e-05, | |
| "loss": 1.1315, | |
| "step": 131 | |
| }, | |
| { | |
| "epoch": 2.49, | |
| "learning_rate": 3.754716981132076e-05, | |
| "loss": 1.1151, | |
| "step": 132 | |
| }, | |
| { | |
| "epoch": 2.51, | |
| "learning_rate": 3.745283018867924e-05, | |
| "loss": 1.1532, | |
| "step": 133 | |
| }, | |
| { | |
| "epoch": 2.53, | |
| "learning_rate": 3.735849056603773e-05, | |
| "loss": 1.1501, | |
| "step": 134 | |
| }, | |
| { | |
| "epoch": 2.55, | |
| "learning_rate": 3.7264150943396224e-05, | |
| "loss": 1.161, | |
| "step": 135 | |
| }, | |
| { | |
| "epoch": 2.57, | |
| "learning_rate": 3.7169811320754716e-05, | |
| "loss": 1.1592, | |
| "step": 136 | |
| }, | |
| { | |
| "epoch": 2.58, | |
| "learning_rate": 3.7075471698113207e-05, | |
| "loss": 1.1969, | |
| "step": 137 | |
| }, | |
| { | |
| "epoch": 2.6, | |
| "learning_rate": 3.69811320754717e-05, | |
| "loss": 1.1805, | |
| "step": 138 | |
| }, | |
| { | |
| "epoch": 2.62, | |
| "learning_rate": 3.688679245283019e-05, | |
| "loss": 1.0553, | |
| "step": 139 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "learning_rate": 3.679245283018868e-05, | |
| "loss": 1.1072, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 2.66, | |
| "learning_rate": 3.669811320754717e-05, | |
| "loss": 1.234, | |
| "step": 141 | |
| }, | |
| { | |
| "epoch": 2.68, | |
| "learning_rate": 3.660377358490566e-05, | |
| "loss": 1.1156, | |
| "step": 142 | |
| }, | |
| { | |
| "epoch": 2.7, | |
| "learning_rate": 3.650943396226415e-05, | |
| "loss": 1.1624, | |
| "step": 143 | |
| }, | |
| { | |
| "epoch": 2.72, | |
| "learning_rate": 3.641509433962264e-05, | |
| "loss": 1.1259, | |
| "step": 144 | |
| }, | |
| { | |
| "epoch": 2.74, | |
| "learning_rate": 3.632075471698113e-05, | |
| "loss": 1.1218, | |
| "step": 145 | |
| }, | |
| { | |
| "epoch": 2.75, | |
| "learning_rate": 3.622641509433962e-05, | |
| "loss": 1.133, | |
| "step": 146 | |
| }, | |
| { | |
| "epoch": 2.77, | |
| "learning_rate": 3.613207547169811e-05, | |
| "loss": 1.2087, | |
| "step": 147 | |
| }, | |
| { | |
| "epoch": 2.79, | |
| "learning_rate": 3.60377358490566e-05, | |
| "loss": 1.1558, | |
| "step": 148 | |
| }, | |
| { | |
| "epoch": 2.81, | |
| "learning_rate": 3.594339622641509e-05, | |
| "loss": 1.1518, | |
| "step": 149 | |
| }, | |
| { | |
| "epoch": 2.83, | |
| "learning_rate": 3.5849056603773584e-05, | |
| "loss": 1.2106, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 2.85, | |
| "learning_rate": 3.5754716981132075e-05, | |
| "loss": 1.0969, | |
| "step": 151 | |
| }, | |
| { | |
| "epoch": 2.87, | |
| "learning_rate": 3.5660377358490566e-05, | |
| "loss": 1.0236, | |
| "step": 152 | |
| }, | |
| { | |
| "epoch": 2.89, | |
| "learning_rate": 3.556603773584906e-05, | |
| "loss": 1.0231, | |
| "step": 153 | |
| }, | |
| { | |
| "epoch": 2.91, | |
| "learning_rate": 3.547169811320755e-05, | |
| "loss": 1.0921, | |
| "step": 154 | |
| }, | |
| { | |
| "epoch": 2.92, | |
| "learning_rate": 3.537735849056604e-05, | |
| "loss": 1.1057, | |
| "step": 155 | |
| }, | |
| { | |
| "epoch": 2.94, | |
| "learning_rate": 3.528301886792453e-05, | |
| "loss": 1.121, | |
| "step": 156 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "learning_rate": 3.518867924528302e-05, | |
| "loss": 1.1201, | |
| "step": 157 | |
| }, | |
| { | |
| "epoch": 2.98, | |
| "learning_rate": 3.5094339622641506e-05, | |
| "loss": 1.0761, | |
| "step": 158 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "learning_rate": 3.5e-05, | |
| "loss": 1.1364, | |
| "step": 159 | |
| }, | |
| { | |
| "epoch": 3.02, | |
| "learning_rate": 3.490566037735849e-05, | |
| "loss": 1.0045, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 3.04, | |
| "learning_rate": 3.481132075471698e-05, | |
| "loss": 1.082, | |
| "step": 161 | |
| }, | |
| { | |
| "epoch": 3.06, | |
| "learning_rate": 3.471698113207547e-05, | |
| "loss": 1.0907, | |
| "step": 162 | |
| }, | |
| { | |
| "epoch": 3.08, | |
| "learning_rate": 3.462264150943396e-05, | |
| "loss": 1.0209, | |
| "step": 163 | |
| }, | |
| { | |
| "epoch": 3.09, | |
| "learning_rate": 3.452830188679245e-05, | |
| "loss": 1.1183, | |
| "step": 164 | |
| }, | |
| { | |
| "epoch": 3.11, | |
| "learning_rate": 3.4433962264150943e-05, | |
| "loss": 1.0522, | |
| "step": 165 | |
| }, | |
| { | |
| "epoch": 3.13, | |
| "learning_rate": 3.4339622641509435e-05, | |
| "loss": 1.1326, | |
| "step": 166 | |
| }, | |
| { | |
| "epoch": 3.15, | |
| "learning_rate": 3.4245283018867926e-05, | |
| "loss": 1.0603, | |
| "step": 167 | |
| }, | |
| { | |
| "epoch": 3.17, | |
| "learning_rate": 3.415094339622642e-05, | |
| "loss": 1.0376, | |
| "step": 168 | |
| }, | |
| { | |
| "epoch": 3.19, | |
| "learning_rate": 3.405660377358491e-05, | |
| "loss": 0.9987, | |
| "step": 169 | |
| }, | |
| { | |
| "epoch": 3.21, | |
| "learning_rate": 3.39622641509434e-05, | |
| "loss": 0.959, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 3.23, | |
| "learning_rate": 3.386792452830188e-05, | |
| "loss": 1.0101, | |
| "step": 171 | |
| }, | |
| { | |
| "epoch": 3.25, | |
| "learning_rate": 3.3773584905660374e-05, | |
| "loss": 0.9348, | |
| "step": 172 | |
| }, | |
| { | |
| "epoch": 3.26, | |
| "learning_rate": 3.3679245283018865e-05, | |
| "loss": 0.9486, | |
| "step": 173 | |
| }, | |
| { | |
| "epoch": 3.28, | |
| "learning_rate": 3.3584905660377356e-05, | |
| "loss": 1.0116, | |
| "step": 174 | |
| }, | |
| { | |
| "epoch": 3.3, | |
| "learning_rate": 3.349056603773585e-05, | |
| "loss": 0.9613, | |
| "step": 175 | |
| }, | |
| { | |
| "epoch": 3.32, | |
| "learning_rate": 3.339622641509434e-05, | |
| "loss": 0.9316, | |
| "step": 176 | |
| }, | |
| { | |
| "epoch": 3.34, | |
| "learning_rate": 3.330188679245283e-05, | |
| "loss": 0.9969, | |
| "step": 177 | |
| }, | |
| { | |
| "epoch": 3.36, | |
| "learning_rate": 3.320754716981132e-05, | |
| "loss": 0.8827, | |
| "step": 178 | |
| }, | |
| { | |
| "epoch": 3.38, | |
| "learning_rate": 3.311320754716981e-05, | |
| "loss": 1.0186, | |
| "step": 179 | |
| }, | |
| { | |
| "epoch": 3.4, | |
| "learning_rate": 3.30188679245283e-05, | |
| "loss": 0.914, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 3.42, | |
| "learning_rate": 3.2924528301886794e-05, | |
| "loss": 0.9294, | |
| "step": 181 | |
| }, | |
| { | |
| "epoch": 3.43, | |
| "learning_rate": 3.2830188679245285e-05, | |
| "loss": 0.9551, | |
| "step": 182 | |
| }, | |
| { | |
| "epoch": 3.45, | |
| "learning_rate": 3.2735849056603776e-05, | |
| "loss": 1.013, | |
| "step": 183 | |
| }, | |
| { | |
| "epoch": 3.47, | |
| "learning_rate": 3.264150943396227e-05, | |
| "loss": 0.9576, | |
| "step": 184 | |
| }, | |
| { | |
| "epoch": 3.49, | |
| "learning_rate": 3.254716981132075e-05, | |
| "loss": 0.9847, | |
| "step": 185 | |
| }, | |
| { | |
| "epoch": 3.51, | |
| "learning_rate": 3.245283018867924e-05, | |
| "loss": 1.0326, | |
| "step": 186 | |
| }, | |
| { | |
| "epoch": 3.53, | |
| "learning_rate": 3.2358490566037734e-05, | |
| "loss": 0.9624, | |
| "step": 187 | |
| }, | |
| { | |
| "epoch": 3.55, | |
| "learning_rate": 3.2264150943396225e-05, | |
| "loss": 1.0267, | |
| "step": 188 | |
| }, | |
| { | |
| "epoch": 3.57, | |
| "learning_rate": 3.2169811320754716e-05, | |
| "loss": 1.039, | |
| "step": 189 | |
| }, | |
| { | |
| "epoch": 3.58, | |
| "learning_rate": 3.207547169811321e-05, | |
| "loss": 1.1197, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 3.6, | |
| "learning_rate": 3.19811320754717e-05, | |
| "loss": 0.8363, | |
| "step": 191 | |
| }, | |
| { | |
| "epoch": 3.62, | |
| "learning_rate": 3.188679245283019e-05, | |
| "loss": 0.9551, | |
| "step": 192 | |
| }, | |
| { | |
| "epoch": 3.64, | |
| "learning_rate": 3.179245283018868e-05, | |
| "loss": 1.0216, | |
| "step": 193 | |
| }, | |
| { | |
| "epoch": 3.66, | |
| "learning_rate": 3.169811320754717e-05, | |
| "loss": 1.0711, | |
| "step": 194 | |
| }, | |
| { | |
| "epoch": 3.68, | |
| "learning_rate": 3.160377358490566e-05, | |
| "loss": 1.0057, | |
| "step": 195 | |
| }, | |
| { | |
| "epoch": 3.7, | |
| "learning_rate": 3.1509433962264154e-05, | |
| "loss": 0.9545, | |
| "step": 196 | |
| }, | |
| { | |
| "epoch": 3.72, | |
| "learning_rate": 3.1415094339622645e-05, | |
| "loss": 0.9554, | |
| "step": 197 | |
| }, | |
| { | |
| "epoch": 3.74, | |
| "learning_rate": 3.132075471698113e-05, | |
| "loss": 0.9069, | |
| "step": 198 | |
| }, | |
| { | |
| "epoch": 3.75, | |
| "learning_rate": 3.122641509433962e-05, | |
| "loss": 1.0332, | |
| "step": 199 | |
| }, | |
| { | |
| "epoch": 3.77, | |
| "learning_rate": 3.113207547169811e-05, | |
| "loss": 1.0513, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 3.79, | |
| "learning_rate": 3.10377358490566e-05, | |
| "loss": 0.952, | |
| "step": 201 | |
| }, | |
| { | |
| "epoch": 3.81, | |
| "learning_rate": 3.094339622641509e-05, | |
| "loss": 1.0413, | |
| "step": 202 | |
| }, | |
| { | |
| "epoch": 3.83, | |
| "learning_rate": 3.0849056603773584e-05, | |
| "loss": 0.8725, | |
| "step": 203 | |
| }, | |
| { | |
| "epoch": 3.85, | |
| "learning_rate": 3.0754716981132075e-05, | |
| "loss": 1.0077, | |
| "step": 204 | |
| }, | |
| { | |
| "epoch": 3.87, | |
| "learning_rate": 3.0660377358490567e-05, | |
| "loss": 0.9643, | |
| "step": 205 | |
| }, | |
| { | |
| "epoch": 3.89, | |
| "learning_rate": 3.056603773584906e-05, | |
| "loss": 0.9741, | |
| "step": 206 | |
| }, | |
| { | |
| "epoch": 3.91, | |
| "learning_rate": 3.047169811320755e-05, | |
| "loss": 1.0225, | |
| "step": 207 | |
| }, | |
| { | |
| "epoch": 3.92, | |
| "learning_rate": 3.0377358490566036e-05, | |
| "loss": 1.0555, | |
| "step": 208 | |
| }, | |
| { | |
| "epoch": 3.94, | |
| "learning_rate": 3.0283018867924528e-05, | |
| "loss": 0.9697, | |
| "step": 209 | |
| }, | |
| { | |
| "epoch": 3.96, | |
| "learning_rate": 3.018867924528302e-05, | |
| "loss": 0.9397, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 3.98, | |
| "learning_rate": 3.009433962264151e-05, | |
| "loss": 0.9053, | |
| "step": 211 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "learning_rate": 3e-05, | |
| "loss": 1.1121, | |
| "step": 212 | |
| }, | |
| { | |
| "epoch": 4.02, | |
| "learning_rate": 2.9905660377358492e-05, | |
| "loss": 0.8519, | |
| "step": 213 | |
| }, | |
| { | |
| "epoch": 4.04, | |
| "learning_rate": 2.9811320754716983e-05, | |
| "loss": 0.967, | |
| "step": 214 | |
| }, | |
| { | |
| "epoch": 4.06, | |
| "learning_rate": 2.971698113207547e-05, | |
| "loss": 0.8814, | |
| "step": 215 | |
| }, | |
| { | |
| "epoch": 4.08, | |
| "learning_rate": 2.9622641509433962e-05, | |
| "loss": 1.0404, | |
| "step": 216 | |
| }, | |
| { | |
| "epoch": 4.09, | |
| "learning_rate": 2.9528301886792453e-05, | |
| "loss": 0.8551, | |
| "step": 217 | |
| }, | |
| { | |
| "epoch": 4.11, | |
| "learning_rate": 2.9433962264150944e-05, | |
| "loss": 0.8671, | |
| "step": 218 | |
| }, | |
| { | |
| "epoch": 4.13, | |
| "learning_rate": 2.9339622641509435e-05, | |
| "loss": 0.8446, | |
| "step": 219 | |
| }, | |
| { | |
| "epoch": 4.15, | |
| "learning_rate": 2.9245283018867926e-05, | |
| "loss": 0.8838, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 4.17, | |
| "learning_rate": 2.9150943396226417e-05, | |
| "loss": 0.8685, | |
| "step": 221 | |
| }, | |
| { | |
| "epoch": 4.19, | |
| "learning_rate": 2.9056603773584905e-05, | |
| "loss": 0.982, | |
| "step": 222 | |
| }, | |
| { | |
| "epoch": 4.21, | |
| "learning_rate": 2.8962264150943396e-05, | |
| "loss": 0.8266, | |
| "step": 223 | |
| }, | |
| { | |
| "epoch": 4.23, | |
| "learning_rate": 2.8867924528301887e-05, | |
| "loss": 0.8326, | |
| "step": 224 | |
| }, | |
| { | |
| "epoch": 4.25, | |
| "learning_rate": 2.8773584905660378e-05, | |
| "loss": 0.8903, | |
| "step": 225 | |
| }, | |
| { | |
| "epoch": 4.26, | |
| "learning_rate": 2.867924528301887e-05, | |
| "loss": 0.9631, | |
| "step": 226 | |
| }, | |
| { | |
| "epoch": 4.28, | |
| "learning_rate": 2.858490566037736e-05, | |
| "loss": 0.8748, | |
| "step": 227 | |
| }, | |
| { | |
| "epoch": 4.3, | |
| "learning_rate": 2.8490566037735848e-05, | |
| "loss": 0.8452, | |
| "step": 228 | |
| }, | |
| { | |
| "epoch": 4.32, | |
| "learning_rate": 2.839622641509434e-05, | |
| "loss": 0.9581, | |
| "step": 229 | |
| }, | |
| { | |
| "epoch": 4.34, | |
| "learning_rate": 2.830188679245283e-05, | |
| "loss": 0.8077, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 4.36, | |
| "learning_rate": 2.820754716981132e-05, | |
| "loss": 0.8741, | |
| "step": 231 | |
| }, | |
| { | |
| "epoch": 4.38, | |
| "learning_rate": 2.8113207547169812e-05, | |
| "loss": 0.8932, | |
| "step": 232 | |
| }, | |
| { | |
| "epoch": 4.4, | |
| "learning_rate": 2.8018867924528303e-05, | |
| "loss": 0.7975, | |
| "step": 233 | |
| }, | |
| { | |
| "epoch": 4.42, | |
| "learning_rate": 2.7924528301886794e-05, | |
| "loss": 0.9653, | |
| "step": 234 | |
| }, | |
| { | |
| "epoch": 4.43, | |
| "learning_rate": 2.7830188679245282e-05, | |
| "loss": 0.9458, | |
| "step": 235 | |
| }, | |
| { | |
| "epoch": 4.45, | |
| "learning_rate": 2.7735849056603773e-05, | |
| "loss": 0.91, | |
| "step": 236 | |
| }, | |
| { | |
| "epoch": 4.47, | |
| "learning_rate": 2.7641509433962264e-05, | |
| "loss": 0.871, | |
| "step": 237 | |
| }, | |
| { | |
| "epoch": 4.49, | |
| "learning_rate": 2.7547169811320755e-05, | |
| "loss": 0.8209, | |
| "step": 238 | |
| }, | |
| { | |
| "epoch": 4.51, | |
| "learning_rate": 2.7452830188679247e-05, | |
| "loss": 0.8097, | |
| "step": 239 | |
| }, | |
| { | |
| "epoch": 4.53, | |
| "learning_rate": 2.7358490566037738e-05, | |
| "loss": 0.7661, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 4.55, | |
| "learning_rate": 2.726415094339623e-05, | |
| "loss": 0.895, | |
| "step": 241 | |
| }, | |
| { | |
| "epoch": 4.57, | |
| "learning_rate": 2.7169811320754716e-05, | |
| "loss": 0.88, | |
| "step": 242 | |
| }, | |
| { | |
| "epoch": 4.58, | |
| "learning_rate": 2.7075471698113207e-05, | |
| "loss": 0.8906, | |
| "step": 243 | |
| }, | |
| { | |
| "epoch": 4.6, | |
| "learning_rate": 2.69811320754717e-05, | |
| "loss": 0.9012, | |
| "step": 244 | |
| }, | |
| { | |
| "epoch": 4.62, | |
| "learning_rate": 2.688679245283019e-05, | |
| "loss": 0.7683, | |
| "step": 245 | |
| }, | |
| { | |
| "epoch": 4.64, | |
| "learning_rate": 2.679245283018868e-05, | |
| "loss": 0.8865, | |
| "step": 246 | |
| }, | |
| { | |
| "epoch": 4.66, | |
| "learning_rate": 2.6698113207547172e-05, | |
| "loss": 0.8114, | |
| "step": 247 | |
| }, | |
| { | |
| "epoch": 4.68, | |
| "learning_rate": 2.6603773584905663e-05, | |
| "loss": 0.852, | |
| "step": 248 | |
| }, | |
| { | |
| "epoch": 4.7, | |
| "learning_rate": 2.650943396226415e-05, | |
| "loss": 0.8537, | |
| "step": 249 | |
| }, | |
| { | |
| "epoch": 4.72, | |
| "learning_rate": 2.641509433962264e-05, | |
| "loss": 0.8999, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 4.74, | |
| "learning_rate": 2.6320754716981133e-05, | |
| "loss": 0.8895, | |
| "step": 251 | |
| }, | |
| { | |
| "epoch": 4.75, | |
| "learning_rate": 2.6226415094339624e-05, | |
| "loss": 0.8271, | |
| "step": 252 | |
| }, | |
| { | |
| "epoch": 4.77, | |
| "learning_rate": 2.6132075471698115e-05, | |
| "loss": 0.7964, | |
| "step": 253 | |
| }, | |
| { | |
| "epoch": 4.79, | |
| "learning_rate": 2.6037735849056606e-05, | |
| "loss": 0.9496, | |
| "step": 254 | |
| }, | |
| { | |
| "epoch": 4.81, | |
| "learning_rate": 2.5943396226415094e-05, | |
| "loss": 0.8386, | |
| "step": 255 | |
| }, | |
| { | |
| "epoch": 4.83, | |
| "learning_rate": 2.5849056603773585e-05, | |
| "loss": 0.7688, | |
| "step": 256 | |
| }, | |
| { | |
| "epoch": 4.85, | |
| "learning_rate": 2.5754716981132076e-05, | |
| "loss": 0.8199, | |
| "step": 257 | |
| }, | |
| { | |
| "epoch": 4.87, | |
| "learning_rate": 2.5660377358490567e-05, | |
| "loss": 0.8596, | |
| "step": 258 | |
| }, | |
| { | |
| "epoch": 4.89, | |
| "learning_rate": 2.5566037735849058e-05, | |
| "loss": 0.8176, | |
| "step": 259 | |
| }, | |
| { | |
| "epoch": 4.91, | |
| "learning_rate": 2.547169811320755e-05, | |
| "loss": 0.8093, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 4.92, | |
| "learning_rate": 2.537735849056604e-05, | |
| "loss": 0.7932, | |
| "step": 261 | |
| }, | |
| { | |
| "epoch": 4.94, | |
| "learning_rate": 2.5283018867924528e-05, | |
| "loss": 0.7955, | |
| "step": 262 | |
| }, | |
| { | |
| "epoch": 4.96, | |
| "learning_rate": 2.518867924528302e-05, | |
| "loss": 0.8786, | |
| "step": 263 | |
| }, | |
| { | |
| "epoch": 4.98, | |
| "learning_rate": 2.509433962264151e-05, | |
| "loss": 0.7967, | |
| "step": 264 | |
| }, | |
| { | |
| "epoch": 5.0, | |
| "learning_rate": 2.5e-05, | |
| "loss": 0.7464, | |
| "step": 265 | |
| }, | |
| { | |
| "epoch": 5.02, | |
| "learning_rate": 2.4905660377358492e-05, | |
| "loss": 0.7578, | |
| "step": 266 | |
| }, | |
| { | |
| "epoch": 5.04, | |
| "learning_rate": 2.4811320754716983e-05, | |
| "loss": 0.7802, | |
| "step": 267 | |
| }, | |
| { | |
| "epoch": 5.06, | |
| "learning_rate": 2.4716981132075474e-05, | |
| "loss": 0.7665, | |
| "step": 268 | |
| }, | |
| { | |
| "epoch": 5.08, | |
| "learning_rate": 2.4622641509433962e-05, | |
| "loss": 0.797, | |
| "step": 269 | |
| }, | |
| { | |
| "epoch": 5.09, | |
| "learning_rate": 2.4528301886792453e-05, | |
| "loss": 0.7681, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 5.11, | |
| "learning_rate": 2.4433962264150944e-05, | |
| "loss": 0.8316, | |
| "step": 271 | |
| }, | |
| { | |
| "epoch": 5.13, | |
| "learning_rate": 2.4339622641509435e-05, | |
| "loss": 0.8532, | |
| "step": 272 | |
| }, | |
| { | |
| "epoch": 5.15, | |
| "learning_rate": 2.4245283018867926e-05, | |
| "loss": 0.7485, | |
| "step": 273 | |
| }, | |
| { | |
| "epoch": 5.17, | |
| "learning_rate": 2.4150943396226418e-05, | |
| "loss": 0.7837, | |
| "step": 274 | |
| }, | |
| { | |
| "epoch": 5.19, | |
| "learning_rate": 2.405660377358491e-05, | |
| "loss": 0.7994, | |
| "step": 275 | |
| }, | |
| { | |
| "epoch": 5.21, | |
| "learning_rate": 2.3962264150943396e-05, | |
| "loss": 0.7448, | |
| "step": 276 | |
| }, | |
| { | |
| "epoch": 5.23, | |
| "learning_rate": 2.3867924528301887e-05, | |
| "loss": 0.842, | |
| "step": 277 | |
| }, | |
| { | |
| "epoch": 5.25, | |
| "learning_rate": 2.377358490566038e-05, | |
| "loss": 0.8137, | |
| "step": 278 | |
| }, | |
| { | |
| "epoch": 5.26, | |
| "learning_rate": 2.367924528301887e-05, | |
| "loss": 0.7739, | |
| "step": 279 | |
| }, | |
| { | |
| "epoch": 5.28, | |
| "learning_rate": 2.358490566037736e-05, | |
| "loss": 0.7935, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 5.3, | |
| "learning_rate": 2.3490566037735852e-05, | |
| "loss": 0.8022, | |
| "step": 281 | |
| }, | |
| { | |
| "epoch": 5.32, | |
| "learning_rate": 2.339622641509434e-05, | |
| "loss": 0.7823, | |
| "step": 282 | |
| }, | |
| { | |
| "epoch": 5.34, | |
| "learning_rate": 2.330188679245283e-05, | |
| "loss": 0.7645, | |
| "step": 283 | |
| }, | |
| { | |
| "epoch": 5.36, | |
| "learning_rate": 2.320754716981132e-05, | |
| "loss": 0.7344, | |
| "step": 284 | |
| }, | |
| { | |
| "epoch": 5.38, | |
| "learning_rate": 2.3113207547169813e-05, | |
| "loss": 0.8098, | |
| "step": 285 | |
| }, | |
| { | |
| "epoch": 5.4, | |
| "learning_rate": 2.3018867924528304e-05, | |
| "loss": 0.7387, | |
| "step": 286 | |
| }, | |
| { | |
| "epoch": 5.42, | |
| "learning_rate": 2.2924528301886795e-05, | |
| "loss": 0.7694, | |
| "step": 287 | |
| }, | |
| { | |
| "epoch": 5.43, | |
| "learning_rate": 2.2830188679245286e-05, | |
| "loss": 0.7766, | |
| "step": 288 | |
| }, | |
| { | |
| "epoch": 5.45, | |
| "learning_rate": 2.2735849056603774e-05, | |
| "loss": 0.7933, | |
| "step": 289 | |
| }, | |
| { | |
| "epoch": 5.47, | |
| "learning_rate": 2.2641509433962265e-05, | |
| "loss": 0.737, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 5.49, | |
| "learning_rate": 2.2547169811320756e-05, | |
| "loss": 0.8105, | |
| "step": 291 | |
| }, | |
| { | |
| "epoch": 5.51, | |
| "learning_rate": 2.2452830188679247e-05, | |
| "loss": 0.7348, | |
| "step": 292 | |
| }, | |
| { | |
| "epoch": 5.53, | |
| "learning_rate": 2.2358490566037738e-05, | |
| "loss": 0.7407, | |
| "step": 293 | |
| }, | |
| { | |
| "epoch": 5.55, | |
| "learning_rate": 2.226415094339623e-05, | |
| "loss": 0.7406, | |
| "step": 294 | |
| }, | |
| { | |
| "epoch": 5.57, | |
| "learning_rate": 2.216981132075472e-05, | |
| "loss": 0.7485, | |
| "step": 295 | |
| }, | |
| { | |
| "epoch": 5.58, | |
| "learning_rate": 2.2075471698113208e-05, | |
| "loss": 0.7226, | |
| "step": 296 | |
| }, | |
| { | |
| "epoch": 5.6, | |
| "learning_rate": 2.19811320754717e-05, | |
| "loss": 0.7865, | |
| "step": 297 | |
| }, | |
| { | |
| "epoch": 5.62, | |
| "learning_rate": 2.188679245283019e-05, | |
| "loss": 0.7983, | |
| "step": 298 | |
| }, | |
| { | |
| "epoch": 5.64, | |
| "learning_rate": 2.179245283018868e-05, | |
| "loss": 0.7524, | |
| "step": 299 | |
| }, | |
| { | |
| "epoch": 5.66, | |
| "learning_rate": 2.1698113207547172e-05, | |
| "loss": 0.6711, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 5.68, | |
| "learning_rate": 2.1603773584905663e-05, | |
| "loss": 0.8228, | |
| "step": 301 | |
| }, | |
| { | |
| "epoch": 5.7, | |
| "learning_rate": 2.1509433962264154e-05, | |
| "loss": 0.72, | |
| "step": 302 | |
| }, | |
| { | |
| "epoch": 5.72, | |
| "learning_rate": 2.1415094339622642e-05, | |
| "loss": 0.8165, | |
| "step": 303 | |
| }, | |
| { | |
| "epoch": 5.74, | |
| "learning_rate": 2.1320754716981133e-05, | |
| "loss": 0.7387, | |
| "step": 304 | |
| }, | |
| { | |
| "epoch": 5.75, | |
| "learning_rate": 2.1226415094339624e-05, | |
| "loss": 0.7303, | |
| "step": 305 | |
| }, | |
| { | |
| "epoch": 5.77, | |
| "learning_rate": 2.1132075471698115e-05, | |
| "loss": 0.7795, | |
| "step": 306 | |
| }, | |
| { | |
| "epoch": 5.79, | |
| "learning_rate": 2.1037735849056606e-05, | |
| "loss": 0.7651, | |
| "step": 307 | |
| }, | |
| { | |
| "epoch": 5.81, | |
| "learning_rate": 2.0943396226415098e-05, | |
| "loss": 0.8202, | |
| "step": 308 | |
| }, | |
| { | |
| "epoch": 5.83, | |
| "learning_rate": 2.0849056603773585e-05, | |
| "loss": 0.807, | |
| "step": 309 | |
| }, | |
| { | |
| "epoch": 5.85, | |
| "learning_rate": 2.0754716981132076e-05, | |
| "loss": 0.7871, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 5.87, | |
| "learning_rate": 2.0660377358490567e-05, | |
| "loss": 0.6727, | |
| "step": 311 | |
| }, | |
| { | |
| "epoch": 5.89, | |
| "learning_rate": 2.056603773584906e-05, | |
| "loss": 0.7548, | |
| "step": 312 | |
| }, | |
| { | |
| "epoch": 5.91, | |
| "learning_rate": 2.047169811320755e-05, | |
| "loss": 0.7253, | |
| "step": 313 | |
| }, | |
| { | |
| "epoch": 5.92, | |
| "learning_rate": 2.037735849056604e-05, | |
| "loss": 0.7422, | |
| "step": 314 | |
| }, | |
| { | |
| "epoch": 5.94, | |
| "learning_rate": 2.0283018867924532e-05, | |
| "loss": 0.8262, | |
| "step": 315 | |
| }, | |
| { | |
| "epoch": 5.96, | |
| "learning_rate": 2.018867924528302e-05, | |
| "loss": 0.7564, | |
| "step": 316 | |
| }, | |
| { | |
| "epoch": 5.98, | |
| "learning_rate": 2.009433962264151e-05, | |
| "loss": 0.8395, | |
| "step": 317 | |
| }, | |
| { | |
| "epoch": 6.0, | |
| "learning_rate": 2e-05, | |
| "loss": 0.7952, | |
| "step": 318 | |
| }, | |
| { | |
| "epoch": 6.02, | |
| "learning_rate": 1.9905660377358493e-05, | |
| "loss": 0.719, | |
| "step": 319 | |
| }, | |
| { | |
| "epoch": 6.04, | |
| "learning_rate": 1.9811320754716984e-05, | |
| "loss": 0.7088, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 6.06, | |
| "learning_rate": 1.9716981132075475e-05, | |
| "loss": 0.7672, | |
| "step": 321 | |
| }, | |
| { | |
| "epoch": 6.08, | |
| "learning_rate": 1.9622641509433966e-05, | |
| "loss": 0.6742, | |
| "step": 322 | |
| }, | |
| { | |
| "epoch": 6.09, | |
| "learning_rate": 1.9528301886792454e-05, | |
| "loss": 0.7195, | |
| "step": 323 | |
| }, | |
| { | |
| "epoch": 6.11, | |
| "learning_rate": 1.9433962264150945e-05, | |
| "loss": 0.773, | |
| "step": 324 | |
| }, | |
| { | |
| "epoch": 6.13, | |
| "learning_rate": 1.9339622641509436e-05, | |
| "loss": 0.6531, | |
| "step": 325 | |
| }, | |
| { | |
| "epoch": 6.15, | |
| "learning_rate": 1.9245283018867927e-05, | |
| "loss": 0.7301, | |
| "step": 326 | |
| }, | |
| { | |
| "epoch": 6.17, | |
| "learning_rate": 1.9150943396226418e-05, | |
| "loss": 0.6485, | |
| "step": 327 | |
| }, | |
| { | |
| "epoch": 6.19, | |
| "learning_rate": 1.905660377358491e-05, | |
| "loss": 0.6735, | |
| "step": 328 | |
| }, | |
| { | |
| "epoch": 6.21, | |
| "learning_rate": 1.8962264150943397e-05, | |
| "loss": 0.7351, | |
| "step": 329 | |
| }, | |
| { | |
| "epoch": 6.23, | |
| "learning_rate": 1.8867924528301888e-05, | |
| "loss": 0.7142, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 6.25, | |
| "learning_rate": 1.877358490566038e-05, | |
| "loss": 0.6725, | |
| "step": 331 | |
| }, | |
| { | |
| "epoch": 6.26, | |
| "learning_rate": 1.8679245283018867e-05, | |
| "loss": 0.7494, | |
| "step": 332 | |
| }, | |
| { | |
| "epoch": 6.28, | |
| "learning_rate": 1.8584905660377358e-05, | |
| "loss": 0.6543, | |
| "step": 333 | |
| }, | |
| { | |
| "epoch": 6.3, | |
| "learning_rate": 1.849056603773585e-05, | |
| "loss": 0.8346, | |
| "step": 334 | |
| }, | |
| { | |
| "epoch": 6.32, | |
| "learning_rate": 1.839622641509434e-05, | |
| "loss": 0.8046, | |
| "step": 335 | |
| }, | |
| { | |
| "epoch": 6.34, | |
| "learning_rate": 1.830188679245283e-05, | |
| "loss": 0.8155, | |
| "step": 336 | |
| }, | |
| { | |
| "epoch": 6.36, | |
| "learning_rate": 1.820754716981132e-05, | |
| "loss": 0.647, | |
| "step": 337 | |
| }, | |
| { | |
| "epoch": 6.38, | |
| "learning_rate": 1.811320754716981e-05, | |
| "loss": 0.7001, | |
| "step": 338 | |
| }, | |
| { | |
| "epoch": 6.4, | |
| "learning_rate": 1.80188679245283e-05, | |
| "loss": 0.7545, | |
| "step": 339 | |
| }, | |
| { | |
| "epoch": 6.42, | |
| "learning_rate": 1.7924528301886792e-05, | |
| "loss": 0.6668, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 6.43, | |
| "learning_rate": 1.7830188679245283e-05, | |
| "loss": 0.8008, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 6.45, | |
| "learning_rate": 1.7735849056603774e-05, | |
| "loss": 0.6139, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 6.47, | |
| "learning_rate": 1.7641509433962265e-05, | |
| "loss": 0.7745, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 6.49, | |
| "learning_rate": 1.7547169811320753e-05, | |
| "loss": 0.6493, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 6.51, | |
| "learning_rate": 1.7452830188679244e-05, | |
| "loss": 0.6302, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 6.53, | |
| "learning_rate": 1.7358490566037735e-05, | |
| "loss": 0.7609, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 6.55, | |
| "learning_rate": 1.7264150943396226e-05, | |
| "loss": 0.765, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 6.57, | |
| "learning_rate": 1.7169811320754717e-05, | |
| "loss": 0.627, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 6.58, | |
| "learning_rate": 1.707547169811321e-05, | |
| "loss": 0.7065, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 6.6, | |
| "learning_rate": 1.69811320754717e-05, | |
| "loss": 0.7555, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 6.62, | |
| "learning_rate": 1.6886792452830187e-05, | |
| "loss": 0.7766, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 6.64, | |
| "learning_rate": 1.6792452830188678e-05, | |
| "loss": 0.6657, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 6.66, | |
| "learning_rate": 1.669811320754717e-05, | |
| "loss": 0.5936, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 6.68, | |
| "learning_rate": 1.660377358490566e-05, | |
| "loss": 0.6822, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 6.7, | |
| "learning_rate": 1.650943396226415e-05, | |
| "loss": 0.7292, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 6.72, | |
| "learning_rate": 1.6415094339622643e-05, | |
| "loss": 0.7883, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 6.74, | |
| "learning_rate": 1.6320754716981134e-05, | |
| "loss": 0.7028, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 6.75, | |
| "learning_rate": 1.622641509433962e-05, | |
| "loss": 0.6749, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 6.77, | |
| "learning_rate": 1.6132075471698112e-05, | |
| "loss": 0.7383, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 6.79, | |
| "learning_rate": 1.6037735849056604e-05, | |
| "loss": 0.7755, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 6.81, | |
| "learning_rate": 1.5943396226415095e-05, | |
| "loss": 0.6989, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 6.83, | |
| "learning_rate": 1.5849056603773586e-05, | |
| "loss": 0.7402, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 6.85, | |
| "learning_rate": 1.5754716981132077e-05, | |
| "loss": 0.675, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 6.87, | |
| "learning_rate": 1.5660377358490564e-05, | |
| "loss": 0.598, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 6.89, | |
| "learning_rate": 1.5566037735849056e-05, | |
| "loss": 0.6434, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 6.91, | |
| "learning_rate": 1.5471698113207547e-05, | |
| "loss": 0.744, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 6.92, | |
| "learning_rate": 1.5377358490566038e-05, | |
| "loss": 0.7623, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 6.94, | |
| "learning_rate": 1.528301886792453e-05, | |
| "loss": 0.728, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 6.96, | |
| "learning_rate": 1.5188679245283018e-05, | |
| "loss": 0.6612, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 6.98, | |
| "learning_rate": 1.509433962264151e-05, | |
| "loss": 0.6723, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 7.0, | |
| "learning_rate": 1.5e-05, | |
| "loss": 0.5828, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 7.02, | |
| "learning_rate": 1.4905660377358491e-05, | |
| "loss": 0.6875, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 7.04, | |
| "learning_rate": 1.4811320754716981e-05, | |
| "loss": 0.6812, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 7.06, | |
| "learning_rate": 1.4716981132075472e-05, | |
| "loss": 0.6529, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 7.08, | |
| "learning_rate": 1.4622641509433963e-05, | |
| "loss": 0.682, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 7.09, | |
| "learning_rate": 1.4528301886792452e-05, | |
| "loss": 0.6836, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 7.11, | |
| "learning_rate": 1.4433962264150944e-05, | |
| "loss": 0.6603, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 7.13, | |
| "learning_rate": 1.4339622641509435e-05, | |
| "loss": 0.6198, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 7.15, | |
| "learning_rate": 1.4245283018867924e-05, | |
| "loss": 0.6459, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 7.17, | |
| "learning_rate": 1.4150943396226415e-05, | |
| "loss": 0.682, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 7.19, | |
| "learning_rate": 1.4056603773584906e-05, | |
| "loss": 0.6039, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 7.21, | |
| "learning_rate": 1.3962264150943397e-05, | |
| "loss": 0.7004, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 7.23, | |
| "learning_rate": 1.3867924528301887e-05, | |
| "loss": 0.622, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 7.25, | |
| "learning_rate": 1.3773584905660378e-05, | |
| "loss": 0.6887, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 7.26, | |
| "learning_rate": 1.3679245283018869e-05, | |
| "loss": 0.6733, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 7.28, | |
| "learning_rate": 1.3584905660377358e-05, | |
| "loss": 0.6231, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 7.3, | |
| "learning_rate": 1.349056603773585e-05, | |
| "loss": 0.6759, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 7.32, | |
| "learning_rate": 1.339622641509434e-05, | |
| "loss": 0.6818, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 7.34, | |
| "learning_rate": 1.3301886792452831e-05, | |
| "loss": 0.6215, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 7.36, | |
| "learning_rate": 1.320754716981132e-05, | |
| "loss": 0.6892, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 7.38, | |
| "learning_rate": 1.3113207547169812e-05, | |
| "loss": 0.6298, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 7.4, | |
| "learning_rate": 1.3018867924528303e-05, | |
| "loss": 0.6314, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 7.42, | |
| "learning_rate": 1.2924528301886792e-05, | |
| "loss": 0.6547, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 7.43, | |
| "learning_rate": 1.2830188679245283e-05, | |
| "loss": 0.6266, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 7.45, | |
| "learning_rate": 1.2735849056603775e-05, | |
| "loss": 0.756, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 7.47, | |
| "learning_rate": 1.2641509433962264e-05, | |
| "loss": 0.6805, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 7.49, | |
| "learning_rate": 1.2547169811320755e-05, | |
| "loss": 0.6736, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 7.51, | |
| "learning_rate": 1.2452830188679246e-05, | |
| "loss": 0.6708, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 7.53, | |
| "learning_rate": 1.2358490566037737e-05, | |
| "loss": 0.6287, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 7.55, | |
| "learning_rate": 1.2264150943396227e-05, | |
| "loss": 0.7074, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 7.57, | |
| "learning_rate": 1.2169811320754718e-05, | |
| "loss": 0.6036, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 7.58, | |
| "learning_rate": 1.2075471698113209e-05, | |
| "loss": 0.6801, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 7.6, | |
| "learning_rate": 1.1981132075471698e-05, | |
| "loss": 0.6523, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 7.62, | |
| "learning_rate": 1.188679245283019e-05, | |
| "loss": 0.6658, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 7.64, | |
| "learning_rate": 1.179245283018868e-05, | |
| "loss": 0.6517, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 7.66, | |
| "learning_rate": 1.169811320754717e-05, | |
| "loss": 0.7265, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 7.68, | |
| "learning_rate": 1.160377358490566e-05, | |
| "loss": 0.6105, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 7.7, | |
| "learning_rate": 1.1509433962264152e-05, | |
| "loss": 0.6036, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 7.72, | |
| "learning_rate": 1.1415094339622643e-05, | |
| "loss": 0.6095, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 7.74, | |
| "learning_rate": 1.1320754716981132e-05, | |
| "loss": 0.7576, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 7.75, | |
| "learning_rate": 1.1226415094339623e-05, | |
| "loss": 0.6719, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 7.77, | |
| "learning_rate": 1.1132075471698115e-05, | |
| "loss": 0.7032, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 7.79, | |
| "learning_rate": 1.1037735849056604e-05, | |
| "loss": 0.6761, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 7.81, | |
| "learning_rate": 1.0943396226415095e-05, | |
| "loss": 0.6498, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 7.83, | |
| "learning_rate": 1.0849056603773586e-05, | |
| "loss": 0.6711, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 7.85, | |
| "learning_rate": 1.0754716981132077e-05, | |
| "loss": 0.5983, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 7.87, | |
| "learning_rate": 1.0660377358490567e-05, | |
| "loss": 0.6335, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 7.89, | |
| "learning_rate": 1.0566037735849058e-05, | |
| "loss": 0.6277, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 7.91, | |
| "learning_rate": 1.0471698113207549e-05, | |
| "loss": 0.6953, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 7.92, | |
| "learning_rate": 1.0377358490566038e-05, | |
| "loss": 0.6101, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 7.94, | |
| "learning_rate": 1.028301886792453e-05, | |
| "loss": 0.5842, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 7.96, | |
| "learning_rate": 1.018867924528302e-05, | |
| "loss": 0.6547, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 7.98, | |
| "learning_rate": 1.009433962264151e-05, | |
| "loss": 0.581, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 8.0, | |
| "learning_rate": 1e-05, | |
| "loss": 0.6649, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 8.02, | |
| "learning_rate": 9.905660377358492e-06, | |
| "loss": 0.6135, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 8.04, | |
| "learning_rate": 9.811320754716983e-06, | |
| "loss": 0.6005, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 8.06, | |
| "learning_rate": 9.716981132075472e-06, | |
| "loss": 0.5888, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 8.08, | |
| "learning_rate": 9.622641509433963e-06, | |
| "loss": 0.5913, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 8.09, | |
| "learning_rate": 9.528301886792455e-06, | |
| "loss": 0.6436, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 8.11, | |
| "learning_rate": 9.433962264150944e-06, | |
| "loss": 0.6632, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 8.13, | |
| "learning_rate": 9.339622641509433e-06, | |
| "loss": 0.5931, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 8.15, | |
| "learning_rate": 9.245283018867924e-06, | |
| "loss": 0.563, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 8.17, | |
| "learning_rate": 9.150943396226416e-06, | |
| "loss": 0.6361, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 8.19, | |
| "learning_rate": 9.056603773584905e-06, | |
| "loss": 0.605, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 8.21, | |
| "learning_rate": 8.962264150943396e-06, | |
| "loss": 0.6416, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 8.23, | |
| "learning_rate": 8.867924528301887e-06, | |
| "loss": 0.6744, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 8.25, | |
| "learning_rate": 8.773584905660376e-06, | |
| "loss": 0.5623, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 8.26, | |
| "learning_rate": 8.679245283018868e-06, | |
| "loss": 0.6513, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 8.28, | |
| "learning_rate": 8.584905660377359e-06, | |
| "loss": 0.6099, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 8.3, | |
| "learning_rate": 8.49056603773585e-06, | |
| "loss": 0.5376, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 8.32, | |
| "learning_rate": 8.396226415094339e-06, | |
| "loss": 0.657, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 8.34, | |
| "learning_rate": 8.30188679245283e-06, | |
| "loss": 0.62, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 8.36, | |
| "learning_rate": 8.207547169811321e-06, | |
| "loss": 0.6364, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 8.38, | |
| "learning_rate": 8.11320754716981e-06, | |
| "loss": 0.7565, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 8.4, | |
| "learning_rate": 8.018867924528302e-06, | |
| "loss": 0.6923, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 8.42, | |
| "learning_rate": 7.924528301886793e-06, | |
| "loss": 0.6294, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 8.43, | |
| "learning_rate": 7.830188679245282e-06, | |
| "loss": 0.6152, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 8.45, | |
| "learning_rate": 7.735849056603773e-06, | |
| "loss": 0.6197, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 8.47, | |
| "learning_rate": 7.641509433962264e-06, | |
| "loss": 0.6689, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 8.49, | |
| "learning_rate": 7.547169811320755e-06, | |
| "loss": 0.5863, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 8.51, | |
| "learning_rate": 7.452830188679246e-06, | |
| "loss": 0.6549, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 8.53, | |
| "learning_rate": 7.358490566037736e-06, | |
| "loss": 0.6476, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 8.55, | |
| "learning_rate": 7.264150943396226e-06, | |
| "loss": 0.5644, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 8.57, | |
| "learning_rate": 7.169811320754717e-06, | |
| "loss": 0.5327, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 8.58, | |
| "learning_rate": 7.0754716981132075e-06, | |
| "loss": 0.6086, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 8.6, | |
| "learning_rate": 6.981132075471699e-06, | |
| "loss": 0.7013, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 8.62, | |
| "learning_rate": 6.886792452830189e-06, | |
| "loss": 0.6546, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 8.64, | |
| "learning_rate": 6.792452830188679e-06, | |
| "loss": 0.636, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 8.66, | |
| "learning_rate": 6.69811320754717e-06, | |
| "loss": 0.6997, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 8.68, | |
| "learning_rate": 6.60377358490566e-06, | |
| "loss": 0.6163, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 8.7, | |
| "learning_rate": 6.5094339622641515e-06, | |
| "loss": 0.6237, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 8.72, | |
| "learning_rate": 6.415094339622642e-06, | |
| "loss": 0.631, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 8.74, | |
| "learning_rate": 6.320754716981132e-06, | |
| "loss": 0.5505, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 8.75, | |
| "learning_rate": 6.226415094339623e-06, | |
| "loss": 0.5707, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 8.77, | |
| "learning_rate": 6.132075471698113e-06, | |
| "loss": 0.6712, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 8.79, | |
| "learning_rate": 6.037735849056604e-06, | |
| "loss": 0.6895, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 8.81, | |
| "learning_rate": 5.943396226415095e-06, | |
| "loss": 0.6574, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 8.83, | |
| "learning_rate": 5.849056603773585e-06, | |
| "loss": 0.6445, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 8.85, | |
| "learning_rate": 5.754716981132076e-06, | |
| "loss": 0.5688, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 8.87, | |
| "learning_rate": 5.660377358490566e-06, | |
| "loss": 0.6162, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 8.89, | |
| "learning_rate": 5.566037735849057e-06, | |
| "loss": 0.5972, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 8.91, | |
| "learning_rate": 5.4716981132075475e-06, | |
| "loss": 0.5262, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 8.92, | |
| "learning_rate": 5.377358490566039e-06, | |
| "loss": 0.7048, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 8.94, | |
| "learning_rate": 5.283018867924529e-06, | |
| "loss": 0.6142, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 8.96, | |
| "learning_rate": 5.188679245283019e-06, | |
| "loss": 0.6144, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 8.98, | |
| "learning_rate": 5.09433962264151e-06, | |
| "loss": 0.6002, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 9.0, | |
| "learning_rate": 5e-06, | |
| "loss": 0.5671, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 9.02, | |
| "learning_rate": 4.9056603773584915e-06, | |
| "loss": 0.5556, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 9.04, | |
| "learning_rate": 4.811320754716982e-06, | |
| "loss": 0.6501, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 9.06, | |
| "learning_rate": 4.716981132075472e-06, | |
| "loss": 0.5688, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 9.08, | |
| "learning_rate": 4.622641509433962e-06, | |
| "loss": 0.5437, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 9.09, | |
| "learning_rate": 4.5283018867924524e-06, | |
| "loss": 0.6673, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 9.11, | |
| "learning_rate": 4.4339622641509435e-06, | |
| "loss": 0.5573, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 9.13, | |
| "learning_rate": 4.339622641509434e-06, | |
| "loss": 0.5971, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 9.15, | |
| "learning_rate": 4.245283018867925e-06, | |
| "loss": 0.5829, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 9.17, | |
| "learning_rate": 4.150943396226415e-06, | |
| "loss": 0.5953, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 9.19, | |
| "learning_rate": 4.056603773584905e-06, | |
| "loss": 0.6096, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 9.21, | |
| "learning_rate": 3.962264150943396e-06, | |
| "loss": 0.6193, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 9.23, | |
| "learning_rate": 3.867924528301887e-06, | |
| "loss": 0.6021, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 9.25, | |
| "learning_rate": 3.7735849056603773e-06, | |
| "loss": 0.6095, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 9.26, | |
| "learning_rate": 3.679245283018868e-06, | |
| "loss": 0.5709, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 9.28, | |
| "learning_rate": 3.5849056603773586e-06, | |
| "loss": 0.6103, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 9.3, | |
| "learning_rate": 3.4905660377358493e-06, | |
| "loss": 0.6074, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 9.32, | |
| "learning_rate": 3.3962264150943395e-06, | |
| "loss": 0.5336, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 9.34, | |
| "learning_rate": 3.30188679245283e-06, | |
| "loss": 0.5793, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 9.36, | |
| "learning_rate": 3.207547169811321e-06, | |
| "loss": 0.5899, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 9.38, | |
| "learning_rate": 3.1132075471698115e-06, | |
| "loss": 0.6602, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 9.4, | |
| "learning_rate": 3.018867924528302e-06, | |
| "loss": 0.5698, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 9.42, | |
| "learning_rate": 2.9245283018867924e-06, | |
| "loss": 0.5968, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 9.43, | |
| "learning_rate": 2.830188679245283e-06, | |
| "loss": 0.6084, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 9.45, | |
| "learning_rate": 2.7358490566037738e-06, | |
| "loss": 0.5757, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 9.47, | |
| "learning_rate": 2.6415094339622644e-06, | |
| "loss": 0.5803, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 9.49, | |
| "learning_rate": 2.547169811320755e-06, | |
| "loss": 0.6314, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 9.51, | |
| "learning_rate": 2.4528301886792457e-06, | |
| "loss": 0.6091, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 9.53, | |
| "learning_rate": 2.358490566037736e-06, | |
| "loss": 0.6383, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 9.55, | |
| "learning_rate": 2.2641509433962262e-06, | |
| "loss": 0.697, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 9.57, | |
| "learning_rate": 2.169811320754717e-06, | |
| "loss": 0.6193, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 9.58, | |
| "learning_rate": 2.0754716981132075e-06, | |
| "loss": 0.5657, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 9.6, | |
| "learning_rate": 1.981132075471698e-06, | |
| "loss": 0.5766, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 9.62, | |
| "learning_rate": 1.8867924528301887e-06, | |
| "loss": 0.6691, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 9.64, | |
| "learning_rate": 1.7924528301886793e-06, | |
| "loss": 0.5512, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 9.66, | |
| "learning_rate": 1.6981132075471698e-06, | |
| "loss": 0.598, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 9.68, | |
| "learning_rate": 1.6037735849056604e-06, | |
| "loss": 0.5527, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 9.7, | |
| "learning_rate": 1.509433962264151e-06, | |
| "loss": 0.6116, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 9.72, | |
| "learning_rate": 1.4150943396226415e-06, | |
| "loss": 0.6351, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 9.74, | |
| "learning_rate": 1.3207547169811322e-06, | |
| "loss": 0.5633, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 9.75, | |
| "learning_rate": 1.2264150943396229e-06, | |
| "loss": 0.5586, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 9.77, | |
| "learning_rate": 1.1320754716981131e-06, | |
| "loss": 0.5783, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 9.79, | |
| "learning_rate": 1.0377358490566038e-06, | |
| "loss": 0.638, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 9.81, | |
| "learning_rate": 9.433962264150943e-07, | |
| "loss": 0.5432, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 9.83, | |
| "learning_rate": 8.490566037735849e-07, | |
| "loss": 0.5232, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 9.85, | |
| "learning_rate": 7.547169811320755e-07, | |
| "loss": 0.6154, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 9.87, | |
| "learning_rate": 6.603773584905661e-07, | |
| "loss": 0.637, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 9.89, | |
| "learning_rate": 5.660377358490566e-07, | |
| "loss": 0.5593, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 9.91, | |
| "learning_rate": 4.7169811320754717e-07, | |
| "loss": 0.6447, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 9.92, | |
| "learning_rate": 3.773584905660378e-07, | |
| "loss": 0.6056, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 9.94, | |
| "learning_rate": 2.830188679245283e-07, | |
| "loss": 0.6928, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 9.96, | |
| "learning_rate": 1.886792452830189e-07, | |
| "loss": 0.609, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 9.98, | |
| "learning_rate": 9.433962264150944e-08, | |
| "loss": 0.6045, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 10.0, | |
| "learning_rate": 0.0, | |
| "loss": 0.5314, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 10.0, | |
| "step": 530, | |
| "total_flos": 6755526882816000.0, | |
| "train_loss": 1.0295860317518126, | |
| "train_runtime": 364.5885, | |
| "train_samples_per_second": 91.912, | |
| "train_steps_per_second": 1.454 | |
| } | |
| ], | |
| "max_steps": 530, | |
| "num_train_epochs": 10, | |
| "total_flos": 6755526882816000.0, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
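
The log above has the layout written by the Hugging Face `Trainer` into `trainer_state.json`: per-step entries under `log_history`, then a final summary entry with `train_loss`, `train_runtime`, and throughput, and the run-level fields `max_steps`, `num_train_epochs`, and `total_flos`. The `learning_rate` values follow a linear decay from 5e-05 at step 0 to 0.0 at step 530. Below is a minimal sketch, not part of the original log, of how one might check that schedule and summarize the logged loss; the filename `trainer_state.json` is an assumption, while the field names come from the file itself.

```python
import json

# Assumption: the JSON above is saved locally as "trainer_state.json".
with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step entries carry "loss" and "learning_rate"; the final summary entry does not.
entries = [e for e in state["log_history"] if "loss" in e and "learning_rate" in e]
steps = [e["step"] for e in entries]
losses = [e["loss"] for e in entries]
lrs = [e["learning_rate"] for e in entries]

# The logged rates match a linear decay from 5e-05 at step 0 to 0.0 at max_steps:
#   lr(step) = base_lr * (max_steps - step) / max_steps
base_lr, max_steps = 5e-05, state["max_steps"]
expected = [base_lr * (max_steps - s) / max_steps for s in steps]
print("max deviation from linear schedule:",
      max(abs(a - b) for a, b in zip(lrs, expected)))

print("first / last logged loss:", losses[0], losses[-1])
print("mean logged loss:", sum(losses) / len(losses))
```

The mean of the per-step losses should come out close to the `train_loss` value (about 1.03) reported in the summary entry, since that field averages the loss over all 530 optimization steps.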