| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 0.9996397261919059, |
| "eval_steps": 500, |
| "global_step": 2081, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0, |
| "learning_rate": 6.34920634920635e-07, |
| "loss": 1.0895, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.26984126984127e-06, |
| "loss": 1.1735, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 1.904761904761905e-06, |
| "loss": 1.1056, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 2.53968253968254e-06, |
| "loss": 1.025, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.1746031746031746e-06, |
| "loss": 1.2185, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 3.80952380952381e-06, |
| "loss": 1.321, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 4.444444444444444e-06, |
| "loss": 1.0478, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 5.07936507936508e-06, |
| "loss": 1.058, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 5.7142857142857145e-06, |
| "loss": 0.9844, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.0, |
| "learning_rate": 6.349206349206349e-06, |
| "loss": 1.1117, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 6.984126984126984e-06, |
| "loss": 1.0519, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 7.61904761904762e-06, |
| "loss": 1.0784, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 8.253968253968254e-06, |
| "loss": 1.0631, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 8.888888888888888e-06, |
| "loss": 1.0115, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 9.523809523809525e-06, |
| "loss": 1.1257, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.015873015873016e-05, |
| "loss": 1.0646, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.0793650793650794e-05, |
| "loss": 0.9735, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.1428571428571429e-05, |
| "loss": 0.8837, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.2063492063492064e-05, |
| "loss": 1.1921, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.2698412698412699e-05, |
| "loss": 0.9976, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.3333333333333333e-05, |
| "loss": 0.8809, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.3968253968253968e-05, |
| "loss": 0.9342, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.4603174603174603e-05, |
| "loss": 0.9099, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.523809523809524e-05, |
| "loss": 0.8361, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.5873015873015872e-05, |
| "loss": 0.8963, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.6507936507936507e-05, |
| "loss": 1.0563, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.7142857142857142e-05, |
| "loss": 1.0313, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.7777777777777777e-05, |
| "loss": 0.9952, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.8412698412698415e-05, |
| "loss": 0.9503, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.904761904761905e-05, |
| "loss": 1.0316, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 1.9682539682539684e-05, |
| "loss": 1.0243, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.031746031746032e-05, |
| "loss": 0.9671, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.0952380952380954e-05, |
| "loss": 0.8852, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.158730158730159e-05, |
| "loss": 0.9168, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2222222222222227e-05, |
| "loss": 1.0366, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.2857142857142858e-05, |
| "loss": 0.824, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.3492063492063496e-05, |
| "loss": 1.0637, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4126984126984128e-05, |
| "loss": 0.9852, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.4761904761904766e-05, |
| "loss": 0.9821, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.5396825396825397e-05, |
| "loss": 0.8818, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6031746031746035e-05, |
| "loss": 0.9603, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.6666666666666667e-05, |
| "loss": 0.8277, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7301587301587305e-05, |
| "loss": 1.063, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.7936507936507936e-05, |
| "loss": 0.9686, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.8571428571428574e-05, |
| "loss": 0.8671, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9206349206349206e-05, |
| "loss": 0.709, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 2.9841269841269844e-05, |
| "loss": 0.9644, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.047619047619048e-05, |
| "loss": 0.7724, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.111111111111112e-05, |
| "loss": 0.9273, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.1746031746031745e-05, |
| "loss": 0.9536, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.2380952380952386e-05, |
| "loss": 0.8431, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 3.3015873015873014e-05, |
| "loss": 0.7777, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3.3650793650793656e-05, |
| "loss": 0.8895, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3.4285714285714284e-05, |
| "loss": 0.9128, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3.4920634920634925e-05, |
| "loss": 0.9728, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3.555555555555555e-05, |
| "loss": 1.0591, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3.6190476190476195e-05, |
| "loss": 0.9821, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3.682539682539683e-05, |
| "loss": 0.7926, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3.7460317460317464e-05, |
| "loss": 0.9065, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3.80952380952381e-05, |
| "loss": 0.9164, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3.8730158730158734e-05, |
| "loss": 0.9762, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3.936507936507937e-05, |
| "loss": 0.9714, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 4e-05, |
| "loss": 0.8679, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3.9999975764201455e-05, |
| "loss": 1.0564, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3.999990305686455e-05, |
| "loss": 0.7128, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3.999978187816552e-05, |
| "loss": 0.8653, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3.999961222839802e-05, |
| "loss": 0.802, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3.999939410797322e-05, |
| "loss": 0.8232, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3.999912751741976e-05, |
| "loss": 0.9164, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3.9998812457383744e-05, |
| "loss": 0.8491, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3.999844892862873e-05, |
| "loss": 0.8464, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 3.999803693203578e-05, |
| "loss": 0.9416, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 3.9997576468603386e-05, |
| "loss": 0.8149, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 3.999706753944752e-05, |
| "loss": 0.9426, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 3.9996510145801614e-05, |
| "loss": 0.9123, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 3.9995904289016565e-05, |
| "loss": 0.8335, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 3.9995249970560695e-05, |
| "loss": 0.8679, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 3.999454719201982e-05, |
| "loss": 0.8677, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 3.9993795955097156e-05, |
| "loss": 0.8351, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 3.999299626161341e-05, |
| "loss": 0.9645, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 3.999214811350669e-05, |
| "loss": 1.0644, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 3.9991251512832544e-05, |
| "loss": 0.7915, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 3.999030646176397e-05, |
| "loss": 0.8231, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 3.998931296259136e-05, |
| "loss": 0.8896, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 3.9988271017722564e-05, |
| "loss": 0.9326, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 3.998718062968278e-05, |
| "loss": 0.8982, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 3.9986041801114687e-05, |
| "loss": 0.9352, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 3.998485453477831e-05, |
| "loss": 0.8479, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 3.998361883355108e-05, |
| "loss": 0.7637, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 3.9982334700427825e-05, |
| "loss": 0.8371, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 3.998100213852075e-05, |
| "loss": 0.9657, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 3.9979621151059405e-05, |
| "loss": 1.0443, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 3.997819174139074e-05, |
| "loss": 0.8705, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 3.9976713912979045e-05, |
| "loss": 0.7415, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 3.9975187669405945e-05, |
| "loss": 1.0969, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 3.997361301437042e-05, |
| "loss": 0.9489, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 3.9971989951688767e-05, |
| "loss": 0.8558, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 3.997031848529461e-05, |
| "loss": 0.8709, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 3.996859861923888e-05, |
| "loss": 0.7765, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 3.996683035768982e-05, |
| "loss": 0.9204, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 3.996501370493294e-05, |
| "loss": 0.8125, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 3.996314866537105e-05, |
| "loss": 0.8693, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 3.9961235243524224e-05, |
| "loss": 0.7341, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 3.9959273444029784e-05, |
| "loss": 0.7737, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 3.995726327164231e-05, |
| "loss": 0.9845, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 3.995520473123362e-05, |
| "loss": 0.9002, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 3.995309782779275e-05, |
| "loss": 0.8678, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 3.9950942566425954e-05, |
| "loss": 0.8083, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 3.9948738952356666e-05, |
| "loss": 0.7714, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 3.9946486990925525e-05, |
| "loss": 0.8358, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 3.9944186687590357e-05, |
| "loss": 0.7999, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 3.994183804792611e-05, |
| "loss": 1.045, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 3.9939441077624896e-05, |
| "loss": 0.905, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 3.993699578249599e-05, |
| "loss": 0.8678, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 3.993450216846573e-05, |
| "loss": 0.8256, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 3.993196024157762e-05, |
| "loss": 0.8424, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 3.992937000799219e-05, |
| "loss": 0.8764, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 3.9926731473987104e-05, |
| "loss": 0.858, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 3.992404464595705e-05, |
| "loss": 0.8272, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 3.9921309530413764e-05, |
| "loss": 0.8956, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 3.991852613398603e-05, |
| "loss": 0.8312, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 3.991569446341962e-05, |
| "loss": 0.8409, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 3.991281452557732e-05, |
| "loss": 0.923, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 3.990988632743888e-05, |
| "loss": 0.9534, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 3.990690987610104e-05, |
| "loss": 0.8584, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 3.9903885178777445e-05, |
| "loss": 0.7978, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 3.990081224279872e-05, |
| "loss": 0.8837, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 3.989769107561234e-05, |
| "loss": 0.9165, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 3.9894521684782715e-05, |
| "loss": 0.8943, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 3.989130407799113e-05, |
| "loss": 1.0133, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 3.9888038263035686e-05, |
| "loss": 1.0161, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 3.988472424783137e-05, |
| "loss": 0.8745, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 3.988136204040995e-05, |
| "loss": 0.7766, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 3.987795164892001e-05, |
| "loss": 0.7795, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 3.98744930816269e-05, |
| "loss": 0.8202, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 3.987098634691274e-05, |
| "loss": 1.0134, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 3.986743145327637e-05, |
| "loss": 0.8346, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 3.9863828409333374e-05, |
| "loss": 0.8658, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 3.986017722381601e-05, |
| "loss": 0.9497, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 3.9856477905573214e-05, |
| "loss": 0.9212, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 3.985273046357059e-05, |
| "loss": 0.8871, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 3.984893490689035e-05, |
| "loss": 0.8555, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 3.9845091244731337e-05, |
| "loss": 0.794, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 3.984119948640897e-05, |
| "loss": 0.8643, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 3.9837259641355235e-05, |
| "loss": 0.8282, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 3.9833271719118664e-05, |
| "loss": 0.696, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 3.9829235729364306e-05, |
| "loss": 0.816, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 3.9825151681873705e-05, |
| "loss": 0.8172, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 3.982101958654487e-05, |
| "loss": 0.9991, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 3.981683945339226e-05, |
| "loss": 0.8788, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 3.981261129254678e-05, |
| "loss": 0.9226, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 3.98083351142557e-05, |
| "loss": 0.8852, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 3.9804010928882684e-05, |
| "loss": 0.8589, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 3.979963874690774e-05, |
| "loss": 0.8704, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 3.97952185789272e-05, |
| "loss": 0.7783, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 3.9790750435653705e-05, |
| "loss": 0.8171, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 3.9786234327916136e-05, |
| "loss": 0.7907, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 3.978167026665965e-05, |
| "loss": 0.7421, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 3.9777058262945624e-05, |
| "loss": 0.8666, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 3.97723983279516e-05, |
| "loss": 0.8571, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 3.976769047297132e-05, |
| "loss": 0.7975, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 3.976293470941463e-05, |
| "loss": 1.0014, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 3.975813104880752e-05, |
| "loss": 0.8491, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 3.975327950279203e-05, |
| "loss": 0.9437, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 3.9748380083126274e-05, |
| "loss": 0.7329, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 3.974343280168439e-05, |
| "loss": 1.0359, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 3.97384376704565e-05, |
| "loss": 0.7802, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 3.973339470154872e-05, |
| "loss": 0.9464, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 3.972830390718308e-05, |
| "loss": 0.9039, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 3.972316529969752e-05, |
| "loss": 0.8495, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 3.971797889154587e-05, |
| "loss": 0.8585, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 3.971274469529781e-05, |
| "loss": 1.0326, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 3.970746272363882e-05, |
| "loss": 0.9453, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 3.9702132989370195e-05, |
| "loss": 0.8943, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 3.969675550540897e-05, |
| "loss": 0.849, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 3.9691330284787885e-05, |
| "loss": 0.9529, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 3.968585734065542e-05, |
| "loss": 0.8069, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 3.968033668627569e-05, |
| "loss": 0.8602, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 3.967476833502842e-05, |
| "loss": 0.9173, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 3.966915230040898e-05, |
| "loss": 0.9686, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 3.966348859602826e-05, |
| "loss": 1.0021, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 3.965777723561272e-05, |
| "loss": 0.8489, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 3.9652018233004285e-05, |
| "loss": 0.8457, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 3.964621160216035e-05, |
| "loss": 0.9543, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 3.964035735715376e-05, |
| "loss": 0.8194, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 3.963445551217274e-05, |
| "loss": 0.8343, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 3.962850608152089e-05, |
| "loss": 0.8299, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 3.9622509079617126e-05, |
| "loss": 1.0046, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 3.961646452099565e-05, |
| "loss": 0.9438, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 3.9610372420305954e-05, |
| "loss": 0.8098, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 3.960423279231271e-05, |
| "loss": 0.8244, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 3.959804565189581e-05, |
| "loss": 0.8205, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 3.959181101405028e-05, |
| "loss": 0.905, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 3.958552889388626e-05, |
| "loss": 0.7775, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 3.957919930662897e-05, |
| "loss": 0.8515, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 3.957282226761867e-05, |
| "loss": 0.8241, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 3.9566397792310634e-05, |
| "loss": 0.8827, |
| "step": 197 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 3.9559925896275074e-05, |
| "loss": 0.9258, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 3.955340659519715e-05, |
| "loss": 0.8532, |
| "step": 199 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 3.954683990487692e-05, |
| "loss": 0.7331, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 3.9540225841229274e-05, |
| "loss": 0.8455, |
| "step": 201 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 3.9533564420283923e-05, |
| "loss": 0.7878, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 3.952685565818536e-05, |
| "loss": 0.8024, |
| "step": 203 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 3.95200995711928e-05, |
| "loss": 0.9417, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 3.951329617568015e-05, |
| "loss": 0.9053, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 3.9506445488135995e-05, |
| "loss": 0.7534, |
| "step": 206 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 3.949954752516353e-05, |
| "loss": 0.8626, |
| "step": 207 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 3.94926023034805e-05, |
| "loss": 0.9647, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 3.948560983991922e-05, |
| "loss": 0.7573, |
| "step": 209 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 3.9478570151426475e-05, |
| "loss": 0.7788, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 3.947148325506351e-05, |
| "loss": 0.8151, |
| "step": 211 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 3.946434916800599e-05, |
| "loss": 0.8526, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 3.945716790754395e-05, |
| "loss": 0.9234, |
| "step": 213 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 3.944993949108174e-05, |
| "loss": 0.8133, |
| "step": 214 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 3.9442663936138e-05, |
| "loss": 0.6915, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 3.9435341260345636e-05, |
| "loss": 0.7824, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 3.9427971481451715e-05, |
| "loss": 0.7328, |
| "step": 217 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 3.942055461731751e-05, |
| "loss": 0.8685, |
| "step": 218 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 3.941309068591835e-05, |
| "loss": 0.8444, |
| "step": 219 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 3.9405579705343705e-05, |
| "loss": 0.982, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 3.939802169379701e-05, |
| "loss": 0.8162, |
| "step": 221 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 3.9390416669595725e-05, |
| "loss": 1.0345, |
| "step": 222 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 3.938276465117122e-05, |
| "loss": 0.8799, |
| "step": 223 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 3.9375065657068786e-05, |
| "loss": 0.8206, |
| "step": 224 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 3.9367319705947545e-05, |
| "loss": 0.7084, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 3.9359526816580425e-05, |
| "loss": 0.9249, |
| "step": 226 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 3.935168700785411e-05, |
| "loss": 0.8001, |
| "step": 227 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 3.934380029876902e-05, |
| "loss": 0.7561, |
| "step": 228 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 3.933586670843922e-05, |
| "loss": 0.8732, |
| "step": 229 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 3.9327886256092383e-05, |
| "loss": 0.9032, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 3.9319858961069794e-05, |
| "loss": 0.9088, |
| "step": 231 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 3.9311784842826224e-05, |
| "loss": 0.9444, |
| "step": 232 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 3.930366392092995e-05, |
| "loss": 0.7806, |
| "step": 233 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 3.929549621506269e-05, |
| "loss": 0.8636, |
| "step": 234 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 3.928728174501951e-05, |
| "loss": 0.8668, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 3.927902053070884e-05, |
| "loss": 0.8495, |
| "step": 236 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 3.92707125921524e-05, |
| "loss": 0.8076, |
| "step": 237 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 3.926235794948515e-05, |
| "loss": 0.8652, |
| "step": 238 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 3.9253956622955206e-05, |
| "loss": 0.6898, |
| "step": 239 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 3.924550863292387e-05, |
| "loss": 0.8157, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 3.923701399986553e-05, |
| "loss": 0.8027, |
| "step": 241 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 3.922847274436759e-05, |
| "loss": 0.8884, |
| "step": 242 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 3.9219884887130474e-05, |
| "loss": 0.99, |
| "step": 243 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 3.9211250448967535e-05, |
| "loss": 0.9088, |
| "step": 244 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 3.920256945080502e-05, |
| "loss": 0.9476, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 3.919384191368204e-05, |
| "loss": 0.8133, |
| "step": 246 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 3.9185067858750464e-05, |
| "loss": 0.8581, |
| "step": 247 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 3.917624730727491e-05, |
| "loss": 0.7402, |
| "step": 248 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 3.91673802806327e-05, |
| "loss": 0.7575, |
| "step": 249 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 3.9158466800313776e-05, |
| "loss": 0.8305, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 3.9149506887920676e-05, |
| "loss": 0.8849, |
| "step": 251 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 3.914050056516845e-05, |
| "loss": 0.7999, |
| "step": 252 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 3.913144785388465e-05, |
| "loss": 0.7355, |
| "step": 253 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 3.9122348776009236e-05, |
| "loss": 0.9156, |
| "step": 254 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 3.911320335359456e-05, |
| "loss": 0.8758, |
| "step": 255 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 3.9104011608805284e-05, |
| "loss": 0.9079, |
| "step": 256 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 3.909477356391833e-05, |
| "loss": 0.6066, |
| "step": 257 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 3.908548924132283e-05, |
| "loss": 0.832, |
| "step": 258 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 3.90761586635201e-05, |
| "loss": 0.8857, |
| "step": 259 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 3.9066781853123526e-05, |
| "loss": 0.9132, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 3.905735883285856e-05, |
| "loss": 0.8525, |
| "step": 261 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 3.9047889625562643e-05, |
| "loss": 0.7316, |
| "step": 262 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 3.903837425418516e-05, |
| "loss": 0.7448, |
| "step": 263 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 3.902881274178737e-05, |
| "loss": 0.8542, |
| "step": 264 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 3.901920511154236e-05, |
| "loss": 0.7311, |
| "step": 265 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 3.9009551386734996e-05, |
| "loss": 0.8629, |
| "step": 266 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 3.899985159076184e-05, |
| "loss": 0.8525, |
| "step": 267 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 3.8990105747131135e-05, |
| "loss": 0.8914, |
| "step": 268 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 3.8980313879462694e-05, |
| "loss": 0.7484, |
| "step": 269 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 3.897047601148791e-05, |
| "loss": 0.7643, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 3.8960592167049635e-05, |
| "loss": 0.8069, |
| "step": 271 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 3.8950662370102145e-05, |
| "loss": 1.0453, |
| "step": 272 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 3.8940686644711104e-05, |
| "loss": 0.8446, |
| "step": 273 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 3.893066501505349e-05, |
| "loss": 0.8343, |
| "step": 274 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 3.89205975054175e-05, |
| "loss": 0.8245, |
| "step": 275 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 3.891048414020256e-05, |
| "loss": 0.8813, |
| "step": 276 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 3.890032494391922e-05, |
| "loss": 0.7409, |
| "step": 277 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 3.8890119941189103e-05, |
| "loss": 0.7772, |
| "step": 278 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 3.8879869156744845e-05, |
| "loss": 0.8946, |
| "step": 279 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 3.886957261543003e-05, |
| "loss": 0.8574, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 3.8859230342199174e-05, |
| "loss": 0.8977, |
| "step": 281 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 3.8848842362117584e-05, |
| "loss": 0.9835, |
| "step": 282 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 3.883840870036136e-05, |
| "loss": 0.9123, |
| "step": 283 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 3.882792938221732e-05, |
| "loss": 0.7726, |
| "step": 284 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 3.881740443308292e-05, |
| "loss": 0.8565, |
| "step": 285 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 3.880683387846623e-05, |
| "loss": 0.9208, |
| "step": 286 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 3.879621774398581e-05, |
| "loss": 0.8102, |
| "step": 287 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 3.878555605537074e-05, |
| "loss": 0.8464, |
| "step": 288 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 3.877484883846044e-05, |
| "loss": 0.7296, |
| "step": 289 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 3.8764096119204735e-05, |
| "loss": 0.8411, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 3.8753297923663685e-05, |
| "loss": 0.7941, |
| "step": 291 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 3.874245427800757e-05, |
| "loss": 0.8525, |
| "step": 292 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 3.873156520851684e-05, |
| "loss": 0.7985, |
| "step": 293 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 3.872063074158204e-05, |
| "loss": 0.9064, |
| "step": 294 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 3.87096509037037e-05, |
| "loss": 0.8325, |
| "step": 295 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 3.869862572149234e-05, |
| "loss": 0.961, |
| "step": 296 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 3.868755522166838e-05, |
| "loss": 0.9097, |
| "step": 297 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 3.8676439431062054e-05, |
| "loss": 0.8137, |
| "step": 298 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 3.866527837661337e-05, |
| "loss": 0.8879, |
| "step": 299 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 3.865407208537202e-05, |
| "loss": 0.8256, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 3.8642820584497366e-05, |
| "loss": 0.8636, |
| "step": 301 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 3.863152390125831e-05, |
| "loss": 0.8038, |
| "step": 302 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 3.862018206303327e-05, |
| "loss": 0.8884, |
| "step": 303 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 3.860879509731009e-05, |
| "loss": 0.7455, |
| "step": 304 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 3.859736303168599e-05, |
| "loss": 0.9096, |
| "step": 305 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 3.85858858938675e-05, |
| "loss": 0.8153, |
| "step": 306 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 3.857436371167038e-05, |
| "loss": 0.7047, |
| "step": 307 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 3.856279651301955e-05, |
| "loss": 0.7015, |
| "step": 308 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 3.855118432594905e-05, |
| "loss": 0.8313, |
| "step": 309 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 3.8539527178601945e-05, |
| "loss": 0.792, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 3.8527825099230244e-05, |
| "loss": 0.8633, |
| "step": 311 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 3.851607811619489e-05, |
| "loss": 0.7343, |
| "step": 312 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 3.850428625796563e-05, |
| "loss": 0.8593, |
| "step": 313 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 3.849244955312097e-05, |
| "loss": 0.8673, |
| "step": 314 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 3.848056803034811e-05, |
| "loss": 0.8153, |
| "step": 315 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 3.8468641718442875e-05, |
| "loss": 0.8026, |
| "step": 316 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 3.845667064630962e-05, |
| "loss": 0.8072, |
| "step": 317 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 3.844465484296121e-05, |
| "loss": 0.8605, |
| "step": 318 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 3.84325943375189e-05, |
| "loss": 0.7819, |
| "step": 319 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 3.8420489159212276e-05, |
| "loss": 0.9863, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 3.8408339337379216e-05, |
| "loss": 0.9353, |
| "step": 321 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 3.839614490146579e-05, |
| "loss": 0.7617, |
| "step": 322 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 3.838390588102617e-05, |
| "loss": 0.9067, |
| "step": 323 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 3.837162230572262e-05, |
| "loss": 0.9405, |
| "step": 324 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 3.835929420532535e-05, |
| "loss": 0.7311, |
| "step": 325 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 3.8346921609712484e-05, |
| "loss": 0.7849, |
| "step": 326 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 3.8334504548870026e-05, |
| "loss": 0.8944, |
| "step": 327 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 3.832204305289169e-05, |
| "loss": 0.8006, |
| "step": 328 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 3.830953715197893e-05, |
| "loss": 0.816, |
| "step": 329 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 3.829698687644077e-05, |
| "loss": 0.7985, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 3.828439225669382e-05, |
| "loss": 0.81, |
| "step": 331 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 3.827175332326214e-05, |
| "loss": 0.8535, |
| "step": 332 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 3.82590701067772e-05, |
| "loss": 0.8296, |
| "step": 333 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 3.824634263797779e-05, |
| "loss": 0.8121, |
| "step": 334 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 3.823357094770994e-05, |
| "loss": 0.8078, |
| "step": 335 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 3.822075506692686e-05, |
| "loss": 0.8539, |
| "step": 336 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 3.8207895026688867e-05, |
| "loss": 0.8946, |
| "step": 337 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 3.8194990858163294e-05, |
| "loss": 0.8443, |
| "step": 338 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 3.818204259262442e-05, |
| "loss": 0.8162, |
| "step": 339 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 3.8169050261453406e-05, |
| "loss": 0.7585, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 3.81560138961382e-05, |
| "loss": 0.8455, |
| "step": 341 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 3.814293352827347e-05, |
| "loss": 0.8488, |
| "step": 342 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 3.812980918956055e-05, |
| "loss": 0.8369, |
| "step": 343 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 3.81166409118073e-05, |
| "loss": 0.7382, |
| "step": 344 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 3.810342872692811e-05, |
| "loss": 0.7876, |
| "step": 345 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 3.8090172666943755e-05, |
| "loss": 0.8382, |
| "step": 346 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 3.807687276398136e-05, |
| "loss": 0.8402, |
| "step": 347 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 3.80635290502743e-05, |
| "loss": 0.9505, |
| "step": 348 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 3.805014155816213e-05, |
| "loss": 0.9165, |
| "step": 349 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 3.803671032009051e-05, |
| "loss": 0.8564, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 3.80232353686111e-05, |
| "loss": 0.9599, |
| "step": 351 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 3.800971673638155e-05, |
| "loss": 0.8333, |
| "step": 352 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 3.799615445616533e-05, |
| "loss": 0.8219, |
| "step": 353 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 3.798254856083171e-05, |
| "loss": 0.9042, |
| "step": 354 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 3.7968899083355664e-05, |
| "loss": 0.832, |
| "step": 355 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 3.7955206056817786e-05, |
| "loss": 0.7922, |
| "step": 356 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 3.7941469514404225e-05, |
| "loss": 0.7192, |
| "step": 357 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 3.792768948940659e-05, |
| "loss": 0.8974, |
| "step": 358 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 3.791386601522187e-05, |
| "loss": 0.8217, |
| "step": 359 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 3.789999912535235e-05, |
| "loss": 0.8442, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 3.788608885340556e-05, |
| "loss": 0.7616, |
| "step": 361 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 3.7872135233094145e-05, |
| "loss": 0.8246, |
| "step": 362 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 3.7858138298235815e-05, |
| "loss": 0.848, |
| "step": 363 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 3.784409808275328e-05, |
| "loss": 0.8733, |
| "step": 364 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 3.78300146206741e-05, |
| "loss": 0.865, |
| "step": 365 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 3.7815887946130675e-05, |
| "loss": 0.9627, |
| "step": 366 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 3.780171809336013e-05, |
| "loss": 0.8492, |
| "step": 367 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 3.778750509670424e-05, |
| "loss": 0.9278, |
| "step": 368 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 3.777324899060934e-05, |
| "loss": 0.7373, |
| "step": 369 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 3.775894980962623e-05, |
| "loss": 0.8477, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 3.774460758841012e-05, |
| "loss": 0.9743, |
| "step": 371 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 3.773022236172054e-05, |
| "loss": 0.7991, |
| "step": 372 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 3.771579416442122e-05, |
| "loss": 0.7981, |
| "step": 373 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 3.7701323031480055e-05, |
| "loss": 0.7349, |
| "step": 374 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 3.7686808997968996e-05, |
| "loss": 0.8429, |
| "step": 375 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 3.767225209906395e-05, |
| "loss": 0.9152, |
| "step": 376 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 3.765765237004473e-05, |
| "loss": 0.8243, |
| "step": 377 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 3.7643009846294954e-05, |
| "loss": 0.7706, |
| "step": 378 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 3.762832456330194e-05, |
| "loss": 0.8586, |
| "step": 379 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 3.761359655665664e-05, |
| "loss": 0.8182, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 3.7598825862053564e-05, |
| "loss": 0.8633, |
| "step": 381 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 3.758401251529066e-05, |
| "loss": 0.8517, |
| "step": 382 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 3.756915655226926e-05, |
| "loss": 0.9424, |
| "step": 383 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 3.755425800899398e-05, |
| "loss": 0.8925, |
| "step": 384 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 3.753931692157263e-05, |
| "loss": 0.7484, |
| "step": 385 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 3.7524333326216125e-05, |
| "loss": 0.7163, |
| "step": 386 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 3.75093072592384e-05, |
| "loss": 0.7826, |
| "step": 387 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 3.749423875705633e-05, |
| "loss": 0.7602, |
| "step": 388 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 3.747912785618964e-05, |
| "loss": 0.9115, |
| "step": 389 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 3.7463974593260814e-05, |
| "loss": 0.8326, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 3.7448779004994976e-05, |
| "loss": 0.9297, |
| "step": 391 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 3.743354112821985e-05, |
| "loss": 0.9103, |
| "step": 392 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 3.7418260999865654e-05, |
| "loss": 0.9034, |
| "step": 393 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 3.740293865696499e-05, |
| "loss": 0.9349, |
| "step": 394 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 3.73875741366528e-05, |
| "loss": 0.7483, |
| "step": 395 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 3.73721674761662e-05, |
| "loss": 0.7923, |
| "step": 396 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 3.7356718712844473e-05, |
| "loss": 0.6573, |
| "step": 397 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 3.7341227884128946e-05, |
| "loss": 0.8281, |
| "step": 398 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 3.732569502756286e-05, |
| "loss": 0.828, |
| "step": 399 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 3.731012018079133e-05, |
| "loss": 0.8729, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 3.729450338156125e-05, |
| "loss": 0.9161, |
| "step": 401 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 3.727884466772119e-05, |
| "loss": 0.8946, |
| "step": 402 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 3.726314407722127e-05, |
| "loss": 0.7238, |
| "step": 403 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 3.724740164811315e-05, |
| "loss": 0.8633, |
| "step": 404 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 3.723161741854984e-05, |
| "loss": 0.8415, |
| "step": 405 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 3.72157914267857e-05, |
| "loss": 0.7468, |
| "step": 406 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 3.719992371117627e-05, |
| "loss": 0.7795, |
| "step": 407 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 3.718401431017824e-05, |
| "loss": 0.8727, |
| "step": 408 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 3.7168063262349305e-05, |
| "loss": 0.7865, |
| "step": 409 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 3.7152070606348106e-05, |
| "loss": 0.7953, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 3.713603638093412e-05, |
| "loss": 0.8034, |
| "step": 411 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 3.7119960624967574e-05, |
| "loss": 0.7144, |
| "step": 412 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 3.7103843377409346e-05, |
| "loss": 0.8954, |
| "step": 413 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 3.708768467732087e-05, |
| "loss": 0.8446, |
| "step": 414 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 3.707148456386405e-05, |
| "loss": 0.7289, |
| "step": 415 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 3.705524307630115e-05, |
| "loss": 0.7269, |
| "step": 416 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 3.7038960253994714e-05, |
| "loss": 0.7995, |
| "step": 417 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 3.7022636136407465e-05, |
| "loss": 0.7883, |
| "step": 418 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 3.700627076310222e-05, |
| "loss": 0.8259, |
| "step": 419 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 3.698986417374173e-05, |
| "loss": 0.88, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 3.69734164080887e-05, |
| "loss": 0.8297, |
| "step": 421 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 3.695692750600561e-05, |
| "loss": 0.818, |
| "step": 422 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 3.6940397507454614e-05, |
| "loss": 0.7104, |
| "step": 423 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 3.692382645249749e-05, |
| "loss": 0.7665, |
| "step": 424 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 3.6907214381295516e-05, |
| "loss": 0.8125, |
| "step": 425 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 3.689056133410937e-05, |
| "loss": 0.8496, |
| "step": 426 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 3.687386735129903e-05, |
| "loss": 0.6811, |
| "step": 427 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 3.685713247332372e-05, |
| "loss": 0.8451, |
| "step": 428 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 3.6840356740741744e-05, |
| "loss": 0.7363, |
| "step": 429 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 3.682354019421042e-05, |
| "loss": 0.9055, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 3.6806682874486e-05, |
| "loss": 0.7397, |
| "step": 431 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 3.6789784822423544e-05, |
| "loss": 0.8909, |
| "step": 432 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 3.677284607897683e-05, |
| "loss": 0.7827, |
| "step": 433 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 3.6755866685198255e-05, |
| "loss": 0.8396, |
| "step": 434 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 3.673884668223873e-05, |
| "loss": 0.7953, |
| "step": 435 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 3.6721786111347603e-05, |
| "loss": 0.8267, |
| "step": 436 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 3.670468501387252e-05, |
| "loss": 0.7708, |
| "step": 437 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 3.6687543431259366e-05, |
| "loss": 0.7906, |
| "step": 438 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 3.6670361405052125e-05, |
| "loss": 0.7398, |
| "step": 439 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 3.6653138976892816e-05, |
| "loss": 0.8282, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 3.663587618852136e-05, |
| "loss": 0.8384, |
| "step": 441 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 3.661857308177552e-05, |
| "loss": 0.7304, |
| "step": 442 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 3.660122969859074e-05, |
| "loss": 0.8538, |
| "step": 443 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 3.658384608100011e-05, |
| "loss": 0.7887, |
| "step": 444 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 3.656642227113419e-05, |
| "loss": 0.8282, |
| "step": 445 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 3.6548958311221e-05, |
| "loss": 0.7296, |
| "step": 446 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 3.6531454243585834e-05, |
| "loss": 0.7535, |
| "step": 447 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 3.65139101106512e-05, |
| "loss": 0.9152, |
| "step": 448 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 3.649632595493669e-05, |
| "loss": 1.0374, |
| "step": 449 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 3.647870181905893e-05, |
| "loss": 0.8419, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 3.646103774573141e-05, |
| "loss": 0.7652, |
| "step": 451 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 3.644333377776442e-05, |
| "loss": 0.7735, |
| "step": 452 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 3.642558995806495e-05, |
| "loss": 0.8764, |
| "step": 453 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 3.6407806329636556e-05, |
| "loss": 0.7971, |
| "step": 454 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 3.638998293557928e-05, |
| "loss": 0.8727, |
| "step": 455 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 3.637211981908954e-05, |
| "loss": 0.8415, |
| "step": 456 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 3.635421702346004e-05, |
| "loss": 0.8604, |
| "step": 457 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 3.633627459207962e-05, |
| "loss": 0.7912, |
| "step": 458 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 3.63182925684332e-05, |
| "loss": 0.9299, |
| "step": 459 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 3.630027099610165e-05, |
| "loss": 0.9001, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 3.6282209918761684e-05, |
| "loss": 0.903, |
| "step": 461 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 3.626410938018578e-05, |
| "loss": 0.9535, |
| "step": 462 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 3.624596942424202e-05, |
| "loss": 0.9369, |
| "step": 463 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 3.622779009489405e-05, |
| "loss": 0.7961, |
| "step": 464 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 3.620957143620092e-05, |
| "loss": 0.8618, |
| "step": 465 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 3.6191313492317005e-05, |
| "loss": 0.8562, |
| "step": 466 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 3.61730163074919e-05, |
| "loss": 0.844, |
| "step": 467 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 3.6154679926070266e-05, |
| "loss": 0.9032, |
| "step": 468 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 3.613630439249182e-05, |
| "loss": 0.8527, |
| "step": 469 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 3.611788975129112e-05, |
| "loss": 0.8766, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 3.609943604709751e-05, |
| "loss": 0.7445, |
| "step": 471 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 3.608094332463503e-05, |
| "loss": 0.8015, |
| "step": 472 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 3.606241162872228e-05, |
| "loss": 0.812, |
| "step": 473 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 3.604384100427227e-05, |
| "loss": 0.8537, |
| "step": 474 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 3.6025231496292426e-05, |
| "loss": 0.9434, |
| "step": 475 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 3.600658314988436e-05, |
| "loss": 0.7642, |
| "step": 476 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 3.5987896010243824e-05, |
| "loss": 0.9407, |
| "step": 477 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 3.59691701226606e-05, |
| "loss": 0.8671, |
| "step": 478 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 3.5950405532518387e-05, |
| "loss": 0.896, |
| "step": 479 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 3.593160228529464e-05, |
| "loss": 0.7941, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 3.5912760426560544e-05, |
| "loss": 0.8675, |
| "step": 481 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 3.589388000198085e-05, |
| "loss": 0.901, |
| "step": 482 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 3.5874961057313764e-05, |
| "loss": 0.8555, |
| "step": 483 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 3.585600363841088e-05, |
| "loss": 0.7892, |
| "step": 484 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 3.5837007791216995e-05, |
| "loss": 0.7373, |
| "step": 485 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 3.581797356177007e-05, |
| "loss": 0.7006, |
| "step": 486 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 3.579890099620109e-05, |
| "loss": 0.7626, |
| "step": 487 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 3.577979014073392e-05, |
| "loss": 0.7997, |
| "step": 488 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 3.576064104168526e-05, |
| "loss": 0.8331, |
| "step": 489 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 3.574145374546447e-05, |
| "loss": 0.7931, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 3.57222282985735e-05, |
| "loss": 0.7866, |
| "step": 491 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 3.5702964747606765e-05, |
| "loss": 0.937, |
| "step": 492 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 3.568366313925101e-05, |
| "loss": 0.7642, |
| "step": 493 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 3.566432352028522e-05, |
| "loss": 0.8629, |
| "step": 494 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 3.564494593758051e-05, |
| "loss": 0.7315, |
| "step": 495 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 3.5625530438100004e-05, |
| "loss": 1.483, |
| "step": 496 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 3.5606077068898705e-05, |
| "loss": 0.766, |
| "step": 497 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 3.5586585877123415e-05, |
| "loss": 0.901, |
| "step": 498 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 3.556705691001259e-05, |
| "loss": 0.8016, |
| "step": 499 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 3.554749021489625e-05, |
| "loss": 0.7369, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 3.552788583919583e-05, |
| "loss": 0.7872, |
| "step": 501 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 3.55082438304241e-05, |
| "loss": 0.9227, |
| "step": 502 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 3.5488564236185053e-05, |
| "loss": 0.7929, |
| "step": 503 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 3.5468847104173747e-05, |
| "loss": 0.8551, |
| "step": 504 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 3.544909248217621e-05, |
| "loss": 0.7835, |
| "step": 505 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 3.542930041806938e-05, |
| "loss": 0.815, |
| "step": 506 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 3.540947095982088e-05, |
| "loss": 0.8669, |
| "step": 507 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 3.5389604155488986e-05, |
| "loss": 0.7505, |
| "step": 508 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 3.5369700053222497e-05, |
| "loss": 0.8883, |
| "step": 509 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 3.534975870126058e-05, |
| "loss": 0.7755, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 3.5329780147932705e-05, |
| "loss": 0.8622, |
| "step": 511 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 3.5309764441658496e-05, |
| "loss": 0.8317, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 3.5289711630947604e-05, |
| "loss": 0.6175, |
| "step": 513 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 3.526962176439962e-05, |
| "loss": 0.7854, |
| "step": 514 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 3.524949489070394e-05, |
| "loss": 0.8157, |
| "step": 515 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 3.522933105863965e-05, |
| "loss": 0.9842, |
| "step": 516 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 3.52091303170754e-05, |
| "loss": 0.7885, |
| "step": 517 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 3.518889271496932e-05, |
| "loss": 0.8805, |
| "step": 518 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 3.516861830136884e-05, |
| "loss": 0.765, |
| "step": 519 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 3.514830712541062e-05, |
| "loss": 0.8836, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 3.512795923632041e-05, |
| "loss": 0.8163, |
| "step": 521 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 3.5107574683412966e-05, |
| "loss": 0.8981, |
| "step": 522 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 3.5087153516091865e-05, |
| "loss": 0.8185, |
| "step": 523 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 3.506669578384944e-05, |
| "loss": 0.7973, |
| "step": 524 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 3.5046201536266635e-05, |
| "loss": 0.893, |
| "step": 525 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 3.50256708230129e-05, |
| "loss": 0.854, |
| "step": 526 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 3.500510369384606e-05, |
| "loss": 0.781, |
| "step": 527 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 3.498450019861219e-05, |
| "loss": 0.8094, |
| "step": 528 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 3.49638603872455e-05, |
| "loss": 0.7015, |
| "step": 529 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 3.494318430976824e-05, |
| "loss": 0.8307, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 3.492247201629052e-05, |
| "loss": 0.6934, |
| "step": 531 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 3.4901723557010236e-05, |
| "loss": 0.8804, |
| "step": 532 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 3.4880938982212945e-05, |
| "loss": 0.8299, |
| "step": 533 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 3.4860118342271714e-05, |
| "loss": 0.7884, |
| "step": 534 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 3.483926168764704e-05, |
| "loss": 0.8747, |
| "step": 535 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 3.481836906888667e-05, |
| "loss": 0.6359, |
| "step": 536 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 3.4797440536625556e-05, |
| "loss": 0.8699, |
| "step": 537 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 3.477647614158566e-05, |
| "loss": 0.6916, |
| "step": 538 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 3.4755475934575864e-05, |
| "loss": 0.8684, |
| "step": 539 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 3.4734439966491844e-05, |
| "loss": 0.686, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 3.471336828831596e-05, |
| "loss": 0.8439, |
| "step": 541 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 3.469226095111709e-05, |
| "loss": 0.7864, |
| "step": 542 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 3.467111800605056e-05, |
| "loss": 0.7898, |
| "step": 543 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 3.4649939504358e-05, |
| "loss": 0.7938, |
| "step": 544 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 3.4628725497367167e-05, |
| "loss": 0.9238, |
| "step": 545 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 3.460747603649194e-05, |
| "loss": 0.8985, |
| "step": 546 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 3.458619117323205e-05, |
| "loss": 0.7792, |
| "step": 547 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 3.456487095917308e-05, |
| "loss": 0.8916, |
| "step": 548 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 3.4543515445986265e-05, |
| "loss": 0.8299, |
| "step": 549 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 3.4522124685428404e-05, |
| "loss": 0.8005, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 3.450069872934172e-05, |
| "loss": 0.7925, |
| "step": 551 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 3.4479237629653706e-05, |
| "loss": 0.7626, |
| "step": 552 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 3.445774143837706e-05, |
| "loss": 0.8299, |
| "step": 553 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 3.443621020760953e-05, |
| "loss": 0.8726, |
| "step": 554 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 3.4414643989533764e-05, |
| "loss": 0.7443, |
| "step": 555 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 3.439304283641721e-05, |
| "loss": 0.8912, |
| "step": 556 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 3.4371406800611995e-05, |
| "loss": 0.7533, |
| "step": 557 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 3.434973593455477e-05, |
| "loss": 0.9133, |
| "step": 558 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 3.432803029076662e-05, |
| "loss": 0.7537, |
| "step": 559 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 3.43062899218529e-05, |
| "loss": 0.7736, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 3.4284514880503134e-05, |
| "loss": 0.83, |
| "step": 561 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 3.426270521949087e-05, |
| "loss": 0.6362, |
| "step": 562 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 3.424086099167356e-05, |
| "loss": 0.8213, |
| "step": 563 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 3.421898224999244e-05, |
| "loss": 0.8055, |
| "step": 564 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 3.419706904747239e-05, |
| "loss": 0.7437, |
| "step": 565 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 3.4175121437221794e-05, |
| "loss": 0.8992, |
| "step": 566 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 3.4153139472432454e-05, |
| "loss": 0.8173, |
| "step": 567 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 3.413112320637941e-05, |
| "loss": 0.7218, |
| "step": 568 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 3.410907269242084e-05, |
| "loss": 0.7529, |
| "step": 569 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 3.4086987983997916e-05, |
| "loss": 0.8072, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 3.406486913463471e-05, |
| "loss": 0.7501, |
| "step": 571 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 3.404271619793801e-05, |
| "loss": 0.91, |
| "step": 572 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 3.402052922759723e-05, |
| "loss": 0.8097, |
| "step": 573 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 3.399830827738426e-05, |
| "loss": 0.7735, |
| "step": 574 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 3.397605340115336e-05, |
| "loss": 0.8764, |
| "step": 575 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 3.3953764652840976e-05, |
| "loss": 0.8064, |
| "step": 576 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 3.393144208646569e-05, |
| "loss": 0.8023, |
| "step": 577 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 3.3909085756128026e-05, |
| "loss": 0.7171, |
| "step": 578 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 3.388669571601032e-05, |
| "loss": 0.739, |
| "step": 579 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 3.3864272020376635e-05, |
| "loss": 0.7718, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 3.384181472357257e-05, |
| "loss": 0.9454, |
| "step": 581 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 3.38193238800252e-05, |
| "loss": 0.9419, |
| "step": 582 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 3.379679954424287e-05, |
| "loss": 0.8109, |
| "step": 583 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 3.377424177081511e-05, |
| "loss": 0.8562, |
| "step": 584 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 3.375165061441247e-05, |
| "loss": 0.8789, |
| "step": 585 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 3.372902612978645e-05, |
| "loss": 0.8438, |
| "step": 586 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 3.3706368371769265e-05, |
| "loss": 0.7863, |
| "step": 587 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 3.368367739527382e-05, |
| "loss": 0.8441, |
| "step": 588 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 3.36609532552935e-05, |
| "loss": 0.8306, |
| "step": 589 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 3.363819600690208e-05, |
| "loss": 0.8554, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 3.361540570525356e-05, |
| "loss": 0.9843, |
| "step": 591 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 3.359258240558207e-05, |
| "loss": 0.8141, |
| "step": 592 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 3.3569726163201676e-05, |
| "loss": 0.7345, |
| "step": 593 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 3.354683703350633e-05, |
| "loss": 0.8267, |
| "step": 594 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 3.352391507196965e-05, |
| "loss": 0.6914, |
| "step": 595 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 3.3500960334144856e-05, |
| "loss": 0.7247, |
| "step": 596 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 3.347797287566457e-05, |
| "loss": 0.8007, |
| "step": 597 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 3.3454952752240744e-05, |
| "loss": 0.8869, |
| "step": 598 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 3.343190001966449e-05, |
| "loss": 0.9197, |
| "step": 599 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 3.340881473380593e-05, |
| "loss": 0.7868, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 3.338569695061411e-05, |
| "loss": 0.6803, |
| "step": 601 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 3.336254672611682e-05, |
| "loss": 0.8608, |
| "step": 602 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 3.333936411642048e-05, |
| "loss": 0.767, |
| "step": 603 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 3.331614917771e-05, |
| "loss": 0.8731, |
| "step": 604 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 3.329290196624863e-05, |
| "loss": 0.8511, |
| "step": 605 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 3.326962253837783e-05, |
| "loss": 0.8195, |
| "step": 606 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 3.3246310950517186e-05, |
| "loss": 0.7771, |
| "step": 607 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 3.322296725916417e-05, |
| "loss": 0.7615, |
| "step": 608 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 3.319959152089408e-05, |
| "loss": 0.8213, |
| "step": 609 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 3.31761837923599e-05, |
| "loss": 0.7117, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 3.315274413029212e-05, |
| "loss": 0.7325, |
| "step": 611 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 3.312927259149863e-05, |
| "loss": 0.7084, |
| "step": 612 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 3.3105769232864576e-05, |
| "loss": 0.9376, |
| "step": 613 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 3.308223411135224e-05, |
| "loss": 0.7905, |
| "step": 614 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 3.3058667284000856e-05, |
| "loss": 0.8885, |
| "step": 615 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 3.303506880792651e-05, |
| "loss": 0.8944, |
| "step": 616 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 3.3011438740322006e-05, |
| "loss": 0.8821, |
| "step": 617 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 3.298777713845668e-05, |
| "loss": 0.8499, |
| "step": 618 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 3.296408405967634e-05, |
| "loss": 0.8673, |
| "step": 619 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 3.2940359561403024e-05, |
| "loss": 0.8858, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 3.2916603701134975e-05, |
| "loss": 0.8987, |
| "step": 621 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 3.2892816536446404e-05, |
| "loss": 0.9773, |
| "step": 622 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 3.2868998124987404e-05, |
| "loss": 0.853, |
| "step": 623 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 3.2845148524483803e-05, |
| "loss": 0.7451, |
| "step": 624 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 3.2821267792737004e-05, |
| "loss": 0.9901, |
| "step": 625 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 3.279735598762387e-05, |
| "loss": 0.8042, |
| "step": 626 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 3.2773413167096584e-05, |
| "loss": 0.6899, |
| "step": 627 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 3.274943938918246e-05, |
| "loss": 0.7422, |
| "step": 628 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 3.272543471198388e-05, |
| "loss": 0.7801, |
| "step": 629 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 3.270139919367809e-05, |
| "loss": 0.7765, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 3.267733289251709e-05, |
| "loss": 0.732, |
| "step": 631 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 3.265323586682749e-05, |
| "loss": 0.8577, |
| "step": 632 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 3.262910817501033e-05, |
| "loss": 0.8549, |
| "step": 633 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 3.260494987554103e-05, |
| "loss": 0.8105, |
| "step": 634 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 3.258076102696914e-05, |
| "loss": 0.8919, |
| "step": 635 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 3.255654168791827e-05, |
| "loss": 0.7441, |
| "step": 636 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 3.2532291917085915e-05, |
| "loss": 0.8258, |
| "step": 637 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 3.2508011773243345e-05, |
| "loss": 0.84, |
| "step": 638 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 3.248370131523542e-05, |
| "loss": 0.8119, |
| "step": 639 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 3.2459360601980475e-05, |
| "loss": 0.9093, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 3.243498969247018e-05, |
| "loss": 0.7744, |
| "step": 641 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 3.2410588645769365e-05, |
| "loss": 0.8982, |
| "step": 642 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 3.238615752101592e-05, |
| "loss": 0.8141, |
| "step": 643 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 3.2361696377420644e-05, |
| "loss": 0.8483, |
| "step": 644 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 3.233720527426705e-05, |
| "loss": 0.8516, |
| "step": 645 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 3.23126842709113e-05, |
| "loss": 0.8365, |
| "step": 646 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 3.2288133426781986e-05, |
| "loss": 0.7851, |
| "step": 647 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 3.2263552801380056e-05, |
| "loss": 0.7855, |
| "step": 648 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 3.223894245427861e-05, |
| "loss": 0.7006, |
| "step": 649 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 3.2214302445122786e-05, |
| "loss": 0.7861, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 3.2189632833629615e-05, |
| "loss": 0.7589, |
| "step": 651 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 3.216493367958788e-05, |
| "loss": 0.9644, |
| "step": 652 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 3.2140205042857944e-05, |
| "loss": 0.8252, |
| "step": 653 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 3.211544698337164e-05, |
| "loss": 0.8121, |
| "step": 654 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 3.209065956113209e-05, |
| "loss": 0.858, |
| "step": 655 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 3.206584283621361e-05, |
| "loss": 0.7403, |
| "step": 656 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 3.204099686876149e-05, |
| "loss": 0.9199, |
| "step": 657 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 3.2016121718991934e-05, |
| "loss": 0.8291, |
| "step": 658 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 3.199121744719186e-05, |
| "loss": 0.8127, |
| "step": 659 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 3.1966284113718736e-05, |
| "loss": 0.8221, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 3.1941321779000504e-05, |
| "loss": 0.7751, |
| "step": 661 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 3.191633050353537e-05, |
| "loss": 0.8099, |
| "step": 662 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 3.189131034789169e-05, |
| "loss": 0.7952, |
| "step": 663 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 3.186626137270781e-05, |
| "loss": 0.8446, |
| "step": 664 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 3.184118363869191e-05, |
| "loss": 0.8809, |
| "step": 665 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 3.1816077206621896e-05, |
| "loss": 0.8659, |
| "step": 666 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 3.1790942137345195e-05, |
| "loss": 0.705, |
| "step": 667 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 3.176577849177866e-05, |
| "loss": 0.8541, |
| "step": 668 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 3.174058633090841e-05, |
| "loss": 0.7785, |
| "step": 669 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 3.1715365715789646e-05, |
| "loss": 0.7313, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 3.1690116707546544e-05, |
| "loss": 0.766, |
| "step": 671 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 3.166483936737208e-05, |
| "loss": 0.8271, |
| "step": 672 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 3.163953375652793e-05, |
| "loss": 0.7992, |
| "step": 673 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 3.1614199936344254e-05, |
| "loss": 0.8336, |
| "step": 674 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 3.158883796821958e-05, |
| "loss": 0.78, |
| "step": 675 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 3.1563447913620666e-05, |
| "loss": 0.7999, |
| "step": 676 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.1538029834082337e-05, |
| "loss": 0.8614, |
| "step": 677 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.1512583791207344e-05, |
| "loss": 0.8046, |
| "step": 678 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.14871098466662e-05, |
| "loss": 0.714, |
| "step": 679 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.1461608062197046e-05, |
| "loss": 0.8509, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.1436078499605486e-05, |
| "loss": 0.8462, |
| "step": 681 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.1410521220764465e-05, |
| "loss": 0.8384, |
| "step": 682 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.138493628761408e-05, |
| "loss": 0.8458, |
| "step": 683 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.135932376216146e-05, |
| "loss": 0.8147, |
| "step": 684 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.133368370648061e-05, |
| "loss": 0.8713, |
| "step": 685 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.1308016182712253e-05, |
| "loss": 0.7309, |
| "step": 686 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.128232125306368e-05, |
| "loss": 0.7653, |
| "step": 687 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.1256598979808604e-05, |
| "loss": 0.75, |
| "step": 688 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.1230849425287e-05, |
| "loss": 0.7608, |
| "step": 689 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.120507265190499e-05, |
| "loss": 0.8003, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.117926872213463e-05, |
| "loss": 0.7997, |
| "step": 691 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.115343769851381e-05, |
| "loss": 0.8859, |
| "step": 692 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.112757964364607e-05, |
| "loss": 0.8336, |
| "step": 693 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.1101694620200476e-05, |
| "loss": 0.7696, |
| "step": 694 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.107578269091144e-05, |
| "loss": 0.9193, |
| "step": 695 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.1049843918578615e-05, |
| "loss": 0.9126, |
| "step": 696 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 3.102387836606666e-05, |
| "loss": 0.898, |
| "step": 697 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.099788609630519e-05, |
| "loss": 0.7109, |
| "step": 698 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.0971867172288525e-05, |
| "loss": 0.7782, |
| "step": 699 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.094582165707562e-05, |
| "loss": 0.7975, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.091974961378985e-05, |
| "loss": 0.8599, |
| "step": 701 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.0893651105618894e-05, |
| "loss": 0.8781, |
| "step": 702 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.086752619581458e-05, |
| "loss": 0.8478, |
| "step": 703 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.084137494769271e-05, |
| "loss": 0.8869, |
| "step": 704 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.081519742463292e-05, |
| "loss": 0.7297, |
| "step": 705 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.0788993690078535e-05, |
| "loss": 0.815, |
| "step": 706 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.0762763807536385e-05, |
| "loss": 0.8696, |
| "step": 707 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.073650784057669e-05, |
| "loss": 0.8298, |
| "step": 708 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.0710225852832885e-05, |
| "loss": 0.8462, |
| "step": 709 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.068391790800146e-05, |
| "loss": 0.8144, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.065758406984183e-05, |
| "loss": 0.8092, |
| "step": 711 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.0631224402176154e-05, |
| "loss": 0.7695, |
| "step": 712 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.060483896888919e-05, |
| "loss": 0.8359, |
| "step": 713 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.057842783392813e-05, |
| "loss": 0.7896, |
| "step": 714 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.055199106130248e-05, |
| "loss": 0.7814, |
| "step": 715 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.052552871508386e-05, |
| "loss": 0.7356, |
| "step": 716 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.0499040859405906e-05, |
| "loss": 0.9219, |
| "step": 717 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 3.047252755846402e-05, |
| "loss": 0.8092, |
| "step": 718 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.044598887651532e-05, |
| "loss": 1.0218, |
| "step": 719 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.0419424877878422e-05, |
| "loss": 0.7883, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.0392835626933286e-05, |
| "loss": 0.8567, |
| "step": 721 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.0366221188121093e-05, |
| "loss": 0.8846, |
| "step": 722 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.0339581625944068e-05, |
| "loss": 0.8121, |
| "step": 723 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.0312917004965303e-05, |
| "loss": 0.745, |
| "step": 724 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.0286227389808644e-05, |
| "loss": 0.8644, |
| "step": 725 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.0259512845158504e-05, |
| "loss": 0.6963, |
| "step": 726 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.0232773435759707e-05, |
| "loss": 0.8206, |
| "step": 727 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.0206009226417365e-05, |
| "loss": 0.7531, |
| "step": 728 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.0179220281996664e-05, |
| "loss": 0.7307, |
| "step": 729 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.0152406667422754e-05, |
| "loss": 0.7837, |
| "step": 730 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.0125568447680563e-05, |
| "loss": 0.8806, |
| "step": 731 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.0098705687814667e-05, |
| "loss": 0.7999, |
| "step": 732 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.007181845292911e-05, |
| "loss": 1.0223, |
| "step": 733 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.0044906808187255e-05, |
| "loss": 0.7352, |
| "step": 734 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 3.0017970818811616e-05, |
| "loss": 0.876, |
| "step": 735 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 2.999101055008371e-05, |
| "loss": 0.7495, |
| "step": 736 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 2.996402606734391e-05, |
| "loss": 0.8191, |
| "step": 737 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 2.993701743599127e-05, |
| "loss": 0.7907, |
| "step": 738 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 2.990998472148335e-05, |
| "loss": 0.8066, |
| "step": 739 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 2.9882927989336107e-05, |
| "loss": 0.8186, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 2.9855847305123675e-05, |
| "loss": 0.7481, |
| "step": 741 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 2.982874273447827e-05, |
| "loss": 0.7773, |
| "step": 742 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 2.9801614343089978e-05, |
| "loss": 0.8407, |
| "step": 743 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 2.9774462196706624e-05, |
| "loss": 0.8318, |
| "step": 744 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 2.97472863611336e-05, |
| "loss": 0.7324, |
| "step": 745 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 2.972008690223371e-05, |
| "loss": 0.8571, |
| "step": 746 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 2.9692863885927017e-05, |
| "loss": 0.7909, |
| "step": 747 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 2.9665617378190683e-05, |
| "loss": 0.874, |
| "step": 748 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 2.9638347445058784e-05, |
| "loss": 0.7185, |
| "step": 749 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 2.9611054152622194e-05, |
| "loss": 0.8008, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 2.958373756702837e-05, |
| "loss": 0.7446, |
| "step": 751 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 2.9556397754481247e-05, |
| "loss": 0.6969, |
| "step": 752 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 2.9529034781241046e-05, |
| "loss": 0.7189, |
| "step": 753 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 2.9501648713624116e-05, |
| "loss": 0.8764, |
| "step": 754 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 2.947423961800278e-05, |
| "loss": 0.8145, |
| "step": 755 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 2.9446807560805166e-05, |
| "loss": 0.8438, |
| "step": 756 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 2.941935260851506e-05, |
| "loss": 0.9055, |
| "step": 757 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 2.9391874827671723e-05, |
| "loss": 0.8791, |
| "step": 758 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 2.9364374284869762e-05, |
| "loss": 0.9194, |
| "step": 759 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 2.9336851046758932e-05, |
| "loss": 0.9119, |
| "step": 760 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 2.9309305180043996e-05, |
| "loss": 0.8019, |
| "step": 761 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 2.928173675148457e-05, |
| "loss": 0.9155, |
| "step": 762 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 2.9254145827894934e-05, |
| "loss": 0.793, |
| "step": 763 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 2.92265324761439e-05, |
| "loss": 0.8046, |
| "step": 764 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 2.919889676315463e-05, |
| "loss": 0.8625, |
| "step": 765 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 2.9171238755904477e-05, |
| "loss": 0.8101, |
| "step": 766 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 2.9143558521424834e-05, |
| "loss": 0.8133, |
| "step": 767 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 2.9115856126800964e-05, |
| "loss": 0.854, |
| "step": 768 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 2.9088131639171823e-05, |
| "loss": 0.7527, |
| "step": 769 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 2.9060385125729928e-05, |
| "loss": 0.8511, |
| "step": 770 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 2.9032616653721168e-05, |
| "loss": 0.8956, |
| "step": 771 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 2.9004826290444648e-05, |
| "loss": 0.8175, |
| "step": 772 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 2.8977014103252543e-05, |
| "loss": 0.7333, |
| "step": 773 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 2.8949180159549897e-05, |
| "loss": 0.7325, |
| "step": 774 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 2.89213245267945e-05, |
| "loss": 0.8888, |
| "step": 775 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 2.889344727249671e-05, |
| "loss": 0.6153, |
| "step": 776 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 2.8865548464219268e-05, |
| "loss": 0.7744, |
| "step": 777 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 2.8837628169577168e-05, |
| "loss": 0.8497, |
| "step": 778 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 2.8809686456237476e-05, |
| "loss": 0.8876, |
| "step": 779 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 2.8781723391919162e-05, |
| "loss": 0.8713, |
| "step": 780 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 2.8753739044392945e-05, |
| "loss": 0.6641, |
| "step": 781 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 2.8725733481481132e-05, |
| "loss": 0.8615, |
| "step": 782 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 2.8697706771057437e-05, |
| "loss": 0.857, |
| "step": 783 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 2.8669658981046827e-05, |
| "loss": 0.714, |
| "step": 784 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 2.8641590179425366e-05, |
| "loss": 0.9354, |
| "step": 785 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 2.861350043422003e-05, |
| "loss": 0.9969, |
| "step": 786 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 2.8585389813508575e-05, |
| "loss": 0.8724, |
| "step": 787 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 2.8557258385419317e-05, |
| "loss": 0.8891, |
| "step": 788 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 2.8529106218131022e-05, |
| "loss": 0.678, |
| "step": 789 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 2.8500933379872725e-05, |
| "loss": 0.7923, |
| "step": 790 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 2.847273993892354e-05, |
| "loss": 0.7177, |
| "step": 791 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 2.8444525963612525e-05, |
| "loss": 0.9207, |
| "step": 792 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 2.8416291522318506e-05, |
| "loss": 0.8617, |
| "step": 793 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 2.83880366834699e-05, |
| "loss": 0.9428, |
| "step": 794 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 2.835976151554457e-05, |
| "loss": 0.8606, |
| "step": 795 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 2.8331466087069642e-05, |
| "loss": 0.8234, |
| "step": 796 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 2.8303150466621344e-05, |
| "loss": 0.7587, |
| "step": 797 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 2.827481472282485e-05, |
| "loss": 0.8345, |
| "step": 798 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 2.8246458924354092e-05, |
| "loss": 0.7877, |
| "step": 799 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 2.8218083139931615e-05, |
| "loss": 0.8689, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 2.818968743832839e-05, |
| "loss": 0.9086, |
| "step": 801 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 2.816127188836368e-05, |
| "loss": 0.7266, |
| "step": 802 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 2.8132836558904832e-05, |
| "loss": 0.8246, |
| "step": 803 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 2.8104381518867138e-05, |
| "loss": 0.7975, |
| "step": 804 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 2.8075906837213663e-05, |
| "loss": 0.8531, |
| "step": 805 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 2.8047412582955066e-05, |
| "loss": 0.7897, |
| "step": 806 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 2.801889882514945e-05, |
| "loss": 0.7114, |
| "step": 807 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 2.799036563290219e-05, |
| "loss": 0.6832, |
| "step": 808 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 2.7961813075365752e-05, |
| "loss": 0.7334, |
| "step": 809 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 2.793324122173954e-05, |
| "loss": 0.9626, |
| "step": 810 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 2.7904650141269714e-05, |
| "loss": 0.7868, |
| "step": 811 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 2.7876039903249052e-05, |
| "loss": 0.8865, |
| "step": 812 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 2.7847410577016748e-05, |
| "loss": 0.8702, |
| "step": 813 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 2.781876223195826e-05, |
| "loss": 0.7197, |
| "step": 814 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 2.779009493750514e-05, |
| "loss": 0.818, |
| "step": 815 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 2.776140876313486e-05, |
| "loss": 0.893, |
| "step": 816 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 2.7732703778370655e-05, |
| "loss": 0.7538, |
| "step": 817 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 2.7703980052781353e-05, |
| "loss": 0.9259, |
| "step": 818 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 2.7675237655981197e-05, |
| "loss": 0.7543, |
| "step": 819 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 2.7646476657629677e-05, |
| "loss": 0.9162, |
| "step": 820 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 2.7617697127431374e-05, |
| "loss": 0.7199, |
| "step": 821 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 2.758889913513577e-05, |
| "loss": 0.7669, |
| "step": 822 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 2.756008275053711e-05, |
| "loss": 0.8197, |
| "step": 823 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 2.7531248043474196e-05, |
| "loss": 0.7258, |
| "step": 824 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 2.750239508383024e-05, |
| "loss": 0.836, |
| "step": 825 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 2.7473523941532696e-05, |
| "loss": 0.8518, |
| "step": 826 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 2.7444634686553087e-05, |
| "loss": 0.9213, |
| "step": 827 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 2.741572738890683e-05, |
| "loss": 0.9349, |
| "step": 828 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 2.738680211865306e-05, |
| "loss": 0.8099, |
| "step": 829 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 2.7357858945894492e-05, |
| "loss": 0.8306, |
| "step": 830 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 2.7328897940777208e-05, |
| "loss": 0.9182, |
| "step": 831 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 2.7299919173490513e-05, |
| "loss": 0.8413, |
| "step": 832 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 2.7270922714266777e-05, |
| "loss": 0.7613, |
| "step": 833 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 2.724190863338122e-05, |
| "loss": 0.9259, |
| "step": 834 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 2.7212877001151797e-05, |
| "loss": 0.8356, |
| "step": 835 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 2.7183827887938973e-05, |
| "loss": 0.846, |
| "step": 836 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 2.7154761364145606e-05, |
| "loss": 0.7379, |
| "step": 837 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 2.7125677500216724e-05, |
| "loss": 0.8267, |
| "step": 838 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 2.7096576366639412e-05, |
| "loss": 0.7147, |
| "step": 839 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 2.7067458033942582e-05, |
| "loss": 0.9711, |
| "step": 840 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 2.7038322572696833e-05, |
| "loss": 0.7574, |
| "step": 841 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 2.7009170053514285e-05, |
| "loss": 0.7944, |
| "step": 842 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 2.6980000547048398e-05, |
| "loss": 0.8095, |
| "step": 843 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 2.6950814123993803e-05, |
| "loss": 0.8336, |
| "step": 844 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 2.6921610855086124e-05, |
| "loss": 0.7845, |
| "step": 845 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 2.6892390811101813e-05, |
| "loss": 0.8121, |
| "step": 846 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 2.686315406285798e-05, |
| "loss": 0.827, |
| "step": 847 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 2.683390068121223e-05, |
| "loss": 0.9013, |
| "step": 848 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 2.6804630737062453e-05, |
| "loss": 0.9542, |
| "step": 849 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 2.6775344301346704e-05, |
| "loss": 0.8534, |
| "step": 850 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 2.6746041445043006e-05, |
| "loss": 0.8444, |
| "step": 851 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 2.6716722239169156e-05, |
| "loss": 0.8776, |
| "step": 852 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 2.66873867547826e-05, |
| "loss": 0.7428, |
| "step": 853 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 2.6658035062980227e-05, |
| "loss": 0.8084, |
| "step": 854 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 2.6628667234898206e-05, |
| "loss": 0.7497, |
| "step": 855 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 2.6599283341711812e-05, |
| "loss": 0.9807, |
| "step": 856 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 2.656988345463525e-05, |
| "loss": 0.9052, |
| "step": 857 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 2.65404676449215e-05, |
| "loss": 0.6598, |
| "step": 858 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 2.651103598386213e-05, |
| "loss": 0.7973, |
| "step": 859 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 2.6481588542787116e-05, |
| "loss": 0.828, |
| "step": 860 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 2.6452125393064686e-05, |
| "loss": 0.7704, |
| "step": 861 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 2.6422646606101133e-05, |
| "loss": 0.7976, |
| "step": 862 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 2.639315225334065e-05, |
| "loss": 0.8268, |
| "step": 863 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 2.636364240626516e-05, |
| "loss": 0.7997, |
| "step": 864 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 2.6334117136394133e-05, |
| "loss": 0.82, |
| "step": 865 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 2.630457651528442e-05, |
| "loss": 1.0674, |
| "step": 866 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 2.627502061453007e-05, |
| "loss": 0.8003, |
| "step": 867 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 2.6245449505762172e-05, |
| "loss": 0.8196, |
| "step": 868 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 2.6215863260648666e-05, |
| "loss": 0.8604, |
| "step": 869 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 2.6186261950894186e-05, |
| "loss": 0.8229, |
| "step": 870 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 2.6156645648239867e-05, |
| "loss": 0.8284, |
| "step": 871 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 2.612701442446318e-05, |
| "loss": 0.8724, |
| "step": 872 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 2.609736835137777e-05, |
| "loss": 0.7375, |
| "step": 873 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 2.6067707500833252e-05, |
| "loss": 0.8087, |
| "step": 874 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 2.6038031944715072e-05, |
| "loss": 0.8175, |
| "step": 875 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 2.6008341754944306e-05, |
| "loss": 0.8015, |
| "step": 876 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 2.597863700347751e-05, |
| "loss": 0.8907, |
| "step": 877 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 2.5948917762306516e-05, |
| "loss": 0.8232, |
| "step": 878 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 2.5919184103458275e-05, |
| "loss": 0.7578, |
| "step": 879 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 2.5889436098994684e-05, |
| "loss": 0.7837, |
| "step": 880 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 2.585967382101241e-05, |
| "loss": 0.7482, |
| "step": 881 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 2.5829897341642706e-05, |
| "loss": 0.9306, |
| "step": 882 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 2.5800106733051254e-05, |
| "loss": 0.8276, |
| "step": 883 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 2.5770302067437963e-05, |
| "loss": 0.8904, |
| "step": 884 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 2.5740483417036826e-05, |
| "loss": 0.795, |
| "step": 885 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 2.571065085411573e-05, |
| "loss": 0.7532, |
| "step": 886 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 2.5680804450976266e-05, |
| "loss": 0.7793, |
| "step": 887 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 2.5650944279953577e-05, |
| "loss": 0.7032, |
| "step": 888 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 2.5621070413416177e-05, |
| "loss": 0.7909, |
| "step": 889 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 2.5591182923765754e-05, |
| "loss": 0.6184, |
| "step": 890 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 2.556128188343703e-05, |
| "loss": 0.6964, |
| "step": 891 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 2.5531367364897576e-05, |
| "loss": 0.8541, |
| "step": 892 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 2.5501439440647603e-05, |
| "loss": 0.874, |
| "step": 893 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 2.547149818321984e-05, |
| "loss": 0.7377, |
| "step": 894 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 2.5441543665179295e-05, |
| "loss": 0.6486, |
| "step": 895 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 2.5411575959123146e-05, |
| "loss": 0.805, |
| "step": 896 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 2.5381595137680527e-05, |
| "loss": 0.6963, |
| "step": 897 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 2.5351601273512342e-05, |
| "loss": 0.7427, |
| "step": 898 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 2.5321594439311116e-05, |
| "loss": 0.9064, |
| "step": 899 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 2.5291574707800818e-05, |
| "loss": 0.7783, |
| "step": 900 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 2.526154215173666e-05, |
| "loss": 0.8107, |
| "step": 901 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 2.523149684390494e-05, |
| "loss": 0.739, |
| "step": 902 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 2.5201438857122854e-05, |
| "loss": 0.7956, |
| "step": 903 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 2.517136826423835e-05, |
| "loss": 0.862, |
| "step": 904 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 2.5141285138129893e-05, |
| "loss": 0.7451, |
| "step": 905 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 2.5111189551706346e-05, |
| "loss": 0.9004, |
| "step": 906 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 2.508108157790677e-05, |
| "loss": 0.8009, |
| "step": 907 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 2.505096128970024e-05, |
| "loss": 0.767, |
| "step": 908 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 2.5020828760085685e-05, |
| "loss": 0.6996, |
| "step": 909 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 2.49906840620917e-05, |
| "loss": 0.8032, |
| "step": 910 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 2.4960527268776354e-05, |
| "loss": 0.8078, |
| "step": 911 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 2.493035845322705e-05, |
| "loss": 0.8546, |
| "step": 912 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 2.4900177688560326e-05, |
| "loss": 0.8273, |
| "step": 913 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 2.486998504792167e-05, |
| "loss": 0.7545, |
| "step": 914 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 2.4839780604485365e-05, |
| "loss": 0.8121, |
| "step": 915 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 2.4809564431454286e-05, |
| "loss": 0.8373, |
| "step": 916 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 2.4779336602059733e-05, |
| "loss": 0.6404, |
| "step": 917 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 2.474909718956128e-05, |
| "loss": 0.9136, |
| "step": 918 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 2.471884626724655e-05, |
| "loss": 0.8754, |
| "step": 919 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 2.468858390843107e-05, |
| "loss": 0.7354, |
| "step": 920 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 2.4658310186458082e-05, |
| "loss": 0.8756, |
| "step": 921 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 2.462802517469837e-05, |
| "loss": 0.7962, |
| "step": 922 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 2.4597728946550076e-05, |
| "loss": 0.8312, |
| "step": 923 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 2.456742157543853e-05, |
| "loss": 0.721, |
| "step": 924 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 2.453710313481607e-05, |
| "loss": 0.81, |
| "step": 925 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 2.450677369816185e-05, |
| "loss": 0.8507, |
| "step": 926 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 2.4476433338981686e-05, |
| "loss": 0.7319, |
| "step": 927 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 2.444608213080786e-05, |
| "loss": 0.7374, |
| "step": 928 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 2.4415720147198956e-05, |
| "loss": 0.7582, |
| "step": 929 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 2.4385347461739656e-05, |
| "loss": 0.8145, |
| "step": 930 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 2.4354964148040598e-05, |
| "loss": 0.9313, |
| "step": 931 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 2.432457027973816e-05, |
| "loss": 0.8551, |
| "step": 932 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 2.4294165930494308e-05, |
| "loss": 0.7733, |
| "step": 933 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 2.4263751173996416e-05, |
| "loss": 0.7777, |
| "step": 934 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 2.4233326083957077e-05, |
| "loss": 0.7798, |
| "step": 935 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 2.4202890734113922e-05, |
| "loss": 0.7916, |
| "step": 936 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 2.4172445198229456e-05, |
| "loss": 0.8147, |
| "step": 937 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 2.4141989550090857e-05, |
| "loss": 0.76, |
| "step": 938 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 2.411152386350983e-05, |
| "loss": 0.7531, |
| "step": 939 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 2.408104821232239e-05, |
| "loss": 0.7487, |
| "step": 940 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 2.405056267038872e-05, |
| "loss": 0.9016, |
| "step": 941 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 2.402006731159297e-05, |
| "loss": 0.8107, |
| "step": 942 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 2.3989562209843062e-05, |
| "loss": 0.7306, |
| "step": 943 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 2.395904743907055e-05, |
| "loss": 0.7758, |
| "step": 944 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 2.3928523073230426e-05, |
| "loss": 0.718, |
| "step": 945 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 2.3897989186300926e-05, |
| "loss": 0.825, |
| "step": 946 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 2.386744585228336e-05, |
| "loss": 0.6727, |
| "step": 947 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 2.3836893145201935e-05, |
| "loss": 0.7723, |
| "step": 948 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 2.3806331139103583e-05, |
| "loss": 0.7267, |
| "step": 949 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 2.3775759908057756e-05, |
| "loss": 0.8376, |
| "step": 950 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 2.3745179526156284e-05, |
| "loss": 0.7806, |
| "step": 951 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 2.371459006751316e-05, |
| "loss": 0.6257, |
| "step": 952 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 2.368399160626438e-05, |
| "loss": 0.8575, |
| "step": 953 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 2.3653384216567753e-05, |
| "loss": 0.9144, |
| "step": 954 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 2.3622767972602746e-05, |
| "loss": 0.8163, |
| "step": 955 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 2.359214294857026e-05, |
| "loss": 0.7571, |
| "step": 956 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 2.3561509218692485e-05, |
| "loss": 0.6864, |
| "step": 957 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 2.3530866857212727e-05, |
| "loss": 0.8461, |
| "step": 958 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 2.3500215938395178e-05, |
| "loss": 0.9644, |
| "step": 959 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 2.346955653652479e-05, |
| "loss": 0.8808, |
| "step": 960 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 2.343888872590708e-05, |
| "loss": 0.7557, |
| "step": 961 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 2.340821258086793e-05, |
| "loss": 0.8821, |
| "step": 962 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 2.3377528175753432e-05, |
| "loss": 0.759, |
| "step": 963 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 2.3346835584929685e-05, |
| "loss": 0.7967, |
| "step": 964 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 2.3316134882782642e-05, |
| "loss": 0.7461, |
| "step": 965 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 2.3285426143717896e-05, |
| "loss": 0.6334, |
| "step": 966 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 2.3254709442160537e-05, |
| "loss": 0.8831, |
| "step": 967 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 2.3223984852554944e-05, |
| "loss": 0.8804, |
| "step": 968 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.319325244936461e-05, |
| "loss": 0.7322, |
| "step": 969 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.3162512307071968e-05, |
| "loss": 0.8192, |
| "step": 970 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.313176450017821e-05, |
| "loss": 0.8268, |
| "step": 971 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.3101009103203093e-05, |
| "loss": 0.7348, |
| "step": 972 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.307024619068479e-05, |
| "loss": 0.7666, |
| "step": 973 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.3039475837179674e-05, |
| "loss": 0.8881, |
| "step": 974 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.300869811726215e-05, |
| "loss": 0.7643, |
| "step": 975 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.2977913105524477e-05, |
| "loss": 0.773, |
| "step": 976 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.2947120876576593e-05, |
| "loss": 0.8274, |
| "step": 977 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.291632150504592e-05, |
| "loss": 0.8946, |
| "step": 978 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.2885515065577203e-05, |
| "loss": 0.8322, |
| "step": 979 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.2854701632832304e-05, |
| "loss": 0.6587, |
| "step": 980 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.2823881281490033e-05, |
| "loss": 0.913, |
| "step": 981 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.2793054086245973e-05, |
| "loss": 0.8071, |
| "step": 982 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.27622201218123e-05, |
| "loss": 0.7787, |
| "step": 983 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.2731379462917597e-05, |
| "loss": 0.8597, |
| "step": 984 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.270053218430664e-05, |
| "loss": 0.7347, |
| "step": 985 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.2669678360740288e-05, |
| "loss": 0.8899, |
| "step": 986 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.2638818066995238e-05, |
| "loss": 0.7668, |
| "step": 987 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.2607951377863885e-05, |
| "loss": 0.7455, |
| "step": 988 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.2577078368154113e-05, |
| "loss": 0.8746, |
| "step": 989 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.2546199112689126e-05, |
| "loss": 0.7987, |
| "step": 990 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.251531368630726e-05, |
| "loss": 0.6962, |
| "step": 991 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.2484422163861815e-05, |
| "loss": 0.8109, |
| "step": 992 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.245352462022086e-05, |
| "loss": 0.9317, |
| "step": 993 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.2422621130267066e-05, |
| "loss": 0.7289, |
| "step": 994 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.2391711768897513e-05, |
| "loss": 0.796, |
| "step": 995 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.2360796611023493e-05, |
| "loss": 0.9338, |
| "step": 996 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.2329875731570362e-05, |
| "loss": 0.8539, |
| "step": 997 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.2298949205477343e-05, |
| "loss": 0.7855, |
| "step": 998 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.226801710769735e-05, |
| "loss": 0.8219, |
| "step": 999 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.2237079513196785e-05, |
| "loss": 0.8549, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.220613649695537e-05, |
| "loss": 0.8478, |
| "step": 1001 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.2175188133965994e-05, |
| "loss": 0.8277, |
| "step": 1002 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.214423449923447e-05, |
| "loss": 0.8375, |
| "step": 1003 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.2113275667779416e-05, |
| "loss": 0.9255, |
| "step": 1004 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.208231171463202e-05, |
| "loss": 0.8158, |
| "step": 1005 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.2051342714835907e-05, |
| "loss": 0.8648, |
| "step": 1006 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.2020368743446906e-05, |
| "loss": 0.6254, |
| "step": 1007 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.1989389875532926e-05, |
| "loss": 0.8529, |
| "step": 1008 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.195840618617372e-05, |
| "loss": 0.9507, |
| "step": 1009 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.192741775046074e-05, |
| "loss": 0.8246, |
| "step": 1010 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.189642464349693e-05, |
| "loss": 0.7709, |
| "step": 1011 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.1865426940396544e-05, |
| "loss": 0.8873, |
| "step": 1012 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.1834424716285017e-05, |
| "loss": 0.7875, |
| "step": 1013 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.18034180462987e-05, |
| "loss": 0.752, |
| "step": 1014 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.177240700558473e-05, |
| "loss": 0.7541, |
| "step": 1015 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.1741391669300858e-05, |
| "loss": 0.7826, |
| "step": 1016 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.1710372112615214e-05, |
| "loss": 0.825, |
| "step": 1017 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.167934841070617e-05, |
| "loss": 0.6373, |
| "step": 1018 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.1648320638762147e-05, |
| "loss": 0.9243, |
| "step": 1019 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.1617288871981436e-05, |
| "loss": 0.6677, |
| "step": 1020 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.158625318557199e-05, |
| "loss": 0.8723, |
| "step": 1021 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.1555213654751287e-05, |
| "loss": 0.7718, |
| "step": 1022 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.15241703547461e-05, |
| "loss": 0.7793, |
| "step": 1023 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.149312336079234e-05, |
| "loss": 0.7964, |
| "step": 1024 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.1462072748134887e-05, |
| "loss": 0.8529, |
| "step": 1025 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.1431018592027374e-05, |
| "loss": 0.8223, |
| "step": 1026 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.1399960967732034e-05, |
| "loss": 0.7982, |
| "step": 1027 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.136889995051949e-05, |
| "loss": 0.8623, |
| "step": 1028 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.13378356156686e-05, |
| "loss": 0.9192, |
| "step": 1029 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.130676803846627e-05, |
| "loss": 0.7604, |
| "step": 1030 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.1275697294207244e-05, |
| "loss": 0.8205, |
| "step": 1031 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.1244623458193954e-05, |
| "loss": 0.6614, |
| "step": 1032 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.1213546605736326e-05, |
| "loss": 0.8465, |
| "step": 1033 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.118246681215159e-05, |
| "loss": 0.9324, |
| "step": 1034 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.115138415276411e-05, |
| "loss": 0.823, |
| "step": 1035 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.1120298702905195e-05, |
| "loss": 0.7974, |
| "step": 1036 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.1089210537912914e-05, |
| "loss": 0.7233, |
| "step": 1037 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.105811973313191e-05, |
| "loss": 0.7986, |
| "step": 1038 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.102702636391324e-05, |
| "loss": 0.7569, |
| "step": 1039 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.0995930505614165e-05, |
| "loss": 0.7425, |
| "step": 1040 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.096483223359798e-05, |
| "loss": 0.8227, |
| "step": 1041 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.093373162323383e-05, |
| "loss": 0.7444, |
| "step": 1042 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.090262874989653e-05, |
| "loss": 0.8037, |
| "step": 1043 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.0871523688966375e-05, |
| "loss": 0.7835, |
| "step": 1044 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.0840416515828962e-05, |
| "loss": 0.8601, |
| "step": 1045 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.0809307305875012e-05, |
| "loss": 0.949, |
| "step": 1046 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.0778196134500178e-05, |
| "loss": 0.8168, |
| "step": 1047 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.0747083077104877e-05, |
| "loss": 0.8424, |
| "step": 1048 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.0715968209094076e-05, |
| "loss": 0.8134, |
| "step": 1049 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.0684851605877146e-05, |
| "loss": 0.9359, |
| "step": 1050 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.065373334286766e-05, |
| "loss": 0.9298, |
| "step": 1051 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.0622613495483215e-05, |
| "loss": 0.7277, |
| "step": 1052 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.0591492139145245e-05, |
| "loss": 0.7517, |
| "step": 1053 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.056036934927885e-05, |
| "loss": 0.781, |
| "step": 1054 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.052924520131258e-05, |
| "loss": 0.8335, |
| "step": 1055 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.04981197706783e-05, |
| "loss": 0.8026, |
| "step": 1056 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.0466993132810985e-05, |
| "loss": 0.7245, |
| "step": 1057 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.0435865363148516e-05, |
| "loss": 0.7823, |
| "step": 1058 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.0404736537131537e-05, |
| "loss": 0.8255, |
| "step": 1059 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.037360673020324e-05, |
| "loss": 0.7689, |
| "step": 1060 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.034247601780919e-05, |
| "loss": 0.7764, |
| "step": 1061 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.031134447539717e-05, |
| "loss": 0.7824, |
| "step": 1062 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.0280212178416952e-05, |
| "loss": 0.8695, |
| "step": 1063 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.0249079202320147e-05, |
| "loss": 0.8521, |
| "step": 1064 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.021794562256e-05, |
| "loss": 0.7708, |
| "step": 1065 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.0186811514591233e-05, |
| "loss": 0.8275, |
| "step": 1066 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.015567695386984e-05, |
| "loss": 0.7666, |
| "step": 1067 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.012454201585292e-05, |
| "loss": 0.7394, |
| "step": 1068 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.0093406775998484e-05, |
| "loss": 0.7933, |
| "step": 1069 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.0062271309765268e-05, |
| "loss": 0.7262, |
| "step": 1070 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.003113569261255e-05, |
| "loss": 0.7958, |
| "step": 1071 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2e-05, |
| "loss": 0.7564, |
| "step": 1072 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.9968864307387456e-05, |
| "loss": 0.8544, |
| "step": 1073 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.993772869023474e-05, |
| "loss": 0.7939, |
| "step": 1074 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.9906593224001523e-05, |
| "loss": 0.7029, |
| "step": 1075 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.987545798414708e-05, |
| "loss": 0.7907, |
| "step": 1076 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.9844323046130162e-05, |
| "loss": 0.72, |
| "step": 1077 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.9813188485408777e-05, |
| "loss": 0.8664, |
| "step": 1078 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.9782054377440005e-05, |
| "loss": 0.7373, |
| "step": 1079 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.9750920797679863e-05, |
| "loss": 0.8029, |
| "step": 1080 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.971978782158305e-05, |
| "loss": 0.978, |
| "step": 1081 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.9688655524602833e-05, |
| "loss": 0.8832, |
| "step": 1082 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.9657523982190812e-05, |
| "loss": 0.8462, |
| "step": 1083 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.962639326979677e-05, |
| "loss": 0.7041, |
| "step": 1084 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.9595263462868473e-05, |
| "loss": 0.821, |
| "step": 1085 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.956413463685149e-05, |
| "loss": 0.751, |
| "step": 1086 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.9533006867189025e-05, |
| "loss": 0.8456, |
| "step": 1087 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.9501880229321705e-05, |
| "loss": 0.8125, |
| "step": 1088 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.9470754798687428e-05, |
| "loss": 0.8438, |
| "step": 1089 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.9439630650721164e-05, |
| "loss": 0.8199, |
| "step": 1090 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.9408507860854754e-05, |
| "loss": 0.904, |
| "step": 1091 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 1.937738650451679e-05, |
| "loss": 0.8243, |
| "step": 1092 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.934626665713234e-05, |
| "loss": 0.8768, |
| "step": 1093 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.931514839412286e-05, |
| "loss": 0.7835, |
| "step": 1094 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.9284031790905934e-05, |
| "loss": 0.6543, |
| "step": 1095 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.9252916922895126e-05, |
| "loss": 0.8008, |
| "step": 1096 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.9221803865499825e-05, |
| "loss": 0.8093, |
| "step": 1097 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.9190692694124988e-05, |
| "loss": 0.7276, |
| "step": 1098 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.915958348417104e-05, |
| "loss": 0.9386, |
| "step": 1099 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.9128476311033635e-05, |
| "loss": 0.8194, |
| "step": 1100 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.9097371250103474e-05, |
| "loss": 0.7818, |
| "step": 1101 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.9066268376766177e-05, |
| "loss": 0.8598, |
| "step": 1102 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.903516776640202e-05, |
| "loss": 0.8341, |
| "step": 1103 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.9004069494385838e-05, |
| "loss": 0.8001, |
| "step": 1104 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.8972973636086767e-05, |
| "loss": 0.7861, |
| "step": 1105 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.8941880266868092e-05, |
| "loss": 0.7341, |
| "step": 1106 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.8910789462087097e-05, |
| "loss": 0.78, |
| "step": 1107 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.8879701297094805e-05, |
| "loss": 0.7206, |
| "step": 1108 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.8848615847235895e-05, |
| "loss": 0.6664, |
| "step": 1109 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.8817533187848412e-05, |
| "loss": 0.77, |
| "step": 1110 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.878645339426368e-05, |
| "loss": 0.7241, |
| "step": 1111 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.8755376541806052e-05, |
| "loss": 0.7241, |
| "step": 1112 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 1.8724302705792763e-05, |
| "loss": 0.9062, |
| "step": 1113 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.8693231961533737e-05, |
| "loss": 0.8041, |
| "step": 1114 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.866216438433141e-05, |
| "loss": 0.796, |
| "step": 1115 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.8631100049480516e-05, |
| "loss": 0.8259, |
| "step": 1116 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.860003903226798e-05, |
| "loss": 0.8069, |
| "step": 1117 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.856898140797263e-05, |
| "loss": 0.7876, |
| "step": 1118 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.853792725186512e-05, |
| "loss": 0.8336, |
| "step": 1119 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.850687663920766e-05, |
| "loss": 0.7858, |
| "step": 1120 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.8475829645253907e-05, |
| "loss": 0.6726, |
| "step": 1121 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.844478634524872e-05, |
| "loss": 0.7655, |
| "step": 1122 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.841374681442801e-05, |
| "loss": 0.8449, |
| "step": 1123 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.838271112801857e-05, |
| "loss": 0.7344, |
| "step": 1124 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.835167936123785e-05, |
| "loss": 0.9387, |
| "step": 1125 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.8320651589293837e-05, |
| "loss": 0.7721, |
| "step": 1126 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.8289627887384796e-05, |
| "loss": 0.9044, |
| "step": 1127 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.825860833069915e-05, |
| "loss": 0.7439, |
| "step": 1128 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.8227592994415274e-05, |
| "loss": 0.8186, |
| "step": 1129 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.8196581953701303e-05, |
| "loss": 0.7851, |
| "step": 1130 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.816557528371499e-05, |
| "loss": 0.8013, |
| "step": 1131 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.8134573059603462e-05, |
| "loss": 0.7881, |
| "step": 1132 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.810357535650308e-05, |
| "loss": 0.8358, |
| "step": 1133 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 1.807258224953927e-05, |
| "loss": 0.8308, |
| "step": 1134 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.804159381382628e-05, |
| "loss": 0.9222, |
| "step": 1135 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.8010610124467078e-05, |
| "loss": 0.8118, |
| "step": 1136 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.7979631256553098e-05, |
| "loss": 0.7905, |
| "step": 1137 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.7948657285164103e-05, |
| "loss": 0.751, |
| "step": 1138 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.7917688285367985e-05, |
| "loss": 0.7556, |
| "step": 1139 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.7886724332220594e-05, |
| "loss": 0.8655, |
| "step": 1140 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.7855765500765533e-05, |
| "loss": 0.6942, |
| "step": 1141 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.7824811866034016e-05, |
| "loss": 0.7991, |
| "step": 1142 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.7793863503044632e-05, |
| "loss": 0.8645, |
| "step": 1143 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.7762920486803225e-05, |
| "loss": 0.7884, |
| "step": 1144 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.7731982892302653e-05, |
| "loss": 0.831, |
| "step": 1145 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.770105079452266e-05, |
| "loss": 0.7858, |
| "step": 1146 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.7670124268429648e-05, |
| "loss": 0.6456, |
| "step": 1147 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.7639203388976517e-05, |
| "loss": 0.8965, |
| "step": 1148 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.76082882311025e-05, |
| "loss": 0.7986, |
| "step": 1149 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.7577378869732934e-05, |
| "loss": 0.8648, |
| "step": 1150 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.7546475379779145e-05, |
| "loss": 0.7507, |
| "step": 1151 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.751557783613819e-05, |
| "loss": 0.8244, |
| "step": 1152 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.7484686313692747e-05, |
| "loss": 0.8803, |
| "step": 1153 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.7453800887310884e-05, |
| "loss": 0.7573, |
| "step": 1154 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 1.7422921631845887e-05, |
| "loss": 0.8168, |
| "step": 1155 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.7392048622136118e-05, |
| "loss": 0.8174, |
| "step": 1156 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.736118193300476e-05, |
| "loss": 0.7399, |
| "step": 1157 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.7330321639259716e-05, |
| "loss": 0.7196, |
| "step": 1158 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.729946781569337e-05, |
| "loss": 0.7403, |
| "step": 1159 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.726862053708241e-05, |
| "loss": 0.7497, |
| "step": 1160 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.7237779878187702e-05, |
| "loss": 0.8645, |
| "step": 1161 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.7206945913754027e-05, |
| "loss": 0.8736, |
| "step": 1162 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.7176118718509974e-05, |
| "loss": 0.783, |
| "step": 1163 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.7145298367167703e-05, |
| "loss": 0.7814, |
| "step": 1164 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.71144849344228e-05, |
| "loss": 0.6849, |
| "step": 1165 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.7083678494954084e-05, |
| "loss": 0.7768, |
| "step": 1166 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.7052879123423414e-05, |
| "loss": 0.7645, |
| "step": 1167 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.702208689447553e-05, |
| "loss": 0.9248, |
| "step": 1168 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.699130188273786e-05, |
| "loss": 0.8502, |
| "step": 1169 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.696052416282033e-05, |
| "loss": 0.7534, |
| "step": 1170 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.6929753809315214e-05, |
| "loss": 0.7784, |
| "step": 1171 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.6898990896796907e-05, |
| "loss": 0.8438, |
| "step": 1172 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.68682354998218e-05, |
| "loss": 0.9313, |
| "step": 1173 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.6837487692928042e-05, |
| "loss": 0.8504, |
| "step": 1174 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.6806747550635395e-05, |
| "loss": 0.854, |
| "step": 1175 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 1.6776015147445062e-05, |
| "loss": 0.8048, |
| "step": 1176 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.6745290557839463e-05, |
| "loss": 0.8528, |
| "step": 1177 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.671457385628211e-05, |
| "loss": 0.8382, |
| "step": 1178 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.668386511721737e-05, |
| "loss": 0.8847, |
| "step": 1179 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.665316441507032e-05, |
| "loss": 0.7371, |
| "step": 1180 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.6622471824246578e-05, |
| "loss": 0.7098, |
| "step": 1181 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.6591787419132072e-05, |
| "loss": 0.8048, |
| "step": 1182 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.6561111274092926e-05, |
| "loss": 0.8229, |
| "step": 1183 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.653044346347521e-05, |
| "loss": 0.7544, |
| "step": 1184 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.6499784061604832e-05, |
| "loss": 0.8072, |
| "step": 1185 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.6469133142787283e-05, |
| "loss": 0.8548, |
| "step": 1186 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.6438490781307515e-05, |
| "loss": 0.7895, |
| "step": 1187 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.640785705142975e-05, |
| "loss": 0.9934, |
| "step": 1188 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.6377232027397258e-05, |
| "loss": 0.9273, |
| "step": 1189 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.634661578343225e-05, |
| "loss": 0.8787, |
| "step": 1190 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.6316008393735628e-05, |
| "loss": 0.8416, |
| "step": 1191 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.6285409932486848e-05, |
| "loss": 0.7842, |
| "step": 1192 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.6254820473843723e-05, |
| "loss": 0.7785, |
| "step": 1193 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.622424009194225e-05, |
| "loss": 0.7333, |
| "step": 1194 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.6193668860896427e-05, |
| "loss": 0.8783, |
| "step": 1195 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.6163106854798075e-05, |
| "loss": 0.7089, |
| "step": 1196 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 1.6132554147716643e-05, |
| "loss": 0.83, |
| "step": 1197 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.610201081369908e-05, |
| "loss": 0.8082, |
| "step": 1198 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.607147692676957e-05, |
| "loss": 0.85, |
| "step": 1199 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.6040952560929453e-05, |
| "loss": 0.877, |
| "step": 1200 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.601043779015695e-05, |
| "loss": 0.6425, |
| "step": 1201 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.5979932688407038e-05, |
| "loss": 0.744, |
| "step": 1202 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.5949437329611282e-05, |
| "loss": 0.8513, |
| "step": 1203 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.591895178767761e-05, |
| "loss": 0.8467, |
| "step": 1204 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.5888476136490177e-05, |
| "loss": 0.7514, |
| "step": 1205 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.5858010449909153e-05, |
| "loss": 0.7605, |
| "step": 1206 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.5827554801770554e-05, |
| "loss": 0.8327, |
| "step": 1207 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.5797109265886085e-05, |
| "loss": 0.8947, |
| "step": 1208 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.5766673916042926e-05, |
| "loss": 0.863, |
| "step": 1209 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.5736248826003587e-05, |
| "loss": 0.7075, |
| "step": 1210 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.5705834069505702e-05, |
| "loss": 0.6782, |
| "step": 1211 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.5675429720261848e-05, |
| "loss": 0.7683, |
| "step": 1212 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.5645035851959412e-05, |
| "loss": 0.7701, |
| "step": 1213 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.5614652538260344e-05, |
| "loss": 0.7619, |
| "step": 1214 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.558427985280105e-05, |
| "loss": 0.7127, |
| "step": 1215 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.5553917869192136e-05, |
| "loss": 0.8634, |
| "step": 1216 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 1.552356666101832e-05, |
| "loss": 0.7333, |
| "step": 1217 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.5493226301838155e-05, |
| "loss": 1.0024, |
| "step": 1218 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.5462896865183938e-05, |
| "loss": 0.8464, |
| "step": 1219 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.5432578424561474e-05, |
| "loss": 0.7389, |
| "step": 1220 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.540227105344993e-05, |
| "loss": 0.7641, |
| "step": 1221 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.5371974825301636e-05, |
| "loss": 0.7169, |
| "step": 1222 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.5341689813541928e-05, |
| "loss": 0.8362, |
| "step": 1223 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.5311416091568935e-05, |
| "loss": 0.7652, |
| "step": 1224 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.5281153732753458e-05, |
| "loss": 0.756, |
| "step": 1225 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.5250902810438722e-05, |
| "loss": 0.7723, |
| "step": 1226 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.522066339794027e-05, |
| "loss": 0.8064, |
| "step": 1227 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.5190435568545728e-05, |
| "loss": 0.8577, |
| "step": 1228 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.516021939551464e-05, |
| "loss": 0.8803, |
| "step": 1229 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.5130014952078334e-05, |
| "loss": 0.8457, |
| "step": 1230 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.5099822311439675e-05, |
| "loss": 0.8142, |
| "step": 1231 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.5069641546772955e-05, |
| "loss": 0.8818, |
| "step": 1232 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.5039472731223657e-05, |
| "loss": 0.7995, |
| "step": 1233 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.5009315937908307e-05, |
| "loss": 0.8498, |
| "step": 1234 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.4979171239914321e-05, |
| "loss": 0.7857, |
| "step": 1235 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.4949038710299759e-05, |
| "loss": 0.8359, |
| "step": 1236 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.4918918422093236e-05, |
| "loss": 0.8208, |
| "step": 1237 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 1.4888810448293667e-05, |
| "loss": 0.6836, |
| "step": 1238 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4858714861870116e-05, |
| "loss": 0.7107, |
| "step": 1239 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4828631735761663e-05, |
| "loss": 0.7486, |
| "step": 1240 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4798561142877147e-05, |
| "loss": 0.7102, |
| "step": 1241 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.476850315609507e-05, |
| "loss": 0.8465, |
| "step": 1242 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4738457848263346e-05, |
| "loss": 0.6829, |
| "step": 1243 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4708425292199187e-05, |
| "loss": 0.7466, |
| "step": 1244 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4678405560688889e-05, |
| "loss": 0.8856, |
| "step": 1245 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4648398726487667e-05, |
| "loss": 0.973, |
| "step": 1246 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4618404862319481e-05, |
| "loss": 0.7838, |
| "step": 1247 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4588424040876858e-05, |
| "loss": 0.8828, |
| "step": 1248 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4558456334820712e-05, |
| "loss": 0.8453, |
| "step": 1249 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4528501816780174e-05, |
| "loss": 0.9562, |
| "step": 1250 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4498560559352397e-05, |
| "loss": 0.7759, |
| "step": 1251 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.446863263510243e-05, |
| "loss": 0.8381, |
| "step": 1252 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4438718116562969e-05, |
| "loss": 0.708, |
| "step": 1253 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4408817076234254e-05, |
| "loss": 0.9052, |
| "step": 1254 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4378929586583836e-05, |
| "loss": 0.7446, |
| "step": 1255 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4349055720046425e-05, |
| "loss": 0.8115, |
| "step": 1256 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4319195549023739e-05, |
| "loss": 0.7377, |
| "step": 1257 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4289349145884269e-05, |
| "loss": 0.7954, |
| "step": 1258 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 1.4259516582963175e-05, |
| "loss": 0.8223, |
| "step": 1259 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.4229697932562045e-05, |
| "loss": 0.6936, |
| "step": 1260 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.4199893266948754e-05, |
| "loss": 0.7449, |
| "step": 1261 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.4170102658357302e-05, |
| "loss": 0.6564, |
| "step": 1262 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.4140326178987596e-05, |
| "loss": 0.8523, |
| "step": 1263 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.4110563901005323e-05, |
| "loss": 0.7964, |
| "step": 1264 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.4080815896541737e-05, |
| "loss": 0.9001, |
| "step": 1265 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.4051082237693487e-05, |
| "loss": 0.7916, |
| "step": 1266 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.4021362996522494e-05, |
| "loss": 0.6969, |
| "step": 1267 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.3991658245055692e-05, |
| "loss": 0.806, |
| "step": 1268 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.3961968055284936e-05, |
| "loss": 0.8583, |
| "step": 1269 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.3932292499166757e-05, |
| "loss": 0.8543, |
| "step": 1270 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.390263164862224e-05, |
| "loss": 0.7614, |
| "step": 1271 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.3872985575536826e-05, |
| "loss": 0.7572, |
| "step": 1272 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.3843354351760141e-05, |
| "loss": 0.6099, |
| "step": 1273 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.3813738049105819e-05, |
| "loss": 0.7254, |
| "step": 1274 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.3784136739351342e-05, |
| "loss": 0.7656, |
| "step": 1275 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.3754550494237835e-05, |
| "loss": 0.7419, |
| "step": 1276 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.3724979385469938e-05, |
| "loss": 0.8136, |
| "step": 1277 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.3695423484715584e-05, |
| "loss": 0.8012, |
| "step": 1278 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.366588286360587e-05, |
| "loss": 0.745, |
| "step": 1279 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 1.3636357593734839e-05, |
| "loss": 0.724, |
| "step": 1280 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.3606847746659353e-05, |
| "loss": 0.7767, |
| "step": 1281 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.3577353393898875e-05, |
| "loss": 0.8141, |
| "step": 1282 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.3547874606935319e-05, |
| "loss": 0.8409, |
| "step": 1283 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.3518411457212887e-05, |
| "loss": 0.8122, |
| "step": 1284 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.348896401613787e-05, |
| "loss": 0.829, |
| "step": 1285 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.3459532355078503e-05, |
| "loss": 0.8344, |
| "step": 1286 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.343011654536476e-05, |
| "loss": 0.6823, |
| "step": 1287 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.3400716658288198e-05, |
| "loss": 0.7419, |
| "step": 1288 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.3371332765101803e-05, |
| "loss": 0.8412, |
| "step": 1289 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.3341964937019776e-05, |
| "loss": 0.7964, |
| "step": 1290 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.3312613245217403e-05, |
| "loss": 0.7108, |
| "step": 1291 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.3283277760830852e-05, |
| "loss": 0.6436, |
| "step": 1292 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.3253958554957e-05, |
| "loss": 0.6815, |
| "step": 1293 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.32246556986533e-05, |
| "loss": 0.9237, |
| "step": 1294 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.3195369262937549e-05, |
| "loss": 0.7677, |
| "step": 1295 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.316609931878778e-05, |
| "loss": 0.8574, |
| "step": 1296 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.3136845937142022e-05, |
| "loss": 0.6677, |
| "step": 1297 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.310760918889819e-05, |
| "loss": 0.8527, |
| "step": 1298 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.3078389144913883e-05, |
| "loss": 0.7441, |
| "step": 1299 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.3049185876006203e-05, |
| "loss": 0.811, |
| "step": 1300 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 1.3019999452951607e-05, |
| "loss": 0.8852, |
| "step": 1301 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.2990829946485724e-05, |
| "loss": 0.7903, |
| "step": 1302 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.2961677427303174e-05, |
| "loss": 0.8985, |
| "step": 1303 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.293254196605743e-05, |
| "loss": 0.7823, |
| "step": 1304 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.290342363336059e-05, |
| "loss": 0.7545, |
| "step": 1305 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.2874322499783278e-05, |
| "loss": 0.7774, |
| "step": 1306 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.2845238635854407e-05, |
| "loss": 0.9081, |
| "step": 1307 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.2816172112061032e-05, |
| "loss": 0.7896, |
| "step": 1308 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.2787122998848216e-05, |
| "loss": 0.6985, |
| "step": 1309 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.2758091366618781e-05, |
| "loss": 0.8357, |
| "step": 1310 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.2729077285733231e-05, |
| "loss": 0.793, |
| "step": 1311 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.2700080826509487e-05, |
| "loss": 0.8226, |
| "step": 1312 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.2671102059222799e-05, |
| "loss": 0.6967, |
| "step": 1313 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.2642141054105516e-05, |
| "loss": 0.9131, |
| "step": 1314 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.2613197881346941e-05, |
| "loss": 0.8196, |
| "step": 1315 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.2584272611093178e-05, |
| "loss": 0.7749, |
| "step": 1316 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.2555365313446911e-05, |
| "loss": 0.8242, |
| "step": 1317 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.2526476058467307e-05, |
| "loss": 0.6551, |
| "step": 1318 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.2497604916169772e-05, |
| "loss": 0.7725, |
| "step": 1319 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.246875195652581e-05, |
| "loss": 0.8368, |
| "step": 1320 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 1.2439917249462898e-05, |
| "loss": 0.7968, |
| "step": 1321 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.241110086486423e-05, |
| "loss": 0.7811, |
| "step": 1322 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.2382302872568635e-05, |
| "loss": 0.7749, |
| "step": 1323 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.2353523342370328e-05, |
| "loss": 0.7459, |
| "step": 1324 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.2324762344018808e-05, |
| "loss": 0.9459, |
| "step": 1325 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.2296019947218652e-05, |
| "loss": 0.7449, |
| "step": 1326 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.2267296221629353e-05, |
| "loss": 0.9733, |
| "step": 1327 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.223859123686515e-05, |
| "loss": 0.907, |
| "step": 1328 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.2209905062494874e-05, |
| "loss": 0.6868, |
| "step": 1329 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.2181237768041745e-05, |
| "loss": 0.6874, |
| "step": 1330 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.2152589422983256e-05, |
| "loss": 0.8635, |
| "step": 1331 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.2123960096750946e-05, |
| "loss": 0.8701, |
| "step": 1332 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.209534985873029e-05, |
| "loss": 0.7662, |
| "step": 1333 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.2066758778260472e-05, |
| "loss": 0.7988, |
| "step": 1334 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.203818692463425e-05, |
| "loss": 0.8568, |
| "step": 1335 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.2009634367097815e-05, |
| "loss": 0.815, |
| "step": 1336 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.1981101174850549e-05, |
| "loss": 0.5727, |
| "step": 1337 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.1952587417044938e-05, |
| "loss": 0.709, |
| "step": 1338 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.192409316278635e-05, |
| "loss": 0.7023, |
| "step": 1339 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.1895618481132867e-05, |
| "loss": 0.9157, |
| "step": 1340 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.1867163441095178e-05, |
| "loss": 0.7744, |
| "step": 1341 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 1.1838728111636324e-05, |
| "loss": 0.722, |
| "step": 1342 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.1810312561671617e-05, |
| "loss": 0.7501, |
| "step": 1343 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.178191686006839e-05, |
| "loss": 0.7627, |
| "step": 1344 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.1753541075645913e-05, |
| "loss": 0.735, |
| "step": 1345 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.1725185277175157e-05, |
| "loss": 0.8688, |
| "step": 1346 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.1696849533378658e-05, |
| "loss": 0.8493, |
| "step": 1347 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.1668533912930364e-05, |
| "loss": 0.8519, |
| "step": 1348 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.164023848445543e-05, |
| "loss": 0.9197, |
| "step": 1349 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.1611963316530106e-05, |
| "loss": 0.8146, |
| "step": 1350 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.1583708477681506e-05, |
| "loss": 0.7429, |
| "step": 1351 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.1555474036387482e-05, |
| "loss": 0.7946, |
| "step": 1352 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.152726006107647e-05, |
| "loss": 0.828, |
| "step": 1353 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.1499066620127283e-05, |
| "loss": 0.8039, |
| "step": 1354 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.1470893781868986e-05, |
| "loss": 0.8712, |
| "step": 1355 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.14427416145807e-05, |
| "loss": 0.7982, |
| "step": 1356 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.1414610186491437e-05, |
| "loss": 0.7071, |
| "step": 1357 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.1386499565779978e-05, |
| "loss": 0.7918, |
| "step": 1358 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.135840982057464e-05, |
| "loss": 0.8611, |
| "step": 1359 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.1330341018953183e-05, |
| "loss": 0.8295, |
| "step": 1360 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.130229322894257e-05, |
| "loss": 0.7554, |
| "step": 1361 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.1274266518518874e-05, |
| "loss": 0.8147, |
| "step": 1362 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 1.1246260955607053e-05, |
| "loss": 1.0165, |
| "step": 1363 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.1218276608080845e-05, |
| "loss": 0.9676, |
| "step": 1364 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.1190313543762525e-05, |
| "loss": 0.8096, |
| "step": 1365 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.1162371830422835e-05, |
| "loss": 0.8757, |
| "step": 1366 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.1134451535780732e-05, |
| "loss": 0.8996, |
| "step": 1367 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.1106552727503295e-05, |
| "loss": 0.7988, |
| "step": 1368 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.1078675473205496e-05, |
| "loss": 0.8329, |
| "step": 1369 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.1050819840450107e-05, |
| "loss": 0.6578, |
| "step": 1370 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.1022985896747464e-05, |
| "loss": 0.9332, |
| "step": 1371 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.0995173709555352e-05, |
| "loss": 0.8767, |
| "step": 1372 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.0967383346278837e-05, |
| "loss": 0.7504, |
| "step": 1373 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.093961487427007e-05, |
| "loss": 0.7289, |
| "step": 1374 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.0911868360828177e-05, |
| "loss": 0.7684, |
| "step": 1375 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.0884143873199036e-05, |
| "loss": 0.8557, |
| "step": 1376 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.0856441478575164e-05, |
| "loss": 0.7547, |
| "step": 1377 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.0828761244095528e-05, |
| "loss": 0.7937, |
| "step": 1378 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.0801103236845371e-05, |
| "loss": 0.7634, |
| "step": 1379 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.0773467523856104e-05, |
| "loss": 0.7386, |
| "step": 1380 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.0745854172105066e-05, |
| "loss": 0.8253, |
| "step": 1381 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.0718263248515434e-05, |
| "loss": 0.7391, |
| "step": 1382 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.069069481995601e-05, |
| "loss": 0.7483, |
| "step": 1383 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 1.0663148953241075e-05, |
| "loss": 0.8856, |
| "step": 1384 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.0635625715130246e-05, |
| "loss": 0.7958, |
| "step": 1385 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.0608125172328278e-05, |
| "loss": 0.8836, |
| "step": 1386 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.0580647391484949e-05, |
| "loss": 0.75, |
| "step": 1387 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.055319243919484e-05, |
| "loss": 0.778, |
| "step": 1388 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.0525760381997223e-05, |
| "loss": 0.7749, |
| "step": 1389 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.049835128637589e-05, |
| "loss": 0.7563, |
| "step": 1390 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.0470965218758953e-05, |
| "loss": 0.7654, |
| "step": 1391 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.0443602245518756e-05, |
| "loss": 0.816, |
| "step": 1392 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.0416262432971638e-05, |
| "loss": 0.7753, |
| "step": 1393 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.0388945847377815e-05, |
| "loss": 0.8129, |
| "step": 1394 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.036165255494122e-05, |
| "loss": 0.8415, |
| "step": 1395 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.033438262180932e-05, |
| "loss": 0.864, |
| "step": 1396 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.0307136114072987e-05, |
| "loss": 0.7572, |
| "step": 1397 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.0279913097766301e-05, |
| "loss": 0.9031, |
| "step": 1398 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.025271363886641e-05, |
| "loss": 0.7405, |
| "step": 1399 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.0225537803293384e-05, |
| "loss": 0.6923, |
| "step": 1400 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.0198385656910024e-05, |
| "loss": 0.7479, |
| "step": 1401 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.0171257265521735e-05, |
| "loss": 0.7799, |
| "step": 1402 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.0144152694876332e-05, |
| "loss": 0.8073, |
| "step": 1403 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.0117072010663903e-05, |
| "loss": 0.7006, |
| "step": 1404 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 1.0090015278516658e-05, |
| "loss": 0.8247, |
| "step": 1405 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.0062982564008737e-05, |
| "loss": 0.7815, |
| "step": 1406 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.0035973932656096e-05, |
| "loss": 0.8321, |
| "step": 1407 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 1.0008989449916294e-05, |
| "loss": 0.8156, |
| "step": 1408 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 9.982029181188397e-06, |
| "loss": 0.8239, |
| "step": 1409 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 9.955093191812762e-06, |
| "loss": 0.7769, |
| "step": 1410 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 9.928181547070897e-06, |
| "loss": 0.8023, |
| "step": 1411 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 9.901294312185343e-06, |
| "loss": 0.8008, |
| "step": 1412 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 9.874431552319444e-06, |
| "loss": 0.7156, |
| "step": 1413 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 9.84759333257726e-06, |
| "loss": 0.7347, |
| "step": 1414 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 9.820779718003341e-06, |
| "loss": 0.8215, |
| "step": 1415 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 9.793990773582642e-06, |
| "loss": 0.6976, |
| "step": 1416 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 9.767226564240291e-06, |
| "loss": 0.8492, |
| "step": 1417 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 9.740487154841504e-06, |
| "loss": 0.7922, |
| "step": 1418 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 9.71377261019136e-06, |
| "loss": 0.7727, |
| "step": 1419 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 9.687082995034702e-06, |
| "loss": 0.8026, |
| "step": 1420 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 9.660418374055935e-06, |
| "loss": 0.736, |
| "step": 1421 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 9.633778811878909e-06, |
| "loss": 0.7331, |
| "step": 1422 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 9.607164373066714e-06, |
| "loss": 0.8121, |
| "step": 1423 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 9.580575122121583e-06, |
| "loss": 0.7405, |
| "step": 1424 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 9.554011123484683e-06, |
| "loss": 0.7511, |
| "step": 1425 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 9.527472441535978e-06, |
| "loss": 0.8387, |
| "step": 1426 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 9.500959140594099e-06, |
| "loss": 0.8015, |
| "step": 1427 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 9.474471284916131e-06, |
| "loss": 0.8523, |
| "step": 1428 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 9.448008938697527e-06, |
| "loss": 0.7908, |
| "step": 1429 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 9.42157216607188e-06, |
| "loss": 0.9333, |
| "step": 1430 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 9.395161031110818e-06, |
| "loss": 0.8327, |
| "step": 1431 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 9.36877559782385e-06, |
| "loss": 0.7779, |
| "step": 1432 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 9.342415930158167e-06, |
| "loss": 0.7566, |
| "step": 1433 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 9.31608209199854e-06, |
| "loss": 0.8775, |
| "step": 1434 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 9.289774147167117e-06, |
| "loss": 0.7531, |
| "step": 1435 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 9.263492159423315e-06, |
| "loss": 0.7585, |
| "step": 1436 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 9.237236192463623e-06, |
| "loss": 0.7515, |
| "step": 1437 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 9.211006309921471e-06, |
| "loss": 0.7984, |
| "step": 1438 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 9.184802575367081e-06, |
| "loss": 0.7771, |
| "step": 1439 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 9.158625052307289e-06, |
| "loss": 0.8067, |
| "step": 1440 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 9.132473804185422e-06, |
| "loss": 0.9069, |
| "step": 1441 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 9.106348894381112e-06, |
| "loss": 0.7876, |
| "step": 1442 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 9.080250386210156e-06, |
| "loss": 0.7432, |
| "step": 1443 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 9.05417834292439e-06, |
| "loss": 0.8866, |
| "step": 1444 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 9.028132827711477e-06, |
| "loss": 0.8469, |
| "step": 1445 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 9.002113903694818e-06, |
| "loss": 0.8669, |
| "step": 1446 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 8.976121633933345e-06, |
| "loss": 0.8811, |
| "step": 1447 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 8.95015608142139e-06, |
| "loss": 0.8613, |
| "step": 1448 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 8.924217309088563e-06, |
| "loss": 0.9477, |
| "step": 1449 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 8.89830537979953e-06, |
| "loss": 0.84, |
| "step": 1450 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 8.872420356353938e-06, |
| "loss": 0.6912, |
| "step": 1451 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 8.8465623014862e-06, |
| "loss": 0.6554, |
| "step": 1452 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 8.820731277865374e-06, |
| "loss": 0.8475, |
| "step": 1453 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 8.794927348095015e-06, |
| "loss": 0.7827, |
| "step": 1454 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 8.769150574712999e-06, |
| "loss": 0.7208, |
| "step": 1455 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 8.743401020191406e-06, |
| "loss": 0.9659, |
| "step": 1456 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 8.717678746936334e-06, |
| "loss": 0.8438, |
| "step": 1457 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 8.691983817287755e-06, |
| "loss": 0.9008, |
| "step": 1458 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 8.666316293519399e-06, |
| "loss": 0.8585, |
| "step": 1459 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 8.640676237838545e-06, |
| "loss": 0.7097, |
| "step": 1460 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 8.615063712385932e-06, |
| "loss": 0.7962, |
| "step": 1461 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 8.589478779235549e-06, |
| "loss": 0.7608, |
| "step": 1462 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 8.56392150039452e-06, |
| "loss": 0.674, |
| "step": 1463 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 8.538391937802968e-06, |
| "loss": 0.6829, |
| "step": 1464 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 8.512890153333806e-06, |
| "loss": 0.9768, |
| "step": 1465 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 8.487416208792664e-06, |
| "loss": 0.6861, |
| "step": 1466 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 8.461970165917668e-06, |
| "loss": 0.8438, |
| "step": 1467 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 8.436552086379346e-06, |
| "loss": 0.8243, |
| "step": 1468 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 8.411162031780428e-06, |
| "loss": 0.7041, |
| "step": 1469 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 8.385800063655756e-06, |
| "loss": 0.7406, |
| "step": 1470 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 8.36046624347207e-06, |
| "loss": 0.693, |
| "step": 1471 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 8.33516063262792e-06, |
| "loss": 0.6651, |
| "step": 1472 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 8.309883292453461e-06, |
| "loss": 0.7385, |
| "step": 1473 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 8.28463428421036e-06, |
| "loss": 0.67, |
| "step": 1474 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 8.25941366909159e-06, |
| "loss": 0.7692, |
| "step": 1475 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 8.234221508221338e-06, |
| "loss": 0.8259, |
| "step": 1476 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 8.209057862654806e-06, |
| "loss": 0.7763, |
| "step": 1477 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 8.183922793378111e-06, |
| "loss": 0.8802, |
| "step": 1478 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 8.158816361308096e-06, |
| "loss": 0.8748, |
| "step": 1479 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 8.133738627292194e-06, |
| "loss": 0.7644, |
| "step": 1480 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 8.108689652108312e-06, |
| "loss": 0.8871, |
| "step": 1481 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 8.083669496464629e-06, |
| "loss": 0.7383, |
| "step": 1482 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 8.058678220999498e-06, |
| "loss": 0.7946, |
| "step": 1483 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 8.033715886281268e-06, |
| "loss": 0.7911, |
| "step": 1484 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 8.008782552808145e-06, |
| "loss": 0.7207, |
| "step": 1485 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 7.983878281008066e-06, |
| "loss": 0.7501, |
| "step": 1486 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 7.95900313123851e-06, |
| "loss": 0.7201, |
| "step": 1487 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 7.934157163786397e-06, |
| "loss": 0.9694, |
| "step": 1488 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 7.909340438867914e-06, |
| "loss": 0.7998, |
| "step": 1489 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 7.884553016628365e-06, |
| "loss": 0.713, |
| "step": 1490 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 7.85979495714206e-06, |
| "loss": 0.8601, |
| "step": 1491 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 7.835066320412121e-06, |
| "loss": 0.7852, |
| "step": 1492 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 7.810367166370387e-06, |
| "loss": 0.6758, |
| "step": 1493 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 7.785697554877226e-06, |
| "loss": 0.8012, |
| "step": 1494 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 7.761057545721398e-06, |
| "loss": 0.9093, |
| "step": 1495 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 7.736447198619952e-06, |
| "loss": 0.7586, |
| "step": 1496 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 7.711866573218016e-06, |
| "loss": 0.7983, |
| "step": 1497 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 7.68731572908871e-06, |
| "loss": 0.7787, |
| "step": 1498 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 7.66279472573295e-06, |
| "loss": 0.8072, |
| "step": 1499 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 7.638303622579365e-06, |
| "loss": 0.6882, |
| "step": 1500 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 7.613842478984086e-06, |
| "loss": 0.8786, |
| "step": 1501 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 7.589411354230643e-06, |
| "loss": 0.8019, |
| "step": 1502 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 7.565010307529834e-06, |
| "loss": 0.9197, |
| "step": 1503 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 7.5406393980195294e-06, |
| "loss": 0.748, |
| "step": 1504 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 7.516298684764587e-06, |
| "loss": 0.7812, |
| "step": 1505 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 7.491988226756664e-06, |
| "loss": 0.8592, |
| "step": 1506 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 7.467708082914087e-06, |
| "loss": 0.734, |
| "step": 1507 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 7.443458312081739e-06, |
| "loss": 0.8436, |
| "step": 1508 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 7.419238973030865e-06, |
| "loss": 0.9025, |
| "step": 1509 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 7.3950501244589755e-06, |
| "loss": 0.8887, |
| "step": 1510 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 7.370891824989674e-06, |
| "loss": 0.7684, |
| "step": 1511 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 7.346764133172519e-06, |
| "loss": 0.7576, |
| "step": 1512 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 7.322667107482915e-06, |
| "loss": 0.6784, |
| "step": 1513 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 7.298600806321912e-06, |
| "loss": 0.8285, |
| "step": 1514 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 7.274565288016124e-06, |
| "loss": 0.7349, |
| "step": 1515 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 7.2505606108175474e-06, |
| "loss": 0.8572, |
| "step": 1516 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 7.226586832903424e-06, |
| "loss": 0.868, |
| "step": 1517 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 7.202644012376135e-06, |
| "loss": 0.8423, |
| "step": 1518 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 7.178732207263002e-06, |
| "loss": 0.7127, |
| "step": 1519 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 7.154851475516209e-06, |
| "loss": 0.8426, |
| "step": 1520 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 7.131001875012602e-06, |
| "loss": 0.8268, |
| "step": 1521 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 7.107183463553604e-06, |
| "loss": 0.8871, |
| "step": 1522 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 7.083396298865029e-06, |
| "loss": 0.8004, |
| "step": 1523 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 7.05964043859698e-06, |
| "loss": 0.7995, |
| "step": 1524 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 7.035915940323668e-06, |
| "loss": 0.8283, |
| "step": 1525 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 7.012222861543323e-06, |
| "loss": 0.7193, |
| "step": 1526 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 6.9885612596779995e-06, |
| "loss": 0.7968, |
| "step": 1527 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 6.964931192073494e-06, |
| "loss": 0.7081, |
| "step": 1528 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 6.9413327159991475e-06, |
| "loss": 0.9797, |
| "step": 1529 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 6.917765888647765e-06, |
| "loss": 0.8899, |
| "step": 1530 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 6.894230767135421e-06, |
| "loss": 0.7598, |
| "step": 1531 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 6.870727408501376e-06, |
| "loss": 0.8236, |
| "step": 1532 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 6.8472558697078895e-06, |
| "loss": 0.8448, |
| "step": 1533 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 6.823816207640102e-06, |
| "loss": 0.7203, |
| "step": 1534 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 6.800408479105922e-06, |
| "loss": 0.9174, |
| "step": 1535 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 6.777032740835834e-06, |
| "loss": 0.7788, |
| "step": 1536 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 6.753689049482819e-06, |
| "loss": 0.7869, |
| "step": 1537 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 6.7303774616221705e-06, |
| "loss": 0.7362, |
| "step": 1538 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 6.707098033751378e-06, |
| "loss": 0.8515, |
| "step": 1539 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 6.683850822290008e-06, |
| "loss": 0.946, |
| "step": 1540 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 6.660635883579518e-06, |
| "loss": 0.9418, |
| "step": 1541 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 6.637453273883181e-06, |
| "loss": 0.6686, |
| "step": 1542 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 6.614303049385895e-06, |
| "loss": 0.9048, |
| "step": 1543 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 6.5911852661940716e-06, |
| "loss": 0.7182, |
| "step": 1544 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 6.568099980335518e-06, |
| "loss": 0.7626, |
| "step": 1545 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 6.545047247759255e-06, |
| "loss": 0.748, |
| "step": 1546 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 6.522027124335433e-06, |
| "loss": 0.8629, |
| "step": 1547 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 6.499039665855151e-06, |
| "loss": 0.7892, |
| "step": 1548 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 6.4760849280303484e-06, |
| "loss": 0.959, |
| "step": 1549 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 6.453162966493675e-06, |
| "loss": 0.7625, |
| "step": 1550 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 6.430273836798324e-06, |
| "loss": 0.816, |
| "step": 1551 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 6.4074175944179395e-06, |
| "loss": 0.7587, |
| "step": 1552 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 6.384594294746447e-06, |
| "loss": 0.998, |
| "step": 1553 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 6.361803993097926e-06, |
| "loss": 0.6944, |
| "step": 1554 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 6.339046744706505e-06, |
| "loss": 0.7138, |
| "step": 1555 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 6.316322604726184e-06, |
| "loss": 0.6594, |
| "step": 1556 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 6.29363162823074e-06, |
| "loss": 0.8516, |
| "step": 1557 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 6.270973870213563e-06, |
| "loss": 0.7726, |
| "step": 1558 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 6.248349385587531e-06, |
| "loss": 0.8122, |
| "step": 1559 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 6.225758229184902e-06, |
| "loss": 0.6941, |
| "step": 1560 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 6.203200455757132e-06, |
| "loss": 0.7837, |
| "step": 1561 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 6.180676119974805e-06, |
| "loss": 0.7474, |
| "step": 1562 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 6.158185276427428e-06, |
| "loss": 0.7785, |
| "step": 1563 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 6.135727979623376e-06, |
| "loss": 0.8185, |
| "step": 1564 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 6.113304283989689e-06, |
| "loss": 0.8503, |
| "step": 1565 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 6.0909142438719794e-06, |
| "loss": 0.9069, |
| "step": 1566 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 6.068557913534312e-06, |
| "loss": 0.7969, |
| "step": 1567 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 6.046235347159024e-06, |
| "loss": 0.8892, |
| "step": 1568 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 6.023946598846651e-06, |
| "loss": 0.7931, |
| "step": 1569 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 6.001691722615745e-06, |
| "loss": 0.8272, |
| "step": 1570 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 5.9794707724027756e-06, |
| "loss": 0.9673, |
| "step": 1571 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 5.9572838020619994e-06, |
| "loss": 0.9058, |
| "step": 1572 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 5.9351308653652954e-06, |
| "loss": 0.8536, |
| "step": 1573 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 5.913012016002091e-06, |
| "loss": 0.7722, |
| "step": 1574 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 5.8909273075791705e-06, |
| "loss": 0.8903, |
| "step": 1575 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 5.868876793620601e-06, |
| "loss": 0.6824, |
| "step": 1576 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 5.8468605275675506e-06, |
| "loss": 0.8223, |
| "step": 1577 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 5.824878562778209e-06, |
| "loss": 0.7742, |
| "step": 1578 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 5.802930952527616e-06, |
| "loss": 0.7584, |
| "step": 1579 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 5.7810177500075644e-06, |
| "loss": 0.7096, |
| "step": 1580 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 5.759139008326442e-06, |
| "loss": 0.7286, |
| "step": 1581 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 5.737294780509137e-06, |
| "loss": 0.7718, |
| "step": 1582 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 5.715485119496868e-06, |
| "loss": 0.7881, |
| "step": 1583 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 5.693710078147103e-06, |
| "loss": 0.8809, |
| "step": 1584 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 5.671969709233385e-06, |
| "loss": 0.7704, |
| "step": 1585 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 5.6502640654452305e-06, |
| "loss": 0.8623, |
| "step": 1586 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 5.628593199388011e-06, |
| "loss": 0.7277, |
| "step": 1587 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 5.606957163582789e-06, |
| "loss": 0.9185, |
| "step": 1588 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 5.5853560104662385e-06, |
| "loss": 0.7629, |
| "step": 1589 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 5.563789792390472e-06, |
| "loss": 0.8838, |
| "step": 1590 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 5.542258561622937e-06, |
| "loss": 0.6666, |
| "step": 1591 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 5.5207623703462995e-06, |
| "loss": 0.7692, |
| "step": 1592 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 5.499301270658284e-06, |
| "loss": 0.9058, |
| "step": 1593 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 5.4778753145715945e-06, |
| "loss": 0.7392, |
| "step": 1594 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 5.456484554013732e-06, |
| "loss": 0.6149, |
| "step": 1595 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 5.435129040826925e-06, |
| "loss": 0.7383, |
| "step": 1596 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 5.413808826767959e-06, |
| "loss": 0.8324, |
| "step": 1597 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 5.392523963508069e-06, |
| "loss": 0.7086, |
| "step": 1598 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 5.371274502632835e-06, |
| "loss": 0.8273, |
| "step": 1599 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 5.350060495642009e-06, |
| "loss": 0.8447, |
| "step": 1600 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 5.328881993949442e-06, |
| "loss": 0.8047, |
| "step": 1601 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 5.3077390488829186e-06, |
| "loss": 0.7535, |
| "step": 1602 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 5.286631711684049e-06, |
| "loss": 0.7135, |
| "step": 1603 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 5.2655600335081615e-06, |
| "loss": 0.7662, |
| "step": 1604 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 5.2445240654241415e-06, |
| "loss": 0.7937, |
| "step": 1605 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 5.223523858414346e-06, |
| "loss": 0.8672, |
| "step": 1606 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 5.202559463374448e-06, |
| "loss": 0.8392, |
| "step": 1607 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 5.181630931113331e-06, |
| "loss": 0.7644, |
| "step": 1608 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 5.160738312352971e-06, |
| "loss": 0.7379, |
| "step": 1609 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 5.139881657728287e-06, |
| "loss": 0.8887, |
| "step": 1610 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 5.119061017787064e-06, |
| "loss": 0.8846, |
| "step": 1611 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 5.098276442989774e-06, |
| "loss": 0.6188, |
| "step": 1612 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 5.07752798370949e-06, |
| "loss": 0.8652, |
| "step": 1613 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 5.056815690231769e-06, |
| "loss": 0.7992, |
| "step": 1614 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 5.036139612754501e-06, |
| "loss": 0.7035, |
| "step": 1615 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 5.01549980138782e-06, |
| "loss": 0.7311, |
| "step": 1616 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 4.994896306153949e-06, |
| "loss": 0.8101, |
| "step": 1617 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 4.9743291769871025e-06, |
| "loss": 0.8478, |
| "step": 1618 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 4.953798463733372e-06, |
| "loss": 0.7407, |
| "step": 1619 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 4.933304216150565e-06, |
| "loss": 0.8636, |
| "step": 1620 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 4.912846483908142e-06, |
| "loss": 0.6478, |
| "step": 1621 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 4.892425316587044e-06, |
| "loss": 0.8704, |
| "step": 1622 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 4.872040763679593e-06, |
| "loss": 0.7613, |
| "step": 1623 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 4.851692874589395e-06, |
| "loss": 0.8397, |
| "step": 1624 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 4.831381698631172e-06, |
| "loss": 0.8374, |
| "step": 1625 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 4.811107285030692e-06, |
| "loss": 0.6728, |
| "step": 1626 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 4.790869682924602e-06, |
| "loss": 0.9003, |
| "step": 1627 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 4.7706689413603615e-06, |
| "loss": 0.6864, |
| "step": 1628 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 4.750505109296069e-06, |
| "loss": 0.8228, |
| "step": 1629 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 4.73037823560039e-06, |
| "loss": 0.7335, |
| "step": 1630 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 4.7102883690524e-06, |
| "loss": 0.7233, |
| "step": 1631 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 4.690235558341509e-06, |
| "loss": 0.7314, |
| "step": 1632 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 4.670219852067293e-06, |
| "loss": 0.8123, |
| "step": 1633 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 4.650241298739422e-06, |
| "loss": 0.694, |
| "step": 1634 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 4.630299946777508e-06, |
| "loss": 0.828, |
| "step": 1635 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 4.610395844511017e-06, |
| "loss": 0.7576, |
| "step": 1636 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 4.590529040179126e-06, |
| "loss": 0.7949, |
| "step": 1637 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 4.570699581930627e-06, |
| "loss": 0.787, |
| "step": 1638 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 4.55090751782379e-06, |
| "loss": 0.7649, |
| "step": 1639 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 4.531152895826261e-06, |
| "loss": 0.7154, |
| "step": 1640 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 4.511435763814951e-06, |
| "loss": 0.6292, |
| "step": 1641 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 4.491756169575896e-06, |
| "loss": 0.7495, |
| "step": 1642 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 4.472114160804175e-06, |
| "loss": 0.7981, |
| "step": 1643 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 4.452509785103758e-06, |
| "loss": 0.8569, |
| "step": 1644 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 4.4329430899874095e-06, |
| "loss": 0.6838, |
| "step": 1645 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 4.413414122876587e-06, |
| "loss": 0.9124, |
| "step": 1646 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 4.393922931101296e-06, |
| "loss": 0.7863, |
| "step": 1647 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 4.374469561900001e-06, |
| "loss": 0.7922, |
| "step": 1648 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 4.355054062419494e-06, |
| "loss": 0.8025, |
| "step": 1649 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 4.335676479714783e-06, |
| "loss": 0.6002, |
| "step": 1650 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 4.316336860748997e-06, |
| "loss": 0.8782, |
| "step": 1651 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 4.297035252393236e-06, |
| "loss": 0.6764, |
| "step": 1652 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 4.2777717014264985e-06, |
| "loss": 0.6844, |
| "step": 1653 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 4.258546254535536e-06, |
| "loss": 0.7753, |
| "step": 1654 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 4.239358958314747e-06, |
| "loss": 0.8182, |
| "step": 1655 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 4.220209859266089e-06, |
| "loss": 0.8466, |
| "step": 1656 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 4.201099003798918e-06, |
| "loss": 0.7496, |
| "step": 1657 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 4.182026438229933e-06, |
| "loss": 0.8054, |
| "step": 1658 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 4.162992208783008e-06, |
| "loss": 0.9287, |
| "step": 1659 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 4.143996361589129e-06, |
| "loss": 0.9243, |
| "step": 1660 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 4.125038942686239e-06, |
| "loss": 0.8774, |
| "step": 1661 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 4.106119998019158e-06, |
| "loss": 0.787, |
| "step": 1662 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 4.0872395734394635e-06, |
| "loss": 0.8279, |
| "step": 1663 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 4.068397714705366e-06, |
| "loss": 0.7005, |
| "step": 1664 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 4.0495944674816235e-06, |
| "loss": 0.6935, |
| "step": 1665 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 4.030829877339404e-06, |
| "loss": 0.8361, |
| "step": 1666 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 4.012103989756179e-06, |
| "loss": 0.7957, |
| "step": 1667 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 3.99341685011565e-06, |
| "loss": 0.8517, |
| "step": 1668 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 3.974768503707578e-06, |
| "loss": 0.864, |
| "step": 1669 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 3.9561589957277325e-06, |
| "loss": 1.0038, |
| "step": 1670 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 3.937588371277734e-06, |
| "loss": 0.7805, |
| "step": 1671 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 3.9190566753649695e-06, |
| "loss": 0.8726, |
| "step": 1672 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 3.900563952902494e-06, |
| "loss": 0.7562, |
| "step": 1673 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 3.882110248708888e-06, |
| "loss": 0.745, |
| "step": 1674 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 3.8636956075081865e-06, |
| "loss": 0.7297, |
| "step": 1675 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 3.8453200739297395e-06, |
| "loss": 0.8068, |
| "step": 1676 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 3.826983692508113e-06, |
| "loss": 0.8313, |
| "step": 1677 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 3.8086865076830016e-06, |
| "loss": 0.7962, |
| "step": 1678 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 3.7904285637990847e-06, |
| "loss": 0.7614, |
| "step": 1679 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 3.772209905105959e-06, |
| "loss": 0.864, |
| "step": 1680 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 3.7540305757579852e-06, |
| "loss": 0.7649, |
| "step": 1681 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 3.735890619814231e-06, |
| "loss": 0.8309, |
| "step": 1682 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 3.7177900812383196e-06, |
| "loss": 0.7396, |
| "step": 1683 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 3.6997290038983604e-06, |
| "loss": 0.7018, |
| "step": 1684 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 3.6817074315668054e-06, |
| "loss": 0.9805, |
| "step": 1685 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 3.6637254079203865e-06, |
| "loss": 0.7778, |
| "step": 1686 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 3.6457829765399644e-06, |
| "loss": 0.8163, |
| "step": 1687 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 3.6278801809104614e-06, |
| "loss": 0.8582, |
| "step": 1688 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 3.610017064420723e-06, |
| "loss": 0.8101, |
| "step": 1689 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 3.5921936703634506e-06, |
| "loss": 0.8097, |
| "step": 1690 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 3.5744100419350523e-06, |
| "loss": 0.7586, |
| "step": 1691 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 3.556666222235581e-06, |
| "loss": 0.7634, |
| "step": 1692 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 3.5389622542685963e-06, |
| "loss": 0.7933, |
| "step": 1693 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 3.5212981809410708e-06, |
| "loss": 0.8243, |
| "step": 1694 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 3.50367404506331e-06, |
| "loss": 0.8139, |
| "step": 1695 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 3.486089889348805e-06, |
| "loss": 0.8511, |
| "step": 1696 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 3.4685457564141676e-06, |
| "loss": 0.788, |
| "step": 1697 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 3.4510416887790023e-06, |
| "loss": 0.8526, |
| "step": 1698 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 3.4335777288658114e-06, |
| "loss": 0.8583, |
| "step": 1699 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 3.416153918999903e-06, |
| "loss": 0.7428, |
| "step": 1700 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 3.3987703014092623e-06, |
| "loss": 0.7699, |
| "step": 1701 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 3.381426918224486e-06, |
| "loss": 0.799, |
| "step": 1702 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 3.3641238114786433e-06, |
| "loss": 0.7842, |
| "step": 1703 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 3.3468610231071906e-06, |
| "loss": 0.7975, |
| "step": 1704 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 3.3296385949478814e-06, |
| "loss": 0.8276, |
| "step": 1705 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 3.3124565687406385e-06, |
| "loss": 0.7976, |
| "step": 1706 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 3.2953149861274827e-06, |
| "loss": 0.6723, |
| "step": 1707 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 3.278213888652404e-06, |
| "loss": 0.7095, |
| "step": 1708 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 3.261153317761272e-06, |
| "loss": 0.7894, |
| "step": 1709 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 3.244133314801754e-06, |
| "loss": 0.796, |
| "step": 1710 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 3.2271539210231738e-06, |
| "loss": 0.7484, |
| "step": 1711 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 3.210215177576461e-06, |
| "loss": 0.8462, |
| "step": 1712 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 3.193317125514006e-06, |
| "loss": 0.7991, |
| "step": 1713 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 3.1764598057895844e-06, |
| "loss": 0.8161, |
| "step": 1714 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 3.1596432592582647e-06, |
| "loss": 0.7618, |
| "step": 1715 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 3.142867526676283e-06, |
| "loss": 0.9829, |
| "step": 1716 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 3.1261326487009726e-06, |
| "loss": 0.8478, |
| "step": 1717 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 3.1094386658906427e-06, |
| "loss": 0.6584, |
| "step": 1718 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 3.0927856187044925e-06, |
| "loss": 0.7591, |
| "step": 1719 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 3.076173547502519e-06, |
| "loss": 0.7973, |
| "step": 1720 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 3.0596024925453927e-06, |
| "loss": 0.8468, |
| "step": 1721 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 3.043072493994399e-06, |
| "loss": 0.7986, |
| "step": 1722 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 3.0265835919112984e-06, |
| "loss": 0.7687, |
| "step": 1723 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 3.010135826258276e-06, |
| "loss": 0.6432, |
| "step": 1724 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.9937292368977956e-06, |
| "loss": 0.8014, |
| "step": 1725 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.977363863592533e-06, |
| "loss": 0.7958, |
| "step": 1726 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.9610397460052877e-06, |
| "loss": 0.8394, |
| "step": 1727 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.944756923698853e-06, |
| "loss": 0.8421, |
| "step": 1728 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.928515436135959e-06, |
| "loss": 0.8952, |
| "step": 1729 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.91231532267914e-06, |
| "loss": 0.7446, |
| "step": 1730 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.8961566225906622e-06, |
| "loss": 0.8212, |
| "step": 1731 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.880039375032435e-06, |
| "loss": 0.7599, |
| "step": 1732 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.863963619065886e-06, |
| "loss": 0.8142, |
| "step": 1733 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.8479293936519025e-06, |
| "loss": 0.7751, |
| "step": 1734 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.831936737650698e-06, |
| "loss": 0.8624, |
| "step": 1735 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.8159856898217654e-06, |
| "loss": 0.8792, |
| "step": 1736 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.80007628882373e-06, |
| "loss": 0.7768, |
| "step": 1737 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.7842085732143066e-06, |
| "loss": 0.7565, |
| "step": 1738 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 2.768382581450162e-06, |
| "loss": 0.8546, |
| "step": 1739 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 2.7525983518868593e-06, |
| "loss": 0.8207, |
| "step": 1740 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 2.7368559227787295e-06, |
| "loss": 0.8777, |
| "step": 1741 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 2.7211553322788175e-06, |
| "loss": 0.7463, |
| "step": 1742 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 2.70549661843875e-06, |
| "loss": 0.7858, |
| "step": 1743 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 2.689879819208674e-06, |
| "loss": 0.9936, |
| "step": 1744 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 2.674304972437152e-06, |
| "loss": 0.7667, |
| "step": 1745 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 2.65877211587106e-06, |
| "loss": 0.777, |
| "step": 1746 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 2.643281287155526e-06, |
| "loss": 0.7273, |
| "step": 1747 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 2.627832523833802e-06, |
| "loss": 0.6368, |
| "step": 1748 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 2.6124258633472078e-06, |
| "loss": 0.7229, |
| "step": 1749 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 2.5970613430350076e-06, |
| "loss": 0.7988, |
| "step": 1750 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 2.5817390001343536e-06, |
| "loss": 0.7748, |
| "step": 1751 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 2.5664588717801575e-06, |
| "loss": 0.8473, |
| "step": 1752 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 2.5512209950050304e-06, |
| "loss": 0.9118, |
| "step": 1753 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 2.5360254067391934e-06, |
| "loss": 0.8104, |
| "step": 1754 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 2.5208721438103557e-06, |
| "loss": 0.6764, |
| "step": 1755 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 2.5057612429436697e-06, |
| "loss": 0.7736, |
| "step": 1756 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 2.490692740761609e-06, |
| "loss": 0.8584, |
| "step": 1757 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 2.4756666737838832e-06, |
| "loss": 0.7709, |
| "step": 1758 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 2.460683078427377e-06, |
| "loss": 0.8495, |
| "step": 1759 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.445741991006021e-06, |
| "loss": 0.6408, |
| "step": 1760 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.430843447730742e-06, |
| "loss": 0.7008, |
| "step": 1761 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.4159874847093458e-06, |
| "loss": 0.7162, |
| "step": 1762 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.4011741379464404e-06, |
| "loss": 0.7658, |
| "step": 1763 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.3864034433433635e-06, |
| "loss": 0.9827, |
| "step": 1764 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.3716754366980622e-06, |
| "loss": 0.7374, |
| "step": 1765 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.3569901537050476e-06, |
| "loss": 0.7533, |
| "step": 1766 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.3423476299552726e-06, |
| "loss": 0.8775, |
| "step": 1767 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.3277479009360547e-06, |
| "loss": 0.7563, |
| "step": 1768 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.3131910020310144e-06, |
| "loss": 0.6648, |
| "step": 1769 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.2986769685199508e-06, |
| "loss": 0.719, |
| "step": 1770 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.2842058355787875e-06, |
| "loss": 0.8639, |
| "step": 1771 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.269777638279471e-06, |
| "loss": 0.8373, |
| "step": 1772 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.2553924115898828e-06, |
| "loss": 0.8113, |
| "step": 1773 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.241050190373777e-06, |
| "loss": 0.7905, |
| "step": 1774 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.2267510093906665e-06, |
| "loss": 0.8163, |
| "step": 1775 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.2124949032957634e-06, |
| "loss": 0.8049, |
| "step": 1776 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.198281906639876e-06, |
| "loss": 0.8953, |
| "step": 1777 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.1841120538693315e-06, |
| "loss": 0.8397, |
| "step": 1778 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.169985379325912e-06, |
| "loss": 0.8456, |
| "step": 1779 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.1559019172467276e-06, |
| "loss": 1.007, |
| "step": 1780 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.141861701764185e-06, |
| "loss": 0.7179, |
| "step": 1781 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.1278647669058584e-06, |
| "loss": 0.7166, |
| "step": 1782 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.113911146594445e-06, |
| "loss": 0.8631, |
| "step": 1783 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.1000008746476543e-06, |
| "loss": 0.7627, |
| "step": 1784 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.086133984778136e-06, |
| "loss": 0.7772, |
| "step": 1785 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.072310510593416e-06, |
| "loss": 0.79, |
| "step": 1786 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.0585304855957778e-06, |
| "loss": 0.9189, |
| "step": 1787 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.0447939431822193e-06, |
| "loss": 0.8636, |
| "step": 1788 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.0311009166443416e-06, |
| "loss": 0.8295, |
| "step": 1789 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.0174514391682964e-06, |
| "loss": 0.8834, |
| "step": 1790 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.0038455438346725e-06, |
| "loss": 1.0268, |
| "step": 1791 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.990283263618451e-06, |
| "loss": 0.9491, |
| "step": 1792 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.9767646313888965e-06, |
| "loss": 0.8521, |
| "step": 1793 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.9632896799095015e-06, |
| "loss": 0.8468, |
| "step": 1794 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.9498584418378753e-06, |
| "loss": 0.769, |
| "step": 1795 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.936470949725706e-06, |
| "loss": 0.863, |
| "step": 1796 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.9231272360186427e-06, |
| "loss": 0.8091, |
| "step": 1797 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.9098273330562488e-06, |
| "loss": 0.8035, |
| "step": 1798 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.8965712730718943e-06, |
| "loss": 0.6403, |
| "step": 1799 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 1.8833590881927e-06, |
| "loss": 0.7157, |
| "step": 1800 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.8701908104394562e-06, |
| "loss": 0.8206, |
| "step": 1801 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.8570664717265251e-06, |
| "loss": 0.5984, |
| "step": 1802 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.8439861038618034e-06, |
| "loss": 0.8247, |
| "step": 1803 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.8309497385465992e-06, |
| "loss": 0.7316, |
| "step": 1804 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.8179574073755812e-06, |
| "loss": 0.8623, |
| "step": 1805 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.805009141836711e-06, |
| "loss": 0.7296, |
| "step": 1806 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.7921049733111329e-06, |
| "loss": 0.7896, |
| "step": 1807 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.779244933073141e-06, |
| "loss": 0.8222, |
| "step": 1808 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.7664290522900663e-06, |
| "loss": 0.8393, |
| "step": 1809 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.753657362022212e-06, |
| "loss": 0.7363, |
| "step": 1810 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.7409298932228002e-06, |
| "loss": 0.8046, |
| "step": 1811 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.7282466767378592e-06, |
| "loss": 0.7361, |
| "step": 1812 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.7156077433061825e-06, |
| "loss": 0.8613, |
| "step": 1813 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.7030131235592318e-06, |
| "loss": 0.7886, |
| "step": 1814 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.6904628480210749e-06, |
| "loss": 0.7348, |
| "step": 1815 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.6779569471083078e-06, |
| "loss": 0.824, |
| "step": 1816 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.665495451129977e-06, |
| "loss": 0.7885, |
| "step": 1817 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.653078390287517e-06, |
| "loss": 0.8066, |
| "step": 1818 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.6407057946746597e-06, |
| "loss": 0.8185, |
| "step": 1819 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.6283776942773878e-06, |
| "loss": 0.8738, |
| "step": 1820 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 1.6160941189738299e-06, |
| "loss": 0.8296, |
| "step": 1821 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.6038550985342126e-06, |
| "loss": 0.8503, |
| "step": 1822 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.5916606626207843e-06, |
| "loss": 0.8268, |
| "step": 1823 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.5795108407877258e-06, |
| "loss": 0.7455, |
| "step": 1824 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.5674056624811096e-06, |
| "loss": 0.8188, |
| "step": 1825 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.5553451570387968e-06, |
| "loss": 0.7861, |
| "step": 1826 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.5433293536903837e-06, |
| "loss": 0.7601, |
| "step": 1827 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.531358281557136e-06, |
| "loss": 0.7836, |
| "step": 1828 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.5194319696518944e-06, |
| "loss": 0.8343, |
| "step": 1829 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.5075504468790358e-06, |
| "loss": 0.7926, |
| "step": 1830 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.495713742034377e-06, |
| "loss": 0.7364, |
| "step": 1831 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.4839218838051128e-06, |
| "loss": 0.7906, |
| "step": 1832 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.4721749007697605e-06, |
| "loss": 0.8372, |
| "step": 1833 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.4604728213980646e-06, |
| "loss": 0.8345, |
| "step": 1834 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.448815674050954e-06, |
| "loss": 0.763, |
| "step": 1835 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.4372034869804562e-06, |
| "loss": 0.7455, |
| "step": 1836 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.4256362883296304e-06, |
| "loss": 0.7792, |
| "step": 1837 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.4141141061325093e-06, |
| "loss": 0.7958, |
| "step": 1838 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.4026369683140194e-06, |
| "loss": 0.7817, |
| "step": 1839 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.3912049026899222e-06, |
| "loss": 0.9599, |
| "step": 1840 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.3798179369667386e-06, |
| "loss": 0.7668, |
| "step": 1841 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 1.3684760987416955e-06, |
| "loss": 0.7051, |
| "step": 1842 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.3571794155026385e-06, |
| "loss": 0.6687, |
| "step": 1843 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.3459279146279825e-06, |
| "loss": 0.8387, |
| "step": 1844 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.3347216233866389e-06, |
| "loss": 0.8472, |
| "step": 1845 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.3235605689379495e-06, |
| "loss": 0.7332, |
| "step": 1846 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.3124447783316186e-06, |
| "loss": 0.8496, |
| "step": 1847 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.3013742785076588e-06, |
| "loss": 0.7102, |
| "step": 1848 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.2903490962963016e-06, |
| "loss": 0.7193, |
| "step": 1849 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.2793692584179661e-06, |
| "loss": 0.9132, |
| "step": 1850 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.2684347914831551e-06, |
| "loss": 0.7766, |
| "step": 1851 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.2575457219924348e-06, |
| "loss": 0.8676, |
| "step": 1852 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.2467020763363258e-06, |
| "loss": 0.8197, |
| "step": 1853 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.2359038807952705e-06, |
| "loss": 0.7333, |
| "step": 1854 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.2251511615395618e-06, |
| "loss": 0.7514, |
| "step": 1855 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.2144439446292666e-06, |
| "loss": 0.7764, |
| "step": 1856 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.2037822560141876e-06, |
| "loss": 0.6432, |
| "step": 1857 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.1931661215337776e-06, |
| "loss": 0.6839, |
| "step": 1858 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.1825955669170797e-06, |
| "loss": 0.7978, |
| "step": 1859 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.1720706177826858e-06, |
| "loss": 0.7621, |
| "step": 1860 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.1615912996386424e-06, |
| "loss": 0.7235, |
| "step": 1861 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.1511576378824208e-06, |
| "loss": 0.7554, |
| "step": 1862 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 1.1407696578008287e-06, |
| "loss": 0.7399, |
| "step": 1863 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.1304273845699677e-06, |
| "loss": 0.7374, |
| "step": 1864 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.1201308432551628e-06, |
| "loss": 0.7701, |
| "step": 1865 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.1098800588108994e-06, |
| "loss": 0.8395, |
| "step": 1866 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.0996750560807823e-06, |
| "loss": 0.7723, |
| "step": 1867 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.0895158597974431e-06, |
| "loss": 0.6817, |
| "step": 1868 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.0794024945825043e-06, |
| "loss": 0.8405, |
| "step": 1869 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.0693349849465173e-06, |
| "loss": 0.6523, |
| "step": 1870 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.0593133552888935e-06, |
| "loss": 1.0328, |
| "step": 1871 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.0493376298978575e-06, |
| "loss": 0.904, |
| "step": 1872 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.039407832950372e-06, |
| "loss": 0.8709, |
| "step": 1873 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.0295239885120912e-06, |
| "loss": 0.7673, |
| "step": 1874 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.0196861205373065e-06, |
| "loss": 0.8498, |
| "step": 1875 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.0098942528688726e-06, |
| "loss": 0.7199, |
| "step": 1876 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.0001484092381642e-06, |
| "loss": 0.9574, |
| "step": 1877 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 9.904486132650092e-07, |
| "loss": 0.7808, |
| "step": 1878 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 9.807948884576435e-07, |
| "loss": 0.8434, |
| "step": 1879 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 9.71187258212638e-07, |
| "loss": 0.8126, |
| "step": 1880 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 9.616257458148448e-07, |
| "loss": 0.7238, |
| "step": 1881 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 9.521103744373627e-07, |
| "loss": 0.6984, |
| "step": 1882 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 9.42641167141447e-07, |
| "loss": 0.7997, |
| "step": 1883 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 9.332181468764823e-07, |
| "loss": 0.9143, |
| "step": 1884 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 9.238413364799092e-07, |
| "loss": 0.8173, |
| "step": 1885 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 9.145107586771718e-07, |
| "loss": 0.8494, |
| "step": 1886 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 9.052264360816788e-07, |
| "loss": 0.844, |
| "step": 1887 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 8.959883911947199e-07, |
| "loss": 0.853, |
| "step": 1888 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 8.867966464054411e-07, |
| "loss": 0.8262, |
| "step": 1889 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 8.776512239907675e-07, |
| "loss": 0.7729, |
| "step": 1890 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 8.685521461153579e-07, |
| "loss": 0.8433, |
| "step": 1891 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 8.594994348315566e-07, |
| "loss": 0.8724, |
| "step": 1892 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 8.504931120793314e-07, |
| "loss": 0.7585, |
| "step": 1893 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 8.415331996862263e-07, |
| "loss": 0.6362, |
| "step": 1894 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 8.326197193673002e-07, |
| "loss": 0.8052, |
| "step": 1895 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 8.237526927250905e-07, |
| "loss": 0.6686, |
| "step": 1896 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 8.149321412495403e-07, |
| "loss": 0.7978, |
| "step": 1897 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 8.06158086317963e-07, |
| "loss": 0.6994, |
| "step": 1898 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 7.974305491949774e-07, |
| "loss": 0.7491, |
| "step": 1899 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 7.887495510324727e-07, |
| "loss": 0.7033, |
| "step": 1900 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 7.801151128695328e-07, |
| "loss": 0.6809, |
| "step": 1901 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 7.715272556324182e-07, |
| "loss": 0.618, |
| "step": 1902 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 7.62986000134478e-07, |
| "loss": 0.8353, |
| "step": 1903 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 7.544913670761312e-07, |
| "loss": 0.7895, |
| "step": 1904 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 7.460433770448006e-07, |
| "loss": 0.6966, |
| "step": 1905 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 7.376420505148596e-07, |
| "loss": 0.7782, |
| "step": 1906 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 7.292874078475987e-07, |
| "loss": 0.7009, |
| "step": 1907 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 7.209794692911587e-07, |
| "loss": 0.873, |
| "step": 1908 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 7.127182549804934e-07, |
| "loss": 0.8967, |
| "step": 1909 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 7.045037849373138e-07, |
| "loss": 0.7229, |
| "step": 1910 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 6.96336079070048e-07, |
| "loss": 0.7422, |
| "step": 1911 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 6.882151571737816e-07, |
| "loss": 0.9238, |
| "step": 1912 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 6.801410389302132e-07, |
| "loss": 0.7907, |
| "step": 1913 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 6.721137439076186e-07, |
| "loss": 0.8472, |
| "step": 1914 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 6.641332915607845e-07, |
| "loss": 0.8317, |
| "step": 1915 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 6.561997012309796e-07, |
| "loss": 0.6712, |
| "step": 1916 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 6.483129921458875e-07, |
| "loss": 0.7183, |
| "step": 1917 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 6.404731834195788e-07, |
| "loss": 0.8612, |
| "step": 1918 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 6.326802940524591e-07, |
| "loss": 0.7981, |
| "step": 1919 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 6.249343429312161e-07, |
| "loss": 0.7665, |
| "step": 1920 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 6.172353488287797e-07, |
| "loss": 0.824, |
| "step": 1921 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 6.09583330404282e-07, |
| "loss": 0.6806, |
| "step": 1922 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 6.019783062029927e-07, |
| "loss": 0.6911, |
| "step": 1923 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 5.944202946563016e-07, |
| "loss": 0.7806, |
| "step": 1924 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 5.869093140816473e-07, |
| "loss": 0.9656, |
| "step": 1925 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 5.794453826824998e-07, |
| "loss": 0.7291, |
| "step": 1926 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 5.720285185482865e-07, |
| "loss": 0.8032, |
| "step": 1927 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 5.646587396543691e-07, |
| "loss": 0.7787, |
| "step": 1928 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 5.573360638619996e-07, |
| "loss": 0.6745, |
| "step": 1929 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 5.50060508918262e-07, |
| "loss": 0.9704, |
| "step": 1930 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 5.428320924560515e-07, |
| "loss": 0.8058, |
| "step": 1931 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 5.356508319940102e-07, |
| "loss": 0.7408, |
| "step": 1932 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 5.285167449364915e-07, |
| "loss": 0.7023, |
| "step": 1933 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 5.214298485735314e-07, |
| "loss": 0.7662, |
| "step": 1934 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 5.143901600807844e-07, |
| "loss": 0.8133, |
| "step": 1935 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 5.073976965195027e-07, |
| "loss": 0.7792, |
| "step": 1936 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 5.004524748364748e-07, |
| "loss": 0.763, |
| "step": 1937 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 4.935545118640028e-07, |
| "loss": 0.7631, |
| "step": 1938 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 4.867038243198519e-07, |
| "loss": 0.8214, |
| "step": 1939 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 4.799004288072073e-07, |
| "loss": 0.9035, |
| "step": 1940 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 4.731443418146464e-07, |
| "loss": 0.8213, |
| "step": 1941 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 4.66435579716078e-07, |
| "loss": 0.7004, |
| "step": 1942 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 4.597741587707294e-07, |
| "loss": 0.77, |
| "step": 1943 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 4.531600951230841e-07, |
| "loss": 0.8242, |
| "step": 1944 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 4.4659340480285085e-07, |
| "loss": 0.7834, |
| "step": 1945 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 4.4007410372493453e-07, |
| "loss": 0.6946, |
| "step": 1946 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 4.336022076893742e-07, |
| "loss": 0.8164, |
| "step": 1947 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 4.27177732381332e-07, |
| "loss": 0.9032, |
| "step": 1948 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 4.208006933710329e-07, |
| "loss": 0.956, |
| "step": 1949 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 4.144711061137452e-07, |
| "loss": 0.7393, |
| "step": 1950 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 4.0818898594972455e-07, |
| "loss": 0.7495, |
| "step": 1951 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 4.019543481041921e-07, |
| "loss": 0.8178, |
| "step": 1952 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 3.9576720768729205e-07, |
| "loss": 0.7956, |
| "step": 1953 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 3.8962757969405184e-07, |
| "loss": 0.7989, |
| "step": 1954 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 3.8353547900435107e-07, |
| "loss": 0.8337, |
| "step": 1955 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 3.7749092038288137e-07, |
| "loss": 0.8836, |
| "step": 1956 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 3.714939184791133e-07, |
| "loss": 0.7453, |
| "step": 1957 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 3.655444878272607e-07, |
| "loss": 0.7661, |
| "step": 1958 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 3.5964264284624294e-07, |
| "loss": 0.7837, |
| "step": 1959 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 3.537883978396517e-07, |
| "loss": 0.7851, |
| "step": 1960 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 3.4798176699572193e-07, |
| "loss": 0.6859, |
| "step": 1961 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 3.4222276438728106e-07, |
| "loss": 0.7565, |
| "step": 1962 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 3.365114039717354e-07, |
| "loss": 0.8842, |
| "step": 1963 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 3.308476995910237e-07, |
| "loss": 0.761, |
| "step": 1964 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 3.252316649715814e-07, |
| "loss": 0.8768, |
| "step": 1965 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 3.19663313724321e-07, |
| "loss": 0.9163, |
| "step": 1966 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 3.141426593445829e-07, |
| "loss": 0.7217, |
| "step": 1967 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 3.0866971521211764e-07, |
| "loss": 0.7374, |
| "step": 1968 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 3.0324449459104175e-07, |
| "loss": 0.6896, |
| "step": 1969 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 2.978670106298087e-07, |
| "loss": 0.8954, |
| "step": 1970 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 2.925372763611822e-07, |
| "loss": 0.7606, |
| "step": 1971 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 2.872553047021964e-07, |
| "loss": 0.7081, |
| "step": 1972 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 2.8202110845413357e-07, |
| "loss": 0.78, |
| "step": 1973 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 2.7683470030248407e-07, |
| "loss": 0.7278, |
| "step": 1974 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 2.7169609281692653e-07, |
| "loss": 0.7887, |
| "step": 1975 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 2.6660529845128123e-07, |
| "loss": 0.9141, |
| "step": 1976 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 2.615623295434988e-07, |
| "loss": 0.7856, |
| "step": 1977 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 2.5656719831561594e-07, |
| "loss": 0.7657, |
| "step": 1978 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 2.5161991687372876e-07, |
| "loss": 0.7214, |
| "step": 1979 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 2.467204972079751e-07, |
| "loss": 0.8158, |
| "step": 1980 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 2.418689511924854e-07, |
| "loss": 0.8239, |
| "step": 1981 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 2.3706529058536986e-07, |
| "loss": 0.7365, |
| "step": 1982 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 2.3230952702868458e-07, |
| "loss": 0.8375, |
| "step": 1983 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 2.276016720484009e-07, |
| "loss": 0.7789, |
| "step": 1984 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 2.2294173705438072e-07, |
| "loss": 0.8496, |
| "step": 1985 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 2.1832973334035225e-07, |
| "loss": 0.8173, |
| "step": 1986 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 2.1376567208386988e-07, |
| "loss": 0.9581, |
| "step": 1987 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 2.0924956434630328e-07, |
| "loss": 0.7356, |
| "step": 1988 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 2.0478142107279942e-07, |
| "loss": 0.8488, |
| "step": 1989 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 2.0036125309226274e-07, |
| "loss": 0.8433, |
| "step": 1990 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.9598907111731958e-07, |
| "loss": 0.8994, |
| "step": 1991 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.9166488574430487e-07, |
| "loss": 0.8825, |
| "step": 1992 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.8738870745322656e-07, |
| "loss": 0.7001, |
| "step": 1993 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.831605466077413e-07, |
| "loss": 0.8457, |
| "step": 1994 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.7898041345513872e-07, |
| "loss": 0.7381, |
| "step": 1995 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.7484831812630388e-07, |
| "loss": 0.8493, |
| "step": 1996 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.7076427063569712e-07, |
| "loss": 0.771, |
| "step": 1997 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.6672828088133862e-07, |
| "loss": 0.816, |
| "step": 1998 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.6274035864476846e-07, |
| "loss": 0.6762, |
| "step": 1999 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.5880051359103532e-07, |
| "loss": 0.8481, |
| "step": 2000 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.5490875526866788e-07, |
| "loss": 0.7148, |
| "step": 2001 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.5106509310965466e-07, |
| "loss": 0.5975, |
| "step": 2002 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.4726953642941522e-07, |
| "loss": 0.7898, |
| "step": 2003 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.4352209442678677e-07, |
| "loss": 0.905, |
| "step": 2004 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.398227761839954e-07, |
| "loss": 0.6776, |
| "step": 2005 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.3617159066662944e-07, |
| "loss": 0.8488, |
| "step": 2006 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.3256854672363263e-07, |
| "loss": 0.6709, |
| "step": 2007 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.2901365308726878e-07, |
| "loss": 0.7547, |
| "step": 2008 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.2550691837310614e-07, |
| "loss": 0.8751, |
| "step": 2009 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.2204835107999746e-07, |
| "loss": 0.8257, |
| "step": 2010 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.186379595900533e-07, |
| "loss": 0.7858, |
| "step": 2011 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.1527575216863318e-07, |
| "loss": 0.7069, |
| "step": 2012 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.1196173696431445e-07, |
| "loss": 0.7155, |
| "step": 2013 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.086959220088768e-07, |
| "loss": 0.8951, |
| "step": 2014 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.0547831521728447e-07, |
| "loss": 0.8752, |
| "step": 2015 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.0230892438766404e-07, |
| "loss": 0.7507, |
| "step": 2016 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 9.918775720128893e-08, |
| "loss": 0.7597, |
| "step": 2017 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 9.611482122255489e-08, |
| "loss": 0.7164, |
| "step": 2018 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 9.309012389896677e-08, |
| "loss": 0.7541, |
| "step": 2019 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 9.011367256112292e-08, |
| "loss": 0.7041, |
| "step": 2020 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 8.718547442268855e-08, |
| "loss": 0.9207, |
| "step": 2021 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 8.430553658038687e-08, |
| "loss": 0.7468, |
| "step": 2022 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 8.147386601397689e-08, |
| "loss": 0.7138, |
| "step": 2023 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 7.869046958623782e-08, |
| "loss": 0.7367, |
| "step": 2024 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 7.595535404295584e-08, |
| "loss": 0.6845, |
| "step": 2025 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 7.326852601289735e-08, |
| "loss": 0.9195, |
| "step": 2026 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 7.062999200780906e-08, |
| "loss": 0.7876, |
| "step": 2027 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 6.803975842238686e-08, |
| "loss": 0.7008, |
| "step": 2028 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 6.549783153426692e-08, |
| "loss": 0.7194, |
| "step": 2029 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 6.300421750401464e-08, |
| "loss": 0.8785, |
| "step": 2030 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 6.055892237510241e-08, |
| "loss": 0.9092, |
| "step": 2031 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 5.81619520738963e-08, |
| "loss": 0.7507, |
| "step": 2032 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 5.581331240964938e-08, |
| "loss": 0.7115, |
| "step": 2033 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 5.3513009074472874e-08, |
| "loss": 0.7851, |
| "step": 2034 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 5.126104764333839e-08, |
| "loss": 0.9028, |
| "step": 2035 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 4.905743357405124e-08, |
| "loss": 0.8539, |
| "step": 2036 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 4.690217220725046e-08, |
| "loss": 0.7222, |
| "step": 2037 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 4.4795268766379963e-08, |
| "loss": 0.8067, |
| "step": 2038 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 4.2736728357692935e-08, |
| "loss": 0.7003, |
| "step": 2039 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 4.0726555970223015e-08, |
| "loss": 0.78, |
| "step": 2040 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 3.876475647578204e-08, |
| "loss": 0.7718, |
| "step": 2041 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 3.685133462895341e-08, |
| "loss": 0.7875, |
| "step": 2042 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 3.498629506706097e-08, |
| "loss": 0.7403, |
| "step": 2043 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 3.3169642310182384e-08, |
| "loss": 0.8347, |
| "step": 2044 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 3.14013807611202e-08, |
| "loss": 0.962, |
| "step": 2045 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 2.9681514705393045e-08, |
| "loss": 0.8896, |
| "step": 2046 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 2.801004831123999e-08, |
| "loss": 0.8373, |
| "step": 2047 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 2.6386985629587303e-08, |
| "loss": 0.702, |
| "step": 2048 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 2.481233059405952e-08, |
| "loss": 0.7443, |
| "step": 2049 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 2.328608702095947e-08, |
| "loss": 0.7173, |
| "step": 2050 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 2.1808258609261613e-08, |
| "loss": 0.7821, |
| "step": 2051 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 2.0378848940598716e-08, |
| "loss": 0.7199, |
| "step": 2052 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.8997861479259637e-08, |
| "loss": 0.7673, |
| "step": 2053 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.7665299572176e-08, |
| "loss": 0.6804, |
| "step": 2054 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.638116644892218e-08, |
| "loss": 0.8385, |
| "step": 2055 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.5145465221695353e-08, |
| "loss": 0.7585, |
| "step": 2056 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.3958198885315467e-08, |
| "loss": 0.913, |
| "step": 2057 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.2819370317218583e-08, |
| "loss": 0.8979, |
| "step": 2058 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.1728982277443569e-08, |
| "loss": 0.7767, |
| "step": 2059 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.0687037408636525e-08, |
| "loss": 0.8949, |
| "step": 2060 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 9.693538236033029e-09, |
| "loss": 0.7811, |
| "step": 2061 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 8.74848716745813e-09, |
| "loss": 0.7495, |
| "step": 2062 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 7.851886493317473e-09, |
| "loss": 0.6359, |
| "step": 2063 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 7.003738386595071e-09, |
| "loss": 0.9381, |
| "step": 2064 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 6.204044902844431e-09, |
| "loss": 0.8975, |
| "step": 2065 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 5.452807980186325e-09, |
| "loss": 0.8496, |
| "step": 2066 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 4.75002943930658e-09, |
| "loss": 0.7888, |
| "step": 2067 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 4.095710983442746e-09, |
| "loss": 0.7674, |
| "step": 2068 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 3.489854198386322e-09, |
| "loss": 0.847, |
| "step": 2069 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 2.932460552482752e-09, |
| "loss": 0.8345, |
| "step": 2070 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 2.4235313966203268e-09, |
| "loss": 0.9327, |
| "step": 2071 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.9630679642257398e-09, |
| "loss": 0.8124, |
| "step": 2072 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.5510713712707515e-09, |
| "loss": 0.8164, |
| "step": 2073 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.1875426162633041e-09, |
| "loss": 0.8303, |
| "step": 2074 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 8.724825802430837e-10, |
| "loss": 0.7074, |
| "step": 2075 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 6.058920267837387e-10, |
| "loss": 0.8208, |
| "step": 2076 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 3.8777160198844033e-10, |
| "loss": 0.751, |
| "step": 2077 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.1812183448988167e-10, |
| "loss": 0.7847, |
| "step": 2078 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 9.694313544583722e-11, |
| "loss": 0.7622, |
| "step": 2079 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.423579854582414e-11, |
| "loss": 0.7019, |
| "step": 2080 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 0.0, |
| "loss": 0.7925, |
| "step": 2081 |
| }, |
| { |
| "epoch": 1.0, |
| "step": 2081, |
| "total_flos": 6.1537201706232e+21, |
| "train_loss": 0.8170489284179923, |
| "train_runtime": 14766.6549, |
| "train_samples_per_second": 2.256, |
| "train_steps_per_second": 0.141 |
| } |
| ], |
| "logging_steps": 1.0, |
| "max_steps": 2081, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 1, |
| "save_steps": 200, |
| "total_flos": 6.1537201706232e+21, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |