{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 29.41176470588235,
  "global_step": 1000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.03, "learning_rate": 5e-05, "loss": 0.9671, "step": 1 },
    { "epoch": 0.06, "learning_rate": 0.0001, "loss": 0.9742, "step": 2 },
    { "epoch": 0.09, "learning_rate": 9.999975227016531e-05, "loss": 0.9788, "step": 3 },
    { "epoch": 0.12, "learning_rate": 9.999900908311602e-05, "loss": 0.8884, "step": 4 },
    { "epoch": 0.15, "learning_rate": 9.999777044621652e-05, "loss": 0.9563, "step": 5 },
    { "epoch": 0.18, "learning_rate": 9.999603637174071e-05, "loss": 0.9023, "step": 6 },
    { "epoch": 0.21, "learning_rate": 9.999380687687188e-05, "loss": 0.9346, "step": 7 },
    { "epoch": 0.24, "learning_rate": 9.999108198370249e-05, "loss": 0.98, "step": 8 },
    { "epoch": 0.26, "learning_rate": 9.998786171923407e-05, "loss": 0.8556, "step": 9 },
    { "epoch": 0.29, "learning_rate": 9.998414611537681e-05, "loss": 0.8322, "step": 10 },
    { "epoch": 0.32, "learning_rate": 9.997993520894937e-05, "loss": 0.7677, "step": 11 },
    { "epoch": 0.35, "learning_rate": 9.997522904167844e-05, "loss": 0.7311, "step": 12 },
    { "epoch": 0.38, "learning_rate": 9.997002766019832e-05, "loss": 0.706, "step": 13 },
    { "epoch": 0.41, "learning_rate": 9.996433111605052e-05, "loss": 0.6655, "step": 14 },
    { "epoch": 0.44, "learning_rate": 9.99581394656832e-05, "loss": 0.7465, "step": 15 },
    { "epoch": 0.47, "learning_rate": 9.995145277045061e-05, "loss": 0.7319, "step": 16 },
    { "epoch": 0.5, "learning_rate": 9.994427109661253e-05, "loss": 0.587, "step": 17 },
    { "epoch": 0.53, "learning_rate": 9.993659451533353e-05, "loss": 0.6471, "step": 18 },
    { "epoch": 0.56, "learning_rate": 9.992842310268233e-05, "loss": 0.6296, "step": 19 },
    { "epoch": 0.59, "learning_rate": 9.991975693963107e-05, "loss": 0.5796, "step": 20 },
    { "epoch": 0.62, "learning_rate": 9.99105961120544e-05, "loss": 0.6657, "step": 21 },
    { "epoch": 0.65, "learning_rate": 9.990094071072877e-05, "loss": 0.576, "step": 22 },
    { "epoch": 0.68, "learning_rate": 9.989079083133139e-05, "loss": 0.626, "step": 23 },
    { "epoch": 0.71, "learning_rate": 9.988014657443941e-05, "loss": 0.5832, "step": 24 },
    { "epoch": 0.74, "learning_rate": 9.986900804552878e-05, "loss": 0.6958, "step": 25 },
    { "epoch": 0.76, "learning_rate": 9.985737535497337e-05, "loss": 0.6523, "step": 26 },
    { "epoch": 0.79, "learning_rate": 9.984524861804376e-05, "loss": 0.6173, "step": 27 },
    { "epoch": 0.82, "learning_rate": 9.983262795490613e-05, "loss": 0.6003, "step": 28 },
    { "epoch": 0.85, "learning_rate": 9.981951349062106e-05, "loss": 0.6464, "step": 29 },
    { "epoch": 0.88, "learning_rate": 9.980590535514233e-05, "loss": 0.7132, "step": 30 },
    { "epoch": 0.91, "learning_rate": 9.979180368331558e-05, "loss": 0.6187, "step": 31 },
    { "epoch": 0.94, "learning_rate": 9.9777208614877e-05, "loss": 0.625, "step": 32 },
    { "epoch": 0.97, "learning_rate": 9.976212029445194e-05, "loss": 0.6943, "step": 33 },
    { "epoch": 1.0, "learning_rate": 9.97465388715535e-05, "loss": 0.5769, "step": 34 },
    { "epoch": 1.03, "learning_rate": 9.9730464500581e-05, "loss": 0.554, "step": 35 },
    { "epoch": 1.06, "learning_rate": 9.971389734081848e-05, "loss": 0.5686, "step": 36 },
    { "epoch": 1.09, "learning_rate": 9.969683755643317e-05, "loss": 0.6092, "step": 37 },
    { "epoch": 1.12, "learning_rate": 9.967928531647374e-05, "loss": 0.5469, "step": 38 },
    { "epoch": 1.15, "learning_rate": 9.966124079486872e-05, "loss": 0.5331, "step": 39 },
    { "epoch": 1.18, "learning_rate": 9.96427041704248e-05, "loss": 0.552, "step": 40 },
    { "epoch": 1.21, "learning_rate": 9.962367562682496e-05, "loss": 0.6404, "step": 41 },
    { "epoch": 1.24, "learning_rate": 9.960415535262671e-05, "loss": 0.5036, "step": 42 },
    { "epoch": 1.26, "learning_rate": 9.958414354126022e-05, "loss": 0.4996, "step": 43 },
    { "epoch": 1.29, "learning_rate": 9.956364039102642e-05, "loss": 0.5628, "step": 44 },
    { "epoch": 1.32, "learning_rate": 9.954264610509497e-05, "loss": 0.5361, "step": 45 },
    { "epoch": 1.35, "learning_rate": 9.952116089150232e-05, "loss": 0.5238, "step": 46 },
    { "epoch": 1.38, "learning_rate": 9.94991849631496e-05, "loss": 0.5175, "step": 47 },
    { "epoch": 1.41, "learning_rate": 9.947671853780054e-05, "loss": 0.5335, "step": 48 },
    { "epoch": 1.44, "learning_rate": 9.94537618380793e-05, "loss": 0.5588, "step": 49 },
    { "epoch": 1.47, "learning_rate": 9.943031509146825e-05, "loss": 0.5778, "step": 50 },
    { "epoch": 1.5, "learning_rate": 9.940637853030572e-05, "loss": 0.6549, "step": 51 },
    { "epoch": 1.53, "learning_rate": 9.938195239178374e-05, "loss": 0.5418, "step": 52 },
    { "epoch": 1.56, "learning_rate": 9.935703691794565e-05, "loss": 0.5377, "step": 53 },
    { "epoch": 1.59, "learning_rate": 9.933163235568367e-05, "loss": 0.5407, "step": 54 },
    { "epoch": 1.62, "learning_rate": 9.930573895673657e-05, "loss": 0.5151, "step": 55 },
    { "epoch": 1.65, "learning_rate": 9.927935697768698e-05, "loss": 0.5212, "step": 56 },
    { "epoch": 1.68, "learning_rate": 9.925248667995907e-05, "loss": 0.4884, "step": 57 },
    { "epoch": 1.71, "learning_rate": 9.922512832981584e-05, "loss": 0.5755, "step": 58 },
    { "epoch": 1.74, "learning_rate": 9.919728219835643e-05, "loss": 0.4974, "step": 59 },
    { "epoch": 1.76, "learning_rate": 9.916894856151357e-05, "loss": 0.5347, "step": 60 },
    { "epoch": 1.79, "learning_rate": 9.914012770005072e-05, "loss": 0.5166, "step": 61 },
    { "epoch": 1.82, "learning_rate": 9.91108198995594e-05, "loss": 0.5797, "step": 62 },
    { "epoch": 1.85, "learning_rate": 9.908102545045625e-05, "loss": 0.5326, "step": 63 },
    { "epoch": 1.88, "learning_rate": 9.905074464798024e-05, "loss": 0.4958, "step": 64 },
    { "epoch": 1.91, "learning_rate": 9.901997779218967e-05, "loss": 0.5195, "step": 65 },
    { "epoch": 1.94, "learning_rate": 9.898872518795932e-05, "loss": 0.5413, "step": 66 },
    { "epoch": 1.97, "learning_rate": 9.895698714497724e-05, "loss": 0.4966, "step": 67 },
    { "epoch": 2.0, "learning_rate": 9.892476397774186e-05, "loss": 0.4877, "step": 68 },
    { "epoch": 2.03, "learning_rate": 9.889205600555877e-05, "loss": 0.4595, "step": 69 },
    { "epoch": 2.06, "learning_rate": 9.885886355253758e-05, "loss": 0.4971, "step": 70 },
    { "epoch": 2.09, "learning_rate": 9.882518694758875e-05, "loss": 0.4404, "step": 71 },
    { "epoch": 2.12, "learning_rate": 9.879102652442024e-05, "loss": 0.5061, "step": 72 },
    { "epoch": 2.15, "learning_rate": 9.875638262153431e-05, "loss": 0.4412, "step": 73 },
    { "epoch": 2.18, "learning_rate": 9.872125558222409e-05, "loss": 0.4656, "step": 74 },
    { "epoch": 2.21, "learning_rate": 9.868564575457023e-05, "loss": 0.4941, "step": 75 },
    { "epoch": 2.24, "learning_rate": 9.864955349143734e-05, "loss": 0.4919, "step": 76 },
    { "epoch": 2.26, "learning_rate": 9.861297915047069e-05, "loss": 0.4698, "step": 77 },
    { "epoch": 2.29, "learning_rate": 9.857592309409247e-05, "loss": 0.4034, "step": 78 },
    { "epoch": 2.32, "learning_rate": 9.853838568949831e-05, "loss": 0.4339, "step": 79 },
    { "epoch": 2.35, "learning_rate": 9.850036730865364e-05, "loss": 0.4331, "step": 80 },
    { "epoch": 2.38, "learning_rate": 9.846186832828989e-05, "loss": 0.3584, "step": 81 },
    { "epoch": 2.41, "learning_rate": 9.842288912990096e-05, "loss": 0.5122, "step": 82 },
    { "epoch": 2.44, "learning_rate": 9.838343009973925e-05, "loss": 0.4512, "step": 83 },
    { "epoch": 2.47, "learning_rate": 9.83434916288119e-05, "loss": 0.4047, "step": 84 },
    { "epoch": 2.5, "learning_rate": 9.830307411287695e-05, "loss": 0.4223, "step": 85 },
    { "epoch": 2.53, "learning_rate": 9.82621779524394e-05, "loss": 0.4206, "step": 86 },
    { "epoch": 2.56, "learning_rate": 9.822080355274719e-05, "loss": 0.4966, "step": 87 },
    { "epoch": 2.59, "learning_rate": 9.817895132378725e-05, "loss": 0.4635, "step": 88 },
    { "epoch": 2.62, "learning_rate": 9.813662168028144e-05, "loss": 0.399, "step": 89 },
    { "epoch": 2.65, "learning_rate": 9.809381504168234e-05, "loss": 0.4493, "step": 90 },
    { "epoch": 2.68, "learning_rate": 9.805053183216923e-05, "loss": 0.3969, "step": 91 },
    { "epoch": 2.71, "learning_rate": 9.800677248064382e-05, "loss": 0.4203, "step": 92 },
    { "epoch": 2.74, "learning_rate": 9.796253742072596e-05, "loss": 0.441, "step": 93 },
    { "epoch": 2.76, "learning_rate": 9.791782709074944e-05, "loss": 0.3533, "step": 94 },
    { "epoch": 2.79, "learning_rate": 9.787264193375753e-05, "loss": 0.4099, "step": 95 },
    { "epoch": 2.82, "learning_rate": 9.782698239749873e-05, "loss": 0.4118, "step": 96 },
    { "epoch": 2.85, "learning_rate": 9.778084893442218e-05, "loss": 0.4052, "step": 97 },
    { "epoch": 2.88, "learning_rate": 9.77342420016733e-05, "loss": 0.3896, "step": 98 },
    { "epoch": 2.91, "learning_rate": 9.768716206108921e-05, "loss": 0.3944, "step": 99 },
    { "epoch": 2.94, "learning_rate": 9.763960957919413e-05, "loss": 0.3747, "step": 100 },
    { "epoch": 2.97, "learning_rate": 9.759158502719481e-05, "loss": 0.4162, "step": 101 },
    { "epoch": 3.0, "learning_rate": 9.754308888097583e-05, "loss": 0.5347, "step": 102 },
    { "epoch": 3.03, "learning_rate": 9.749412162109485e-05, "loss": 0.2792, "step": 103 },
    { "epoch": 3.06, "learning_rate": 9.744468373277797e-05, "loss": 0.2966, "step": 104 },
    { "epoch": 3.09, "learning_rate": 9.739477570591473e-05, "loss": 0.3162, "step": 105 },
    { "epoch": 3.12, "learning_rate": 9.734439803505345e-05, "loss": 0.2621, "step": 106 },
    { "epoch": 3.15, "learning_rate": 9.729355121939621e-05, "loss": 0.2839, "step": 107 },
    { "epoch": 3.18, "learning_rate": 9.724223576279395e-05, "loss": 0.3226, "step": 108 },
    { "epoch": 3.21, "learning_rate": 9.719045217374143e-05, "loss": 0.3154, "step": 109 },
    { "epoch": 3.24, "learning_rate": 9.713820096537225e-05, "loss": 0.2635, "step": 110 },
    { "epoch": 3.26, "learning_rate": 9.708548265545375e-05, "loss": 0.2497, "step": 111 },
    { "epoch": 3.29, "learning_rate": 9.703229776638185e-05, "loss": 0.2801, "step": 112 },
    { "epoch": 3.32, "learning_rate": 9.697864682517592e-05, "loss": 0.3091, "step": 113 },
    { "epoch": 3.35, "learning_rate": 9.692453036347351e-05, "loss": 0.2305, "step": 114 },
    { "epoch": 3.38, "learning_rate": 9.686994891752508e-05, "loss": 0.3208, "step": 115 },
    { "epoch": 3.41, "learning_rate": 9.681490302818874e-05, "loss": 0.2521, "step": 116 },
    { "epoch": 3.44, "learning_rate": 9.675939324092486e-05, "loss": 0.2234, "step": 117 },
    { "epoch": 3.47, "learning_rate": 9.670342010579065e-05, "loss": 0.2973, "step": 118 },
    { "epoch": 3.5, "learning_rate": 9.664698417743475e-05, "loss": 0.278, "step": 119 },
    { "epoch": 3.53, "learning_rate": 9.659008601509168e-05, "loss": 0.2262, "step": 120 },
    { "epoch": 3.56, "learning_rate": 9.653272618257631e-05, "loss": 0.2038, "step": 121 },
    { "epoch": 3.59, "learning_rate": 9.647490524827834e-05, "loss": 0.244, "step": 122 },
    { "epoch": 3.62, "learning_rate": 9.641662378515659e-05, "loss": 0.2841, "step": 123 },
    { "epoch": 3.65, "learning_rate": 9.635788237073334e-05, "loss": 0.2516, "step": 124 },
    { "epoch": 3.68, "learning_rate": 9.629868158708861e-05, "loss": 0.319, "step": 125 },
    { "epoch": 3.71, "learning_rate": 9.623902202085444e-05, "loss": 0.2777, "step": 126 },
    { "epoch": 3.74, "learning_rate": 9.617890426320899e-05, "loss": 0.3114, "step": 127 },
    { "epoch": 3.76, "learning_rate": 9.611832890987076e-05, "loss": 0.2935, "step": 128 },
    { "epoch": 3.79, "learning_rate": 9.605729656109265e-05, "loss": 0.2749, "step": 129 },
    { "epoch": 3.82, "learning_rate": 9.599580782165598e-05, "loss": 0.294, "step": 130 },
    { "epoch": 3.85, "learning_rate": 9.593386330086458e-05, "loss": 0.2927, "step": 131 },
    { "epoch": 3.88, "learning_rate": 9.587146361253868e-05, "loss": 0.273, "step": 132 },
    { "epoch": 3.91, "learning_rate": 9.580860937500884e-05, "loss": 0.3173, "step": 133 },
    { "epoch": 3.94, "learning_rate": 9.57453012111099e-05, "loss": 0.28, "step": 134 },
    { "epoch": 3.97, "learning_rate": 9.568153974817464e-05, "loss": 0.2875, "step": 135 },
    { "epoch": 4.0, "learning_rate": 9.561732561802778e-05, "loss": 0.3158, "step": 136 },
    { "epoch": 4.03, "learning_rate": 9.555265945697953e-05, "loss": 0.1789, "step": 137 },
    { "epoch": 4.06, "learning_rate": 9.548754190581939e-05, "loss": 0.1532, "step": 138 },
    { "epoch": 4.09, "learning_rate": 9.542197360980978e-05, "loss": 0.1656, "step": 139 },
    { "epoch": 4.12, "learning_rate": 9.53559552186796e-05, "loss": 0.1701, "step": 140 },
    { "epoch": 4.15, "learning_rate": 9.528948738661784e-05, "loss": 0.1375, "step": 141 },
    { "epoch": 4.18, "learning_rate": 9.522257077226717e-05, "loss": 0.1552, "step": 142 },
    { "epoch": 4.21, "learning_rate": 9.51552060387172e-05, "loss": 0.1609, "step": 143 },
    { "epoch": 4.24, "learning_rate": 9.508739385349812e-05, "loss": 0.1284, "step": 144 },
    { "epoch": 4.26, "learning_rate": 9.501913488857399e-05, "loss": 0.14, "step": 145 },
    { "epoch": 4.29, "learning_rate": 9.49504298203361e-05, "loss": 0.1028, "step": 146 },
    { "epoch": 4.32, "learning_rate": 9.488127932959625e-05, "loss": 0.1138, "step": 147 },
    { "epoch": 4.35, "learning_rate": 9.481168410158003e-05, "loss": 0.1513, "step": 148 },
    { "epoch": 4.38, "learning_rate": 9.474164482592002e-05, "loss": 0.134, "step": 149 },
    { "epoch": 4.41, "learning_rate": 9.467116219664894e-05, "loss": 0.1403, "step": 150 },
    { "epoch": 4.44, "learning_rate": 9.460023691219277e-05, "loss": 0.1164, "step": 151 },
    { "epoch": 4.47, "learning_rate": 9.45288696753639e-05, "loss": 0.1175, "step": 152 },
    { "epoch": 4.5, "learning_rate": 9.445706119335407e-05, "loss": 0.1, "step": 153 },
    { "epoch": 4.53, "learning_rate": 9.438481217772744e-05, "loss": 0.1515, "step": 154 },
    { "epoch": 4.56, "learning_rate": 9.431212334441343e-05, "loss": 0.1281, "step": 155 },
    { "epoch": 4.59, "learning_rate": 9.423899541369978e-05, "loss": 0.1375, "step": 156 },
    { "epoch": 4.62, "learning_rate": 9.41654291102253e-05, "loss": 0.1325, "step": 157 },
    { "epoch": 4.65, "learning_rate": 9.409142516297269e-05, "loss": 0.1227, "step": 158 },
    { "epoch": 4.68, "learning_rate": 9.401698430526142e-05, "loss": 0.1227, "step": 159 },
    { "epoch": 4.71, "learning_rate": 9.394210727474028e-05, "loss": 0.1325, "step": 160 },
    { "epoch": 4.74, "learning_rate": 9.386679481338033e-05, "loss": 0.16, "step": 161 },
    { "epoch": 4.76, "learning_rate": 9.379104766746722e-05, "loss": 0.161, "step": 162 },
    { "epoch": 4.79, "learning_rate": 9.371486658759416e-05, "loss": 0.151, "step": 163 },
    { "epoch": 4.82, "learning_rate": 9.363825232865413e-05, "loss": 0.1166, "step": 164 },
    { "epoch": 4.85, "learning_rate": 9.356120564983266e-05, "loss": 0.1102, "step": 165 },
    { "epoch": 4.88, "learning_rate": 9.348372731460023e-05, "loss": 0.1489, "step": 166 },
    { "epoch": 4.91, "learning_rate": 9.340581809070459e-05, "loss": 0.1399, "step": 167 },
    { "epoch": 4.94, "learning_rate": 9.332747875016332e-05, "loss": 0.153, "step": 168 },
    { "epoch": 4.97, "learning_rate": 9.324871006925613e-05, "loss": 0.1352, "step": 169 },
    { "epoch": 5.0, "learning_rate": 9.316951282851707e-05, "loss": 0.1631, "step": 170 },
    { "epoch": 5.03, "learning_rate": 9.308988781272694e-05, "loss": 0.0511, "step": 171 },
    { "epoch": 5.06, "learning_rate": 9.300983581090541e-05, "loss": 0.063, "step": 172 },
    { "epoch": 5.09, "learning_rate": 9.292935761630326e-05, "loss": 0.0565, "step": 173 },
    { "epoch": 5.12, "learning_rate": 9.284845402639446e-05, "loss": 0.049, "step": 174 },
    { "epoch": 5.15, "learning_rate": 9.276712584286833e-05, "loss": 0.0574, "step": 175 },
    { "epoch": 5.18, "learning_rate": 9.26853738716216e-05, "loss": 0.0635, "step": 176 },
    { "epoch": 5.21, "learning_rate": 9.260319892275034e-05, "loss": 0.0494, "step": 177 },
    { "epoch": 5.24, "learning_rate": 9.2520601810542e-05, "loss": 0.0516, "step": 178 },
    { "epoch": 5.26, "learning_rate": 9.243758335346735e-05, "loss": 0.0463, "step": 179 },
    { "epoch": 5.29, "learning_rate": 9.235414437417234e-05, "loss": 0.061, "step": 180 },
    { "epoch": 5.32, "learning_rate": 9.227028569946996e-05, "loss": 0.0434, "step": 181 },
    { "epoch": 5.35, "learning_rate": 9.2186008160332e-05, "loss": 0.0656, "step": 182 },
    { "epoch": 5.38, "learning_rate": 9.210131259188095e-05, "loss": 0.0334, "step": 183 },
    { "epoch": 5.41, "learning_rate": 9.201619983338153e-05, "loss": 0.0726, "step": 184 },
    { "epoch": 5.44, "learning_rate": 9.193067072823251e-05, "loss": 0.0635, "step": 185 },
    { "epoch": 5.47, "learning_rate": 9.18447261239584e-05, "loss": 0.0652, "step": 186 },
    { "epoch": 5.5, "learning_rate": 9.175836687220084e-05, "loss": 0.0455, "step": 187 },
    { "epoch": 5.53, "learning_rate": 9.167159382871039e-05, "loss": 0.0501, "step": 188 },
    { "epoch": 5.56, "learning_rate": 9.15844078533379e-05, "loss": 0.0429, "step": 189 },
    { "epoch": 5.59, "learning_rate": 9.149680981002609e-05, "loss": 0.0551, "step": 190 },
    { "epoch": 5.62, "learning_rate": 9.140880056680088e-05, "loss": 0.0743, "step": 191 },
    { "epoch": 5.65, "learning_rate": 9.13203809957629e-05, "loss": 0.0619, "step": 192 },
    { "epoch": 5.68, "learning_rate": 9.123155197307876e-05, "loss": 0.0608, "step": 193 },
    { "epoch": 5.71, "learning_rate": 9.114231437897244e-05, "loss": 0.0685, "step": 194 },
    { "epoch": 5.74, "learning_rate": 9.105266909771653e-05, "loss": 0.0477, "step": 195 },
    { "epoch": 5.76, "learning_rate": 9.096261701762342e-05, "loss": 0.0477, "step": 196 },
    { "epoch": 5.79, "learning_rate": 9.087215903103662e-05, "loss": 0.0761, "step": 197 },
    { "epoch": 5.82, "learning_rate": 9.078129603432181e-05, "loss": 0.0707, "step": 198 },
    { "epoch": 5.85, "learning_rate": 9.069002892785797e-05, "loss": 0.0662, "step": 199 },
    { "epoch": 5.88, "learning_rate": 9.059835861602853e-05, "loss": 0.0689, "step": 200 },
    { "epoch": 5.91, "learning_rate": 9.050628600721234e-05, "loss": 0.0642, "step": 201 },
    { "epoch": 5.94, "learning_rate": 9.041381201377468e-05, "loss": 0.0787, "step": 202 },
    { "epoch": 5.97, "learning_rate": 9.032093755205822e-05, "loss": 0.0667, "step": 203 },
    { "epoch": 6.0, "learning_rate": 9.0227663542374e-05, "loss": 0.0465, "step": 204 },
    { "epoch": 6.03, "learning_rate": 9.013399090899217e-05, "loss": 0.0323, "step": 205 },
    { "epoch": 6.06, "learning_rate": 9.003992058013302e-05, "loss": 0.0254, "step": 206 },
    { "epoch": 6.09, "learning_rate": 8.994545348795759e-05, "loss": 0.0292, "step": 207 },
    { "epoch": 6.12, "learning_rate": 8.985059056855858e-05, "loss": 0.0219, "step": 208 },
    { "epoch": 6.15, "learning_rate": 8.975533276195102e-05, "loss": 0.018, "step": 209 },
    { "epoch": 6.18, "learning_rate": 8.965968101206291e-05, "loss": 0.031, "step": 210 },
    { "epoch": 6.21, "learning_rate": 8.956363626672595e-05, "loss": 0.0239, "step": 211 },
    { "epoch": 6.24, "learning_rate": 8.94671994776661e-05, "loss": 0.0306, "step": 212 },
    { "epoch": 6.26, "learning_rate": 8.937037160049416e-05, "loss": 0.0231, "step": 213 },
    { "epoch": 6.29, "learning_rate": 8.927315359469626e-05, "loss": 0.0317, "step": 214 },
    { "epoch": 6.32, "learning_rate": 8.917554642362443e-05, "loss": 0.0347, "step": 215 },
    { "epoch": 6.35, "learning_rate": 8.907755105448704e-05, "loss": 0.0335, "step": 216 },
    { "epoch": 6.38, "learning_rate": 8.89791684583391e-05, "loss": 0.0404, "step": 217 },
    { "epoch": 6.41, "learning_rate": 8.888039961007282e-05, "loss": 0.0376, "step": 218 },
    { "epoch": 6.44, "learning_rate": 8.87812454884078e-05, "loss": 0.0281, "step": 219 },
    { "epoch": 6.47, "learning_rate": 8.868170707588142e-05, "loss": 0.0345, "step": 220 },
    { "epoch": 6.5, "learning_rate": 8.858178535883905e-05, "loss": 0.0327, "step": 221 },
    { "epoch": 6.53, "learning_rate": 8.848148132742431e-05, "loss": 0.0208, "step": 222 },
    { "epoch": 6.56, "learning_rate": 8.838079597556925e-05, "loss": 0.0338, "step": 223 },
    { "epoch": 6.59, "learning_rate": 8.827973030098448e-05, "loss": 0.0277, "step": 224 },
    { "epoch": 6.62, "learning_rate": 8.81782853051493e-05, "loss": 0.0315, "step": 225 },
    { "epoch": 6.65, "learning_rate": 8.807646199330187e-05, "loss": 0.025, "step": 226 },
    { "epoch": 6.68, "learning_rate": 8.797426137442897e-05, "loss": 0.0348, "step": 227 },
    { "epoch": 6.71, "learning_rate": 8.787168446125638e-05, "loss": 0.0334, "step": 228 },
    { "epoch": 6.74, "learning_rate": 8.776873227023852e-05, "loss": 0.0257, "step": 229 },
    { "epoch": 6.76, "learning_rate": 8.766540582154859e-05, "loss": 0.028, "step": 230 },
    { "epoch": 6.79, "learning_rate": 8.756170613906833e-05, "loss": 0.0468, "step": 231 },
    { "epoch": 6.82, "learning_rate": 8.745763425037797e-05, "loss": 0.0486, "step": 232 },
    { "epoch": 6.85, "learning_rate": 8.735319118674596e-05, "loss": 0.0291, "step": 233 },
    { "epoch": 6.88, "learning_rate": 8.724837798311882e-05, "loss": 0.0248, "step": 234 },
    { "epoch": 6.91, "learning_rate": 8.714319567811088e-05, "loss": 0.0209, "step": 235 },
    { "epoch": 6.94, "learning_rate": 8.703764531399392e-05, "loss": 0.0232, "step": 236 },
    { "epoch": 6.97, "learning_rate": 8.69317279366869e-05, "loss": 0.0351, "step": 237 },
    { "epoch": 7.0, "learning_rate": 8.682544459574562e-05, "loss": 0.0153, "step": 238 },
    { "epoch": 7.03, "learning_rate": 8.671879634435224e-05, "loss": 0.0134, "step": 239 },
    { "epoch": 7.06, "learning_rate": 8.661178423930491e-05, "loss": 0.0136, "step": 240 },
    { "epoch": 7.09, "learning_rate": 8.650440934100728e-05, "loss": 0.0129, "step": 241 },
    { "epoch": 7.12, "learning_rate": 8.639667271345798e-05, "loss": 0.0123, "step": 242 },
    { "epoch": 7.15, "learning_rate": 8.628857542424009e-05, "loss": 0.0143, "step": 243 },
    { "epoch": 7.18, "learning_rate": 8.618011854451056e-05, "loss": 0.0093, "step": 244 },
    { "epoch": 7.21, "learning_rate": 8.607130314898956e-05, "loss": 0.0151, "step": 245 },
    { "epoch": 7.24, "learning_rate": 8.596213031594991e-05, "loss": 0.0099, "step": 246 },
    { "epoch": 7.26, "learning_rate": 8.585260112720631e-05, "loss": 0.0212, "step": 247 },
    { "epoch": 7.29, "learning_rate": 8.57427166681047e-05, "loss": 0.0218, "step": 248 },
    { "epoch": 7.32, "learning_rate": 8.56324780275114e-05, "loss": 0.017, "step": 249 },
    { "epoch": 7.35, "learning_rate": 8.552188629780244e-05, "loss": 0.014, "step": 250 },
    { "epoch": 7.38, "learning_rate": 8.541094257485265e-05, "loss": 0.0153, "step": 251 },
    { "epoch": 7.41, "learning_rate": 8.529964795802485e-05, "loss": 0.0164, "step": 252 },
    { "epoch": 7.44, "learning_rate": 8.518800355015892e-05, "loss": 0.0103, "step": 253 },
    { "epoch": 7.47, "learning_rate": 8.507601045756085e-05, "loss": 0.0285, "step": 254 },
    { "epoch": 7.5, "learning_rate": 8.49636697899919e-05, "loss": 0.0155, "step": 255 },
    { "epoch": 7.53, "learning_rate": 8.485098266065744e-05, "loss": 0.0146, "step": 256 },
    { "epoch": 7.56, "learning_rate": 8.473795018619604e-05, "loss": 0.0138, "step": 257 },
    { "epoch": 7.59, "learning_rate": 8.462457348666835e-05, "loss": 0.0132, "step": 258 },
    { "epoch": 7.62, "learning_rate": 8.4510853685546e-05, "loss": 0.0285, "step": 259 },
    { "epoch": 7.65, "learning_rate": 8.439679190970052e-05, "loss": 0.0134, "step": 260 },
    { "epoch": 7.68, "learning_rate": 8.428238928939207e-05, "loss": 0.0128, "step": 261 },
    { "epoch": 7.71, "learning_rate": 8.416764695825835e-05, "loss": 0.0142, "step": 262 },
    { "epoch": 7.74, "learning_rate": 8.405256605330331e-05, "loss": 0.0251, "step": 263 },
    { "epoch": 7.76, "learning_rate": 8.39371477148859e-05, "loss": 0.0213, "step": 264 },
    { "epoch": 7.79, "learning_rate": 8.382139308670875e-05, "loss": 0.0133, "step": 265 },
    { "epoch": 7.82, "learning_rate": 8.370530331580686e-05, "loss": 0.0118, "step": 266 },
    { "epoch": 7.85, "learning_rate": 8.35888795525362e-05, "loss": 0.0178, "step": 267 },
    { "epoch": 7.88, "learning_rate": 8.347212295056239e-05, "loss": 0.0151, "step": 268 },
    { "epoch": 7.91, "learning_rate": 8.335503466684915e-05, "loss": 0.0152, "step": 269 },
    { "epoch": 7.94, "learning_rate": 8.323761586164695e-05, "loss": 0.0112, "step": 270 },
    { "epoch": 7.97, "learning_rate": 8.311986769848141e-05, "loss": 0.0243, "step": 271 },
    { "epoch": 8.0, "learning_rate": 8.300179134414188e-05, "loss": 0.007, "step": 272 },
    { "epoch": 8.03, "learning_rate": 8.288338796866976e-05, "loss": 0.0088, "step": 273 },
    { "epoch": 8.06, "learning_rate": 8.276465874534702e-05, "loss": 0.0132, "step": 274 },
    { "epoch": 8.09, "learning_rate": 8.264560485068446e-05, "loss": 0.0099, "step": 275 },
    { "epoch": 8.12, "learning_rate": 8.252622746441021e-05, "loss": 0.0072, "step": 276 },
    { "epoch": 8.15, "learning_rate": 8.240652776945781e-05, "loss": 0.0085, "step": 277 },
    { "epoch": 8.18, "learning_rate": 8.228650695195472e-05, "loss": 0.0053, "step": 278 },
    { "epoch": 8.21, "learning_rate": 8.216616620121043e-05, "loss": 0.0096, "step": 279 },
    { "epoch": 8.24, "learning_rate": 8.204550670970469e-05, "loss": 0.0051, "step": 280 },
    { "epoch": 8.26, "learning_rate": 8.192452967307576e-05, "loss": 0.0097, "step": 281 },
    { "epoch": 8.29, "learning_rate": 8.180323629010848e-05, "loss": 0.0052, "step": 282 },
    { "epoch": 8.32, "learning_rate": 8.168162776272244e-05, "loss": 0.0102, "step": 283 },
    { "epoch": 8.35, "learning_rate": 8.155970529596006e-05, "loss": 0.0089, "step": 284 },
    { "epoch": 8.38, "learning_rate": 8.143747009797464e-05, "loss": 0.0143, "step": 285 },
    { "epoch": 8.41, "learning_rate": 8.131492338001839e-05, "loss": 0.006, "step": 286 },
    { "epoch": 8.44, "learning_rate": 8.119206635643045e-05, "loss": 0.007, "step": 287 },
    { "epoch": 8.47, "learning_rate": 8.106890024462481e-05, "loss": 0.0074, "step": 288 },
    { "epoch": 8.5, "learning_rate": 8.094542626507828e-05, "loss": 0.0101, "step": 289 },
    { "epoch": 8.53, "learning_rate": 8.082164564131845e-05, "loss": 0.0108, "step": 290 },
    { "epoch": 8.56, "learning_rate": 8.069755959991142e-05, "loss": 0.0046, "step": 291 },
    { "epoch": 8.59, "learning_rate": 8.057316937044977e-05, "loss": 0.0095, "step": 292 },
    { "epoch": 8.62, "learning_rate": 8.044847618554034e-05, "loss": 0.0056, "step": 293 },
    { "epoch": 8.65, "learning_rate": 8.032348128079203e-05, "loss": 0.0232, "step": 294 },
    { "epoch": 8.68, "learning_rate": 8.019818589480352e-05, "loss": 0.0061, "step": 295 },
    { "epoch": 8.71, "learning_rate": 8.0072591269151e-05, "loss": 0.0087, "step": 296 },
    { "epoch": 8.74, "learning_rate": 7.994669864837594e-05, "loss": 0.0076, "step": 297 },
    { "epoch": 8.76, "learning_rate": 7.982050927997264e-05, "loss": 0.0097, "step": 298 },
    { "epoch": 8.79, "learning_rate": 7.969402441437594e-05, "loss": 0.0064, "step": 299 },
    { "epoch": 8.82, "learning_rate": 7.956724530494887e-05, "loss": 0.0088, "step": 300 },
    { "epoch": 8.85, "learning_rate": 7.944017320797013e-05, "loss": 0.0125, "step": 301 },
    { "epoch": 8.88, "learning_rate": 7.931280938262169e-05, "loss": 0.0046, "step": 302 },
    { "epoch": 8.91, "learning_rate": 7.918515509097634e-05, "loss": 0.0049, "step": 303 },
    { "epoch": 8.94, "learning_rate": 7.905721159798513e-05, "loss": 0.0102, "step": 304 },
    { "epoch": 8.97, "learning_rate": 7.89289801714649e-05, "loss": 0.0109, "step": 305 },
    { "epoch": 9.0, "learning_rate": 7.880046208208563e-05, "loss": 0.0049, "step": 306 },
    { "epoch": 9.03, "learning_rate": 7.867165860335792e-05, "loss": 0.0042, "step": 307 },
    { "epoch": 9.06, "learning_rate": 7.854257101162037e-05, "loss": 0.0036, "step": 308 },
    { "epoch": 9.09, "learning_rate": 7.841320058602688e-05, "loss": 0.0077, "step": 309 },
    { "epoch": 9.12, "learning_rate": 7.828354860853399e-05, "loss": 0.0041, "step": 310 },
    { "epoch": 9.15, "learning_rate": 7.815361636388827e-05, "loss": 0.0046, "step": 311 },
    { "epoch": 9.18, "learning_rate": 7.802340513961342e-05, "loss": 0.0031, "step": 312 },
    { "epoch": 9.21, "learning_rate": 7.789291622599767e-05, "loss": 0.0058, "step": 313 },
    { "epoch": 9.24, "learning_rate": 7.776215091608085e-05, "loss": 0.0058, "step": 314 },
    { "epoch": 9.26, "learning_rate": 7.763111050564178e-05, "loss": 0.0032, "step": 315 },
    { "epoch": 9.29, "learning_rate": 7.749979629318516e-05, "loss": 0.0071, "step": 316 },
    { "epoch": 9.32, "learning_rate": 7.736820957992895e-05, "loss": 0.0053, "step": 317 },
    { "epoch": 9.35, "learning_rate": 7.723635166979133e-05, "loss": 0.0054, "step": 318 },
    { "epoch": 9.38, "learning_rate": 7.710422386937784e-05, "loss": 0.0083, "step": 319 },
    { "epoch": 9.41, "learning_rate": 7.697182748796841e-05, "loss": 0.0119, "step": 320 },
    { "epoch": 9.44, "learning_rate": 7.683916383750436e-05, "loss": 0.0051, "step": 321 },
    { "epoch": 9.47, "learning_rate": 7.670623423257548e-05, "loss": 0.0037, "step": 322 },
    { "epoch": 9.5, "learning_rate": 7.657303999040693e-05, "loss": 0.0066, "step": 323 },
    { "epoch": 9.53, "learning_rate": 7.64395824308462e-05, "loss": 0.0051, "step": 324 },
    { "epoch": 9.56, "learning_rate": 7.630586287635008e-05, "loss": 0.0041, "step": 325 },
    { "epoch": 9.59, "learning_rate": 7.617188265197148e-05, "loss": 0.0047, "step": 326 },
    { "epoch": 9.62, "learning_rate": 7.603764308534636e-05, "loss": 0.0023, "step": 327 },
    { "epoch": 9.65, "learning_rate": 7.590314550668054e-05, "loss": 0.0046, "step": 328 },
    { "epoch": 9.68, "learning_rate": 7.576839124873653e-05, "loss": 0.0026, "step": 329 },
    { "epoch": 9.71, "learning_rate": 7.563338164682036e-05, "loss": 0.0071, "step": 330 },
    { "epoch": 9.74, "learning_rate": 7.549811803876825e-05, "loss": 0.009, "step": 331 },
    { "epoch": 9.76, "learning_rate": 7.536260176493348e-05, "loss": 0.0062, "step": 332 },
    { "epoch": 9.79, "learning_rate": 7.5226834168173e-05, "loss": 0.0026, "step": 333 },
    { "epoch": 9.82, "learning_rate": 7.509081659383417e-05, "loss": 0.0079, "step": 334 },
    { "epoch": 9.85, "learning_rate": 7.495455038974146e-05, "loss": 0.0028, "step": 335 },
    { "epoch": 9.88, "learning_rate": 7.481803690618304e-05, "loss": 0.01, "step": 336 },
    { "epoch": 9.91, "learning_rate": 7.46812774958974e-05, "loss": 0.0045, "step": 337 },
    { "epoch": 9.94, "learning_rate": 7.454427351405999e-05, "loss": 0.0107, "step": 338 },
    { "epoch": 9.97, "learning_rate": 7.440702631826977e-05, "loss": 0.0053, "step": 339 },
    { "epoch": 10.0, "learning_rate": 7.426953726853574e-05, "loss": 0.0132, "step": 340 },
    { "epoch": 10.03, "learning_rate": 7.413180772726348e-05, "loss": 0.0031, "step": 341 },
    { "epoch": 10.06, "learning_rate": 7.399383905924165e-05, "loss": 0.0031, "step": 342 },
    { "epoch": 10.09, "learning_rate": 7.385563263162847e-05, "loss": 0.002, "step": 343 },
    { "epoch": 10.12, "learning_rate": 7.371718981393815e-05, "loss": 0.0023, "step": 344 },
    { "epoch": 10.15, "learning_rate": 7.357851197802735e-05, "loss": 0.0028, "step": 345 },
    { "epoch": 10.18, "learning_rate": 7.343960049808156e-05, "loss": 0.0033, "step": 346 },
    { "epoch": 10.21, "learning_rate": 7.330045675060149e-05, "loss": 0.0054, "step": 347 },
    { "epoch": 10.24, "learning_rate": 7.316108211438945e-05, "loss": 0.002, "step": 348 },
    { "epoch": 10.26, "learning_rate": 7.302147797053569e-05, "loss": 0.0096, "step": 349 },
    { "epoch": 10.29, "learning_rate": 7.288164570240463e-05, "loss": 0.0033, "step": 350 },
    { "epoch": 10.32, "learning_rate": 7.274158669562126e-05, "loss": 0.0027, "step": 351 },
    { "epoch": 10.35, "learning_rate": 7.26013023380574e-05, "loss": 0.0022, "step": 352 },
    { "epoch": 10.38, "learning_rate": 7.246079401981784e-05, "loss": 0.0031, "step": 353 },
    { "epoch": 10.41, "learning_rate": 7.232006313322667e-05, "loss": 0.0022, "step": 354 },
    { "epoch": 10.44, "learning_rate": 7.217911107281352e-05, "loss": 0.0036, "step": 355 },
    { "epoch": 10.47, "learning_rate": 7.203793923529956e-05, "loss": 0.003, "step": 356 },
    { "epoch": 10.5, "learning_rate": 7.189654901958385e-05, "loss": 0.0031, "step": 357 },
    { "epoch": 10.53, "learning_rate": 7.175494182672939e-05, "loss": 0.0018, "step": 358 },
    { "epoch": 10.56, "learning_rate": 7.161311905994922e-05, "loss": 0.0021, "step": 359 },
    { "epoch": 10.59, "learning_rate": 7.147108212459257e-05, "loss": 0.0132, "step": 360 },
    { "epoch": 10.62, "learning_rate": 7.13288324281309e-05, "loss": 0.0017, "step": 361 },
    { "epoch": 10.65, "learning_rate": 7.118637138014396e-05, "loss": 0.0028, "step": 362 },
    { "epoch": 10.68, "learning_rate": 7.104370039230583e-05, "loss": 0.0019, "step": 363 },
    { "epoch": 10.71, "learning_rate": 7.090082087837092e-05, "loss": 0.0015, "step": 364 },
    { "epoch": 10.74, "learning_rate": 7.075773425415994e-05, "loss": 0.0075, "step": 365 },
    { "epoch": 10.76, "learning_rate": 7.061444193754596e-05, "loss": 0.0046, "step": 366 },
    { "epoch": 10.79, "learning_rate": 7.047094534844023e-05, "loss": 0.0022, "step": 367 },
    { "epoch": 10.82, "learning_rate": 7.032724590877821e-05, "loss": 0.0018, "step": 368 },
    { "epoch": 10.85, "learning_rate": 7.018334504250545e-05, "loss": 0.0042, "step": 369 },
    { "epoch": 10.88, "learning_rate": 7.003924417556343e-05, "loss": 0.0069, "step": 370 },
    { "epoch": 10.91, "learning_rate": 6.989494473587554e-05, "loss": 0.002, "step": 371 },
    { "epoch": 10.94, "learning_rate": 6.975044815333282e-05, "loss": 0.0061, "step": 372 },
    { "epoch": 10.97, "learning_rate": 6.960575585977984e-05, "loss": 0.0059, "step": 373 },
    { "epoch": 11.0, "learning_rate": 6.946086928900054e-05, "loss": 0.0021, "step": 374 },
    { "epoch": 11.03, "learning_rate": 6.931578987670396e-05, "loss": 0.0021, "step": 375 },
    { "epoch": 11.06, "learning_rate": 6.917051906051006e-05, "loss": 0.0028, "step": 376 },
    { "epoch": 11.09, "learning_rate": 6.902505827993541e-05, "loss": 0.0016, "step": 377 },
    { "epoch": 11.12, "learning_rate": 6.887940897637908e-05, "loss": 0.0015, "step": 378 },
    { "epoch": 11.15, "learning_rate": 6.873357259310815e-05, "loss": 0.0016, "step": 379 },
    { "epoch": 11.18, "learning_rate": 6.858755057524354e-05, "loss": 0.0115, "step": 380 },
    { "epoch": 11.21, "learning_rate": 6.844134436974567e-05, "loss": 0.0103, "step": 381 },
    { "epoch": 11.24, "learning_rate": 6.829495542540013e-05, "loss": 0.0045, "step": 382 },
    { "epoch": 11.26, "learning_rate": 6.814838519280324e-05, "loss": 0.0037, "step": 383 },
    { "epoch": 11.29, "learning_rate": 6.80016351243478e-05, "loss": 0.0042, "step": 384 },
    { "epoch": 11.32, "learning_rate": 6.785470667420862e-05, "loss": 0.0014, "step": 385 },
    { "epoch": 11.35, "learning_rate": 6.77076012983281e-05, "loss": 0.0021, "step": 386 },
    { "epoch": 11.38, "learning_rate": 6.75603204544019e-05, "loss": 0.0013, "step": 387 },
    { "epoch": 11.41, "learning_rate": 6.741286560186437e-05, "loss": 0.0016, "step": 388 },
    { "epoch": 11.44, "learning_rate": 6.726523820187413e-05, "loss": 0.0018, "step": 389 },
    { "epoch": 11.47, "learning_rate": 6.711743971729967e-05, "loss": 0.0017, "step": 390 },
    { "epoch": 11.5, "learning_rate": 6.696947161270476e-05, "loss": 0.0017, "step": 391 },
    { "epoch": 11.53, "learning_rate": 6.682133535433393e-05, "loss": 0.0034, "step": 392 },
    { "epoch": 11.56, "learning_rate": 6.667303241009803e-05, "loss": 0.0048, "step": 393 },
    { "epoch": 11.59, "learning_rate": 6.652456424955963e-05, "loss": 0.0036, "step": 394 },
    { "epoch": 11.62, "learning_rate": 6.637593234391843e-05, "loss": 0.0038, "step": 395 },
    { "epoch": 11.65, "learning_rate": 6.622713816599673e-05, "loss": 0.0011, "step": 396 },
    { "epoch": 11.68, "learning_rate": 6.60781831902248e-05, "loss": 0.0014, "step": 397 },
    { "epoch": 11.71, "learning_rate": 6.592906889262632e-05, "loss": 0.0047, "step": 398 },
    { "epoch": 11.74, "learning_rate": 6.577979675080369e-05, "loss": 0.0037, "step": 399 },
    { "epoch": 11.76, "learning_rate": 6.563036824392344e-05, "loss": 0.0023, "step": 400 },
    { "epoch": 11.79, "learning_rate": 6.548078485270152e-05, "loss": 0.0025, "step": 401 },
    { "epoch": 11.82, "learning_rate": 6.533104805938873e-05, "loss": 0.003, "step": 402 },
    { "epoch": 11.85, "learning_rate": 6.518115934775585e-05, "loss": 0.0022, "step": 403 },
    { "epoch": 11.88, "learning_rate": 6.503112020307916e-05, "loss": 0.0021, "step": 404 },
    { "epoch": 11.91, "learning_rate": 6.488093211212555e-05, "loss": 0.003, "step": 405 },
    { "epoch": 11.94, "learning_rate": 6.473059656313782e-05, "loss": 0.0013, "step": 406 },
    { "epoch": 11.97, "learning_rate": 6.458011504582005e-05, "loss": 0.0033, "step": 407 },
    { "epoch": 12.0, "learning_rate": 6.442948905132266e-05, "loss": 0.0012, "step": 408 },
    { "epoch": 12.03, "learning_rate": 6.427872007222777e-05, "loss": 0.0023, "step": 409 },
    { "epoch": 12.06, "learning_rate": 6.412780960253436e-05, "loss": 0.001, "step": 410 },
    { "epoch": 12.09, "learning_rate": 6.397675913764347e-05, "loss": 0.0035, "step": 411 },
    { "epoch": 12.12, "learning_rate": 6.382557017434332e-05, "loss": 0.0028, "step": 412 },
    { "epoch": 12.15, "learning_rate": 6.367424421079463e-05, "loss": 0.0009, "step": 413 },
    { "epoch": 12.18, "learning_rate": 6.352278274651561e-05, "loss": 0.0049, "step": 414 },
    { "epoch": 12.21, "learning_rate": 6.337118728236721e-05, "loss": 0.0013, "step": 415 },
    { "epoch": 12.24, "learning_rate": 6.321945932053822e-05, "loss": 0.0025, "step": 416 },
    { "epoch": 12.26, "learning_rate": 6.306760036453035e-05, "loss": 0.0013, "step": 417 },
    { "epoch": 12.29, "learning_rate": 6.291561191914333e-05, "loss": 0.0022, "step": 418 },
    { "epoch": 12.32, "learning_rate": 6.276349549046007e-05, "loss": 0.002, "step": 419 },
    { "epoch": 12.35, "learning_rate": 6.261125258583171e-05, "loss": 0.0012, "step": 420 },
    { "epoch": 12.38, "learning_rate": 6.245888471386263e-05, "loss": 0.0015, "step": 421 },
    { "epoch": 12.41, "learning_rate": 6.230639338439549e-05, "loss": 0.0011, "step": 422 },
    { "epoch": 12.44, "learning_rate": 6.215378010849641e-05, "loss": 0.0027, "step": 423 },
    { "epoch": 12.47, "learning_rate": 6.200104639843985e-05, "loss": 0.0011, "step": 424 },
    { "epoch": 12.5, "learning_rate": 6.184819376769364e-05, "loss": 0.0025, "step": 425 },
    { "epoch": 12.53, "learning_rate": 6.169522373090412e-05, "loss": 0.001, "step": 426 },
    { "epoch": 12.56, "learning_rate": 6.154213780388092e-05, "loss": 0.001, "step": 427 },
    { "epoch": 12.59, "learning_rate": 6.138893750358212e-05, "loss": 0.0011, "step": 428 },
    { "epoch": 12.62, "learning_rate": 6.123562434809912e-05, "loss": 0.0015, "step": 429 },
    { "epoch": 12.65, "learning_rate": 6.108219985664161e-05, "loss": 0.0024, "step": 430 },
    { "epoch": 12.68, "learning_rate": 6.0928665549522554e-05, "loss": 0.0018, "step": 431 },
    { "epoch": 12.71, "learning_rate": 6.0775022948143115e-05, "loss": 0.0011, "step": 432 },
    { "epoch": 12.74, "learning_rate": 6.06212735749775e-05, "loss": 0.002, "step": 433 },
    { "epoch": 12.76, "learning_rate": 6.046741895355802e-05, "loss": 0.0063, "step": 434 },
    { "epoch": 12.79, "learning_rate": 6.031346060845986e-05, "loss": 0.0019, "step": 435 },
    { "epoch": 12.82, "learning_rate": 6.015940006528602e-05, "loss": 0.002, "step": 436 },
    { "epoch": 12.85, "learning_rate": 6.0005238850652234e-05, "loss": 0.004, "step": 437 },
    { "epoch": 12.88, "learning_rate": 5.9850978492171794e-05, "loss": 0.0013, "step": 438 },
    { "epoch": 12.91, "learning_rate": 5.96966205184404e-05, "loss": 0.0096, "step": 439 },
    { "epoch": 12.94, "learning_rate": 5.954216645902109e-05, "loss": 0.0011, "step": 440 },
    { "epoch": 12.97, "learning_rate": 5.9387617844429e-05, "loss": 0.001, "step": 441 },
    { "epoch": 13.0, "learning_rate": 5.923297620611623e-05, "loss": 0.001, "step": 442 },
    { "epoch": 13.03, "learning_rate": 5.907824307645669e-05, "loss": 0.0021, "step": 443 },
    { "epoch": 13.06, "learning_rate": 5.892341998873089e-05, "loss": 0.0035, "step": 444 },
    { "epoch": 13.09, "learning_rate": 5.876850847711073e-05, "loss": 0.0009, "step": 445 },
    { "epoch": 13.12, "learning_rate": 5.861351007664434e-05, "loss": 0.0012, "step": 446 },
    { "epoch": 13.15, "learning_rate": 5.845842632324088e-05, "loss": 0.0038, "step": 447 },
    { "epoch": 13.18, "learning_rate": 5.83032587536552e-05, "loss": 0.0029, "step": 448 },
    { "epoch": 13.21, "learning_rate": 5.814800890547278e-05, "loss": 0.0015, "step": 449 },
    { "epoch": 13.24, "learning_rate": 5.799267831709442e-05, "loss": 0.0037, "step": 450 },
    { "epoch": 13.26, "learning_rate": 5.78372685277209e-05, "loss": 0.0009, "step": 451 },
    { "epoch": 13.29, "learning_rate": 5.7681781077337905e-05, "loss": 0.0035, "step": 452 },
    { "epoch": 13.32, "learning_rate": 5.752621750670068e-05, "loss": 0.0023, "step": 453 },
    { "epoch": 13.35, "learning_rate": 5.737057935731868e-05, "loss": 0.0011, "step": 454 },
    { "epoch": 13.38, "learning_rate": 5.721486817144044e-05, "loss": 0.003, "step": 455 },
    { "epoch": 13.41, "learning_rate": 5.705908549203823e-05, "loss": 0.0009, "step": 456 },
    { "epoch": 13.44, "learning_rate": 5.690323286279274e-05, "loss": 0.0028, "step": 457 },
    { "epoch": 13.47, "learning_rate": 5.674731182807781e-05, "loss": 0.0021, "step": 458 },
    { "epoch": 13.5, "learning_rate": 5.659132393294514e-05, "loss": 0.0028, "step": 459 },
    { "epoch": 13.53, "learning_rate": 5.643527072310891e-05, "loss": 0.0021, "step": 460 },
    { "epoch": 13.56, "learning_rate": 5.627915374493061e-05, "loss": 0.0023, "step": 461 },
    { "epoch": 13.59, "learning_rate": 5.612297454540352e-05, "loss": 0.003, "step": 462 },
    { "epoch": 13.62, "learning_rate": 5.596673467213756e-05, "loss": 0.0017, "step": 463 },
    { "epoch": 13.65, "learning_rate": 5.581043567334383e-05, "loss": 0.001, "step": 464 },
    { "epoch": 13.68, "learning_rate": 5.5654079097819345e-05, "loss": 0.001, "step": 465 },
    { "epoch": 13.71, "learning_rate": 5.5497666494931654e-05, "loss": 0.006, "step": 466 },
    { "epoch": 13.74, "learning_rate": 5.5341199414603493e-05, "loss": 0.0017, "step": 467 },
    { "epoch": 13.76, "learning_rate": 5.518467940729739e-05, "loss": 0.0017, "step": 468 },
    { "epoch": 13.79, "learning_rate": 5.502810802400039e-05, "loss": 0.0029, "step": 469 },
    { "epoch": 13.82, "learning_rate": 5.487148681620862e-05, "loss": 0.0009, "step": 470 },
    { "epoch": 13.85, "learning_rate": 5.4714817335911894e-05, "loss": 0.0011, "step": 471 },
    { "epoch": 13.88, "learning_rate": 5.455810113557839e-05, "loss": 0.0011, "step": 472 },
    { "epoch": 13.91, "learning_rate": 5.440133976813926e-05, "loss": 0.0009, "step": 473 },
    { "epoch": 13.94, "learning_rate": 5.4244534786973214e-05, "loss": 0.0036, "step": 474 },
    { "epoch": 13.97, "learning_rate": 5.40876877458911e-05, "loss": 0.0011, "step": 475 },
    { "epoch": 14.0, "learning_rate": 5.3930800199120616e-05, "loss": 0.0008, "step": 476 },
    { "epoch": 14.03, "learning_rate": 5.377387370129079e-05, "loss": 0.0062, "step": 477 },
    { "epoch": 14.06, "learning_rate": 5.361690980741663e-05, "loss": 0.0012, "step": 478 },
    { "epoch": 14.09, "learning_rate": 5.345991007288371e-05, "loss": 0.0012, "step": 479 },
    { "epoch": 14.12, "learning_rate": 5.330287605343279e-05, "loss": 0.0008, "step": 480 },
    { "epoch": 14.15, "learning_rate": 5.314580930514431e-05, "loss": 0.0023, "step": 481 },
    { "epoch": 14.18, "learning_rate": 5.298871138442307e-05, "loss": 0.0014, "step": 482 },
    { "epoch": 14.21, "learning_rate": 5.283158384798275e-05, "loss": 0.0028, "step": 483 },
    { "epoch": 14.24, "learning_rate": 5.267442825283048e-05, "loss": 0.0008, "step": 484 },
    { "epoch": 14.26, "learning_rate": 5.2517246156251455e-05, "loss": 0.0022, "step": 485 },
    { "epoch": 14.29, "learning_rate": 5.236003911579345e-05, "loss": 0.0023, "step": 486 },
| { | |
| "epoch": 14.32, | |
| "learning_rate": 5.220280868925145e-05, | |
| "loss": 0.0016, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 14.35, | |
| "learning_rate": 5.204555643465215e-05, | |
| "loss": 0.0008, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 14.38, | |
| "learning_rate": 5.1888283910238555e-05, | |
| "loss": 0.0023, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 14.41, | |
| "learning_rate": 5.173099267445451e-05, | |
| "loss": 0.0014, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 14.44, | |
| "learning_rate": 5.157368428592933e-05, | |
| "loss": 0.002, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 14.47, | |
| "learning_rate": 5.1416360303462206e-05, | |
| "loss": 0.0025, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 14.5, | |
| "learning_rate": 5.125902228600693e-05, | |
| "loss": 0.0014, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 14.53, | |
| "learning_rate": 5.110167179265636e-05, | |
| "loss": 0.0008, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 14.56, | |
| "learning_rate": 5.094431038262693e-05, | |
| "loss": 0.001, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 14.59, | |
| "learning_rate": 5.078693961524329e-05, | |
| "loss": 0.0008, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 14.62, | |
| "learning_rate": 5.062956104992285e-05, | |
| "loss": 0.0008, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 14.65, | |
| "learning_rate": 5.0472176246160184e-05, | |
| "loss": 0.0068, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 14.68, | |
| "learning_rate": 5.031478676351179e-05, | |
| "loss": 0.0007, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 14.71, | |
| "learning_rate": 5.01573941615805e-05, | |
| "loss": 0.0008, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 14.74, | |
| "learning_rate": 5e-05, | |
| "loss": 0.0019, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 14.76, | |
| "learning_rate": 4.984260583841953e-05, | |
| "loss": 0.0007, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 14.79, | |
| "learning_rate": 4.9685213236488216e-05, | |
| "loss": 0.0019, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 14.82, | |
| "learning_rate": 4.9527823753839834e-05, | |
| "loss": 0.0008, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 14.85, | |
| "learning_rate": 4.937043895007717e-05, | |
| "loss": 0.0008, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 14.88, | |
| "learning_rate": 4.9213060384756716e-05, | |
| "loss": 0.0009, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 14.91, | |
| "learning_rate": 4.9055689617373084e-05, | |
| "loss": 0.0013, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 14.94, | |
| "learning_rate": 4.8898328207343666e-05, | |
| "loss": 0.0026, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 14.97, | |
| "learning_rate": 4.874097771399308e-05, | |
| "loss": 0.0045, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 15.0, | |
| "learning_rate": 4.858363969653781e-05, | |
| "loss": 0.0008, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 15.03, | |
| "learning_rate": 4.8426315714070684e-05, | |
| "loss": 0.0008, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 15.06, | |
| "learning_rate": 4.8269007325545506e-05, | |
| "loss": 0.0008, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 15.09, | |
| "learning_rate": 4.8111716089761456e-05, | |
| "loss": 0.0008, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 15.12, | |
| "learning_rate": 4.7954443565347865e-05, | |
| "loss": 0.0008, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 15.15, | |
| "learning_rate": 4.779719131074857e-05, | |
| "loss": 0.0007, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 15.18, | |
| "learning_rate": 4.7639960884206576e-05, | |
| "loss": 0.0032, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 15.21, | |
| "learning_rate": 4.7482753843748564e-05, | |
| "loss": 0.0009, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 15.24, | |
| "learning_rate": 4.7325571747169545e-05, | |
| "loss": 0.0008, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 15.26, | |
| "learning_rate": 4.716841615201726e-05, | |
| "loss": 0.0008, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 15.29, | |
| "learning_rate": 4.7011288615576934e-05, | |
| "loss": 0.0027, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 15.32, | |
| "learning_rate": 4.6854190694855694e-05, | |
| "loss": 0.0007, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 15.35, | |
| "learning_rate": 4.6697123946567227e-05, | |
| "loss": 0.0018, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 15.38, | |
| "learning_rate": 4.65400899271163e-05, | |
| "loss": 0.0013, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 15.41, | |
| "learning_rate": 4.63830901925834e-05, | |
| "loss": 0.0018, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 15.44, | |
| "learning_rate": 4.6226126298709224e-05, | |
| "loss": 0.0047, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 15.47, | |
| "learning_rate": 4.60691998008794e-05, | |
| "loss": 0.0011, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 15.5, | |
| "learning_rate": 4.5912312254108905e-05, | |
| "loss": 0.0013, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 15.53, | |
| "learning_rate": 4.575546521302681e-05, | |
| "loss": 0.0007, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 15.56, | |
| "learning_rate": 4.5598660231860746e-05, | |
| "loss": 0.0025, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 15.59, | |
| "learning_rate": 4.544189886442162e-05, | |
| "loss": 0.0007, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 15.62, | |
| "learning_rate": 4.528518266408811e-05, | |
| "loss": 0.0031, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 15.65, | |
| "learning_rate": 4.5128513183791386e-05, | |
| "loss": 0.0007, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 15.68, | |
| "learning_rate": 4.49718919759996e-05, | |
| "loss": 0.001, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 15.71, | |
| "learning_rate": 4.481532059270262e-05, | |
| "loss": 0.0015, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 15.74, | |
| "learning_rate": 4.465880058539652e-05, | |
| "loss": 0.0015, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 15.76, | |
| "learning_rate": 4.450233350506836e-05, | |
| "loss": 0.0007, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 15.79, | |
| "learning_rate": 4.4345920902180647e-05, | |
| "loss": 0.0015, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 15.82, | |
| "learning_rate": 4.418956432665618e-05, | |
| "loss": 0.0007, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 15.85, | |
| "learning_rate": 4.403326532786245e-05, | |
| "loss": 0.0038, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 15.88, | |
| "learning_rate": 4.387702545459649e-05, | |
| "loss": 0.0025, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 15.91, | |
| "learning_rate": 4.3720846255069406e-05, | |
| "loss": 0.0007, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 15.94, | |
| "learning_rate": 4.356472927689109e-05, | |
| "loss": 0.0009, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 15.97, | |
| "learning_rate": 4.3408676067054866e-05, | |
| "loss": 0.0007, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 16.0, | |
| "learning_rate": 4.32526881719222e-05, | |
| "loss": 0.0006, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 16.03, | |
| "learning_rate": 4.3096767137207256e-05, | |
| "loss": 0.0013, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 16.06, | |
| "learning_rate": 4.2940914507961775e-05, | |
| "loss": 0.0007, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 16.09, | |
| "learning_rate": 4.278513182855956e-05, | |
| "loss": 0.0012, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 16.12, | |
| "learning_rate": 4.262942064268134e-05, | |
| "loss": 0.0006, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 16.15, | |
| "learning_rate": 4.247378249329933e-05, | |
| "loss": 0.0014, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 16.18, | |
| "learning_rate": 4.23182189226621e-05, | |
| "loss": 0.0018, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 16.21, | |
| "learning_rate": 4.21627314722791e-05, | |
| "loss": 0.0018, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 16.24, | |
| "learning_rate": 4.20073216829056e-05, | |
| "loss": 0.0014, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 16.26, | |
| "learning_rate": 4.185199109452721e-05, | |
| "loss": 0.0007, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 16.29, | |
| "learning_rate": 4.169674124634481e-05, | |
| "loss": 0.0011, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 16.32, | |
| "learning_rate": 4.1541573676759126e-05, | |
| "loss": 0.0006, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 16.35, | |
| "learning_rate": 4.138648992335566e-05, | |
| "loss": 0.0006, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 16.38, | |
| "learning_rate": 4.12314915228893e-05, | |
| "loss": 0.0007, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 16.41, | |
| "learning_rate": 4.107658001126913e-05, | |
| "loss": 0.0007, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 16.44, | |
| "learning_rate": 4.092175692354333e-05, | |
| "loss": 0.0007, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 16.47, | |
| "learning_rate": 4.0767023793883785e-05, | |
| "loss": 0.0006, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 16.5, | |
| "learning_rate": 4.0612382155571026e-05, | |
| "loss": 0.0029, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 16.53, | |
| "learning_rate": 4.045783354097893e-05, | |
| "loss": 0.0013, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 16.56, | |
| "learning_rate": 4.0303379481559623e-05, | |
| "loss": 0.0023, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 16.59, | |
| "learning_rate": 4.0149021507828224e-05, | |
| "loss": 0.0024, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 16.62, | |
| "learning_rate": 3.9994761149347784e-05, | |
| "loss": 0.0006, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 16.65, | |
| "learning_rate": 3.984059993471399e-05, | |
| "loss": 0.0006, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 16.68, | |
| "learning_rate": 3.968653939154016e-05, | |
| "loss": 0.0007, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 16.71, | |
| "learning_rate": 3.9532581046442e-05, | |
| "loss": 0.0015, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 16.74, | |
| "learning_rate": 3.937872642502252e-05, | |
| "loss": 0.0021, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 16.76, | |
| "learning_rate": 3.9224977051856904e-05, | |
| "loss": 0.0006, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 16.79, | |
| "learning_rate": 3.907133445047747e-05, | |
| "loss": 0.0007, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 16.82, | |
| "learning_rate": 3.8917800143358404e-05, | |
| "loss": 0.001, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 16.85, | |
| "learning_rate": 3.8764375651900906e-05, | |
| "loss": 0.0058, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 16.88, | |
| "learning_rate": 3.861106249641789e-05, | |
| "loss": 0.0007, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 16.91, | |
| "learning_rate": 3.84578621961191e-05, | |
| "loss": 0.0023, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 16.94, | |
| "learning_rate": 3.830477626909589e-05, | |
| "loss": 0.0007, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 16.97, | |
| "learning_rate": 3.8151806232306374e-05, | |
| "loss": 0.0027, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 17.0, | |
| "learning_rate": 3.7998953601560175e-05, | |
| "loss": 0.0008, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 17.03, | |
| "learning_rate": 3.784621989150361e-05, | |
| "loss": 0.0027, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 17.06, | |
| "learning_rate": 3.769360661560453e-05, | |
| "loss": 0.0019, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 17.09, | |
| "learning_rate": 3.75411152861374e-05, | |
| "loss": 0.0006, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 17.12, | |
| "learning_rate": 3.73887474141683e-05, | |
| "loss": 0.0006, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 17.15, | |
| "learning_rate": 3.723650450953994e-05, | |
| "loss": 0.0016, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 17.18, | |
| "learning_rate": 3.708438808085668e-05, | |
| "loss": 0.0006, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 17.21, | |
| "learning_rate": 3.693239963546967e-05, | |
| "loss": 0.0006, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 17.24, | |
| "learning_rate": 3.6780540679461784e-05, | |
| "loss": 0.0006, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 17.26, | |
| "learning_rate": 3.662881271763279e-05, | |
| "loss": 0.0014, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 17.29, | |
| "learning_rate": 3.64772172534844e-05, | |
| "loss": 0.0013, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 17.32, | |
| "learning_rate": 3.63257557892054e-05, | |
| "loss": 0.0007, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 17.35, | |
| "learning_rate": 3.6174429825656685e-05, | |
| "loss": 0.0014, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 17.38, | |
| "learning_rate": 3.602324086235655e-05, | |
| "loss": 0.0006, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 17.41, | |
| "learning_rate": 3.587219039746564e-05, | |
| "loss": 0.0008, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 17.44, | |
| "learning_rate": 3.572127992777223e-05, | |
| "loss": 0.0014, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 17.47, | |
| "learning_rate": 3.557051094867735e-05, | |
| "loss": 0.0006, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 17.5, | |
| "learning_rate": 3.541988495417997e-05, | |
| "loss": 0.0006, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 17.53, | |
| "learning_rate": 3.5269403436862175e-05, | |
| "loss": 0.0026, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 17.56, | |
| "learning_rate": 3.511906788787447e-05, | |
| "loss": 0.0012, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 17.59, | |
| "learning_rate": 3.496887979692084e-05, | |
| "loss": 0.0029, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 17.62, | |
| "learning_rate": 3.481884065224415e-05, | |
| "loss": 0.0006, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 17.65, | |
| "learning_rate": 3.466895194061128e-05, | |
| "loss": 0.0008, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 17.68, | |
| "learning_rate": 3.451921514729848e-05, | |
| "loss": 0.0017, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 17.71, | |
| "learning_rate": 3.436963175607656e-05, | |
| "loss": 0.0008, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 17.74, | |
| "learning_rate": 3.422020324919632e-05, | |
| "loss": 0.0021, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 17.76, | |
| "learning_rate": 3.4070931107373675e-05, | |
| "loss": 0.0041, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 17.79, | |
| "learning_rate": 3.39218168097752e-05, | |
| "loss": 0.0012, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 17.82, | |
| "learning_rate": 3.377286183400328e-05, | |
| "loss": 0.0006, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 17.85, | |
| "learning_rate": 3.362406765608158e-05, | |
| "loss": 0.0006, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 17.88, | |
| "learning_rate": 3.3475435750440356e-05, | |
| "loss": 0.0018, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 17.91, | |
| "learning_rate": 3.332696758990197e-05, | |
| "loss": 0.0005, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 17.94, | |
| "learning_rate": 3.3178664645666066e-05, | |
| "loss": 0.0006, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 17.97, | |
| "learning_rate": 3.303052838729525e-05, | |
| "loss": 0.0006, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 18.0, | |
| "learning_rate": 3.2882560282700336e-05, | |
| "loss": 0.0044, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 18.03, | |
| "learning_rate": 3.273476179812588e-05, | |
| "loss": 0.0012, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 18.06, | |
| "learning_rate": 3.258713439813566e-05, | |
| "loss": 0.0018, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 18.09, | |
| "learning_rate": 3.243967954559811e-05, | |
| "loss": 0.0006, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 18.12, | |
| "learning_rate": 3.229239870167191e-05, | |
| "loss": 0.0016, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 18.15, | |
| "learning_rate": 3.2145293325791395e-05, | |
| "loss": 0.0006, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 18.18, | |
| "learning_rate": 3.199836487565222e-05, | |
| "loss": 0.0007, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 18.21, | |
| "learning_rate": 3.1851614807196774e-05, | |
| "loss": 0.0006, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 18.24, | |
| "learning_rate": 3.170504457459989e-05, | |
| "loss": 0.0039, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 18.26, | |
| "learning_rate": 3.155865563025433e-05, | |
| "loss": 0.0013, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 18.29, | |
| "learning_rate": 3.1412449424756474e-05, | |
| "loss": 0.0005, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 18.32, | |
| "learning_rate": 3.1266427406891856e-05, | |
| "loss": 0.0005, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 18.35, | |
| "learning_rate": 3.112059102362093e-05, | |
| "loss": 0.0016, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 18.38, | |
| "learning_rate": 3.0974941720064585e-05, | |
| "loss": 0.0014, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 18.41, | |
| "learning_rate": 3.082948093948997e-05, | |
| "loss": 0.0006, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 18.44, | |
| "learning_rate": 3.0684210123296055e-05, | |
| "loss": 0.0006, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 18.47, | |
| "learning_rate": 3.053913071099947e-05, | |
| "loss": 0.0005, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 18.5, | |
| "learning_rate": 3.0394244140220163e-05, | |
| "loss": 0.0025, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 18.53, | |
| "learning_rate": 3.0249551846667207e-05, | |
| "loss": 0.0024, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 18.56, | |
| "learning_rate": 3.010505526412447e-05, | |
| "loss": 0.0005, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 18.59, | |
| "learning_rate": 2.996075582443658e-05, | |
| "loss": 0.0015, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 18.62, | |
| "learning_rate": 2.981665495749457e-05, | |
| "loss": 0.0027, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 18.65, | |
| "learning_rate": 2.9672754091221805e-05, | |
| "loss": 0.0006, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 18.68, | |
| "learning_rate": 2.9529054651559772e-05, | |
| "loss": 0.0005, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 18.71, | |
| "learning_rate": 2.938555806245406e-05, | |
| "loss": 0.0006, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 18.74, | |
| "learning_rate": 2.9242265745840063e-05, | |
| "loss": 0.0006, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 18.76, | |
| "learning_rate": 2.9099179121629117e-05, | |
| "loss": 0.0006, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 18.79, | |
| "learning_rate": 2.895629960769417e-05, | |
| "loss": 0.0007, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 18.82, | |
| "learning_rate": 2.881362861985606e-05, | |
| "loss": 0.0006, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 18.85, | |
| "learning_rate": 2.867116757186911e-05, | |
| "loss": 0.0033, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 18.88, | |
| "learning_rate": 2.8528917875407433e-05, | |
| "loss": 0.0012, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 18.91, | |
| "learning_rate": 2.838688094005078e-05, | |
| "loss": 0.0005, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 18.94, | |
| "learning_rate": 2.8245058173270622e-05, | |
| "loss": 0.0007, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 18.97, | |
| "learning_rate": 2.8103450980416136e-05, | |
| "loss": 0.0024, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 19.0, | |
| "learning_rate": 2.796206076470044e-05, | |
| "loss": 0.0057, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 19.03, | |
| "learning_rate": 2.7820888927186483e-05, | |
| "loss": 0.0026, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 19.06, | |
| "learning_rate": 2.7679936866773315e-05, | |
| "loss": 0.0017, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 19.09, | |
| "learning_rate": 2.753920598018217e-05, | |
| "loss": 0.0004, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 19.12, | |
| "learning_rate": 2.739869766194263e-05, | |
| "loss": 0.0006, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 19.15, | |
| "learning_rate": 2.7258413304378734e-05, | |
| "loss": 0.0006, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 19.18, | |
| "learning_rate": 2.7118354297595396e-05, | |
| "loss": 0.0026, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 19.21, | |
| "learning_rate": 2.6978522029464325e-05, | |
| "loss": 0.0026, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 19.24, | |
| "learning_rate": 2.683891788561055e-05, | |
| "loss": 0.0006, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 19.26, | |
| "learning_rate": 2.669954324939852e-05, | |
| "loss": 0.0005, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 19.29, | |
| "learning_rate": 2.6560399501918465e-05, | |
| "loss": 0.0006, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 19.32, | |
| "learning_rate": 2.6421488021972673e-05, | |
| "loss": 0.0012, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 19.35, | |
| "learning_rate": 2.6282810186061862e-05, | |
| "loss": 0.0022, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 19.38, | |
| "learning_rate": 2.6144367368371535e-05, | |
| "loss": 0.0004, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 19.41, | |
| "learning_rate": 2.600616094075835e-05, | |
| "loss": 0.0015, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 19.44, | |
| "learning_rate": 2.5868192272736514e-05, | |
| "loss": 0.0006, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 19.47, | |
| "learning_rate": 2.5730462731464273e-05, | |
| "loss": 0.0011, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 19.5, | |
| "learning_rate": 2.5592973681730236e-05, | |
| "loss": 0.0024, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 19.53, | |
| "learning_rate": 2.5455726485940012e-05, | |
| "loss": 0.0019, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 19.56, | |
| "learning_rate": 2.5318722504102604e-05, | |
| "loss": 0.0006, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 19.59, | |
| "learning_rate": 2.5181963093816962e-05, | |
| "loss": 0.0012, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 19.62, | |
| "learning_rate": 2.504544961025853e-05, | |
| "loss": 0.0006, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 19.65, | |
| "learning_rate": 2.4909183406165836e-05, | |
| "loss": 0.0006, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 19.68, | |
| "learning_rate": 2.4773165831827018e-05, | |
| "loss": 0.0016, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 19.71, | |
| "learning_rate": 2.4637398235066527e-05, | |
| "loss": 0.0005, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 19.74, | |
| "learning_rate": 2.450188196123177e-05, | |
| "loss": 0.0006, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 19.76, | |
| "learning_rate": 2.4366618353179644e-05, | |
| "loss": 0.0016, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 19.79, | |
| "learning_rate": 2.423160875126348e-05, | |
| "loss": 0.0022, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 19.82, | |
| "learning_rate": 2.4096854493319477e-05, | |
| "loss": 0.0005, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 19.85, | |
| "learning_rate": 2.3962356914653657e-05, | |
| "loss": 0.0004, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 19.88, | |
| "learning_rate": 2.3828117348028528e-05, | |
| "loss": 0.0021, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 19.91, | |
| "learning_rate": 2.3694137123649946e-05, | |
| "loss": 0.0005, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 19.94, | |
| "learning_rate": 2.3560417569153796e-05, | |
| "loss": 0.002, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 19.97, | |
| "learning_rate": 2.342696000959309e-05, | |
| "loss": 0.0007, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 20.0, | |
| "learning_rate": 2.3293765767424537e-05, | |
| "loss": 0.0007, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 20.03, | |
| "learning_rate": 2.3160836162495653e-05, | |
| "loss": 0.0016, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 20.06, | |
| "learning_rate": 2.3028172512031604e-05, | |
| "loss": 0.001, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 20.09, | |
| "learning_rate": 2.289577613062218e-05, | |
| "loss": 0.0008, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 20.12, | |
| "learning_rate": 2.276364833020868e-05, | |
| "loss": 0.0005, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 20.15, | |
| "learning_rate": 2.2631790420071064e-05, | |
| "loss": 0.0006, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 20.18, | |
| "learning_rate": 2.2500203706814856e-05, | |
| "loss": 0.0016, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 20.21, | |
| "learning_rate": 2.2368889494358235e-05, | |
| "loss": 0.003, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 20.24, | |
| "learning_rate": 2.2237849083919142e-05, | |
| "loss": 0.0025, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 20.26, | |
| "learning_rate": 2.2107083774002364e-05, | |
| "loss": 0.0005, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 20.29, | |
| "learning_rate": 2.1976594860386597e-05, | |
| "loss": 0.0007, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 20.32, | |
| "learning_rate": 2.1846383636111743e-05, | |
| "loss": 0.0005, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 20.35, | |
| "learning_rate": 2.1716451391466008e-05, | |
| "loss": 0.0021, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 20.38, | |
| "learning_rate": 2.1586799413973135e-05, | |
| "loss": 0.0017, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 20.41, | |
| "learning_rate": 2.1457428988379635e-05, | |
| "loss": 0.0005, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 20.44, | |
| "learning_rate": 2.1328341396642093e-05, | |
| "loss": 0.0005, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 20.47, | |
| "learning_rate": 2.1199537917914386e-05, | |
| "loss": 0.0005, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 20.5, | |
| "learning_rate": 2.107101982853511e-05, | |
| "loss": 0.0014, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 20.53, | |
| "learning_rate": 2.0942788402014867e-05, | |
| "loss": 0.0016, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 20.56, | |
| "learning_rate": 2.0814844909023663e-05, | |
| "loss": 0.0024, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 20.59, | |
| "learning_rate": 2.068719061737831e-05, | |
| "loss": 0.0014, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 20.62, | |
| "learning_rate": 2.0559826792029884e-05, | |
| "loss": 0.0005, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 20.65, | |
| "learning_rate": 2.0432754695051136e-05, | |
| "loss": 0.0005, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 20.68, | |
| "learning_rate": 2.0305975585624058e-05, | |
| "loss": 0.0006, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 20.71, | |
| "learning_rate": 2.0179490720027372e-05, | |
| "loss": 0.0017, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 20.74, | |
| "learning_rate": 2.005330135162408e-05, | |
| "loss": 0.0016, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 20.76, | |
| "learning_rate": 1.992740873084899e-05, | |
| "loss": 0.0008, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 20.79, | |
| "learning_rate": 1.9801814105196497e-05, | |
| "loss": 0.0006, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 20.82, | |
| "learning_rate": 1.9676518719207977e-05, | |
| "loss": 0.0005, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 20.85, | |
| "learning_rate": 1.9551523814459665e-05, | |
| "loss": 0.0005, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 20.88, | |
| "learning_rate": 1.9426830629550242e-05, | |
| "loss": 0.0005, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 20.91, | |
| "learning_rate": 1.9302440400088606e-05, | |
| "loss": 0.0017, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 20.94, | |
| "learning_rate": 1.917835435868155e-05, | |
| "loss": 0.0005, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 20.97, | |
| "learning_rate": 1.9054573734921714e-05, | |
| "loss": 0.0005, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 21.0, | |
| "learning_rate": 1.8931099755375203e-05, | |
| "loss": 0.0005, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 21.03, | |
| "learning_rate": 1.880793364356956e-05, | |
| "loss": 0.0007, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 21.06, | |
| "learning_rate": 1.8685076619981608e-05, | |
| "loss": 0.0011, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 21.09, | |
| "learning_rate": 1.8562529902025372e-05, | |
| "loss": 0.001, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 21.12, | |
| "learning_rate": 1.844029470403993e-05, | |
| "loss": 0.0006, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 21.15, | |
| "learning_rate": 1.8318372237277565e-05, | |
| "loss": 0.0005, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 21.18, | |
| "learning_rate": 1.8196763709891524e-05, | |
| "loss": 0.0005, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 21.21, | |
| "learning_rate": 1.8075470326924243e-05, | |
| "loss": 0.0015, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 21.24, | |
| "learning_rate": 1.795449329029531e-05, | |
| "loss": 0.0005, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 21.26, | |
| "learning_rate": 1.7833833798789595e-05, | |
| "loss": 0.0011, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 21.29, | |
| "learning_rate": 1.7713493048045294e-05, | |
| "loss": 0.0005, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 21.32, | |
| "learning_rate": 1.7593472230542202e-05, | |
| "loss": 0.0005, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 21.35, | |
| "learning_rate": 1.747377253558982e-05, | |
| "loss": 0.0005, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 21.38, | |
| "learning_rate": 1.7354395149315534e-05, | |
| "loss": 0.0019, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 21.41, | |
| "learning_rate": 1.7235341254653005e-05, | |
| "loss": 0.0023, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 21.44, | |
| "learning_rate": 1.7116612031330252e-05, | |
| "loss": 0.0018, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 21.47, | |
| "learning_rate": 1.6998208655858137e-05, | |
| "loss": 0.0016, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 21.5, | |
| "learning_rate": 1.6880132301518598e-05, | |
| "loss": 0.0014, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 21.53, | |
| "learning_rate": 1.6762384138353078e-05, | |
| "loss": 0.002, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 21.56, | |
| "learning_rate": 1.6644965333150847e-05, | |
| "loss": 0.0005, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 21.59, | |
| "learning_rate": 1.6527877049437622e-05, | |
| "loss": 0.0011, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 21.62, | |
| "learning_rate": 1.6411120447463807e-05, | |
| "loss": 0.0005, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 21.65, | |
| "learning_rate": 1.6294696684193154e-05, | |
| "loss": 0.0005, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 21.68, | |
| "learning_rate": 1.617860691329126e-05, | |
| "loss": 0.0022, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 21.71, | |
| "learning_rate": 1.6062852285114123e-05, | |
| "loss": 0.0029, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 21.74, | |
| "learning_rate": 1.5947433946696693e-05, | |
| "loss": 0.0005, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 21.76, | |
| "learning_rate": 1.583235304174167e-05, | |
| "loss": 0.0005, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 21.79, | |
| "learning_rate": 1.5717610710607948e-05, | |
| "loss": 0.0005, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 21.82, | |
| "learning_rate": 1.5603208090299498e-05, | |
| "loss": 0.0004, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 21.85, | |
| "learning_rate": 1.5489146314454002e-05, | |
| "loss": 0.0006, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 21.88, | |
| "learning_rate": 1.537542651333167e-05, | |
| "loss": 0.0005, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 21.91, | |
| "learning_rate": 1.5262049813803958e-05, | |
| "loss": 0.0004, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 21.94, | |
| "learning_rate": 1.5149017339342574e-05, | |
| "loss": 0.0016, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 21.97, | |
| "learning_rate": 1.5036330210008115e-05, | |
| "loss": 0.0006, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 22.0, | |
| "learning_rate": 1.4923989542439159e-05, | |
| "loss": 0.0005, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 22.03, | |
| "learning_rate": 1.4811996449841098e-05, | |
| "loss": 0.0005, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 22.06, | |
| "learning_rate": 1.4700352041975168e-05, | |
| "loss": 0.0027, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 22.09, | |
| "learning_rate": 1.458905742514734e-05, | |
| "loss": 0.0005, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 22.12, | |
| "learning_rate": 1.447811370219757e-05, | |
| "loss": 0.0004, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 22.15, | |
| "learning_rate": 1.4367521972488612e-05, | |
| "loss": 0.0016, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 22.18, | |
| "learning_rate": 1.4257283331895315e-05, | |
| "loss": 0.001, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 22.21, | |
| "learning_rate": 1.4147398872793693e-05, | |
| "loss": 0.0011, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 22.24, | |
| "learning_rate": 1.4037869684050115e-05, | |
| "loss": 0.0013, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 22.26, | |
| "learning_rate": 1.3928696851010443e-05, | |
| "loss": 0.0015, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 22.29, | |
| "learning_rate": 1.3819881455489458e-05, | |
| "loss": 0.0005, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 22.32, | |
| "learning_rate": 1.3711424575759912e-05, | |
| "loss": 0.0012, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 22.35, | |
| "learning_rate": 1.3603327286542023e-05, | |
| "loss": 0.0005, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 22.38, | |
| "learning_rate": 1.3495590658992718e-05, | |
| "loss": 0.0012, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 22.41, | |
| "learning_rate": 1.33882157606951e-05, | |
| "loss": 0.0011, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 22.44, | |
| "learning_rate": 1.3281203655647756e-05, | |
| "loss": 0.0004, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 22.47, | |
| "learning_rate": 1.317455540425439e-05, | |
| "loss": 0.0006, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 22.5, | |
| "learning_rate": 1.3068272063313102e-05, | |
| "loss": 0.0005, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 22.53, | |
| "learning_rate": 1.2962354686006084e-05, | |
| "loss": 0.0013, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 22.56, | |
| "learning_rate": 1.2856804321889115e-05, | |
| "loss": 0.0005, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 22.59, | |
| "learning_rate": 1.2751622016881182e-05, | |
| "loss": 0.001, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 22.62, | |
| "learning_rate": 1.2646808813254035e-05, | |
| "loss": 0.0006, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 22.65, | |
| "learning_rate": 1.2542365749622049e-05, | |
| "loss": 0.0005, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 22.68, | |
| "learning_rate": 1.2438293860931677e-05, | |
| "loss": 0.0014, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 22.71, | |
| "learning_rate": 1.2334594178451425e-05, | |
| "loss": 0.0005, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 22.74, | |
| "learning_rate": 1.2231267729761487e-05, | |
| "loss": 0.0005, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 22.76, | |
| "learning_rate": 1.2128315538743646e-05, | |
| "loss": 0.0005, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 22.79, | |
| "learning_rate": 1.2025738625571026e-05, | |
| "loss": 0.0014, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 22.82, | |
| "learning_rate": 1.1923538006698154e-05, | |
| "loss": 0.0016, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 22.85, | |
| "learning_rate": 1.1821714694850689e-05, | |
| "loss": 0.0005, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 22.88, | |
| "learning_rate": 1.172026969901553e-05, | |
| "loss": 0.0005, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 22.91, | |
| "learning_rate": 1.161920402443077e-05, | |
| "loss": 0.0015, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 22.94, | |
| "learning_rate": 1.1518518672575701e-05, | |
| "loss": 0.0029, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 22.97, | |
| "learning_rate": 1.1418214641160958e-05, | |
| "loss": 0.0005, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 23.0, | |
| "learning_rate": 1.1318292924118584e-05, | |
| "loss": 0.0006, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 23.03, | |
| "learning_rate": 1.121875451159221e-05, | |
| "loss": 0.0005, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 23.06, | |
| "learning_rate": 1.1119600389927182e-05, | |
| "loss": 0.0019, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 23.09, | |
| "learning_rate": 1.1020831541660915e-05, | |
| "loss": 0.0004, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 23.12, | |
| "learning_rate": 1.092244894551298e-05, | |
| "loss": 0.0014, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 23.15, | |
| "learning_rate": 1.0824453576375576e-05, | |
| "loss": 0.0005, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 23.18, | |
| "learning_rate": 1.0726846405303754e-05, | |
| "loss": 0.0004, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 23.21, | |
| "learning_rate": 1.062962839950587e-05, | |
| "loss": 0.0005, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 23.24, | |
| "learning_rate": 1.0532800522333902e-05, | |
| "loss": 0.0005, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 23.26, | |
| "learning_rate": 1.0436363733274057e-05, | |
| "loss": 0.0005, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 23.29, | |
| "learning_rate": 1.0340318987937097e-05, | |
| "loss": 0.0011, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 23.32, | |
| "learning_rate": 1.0244667238048988e-05, | |
| "loss": 0.0012, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 23.35, | |
| "learning_rate": 1.014940943144142e-05, | |
| "loss": 0.0005, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 23.38, | |
| "learning_rate": 1.0054546512042424e-05, | |
| "loss": 0.0015, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 23.41, | |
| "learning_rate": 9.960079419866985e-06, | |
| "loss": 0.0005, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 23.44, | |
| "learning_rate": 9.866009091007833e-06, | |
| "loss": 0.0011, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 23.47, | |
| "learning_rate": 9.772336457626014e-06, | |
| "loss": 0.0013, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 23.5, | |
| "learning_rate": 9.679062447941778e-06, | |
| "loss": 0.0004, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 23.53, | |
| "learning_rate": 9.586187986225325e-06, | |
| "loss": 0.0019, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 23.56, | |
| "learning_rate": 9.493713992787672e-06, | |
| "loss": 0.0004, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 23.59, | |
| "learning_rate": 9.401641383971477e-06, | |
| "loss": 0.0005, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 23.62, | |
| "learning_rate": 9.309971072142038e-06, | |
| "loss": 0.0009, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 23.65, | |
| "learning_rate": 9.218703965678204e-06, | |
| "loss": 0.001, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 23.68, | |
| "learning_rate": 9.127840968963381e-06, | |
| "loss": 0.0005, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 23.71, | |
| "learning_rate": 9.03738298237658e-06, | |
| "loss": 0.0017, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 23.74, | |
| "learning_rate": 8.94733090228349e-06, | |
| "loss": 0.0005, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 23.76, | |
| "learning_rate": 8.857685621027568e-06, | |
| "loss": 0.0021, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 23.79, | |
| "learning_rate": 8.768448026921245e-06, | |
| "loss": 0.0005, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 23.82, | |
| "learning_rate": 8.67961900423711e-06, | |
| "loss": 0.0019, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 23.85, | |
| "learning_rate": 8.591199433199126e-06, | |
| "loss": 0.0004, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 23.88, | |
| "learning_rate": 8.503190189973914e-06, | |
| "loss": 0.0014, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 23.91, | |
| "learning_rate": 8.415592146662104e-06, | |
| "loss": 0.0005, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 23.94, | |
| "learning_rate": 8.328406171289621e-06, | |
| "loss": 0.0028, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 23.97, | |
| "learning_rate": 8.24163312779917e-06, | |
| "loss": 0.0005, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 24.0, | |
| "learning_rate": 8.155273876041614e-06, | |
| "loss": 0.0005, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 24.03, | |
| "learning_rate": 8.069329271767484e-06, | |
| "loss": 0.001, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 24.06, | |
| "learning_rate": 7.983800166618482e-06, | |
| "loss": 0.0005, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 24.09, | |
| "learning_rate": 7.898687408119065e-06, | |
| "loss": 0.0004, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 24.12, | |
| "learning_rate": 7.813991839667995e-06, | |
| "loss": 0.0004, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 24.15, | |
| "learning_rate": 7.72971430053005e-06, | |
| "loss": 0.0012, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 24.18, | |
| "learning_rate": 7.645855625827658e-06, | |
| "loss": 0.0023, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 24.21, | |
| "learning_rate": 7.56241664653266e-06, | |
| "loss": 0.0018, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 24.24, | |
| "learning_rate": 7.4793981894580034e-06, | |
| "loss": 0.0009, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 24.26, | |
| "learning_rate": 7.396801077249676e-06, | |
| "loss": 0.0012, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 24.29, | |
| "learning_rate": 7.3146261283784104e-06, | |
| "loss": 0.0005, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 24.32, | |
| "learning_rate": 7.2328741571316696e-06, | |
| "loss": 0.0022, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 24.35, | |
| "learning_rate": 7.1515459736055505e-06, | |
| "loss": 0.0005, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 24.38, | |
| "learning_rate": 7.070642383696763e-06, | |
| "loss": 0.0005, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 24.41, | |
| "learning_rate": 6.990164189094589e-06, | |
| "loss": 0.0004, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 24.44, | |
| "learning_rate": 6.910112187273066e-06, | |
| "loss": 0.0013, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 24.47, | |
| "learning_rate": 6.830487171482935e-06, | |
| "loss": 0.0005, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 24.5, | |
| "learning_rate": 6.751289930743882e-06, | |
| "loss": 0.0005, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 24.53, | |
| "learning_rate": 6.6725212498366885e-06, | |
| "loss": 0.0013, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 24.56, | |
| "learning_rate": 6.594181909295427e-06, | |
| "loss": 0.0021, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 24.59, | |
| "learning_rate": 6.516272685399793e-06, | |
| "loss": 0.0005, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 24.62, | |
| "learning_rate": 6.438794350167337e-06, | |
| "loss": 0.002, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 24.65, | |
| "learning_rate": 6.36174767134588e-06, | |
| "loss": 0.0004, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 24.68, | |
| "learning_rate": 6.285133412405858e-06, | |
| "loss": 0.0004, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 24.71, | |
| "learning_rate": 6.208952332532786e-06, | |
| "loss": 0.0005, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 24.74, | |
| "learning_rate": 6.133205186619695e-06, | |
| "loss": 0.0011, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 24.76, | |
| "learning_rate": 6.057892725259717e-06, | |
| "loss": 0.0013, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 24.79, | |
| "learning_rate": 5.983015694738597e-06, | |
| "loss": 0.0005, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 24.82, | |
| "learning_rate": 5.908574837027309e-06, | |
| "loss": 0.0005, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 24.85, | |
| "learning_rate": 5.83457088977471e-06, | |
| "loss": 0.0004, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 24.88, | |
| "learning_rate": 5.761004586300234e-06, | |
| "loss": 0.0013, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 24.91, | |
| "learning_rate": 5.687876655586583e-06, | |
| "loss": 0.0004, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 24.94, | |
| "learning_rate": 5.615187822272583e-06, | |
| "loss": 0.0005, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 24.97, | |
| "learning_rate": 5.542938806645931e-06, | |
| "loss": 0.0005, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 25.0, | |
| "learning_rate": 5.4711303246361144e-06, | |
| "loss": 0.0004, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 25.03, | |
| "learning_rate": 5.399763087807236e-06, | |
| "loss": 0.0005, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 25.06, | |
| "learning_rate": 5.328837803351083e-06, | |
| "loss": 0.0005, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 25.09, | |
| "learning_rate": 5.258355174079993e-06, | |
| "loss": 0.0004, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 25.12, | |
| "learning_rate": 5.188315898419971e-06, | |
| "loss": 0.0004, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 25.15, | |
| "learning_rate": 5.118720670403748e-06, | |
| "loss": 0.0004, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 25.18, | |
| "learning_rate": 5.04957017966391e-06, | |
| "loss": 0.0005, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 25.21, | |
| "learning_rate": 4.980865111426003e-06, | |
| "loss": 0.0013, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 25.24, | |
| "learning_rate": 4.912606146501886e-06, | |
| "loss": 0.0027, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 25.26, | |
| "learning_rate": 4.844793961282812e-06, | |
| "loss": 0.0014, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 25.29, | |
| "learning_rate": 4.777429227732844e-06, | |
| "loss": 0.0006, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 25.32, | |
| "learning_rate": 4.710512613382151e-06, | |
| "loss": 0.001, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 25.35, | |
| "learning_rate": 4.644044781320422e-06, | |
| "loss": 0.0004, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 25.38, | |
| "learning_rate": 4.578026390190232e-06, | |
| "loss": 0.0005, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 25.41, | |
| "learning_rate": 4.5124580941806165e-06, | |
| "loss": 0.0005, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 25.44, | |
| "learning_rate": 4.447340543020473e-06, | |
| "loss": 0.0004, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 25.47, | |
| "learning_rate": 4.382674381972224e-06, | |
| "loss": 0.001, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 25.5, | |
| "learning_rate": 4.318460251825357e-06, | |
| "loss": 0.0005, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 25.53, | |
| "learning_rate": 4.254698788890127e-06, | |
| "loss": 0.0004, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 25.56, | |
| "learning_rate": 4.191390624991159e-06, | |
| "loss": 0.0011, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 25.59, | |
| "learning_rate": 4.12853638746134e-06, | |
| "loss": 0.0005, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 25.62, | |
| "learning_rate": 4.0661366991354365e-06, | |
| "loss": 0.001, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 25.65, | |
| "learning_rate": 4.004192178344029e-06, | |
| "loss": 0.0004, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 25.68, | |
| "learning_rate": 3.942703438907358e-06, | |
| "loss": 0.0024, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 25.71, | |
| "learning_rate": 3.881671090129247e-06, | |
| "loss": 0.0022, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 25.74, | |
| "learning_rate": 3.821095736791008e-06, | |
| "loss": 0.0021, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 25.76, | |
| "learning_rate": 3.7609779791455744e-06, | |
| "loss": 0.0004, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 25.79, | |
| "learning_rate": 3.7013184129113976e-06, | |
| "loss": 0.0005, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 25.82, | |
| "learning_rate": 3.6421176292666783e-06, | |
| "loss": 0.0004, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 25.85, | |
| "learning_rate": 3.58337621484342e-06, | |
| "loss": 0.0015, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 25.88, | |
| "learning_rate": 3.5250947517216637e-06, | |
| "loss": 0.0005, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 25.91, | |
| "learning_rate": 3.4672738174236884e-06, | |
| "loss": 0.0005, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 25.94, | |
| "learning_rate": 3.4099139849083307e-06, | |
| "loss": 0.0009, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 25.97, | |
| "learning_rate": 3.353015822565253e-06, | |
| "loss": 0.0015, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 26.0, | |
| "learning_rate": 3.296579894209345e-06, | |
| "loss": 0.0005, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 26.03, | |
| "learning_rate": 3.2406067590751433e-06, | |
| "loss": 0.0005, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 26.06, | |
| "learning_rate": 3.1850969718112745e-06, | |
| "loss": 0.0005, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 26.09, | |
| "learning_rate": 3.1300510824749273e-06, | |
| "loss": 0.0011, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 26.12, | |
| "learning_rate": 3.0754696365265068e-06, | |
| "loss": 0.0005, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 26.15, | |
| "learning_rate": 3.0213531748240764e-06, | |
| "loss": 0.0005, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 26.18, | |
| "learning_rate": 2.9677022336181413e-06, | |
| "loss": 0.0005, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 26.21, | |
| "learning_rate": 2.914517344546258e-06, | |
| "loss": 0.0015, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 26.24, | |
| "learning_rate": 2.8617990346277657e-06, | |
| "loss": 0.0012, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 26.26, | |
| "learning_rate": 2.8095478262585907e-06, | |
| "loss": 0.0004, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 26.29, | |
| "learning_rate": 2.7577642372060673e-06, | |
| "loss": 0.0004, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 26.32, | |
| "learning_rate": 2.7064487806037985e-06, | |
| "loss": 0.0005, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 26.35, | |
| "learning_rate": 2.6556019649465525e-06, | |
| "loss": 0.0004, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 26.38, | |
| "learning_rate": 2.6052242940852787e-06, | |
| "loss": 0.0008, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 26.41, | |
| "learning_rate": 2.5553162672220465e-06, | |
| "loss": 0.0005, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 26.44, | |
| "learning_rate": 2.5058783789051467e-06, | |
| "loss": 0.0029, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 26.47, | |
| "learning_rate": 2.45691111902418e-06, | |
| "loss": 0.0005, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 26.5, | |
| "learning_rate": 2.4084149728051952e-06, | |
| "loss": 0.0012, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 26.53, | |
| "learning_rate": 2.360390420805869e-06, | |
| "loss": 0.0005, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 26.56, | |
| "learning_rate": 2.3128379389108e-06, | |
| "loss": 0.0015, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 26.59, | |
| "learning_rate": 2.2657579983267064e-06, | |
| "loss": 0.0004, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 26.62, | |
| "learning_rate": 2.219151065577829e-06, | |
| "loss": 0.0019, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 26.65, | |
| "learning_rate": 2.1730176025012816e-06, | |
| "loss": 0.0011, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 26.68, | |
| "learning_rate": 2.1273580662424796e-06, | |
| "loss": 0.0013, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 26.71, | |
| "learning_rate": 2.082172909250568e-06, | |
| "loss": 0.0013, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 26.74, | |
| "learning_rate": 2.0374625792740464e-06, | |
| "loss": 0.0011, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 26.76, | |
| "learning_rate": 1.993227519356189e-06, | |
| "loss": 0.0004, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 26.79, | |
| "learning_rate": 1.9494681678307703e-06, | |
| "loss": 0.0004, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 26.82, | |
| "learning_rate": 1.906184958317664e-06, | |
| "loss": 0.0011, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 26.85, | |
| "learning_rate": 1.8633783197185783e-06, | |
| "loss": 0.0011, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 26.88, | |
| "learning_rate": 1.8210486762127499e-06, | |
| "loss": 0.0012, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 26.91, | |
| "learning_rate": 1.7791964472528232e-06, | |
| "loss": 0.0004, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 26.94, | |
| "learning_rate": 1.737822047560611e-06, | |
| "loss": 0.0009, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 26.97, | |
| "learning_rate": 1.696925887123052e-06, | |
| "loss": 0.0005, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 27.0, | |
| "learning_rate": 1.656508371188109e-06, | |
| "loss": 0.0005, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 27.03, | |
| "learning_rate": 1.6165699002607671e-06, | |
| "loss": 0.0005, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 27.06, | |
| "learning_rate": 1.5771108700990412e-06, | |
| "loss": 0.0004, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 27.09, | |
| "learning_rate": 1.538131671710108e-06, | |
| "loss": 0.0004, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 27.12, | |
| "learning_rate": 1.4996326913463754e-06, | |
| "loss": 0.001, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 27.15, | |
| "learning_rate": 1.461614310501691e-06, | |
| "loss": 0.0005, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 27.18, | |
| "learning_rate": 1.4240769059075342e-06, | |
| "loss": 0.0014, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 27.21, | |
| "learning_rate": 1.387020849529319e-06, | |
| "loss": 0.0005, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 27.24, | |
| "learning_rate": 1.3504465085626638e-06, | |
| "loss": 0.0005, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 27.26, | |
| "learning_rate": 1.3143542454297885e-06, | |
| "loss": 0.0004, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 27.29, | |
| "learning_rate": 1.2787444177759068e-06, | |
| "loss": 0.0013, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 27.32, | |
| "learning_rate": 1.243617378465689e-06, | |
| "loss": 0.0005, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 27.35, | |
| "learning_rate": 1.208973475579761e-06, | |
| "loss": 0.0011, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 27.38, | |
| "learning_rate": 1.1748130524112666e-06, | |
| "loss": 0.0014, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 27.41, | |
| "learning_rate": 1.1411364474624264e-06, | |
| "loss": 0.0013, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 27.44, | |
| "learning_rate": 1.1079439944412406e-06, | |
| "loss": 0.001, | |
| "step": 933 | |
| }, | |
| { | |
| "epoch": 27.47, | |
| "learning_rate": 1.075236022258147e-06, | |
| "loss": 0.0009, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 27.5, | |
| "learning_rate": 1.0430128550227625e-06, | |
| "loss": 0.0004, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 27.53, | |
| "learning_rate": 1.0112748120406856e-06, | |
| "loss": 0.0004, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 27.56, | |
| "learning_rate": 9.800222078103271e-07, | |
| "loss": 0.0013, | |
| "step": 937 | |
| }, | |
| { | |
| "epoch": 27.59, | |
| "learning_rate": 9.492553520197733e-07, | |
| "loss": 0.0004, | |
| "step": 938 | |
| }, | |
| { | |
| "epoch": 27.62, | |
| "learning_rate": 9.189745495437608e-07, | |
| "loss": 0.0013, | |
| "step": 939 | |
| }, | |
| { | |
| "epoch": 27.65, | |
| "learning_rate": 8.891801004406119e-07, | |
| "loss": 0.0004, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 27.68, | |
| "learning_rate": 8.59872299949288e-07, | |
| "loss": 0.0005, | |
| "step": 941 | |
| }, | |
| { | |
| "epoch": 27.71, | |
| "learning_rate": 8.31051438486441e-07, | |
| "loss": 0.0004, | |
| "step": 942 | |
| }, | |
| { | |
| "epoch": 27.74, | |
| "learning_rate": 8.027178016435765e-07, | |
| "loss": 0.0005, | |
| "step": 943 | |
| }, | |
| { | |
| "epoch": 27.76, | |
| "learning_rate": 7.748716701841685e-07, | |
| "loss": 0.0009, | |
| "step": 944 | |
| }, | |
| { | |
| "epoch": 27.79, | |
| "learning_rate": 7.475133200409212e-07, | |
| "loss": 0.0022, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 27.82, | |
| "learning_rate": 7.206430223130278e-07, | |
| "loss": 0.0011, | |
| "step": 946 | |
| }, | |
| { | |
| "epoch": 27.85, | |
| "learning_rate": 6.9426104326345e-07, | |
| "loss": 0.0015, | |
| "step": 947 | |
| }, | |
| { | |
| "epoch": 27.88, | |
| "learning_rate": 6.683676443163311e-07, | |
| "loss": 0.0025, | |
| "step": 948 | |
| }, | |
| { | |
| "epoch": 27.91, | |
| "learning_rate": 6.429630820543598e-07, | |
| "loss": 0.0012, | |
| "step": 949 | |
| }, | |
| { | |
| "epoch": 27.94, | |
| "learning_rate": 6.180476082162656e-07, | |
| "loss": 0.0005, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 27.97, | |
| "learning_rate": 5.936214696942887e-07, | |
| "loss": 0.0005, | |
| "step": 951 | |
| }, | |
| { | |
| "epoch": 28.0, | |
| "learning_rate": 5.696849085317646e-07, | |
| "loss": 0.0004, | |
| "step": 952 | |
| }, | |
| { | |
| "epoch": 28.03, | |
| "learning_rate": 5.462381619207091e-07, | |
| "loss": 0.001, | |
| "step": 953 | |
| }, | |
| { | |
| "epoch": 28.06, | |
| "learning_rate": 5.232814621994598e-07, | |
| "loss": 0.0004, | |
| "step": 954 | |
| }, | |
| { | |
| "epoch": 28.09, | |
| "learning_rate": 5.008150368503994e-07, | |
| "loss": 0.001, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 28.12, | |
| "learning_rate": 4.788391084976862e-07, | |
| "loss": 0.0004, | |
| "step": 956 | |
| }, | |
| { | |
| "epoch": 28.15, | |
| "learning_rate": 4.573538949050327e-07, | |
| "loss": 0.0005, | |
| "step": 957 | |
| }, | |
| { | |
| "epoch": 28.18, | |
| "learning_rate": 4.363596089735911e-07, | |
| "loss": 0.0005, | |
| "step": 958 | |
| }, | |
| { | |
| "epoch": 28.21, | |
| "learning_rate": 4.1585645873978284e-07, | |
| "loss": 0.0005, | |
| "step": 959 | |
| }, | |
| { | |
| "epoch": 28.24, | |
| "learning_rate": 3.958446473733002e-07, | |
| "loss": 0.0004, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 28.26, | |
| "learning_rate": 3.7632437317505207e-07, | |
| "loss": 0.0004, | |
| "step": 961 | |
| }, | |
| { | |
| "epoch": 28.29, | |
| "learning_rate": 3.572958295752049e-07, | |
| "loss": 0.0004, | |
| "step": 962 | |
| }, | |
| { | |
| "epoch": 28.32, | |
| "learning_rate": 3.387592051312782e-07, | |
| "loss": 0.0022, | |
| "step": 963 | |
| }, | |
| { | |
| "epoch": 28.35, | |
| "learning_rate": 3.207146835262742e-07, | |
| "loss": 0.0005, | |
| "step": 964 | |
| }, | |
| { | |
| "epoch": 28.38, | |
| "learning_rate": 3.0316244356683454e-07, | |
| "loss": 0.0005, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 28.41, | |
| "learning_rate": 2.8610265918151414e-07, | |
| "loss": 0.0005, | |
| "step": 966 | |
| }, | |
| { | |
| "epoch": 28.44, | |
| "learning_rate": 2.695354994190047e-07, | |
| "loss": 0.0011, | |
| "step": 967 | |
| }, | |
| { | |
| "epoch": 28.47, | |
| "learning_rate": 2.534611284465083e-07, | |
| "loss": 0.0011, | |
| "step": 968 | |
| }, | |
| { | |
| "epoch": 28.5, | |
| "learning_rate": 2.3787970554806084e-07, | |
| "loss": 0.0005, | |
| "step": 969 | |
| }, | |
| { | |
| "epoch": 28.53, | |
| "learning_rate": 2.2279138512300567e-07, | |
| "loss": 0.0014, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 28.56, | |
| "learning_rate": 2.0819631668442253e-07, | |
| "loss": 0.0011, | |
| "step": 971 | |
| }, | |
| { | |
| "epoch": 28.59, | |
| "learning_rate": 1.940946448576675e-07, | |
| "loss": 0.001, | |
| "step": 972 | |
| }, | |
| { | |
| "epoch": 28.62, | |
| "learning_rate": 1.8048650937893542e-07, | |
| "loss": 0.0013, | |
| "step": 973 | |
| }, | |
| { | |
| "epoch": 28.65, | |
| "learning_rate": 1.6737204509387206e-07, | |
| "loss": 0.0004, | |
| "step": 974 | |
| }, | |
| { | |
| "epoch": 28.68, | |
| "learning_rate": 1.5475138195623629e-07, | |
| "loss": 0.0005, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 28.71, | |
| "learning_rate": 1.4262464502663443e-07, | |
| "loss": 0.0017, | |
| "step": 976 | |
| }, | |
| { | |
| "epoch": 28.74, | |
| "learning_rate": 1.309919544712268e-07, | |
| "loss": 0.0014, | |
| "step": 977 | |
| }, | |
| { | |
| "epoch": 28.76, | |
| "learning_rate": 1.1985342556060652e-07, | |
| "loss": 0.0005, | |
| "step": 978 | |
| }, | |
| { | |
| "epoch": 28.79, | |
| "learning_rate": 1.0920916866861142e-07, | |
| "loss": 0.0005, | |
| "step": 979 | |
| }, | |
| { | |
| "epoch": 28.82, | |
| "learning_rate": 9.905928927123609e-08, | |
| "loss": 0.0005, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 28.85, | |
| "learning_rate": 8.940388794559939e-08, | |
| "loss": 0.0021, | |
| "step": 981 | |
| }, | |
| { | |
| "epoch": 28.88, | |
| "learning_rate": 8.02430603689397e-08, | |
| "loss": 0.0012, | |
| "step": 982 | |
| }, | |
| { | |
| "epoch": 28.91, | |
| "learning_rate": 7.157689731767669e-08, | |
| "loss": 0.0011, | |
| "step": 983 | |
| }, | |
| { | |
| "epoch": 28.94, | |
| "learning_rate": 6.340548466648443e-08, | |
| "loss": 0.0014, | |
| "step": 984 | |
| }, | |
| { | |
| "epoch": 28.97, | |
| "learning_rate": 5.572890338748082e-08, | |
| "loss": 0.0005, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 29.0, | |
| "learning_rate": 4.8547229549383844e-08, | |
| "loss": 0.0006, | |
| "step": 986 | |
| }, | |
| { | |
| "epoch": 29.03, | |
| "learning_rate": 4.186053431680104e-08, | |
| "loss": 0.0019, | |
| "step": 987 | |
| }, | |
| { | |
| "epoch": 29.06, | |
| "learning_rate": 3.566888394948009e-08, | |
| "loss": 0.0005, | |
| "step": 988 | |
| }, | |
| { | |
| "epoch": 29.09, | |
| "learning_rate": 2.997233980168157e-08, | |
| "loss": 0.0012, | |
| "step": 989 | |
| }, | |
| { | |
| "epoch": 29.12, | |
| "learning_rate": 2.4770958321568283e-08, | |
| "loss": 0.0005, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 29.15, | |
| "learning_rate": 2.0064791050633526e-08, | |
| "loss": 0.0005, | |
| "step": 991 | |
| }, | |
| { | |
| "epoch": 29.18, | |
| "learning_rate": 1.5853884623195925e-08, | |
| "loss": 0.0005, | |
| "step": 992 | |
| }, | |
| { | |
| "epoch": 29.21, | |
| "learning_rate": 1.2138280765944254e-08, | |
| "loss": 0.0004, | |
| "step": 993 | |
| }, | |
| { | |
| "epoch": 29.24, | |
| "learning_rate": 8.918016297515541e-09, | |
| "loss": 0.0005, | |
| "step": 994 | |
| }, | |
| { | |
| "epoch": 29.26, | |
| "learning_rate": 6.193123128134248e-09, | |
| "loss": 0.0005, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 29.29, | |
| "learning_rate": 3.963628259290308e-09, | |
| "loss": 0.0008, | |
| "step": 996 | |
| }, | |
| { | |
| "epoch": 29.32, | |
| "learning_rate": 2.229553783478222e-09, | |
| "loss": 0.0005, | |
| "step": 997 | |
| }, | |
| { | |
| "epoch": 29.35, | |
| "learning_rate": 9.90916883986115e-10, | |
| "loss": 0.0005, | |
| "step": 998 | |
| }, | |
| { | |
| "epoch": 29.38, | |
| "learning_rate": 2.477298346958978e-10, | |
| "loss": 0.0016, | |
| "step": 999 | |
| }, | |
| { | |
| "epoch": 29.41, | |
| "learning_rate": 0.0, | |
| "loss": 0.0004, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 29.41, | |
| "step": 1000, | |
| "total_flos": 1.5307500373209907e+17, | |
| "train_loss": 0.07744903843736392, | |
| "train_runtime": 5187.366, | |
| "train_samples_per_second": 3.084, | |
| "train_steps_per_second": 0.193 | |
| } | |
| ], | |
| "max_steps": 1000, | |
| "num_train_epochs": 30, | |
| "total_flos": 1.5307500373209907e+17, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
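
The file above has the shape of a Hugging Face Trainer `trainer_state.json`: per-step records (each with `epoch`, `learning_rate`, `loss`, `step`) accumulate in `log_history`, followed by a final summary entry and run-level fields such as `max_steps` and `total_flos`. A minimal sketch of how one might plot the loss and learning-rate curves from it — assuming the JSON is saved locally under the (hypothetical) filename `trainer_state.json` and that `matplotlib` is installed:

```python
import json

import matplotlib.pyplot as plt

# Load the trainer state shown above (filename is an assumption).
with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step records live in log_history. The last entry is a run summary
# (it carries train_runtime etc. instead of a loss), so keep only the
# records that have both fields we want to plot.
records = [
    r for r in state["log_history"]
    if "loss" in r and "learning_rate" in r
]

steps = [r["step"] for r in records]
losses = [r["loss"] for r in records]
lrs = [r["learning_rate"] for r in records]

fig, ax_loss = plt.subplots()
ax_loss.plot(steps, losses, label="loss")
ax_loss.set_xlabel("step")
ax_loss.set_ylabel("loss")
# Loss spans roughly 0.97 down to 0.0004, so a log scale keeps the
# late-training values visible.
ax_loss.set_yscale("log")

# Learning rate on a second y-axis: it decays smoothly from 1e-4 to 0.
ax_lr = ax_loss.twinx()
ax_lr.plot(steps, lrs, color="tab:orange", label="learning rate")
ax_lr.set_ylabel("learning rate")

fig.tight_layout()
plt.show()
```

The filtering step matters because the summary record at `step` 1000 duplicates the step number of the last training record; selecting on the presence of `loss` and `learning_rate` keeps exactly the per-step entries.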