{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 6.451612903225806,
  "global_step": 1000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.01, "learning_rate": 5e-05, "loss": 0.843, "step": 1 },
    { "epoch": 0.01, "learning_rate": 0.0001, "loss": 0.7773, "step": 2 },
    { "epoch": 0.02, "learning_rate": 9.999975227016531e-05, "loss": 0.8884, "step": 3 },
    { "epoch": 0.03, "learning_rate": 9.999900908311602e-05, "loss": 1.0872, "step": 4 },
    { "epoch": 0.03, "learning_rate": 9.999777044621652e-05, "loss": 1.1342, "step": 5 },
    { "epoch": 0.04, "learning_rate": 9.999603637174071e-05, "loss": 0.8538, "step": 6 },
    { "epoch": 0.05, "learning_rate": 9.999380687687188e-05, "loss": 0.9938, "step": 7 },
    { "epoch": 0.05, "learning_rate": 9.999108198370249e-05, "loss": 0.8892, "step": 8 },
    { "epoch": 0.06, "learning_rate": 9.998786171923407e-05, "loss": 0.9229, "step": 9 },
    { "epoch": 0.06, "learning_rate": 9.998414611537681e-05, "loss": 0.8371, "step": 10 },
    { "epoch": 0.07, "learning_rate": 9.997993520894937e-05, "loss": 0.889, "step": 11 },
    { "epoch": 0.08, "learning_rate": 9.997522904167844e-05, "loss": 0.8277, "step": 12 },
    { "epoch": 0.08, "learning_rate": 9.997002766019832e-05, "loss": 0.8332, "step": 13 },
    { "epoch": 0.09, "learning_rate": 9.996433111605052e-05, "loss": 0.6883, "step": 14 },
    { "epoch": 0.1, "learning_rate": 9.99581394656832e-05, "loss": 0.9964, "step": 15 },
    { "epoch": 0.1, "learning_rate": 9.995145277045061e-05, "loss": 0.7082, "step": 16 },
    { "epoch": 0.11, "learning_rate": 9.994427109661253e-05, "loss": 0.7267, "step": 17 },
    { "epoch": 0.12, "learning_rate": 9.993659451533353e-05, "loss": 0.8105, "step": 18 },
    { "epoch": 0.12, "learning_rate": 9.992842310268233e-05, "loss": 0.828, "step": 19 },
    { "epoch": 0.13, "learning_rate": 9.991975693963107e-05, "loss": 0.6968, "step": 20 },
    { "epoch": 0.14, "learning_rate": 9.99105961120544e-05, "loss": 0.7103, "step": 21 },
    { "epoch": 0.14, "learning_rate": 9.990094071072877e-05, "loss": 0.5235, "step": 22 },
    { "epoch": 0.15, "learning_rate": 9.989079083133139e-05, "loss": 0.6003, "step": 23 },
    { "epoch": 0.15, "learning_rate": 9.988014657443941e-05, "loss": 0.6947, "step": 24 },
    { "epoch": 0.16, "learning_rate": 9.986900804552878e-05, "loss": 0.5919, "step": 25 },
    { "epoch": 0.17, "learning_rate": 9.985737535497337e-05, "loss": 0.7571, "step": 26 },
    { "epoch": 0.17, "learning_rate": 9.984524861804376e-05, "loss": 0.7387, "step": 27 },
    { "epoch": 0.18, "learning_rate": 9.983262795490613e-05, "loss": 0.6541, "step": 28 },
    { "epoch": 0.19, "learning_rate": 9.981951349062106e-05, "loss": 0.4892, "step": 29 },
    { "epoch": 0.19, "learning_rate": 9.980590535514233e-05, "loss": 0.6266, "step": 30 },
    { "epoch": 0.2, "learning_rate": 9.979180368331558e-05, "loss": 0.5837, "step": 31 },
    { "epoch": 0.21, "learning_rate": 9.9777208614877e-05, "loss": 0.6601, "step": 32 },
    { "epoch": 0.21, "learning_rate": 9.976212029445194e-05, "loss": 0.678, "step": 33 },
    { "epoch": 0.22, "learning_rate": 9.97465388715535e-05, "loss": 0.6987, "step": 34 },
    { "epoch": 0.23, "learning_rate": 9.9730464500581e-05, "loss": 0.7587, "step": 35 },
    { "epoch": 0.23, "learning_rate": 9.971389734081848e-05, "loss": 0.5157, "step": 36 },
    { "epoch": 0.24, "learning_rate": 9.969683755643317e-05, "loss": 0.7689, "step": 37 },
    { "epoch": 0.25, "learning_rate": 9.967928531647374e-05, "loss": 0.5331, "step": 38 },
    { "epoch": 0.25, "learning_rate": 9.966124079486872e-05, "loss": 0.9998, "step": 39 },
    { "epoch": 0.26, "learning_rate": 9.96427041704248e-05, "loss": 0.6785, "step": 40 },
    { "epoch": 0.26, "learning_rate": 9.962367562682496e-05, "loss": 0.6151, "step": 41 },
    { "epoch": 0.27, "learning_rate": 9.960415535262671e-05, "loss": 0.7399, "step": 42 },
    { "epoch": 0.28, "learning_rate": 9.958414354126022e-05, "loss": 0.5477, "step": 43 },
    { "epoch": 0.28, "learning_rate": 9.956364039102642e-05, "loss": 0.79, "step": 44 },
    { "epoch": 0.29, "learning_rate": 9.954264610509497e-05, "loss": 0.5478, "step": 45 },
    { "epoch": 0.3, "learning_rate": 9.952116089150232e-05, "loss": 0.6187, "step": 46 },
    { "epoch": 0.3, "learning_rate": 9.94991849631496e-05, "loss": 0.5635, "step": 47 },
    { "epoch": 0.31, "learning_rate": 9.947671853780054e-05, "loss": 0.612, "step": 48 },
    { "epoch": 0.32, "learning_rate": 9.94537618380793e-05, "loss": 0.5631, "step": 49 },
    { "epoch": 0.32, "learning_rate": 9.943031509146825e-05, "loss": 0.7695, "step": 50 },
    { "epoch": 0.33, "learning_rate": 9.940637853030572e-05, "loss": 0.6831, "step": 51 },
    { "epoch": 0.34, "learning_rate": 9.938195239178374e-05, "loss": 0.7762, "step": 52 },
    { "epoch": 0.34, "learning_rate": 9.935703691794565e-05, "loss": 0.5958, "step": 53 },
    { "epoch": 0.35, "learning_rate": 9.933163235568367e-05, "loss": 0.6312, "step": 54 },
    { "epoch": 0.35, "learning_rate": 9.930573895673657e-05, "loss": 0.5744, "step": 55 },
    { "epoch": 0.36, "learning_rate": 9.927935697768698e-05, "loss": 0.7623, "step": 56 },
    { "epoch": 0.37, "learning_rate": 9.925248667995907e-05, "loss": 0.7806, "step": 57 },
    { "epoch": 0.37, "learning_rate": 9.922512832981584e-05, "loss": 0.7338, "step": 58 },
    { "epoch": 0.38, "learning_rate": 9.919728219835643e-05, "loss": 0.5437, "step": 59 },
    { "epoch": 0.39, "learning_rate": 9.916894856151357e-05, "loss": 0.6343, "step": 60 },
    { "epoch": 0.39, "learning_rate": 9.914012770005072e-05, "loss": 0.5863, "step": 61 },
    { "epoch": 0.4, "learning_rate": 9.91108198995594e-05, "loss": 0.7165, "step": 62 },
    { "epoch": 0.41, "learning_rate": 9.908102545045625e-05, "loss": 0.7479, "step": 63 },
    { "epoch": 0.41, "learning_rate": 9.905074464798024e-05, "loss": 0.6124, "step": 64 },
    { "epoch": 0.42, "learning_rate": 9.901997779218967e-05, "loss": 0.8549, "step": 65 },
    { "epoch": 0.43, "learning_rate": 9.898872518795932e-05, "loss": 0.7272, "step": 66 },
    { "epoch": 0.43, "learning_rate": 9.895698714497724e-05, "loss": 0.674, "step": 67 },
    { "epoch": 0.44, "learning_rate": 9.892476397774186e-05, "loss": 0.7458, "step": 68 },
    { "epoch": 0.45, "learning_rate": 9.889205600555877e-05, "loss": 0.6658, "step": 69 },
    { "epoch": 0.45, "learning_rate": 9.885886355253758e-05, "loss": 0.5559, "step": 70 },
    { "epoch": 0.46, "learning_rate": 9.882518694758875e-05, "loss": 0.5863, "step": 71 },
    { "epoch": 0.46, "learning_rate": 9.879102652442024e-05, "loss": 0.5821, "step": 72 },
    { "epoch": 0.47, "learning_rate": 9.875638262153431e-05, "loss": 0.6834, "step": 73 },
    { "epoch": 0.48, "learning_rate": 9.872125558222409e-05, "loss": 0.5755, "step": 74 },
    { "epoch": 0.48, "learning_rate": 9.868564575457023e-05, "loss": 0.6745, "step": 75 },
    { "epoch": 0.49, "learning_rate": 9.864955349143734e-05, "loss": 0.5966, "step": 76 },
    { "epoch": 0.5, "learning_rate": 9.861297915047069e-05, "loss": 0.6118, "step": 77 },
    { "epoch": 0.5, "learning_rate": 9.857592309409247e-05, "loss": 0.5196, "step": 78 },
    { "epoch": 0.51, "learning_rate": 9.853838568949831e-05, "loss": 0.6412, "step": 79 },
    { "epoch": 0.52, "learning_rate": 9.850036730865364e-05, "loss": 0.6207, "step": 80 },
    { "epoch": 0.52, "learning_rate": 9.846186832828989e-05, "loss": 0.5705, "step": 81 },
    { "epoch": 0.53, "learning_rate": 9.842288912990096e-05, "loss": 0.5643, "step": 82 },
    { "epoch": 0.54, "learning_rate": 9.838343009973925e-05, "loss": 0.7729, "step": 83 },
    { "epoch": 0.54, "learning_rate": 9.83434916288119e-05, "loss": 0.5091, "step": 84 },
    { "epoch": 0.55, "learning_rate": 9.830307411287695e-05, "loss": 0.7592, "step": 85 },
    { "epoch": 0.55, "learning_rate": 9.82621779524394e-05, "loss": 0.6386, "step": 86 },
    { "epoch": 0.56, "learning_rate": 9.822080355274719e-05, "loss": 0.5282, "step": 87 },
    { "epoch": 0.57, "learning_rate": 9.817895132378725e-05, "loss": 0.6444, "step": 88 },
    { "epoch": 0.57, "learning_rate": 9.813662168028144e-05, "loss": 0.5698, "step": 89 },
    { "epoch": 0.58, "learning_rate": 9.809381504168234e-05, "loss": 0.4588, "step": 90 },
    { "epoch": 0.59, "learning_rate": 9.805053183216923e-05, "loss": 0.645, "step": 91 },
    { "epoch": 0.59, "learning_rate": 9.800677248064382e-05, "loss": 0.5169, "step": 92 },
    { "epoch": 0.6, "learning_rate": 9.796253742072596e-05, "loss": 0.5538, "step": 93 },
    { "epoch": 0.61, "learning_rate": 9.791782709074944e-05, "loss": 0.549, "step": 94 },
    { "epoch": 0.61, "learning_rate": 9.787264193375753e-05, "loss": 0.6782, "step": 95 },
    { "epoch": 0.62, "learning_rate": 9.782698239749873e-05, "loss": 0.6065, "step": 96 },
    { "epoch": 0.63, "learning_rate": 9.778084893442218e-05, "loss": 0.8218, "step": 97 },
    { "epoch": 0.63, "learning_rate": 9.77342420016733e-05, "loss": 0.5523, "step": 98 },
    { "epoch": 0.64, "learning_rate": 9.768716206108921e-05, "loss": 0.61, "step": 99 },
    { "epoch": 0.65, "learning_rate": 9.763960957919413e-05, "loss": 0.8251, "step": 100 },
    { "epoch": 0.65, "learning_rate": 9.759158502719481e-05, "loss": 0.8027, "step": 101 },
    { "epoch": 0.66, "learning_rate": 9.754308888097583e-05, "loss": 1.0138, "step": 102 },
    { "epoch": 0.66, "learning_rate": 9.749412162109485e-05, "loss": 0.5758, "step": 103 },
    { "epoch": 0.67, "learning_rate": 9.744468373277797e-05, "loss": 0.6591, "step": 104 },
    { "epoch": 0.68, "learning_rate": 9.739477570591473e-05, "loss": 0.5436, "step": 105 },
    { "epoch": 0.68, "learning_rate": 9.734439803505345e-05, "loss": 0.589, "step": 106 },
    { "epoch": 0.69, "learning_rate": 9.729355121939621e-05, "loss": 0.54, "step": 107 },
    { "epoch": 0.7, "learning_rate": 9.724223576279395e-05, "loss": 0.5461, "step": 108 },
    { "epoch": 0.7, "learning_rate": 9.719045217374143e-05, "loss": 0.626, "step": 109 },
    { "epoch": 0.71, "learning_rate": 9.713820096537225e-05, "loss": 0.5492, "step": 110 },
    { "epoch": 0.72, "learning_rate": 9.708548265545375e-05, "loss": 0.63, "step": 111 },
    { "epoch": 0.72, "learning_rate": 9.703229776638185e-05, "loss": 0.6278, "step": 112 },
    { "epoch": 0.73, "learning_rate": 9.697864682517592e-05, "loss": 0.5364, "step": 113 },
    { "epoch": 0.74, "learning_rate": 9.692453036347351e-05, "loss": 0.6641, "step": 114 },
    { "epoch": 0.74, "learning_rate": 9.686994891752508e-05, "loss": 0.5447, "step": 115 },
    { "epoch": 0.75, "learning_rate": 9.681490302818874e-05, "loss": 0.9975, "step": 116 },
    { "epoch": 0.75, "learning_rate": 9.675939324092486e-05, "loss": 0.6382, "step": 117 },
    { "epoch": 0.76, "learning_rate": 9.670342010579065e-05, "loss": 0.7098, "step": 118 },
    { "epoch": 0.77, "learning_rate": 9.664698417743475e-05, "loss": 0.6094, "step": 119 },
    { "epoch": 0.77, "learning_rate": 9.659008601509168e-05, "loss": 0.5387, "step": 120 },
    { "epoch": 0.78, "learning_rate": 9.653272618257631e-05, "loss": 0.6442, "step": 121 },
    { "epoch": 0.79, "learning_rate": 9.647490524827834e-05, "loss": 0.5843, "step": 122 },
    { "epoch": 0.79, "learning_rate": 9.641662378515659e-05, "loss": 0.6625, "step": 123 },
    { "epoch": 0.8, "learning_rate": 9.635788237073334e-05, "loss": 0.447, "step": 124 },
    { "epoch": 0.81, "learning_rate": 9.629868158708861e-05, "loss": 0.4932, "step": 125 },
    { "epoch": 0.81, "learning_rate": 9.623902202085444e-05, "loss": 0.6485, "step": 126 },
    { "epoch": 0.82, "learning_rate": 9.617890426320899e-05, "loss": 0.4969, "step": 127 },
    { "epoch": 0.83, "learning_rate": 9.611832890987076e-05, "loss": 0.5679, "step": 128 },
    { "epoch": 0.83, "learning_rate": 9.605729656109265e-05, "loss": 0.7879, "step": 129 },
    { "epoch": 0.84, "learning_rate": 9.599580782165598e-05, "loss": 0.6588, "step": 130 },
    { "epoch": 0.85, "learning_rate": 9.593386330086458e-05, "loss": 0.7059, "step": 131 },
    { "epoch": 0.85, "learning_rate": 9.587146361253868e-05, "loss": 0.625, "step": 132 },
    { "epoch": 0.86, "learning_rate": 9.580860937500884e-05, "loss": 0.6958, "step": 133 },
    { "epoch": 0.86, "learning_rate": 9.57453012111099e-05, "loss": 0.6816, "step": 134 },
    { "epoch": 0.87, "learning_rate": 9.568153974817464e-05, "loss": 0.5775, "step": 135 },
    { "epoch": 0.88, "learning_rate": 9.561732561802778e-05, "loss": 0.5051, "step": 136 },
    { "epoch": 0.88, "learning_rate": 9.555265945697953e-05, "loss": 0.5025, "step": 137 },
    { "epoch": 0.89, "learning_rate": 9.548754190581939e-05, "loss": 0.5407, "step": 138 },
    { "epoch": 0.9, "learning_rate": 9.542197360980978e-05, "loss": 0.5462, "step": 139 },
    { "epoch": 0.9, "learning_rate": 9.53559552186796e-05, "loss": 0.6505, "step": 140 },
    { "epoch": 0.91, "learning_rate": 9.528948738661784e-05, "loss": 0.5869, "step": 141 },
    { "epoch": 0.92, "learning_rate": 9.522257077226717e-05, "loss": 0.5047, "step": 142 },
    { "epoch": 0.92, "learning_rate": 9.51552060387172e-05, "loss": 0.7094, "step": 143 },
    { "epoch": 0.93, "learning_rate": 9.508739385349812e-05, "loss": 0.6079, "step": 144 },
    { "epoch": 0.94, "learning_rate": 9.501913488857399e-05, "loss": 0.5941, "step": 145 },
    { "epoch": 0.94, "learning_rate": 9.49504298203361e-05, "loss": 0.5613, "step": 146 },
    { "epoch": 0.95, "learning_rate": 9.488127932959625e-05, "loss": 0.693, "step": 147 },
    { "epoch": 0.95, "learning_rate": 9.481168410158003e-05, "loss": 0.6659, "step": 148 },
    { "epoch": 0.96, "learning_rate": 9.474164482592002e-05, "loss": 0.8608, "step": 149 },
    { "epoch": 0.97, "learning_rate": 9.467116219664894e-05, "loss": 0.743, "step": 150 },
    { "epoch": 0.97, "learning_rate": 9.460023691219277e-05, "loss": 0.7069, "step": 151 },
    { "epoch": 0.98, "learning_rate": 9.45288696753639e-05, "loss": 0.6795, "step": 152 },
    { "epoch": 0.99, "learning_rate": 9.445706119335407e-05, "loss": 0.4837, "step": 153 },
    { "epoch": 0.99, "learning_rate": 9.438481217772744e-05, "loss": 0.6796, "step": 154 },
    { "epoch": 1.0, "learning_rate": 9.431212334441343e-05, "loss": 0.5442, "step": 155 },
    { "epoch": 1.01, "learning_rate": 9.423899541369978e-05, "loss": 0.4109, "step": 156 },
    { "epoch": 1.01, "learning_rate": 9.41654291102253e-05, "loss": 0.5735, "step": 157 },
    { "epoch": 1.02, "learning_rate": 9.409142516297269e-05, "loss": 0.5028, "step": 158 },
    { "epoch": 1.03, "learning_rate": 9.401698430526142e-05, "loss": 0.4985, "step": 159 },
    { "epoch": 1.03, "learning_rate": 9.394210727474028e-05, "loss": 0.5741, "step": 160 },
    { "epoch": 1.04, "learning_rate": 9.386679481338033e-05, "loss": 0.5671, "step": 161 },
    { "epoch": 1.05, "learning_rate": 9.379104766746722e-05, "loss": 0.5839, "step": 162 },
    { "epoch": 1.05, "learning_rate": 9.371486658759416e-05, "loss": 0.5261, "step": 163 },
    { "epoch": 1.06, "learning_rate": 9.363825232865413e-05, "loss": 0.474, "step": 164 },
    { "epoch": 1.06, "learning_rate": 9.356120564983266e-05, "loss": 0.5641, "step": 165 },
    { "epoch": 1.07, "learning_rate": 9.348372731460023e-05, "loss": 0.5222, "step": 166 },
    { "epoch": 1.08, "learning_rate": 9.340581809070459e-05, "loss": 0.562, "step": 167 },
    { "epoch": 1.08, "learning_rate": 9.332747875016332e-05, "loss": 0.5875, "step": 168 },
    { "epoch": 1.09, "learning_rate": 9.324871006925613e-05, "loss": 0.4605, "step": 169 },
    { "epoch": 1.1, "learning_rate": 9.316951282851707e-05, "loss": 0.5808, "step": 170 },
    { "epoch": 1.1, "learning_rate": 9.308988781272694e-05, "loss": 0.6235, "step": 171 },
    { "epoch": 1.11, "learning_rate": 9.300983581090541e-05, "loss": 0.684, "step": 172 },
    { "epoch": 1.12, "learning_rate": 9.292935761630326e-05, "loss": 0.4411, "step": 173 },
    { "epoch": 1.12, "learning_rate": 9.284845402639446e-05, "loss": 0.5228, "step": 174 },
    { "epoch": 1.13, "learning_rate": 9.276712584286833e-05, "loss": 0.8171, "step": 175 },
    { "epoch": 1.14, "learning_rate": 9.26853738716216e-05, "loss": 0.5837, "step": 176 },
    { "epoch": 1.14, "learning_rate": 9.260319892275034e-05, "loss": 0.4187, "step": 177 },
    { "epoch": 1.15, "learning_rate": 9.2520601810542e-05, "loss": 0.4252, "step": 178 },
    { "epoch": 1.15, "learning_rate": 9.243758335346735e-05, "loss": 0.5112, "step": 179 },
    { "epoch": 1.16, "learning_rate": 9.235414437417234e-05, "loss": 0.5455, "step": 180 },
    { "epoch": 1.17, "learning_rate": 9.227028569946996e-05, "loss": 0.668, "step": 181 },
    { "epoch": 1.17, "learning_rate": 9.2186008160332e-05, "loss": 0.4831, "step": 182 },
    { "epoch": 1.18, "learning_rate": 9.210131259188095e-05, "loss": 0.4275, "step": 183 },
    { "epoch": 1.19, "learning_rate": 9.201619983338153e-05, "loss": 0.852, "step": 184 },
    { "epoch": 1.19, "learning_rate": 9.193067072823251e-05, "loss": 0.5029, "step": 185 },
    { "epoch": 1.2, "learning_rate": 9.18447261239584e-05, "loss": 0.534, "step": 186 },
    { "epoch": 1.21, "learning_rate": 9.175836687220084e-05, "loss": 0.5727, "step": 187 },
    { "epoch": 1.21, "learning_rate": 9.167159382871039e-05, "loss": 0.5095, "step": 188 },
    { "epoch": 1.22, "learning_rate": 9.15844078533379e-05, "loss": 0.674, "step": 189 },
    { "epoch": 1.23, "learning_rate": 9.149680981002609e-05, "loss": 0.4672, "step": 190 },
    { "epoch": 1.23, "learning_rate": 9.140880056680088e-05, "loss": 0.578, "step": 191 },
    { "epoch": 1.24, "learning_rate": 9.13203809957629e-05, "loss": 0.5875, "step": 192 },
    { "epoch": 1.25, "learning_rate": 9.123155197307876e-05, "loss": 0.5596, "step": 193 },
    { "epoch": 1.25, "learning_rate": 9.114231437897244e-05, "loss": 0.5782, "step": 194 },
    { "epoch": 1.26, "learning_rate": 9.105266909771653e-05, "loss": 0.4072, "step": 195 },
    { "epoch": 1.26, "learning_rate": 9.096261701762342e-05, "loss": 0.4472, "step": 196 },
    { "epoch": 1.27, "learning_rate": 9.087215903103662e-05, "loss": 0.4525, "step": 197 },
    { "epoch": 1.28, "learning_rate": 9.078129603432181e-05, "loss": 0.4294, "step": 198 },
    { "epoch": 1.28, "learning_rate": 9.069002892785797e-05, "loss": 0.49, "step": 199 },
    { "epoch": 1.29, "learning_rate": 9.059835861602853e-05, "loss": 0.6561, "step": 200 },
    { "epoch": 1.3, "learning_rate": 9.050628600721234e-05, "loss": 0.6434, "step": 201 },
    { "epoch": 1.3, "learning_rate": 9.041381201377468e-05, "loss": 0.5954, "step": 202 },
    { "epoch": 1.31, "learning_rate": 9.032093755205822e-05, "loss": 0.5783, "step": 203 },
    { "epoch": 1.32, "learning_rate": 9.0227663542374e-05, "loss": 0.7083, "step": 204 },
    { "epoch": 1.32, "learning_rate": 9.013399090899217e-05, "loss": 0.4703, "step": 205 },
    { "epoch": 1.33, "learning_rate": 9.003992058013302e-05, "loss": 0.5067, "step": 206 },
    { "epoch": 1.34, "learning_rate": 8.994545348795759e-05, "loss": 0.5962, "step": 207 },
    { "epoch": 1.34, "learning_rate": 8.985059056855858e-05, "loss": 0.4955, "step": 208 },
    { "epoch": 1.35, "learning_rate": 8.975533276195102e-05, "loss": 0.5224, "step": 209 },
    { "epoch": 1.35, "learning_rate": 8.965968101206291e-05, "loss": 0.4446, "step": 210 },
    { "epoch": 1.36, "learning_rate": 8.956363626672595e-05, "loss": 0.4902, "step": 211 },
    { "epoch": 1.37, "learning_rate": 8.94671994776661e-05, "loss": 0.466, "step": 212 },
    { "epoch": 1.37, "learning_rate": 8.937037160049416e-05, "loss": 0.4933, "step": 213 },
    { "epoch": 1.38, "learning_rate": 8.927315359469626e-05, "loss": 0.542, "step": 214 },
    { "epoch": 1.39, "learning_rate": 8.917554642362443e-05, "loss": 0.5227, "step": 215 },
    { "epoch": 1.39, "learning_rate": 8.907755105448704e-05, "loss": 0.5783, "step": 216 },
    { "epoch": 1.4, "learning_rate": 8.89791684583391e-05, "loss": 0.509, "step": 217 },
    { "epoch": 1.41, "learning_rate": 8.888039961007282e-05, "loss": 0.4139, "step": 218 },
    { "epoch": 1.41, "learning_rate": 8.87812454884078e-05, "loss": 0.4373, "step": 219 },
    { "epoch": 1.42, "learning_rate": 8.868170707588142e-05, "loss": 0.5913, "step": 220 },
    { "epoch": 1.43, "learning_rate": 8.858178535883905e-05, "loss": 0.564, "step": 221 },
    { "epoch": 1.43, "learning_rate": 8.848148132742431e-05, "loss": 0.5669, "step": 222 },
    { "epoch": 1.44, "learning_rate": 8.838079597556925e-05, "loss": 0.4491, "step": 223 },
    { "epoch": 1.45, "learning_rate": 8.827973030098448e-05, "loss": 0.4236, "step": 224 },
    { "epoch": 1.45, "learning_rate": 8.81782853051493e-05, "loss": 0.4449, "step": 225 },
    { "epoch": 1.46, "learning_rate": 8.807646199330187e-05, "loss": 0.5031, "step": 226 },
    { "epoch": 1.46, "learning_rate": 8.797426137442897e-05, "loss": 0.5822, "step": 227 },
    { "epoch": 1.47, "learning_rate": 8.787168446125638e-05, "loss": 0.4124, "step": 228 },
    { "epoch": 1.48, "learning_rate": 8.776873227023852e-05, "loss": 0.5732, "step": 229 },
    { "epoch": 1.48, "learning_rate": 8.766540582154859e-05, "loss": 0.5132, "step": 230 },
    { "epoch": 1.49, "learning_rate": 8.756170613906833e-05, "loss": 0.5151, "step": 231 },
    { "epoch": 1.5, "learning_rate": 8.745763425037797e-05, "loss": 0.5334, "step": 232 },
    { "epoch": 1.5, "learning_rate": 8.735319118674596e-05, "loss": 0.5812, "step": 233 },
    { "epoch": 1.51, "learning_rate": 8.724837798311882e-05, "loss": 0.4281, "step": 234 },
    { "epoch": 1.52, "learning_rate": 8.714319567811088e-05, "loss": 0.5254, "step": 235 },
    { "epoch": 1.52, "learning_rate": 8.703764531399392e-05, "loss": 0.5139, "step": 236 },
    { "epoch": 1.53, "learning_rate": 8.69317279366869e-05, "loss": 0.5656, "step": 237 },
    { "epoch": 1.54, "learning_rate": 8.682544459574562e-05, "loss": 0.4704, "step": 238 },
    { "epoch": 1.54, "learning_rate": 8.671879634435224e-05, "loss": 0.4704, "step": 239 },
    { "epoch": 1.55, "learning_rate": 8.661178423930491e-05, "loss": 0.4682, "step": 240 },
    { "epoch": 1.55, "learning_rate": 8.650440934100728e-05, "loss": 0.516, "step": 241 },
    { "epoch": 1.56, "learning_rate": 8.639667271345798e-05, "loss": 0.5812, "step": 242 },
    { "epoch": 1.57, "learning_rate": 8.628857542424009e-05, "loss": 0.6955, "step": 243 },
    { "epoch": 1.57, "learning_rate": 8.618011854451056e-05, "loss": 0.4637, "step": 244 },
    { "epoch": 1.58, "learning_rate": 8.607130314898956e-05, "loss": 0.5001, "step": 245 },
    { "epoch": 1.59, "learning_rate": 8.596213031594991e-05, "loss": 0.5324, "step": 246 },
    { "epoch": 1.59, "learning_rate": 8.585260112720631e-05, "loss": 0.9362, "step": 247 },
    { "epoch": 1.6, "learning_rate": 8.57427166681047e-05, "loss": 0.4641, "step": 248 },
    { "epoch": 1.61, "learning_rate": 8.56324780275114e-05, "loss": 0.4814, "step": 249 },
    { "epoch": 1.61, "learning_rate": 8.552188629780244e-05, "loss": 0.5503, "step": 250 },
    { "epoch": 1.62, "learning_rate": 8.541094257485265e-05, "loss": 0.4649, "step": 251 },
    { "epoch": 1.63, "learning_rate": 8.529964795802485e-05, "loss": 0.3803, "step": 252 },
    { "epoch": 1.63, "learning_rate": 8.518800355015892e-05, "loss": 0.4112, "step": 253 },
    { "epoch": 1.64, "learning_rate": 8.507601045756085e-05, "loss": 0.4634, "step": 254 },
    { "epoch": 1.65, "learning_rate": 8.49636697899919e-05, "loss": 0.5406, "step": 255 },
    { "epoch": 1.65, "learning_rate": 8.485098266065744e-05, "loss": 0.5009, "step": 256 },
    { "epoch": 1.66, "learning_rate": 8.473795018619604e-05, "loss": 0.5179, "step": 257 },
    { "epoch": 1.66, "learning_rate": 8.462457348666835e-05, "loss": 0.4948, "step": 258 },
    { "epoch": 1.67, "learning_rate": 8.4510853685546e-05, "loss": 0.4825, "step": 259 },
    { "epoch": 1.68, "learning_rate": 8.439679190970052e-05, "loss": 0.6783, "step": 260 },
    { "epoch": 1.68, "learning_rate": 8.428238928939207e-05, "loss": 0.5107, "step": 261 },
    { "epoch": 1.69, "learning_rate": 8.416764695825835e-05, "loss": 0.6285, "step": 262 },
    { "epoch": 1.7, "learning_rate": 8.405256605330331e-05, "loss": 0.5409, "step": 263 },
    { "epoch": 1.7, "learning_rate": 8.39371477148859e-05, "loss": 0.4913, "step": 264 },
    { "epoch": 1.71, "learning_rate": 8.382139308670875e-05, "loss": 0.4905, "step": 265 },
    { "epoch": 1.72, "learning_rate": 8.370530331580686e-05, "loss": 0.5521, "step": 266 },
    { "epoch": 1.72, "learning_rate": 8.35888795525362e-05, "loss": 0.6787, "step": 267 },
    { "epoch": 1.73, "learning_rate": 8.347212295056239e-05, "loss": 0.6243, "step": 268 },
    { "epoch": 1.74, "learning_rate": 8.335503466684915e-05, "loss": 0.5954, "step": 269 },
    { "epoch": 1.74, "learning_rate": 8.323761586164695e-05, "loss": 0.4916, "step": 270 },
    { "epoch": 1.75, "learning_rate": 8.311986769848141e-05, "loss": 0.4462, "step": 271 },
    { "epoch": 1.75, "learning_rate": 8.300179134414188e-05, "loss": 0.5472, "step": 272 },
    { "epoch": 1.76, "learning_rate": 8.288338796866976e-05, "loss": 0.5206, "step": 273 },
    { "epoch": 1.77, "learning_rate": 8.276465874534702e-05, "loss": 0.541, "step": 274 },
    { "epoch": 1.77, "learning_rate": 8.264560485068446e-05, "loss": 0.5823, "step": 275 },
    { "epoch": 1.78, "learning_rate": 8.252622746441021e-05, "loss": 0.5128, "step": 276 },
    { "epoch": 1.79, "learning_rate": 8.240652776945781e-05, "loss": 0.4968, "step": 277 },
    { "epoch": 1.79, "learning_rate": 8.228650695195472e-05, "loss": 0.6532, "step": 278 },
    { "epoch": 1.8, "learning_rate": 8.216616620121043e-05, "loss": 0.3638, "step": 279 },
    { "epoch": 1.81, "learning_rate": 8.204550670970469e-05, "loss": 0.6636, "step": 280 },
    { "epoch": 1.81, "learning_rate": 8.192452967307576e-05, "loss": 0.5279, "step": 281 },
    { "epoch": 1.82, "learning_rate": 8.180323629010848e-05, "loss": 0.4919, "step": 282 },
    { "epoch": 1.83, "learning_rate": 8.168162776272244e-05, "loss": 0.4528, "step": 283 },
    { "epoch": 1.83, "learning_rate": 8.155970529596006e-05, "loss": 0.5357, "step": 284 },
    { "epoch": 1.84, "learning_rate": 8.143747009797464e-05, "loss": 0.5333, "step": 285 },
    { "epoch": 1.85, "learning_rate": 8.131492338001839e-05, "loss": 0.4751, "step": 286 },
    { "epoch": 1.85, "learning_rate": 8.119206635643045e-05, "loss": 0.5092, "step": 287 },
    { "epoch": 1.86, "learning_rate": 8.106890024462481e-05, "loss": 0.6139, "step": 288 },
    { "epoch": 1.86, "learning_rate": 8.094542626507828e-05, "loss": 0.4858, "step": 289 },
    { "epoch": 1.87, "learning_rate": 8.082164564131845e-05, "loss": 0.5627, "step": 290 },
    { "epoch": 1.88, "learning_rate": 8.069755959991142e-05, "loss": 0.9165, "step": 291 },
    { "epoch": 1.88, "learning_rate": 8.057316937044977e-05, "loss": 0.6534, "step": 292 },
    { "epoch": 1.89, "learning_rate": 8.044847618554034e-05, "loss": 0.4697, "step": 293 },
    { "epoch": 1.9, "learning_rate": 8.032348128079203e-05, "loss": 0.5343, "step": 294 },
    { "epoch": 1.9, "learning_rate": 8.019818589480352e-05, "loss": 0.5272, "step": 295 },
    { "epoch": 1.91, "learning_rate": 8.0072591269151e-05, "loss": 0.4909, "step": 296 },
    { "epoch": 1.92, "learning_rate": 7.994669864837594e-05, "loss": 0.5913, "step": 297 },
    { "epoch": 1.92, "learning_rate": 7.982050927997264e-05, "loss": 0.4666, "step": 298 },
    { "epoch": 1.93, "learning_rate": 7.969402441437594e-05, "loss": 0.6011, "step": 299 },
    { "epoch": 1.94, "learning_rate": 7.956724530494887e-05, "loss": 0.6574, "step": 300 },
    { "epoch": 1.94, "learning_rate": 7.944017320797013e-05, "loss": 0.6198, "step": 301 },
    { "epoch": 1.95, "learning_rate": 7.931280938262169e-05, "loss": 0.5228, "step": 302 },
    { "epoch": 1.95, "learning_rate": 7.918515509097634e-05, "loss": 0.5375, "step": 303 },
    { "epoch": 1.96, "learning_rate": 7.905721159798513e-05, "loss": 0.6888, "step": 304 },
    { "epoch": 1.97, "learning_rate": 7.89289801714649e-05, "loss": 0.4663, "step": 305 },
    { "epoch": 1.97, "learning_rate": 7.880046208208563e-05, "loss": 0.6382, "step": 306 },
    { "epoch": 1.98, "learning_rate": 7.867165860335792e-05, "loss": 0.5565, "step": 307 },
    { "epoch": 1.99, "learning_rate": 7.854257101162037e-05, "loss": 0.4628, "step": 308 },
    { "epoch": 1.99, "learning_rate": 7.841320058602688e-05, "loss": 0.6364, "step": 309 },
    { "epoch": 2.0, "learning_rate": 7.828354860853399e-05, "loss": 0.4293, "step": 310 },
    { "epoch": 2.01, "learning_rate": 7.815361636388827e-05, "loss": 0.3665, "step": 311 },
    { "epoch": 2.01, "learning_rate": 7.802340513961342e-05, "loss": 0.3727, "step": 312 },
    { "epoch": 2.02, "learning_rate": 7.789291622599767e-05, "loss": 0.3468, "step": 313 },
    { "epoch": 2.03, "learning_rate": 7.776215091608085e-05, "loss": 0.4416, "step": 314 },
    { "epoch": 2.03, "learning_rate": 7.763111050564178e-05, "loss": 0.3674, "step": 315 },
    { "epoch": 2.04, "learning_rate": 7.749979629318516e-05, "loss": 0.3358, "step": 316 },
    { "epoch": 2.05, "learning_rate": 7.736820957992895e-05, "loss": 0.4428, "step": 317 },
    { "epoch": 2.05, "learning_rate": 7.723635166979133e-05, "loss": 0.3428, "step": 318 },
    { "epoch": 2.06, "learning_rate": 7.710422386937784e-05, "loss": 0.3407, "step": 319 },
    { "epoch": 2.06, "learning_rate": 7.697182748796841e-05, "loss": 0.3479, "step": 320 },
    { "epoch": 2.07, "learning_rate": 7.683916383750436e-05, "loss": 0.341, "step": 321 },
    { "epoch": 2.08, "learning_rate": 7.670623423257548e-05, "loss": 0.3725, "step": 322 },
    { "epoch": 2.08, "learning_rate": 7.657303999040693e-05, "loss": 0.3185, "step": 323 },
    { "epoch": 2.09, "learning_rate": 7.64395824308462e-05, "loss": 0.3545, "step": 324 },
    { "epoch": 2.1, "learning_rate": 7.630586287635008e-05, "loss": 0.4408, "step": 325 },
    { "epoch": 2.1, "learning_rate": 7.617188265197148e-05, "loss": 0.3995, "step": 326 },
    { "epoch": 2.11, "learning_rate": 7.603764308534636e-05, "loss": 0.4507, "step": 327 },
    { "epoch": 2.12, "learning_rate": 7.590314550668054e-05, "loss": 0.3594, "step": 328 },
    { "epoch": 2.12, "learning_rate": 7.576839124873653e-05, "loss": 0.2748, "step": 329 },
    { "epoch": 2.13, "learning_rate": 7.563338164682036e-05, "loss": 0.3803, "step": 330 },
    { "epoch": 2.14, "learning_rate": 7.549811803876825e-05, "loss": 0.3374, "step": 331 },
    { "epoch": 2.14, "learning_rate": 7.536260176493348e-05, "loss": 0.3064, "step": 332 },
    { "epoch": 2.15, "learning_rate": 7.5226834168173e-05, "loss": 0.3877, "step": 333 },
    { "epoch": 2.15, "learning_rate": 7.509081659383417e-05, "loss": 0.4996, "step": 334 },
    { "epoch": 2.16, "learning_rate": 7.495455038974146e-05, "loss": 0.3529, "step": 335 },
    { "epoch": 2.17, "learning_rate": 7.481803690618304e-05, "loss": 0.3439, "step": 336 },
    { "epoch": 2.17, "learning_rate": 7.46812774958974e-05, "loss": 0.2838, "step": 337 },
    { "epoch": 2.18, "learning_rate": 7.454427351405999e-05, "loss": 0.3637, "step": 338 },
    { "epoch": 2.19, "learning_rate": 7.440702631826977e-05, "loss": 0.4256, "step": 339 },
    { "epoch": 2.19, "learning_rate": 7.426953726853574e-05, "loss": 0.3401, "step": 340 },
    { "epoch": 2.2, "learning_rate": 7.413180772726348e-05, "loss": 0.3433, "step": 341 },
    { "epoch": 2.21, "learning_rate": 7.399383905924165e-05, "loss": 0.4915, "step": 342 },
    { "epoch": 2.21, "learning_rate": 7.385563263162847e-05, "loss": 0.3406, "step": 343 },
    { "epoch": 2.22, "learning_rate": 7.371718981393815e-05, "loss": 0.3037, "step": 344 },
    { "epoch": 2.23, "learning_rate": 7.357851197802735e-05, "loss": 0.5157, "step": 345 },
    { "epoch": 2.23, "learning_rate": 7.343960049808156e-05, "loss": 0.4369, "step": 346 },
    { "epoch": 2.24, "learning_rate": 7.330045675060149e-05, "loss": 0.3087, "step": 347 },
    { "epoch": 2.25, "learning_rate": 7.316108211438945e-05, "loss": 0.3795, "step": 348 },
    { "epoch": 2.25, "learning_rate": 7.302147797053569e-05, "loss": 0.3669, "step": 349 },
    { "epoch": 2.26, "learning_rate": 7.288164570240463e-05, "loss": 0.6638, "step": 350 },
    { "epoch": 2.26, "learning_rate": 7.274158669562126e-05, "loss": 0.3923, "step": 351 },
    { "epoch": 2.27, "learning_rate": 7.26013023380574e-05, "loss": 0.3798, "step": 352 },
    { "epoch": 2.28, "learning_rate": 7.246079401981784e-05, "loss": 0.439, "step": 353 },
    { "epoch": 2.28, "learning_rate": 7.232006313322667e-05, "loss": 0.431, "step": 354 },
    { "epoch": 2.29, "learning_rate": 7.217911107281352e-05, "loss": 0.3676, "step": 355 },
    { "epoch": 2.3, "learning_rate": 7.203793923529956e-05, "loss": 0.5888, "step": 356 },
    { "epoch": 2.3, "learning_rate": 7.189654901958385e-05, "loss": 0.5541, "step": 357 },
    { "epoch": 2.31, "learning_rate": 7.175494182672939e-05, "loss": 0.3824, "step": 358 },
    { "epoch": 2.32, "learning_rate": 7.161311905994922e-05, "loss": 0.342, "step": 359 },
    { "epoch": 2.32, "learning_rate": 7.147108212459257e-05, "loss": 0.4249, "step": 360 },
    { "epoch": 2.33, "learning_rate": 7.13288324281309e-05, "loss": 0.3621, "step": 361 },
    { "epoch": 2.34, "learning_rate": 7.118637138014396e-05, "loss": 0.3147, "step": 362 },
    { "epoch": 2.34, "learning_rate": 7.104370039230583e-05, "loss": 0.4188, "step": 363 },
    { "epoch": 2.35, "learning_rate": 7.090082087837092e-05, "loss": 0.7176, "step": 364 },
    { "epoch": 2.35, "learning_rate": 7.075773425415994e-05, "loss": 0.3622, "step": 365 },
    { "epoch": 2.36, "learning_rate": 7.061444193754596e-05, "loss": 0.3533, "step": 366 },
    { "epoch": 2.37, "learning_rate": 7.047094534844023e-05, "loss": 0.2945, "step": 367 },
    { "epoch": 2.37, "learning_rate": 7.032724590877821e-05, "loss": 0.3296, "step": 368 },
    { "epoch": 2.38, "learning_rate": 7.018334504250545e-05, "loss": 0.3552, "step": 369 },
    { "epoch": 2.39, "learning_rate": 7.003924417556343e-05, "loss": 0.3736, "step": 370 },
    { "epoch": 2.39, "learning_rate": 6.989494473587554e-05, "loss": 0.3778, "step": 371 },
    { "epoch": 2.4, "learning_rate": 6.975044815333282e-05, "loss": 0.3759, "step": 372 },
    { "epoch": 2.41, "learning_rate": 6.960575585977984e-05, "loss": 0.6022, "step": 373 },
    { "epoch": 2.41, "learning_rate": 6.946086928900054e-05, "loss": 0.4204, "step": 374 },
    { "epoch": 2.42, "learning_rate": 6.931578987670396e-05, "loss": 0.3803, "step": 375 },
    { "epoch": 2.43, "learning_rate": 6.917051906051006e-05, "loss": 0.324, "step": 376 },
    { "epoch": 2.43, "learning_rate": 6.902505827993541e-05, "loss": 0.3164, "step": 377 },
    { "epoch": 2.44, "learning_rate": 6.887940897637908e-05, "loss": 0.3777, "step": 378 },
    { "epoch": 2.45, "learning_rate": 6.873357259310815e-05, "loss": 0.3591, "step": 379 },
    { "epoch": 2.45, "learning_rate": 6.858755057524354e-05, "loss": 0.4109, "step": 380 },
    { "epoch": 2.46, "learning_rate": 6.844134436974567e-05, "loss": 0.5666, "step": 381 },
    { "epoch": 2.46, "learning_rate": 6.829495542540013e-05, "loss": 0.4039, "step": 382 },
    { "epoch": 2.47, "learning_rate": 6.814838519280324e-05, "loss": 0.2895, "step": 383 },
    { "epoch": 2.48, "learning_rate": 6.80016351243478e-05, "loss": 0.4687, "step": 384 },
    { "epoch": 2.48, "learning_rate": 6.785470667420862e-05, "loss": 0.4379, "step": 385 },
    { "epoch": 2.49, "learning_rate": 6.77076012983281e-05, "loss": 0.4803, "step": 386 },
    { "epoch": 2.5, "learning_rate": 6.75603204544019e-05, "loss": 0.3904, "step": 387 },
    { "epoch": 2.5, "learning_rate": 6.741286560186437e-05, "loss": 0.4808, "step": 388 },
    { "epoch": 2.51, "learning_rate": 6.726523820187413e-05, "loss": 0.2799, "step": 389 },
    { "epoch": 2.52, "learning_rate": 6.711743971729967e-05, "loss": 0.4341, "step": 390 },
    { "epoch": 2.52, "learning_rate": 6.696947161270476e-05, "loss": 0.3893, "step": 391 },
    { "epoch": 2.53, "learning_rate": 6.682133535433393e-05, "loss": 0.469, "step": 392 },
    { "epoch": 2.54, "learning_rate": 6.667303241009803e-05, "loss": 0.298, "step": 393 },
    { "epoch": 2.54, "learning_rate": 6.652456424955963e-05, "loss": 0.4633, "step": 394 },
    { "epoch": 2.55, "learning_rate": 6.637593234391843e-05, "loss": 0.2929, "step": 395 },
    { "epoch": 2.55, "learning_rate": 6.622713816599673e-05, "loss": 0.4443, "step": 396 },
    { "epoch": 2.56, "learning_rate": 6.60781831902248e-05, "loss": 0.454, "step": 397 },
    { "epoch": 2.57, "learning_rate": 6.592906889262632e-05, "loss": 0.2841, "step": 398 },
    { "epoch": 2.57, "learning_rate": 6.577979675080369e-05, "loss": 0.292, "step": 399 },
    { "epoch": 2.58, "learning_rate": 6.563036824392344e-05, "loss": 0.2953, "step": 400 },
    { "epoch": 2.59, "learning_rate": 6.548078485270152e-05, "loss": 0.4211, "step": 401 },
    { "epoch": 2.59, "learning_rate": 6.533104805938873e-05, "loss": 0.334, "step": 402 },
    { "epoch": 2.6, "learning_rate": 6.518115934775585e-05, "loss": 0.3343, "step": 403 },
    { "epoch": 2.61, "learning_rate": 6.503112020307916e-05, "loss": 0.5768, "step": 404 },
    { "epoch": 2.61, "learning_rate": 6.488093211212555e-05, "loss": 0.3803, "step": 405 },
    { "epoch": 2.62, "learning_rate": 6.473059656313782e-05, "loss": 0.3835, "step": 406 },
    { "epoch": 2.63, "learning_rate": 6.458011504582005e-05, "loss": 0.475, "step": 407 },
    { "epoch": 2.63, "learning_rate": 6.442948905132266e-05, "loss": 0.3802, "step": 408 },
    { "epoch": 2.64, "learning_rate": 6.427872007222777e-05, "loss": 0.44, "step": 409 },
    { "epoch": 2.65, "learning_rate": 6.412780960253436e-05, "loss": 0.3829, "step": 410 },
    { "epoch": 2.65, "learning_rate": 6.397675913764347e-05, "loss": 0.2887, "step": 411 },
    { "epoch": 2.66, "learning_rate": 6.382557017434332e-05, "loss": 0.3647, "step": 412 },
    { "epoch": 2.66, "learning_rate": 6.367424421079463e-05, "loss": 0.3898, "step": 413 },
    { "epoch": 2.67, "learning_rate": 6.352278274651561e-05, "loss": 0.3215, "step": 414 },
    { "epoch": 2.68, "learning_rate": 6.337118728236721e-05, "loss": 0.3979, "step": 415 },
    { "epoch": 2.68, "learning_rate": 6.321945932053822e-05, "loss": 0.3281, "step": 416 },
    { "epoch": 2.69, "learning_rate": 6.306760036453035e-05, "loss": 0.3366, "step": 417 },
    { "epoch": 2.7, "learning_rate": 6.291561191914333e-05, "loss": 0.3521, "step": 418 },
    { "epoch": 2.7, "learning_rate": 6.276349549046007e-05, "loss": 0.3278, "step": 419 },
    { "epoch": 2.71, "learning_rate": 6.261125258583171e-05, "loss": 0.3875, "step": 420 },
    { "epoch": 2.72, "learning_rate": 6.245888471386263e-05, "loss": 0.3578, "step": 421 },
    { "epoch": 2.72, "learning_rate": 6.230639338439549e-05, "loss": 0.3993, "step": 422 },
    { "epoch": 2.73, "learning_rate": 6.215378010849641e-05, "loss": 0.3705, "step": 423 },
    { "epoch": 2.74, "learning_rate": 6.200104639843985e-05, "loss": 0.3881, "step": 424 },
    { "epoch": 2.74, "learning_rate": 6.184819376769364e-05, "loss": 0.2901, "step": 425 },
    { "epoch": 2.75, "learning_rate": 6.169522373090412e-05, "loss": 0.4248, "step": 426 },
    { "epoch": 2.75, "learning_rate": 6.154213780388092e-05, "loss": 0.3594, "step": 427 },
    { "epoch": 2.76, "learning_rate": 6.138893750358212e-05, "loss": 0.3262, "step": 428 },
    { "epoch": 2.77, "learning_rate": 6.123562434809912e-05, "loss": 0.3315, "step": 429 },
    { "epoch": 2.77, "learning_rate": 6.108219985664161e-05, "loss": 0.3014, "step": 430 },
    { "epoch": 2.78, "learning_rate": 6.0928665549522554e-05, "loss": 0.394, "step": 431 },
    { "epoch": 2.79, "learning_rate": 6.0775022948143115e-05, "loss": 0.3497, "step": 432 },
    { "epoch": 2.79, "learning_rate": 6.06212735749775e-05, "loss": 0.3582, "step": 433 },
    { "epoch": 2.8, "learning_rate": 6.046741895355802e-05, "loss": 0.3075, "step": 434 },
    { "epoch": 2.81, "learning_rate": 6.031346060845986e-05, "loss": 0.4744, "step": 435 },
    { "epoch": 2.81, "learning_rate": 6.015940006528602e-05, "loss": 0.3455, "step": 436 },
    { "epoch": 2.82, "learning_rate": 6.0005238850652234e-05, "loss": 0.4062, "step": 437 },
    { "epoch": 2.83, "learning_rate": 5.9850978492171794e-05, "loss": 0.3979, "step": 438 },
    { "epoch": 2.83, "learning_rate": 5.96966205184404e-05, "loss": 0.425, "step": 439 },
    { "epoch": 2.84, "learning_rate": 5.954216645902109e-05, "loss": 0.4698, "step": 440 },
    { "epoch": 2.85, "learning_rate": 5.9387617844429e-05, "loss": 0.3623, "step": 441 },
    { "epoch": 2.85, "learning_rate": 5.923297620611623e-05, "loss": 0.344, "step": 442 },
    { "epoch": 2.86, "learning_rate": 5.907824307645669e-05, "loss": 0.3457, "step": 443 },
    { "epoch": 2.86, "learning_rate": 5.892341998873089e-05, "loss": 0.3953, "step": 444 },
    { "epoch": 2.87, "learning_rate": 5.876850847711073e-05, "loss": 0.2856, "step": 445 },
    { "epoch": 2.88, "learning_rate": 5.861351007664434e-05, "loss": 0.385, "step": 446 },
    { "epoch": 2.88, "learning_rate": 5.845842632324088e-05, "loss": 0.5248, "step": 447 },
    { "epoch": 2.89, "learning_rate": 5.83032587536552e-05, "loss": 0.3359, "step": 448 },
    { "epoch": 2.9, "learning_rate": 5.814800890547278e-05, "loss": 0.327, "step": 449 },
    { "epoch": 2.9, "learning_rate": 5.799267831709442e-05, "loss": 0.3425, "step": 450 },
    { "epoch": 2.91, "learning_rate": 5.78372685277209e-05, "loss": 0.3914, "step": 451 },
    { "epoch": 2.92, "learning_rate": 5.7681781077337905e-05, "loss": 0.3865, "step": 452 },
    { "epoch": 2.92, "learning_rate": 5.752621750670068e-05, "loss": 0.5089, "step": 453 },
    { "epoch": 2.93, "learning_rate": 5.737057935731868e-05, "loss": 0.4281, "step": 454 },
    { "epoch": 2.94, "learning_rate": 5.721486817144044e-05, "loss": 0.3415, "step": 455 },
    { "epoch": 2.94, "learning_rate": 5.705908549203823e-05, "loss": 0.334, "step": 456 },
    { "epoch": 2.95, "learning_rate": 5.690323286279274e-05, "loss": 0.4623, "step": 457 },
    { "epoch": 2.95, "learning_rate": 5.674731182807781e-05, "loss": 0.3411, "step": 458 },
    { "epoch": 2.96, "learning_rate": 5.659132393294514e-05, "loss": 0.3804, "step": 459 },
    { "epoch": 2.97, "learning_rate": 5.643527072310891e-05, "loss": 0.2749, "step": 460 },
    { "epoch": 2.97, "learning_rate": 5.627915374493061e-05, "loss": 0.3664, "step": 461 },
    { "epoch": 2.98, "learning_rate": 5.612297454540352e-05, "loss": 0.2758, "step": 462 },
    { "epoch": 2.99, "learning_rate": 5.596673467213756e-05, "loss": 0.3713, "step": 463 },
    { "epoch": 2.99, "learning_rate": 5.581043567334383e-05, "loss": 0.3681, "step": 464 },
    { "epoch": 3.0, "learning_rate": 5.5654079097819345e-05, "loss": 0.3195, "step": 465 },
    { "epoch": 3.01, "learning_rate": 5.5497666494931654e-05, "loss": 0.267, "step": 466 },
    { "epoch": 3.01, "learning_rate": 5.5341199414603493e-05, "loss": 0.2766, "step": 467 },
    { "epoch": 3.02, "learning_rate": 5.518467940729739e-05, "loss": 0.2919, "step": 468 },
    { "epoch": 3.03, "learning_rate": 5.502810802400039e-05, "loss": 0.2355, "step": 469 },
    { "epoch": 3.03, "learning_rate": 5.487148681620862e-05, "loss": 0.2233, "step": 470 },
    { "epoch": 3.04, "learning_rate": 5.4714817335911894e-05, "loss": 0.2188, "step": 471 },
    { "epoch": 3.05, "learning_rate": 5.455810113557839e-05, "loss": 0.2827, "step": 472 },
    { "epoch": 3.05, "learning_rate": 5.440133976813926e-05, "loss": 0.1987, "step": 473 },
    { "epoch": 3.06, "learning_rate": 5.4244534786973214e-05, "loss": 0.2357, "step": 474 },
    { "epoch": 3.06, "learning_rate": 5.40876877458911e-05, "loss": 0.1815, "step": 475 },
    { "epoch": 3.07, "learning_rate": 5.3930800199120616e-05, "loss": 0.1929, "step": 476 },
    { "epoch": 3.08, "learning_rate": 5.377387370129079e-05, "loss": 0.1879, "step": 477 },
    { "epoch": 3.08, "learning_rate": 5.361690980741663e-05, "loss": 0.2746, "step": 478 },
    { "epoch": 3.09, "learning_rate": 5.345991007288371e-05, "loss": 0.2047, "step": 479 },
    { "epoch": 3.1, "learning_rate": 5.330287605343279e-05, "loss": 0.183, "step": 480 },
    { "epoch": 3.1, "learning_rate": 5.314580930514431e-05, "loss": 0.1678, "step": 481 },
    { "epoch": 3.11, "learning_rate": 5.298871138442307e-05, "loss": 0.2222, "step": 482 },
    { "epoch": 3.12, "learning_rate": 5.283158384798275e-05, "loss": 0.2359, "step": 483 },
    { "epoch": 3.12, "learning_rate": 5.267442825283048e-05, "loss": 0.4228, "step": 484 },
    { "epoch": 3.13, "learning_rate": 5.2517246156251455e-05, "loss": 0.1614, "step": 485 },
    { "epoch": 3.14, "learning_rate": 5.236003911579345e-05, "loss": 0.2356, "step": 486 },
    { "epoch": 3.14, "learning_rate": 5.220280868925145e-05, "loss": 0.1942, "step": 487 },
| { | |
| "epoch": 3.15, | |
| "learning_rate": 5.204555643465215e-05, | |
| "loss": 0.2173, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 3.15, | |
| "learning_rate": 5.1888283910238555e-05, | |
| "loss": 0.2235, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 3.16, | |
| "learning_rate": 5.173099267445451e-05, | |
| "loss": 0.228, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 3.17, | |
| "learning_rate": 5.157368428592933e-05, | |
| "loss": 0.1412, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 3.17, | |
| "learning_rate": 5.1416360303462206e-05, | |
| "loss": 0.186, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 3.18, | |
| "learning_rate": 5.125902228600693e-05, | |
| "loss": 0.1846, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 3.19, | |
| "learning_rate": 5.110167179265636e-05, | |
| "loss": 0.1597, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 3.19, | |
| "learning_rate": 5.094431038262693e-05, | |
| "loss": 0.2345, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 3.2, | |
| "learning_rate": 5.078693961524329e-05, | |
| "loss": 0.2507, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 3.21, | |
| "learning_rate": 5.062956104992285e-05, | |
| "loss": 0.2224, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 3.21, | |
| "learning_rate": 5.0472176246160184e-05, | |
| "loss": 0.2293, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 3.22, | |
| "learning_rate": 5.031478676351179e-05, | |
| "loss": 0.251, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 3.23, | |
| "learning_rate": 5.01573941615805e-05, | |
| "loss": 0.2296, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 3.23, | |
| "learning_rate": 5e-05, | |
| "loss": 0.2347, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 3.24, | |
| "learning_rate": 4.984260583841953e-05, | |
| "loss": 0.2897, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 3.25, | |
| "learning_rate": 4.9685213236488216e-05, | |
| "loss": 0.2051, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 3.25, | |
| "learning_rate": 4.9527823753839834e-05, | |
| "loss": 0.2166, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 3.26, | |
| "learning_rate": 4.937043895007717e-05, | |
| "loss": 0.2186, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 3.26, | |
| "learning_rate": 4.9213060384756716e-05, | |
| "loss": 0.1655, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 3.27, | |
| "learning_rate": 4.9055689617373084e-05, | |
| "loss": 0.231, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 3.28, | |
| "learning_rate": 4.8898328207343666e-05, | |
| "loss": 0.1842, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 3.28, | |
| "learning_rate": 4.874097771399308e-05, | |
| "loss": 0.2285, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 3.29, | |
| "learning_rate": 4.858363969653781e-05, | |
| "loss": 0.205, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 3.3, | |
| "learning_rate": 4.8426315714070684e-05, | |
| "loss": 0.2345, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 3.3, | |
| "learning_rate": 4.8269007325545506e-05, | |
| "loss": 0.1897, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 3.31, | |
| "learning_rate": 4.8111716089761456e-05, | |
| "loss": 0.1877, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 3.32, | |
| "learning_rate": 4.7954443565347865e-05, | |
| "loss": 0.1964, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 3.32, | |
| "learning_rate": 4.779719131074857e-05, | |
| "loss": 0.2365, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 3.33, | |
| "learning_rate": 4.7639960884206576e-05, | |
| "loss": 0.1893, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 3.34, | |
| "learning_rate": 4.7482753843748564e-05, | |
| "loss": 0.1933, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 3.34, | |
| "learning_rate": 4.7325571747169545e-05, | |
| "loss": 0.2651, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 3.35, | |
| "learning_rate": 4.716841615201726e-05, | |
| "loss": 0.2011, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 3.35, | |
| "learning_rate": 4.7011288615576934e-05, | |
| "loss": 0.2375, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 3.36, | |
| "learning_rate": 4.6854190694855694e-05, | |
| "loss": 0.252, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 3.37, | |
| "learning_rate": 4.6697123946567227e-05, | |
| "loss": 0.192, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 3.37, | |
| "learning_rate": 4.65400899271163e-05, | |
| "loss": 0.1752, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 3.38, | |
| "learning_rate": 4.63830901925834e-05, | |
| "loss": 0.3051, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 3.39, | |
| "learning_rate": 4.6226126298709224e-05, | |
| "loss": 0.1545, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 3.39, | |
| "learning_rate": 4.60691998008794e-05, | |
| "loss": 0.2186, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 3.4, | |
| "learning_rate": 4.5912312254108905e-05, | |
| "loss": 0.3145, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 3.41, | |
| "learning_rate": 4.575546521302681e-05, | |
| "loss": 0.2099, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 3.41, | |
| "learning_rate": 4.5598660231860746e-05, | |
| "loss": 0.2375, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 3.42, | |
| "learning_rate": 4.544189886442162e-05, | |
| "loss": 0.2055, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 3.43, | |
| "learning_rate": 4.528518266408811e-05, | |
| "loss": 0.1854, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 3.43, | |
| "learning_rate": 4.5128513183791386e-05, | |
| "loss": 0.2062, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 3.44, | |
| "learning_rate": 4.49718919759996e-05, | |
| "loss": 0.1739, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 3.45, | |
| "learning_rate": 4.481532059270262e-05, | |
| "loss": 0.2428, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 3.45, | |
| "learning_rate": 4.465880058539652e-05, | |
| "loss": 0.2161, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 3.46, | |
| "learning_rate": 4.450233350506836e-05, | |
| "loss": 0.2911, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 3.46, | |
| "learning_rate": 4.4345920902180647e-05, | |
| "loss": 0.252, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 3.47, | |
| "learning_rate": 4.418956432665618e-05, | |
| "loss": 0.1989, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 3.48, | |
| "learning_rate": 4.403326532786245e-05, | |
| "loss": 0.1503, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 3.48, | |
| "learning_rate": 4.387702545459649e-05, | |
| "loss": 0.2055, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 3.49, | |
| "learning_rate": 4.3720846255069406e-05, | |
| "loss": 0.1626, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 3.5, | |
| "learning_rate": 4.356472927689109e-05, | |
| "loss": 0.1413, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 3.5, | |
| "learning_rate": 4.3408676067054866e-05, | |
| "loss": 0.2321, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 3.51, | |
| "learning_rate": 4.32526881719222e-05, | |
| "loss": 0.1704, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 3.52, | |
| "learning_rate": 4.3096767137207256e-05, | |
| "loss": 0.1724, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 3.52, | |
| "learning_rate": 4.2940914507961775e-05, | |
| "loss": 0.2181, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 3.53, | |
| "learning_rate": 4.278513182855956e-05, | |
| "loss": 0.2005, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 3.54, | |
| "learning_rate": 4.262942064268134e-05, | |
| "loss": 0.1698, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 3.54, | |
| "learning_rate": 4.247378249329933e-05, | |
| "loss": 0.1967, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 3.55, | |
| "learning_rate": 4.23182189226621e-05, | |
| "loss": 0.193, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 3.55, | |
| "learning_rate": 4.21627314722791e-05, | |
| "loss": 0.229, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 3.56, | |
| "learning_rate": 4.20073216829056e-05, | |
| "loss": 0.2073, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 3.57, | |
| "learning_rate": 4.185199109452721e-05, | |
| "loss": 0.2214, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 3.57, | |
| "learning_rate": 4.169674124634481e-05, | |
| "loss": 0.2383, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 3.58, | |
| "learning_rate": 4.1541573676759126e-05, | |
| "loss": 0.234, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 3.59, | |
| "learning_rate": 4.138648992335566e-05, | |
| "loss": 0.2308, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 3.59, | |
| "learning_rate": 4.12314915228893e-05, | |
| "loss": 0.2476, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 3.6, | |
| "learning_rate": 4.107658001126913e-05, | |
| "loss": 0.2285, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 3.61, | |
| "learning_rate": 4.092175692354333e-05, | |
| "loss": 0.2358, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 3.61, | |
| "learning_rate": 4.0767023793883785e-05, | |
| "loss": 0.2443, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 3.62, | |
| "learning_rate": 4.0612382155571026e-05, | |
| "loss": 0.1997, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 3.63, | |
| "learning_rate": 4.045783354097893e-05, | |
| "loss": 0.246, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 3.63, | |
| "learning_rate": 4.0303379481559623e-05, | |
| "loss": 0.2152, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 3.64, | |
| "learning_rate": 4.0149021507828224e-05, | |
| "loss": 0.1601, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 3.65, | |
| "learning_rate": 3.9994761149347784e-05, | |
| "loss": 0.1746, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 3.65, | |
| "learning_rate": 3.984059993471399e-05, | |
| "loss": 0.2204, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 3.66, | |
| "learning_rate": 3.968653939154016e-05, | |
| "loss": 0.2831, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 3.66, | |
| "learning_rate": 3.9532581046442e-05, | |
| "loss": 0.1808, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 3.67, | |
| "learning_rate": 3.937872642502252e-05, | |
| "loss": 0.1939, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 3.68, | |
| "learning_rate": 3.9224977051856904e-05, | |
| "loss": 0.2102, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 3.68, | |
| "learning_rate": 3.907133445047747e-05, | |
| "loss": 0.2231, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 3.69, | |
| "learning_rate": 3.8917800143358404e-05, | |
| "loss": 0.2068, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 3.7, | |
| "learning_rate": 3.8764375651900906e-05, | |
| "loss": 0.2311, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 3.7, | |
| "learning_rate": 3.861106249641789e-05, | |
| "loss": 0.2331, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 3.71, | |
| "learning_rate": 3.84578621961191e-05, | |
| "loss": 0.255, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 3.72, | |
| "learning_rate": 3.830477626909589e-05, | |
| "loss": 0.1869, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 3.72, | |
| "learning_rate": 3.8151806232306374e-05, | |
| "loss": 0.1965, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 3.73, | |
| "learning_rate": 3.7998953601560175e-05, | |
| "loss": 0.478, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 3.74, | |
| "learning_rate": 3.784621989150361e-05, | |
| "loss": 0.2627, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 3.74, | |
| "learning_rate": 3.769360661560453e-05, | |
| "loss": 0.2695, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 3.75, | |
| "learning_rate": 3.75411152861374e-05, | |
| "loss": 0.2186, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 3.75, | |
| "learning_rate": 3.73887474141683e-05, | |
| "loss": 0.1926, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 3.76, | |
| "learning_rate": 3.723650450953994e-05, | |
| "loss": 0.2855, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 3.77, | |
| "learning_rate": 3.708438808085668e-05, | |
| "loss": 0.2363, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 3.77, | |
| "learning_rate": 3.693239963546967e-05, | |
| "loss": 0.21, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 3.78, | |
| "learning_rate": 3.6780540679461784e-05, | |
| "loss": 0.2214, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 3.79, | |
| "learning_rate": 3.662881271763279e-05, | |
| "loss": 0.2023, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 3.79, | |
| "learning_rate": 3.64772172534844e-05, | |
| "loss": 0.2352, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 3.8, | |
| "learning_rate": 3.63257557892054e-05, | |
| "loss": 0.2042, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 3.81, | |
| "learning_rate": 3.6174429825656685e-05, | |
| "loss": 0.2864, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 3.81, | |
| "learning_rate": 3.602324086235655e-05, | |
| "loss": 0.1612, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 3.82, | |
| "learning_rate": 3.587219039746564e-05, | |
| "loss": 0.1933, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 3.83, | |
| "learning_rate": 3.572127992777223e-05, | |
| "loss": 0.344, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 3.83, | |
| "learning_rate": 3.557051094867735e-05, | |
| "loss": 0.1954, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 3.84, | |
| "learning_rate": 3.541988495417997e-05, | |
| "loss": 0.2301, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 3.85, | |
| "learning_rate": 3.5269403436862175e-05, | |
| "loss": 0.2151, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 3.85, | |
| "learning_rate": 3.511906788787447e-05, | |
| "loss": 0.2214, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 3.86, | |
| "learning_rate": 3.496887979692084e-05, | |
| "loss": 0.245, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 3.86, | |
| "learning_rate": 3.481884065224415e-05, | |
| "loss": 0.2341, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 3.87, | |
| "learning_rate": 3.466895194061128e-05, | |
| "loss": 0.2308, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 3.88, | |
| "learning_rate": 3.451921514729848e-05, | |
| "loss": 0.2219, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 3.88, | |
| "learning_rate": 3.436963175607656e-05, | |
| "loss": 0.2053, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 3.89, | |
| "learning_rate": 3.422020324919632e-05, | |
| "loss": 0.3216, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 3.9, | |
| "learning_rate": 3.4070931107373675e-05, | |
| "loss": 0.2443, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 3.9, | |
| "learning_rate": 3.39218168097752e-05, | |
| "loss": 0.2597, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 3.91, | |
| "learning_rate": 3.377286183400328e-05, | |
| "loss": 0.254, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 3.92, | |
| "learning_rate": 3.362406765608158e-05, | |
| "loss": 0.1677, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 3.92, | |
| "learning_rate": 3.3475435750440356e-05, | |
| "loss": 0.1773, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 3.93, | |
| "learning_rate": 3.332696758990197e-05, | |
| "loss": 0.2142, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 3.94, | |
| "learning_rate": 3.3178664645666066e-05, | |
| "loss": 0.2396, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 3.94, | |
| "learning_rate": 3.303052838729525e-05, | |
| "loss": 0.2472, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 3.95, | |
| "learning_rate": 3.2882560282700336e-05, | |
| "loss": 0.2072, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 3.95, | |
| "learning_rate": 3.273476179812588e-05, | |
| "loss": 0.3123, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 3.96, | |
| "learning_rate": 3.258713439813566e-05, | |
| "loss": 0.2578, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 3.97, | |
| "learning_rate": 3.243967954559811e-05, | |
| "loss": 0.1629, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 3.97, | |
| "learning_rate": 3.229239870167191e-05, | |
| "loss": 0.4055, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 3.98, | |
| "learning_rate": 3.2145293325791395e-05, | |
| "loss": 0.192, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 3.99, | |
| "learning_rate": 3.199836487565222e-05, | |
| "loss": 0.2333, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 3.99, | |
| "learning_rate": 3.1851614807196774e-05, | |
| "loss": 0.2915, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "learning_rate": 3.170504457459989e-05, | |
| "loss": 0.1784, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 4.01, | |
| "learning_rate": 3.155865563025433e-05, | |
| "loss": 0.1412, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 4.01, | |
| "learning_rate": 3.1412449424756474e-05, | |
| "loss": 0.1135, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 4.02, | |
| "learning_rate": 3.1266427406891856e-05, | |
| "loss": 0.1025, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 4.03, | |
| "learning_rate": 3.112059102362093e-05, | |
| "loss": 0.1087, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 4.03, | |
| "learning_rate": 3.0974941720064585e-05, | |
| "loss": 0.1515, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 4.04, | |
| "learning_rate": 3.082948093948997e-05, | |
| "loss": 0.1301, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 4.05, | |
| "learning_rate": 3.0684210123296055e-05, | |
| "loss": 0.0812, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 4.05, | |
| "learning_rate": 3.053913071099947e-05, | |
| "loss": 0.1231, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 4.06, | |
| "learning_rate": 3.0394244140220163e-05, | |
| "loss": 0.0913, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 4.06, | |
| "learning_rate": 3.0249551846667207e-05, | |
| "loss": 0.1416, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 4.07, | |
| "learning_rate": 3.010505526412447e-05, | |
| "loss": 0.126, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 4.08, | |
| "learning_rate": 2.996075582443658e-05, | |
| "loss": 0.1037, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 4.08, | |
| "learning_rate": 2.981665495749457e-05, | |
| "loss": 0.0937, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 4.09, | |
| "learning_rate": 2.9672754091221805e-05, | |
| "loss": 0.1061, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 4.1, | |
| "learning_rate": 2.9529054651559772e-05, | |
| "loss": 0.1426, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 4.1, | |
| "learning_rate": 2.938555806245406e-05, | |
| "loss": 0.0995, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 4.11, | |
| "learning_rate": 2.9242265745840063e-05, | |
| "loss": 0.0778, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 4.12, | |
| "learning_rate": 2.9099179121629117e-05, | |
| "loss": 0.0848, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 4.12, | |
| "learning_rate": 2.895629960769417e-05, | |
| "loss": 0.0843, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 4.13, | |
| "learning_rate": 2.881362861985606e-05, | |
| "loss": 0.0779, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 4.14, | |
| "learning_rate": 2.867116757186911e-05, | |
| "loss": 0.0856, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 4.14, | |
| "learning_rate": 2.8528917875407433e-05, | |
| "loss": 0.1184, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 4.15, | |
| "learning_rate": 2.838688094005078e-05, | |
| "loss": 0.0872, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 4.15, | |
| "learning_rate": 2.8245058173270622e-05, | |
| "loss": 0.1039, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 4.16, | |
| "learning_rate": 2.8103450980416136e-05, | |
| "loss": 0.1731, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 4.17, | |
| "learning_rate": 2.796206076470044e-05, | |
| "loss": 0.0944, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 4.17, | |
| "learning_rate": 2.7820888927186483e-05, | |
| "loss": 0.1147, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 4.18, | |
| "learning_rate": 2.7679936866773315e-05, | |
| "loss": 0.2891, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 4.19, | |
| "learning_rate": 2.753920598018217e-05, | |
| "loss": 0.0897, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 4.19, | |
| "learning_rate": 2.739869766194263e-05, | |
| "loss": 0.1152, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 4.2, | |
| "learning_rate": 2.7258413304378734e-05, | |
| "loss": 0.1273, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 4.21, | |
| "learning_rate": 2.7118354297595396e-05, | |
| "loss": 0.1433, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 4.21, | |
| "learning_rate": 2.6978522029464325e-05, | |
| "loss": 0.0997, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 4.22, | |
| "learning_rate": 2.683891788561055e-05, | |
| "loss": 0.0809, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 4.23, | |
| "learning_rate": 2.669954324939852e-05, | |
| "loss": 0.1114, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 4.23, | |
| "learning_rate": 2.6560399501918465e-05, | |
| "loss": 0.1008, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 4.24, | |
| "learning_rate": 2.6421488021972673e-05, | |
| "loss": 0.1733, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 4.25, | |
| "learning_rate": 2.6282810186061862e-05, | |
| "loss": 0.1466, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 4.25, | |
| "learning_rate": 2.6144367368371535e-05, | |
| "loss": 0.0989, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 4.26, | |
| "learning_rate": 2.600616094075835e-05, | |
| "loss": 0.1372, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 4.26, | |
| "learning_rate": 2.5868192272736514e-05, | |
| "loss": 0.0731, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 4.27, | |
| "learning_rate": 2.5730462731464273e-05, | |
| "loss": 0.1069, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 4.28, | |
| "learning_rate": 2.5592973681730236e-05, | |
| "loss": 0.1162, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 4.28, | |
| "learning_rate": 2.5455726485940012e-05, | |
| "loss": 0.1549, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 4.29, | |
| "learning_rate": 2.5318722504102604e-05, | |
| "loss": 0.0871, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 4.3, | |
| "learning_rate": 2.5181963093816962e-05, | |
| "loss": 0.0946, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 4.3, | |
| "learning_rate": 2.504544961025853e-05, | |
| "loss": 0.1013, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 4.31, | |
| "learning_rate": 2.4909183406165836e-05, | |
| "loss": 0.1005, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 4.32, | |
| "learning_rate": 2.4773165831827018e-05, | |
| "loss": 0.2741, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 4.32, | |
| "learning_rate": 2.4637398235066527e-05, | |
| "loss": 0.0816, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 4.33, | |
| "learning_rate": 2.450188196123177e-05, | |
| "loss": 0.1006, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 4.34, | |
| "learning_rate": 2.4366618353179644e-05, | |
| "loss": 0.102, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 4.34, | |
| "learning_rate": 2.423160875126348e-05, | |
| "loss": 0.1042, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 4.35, | |
| "learning_rate": 2.4096854493319477e-05, | |
| "loss": 0.1389, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 4.35, | |
| "learning_rate": 2.3962356914653657e-05, | |
| "loss": 0.1044, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 4.36, | |
| "learning_rate": 2.3828117348028528e-05, | |
| "loss": 0.085, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 4.37, | |
| "learning_rate": 2.3694137123649946e-05, | |
| "loss": 0.0848, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 4.37, | |
| "learning_rate": 2.3560417569153796e-05, | |
| "loss": 0.0948, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 4.38, | |
| "learning_rate": 2.342696000959309e-05, | |
| "loss": 0.124, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 4.39, | |
| "learning_rate": 2.3293765767424537e-05, | |
| "loss": 0.0876, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 4.39, | |
| "learning_rate": 2.3160836162495653e-05, | |
| "loss": 0.0751, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 4.4, | |
| "learning_rate": 2.3028172512031604e-05, | |
| "loss": 0.0939, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 4.41, | |
| "learning_rate": 2.289577613062218e-05, | |
| "loss": 0.2399, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 4.41, | |
| "learning_rate": 2.276364833020868e-05, | |
| "loss": 0.1172, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 4.42, | |
| "learning_rate": 2.2631790420071064e-05, | |
| "loss": 0.1124, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 4.43, | |
| "learning_rate": 2.2500203706814856e-05, | |
| "loss": 0.1065, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 4.43, | |
| "learning_rate": 2.2368889494358235e-05, | |
| "loss": 0.1028, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 4.44, | |
| "learning_rate": 2.2237849083919142e-05, | |
| "loss": 0.1354, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 4.45, | |
| "learning_rate": 2.2107083774002364e-05, | |
| "loss": 0.1364, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 4.45, | |
| "learning_rate": 2.1976594860386597e-05, | |
| "loss": 0.1475, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 4.46, | |
| "learning_rate": 2.1846383636111743e-05, | |
| "loss": 0.1157, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 4.46, | |
| "learning_rate": 2.1716451391466008e-05, | |
| "loss": 0.108, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 4.47, | |
| "learning_rate": 2.1586799413973135e-05, | |
| "loss": 0.0873, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 4.48, | |
| "learning_rate": 2.1457428988379635e-05, | |
| "loss": 0.1704, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 4.48, | |
| "learning_rate": 2.1328341396642093e-05, | |
| "loss": 0.1362, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 4.49, | |
| "learning_rate": 2.1199537917914386e-05, | |
| "loss": 0.1068, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 4.5, | |
| "learning_rate": 2.107101982853511e-05, | |
| "loss": 0.1443, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 4.5, | |
| "learning_rate": 2.0942788402014867e-05, | |
| "loss": 0.0976, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 4.51, | |
| "learning_rate": 2.0814844909023663e-05, | |
| "loss": 0.1091, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 4.52, | |
| "learning_rate": 2.068719061737831e-05, | |
| "loss": 0.138, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 4.52, | |
| "learning_rate": 2.0559826792029884e-05, | |
| "loss": 0.1105, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 4.53, | |
| "learning_rate": 2.0432754695051136e-05, | |
| "loss": 0.0952, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 4.54, | |
| "learning_rate": 2.0305975585624058e-05, | |
| "loss": 0.1463, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 4.54, | |
| "learning_rate": 2.0179490720027372e-05, | |
| "loss": 0.0833, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 4.55, | |
| "learning_rate": 2.005330135162408e-05, | |
| "loss": 0.1192, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 4.55, | |
| "learning_rate": 1.992740873084899e-05, | |
| "loss": 0.1378, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 4.56, | |
| "learning_rate": 1.9801814105196497e-05, | |
| "loss": 0.0791, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 4.57, | |
| "learning_rate": 1.9676518719207977e-05, | |
| "loss": 0.1302, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 4.57, | |
| "learning_rate": 1.9551523814459665e-05, | |
| "loss": 0.1174, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 4.58, | |
| "learning_rate": 1.9426830629550242e-05, | |
| "loss": 0.0775, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 4.59, | |
| "learning_rate": 1.9302440400088606e-05, | |
| "loss": 0.0887, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 4.59, | |
| "learning_rate": 1.917835435868155e-05, | |
| "loss": 0.1085, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 4.6, | |
| "learning_rate": 1.9054573734921714e-05, | |
| "loss": 0.0957, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 4.61, | |
| "learning_rate": 1.8931099755375203e-05, | |
| "loss": 0.1174, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 4.61, | |
| "learning_rate": 1.880793364356956e-05, | |
| "loss": 0.1021, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 4.62, | |
| "learning_rate": 1.8685076619981608e-05, | |
| "loss": 0.1318, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 4.63, | |
| "learning_rate": 1.8562529902025372e-05, | |
| "loss": 0.1226, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 4.63, | |
| "learning_rate": 1.844029470403993e-05, | |
| "loss": 0.0972, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 4.64, | |
| "learning_rate": 1.8318372237277565e-05, | |
| "loss": 0.2055, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 4.65, | |
| "learning_rate": 1.8196763709891524e-05, | |
| "loss": 0.0886, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 4.65, | |
| "learning_rate": 1.8075470326924243e-05, | |
| "loss": 0.1226, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 4.66, | |
| "learning_rate": 1.795449329029531e-05, | |
| "loss": 0.1223, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 4.66, | |
| "learning_rate": 1.7833833798789595e-05, | |
| "loss": 0.0861, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 4.67, | |
| "learning_rate": 1.7713493048045294e-05, | |
| "loss": 0.1125, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 4.68, | |
| "learning_rate": 1.7593472230542202e-05, | |
| "loss": 0.1058, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 4.68, | |
| "learning_rate": 1.747377253558982e-05, | |
| "loss": 0.1082, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 4.69, | |
| "learning_rate": 1.7354395149315534e-05, | |
| "loss": 0.1018, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 4.7, | |
| "learning_rate": 1.7235341254653005e-05, | |
| "loss": 0.1446, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 4.7, | |
| "learning_rate": 1.7116612031330252e-05, | |
| "loss": 0.1118, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 4.71, | |
| "learning_rate": 1.6998208655858137e-05, | |
| "loss": 0.1146, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 4.72, | |
| "learning_rate": 1.6880132301518598e-05, | |
| "loss": 0.1042, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 4.72, | |
| "learning_rate": 1.6762384138353078e-05, | |
| "loss": 0.145, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 4.73, | |
| "learning_rate": 1.6644965333150847e-05, | |
| "loss": 0.0899, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 4.74, | |
| "learning_rate": 1.6527877049437622e-05, | |
| "loss": 0.1717, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 4.74, | |
| "learning_rate": 1.6411120447463807e-05, | |
| "loss": 0.0906, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 4.75, | |
| "learning_rate": 1.6294696684193154e-05, | |
| "loss": 0.1858, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 4.75, | |
| "learning_rate": 1.617860691329126e-05, | |
| "loss": 0.1178, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 4.76, | |
| "learning_rate": 1.6062852285114123e-05, | |
| "loss": 0.0934, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 4.77, | |
| "learning_rate": 1.5947433946696693e-05, | |
| "loss": 0.0981, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 4.77, | |
| "learning_rate": 1.583235304174167e-05, | |
| "loss": 0.1257, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 4.78, | |
| "learning_rate": 1.5717610710607948e-05, | |
| "loss": 0.1439, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 4.79, | |
| "learning_rate": 1.5603208090299498e-05, | |
| "loss": 0.0967, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 4.79, | |
| "learning_rate": 1.5489146314454002e-05, | |
| "loss": 0.1511, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 4.8, | |
| "learning_rate": 1.537542651333167e-05, | |
| "loss": 0.0943, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 4.81, | |
| "learning_rate": 1.5262049813803958e-05, | |
| "loss": 0.0903, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 4.81, | |
| "learning_rate": 1.5149017339342574e-05, | |
| "loss": 0.1197, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 4.82, | |
| "learning_rate": 1.5036330210008115e-05, | |
| "loss": 0.146, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 4.83, | |
| "learning_rate": 1.4923989542439159e-05, | |
| "loss": 0.0967, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 4.83, | |
| "learning_rate": 1.4811996449841098e-05, | |
| "loss": 0.1224, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 4.84, | |
| "learning_rate": 1.4700352041975168e-05, | |
| "loss": 0.1215, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 4.85, | |
| "learning_rate": 1.458905742514734e-05, | |
| "loss": 0.0928, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 4.85, | |
| "learning_rate": 1.447811370219757e-05, | |
| "loss": 0.1678, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 4.86, | |
| "learning_rate": 1.4367521972488612e-05, | |
| "loss": 0.1032, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 4.86, | |
| "learning_rate": 1.4257283331895315e-05, | |
| "loss": 0.1344, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 4.87, | |
| "learning_rate": 1.4147398872793693e-05, | |
| "loss": 0.0932, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 4.88, | |
| "learning_rate": 1.4037869684050115e-05, | |
| "loss": 0.104, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 4.88, | |
| "learning_rate": 1.3928696851010443e-05, | |
| "loss": 0.1034, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 4.89, | |
| "learning_rate": 1.3819881455489458e-05, | |
| "loss": 0.1104, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 4.9, | |
| "learning_rate": 1.3711424575759912e-05, | |
| "loss": 0.1111, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 4.9, | |
| "learning_rate": 1.3603327286542023e-05, | |
| "loss": 0.1067, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 4.91, | |
| "learning_rate": 1.3495590658992718e-05, | |
| "loss": 0.0965, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 4.92, | |
| "learning_rate": 1.33882157606951e-05, | |
| "loss": 0.0913, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 4.92, | |
| "learning_rate": 1.3281203655647756e-05, | |
| "loss": 0.1056, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 4.93, | |
| "learning_rate": 1.317455540425439e-05, | |
| "loss": 0.0977, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 4.94, | |
| "learning_rate": 1.3068272063313102e-05, | |
| "loss": 0.1095, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 4.94, | |
| "learning_rate": 1.2962354686006084e-05, | |
| "loss": 0.1123, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 4.95, | |
| "learning_rate": 1.2856804321889115e-05, | |
| "loss": 0.1233, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 4.95, | |
| "learning_rate": 1.2751622016881182e-05, | |
| "loss": 0.1309, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 4.96, | |
| "learning_rate": 1.2646808813254035e-05, | |
| "loss": 0.1012, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 4.97, | |
| "learning_rate": 1.2542365749622049e-05, | |
| "loss": 0.1109, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 4.97, | |
| "learning_rate": 1.2438293860931677e-05, | |
| "loss": 0.1016, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 4.98, | |
| "learning_rate": 1.2334594178451425e-05, | |
| "loss": 0.1526, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 4.99, | |
| "learning_rate": 1.2231267729761487e-05, | |
| "loss": 0.1835, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 4.99, | |
| "learning_rate": 1.2128315538743646e-05, | |
| "loss": 0.0754, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 5.0, | |
| "learning_rate": 1.2025738625571026e-05, | |
| "loss": 0.1137, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 5.01, | |
| "learning_rate": 1.1923538006698154e-05, | |
| "loss": 0.0515, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 5.01, | |
| "learning_rate": 1.1821714694850689e-05, | |
| "loss": 0.0571, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 5.02, | |
| "learning_rate": 1.172026969901553e-05, | |
| "loss": 0.0598, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 5.03, | |
| "learning_rate": 1.161920402443077e-05, | |
| "loss": 0.0467, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 5.03, | |
| "learning_rate": 1.1518518672575701e-05, | |
| "loss": 0.0517, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 5.04, | |
| "learning_rate": 1.1418214641160958e-05, | |
| "loss": 0.0907, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 5.05, | |
| "learning_rate": 1.1318292924118584e-05, | |
| "loss": 0.0663, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 5.05, | |
| "learning_rate": 1.121875451159221e-05, | |
| "loss": 0.0828, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 5.06, | |
| "learning_rate": 1.1119600389927182e-05, | |
| "loss": 0.05, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 5.06, | |
| "learning_rate": 1.1020831541660915e-05, | |
| "loss": 0.0606, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 5.07, | |
| "learning_rate": 1.092244894551298e-05, | |
| "loss": 0.0505, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 5.08, | |
| "learning_rate": 1.0824453576375576e-05, | |
| "loss": 0.0596, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 5.08, | |
| "learning_rate": 1.0726846405303754e-05, | |
| "loss": 0.0663, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 5.09, | |
| "learning_rate": 1.062962839950587e-05, | |
| "loss": 0.053, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 5.1, | |
| "learning_rate": 1.0532800522333902e-05, | |
| "loss": 0.0641, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 5.1, | |
| "learning_rate": 1.0436363733274057e-05, | |
| "loss": 0.0951, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 5.11, | |
| "learning_rate": 1.0340318987937097e-05, | |
| "loss": 0.0671, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 5.12, | |
| "learning_rate": 1.0244667238048988e-05, | |
| "loss": 0.0516, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 5.12, | |
| "learning_rate": 1.014940943144142e-05, | |
| "loss": 0.0571, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 5.13, | |
| "learning_rate": 1.0054546512042424e-05, | |
| "loss": 0.046, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 5.14, | |
| "learning_rate": 9.960079419866985e-06, | |
| "loss": 0.0595, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 5.14, | |
| "learning_rate": 9.866009091007833e-06, | |
| "loss": 0.0451, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 5.15, | |
| "learning_rate": 9.772336457626014e-06, | |
| "loss": 0.038, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 5.15, | |
| "learning_rate": 9.679062447941778e-06, | |
| "loss": 0.0682, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 5.16, | |
| "learning_rate": 9.586187986225325e-06, | |
| "loss": 0.0433, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 5.17, | |
| "learning_rate": 9.493713992787672e-06, | |
| "loss": 0.0441, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 5.17, | |
| "learning_rate": 9.401641383971477e-06, | |
| "loss": 0.0865, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 5.18, | |
| "learning_rate": 9.309971072142038e-06, | |
| "loss": 0.0629, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 5.19, | |
| "learning_rate": 9.218703965678204e-06, | |
| "loss": 0.0558, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 5.19, | |
| "learning_rate": 9.127840968963381e-06, | |
| "loss": 0.0459, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 5.2, | |
| "learning_rate": 9.03738298237658e-06, | |
| "loss": 0.0567, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 5.21, | |
| "learning_rate": 8.94733090228349e-06, | |
| "loss": 0.0582, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 5.21, | |
| "learning_rate": 8.857685621027568e-06, | |
| "loss": 0.0811, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 5.22, | |
| "learning_rate": 8.768448026921245e-06, | |
| "loss": 0.0445, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 5.23, | |
| "learning_rate": 8.67961900423711e-06, | |
| "loss": 0.0542, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 5.23, | |
| "learning_rate": 8.591199433199126e-06, | |
| "loss": 0.049, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 5.24, | |
| "learning_rate": 8.503190189973914e-06, | |
| "loss": 0.0587, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 5.25, | |
| "learning_rate": 8.415592146662104e-06, | |
| "loss": 0.0647, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 5.25, | |
| "learning_rate": 8.328406171289621e-06, | |
| "loss": 0.0462, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 5.26, | |
| "learning_rate": 8.24163312779917e-06, | |
| "loss": 0.0817, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 5.26, | |
| "learning_rate": 8.155273876041614e-06, | |
| "loss": 0.0621, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 5.27, | |
| "learning_rate": 8.069329271767484e-06, | |
| "loss": 0.0792, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 5.28, | |
| "learning_rate": 7.983800166618482e-06, | |
| "loss": 0.2328, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 5.28, | |
| "learning_rate": 7.898687408119065e-06, | |
| "loss": 0.0794, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 5.29, | |
| "learning_rate": 7.813991839667995e-06, | |
| "loss": 0.0715, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 5.3, | |
| "learning_rate": 7.72971430053005e-06, | |
| "loss": 0.0616, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 5.3, | |
| "learning_rate": 7.645855625827658e-06, | |
| "loss": 0.0546, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 5.31, | |
| "learning_rate": 7.56241664653266e-06, | |
| "loss": 0.0482, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 5.32, | |
| "learning_rate": 7.4793981894580034e-06, | |
| "loss": 0.0669, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 5.32, | |
| "learning_rate": 7.396801077249676e-06, | |
| "loss": 0.0647, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 5.33, | |
| "learning_rate": 7.3146261283784104e-06, | |
| "loss": 0.0585, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 5.34, | |
| "learning_rate": 7.2328741571316696e-06, | |
| "loss": 0.0887, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 5.34, | |
| "learning_rate": 7.1515459736055505e-06, | |
| "loss": 0.0517, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 5.35, | |
| "learning_rate": 7.070642383696763e-06, | |
| "loss": 0.0747, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 5.35, | |
| "learning_rate": 6.990164189094589e-06, | |
| "loss": 0.0676, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 5.36, | |
| "learning_rate": 6.910112187273066e-06, | |
| "loss": 0.0648, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 5.37, | |
| "learning_rate": 6.830487171482935e-06, | |
| "loss": 0.052, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 5.37, | |
| "learning_rate": 6.751289930743882e-06, | |
| "loss": 0.0587, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 5.38, | |
| "learning_rate": 6.6725212498366885e-06, | |
| "loss": 0.1961, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 5.39, | |
| "learning_rate": 6.594181909295427e-06, | |
| "loss": 0.096, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 5.39, | |
| "learning_rate": 6.516272685399793e-06, | |
| "loss": 0.1016, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 5.4, | |
| "learning_rate": 6.438794350167337e-06, | |
| "loss": 0.0557, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 5.41, | |
| "learning_rate": 6.36174767134588e-06, | |
| "loss": 0.0592, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 5.41, | |
| "learning_rate": 6.285133412405858e-06, | |
| "loss": 0.0739, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 5.42, | |
| "learning_rate": 6.208952332532786e-06, | |
| "loss": 0.0507, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 5.43, | |
| "learning_rate": 6.133205186619695e-06, | |
| "loss": 0.0649, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 5.43, | |
| "learning_rate": 6.057892725259717e-06, | |
| "loss": 0.0798, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 5.44, | |
| "learning_rate": 5.983015694738597e-06, | |
| "loss": 0.0634, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 5.45, | |
| "learning_rate": 5.908574837027309e-06, | |
| "loss": 0.0654, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 5.45, | |
| "learning_rate": 5.83457088977471e-06, | |
| "loss": 0.0722, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 5.46, | |
| "learning_rate": 5.761004586300234e-06, | |
| "loss": 0.0656, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 5.46, | |
| "learning_rate": 5.687876655586583e-06, | |
| "loss": 0.0539, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 5.47, | |
| "learning_rate": 5.615187822272583e-06, | |
| "loss": 0.065, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 5.48, | |
| "learning_rate": 5.542938806645931e-06, | |
| "loss": 0.0573, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 5.48, | |
| "learning_rate": 5.4711303246361144e-06, | |
| "loss": 0.0561, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 5.49, | |
| "learning_rate": 5.399763087807236e-06, | |
| "loss": 0.0652, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 5.5, | |
| "learning_rate": 5.328837803351083e-06, | |
| "loss": 0.0604, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 5.5, | |
| "learning_rate": 5.258355174079993e-06, | |
| "loss": 0.06, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 5.51, | |
| "learning_rate": 5.188315898419971e-06, | |
| "loss": 0.0785, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 5.52, | |
| "learning_rate": 5.118720670403748e-06, | |
| "loss": 0.0748, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 5.52, | |
| "learning_rate": 5.04957017966391e-06, | |
| "loss": 0.048, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 5.53, | |
| "learning_rate": 4.980865111426003e-06, | |
| "loss": 0.0843, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 5.54, | |
| "learning_rate": 4.912606146501886e-06, | |
| "loss": 0.042, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 5.54, | |
| "learning_rate": 4.844793961282812e-06, | |
| "loss": 0.0764, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 5.55, | |
| "learning_rate": 4.777429227732844e-06, | |
| "loss": 0.0874, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 5.55, | |
| "learning_rate": 4.710512613382151e-06, | |
| "loss": 0.0941, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 5.56, | |
| "learning_rate": 4.644044781320422e-06, | |
| "loss": 0.14, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 5.57, | |
| "learning_rate": 4.578026390190232e-06, | |
| "loss": 0.0905, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 5.57, | |
| "learning_rate": 4.5124580941806165e-06, | |
| "loss": 0.068, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 5.58, | |
| "learning_rate": 4.447340543020473e-06, | |
| "loss": 0.057, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 5.59, | |
| "learning_rate": 4.382674381972224e-06, | |
| "loss": 0.0571, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 5.59, | |
| "learning_rate": 4.318460251825357e-06, | |
| "loss": 0.0642, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 5.6, | |
| "learning_rate": 4.254698788890127e-06, | |
| "loss": 0.0557, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 5.61, | |
| "learning_rate": 4.191390624991159e-06, | |
| "loss": 0.0752, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 5.61, | |
| "learning_rate": 4.12853638746134e-06, | |
| "loss": 0.1361, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 5.62, | |
| "learning_rate": 4.0661366991354365e-06, | |
| "loss": 0.048, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 5.63, | |
| "learning_rate": 4.004192178344029e-06, | |
| "loss": 0.0381, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 5.63, | |
| "learning_rate": 3.942703438907358e-06, | |
| "loss": 0.0516, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 5.64, | |
| "learning_rate": 3.881671090129247e-06, | |
| "loss": 0.07, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 5.65, | |
| "learning_rate": 3.821095736791008e-06, | |
| "loss": 0.058, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 5.65, | |
| "learning_rate": 3.7609779791455744e-06, | |
| "loss": 0.065, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 5.66, | |
| "learning_rate": 3.7013184129113976e-06, | |
| "loss": 0.0421, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 5.66, | |
| "learning_rate": 3.6421176292666783e-06, | |
| "loss": 0.0721, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 5.67, | |
| "learning_rate": 3.58337621484342e-06, | |
| "loss": 0.0599, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 5.68, | |
| "learning_rate": 3.5250947517216637e-06, | |
| "loss": 0.0571, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 5.68, | |
| "learning_rate": 3.4672738174236884e-06, | |
| "loss": 0.0589, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 5.69, | |
| "learning_rate": 3.4099139849083307e-06, | |
| "loss": 0.0922, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 5.7, | |
| "learning_rate": 3.353015822565253e-06, | |
| "loss": 0.0413, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 5.7, | |
| "learning_rate": 3.296579894209345e-06, | |
| "loss": 0.0732, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 5.71, | |
| "learning_rate": 3.2406067590751433e-06, | |
| "loss": 0.051, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 5.72, | |
| "learning_rate": 3.1850969718112745e-06, | |
| "loss": 0.058, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 5.72, | |
| "learning_rate": 3.1300510824749273e-06, | |
| "loss": 0.0939, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 5.73, | |
| "learning_rate": 3.0754696365265068e-06, | |
| "loss": 0.0825, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 5.74, | |
| "learning_rate": 3.0213531748240764e-06, | |
| "loss": 0.0542, | |
| "step": 889 | |
| }, | |
    {
      "epoch": 5.74,
      "learning_rate": 2.9677022336181413e-06,
      "loss": 0.057,
      "step": 890
    },
    {
      "epoch": 5.75,
      "learning_rate": 2.914517344546258e-06,
      "loss": 0.0492,
      "step": 891
    },
    {
      "epoch": 5.75,
      "learning_rate": 2.8617990346277657e-06,
      "loss": 0.0601,
      "step": 892
    },
    {
      "epoch": 5.76,
      "learning_rate": 2.8095478262585907e-06,
      "loss": 0.067,
      "step": 893
    },
    {
      "epoch": 5.77,
      "learning_rate": 2.7577642372060673e-06,
      "loss": 0.0573,
      "step": 894
    },
    {
      "epoch": 5.77,
      "learning_rate": 2.7064487806037985e-06,
      "loss": 0.0573,
      "step": 895
    },
    {
      "epoch": 5.78,
      "learning_rate": 2.6556019649465525e-06,
      "loss": 0.1025,
      "step": 896
    },
    {
      "epoch": 5.79,
      "learning_rate": 2.6052242940852787e-06,
      "loss": 0.0734,
      "step": 897
    },
    {
      "epoch": 5.79,
      "learning_rate": 2.5553162672220465e-06,
      "loss": 0.1048,
      "step": 898
    },
    {
      "epoch": 5.8,
      "learning_rate": 2.5058783789051467e-06,
      "loss": 0.0557,
      "step": 899
    },
    {
      "epoch": 5.81,
      "learning_rate": 2.45691111902418e-06,
      "loss": 0.0607,
      "step": 900
    },
    {
      "epoch": 5.81,
      "learning_rate": 2.4084149728051952e-06,
      "loss": 0.0662,
      "step": 901
    },
    {
      "epoch": 5.82,
      "learning_rate": 2.360390420805869e-06,
      "loss": 0.0689,
      "step": 902
    },
    {
      "epoch": 5.83,
      "learning_rate": 2.3128379389108e-06,
      "loss": 0.0692,
      "step": 903
    },
    {
      "epoch": 5.83,
      "learning_rate": 2.2657579983267064e-06,
      "loss": 0.0494,
      "step": 904
    },
    {
      "epoch": 5.84,
      "learning_rate": 2.219151065577829e-06,
      "loss": 0.0668,
      "step": 905
    },
    {
      "epoch": 5.85,
      "learning_rate": 2.1730176025012816e-06,
      "loss": 0.067,
      "step": 906
    },
    {
      "epoch": 5.85,
      "learning_rate": 2.1273580662424796e-06,
      "loss": 0.0899,
      "step": 907
    },
    {
      "epoch": 5.86,
      "learning_rate": 2.082172909250568e-06,
      "loss": 0.0672,
      "step": 908
    },
    {
      "epoch": 5.86,
      "learning_rate": 2.0374625792740464e-06,
      "loss": 0.0559,
      "step": 909
    },
    {
      "epoch": 5.87,
      "learning_rate": 1.993227519356189e-06,
      "loss": 0.0682,
      "step": 910
    },
    {
      "epoch": 5.88,
      "learning_rate": 1.9494681678307703e-06,
      "loss": 0.0748,
      "step": 911
    },
    {
      "epoch": 5.88,
      "learning_rate": 1.906184958317664e-06,
      "loss": 0.0641,
      "step": 912
    },
    {
      "epoch": 5.89,
      "learning_rate": 1.8633783197185783e-06,
      "loss": 0.0729,
      "step": 913
    },
    {
      "epoch": 5.9,
      "learning_rate": 1.8210486762127499e-06,
      "loss": 0.0673,
      "step": 914
    },
    {
      "epoch": 5.9,
      "learning_rate": 1.7791964472528232e-06,
      "loss": 0.064,
      "step": 915
    },
    {
      "epoch": 5.91,
      "learning_rate": 1.737822047560611e-06,
      "loss": 0.0555,
      "step": 916
    },
    {
      "epoch": 5.92,
      "learning_rate": 1.696925887123052e-06,
      "loss": 0.0539,
      "step": 917
    },
    {
      "epoch": 5.92,
      "learning_rate": 1.656508371188109e-06,
      "loss": 0.0565,
      "step": 918
    },
    {
      "epoch": 5.93,
      "learning_rate": 1.6165699002607671e-06,
      "loss": 0.0623,
      "step": 919
    },
    {
      "epoch": 5.94,
      "learning_rate": 1.5771108700990412e-06,
      "loss": 0.0788,
      "step": 920
    },
    {
      "epoch": 5.94,
      "learning_rate": 1.538131671710108e-06,
      "loss": 0.0686,
      "step": 921
    },
    {
      "epoch": 5.95,
      "learning_rate": 1.4996326913463754e-06,
      "loss": 0.0602,
      "step": 922
    },
    {
      "epoch": 5.95,
      "learning_rate": 1.461614310501691e-06,
      "loss": 0.0658,
      "step": 923
    },
    {
      "epoch": 5.96,
      "learning_rate": 1.4240769059075342e-06,
      "loss": 0.0539,
      "step": 924
    },
    {
      "epoch": 5.97,
      "learning_rate": 1.387020849529319e-06,
      "loss": 0.0487,
      "step": 925
    },
    {
      "epoch": 5.97,
      "learning_rate": 1.3504465085626638e-06,
      "loss": 0.0557,
      "step": 926
    },
    {
      "epoch": 5.98,
      "learning_rate": 1.3143542454297885e-06,
      "loss": 0.0489,
      "step": 927
    },
    {
      "epoch": 5.99,
      "learning_rate": 1.2787444177759068e-06,
      "loss": 0.0567,
      "step": 928
    },
    {
      "epoch": 5.99,
      "learning_rate": 1.243617378465689e-06,
      "loss": 0.0802,
      "step": 929
    },
    {
      "epoch": 6.0,
      "learning_rate": 1.208973475579761e-06,
      "loss": 0.038,
      "step": 930
    },
    {
      "epoch": 6.01,
      "learning_rate": 1.1748130524112666e-06,
      "loss": 0.0769,
      "step": 931
    },
    {
      "epoch": 6.01,
      "learning_rate": 1.1411364474624264e-06,
      "loss": 0.0503,
      "step": 932
    },
    {
      "epoch": 6.02,
      "learning_rate": 1.1079439944412406e-06,
      "loss": 0.0544,
      "step": 933
    },
    {
      "epoch": 6.03,
      "learning_rate": 1.075236022258147e-06,
      "loss": 0.0557,
      "step": 934
    },
    {
      "epoch": 6.03,
      "learning_rate": 1.0430128550227625e-06,
      "loss": 0.0401,
      "step": 935
    },
    {
      "epoch": 6.04,
      "learning_rate": 1.0112748120406856e-06,
      "loss": 0.0525,
      "step": 936
    },
    {
      "epoch": 6.05,
      "learning_rate": 9.800222078103271e-07,
      "loss": 0.1297,
      "step": 937
    },
    {
      "epoch": 6.05,
      "learning_rate": 9.492553520197733e-07,
      "loss": 0.0372,
      "step": 938
    },
    {
      "epoch": 6.06,
      "learning_rate": 9.189745495437608e-07,
      "loss": 0.0314,
      "step": 939
    },
    {
      "epoch": 6.06,
      "learning_rate": 8.891801004406119e-07,
      "loss": 0.0446,
      "step": 940
    },
    {
      "epoch": 6.07,
      "learning_rate": 8.59872299949288e-07,
      "loss": 0.0515,
      "step": 941
    },
    {
      "epoch": 6.08,
      "learning_rate": 8.31051438486441e-07,
      "loss": 0.0603,
      "step": 942
    },
    {
      "epoch": 6.08,
      "learning_rate": 8.027178016435765e-07,
      "loss": 0.0486,
      "step": 943
    },
    {
      "epoch": 6.09,
      "learning_rate": 7.748716701841685e-07,
      "loss": 0.0474,
      "step": 944
    },
    {
      "epoch": 6.1,
      "learning_rate": 7.475133200409212e-07,
      "loss": 0.0592,
      "step": 945
    },
    {
      "epoch": 6.1,
      "learning_rate": 7.206430223130278e-07,
      "loss": 0.2176,
      "step": 946
    },
    {
      "epoch": 6.11,
      "learning_rate": 6.9426104326345e-07,
      "loss": 0.0607,
      "step": 947
    },
    {
      "epoch": 6.12,
      "learning_rate": 6.683676443163311e-07,
      "loss": 0.0523,
      "step": 948
    },
    {
      "epoch": 6.12,
      "learning_rate": 6.429630820543598e-07,
      "loss": 0.0521,
      "step": 949
    },
    {
      "epoch": 6.13,
      "learning_rate": 6.180476082162656e-07,
      "loss": 0.0504,
      "step": 950
    },
    {
      "epoch": 6.14,
      "learning_rate": 5.936214696942887e-07,
      "loss": 0.0565,
      "step": 951
    },
    {
      "epoch": 6.14,
      "learning_rate": 5.696849085317646e-07,
      "loss": 0.0505,
      "step": 952
    },
    {
      "epoch": 6.15,
      "learning_rate": 5.462381619207091e-07,
      "loss": 0.0641,
      "step": 953
    },
    {
      "epoch": 6.15,
      "learning_rate": 5.232814621994598e-07,
      "loss": 0.0585,
      "step": 954
    },
    {
      "epoch": 6.16,
      "learning_rate": 5.008150368503994e-07,
      "loss": 0.0443,
      "step": 955
    },
    {
      "epoch": 6.17,
      "learning_rate": 4.788391084976862e-07,
      "loss": 0.0452,
      "step": 956
    },
    {
      "epoch": 6.17,
      "learning_rate": 4.573538949050327e-07,
      "loss": 0.0449,
      "step": 957
    },
    {
      "epoch": 6.18,
      "learning_rate": 4.363596089735911e-07,
      "loss": 0.0738,
      "step": 958
    },
    {
      "epoch": 6.19,
      "learning_rate": 4.1585645873978284e-07,
      "loss": 0.0449,
      "step": 959
    },
    {
      "epoch": 6.19,
      "learning_rate": 3.958446473733002e-07,
      "loss": 0.0445,
      "step": 960
    },
    {
      "epoch": 6.2,
      "learning_rate": 3.7632437317505207e-07,
      "loss": 0.0343,
      "step": 961
    },
    {
      "epoch": 6.21,
      "learning_rate": 3.572958295752049e-07,
      "loss": 0.0462,
      "step": 962
    },
    {
      "epoch": 6.21,
      "learning_rate": 3.387592051312782e-07,
      "loss": 0.0579,
      "step": 963
    },
    {
      "epoch": 6.22,
      "learning_rate": 3.207146835262742e-07,
      "loss": 0.0615,
      "step": 964
    },
    {
      "epoch": 6.23,
      "learning_rate": 3.0316244356683454e-07,
      "loss": 0.0409,
      "step": 965
    },
    {
      "epoch": 6.23,
      "learning_rate": 2.8610265918151414e-07,
      "loss": 0.0358,
      "step": 966
    },
    {
      "epoch": 6.24,
      "learning_rate": 2.695354994190047e-07,
      "loss": 0.0412,
      "step": 967
    },
    {
      "epoch": 6.25,
      "learning_rate": 2.534611284465083e-07,
      "loss": 0.0611,
      "step": 968
    },
    {
      "epoch": 6.25,
      "learning_rate": 2.3787970554806084e-07,
      "loss": 0.0416,
      "step": 969
    },
    {
      "epoch": 6.26,
      "learning_rate": 2.2279138512300567e-07,
      "loss": 0.0458,
      "step": 970
    },
    {
      "epoch": 6.26,
      "learning_rate": 2.0819631668442253e-07,
      "loss": 0.0507,
      "step": 971
    },
    {
      "epoch": 6.27,
      "learning_rate": 1.940946448576675e-07,
      "loss": 0.0527,
      "step": 972
    },
    {
      "epoch": 6.28,
      "learning_rate": 1.8048650937893542e-07,
      "loss": 0.0879,
      "step": 973
    },
    {
      "epoch": 6.28,
      "learning_rate": 1.6737204509387206e-07,
      "loss": 0.039,
      "step": 974
    },
    {
      "epoch": 6.29,
      "learning_rate": 1.5475138195623629e-07,
      "loss": 0.0455,
      "step": 975
    },
    {
      "epoch": 6.3,
      "learning_rate": 1.4262464502663443e-07,
      "loss": 0.0433,
      "step": 976
    },
    {
      "epoch": 6.3,
      "learning_rate": 1.309919544712268e-07,
      "loss": 0.1652,
      "step": 977
    },
    {
      "epoch": 6.31,
      "learning_rate": 1.1985342556060652e-07,
      "loss": 0.0449,
      "step": 978
    },
    {
      "epoch": 6.32,
      "learning_rate": 1.0920916866861142e-07,
      "loss": 0.0421,
      "step": 979
    },
    {
      "epoch": 6.32,
      "learning_rate": 9.905928927123609e-08,
      "loss": 0.0377,
      "step": 980
    },
    {
      "epoch": 6.33,
      "learning_rate": 8.940388794559939e-08,
      "loss": 0.0516,
      "step": 981
    },
    {
      "epoch": 6.34,
      "learning_rate": 8.02430603689397e-08,
      "loss": 0.0473,
      "step": 982
    },
    {
      "epoch": 6.34,
      "learning_rate": 7.157689731767669e-08,
      "loss": 0.0416,
      "step": 983
    },
    {
      "epoch": 6.35,
      "learning_rate": 6.340548466648443e-08,
      "loss": 0.038,
      "step": 984
    },
    {
      "epoch": 6.35,
      "learning_rate": 5.572890338748082e-08,
      "loss": 0.0422,
      "step": 985
    },
    {
      "epoch": 6.36,
      "learning_rate": 4.8547229549383844e-08,
      "loss": 0.0644,
      "step": 986
    },
    {
      "epoch": 6.37,
      "learning_rate": 4.186053431680104e-08,
      "loss": 0.0417,
      "step": 987
    },
    {
      "epoch": 6.37,
      "learning_rate": 3.566888394948009e-08,
      "loss": 0.0655,
      "step": 988
    },
    {
      "epoch": 6.38,
      "learning_rate": 2.997233980168157e-08,
      "loss": 0.0482,
      "step": 989
    },
    {
      "epoch": 6.39,
      "learning_rate": 2.4770958321568283e-08,
      "loss": 0.0427,
      "step": 990
    },
    {
      "epoch": 6.39,
      "learning_rate": 2.0064791050633526e-08,
      "loss": 0.0622,
      "step": 991
    },
    {
      "epoch": 6.4,
      "learning_rate": 1.5853884623195925e-08,
      "loss": 0.0509,
      "step": 992
    },
    {
      "epoch": 6.41,
      "learning_rate": 1.2138280765944254e-08,
      "loss": 0.0523,
      "step": 993
    },
    {
      "epoch": 6.41,
      "learning_rate": 8.918016297515541e-09,
      "loss": 0.0627,
      "step": 994
    },
    {
      "epoch": 6.42,
      "learning_rate": 6.193123128134248e-09,
      "loss": 0.0431,
      "step": 995
    },
    {
      "epoch": 6.43,
      "learning_rate": 3.963628259290308e-09,
      "loss": 0.043,
      "step": 996
    },
    {
      "epoch": 6.43,
      "learning_rate": 2.229553783478222e-09,
      "loss": 0.0358,
      "step": 997
    },
    {
      "epoch": 6.44,
      "learning_rate": 9.90916883986115e-10,
      "loss": 0.0868,
      "step": 998
    },
    {
      "epoch": 6.45,
      "learning_rate": 2.477298346958978e-10,
      "loss": 0.0718,
      "step": 999
    },
    {
      "epoch": 6.45,
      "learning_rate": 0.0,
      "loss": 0.0503,
      "step": 1000
    },
    {
      "epoch": 6.45,
      "step": 1000,
      "total_flos": 9.524118332178432e+16,
      "train_loss": 0.3133371548131108,
      "train_runtime": 3521.0844,
      "train_samples_per_second": 2.272,
      "train_steps_per_second": 0.284
    }
  ],
  "max_steps": 1000,
  "num_train_epochs": 7,
  "total_flos": 9.524118332178432e+16,
  "trial_name": null,
  "trial_params": null
}
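The file above follows the standard `trainer_state.json` layout written by the Hugging Face `Trainer`: each `log_history` entry records `epoch`, `learning_rate`, `loss`, and `step`, and the final entry plus the top-level keys summarize the whole run. A minimal sketch of how such a file could be inspected, assuming it is saved locally as `trainer_state.json` (the path and the plotting choices are illustrative, not part of the log itself):

```python
# Illustrative sketch: load a Trainer state file like the one above and
# plot the logged loss and learning-rate curves.
import json

import matplotlib.pyplot as plt

# Hypothetical local path; adjust to wherever the file actually lives.
with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step entries carry "loss" and "learning_rate"; the final summary
# entry (train_runtime etc.) does not, so filter on key presence.
history = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in history]
losses = [e["loss"] for e in history]
lrs = [e["learning_rate"] for e in history]

fig, (ax1, ax2) = plt.subplots(2, 1, sharex=True)
ax1.plot(steps, losses)
ax1.set_ylabel("training loss")
ax2.plot(steps, lrs)
ax2.set_ylabel("learning rate")
ax2.set_xlabel("step")
plt.tight_layout()
plt.show()
```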