{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 6.329113924050633,
  "global_step": 500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.01, "learning_rate": 5e-05, "loss": 1.0641, "step": 1 },
    { "epoch": 0.03, "learning_rate": 0.0001, "loss": 0.9056, "step": 2 },
    { "epoch": 0.04, "learning_rate": 9.999975227016531e-05, "loss": 0.9593, "step": 3 },
    { "epoch": 0.05, "learning_rate": 9.999900908311602e-05, "loss": 0.973, "step": 4 },
    { "epoch": 0.06, "learning_rate": 9.999777044621652e-05, "loss": 1.0652, "step": 5 },
    { "epoch": 0.08, "learning_rate": 9.999603637174071e-05, "loss": 0.9212, "step": 6 },
    { "epoch": 0.09, "learning_rate": 9.999380687687188e-05, "loss": 0.899, "step": 7 },
    { "epoch": 0.1, "learning_rate": 9.999108198370249e-05, "loss": 0.7079, "step": 8 },
    { "epoch": 0.11, "learning_rate": 9.998786171923407e-05, "loss": 0.7139, "step": 9 },
    { "epoch": 0.13, "learning_rate": 9.998414611537681e-05, "loss": 0.8258, "step": 10 },
    { "epoch": 0.14, "learning_rate": 9.997993520894937e-05, "loss": 0.7415, "step": 11 },
    { "epoch": 0.15, "learning_rate": 9.997522904167844e-05, "loss": 0.8233, "step": 12 },
    { "epoch": 0.16, "learning_rate": 9.997002766019832e-05, "loss": 0.8698, "step": 13 },
    { "epoch": 0.18, "learning_rate": 9.996433111605052e-05, "loss": 0.6354, "step": 14 },
    { "epoch": 0.19, "learning_rate": 9.99581394656832e-05, "loss": 0.6136, "step": 15 },
    { "epoch": 0.2, "learning_rate": 9.995145277045061e-05, "loss": 0.7363, "step": 16 },
    { "epoch": 0.22, "learning_rate": 9.994427109661253e-05, "loss": 0.6065, "step": 17 },
    { "epoch": 0.23, "learning_rate": 9.993659451533353e-05, "loss": 0.6152, "step": 18 },
    { "epoch": 0.24, "learning_rate": 9.992842310268233e-05, "loss": 0.7021, "step": 19 },
    { "epoch": 0.25, "learning_rate": 9.991975693963107e-05, "loss": 0.5468, "step": 20 },
    { "epoch": 0.27, "learning_rate": 9.99105961120544e-05, "loss": 0.5437, "step": 21 },
    { "epoch": 0.28, "learning_rate": 9.990094071072877e-05, "loss": 0.6458, "step": 22 },
    { "epoch": 0.29, "learning_rate": 9.989079083133139e-05, "loss": 0.6744, "step": 23 },
    { "epoch": 0.3, "learning_rate": 9.988014657443941e-05, "loss": 0.5592, "step": 24 },
    { "epoch": 0.32, "learning_rate": 9.986900804552878e-05, "loss": 0.5351, "step": 25 },
    { "epoch": 0.33, "learning_rate": 9.985737535497337e-05, "loss": 0.5705, "step": 26 },
    { "epoch": 0.34, "learning_rate": 9.984524861804376e-05, "loss": 0.5696, "step": 27 },
    { "epoch": 0.35, "learning_rate": 9.983262795490613e-05, "loss": 0.588, "step": 28 },
    { "epoch": 0.37, "learning_rate": 9.981951349062106e-05, "loss": 0.627, "step": 29 },
    { "epoch": 0.38, "learning_rate": 9.980590535514233e-05, "loss": 0.5702, "step": 30 },
    { "epoch": 0.39, "learning_rate": 9.979180368331558e-05, "loss": 0.5907, "step": 31 },
    { "epoch": 0.41, "learning_rate": 9.9777208614877e-05, "loss": 0.4817, "step": 32 },
    { "epoch": 0.42, "learning_rate": 9.976212029445194e-05, "loss": 0.5789, "step": 33 },
    { "epoch": 0.43, "learning_rate": 9.97465388715535e-05, "loss": 0.5108, "step": 34 },
    { "epoch": 0.44, "learning_rate": 9.9730464500581e-05, "loss": 0.557, "step": 35 },
    { "epoch": 0.46, "learning_rate": 9.971389734081848e-05, "loss": 0.5771, "step": 36 },
    { "epoch": 0.47, "learning_rate": 9.969683755643317e-05, "loss": 0.5524, "step": 37 },
    { "epoch": 0.48, "learning_rate": 9.967928531647374e-05, "loss": 0.5648, "step": 38 },
    { "epoch": 0.49, "learning_rate": 9.966124079486872e-05, "loss": 0.4951, "step": 39 },
    { "epoch": 0.51, "learning_rate": 9.96427041704248e-05, "loss": 0.5516, "step": 40 },
    { "epoch": 0.52, "learning_rate": 9.962367562682496e-05, "loss": 0.5582, "step": 41 },
    { "epoch": 0.53, "learning_rate": 9.960415535262671e-05, "loss": 0.5352, "step": 42 },
    { "epoch": 0.54, "learning_rate": 9.958414354126022e-05, "loss": 0.737, "step": 43 },
    { "epoch": 0.56, "learning_rate": 9.956364039102642e-05, "loss": 0.4762, "step": 44 },
    { "epoch": 0.57, "learning_rate": 9.954264610509497e-05, "loss": 0.4896, "step": 45 },
    { "epoch": 0.58, "learning_rate": 9.952116089150232e-05, "loss": 0.4975, "step": 46 },
    { "epoch": 0.59, "learning_rate": 9.94991849631496e-05, "loss": 0.5646, "step": 47 },
    { "epoch": 0.61, "learning_rate": 9.947671853780054e-05, "loss": 0.5599, "step": 48 },
    { "epoch": 0.62, "learning_rate": 9.94537618380793e-05, "loss": 0.5255, "step": 49 },
    { "epoch": 0.63, "learning_rate": 9.943031509146825e-05, "loss": 0.5971, "step": 50 },
    { "epoch": 0.65, "learning_rate": 9.940637853030572e-05, "loss": 0.5932, "step": 51 },
    { "epoch": 0.66, "learning_rate": 9.938195239178374e-05, "loss": 0.5979, "step": 52 },
    { "epoch": 0.67, "learning_rate": 9.935703691794565e-05, "loss": 0.496, "step": 53 },
    { "epoch": 0.68, "learning_rate": 9.933163235568367e-05, "loss": 0.5341, "step": 54 },
    { "epoch": 0.7, "learning_rate": 9.930573895673657e-05, "loss": 0.4661, "step": 55 },
    { "epoch": 0.71, "learning_rate": 9.927935697768698e-05, "loss": 0.6222, "step": 56 },
    { "epoch": 0.72, "learning_rate": 9.925248667995907e-05, "loss": 0.5015, "step": 57 },
    { "epoch": 0.73, "learning_rate": 9.922512832981584e-05, "loss": 0.654, "step": 58 },
    { "epoch": 0.75, "learning_rate": 9.919728219835643e-05, "loss": 0.4863, "step": 59 },
    { "epoch": 0.76, "learning_rate": 9.916894856151357e-05, "loss": 0.6024, "step": 60 },
    { "epoch": 0.77, "learning_rate": 9.914012770005072e-05, "loss": 0.4767, "step": 61 },
    { "epoch": 0.78, "learning_rate": 9.91108198995594e-05, "loss": 0.5065, "step": 62 },
    { "epoch": 0.8, "learning_rate": 9.908102545045625e-05, "loss": 0.6043, "step": 63 },
    { "epoch": 0.81, "learning_rate": 9.905074464798024e-05, "loss": 0.6733, "step": 64 },
    { "epoch": 0.82, "learning_rate": 9.901997779218967e-05, "loss": 0.522, "step": 65 },
    { "epoch": 0.84, "learning_rate": 9.898872518795932e-05, "loss": 0.5205, "step": 66 },
    { "epoch": 0.85, "learning_rate": 9.895698714497724e-05, "loss": 0.5216, "step": 67 },
    { "epoch": 0.86, "learning_rate": 9.892476397774186e-05, "loss": 0.6014, "step": 68 },
    { "epoch": 0.87, "learning_rate": 9.889205600555877e-05, "loss": 0.5321, "step": 69 },
    { "epoch": 0.89, "learning_rate": 9.885886355253758e-05, "loss": 0.545, "step": 70 },
    { "epoch": 0.9, "learning_rate": 9.882518694758875e-05, "loss": 0.4868, "step": 71 },
    { "epoch": 0.91, "learning_rate": 9.879102652442024e-05, "loss": 0.5366, "step": 72 },
    { "epoch": 0.92, "learning_rate": 9.875638262153431e-05, "loss": 0.5805, "step": 73 },
    { "epoch": 0.94, "learning_rate": 9.872125558222409e-05, "loss": 0.585, "step": 74 },
    { "epoch": 0.95, "learning_rate": 9.868564575457023e-05, "loss": 0.6058, "step": 75 },
    { "epoch": 0.96, "learning_rate": 9.864955349143734e-05, "loss": 0.4402, "step": 76 },
    { "epoch": 0.97, "learning_rate": 9.861297915047069e-05, "loss": 0.5349, "step": 77 },
    { "epoch": 0.99, "learning_rate": 9.857592309409247e-05, "loss": 0.5443, "step": 78 },
    { "epoch": 1.0, "learning_rate": 9.853838568949831e-05, "loss": 0.6198, "step": 79 },
    { "epoch": 1.01, "learning_rate": 9.850036730865364e-05, "loss": 0.6008, "step": 80 },
    { "epoch": 1.03, "learning_rate": 9.846186832828989e-05, "loss": 0.4168, "step": 81 },
    { "epoch": 1.04, "learning_rate": 9.842288912990096e-05, "loss": 0.4738, "step": 82 },
    { "epoch": 1.05, "learning_rate": 9.838343009973925e-05, "loss": 0.5146, "step": 83 },
    { "epoch": 1.06, "learning_rate": 9.83434916288119e-05, "loss": 0.481, "step": 84 },
    { "epoch": 1.08, "learning_rate": 9.830307411287695e-05, "loss": 0.5005, "step": 85 },
    { "epoch": 1.09, "learning_rate": 9.82621779524394e-05, "loss": 0.4643, "step": 86 },
    { "epoch": 1.1, "learning_rate": 9.822080355274719e-05, "loss": 0.5201, "step": 87 },
    { "epoch": 1.11, "learning_rate": 9.817895132378725e-05, "loss": 0.4168, "step": 88 },
    { "epoch": 1.13, "learning_rate": 9.813662168028144e-05, "loss": 0.4409, "step": 89 },
    { "epoch": 1.14, "learning_rate": 9.809381504168234e-05, "loss": 0.4593, "step": 90 },
    { "epoch": 1.15, "learning_rate": 9.805053183216923e-05, "loss": 0.4755, "step": 91 },
    { "epoch": 1.16, "learning_rate": 9.800677248064382e-05, "loss": 0.4565, "step": 92 },
    { "epoch": 1.18, "learning_rate": 9.796253742072596e-05, "loss": 0.4718, "step": 93 },
    { "epoch": 1.19, "learning_rate": 9.791782709074944e-05, "loss": 0.4884, "step": 94 },
    { "epoch": 1.2, "learning_rate": 9.787264193375753e-05, "loss": 0.5204, "step": 95 },
    { "epoch": 1.22, "learning_rate": 9.782698239749873e-05, "loss": 0.4807, "step": 96 },
    { "epoch": 1.23, "learning_rate": 9.778084893442218e-05, "loss": 0.4089, "step": 97 },
    { "epoch": 1.24, "learning_rate": 9.77342420016733e-05, "loss": 0.4371, "step": 98 },
    { "epoch": 1.25, "learning_rate": 9.768716206108921e-05, "loss": 0.4959, "step": 99 },
    { "epoch": 1.27, "learning_rate": 9.763960957919413e-05, "loss": 0.4446, "step": 100 },
    { "epoch": 1.28, "learning_rate": 9.759158502719481e-05, "loss": 0.4576, "step": 101 },
    { "epoch": 1.29, "learning_rate": 9.754308888097583e-05, "loss": 0.3678, "step": 102 },
    { "epoch": 1.3, "learning_rate": 9.749412162109485e-05, "loss": 0.3743, "step": 103 },
    { "epoch": 1.32, "learning_rate": 9.744468373277797e-05, "loss": 0.4571, "step": 104 },
    { "epoch": 1.33, "learning_rate": 9.739477570591473e-05, "loss": 0.3944, "step": 105 },
    { "epoch": 1.34, "learning_rate": 9.734439803505345e-05, "loss": 0.3904, "step": 106 },
    { "epoch": 1.35, "learning_rate": 9.729355121939621e-05, "loss": 0.494, "step": 107 },
    { "epoch": 1.37, "learning_rate": 9.724223576279395e-05, "loss": 0.4904, "step": 108 },
    { "epoch": 1.38, "learning_rate": 9.719045217374143e-05, "loss": 0.3727, "step": 109 },
    { "epoch": 1.39, "learning_rate": 9.713820096537225e-05, "loss": 0.4916, "step": 110 },
    { "epoch": 1.41, "learning_rate": 9.708548265545375e-05, "loss": 0.5386, "step": 111 },
    { "epoch": 1.42, "learning_rate": 9.703229776638185e-05, "loss": 0.3815, "step": 112 },
    { "epoch": 1.43, "learning_rate": 9.697864682517592e-05, "loss": 0.4243, "step": 113 },
    { "epoch": 1.44, "learning_rate": 9.692453036347351e-05, "loss": 0.5205, "step": 114 },
    { "epoch": 1.46, "learning_rate": 9.686994891752508e-05, "loss": 0.5158, "step": 115 },
    { "epoch": 1.47, "learning_rate": 9.681490302818874e-05, "loss": 0.4424, "step": 116 },
    { "epoch": 1.48, "learning_rate": 9.675939324092486e-05, "loss": 0.5825, "step": 117 },
    { "epoch": 1.49, "learning_rate": 9.670342010579065e-05, "loss": 0.6082, "step": 118 },
    { "epoch": 1.51, "learning_rate": 9.664698417743475e-05, "loss": 0.3913, "step": 119 },
    { "epoch": 1.52, "learning_rate": 9.659008601509168e-05, "loss": 0.4178, "step": 120 },
    { "epoch": 1.53, "learning_rate": 9.653272618257631e-05, "loss": 0.5543, "step": 121 },
    { "epoch": 1.54, "learning_rate": 9.647490524827834e-05, "loss": 0.4127, "step": 122 },
    { "epoch": 1.56, "learning_rate": 9.641662378515659e-05, "loss": 0.4079, "step": 123 },
    { "epoch": 1.57, "learning_rate": 9.635788237073334e-05, "loss": 0.4811, "step": 124 },
    { "epoch": 1.58, "learning_rate": 9.629868158708861e-05, "loss": 0.4229, "step": 125 },
    { "epoch": 1.59, "learning_rate": 9.623902202085444e-05, "loss": 0.4332, "step": 126 },
    { "epoch": 1.61, "learning_rate": 9.617890426320899e-05, "loss": 0.4451, "step": 127 },
    { "epoch": 1.62, "learning_rate": 9.611832890987076e-05, "loss": 0.4018, "step": 128 },
    { "epoch": 1.63, "learning_rate": 9.605729656109265e-05, "loss": 0.4175, "step": 129 },
    { "epoch": 1.65, "learning_rate": 9.599580782165598e-05, "loss": 0.4841, "step": 130 },
    { "epoch": 1.66, "learning_rate": 9.593386330086458e-05, "loss": 0.4353, "step": 131 },
    { "epoch": 1.67, "learning_rate": 9.587146361253868e-05, "loss": 0.4262, "step": 132 },
    { "epoch": 1.68, "learning_rate": 9.580860937500884e-05, "loss": 0.4572, "step": 133 },
    { "epoch": 1.7, "learning_rate": 9.57453012111099e-05, "loss": 0.4856, "step": 134 },
    { "epoch": 1.71, "learning_rate": 9.568153974817464e-05, "loss": 0.4451, "step": 135 },
    { "epoch": 1.72, "learning_rate": 9.561732561802778e-05, "loss": 0.5691, "step": 136 },
    { "epoch": 1.73, "learning_rate": 9.555265945697953e-05, "loss": 0.3989, "step": 137 },
    { "epoch": 1.75, "learning_rate": 9.548754190581939e-05, "loss": 0.4935, "step": 138 },
    { "epoch": 1.76, "learning_rate": 9.542197360980978e-05, "loss": 0.4637, "step": 139 },
    { "epoch": 1.77, "learning_rate": 9.53559552186796e-05, "loss": 0.4895, "step": 140 },
    { "epoch": 1.78, "learning_rate": 9.528948738661784e-05, "loss": 0.4625, "step": 141 },
    { "epoch": 1.8, "learning_rate": 9.522257077226717e-05, "loss": 0.4409, "step": 142 },
    { "epoch": 1.81, "learning_rate": 9.51552060387172e-05, "loss": 0.4123, "step": 143 },
    { "epoch": 1.82, "learning_rate": 9.508739385349812e-05, "loss": 0.4801, "step": 144 },
    { "epoch": 1.84, "learning_rate": 9.501913488857399e-05, "loss": 0.3965, "step": 145 },
    { "epoch": 1.85, "learning_rate": 9.49504298203361e-05, "loss": 0.4027, "step": 146 },
    { "epoch": 1.86, "learning_rate": 9.488127932959625e-05, "loss": 0.4779, "step": 147 },
    { "epoch": 1.87, "learning_rate": 9.481168410158003e-05, "loss": 0.4168, "step": 148 },
    { "epoch": 1.89, "learning_rate": 9.474164482592002e-05, "loss": 0.4352, "step": 149 },
    { "epoch": 1.9, "learning_rate": 9.467116219664894e-05, "loss": 0.5395, "step": 150 },
    { "epoch": 1.91, "learning_rate": 9.460023691219277e-05, "loss": 0.5874, "step": 151 },
    { "epoch": 1.92, "learning_rate": 9.45288696753639e-05, "loss": 0.5014, "step": 152 },
    { "epoch": 1.94, "learning_rate": 9.445706119335407e-05, "loss": 0.4106, "step": 153 },
    { "epoch": 1.95, "learning_rate": 9.438481217772744e-05, "loss": 0.4717, "step": 154 },
    { "epoch": 1.96, "learning_rate": 9.431212334441343e-05, "loss": 0.4105, "step": 155 },
    { "epoch": 1.97, "learning_rate": 9.423899541369978e-05, "loss": 0.4421, "step": 156 },
    { "epoch": 1.99, "learning_rate": 9.41654291102253e-05, "loss": 0.491, "step": 157 },
    { "epoch": 2.0, "learning_rate": 9.409142516297269e-05, "loss": 0.4624, "step": 158 },
    { "epoch": 2.01, "learning_rate": 9.401698430526142e-05, "loss": 0.2957, "step": 159 },
    { "epoch": 2.03, "learning_rate": 9.394210727474028e-05, "loss": 0.3436, "step": 160 },
    { "epoch": 2.04, "learning_rate": 9.386679481338033e-05, "loss": 0.3312, "step": 161 },
    { "epoch": 2.05, "learning_rate": 9.379104766746722e-05, "loss": 0.3232, "step": 162 },
    { "epoch": 2.06, "learning_rate": 9.371486658759416e-05, "loss": 0.3306, "step": 163 },
    { "epoch": 2.08, "learning_rate": 9.363825232865413e-05, "loss": 0.3507, "step": 164 },
    { "epoch": 2.09, "learning_rate": 9.356120564983266e-05, "loss": 0.3474, "step": 165 },
    { "epoch": 2.1, "learning_rate": 9.348372731460023e-05, "loss": 0.3206, "step": 166 },
    { "epoch": 2.11, "learning_rate": 9.340581809070459e-05, "loss": 0.2675, "step": 167 },
    { "epoch": 2.13, "learning_rate": 9.332747875016332e-05, "loss": 0.3432, "step": 168 },
    { "epoch": 2.14, "learning_rate": 9.324871006925613e-05, "loss": 0.3829, "step": 169 },
    { "epoch": 2.15, "learning_rate": 9.316951282851707e-05, "loss": 0.3136, "step": 170 },
    { "epoch": 2.16, "learning_rate": 9.308988781272694e-05, "loss": 0.3381, "step": 171 },
    { "epoch": 2.18, "learning_rate": 9.300983581090541e-05, "loss": 0.3394, "step": 172 },
    { "epoch": 2.19, "learning_rate": 9.292935761630326e-05, "loss": 0.3277, "step": 173 },
    { "epoch": 2.2, "learning_rate": 9.284845402639446e-05, "loss": 0.3286, "step": 174 },
    { "epoch": 2.22, "learning_rate": 9.276712584286833e-05, "loss": 0.3151, "step": 175 },
    { "epoch": 2.23, "learning_rate": 9.26853738716216e-05, "loss": 0.3557, "step": 176 },
    { "epoch": 2.24, "learning_rate": 9.260319892275034e-05, "loss": 0.2099, "step": 177 },
    { "epoch": 2.25, "learning_rate": 9.2520601810542e-05, "loss": 0.3627, "step": 178 },
    { "epoch": 2.27, "learning_rate": 9.243758335346735e-05, "loss": 0.285, "step": 179 },
    { "epoch": 2.28, "learning_rate": 9.235414437417234e-05, "loss": 0.3189, "step": 180 },
    { "epoch": 2.29, "learning_rate": 9.227028569946996e-05, "loss": 0.288, "step": 181 },
    { "epoch": 2.3, "learning_rate": 9.2186008160332e-05, "loss": 0.2462, "step": 182 },
    { "epoch": 2.32, "learning_rate": 9.210131259188095e-05, "loss": 0.2527, "step": 183 },
    { "epoch": 2.33, "learning_rate": 9.201619983338153e-05, "loss": 0.3712, "step": 184 },
    { "epoch": 2.34, "learning_rate": 9.193067072823251e-05, "loss": 0.3526, "step": 185 },
    { "epoch": 2.35, "learning_rate": 9.18447261239584e-05, "loss": 0.2721, "step": 186 },
    { "epoch": 2.37, "learning_rate": 9.175836687220084e-05, "loss": 0.2738, "step": 187 },
    { "epoch": 2.38, "learning_rate": 9.167159382871039e-05, "loss": 0.3511, "step": 188 },
    { "epoch": 2.39, "learning_rate": 9.15844078533379e-05, "loss": 0.3712, "step": 189 },
    { "epoch": 2.41, "learning_rate": 9.149680981002609e-05, "loss": 0.3244, "step": 190 },
    { "epoch": 2.42, "learning_rate": 9.140880056680088e-05, "loss": 0.3304, "step": 191 },
    { "epoch": 2.43, "learning_rate": 9.13203809957629e-05, "loss": 0.3476, "step": 192 },
    { "epoch": 2.44, "learning_rate": 9.123155197307876e-05, "loss": 0.2981, "step": 193 },
    { "epoch": 2.46, "learning_rate": 9.114231437897244e-05, "loss": 0.2947, "step": 194 },
    { "epoch": 2.47, "learning_rate": 9.105266909771653e-05, "loss": 0.2627, "step": 195 },
    { "epoch": 2.48, "learning_rate": 9.096261701762342e-05, "loss": 0.306, "step": 196 },
    { "epoch": 2.49, "learning_rate": 9.087215903103662e-05, "loss": 0.3326, "step": 197 },
    { "epoch": 2.51, "learning_rate": 9.078129603432181e-05, "loss": 0.2348, "step": 198 },
    { "epoch": 2.52, "learning_rate": 9.069002892785797e-05, "loss": 0.4223, "step": 199 },
    { "epoch": 2.53, "learning_rate": 9.059835861602853e-05, "loss": 0.294, "step": 200 },
    { "epoch": 2.54, "learning_rate": 9.050628600721234e-05, "loss": 0.269, "step": 201 },
    { "epoch": 2.56, "learning_rate": 9.041381201377468e-05, "loss": 0.3375, "step": 202 },
    { "epoch": 2.57, "learning_rate": 9.032093755205822e-05, "loss": 0.3022, "step": 203 },
    { "epoch": 2.58, "learning_rate": 9.0227663542374e-05, "loss": 0.3297, "step": 204 },
    { "epoch": 2.59, "learning_rate": 9.013399090899217e-05, "loss": 0.3108, "step": 205 },
    { "epoch": 2.61, "learning_rate": 9.003992058013302e-05, "loss": 0.2948, "step": 206 },
    { "epoch": 2.62, "learning_rate": 8.994545348795759e-05, "loss": 0.2719, "step": 207 },
    { "epoch": 2.63, "learning_rate": 8.985059056855858e-05, "loss": 0.3116, "step": 208 },
    { "epoch": 2.65, "learning_rate": 8.975533276195102e-05, "loss": 0.2567, "step": 209 },
    { "epoch": 2.66, "learning_rate": 8.965968101206291e-05, "loss": 0.3751, "step": 210 },
    { "epoch": 2.67, "learning_rate": 8.956363626672595e-05, "loss": 0.2538, "step": 211 },
    { "epoch": 2.68, "learning_rate": 8.94671994776661e-05, "loss": 0.2973, "step": 212 },
    { "epoch": 2.7, "learning_rate": 8.937037160049416e-05, "loss": 0.4477, "step": 213 },
    { "epoch": 2.71, "learning_rate": 8.927315359469626e-05, "loss": 0.3517, "step": 214 },
    { "epoch": 2.72, "learning_rate": 8.917554642362443e-05, "loss": 0.2779, "step": 215 },
    { "epoch": 2.73, "learning_rate": 8.907755105448704e-05, "loss": 0.3196, "step": 216 },
    { "epoch": 2.75, "learning_rate": 8.89791684583391e-05, "loss": 0.31, "step": 217 },
    { "epoch": 2.76, "learning_rate": 8.888039961007282e-05, "loss": 0.2522, "step": 218 },
    { "epoch": 2.77, "learning_rate": 8.87812454884078e-05, "loss": 0.2974, "step": 219 },
    { "epoch": 2.78, "learning_rate": 8.868170707588142e-05, "loss": 0.2817, "step": 220 },
    { "epoch": 2.8, "learning_rate": 8.858178535883905e-05, "loss": 0.3853, "step": 221 },
    { "epoch": 2.81, "learning_rate": 8.848148132742431e-05, "loss": 0.2393, "step": 222 },
    { "epoch": 2.82, "learning_rate": 8.838079597556925e-05, "loss": 0.3607, "step": 223 },
    { "epoch": 2.84, "learning_rate": 8.827973030098448e-05, "loss": 0.3263, "step": 224 },
    { "epoch": 2.85, "learning_rate": 8.81782853051493e-05, "loss": 0.3084, "step": 225 },
    { "epoch": 2.86, "learning_rate": 8.807646199330187e-05, "loss": 0.3019, "step": 226 },
    { "epoch": 2.87, "learning_rate": 8.797426137442897e-05, "loss": 0.3646, "step": 227 },
    { "epoch": 2.89, "learning_rate": 8.787168446125638e-05, "loss": 0.3149, "step": 228 },
    { "epoch": 2.9, "learning_rate": 8.776873227023852e-05, "loss": 0.326, "step": 229 },
    { "epoch": 2.91, "learning_rate": 8.766540582154859e-05, "loss": 0.3216, "step": 230 },
    { "epoch": 2.92, "learning_rate": 8.756170613906833e-05, "loss": 0.3348, "step": 231 },
    { "epoch": 2.94, "learning_rate": 8.745763425037797e-05, "loss": 0.2524, "step": 232 },
    { "epoch": 2.95, "learning_rate": 8.735319118674596e-05, "loss": 0.2725, "step": 233 },
    { "epoch": 2.96, "learning_rate": 8.724837798311882e-05, "loss": 0.2733, "step": 234 },
    { "epoch": 2.97, "learning_rate": 8.714319567811088e-05, "loss": 0.287, "step": 235 },
    { "epoch": 2.99, "learning_rate": 8.703764531399392e-05, "loss": 0.2891, "step": 236 },
    { "epoch": 3.0, "learning_rate": 8.69317279366869e-05, "loss": 0.2329, "step": 237 },
    { "epoch": 3.01, "learning_rate": 8.682544459574562e-05, "loss": 0.1752, "step": 238 },
    { "epoch": 3.03, "learning_rate": 8.671879634435224e-05, "loss": 0.1686, "step": 239 },
    { "epoch": 3.04, "learning_rate": 8.661178423930491e-05, "loss": 0.2164, "step": 240 },
    { "epoch": 3.05, "learning_rate": 8.650440934100728e-05, "loss": 0.1909, "step": 241 },
    { "epoch": 3.06, "learning_rate": 8.639667271345798e-05, "loss": 0.1765, "step": 242 },
    { "epoch": 3.08, "learning_rate": 8.628857542424009e-05, "loss": 0.1558, "step": 243 },
    { "epoch": 3.09, "learning_rate": 8.618011854451056e-05, "loss": 0.1471, "step": 244 },
    { "epoch": 3.1, "learning_rate": 8.607130314898956e-05, "loss": 0.175, "step": 245 },
    { "epoch": 3.11, "learning_rate": 8.596213031594991e-05, "loss": 0.1692, "step": 246 },
    { "epoch": 3.13, "learning_rate": 8.585260112720631e-05, "loss": 0.1176, "step": 247 },
    { "epoch": 3.14, "learning_rate": 8.57427166681047e-05, "loss": 0.1773, "step": 248 },
    { "epoch": 3.15, "learning_rate": 8.56324780275114e-05, "loss": 0.1821, "step": 249 },
    { "epoch": 3.16, "learning_rate": 8.552188629780244e-05, "loss": 0.1471, "step": 250 },
    { "epoch": 3.18, "learning_rate": 8.541094257485265e-05, "loss": 0.1949, "step": 251 },
    { "epoch": 3.19, "learning_rate": 8.529964795802485e-05, "loss": 0.1234, "step": 252 },
    { "epoch": 3.2, "learning_rate": 8.518800355015892e-05, "loss": 0.1625, "step": 253 },
    { "epoch": 3.22, "learning_rate": 8.507601045756085e-05, "loss": 0.1417, "step": 254 },
    { "epoch": 3.23, "learning_rate": 8.49636697899919e-05, "loss": 0.1088, "step": 255 },
    { "epoch": 3.24, "learning_rate": 8.485098266065744e-05, "loss": 0.1524, "step": 256 },
    { "epoch": 3.25, "learning_rate": 8.473795018619604e-05, "loss": 0.1711, "step": 257 },
    { "epoch": 3.27, "learning_rate": 8.462457348666835e-05, "loss": 0.161, "step": 258 },
    { "epoch": 3.28, "learning_rate": 8.4510853685546e-05, "loss": 0.1641, "step": 259 },
    { "epoch": 3.29, "learning_rate": 8.439679190970052e-05, "loss": 0.1633, "step": 260 },
    { "epoch": 3.3, "learning_rate": 8.428238928939207e-05, "loss": 0.1673, "step": 261 },
    { "epoch": 3.32, "learning_rate": 8.416764695825835e-05, "loss": 0.1629, "step": 262 },
    { "epoch": 3.33, "learning_rate": 8.405256605330331e-05, "loss": 0.177, "step": 263 },
    { "epoch": 3.34, "learning_rate": 8.39371477148859e-05, "loss": 0.1188, "step": 264 },
    { "epoch": 3.35, "learning_rate": 8.382139308670875e-05, "loss": 0.1934, "step": 265 },
    { "epoch": 3.37, "learning_rate": 8.370530331580686e-05, "loss": 0.1049, "step": 266 },
    { "epoch": 3.38, "learning_rate": 8.35888795525362e-05, "loss": 0.1376, "step": 267 },
    { "epoch": 3.39, "learning_rate": 8.347212295056239e-05, "loss": 0.1574, "step": 268 },
    { "epoch": 3.41, "learning_rate": 8.335503466684915e-05, "loss": 0.1222, "step": 269 },
    { "epoch": 3.42, "learning_rate": 8.323761586164695e-05, "loss": 0.1634, "step": 270 },
    { "epoch": 3.43, "learning_rate": 8.311986769848141e-05, "loss": 0.1764, "step": 271 },
    { "epoch": 3.44, "learning_rate": 8.300179134414188e-05, "loss": 0.1621, "step": 272 },
    { "epoch": 3.46, "learning_rate": 8.288338796866976e-05, "loss": 0.1549, "step": 273 },
    { "epoch": 3.47, "learning_rate": 8.276465874534702e-05, "loss": 0.1053, "step": 274 },
    { "epoch": 3.48, "learning_rate": 8.264560485068446e-05, "loss": 0.1224, "step": 275 },
    { "epoch": 3.49, "learning_rate": 8.252622746441021e-05, "loss": 0.1361, "step": 276 },
    { "epoch": 3.51, "learning_rate": 8.240652776945781e-05, "loss": 0.1753, "step": 277 },
    { "epoch": 3.52, "learning_rate": 8.228650695195472e-05, "loss": 0.1539, "step": 278 },
    { "epoch": 3.53, "learning_rate": 8.216616620121043e-05, "loss": 0.1603, "step": 279 },
    { "epoch": 3.54, "learning_rate": 8.204550670970469e-05, "loss": 0.1519, "step": 280 },
    { "epoch": 3.56, "learning_rate": 8.192452967307576e-05, "loss": 0.1494, "step": 281 },
    { "epoch": 3.57, "learning_rate": 8.180323629010848e-05, "loss": 0.1695, "step": 282 },
    { "epoch": 3.58, "learning_rate": 8.168162776272244e-05, "loss": 0.1384, "step": 283 },
    { "epoch": 3.59, "learning_rate": 8.155970529596006e-05, "loss": 0.1552, "step": 284 },
    { "epoch": 3.61, "learning_rate": 8.143747009797464e-05, "loss": 0.166, "step": 285 },
    { "epoch": 3.62, "learning_rate": 8.131492338001839e-05, "loss": 0.2007, "step": 286 },
    { "epoch": 3.63, "learning_rate": 8.119206635643045e-05, "loss": 0.1331, "step": 287 },
    { "epoch": 3.65, "learning_rate": 8.106890024462481e-05, "loss": 0.2133, "step": 288 },
    { "epoch": 3.66, "learning_rate": 8.094542626507828e-05, "loss": 0.1507, "step": 289 },
    { "epoch": 3.67, "learning_rate": 8.082164564131845e-05, "loss": 0.1465, "step": 290 },
    { "epoch": 3.68, "learning_rate": 8.069755959991142e-05, "loss": 0.1344, "step": 291 },
    { "epoch": 3.7, "learning_rate": 8.057316937044977e-05, "loss": 0.1623, "step": 292 },
    { "epoch": 3.71, "learning_rate": 8.044847618554034e-05, "loss": 0.1823, "step": 293 },
    { "epoch": 3.72, "learning_rate": 8.032348128079203e-05, "loss": 0.1191, "step": 294 },
    { "epoch": 3.73, "learning_rate": 8.019818589480352e-05, "loss": 0.1536, "step": 295 },
    { "epoch": 3.75, "learning_rate": 8.0072591269151e-05, "loss": 0.1745, "step": 296 },
    { "epoch": 3.76, "learning_rate": 7.994669864837594e-05, "loss": 0.1265, "step": 297 },
    { "epoch": 3.77, "learning_rate": 7.982050927997264e-05, "loss": 0.1475, "step": 298 },
    { "epoch": 3.78, "learning_rate": 7.969402441437594e-05, "loss": 0.1597, "step": 299 },
    { "epoch": 3.8, "learning_rate": 7.956724530494887e-05, "loss": 0.1716, "step": 300 },
    { "epoch": 3.81, "learning_rate": 7.944017320797013e-05, "loss": 0.1569, "step": 301 },
    { "epoch": 3.82, "learning_rate": 7.931280938262169e-05, "loss": 0.1696, "step": 302 },
    { "epoch": 3.84, "learning_rate": 7.918515509097634e-05, "loss": 0.1879, "step": 303 },
    { "epoch": 3.85, "learning_rate": 7.905721159798513e-05, "loss": 0.1568, "step": 304 },
    { "epoch": 3.86, "learning_rate": 7.89289801714649e-05, "loss": 0.1276, "step": 305 },
    { "epoch": 3.87, "learning_rate": 7.880046208208563e-05, "loss": 0.1508, "step": 306 },
    { "epoch": 3.89, "learning_rate": 7.867165860335792e-05, "loss": 0.1641, "step": 307 },
    { "epoch": 3.9, "learning_rate": 7.854257101162037e-05, "loss": 0.1571, "step": 308 },
    { "epoch": 3.91, "learning_rate": 7.841320058602688e-05, "loss": 0.1642, "step": 309 },
    { "epoch": 3.92, "learning_rate": 7.828354860853399e-05, "loss": 0.2288, "step": 310 },
    { "epoch": 3.94, "learning_rate": 7.815361636388827e-05, "loss": 0.1195, "step": 311 },
    { "epoch": 3.95, "learning_rate": 7.802340513961342e-05, "loss": 0.1699, "step": 312 },
    { "epoch": 3.96, "learning_rate": 7.789291622599767e-05, "loss": 0.1761, "step": 313 },
    { "epoch": 3.97, "learning_rate": 7.776215091608085e-05, "loss": 0.2214, "step": 314 },
    { "epoch": 3.99, "learning_rate": 7.763111050564178e-05, "loss": 0.2109, "step": 315 },
    { "epoch": 4.0, "learning_rate": 7.749979629318516e-05, "loss": 0.1801, "step": 316 },
    { "epoch": 4.01, "learning_rate": 7.736820957992895e-05, "loss": 0.0728, "step": 317 },
    { "epoch": 4.03, "learning_rate": 7.723635166979133e-05, "loss": 0.0642, "step": 318 },
    { "epoch": 4.04, "learning_rate": 7.710422386937784e-05, "loss": 0.0868, "step": 319 },
    { "epoch": 4.05, "learning_rate": 7.697182748796841e-05, "loss": 0.0672, "step": 320 },
    { "epoch": 4.06, "learning_rate": 7.683916383750436e-05, "loss": 0.0645, "step": 321 },
    { "epoch": 4.08, "learning_rate": 7.670623423257548e-05, "loss": 0.0746, "step": 322 },
    { "epoch": 4.09, "learning_rate": 7.657303999040693e-05, "loss": 0.0801, "step": 323 },
    { "epoch": 4.1, "learning_rate": 7.64395824308462e-05, "loss": 0.0557, "step": 324 },
    { "epoch": 4.11, "learning_rate": 7.630586287635008e-05, "loss": 0.061, "step": 325 },
    { "epoch": 4.13, "learning_rate": 7.617188265197148e-05, "loss": 0.0666, "step": 326 },
    { "epoch": 4.14, "learning_rate": 7.603764308534636e-05, "loss": 0.0495, "step": 327 },
    { "epoch": 4.15, "learning_rate": 7.590314550668054e-05, "loss": 0.0591, "step": 328 },
    { "epoch": 4.16, "learning_rate": 7.576839124873653e-05, "loss": 0.0914, "step": 329 },
    { "epoch": 4.18, "learning_rate": 7.563338164682036e-05, "loss": 0.1023, "step": 330 },
    { "epoch": 4.19, "learning_rate": 7.549811803876825e-05, "loss": 0.1043, "step": 331 },
    { "epoch": 4.2, "learning_rate": 7.536260176493348e-05, "loss": 0.054, "step": 332 },
    { "epoch": 4.22, "learning_rate": 7.5226834168173e-05, "loss": 0.0712, "step": 333 },
    { "epoch": 4.23, "learning_rate": 7.509081659383417e-05, "loss": 0.0814, "step": 334 },
    { "epoch": 4.24, "learning_rate": 7.495455038974146e-05, "loss": 0.0573, "step": 335 },
    { "epoch": 4.25, "learning_rate": 7.481803690618304e-05, "loss": 0.0603, "step": 336 },
    { "epoch": 4.27, "learning_rate": 7.46812774958974e-05, "loss": 0.0565, "step": 337 },
    { "epoch": 4.28, "learning_rate": 7.454427351405999e-05, "loss": 0.063, "step": 338 },
    { "epoch": 4.29, "learning_rate": 7.440702631826977e-05, "loss": 0.0666, "step": 339 },
    { "epoch": 4.3, "learning_rate": 7.426953726853574e-05, "loss": 0.0669, "step": 340 },
    { "epoch": 4.32, "learning_rate": 7.413180772726348e-05, "loss": 0.0812, "step": 341 },
    { "epoch": 4.33, "learning_rate": 7.399383905924165e-05, "loss": 0.081, "step": 342 },
    { "epoch": 4.34, "learning_rate": 7.385563263162847e-05, "loss": 0.0697, "step": 343 },
    { "epoch": 4.35, "learning_rate": 7.371718981393815e-05, "loss": 0.0639, "step": 344 },
    { "epoch": 4.37, "learning_rate": 7.357851197802735e-05, "loss": 0.0792, "step": 345 },
    { "epoch": 4.38, "learning_rate": 7.343960049808156e-05, "loss": 0.0665, "step": 346 },
    { "epoch": 4.39, "learning_rate": 7.330045675060149e-05, "loss": 0.0593, "step": 347 },
    { "epoch": 4.41, "learning_rate": 7.316108211438945e-05, "loss": 0.0307, "step": 348 },
    { "epoch": 4.42, "learning_rate": 7.302147797053569e-05, "loss": 0.0656, "step": 349 },
    { "epoch": 4.43, "learning_rate": 7.288164570240463e-05, "loss": 0.049, "step": 350 },
    { "epoch": 4.44, "learning_rate": 7.274158669562126e-05, "loss": 0.0653, "step": 351 },
    { "epoch": 4.46, "learning_rate": 7.26013023380574e-05, "loss": 0.0668, "step": 352 },
    { "epoch": 4.47, "learning_rate": 7.246079401981784e-05, "loss": 0.0894, "step": 353 },
    { "epoch": 4.48, "learning_rate": 7.232006313322667e-05, "loss": 0.0641, "step": 354 },
    { "epoch": 4.49, "learning_rate": 7.217911107281352e-05, "loss": 0.0905, "step": 355 },
    { "epoch": 4.51, "learning_rate": 7.203793923529956e-05, "loss": 0.0753, "step": 356 },
    { "epoch": 4.52, "learning_rate": 7.189654901958385e-05, "loss": 0.0737, "step": 357 },
    { "epoch": 4.53, "learning_rate": 7.175494182672939e-05, "loss": 0.0828, "step": 358 },
    { "epoch": 4.54, "learning_rate": 7.161311905994922e-05, "loss": 0.0597, "step": 359 },
    { "epoch": 4.56, "learning_rate": 7.147108212459257e-05, "loss": 0.0623, "step": 360 },
    { "epoch": 4.57, "learning_rate": 7.13288324281309e-05, "loss": 0.0751, "step": 361 },
    { "epoch": 4.58, "learning_rate": 7.118637138014396e-05, "loss": 0.0586, "step": 362 },
    { "epoch": 4.59, "learning_rate": 7.104370039230583e-05, "loss": 0.0794, "step": 363 },
    { "epoch": 4.61, "learning_rate": 7.090082087837092e-05, "loss": 0.0554, "step": 364 },
    { "epoch": 4.62, "learning_rate": 7.075773425415994e-05, "loss": 0.0601, "step": 365 },
    { "epoch": 4.63, "learning_rate": 7.061444193754596e-05, "loss": 0.0867, "step": 366 },
    { "epoch": 4.65, "learning_rate": 7.047094534844023e-05, "loss": 0.0828, "step": 367 },
    { "epoch": 4.66, "learning_rate": 7.032724590877821e-05, "loss": 0.0879, "step": 368 },
    { "epoch": 4.67, "learning_rate": 7.018334504250545e-05, "loss": 0.068, "step": 369 },
    { "epoch": 4.68, "learning_rate": 7.003924417556343e-05, "loss": 0.0586, "step": 370 },
    { "epoch": 4.7, "learning_rate": 6.989494473587554e-05, "loss": 0.0971, "step": 371 },
    { "epoch": 4.71, "learning_rate": 6.975044815333282e-05, "loss": 0.0864, "step": 372 },
    { "epoch": 4.72, "learning_rate": 6.960575585977984e-05, "loss": 0.0482, "step": 373 },
    { "epoch": 4.73, "learning_rate": 6.946086928900054e-05, "loss": 0.0812, "step": 374 },
    { "epoch": 4.75, "learning_rate": 6.931578987670396e-05, "loss": 0.0756, "step": 375 },
    { "epoch": 4.76, "learning_rate": 6.917051906051006e-05, "loss": 0.0585, "step": 376 },
    { "epoch": 4.77, "learning_rate": 6.902505827993541e-05, "loss": 0.0653, "step": 377 },
    { "epoch": 4.78, "learning_rate": 6.887940897637908e-05, "loss": 0.0783, "step": 378 },
    { "epoch": 4.8, "learning_rate": 6.873357259310815e-05, "loss": 0.0849, "step": 379 },
    { "epoch": 4.81, "learning_rate": 6.858755057524354e-05, "loss": 0.0642, "step": 380 },
    { "epoch": 4.82, "learning_rate": 6.844134436974567e-05, "loss": 0.0651, "step": 381 },
    { "epoch": 4.84, "learning_rate": 6.829495542540013e-05, "loss": 0.0511, "step": 382 },
    { "epoch": 4.85, "learning_rate": 6.814838519280324e-05, "loss": 0.0713, "step": 383 },
    { "epoch": 4.86, "learning_rate": 6.80016351243478e-05, "loss": 0.0853, "step": 384 },
    { "epoch": 4.87, "learning_rate": 6.785470667420862e-05, "loss": 0.0777, "step": 385 },
    { "epoch": 4.89, "learning_rate": 6.77076012983281e-05, "loss": 0.0714, "step": 386 },
    { "epoch": 4.9, "learning_rate": 6.75603204544019e-05, "loss": 0.051, "step": 387 },
    { "epoch": 4.91, "learning_rate": 6.741286560186437e-05, "loss": 0.0831, "step": 388 },
    { "epoch": 4.92, "learning_rate": 6.726523820187413e-05, "loss": 0.0613, "step": 389 },
    { "epoch": 4.94, "learning_rate": 6.711743971729967e-05, "loss": 0.092, "step": 390 },
    { "epoch": 4.95, "learning_rate": 6.696947161270476e-05, "loss": 0.0457, "step": 391 },
    { "epoch": 4.96, "learning_rate": 6.682133535433393e-05, "loss": 0.0491, "step": 392 },
    { "epoch": 4.97, "learning_rate": 6.667303241009803e-05, "loss": 0.0788, "step": 393 },
    { "epoch": 4.99, "learning_rate": 6.652456424955963e-05, "loss": 0.0598, "step": 394 },
    { "epoch": 5.0, "learning_rate": 6.637593234391843e-05, "loss": 0.0446, "step": 395 },
    { "epoch": 5.01, "learning_rate": 6.622713816599673e-05, "loss": 0.02, "step": 396 },
    { "epoch": 5.03, "learning_rate": 6.60781831902248e-05, "loss": 0.03, "step": 397 },
    { "epoch": 5.04, "learning_rate": 6.592906889262632e-05, "loss": 0.0296, "step": 398 },
    { "epoch": 5.05, "learning_rate": 6.577979675080369e-05, "loss": 0.0267, "step": 399 },
    { "epoch": 5.06, "learning_rate": 6.563036824392344e-05, "loss": 0.0358, "step": 400 },
    { "epoch": 5.08, "learning_rate": 6.548078485270152e-05, "loss": 0.0387, "step": 401 },
    { "epoch": 5.09, "learning_rate": 6.533104805938873e-05, "loss": 0.0335, "step": 402 },
    { "epoch": 5.1, "learning_rate": 6.518115934775585e-05, "loss": 0.0275, "step": 403 },
    { "epoch": 5.11, "learning_rate": 6.503112020307916e-05, "loss": 0.0258, "step": 404 },
    { "epoch": 5.13, "learning_rate": 6.488093211212555e-05, "loss": 0.0534, "step": 405 },
    { "epoch": 5.14, "learning_rate": 6.473059656313782e-05, "loss": 0.0176, "step": 406 },
    { "epoch": 5.15, "learning_rate": 6.458011504582005e-05, "loss": 0.025, "step": 407 },
    { "epoch": 5.16, "learning_rate": 6.442948905132266e-05, "loss": 0.0172, "step": 408 },
    { "epoch": 5.18, "learning_rate": 6.427872007222777e-05, "loss": 0.0312, "step": 409 },
    { "epoch": 5.19, "learning_rate": 6.412780960253436e-05, "loss": 0.0279, "step": 410 },
    { "epoch": 5.2, "learning_rate": 6.397675913764347e-05, "loss": 0.0279, "step": 411 },
    { "epoch": 5.22, "learning_rate": 6.382557017434332e-05, "loss": 0.0198, "step": 412 },
    { "epoch": 5.23, "learning_rate": 6.367424421079463e-05, "loss": 0.0404, "step": 413 },
    { "epoch": 5.24, "learning_rate": 6.352278274651561e-05, "loss": 0.0266, "step": 414 },
    { "epoch": 5.25, "learning_rate": 6.337118728236721e-05, "loss": 0.0341, "step": 415 },
    { "epoch": 5.27, "learning_rate": 6.321945932053822e-05, "loss": 0.0205, "step": 416 },
    { "epoch": 5.28, "learning_rate": 6.306760036453035e-05, "loss": 0.0341, "step": 417 },
    { "epoch": 5.29, "learning_rate": 6.291561191914333e-05, "loss": 0.0398, "step": 418 },
    { "epoch": 5.3, "learning_rate": 6.276349549046007e-05, "loss": 0.0287, "step": 419 },
    { "epoch": 5.32, "learning_rate": 6.261125258583171e-05, "loss": 0.0491, "step": 420 },
    { "epoch": 5.33, "learning_rate": 6.245888471386263e-05, "loss": 0.0235, "step": 421 },
    { "epoch": 5.34, "learning_rate": 6.230639338439549e-05, "loss": 0.0312, "step": 422 },
    { "epoch": 5.35, "learning_rate": 6.215378010849641e-05, "loss": 0.0413, "step": 423 },
    { "epoch": 5.37, "learning_rate": 6.200104639843985e-05, "loss": 0.0316, "step": 424 },
    { "epoch": 5.38, "learning_rate": 6.184819376769364e-05, "loss": 0.044, "step": 425 },
    { "epoch": 5.39, "learning_rate": 6.169522373090412e-05, "loss": 0.0233, "step": 426 },
    { "epoch": 5.41, "learning_rate": 6.154213780388092e-05, "loss": 0.0152, "step": 427 },
    { "epoch": 5.42, "learning_rate": 6.138893750358212e-05, "loss": 0.0589, "step": 428 },
    { "epoch": 5.43, "learning_rate": 6.123562434809912e-05, "loss": 0.0534, "step": 429 },
    { "epoch": 5.44, "learning_rate": 6.108219985664161e-05, "loss": 0.0453, "step": 430 },
    { "epoch": 5.46, "learning_rate": 6.0928665549522554e-05, "loss": 0.0252, "step": 431 },
    { "epoch": 5.47, "learning_rate": 6.0775022948143115e-05, "loss": 0.0337, "step": 432 },
    { "epoch": 5.48, "learning_rate": 6.06212735749775e-05, "loss": 0.0406, "step": 433 },
    { "epoch": 5.49, "learning_rate": 6.046741895355802e-05, "loss": 0.044, "step": 434 },
    { "epoch": 5.51, "learning_rate": 6.031346060845986e-05, "loss": 0.0646, "step": 435 },
    { "epoch": 5.52, "learning_rate": 6.015940006528602e-05, "loss": 0.0238, "step": 436 },
    { "epoch": 5.53, "learning_rate": 6.0005238850652234e-05, "loss": 0.0517, "step": 437 },
    { "epoch": 5.54, "learning_rate": 5.9850978492171794e-05, "loss": 0.0274, "step": 438 },
    { "epoch": 5.56, "learning_rate": 5.96966205184404e-05, "loss": 0.0247, "step": 439 },
    { "epoch": 5.57, "learning_rate": 5.954216645902109e-05, "loss": 0.0648, "step": 440 },
    { "epoch": 5.58, "learning_rate": 5.9387617844429e-05, "loss": 0.0282, "step": 441 },
    { "epoch": 5.59, "learning_rate": 5.923297620611623e-05, "loss": 0.0537, "step": 442 },
    { "epoch": 5.61, "learning_rate": 5.907824307645669e-05, "loss": 0.048, "step": 443 },
    { "epoch": 5.62, "learning_rate": 5.892341998873089e-05, "loss": 0.0227, "step": 444 },
    { "epoch": 5.63, "learning_rate": 5.876850847711073e-05, "loss": 0.0211, "step": 445 },
    { "epoch": 5.65, "learning_rate": 5.861351007664434e-05, "loss": 0.0318, "step": 446 },
    { "epoch": 5.66, "learning_rate": 5.845842632324088e-05, "loss": 0.0225, "step": 447 },
    { "epoch": 5.67, "learning_rate": 5.83032587536552e-05, "loss": 0.0482, "step": 448 },
    { "epoch": 5.68, "learning_rate": 5.814800890547278e-05, "loss": 0.0543, "step": 449 },
    { "epoch": 5.7, "learning_rate": 5.799267831709442e-05, "loss": 0.0264, "step": 450 },
    { "epoch": 5.71, "learning_rate": 5.78372685277209e-05, "loss": 0.0311, "step": 451 },
    { "epoch": 5.72, "learning_rate": 5.7681781077337905e-05, "loss": 0.0524, "step": 452 },
    { "epoch": 5.73, "learning_rate": 5.752621750670068e-05, "loss": 0.0186, "step": 453 },
    { "epoch": 5.75, "learning_rate": 5.737057935731868e-05, "loss": 0.0269, "step": 454 },
    { "epoch": 5.76, "learning_rate": 5.721486817144044e-05, "loss": 0.0312, "step": 455 },
    { "epoch": 5.77, "learning_rate": 5.705908549203823e-05, "loss": 0.0356, "step": 456 },
    { "epoch": 5.78, "learning_rate": 5.690323286279274e-05, "loss": 0.0507, "step": 457 },
    { "epoch": 5.8, "learning_rate": 5.674731182807781e-05, "loss": 0.0399, "step": 458 },
    { "epoch": 5.81, "learning_rate": 5.659132393294514e-05, "loss": 0.0364, "step": 459 },
    { "epoch": 5.82, "learning_rate": 5.643527072310891e-05, "loss": 0.0453, "step": 460 },
    { "epoch": 5.84, "learning_rate": 5.627915374493061e-05, "loss": 0.021, "step": 461 },
    { "epoch": 5.85, "learning_rate": 5.612297454540352e-05, "loss": 0.0155, "step": 462 },
    { "epoch": 5.86, "learning_rate": 5.596673467213756e-05, "loss": 0.0336, "step": 463 },
    { "epoch": 5.87, "learning_rate": 5.581043567334383e-05, "loss": 0.0245, "step": 464 },
    { "epoch": 5.89, "learning_rate": 5.5654079097819345e-05, "loss": 0.0359, "step": 465 },
    { "epoch": 5.9, "learning_rate": 5.5497666494931654e-05, "loss": 0.0245, "step": 466 },
    { "epoch": 5.91, "learning_rate": 5.5341199414603493e-05, "loss": 0.0246, "step": 467 },
    { "epoch": 5.92, "learning_rate": 5.518467940729739e-05, "loss": 0.0222, "step": 468 },
    { "epoch": 5.94, "learning_rate": 5.502810802400039e-05, "loss": 0.0317, "step": 469 },
    { "epoch": 5.95, "learning_rate": 5.487148681620862e-05, "loss": 0.0199, "step": 470 },
    { "epoch": 5.96, "learning_rate": 5.4714817335911894e-05, "loss": 0.0559, "step": 471 },
    { "epoch": 5.97, "learning_rate": 5.455810113557839e-05, "loss": 0.0405, "step": 472 },
    { "epoch": 5.99, "learning_rate": 5.440133976813926e-05, "loss": 0.0416, "step": 473 },
    { "epoch": 6.0, "learning_rate": 5.4244534786973214e-05, "loss": 0.0208, "step": 474 },
    { "epoch": 6.01, "learning_rate": 5.40876877458911e-05, "loss": 0.0295, "step": 475 },
    { "epoch": 6.03, "learning_rate": 5.3930800199120616e-05, "loss": 0.0106, "step": 476 },
    { "epoch": 6.04, "learning_rate": 5.377387370129079e-05, "loss": 0.0165, "step": 477 },
    { "epoch": 6.05, "learning_rate": 5.361690980741663e-05, "loss": 0.0098, "step": 478 },
    { "epoch": 6.06, "learning_rate": 5.345991007288371e-05, "loss": 0.0259, "step": 479 },
    { "epoch": 6.08, "learning_rate": 5.330287605343279e-05, "loss": 0.0065, "step": 480 },
    { "epoch": 6.09, "learning_rate": 5.314580930514431e-05, "loss": 0.0162, "step": 481 },
    { "epoch": 6.1, "learning_rate": 5.298871138442307e-05, "loss": 0.0107, "step": 482 },
    { "epoch": 6.11, "learning_rate": 5.283158384798275e-05, "loss": 0.0126, "step": 483 },
    { "epoch": 6.13, "learning_rate": 5.267442825283048e-05, "loss": 0.0275, "step": 484 },
    { "epoch": 6.14, "learning_rate": 5.2517246156251455e-05, "loss": 0.022, "step": 485 },
    { "epoch": 6.15, "learning_rate": 5.236003911579345e-05, "loss": 0.0101, "step": 486 },
    { "epoch": 6.16, "learning_rate": 5.220280868925145e-05, "loss": 0.0189, "step": 487 },
    { "epoch": 6.18, "learning_rate": 5.204555643465215e-05, "loss": 0.0205, "step": 488 },
    { "epoch": 6.19, "learning_rate": 5.1888283910238555e-05, "loss": 0.0123, "step": 489 },
    { "epoch": 6.2, "learning_rate": 5.173099267445451e-05, "loss": 0.015, "step": 490 },
    { "epoch": 6.22, "learning_rate": 5.157368428592933e-05, "loss": 0.0116, "step": 491 },
    { "epoch": 6.23, "learning_rate": 5.1416360303462206e-05, "loss": 0.0219, "step": 492 },
    { "epoch": 6.24, "learning_rate": 5.125902228600693e-05, "loss": 0.0095, "step": 493 },
    { "epoch": 6.25, "learning_rate": 5.110167179265636e-05, "loss": 0.0287, "step": 494 },
    { "epoch": 6.27, "learning_rate": 5.094431038262693e-05, "loss": 0.0184, "step": 495 },
    { "epoch": 6.28, "learning_rate": 5.078693961524329e-05, "loss": 0.0118, "step": 496 },
    { "epoch": 6.29, "learning_rate": 5.062956104992285e-05, "loss": 0.0147, "step": 497 },
    { "epoch": 6.3, "learning_rate": 5.0472176246160184e-05, "loss": 0.02, "step": 498 },
    { "epoch": 6.32, "learning_rate": 5.031478676351179e-05, "loss": 0.0126, "step": 499 },
    { "epoch": 6.33, "learning_rate": 5.01573941615805e-05, "loss": 0.0118, "step": 500 }
  ],
  "max_steps": 1000,
  "num_train_epochs": 13,
  "total_flos": 3.637158979269427e+16,
  "trial_name": null,
  "trial_params": null
}