| { | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 1.0, | |
| "eval_steps": 500, | |
| "global_step": 2410, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.0004149377593360996, | |
| "grad_norm": 26.93341562765044, | |
| "learning_rate": 4.1493775933609963e-08, | |
| "loss": 1.4725, | |
| "step": 1 | |
| }, | |
| { | |
| "epoch": 0.002074688796680498, | |
| "grad_norm": 25.297316212761707, | |
| "learning_rate": 2.074688796680498e-07, | |
| "loss": 1.4346, | |
| "step": 5 | |
| }, | |
| { | |
| "epoch": 0.004149377593360996, | |
| "grad_norm": 16.240075815340298, | |
| "learning_rate": 4.149377593360996e-07, | |
| "loss": 1.3793, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.006224066390041493, | |
| "grad_norm": 8.414236179504735, | |
| "learning_rate": 6.224066390041494e-07, | |
| "loss": 1.274, | |
| "step": 15 | |
| }, | |
| { | |
| "epoch": 0.008298755186721992, | |
| "grad_norm": 10.510339353255072, | |
| "learning_rate": 8.298755186721992e-07, | |
| "loss": 1.1601, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.01037344398340249, | |
| "grad_norm": 4.437200517809562, | |
| "learning_rate": 1.037344398340249e-06, | |
| "loss": 1.0375, | |
| "step": 25 | |
| }, | |
| { | |
| "epoch": 0.012448132780082987, | |
| "grad_norm": 3.4287551512904613, | |
| "learning_rate": 1.2448132780082988e-06, | |
| "loss": 0.9974, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.014522821576763486, | |
| "grad_norm": 3.4049944593465016, | |
| "learning_rate": 1.4522821576763488e-06, | |
| "loss": 0.9596, | |
| "step": 35 | |
| }, | |
| { | |
| "epoch": 0.016597510373443983, | |
| "grad_norm": 3.0107447624660275, | |
| "learning_rate": 1.6597510373443984e-06, | |
| "loss": 0.938, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.01867219917012448, | |
| "grad_norm": 3.0215484563628694, | |
| "learning_rate": 1.8672199170124482e-06, | |
| "loss": 0.9083, | |
| "step": 45 | |
| }, | |
| { | |
| "epoch": 0.02074688796680498, | |
| "grad_norm": 2.973401033499103, | |
| "learning_rate": 2.074688796680498e-06, | |
| "loss": 0.9175, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.022821576763485476, | |
| "grad_norm": 3.1166975342093104, | |
| "learning_rate": 2.282157676348548e-06, | |
| "loss": 0.9051, | |
| "step": 55 | |
| }, | |
| { | |
| "epoch": 0.024896265560165973, | |
| "grad_norm": 3.1493583912363667, | |
| "learning_rate": 2.4896265560165977e-06, | |
| "loss": 0.8867, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.026970954356846474, | |
| "grad_norm": 2.971810139692584, | |
| "learning_rate": 2.6970954356846475e-06, | |
| "loss": 0.9024, | |
| "step": 65 | |
| }, | |
| { | |
| "epoch": 0.029045643153526972, | |
| "grad_norm": 2.9673454675974535, | |
| "learning_rate": 2.9045643153526977e-06, | |
| "loss": 0.8855, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.03112033195020747, | |
| "grad_norm": 3.195994891124804, | |
| "learning_rate": 3.112033195020747e-06, | |
| "loss": 0.8873, | |
| "step": 75 | |
| }, | |
| { | |
| "epoch": 0.03319502074688797, | |
| "grad_norm": 3.0341908061093603, | |
| "learning_rate": 3.319502074688797e-06, | |
| "loss": 0.8785, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.035269709543568464, | |
| "grad_norm": 3.0888190331536958, | |
| "learning_rate": 3.526970954356847e-06, | |
| "loss": 0.8686, | |
| "step": 85 | |
| }, | |
| { | |
| "epoch": 0.03734439834024896, | |
| "grad_norm": 3.069834595940876, | |
| "learning_rate": 3.7344398340248965e-06, | |
| "loss": 0.8795, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.03941908713692946, | |
| "grad_norm": 3.07342880372721, | |
| "learning_rate": 3.941908713692946e-06, | |
| "loss": 0.8543, | |
| "step": 95 | |
| }, | |
| { | |
| "epoch": 0.04149377593360996, | |
| "grad_norm": 3.149595655831201, | |
| "learning_rate": 4.149377593360996e-06, | |
| "loss": 0.8516, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.043568464730290454, | |
| "grad_norm": 3.1588136250157293, | |
| "learning_rate": 4.356846473029046e-06, | |
| "loss": 0.861, | |
| "step": 105 | |
| }, | |
| { | |
| "epoch": 0.04564315352697095, | |
| "grad_norm": 3.2472498773631298, | |
| "learning_rate": 4.564315352697096e-06, | |
| "loss": 0.8626, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.04771784232365145, | |
| "grad_norm": 3.0627212516038003, | |
| "learning_rate": 4.771784232365146e-06, | |
| "loss": 0.8585, | |
| "step": 115 | |
| }, | |
| { | |
| "epoch": 0.04979253112033195, | |
| "grad_norm": 3.3866567736302415, | |
| "learning_rate": 4.979253112033195e-06, | |
| "loss": 0.8546, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.05186721991701245, | |
| "grad_norm": 3.045817807536523, | |
| "learning_rate": 5.1867219917012455e-06, | |
| "loss": 0.8579, | |
| "step": 125 | |
| }, | |
| { | |
| "epoch": 0.05394190871369295, | |
| "grad_norm": 2.9392093861675868, | |
| "learning_rate": 5.394190871369295e-06, | |
| "loss": 0.8282, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.056016597510373446, | |
| "grad_norm": 3.310150868761337, | |
| "learning_rate": 5.601659751037345e-06, | |
| "loss": 0.846, | |
| "step": 135 | |
| }, | |
| { | |
| "epoch": 0.058091286307053944, | |
| "grad_norm": 3.03717223043934, | |
| "learning_rate": 5.809128630705395e-06, | |
| "loss": 0.8459, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.06016597510373444, | |
| "grad_norm": 3.028995697830279, | |
| "learning_rate": 6.016597510373444e-06, | |
| "loss": 0.8488, | |
| "step": 145 | |
| }, | |
| { | |
| "epoch": 0.06224066390041494, | |
| "grad_norm": 3.2031060057481935, | |
| "learning_rate": 6.224066390041494e-06, | |
| "loss": 0.8323, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.06431535269709543, | |
| "grad_norm": 3.3375024554537545, | |
| "learning_rate": 6.431535269709544e-06, | |
| "loss": 0.8365, | |
| "step": 155 | |
| }, | |
| { | |
| "epoch": 0.06639004149377593, | |
| "grad_norm": 3.348800008787665, | |
| "learning_rate": 6.639004149377594e-06, | |
| "loss": 0.8337, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.06846473029045644, | |
| "grad_norm": 3.052961732749455, | |
| "learning_rate": 6.846473029045644e-06, | |
| "loss": 0.8315, | |
| "step": 165 | |
| }, | |
| { | |
| "epoch": 0.07053941908713693, | |
| "grad_norm": 3.1121690085579607, | |
| "learning_rate": 7.053941908713694e-06, | |
| "loss": 0.8401, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.07261410788381743, | |
| "grad_norm": 3.242023378743259, | |
| "learning_rate": 7.261410788381743e-06, | |
| "loss": 0.8158, | |
| "step": 175 | |
| }, | |
| { | |
| "epoch": 0.07468879668049792, | |
| "grad_norm": 3.075162049471092, | |
| "learning_rate": 7.468879668049793e-06, | |
| "loss": 0.8347, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.07676348547717843, | |
| "grad_norm": 2.9264920323994255, | |
| "learning_rate": 7.676348547717844e-06, | |
| "loss": 0.8088, | |
| "step": 185 | |
| }, | |
| { | |
| "epoch": 0.07883817427385892, | |
| "grad_norm": 2.948569896440745, | |
| "learning_rate": 7.883817427385892e-06, | |
| "loss": 0.8218, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.08091286307053942, | |
| "grad_norm": 3.3517457188955224, | |
| "learning_rate": 8.091286307053943e-06, | |
| "loss": 0.8147, | |
| "step": 195 | |
| }, | |
| { | |
| "epoch": 0.08298755186721991, | |
| "grad_norm": 3.220462454017356, | |
| "learning_rate": 8.298755186721992e-06, | |
| "loss": 0.8349, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.08506224066390042, | |
| "grad_norm": 3.1111446024937006, | |
| "learning_rate": 8.506224066390042e-06, | |
| "loss": 0.8057, | |
| "step": 205 | |
| }, | |
| { | |
| "epoch": 0.08713692946058091, | |
| "grad_norm": 2.892565595009811, | |
| "learning_rate": 8.713692946058093e-06, | |
| "loss": 0.8294, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.08921161825726141, | |
| "grad_norm": 3.235067221557126, | |
| "learning_rate": 8.921161825726142e-06, | |
| "loss": 0.7982, | |
| "step": 215 | |
| }, | |
| { | |
| "epoch": 0.0912863070539419, | |
| "grad_norm": 3.1287542161799013, | |
| "learning_rate": 9.128630705394191e-06, | |
| "loss": 0.7973, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.09336099585062241, | |
| "grad_norm": 2.9450638727706315, | |
| "learning_rate": 9.33609958506224e-06, | |
| "loss": 0.8147, | |
| "step": 225 | |
| }, | |
| { | |
| "epoch": 0.0954356846473029, | |
| "grad_norm": 3.170060713175617, | |
| "learning_rate": 9.543568464730292e-06, | |
| "loss": 0.8161, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.0975103734439834, | |
| "grad_norm": 3.0907201923890146, | |
| "learning_rate": 9.751037344398341e-06, | |
| "loss": 0.8163, | |
| "step": 235 | |
| }, | |
| { | |
| "epoch": 0.0995850622406639, | |
| "grad_norm": 3.102859260824346, | |
| "learning_rate": 9.95850622406639e-06, | |
| "loss": 0.7918, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.1016597510373444, | |
| "grad_norm": 2.9868921661921517, | |
| "learning_rate": 9.999916085034977e-06, | |
| "loss": 0.8164, | |
| "step": 245 | |
| }, | |
| { | |
| "epoch": 0.1037344398340249, | |
| "grad_norm": 2.8504855656123937, | |
| "learning_rate": 9.999575185316994e-06, | |
| "loss": 0.8105, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.10580912863070539, | |
| "grad_norm": 2.969587333276916, | |
| "learning_rate": 9.998972074026074e-06, | |
| "loss": 0.8055, | |
| "step": 255 | |
| }, | |
| { | |
| "epoch": 0.1078838174273859, | |
| "grad_norm": 2.9939024309770157, | |
| "learning_rate": 9.998106782793455e-06, | |
| "loss": 0.7973, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.10995850622406639, | |
| "grad_norm": 2.997723701099069, | |
| "learning_rate": 9.996979357000869e-06, | |
| "loss": 0.7941, | |
| "step": 265 | |
| }, | |
| { | |
| "epoch": 0.11203319502074689, | |
| "grad_norm": 2.979776887568956, | |
| "learning_rate": 9.995589855778159e-06, | |
| "loss": 0.7897, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.11410788381742738, | |
| "grad_norm": 2.7677213383011443, | |
| "learning_rate": 9.993938352000174e-06, | |
| "loss": 0.7962, | |
| "step": 275 | |
| }, | |
| { | |
| "epoch": 0.11618257261410789, | |
| "grad_norm": 2.936072860527269, | |
| "learning_rate": 9.992024932282955e-06, | |
| "loss": 0.7934, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.11825726141078838, | |
| "grad_norm": 2.7901975755448003, | |
| "learning_rate": 9.989849696979188e-06, | |
| "loss": 0.8062, | |
| "step": 285 | |
| }, | |
| { | |
| "epoch": 0.12033195020746888, | |
| "grad_norm": 2.8991854152652974, | |
| "learning_rate": 9.987412760172939e-06, | |
| "loss": 0.8007, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.12240663900414937, | |
| "grad_norm": 3.0373899188891267, | |
| "learning_rate": 9.984714249673676e-06, | |
| "loss": 0.7941, | |
| "step": 295 | |
| }, | |
| { | |
| "epoch": 0.12448132780082988, | |
| "grad_norm": 2.8040271211308596, | |
| "learning_rate": 9.981754307009556e-06, | |
| "loss": 0.8004, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.12655601659751037, | |
| "grad_norm": 2.8585104734793263, | |
| "learning_rate": 9.978533087420015e-06, | |
| "loss": 0.8029, | |
| "step": 305 | |
| }, | |
| { | |
| "epoch": 0.12863070539419086, | |
| "grad_norm": 2.87456284995761, | |
| "learning_rate": 9.97505075984762e-06, | |
| "loss": 0.7807, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.13070539419087138, | |
| "grad_norm": 3.056578905955286, | |
| "learning_rate": 9.971307506929202e-06, | |
| "loss": 0.7795, | |
| "step": 315 | |
| }, | |
| { | |
| "epoch": 0.13278008298755187, | |
| "grad_norm": 2.722917451156625, | |
| "learning_rate": 9.967303524986294e-06, | |
| "loss": 0.7873, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.13485477178423236, | |
| "grad_norm": 2.9180827249941013, | |
| "learning_rate": 9.963039024014811e-06, | |
| "loss": 0.7861, | |
| "step": 325 | |
| }, | |
| { | |
| "epoch": 0.13692946058091288, | |
| "grad_norm": 2.73431405256196, | |
| "learning_rate": 9.958514227674065e-06, | |
| "loss": 0.766, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.13900414937759337, | |
| "grad_norm": 2.9085046091623137, | |
| "learning_rate": 9.953729373275008e-06, | |
| "loss": 0.7731, | |
| "step": 335 | |
| }, | |
| { | |
| "epoch": 0.14107883817427386, | |
| "grad_norm": 2.8708373658306523, | |
| "learning_rate": 9.9486847117678e-06, | |
| "loss": 0.7594, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.14315352697095435, | |
| "grad_norm": 3.0046135483136696, | |
| "learning_rate": 9.943380507728647e-06, | |
| "loss": 0.7618, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.14522821576763487, | |
| "grad_norm": 2.8484743596508486, | |
| "learning_rate": 9.937817039345924e-06, | |
| "loss": 0.7604, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.14730290456431536, | |
| "grad_norm": 2.72605173131386, | |
| "learning_rate": 9.931994598405576e-06, | |
| "loss": 0.7564, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.14937759336099585, | |
| "grad_norm": 2.909791550347346, | |
| "learning_rate": 9.925913490275834e-06, | |
| "loss": 0.7512, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.15145228215767634, | |
| "grad_norm": 2.819547837925891, | |
| "learning_rate": 9.919574033891175e-06, | |
| "loss": 0.7457, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.15352697095435686, | |
| "grad_norm": 2.7924198276169854, | |
| "learning_rate": 9.912976561735617e-06, | |
| "loss": 0.7414, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.15560165975103735, | |
| "grad_norm": 2.832091750839748, | |
| "learning_rate": 9.906121419825269e-06, | |
| "loss": 0.7363, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.15767634854771784, | |
| "grad_norm": 2.829243284643042, | |
| "learning_rate": 9.899008967690185e-06, | |
| "loss": 0.7326, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.15975103734439833, | |
| "grad_norm": 2.826535782313376, | |
| "learning_rate": 9.891639578355511e-06, | |
| "loss": 0.7538, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.16182572614107885, | |
| "grad_norm": 2.882936305924773, | |
| "learning_rate": 9.88401363832192e-06, | |
| "loss": 0.7302, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.16390041493775934, | |
| "grad_norm": 2.906899687666024, | |
| "learning_rate": 9.87613154754534e-06, | |
| "loss": 0.7129, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.16597510373443983, | |
| "grad_norm": 2.7967270291757944, | |
| "learning_rate": 9.867993719415974e-06, | |
| "loss": 0.7416, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.16804979253112035, | |
| "grad_norm": 2.8084997358792023, | |
| "learning_rate": 9.859600580736632e-06, | |
| "loss": 0.7353, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.17012448132780084, | |
| "grad_norm": 2.680436779833713, | |
| "learning_rate": 9.850952571700332e-06, | |
| "loss": 0.7376, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.17219917012448133, | |
| "grad_norm": 2.8865976677763907, | |
| "learning_rate": 9.842050145867219e-06, | |
| "loss": 0.7262, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.17427385892116182, | |
| "grad_norm": 2.8914725126052416, | |
| "learning_rate": 9.832893770140778e-06, | |
| "loss": 0.7161, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.17634854771784234, | |
| "grad_norm": 2.7982408695630725, | |
| "learning_rate": 9.823483924743348e-06, | |
| "loss": 0.7283, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.17842323651452283, | |
| "grad_norm": 2.6688029764570604, | |
| "learning_rate": 9.813821103190932e-06, | |
| "loss": 0.7192, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.18049792531120332, | |
| "grad_norm": 2.8287346081752327, | |
| "learning_rate": 9.803905812267317e-06, | |
| "loss": 0.724, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.1825726141078838, | |
| "grad_norm": 2.744322820321148, | |
| "learning_rate": 9.793738571997488e-06, | |
| "loss": 0.7204, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.18464730290456433, | |
| "grad_norm": 2.736399026435038, | |
| "learning_rate": 9.783319915620365e-06, | |
| "loss": 0.7265, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.18672199170124482, | |
| "grad_norm": 2.8039795008875887, | |
| "learning_rate": 9.772650389560829e-06, | |
| "loss": 0.7217, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.1887966804979253, | |
| "grad_norm": 2.6831409383313893, | |
| "learning_rate": 9.761730553401067e-06, | |
| "loss": 0.7016, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.1908713692946058, | |
| "grad_norm": 2.7717760950966492, | |
| "learning_rate": 9.750560979851222e-06, | |
| "loss": 0.728, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.19294605809128632, | |
| "grad_norm": 2.614242945562297, | |
| "learning_rate": 9.739142254719351e-06, | |
| "loss": 0.7025, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.1950207468879668, | |
| "grad_norm": 2.649844888470243, | |
| "learning_rate": 9.727474976880718e-06, | |
| "loss": 0.7069, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.1970954356846473, | |
| "grad_norm": 2.800760446265028, | |
| "learning_rate": 9.715559758246363e-06, | |
| "loss": 0.6901, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.1991701244813278, | |
| "grad_norm": 2.7922242993251265, | |
| "learning_rate": 9.703397223731028e-06, | |
| "loss": 0.6913, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.2012448132780083, | |
| "grad_norm": 2.907407187432472, | |
| "learning_rate": 9.690988011220367e-06, | |
| "loss": 0.7121, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.2033195020746888, | |
| "grad_norm": 2.626218495773325, | |
| "learning_rate": 9.678332771537506e-06, | |
| "loss": 0.7026, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.2053941908713693, | |
| "grad_norm": 2.8404176936123338, | |
| "learning_rate": 9.665432168408895e-06, | |
| "loss": 0.7113, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 0.2074688796680498, | |
| "grad_norm": 2.786382253827723, | |
| "learning_rate": 9.652286878429508e-06, | |
| "loss": 0.6852, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.2095435684647303, | |
| "grad_norm": 2.7800940175136515, | |
| "learning_rate": 9.638897591027355e-06, | |
| "loss": 0.6917, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 0.21161825726141079, | |
| "grad_norm": 2.710182741542463, | |
| "learning_rate": 9.625265008427317e-06, | |
| "loss": 0.6903, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.21369294605809128, | |
| "grad_norm": 3.0976476780998827, | |
| "learning_rate": 9.61138984561433e-06, | |
| "loss": 0.6776, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 0.2157676348547718, | |
| "grad_norm": 2.6986195603682015, | |
| "learning_rate": 9.597272830295877e-06, | |
| "loss": 0.6759, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.21784232365145229, | |
| "grad_norm": 2.8093612594263493, | |
| "learning_rate": 9.582914702863816e-06, | |
| "loss": 0.691, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 0.21991701244813278, | |
| "grad_norm": 2.668102059117624, | |
| "learning_rate": 9.568316216355569e-06, | |
| "loss": 0.6658, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.22199170124481327, | |
| "grad_norm": 2.968496552447308, | |
| "learning_rate": 9.553478136414606e-06, | |
| "loss": 0.6585, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 0.22406639004149378, | |
| "grad_norm": 2.788495237514456, | |
| "learning_rate": 9.538401241250302e-06, | |
| "loss": 0.6761, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.22614107883817428, | |
| "grad_norm": 2.692620475621199, | |
| "learning_rate": 9.523086321597123e-06, | |
| "loss": 0.6862, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 0.22821576763485477, | |
| "grad_norm": 2.8519734853628274, | |
| "learning_rate": 9.507534180673142e-06, | |
| "loss": 0.6765, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.23029045643153526, | |
| "grad_norm": 2.6424845030364934, | |
| "learning_rate": 9.49174563413793e-06, | |
| "loss": 0.693, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 0.23236514522821577, | |
| "grad_norm": 2.68422130623634, | |
| "learning_rate": 9.475721510049765e-06, | |
| "loss": 0.6549, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.23443983402489627, | |
| "grad_norm": 2.745364732974406, | |
| "learning_rate": 9.459462648822209e-06, | |
| "loss": 0.6762, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 0.23651452282157676, | |
| "grad_norm": 2.7138771122564496, | |
| "learning_rate": 9.442969903180021e-06, | |
| "loss": 0.6617, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.23858921161825727, | |
| "grad_norm": 2.594975020618961, | |
| "learning_rate": 9.426244138114456e-06, | |
| "loss": 0.6644, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 0.24066390041493776, | |
| "grad_norm": 2.6711485289021217, | |
| "learning_rate": 9.409286230837876e-06, | |
| "loss": 0.6515, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.24273858921161826, | |
| "grad_norm": 3.157835949894035, | |
| "learning_rate": 9.39209707073775e-06, | |
| "loss": 0.6552, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 0.24481327800829875, | |
| "grad_norm": 2.7728392273294427, | |
| "learning_rate": 9.37467755933002e-06, | |
| "loss": 0.6547, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.24688796680497926, | |
| "grad_norm": 2.6326098618284726, | |
| "learning_rate": 9.357028610211802e-06, | |
| "loss": 0.6581, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 0.24896265560165975, | |
| "grad_norm": 2.645526952797483, | |
| "learning_rate": 9.339151149013483e-06, | |
| "loss": 0.66, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.25103734439834025, | |
| "grad_norm": 2.695303011335832, | |
| "learning_rate": 9.32104611335017e-06, | |
| "loss": 0.6494, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 0.25311203319502074, | |
| "grad_norm": 2.595062725441063, | |
| "learning_rate": 9.302714452772515e-06, | |
| "loss": 0.6443, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.2551867219917012, | |
| "grad_norm": 2.6458460885695168, | |
| "learning_rate": 9.284157128716916e-06, | |
| "loss": 0.6514, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 0.2572614107883817, | |
| "grad_norm": 2.5786064674401583, | |
| "learning_rate": 9.265375114455091e-06, | |
| "loss": 0.6405, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.25933609958506226, | |
| "grad_norm": 2.6907850585932027, | |
| "learning_rate": 9.246369395043033e-06, | |
| "loss": 0.6476, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 0.26141078838174275, | |
| "grad_norm": 2.6953268245247557, | |
| "learning_rate": 9.227140967269348e-06, | |
| "loss": 0.6414, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.26348547717842324, | |
| "grad_norm": 2.6657263650658427, | |
| "learning_rate": 9.20769083960298e-06, | |
| "loss": 0.6514, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 0.26556016597510373, | |
| "grad_norm": 2.5956487125743615, | |
| "learning_rate": 9.188020032140308e-06, | |
| "loss": 0.6494, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.2676348547717842, | |
| "grad_norm": 2.6461882359542095, | |
| "learning_rate": 9.168129576551665e-06, | |
| "loss": 0.6388, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 0.2697095435684647, | |
| "grad_norm": 2.734469941118851, | |
| "learning_rate": 9.148020516027207e-06, | |
| "loss": 0.639, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.2717842323651452, | |
| "grad_norm": 2.85311061995995, | |
| "learning_rate": 9.127693905222223e-06, | |
| "loss": 0.6239, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 0.27385892116182575, | |
| "grad_norm": 2.905457532313008, | |
| "learning_rate": 9.107150810201805e-06, | |
| "loss": 0.6332, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.27593360995850624, | |
| "grad_norm": 2.866240537811393, | |
| "learning_rate": 9.086392308384946e-06, | |
| "loss": 0.6402, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 0.27800829875518673, | |
| "grad_norm": 2.6848783944743717, | |
| "learning_rate": 9.065419488488029e-06, | |
| "loss": 0.6198, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.2800829875518672, | |
| "grad_norm": 2.806882576661356, | |
| "learning_rate": 9.044233450467728e-06, | |
| "loss": 0.6329, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 0.2821576763485477, | |
| "grad_norm": 2.6697354108572457, | |
| "learning_rate": 9.022835305463322e-06, | |
| "loss": 0.628, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.2842323651452282, | |
| "grad_norm": 2.692739103753555, | |
| "learning_rate": 9.001226175738409e-06, | |
| "loss": 0.6285, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 0.2863070539419087, | |
| "grad_norm": 2.6356169772477887, | |
| "learning_rate": 8.979407194622062e-06, | |
| "loss": 0.6029, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.2883817427385892, | |
| "grad_norm": 2.684304580451599, | |
| "learning_rate": 8.95737950644938e-06, | |
| "loss": 0.6163, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 0.29045643153526973, | |
| "grad_norm": 2.665591903369743, | |
| "learning_rate": 8.93514426650147e-06, | |
| "loss": 0.6036, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.2925311203319502, | |
| "grad_norm": 2.5849646219326394, | |
| "learning_rate": 8.912702640944862e-06, | |
| "loss": 0.63, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 0.2946058091286307, | |
| "grad_norm": 2.642637231105898, | |
| "learning_rate": 8.89005580677034e-06, | |
| "loss": 0.6142, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.2966804979253112, | |
| "grad_norm": 2.8131632244224543, | |
| "learning_rate": 8.867204951731227e-06, | |
| "loss": 0.6177, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 0.2987551867219917, | |
| "grad_norm": 2.7483245438817794, | |
| "learning_rate": 8.844151274281074e-06, | |
| "loss": 0.6194, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.3008298755186722, | |
| "grad_norm": 3.0664428917760675, | |
| "learning_rate": 8.820895983510813e-06, | |
| "loss": 0.6186, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 0.3029045643153527, | |
| "grad_norm": 2.688656442588083, | |
| "learning_rate": 8.797440299085344e-06, | |
| "loss": 0.5988, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.3049792531120332, | |
| "grad_norm": 2.7050533397545258, | |
| "learning_rate": 8.77378545117957e-06, | |
| "loss": 0.616, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 0.3070539419087137, | |
| "grad_norm": 2.693473576801362, | |
| "learning_rate": 8.74993268041387e-06, | |
| "loss": 0.5999, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.3091286307053942, | |
| "grad_norm": 2.6799339600570766, | |
| "learning_rate": 8.725883237789046e-06, | |
| "loss": 0.5903, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 0.3112033195020747, | |
| "grad_norm": 2.5430481996060528, | |
| "learning_rate": 8.701638384620694e-06, | |
| "loss": 0.5856, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.3132780082987552, | |
| "grad_norm": 2.672587466894312, | |
| "learning_rate": 8.677199392473068e-06, | |
| "loss": 0.6042, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 0.3153526970954357, | |
| "grad_norm": 2.7790737972229484, | |
| "learning_rate": 8.652567543092385e-06, | |
| "loss": 0.5917, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.31742738589211617, | |
| "grad_norm": 2.7954825670577677, | |
| "learning_rate": 8.627744128339599e-06, | |
| "loss": 0.5939, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 0.31950207468879666, | |
| "grad_norm": 2.662658318367537, | |
| "learning_rate": 8.602730450122648e-06, | |
| "loss": 0.6011, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.3215767634854772, | |
| "grad_norm": 2.6270972350688884, | |
| "learning_rate": 8.577527820328176e-06, | |
| "loss": 0.5859, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 0.3236514522821577, | |
| "grad_norm": 2.5997336791640144, | |
| "learning_rate": 8.552137560752728e-06, | |
| "loss": 0.5695, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.3257261410788382, | |
| "grad_norm": 2.6129156491647096, | |
| "learning_rate": 8.526561003033424e-06, | |
| "loss": 0.5991, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 0.3278008298755187, | |
| "grad_norm": 2.641434925698528, | |
| "learning_rate": 8.50079948857812e-06, | |
| "loss": 0.5756, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.32987551867219916, | |
| "grad_norm": 2.744754568258112, | |
| "learning_rate": 8.474854368495055e-06, | |
| "loss": 0.5916, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 0.33195020746887965, | |
| "grad_norm": 2.8742812662542767, | |
| "learning_rate": 8.44872700352199e-06, | |
| "loss": 0.5858, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.33402489626556015, | |
| "grad_norm": 2.675688772837824, | |
| "learning_rate": 8.422418763954841e-06, | |
| "loss": 0.5858, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 0.3360995850622407, | |
| "grad_norm": 2.6454009935375202, | |
| "learning_rate": 8.395931029575817e-06, | |
| "loss": 0.5793, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.3381742738589212, | |
| "grad_norm": 2.5799174291219855, | |
| "learning_rate": 8.369265189581048e-06, | |
| "loss": 0.5577, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 0.34024896265560167, | |
| "grad_norm": 2.6266021968279025, | |
| "learning_rate": 8.342422642507727e-06, | |
| "loss": 0.5908, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.34232365145228216, | |
| "grad_norm": 2.6270567170702104, | |
| "learning_rate": 8.31540479616076e-06, | |
| "loss": 0.5583, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 0.34439834024896265, | |
| "grad_norm": 2.6605567528471283, | |
| "learning_rate": 8.288213067538936e-06, | |
| "loss": 0.5792, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.34647302904564314, | |
| "grad_norm": 2.6648839547096936, | |
| "learning_rate": 8.260848882760616e-06, | |
| "loss": 0.5758, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 0.34854771784232363, | |
| "grad_norm": 2.4985116541737487, | |
| "learning_rate": 8.233313676988917e-06, | |
| "loss": 0.5589, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.3506224066390041, | |
| "grad_norm": 2.5295015470513014, | |
| "learning_rate": 8.205608894356461e-06, | |
| "loss": 0.5747, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 0.35269709543568467, | |
| "grad_norm": 2.6313000253018317, | |
| "learning_rate": 8.177735987889628e-06, | |
| "loss": 0.5794, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.35477178423236516, | |
| "grad_norm": 2.841463850249757, | |
| "learning_rate": 8.149696419432352e-06, | |
| "loss": 0.5662, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 0.35684647302904565, | |
| "grad_norm": 2.499217630200596, | |
| "learning_rate": 8.121491659569442e-06, | |
| "loss": 0.5489, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.35892116182572614, | |
| "grad_norm": 2.624500775254657, | |
| "learning_rate": 8.093123187549475e-06, | |
| "loss": 0.5529, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 0.36099585062240663, | |
| "grad_norm": 2.891141879312641, | |
| "learning_rate": 8.064592491207193e-06, | |
| "loss": 0.5615, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.3630705394190871, | |
| "grad_norm": 2.985654722661823, | |
| "learning_rate": 8.035901066885486e-06, | |
| "loss": 0.5727, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 0.3651452282157676, | |
| "grad_norm": 2.7907723136875995, | |
| "learning_rate": 8.007050419356898e-06, | |
| "loss": 0.5608, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.36721991701244816, | |
| "grad_norm": 2.6416506986010813, | |
| "learning_rate": 7.978042061744728e-06, | |
| "loss": 0.5557, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 0.36929460580912865, | |
| "grad_norm": 2.5258498715580067, | |
| "learning_rate": 7.94887751544365e-06, | |
| "loss": 0.5534, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.37136929460580914, | |
| "grad_norm": 2.561386861258123, | |
| "learning_rate": 7.919558310039937e-06, | |
| "loss": 0.556, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 0.37344398340248963, | |
| "grad_norm": 2.727627068843935, | |
| "learning_rate": 7.890085983231225e-06, | |
| "loss": 0.5462, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.3755186721991701, | |
| "grad_norm": 2.5718069262603533, | |
| "learning_rate": 7.860462080745884e-06, | |
| "loss": 0.5399, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 0.3775933609958506, | |
| "grad_norm": 2.5380387267872746, | |
| "learning_rate": 7.830688156261927e-06, | |
| "loss": 0.5502, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 0.3796680497925311, | |
| "grad_norm": 2.6214419026204565, | |
| "learning_rate": 7.800765771325546e-06, | |
| "loss": 0.5417, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 0.3817427385892116, | |
| "grad_norm": 2.712261230434538, | |
| "learning_rate": 7.7706964952692e-06, | |
| "loss": 0.545, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 0.38381742738589214, | |
| "grad_norm": 2.5420503220632797, | |
| "learning_rate": 7.740481905129307e-06, | |
| "loss": 0.5333, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 0.38589211618257263, | |
| "grad_norm": 2.6001135316476893, | |
| "learning_rate": 7.710123585563552e-06, | |
| "loss": 0.532, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 0.3879668049792531, | |
| "grad_norm": 2.720274250658824, | |
| "learning_rate": 7.679623128767754e-06, | |
| "loss": 0.5466, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 0.3900414937759336, | |
| "grad_norm": 2.751465451077315, | |
| "learning_rate": 7.648982134392378e-06, | |
| "loss": 0.5411, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 0.3921161825726141, | |
| "grad_norm": 2.5435730720837864, | |
| "learning_rate": 7.618202209458623e-06, | |
| "loss": 0.5225, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 0.3941908713692946, | |
| "grad_norm": 2.5155581478521456, | |
| "learning_rate": 7.587284968274155e-06, | |
| "loss": 0.5361, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 0.3962655601659751, | |
| "grad_norm": 2.6500904333286384, | |
| "learning_rate": 7.556232032348429e-06, | |
| "loss": 0.5323, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 0.3983402489626556, | |
| "grad_norm": 2.6832927074173947, | |
| "learning_rate": 7.5250450303076526e-06, | |
| "loss": 0.5324, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 0.4004149377593361, | |
| "grad_norm": 2.5794687904843934, | |
| "learning_rate": 7.49372559780937e-06, | |
| "loss": 0.5401, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 0.4024896265560166, | |
| "grad_norm": 2.779973776708556, | |
| "learning_rate": 7.462275377456671e-06, | |
| "loss": 0.5353, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 0.4045643153526971, | |
| "grad_norm": 2.510096649942157, | |
| "learning_rate": 7.430696018712049e-06, | |
| "loss": 0.5178, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 0.4066390041493776, | |
| "grad_norm": 2.544609714596366, | |
| "learning_rate": 7.398989177810889e-06, | |
| "loss": 0.5305, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 0.4087136929460581, | |
| "grad_norm": 2.6358450407419394, | |
| "learning_rate": 7.3671565176746025e-06, | |
| "loss": 0.5083, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 0.4107883817427386, | |
| "grad_norm": 2.5104688131658115, | |
| "learning_rate": 7.335199707823415e-06, | |
| "loss": 0.5213, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 0.41286307053941906, | |
| "grad_norm": 2.534767895856582, | |
| "learning_rate": 7.3031204242888e-06, | |
| "loss": 0.5003, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 0.4149377593360996, | |
| "grad_norm": 2.734749407560919, | |
| "learning_rate": 7.270920349525584e-06, | |
| "loss": 0.5179, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.4170124481327801, | |
| "grad_norm": 2.404837852320902, | |
| "learning_rate": 7.238601172323701e-06, | |
| "loss": 0.5231, | |
| "step": 1005 | |
| }, | |
| { | |
| "epoch": 0.4190871369294606, | |
| "grad_norm": 2.655040755780098, | |
| "learning_rate": 7.206164587719627e-06, | |
| "loss": 0.5191, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 0.4211618257261411, | |
| "grad_norm": 2.537938752408207, | |
| "learning_rate": 7.173612296907473e-06, | |
| "loss": 0.5206, | |
| "step": 1015 | |
| }, | |
| { | |
| "epoch": 0.42323651452282157, | |
| "grad_norm": 2.6276182145014135, | |
| "learning_rate": 7.1409460071497675e-06, | |
| "loss": 0.5294, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 0.42531120331950206, | |
| "grad_norm": 2.543796942084407, | |
| "learning_rate": 7.108167431687917e-06, | |
| "loss": 0.5166, | |
| "step": 1025 | |
| }, | |
| { | |
| "epoch": 0.42738589211618255, | |
| "grad_norm": 2.6672869217839468, | |
| "learning_rate": 7.075278289652349e-06, | |
| "loss": 0.5225, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 0.42946058091286304, | |
| "grad_norm": 2.531083972850949, | |
| "learning_rate": 7.042280305972354e-06, | |
| "loss": 0.5142, | |
| "step": 1035 | |
| }, | |
| { | |
| "epoch": 0.4315352697095436, | |
| "grad_norm": 2.4747366529454693, | |
| "learning_rate": 7.009175211285611e-06, | |
| "loss": 0.5141, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 0.4336099585062241, | |
| "grad_norm": 2.493745172465378, | |
| "learning_rate": 6.975964741847427e-06, | |
| "loss": 0.5241, | |
| "step": 1045 | |
| }, | |
| { | |
| "epoch": 0.43568464730290457, | |
| "grad_norm": 2.59288402554468, | |
| "learning_rate": 6.942650639439678e-06, | |
| "loss": 0.5127, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 0.43775933609958506, | |
| "grad_norm": 2.5743234047985952, | |
| "learning_rate": 6.9092346512794475e-06, | |
| "loss": 0.5112, | |
| "step": 1055 | |
| }, | |
| { | |
| "epoch": 0.43983402489626555, | |
| "grad_norm": 2.46036118970127, | |
| "learning_rate": 6.875718529927404e-06, | |
| "loss": 0.4881, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 0.44190871369294604, | |
| "grad_norm": 2.582868746430813, | |
| "learning_rate": 6.8421040331958745e-06, | |
| "loss": 0.5072, | |
| "step": 1065 | |
| }, | |
| { | |
| "epoch": 0.44398340248962653, | |
| "grad_norm": 2.5161140611104056, | |
| "learning_rate": 6.808392924056659e-06, | |
| "loss": 0.5068, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 0.4460580912863071, | |
| "grad_norm": 2.616608707294859, | |
| "learning_rate": 6.774586970548567e-06, | |
| "loss": 0.5132, | |
| "step": 1075 | |
| }, | |
| { | |
| "epoch": 0.44813278008298757, | |
| "grad_norm": 2.4704454065037424, | |
| "learning_rate": 6.7406879456846875e-06, | |
| "loss": 0.4942, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 0.45020746887966806, | |
| "grad_norm": 2.5654508809058805, | |
| "learning_rate": 6.7066976273594e-06, | |
| "loss": 0.4914, | |
| "step": 1085 | |
| }, | |
| { | |
| "epoch": 0.45228215767634855, | |
| "grad_norm": 2.5642213246435017, | |
| "learning_rate": 6.672617798255135e-06, | |
| "loss": 0.5043, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 0.45435684647302904, | |
| "grad_norm": 2.3759068788938538, | |
| "learning_rate": 6.63845024574887e-06, | |
| "loss": 0.4744, | |
| "step": 1095 | |
| }, | |
| { | |
| "epoch": 0.45643153526970953, | |
| "grad_norm": 2.4485238164491, | |
| "learning_rate": 6.604196761818395e-06, | |
| "loss": 0.4992, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 0.45850622406639, | |
| "grad_norm": 2.481384678496845, | |
| "learning_rate": 6.5698591429483286e-06, | |
| "loss": 0.5016, | |
| "step": 1105 | |
| }, | |
| { | |
| "epoch": 0.4605809128630705, | |
| "grad_norm": 2.571902891821277, | |
| "learning_rate": 6.535439190035884e-06, | |
| "loss": 0.4893, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 0.46265560165975106, | |
| "grad_norm": 2.559158463935003, | |
| "learning_rate": 6.5009387082964405e-06, | |
| "loss": 0.4991, | |
| "step": 1115 | |
| }, | |
| { | |
| "epoch": 0.46473029045643155, | |
| "grad_norm": 2.538459567637443, | |
| "learning_rate": 6.466359507168849e-06, | |
| "loss": 0.4953, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 0.46680497925311204, | |
| "grad_norm": 2.5267222175872623, | |
| "learning_rate": 6.431703400220541e-06, | |
| "loss": 0.482, | |
| "step": 1125 | |
| }, | |
| { | |
| "epoch": 0.46887966804979253, | |
| "grad_norm": 2.38064786704353, | |
| "learning_rate": 6.396972205052407e-06, | |
| "loss": 0.4839, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 0.470954356846473, | |
| "grad_norm": 2.5624442145243935, | |
| "learning_rate": 6.362167743203474e-06, | |
| "loss": 0.4975, | |
| "step": 1135 | |
| }, | |
| { | |
| "epoch": 0.4730290456431535, | |
| "grad_norm": 2.4427415841202014, | |
| "learning_rate": 6.327291840055365e-06, | |
| "loss": 0.4759, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 0.475103734439834, | |
| "grad_norm": 2.5750114799610095, | |
| "learning_rate": 6.292346324736578e-06, | |
| "loss": 0.4931, | |
| "step": 1145 | |
| }, | |
| { | |
| "epoch": 0.47717842323651455, | |
| "grad_norm": 2.368089990602912, | |
| "learning_rate": 6.2573330300265375e-06, | |
| "loss": 0.4943, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 0.47925311203319504, | |
| "grad_norm": 2.430038445579993, | |
| "learning_rate": 6.22225379225948e-06, | |
| "loss": 0.4776, | |
| "step": 1155 | |
| }, | |
| { | |
| "epoch": 0.48132780082987553, | |
| "grad_norm": 2.655610966122537, | |
| "learning_rate": 6.18711045122814e-06, | |
| "loss": 0.4931, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 0.483402489626556, | |
| "grad_norm": 2.561229036359029, | |
| "learning_rate": 6.151904850087265e-06, | |
| "loss": 0.4853, | |
| "step": 1165 | |
| }, | |
| { | |
| "epoch": 0.4854771784232365, | |
| "grad_norm": 2.5716333011695487, | |
| "learning_rate": 6.116638835256943e-06, | |
| "loss": 0.4662, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 0.487551867219917, | |
| "grad_norm": 2.6260675883477123, | |
| "learning_rate": 6.081314256325762e-06, | |
| "loss": 0.4771, | |
| "step": 1175 | |
| }, | |
| { | |
| "epoch": 0.4896265560165975, | |
| "grad_norm": 2.464680850201926, | |
| "learning_rate": 6.045932965953813e-06, | |
| "loss": 0.4819, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 0.491701244813278, | |
| "grad_norm": 2.4612043275661852, | |
| "learning_rate": 6.010496819775518e-06, | |
| "loss": 0.4744, | |
| "step": 1185 | |
| }, | |
| { | |
| "epoch": 0.49377593360995853, | |
| "grad_norm": 2.4847373767230994, | |
| "learning_rate": 5.9750076763023025e-06, | |
| "loss": 0.4895, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 0.495850622406639, | |
| "grad_norm": 2.324351303455581, | |
| "learning_rate": 5.939467396825137e-06, | |
| "loss": 0.4771, | |
| "step": 1195 | |
| }, | |
| { | |
| "epoch": 0.4979253112033195, | |
| "grad_norm": 2.4411119886082546, | |
| "learning_rate": 5.903877845316906e-06, | |
| "loss": 0.4698, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 2.5240319324749887, | |
| "learning_rate": 5.8682408883346535e-06, | |
| "loss": 0.4705, | |
| "step": 1205 | |
| }, | |
| { | |
| "epoch": 0.5020746887966805, | |
| "grad_norm": 2.6132898743051123, | |
| "learning_rate": 5.832558394921688e-06, | |
| "loss": 0.4557, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 0.504149377593361, | |
| "grad_norm": 2.484591361229376, | |
| "learning_rate": 5.796832236509556e-06, | |
| "loss": 0.4753, | |
| "step": 1215 | |
| }, | |
| { | |
| "epoch": 0.5062240663900415, | |
| "grad_norm": 2.392319441698671, | |
| "learning_rate": 5.761064286819895e-06, | |
| "loss": 0.458, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 0.508298755186722, | |
| "grad_norm": 2.6251874596416966, | |
| "learning_rate": 5.725256421766158e-06, | |
| "loss": 0.4809, | |
| "step": 1225 | |
| }, | |
| { | |
| "epoch": 0.5103734439834025, | |
| "grad_norm": 2.4584477311068125, | |
| "learning_rate": 5.689410519355226e-06, | |
| "loss": 0.4709, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 0.5124481327800829, | |
| "grad_norm": 2.3560580563552858, | |
| "learning_rate": 5.653528459588925e-06, | |
| "loss": 0.4659, | |
| "step": 1235 | |
| }, | |
| { | |
| "epoch": 0.5145228215767634, | |
| "grad_norm": 2.485258794734814, | |
| "learning_rate": 5.617612124365411e-06, | |
| "loss": 0.4647, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 0.516597510373444, | |
| "grad_norm": 2.4250973675636938, | |
| "learning_rate": 5.5816633973804766e-06, | |
| "loss": 0.4721, | |
| "step": 1245 | |
| }, | |
| { | |
| "epoch": 0.5186721991701245, | |
| "grad_norm": 2.4772730090302515, | |
| "learning_rate": 5.545684164028764e-06, | |
| "loss": 0.4558, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 0.520746887966805, | |
| "grad_norm": 2.6817558433388053, | |
| "learning_rate": 5.509676311304869e-06, | |
| "loss": 0.4632, | |
| "step": 1255 | |
| }, | |
| { | |
| "epoch": 0.5228215767634855, | |
| "grad_norm": 2.482278609637873, | |
| "learning_rate": 5.4736417277043865e-06, | |
| "loss": 0.4662, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 0.524896265560166, | |
| "grad_norm": 2.4538360481145873, | |
| "learning_rate": 5.4375823031248545e-06, | |
| "loss": 0.4542, | |
| "step": 1265 | |
| }, | |
| { | |
| "epoch": 0.5269709543568465, | |
| "grad_norm": 2.3974115486101693, | |
| "learning_rate": 5.401499928766644e-06, | |
| "loss": 0.4688, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 0.529045643153527, | |
| "grad_norm": 2.35868507934765, | |
| "learning_rate": 5.365396497033763e-06, | |
| "loss": 0.4528, | |
| "step": 1275 | |
| }, | |
| { | |
| "epoch": 0.5311203319502075, | |
| "grad_norm": 2.350069783189846, | |
| "learning_rate": 5.32927390143462e-06, | |
| "loss": 0.4606, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 0.533195020746888, | |
| "grad_norm": 2.48472920397625, | |
| "learning_rate": 5.293134036482697e-06, | |
| "loss": 0.443, | |
| "step": 1285 | |
| }, | |
| { | |
| "epoch": 0.5352697095435685, | |
| "grad_norm": 2.4861989102755, | |
| "learning_rate": 5.256978797597202e-06, | |
| "loss": 0.4639, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 0.5373443983402489, | |
| "grad_norm": 2.4356508616340116, | |
| "learning_rate": 5.220810081003656e-06, | |
| "loss": 0.4453, | |
| "step": 1295 | |
| }, | |
| { | |
| "epoch": 0.5394190871369294, | |
| "grad_norm": 2.7333647570132213, | |
| "learning_rate": 5.184629783634441e-06, | |
| "loss": 0.4575, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 0.5414937759336099, | |
| "grad_norm": 2.4190653155415967, | |
| "learning_rate": 5.1484398030293135e-06, | |
| "loss": 0.4522, | |
| "step": 1305 | |
| }, | |
| { | |
| "epoch": 0.5435684647302904, | |
| "grad_norm": 2.4062268029705636, | |
| "learning_rate": 5.112242037235885e-06, | |
| "loss": 0.4517, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 0.5456431535269709, | |
| "grad_norm": 2.474917237517535, | |
| "learning_rate": 5.076038384710077e-06, | |
| "loss": 0.456, | |
| "step": 1315 | |
| }, | |
| { | |
| "epoch": 0.5477178423236515, | |
| "grad_norm": 2.5820885338481077, | |
| "learning_rate": 5.039830744216548e-06, | |
| "loss": 0.4575, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 0.549792531120332, | |
| "grad_norm": 2.5020192978566365, | |
| "learning_rate": 5.003621014729113e-06, | |
| "loss": 0.4468, | |
| "step": 1325 | |
| }, | |
| { | |
| "epoch": 0.5518672199170125, | |
| "grad_norm": 2.4844474809833224, | |
| "learning_rate": 4.967411095331149e-06, | |
| "loss": 0.4426, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 0.553941908713693, | |
| "grad_norm": 2.567949384188064, | |
| "learning_rate": 4.931202885115994e-06, | |
| "loss": 0.4491, | |
| "step": 1335 | |
| }, | |
| { | |
| "epoch": 0.5560165975103735, | |
| "grad_norm": 2.3971814860863128, | |
| "learning_rate": 4.894998283087341e-06, | |
| "loss": 0.4521, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 0.558091286307054, | |
| "grad_norm": 2.6050382832392502, | |
| "learning_rate": 4.858799188059651e-06, | |
| "loss": 0.4402, | |
| "step": 1345 | |
| }, | |
| { | |
| "epoch": 0.5601659751037344, | |
| "grad_norm": 2.504067986804623, | |
| "learning_rate": 4.822607498558555e-06, | |
| "loss": 0.4309, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 0.5622406639004149, | |
| "grad_norm": 2.436795227940985, | |
| "learning_rate": 4.786425112721293e-06, | |
| "loss": 0.4432, | |
| "step": 1355 | |
| }, | |
| { | |
| "epoch": 0.5643153526970954, | |
| "grad_norm": 2.463086006536084, | |
| "learning_rate": 4.75025392819715e-06, | |
| "loss": 0.4547, | |
| "step": 1360 | |
| }, | |
| { | |
| "epoch": 0.5663900414937759, | |
| "grad_norm": 2.526031011478867, | |
| "learning_rate": 4.714095842047952e-06, | |
| "loss": 0.4382, | |
| "step": 1365 | |
| }, | |
| { | |
| "epoch": 0.5684647302904564, | |
| "grad_norm": 2.484312790550504, | |
| "learning_rate": 4.677952750648544e-06, | |
| "loss": 0.4395, | |
| "step": 1370 | |
| }, | |
| { | |
| "epoch": 0.5705394190871369, | |
| "grad_norm": 2.51470713708296, | |
| "learning_rate": 4.641826549587352e-06, | |
| "loss": 0.4324, | |
| "step": 1375 | |
| }, | |
| { | |
| "epoch": 0.5726141078838174, | |
| "grad_norm": 2.328972994785194, | |
| "learning_rate": 4.605719133566955e-06, | |
| "loss": 0.4409, | |
| "step": 1380 | |
| }, | |
| { | |
| "epoch": 0.5746887966804979, | |
| "grad_norm": 2.467684453460364, | |
| "learning_rate": 4.56963239630472e-06, | |
| "loss": 0.4502, | |
| "step": 1385 | |
| }, | |
| { | |
| "epoch": 0.5767634854771784, | |
| "grad_norm": 2.5795480839970093, | |
| "learning_rate": 4.533568230433477e-06, | |
| "loss": 0.4417, | |
| "step": 1390 | |
| }, | |
| { | |
| "epoch": 0.578838174273859, | |
| "grad_norm": 2.3301068759580907, | |
| "learning_rate": 4.497528527402262e-06, | |
| "loss": 0.4361, | |
| "step": 1395 | |
| }, | |
| { | |
| "epoch": 0.5809128630705395, | |
| "grad_norm": 2.581867988962586, | |
| "learning_rate": 4.461515177377113e-06, | |
| "loss": 0.4398, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 0.58298755186722, | |
| "grad_norm": 2.437392550100252, | |
| "learning_rate": 4.42553006914194e-06, | |
| "loss": 0.4234, | |
| "step": 1405 | |
| }, | |
| { | |
| "epoch": 0.5850622406639004, | |
| "grad_norm": 2.43460113329211, | |
| "learning_rate": 4.3895750899994566e-06, | |
| "loss": 0.4299, | |
| "step": 1410 | |
| }, | |
| { | |
| "epoch": 0.5871369294605809, | |
| "grad_norm": 2.4540600474475016, | |
| "learning_rate": 4.353652125672208e-06, | |
| "loss": 0.429, | |
| "step": 1415 | |
| }, | |
| { | |
| "epoch": 0.5892116182572614, | |
| "grad_norm": 2.402133768218254, | |
| "learning_rate": 4.317763060203665e-06, | |
| "loss": 0.4195, | |
| "step": 1420 | |
| }, | |
| { | |
| "epoch": 0.5912863070539419, | |
| "grad_norm": 2.3040455325782103, | |
| "learning_rate": 4.281909775859411e-06, | |
| "loss": 0.4142, | |
| "step": 1425 | |
| }, | |
| { | |
| "epoch": 0.5933609958506224, | |
| "grad_norm": 2.437892690340681, | |
| "learning_rate": 4.246094153028426e-06, | |
| "loss": 0.4376, | |
| "step": 1430 | |
| }, | |
| { | |
| "epoch": 0.5954356846473029, | |
| "grad_norm": 2.4883366883550067, | |
| "learning_rate": 4.210318070124465e-06, | |
| "loss": 0.4343, | |
| "step": 1435 | |
| }, | |
| { | |
| "epoch": 0.5975103734439834, | |
| "grad_norm": 2.623855396529678, | |
| "learning_rate": 4.1745834034875435e-06, | |
| "loss": 0.4316, | |
| "step": 1440 | |
| }, | |
| { | |
| "epoch": 0.5995850622406639, | |
| "grad_norm": 2.4818254566920186, | |
| "learning_rate": 4.138892027285525e-06, | |
| "loss": 0.4201, | |
| "step": 1445 | |
| }, | |
| { | |
| "epoch": 0.6016597510373444, | |
| "grad_norm": 2.487523042972382, | |
| "learning_rate": 4.10324581341583e-06, | |
| "loss": 0.4159, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 0.6037344398340249, | |
| "grad_norm": 2.4142763091450186, | |
| "learning_rate": 4.067646631407259e-06, | |
| "loss": 0.4171, | |
| "step": 1455 | |
| }, | |
| { | |
| "epoch": 0.6058091286307054, | |
| "grad_norm": 2.535593504431872, | |
| "learning_rate": 4.0320963483219485e-06, | |
| "loss": 0.4239, | |
| "step": 1460 | |
| }, | |
| { | |
| "epoch": 0.6078838174273858, | |
| "grad_norm": 2.3293103348557094, | |
| "learning_rate": 3.996596828657437e-06, | |
| "loss": 0.423, | |
| "step": 1465 | |
| }, | |
| { | |
| "epoch": 0.6099585062240664, | |
| "grad_norm": 2.3875069760256045, | |
| "learning_rate": 3.961149934248893e-06, | |
| "loss": 0.426, | |
| "step": 1470 | |
| }, | |
| { | |
| "epoch": 0.6120331950207469, | |
| "grad_norm": 2.473197291626687, | |
| "learning_rate": 3.925757524171455e-06, | |
| "loss": 0.427, | |
| "step": 1475 | |
| }, | |
| { | |
| "epoch": 0.6141078838174274, | |
| "grad_norm": 2.7741410864382754, | |
| "learning_rate": 3.8904214546427355e-06, | |
| "loss": 0.4122, | |
| "step": 1480 | |
| }, | |
| { | |
| "epoch": 0.6161825726141079, | |
| "grad_norm": 2.4047571270961345, | |
| "learning_rate": 3.855143578925468e-06, | |
| "loss": 0.4131, | |
| "step": 1485 | |
| }, | |
| { | |
| "epoch": 0.6182572614107884, | |
| "grad_norm": 2.423122390796353, | |
| "learning_rate": 3.819925747230309e-06, | |
| "loss": 0.4234, | |
| "step": 1490 | |
| }, | |
| { | |
| "epoch": 0.6203319502074689, | |
| "grad_norm": 2.625870115353325, | |
| "learning_rate": 3.7847698066187975e-06, | |
| "loss": 0.4169, | |
| "step": 1495 | |
| }, | |
| { | |
| "epoch": 0.6224066390041494, | |
| "grad_norm": 2.3339760463576047, | |
| "learning_rate": 3.749677600906489e-06, | |
| "loss": 0.4167, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 0.6244813278008299, | |
| "grad_norm": 2.381759807149871, | |
| "learning_rate": 3.7146509705662453e-06, | |
| "loss": 0.4127, | |
| "step": 1505 | |
| }, | |
| { | |
| "epoch": 0.6265560165975104, | |
| "grad_norm": 2.49037216210766, | |
| "learning_rate": 3.6796917526317153e-06, | |
| "loss": 0.4196, | |
| "step": 1510 | |
| }, | |
| { | |
| "epoch": 0.6286307053941909, | |
| "grad_norm": 2.3307430355853813, | |
| "learning_rate": 3.6448017806009804e-06, | |
| "loss": 0.4032, | |
| "step": 1515 | |
| }, | |
| { | |
| "epoch": 0.6307053941908713, | |
| "grad_norm": 2.459076699312393, | |
| "learning_rate": 3.609982884340402e-06, | |
| "loss": 0.4191, | |
| "step": 1520 | |
| }, | |
| { | |
| "epoch": 0.6327800829875518, | |
| "grad_norm": 2.349992533584889, | |
| "learning_rate": 3.575236889988646e-06, | |
| "loss": 0.4096, | |
| "step": 1525 | |
| }, | |
| { | |
| "epoch": 0.6348547717842323, | |
| "grad_norm": 2.505421167742011, | |
| "learning_rate": 3.540565619860906e-06, | |
| "loss": 0.4149, | |
| "step": 1530 | |
| }, | |
| { | |
| "epoch": 0.6369294605809128, | |
| "grad_norm": 2.3662854321031994, | |
| "learning_rate": 3.5059708923533354e-06, | |
| "loss": 0.4092, | |
| "step": 1535 | |
| }, | |
| { | |
| "epoch": 0.6390041493775933, | |
| "grad_norm": 2.33810078459921, | |
| "learning_rate": 3.4714545218476727e-06, | |
| "loss": 0.4022, | |
| "step": 1540 | |
| }, | |
| { | |
| "epoch": 0.6410788381742739, | |
| "grad_norm": 2.292723665647368, | |
| "learning_rate": 3.437018318616084e-06, | |
| "loss": 0.3924, | |
| "step": 1545 | |
| }, | |
| { | |
| "epoch": 0.6431535269709544, | |
| "grad_norm": 2.3059685019935343, | |
| "learning_rate": 3.4026640887262196e-06, | |
| "loss": 0.4062, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 0.6452282157676349, | |
| "grad_norm": 2.4576342501679966, | |
| "learning_rate": 3.3683936339464957e-06, | |
| "loss": 0.408, | |
| "step": 1555 | |
| }, | |
| { | |
| "epoch": 0.6473029045643154, | |
| "grad_norm": 2.2902363586226624, | |
| "learning_rate": 3.334208751651593e-06, | |
| "loss": 0.4123, | |
| "step": 1560 | |
| }, | |
| { | |
| "epoch": 0.6493775933609959, | |
| "grad_norm": 2.2743147574529328, | |
| "learning_rate": 3.300111234728191e-06, | |
| "loss": 0.4097, | |
| "step": 1565 | |
| }, | |
| { | |
| "epoch": 0.6514522821576764, | |
| "grad_norm": 2.4569598537485327, | |
| "learning_rate": 3.2661028714809405e-06, | |
| "loss": 0.4012, | |
| "step": 1570 | |
| }, | |
| { | |
| "epoch": 0.6535269709543569, | |
| "grad_norm": 2.28227869578094, | |
| "learning_rate": 3.2321854455386657e-06, | |
| "loss": 0.4008, | |
| "step": 1575 | |
| }, | |
| { | |
| "epoch": 0.6556016597510373, | |
| "grad_norm": 2.4708407304872533, | |
| "learning_rate": 3.198360735760827e-06, | |
| "loss": 0.3873, | |
| "step": 1580 | |
| }, | |
| { | |
| "epoch": 0.6576763485477178, | |
| "grad_norm": 2.2707165698258103, | |
| "learning_rate": 3.1646305161442183e-06, | |
| "loss": 0.4059, | |
| "step": 1585 | |
| }, | |
| { | |
| "epoch": 0.6597510373443983, | |
| "grad_norm": 2.2036975566545993, | |
| "learning_rate": 3.1309965557299303e-06, | |
| "loss": 0.3878, | |
| "step": 1590 | |
| }, | |
| { | |
| "epoch": 0.6618257261410788, | |
| "grad_norm": 2.4960520314367627, | |
| "learning_rate": 3.097460618510571e-06, | |
| "loss": 0.3885, | |
| "step": 1595 | |
| }, | |
| { | |
| "epoch": 0.6639004149377593, | |
| "grad_norm": 2.2804809511371755, | |
| "learning_rate": 3.064024463337747e-06, | |
| "loss": 0.4002, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 0.6659751037344398, | |
| "grad_norm": 2.6325297095462767, | |
| "learning_rate": 3.0306898438298184e-06, | |
| "loss": 0.4003, | |
| "step": 1605 | |
| }, | |
| { | |
| "epoch": 0.6680497925311203, | |
| "grad_norm": 2.366691569211348, | |
| "learning_rate": 2.997458508279928e-06, | |
| "loss": 0.3941, | |
| "step": 1610 | |
| }, | |
| { | |
| "epoch": 0.6701244813278008, | |
| "grad_norm": 2.2056640704322494, | |
| "learning_rate": 2.964332199564309e-06, | |
| "loss": 0.3966, | |
| "step": 1615 | |
| }, | |
| { | |
| "epoch": 0.6721991701244814, | |
| "grad_norm": 2.423642247816294, | |
| "learning_rate": 2.9313126550508762e-06, | |
| "loss": 0.396, | |
| "step": 1620 | |
| }, | |
| { | |
| "epoch": 0.6742738589211619, | |
| "grad_norm": 2.433390053325066, | |
| "learning_rate": 2.8984016065081073e-06, | |
| "loss": 0.4023, | |
| "step": 1625 | |
| }, | |
| { | |
| "epoch": 0.6763485477178424, | |
| "grad_norm": 2.357419717448147, | |
| "learning_rate": 2.865600780014216e-06, | |
| "loss": 0.4088, | |
| "step": 1630 | |
| }, | |
| { | |
| "epoch": 0.6784232365145229, | |
| "grad_norm": 2.245544606902691, | |
| "learning_rate": 2.8329118958666236e-06, | |
| "loss": 0.3858, | |
| "step": 1635 | |
| }, | |
| { | |
| "epoch": 0.6804979253112033, | |
| "grad_norm": 2.439298195310241, | |
| "learning_rate": 2.80033666849174e-06, | |
| "loss": 0.3986, | |
| "step": 1640 | |
| }, | |
| { | |
| "epoch": 0.6825726141078838, | |
| "grad_norm": 2.4100345874144518, | |
| "learning_rate": 2.7678768063550454e-06, | |
| "loss": 0.3946, | |
| "step": 1645 | |
| }, | |
| { | |
| "epoch": 0.6846473029045643, | |
| "grad_norm": 2.3584382118069973, | |
| "learning_rate": 2.735534011871479e-06, | |
| "loss": 0.378, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 0.6867219917012448, | |
| "grad_norm": 2.5751863977070903, | |
| "learning_rate": 2.7033099813161696e-06, | |
| "loss": 0.3901, | |
| "step": 1655 | |
| }, | |
| { | |
| "epoch": 0.6887966804979253, | |
| "grad_norm": 2.2159484402883827, | |
| "learning_rate": 2.6712064047354515e-06, | |
| "loss": 0.393, | |
| "step": 1660 | |
| }, | |
| { | |
| "epoch": 0.6908713692946058, | |
| "grad_norm": 2.3569350609642257, | |
| "learning_rate": 2.6392249658582454e-06, | |
| "loss": 0.3818, | |
| "step": 1665 | |
| }, | |
| { | |
| "epoch": 0.6929460580912863, | |
| "grad_norm": 2.476100739673621, | |
| "learning_rate": 2.607367342007738e-06, | |
| "loss": 0.387, | |
| "step": 1670 | |
| }, | |
| { | |
| "epoch": 0.6950207468879668, | |
| "grad_norm": 2.394265629557479, | |
| "learning_rate": 2.5756352040134193e-06, | |
| "loss": 0.3893, | |
| "step": 1675 | |
| }, | |
| { | |
| "epoch": 0.6970954356846473, | |
| "grad_norm": 2.4572385167590234, | |
| "learning_rate": 2.5440302161234542e-06, | |
| "loss": 0.3962, | |
| "step": 1680 | |
| }, | |
| { | |
| "epoch": 0.6991701244813278, | |
| "grad_norm": 2.3166108869604387, | |
| "learning_rate": 2.5125540359173893e-06, | |
| "loss": 0.3935, | |
| "step": 1685 | |
| }, | |
| { | |
| "epoch": 0.7012448132780082, | |
| "grad_norm": 2.446525027779214, | |
| "learning_rate": 2.481208314219233e-06, | |
| "loss": 0.3882, | |
| "step": 1690 | |
| }, | |
| { | |
| "epoch": 0.7033195020746889, | |
| "grad_norm": 2.284958027056727, | |
| "learning_rate": 2.449994695010856e-06, | |
| "loss": 0.3758, | |
| "step": 1695 | |
| }, | |
| { | |
| "epoch": 0.7053941908713693, | |
| "grad_norm": 2.516170194826004, | |
| "learning_rate": 2.4189148153457875e-06, | |
| "loss": 0.3882, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.7074688796680498, | |
| "grad_norm": 2.3397843827362914, | |
| "learning_rate": 2.387970305263349e-06, | |
| "loss": 0.3887, | |
| "step": 1705 | |
| }, | |
| { | |
| "epoch": 0.7095435684647303, | |
| "grad_norm": 2.3903105786705123, | |
| "learning_rate": 2.3571627877031596e-06, | |
| "loss": 0.3785, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 0.7116182572614108, | |
| "grad_norm": 2.4388910192687523, | |
| "learning_rate": 2.326493878420028e-06, | |
| "loss": 0.3704, | |
| "step": 1715 | |
| }, | |
| { | |
| "epoch": 0.7136929460580913, | |
| "grad_norm": 2.3725455021448245, | |
| "learning_rate": 2.295965185899205e-06, | |
| "loss": 0.3778, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 0.7157676348547718, | |
| "grad_norm": 2.3541520314694946, | |
| "learning_rate": 2.265578311272021e-06, | |
| "loss": 0.3827, | |
| "step": 1725 | |
| }, | |
| { | |
| "epoch": 0.7178423236514523, | |
| "grad_norm": 2.318059450682999, | |
| "learning_rate": 2.2353348482319233e-06, | |
| "loss": 0.3641, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 0.7199170124481328, | |
| "grad_norm": 2.303956633945153, | |
| "learning_rate": 2.2052363829508776e-06, | |
| "loss": 0.3804, | |
| "step": 1735 | |
| }, | |
| { | |
| "epoch": 0.7219917012448133, | |
| "grad_norm": 2.3145838554705547, | |
| "learning_rate": 2.1752844939961926e-06, | |
| "loss": 0.3747, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 0.7240663900414938, | |
| "grad_norm": 2.2586817786943603, | |
| "learning_rate": 2.1454807522477128e-06, | |
| "loss": 0.3722, | |
| "step": 1745 | |
| }, | |
| { | |
| "epoch": 0.7261410788381742, | |
| "grad_norm": 2.4038449021090256, | |
| "learning_rate": 2.1158267208154497e-06, | |
| "loss": 0.3807, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 0.7282157676348547, | |
| "grad_norm": 2.2154594010619166, | |
| "learning_rate": 2.0863239549575865e-06, | |
| "loss": 0.3763, | |
| "step": 1755 | |
| }, | |
| { | |
| "epoch": 0.7302904564315352, | |
| "grad_norm": 2.5107957004825976, | |
| "learning_rate": 2.0569740019989136e-06, | |
| "loss": 0.3752, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 0.7323651452282157, | |
| "grad_norm": 2.350354806134385, | |
| "learning_rate": 2.0277784012496865e-06, | |
| "loss": 0.3819, | |
| "step": 1765 | |
| }, | |
| { | |
| "epoch": 0.7344398340248963, | |
| "grad_norm": 2.4464933220936125, | |
| "learning_rate": 1.998738683924875e-06, | |
| "loss": 0.3628, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 0.7365145228215768, | |
| "grad_norm": 2.5270841137858064, | |
| "learning_rate": 1.9698563730638794e-06, | |
| "loss": 0.3754, | |
| "step": 1775 | |
| }, | |
| { | |
| "epoch": 0.7385892116182573, | |
| "grad_norm": 2.250774099933747, | |
| "learning_rate": 1.9411329834506286e-06, | |
| "loss": 0.3667, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 0.7406639004149378, | |
| "grad_norm": 2.3407235003652804, | |
| "learning_rate": 1.9125700215341476e-06, | |
| "loss": 0.3735, | |
| "step": 1785 | |
| }, | |
| { | |
| "epoch": 0.7427385892116183, | |
| "grad_norm": 2.3446167395429347, | |
| "learning_rate": 1.8841689853495516e-06, | |
| "loss": 0.3723, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 0.7448132780082988, | |
| "grad_norm": 2.3350133034877656, | |
| "learning_rate": 1.8559313644394677e-06, | |
| "loss": 0.3785, | |
| "step": 1795 | |
| }, | |
| { | |
| "epoch": 0.7468879668049793, | |
| "grad_norm": 2.37500798404668, | |
| "learning_rate": 1.827858639775925e-06, | |
| "loss": 0.3786, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 0.7489626556016598, | |
| "grad_norm": 2.4392836582028945, | |
| "learning_rate": 1.7999522836826744e-06, | |
| "loss": 0.3736, | |
| "step": 1805 | |
| }, | |
| { | |
| "epoch": 0.7510373443983402, | |
| "grad_norm": 2.408718217237952, | |
| "learning_rate": 1.7722137597579698e-06, | |
| "loss": 0.3648, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 0.7531120331950207, | |
| "grad_norm": 2.4393541244678216, | |
| "learning_rate": 1.744644522797817e-06, | |
| "loss": 0.3556, | |
| "step": 1815 | |
| }, | |
| { | |
| "epoch": 0.7551867219917012, | |
| "grad_norm": 2.360665467065917, | |
| "learning_rate": 1.7172460187196588e-06, | |
| "loss": 0.3754, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 0.7572614107883817, | |
| "grad_norm": 2.3486896399291, | |
| "learning_rate": 1.6900196844865575e-06, | |
| "loss": 0.3615, | |
| "step": 1825 | |
| }, | |
| { | |
| "epoch": 0.7593360995850622, | |
| "grad_norm": 2.330649947928342, | |
| "learning_rate": 1.6629669480318166e-06, | |
| "loss": 0.3763, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 0.7614107883817427, | |
| "grad_norm": 2.3447118121017287, | |
| "learning_rate": 1.6360892281841007e-06, | |
| "loss": 0.3639, | |
| "step": 1835 | |
| }, | |
| { | |
| "epoch": 0.7634854771784232, | |
| "grad_norm": 2.507491155826583, | |
| "learning_rate": 1.609387934593019e-06, | |
| "loss": 0.3779, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 0.7655601659751037, | |
| "grad_norm": 2.288411757086649, | |
| "learning_rate": 1.5828644676551892e-06, | |
| "loss": 0.366, | |
| "step": 1845 | |
| }, | |
| { | |
| "epoch": 0.7676348547717843, | |
| "grad_norm": 2.3840080652247826, | |
| "learning_rate": 1.5565202184408e-06, | |
| "loss": 0.3589, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 0.7697095435684648, | |
| "grad_norm": 2.4900677150816364, | |
| "learning_rate": 1.5303565686206452e-06, | |
| "loss": 0.3683, | |
| "step": 1855 | |
| }, | |
| { | |
| "epoch": 0.7717842323651453, | |
| "grad_norm": 2.5276222084732067, | |
| "learning_rate": 1.5043748903936672e-06, | |
| "loss": 0.3725, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 0.7738589211618258, | |
| "grad_norm": 2.406387332540463, | |
| "learning_rate": 1.4785765464149836e-06, | |
| "loss": 0.3511, | |
| "step": 1865 | |
| }, | |
| { | |
| "epoch": 0.7759336099585062, | |
| "grad_norm": 2.409678473099456, | |
| "learning_rate": 1.4529628897244214e-06, | |
| "loss": 0.3637, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 0.7780082987551867, | |
| "grad_norm": 2.372236015081896, | |
| "learning_rate": 1.4275352636755613e-06, | |
| "loss": 0.3693, | |
| "step": 1875 | |
| }, | |
| { | |
| "epoch": 0.7800829875518672, | |
| "grad_norm": 2.3018141960934013, | |
| "learning_rate": 1.4022950018652703e-06, | |
| "loss": 0.3557, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 0.7821576763485477, | |
| "grad_norm": 2.3440334623732375, | |
| "learning_rate": 1.3772434280637737e-06, | |
| "loss": 0.3669, | |
| "step": 1885 | |
| }, | |
| { | |
| "epoch": 0.7842323651452282, | |
| "grad_norm": 2.4523078746497755, | |
| "learning_rate": 1.3523818561452145e-06, | |
| "loss": 0.3646, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 0.7863070539419087, | |
| "grad_norm": 2.4219074003538346, | |
| "learning_rate": 1.3277115900187515e-06, | |
| "loss": 0.3577, | |
| "step": 1895 | |
| }, | |
| { | |
| "epoch": 0.7883817427385892, | |
| "grad_norm": 2.8682882199437842, | |
| "learning_rate": 1.3032339235601749e-06, | |
| "loss": 0.3474, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 0.7904564315352697, | |
| "grad_norm": 2.329298210972444, | |
| "learning_rate": 1.2789501405440397e-06, | |
| "loss": 0.3725, | |
| "step": 1905 | |
| }, | |
| { | |
| "epoch": 0.7925311203319502, | |
| "grad_norm": 2.365427473253902, | |
| "learning_rate": 1.254861514576348e-06, | |
| "loss": 0.3705, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 0.7946058091286307, | |
| "grad_norm": 2.449442819984942, | |
| "learning_rate": 1.2309693090277392e-06, | |
| "loss": 0.3498, | |
| "step": 1915 | |
| }, | |
| { | |
| "epoch": 0.7966804979253111, | |
| "grad_norm": 2.2309766163591362, | |
| "learning_rate": 1.207274776967235e-06, | |
| "loss": 0.3497, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 0.7987551867219918, | |
| "grad_norm": 2.5332149638146606, | |
| "learning_rate": 1.183779161096526e-06, | |
| "loss": 0.3533, | |
| "step": 1925 | |
| }, | |
| { | |
| "epoch": 0.8008298755186722, | |
| "grad_norm": 2.464873395759963, | |
| "learning_rate": 1.1604836936847852e-06, | |
| "loss": 0.3669, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 0.8029045643153527, | |
| "grad_norm": 2.353844278985767, | |
| "learning_rate": 1.1373895965040522e-06, | |
| "loss": 0.3531, | |
| "step": 1935 | |
| }, | |
| { | |
| "epoch": 0.8049792531120332, | |
| "grad_norm": 2.478326080734339, | |
| "learning_rate": 1.1144980807651413e-06, | |
| "loss": 0.3469, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 0.8070539419087137, | |
| "grad_norm": 2.3657798765264837, | |
| "learning_rate": 1.0918103470541297e-06, | |
| "loss": 0.3434, | |
| "step": 1945 | |
| }, | |
| { | |
| "epoch": 0.8091286307053942, | |
| "grad_norm": 2.4345052899894517, | |
| "learning_rate": 1.069327585269383e-06, | |
| "loss": 0.3575, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 0.8112033195020747, | |
| "grad_norm": 2.335213511709306, | |
| "learning_rate": 1.047050974559149e-06, | |
| "loss": 0.3542, | |
| "step": 1955 | |
| }, | |
| { | |
| "epoch": 0.8132780082987552, | |
| "grad_norm": 2.3666035829182164, | |
| "learning_rate": 1.024981683259723e-06, | |
| "loss": 0.3484, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 0.8153526970954357, | |
| "grad_norm": 2.461171014362592, | |
| "learning_rate": 1.0031208688341609e-06, | |
| "loss": 0.3484, | |
| "step": 1965 | |
| }, | |
| { | |
| "epoch": 0.8174273858921162, | |
| "grad_norm": 2.5462843981961223, | |
| "learning_rate": 9.814696778115834e-07, | |
| "loss": 0.3466, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 0.8195020746887967, | |
| "grad_norm": 2.3821519258481705, | |
| "learning_rate": 9.600292457270416e-07, | |
| "loss": 0.3502, | |
| "step": 1975 | |
| }, | |
| { | |
| "epoch": 0.8215767634854771, | |
| "grad_norm": 2.397078021612525, | |
| "learning_rate": 9.388006970619557e-07, | |
| "loss": 0.3518, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 0.8236514522821576, | |
| "grad_norm": 2.3345715923241945, | |
| "learning_rate": 9.177851451851511e-07, | |
| "loss": 0.3483, | |
| "step": 1985 | |
| }, | |
| { | |
| "epoch": 0.8257261410788381, | |
| "grad_norm": 2.363653449360418, | |
| "learning_rate": 8.969836922944536e-07, | |
| "loss": 0.3414, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 0.8278008298755186, | |
| "grad_norm": 2.3624129572192616, | |
| "learning_rate": 8.76397429358895e-07, | |
| "loss": 0.359, | |
| "step": 1995 | |
| }, | |
| { | |
| "epoch": 0.8298755186721992, | |
| "grad_norm": 2.314328813522686, | |
| "learning_rate": 8.560274360614829e-07, | |
| "loss": 0.3385, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 0.8319502074688797, | |
| "grad_norm": 2.199340475549847, | |
| "learning_rate": 8.358747807425827e-07, | |
| "loss": 0.3539, | |
| "step": 2005 | |
| }, | |
| { | |
| "epoch": 0.8340248962655602, | |
| "grad_norm": 2.4399680516899633, | |
| "learning_rate": 8.159405203438875e-07, | |
| "loss": 0.3518, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 0.8360995850622407, | |
| "grad_norm": 2.4262479785068733, | |
| "learning_rate": 7.962257003529777e-07, | |
| "loss": 0.344, | |
| "step": 2015 | |
| }, | |
| { | |
| "epoch": 0.8381742738589212, | |
| "grad_norm": 2.3545302681837987, | |
| "learning_rate": 7.767313547484979e-07, | |
| "loss": 0.3487, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 0.8402489626556017, | |
| "grad_norm": 2.3064171274116747, | |
| "learning_rate": 7.574585059459171e-07, | |
| "loss": 0.3447, | |
| "step": 2025 | |
| }, | |
| { | |
| "epoch": 0.8423236514522822, | |
| "grad_norm": 2.5487816431519477, | |
| "learning_rate": 7.384081647439184e-07, | |
| "loss": 0.356, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 0.8443983402489627, | |
| "grad_norm": 2.459116013365039, | |
| "learning_rate": 7.195813302713761e-07, | |
| "loss": 0.3375, | |
| "step": 2035 | |
| }, | |
| { | |
| "epoch": 0.8464730290456431, | |
| "grad_norm": 2.4240198038099905, | |
| "learning_rate": 7.009789899349567e-07, | |
| "loss": 0.3427, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 0.8485477178423236, | |
| "grad_norm": 2.339613272043201, | |
| "learning_rate": 6.826021193673416e-07, | |
| "loss": 0.3481, | |
| "step": 2045 | |
| }, | |
| { | |
| "epoch": 0.8506224066390041, | |
| "grad_norm": 2.4864055871608484, | |
| "learning_rate": 6.644516823760439e-07, | |
| "loss": 0.3488, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 0.8526970954356846, | |
| "grad_norm": 2.3920126614464254, | |
| "learning_rate": 6.465286308928676e-07, | |
| "loss": 0.3427, | |
| "step": 2055 | |
| }, | |
| { | |
| "epoch": 0.8547717842323651, | |
| "grad_norm": 2.432956486348477, | |
| "learning_rate": 6.28833904923985e-07, | |
| "loss": 0.3522, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 0.8568464730290456, | |
| "grad_norm": 2.4012672614414243, | |
| "learning_rate": 6.113684325006286e-07, | |
| "loss": 0.3426, | |
| "step": 2065 | |
| }, | |
| { | |
| "epoch": 0.8589211618257261, | |
| "grad_norm": 2.4925509857922656, | |
| "learning_rate": 5.941331296304254e-07, | |
| "loss": 0.3428, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 0.8609958506224067, | |
| "grad_norm": 2.2832723407803606, | |
| "learning_rate": 5.771289002493502e-07, | |
| "loss": 0.3472, | |
| "step": 2075 | |
| }, | |
| { | |
| "epoch": 0.8630705394190872, | |
| "grad_norm": 2.3874716687155564, | |
| "learning_rate": 5.603566361743229e-07, | |
| "loss": 0.3413, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 0.8651452282157677, | |
| "grad_norm": 2.351570855990709, | |
| "learning_rate": 5.438172170564288e-07, | |
| "loss": 0.3322, | |
| "step": 2085 | |
| }, | |
| { | |
| "epoch": 0.8672199170124482, | |
| "grad_norm": 2.5008832392383207, | |
| "learning_rate": 5.275115103347884e-07, | |
| "loss": 0.3477, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 0.8692946058091287, | |
| "grad_norm": 2.3216651986422088, | |
| "learning_rate": 5.114403711910631e-07, | |
| "loss": 0.336, | |
| "step": 2095 | |
| }, | |
| { | |
| "epoch": 0.8713692946058091, | |
| "grad_norm": 2.326035363037846, | |
| "learning_rate": 4.956046425045991e-07, | |
| "loss": 0.3439, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 0.8734439834024896, | |
| "grad_norm": 2.4613685736167557, | |
| "learning_rate": 4.80005154808228e-07, | |
| "loss": 0.3468, | |
| "step": 2105 | |
| }, | |
| { | |
| "epoch": 0.8755186721991701, | |
| "grad_norm": 2.3791168252802537, | |
| "learning_rate": 4.646427262447034e-07, | |
| "loss": 0.3572, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 0.8775933609958506, | |
| "grad_norm": 2.437202584127699, | |
| "learning_rate": 4.49518162523791e-07, | |
| "loss": 0.3462, | |
| "step": 2115 | |
| }, | |
| { | |
| "epoch": 0.8796680497925311, | |
| "grad_norm": 2.4764470732059096, | |
| "learning_rate": 4.34632256880016e-07, | |
| "loss": 0.3445, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 0.8817427385892116, | |
| "grad_norm": 2.3787585521250945, | |
| "learning_rate": 4.1998579003105553e-07, | |
| "loss": 0.3401, | |
| "step": 2125 | |
| }, | |
| { | |
| "epoch": 0.8838174273858921, | |
| "grad_norm": 2.3948308594804346, | |
| "learning_rate": 4.0557953013679873e-07, | |
| "loss": 0.3536, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 0.8858921161825726, | |
| "grad_norm": 2.3397934739178363, | |
| "learning_rate": 3.914142327590531e-07, | |
| "loss": 0.3334, | |
| "step": 2135 | |
| }, | |
| { | |
| "epoch": 0.8879668049792531, | |
| "grad_norm": 2.45922644953553, | |
| "learning_rate": 3.7749064082191976e-07, | |
| "loss": 0.3349, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 0.8900414937759336, | |
| "grad_norm": 2.413840835433163, | |
| "learning_rate": 3.6380948457283293e-07, | |
| "loss": 0.3315, | |
| "step": 2145 | |
| }, | |
| { | |
| "epoch": 0.8921161825726142, | |
| "grad_norm": 2.347524977176241, | |
| "learning_rate": 3.5037148154425494e-07, | |
| "loss": 0.346, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 0.8941908713692946, | |
| "grad_norm": 2.47677069324449, | |
| "learning_rate": 3.3717733651604967e-07, | |
| "loss": 0.3415, | |
| "step": 2155 | |
| }, | |
| { | |
| "epoch": 0.8962655601659751, | |
| "grad_norm": 2.397745011270569, | |
| "learning_rate": 3.242277414785128e-07, | |
| "loss": 0.3384, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 0.8983402489626556, | |
| "grad_norm": 2.415367296859196, | |
| "learning_rate": 3.1152337559608725e-07, | |
| "loss": 0.3387, | |
| "step": 2165 | |
| }, | |
| { | |
| "epoch": 0.9004149377593361, | |
| "grad_norm": 2.23801113438236, | |
| "learning_rate": 2.990649051717348e-07, | |
| "loss": 0.3458, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 0.9024896265560166, | |
| "grad_norm": 2.4178418849940413, | |
| "learning_rate": 2.868529836119954e-07, | |
| "loss": 0.3347, | |
| "step": 2175 | |
| }, | |
| { | |
| "epoch": 0.9045643153526971, | |
| "grad_norm": 2.5082956753743204, | |
| "learning_rate": 2.7488825139272037e-07, | |
| "loss": 0.3368, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 0.9066390041493776, | |
| "grad_norm": 2.2379502275244194, | |
| "learning_rate": 2.631713360254734e-07, | |
| "loss": 0.3436, | |
| "step": 2185 | |
| }, | |
| { | |
| "epoch": 0.9087136929460581, | |
| "grad_norm": 2.424932946521832, | |
| "learning_rate": 2.5170285202462895e-07, | |
| "loss": 0.3446, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 0.9107883817427386, | |
| "grad_norm": 2.4571108685084107, | |
| "learning_rate": 2.4048340087513665e-07, | |
| "loss": 0.336, | |
| "step": 2195 | |
| }, | |
| { | |
| "epoch": 0.9128630705394191, | |
| "grad_norm": 2.3387656990162395, | |
| "learning_rate": 2.295135710009755e-07, | |
| "loss": 0.3414, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 0.9149377593360996, | |
| "grad_norm": 2.3291759526847113, | |
| "learning_rate": 2.1879393773429903e-07, | |
| "loss": 0.3442, | |
| "step": 2205 | |
| }, | |
| { | |
| "epoch": 0.91701244813278, | |
| "grad_norm": 2.365416524223355, | |
| "learning_rate": 2.0832506328525237e-07, | |
| "loss": 0.3317, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 0.9190871369294605, | |
| "grad_norm": 2.384106710657764, | |
| "learning_rate": 1.9810749671249353e-07, | |
| "loss": 0.347, | |
| "step": 2215 | |
| }, | |
| { | |
| "epoch": 0.921161825726141, | |
| "grad_norm": 2.380728030255305, | |
| "learning_rate": 1.8814177389439304e-07, | |
| "loss": 0.3466, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 0.9232365145228216, | |
| "grad_norm": 2.297466874753593, | |
| "learning_rate": 1.784284175009282e-07, | |
| "loss": 0.3368, | |
| "step": 2225 | |
| }, | |
| { | |
| "epoch": 0.9253112033195021, | |
| "grad_norm": 2.4935459102531685, | |
| "learning_rate": 1.68967936966275e-07, | |
| "loss": 0.3404, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 0.9273858921161826, | |
| "grad_norm": 2.36033858236717, | |
| "learning_rate": 1.5976082846208552e-07, | |
| "loss": 0.3499, | |
| "step": 2235 | |
| }, | |
| { | |
| "epoch": 0.9294605809128631, | |
| "grad_norm": 2.4730950951949353, | |
| "learning_rate": 1.5080757487146823e-07, | |
| "loss": 0.3327, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 0.9315352697095436, | |
| "grad_norm": 2.4704731647631255, | |
| "learning_rate": 1.4210864576365891e-07, | |
| "loss": 0.338, | |
| "step": 2245 | |
| }, | |
| { | |
| "epoch": 0.9336099585062241, | |
| "grad_norm": 2.3890389217669723, | |
| "learning_rate": 1.3366449736939845e-07, | |
| "loss": 0.3375, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 0.9356846473029046, | |
| "grad_norm": 2.6765870984768663, | |
| "learning_rate": 1.254755725570006e-07, | |
| "loss": 0.3504, | |
| "step": 2255 | |
| }, | |
| { | |
| "epoch": 0.9377593360995851, | |
| "grad_norm": 2.3147038405622333, | |
| "learning_rate": 1.1754230080912588e-07, | |
| "loss": 0.3267, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 0.9398340248962656, | |
| "grad_norm": 2.559635037749535, | |
| "learning_rate": 1.0986509820025915e-07, | |
| "loss": 0.3502, | |
| "step": 2265 | |
| }, | |
| { | |
| "epoch": 0.941908713692946, | |
| "grad_norm": 2.378006403702019, | |
| "learning_rate": 1.0244436737488373e-07, | |
| "loss": 0.3496, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 0.9439834024896265, | |
| "grad_norm": 2.4406461268285216, | |
| "learning_rate": 9.528049752636714e-08, | |
| "loss": 0.3292, | |
| "step": 2275 | |
| }, | |
| { | |
| "epoch": 0.946058091286307, | |
| "grad_norm": 2.3994648318804632, | |
| "learning_rate": 8.837386437654861e-08, | |
| "loss": 0.3369, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 0.9481327800829875, | |
| "grad_norm": 2.3387687484550583, | |
| "learning_rate": 8.172483015603139e-08, | |
| "loss": 0.3445, | |
| "step": 2285 | |
| }, | |
| { | |
| "epoch": 0.950207468879668, | |
| "grad_norm": 2.3329266437152243, | |
| "learning_rate": 7.533374358518974e-08, | |
| "loss": 0.3374, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 0.9522821576763485, | |
| "grad_norm": 2.344823680655971, | |
| "learning_rate": 6.920093985587351e-08, | |
| "loss": 0.3369, | |
| "step": 2295 | |
| }, | |
| { | |
| "epoch": 0.9543568464730291, | |
| "grad_norm": 2.297823156508915, | |
| "learning_rate": 6.332674061383492e-08, | |
| "loss": 0.3387, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 0.9564315352697096, | |
| "grad_norm": 2.436614135753504, | |
| "learning_rate": 5.7711453941852736e-08, | |
| "loss": 0.334, | |
| "step": 2305 | |
| }, | |
| { | |
| "epoch": 0.9585062240663901, | |
| "grad_norm": 2.2863118140565564, | |
| "learning_rate": 5.23553743435784e-08, | |
| "loss": 0.3284, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 0.9605809128630706, | |
| "grad_norm": 2.1820719536802504, | |
| "learning_rate": 4.7258782728092366e-08, | |
| "loss": 0.3293, | |
| "step": 2315 | |
| }, | |
| { | |
| "epoch": 0.9626556016597511, | |
| "grad_norm": 2.333916190818195, | |
| "learning_rate": 4.2421946395164174e-08, | |
| "loss": 0.33, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 0.9647302904564315, | |
| "grad_norm": 2.4479781364843096, | |
| "learning_rate": 3.784511902124033e-08, | |
| "loss": 0.3491, | |
| "step": 2325 | |
| }, | |
| { | |
| "epoch": 0.966804979253112, | |
| "grad_norm": 2.4249579257495566, | |
| "learning_rate": 3.352854064613553e-08, | |
| "loss": 0.345, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 0.9688796680497925, | |
| "grad_norm": 2.2853865711169785, | |
| "learning_rate": 2.9472437660446605e-08, | |
| "loss": 0.3358, | |
| "step": 2335 | |
| }, | |
| { | |
| "epoch": 0.970954356846473, | |
| "grad_norm": 2.4792048595371647, | |
| "learning_rate": 2.56770227936759e-08, | |
| "loss": 0.3365, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 0.9730290456431535, | |
| "grad_norm": 2.38571031272437, | |
| "learning_rate": 2.2142495103075757e-08, | |
| "loss": 0.3426, | |
| "step": 2345 | |
| }, | |
| { | |
| "epoch": 0.975103734439834, | |
| "grad_norm": 2.3533072019330095, | |
| "learning_rate": 1.8869039963210766e-08, | |
| "loss": 0.3384, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 0.9771784232365145, | |
| "grad_norm": 2.42964006589875, | |
| "learning_rate": 1.585682905623054e-08, | |
| "loss": 0.3406, | |
| "step": 2355 | |
| }, | |
| { | |
| "epoch": 0.979253112033195, | |
| "grad_norm": 2.2589268313436968, | |
| "learning_rate": 1.3106020362869675e-08, | |
| "loss": 0.3379, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 0.9813278008298755, | |
| "grad_norm": 2.4389590802442203, | |
| "learning_rate": 1.0616758154161633e-08, | |
| "loss": 0.342, | |
| "step": 2365 | |
| }, | |
| { | |
| "epoch": 0.983402489626556, | |
| "grad_norm": 2.387697736294216, | |
| "learning_rate": 8.38917298386921e-09, | |
| "loss": 0.3414, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 0.9854771784232366, | |
| "grad_norm": 2.2923054120317836, | |
| "learning_rate": 6.423381681641694e-09, | |
| "loss": 0.3416, | |
| "step": 2375 | |
| }, | |
| { | |
| "epoch": 0.9875518672199171, | |
| "grad_norm": 2.341070250308901, | |
| "learning_rate": 4.719487346884211e-09, | |
| "loss": 0.3353, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 0.9896265560165975, | |
| "grad_norm": 2.390677320345825, | |
| "learning_rate": 3.277579343352599e-09, | |
| "loss": 0.3273, | |
| "step": 2385 | |
| }, | |
| { | |
| "epoch": 0.991701244813278, | |
| "grad_norm": 2.3301588985719905, | |
| "learning_rate": 2.097733294464943e-09, | |
| "loss": 0.355, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 0.9937759336099585, | |
| "grad_norm": 2.423326896617218, | |
| "learning_rate": 1.1800110793358521e-09, | |
| "loss": 0.3402, | |
| "step": 2395 | |
| }, | |
| { | |
| "epoch": 0.995850622406639, | |
| "grad_norm": 2.3567177848437986, | |
| "learning_rate": 5.244608295323916e-10, | |
| "loss": 0.3361, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 0.9979253112033195, | |
| "grad_norm": 2.4643918613730165, | |
| "learning_rate": 1.3111692654776963e-10, | |
| "loss": 0.3175, | |
| "step": 2405 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 2.3790989221416656, | |
| "learning_rate": 0.0, | |
| "loss": 0.3387, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "eval_runtime": 3.4735, | |
| "eval_samples_per_second": 2.879, | |
| "eval_steps_per_second": 0.864, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "step": 2410, | |
| "total_flos": 252302485094400.0, | |
| "train_loss": 0.0, | |
| "train_runtime": 0.0084, | |
| "train_samples_per_second": 4572412.844, | |
| "train_steps_per_second": 285835.105 | |
| } | |
| ], | |
| "logging_steps": 5, | |
| "max_steps": 2410, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 100, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 252302485094400.0, | |
| "train_batch_size": 4, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
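
The object above is the `trainer_state.json` checkpoint state written by the `transformers` `Trainer` (the `log_history` array holds one entry per logging step, plus the final eval and train summary entries). Below is a minimal sketch of how one might replot the loss and learning-rate curves from this file; the local filename `trainer_state.json` and the use of matplotlib are assumptions for illustration, not part of the original run.

```python
# Minimal sketch: re-plot training loss and learning rate from a Trainer state file.
# Assumes the JSON above is saved locally as "trainer_state.json" (filename is an assumption).
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only the periodic training logs (entries carrying a "loss" value);
# the eval entry and the final summary entry use different keys and are skipped.
logs = [e for e in state["log_history"] if "loss" in e and "step" in e]

steps = [e["step"] for e in logs]
losses = [e["loss"] for e in logs]
lrs = [e["learning_rate"] for e in logs]

fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True, figsize=(8, 6))
ax_loss.plot(steps, losses)
ax_loss.set_ylabel("training loss")
ax_lr.plot(steps, lrs)
ax_lr.set_ylabel("learning rate")
ax_lr.set_xlabel("step")
fig.tight_layout()
plt.show()
```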