{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9997582789460963,
  "eval_steps": 500,
  "global_step": 1034,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0009668842156151801,
      "grad_norm": 19.466110229492188,
      "learning_rate": 0.0,
      "loss": 1.0726,
      "step": 1
    },
    {
      "epoch": 0.0019337684312303602,
      "grad_norm": 21.088153839111328,
      "learning_rate": 1.5625e-07,
      "loss": 1.0254,
      "step": 2
    },
    {
      "epoch": 0.0029006526468455403,
      "grad_norm": 19.635608673095703,
      "learning_rate": 3.125e-07,
      "loss": 1.043,
      "step": 3
    },
    {
      "epoch": 0.0038675368624607204,
      "grad_norm": 19.43179702758789,
      "learning_rate": 4.6875000000000006e-07,
      "loss": 1.1058,
      "step": 4
    },
    {
      "epoch": 0.0048344210780759,
      "grad_norm": 20.224998474121094,
      "learning_rate": 6.25e-07,
      "loss": 1.1457,
      "step": 5
    },
    {
      "epoch": 0.005801305293691081,
      "grad_norm": 16.54555320739746,
      "learning_rate": 7.8125e-07,
      "loss": 0.9547,
      "step": 6
    },
    {
      "epoch": 0.00676818950930626,
      "grad_norm": 17.746612548828125,
      "learning_rate": 9.375000000000001e-07,
      "loss": 1.0488,
      "step": 7
    },
    {
      "epoch": 0.007735073724921441,
      "grad_norm": 18.536388397216797,
      "learning_rate": 1.0937500000000001e-06,
      "loss": 1.0446,
      "step": 8
    },
    {
      "epoch": 0.00870195794053662,
      "grad_norm": 15.782112121582031,
      "learning_rate": 1.25e-06,
      "loss": 0.9769,
      "step": 9
    },
    {
      "epoch": 0.0096688421561518,
      "grad_norm": 15.247085571289062,
      "learning_rate": 1.40625e-06,
      "loss": 0.976,
      "step": 10
    },
    {
      "epoch": 0.010635726371766982,
      "grad_norm": 14.069637298583984,
      "learning_rate": 1.5625e-06,
      "loss": 1.037,
      "step": 11
    },
    {
      "epoch": 0.011602610587382161,
      "grad_norm": 9.867317199707031,
      "learning_rate": 1.71875e-06,
      "loss": 0.9776,
      "step": 12
    },
    {
      "epoch": 0.012569494802997341,
      "grad_norm": 9.262001991271973,
      "learning_rate": 1.8750000000000003e-06,
      "loss": 0.9161,
      "step": 13
    },
    {
      "epoch": 0.01353637901861252,
      "grad_norm": 9.465025901794434,
      "learning_rate": 2.0312500000000002e-06,
      "loss": 0.9926,
      "step": 14
    },
    {
      "epoch": 0.014503263234227702,
      "grad_norm": 9.50775146484375,
      "learning_rate": 2.1875000000000002e-06,
      "loss": 0.9811,
      "step": 15
    },
    {
      "epoch": 0.015470147449842882,
      "grad_norm": 8.129717826843262,
      "learning_rate": 2.3437500000000002e-06,
      "loss": 0.9271,
      "step": 16
    },
    {
      "epoch": 0.01643703166545806,
      "grad_norm": 9.526381492614746,
      "learning_rate": 2.5e-06,
      "loss": 0.896,
      "step": 17
    },
    {
      "epoch": 0.01740391588107324,
      "grad_norm": 8.398605346679688,
      "learning_rate": 2.65625e-06,
      "loss": 0.6924,
      "step": 18
    },
    {
      "epoch": 0.01837080009668842,
      "grad_norm": 9.33753776550293,
      "learning_rate": 2.8125e-06,
      "loss": 0.813,
      "step": 19
    },
    {
      "epoch": 0.0193376843123036,
      "grad_norm": 9.168170928955078,
      "learning_rate": 2.96875e-06,
      "loss": 0.8602,
      "step": 20
    },
    {
      "epoch": 0.02030456852791878,
      "grad_norm": 8.446990013122559,
      "learning_rate": 3.125e-06,
      "loss": 0.8427,
      "step": 21
    },
    {
      "epoch": 0.021271452743533963,
      "grad_norm": 6.478088855743408,
      "learning_rate": 3.28125e-06,
      "loss": 0.6606,
      "step": 22
    },
    {
      "epoch": 0.022238336959149143,
      "grad_norm": 6.63073205947876,
      "learning_rate": 3.4375e-06,
      "loss": 0.8054,
      "step": 23
    },
    {
      "epoch": 0.023205221174764323,
      "grad_norm": 7.6455302238464355,
      "learning_rate": 3.59375e-06,
      "loss": 0.7505,
      "step": 24
    },
    {
      "epoch": 0.024172105390379502,
      "grad_norm": 6.51140832901001,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.6872,
      "step": 25
    },
    {
      "epoch": 0.025138989605994682,
      "grad_norm": 5.949461460113525,
      "learning_rate": 3.90625e-06,
      "loss": 0.6455,
      "step": 26
    },
    {
      "epoch": 0.02610587382160986,
      "grad_norm": 6.92095422744751,
      "learning_rate": 4.0625000000000005e-06,
      "loss": 0.7218,
      "step": 27
    },
    {
      "epoch": 0.02707275803722504,
      "grad_norm": 5.846500873565674,
      "learning_rate": 4.21875e-06,
      "loss": 0.7175,
      "step": 28
    },
    {
      "epoch": 0.02803964225284022,
      "grad_norm": 5.6374430656433105,
      "learning_rate": 4.3750000000000005e-06,
      "loss": 0.7276,
      "step": 29
    },
    {
      "epoch": 0.029006526468455404,
      "grad_norm": 4.731724739074707,
      "learning_rate": 4.53125e-06,
      "loss": 0.6809,
      "step": 30
    },
    {
      "epoch": 0.029973410684070584,
      "grad_norm": 5.237879753112793,
      "learning_rate": 4.6875000000000004e-06,
      "loss": 0.702,
      "step": 31
    },
    {
      "epoch": 0.030940294899685764,
      "grad_norm": 4.484801769256592,
      "learning_rate": 4.84375e-06,
      "loss": 0.6112,
      "step": 32
    },
    {
      "epoch": 0.03190717911530094,
      "grad_norm": 4.658060073852539,
      "learning_rate": 5e-06,
      "loss": 0.5665,
      "step": 33
    },
    {
      "epoch": 0.03287406333091612,
      "grad_norm": 4.812199592590332,
      "learning_rate": 4.999987712204937e-06,
      "loss": 0.6284,
      "step": 34
    },
    {
      "epoch": 0.0338409475465313,
      "grad_norm": 4.881224155426025,
      "learning_rate": 4.999950848940538e-06,
      "loss": 0.6992,
      "step": 35
    },
    {
      "epoch": 0.03480783176214648,
      "grad_norm": 4.776272773742676,
      "learning_rate": 4.999889410569179e-06,
      "loss": 0.6326,
      "step": 36
    },
    {
      "epoch": 0.03577471597776166,
      "grad_norm": 4.670900344848633,
      "learning_rate": 4.999803397694811e-06,
      "loss": 0.6886,
      "step": 37
    },
    {
      "epoch": 0.03674160019337684,
      "grad_norm": 4.662517070770264,
      "learning_rate": 4.999692811162965e-06,
      "loss": 0.5526,
      "step": 38
    },
    {
      "epoch": 0.03770848440899202,
      "grad_norm": 4.519469738006592,
      "learning_rate": 4.999557652060729e-06,
      "loss": 0.6316,
      "step": 39
    },
    {
      "epoch": 0.0386753686246072,
      "grad_norm": 4.677404403686523,
      "learning_rate": 4.999397921716751e-06,
      "loss": 0.662,
      "step": 40
    },
    {
      "epoch": 0.03964225284022238,
      "grad_norm": 4.661558628082275,
      "learning_rate": 4.9992136217012184e-06,
      "loss": 0.6335,
      "step": 41
    },
    {
      "epoch": 0.04060913705583756,
      "grad_norm": 4.582630634307861,
      "learning_rate": 4.9990047538258416e-06,
      "loss": 0.6392,
      "step": 42
    },
    {
      "epoch": 0.04157602127145275,
      "grad_norm": 4.041533946990967,
      "learning_rate": 4.998771320143843e-06,
      "loss": 0.6174,
      "step": 43
    },
    {
      "epoch": 0.04254290548706793,
      "grad_norm": 4.323627948760986,
      "learning_rate": 4.99851332294993e-06,
      "loss": 0.5823,
      "step": 44
    },
    {
      "epoch": 0.043509789702683106,
      "grad_norm": 4.2771124839782715,
      "learning_rate": 4.998230764780277e-06,
      "loss": 0.6334,
      "step": 45
    },
    {
      "epoch": 0.044476673918298286,
      "grad_norm": 4.1346001625061035,
      "learning_rate": 4.997923648412495e-06,
      "loss": 0.4832,
      "step": 46
    },
    {
      "epoch": 0.045443558133913466,
      "grad_norm": 3.749467134475708,
      "learning_rate": 4.9975919768656125e-06,
      "loss": 0.6186,
      "step": 47
    },
    {
      "epoch": 0.046410442349528645,
      "grad_norm": 4.082079887390137,
      "learning_rate": 4.997235753400039e-06,
      "loss": 0.5151,
      "step": 48
    },
    {
      "epoch": 0.047377326565143825,
      "grad_norm": 4.149928569793701,
      "learning_rate": 4.996854981517535e-06,
      "loss": 0.5641,
      "step": 49
    },
    {
      "epoch": 0.048344210780759005,
      "grad_norm": 4.360020637512207,
      "learning_rate": 4.996449664961177e-06,
      "loss": 0.6201,
      "step": 50
    },
    {
      "epoch": 0.049311094996374184,
      "grad_norm": 3.918663263320923,
      "learning_rate": 4.996019807715324e-06,
      "loss": 0.6154,
      "step": 51
    },
    {
      "epoch": 0.050277979211989364,
      "grad_norm": 4.024591445922852,
      "learning_rate": 4.995565414005573e-06,
      "loss": 0.6135,
      "step": 52
    },
    {
      "epoch": 0.051244863427604544,
      "grad_norm": 4.135931491851807,
      "learning_rate": 4.995086488298723e-06,
      "loss": 0.6724,
      "step": 53
    },
    {
      "epoch": 0.05221174764321972,
      "grad_norm": 3.9647462368011475,
      "learning_rate": 4.994583035302724e-06,
      "loss": 0.5477,
      "step": 54
    },
    {
      "epoch": 0.0531786318588349,
      "grad_norm": 4.09563684463501,
      "learning_rate": 4.994055059966641e-06,
      "loss": 0.5471,
      "step": 55
    },
    {
      "epoch": 0.05414551607445008,
      "grad_norm": 4.164375305175781,
      "learning_rate": 4.993502567480592e-06,
      "loss": 0.6487,
      "step": 56
    },
    {
      "epoch": 0.05511240029006526,
      "grad_norm": 4.008912563323975,
      "learning_rate": 4.992925563275714e-06,
      "loss": 0.5567,
      "step": 57
    },
    {
      "epoch": 0.05607928450568044,
      "grad_norm": 3.589381217956543,
      "learning_rate": 4.99232405302409e-06,
      "loss": 0.5749,
      "step": 58
    },
    {
      "epoch": 0.05704616872129562,
      "grad_norm": 3.8242640495300293,
      "learning_rate": 4.991698042638711e-06,
      "loss": 0.6054,
      "step": 59
    },
    {
      "epoch": 0.05801305293691081,
      "grad_norm": 4.131597995758057,
      "learning_rate": 4.991047538273403e-06,
      "loss": 0.5606,
      "step": 60
    },
    {
      "epoch": 0.05897993715252599,
      "grad_norm": 3.8480031490325928,
      "learning_rate": 4.990372546322782e-06,
      "loss": 0.5264,
      "step": 61
    },
    {
      "epoch": 0.05994682136814117,
      "grad_norm": 4.238224506378174,
      "learning_rate": 4.9896730734221755e-06,
      "loss": 0.6041,
      "step": 62
    },
    {
      "epoch": 0.06091370558375635,
      "grad_norm": 3.8943910598754883,
      "learning_rate": 4.988949126447567e-06,
      "loss": 0.553,
      "step": 63
    },
    {
      "epoch": 0.06188058979937153,
      "grad_norm": 3.774726629257202,
      "learning_rate": 4.988200712515527e-06,
      "loss": 0.505,
      "step": 64
    },
    {
      "epoch": 0.0628474740149867,
      "grad_norm": 3.9384453296661377,
      "learning_rate": 4.987427838983141e-06,
      "loss": 0.6143,
      "step": 65
    },
    {
      "epoch": 0.06381435823060189,
      "grad_norm": 3.9011735916137695,
      "learning_rate": 4.986630513447939e-06,
      "loss": 0.5535,
      "step": 66
    },
    {
      "epoch": 0.06478124244621707,
      "grad_norm": 4.145521640777588,
      "learning_rate": 4.985808743747817e-06,
      "loss": 0.5506,
      "step": 67
    },
    {
      "epoch": 0.06574812666183225,
      "grad_norm": 3.8869740962982178,
      "learning_rate": 4.984962537960969e-06,
      "loss": 0.593,
      "step": 68
    },
    {
      "epoch": 0.06671501087744743,
      "grad_norm": 4.154409408569336,
      "learning_rate": 4.984091904405793e-06,
      "loss": 0.6245,
      "step": 69
    },
    {
      "epoch": 0.0676818950930626,
      "grad_norm": 3.601078987121582,
      "learning_rate": 4.983196851640825e-06,
      "loss": 0.5135,
      "step": 70
    },
    {
      "epoch": 0.06864877930867778,
      "grad_norm": 3.83754301071167,
      "learning_rate": 4.9822773884646444e-06,
      "loss": 0.5545,
      "step": 71
    },
    {
      "epoch": 0.06961566352429296,
      "grad_norm": 3.9656333923339844,
      "learning_rate": 4.981333523915792e-06,
      "loss": 0.6262,
      "step": 72
    },
    {
      "epoch": 0.07058254773990814,
      "grad_norm": 3.8404502868652344,
      "learning_rate": 4.980365267272679e-06,
      "loss": 0.5053,
      "step": 73
    },
    {
      "epoch": 0.07154943195552332,
      "grad_norm": 4.082815170288086,
      "learning_rate": 4.979372628053496e-06,
      "loss": 0.6844,
      "step": 74
    },
    {
      "epoch": 0.0725163161711385,
      "grad_norm": 3.7831435203552246,
      "learning_rate": 4.97835561601612e-06,
      "loss": 0.5896,
      "step": 75
    },
    {
      "epoch": 0.07348320038675368,
      "grad_norm": 4.10606575012207,
      "learning_rate": 4.977314241158021e-06,
      "loss": 0.6269,
      "step": 76
    },
    {
      "epoch": 0.07445008460236886,
      "grad_norm": 3.9456067085266113,
      "learning_rate": 4.97624851371616e-06,
      "loss": 0.5214,
      "step": 77
    },
    {
      "epoch": 0.07541696881798404,
      "grad_norm": 3.6953811645507812,
      "learning_rate": 4.975158444166887e-06,
      "loss": 0.5133,
      "step": 78
    },
    {
      "epoch": 0.07638385303359922,
      "grad_norm": 4.28406286239624,
      "learning_rate": 4.974044043225846e-06,
      "loss": 0.596,
      "step": 79
    },
    {
      "epoch": 0.0773507372492144,
      "grad_norm": 4.1657819747924805,
      "learning_rate": 4.972905321847859e-06,
      "loss": 0.5556,
      "step": 80
    },
    {
      "epoch": 0.07831762146482958,
      "grad_norm": 3.9782443046569824,
      "learning_rate": 4.9717422912268265e-06,
      "loss": 0.4779,
      "step": 81
    },
    {
      "epoch": 0.07928450568044476,
      "grad_norm": 3.791666269302368,
      "learning_rate": 4.970554962795615e-06,
      "loss": 0.5185,
      "step": 82
    },
    {
      "epoch": 0.08025138989605994,
      "grad_norm": 3.8677923679351807,
      "learning_rate": 4.969343348225942e-06,
      "loss": 0.6107,
      "step": 83
    },
    {
      "epoch": 0.08121827411167512,
      "grad_norm": 4.23475980758667,
      "learning_rate": 4.968107459428265e-06,
      "loss": 0.6065,
      "step": 84
    },
    {
      "epoch": 0.0821851583272903,
      "grad_norm": 3.8311767578125,
      "learning_rate": 4.966847308551664e-06,
      "loss": 0.5138,
      "step": 85
    },
    {
      "epoch": 0.0831520425429055,
      "grad_norm": 4.179352283477783,
      "learning_rate": 4.9655629079837176e-06,
      "loss": 0.5707,
      "step": 86
    },
    {
      "epoch": 0.08411892675852067,
      "grad_norm": 3.6656126976013184,
      "learning_rate": 4.9642542703503874e-06,
      "loss": 0.491,
      "step": 87
    },
    {
      "epoch": 0.08508581097413585,
      "grad_norm": 4.082828521728516,
      "learning_rate": 4.962921408515891e-06,
      "loss": 0.5196,
      "step": 88
    },
    {
      "epoch": 0.08605269518975103,
      "grad_norm": 4.420588493347168,
      "learning_rate": 4.961564335582572e-06,
      "loss": 0.5719,
      "step": 89
    },
    {
      "epoch": 0.08701957940536621,
      "grad_norm": 4.108341217041016,
      "learning_rate": 4.960183064890782e-06,
      "loss": 0.577,
      "step": 90
    },
    {
      "epoch": 0.08798646362098139,
      "grad_norm": 3.8085920810699463,
      "learning_rate": 4.958777610018734e-06,
      "loss": 0.5059,
      "step": 91
    },
    {
      "epoch": 0.08895334783659657,
      "grad_norm": 3.7101457118988037,
      "learning_rate": 4.957347984782384e-06,
      "loss": 0.5393,
      "step": 92
    },
    {
      "epoch": 0.08992023205221175,
      "grad_norm": 3.719266653060913,
      "learning_rate": 4.955894203235285e-06,
      "loss": 0.5582,
      "step": 93
    },
    {
      "epoch": 0.09088711626782693,
      "grad_norm": 4.132759094238281,
      "learning_rate": 4.954416279668451e-06,
      "loss": 0.5649,
      "step": 94
    },
    {
      "epoch": 0.09185400048344211,
      "grad_norm": 3.8168387413024902,
      "learning_rate": 4.952914228610221e-06,
      "loss": 0.5105,
      "step": 95
    },
    {
      "epoch": 0.09282088469905729,
      "grad_norm": 4.188531875610352,
      "learning_rate": 4.951388064826112e-06,
      "loss": 0.61,
      "step": 96
    },
    {
      "epoch": 0.09378776891467247,
      "grad_norm": 4.02006196975708,
      "learning_rate": 4.949837803318672e-06,
      "loss": 0.5134,
      "step": 97
    },
    {
      "epoch": 0.09475465313028765,
      "grad_norm": 4.280331134796143,
      "learning_rate": 4.9482634593273395e-06,
      "loss": 0.5284,
      "step": 98
    },
    {
      "epoch": 0.09572153734590283,
      "grad_norm": 3.8056821823120117,
      "learning_rate": 4.946665048328288e-06,
      "loss": 0.4675,
      "step": 99
    },
    {
      "epoch": 0.09668842156151801,
      "grad_norm": 4.207078456878662,
      "learning_rate": 4.945042586034272e-06,
      "loss": 0.5671,
      "step": 100
    },
    {
      "epoch": 0.09765530577713319,
      "grad_norm": 4.331418991088867,
      "learning_rate": 4.943396088394482e-06,
      "loss": 0.5816,
      "step": 101
    },
    {
      "epoch": 0.09862218999274837,
      "grad_norm": 3.848921060562134,
      "learning_rate": 4.941725571594377e-06,
      "loss": 0.5999,
      "step": 102
    },
    {
      "epoch": 0.09958907420836355,
      "grad_norm": 3.920262336730957,
      "learning_rate": 4.940031052055532e-06,
      "loss": 0.5007,
      "step": 103
    },
    {
      "epoch": 0.10055595842397873,
      "grad_norm": 3.944211483001709,
      "learning_rate": 4.938312546435474e-06,
      "loss": 0.5734,
      "step": 104
    },
    {
      "epoch": 0.10152284263959391,
      "grad_norm": 4.038331508636475,
      "learning_rate": 4.936570071627517e-06,
      "loss": 0.5748,
      "step": 105
    },
    {
      "epoch": 0.10248972685520909,
      "grad_norm": 3.655327081680298,
      "learning_rate": 4.934803644760604e-06,
      "loss": 0.5011,
      "step": 106
    },
    {
      "epoch": 0.10345661107082427,
      "grad_norm": 3.9986114501953125,
      "learning_rate": 4.933013283199124e-06,
      "loss": 0.5448,
      "step": 107
    },
    {
      "epoch": 0.10442349528643945,
      "grad_norm": 3.777674674987793,
      "learning_rate": 4.9311990045427555e-06,
      "loss": 0.5184,
      "step": 108
    },
    {
      "epoch": 0.10539037950205463,
      "grad_norm": 4.203485012054443,
      "learning_rate": 4.929360826626286e-06,
      "loss": 0.5776,
      "step": 109
    },
    {
      "epoch": 0.1063572637176698,
      "grad_norm": 4.092432975769043,
      "learning_rate": 4.927498767519437e-06,
      "loss": 0.5099,
      "step": 110
    },
    {
      "epoch": 0.10732414793328499,
      "grad_norm": 3.8467228412628174,
      "learning_rate": 4.925612845526691e-06,
      "loss": 0.5168,
      "step": 111
    },
    {
      "epoch": 0.10829103214890017,
      "grad_norm": 3.9712438583374023,
      "learning_rate": 4.923703079187104e-06,
      "loss": 0.5442,
      "step": 112
    },
    {
      "epoch": 0.10925791636451535,
      "grad_norm": 3.5273666381835938,
      "learning_rate": 4.921769487274132e-06,
      "loss": 0.5169,
      "step": 113
    },
    {
      "epoch": 0.11022480058013052,
      "grad_norm": 3.956287384033203,
      "learning_rate": 4.91981208879544e-06,
      "loss": 0.5562,
      "step": 114
    },
    {
      "epoch": 0.1111916847957457,
      "grad_norm": 3.581745147705078,
      "learning_rate": 4.917830902992716e-06,
      "loss": 0.5398,
      "step": 115
    },
    {
      "epoch": 0.11215856901136088,
      "grad_norm": 3.948413133621216,
      "learning_rate": 4.915825949341484e-06,
      "loss": 0.5221,
      "step": 116
    },
    {
      "epoch": 0.11312545322697606,
      "grad_norm": 3.879704713821411,
      "learning_rate": 4.913797247550912e-06,
      "loss": 0.4968,
      "step": 117
    },
    {
      "epoch": 0.11409233744259124,
      "grad_norm": 3.907553195953369,
      "learning_rate": 4.9117448175636184e-06,
      "loss": 0.5767,
      "step": 118
    },
    {
      "epoch": 0.11505922165820642,
      "grad_norm": 3.8580918312072754,
      "learning_rate": 4.9096686795554725e-06,
      "loss": 0.4927,
      "step": 119
    },
    {
      "epoch": 0.11602610587382162,
      "grad_norm": 4.098156452178955,
      "learning_rate": 4.907568853935403e-06,
      "loss": 0.5588,
      "step": 120
    },
    {
      "epoch": 0.1169929900894368,
      "grad_norm": 3.590182304382324,
      "learning_rate": 4.90544536134519e-06,
      "loss": 0.5009,
      "step": 121
    },
    {
      "epoch": 0.11795987430505198,
      "grad_norm": 3.887967348098755,
      "learning_rate": 4.9032982226592684e-06,
      "loss": 0.5394,
      "step": 122
    },
    {
      "epoch": 0.11892675852066716,
      "grad_norm": 3.6042134761810303,
      "learning_rate": 4.901127458984516e-06,
      "loss": 0.4072,
      "step": 123
    },
    {
      "epoch": 0.11989364273628234,
      "grad_norm": 3.842017412185669,
      "learning_rate": 4.898933091660054e-06,
      "loss": 0.5181,
      "step": 124
    },
    {
      "epoch": 0.12086052695189752,
      "grad_norm": 4.496882438659668,
      "learning_rate": 4.8967151422570314e-06,
      "loss": 0.5224,
      "step": 125
    },
    {
      "epoch": 0.1218274111675127,
      "grad_norm": 3.38244366645813,
      "learning_rate": 4.894473632578414e-06,
      "loss": 0.5029,
      "step": 126
    },
    {
      "epoch": 0.12279429538312787,
      "grad_norm": 3.902873992919922,
      "learning_rate": 4.89220858465877e-06,
      "loss": 0.5346,
      "step": 127
    },
    {
      "epoch": 0.12376117959874305,
      "grad_norm": 4.05102014541626,
      "learning_rate": 4.889920020764055e-06,
      "loss": 0.5789,
      "step": 128
    },
    {
      "epoch": 0.12472806381435823,
      "grad_norm": 3.9317867755889893,
      "learning_rate": 4.887607963391394e-06,
      "loss": 0.5399,
      "step": 129
    },
    {
      "epoch": 0.1256949480299734,
      "grad_norm": 3.631277561187744,
      "learning_rate": 4.8852724352688556e-06,
      "loss": 0.4701,
      "step": 130
    },
    {
      "epoch": 0.12666183224558858,
      "grad_norm": 4.077336311340332,
      "learning_rate": 4.882913459355233e-06,
      "loss": 0.622,
      "step": 131
    },
    {
      "epoch": 0.12762871646120377,
      "grad_norm": 4.419619083404541,
      "learning_rate": 4.880531058839816e-06,
      "loss": 0.5563,
      "step": 132
    },
    {
      "epoch": 0.12859560067681894,
      "grad_norm": 3.4148476123809814,
      "learning_rate": 4.878125257142165e-06,
      "loss": 0.4959,
      "step": 133
    },
    {
      "epoch": 0.12956248489243413,
      "grad_norm": 4.037806510925293,
      "learning_rate": 4.8756960779118765e-06,
      "loss": 0.5066,
      "step": 134
    },
    {
      "epoch": 0.1305293691080493,
      "grad_norm": 3.803792715072632,
      "learning_rate": 4.873243545028356e-06,
      "loss": 0.5543,
      "step": 135
    },
    {
      "epoch": 0.1314962533236645,
      "grad_norm": 3.535799503326416,
      "learning_rate": 4.870767682600584e-06,
      "loss": 0.465,
      "step": 136
    },
    {
      "epoch": 0.13246313753927969,
      "grad_norm": 3.79543137550354,
      "learning_rate": 4.868268514966869e-06,
      "loss": 0.5385,
      "step": 137
    },
    {
      "epoch": 0.13343002175489485,
      "grad_norm": 3.995861768722534,
      "learning_rate": 4.86574606669462e-06,
      "loss": 0.5965,
      "step": 138
    },
    {
      "epoch": 0.13439690597051004,
      "grad_norm": 4.11192512512207,
      "learning_rate": 4.8632003625800995e-06,
      "loss": 0.5551,
      "step": 139
    },
    {
      "epoch": 0.1353637901861252,
      "grad_norm": 4.1996259689331055,
      "learning_rate": 4.860631427648179e-06,
      "loss": 0.5325,
      "step": 140
    },
    {
      "epoch": 0.1363306744017404,
      "grad_norm": 3.9488883018493652,
      "learning_rate": 4.858039287152095e-06,
      "loss": 0.5976,
      "step": 141
    },
    {
      "epoch": 0.13729755861735557,
      "grad_norm": 3.8057913780212402,
      "learning_rate": 4.855423966573202e-06,
      "loss": 0.5458,
      "step": 142
    },
    {
      "epoch": 0.13826444283297076,
      "grad_norm": 3.676607131958008,
      "learning_rate": 4.852785491620716e-06,
      "loss": 0.4622,
      "step": 143
    },
    {
      "epoch": 0.13923132704858593,
      "grad_norm": 4.006147384643555,
      "learning_rate": 4.850123888231472e-06,
      "loss": 0.4659,
      "step": 144
    },
    {
      "epoch": 0.14019821126420112,
      "grad_norm": 3.4836902618408203,
      "learning_rate": 4.847439182569656e-06,
      "loss": 0.5167,
      "step": 145
    },
    {
      "epoch": 0.1411650954798163,
      "grad_norm": 4.223696231842041,
      "learning_rate": 4.844731401026562e-06,
      "loss": 0.5559,
      "step": 146
    },
    {
      "epoch": 0.14213197969543148,
      "grad_norm": 4.1777262687683105,
      "learning_rate": 4.84200057022032e-06,
      "loss": 0.5509,
      "step": 147
    },
    {
      "epoch": 0.14309886391104665,
      "grad_norm": 3.9227278232574463,
      "learning_rate": 4.8392467169956405e-06,
      "loss": 0.5256,
      "step": 148
    },
    {
      "epoch": 0.14406574812666184,
      "grad_norm": 3.8219034671783447,
      "learning_rate": 4.836469868423552e-06,
      "loss": 0.4913,
      "step": 149
    },
    {
      "epoch": 0.145032632342277,
      "grad_norm": 3.7245306968688965,
      "learning_rate": 4.833670051801131e-06,
      "loss": 0.4811,
      "step": 150
    },
    {
      "epoch": 0.1459995165578922,
      "grad_norm": 3.941112756729126,
      "learning_rate": 4.830847294651236e-06,
      "loss": 0.5529,
      "step": 151
    },
    {
      "epoch": 0.14696640077350737,
      "grad_norm": 3.680382013320923,
      "learning_rate": 4.8280016247222355e-06,
      "loss": 0.4964,
      "step": 152
    },
    {
      "epoch": 0.14793328498912256,
      "grad_norm": 4.14690637588501,
      "learning_rate": 4.825133069987737e-06,
      "loss": 0.5186,
      "step": 153
    },
    {
      "epoch": 0.14890016920473773,
      "grad_norm": 3.8385708332061768,
      "learning_rate": 4.822241658646312e-06,
      "loss": 0.4802,
      "step": 154
    },
    {
      "epoch": 0.14986705342035292,
      "grad_norm": 4.05110502243042,
      "learning_rate": 4.819327419121215e-06,
      "loss": 0.5394,
      "step": 155
    },
    {
      "epoch": 0.15083393763596809,
      "grad_norm": 4.225109100341797,
      "learning_rate": 4.816390380060108e-06,
      "loss": 0.5902,
      "step": 156
    },
    {
      "epoch": 0.15180082185158328,
      "grad_norm": 3.542275905609131,
      "learning_rate": 4.81343057033478e-06,
      "loss": 0.5075,
      "step": 157
    },
    {
      "epoch": 0.15276770606719844,
      "grad_norm": 4.327058792114258,
      "learning_rate": 4.810448019040857e-06,
      "loss": 0.5222,
      "step": 158
    },
    {
      "epoch": 0.15373459028281364,
      "grad_norm": 3.977292060852051,
      "learning_rate": 4.8074427554975235e-06,
      "loss": 0.5177,
      "step": 159
    },
    {
      "epoch": 0.1547014744984288,
      "grad_norm": 3.762315511703491,
      "learning_rate": 4.804414809247231e-06,
      "loss": 0.485,
      "step": 160
    },
    {
      "epoch": 0.155668358714044,
      "grad_norm": 4.200908660888672,
      "learning_rate": 4.8013642100554034e-06,
      "loss": 0.4808,
      "step": 161
    },
    {
      "epoch": 0.15663524292965916,
      "grad_norm": 4.091949939727783,
      "learning_rate": 4.798290987910152e-06,
      "loss": 0.5482,
      "step": 162
    },
    {
      "epoch": 0.15760212714527436,
      "grad_norm": 3.7029221057891846,
      "learning_rate": 4.795195173021976e-06,
      "loss": 0.4768,
      "step": 163
    },
    {
      "epoch": 0.15856901136088952,
      "grad_norm": 4.126641750335693,
      "learning_rate": 4.792076795823466e-06,
      "loss": 0.5219,
      "step": 164
    },
    {
      "epoch": 0.15953589557650472,
      "grad_norm": 4.042599201202393,
      "learning_rate": 4.7889358869690065e-06,
      "loss": 0.4618,
      "step": 165
    },
    {
      "epoch": 0.16050277979211988,
      "grad_norm": 3.8868730068206787,
      "learning_rate": 4.785772477334473e-06,
      "loss": 0.5551,
      "step": 166
    },
    {
      "epoch": 0.16146966400773508,
      "grad_norm": 3.4996016025543213,
      "learning_rate": 4.782586598016928e-06,
      "loss": 0.4826,
      "step": 167
    },
    {
      "epoch": 0.16243654822335024,
      "grad_norm": 3.912738084793091,
      "learning_rate": 4.779378280334318e-06,
      "loss": 0.5114,
      "step": 168
    },
    {
      "epoch": 0.16340343243896543,
      "grad_norm": 8.732720375061035,
      "learning_rate": 4.776147555825164e-06,
      "loss": 0.5314,
      "step": 169
    },
    {
      "epoch": 0.1643703166545806,
      "grad_norm": 4.118671417236328,
      "learning_rate": 4.772894456248248e-06,
      "loss": 0.4919,
      "step": 170
    },
    {
      "epoch": 0.1653372008701958,
      "grad_norm": 3.8536298274993896,
      "learning_rate": 4.769619013582309e-06,
      "loss": 0.5183,
      "step": 171
    },
    {
      "epoch": 0.166304085085811,
      "grad_norm": 3.6809961795806885,
      "learning_rate": 4.766321260025721e-06,
      "loss": 0.4654,
      "step": 172
    },
    {
      "epoch": 0.16727096930142615,
      "grad_norm": 3.947495222091675,
      "learning_rate": 4.7630012279961805e-06,
      "loss": 0.45,
      "step": 173
    },
    {
      "epoch": 0.16823785351704135,
      "grad_norm": 3.8548734188079834,
      "learning_rate": 4.7596589501303845e-06,
      "loss": 0.5412,
      "step": 174
    },
    {
      "epoch": 0.1692047377326565,
      "grad_norm": 3.772038698196411,
      "learning_rate": 4.7562944592837145e-06,
      "loss": 0.4951,
      "step": 175
    },
    {
      "epoch": 0.1701716219482717,
      "grad_norm": 4.423032760620117,
      "learning_rate": 4.7529077885299095e-06,
      "loss": 0.5264,
      "step": 176
    },
    {
      "epoch": 0.17113850616388687,
      "grad_norm": 3.9457714557647705,
      "learning_rate": 4.749498971160742e-06,
      "loss": 0.5227,
      "step": 177
    },
    {
      "epoch": 0.17210539037950207,
      "grad_norm": 3.9973223209381104,
      "learning_rate": 4.746068040685692e-06,
      "loss": 0.5296,
      "step": 178
    },
    {
      "epoch": 0.17307227459511723,
      "grad_norm": 3.861050605773926,
      "learning_rate": 4.742615030831615e-06,
      "loss": 0.5122,
      "step": 179
    },
    {
      "epoch": 0.17403915881073242,
      "grad_norm": 3.90736722946167,
      "learning_rate": 4.739139975542415e-06,
      "loss": 0.5479,
      "step": 180
    },
    {
      "epoch": 0.1750060430263476,
      "grad_norm": 4.067862510681152,
      "learning_rate": 4.735642908978704e-06,
      "loss": 0.5305,
      "step": 181
    },
    {
      "epoch": 0.17597292724196278,
      "grad_norm": 3.789336919784546,
      "learning_rate": 4.7321238655174715e-06,
      "loss": 0.4683,
      "step": 182
    },
    {
      "epoch": 0.17693981145757795,
      "grad_norm": 3.6877589225769043,
      "learning_rate": 4.728582879751746e-06,
      "loss": 0.4879,
      "step": 183
    },
    {
      "epoch": 0.17790669567319314,
      "grad_norm": 3.7258737087249756,
      "learning_rate": 4.725019986490255e-06,
      "loss": 0.4918,
      "step": 184
    },
    {
      "epoch": 0.1788735798888083,
      "grad_norm": 3.882462501525879,
      "learning_rate": 4.721435220757078e-06,
      "loss": 0.5517,
      "step": 185
    },
    {
      "epoch": 0.1798404641044235,
      "grad_norm": 4.0587568283081055,
      "learning_rate": 4.717828617791308e-06,
      "loss": 0.4685,
      "step": 186
    },
    {
      "epoch": 0.18080734832003867,
      "grad_norm": 3.7173986434936523,
      "learning_rate": 4.714200213046707e-06,
      "loss": 0.5425,
      "step": 187
    },
    {
      "epoch": 0.18177423253565386,
      "grad_norm": 4.385982513427734,
      "learning_rate": 4.7105500421913465e-06,
      "loss": 0.4914,
      "step": 188
    },
    {
      "epoch": 0.18274111675126903,
      "grad_norm": 4.058135509490967,
      "learning_rate": 4.706878141107269e-06,
      "loss": 0.5249,
      "step": 189
    },
    {
      "epoch": 0.18370800096688422,
      "grad_norm": 4.178554058074951,
      "learning_rate": 4.703184545890129e-06,
      "loss": 0.4403,
      "step": 190
    },
    {
      "epoch": 0.1846748851824994,
      "grad_norm": 3.969520330429077,
      "learning_rate": 4.699469292848839e-06,
      "loss": 0.5016,
      "step": 191
    },
    {
      "epoch": 0.18564176939811458,
      "grad_norm": 4.418413162231445,
      "learning_rate": 4.695732418505214e-06,
      "loss": 0.5239,
      "step": 192
    },
    {
      "epoch": 0.18660865361372975,
      "grad_norm": 4.015957832336426,
      "learning_rate": 4.691973959593609e-06,
      "loss": 0.522,
      "step": 193
    },
    {
      "epoch": 0.18757553782934494,
      "grad_norm": 4.055311679840088,
      "learning_rate": 4.688193953060566e-06,
      "loss": 0.4946,
      "step": 194
    },
    {
      "epoch": 0.1885424220449601,
      "grad_norm": 4.2180585861206055,
      "learning_rate": 4.6843924360644385e-06,
      "loss": 0.54,
      "step": 195
    },
    {
      "epoch": 0.1895093062605753,
      "grad_norm": 3.808861494064331,
      "learning_rate": 4.680569445975036e-06,
      "loss": 0.5254,
      "step": 196
    },
    {
      "epoch": 0.19047619047619047,
      "grad_norm": 3.952702522277832,
      "learning_rate": 4.676725020373255e-06,
      "loss": 0.4599,
      "step": 197
    },
    {
      "epoch": 0.19144307469180566,
      "grad_norm": 3.55468487739563,
      "learning_rate": 4.672859197050706e-06,
      "loss": 0.4616,
      "step": 198
    },
    {
      "epoch": 0.19240995890742082,
      "grad_norm": 3.6720266342163086,
      "learning_rate": 4.6689720140093445e-06,
      "loss": 0.5532,
      "step": 199
    },
    {
      "epoch": 0.19337684312303602,
      "grad_norm": 3.75171160697937,
      "learning_rate": 4.665063509461098e-06,
      "loss": 0.407,
      "step": 200
    },
    {
      "epoch": 0.19434372733865118,
      "grad_norm": 3.524125099182129,
      "learning_rate": 4.661133721827487e-06,
      "loss": 0.458,
      "step": 201
    },
    {
      "epoch": 0.19531061155426638,
      "grad_norm": 4.0818023681640625,
      "learning_rate": 4.657182689739253e-06,
      "loss": 0.5092,
      "step": 202
    },
    {
      "epoch": 0.19627749576988154,
      "grad_norm": 3.552830219268799,
      "learning_rate": 4.653210452035974e-06,
      "loss": 0.4761,
      "step": 203
    },
    {
      "epoch": 0.19724437998549674,
      "grad_norm": 3.4877846240997314,
      "learning_rate": 4.649217047765685e-06,
      "loss": 0.5329,
      "step": 204
    },
    {
      "epoch": 0.1982112642011119,
      "grad_norm": 3.507387161254883,
      "learning_rate": 4.645202516184492e-06,
      "loss": 0.4731,
      "step": 205
    },
    {
      "epoch": 0.1991781484167271,
      "grad_norm": 4.230087757110596,
      "learning_rate": 4.641166896756188e-06,
      "loss": 0.4747,
      "step": 206
    },
    {
      "epoch": 0.2001450326323423,
      "grad_norm": 3.850822687149048,
      "learning_rate": 4.6371102291518635e-06,
      "loss": 0.5242,
      "step": 207
    },
    {
      "epoch": 0.20111191684795746,
      "grad_norm": 3.663846969604492,
      "learning_rate": 4.63303255324952e-06,
      "loss": 0.4807,
      "step": 208
    },
    {
      "epoch": 0.20207880106357265,
      "grad_norm": 3.500422477722168,
      "learning_rate": 4.628933909133674e-06,
      "loss": 0.4273,
      "step": 209
    },
    {
      "epoch": 0.20304568527918782,
      "grad_norm": 3.770920753479004,
      "learning_rate": 4.624814337094964e-06,
      "loss": 0.5495,
      "step": 210
    },
    {
      "epoch": 0.204012569494803,
      "grad_norm": 3.9864301681518555,
      "learning_rate": 4.620673877629757e-06,
      "loss": 0.5147,
      "step": 211
    },
    {
      "epoch": 0.20497945371041817,
      "grad_norm": 4.359103679656982,
      "learning_rate": 4.616512571439745e-06,
      "loss": 0.4922,
      "step": 212
    },
    {
      "epoch": 0.20594633792603337,
      "grad_norm": 3.866464376449585,
      "learning_rate": 4.612330459431552e-06,
      "loss": 0.4987,
      "step": 213
    },
    {
      "epoch": 0.20691322214164853,
      "grad_norm": 4.180274963378906,
      "learning_rate": 4.608127582716324e-06,
      "loss": 0.5276,
      "step": 214
    },
    {
      "epoch": 0.20788010635726373,
      "grad_norm": 3.7912492752075195,
      "learning_rate": 4.603903982609334e-06,
      "loss": 0.5121,
      "step": 215
    },
    {
      "epoch": 0.2088469905728789,
      "grad_norm": 3.7710208892822266,
      "learning_rate": 4.599659700629566e-06,
      "loss": 0.5193,
      "step": 216
    },
    {
      "epoch": 0.2098138747884941,
      "grad_norm": 3.5846283435821533,
      "learning_rate": 4.595394778499314e-06,
      "loss": 0.4799,
      "step": 217
    },
    {
      "epoch": 0.21078075900410925,
      "grad_norm": 3.6125054359436035,
      "learning_rate": 4.59110925814377e-06,
      "loss": 0.5133,
      "step": 218
    },
    {
      "epoch": 0.21174764321972445,
      "grad_norm": 4.170721530914307,
      "learning_rate": 4.586803181690609e-06,
      "loss": 0.5695,
      "step": 219
    },
    {
      "epoch": 0.2127145274353396,
      "grad_norm": 3.4708595275878906,
      "learning_rate": 4.582476591469582e-06,
      "loss": 0.4518,
      "step": 220
    },
    {
      "epoch": 0.2136814116509548,
      "grad_norm": 4.279057502746582,
      "learning_rate": 4.5781295300120885e-06,
      "loss": 0.5111,
      "step": 221
    },
    {
      "epoch": 0.21464829586656997,
      "grad_norm": 3.40183162689209,
      "learning_rate": 4.573762040050772e-06,
      "loss": 0.4127,
      "step": 222
    },
    {
      "epoch": 0.21561518008218516,
      "grad_norm": 3.459470748901367,
      "learning_rate": 4.569374164519088e-06,
      "loss": 0.4438,
      "step": 223
    },
    {
      "epoch": 0.21658206429780033,
      "grad_norm": 4.130403518676758,
      "learning_rate": 4.564965946550889e-06,
      "loss": 0.5337,
      "step": 224
    },
    {
      "epoch": 0.21754894851341552,
      "grad_norm": 3.649747371673584,
      "learning_rate": 4.560537429479998e-06,
      "loss": 0.5272,
      "step": 225
    },
    {
      "epoch": 0.2185158327290307,
      "grad_norm": 3.7949132919311523,
      "learning_rate": 4.556088656839785e-06,
      "loss": 0.4954,
      "step": 226
    },
    {
      "epoch": 0.21948271694464588,
      "grad_norm": 3.963280200958252,
      "learning_rate": 4.5516196723627325e-06,
      "loss": 0.5444,
      "step": 227
    },
    {
      "epoch": 0.22044960116026105,
      "grad_norm": 3.713867425918579,
      "learning_rate": 4.547130519980014e-06,
      "loss": 0.4787,
      "step": 228
    },
    {
      "epoch": 0.22141648537587624,
      "grad_norm": 3.647979736328125,
      "learning_rate": 4.542621243821058e-06,
      "loss": 0.4688,
      "step": 229
    },
    {
      "epoch": 0.2223833695914914,
      "grad_norm": 4.001070499420166,
      "learning_rate": 4.538091888213112e-06,
      "loss": 0.4714,
      "step": 230
    },
    {
      "epoch": 0.2233502538071066,
      "grad_norm": 4.070573806762695,
      "learning_rate": 4.533542497680811e-06,
      "loss": 0.5579,
      "step": 231
    },
    {
      "epoch": 0.22431713802272177,
      "grad_norm": 3.7523491382598877,
      "learning_rate": 4.52897311694574e-06,
      "loss": 0.4251,
      "step": 232
    },
    {
      "epoch": 0.22528402223833696,
      "grad_norm": 3.940523147583008,
      "learning_rate": 4.524383790925987e-06,
      "loss": 0.5004,
      "step": 233
    },
    {
      "epoch": 0.22625090645395213,
      "grad_norm": 3.9226436614990234,
      "learning_rate": 4.519774564735712e-06,
      "loss": 0.505,
      "step": 234
    },
    {
      "epoch": 0.22721779066956732,
      "grad_norm": 3.784249782562256,
      "learning_rate": 4.515145483684696e-06,
      "loss": 0.5336,
      "step": 235
    },
    {
      "epoch": 0.2281846748851825,
      "grad_norm": 3.493046760559082,
      "learning_rate": 4.510496593277898e-06,
      "loss": 0.4649,
      "step": 236
    },
    {
      "epoch": 0.22915155910079768,
      "grad_norm": 3.4909865856170654,
      "learning_rate": 4.505827939215009e-06,
      "loss": 0.52,
      "step": 237
    },
    {
      "epoch": 0.23011844331641285,
      "grad_norm": 3.6906330585479736,
      "learning_rate": 4.50113956739e-06,
      "loss": 0.463,
      "step": 238
    },
    {
      "epoch": 0.23108532753202804,
      "grad_norm": 3.881237268447876,
      "learning_rate": 4.496431523890673e-06,
      "loss": 0.5222,
      "step": 239
    },
    {
      "epoch": 0.23205221174764323,
      "grad_norm": 3.843491554260254,
      "learning_rate": 4.4917038549982075e-06,
      "loss": 0.5514,
      "step": 240
    },
    {
      "epoch": 0.2330190959632584,
      "grad_norm": 3.859520196914673,
      "learning_rate": 4.486956607186702e-06,
      "loss": 0.545,
      "step": 241
    },
    {
      "epoch": 0.2339859801788736,
      "grad_norm": 3.564976215362549,
      "learning_rate": 4.482189827122726e-06,
      "loss": 0.4548,
      "step": 242
    },
    {
      "epoch": 0.23495286439448876,
      "grad_norm": 3.7164249420166016,
      "learning_rate": 4.477403561664852e-06,
      "loss": 0.5378,
      "step": 243
    },
    {
      "epoch": 0.23591974861010395,
      "grad_norm": 3.8178417682647705,
      "learning_rate": 4.472597857863198e-06,
      "loss": 0.446,
      "step": 244
    },
    {
      "epoch": 0.23688663282571912,
      "grad_norm": 3.6128547191619873,
      "learning_rate": 4.467772762958968e-06,
      "loss": 0.4653,
      "step": 245
    },
    {
      "epoch": 0.2378535170413343,
      "grad_norm": 4.037587642669678,
      "learning_rate": 4.462928324383985e-06,
      "loss": 0.57,
      "step": 246
    },
    {
      "epoch": 0.23882040125694948,
      "grad_norm": 4.266468048095703,
      "learning_rate": 4.458064589760221e-06,
      "loss": 0.4874,
      "step": 247
    },
    {
      "epoch": 0.23978728547256467,
      "grad_norm": 3.4381649494171143,
      "learning_rate": 4.453181606899338e-06,
      "loss": 0.4447,
      "step": 248
    },
    {
      "epoch": 0.24075416968817984,
      "grad_norm": 4.215127468109131,
      "learning_rate": 4.448279423802207e-06,
      "loss": 0.5358,
      "step": 249
    },
    {
      "epoch": 0.24172105390379503,
      "grad_norm": 3.6681251525878906,
      "learning_rate": 4.443358088658448e-06,
      "loss": 0.4914,
      "step": 250
    },
    {
      "epoch": 0.2426879381194102,
      "grad_norm": 3.6266114711761475,
      "learning_rate": 4.438417649845946e-06,
      "loss": 0.5211,
      "step": 251
    },
    {
      "epoch": 0.2436548223350254,
      "grad_norm": 4.406180381774902,
      "learning_rate": 4.43345815593038e-06,
      "loss": 0.5456,
      "step": 252
    },
    {
      "epoch": 0.24462170655064056,
      "grad_norm": 3.685242176055908,
      "learning_rate": 4.428479655664748e-06,
      "loss": 0.4988,
      "step": 253
    },
    {
      "epoch": 0.24558859076625575,
      "grad_norm": 3.8363120555877686,
      "learning_rate": 4.423482197988881e-06,
      "loss": 0.4961,
      "step": 254
    },
    {
      "epoch": 0.24655547498187091,
      "grad_norm": 3.825303554534912,
      "learning_rate": 4.4184658320289675e-06,
      "loss": 0.4879,
      "step": 255
    },
    {
      "epoch": 0.2475223591974861,
      "grad_norm": 3.914113759994507,
      "learning_rate": 4.41343060709707e-06,
      "loss": 0.5032,
      "step": 256
    },
    {
      "epoch": 0.24848924341310127,
      "grad_norm": 3.840186357498169,
      "learning_rate": 4.408376572690638e-06,
      "loss": 0.5148,
      "step": 257
    },
    {
      "epoch": 0.24945612762871647,
      "grad_norm": 3.9938724040985107,
      "learning_rate": 4.403303778492023e-06,
      "loss": 0.4698,
      "step": 258
    },
    {
      "epoch": 0.25042301184433163,
      "grad_norm": 3.904791831970215,
      "learning_rate": 4.3982122743679875e-06,
      "loss": 0.5339,
      "step": 259
    },
    {
      "epoch": 0.2513898960599468,
      "grad_norm": 3.4951677322387695,
      "learning_rate": 4.3931021103692206e-06,
      "loss": 0.4536,
      "step": 260
    },
    {
      "epoch": 0.252356780275562,
      "grad_norm": 3.8062729835510254,
      "learning_rate": 4.387973336729841e-06,
      "loss": 0.4929,
      "step": 261
    },
    {
      "epoch": 0.25332366449117716,
      "grad_norm": 4.109336853027344,
      "learning_rate": 4.382826003866903e-06,
      "loss": 0.5335,
      "step": 262
    },
    {
      "epoch": 0.25429054870679235,
      "grad_norm": 3.5560543537139893,
      "learning_rate": 4.377660162379904e-06,
      "loss": 0.4673,
      "step": 263
    },
    {
      "epoch": 0.25525743292240755,
      "grad_norm": 3.9022483825683594,
      "learning_rate": 4.372475863050286e-06,
      "loss": 0.5222,
      "step": 264
    },
    {
      "epoch": 0.25622431713802274,
      "grad_norm": 3.8836610317230225,
      "learning_rate": 4.3672731568409344e-06,
      "loss": 0.4954,
      "step": 265
    },
    {
      "epoch": 0.2571912013536379,
      "grad_norm": 3.6870367527008057,
      "learning_rate": 4.36205209489568e-06,
      "loss": 0.4124,
      "step": 266
    },
    {
      "epoch": 0.25815808556925307,
      "grad_norm": 3.783818244934082,
      "learning_rate": 4.3568127285387925e-06,
      "loss": 0.4833,
      "step": 267
    },
    {
      "epoch": 0.25912496978486826,
      "grad_norm": 3.5868349075317383,
      "learning_rate": 4.351555109274482e-06,
      "loss": 0.4759,
      "step": 268
    },
    {
      "epoch": 0.26009185400048346,
      "grad_norm": 3.780407667160034,
      "learning_rate": 4.346279288786387e-06,
      "loss": 0.4834,
      "step": 269
    },
    {
      "epoch": 0.2610587382160986,
      "grad_norm": 3.6964030265808105,
      "learning_rate": 4.340985318937066e-06,
      "loss": 0.4428,
      "step": 270
    },
    {
      "epoch": 0.2620256224317138,
      "grad_norm": 3.7668895721435547,
      "learning_rate": 4.3356732517674935e-06,
      "loss": 0.5027,
      "step": 271
    },
    {
      "epoch": 0.262992506647329,
      "grad_norm": 4.010104656219482,
      "learning_rate": 4.330343139496545e-06,
      "loss": 0.5497,
      "step": 272
    },
    {
      "epoch": 0.2639593908629442,
      "grad_norm": 3.8650081157684326,
      "learning_rate": 4.32499503452048e-06,
      "loss": 0.5673,
      "step": 273
    },
    {
      "epoch": 0.26492627507855937,
      "grad_norm": 3.683579444885254,
      "learning_rate": 4.319628989412435e-06,
      "loss": 0.501,
      "step": 274
    },
    {
      "epoch": 0.2658931592941745,
      "grad_norm": 4.045727252960205,
      "learning_rate": 4.314245056921899e-06,
      "loss": 0.5478,
      "step": 275
    },
    {
      "epoch": 0.2668600435097897,
      "grad_norm": 4.400568008422852,
      "learning_rate": 4.308843289974198e-06,
      "loss": 0.6053,
      "step": 276
    },
    {
      "epoch": 0.2678269277254049,
      "grad_norm": 3.3865373134613037,
      "learning_rate": 4.303423741669978e-06,
      "loss": 0.4358,
      "step": 277
    },
    {
      "epoch": 0.2687938119410201,
      "grad_norm": 3.7650463581085205,
      "learning_rate": 4.297986465284678e-06,
      "loss": 0.473,
      "step": 278
    },
    {
      "epoch": 0.2697606961566352,
      "grad_norm": 3.752685546875,
      "learning_rate": 4.292531514268008e-06,
      "loss": 0.4686,
      "step": 279
    },
    {
      "epoch": 0.2707275803722504,
      "grad_norm": 4.085626125335693,
      "learning_rate": 4.287058942243424e-06,
      "loss": 0.46,
      "step": 280
    },
    {
      "epoch": 0.2716944645878656,
      "grad_norm": 4.068567752838135,
      "learning_rate": 4.281568803007601e-06,
      "loss": 0.4952,
      "step": 281
    },
    {
      "epoch": 0.2726613488034808,
      "grad_norm": 4.001227378845215,
      "learning_rate": 4.276061150529903e-06,
      "loss": 0.5389,
      "step": 282
    },
    {
      "epoch": 0.27362823301909595,
      "grad_norm": 3.950711488723755,
      "learning_rate": 4.270536038951855e-06,
      "loss": 0.5659,
      "step": 283
    },
    {
      "epoch": 0.27459511723471114,
      "grad_norm": 3.8639347553253174,
      "learning_rate": 4.2649935225866066e-06,
      "loss": 0.541,
      "step": 284
    },
    {
      "epoch": 0.27556200145032633,
      "grad_norm": 3.983407735824585,
      "learning_rate": 4.259433655918404e-06,
      "loss": 0.5081,
      "step": 285
    },
    {
      "epoch": 0.2765288856659415,
      "grad_norm": 3.7507271766662598,
      "learning_rate": 4.253856493602047e-06,
      "loss": 0.4813,
      "step": 286
    },
    {
      "epoch": 0.27749576988155666,
      "grad_norm": 3.543520927429199,
      "learning_rate": 4.24826209046236e-06,
      "loss": 0.4907,
      "step": 287
    },
    {
      "epoch": 0.27846265409717186,
      "grad_norm": 3.6161043643951416,
      "learning_rate": 4.242650501493642e-06,
      "loss": 0.5002,
      "step": 288
    },
    {
      "epoch": 0.27942953831278705,
      "grad_norm": 3.732840061187744,
      "learning_rate": 4.237021781859143e-06,
      "loss": 0.4547,
      "step": 289
    },
    {
      "epoch": 0.28039642252840224,
      "grad_norm": 3.6558990478515625,
      "learning_rate": 4.231375986890502e-06,
      "loss": 0.4796,
      "step": 290
    },
    {
      "epoch": 0.2813633067440174,
      "grad_norm": 3.682800769805908,
      "learning_rate": 4.225713172087216e-06,
      "loss": 0.4748,
      "step": 291
    },
    {
      "epoch": 0.2823301909596326,
      "grad_norm": 4.0862040519714355,
      "learning_rate": 4.220033393116093e-06,
      "loss": 0.5725,
      "step": 292
    },
    {
      "epoch": 0.28329707517524777,
      "grad_norm": 3.8720784187316895,
      "learning_rate": 4.2143367058107e-06,
      "loss": 0.5085,
      "step": 293
    },
    {
      "epoch": 0.28426395939086296,
      "grad_norm": 3.7148096561431885,
      "learning_rate": 4.208623166170819e-06,
      "loss": 0.4869,
      "step": 294
    },
    {
      "epoch": 0.2852308436064781,
      "grad_norm": 4.130632400512695,
      "learning_rate": 4.202892830361892e-06,
      "loss": 0.4793,
      "step": 295
    },
    {
      "epoch": 0.2861977278220933,
      "grad_norm": 3.892319440841675,
      "learning_rate": 4.197145754714473e-06,
      "loss": 0.5266,
      "step": 296
    },
    {
      "epoch": 0.2871646120377085,
      "grad_norm": 3.7769579887390137,
      "learning_rate": 4.191381995723672e-06,
      "loss": 0.4877,
      "step": 297
    },
    {
      "epoch": 0.2881314962533237,
      "grad_norm": 3.810389757156372,
      "learning_rate": 4.185601610048601e-06,
      "loss": 0.5253,
      "step": 298
    },
    {
      "epoch": 0.2890983804689388,
      "grad_norm": 3.692101001739502,
      "learning_rate": 4.179804654511816e-06,
      "loss": 0.5104,
      "step": 299
    },
    {
      "epoch": 0.290065264684554,
      "grad_norm": 4.029324054718018,
      "learning_rate": 4.1739911860987575e-06,
      "loss": 0.4898,
      "step": 300
    },
    {
      "epoch": 0.2910321489001692,
      "grad_norm": 3.8836569786071777,
      "learning_rate": 4.168161261957192e-06,
      "loss": 0.54,
      "step": 301
    },
    {
      "epoch": 0.2919990331157844,
      "grad_norm": 3.8458945751190186,
      "learning_rate": 4.162314939396652e-06,
      "loss": 0.501,
      "step": 302
    },
    {
      "epoch": 0.29296591733139954,
      "grad_norm": 3.7436769008636475,
      "learning_rate": 4.1564522758878656e-06,
      "loss": 0.5324,
      "step": 303
    },
    {
      "epoch": 0.29393280154701473,
      "grad_norm": 3.9301185607910156,
      "learning_rate": 4.1505733290622e-06,
      "loss": 0.5143,
      "step": 304
    },
    {
      "epoch": 0.2948996857626299,
      "grad_norm": 4.087481498718262,
      "learning_rate": 4.144678156711091e-06,
      "loss": 0.4861,
      "step": 305
    },
    {
      "epoch": 0.2958665699782451,
      "grad_norm": 4.267450332641602,
      "learning_rate": 4.138766816785474e-06,
      "loss": 0.5108,
      "step": 306
    },
    {
      "epoch": 0.29683345419386026,
      "grad_norm": 3.688645362854004,
      "learning_rate": 4.132839367395215e-06,
      "loss": 0.4646,
      "step": 307
    },
    {
      "epoch": 0.29780033840947545,
      "grad_norm": 3.762014389038086,
      "learning_rate": 4.1268958668085415e-06,
      "loss": 0.5341,
      "step": 308
    },
    {
      "epoch": 0.29876722262509064,
      "grad_norm": 4.061873435974121,
      "learning_rate": 4.120936373451467e-06,
      "loss": 0.4799,
      "step": 309
    },
    {
      "epoch": 0.29973410684070584,
      "grad_norm": 3.6946425437927246,
      "learning_rate": 4.1149609459072185e-06,
      "loss": 0.4608,
      "step": 310
    },
    {
      "epoch": 0.30070099105632103,
      "grad_norm": 3.5779032707214355,
      "learning_rate": 4.108969642915658e-06,
      "loss": 0.4416,
      "step": 311
    },
    {
      "epoch": 0.30166787527193617,
      "grad_norm": 4.075043678283691,
      "learning_rate": 4.10296252337271e-06,
      "loss": 0.5143,
      "step": 312
    },
    {
      "epoch": 0.30263475948755136,
      "grad_norm": 3.8208065032958984,
      "learning_rate": 4.096939646329775e-06,
      "loss": 0.4665,
      "step": 313
    },
    {
      "epoch": 0.30360164370316656,
      "grad_norm": 3.35614275932312,
      "learning_rate": 4.090901070993159e-06,
      "loss": 0.5053,
      "step": 314
    },
    {
      "epoch": 0.30456852791878175,
      "grad_norm": 3.444335460662842,
      "learning_rate": 4.08484685672348e-06,
      "loss": 0.4517,
      "step": 315
    },
    {
      "epoch": 0.3055354121343969,
      "grad_norm": 3.4999983310699463,
      "learning_rate": 4.078777063035096e-06,
      "loss": 0.4485,
      "step": 316
    },
    {
      "epoch": 0.3065022963500121,
      "grad_norm": 3.6743931770324707,
      "learning_rate": 4.07269174959551e-06,
      "loss": 0.4993,
      "step": 317
    },
    {
      "epoch": 0.3074691805656273,
      "grad_norm": 4.012587547302246,
      "learning_rate": 4.06659097622479e-06,
      "loss": 0.4892,
      "step": 318
    },
    {
      "epoch": 0.30843606478124247,
      "grad_norm": 3.8606793880462646,
      "learning_rate": 4.06047480289498e-06,
      "loss": 0.5095,
      "step": 319
    },
    {
      "epoch": 0.3094029489968576,
      "grad_norm": 3.4969189167022705,
      "learning_rate": 4.0543432897295055e-06,
      "loss": 0.4492,
      "step": 320
    },
    {
      "epoch": 0.3103698332124728,
      "grad_norm": 3.996771812438965,
      "learning_rate": 4.0481964970025885e-06,
      "loss": 0.5217,
      "step": 321
    },
    {
      "epoch": 0.311336717428088,
      "grad_norm": 3.5888161659240723,
      "learning_rate": 4.042034485138653e-06,
      "loss": 0.5112,
      "step": 322
    },
    {
      "epoch": 0.3123036016437032,
      "grad_norm": 3.8949966430664062,
      "learning_rate": 4.035857314711729e-06,
      "loss": 0.509,
      "step": 323
    },
    {
      "epoch": 0.3132704858593183,
      "grad_norm": 3.5536141395568848,
      "learning_rate": 4.029665046444862e-06,
      "loss": 0.4585,
      "step": 324
    },
    {
      "epoch": 0.3142373700749335,
      "grad_norm": 3.8758373260498047,
      "learning_rate": 4.023457741209509e-06,
      "loss": 0.5344,
      "step": 325
    },
    {
      "epoch": 0.3152042542905487,
      "grad_norm": 3.6032464504241943,
      "learning_rate": 4.0172354600249465e-06,
      "loss": 0.4507,
      "step": 326
    },
    {
      "epoch": 0.3161711385061639,
      "grad_norm": 3.7095227241516113,
      "learning_rate": 4.0109982640576676e-06,
      "loss": 0.5182,
      "step": 327
    },
    {
      "epoch": 0.31713802272177904,
      "grad_norm": 3.854598045349121,
      "learning_rate": 4.0047462146207795e-06,
      "loss": 0.5601,
      "step": 328
    },
    {
      "epoch": 0.31810490693739424,
      "grad_norm": 3.6497013568878174,
      "learning_rate": 3.998479373173406e-06,
      "loss": 0.4906,
      "step": 329
    },
    {
      "epoch": 0.31907179115300943,
      "grad_norm": 3.552006483078003,
      "learning_rate": 3.9921978013200764e-06,
      "loss": 0.5249,
      "step": 330
    },
    {
      "epoch": 0.3200386753686246,
      "grad_norm": 3.874676465988159,
      "learning_rate": 3.985901560810126e-06,
      "loss": 0.5477,
      "step": 331
    },
    {
      "epoch": 0.32100555958423976,
      "grad_norm": 3.7521750926971436,
      "learning_rate": 3.979590713537084e-06,
      "loss": 0.5294,
      "step": 332
    },
    {
      "epoch": 0.32197244379985496,
      "grad_norm": 3.6338000297546387,
      "learning_rate": 3.973265321538069e-06,
      "loss": 0.5001,
      "step": 333
    },
    {
      "epoch": 0.32293932801547015,
      "grad_norm": 3.83825945854187,
      "learning_rate": 3.966925446993177e-06,
      "loss": 0.4977,
      "step": 334
    },
    {
      "epoch": 0.32390621223108534,
      "grad_norm": 3.9020495414733887,
      "learning_rate": 3.960571152224872e-06,
      "loss": 0.4189,
      "step": 335
    },
    {
      "epoch": 0.3248730964467005,
      "grad_norm": 3.4360294342041016,
      "learning_rate": 3.954202499697373e-06,
      "loss": 0.5238,
      "step": 336
    },
    {
      "epoch": 0.3258399806623157,
      "grad_norm": 3.462728500366211,
      "learning_rate": 3.9478195520160355e-06,
      "loss": 0.4317,
      "step": 337
    },
    {
      "epoch": 0.32680686487793087,
      "grad_norm": 3.646033525466919,
      "learning_rate": 3.941422371926742e-06,
      "loss": 0.4208,
      "step": 338
    },
    {
      "epoch": 0.32777374909354606,
      "grad_norm": 3.4529306888580322,
      "learning_rate": 3.935011022315284e-06,
      "loss": 0.4753,
      "step": 339
    },
    {
      "epoch": 0.3287406333091612,
      "grad_norm": 3.7135887145996094,
      "learning_rate": 3.928585566206742e-06,
      "loss": 0.405,
      "step": 340
    },
    {
      "epoch": 0.3297075175247764,
      "grad_norm": 3.571955442428589,
      "learning_rate": 3.922146066764863e-06,
      "loss": 0.4933,
      "step": 341
    },
    {
      "epoch": 0.3306744017403916,
      "grad_norm": 3.5327978134155273,
      "learning_rate": 3.9156925872914504e-06,
      "loss": 0.5044,
| "step": 342 | |
| }, | |
| { | |
| "epoch": 0.3316412859560068, | |
| "grad_norm": 4.009077548980713, | |
| "learning_rate": 3.9092251912257286e-06, | |
| "loss": 0.5184, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 0.332608170171622, | |
| "grad_norm": 3.705810785293579, | |
| "learning_rate": 3.902743942143728e-06, | |
| "loss": 0.5028, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 0.3335750543872371, | |
| "grad_norm": 3.7308077812194824, | |
| "learning_rate": 3.896248903757658e-06, | |
| "loss": 0.5334, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.3345419386028523, | |
| "grad_norm": 3.8802003860473633, | |
| "learning_rate": 3.889740139915279e-06, | |
| "loss": 0.5459, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 0.3355088228184675, | |
| "grad_norm": 3.775622606277466, | |
| "learning_rate": 3.883217714599273e-06, | |
| "loss": 0.498, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 0.3364757070340827, | |
| "grad_norm": 4.032291889190674, | |
| "learning_rate": 3.876681691926625e-06, | |
| "loss": 0.4995, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 0.33744259124969783, | |
| "grad_norm": 3.9446566104888916, | |
| "learning_rate": 3.870132136147977e-06, | |
| "loss": 0.5006, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 0.338409475465313, | |
| "grad_norm": 3.741894006729126, | |
| "learning_rate": 3.8635691116470095e-06, | |
| "loss": 0.4801, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.3393763596809282, | |
| "grad_norm": 3.4935619831085205, | |
| "learning_rate": 3.856992682939803e-06, | |
| "loss": 0.4642, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 0.3403432438965434, | |
| "grad_norm": 3.661100149154663, | |
| "learning_rate": 3.850402914674205e-06, | |
| "loss": 0.5108, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.34131012811215855, | |
| "grad_norm": 3.8849480152130127, | |
| "learning_rate": 3.84379987162919e-06, | |
| "loss": 0.4919, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 0.34227701232777374, | |
| "grad_norm": 3.7999656200408936, | |
| "learning_rate": 3.837183618714233e-06, | |
| "loss": 0.548, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.34324389654338894, | |
| "grad_norm": 3.730701446533203, | |
| "learning_rate": 3.830554220968661e-06, | |
| "loss": 0.5114, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.34421078075900413, | |
| "grad_norm": 4.108263969421387, | |
| "learning_rate": 3.823911743561018e-06, | |
| "loss": 0.5186, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.34517766497461927, | |
| "grad_norm": 3.543729066848755, | |
| "learning_rate": 3.817256251788425e-06, | |
| "loss": 0.4591, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 0.34614454919023446, | |
| "grad_norm": 3.803205728530884, | |
| "learning_rate": 3.810587811075937e-06, | |
| "loss": 0.4616, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.34711143340584966, | |
| "grad_norm": 3.6811118125915527, | |
| "learning_rate": 3.803906486975901e-06, | |
| "loss": 0.4723, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.34807831762146485, | |
| "grad_norm": 3.6985318660736084, | |
| "learning_rate": 3.7972123451673102e-06, | |
| "loss": 0.4351, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.34904520183708, | |
| "grad_norm": 3.970501661300659, | |
| "learning_rate": 3.790505451455158e-06, | |
| "loss": 0.4424, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.3500120860526952, | |
| "grad_norm": 3.792994976043701, | |
| "learning_rate": 3.783785871769793e-06, | |
| "loss": 0.5227, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.3509789702683104, | |
| "grad_norm": 3.7695376873016357, | |
| "learning_rate": 3.77705367216627e-06, | |
| "loss": 0.5037, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.35194585448392557, | |
| "grad_norm": 3.754580020904541, | |
| "learning_rate": 3.7703089188237e-06, | |
| "loss": 0.5397, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.3529127386995407, | |
| "grad_norm": 3.678025245666504, | |
| "learning_rate": 3.7635516780446e-06, | |
| "loss": 0.4307, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.3538796229151559, | |
| "grad_norm": 3.763305187225342, | |
| "learning_rate": 3.756782016254242e-06, | |
| "loss": 0.5489, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.3548465071307711, | |
| "grad_norm": 3.853353261947632, | |
| "learning_rate": 3.7500000000000005e-06, | |
| "loss": 0.4606, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.3558133913463863, | |
| "grad_norm": 3.5719709396362305, | |
| "learning_rate": 3.743205695950695e-06, | |
| "loss": 0.4928, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.3567802755620014, | |
| "grad_norm": 3.763885021209717, | |
| "learning_rate": 3.7363991708959386e-06, | |
| "loss": 0.4669, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.3577471597776166, | |
| "grad_norm": 3.9263694286346436, | |
| "learning_rate": 3.729580491745479e-06, | |
| "loss": 0.5033, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.3587140439932318, | |
| "grad_norm": 3.462216377258301, | |
| "learning_rate": 3.7227497255285416e-06, | |
| "loss": 0.4507, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.359680928208847, | |
| "grad_norm": 3.9465153217315674, | |
| "learning_rate": 3.7159069393931717e-06, | |
| "loss": 0.4838, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.36064781242446214, | |
| "grad_norm": 4.065799713134766, | |
| "learning_rate": 3.709052200605572e-06, | |
| "loss": 0.5648, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.36161469664007734, | |
| "grad_norm": 3.8492698669433594, | |
| "learning_rate": 3.702185576549441e-06, | |
| "loss": 0.4143, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.36258158085569253, | |
| "grad_norm": 3.912066698074341, | |
| "learning_rate": 3.6953071347253167e-06, | |
| "loss": 0.4447, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.3635484650713077, | |
| "grad_norm": 3.6531989574432373, | |
| "learning_rate": 3.688416942749905e-06, | |
| "loss": 0.4697, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.3645153492869229, | |
| "grad_norm": 3.5612435340881348, | |
| "learning_rate": 3.6815150683554187e-06, | |
| "loss": 0.473, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.36548223350253806, | |
| "grad_norm": 3.904783248901367, | |
| "learning_rate": 3.674601579388913e-06, | |
| "loss": 0.5046, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.36644911771815325, | |
| "grad_norm": 4.0458903312683105, | |
| "learning_rate": 3.6676765438116157e-06, | |
| "loss": 0.5251, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.36741600193376844, | |
| "grad_norm": 3.6802172660827637, | |
| "learning_rate": 3.660740029698262e-06, | |
| "loss": 0.4432, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.36838288614938364, | |
| "grad_norm": 4.1497626304626465, | |
| "learning_rate": 3.6537921052364223e-06, | |
| "loss": 0.4888, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.3693497703649988, | |
| "grad_norm": 4.301728248596191, | |
| "learning_rate": 3.646832838725835e-06, | |
| "loss": 0.5565, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.37031665458061397, | |
| "grad_norm": 3.8046703338623047, | |
| "learning_rate": 3.6398622985777314e-06, | |
| "loss": 0.5819, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.37128353879622916, | |
| "grad_norm": 3.943380355834961, | |
| "learning_rate": 3.632880553314169e-06, | |
| "loss": 0.5247, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.37225042301184436, | |
| "grad_norm": 4.136211395263672, | |
| "learning_rate": 3.6258876715673475e-06, | |
| "loss": 0.469, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.3732173072274595, | |
| "grad_norm": 3.70596981048584, | |
| "learning_rate": 3.6188837220789496e-06, | |
| "loss": 0.544, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.3741841914430747, | |
| "grad_norm": 3.52099347114563, | |
| "learning_rate": 3.611868773699449e-06, | |
| "loss": 0.4492, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.3751510756586899, | |
| "grad_norm": 3.8500866889953613, | |
| "learning_rate": 3.604842895387446e-06, | |
| "loss": 0.4829, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.3761179598743051, | |
| "grad_norm": 3.67982816696167, | |
| "learning_rate": 3.597806156208982e-06, | |
| "loss": 0.4814, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.3770848440899202, | |
| "grad_norm": 3.550171136856079, | |
| "learning_rate": 3.5907586253368643e-06, | |
| "loss": 0.4134, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.3780517283055354, | |
| "grad_norm": 3.7428512573242188, | |
| "learning_rate": 3.5837003720499853e-06, | |
| "loss": 0.4985, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.3790186125211506, | |
| "grad_norm": 3.5214486122131348, | |
| "learning_rate": 3.57663146573264e-06, | |
| "loss": 0.4259, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.3799854967367658, | |
| "grad_norm": 3.610581398010254, | |
| "learning_rate": 3.569551975873847e-06, | |
| "loss": 0.4875, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.38095238095238093, | |
| "grad_norm": 3.991929292678833, | |
| "learning_rate": 3.562461972066663e-06, | |
| "loss": 0.4683, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.3819192651679961, | |
| "grad_norm": 3.5541305541992188, | |
| "learning_rate": 3.555361524007498e-06, | |
| "loss": 0.4556, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.3828861493836113, | |
| "grad_norm": 3.4029953479766846, | |
| "learning_rate": 3.5482507014954327e-06, | |
| "loss": 0.5061, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.3838530335992265, | |
| "grad_norm": 3.640984058380127, | |
| "learning_rate": 3.541129574431532e-06, | |
| "loss": 0.434, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.38481991781484165, | |
| "grad_norm": 3.657914876937866, | |
| "learning_rate": 3.5339982128181536e-06, | |
| "loss": 0.4475, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.38578680203045684, | |
| "grad_norm": 4.057721138000488, | |
| "learning_rate": 3.526856686758269e-06, | |
| "loss": 0.4982, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.38675368624607204, | |
| "grad_norm": 3.833341121673584, | |
| "learning_rate": 3.519705066454762e-06, | |
| "loss": 0.5104, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.38772057046168723, | |
| "grad_norm": 3.7433853149414062, | |
| "learning_rate": 3.51254342220975e-06, | |
| "loss": 0.4551, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.38868745467730237, | |
| "grad_norm": 3.844796657562256, | |
| "learning_rate": 3.5053718244238847e-06, | |
| "loss": 0.4761, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.38965433889291756, | |
| "grad_norm": 3.6285436153411865, | |
| "learning_rate": 3.4981903435956675e-06, | |
| "loss": 0.4168, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.39062122310853276, | |
| "grad_norm": 3.8488385677337646, | |
| "learning_rate": 3.4909990503207488e-06, | |
| "loss": 0.5304, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.39158810732414795, | |
| "grad_norm": 3.696931838989258, | |
| "learning_rate": 3.4837980152912393e-06, | |
| "loss": 0.5355, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.3925549915397631, | |
| "grad_norm": 3.8406333923339844, | |
| "learning_rate": 3.4765873092950126e-06, | |
| "loss": 0.5835, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.3935218757553783, | |
| "grad_norm": 3.689175605773926, | |
| "learning_rate": 3.4693670032150117e-06, | |
| "loss": 0.5174, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.3944887599709935, | |
| "grad_norm": 3.8330447673797607, | |
| "learning_rate": 3.4621371680285492e-06, | |
| "loss": 0.5241, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.39545564418660867, | |
| "grad_norm": 3.984819173812866, | |
| "learning_rate": 3.4548978748066115e-06, | |
| "loss": 0.4701, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.3964225284022238, | |
| "grad_norm": 3.6191537380218506, | |
| "learning_rate": 3.4476491947131596e-06, | |
| "loss": 0.4835, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.397389412617839, | |
| "grad_norm": 3.8275318145751953, | |
| "learning_rate": 3.440391199004431e-06, | |
| "loss": 0.4989, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.3983562968334542, | |
| "grad_norm": 3.577519416809082, | |
| "learning_rate": 3.4331239590282354e-06, | |
| "loss": 0.4495, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.3993231810490694, | |
| "grad_norm": 3.678297281265259, | |
| "learning_rate": 3.4258475462232586e-06, | |
| "loss": 0.4558, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.4002900652646846, | |
| "grad_norm": 3.846923828125, | |
| "learning_rate": 3.4185620321183543e-06, | |
| "loss": 0.4499, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.4012569494802997, | |
| "grad_norm": 3.836149215698242, | |
| "learning_rate": 3.4112674883318477e-06, | |
| "loss": 0.5519, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.4022238336959149, | |
| "grad_norm": 3.9182722568511963, | |
| "learning_rate": 3.403963986570825e-06, | |
| "loss": 0.5123, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.4031907179115301, | |
| "grad_norm": 3.498328924179077, | |
| "learning_rate": 3.3966515986304322e-06, | |
| "loss": 0.4568, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.4041576021271453, | |
| "grad_norm": 3.3499412536621094, | |
| "learning_rate": 3.3893303963931703e-06, | |
| "loss": 0.4542, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.40512448634276044, | |
| "grad_norm": 3.920807123184204, | |
| "learning_rate": 3.3820004518281835e-06, | |
| "loss": 0.4842, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.40609137055837563, | |
| "grad_norm": 3.754964590072632, | |
| "learning_rate": 3.37466183699056e-06, | |
| "loss": 0.4783, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.4070582547739908, | |
| "grad_norm": 3.6585028171539307, | |
| "learning_rate": 3.367314624020613e-06, | |
| "loss": 0.4872, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.408025138989606, | |
| "grad_norm": 4.0016374588012695, | |
| "learning_rate": 3.3599588851431814e-06, | |
| "loss": 0.5722, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.40899202320522116, | |
| "grad_norm": 3.19661283493042, | |
| "learning_rate": 3.352594692666915e-06, | |
| "loss": 0.4261, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.40995890742083635, | |
| "grad_norm": 3.5285444259643555, | |
| "learning_rate": 3.3452221189835642e-06, | |
| "loss": 0.495, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.41092579163645154, | |
| "grad_norm": 3.6112709045410156, | |
| "learning_rate": 3.337841236567268e-06, | |
| "loss": 0.4101, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.41189267585206674, | |
| "grad_norm": 3.411106586456299, | |
| "learning_rate": 3.3304521179738435e-06, | |
| "loss": 0.4433, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.4128595600676819, | |
| "grad_norm": 3.8429133892059326, | |
| "learning_rate": 3.32305483584007e-06, | |
| "loss": 0.4486, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.41382644428329707, | |
| "grad_norm": 3.9495856761932373, | |
| "learning_rate": 3.3156494628829778e-06, | |
| "loss": 0.5468, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.41479332849891226, | |
| "grad_norm": 3.9919161796569824, | |
| "learning_rate": 3.30823607189913e-06, | |
| "loss": 0.5148, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.41576021271452746, | |
| "grad_norm": 3.898630380630493, | |
| "learning_rate": 3.3008147357639115e-06, | |
| "loss": 0.5074, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.4167270969301426, | |
| "grad_norm": 3.9545817375183105, | |
| "learning_rate": 3.2933855274308067e-06, | |
| "loss": 0.4954, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.4176939811457578, | |
| "grad_norm": 3.883395195007324, | |
| "learning_rate": 3.2859485199306886e-06, | |
| "loss": 0.5059, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.418660865361373, | |
| "grad_norm": 3.6193654537200928, | |
| "learning_rate": 3.278503786371095e-06, | |
| "loss": 0.5204, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.4196277495769882, | |
| "grad_norm": 4.254292964935303, | |
| "learning_rate": 3.2710513999355147e-06, | |
| "loss": 0.5259, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.4205946337926033, | |
| "grad_norm": 3.752901077270508, | |
| "learning_rate": 3.2635914338826665e-06, | |
| "loss": 0.4255, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.4215615180082185, | |
| "grad_norm": 3.523738145828247, | |
| "learning_rate": 3.2561239615457764e-06, | |
| "loss": 0.3845, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.4225284022238337, | |
| "grad_norm": 4.015591621398926, | |
| "learning_rate": 3.2486490563318605e-06, | |
| "loss": 0.4784, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.4234952864394489, | |
| "grad_norm": 3.526104211807251, | |
| "learning_rate": 3.241166791721001e-06, | |
| "loss": 0.4133, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.42446217065506403, | |
| "grad_norm": 3.768324375152588, | |
| "learning_rate": 3.233677241265627e-06, | |
| "loss": 0.5096, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.4254290548706792, | |
| "grad_norm": 3.6191327571868896, | |
| "learning_rate": 3.2261804785897855e-06, | |
| "loss": 0.4197, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.4263959390862944, | |
| "grad_norm": 3.9885098934173584, | |
| "learning_rate": 3.218676577388424e-06, | |
| "loss": 0.4907, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.4273628233019096, | |
| "grad_norm": 3.5632357597351074, | |
| "learning_rate": 3.2111656114266625e-06, | |
| "loss": 0.4452, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.42832970751752475, | |
| "grad_norm": 4.117615699768066, | |
| "learning_rate": 3.2036476545390695e-06, | |
| "loss": 0.5533, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.42929659173313994, | |
| "grad_norm": 4.148410797119141, | |
| "learning_rate": 3.1961227806289358e-06, | |
| "loss": 0.5051, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.43026347594875514, | |
| "grad_norm": 3.9031429290771484, | |
| "learning_rate": 3.188591063667548e-06, | |
| "loss": 0.5358, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.43123036016437033, | |
| "grad_norm": 3.73646879196167, | |
| "learning_rate": 3.1810525776934614e-06, | |
| "loss": 0.4306, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.4321972443799855, | |
| "grad_norm": 3.6302988529205322, | |
| "learning_rate": 3.1735073968117743e-06, | |
| "loss": 0.3704, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.43316412859560066, | |
| "grad_norm": 3.804278612136841, | |
| "learning_rate": 3.165955595193393e-06, | |
| "loss": 0.5157, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.43413101281121586, | |
| "grad_norm": 3.7024874687194824, | |
| "learning_rate": 3.1583972470743123e-06, | |
| "loss": 0.4806, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 0.43509789702683105, | |
| "grad_norm": 3.7965049743652344, | |
| "learning_rate": 3.1508324267548775e-06, | |
| "loss": 0.4658, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.43606478124244624, | |
| "grad_norm": 4.142588138580322, | |
| "learning_rate": 3.1432612085990576e-06, | |
| "loss": 0.5566, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 0.4370316654580614, | |
| "grad_norm": 3.6118361949920654, | |
| "learning_rate": 3.1356836670337153e-06, | |
| "loss": 0.4803, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 0.4379985496736766, | |
| "grad_norm": 3.7696590423583984, | |
| "learning_rate": 3.1280998765478725e-06, | |
| "loss": 0.4552, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 0.43896543388929177, | |
| "grad_norm": 3.716688632965088, | |
| "learning_rate": 3.1205099116919794e-06, | |
| "loss": 0.4393, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 0.43993231810490696, | |
| "grad_norm": 3.637057304382324, | |
| "learning_rate": 3.1129138470771823e-06, | |
| "loss": 0.4637, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.4408992023205221, | |
| "grad_norm": 3.727297067642212, | |
| "learning_rate": 3.10531175737459e-06, | |
| "loss": 0.4762, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 0.4418660865361373, | |
| "grad_norm": 3.2276439666748047, | |
| "learning_rate": 3.0977037173145387e-06, | |
| "loss": 0.4832, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 0.4428329707517525, | |
| "grad_norm": 4.06106424331665, | |
| "learning_rate": 3.090089801685857e-06, | |
| "loss": 0.5277, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 0.4437998549673677, | |
| "grad_norm": 3.59995698928833, | |
| "learning_rate": 3.082470085335133e-06, | |
| "loss": 0.4655, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 0.4447667391829828, | |
| "grad_norm": 3.5383338928222656, | |
| "learning_rate": 3.074844643165979e-06, | |
| "loss": 0.5158, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.445733623398598, | |
| "grad_norm": 3.6818606853485107, | |
| "learning_rate": 3.0672135501382894e-06, | |
| "loss": 0.3861, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 0.4467005076142132, | |
| "grad_norm": 3.7184290885925293, | |
| "learning_rate": 3.05957688126751e-06, | |
| "loss": 0.5045, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 0.4476673918298284, | |
| "grad_norm": 3.7670133113861084, | |
| "learning_rate": 3.0519347116239e-06, | |
| "loss": 0.4893, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 0.44863427604544354, | |
| "grad_norm": 3.8827385902404785, | |
| "learning_rate": 3.04428711633179e-06, | |
| "loss": 0.453, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 0.44960116026105873, | |
| "grad_norm": 3.7055163383483887, | |
| "learning_rate": 3.036634170568847e-06, | |
| "loss": 0.4931, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.4505680444766739, | |
| "grad_norm": 3.6650643348693848, | |
| "learning_rate": 3.0289759495653344e-06, | |
| "loss": 0.5074, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 0.4515349286922891, | |
| "grad_norm": 3.983785629272461, | |
| "learning_rate": 3.021312528603371e-06, | |
| "loss": 0.47, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 0.45250181290790426, | |
| "grad_norm": 3.402285099029541, | |
| "learning_rate": 3.0136439830161968e-06, | |
| "loss": 0.4588, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 0.45346869712351945, | |
| "grad_norm": 3.4030966758728027, | |
| "learning_rate": 3.0059703881874232e-06, | |
| "loss": 0.4406, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 0.45443558133913464, | |
| "grad_norm": 4.0612359046936035, | |
| "learning_rate": 2.9982918195502985e-06, | |
| "loss": 0.5245, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.45540246555474984, | |
| "grad_norm": 4.0308051109313965, | |
| "learning_rate": 2.990608352586965e-06, | |
| "loss": 0.5318, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 0.456369349770365, | |
| "grad_norm": 3.894338369369507, | |
| "learning_rate": 2.9829200628277176e-06, | |
| "loss": 0.469, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 0.45733623398598017, | |
| "grad_norm": 3.9288504123687744, | |
| "learning_rate": 2.9752270258502593e-06, | |
| "loss": 0.5062, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 0.45830311820159536, | |
| "grad_norm": 3.727022171020508, | |
| "learning_rate": 2.9675293172789583e-06, | |
| "loss": 0.4718, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 0.45927000241721055, | |
| "grad_norm": 3.7620606422424316, | |
| "learning_rate": 2.959827012784108e-06, | |
| "loss": 0.4797, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.4602368866328257, | |
| "grad_norm": 3.69411301612854, | |
| "learning_rate": 2.952120188081179e-06, | |
| "loss": 0.4773, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 0.4612037708484409, | |
| "grad_norm": 3.8497121334075928, | |
| "learning_rate": 2.9444089189300783e-06, | |
| "loss": 0.4913, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 0.4621706550640561, | |
| "grad_norm": 3.8784735202789307, | |
| "learning_rate": 2.936693281134402e-06, | |
| "loss": 0.5083, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 0.4631375392796713, | |
| "grad_norm": 3.7350783348083496, | |
| "learning_rate": 2.92897335054069e-06, | |
| "loss": 0.5107, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 0.46410442349528647, | |
| "grad_norm": 3.7133052349090576, | |
| "learning_rate": 2.9212492030376815e-06, | |
| "loss": 0.4692, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.4650713077109016, | |
| "grad_norm": 3.5680742263793945, | |
| "learning_rate": 2.913520914555572e-06, | |
| "loss": 0.421, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 0.4660381919265168, | |
| "grad_norm": 3.4685680866241455, | |
| "learning_rate": 2.90578856106526e-06, | |
| "loss": 0.4427, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 0.467005076142132, | |
| "grad_norm": 3.5732250213623047, | |
| "learning_rate": 2.8980522185776065e-06, | |
| "loss": 0.4766, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 0.4679719603577472, | |
| "grad_norm": 3.5667455196380615, | |
| "learning_rate": 2.8903119631426835e-06, | |
| "loss": 0.4288, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 0.4689388445733623, | |
| "grad_norm": 3.793536424636841, | |
| "learning_rate": 2.882567870849029e-06, | |
| "loss": 0.4478, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.4699057287889775, | |
| "grad_norm": 3.8462436199188232, | |
| "learning_rate": 2.874820017822899e-06, | |
| "loss": 0.5199, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 0.4708726130045927, | |
| "grad_norm": 3.7388360500335693, | |
| "learning_rate": 2.8670684802275173e-06, | |
| "loss": 0.5316, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 0.4718394972202079, | |
| "grad_norm": 3.991246223449707, | |
| "learning_rate": 2.8593133342623276e-06, | |
| "loss": 0.5396, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 0.47280638143582304, | |
| "grad_norm": 4.0928449630737305, | |
| "learning_rate": 2.8515546561622464e-06, | |
| "loss": 0.4738, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 0.47377326565143824, | |
| "grad_norm": 4.5484819412231445, | |
| "learning_rate": 2.8437925221969097e-06, | |
| "loss": 0.5258, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.47474014986705343, | |
| "grad_norm": 3.6725525856018066, | |
| "learning_rate": 2.8360270086699274e-06, | |
| "loss": 0.4532, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 0.4757070340826686, | |
| "grad_norm": 3.562164068222046, | |
| "learning_rate": 2.8282581919181314e-06, | |
| "loss": 0.4455, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 0.47667391829828376, | |
| "grad_norm": 3.7910706996917725, | |
| "learning_rate": 2.820486148310822e-06, | |
| "loss": 0.5149, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 0.47764080251389895, | |
| "grad_norm": 3.9550485610961914, | |
| "learning_rate": 2.8127109542490238e-06, | |
| "loss": 0.4868, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 0.47860768672951415, | |
| "grad_norm": 3.7173829078674316, | |
| "learning_rate": 2.8049326861647303e-06, | |
| "loss": 0.5135, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 0.47957457094512934, | |
| "grad_norm": 3.9203269481658936, | |
| "learning_rate": 2.7971514205201515e-06, | |
| "loss": 0.4916, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 0.4805414551607445, | |
| "grad_norm": 4.3923726081848145, | |
| "learning_rate": 2.7893672338069666e-06, | |
| "loss": 0.5507, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 0.4815083393763597, | |
| "grad_norm": 3.923211097717285, | |
| "learning_rate": 2.7815802025455677e-06, | |
| "loss": 0.4686, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 0.48247522359197487, | |
| "grad_norm": 3.6547012329101562, | |
| "learning_rate": 2.7737904032843105e-06, | |
| "loss": 0.4615, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 0.48344210780759006, | |
| "grad_norm": 4.033823013305664, | |
| "learning_rate": 2.765997912598761e-06, | |
| "loss": 0.5064, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.4844089920232052, | |
| "grad_norm": 3.4618935585021973, | |
| "learning_rate": 2.7582028070909415e-06, | |
| "loss": 0.4413, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 0.4853758762388204, | |
| "grad_norm": 3.9825124740600586, | |
| "learning_rate": 2.750405163388579e-06, | |
| "loss": 0.5646, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 0.4863427604544356, | |
| "grad_norm": 3.5702192783355713, | |
| "learning_rate": 2.742605058144352e-06, | |
| "loss": 0.455, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 0.4873096446700508, | |
| "grad_norm": 3.779356002807617, | |
| "learning_rate": 2.7348025680351363e-06, | |
| "loss": 0.432, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 0.4882765288856659, | |
| "grad_norm": 3.8731324672698975, | |
| "learning_rate": 2.7269977697612515e-06, | |
| "loss": 0.4606, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 0.4892434131012811, | |
| "grad_norm": 3.5303452014923096, | |
| "learning_rate": 2.7191907400457068e-06, | |
| "loss": 0.4369, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 0.4902102973168963, | |
| "grad_norm": 3.997253894805908, | |
| "learning_rate": 2.7113815556334478e-06, | |
| "loss": 0.4808, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 0.4911771815325115, | |
| "grad_norm": 3.6927413940429688, | |
| "learning_rate": 2.7035702932906e-06, | |
| "loss": 0.4606, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 0.49214406574812664, | |
| "grad_norm": 3.5967085361480713, | |
| "learning_rate": 2.6957570298037156e-06, | |
| "loss": 0.4379, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 0.49311094996374183, | |
| "grad_norm": 3.671698808670044, | |
| "learning_rate": 2.6879418419790203e-06, | |
| "loss": 0.4879, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.494077834179357, | |
| "grad_norm": 3.4737465381622314, | |
| "learning_rate": 2.680124806641654e-06, | |
| "loss": 0.4498, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 0.4950447183949722, | |
| "grad_norm": 3.8905839920043945, | |
| "learning_rate": 2.6723060006349198e-06, | |
| "loss": 0.5119, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 0.4960116026105874, | |
| "grad_norm": 3.9527182579040527, | |
| "learning_rate": 2.664485500819527e-06, | |
| "loss": 0.4407, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 0.49697848682620255, | |
| "grad_norm": 3.410557985305786, | |
| "learning_rate": 2.656663384072834e-06, | |
| "loss": 0.4549, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 0.49794537104181774, | |
| "grad_norm": 3.4379289150238037, | |
| "learning_rate": 2.6488397272880943e-06, | |
| "loss": 0.4566, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 0.49891225525743294, | |
| "grad_norm": 3.790123224258423, | |
| "learning_rate": 2.641014607373702e-06, | |
| "loss": 0.4753, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 0.49987913947304813, | |
| "grad_norm": 3.5022404193878174, | |
| "learning_rate": 2.633188101252433e-06, | |
| "loss": 0.4508, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 0.5008460236886633, | |
| "grad_norm": 3.726773738861084, | |
| "learning_rate": 2.625360285860689e-06, | |
| "loss": 0.4535, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 0.5018129079042785, | |
| "grad_norm": 4.066783905029297, | |
| "learning_rate": 2.617531238147744e-06, | |
| "loss": 0.4777, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 0.5027797921198937, | |
| "grad_norm": 3.717376470565796, | |
| "learning_rate": 2.6097010350749843e-06, | |
| "loss": 0.4507, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.5037466763355088, | |
| "grad_norm": 4.117121696472168, | |
| "learning_rate": 2.6018697536151554e-06, | |
| "loss": 0.4365, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 0.504713560551124, | |
| "grad_norm": 3.5613620281219482, | |
| "learning_rate": 2.5940374707516014e-06, | |
| "loss": 0.4911, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 0.5056804447667392, | |
| "grad_norm": 3.5967776775360107, | |
| "learning_rate": 2.5862042634775125e-06, | |
| "loss": 0.4665, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 0.5066473289823543, | |
| "grad_norm": 3.632263660430908, | |
| "learning_rate": 2.5783702087951646e-06, | |
| "loss": 0.4511, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 0.5076142131979695, | |
| "grad_norm": 3.460890293121338, | |
| "learning_rate": 2.5705353837151655e-06, | |
| "loss": 0.4014, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 0.5085810974135847, | |
| "grad_norm": 3.7510364055633545, | |
| "learning_rate": 2.5626998652556937e-06, | |
| "loss": 0.5107, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 0.5095479816291999, | |
| "grad_norm": 3.4469544887542725, | |
| "learning_rate": 2.554863730441748e-06, | |
| "loss": 0.4476, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 0.5105148658448151, | |
| "grad_norm": 5.02686882019043, | |
| "learning_rate": 2.547027056304379e-06, | |
| "loss": 0.4338, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 0.5114817500604303, | |
| "grad_norm": 3.9072558879852295, | |
| "learning_rate": 2.5391899198799475e-06, | |
| "loss": 0.4529, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 0.5124486342760455, | |
| "grad_norm": 4.003458499908447, | |
| "learning_rate": 2.5313523982093517e-06, | |
| "loss": 0.4483, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.5134155184916607, | |
| "grad_norm": 3.5714404582977295, | |
| "learning_rate": 2.5235145683372813e-06, | |
| "loss": 0.4508, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 0.5143824027072758, | |
| "grad_norm": 4.432825565338135, | |
| "learning_rate": 2.5156765073114535e-06, | |
| "loss": 0.4886, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 0.515349286922891, | |
| "grad_norm": 3.702751636505127, | |
| "learning_rate": 2.507838292181858e-06, | |
| "loss": 0.4578, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 0.5163161711385061, | |
| "grad_norm": 3.845242738723755, | |
| "learning_rate": 2.5e-06, | |
| "loss": 0.4034, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 0.5172830553541213, | |
| "grad_norm": 3.649942398071289, | |
| "learning_rate": 2.4921617078181425e-06, | |
| "loss": 0.4518, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 0.5182499395697365, | |
| "grad_norm": 3.932555913925171, | |
| "learning_rate": 2.484323492688547e-06, | |
| "loss": 0.4826, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 0.5192168237853517, | |
| "grad_norm": 3.628704071044922, | |
| "learning_rate": 2.47648543166272e-06, | |
| "loss": 0.4381, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 0.5201837080009669, | |
| "grad_norm": 3.462639093399048, | |
| "learning_rate": 2.4686476017906487e-06, | |
| "loss": 0.4244, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 0.5211505922165821, | |
| "grad_norm": 3.4104435443878174, | |
| "learning_rate": 2.4608100801200533e-06, | |
| "loss": 0.3825, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 0.5221174764321972, | |
| "grad_norm": 3.861297369003296, | |
| "learning_rate": 2.452972943695621e-06, | |
| "loss": 0.4183, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.5230843606478124, | |
| "grad_norm": 3.5461347103118896, | |
| "learning_rate": 2.445136269558254e-06, | |
| "loss": 0.4373, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 0.5240512448634276, | |
| "grad_norm": 3.585427761077881, | |
| "learning_rate": 2.4373001347443067e-06, | |
| "loss": 0.4556, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 0.5250181290790428, | |
| "grad_norm": 3.582169771194458, | |
| "learning_rate": 2.4294646162848353e-06, | |
| "loss": 0.4544, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 0.525985013294658, | |
| "grad_norm": 3.5394082069396973, | |
| "learning_rate": 2.4216297912048354e-06, | |
| "loss": 0.4314, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 0.5269518975102732, | |
| "grad_norm": 3.7783658504486084, | |
| "learning_rate": 2.413795736522489e-06, | |
| "loss": 0.4848, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 0.5279187817258884, | |
| "grad_norm": 3.5096867084503174, | |
| "learning_rate": 2.4059625292483994e-06, | |
| "loss": 0.4411, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 0.5288856659415035, | |
| "grad_norm": 3.744575023651123, | |
| "learning_rate": 2.3981302463848454e-06, | |
| "loss": 0.4997, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 0.5298525501571187, | |
| "grad_norm": 3.5636816024780273, | |
| "learning_rate": 2.390298964925016e-06, | |
| "loss": 0.4279, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 0.5308194343727338, | |
| "grad_norm": 3.968400716781616, | |
| "learning_rate": 2.3824687618522567e-06, | |
| "loss": 0.4238, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 0.531786318588349, | |
| "grad_norm": 3.7942044734954834, | |
| "learning_rate": 2.374639714139312e-06, | |
| "loss": 0.4884, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.5327532028039642, | |
| "grad_norm": 3.705420732498169, | |
| "learning_rate": 2.366811898747568e-06, | |
| "loss": 0.4936, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 0.5337200870195794, | |
| "grad_norm": 3.5354163646698, | |
| "learning_rate": 2.358985392626298e-06, | |
| "loss": 0.4697, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 0.5346869712351946, | |
| "grad_norm": 3.759593963623047, | |
| "learning_rate": 2.351160272711907e-06, | |
| "loss": 0.4631, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 0.5356538554508098, | |
| "grad_norm": 3.913259506225586, | |
| "learning_rate": 2.343336615927167e-06, | |
| "loss": 0.471, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 0.536620739666425, | |
| "grad_norm": 4.187595367431641, | |
| "learning_rate": 2.3355144991804736e-06, | |
| "loss": 0.5586, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 0.5375876238820402, | |
| "grad_norm": 3.8247408866882324, | |
| "learning_rate": 2.3276939993650806e-06, | |
| "loss": 0.5067, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 0.5385545080976553, | |
| "grad_norm": 3.486445665359497, | |
| "learning_rate": 2.3198751933583463e-06, | |
| "loss": 0.4399, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 0.5395213923132705, | |
| "grad_norm": 3.785252571105957, | |
| "learning_rate": 2.312058158020981e-06, | |
| "loss": 0.4527, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 0.5404882765288856, | |
| "grad_norm": 3.7378010749816895, | |
| "learning_rate": 2.304242970196285e-06, | |
| "loss": 0.458, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 0.5414551607445008, | |
| "grad_norm": 3.6285452842712402, | |
| "learning_rate": 2.2964297067094004e-06, | |
| "loss": 0.4403, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.542422044960116, | |
| "grad_norm": 3.6113779544830322, | |
| "learning_rate": 2.2886184443665522e-06, | |
| "loss": 0.41, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 0.5433889291757312, | |
| "grad_norm": 3.54592227935791, | |
| "learning_rate": 2.2808092599542936e-06, | |
| "loss": 0.4754, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 0.5443558133913464, | |
| "grad_norm": 3.5506227016448975, | |
| "learning_rate": 2.2730022302387493e-06, | |
| "loss": 0.4601, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 0.5453226976069616, | |
| "grad_norm": 3.6648545265197754, | |
| "learning_rate": 2.265197431964864e-06, | |
| "loss": 0.4528, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 0.5462895818225767, | |
| "grad_norm": 3.643314838409424, | |
| "learning_rate": 2.257394941855648e-06, | |
| "loss": 0.4552, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 0.5472564660381919, | |
| "grad_norm": 3.9885194301605225, | |
| "learning_rate": 2.249594836611422e-06, | |
| "loss": 0.4111, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 0.5482233502538071, | |
| "grad_norm": 3.288057804107666, | |
| "learning_rate": 2.2417971929090593e-06, | |
| "loss": 0.3875, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 0.5491902344694223, | |
| "grad_norm": 4.048732280731201, | |
| "learning_rate": 2.2340020874012393e-06, | |
| "loss": 0.5485, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 0.5501571186850375, | |
| "grad_norm": 3.265153646469116, | |
| "learning_rate": 2.2262095967156895e-06, | |
| "loss": 0.4145, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 0.5511240029006527, | |
| "grad_norm": 3.73302960395813, | |
| "learning_rate": 2.218419797454433e-06, | |
| "loss": 0.5199, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.5520908871162679, | |
| "grad_norm": 3.5953993797302246, | |
| "learning_rate": 2.2106327661930343e-06, | |
| "loss": 0.4172, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 0.553057771331883, | |
| "grad_norm": 3.707054853439331, | |
| "learning_rate": 2.202848579479849e-06, | |
| "loss": 0.4282, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 0.5540246555474981, | |
| "grad_norm": 3.5350615978240967, | |
| "learning_rate": 2.19506731383527e-06, | |
| "loss": 0.4952, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 0.5549915397631133, | |
| "grad_norm": 4.000664234161377, | |
| "learning_rate": 2.187289045750977e-06, | |
| "loss": 0.4619, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 0.5559584239787285, | |
| "grad_norm": 4.087167739868164, | |
| "learning_rate": 2.1795138516891786e-06, | |
| "loss": 0.5084, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 0.5569253081943437, | |
| "grad_norm": 3.46340012550354, | |
| "learning_rate": 2.17174180808187e-06, | |
| "loss": 0.4312, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 0.5578921924099589, | |
| "grad_norm": 3.5368056297302246, | |
| "learning_rate": 2.163972991330073e-06, | |
| "loss": 0.5035, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 0.5588590766255741, | |
| "grad_norm": 3.7972068786621094, | |
| "learning_rate": 2.1562074778030907e-06, | |
| "loss": 0.4531, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 0.5598259608411893, | |
| "grad_norm": 3.9204769134521484, | |
| "learning_rate": 2.148445343837755e-06, | |
| "loss": 0.4403, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 0.5607928450568045, | |
| "grad_norm": 3.8079707622528076, | |
| "learning_rate": 2.1406866657376733e-06, | |
| "loss": 0.4951, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.5617597292724196, | |
| "grad_norm": 3.6291754245758057, | |
| "learning_rate": 2.1329315197724835e-06, | |
| "loss": 0.4237, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 0.5627266134880348, | |
| "grad_norm": 3.552342414855957, | |
| "learning_rate": 2.1251799821771015e-06, | |
| "loss": 0.4063, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 0.56369349770365, | |
| "grad_norm": 3.4691274166107178, | |
| "learning_rate": 2.1174321291509716e-06, | |
| "loss": 0.4758, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 0.5646603819192652, | |
| "grad_norm": 3.760558605194092, | |
| "learning_rate": 2.1096880368573173e-06, | |
| "loss": 0.5106, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 0.5656272661348803, | |
| "grad_norm": 3.534207582473755, | |
| "learning_rate": 2.1019477814223943e-06, | |
| "loss": 0.5242, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 0.5665941503504955, | |
| "grad_norm": 4.03209114074707, | |
| "learning_rate": 2.0942114389347402e-06, | |
| "loss": 0.4693, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 0.5675610345661107, | |
| "grad_norm": 3.6348366737365723, | |
| "learning_rate": 2.086479085444429e-06, | |
| "loss": 0.5009, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 0.5685279187817259, | |
| "grad_norm": 3.4231724739074707, | |
| "learning_rate": 2.078750796962319e-06, | |
| "loss": 0.457, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 0.5694948029973411, | |
| "grad_norm": 3.9665541648864746, | |
| "learning_rate": 2.071026649459311e-06, | |
| "loss": 0.4962, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 0.5704616872129562, | |
| "grad_norm": 3.6691770553588867, | |
| "learning_rate": 2.0633067188655985e-06, | |
| "loss": 0.4548, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.5714285714285714, | |
| "grad_norm": 3.683633804321289, | |
| "learning_rate": 2.055591081069922e-06, | |
| "loss": 0.4559, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 0.5723954556441866, | |
| "grad_norm": 3.508237600326538, | |
| "learning_rate": 2.0478798119188216e-06, | |
| "loss": 0.4858, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 0.5733623398598018, | |
| "grad_norm": 3.7423527240753174, | |
| "learning_rate": 2.040172987215893e-06, | |
| "loss": 0.4467, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 0.574329224075417, | |
| "grad_norm": 2.975794792175293, | |
| "learning_rate": 2.032470682721042e-06, | |
| "loss": 0.4223, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 0.5752961082910322, | |
| "grad_norm": 3.9564762115478516, | |
| "learning_rate": 2.024772974149741e-06, | |
| "loss": 0.5339, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 0.5762629925066474, | |
| "grad_norm": 3.652500629425049, | |
| "learning_rate": 2.0170799371722828e-06, | |
| "loss": 0.4471, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 0.5772298767222626, | |
| "grad_norm": 3.6535003185272217, | |
| "learning_rate": 2.0093916474130354e-06, | |
| "loss": 0.5303, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 0.5781967609378776, | |
| "grad_norm": 3.983417510986328, | |
| "learning_rate": 2.0017081804497023e-06, | |
| "loss": 0.4422, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 0.5791636451534928, | |
| "grad_norm": 3.6490795612335205, | |
| "learning_rate": 1.9940296118125776e-06, | |
| "loss": 0.4801, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 0.580130529369108, | |
| "grad_norm": 3.816997528076172, | |
| "learning_rate": 1.986356016983804e-06, | |
| "loss": 0.45, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.5810974135847232, | |
| "grad_norm": 3.5687811374664307, | |
| "learning_rate": 1.9786874713966293e-06, | |
| "loss": 0.5209, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 0.5820642978003384, | |
| "grad_norm": 3.970633029937744, | |
| "learning_rate": 1.9710240504346664e-06, | |
| "loss": 0.5093, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 0.5830311820159536, | |
| "grad_norm": 3.7453064918518066, | |
| "learning_rate": 1.9633658294311535e-06, | |
| "loss": 0.4749, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 0.5839980662315688, | |
| "grad_norm": 3.2988827228546143, | |
| "learning_rate": 1.9557128836682107e-06, | |
| "loss": 0.3784, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 0.584964950447184, | |
| "grad_norm": 3.251164674758911, | |
| "learning_rate": 1.9480652883761007e-06, | |
| "loss": 0.4132, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 0.5859318346627991, | |
| "grad_norm": 3.453906297683716, | |
| "learning_rate": 1.9404231187324903e-06, | |
| "loss": 0.4589, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 0.5868987188784143, | |
| "grad_norm": 3.687375783920288, | |
| "learning_rate": 1.9327864498617114e-06, | |
| "loss": 0.5179, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 0.5878656030940295, | |
| "grad_norm": 3.5259013175964355, | |
| "learning_rate": 1.925155356834022e-06, | |
| "loss": 0.4519, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 0.5888324873096447, | |
| "grad_norm": 3.542691469192505, | |
| "learning_rate": 1.9175299146648672e-06, | |
| "loss": 0.4736, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 0.5897993715252599, | |
| "grad_norm": 3.4081952571868896, | |
| "learning_rate": 1.9099101983141434e-06, | |
| "loss": 0.4722, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.590766255740875, | |
| "grad_norm": 3.5784544944763184, | |
| "learning_rate": 1.9022962826854619e-06, | |
| "loss": 0.5243, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 0.5917331399564902, | |
| "grad_norm": 3.597616672515869, | |
| "learning_rate": 1.8946882426254103e-06, | |
| "loss": 0.4564, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 0.5927000241721054, | |
| "grad_norm": 3.7966713905334473, | |
| "learning_rate": 1.887086152922818e-06, | |
| "loss": 0.5083, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 0.5936669083877205, | |
| "grad_norm": 3.926304817199707, | |
| "learning_rate": 1.879490088308021e-06, | |
| "loss": 0.4445, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 0.5946337926033357, | |
| "grad_norm": 3.6007909774780273, | |
| "learning_rate": 1.8719001234521283e-06, | |
| "loss": 0.4761, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 0.5956006768189509, | |
| "grad_norm": 3.7344369888305664, | |
| "learning_rate": 1.8643163329662844e-06, | |
| "loss": 0.4451, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 0.5965675610345661, | |
| "grad_norm": 3.4734723567962646, | |
| "learning_rate": 1.8567387914009432e-06, | |
| "loss": 0.4302, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 0.5975344452501813, | |
| "grad_norm": 3.6221048831939697, | |
| "learning_rate": 1.849167573245123e-06, | |
| "loss": 0.4603, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 0.5985013294657965, | |
| "grad_norm": 3.7050702571868896, | |
| "learning_rate": 1.8416027529256885e-06, | |
| "loss": 0.4371, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 0.5994682136814117, | |
| "grad_norm": 3.6579859256744385, | |
| "learning_rate": 1.8340444048066075e-06, | |
| "loss": 0.4887, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.6004350978970269, | |
| "grad_norm": 3.98431658744812, | |
| "learning_rate": 1.8264926031882274e-06, | |
| "loss": 0.474, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 0.6014019821126421, | |
| "grad_norm": 3.3613712787628174, | |
| "learning_rate": 1.8189474223065392e-06, | |
| "loss": 0.4559, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 0.6023688663282571, | |
| "grad_norm": 3.6685123443603516, | |
| "learning_rate": 1.8114089363324525e-06, | |
| "loss": 0.3974, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 0.6033357505438723, | |
| "grad_norm": 3.8673484325408936, | |
| "learning_rate": 1.8038772193710646e-06, | |
| "loss": 0.4535, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 0.6043026347594875, | |
| "grad_norm": 3.636211395263672, | |
| "learning_rate": 1.7963523454609317e-06, | |
| "loss": 0.482, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 0.6052695189751027, | |
| "grad_norm": 3.6469950675964355, | |
| "learning_rate": 1.7888343885733381e-06, | |
| "loss": 0.4359, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 0.6062364031907179, | |
| "grad_norm": 3.870375633239746, | |
| "learning_rate": 1.7813234226115767e-06, | |
| "loss": 0.4971, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 0.6072032874063331, | |
| "grad_norm": 3.4417426586151123, | |
| "learning_rate": 1.773819521410215e-06, | |
| "loss": 0.4375, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 0.6081701716219483, | |
| "grad_norm": 3.7465872764587402, | |
| "learning_rate": 1.766322758734374e-06, | |
| "loss": 0.4631, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 0.6091370558375635, | |
| "grad_norm": 3.620347499847412, | |
| "learning_rate": 1.7588332082789995e-06, | |
| "loss": 0.4412, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.6101039400531786, | |
| "grad_norm": 3.565495014190674, | |
| "learning_rate": 1.75135094366814e-06, | |
| "loss": 0.4535, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 0.6110708242687938, | |
| "grad_norm": 3.6394903659820557, | |
| "learning_rate": 1.7438760384542242e-06, | |
| "loss": 0.4759, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 0.612037708484409, | |
| "grad_norm": 4.002828598022461, | |
| "learning_rate": 1.7364085661173346e-06, | |
| "loss": 0.4823, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 0.6130045927000242, | |
| "grad_norm": 3.7082462310791016, | |
| "learning_rate": 1.7289486000644857e-06, | |
| "loss": 0.4105, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 0.6139714769156394, | |
| "grad_norm": 4.062910079956055, | |
| "learning_rate": 1.721496213628906e-06, | |
| "loss": 0.4587, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 0.6149383611312546, | |
| "grad_norm": 3.607834577560425, | |
| "learning_rate": 1.7140514800693125e-06, | |
| "loss": 0.4679, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 0.6159052453468697, | |
| "grad_norm": 3.3181495666503906, | |
| "learning_rate": 1.7066144725691933e-06, | |
| "loss": 0.4362, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 0.6168721295624849, | |
| "grad_norm": 4.290891170501709, | |
| "learning_rate": 1.6991852642360898e-06, | |
| "loss": 0.4636, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 0.6178390137781, | |
| "grad_norm": 3.491584539413452, | |
| "learning_rate": 1.6917639281008703e-06, | |
| "loss": 0.4388, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 0.6188058979937152, | |
| "grad_norm": 3.530135154724121, | |
| "learning_rate": 1.6843505371170226e-06, | |
| "loss": 0.4667, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.6197727822093304, | |
| "grad_norm": 3.9121077060699463, | |
| "learning_rate": 1.6769451641599305e-06, | |
| "loss": 0.5047, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 0.6207396664249456, | |
| "grad_norm": 3.8880884647369385, | |
| "learning_rate": 1.6695478820261573e-06, | |
| "loss": 0.4943, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 0.6217065506405608, | |
| "grad_norm": 3.671125888824463, | |
| "learning_rate": 1.6621587634327328e-06, | |
| "loss": 0.5352, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 0.622673434856176, | |
| "grad_norm": 3.2758569717407227, | |
| "learning_rate": 1.6547778810164366e-06, | |
| "loss": 0.4389, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 0.6236403190717912, | |
| "grad_norm": 4.217796325683594, | |
| "learning_rate": 1.647405307333085e-06, | |
| "loss": 0.5478, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 0.6246072032874064, | |
| "grad_norm": 3.5154519081115723, | |
| "learning_rate": 1.6400411148568196e-06, | |
| "loss": 0.394, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 0.6255740875030215, | |
| "grad_norm": 3.519857883453369, | |
| "learning_rate": 1.6326853759793878e-06, | |
| "loss": 0.4146, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 0.6265409717186367, | |
| "grad_norm": 3.5776000022888184, | |
| "learning_rate": 1.6253381630094412e-06, | |
| "loss": 0.4827, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 0.6275078559342518, | |
| "grad_norm": 4.16077995300293, | |
| "learning_rate": 1.6179995481718165e-06, | |
| "loss": 0.5614, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 0.628474740149867, | |
| "grad_norm": 3.562154769897461, | |
| "learning_rate": 1.610669603606831e-06, | |
| "loss": 0.4778, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.6294416243654822, | |
| "grad_norm": 3.7697620391845703, | |
| "learning_rate": 1.6033484013695688e-06, | |
| "loss": 0.4292, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 0.6304085085810974, | |
| "grad_norm": 3.4476921558380127, | |
| "learning_rate": 1.5960360134291759e-06, | |
| "loss": 0.4513, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 0.6313753927967126, | |
| "grad_norm": 3.656102180480957, | |
| "learning_rate": 1.588732511668153e-06, | |
| "loss": 0.4747, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 0.6323422770123278, | |
| "grad_norm": 3.675884962081909, | |
| "learning_rate": 1.5814379678816471e-06, | |
| "loss": 0.4267, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 0.633309161227943, | |
| "grad_norm": 3.7877371311187744, | |
| "learning_rate": 1.5741524537767427e-06, | |
| "loss": 0.4905, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 0.6342760454435581, | |
| "grad_norm": 3.5091817378997803, | |
| "learning_rate": 1.566876040971765e-06, | |
| "loss": 0.4275, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 0.6352429296591733, | |
| "grad_norm": 3.42677903175354, | |
| "learning_rate": 1.5596088009955695e-06, | |
| "loss": 0.4384, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 0.6362098138747885, | |
| "grad_norm": 3.765990972518921, | |
| "learning_rate": 1.5523508052868402e-06, | |
| "loss": 0.4993, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 0.6371766980904037, | |
| "grad_norm": 3.590433120727539, | |
| "learning_rate": 1.5451021251933895e-06, | |
| "loss": 0.432, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 0.6381435823060189, | |
| "grad_norm": 3.3445186614990234, | |
| "learning_rate": 1.5378628319714514e-06, | |
| "loss": 0.4175, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.6391104665216341, | |
| "grad_norm": 3.357813596725464, | |
| "learning_rate": 1.5306329967849887e-06, | |
| "loss": 0.412, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 0.6400773507372493, | |
| "grad_norm": 4.022750377655029, | |
| "learning_rate": 1.5234126907049878e-06, | |
| "loss": 0.3955, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 0.6410442349528644, | |
| "grad_norm": 3.672391414642334, | |
| "learning_rate": 1.5162019847087616e-06, | |
| "loss": 0.4514, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 0.6420111191684795, | |
| "grad_norm": 4.018763065338135, | |
| "learning_rate": 1.509000949679252e-06, | |
| "loss": 0.4605, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 0.6429780033840947, | |
| "grad_norm": 3.6761765480041504, | |
| "learning_rate": 1.5018096564043333e-06, | |
| "loss": 0.4994, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 0.6439448875997099, | |
| "grad_norm": 3.574371576309204, | |
| "learning_rate": 1.4946281755761153e-06, | |
| "loss": 0.4411, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 0.6449117718153251, | |
| "grad_norm": 3.484593629837036, | |
| "learning_rate": 1.4874565777902518e-06, | |
| "loss": 0.4839, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 0.6458786560309403, | |
| "grad_norm": 3.7389230728149414, | |
| "learning_rate": 1.4802949335452387e-06, | |
| "loss": 0.4805, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 0.6468455402465555, | |
| "grad_norm": 3.800123691558838, | |
| "learning_rate": 1.4731433132417316e-06, | |
| "loss": 0.4202, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 0.6478124244621707, | |
| "grad_norm": 4.061914920806885, | |
| "learning_rate": 1.466001787181846e-06, | |
| "loss": 0.4681, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.6487793086777859, | |
| "grad_norm": 3.831731081008911, | |
| "learning_rate": 1.4588704255684697e-06, | |
| "loss": 0.4506, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 0.649746192893401, | |
| "grad_norm": 3.3978049755096436, | |
| "learning_rate": 1.451749298504568e-06, | |
| "loss": 0.4754, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 0.6507130771090162, | |
| "grad_norm": 3.271028518676758, | |
| "learning_rate": 1.4446384759925024e-06, | |
| "loss": 0.4375, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 0.6516799613246314, | |
| "grad_norm": 3.7137019634246826, | |
| "learning_rate": 1.4375380279333378e-06, | |
| "loss": 0.5393, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 0.6526468455402465, | |
| "grad_norm": 3.694547176361084, | |
| "learning_rate": 1.4304480241261529e-06, | |
| "loss": 0.5164, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 0.6536137297558617, | |
| "grad_norm": 3.5363903045654297, | |
| "learning_rate": 1.4233685342673602e-06, | |
| "loss": 0.447, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 0.6545806139714769, | |
| "grad_norm": 3.400479555130005, | |
| "learning_rate": 1.4162996279500158e-06, | |
| "loss": 0.4502, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 0.6555474981870921, | |
| "grad_norm": 3.27048397064209, | |
| "learning_rate": 1.409241374663136e-06, | |
| "loss": 0.4497, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 0.6565143824027073, | |
| "grad_norm": 3.7610373497009277, | |
| "learning_rate": 1.4021938437910181e-06, | |
| "loss": 0.4564, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 0.6574812666183224, | |
| "grad_norm": 3.84147572517395, | |
| "learning_rate": 1.3951571046125546e-06, | |
| "loss": 0.493, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.6584481508339376, | |
| "grad_norm": 3.6611154079437256, | |
| "learning_rate": 1.388131226300552e-06, | |
| "loss": 0.3995, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 0.6594150350495528, | |
| "grad_norm": 3.796962261199951, | |
| "learning_rate": 1.3811162779210512e-06, | |
| "loss": 0.5106, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 0.660381919265168, | |
| "grad_norm": 3.6814441680908203, | |
| "learning_rate": 1.374112328432652e-06, | |
| "loss": 0.522, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 0.6613488034807832, | |
| "grad_norm": 3.965012788772583, | |
| "learning_rate": 1.3671194466858335e-06, | |
| "loss": 0.4978, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 0.6623156876963984, | |
| "grad_norm": 3.3158814907073975, | |
| "learning_rate": 1.3601377014222688e-06, | |
| "loss": 0.4425, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 0.6632825719120136, | |
| "grad_norm": 3.8214852809906006, | |
| "learning_rate": 1.3531671612741653e-06, | |
| "loss": 0.5121, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 0.6642494561276288, | |
| "grad_norm": 3.924567222595215, | |
| "learning_rate": 1.3462078947635781e-06, | |
| "loss": 0.5657, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 0.665216340343244, | |
| "grad_norm": 3.8759005069732666, | |
| "learning_rate": 1.3392599703017393e-06, | |
| "loss": 0.4754, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 0.666183224558859, | |
| "grad_norm": 3.805741548538208, | |
| "learning_rate": 1.3323234561883847e-06, | |
| "loss": 0.498, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 0.6671501087744742, | |
| "grad_norm": 3.289720296859741, | |
| "learning_rate": 1.3253984206110881e-06, | |
| "loss": 0.4543, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.6681169929900894, | |
| "grad_norm": 3.9015278816223145, | |
| "learning_rate": 1.318484931644582e-06, | |
| "loss": 0.5214, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 0.6690838772057046, | |
| "grad_norm": 3.7777562141418457, | |
| "learning_rate": 1.311583057250096e-06, | |
| "loss": 0.4999, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 0.6700507614213198, | |
| "grad_norm": 3.9760212898254395, | |
| "learning_rate": 1.3046928652746833e-06, | |
| "loss": 0.5266, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 0.671017645636935, | |
| "grad_norm": 4.030066967010498, | |
| "learning_rate": 1.297814423450559e-06, | |
| "loss": 0.4069, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 0.6719845298525502, | |
| "grad_norm": 3.4443728923797607, | |
| "learning_rate": 1.2909477993944286e-06, | |
| "loss": 0.4446, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 0.6729514140681654, | |
| "grad_norm": 3.7695109844207764, | |
| "learning_rate": 1.284093060606829e-06, | |
| "loss": 0.514, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 0.6739182982837805, | |
| "grad_norm": 3.507645606994629, | |
| "learning_rate": 1.2772502744714592e-06, | |
| "loss": 0.4932, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 0.6748851824993957, | |
| "grad_norm": 3.8569910526275635, | |
| "learning_rate": 1.2704195082545217e-06, | |
| "loss": 0.4999, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 0.6758520667150109, | |
| "grad_norm": 3.4546616077423096, | |
| "learning_rate": 1.2636008291040618e-06, | |
| "loss": 0.3947, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 0.676818950930626, | |
| "grad_norm": 3.5067853927612305, | |
| "learning_rate": 1.2567943040493059e-06, | |
| "loss": 0.4806, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.6777858351462412, | |
| "grad_norm": 3.6664950847625732, | |
| "learning_rate": 1.2500000000000007e-06, | |
| "loss": 0.4407, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 0.6787527193618564, | |
| "grad_norm": 3.515747308731079, | |
| "learning_rate": 1.243217983745758e-06, | |
| "loss": 0.4741, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 0.6797196035774716, | |
| "grad_norm": 3.3077144622802734, | |
| "learning_rate": 1.236448321955401e-06, | |
| "loss": 0.4433, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 0.6806864877930868, | |
| "grad_norm": 3.554198741912842, | |
| "learning_rate": 1.2296910811763008e-06, | |
| "loss": 0.4852, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 0.6816533720087019, | |
| "grad_norm": 3.428964138031006, | |
| "learning_rate": 1.222946327833731e-06, | |
| "loss": 0.436, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 0.6826202562243171, | |
| "grad_norm": 3.696312665939331, | |
| "learning_rate": 1.2162141282302078e-06, | |
| "loss": 0.4375, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 0.6835871404399323, | |
| "grad_norm": 3.7810776233673096, | |
| "learning_rate": 1.2094945485448424e-06, | |
| "loss": 0.5128, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 0.6845540246555475, | |
| "grad_norm": 3.7111852169036865, | |
| "learning_rate": 1.2027876548326898e-06, | |
| "loss": 0.4304, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 0.6855209088711627, | |
| "grad_norm": 3.6479649543762207, | |
| "learning_rate": 1.196093513024099e-06, | |
| "loss": 0.5147, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 0.6864877930867779, | |
| "grad_norm": 3.8986546993255615, | |
| "learning_rate": 1.1894121889240638e-06, | |
| "loss": 0.4977, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.6874546773023931, | |
| "grad_norm": 3.825770378112793, | |
| "learning_rate": 1.182743748211576e-06, | |
| "loss": 0.3706, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 0.6884215615180083, | |
| "grad_norm": 3.962735891342163, | |
| "learning_rate": 1.1760882564389823e-06, | |
| "loss": 0.4488, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 0.6893884457336233, | |
| "grad_norm": 3.5851974487304688, | |
| "learning_rate": 1.1694457790313403e-06, | |
| "loss": 0.4938, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 0.6903553299492385, | |
| "grad_norm": 3.4636547565460205, | |
| "learning_rate": 1.1628163812857672e-06, | |
| "loss": 0.4451, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 0.6913222141648537, | |
| "grad_norm": 3.505995750427246, | |
| "learning_rate": 1.15620012837081e-06, | |
| "loss": 0.435, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 0.6922890983804689, | |
| "grad_norm": 3.468104124069214, | |
| "learning_rate": 1.1495970853257963e-06, | |
| "loss": 0.5059, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 0.6932559825960841, | |
| "grad_norm": 3.6698925495147705, | |
| "learning_rate": 1.1430073170601968e-06, | |
| "loss": 0.4358, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 0.6942228668116993, | |
| "grad_norm": 3.6901962757110596, | |
| "learning_rate": 1.1364308883529913e-06, | |
| "loss": 0.4783, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 0.6951897510273145, | |
| "grad_norm": 3.789227247238159, | |
| "learning_rate": 1.1298678638520247e-06, | |
| "loss": 0.4928, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 0.6961566352429297, | |
| "grad_norm": 3.7630035877227783, | |
| "learning_rate": 1.1233183080733764e-06, | |
| "loss": 0.4331, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.6971235194585449, | |
| "grad_norm": 3.66995906829834, | |
| "learning_rate": 1.1167822854007265e-06, | |
| "loss": 0.4984, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 0.69809040367416, | |
| "grad_norm": 3.4906699657440186, | |
| "learning_rate": 1.1102598600847222e-06, | |
| "loss": 0.5019, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 0.6990572878897752, | |
| "grad_norm": 4.130275726318359, | |
| "learning_rate": 1.1037510962423425e-06, | |
| "loss": 0.4758, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 0.7000241721053904, | |
| "grad_norm": 3.688877820968628, | |
| "learning_rate": 1.0972560578562715e-06, | |
| "loss": 0.3597, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 0.7009910563210056, | |
| "grad_norm": 3.8207924365997314, | |
| "learning_rate": 1.0907748087742716e-06, | |
| "loss": 0.4928, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 0.7019579405366208, | |
| "grad_norm": 3.633645534515381, | |
| "learning_rate": 1.0843074127085506e-06, | |
| "loss": 0.4764, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 0.7029248247522359, | |
| "grad_norm": 4.134641170501709, | |
| "learning_rate": 1.0778539332351374e-06, | |
| "loss": 0.4815, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 0.7038917089678511, | |
| "grad_norm": 3.7726616859436035, | |
| "learning_rate": 1.0714144337932588e-06, | |
| "loss": 0.4984, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 0.7048585931834663, | |
| "grad_norm": 3.6876401901245117, | |
| "learning_rate": 1.0649889776847161e-06, | |
| "loss": 0.4851, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 0.7058254773990814, | |
| "grad_norm": 3.7998340129852295, | |
| "learning_rate": 1.0585776280732585e-06, | |
| "loss": 0.463, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.7067923616146966, | |
| "grad_norm": 3.6184780597686768, | |
| "learning_rate": 1.0521804479839651e-06, | |
| "loss": 0.4611, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 0.7077592458303118, | |
| "grad_norm": 3.5277678966522217, | |
| "learning_rate": 1.0457975003026278e-06, | |
| "loss": 0.4036, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 0.708726130045927, | |
| "grad_norm": 3.882558822631836, | |
| "learning_rate": 1.0394288477751274e-06, | |
| "loss": 0.4848, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 0.7096930142615422, | |
| "grad_norm": 3.571218729019165, | |
| "learning_rate": 1.0330745530068235e-06, | |
| "loss": 0.4654, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 0.7106598984771574, | |
| "grad_norm": 3.78566312789917, | |
| "learning_rate": 1.0267346784619324e-06, | |
| "loss": 0.3912, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 0.7116267826927726, | |
| "grad_norm": 3.657423496246338, | |
| "learning_rate": 1.0204092864629173e-06, | |
| "loss": 0.4067, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 0.7125936669083878, | |
| "grad_norm": 3.9299519062042236, | |
| "learning_rate": 1.0140984391898744e-06, | |
| "loss": 0.4697, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 0.7135605511240029, | |
| "grad_norm": 3.5789129734039307, | |
| "learning_rate": 1.0078021986799238e-06, | |
| "loss": 0.4346, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 0.714527435339618, | |
| "grad_norm": 3.3570632934570312, | |
| "learning_rate": 1.0015206268265948e-06, | |
| "loss": 0.388, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 0.7154943195552332, | |
| "grad_norm": 3.806309938430786, | |
| "learning_rate": 9.952537853792204e-07, | |
| "loss": 0.481, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.7164612037708484, | |
| "grad_norm": 4.444820880889893, | |
| "learning_rate": 9.890017359423326e-07, | |
| "loss": 0.4859, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 0.7174280879864636, | |
| "grad_norm": 3.6907238960266113, | |
| "learning_rate": 9.827645399750539e-07, | |
| "loss": 0.4662, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 0.7183949722020788, | |
| "grad_norm": 3.746755361557007, | |
| "learning_rate": 9.765422587904919e-07, | |
| "loss": 0.4504, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 0.719361856417694, | |
| "grad_norm": 3.8143019676208496, | |
| "learning_rate": 9.703349535551387e-07, | |
| "loss": 0.4632, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 0.7203287406333092, | |
| "grad_norm": 3.932626485824585, | |
| "learning_rate": 9.641426852882717e-07, | |
| "loss": 0.4846, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 0.7212956248489243, | |
| "grad_norm": 3.918102502822876, | |
| "learning_rate": 9.579655148613478e-07, | |
| "loss": 0.5095, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 0.7222625090645395, | |
| "grad_norm": 3.4765045642852783, | |
| "learning_rate": 9.518035029974127e-07, | |
| "loss": 0.4269, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 0.7232293932801547, | |
| "grad_norm": 3.6121413707733154, | |
| "learning_rate": 9.456567102704961e-07, | |
| "loss": 0.412, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 0.7241962774957699, | |
| "grad_norm": 3.615751028060913, | |
| "learning_rate": 9.395251971050206e-07, | |
| "loss": 0.4399, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 0.7251631617113851, | |
| "grad_norm": 3.4750213623046875, | |
| "learning_rate": 9.334090237752095e-07, | |
| "loss": 0.452, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.7261300459270003, | |
| "grad_norm": 3.536120891571045, | |
| "learning_rate": 9.273082504044903e-07, | |
| "loss": 0.4228, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 0.7270969301426154, | |
| "grad_norm": 3.8346316814422607, | |
| "learning_rate": 9.21222936964905e-07, | |
| "loss": 0.5377, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 0.7280638143582306, | |
| "grad_norm": 3.4409730434417725, | |
| "learning_rate": 9.151531432765204e-07, | |
| "loss": 0.3675, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 0.7290306985738458, | |
| "grad_norm": 3.424736261367798, | |
| "learning_rate": 9.090989290068425e-07, | |
| "loss": 0.4304, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 0.7299975827894609, | |
| "grad_norm": 3.5875778198242188, | |
| "learning_rate": 9.030603536702254e-07, | |
| "loss": 0.4541, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 0.7309644670050761, | |
| "grad_norm": 3.5809242725372314, | |
| "learning_rate": 8.970374766272916e-07, | |
| "loss": 0.4664, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 0.7319313512206913, | |
| "grad_norm": 3.7358856201171875, | |
| "learning_rate": 8.910303570843423e-07, | |
| "loss": 0.4463, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 0.7328982354363065, | |
| "grad_norm": 3.236128807067871, | |
| "learning_rate": 8.850390540927825e-07, | |
| "loss": 0.4242, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 0.7338651196519217, | |
| "grad_norm": 3.585001230239868, | |
| "learning_rate": 8.790636265485333e-07, | |
| "loss": 0.4475, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 0.7348320038675369, | |
| "grad_norm": 4.007416248321533, | |
| "learning_rate": 8.731041331914591e-07, | |
| "loss": 0.4996, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.7357988880831521, | |
| "grad_norm": 3.5270919799804688, | |
| "learning_rate": 8.67160632604786e-07, | |
| "loss": 0.412, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 0.7367657722987673, | |
| "grad_norm": 3.9546616077423096, | |
| "learning_rate": 8.612331832145269e-07, | |
| "loss": 0.4678, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 0.7377326565143824, | |
| "grad_norm": 3.887314558029175, | |
| "learning_rate": 8.553218432889091e-07, | |
| "loss": 0.4595, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 0.7386995407299976, | |
| "grad_norm": 3.572169303894043, | |
| "learning_rate": 8.49426670937801e-07, | |
| "loss": 0.3734, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 0.7396664249456127, | |
| "grad_norm": 3.3383076190948486, | |
| "learning_rate": 8.435477241121354e-07, | |
| "loss": 0.396, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 0.7406333091612279, | |
| "grad_norm": 3.5187559127807617, | |
| "learning_rate": 8.376850606033487e-07, | |
| "loss": 0.4395, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 0.7416001933768431, | |
| "grad_norm": 3.5995166301727295, | |
| "learning_rate": 8.31838738042808e-07, | |
| "loss": 0.4179, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 0.7425670775924583, | |
| "grad_norm": 3.620683193206787, | |
| "learning_rate": 8.260088139012435e-07, | |
| "loss": 0.4468, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 0.7435339618080735, | |
| "grad_norm": 3.868516445159912, | |
| "learning_rate": 8.201953454881844e-07, | |
| "loss": 0.5147, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 0.7445008460236887, | |
| "grad_norm": 3.560850143432617, | |
| "learning_rate": 8.143983899513988e-07, | |
| "loss": 0.4294, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.7454677302393038, | |
| "grad_norm": 3.773320436477661, | |
| "learning_rate": 8.086180042763284e-07, | |
| "loss": 0.4832, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 0.746434614454919, | |
| "grad_norm": 3.5478060245513916, | |
| "learning_rate": 8.028542452855281e-07, | |
| "loss": 0.45, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 0.7474014986705342, | |
| "grad_norm": 3.715156316757202, | |
| "learning_rate": 7.971071696381089e-07, | |
| "loss": 0.4849, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 0.7483683828861494, | |
| "grad_norm": 3.5130743980407715, | |
| "learning_rate": 7.913768338291821e-07, | |
| "loss": 0.456, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 0.7493352671017646, | |
| "grad_norm": 3.6129679679870605, | |
| "learning_rate": 7.856632941893e-07, | |
| "loss": 0.3651, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 0.7503021513173798, | |
| "grad_norm": 3.693694591522217, | |
| "learning_rate": 7.799666068839068e-07, | |
| "loss": 0.4264, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 0.751269035532995, | |
| "grad_norm": 3.7626357078552246, | |
| "learning_rate": 7.74286827912785e-07, | |
| "loss": 0.4372, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 0.7522359197486101, | |
| "grad_norm": 3.6865146160125732, | |
| "learning_rate": 7.686240131094993e-07, | |
| "loss": 0.514, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 0.7532028039642252, | |
| "grad_norm": 3.815524101257324, | |
| "learning_rate": 7.629782181408574e-07, | |
| "loss": 0.4652, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 0.7541696881798404, | |
| "grad_norm": 3.8076345920562744, | |
| "learning_rate": 7.573494985063579e-07, | |
| "loss": 0.479, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.7551365723954556, | |
| "grad_norm": 3.990732192993164, | |
| "learning_rate": 7.517379095376418e-07, | |
| "loss": 0.5049, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 0.7561034566110708, | |
| "grad_norm": 3.6248281002044678, | |
| "learning_rate": 7.461435063979533e-07, | |
| "loss": 0.4247, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 0.757070340826686, | |
| "grad_norm": 4.307629585266113, | |
| "learning_rate": 7.405663440815968e-07, | |
| "loss": 0.4989, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 0.7580372250423012, | |
| "grad_norm": 3.800755500793457, | |
| "learning_rate": 7.350064774133936e-07, | |
| "loss": 0.4744, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 0.7590041092579164, | |
| "grad_norm": 3.5235469341278076, | |
| "learning_rate": 7.294639610481461e-07, | |
| "loss": 0.4778, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 0.7599709934735316, | |
| "grad_norm": 3.831073760986328, | |
| "learning_rate": 7.239388494700974e-07, | |
| "loss": 0.4422, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 0.7609378776891468, | |
| "grad_norm": 3.861233949661255, | |
| "learning_rate": 7.184311969924002e-07, | |
| "loss": 0.465, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 0.7619047619047619, | |
| "grad_norm": 4.040560722351074, | |
| "learning_rate": 7.129410577565766e-07, | |
| "loss": 0.5075, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 0.7628716461203771, | |
| "grad_norm": 3.890537977218628, | |
| "learning_rate": 7.074684857319928e-07, | |
| "loss": 0.4742, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 0.7638385303359922, | |
| "grad_norm": 3.9048914909362793, | |
| "learning_rate": 7.020135347153229e-07, | |
| "loss": 0.4557, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.7648054145516074, | |
| "grad_norm": 4.083683490753174, | |
| "learning_rate": 6.965762583300223e-07, | |
| "loss": 0.459, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 0.7657722987672226, | |
| "grad_norm": 3.9095261096954346, | |
| "learning_rate": 6.911567100258021e-07, | |
| "loss": 0.4768, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 0.7667391829828378, | |
| "grad_norm": 4.056056976318359, | |
| "learning_rate": 6.85754943078103e-07, | |
| "loss": 0.5037, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 0.767706067198453, | |
| "grad_norm": 3.6234371662139893, | |
| "learning_rate": 6.803710105875661e-07, | |
| "loss": 0.4934, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 0.7686729514140682, | |
| "grad_norm": 3.658932685852051, | |
| "learning_rate": 6.750049654795199e-07, | |
| "loss": 0.5005, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 0.7696398356296833, | |
| "grad_norm": 3.7866780757904053, | |
| "learning_rate": 6.696568605034559e-07, | |
| "loss": 0.4743, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 0.7706067198452985, | |
| "grad_norm": 3.559441328048706, | |
| "learning_rate": 6.643267482325061e-07, | |
| "loss": 0.4083, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 0.7715736040609137, | |
| "grad_norm": 3.7595908641815186, | |
| "learning_rate": 6.590146810629347e-07, | |
| "loss": 0.4967, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 0.7725404882765289, | |
| "grad_norm": 3.7670183181762695, | |
| "learning_rate": 6.537207112136143e-07, | |
| "loss": 0.4929, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 0.7735073724921441, | |
| "grad_norm": 4.016094207763672, | |
| "learning_rate": 6.484448907255181e-07, | |
| "loss": 0.4798, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.7744742567077593, | |
| "grad_norm": 3.9730477333068848, | |
| "learning_rate": 6.431872714612072e-07, | |
| "loss": 0.5017, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 0.7754411409233745, | |
| "grad_norm": 4.174206733703613, | |
| "learning_rate": 6.379479051043208e-07, | |
| "loss": 0.4918, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 0.7764080251389897, | |
| "grad_norm": 3.839644193649292, | |
| "learning_rate": 6.327268431590664e-07, | |
| "loss": 0.4634, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 0.7773749093546047, | |
| "grad_norm": 3.615432024002075, | |
| "learning_rate": 6.275241369497142e-07, | |
| "loss": 0.4287, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 0.7783417935702199, | |
| "grad_norm": 3.912182569503784, | |
| "learning_rate": 6.223398376200956e-07, | |
| "loss": 0.4709, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 0.7793086777858351, | |
| "grad_norm": 3.828120470046997, | |
| "learning_rate": 6.171739961330983e-07, | |
| "loss": 0.4846, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 0.7802755620014503, | |
| "grad_norm": 3.653789758682251, | |
| "learning_rate": 6.1202666327016e-07, | |
| "loss": 0.4179, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 0.7812424462170655, | |
| "grad_norm": 3.5782525539398193, | |
| "learning_rate": 6.068978896307795e-07, | |
| "loss": 0.4177, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 0.7822093304326807, | |
| "grad_norm": 3.655728340148926, | |
| "learning_rate": 6.017877256320132e-07, | |
| "loss": 0.4135, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 0.7831762146482959, | |
| "grad_norm": 3.3964128494262695, | |
| "learning_rate": 5.966962215079786e-07, | |
| "loss": 0.3804, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.7841430988639111, | |
| "grad_norm": 3.8653347492218018, | |
| "learning_rate": 5.916234273093624e-07, | |
| "loss": 0.5036, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 0.7851099830795262, | |
| "grad_norm": 3.8347480297088623, | |
| "learning_rate": 5.865693929029306e-07, | |
| "loss": 0.4827, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 0.7860768672951414, | |
| "grad_norm": 3.4399571418762207, | |
| "learning_rate": 5.815341679710327e-07, | |
| "loss": 0.475, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 0.7870437515107566, | |
| "grad_norm": 3.6704349517822266, | |
| "learning_rate": 5.765178020111201e-07, | |
| "loss": 0.4524, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 0.7880106357263718, | |
| "grad_norm": 3.6934151649475098, | |
| "learning_rate": 5.715203443352526e-07, | |
| "loss": 0.507, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 0.788977519941987, | |
| "grad_norm": 3.8846731185913086, | |
| "learning_rate": 5.665418440696202e-07, | |
| "loss": 0.4698, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 0.7899444041576021, | |
| "grad_norm": 3.6200265884399414, | |
| "learning_rate": 5.615823501540546e-07, | |
| "loss": 0.4948, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 0.7909112883732173, | |
| "grad_norm": 3.749350070953369, | |
| "learning_rate": 5.566419113415528e-07, | |
| "loss": 0.4372, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 0.7918781725888325, | |
| "grad_norm": 4.006725788116455, | |
| "learning_rate": 5.51720576197794e-07, | |
| "loss": 0.4777, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 0.7928450568044476, | |
| "grad_norm": 3.7433784008026123, | |
| "learning_rate": 5.468183931006635e-07, | |
| "loss": 0.4171, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.7938119410200628, | |
| "grad_norm": 3.7018020153045654, | |
| "learning_rate": 5.419354102397792e-07, | |
| "loss": 0.4783, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 0.794778825235678, | |
| "grad_norm": 3.5116868019104004, | |
| "learning_rate": 5.370716756160158e-07, | |
| "loss": 0.4673, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 0.7957457094512932, | |
| "grad_norm": 3.450648069381714, | |
| "learning_rate": 5.32227237041032e-07, | |
| "loss": 0.4079, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 0.7967125936669084, | |
| "grad_norm": 3.5097784996032715, | |
| "learning_rate": 5.274021421368019e-07, | |
| "loss": 0.4189, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 0.7976794778825236, | |
| "grad_norm": 3.470055341720581, | |
| "learning_rate": 5.22596438335149e-07, | |
| "loss": 0.4872, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 0.7986463620981388, | |
| "grad_norm": 4.227003574371338, | |
| "learning_rate": 5.178101728772741e-07, | |
| "loss": 0.4725, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 0.799613246313754, | |
| "grad_norm": 4.068714618682861, | |
| "learning_rate": 5.130433928132983e-07, | |
| "loss": 0.4568, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 0.8005801305293692, | |
| "grad_norm": 4.06673526763916, | |
| "learning_rate": 5.082961450017943e-07, | |
| "loss": 0.4902, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 0.8015470147449842, | |
| "grad_norm": 3.6162238121032715, | |
| "learning_rate": 5.035684761093273e-07, | |
| "loss": 0.4429, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 0.8025138989605994, | |
| "grad_norm": 3.8663253784179688, | |
| "learning_rate": 4.988604326099999e-07, | |
| "loss": 0.4794, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.8034807831762146, | |
| "grad_norm": 3.618603467941284, | |
| "learning_rate": 4.941720607849912e-07, | |
| "loss": 0.4308, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 0.8044476673918298, | |
| "grad_norm": 3.8849265575408936, | |
| "learning_rate": 4.895034067221025e-07, | |
| "loss": 0.4932, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 0.805414551607445, | |
| "grad_norm": 3.5201117992401123, | |
| "learning_rate": 4.848545163153048e-07, | |
| "loss": 0.443, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 0.8063814358230602, | |
| "grad_norm": 3.6072754859924316, | |
| "learning_rate": 4.802254352642883e-07, | |
| "loss": 0.4063, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 0.8073483200386754, | |
| "grad_norm": 3.856928586959839, | |
| "learning_rate": 4.756162090740135e-07, | |
| "loss": 0.5103, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 0.8083152042542906, | |
| "grad_norm": 3.5394015312194824, | |
| "learning_rate": 4.7102688305426106e-07, | |
| "loss": 0.4409, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 0.8092820884699057, | |
| "grad_norm": 3.6355698108673096, | |
| "learning_rate": 4.6645750231918864e-07, | |
| "loss": 0.5113, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 0.8102489726855209, | |
| "grad_norm": 4.026984691619873, | |
| "learning_rate": 4.6190811178688843e-07, | |
| "loss": 0.4681, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 0.8112158569011361, | |
| "grad_norm": 3.748511552810669, | |
| "learning_rate": 4.5737875617894225e-07, | |
| "loss": 0.4808, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 0.8121827411167513, | |
| "grad_norm": 3.9079511165618896, | |
| "learning_rate": 4.528694800199859e-07, | |
| "loss": 0.4995, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.8131496253323665, | |
| "grad_norm": 3.926765203475952, | |
| "learning_rate": 4.4838032763726806e-07, | |
| "loss": 0.4712, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 0.8141165095479816, | |
| "grad_norm": 3.8391571044921875, | |
| "learning_rate": 4.4391134316021545e-07, | |
| "loss": 0.4801, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 0.8150833937635968, | |
| "grad_norm": 3.581597089767456, | |
| "learning_rate": 4.394625705200012e-07, | |
| "loss": 0.3686, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 0.816050277979212, | |
| "grad_norm": 3.869783401489258, | |
| "learning_rate": 4.3503405344911125e-07, | |
| "loss": 0.4921, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 0.8170171621948271, | |
| "grad_norm": 3.75410532951355, | |
| "learning_rate": 4.3062583548091256e-07, | |
| "loss": 0.4503, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 0.8179840464104423, | |
| "grad_norm": 3.647118330001831, | |
| "learning_rate": 4.2623795994922836e-07, | |
| "loss": 0.4418, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 0.8189509306260575, | |
| "grad_norm": 3.6203055381774902, | |
| "learning_rate": 4.218704699879117e-07, | |
| "loss": 0.415, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 0.8199178148416727, | |
| "grad_norm": 3.5551204681396484, | |
| "learning_rate": 4.1752340853041973e-07, | |
| "loss": 0.4879, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 0.8208846990572879, | |
| "grad_norm": 3.568030595779419, | |
| "learning_rate": 4.1319681830939124e-07, | |
| "loss": 0.4573, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 0.8218515832729031, | |
| "grad_norm": 3.600548028945923, | |
| "learning_rate": 4.0889074185623103e-07, | |
| "loss": 0.4589, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.8228184674885183, | |
| "grad_norm": 3.7248218059539795, | |
| "learning_rate": 4.0460522150068684e-07, | |
| "loss": 0.4588, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 0.8237853517041335, | |
| "grad_norm": 3.6307175159454346, | |
| "learning_rate": 4.0034029937043527e-07, | |
| "loss": 0.5157, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 0.8247522359197486, | |
| "grad_norm": 3.7944419384002686, | |
| "learning_rate": 3.9609601739066664e-07, | |
| "loss": 0.4348, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 0.8257191201353637, | |
| "grad_norm": 3.4527225494384766, | |
| "learning_rate": 3.918724172836763e-07, | |
| "loss": 0.5047, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 0.8266860043509789, | |
| "grad_norm": 3.6622514724731445, | |
| "learning_rate": 3.876695405684486e-07, | |
| "loss": 0.4555, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 0.8276528885665941, | |
| "grad_norm": 3.5915470123291016, | |
| "learning_rate": 3.8348742856025495e-07, | |
| "loss": 0.4388, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 0.8286197727822093, | |
| "grad_norm": 3.6613149642944336, | |
| "learning_rate": 3.793261223702441e-07, | |
| "loss": 0.4704, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 0.8295866569978245, | |
| "grad_norm": 3.679746389389038, | |
| "learning_rate": 3.751856629050363e-07, | |
| "loss": 0.4167, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 0.8305535412134397, | |
| "grad_norm": 3.6246674060821533, | |
| "learning_rate": 3.7106609086632635e-07, | |
| "loss": 0.4532, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 0.8315204254290549, | |
| "grad_norm": 3.590588092803955, | |
| "learning_rate": 3.669674467504808e-07, | |
| "loss": 0.458, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.8324873096446701, | |
| "grad_norm": 3.574864149093628, | |
| "learning_rate": 3.628897708481377e-07, | |
| "loss": 0.5124, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 0.8334541938602852, | |
| "grad_norm": 3.350720167160034, | |
| "learning_rate": 3.588331032438133e-07, | |
| "loss": 0.3843, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 0.8344210780759004, | |
| "grad_norm": 3.3735225200653076, | |
| "learning_rate": 3.5479748381550855e-07, | |
| "loss": 0.4444, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 0.8353879622915156, | |
| "grad_norm": 3.831509590148926, | |
| "learning_rate": 3.5078295223431534e-07, | |
| "loss": 0.493, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 0.8363548465071308, | |
| "grad_norm": 3.679081916809082, | |
| "learning_rate": 3.4678954796402624e-07, | |
| "loss": 0.4197, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 0.837321730722746, | |
| "grad_norm": 3.6159653663635254, | |
| "learning_rate": 3.4281731026074737e-07, | |
| "loss": 0.4386, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 0.8382886149383612, | |
| "grad_norm": 3.782343626022339, | |
| "learning_rate": 3.388662781725141e-07, | |
| "loss": 0.4017, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 0.8392554991539763, | |
| "grad_norm": 3.8932251930236816, | |
| "learning_rate": 3.3493649053890325e-07, | |
| "loss": 0.4875, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 0.8402223833695915, | |
| "grad_norm": 3.8822195529937744, | |
| "learning_rate": 3.310279859906565e-07, | |
| "loss": 0.4377, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 0.8411892675852066, | |
| "grad_norm": 3.878910779953003, | |
| "learning_rate": 3.271408029492948e-07, | |
| "loss": 0.3554, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.8421561518008218, | |
| "grad_norm": 3.642711877822876, | |
| "learning_rate": 3.232749796267451e-07, | |
| "loss": 0.4133, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 0.843123036016437, | |
| "grad_norm": 3.5945470333099365, | |
| "learning_rate": 3.194305540249637e-07, | |
| "loss": 0.46, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 0.8440899202320522, | |
| "grad_norm": 3.501006603240967, | |
| "learning_rate": 3.1560756393556187e-07, | |
| "loss": 0.4509, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 0.8450568044476674, | |
| "grad_norm": 3.5597221851348877, | |
| "learning_rate": 3.118060469394346e-07, | |
| "loss": 0.4517, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 0.8460236886632826, | |
| "grad_norm": 3.781863212585449, | |
| "learning_rate": 3.0802604040639034e-07, | |
| "loss": 0.4685, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 0.8469905728788978, | |
| "grad_norm": 3.9921908378601074, | |
| "learning_rate": 3.042675814947868e-07, | |
| "loss": 0.4825, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 0.847957457094513, | |
| "grad_norm": 3.7082839012145996, | |
| "learning_rate": 3.0053070715116153e-07, | |
| "loss": 0.3958, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 0.8489243413101281, | |
| "grad_norm": 4.115004062652588, | |
| "learning_rate": 2.9681545410987193e-07, | |
| "loss": 0.4948, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 0.8498912255257433, | |
| "grad_norm": 3.928007125854492, | |
| "learning_rate": 2.9312185889273147e-07, | |
| "loss": 0.3921, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 0.8508581097413584, | |
| "grad_norm": 3.7645504474639893, | |
| "learning_rate": 2.894499578086543e-07, | |
| "loss": 0.4806, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.8518249939569736, | |
| "grad_norm": 3.6392364501953125, | |
| "learning_rate": 2.8579978695329386e-07, | |
| "loss": 0.5108, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 0.8527918781725888, | |
| "grad_norm": 3.5847926139831543, | |
| "learning_rate": 2.821713822086919e-07, | |
| "loss": 0.4466, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 0.853758762388204, | |
| "grad_norm": 3.5183796882629395, | |
| "learning_rate": 2.785647792429233e-07, | |
| "loss": 0.4317, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 0.8547256466038192, | |
| "grad_norm": 3.4891810417175293, | |
| "learning_rate": 2.7498001350974605e-07, | |
| "loss": 0.4389, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 0.8556925308194344, | |
| "grad_norm": 3.8687262535095215, | |
| "learning_rate": 2.714171202482538e-07, | |
| "loss": 0.4642, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 0.8566594150350495, | |
| "grad_norm": 3.986570358276367, | |
| "learning_rate": 2.678761344825295e-07, | |
| "loss": 0.4655, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 0.8576262992506647, | |
| "grad_norm": 3.5800821781158447, | |
| "learning_rate": 2.6435709102129727e-07, | |
| "loss": 0.418, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 0.8585931834662799, | |
| "grad_norm": 3.695192813873291, | |
| "learning_rate": 2.608600244575857e-07, | |
| "loss": 0.4942, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 0.8595600676818951, | |
| "grad_norm": 3.764131546020508, | |
| "learning_rate": 2.5738496916838524e-07, | |
| "loss": 0.4745, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 0.8605269518975103, | |
| "grad_norm": 3.749889373779297, | |
| "learning_rate": 2.5393195931430884e-07, | |
| "loss": 0.4866, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.8614938361131255, | |
| "grad_norm": 3.4770333766937256, | |
| "learning_rate": 2.505010288392587e-07, | |
| "loss": 0.4979, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 0.8624607203287407, | |
| "grad_norm": 3.926590919494629, | |
| "learning_rate": 2.4709221147009105e-07, | |
| "loss": 0.4444, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 0.8634276045443559, | |
| "grad_norm": 3.7266767024993896, | |
| "learning_rate": 2.4370554071628613e-07, | |
| "loss": 0.4518, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 0.864394488759971, | |
| "grad_norm": 3.3029301166534424, | |
| "learning_rate": 2.4034104986961626e-07, | |
| "loss": 0.4383, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 0.8653613729755861, | |
| "grad_norm": 3.774355888366699, | |
| "learning_rate": 2.3699877200382026e-07, | |
| "loss": 0.4556, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 0.8663282571912013, | |
| "grad_norm": 3.818542718887329, | |
| "learning_rate": 2.336787399742793e-07, | |
| "loss": 0.4664, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 0.8672951414068165, | |
| "grad_norm": 3.992581605911255, | |
| "learning_rate": 2.303809864176909e-07, | |
| "loss": 0.4589, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 0.8682620256224317, | |
| "grad_norm": 3.4998490810394287, | |
| "learning_rate": 2.271055437517519e-07, | |
| "loss": 0.4027, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 0.8692289098380469, | |
| "grad_norm": 3.571521520614624, | |
| "learning_rate": 2.2385244417483743e-07, | |
| "loss": 0.4391, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 0.8701957940536621, | |
| "grad_norm": 3.1858935356140137, | |
| "learning_rate": 2.2062171966568263e-07, | |
| "loss": 0.4333, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.8711626782692773, | |
| "grad_norm": 3.4342987537384033, | |
| "learning_rate": 2.174134019830726e-07, | |
| "loss": 0.4417, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 0.8721295624848925, | |
| "grad_norm": 3.857280731201172, | |
| "learning_rate": 2.14227522665528e-07, | |
| "loss": 0.4099, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 0.8730964467005076, | |
| "grad_norm": 4.276398658752441, | |
| "learning_rate": 2.1106411303099455e-07, | |
| "loss": 0.4882, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 0.8740633309161228, | |
| "grad_norm": 3.4768717288970947, | |
| "learning_rate": 2.0792320417653472e-07, | |
| "loss": 0.4288, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 0.875030215131738, | |
| "grad_norm": 3.416771173477173, | |
| "learning_rate": 2.0480482697802507e-07, | |
| "loss": 0.4147, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 0.8759970993473531, | |
| "grad_norm": 3.6449015140533447, | |
| "learning_rate": 2.017090120898485e-07, | |
| "loss": 0.4636, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 0.8769639835629683, | |
| "grad_norm": 3.3985326290130615, | |
| "learning_rate": 1.986357899445976e-07, | |
| "loss": 0.4414, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 0.8779308677785835, | |
| "grad_norm": 3.730642557144165, | |
| "learning_rate": 1.9558519075276976e-07, | |
| "loss": 0.4355, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 0.8788977519941987, | |
| "grad_norm": 3.829969644546509, | |
| "learning_rate": 1.9255724450247676e-07, | |
| "loss": 0.491, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 0.8798646362098139, | |
| "grad_norm": 3.6221253871917725, | |
| "learning_rate": 1.895519809591434e-07, | |
| "loss": 0.4487, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 0.880831520425429, | |
| "grad_norm": 3.4621918201446533, | |
| "learning_rate": 1.8656942966522124e-07, | |
| "loss": 0.4012, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 0.8817984046410442, | |
| "grad_norm": 3.4926211833953857, | |
| "learning_rate": 1.836096199398929e-07, | |
| "loss": 0.4418, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 0.8827652888566594, | |
| "grad_norm": 3.554006814956665, | |
| "learning_rate": 1.8067258087878597e-07, | |
| "loss": 0.372, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 0.8837321730722746, | |
| "grad_norm": 3.646104097366333, | |
| "learning_rate": 1.7775834135368818e-07, | |
| "loss": 0.4249, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 0.8846990572878898, | |
| "grad_norm": 4.161782264709473, | |
| "learning_rate": 1.748669300122627e-07, | |
| "loss": 0.5372, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 0.885665941503505, | |
| "grad_norm": 3.7760252952575684, | |
| "learning_rate": 1.7199837527776485e-07, | |
| "loss": 0.4686, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 0.8866328257191202, | |
| "grad_norm": 3.922210693359375, | |
| "learning_rate": 1.691527053487646e-07, | |
| "loss": 0.5029, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 0.8875997099347354, | |
| "grad_norm": 3.84432315826416, | |
| "learning_rate": 1.6632994819886976e-07, | |
| "loss": 0.4649, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 0.8885665941503504, | |
| "grad_norm": 3.71447491645813, | |
| "learning_rate": 1.635301315764484e-07, | |
| "loss": 0.4049, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 0.8895334783659656, | |
| "grad_norm": 3.771190643310547, | |
| "learning_rate": 1.607532830043601e-07, | |
| "loss": 0.4949, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 0.8905003625815808, | |
| "grad_norm": 3.759092092514038, | |
| "learning_rate": 1.579994297796808e-07, | |
| "loss": 0.4791, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 0.891467246797196, | |
| "grad_norm": 3.479841470718384, | |
| "learning_rate": 1.5526859897343815e-07, | |
| "loss": 0.4447, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 0.8924341310128112, | |
| "grad_norm": 3.0901901721954346, | |
| "learning_rate": 1.5256081743034336e-07, | |
| "loss": 0.3394, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 0.8934010152284264, | |
| "grad_norm": 3.611321210861206, | |
| "learning_rate": 1.4987611176852879e-07, | |
| "loss": 0.4004, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 0.8943678994440416, | |
| "grad_norm": 3.6917665004730225, | |
| "learning_rate": 1.472145083792842e-07, | |
| "loss": 0.4116, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 0.8953347836596568, | |
| "grad_norm": 4.044205665588379, | |
| "learning_rate": 1.44576033426799e-07, | |
| "loss": 0.5017, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 0.896301667875272, | |
| "grad_norm": 3.850446939468384, | |
| "learning_rate": 1.419607128479053e-07, | |
| "loss": 0.5565, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 0.8972685520908871, | |
| "grad_norm": 3.598113775253296, | |
| "learning_rate": 1.3936857235182233e-07, | |
| "loss": 0.4689, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 0.8982354363065023, | |
| "grad_norm": 3.6002085208892822, | |
| "learning_rate": 1.3679963741990127e-07, | |
| "loss": 0.4742, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 0.8992023205221175, | |
| "grad_norm": 3.4773659706115723, | |
| "learning_rate": 1.3425393330538023e-07, | |
| "loss": 0.3849, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 0.9001692047377327, | |
| "grad_norm": 3.768770456314087, | |
| "learning_rate": 1.317314850331314e-07, | |
| "loss": 0.4926, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 0.9011360889533478, | |
| "grad_norm": 3.5853402614593506, | |
| "learning_rate": 1.292323173994167e-07, | |
| "loss": 0.4547, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 0.902102973168963, | |
| "grad_norm": 3.9394826889038086, | |
| "learning_rate": 1.2675645497164352e-07, | |
| "loss": 0.4413, | |
| "step": 933 | |
| }, | |
| { | |
| "epoch": 0.9030698573845782, | |
| "grad_norm": 4.156927108764648, | |
| "learning_rate": 1.2430392208812452e-07, | |
| "loss": 0.5271, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 0.9040367416001934, | |
| "grad_norm": 3.9245412349700928, | |
| "learning_rate": 1.2187474285783623e-07, | |
| "loss": 0.5127, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 0.9050036258158085, | |
| "grad_norm": 3.750504970550537, | |
| "learning_rate": 1.1946894116018404e-07, | |
| "loss": 0.5002, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 0.9059705100314237, | |
| "grad_norm": 3.6622493267059326, | |
| "learning_rate": 1.1708654064476743e-07, | |
| "loss": 0.5017, | |
| "step": 937 | |
| }, | |
| { | |
| "epoch": 0.9069373942470389, | |
| "grad_norm": 3.3695197105407715, | |
| "learning_rate": 1.1472756473114493e-07, | |
| "loss": 0.4487, | |
| "step": 938 | |
| }, | |
| { | |
| "epoch": 0.9079042784626541, | |
| "grad_norm": 3.628037452697754, | |
| "learning_rate": 1.1239203660860648e-07, | |
| "loss": 0.4447, | |
| "step": 939 | |
| }, | |
| { | |
| "epoch": 0.9088711626782693, | |
| "grad_norm": 3.471571922302246, | |
| "learning_rate": 1.1007997923594555e-07, | |
| "loss": 0.4211, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 0.9098380468938845, | |
| "grad_norm": 3.611616849899292, | |
| "learning_rate": 1.0779141534123127e-07, | |
| "loss": 0.4236, | |
| "step": 941 | |
| }, | |
| { | |
| "epoch": 0.9108049311094997, | |
| "grad_norm": 3.4879791736602783, | |
| "learning_rate": 1.0552636742158701e-07, | |
| "loss": 0.4596, | |
| "step": 942 | |
| }, | |
| { | |
| "epoch": 0.9117718153251149, | |
| "grad_norm": 3.495035409927368, | |
| "learning_rate": 1.0328485774296875e-07, | |
| "loss": 0.4096, | |
| "step": 943 | |
| }, | |
| { | |
| "epoch": 0.91273869954073, | |
| "grad_norm": 3.4880497455596924, | |
| "learning_rate": 1.010669083399457e-07, | |
| "loss": 0.4351, | |
| "step": 944 | |
| }, | |
| { | |
| "epoch": 0.9137055837563451, | |
| "grad_norm": 3.4250264167785645, | |
| "learning_rate": 9.887254101548422e-08, | |
| "loss": 0.4098, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 0.9146724679719603, | |
| "grad_norm": 3.6895461082458496, | |
| "learning_rate": 9.670177734073221e-08, | |
| "loss": 0.4395, | |
| "step": 946 | |
| }, | |
| { | |
| "epoch": 0.9156393521875755, | |
| "grad_norm": 3.6795530319213867, | |
| "learning_rate": 9.455463865481019e-08, | |
| "loss": 0.398, | |
| "step": 947 | |
| }, | |
| { | |
| "epoch": 0.9166062364031907, | |
| "grad_norm": 3.933271884918213, | |
| "learning_rate": 9.243114606459741e-08, | |
| "loss": 0.4955, | |
| "step": 948 | |
| }, | |
| { | |
| "epoch": 0.9175731206188059, | |
| "grad_norm": 4.0044708251953125, | |
| "learning_rate": 9.033132044452775e-08, | |
| "loss": 0.4639, | |
| "step": 949 | |
| }, | |
| { | |
| "epoch": 0.9185400048344211, | |
| "grad_norm": 3.7048990726470947, | |
| "learning_rate": 8.825518243638226e-08, | |
| "loss": 0.5338, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 0.9195068890500363, | |
| "grad_norm": 3.9390463829040527, | |
| "learning_rate": 8.620275244908826e-08, | |
| "loss": 0.4594, | |
| "step": 951 | |
| }, | |
| { | |
| "epoch": 0.9204737732656514, | |
| "grad_norm": 3.997107744216919, | |
| "learning_rate": 8.417405065851647e-08, | |
| "loss": 0.4062, | |
| "step": 952 | |
| }, | |
| { | |
| "epoch": 0.9214406574812666, | |
| "grad_norm": 3.6523690223693848, | |
| "learning_rate": 8.216909700728498e-08, | |
| "loss": 0.422, | |
| "step": 953 | |
| }, | |
| { | |
| "epoch": 0.9224075416968818, | |
| "grad_norm": 3.660822629928589, | |
| "learning_rate": 8.018791120456088e-08, | |
| "loss": 0.459, | |
| "step": 954 | |
| }, | |
| { | |
| "epoch": 0.923374425912497, | |
| "grad_norm": 4.010040760040283, | |
| "learning_rate": 7.823051272586812e-08, | |
| "loss": 0.5026, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 0.9243413101281122, | |
| "grad_norm": 3.4889373779296875, | |
| "learning_rate": 7.629692081289608e-08, | |
| "loss": 0.4264, | |
| "step": 956 | |
| }, | |
| { | |
| "epoch": 0.9253081943437274, | |
| "grad_norm": 3.5010697841644287, | |
| "learning_rate": 7.438715447331018e-08, | |
| "loss": 0.471, | |
| "step": 957 | |
| }, | |
| { | |
| "epoch": 0.9262750785593425, | |
| "grad_norm": 3.4076452255249023, | |
| "learning_rate": 7.250123248056379e-08, | |
| "loss": 0.3945, | |
| "step": 958 | |
| }, | |
| { | |
| "epoch": 0.9272419627749577, | |
| "grad_norm": 3.8815410137176514, | |
| "learning_rate": 7.063917337371495e-08, | |
| "loss": 0.3895, | |
| "step": 959 | |
| }, | |
| { | |
| "epoch": 0.9282088469905729, | |
| "grad_norm": 3.7122230529785156, | |
| "learning_rate": 6.880099545724522e-08, | |
| "loss": 0.3973, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 0.929175731206188, | |
| "grad_norm": 3.873793125152588, | |
| "learning_rate": 6.698671680087643e-08, | |
| "loss": 0.4093, | |
| "step": 961 | |
| }, | |
| { | |
| "epoch": 0.9301426154218032, | |
| "grad_norm": 3.5222861766815186, | |
| "learning_rate": 6.519635523939666e-08, | |
| "loss": 0.4055, | |
| "step": 962 | |
| }, | |
| { | |
| "epoch": 0.9311094996374184, | |
| "grad_norm": 3.970353841781616, | |
| "learning_rate": 6.342992837248235e-08, | |
| "loss": 0.4792, | |
| "step": 963 | |
| }, | |
| { | |
| "epoch": 0.9320763838530336, | |
| "grad_norm": 3.4243791103363037, | |
| "learning_rate": 6.168745356452676e-08, | |
| "loss": 0.387, | |
| "step": 964 | |
| }, | |
| { | |
| "epoch": 0.9330432680686488, | |
| "grad_norm": 3.5594351291656494, | |
| "learning_rate": 5.996894794446817e-08, | |
| "loss": 0.4666, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 0.934010152284264, | |
| "grad_norm": 3.504962205886841, | |
| "learning_rate": 5.8274428405623307e-08, | |
| "loss": 0.4723, | |
| "step": 966 | |
| }, | |
| { | |
| "epoch": 0.9349770364998792, | |
| "grad_norm": 3.720414876937866, | |
| "learning_rate": 5.660391160551837e-08, | |
| "loss": 0.4996, | |
| "step": 967 | |
| }, | |
| { | |
| "epoch": 0.9359439207154944, | |
| "grad_norm": 3.542029857635498, | |
| "learning_rate": 5.495741396572829e-08, | |
| "loss": 0.4294, | |
| "step": 968 | |
| }, | |
| { | |
| "epoch": 0.9369108049311095, | |
| "grad_norm": 3.799464225769043, | |
| "learning_rate": 5.333495167171354e-08, | |
| "loss": 0.4009, | |
| "step": 969 | |
| }, | |
| { | |
| "epoch": 0.9378776891467246, | |
| "grad_norm": 4.372755527496338, | |
| "learning_rate": 5.17365406726611e-08, | |
| "loss": 0.5372, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 0.9388445733623398, | |
| "grad_norm": 3.79471492767334, | |
| "learning_rate": 5.016219668132871e-08, | |
| "loss": 0.4428, | |
| "step": 971 | |
| }, | |
| { | |
| "epoch": 0.939811457577955, | |
| "grad_norm": 3.7523727416992188, | |
| "learning_rate": 4.8611935173889235e-08, | |
| "loss": 0.472, | |
| "step": 972 | |
| }, | |
| { | |
| "epoch": 0.9407783417935702, | |
| "grad_norm": 3.699140787124634, | |
| "learning_rate": 4.708577138977932e-08, | |
| "loss": 0.4489, | |
| "step": 973 | |
| }, | |
| { | |
| "epoch": 0.9417452260091854, | |
| "grad_norm": 3.6899728775024414, | |
| "learning_rate": 4.5583720331549563e-08, | |
| "loss": 0.5004, | |
| "step": 974 | |
| }, | |
| { | |
| "epoch": 0.9427121102248006, | |
| "grad_norm": 3.827622652053833, | |
| "learning_rate": 4.410579676471571e-08, | |
| "loss": 0.493, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 0.9436789944404158, | |
| "grad_norm": 3.3393099308013916, | |
| "learning_rate": 4.265201521761603e-08, | |
| "loss": 0.4271, | |
| "step": 976 | |
| }, | |
| { | |
| "epoch": 0.9446458786560309, | |
| "grad_norm": 3.922778367996216, | |
| "learning_rate": 4.1222389981265546e-08, | |
| "loss": 0.4808, | |
| "step": 977 | |
| }, | |
| { | |
| "epoch": 0.9456127628716461, | |
| "grad_norm": 3.676297903060913, | |
| "learning_rate": 3.9816935109218414e-08, | |
| "loss": 0.4545, | |
| "step": 978 | |
| }, | |
| { | |
| "epoch": 0.9465796470872613, | |
| "grad_norm": 3.3293559551239014, | |
| "learning_rate": 3.843566441742774e-08, | |
| "loss": 0.4173, | |
| "step": 979 | |
| }, | |
| { | |
| "epoch": 0.9475465313028765, | |
| "grad_norm": 3.5446856021881104, | |
| "learning_rate": 3.7078591484110136e-08, | |
| "loss": 0.4439, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 0.9485134155184917, | |
| "grad_norm": 3.5858993530273438, | |
| "learning_rate": 3.574572964961304e-08, | |
| "loss": 0.405, | |
| "step": 981 | |
| }, | |
| { | |
| "epoch": 0.9494802997341069, | |
| "grad_norm": 3.8750436305999756, | |
| "learning_rate": 3.443709201628287e-08, | |
| "loss": 0.4514, | |
| "step": 982 | |
| }, | |
| { | |
| "epoch": 0.950447183949722, | |
| "grad_norm": 3.9488742351531982, | |
| "learning_rate": 3.3152691448336825e-08, | |
| "loss": 0.4698, | |
| "step": 983 | |
| }, | |
| { | |
| "epoch": 0.9514140681653372, | |
| "grad_norm": 3.8947930335998535, | |
| "learning_rate": 3.189254057173491e-08, | |
| "loss": 0.4765, | |
| "step": 984 | |
| }, | |
| { | |
| "epoch": 0.9523809523809523, | |
| "grad_norm": 3.8969860076904297, | |
| "learning_rate": 3.065665177405808e-08, | |
| "loss": 0.4567, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 0.9533478365965675, | |
| "grad_norm": 3.5067756175994873, | |
| "learning_rate": 2.9445037204385297e-08, | |
| "loss": 0.4525, | |
| "step": 986 | |
| }, | |
| { | |
| "epoch": 0.9543147208121827, | |
| "grad_norm": 3.490902900695801, | |
| "learning_rate": 2.825770877317363e-08, | |
| "loss": 0.4457, | |
| "step": 987 | |
| }, | |
| { | |
| "epoch": 0.9552816050277979, | |
| "grad_norm": 3.8223989009857178, | |
| "learning_rate": 2.7094678152141396e-08, | |
| "loss": 0.4631, | |
| "step": 988 | |
| }, | |
| { | |
| "epoch": 0.9562484892434131, | |
| "grad_norm": 3.704549551010132, | |
| "learning_rate": 2.5955956774154633e-08, | |
| "loss": 0.3899, | |
| "step": 989 | |
| }, | |
| { | |
| "epoch": 0.9572153734590283, | |
| "grad_norm": 3.6381337642669678, | |
| "learning_rate": 2.4841555833112764e-08, | |
| "loss": 0.4808, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 0.9581822576746435, | |
| "grad_norm": 3.9116270542144775, | |
| "learning_rate": 2.3751486283840884e-08, | |
| "loss": 0.4986, | |
| "step": 991 | |
| }, | |
| { | |
| "epoch": 0.9591491418902587, | |
| "grad_norm": 3.7075469493865967, | |
| "learning_rate": 2.268575884197932e-08, | |
| "loss": 0.4586, | |
| "step": 992 | |
| }, | |
| { | |
| "epoch": 0.9601160261058739, | |
| "grad_norm": 3.487811326980591, | |
| "learning_rate": 2.1644383983880356e-08, | |
| "loss": 0.4241, | |
| "step": 993 | |
| }, | |
| { | |
| "epoch": 0.961082910321489, | |
| "grad_norm": 3.439530372619629, | |
| "learning_rate": 2.0627371946504992e-08, | |
| "loss": 0.4778, | |
| "step": 994 | |
| }, | |
| { | |
| "epoch": 0.9620497945371042, | |
| "grad_norm": 3.838062286376953, | |
| "learning_rate": 1.9634732727321636e-08, | |
| "loss": 0.4545, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 0.9630166787527193, | |
| "grad_norm": 3.6535823345184326, | |
| "learning_rate": 1.866647608420813e-08, | |
| "loss": 0.4729, | |
| "step": 996 | |
| }, | |
| { | |
| "epoch": 0.9639835629683345, | |
| "grad_norm": 3.867074489593506, | |
| "learning_rate": 1.7722611535355426e-08, | |
| "loss": 0.4372, | |
| "step": 997 | |
| }, | |
| { | |
| "epoch": 0.9649504471839497, | |
| "grad_norm": 3.6077470779418945, | |
| "learning_rate": 1.6803148359175182e-08, | |
| "loss": 0.4406, | |
| "step": 998 | |
| }, | |
| { | |
| "epoch": 0.9659173313995649, | |
| "grad_norm": 3.9167864322662354, | |
| "learning_rate": 1.5908095594207585e-08, | |
| "loss": 0.4684, | |
| "step": 999 | |
| }, | |
| { | |
| "epoch": 0.9668842156151801, | |
| "grad_norm": 4.19531774520874, | |
| "learning_rate": 1.503746203903228e-08, | |
| "loss": 0.4953, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.9678510998307953, | |
| "grad_norm": 4.0359601974487305, | |
| "learning_rate": 1.4191256252182595e-08, | |
| "loss": 0.4722, | |
| "step": 1001 | |
| }, | |
| { | |
| "epoch": 0.9688179840464104, | |
| "grad_norm": 3.364161968231201, | |
| "learning_rate": 1.336948655206144e-08, | |
| "loss": 0.361, | |
| "step": 1002 | |
| }, | |
| { | |
| "epoch": 0.9697848682620256, | |
| "grad_norm": 3.708479642868042, | |
| "learning_rate": 1.2572161016858874e-08, | |
| "loss": 0.4385, | |
| "step": 1003 | |
| }, | |
| { | |
| "epoch": 0.9707517524776408, | |
| "grad_norm": 3.9538137912750244, | |
| "learning_rate": 1.179928748447301e-08, | |
| "loss": 0.4849, | |
| "step": 1004 | |
| }, | |
| { | |
| "epoch": 0.971718636693256, | |
| "grad_norm": 3.6462512016296387, | |
| "learning_rate": 1.1050873552433394e-08, | |
| "loss": 0.4823, | |
| "step": 1005 | |
| }, | |
| { | |
| "epoch": 0.9726855209088712, | |
| "grad_norm": 4.054479122161865, | |
| "learning_rate": 1.0326926577825247e-08, | |
| "loss": 0.5613, | |
| "step": 1006 | |
| }, | |
| { | |
| "epoch": 0.9736524051244864, | |
| "grad_norm": 3.732670545578003, | |
| "learning_rate": 9.627453677218402e-09, | |
| "loss": 0.4506, | |
| "step": 1007 | |
| }, | |
| { | |
| "epoch": 0.9746192893401016, | |
| "grad_norm": 3.2570137977600098, | |
| "learning_rate": 8.952461726596528e-09, | |
| "loss": 0.4087, | |
| "step": 1008 | |
| }, | |
| { | |
| "epoch": 0.9755861735557168, | |
| "grad_norm": 3.9363338947296143, | |
| "learning_rate": 8.301957361289969e-09, | |
| "loss": 0.4716, | |
| "step": 1009 | |
| }, | |
| { | |
| "epoch": 0.9765530577713318, | |
| "grad_norm": 3.6260664463043213, | |
| "learning_rate": 7.675946975909953e-09, | |
| "loss": 0.5033, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 0.977519941986947, | |
| "grad_norm": 3.406822919845581, | |
| "learning_rate": 7.074436724286704e-09, | |
| "loss": 0.456, | |
| "step": 1011 | |
| }, | |
| { | |
| "epoch": 0.9784868262025622, | |
| "grad_norm": 3.807194948196411, | |
| "learning_rate": 6.497432519407543e-09, | |
| "loss": 0.4096, | |
| "step": 1012 | |
| }, | |
| { | |
| "epoch": 0.9794537104181774, | |
| "grad_norm": 3.6305527687072754, | |
| "learning_rate": 5.944940033360269e-09, | |
| "loss": 0.3987, | |
| "step": 1013 | |
| }, | |
| { | |
| "epoch": 0.9804205946337926, | |
| "grad_norm": 3.2959699630737305, | |
| "learning_rate": 5.4169646972762615e-09, | |
| "loss": 0.3819, | |
| "step": 1014 | |
| }, | |
| { | |
| "epoch": 0.9813874788494078, | |
| "grad_norm": 3.903257369995117, | |
| "learning_rate": 4.913511701278017e-09, | |
| "loss": 0.447, | |
| "step": 1015 | |
| }, | |
| { | |
| "epoch": 0.982354363065023, | |
| "grad_norm": 3.6549253463745117, | |
| "learning_rate": 4.434585994427254e-09, | |
| "loss": 0.4847, | |
| "step": 1016 | |
| }, | |
| { | |
| "epoch": 0.9833212472806382, | |
| "grad_norm": 3.9208667278289795, | |
| "learning_rate": 3.98019228467661e-09, | |
| "loss": 0.4342, | |
| "step": 1017 | |
| }, | |
| { | |
| "epoch": 0.9842881314962533, | |
| "grad_norm": 3.740576982498169, | |
| "learning_rate": 3.5503350388232936e-09, | |
| "loss": 0.4214, | |
| "step": 1018 | |
| }, | |
| { | |
| "epoch": 0.9852550157118685, | |
| "grad_norm": 3.650252342224121, | |
| "learning_rate": 3.1450184824657892e-09, | |
| "loss": 0.4118, | |
| "step": 1019 | |
| }, | |
| { | |
| "epoch": 0.9862218999274837, | |
| "grad_norm": 3.3320415019989014, | |
| "learning_rate": 2.7642465999613843e-09, | |
| "loss": 0.4433, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 0.9871887841430989, | |
| "grad_norm": 3.4924774169921875, | |
| "learning_rate": 2.408023134387871e-09, | |
| "loss": 0.4358, | |
| "step": 1021 | |
| }, | |
| { | |
| "epoch": 0.988155668358714, | |
| "grad_norm": 3.6019985675811768, | |
| "learning_rate": 2.0763515875055207e-09, | |
| "loss": 0.4682, | |
| "step": 1022 | |
| }, | |
| { | |
| "epoch": 0.9891225525743292, | |
| "grad_norm": 3.7121329307556152, | |
| "learning_rate": 1.7692352197240525e-09, | |
| "loss": 0.4277, | |
| "step": 1023 | |
| }, | |
| { | |
| "epoch": 0.9900894367899444, | |
| "grad_norm": 3.643587589263916, | |
| "learning_rate": 1.4866770500704398e-09, | |
| "loss": 0.4418, | |
| "step": 1024 | |
| }, | |
| { | |
| "epoch": 0.9910563210055596, | |
| "grad_norm": 3.526364803314209, | |
| "learning_rate": 1.2286798561572666e-09, | |
| "loss": 0.4761, | |
| "step": 1025 | |
| }, | |
| { | |
| "epoch": 0.9920232052211748, | |
| "grad_norm": 3.6432149410247803, | |
| "learning_rate": 9.952461741585816e-10, | |
| "loss": 0.4749, | |
| "step": 1026 | |
| }, | |
| { | |
| "epoch": 0.9929900894367899, | |
| "grad_norm": 3.4780168533325195, | |
| "learning_rate": 7.863782987821422e-10, | |
| "loss": 0.4458, | |
| "step": 1027 | |
| }, | |
| { | |
| "epoch": 0.9939569736524051, | |
| "grad_norm": 3.4899373054504395, | |
| "learning_rate": 6.020782832488747e-10, | |
| "loss": 0.4496, | |
| "step": 1028 | |
| }, | |
| { | |
| "epoch": 0.9949238578680203, | |
| "grad_norm": 3.6461098194122314, | |
| "learning_rate": 4.4234793927094845e-10, | |
| "loss": 0.4434, | |
| "step": 1029 | |
| }, | |
| { | |
| "epoch": 0.9958907420836355, | |
| "grad_norm": 3.9469432830810547, | |
| "learning_rate": 3.0718883703567684e-10, | |
| "loss": 0.4967, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 0.9968576262992507, | |
| "grad_norm": 3.649221658706665, | |
| "learning_rate": 1.9660230518886436e-10, | |
| "loss": 0.4199, | |
| "step": 1031 | |
| }, | |
| { | |
| "epoch": 0.9978245105148659, | |
| "grad_norm": 3.793912172317505, | |
| "learning_rate": 1.1058943082203898e-10, | |
| "loss": 0.4808, | |
| "step": 1032 | |
| }, | |
| { | |
| "epoch": 0.9987913947304811, | |
| "grad_norm": 3.6841025352478027, | |
| "learning_rate": 4.915105946246002e-11, | |
| "loss": 0.4407, | |
| "step": 1033 | |
| }, | |
| { | |
| "epoch": 0.9997582789460963, | |
| "grad_norm": 3.665886640548706, | |
| "learning_rate": 1.2287795063681274e-11, | |
| "loss": 0.4681, | |
| "step": 1034 | |
| }, | |
| { | |
| "epoch": 0.9997582789460963, | |
| "step": 1034, | |
| "total_flos": 1.8974566881186284e+18, | |
| "train_loss": 0.49419337980867123, | |
| "train_runtime": 23447.7413, | |
| "train_samples_per_second": 1.411, | |
| "train_steps_per_second": 0.044 | |
| } | |
| ], | |
| "logging_steps": 1.0, | |
| "max_steps": 1034, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 50, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 1.8974566881186284e+18, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
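
For readers who want to work with this log programmatically rather than scan it by eye, the sketch below is a minimal way to separate the per-step records in `log_history` from the final aggregate entry (the one carrying `train_loss`, `train_runtime`, and throughput rather than a per-step `loss`). It uses only the Python standard library; the filename `trainer_state.json` is an assumption about where this state was saved, so adjust the path to your checkpoint directory.

```python
import json

# Load the trainer state (path is an assumption; point it at your
# checkpoint directory's trainer_state.json).
with open("trainer_state.json") as f:
    state = json.load(f)

history = state["log_history"]

# Per-step records carry "loss" / "learning_rate" / "grad_norm";
# the final summary record does not, so filtering on "loss"
# cleanly excludes it.
steps = [r["step"] for r in history if "loss" in r]
losses = [r["loss"] for r in history if "loss" in r]
lrs = [r["learning_rate"] for r in history if "learning_rate" in r]

print(f"steps logged: {len(steps)}")
print(f"final loss:   {losses[-1]:.4f}")
print(f"mean loss:    {sum(losses) / len(losses):.4f}")
print(f"final lr:     {lrs[-1]:.3e}")
```

As a sanity check, the mean of the logged per-step losses should land close to the `train_loss` reported in the summary record (≈0.494 here), and the final learning rate (≈1.2e-11 at step 1034) decaying toward zero is consistent with a schedule that anneals to zero over the single training epoch.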