{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9904153354632586,
  "eval_steps": 500,
  "global_step": 468,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.006389776357827476,
      "grad_norm": 5.939326833886755,
      "learning_rate": 2.1276595744680852e-07,
      "loss": 0.8883,
      "step": 1
    },
    {
      "epoch": 0.012779552715654952,
      "grad_norm": 5.646039516212413,
      "learning_rate": 4.2553191489361704e-07,
      "loss": 0.8365,
      "step": 2
    },
    {
      "epoch": 0.019169329073482427,
      "grad_norm": 6.284923228838248,
      "learning_rate": 6.382978723404255e-07,
      "loss": 0.8863,
      "step": 3
    },
    {
      "epoch": 0.025559105431309903,
      "grad_norm": 5.935120251509228,
      "learning_rate": 8.510638297872341e-07,
      "loss": 0.8819,
      "step": 4
    },
    {
      "epoch": 0.03194888178913738,
      "grad_norm": 5.74959015018875,
      "learning_rate": 1.0638297872340427e-06,
      "loss": 0.8347,
      "step": 5
    },
    {
      "epoch": 0.038338658146964855,
      "grad_norm": 5.668302511268384,
      "learning_rate": 1.276595744680851e-06,
      "loss": 0.8812,
      "step": 6
    },
    {
      "epoch": 0.04472843450479233,
      "grad_norm": 5.508216270485821,
      "learning_rate": 1.4893617021276596e-06,
      "loss": 0.8532,
      "step": 7
    },
    {
      "epoch": 0.051118210862619806,
      "grad_norm": 4.4296962402956614,
      "learning_rate": 1.7021276595744682e-06,
      "loss": 0.8053,
      "step": 8
    },
    {
      "epoch": 0.05750798722044728,
      "grad_norm": 4.11899785775395,
      "learning_rate": 1.9148936170212767e-06,
      "loss": 0.8029,
      "step": 9
    },
    {
      "epoch": 0.06389776357827476,
      "grad_norm": 3.5659315553503177,
      "learning_rate": 2.1276595744680853e-06,
      "loss": 0.7869,
      "step": 10
    },
    {
      "epoch": 0.07028753993610223,
      "grad_norm": 2.4300073492734002,
      "learning_rate": 2.340425531914894e-06,
      "loss": 0.7142,
      "step": 11
    },
    {
      "epoch": 0.07667731629392971,
      "grad_norm": 2.4820563392754007,
      "learning_rate": 2.553191489361702e-06,
      "loss": 0.8301,
      "step": 12
    },
    {
      "epoch": 0.08306709265175719,
      "grad_norm": 2.0596140227686197,
      "learning_rate": 2.765957446808511e-06,
      "loss": 0.7813,
      "step": 13
    },
    {
      "epoch": 0.08945686900958466,
      "grad_norm": 2.955671691211743,
      "learning_rate": 2.978723404255319e-06,
      "loss": 0.745,
      "step": 14
    },
    {
      "epoch": 0.09584664536741214,
      "grad_norm": 3.7251994469728262,
      "learning_rate": 3.191489361702128e-06,
      "loss": 0.8039,
      "step": 15
    },
    {
      "epoch": 0.10223642172523961,
      "grad_norm": 3.5680504240926907,
      "learning_rate": 3.4042553191489363e-06,
      "loss": 0.7677,
      "step": 16
    },
    {
      "epoch": 0.10862619808306709,
      "grad_norm": 3.464855495528428,
      "learning_rate": 3.6170212765957453e-06,
      "loss": 0.7265,
      "step": 17
    },
    {
      "epoch": 0.11501597444089456,
      "grad_norm": 3.237246494768919,
      "learning_rate": 3.8297872340425535e-06,
      "loss": 0.7228,
      "step": 18
    },
    {
      "epoch": 0.12140575079872204,
      "grad_norm": 2.685414752116424,
      "learning_rate": 4.042553191489362e-06,
      "loss": 0.7299,
      "step": 19
    },
    {
      "epoch": 0.12779552715654952,
      "grad_norm": 2.059772471367897,
      "learning_rate": 4.255319148936171e-06,
      "loss": 0.6523,
      "step": 20
    },
    {
      "epoch": 0.134185303514377,
      "grad_norm": 1.8228628515995098,
      "learning_rate": 4.468085106382979e-06,
      "loss": 0.6761,
      "step": 21
    },
    {
      "epoch": 0.14057507987220447,
      "grad_norm": 1.539143622792853,
      "learning_rate": 4.680851063829788e-06,
      "loss": 0.6845,
      "step": 22
    },
    {
      "epoch": 0.14696485623003194,
      "grad_norm": 1.4047159858448883,
      "learning_rate": 4.893617021276596e-06,
      "loss": 0.6791,
      "step": 23
    },
    {
      "epoch": 0.15335463258785942,
      "grad_norm": 1.5196729672988176,
      "learning_rate": 5.106382978723404e-06,
      "loss": 0.6849,
      "step": 24
    },
    {
      "epoch": 0.1597444089456869,
      "grad_norm": 1.4730021288614452,
      "learning_rate": 5.319148936170213e-06,
      "loss": 0.6652,
      "step": 25
    },
    {
      "epoch": 0.16613418530351437,
      "grad_norm": 1.459656354515963,
      "learning_rate": 5.531914893617022e-06,
      "loss": 0.6774,
      "step": 26
    },
    {
      "epoch": 0.17252396166134185,
      "grad_norm": 1.1478544478537531,
      "learning_rate": 5.744680851063831e-06,
      "loss": 0.6268,
      "step": 27
    },
    {
      "epoch": 0.17891373801916932,
      "grad_norm": 1.0471700380284323,
      "learning_rate": 5.957446808510638e-06,
      "loss": 0.6209,
      "step": 28
    },
    {
      "epoch": 0.1853035143769968,
      "grad_norm": 0.9703028296253196,
      "learning_rate": 6.170212765957447e-06,
      "loss": 0.6205,
      "step": 29
    },
    {
      "epoch": 0.19169329073482427,
      "grad_norm": 1.0970479694204027,
      "learning_rate": 6.382978723404256e-06,
      "loss": 0.6159,
      "step": 30
    },
    {
      "epoch": 0.19808306709265175,
      "grad_norm": 1.0378377333972602,
      "learning_rate": 6.595744680851064e-06,
      "loss": 0.6027,
      "step": 31
    },
    {
      "epoch": 0.20447284345047922,
      "grad_norm": 0.8649214812498761,
      "learning_rate": 6.808510638297873e-06,
      "loss": 0.6119,
      "step": 32
    },
    {
      "epoch": 0.2108626198083067,
      "grad_norm": 0.8210120080027409,
      "learning_rate": 7.021276595744682e-06,
      "loss": 0.6111,
      "step": 33
    },
    {
      "epoch": 0.21725239616613418,
      "grad_norm": 0.9550908877879448,
      "learning_rate": 7.234042553191491e-06,
      "loss": 0.5762,
      "step": 34
    },
    {
      "epoch": 0.22364217252396165,
      "grad_norm": 0.8769037404164327,
      "learning_rate": 7.446808510638298e-06,
      "loss": 0.6283,
      "step": 35
    },
    {
      "epoch": 0.23003194888178913,
      "grad_norm": 0.769174472199385,
      "learning_rate": 7.659574468085107e-06,
      "loss": 0.5627,
      "step": 36
    },
    {
      "epoch": 0.2364217252396166,
      "grad_norm": 0.7612759134662135,
      "learning_rate": 7.872340425531916e-06,
      "loss": 0.5743,
      "step": 37
    },
    {
      "epoch": 0.24281150159744408,
      "grad_norm": 0.6947587526462609,
      "learning_rate": 8.085106382978723e-06,
      "loss": 0.5698,
      "step": 38
    },
    {
      "epoch": 0.24920127795527156,
      "grad_norm": 0.8620641383044846,
      "learning_rate": 8.297872340425532e-06,
      "loss": 0.5533,
      "step": 39
    },
    {
      "epoch": 0.25559105431309903,
      "grad_norm": 0.7275486724052839,
      "learning_rate": 8.510638297872341e-06,
      "loss": 0.5601,
      "step": 40
    },
    {
      "epoch": 0.26198083067092653,
      "grad_norm": 0.6421975753501947,
      "learning_rate": 8.72340425531915e-06,
      "loss": 0.573,
      "step": 41
    },
    {
      "epoch": 0.268370607028754,
      "grad_norm": 0.6763167089082316,
      "learning_rate": 8.936170212765958e-06,
      "loss": 0.5274,
      "step": 42
    },
    {
      "epoch": 0.2747603833865815,
      "grad_norm": 0.791857632119965,
      "learning_rate": 9.148936170212767e-06,
      "loss": 0.5863,
      "step": 43
    },
    {
      "epoch": 0.28115015974440893,
      "grad_norm": 0.6979879507542114,
      "learning_rate": 9.361702127659576e-06,
      "loss": 0.5931,
      "step": 44
    },
    {
      "epoch": 0.28753993610223644,
      "grad_norm": 0.6543340532934582,
      "learning_rate": 9.574468085106385e-06,
      "loss": 0.5401,
      "step": 45
    },
    {
      "epoch": 0.2939297124600639,
      "grad_norm": 0.7142949738731162,
      "learning_rate": 9.787234042553192e-06,
      "loss": 0.5508,
      "step": 46
    },
    {
      "epoch": 0.3003194888178914,
      "grad_norm": 0.7178585720555077,
      "learning_rate": 1e-05,
      "loss": 0.5752,
      "step": 47
    },
    {
      "epoch": 0.30670926517571884,
      "grad_norm": 0.6878134120108661,
      "learning_rate": 9.999860789001947e-06,
      "loss": 0.5564,
      "step": 48
    },
    {
      "epoch": 0.31309904153354634,
      "grad_norm": 0.590528457363352,
      "learning_rate": 9.999443163759669e-06,
      "loss": 0.5656,
      "step": 49
    },
    {
      "epoch": 0.3194888178913738,
      "grad_norm": 0.703067597957756,
      "learning_rate": 9.998747147528375e-06,
      "loss": 0.5588,
      "step": 50
    },
    {
      "epoch": 0.3258785942492013,
      "grad_norm": 0.6921324872735819,
      "learning_rate": 9.997772779065312e-06,
      "loss": 0.5601,
      "step": 51
    },
    {
      "epoch": 0.33226837060702874,
      "grad_norm": 0.5997251272391629,
      "learning_rate": 9.996520112627602e-06,
      "loss": 0.5519,
      "step": 52
    },
    {
      "epoch": 0.33865814696485624,
      "grad_norm": 0.5810992095022359,
      "learning_rate": 9.994989217969224e-06,
      "loss": 0.5169,
      "step": 53
    },
    {
      "epoch": 0.3450479233226837,
      "grad_norm": 0.7121641421177344,
      "learning_rate": 9.993180180337126e-06,
      "loss": 0.567,
      "step": 54
    },
    {
      "epoch": 0.3514376996805112,
      "grad_norm": 0.6920329848567716,
      "learning_rate": 9.991093100466482e-06,
      "loss": 0.5458,
      "step": 55
    },
    {
      "epoch": 0.35782747603833864,
      "grad_norm": 0.6326536706188822,
      "learning_rate": 9.988728094575082e-06,
      "loss": 0.5889,
      "step": 56
    },
    {
      "epoch": 0.36421725239616615,
      "grad_norm": 0.6775331304026146,
      "learning_rate": 9.986085294356858e-06,
      "loss": 0.5935,
      "step": 57
    },
    {
      "epoch": 0.3706070287539936,
      "grad_norm": 0.6172072038346998,
      "learning_rate": 9.983164846974549e-06,
      "loss": 0.5015,
      "step": 58
    },
    {
      "epoch": 0.3769968051118211,
      "grad_norm": 0.5593175688117278,
      "learning_rate": 9.979966915051517e-06,
      "loss": 0.5244,
      "step": 59
    },
    {
      "epoch": 0.38338658146964855,
      "grad_norm": 0.6246196225479252,
      "learning_rate": 9.976491676662679e-06,
      "loss": 0.5293,
      "step": 60
    },
    {
      "epoch": 0.38977635782747605,
      "grad_norm": 0.5916637249082176,
      "learning_rate": 9.972739325324596e-06,
      "loss": 0.548,
      "step": 61
    },
    {
      "epoch": 0.3961661341853035,
      "grad_norm": 0.5778850665461611,
      "learning_rate": 9.968710069984699e-06,
      "loss": 0.5515,
      "step": 62
    },
    {
      "epoch": 0.402555910543131,
      "grad_norm": 0.7961082414816123,
      "learning_rate": 9.964404135009649e-06,
      "loss": 0.5921,
      "step": 63
    },
    {
      "epoch": 0.40894568690095845,
      "grad_norm": 0.5553256523659722,
      "learning_rate": 9.959821760172849e-06,
      "loss": 0.5426,
      "step": 64
    },
    {
      "epoch": 0.41533546325878595,
      "grad_norm": 0.7920237945271542,
      "learning_rate": 9.95496320064109e-06,
      "loss": 0.5709,
      "step": 65
    },
    {
      "epoch": 0.4217252396166134,
      "grad_norm": 0.7171526937204784,
      "learning_rate": 9.94982872696034e-06,
      "loss": 0.5393,
      "step": 66
    },
    {
      "epoch": 0.4281150159744409,
      "grad_norm": 0.5946353886197038,
      "learning_rate": 9.94441862504068e-06,
      "loss": 0.5537,
      "step": 67
    },
    {
      "epoch": 0.43450479233226835,
      "grad_norm": 0.7014656677391896,
      "learning_rate": 9.938733196140386e-06,
      "loss": 0.5507,
      "step": 68
    },
    {
      "epoch": 0.44089456869009586,
      "grad_norm": 0.7401127081728431,
      "learning_rate": 9.932772756849152e-06,
      "loss": 0.5486,
      "step": 69
    },
    {
      "epoch": 0.4472843450479233,
      "grad_norm": 0.71028329705812,
      "learning_rate": 9.926537639070457e-06,
      "loss": 0.5597,
      "step": 70
    },
    {
      "epoch": 0.4536741214057508,
      "grad_norm": 0.6719617307022565,
      "learning_rate": 9.92002819000309e-06,
      "loss": 0.5781,
      "step": 71
    },
    {
      "epoch": 0.46006389776357826,
      "grad_norm": 0.676393091959936,
      "learning_rate": 9.913244772121811e-06,
      "loss": 0.556,
      "step": 72
    },
    {
      "epoch": 0.46645367412140576,
      "grad_norm": 0.700331139760568,
      "learning_rate": 9.90618776315717e-06,
      "loss": 0.5468,
      "step": 73
    },
    {
      "epoch": 0.4728434504792332,
      "grad_norm": 0.7278588322652723,
      "learning_rate": 9.898857556074469e-06,
      "loss": 0.5612,
      "step": 74
    },
    {
      "epoch": 0.4792332268370607,
      "grad_norm": 0.62591269973449,
      "learning_rate": 9.891254559051886e-06,
      "loss": 0.537,
      "step": 75
    },
    {
      "epoch": 0.48562300319488816,
      "grad_norm": 0.6205503345604493,
      "learning_rate": 9.883379195457747e-06,
      "loss": 0.55,
      "step": 76
    },
    {
      "epoch": 0.49201277955271566,
      "grad_norm": 0.694304583408491,
      "learning_rate": 9.875231903826936e-06,
      "loss": 0.5262,
      "step": 77
    },
    {
      "epoch": 0.4984025559105431,
      "grad_norm": 0.7120306944516143,
      "learning_rate": 9.8668131378365e-06,
      "loss": 0.5569,
      "step": 78
    },
    {
      "epoch": 0.5047923322683706,
      "grad_norm": 0.6815847947515142,
      "learning_rate": 9.858123366280358e-06,
      "loss": 0.4874,
      "step": 79
    },
    {
      "epoch": 0.5111821086261981,
      "grad_norm": 0.7373853072992483,
      "learning_rate": 9.849163073043223e-06,
      "loss": 0.5152,
      "step": 80
    },
    {
      "epoch": 0.5175718849840255,
      "grad_norm": 0.6920909149775949,
      "learning_rate": 9.83993275707364e-06,
      "loss": 0.55,
      "step": 81
    },
    {
      "epoch": 0.5239616613418531,
      "grad_norm": 0.6413282356116132,
      "learning_rate": 9.830432932356207e-06,
      "loss": 0.5567,
      "step": 82
    },
    {
      "epoch": 0.5303514376996805,
      "grad_norm": 0.629225153697812,
      "learning_rate": 9.820664127882958e-06,
      "loss": 0.5275,
      "step": 83
    },
    {
      "epoch": 0.536741214057508,
      "grad_norm": 0.6043922821719563,
      "learning_rate": 9.8106268876239e-06,
      "loss": 0.5047,
      "step": 84
    },
    {
      "epoch": 0.5431309904153354,
      "grad_norm": 0.6256854779571432,
      "learning_rate": 9.800321770496726e-06,
      "loss": 0.555,
      "step": 85
    },
    {
      "epoch": 0.549520766773163,
      "grad_norm": 0.6576775867390146,
      "learning_rate": 9.789749350335693e-06,
      "loss": 0.5336,
      "step": 86
    },
    {
      "epoch": 0.5559105431309904,
      "grad_norm": 0.6302312444007682,
      "learning_rate": 9.778910215859666e-06,
      "loss": 0.5471,
      "step": 87
    },
    {
      "epoch": 0.5623003194888179,
      "grad_norm": 0.6250229560663714,
      "learning_rate": 9.767804970639338e-06,
      "loss": 0.5165,
      "step": 88
    },
    {
      "epoch": 0.5686900958466453,
      "grad_norm": 0.6250237657849015,
      "learning_rate": 9.756434233063616e-06,
      "loss": 0.5273,
      "step": 89
    },
    {
      "epoch": 0.5750798722044729,
      "grad_norm": 0.6557675626746576,
      "learning_rate": 9.744798636305189e-06,
      "loss": 0.5275,
      "step": 90
    },
    {
      "epoch": 0.5814696485623003,
      "grad_norm": 0.6268074492076356,
      "learning_rate": 9.732898828285273e-06,
      "loss": 0.5284,
      "step": 91
    },
    {
      "epoch": 0.5878594249201278,
      "grad_norm": 0.6093120046283272,
      "learning_rate": 9.72073547163753e-06,
      "loss": 0.5079,
      "step": 92
    },
    {
      "epoch": 0.5942492012779552,
      "grad_norm": 0.5832678416729479,
      "learning_rate": 9.708309243671167e-06,
      "loss": 0.4975,
      "step": 93
    },
    {
      "epoch": 0.6006389776357828,
      "grad_norm": 0.6124566759848806,
      "learning_rate": 9.695620836333219e-06,
      "loss": 0.5067,
      "step": 94
    },
    {
      "epoch": 0.6070287539936102,
      "grad_norm": 0.51383645099116,
      "learning_rate": 9.68267095617003e-06,
      "loss": 0.4905,
      "step": 95
    },
    {
      "epoch": 0.6134185303514377,
      "grad_norm": 0.6183211307426659,
      "learning_rate": 9.669460324287899e-06,
      "loss": 0.5042,
      "step": 96
    },
    {
      "epoch": 0.6198083067092651,
      "grad_norm": 0.622243096226216,
      "learning_rate": 9.655989676312918e-06,
      "loss": 0.5508,
      "step": 97
    },
    {
      "epoch": 0.6261980830670927,
      "grad_norm": 0.5178015072023299,
      "learning_rate": 9.642259762350034e-06,
      "loss": 0.5313,
      "step": 98
    },
    {
      "epoch": 0.6325878594249201,
      "grad_norm": 0.5953592164694912,
      "learning_rate": 9.628271346941252e-06,
      "loss": 0.5013,
      "step": 99
    },
    {
      "epoch": 0.6389776357827476,
      "grad_norm": 0.6705132761257814,
      "learning_rate": 9.614025209023084e-06,
      "loss": 0.5094,
      "step": 100
    },
    {
      "epoch": 0.645367412140575,
      "grad_norm": 0.6060915976868061,
      "learning_rate": 9.59952214188316e-06,
      "loss": 0.5347,
      "step": 101
    },
    {
      "epoch": 0.6517571884984026,
      "grad_norm": 0.7225453293078749,
      "learning_rate": 9.58476295311606e-06,
      "loss": 0.5148,
      "step": 102
    },
    {
      "epoch": 0.65814696485623,
      "grad_norm": 0.6096956061373165,
      "learning_rate": 9.569748464578343e-06,
      "loss": 0.5133,
      "step": 103
    },
    {
      "epoch": 0.6645367412140575,
      "grad_norm": 0.541399197158301,
      "learning_rate": 9.554479512342785e-06,
      "loss": 0.5255,
      "step": 104
    },
    {
      "epoch": 0.670926517571885,
      "grad_norm": 0.5146271141486363,
      "learning_rate": 9.538956946651816e-06,
      "loss": 0.5174,
      "step": 105
    },
    {
      "epoch": 0.6773162939297125,
      "grad_norm": 0.6002536156277724,
      "learning_rate": 9.52318163187018e-06,
      "loss": 0.5241,
      "step": 106
    },
    {
      "epoch": 0.6837060702875399,
      "grad_norm": 0.6048884598631759,
      "learning_rate": 9.507154446436806e-06,
      "loss": 0.556,
      "step": 107
    },
    {
      "epoch": 0.6900958466453674,
      "grad_norm": 0.593631889438514,
      "learning_rate": 9.490876282815884e-06,
      "loss": 0.4931,
      "step": 108
    },
    {
      "epoch": 0.6964856230031949,
      "grad_norm": 0.705422947908814,
      "learning_rate": 9.474348047447177e-06,
      "loss": 0.5638,
      "step": 109
    },
    {
      "epoch": 0.7028753993610224,
      "grad_norm": 0.5761128333881479,
      "learning_rate": 9.457570660695542e-06,
      "loss": 0.5356,
      "step": 110
    },
    {
      "epoch": 0.7092651757188498,
      "grad_norm": 0.6066705392801198,
      "learning_rate": 9.440545056799677e-06,
      "loss": 0.5238,
      "step": 111
    },
    {
      "epoch": 0.7156549520766773,
      "grad_norm": 0.7184729689357008,
      "learning_rate": 9.423272183820109e-06,
      "loss": 0.4955,
      "step": 112
    },
    {
      "epoch": 0.7220447284345048,
      "grad_norm": 0.6112746616131367,
      "learning_rate": 9.405753003586396e-06,
      "loss": 0.5371,
      "step": 113
    },
    {
      "epoch": 0.7284345047923323,
      "grad_norm": 0.6140108522380364,
      "learning_rate": 9.387988491643558e-06,
      "loss": 0.5264,
      "step": 114
    },
    {
      "epoch": 0.7348242811501597,
      "grad_norm": 0.5430091444141817,
      "learning_rate": 9.369979637197774e-06,
      "loss": 0.5294,
      "step": 115
    },
    {
      "epoch": 0.7412140575079872,
      "grad_norm": 0.6211511315887338,
      "learning_rate": 9.351727443061284e-06,
      "loss": 0.5394,
      "step": 116
    },
    {
      "epoch": 0.7476038338658147,
      "grad_norm": 0.6441304788522555,
      "learning_rate": 9.33323292559655e-06,
      "loss": 0.509,
      "step": 117
    },
    {
      "epoch": 0.7539936102236422,
      "grad_norm": 0.5558677193355432,
      "learning_rate": 9.31449711465967e-06,
      "loss": 0.5041,
      "step": 118
    },
    {
      "epoch": 0.7603833865814696,
      "grad_norm": 0.6096616313577746,
      "learning_rate": 9.29552105354302e-06,
      "loss": 0.5161,
      "step": 119
    },
    {
      "epoch": 0.7667731629392971,
      "grad_norm": 0.6683422606125339,
      "learning_rate": 9.27630579891716e-06,
      "loss": 0.5005,
      "step": 120
    },
    {
      "epoch": 0.7731629392971247,
      "grad_norm": 0.5938654489875361,
      "learning_rate": 9.256852420771999e-06,
      "loss": 0.5071,
      "step": 121
    },
    {
      "epoch": 0.7795527156549521,
      "grad_norm": 0.5778542306326887,
      "learning_rate": 9.237162002357214e-06,
      "loss": 0.553,
      "step": 122
    },
    {
      "epoch": 0.7859424920127795,
      "grad_norm": 0.49779213511495696,
      "learning_rate": 9.217235640121927e-06,
      "loss": 0.5111,
      "step": 123
    },
    {
      "epoch": 0.792332268370607,
      "grad_norm": 0.648421144650797,
      "learning_rate": 9.197074443653643e-06,
      "loss": 0.5429,
      "step": 124
    },
    {
      "epoch": 0.7987220447284346,
      "grad_norm": 0.5346673798968297,
      "learning_rate": 9.176679535616477e-06,
      "loss": 0.5066,
      "step": 125
    },
    {
      "epoch": 0.805111821086262,
      "grad_norm": 0.5784378870838629,
      "learning_rate": 9.156052051688633e-06,
      "loss": 0.4976,
      "step": 126
    },
    {
      "epoch": 0.8115015974440895,
      "grad_norm": 0.5198483505362743,
      "learning_rate": 9.135193140499155e-06,
      "loss": 0.4932,
      "step": 127
    },
    {
      "epoch": 0.8178913738019169,
      "grad_norm": 0.5027087300412536,
      "learning_rate": 9.114103963563986e-06,
      "loss": 0.5018,
      "step": 128
    },
    {
      "epoch": 0.8242811501597445,
      "grad_norm": 0.5448072626099122,
      "learning_rate": 9.092785695221271e-06,
      "loss": 0.4893,
      "step": 129
    },
    {
      "epoch": 0.8306709265175719,
      "grad_norm": 0.5949876325123383,
      "learning_rate": 9.071239522565978e-06,
      "loss": 0.524,
      "step": 130
    },
    {
      "epoch": 0.8370607028753994,
      "grad_norm": 0.6161426105193486,
      "learning_rate": 9.049466645383785e-06,
      "loss": 0.5288,
      "step": 131
    },
    {
      "epoch": 0.8434504792332268,
      "grad_norm": 0.6061825018249648,
      "learning_rate": 9.027468276084274e-06,
      "loss": 0.5379,
      "step": 132
    },
    {
      "epoch": 0.8498402555910544,
      "grad_norm": 0.5880258323548357,
      "learning_rate": 9.00524563963343e-06,
      "loss": 0.4832,
      "step": 133
    },
    {
      "epoch": 0.8562300319488818,
      "grad_norm": 0.581072835588311,
      "learning_rate": 8.982799973485407e-06,
      "loss": 0.4957,
      "step": 134
    },
    {
      "epoch": 0.8626198083067093,
      "grad_norm": 0.7137741694459601,
      "learning_rate": 8.960132527513642e-06,
      "loss": 0.5215,
      "step": 135
    },
    {
      "epoch": 0.8690095846645367,
      "grad_norm": 0.6303942330303249,
      "learning_rate": 8.937244563941248e-06,
      "loss": 0.5311,
      "step": 136
    },
    {
      "epoch": 0.8753993610223643,
      "grad_norm": 0.5736321260943835,
      "learning_rate": 8.914137357270723e-06,
      "loss": 0.5273,
      "step": 137
    },
    {
      "epoch": 0.8817891373801917,
      "grad_norm": 0.6659752233979646,
      "learning_rate": 8.890812194212987e-06,
      "loss": 0.509,
      "step": 138
    },
    {
      "epoch": 0.8881789137380192,
      "grad_norm": 0.6063003791380733,
      "learning_rate": 8.867270373615735e-06,
      "loss": 0.4817,
      "step": 139
    },
    {
      "epoch": 0.8945686900958466,
      "grad_norm": 0.6323544992431169,
      "learning_rate": 8.8435132063911e-06,
      "loss": 0.5432,
      "step": 140
    },
    {
      "epoch": 0.9009584664536742,
      "grad_norm": 0.5875202515500545,
      "learning_rate": 8.81954201544267e-06,
      "loss": 0.5157,
      "step": 141
    },
    {
      "epoch": 0.9073482428115016,
      "grad_norm": 0.5158112960861807,
      "learning_rate": 8.79535813559181e-06,
      "loss": 0.5001,
      "step": 142
    },
    {
      "epoch": 0.9137380191693291,
      "grad_norm": 0.6139834649152105,
      "learning_rate": 8.77096291350334e-06,
      "loss": 0.51,
      "step": 143
    },
    {
      "epoch": 0.9201277955271565,
      "grad_norm": 0.561485414402271,
      "learning_rate": 8.746357707610544e-06,
      "loss": 0.5113,
      "step": 144
    },
    {
      "epoch": 0.9265175718849841,
      "grad_norm": 0.5609994812507103,
      "learning_rate": 8.721543888039534e-06,
      "loss": 0.4914,
      "step": 145
    },
    {
      "epoch": 0.9329073482428115,
      "grad_norm": 0.586348417327207,
      "learning_rate": 8.69652283653294e-06,
      "loss": 0.5002,
      "step": 146
    },
    {
      "epoch": 0.939297124600639,
      "grad_norm": 0.6465892255119615,
      "learning_rate": 8.671295946372989e-06,
      "loss": 0.51,
      "step": 147
    },
    {
      "epoch": 0.9456869009584664,
      "grad_norm": 0.5870012600957859,
      "learning_rate": 8.6458646223039e-06,
      "loss": 0.5439,
      "step": 148
    },
    {
      "epoch": 0.952076677316294,
      "grad_norm": 0.5662553653549135,
      "learning_rate": 8.620230280453672e-06,
      "loss": 0.5058,
      "step": 149
    },
    {
      "epoch": 0.9584664536741214,
      "grad_norm": 0.6486518411691115,
      "learning_rate": 8.594394348255239e-06,
      "loss": 0.5026,
      "step": 150
    },
    {
      "epoch": 0.9648562300319489,
      "grad_norm": 0.6335687814490685,
      "learning_rate": 8.568358264366958e-06,
      "loss": 0.4945,
      "step": 151
    },
    {
      "epoch": 0.9712460063897763,
      "grad_norm": 0.5380759128367937,
      "learning_rate": 8.542123478592518e-06,
      "loss": 0.4761,
      "step": 152
    },
    {
      "epoch": 0.9776357827476039,
      "grad_norm": 0.6134987796896156,
      "learning_rate": 8.515691451800206e-06,
      "loss": 0.4927,
      "step": 153
    },
    {
      "epoch": 0.9840255591054313,
      "grad_norm": 0.5952557338963458,
      "learning_rate": 8.489063655841552e-06,
      "loss": 0.5093,
      "step": 154
    },
    {
      "epoch": 0.9904153354632588,
      "grad_norm": 0.6076343709563325,
      "learning_rate": 8.462241573469378e-06,
      "loss": 0.4936,
      "step": 155
    },
    {
      "epoch": 0.9968051118210862,
      "grad_norm": 0.5325272303068264,
      "learning_rate": 8.435226698255228e-06,
      "loss": 0.5296,
      "step": 156
    },
    {
      "epoch": 1.0031948881789137,
      "grad_norm": 1.0161938671420945,
      "learning_rate": 8.408020534506195e-06,
      "loss": 0.7904,
      "step": 157
    },
    {
      "epoch": 1.0095846645367412,
      "grad_norm": 0.6034160193006088,
      "learning_rate": 8.380624597181165e-06,
      "loss": 0.469,
      "step": 158
    },
    {
      "epoch": 1.0159744408945688,
      "grad_norm": 0.644754478924477,
      "learning_rate": 8.353040411806449e-06,
      "loss": 0.4401,
      "step": 159
    },
    {
      "epoch": 1.0223642172523961,
      "grad_norm": 0.5199692418429636,
      "learning_rate": 8.325269514390835e-06,
      "loss": 0.4196,
      "step": 160
    },
    {
      "epoch": 1.0287539936102237,
      "grad_norm": 0.709228246079567,
      "learning_rate": 8.297313451340064e-06,
      "loss": 0.5009,
      "step": 161
    },
    {
      "epoch": 1.035143769968051,
      "grad_norm": 0.6079094018832423,
      "learning_rate": 8.269173779370712e-06,
      "loss": 0.4393,
      "step": 162
    },
    {
      "epoch": 1.0415335463258786,
      "grad_norm": 0.5851329417535205,
      "learning_rate": 8.240852065423507e-06,
      "loss": 0.4733,
      "step": 163
    },
    {
      "epoch": 1.0479233226837061,
      "grad_norm": 0.6480843008328585,
      "learning_rate": 8.21234988657607e-06,
      "loss": 0.4762,
      "step": 164
    },
    {
      "epoch": 1.0543130990415335,
      "grad_norm": 0.7724457770306185,
      "learning_rate": 8.183668829955111e-06,
      "loss": 0.4513,
      "step": 165
    },
    {
      "epoch": 1.060702875399361,
      "grad_norm": 0.66681760535574,
      "learning_rate": 8.154810492648038e-06,
      "loss": 0.5136,
      "step": 166
    },
    {
      "epoch": 1.0670926517571886,
      "grad_norm": 0.5765332946867638,
      "learning_rate": 8.125776481614025e-06,
      "loss": 0.4317,
      "step": 167
    },
    {
      "epoch": 1.073482428115016,
      "grad_norm": 0.5568878896758926,
      "learning_rate": 8.096568413594533e-06,
      "loss": 0.3822,
      "step": 168
    },
    {
      "epoch": 1.0798722044728435,
      "grad_norm": 0.6206731715380917,
      "learning_rate": 8.067187915023283e-06,
      "loss": 0.4174,
      "step": 169
    },
    {
      "epoch": 1.0862619808306708,
      "grad_norm": 0.682543625652864,
      "learning_rate": 8.037636621935686e-06,
      "loss": 0.551,
      "step": 170
    },
    {
      "epoch": 1.0926517571884984,
      "grad_norm": 0.4897959561888439,
      "learning_rate": 8.007916179877742e-06,
      "loss": 0.4369,
      "step": 171
    },
    {
      "epoch": 1.099041533546326,
      "grad_norm": 0.7818451288790205,
      "learning_rate": 7.978028243814416e-06,
      "loss": 0.5282,
      "step": 172
    },
    {
      "epoch": 1.1054313099041533,
      "grad_norm": 0.5844463438419834,
      "learning_rate": 7.947974478037468e-06,
      "loss": 0.4509,
      "step": 173
    },
    {
      "epoch": 1.1118210862619808,
      "grad_norm": 0.5892570136016434,
      "learning_rate": 7.917756556072792e-06,
      "loss": 0.4203,
      "step": 174
    },
    {
      "epoch": 1.1182108626198084,
      "grad_norm": 0.6334769130736677,
      "learning_rate": 7.887376160587214e-06,
      "loss": 0.4379,
      "step": 175
    },
    {
      "epoch": 1.1246006389776357,
      "grad_norm": 0.5891377737653463,
      "learning_rate": 7.85683498329481e-06,
      "loss": 0.4553,
      "step": 176
    },
    {
      "epoch": 1.1309904153354633,
      "grad_norm": 0.5718548911057971,
      "learning_rate": 7.826134724862687e-06,
      "loss": 0.4572,
      "step": 177
    },
    {
      "epoch": 1.1373801916932909,
      "grad_norm": 0.542183966636336,
      "learning_rate": 7.795277094816292e-06,
      "loss": 0.4023,
      "step": 178
    },
    {
      "epoch": 1.1437699680511182,
      "grad_norm": 0.675772555808779,
      "learning_rate": 7.764263811444214e-06,
      "loss": 0.5047,
      "step": 179
    },
    {
      "epoch": 1.1501597444089458,
      "grad_norm": 0.5970425855210937,
      "learning_rate": 7.733096601702508e-06,
      "loss": 0.438,
      "step": 180
    },
    {
      "epoch": 1.156549520766773,
      "grad_norm": 0.5058854610619358,
      "learning_rate": 7.70177720111852e-06,
      "loss": 0.4419,
      "step": 181
    },
    {
      "epoch": 1.1629392971246006,
      "grad_norm": 0.596766018731859,
      "learning_rate": 7.67030735369426e-06,
      "loss": 0.499,
      "step": 182
    },
    {
      "epoch": 1.1693290734824282,
      "grad_norm": 0.6019693135976418,
      "learning_rate": 7.638688811809274e-06,
      "loss": 0.4516,
      "step": 183
    },
    {
      "epoch": 1.1757188498402555,
      "grad_norm": 0.5688086313577724,
      "learning_rate": 7.6069233361230696e-06,
      "loss": 0.4864,
      "step": 184
    },
    {
      "epoch": 1.182108626198083,
      "grad_norm": 0.5444823406385277,
      "learning_rate": 7.575012695477076e-06,
      "loss": 0.41,
      "step": 185
    },
    {
      "epoch": 1.1884984025559104,
      "grad_norm": 0.6475358161762239,
      "learning_rate": 7.542958666796149e-06,
      "loss": 0.4585,
      "step": 186
    },
    {
      "epoch": 1.194888178913738,
      "grad_norm": 0.6520731788132865,
      "learning_rate": 7.510763034989616e-06,
      "loss": 0.5023,
      "step": 187
    },
    {
      "epoch": 1.2012779552715656,
      "grad_norm": 0.5647918008618648,
      "learning_rate": 7.478427592851894e-06,
      "loss": 0.4532,
      "step": 188
    },
    {
      "epoch": 1.207667731629393,
      "grad_norm": 0.5612898299187284,
      "learning_rate": 7.44595414096265e-06,
      "loss": 0.4615,
      "step": 189
    },
    {
      "epoch": 1.2140575079872205,
      "grad_norm": 0.6201130018182571,
      "learning_rate": 7.413344487586542e-06,
      "loss": 0.4727,
      "step": 190
    },
    {
      "epoch": 1.220447284345048,
      "grad_norm": 0.6836457184617555,
      "learning_rate": 7.380600448572532e-06,
      "loss": 0.5417,
      "step": 191
    },
    {
      "epoch": 1.2268370607028753,
      "grad_norm": 0.5524358944795331,
      "learning_rate": 7.347723847252756e-06,
      "loss": 0.4031,
      "step": 192
    },
    {
      "epoch": 1.233226837060703,
      "grad_norm": 0.6892770464559641,
      "learning_rate": 7.314716514341007e-06,
      "loss": 0.5466,
      "step": 193
    },
    {
      "epoch": 1.2396166134185305,
      "grad_norm": 0.48977487646481727,
      "learning_rate": 7.28158028783079e-06,
      "loss": 0.4168,
      "step": 194
    },
    {
      "epoch": 1.2460063897763578,
      "grad_norm": 0.5904604261843117,
      "learning_rate": 7.248317012892969e-06,
      "loss": 0.4585,
      "step": 195
    },
    {
      "epoch": 1.2523961661341854,
      "grad_norm": 0.6140896090102136,
      "learning_rate": 7.214928541773027e-06,
      "loss": 0.5206,
      "step": 196
    },
    {
      "epoch": 1.2587859424920127,
      "grad_norm": 0.5153473610058708,
      "learning_rate": 7.1814167336879195e-06,
      "loss": 0.4515,
      "step": 197
    },
    {
      "epoch": 1.2651757188498403,
      "grad_norm": 0.5641706604709406,
      "learning_rate": 7.147783454722545e-06,
      "loss": 0.4381,
      "step": 198
    },
    {
      "epoch": 1.2715654952076676,
      "grad_norm": 0.5467280105530581,
      "learning_rate": 7.1140305777258355e-06,
      "loss": 0.4382,
      "step": 199
    },
    {
      "epoch": 1.2779552715654952,
      "grad_norm": 0.5940078557429479,
      "learning_rate": 7.080159982206471e-06,
      "loss": 0.495,
      "step": 200
    },
    {
      "epoch": 1.2843450479233227,
      "grad_norm": 0.5481415073873086,
      "learning_rate": 7.046173554228213e-06,
      "loss": 0.4493,
      "step": 201
    },
    {
      "epoch": 1.29073482428115,
      "grad_norm": 0.5226708143072835,
      "learning_rate": 7.012073186304885e-06,
      "loss": 0.5147,
      "step": 202
    },
    {
      "epoch": 1.2971246006389776,
      "grad_norm": 0.5603032891355726,
      "learning_rate": 6.9778607772949894e-06,
      "loss": 0.4593,
      "step": 203
    },
    {
      "epoch": 1.3035143769968052,
      "grad_norm": 0.5155117345098292,
      "learning_rate": 6.943538232295965e-06,
      "loss": 0.4486,
      "step": 204
    },
    {
      "epoch": 1.3099041533546325,
      "grad_norm": 0.5125923437421245,
      "learning_rate": 6.909107462538113e-06,
      "loss": 0.4461,
      "step": 205
    },
    {
      "epoch": 1.31629392971246,
      "grad_norm": 0.5278736322801073,
      "learning_rate": 6.874570385278161e-06,
      "loss": 0.4668,
      "step": 206
    },
    {
      "epoch": 1.3226837060702876,
      "grad_norm": 0.4866207863589341,
      "learning_rate": 6.839928923692505e-06,
      "loss": 0.4516,
      "step": 207
    },
    {
      "epoch": 1.329073482428115,
      "grad_norm": 0.5497864006993186,
      "learning_rate": 6.805185006770125e-06,
      "loss": 0.478,
      "step": 208
    },
    {
      "epoch": 1.3354632587859425,
      "grad_norm": 0.47952476548514206,
      "learning_rate": 6.7703405692051585e-06,
      "loss": 0.4269,
      "step": 209
    },
    {
      "epoch": 1.34185303514377,
      "grad_norm": 0.566472528900126,
      "learning_rate": 6.735397551289179e-06,
      "loss": 0.5375,
      "step": 210
    },
    {
      "epoch": 1.3482428115015974,
      "grad_norm": 0.49366337098307594,
      "learning_rate": 6.700357898803146e-06,
      "loss": 0.4364,
      "step": 211
    },
    {
      "epoch": 1.354632587859425,
      "grad_norm": 0.5082412356887517,
      "learning_rate": 6.665223562909058e-06,
      "loss": 0.4753,
      "step": 212
    },
    {
      "epoch": 1.3610223642172525,
      "grad_norm": 0.5322742527094048,
      "learning_rate": 6.629996500041299e-06,
      "loss": 0.4512,
      "step": 213
    },
    {
      "epoch": 1.3674121405750799,
      "grad_norm": 0.53645890162532,
      "learning_rate": 6.5946786717977026e-06,
      "loss": 0.4677,
      "step": 214
    },
    {
      "epoch": 1.3738019169329074,
      "grad_norm": 0.5140782482192351,
      "learning_rate": 6.5592720448303174e-06,
      "loss": 0.4067,
      "step": 215
    },
    {
      "epoch": 1.3801916932907348,
      "grad_norm": 0.48626291680198364,
      "learning_rate": 6.523778590735892e-06,
      "loss": 0.4265,
      "step": 216
    },
    {
      "epoch": 1.3865814696485623,
      "grad_norm": 0.48629716552002167,
      "learning_rate": 6.488200285946094e-06,
      "loss": 0.4775,
      "step": 217
    },
    {
      "epoch": 1.3929712460063897,
      "grad_norm": 0.5120946036888885,
      "learning_rate": 6.452539111617454e-06,
      "loss": 0.4539,
      "step": 218
    },
    {
      "epoch": 1.3993610223642172,
      "grad_norm": 0.542860867459949,
      "learning_rate": 6.416797053521039e-06,
      "loss": 0.4642,
      "step": 219
    },
    {
      "epoch": 1.4057507987220448,
      "grad_norm": 0.5124124603028827,
      "learning_rate": 6.380976101931879e-06,
      "loss": 0.4582,
      "step": 220
    },
    {
      "epoch": 1.4121405750798721,
      "grad_norm": 0.4517956483758476,
      "learning_rate": 6.345078251518144e-06,
      "loss": 0.4301,
      "step": 221
    },
    {
      "epoch": 1.4185303514376997,
      "grad_norm": 0.5830331141613415,
      "learning_rate": 6.3091055012300675e-06,
      "loss": 0.5153,
      "step": 222
    },
    {
      "epoch": 1.4249201277955272,
      "grad_norm": 0.48233448247479527,
      "learning_rate": 6.273059854188636e-06,
      "loss": 0.4437,
      "step": 223
    },
    {
      "epoch": 1.4313099041533546,
      "grad_norm": 0.5444193247707191,
      "learning_rate": 6.236943317574054e-06,
      "loss": 0.4759,
      "step": 224
    },
    {
      "epoch": 1.4376996805111821,
      "grad_norm": 0.5024988130991458,
      "learning_rate": 6.200757902513962e-06,
      "loss": 0.4679,
      "step": 225
    },
    {
      "epoch": 1.4440894568690097,
      "grad_norm": 0.5175885271861922,
      "learning_rate": 6.164505623971458e-06,
      "loss": 0.4234,
      "step": 226
    },
    {
      "epoch": 1.450479233226837,
      "grad_norm": 0.4376542443716638,
      "learning_rate": 6.128188500632892e-06,
      "loss": 0.398,
      "step": 227
    },
    {
      "epoch": 1.4568690095846646,
      "grad_norm": 0.5759942567661992,
      "learning_rate": 6.091808554795462e-06,
      "loss": 0.5398,
      "step": 228
    },
    {
      "epoch": 1.4632587859424921,
      "grad_norm": 0.4898895278757163,
      "learning_rate": 6.055367812254592e-06,
      "loss": 0.4255,
      "step": 229
    },
    {
      "epoch": 1.4696485623003195,
      "grad_norm": 0.4696729162734873,
      "learning_rate": 6.0188683021911394e-06,
      "loss": 0.4489,
      "step": 230
    },
    {
      "epoch": 1.476038338658147,
      "grad_norm": 0.5098940508988645,
      "learning_rate": 5.982312057058392e-06,
      "loss": 0.4104,
      "step": 231
    },
    {
      "epoch": 1.4824281150159744,
      "grad_norm": 0.6198589467660736,
      "learning_rate": 5.9457011124689025e-06,
      "loss": 0.501,
      "step": 232
    },
    {
      "epoch": 1.488817891373802,
      "grad_norm": 0.4565136471110508,
      "learning_rate": 5.9090375070811215e-06,
      "loss": 0.3869,
      "step": 233
    },
    {
      "epoch": 1.4952076677316293,
      "grad_norm": 0.5057090794514764,
      "learning_rate": 5.872323282485889e-06,
      "loss": 0.4588,
      "step": 234
    },
    {
      "epoch": 1.5015974440894568,
      "grad_norm": 0.5608394144228892,
      "learning_rate": 5.835560483092743e-06,
      "loss": 0.4653,
      "step": 235
    },
    {
      "epoch": 1.5079872204472844,
      "grad_norm": 0.6220909500036299,
      "learning_rate": 5.798751156016085e-06,
      "loss": 0.4403,
      "step": 236
    },
    {
      "epoch": 1.5143769968051117,
      "grad_norm": 0.5819233906027126,
      "learning_rate": 5.7618973509611755e-06,
      "loss": 0.4671,
      "step": 237
    },
    {
      "epoch": 1.5207667731629393,
      "grad_norm": 0.505958297197071,
      "learning_rate": 5.72500112011001e-06,
      "loss": 0.3863,
      "step": 238
    },
    {
      "epoch": 1.5271565495207668,
      "grad_norm": 0.5491380150544555,
      "learning_rate": 5.688064518007036e-06,
      "loss": 0.4611,
      "step": 239
    },
    {
      "epoch": 1.5335463258785942,
      "grad_norm": 0.5108050814170858,
      "learning_rate": 5.651089601444752e-06,
      "loss": 0.4266,
      "step": 240
    },
    {
      "epoch": 1.5399361022364217,
      "grad_norm": 0.530547935838076,
      "learning_rate": 5.614078429349172e-06,
      "loss": 0.5397,
      "step": 241
    },
    {
      "epoch": 1.5463258785942493,
      "grad_norm": 0.519497556597016,
      "learning_rate": 5.577033062665179e-06,
      "loss": 0.4192,
      "step": 242
    },
    {
      "epoch": 1.5527156549520766,
      "grad_norm": 0.4455480383961401,
      "learning_rate": 5.53995556424176e-06,
      "loss": 0.3964,
      "step": 243
    },
    {
      "epoch": 1.5591054313099042,
      "grad_norm": 0.48544208406671674,
      "learning_rate": 5.50284799871714e-06,
      "loss": 0.4475,
      "step": 244
    },
    {
      "epoch": 1.5654952076677318,
      "grad_norm": 0.5282757804682853,
      "learning_rate": 5.465712432403812e-06,
      "loss": 0.4599,
      "step": 245
    },
    {
      "epoch": 1.571884984025559,
      "grad_norm": 0.4279150051045706,
      "learning_rate": 5.428550933173476e-06,
      "loss": 0.4,
      "step": 246
    },
    {
      "epoch": 1.5782747603833864,
      "grad_norm": 0.5469340909930815,
      "learning_rate": 5.391365570341893e-06,
      "loss": 0.5113,
      "step": 247
    },
    {
      "epoch": 1.5846645367412142,
      "grad_norm": 0.4618048376781538,
      "learning_rate": 5.3541584145536475e-06,
      "loss": 0.4291,
      "step": 248
    },
    {
      "epoch": 1.5910543130990416,
      "grad_norm": 0.5225663531458168,
      "learning_rate": 5.3169315376668566e-06,
      "loss": 0.4848,
      "step": 249
    },
    {
      "epoch": 1.5974440894568689,
      "grad_norm": 0.49273053679401285,
      "learning_rate": 5.279687012637798e-06,
      "loss": 0.4784,
      "step": 250
    },
    {
      "epoch": 1.6038338658146964,
      "grad_norm": 0.46554327160527115,
      "learning_rate": 5.242426913405471e-06,
      "loss": 0.3884,
      "step": 251
    },
    {
      "epoch": 1.610223642172524,
      "grad_norm": 0.5337970795767303,
      "learning_rate": 5.2051533147761155e-06,
      "loss": 0.4842,
      "step": 252
    },
    {
      "epoch": 1.6166134185303513,
      "grad_norm": 0.48323448493104104,
      "learning_rate": 5.167868292307679e-06,
      "loss": 0.4648,
      "step": 253
    },
    {
      "epoch": 1.623003194888179,
      "grad_norm": 0.49347032813805447,
      "learning_rate": 5.130573922194236e-06,
      "loss": 0.4431,
      "step": 254
    },
    {
      "epoch": 1.6293929712460065,
      "grad_norm": 0.50006423841222,
      "learning_rate": 5.093272281150383e-06,
      "loss": 0.3953,
      "step": 255
    },
    {
      "epoch": 1.6357827476038338,
      "grad_norm": 0.4949626163764748,
      "learning_rate": 5.05596544629559e-06,
      "loss": 0.477,
      "step": 256
    },
    {
      "epoch": 1.6421725239616614,
      "grad_norm": 0.46206160201037194,
      "learning_rate": 5.018655495038542e-06,
      "loss": 0.4404,
      "step": 257
    },
    {
      "epoch": 1.648562300319489,
      "grad_norm": 0.5095043130383302,
      "learning_rate": 4.981344504961459e-06,
      "loss": 0.4675,
      "step": 258
    },
    {
      "epoch": 1.6549520766773163,
      "grad_norm": 0.4971947533379268,
      "learning_rate": 4.944034553704412e-06,
      "loss": 0.4368,
      "step": 259
    },
    {
      "epoch": 1.6613418530351438,
      "grad_norm": 0.49298857462259404,
      "learning_rate": 4.906727718849619e-06,
      "loss": 0.417,
      "step": 260
    },
    {
      "epoch": 1.6677316293929714,
      "grad_norm": 0.5054393682029441,
      "learning_rate": 4.8694260778057655e-06,
      "loss": 0.4863,
      "step": 261
    },
    {
      "epoch": 1.6741214057507987,
      "grad_norm": 0.5048349623117495,
      "learning_rate": 4.832131707692322e-06,
      "loss": 0.4885,
      "step": 262
    },
    {
      "epoch": 1.680511182108626,
      "grad_norm": 0.49665382088434956,
      "learning_rate": 4.7948466852238844e-06,
      "loss": 0.4544,
      "step": 263
    },
    {
      "epoch": 1.6869009584664538,
      "grad_norm": 0.45794905976989625,
      "learning_rate": 4.757573086594529e-06,
      "loss": 0.4111,
      "step": 264
    },
    {
      "epoch": 1.6932907348242812,
      "grad_norm": 0.4867140035140799,
      "learning_rate": 4.720312987362204e-06,
      "loss": 0.4673,
      "step": 265
    },
    {
      "epoch": 1.6996805111821085,
      "grad_norm": 0.4877025701394254,
      "learning_rate": 4.683068462333144e-06,
      "loss": 0.4534,
      "step": 266
    },
    {
      "epoch": 1.706070287539936,
      "grad_norm": 0.5164443235320769,
      "learning_rate": 4.645841585446356e-06,
      "loss": 0.4729,
      "step": 267
    },
    {
      "epoch": 1.7124600638977636,
      "grad_norm": 0.5206651702866386,
      "learning_rate": 4.6086344296581095e-06,
      "loss": 0.4515,
      "step": 268
    },
    {
      "epoch": 1.718849840255591,
      "grad_norm": 0.44975243940892895,
      "learning_rate": 4.5714490668265245e-06,
      "loss": 0.3956,
      "step": 269
    },
    {
      "epoch": 1.7252396166134185,
      "grad_norm": 0.5150600979285241,
      "learning_rate": 4.534287567596189e-06,
      "loss": 0.4826,
      "step": 270
    },
    {
      "epoch": 1.731629392971246,
      "grad_norm": 0.44663737805507914,
      "learning_rate": 4.497152001282861e-06,
      "loss": 0.4611,
      "step": 271
    },
    {
      "epoch": 1.7380191693290734,
      "grad_norm": 0.47041695438535863,
      "learning_rate": 4.460044435758241e-06,
      "loss": 0.4731,
      "step": 272
    },
    {
      "epoch": 1.744408945686901,
      "grad_norm": 0.49359752589080874,
      "learning_rate": 4.4229669373348225e-06,
      "loss": 0.4691,
      "step": 273
    },
    {
      "epoch": 1.7507987220447285,
      "grad_norm": 0.4755332416119975,
      "learning_rate": 4.3859215706508295e-06,
      "loss": 0.5063,
      "step": 274
    },
    {
      "epoch": 1.7571884984025559,
      "grad_norm": 0.46330533243846833,
      "learning_rate": 4.348910398555249e-06,
      "loss": 0.4332,
      "step": 275
    },
    {
      "epoch": 1.7635782747603834,
      "grad_norm": 0.4703765694158572,
      "learning_rate": 4.311935481992965e-06,
      "loss": 0.4414,
      "step": 276
    },
    {
      "epoch": 1.769968051118211,
      "grad_norm": 0.43873743245514685,
      "learning_rate": 4.274998879889991e-06,
      "loss": 0.4154,
      "step": 277
    },
    {
      "epoch": 1.7763578274760383,
      "grad_norm": 0.5141018858801798,
      "learning_rate": 4.238102649038825e-06,
      "loss": 0.4835,
      "step": 278
    },
    {
      "epoch": 1.7827476038338657,
      "grad_norm": 0.5307194537828929,
      "learning_rate": 4.2012488439839185e-06,
      "loss": 0.4395,
      "step": 279
    },
    {
      "epoch": 1.7891373801916934,
      "grad_norm": 0.4611910870857464,
      "learning_rate": 4.164439516907258e-06,
      "loss": 0.4301,
      "step": 280
    },
    {
      "epoch": 1.7955271565495208,
      "grad_norm": 0.4530565187328216,
      "learning_rate": 4.127676717514114e-06,
      "loss": 0.4656,
      "step": 281
    },
    {
      "epoch": 1.8019169329073481,
      "grad_norm": 0.46146544394786343,
      "learning_rate": 4.090962492918881e-06,
      "loss": 0.4251,
      "step": 282
    },
    {
      "epoch": 1.8083067092651757,
      "grad_norm": 0.4658753245837098,
      "learning_rate": 4.054298887531099e-06,
      "loss": 0.4263,
      "step": 283
    },
    {
      "epoch": 1.8146964856230032,
      "grad_norm": 0.5475493259239109,
      "learning_rate": 4.017687942941609e-06,
      "loss": 0.5073,
      "step": 284
    },
    {
      "epoch": 1.8210862619808306,
      "grad_norm": 0.4549777584024105,
      "learning_rate": 3.981131697808862e-06,
      "loss": 0.4339,
      "step": 285
    },
    {
      "epoch": 1.8274760383386581,
      "grad_norm": 0.5215504813505114,
      "learning_rate": 3.94463218774541e-06,
      "loss": 0.5156,
      "step": 286
    },
    {
      "epoch": 1.8338658146964857,
      "grad_norm": 0.4335416500238613,
      "learning_rate": 3.90819144520454e-06,
      "loss": 0.406,
      "step": 287
    },
    {
      "epoch": 1.840255591054313,
      "grad_norm": 0.49909105011412563,
      "learning_rate": 3.8718114993671086e-06,
      "loss": 0.4829,
      "step": 288
    },
    {
      "epoch": 1.8466453674121406,
      "grad_norm": 0.4628816958363882,
      "learning_rate": 3.835494376028544e-06,
      "loss": 0.4224,
      "step": 289
    },
    {
      "epoch": 1.8530351437699681,
      "grad_norm": 0.4533349508451965,
      "learning_rate": 3.799242097486038e-06,
      "loss": 0.4704,
      "step": 290
    },
    {
      "epoch": 1.8594249201277955,
      "grad_norm": 0.4690421359779084,
      "learning_rate": 3.7630566824259456e-06,
      "loss": 0.4393,
      "step": 291
    },
    {
      "epoch": 1.865814696485623,
      "grad_norm": 0.4630875020603059,
      "learning_rate": 3.726940145811363e-06,
      "loss": 0.4218,
      "step": 292
    },
    {
      "epoch": 1.8722044728434506,
      "grad_norm": 0.48834085374534414,
      "learning_rate": 3.6908944987699346e-06,
      "loss": 0.4633,
      "step": 293
    },
    {
      "epoch": 1.878594249201278,
      "grad_norm": 0.4836104614209088,
      "learning_rate": 3.6549217484818576e-06,
      "loss": 0.3949,
      "step": 294
    },
    {
      "epoch": 1.8849840255591053,
      "grad_norm": 0.4927575530847232,
      "learning_rate": 3.6190238980681235e-06,
      "loss": 0.4919,
      "step": 295
    },
    {
      "epoch": 1.891373801916933,
      "grad_norm": 0.4763861274009809,
      "learning_rate": 3.583202946478963e-06,
      "loss": 0.4336,
      "step": 296
    },
    {
      "epoch": 1.8977635782747604,
      "grad_norm": 0.4309910470920112,
      "learning_rate": 3.5474608883825475e-06,
      "loss": 0.4415,
      "step": 297
    },
    {
      "epoch": 1.9041533546325877,
      "grad_norm": 0.45467413959799413,
      "learning_rate": 3.5117997140539073e-06,
      "loss": 0.4946,
      "step": 298
    },
    {
      "epoch": 1.9105431309904153,
      "grad_norm": 0.42259856610255164,
      "learning_rate": 3.47622140926411e-06,
      "loss": 0.4256,
      "step": 299
    },
    {
      "epoch": 1.9169329073482428,
      "grad_norm": 0.4427788655758311,
      "learning_rate": 3.4407279551696846e-06,
      "loss": 0.4835,
      "step": 300
    },
    {
      "epoch": 1.9233226837060702,
      "grad_norm": 0.4605258419395826,
      "learning_rate": 3.4053213282022983e-06,
      "loss": 0.44,
      "step": 301
    },
    {
      "epoch": 1.9297124600638977,
      "grad_norm": 0.5071629759274202,
      "learning_rate": 3.370003499958703e-06,
      "loss": 0.4645,
      "step": 302
    },
    {
      "epoch": 1.9361022364217253,
      "grad_norm": 0.48363360093763774,
      "learning_rate": 3.334776437090944e-06,
      "loss": 0.462,
      "step": 303
    },
    {
      "epoch": 1.9424920127795526,
      "grad_norm": 0.4457750481400301,
      "learning_rate": 3.2996421011968546e-06,
      "loss": 0.432,
      "step": 304
    },
    {
      "epoch": 1.9488817891373802,
      "grad_norm": 0.5209371840279784,
      "learning_rate": 3.264602448710822e-06,
      "loss": 0.5063,
      "step": 305
    },
    {
      "epoch": 1.9552715654952078,
      "grad_norm": 0.4580657598769086,
      "learning_rate": 3.2296594307948428e-06,
      "loss": 0.4476,
      "step": 306
    },
    {
      "epoch": 1.961661341853035,
      "grad_norm": 0.48115111879063843,
      "learning_rate": 3.194814993229878e-06,
      "loss": 0.4974,
      "step": 307
    },
    {
      "epoch": 1.9680511182108626,
      "grad_norm": 0.42678489823446886,
      "learning_rate": 3.1600710763074972e-06,
      "loss": 0.3876,
      "step": 308
    },
    {
      "epoch": 1.9744408945686902,
      "grad_norm": 0.4943139109451635,
      "learning_rate": 3.125429614721842e-06,
      "loss": 0.4586,
      "step": 309
    },
    {
      "epoch": 1.9808306709265175,
      "grad_norm": 0.5049796788632963,
      "learning_rate": 3.090892537461889e-06,
      "loss": 0.455,
      "step": 310
    },
    {
      "epoch": 1.9872204472843449,
      "grad_norm": 0.4088021294321166,
      "learning_rate": 3.056461767704037e-06,
      "loss": 0.4289,
      "step": 311
    },
    {
      "epoch": 1.9936102236421727,
      "grad_norm": 0.5464401476508677,
      "learning_rate": 3.0221392227050126e-06,
      "loss": 0.4965,
      "step": 312
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.721460700158716,
      "learning_rate": 2.9879268136951163e-06,
      "loss": 0.5999,
      "step": 313
    },
    {
      "epoch": 2.0063897763578273,
      "grad_norm": 0.50592498250288,
      "learning_rate": 2.953826445771788e-06,
      "loss": 0.4096,
      "step": 314
    },
    {
      "epoch": 2.012779552715655,
      "grad_norm": 0.4815005239435329,
      "learning_rate": 2.9198400177935303e-06,
      "loss": 0.4204,
      "step": 315
    },
    {
      "epoch": 2.0191693290734825,
      "grad_norm": 0.4919344912678577,
      "learning_rate": 2.8859694222741653e-06,
      "loss": 0.4065,
      "step": 316
    },
    {
      "epoch": 2.02555910543131,
      "grad_norm": 0.4836934263357082,
      "learning_rate": 2.852216545277456e-06,
      "loss": 0.417,
      "step": 317
    },
    {
      "epoch": 2.0319488817891376,
      "grad_norm": 0.5111485054174799,
      "learning_rate": 2.8185832663120817e-06,
      "loss": 0.4178,
      "step": 318
    },
    {
      "epoch": 2.038338658146965,
      "grad_norm": 0.4876654217900757,
      "learning_rate": 2.785071458226972e-06,
      "loss": 0.4162,
      "step": 319
    },
    {
      "epoch": 2.0447284345047922,
      "grad_norm": 0.4889408957175494,
      "learning_rate": 2.7516829871070295e-06,
      "loss": 0.4227,
      "step": 320
    },
    {
      "epoch": 2.0511182108626196,
      "grad_norm": 0.47704930971226556,
      "learning_rate": 2.718419712169213e-06,
      "loss": 0.4171,
      "step": 321
    },
    {
      "epoch": 2.0575079872204474,
      "grad_norm": 0.4941892851571367,
      "learning_rate": 2.685283485658995e-06,
      "loss": 0.3946,
      "step": 322
    },
    {
      "epoch": 2.0638977635782747,
      "grad_norm": 0.49536189000668357,
      "learning_rate": 2.6522761527472464e-06,
      "loss": 0.4055,
      "step": 323
    },
    {
      "epoch": 2.070287539936102,
      "grad_norm": 0.498407448658534,
      "learning_rate": 2.6193995514274705e-06,
      "loss": 0.4286,
      "step": 324
    },
    {
      "epoch": 2.07667731629393,
      "grad_norm": 0.49324569254265715,
      "learning_rate": 2.586655512413458e-06,
      "loss": 0.3876,
      "step": 325
    },
    {
      "epoch": 2.083067092651757,
      "grad_norm": 0.4571096566119231,
      "learning_rate": 2.554045859037353e-06,
      "loss": 0.431,
      "step": 326
    },
    {
      "epoch": 2.0894568690095845,
      "grad_norm": 0.48319541945049765,
      "learning_rate": 2.521572407148107e-06,
      "loss": 0.4077,
      "step": 327
    },
    {
      "epoch": 2.0958466453674123,
      "grad_norm": 0.5509207161299222,
      "learning_rate": 2.4892369650103837e-06,
      "loss": 0.4051,
      "step": 328
    },
    {
      "epoch": 2.1022364217252396,
      "grad_norm": 0.49806488185391035,
      "learning_rate": 2.4570413332038523e-06,
      "loss": 0.3732,
      "step": 329
    },
    {
      "epoch": 2.108626198083067,
      "grad_norm": 0.4723552960459098,
      "learning_rate": 2.4249873045229244e-06,
      "loss": 0.4008,
      "step": 330
    },
    {
      "epoch": 2.1150159744408947,
      "grad_norm": 0.46395555256164933,
      "learning_rate": 2.3930766638769325e-06,
      "loss": 0.405,
      "step": 331
    },
    {
      "epoch": 2.121405750798722,
      "grad_norm": 0.4528638272264116,
      "learning_rate": 2.3613111881907273e-06,
      "loss": 0.3923,
      "step": 332
    },
    {
      "epoch": 2.1277955271565494,
      "grad_norm": 0.4474661011978129,
      "learning_rate": 2.3296926463057396e-06,
      "loss": 0.3939,
      "step": 333
    },
    {
      "epoch": 2.134185303514377,
      "grad_norm": 0.421027222898932,
      "learning_rate": 2.29822279888148e-06,
      "loss": 0.3959,
      "step": 334
    },
    {
      "epoch": 2.1405750798722045,
      "grad_norm": 0.4592802347221619,
      "learning_rate": 2.2669033982974946e-06,
      "loss": 0.4106,
      "step": 335
    },
    {
      "epoch": 2.146964856230032,
      "grad_norm": 0.46615052740034235,
      "learning_rate": 2.235736188555787e-06,
      "loss": 0.4319,
      "step": 336
    },
    {
      "epoch": 2.1533546325878596,
      "grad_norm": 0.5136771524664518,
      "learning_rate": 2.2047229051837107e-06,
      "loss": 0.4372,
      "step": 337
    },
    {
      "epoch": 2.159744408945687,
      "grad_norm": 0.4515706738752584,
      "learning_rate": 2.173865275137314e-06,
      "loss": 0.4268,
      "step": 338
    },
    {
      "epoch": 2.1661341853035143,
      "grad_norm": 0.4058929378533555,
      "learning_rate": 2.143165016705192e-06,
      "loss": 0.3638,
      "step": 339
    },
    {
      "epoch": 2.1725239616613417,
      "grad_norm": 0.4358373263176716,
      "learning_rate": 2.1126238394127868e-06,
      "loss": 0.3834,
      "step": 340
    },
    {
      "epoch": 2.1789137380191694,
      "grad_norm": 0.4822754167503809,
      "learning_rate": 2.082243443927212e-06,
      "loss": 0.4131,
      "step": 341
| }, | |
| { | |
| "epoch": 2.1853035143769968, | |
| "grad_norm": 0.4863778870340934, | |
| "learning_rate": 2.052025521962534e-06, | |
| "loss": 0.4181, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 2.191693290734824, | |
| "grad_norm": 0.4558203017460311, | |
| "learning_rate": 2.0219717561855857e-06, | |
| "loss": 0.4152, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 2.198083067092652, | |
| "grad_norm": 0.4020002217520936, | |
| "learning_rate": 1.992083820122259e-06, | |
| "loss": 0.418, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 2.2044728434504792, | |
| "grad_norm": 0.41891725804618096, | |
| "learning_rate": 1.962363378064316e-06, | |
| "loss": 0.4342, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 2.2108626198083066, | |
| "grad_norm": 0.4347711215955635, | |
| "learning_rate": 1.9328120849767198e-06, | |
| "loss": 0.3909, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 2.2172523961661343, | |
| "grad_norm": 0.4688921121601955, | |
| "learning_rate": 1.9034315864054682e-06, | |
| "loss": 0.4363, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 2.2236421725239617, | |
| "grad_norm": 0.4512802904655767, | |
| "learning_rate": 1.8742235183859747e-06, | |
| "loss": 0.4233, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 2.230031948881789, | |
| "grad_norm": 0.4382006382370674, | |
| "learning_rate": 1.8451895073519643e-06, | |
| "loss": 0.3794, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 2.236421725239617, | |
| "grad_norm": 0.4454650826780735, | |
| "learning_rate": 1.8163311700448899e-06, | |
| "loss": 0.4106, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 2.242811501597444, | |
| "grad_norm": 0.45973761832057647, | |
| "learning_rate": 1.7876501134239316e-06, | |
| "loss": 0.3999, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 2.2492012779552715, | |
| "grad_norm": 0.44812123349696353, | |
| "learning_rate": 1.7591479345764972e-06, | |
| "loss": 0.4107, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 2.255591054313099, | |
| "grad_norm": 0.41964973266665767, | |
| "learning_rate": 1.7308262206292898e-06, | |
| "loss": 0.4169, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 2.2619808306709266, | |
| "grad_norm": 0.44609980592121035, | |
| "learning_rate": 1.7026865486599375e-06, | |
| "loss": 0.364, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 2.268370607028754, | |
| "grad_norm": 0.4266000906747224, | |
| "learning_rate": 1.6747304856091662e-06, | |
| "loss": 0.3808, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 2.2747603833865817, | |
| "grad_norm": 0.4350563035929625, | |
| "learning_rate": 1.6469595881935523e-06, | |
| "loss": 0.3921, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 2.281150159744409, | |
| "grad_norm": 0.4129513322439617, | |
| "learning_rate": 1.6193754028188363e-06, | |
| "loss": 0.4012, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 2.2875399361022364, | |
| "grad_norm": 0.4331680795837092, | |
| "learning_rate": 1.591979465493806e-06, | |
| "loss": 0.395, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 2.2939297124600637, | |
| "grad_norm": 0.3831972251838243, | |
| "learning_rate": 1.5647733017447741e-06, | |
| "loss": 0.4192, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 2.3003194888178915, | |
| "grad_norm": 0.4002251544746337, | |
| "learning_rate": 1.5377584265306222e-06, | |
| "loss": 0.3826, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 2.306709265175719, | |
| "grad_norm": 0.3948825051164168, | |
| "learning_rate": 1.510936344158448e-06, | |
| "loss": 0.3739, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 2.313099041533546, | |
| "grad_norm": 0.3965434929052763, | |
| "learning_rate": 1.484308548199796e-06, | |
| "loss": 0.3696, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 2.319488817891374, | |
| "grad_norm": 0.4327192766078725, | |
| "learning_rate": 1.4578765214074842e-06, | |
| "loss": 0.4113, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 2.3258785942492013, | |
| "grad_norm": 0.4073354571122917, | |
| "learning_rate": 1.4316417356330441e-06, | |
| "loss": 0.3845, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 2.3322683706070286, | |
| "grad_norm": 0.41277676364789384, | |
| "learning_rate": 1.4056056517447637e-06, | |
| "loss": 0.377, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 2.3386581469648564, | |
| "grad_norm": 0.4149049495469826, | |
| "learning_rate": 1.3797697195463278e-06, | |
| "loss": 0.4298, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 2.3450479233226837, | |
| "grad_norm": 0.4117195440029289, | |
| "learning_rate": 1.3541353776961035e-06, | |
| "loss": 0.4068, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 2.351437699680511, | |
| "grad_norm": 0.3841644723162104, | |
| "learning_rate": 1.3287040536270135e-06, | |
| "loss": 0.4017, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 2.357827476038339, | |
| "grad_norm": 0.4303326263598452, | |
| "learning_rate": 1.30347716346706e-06, | |
| "loss": 0.4332, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 2.364217252396166, | |
| "grad_norm": 0.41401970610364003, | |
| "learning_rate": 1.2784561119604683e-06, | |
| "loss": 0.3681, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 2.3706070287539935, | |
| "grad_norm": 0.40385544293286596, | |
| "learning_rate": 1.2536422923894565e-06, | |
| "loss": 0.3825, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 2.376996805111821, | |
| "grad_norm": 0.4394370254509512, | |
| "learning_rate": 1.2290370864966623e-06, | |
| "loss": 0.4088, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 2.3833865814696487, | |
| "grad_norm": 0.4254728931741769, | |
| "learning_rate": 1.2046418644081904e-06, | |
| "loss": 0.391, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 2.389776357827476, | |
| "grad_norm": 0.42666004262363705, | |
| "learning_rate": 1.1804579845573288e-06, | |
| "loss": 0.4476, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 2.3961661341853033, | |
| "grad_norm": 0.4373776397437837, | |
| "learning_rate": 1.156486793608899e-06, | |
| "loss": 0.4188, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 2.402555910543131, | |
| "grad_norm": 0.3867171395976531, | |
| "learning_rate": 1.1327296263842653e-06, | |
| "loss": 0.399, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 2.4089456869009584, | |
| "grad_norm": 0.39763707037878027, | |
| "learning_rate": 1.1091878057870137e-06, | |
| "loss": 0.4019, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 2.415335463258786, | |
| "grad_norm": 0.4245467453346327, | |
| "learning_rate": 1.0858626427292796e-06, | |
| "loss": 0.4332, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 2.4217252396166136, | |
| "grad_norm": 0.41903475208384494, | |
| "learning_rate": 1.0627554360587533e-06, | |
| "loss": 0.4077, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 2.428115015974441, | |
| "grad_norm": 0.4604881881845569, | |
| "learning_rate": 1.0398674724863584e-06, | |
| "loss": 0.3906, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 2.4345047923322682, | |
| "grad_norm": 0.4168020913917256, | |
| "learning_rate": 1.0172000265145938e-06, | |
| "loss": 0.407, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 2.440894568690096, | |
| "grad_norm": 0.3844958002632311, | |
| "learning_rate": 9.947543603665711e-07, | |
| "loss": 0.4081, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 2.4472843450479234, | |
| "grad_norm": 0.4155711405073521, | |
| "learning_rate": 9.72531723915726e-07, | |
| "loss": 0.3926, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 2.4536741214057507, | |
| "grad_norm": 0.37669505673974685, | |
| "learning_rate": 9.505333546162171e-07, | |
| "loss": 0.3968, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 2.460063897763578, | |
| "grad_norm": 0.39108937314306624, | |
| "learning_rate": 9.287604774340236e-07, | |
| "loss": 0.4092, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 2.466453674121406, | |
| "grad_norm": 0.4154659598997551, | |
| "learning_rate": 9.07214304778729e-07, | |
| "loss": 0.3836, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 2.472843450479233, | |
| "grad_norm": 0.42732909763740085, | |
| "learning_rate": 8.858960364360142e-07, | |
| "loss": 0.3909, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 2.479233226837061, | |
| "grad_norm": 0.46781450823047865, | |
| "learning_rate": 8.648068595008458e-07, | |
| "loss": 0.4295, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 2.4856230031948883, | |
| "grad_norm": 0.4207052994172123, | |
| "learning_rate": 8.439479483113683e-07, | |
| "loss": 0.412, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 2.4920127795527156, | |
| "grad_norm": 0.4651449696067935, | |
| "learning_rate": 8.233204643835235e-07, | |
| "loss": 0.4401, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 2.498402555910543, | |
| "grad_norm": 0.432963422375978, | |
| "learning_rate": 8.029255563463589e-07, | |
| "loss": 0.4232, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 2.5047923322683707, | |
| "grad_norm": 0.4012166356865382, | |
| "learning_rate": 7.827643598780748e-07, | |
| "loss": 0.3839, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 2.511182108626198, | |
| "grad_norm": 0.401740942677533, | |
| "learning_rate": 7.628379976427868e-07, | |
| "loss": 0.403, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 2.5175718849840254, | |
| "grad_norm": 0.4156246235081078, | |
| "learning_rate": 7.431475792280018e-07, | |
| "loss": 0.4106, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 2.523961661341853, | |
| "grad_norm": 0.39956361367808163, | |
| "learning_rate": 7.23694201082843e-07, | |
| "loss": 0.3808, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 2.5303514376996805, | |
| "grad_norm": 0.42524257439713276, | |
| "learning_rate": 7.044789464569817e-07, | |
| "loss": 0.3947, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 2.536741214057508, | |
| "grad_norm": 0.4289252624573838, | |
| "learning_rate": 6.855028853403295e-07, | |
| "loss": 0.4013, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 2.543130990415335, | |
| "grad_norm": 0.4199678098142181, | |
| "learning_rate": 6.667670744034498e-07, | |
| "loss": 0.4427, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 2.549520766773163, | |
| "grad_norm": 0.42374370185700533, | |
| "learning_rate": 6.482725569387171e-07, | |
| "loss": 0.3886, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 2.5559105431309903, | |
| "grad_norm": 0.40953925753906273, | |
| "learning_rate": 6.300203628022272e-07, | |
| "loss": 0.4188, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 2.562300319488818, | |
| "grad_norm": 0.3965203916622367, | |
| "learning_rate": 6.120115083564432e-07, | |
| "loss": 0.402, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 2.5686900958466454, | |
| "grad_norm": 0.40588183023422286, | |
| "learning_rate": 5.942469964136055e-07, | |
| "loss": 0.3844, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 2.5750798722044728, | |
| "grad_norm": 0.4212202535942351, | |
| "learning_rate": 5.767278161798912e-07, | |
| "loss": 0.359, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 2.5814696485623, | |
| "grad_norm": 0.40807230590377985, | |
| "learning_rate": 5.594549432003244e-07, | |
| "loss": 0.4046, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 2.587859424920128, | |
| "grad_norm": 0.4065941037032681, | |
| "learning_rate": 5.42429339304461e-07, | |
| "loss": 0.3872, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 2.594249201277955, | |
| "grad_norm": 0.4055507031249362, | |
| "learning_rate": 5.256519525528254e-07, | |
| "loss": 0.3958, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 2.600638977635783, | |
| "grad_norm": 0.3944854548946413, | |
| "learning_rate": 5.091237171841173e-07, | |
| "loss": 0.3915, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 2.6070287539936103, | |
| "grad_norm": 0.41148215925651993, | |
| "learning_rate": 4.92845553563196e-07, | |
| "loss": 0.4433, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 2.6134185303514377, | |
| "grad_norm": 0.40060963983229303, | |
| "learning_rate": 4.768183681298211e-07, | |
| "loss": 0.429, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 2.619808306709265, | |
| "grad_norm": 0.39104723450292084, | |
| "learning_rate": 4.6104305334818577e-07, | |
| "loss": 0.4049, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 2.626198083067093, | |
| "grad_norm": 0.41910098102442134, | |
| "learning_rate": 4.455204876572172e-07, | |
| "loss": 0.419, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 2.63258785942492, | |
| "grad_norm": 0.388238905104723, | |
| "learning_rate": 4.3025153542165744e-07, | |
| "loss": 0.408, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 2.6389776357827475, | |
| "grad_norm": 0.38853881981152416, | |
| "learning_rate": 4.1523704688394176e-07, | |
| "loss": 0.4312, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 2.6453674121405752, | |
| "grad_norm": 0.3775424632865612, | |
| "learning_rate": 4.0047785811684116e-07, | |
| "loss": 0.3922, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 2.6517571884984026, | |
| "grad_norm": 0.4123685198929477, | |
| "learning_rate": 3.8597479097691626e-07, | |
| "loss": 0.4248, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 2.65814696485623, | |
| "grad_norm": 0.3941239777440188, | |
| "learning_rate": 3.717286530587483e-07, | |
| "loss": 0.3785, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 2.6645367412140573, | |
| "grad_norm": 0.4070688621758578, | |
| "learning_rate": 3.577402376499672e-07, | |
| "loss": 0.3856, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 2.670926517571885, | |
| "grad_norm": 0.39475712080069125, | |
| "learning_rate": 3.440103236870823e-07, | |
| "loss": 0.433, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 2.6773162939297124, | |
| "grad_norm": 0.4031736225229901, | |
| "learning_rate": 3.3053967571210375e-07, | |
| "loss": 0.4027, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 2.68370607028754, | |
| "grad_norm": 0.417062150683953, | |
| "learning_rate": 3.1732904382996975e-07, | |
| "loss": 0.4087, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 2.6900958466453675, | |
| "grad_norm": 0.3942701779532604, | |
| "learning_rate": 3.04379163666782e-07, | |
| "loss": 0.4171, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 2.696485623003195, | |
| "grad_norm": 0.41971407615958284, | |
| "learning_rate": 2.916907563288357e-07, | |
| "loss": 0.387, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 2.702875399361022, | |
| "grad_norm": 0.37163498827867353, | |
| "learning_rate": 2.792645283624712e-07, | |
| "loss": 0.4099, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 2.70926517571885, | |
| "grad_norm": 0.41634144417008856, | |
| "learning_rate": 2.671011717147276e-07, | |
| "loss": 0.3998, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 2.7156549520766773, | |
| "grad_norm": 0.380999985640894, | |
| "learning_rate": 2.5520136369481194e-07, | |
| "loss": 0.3755, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 2.722044728434505, | |
| "grad_norm": 0.38768748893225846, | |
| "learning_rate": 2.4356576693638555e-07, | |
| "loss": 0.3846, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 2.7284345047923324, | |
| "grad_norm": 0.4063859780725629, | |
| "learning_rate": 2.3219502936066228e-07, | |
| "loss": 0.4051, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 2.7348242811501597, | |
| "grad_norm": 0.3835176157673035, | |
| "learning_rate": 2.210897841403331e-07, | |
| "loss": 0.3944, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 2.741214057507987, | |
| "grad_norm": 0.3951836054369348, | |
| "learning_rate": 2.1025064966430697e-07, | |
| "loss": 0.3943, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 2.747603833865815, | |
| "grad_norm": 0.40354951885971074, | |
| "learning_rate": 1.9967822950327453e-07, | |
| "loss": 0.4288, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 2.753993610223642, | |
| "grad_norm": 0.39378799368443823, | |
| "learning_rate": 1.8937311237610168e-07, | |
| "loss": 0.42, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 2.7603833865814695, | |
| "grad_norm": 0.4000205800735376, | |
| "learning_rate": 1.793358721170435e-07, | |
| "loss": 0.4311, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 2.7667731629392973, | |
| "grad_norm": 0.41244447958550856, | |
| "learning_rate": 1.6956706764379438e-07, | |
| "loss": 0.4062, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 2.7731629392971247, | |
| "grad_norm": 0.41104503804420633, | |
| "learning_rate": 1.6006724292636166e-07, | |
| "loss": 0.4015, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 2.779552715654952, | |
| "grad_norm": 0.3908953310387036, | |
| "learning_rate": 1.508369269567783e-07, | |
| "loss": 0.4058, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 2.7859424920127793, | |
| "grad_norm": 0.39112594980146725, | |
| "learning_rate": 1.418766337196431e-07, | |
| "loss": 0.4407, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 2.792332268370607, | |
| "grad_norm": 0.39184009268684444, | |
| "learning_rate": 1.3318686216350241e-07, | |
| "loss": 0.4107, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 2.7987220447284344, | |
| "grad_norm": 0.42530952200855676, | |
| "learning_rate": 1.2476809617306408e-07, | |
| "loss": 0.3736, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 2.8051118210862622, | |
| "grad_norm": 0.3883802162547901, | |
| "learning_rate": 1.166208045422551e-07, | |
| "loss": 0.3678, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 2.8115015974440896, | |
| "grad_norm": 0.4093764366584164, | |
| "learning_rate": 1.0874544094811424e-07, | |
| "loss": 0.4079, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 2.817891373801917, | |
| "grad_norm": 0.3812421287222979, | |
| "learning_rate": 1.0114244392553318e-07, | |
| "loss": 0.403, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 2.8242811501597442, | |
| "grad_norm": 0.40965170224044706, | |
| "learning_rate": 9.381223684283291e-08, | |
| "loss": 0.4121, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 2.830670926517572, | |
| "grad_norm": 0.41109145513893314, | |
| "learning_rate": 8.675522787819023e-08, | |
| "loss": 0.445, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 2.8370607028753994, | |
| "grad_norm": 0.4091067055865315, | |
| "learning_rate": 7.997180999691101e-08, | |
| "loss": 0.394, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 2.8434504792332267, | |
| "grad_norm": 0.41543621509697454, | |
| "learning_rate": 7.346236092954318e-08, | |
| "loss": 0.4386, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 2.8498402555910545, | |
| "grad_norm": 0.3410672092392521, | |
| "learning_rate": 6.722724315084805e-08, | |
| "loss": 0.4019, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 2.856230031948882, | |
| "grad_norm": 0.4241567804361133, | |
| "learning_rate": 6.12668038596137e-08, | |
| "loss": 0.4067, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 2.862619808306709, | |
| "grad_norm": 0.4166855512327295, | |
| "learning_rate": 5.5581374959320366e-08, | |
| "loss": 0.4205, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 2.8690095846645365, | |
| "grad_norm": 0.397404185394741, | |
| "learning_rate": 5.017127303966085e-08, | |
| "loss": 0.4272, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 2.8753993610223643, | |
| "grad_norm": 0.4241765106878171, | |
| "learning_rate": 4.50367993589107e-08, | |
| "loss": 0.397, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 2.8817891373801916, | |
| "grad_norm": 0.3756306836463371, | |
| "learning_rate": 4.0178239827151077e-08, | |
| "loss": 0.4365, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 2.8881789137380194, | |
| "grad_norm": 0.41326425164378694, | |
| "learning_rate": 3.559586499035206e-08, | |
| "loss": 0.4258, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 2.8945686900958467, | |
| "grad_norm": 0.37141499578173215, | |
| "learning_rate": 3.128993001530245e-08, | |
| "loss": 0.4096, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 2.900958466453674, | |
| "grad_norm": 0.3835883402084741, | |
| "learning_rate": 2.7260674675404498e-08, | |
| "loss": 0.4371, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 2.9073482428115014, | |
| "grad_norm": 0.3898223565826393, | |
| "learning_rate": 2.3508323337321225e-08, | |
| "loss": 0.4341, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 2.913738019169329, | |
| "grad_norm": 0.3802080502556978, | |
| "learning_rate": 2.0033084948483104e-08, | |
| "loss": 0.4276, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 2.9201277955271565, | |
| "grad_norm": 0.4016942016157449, | |
| "learning_rate": 1.6835153025451246e-08, | |
| "loss": 0.3919, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 2.9265175718849843, | |
| "grad_norm": 0.4014119653183354, | |
| "learning_rate": 1.3914705643143788e-08, | |
| "loss": 0.4055, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 2.9329073482428116, | |
| "grad_norm": 0.38729883990431374, | |
| "learning_rate": 1.1271905424918294e-08, | |
| "loss": 0.3727, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 2.939297124600639, | |
| "grad_norm": 0.38700137662258594, | |
| "learning_rate": 8.906899533517866e-09, | |
| "loss": 0.4233, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 2.9456869009584663, | |
| "grad_norm": 0.40073560383876833, | |
| "learning_rate": 6.819819662874372e-09, | |
| "loss": 0.4393, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 2.952076677316294, | |
| "grad_norm": 0.4211203211298018, | |
| "learning_rate": 5.0107820307770945e-09, | |
| "loss": 0.4044, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 2.9584664536741214, | |
| "grad_norm": 0.38832513543231306, | |
| "learning_rate": 3.4798873723984604e-09, | |
| "loss": 0.4042, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 2.9648562300319488, | |
| "grad_norm": 0.39417708428911924, | |
| "learning_rate": 2.2272209346885233e-09, | |
| "loss": 0.3879, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 2.9712460063897765, | |
| "grad_norm": 0.3807543666143404, | |
| "learning_rate": 1.2528524716259872e-09, | |
| "loss": 0.4377, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 2.977635782747604, | |
| "grad_norm": 0.37421605400702745, | |
| "learning_rate": 5.568362403318706e-10, | |
| "loss": 0.3945, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 2.984025559105431, | |
| "grad_norm": 0.384644409546502, | |
| "learning_rate": 1.3921099805302985e-10, | |
| "loss": 0.4163, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 2.9904153354632586, | |
| "grad_norm": 0.42083923572753107, | |
| "learning_rate": 0.0, | |
| "loss": 0.4204, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 2.9904153354632586, | |
| "step": 468, | |
| "total_flos": 3.310734730054861e+17, | |
| "train_loss": 0.48025444665780437, | |
| "train_runtime": 10001.8118, | |
| "train_samples_per_second": 2.999, | |
| "train_steps_per_second": 0.047 | |
| } | |
| ], | |
| "logging_steps": 1.0, | |
| "max_steps": 468, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 3.310734730054861e+17, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
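
Since everything above is plain JSON, the state can be inspected programmatically rather than read line by line. The sketch below is a minimal example, assuming the content is saved as `trainer_state.json` (the filename the Hugging Face Trainer writes into each checkpoint directory); it separates the per-step entries of `log_history` from the final train summary and derives an approximate effective batch size, which is not itself a logged field.

```python
import json

# Minimal sketch: load the state, assuming it is saved under the name the
# Hugging Face Trainer itself uses, trainer_state.json.
with open("trainer_state.json") as f:
    state = json.load(f)

# Every log_history entry except the last carries per-step metrics; the
# final entry is the aggregate train summary and has no "loss" key.
per_step = [e for e in state["log_history"] if "loss" in e]
summary = state["log_history"][-1]

final = per_step[-1]
print(f"steps logged: {len(per_step)}")
print(f"final step {final['step']}: loss={final['loss']}, lr={final['learning_rate']}")
print(f"summary train_loss: {summary['train_loss']:.4f}")

# Approximate samples consumed per optimizer step, derived from the summary.
# With train_batch_size = 1, a value near 64 suggests gradient accumulation
# and/or data parallelism (an inference, not a logged field).
samples_total = summary["train_samples_per_second"] * summary["train_runtime"]
print(f"approx. samples per optimizer step: {samples_total / summary['step']:.1f}")
```

For this run the derived figure comes out to roughly 64 samples per step (2.999 samples/s × 10001.8 s ÷ 468 steps), so with a `train_batch_size` of 1 the trainer was almost certainly accumulating gradients and/or running across multiple processes.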