| { | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 2.9904153354632586, | |
| "eval_steps": 500, | |
| "global_step": 468, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.006389776357827476, | |
| "grad_norm": 5.9393923041460335, | |
| "learning_rate": 8.510638297872341e-07, | |
| "loss": 0.8883, | |
| "step": 1 | |
| }, | |
| { | |
| "epoch": 0.012779552715654952, | |
| "grad_norm": 5.645614037221844, | |
| "learning_rate": 1.7021276595744682e-06, | |
| "loss": 0.8365, | |
| "step": 2 | |
| }, | |
| { | |
| "epoch": 0.019169329073482427, | |
| "grad_norm": 6.260474581584071, | |
| "learning_rate": 2.553191489361702e-06, | |
| "loss": 0.8853, | |
| "step": 3 | |
| }, | |
| { | |
| "epoch": 0.025559105431309903, | |
| "grad_norm": 5.551335163218543, | |
| "learning_rate": 3.4042553191489363e-06, | |
| "loss": 0.8686, | |
| "step": 4 | |
| }, | |
| { | |
| "epoch": 0.03194888178913738, | |
| "grad_norm": 4.506734407013277, | |
| "learning_rate": 4.255319148936171e-06, | |
| "loss": 0.792, | |
| "step": 5 | |
| }, | |
| { | |
| "epoch": 0.038338658146964855, | |
| "grad_norm": 2.7013711545792316, | |
| "learning_rate": 5.106382978723404e-06, | |
| "loss": 0.7978, | |
| "step": 6 | |
| }, | |
| { | |
| "epoch": 0.04472843450479233, | |
| "grad_norm": 2.231426309167533, | |
| "learning_rate": 5.957446808510638e-06, | |
| "loss": 0.7656, | |
| "step": 7 | |
| }, | |
| { | |
| "epoch": 0.051118210862619806, | |
| "grad_norm": 3.786605436059839, | |
| "learning_rate": 6.808510638297873e-06, | |
| "loss": 0.7476, | |
| "step": 8 | |
| }, | |
| { | |
| "epoch": 0.05750798722044728, | |
| "grad_norm": 3.9559474965927994, | |
| "learning_rate": 7.659574468085107e-06, | |
| "loss": 0.751, | |
| "step": 9 | |
| }, | |
| { | |
| "epoch": 0.06389776357827476, | |
| "grad_norm": 4.3050608871997476, | |
| "learning_rate": 8.510638297872341e-06, | |
| "loss": 0.7376, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.07028753993610223, | |
| "grad_norm": 3.844203590596342, | |
| "learning_rate": 9.361702127659576e-06, | |
| "loss": 0.6577, | |
| "step": 11 | |
| }, | |
| { | |
| "epoch": 0.07667731629392971, | |
| "grad_norm": 3.7090602450962535, | |
| "learning_rate": 1.0212765957446808e-05, | |
| "loss": 0.7587, | |
| "step": 12 | |
| }, | |
| { | |
| "epoch": 0.08306709265175719, | |
| "grad_norm": 2.6605932751385915, | |
| "learning_rate": 1.1063829787234044e-05, | |
| "loss": 0.7102, | |
| "step": 13 | |
| }, | |
| { | |
| "epoch": 0.08945686900958466, | |
| "grad_norm": 1.841280162949687, | |
| "learning_rate": 1.1914893617021277e-05, | |
| "loss": 0.6686, | |
| "step": 14 | |
| }, | |
| { | |
| "epoch": 0.09584664536741214, | |
| "grad_norm": 2.4654109468445635, | |
| "learning_rate": 1.2765957446808513e-05, | |
| "loss": 0.6976, | |
| "step": 15 | |
| }, | |
| { | |
| "epoch": 0.10223642172523961, | |
| "grad_norm": 2.2718952209600842, | |
| "learning_rate": 1.3617021276595745e-05, | |
| "loss": 0.66, | |
| "step": 16 | |
| }, | |
| { | |
| "epoch": 0.10862619808306709, | |
| "grad_norm": 1.336892858370965, | |
| "learning_rate": 1.4468085106382981e-05, | |
| "loss": 0.6222, | |
| "step": 17 | |
| }, | |
| { | |
| "epoch": 0.11501597444089456, | |
| "grad_norm": 1.3994667991152954, | |
| "learning_rate": 1.5319148936170214e-05, | |
| "loss": 0.616, | |
| "step": 18 | |
| }, | |
| { | |
| "epoch": 0.12140575079872204, | |
| "grad_norm": 1.3560171283396125, | |
| "learning_rate": 1.6170212765957446e-05, | |
| "loss": 0.6394, | |
| "step": 19 | |
| }, | |
| { | |
| "epoch": 0.12779552715654952, | |
| "grad_norm": 1.1983042661201426, | |
| "learning_rate": 1.7021276595744682e-05, | |
| "loss": 0.5704, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.134185303514377, | |
| "grad_norm": 0.9862694631668227, | |
| "learning_rate": 1.7872340425531915e-05, | |
| "loss": 0.592, | |
| "step": 21 | |
| }, | |
| { | |
| "epoch": 0.14057507987220447, | |
| "grad_norm": 1.1717058829411315, | |
| "learning_rate": 1.872340425531915e-05, | |
| "loss": 0.6111, | |
| "step": 22 | |
| }, | |
| { | |
| "epoch": 0.14696485623003194, | |
| "grad_norm": 1.052734463937942, | |
| "learning_rate": 1.9574468085106384e-05, | |
| "loss": 0.6105, | |
| "step": 23 | |
| }, | |
| { | |
| "epoch": 0.15335463258785942, | |
| "grad_norm": 0.9393704655813953, | |
| "learning_rate": 2.0425531914893616e-05, | |
| "loss": 0.6196, | |
| "step": 24 | |
| }, | |
| { | |
| "epoch": 0.1597444089456869, | |
| "grad_norm": 1.064356434143729, | |
| "learning_rate": 2.1276595744680852e-05, | |
| "loss": 0.6022, | |
| "step": 25 | |
| }, | |
| { | |
| "epoch": 0.16613418530351437, | |
| "grad_norm": 0.8527356341897169, | |
| "learning_rate": 2.2127659574468088e-05, | |
| "loss": 0.6149, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.17252396166134185, | |
| "grad_norm": 0.8856534958761616, | |
| "learning_rate": 2.2978723404255324e-05, | |
| "loss": 0.57, | |
| "step": 27 | |
| }, | |
| { | |
| "epoch": 0.17891373801916932, | |
| "grad_norm": 0.7965378935060776, | |
| "learning_rate": 2.3829787234042553e-05, | |
| "loss": 0.5663, | |
| "step": 28 | |
| }, | |
| { | |
| "epoch": 0.1853035143769968, | |
| "grad_norm": 0.930055407459929, | |
| "learning_rate": 2.468085106382979e-05, | |
| "loss": 0.5675, | |
| "step": 29 | |
| }, | |
| { | |
| "epoch": 0.19169329073482427, | |
| "grad_norm": 0.9473480680946368, | |
| "learning_rate": 2.5531914893617025e-05, | |
| "loss": 0.5647, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.19808306709265175, | |
| "grad_norm": 0.9035770727022399, | |
| "learning_rate": 2.6382978723404255e-05, | |
| "loss": 0.5571, | |
| "step": 31 | |
| }, | |
| { | |
| "epoch": 0.20447284345047922, | |
| "grad_norm": 0.9629442234127248, | |
| "learning_rate": 2.723404255319149e-05, | |
| "loss": 0.5675, | |
| "step": 32 | |
| }, | |
| { | |
| "epoch": 0.2108626198083067, | |
| "grad_norm": 0.8898757055373155, | |
| "learning_rate": 2.8085106382978727e-05, | |
| "loss": 0.5684, | |
| "step": 33 | |
| }, | |
| { | |
| "epoch": 0.21725239616613418, | |
| "grad_norm": 0.8818620757382937, | |
| "learning_rate": 2.8936170212765963e-05, | |
| "loss": 0.5394, | |
| "step": 34 | |
| }, | |
| { | |
| "epoch": 0.22364217252396165, | |
| "grad_norm": 1.2556928806711947, | |
| "learning_rate": 2.9787234042553192e-05, | |
| "loss": 0.5912, | |
| "step": 35 | |
| }, | |
| { | |
| "epoch": 0.23003194888178913, | |
| "grad_norm": 0.8995453098056544, | |
| "learning_rate": 3.063829787234043e-05, | |
| "loss": 0.5284, | |
| "step": 36 | |
| }, | |
| { | |
| "epoch": 0.2364217252396166, | |
| "grad_norm": 1.1694120920465332, | |
| "learning_rate": 3.1489361702127664e-05, | |
| "loss": 0.5448, | |
| "step": 37 | |
| }, | |
| { | |
| "epoch": 0.24281150159744408, | |
| "grad_norm": 0.8640750703168888, | |
| "learning_rate": 3.234042553191489e-05, | |
| "loss": 0.5427, | |
| "step": 38 | |
| }, | |
| { | |
| "epoch": 0.24920127795527156, | |
| "grad_norm": 0.9511230435963061, | |
| "learning_rate": 3.319148936170213e-05, | |
| "loss": 0.5242, | |
| "step": 39 | |
| }, | |
| { | |
| "epoch": 0.25559105431309903, | |
| "grad_norm": 0.9815409113958286, | |
| "learning_rate": 3.4042553191489365e-05, | |
| "loss": 0.5327, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.26198083067092653, | |
| "grad_norm": 0.9294918620323822, | |
| "learning_rate": 3.48936170212766e-05, | |
| "loss": 0.55, | |
| "step": 41 | |
| }, | |
| { | |
| "epoch": 0.268370607028754, | |
| "grad_norm": 1.2780033880918986, | |
| "learning_rate": 3.574468085106383e-05, | |
| "loss": 0.5057, | |
| "step": 42 | |
| }, | |
| { | |
| "epoch": 0.2747603833865815, | |
| "grad_norm": 0.970896465496613, | |
| "learning_rate": 3.6595744680851066e-05, | |
| "loss": 0.563, | |
| "step": 43 | |
| }, | |
| { | |
| "epoch": 0.28115015974440893, | |
| "grad_norm": 1.1743520181962854, | |
| "learning_rate": 3.74468085106383e-05, | |
| "loss": 0.5716, | |
| "step": 44 | |
| }, | |
| { | |
| "epoch": 0.28753993610223644, | |
| "grad_norm": 0.9440517537009965, | |
| "learning_rate": 3.829787234042554e-05, | |
| "loss": 0.5202, | |
| "step": 45 | |
| }, | |
| { | |
| "epoch": 0.2939297124600639, | |
| "grad_norm": 1.244837667755633, | |
| "learning_rate": 3.914893617021277e-05, | |
| "loss": 0.5333, | |
| "step": 46 | |
| }, | |
| { | |
| "epoch": 0.3003194888178914, | |
| "grad_norm": 1.0156465350553556, | |
| "learning_rate": 4e-05, | |
| "loss": 0.5569, | |
| "step": 47 | |
| }, | |
| { | |
| "epoch": 0.30670926517571884, | |
| "grad_norm": 1.0875814805232027, | |
| "learning_rate": 3.999944315600779e-05, | |
| "loss": 0.5412, | |
| "step": 48 | |
| }, | |
| { | |
| "epoch": 0.31309904153354634, | |
| "grad_norm": 0.9841398658147307, | |
| "learning_rate": 3.9997772655038674e-05, | |
| "loss": 0.5493, | |
| "step": 49 | |
| }, | |
| { | |
| "epoch": 0.3194888178913738, | |
| "grad_norm": 1.041702971561358, | |
| "learning_rate": 3.99949885901135e-05, | |
| "loss": 0.5464, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.3258785942492013, | |
| "grad_norm": 1.1971478020256678, | |
| "learning_rate": 3.999109111626125e-05, | |
| "loss": 0.5488, | |
| "step": 51 | |
| }, | |
| { | |
| "epoch": 0.33226837060702874, | |
| "grad_norm": 0.7937301188136895, | |
| "learning_rate": 3.998608045051041e-05, | |
| "loss": 0.5406, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.33865814696485624, | |
| "grad_norm": 1.0893696428115565, | |
| "learning_rate": 3.99799568718769e-05, | |
| "loss": 0.5057, | |
| "step": 53 | |
| }, | |
| { | |
| "epoch": 0.3450479233226837, | |
| "grad_norm": 1.0041397722351062, | |
| "learning_rate": 3.9972720721348503e-05, | |
| "loss": 0.5568, | |
| "step": 54 | |
| }, | |
| { | |
| "epoch": 0.3514376996805112, | |
| "grad_norm": 1.1730368348535483, | |
| "learning_rate": 3.996437240186593e-05, | |
| "loss": 0.5376, | |
| "step": 55 | |
| }, | |
| { | |
| "epoch": 0.35782747603833864, | |
| "grad_norm": 1.0260574947019203, | |
| "learning_rate": 3.995491237830033e-05, | |
| "loss": 0.5845, | |
| "step": 56 | |
| }, | |
| { | |
| "epoch": 0.36421725239616615, | |
| "grad_norm": 0.9477092619910678, | |
| "learning_rate": 3.994434117742743e-05, | |
| "loss": 0.5861, | |
| "step": 57 | |
| }, | |
| { | |
| "epoch": 0.3706070287539936, | |
| "grad_norm": 1.020580513302534, | |
| "learning_rate": 3.9932659387898195e-05, | |
| "loss": 0.4967, | |
| "step": 58 | |
| }, | |
| { | |
| "epoch": 0.3769968051118211, | |
| "grad_norm": 1.117231150698062, | |
| "learning_rate": 3.991986766020607e-05, | |
| "loss": 0.5163, | |
| "step": 59 | |
| }, | |
| { | |
| "epoch": 0.38338658146964855, | |
| "grad_norm": 0.9136891394466323, | |
| "learning_rate": 3.9905966706650715e-05, | |
| "loss": 0.5236, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.38977635782747605, | |
| "grad_norm": 0.9610320358708975, | |
| "learning_rate": 3.9890957301298385e-05, | |
| "loss": 0.5442, | |
| "step": 61 | |
| }, | |
| { | |
| "epoch": 0.3961661341853035, | |
| "grad_norm": 0.9525076230315872, | |
| "learning_rate": 3.9874840279938794e-05, | |
| "loss": 0.5481, | |
| "step": 62 | |
| }, | |
| { | |
| "epoch": 0.402555910543131, | |
| "grad_norm": 0.9886639116884637, | |
| "learning_rate": 3.9857616540038595e-05, | |
| "loss": 0.587, | |
| "step": 63 | |
| }, | |
| { | |
| "epoch": 0.40894568690095845, | |
| "grad_norm": 0.8789895879873663, | |
| "learning_rate": 3.9839287040691396e-05, | |
| "loss": 0.5405, | |
| "step": 64 | |
| }, | |
| { | |
| "epoch": 0.41533546325878595, | |
| "grad_norm": 1.2919381394585123, | |
| "learning_rate": 3.981985280256436e-05, | |
| "loss": 0.5661, | |
| "step": 65 | |
| }, | |
| { | |
| "epoch": 0.4217252396166134, | |
| "grad_norm": 0.8458840142981038, | |
| "learning_rate": 3.979931490784136e-05, | |
| "loss": 0.5376, | |
| "step": 66 | |
| }, | |
| { | |
| "epoch": 0.4281150159744409, | |
| "grad_norm": 0.7123621144432971, | |
| "learning_rate": 3.977767450016272e-05, | |
| "loss": 0.5501, | |
| "step": 67 | |
| }, | |
| { | |
| "epoch": 0.43450479233226835, | |
| "grad_norm": 0.9335736753943794, | |
| "learning_rate": 3.9754932784561545e-05, | |
| "loss": 0.5495, | |
| "step": 68 | |
| }, | |
| { | |
| "epoch": 0.44089456869009586, | |
| "grad_norm": 0.7759436980865956, | |
| "learning_rate": 3.973109102739661e-05, | |
| "loss": 0.5468, | |
| "step": 69 | |
| }, | |
| { | |
| "epoch": 0.4472843450479233, | |
| "grad_norm": 0.9317387591226213, | |
| "learning_rate": 3.9706150556281827e-05, | |
| "loss": 0.5578, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.4536741214057508, | |
| "grad_norm": 1.125415618543546, | |
| "learning_rate": 3.968011276001236e-05, | |
| "loss": 0.5769, | |
| "step": 71 | |
| }, | |
| { | |
| "epoch": 0.46006389776357826, | |
| "grad_norm": 1.0297816336058314, | |
| "learning_rate": 3.9652979088487246e-05, | |
| "loss": 0.5545, | |
| "step": 72 | |
| }, | |
| { | |
| "epoch": 0.46645367412140576, | |
| "grad_norm": 0.977116035792409, | |
| "learning_rate": 3.962475105262868e-05, | |
| "loss": 0.5443, | |
| "step": 73 | |
| }, | |
| { | |
| "epoch": 0.4728434504792332, | |
| "grad_norm": 0.9591838077463976, | |
| "learning_rate": 3.9595430224297874e-05, | |
| "loss": 0.5597, | |
| "step": 74 | |
| }, | |
| { | |
| "epoch": 0.4792332268370607, | |
| "grad_norm": 1.062273213479338, | |
| "learning_rate": 3.9565018236207545e-05, | |
| "loss": 0.5364, | |
| "step": 75 | |
| }, | |
| { | |
| "epoch": 0.48562300319488816, | |
| "grad_norm": 1.3459560450980237, | |
| "learning_rate": 3.953351678183099e-05, | |
| "loss": 0.5507, | |
| "step": 76 | |
| }, | |
| { | |
| "epoch": 0.49201277955271566, | |
| "grad_norm": 0.9913845916559971, | |
| "learning_rate": 3.9500927615307745e-05, | |
| "loss": 0.5244, | |
| "step": 77 | |
| }, | |
| { | |
| "epoch": 0.4984025559105431, | |
| "grad_norm": 1.4251625906436933, | |
| "learning_rate": 3.9467252551346e-05, | |
| "loss": 0.5594, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.5047923322683706, | |
| "grad_norm": 1.0112792856876618, | |
| "learning_rate": 3.943249346512143e-05, | |
| "loss": 0.4883, | |
| "step": 79 | |
| }, | |
| { | |
| "epoch": 0.5111821086261981, | |
| "grad_norm": 1.0989399875889763, | |
| "learning_rate": 3.939665229217289e-05, | |
| "loss": 0.5144, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.5175718849840255, | |
| "grad_norm": 1.0239637264891768, | |
| "learning_rate": 3.935973102829456e-05, | |
| "loss": 0.5527, | |
| "step": 81 | |
| }, | |
| { | |
| "epoch": 0.5239616613418531, | |
| "grad_norm": 1.0068238967871816, | |
| "learning_rate": 3.9321731729424826e-05, | |
| "loss": 0.5672, | |
| "step": 82 | |
| }, | |
| { | |
| "epoch": 0.5303514376996805, | |
| "grad_norm": 0.9875459555325186, | |
| "learning_rate": 3.928265651153183e-05, | |
| "loss": 0.529, | |
| "step": 83 | |
| }, | |
| { | |
| "epoch": 0.536741214057508, | |
| "grad_norm": 1.0535928381866786, | |
| "learning_rate": 3.92425075504956e-05, | |
| "loss": 0.5037, | |
| "step": 84 | |
| }, | |
| { | |
| "epoch": 0.5431309904153354, | |
| "grad_norm": 0.8056689145152576, | |
| "learning_rate": 3.9201287081986905e-05, | |
| "loss": 0.5561, | |
| "step": 85 | |
| }, | |
| { | |
| "epoch": 0.549520766773163, | |
| "grad_norm": 0.9754123651205087, | |
| "learning_rate": 3.915899740134277e-05, | |
| "loss": 0.533, | |
| "step": 86 | |
| }, | |
| { | |
| "epoch": 0.5559105431309904, | |
| "grad_norm": 1.15840636473317, | |
| "learning_rate": 3.9115640863438666e-05, | |
| "loss": 0.5493, | |
| "step": 87 | |
| }, | |
| { | |
| "epoch": 0.5623003194888179, | |
| "grad_norm": 0.8988153421079207, | |
| "learning_rate": 3.907121988255735e-05, | |
| "loss": 0.5196, | |
| "step": 88 | |
| }, | |
| { | |
| "epoch": 0.5686900958466453, | |
| "grad_norm": 1.1383255251414823, | |
| "learning_rate": 3.9025736932254463e-05, | |
| "loss": 0.527, | |
| "step": 89 | |
| }, | |
| { | |
| "epoch": 0.5750798722044729, | |
| "grad_norm": 1.1283342937219425, | |
| "learning_rate": 3.8979194545220754e-05, | |
| "loss": 0.5295, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.5814696485623003, | |
| "grad_norm": 0.9227752366498948, | |
| "learning_rate": 3.893159531314109e-05, | |
| "loss": 0.5306, | |
| "step": 91 | |
| }, | |
| { | |
| "epoch": 0.5878594249201278, | |
| "grad_norm": 1.0643565858340844, | |
| "learning_rate": 3.888294188655012e-05, | |
| "loss": 0.5093, | |
| "step": 92 | |
| }, | |
| { | |
| "epoch": 0.5942492012779552, | |
| "grad_norm": 0.7000539222261911, | |
| "learning_rate": 3.8833236974684666e-05, | |
| "loss": 0.4991, | |
| "step": 93 | |
| }, | |
| { | |
| "epoch": 0.6006389776357828, | |
| "grad_norm": 1.1723297653142641, | |
| "learning_rate": 3.8782483345332876e-05, | |
| "loss": 0.5068, | |
| "step": 94 | |
| }, | |
| { | |
| "epoch": 0.6070287539936102, | |
| "grad_norm": 0.7821654004337761, | |
| "learning_rate": 3.873068382468012e-05, | |
| "loss": 0.4921, | |
| "step": 95 | |
| }, | |
| { | |
| "epoch": 0.6134185303514377, | |
| "grad_norm": 0.9387810448789369, | |
| "learning_rate": 3.8677841297151595e-05, | |
| "loss": 0.5062, | |
| "step": 96 | |
| }, | |
| { | |
| "epoch": 0.6198083067092651, | |
| "grad_norm": 1.0554276403586835, | |
| "learning_rate": 3.862395870525167e-05, | |
| "loss": 0.5563, | |
| "step": 97 | |
| }, | |
| { | |
| "epoch": 0.6261980830670927, | |
| "grad_norm": 0.7778950998937124, | |
| "learning_rate": 3.856903904940014e-05, | |
| "loss": 0.5336, | |
| "step": 98 | |
| }, | |
| { | |
| "epoch": 0.6325878594249201, | |
| "grad_norm": 0.9707505249343071, | |
| "learning_rate": 3.851308538776501e-05, | |
| "loss": 0.5042, | |
| "step": 99 | |
| }, | |
| { | |
| "epoch": 0.6389776357827476, | |
| "grad_norm": 0.9480577698679056, | |
| "learning_rate": 3.845610083609234e-05, | |
| "loss": 0.5125, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.645367412140575, | |
| "grad_norm": 1.000490021841588, | |
| "learning_rate": 3.839808856753264e-05, | |
| "loss": 0.5378, | |
| "step": 101 | |
| }, | |
| { | |
| "epoch": 0.6517571884984026, | |
| "grad_norm": 1.150496995903701, | |
| "learning_rate": 3.833905181246424e-05, | |
| "loss": 0.5201, | |
| "step": 102 | |
| }, | |
| { | |
| "epoch": 0.65814696485623, | |
| "grad_norm": 1.119406744384519, | |
| "learning_rate": 3.827899385831337e-05, | |
| "loss": 0.5219, | |
| "step": 103 | |
| }, | |
| { | |
| "epoch": 0.6645367412140575, | |
| "grad_norm": 1.0112237142528195, | |
| "learning_rate": 3.821791804937114e-05, | |
| "loss": 0.5298, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 0.670926517571885, | |
| "grad_norm": 0.7034761817860105, | |
| "learning_rate": 3.815582778660726e-05, | |
| "loss": 0.5192, | |
| "step": 105 | |
| }, | |
| { | |
| "epoch": 0.6773162939297125, | |
| "grad_norm": 1.0053867926801359, | |
| "learning_rate": 3.809272652748072e-05, | |
| "loss": 0.5276, | |
| "step": 106 | |
| }, | |
| { | |
| "epoch": 0.6837060702875399, | |
| "grad_norm": 0.7482460221471098, | |
| "learning_rate": 3.8028617785747224e-05, | |
| "loss": 0.5608, | |
| "step": 107 | |
| }, | |
| { | |
| "epoch": 0.6900958466453674, | |
| "grad_norm": 0.9865927820444385, | |
| "learning_rate": 3.7963505131263534e-05, | |
| "loss": 0.4968, | |
| "step": 108 | |
| }, | |
| { | |
| "epoch": 0.6964856230031949, | |
| "grad_norm": 1.038453462373431, | |
| "learning_rate": 3.789739218978871e-05, | |
| "loss": 0.5693, | |
| "step": 109 | |
| }, | |
| { | |
| "epoch": 0.7028753993610224, | |
| "grad_norm": 0.6920319585898572, | |
| "learning_rate": 3.783028264278217e-05, | |
| "loss": 0.5372, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.7092651757188498, | |
| "grad_norm": 1.0352100256938261, | |
| "learning_rate": 3.7762180227198707e-05, | |
| "loss": 0.5277, | |
| "step": 111 | |
| }, | |
| { | |
| "epoch": 0.7156549520766773, | |
| "grad_norm": 0.64046404209257, | |
| "learning_rate": 3.7693088735280435e-05, | |
| "loss": 0.4962, | |
| "step": 112 | |
| }, | |
| { | |
| "epoch": 0.7220447284345048, | |
| "grad_norm": 1.0958799819009744, | |
| "learning_rate": 3.762301201434558e-05, | |
| "loss": 0.5407, | |
| "step": 113 | |
| }, | |
| { | |
| "epoch": 0.7284345047923323, | |
| "grad_norm": 0.7248797975531077, | |
| "learning_rate": 3.755195396657423e-05, | |
| "loss": 0.5282, | |
| "step": 114 | |
| }, | |
| { | |
| "epoch": 0.7348242811501597, | |
| "grad_norm": 1.051488291269328, | |
| "learning_rate": 3.74799185487911e-05, | |
| "loss": 0.5331, | |
| "step": 115 | |
| }, | |
| { | |
| "epoch": 0.7412140575079872, | |
| "grad_norm": 1.080506027543262, | |
| "learning_rate": 3.7406909772245136e-05, | |
| "loss": 0.5433, | |
| "step": 116 | |
| }, | |
| { | |
| "epoch": 0.7476038338658147, | |
| "grad_norm": 0.8852634100172303, | |
| "learning_rate": 3.73329317023862e-05, | |
| "loss": 0.5128, | |
| "step": 117 | |
| }, | |
| { | |
| "epoch": 0.7539936102236422, | |
| "grad_norm": 1.1931790977018395, | |
| "learning_rate": 3.725798845863868e-05, | |
| "loss": 0.5067, | |
| "step": 118 | |
| }, | |
| { | |
| "epoch": 0.7603833865814696, | |
| "grad_norm": 0.9230539545451707, | |
| "learning_rate": 3.718208421417208e-05, | |
| "loss": 0.518, | |
| "step": 119 | |
| }, | |
| { | |
| "epoch": 0.7667731629392971, | |
| "grad_norm": 0.9288097411893196, | |
| "learning_rate": 3.710522319566864e-05, | |
| "loss": 0.5025, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.7731629392971247, | |
| "grad_norm": 0.8269049032128745, | |
| "learning_rate": 3.7027409683087994e-05, | |
| "loss": 0.509, | |
| "step": 121 | |
| }, | |
| { | |
| "epoch": 0.7795527156549521, | |
| "grad_norm": 1.063330879029129, | |
| "learning_rate": 3.694864800942886e-05, | |
| "loss": 0.5577, | |
| "step": 122 | |
| }, | |
| { | |
| "epoch": 0.7859424920127795, | |
| "grad_norm": 0.7741080297489087, | |
| "learning_rate": 3.686894256048771e-05, | |
| "loss": 0.5132, | |
| "step": 123 | |
| }, | |
| { | |
| "epoch": 0.792332268370607, | |
| "grad_norm": 0.9963198870882927, | |
| "learning_rate": 3.678829777461457e-05, | |
| "loss": 0.5456, | |
| "step": 124 | |
| }, | |
| { | |
| "epoch": 0.7987220447284346, | |
| "grad_norm": 0.9303282772631454, | |
| "learning_rate": 3.670671814246591e-05, | |
| "loss": 0.51, | |
| "step": 125 | |
| }, | |
| { | |
| "epoch": 0.805111821086262, | |
| "grad_norm": 0.8193974356952273, | |
| "learning_rate": 3.662420820675453e-05, | |
| "loss": 0.4998, | |
| "step": 126 | |
| }, | |
| { | |
| "epoch": 0.8115015974440895, | |
| "grad_norm": 0.8935114136883495, | |
| "learning_rate": 3.654077256199662e-05, | |
| "loss": 0.4945, | |
| "step": 127 | |
| }, | |
| { | |
| "epoch": 0.8178913738019169, | |
| "grad_norm": 0.8144910990007458, | |
| "learning_rate": 3.645641585425594e-05, | |
| "loss": 0.5031, | |
| "step": 128 | |
| }, | |
| { | |
| "epoch": 0.8242811501597445, | |
| "grad_norm": 0.8181401765209083, | |
| "learning_rate": 3.6371142780885085e-05, | |
| "loss": 0.4892, | |
| "step": 129 | |
| }, | |
| { | |
| "epoch": 0.8306709265175719, | |
| "grad_norm": 0.8625236903783421, | |
| "learning_rate": 3.628495809026391e-05, | |
| "loss": 0.526, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.8370607028753994, | |
| "grad_norm": 0.807030171859448, | |
| "learning_rate": 3.619786658153514e-05, | |
| "loss": 0.5289, | |
| "step": 131 | |
| }, | |
| { | |
| "epoch": 0.8434504792332268, | |
| "grad_norm": 0.8764232140015433, | |
| "learning_rate": 3.6109873104337096e-05, | |
| "loss": 0.5394, | |
| "step": 132 | |
| }, | |
| { | |
| "epoch": 0.8498402555910544, | |
| "grad_norm": 0.6976250810888259, | |
| "learning_rate": 3.602098255853372e-05, | |
| "loss": 0.4852, | |
| "step": 133 | |
| }, | |
| { | |
| "epoch": 0.8562300319488818, | |
| "grad_norm": 0.721487679418641, | |
| "learning_rate": 3.593119989394163e-05, | |
| "loss": 0.4989, | |
| "step": 134 | |
| }, | |
| { | |
| "epoch": 0.8626198083067093, | |
| "grad_norm": 0.8535899066215291, | |
| "learning_rate": 3.584053011005457e-05, | |
| "loss": 0.5261, | |
| "step": 135 | |
| }, | |
| { | |
| "epoch": 0.8690095846645367, | |
| "grad_norm": 0.8308742651526032, | |
| "learning_rate": 3.574897825576499e-05, | |
| "loss": 0.5336, | |
| "step": 136 | |
| }, | |
| { | |
| "epoch": 0.8753993610223643, | |
| "grad_norm": 1.04154322336692, | |
| "learning_rate": 3.565654942908289e-05, | |
| "loss": 0.5291, | |
| "step": 137 | |
| }, | |
| { | |
| "epoch": 0.8817891373801917, | |
| "grad_norm": 0.7174442743523838, | |
| "learning_rate": 3.556324877685195e-05, | |
| "loss": 0.5135, | |
| "step": 138 | |
| }, | |
| { | |
| "epoch": 0.8881789137380192, | |
| "grad_norm": 1.0786600177139642, | |
| "learning_rate": 3.546908149446294e-05, | |
| "loss": 0.4844, | |
| "step": 139 | |
| }, | |
| { | |
| "epoch": 0.8945686900958466, | |
| "grad_norm": 0.9288876630518276, | |
| "learning_rate": 3.53740528255644e-05, | |
| "loss": 0.5467, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.9009584664536742, | |
| "grad_norm": 0.7709318953465457, | |
| "learning_rate": 3.527816806177068e-05, | |
| "loss": 0.5182, | |
| "step": 141 | |
| }, | |
| { | |
| "epoch": 0.9073482428115016, | |
| "grad_norm": 0.8923057265426987, | |
| "learning_rate": 3.518143254236724e-05, | |
| "loss": 0.5033, | |
| "step": 142 | |
| }, | |
| { | |
| "epoch": 0.9137380191693291, | |
| "grad_norm": 0.6637070684857435, | |
| "learning_rate": 3.508385165401336e-05, | |
| "loss": 0.5127, | |
| "step": 143 | |
| }, | |
| { | |
| "epoch": 0.9201277955271565, | |
| "grad_norm": 0.9230168482815491, | |
| "learning_rate": 3.4985430830442175e-05, | |
| "loss": 0.516, | |
| "step": 144 | |
| }, | |
| { | |
| "epoch": 0.9265175718849841, | |
| "grad_norm": 0.8941920473315101, | |
| "learning_rate": 3.4886175552158135e-05, | |
| "loss": 0.4947, | |
| "step": 145 | |
| }, | |
| { | |
| "epoch": 0.9329073482428115, | |
| "grad_norm": 0.8990821129059456, | |
| "learning_rate": 3.478609134613176e-05, | |
| "loss": 0.5022, | |
| "step": 146 | |
| }, | |
| { | |
| "epoch": 0.939297124600639, | |
| "grad_norm": 0.8662214281666614, | |
| "learning_rate": 3.4685183785491956e-05, | |
| "loss": 0.5149, | |
| "step": 147 | |
| }, | |
| { | |
| "epoch": 0.9456869009584664, | |
| "grad_norm": 0.761583043458421, | |
| "learning_rate": 3.45834584892156e-05, | |
| "loss": 0.5484, | |
| "step": 148 | |
| }, | |
| { | |
| "epoch": 0.952076677316294, | |
| "grad_norm": 0.9833756730405143, | |
| "learning_rate": 3.448092112181469e-05, | |
| "loss": 0.5113, | |
| "step": 149 | |
| }, | |
| { | |
| "epoch": 0.9584664536741214, | |
| "grad_norm": 0.6553490250245431, | |
| "learning_rate": 3.4377577393020955e-05, | |
| "loss": 0.5057, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.9648562300319489, | |
| "grad_norm": 0.9077550016293252, | |
| "learning_rate": 3.427343305746783e-05, | |
| "loss": 0.4993, | |
| "step": 151 | |
| }, | |
| { | |
| "epoch": 0.9712460063897763, | |
| "grad_norm": 0.7171270449880269, | |
| "learning_rate": 3.416849391437007e-05, | |
| "loss": 0.4784, | |
| "step": 152 | |
| }, | |
| { | |
| "epoch": 0.9776357827476039, | |
| "grad_norm": 0.9933525489448933, | |
| "learning_rate": 3.406276580720082e-05, | |
| "loss": 0.4958, | |
| "step": 153 | |
| }, | |
| { | |
| "epoch": 0.9840255591054313, | |
| "grad_norm": 0.7952422694005465, | |
| "learning_rate": 3.3956254623366206e-05, | |
| "loss": 0.5126, | |
| "step": 154 | |
| }, | |
| { | |
| "epoch": 0.9904153354632588, | |
| "grad_norm": 0.7573030595910021, | |
| "learning_rate": 3.384896629387751e-05, | |
| "loss": 0.4954, | |
| "step": 155 | |
| }, | |
| { | |
| "epoch": 0.9968051118210862, | |
| "grad_norm": 0.6608346407299301, | |
| "learning_rate": 3.374090679302091e-05, | |
| "loss": 0.5326, | |
| "step": 156 | |
| }, | |
| { | |
| "epoch": 1.0031948881789137, | |
| "grad_norm": 1.221115547246655, | |
| "learning_rate": 3.363208213802478e-05, | |
| "loss": 0.7746, | |
| "step": 157 | |
| }, | |
| { | |
| "epoch": 1.0095846645367412, | |
| "grad_norm": 0.755241193707458, | |
| "learning_rate": 3.352249838872466e-05, | |
| "loss": 0.4163, | |
| "step": 158 | |
| }, | |
| { | |
| "epoch": 1.0159744408945688, | |
| "grad_norm": 0.5770894202417984, | |
| "learning_rate": 3.3412161647225795e-05, | |
| "loss": 0.3854, | |
| "step": 159 | |
| }, | |
| { | |
| "epoch": 1.0223642172523961, | |
| "grad_norm": 0.6479907351500458, | |
| "learning_rate": 3.330107805756334e-05, | |
| "loss": 0.3674, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 1.0287539936102237, | |
| "grad_norm": 0.8588766728374346, | |
| "learning_rate": 3.318925380536026e-05, | |
| "loss": 0.4473, | |
| "step": 161 | |
| }, | |
| { | |
| "epoch": 1.035143769968051, | |
| "grad_norm": 0.6190221770598182, | |
| "learning_rate": 3.307669511748285e-05, | |
| "loss": 0.3834, | |
| "step": 162 | |
| }, | |
| { | |
| "epoch": 1.0415335463258786, | |
| "grad_norm": 0.9380647190076025, | |
| "learning_rate": 3.2963408261694026e-05, | |
| "loss": 0.4215, | |
| "step": 163 | |
| }, | |
| { | |
| "epoch": 1.0479233226837061, | |
| "grad_norm": 0.8105378167398238, | |
| "learning_rate": 3.284939954630428e-05, | |
| "loss": 0.4247, | |
| "step": 164 | |
| }, | |
| { | |
| "epoch": 1.0543130990415335, | |
| "grad_norm": 0.7819386983119491, | |
| "learning_rate": 3.2734675319820446e-05, | |
| "loss": 0.4013, | |
| "step": 165 | |
| }, | |
| { | |
| "epoch": 1.060702875399361, | |
| "grad_norm": 1.1818150256172972, | |
| "learning_rate": 3.261924197059215e-05, | |
| "loss": 0.4592, | |
| "step": 166 | |
| }, | |
| { | |
| "epoch": 1.0670926517571886, | |
| "grad_norm": 0.7222613902999555, | |
| "learning_rate": 3.25031059264561e-05, | |
| "loss": 0.3779, | |
| "step": 167 | |
| }, | |
| { | |
| "epoch": 1.073482428115016, | |
| "grad_norm": 0.695599012715811, | |
| "learning_rate": 3.2386273654378134e-05, | |
| "loss": 0.3354, | |
| "step": 168 | |
| }, | |
| { | |
| "epoch": 1.0798722044728435, | |
| "grad_norm": 0.6636714986165804, | |
| "learning_rate": 3.226875166009313e-05, | |
| "loss": 0.3658, | |
| "step": 169 | |
| }, | |
| { | |
| "epoch": 1.0862619808306708, | |
| "grad_norm": 0.9119019299436991, | |
| "learning_rate": 3.215054648774274e-05, | |
| "loss": 0.4916, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 1.0926517571884984, | |
| "grad_norm": 0.6438001573913675, | |
| "learning_rate": 3.2031664719510967e-05, | |
| "loss": 0.3829, | |
| "step": 171 | |
| }, | |
| { | |
| "epoch": 1.099041533546326, | |
| "grad_norm": 0.892962035381319, | |
| "learning_rate": 3.1912112975257664e-05, | |
| "loss": 0.4622, | |
| "step": 172 | |
| }, | |
| { | |
| "epoch": 1.1054313099041533, | |
| "grad_norm": 0.7025792696391929, | |
| "learning_rate": 3.1791897912149874e-05, | |
| "loss": 0.3948, | |
| "step": 173 | |
| }, | |
| { | |
| "epoch": 1.1118210862619808, | |
| "grad_norm": 0.659137823569491, | |
| "learning_rate": 3.167102622429117e-05, | |
| "loss": 0.364, | |
| "step": 174 | |
| }, | |
| { | |
| "epoch": 1.1182108626198084, | |
| "grad_norm": 0.7388120415365451, | |
| "learning_rate": 3.1549504642348856e-05, | |
| "loss": 0.3855, | |
| "step": 175 | |
| }, | |
| { | |
| "epoch": 1.1246006389776357, | |
| "grad_norm": 0.6711108216098111, | |
| "learning_rate": 3.142733993317924e-05, | |
| "loss": 0.3966, | |
| "step": 176 | |
| }, | |
| { | |
| "epoch": 1.1309904153354633, | |
| "grad_norm": 0.6360807358108843, | |
| "learning_rate": 3.130453889945075e-05, | |
| "loss": 0.4024, | |
| "step": 177 | |
| }, | |
| { | |
| "epoch": 1.1373801916932909, | |
| "grad_norm": 0.6324189051210329, | |
| "learning_rate": 3.118110837926517e-05, | |
| "loss": 0.3553, | |
| "step": 178 | |
| }, | |
| { | |
| "epoch": 1.1437699680511182, | |
| "grad_norm": 0.7964831390454978, | |
| "learning_rate": 3.105705524577686e-05, | |
| "loss": 0.4578, | |
| "step": 179 | |
| }, | |
| { | |
| "epoch": 1.1501597444089458, | |
| "grad_norm": 0.6710667005919045, | |
| "learning_rate": 3.093238640681003e-05, | |
| "loss": 0.3781, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 1.156549520766773, | |
| "grad_norm": 0.6387506156404197, | |
| "learning_rate": 3.080710880447408e-05, | |
| "loss": 0.3887, | |
| "step": 181 | |
| }, | |
| { | |
| "epoch": 1.1629392971246006, | |
| "grad_norm": 0.7478222110923342, | |
| "learning_rate": 3.068122941477704e-05, | |
| "loss": 0.4388, | |
| "step": 182 | |
| }, | |
| { | |
| "epoch": 1.1693290734824282, | |
| "grad_norm": 0.6263047219752043, | |
| "learning_rate": 3.05547552472371e-05, | |
| "loss": 0.3889, | |
| "step": 183 | |
| }, | |
| { | |
| "epoch": 1.1757188498402555, | |
| "grad_norm": 0.8473527998575385, | |
| "learning_rate": 3.0427693344492278e-05, | |
| "loss": 0.4279, | |
| "step": 184 | |
| }, | |
| { | |
| "epoch": 1.182108626198083, | |
| "grad_norm": 0.5095696451206226, | |
| "learning_rate": 3.0300050781908305e-05, | |
| "loss": 0.3517, | |
| "step": 185 | |
| }, | |
| { | |
| "epoch": 1.1884984025559104, | |
| "grad_norm": 0.7415470456207829, | |
| "learning_rate": 3.0171834667184597e-05, | |
| "loss": 0.3922, | |
| "step": 186 | |
| }, | |
| { | |
| "epoch": 1.194888178913738, | |
| "grad_norm": 0.8177392758602474, | |
| "learning_rate": 3.0043052139958465e-05, | |
| "loss": 0.4332, | |
| "step": 187 | |
| }, | |
| { | |
| "epoch": 1.2012779552715656, | |
| "grad_norm": 0.5193140042572805, | |
| "learning_rate": 2.9913710371407574e-05, | |
| "loss": 0.3973, | |
| "step": 188 | |
| }, | |
| { | |
| "epoch": 1.207667731629393, | |
| "grad_norm": 0.8215524197527592, | |
| "learning_rate": 2.97838165638506e-05, | |
| "loss": 0.3988, | |
| "step": 189 | |
| }, | |
| { | |
| "epoch": 1.2140575079872205, | |
| "grad_norm": 0.6518458707691871, | |
| "learning_rate": 2.965337795034617e-05, | |
| "loss": 0.4124, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 1.220447284345048, | |
| "grad_norm": 0.7141025249008749, | |
| "learning_rate": 2.9522401794290127e-05, | |
| "loss": 0.478, | |
| "step": 191 | |
| }, | |
| { | |
| "epoch": 1.2268370607028753, | |
| "grad_norm": 0.6807610256712961, | |
| "learning_rate": 2.9390895389011024e-05, | |
| "loss": 0.3373, | |
| "step": 192 | |
| }, | |
| { | |
| "epoch": 1.233226837060703, | |
| "grad_norm": 0.6877212054512275, | |
| "learning_rate": 2.9258866057364028e-05, | |
| "loss": 0.481, | |
| "step": 193 | |
| }, | |
| { | |
| "epoch": 1.2396166134185305, | |
| "grad_norm": 0.6607586787800477, | |
| "learning_rate": 2.912632115132316e-05, | |
| "loss": 0.3599, | |
| "step": 194 | |
| }, | |
| { | |
| "epoch": 1.2460063897763578, | |
| "grad_norm": 0.6239197081486476, | |
| "learning_rate": 2.8993268051571877e-05, | |
| "loss": 0.4054, | |
| "step": 195 | |
| }, | |
| { | |
| "epoch": 1.2523961661341854, | |
| "grad_norm": 0.6374267993723667, | |
| "learning_rate": 2.885971416709211e-05, | |
| "loss": 0.4582, | |
| "step": 196 | |
| }, | |
| { | |
| "epoch": 1.2587859424920127, | |
| "grad_norm": 0.6083181799353529, | |
| "learning_rate": 2.8725666934751678e-05, | |
| "loss": 0.3904, | |
| "step": 197 | |
| }, | |
| { | |
| "epoch": 1.2651757188498403, | |
| "grad_norm": 0.567022003437379, | |
| "learning_rate": 2.859113381889018e-05, | |
| "loss": 0.3776, | |
| "step": 198 | |
| }, | |
| { | |
| "epoch": 1.2715654952076676, | |
| "grad_norm": 0.5883698301032603, | |
| "learning_rate": 2.8456122310903342e-05, | |
| "loss": 0.3869, | |
| "step": 199 | |
| }, | |
| { | |
| "epoch": 1.2779552715654952, | |
| "grad_norm": 0.6219746159300882, | |
| "learning_rate": 2.8320639928825885e-05, | |
| "loss": 0.4434, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 1.2843450479233227, | |
| "grad_norm": 0.5913066862304653, | |
| "learning_rate": 2.818469421691285e-05, | |
| "loss": 0.4034, | |
| "step": 201 | |
| }, | |
| { | |
| "epoch": 1.29073482428115, | |
| "grad_norm": 0.7406263370585998, | |
| "learning_rate": 2.804829274521954e-05, | |
| "loss": 0.4592, | |
| "step": 202 | |
| }, | |
| { | |
| "epoch": 1.2971246006389776, | |
| "grad_norm": 0.6535050713890558, | |
| "learning_rate": 2.7911443109179958e-05, | |
| "loss": 0.4018, | |
| "step": 203 | |
| }, | |
| { | |
| "epoch": 1.3035143769968052, | |
| "grad_norm": 0.6660994073473191, | |
| "learning_rate": 2.777415292918386e-05, | |
| "loss": 0.3987, | |
| "step": 204 | |
| }, | |
| { | |
| "epoch": 1.3099041533546325, | |
| "grad_norm": 0.5770049980372955, | |
| "learning_rate": 2.7636429850152452e-05, | |
| "loss": 0.3931, | |
| "step": 205 | |
| }, | |
| { | |
| "epoch": 1.31629392971246, | |
| "grad_norm": 0.7664470442831808, | |
| "learning_rate": 2.7498281541112645e-05, | |
| "loss": 0.406, | |
| "step": 206 | |
| }, | |
| { | |
| "epoch": 1.3226837060702876, | |
| "grad_norm": 0.5845799672630331, | |
| "learning_rate": 2.735971569477002e-05, | |
| "loss": 0.3969, | |
| "step": 207 | |
| }, | |
| { | |
| "epoch": 1.329073482428115, | |
| "grad_norm": 0.7256834317235751, | |
| "learning_rate": 2.72207400270805e-05, | |
| "loss": 0.4243, | |
| "step": 208 | |
| }, | |
| { | |
| "epoch": 1.3354632587859425, | |
| "grad_norm": 0.5160971519592359, | |
| "learning_rate": 2.7081362276820634e-05, | |
| "loss": 0.3745, | |
| "step": 209 | |
| }, | |
| { | |
| "epoch": 1.34185303514377, | |
| "grad_norm": 0.8175757140515724, | |
| "learning_rate": 2.6941590205156715e-05, | |
| "loss": 0.4768, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 1.3482428115015974, | |
| "grad_norm": 0.47324065852496966, | |
| "learning_rate": 2.6801431595212585e-05, | |
| "loss": 0.3741, | |
| "step": 211 | |
| }, | |
| { | |
| "epoch": 1.354632587859425, | |
| "grad_norm": 0.6620362519919619, | |
| "learning_rate": 2.666089425163623e-05, | |
| "loss": 0.4173, | |
| "step": 212 | |
| }, | |
| { | |
| "epoch": 1.3610223642172525, | |
| "grad_norm": 0.5902652547868394, | |
| "learning_rate": 2.6519986000165197e-05, | |
| "loss": 0.3851, | |
| "step": 213 | |
| }, | |
| { | |
| "epoch": 1.3674121405750799, | |
| "grad_norm": 0.5996280221322619, | |
| "learning_rate": 2.637871468719081e-05, | |
| "loss": 0.3976, | |
| "step": 214 | |
| }, | |
| { | |
| "epoch": 1.3738019169329074, | |
| "grad_norm": 0.5952050484113727, | |
| "learning_rate": 2.623708817932127e-05, | |
| "loss": 0.3557, | |
| "step": 215 | |
| }, | |
| { | |
| "epoch": 1.3801916932907348, | |
| "grad_norm": 0.5629505435064166, | |
| "learning_rate": 2.6095114362943567e-05, | |
| "loss": 0.3675, | |
| "step": 216 | |
| }, | |
| { | |
| "epoch": 1.3865814696485623, | |
| "grad_norm": 0.7284878464385949, | |
| "learning_rate": 2.5952801143784377e-05, | |
| "loss": 0.4192, | |
| "step": 217 | |
| }, | |
| { | |
| "epoch": 1.3929712460063897, | |
| "grad_norm": 0.5611770400782802, | |
| "learning_rate": 2.5810156446469815e-05, | |
| "loss": 0.3929, | |
| "step": 218 | |
| }, | |
| { | |
| "epoch": 1.3993610223642172, | |
| "grad_norm": 0.7474665760149969, | |
| "learning_rate": 2.5667188214084156e-05, | |
| "loss": 0.4085, | |
| "step": 219 | |
| }, | |
| { | |
| "epoch": 1.4057507987220448, | |
| "grad_norm": 0.6711723162682844, | |
| "learning_rate": 2.5523904407727518e-05, | |
| "loss": 0.4069, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 1.4121405750798721, | |
| "grad_norm": 0.6895348539751915, | |
| "learning_rate": 2.5380313006072576e-05, | |
| "loss": 0.3844, | |
| "step": 221 | |
| }, | |
| { | |
| "epoch": 1.4185303514376997, | |
| "grad_norm": 0.6675508521520543, | |
| "learning_rate": 2.523642200492027e-05, | |
| "loss": 0.4525, | |
| "step": 222 | |
| }, | |
| { | |
| "epoch": 1.4249201277955272, | |
| "grad_norm": 0.6837935351740067, | |
| "learning_rate": 2.5092239416754545e-05, | |
| "loss": 0.3858, | |
| "step": 223 | |
| }, | |
| { | |
| "epoch": 1.4313099041533546, | |
| "grad_norm": 0.7055234490813711, | |
| "learning_rate": 2.4947773270296218e-05, | |
| "loss": 0.4076, | |
| "step": 224 | |
| }, | |
| { | |
| "epoch": 1.4376996805111821, | |
| "grad_norm": 0.642041869833729, | |
| "learning_rate": 2.4803031610055847e-05, | |
| "loss": 0.4116, | |
| "step": 225 | |
| }, | |
| { | |
| "epoch": 1.4440894568690097, | |
| "grad_norm": 0.5973792017921002, | |
| "learning_rate": 2.465802249588583e-05, | |
| "loss": 0.3701, | |
| "step": 226 | |
| }, | |
| { | |
| "epoch": 1.450479233226837, | |
| "grad_norm": 0.6561472014273614, | |
| "learning_rate": 2.451275400253157e-05, | |
| "loss": 0.3497, | |
| "step": 227 | |
| }, | |
| { | |
| "epoch": 1.4568690095846646, | |
| "grad_norm": 0.7636693842376161, | |
| "learning_rate": 2.436723421918185e-05, | |
| "loss": 0.4835, | |
| "step": 228 | |
| }, | |
| { | |
| "epoch": 1.4632587859424921, | |
| "grad_norm": 0.78261861893272, | |
| "learning_rate": 2.4221471249018368e-05, | |
| "loss": 0.373, | |
| "step": 229 | |
| }, | |
| { | |
| "epoch": 1.4696485623003195, | |
| "grad_norm": 0.7179566840944903, | |
| "learning_rate": 2.4075473208764558e-05, | |
| "loss": 0.3923, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 1.476038338658147, | |
| "grad_norm": 0.5427966480029829, | |
| "learning_rate": 2.392924822823357e-05, | |
| "loss": 0.3575, | |
| "step": 231 | |
| }, | |
| { | |
| "epoch": 1.4824281150159744, | |
| "grad_norm": 0.8958851181734642, | |
| "learning_rate": 2.378280444987561e-05, | |
| "loss": 0.4385, | |
| "step": 232 | |
| }, | |
| { | |
| "epoch": 1.488817891373802, | |
| "grad_norm": 0.5170480560665358, | |
| "learning_rate": 2.3636150028324486e-05, | |
| "loss": 0.3372, | |
| "step": 233 | |
| }, | |
| { | |
| "epoch": 1.4952076677316293, | |
| "grad_norm": 0.7554046281590651, | |
| "learning_rate": 2.3489293129943555e-05, | |
| "loss": 0.4001, | |
| "step": 234 | |
| }, | |
| { | |
| "epoch": 1.5015974440894568, | |
| "grad_norm": 0.7092746883912118, | |
| "learning_rate": 2.334224193237097e-05, | |
| "loss": 0.4067, | |
| "step": 235 | |
| }, | |
| { | |
| "epoch": 1.5079872204472844, | |
| "grad_norm": 0.7165299708968617, | |
| "learning_rate": 2.319500462406434e-05, | |
| "loss": 0.3842, | |
| "step": 236 | |
| }, | |
| { | |
| "epoch": 1.5143769968051117, | |
| "grad_norm": 0.6445134623150068, | |
| "learning_rate": 2.3047589403844702e-05, | |
| "loss": 0.4108, | |
| "step": 237 | |
| }, | |
| { | |
| "epoch": 1.5207667731629393, | |
| "grad_norm": 0.4954710255941146, | |
| "learning_rate": 2.290000448044004e-05, | |
| "loss": 0.3344, | |
| "step": 238 | |
| }, | |
| { | |
| "epoch": 1.5271565495207668, | |
| "grad_norm": 0.6207344756135259, | |
| "learning_rate": 2.2752258072028145e-05, | |
| "loss": 0.4079, | |
| "step": 239 | |
| }, | |
| { | |
| "epoch": 1.5335463258785942, | |
| "grad_norm": 0.4812466788237613, | |
| "learning_rate": 2.260435840577901e-05, | |
| "loss": 0.3711, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 1.5399361022364217, | |
| "grad_norm": 0.7722622559224537, | |
| "learning_rate": 2.245631371739669e-05, | |
| "loss": 0.4798, | |
| "step": 241 | |
| }, | |
| { | |
| "epoch": 1.5463258785942493, | |
| "grad_norm": 0.514025904239645, | |
| "learning_rate": 2.2308132250660717e-05, | |
| "loss": 0.3586, | |
| "step": 242 | |
| }, | |
| { | |
| "epoch": 1.5527156549520766, | |
| "grad_norm": 0.6211683701769444, | |
| "learning_rate": 2.215982225696704e-05, | |
| "loss": 0.341, | |
| "step": 243 | |
| }, | |
| { | |
| "epoch": 1.5591054313099042, | |
| "grad_norm": 0.5633667247279115, | |
| "learning_rate": 2.201139199486856e-05, | |
| "loss": 0.3927, | |
| "step": 244 | |
| }, | |
| { | |
| "epoch": 1.5654952076677318, | |
| "grad_norm": 0.5601679935599658, | |
| "learning_rate": 2.186284972961525e-05, | |
| "loss": 0.4049, | |
| "step": 245 | |
| }, | |
| { | |
| "epoch": 1.571884984025559, | |
| "grad_norm": 0.5442755218693436, | |
| "learning_rate": 2.1714203732693905e-05, | |
| "loss": 0.3493, | |
| "step": 246 | |
| }, | |
| { | |
| "epoch": 1.5782747603833864, | |
| "grad_norm": 0.7005155470203877, | |
| "learning_rate": 2.1565462281367572e-05, | |
| "loss": 0.446, | |
| "step": 247 | |
| }, | |
| { | |
| "epoch": 1.5846645367412142, | |
| "grad_norm": 0.5005677905989587, | |
| "learning_rate": 2.141663365821459e-05, | |
| "loss": 0.3761, | |
| "step": 248 | |
| }, | |
| { | |
| "epoch": 1.5910543130990416, | |
| "grad_norm": 0.598933880956211, | |
| "learning_rate": 2.1267726150667426e-05, | |
| "loss": 0.4227, | |
| "step": 249 | |
| }, | |
| { | |
| "epoch": 1.5974440894568689, | |
| "grad_norm": 0.5823697199633178, | |
| "learning_rate": 2.1118748050551194e-05, | |
| "loss": 0.4185, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 1.6038338658146964, | |
| "grad_norm": 0.4661817463909431, | |
| "learning_rate": 2.0969707653621884e-05, | |
| "loss": 0.335, | |
| "step": 251 | |
| }, | |
| { | |
| "epoch": 1.610223642172524, | |
| "grad_norm": 0.534066282750192, | |
| "learning_rate": 2.0820613259104462e-05, | |
| "loss": 0.4249, | |
| "step": 252 | |
| }, | |
| { | |
| "epoch": 1.6166134185303513, | |
| "grad_norm": 0.5288717205900548, | |
| "learning_rate": 2.0671473169230715e-05, | |
| "loss": 0.4078, | |
| "step": 253 | |
| }, | |
| { | |
| "epoch": 1.623003194888179, | |
| "grad_norm": 0.5850676124543679, | |
| "learning_rate": 2.0522295688776945e-05, | |
| "loss": 0.3872, | |
| "step": 254 | |
| }, | |
| { | |
| "epoch": 1.6293929712460065, | |
| "grad_norm": 0.46385901267321095, | |
| "learning_rate": 2.037308912460153e-05, | |
| "loss": 0.3415, | |
| "step": 255 | |
| }, | |
| { | |
| "epoch": 1.6357827476038338, | |
| "grad_norm": 0.6386876033156802, | |
| "learning_rate": 2.022386178518236e-05, | |
| "loss": 0.417, | |
| "step": 256 | |
| }, | |
| { | |
| "epoch": 1.6421725239616614, | |
| "grad_norm": 0.4805587222138907, | |
| "learning_rate": 2.0074621980154167e-05, | |
| "loss": 0.3882, | |
| "step": 257 | |
| }, | |
| { | |
| "epoch": 1.648562300319489, | |
| "grad_norm": 0.5746353853534201, | |
| "learning_rate": 1.9925378019845837e-05, | |
| "loss": 0.4104, | |
| "step": 258 | |
| }, | |
| { | |
| "epoch": 1.6549520766773163, | |
| "grad_norm": 0.501859313648766, | |
| "learning_rate": 1.9776138214817648e-05, | |
| "loss": 0.3774, | |
| "step": 259 | |
| }, | |
| { | |
| "epoch": 1.6613418530351438, | |
| "grad_norm": 0.5820108106526879, | |
| "learning_rate": 1.9626910875398475e-05, | |
| "loss": 0.3576, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 1.6677316293929714, | |
| "grad_norm": 0.5511522872814458, | |
| "learning_rate": 1.9477704311223062e-05, | |
| "loss": 0.4259, | |
| "step": 261 | |
| }, | |
| { | |
| "epoch": 1.6741214057507987, | |
| "grad_norm": 0.5192707555977589, | |
| "learning_rate": 1.932852683076929e-05, | |
| "loss": 0.4159, | |
| "step": 262 | |
| }, | |
| { | |
| "epoch": 1.680511182108626, | |
| "grad_norm": 0.5414396653048178, | |
| "learning_rate": 1.9179386740895538e-05, | |
| "loss": 0.3925, | |
| "step": 263 | |
| }, | |
| { | |
| "epoch": 1.6869009584664538, | |
| "grad_norm": 0.5148601689863671, | |
| "learning_rate": 1.9030292346378116e-05, | |
| "loss": 0.3598, | |
| "step": 264 | |
| }, | |
| { | |
| "epoch": 1.6932907348242812, | |
| "grad_norm": 0.5119362335475469, | |
| "learning_rate": 1.8881251949448817e-05, | |
| "loss": 0.4128, | |
| "step": 265 | |
| }, | |
| { | |
| "epoch": 1.6996805111821085, | |
| "grad_norm": 0.5906313617691342, | |
| "learning_rate": 1.8732273849332577e-05, | |
| "loss": 0.3892, | |
| "step": 266 | |
| }, | |
| { | |
| "epoch": 1.706070287539936, | |
| "grad_norm": 0.5726182137360729, | |
| "learning_rate": 1.8583366341785424e-05, | |
| "loss": 0.4135, | |
| "step": 267 | |
| }, | |
| { | |
| "epoch": 1.7124600638977636, | |
| "grad_norm": 0.47501236190964014, | |
| "learning_rate": 1.8434537718632438e-05, | |
| "loss": 0.3966, | |
| "step": 268 | |
| }, | |
| { | |
| "epoch": 1.718849840255591, | |
| "grad_norm": 0.5163914225737597, | |
| "learning_rate": 1.8285796267306098e-05, | |
| "loss": 0.3321, | |
| "step": 269 | |
| }, | |
| { | |
| "epoch": 1.7252396166134185, | |
| "grad_norm": 0.5440727291416668, | |
| "learning_rate": 1.8137150270384755e-05, | |
| "loss": 0.4162, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 1.731629392971246, | |
| "grad_norm": 0.4932582814102065, | |
| "learning_rate": 1.7988608005131445e-05, | |
| "loss": 0.4086, | |
| "step": 271 | |
| }, | |
| { | |
| "epoch": 1.7380191693290734, | |
| "grad_norm": 0.533011592071942, | |
| "learning_rate": 1.7840177743032964e-05, | |
| "loss": 0.414, | |
| "step": 272 | |
| }, | |
| { | |
| "epoch": 1.744408945686901, | |
| "grad_norm": 0.49102076107513076, | |
| "learning_rate": 1.769186774933929e-05, | |
| "loss": 0.4176, | |
| "step": 273 | |
| }, | |
| { | |
| "epoch": 1.7507987220447285, | |
| "grad_norm": 0.5771966331162738, | |
| "learning_rate": 1.7543686282603318e-05, | |
| "loss": 0.4432, | |
| "step": 274 | |
| }, | |
| { | |
| "epoch": 1.7571884984025559, | |
| "grad_norm": 0.5461736300422602, | |
| "learning_rate": 1.7395641594220998e-05, | |
| "loss": 0.3829, | |
| "step": 275 | |
| }, | |
| { | |
| "epoch": 1.7635782747603834, | |
| "grad_norm": 0.6577548367990437, | |
| "learning_rate": 1.724774192797186e-05, | |
| "loss": 0.382, | |
| "step": 276 | |
| }, | |
| { | |
| "epoch": 1.769968051118211, | |
| "grad_norm": 0.43878563775490903, | |
| "learning_rate": 1.7099995519559963e-05, | |
| "loss": 0.3575, | |
| "step": 277 | |
| }, | |
| { | |
| "epoch": 1.7763578274760383, | |
| "grad_norm": 0.5120401210743035, | |
| "learning_rate": 1.69524105961553e-05, | |
| "loss": 0.4286, | |
| "step": 278 | |
| }, | |
| { | |
| "epoch": 1.7827476038338657, | |
| "grad_norm": 0.5653816201782214, | |
| "learning_rate": 1.6804995375935674e-05, | |
| "loss": 0.3742, | |
| "step": 279 | |
| }, | |
| { | |
| "epoch": 1.7891373801916934, | |
| "grad_norm": 0.5726713802278968, | |
| "learning_rate": 1.665775806762903e-05, | |
| "loss": 0.3685, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 1.7955271565495208, | |
| "grad_norm": 0.5229354326981167, | |
| "learning_rate": 1.6510706870056455e-05, | |
| "loss": 0.408, | |
| "step": 281 | |
| }, | |
| { | |
| "epoch": 1.8019169329073481, | |
| "grad_norm": 0.4954167189576208, | |
| "learning_rate": 1.6363849971675524e-05, | |
| "loss": 0.3659, | |
| "step": 282 | |
| }, | |
| { | |
| "epoch": 1.8083067092651757, | |
| "grad_norm": 0.5076708644451216, | |
| "learning_rate": 1.6217195550124397e-05, | |
| "loss": 0.3686, | |
| "step": 283 | |
| }, | |
| { | |
| "epoch": 1.8146964856230032, | |
| "grad_norm": 0.5824051094209408, | |
| "learning_rate": 1.6070751771766434e-05, | |
| "loss": 0.4467, | |
| "step": 284 | |
| }, | |
| { | |
| "epoch": 1.8210862619808306, | |
| "grad_norm": 0.5285624925239529, | |
| "learning_rate": 1.592452679123545e-05, | |
| "loss": 0.3776, | |
| "step": 285 | |
| }, | |
| { | |
| "epoch": 1.8274760383386581, | |
| "grad_norm": 0.5376124276180292, | |
| "learning_rate": 1.577852875098164e-05, | |
| "loss": 0.4504, | |
| "step": 286 | |
| }, | |
| { | |
| "epoch": 1.8338658146964857, | |
| "grad_norm": 0.5373102243429786, | |
| "learning_rate": 1.563276578081816e-05, | |
| "loss": 0.3501, | |
| "step": 287 | |
| }, | |
| { | |
| "epoch": 1.840255591054313, | |
| "grad_norm": 0.5462928026195687, | |
| "learning_rate": 1.5487245997468434e-05, | |
| "loss": 0.4209, | |
| "step": 288 | |
| }, | |
| { | |
| "epoch": 1.8466453674121406, | |
| "grad_norm": 0.4909580614215068, | |
| "learning_rate": 1.5341977504114176e-05, | |
| "loss": 0.3647, | |
| "step": 289 | |
| }, | |
| { | |
| "epoch": 1.8530351437699681, | |
| "grad_norm": 0.5421878755182835, | |
| "learning_rate": 1.5196968389944153e-05, | |
| "loss": 0.4069, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 1.8594249201277955, | |
| "grad_norm": 0.5053862903315562, | |
| "learning_rate": 1.5052226729703782e-05, | |
| "loss": 0.3763, | |
| "step": 291 | |
| }, | |
| { | |
| "epoch": 1.865814696485623, | |
| "grad_norm": 0.43565640748900136, | |
| "learning_rate": 1.4907760583245451e-05, | |
| "loss": 0.367, | |
| "step": 292 | |
| }, | |
| { | |
| "epoch": 1.8722044728434506, | |
| "grad_norm": 0.6034830380300681, | |
| "learning_rate": 1.4763577995079738e-05, | |
| "loss": 0.3964, | |
| "step": 293 | |
| }, | |
| { | |
| "epoch": 1.878594249201278, | |
| "grad_norm": 0.5420427837665357, | |
| "learning_rate": 1.461968699392743e-05, | |
| "loss": 0.3378, | |
| "step": 294 | |
| }, | |
| { | |
| "epoch": 1.8849840255591053, | |
| "grad_norm": 0.5951922897755679, | |
| "learning_rate": 1.4476095592272494e-05, | |
| "loss": 0.4326, | |
| "step": 295 | |
| }, | |
| { | |
| "epoch": 1.891373801916933, | |
| "grad_norm": 0.4840728145584482, | |
| "learning_rate": 1.4332811785915852e-05, | |
| "loss": 0.369, | |
| "step": 296 | |
| }, | |
| { | |
| "epoch": 1.8977635782747604, | |
| "grad_norm": 0.5037450720887865, | |
| "learning_rate": 1.418984355353019e-05, | |
| "loss": 0.3897, | |
| "step": 297 | |
| }, | |
| { | |
| "epoch": 1.9041533546325877, | |
| "grad_norm": 0.6081578605737592, | |
| "learning_rate": 1.404719885621563e-05, | |
| "loss": 0.4251, | |
| "step": 298 | |
| }, | |
| { | |
| "epoch": 1.9105431309904153, | |
| "grad_norm": 0.46944637013816987, | |
| "learning_rate": 1.390488563705644e-05, | |
| "loss": 0.3646, | |
| "step": 299 | |
| }, | |
| { | |
| "epoch": 1.9169329073482428, | |
| "grad_norm": 0.5228222176493802, | |
| "learning_rate": 1.3762911820678739e-05, | |
| "loss": 0.4235, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 1.9233226837060702, | |
| "grad_norm": 0.5899737215877533, | |
| "learning_rate": 1.3621285312809193e-05, | |
| "loss": 0.3794, | |
| "step": 301 | |
| }, | |
| { | |
| "epoch": 1.9297124600638977, | |
| "grad_norm": 0.5572929497186375, | |
| "learning_rate": 1.3480013999834811e-05, | |
| "loss": 0.4011, | |
| "step": 302 | |
| }, | |
| { | |
| "epoch": 1.9361022364217253, | |
| "grad_norm": 0.5632588672503747, | |
| "learning_rate": 1.3339105748363776e-05, | |
| "loss": 0.3981, | |
| "step": 303 | |
| }, | |
| { | |
| "epoch": 1.9424920127795526, | |
| "grad_norm": 0.5276160699091037, | |
| "learning_rate": 1.3198568404787418e-05, | |
| "loss": 0.3767, | |
| "step": 304 | |
| }, | |
| { | |
| "epoch": 1.9488817891373802, | |
| "grad_norm": 0.6024773811499252, | |
| "learning_rate": 1.3058409794843288e-05, | |
| "loss": 0.4415, | |
| "step": 305 | |
| }, | |
| { | |
| "epoch": 1.9552715654952078, | |
| "grad_norm": 0.5259393060101312, | |
| "learning_rate": 1.2918637723179371e-05, | |
| "loss": 0.3879, | |
| "step": 306 | |
| }, | |
| { | |
| "epoch": 1.961661341853035, | |
| "grad_norm": 0.5317965049616244, | |
| "learning_rate": 1.2779259972919512e-05, | |
| "loss": 0.4269, | |
| "step": 307 | |
| }, | |
| { | |
| "epoch": 1.9680511182108626, | |
| "grad_norm": 0.45743435082366124, | |
| "learning_rate": 1.2640284305229989e-05, | |
| "loss": 0.3205, | |
| "step": 308 | |
| }, | |
| { | |
| "epoch": 1.9744408945686902, | |
| "grad_norm": 0.5754797561825906, | |
| "learning_rate": 1.2501718458887369e-05, | |
| "loss": 0.395, | |
| "step": 309 | |
| }, | |
| { | |
| "epoch": 1.9808306709265175, | |
| "grad_norm": 0.5096694579354405, | |
| "learning_rate": 1.2363570149847556e-05, | |
| "loss": 0.3931, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 1.9872204472843449, | |
| "grad_norm": 0.5649164808169996, | |
| "learning_rate": 1.2225847070816148e-05, | |
| "loss": 0.3755, | |
| "step": 311 | |
| }, | |
| { | |
| "epoch": 1.9936102236421727, | |
| "grad_norm": 0.4969808916941929, | |
| "learning_rate": 1.208855689082005e-05, | |
| "loss": 0.4243, | |
| "step": 312 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 0.7954635925002451, | |
| "learning_rate": 1.1951707254780465e-05, | |
| "loss": 0.4858, | |
| "step": 313 | |
| }, | |
| { | |
| "epoch": 2.0063897763578273, | |
| "grad_norm": 0.6662795810886528, | |
| "learning_rate": 1.1815305783087153e-05, | |
| "loss": 0.2806, | |
| "step": 314 | |
| }, | |
| { | |
| "epoch": 2.012779552715655, | |
| "grad_norm": 0.5137310238156836, | |
| "learning_rate": 1.1679360071174121e-05, | |
| "loss": 0.295, | |
| "step": 315 | |
| }, | |
| { | |
| "epoch": 2.0191693290734825, | |
| "grad_norm": 0.815847533118561, | |
| "learning_rate": 1.1543877689096661e-05, | |
| "loss": 0.2896, | |
| "step": 316 | |
| }, | |
| { | |
| "epoch": 2.02555910543131, | |
| "grad_norm": 0.5069824950228584, | |
| "learning_rate": 1.1408866181109824e-05, | |
| "loss": 0.2837, | |
| "step": 317 | |
| }, | |
| { | |
| "epoch": 2.0319488817891376, | |
| "grad_norm": 0.5850695127338842, | |
| "learning_rate": 1.1274333065248327e-05, | |
| "loss": 0.2856, | |
| "step": 318 | |
| }, | |
| { | |
| "epoch": 2.038338658146965, | |
| "grad_norm": 0.5779513618203883, | |
| "learning_rate": 1.1140285832907888e-05, | |
| "loss": 0.2885, | |
| "step": 319 | |
| }, | |
| { | |
| "epoch": 2.0447284345047922, | |
| "grad_norm": 0.5351383370576208, | |
| "learning_rate": 1.1006731948428118e-05, | |
| "loss": 0.3018, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 2.0511182108626196, | |
| "grad_norm": 0.4884202390865607, | |
| "learning_rate": 1.0873678848676852e-05, | |
| "loss": 0.2815, | |
| "step": 321 | |
| }, | |
| { | |
| "epoch": 2.0575079872204474, | |
| "grad_norm": 0.5191469140904564, | |
| "learning_rate": 1.074113394263598e-05, | |
| "loss": 0.2685, | |
| "step": 322 | |
| }, | |
| { | |
| "epoch": 2.0638977635782747, | |
| "grad_norm": 0.541141996904648, | |
| "learning_rate": 1.0609104610988986e-05, | |
| "loss": 0.2809, | |
| "step": 323 | |
| }, | |
| { | |
| "epoch": 2.070287539936102, | |
| "grad_norm": 0.5063236911612786, | |
| "learning_rate": 1.0477598205709882e-05, | |
| "loss": 0.2979, | |
| "step": 324 | |
| }, | |
| { | |
| "epoch": 2.07667731629393, | |
| "grad_norm": 0.48822674540291033, | |
| "learning_rate": 1.0346622049653832e-05, | |
| "loss": 0.2493, | |
| "step": 325 | |
| }, | |
| { | |
| "epoch": 2.083067092651757, | |
| "grad_norm": 0.5172260293987238, | |
| "learning_rate": 1.0216183436149412e-05, | |
| "loss": 0.3062, | |
| "step": 326 | |
| }, | |
| { | |
| "epoch": 2.0894568690095845, | |
| "grad_norm": 0.5077833533693695, | |
| "learning_rate": 1.0086289628592429e-05, | |
| "loss": 0.2817, | |
| "step": 327 | |
| }, | |
| { | |
| "epoch": 2.0958466453674123, | |
| "grad_norm": 0.47094358503601513, | |
| "learning_rate": 9.956947860041535e-06, | |
| "loss": 0.2633, | |
| "step": 328 | |
| }, | |
| { | |
| "epoch": 2.1022364217252396, | |
| "grad_norm": 0.4678422611989536, | |
| "learning_rate": 9.82816533281541e-06, | |
| "loss": 0.2519, | |
| "step": 329 | |
| }, | |
| { | |
| "epoch": 2.108626198083067, | |
| "grad_norm": 0.4961545238058503, | |
| "learning_rate": 9.699949218091698e-06, | |
| "loss": 0.2806, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 2.1150159744408947, | |
| "grad_norm": 0.45400755049188124, | |
| "learning_rate": 9.57230665550773e-06, | |
| "loss": 0.2845, | |
| "step": 331 | |
| }, | |
| { | |
| "epoch": 2.121405750798722, | |
| "grad_norm": 0.4087039835019054, | |
| "learning_rate": 9.44524475276291e-06, | |
| "loss": 0.2712, | |
| "step": 332 | |
| }, | |
| { | |
| "epoch": 2.1277955271565494, | |
| "grad_norm": 0.45141929812295156, | |
| "learning_rate": 9.318770585222959e-06, | |
| "loss": 0.2751, | |
| "step": 333 | |
| }, | |
| { | |
| "epoch": 2.134185303514377, | |
| "grad_norm": 0.4285243933890445, | |
| "learning_rate": 9.19289119552592e-06, | |
| "loss": 0.2715, | |
| "step": 334 | |
| }, | |
| { | |
| "epoch": 2.1405750798722045, | |
| "grad_norm": 0.4298003352353432, | |
| "learning_rate": 9.067613593189978e-06, | |
| "loss": 0.2806, | |
| "step": 335 | |
| }, | |
| { | |
| "epoch": 2.146964856230032, | |
| "grad_norm": 0.45672324827943345, | |
| "learning_rate": 8.942944754223148e-06, | |
| "loss": 0.2973, | |
| "step": 336 | |
| }, | |
| { | |
| "epoch": 2.1533546325878596, | |
| "grad_norm": 0.4370811146410844, | |
| "learning_rate": 8.818891620734843e-06, | |
| "loss": 0.3125, | |
| "step": 337 | |
| }, | |
| { | |
| "epoch": 2.159744408945687, | |
| "grad_norm": 0.43681333400547245, | |
| "learning_rate": 8.695461100549257e-06, | |
| "loss": 0.2928, | |
| "step": 338 | |
| }, | |
| { | |
| "epoch": 2.1661341853035143, | |
| "grad_norm": 0.4180585221813101, | |
| "learning_rate": 8.572660066820768e-06, | |
| "loss": 0.2434, | |
| "step": 339 | |
| }, | |
| { | |
| "epoch": 2.1725239616613417, | |
| "grad_norm": 0.42261573045223133, | |
| "learning_rate": 8.450495357651147e-06, | |
| "loss": 0.2675, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 2.1789137380191694, | |
| "grad_norm": 0.4068510209903175, | |
| "learning_rate": 8.328973775708848e-06, | |
| "loss": 0.2813, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 2.1853035143769968, | |
| "grad_norm": 0.388040708146111, | |
| "learning_rate": 8.208102087850136e-06, | |
| "loss": 0.2878, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 2.191693290734824, | |
| "grad_norm": 0.43053220862581665, | |
| "learning_rate": 8.087887024742343e-06, | |
| "loss": 0.2825, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 2.198083067092652, | |
| "grad_norm": 0.41793950177347855, | |
| "learning_rate": 7.968335280489037e-06, | |
| "loss": 0.2986, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 2.2044728434504792, | |
| "grad_norm": 0.40031758724937355, | |
| "learning_rate": 7.849453512257264e-06, | |
| "loss": 0.3, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 2.2108626198083066, | |
| "grad_norm": 0.38059364454447125, | |
| "learning_rate": 7.731248339906879e-06, | |
| "loss": 0.2727, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 2.2172523961661343, | |
| "grad_norm": 0.41990913510389755, | |
| "learning_rate": 7.613726345621873e-06, | |
| "loss": 0.3046, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 2.2236421725239617, | |
| "grad_norm": 0.43424406475522437, | |
| "learning_rate": 7.496894073543899e-06, | |
| "loss": 0.2936, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 2.230031948881789, | |
| "grad_norm": 0.3750088497925214, | |
| "learning_rate": 7.380758029407857e-06, | |
| "loss": 0.2664, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 2.236421725239617, | |
| "grad_norm": 0.418532871938177, | |
| "learning_rate": 7.2653246801795594e-06, | |
| "loss": 0.2891, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 2.242811501597444, | |
| "grad_norm": 0.4028035425262737, | |
| "learning_rate": 7.150600453695726e-06, | |
| "loss": 0.2734, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 2.2492012779552715, | |
| "grad_norm": 0.4466135219208513, | |
| "learning_rate": 7.036591738305989e-06, | |
| "loss": 0.2815, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 2.255591054313099, | |
| "grad_norm": 0.3867166130148223, | |
| "learning_rate": 6.923304882517159e-06, | |
| "loss": 0.296, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 2.2619808306709266, | |
| "grad_norm": 0.3917494183899583, | |
| "learning_rate": 6.81074619463975e-06, | |
| "loss": 0.2325, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 2.268370607028754, | |
| "grad_norm": 0.4190504448138774, | |
| "learning_rate": 6.698921942436665e-06, | |
| "loss": 0.2535, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 2.2747603833865817, | |
| "grad_norm": 0.41331962647091514, | |
| "learning_rate": 6.587838352774209e-06, | |
| "loss": 0.2587, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 2.281150159744409, | |
| "grad_norm": 0.41804090107839076, | |
| "learning_rate": 6.477501611275345e-06, | |
| "loss": 0.2758, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 2.2875399361022364, | |
| "grad_norm": 0.39910602146901847, | |
| "learning_rate": 6.367917861975224e-06, | |
| "loss": 0.2607, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 2.2939297124600637, | |
| "grad_norm": 0.36374725362936167, | |
| "learning_rate": 6.2590932069790965e-06, | |
| "loss": 0.2896, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 2.3003194888178915, | |
| "grad_norm": 0.3931796715710283, | |
| "learning_rate": 6.151033706122489e-06, | |
| "loss": 0.2562, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 2.306709265175719, | |
| "grad_norm": 0.39607678195113055, | |
| "learning_rate": 6.043745376633792e-06, | |
| "loss": 0.2575, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 2.313099041533546, | |
| "grad_norm": 0.370701894040692, | |
| "learning_rate": 5.937234192799184e-06, | |
| "loss": 0.2476, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 2.319488817891374, | |
| "grad_norm": 0.3922675962076736, | |
| "learning_rate": 5.831506085629937e-06, | |
| "loss": 0.2758, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 2.3258785942492013, | |
| "grad_norm": 0.39508969009396355, | |
| "learning_rate": 5.7265669425321765e-06, | |
| "loss": 0.2572, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 2.3322683706070286, | |
| "grad_norm": 0.4127067246019672, | |
| "learning_rate": 5.622422606979055e-06, | |
| "loss": 0.2507, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 2.3386581469648564, | |
| "grad_norm": 0.40000330806363765, | |
| "learning_rate": 5.519078878185311e-06, | |
| "loss": 0.2973, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 2.3450479233226837, | |
| "grad_norm": 0.3982421880351729, | |
| "learning_rate": 5.416541510784414e-06, | |
| "loss": 0.2778, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 2.351437699680511, | |
| "grad_norm": 0.37370414007694336, | |
| "learning_rate": 5.314816214508054e-06, | |
| "loss": 0.2815, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 2.357827476038339, | |
| "grad_norm": 0.4150724253275529, | |
| "learning_rate": 5.21390865386824e-06, | |
| "loss": 0.2968, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 2.364217252396166, | |
| "grad_norm": 0.3761463056883756, | |
| "learning_rate": 5.113824447841873e-06, | |
| "loss": 0.2363, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 2.3706070287539935, | |
| "grad_norm": 0.35871951677398056, | |
| "learning_rate": 5.014569169557826e-06, | |
| "loss": 0.2612, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 2.376996805111821, | |
| "grad_norm": 0.3892389571463014, | |
| "learning_rate": 4.916148345986649e-06, | |
| "loss": 0.2759, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 2.3833865814696487, | |
| "grad_norm": 0.37772421711614007, | |
| "learning_rate": 4.8185674576327615e-06, | |
| "loss": 0.2608, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 2.389776357827476, | |
| "grad_norm": 0.382653409586813, | |
| "learning_rate": 4.721831938229315e-06, | |
| "loss": 0.3155, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 2.3961661341853033, | |
| "grad_norm": 0.39130961949763166, | |
| "learning_rate": 4.625947174435596e-06, | |
| "loss": 0.292, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 2.402555910543131, | |
| "grad_norm": 0.3689264594938094, | |
| "learning_rate": 4.530918505537061e-06, | |
| "loss": 0.2778, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 2.4089456869009584, | |
| "grad_norm": 0.3583777664156191, | |
| "learning_rate": 4.436751223148055e-06, | |
| "loss": 0.2793, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 2.415335463258786, | |
| "grad_norm": 0.3792734486077673, | |
| "learning_rate": 4.343450570917118e-06, | |
| "loss": 0.2987, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 2.4217252396166136, | |
| "grad_norm": 0.3559642646144205, | |
| "learning_rate": 4.251021744235013e-06, | |
| "loss": 0.2782, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 2.428115015974441, | |
| "grad_norm": 0.4010256232612567, | |
| "learning_rate": 4.159469889945434e-06, | |
| "loss": 0.2529, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 2.4345047923322682, | |
| "grad_norm": 0.3765358138333099, | |
| "learning_rate": 4.068800106058375e-06, | |
| "loss": 0.279, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 2.440894568690096, | |
| "grad_norm": 0.3685887074154549, | |
| "learning_rate": 3.9790174414662844e-06, | |
| "loss": 0.2705, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 2.4472843450479234, | |
| "grad_norm": 0.3728894980841834, | |
| "learning_rate": 3.890126895662904e-06, | |
| "loss": 0.2583, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 2.4536741214057507, | |
| "grad_norm": 0.36277573516201544, | |
| "learning_rate": 3.8021334184648685e-06, | |
| "loss": 0.2807, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 2.460063897763578, | |
| "grad_norm": 0.35706119183313567, | |
| "learning_rate": 3.7150419097360944e-06, | |
| "loss": 0.2898, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 2.466453674121406, | |
| "grad_norm": 0.38011267281397304, | |
| "learning_rate": 3.628857219114916e-06, | |
| "loss": 0.2553, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 2.472843450479233, | |
| "grad_norm": 0.3863113162029853, | |
| "learning_rate": 3.543584145744057e-06, | |
| "loss": 0.2534, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 2.479233226837061, | |
| "grad_norm": 0.439587610934195, | |
| "learning_rate": 3.459227438003383e-06, | |
| "loss": 0.2975, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 2.4856230031948883, | |
| "grad_norm": 0.3793664447722741, | |
| "learning_rate": 3.3757917932454732e-06, | |
| "loss": 0.2902, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 2.4920127795527156, | |
| "grad_norm": 0.406296707834338, | |
| "learning_rate": 3.293281857534094e-06, | |
| "loss": 0.2969, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 2.498402555910543, | |
| "grad_norm": 0.37691162956880797, | |
| "learning_rate": 3.2117022253854356e-06, | |
| "loss": 0.2889, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 2.5047923322683707, | |
| "grad_norm": 0.35367935501619513, | |
| "learning_rate": 3.1310574395122994e-06, | |
| "loss": 0.2516, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 2.511182108626198, | |
| "grad_norm": 0.3525236924672651, | |
| "learning_rate": 3.0513519905711474e-06, | |
| "loss": 0.2781, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 2.5175718849840254, | |
| "grad_norm": 0.3564008946825266, | |
| "learning_rate": 2.9725903169120073e-06, | |
| "loss": 0.2698, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 2.523961661341853, | |
| "grad_norm": 0.3718042292779991, | |
| "learning_rate": 2.894776804331372e-06, | |
| "loss": 0.2557, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 2.5303514376996805, | |
| "grad_norm": 0.3716672854028638, | |
| "learning_rate": 2.817915785827927e-06, | |
| "loss": 0.2579, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 2.536741214057508, | |
| "grad_norm": 0.353812866963394, | |
| "learning_rate": 2.742011541361318e-06, | |
| "loss": 0.2715, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 2.543130990415335, | |
| "grad_norm": 0.3844045853121929, | |
| "learning_rate": 2.6670682976137994e-06, | |
| "loss": 0.3141, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 2.549520766773163, | |
| "grad_norm": 0.37146608361893524, | |
| "learning_rate": 2.5930902277548686e-06, | |
| "loss": 0.2504, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 2.5559105431309903, | |
| "grad_norm": 0.35978150748451004, | |
| "learning_rate": 2.5200814512089088e-06, | |
| "loss": 0.2868, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 2.562300319488818, | |
| "grad_norm": 0.3498821367555298, | |
| "learning_rate": 2.4480460334257727e-06, | |
| "loss": 0.2778, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 2.5686900958466454, | |
| "grad_norm": 0.3590299815828268, | |
| "learning_rate": 2.376987985654422e-06, | |
| "loss": 0.2466, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 2.5750798722044728, | |
| "grad_norm": 0.3560328836577989, | |
| "learning_rate": 2.3069112647195647e-06, | |
| "loss": 0.229, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 2.5814696485623, | |
| "grad_norm": 0.37314736778508073, | |
| "learning_rate": 2.2378197728012975e-06, | |
| "loss": 0.2704, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 2.587859424920128, | |
| "grad_norm": 0.3763392516149028, | |
| "learning_rate": 2.169717357217844e-06, | |
| "loss": 0.259, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 2.594249201277955, | |
| "grad_norm": 0.3399458492987759, | |
| "learning_rate": 2.1026078102113014e-06, | |
| "loss": 0.2684, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 2.600638977635783, | |
| "grad_norm": 0.3578314862739145, | |
| "learning_rate": 2.0364948687364692e-06, | |
| "loss": 0.2664, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 2.6070287539936103, | |
| "grad_norm": 0.39093714164893223, | |
| "learning_rate": 1.971382214252784e-06, | |
| "loss": 0.3064, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 2.6134185303514377, | |
| "grad_norm": 0.35226742624856194, | |
| "learning_rate": 1.9072734725192844e-06, | |
| "loss": 0.3043, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 2.619808306709265, | |
| "grad_norm": 0.34363517197221133, | |
| "learning_rate": 1.844172213392743e-06, | |
| "loss": 0.2771, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 2.626198083067093, | |
| "grad_norm": 0.35717003311156614, | |
| "learning_rate": 1.7820819506288688e-06, | |
| "loss": 0.2916, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 2.63258785942492, | |
| "grad_norm": 0.3397444551813988, | |
| "learning_rate": 1.7210061416866297e-06, | |
| "loss": 0.2802, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 2.6389776357827475, | |
| "grad_norm": 0.3374288264378377, | |
| "learning_rate": 1.660948187535767e-06, | |
| "loss": 0.2996, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 2.6453674121405752, | |
| "grad_norm": 0.348235164857282, | |
| "learning_rate": 1.6019114324673647e-06, | |
| "loss": 0.2643, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 2.6517571884984026, | |
| "grad_norm": 0.3728291742720239, | |
| "learning_rate": 1.543899163907665e-06, | |
| "loss": 0.2875, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 2.65814696485623, | |
| "grad_norm": 0.33982563684444306, | |
| "learning_rate": 1.4869146122349932e-06, | |
| "loss": 0.2545, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 2.6645367412140573, | |
| "grad_norm": 0.34046077697953536, | |
| "learning_rate": 1.4309609505998689e-06, | |
| "loss": 0.2647, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 2.670926517571885, | |
| "grad_norm": 0.3459458344871144, | |
| "learning_rate": 1.3760412947483292e-06, | |
| "loss": 0.2972, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 2.6773162939297124, | |
| "grad_norm": 0.36579406936806147, | |
| "learning_rate": 1.322158702848415e-06, | |
| "loss": 0.2695, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 2.68370607028754, | |
| "grad_norm": 0.3707374256323378, | |
| "learning_rate": 1.269316175319879e-06, | |
| "loss": 0.2719, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 2.6900958466453675, | |
| "grad_norm": 0.3709423910066585, | |
| "learning_rate": 1.217516654667128e-06, | |
| "loss": 0.298, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 2.696485623003195, | |
| "grad_norm": 0.37469291952689926, | |
| "learning_rate": 1.1667630253153428e-06, | |
| "loss": 0.2612, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 2.702875399361022, | |
| "grad_norm": 0.347832218730705, | |
| "learning_rate": 1.1170581134498849e-06, | |
| "loss": 0.2918, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 2.70926517571885, | |
| "grad_norm": 0.36883525953686164, | |
| "learning_rate": 1.0684046868589103e-06, | |
| "loss": 0.2719, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 2.7156549520766773, | |
| "grad_norm": 0.34462150376832146, | |
| "learning_rate": 1.0208054547792478e-06, | |
| "loss": 0.2524, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 2.722044728434505, | |
| "grad_norm": 0.33935916043461156, | |
| "learning_rate": 9.742630677455422e-07, | |
| "loss": 0.2525, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 2.7284345047923324, | |
| "grad_norm": 0.35482552002227297, | |
| "learning_rate": 9.287801174426491e-07, | |
| "loss": 0.2701, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 2.7348242811501597, | |
| "grad_norm": 0.341771079482618, | |
| "learning_rate": 8.843591365613324e-07, | |
| "loss": 0.2759, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 2.741214057507987, | |
| "grad_norm": 0.3437210997091319, | |
| "learning_rate": 8.410025986572279e-07, | |
| "loss": 0.2507, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 2.747603833865815, | |
| "grad_norm": 0.35387497003554297, | |
| "learning_rate": 7.987129180130981e-07, | |
| "loss": 0.2934, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 2.753993610223642, | |
| "grad_norm": 0.3433319826548646, | |
| "learning_rate": 7.574924495044067e-07, | |
| "loss": 0.2816, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 2.7603833865814695, | |
| "grad_norm": 0.3445699585899121, | |
| "learning_rate": 7.17343488468174e-07, | |
| "loss": 0.2938, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 2.7667731629392973, | |
| "grad_norm": 0.36179132098384625, | |
| "learning_rate": 6.782682705751775e-07, | |
| "loss": 0.259, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 2.7731629392971247, | |
| "grad_norm": 0.35409736012157567, | |
| "learning_rate": 6.402689717054467e-07, | |
| "loss": 0.2694, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 2.779552715654952, | |
| "grad_norm": 0.3347120611379767, | |
| "learning_rate": 6.033477078271132e-07, | |
| "loss": 0.2791, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 2.7859424920127793, | |
| "grad_norm": 0.34362095896036443, | |
| "learning_rate": 5.675065348785724e-07, | |
| "loss": 0.3029, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 2.792332268370607, | |
| "grad_norm": 0.3623549484091769, | |
| "learning_rate": 5.327474486540097e-07, | |
| "loss": 0.2857, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 2.7987220447284344, | |
| "grad_norm": 0.3849385006693377, | |
| "learning_rate": 4.990723846922563e-07, | |
| "loss": 0.2437, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 2.8051118210862622, | |
| "grad_norm": 0.3456451470480186, | |
| "learning_rate": 4.664832181690204e-07, | |
| "loss": 0.2464, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 2.8115015974440896, | |
| "grad_norm": 0.3623322322297761, | |
| "learning_rate": 4.3498176379245696e-07, | |
| "loss": 0.266, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 2.817891373801917, | |
| "grad_norm": 0.33249253792620725, | |
| "learning_rate": 4.045697757021327e-07, | |
| "loss": 0.2745, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 2.8242811501597442, | |
| "grad_norm": 0.35011482248093373, | |
| "learning_rate": 3.7524894737133164e-07, | |
| "loss": 0.2849, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 2.830670926517572, | |
| "grad_norm": 0.35867683687456126, | |
| "learning_rate": 3.4702091151276094e-07, | |
| "loss": 0.309, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 2.8370607028753994, | |
| "grad_norm": 0.35152581911491154, | |
| "learning_rate": 3.1988723998764405e-07, | |
| "loss": 0.2618, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 2.8434504792332267, | |
| "grad_norm": 0.3729635038125147, | |
| "learning_rate": 2.938494437181727e-07, | |
| "loss": 0.2999, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 2.8498402555910545, | |
| "grad_norm": 0.33552722155677045, | |
| "learning_rate": 2.689089726033922e-07, | |
| "loss": 0.2825, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 2.856230031948882, | |
| "grad_norm": 0.3924095079184395, | |
| "learning_rate": 2.450672154384548e-07, | |
| "loss": 0.2687, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 2.862619808306709, | |
| "grad_norm": 0.5138097567383261, | |
| "learning_rate": 2.2232549983728146e-07, | |
| "loss": 0.3004, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 2.8690095846645365, | |
| "grad_norm": 0.3283039402589176, | |
| "learning_rate": 2.006850921586434e-07, | |
| "loss": 0.294, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 2.8753993610223643, | |
| "grad_norm": 0.37738305697605573, | |
| "learning_rate": 1.801471974356428e-07, | |
| "loss": 0.2692, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 2.8817891373801916, | |
| "grad_norm": 0.3418654851564619, | |
| "learning_rate": 1.607129593086043e-07, | |
| "loss": 0.3001, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 2.8881789137380194, | |
| "grad_norm": 0.3402266094342756, | |
| "learning_rate": 1.4238345996140824e-07, | |
| "loss": 0.2911, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 2.8945686900958467, | |
| "grad_norm": 0.31824348901824345, | |
| "learning_rate": 1.251597200612098e-07, | |
| "loss": 0.279, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 2.900958466453674, | |
| "grad_norm": 0.34531177153096265, | |
| "learning_rate": 1.0904269870161799e-07, | |
| "loss": 0.3028, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 2.9073482428115014, | |
| "grad_norm": 0.3568279096820218, | |
| "learning_rate": 9.40332933492849e-08, | |
| "loss": 0.2927, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 2.913738019169329, | |
| "grad_norm": 0.34527750344815883, | |
| "learning_rate": 8.013233979393242e-08, | |
| "loss": 0.2933, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 2.9201277955271565, | |
| "grad_norm": 0.3493059274199788, | |
| "learning_rate": 6.734061210180499e-08, | |
| "loss": 0.252, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 2.9265175718849843, | |
| "grad_norm": 0.370444099756165, | |
| "learning_rate": 5.565882257257515e-08, | |
| "loss": 0.2883, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 2.9329073482428116, | |
| "grad_norm": 0.3400590434922779, | |
| "learning_rate": 4.508762169967318e-08, | |
| "loss": 0.2418, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 2.939297124600639, | |
| "grad_norm": 0.3380417666194957, | |
| "learning_rate": 3.562759813407146e-08, | |
| "loss": 0.2928, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 2.9456869009584663, | |
| "grad_norm": 0.34811024230473875, | |
| "learning_rate": 2.7279278651497487e-08, | |
| "loss": 0.3067, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 2.952076677316294, | |
| "grad_norm": 0.37158100906796243, | |
| "learning_rate": 2.0043128123108378e-08, | |
| "loss": 0.2683, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 2.9584664536741214, | |
| "grad_norm": 0.3508788377065571, | |
| "learning_rate": 1.3919549489593841e-08, | |
| "loss": 0.2738, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 2.9648562300319488, | |
| "grad_norm": 0.34321071515720997, | |
| "learning_rate": 8.908883738754093e-09, | |
| "loss": 0.2583, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 2.9712460063897765, | |
| "grad_norm": 0.34013241840819486, | |
| "learning_rate": 5.011409886503949e-09, | |
| "loss": 0.2974, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 2.977635782747604, | |
| "grad_norm": 0.3266932550661067, | |
| "learning_rate": 2.2273449613274822e-09, | |
| "loss": 0.264, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 2.984025559105431, | |
| "grad_norm": 0.33590879102094134, | |
| "learning_rate": 5.568439922121194e-10, | |
| "loss": 0.2854, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 2.9904153354632586, | |
| "grad_norm": 0.36927881731237316, | |
| "learning_rate": 0.0, | |
| "loss": 0.2865, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 2.9904153354632586, | |
| "step": 468, | |
| "total_flos": 3.310734730054861e+17, | |
| "train_loss": 0.41292818673910237, | |
| "train_runtime": 10004.6891, | |
| "train_samples_per_second": 2.998, | |
| "train_steps_per_second": 0.047 | |
| } | |
| ], | |
| "logging_steps": 1.0, | |
| "max_steps": 468, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 3.310734730054861e+17, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
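
The state above is data only, so nothing in it needs rewriting; as a convenience, here is a minimal sketch (not part of the trainer output) of how one might load such a `trainer_state.json` and summarize the logged loss curve. The file path is a hypothetical placeholder, and the sketch assumes the file is saved as plain JSON (without the table formatting shown above) and that, as in this file, the final `log_history` entry carries the aggregate `train_loss` summary rather than a per-step loss.

```python
# Minimal sketch: inspect a Hugging Face trainer_state.json training log.
# STATE_PATH is a hypothetical placeholder; point it at your checkpoint directory.
import json

STATE_PATH = "trainer_state.json"

with open(STATE_PATH) as f:
    state = json.load(f)

# Per-step training logs carry a "loss" key; the last entry of log_history
# (in this file) is the aggregate summary with train_loss / train_runtime instead.
step_logs = [entry for entry in state["log_history"] if "loss" in entry]
summary = state["log_history"][-1]

first, last = step_logs[0], step_logs[-1]
print(f"steps logged              : {len(step_logs)}")
print(f"loss at step {first['step']:>4}         : {first['loss']:.4f}")
print(f"loss at step {last['step']:>4}         : {last['loss']:.4f}")
print(f"mean train loss (summary) : {summary.get('train_loss', float('nan')):.4f}")
print(f"train runtime (s)         : {summary.get('train_runtime', float('nan')):.1f}")
```

Run against this log, the sketch would report 468 logged steps with the loss falling from roughly 0.89 at step 1 to about 0.29 at step 468, matching the summary `train_loss` of about 0.413.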