| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 2.9904153354632586, |
| "eval_steps": 500, |
| "global_step": 468, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.006389776357827476, |
| "grad_norm": 5.939312845608103, |
| "learning_rate": 4.2553191489361704e-07, |
| "loss": 0.8883, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.012779552715654952, |
| "grad_norm": 5.6459943274568385, |
| "learning_rate": 8.510638297872341e-07, |
| "loss": 0.8365, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.019169329073482427, |
| "grad_norm": 6.290489665408837, |
| "learning_rate": 1.276595744680851e-06, |
| "loss": 0.8864, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.025559105431309903, |
| "grad_norm": 5.8544636560570735, |
| "learning_rate": 1.7021276595744682e-06, |
| "loss": 0.879, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.03194888178913738, |
| "grad_norm": 5.478170251266386, |
| "learning_rate": 2.1276595744680853e-06, |
| "loss": 0.8245, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.038338658146964855, |
| "grad_norm": 4.688336713722404, |
| "learning_rate": 2.553191489361702e-06, |
| "loss": 0.849, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.04472843450479233, |
| "grad_norm": 4.278233500376253, |
| "learning_rate": 2.978723404255319e-06, |
| "loss": 0.8151, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.051118210862619806, |
| "grad_norm": 2.367029075074496, |
| "learning_rate": 3.4042553191489363e-06, |
| "loss": 0.7566, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.05750798722044728, |
| "grad_norm": 2.0707237905380484, |
| "learning_rate": 3.8297872340425535e-06, |
| "loss": 0.7551, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.06389776357827476, |
| "grad_norm": 1.7866725538131587, |
| "learning_rate": 4.255319148936171e-06, |
| "loss": 0.7457, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.07028753993610223, |
| "grad_norm": 3.404477977029493, |
| "learning_rate": 4.680851063829788e-06, |
| "loss": 0.699, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.07667731629392971, |
| "grad_norm": 3.8759087344215093, |
| "learning_rate": 5.106382978723404e-06, |
| "loss": 0.8106, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.08306709265175719, |
| "grad_norm": 4.206484510948959, |
| "learning_rate": 5.531914893617022e-06, |
| "loss": 0.7639, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.08945686900958466, |
| "grad_norm": 3.597855342218176, |
| "learning_rate": 5.957446808510638e-06, |
| "loss": 0.7143, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.09584664536741214, |
| "grad_norm": 3.493223809169224, |
| "learning_rate": 6.382978723404256e-06, |
| "loss": 0.7495, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.10223642172523961, |
| "grad_norm": 2.7730036124678588, |
| "learning_rate": 6.808510638297873e-06, |
| "loss": 0.7079, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.10862619808306709, |
| "grad_norm": 1.99917862715565, |
| "learning_rate": 7.234042553191491e-06, |
| "loss": 0.6637, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.11501597444089456, |
| "grad_norm": 1.7234540912576959, |
| "learning_rate": 7.659574468085107e-06, |
| "loss": 0.6563, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.12140575079872204, |
| "grad_norm": 1.9632219810386513, |
| "learning_rate": 8.085106382978723e-06, |
| "loss": 0.6753, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.12779552715654952, |
| "grad_norm": 2.1614567368781157, |
| "learning_rate": 8.510638297872341e-06, |
| "loss": 0.6063, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.134185303514377, |
| "grad_norm": 1.932507047332365, |
| "learning_rate": 8.936170212765958e-06, |
| "loss": 0.629, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.14057507987220447, |
| "grad_norm": 1.5481507427216847, |
| "learning_rate": 9.361702127659576e-06, |
| "loss": 0.6444, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.14696485623003194, |
| "grad_norm": 1.2177329752055572, |
| "learning_rate": 9.787234042553192e-06, |
| "loss": 0.641, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.15335463258785942, |
| "grad_norm": 1.4206725631658237, |
| "learning_rate": 1.0212765957446808e-05, |
| "loss": 0.6496, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.1597444089456869, |
| "grad_norm": 1.4667519530084758, |
| "learning_rate": 1.0638297872340426e-05, |
| "loss": 0.6304, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.16613418530351437, |
| "grad_norm": 1.2452708214307595, |
| "learning_rate": 1.1063829787234044e-05, |
| "loss": 0.6413, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.17252396166134185, |
| "grad_norm": 0.7759167145812482, |
| "learning_rate": 1.1489361702127662e-05, |
| "loss": 0.5927, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.17891373801916932, |
| "grad_norm": 1.0321850071310563, |
| "learning_rate": 1.1914893617021277e-05, |
| "loss": 0.5898, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.1853035143769968, |
| "grad_norm": 1.0266703283162124, |
| "learning_rate": 1.2340425531914895e-05, |
| "loss": 0.5895, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.19169329073482427, |
| "grad_norm": 0.9674925840183656, |
| "learning_rate": 1.2765957446808513e-05, |
| "loss": 0.5853, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.19808306709265175, |
| "grad_norm": 0.9805374530460067, |
| "learning_rate": 1.3191489361702127e-05, |
| "loss": 0.5756, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.20447284345047922, |
| "grad_norm": 0.9258462587522315, |
| "learning_rate": 1.3617021276595745e-05, |
| "loss": 0.585, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.2108626198083067, |
| "grad_norm": 0.8925757909283792, |
| "learning_rate": 1.4042553191489363e-05, |
| "loss": 0.584, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.21725239616613418, |
| "grad_norm": 0.7738950177471798, |
| "learning_rate": 1.4468085106382981e-05, |
| "loss": 0.553, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.22364217252396165, |
| "grad_norm": 0.823063478968403, |
| "learning_rate": 1.4893617021276596e-05, |
| "loss": 0.605, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.23003194888178913, |
| "grad_norm": 0.7769591687125452, |
| "learning_rate": 1.5319148936170214e-05, |
| "loss": 0.5398, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.2364217252396166, |
| "grad_norm": 0.724085749329525, |
| "learning_rate": 1.5744680851063832e-05, |
| "loss": 0.5541, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.24281150159744408, |
| "grad_norm": 0.7937699708520545, |
| "learning_rate": 1.6170212765957446e-05, |
| "loss": 0.5516, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.24920127795527156, |
| "grad_norm": 0.838915436660047, |
| "learning_rate": 1.6595744680851064e-05, |
| "loss": 0.5335, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.25559105431309903, |
| "grad_norm": 0.7447841608522099, |
| "learning_rate": 1.7021276595744682e-05, |
| "loss": 0.5414, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.26198083067092653, |
| "grad_norm": 0.8714110715774589, |
| "learning_rate": 1.74468085106383e-05, |
| "loss": 0.556, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.268370607028754, |
| "grad_norm": 0.7411083091257032, |
| "learning_rate": 1.7872340425531915e-05, |
| "loss": 0.5113, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.2747603833865815, |
| "grad_norm": 0.7680242229988357, |
| "learning_rate": 1.8297872340425533e-05, |
| "loss": 0.5684, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.28115015974440893, |
| "grad_norm": 0.7374046719973165, |
| "learning_rate": 1.872340425531915e-05, |
| "loss": 0.577, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.28753993610223644, |
| "grad_norm": 0.7709945486266545, |
| "learning_rate": 1.914893617021277e-05, |
| "loss": 0.5251, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.2939297124600639, |
| "grad_norm": 0.7420014515431359, |
| "learning_rate": 1.9574468085106384e-05, |
| "loss": 0.5358, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.3003194888178914, |
| "grad_norm": 0.7321704500165183, |
| "learning_rate": 2e-05, |
| "loss": 0.56, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.30670926517571884, |
| "grad_norm": 0.7855274459068936, |
| "learning_rate": 1.9999721578003894e-05, |
| "loss": 0.5427, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.31309904153354634, |
| "grad_norm": 0.7731901448013743, |
| "learning_rate": 1.9998886327519337e-05, |
| "loss": 0.5517, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.3194888178913738, |
| "grad_norm": 0.7327331638939495, |
| "learning_rate": 1.999749429505675e-05, |
| "loss": 0.5468, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.3258785942492013, |
| "grad_norm": 0.7419176272941636, |
| "learning_rate": 1.9995545558130624e-05, |
| "loss": 0.548, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.33226837060702874, |
| "grad_norm": 0.7873113572745489, |
| "learning_rate": 1.9993040225255205e-05, |
| "loss": 0.5403, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.33865814696485624, |
| "grad_norm": 0.6658693818077654, |
| "learning_rate": 1.998997843593845e-05, |
| "loss": 0.5054, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.3450479233226837, |
| "grad_norm": 0.7409323699660156, |
| "learning_rate": 1.9986360360674252e-05, |
| "loss": 0.5555, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.3514376996805112, |
| "grad_norm": 0.7094024540386773, |
| "learning_rate": 1.9982186200932964e-05, |
| "loss": 0.535, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.35782747603833864, |
| "grad_norm": 0.9538171828161622, |
| "learning_rate": 1.9977456189150164e-05, |
| "loss": 0.5788, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.36421725239616615, |
| "grad_norm": 0.7395956073936416, |
| "learning_rate": 1.9972170588713715e-05, |
| "loss": 0.5836, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.3706070287539936, |
| "grad_norm": 0.8649724098491106, |
| "learning_rate": 1.9966329693949098e-05, |
| "loss": 0.4918, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.3769968051118211, |
| "grad_norm": 0.8838321465136332, |
| "learning_rate": 1.9959933830103034e-05, |
| "loss": 0.514, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.38338658146964855, |
| "grad_norm": 0.7993190296372936, |
| "learning_rate": 1.9952983353325358e-05, |
| "loss": 0.5191, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.38977635782747605, |
| "grad_norm": 1.0003320683454586, |
| "learning_rate": 1.9945478650649192e-05, |
| "loss": 0.5395, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.3961661341853035, |
| "grad_norm": 0.8174378430731664, |
| "learning_rate": 1.9937420139969397e-05, |
| "loss": 0.5428, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.402555910543131, |
| "grad_norm": 0.7497946237416347, |
| "learning_rate": 1.9928808270019297e-05, |
| "loss": 0.5826, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.40894568690095845, |
| "grad_norm": 0.8640015891418493, |
| "learning_rate": 1.9919643520345698e-05, |
| "loss": 0.5343, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.41533546325878595, |
| "grad_norm": 1.0033449675940591, |
| "learning_rate": 1.990992640128218e-05, |
| "loss": 0.5623, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.4217252396166134, |
| "grad_norm": 0.7213392023439573, |
| "learning_rate": 1.989965745392068e-05, |
| "loss": 0.5314, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.4281150159744409, |
| "grad_norm": 0.8863830620435368, |
| "learning_rate": 1.988883725008136e-05, |
| "loss": 0.5458, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.43450479233226835, |
| "grad_norm": 0.7362416113857194, |
| "learning_rate": 1.9877466392280773e-05, |
| "loss": 0.5428, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.44089456869009586, |
| "grad_norm": 0.9788737478764125, |
| "learning_rate": 1.9865545513698304e-05, |
| "loss": 0.5412, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.4472843450479233, |
| "grad_norm": 0.8021931238319318, |
| "learning_rate": 1.9853075278140913e-05, |
| "loss": 0.5522, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.4536741214057508, |
| "grad_norm": 0.85887825826516, |
| "learning_rate": 1.984005638000618e-05, |
| "loss": 0.5702, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.46006389776357826, |
| "grad_norm": 0.7815409304110528, |
| "learning_rate": 1.9826489544243623e-05, |
| "loss": 0.5482, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.46645367412140576, |
| "grad_norm": 0.7632274039193099, |
| "learning_rate": 1.981237552631434e-05, |
| "loss": 0.5387, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.4728434504792332, |
| "grad_norm": 0.7141397681245097, |
| "learning_rate": 1.9797715112148937e-05, |
| "loss": 0.5546, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.4792332268370607, |
| "grad_norm": 0.9775621975797985, |
| "learning_rate": 1.9782509118103773e-05, |
| "loss": 0.5308, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.48562300319488816, |
| "grad_norm": 0.7330287655613912, |
| "learning_rate": 1.9766758390915494e-05, |
| "loss": 0.5431, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.49201277955271566, |
| "grad_norm": 0.8510881793812443, |
| "learning_rate": 1.9750463807653873e-05, |
| "loss": 0.5187, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.4984025559105431, |
| "grad_norm": 0.8486211359598791, |
| "learning_rate": 1.9733626275673e-05, |
| "loss": 0.55, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.5047923322683706, |
| "grad_norm": 0.7806116343240348, |
| "learning_rate": 1.9716246732560715e-05, |
| "loss": 0.4813, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.5111821086261981, |
| "grad_norm": 0.6353268171693698, |
| "learning_rate": 1.9698326146086446e-05, |
| "loss": 0.5078, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.5175718849840255, |
| "grad_norm": 0.8766273238149929, |
| "learning_rate": 1.967986551414728e-05, |
| "loss": 0.544, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.5239616613418531, |
| "grad_norm": 0.7124929308186037, |
| "learning_rate": 1.9660865864712413e-05, |
| "loss": 0.5547, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.5303514376996805, |
| "grad_norm": 0.7985052392042088, |
| "learning_rate": 1.9641328255765916e-05, |
| "loss": 0.5215, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.536741214057508, |
| "grad_norm": 0.6406165534662566, |
| "learning_rate": 1.96212537752478e-05, |
| "loss": 0.4978, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.5431309904153354, |
| "grad_norm": 0.7351154516097609, |
| "learning_rate": 1.9600643540993453e-05, |
| "loss": 0.5498, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.549520766773163, |
| "grad_norm": 0.7023736701907295, |
| "learning_rate": 1.9579498700671386e-05, |
| "loss": 0.5271, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.5559105431309904, |
| "grad_norm": 0.7585882232459317, |
| "learning_rate": 1.9557820431719333e-05, |
| "loss": 0.5419, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.5623003194888179, |
| "grad_norm": 0.7609703576418915, |
| "learning_rate": 1.9535609941278676e-05, |
| "loss": 0.5115, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.5686900958466453, |
| "grad_norm": 0.8355505315864735, |
| "learning_rate": 1.9512868466127232e-05, |
| "loss": 0.5214, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.5750798722044729, |
| "grad_norm": 0.8985531652526637, |
| "learning_rate": 1.9489597272610377e-05, |
| "loss": 0.5217, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.5814696485623003, |
| "grad_norm": 0.8296453033353365, |
| "learning_rate": 1.9465797656570546e-05, |
| "loss": 0.5235, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.5878594249201278, |
| "grad_norm": 0.8400305646818227, |
| "learning_rate": 1.944147094327506e-05, |
| "loss": 0.5028, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.5942492012779552, |
| "grad_norm": 0.8027178786911765, |
| "learning_rate": 1.9416618487342333e-05, |
| "loss": 0.4932, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.6006389776357828, |
| "grad_norm": 0.831774187088593, |
| "learning_rate": 1.9391241672666438e-05, |
| "loss": 0.5015, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.6070287539936102, |
| "grad_norm": 0.6880013029770987, |
| "learning_rate": 1.936534191234006e-05, |
| "loss": 0.4851, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.6134185303514377, |
| "grad_norm": 0.7874317799707009, |
| "learning_rate": 1.9338920648575798e-05, |
| "loss": 0.4993, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.6198083067092651, |
| "grad_norm": 0.8086603460274078, |
| "learning_rate": 1.9311979352625837e-05, |
| "loss": 0.5472, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.6261980830670927, |
| "grad_norm": 0.7330826956678399, |
| "learning_rate": 1.928451952470007e-05, |
| "loss": 0.5267, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.6325878594249201, |
| "grad_norm": 0.6774581525725395, |
| "learning_rate": 1.9256542693882505e-05, |
| "loss": 0.4964, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.6389776357827476, |
| "grad_norm": 0.699096603267482, |
| "learning_rate": 1.922805041804617e-05, |
| "loss": 0.5051, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.645367412140575, |
| "grad_norm": 0.7625712048870313, |
| "learning_rate": 1.919904428376632e-05, |
| "loss": 0.5301, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.6517571884984026, |
| "grad_norm": 0.7442243954348883, |
| "learning_rate": 1.916952590623212e-05, |
| "loss": 0.5114, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.65814696485623, |
| "grad_norm": 0.6319971486530133, |
| "learning_rate": 1.9139496929156685e-05, |
| "loss": 0.5106, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.6645367412140575, |
| "grad_norm": 0.6634321632909197, |
| "learning_rate": 1.910895902468557e-05, |
| "loss": 0.5211, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.670926517571885, |
| "grad_norm": 0.5959863886093508, |
| "learning_rate": 1.907791389330363e-05, |
| "loss": 0.5124, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.6773162939297125, |
| "grad_norm": 0.7995435481771501, |
| "learning_rate": 1.904636326374036e-05, |
| "loss": 0.52, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.6837060702875399, |
| "grad_norm": 0.7383352863257084, |
| "learning_rate": 1.9014308892873612e-05, |
| "loss": 0.5516, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.6900958466453674, |
| "grad_norm": 0.8315686682034763, |
| "learning_rate": 1.8981752565631767e-05, |
| "loss": 0.4891, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.6964856230031949, |
| "grad_norm": 0.6508951908046772, |
| "learning_rate": 1.8948696094894354e-05, |
| "loss": 0.5591, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.7028753993610224, |
| "grad_norm": 0.8009701429333234, |
| "learning_rate": 1.8915141321391083e-05, |
| "loss": 0.5305, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.7092651757188498, |
| "grad_norm": 0.7308231155353043, |
| "learning_rate": 1.8881090113599353e-05, |
| "loss": 0.5191, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.7156549520766773, |
| "grad_norm": 0.7808253591246626, |
| "learning_rate": 1.8846544367640218e-05, |
| "loss": 0.4906, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.7220447284345048, |
| "grad_norm": 0.8760149164716509, |
| "learning_rate": 1.881150600717279e-05, |
| "loss": 0.532, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.7284345047923323, |
| "grad_norm": 0.6776840820965584, |
| "learning_rate": 1.8775976983287117e-05, |
| "loss": 0.521, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.7348242811501597, |
| "grad_norm": 0.8898200759371685, |
| "learning_rate": 1.873995927439555e-05, |
| "loss": 0.5251, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.7412140575079872, |
| "grad_norm": 0.7499195664294598, |
| "learning_rate": 1.8703454886122568e-05, |
| "loss": 0.5348, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.7476038338658147, |
| "grad_norm": 0.8505865320259214, |
| "learning_rate": 1.86664658511931e-05, |
| "loss": 0.5054, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.7539936102236422, |
| "grad_norm": 0.6522705814194051, |
| "learning_rate": 1.862899422931934e-05, |
| "loss": 0.4996, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.7603833865814696, |
| "grad_norm": 0.6577321085300217, |
| "learning_rate": 1.859104210708604e-05, |
| "loss": 0.5106, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.7667731629392971, |
| "grad_norm": 0.6237691862811225, |
| "learning_rate": 1.855261159783432e-05, |
| "loss": 0.4952, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.7731629392971247, |
| "grad_norm": 0.7454463301186427, |
| "learning_rate": 1.8513704841543997e-05, |
| "loss": 0.5026, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.7795527156549521, |
| "grad_norm": 0.7934145741008819, |
| "learning_rate": 1.847432400471443e-05, |
| "loss": 0.549, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.7859424920127795, |
| "grad_norm": 0.6802649460778195, |
| "learning_rate": 1.8434471280243854e-05, |
| "loss": 0.506, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.792332268370607, |
| "grad_norm": 0.7304620814404005, |
| "learning_rate": 1.8394148887307286e-05, |
| "loss": 0.5383, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.7987220447284346, |
| "grad_norm": 0.6957782218041328, |
| "learning_rate": 1.8353359071232954e-05, |
| "loss": 0.5019, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.805111821086262, |
| "grad_norm": 0.6826820301754135, |
| "learning_rate": 1.8312104103377266e-05, |
| "loss": 0.4937, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.8115015974440895, |
| "grad_norm": 0.598906813674708, |
| "learning_rate": 1.827038628099831e-05, |
| "loss": 0.488, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.8178913738019169, |
| "grad_norm": 0.6633449549765212, |
| "learning_rate": 1.822820792712797e-05, |
| "loss": 0.4971, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.8242811501597445, |
| "grad_norm": 0.6710913601976871, |
| "learning_rate": 1.8185571390442542e-05, |
| "loss": 0.4836, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.8306709265175719, |
| "grad_norm": 0.5762592167913351, |
| "learning_rate": 1.8142479045131956e-05, |
| "loss": 0.5192, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.8370607028753994, |
| "grad_norm": 0.6738150719804582, |
| "learning_rate": 1.809893329076757e-05, |
| "loss": 0.5228, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.8434504792332268, |
| "grad_norm": 0.6500141385276138, |
| "learning_rate": 1.8054936552168548e-05, |
| "loss": 0.5332, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.8498402555910544, |
| "grad_norm": 0.5886713445337487, |
| "learning_rate": 1.801049127926686e-05, |
| "loss": 0.4787, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.8562300319488818, |
| "grad_norm": 0.7649082888316344, |
| "learning_rate": 1.7965599946970814e-05, |
| "loss": 0.4914, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.8626198083067093, |
| "grad_norm": 0.8014082101971098, |
| "learning_rate": 1.7920265055027285e-05, |
| "loss": 0.5171, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.8690095846645367, |
| "grad_norm": 0.7706425976997076, |
| "learning_rate": 1.7874489127882496e-05, |
| "loss": 0.5267, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.8753993610223643, |
| "grad_norm": 0.9916919676402002, |
| "learning_rate": 1.7828274714541445e-05, |
| "loss": 0.5225, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.8817891373801917, |
| "grad_norm": 0.7510268200588113, |
| "learning_rate": 1.7781624388425974e-05, |
| "loss": 0.5056, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.8881789137380192, |
| "grad_norm": 1.0622441121039243, |
| "learning_rate": 1.773454074723147e-05, |
| "loss": 0.4773, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.8945686900958466, |
| "grad_norm": 0.8430739649427611, |
| "learning_rate": 1.76870264127822e-05, |
| "loss": 0.5384, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.9009584664536742, |
| "grad_norm": 0.7217579935144164, |
| "learning_rate": 1.763908403088534e-05, |
| "loss": 0.5108, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.9073482428115016, |
| "grad_norm": 1.0377962316213807, |
| "learning_rate": 1.759071627118362e-05, |
| "loss": 0.4961, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.9137380191693291, |
| "grad_norm": 0.6423592379266523, |
| "learning_rate": 1.754192582700668e-05, |
| "loss": 0.5061, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.9201277955271565, |
| "grad_norm": 0.7302473302785821, |
| "learning_rate": 1.7492715415221087e-05, |
| "loss": 0.5074, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.9265175718849841, |
| "grad_norm": 0.709358335589859, |
| "learning_rate": 1.7443087776079068e-05, |
| "loss": 0.4867, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.9329073482428115, |
| "grad_norm": 0.6708691396098521, |
| "learning_rate": 1.739304567306588e-05, |
| "loss": 0.4955, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.939297124600639, |
| "grad_norm": 0.7224681216999947, |
| "learning_rate": 1.7342591892745978e-05, |
| "loss": 0.5063, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.9456869009584664, |
| "grad_norm": 0.692060343253805, |
| "learning_rate": 1.72917292446078e-05, |
| "loss": 0.5395, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.952076677316294, |
| "grad_norm": 0.6503466417528021, |
| "learning_rate": 1.7240460560907345e-05, |
| "loss": 0.502, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.9584664536741214, |
| "grad_norm": 0.7852299377720712, |
| "learning_rate": 1.7188788696510477e-05, |
| "loss": 0.4982, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.9648562300319489, |
| "grad_norm": 0.6487201669982385, |
| "learning_rate": 1.7136716528733916e-05, |
| "loss": 0.491, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.9712460063897763, |
| "grad_norm": 0.7162643954228396, |
| "learning_rate": 1.7084246957185036e-05, |
| "loss": 0.4715, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.9776357827476039, |
| "grad_norm": 0.6882308824929978, |
| "learning_rate": 1.703138290360041e-05, |
| "loss": 0.4884, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.9840255591054313, |
| "grad_norm": 0.6558983320574183, |
| "learning_rate": 1.6978127311683103e-05, |
| "loss": 0.5053, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.9904153354632588, |
| "grad_norm": 0.7793396179424056, |
| "learning_rate": 1.6924483146938756e-05, |
| "loss": 0.4891, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.9968051118210862, |
| "grad_norm": 0.6408866926804856, |
| "learning_rate": 1.6870453396510456e-05, |
| "loss": 0.5253, |
| "step": 156 |
| }, |
| { |
| "epoch": 1.0031948881789137, |
| "grad_norm": 1.219315523607184, |
| "learning_rate": 1.681604106901239e-05, |
| "loss": 0.7727, |
| "step": 157 |
| }, |
| { |
| "epoch": 1.0095846645367412, |
| "grad_norm": 0.7363761093026954, |
| "learning_rate": 1.676124919436233e-05, |
| "loss": 0.4369, |
| "step": 158 |
| }, |
| { |
| "epoch": 1.0159744408945688, |
| "grad_norm": 0.6104584236502094, |
| "learning_rate": 1.6706080823612897e-05, |
| "loss": 0.4074, |
| "step": 159 |
| }, |
| { |
| "epoch": 1.0223642172523961, |
| "grad_norm": 0.6688279966321403, |
| "learning_rate": 1.665053902878167e-05, |
| "loss": 0.3875, |
| "step": 160 |
| }, |
| { |
| "epoch": 1.0287539936102237, |
| "grad_norm": 0.7714442502018793, |
| "learning_rate": 1.659462690268013e-05, |
| "loss": 0.4693, |
| "step": 161 |
| }, |
| { |
| "epoch": 1.035143769968051, |
| "grad_norm": 0.6581337601062037, |
| "learning_rate": 1.6538347558741424e-05, |
| "loss": 0.4088, |
| "step": 162 |
| }, |
| { |
| "epoch": 1.0415335463258786, |
| "grad_norm": 0.8907132608840105, |
| "learning_rate": 1.6481704130847013e-05, |
| "loss": 0.4421, |
| "step": 163 |
| }, |
| { |
| "epoch": 1.0479233226837061, |
| "grad_norm": 0.7281123005456749, |
| "learning_rate": 1.642469977315214e-05, |
| "loss": 0.4466, |
| "step": 164 |
| }, |
| { |
| "epoch": 1.0543130990415335, |
| "grad_norm": 0.8713456380104283, |
| "learning_rate": 1.6367337659910223e-05, |
| "loss": 0.4205, |
| "step": 165 |
| }, |
| { |
| "epoch": 1.060702875399361, |
| "grad_norm": 0.8221934081375099, |
| "learning_rate": 1.6309620985296075e-05, |
| "loss": 0.4811, |
| "step": 166 |
| }, |
| { |
| "epoch": 1.0670926517571886, |
| "grad_norm": 0.6414871893647255, |
| "learning_rate": 1.625155296322805e-05, |
| "loss": 0.3994, |
| "step": 167 |
| }, |
| { |
| "epoch": 1.073482428115016, |
| "grad_norm": 0.6020141646779518, |
| "learning_rate": 1.6193136827189067e-05, |
| "loss": 0.3544, |
| "step": 168 |
| }, |
| { |
| "epoch": 1.0798722044728435, |
| "grad_norm": 0.6620341470772692, |
| "learning_rate": 1.6134375830046566e-05, |
| "loss": 0.3865, |
| "step": 169 |
| }, |
| { |
| "epoch": 1.0862619808306708, |
| "grad_norm": 0.7529504403287007, |
| "learning_rate": 1.607527324387137e-05, |
| "loss": 0.5138, |
| "step": 170 |
| }, |
| { |
| "epoch": 1.0926517571884984, |
| "grad_norm": 0.5746068027715405, |
| "learning_rate": 1.6015832359755483e-05, |
| "loss": 0.4053, |
| "step": 171 |
| }, |
| { |
| "epoch": 1.099041533546326, |
| "grad_norm": 0.6423681462846987, |
| "learning_rate": 1.5956056487628832e-05, |
| "loss": 0.4878, |
| "step": 172 |
| }, |
| { |
| "epoch": 1.1054313099041533, |
| "grad_norm": 0.6192258722203541, |
| "learning_rate": 1.5895948956074937e-05, |
| "loss": 0.4188, |
| "step": 173 |
| }, |
| { |
| "epoch": 1.1118210862619808, |
| "grad_norm": 0.6163929815962566, |
| "learning_rate": 1.5835513112145583e-05, |
| "loss": 0.3854, |
| "step": 174 |
| }, |
| { |
| "epoch": 1.1182108626198084, |
| "grad_norm": 0.5697582130826278, |
| "learning_rate": 1.5774752321174428e-05, |
| "loss": 0.4073, |
| "step": 175 |
| }, |
| { |
| "epoch": 1.1246006389776357, |
| "grad_norm": 0.6223679982213549, |
| "learning_rate": 1.571366996658962e-05, |
| "loss": 0.4224, |
| "step": 176 |
| }, |
| { |
| "epoch": 1.1309904153354633, |
| "grad_norm": 0.6320866995001497, |
| "learning_rate": 1.5652269449725375e-05, |
| "loss": 0.4248, |
| "step": 177 |
| }, |
| { |
| "epoch": 1.1373801916932909, |
| "grad_norm": 0.5297458143137096, |
| "learning_rate": 1.5590554189632585e-05, |
| "loss": 0.3748, |
| "step": 178 |
| }, |
| { |
| "epoch": 1.1437699680511182, |
| "grad_norm": 0.6358623803264093, |
| "learning_rate": 1.552852762288843e-05, |
| "loss": 0.4747, |
| "step": 179 |
| }, |
| { |
| "epoch": 1.1501597444089458, |
| "grad_norm": 0.6278673693951984, |
| "learning_rate": 1.5466193203405017e-05, |
| "loss": 0.4036, |
| "step": 180 |
| }, |
| { |
| "epoch": 1.156549520766773, |
| "grad_norm": 0.6151559404690766, |
| "learning_rate": 1.540355440223704e-05, |
| "loss": 0.4124, |
| "step": 181 |
| }, |
| { |
| "epoch": 1.1629392971246006, |
| "grad_norm": 0.5891168526046718, |
| "learning_rate": 1.534061470738852e-05, |
| "loss": 0.4637, |
| "step": 182 |
| }, |
| { |
| "epoch": 1.1693290734824282, |
| "grad_norm": 0.6184989510741438, |
| "learning_rate": 1.527737762361855e-05, |
| "loss": 0.4161, |
| "step": 183 |
| }, |
| { |
| "epoch": 1.1757188498402555, |
| "grad_norm": 0.6419175983144096, |
| "learning_rate": 1.5213846672246139e-05, |
| "loss": 0.4533, |
| "step": 184 |
| }, |
| { |
| "epoch": 1.182108626198083, |
| "grad_norm": 0.5120065057835093, |
| "learning_rate": 1.5150025390954153e-05, |
| "loss": 0.375, |
| "step": 185 |
| }, |
| { |
| "epoch": 1.1884984025559104, |
| "grad_norm": 0.698299656514935, |
| "learning_rate": 1.5085917333592299e-05, |
| "loss": 0.4193, |
| "step": 186 |
| }, |
| { |
| "epoch": 1.194888178913738, |
| "grad_norm": 0.6532253196374253, |
| "learning_rate": 1.5021526069979232e-05, |
| "loss": 0.4596, |
| "step": 187 |
| }, |
| { |
| "epoch": 1.2012779552715656, |
| "grad_norm": 0.6028992706390495, |
| "learning_rate": 1.4956855185703787e-05, |
| "loss": 0.4207, |
| "step": 188 |
| }, |
| { |
| "epoch": 1.207667731629393, |
| "grad_norm": 0.6185875205322425, |
| "learning_rate": 1.48919082819253e-05, |
| "loss": 0.4243, |
| "step": 189 |
| }, |
| { |
| "epoch": 1.2140575079872205, |
| "grad_norm": 0.6043057260440954, |
| "learning_rate": 1.4826688975173085e-05, |
| "loss": 0.437, |
| "step": 190 |
| }, |
| { |
| "epoch": 1.220447284345048, |
| "grad_norm": 0.6821203725825057, |
| "learning_rate": 1.4761200897145063e-05, |
| "loss": 0.5037, |
| "step": 191 |
| }, |
| { |
| "epoch": 1.2268370607028753, |
| "grad_norm": 0.6008298374460067, |
| "learning_rate": 1.4695447694505512e-05, |
| "loss": 0.3661, |
| "step": 192 |
| }, |
| { |
| "epoch": 1.233226837060703, |
| "grad_norm": 0.7199503911011816, |
| "learning_rate": 1.4629433028682014e-05, |
| "loss": 0.5043, |
| "step": 193 |
| }, |
| { |
| "epoch": 1.2396166134185305, |
| "grad_norm": 0.5180806806501965, |
| "learning_rate": 1.456316057566158e-05, |
| "loss": 0.3819, |
| "step": 194 |
| }, |
| { |
| "epoch": 1.2460063897763578, |
| "grad_norm": 0.6928136290488852, |
| "learning_rate": 1.4496634025785938e-05, |
| "loss": 0.4263, |
| "step": 195 |
| }, |
| { |
| "epoch": 1.2523961661341854, |
| "grad_norm": 0.5665537937081578, |
| "learning_rate": 1.4429857083546054e-05, |
| "loss": 0.4844, |
| "step": 196 |
| }, |
| { |
| "epoch": 1.2587859424920127, |
| "grad_norm": 0.5563376996661965, |
| "learning_rate": 1.4362833467375839e-05, |
| "loss": 0.4156, |
| "step": 197 |
| }, |
| { |
| "epoch": 1.2651757188498403, |
| "grad_norm": 0.5377783142222045, |
| "learning_rate": 1.429556690944509e-05, |
| "loss": 0.4025, |
| "step": 198 |
| }, |
| { |
| "epoch": 1.2715654952076676, |
| "grad_norm": 0.5413290657480134, |
| "learning_rate": 1.4228061155451671e-05, |
| "loss": 0.4066, |
| "step": 199 |
| }, |
| { |
| "epoch": 1.2779552715654952, |
| "grad_norm": 0.5854137012981868, |
| "learning_rate": 1.4160319964412943e-05, |
| "loss": 0.4631, |
| "step": 200 |
| }, |
| { |
| "epoch": 1.2843450479233227, |
| "grad_norm": 0.5549643478979397, |
| "learning_rate": 1.4092347108456425e-05, |
| "loss": 0.4203, |
| "step": 201 |
| }, |
| { |
| "epoch": 1.29073482428115, |
| "grad_norm": 0.6090412261860937, |
| "learning_rate": 1.402414637260977e-05, |
| "loss": 0.4818, |
| "step": 202 |
| }, |
| { |
| "epoch": 1.2971246006389776, |
| "grad_norm": 0.5907663028777652, |
| "learning_rate": 1.3955721554589979e-05, |
| "loss": 0.4253, |
| "step": 203 |
| }, |
| { |
| "epoch": 1.3035143769968052, |
| "grad_norm": 0.6715293474231077, |
| "learning_rate": 1.388707646459193e-05, |
| "loss": 0.418, |
| "step": 204 |
| }, |
| { |
| "epoch": 1.3099041533546325, |
| "grad_norm": 0.6297284099203906, |
| "learning_rate": 1.3818214925076226e-05, |
| "loss": 0.4147, |
| "step": 205 |
| }, |
| { |
| "epoch": 1.31629392971246, |
| "grad_norm": 0.5667392659435537, |
| "learning_rate": 1.3749140770556322e-05, |
| "loss": 0.4311, |
| "step": 206 |
| }, |
| { |
| "epoch": 1.3226837060702876, |
| "grad_norm": 0.6322500298227558, |
| "learning_rate": 1.367985784738501e-05, |
| "loss": 0.4203, |
| "step": 207 |
| }, |
| { |
| "epoch": 1.329073482428115, |
| "grad_norm": 0.6001231519749656, |
| "learning_rate": 1.361037001354025e-05, |
| "loss": 0.4476, |
| "step": 208 |
| }, |
| { |
| "epoch": 1.3354632587859425, |
| "grad_norm": 0.5824816964719087, |
| "learning_rate": 1.3540681138410317e-05, |
| "loss": 0.3966, |
| "step": 209 |
| }, |
| { |
| "epoch": 1.34185303514377, |
| "grad_norm": 0.6931574895039062, |
| "learning_rate": 1.3470795102578358e-05, |
| "loss": 0.5033, |
| "step": 210 |
| }, |
| { |
| "epoch": 1.3482428115015974, |
| "grad_norm": 0.5416798455387101, |
| "learning_rate": 1.3400715797606293e-05, |
| "loss": 0.4008, |
| "step": 211 |
| }, |
| { |
| "epoch": 1.354632587859425, |
| "grad_norm": 0.6524420733651487, |
| "learning_rate": 1.3330447125818115e-05, |
| "loss": 0.4436, |
| "step": 212 |
| }, |
| { |
| "epoch": 1.3610223642172525, |
| "grad_norm": 0.6109732592249346, |
| "learning_rate": 1.3259993000082599e-05, |
| "loss": 0.4139, |
| "step": 213 |
| }, |
| { |
| "epoch": 1.3674121405750799, |
| "grad_norm": 0.6089156914272807, |
| "learning_rate": 1.3189357343595405e-05, |
| "loss": 0.4287, |
| "step": 214 |
| }, |
| { |
| "epoch": 1.3738019169329074, |
| "grad_norm": 0.5100478253771064, |
| "learning_rate": 1.3118544089660635e-05, |
| "loss": 0.3773, |
| "step": 215 |
| }, |
| { |
| "epoch": 1.3801916932907348, |
| "grad_norm": 0.665636694875197, |
| "learning_rate": 1.3047557181471784e-05, |
| "loss": 0.3922, |
| "step": 216 |
| }, |
| { |
| "epoch": 1.3865814696485623, |
| "grad_norm": 0.6421315985345839, |
| "learning_rate": 1.2976400571892189e-05, |
| "loss": 0.4432, |
| "step": 217 |
| }, |
| { |
| "epoch": 1.3929712460063897, |
| "grad_norm": 0.5630313538184247, |
| "learning_rate": 1.2905078223234907e-05, |
| "loss": 0.4191, |
| "step": 218 |
| }, |
| { |
| "epoch": 1.3993610223642172, |
| "grad_norm": 0.6745179299688657, |
| "learning_rate": 1.2833594107042078e-05, |
| "loss": 0.4333, |
| "step": 219 |
| }, |
| { |
| "epoch": 1.4057507987220448, |
| "grad_norm": 0.5454374317298349, |
| "learning_rate": 1.2761952203863759e-05, |
| "loss": 0.4277, |
| "step": 220 |
| }, |
| { |
| "epoch": 1.4121405750798721, |
| "grad_norm": 0.5789898470817271, |
| "learning_rate": 1.2690156503036288e-05, |
| "loss": 0.4036, |
| "step": 221 |
| }, |
| { |
| "epoch": 1.4185303514376997, |
| "grad_norm": 0.5949332555607629, |
| "learning_rate": 1.2618211002460135e-05, |
| "loss": 0.4785, |
| "step": 222 |
| }, |
| { |
| "epoch": 1.4249201277955272, |
| "grad_norm": 0.5587473061257787, |
| "learning_rate": 1.2546119708377273e-05, |
| "loss": 0.4109, |
| "step": 223 |
| }, |
| { |
| "epoch": 1.4313099041533546, |
| "grad_norm": 0.5092986860891325, |
| "learning_rate": 1.2473886635148109e-05, |
| "loss": 0.4375, |
| "step": 224 |
| }, |
| { |
| "epoch": 1.4376996805111821, |
| "grad_norm": 0.49324268916930714, |
| "learning_rate": 1.2401515805027924e-05, |
| "loss": 0.4339, |
| "step": 225 |
| }, |
| { |
| "epoch": 1.4440894568690097, |
| "grad_norm": 0.521973180886398, |
| "learning_rate": 1.2329011247942915e-05, |
| "loss": 0.3929, |
| "step": 226 |
| }, |
| { |
| "epoch": 1.450479233226837, |
| "grad_norm": 0.49449177011483986, |
| "learning_rate": 1.2256377001265785e-05, |
| "loss": 0.3688, |
| "step": 227 |
| }, |
| { |
| "epoch": 1.4568690095846646, |
| "grad_norm": 0.688803254610615, |
| "learning_rate": 1.2183617109590925e-05, |
| "loss": 0.5054, |
| "step": 228 |
| }, |
| { |
| "epoch": 1.4632587859424921, |
| "grad_norm": 0.5222053229962778, |
| "learning_rate": 1.2110735624509184e-05, |
| "loss": 0.3946, |
| "step": 229 |
| }, |
| { |
| "epoch": 1.4696485623003195, |
| "grad_norm": 0.5032950143354372, |
| "learning_rate": 1.2037736604382279e-05, |
| "loss": 0.4158, |
| "step": 230 |
| }, |
| { |
| "epoch": 1.476038338658147, |
| "grad_norm": 0.5739740428574489, |
| "learning_rate": 1.1964624114116784e-05, |
| "loss": 0.3794, |
| "step": 231 |
| }, |
| { |
| "epoch": 1.4824281150159744, |
| "grad_norm": 0.6710884805647371, |
| "learning_rate": 1.1891402224937805e-05, |
| "loss": 0.466, |
| "step": 232 |
| }, |
| { |
| "epoch": 1.488817891373802, |
| "grad_norm": 0.4582879331386117, |
| "learning_rate": 1.1818075014162243e-05, |
| "loss": 0.3585, |
| "step": 233 |
| }, |
| { |
| "epoch": 1.4952076677316293, |
| "grad_norm": 0.5448073298830566, |
| "learning_rate": 1.1744646564971777e-05, |
| "loss": 0.4238, |
| "step": 234 |
| }, |
| { |
| "epoch": 1.5015974440894568, |
| "grad_norm": 0.5760693930458392, |
| "learning_rate": 1.1671120966185486e-05, |
| "loss": 0.4324, |
| "step": 235 |
| }, |
| { |
| "epoch": 1.5079872204472844, |
| "grad_norm": 0.49993740420542576, |
| "learning_rate": 1.159750231203217e-05, |
| "loss": 0.4078, |
| "step": 236 |
| }, |
| { |
| "epoch": 1.5143769968051117, |
| "grad_norm": 0.563168928495261, |
| "learning_rate": 1.1523794701922351e-05, |
| "loss": 0.4351, |
| "step": 237 |
| }, |
| { |
| "epoch": 1.5207667731629393, |
| "grad_norm": 0.5313983639628916, |
| "learning_rate": 1.145000224022002e-05, |
| "loss": 0.3571, |
| "step": 238 |
| }, |
| { |
| "epoch": 1.5271565495207668, |
| "grad_norm": 0.501524364831029, |
| "learning_rate": 1.1376129036014073e-05, |
| "loss": 0.4304, |
| "step": 239 |
| }, |
| { |
| "epoch": 1.5335463258785942, |
| "grad_norm": 0.44709921144798975, |
| "learning_rate": 1.1302179202889505e-05, |
| "loss": 0.3948, |
| "step": 240 |
| }, |
| { |
| "epoch": 1.5399361022364217, |
| "grad_norm": 0.6024736966395187, |
| "learning_rate": 1.1228156858698344e-05, |
| "loss": 0.5055, |
| "step": 241 |
| }, |
| { |
| "epoch": 1.5463258785942493, |
| "grad_norm": 0.5039190418971187, |
| "learning_rate": 1.1154066125330358e-05, |
| "loss": 0.3853, |
| "step": 242 |
| }, |
| { |
| "epoch": 1.5527156549520766, |
| "grad_norm": 0.5082416278400913, |
| "learning_rate": 1.107991112848352e-05, |
| "loss": 0.365, |
| "step": 243 |
| }, |
| { |
| "epoch": 1.5591054313099042, |
| "grad_norm": 0.571424017024242, |
| "learning_rate": 1.100569599743428e-05, |
| "loss": 0.4157, |
| "step": 244 |
| }, |
| { |
| "epoch": 1.5654952076677318, |
| "grad_norm": 0.5185521716853748, |
| "learning_rate": 1.0931424864807624e-05, |
| "loss": 0.4278, |
| "step": 245 |
| }, |
| { |
| "epoch": 1.571884984025559, |
| "grad_norm": 0.48800280720450917, |
| "learning_rate": 1.0857101866346953e-05, |
| "loss": 0.3713, |
| "step": 246 |
| }, |
| { |
| "epoch": 1.5782747603833864, |
| "grad_norm": 0.6095490026665179, |
| "learning_rate": 1.0782731140683786e-05, |
| "loss": 0.4746, |
| "step": 247 |
| }, |
| { |
| "epoch": 1.5846645367412142, |
| "grad_norm": 0.44928616025284457, |
| "learning_rate": 1.0708316829107295e-05, |
| "loss": 0.3991, |
| "step": 248 |
| }, |
| { |
| "epoch": 1.5910543130990416, |
| "grad_norm": 0.527002704565493, |
| "learning_rate": 1.0633863075333713e-05, |
| "loss": 0.4499, |
| "step": 249 |
| }, |
| { |
| "epoch": 1.5974440894568689, |
| "grad_norm": 0.5073694564198865, |
| "learning_rate": 1.0559374025275597e-05, |
| "loss": 0.4451, |
| "step": 250 |
| }, |
| { |
| "epoch": 1.6038338658146964, |
| "grad_norm": 0.43991601364268823, |
| "learning_rate": 1.0484853826810942e-05, |
| "loss": 0.3585, |
| "step": 251 |
| }, |
| { |
| "epoch": 1.610223642172524, |
| "grad_norm": 0.5793726292991741, |
| "learning_rate": 1.0410306629552231e-05, |
| "loss": 0.4504, |
| "step": 252 |
| }, |
| { |
| "epoch": 1.6166134185303513, |
| "grad_norm": 0.5109980782065752, |
| "learning_rate": 1.0335736584615357e-05, |
| "loss": 0.4323, |
| "step": 253 |
| }, |
| { |
| "epoch": 1.623003194888179, |
| "grad_norm": 0.5541438519921948, |
| "learning_rate": 1.0261147844388472e-05, |
| "loss": 0.4108, |
| "step": 254 |
| }, |
| { |
| "epoch": 1.6293929712460065, |
| "grad_norm": 0.49521914364363934, |
| "learning_rate": 1.0186544562300766e-05, |
| "loss": 0.3646, |
| "step": 255 |
| }, |
| { |
| "epoch": 1.6357827476038338, |
| "grad_norm": 0.5712895311131073, |
| "learning_rate": 1.011193089259118e-05, |
| "loss": 0.4436, |
| "step": 256 |
| }, |
| { |
| "epoch": 1.6421725239616614, |
| "grad_norm": 0.4759908770222787, |
| "learning_rate": 1.0037310990077083e-05, |
| "loss": 0.4114, |
| "step": 257 |
| }, |
| { |
| "epoch": 1.648562300319489, |
| "grad_norm": 0.5916396180237442, |
| "learning_rate": 9.962689009922918e-06, |
| "loss": 0.4351, |
| "step": 258 |
| }, |
| { |
| "epoch": 1.6549520766773163, |
| "grad_norm": 0.5121135747386429, |
| "learning_rate": 9.888069107408824e-06, |
| "loss": 0.4054, |
| "step": 259 |
| }, |
| { |
| "epoch": 1.6613418530351438, |
| "grad_norm": 0.5138320946770819, |
| "learning_rate": 9.813455437699238e-06, |
| "loss": 0.3825, |
| "step": 260 |
| }, |
| { |
| "epoch": 1.6677316293929714, |
| "grad_norm": 0.6049983944001385, |
| "learning_rate": 9.738852155611531e-06, |
| "loss": 0.4499, |
| "step": 261 |
| }, |
| { |
| "epoch": 1.6741214057507987, |
| "grad_norm": 0.5783031442949135, |
| "learning_rate": 9.664263415384644e-06, |
| "loss": 0.4481, |
| "step": 262 |
| }, |
| { |
| "epoch": 1.680511182108626, |
| "grad_norm": 0.5079212196836366, |
| "learning_rate": 9.589693370447769e-06, |
| "loss": 0.4215, |
| "step": 263 |
| }, |
| { |
| "epoch": 1.6869009584664538, |
| "grad_norm": 0.5362072676088528, |
| "learning_rate": 9.515146173189058e-06, |
| "loss": 0.382, |
| "step": 264 |
| }, |
| { |
| "epoch": 1.6932907348242812, |
| "grad_norm": 0.5939862572487508, |
| "learning_rate": 9.440625974724408e-06, |
| "loss": 0.4359, |
| "step": 265 |
| }, |
| { |
| "epoch": 1.6996805111821085, |
| "grad_norm": 0.509251006459368, |
| "learning_rate": 9.366136924666289e-06, |
| "loss": 0.4187, |
| "step": 266 |
| }, |
| { |
| "epoch": 1.706070287539936, |
| "grad_norm": 0.5360988896841928, |
| "learning_rate": 9.291683170892712e-06, |
| "loss": 0.4397, |
| "step": 267 |
| }, |
| { |
| "epoch": 1.7124600638977636, |
| "grad_norm": 0.4969550304271485, |
| "learning_rate": 9.217268859316219e-06, |
| "loss": 0.4203, |
| "step": 268 |
| }, |
| { |
| "epoch": 1.718849840255591, |
| "grad_norm": 0.5191197891519107, |
| "learning_rate": 9.142898133653049e-06, |
| "loss": 0.3609, |
| "step": 269 |
| }, |
| { |
| "epoch": 1.7252396166134185, |
| "grad_norm": 0.4714057411588144, |
| "learning_rate": 9.068575135192377e-06, |
| "loss": 0.4465, |
| "step": 270 |
| }, |
| { |
| "epoch": 1.731629392971246, |
| "grad_norm": 0.4652015886422851, |
| "learning_rate": 8.994304002565723e-06, |
| "loss": 0.4304, |
| "step": 271 |
| }, |
| { |
| "epoch": 1.7380191693290734, |
| "grad_norm": 0.5542538380015932, |
| "learning_rate": 8.920088871516482e-06, |
| "loss": 0.4401, |
| "step": 272 |
| }, |
| { |
| "epoch": 1.744408945686901, |
| "grad_norm": 0.5308379584880095, |
| "learning_rate": 8.845933874669645e-06, |
| "loss": 0.4408, |
| "step": 273 |
| }, |
| { |
| "epoch": 1.7507987220447285, |
| "grad_norm": 0.5930183810947137, |
| "learning_rate": 8.771843141301659e-06, |
| "loss": 0.4715, |
| "step": 274 |
| }, |
| { |
| "epoch": 1.7571884984025559, |
| "grad_norm": 0.5544444716982725, |
| "learning_rate": 8.697820797110499e-06, |
| "loss": 0.4054, |
| "step": 275 |
| }, |
| { |
| "epoch": 1.7635782747603834, |
| "grad_norm": 0.5332926586542652, |
| "learning_rate": 8.62387096398593e-06, |
| "loss": 0.4075, |
| "step": 276 |
| }, |
| { |
| "epoch": 1.769968051118211, |
| "grad_norm": 0.46783125273604637, |
| "learning_rate": 8.549997759779981e-06, |
| "loss": 0.383, |
| "step": 277 |
| }, |
| { |
| "epoch": 1.7763578274760383, |
| "grad_norm": 0.5521530233312871, |
| "learning_rate": 8.47620529807765e-06, |
| "loss": 0.4533, |
| "step": 278 |
| }, |
| { |
| "epoch": 1.7827476038338657, |
| "grad_norm": 0.5736693711263415, |
| "learning_rate": 8.402497687967837e-06, |
| "loss": 0.4041, |
| "step": 279 |
| }, |
| { |
| "epoch": 1.7891373801916934, |
| "grad_norm": 0.5178503013621817, |
| "learning_rate": 8.328879033814516e-06, |
| "loss": 0.397, |
| "step": 280 |
| }, |
| { |
| "epoch": 1.7955271565495208, |
| "grad_norm": 0.5005614658444079, |
| "learning_rate": 8.255353435028228e-06, |
| "loss": 0.432, |
| "step": 281 |
| }, |
| { |
| "epoch": 1.8019169329073481, |
| "grad_norm": 0.466589550480172, |
| "learning_rate": 8.181924985837762e-06, |
| "loss": 0.3921, |
| "step": 282 |
| }, |
| { |
| "epoch": 1.8083067092651757, |
| "grad_norm": 0.492210299786068, |
| "learning_rate": 8.108597775062198e-06, |
| "loss": 0.3952, |
| "step": 283 |
| }, |
| { |
| "epoch": 1.8146964856230032, |
| "grad_norm": 0.5488180113006943, |
| "learning_rate": 8.035375885883217e-06, |
| "loss": 0.4721, |
| "step": 284 |
| }, |
| { |
| "epoch": 1.8210862619808306, |
| "grad_norm": 0.46872330736771156, |
| "learning_rate": 7.962263395617724e-06, |
| "loss": 0.4027, |
| "step": 285 |
| }, |
| { |
| "epoch": 1.8274760383386581, |
| "grad_norm": 0.5921155853332334, |
| "learning_rate": 7.88926437549082e-06, |
| "loss": 0.4784, |
| "step": 286 |
| }, |
| { |
| "epoch": 1.8338658146964857, |
| "grad_norm": 0.44822680013304916, |
| "learning_rate": 7.81638289040908e-06, |
| "loss": 0.3745, |
| "step": 287 |
| }, |
| { |
| "epoch": 1.840255591054313, |
| "grad_norm": 0.5070334055575236, |
| "learning_rate": 7.743622998734217e-06, |
| "loss": 0.4493, |
| "step": 288 |
| }, |
| { |
| "epoch": 1.8466453674121406, |
| "grad_norm": 0.45829357905039364, |
| "learning_rate": 7.670988752057088e-06, |
| "loss": 0.3906, |
| "step": 289 |
| }, |
| { |
| "epoch": 1.8530351437699681, |
| "grad_norm": 0.4401957393457272, |
| "learning_rate": 7.598484194972076e-06, |
| "loss": 0.4351, |
| "step": 290 |
| }, |
| { |
| "epoch": 1.8594249201277955, |
| "grad_norm": 0.45173410773764394, |
| "learning_rate": 7.526113364851891e-06, |
| "loss": 0.404, |
| "step": 291 |
| }, |
| { |
| "epoch": 1.865814696485623, |
| "grad_norm": 0.48332838338770834, |
| "learning_rate": 7.453880291622726e-06, |
| "loss": 0.39, |
| "step": 292 |
| }, |
| { |
| "epoch": 1.8722044728434506, |
| "grad_norm": 0.49775533365807295, |
| "learning_rate": 7.381788997539869e-06, |
| "loss": 0.4263, |
| "step": 293 |
| }, |
| { |
| "epoch": 1.878594249201278, |
| "grad_norm": 0.45957391883705206, |
| "learning_rate": 7.309843496963715e-06, |
| "loss": 0.362, |
| "step": 294 |
| }, |
| { |
| "epoch": 1.8849840255591053, |
| "grad_norm": 0.5317384668186387, |
| "learning_rate": 7.238047796136247e-06, |
| "loss": 0.4593, |
| "step": 295 |
| }, |
| { |
| "epoch": 1.891373801916933, |
| "grad_norm": 0.4766410704227857, |
| "learning_rate": 7.166405892957926e-06, |
| "loss": 0.3991, |
| "step": 296 |
| }, |
| { |
| "epoch": 1.8977635782747604, |
| "grad_norm": 0.4468475803781737, |
| "learning_rate": 7.094921776765095e-06, |
| "loss": 0.4128, |
| "step": 297 |
| }, |
| { |
| "epoch": 1.9041533546325877, |
| "grad_norm": 0.5191666964746032, |
| "learning_rate": 7.023599428107815e-06, |
| "loss": 0.457, |
| "step": 298 |
| }, |
| { |
| "epoch": 1.9105431309904153, |
| "grad_norm": 0.44496722949584133, |
| "learning_rate": 6.95244281852822e-06, |
| "loss": 0.3908, |
| "step": 299 |
| }, |
| { |
| "epoch": 1.9169329073482428, |
| "grad_norm": 0.4827518065178973, |
| "learning_rate": 6.881455910339369e-06, |
| "loss": 0.4515, |
| "step": 300 |
| }, |
| { |
| "epoch": 1.9233226837060702, |
| "grad_norm": 0.5066972710057507, |
| "learning_rate": 6.8106426564045965e-06, |
| "loss": 0.4084, |
| "step": 301 |
| }, |
| { |
| "epoch": 1.9297124600638977, |
| "grad_norm": 0.468517851202735, |
| "learning_rate": 6.740006999917406e-06, |
| "loss": 0.4292, |
| "step": 302 |
| }, |
| { |
| "epoch": 1.9361022364217253, |
| "grad_norm": 0.5620319742163961, |
| "learning_rate": 6.669552874181888e-06, |
| "loss": 0.4285, |
| "step": 303 |
| }, |
| { |
| "epoch": 1.9424920127795526, |
| "grad_norm": 0.46212229948639855, |
| "learning_rate": 6.599284202393709e-06, |
| "loss": 0.4009, |
| "step": 304 |
| }, |
| { |
| "epoch": 1.9488817891373802, |
| "grad_norm": 0.4618831752703247, |
| "learning_rate": 6.529204897421644e-06, |
| "loss": 0.471, |
| "step": 305 |
| }, |
| { |
| "epoch": 1.9552715654952078, |
| "grad_norm": 0.4453523115078921, |
| "learning_rate": 6.4593188615896855e-06, |
| "loss": 0.4149, |
| "step": 306 |
| }, |
| { |
| "epoch": 1.961661341853035, |
| "grad_norm": 0.5565537423113835, |
| "learning_rate": 6.389629986459756e-06, |
| "loss": 0.4595, |
| "step": 307 |
| }, |
| { |
| "epoch": 1.9680511182108626, |
| "grad_norm": 0.4817525299898893, |
| "learning_rate": 6.3201421526149945e-06, |
| "loss": 0.351, |
| "step": 308 |
| }, |
| { |
| "epoch": 1.9744408945686902, |
| "grad_norm": 0.49529548437523874, |
| "learning_rate": 6.250859229443684e-06, |
| "loss": 0.4219, |
| "step": 309 |
| }, |
| { |
| "epoch": 1.9808306709265175, |
| "grad_norm": 0.4677244453368462, |
| "learning_rate": 6.181785074923778e-06, |
| "loss": 0.4204, |
| "step": 310 |
| }, |
| { |
| "epoch": 1.9872204472843449, |
| "grad_norm": 0.5533811435739016, |
| "learning_rate": 6.112923535408074e-06, |
| "loss": 0.4005, |
| "step": 311 |
| }, |
| { |
| "epoch": 1.9936102236421727, |
| "grad_norm": 0.6829408529085472, |
| "learning_rate": 6.044278445410025e-06, |
| "loss": 0.457, |
| "step": 312 |
| }, |
| { |
| "epoch": 2.0, |
| "grad_norm": 0.7425708478665342, |
| "learning_rate": 5.975853627390233e-06, |
| "loss": 0.5428, |
| "step": 313 |
| }, |
| { |
| "epoch": 2.0063897763578273, |
| "grad_norm": 0.6185014849673437, |
| "learning_rate": 5.907652891543576e-06, |
| "loss": 0.3446, |
| "step": 314 |
| }, |
| { |
| "epoch": 2.012779552715655, |
| "grad_norm": 0.550249298005834, |
| "learning_rate": 5.839680035587061e-06, |
| "loss": 0.3567, |
| "step": 315 |
| }, |
| { |
| "epoch": 2.0191693290734825, |
| "grad_norm": 0.6406389855268264, |
| "learning_rate": 5.771938844548331e-06, |
| "loss": 0.3439, |
| "step": 316 |
| }, |
| { |
| "epoch": 2.02555910543131, |
| "grad_norm": 0.7498793828860592, |
| "learning_rate": 5.704433090554912e-06, |
| "loss": 0.3525, |
| "step": 317 |
| }, |
| { |
| "epoch": 2.0319488817891376, |
| "grad_norm": 0.6058522485872244, |
| "learning_rate": 5.6371665326241635e-06, |
| "loss": 0.3493, |
| "step": 318 |
| }, |
| { |
| "epoch": 2.038338658146965, |
| "grad_norm": 0.4684273692024481, |
| "learning_rate": 5.570142916453944e-06, |
| "loss": 0.3529, |
| "step": 319 |
| }, |
| { |
| "epoch": 2.0447284345047922, |
| "grad_norm": 0.5736798457088337, |
| "learning_rate": 5.503365974214059e-06, |
| "loss": 0.3601, |
| "step": 320 |
| }, |
| { |
| "epoch": 2.0511182108626196, |
| "grad_norm": 0.6933501821794038, |
| "learning_rate": 5.436839424338426e-06, |
| "loss": 0.3495, |
| "step": 321 |
| }, |
| { |
| "epoch": 2.0575079872204474, |
| "grad_norm": 0.5220030865474038, |
| "learning_rate": 5.37056697131799e-06, |
| "loss": 0.3313, |
| "step": 322 |
| }, |
| { |
| "epoch": 2.0638977635782747, |
| "grad_norm": 0.48932445764444477, |
| "learning_rate": 5.304552305494493e-06, |
| "loss": 0.3429, |
| "step": 323 |
| }, |
| { |
| "epoch": 2.070287539936102, |
| "grad_norm": 0.5367328609042773, |
| "learning_rate": 5.238799102854941e-06, |
| "loss": 0.3631, |
| "step": 324 |
| }, |
| { |
| "epoch": 2.07667731629393, |
| "grad_norm": 0.4753482664474989, |
| "learning_rate": 5.173311024826916e-06, |
| "loss": 0.3164, |
| "step": 325 |
| }, |
| { |
| "epoch": 2.083067092651757, |
| "grad_norm": 0.5857488280714297, |
| "learning_rate": 5.108091718074706e-06, |
| "loss": 0.3684, |
| "step": 326 |
| }, |
| { |
| "epoch": 2.0894568690095845, |
| "grad_norm": 0.5577030340771072, |
| "learning_rate": 5.043144814296214e-06, |
| "loss": 0.3393, |
| "step": 327 |
| }, |
| { |
| "epoch": 2.0958466453674123, |
| "grad_norm": 0.5001282088257198, |
| "learning_rate": 4.9784739300207675e-06, |
| "loss": 0.3289, |
| "step": 328 |
| }, |
| { |
| "epoch": 2.1022364217252396, |
| "grad_norm": 0.49155760192395415, |
| "learning_rate": 4.914082666407705e-06, |
| "loss": 0.3126, |
| "step": 329 |
| }, |
| { |
| "epoch": 2.108626198083067, |
| "grad_norm": 0.4721435423436379, |
| "learning_rate": 4.849974609045849e-06, |
| "loss": 0.3412, |
| "step": 330 |
| }, |
| { |
| "epoch": 2.1150159744408947, |
| "grad_norm": 0.4887160051279591, |
| "learning_rate": 4.786153327753865e-06, |
| "loss": 0.344, |
| "step": 331 |
| }, |
| { |
| "epoch": 2.121405750798722, |
| "grad_norm": 0.4325584192349116, |
| "learning_rate": 4.722622376381455e-06, |
| "loss": 0.3316, |
| "step": 332 |
| }, |
| { |
| "epoch": 2.1277955271565494, |
| "grad_norm": 0.4294996718579918, |
| "learning_rate": 4.659385292611479e-06, |
| "loss": 0.3339, |
| "step": 333 |
| }, |
| { |
| "epoch": 2.134185303514377, |
| "grad_norm": 0.41116978589443015, |
| "learning_rate": 4.59644559776296e-06, |
| "loss": 0.3325, |
| "step": 334 |
| }, |
| { |
| "epoch": 2.1405750798722045, |
| "grad_norm": 0.4411913227153377, |
| "learning_rate": 4.533806796594989e-06, |
| "loss": 0.342, |
| "step": 335 |
| }, |
| { |
| "epoch": 2.146964856230032, |
| "grad_norm": 0.44530187962960605, |
| "learning_rate": 4.471472377111574e-06, |
| "loss": 0.364, |
| "step": 336 |
| }, |
| { |
| "epoch": 2.1533546325878596, |
| "grad_norm": 0.4681980949823063, |
| "learning_rate": 4.409445810367421e-06, |
| "loss": 0.3749, |
| "step": 337 |
| }, |
| { |
| "epoch": 2.159744408945687, |
| "grad_norm": 0.4082359456108807, |
| "learning_rate": 4.347730550274628e-06, |
| "loss": 0.3586, |
| "step": 338 |
| }, |
| { |
| "epoch": 2.1661341853035143, |
| "grad_norm": 0.41583681737409806, |
| "learning_rate": 4.286330033410384e-06, |
| "loss": 0.3047, |
| "step": 339 |
| }, |
| { |
| "epoch": 2.1725239616613417, |
| "grad_norm": 0.4540111795301057, |
| "learning_rate": 4.2252476788255735e-06, |
| "loss": 0.3254, |
| "step": 340 |
| }, |
| { |
| "epoch": 2.1789137380191694, |
| "grad_norm": 0.45433063555043707, |
| "learning_rate": 4.164486887854424e-06, |
| "loss": 0.3459, |
| "step": 341 |
| }, |
| { |
| "epoch": 2.1853035143769968, |
| "grad_norm": 0.43892759194821435, |
| "learning_rate": 4.104051043925068e-06, |
| "loss": 0.354, |
| "step": 342 |
| }, |
| { |
| "epoch": 2.191693290734824, |
| "grad_norm": 0.43626351327017776, |
| "learning_rate": 4.043943512371171e-06, |
| "loss": 0.3484, |
| "step": 343 |
| }, |
| { |
| "epoch": 2.198083067092652, |
| "grad_norm": 0.41866320314755173, |
| "learning_rate": 3.984167640244518e-06, |
| "loss": 0.3595, |
| "step": 344 |
| }, |
| { |
| "epoch": 2.2044728434504792, |
| "grad_norm": 0.43171890711936267, |
| "learning_rate": 3.924726756128632e-06, |
| "loss": 0.3683, |
| "step": 345 |
| }, |
| { |
| "epoch": 2.2108626198083066, |
| "grad_norm": 0.39657734240201464, |
| "learning_rate": 3.8656241699534396e-06, |
| "loss": 0.3319, |
| "step": 346 |
| }, |
| { |
| "epoch": 2.2172523961661343, |
| "grad_norm": 0.4390716578058822, |
| "learning_rate": 3.8068631728109364e-06, |
| "loss": 0.3718, |
| "step": 347 |
| }, |
| { |
| "epoch": 2.2236421725239617, |
| "grad_norm": 0.43526087538819297, |
| "learning_rate": 3.7484470367719493e-06, |
| "loss": 0.3586, |
| "step": 348 |
| }, |
| { |
| "epoch": 2.230031948881789, |
| "grad_norm": 0.41150940940323066, |
| "learning_rate": 3.6903790147039286e-06, |
| "loss": 0.3219, |
| "step": 349 |
| }, |
| { |
| "epoch": 2.236421725239617, |
| "grad_norm": 0.4434399201886998, |
| "learning_rate": 3.6326623400897797e-06, |
| "loss": 0.3496, |
| "step": 350 |
| }, |
| { |
| "epoch": 2.242811501597444, |
| "grad_norm": 0.4226836887457401, |
| "learning_rate": 3.575300226847863e-06, |
| "loss": 0.3369, |
| "step": 351 |
| }, |
| { |
| "epoch": 2.2492012779552715, |
| "grad_norm": 0.449011167578069, |
| "learning_rate": 3.5182958691529945e-06, |
| "loss": 0.3456, |
| "step": 352 |
| }, |
| { |
| "epoch": 2.255591054313099, |
| "grad_norm": 0.40337825402760535, |
| "learning_rate": 3.4616524412585797e-06, |
| "loss": 0.3569, |
| "step": 353 |
| }, |
| { |
| "epoch": 2.2619808306709266, |
| "grad_norm": 0.4417946658839781, |
| "learning_rate": 3.405373097319875e-06, |
| "loss": 0.2965, |
| "step": 354 |
| }, |
| { |
| "epoch": 2.268370607028754, |
| "grad_norm": 0.4226400522886019, |
| "learning_rate": 3.3494609712183323e-06, |
| "loss": 0.3143, |
| "step": 355 |
| }, |
| { |
| "epoch": 2.2747603833865817, |
| "grad_norm": 0.42990286288068097, |
| "learning_rate": 3.2939191763871047e-06, |
| "loss": 0.325, |
| "step": 356 |
| }, |
| { |
| "epoch": 2.281150159744409, |
| "grad_norm": 0.48668783932063814, |
| "learning_rate": 3.2387508056376726e-06, |
| "loss": 0.3393, |
| "step": 357 |
| }, |
| { |
| "epoch": 2.2875399361022364, |
| "grad_norm": 0.45993768274586794, |
| "learning_rate": 3.183958930987612e-06, |
| "loss": 0.3271, |
| "step": 358 |
| }, |
| { |
| "epoch": 2.2939297124600637, |
| "grad_norm": 0.3696554039297162, |
| "learning_rate": 3.1295466034895482e-06, |
| "loss": 0.356, |
| "step": 359 |
| }, |
| { |
| "epoch": 2.3003194888178915, |
| "grad_norm": 0.41320713175521795, |
| "learning_rate": 3.0755168530612444e-06, |
| "loss": 0.321, |
| "step": 360 |
| }, |
| { |
| "epoch": 2.306709265175719, |
| "grad_norm": 0.42742117582586886, |
| "learning_rate": 3.021872688316896e-06, |
| "loss": 0.3161, |
| "step": 361 |
| }, |
| { |
| "epoch": 2.313099041533546, |
| "grad_norm": 0.41256761998572333, |
| "learning_rate": 2.968617096399592e-06, |
| "loss": 0.3095, |
| "step": 362 |
| }, |
| { |
| "epoch": 2.319488817891374, |
| "grad_norm": 0.418828546634445, |
| "learning_rate": 2.9157530428149683e-06, |
| "loss": 0.3434, |
| "step": 363 |
| }, |
| { |
| "epoch": 2.3258785942492013, |
| "grad_norm": 0.40161025260603606, |
| "learning_rate": 2.8632834712660882e-06, |
| "loss": 0.3203, |
| "step": 364 |
| }, |
| { |
| "epoch": 2.3322683706070286, |
| "grad_norm": 0.4182135730630846, |
| "learning_rate": 2.8112113034895273e-06, |
| "loss": 0.3135, |
| "step": 365 |
| }, |
| { |
| "epoch": 2.3386581469648564, |
| "grad_norm": 0.40613861239190874, |
| "learning_rate": 2.7595394390926557e-06, |
| "loss": 0.3643, |
| "step": 366 |
| }, |
| { |
| "epoch": 2.3450479233226837, |
| "grad_norm": 0.41302826534326115, |
| "learning_rate": 2.708270755392207e-06, |
| "loss": 0.3424, |
| "step": 367 |
| }, |
| { |
| "epoch": 2.351437699680511, |
| "grad_norm": 0.3930172226452873, |
| "learning_rate": 2.657408107254027e-06, |
| "loss": 0.3429, |
| "step": 368 |
| }, |
| { |
| "epoch": 2.357827476038339, |
| "grad_norm": 0.41867946397201244, |
| "learning_rate": 2.60695432693412e-06, |
| "loss": 0.3657, |
| "step": 369 |
| }, |
| { |
| "epoch": 2.364217252396166, |
| "grad_norm": 0.3952956485859315, |
| "learning_rate": 2.5569122239209366e-06, |
| "loss": 0.2999, |
| "step": 370 |
| }, |
| { |
| "epoch": 2.3706070287539935, |
| "grad_norm": 0.38547686501477857, |
| "learning_rate": 2.507284584778913e-06, |
| "loss": 0.3229, |
| "step": 371 |
| }, |
| { |
| "epoch": 2.376996805111821, |
| "grad_norm": 0.4071024941451391, |
| "learning_rate": 2.4580741729933246e-06, |
| "loss": 0.3398, |
| "step": 372 |
| }, |
| { |
| "epoch": 2.3833865814696487, |
| "grad_norm": 0.3975504941590724, |
| "learning_rate": 2.4092837288163807e-06, |
| "loss": 0.3246, |
| "step": 373 |
| }, |
| { |
| "epoch": 2.389776357827476, |
| "grad_norm": 0.4007814100724813, |
| "learning_rate": 2.3609159691146577e-06, |
| "loss": 0.3834, |
| "step": 374 |
| }, |
| { |
| "epoch": 2.3961661341853033, |
| "grad_norm": 0.4132465009460835, |
| "learning_rate": 2.312973587217798e-06, |
| "loss": 0.3552, |
| "step": 375 |
| }, |
| { |
| "epoch": 2.402555910543131, |
| "grad_norm": 0.38136466401080477, |
| "learning_rate": 2.2654592527685305e-06, |
| "loss": 0.3394, |
| "step": 376 |
| }, |
| { |
| "epoch": 2.4089456869009584, |
| "grad_norm": 0.38378352088164336, |
| "learning_rate": 2.2183756115740274e-06, |
| "loss": 0.3417, |
| "step": 377 |
| }, |
| { |
| "epoch": 2.415335463258786, |
| "grad_norm": 0.42705803790306746, |
| "learning_rate": 2.171725285458559e-06, |
| "loss": 0.3657, |
| "step": 378 |
| }, |
| { |
| "epoch": 2.4217252396166136, |
| "grad_norm": 0.3893534985584009, |
| "learning_rate": 2.1255108721175066e-06, |
| "loss": 0.3439, |
| "step": 379 |
| }, |
| { |
| "epoch": 2.428115015974441, |
| "grad_norm": 0.443399810426828, |
| "learning_rate": 2.079734944972717e-06, |
| "loss": 0.3216, |
| "step": 380 |
| }, |
| { |
| "epoch": 2.4345047923322682, |
| "grad_norm": 0.39874922783339656, |
| "learning_rate": 2.0344000530291875e-06, |
| "loss": 0.3434, |
| "step": 381 |
| }, |
| { |
| "epoch": 2.440894568690096, |
| "grad_norm": 0.3771297738742276, |
| "learning_rate": 1.9895087207331422e-06, |
| "loss": 0.3409, |
| "step": 382 |
| }, |
| { |
| "epoch": 2.4472843450479234, |
| "grad_norm": 0.40027082321568275, |
| "learning_rate": 1.945063447831452e-06, |
| "loss": 0.3265, |
| "step": 383 |
| }, |
| { |
| "epoch": 2.4536741214057507, |
| "grad_norm": 0.3737581626959035, |
| "learning_rate": 1.9010667092324342e-06, |
| "loss": 0.34, |
| "step": 384 |
| }, |
| { |
| "epoch": 2.460063897763578, |
| "grad_norm": 0.3841031529537794, |
| "learning_rate": 1.8575209548680472e-06, |
| "loss": 0.3503, |
| "step": 385 |
| }, |
| { |
| "epoch": 2.466453674121406, |
| "grad_norm": 0.4038855857442773, |
| "learning_rate": 1.814428609557458e-06, |
| "loss": 0.3201, |
| "step": 386 |
| }, |
| { |
| "epoch": 2.472843450479233, |
| "grad_norm": 0.40823279792933154, |
| "learning_rate": 1.7717920728720284e-06, |
| "loss": 0.3219, |
| "step": 387 |
| }, |
| { |
| "epoch": 2.479233226837061, |
| "grad_norm": 0.45571229187170187, |
| "learning_rate": 1.7296137190016916e-06, |
| "loss": 0.3646, |
| "step": 388 |
| }, |
| { |
| "epoch": 2.4856230031948883, |
| "grad_norm": 0.3992759518176683, |
| "learning_rate": 1.6878958966227366e-06, |
| "loss": 0.3514, |
| "step": 389 |
| }, |
| { |
| "epoch": 2.4920127795527156, |
| "grad_norm": 0.44997582744977993, |
| "learning_rate": 1.646640928767047e-06, |
| "loss": 0.3676, |
| "step": 390 |
| }, |
| { |
| "epoch": 2.498402555910543, |
| "grad_norm": 0.4036812441443117, |
| "learning_rate": 1.6058511126927178e-06, |
| "loss": 0.3572, |
| "step": 391 |
| }, |
| { |
| "epoch": 2.5047923322683707, |
| "grad_norm": 0.38861999088805715, |
| "learning_rate": 1.5655287197561497e-06, |
| "loss": 0.3176, |
| "step": 392 |
| }, |
| { |
| "epoch": 2.511182108626198, |
| "grad_norm": 0.3958987299112533, |
| "learning_rate": 1.5256759952855737e-06, |
| "loss": 0.3411, |
| "step": 393 |
| }, |
| { |
| "epoch": 2.5175718849840254, |
| "grad_norm": 0.3958900572864119, |
| "learning_rate": 1.4862951584560037e-06, |
| "loss": 0.3391, |
| "step": 394 |
| }, |
| { |
| "epoch": 2.523961661341853, |
| "grad_norm": 0.3918198479937304, |
| "learning_rate": 1.447388402165686e-06, |
| "loss": 0.3178, |
| "step": 395 |
| }, |
| { |
| "epoch": 2.5303514376996805, |
| "grad_norm": 0.40756583772176336, |
| "learning_rate": 1.4089578929139635e-06, |
| "loss": 0.3264, |
| "step": 396 |
| }, |
| { |
| "epoch": 2.536741214057508, |
| "grad_norm": 0.40960058637131896, |
| "learning_rate": 1.371005770680659e-06, |
| "loss": 0.3385, |
| "step": 397 |
| }, |
| { |
| "epoch": 2.543130990415335, |
| "grad_norm": 0.40903897126869604, |
| "learning_rate": 1.3335341488068997e-06, |
| "loss": 0.3805, |
| "step": 398 |
| }, |
| { |
| "epoch": 2.549520766773163, |
| "grad_norm": 0.39992597114439177, |
| "learning_rate": 1.2965451138774343e-06, |
| "loss": 0.3196, |
| "step": 399 |
| }, |
| { |
| "epoch": 2.5559105431309903, |
| "grad_norm": 0.3894861786588525, |
| "learning_rate": 1.2600407256044544e-06, |
| "loss": 0.354, |
| "step": 400 |
| }, |
| { |
| "epoch": 2.562300319488818, |
| "grad_norm": 0.3773260960595135, |
| "learning_rate": 1.2240230167128863e-06, |
| "loss": 0.3408, |
| "step": 401 |
| }, |
| { |
| "epoch": 2.5686900958466454, |
| "grad_norm": 0.3916021706010166, |
| "learning_rate": 1.188493992827211e-06, |
| "loss": 0.3141, |
| "step": 402 |
| }, |
| { |
| "epoch": 2.5750798722044728, |
| "grad_norm": 0.3906836314398566, |
| "learning_rate": 1.1534556323597824e-06, |
| "loss": 0.2924, |
| "step": 403 |
| }, |
| { |
| "epoch": 2.5814696485623, |
| "grad_norm": 0.4038749879309955, |
| "learning_rate": 1.1189098864006488e-06, |
| "loss": 0.3365, |
| "step": 404 |
| }, |
| { |
| "epoch": 2.587859424920128, |
| "grad_norm": 0.40279549296487077, |
| "learning_rate": 1.084858678608922e-06, |
| "loss": 0.3239, |
| "step": 405 |
| }, |
| { |
| "epoch": 2.594249201277955, |
| "grad_norm": 0.36365947820301786, |
| "learning_rate": 1.0513039051056507e-06, |
| "loss": 0.3326, |
| "step": 406 |
| }, |
| { |
| "epoch": 2.600638977635783, |
| "grad_norm": 0.3696286967249599, |
| "learning_rate": 1.0182474343682346e-06, |
| "loss": 0.3292, |
| "step": 407 |
| }, |
| { |
| "epoch": 2.6070287539936103, |
| "grad_norm": 0.4175231953214467, |
| "learning_rate": 9.85691107126392e-07, |
| "loss": 0.3744, |
| "step": 408 |
| }, |
| { |
| "epoch": 2.6134185303514377, |
| "grad_norm": 0.39195370451842787, |
| "learning_rate": 9.536367362596422e-07, |
| "loss": 0.3695, |
| "step": 409 |
| }, |
| { |
| "epoch": 2.619808306709265, |
| "grad_norm": 0.37605332595306007, |
| "learning_rate": 9.220861066963715e-07, |
| "loss": 0.3418, |
| "step": 410 |
| }, |
| { |
| "epoch": 2.626198083067093, |
| "grad_norm": 0.3995977702745196, |
| "learning_rate": 8.910409753144344e-07, |
| "loss": 0.3549, |
| "step": 411 |
| }, |
| { |
| "epoch": 2.63258785942492, |
| "grad_norm": 0.3834229012458761, |
| "learning_rate": 8.605030708433149e-07, |
| "loss": 0.3445, |
| "step": 412 |
| }, |
| { |
| "epoch": 2.6389776357827475, |
| "grad_norm": 0.37644074285723944, |
| "learning_rate": 8.304740937678835e-07, |
| "loss": 0.3668, |
| "step": 413 |
| }, |
| { |
| "epoch": 2.6453674121405752, |
| "grad_norm": 0.38149522660404983, |
| "learning_rate": 8.009557162336823e-07, |
| "loss": 0.3286, |
| "step": 414 |
| }, |
| { |
| "epoch": 2.6517571884984026, |
| "grad_norm": 0.3965348662763524, |
| "learning_rate": 7.719495819538325e-07, |
| "loss": 0.3562, |
| "step": 415 |
| }, |
| { |
| "epoch": 2.65814696485623, |
| "grad_norm": 0.3740820985387306, |
| "learning_rate": 7.434573061174966e-07, |
| "loss": 0.3171, |
| "step": 416 |
| }, |
| { |
| "epoch": 2.6645367412140573, |
| "grad_norm": 0.37288133480679564, |
| "learning_rate": 7.154804752999344e-07, |
| "loss": 0.3263, |
| "step": 417 |
| }, |
| { |
| "epoch": 2.670926517571885, |
| "grad_norm": 0.37023486110236786, |
| "learning_rate": 6.880206473741646e-07, |
| "loss": 0.368, |
| "step": 418 |
| }, |
| { |
| "epoch": 2.6773162939297124, |
| "grad_norm": 0.3924590139873951, |
| "learning_rate": 6.610793514242075e-07, |
| "loss": 0.3348, |
| "step": 419 |
| }, |
| { |
| "epoch": 2.68370607028754, |
| "grad_norm": 0.4026773365921225, |
| "learning_rate": 6.346580876599395e-07, |
| "loss": 0.3405, |
| "step": 420 |
| }, |
| { |
| "epoch": 2.6900958466453675, |
| "grad_norm": 0.3855444929570631, |
| "learning_rate": 6.08758327333564e-07, |
| "loss": 0.3588, |
| "step": 421 |
| }, |
| { |
| "epoch": 2.696485623003195, |
| "grad_norm": 0.4018291966488895, |
| "learning_rate": 5.833815126576714e-07, |
| "loss": 0.3259, |
| "step": 422 |
| }, |
| { |
| "epoch": 2.702875399361022, |
| "grad_norm": 0.36389835567175366, |
| "learning_rate": 5.585290567249424e-07, |
| "loss": 0.3514, |
| "step": 423 |
| }, |
| { |
| "epoch": 2.70926517571885, |
| "grad_norm": 0.40458156951380986, |
| "learning_rate": 5.342023434294552e-07, |
| "loss": 0.337, |
| "step": 424 |
| }, |
| { |
| "epoch": 2.7156549520766773, |
| "grad_norm": 0.36807317654930927, |
| "learning_rate": 5.104027273896239e-07, |
| "loss": 0.3148, |
| "step": 425 |
| }, |
| { |
| "epoch": 2.722044728434505, |
| "grad_norm": 0.3689483940462552, |
| "learning_rate": 4.871315338727711e-07, |
| "loss": 0.3181, |
| "step": 426 |
| }, |
| { |
| "epoch": 2.7284345047923324, |
| "grad_norm": 0.3833457229816993, |
| "learning_rate": 4.6439005872132457e-07, |
| "loss": 0.3368, |
| "step": 427 |
| }, |
| { |
| "epoch": 2.7348242811501597, |
| "grad_norm": 0.36604943775735366, |
| "learning_rate": 4.421795682806662e-07, |
| "loss": 0.3364, |
| "step": 428 |
| }, |
| { |
| "epoch": 2.741214057507987, |
| "grad_norm": 0.3773811652537217, |
| "learning_rate": 4.2050129932861394e-07, |
| "loss": 0.3229, |
| "step": 429 |
| }, |
| { |
| "epoch": 2.747603833865815, |
| "grad_norm": 0.3796116399398149, |
| "learning_rate": 3.9935645900654906e-07, |
| "loss": 0.3633, |
| "step": 430 |
| }, |
| { |
| "epoch": 2.753993610223642, |
| "grad_norm": 0.37803460451421467, |
| "learning_rate": 3.7874622475220336e-07, |
| "loss": 0.3524, |
| "step": 431 |
| }, |
| { |
| "epoch": 2.7603833865814695, |
| "grad_norm": 0.36675174735385174, |
| "learning_rate": 3.58671744234087e-07, |
| "loss": 0.365, |
| "step": 432 |
| }, |
| { |
| "epoch": 2.7667731629392973, |
| "grad_norm": 0.3898919999844523, |
| "learning_rate": 3.3913413528758877e-07, |
| "loss": 0.3323, |
| "step": 433 |
| }, |
| { |
| "epoch": 2.7731629392971247, |
| "grad_norm": 0.3936657666166714, |
| "learning_rate": 3.2013448585272333e-07, |
| "loss": 0.3337, |
| "step": 434 |
| }, |
| { |
| "epoch": 2.779552715654952, |
| "grad_norm": 0.38046637386341803, |
| "learning_rate": 3.016738539135566e-07, |
| "loss": 0.344, |
| "step": 435 |
| }, |
| { |
| "epoch": 2.7859424920127793, |
| "grad_norm": 0.3770525251793597, |
| "learning_rate": 2.837532674392862e-07, |
| "loss": 0.374, |
| "step": 436 |
| }, |
| { |
| "epoch": 2.792332268370607, |
| "grad_norm": 0.3916319385244186, |
| "learning_rate": 2.6637372432700483e-07, |
| "loss": 0.3492, |
| "step": 437 |
| }, |
| { |
| "epoch": 2.7987220447284344, |
| "grad_norm": 0.4083221537063404, |
| "learning_rate": 2.4953619234612816e-07, |
| "loss": 0.3056, |
| "step": 438 |
| }, |
| { |
| "epoch": 2.8051118210862622, |
| "grad_norm": 0.36900391336674243, |
| "learning_rate": 2.332416090845102e-07, |
| "loss": 0.3071, |
| "step": 439 |
| }, |
| { |
| "epoch": 2.8115015974440896, |
| "grad_norm": 0.4181996987699531, |
| "learning_rate": 2.1749088189622848e-07, |
| "loss": 0.3351, |
| "step": 440 |
| }, |
| { |
| "epoch": 2.817891373801917, |
| "grad_norm": 0.3547619175700847, |
| "learning_rate": 2.0228488785106636e-07, |
| "loss": 0.3406, |
| "step": 441 |
| }, |
| { |
| "epoch": 2.8242811501597442, |
| "grad_norm": 0.3977218103469981, |
| "learning_rate": 1.8762447368566582e-07, |
| "loss": 0.3505, |
| "step": 442 |
| }, |
| { |
| "epoch": 2.830670926517572, |
| "grad_norm": 0.39003543295538634, |
| "learning_rate": 1.7351045575638047e-07, |
| "loss": 0.3753, |
| "step": 443 |
| }, |
| { |
| "epoch": 2.8370607028753994, |
| "grad_norm": 0.37845868272626343, |
| "learning_rate": 1.5994361999382202e-07, |
| "loss": 0.3278, |
| "step": 444 |
| }, |
| { |
| "epoch": 2.8434504792332267, |
| "grad_norm": 0.39842394374774287, |
| "learning_rate": 1.4692472185908635e-07, |
| "loss": 0.3682, |
| "step": 445 |
| }, |
| { |
| "epoch": 2.8498402555910545, |
| "grad_norm": 0.3457049068963664, |
| "learning_rate": 1.344544863016961e-07, |
| "loss": 0.3438, |
| "step": 446 |
| }, |
| { |
| "epoch": 2.856230031948882, |
| "grad_norm": 0.40463187861502226, |
| "learning_rate": 1.225336077192274e-07, |
| "loss": 0.3356, |
| "step": 447 |
| }, |
| { |
| "epoch": 2.862619808306709, |
| "grad_norm": 0.5473639905587789, |
| "learning_rate": 1.1116274991864073e-07, |
| "loss": 0.3612, |
| "step": 448 |
| }, |
| { |
| "epoch": 2.8690095846645365, |
| "grad_norm": 0.36738048359514414, |
| "learning_rate": 1.003425460793217e-07, |
| "loss": 0.3618, |
| "step": 449 |
| }, |
| { |
| "epoch": 2.8753993610223643, |
| "grad_norm": 0.39397443170606444, |
| "learning_rate": 9.00735987178214e-08, |
| "loss": 0.3325, |
| "step": 450 |
| }, |
| { |
| "epoch": 2.8817891373801916, |
| "grad_norm": 0.3641877860232395, |
| "learning_rate": 8.035647965430215e-08, |
| "loss": 0.3705, |
| "step": 451 |
| }, |
| { |
| "epoch": 2.8881789137380194, |
| "grad_norm": 0.3766602850517281, |
| "learning_rate": 7.119172998070412e-08, |
| "loss": 0.3602, |
| "step": 452 |
| }, |
| { |
| "epoch": 2.8945686900958467, |
| "grad_norm": 0.3504672529434601, |
| "learning_rate": 6.25798600306049e-08, |
| "loss": 0.345, |
| "step": 453 |
| }, |
| { |
| "epoch": 2.900958466453674, |
| "grad_norm": 0.3689087024207003, |
| "learning_rate": 5.4521349350808996e-08, |
| "loss": 0.3711, |
| "step": 454 |
| }, |
| { |
| "epoch": 2.9073482428115014, |
| "grad_norm": 0.3816320750179523, |
| "learning_rate": 4.701664667464245e-08, |
| "loss": 0.3646, |
| "step": 455 |
| }, |
| { |
| "epoch": 2.913738019169329, |
| "grad_norm": 0.3679437431040902, |
| "learning_rate": 4.006616989696621e-08, |
| "loss": 0.3615, |
| "step": 456 |
| }, |
| { |
| "epoch": 2.9201277955271565, |
| "grad_norm": 0.38571927082544183, |
| "learning_rate": 3.367030605090249e-08, |
| "loss": 0.3222, |
| "step": 457 |
| }, |
| { |
| "epoch": 2.9265175718849843, |
| "grad_norm": 0.3971237004311223, |
| "learning_rate": 2.7829411286287577e-08, |
| "loss": 0.3485, |
| "step": 458 |
| }, |
| { |
| "epoch": 2.9329073482428116, |
| "grad_norm": 0.3630064489830963, |
| "learning_rate": 2.254381084983659e-08, |
| "loss": 0.3065, |
| "step": 459 |
| }, |
| { |
| "epoch": 2.939297124600639, |
| "grad_norm": 0.3754819920105193, |
| "learning_rate": 1.781379906703573e-08, |
| "loss": 0.36, |
| "step": 460 |
| }, |
| { |
| "epoch": 2.9456869009584663, |
| "grad_norm": 0.37643335933636624, |
| "learning_rate": 1.3639639325748744e-08, |
| "loss": 0.3725, |
| "step": 461 |
| }, |
| { |
| "epoch": 2.952076677316294, |
| "grad_norm": 0.397354621784988, |
| "learning_rate": 1.0021564061554189e-08, |
| "loss": 0.3378, |
| "step": 462 |
| }, |
| { |
| "epoch": 2.9584664536741214, |
| "grad_norm": 0.37424179888551773, |
| "learning_rate": 6.959774744796921e-09, |
| "loss": 0.3399, |
| "step": 463 |
| }, |
| { |
| "epoch": 2.9648562300319488, |
| "grad_norm": 0.37190360281608004, |
| "learning_rate": 4.454441869377047e-09, |
| "loss": 0.3219, |
| "step": 464 |
| }, |
| { |
| "epoch": 2.9712460063897765, |
| "grad_norm": 0.3657736795626134, |
| "learning_rate": 2.5057049432519744e-09, |
| "loss": 0.3705, |
| "step": 465 |
| }, |
| { |
| "epoch": 2.977635782747604, |
| "grad_norm": 0.35303669900268125, |
| "learning_rate": 1.1136724806637411e-09, |
| "loss": 0.3318, |
| "step": 466 |
| }, |
| { |
| "epoch": 2.984025559105431, |
| "grad_norm": 0.36854160936154184, |
| "learning_rate": 2.784219961060597e-10, |
| "loss": 0.3518, |
| "step": 467 |
| }, |
| { |
| "epoch": 2.9904153354632586, |
| "grad_norm": 0.3989983000303115, |
| "learning_rate": 0.0, |
| "loss": 0.3546, |
| "step": 468 |
| }, |
| { |
| "epoch": 2.9904153354632586, |
| "step": 468, |
| "total_flos": 3.310734730054861e+17, |
| "train_loss": 0.4434524042229367, |
| "train_runtime": 10205.5973, |
| "train_samples_per_second": 2.939, |
| "train_steps_per_second": 0.046 |
| } |
| ], |
| "logging_steps": 1.0, |
| "max_steps": 468, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 3.310734730054861e+17, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |
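
The table above is the complete `trainer_state.json` that the Hugging Face Trainer writes out: one per-step entry with `loss`, `learning_rate`, and `grad_norm`, followed by a final summary entry (`train_loss`, `train_runtime`, and throughput figures). The sketch below is one hedged way to turn it back into a loss and learning-rate curve; it assumes the table is saved verbatim as a local file named `trainer_state.json`, and the file name and output path are placeholders, not anything referenced by the log itself.

```python
# Minimal sketch: plot the training loss and LR schedule from trainer_state.json.
# Assumption: the JSON above is saved locally as "trainer_state.json"; paths are placeholders.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step entries carry "loss" and "learning_rate"; the trailing summary entry
# (train_loss, train_runtime, ...) has neither, so filter it out first.
entries = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in entries]
losses = [e["loss"] for e in entries]
lrs = [e["learning_rate"] for e in entries]

fig, loss_ax = plt.subplots()
loss_ax.plot(steps, losses, label="training loss")
loss_ax.set_xlabel("step")
loss_ax.set_ylabel("loss")

lr_ax = loss_ax.twinx()  # second y-axis for the learning-rate schedule
lr_ax.plot(steps, lrs, color="tab:orange", label="learning rate")
lr_ax.set_ylabel("learning rate")

fig.tight_layout()
fig.savefig("loss_curve.png")
```

With this log the curve shows the loss falling from roughly 0.88 at step 1 to the mid-0.3 range by the end of epoch 2, while the learning rate decays to 0 at the final step 468.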
|
|