| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 0.9998775260257196, |
| "eval_steps": 500, |
| "global_step": 2041, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0004898958971218616, |
| "grad_norm": 1.668303370475769, |
| "learning_rate": 5.0000000000000004e-08, |
| "loss": 0.9871, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0009797917942437231, |
| "grad_norm": 1.5414912700653076, |
| "learning_rate": 1.0000000000000001e-07, |
| "loss": 0.9553, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.001469687691365585, |
| "grad_norm": 1.5704071521759033, |
| "learning_rate": 1.5000000000000002e-07, |
| "loss": 0.9932, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.0019595835884874463, |
| "grad_norm": 1.5206725597381592, |
| "learning_rate": 2.0000000000000002e-07, |
| "loss": 0.955, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.002449479485609308, |
| "grad_norm": 1.5219823122024536, |
| "learning_rate": 2.5000000000000004e-07, |
| "loss": 0.9255, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.00293937538273117, |
| "grad_norm": 1.458319902420044, |
| "learning_rate": 3.0000000000000004e-07, |
| "loss": 0.918, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.003429271279853031, |
| "grad_norm": 1.6977298259735107, |
| "learning_rate": 3.5000000000000004e-07, |
| "loss": 1.0083, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.0039191671769748925, |
| "grad_norm": 1.5800329446792603, |
| "learning_rate": 4.0000000000000003e-07, |
| "loss": 0.9472, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.004409063074096754, |
| "grad_norm": 1.5464119911193848, |
| "learning_rate": 4.5000000000000003e-07, |
| "loss": 0.9621, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.004898958971218616, |
| "grad_norm": 1.5234100818634033, |
| "learning_rate": 5.000000000000001e-07, |
| "loss": 0.9296, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.005388854868340478, |
| "grad_norm": 1.481718897819519, |
| "learning_rate": 5.5e-07, |
| "loss": 0.9571, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.00587875076546234, |
| "grad_norm": 1.4497618675231934, |
| "learning_rate": 6.000000000000001e-07, |
| "loss": 0.9538, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.0063686466625842006, |
| "grad_norm": 1.5114537477493286, |
| "learning_rate": 6.5e-07, |
| "loss": 0.9591, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.006858542559706062, |
| "grad_norm": 1.4514930248260498, |
| "learning_rate": 7.000000000000001e-07, |
| "loss": 0.917, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.007348438456827924, |
| "grad_norm": 1.3999227285385132, |
| "learning_rate": 7.5e-07, |
| "loss": 0.9543, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.007838334353949785, |
| "grad_norm": 1.3517608642578125, |
| "learning_rate": 8.000000000000001e-07, |
| "loss": 0.9439, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.008328230251071647, |
| "grad_norm": 1.2273361682891846, |
| "learning_rate": 8.500000000000001e-07, |
| "loss": 0.961, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.008818126148193509, |
| "grad_norm": 1.157639980316162, |
| "learning_rate": 9.000000000000001e-07, |
| "loss": 0.9285, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.00930802204531537, |
| "grad_norm": 1.0808814764022827, |
| "learning_rate": 9.500000000000001e-07, |
| "loss": 0.91, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.009797917942437232, |
| "grad_norm": 1.0530990362167358, |
| "learning_rate": 1.0000000000000002e-06, |
| "loss": 0.899, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.010287813839559094, |
| "grad_norm": 1.0722999572753906, |
| "learning_rate": 1.0500000000000001e-06, |
| "loss": 0.9384, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.010777709736680956, |
| "grad_norm": 0.9200907945632935, |
| "learning_rate": 1.1e-06, |
| "loss": 0.9012, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.011267605633802818, |
| "grad_norm": 0.8614085912704468, |
| "learning_rate": 1.1500000000000002e-06, |
| "loss": 0.9176, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.01175750153092468, |
| "grad_norm": 0.8791833519935608, |
| "learning_rate": 1.2000000000000002e-06, |
| "loss": 0.9024, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.01224739742804654, |
| "grad_norm": 0.7829999923706055, |
| "learning_rate": 1.25e-06, |
| "loss": 0.8956, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.012737293325168401, |
| "grad_norm": 0.7279092073440552, |
| "learning_rate": 1.3e-06, |
| "loss": 0.8837, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.013227189222290263, |
| "grad_norm": 0.7260591983795166, |
| "learning_rate": 1.3500000000000002e-06, |
| "loss": 0.9255, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.013717085119412125, |
| "grad_norm": 0.6384291648864746, |
| "learning_rate": 1.4000000000000001e-06, |
| "loss": 0.8652, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.014206981016533986, |
| "grad_norm": 0.6459453701972961, |
| "learning_rate": 1.45e-06, |
| "loss": 0.8353, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.014696876913655848, |
| "grad_norm": 0.6280319094657898, |
| "learning_rate": 1.5e-06, |
| "loss": 0.8961, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.01518677281077771, |
| "grad_norm": 0.6156179308891296, |
| "learning_rate": 1.5500000000000002e-06, |
| "loss": 0.8929, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.01567666870789957, |
| "grad_norm": 0.5640994310379028, |
| "learning_rate": 1.6000000000000001e-06, |
| "loss": 0.8689, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.016166564605021434, |
| "grad_norm": 0.5428940057754517, |
| "learning_rate": 1.6500000000000003e-06, |
| "loss": 0.8843, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.016656460502143294, |
| "grad_norm": 0.5078619122505188, |
| "learning_rate": 1.7000000000000002e-06, |
| "loss": 0.879, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.017146356399265157, |
| "grad_norm": 0.4971136748790741, |
| "learning_rate": 1.75e-06, |
| "loss": 0.8815, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.017636252296387017, |
| "grad_norm": 0.4693506062030792, |
| "learning_rate": 1.8000000000000001e-06, |
| "loss": 0.8494, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.01812614819350888, |
| "grad_norm": 0.4563111960887909, |
| "learning_rate": 1.85e-06, |
| "loss": 0.8379, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.01861604409063074, |
| "grad_norm": 0.4457121193408966, |
| "learning_rate": 1.9000000000000002e-06, |
| "loss": 0.8923, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.019105939987752604, |
| "grad_norm": 0.4194672107696533, |
| "learning_rate": 1.9500000000000004e-06, |
| "loss": 0.8229, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.019595835884874464, |
| "grad_norm": 0.40843793749809265, |
| "learning_rate": 2.0000000000000003e-06, |
| "loss": 0.8483, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.020085731781996324, |
| "grad_norm": 0.39125168323516846, |
| "learning_rate": 2.05e-06, |
| "loss": 0.8487, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.020575627679118188, |
| "grad_norm": 0.39917248487472534, |
| "learning_rate": 2.1000000000000002e-06, |
| "loss": 0.8859, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.021065523576240048, |
| "grad_norm": 0.36128363013267517, |
| "learning_rate": 2.15e-06, |
| "loss": 0.7875, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.02155541947336191, |
| "grad_norm": 0.37637510895729065, |
| "learning_rate": 2.2e-06, |
| "loss": 0.8377, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.02204531537048377, |
| "grad_norm": 0.3807457387447357, |
| "learning_rate": 2.25e-06, |
| "loss": 0.848, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.022535211267605635, |
| "grad_norm": 0.36621737480163574, |
| "learning_rate": 2.3000000000000004e-06, |
| "loss": 0.8301, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.023025107164727495, |
| "grad_norm": 0.35659459233283997, |
| "learning_rate": 2.35e-06, |
| "loss": 0.8429, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.02351500306184936, |
| "grad_norm": 0.3621164858341217, |
| "learning_rate": 2.4000000000000003e-06, |
| "loss": 0.8035, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.02400489895897122, |
| "grad_norm": 0.3681345283985138, |
| "learning_rate": 2.4500000000000003e-06, |
| "loss": 0.7814, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.02449479485609308, |
| "grad_norm": 0.3703807294368744, |
| "learning_rate": 2.5e-06, |
| "loss": 0.8359, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.024984690753214942, |
| "grad_norm": 0.3428044319152832, |
| "learning_rate": 2.55e-06, |
| "loss": 0.7875, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.025474586650336802, |
| "grad_norm": 0.3258868455886841, |
| "learning_rate": 2.6e-06, |
| "loss": 0.7972, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.025964482547458666, |
| "grad_norm": 0.30134114623069763, |
| "learning_rate": 2.6500000000000005e-06, |
| "loss": 0.8126, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.026454378444580526, |
| "grad_norm": 0.3181776702404022, |
| "learning_rate": 2.7000000000000004e-06, |
| "loss": 0.8232, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.02694427434170239, |
| "grad_norm": 0.3208143711090088, |
| "learning_rate": 2.7500000000000004e-06, |
| "loss": 0.8325, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.02743417023882425, |
| "grad_norm": 0.2844039499759674, |
| "learning_rate": 2.8000000000000003e-06, |
| "loss": 0.7862, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.027924066135946113, |
| "grad_norm": 0.28659218549728394, |
| "learning_rate": 2.85e-06, |
| "loss": 0.8426, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.028413962033067973, |
| "grad_norm": 0.2850748300552368, |
| "learning_rate": 2.9e-06, |
| "loss": 0.7922, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.028903857930189833, |
| "grad_norm": 0.27821680903434753, |
| "learning_rate": 2.95e-06, |
| "loss": 0.8085, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.029393753827311696, |
| "grad_norm": 0.2871108055114746, |
| "learning_rate": 3e-06, |
| "loss": 0.8015, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.029883649724433557, |
| "grad_norm": 0.2768966853618622, |
| "learning_rate": 3.05e-06, |
| "loss": 0.8081, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.03037354562155542, |
| "grad_norm": 0.27445188164711, |
| "learning_rate": 3.1000000000000004e-06, |
| "loss": 0.7832, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.03086344151867728, |
| "grad_norm": 0.2732076048851013, |
| "learning_rate": 3.1500000000000003e-06, |
| "loss": 0.8103, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.03135333741579914, |
| "grad_norm": 0.2777082026004791, |
| "learning_rate": 3.2000000000000003e-06, |
| "loss": 0.838, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.03184323331292101, |
| "grad_norm": 0.2745436728000641, |
| "learning_rate": 3.2500000000000002e-06, |
| "loss": 0.792, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.03233312921004287, |
| "grad_norm": 0.2597768306732178, |
| "learning_rate": 3.3000000000000006e-06, |
| "loss": 0.8076, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.03282302510716473, |
| "grad_norm": 0.25194868445396423, |
| "learning_rate": 3.3500000000000005e-06, |
| "loss": 0.7815, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.03331292100428659, |
| "grad_norm": 0.24808664619922638, |
| "learning_rate": 3.4000000000000005e-06, |
| "loss": 0.7815, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.03380281690140845, |
| "grad_norm": 0.25291213393211365, |
| "learning_rate": 3.45e-06, |
| "loss": 0.7831, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.034292712798530314, |
| "grad_norm": 0.2491445690393448, |
| "learning_rate": 3.5e-06, |
| "loss": 0.7918, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.034782608695652174, |
| "grad_norm": 0.24815823137760162, |
| "learning_rate": 3.5500000000000003e-06, |
| "loss": 0.7925, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.035272504592774034, |
| "grad_norm": 0.23526257276535034, |
| "learning_rate": 3.6000000000000003e-06, |
| "loss": 0.7971, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.035762400489895894, |
| "grad_norm": 0.23858553171157837, |
| "learning_rate": 3.65e-06, |
| "loss": 0.8177, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.03625229638701776, |
| "grad_norm": 0.23471508920192719, |
| "learning_rate": 3.7e-06, |
| "loss": 0.7927, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.03674219228413962, |
| "grad_norm": 0.23156484961509705, |
| "learning_rate": 3.7500000000000005e-06, |
| "loss": 0.7824, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.03723208818126148, |
| "grad_norm": 0.23146644234657288, |
| "learning_rate": 3.8000000000000005e-06, |
| "loss": 0.7813, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.03772198407838334, |
| "grad_norm": 0.23870326578617096, |
| "learning_rate": 3.85e-06, |
| "loss": 0.7871, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.03821187997550521, |
| "grad_norm": 0.2287260890007019, |
| "learning_rate": 3.900000000000001e-06, |
| "loss": 0.7842, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.03870177587262707, |
| "grad_norm": 0.22993454337120056, |
| "learning_rate": 3.95e-06, |
| "loss": 0.7554, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.03919167176974893, |
| "grad_norm": 0.22278738021850586, |
| "learning_rate": 4.000000000000001e-06, |
| "loss": 0.7796, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.03968156766687079, |
| "grad_norm": 0.22093527019023895, |
| "learning_rate": 4.05e-06, |
| "loss": 0.7546, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.04017146356399265, |
| "grad_norm": 0.22237907350063324, |
| "learning_rate": 4.1e-06, |
| "loss": 0.7762, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.040661359461114516, |
| "grad_norm": 0.23848189413547516, |
| "learning_rate": 4.15e-06, |
| "loss": 0.7694, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.041151255358236376, |
| "grad_norm": 0.22380702197551727, |
| "learning_rate": 4.2000000000000004e-06, |
| "loss": 0.7829, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.041641151255358236, |
| "grad_norm": 0.22918623685836792, |
| "learning_rate": 4.25e-06, |
| "loss": 0.7784, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.042131047152480096, |
| "grad_norm": 0.21957220137119293, |
| "learning_rate": 4.3e-06, |
| "loss": 0.7444, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.04262094304960196, |
| "grad_norm": 0.2195909172296524, |
| "learning_rate": 4.350000000000001e-06, |
| "loss": 0.791, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.04311083894672382, |
| "grad_norm": 0.22012612223625183, |
| "learning_rate": 4.4e-06, |
| "loss": 0.7374, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.04360073484384568, |
| "grad_norm": 0.22052907943725586, |
| "learning_rate": 4.450000000000001e-06, |
| "loss": 0.7833, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.04409063074096754, |
| "grad_norm": 0.22584034502506256, |
| "learning_rate": 4.5e-06, |
| "loss": 0.7803, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.0445805266380894, |
| "grad_norm": 0.21903306245803833, |
| "learning_rate": 4.5500000000000005e-06, |
| "loss": 0.7222, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.04507042253521127, |
| "grad_norm": 0.21120664477348328, |
| "learning_rate": 4.600000000000001e-06, |
| "loss": 0.7955, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.04556031843233313, |
| "grad_norm": 0.22016657888889313, |
| "learning_rate": 4.65e-06, |
| "loss": 0.7551, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.04605021432945499, |
| "grad_norm": 0.2233135849237442, |
| "learning_rate": 4.7e-06, |
| "loss": 0.7665, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.04654011022657685, |
| "grad_norm": 0.21228142082691193, |
| "learning_rate": 4.75e-06, |
| "loss": 0.7513, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.04703000612369872, |
| "grad_norm": 0.2213594615459442, |
| "learning_rate": 4.800000000000001e-06, |
| "loss": 0.757, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.04751990202082058, |
| "grad_norm": 0.21695221960544586, |
| "learning_rate": 4.85e-06, |
| "loss": 0.7641, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.04800979791794244, |
| "grad_norm": 0.2285292148590088, |
| "learning_rate": 4.9000000000000005e-06, |
| "loss": 0.7558, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.0484996938150643, |
| "grad_norm": 0.21678751707077026, |
| "learning_rate": 4.95e-06, |
| "loss": 0.7418, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.04898958971218616, |
| "grad_norm": 0.20614083111286163, |
| "learning_rate": 5e-06, |
| "loss": 0.7477, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.049479485609308024, |
| "grad_norm": 0.2086937427520752, |
| "learning_rate": 4.999999659917706e-06, |
| "loss": 0.7649, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.049969381506429884, |
| "grad_norm": 0.2142128199338913, |
| "learning_rate": 4.999998639670915e-06, |
| "loss": 0.7333, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.050459277403551744, |
| "grad_norm": 0.21820193529129028, |
| "learning_rate": 4.999996939259905e-06, |
| "loss": 0.7542, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.050949173300673604, |
| "grad_norm": 0.21764390170574188, |
| "learning_rate": 4.999994558685137e-06, |
| "loss": 0.7514, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.05143906919779547, |
| "grad_norm": 0.21882116794586182, |
| "learning_rate": 4.999991497947262e-06, |
| "loss": 0.7423, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.05192896509491733, |
| "grad_norm": 0.21646051108837128, |
| "learning_rate": 4.99998775704711e-06, |
| "loss": 0.7604, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.05241886099203919, |
| "grad_norm": 0.2149217575788498, |
| "learning_rate": 4.9999833359857005e-06, |
| "loss": 0.7573, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.05290875688916105, |
| "grad_norm": 0.21114769577980042, |
| "learning_rate": 4.9999782347642355e-06, |
| "loss": 0.7664, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.05339865278628291, |
| "grad_norm": 0.23437361419200897, |
| "learning_rate": 4.9999724533841035e-06, |
| "loss": 0.7561, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.05388854868340478, |
| "grad_norm": 0.21103020012378693, |
| "learning_rate": 4.999965991846876e-06, |
| "loss": 0.7931, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.05437844458052664, |
| "grad_norm": 0.22028645873069763, |
| "learning_rate": 4.999958850154312e-06, |
| "loss": 0.7392, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.0548683404776485, |
| "grad_norm": 0.21418073773384094, |
| "learning_rate": 4.999951028308353e-06, |
| "loss": 0.7394, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.05535823637477036, |
| "grad_norm": 0.2170812487602234, |
| "learning_rate": 4.99994252631113e-06, |
| "loss": 0.7649, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.055848132271892226, |
| "grad_norm": 0.2240699976682663, |
| "learning_rate": 4.999933344164954e-06, |
| "loss": 0.7407, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.056338028169014086, |
| "grad_norm": 0.2185816615819931, |
| "learning_rate": 4.999923481872324e-06, |
| "loss": 0.7705, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.056827924066135946, |
| "grad_norm": 0.21891696751117706, |
| "learning_rate": 4.999912939435922e-06, |
| "loss": 0.7243, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.057317819963257806, |
| "grad_norm": 0.21337160468101501, |
| "learning_rate": 4.999901716858617e-06, |
| "loss": 0.7281, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.057807715860379666, |
| "grad_norm": 0.22411711513996124, |
| "learning_rate": 4.9998898141434635e-06, |
| "loss": 0.7407, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.05829761175750153, |
| "grad_norm": 0.21748706698417664, |
| "learning_rate": 4.999877231293698e-06, |
| "loss": 0.7421, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.05878750765462339, |
| "grad_norm": 0.2109653800725937, |
| "learning_rate": 4.999863968312744e-06, |
| "loss": 0.748, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.05927740355174525, |
| "grad_norm": 0.21555471420288086, |
| "learning_rate": 4.999850025204211e-06, |
| "loss": 0.7199, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.05976729944886711, |
| "grad_norm": 0.221576988697052, |
| "learning_rate": 4.999835401971892e-06, |
| "loss": 0.7292, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.06025719534598898, |
| "grad_norm": 0.21922191977500916, |
| "learning_rate": 4.9998200986197645e-06, |
| "loss": 0.7493, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.06074709124311084, |
| "grad_norm": 0.22416526079177856, |
| "learning_rate": 4.999804115151994e-06, |
| "loss": 0.7382, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.0612369871402327, |
| "grad_norm": 0.21385036408901215, |
| "learning_rate": 4.9997874515729275e-06, |
| "loss": 0.7542, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.06172688303735456, |
| "grad_norm": 0.21742364764213562, |
| "learning_rate": 4.9997701078870985e-06, |
| "loss": 0.7512, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.06221677893447643, |
| "grad_norm": 0.21706967055797577, |
| "learning_rate": 4.999752084099227e-06, |
| "loss": 0.7517, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.06270667483159828, |
| "grad_norm": 0.24107079207897186, |
| "learning_rate": 4.999733380214215e-06, |
| "loss": 0.7558, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.06319657072872015, |
| "grad_norm": 0.21388758718967438, |
| "learning_rate": 4.999713996237152e-06, |
| "loss": 0.765, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.06368646662584201, |
| "grad_norm": 0.2108607441186905, |
| "learning_rate": 4.999693932173312e-06, |
| "loss": 0.7365, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.06417636252296387, |
| "grad_norm": 0.21274609863758087, |
| "learning_rate": 4.999673188028153e-06, |
| "loss": 0.7529, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.06466625842008573, |
| "grad_norm": 0.21276482939720154, |
| "learning_rate": 4.999651763807321e-06, |
| "loss": 0.7573, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.06515615431720759, |
| "grad_norm": 0.22406885027885437, |
| "learning_rate": 4.999629659516641e-06, |
| "loss": 0.724, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.06564605021432945, |
| "grad_norm": 0.22327333688735962, |
| "learning_rate": 4.99960687516213e-06, |
| "loss": 0.7507, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.06613594611145132, |
| "grad_norm": 0.21817858517169952, |
| "learning_rate": 4.999583410749984e-06, |
| "loss": 0.7329, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.06662584200857317, |
| "grad_norm": 0.2202960103750229, |
| "learning_rate": 4.99955926628659e-06, |
| "loss": 0.7001, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.06711573790569504, |
| "grad_norm": 0.22170418500900269, |
| "learning_rate": 4.999534441778515e-06, |
| "loss": 0.7083, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.0676056338028169, |
| "grad_norm": 0.22274421155452728, |
| "learning_rate": 4.999508937232514e-06, |
| "loss": 0.7545, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.06809552969993876, |
| "grad_norm": 0.22415056824684143, |
| "learning_rate": 4.999482752655524e-06, |
| "loss": 0.7451, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.06858542559706063, |
| "grad_norm": 0.20987579226493835, |
| "learning_rate": 4.999455888054672e-06, |
| "loss": 0.7274, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.06907532149418248, |
| "grad_norm": 0.2241065949201584, |
| "learning_rate": 4.999428343437264e-06, |
| "loss": 0.7422, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.06956521739130435, |
| "grad_norm": 0.2241523563861847, |
| "learning_rate": 4.999400118810794e-06, |
| "loss": 0.73, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.07005511328842622, |
| "grad_norm": 0.21995913982391357, |
| "learning_rate": 4.999371214182944e-06, |
| "loss": 0.7614, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.07054500918554807, |
| "grad_norm": 0.22398804128170013, |
| "learning_rate": 4.999341629561575e-06, |
| "loss": 0.7263, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.07103490508266994, |
| "grad_norm": 0.2156529575586319, |
| "learning_rate": 4.9993113649547364e-06, |
| "loss": 0.759, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.07152480097979179, |
| "grad_norm": 0.22519437968730927, |
| "learning_rate": 4.999280420370664e-06, |
| "loss": 0.7556, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.07201469687691366, |
| "grad_norm": 0.22073966264724731, |
| "learning_rate": 4.999248795817774e-06, |
| "loss": 0.7485, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.07250459277403552, |
| "grad_norm": 0.2195131778717041, |
| "learning_rate": 4.999216491304673e-06, |
| "loss": 0.732, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.07299448867115738, |
| "grad_norm": 0.23442167043685913, |
| "learning_rate": 4.999183506840149e-06, |
| "loss": 0.7593, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.07348438456827924, |
| "grad_norm": 0.225722998380661, |
| "learning_rate": 4.999149842433175e-06, |
| "loss": 0.7417, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.0739742804654011, |
| "grad_norm": 0.22025473415851593, |
| "learning_rate": 4.9991154980929104e-06, |
| "loss": 0.7449, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.07446417636252296, |
| "grad_norm": 0.23160138726234436, |
| "learning_rate": 4.9990804738287005e-06, |
| "loss": 0.7409, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.07495407225964483, |
| "grad_norm": 0.21675211191177368, |
| "learning_rate": 4.999044769650072e-06, |
| "loss": 0.7417, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.07544396815676668, |
| "grad_norm": 0.22495831549167633, |
| "learning_rate": 4.999008385566741e-06, |
| "loss": 0.7398, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.07593386405388855, |
| "grad_norm": 0.22678421437740326, |
| "learning_rate": 4.9989713215886036e-06, |
| "loss": 0.7217, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.07642375995101042, |
| "grad_norm": 0.21712447702884674, |
| "learning_rate": 4.998933577725746e-06, |
| "loss": 0.7394, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.07691365584813227, |
| "grad_norm": 0.2327713668346405, |
| "learning_rate": 4.998895153988437e-06, |
| "loss": 0.7475, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.07740355174525414, |
| "grad_norm": 0.21809875965118408, |
| "learning_rate": 4.998856050387129e-06, |
| "loss": 0.7397, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.07789344764237599, |
| "grad_norm": 0.2248068004846573, |
| "learning_rate": 4.998816266932462e-06, |
| "loss": 0.7504, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.07838334353949786, |
| "grad_norm": 0.22332903742790222, |
| "learning_rate": 4.998775803635259e-06, |
| "loss": 0.7187, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.07887323943661972, |
| "grad_norm": 0.23418764770030975, |
| "learning_rate": 4.998734660506529e-06, |
| "loss": 0.7269, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.07936313533374158, |
| "grad_norm": 0.21334218978881836, |
| "learning_rate": 4.998692837557465e-06, |
| "loss": 0.7228, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.07985303123086344, |
| "grad_norm": 0.2249280959367752, |
| "learning_rate": 4.998650334799446e-06, |
| "loss": 0.7569, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.0803429271279853, |
| "grad_norm": 0.22758226096630096, |
| "learning_rate": 4.998607152244036e-06, |
| "loss": 0.7351, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.08083282302510716, |
| "grad_norm": 0.21682260930538177, |
| "learning_rate": 4.998563289902984e-06, |
| "loss": 0.705, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.08132271892222903, |
| "grad_norm": 0.22186994552612305, |
| "learning_rate": 4.998518747788221e-06, |
| "loss": 0.6869, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.08181261481935088, |
| "grad_norm": 0.2218897044658661, |
| "learning_rate": 4.9984735259118675e-06, |
| "loss": 0.7516, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.08230251071647275, |
| "grad_norm": 0.24786899983882904, |
| "learning_rate": 4.9984276242862265e-06, |
| "loss": 0.7292, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.0827924066135946, |
| "grad_norm": 0.22796982526779175, |
| "learning_rate": 4.998381042923786e-06, |
| "loss": 0.708, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.08328230251071647, |
| "grad_norm": 0.2350655496120453, |
| "learning_rate": 4.998333781837219e-06, |
| "loss": 0.7427, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.08377219840783834, |
| "grad_norm": 0.21661469340324402, |
| "learning_rate": 4.998285841039384e-06, |
| "loss": 0.7173, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.08426209430496019, |
| "grad_norm": 0.2350396364927292, |
| "learning_rate": 4.998237220543324e-06, |
| "loss": 0.7485, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.08475199020208206, |
| "grad_norm": 0.2210429608821869, |
| "learning_rate": 4.998187920362268e-06, |
| "loss": 0.7455, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.08524188609920393, |
| "grad_norm": 0.23016513884067535, |
| "learning_rate": 4.998137940509626e-06, |
| "loss": 0.7116, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.08573178199632578, |
| "grad_norm": 0.21964678168296814, |
| "learning_rate": 4.998087280998999e-06, |
| "loss": 0.7356, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.08622167789344765, |
| "grad_norm": 0.22432510554790497, |
| "learning_rate": 4.998035941844167e-06, |
| "loss": 0.7315, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.0867115737905695, |
| "grad_norm": 0.2379545122385025, |
| "learning_rate": 4.9979839230591e-06, |
| "loss": 0.7165, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.08720146968769137, |
| "grad_norm": 0.234178826212883, |
| "learning_rate": 4.99793122465795e-06, |
| "loss": 0.7378, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.08769136558481323, |
| "grad_norm": 0.216177836060524, |
| "learning_rate": 4.9978778466550524e-06, |
| "loss": 0.6997, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.08818126148193509, |
| "grad_norm": 0.2306894212961197, |
| "learning_rate": 4.9978237890649315e-06, |
| "loss": 0.7341, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.08867115737905695, |
| "grad_norm": 0.21481899917125702, |
| "learning_rate": 4.9977690519022945e-06, |
| "loss": 0.7151, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.0891610532761788, |
| "grad_norm": 0.2187681943178177, |
| "learning_rate": 4.997713635182033e-06, |
| "loss": 0.7497, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.08965094917330067, |
| "grad_norm": 0.22013579308986664, |
| "learning_rate": 4.997657538919224e-06, |
| "loss": 0.7094, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.09014084507042254, |
| "grad_norm": 0.22705145180225372, |
| "learning_rate": 4.99760076312913e-06, |
| "loss": 0.7298, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.0906307409675444, |
| "grad_norm": 0.2299501895904541, |
| "learning_rate": 4.997543307827196e-06, |
| "loss": 0.7584, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.09112063686466626, |
| "grad_norm": 0.22536176443099976, |
| "learning_rate": 4.997485173029056e-06, |
| "loss": 0.711, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.09161053276178811, |
| "grad_norm": 0.22876884043216705, |
| "learning_rate": 4.997426358750525e-06, |
| "loss": 0.6999, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.09210042865890998, |
| "grad_norm": 0.2220255434513092, |
| "learning_rate": 4.997366865007605e-06, |
| "loss": 0.7033, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.09259032455603185, |
| "grad_norm": 0.22282293438911438, |
| "learning_rate": 4.9973066918164815e-06, |
| "loss": 0.7296, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.0930802204531537, |
| "grad_norm": 0.2244081050157547, |
| "learning_rate": 4.997245839193526e-06, |
| "loss": 0.7217, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.09357011635027557, |
| "grad_norm": 0.24491725862026215, |
| "learning_rate": 4.997184307155294e-06, |
| "loss": 0.6933, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.09406001224739743, |
| "grad_norm": 0.2341097742319107, |
| "learning_rate": 4.997122095718527e-06, |
| "loss": 0.7156, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.09454990814451929, |
| "grad_norm": 0.23293037712574005, |
| "learning_rate": 4.997059204900151e-06, |
| "loss": 0.7219, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.09503980404164115, |
| "grad_norm": 0.224905863404274, |
| "learning_rate": 4.996995634717276e-06, |
| "loss": 0.6925, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.09552969993876301, |
| "grad_norm": 0.23174867033958435, |
| "learning_rate": 4.996931385187195e-06, |
| "loss": 0.7354, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.09601959583588487, |
| "grad_norm": 0.2354530245065689, |
| "learning_rate": 4.996866456327392e-06, |
| "loss": 0.7218, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.09650949173300674, |
| "grad_norm": 0.22878359258174896, |
| "learning_rate": 4.996800848155529e-06, |
| "loss": 0.7289, |
| "step": 197 |
| }, |
| { |
| "epoch": 0.0969993876301286, |
| "grad_norm": 0.22221261262893677, |
| "learning_rate": 4.996734560689457e-06, |
| "loss": 0.7276, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.09748928352725046, |
| "grad_norm": 0.23485805094242096, |
| "learning_rate": 4.9966675939472094e-06, |
| "loss": 0.7407, |
| "step": 199 |
| }, |
| { |
| "epoch": 0.09797917942437231, |
| "grad_norm": 0.2275373488664627, |
| "learning_rate": 4.996599947947009e-06, |
| "loss": 0.7187, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.09846907532149418, |
| "grad_norm": 0.22819741070270538, |
| "learning_rate": 4.996531622707255e-06, |
| "loss": 0.717, |
| "step": 201 |
| }, |
| { |
| "epoch": 0.09895897121861605, |
| "grad_norm": 0.24706602096557617, |
| "learning_rate": 4.99646261824654e-06, |
| "loss": 0.7126, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.0994488671157379, |
| "grad_norm": 0.2240106165409088, |
| "learning_rate": 4.996392934583636e-06, |
| "loss": 0.7269, |
| "step": 203 |
| }, |
| { |
| "epoch": 0.09993876301285977, |
| "grad_norm": 0.23807524144649506, |
| "learning_rate": 4.996322571737502e-06, |
| "loss": 0.719, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.10042865890998164, |
| "grad_norm": 0.23285457491874695, |
| "learning_rate": 4.996251529727282e-06, |
| "loss": 0.7088, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.10091855480710349, |
| "grad_norm": 0.227351576089859, |
| "learning_rate": 4.996179808572303e-06, |
| "loss": 0.74, |
| "step": 206 |
| }, |
| { |
| "epoch": 0.10140845070422536, |
| "grad_norm": 0.2282775193452835, |
| "learning_rate": 4.996107408292079e-06, |
| "loss": 0.7037, |
| "step": 207 |
| }, |
| { |
| "epoch": 0.10189834660134721, |
| "grad_norm": 0.23110702633857727, |
| "learning_rate": 4.996034328906308e-06, |
| "loss": 0.7153, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.10238824249846908, |
| "grad_norm": 0.2378554791212082, |
| "learning_rate": 4.995960570434869e-06, |
| "loss": 0.7097, |
| "step": 209 |
| }, |
| { |
| "epoch": 0.10287813839559094, |
| "grad_norm": 0.2271486073732376, |
| "learning_rate": 4.995886132897833e-06, |
| "loss": 0.7112, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.1033680342927128, |
| "grad_norm": 0.24688458442687988, |
| "learning_rate": 4.99581101631545e-06, |
| "loss": 0.6905, |
| "step": 211 |
| }, |
| { |
| "epoch": 0.10385793018983466, |
| "grad_norm": 0.23487895727157593, |
| "learning_rate": 4.995735220708158e-06, |
| "loss": 0.6851, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.10434782608695652, |
| "grad_norm": 0.23712845146656036, |
| "learning_rate": 4.995658746096577e-06, |
| "loss": 0.7162, |
| "step": 213 |
| }, |
| { |
| "epoch": 0.10483772198407838, |
| "grad_norm": 0.23982517421245575, |
| "learning_rate": 4.995581592501514e-06, |
| "loss": 0.7172, |
| "step": 214 |
| }, |
| { |
| "epoch": 0.10532761788120025, |
| "grad_norm": 0.24245846271514893, |
| "learning_rate": 4.99550375994396e-06, |
| "loss": 0.7461, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.1058175137783221, |
| "grad_norm": 0.25469428300857544, |
| "learning_rate": 4.995425248445089e-06, |
| "loss": 0.7231, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.10630740967544397, |
| "grad_norm": 0.23790335655212402, |
| "learning_rate": 4.995346058026263e-06, |
| "loss": 0.6956, |
| "step": 217 |
| }, |
| { |
| "epoch": 0.10679730557256582, |
| "grad_norm": 0.23641760647296906, |
| "learning_rate": 4.995266188709027e-06, |
| "loss": 0.7264, |
| "step": 218 |
| }, |
| { |
| "epoch": 0.10728720146968769, |
| "grad_norm": 0.24292510747909546, |
| "learning_rate": 4.99518564051511e-06, |
| "loss": 0.6772, |
| "step": 219 |
| }, |
| { |
| "epoch": 0.10777709736680956, |
| "grad_norm": 0.2255912572145462, |
| "learning_rate": 4.995104413466426e-06, |
| "loss": 0.6805, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.10826699326393141, |
| "grad_norm": 0.24550217390060425, |
| "learning_rate": 4.995022507585075e-06, |
| "loss": 0.7456, |
| "step": 221 |
| }, |
| { |
| "epoch": 0.10875688916105328, |
| "grad_norm": 0.24428114295005798, |
| "learning_rate": 4.99493992289334e-06, |
| "loss": 0.7485, |
| "step": 222 |
| }, |
| { |
| "epoch": 0.10924678505817514, |
| "grad_norm": 0.2401362955570221, |
| "learning_rate": 4.994856659413691e-06, |
| "loss": 0.7092, |
| "step": 223 |
| }, |
| { |
| "epoch": 0.109736680955297, |
| "grad_norm": 0.24536854028701782, |
| "learning_rate": 4.99477271716878e-06, |
| "loss": 0.6942, |
| "step": 224 |
| }, |
| { |
| "epoch": 0.11022657685241886, |
| "grad_norm": 0.23893441259860992, |
| "learning_rate": 4.994688096181444e-06, |
| "loss": 0.7108, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.11071647274954072, |
| "grad_norm": 0.2415887713432312, |
| "learning_rate": 4.994602796474707e-06, |
| "loss": 0.6844, |
| "step": 226 |
| }, |
| { |
| "epoch": 0.11120636864666258, |
| "grad_norm": 0.2488085925579071, |
| "learning_rate": 4.994516818071775e-06, |
| "loss": 0.7047, |
| "step": 227 |
| }, |
| { |
| "epoch": 0.11169626454378445, |
| "grad_norm": 0.24722400307655334, |
| "learning_rate": 4.994430160996041e-06, |
| "loss": 0.7382, |
| "step": 228 |
| }, |
| { |
| "epoch": 0.1121861604409063, |
| "grad_norm": 0.24387723207473755, |
| "learning_rate": 4.99434282527108e-06, |
| "loss": 0.7109, |
| "step": 229 |
| }, |
| { |
| "epoch": 0.11267605633802817, |
| "grad_norm": 0.24834807217121124, |
| "learning_rate": 4.9942548109206545e-06, |
| "loss": 0.7077, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.11316595223515002, |
| "grad_norm": 0.23779404163360596, |
| "learning_rate": 4.994166117968709e-06, |
| "loss": 0.6803, |
| "step": 231 |
| }, |
| { |
| "epoch": 0.11365584813227189, |
| "grad_norm": 0.24619951844215393, |
| "learning_rate": 4.994076746439375e-06, |
| "loss": 0.7001, |
| "step": 232 |
| }, |
| { |
| "epoch": 0.11414574402939376, |
| "grad_norm": 0.23684336245059967, |
| "learning_rate": 4.993986696356966e-06, |
| "loss": 0.7151, |
| "step": 233 |
| }, |
| { |
| "epoch": 0.11463563992651561, |
| "grad_norm": 0.26148277521133423, |
| "learning_rate": 4.993895967745983e-06, |
| "loss": 0.6959, |
| "step": 234 |
| }, |
| { |
| "epoch": 0.11512553582363748, |
| "grad_norm": 0.24303443729877472, |
| "learning_rate": 4.993804560631109e-06, |
| "loss": 0.7427, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.11561543172075933, |
| "grad_norm": 0.23858515918254852, |
| "learning_rate": 4.993712475037213e-06, |
| "loss": 0.7167, |
| "step": 236 |
| }, |
| { |
| "epoch": 0.1161053276178812, |
| "grad_norm": 0.23997746407985687, |
| "learning_rate": 4.993619710989349e-06, |
| "loss": 0.7456, |
| "step": 237 |
| }, |
| { |
| "epoch": 0.11659522351500307, |
| "grad_norm": 0.24167482554912567, |
| "learning_rate": 4.993526268512755e-06, |
| "loss": 0.7072, |
| "step": 238 |
| }, |
| { |
| "epoch": 0.11708511941212492, |
| "grad_norm": 0.2411864548921585, |
| "learning_rate": 4.9934321476328515e-06, |
| "loss": 0.6987, |
| "step": 239 |
| }, |
| { |
| "epoch": 0.11757501530924679, |
| "grad_norm": 0.2430465817451477, |
| "learning_rate": 4.993337348375249e-06, |
| "loss": 0.7009, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.11806491120636865, |
| "grad_norm": 0.2417476773262024, |
| "learning_rate": 4.993241870765736e-06, |
| "loss": 0.695, |
| "step": 241 |
| }, |
| { |
| "epoch": 0.1185548071034905, |
| "grad_norm": 0.24546459317207336, |
| "learning_rate": 4.99314571483029e-06, |
| "loss": 0.7091, |
| "step": 242 |
| }, |
| { |
| "epoch": 0.11904470300061237, |
| "grad_norm": 0.2377326339483261, |
| "learning_rate": 4.993048880595072e-06, |
| "loss": 0.7005, |
| "step": 243 |
| }, |
| { |
| "epoch": 0.11953459889773423, |
| "grad_norm": 0.2439325898885727, |
| "learning_rate": 4.992951368086427e-06, |
| "loss": 0.6956, |
| "step": 244 |
| }, |
| { |
| "epoch": 0.1200244947948561, |
| "grad_norm": 0.2318105548620224, |
| "learning_rate": 4.992853177330884e-06, |
| "loss": 0.708, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.12051439069197796, |
| "grad_norm": 0.24266572296619415, |
| "learning_rate": 4.992754308355159e-06, |
| "loss": 0.7126, |
| "step": 246 |
| }, |
| { |
| "epoch": 0.12100428658909981, |
| "grad_norm": 0.24958163499832153, |
| "learning_rate": 4.99265476118615e-06, |
| "loss": 0.7165, |
| "step": 247 |
| }, |
| { |
| "epoch": 0.12149418248622168, |
| "grad_norm": 0.24117092788219452, |
| "learning_rate": 4.99255453585094e-06, |
| "loss": 0.6977, |
| "step": 248 |
| }, |
| { |
| "epoch": 0.12198407838334353, |
| "grad_norm": 0.24092306196689606, |
| "learning_rate": 4.992453632376797e-06, |
| "loss": 0.7179, |
| "step": 249 |
| }, |
| { |
| "epoch": 0.1224739742804654, |
| "grad_norm": 0.24704957008361816, |
| "learning_rate": 4.992352050791175e-06, |
| "loss": 0.6955, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.12296387017758727, |
| "grad_norm": 0.23530827462673187, |
| "learning_rate": 4.992249791121709e-06, |
| "loss": 0.7051, |
| "step": 251 |
| }, |
| { |
| "epoch": 0.12345376607470912, |
| "grad_norm": 0.2405635416507721, |
| "learning_rate": 4.992146853396219e-06, |
| "loss": 0.7247, |
| "step": 252 |
| }, |
| { |
| "epoch": 0.12394366197183099, |
| "grad_norm": 0.23462170362472534, |
| "learning_rate": 4.992043237642715e-06, |
| "loss": 0.709, |
| "step": 253 |
| }, |
| { |
| "epoch": 0.12443355786895285, |
| "grad_norm": 0.24728280305862427, |
| "learning_rate": 4.991938943889384e-06, |
| "loss": 0.6904, |
| "step": 254 |
| }, |
| { |
| "epoch": 0.12492345376607471, |
| "grad_norm": 0.2390342354774475, |
| "learning_rate": 4.991833972164602e-06, |
| "loss": 0.7031, |
| "step": 255 |
| }, |
| { |
| "epoch": 0.12541334966319656, |
| "grad_norm": 0.25283288955688477, |
| "learning_rate": 4.991728322496928e-06, |
| "loss": 0.717, |
| "step": 256 |
| }, |
| { |
| "epoch": 0.12590324556031843, |
| "grad_norm": 0.23909400403499603, |
| "learning_rate": 4.991621994915105e-06, |
| "loss": 0.6916, |
| "step": 257 |
| }, |
| { |
| "epoch": 0.1263931414574403, |
| "grad_norm": 0.24953396618366241, |
| "learning_rate": 4.991514989448063e-06, |
| "loss": 0.6901, |
| "step": 258 |
| }, |
| { |
| "epoch": 0.12688303735456216, |
| "grad_norm": 0.24343158304691315, |
| "learning_rate": 4.991407306124914e-06, |
| "loss": 0.7018, |
| "step": 259 |
| }, |
| { |
| "epoch": 0.12737293325168403, |
| "grad_norm": 0.24725763499736786, |
| "learning_rate": 4.991298944974953e-06, |
| "loss": 0.6996, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.12786282914880587, |
| "grad_norm": 0.25944840908050537, |
| "learning_rate": 4.991189906027663e-06, |
| "loss": 0.7083, |
| "step": 261 |
| }, |
| { |
| "epoch": 0.12835272504592773, |
| "grad_norm": 0.2454294115304947, |
| "learning_rate": 4.991080189312709e-06, |
| "loss": 0.7058, |
| "step": 262 |
| }, |
| { |
| "epoch": 0.1288426209430496, |
| "grad_norm": 0.2501434087753296, |
| "learning_rate": 4.990969794859941e-06, |
| "loss": 0.7102, |
| "step": 263 |
| }, |
| { |
| "epoch": 0.12933251684017147, |
| "grad_norm": 0.26826831698417664, |
| "learning_rate": 4.990858722699395e-06, |
| "loss": 0.7305, |
| "step": 264 |
| }, |
| { |
| "epoch": 0.12982241273729334, |
| "grad_norm": 0.23846790194511414, |
| "learning_rate": 4.990746972861289e-06, |
| "loss": 0.6752, |
| "step": 265 |
| }, |
| { |
| "epoch": 0.13031230863441517, |
| "grad_norm": 0.25017425417900085, |
| "learning_rate": 4.990634545376027e-06, |
| "loss": 0.7062, |
| "step": 266 |
| }, |
| { |
| "epoch": 0.13080220453153704, |
| "grad_norm": 0.2652483880519867, |
| "learning_rate": 4.990521440274195e-06, |
| "loss": 0.7117, |
| "step": 267 |
| }, |
| { |
| "epoch": 0.1312921004286589, |
| "grad_norm": 0.24066515266895294, |
| "learning_rate": 4.990407657586568e-06, |
| "loss": 0.7193, |
| "step": 268 |
| }, |
| { |
| "epoch": 0.13178199632578078, |
| "grad_norm": 0.2460177093744278, |
| "learning_rate": 4.990293197344098e-06, |
| "loss": 0.6644, |
| "step": 269 |
| }, |
| { |
| "epoch": 0.13227189222290264, |
| "grad_norm": 0.2507753372192383, |
| "learning_rate": 4.99017805957793e-06, |
| "loss": 0.7114, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.13276178812002448, |
| "grad_norm": 0.25557348132133484, |
| "learning_rate": 4.990062244319387e-06, |
| "loss": 0.6895, |
| "step": 271 |
| }, |
| { |
| "epoch": 0.13325168401714635, |
| "grad_norm": 0.2597479522228241, |
| "learning_rate": 4.989945751599978e-06, |
| "loss": 0.7285, |
| "step": 272 |
| }, |
| { |
| "epoch": 0.13374157991426822, |
| "grad_norm": 0.2611940801143646, |
| "learning_rate": 4.989828581451398e-06, |
| "loss": 0.7296, |
| "step": 273 |
| }, |
| { |
| "epoch": 0.13423147581139008, |
| "grad_norm": 0.2552890479564667, |
| "learning_rate": 4.989710733905524e-06, |
| "loss": 0.7324, |
| "step": 274 |
| }, |
| { |
| "epoch": 0.13472137170851195, |
| "grad_norm": 0.2605132460594177, |
| "learning_rate": 4.9895922089944185e-06, |
| "loss": 0.6969, |
| "step": 275 |
| }, |
| { |
| "epoch": 0.1352112676056338, |
| "grad_norm": 0.25689518451690674, |
| "learning_rate": 4.989473006750328e-06, |
| "loss": 0.7032, |
| "step": 276 |
| }, |
| { |
| "epoch": 0.13570116350275566, |
| "grad_norm": 0.25465458631515503, |
| "learning_rate": 4.989353127205684e-06, |
| "loss": 0.6865, |
| "step": 277 |
| }, |
| { |
| "epoch": 0.13619105939987752, |
| "grad_norm": 0.25785622000694275, |
| "learning_rate": 4.989232570393101e-06, |
| "loss": 0.7007, |
| "step": 278 |
| }, |
| { |
| "epoch": 0.1366809552969994, |
| "grad_norm": 0.25546690821647644, |
| "learning_rate": 4.989111336345378e-06, |
| "loss": 0.7342, |
| "step": 279 |
| }, |
| { |
| "epoch": 0.13717085119412126, |
| "grad_norm": 0.25426116585731506, |
| "learning_rate": 4.9889894250955005e-06, |
| "loss": 0.6978, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.13766074709124312, |
| "grad_norm": 0.25376084446907043, |
| "learning_rate": 4.988866836676635e-06, |
| "loss": 0.6856, |
| "step": 281 |
| }, |
| { |
| "epoch": 0.13815064298836496, |
| "grad_norm": 0.2518889307975769, |
| "learning_rate": 4.988743571122133e-06, |
| "loss": 0.6964, |
| "step": 282 |
| }, |
| { |
| "epoch": 0.13864053888548683, |
| "grad_norm": 0.24486613273620605, |
| "learning_rate": 4.9886196284655315e-06, |
| "loss": 0.7082, |
| "step": 283 |
| }, |
| { |
| "epoch": 0.1391304347826087, |
| "grad_norm": 0.26591217517852783, |
| "learning_rate": 4.988495008740552e-06, |
| "loss": 0.6737, |
| "step": 284 |
| }, |
| { |
| "epoch": 0.13962033067973056, |
| "grad_norm": 0.2720721960067749, |
| "learning_rate": 4.988369711981097e-06, |
| "loss": 0.7142, |
| "step": 285 |
| }, |
| { |
| "epoch": 0.14011022657685243, |
| "grad_norm": 0.2502274215221405, |
| "learning_rate": 4.988243738221258e-06, |
| "loss": 0.6928, |
| "step": 286 |
| }, |
| { |
| "epoch": 0.14060012247397427, |
| "grad_norm": 0.2472478747367859, |
| "learning_rate": 4.988117087495306e-06, |
| "loss": 0.7081, |
| "step": 287 |
| }, |
| { |
| "epoch": 0.14109001837109614, |
| "grad_norm": 0.2597702145576477, |
| "learning_rate": 4.9879897598377005e-06, |
| "loss": 0.7252, |
| "step": 288 |
| }, |
| { |
| "epoch": 0.141579914268218, |
| "grad_norm": 0.2554181218147278, |
| "learning_rate": 4.987861755283081e-06, |
| "loss": 0.6871, |
| "step": 289 |
| }, |
| { |
| "epoch": 0.14206981016533987, |
| "grad_norm": 0.2611641585826874, |
| "learning_rate": 4.9877330738662755e-06, |
| "loss": 0.6922, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.14255970606246174, |
| "grad_norm": 0.2400081306695938, |
| "learning_rate": 4.987603715622291e-06, |
| "loss": 0.6572, |
| "step": 291 |
| }, |
| { |
| "epoch": 0.14304960195958358, |
| "grad_norm": 0.2615026831626892, |
| "learning_rate": 4.987473680586323e-06, |
| "loss": 0.7023, |
| "step": 292 |
| }, |
| { |
| "epoch": 0.14353949785670544, |
| "grad_norm": 0.25383347272872925, |
| "learning_rate": 4.98734296879375e-06, |
| "loss": 0.6958, |
| "step": 293 |
| }, |
| { |
| "epoch": 0.1440293937538273, |
| "grad_norm": 0.25849246978759766, |
| "learning_rate": 4.987211580280133e-06, |
| "loss": 0.7196, |
| "step": 294 |
| }, |
| { |
| "epoch": 0.14451928965094918, |
| "grad_norm": 0.26360154151916504, |
| "learning_rate": 4.987079515081219e-06, |
| "loss": 0.7121, |
| "step": 295 |
| }, |
| { |
| "epoch": 0.14500918554807105, |
| "grad_norm": 0.2684732973575592, |
| "learning_rate": 4.986946773232939e-06, |
| "loss": 0.7198, |
| "step": 296 |
| }, |
| { |
| "epoch": 0.14549908144519288, |
| "grad_norm": 0.2610439360141754, |
| "learning_rate": 4.986813354771406e-06, |
| "loss": 0.6924, |
| "step": 297 |
| }, |
| { |
| "epoch": 0.14598897734231475, |
| "grad_norm": 0.25493988394737244, |
| "learning_rate": 4.986679259732919e-06, |
| "loss": 0.7354, |
| "step": 298 |
| }, |
| { |
| "epoch": 0.14647887323943662, |
| "grad_norm": 0.2612632215023041, |
| "learning_rate": 4.986544488153963e-06, |
| "loss": 0.6878, |
| "step": 299 |
| }, |
| { |
| "epoch": 0.14696876913655849, |
| "grad_norm": 0.25811073184013367, |
| "learning_rate": 4.986409040071202e-06, |
| "loss": 0.6793, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.14745866503368035, |
| "grad_norm": 0.2634781301021576, |
| "learning_rate": 4.986272915521487e-06, |
| "loss": 0.7061, |
| "step": 301 |
| }, |
| { |
| "epoch": 0.1479485609308022, |
| "grad_norm": 0.25826555490493774, |
| "learning_rate": 4.9861361145418545e-06, |
| "loss": 0.664, |
| "step": 302 |
| }, |
| { |
| "epoch": 0.14843845682792406, |
| "grad_norm": 0.24985066056251526, |
| "learning_rate": 4.985998637169522e-06, |
| "loss": 0.6932, |
| "step": 303 |
| }, |
| { |
| "epoch": 0.14892835272504593, |
| "grad_norm": 0.25596854090690613, |
| "learning_rate": 4.985860483441894e-06, |
| "loss": 0.6568, |
| "step": 304 |
| }, |
| { |
| "epoch": 0.1494182486221678, |
| "grad_norm": 0.2568427324295044, |
| "learning_rate": 4.985721653396555e-06, |
| "loss": 0.6889, |
| "step": 305 |
| }, |
| { |
| "epoch": 0.14990814451928966, |
| "grad_norm": 0.2716825604438782, |
| "learning_rate": 4.985582147071277e-06, |
| "loss": 0.6813, |
| "step": 306 |
| }, |
| { |
| "epoch": 0.1503980404164115, |
| "grad_norm": 0.2683699131011963, |
| "learning_rate": 4.985441964504015e-06, |
| "loss": 0.6805, |
| "step": 307 |
| }, |
| { |
| "epoch": 0.15088793631353337, |
| "grad_norm": 0.2588299810886383, |
| "learning_rate": 4.985301105732908e-06, |
| "loss": 0.6981, |
| "step": 308 |
| }, |
| { |
| "epoch": 0.15137783221065523, |
| "grad_norm": 0.24666503071784973, |
| "learning_rate": 4.985159570796279e-06, |
| "loss": 0.6898, |
| "step": 309 |
| }, |
| { |
| "epoch": 0.1518677281077771, |
| "grad_norm": 0.2546054720878601, |
| "learning_rate": 4.985017359732636e-06, |
| "loss": 0.6994, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.15235762400489897, |
| "grad_norm": 0.24838140606880188, |
| "learning_rate": 4.9848744725806666e-06, |
| "loss": 0.7174, |
| "step": 311 |
| }, |
| { |
| "epoch": 0.15284751990202083, |
| "grad_norm": 0.2766893804073334, |
| "learning_rate": 4.984730909379248e-06, |
| "loss": 0.7009, |
| "step": 312 |
| }, |
| { |
| "epoch": 0.15333741579914267, |
| "grad_norm": 0.2599968910217285, |
| "learning_rate": 4.984586670167438e-06, |
| "loss": 0.687, |
| "step": 313 |
| }, |
| { |
| "epoch": 0.15382731169626454, |
| "grad_norm": 0.2503408193588257, |
| "learning_rate": 4.984441754984479e-06, |
| "loss": 0.732, |
| "step": 314 |
| }, |
| { |
| "epoch": 0.1543172075933864, |
| "grad_norm": 0.2544567883014679, |
| "learning_rate": 4.984296163869798e-06, |
| "loss": 0.6474, |
| "step": 315 |
| }, |
| { |
| "epoch": 0.15480710349050827, |
| "grad_norm": 0.25679001212120056, |
| "learning_rate": 4.9841498968630055e-06, |
| "loss": 0.7131, |
| "step": 316 |
| }, |
| { |
| "epoch": 0.15529699938763014, |
| "grad_norm": 0.2618502080440521, |
| "learning_rate": 4.984002954003895e-06, |
| "loss": 0.7174, |
| "step": 317 |
| }, |
| { |
| "epoch": 0.15578689528475198, |
| "grad_norm": 0.268387109041214, |
| "learning_rate": 4.9838553353324445e-06, |
| "loss": 0.7087, |
| "step": 318 |
| }, |
| { |
| "epoch": 0.15627679118187385, |
| "grad_norm": 0.25605982542037964, |
| "learning_rate": 4.9837070408888175e-06, |
| "loss": 0.6813, |
| "step": 319 |
| }, |
| { |
| "epoch": 0.15676668707899571, |
| "grad_norm": 0.26913389563560486, |
| "learning_rate": 4.983558070713359e-06, |
| "loss": 0.73, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.15725658297611758, |
| "grad_norm": 0.26553523540496826, |
| "learning_rate": 4.983408424846597e-06, |
| "loss": 0.6842, |
| "step": 321 |
| }, |
| { |
| "epoch": 0.15774647887323945, |
| "grad_norm": 0.2679034471511841, |
| "learning_rate": 4.983258103329248e-06, |
| "loss": 0.681, |
| "step": 322 |
| }, |
| { |
| "epoch": 0.1582363747703613, |
| "grad_norm": 0.2610563039779663, |
| "learning_rate": 4.983107106202209e-06, |
| "loss": 0.6812, |
| "step": 323 |
| }, |
| { |
| "epoch": 0.15872627066748315, |
| "grad_norm": 0.25914549827575684, |
| "learning_rate": 4.982955433506558e-06, |
| "loss": 0.6934, |
| "step": 324 |
| }, |
| { |
| "epoch": 0.15921616656460502, |
| "grad_norm": 0.25528278946876526, |
| "learning_rate": 4.982803085283563e-06, |
| "loss": 0.7034, |
| "step": 325 |
| }, |
| { |
| "epoch": 0.1597060624617269, |
| "grad_norm": 0.2526107132434845, |
| "learning_rate": 4.982650061574672e-06, |
| "loss": 0.704, |
| "step": 326 |
| }, |
| { |
| "epoch": 0.16019595835884876, |
| "grad_norm": 0.25860223174095154, |
| "learning_rate": 4.982496362421517e-06, |
| "loss": 0.6896, |
| "step": 327 |
| }, |
| { |
| "epoch": 0.1606858542559706, |
| "grad_norm": 0.2585155665874481, |
| "learning_rate": 4.982341987865914e-06, |
| "loss": 0.7077, |
| "step": 328 |
| }, |
| { |
| "epoch": 0.16117575015309246, |
| "grad_norm": 0.25666436553001404, |
| "learning_rate": 4.982186937949864e-06, |
| "loss": 0.6887, |
| "step": 329 |
| }, |
| { |
| "epoch": 0.16166564605021433, |
| "grad_norm": 0.248607337474823, |
| "learning_rate": 4.9820312127155515e-06, |
| "loss": 0.7001, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.1621555419473362, |
| "grad_norm": 0.28019267320632935, |
| "learning_rate": 4.981874812205341e-06, |
| "loss": 0.7299, |
| "step": 331 |
| }, |
| { |
| "epoch": 0.16264543784445806, |
| "grad_norm": 0.26801958680152893, |
| "learning_rate": 4.981717736461788e-06, |
| "loss": 0.6979, |
| "step": 332 |
| }, |
| { |
| "epoch": 0.1631353337415799, |
| "grad_norm": 0.2580174505710602, |
| "learning_rate": 4.981559985527624e-06, |
| "loss": 0.6632, |
| "step": 333 |
| }, |
| { |
| "epoch": 0.16362522963870177, |
| "grad_norm": 0.26502886414527893, |
| "learning_rate": 4.9814015594457686e-06, |
| "loss": 0.6887, |
| "step": 334 |
| }, |
| { |
| "epoch": 0.16411512553582364, |
| "grad_norm": 0.27120092511177063, |
| "learning_rate": 4.9812424582593246e-06, |
| "loss": 0.7047, |
| "step": 335 |
| }, |
| { |
| "epoch": 0.1646050214329455, |
| "grad_norm": 0.2717300057411194, |
| "learning_rate": 4.981082682011577e-06, |
| "loss": 0.7083, |
| "step": 336 |
| }, |
| { |
| "epoch": 0.16509491733006737, |
| "grad_norm": 0.2662920653820038, |
| "learning_rate": 4.9809222307459984e-06, |
| "loss": 0.718, |
| "step": 337 |
| }, |
| { |
| "epoch": 0.1655848132271892, |
| "grad_norm": 0.25591158866882324, |
| "learning_rate": 4.980761104506238e-06, |
| "loss": 0.6912, |
| "step": 338 |
| }, |
| { |
| "epoch": 0.16607470912431108, |
| "grad_norm": 0.25804129242897034, |
| "learning_rate": 4.980599303336135e-06, |
| "loss": 0.6812, |
| "step": 339 |
| }, |
| { |
| "epoch": 0.16656460502143294, |
| "grad_norm": 0.26774024963378906, |
| "learning_rate": 4.980436827279709e-06, |
| "loss": 0.7064, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.1670545009185548, |
| "grad_norm": 0.26028621196746826, |
| "learning_rate": 4.980273676381165e-06, |
| "loss": 0.685, |
| "step": 341 |
| }, |
| { |
| "epoch": 0.16754439681567668, |
| "grad_norm": 0.2737005054950714, |
| "learning_rate": 4.980109850684891e-06, |
| "loss": 0.6825, |
| "step": 342 |
| }, |
| { |
| "epoch": 0.16803429271279854, |
| "grad_norm": 0.26772844791412354, |
| "learning_rate": 4.979945350235459e-06, |
| "loss": 0.6999, |
| "step": 343 |
| }, |
| { |
| "epoch": 0.16852418860992038, |
| "grad_norm": 0.2749219834804535, |
| "learning_rate": 4.979780175077621e-06, |
| "loss": 0.67, |
| "step": 344 |
| }, |
| { |
| "epoch": 0.16901408450704225, |
| "grad_norm": 0.2647887170314789, |
| "learning_rate": 4.979614325256318e-06, |
| "loss": 0.7059, |
| "step": 345 |
| }, |
| { |
| "epoch": 0.16950398040416412, |
| "grad_norm": 0.2660578787326813, |
| "learning_rate": 4.979447800816673e-06, |
| "loss": 0.6964, |
| "step": 346 |
| }, |
| { |
| "epoch": 0.16999387630128598, |
| "grad_norm": 0.2642233669757843, |
| "learning_rate": 4.979280601803988e-06, |
| "loss": 0.6668, |
| "step": 347 |
| }, |
| { |
| "epoch": 0.17048377219840785, |
| "grad_norm": 0.26047587394714355, |
| "learning_rate": 4.979112728263755e-06, |
| "loss": 0.7015, |
| "step": 348 |
| }, |
| { |
| "epoch": 0.1709736680955297, |
| "grad_norm": 0.26166731119155884, |
| "learning_rate": 4.9789441802416454e-06, |
| "loss": 0.6964, |
| "step": 349 |
| }, |
| { |
| "epoch": 0.17146356399265156, |
| "grad_norm": 0.26318004727363586, |
| "learning_rate": 4.978774957783517e-06, |
| "loss": 0.7015, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.17195345988977342, |
| "grad_norm": 0.2654951512813568, |
| "learning_rate": 4.978605060935407e-06, |
| "loss": 0.6653, |
| "step": 351 |
| }, |
| { |
| "epoch": 0.1724433557868953, |
| "grad_norm": 0.2767031490802765, |
| "learning_rate": 4.9784344897435405e-06, |
| "loss": 0.6837, |
| "step": 352 |
| }, |
| { |
| "epoch": 0.17293325168401716, |
| "grad_norm": 0.2719745635986328, |
| "learning_rate": 4.978263244254324e-06, |
| "loss": 0.6784, |
| "step": 353 |
| }, |
| { |
| "epoch": 0.173423147581139, |
| "grad_norm": 0.2508717179298401, |
| "learning_rate": 4.978091324514346e-06, |
| "loss": 0.6805, |
| "step": 354 |
| }, |
| { |
| "epoch": 0.17391304347826086, |
| "grad_norm": 0.26405903697013855, |
| "learning_rate": 4.977918730570381e-06, |
| "loss": 0.6521, |
| "step": 355 |
| }, |
| { |
| "epoch": 0.17440293937538273, |
| "grad_norm": 0.26240110397338867, |
| "learning_rate": 4.977745462469386e-06, |
| "loss": 0.675, |
| "step": 356 |
| }, |
| { |
| "epoch": 0.1748928352725046, |
| "grad_norm": 0.2654706835746765, |
| "learning_rate": 4.9775715202585005e-06, |
| "loss": 0.6967, |
| "step": 357 |
| }, |
| { |
| "epoch": 0.17538273116962647, |
| "grad_norm": 0.26780185103416443, |
| "learning_rate": 4.97739690398505e-06, |
| "loss": 0.6808, |
| "step": 358 |
| }, |
| { |
| "epoch": 0.1758726270667483, |
| "grad_norm": 0.27980104088783264, |
| "learning_rate": 4.9772216136965405e-06, |
| "loss": 0.6901, |
| "step": 359 |
| }, |
| { |
| "epoch": 0.17636252296387017, |
| "grad_norm": 0.2825722396373749, |
| "learning_rate": 4.977045649440662e-06, |
| "loss": 0.7147, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.17685241886099204, |
| "grad_norm": 0.28033769130706787, |
| "learning_rate": 4.97686901126529e-06, |
| "loss": 0.69, |
| "step": 361 |
| }, |
| { |
| "epoch": 0.1773423147581139, |
| "grad_norm": 0.2616003751754761, |
| "learning_rate": 4.976691699218479e-06, |
| "loss": 0.679, |
| "step": 362 |
| }, |
| { |
| "epoch": 0.17783221065523577, |
| "grad_norm": 0.26597169041633606, |
| "learning_rate": 4.976513713348472e-06, |
| "loss": 0.6817, |
| "step": 363 |
| }, |
| { |
| "epoch": 0.1783221065523576, |
| "grad_norm": 0.2727013826370239, |
| "learning_rate": 4.976335053703692e-06, |
| "loss": 0.6878, |
| "step": 364 |
| }, |
| { |
| "epoch": 0.17881200244947948, |
| "grad_norm": 0.2789647579193115, |
| "learning_rate": 4.976155720332747e-06, |
| "loss": 0.6942, |
| "step": 365 |
| }, |
| { |
| "epoch": 0.17930189834660135, |
| "grad_norm": 0.26109856367111206, |
| "learning_rate": 4.975975713284426e-06, |
| "loss": 0.6992, |
| "step": 366 |
| }, |
| { |
| "epoch": 0.1797917942437232, |
| "grad_norm": 0.2630729675292969, |
| "learning_rate": 4.975795032607703e-06, |
| "loss": 0.7101, |
| "step": 367 |
| }, |
| { |
| "epoch": 0.18028169014084508, |
| "grad_norm": 0.28116390109062195, |
| "learning_rate": 4.9756136783517365e-06, |
| "loss": 0.6843, |
| "step": 368 |
| }, |
| { |
| "epoch": 0.18077158603796692, |
| "grad_norm": 0.26151201128959656, |
| "learning_rate": 4.975431650565865e-06, |
| "loss": 0.6973, |
| "step": 369 |
| }, |
| { |
| "epoch": 0.1812614819350888, |
| "grad_norm": 0.2683990001678467, |
| "learning_rate": 4.975248949299613e-06, |
| "loss": 0.7055, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.18175137783221065, |
| "grad_norm": 0.26030197739601135, |
| "learning_rate": 4.9750655746026875e-06, |
| "loss": 0.7008, |
| "step": 371 |
| }, |
| { |
| "epoch": 0.18224127372933252, |
| "grad_norm": 0.26876166462898254, |
| "learning_rate": 4.974881526524978e-06, |
| "loss": 0.694, |
| "step": 372 |
| }, |
| { |
| "epoch": 0.1827311696264544, |
| "grad_norm": 0.2684592604637146, |
| "learning_rate": 4.974696805116558e-06, |
| "loss": 0.7156, |
| "step": 373 |
| }, |
| { |
| "epoch": 0.18322106552357623, |
| "grad_norm": 0.27490681409835815, |
| "learning_rate": 4.974511410427683e-06, |
| "loss": 0.6843, |
| "step": 374 |
| }, |
| { |
| "epoch": 0.1837109614206981, |
| "grad_norm": 0.2698623239994049, |
| "learning_rate": 4.974325342508793e-06, |
| "loss": 0.6917, |
| "step": 375 |
| }, |
| { |
| "epoch": 0.18420085731781996, |
| "grad_norm": 0.27144962549209595, |
| "learning_rate": 4.974138601410513e-06, |
| "loss": 0.7152, |
| "step": 376 |
| }, |
| { |
| "epoch": 0.18469075321494183, |
| "grad_norm": 0.26694294810295105, |
| "learning_rate": 4.9739511871836454e-06, |
| "loss": 0.6715, |
| "step": 377 |
| }, |
| { |
| "epoch": 0.1851806491120637, |
| "grad_norm": 0.2714312672615051, |
| "learning_rate": 4.973763099879181e-06, |
| "loss": 0.6997, |
| "step": 378 |
| }, |
| { |
| "epoch": 0.18567054500918556, |
| "grad_norm": 0.27155402302742004, |
| "learning_rate": 4.973574339548291e-06, |
| "loss": 0.6609, |
| "step": 379 |
| }, |
| { |
| "epoch": 0.1861604409063074, |
| "grad_norm": 0.2687325179576874, |
| "learning_rate": 4.973384906242332e-06, |
| "loss": 0.7095, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.18665033680342927, |
| "grad_norm": 0.2704453766345978, |
| "learning_rate": 4.973194800012841e-06, |
| "loss": 0.6728, |
| "step": 381 |
| }, |
| { |
| "epoch": 0.18714023270055113, |
| "grad_norm": 0.2697974145412445, |
| "learning_rate": 4.973004020911541e-06, |
| "loss": 0.6817, |
| "step": 382 |
| }, |
| { |
| "epoch": 0.187630128597673, |
| "grad_norm": 0.27038195729255676, |
| "learning_rate": 4.972812568990334e-06, |
| "loss": 0.69, |
| "step": 383 |
| }, |
| { |
| "epoch": 0.18812002449479487, |
| "grad_norm": 0.27607688307762146, |
| "learning_rate": 4.97262044430131e-06, |
| "loss": 0.6935, |
| "step": 384 |
| }, |
| { |
| "epoch": 0.1886099203919167, |
| "grad_norm": 0.261408269405365, |
| "learning_rate": 4.972427646896738e-06, |
| "loss": 0.6641, |
| "step": 385 |
| }, |
| { |
| "epoch": 0.18909981628903857, |
| "grad_norm": 0.2864983379840851, |
| "learning_rate": 4.972234176829072e-06, |
| "loss": 0.6989, |
| "step": 386 |
| }, |
| { |
| "epoch": 0.18958971218616044, |
| "grad_norm": 0.2765742838382721, |
| "learning_rate": 4.972040034150949e-06, |
| "loss": 0.6516, |
| "step": 387 |
| }, |
| { |
| "epoch": 0.1900796080832823, |
| "grad_norm": 0.27313947677612305, |
| "learning_rate": 4.9718452189151885e-06, |
| "loss": 0.722, |
| "step": 388 |
| }, |
| { |
| "epoch": 0.19056950398040418, |
| "grad_norm": 0.27086713910102844, |
| "learning_rate": 4.971649731174793e-06, |
| "loss": 0.6706, |
| "step": 389 |
| }, |
| { |
| "epoch": 0.19105939987752601, |
| "grad_norm": 0.2872767150402069, |
| "learning_rate": 4.9714535709829475e-06, |
| "loss": 0.7021, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.19154929577464788, |
| "grad_norm": 0.2844785749912262, |
| "learning_rate": 4.971256738393021e-06, |
| "loss": 0.6867, |
| "step": 391 |
| }, |
| { |
| "epoch": 0.19203919167176975, |
| "grad_norm": 0.2807650864124298, |
| "learning_rate": 4.971059233458565e-06, |
| "loss": 0.6744, |
| "step": 392 |
| }, |
| { |
| "epoch": 0.19252908756889162, |
| "grad_norm": 0.26693230867385864, |
| "learning_rate": 4.970861056233314e-06, |
| "loss": 0.7266, |
| "step": 393 |
| }, |
| { |
| "epoch": 0.19301898346601348, |
| "grad_norm": 0.27306389808654785, |
| "learning_rate": 4.970662206771184e-06, |
| "loss": 0.6958, |
| "step": 394 |
| }, |
| { |
| "epoch": 0.19350887936313532, |
| "grad_norm": 0.2836274802684784, |
| "learning_rate": 4.9704626851262775e-06, |
| "loss": 0.699, |
| "step": 395 |
| }, |
| { |
| "epoch": 0.1939987752602572, |
| "grad_norm": 0.27907463908195496, |
| "learning_rate": 4.970262491352875e-06, |
| "loss": 0.6961, |
| "step": 396 |
| }, |
| { |
| "epoch": 0.19448867115737906, |
| "grad_norm": 0.2898431420326233, |
| "learning_rate": 4.970061625505443e-06, |
| "loss": 0.6883, |
| "step": 397 |
| }, |
| { |
| "epoch": 0.19497856705450092, |
| "grad_norm": 0.2687452435493469, |
| "learning_rate": 4.969860087638632e-06, |
| "loss": 0.6499, |
| "step": 398 |
| }, |
| { |
| "epoch": 0.1954684629516228, |
| "grad_norm": 0.2855985760688782, |
| "learning_rate": 4.969657877807271e-06, |
| "loss": 0.6937, |
| "step": 399 |
| }, |
| { |
| "epoch": 0.19595835884874463, |
| "grad_norm": 0.27004674077033997, |
| "learning_rate": 4.969454996066377e-06, |
| "loss": 0.6813, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.1964482547458665, |
| "grad_norm": 0.2813244163990021, |
| "learning_rate": 4.969251442471145e-06, |
| "loss": 0.6907, |
| "step": 401 |
| }, |
| { |
| "epoch": 0.19693815064298836, |
| "grad_norm": 0.2692974805831909, |
| "learning_rate": 4.969047217076956e-06, |
| "loss": 0.668, |
| "step": 402 |
| }, |
| { |
| "epoch": 0.19742804654011023, |
| "grad_norm": 0.2798345983028412, |
| "learning_rate": 4.968842319939374e-06, |
| "loss": 0.6637, |
| "step": 403 |
| }, |
| { |
| "epoch": 0.1979179424372321, |
| "grad_norm": 0.2649277448654175, |
| "learning_rate": 4.968636751114141e-06, |
| "loss": 0.7005, |
| "step": 404 |
| }, |
| { |
| "epoch": 0.19840783833435394, |
| "grad_norm": 0.270865261554718, |
| "learning_rate": 4.968430510657189e-06, |
| "loss": 0.6985, |
| "step": 405 |
| }, |
| { |
| "epoch": 0.1988977342314758, |
| "grad_norm": 0.2828701138496399, |
| "learning_rate": 4.968223598624625e-06, |
| "loss": 0.6675, |
| "step": 406 |
| }, |
| { |
| "epoch": 0.19938763012859767, |
| "grad_norm": 0.28420430421829224, |
| "learning_rate": 4.968016015072747e-06, |
| "loss": 0.7123, |
| "step": 407 |
| }, |
| { |
| "epoch": 0.19987752602571954, |
| "grad_norm": 0.2720252573490143, |
| "learning_rate": 4.967807760058029e-06, |
| "loss": 0.6612, |
| "step": 408 |
| }, |
| { |
| "epoch": 0.2003674219228414, |
| "grad_norm": 0.276832640171051, |
| "learning_rate": 4.9675988336371305e-06, |
| "loss": 0.6776, |
| "step": 409 |
| }, |
| { |
| "epoch": 0.20085731781996327, |
| "grad_norm": 0.2641506791114807, |
| "learning_rate": 4.967389235866893e-06, |
| "loss": 0.6782, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.2013472137170851, |
| "grad_norm": 0.2748897969722748, |
| "learning_rate": 4.967178966804341e-06, |
| "loss": 0.6945, |
| "step": 411 |
| }, |
| { |
| "epoch": 0.20183710961420698, |
| "grad_norm": 0.27730754017829895, |
| "learning_rate": 4.966968026506683e-06, |
| "loss": 0.6681, |
| "step": 412 |
| }, |
| { |
| "epoch": 0.20232700551132884, |
| "grad_norm": 0.2730098366737366, |
| "learning_rate": 4.966756415031307e-06, |
| "loss": 0.7097, |
| "step": 413 |
| }, |
| { |
| "epoch": 0.2028169014084507, |
| "grad_norm": 0.26940208673477173, |
| "learning_rate": 4.9665441324357834e-06, |
| "loss": 0.6963, |
| "step": 414 |
| }, |
| { |
| "epoch": 0.20330679730557258, |
| "grad_norm": 0.28830280900001526, |
| "learning_rate": 4.9663311787778715e-06, |
| "loss": 0.6455, |
| "step": 415 |
| }, |
| { |
| "epoch": 0.20379669320269442, |
| "grad_norm": 0.2707945704460144, |
| "learning_rate": 4.966117554115505e-06, |
| "loss": 0.6781, |
| "step": 416 |
| }, |
| { |
| "epoch": 0.20428658909981628, |
| "grad_norm": 0.28427818417549133, |
| "learning_rate": 4.965903258506806e-06, |
| "loss": 0.654, |
| "step": 417 |
| }, |
| { |
| "epoch": 0.20477648499693815, |
| "grad_norm": 0.26649975776672363, |
| "learning_rate": 4.965688292010077e-06, |
| "loss": 0.7078, |
| "step": 418 |
| }, |
| { |
| "epoch": 0.20526638089406002, |
| "grad_norm": 0.27190712094306946, |
| "learning_rate": 4.9654726546838015e-06, |
| "loss": 0.6648, |
| "step": 419 |
| }, |
| { |
| "epoch": 0.20575627679118189, |
| "grad_norm": 0.27658402919769287, |
| "learning_rate": 4.965256346586648e-06, |
| "loss": 0.7066, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.20624617268830372, |
| "grad_norm": 0.2791120111942291, |
| "learning_rate": 4.965039367777466e-06, |
| "loss": 0.6856, |
| "step": 421 |
| }, |
| { |
| "epoch": 0.2067360685854256, |
| "grad_norm": 0.27996936440467834, |
| "learning_rate": 4.964821718315289e-06, |
| "loss": 0.6779, |
| "step": 422 |
| }, |
| { |
| "epoch": 0.20722596448254746, |
| "grad_norm": 0.27422329783439636, |
| "learning_rate": 4.9646033982593315e-06, |
| "loss": 0.6641, |
| "step": 423 |
| }, |
| { |
| "epoch": 0.20771586037966933, |
| "grad_norm": 0.28245022892951965, |
| "learning_rate": 4.964384407668991e-06, |
| "loss": 0.6649, |
| "step": 424 |
| }, |
| { |
| "epoch": 0.2082057562767912, |
| "grad_norm": 0.2692187428474426, |
| "learning_rate": 4.964164746603847e-06, |
| "loss": 0.6685, |
| "step": 425 |
| }, |
| { |
| "epoch": 0.20869565217391303, |
| "grad_norm": 0.27592557668685913, |
| "learning_rate": 4.963944415123662e-06, |
| "loss": 0.6436, |
| "step": 426 |
| }, |
| { |
| "epoch": 0.2091855480710349, |
| "grad_norm": 0.2831850051879883, |
| "learning_rate": 4.9637234132883805e-06, |
| "loss": 0.6738, |
| "step": 427 |
| }, |
| { |
| "epoch": 0.20967544396815677, |
| "grad_norm": 0.30106180906295776, |
| "learning_rate": 4.9635017411581295e-06, |
| "loss": 0.7092, |
| "step": 428 |
| }, |
| { |
| "epoch": 0.21016533986527863, |
| "grad_norm": 0.2780488431453705, |
| "learning_rate": 4.96327939879322e-06, |
| "loss": 0.6872, |
| "step": 429 |
| }, |
| { |
| "epoch": 0.2106552357624005, |
| "grad_norm": 0.2832487225532532, |
| "learning_rate": 4.96305638625414e-06, |
| "loss": 0.6791, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.21114513165952234, |
| "grad_norm": 0.27021270990371704, |
| "learning_rate": 4.962832703601568e-06, |
| "loss": 0.7024, |
| "step": 431 |
| }, |
| { |
| "epoch": 0.2116350275566442, |
| "grad_norm": 0.2906636595726013, |
| "learning_rate": 4.962608350896357e-06, |
| "loss": 0.6916, |
| "step": 432 |
| }, |
| { |
| "epoch": 0.21212492345376607, |
| "grad_norm": 0.27886664867401123, |
| "learning_rate": 4.962383328199549e-06, |
| "loss": 0.6882, |
| "step": 433 |
| }, |
| { |
| "epoch": 0.21261481935088794, |
| "grad_norm": 0.2870326638221741, |
| "learning_rate": 4.962157635572362e-06, |
| "loss": 0.6775, |
| "step": 434 |
| }, |
| { |
| "epoch": 0.2131047152480098, |
| "grad_norm": 0.2815456688404083, |
| "learning_rate": 4.9619312730762e-06, |
| "loss": 0.7052, |
| "step": 435 |
| }, |
| { |
| "epoch": 0.21359461114513165, |
| "grad_norm": 0.2805708646774292, |
| "learning_rate": 4.961704240772649e-06, |
| "loss": 0.6697, |
| "step": 436 |
| }, |
| { |
| "epoch": 0.2140845070422535, |
| "grad_norm": 0.29036977887153625, |
| "learning_rate": 4.961476538723477e-06, |
| "loss": 0.6859, |
| "step": 437 |
| }, |
| { |
| "epoch": 0.21457440293937538, |
| "grad_norm": 0.28320616483688354, |
| "learning_rate": 4.9612481669906335e-06, |
| "loss": 0.6502, |
| "step": 438 |
| }, |
| { |
| "epoch": 0.21506429883649725, |
| "grad_norm": 0.2814081013202667, |
| "learning_rate": 4.961019125636251e-06, |
| "loss": 0.6802, |
| "step": 439 |
| }, |
| { |
| "epoch": 0.21555419473361911, |
| "grad_norm": 0.2973114252090454, |
| "learning_rate": 4.960789414722642e-06, |
| "loss": 0.6808, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.21604409063074098, |
| "grad_norm": 0.28980064392089844, |
| "learning_rate": 4.960559034312306e-06, |
| "loss": 0.6619, |
| "step": 441 |
| }, |
| { |
| "epoch": 0.21653398652786282, |
| "grad_norm": 0.28697559237480164, |
| "learning_rate": 4.960327984467919e-06, |
| "loss": 0.6548, |
| "step": 442 |
| }, |
| { |
| "epoch": 0.2170238824249847, |
| "grad_norm": 0.2818733751773834, |
| "learning_rate": 4.9600962652523435e-06, |
| "loss": 0.6608, |
| "step": 443 |
| }, |
| { |
| "epoch": 0.21751377832210655, |
| "grad_norm": 0.28344979882240295, |
| "learning_rate": 4.959863876728622e-06, |
| "loss": 0.6818, |
| "step": 444 |
| }, |
| { |
| "epoch": 0.21800367421922842, |
| "grad_norm": 0.277803510427475, |
| "learning_rate": 4.959630818959979e-06, |
| "loss": 0.6632, |
| "step": 445 |
| }, |
| { |
| "epoch": 0.2184935701163503, |
| "grad_norm": 0.290330708026886, |
| "learning_rate": 4.9593970920098226e-06, |
| "loss": 0.7021, |
| "step": 446 |
| }, |
| { |
| "epoch": 0.21898346601347213, |
| "grad_norm": 0.2983878254890442, |
| "learning_rate": 4.9591626959417395e-06, |
| "loss": 0.645, |
| "step": 447 |
| }, |
| { |
| "epoch": 0.219473361910594, |
| "grad_norm": 0.3008386492729187, |
| "learning_rate": 4.958927630819503e-06, |
| "loss": 0.6722, |
| "step": 448 |
| }, |
| { |
| "epoch": 0.21996325780771586, |
| "grad_norm": 0.29174381494522095, |
| "learning_rate": 4.9586918967070666e-06, |
| "loss": 0.7, |
| "step": 449 |
| }, |
| { |
| "epoch": 0.22045315370483773, |
| "grad_norm": 0.2773303985595703, |
| "learning_rate": 4.958455493668565e-06, |
| "loss": 0.6606, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.2209430496019596, |
| "grad_norm": 0.29781636595726013, |
| "learning_rate": 4.958218421768314e-06, |
| "loss": 0.673, |
| "step": 451 |
| }, |
| { |
| "epoch": 0.22143294549908143, |
| "grad_norm": 0.2792598605155945, |
| "learning_rate": 4.957980681070814e-06, |
| "loss": 0.7003, |
| "step": 452 |
| }, |
| { |
| "epoch": 0.2219228413962033, |
| "grad_norm": 0.2928924262523651, |
| "learning_rate": 4.957742271640746e-06, |
| "loss": 0.6825, |
| "step": 453 |
| }, |
| { |
| "epoch": 0.22241273729332517, |
| "grad_norm": 0.2799023985862732, |
| "learning_rate": 4.957503193542973e-06, |
| "loss": 0.6808, |
| "step": 454 |
| }, |
| { |
| "epoch": 0.22290263319044704, |
| "grad_norm": 0.2886837124824524, |
| "learning_rate": 4.957263446842541e-06, |
| "loss": 0.6846, |
| "step": 455 |
| }, |
| { |
| "epoch": 0.2233925290875689, |
| "grad_norm": 0.29160356521606445, |
| "learning_rate": 4.957023031604674e-06, |
| "loss": 0.6755, |
| "step": 456 |
| }, |
| { |
| "epoch": 0.22388242498469074, |
| "grad_norm": 0.29495176672935486, |
| "learning_rate": 4.956781947894784e-06, |
| "loss": 0.6744, |
| "step": 457 |
| }, |
| { |
| "epoch": 0.2243723208818126, |
| "grad_norm": 0.28703880310058594, |
| "learning_rate": 4.95654019577846e-06, |
| "loss": 0.7161, |
| "step": 458 |
| }, |
| { |
| "epoch": 0.22486221677893448, |
| "grad_norm": 0.29158130288124084, |
| "learning_rate": 4.956297775321475e-06, |
| "loss": 0.6943, |
| "step": 459 |
| }, |
| { |
| "epoch": 0.22535211267605634, |
| "grad_norm": 0.2803298234939575, |
| "learning_rate": 4.956054686589783e-06, |
| "loss": 0.6823, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.2258420085731782, |
| "grad_norm": 0.2935578525066376, |
| "learning_rate": 4.95581092964952e-06, |
| "loss": 0.6856, |
| "step": 461 |
| }, |
| { |
| "epoch": 0.22633190447030005, |
| "grad_norm": 0.28209394216537476, |
| "learning_rate": 4.955566504567004e-06, |
| "loss": 0.693, |
| "step": 462 |
| }, |
| { |
| "epoch": 0.22682180036742192, |
| "grad_norm": 0.2914902865886688, |
| "learning_rate": 4.955321411408735e-06, |
| "loss": 0.6978, |
| "step": 463 |
| }, |
| { |
| "epoch": 0.22731169626454378, |
| "grad_norm": 0.2760421335697174, |
| "learning_rate": 4.955075650241396e-06, |
| "loss": 0.6912, |
| "step": 464 |
| }, |
| { |
| "epoch": 0.22780159216166565, |
| "grad_norm": 0.2878439724445343, |
| "learning_rate": 4.954829221131847e-06, |
| "loss": 0.6973, |
| "step": 465 |
| }, |
| { |
| "epoch": 0.22829148805878752, |
| "grad_norm": 0.2849530279636383, |
| "learning_rate": 4.954582124147135e-06, |
| "loss": 0.6682, |
| "step": 466 |
| }, |
| { |
| "epoch": 0.22878138395590936, |
| "grad_norm": 0.2800678610801697, |
| "learning_rate": 4.954334359354486e-06, |
| "loss": 0.6524, |
| "step": 467 |
| }, |
| { |
| "epoch": 0.22927127985303122, |
| "grad_norm": 0.2819807231426239, |
| "learning_rate": 4.954085926821308e-06, |
| "loss": 0.6899, |
| "step": 468 |
| }, |
| { |
| "epoch": 0.2297611757501531, |
| "grad_norm": 0.290926456451416, |
| "learning_rate": 4.953836826615193e-06, |
| "loss": 0.6909, |
| "step": 469 |
| }, |
| { |
| "epoch": 0.23025107164727496, |
| "grad_norm": 0.2750617265701294, |
| "learning_rate": 4.953587058803909e-06, |
| "loss": 0.666, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.23074096754439682, |
| "grad_norm": 0.2927756607532501, |
| "learning_rate": 4.953336623455413e-06, |
| "loss": 0.6676, |
| "step": 471 |
| }, |
| { |
| "epoch": 0.23123086344151866, |
| "grad_norm": 0.2824176549911499, |
| "learning_rate": 4.953085520637839e-06, |
| "loss": 0.6479, |
| "step": 472 |
| }, |
| { |
| "epoch": 0.23172075933864053, |
| "grad_norm": 0.2902854084968567, |
| "learning_rate": 4.952833750419502e-06, |
| "loss": 0.6675, |
| "step": 473 |
| }, |
| { |
| "epoch": 0.2322106552357624, |
| "grad_norm": 0.2878444790840149, |
| "learning_rate": 4.952581312868901e-06, |
| "loss": 0.6707, |
| "step": 474 |
| }, |
| { |
| "epoch": 0.23270055113288426, |
| "grad_norm": 0.29057806730270386, |
| "learning_rate": 4.952328208054716e-06, |
| "loss": 0.6931, |
| "step": 475 |
| }, |
| { |
| "epoch": 0.23319044703000613, |
| "grad_norm": 0.287222683429718, |
| "learning_rate": 4.952074436045806e-06, |
| "loss": 0.6675, |
| "step": 476 |
| }, |
| { |
| "epoch": 0.233680342927128, |
| "grad_norm": 0.2918603718280792, |
| "learning_rate": 4.951819996911217e-06, |
| "loss": 0.6752, |
| "step": 477 |
| }, |
| { |
| "epoch": 0.23417023882424984, |
| "grad_norm": 0.2872229814529419, |
| "learning_rate": 4.951564890720172e-06, |
| "loss": 0.6731, |
| "step": 478 |
| }, |
| { |
| "epoch": 0.2346601347213717, |
| "grad_norm": 0.28795695304870605, |
| "learning_rate": 4.951309117542075e-06, |
| "loss": 0.6798, |
| "step": 479 |
| }, |
| { |
| "epoch": 0.23515003061849357, |
| "grad_norm": 0.27824899554252625, |
| "learning_rate": 4.951052677446515e-06, |
| "loss": 0.705, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.23563992651561544, |
| "grad_norm": 0.2940081059932709, |
| "learning_rate": 4.95079557050326e-06, |
| "loss": 0.6721, |
| "step": 481 |
| }, |
| { |
| "epoch": 0.2361298224127373, |
| "grad_norm": 0.31432050466537476, |
| "learning_rate": 4.950537796782261e-06, |
| "loss": 0.71, |
| "step": 482 |
| }, |
| { |
| "epoch": 0.23661971830985915, |
| "grad_norm": 0.28667113184928894, |
| "learning_rate": 4.950279356353648e-06, |
| "loss": 0.6781, |
| "step": 483 |
| }, |
| { |
| "epoch": 0.237109614206981, |
| "grad_norm": 0.28186243772506714, |
| "learning_rate": 4.950020249287734e-06, |
| "loss": 0.689, |
| "step": 484 |
| }, |
| { |
| "epoch": 0.23759951010410288, |
| "grad_norm": 0.282751202583313, |
| "learning_rate": 4.9497604756550134e-06, |
| "loss": 0.6943, |
| "step": 485 |
| }, |
| { |
| "epoch": 0.23808940600122475, |
| "grad_norm": 0.29054197669029236, |
| "learning_rate": 4.9495000355261625e-06, |
| "loss": 0.689, |
| "step": 486 |
| }, |
| { |
| "epoch": 0.2385793018983466, |
| "grad_norm": 0.28855305910110474, |
| "learning_rate": 4.949238928972037e-06, |
| "loss": 0.6566, |
| "step": 487 |
| }, |
| { |
| "epoch": 0.23906919779546845, |
| "grad_norm": 0.2937643229961395, |
| "learning_rate": 4.948977156063675e-06, |
| "loss": 0.6669, |
| "step": 488 |
| }, |
| { |
| "epoch": 0.23955909369259032, |
| "grad_norm": 0.27907466888427734, |
| "learning_rate": 4.948714716872297e-06, |
| "loss": 0.6802, |
| "step": 489 |
| }, |
| { |
| "epoch": 0.2400489895897122, |
| "grad_norm": 0.29693931341171265, |
| "learning_rate": 4.948451611469304e-06, |
| "loss": 0.6874, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.24053888548683405, |
| "grad_norm": 0.2904781401157379, |
| "learning_rate": 4.948187839926276e-06, |
| "loss": 0.6912, |
| "step": 491 |
| }, |
| { |
| "epoch": 0.24102878138395592, |
| "grad_norm": 0.28125911951065063, |
| "learning_rate": 4.947923402314978e-06, |
| "loss": 0.6663, |
| "step": 492 |
| }, |
| { |
| "epoch": 0.24151867728107776, |
| "grad_norm": 0.3010816276073456, |
| "learning_rate": 4.947658298707354e-06, |
| "loss": 0.6784, |
| "step": 493 |
| }, |
| { |
| "epoch": 0.24200857317819963, |
| "grad_norm": 0.29629260301589966, |
| "learning_rate": 4.9473925291755295e-06, |
| "loss": 0.7006, |
| "step": 494 |
| }, |
| { |
| "epoch": 0.2424984690753215, |
| "grad_norm": 0.29816797375679016, |
| "learning_rate": 4.947126093791812e-06, |
| "loss": 0.6519, |
| "step": 495 |
| }, |
| { |
| "epoch": 0.24298836497244336, |
| "grad_norm": 0.29432323575019836, |
| "learning_rate": 4.946858992628688e-06, |
| "loss": 0.7049, |
| "step": 496 |
| }, |
| { |
| "epoch": 0.24347826086956523, |
| "grad_norm": 0.2930796444416046, |
| "learning_rate": 4.946591225758828e-06, |
| "loss": 0.661, |
| "step": 497 |
| }, |
| { |
| "epoch": 0.24396815676668707, |
| "grad_norm": 0.2817651629447937, |
| "learning_rate": 4.946322793255081e-06, |
| "loss": 0.6622, |
| "step": 498 |
| }, |
| { |
| "epoch": 0.24445805266380893, |
| "grad_norm": 0.28571751713752747, |
| "learning_rate": 4.946053695190479e-06, |
| "loss": 0.6726, |
| "step": 499 |
| }, |
| { |
| "epoch": 0.2449479485609308, |
| "grad_norm": 0.28773561120033264, |
| "learning_rate": 4.945783931638235e-06, |
| "loss": 0.6766, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.24543784445805267, |
| "grad_norm": 0.28509920835494995, |
| "learning_rate": 4.945513502671741e-06, |
| "loss": 0.6548, |
| "step": 501 |
| }, |
| { |
| "epoch": 0.24592774035517453, |
| "grad_norm": 0.28688615560531616, |
| "learning_rate": 4.9452424083645725e-06, |
| "loss": 0.6814, |
| "step": 502 |
| }, |
| { |
| "epoch": 0.24641763625229637, |
| "grad_norm": 0.3019798994064331, |
| "learning_rate": 4.9449706487904845e-06, |
| "loss": 0.6688, |
| "step": 503 |
| }, |
| { |
| "epoch": 0.24690753214941824, |
| "grad_norm": 0.2820969521999359, |
| "learning_rate": 4.944698224023414e-06, |
| "loss": 0.6696, |
| "step": 504 |
| }, |
| { |
| "epoch": 0.2473974280465401, |
| "grad_norm": 0.2863926887512207, |
| "learning_rate": 4.944425134137478e-06, |
| "loss": 0.6699, |
| "step": 505 |
| }, |
| { |
| "epoch": 0.24788732394366197, |
| "grad_norm": 0.28923314809799194, |
| "learning_rate": 4.944151379206976e-06, |
| "loss": 0.6594, |
| "step": 506 |
| }, |
| { |
| "epoch": 0.24837721984078384, |
| "grad_norm": 0.29417911171913147, |
| "learning_rate": 4.943876959306386e-06, |
| "loss": 0.6771, |
| "step": 507 |
| }, |
| { |
| "epoch": 0.2488671157379057, |
| "grad_norm": 0.3137289881706238, |
| "learning_rate": 4.943601874510369e-06, |
| "loss": 0.6995, |
| "step": 508 |
| }, |
| { |
| "epoch": 0.24935701163502755, |
| "grad_norm": 0.2851713001728058, |
| "learning_rate": 4.9433261248937655e-06, |
| "loss": 0.6397, |
| "step": 509 |
| }, |
| { |
| "epoch": 0.24984690753214941, |
| "grad_norm": 0.3067043125629425, |
| "learning_rate": 4.943049710531599e-06, |
| "loss": 0.6696, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.2503368034292713, |
| "grad_norm": 0.28277942538261414, |
| "learning_rate": 4.942772631499071e-06, |
| "loss": 0.6852, |
| "step": 511 |
| }, |
| { |
| "epoch": 0.2508266993263931, |
| "grad_norm": 0.2837325930595398, |
| "learning_rate": 4.942494887871565e-06, |
| "loss": 0.7028, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.251316595223515, |
| "grad_norm": 0.28493350744247437, |
| "learning_rate": 4.942216479724647e-06, |
| "loss": 0.6616, |
| "step": 513 |
| }, |
| { |
| "epoch": 0.25180649112063686, |
| "grad_norm": 0.28880465030670166, |
| "learning_rate": 4.941937407134061e-06, |
| "loss": 0.6609, |
| "step": 514 |
| }, |
| { |
| "epoch": 0.25229638701775875, |
| "grad_norm": 0.29091018438339233, |
| "learning_rate": 4.941657670175734e-06, |
| "loss": 0.6714, |
| "step": 515 |
| }, |
| { |
| "epoch": 0.2527862829148806, |
| "grad_norm": 0.32527679204940796, |
| "learning_rate": 4.941377268925773e-06, |
| "loss": 0.6783, |
| "step": 516 |
| }, |
| { |
| "epoch": 0.25327617881200243, |
| "grad_norm": 0.31313812732696533, |
| "learning_rate": 4.941096203460465e-06, |
| "loss": 0.7043, |
| "step": 517 |
| }, |
| { |
| "epoch": 0.2537660747091243, |
| "grad_norm": 0.29403164982795715, |
| "learning_rate": 4.940814473856278e-06, |
| "loss": 0.678, |
| "step": 518 |
| }, |
| { |
| "epoch": 0.25425597060624616, |
| "grad_norm": 0.2897968590259552, |
| "learning_rate": 4.9405320801898615e-06, |
| "loss": 0.6726, |
| "step": 519 |
| }, |
| { |
| "epoch": 0.25474586650336806, |
| "grad_norm": 0.3000484108924866, |
| "learning_rate": 4.940249022538045e-06, |
| "loss": 0.6736, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.2552357624004899, |
| "grad_norm": 0.3161175847053528, |
| "learning_rate": 4.9399653009778394e-06, |
| "loss": 0.6713, |
| "step": 521 |
| }, |
| { |
| "epoch": 0.25572565829761174, |
| "grad_norm": 0.30063289403915405, |
| "learning_rate": 4.9396809155864354e-06, |
| "loss": 0.6839, |
| "step": 522 |
| }, |
| { |
| "epoch": 0.25621555419473363, |
| "grad_norm": 0.3286581337451935, |
| "learning_rate": 4.939395866441204e-06, |
| "loss": 0.6747, |
| "step": 523 |
| }, |
| { |
| "epoch": 0.25670545009185547, |
| "grad_norm": 0.3079366087913513, |
| "learning_rate": 4.9391101536196975e-06, |
| "loss": 0.691, |
| "step": 524 |
| }, |
| { |
| "epoch": 0.25719534598897736, |
| "grad_norm": 0.30219361186027527, |
| "learning_rate": 4.938823777199649e-06, |
| "loss": 0.7159, |
| "step": 525 |
| }, |
| { |
| "epoch": 0.2576852418860992, |
| "grad_norm": 0.3035431206226349, |
| "learning_rate": 4.938536737258972e-06, |
| "loss": 0.6961, |
| "step": 526 |
| }, |
| { |
| "epoch": 0.25817513778322104, |
| "grad_norm": 0.29658591747283936, |
| "learning_rate": 4.938249033875759e-06, |
| "loss": 0.6925, |
| "step": 527 |
| }, |
| { |
| "epoch": 0.25866503368034294, |
| "grad_norm": 0.3085661232471466, |
| "learning_rate": 4.937960667128285e-06, |
| "loss": 0.6447, |
| "step": 528 |
| }, |
| { |
| "epoch": 0.2591549295774648, |
| "grad_norm": 0.30884072184562683, |
| "learning_rate": 4.9376716370950054e-06, |
| "loss": 0.6899, |
| "step": 529 |
| }, |
| { |
| "epoch": 0.25964482547458667, |
| "grad_norm": 0.30775752663612366, |
| "learning_rate": 4.937381943854554e-06, |
| "loss": 0.6918, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.2601347213717085, |
| "grad_norm": 0.2908138930797577, |
| "learning_rate": 4.937091587485749e-06, |
| "loss": 0.691, |
| "step": 531 |
| }, |
| { |
| "epoch": 0.26062461726883035, |
| "grad_norm": 0.31106460094451904, |
| "learning_rate": 4.9368005680675825e-06, |
| "loss": 0.6864, |
| "step": 532 |
| }, |
| { |
| "epoch": 0.26111451316595224, |
| "grad_norm": 0.33596447110176086, |
| "learning_rate": 4.936508885679234e-06, |
| "loss": 0.6663, |
| "step": 533 |
| }, |
| { |
| "epoch": 0.2616044090630741, |
| "grad_norm": 0.30692586302757263, |
| "learning_rate": 4.936216540400059e-06, |
| "loss": 0.6774, |
| "step": 534 |
| }, |
| { |
| "epoch": 0.262094304960196, |
| "grad_norm": 0.30321234464645386, |
| "learning_rate": 4.935923532309596e-06, |
| "loss": 0.6516, |
| "step": 535 |
| }, |
| { |
| "epoch": 0.2625842008573178, |
| "grad_norm": 0.29872727394104004, |
| "learning_rate": 4.935629861487561e-06, |
| "loss": 0.6467, |
| "step": 536 |
| }, |
| { |
| "epoch": 0.26307409675443966, |
| "grad_norm": 0.3029206097126007, |
| "learning_rate": 4.9353355280138525e-06, |
| "loss": 0.6683, |
| "step": 537 |
| }, |
| { |
| "epoch": 0.26356399265156155, |
| "grad_norm": 0.2922782897949219, |
| "learning_rate": 4.935040531968548e-06, |
| "loss": 0.6554, |
| "step": 538 |
| }, |
| { |
| "epoch": 0.2640538885486834, |
| "grad_norm": 0.3034957945346832, |
| "learning_rate": 4.934744873431907e-06, |
| "loss": 0.6924, |
| "step": 539 |
| }, |
| { |
| "epoch": 0.2645437844458053, |
| "grad_norm": 0.28574198484420776, |
| "learning_rate": 4.934448552484367e-06, |
| "loss": 0.6931, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.2650336803429271, |
| "grad_norm": 0.31014350056648254, |
| "learning_rate": 4.934151569206548e-06, |
| "loss": 0.677, |
| "step": 541 |
| }, |
| { |
| "epoch": 0.26552357624004896, |
| "grad_norm": 0.29660624265670776, |
| "learning_rate": 4.933853923679247e-06, |
| "loss": 0.6425, |
| "step": 542 |
| }, |
| { |
| "epoch": 0.26601347213717086, |
| "grad_norm": 0.3027561604976654, |
| "learning_rate": 4.933555615983445e-06, |
| "loss": 0.706, |
| "step": 543 |
| }, |
| { |
| "epoch": 0.2665033680342927, |
| "grad_norm": 0.3137056827545166, |
| "learning_rate": 4.933256646200301e-06, |
| "loss": 0.6832, |
| "step": 544 |
| }, |
| { |
| "epoch": 0.2669932639314146, |
| "grad_norm": 0.300325483083725, |
| "learning_rate": 4.9329570144111525e-06, |
| "loss": 0.6648, |
| "step": 545 |
| }, |
| { |
| "epoch": 0.26748315982853643, |
| "grad_norm": 0.29390445351600647, |
| "learning_rate": 4.932656720697523e-06, |
| "loss": 0.688, |
| "step": 546 |
| }, |
| { |
| "epoch": 0.26797305572565827, |
| "grad_norm": 0.3029269874095917, |
| "learning_rate": 4.932355765141108e-06, |
| "loss": 0.6781, |
| "step": 547 |
| }, |
| { |
| "epoch": 0.26846295162278017, |
| "grad_norm": 0.30967509746551514, |
| "learning_rate": 4.932054147823791e-06, |
| "loss": 0.6786, |
| "step": 548 |
| }, |
| { |
| "epoch": 0.268952847519902, |
| "grad_norm": 0.30142655968666077, |
| "learning_rate": 4.9317518688276285e-06, |
| "loss": 0.6964, |
| "step": 549 |
| }, |
| { |
| "epoch": 0.2694427434170239, |
| "grad_norm": 0.3007182776927948, |
| "learning_rate": 4.931448928234862e-06, |
| "loss": 0.6662, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.26993263931414574, |
| "grad_norm": 0.29471781849861145, |
| "learning_rate": 4.931145326127911e-06, |
| "loss": 0.698, |
| "step": 551 |
| }, |
| { |
| "epoch": 0.2704225352112676, |
| "grad_norm": 0.30415260791778564, |
| "learning_rate": 4.930841062589375e-06, |
| "loss": 0.7043, |
| "step": 552 |
| }, |
| { |
| "epoch": 0.2709124311083895, |
| "grad_norm": 0.3111327290534973, |
| "learning_rate": 4.930536137702034e-06, |
| "loss": 0.7016, |
| "step": 553 |
| }, |
| { |
| "epoch": 0.2714023270055113, |
| "grad_norm": 0.3104318082332611, |
| "learning_rate": 4.930230551548848e-06, |
| "loss": 0.7092, |
| "step": 554 |
| }, |
| { |
| "epoch": 0.2718922229026332, |
| "grad_norm": 0.3054961562156677, |
| "learning_rate": 4.929924304212956e-06, |
| "loss": 0.658, |
| "step": 555 |
| }, |
| { |
| "epoch": 0.27238211879975505, |
| "grad_norm": 0.31626299023628235, |
| "learning_rate": 4.929617395777678e-06, |
| "loss": 0.684, |
| "step": 556 |
| }, |
| { |
| "epoch": 0.27287201469687694, |
| "grad_norm": 0.30073830485343933, |
| "learning_rate": 4.929309826326512e-06, |
| "loss": 0.6749, |
| "step": 557 |
| }, |
| { |
| "epoch": 0.2733619105939988, |
| "grad_norm": 0.30198878049850464, |
| "learning_rate": 4.929001595943138e-06, |
| "loss": 0.659, |
| "step": 558 |
| }, |
| { |
| "epoch": 0.2738518064911206, |
| "grad_norm": 0.3132243752479553, |
| "learning_rate": 4.928692704711416e-06, |
| "loss": 0.6918, |
| "step": 559 |
| }, |
| { |
| "epoch": 0.2743417023882425, |
| "grad_norm": 0.3196859359741211, |
| "learning_rate": 4.928383152715383e-06, |
| "loss": 0.6748, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.27483159828536435, |
| "grad_norm": 0.2999397814273834, |
| "learning_rate": 4.928072940039258e-06, |
| "loss": 0.6835, |
| "step": 561 |
| }, |
| { |
| "epoch": 0.27532149418248625, |
| "grad_norm": 0.3084956705570221, |
| "learning_rate": 4.92776206676744e-06, |
| "loss": 0.6781, |
| "step": 562 |
| }, |
| { |
| "epoch": 0.2758113900796081, |
| "grad_norm": 0.3060546815395355, |
| "learning_rate": 4.927450532984507e-06, |
| "loss": 0.6698, |
| "step": 563 |
| }, |
| { |
| "epoch": 0.2763012859767299, |
| "grad_norm": 0.2978016138076782, |
| "learning_rate": 4.9271383387752145e-06, |
| "loss": 0.6748, |
| "step": 564 |
| }, |
| { |
| "epoch": 0.2767911818738518, |
| "grad_norm": 0.3078594505786896, |
| "learning_rate": 4.926825484224503e-06, |
| "loss": 0.6964, |
| "step": 565 |
| }, |
| { |
| "epoch": 0.27728107777097366, |
| "grad_norm": 0.30537673830986023, |
| "learning_rate": 4.926511969417488e-06, |
| "loss": 0.6821, |
| "step": 566 |
| }, |
| { |
| "epoch": 0.27777097366809556, |
| "grad_norm": 0.2968599200248718, |
| "learning_rate": 4.926197794439466e-06, |
| "loss": 0.6922, |
| "step": 567 |
| }, |
| { |
| "epoch": 0.2782608695652174, |
| "grad_norm": 0.3004016876220703, |
| "learning_rate": 4.9258829593759135e-06, |
| "loss": 0.6586, |
| "step": 568 |
| }, |
| { |
| "epoch": 0.27875076546233923, |
| "grad_norm": 0.2979053854942322, |
| "learning_rate": 4.925567464312487e-06, |
| "loss": 0.6963, |
| "step": 569 |
| }, |
| { |
| "epoch": 0.27924066135946113, |
| "grad_norm": 0.309206485748291, |
| "learning_rate": 4.9252513093350206e-06, |
| "loss": 0.6728, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.27973055725658297, |
| "grad_norm": 0.2939196228981018, |
| "learning_rate": 4.924934494529531e-06, |
| "loss": 0.6411, |
| "step": 571 |
| }, |
| { |
| "epoch": 0.28022045315370486, |
| "grad_norm": 0.30378228425979614, |
| "learning_rate": 4.92461701998221e-06, |
| "loss": 0.6868, |
| "step": 572 |
| }, |
| { |
| "epoch": 0.2807103490508267, |
| "grad_norm": 0.2925695478916168, |
| "learning_rate": 4.924298885779435e-06, |
| "loss": 0.6987, |
| "step": 573 |
| }, |
| { |
| "epoch": 0.28120024494794854, |
| "grad_norm": 0.29253894090652466, |
| "learning_rate": 4.923980092007757e-06, |
| "loss": 0.6548, |
| "step": 574 |
| }, |
| { |
| "epoch": 0.28169014084507044, |
| "grad_norm": 0.30530020594596863, |
| "learning_rate": 4.923660638753911e-06, |
| "loss": 0.6667, |
| "step": 575 |
| }, |
| { |
| "epoch": 0.2821800367421923, |
| "grad_norm": 0.2993137240409851, |
| "learning_rate": 4.9233405261048066e-06, |
| "loss": 0.657, |
| "step": 576 |
| }, |
| { |
| "epoch": 0.28266993263931417, |
| "grad_norm": 0.3004169464111328, |
| "learning_rate": 4.923019754147537e-06, |
| "loss": 0.6751, |
| "step": 577 |
| }, |
| { |
| "epoch": 0.283159828536436, |
| "grad_norm": 0.3030836880207062, |
| "learning_rate": 4.922698322969372e-06, |
| "loss": 0.6605, |
| "step": 578 |
| }, |
| { |
| "epoch": 0.28364972443355785, |
| "grad_norm": 0.3000795841217041, |
| "learning_rate": 4.922376232657765e-06, |
| "loss": 0.7048, |
| "step": 579 |
| }, |
| { |
| "epoch": 0.28413962033067974, |
| "grad_norm": 0.3051553964614868, |
| "learning_rate": 4.922053483300343e-06, |
| "loss": 0.6613, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.2846295162278016, |
| "grad_norm": 0.2963907718658447, |
| "learning_rate": 4.9217300749849175e-06, |
| "loss": 0.6541, |
| "step": 581 |
| }, |
| { |
| "epoch": 0.2851194121249235, |
| "grad_norm": 0.3138301968574524, |
| "learning_rate": 4.921406007799474e-06, |
| "loss": 0.6408, |
| "step": 582 |
| }, |
| { |
| "epoch": 0.2856093080220453, |
| "grad_norm": 0.292310893535614, |
| "learning_rate": 4.9210812818321814e-06, |
| "loss": 0.6784, |
| "step": 583 |
| }, |
| { |
| "epoch": 0.28609920391916716, |
| "grad_norm": 0.30917978286743164, |
| "learning_rate": 4.920755897171388e-06, |
| "loss": 0.679, |
| "step": 584 |
| }, |
| { |
| "epoch": 0.28658909981628905, |
| "grad_norm": 0.30815809965133667, |
| "learning_rate": 4.920429853905617e-06, |
| "loss": 0.669, |
| "step": 585 |
| }, |
| { |
| "epoch": 0.2870789957134109, |
| "grad_norm": 0.3038373589515686, |
| "learning_rate": 4.920103152123576e-06, |
| "loss": 0.6818, |
| "step": 586 |
| }, |
| { |
| "epoch": 0.2875688916105328, |
| "grad_norm": 0.3149436414241791, |
| "learning_rate": 4.919775791914148e-06, |
| "loss": 0.6871, |
| "step": 587 |
| }, |
| { |
| "epoch": 0.2880587875076546, |
| "grad_norm": 0.29806646704673767, |
| "learning_rate": 4.919447773366397e-06, |
| "loss": 0.6504, |
| "step": 588 |
| }, |
| { |
| "epoch": 0.28854868340477646, |
| "grad_norm": 0.29603317379951477, |
| "learning_rate": 4.919119096569567e-06, |
| "loss": 0.6709, |
| "step": 589 |
| }, |
| { |
| "epoch": 0.28903857930189836, |
| "grad_norm": 0.29373258352279663, |
| "learning_rate": 4.918789761613076e-06, |
| "loss": 0.6673, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.2895284751990202, |
| "grad_norm": 0.3051702380180359, |
| "learning_rate": 4.91845976858653e-06, |
| "loss": 0.652, |
| "step": 591 |
| }, |
| { |
| "epoch": 0.2900183710961421, |
| "grad_norm": 0.3097754120826721, |
| "learning_rate": 4.918129117579704e-06, |
| "loss": 0.6782, |
| "step": 592 |
| }, |
| { |
| "epoch": 0.29050826699326393, |
| "grad_norm": 0.29908713698387146, |
| "learning_rate": 4.91779780868256e-06, |
| "loss": 0.6467, |
| "step": 593 |
| }, |
| { |
| "epoch": 0.29099816289038577, |
| "grad_norm": 0.30273228883743286, |
| "learning_rate": 4.917465841985234e-06, |
| "loss": 0.6372, |
| "step": 594 |
| }, |
| { |
| "epoch": 0.29148805878750766, |
| "grad_norm": 0.32377082109451294, |
| "learning_rate": 4.917133217578044e-06, |
| "loss": 0.7058, |
| "step": 595 |
| }, |
| { |
| "epoch": 0.2919779546846295, |
| "grad_norm": 0.31425973773002625, |
| "learning_rate": 4.916799935551485e-06, |
| "loss": 0.6975, |
| "step": 596 |
| }, |
| { |
| "epoch": 0.2924678505817514, |
| "grad_norm": 0.2944108247756958, |
| "learning_rate": 4.916465995996232e-06, |
| "loss": 0.6974, |
| "step": 597 |
| }, |
| { |
| "epoch": 0.29295774647887324, |
| "grad_norm": 0.3176249861717224, |
| "learning_rate": 4.916131399003139e-06, |
| "loss": 0.6825, |
| "step": 598 |
| }, |
| { |
| "epoch": 0.2934476423759951, |
| "grad_norm": 0.3081783950328827, |
| "learning_rate": 4.915796144663236e-06, |
| "loss": 0.6493, |
| "step": 599 |
| }, |
| { |
| "epoch": 0.29393753827311697, |
| "grad_norm": 0.3036037087440491, |
| "learning_rate": 4.915460233067738e-06, |
| "loss": 0.6487, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.2944274341702388, |
| "grad_norm": 0.30290353298187256, |
| "learning_rate": 4.915123664308032e-06, |
| "loss": 0.6779, |
| "step": 601 |
| }, |
| { |
| "epoch": 0.2949173300673607, |
| "grad_norm": 0.3099123239517212, |
| "learning_rate": 4.9147864384756895e-06, |
| "loss": 0.6716, |
| "step": 602 |
| }, |
| { |
| "epoch": 0.29540722596448254, |
| "grad_norm": 0.29497596621513367, |
| "learning_rate": 4.914448555662455e-06, |
| "loss": 0.6712, |
| "step": 603 |
| }, |
| { |
| "epoch": 0.2958971218616044, |
| "grad_norm": 0.30663248896598816, |
| "learning_rate": 4.914110015960258e-06, |
| "loss": 0.656, |
| "step": 604 |
| }, |
| { |
| "epoch": 0.2963870177587263, |
| "grad_norm": 0.3016858994960785, |
| "learning_rate": 4.913770819461201e-06, |
| "loss": 0.6637, |
| "step": 605 |
| }, |
| { |
| "epoch": 0.2968769136558481, |
| "grad_norm": 0.3061820864677429, |
| "learning_rate": 4.913430966257569e-06, |
| "loss": 0.7025, |
| "step": 606 |
| }, |
| { |
| "epoch": 0.29736680955297, |
| "grad_norm": 0.31017857789993286, |
| "learning_rate": 4.913090456441826e-06, |
| "loss": 0.6858, |
| "step": 607 |
| }, |
| { |
| "epoch": 0.29785670545009185, |
| "grad_norm": 0.3078562319278717, |
| "learning_rate": 4.9127492901066095e-06, |
| "loss": 0.6884, |
| "step": 608 |
| }, |
| { |
| "epoch": 0.2983466013472137, |
| "grad_norm": 0.29675593972206116, |
| "learning_rate": 4.912407467344742e-06, |
| "loss": 0.6452, |
| "step": 609 |
| }, |
| { |
| "epoch": 0.2988364972443356, |
| "grad_norm": 0.3073781430721283, |
| "learning_rate": 4.9120649882492195e-06, |
| "loss": 0.6797, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.2993263931414574, |
| "grad_norm": 0.3066023588180542, |
| "learning_rate": 4.911721852913223e-06, |
| "loss": 0.6618, |
| "step": 611 |
| }, |
| { |
| "epoch": 0.2998162890385793, |
| "grad_norm": 0.3042829930782318, |
| "learning_rate": 4.911378061430103e-06, |
| "loss": 0.6489, |
| "step": 612 |
| }, |
| { |
| "epoch": 0.30030618493570116, |
| "grad_norm": 0.2988530695438385, |
| "learning_rate": 4.911033613893397e-06, |
| "loss": 0.6994, |
| "step": 613 |
| }, |
| { |
| "epoch": 0.300796080832823, |
| "grad_norm": 0.31692102551460266, |
| "learning_rate": 4.910688510396815e-06, |
| "loss": 0.6698, |
| "step": 614 |
| }, |
| { |
| "epoch": 0.3012859767299449, |
| "grad_norm": 0.30138760805130005, |
| "learning_rate": 4.91034275103425e-06, |
| "loss": 0.652, |
| "step": 615 |
| }, |
| { |
| "epoch": 0.30177587262706673, |
| "grad_norm": 0.3250851035118103, |
| "learning_rate": 4.909996335899768e-06, |
| "loss": 0.6551, |
| "step": 616 |
| }, |
| { |
| "epoch": 0.3022657685241886, |
| "grad_norm": 0.31239888072013855, |
| "learning_rate": 4.909649265087621e-06, |
| "loss": 0.6676, |
| "step": 617 |
| }, |
| { |
| "epoch": 0.30275566442131047, |
| "grad_norm": 0.30099815130233765, |
| "learning_rate": 4.909301538692233e-06, |
| "loss": 0.6777, |
| "step": 618 |
| }, |
| { |
| "epoch": 0.3032455603184323, |
| "grad_norm": 0.3122364282608032, |
| "learning_rate": 4.908953156808207e-06, |
| "loss": 0.658, |
| "step": 619 |
| }, |
| { |
| "epoch": 0.3037354562155542, |
| "grad_norm": 0.3116292357444763, |
| "learning_rate": 4.908604119530328e-06, |
| "loss": 0.6574, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.30422535211267604, |
| "grad_norm": 0.30465927720069885, |
| "learning_rate": 4.908254426953557e-06, |
| "loss": 0.6238, |
| "step": 621 |
| }, |
| { |
| "epoch": 0.30471524800979793, |
| "grad_norm": 0.3098441958427429, |
| "learning_rate": 4.907904079173033e-06, |
| "loss": 0.6996, |
| "step": 622 |
| }, |
| { |
| "epoch": 0.3052051439069198, |
| "grad_norm": 0.3130321502685547, |
| "learning_rate": 4.907553076284073e-06, |
| "loss": 0.6849, |
| "step": 623 |
| }, |
| { |
| "epoch": 0.30569503980404167, |
| "grad_norm": 0.31285127997398376, |
| "learning_rate": 4.907201418382174e-06, |
| "loss": 0.6887, |
| "step": 624 |
| }, |
| { |
| "epoch": 0.3061849357011635, |
| "grad_norm": 0.3078165650367737, |
| "learning_rate": 4.906849105563008e-06, |
| "loss": 0.6516, |
| "step": 625 |
| }, |
| { |
| "epoch": 0.30667483159828535, |
| "grad_norm": 0.30509424209594727, |
| "learning_rate": 4.906496137922429e-06, |
| "loss": 0.6655, |
| "step": 626 |
| }, |
| { |
| "epoch": 0.30716472749540724, |
| "grad_norm": 0.30886662006378174, |
| "learning_rate": 4.906142515556469e-06, |
| "loss": 0.6742, |
| "step": 627 |
| }, |
| { |
| "epoch": 0.3076546233925291, |
| "grad_norm": 0.30892255902290344, |
| "learning_rate": 4.905788238561333e-06, |
| "loss": 0.6623, |
| "step": 628 |
| }, |
| { |
| "epoch": 0.308144519289651, |
| "grad_norm": 0.3067830502986908, |
| "learning_rate": 4.905433307033411e-06, |
| "loss": 0.6616, |
| "step": 629 |
| }, |
| { |
| "epoch": 0.3086344151867728, |
| "grad_norm": 0.3024778962135315, |
| "learning_rate": 4.905077721069265e-06, |
| "loss": 0.6747, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.30912431108389465, |
| "grad_norm": 0.3080230951309204, |
| "learning_rate": 4.90472148076564e-06, |
| "loss": 0.6855, |
| "step": 631 |
| }, |
| { |
| "epoch": 0.30961420698101655, |
| "grad_norm": 0.3134588897228241, |
| "learning_rate": 4.9043645862194545e-06, |
| "loss": 0.6981, |
| "step": 632 |
| }, |
| { |
| "epoch": 0.3101041028781384, |
| "grad_norm": 0.3064582049846649, |
| "learning_rate": 4.904007037527809e-06, |
| "loss": 0.6902, |
| "step": 633 |
| }, |
| { |
| "epoch": 0.3105939987752603, |
| "grad_norm": 0.3072272837162018, |
| "learning_rate": 4.903648834787981e-06, |
| "loss": 0.6876, |
| "step": 634 |
| }, |
| { |
| "epoch": 0.3110838946723821, |
| "grad_norm": 0.31387409567832947, |
| "learning_rate": 4.903289978097423e-06, |
| "loss": 0.6981, |
| "step": 635 |
| }, |
| { |
| "epoch": 0.31157379056950396, |
| "grad_norm": 0.30770981311798096, |
| "learning_rate": 4.90293046755377e-06, |
| "loss": 0.6514, |
| "step": 636 |
| }, |
| { |
| "epoch": 0.31206368646662586, |
| "grad_norm": 0.2999817132949829, |
| "learning_rate": 4.90257030325483e-06, |
| "loss": 0.6645, |
| "step": 637 |
| }, |
| { |
| "epoch": 0.3125535823637477, |
| "grad_norm": 0.3214426636695862, |
| "learning_rate": 4.9022094852985925e-06, |
| "loss": 0.6754, |
| "step": 638 |
| }, |
| { |
| "epoch": 0.3130434782608696, |
| "grad_norm": 0.3216695487499237, |
| "learning_rate": 4.901848013783225e-06, |
| "loss": 0.6642, |
| "step": 639 |
| }, |
| { |
| "epoch": 0.31353337415799143, |
| "grad_norm": 0.3139570355415344, |
| "learning_rate": 4.901485888807069e-06, |
| "loss": 0.6535, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.31402327005511327, |
| "grad_norm": 0.3131052851676941, |
| "learning_rate": 4.9011231104686495e-06, |
| "loss": 0.6743, |
| "step": 641 |
| }, |
| { |
| "epoch": 0.31451316595223516, |
| "grad_norm": 0.2983650863170624, |
| "learning_rate": 4.900759678866663e-06, |
| "loss": 0.6808, |
| "step": 642 |
| }, |
| { |
| "epoch": 0.315003061849357, |
| "grad_norm": 0.31281766295433044, |
| "learning_rate": 4.90039559409999e-06, |
| "loss": 0.6625, |
| "step": 643 |
| }, |
| { |
| "epoch": 0.3154929577464789, |
| "grad_norm": 0.3253592550754547, |
| "learning_rate": 4.9000308562676825e-06, |
| "loss": 0.6513, |
| "step": 644 |
| }, |
| { |
| "epoch": 0.31598285364360074, |
| "grad_norm": 0.3129440248012543, |
| "learning_rate": 4.8996654654689745e-06, |
| "loss": 0.6746, |
| "step": 645 |
| }, |
| { |
| "epoch": 0.3164727495407226, |
| "grad_norm": 0.3100399971008301, |
| "learning_rate": 4.899299421803277e-06, |
| "loss": 0.6788, |
| "step": 646 |
| }, |
| { |
| "epoch": 0.31696264543784447, |
| "grad_norm": 0.3134135901927948, |
| "learning_rate": 4.898932725370177e-06, |
| "loss": 0.667, |
| "step": 647 |
| }, |
| { |
| "epoch": 0.3174525413349663, |
| "grad_norm": 0.2989857792854309, |
| "learning_rate": 4.898565376269439e-06, |
| "loss": 0.6549, |
| "step": 648 |
| }, |
| { |
| "epoch": 0.3179424372320882, |
| "grad_norm": 0.307166188955307, |
| "learning_rate": 4.89819737460101e-06, |
| "loss": 0.6819, |
| "step": 649 |
| }, |
| { |
| "epoch": 0.31843233312921004, |
| "grad_norm": 0.3121511936187744, |
| "learning_rate": 4.897828720465007e-06, |
| "loss": 0.6865, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.3189222290263319, |
| "grad_norm": 0.3052747845649719, |
| "learning_rate": 4.897459413961729e-06, |
| "loss": 0.6588, |
| "step": 651 |
| }, |
| { |
| "epoch": 0.3194121249234538, |
| "grad_norm": 0.3178597688674927, |
| "learning_rate": 4.897089455191653e-06, |
| "loss": 0.6671, |
| "step": 652 |
| }, |
| { |
| "epoch": 0.3199020208205756, |
| "grad_norm": 0.3084629476070404, |
| "learning_rate": 4.896718844255431e-06, |
| "loss": 0.6698, |
| "step": 653 |
| }, |
| { |
| "epoch": 0.3203919167176975, |
| "grad_norm": 0.32827791571617126, |
| "learning_rate": 4.896347581253893e-06, |
| "loss": 0.667, |
| "step": 654 |
| }, |
| { |
| "epoch": 0.32088181261481935, |
| "grad_norm": 0.3143371641635895, |
| "learning_rate": 4.895975666288048e-06, |
| "loss": 0.6646, |
| "step": 655 |
| }, |
| { |
| "epoch": 0.3213717085119412, |
| "grad_norm": 0.3030723035335541, |
| "learning_rate": 4.8956030994590805e-06, |
| "loss": 0.6569, |
| "step": 656 |
| }, |
| { |
| "epoch": 0.3218616044090631, |
| "grad_norm": 0.313111275434494, |
| "learning_rate": 4.8952298808683554e-06, |
| "loss": 0.6601, |
| "step": 657 |
| }, |
| { |
| "epoch": 0.3223515003061849, |
| "grad_norm": 0.33992889523506165, |
| "learning_rate": 4.894856010617411e-06, |
| "loss": 0.6966, |
| "step": 658 |
| }, |
| { |
| "epoch": 0.3228413962033068, |
| "grad_norm": 0.31487035751342773, |
| "learning_rate": 4.894481488807964e-06, |
| "loss": 0.6819, |
| "step": 659 |
| }, |
| { |
| "epoch": 0.32333129210042866, |
| "grad_norm": 0.30711787939071655, |
| "learning_rate": 4.894106315541909e-06, |
| "loss": 0.6497, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.3238211879975505, |
| "grad_norm": 0.30664297938346863, |
| "learning_rate": 4.89373049092132e-06, |
| "loss": 0.6343, |
| "step": 661 |
| }, |
| { |
| "epoch": 0.3243110838946724, |
| "grad_norm": 0.32928934693336487, |
| "learning_rate": 4.893354015048444e-06, |
| "loss": 0.6819, |
| "step": 662 |
| }, |
| { |
| "epoch": 0.32480097979179423, |
| "grad_norm": 0.3158688545227051, |
| "learning_rate": 4.892976888025708e-06, |
| "loss": 0.671, |
| "step": 663 |
| }, |
| { |
| "epoch": 0.3252908756889161, |
| "grad_norm": 0.311565101146698, |
| "learning_rate": 4.892599109955715e-06, |
| "loss": 0.6619, |
| "step": 664 |
| }, |
| { |
| "epoch": 0.32578077158603796, |
| "grad_norm": 0.317327082157135, |
| "learning_rate": 4.892220680941247e-06, |
| "loss": 0.6821, |
| "step": 665 |
| }, |
| { |
| "epoch": 0.3262706674831598, |
| "grad_norm": 0.32189080119132996, |
| "learning_rate": 4.891841601085259e-06, |
| "loss": 0.6384, |
| "step": 666 |
| }, |
| { |
| "epoch": 0.3267605633802817, |
| "grad_norm": 0.3046722412109375, |
| "learning_rate": 4.891461870490887e-06, |
| "loss": 0.673, |
| "step": 667 |
| }, |
| { |
| "epoch": 0.32725045927740354, |
| "grad_norm": 0.31418314576148987, |
| "learning_rate": 4.891081489261443e-06, |
| "loss": 0.6604, |
| "step": 668 |
| }, |
| { |
| "epoch": 0.32774035517452543, |
| "grad_norm": 0.31496915221214294, |
| "learning_rate": 4.890700457500416e-06, |
| "loss": 0.6448, |
| "step": 669 |
| }, |
| { |
| "epoch": 0.32823025107164727, |
| "grad_norm": 0.3126322329044342, |
| "learning_rate": 4.890318775311471e-06, |
| "loss": 0.6533, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.3287201469687691, |
| "grad_norm": 0.305670827627182, |
| "learning_rate": 4.889936442798451e-06, |
| "loss": 0.64, |
| "step": 671 |
| }, |
| { |
| "epoch": 0.329210042865891, |
| "grad_norm": 0.32083502411842346, |
| "learning_rate": 4.889553460065375e-06, |
| "loss": 0.6648, |
| "step": 672 |
| }, |
| { |
| "epoch": 0.32969993876301285, |
| "grad_norm": 0.30888432264328003, |
| "learning_rate": 4.88916982721644e-06, |
| "loss": 0.6468, |
| "step": 673 |
| }, |
| { |
| "epoch": 0.33018983466013474, |
| "grad_norm": 0.3246508836746216, |
| "learning_rate": 4.88878554435602e-06, |
| "loss": 0.6956, |
| "step": 674 |
| }, |
| { |
| "epoch": 0.3306797305572566, |
| "grad_norm": 0.31742051243782043, |
| "learning_rate": 4.888400611588664e-06, |
| "loss": 0.6581, |
| "step": 675 |
| }, |
| { |
| "epoch": 0.3311696264543784, |
| "grad_norm": 0.2962053716182709, |
| "learning_rate": 4.8880150290191e-06, |
| "loss": 0.6419, |
| "step": 676 |
| }, |
| { |
| "epoch": 0.3316595223515003, |
| "grad_norm": 0.302521675825119, |
| "learning_rate": 4.887628796752232e-06, |
| "loss": 0.6541, |
| "step": 677 |
| }, |
| { |
| "epoch": 0.33214941824862215, |
| "grad_norm": 0.3055395483970642, |
| "learning_rate": 4.88724191489314e-06, |
| "loss": 0.657, |
| "step": 678 |
| }, |
| { |
| "epoch": 0.33263931414574405, |
| "grad_norm": 0.3162010908126831, |
| "learning_rate": 4.886854383547081e-06, |
| "loss": 0.6679, |
| "step": 679 |
| }, |
| { |
| "epoch": 0.3331292100428659, |
| "grad_norm": 0.3197442591190338, |
| "learning_rate": 4.88646620281949e-06, |
| "loss": 0.6334, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.3336191059399877, |
| "grad_norm": 0.3153987228870392, |
| "learning_rate": 4.8860773728159775e-06, |
| "loss": 0.6522, |
| "step": 681 |
| }, |
| { |
| "epoch": 0.3341090018371096, |
| "grad_norm": 0.32448455691337585, |
| "learning_rate": 4.885687893642329e-06, |
| "loss": 0.6632, |
| "step": 682 |
| }, |
| { |
| "epoch": 0.33459889773423146, |
| "grad_norm": 0.31581366062164307, |
| "learning_rate": 4.885297765404512e-06, |
| "loss": 0.6734, |
| "step": 683 |
| }, |
| { |
| "epoch": 0.33508879363135335, |
| "grad_norm": 0.32606983184814453, |
| "learning_rate": 4.884906988208664e-06, |
| "loss": 0.6559, |
| "step": 684 |
| }, |
| { |
| "epoch": 0.3355786895284752, |
| "grad_norm": 0.3181923031806946, |
| "learning_rate": 4.884515562161104e-06, |
| "loss": 0.6753, |
| "step": 685 |
| }, |
| { |
| "epoch": 0.3360685854255971, |
| "grad_norm": 0.29703840613365173, |
| "learning_rate": 4.884123487368324e-06, |
| "loss": 0.6682, |
| "step": 686 |
| }, |
| { |
| "epoch": 0.3365584813227189, |
| "grad_norm": 0.30182990431785583, |
| "learning_rate": 4.8837307639369966e-06, |
| "loss": 0.6391, |
| "step": 687 |
| }, |
| { |
| "epoch": 0.33704837721984077, |
| "grad_norm": 0.3200860917568207, |
| "learning_rate": 4.883337391973966e-06, |
| "loss": 0.6768, |
| "step": 688 |
| }, |
| { |
| "epoch": 0.33753827311696266, |
| "grad_norm": 0.29847651720046997, |
| "learning_rate": 4.882943371586256e-06, |
| "loss": 0.6144, |
| "step": 689 |
| }, |
| { |
| "epoch": 0.3380281690140845, |
| "grad_norm": 0.3135667145252228, |
| "learning_rate": 4.882548702881067e-06, |
| "loss": 0.6512, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.3385180649112064, |
| "grad_norm": 0.2976519465446472, |
| "learning_rate": 4.882153385965774e-06, |
| "loss": 0.6572, |
| "step": 691 |
| }, |
| { |
| "epoch": 0.33900796080832823, |
| "grad_norm": 0.3228099048137665, |
| "learning_rate": 4.881757420947928e-06, |
| "loss": 0.6546, |
| "step": 692 |
| }, |
| { |
| "epoch": 0.3394978567054501, |
| "grad_norm": 0.3073636293411255, |
| "learning_rate": 4.88136080793526e-06, |
| "loss": 0.6427, |
| "step": 693 |
| }, |
| { |
| "epoch": 0.33998775260257197, |
| "grad_norm": 0.3015718162059784, |
| "learning_rate": 4.880963547035674e-06, |
| "loss": 0.6581, |
| "step": 694 |
| }, |
| { |
| "epoch": 0.3404776484996938, |
| "grad_norm": 0.30982354283332825, |
| "learning_rate": 4.88056563835725e-06, |
| "loss": 0.6349, |
| "step": 695 |
| }, |
| { |
| "epoch": 0.3409675443968157, |
| "grad_norm": 0.3180277645587921, |
| "learning_rate": 4.880167082008247e-06, |
| "loss": 0.6618, |
| "step": 696 |
| }, |
| { |
| "epoch": 0.34145744029393754, |
| "grad_norm": 0.3110656440258026, |
| "learning_rate": 4.879767878097098e-06, |
| "loss": 0.665, |
| "step": 697 |
| }, |
| { |
| "epoch": 0.3419473361910594, |
| "grad_norm": 0.3028208911418915, |
| "learning_rate": 4.879368026732412e-06, |
| "loss": 0.6707, |
| "step": 698 |
| }, |
| { |
| "epoch": 0.3424372320881813, |
| "grad_norm": 0.3225865066051483, |
| "learning_rate": 4.878967528022976e-06, |
| "loss": 0.6498, |
| "step": 699 |
| }, |
| { |
| "epoch": 0.3429271279853031, |
| "grad_norm": 0.3116249442100525, |
| "learning_rate": 4.87856638207775e-06, |
| "loss": 0.6627, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.343417023882425, |
| "grad_norm": 0.32170018553733826, |
| "learning_rate": 4.878164589005876e-06, |
| "loss": 0.6571, |
| "step": 701 |
| }, |
| { |
| "epoch": 0.34390691977954685, |
| "grad_norm": 0.3243235647678375, |
| "learning_rate": 4.877762148916665e-06, |
| "loss": 0.6281, |
| "step": 702 |
| }, |
| { |
| "epoch": 0.3443968156766687, |
| "grad_norm": 0.30337801575660706, |
| "learning_rate": 4.8773590619196064e-06, |
| "loss": 0.6781, |
| "step": 703 |
| }, |
| { |
| "epoch": 0.3448867115737906, |
| "grad_norm": 0.30504652857780457, |
| "learning_rate": 4.87695532812437e-06, |
| "loss": 0.6737, |
| "step": 704 |
| }, |
| { |
| "epoch": 0.3453766074709124, |
| "grad_norm": 0.33436259627342224, |
| "learning_rate": 4.876550947640794e-06, |
| "loss": 0.6872, |
| "step": 705 |
| }, |
| { |
| "epoch": 0.3458665033680343, |
| "grad_norm": 0.3146010935306549, |
| "learning_rate": 4.8761459205789e-06, |
| "loss": 0.6522, |
| "step": 706 |
| }, |
| { |
| "epoch": 0.34635639926515616, |
| "grad_norm": 0.3099222779273987, |
| "learning_rate": 4.8757402470488795e-06, |
| "loss": 0.6409, |
| "step": 707 |
| }, |
| { |
| "epoch": 0.346846295162278, |
| "grad_norm": 0.30881136655807495, |
| "learning_rate": 4.875333927161104e-06, |
| "loss": 0.6406, |
| "step": 708 |
| }, |
| { |
| "epoch": 0.3473361910593999, |
| "grad_norm": 0.310293048620224, |
| "learning_rate": 4.874926961026118e-06, |
| "loss": 0.6476, |
| "step": 709 |
| }, |
| { |
| "epoch": 0.34782608695652173, |
| "grad_norm": 0.33084210753440857, |
| "learning_rate": 4.874519348754644e-06, |
| "loss": 0.6554, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.3483159828536436, |
| "grad_norm": 0.3112454116344452, |
| "learning_rate": 4.874111090457579e-06, |
| "loss": 0.668, |
| "step": 711 |
| }, |
| { |
| "epoch": 0.34880587875076546, |
| "grad_norm": 0.32594504952430725, |
| "learning_rate": 4.873702186245996e-06, |
| "loss": 0.678, |
| "step": 712 |
| }, |
| { |
| "epoch": 0.3492957746478873, |
| "grad_norm": 0.3138704299926758, |
| "learning_rate": 4.873292636231145e-06, |
| "loss": 0.6708, |
| "step": 713 |
| }, |
| { |
| "epoch": 0.3497856705450092, |
| "grad_norm": 0.3248952627182007, |
| "learning_rate": 4.872882440524448e-06, |
| "loss": 0.6453, |
| "step": 714 |
| }, |
| { |
| "epoch": 0.35027556644213104, |
| "grad_norm": 0.30387961864471436, |
| "learning_rate": 4.872471599237508e-06, |
| "loss": 0.6293, |
| "step": 715 |
| }, |
| { |
| "epoch": 0.35076546233925293, |
| "grad_norm": 0.3210145831108093, |
| "learning_rate": 4.8720601124821e-06, |
| "loss": 0.6811, |
| "step": 716 |
| }, |
| { |
| "epoch": 0.35125535823637477, |
| "grad_norm": 0.3145703673362732, |
| "learning_rate": 4.8716479803701755e-06, |
| "loss": 0.6534, |
| "step": 717 |
| }, |
| { |
| "epoch": 0.3517452541334966, |
| "grad_norm": 0.32032543420791626, |
| "learning_rate": 4.871235203013861e-06, |
| "loss": 0.677, |
| "step": 718 |
| }, |
| { |
| "epoch": 0.3522351500306185, |
| "grad_norm": 0.311057984828949, |
| "learning_rate": 4.870821780525458e-06, |
| "loss": 0.6599, |
| "step": 719 |
| }, |
| { |
| "epoch": 0.35272504592774034, |
| "grad_norm": 0.31371673941612244, |
| "learning_rate": 4.8704077130174475e-06, |
| "loss": 0.6655, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.35321494182486224, |
| "grad_norm": 0.31282398104667664, |
| "learning_rate": 4.869993000602482e-06, |
| "loss": 0.6588, |
| "step": 721 |
| }, |
| { |
| "epoch": 0.3537048377219841, |
| "grad_norm": 0.3058509826660156, |
| "learning_rate": 4.8695776433933895e-06, |
| "loss": 0.6554, |
| "step": 722 |
| }, |
| { |
| "epoch": 0.3541947336191059, |
| "grad_norm": 0.32043665647506714, |
| "learning_rate": 4.8691616415031765e-06, |
| "loss": 0.6419, |
| "step": 723 |
| }, |
| { |
| "epoch": 0.3546846295162278, |
| "grad_norm": 0.3181472718715668, |
| "learning_rate": 4.86874499504502e-06, |
| "loss": 0.6498, |
| "step": 724 |
| }, |
| { |
| "epoch": 0.35517452541334965, |
| "grad_norm": 0.31420233845710754, |
| "learning_rate": 4.868327704132277e-06, |
| "loss": 0.6833, |
| "step": 725 |
| }, |
| { |
| "epoch": 0.35566442131047155, |
| "grad_norm": 0.30752044916152954, |
| "learning_rate": 4.867909768878479e-06, |
| "loss": 0.6696, |
| "step": 726 |
| }, |
| { |
| "epoch": 0.3561543172075934, |
| "grad_norm": 0.2980179488658905, |
| "learning_rate": 4.867491189397331e-06, |
| "loss": 0.6181, |
| "step": 727 |
| }, |
| { |
| "epoch": 0.3566442131047152, |
| "grad_norm": 0.33069589734077454, |
| "learning_rate": 4.867071965802715e-06, |
| "loss": 0.6842, |
| "step": 728 |
| }, |
| { |
| "epoch": 0.3571341090018371, |
| "grad_norm": 0.31239742040634155, |
| "learning_rate": 4.866652098208684e-06, |
| "loss": 0.6794, |
| "step": 729 |
| }, |
| { |
| "epoch": 0.35762400489895896, |
| "grad_norm": 0.3049803674221039, |
| "learning_rate": 4.866231586729474e-06, |
| "loss": 0.6487, |
| "step": 730 |
| }, |
| { |
| "epoch": 0.35811390079608085, |
| "grad_norm": 0.32708168029785156, |
| "learning_rate": 4.86581043147949e-06, |
| "loss": 0.6732, |
| "step": 731 |
| }, |
| { |
| "epoch": 0.3586037966932027, |
| "grad_norm": 0.317973792552948, |
| "learning_rate": 4.865388632573313e-06, |
| "loss": 0.6676, |
| "step": 732 |
| }, |
| { |
| "epoch": 0.35909369259032453, |
| "grad_norm": 0.3271551728248596, |
| "learning_rate": 4.864966190125702e-06, |
| "loss": 0.6439, |
| "step": 733 |
| }, |
| { |
| "epoch": 0.3595835884874464, |
| "grad_norm": 0.3095121383666992, |
| "learning_rate": 4.864543104251587e-06, |
| "loss": 0.6134, |
| "step": 734 |
| }, |
| { |
| "epoch": 0.36007348438456827, |
| "grad_norm": 0.3160260319709778, |
| "learning_rate": 4.864119375066077e-06, |
| "loss": 0.6594, |
| "step": 735 |
| }, |
| { |
| "epoch": 0.36056338028169016, |
| "grad_norm": 0.3187366724014282, |
| "learning_rate": 4.8636950026844534e-06, |
| "loss": 0.6758, |
| "step": 736 |
| }, |
| { |
| "epoch": 0.361053276178812, |
| "grad_norm": 0.32910969853401184, |
| "learning_rate": 4.863269987222174e-06, |
| "loss": 0.6372, |
| "step": 737 |
| }, |
| { |
| "epoch": 0.36154317207593384, |
| "grad_norm": 0.3208555281162262, |
| "learning_rate": 4.862844328794871e-06, |
| "loss": 0.632, |
| "step": 738 |
| }, |
| { |
| "epoch": 0.36203306797305573, |
| "grad_norm": 0.31716328859329224, |
| "learning_rate": 4.862418027518351e-06, |
| "loss": 0.6717, |
| "step": 739 |
| }, |
| { |
| "epoch": 0.3625229638701776, |
| "grad_norm": 0.33008137345314026, |
| "learning_rate": 4.861991083508595e-06, |
| "loss": 0.6629, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.36301285976729947, |
| "grad_norm": 0.32629767060279846, |
| "learning_rate": 4.861563496881763e-06, |
| "loss": 0.6517, |
| "step": 741 |
| }, |
| { |
| "epoch": 0.3635027556644213, |
| "grad_norm": 0.32302382588386536, |
| "learning_rate": 4.861135267754183e-06, |
| "loss": 0.6497, |
| "step": 742 |
| }, |
| { |
| "epoch": 0.36399265156154315, |
| "grad_norm": 0.31350627541542053, |
| "learning_rate": 4.860706396242364e-06, |
| "loss": 0.6607, |
| "step": 743 |
| }, |
| { |
| "epoch": 0.36448254745866504, |
| "grad_norm": 0.3175422251224518, |
| "learning_rate": 4.860276882462986e-06, |
| "loss": 0.6311, |
| "step": 744 |
| }, |
| { |
| "epoch": 0.3649724433557869, |
| "grad_norm": 0.31468355655670166, |
| "learning_rate": 4.8598467265329065e-06, |
| "loss": 0.6402, |
| "step": 745 |
| }, |
| { |
| "epoch": 0.3654623392529088, |
| "grad_norm": 0.3147619962692261, |
| "learning_rate": 4.859415928569154e-06, |
| "loss": 0.6882, |
| "step": 746 |
| }, |
| { |
| "epoch": 0.3659522351500306, |
| "grad_norm": 0.31198322772979736, |
| "learning_rate": 4.858984488688937e-06, |
| "loss": 0.6679, |
| "step": 747 |
| }, |
| { |
| "epoch": 0.36644213104715245, |
| "grad_norm": 0.3297489285469055, |
| "learning_rate": 4.858552407009633e-06, |
| "loss": 0.685, |
| "step": 748 |
| }, |
| { |
| "epoch": 0.36693202694427435, |
| "grad_norm": 0.307198166847229, |
| "learning_rate": 4.858119683648796e-06, |
| "loss": 0.6682, |
| "step": 749 |
| }, |
| { |
| "epoch": 0.3674219228413962, |
| "grad_norm": 0.3127272129058838, |
| "learning_rate": 4.857686318724159e-06, |
| "loss": 0.6529, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.3679118187385181, |
| "grad_norm": 0.31503623723983765, |
| "learning_rate": 4.857252312353622e-06, |
| "loss": 0.6607, |
| "step": 751 |
| }, |
| { |
| "epoch": 0.3684017146356399, |
| "grad_norm": 0.3225836157798767, |
| "learning_rate": 4.856817664655266e-06, |
| "loss": 0.6539, |
| "step": 752 |
| }, |
| { |
| "epoch": 0.3688916105327618, |
| "grad_norm": 0.3189702332019806, |
| "learning_rate": 4.856382375747341e-06, |
| "loss": 0.68, |
| "step": 753 |
| }, |
| { |
| "epoch": 0.36938150642988365, |
| "grad_norm": 0.3274145722389221, |
| "learning_rate": 4.855946445748277e-06, |
| "loss": 0.6388, |
| "step": 754 |
| }, |
| { |
| "epoch": 0.3698714023270055, |
| "grad_norm": 0.3334755003452301, |
| "learning_rate": 4.855509874776674e-06, |
| "loss": 0.6597, |
| "step": 755 |
| }, |
| { |
| "epoch": 0.3703612982241274, |
| "grad_norm": 0.3221217095851898, |
| "learning_rate": 4.855072662951308e-06, |
| "loss": 0.6532, |
| "step": 756 |
| }, |
| { |
| "epoch": 0.37085119412124923, |
| "grad_norm": 0.32857850193977356, |
| "learning_rate": 4.85463481039113e-06, |
| "loss": 0.6585, |
| "step": 757 |
| }, |
| { |
| "epoch": 0.3713410900183711, |
| "grad_norm": 0.3258369266986847, |
| "learning_rate": 4.854196317215265e-06, |
| "loss": 0.673, |
| "step": 758 |
| }, |
| { |
| "epoch": 0.37183098591549296, |
| "grad_norm": 0.3420117497444153, |
| "learning_rate": 4.853757183543012e-06, |
| "loss": 0.6838, |
| "step": 759 |
| }, |
| { |
| "epoch": 0.3723208818126148, |
| "grad_norm": 0.30562669038772583, |
| "learning_rate": 4.853317409493844e-06, |
| "loss": 0.6298, |
| "step": 760 |
| }, |
| { |
| "epoch": 0.3728107777097367, |
| "grad_norm": 0.3201233148574829, |
| "learning_rate": 4.852876995187408e-06, |
| "loss": 0.6723, |
| "step": 761 |
| }, |
| { |
| "epoch": 0.37330067360685854, |
| "grad_norm": 0.2966959774494171, |
| "learning_rate": 4.852435940743526e-06, |
| "loss": 0.6491, |
| "step": 762 |
| }, |
| { |
| "epoch": 0.37379056950398043, |
| "grad_norm": 0.32500383257865906, |
| "learning_rate": 4.851994246282194e-06, |
| "loss": 0.6564, |
| "step": 763 |
| }, |
| { |
| "epoch": 0.37428046540110227, |
| "grad_norm": 0.3188881278038025, |
| "learning_rate": 4.851551911923582e-06, |
| "loss": 0.6505, |
| "step": 764 |
| }, |
| { |
| "epoch": 0.3747703612982241, |
| "grad_norm": 0.3221505284309387, |
| "learning_rate": 4.8511089377880334e-06, |
| "loss": 0.6634, |
| "step": 765 |
| }, |
| { |
| "epoch": 0.375260257195346, |
| "grad_norm": 0.3052166700363159, |
| "learning_rate": 4.850665323996068e-06, |
| "loss": 0.6639, |
| "step": 766 |
| }, |
| { |
| "epoch": 0.37575015309246784, |
| "grad_norm": 0.33018791675567627, |
| "learning_rate": 4.850221070668376e-06, |
| "loss": 0.6912, |
| "step": 767 |
| }, |
| { |
| "epoch": 0.37624004898958974, |
| "grad_norm": 0.32073283195495605, |
| "learning_rate": 4.849776177925825e-06, |
| "loss": 0.6493, |
| "step": 768 |
| }, |
| { |
| "epoch": 0.3767299448867116, |
| "grad_norm": 0.31678506731987, |
| "learning_rate": 4.849330645889454e-06, |
| "loss": 0.6803, |
| "step": 769 |
| }, |
| { |
| "epoch": 0.3772198407838334, |
| "grad_norm": 0.32724466919898987, |
| "learning_rate": 4.848884474680477e-06, |
| "loss": 0.6704, |
| "step": 770 |
| }, |
| { |
| "epoch": 0.3777097366809553, |
| "grad_norm": 0.3205874264240265, |
| "learning_rate": 4.8484376644202845e-06, |
| "loss": 0.6577, |
| "step": 771 |
| }, |
| { |
| "epoch": 0.37819963257807715, |
| "grad_norm": 0.30893105268478394, |
| "learning_rate": 4.847990215230435e-06, |
| "loss": 0.666, |
| "step": 772 |
| }, |
| { |
| "epoch": 0.37868952847519904, |
| "grad_norm": 0.31219717860221863, |
| "learning_rate": 4.847542127232665e-06, |
| "loss": 0.6565, |
| "step": 773 |
| }, |
| { |
| "epoch": 0.3791794243723209, |
| "grad_norm": 0.3261759579181671, |
| "learning_rate": 4.847093400548885e-06, |
| "loss": 0.677, |
| "step": 774 |
| }, |
| { |
| "epoch": 0.3796693202694427, |
| "grad_norm": 0.31503742933273315, |
| "learning_rate": 4.846644035301178e-06, |
| "loss": 0.6666, |
| "step": 775 |
| }, |
| { |
| "epoch": 0.3801592161665646, |
| "grad_norm": 0.31190887093544006, |
| "learning_rate": 4.8461940316118e-06, |
| "loss": 0.6574, |
| "step": 776 |
| }, |
| { |
| "epoch": 0.38064911206368646, |
| "grad_norm": 0.3217906355857849, |
| "learning_rate": 4.845743389603182e-06, |
| "loss": 0.6441, |
| "step": 777 |
| }, |
| { |
| "epoch": 0.38113900796080835, |
| "grad_norm": 0.33193954825401306, |
| "learning_rate": 4.845292109397929e-06, |
| "loss": 0.6248, |
| "step": 778 |
| }, |
| { |
| "epoch": 0.3816289038579302, |
| "grad_norm": 0.3300223648548126, |
| "learning_rate": 4.844840191118819e-06, |
| "loss": 0.6431, |
| "step": 779 |
| }, |
| { |
| "epoch": 0.38211879975505203, |
| "grad_norm": 0.33220401406288147, |
| "learning_rate": 4.844387634888802e-06, |
| "loss": 0.6605, |
| "step": 780 |
| }, |
| { |
| "epoch": 0.3826086956521739, |
| "grad_norm": 0.3159903585910797, |
| "learning_rate": 4.8439344408310044e-06, |
| "loss": 0.655, |
| "step": 781 |
| }, |
| { |
| "epoch": 0.38309859154929576, |
| "grad_norm": 0.32114678621292114, |
| "learning_rate": 4.843480609068724e-06, |
| "loss": 0.658, |
| "step": 782 |
| }, |
| { |
| "epoch": 0.38358848744641766, |
| "grad_norm": 0.3086536228656769, |
| "learning_rate": 4.843026139725433e-06, |
| "loss": 0.6794, |
| "step": 783 |
| }, |
| { |
| "epoch": 0.3840783833435395, |
| "grad_norm": 0.31365227699279785, |
| "learning_rate": 4.842571032924778e-06, |
| "loss": 0.6678, |
| "step": 784 |
| }, |
| { |
| "epoch": 0.38456827924066134, |
| "grad_norm": 0.3227309286594391, |
| "learning_rate": 4.842115288790577e-06, |
| "loss": 0.6379, |
| "step": 785 |
| }, |
| { |
| "epoch": 0.38505817513778323, |
| "grad_norm": 0.32405832409858704, |
| "learning_rate": 4.8416589074468224e-06, |
| "loss": 0.652, |
| "step": 786 |
| }, |
| { |
| "epoch": 0.38554807103490507, |
| "grad_norm": 0.31400805711746216, |
| "learning_rate": 4.841201889017681e-06, |
| "loss": 0.6566, |
| "step": 787 |
| }, |
| { |
| "epoch": 0.38603796693202697, |
| "grad_norm": 0.3314230144023895, |
| "learning_rate": 4.84074423362749e-06, |
| "loss": 0.6414, |
| "step": 788 |
| }, |
| { |
| "epoch": 0.3865278628291488, |
| "grad_norm": 0.3287082314491272, |
| "learning_rate": 4.840285941400765e-06, |
| "loss": 0.6401, |
| "step": 789 |
| }, |
| { |
| "epoch": 0.38701775872627064, |
| "grad_norm": 0.31949952244758606, |
| "learning_rate": 4.8398270124621886e-06, |
| "loss": 0.6583, |
| "step": 790 |
| }, |
| { |
| "epoch": 0.38750765462339254, |
| "grad_norm": 0.3409273028373718, |
| "learning_rate": 4.839367446936621e-06, |
| "loss": 0.651, |
| "step": 791 |
| }, |
| { |
| "epoch": 0.3879975505205144, |
| "grad_norm": 0.31103092432022095, |
| "learning_rate": 4.838907244949093e-06, |
| "loss": 0.6546, |
| "step": 792 |
| }, |
| { |
| "epoch": 0.3884874464176363, |
| "grad_norm": 0.31950950622558594, |
| "learning_rate": 4.8384464066248114e-06, |
| "loss": 0.6822, |
| "step": 793 |
| }, |
| { |
| "epoch": 0.3889773423147581, |
| "grad_norm": 0.3224134147167206, |
| "learning_rate": 4.8379849320891545e-06, |
| "loss": 0.6731, |
| "step": 794 |
| }, |
| { |
| "epoch": 0.38946723821187995, |
| "grad_norm": 0.31685829162597656, |
| "learning_rate": 4.8375228214676735e-06, |
| "loss": 0.6401, |
| "step": 795 |
| }, |
| { |
| "epoch": 0.38995713410900185, |
| "grad_norm": 0.3154316544532776, |
| "learning_rate": 4.837060074886092e-06, |
| "loss": 0.6542, |
| "step": 796 |
| }, |
| { |
| "epoch": 0.3904470300061237, |
| "grad_norm": 0.30656275153160095, |
| "learning_rate": 4.836596692470309e-06, |
| "loss": 0.6541, |
| "step": 797 |
| }, |
| { |
| "epoch": 0.3909369259032456, |
| "grad_norm": 0.30986401438713074, |
| "learning_rate": 4.836132674346393e-06, |
| "loss": 0.6513, |
| "step": 798 |
| }, |
| { |
| "epoch": 0.3914268218003674, |
| "grad_norm": 0.3240419328212738, |
| "learning_rate": 4.83566802064059e-06, |
| "loss": 0.6405, |
| "step": 799 |
| }, |
| { |
| "epoch": 0.39191671769748926, |
| "grad_norm": 0.3198719620704651, |
| "learning_rate": 4.8352027314793145e-06, |
| "loss": 0.633, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.39240661359461115, |
| "grad_norm": 0.336039662361145, |
| "learning_rate": 4.834736806989156e-06, |
| "loss": 0.6608, |
| "step": 801 |
| }, |
| { |
| "epoch": 0.392896509491733, |
| "grad_norm": 0.3361132740974426, |
| "learning_rate": 4.834270247296878e-06, |
| "loss": 0.6726, |
| "step": 802 |
| }, |
| { |
| "epoch": 0.3933864053888549, |
| "grad_norm": 0.3405260741710663, |
| "learning_rate": 4.833803052529414e-06, |
| "loss": 0.6692, |
| "step": 803 |
| }, |
| { |
| "epoch": 0.3938763012859767, |
| "grad_norm": 0.3248771131038666, |
| "learning_rate": 4.8333352228138725e-06, |
| "loss": 0.6819, |
| "step": 804 |
| }, |
| { |
| "epoch": 0.39436619718309857, |
| "grad_norm": 0.3115767538547516, |
| "learning_rate": 4.8328667582775336e-06, |
| "loss": 0.6491, |
| "step": 805 |
| }, |
| { |
| "epoch": 0.39485609308022046, |
| "grad_norm": 0.31870561838150024, |
| "learning_rate": 4.83239765904785e-06, |
| "loss": 0.6663, |
| "step": 806 |
| }, |
| { |
| "epoch": 0.3953459889773423, |
| "grad_norm": 0.31889578700065613, |
| "learning_rate": 4.8319279252524495e-06, |
| "loss": 0.6528, |
| "step": 807 |
| }, |
| { |
| "epoch": 0.3958358848744642, |
| "grad_norm": 0.32603761553764343, |
| "learning_rate": 4.831457557019128e-06, |
| "loss": 0.6628, |
| "step": 808 |
| }, |
| { |
| "epoch": 0.39632578077158603, |
| "grad_norm": 0.3227848708629608, |
| "learning_rate": 4.830986554475859e-06, |
| "loss": 0.6717, |
| "step": 809 |
| }, |
| { |
| "epoch": 0.3968156766687079, |
| "grad_norm": 0.30794838070869446, |
| "learning_rate": 4.830514917750785e-06, |
| "loss": 0.6596, |
| "step": 810 |
| }, |
| { |
| "epoch": 0.39730557256582977, |
| "grad_norm": 0.34206315875053406, |
| "learning_rate": 4.830042646972221e-06, |
| "loss": 0.6542, |
| "step": 811 |
| }, |
| { |
| "epoch": 0.3977954684629516, |
| "grad_norm": 0.31525009870529175, |
| "learning_rate": 4.8295697422686586e-06, |
| "loss": 0.6531, |
| "step": 812 |
| }, |
| { |
| "epoch": 0.3982853643600735, |
| "grad_norm": 0.34714141488075256, |
| "learning_rate": 4.829096203768757e-06, |
| "loss": 0.6652, |
| "step": 813 |
| }, |
| { |
| "epoch": 0.39877526025719534, |
| "grad_norm": 0.3229881227016449, |
| "learning_rate": 4.828622031601351e-06, |
| "loss": 0.6705, |
| "step": 814 |
| }, |
| { |
| "epoch": 0.3992651561543172, |
| "grad_norm": 0.31995874643325806, |
| "learning_rate": 4.828147225895445e-06, |
| "loss": 0.6507, |
| "step": 815 |
| }, |
| { |
| "epoch": 0.3997550520514391, |
| "grad_norm": 0.326016902923584, |
| "learning_rate": 4.827671786780219e-06, |
| "loss": 0.6786, |
| "step": 816 |
| }, |
| { |
| "epoch": 0.4002449479485609, |
| "grad_norm": 0.318215548992157, |
| "learning_rate": 4.8271957143850235e-06, |
| "loss": 0.6425, |
| "step": 817 |
| }, |
| { |
| "epoch": 0.4007348438456828, |
| "grad_norm": 0.3327416777610779, |
| "learning_rate": 4.82671900883938e-06, |
| "loss": 0.6546, |
| "step": 818 |
| }, |
| { |
| "epoch": 0.40122473974280465, |
| "grad_norm": 0.33632010221481323, |
| "learning_rate": 4.826241670272986e-06, |
| "loss": 0.6582, |
| "step": 819 |
| }, |
| { |
| "epoch": 0.40171463563992654, |
| "grad_norm": 0.3371349275112152, |
| "learning_rate": 4.825763698815709e-06, |
| "loss": 0.6768, |
| "step": 820 |
| }, |
| { |
| "epoch": 0.4022045315370484, |
| "grad_norm": 0.332950621843338, |
| "learning_rate": 4.825285094597586e-06, |
| "loss": 0.6613, |
| "step": 821 |
| }, |
| { |
| "epoch": 0.4026944274341702, |
| "grad_norm": 0.31979843974113464, |
| "learning_rate": 4.824805857748831e-06, |
| "loss": 0.6281, |
| "step": 822 |
| }, |
| { |
| "epoch": 0.4031843233312921, |
| "grad_norm": 0.33180534839630127, |
| "learning_rate": 4.824325988399828e-06, |
| "loss": 0.6741, |
| "step": 823 |
| }, |
| { |
| "epoch": 0.40367421922841396, |
| "grad_norm": 0.32684949040412903, |
| "learning_rate": 4.8238454866811326e-06, |
| "loss": 0.6638, |
| "step": 824 |
| }, |
| { |
| "epoch": 0.40416411512553585, |
| "grad_norm": 0.3229905664920807, |
| "learning_rate": 4.823364352723472e-06, |
| "loss": 0.6545, |
| "step": 825 |
| }, |
| { |
| "epoch": 0.4046540110226577, |
| "grad_norm": 0.32018545269966125, |
| "learning_rate": 4.822882586657748e-06, |
| "loss": 0.6553, |
| "step": 826 |
| }, |
| { |
| "epoch": 0.40514390691977953, |
| "grad_norm": 0.32592976093292236, |
| "learning_rate": 4.8224001886150315e-06, |
| "loss": 0.6425, |
| "step": 827 |
| }, |
| { |
| "epoch": 0.4056338028169014, |
| "grad_norm": 0.3335873484611511, |
| "learning_rate": 4.821917158726568e-06, |
| "loss": 0.6419, |
| "step": 828 |
| }, |
| { |
| "epoch": 0.40612369871402326, |
| "grad_norm": 0.32976460456848145, |
| "learning_rate": 4.821433497123772e-06, |
| "loss": 0.6545, |
| "step": 829 |
| }, |
| { |
| "epoch": 0.40661359461114516, |
| "grad_norm": 0.3185843527317047, |
| "learning_rate": 4.820949203938231e-06, |
| "loss": 0.6607, |
| "step": 830 |
| }, |
| { |
| "epoch": 0.407103490508267, |
| "grad_norm": 0.3265590965747833, |
| "learning_rate": 4.820464279301705e-06, |
| "loss": 0.6779, |
| "step": 831 |
| }, |
| { |
| "epoch": 0.40759338640538884, |
| "grad_norm": 0.32094770669937134, |
| "learning_rate": 4.819978723346127e-06, |
| "loss": 0.6139, |
| "step": 832 |
| }, |
| { |
| "epoch": 0.40808328230251073, |
| "grad_norm": 0.34664106369018555, |
| "learning_rate": 4.819492536203598e-06, |
| "loss": 0.6486, |
| "step": 833 |
| }, |
| { |
| "epoch": 0.40857317819963257, |
| "grad_norm": 0.32802873849868774, |
| "learning_rate": 4.819005718006394e-06, |
| "loss": 0.6516, |
| "step": 834 |
| }, |
| { |
| "epoch": 0.40906307409675446, |
| "grad_norm": 0.324709415435791, |
| "learning_rate": 4.8185182688869615e-06, |
| "loss": 0.665, |
| "step": 835 |
| }, |
| { |
| "epoch": 0.4095529699938763, |
| "grad_norm": 0.31625524163246155, |
| "learning_rate": 4.818030188977918e-06, |
| "loss": 0.6571, |
| "step": 836 |
| }, |
| { |
| "epoch": 0.41004286589099814, |
| "grad_norm": 0.3230063021183014, |
| "learning_rate": 4.817541478412055e-06, |
| "loss": 0.6628, |
| "step": 837 |
| }, |
| { |
| "epoch": 0.41053276178812004, |
| "grad_norm": 0.3374910354614258, |
| "learning_rate": 4.817052137322334e-06, |
| "loss": 0.6773, |
| "step": 838 |
| }, |
| { |
| "epoch": 0.4110226576852419, |
| "grad_norm": 0.32604286074638367, |
| "learning_rate": 4.816562165841885e-06, |
| "loss": 0.6695, |
| "step": 839 |
| }, |
| { |
| "epoch": 0.41151255358236377, |
| "grad_norm": 0.3211324214935303, |
| "learning_rate": 4.816071564104015e-06, |
| "loss": 0.6546, |
| "step": 840 |
| }, |
| { |
| "epoch": 0.4120024494794856, |
| "grad_norm": 0.3172019124031067, |
| "learning_rate": 4.815580332242199e-06, |
| "loss": 0.661, |
| "step": 841 |
| }, |
| { |
| "epoch": 0.41249234537660745, |
| "grad_norm": 0.32369232177734375, |
| "learning_rate": 4.8150884703900855e-06, |
| "loss": 0.6643, |
| "step": 842 |
| }, |
| { |
| "epoch": 0.41298224127372934, |
| "grad_norm": 0.3227887749671936, |
| "learning_rate": 4.814595978681493e-06, |
| "loss": 0.6353, |
| "step": 843 |
| }, |
| { |
| "epoch": 0.4134721371708512, |
| "grad_norm": 0.32307952642440796, |
| "learning_rate": 4.814102857250411e-06, |
| "loss": 0.6701, |
| "step": 844 |
| }, |
| { |
| "epoch": 0.4139620330679731, |
| "grad_norm": 0.335151731967926, |
| "learning_rate": 4.813609106231002e-06, |
| "loss": 0.6335, |
| "step": 845 |
| }, |
| { |
| "epoch": 0.4144519289650949, |
| "grad_norm": 0.32878342270851135, |
| "learning_rate": 4.813114725757597e-06, |
| "loss": 0.6643, |
| "step": 846 |
| }, |
| { |
| "epoch": 0.41494182486221676, |
| "grad_norm": 0.3271647095680237, |
| "learning_rate": 4.812619715964702e-06, |
| "loss": 0.6337, |
| "step": 847 |
| }, |
| { |
| "epoch": 0.41543172075933865, |
| "grad_norm": 0.3339027166366577, |
| "learning_rate": 4.8121240769869905e-06, |
| "loss": 0.6458, |
| "step": 848 |
| }, |
| { |
| "epoch": 0.4159216166564605, |
| "grad_norm": 0.33395877480506897, |
| "learning_rate": 4.8116278089593104e-06, |
| "loss": 0.6487, |
| "step": 849 |
| }, |
| { |
| "epoch": 0.4164115125535824, |
| "grad_norm": 0.34332144260406494, |
| "learning_rate": 4.811130912016679e-06, |
| "loss": 0.6517, |
| "step": 850 |
| }, |
| { |
| "epoch": 0.4169014084507042, |
| "grad_norm": 0.37615567445755005, |
| "learning_rate": 4.810633386294285e-06, |
| "loss": 0.6458, |
| "step": 851 |
| }, |
| { |
| "epoch": 0.41739130434782606, |
| "grad_norm": 0.3249885141849518, |
| "learning_rate": 4.810135231927486e-06, |
| "loss": 0.6688, |
| "step": 852 |
| }, |
| { |
| "epoch": 0.41788120024494796, |
| "grad_norm": 0.32357704639434814, |
| "learning_rate": 4.8096364490518164e-06, |
| "loss": 0.6505, |
| "step": 853 |
| }, |
| { |
| "epoch": 0.4183710961420698, |
| "grad_norm": 0.3268754482269287, |
| "learning_rate": 4.809137037802975e-06, |
| "loss": 0.6305, |
| "step": 854 |
| }, |
| { |
| "epoch": 0.4188609920391917, |
| "grad_norm": 0.32692062854766846, |
| "learning_rate": 4.808636998316837e-06, |
| "loss": 0.6655, |
| "step": 855 |
| }, |
| { |
| "epoch": 0.41935088793631353, |
| "grad_norm": 0.33697402477264404, |
| "learning_rate": 4.808136330729444e-06, |
| "loss": 0.6642, |
| "step": 856 |
| }, |
| { |
| "epoch": 0.41984078383343537, |
| "grad_norm": 0.3350159227848053, |
| "learning_rate": 4.807635035177012e-06, |
| "loss": 0.6565, |
| "step": 857 |
| }, |
| { |
| "epoch": 0.42033067973055727, |
| "grad_norm": 0.319500595331192, |
| "learning_rate": 4.807133111795925e-06, |
| "loss": 0.6227, |
| "step": 858 |
| }, |
| { |
| "epoch": 0.4208205756276791, |
| "grad_norm": 0.31828346848487854, |
| "learning_rate": 4.806630560722741e-06, |
| "loss": 0.6408, |
| "step": 859 |
| }, |
| { |
| "epoch": 0.421310471524801, |
| "grad_norm": 0.3408709466457367, |
| "learning_rate": 4.806127382094184e-06, |
| "loss": 0.6513, |
| "step": 860 |
| }, |
| { |
| "epoch": 0.42180036742192284, |
| "grad_norm": 0.3314608335494995, |
| "learning_rate": 4.805623576047155e-06, |
| "loss": 0.6601, |
| "step": 861 |
| }, |
| { |
| "epoch": 0.4222902633190447, |
| "grad_norm": 0.31937554478645325, |
| "learning_rate": 4.805119142718722e-06, |
| "loss": 0.6375, |
| "step": 862 |
| }, |
| { |
| "epoch": 0.4227801592161666, |
| "grad_norm": 0.33292335271835327, |
| "learning_rate": 4.804614082246121e-06, |
| "loss": 0.6677, |
| "step": 863 |
| }, |
| { |
| "epoch": 0.4232700551132884, |
| "grad_norm": 0.33356305956840515, |
| "learning_rate": 4.804108394766766e-06, |
| "loss": 0.6489, |
| "step": 864 |
| }, |
| { |
| "epoch": 0.4237599510104103, |
| "grad_norm": 0.33318352699279785, |
| "learning_rate": 4.8036020804182335e-06, |
| "loss": 0.6214, |
| "step": 865 |
| }, |
| { |
| "epoch": 0.42424984690753215, |
| "grad_norm": 0.329732209444046, |
| "learning_rate": 4.803095139338276e-06, |
| "loss": 0.6433, |
| "step": 866 |
| }, |
| { |
| "epoch": 0.424739742804654, |
| "grad_norm": 0.3253389894962311, |
| "learning_rate": 4.802587571664815e-06, |
| "loss": 0.6813, |
| "step": 867 |
| }, |
| { |
| "epoch": 0.4252296387017759, |
| "grad_norm": 0.3183434009552002, |
| "learning_rate": 4.802079377535943e-06, |
| "loss": 0.656, |
| "step": 868 |
| }, |
| { |
| "epoch": 0.4257195345988977, |
| "grad_norm": 0.3274723291397095, |
| "learning_rate": 4.801570557089921e-06, |
| "loss": 0.5987, |
| "step": 869 |
| }, |
| { |
| "epoch": 0.4262094304960196, |
| "grad_norm": 0.3257862627506256, |
| "learning_rate": 4.8010611104651814e-06, |
| "loss": 0.6481, |
| "step": 870 |
| }, |
| { |
| "epoch": 0.42669932639314145, |
| "grad_norm": 0.3261187970638275, |
| "learning_rate": 4.800551037800329e-06, |
| "loss": 0.6453, |
| "step": 871 |
| }, |
| { |
| "epoch": 0.4271892222902633, |
| "grad_norm": 0.3393871784210205, |
| "learning_rate": 4.800040339234134e-06, |
| "loss": 0.6306, |
| "step": 872 |
| }, |
| { |
| "epoch": 0.4276791181873852, |
| "grad_norm": 0.3195154368877411, |
| "learning_rate": 4.799529014905544e-06, |
| "loss": 0.6537, |
| "step": 873 |
| }, |
| { |
| "epoch": 0.428169014084507, |
| "grad_norm": 0.3519408702850342, |
| "learning_rate": 4.79901706495367e-06, |
| "loss": 0.6727, |
| "step": 874 |
| }, |
| { |
| "epoch": 0.4286589099816289, |
| "grad_norm": 0.33387088775634766, |
| "learning_rate": 4.7985044895177976e-06, |
| "loss": 0.6556, |
| "step": 875 |
| }, |
| { |
| "epoch": 0.42914880587875076, |
| "grad_norm": 0.32974541187286377, |
| "learning_rate": 4.7979912887373805e-06, |
| "loss": 0.6579, |
| "step": 876 |
| }, |
| { |
| "epoch": 0.4296387017758726, |
| "grad_norm": 0.32956287264823914, |
| "learning_rate": 4.7974774627520424e-06, |
| "loss": 0.6522, |
| "step": 877 |
| }, |
| { |
| "epoch": 0.4301285976729945, |
| "grad_norm": 0.31489697098731995, |
| "learning_rate": 4.7969630117015785e-06, |
| "loss": 0.6613, |
| "step": 878 |
| }, |
| { |
| "epoch": 0.43061849357011633, |
| "grad_norm": 0.33859875798225403, |
| "learning_rate": 4.796447935725954e-06, |
| "loss": 0.6546, |
| "step": 879 |
| }, |
| { |
| "epoch": 0.43110838946723823, |
| "grad_norm": 0.33555036783218384, |
| "learning_rate": 4.795932234965302e-06, |
| "loss": 0.6083, |
| "step": 880 |
| }, |
| { |
| "epoch": 0.43159828536436007, |
| "grad_norm": 0.3280044496059418, |
| "learning_rate": 4.795415909559929e-06, |
| "loss": 0.6721, |
| "step": 881 |
| }, |
| { |
| "epoch": 0.43208818126148196, |
| "grad_norm": 0.331339567899704, |
| "learning_rate": 4.7948989596503085e-06, |
| "loss": 0.6529, |
| "step": 882 |
| }, |
| { |
| "epoch": 0.4325780771586038, |
| "grad_norm": 0.3267557919025421, |
| "learning_rate": 4.794381385377084e-06, |
| "loss": 0.6483, |
| "step": 883 |
| }, |
| { |
| "epoch": 0.43306797305572564, |
| "grad_norm": 0.3242381513118744, |
| "learning_rate": 4.79386318688107e-06, |
| "loss": 0.652, |
| "step": 884 |
| }, |
| { |
| "epoch": 0.43355786895284754, |
| "grad_norm": 0.35081592202186584, |
| "learning_rate": 4.793344364303252e-06, |
| "loss": 0.6859, |
| "step": 885 |
| }, |
| { |
| "epoch": 0.4340477648499694, |
| "grad_norm": 0.33770686388015747, |
| "learning_rate": 4.792824917784783e-06, |
| "loss": 0.6315, |
| "step": 886 |
| }, |
| { |
| "epoch": 0.43453766074709127, |
| "grad_norm": 0.33075547218322754, |
| "learning_rate": 4.792304847466986e-06, |
| "loss": 0.6512, |
| "step": 887 |
| }, |
| { |
| "epoch": 0.4350275566442131, |
| "grad_norm": 0.3429637551307678, |
| "learning_rate": 4.791784153491355e-06, |
| "loss": 0.6803, |
| "step": 888 |
| }, |
| { |
| "epoch": 0.43551745254133495, |
| "grad_norm": 0.3312215209007263, |
| "learning_rate": 4.791262835999554e-06, |
| "loss": 0.6292, |
| "step": 889 |
| }, |
| { |
| "epoch": 0.43600734843845684, |
| "grad_norm": 0.3361411690711975, |
| "learning_rate": 4.790740895133415e-06, |
| "loss": 0.6537, |
| "step": 890 |
| }, |
| { |
| "epoch": 0.4364972443355787, |
| "grad_norm": 0.320117324590683, |
| "learning_rate": 4.790218331034939e-06, |
| "loss": 0.669, |
| "step": 891 |
| }, |
| { |
| "epoch": 0.4369871402327006, |
| "grad_norm": 0.3243316113948822, |
| "learning_rate": 4.7896951438463e-06, |
| "loss": 0.6205, |
| "step": 892 |
| }, |
| { |
| "epoch": 0.4374770361298224, |
| "grad_norm": 0.3368995785713196, |
| "learning_rate": 4.789171333709838e-06, |
| "loss": 0.6719, |
| "step": 893 |
| }, |
| { |
| "epoch": 0.43796693202694426, |
| "grad_norm": 0.3193654716014862, |
| "learning_rate": 4.788646900768064e-06, |
| "loss": 0.657, |
| "step": 894 |
| }, |
| { |
| "epoch": 0.43845682792406615, |
| "grad_norm": 0.3282233476638794, |
| "learning_rate": 4.788121845163658e-06, |
| "loss": 0.6526, |
| "step": 895 |
| }, |
| { |
| "epoch": 0.438946723821188, |
| "grad_norm": 0.3279890716075897, |
| "learning_rate": 4.787596167039471e-06, |
| "loss": 0.6384, |
| "step": 896 |
| }, |
| { |
| "epoch": 0.4394366197183099, |
| "grad_norm": 0.3300457298755646, |
| "learning_rate": 4.787069866538521e-06, |
| "loss": 0.6702, |
| "step": 897 |
| }, |
| { |
| "epoch": 0.4399265156154317, |
| "grad_norm": 0.33194148540496826, |
| "learning_rate": 4.7865429438039955e-06, |
| "loss": 0.6595, |
| "step": 898 |
| }, |
| { |
| "epoch": 0.44041641151255356, |
| "grad_norm": 0.34703534841537476, |
| "learning_rate": 4.786015398979254e-06, |
| "loss": 0.6537, |
| "step": 899 |
| }, |
| { |
| "epoch": 0.44090630740967546, |
| "grad_norm": 0.3270828425884247, |
| "learning_rate": 4.7854872322078225e-06, |
| "loss": 0.6616, |
| "step": 900 |
| }, |
| { |
| "epoch": 0.4413962033067973, |
| "grad_norm": 0.3177931606769562, |
| "learning_rate": 4.784958443633398e-06, |
| "loss": 0.6359, |
| "step": 901 |
| }, |
| { |
| "epoch": 0.4418860992039192, |
| "grad_norm": 0.33886033296585083, |
| "learning_rate": 4.784429033399844e-06, |
| "loss": 0.6472, |
| "step": 902 |
| }, |
| { |
| "epoch": 0.44237599510104103, |
| "grad_norm": 0.3440721929073334, |
| "learning_rate": 4.783899001651196e-06, |
| "loss": 0.6417, |
| "step": 903 |
| }, |
| { |
| "epoch": 0.44286589099816287, |
| "grad_norm": 0.3315575420856476, |
| "learning_rate": 4.783368348531657e-06, |
| "loss": 0.6478, |
| "step": 904 |
| }, |
| { |
| "epoch": 0.44335578689528476, |
| "grad_norm": 0.32453784346580505, |
| "learning_rate": 4.782837074185602e-06, |
| "loss": 0.6605, |
| "step": 905 |
| }, |
| { |
| "epoch": 0.4438456827924066, |
| "grad_norm": 0.3341559171676636, |
| "learning_rate": 4.782305178757569e-06, |
| "loss": 0.6778, |
| "step": 906 |
| }, |
| { |
| "epoch": 0.4443355786895285, |
| "grad_norm": 0.338220477104187, |
| "learning_rate": 4.781772662392272e-06, |
| "loss": 0.6609, |
| "step": 907 |
| }, |
| { |
| "epoch": 0.44482547458665034, |
| "grad_norm": 0.34984090924263, |
| "learning_rate": 4.781239525234588e-06, |
| "loss": 0.668, |
| "step": 908 |
| }, |
| { |
| "epoch": 0.4453153704837722, |
| "grad_norm": 0.31468555331230164, |
| "learning_rate": 4.780705767429565e-06, |
| "loss": 0.5993, |
| "step": 909 |
| }, |
| { |
| "epoch": 0.44580526638089407, |
| "grad_norm": 0.327556312084198, |
| "learning_rate": 4.780171389122423e-06, |
| "loss": 0.6509, |
| "step": 910 |
| }, |
| { |
| "epoch": 0.4462951622780159, |
| "grad_norm": 0.3160576820373535, |
| "learning_rate": 4.779636390458546e-06, |
| "loss": 0.6445, |
| "step": 911 |
| }, |
| { |
| "epoch": 0.4467850581751378, |
| "grad_norm": 0.3446383476257324, |
| "learning_rate": 4.7791007715834906e-06, |
| "loss": 0.64, |
| "step": 912 |
| }, |
| { |
| "epoch": 0.44727495407225965, |
| "grad_norm": 0.32624417543411255, |
| "learning_rate": 4.778564532642978e-06, |
| "loss": 0.6636, |
| "step": 913 |
| }, |
| { |
| "epoch": 0.4477648499693815, |
| "grad_norm": 0.3289485275745392, |
| "learning_rate": 4.778027673782901e-06, |
| "loss": 0.656, |
| "step": 914 |
| }, |
| { |
| "epoch": 0.4482547458665034, |
| "grad_norm": 0.3322214186191559, |
| "learning_rate": 4.777490195149322e-06, |
| "loss": 0.6997, |
| "step": 915 |
| }, |
| { |
| "epoch": 0.4487446417636252, |
| "grad_norm": 0.3421451449394226, |
| "learning_rate": 4.776952096888471e-06, |
| "loss": 0.6594, |
| "step": 916 |
| }, |
| { |
| "epoch": 0.4492345376607471, |
| "grad_norm": 0.3431428372859955, |
| "learning_rate": 4.7764133791467434e-06, |
| "loss": 0.651, |
| "step": 917 |
| }, |
| { |
| "epoch": 0.44972443355786895, |
| "grad_norm": 0.32274821400642395, |
| "learning_rate": 4.7758740420707086e-06, |
| "loss": 0.6277, |
| "step": 918 |
| }, |
| { |
| "epoch": 0.4502143294549908, |
| "grad_norm": 0.3264138102531433, |
| "learning_rate": 4.7753340858071e-06, |
| "loss": 0.6352, |
| "step": 919 |
| }, |
| { |
| "epoch": 0.4507042253521127, |
| "grad_norm": 0.3209553062915802, |
| "learning_rate": 4.7747935105028224e-06, |
| "loss": 0.6557, |
| "step": 920 |
| }, |
| { |
| "epoch": 0.4511941212492345, |
| "grad_norm": 0.3410312831401825, |
| "learning_rate": 4.774252316304947e-06, |
| "loss": 0.6751, |
| "step": 921 |
| }, |
| { |
| "epoch": 0.4516840171463564, |
| "grad_norm": 0.35638895630836487, |
| "learning_rate": 4.773710503360714e-06, |
| "loss": 0.6669, |
| "step": 922 |
| }, |
| { |
| "epoch": 0.45217391304347826, |
| "grad_norm": 0.34015554189682007, |
| "learning_rate": 4.773168071817534e-06, |
| "loss": 0.6505, |
| "step": 923 |
| }, |
| { |
| "epoch": 0.4526638089406001, |
| "grad_norm": 0.32471373677253723, |
| "learning_rate": 4.772625021822983e-06, |
| "loss": 0.6682, |
| "step": 924 |
| }, |
| { |
| "epoch": 0.453153704837722, |
| "grad_norm": 0.31346777081489563, |
| "learning_rate": 4.7720813535248056e-06, |
| "loss": 0.643, |
| "step": 925 |
| }, |
| { |
| "epoch": 0.45364360073484383, |
| "grad_norm": 0.33824825286865234, |
| "learning_rate": 4.771537067070917e-06, |
| "loss": 0.6633, |
| "step": 926 |
| }, |
| { |
| "epoch": 0.4541334966319657, |
| "grad_norm": 0.3258407711982727, |
| "learning_rate": 4.770992162609397e-06, |
| "loss": 0.6483, |
| "step": 927 |
| }, |
| { |
| "epoch": 0.45462339252908757, |
| "grad_norm": 0.32574284076690674, |
| "learning_rate": 4.770446640288497e-06, |
| "loss": 0.6399, |
| "step": 928 |
| }, |
| { |
| "epoch": 0.4551132884262094, |
| "grad_norm": 0.31942424178123474, |
| "learning_rate": 4.769900500256634e-06, |
| "loss": 0.6746, |
| "step": 929 |
| }, |
| { |
| "epoch": 0.4556031843233313, |
| "grad_norm": 0.3261716365814209, |
| "learning_rate": 4.769353742662395e-06, |
| "loss": 0.6626, |
| "step": 930 |
| }, |
| { |
| "epoch": 0.45609308022045314, |
| "grad_norm": 0.32581284642219543, |
| "learning_rate": 4.768806367654534e-06, |
| "loss": 0.6647, |
| "step": 931 |
| }, |
| { |
| "epoch": 0.45658297611757503, |
| "grad_norm": 0.32622581720352173, |
| "learning_rate": 4.768258375381971e-06, |
| "loss": 0.6535, |
| "step": 932 |
| }, |
| { |
| "epoch": 0.4570728720146969, |
| "grad_norm": 0.3114738166332245, |
| "learning_rate": 4.767709765993799e-06, |
| "loss": 0.6342, |
| "step": 933 |
| }, |
| { |
| "epoch": 0.4575627679118187, |
| "grad_norm": 0.33273571729660034, |
| "learning_rate": 4.7671605396392735e-06, |
| "loss": 0.6558, |
| "step": 934 |
| }, |
| { |
| "epoch": 0.4580526638089406, |
| "grad_norm": 0.3257521688938141, |
| "learning_rate": 4.766610696467822e-06, |
| "loss": 0.6407, |
| "step": 935 |
| }, |
| { |
| "epoch": 0.45854255970606245, |
| "grad_norm": 0.3364790678024292, |
| "learning_rate": 4.766060236629037e-06, |
| "loss": 0.6393, |
| "step": 936 |
| }, |
| { |
| "epoch": 0.45903245560318434, |
| "grad_norm": 0.33170202374458313, |
| "learning_rate": 4.765509160272681e-06, |
| "loss": 0.6292, |
| "step": 937 |
| }, |
| { |
| "epoch": 0.4595223515003062, |
| "grad_norm": 0.31647032499313354, |
| "learning_rate": 4.764957467548681e-06, |
| "loss": 0.634, |
| "step": 938 |
| }, |
| { |
| "epoch": 0.460012247397428, |
| "grad_norm": 0.32740557193756104, |
| "learning_rate": 4.7644051586071345e-06, |
| "loss": 0.6115, |
| "step": 939 |
| }, |
| { |
| "epoch": 0.4605021432945499, |
| "grad_norm": 0.32127001881599426, |
| "learning_rate": 4.763852233598307e-06, |
| "loss": 0.6474, |
| "step": 940 |
| }, |
| { |
| "epoch": 0.46099203919167175, |
| "grad_norm": 0.3447674512863159, |
| "learning_rate": 4.76329869267263e-06, |
| "loss": 0.6845, |
| "step": 941 |
| }, |
| { |
| "epoch": 0.46148193508879365, |
| "grad_norm": 0.32889634370803833, |
| "learning_rate": 4.762744535980702e-06, |
| "loss": 0.6364, |
| "step": 942 |
| }, |
| { |
| "epoch": 0.4619718309859155, |
| "grad_norm": 0.34127506613731384, |
| "learning_rate": 4.762189763673291e-06, |
| "loss": 0.6549, |
| "step": 943 |
| }, |
| { |
| "epoch": 0.4624617268830373, |
| "grad_norm": 0.33296868205070496, |
| "learning_rate": 4.761634375901333e-06, |
| "loss": 0.6462, |
| "step": 944 |
| }, |
| { |
| "epoch": 0.4629516227801592, |
| "grad_norm": 0.3282979428768158, |
| "learning_rate": 4.761078372815927e-06, |
| "loss": 0.6408, |
| "step": 945 |
| }, |
| { |
| "epoch": 0.46344151867728106, |
| "grad_norm": 0.3336234986782074, |
| "learning_rate": 4.760521754568345e-06, |
| "loss": 0.6441, |
| "step": 946 |
| }, |
| { |
| "epoch": 0.46393141457440296, |
| "grad_norm": 0.3332432806491852, |
| "learning_rate": 4.759964521310021e-06, |
| "loss": 0.6592, |
| "step": 947 |
| }, |
| { |
| "epoch": 0.4644213104715248, |
| "grad_norm": 0.3263160288333893, |
| "learning_rate": 4.7594066731925624e-06, |
| "loss": 0.6621, |
| "step": 948 |
| }, |
| { |
| "epoch": 0.4649112063686467, |
| "grad_norm": 0.3333407938480377, |
| "learning_rate": 4.758848210367739e-06, |
| "loss": 0.6374, |
| "step": 949 |
| }, |
| { |
| "epoch": 0.46540110226576853, |
| "grad_norm": 0.3290969729423523, |
| "learning_rate": 4.7582891329874894e-06, |
| "loss": 0.6531, |
| "step": 950 |
| }, |
| { |
| "epoch": 0.46589099816289037, |
| "grad_norm": 0.32899218797683716, |
| "learning_rate": 4.7577294412039195e-06, |
| "loss": 0.6478, |
| "step": 951 |
| }, |
| { |
| "epoch": 0.46638089406001226, |
| "grad_norm": 0.32041114568710327, |
| "learning_rate": 4.757169135169304e-06, |
| "loss": 0.6581, |
| "step": 952 |
| }, |
| { |
| "epoch": 0.4668707899571341, |
| "grad_norm": 0.3273862302303314, |
| "learning_rate": 4.7566082150360795e-06, |
| "loss": 0.6173, |
| "step": 953 |
| }, |
| { |
| "epoch": 0.467360685854256, |
| "grad_norm": 0.3293192684650421, |
| "learning_rate": 4.7560466809568565e-06, |
| "loss": 0.6345, |
| "step": 954 |
| }, |
| { |
| "epoch": 0.46785058175137784, |
| "grad_norm": 0.3363111913204193, |
| "learning_rate": 4.755484533084407e-06, |
| "loss": 0.6461, |
| "step": 955 |
| }, |
| { |
| "epoch": 0.4683404776484997, |
| "grad_norm": 0.3306375741958618, |
| "learning_rate": 4.754921771571674e-06, |
| "loss": 0.6364, |
| "step": 956 |
| }, |
| { |
| "epoch": 0.46883037354562157, |
| "grad_norm": 0.3344152271747589, |
| "learning_rate": 4.754358396571764e-06, |
| "loss": 0.6205, |
| "step": 957 |
| }, |
| { |
| "epoch": 0.4693202694427434, |
| "grad_norm": 0.32841694355010986, |
| "learning_rate": 4.753794408237954e-06, |
| "loss": 0.662, |
| "step": 958 |
| }, |
| { |
| "epoch": 0.4698101653398653, |
| "grad_norm": 0.3455883860588074, |
| "learning_rate": 4.753229806723684e-06, |
| "loss": 0.6537, |
| "step": 959 |
| }, |
| { |
| "epoch": 0.47030006123698714, |
| "grad_norm": 0.3336228132247925, |
| "learning_rate": 4.752664592182564e-06, |
| "loss": 0.6564, |
| "step": 960 |
| }, |
| { |
| "epoch": 0.470789957134109, |
| "grad_norm": 0.32779762148857117, |
| "learning_rate": 4.75209876476837e-06, |
| "loss": 0.6124, |
| "step": 961 |
| }, |
| { |
| "epoch": 0.4712798530312309, |
| "grad_norm": 0.3376416862010956, |
| "learning_rate": 4.751532324635043e-06, |
| "loss": 0.6485, |
| "step": 962 |
| }, |
| { |
| "epoch": 0.4717697489283527, |
| "grad_norm": 0.33599647879600525, |
| "learning_rate": 4.750965271936692e-06, |
| "loss": 0.6489, |
| "step": 963 |
| }, |
| { |
| "epoch": 0.4722596448254746, |
| "grad_norm": 0.33952006697654724, |
| "learning_rate": 4.750397606827594e-06, |
| "loss": 0.6501, |
| "step": 964 |
| }, |
| { |
| "epoch": 0.47274954072259645, |
| "grad_norm": 0.3457185626029968, |
| "learning_rate": 4.74982932946219e-06, |
| "loss": 0.6554, |
| "step": 965 |
| }, |
| { |
| "epoch": 0.4732394366197183, |
| "grad_norm": 0.3317156732082367, |
| "learning_rate": 4.749260439995089e-06, |
| "loss": 0.6445, |
| "step": 966 |
| }, |
| { |
| "epoch": 0.4737293325168402, |
| "grad_norm": 0.32509130239486694, |
| "learning_rate": 4.748690938581068e-06, |
| "loss": 0.6303, |
| "step": 967 |
| }, |
| { |
| "epoch": 0.474219228413962, |
| "grad_norm": 0.3271726369857788, |
| "learning_rate": 4.748120825375067e-06, |
| "loss": 0.6544, |
| "step": 968 |
| }, |
| { |
| "epoch": 0.4747091243110839, |
| "grad_norm": 0.32812872529029846, |
| "learning_rate": 4.747550100532195e-06, |
| "loss": 0.6249, |
| "step": 969 |
| }, |
| { |
| "epoch": 0.47519902020820576, |
| "grad_norm": 0.33634963631629944, |
| "learning_rate": 4.746978764207727e-06, |
| "loss": 0.6416, |
| "step": 970 |
| }, |
| { |
| "epoch": 0.4756889161053276, |
| "grad_norm": 0.33176323771476746, |
| "learning_rate": 4.746406816557103e-06, |
| "loss": 0.6538, |
| "step": 971 |
| }, |
| { |
| "epoch": 0.4761788120024495, |
| "grad_norm": 0.3377355933189392, |
| "learning_rate": 4.7458342577359315e-06, |
| "loss": 0.6325, |
| "step": 972 |
| }, |
| { |
| "epoch": 0.47666870789957133, |
| "grad_norm": 0.3324216902256012, |
| "learning_rate": 4.745261087899987e-06, |
| "loss": 0.6774, |
| "step": 973 |
| }, |
| { |
| "epoch": 0.4771586037966932, |
| "grad_norm": 0.33282217383384705, |
| "learning_rate": 4.744687307205207e-06, |
| "loss": 0.6317, |
| "step": 974 |
| }, |
| { |
| "epoch": 0.47764849969381507, |
| "grad_norm": 0.3376564383506775, |
| "learning_rate": 4.7441129158077e-06, |
| "loss": 0.6658, |
| "step": 975 |
| }, |
| { |
| "epoch": 0.4781383955909369, |
| "grad_norm": 0.3456125855445862, |
| "learning_rate": 4.743537913863736e-06, |
| "loss": 0.6802, |
| "step": 976 |
| }, |
| { |
| "epoch": 0.4786282914880588, |
| "grad_norm": 0.3268921971321106, |
| "learning_rate": 4.742962301529755e-06, |
| "loss": 0.6463, |
| "step": 977 |
| }, |
| { |
| "epoch": 0.47911818738518064, |
| "grad_norm": 0.34568679332733154, |
| "learning_rate": 4.742386078962362e-06, |
| "loss": 0.6507, |
| "step": 978 |
| }, |
| { |
| "epoch": 0.47960808328230253, |
| "grad_norm": 0.3478900194168091, |
| "learning_rate": 4.741809246318326e-06, |
| "loss": 0.6582, |
| "step": 979 |
| }, |
| { |
| "epoch": 0.4800979791794244, |
| "grad_norm": 0.3300599753856659, |
| "learning_rate": 4.741231803754583e-06, |
| "loss": 0.6444, |
| "step": 980 |
| }, |
| { |
| "epoch": 0.4805878750765462, |
| "grad_norm": 0.3292956054210663, |
| "learning_rate": 4.740653751428238e-06, |
| "loss": 0.635, |
| "step": 981 |
| }, |
| { |
| "epoch": 0.4810777709736681, |
| "grad_norm": 0.3479529619216919, |
| "learning_rate": 4.740075089496557e-06, |
| "loss": 0.6671, |
| "step": 982 |
| }, |
| { |
| "epoch": 0.48156766687078995, |
| "grad_norm": 0.33764973282814026, |
| "learning_rate": 4.739495818116974e-06, |
| "loss": 0.6346, |
| "step": 983 |
| }, |
| { |
| "epoch": 0.48205756276791184, |
| "grad_norm": 0.3391423225402832, |
| "learning_rate": 4.738915937447091e-06, |
| "loss": 0.6376, |
| "step": 984 |
| }, |
| { |
| "epoch": 0.4825474586650337, |
| "grad_norm": 0.3254561126232147, |
| "learning_rate": 4.738335447644673e-06, |
| "loss": 0.6551, |
| "step": 985 |
| }, |
| { |
| "epoch": 0.4830373545621555, |
| "grad_norm": 0.32766976952552795, |
| "learning_rate": 4.7377543488676495e-06, |
| "loss": 0.6372, |
| "step": 986 |
| }, |
| { |
| "epoch": 0.4835272504592774, |
| "grad_norm": 0.340474009513855, |
| "learning_rate": 4.7371726412741205e-06, |
| "loss": 0.6697, |
| "step": 987 |
| }, |
| { |
| "epoch": 0.48401714635639925, |
| "grad_norm": 0.3602207601070404, |
| "learning_rate": 4.736590325022346e-06, |
| "loss": 0.6462, |
| "step": 988 |
| }, |
| { |
| "epoch": 0.48450704225352115, |
| "grad_norm": 0.332263708114624, |
| "learning_rate": 4.7360074002707556e-06, |
| "loss": 0.6545, |
| "step": 989 |
| }, |
| { |
| "epoch": 0.484996938150643, |
| "grad_norm": 0.3312827944755554, |
| "learning_rate": 4.735423867177944e-06, |
| "loss": 0.6291, |
| "step": 990 |
| }, |
| { |
| "epoch": 0.4854868340477648, |
| "grad_norm": 0.35063663125038147, |
| "learning_rate": 4.73483972590267e-06, |
| "loss": 0.6423, |
| "step": 991 |
| }, |
| { |
| "epoch": 0.4859767299448867, |
| "grad_norm": 0.3246268033981323, |
| "learning_rate": 4.734254976603858e-06, |
| "loss": 0.653, |
| "step": 992 |
| }, |
| { |
| "epoch": 0.48646662584200856, |
| "grad_norm": 0.33790072798728943, |
| "learning_rate": 4.7336696194405995e-06, |
| "loss": 0.6448, |
| "step": 993 |
| }, |
| { |
| "epoch": 0.48695652173913045, |
| "grad_norm": 0.3333311676979065, |
| "learning_rate": 4.733083654572148e-06, |
| "loss": 0.6458, |
| "step": 994 |
| }, |
| { |
| "epoch": 0.4874464176362523, |
| "grad_norm": 0.3357294499874115, |
| "learning_rate": 4.732497082157928e-06, |
| "loss": 0.6462, |
| "step": 995 |
| }, |
| { |
| "epoch": 0.48793631353337413, |
| "grad_norm": 0.3324768841266632, |
| "learning_rate": 4.731909902357522e-06, |
| "loss": 0.6442, |
| "step": 996 |
| }, |
| { |
| "epoch": 0.48842620943049603, |
| "grad_norm": 0.3282735049724579, |
| "learning_rate": 4.731322115330683e-06, |
| "loss": 0.5999, |
| "step": 997 |
| }, |
| { |
| "epoch": 0.48891610532761787, |
| "grad_norm": 0.3322250247001648, |
| "learning_rate": 4.730733721237329e-06, |
| "loss": 0.6564, |
| "step": 998 |
| }, |
| { |
| "epoch": 0.48940600122473976, |
| "grad_norm": 0.33998483419418335, |
| "learning_rate": 4.7301447202375405e-06, |
| "loss": 0.6403, |
| "step": 999 |
| }, |
| { |
| "epoch": 0.4898958971218616, |
| "grad_norm": 0.3261421024799347, |
| "learning_rate": 4.729555112491566e-06, |
| "loss": 0.6633, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.49038579301898344, |
| "grad_norm": 0.3308633267879486, |
| "learning_rate": 4.728964898159816e-06, |
| "loss": 0.6439, |
| "step": 1001 |
| }, |
| { |
| "epoch": 0.49087568891610533, |
| "grad_norm": 0.3286653459072113, |
| "learning_rate": 4.728374077402868e-06, |
| "loss": 0.6699, |
| "step": 1002 |
| }, |
| { |
| "epoch": 0.4913655848132272, |
| "grad_norm": 0.34002241492271423, |
| "learning_rate": 4.727782650381464e-06, |
| "loss": 0.6804, |
| "step": 1003 |
| }, |
| { |
| "epoch": 0.49185548071034907, |
| "grad_norm": 0.3460935056209564, |
| "learning_rate": 4.727190617256513e-06, |
| "loss": 0.6293, |
| "step": 1004 |
| }, |
| { |
| "epoch": 0.4923453766074709, |
| "grad_norm": 0.3384420871734619, |
| "learning_rate": 4.726597978189085e-06, |
| "loss": 0.6425, |
| "step": 1005 |
| }, |
| { |
| "epoch": 0.49283527250459275, |
| "grad_norm": 0.3322122097015381, |
| "learning_rate": 4.726004733340417e-06, |
| "loss": 0.624, |
| "step": 1006 |
| }, |
| { |
| "epoch": 0.49332516840171464, |
| "grad_norm": 0.33144181966781616, |
| "learning_rate": 4.725410882871911e-06, |
| "loss": 0.6088, |
| "step": 1007 |
| }, |
| { |
| "epoch": 0.4938150642988365, |
| "grad_norm": 0.3404276967048645, |
| "learning_rate": 4.724816426945134e-06, |
| "loss": 0.6394, |
| "step": 1008 |
| }, |
| { |
| "epoch": 0.4943049601959584, |
| "grad_norm": 0.3359997570514679, |
| "learning_rate": 4.724221365721816e-06, |
| "loss": 0.6123, |
| "step": 1009 |
| }, |
| { |
| "epoch": 0.4947948560930802, |
| "grad_norm": 0.3339918255805969, |
| "learning_rate": 4.723625699363854e-06, |
| "loss": 0.6568, |
| "step": 1010 |
| }, |
| { |
| "epoch": 0.49528475199020205, |
| "grad_norm": 0.3534146547317505, |
| "learning_rate": 4.723029428033308e-06, |
| "loss": 0.6777, |
| "step": 1011 |
| }, |
| { |
| "epoch": 0.49577464788732395, |
| "grad_norm": 0.39140719175338745, |
| "learning_rate": 4.722432551892402e-06, |
| "loss": 0.6508, |
| "step": 1012 |
| }, |
| { |
| "epoch": 0.4962645437844458, |
| "grad_norm": 0.3383652865886688, |
| "learning_rate": 4.721835071103527e-06, |
| "loss": 0.6109, |
| "step": 1013 |
| }, |
| { |
| "epoch": 0.4967544396815677, |
| "grad_norm": 0.33597037196159363, |
| "learning_rate": 4.721236985829237e-06, |
| "loss": 0.6525, |
| "step": 1014 |
| }, |
| { |
| "epoch": 0.4972443355786895, |
| "grad_norm": 0.34626680612564087, |
| "learning_rate": 4.720638296232251e-06, |
| "loss": 0.6479, |
| "step": 1015 |
| }, |
| { |
| "epoch": 0.4977342314758114, |
| "grad_norm": 0.3565669357776642, |
| "learning_rate": 4.720039002475449e-06, |
| "loss": 0.6626, |
| "step": 1016 |
| }, |
| { |
| "epoch": 0.49822412737293326, |
| "grad_norm": 0.3603387475013733, |
| "learning_rate": 4.719439104721883e-06, |
| "loss": 0.635, |
| "step": 1017 |
| }, |
| { |
| "epoch": 0.4987140232700551, |
| "grad_norm": 0.34352055191993713, |
| "learning_rate": 4.71883860313476e-06, |
| "loss": 0.6377, |
| "step": 1018 |
| }, |
| { |
| "epoch": 0.499203919167177, |
| "grad_norm": 0.3388681411743164, |
| "learning_rate": 4.71823749787746e-06, |
| "loss": 0.6413, |
| "step": 1019 |
| }, |
| { |
| "epoch": 0.49969381506429883, |
| "grad_norm": 0.34849029779434204, |
| "learning_rate": 4.717635789113521e-06, |
| "loss": 0.6617, |
| "step": 1020 |
| }, |
| { |
| "epoch": 0.5001837109614207, |
| "grad_norm": 0.3474747836589813, |
| "learning_rate": 4.717033477006648e-06, |
| "loss": 0.6206, |
| "step": 1021 |
| }, |
| { |
| "epoch": 0.5006736068585426, |
| "grad_norm": 0.3422054648399353, |
| "learning_rate": 4.716430561720709e-06, |
| "loss": 0.6646, |
| "step": 1022 |
| }, |
| { |
| "epoch": 0.5011635027556645, |
| "grad_norm": 0.3335569202899933, |
| "learning_rate": 4.715827043419737e-06, |
| "loss": 0.633, |
| "step": 1023 |
| }, |
| { |
| "epoch": 0.5016533986527862, |
| "grad_norm": 0.33768150210380554, |
| "learning_rate": 4.7152229222679285e-06, |
| "loss": 0.6749, |
| "step": 1024 |
| }, |
| { |
| "epoch": 0.5021432945499081, |
| "grad_norm": 0.3343658447265625, |
| "learning_rate": 4.714618198429645e-06, |
| "loss": 0.6464, |
| "step": 1025 |
| }, |
| { |
| "epoch": 0.50263319044703, |
| "grad_norm": 0.33807894587516785, |
| "learning_rate": 4.71401287206941e-06, |
| "loss": 0.6605, |
| "step": 1026 |
| }, |
| { |
| "epoch": 0.5031230863441518, |
| "grad_norm": 0.3393549621105194, |
| "learning_rate": 4.713406943351913e-06, |
| "loss": 0.6217, |
| "step": 1027 |
| }, |
| { |
| "epoch": 0.5036129822412737, |
| "grad_norm": 0.33353668451309204, |
| "learning_rate": 4.7128004124420074e-06, |
| "loss": 0.6658, |
| "step": 1028 |
| }, |
| { |
| "epoch": 0.5041028781383956, |
| "grad_norm": 0.34081828594207764, |
| "learning_rate": 4.712193279504707e-06, |
| "loss": 0.6708, |
| "step": 1029 |
| }, |
| { |
| "epoch": 0.5045927740355175, |
| "grad_norm": 0.3360579311847687, |
| "learning_rate": 4.711585544705195e-06, |
| "loss": 0.6333, |
| "step": 1030 |
| }, |
| { |
| "epoch": 0.5050826699326393, |
| "grad_norm": 0.34213027358055115, |
| "learning_rate": 4.710977208208812e-06, |
| "loss": 0.6771, |
| "step": 1031 |
| }, |
| { |
| "epoch": 0.5055725658297612, |
| "grad_norm": 0.33724650740623474, |
| "learning_rate": 4.710368270181068e-06, |
| "loss": 0.6521, |
| "step": 1032 |
| }, |
| { |
| "epoch": 0.5060624617268831, |
| "grad_norm": 0.348318487405777, |
| "learning_rate": 4.709758730787633e-06, |
| "loss": 0.6532, |
| "step": 1033 |
| }, |
| { |
| "epoch": 0.5065523576240049, |
| "grad_norm": 0.3409515917301178, |
| "learning_rate": 4.709148590194343e-06, |
| "loss": 0.6564, |
| "step": 1034 |
| }, |
| { |
| "epoch": 0.5070422535211268, |
| "grad_norm": 0.33423691987991333, |
| "learning_rate": 4.708537848567195e-06, |
| "loss": 0.625, |
| "step": 1035 |
| }, |
| { |
| "epoch": 0.5075321494182486, |
| "grad_norm": 0.33926403522491455, |
| "learning_rate": 4.707926506072352e-06, |
| "loss": 0.6819, |
| "step": 1036 |
| }, |
| { |
| "epoch": 0.5080220453153704, |
| "grad_norm": 0.32573071122169495, |
| "learning_rate": 4.7073145628761394e-06, |
| "loss": 0.6431, |
| "step": 1037 |
| }, |
| { |
| "epoch": 0.5085119412124923, |
| "grad_norm": 0.3247716426849365, |
| "learning_rate": 4.706702019145045e-06, |
| "loss": 0.6184, |
| "step": 1038 |
| }, |
| { |
| "epoch": 0.5090018371096142, |
| "grad_norm": 0.32566073536872864, |
| "learning_rate": 4.706088875045722e-06, |
| "loss": 0.6466, |
| "step": 1039 |
| }, |
| { |
| "epoch": 0.5094917330067361, |
| "grad_norm": 0.34337013959884644, |
| "learning_rate": 4.705475130744986e-06, |
| "loss": 0.6602, |
| "step": 1040 |
| }, |
| { |
| "epoch": 0.5099816289038579, |
| "grad_norm": 0.33207571506500244, |
| "learning_rate": 4.704860786409815e-06, |
| "loss": 0.6485, |
| "step": 1041 |
| }, |
| { |
| "epoch": 0.5104715248009798, |
| "grad_norm": 0.3431706428527832, |
| "learning_rate": 4.704245842207353e-06, |
| "loss": 0.6468, |
| "step": 1042 |
| }, |
| { |
| "epoch": 0.5109614206981017, |
| "grad_norm": 0.33269333839416504, |
| "learning_rate": 4.703630298304902e-06, |
| "loss": 0.6604, |
| "step": 1043 |
| }, |
| { |
| "epoch": 0.5114513165952235, |
| "grad_norm": 0.33562201261520386, |
| "learning_rate": 4.7030141548699325e-06, |
| "loss": 0.6441, |
| "step": 1044 |
| }, |
| { |
| "epoch": 0.5119412124923454, |
| "grad_norm": 0.32720717787742615, |
| "learning_rate": 4.702397412070076e-06, |
| "loss": 0.6311, |
| "step": 1045 |
| }, |
| { |
| "epoch": 0.5124311083894673, |
| "grad_norm": 0.3368285298347473, |
| "learning_rate": 4.701780070073129e-06, |
| "loss": 0.6353, |
| "step": 1046 |
| }, |
| { |
| "epoch": 0.5129210042865892, |
| "grad_norm": 0.34569135308265686, |
| "learning_rate": 4.701162129047045e-06, |
| "loss": 0.6613, |
| "step": 1047 |
| }, |
| { |
| "epoch": 0.5134109001837109, |
| "grad_norm": 0.33015212416648865, |
| "learning_rate": 4.700543589159948e-06, |
| "loss": 0.6612, |
| "step": 1048 |
| }, |
| { |
| "epoch": 0.5139007960808328, |
| "grad_norm": 0.3241855800151825, |
| "learning_rate": 4.699924450580121e-06, |
| "loss": 0.6207, |
| "step": 1049 |
| }, |
| { |
| "epoch": 0.5143906919779547, |
| "grad_norm": 0.34834349155426025, |
| "learning_rate": 4.699304713476009e-06, |
| "loss": 0.6437, |
| "step": 1050 |
| }, |
| { |
| "epoch": 0.5148805878750765, |
| "grad_norm": 0.3462347388267517, |
| "learning_rate": 4.698684378016223e-06, |
| "loss": 0.6784, |
| "step": 1051 |
| }, |
| { |
| "epoch": 0.5153704837721984, |
| "grad_norm": 0.34267929196357727, |
| "learning_rate": 4.698063444369534e-06, |
| "loss": 0.6536, |
| "step": 1052 |
| }, |
| { |
| "epoch": 0.5158603796693203, |
| "grad_norm": 0.3365679383277893, |
| "learning_rate": 4.697441912704878e-06, |
| "loss": 0.631, |
| "step": 1053 |
| }, |
| { |
| "epoch": 0.5163502755664421, |
| "grad_norm": 0.3212380111217499, |
| "learning_rate": 4.69681978319135e-06, |
| "loss": 0.6266, |
| "step": 1054 |
| }, |
| { |
| "epoch": 0.516840171463564, |
| "grad_norm": 0.3165789544582367, |
| "learning_rate": 4.696197055998213e-06, |
| "loss": 0.6088, |
| "step": 1055 |
| }, |
| { |
| "epoch": 0.5173300673606859, |
| "grad_norm": 0.33306971192359924, |
| "learning_rate": 4.695573731294888e-06, |
| "loss": 0.6357, |
| "step": 1056 |
| }, |
| { |
| "epoch": 0.5178199632578078, |
| "grad_norm": 0.3291991353034973, |
| "learning_rate": 4.694949809250962e-06, |
| "loss": 0.652, |
| "step": 1057 |
| }, |
| { |
| "epoch": 0.5183098591549296, |
| "grad_norm": 0.33322209119796753, |
| "learning_rate": 4.694325290036181e-06, |
| "loss": 0.6504, |
| "step": 1058 |
| }, |
| { |
| "epoch": 0.5187997550520514, |
| "grad_norm": 0.3383603096008301, |
| "learning_rate": 4.693700173820457e-06, |
| "loss": 0.6514, |
| "step": 1059 |
| }, |
| { |
| "epoch": 0.5192896509491733, |
| "grad_norm": 0.31692832708358765, |
| "learning_rate": 4.693074460773861e-06, |
| "loss": 0.6002, |
| "step": 1060 |
| }, |
| { |
| "epoch": 0.5197795468462951, |
| "grad_norm": 0.3406504690647125, |
| "learning_rate": 4.69244815106663e-06, |
| "loss": 0.6455, |
| "step": 1061 |
| }, |
| { |
| "epoch": 0.520269442743417, |
| "grad_norm": 0.33714982867240906, |
| "learning_rate": 4.691821244869161e-06, |
| "loss": 0.6195, |
| "step": 1062 |
| }, |
| { |
| "epoch": 0.5207593386405389, |
| "grad_norm": 0.33816972374916077, |
| "learning_rate": 4.691193742352013e-06, |
| "loss": 0.6575, |
| "step": 1063 |
| }, |
| { |
| "epoch": 0.5212492345376607, |
| "grad_norm": 0.3503166735172272, |
| "learning_rate": 4.690565643685908e-06, |
| "loss": 0.6625, |
| "step": 1064 |
| }, |
| { |
| "epoch": 0.5217391304347826, |
| "grad_norm": 0.3353184461593628, |
| "learning_rate": 4.689936949041731e-06, |
| "loss": 0.6535, |
| "step": 1065 |
| }, |
| { |
| "epoch": 0.5222290263319045, |
| "grad_norm": 0.34093669056892395, |
| "learning_rate": 4.689307658590527e-06, |
| "loss": 0.6244, |
| "step": 1066 |
| }, |
| { |
| "epoch": 0.5227189222290264, |
| "grad_norm": 0.3339327871799469, |
| "learning_rate": 4.688677772503506e-06, |
| "loss": 0.6777, |
| "step": 1067 |
| }, |
| { |
| "epoch": 0.5232088181261482, |
| "grad_norm": 0.3263406455516815, |
| "learning_rate": 4.688047290952038e-06, |
| "loss": 0.6408, |
| "step": 1068 |
| }, |
| { |
| "epoch": 0.5236987140232701, |
| "grad_norm": 0.3410097062587738, |
| "learning_rate": 4.687416214107655e-06, |
| "loss": 0.6394, |
| "step": 1069 |
| }, |
| { |
| "epoch": 0.524188609920392, |
| "grad_norm": 0.34035810828208923, |
| "learning_rate": 4.686784542142052e-06, |
| "loss": 0.6518, |
| "step": 1070 |
| }, |
| { |
| "epoch": 0.5246785058175137, |
| "grad_norm": 0.33170121908187866, |
| "learning_rate": 4.686152275227085e-06, |
| "loss": 0.6382, |
| "step": 1071 |
| }, |
| { |
| "epoch": 0.5251684017146356, |
| "grad_norm": 0.3450348973274231, |
| "learning_rate": 4.685519413534773e-06, |
| "loss": 0.6588, |
| "step": 1072 |
| }, |
| { |
| "epoch": 0.5256582976117575, |
| "grad_norm": 0.34389328956604004, |
| "learning_rate": 4.684885957237295e-06, |
| "loss": 0.6438, |
| "step": 1073 |
| }, |
| { |
| "epoch": 0.5261481935088793, |
| "grad_norm": 0.34908241033554077, |
| "learning_rate": 4.6842519065069924e-06, |
| "loss": 0.6369, |
| "step": 1074 |
| }, |
| { |
| "epoch": 0.5266380894060012, |
| "grad_norm": 0.3395238220691681, |
| "learning_rate": 4.683617261516371e-06, |
| "loss": 0.6595, |
| "step": 1075 |
| }, |
| { |
| "epoch": 0.5271279853031231, |
| "grad_norm": 0.3576270043849945, |
| "learning_rate": 4.682982022438093e-06, |
| "loss": 0.6664, |
| "step": 1076 |
| }, |
| { |
| "epoch": 0.527617881200245, |
| "grad_norm": 0.3306131064891815, |
| "learning_rate": 4.6823461894449885e-06, |
| "loss": 0.6365, |
| "step": 1077 |
| }, |
| { |
| "epoch": 0.5281077770973668, |
| "grad_norm": 0.3483700156211853, |
| "learning_rate": 4.681709762710042e-06, |
| "loss": 0.6366, |
| "step": 1078 |
| }, |
| { |
| "epoch": 0.5285976729944887, |
| "grad_norm": 0.3481180965900421, |
| "learning_rate": 4.681072742406408e-06, |
| "loss": 0.6414, |
| "step": 1079 |
| }, |
| { |
| "epoch": 0.5290875688916106, |
| "grad_norm": 0.3466982841491699, |
| "learning_rate": 4.680435128707395e-06, |
| "loss": 0.6119, |
| "step": 1080 |
| }, |
| { |
| "epoch": 0.5295774647887324, |
| "grad_norm": 0.35319292545318604, |
| "learning_rate": 4.679796921786477e-06, |
| "loss": 0.6763, |
| "step": 1081 |
| }, |
| { |
| "epoch": 0.5300673606858542, |
| "grad_norm": 0.33957529067993164, |
| "learning_rate": 4.679158121817287e-06, |
| "loss": 0.644, |
| "step": 1082 |
| }, |
| { |
| "epoch": 0.5305572565829761, |
| "grad_norm": 0.3421860337257385, |
| "learning_rate": 4.6785187289736215e-06, |
| "loss": 0.6345, |
| "step": 1083 |
| }, |
| { |
| "epoch": 0.5310471524800979, |
| "grad_norm": 0.3383704721927643, |
| "learning_rate": 4.677878743429438e-06, |
| "loss": 0.6402, |
| "step": 1084 |
| }, |
| { |
| "epoch": 0.5315370483772198, |
| "grad_norm": 0.33505967259407043, |
| "learning_rate": 4.677238165358854e-06, |
| "loss": 0.6463, |
| "step": 1085 |
| }, |
| { |
| "epoch": 0.5320269442743417, |
| "grad_norm": 0.33692505955696106, |
| "learning_rate": 4.676596994936149e-06, |
| "loss": 0.6614, |
| "step": 1086 |
| }, |
| { |
| "epoch": 0.5325168401714636, |
| "grad_norm": 0.3422906994819641, |
| "learning_rate": 4.675955232335764e-06, |
| "loss": 0.684, |
| "step": 1087 |
| }, |
| { |
| "epoch": 0.5330067360685854, |
| "grad_norm": 0.33133259415626526, |
| "learning_rate": 4.6753128777323e-06, |
| "loss": 0.6523, |
| "step": 1088 |
| }, |
| { |
| "epoch": 0.5334966319657073, |
| "grad_norm": 0.3325001001358032, |
| "learning_rate": 4.67466993130052e-06, |
| "loss": 0.6484, |
| "step": 1089 |
| }, |
| { |
| "epoch": 0.5339865278628292, |
| "grad_norm": 0.3328482508659363, |
| "learning_rate": 4.674026393215349e-06, |
| "loss": 0.6595, |
| "step": 1090 |
| }, |
| { |
| "epoch": 0.534476423759951, |
| "grad_norm": 0.3406400680541992, |
| "learning_rate": 4.673382263651869e-06, |
| "loss": 0.6405, |
| "step": 1091 |
| }, |
| { |
| "epoch": 0.5349663196570729, |
| "grad_norm": 0.3300803005695343, |
| "learning_rate": 4.6727375427853285e-06, |
| "loss": 0.629, |
| "step": 1092 |
| }, |
| { |
| "epoch": 0.5354562155541948, |
| "grad_norm": 0.34695202112197876, |
| "learning_rate": 4.672092230791132e-06, |
| "loss": 0.667, |
| "step": 1093 |
| }, |
| { |
| "epoch": 0.5359461114513165, |
| "grad_norm": 0.3438461422920227, |
| "learning_rate": 4.6714463278448475e-06, |
| "loss": 0.6396, |
| "step": 1094 |
| }, |
| { |
| "epoch": 0.5364360073484384, |
| "grad_norm": 0.33978861570358276, |
| "learning_rate": 4.670799834122204e-06, |
| "loss": 0.6649, |
| "step": 1095 |
| }, |
| { |
| "epoch": 0.5369259032455603, |
| "grad_norm": 0.34469425678253174, |
| "learning_rate": 4.6701527497990886e-06, |
| "loss": 0.6625, |
| "step": 1096 |
| }, |
| { |
| "epoch": 0.5374157991426822, |
| "grad_norm": 0.34310537576675415, |
| "learning_rate": 4.669505075051553e-06, |
| "loss": 0.6516, |
| "step": 1097 |
| }, |
| { |
| "epoch": 0.537905695039804, |
| "grad_norm": 0.34367427229881287, |
| "learning_rate": 4.668856810055805e-06, |
| "loss": 0.6483, |
| "step": 1098 |
| }, |
| { |
| "epoch": 0.5383955909369259, |
| "grad_norm": 0.33170315623283386, |
| "learning_rate": 4.668207954988217e-06, |
| "loss": 0.6468, |
| "step": 1099 |
| }, |
| { |
| "epoch": 0.5388854868340478, |
| "grad_norm": 0.3421383798122406, |
| "learning_rate": 4.66755851002532e-06, |
| "loss": 0.6819, |
| "step": 1100 |
| }, |
| { |
| "epoch": 0.5393753827311696, |
| "grad_norm": 0.3380526304244995, |
| "learning_rate": 4.666908475343805e-06, |
| "loss": 0.6373, |
| "step": 1101 |
| }, |
| { |
| "epoch": 0.5398652786282915, |
| "grad_norm": 0.35264331102371216, |
| "learning_rate": 4.666257851120525e-06, |
| "loss": 0.6699, |
| "step": 1102 |
| }, |
| { |
| "epoch": 0.5403551745254134, |
| "grad_norm": 0.3282771408557892, |
| "learning_rate": 4.6656066375324924e-06, |
| "loss": 0.6482, |
| "step": 1103 |
| }, |
| { |
| "epoch": 0.5408450704225352, |
| "grad_norm": 0.34882423281669617, |
| "learning_rate": 4.664954834756881e-06, |
| "loss": 0.665, |
| "step": 1104 |
| }, |
| { |
| "epoch": 0.541334966319657, |
| "grad_norm": 0.3674059808254242, |
| "learning_rate": 4.664302442971024e-06, |
| "loss": 0.6649, |
| "step": 1105 |
| }, |
| { |
| "epoch": 0.541824862216779, |
| "grad_norm": 0.3493945896625519, |
| "learning_rate": 4.663649462352413e-06, |
| "loss": 0.6531, |
| "step": 1106 |
| }, |
| { |
| "epoch": 0.5423147581139008, |
| "grad_norm": 0.347479909658432, |
| "learning_rate": 4.662995893078702e-06, |
| "loss": 0.6499, |
| "step": 1107 |
| }, |
| { |
| "epoch": 0.5428046540110226, |
| "grad_norm": 0.3363555669784546, |
| "learning_rate": 4.6623417353277074e-06, |
| "loss": 0.6465, |
| "step": 1108 |
| }, |
| { |
| "epoch": 0.5432945499081445, |
| "grad_norm": 0.35221952199935913, |
| "learning_rate": 4.661686989277401e-06, |
| "loss": 0.6255, |
| "step": 1109 |
| }, |
| { |
| "epoch": 0.5437844458052664, |
| "grad_norm": 0.3683142066001892, |
| "learning_rate": 4.661031655105917e-06, |
| "loss": 0.6582, |
| "step": 1110 |
| }, |
| { |
| "epoch": 0.5442743417023882, |
| "grad_norm": 0.3454713225364685, |
| "learning_rate": 4.660375732991551e-06, |
| "loss": 0.6543, |
| "step": 1111 |
| }, |
| { |
| "epoch": 0.5447642375995101, |
| "grad_norm": 0.33530518412590027, |
| "learning_rate": 4.659719223112755e-06, |
| "loss": 0.6654, |
| "step": 1112 |
| }, |
| { |
| "epoch": 0.545254133496632, |
| "grad_norm": 0.3434372544288635, |
| "learning_rate": 4.659062125648144e-06, |
| "loss": 0.6548, |
| "step": 1113 |
| }, |
| { |
| "epoch": 0.5457440293937539, |
| "grad_norm": 0.33734411001205444, |
| "learning_rate": 4.658404440776491e-06, |
| "loss": 0.6547, |
| "step": 1114 |
| }, |
| { |
| "epoch": 0.5462339252908757, |
| "grad_norm": 0.339223176240921, |
| "learning_rate": 4.657746168676731e-06, |
| "loss": 0.62, |
| "step": 1115 |
| }, |
| { |
| "epoch": 0.5467238211879976, |
| "grad_norm": 0.3487626910209656, |
| "learning_rate": 4.657087309527957e-06, |
| "loss": 0.6413, |
| "step": 1116 |
| }, |
| { |
| "epoch": 0.5472137170851195, |
| "grad_norm": 0.3410091996192932, |
| "learning_rate": 4.656427863509421e-06, |
| "loss": 0.6577, |
| "step": 1117 |
| }, |
| { |
| "epoch": 0.5477036129822412, |
| "grad_norm": 0.34262603521347046, |
| "learning_rate": 4.655767830800536e-06, |
| "loss": 0.6196, |
| "step": 1118 |
| }, |
| { |
| "epoch": 0.5481935088793631, |
| "grad_norm": 0.3456190526485443, |
| "learning_rate": 4.655107211580874e-06, |
| "loss": 0.6383, |
| "step": 1119 |
| }, |
| { |
| "epoch": 0.548683404776485, |
| "grad_norm": 0.3383759558200836, |
| "learning_rate": 4.654446006030168e-06, |
| "loss": 0.6492, |
| "step": 1120 |
| }, |
| { |
| "epoch": 0.5491733006736068, |
| "grad_norm": 0.3437906801700592, |
| "learning_rate": 4.653784214328309e-06, |
| "loss": 0.6358, |
| "step": 1121 |
| }, |
| { |
| "epoch": 0.5496631965707287, |
| "grad_norm": 0.34938323497772217, |
| "learning_rate": 4.653121836655348e-06, |
| "loss": 0.6887, |
| "step": 1122 |
| }, |
| { |
| "epoch": 0.5501530924678506, |
| "grad_norm": 0.35022029280662537, |
| "learning_rate": 4.652458873191496e-06, |
| "loss": 0.6198, |
| "step": 1123 |
| }, |
| { |
| "epoch": 0.5506429883649725, |
| "grad_norm": 0.36812126636505127, |
| "learning_rate": 4.651795324117121e-06, |
| "loss": 0.6725, |
| "step": 1124 |
| }, |
| { |
| "epoch": 0.5511328842620943, |
| "grad_norm": 0.33869123458862305, |
| "learning_rate": 4.651131189612753e-06, |
| "loss": 0.6463, |
| "step": 1125 |
| }, |
| { |
| "epoch": 0.5516227801592162, |
| "grad_norm": 0.3331858217716217, |
| "learning_rate": 4.6504664698590795e-06, |
| "loss": 0.6208, |
| "step": 1126 |
| }, |
| { |
| "epoch": 0.5521126760563381, |
| "grad_norm": 0.37185487151145935, |
| "learning_rate": 4.64980116503695e-06, |
| "loss": 0.6047, |
| "step": 1127 |
| }, |
| { |
| "epoch": 0.5526025719534599, |
| "grad_norm": 0.33874985575675964, |
| "learning_rate": 4.64913527532737e-06, |
| "loss": 0.6609, |
| "step": 1128 |
| }, |
| { |
| "epoch": 0.5530924678505817, |
| "grad_norm": 0.3434850871562958, |
| "learning_rate": 4.648468800911506e-06, |
| "loss": 0.64, |
| "step": 1129 |
| }, |
| { |
| "epoch": 0.5535823637477036, |
| "grad_norm": 0.3372526168823242, |
| "learning_rate": 4.64780174197068e-06, |
| "loss": 0.6444, |
| "step": 1130 |
| }, |
| { |
| "epoch": 0.5540722596448254, |
| "grad_norm": 0.34443947672843933, |
| "learning_rate": 4.64713409868638e-06, |
| "loss": 0.6298, |
| "step": 1131 |
| }, |
| { |
| "epoch": 0.5545621555419473, |
| "grad_norm": 0.3271819055080414, |
| "learning_rate": 4.646465871240246e-06, |
| "loss": 0.6408, |
| "step": 1132 |
| }, |
| { |
| "epoch": 0.5550520514390692, |
| "grad_norm": 0.35289159417152405, |
| "learning_rate": 4.645797059814083e-06, |
| "loss": 0.6305, |
| "step": 1133 |
| }, |
| { |
| "epoch": 0.5555419473361911, |
| "grad_norm": 0.3394930362701416, |
| "learning_rate": 4.645127664589848e-06, |
| "loss": 0.6392, |
| "step": 1134 |
| }, |
| { |
| "epoch": 0.5560318432333129, |
| "grad_norm": 0.34543678164482117, |
| "learning_rate": 4.644457685749663e-06, |
| "loss": 0.6357, |
| "step": 1135 |
| }, |
| { |
| "epoch": 0.5565217391304348, |
| "grad_norm": 0.34728914499282837, |
| "learning_rate": 4.643787123475806e-06, |
| "loss": 0.6703, |
| "step": 1136 |
| }, |
| { |
| "epoch": 0.5570116350275567, |
| "grad_norm": 0.3504582643508911, |
| "learning_rate": 4.643115977950713e-06, |
| "loss": 0.6491, |
| "step": 1137 |
| }, |
| { |
| "epoch": 0.5575015309246785, |
| "grad_norm": 0.34880971908569336, |
| "learning_rate": 4.642444249356981e-06, |
| "loss": 0.6385, |
| "step": 1138 |
| }, |
| { |
| "epoch": 0.5579914268218004, |
| "grad_norm": 0.3401770293712616, |
| "learning_rate": 4.641771937877364e-06, |
| "loss": 0.6738, |
| "step": 1139 |
| }, |
| { |
| "epoch": 0.5584813227189223, |
| "grad_norm": 0.3317437171936035, |
| "learning_rate": 4.641099043694775e-06, |
| "loss": 0.6203, |
| "step": 1140 |
| }, |
| { |
| "epoch": 0.558971218616044, |
| "grad_norm": 0.3573921322822571, |
| "learning_rate": 4.640425566992284e-06, |
| "loss": 0.657, |
| "step": 1141 |
| }, |
| { |
| "epoch": 0.5594611145131659, |
| "grad_norm": 0.361086368560791, |
| "learning_rate": 4.639751507953124e-06, |
| "loss": 0.6261, |
| "step": 1142 |
| }, |
| { |
| "epoch": 0.5599510104102878, |
| "grad_norm": 0.35282137989997864, |
| "learning_rate": 4.639076866760681e-06, |
| "loss": 0.6494, |
| "step": 1143 |
| }, |
| { |
| "epoch": 0.5604409063074097, |
| "grad_norm": 0.3424915671348572, |
| "learning_rate": 4.638401643598504e-06, |
| "loss": 0.6582, |
| "step": 1144 |
| }, |
| { |
| "epoch": 0.5609308022045315, |
| "grad_norm": 0.3451825976371765, |
| "learning_rate": 4.6377258386502956e-06, |
| "loss": 0.6469, |
| "step": 1145 |
| }, |
| { |
| "epoch": 0.5614206981016534, |
| "grad_norm": 0.38539865612983704, |
| "learning_rate": 4.637049452099921e-06, |
| "loss": 0.6455, |
| "step": 1146 |
| }, |
| { |
| "epoch": 0.5619105939987753, |
| "grad_norm": 0.34357380867004395, |
| "learning_rate": 4.636372484131402e-06, |
| "loss": 0.6174, |
| "step": 1147 |
| }, |
| { |
| "epoch": 0.5624004898958971, |
| "grad_norm": 0.3448379337787628, |
| "learning_rate": 4.6356949349289164e-06, |
| "loss": 0.6623, |
| "step": 1148 |
| }, |
| { |
| "epoch": 0.562890385793019, |
| "grad_norm": 0.33698979020118713, |
| "learning_rate": 4.635016804676804e-06, |
| "loss": 0.6447, |
| "step": 1149 |
| }, |
| { |
| "epoch": 0.5633802816901409, |
| "grad_norm": 0.3464658260345459, |
| "learning_rate": 4.634338093559561e-06, |
| "loss": 0.6389, |
| "step": 1150 |
| }, |
| { |
| "epoch": 0.5638701775872627, |
| "grad_norm": 0.3458057940006256, |
| "learning_rate": 4.633658801761841e-06, |
| "loss": 0.6406, |
| "step": 1151 |
| }, |
| { |
| "epoch": 0.5643600734843846, |
| "grad_norm": 0.33887600898742676, |
| "learning_rate": 4.6329789294684554e-06, |
| "loss": 0.639, |
| "step": 1152 |
| }, |
| { |
| "epoch": 0.5648499693815064, |
| "grad_norm": 0.3495391309261322, |
| "learning_rate": 4.632298476864376e-06, |
| "loss": 0.6676, |
| "step": 1153 |
| }, |
| { |
| "epoch": 0.5653398652786283, |
| "grad_norm": 0.34821441769599915, |
| "learning_rate": 4.631617444134729e-06, |
| "loss": 0.6338, |
| "step": 1154 |
| }, |
| { |
| "epoch": 0.5658297611757501, |
| "grad_norm": 0.3480973243713379, |
| "learning_rate": 4.630935831464801e-06, |
| "loss": 0.6485, |
| "step": 1155 |
| }, |
| { |
| "epoch": 0.566319657072872, |
| "grad_norm": 0.34872427582740784, |
| "learning_rate": 4.630253639040035e-06, |
| "loss": 0.6717, |
| "step": 1156 |
| }, |
| { |
| "epoch": 0.5668095529699939, |
| "grad_norm": 0.35412171483039856, |
| "learning_rate": 4.629570867046032e-06, |
| "loss": 0.6638, |
| "step": 1157 |
| }, |
| { |
| "epoch": 0.5672994488671157, |
| "grad_norm": 0.3398905396461487, |
| "learning_rate": 4.628887515668552e-06, |
| "loss": 0.6222, |
| "step": 1158 |
| }, |
| { |
| "epoch": 0.5677893447642376, |
| "grad_norm": 0.3369976878166199, |
| "learning_rate": 4.628203585093511e-06, |
| "loss": 0.6314, |
| "step": 1159 |
| }, |
| { |
| "epoch": 0.5682792406613595, |
| "grad_norm": 0.3475426733493805, |
| "learning_rate": 4.627519075506983e-06, |
| "loss": 0.6509, |
| "step": 1160 |
| }, |
| { |
| "epoch": 0.5687691365584813, |
| "grad_norm": 0.33894526958465576, |
| "learning_rate": 4.6268339870952e-06, |
| "loss": 0.6242, |
| "step": 1161 |
| }, |
| { |
| "epoch": 0.5692590324556032, |
| "grad_norm": 0.33706197142601013, |
| "learning_rate": 4.626148320044551e-06, |
| "loss": 0.6349, |
| "step": 1162 |
| }, |
| { |
| "epoch": 0.5697489283527251, |
| "grad_norm": 0.34657469391822815, |
| "learning_rate": 4.625462074541582e-06, |
| "loss": 0.6365, |
| "step": 1163 |
| }, |
| { |
| "epoch": 0.570238824249847, |
| "grad_norm": 0.33160915970802307, |
| "learning_rate": 4.624775250772999e-06, |
| "loss": 0.6154, |
| "step": 1164 |
| }, |
| { |
| "epoch": 0.5707287201469687, |
| "grad_norm": 0.3429828882217407, |
| "learning_rate": 4.62408784892566e-06, |
| "loss": 0.6216, |
| "step": 1165 |
| }, |
| { |
| "epoch": 0.5712186160440906, |
| "grad_norm": 0.3496789038181305, |
| "learning_rate": 4.623399869186587e-06, |
| "loss": 0.6116, |
| "step": 1166 |
| }, |
| { |
| "epoch": 0.5717085119412125, |
| "grad_norm": 0.3350716531276703, |
| "learning_rate": 4.622711311742954e-06, |
| "loss": 0.6266, |
| "step": 1167 |
| }, |
| { |
| "epoch": 0.5721984078383343, |
| "grad_norm": 0.3509974777698517, |
| "learning_rate": 4.6220221767820926e-06, |
| "loss": 0.6623, |
| "step": 1168 |
| }, |
| { |
| "epoch": 0.5726883037354562, |
| "grad_norm": 0.33819615840911865, |
| "learning_rate": 4.621332464491495e-06, |
| "loss": 0.6323, |
| "step": 1169 |
| }, |
| { |
| "epoch": 0.5731781996325781, |
| "grad_norm": 0.3499974310398102, |
| "learning_rate": 4.620642175058808e-06, |
| "loss": 0.6694, |
| "step": 1170 |
| }, |
| { |
| "epoch": 0.5736680955296999, |
| "grad_norm": 0.34968307614326477, |
| "learning_rate": 4.619951308671836e-06, |
| "loss": 0.6417, |
| "step": 1171 |
| }, |
| { |
| "epoch": 0.5741579914268218, |
| "grad_norm": 0.34229612350463867, |
| "learning_rate": 4.6192598655185396e-06, |
| "loss": 0.6577, |
| "step": 1172 |
| }, |
| { |
| "epoch": 0.5746478873239437, |
| "grad_norm": 0.35713833570480347, |
| "learning_rate": 4.618567845787036e-06, |
| "loss": 0.6494, |
| "step": 1173 |
| }, |
| { |
| "epoch": 0.5751377832210656, |
| "grad_norm": 0.3432537913322449, |
| "learning_rate": 4.617875249665601e-06, |
| "loss": 0.6442, |
| "step": 1174 |
| }, |
| { |
| "epoch": 0.5756276791181874, |
| "grad_norm": 0.34398871660232544, |
| "learning_rate": 4.617182077342668e-06, |
| "loss": 0.6111, |
| "step": 1175 |
| }, |
| { |
| "epoch": 0.5761175750153092, |
| "grad_norm": 0.3581888675689697, |
| "learning_rate": 4.616488329006822e-06, |
| "loss": 0.6443, |
| "step": 1176 |
| }, |
| { |
| "epoch": 0.5766074709124311, |
| "grad_norm": 0.34683945775032043, |
| "learning_rate": 4.615794004846811e-06, |
| "loss": 0.6402, |
| "step": 1177 |
| }, |
| { |
| "epoch": 0.5770973668095529, |
| "grad_norm": 0.3287530541419983, |
| "learning_rate": 4.615099105051536e-06, |
| "loss": 0.6354, |
| "step": 1178 |
| }, |
| { |
| "epoch": 0.5775872627066748, |
| "grad_norm": 0.33705562353134155, |
| "learning_rate": 4.614403629810055e-06, |
| "loss": 0.628, |
| "step": 1179 |
| }, |
| { |
| "epoch": 0.5780771586037967, |
| "grad_norm": 0.3340762257575989, |
| "learning_rate": 4.613707579311584e-06, |
| "loss": 0.6575, |
| "step": 1180 |
| }, |
| { |
| "epoch": 0.5785670545009186, |
| "grad_norm": 0.34472784399986267, |
| "learning_rate": 4.613010953745494e-06, |
| "loss": 0.6307, |
| "step": 1181 |
| }, |
| { |
| "epoch": 0.5790569503980404, |
| "grad_norm": 0.3373768925666809, |
| "learning_rate": 4.612313753301313e-06, |
| "loss": 0.638, |
| "step": 1182 |
| }, |
| { |
| "epoch": 0.5795468462951623, |
| "grad_norm": 0.35141313076019287, |
| "learning_rate": 4.611615978168725e-06, |
| "loss": 0.6398, |
| "step": 1183 |
| }, |
| { |
| "epoch": 0.5800367421922842, |
| "grad_norm": 0.35514089465141296, |
| "learning_rate": 4.610917628537571e-06, |
| "loss": 0.6478, |
| "step": 1184 |
| }, |
| { |
| "epoch": 0.580526638089406, |
| "grad_norm": 0.3332272469997406, |
| "learning_rate": 4.61021870459785e-06, |
| "loss": 0.6333, |
| "step": 1185 |
| }, |
| { |
| "epoch": 0.5810165339865279, |
| "grad_norm": 0.3489050269126892, |
| "learning_rate": 4.6095192065397125e-06, |
| "loss": 0.6534, |
| "step": 1186 |
| }, |
| { |
| "epoch": 0.5815064298836498, |
| "grad_norm": 0.3449384272098541, |
| "learning_rate": 4.608819134553469e-06, |
| "loss": 0.6622, |
| "step": 1187 |
| }, |
| { |
| "epoch": 0.5819963257807715, |
| "grad_norm": 0.3545195162296295, |
| "learning_rate": 4.608118488829587e-06, |
| "loss": 0.6835, |
| "step": 1188 |
| }, |
| { |
| "epoch": 0.5824862216778934, |
| "grad_norm": 0.37098783254623413, |
| "learning_rate": 4.607417269558685e-06, |
| "loss": 0.6472, |
| "step": 1189 |
| }, |
| { |
| "epoch": 0.5829761175750153, |
| "grad_norm": 0.32945647835731506, |
| "learning_rate": 4.606715476931543e-06, |
| "loss": 0.6581, |
| "step": 1190 |
| }, |
| { |
| "epoch": 0.5834660134721372, |
| "grad_norm": 0.33627375960350037, |
| "learning_rate": 4.606013111139095e-06, |
| "loss": 0.6253, |
| "step": 1191 |
| }, |
| { |
| "epoch": 0.583955909369259, |
| "grad_norm": 0.34866514801979065, |
| "learning_rate": 4.605310172372429e-06, |
| "loss": 0.6474, |
| "step": 1192 |
| }, |
| { |
| "epoch": 0.5844458052663809, |
| "grad_norm": 0.34531986713409424, |
| "learning_rate": 4.6046066608227925e-06, |
| "loss": 0.6454, |
| "step": 1193 |
| }, |
| { |
| "epoch": 0.5849357011635028, |
| "grad_norm": 0.35326138138771057, |
| "learning_rate": 4.6039025766815855e-06, |
| "loss": 0.6415, |
| "step": 1194 |
| }, |
| { |
| "epoch": 0.5854255970606246, |
| "grad_norm": 0.3371431529521942, |
| "learning_rate": 4.6031979201403655e-06, |
| "loss": 0.6329, |
| "step": 1195 |
| }, |
| { |
| "epoch": 0.5859154929577465, |
| "grad_norm": 0.34482115507125854, |
| "learning_rate": 4.602492691390847e-06, |
| "loss": 0.6554, |
| "step": 1196 |
| }, |
| { |
| "epoch": 0.5864053888548684, |
| "grad_norm": 0.3397802412509918, |
| "learning_rate": 4.601786890624896e-06, |
| "loss": 0.6374, |
| "step": 1197 |
| }, |
| { |
| "epoch": 0.5868952847519902, |
| "grad_norm": 0.34569984674453735, |
| "learning_rate": 4.601080518034539e-06, |
| "loss": 0.6305, |
| "step": 1198 |
| }, |
| { |
| "epoch": 0.587385180649112, |
| "grad_norm": 0.34162670373916626, |
| "learning_rate": 4.6003735738119546e-06, |
| "loss": 0.6238, |
| "step": 1199 |
| }, |
| { |
| "epoch": 0.5878750765462339, |
| "grad_norm": 0.32768315076828003, |
| "learning_rate": 4.59966605814948e-06, |
| "loss": 0.6465, |
| "step": 1200 |
| }, |
| { |
| "epoch": 0.5883649724433558, |
| "grad_norm": 0.33572009205818176, |
| "learning_rate": 4.598957971239603e-06, |
| "loss": 0.6497, |
| "step": 1201 |
| }, |
| { |
| "epoch": 0.5888548683404776, |
| "grad_norm": 0.34493955969810486, |
| "learning_rate": 4.598249313274972e-06, |
| "loss": 0.6389, |
| "step": 1202 |
| }, |
| { |
| "epoch": 0.5893447642375995, |
| "grad_norm": 0.32795292139053345, |
| "learning_rate": 4.5975400844483885e-06, |
| "loss": 0.648, |
| "step": 1203 |
| }, |
| { |
| "epoch": 0.5898346601347214, |
| "grad_norm": 0.34611472487449646, |
| "learning_rate": 4.596830284952809e-06, |
| "loss": 0.6359, |
| "step": 1204 |
| }, |
| { |
| "epoch": 0.5903245560318432, |
| "grad_norm": 0.353735089302063, |
| "learning_rate": 4.596119914981346e-06, |
| "loss": 0.6448, |
| "step": 1205 |
| }, |
| { |
| "epoch": 0.5908144519289651, |
| "grad_norm": 0.3331935703754425, |
| "learning_rate": 4.5954089747272665e-06, |
| "loss": 0.6347, |
| "step": 1206 |
| }, |
| { |
| "epoch": 0.591304347826087, |
| "grad_norm": 0.35150009393692017, |
| "learning_rate": 4.5946974643839934e-06, |
| "loss": 0.6409, |
| "step": 1207 |
| }, |
| { |
| "epoch": 0.5917942437232088, |
| "grad_norm": 0.3617503345012665, |
| "learning_rate": 4.593985384145103e-06, |
| "loss": 0.665, |
| "step": 1208 |
| }, |
| { |
| "epoch": 0.5922841396203307, |
| "grad_norm": 0.3458351492881775, |
| "learning_rate": 4.593272734204331e-06, |
| "loss": 0.6473, |
| "step": 1209 |
| }, |
| { |
| "epoch": 0.5927740355174526, |
| "grad_norm": 0.35933324694633484, |
| "learning_rate": 4.592559514755562e-06, |
| "loss": 0.6561, |
| "step": 1210 |
| }, |
| { |
| "epoch": 0.5932639314145745, |
| "grad_norm": 0.34097525477409363, |
| "learning_rate": 4.591845725992841e-06, |
| "loss": 0.6501, |
| "step": 1211 |
| }, |
| { |
| "epoch": 0.5937538273116962, |
| "grad_norm": 0.3733212649822235, |
| "learning_rate": 4.591131368110363e-06, |
| "loss": 0.6705, |
| "step": 1212 |
| }, |
| { |
| "epoch": 0.5942437232088181, |
| "grad_norm": 0.3457193374633789, |
| "learning_rate": 4.590416441302483e-06, |
| "loss": 0.641, |
| "step": 1213 |
| }, |
| { |
| "epoch": 0.59473361910594, |
| "grad_norm": 0.36639225482940674, |
| "learning_rate": 4.589700945763707e-06, |
| "loss": 0.6582, |
| "step": 1214 |
| }, |
| { |
| "epoch": 0.5952235150030618, |
| "grad_norm": 0.3443004786968231, |
| "learning_rate": 4.588984881688696e-06, |
| "loss": 0.6359, |
| "step": 1215 |
| }, |
| { |
| "epoch": 0.5957134109001837, |
| "grad_norm": 0.33908525109291077, |
| "learning_rate": 4.588268249272269e-06, |
| "loss": 0.5971, |
| "step": 1216 |
| }, |
| { |
| "epoch": 0.5962033067973056, |
| "grad_norm": 0.3528711199760437, |
| "learning_rate": 4.5875510487093946e-06, |
| "loss": 0.6577, |
| "step": 1217 |
| }, |
| { |
| "epoch": 0.5966932026944274, |
| "grad_norm": 0.3453948199748993, |
| "learning_rate": 4.5868332801952e-06, |
| "loss": 0.665, |
| "step": 1218 |
| }, |
| { |
| "epoch": 0.5971830985915493, |
| "grad_norm": 0.3543250858783722, |
| "learning_rate": 4.586114943924964e-06, |
| "loss": 0.6449, |
| "step": 1219 |
| }, |
| { |
| "epoch": 0.5976729944886712, |
| "grad_norm": 0.35478734970092773, |
| "learning_rate": 4.585396040094124e-06, |
| "loss": 0.634, |
| "step": 1220 |
| }, |
| { |
| "epoch": 0.5981628903857931, |
| "grad_norm": 0.35524943470954895, |
| "learning_rate": 4.584676568898267e-06, |
| "loss": 0.624, |
| "step": 1221 |
| }, |
| { |
| "epoch": 0.5986527862829149, |
| "grad_norm": 0.35198506712913513, |
| "learning_rate": 4.583956530533137e-06, |
| "loss": 0.6355, |
| "step": 1222 |
| }, |
| { |
| "epoch": 0.5991426821800367, |
| "grad_norm": 0.355237752199173, |
| "learning_rate": 4.583235925194632e-06, |
| "loss": 0.6491, |
| "step": 1223 |
| }, |
| { |
| "epoch": 0.5996325780771586, |
| "grad_norm": 0.348114550113678, |
| "learning_rate": 4.582514753078805e-06, |
| "loss": 0.6518, |
| "step": 1224 |
| }, |
| { |
| "epoch": 0.6001224739742804, |
| "grad_norm": 0.351275235414505, |
| "learning_rate": 4.581793014381861e-06, |
| "loss": 0.6102, |
| "step": 1225 |
| }, |
| { |
| "epoch": 0.6006123698714023, |
| "grad_norm": 0.3485579490661621, |
| "learning_rate": 4.58107070930016e-06, |
| "loss": 0.6296, |
| "step": 1226 |
| }, |
| { |
| "epoch": 0.6011022657685242, |
| "grad_norm": 0.3615967929363251, |
| "learning_rate": 4.580347838030219e-06, |
| "loss": 0.6501, |
| "step": 1227 |
| }, |
| { |
| "epoch": 0.601592161665646, |
| "grad_norm": 0.33674904704093933, |
| "learning_rate": 4.579624400768704e-06, |
| "loss": 0.6393, |
| "step": 1228 |
| }, |
| { |
| "epoch": 0.6020820575627679, |
| "grad_norm": 0.3606870174407959, |
| "learning_rate": 4.578900397712439e-06, |
| "loss": 0.6315, |
| "step": 1229 |
| }, |
| { |
| "epoch": 0.6025719534598898, |
| "grad_norm": 0.3589491844177246, |
| "learning_rate": 4.5781758290584e-06, |
| "loss": 0.632, |
| "step": 1230 |
| }, |
| { |
| "epoch": 0.6030618493570117, |
| "grad_norm": 0.3339066803455353, |
| "learning_rate": 4.577450695003717e-06, |
| "loss": 0.6391, |
| "step": 1231 |
| }, |
| { |
| "epoch": 0.6035517452541335, |
| "grad_norm": 0.3310963213443756, |
| "learning_rate": 4.576724995745674e-06, |
| "loss": 0.6447, |
| "step": 1232 |
| }, |
| { |
| "epoch": 0.6040416411512554, |
| "grad_norm": 0.3522682785987854, |
| "learning_rate": 4.5759987314817115e-06, |
| "loss": 0.6486, |
| "step": 1233 |
| }, |
| { |
| "epoch": 0.6045315370483773, |
| "grad_norm": 0.3547263443470001, |
| "learning_rate": 4.575271902409418e-06, |
| "loss": 0.6352, |
| "step": 1234 |
| }, |
| { |
| "epoch": 0.605021432945499, |
| "grad_norm": 0.34916889667510986, |
| "learning_rate": 4.574544508726539e-06, |
| "loss": 0.6312, |
| "step": 1235 |
| }, |
| { |
| "epoch": 0.6055113288426209, |
| "grad_norm": 0.33695587515830994, |
| "learning_rate": 4.573816550630977e-06, |
| "loss": 0.6345, |
| "step": 1236 |
| }, |
| { |
| "epoch": 0.6060012247397428, |
| "grad_norm": 0.345257967710495, |
| "learning_rate": 4.57308802832078e-06, |
| "loss": 0.6353, |
| "step": 1237 |
| }, |
| { |
| "epoch": 0.6064911206368646, |
| "grad_norm": 0.36001551151275635, |
| "learning_rate": 4.572358941994158e-06, |
| "loss": 0.6327, |
| "step": 1238 |
| }, |
| { |
| "epoch": 0.6069810165339865, |
| "grad_norm": 0.3538808822631836, |
| "learning_rate": 4.571629291849467e-06, |
| "loss": 0.6475, |
| "step": 1239 |
| }, |
| { |
| "epoch": 0.6074709124311084, |
| "grad_norm": 0.34200891852378845, |
| "learning_rate": 4.570899078085223e-06, |
| "loss": 0.6101, |
| "step": 1240 |
| }, |
| { |
| "epoch": 0.6079608083282303, |
| "grad_norm": 0.34063735604286194, |
| "learning_rate": 4.57016830090009e-06, |
| "loss": 0.6288, |
| "step": 1241 |
| }, |
| { |
| "epoch": 0.6084507042253521, |
| "grad_norm": 0.3665306866168976, |
| "learning_rate": 4.569436960492889e-06, |
| "loss": 0.6801, |
| "step": 1242 |
| }, |
| { |
| "epoch": 0.608940600122474, |
| "grad_norm": 0.35070276260375977, |
| "learning_rate": 4.5687050570625915e-06, |
| "loss": 0.6408, |
| "step": 1243 |
| }, |
| { |
| "epoch": 0.6094304960195959, |
| "grad_norm": 0.33832818269729614, |
| "learning_rate": 4.5679725908083236e-06, |
| "loss": 0.6426, |
| "step": 1244 |
| }, |
| { |
| "epoch": 0.6099203919167177, |
| "grad_norm": 0.34335771203041077, |
| "learning_rate": 4.5672395619293645e-06, |
| "loss": 0.6562, |
| "step": 1245 |
| }, |
| { |
| "epoch": 0.6104102878138395, |
| "grad_norm": 0.3389594554901123, |
| "learning_rate": 4.566505970625147e-06, |
| "loss": 0.6519, |
| "step": 1246 |
| }, |
| { |
| "epoch": 0.6109001837109614, |
| "grad_norm": 0.33421632647514343, |
| "learning_rate": 4.565771817095255e-06, |
| "loss": 0.6308, |
| "step": 1247 |
| }, |
| { |
| "epoch": 0.6113900796080833, |
| "grad_norm": 0.3511391580104828, |
| "learning_rate": 4.565037101539428e-06, |
| "loss": 0.6095, |
| "step": 1248 |
| }, |
| { |
| "epoch": 0.6118799755052051, |
| "grad_norm": 0.3483385741710663, |
| "learning_rate": 4.564301824157557e-06, |
| "loss": 0.6042, |
| "step": 1249 |
| }, |
| { |
| "epoch": 0.612369871402327, |
| "grad_norm": 0.33592280745506287, |
| "learning_rate": 4.563565985149684e-06, |
| "loss": 0.6507, |
| "step": 1250 |
| }, |
| { |
| "epoch": 0.6128597672994489, |
| "grad_norm": 0.35174697637557983, |
| "learning_rate": 4.562829584716007e-06, |
| "loss": 0.6316, |
| "step": 1251 |
| }, |
| { |
| "epoch": 0.6133496631965707, |
| "grad_norm": 0.34856846928596497, |
| "learning_rate": 4.562092623056875e-06, |
| "loss": 0.6196, |
| "step": 1252 |
| }, |
| { |
| "epoch": 0.6138395590936926, |
| "grad_norm": 0.3431689143180847, |
| "learning_rate": 4.5613551003727905e-06, |
| "loss": 0.6273, |
| "step": 1253 |
| }, |
| { |
| "epoch": 0.6143294549908145, |
| "grad_norm": 0.34159907698631287, |
| "learning_rate": 4.560617016864408e-06, |
| "loss": 0.6265, |
| "step": 1254 |
| }, |
| { |
| "epoch": 0.6148193508879363, |
| "grad_norm": 0.3400711715221405, |
| "learning_rate": 4.5598783727325345e-06, |
| "loss": 0.6318, |
| "step": 1255 |
| }, |
| { |
| "epoch": 0.6153092467850582, |
| "grad_norm": 0.3741343319416046, |
| "learning_rate": 4.559139168178131e-06, |
| "loss": 0.6409, |
| "step": 1256 |
| }, |
| { |
| "epoch": 0.6157991426821801, |
| "grad_norm": 0.342777818441391, |
| "learning_rate": 4.558399403402308e-06, |
| "loss": 0.6472, |
| "step": 1257 |
| }, |
| { |
| "epoch": 0.616289038579302, |
| "grad_norm": 0.3362821936607361, |
| "learning_rate": 4.557659078606332e-06, |
| "loss": 0.6174, |
| "step": 1258 |
| }, |
| { |
| "epoch": 0.6167789344764237, |
| "grad_norm": 0.3407340347766876, |
| "learning_rate": 4.5569181939916195e-06, |
| "loss": 0.6475, |
| "step": 1259 |
| }, |
| { |
| "epoch": 0.6172688303735456, |
| "grad_norm": 0.3264087736606598, |
| "learning_rate": 4.5561767497597385e-06, |
| "loss": 0.6583, |
| "step": 1260 |
| }, |
| { |
| "epoch": 0.6177587262706675, |
| "grad_norm": 0.3396836221218109, |
| "learning_rate": 4.555434746112412e-06, |
| "loss": 0.656, |
| "step": 1261 |
| }, |
| { |
| "epoch": 0.6182486221677893, |
| "grad_norm": 0.3620426654815674, |
| "learning_rate": 4.5546921832515145e-06, |
| "loss": 0.642, |
| "step": 1262 |
| }, |
| { |
| "epoch": 0.6187385180649112, |
| "grad_norm": 0.3363342881202698, |
| "learning_rate": 4.5539490613790715e-06, |
| "loss": 0.6303, |
| "step": 1263 |
| }, |
| { |
| "epoch": 0.6192284139620331, |
| "grad_norm": 0.3500632047653198, |
| "learning_rate": 4.55320538069726e-06, |
| "loss": 0.6407, |
| "step": 1264 |
| }, |
| { |
| "epoch": 0.6197183098591549, |
| "grad_norm": 0.35342806577682495, |
| "learning_rate": 4.55246114140841e-06, |
| "loss": 0.6814, |
| "step": 1265 |
| }, |
| { |
| "epoch": 0.6202082057562768, |
| "grad_norm": 0.336715966463089, |
| "learning_rate": 4.5517163437150056e-06, |
| "loss": 0.6444, |
| "step": 1266 |
| }, |
| { |
| "epoch": 0.6206981016533987, |
| "grad_norm": 0.3470943868160248, |
| "learning_rate": 4.55097098781968e-06, |
| "loss": 0.6302, |
| "step": 1267 |
| }, |
| { |
| "epoch": 0.6211879975505206, |
| "grad_norm": 0.3440588116645813, |
| "learning_rate": 4.5502250739252165e-06, |
| "loss": 0.637, |
| "step": 1268 |
| }, |
| { |
| "epoch": 0.6216778934476423, |
| "grad_norm": 0.34795132279396057, |
| "learning_rate": 4.5494786022345565e-06, |
| "loss": 0.6319, |
| "step": 1269 |
| }, |
| { |
| "epoch": 0.6221677893447642, |
| "grad_norm": 0.35112684965133667, |
| "learning_rate": 4.548731572950788e-06, |
| "loss": 0.6207, |
| "step": 1270 |
| }, |
| { |
| "epoch": 0.6226576852418861, |
| "grad_norm": 0.34473106265068054, |
| "learning_rate": 4.547983986277151e-06, |
| "loss": 0.6297, |
| "step": 1271 |
| }, |
| { |
| "epoch": 0.6231475811390079, |
| "grad_norm": 0.3364686667919159, |
| "learning_rate": 4.547235842417041e-06, |
| "loss": 0.6783, |
| "step": 1272 |
| }, |
| { |
| "epoch": 0.6236374770361298, |
| "grad_norm": 0.3369466960430145, |
| "learning_rate": 4.546487141574e-06, |
| "loss": 0.6229, |
| "step": 1273 |
| }, |
| { |
| "epoch": 0.6241273729332517, |
| "grad_norm": 0.3586369752883911, |
| "learning_rate": 4.545737883951724e-06, |
| "loss": 0.6675, |
| "step": 1274 |
| }, |
| { |
| "epoch": 0.6246172688303735, |
| "grad_norm": 0.34704333543777466, |
| "learning_rate": 4.544988069754061e-06, |
| "loss": 0.6376, |
| "step": 1275 |
| }, |
| { |
| "epoch": 0.6251071647274954, |
| "grad_norm": 0.3543643057346344, |
| "learning_rate": 4.544237699185012e-06, |
| "loss": 0.6614, |
| "step": 1276 |
| }, |
| { |
| "epoch": 0.6255970606246173, |
| "grad_norm": 0.3370320796966553, |
| "learning_rate": 4.543486772448722e-06, |
| "loss": 0.6609, |
| "step": 1277 |
| }, |
| { |
| "epoch": 0.6260869565217392, |
| "grad_norm": 0.35931840538978577, |
| "learning_rate": 4.542735289749498e-06, |
| "loss": 0.6458, |
| "step": 1278 |
| }, |
| { |
| "epoch": 0.626576852418861, |
| "grad_norm": 0.35878509283065796, |
| "learning_rate": 4.5419832512917895e-06, |
| "loss": 0.658, |
| "step": 1279 |
| }, |
| { |
| "epoch": 0.6270667483159829, |
| "grad_norm": 0.3401985168457031, |
| "learning_rate": 4.5412306572802014e-06, |
| "loss": 0.6241, |
| "step": 1280 |
| }, |
| { |
| "epoch": 0.6275566442131048, |
| "grad_norm": 0.3442074656486511, |
| "learning_rate": 4.540477507919489e-06, |
| "loss": 0.6685, |
| "step": 1281 |
| }, |
| { |
| "epoch": 0.6280465401102265, |
| "grad_norm": 0.3460010290145874, |
| "learning_rate": 4.539723803414557e-06, |
| "loss": 0.6107, |
| "step": 1282 |
| }, |
| { |
| "epoch": 0.6285364360073484, |
| "grad_norm": 0.3613050580024719, |
| "learning_rate": 4.538969543970465e-06, |
| "loss": 0.6691, |
| "step": 1283 |
| }, |
| { |
| "epoch": 0.6290263319044703, |
| "grad_norm": 0.3598838448524475, |
| "learning_rate": 4.53821472979242e-06, |
| "loss": 0.6615, |
| "step": 1284 |
| }, |
| { |
| "epoch": 0.6295162278015921, |
| "grad_norm": 0.3557281792163849, |
| "learning_rate": 4.537459361085781e-06, |
| "loss": 0.6287, |
| "step": 1285 |
| }, |
| { |
| "epoch": 0.630006123698714, |
| "grad_norm": 0.33958539366722107, |
| "learning_rate": 4.5367034380560584e-06, |
| "loss": 0.6398, |
| "step": 1286 |
| }, |
| { |
| "epoch": 0.6304960195958359, |
| "grad_norm": 0.36244890093803406, |
| "learning_rate": 4.535946960908913e-06, |
| "loss": 0.6518, |
| "step": 1287 |
| }, |
| { |
| "epoch": 0.6309859154929578, |
| "grad_norm": 0.3795658349990845, |
| "learning_rate": 4.535189929850156e-06, |
| "loss": 0.6558, |
| "step": 1288 |
| }, |
| { |
| "epoch": 0.6314758113900796, |
| "grad_norm": 0.359651118516922, |
| "learning_rate": 4.534432345085751e-06, |
| "loss": 0.6182, |
| "step": 1289 |
| }, |
| { |
| "epoch": 0.6319657072872015, |
| "grad_norm": 0.37145259976387024, |
| "learning_rate": 4.533674206821809e-06, |
| "loss": 0.6564, |
| "step": 1290 |
| }, |
| { |
| "epoch": 0.6324556031843234, |
| "grad_norm": 0.3477351665496826, |
| "learning_rate": 4.532915515264595e-06, |
| "loss": 0.648, |
| "step": 1291 |
| }, |
| { |
| "epoch": 0.6329454990814452, |
| "grad_norm": 0.33393770456314087, |
| "learning_rate": 4.532156270620522e-06, |
| "loss": 0.6628, |
| "step": 1292 |
| }, |
| { |
| "epoch": 0.633435394978567, |
| "grad_norm": 0.35315823554992676, |
| "learning_rate": 4.531396473096156e-06, |
| "loss": 0.6305, |
| "step": 1293 |
| }, |
| { |
| "epoch": 0.6339252908756889, |
| "grad_norm": 0.35018977522850037, |
| "learning_rate": 4.53063612289821e-06, |
| "loss": 0.645, |
| "step": 1294 |
| }, |
| { |
| "epoch": 0.6344151867728107, |
| "grad_norm": 0.36220550537109375, |
| "learning_rate": 4.529875220233551e-06, |
| "loss": 0.6243, |
| "step": 1295 |
| }, |
| { |
| "epoch": 0.6349050826699326, |
| "grad_norm": 0.35167255997657776, |
| "learning_rate": 4.5291137653091935e-06, |
| "loss": 0.6497, |
| "step": 1296 |
| }, |
| { |
| "epoch": 0.6353949785670545, |
| "grad_norm": 0.36633917689323425, |
| "learning_rate": 4.528351758332303e-06, |
| "loss": 0.6378, |
| "step": 1297 |
| }, |
| { |
| "epoch": 0.6358848744641764, |
| "grad_norm": 0.3628346621990204, |
| "learning_rate": 4.5275891995101975e-06, |
| "loss": 0.6283, |
| "step": 1298 |
| }, |
| { |
| "epoch": 0.6363747703612982, |
| "grad_norm": 0.34469640254974365, |
| "learning_rate": 4.526826089050341e-06, |
| "loss": 0.6366, |
| "step": 1299 |
| }, |
| { |
| "epoch": 0.6368646662584201, |
| "grad_norm": 0.3598330020904541, |
| "learning_rate": 4.526062427160353e-06, |
| "loss": 0.6479, |
| "step": 1300 |
| }, |
| { |
| "epoch": 0.637354562155542, |
| "grad_norm": 0.3442913591861725, |
| "learning_rate": 4.525298214047995e-06, |
| "loss": 0.6231, |
| "step": 1301 |
| }, |
| { |
| "epoch": 0.6378444580526638, |
| "grad_norm": 0.3670358955860138, |
| "learning_rate": 4.5245334499211874e-06, |
| "loss": 0.6407, |
| "step": 1302 |
| }, |
| { |
| "epoch": 0.6383343539497857, |
| "grad_norm": 0.36164307594299316, |
| "learning_rate": 4.523768134987995e-06, |
| "loss": 0.6431, |
| "step": 1303 |
| }, |
| { |
| "epoch": 0.6388242498469076, |
| "grad_norm": 0.33182641863822937, |
| "learning_rate": 4.523002269456633e-06, |
| "loss": 0.6371, |
| "step": 1304 |
| }, |
| { |
| "epoch": 0.6393141457440293, |
| "grad_norm": 0.3582742214202881, |
| "learning_rate": 4.522235853535468e-06, |
| "loss": 0.6212, |
| "step": 1305 |
| }, |
| { |
| "epoch": 0.6398040416411512, |
| "grad_norm": 0.3448125123977661, |
| "learning_rate": 4.521468887433016e-06, |
| "loss": 0.6476, |
| "step": 1306 |
| }, |
| { |
| "epoch": 0.6402939375382731, |
| "grad_norm": 0.355258584022522, |
| "learning_rate": 4.520701371357942e-06, |
| "loss": 0.6284, |
| "step": 1307 |
| }, |
| { |
| "epoch": 0.640783833435395, |
| "grad_norm": 0.3545528054237366, |
| "learning_rate": 4.519933305519061e-06, |
| "loss": 0.6225, |
| "step": 1308 |
| }, |
| { |
| "epoch": 0.6412737293325168, |
| "grad_norm": 0.35198697447776794, |
| "learning_rate": 4.5191646901253375e-06, |
| "loss": 0.6257, |
| "step": 1309 |
| }, |
| { |
| "epoch": 0.6417636252296387, |
| "grad_norm": 0.3620002567768097, |
| "learning_rate": 4.518395525385884e-06, |
| "loss": 0.642, |
| "step": 1310 |
| }, |
| { |
| "epoch": 0.6422535211267606, |
| "grad_norm": 0.35877859592437744, |
| "learning_rate": 4.517625811509966e-06, |
| "loss": 0.66, |
| "step": 1311 |
| }, |
| { |
| "epoch": 0.6427434170238824, |
| "grad_norm": 0.3415077328681946, |
| "learning_rate": 4.5168555487069955e-06, |
| "loss": 0.615, |
| "step": 1312 |
| }, |
| { |
| "epoch": 0.6432333129210043, |
| "grad_norm": 0.34771469235420227, |
| "learning_rate": 4.516084737186534e-06, |
| "loss": 0.6361, |
| "step": 1313 |
| }, |
| { |
| "epoch": 0.6437232088181262, |
| "grad_norm": 0.35810625553131104, |
| "learning_rate": 4.515313377158295e-06, |
| "loss": 0.6555, |
| "step": 1314 |
| }, |
| { |
| "epoch": 0.6442131047152481, |
| "grad_norm": 0.36541587114334106, |
| "learning_rate": 4.514541468832137e-06, |
| "loss": 0.6216, |
| "step": 1315 |
| }, |
| { |
| "epoch": 0.6447030006123698, |
| "grad_norm": 0.3559669554233551, |
| "learning_rate": 4.5137690124180714e-06, |
| "loss": 0.6304, |
| "step": 1316 |
| }, |
| { |
| "epoch": 0.6451928965094917, |
| "grad_norm": 0.3511303663253784, |
| "learning_rate": 4.512996008126256e-06, |
| "loss": 0.6497, |
| "step": 1317 |
| }, |
| { |
| "epoch": 0.6456827924066136, |
| "grad_norm": 0.3545239269733429, |
| "learning_rate": 4.512222456166999e-06, |
| "loss": 0.6696, |
| "step": 1318 |
| }, |
| { |
| "epoch": 0.6461726883037354, |
| "grad_norm": 0.35808631777763367, |
| "learning_rate": 4.511448356750759e-06, |
| "loss": 0.6499, |
| "step": 1319 |
| }, |
| { |
| "epoch": 0.6466625842008573, |
| "grad_norm": 0.34952282905578613, |
| "learning_rate": 4.51067371008814e-06, |
| "loss": 0.628, |
| "step": 1320 |
| }, |
| { |
| "epoch": 0.6471524800979792, |
| "grad_norm": 0.33353301882743835, |
| "learning_rate": 4.509898516389899e-06, |
| "loss": 0.6132, |
| "step": 1321 |
| }, |
| { |
| "epoch": 0.647642375995101, |
| "grad_norm": 0.3683788478374481, |
| "learning_rate": 4.509122775866937e-06, |
| "loss": 0.6341, |
| "step": 1322 |
| }, |
| { |
| "epoch": 0.6481322718922229, |
| "grad_norm": 0.3552803695201874, |
| "learning_rate": 4.50834648873031e-06, |
| "loss": 0.62, |
| "step": 1323 |
| }, |
| { |
| "epoch": 0.6486221677893448, |
| "grad_norm": 0.35079067945480347, |
| "learning_rate": 4.507569655191216e-06, |
| "loss": 0.6079, |
| "step": 1324 |
| }, |
| { |
| "epoch": 0.6491120636864667, |
| "grad_norm": 0.34289830923080444, |
| "learning_rate": 4.506792275461007e-06, |
| "loss": 0.6469, |
| "step": 1325 |
| }, |
| { |
| "epoch": 0.6496019595835885, |
| "grad_norm": 0.3386910855770111, |
| "learning_rate": 4.506014349751179e-06, |
| "loss": 0.6557, |
| "step": 1326 |
| }, |
| { |
| "epoch": 0.6500918554807104, |
| "grad_norm": 0.37173083424568176, |
| "learning_rate": 4.505235878273382e-06, |
| "loss": 0.6401, |
| "step": 1327 |
| }, |
| { |
| "epoch": 0.6505817513778323, |
| "grad_norm": 0.37466418743133545, |
| "learning_rate": 4.504456861239409e-06, |
| "loss": 0.6153, |
| "step": 1328 |
| }, |
| { |
| "epoch": 0.651071647274954, |
| "grad_norm": 0.36054158210754395, |
| "learning_rate": 4.503677298861206e-06, |
| "loss": 0.6145, |
| "step": 1329 |
| }, |
| { |
| "epoch": 0.6515615431720759, |
| "grad_norm": 0.3630097508430481, |
| "learning_rate": 4.502897191350863e-06, |
| "loss": 0.6296, |
| "step": 1330 |
| }, |
| { |
| "epoch": 0.6520514390691978, |
| "grad_norm": 0.3524821400642395, |
| "learning_rate": 4.502116538920623e-06, |
| "loss": 0.6417, |
| "step": 1331 |
| }, |
| { |
| "epoch": 0.6525413349663196, |
| "grad_norm": 0.3513556122779846, |
| "learning_rate": 4.501335341782874e-06, |
| "loss": 0.6269, |
| "step": 1332 |
| }, |
| { |
| "epoch": 0.6530312308634415, |
| "grad_norm": 0.3473247289657593, |
| "learning_rate": 4.500553600150153e-06, |
| "loss": 0.6107, |
| "step": 1333 |
| }, |
| { |
| "epoch": 0.6535211267605634, |
| "grad_norm": 0.35425952076911926, |
| "learning_rate": 4.4997713142351436e-06, |
| "loss": 0.649, |
| "step": 1334 |
| }, |
| { |
| "epoch": 0.6540110226576853, |
| "grad_norm": 0.3341909945011139, |
| "learning_rate": 4.498988484250681e-06, |
| "loss": 0.6366, |
| "step": 1335 |
| }, |
| { |
| "epoch": 0.6545009185548071, |
| "grad_norm": 0.340781569480896, |
| "learning_rate": 4.498205110409746e-06, |
| "loss": 0.6243, |
| "step": 1336 |
| }, |
| { |
| "epoch": 0.654990814451929, |
| "grad_norm": 0.36256301403045654, |
| "learning_rate": 4.497421192925468e-06, |
| "loss": 0.6265, |
| "step": 1337 |
| }, |
| { |
| "epoch": 0.6554807103490509, |
| "grad_norm": 0.3434717357158661, |
| "learning_rate": 4.496636732011123e-06, |
| "loss": 0.63, |
| "step": 1338 |
| }, |
| { |
| "epoch": 0.6559706062461726, |
| "grad_norm": 0.3541300296783447, |
| "learning_rate": 4.495851727880139e-06, |
| "loss": 0.6459, |
| "step": 1339 |
| }, |
| { |
| "epoch": 0.6564605021432945, |
| "grad_norm": 0.3564087450504303, |
| "learning_rate": 4.4950661807460845e-06, |
| "loss": 0.6334, |
| "step": 1340 |
| }, |
| { |
| "epoch": 0.6569503980404164, |
| "grad_norm": 0.34885209798812866, |
| "learning_rate": 4.494280090822684e-06, |
| "loss": 0.6317, |
| "step": 1341 |
| }, |
| { |
| "epoch": 0.6574402939375382, |
| "grad_norm": 0.3564472496509552, |
| "learning_rate": 4.493493458323804e-06, |
| "loss": 0.6494, |
| "step": 1342 |
| }, |
| { |
| "epoch": 0.6579301898346601, |
| "grad_norm": 0.35500457882881165, |
| "learning_rate": 4.492706283463459e-06, |
| "loss": 0.6453, |
| "step": 1343 |
| }, |
| { |
| "epoch": 0.658420085731782, |
| "grad_norm": 0.3623114824295044, |
| "learning_rate": 4.4919185664558155e-06, |
| "loss": 0.6336, |
| "step": 1344 |
| }, |
| { |
| "epoch": 0.6589099816289039, |
| "grad_norm": 0.3338193893432617, |
| "learning_rate": 4.4911303075151815e-06, |
| "loss": 0.6408, |
| "step": 1345 |
| }, |
| { |
| "epoch": 0.6593998775260257, |
| "grad_norm": 0.3407876193523407, |
| "learning_rate": 4.490341506856017e-06, |
| "loss": 0.6248, |
| "step": 1346 |
| }, |
| { |
| "epoch": 0.6598897734231476, |
| "grad_norm": 0.3452967703342438, |
| "learning_rate": 4.489552164692926e-06, |
| "loss": 0.662, |
| "step": 1347 |
| }, |
| { |
| "epoch": 0.6603796693202695, |
| "grad_norm": 0.35615143179893494, |
| "learning_rate": 4.488762281240664e-06, |
| "loss": 0.6369, |
| "step": 1348 |
| }, |
| { |
| "epoch": 0.6608695652173913, |
| "grad_norm": 0.35876408219337463, |
| "learning_rate": 4.48797185671413e-06, |
| "loss": 0.6448, |
| "step": 1349 |
| }, |
| { |
| "epoch": 0.6613594611145132, |
| "grad_norm": 0.3522142171859741, |
| "learning_rate": 4.487180891328371e-06, |
| "loss": 0.6286, |
| "step": 1350 |
| }, |
| { |
| "epoch": 0.661849357011635, |
| "grad_norm": 0.3411237299442291, |
| "learning_rate": 4.486389385298583e-06, |
| "loss": 0.6281, |
| "step": 1351 |
| }, |
| { |
| "epoch": 0.6623392529087568, |
| "grad_norm": 0.3518471121788025, |
| "learning_rate": 4.485597338840106e-06, |
| "loss": 0.6391, |
| "step": 1352 |
| }, |
| { |
| "epoch": 0.6628291488058787, |
| "grad_norm": 0.35424360632896423, |
| "learning_rate": 4.48480475216843e-06, |
| "loss": 0.6272, |
| "step": 1353 |
| }, |
| { |
| "epoch": 0.6633190447030006, |
| "grad_norm": 0.35385259985923767, |
| "learning_rate": 4.48401162549919e-06, |
| "loss": 0.6423, |
| "step": 1354 |
| }, |
| { |
| "epoch": 0.6638089406001225, |
| "grad_norm": 0.33582809567451477, |
| "learning_rate": 4.48321795904817e-06, |
| "loss": 0.6471, |
| "step": 1355 |
| }, |
| { |
| "epoch": 0.6642988364972443, |
| "grad_norm": 0.35831162333488464, |
| "learning_rate": 4.4824237530312984e-06, |
| "loss": 0.6354, |
| "step": 1356 |
| }, |
| { |
| "epoch": 0.6647887323943662, |
| "grad_norm": 0.33920028805732727, |
| "learning_rate": 4.481629007664652e-06, |
| "loss": 0.6028, |
| "step": 1357 |
| }, |
| { |
| "epoch": 0.6652786282914881, |
| "grad_norm": 0.33606863021850586, |
| "learning_rate": 4.480833723164453e-06, |
| "loss": 0.6025, |
| "step": 1358 |
| }, |
| { |
| "epoch": 0.6657685241886099, |
| "grad_norm": 0.3546634316444397, |
| "learning_rate": 4.480037899747073e-06, |
| "loss": 0.6427, |
| "step": 1359 |
| }, |
| { |
| "epoch": 0.6662584200857318, |
| "grad_norm": 0.3600572347640991, |
| "learning_rate": 4.479241537629026e-06, |
| "loss": 0.6351, |
| "step": 1360 |
| }, |
| { |
| "epoch": 0.6667483159828537, |
| "grad_norm": 0.37154367566108704, |
| "learning_rate": 4.4784446370269765e-06, |
| "loss": 0.6371, |
| "step": 1361 |
| }, |
| { |
| "epoch": 0.6672382118799755, |
| "grad_norm": 0.3503185212612152, |
| "learning_rate": 4.477647198157733e-06, |
| "loss": 0.6452, |
| "step": 1362 |
| }, |
| { |
| "epoch": 0.6677281077770973, |
| "grad_norm": 0.3488251566886902, |
| "learning_rate": 4.476849221238253e-06, |
| "loss": 0.6447, |
| "step": 1363 |
| }, |
| { |
| "epoch": 0.6682180036742192, |
| "grad_norm": 0.3618388772010803, |
| "learning_rate": 4.476050706485637e-06, |
| "loss": 0.6293, |
| "step": 1364 |
| }, |
| { |
| "epoch": 0.6687078995713411, |
| "grad_norm": 0.3459049165248871, |
| "learning_rate": 4.475251654117134e-06, |
| "loss": 0.6524, |
| "step": 1365 |
| }, |
| { |
| "epoch": 0.6691977954684629, |
| "grad_norm": 0.33803868293762207, |
| "learning_rate": 4.4744520643501396e-06, |
| "loss": 0.6334, |
| "step": 1366 |
| }, |
| { |
| "epoch": 0.6696876913655848, |
| "grad_norm": 0.3458579480648041, |
| "learning_rate": 4.473651937402194e-06, |
| "loss": 0.6286, |
| "step": 1367 |
| }, |
| { |
| "epoch": 0.6701775872627067, |
| "grad_norm": 0.3435010612010956, |
| "learning_rate": 4.472851273490985e-06, |
| "loss": 0.6495, |
| "step": 1368 |
| }, |
| { |
| "epoch": 0.6706674831598285, |
| "grad_norm": 0.3350188732147217, |
| "learning_rate": 4.472050072834345e-06, |
| "loss": 0.625, |
| "step": 1369 |
| }, |
| { |
| "epoch": 0.6711573790569504, |
| "grad_norm": 0.3355303108692169, |
| "learning_rate": 4.471248335650255e-06, |
| "loss": 0.627, |
| "step": 1370 |
| }, |
| { |
| "epoch": 0.6716472749540723, |
| "grad_norm": 0.3544834554195404, |
| "learning_rate": 4.470446062156838e-06, |
| "loss": 0.6629, |
| "step": 1371 |
| }, |
| { |
| "epoch": 0.6721371708511942, |
| "grad_norm": 0.3765515089035034, |
| "learning_rate": 4.469643252572367e-06, |
| "loss": 0.6615, |
| "step": 1372 |
| }, |
| { |
| "epoch": 0.672627066748316, |
| "grad_norm": 0.36532217264175415, |
| "learning_rate": 4.468839907115259e-06, |
| "loss": 0.6492, |
| "step": 1373 |
| }, |
| { |
| "epoch": 0.6731169626454379, |
| "grad_norm": 0.3455331325531006, |
| "learning_rate": 4.468036026004075e-06, |
| "loss": 0.6604, |
| "step": 1374 |
| }, |
| { |
| "epoch": 0.6736068585425597, |
| "grad_norm": 0.34351903200149536, |
| "learning_rate": 4.4672316094575265e-06, |
| "loss": 0.6318, |
| "step": 1375 |
| }, |
| { |
| "epoch": 0.6740967544396815, |
| "grad_norm": 0.37456777691841125, |
| "learning_rate": 4.466426657694466e-06, |
| "loss": 0.6362, |
| "step": 1376 |
| }, |
| { |
| "epoch": 0.6745866503368034, |
| "grad_norm": 0.3516497015953064, |
| "learning_rate": 4.465621170933894e-06, |
| "loss": 0.6198, |
| "step": 1377 |
| }, |
| { |
| "epoch": 0.6750765462339253, |
| "grad_norm": 0.34695130586624146, |
| "learning_rate": 4.464815149394954e-06, |
| "loss": 0.6305, |
| "step": 1378 |
| }, |
| { |
| "epoch": 0.6755664421310471, |
| "grad_norm": 0.35416507720947266, |
| "learning_rate": 4.464008593296939e-06, |
| "loss": 0.6378, |
| "step": 1379 |
| }, |
| { |
| "epoch": 0.676056338028169, |
| "grad_norm": 0.3488617241382599, |
| "learning_rate": 4.463201502859286e-06, |
| "loss": 0.6306, |
| "step": 1380 |
| }, |
| { |
| "epoch": 0.6765462339252909, |
| "grad_norm": 0.34790799021720886, |
| "learning_rate": 4.462393878301574e-06, |
| "loss": 0.6608, |
| "step": 1381 |
| }, |
| { |
| "epoch": 0.6770361298224128, |
| "grad_norm": 0.3412380516529083, |
| "learning_rate": 4.461585719843533e-06, |
| "loss": 0.6133, |
| "step": 1382 |
| }, |
| { |
| "epoch": 0.6775260257195346, |
| "grad_norm": 0.35063865780830383, |
| "learning_rate": 4.460777027705033e-06, |
| "loss": 0.6292, |
| "step": 1383 |
| }, |
| { |
| "epoch": 0.6780159216166565, |
| "grad_norm": 0.33439743518829346, |
| "learning_rate": 4.459967802106092e-06, |
| "loss": 0.6287, |
| "step": 1384 |
| }, |
| { |
| "epoch": 0.6785058175137784, |
| "grad_norm": 0.34881168603897095, |
| "learning_rate": 4.459158043266874e-06, |
| "loss": 0.6503, |
| "step": 1385 |
| }, |
| { |
| "epoch": 0.6789957134109001, |
| "grad_norm": 0.3516039252281189, |
| "learning_rate": 4.458347751407686e-06, |
| "loss": 0.6667, |
| "step": 1386 |
| }, |
| { |
| "epoch": 0.679485609308022, |
| "grad_norm": 0.3718115985393524, |
| "learning_rate": 4.45753692674898e-06, |
| "loss": 0.6185, |
| "step": 1387 |
| }, |
| { |
| "epoch": 0.6799755052051439, |
| "grad_norm": 0.3400097191333771, |
| "learning_rate": 4.456725569511353e-06, |
| "loss": 0.65, |
| "step": 1388 |
| }, |
| { |
| "epoch": 0.6804654011022657, |
| "grad_norm": 0.35042303800582886, |
| "learning_rate": 4.45591367991555e-06, |
| "loss": 0.6319, |
| "step": 1389 |
| }, |
| { |
| "epoch": 0.6809552969993876, |
| "grad_norm": 0.3450476825237274, |
| "learning_rate": 4.455101258182458e-06, |
| "loss": 0.6413, |
| "step": 1390 |
| }, |
| { |
| "epoch": 0.6814451928965095, |
| "grad_norm": 0.3550189137458801, |
| "learning_rate": 4.454288304533107e-06, |
| "loss": 0.6349, |
| "step": 1391 |
| }, |
| { |
| "epoch": 0.6819350887936314, |
| "grad_norm": 0.34984391927719116, |
| "learning_rate": 4.453474819188676e-06, |
| "loss": 0.6509, |
| "step": 1392 |
| }, |
| { |
| "epoch": 0.6824249846907532, |
| "grad_norm": 0.35120296478271484, |
| "learning_rate": 4.452660802370485e-06, |
| "loss": 0.6078, |
| "step": 1393 |
| }, |
| { |
| "epoch": 0.6829148805878751, |
| "grad_norm": 0.34375014901161194, |
| "learning_rate": 4.451846254300002e-06, |
| "loss": 0.6337, |
| "step": 1394 |
| }, |
| { |
| "epoch": 0.683404776484997, |
| "grad_norm": 0.3296529948711395, |
| "learning_rate": 4.451031175198836e-06, |
| "loss": 0.6264, |
| "step": 1395 |
| }, |
| { |
| "epoch": 0.6838946723821188, |
| "grad_norm": 0.35613584518432617, |
| "learning_rate": 4.450215565288743e-06, |
| "loss": 0.6202, |
| "step": 1396 |
| }, |
| { |
| "epoch": 0.6843845682792407, |
| "grad_norm": 0.3382437825202942, |
| "learning_rate": 4.449399424791624e-06, |
| "loss": 0.6263, |
| "step": 1397 |
| }, |
| { |
| "epoch": 0.6848744641763626, |
| "grad_norm": 0.35394805669784546, |
| "learning_rate": 4.44858275392952e-06, |
| "loss": 0.6392, |
| "step": 1398 |
| }, |
| { |
| "epoch": 0.6853643600734843, |
| "grad_norm": 0.3411145806312561, |
| "learning_rate": 4.447765552924621e-06, |
| "loss": 0.6506, |
| "step": 1399 |
| }, |
| { |
| "epoch": 0.6858542559706062, |
| "grad_norm": 0.3441709876060486, |
| "learning_rate": 4.446947821999259e-06, |
| "loss": 0.6398, |
| "step": 1400 |
| }, |
| { |
| "epoch": 0.6863441518677281, |
| "grad_norm": 0.35997113585472107, |
| "learning_rate": 4.446129561375911e-06, |
| "loss": 0.6614, |
| "step": 1401 |
| }, |
| { |
| "epoch": 0.68683404776485, |
| "grad_norm": 0.3488248288631439, |
| "learning_rate": 4.445310771277197e-06, |
| "loss": 0.6482, |
| "step": 1402 |
| }, |
| { |
| "epoch": 0.6873239436619718, |
| "grad_norm": 0.3390911817550659, |
| "learning_rate": 4.444491451925883e-06, |
| "loss": 0.6138, |
| "step": 1403 |
| }, |
| { |
| "epoch": 0.6878138395590937, |
| "grad_norm": 0.3483215272426605, |
| "learning_rate": 4.443671603544878e-06, |
| "loss": 0.6455, |
| "step": 1404 |
| }, |
| { |
| "epoch": 0.6883037354562156, |
| "grad_norm": 0.36293405294418335, |
| "learning_rate": 4.442851226357232e-06, |
| "loss": 0.6562, |
| "step": 1405 |
| }, |
| { |
| "epoch": 0.6887936313533374, |
| "grad_norm": 0.35973989963531494, |
| "learning_rate": 4.442030320586145e-06, |
| "loss": 0.6355, |
| "step": 1406 |
| }, |
| { |
| "epoch": 0.6892835272504593, |
| "grad_norm": 0.35593295097351074, |
| "learning_rate": 4.441208886454956e-06, |
| "loss": 0.628, |
| "step": 1407 |
| }, |
| { |
| "epoch": 0.6897734231475812, |
| "grad_norm": 0.3477623164653778, |
| "learning_rate": 4.440386924187148e-06, |
| "loss": 0.6308, |
| "step": 1408 |
| }, |
| { |
| "epoch": 0.690263319044703, |
| "grad_norm": 0.3570405840873718, |
| "learning_rate": 4.43956443400635e-06, |
| "loss": 0.6481, |
| "step": 1409 |
| }, |
| { |
| "epoch": 0.6907532149418248, |
| "grad_norm": 0.3418077230453491, |
| "learning_rate": 4.438741416136335e-06, |
| "loss": 0.6331, |
| "step": 1410 |
| }, |
| { |
| "epoch": 0.6912431108389467, |
| "grad_norm": 0.3490599989891052, |
| "learning_rate": 4.4379178708010155e-06, |
| "loss": 0.6256, |
| "step": 1411 |
| }, |
| { |
| "epoch": 0.6917330067360686, |
| "grad_norm": 0.35988834500312805, |
| "learning_rate": 4.437093798224451e-06, |
| "loss": 0.6456, |
| "step": 1412 |
| }, |
| { |
| "epoch": 0.6922229026331904, |
| "grad_norm": 0.37563356757164, |
| "learning_rate": 4.4362691986308435e-06, |
| "loss": 0.648, |
| "step": 1413 |
| }, |
| { |
| "epoch": 0.6927127985303123, |
| "grad_norm": 0.37250471115112305, |
| "learning_rate": 4.435444072244539e-06, |
| "loss": 0.6773, |
| "step": 1414 |
| }, |
| { |
| "epoch": 0.6932026944274342, |
| "grad_norm": 0.34101811051368713, |
| "learning_rate": 4.434618419290024e-06, |
| "loss": 0.6535, |
| "step": 1415 |
| }, |
| { |
| "epoch": 0.693692590324556, |
| "grad_norm": 0.3587406277656555, |
| "learning_rate": 4.433792239991934e-06, |
| "loss": 0.6416, |
| "step": 1416 |
| }, |
| { |
| "epoch": 0.6941824862216779, |
| "grad_norm": 0.34490567445755005, |
| "learning_rate": 4.432965534575041e-06, |
| "loss": 0.6517, |
| "step": 1417 |
| }, |
| { |
| "epoch": 0.6946723821187998, |
| "grad_norm": 0.3421916961669922, |
| "learning_rate": 4.432138303264265e-06, |
| "loss": 0.648, |
| "step": 1418 |
| }, |
| { |
| "epoch": 0.6951622780159216, |
| "grad_norm": 0.34492430090904236, |
| "learning_rate": 4.431310546284668e-06, |
| "loss": 0.6534, |
| "step": 1419 |
| }, |
| { |
| "epoch": 0.6956521739130435, |
| "grad_norm": 0.33511802554130554, |
| "learning_rate": 4.430482263861451e-06, |
| "loss": 0.6188, |
| "step": 1420 |
| }, |
| { |
| "epoch": 0.6961420698101654, |
| "grad_norm": 0.3665695786476135, |
| "learning_rate": 4.429653456219965e-06, |
| "loss": 0.637, |
| "step": 1421 |
| }, |
| { |
| "epoch": 0.6966319657072872, |
| "grad_norm": 0.33063197135925293, |
| "learning_rate": 4.428824123585699e-06, |
| "loss": 0.6375, |
| "step": 1422 |
| }, |
| { |
| "epoch": 0.697121861604409, |
| "grad_norm": 0.3582562804222107, |
| "learning_rate": 4.427994266184285e-06, |
| "loss": 0.6294, |
| "step": 1423 |
| }, |
| { |
| "epoch": 0.6976117575015309, |
| "grad_norm": 0.3703780770301819, |
| "learning_rate": 4.427163884241501e-06, |
| "loss": 0.6699, |
| "step": 1424 |
| }, |
| { |
| "epoch": 0.6981016533986528, |
| "grad_norm": 0.35689881443977356, |
| "learning_rate": 4.426332977983264e-06, |
| "loss": 0.6439, |
| "step": 1425 |
| }, |
| { |
| "epoch": 0.6985915492957746, |
| "grad_norm": 0.3539344072341919, |
| "learning_rate": 4.425501547635635e-06, |
| "loss": 0.6608, |
| "step": 1426 |
| }, |
| { |
| "epoch": 0.6990814451928965, |
| "grad_norm": 0.3454554080963135, |
| "learning_rate": 4.424669593424819e-06, |
| "loss": 0.6364, |
| "step": 1427 |
| }, |
| { |
| "epoch": 0.6995713410900184, |
| "grad_norm": 0.340656042098999, |
| "learning_rate": 4.4238371155771614e-06, |
| "loss": 0.6277, |
| "step": 1428 |
| }, |
| { |
| "epoch": 0.7000612369871402, |
| "grad_norm": 0.35011354088783264, |
| "learning_rate": 4.423004114319151e-06, |
| "loss": 0.6621, |
| "step": 1429 |
| }, |
| { |
| "epoch": 0.7005511328842621, |
| "grad_norm": 0.34832993149757385, |
| "learning_rate": 4.42217058987742e-06, |
| "loss": 0.6516, |
| "step": 1430 |
| }, |
| { |
| "epoch": 0.701041028781384, |
| "grad_norm": 0.3486762046813965, |
| "learning_rate": 4.421336542478741e-06, |
| "loss": 0.6138, |
| "step": 1431 |
| }, |
| { |
| "epoch": 0.7015309246785059, |
| "grad_norm": 0.3553650975227356, |
| "learning_rate": 4.42050197235003e-06, |
| "loss": 0.6215, |
| "step": 1432 |
| }, |
| { |
| "epoch": 0.7020208205756276, |
| "grad_norm": 0.3504565954208374, |
| "learning_rate": 4.419666879718344e-06, |
| "loss": 0.6377, |
| "step": 1433 |
| }, |
| { |
| "epoch": 0.7025107164727495, |
| "grad_norm": 0.3645647466182709, |
| "learning_rate": 4.418831264810886e-06, |
| "loss": 0.6505, |
| "step": 1434 |
| }, |
| { |
| "epoch": 0.7030006123698714, |
| "grad_norm": 0.355643093585968, |
| "learning_rate": 4.417995127854995e-06, |
| "loss": 0.6316, |
| "step": 1435 |
| }, |
| { |
| "epoch": 0.7034905082669932, |
| "grad_norm": 0.34611374139785767, |
| "learning_rate": 4.417158469078156e-06, |
| "loss": 0.6224, |
| "step": 1436 |
| }, |
| { |
| "epoch": 0.7039804041641151, |
| "grad_norm": 0.35592782497406006, |
| "learning_rate": 4.416321288707997e-06, |
| "loss": 0.6477, |
| "step": 1437 |
| }, |
| { |
| "epoch": 0.704470300061237, |
| "grad_norm": 0.3591293394565582, |
| "learning_rate": 4.415483586972286e-06, |
| "loss": 0.6266, |
| "step": 1438 |
| }, |
| { |
| "epoch": 0.7049601959583589, |
| "grad_norm": 0.3570345342159271, |
| "learning_rate": 4.414645364098932e-06, |
| "loss": 0.6545, |
| "step": 1439 |
| }, |
| { |
| "epoch": 0.7054500918554807, |
| "grad_norm": 0.36388275027275085, |
| "learning_rate": 4.413806620315986e-06, |
| "loss": 0.656, |
| "step": 1440 |
| }, |
| { |
| "epoch": 0.7059399877526026, |
| "grad_norm": 0.3475639522075653, |
| "learning_rate": 4.4129673558516435e-06, |
| "loss": 0.6464, |
| "step": 1441 |
| }, |
| { |
| "epoch": 0.7064298836497245, |
| "grad_norm": 0.34937432408332825, |
| "learning_rate": 4.412127570934238e-06, |
| "loss": 0.6417, |
| "step": 1442 |
| }, |
| { |
| "epoch": 0.7069197795468463, |
| "grad_norm": 0.3504284918308258, |
| "learning_rate": 4.411287265792248e-06, |
| "loss": 0.6323, |
| "step": 1443 |
| }, |
| { |
| "epoch": 0.7074096754439682, |
| "grad_norm": 0.3767411708831787, |
| "learning_rate": 4.41044644065429e-06, |
| "loss": 0.6448, |
| "step": 1444 |
| }, |
| { |
| "epoch": 0.70789957134109, |
| "grad_norm": 0.3602970838546753, |
| "learning_rate": 4.409605095749125e-06, |
| "loss": 0.6345, |
| "step": 1445 |
| }, |
| { |
| "epoch": 0.7083894672382118, |
| "grad_norm": 0.3349767327308655, |
| "learning_rate": 4.408763231305654e-06, |
| "loss": 0.6224, |
| "step": 1446 |
| }, |
| { |
| "epoch": 0.7088793631353337, |
| "grad_norm": 0.36673104763031006, |
| "learning_rate": 4.407920847552918e-06, |
| "loss": 0.6457, |
| "step": 1447 |
| }, |
| { |
| "epoch": 0.7093692590324556, |
| "grad_norm": 0.3664603531360626, |
| "learning_rate": 4.407077944720104e-06, |
| "loss": 0.6302, |
| "step": 1448 |
| }, |
| { |
| "epoch": 0.7098591549295775, |
| "grad_norm": 0.3508128821849823, |
| "learning_rate": 4.4062345230365345e-06, |
| "loss": 0.6434, |
| "step": 1449 |
| }, |
| { |
| "epoch": 0.7103490508266993, |
| "grad_norm": 0.3559228479862213, |
| "learning_rate": 4.4053905827316765e-06, |
| "loss": 0.6112, |
| "step": 1450 |
| }, |
| { |
| "epoch": 0.7108389467238212, |
| "grad_norm": 0.3495516777038574, |
| "learning_rate": 4.404546124035137e-06, |
| "loss": 0.6522, |
| "step": 1451 |
| }, |
| { |
| "epoch": 0.7113288426209431, |
| "grad_norm": 0.3622876703739166, |
| "learning_rate": 4.4037011471766656e-06, |
| "loss": 0.667, |
| "step": 1452 |
| }, |
| { |
| "epoch": 0.7118187385180649, |
| "grad_norm": 0.3653157949447632, |
| "learning_rate": 4.402855652386151e-06, |
| "loss": 0.6518, |
| "step": 1453 |
| }, |
| { |
| "epoch": 0.7123086344151868, |
| "grad_norm": 0.36109858751296997, |
| "learning_rate": 4.402009639893622e-06, |
| "loss": 0.6193, |
| "step": 1454 |
| }, |
| { |
| "epoch": 0.7127985303123087, |
| "grad_norm": 0.3428484797477722, |
| "learning_rate": 4.401163109929251e-06, |
| "loss": 0.6384, |
| "step": 1455 |
| }, |
| { |
| "epoch": 0.7132884262094304, |
| "grad_norm": 0.33944132924079895, |
| "learning_rate": 4.40031606272335e-06, |
| "loss": 0.6195, |
| "step": 1456 |
| }, |
| { |
| "epoch": 0.7137783221065523, |
| "grad_norm": 0.34885114431381226, |
| "learning_rate": 4.399468498506372e-06, |
| "loss": 0.6374, |
| "step": 1457 |
| }, |
| { |
| "epoch": 0.7142682180036742, |
| "grad_norm": 0.3379736840724945, |
| "learning_rate": 4.39862041750891e-06, |
| "loss": 0.6476, |
| "step": 1458 |
| }, |
| { |
| "epoch": 0.7147581139007961, |
| "grad_norm": 0.3457338511943817, |
| "learning_rate": 4.397771819961696e-06, |
| "loss": 0.6336, |
| "step": 1459 |
| }, |
| { |
| "epoch": 0.7152480097979179, |
| "grad_norm": 0.35912013053894043, |
| "learning_rate": 4.3969227060956065e-06, |
| "loss": 0.6425, |
| "step": 1460 |
| }, |
| { |
| "epoch": 0.7157379056950398, |
| "grad_norm": 0.34900614619255066, |
| "learning_rate": 4.3960730761416556e-06, |
| "loss": 0.6181, |
| "step": 1461 |
| }, |
| { |
| "epoch": 0.7162278015921617, |
| "grad_norm": 0.3592720925807953, |
| "learning_rate": 4.395222930330999e-06, |
| "loss": 0.6493, |
| "step": 1462 |
| }, |
| { |
| "epoch": 0.7167176974892835, |
| "grad_norm": 0.37626612186431885, |
| "learning_rate": 4.394372268894932e-06, |
| "loss": 0.6488, |
| "step": 1463 |
| }, |
| { |
| "epoch": 0.7172075933864054, |
| "grad_norm": 0.33996111154556274, |
| "learning_rate": 4.3935210920648906e-06, |
| "loss": 0.6002, |
| "step": 1464 |
| }, |
| { |
| "epoch": 0.7176974892835273, |
| "grad_norm": 0.3402058482170105, |
| "learning_rate": 4.392669400072451e-06, |
| "loss": 0.6641, |
| "step": 1465 |
| }, |
| { |
| "epoch": 0.7181873851806491, |
| "grad_norm": 0.3400043249130249, |
| "learning_rate": 4.39181719314933e-06, |
| "loss": 0.6471, |
| "step": 1466 |
| }, |
| { |
| "epoch": 0.718677281077771, |
| "grad_norm": 0.35515210032463074, |
| "learning_rate": 4.3909644715273826e-06, |
| "loss": 0.616, |
| "step": 1467 |
| }, |
| { |
| "epoch": 0.7191671769748929, |
| "grad_norm": 0.3558686673641205, |
| "learning_rate": 4.390111235438606e-06, |
| "loss": 0.6475, |
| "step": 1468 |
| }, |
| { |
| "epoch": 0.7196570728720147, |
| "grad_norm": 0.357598215341568, |
| "learning_rate": 4.3892574851151375e-06, |
| "loss": 0.6427, |
| "step": 1469 |
| }, |
| { |
| "epoch": 0.7201469687691365, |
| "grad_norm": 0.3601952791213989, |
| "learning_rate": 4.388403220789252e-06, |
| "loss": 0.6324, |
| "step": 1470 |
| }, |
| { |
| "epoch": 0.7206368646662584, |
| "grad_norm": 0.3500250279903412, |
| "learning_rate": 4.387548442693367e-06, |
| "loss": 0.6429, |
| "step": 1471 |
| }, |
| { |
| "epoch": 0.7211267605633803, |
| "grad_norm": 0.3473350703716278, |
| "learning_rate": 4.386693151060036e-06, |
| "loss": 0.6341, |
| "step": 1472 |
| }, |
| { |
| "epoch": 0.7216166564605021, |
| "grad_norm": 0.3497799038887024, |
| "learning_rate": 4.385837346121957e-06, |
| "loss": 0.6235, |
| "step": 1473 |
| }, |
| { |
| "epoch": 0.722106552357624, |
| "grad_norm": 0.349621444940567, |
| "learning_rate": 4.384981028111965e-06, |
| "loss": 0.6265, |
| "step": 1474 |
| }, |
| { |
| "epoch": 0.7225964482547459, |
| "grad_norm": 0.35969698429107666, |
| "learning_rate": 4.384124197263033e-06, |
| "loss": 0.6484, |
| "step": 1475 |
| }, |
| { |
| "epoch": 0.7230863441518677, |
| "grad_norm": 0.3602536618709564, |
| "learning_rate": 4.383266853808278e-06, |
| "loss": 0.6515, |
| "step": 1476 |
| }, |
| { |
| "epoch": 0.7235762400489896, |
| "grad_norm": 0.35244783759117126, |
| "learning_rate": 4.382408997980952e-06, |
| "loss": 0.6041, |
| "step": 1477 |
| }, |
| { |
| "epoch": 0.7240661359461115, |
| "grad_norm": 0.3843582570552826, |
| "learning_rate": 4.38155063001445e-06, |
| "loss": 0.6119, |
| "step": 1478 |
| }, |
| { |
| "epoch": 0.7245560318432334, |
| "grad_norm": 0.3423139154911041, |
| "learning_rate": 4.3806917501423026e-06, |
| "loss": 0.6578, |
| "step": 1479 |
| }, |
| { |
| "epoch": 0.7250459277403551, |
| "grad_norm": 0.3562920391559601, |
| "learning_rate": 4.379832358598183e-06, |
| "loss": 0.644, |
| "step": 1480 |
| }, |
| { |
| "epoch": 0.725535823637477, |
| "grad_norm": 0.37188270688056946, |
| "learning_rate": 4.378972455615901e-06, |
| "loss": 0.6539, |
| "step": 1481 |
| }, |
| { |
| "epoch": 0.7260257195345989, |
| "grad_norm": 0.3575827479362488, |
| "learning_rate": 4.378112041429409e-06, |
| "loss": 0.6499, |
| "step": 1482 |
| }, |
| { |
| "epoch": 0.7265156154317207, |
| "grad_norm": 0.35851728916168213, |
| "learning_rate": 4.3772511162727934e-06, |
| "loss": 0.6381, |
| "step": 1483 |
| }, |
| { |
| "epoch": 0.7270055113288426, |
| "grad_norm": 0.35518619418144226, |
| "learning_rate": 4.376389680380286e-06, |
| "loss": 0.6302, |
| "step": 1484 |
| }, |
| { |
| "epoch": 0.7274954072259645, |
| "grad_norm": 0.33862000703811646, |
| "learning_rate": 4.37552773398625e-06, |
| "loss": 0.6378, |
| "step": 1485 |
| }, |
| { |
| "epoch": 0.7279853031230863, |
| "grad_norm": 0.3456350564956665, |
| "learning_rate": 4.374665277325195e-06, |
| "loss": 0.6266, |
| "step": 1486 |
| }, |
| { |
| "epoch": 0.7284751990202082, |
| "grad_norm": 0.35843735933303833, |
| "learning_rate": 4.373802310631765e-06, |
| "loss": 0.6189, |
| "step": 1487 |
| }, |
| { |
| "epoch": 0.7289650949173301, |
| "grad_norm": 0.35915929079055786, |
| "learning_rate": 4.372938834140742e-06, |
| "loss": 0.6305, |
| "step": 1488 |
| }, |
| { |
| "epoch": 0.729454990814452, |
| "grad_norm": 0.34460461139678955, |
| "learning_rate": 4.3720748480870515e-06, |
| "loss": 0.6325, |
| "step": 1489 |
| }, |
| { |
| "epoch": 0.7299448867115738, |
| "grad_norm": 0.34665390849113464, |
| "learning_rate": 4.371210352705753e-06, |
| "loss": 0.6403, |
| "step": 1490 |
| }, |
| { |
| "epoch": 0.7304347826086957, |
| "grad_norm": 0.34286579489707947, |
| "learning_rate": 4.370345348232046e-06, |
| "loss": 0.6241, |
| "step": 1491 |
| }, |
| { |
| "epoch": 0.7309246785058175, |
| "grad_norm": 0.36061859130859375, |
| "learning_rate": 4.369479834901268e-06, |
| "loss": 0.647, |
| "step": 1492 |
| }, |
| { |
| "epoch": 0.7314145744029393, |
| "grad_norm": 0.3598126173019409, |
| "learning_rate": 4.368613812948898e-06, |
| "loss": 0.6343, |
| "step": 1493 |
| }, |
| { |
| "epoch": 0.7319044703000612, |
| "grad_norm": 0.3460848331451416, |
| "learning_rate": 4.367747282610548e-06, |
| "loss": 0.6104, |
| "step": 1494 |
| }, |
| { |
| "epoch": 0.7323943661971831, |
| "grad_norm": 0.3402574360370636, |
| "learning_rate": 4.366880244121972e-06, |
| "loss": 0.6349, |
| "step": 1495 |
| }, |
| { |
| "epoch": 0.7328842620943049, |
| "grad_norm": 0.37308433651924133, |
| "learning_rate": 4.366012697719064e-06, |
| "loss": 0.6387, |
| "step": 1496 |
| }, |
| { |
| "epoch": 0.7333741579914268, |
| "grad_norm": 0.3392968773841858, |
| "learning_rate": 4.365144643637852e-06, |
| "loss": 0.6343, |
| "step": 1497 |
| }, |
| { |
| "epoch": 0.7338640538885487, |
| "grad_norm": 0.3485359251499176, |
| "learning_rate": 4.364276082114504e-06, |
| "loss": 0.6412, |
| "step": 1498 |
| }, |
| { |
| "epoch": 0.7343539497856706, |
| "grad_norm": 0.3518224358558655, |
| "learning_rate": 4.363407013385325e-06, |
| "loss": 0.6199, |
| "step": 1499 |
| }, |
| { |
| "epoch": 0.7348438456827924, |
| "grad_norm": 0.35371819138526917, |
| "learning_rate": 4.36253743768676e-06, |
| "loss": 0.6297, |
| "step": 1500 |
| }, |
| { |
| "epoch": 0.7353337415799143, |
| "grad_norm": 0.34904298186302185, |
| "learning_rate": 4.36166735525539e-06, |
| "loss": 0.5931, |
| "step": 1501 |
| }, |
| { |
| "epoch": 0.7358236374770362, |
| "grad_norm": 0.36501210927963257, |
| "learning_rate": 4.360796766327936e-06, |
| "loss": 0.6413, |
| "step": 1502 |
| }, |
| { |
| "epoch": 0.736313533374158, |
| "grad_norm": 0.356987327337265, |
| "learning_rate": 4.3599256711412555e-06, |
| "loss": 0.6476, |
| "step": 1503 |
| }, |
| { |
| "epoch": 0.7368034292712798, |
| "grad_norm": 0.3617223799228668, |
| "learning_rate": 4.3590540699323415e-06, |
| "loss": 0.6283, |
| "step": 1504 |
| }, |
| { |
| "epoch": 0.7372933251684017, |
| "grad_norm": 0.3590957820415497, |
| "learning_rate": 4.358181962938329e-06, |
| "loss": 0.633, |
| "step": 1505 |
| }, |
| { |
| "epoch": 0.7377832210655236, |
| "grad_norm": 0.3524998426437378, |
| "learning_rate": 4.357309350396488e-06, |
| "loss": 0.6341, |
| "step": 1506 |
| }, |
| { |
| "epoch": 0.7382731169626454, |
| "grad_norm": 0.3727060556411743, |
| "learning_rate": 4.356436232544227e-06, |
| "loss": 0.6502, |
| "step": 1507 |
| }, |
| { |
| "epoch": 0.7387630128597673, |
| "grad_norm": 0.349979043006897, |
| "learning_rate": 4.35556260961909e-06, |
| "loss": 0.6538, |
| "step": 1508 |
| }, |
| { |
| "epoch": 0.7392529087568892, |
| "grad_norm": 0.3617153763771057, |
| "learning_rate": 4.354688481858762e-06, |
| "loss": 0.627, |
| "step": 1509 |
| }, |
| { |
| "epoch": 0.739742804654011, |
| "grad_norm": 0.36576417088508606, |
| "learning_rate": 4.353813849501062e-06, |
| "loss": 0.6394, |
| "step": 1510 |
| }, |
| { |
| "epoch": 0.7402327005511329, |
| "grad_norm": 0.34674984216690063, |
| "learning_rate": 4.352938712783947e-06, |
| "loss": 0.6275, |
| "step": 1511 |
| }, |
| { |
| "epoch": 0.7407225964482548, |
| "grad_norm": 0.3658475875854492, |
| "learning_rate": 4.352063071945514e-06, |
| "loss": 0.6237, |
| "step": 1512 |
| }, |
| { |
| "epoch": 0.7412124923453766, |
| "grad_norm": 0.35605910420417786, |
| "learning_rate": 4.351186927223992e-06, |
| "loss": 0.6628, |
| "step": 1513 |
| }, |
| { |
| "epoch": 0.7417023882424985, |
| "grad_norm": 0.3472442030906677, |
| "learning_rate": 4.350310278857754e-06, |
| "loss": 0.6531, |
| "step": 1514 |
| }, |
| { |
| "epoch": 0.7421922841396204, |
| "grad_norm": 0.34654098749160767, |
| "learning_rate": 4.349433127085302e-06, |
| "loss": 0.6177, |
| "step": 1515 |
| }, |
| { |
| "epoch": 0.7426821800367422, |
| "grad_norm": 0.3483250141143799, |
| "learning_rate": 4.34855547214528e-06, |
| "loss": 0.5996, |
| "step": 1516 |
| }, |
| { |
| "epoch": 0.743172075933864, |
| "grad_norm": 0.3742973506450653, |
| "learning_rate": 4.34767731427647e-06, |
| "loss": 0.67, |
| "step": 1517 |
| }, |
| { |
| "epoch": 0.7436619718309859, |
| "grad_norm": 0.36761683225631714, |
| "learning_rate": 4.346798653717787e-06, |
| "loss": 0.6544, |
| "step": 1518 |
| }, |
| { |
| "epoch": 0.7441518677281078, |
| "grad_norm": 0.35679250955581665, |
| "learning_rate": 4.345919490708286e-06, |
| "loss": 0.627, |
| "step": 1519 |
| }, |
| { |
| "epoch": 0.7446417636252296, |
| "grad_norm": 0.34713515639305115, |
| "learning_rate": 4.345039825487156e-06, |
| "loss": 0.6363, |
| "step": 1520 |
| }, |
| { |
| "epoch": 0.7451316595223515, |
| "grad_norm": 0.3618803322315216, |
| "learning_rate": 4.344159658293723e-06, |
| "loss": 0.6641, |
| "step": 1521 |
| }, |
| { |
| "epoch": 0.7456215554194734, |
| "grad_norm": 0.35535937547683716, |
| "learning_rate": 4.343278989367452e-06, |
| "loss": 0.6325, |
| "step": 1522 |
| }, |
| { |
| "epoch": 0.7461114513165952, |
| "grad_norm": 0.3408360481262207, |
| "learning_rate": 4.342397818947943e-06, |
| "loss": 0.6324, |
| "step": 1523 |
| }, |
| { |
| "epoch": 0.7466013472137171, |
| "grad_norm": 0.3660116493701935, |
| "learning_rate": 4.341516147274931e-06, |
| "loss": 0.6098, |
| "step": 1524 |
| }, |
| { |
| "epoch": 0.747091243110839, |
| "grad_norm": 0.3499428629875183, |
| "learning_rate": 4.34063397458829e-06, |
| "loss": 0.667, |
| "step": 1525 |
| }, |
| { |
| "epoch": 0.7475811390079609, |
| "grad_norm": 0.3460999131202698, |
| "learning_rate": 4.339751301128028e-06, |
| "loss": 0.6339, |
| "step": 1526 |
| }, |
| { |
| "epoch": 0.7480710349050826, |
| "grad_norm": 0.3715626895427704, |
| "learning_rate": 4.338868127134291e-06, |
| "loss": 0.6494, |
| "step": 1527 |
| }, |
| { |
| "epoch": 0.7485609308022045, |
| "grad_norm": 0.35727787017822266, |
| "learning_rate": 4.3379844528473605e-06, |
| "loss": 0.6661, |
| "step": 1528 |
| }, |
| { |
| "epoch": 0.7490508266993264, |
| "grad_norm": 0.34379470348358154, |
| "learning_rate": 4.337100278507654e-06, |
| "loss": 0.6311, |
| "step": 1529 |
| }, |
| { |
| "epoch": 0.7495407225964482, |
| "grad_norm": 0.3414919972419739, |
| "learning_rate": 4.3362156043557246e-06, |
| "loss": 0.656, |
| "step": 1530 |
| }, |
| { |
| "epoch": 0.7500306184935701, |
| "grad_norm": 0.3487052619457245, |
| "learning_rate": 4.335330430632263e-06, |
| "loss": 0.6433, |
| "step": 1531 |
| }, |
| { |
| "epoch": 0.750520514390692, |
| "grad_norm": 0.3398865759372711, |
| "learning_rate": 4.334444757578093e-06, |
| "loss": 0.6083, |
| "step": 1532 |
| }, |
| { |
| "epoch": 0.7510104102878138, |
| "grad_norm": 0.3585461378097534, |
| "learning_rate": 4.3335585854341776e-06, |
| "loss": 0.6106, |
| "step": 1533 |
| }, |
| { |
| "epoch": 0.7515003061849357, |
| "grad_norm": 0.3617606461048126, |
| "learning_rate": 4.332671914441614e-06, |
| "loss": 0.6305, |
| "step": 1534 |
| }, |
| { |
| "epoch": 0.7519902020820576, |
| "grad_norm": 0.35764437913894653, |
| "learning_rate": 4.331784744841634e-06, |
| "loss": 0.6217, |
| "step": 1535 |
| }, |
| { |
| "epoch": 0.7524800979791795, |
| "grad_norm": 0.3439844250679016, |
| "learning_rate": 4.330897076875606e-06, |
| "loss": 0.6109, |
| "step": 1536 |
| }, |
| { |
| "epoch": 0.7529699938763013, |
| "grad_norm": 0.3562347888946533, |
| "learning_rate": 4.3300089107850365e-06, |
| "loss": 0.6455, |
| "step": 1537 |
| }, |
| { |
| "epoch": 0.7534598897734232, |
| "grad_norm": 0.3550952076911926, |
| "learning_rate": 4.329120246811562e-06, |
| "loss": 0.6443, |
| "step": 1538 |
| }, |
| { |
| "epoch": 0.753949785670545, |
| "grad_norm": 0.35900068283081055, |
| "learning_rate": 4.328231085196959e-06, |
| "loss": 0.6399, |
| "step": 1539 |
| }, |
| { |
| "epoch": 0.7544396815676668, |
| "grad_norm": 0.3578474819660187, |
| "learning_rate": 4.327341426183139e-06, |
| "loss": 0.6332, |
| "step": 1540 |
| }, |
| { |
| "epoch": 0.7549295774647887, |
| "grad_norm": 0.37041589617729187, |
| "learning_rate": 4.326451270012146e-06, |
| "loss": 0.6453, |
| "step": 1541 |
| }, |
| { |
| "epoch": 0.7554194733619106, |
| "grad_norm": 0.35958898067474365, |
| "learning_rate": 4.325560616926162e-06, |
| "loss": 0.6385, |
| "step": 1542 |
| }, |
| { |
| "epoch": 0.7559093692590324, |
| "grad_norm": 0.3471086919307709, |
| "learning_rate": 4.324669467167504e-06, |
| "loss": 0.6272, |
| "step": 1543 |
| }, |
| { |
| "epoch": 0.7563992651561543, |
| "grad_norm": 0.355865478515625, |
| "learning_rate": 4.323777820978622e-06, |
| "loss": 0.6502, |
| "step": 1544 |
| }, |
| { |
| "epoch": 0.7568891610532762, |
| "grad_norm": 0.3631455898284912, |
| "learning_rate": 4.322885678602103e-06, |
| "loss": 0.6361, |
| "step": 1545 |
| }, |
| { |
| "epoch": 0.7573790569503981, |
| "grad_norm": 0.3473595380783081, |
| "learning_rate": 4.321993040280669e-06, |
| "loss": 0.6271, |
| "step": 1546 |
| }, |
| { |
| "epoch": 0.7578689528475199, |
| "grad_norm": 0.3650197684764862, |
| "learning_rate": 4.321099906257175e-06, |
| "loss": 0.6371, |
| "step": 1547 |
| }, |
| { |
| "epoch": 0.7583588487446418, |
| "grad_norm": 0.3387010097503662, |
| "learning_rate": 4.320206276774613e-06, |
| "loss": 0.6436, |
| "step": 1548 |
| }, |
| { |
| "epoch": 0.7588487446417637, |
| "grad_norm": 0.3699718713760376, |
| "learning_rate": 4.3193121520761105e-06, |
| "loss": 0.6585, |
| "step": 1549 |
| }, |
| { |
| "epoch": 0.7593386405388854, |
| "grad_norm": 0.3527061343193054, |
| "learning_rate": 4.318417532404926e-06, |
| "loss": 0.6474, |
| "step": 1550 |
| }, |
| { |
| "epoch": 0.7598285364360073, |
| "grad_norm": 0.35305193066596985, |
| "learning_rate": 4.3175224180044575e-06, |
| "loss": 0.6245, |
| "step": 1551 |
| }, |
| { |
| "epoch": 0.7603184323331292, |
| "grad_norm": 0.352267861366272, |
| "learning_rate": 4.316626809118232e-06, |
| "loss": 0.6581, |
| "step": 1552 |
| }, |
| { |
| "epoch": 0.760808328230251, |
| "grad_norm": 0.35272592306137085, |
| "learning_rate": 4.3157307059899155e-06, |
| "loss": 0.631, |
| "step": 1553 |
| }, |
| { |
| "epoch": 0.7612982241273729, |
| "grad_norm": 0.3434283137321472, |
| "learning_rate": 4.314834108863308e-06, |
| "loss": 0.6205, |
| "step": 1554 |
| }, |
| { |
| "epoch": 0.7617881200244948, |
| "grad_norm": 0.35709479451179504, |
| "learning_rate": 4.313937017982342e-06, |
| "loss": 0.6205, |
| "step": 1555 |
| }, |
| { |
| "epoch": 0.7622780159216167, |
| "grad_norm": 0.3480152189731598, |
| "learning_rate": 4.313039433591085e-06, |
| "loss": 0.6281, |
| "step": 1556 |
| }, |
| { |
| "epoch": 0.7627679118187385, |
| "grad_norm": 0.3423363268375397, |
| "learning_rate": 4.3121413559337394e-06, |
| "loss": 0.6324, |
| "step": 1557 |
| }, |
| { |
| "epoch": 0.7632578077158604, |
| "grad_norm": 0.3515855371952057, |
| "learning_rate": 4.311242785254641e-06, |
| "loss": 0.6476, |
| "step": 1558 |
| }, |
| { |
| "epoch": 0.7637477036129823, |
| "grad_norm": 0.35674506425857544, |
| "learning_rate": 4.310343721798262e-06, |
| "loss": 0.6389, |
| "step": 1559 |
| }, |
| { |
| "epoch": 0.7642375995101041, |
| "grad_norm": 0.3473707437515259, |
| "learning_rate": 4.309444165809204e-06, |
| "loss": 0.6393, |
| "step": 1560 |
| }, |
| { |
| "epoch": 0.764727495407226, |
| "grad_norm": 0.3437351584434509, |
| "learning_rate": 4.308544117532209e-06, |
| "loss": 0.6496, |
| "step": 1561 |
| }, |
| { |
| "epoch": 0.7652173913043478, |
| "grad_norm": 0.35847991704940796, |
| "learning_rate": 4.307643577212145e-06, |
| "loss": 0.6237, |
| "step": 1562 |
| }, |
| { |
| "epoch": 0.7657072872014696, |
| "grad_norm": 0.3546234667301178, |
| "learning_rate": 4.306742545094022e-06, |
| "loss": 0.6363, |
| "step": 1563 |
| }, |
| { |
| "epoch": 0.7661971830985915, |
| "grad_norm": 0.3565414845943451, |
| "learning_rate": 4.305841021422978e-06, |
| "loss": 0.5923, |
| "step": 1564 |
| }, |
| { |
| "epoch": 0.7666870789957134, |
| "grad_norm": 0.3588482737541199, |
| "learning_rate": 4.304939006444288e-06, |
| "loss": 0.6266, |
| "step": 1565 |
| }, |
| { |
| "epoch": 0.7671769748928353, |
| "grad_norm": 0.3416237235069275, |
| "learning_rate": 4.3040365004033576e-06, |
| "loss": 0.6214, |
| "step": 1566 |
| }, |
| { |
| "epoch": 0.7676668707899571, |
| "grad_norm": 0.3760249614715576, |
| "learning_rate": 4.30313350354573e-06, |
| "loss": 0.6285, |
| "step": 1567 |
| }, |
| { |
| "epoch": 0.768156766687079, |
| "grad_norm": 0.33408862352371216, |
| "learning_rate": 4.302230016117079e-06, |
| "loss": 0.6094, |
| "step": 1568 |
| }, |
| { |
| "epoch": 0.7686466625842009, |
| "grad_norm": 0.3498431146144867, |
| "learning_rate": 4.301326038363213e-06, |
| "loss": 0.6187, |
| "step": 1569 |
| }, |
| { |
| "epoch": 0.7691365584813227, |
| "grad_norm": 0.3693348467350006, |
| "learning_rate": 4.300421570530073e-06, |
| "loss": 0.6644, |
| "step": 1570 |
| }, |
| { |
| "epoch": 0.7696264543784446, |
| "grad_norm": 0.3516463041305542, |
| "learning_rate": 4.299516612863734e-06, |
| "loss": 0.6295, |
| "step": 1571 |
| }, |
| { |
| "epoch": 0.7701163502755665, |
| "grad_norm": 0.35464391112327576, |
| "learning_rate": 4.298611165610404e-06, |
| "loss": 0.6246, |
| "step": 1572 |
| }, |
| { |
| "epoch": 0.7706062461726884, |
| "grad_norm": 0.3509035110473633, |
| "learning_rate": 4.297705229016423e-06, |
| "loss": 0.6263, |
| "step": 1573 |
| }, |
| { |
| "epoch": 0.7710961420698101, |
| "grad_norm": 0.35598504543304443, |
| "learning_rate": 4.296798803328267e-06, |
| "loss": 0.6479, |
| "step": 1574 |
| }, |
| { |
| "epoch": 0.771586037966932, |
| "grad_norm": 0.37206852436065674, |
| "learning_rate": 4.295891888792543e-06, |
| "loss": 0.6134, |
| "step": 1575 |
| }, |
| { |
| "epoch": 0.7720759338640539, |
| "grad_norm": 0.3516175448894501, |
| "learning_rate": 4.294984485655992e-06, |
| "loss": 0.6545, |
| "step": 1576 |
| }, |
| { |
| "epoch": 0.7725658297611757, |
| "grad_norm": 0.36359652876853943, |
| "learning_rate": 4.294076594165487e-06, |
| "loss": 0.6332, |
| "step": 1577 |
| }, |
| { |
| "epoch": 0.7730557256582976, |
| "grad_norm": 0.35042086243629456, |
| "learning_rate": 4.293168214568035e-06, |
| "loss": 0.6302, |
| "step": 1578 |
| }, |
| { |
| "epoch": 0.7735456215554195, |
| "grad_norm": 0.35204851627349854, |
| "learning_rate": 4.292259347110772e-06, |
| "loss": 0.6727, |
| "step": 1579 |
| }, |
| { |
| "epoch": 0.7740355174525413, |
| "grad_norm": 0.35548853874206543, |
| "learning_rate": 4.2913499920409735e-06, |
| "loss": 0.6345, |
| "step": 1580 |
| }, |
| { |
| "epoch": 0.7745254133496632, |
| "grad_norm": 0.35779857635498047, |
| "learning_rate": 4.290440149606042e-06, |
| "loss": 0.6139, |
| "step": 1581 |
| }, |
| { |
| "epoch": 0.7750153092467851, |
| "grad_norm": 0.3580545485019684, |
| "learning_rate": 4.289529820053515e-06, |
| "loss": 0.6419, |
| "step": 1582 |
| }, |
| { |
| "epoch": 0.775505205143907, |
| "grad_norm": 0.37659353017807007, |
| "learning_rate": 4.288619003631062e-06, |
| "loss": 0.6446, |
| "step": 1583 |
| }, |
| { |
| "epoch": 0.7759951010410288, |
| "grad_norm": 0.3591335415840149, |
| "learning_rate": 4.287707700586486e-06, |
| "loss": 0.6224, |
| "step": 1584 |
| }, |
| { |
| "epoch": 0.7764849969381507, |
| "grad_norm": 0.3432382345199585, |
| "learning_rate": 4.28679591116772e-06, |
| "loss": 0.6341, |
| "step": 1585 |
| }, |
| { |
| "epoch": 0.7769748928352725, |
| "grad_norm": 0.35006436705589294, |
| "learning_rate": 4.285883635622831e-06, |
| "loss": 0.6045, |
| "step": 1586 |
| }, |
| { |
| "epoch": 0.7774647887323943, |
| "grad_norm": 0.3341239094734192, |
| "learning_rate": 4.284970874200017e-06, |
| "loss": 0.6408, |
| "step": 1587 |
| }, |
| { |
| "epoch": 0.7779546846295162, |
| "grad_norm": 0.34813687205314636, |
| "learning_rate": 4.284057627147612e-06, |
| "loss": 0.6298, |
| "step": 1588 |
| }, |
| { |
| "epoch": 0.7784445805266381, |
| "grad_norm": 0.3582232594490051, |
| "learning_rate": 4.283143894714078e-06, |
| "loss": 0.6374, |
| "step": 1589 |
| }, |
| { |
| "epoch": 0.7789344764237599, |
| "grad_norm": 0.35491639375686646, |
| "learning_rate": 4.28222967714801e-06, |
| "loss": 0.6355, |
| "step": 1590 |
| }, |
| { |
| "epoch": 0.7794243723208818, |
| "grad_norm": 0.36043015122413635, |
| "learning_rate": 4.281314974698135e-06, |
| "loss": 0.6373, |
| "step": 1591 |
| }, |
| { |
| "epoch": 0.7799142682180037, |
| "grad_norm": 0.3543197512626648, |
| "learning_rate": 4.280399787613314e-06, |
| "loss": 0.63, |
| "step": 1592 |
| }, |
| { |
| "epoch": 0.7804041641151256, |
| "grad_norm": 0.3556041419506073, |
| "learning_rate": 4.279484116142536e-06, |
| "loss": 0.6066, |
| "step": 1593 |
| }, |
| { |
| "epoch": 0.7808940600122474, |
| "grad_norm": 0.35114777088165283, |
| "learning_rate": 4.278567960534925e-06, |
| "loss": 0.6397, |
| "step": 1594 |
| }, |
| { |
| "epoch": 0.7813839559093693, |
| "grad_norm": 0.35579121112823486, |
| "learning_rate": 4.277651321039736e-06, |
| "loss": 0.6348, |
| "step": 1595 |
| }, |
| { |
| "epoch": 0.7818738518064912, |
| "grad_norm": 0.35150033235549927, |
| "learning_rate": 4.2767341979063535e-06, |
| "loss": 0.6416, |
| "step": 1596 |
| }, |
| { |
| "epoch": 0.7823637477036129, |
| "grad_norm": 0.3624979853630066, |
| "learning_rate": 4.2758165913842975e-06, |
| "loss": 0.5853, |
| "step": 1597 |
| }, |
| { |
| "epoch": 0.7828536436007348, |
| "grad_norm": 0.3627711236476898, |
| "learning_rate": 4.274898501723217e-06, |
| "loss": 0.6421, |
| "step": 1598 |
| }, |
| { |
| "epoch": 0.7833435394978567, |
| "grad_norm": 0.36170175671577454, |
| "learning_rate": 4.273979929172892e-06, |
| "loss": 0.6371, |
| "step": 1599 |
| }, |
| { |
| "epoch": 0.7838334353949785, |
| "grad_norm": 0.3567507863044739, |
| "learning_rate": 4.273060873983235e-06, |
| "loss": 0.6386, |
| "step": 1600 |
| }, |
| { |
| "epoch": 0.7843233312921004, |
| "grad_norm": 0.3699304163455963, |
| "learning_rate": 4.27214133640429e-06, |
| "loss": 0.6442, |
| "step": 1601 |
| }, |
| { |
| "epoch": 0.7848132271892223, |
| "grad_norm": 0.36581847071647644, |
| "learning_rate": 4.271221316686232e-06, |
| "loss": 0.6471, |
| "step": 1602 |
| }, |
| { |
| "epoch": 0.7853031230863442, |
| "grad_norm": 0.35311195254325867, |
| "learning_rate": 4.270300815079365e-06, |
| "loss": 0.6282, |
| "step": 1603 |
| }, |
| { |
| "epoch": 0.785793018983466, |
| "grad_norm": 0.3268887400627136, |
| "learning_rate": 4.269379831834127e-06, |
| "loss": 0.6128, |
| "step": 1604 |
| }, |
| { |
| "epoch": 0.7862829148805879, |
| "grad_norm": 0.3437340557575226, |
| "learning_rate": 4.268458367201088e-06, |
| "loss": 0.6396, |
| "step": 1605 |
| }, |
| { |
| "epoch": 0.7867728107777098, |
| "grad_norm": 0.3491259813308716, |
| "learning_rate": 4.267536421430946e-06, |
| "loss": 0.6194, |
| "step": 1606 |
| }, |
| { |
| "epoch": 0.7872627066748316, |
| "grad_norm": 0.34736862778663635, |
| "learning_rate": 4.266613994774528e-06, |
| "loss": 0.6167, |
| "step": 1607 |
| }, |
| { |
| "epoch": 0.7877526025719535, |
| "grad_norm": 0.35211804509162903, |
| "learning_rate": 4.265691087482799e-06, |
| "loss": 0.6222, |
| "step": 1608 |
| }, |
| { |
| "epoch": 0.7882424984690753, |
| "grad_norm": 0.35701170563697815, |
| "learning_rate": 4.264767699806848e-06, |
| "loss": 0.6133, |
| "step": 1609 |
| }, |
| { |
| "epoch": 0.7887323943661971, |
| "grad_norm": 0.33028626441955566, |
| "learning_rate": 4.263843831997899e-06, |
| "loss": 0.6248, |
| "step": 1610 |
| }, |
| { |
| "epoch": 0.789222290263319, |
| "grad_norm": 0.3628077208995819, |
| "learning_rate": 4.262919484307303e-06, |
| "loss": 0.6366, |
| "step": 1611 |
| }, |
| { |
| "epoch": 0.7897121861604409, |
| "grad_norm": 0.3530922830104828, |
| "learning_rate": 4.2619946569865445e-06, |
| "loss": 0.6366, |
| "step": 1612 |
| }, |
| { |
| "epoch": 0.7902020820575628, |
| "grad_norm": 0.3449934422969818, |
| "learning_rate": 4.261069350287237e-06, |
| "loss": 0.6086, |
| "step": 1613 |
| }, |
| { |
| "epoch": 0.7906919779546846, |
| "grad_norm": 0.3515269458293915, |
| "learning_rate": 4.260143564461126e-06, |
| "loss": 0.5886, |
| "step": 1614 |
| }, |
| { |
| "epoch": 0.7911818738518065, |
| "grad_norm": 0.3541713356971741, |
| "learning_rate": 4.259217299760083e-06, |
| "loss": 0.6526, |
| "step": 1615 |
| }, |
| { |
| "epoch": 0.7916717697489284, |
| "grad_norm": 0.3405754268169403, |
| "learning_rate": 4.258290556436117e-06, |
| "loss": 0.6439, |
| "step": 1616 |
| }, |
| { |
| "epoch": 0.7921616656460502, |
| "grad_norm": 0.35600221157073975, |
| "learning_rate": 4.257363334741361e-06, |
| "loss": 0.6257, |
| "step": 1617 |
| }, |
| { |
| "epoch": 0.7926515615431721, |
| "grad_norm": 0.35318514704704285, |
| "learning_rate": 4.256435634928081e-06, |
| "loss": 0.6513, |
| "step": 1618 |
| }, |
| { |
| "epoch": 0.793141457440294, |
| "grad_norm": 0.34695348143577576, |
| "learning_rate": 4.255507457248672e-06, |
| "loss": 0.6401, |
| "step": 1619 |
| }, |
| { |
| "epoch": 0.7936313533374157, |
| "grad_norm": 0.33103105425834656, |
| "learning_rate": 4.25457880195566e-06, |
| "loss": 0.6261, |
| "step": 1620 |
| }, |
| { |
| "epoch": 0.7941212492345376, |
| "grad_norm": 0.35367587208747864, |
| "learning_rate": 4.2536496693016995e-06, |
| "loss": 0.6213, |
| "step": 1621 |
| }, |
| { |
| "epoch": 0.7946111451316595, |
| "grad_norm": 0.35644322633743286, |
| "learning_rate": 4.252720059539577e-06, |
| "loss": 0.6512, |
| "step": 1622 |
| }, |
| { |
| "epoch": 0.7951010410287814, |
| "grad_norm": 0.3602214753627777, |
| "learning_rate": 4.251789972922206e-06, |
| "loss": 0.6137, |
| "step": 1623 |
| }, |
| { |
| "epoch": 0.7955909369259032, |
| "grad_norm": 0.36309823393821716, |
| "learning_rate": 4.250859409702632e-06, |
| "loss": 0.6002, |
| "step": 1624 |
| }, |
| { |
| "epoch": 0.7960808328230251, |
| "grad_norm": 0.365477055311203, |
| "learning_rate": 4.24992837013403e-06, |
| "loss": 0.6633, |
| "step": 1625 |
| }, |
| { |
| "epoch": 0.796570728720147, |
| "grad_norm": 0.3455202877521515, |
| "learning_rate": 4.2489968544697045e-06, |
| "loss": 0.6502, |
| "step": 1626 |
| }, |
| { |
| "epoch": 0.7970606246172688, |
| "grad_norm": 0.37106695771217346, |
| "learning_rate": 4.248064862963087e-06, |
| "loss": 0.6236, |
| "step": 1627 |
| }, |
| { |
| "epoch": 0.7975505205143907, |
| "grad_norm": 0.3467376232147217, |
| "learning_rate": 4.247132395867742e-06, |
| "loss": 0.6214, |
| "step": 1628 |
| }, |
| { |
| "epoch": 0.7980404164115126, |
| "grad_norm": 0.3580789268016815, |
| "learning_rate": 4.246199453437362e-06, |
| "loss": 0.6419, |
| "step": 1629 |
| }, |
| { |
| "epoch": 0.7985303123086344, |
| "grad_norm": 0.3447851538658142, |
| "learning_rate": 4.2452660359257684e-06, |
| "loss": 0.6233, |
| "step": 1630 |
| }, |
| { |
| "epoch": 0.7990202082057563, |
| "grad_norm": 0.36099758744239807, |
| "learning_rate": 4.244332143586912e-06, |
| "loss": 0.6256, |
| "step": 1631 |
| }, |
| { |
| "epoch": 0.7995101041028781, |
| "grad_norm": 0.3572690188884735, |
| "learning_rate": 4.243397776674874e-06, |
| "loss": 0.5994, |
| "step": 1632 |
| }, |
| { |
| "epoch": 0.8, |
| "grad_norm": 0.35768821835517883, |
| "learning_rate": 4.242462935443862e-06, |
| "loss": 0.6054, |
| "step": 1633 |
| }, |
| { |
| "epoch": 0.8004898958971218, |
| "grad_norm": 0.3552550971508026, |
| "learning_rate": 4.241527620148216e-06, |
| "loss": 0.6365, |
| "step": 1634 |
| }, |
| { |
| "epoch": 0.8009797917942437, |
| "grad_norm": 0.37358149886131287, |
| "learning_rate": 4.240591831042402e-06, |
| "loss": 0.6517, |
| "step": 1635 |
| }, |
| { |
| "epoch": 0.8014696876913656, |
| "grad_norm": 0.34565654397010803, |
| "learning_rate": 4.239655568381018e-06, |
| "loss": 0.6153, |
| "step": 1636 |
| }, |
| { |
| "epoch": 0.8019595835884874, |
| "grad_norm": 0.3581133484840393, |
| "learning_rate": 4.238718832418787e-06, |
| "loss": 0.6264, |
| "step": 1637 |
| }, |
| { |
| "epoch": 0.8024494794856093, |
| "grad_norm": 0.3637845516204834, |
| "learning_rate": 4.237781623410565e-06, |
| "loss": 0.638, |
| "step": 1638 |
| }, |
| { |
| "epoch": 0.8029393753827312, |
| "grad_norm": 0.36229655146598816, |
| "learning_rate": 4.236843941611332e-06, |
| "loss": 0.6106, |
| "step": 1639 |
| }, |
| { |
| "epoch": 0.8034292712798531, |
| "grad_norm": 0.358633428812027, |
| "learning_rate": 4.2359057872762025e-06, |
| "loss": 0.6628, |
| "step": 1640 |
| }, |
| { |
| "epoch": 0.8039191671769749, |
| "grad_norm": 0.35790446400642395, |
| "learning_rate": 4.2349671606604125e-06, |
| "loss": 0.6229, |
| "step": 1641 |
| }, |
| { |
| "epoch": 0.8044090630740968, |
| "grad_norm": 0.37761494517326355, |
| "learning_rate": 4.234028062019333e-06, |
| "loss": 0.6671, |
| "step": 1642 |
| }, |
| { |
| "epoch": 0.8048989589712187, |
| "grad_norm": 0.341201514005661, |
| "learning_rate": 4.233088491608459e-06, |
| "loss": 0.631, |
| "step": 1643 |
| }, |
| { |
| "epoch": 0.8053888548683404, |
| "grad_norm": 0.34282517433166504, |
| "learning_rate": 4.232148449683418e-06, |
| "loss": 0.6492, |
| "step": 1644 |
| }, |
| { |
| "epoch": 0.8058787507654623, |
| "grad_norm": 0.35871371626853943, |
| "learning_rate": 4.231207936499959e-06, |
| "loss": 0.6257, |
| "step": 1645 |
| }, |
| { |
| "epoch": 0.8063686466625842, |
| "grad_norm": 0.3889302611351013, |
| "learning_rate": 4.230266952313968e-06, |
| "loss": 0.6251, |
| "step": 1646 |
| }, |
| { |
| "epoch": 0.806858542559706, |
| "grad_norm": 0.3646315336227417, |
| "learning_rate": 4.229325497381453e-06, |
| "loss": 0.6371, |
| "step": 1647 |
| }, |
| { |
| "epoch": 0.8073484384568279, |
| "grad_norm": 0.365292489528656, |
| "learning_rate": 4.22838357195855e-06, |
| "loss": 0.624, |
| "step": 1648 |
| }, |
| { |
| "epoch": 0.8078383343539498, |
| "grad_norm": 0.34523457288742065, |
| "learning_rate": 4.227441176301527e-06, |
| "loss": 0.6358, |
| "step": 1649 |
| }, |
| { |
| "epoch": 0.8083282302510717, |
| "grad_norm": 0.3475101590156555, |
| "learning_rate": 4.226498310666776e-06, |
| "loss": 0.6262, |
| "step": 1650 |
| }, |
| { |
| "epoch": 0.8088181261481935, |
| "grad_norm": 0.3511309027671814, |
| "learning_rate": 4.225554975310819e-06, |
| "loss": 0.6231, |
| "step": 1651 |
| }, |
| { |
| "epoch": 0.8093080220453154, |
| "grad_norm": 0.3588724732398987, |
| "learning_rate": 4.224611170490307e-06, |
| "loss": 0.6277, |
| "step": 1652 |
| }, |
| { |
| "epoch": 0.8097979179424373, |
| "grad_norm": 0.3514706790447235, |
| "learning_rate": 4.223666896462015e-06, |
| "loss": 0.622, |
| "step": 1653 |
| }, |
| { |
| "epoch": 0.8102878138395591, |
| "grad_norm": 0.3686499297618866, |
| "learning_rate": 4.222722153482849e-06, |
| "loss": 0.6774, |
| "step": 1654 |
| }, |
| { |
| "epoch": 0.810777709736681, |
| "grad_norm": 0.3523634374141693, |
| "learning_rate": 4.221776941809841e-06, |
| "loss": 0.6279, |
| "step": 1655 |
| }, |
| { |
| "epoch": 0.8112676056338028, |
| "grad_norm": 0.3380214273929596, |
| "learning_rate": 4.22083126170015e-06, |
| "loss": 0.6245, |
| "step": 1656 |
| }, |
| { |
| "epoch": 0.8117575015309246, |
| "grad_norm": 0.3579692840576172, |
| "learning_rate": 4.219885113411063e-06, |
| "loss": 0.614, |
| "step": 1657 |
| }, |
| { |
| "epoch": 0.8122473974280465, |
| "grad_norm": 0.3645648956298828, |
| "learning_rate": 4.218938497199996e-06, |
| "loss": 0.6338, |
| "step": 1658 |
| }, |
| { |
| "epoch": 0.8127372933251684, |
| "grad_norm": 0.34988459944725037, |
| "learning_rate": 4.21799141332449e-06, |
| "loss": 0.6407, |
| "step": 1659 |
| }, |
| { |
| "epoch": 0.8132271892222903, |
| "grad_norm": 0.35096099972724915, |
| "learning_rate": 4.217043862042215e-06, |
| "loss": 0.6379, |
| "step": 1660 |
| }, |
| { |
| "epoch": 0.8137170851194121, |
| "grad_norm": 0.37124431133270264, |
| "learning_rate": 4.216095843610967e-06, |
| "loss": 0.6242, |
| "step": 1661 |
| }, |
| { |
| "epoch": 0.814206981016534, |
| "grad_norm": 0.35415878891944885, |
| "learning_rate": 4.2151473582886686e-06, |
| "loss": 0.6206, |
| "step": 1662 |
| }, |
| { |
| "epoch": 0.8146968769136559, |
| "grad_norm": 0.3575012981891632, |
| "learning_rate": 4.214198406333371e-06, |
| "loss": 0.6119, |
| "step": 1663 |
| }, |
| { |
| "epoch": 0.8151867728107777, |
| "grad_norm": 0.34369057416915894, |
| "learning_rate": 4.213248988003251e-06, |
| "loss": 0.6221, |
| "step": 1664 |
| }, |
| { |
| "epoch": 0.8156766687078996, |
| "grad_norm": 0.36129137873649597, |
| "learning_rate": 4.212299103556614e-06, |
| "loss": 0.6347, |
| "step": 1665 |
| }, |
| { |
| "epoch": 0.8161665646050215, |
| "grad_norm": 0.35974010825157166, |
| "learning_rate": 4.211348753251891e-06, |
| "loss": 0.6499, |
| "step": 1666 |
| }, |
| { |
| "epoch": 0.8166564605021432, |
| "grad_norm": 0.3585021197795868, |
| "learning_rate": 4.210397937347638e-06, |
| "loss": 0.6611, |
| "step": 1667 |
| }, |
| { |
| "epoch": 0.8171463563992651, |
| "grad_norm": 0.36124205589294434, |
| "learning_rate": 4.2094466561025415e-06, |
| "loss": 0.6432, |
| "step": 1668 |
| }, |
| { |
| "epoch": 0.817636252296387, |
| "grad_norm": 0.3618478775024414, |
| "learning_rate": 4.208494909775413e-06, |
| "loss": 0.612, |
| "step": 1669 |
| }, |
| { |
| "epoch": 0.8181261481935089, |
| "grad_norm": 0.3520048260688782, |
| "learning_rate": 4.207542698625188e-06, |
| "loss": 0.6263, |
| "step": 1670 |
| }, |
| { |
| "epoch": 0.8186160440906307, |
| "grad_norm": 0.36058175563812256, |
| "learning_rate": 4.2065900229109315e-06, |
| "loss": 0.6556, |
| "step": 1671 |
| }, |
| { |
| "epoch": 0.8191059399877526, |
| "grad_norm": 0.34738948941230774, |
| "learning_rate": 4.205636882891835e-06, |
| "loss": 0.6356, |
| "step": 1672 |
| }, |
| { |
| "epoch": 0.8195958358848745, |
| "grad_norm": 0.3644973933696747, |
| "learning_rate": 4.204683278827214e-06, |
| "loss": 0.6252, |
| "step": 1673 |
| }, |
| { |
| "epoch": 0.8200857317819963, |
| "grad_norm": 0.3600853979587555, |
| "learning_rate": 4.203729210976513e-06, |
| "loss": 0.6384, |
| "step": 1674 |
| }, |
| { |
| "epoch": 0.8205756276791182, |
| "grad_norm": 0.3561994135379791, |
| "learning_rate": 4.2027746795993e-06, |
| "loss": 0.6208, |
| "step": 1675 |
| }, |
| { |
| "epoch": 0.8210655235762401, |
| "grad_norm": 0.3534051179885864, |
| "learning_rate": 4.20181968495527e-06, |
| "loss": 0.6327, |
| "step": 1676 |
| }, |
| { |
| "epoch": 0.8215554194733619, |
| "grad_norm": 0.36424657702445984, |
| "learning_rate": 4.200864227304247e-06, |
| "loss": 0.6232, |
| "step": 1677 |
| }, |
| { |
| "epoch": 0.8220453153704838, |
| "grad_norm": 0.35763734579086304, |
| "learning_rate": 4.199908306906176e-06, |
| "loss": 0.6281, |
| "step": 1678 |
| }, |
| { |
| "epoch": 0.8225352112676056, |
| "grad_norm": 0.3533649146556854, |
| "learning_rate": 4.1989519240211305e-06, |
| "loss": 0.6262, |
| "step": 1679 |
| }, |
| { |
| "epoch": 0.8230251071647275, |
| "grad_norm": 0.3638383746147156, |
| "learning_rate": 4.197995078909311e-06, |
| "loss": 0.6343, |
| "step": 1680 |
| }, |
| { |
| "epoch": 0.8235150030618493, |
| "grad_norm": 0.348105788230896, |
| "learning_rate": 4.197037771831041e-06, |
| "loss": 0.6216, |
| "step": 1681 |
| }, |
| { |
| "epoch": 0.8240048989589712, |
| "grad_norm": 0.33755728602409363, |
| "learning_rate": 4.1960800030467705e-06, |
| "loss": 0.6325, |
| "step": 1682 |
| }, |
| { |
| "epoch": 0.8244947948560931, |
| "grad_norm": 0.34193381667137146, |
| "learning_rate": 4.195121772817077e-06, |
| "loss": 0.6257, |
| "step": 1683 |
| }, |
| { |
| "epoch": 0.8249846907532149, |
| "grad_norm": 0.3726844787597656, |
| "learning_rate": 4.194163081402662e-06, |
| "loss": 0.6393, |
| "step": 1684 |
| }, |
| { |
| "epoch": 0.8254745866503368, |
| "grad_norm": 0.3632981777191162, |
| "learning_rate": 4.1932039290643534e-06, |
| "loss": 0.6377, |
| "step": 1685 |
| }, |
| { |
| "epoch": 0.8259644825474587, |
| "grad_norm": 0.3556203544139862, |
| "learning_rate": 4.192244316063102e-06, |
| "loss": 0.621, |
| "step": 1686 |
| }, |
| { |
| "epoch": 0.8264543784445805, |
| "grad_norm": 0.34743455052375793, |
| "learning_rate": 4.191284242659986e-06, |
| "loss": 0.6416, |
| "step": 1687 |
| }, |
| { |
| "epoch": 0.8269442743417024, |
| "grad_norm": 0.3578084707260132, |
| "learning_rate": 4.190323709116211e-06, |
| "loss": 0.6362, |
| "step": 1688 |
| }, |
| { |
| "epoch": 0.8274341702388243, |
| "grad_norm": 0.36085325479507446, |
| "learning_rate": 4.189362715693102e-06, |
| "loss": 0.6291, |
| "step": 1689 |
| }, |
| { |
| "epoch": 0.8279240661359462, |
| "grad_norm": 0.36349424719810486, |
| "learning_rate": 4.188401262652114e-06, |
| "loss": 0.5976, |
| "step": 1690 |
| }, |
| { |
| "epoch": 0.8284139620330679, |
| "grad_norm": 0.3622356355190277, |
| "learning_rate": 4.187439350254826e-06, |
| "loss": 0.6348, |
| "step": 1691 |
| }, |
| { |
| "epoch": 0.8289038579301898, |
| "grad_norm": 0.3604823052883148, |
| "learning_rate": 4.186476978762941e-06, |
| "loss": 0.6329, |
| "step": 1692 |
| }, |
| { |
| "epoch": 0.8293937538273117, |
| "grad_norm": 0.36562973260879517, |
| "learning_rate": 4.185514148438288e-06, |
| "loss": 0.6238, |
| "step": 1693 |
| }, |
| { |
| "epoch": 0.8298836497244335, |
| "grad_norm": 0.36667400598526, |
| "learning_rate": 4.184550859542819e-06, |
| "loss": 0.6411, |
| "step": 1694 |
| }, |
| { |
| "epoch": 0.8303735456215554, |
| "grad_norm": 0.36434921622276306, |
| "learning_rate": 4.183587112338613e-06, |
| "loss": 0.6578, |
| "step": 1695 |
| }, |
| { |
| "epoch": 0.8308634415186773, |
| "grad_norm": 0.3545973002910614, |
| "learning_rate": 4.182622907087872e-06, |
| "loss": 0.6012, |
| "step": 1696 |
| }, |
| { |
| "epoch": 0.8313533374157991, |
| "grad_norm": 0.3564360737800598, |
| "learning_rate": 4.181658244052924e-06, |
| "loss": 0.648, |
| "step": 1697 |
| }, |
| { |
| "epoch": 0.831843233312921, |
| "grad_norm": 0.3650110065937042, |
| "learning_rate": 4.180693123496221e-06, |
| "loss": 0.6444, |
| "step": 1698 |
| }, |
| { |
| "epoch": 0.8323331292100429, |
| "grad_norm": 0.35943543910980225, |
| "learning_rate": 4.179727545680338e-06, |
| "loss": 0.6427, |
| "step": 1699 |
| }, |
| { |
| "epoch": 0.8328230251071648, |
| "grad_norm": 0.3513593375682831, |
| "learning_rate": 4.178761510867977e-06, |
| "loss": 0.637, |
| "step": 1700 |
| }, |
| { |
| "epoch": 0.8333129210042866, |
| "grad_norm": 0.3554759621620178, |
| "learning_rate": 4.177795019321963e-06, |
| "loss": 0.6416, |
| "step": 1701 |
| }, |
| { |
| "epoch": 0.8338028169014085, |
| "grad_norm": 0.35935404896736145, |
| "learning_rate": 4.176828071305244e-06, |
| "loss": 0.636, |
| "step": 1702 |
| }, |
| { |
| "epoch": 0.8342927127985303, |
| "grad_norm": 0.35404741764068604, |
| "learning_rate": 4.175860667080896e-06, |
| "loss": 0.6075, |
| "step": 1703 |
| }, |
| { |
| "epoch": 0.8347826086956521, |
| "grad_norm": 0.36948296427726746, |
| "learning_rate": 4.174892806912113e-06, |
| "loss": 0.6297, |
| "step": 1704 |
| }, |
| { |
| "epoch": 0.835272504592774, |
| "grad_norm": 0.34789204597473145, |
| "learning_rate": 4.1739244910622205e-06, |
| "loss": 0.6319, |
| "step": 1705 |
| }, |
| { |
| "epoch": 0.8357624004898959, |
| "grad_norm": 0.372383713722229, |
| "learning_rate": 4.172955719794662e-06, |
| "loss": 0.6421, |
| "step": 1706 |
| }, |
| { |
| "epoch": 0.8362522963870178, |
| "grad_norm": 0.37573274970054626, |
| "learning_rate": 4.171986493373008e-06, |
| "loss": 0.612, |
| "step": 1707 |
| }, |
| { |
| "epoch": 0.8367421922841396, |
| "grad_norm": 0.3678482174873352, |
| "learning_rate": 4.171016812060951e-06, |
| "loss": 0.5881, |
| "step": 1708 |
| }, |
| { |
| "epoch": 0.8372320881812615, |
| "grad_norm": 0.36841633915901184, |
| "learning_rate": 4.170046676122309e-06, |
| "loss": 0.5933, |
| "step": 1709 |
| }, |
| { |
| "epoch": 0.8377219840783834, |
| "grad_norm": 0.33924609422683716, |
| "learning_rate": 4.169076085821021e-06, |
| "loss": 0.639, |
| "step": 1710 |
| }, |
| { |
| "epoch": 0.8382118799755052, |
| "grad_norm": 0.3626523017883301, |
| "learning_rate": 4.168105041421155e-06, |
| "loss": 0.6169, |
| "step": 1711 |
| }, |
| { |
| "epoch": 0.8387017758726271, |
| "grad_norm": 0.3452063798904419, |
| "learning_rate": 4.167133543186896e-06, |
| "loss": 0.632, |
| "step": 1712 |
| }, |
| { |
| "epoch": 0.839191671769749, |
| "grad_norm": 0.34949636459350586, |
| "learning_rate": 4.166161591382557e-06, |
| "loss": 0.6253, |
| "step": 1713 |
| }, |
| { |
| "epoch": 0.8396815676668707, |
| "grad_norm": 0.37437132000923157, |
| "learning_rate": 4.165189186272572e-06, |
| "loss": 0.6222, |
| "step": 1714 |
| }, |
| { |
| "epoch": 0.8401714635639926, |
| "grad_norm": 0.35469987988471985, |
| "learning_rate": 4.164216328121499e-06, |
| "loss": 0.6246, |
| "step": 1715 |
| }, |
| { |
| "epoch": 0.8406613594611145, |
| "grad_norm": 0.33854183554649353, |
| "learning_rate": 4.1632430171940196e-06, |
| "loss": 0.627, |
| "step": 1716 |
| }, |
| { |
| "epoch": 0.8411512553582364, |
| "grad_norm": 0.357943058013916, |
| "learning_rate": 4.162269253754939e-06, |
| "loss": 0.6306, |
| "step": 1717 |
| }, |
| { |
| "epoch": 0.8416411512553582, |
| "grad_norm": 0.36788368225097656, |
| "learning_rate": 4.161295038069186e-06, |
| "loss": 0.6325, |
| "step": 1718 |
| }, |
| { |
| "epoch": 0.8421310471524801, |
| "grad_norm": 0.35560229420661926, |
| "learning_rate": 4.160320370401809e-06, |
| "loss": 0.6382, |
| "step": 1719 |
| }, |
| { |
| "epoch": 0.842620943049602, |
| "grad_norm": 0.37370407581329346, |
| "learning_rate": 4.1593452510179834e-06, |
| "loss": 0.6626, |
| "step": 1720 |
| }, |
| { |
| "epoch": 0.8431108389467238, |
| "grad_norm": 0.3486816883087158, |
| "learning_rate": 4.158369680183006e-06, |
| "loss": 0.6257, |
| "step": 1721 |
| }, |
| { |
| "epoch": 0.8436007348438457, |
| "grad_norm": 0.38826611638069153, |
| "learning_rate": 4.157393658162296e-06, |
| "loss": 0.6456, |
| "step": 1722 |
| }, |
| { |
| "epoch": 0.8440906307409676, |
| "grad_norm": 0.3426065146923065, |
| "learning_rate": 4.156417185221393e-06, |
| "loss": 0.6116, |
| "step": 1723 |
| }, |
| { |
| "epoch": 0.8445805266380894, |
| "grad_norm": 0.34008917212486267, |
| "learning_rate": 4.155440261625966e-06, |
| "loss": 0.6092, |
| "step": 1724 |
| }, |
| { |
| "epoch": 0.8450704225352113, |
| "grad_norm": 0.36330732703208923, |
| "learning_rate": 4.154462887641801e-06, |
| "loss": 0.6345, |
| "step": 1725 |
| }, |
| { |
| "epoch": 0.8455603184323331, |
| "grad_norm": 0.33931586146354675, |
| "learning_rate": 4.153485063534807e-06, |
| "loss": 0.6216, |
| "step": 1726 |
| }, |
| { |
| "epoch": 0.846050214329455, |
| "grad_norm": 0.35162895917892456, |
| "learning_rate": 4.152506789571018e-06, |
| "loss": 0.6104, |
| "step": 1727 |
| }, |
| { |
| "epoch": 0.8465401102265768, |
| "grad_norm": 0.3457432985305786, |
| "learning_rate": 4.151528066016589e-06, |
| "loss": 0.6292, |
| "step": 1728 |
| }, |
| { |
| "epoch": 0.8470300061236987, |
| "grad_norm": 0.3506641685962677, |
| "learning_rate": 4.150548893137796e-06, |
| "loss": 0.6454, |
| "step": 1729 |
| }, |
| { |
| "epoch": 0.8475199020208206, |
| "grad_norm": 0.3497201204299927, |
| "learning_rate": 4.149569271201039e-06, |
| "loss": 0.6035, |
| "step": 1730 |
| }, |
| { |
| "epoch": 0.8480097979179424, |
| "grad_norm": 0.36187872290611267, |
| "learning_rate": 4.148589200472839e-06, |
| "loss": 0.6305, |
| "step": 1731 |
| }, |
| { |
| "epoch": 0.8484996938150643, |
| "grad_norm": 0.3590366244316101, |
| "learning_rate": 4.147608681219842e-06, |
| "loss": 0.6045, |
| "step": 1732 |
| }, |
| { |
| "epoch": 0.8489895897121862, |
| "grad_norm": 0.34981295466423035, |
| "learning_rate": 4.146627713708811e-06, |
| "loss": 0.6359, |
| "step": 1733 |
| }, |
| { |
| "epoch": 0.849479485609308, |
| "grad_norm": 0.35321328043937683, |
| "learning_rate": 4.145646298206636e-06, |
| "loss": 0.634, |
| "step": 1734 |
| }, |
| { |
| "epoch": 0.8499693815064299, |
| "grad_norm": 0.36797085404396057, |
| "learning_rate": 4.144664434980325e-06, |
| "loss": 0.6321, |
| "step": 1735 |
| }, |
| { |
| "epoch": 0.8504592774035518, |
| "grad_norm": 0.35578250885009766, |
| "learning_rate": 4.14368212429701e-06, |
| "loss": 0.6154, |
| "step": 1736 |
| }, |
| { |
| "epoch": 0.8509491733006737, |
| "grad_norm": 0.33807629346847534, |
| "learning_rate": 4.142699366423945e-06, |
| "loss": 0.6026, |
| "step": 1737 |
| }, |
| { |
| "epoch": 0.8514390691977954, |
| "grad_norm": 0.36523035168647766, |
| "learning_rate": 4.141716161628504e-06, |
| "loss": 0.5957, |
| "step": 1738 |
| }, |
| { |
| "epoch": 0.8519289650949173, |
| "grad_norm": 0.36455363035202026, |
| "learning_rate": 4.140732510178184e-06, |
| "loss": 0.6183, |
| "step": 1739 |
| }, |
| { |
| "epoch": 0.8524188609920392, |
| "grad_norm": 0.3511872887611389, |
| "learning_rate": 4.1397484123406015e-06, |
| "loss": 0.6387, |
| "step": 1740 |
| }, |
| { |
| "epoch": 0.852908756889161, |
| "grad_norm": 0.34794583916664124, |
| "learning_rate": 4.1387638683834974e-06, |
| "loss": 0.6042, |
| "step": 1741 |
| }, |
| { |
| "epoch": 0.8533986527862829, |
| "grad_norm": 0.36754393577575684, |
| "learning_rate": 4.137778878574732e-06, |
| "loss": 0.6099, |
| "step": 1742 |
| }, |
| { |
| "epoch": 0.8538885486834048, |
| "grad_norm": 0.3553093373775482, |
| "learning_rate": 4.136793443182287e-06, |
| "loss": 0.6138, |
| "step": 1743 |
| }, |
| { |
| "epoch": 0.8543784445805266, |
| "grad_norm": 0.35336676239967346, |
| "learning_rate": 4.135807562474267e-06, |
| "loss": 0.6279, |
| "step": 1744 |
| }, |
| { |
| "epoch": 0.8548683404776485, |
| "grad_norm": 0.35361459851264954, |
| "learning_rate": 4.134821236718895e-06, |
| "loss": 0.6347, |
| "step": 1745 |
| }, |
| { |
| "epoch": 0.8553582363747704, |
| "grad_norm": 0.35480567812919617, |
| "learning_rate": 4.133834466184516e-06, |
| "loss": 0.6186, |
| "step": 1746 |
| }, |
| { |
| "epoch": 0.8558481322718923, |
| "grad_norm": 0.3604031801223755, |
| "learning_rate": 4.132847251139599e-06, |
| "loss": 0.6147, |
| "step": 1747 |
| }, |
| { |
| "epoch": 0.856338028169014, |
| "grad_norm": 0.35460978746414185, |
| "learning_rate": 4.131859591852729e-06, |
| "loss": 0.6355, |
| "step": 1748 |
| }, |
| { |
| "epoch": 0.856827924066136, |
| "grad_norm": 0.3537762761116028, |
| "learning_rate": 4.130871488592617e-06, |
| "loss": 0.603, |
| "step": 1749 |
| }, |
| { |
| "epoch": 0.8573178199632578, |
| "grad_norm": 0.3467521071434021, |
| "learning_rate": 4.129882941628089e-06, |
| "loss": 0.6511, |
| "step": 1750 |
| }, |
| { |
| "epoch": 0.8578077158603796, |
| "grad_norm": 0.3615700602531433, |
| "learning_rate": 4.128893951228098e-06, |
| "loss": 0.665, |
| "step": 1751 |
| }, |
| { |
| "epoch": 0.8582976117575015, |
| "grad_norm": 0.36621513962745667, |
| "learning_rate": 4.1279045176617115e-06, |
| "loss": 0.6258, |
| "step": 1752 |
| }, |
| { |
| "epoch": 0.8587875076546234, |
| "grad_norm": 0.3570707440376282, |
| "learning_rate": 4.126914641198123e-06, |
| "loss": 0.625, |
| "step": 1753 |
| }, |
| { |
| "epoch": 0.8592774035517452, |
| "grad_norm": 0.3643128573894501, |
| "learning_rate": 4.125924322106643e-06, |
| "loss": 0.6517, |
| "step": 1754 |
| }, |
| { |
| "epoch": 0.8597672994488671, |
| "grad_norm": 0.3491007387638092, |
| "learning_rate": 4.124933560656703e-06, |
| "loss": 0.6287, |
| "step": 1755 |
| }, |
| { |
| "epoch": 0.860257195345989, |
| "grad_norm": 0.3507339060306549, |
| "learning_rate": 4.123942357117856e-06, |
| "loss": 0.6517, |
| "step": 1756 |
| }, |
| { |
| "epoch": 0.8607470912431109, |
| "grad_norm": 0.3513590097427368, |
| "learning_rate": 4.122950711759775e-06, |
| "loss": 0.635, |
| "step": 1757 |
| }, |
| { |
| "epoch": 0.8612369871402327, |
| "grad_norm": 0.3566867709159851, |
| "learning_rate": 4.121958624852253e-06, |
| "loss": 0.6013, |
| "step": 1758 |
| }, |
| { |
| "epoch": 0.8617268830373546, |
| "grad_norm": 0.35651153326034546, |
| "learning_rate": 4.120966096665201e-06, |
| "loss": 0.6442, |
| "step": 1759 |
| }, |
| { |
| "epoch": 0.8622167789344765, |
| "grad_norm": 0.3672233819961548, |
| "learning_rate": 4.119973127468654e-06, |
| "loss": 0.6487, |
| "step": 1760 |
| }, |
| { |
| "epoch": 0.8627066748315982, |
| "grad_norm": 0.35391879081726074, |
| "learning_rate": 4.118979717532764e-06, |
| "loss": 0.6348, |
| "step": 1761 |
| }, |
| { |
| "epoch": 0.8631965707287201, |
| "grad_norm": 0.3441363573074341, |
| "learning_rate": 4.1179858671278044e-06, |
| "loss": 0.6458, |
| "step": 1762 |
| }, |
| { |
| "epoch": 0.863686466625842, |
| "grad_norm": 0.3333560824394226, |
| "learning_rate": 4.116991576524167e-06, |
| "loss": 0.6168, |
| "step": 1763 |
| }, |
| { |
| "epoch": 0.8641763625229639, |
| "grad_norm": 0.34725016355514526, |
| "learning_rate": 4.115996845992366e-06, |
| "loss": 0.6328, |
| "step": 1764 |
| }, |
| { |
| "epoch": 0.8646662584200857, |
| "grad_norm": 0.3482379913330078, |
| "learning_rate": 4.115001675803033e-06, |
| "loss": 0.6331, |
| "step": 1765 |
| }, |
| { |
| "epoch": 0.8651561543172076, |
| "grad_norm": 0.3419699966907501, |
| "learning_rate": 4.114006066226919e-06, |
| "loss": 0.6225, |
| "step": 1766 |
| }, |
| { |
| "epoch": 0.8656460502143295, |
| "grad_norm": 0.3558363914489746, |
| "learning_rate": 4.113010017534896e-06, |
| "loss": 0.6349, |
| "step": 1767 |
| }, |
| { |
| "epoch": 0.8661359461114513, |
| "grad_norm": 0.3551356792449951, |
| "learning_rate": 4.112013529997954e-06, |
| "loss": 0.5967, |
| "step": 1768 |
| }, |
| { |
| "epoch": 0.8666258420085732, |
| "grad_norm": 0.3523545265197754, |
| "learning_rate": 4.111016603887205e-06, |
| "loss": 0.6363, |
| "step": 1769 |
| }, |
| { |
| "epoch": 0.8671157379056951, |
| "grad_norm": 0.3577764630317688, |
| "learning_rate": 4.1100192394738764e-06, |
| "loss": 0.621, |
| "step": 1770 |
| }, |
| { |
| "epoch": 0.8676056338028169, |
| "grad_norm": 0.3645519018173218, |
| "learning_rate": 4.109021437029318e-06, |
| "loss": 0.6575, |
| "step": 1771 |
| }, |
| { |
| "epoch": 0.8680955296999388, |
| "grad_norm": 0.34731167554855347, |
| "learning_rate": 4.108023196824998e-06, |
| "loss": 0.5998, |
| "step": 1772 |
| }, |
| { |
| "epoch": 0.8685854255970606, |
| "grad_norm": 0.3572891056537628, |
| "learning_rate": 4.107024519132503e-06, |
| "loss": 0.6172, |
| "step": 1773 |
| }, |
| { |
| "epoch": 0.8690753214941825, |
| "grad_norm": 0.3890896737575531, |
| "learning_rate": 4.10602540422354e-06, |
| "loss": 0.6429, |
| "step": 1774 |
| }, |
| { |
| "epoch": 0.8695652173913043, |
| "grad_norm": 0.3684241473674774, |
| "learning_rate": 4.105025852369933e-06, |
| "loss": 0.6395, |
| "step": 1775 |
| }, |
| { |
| "epoch": 0.8700551132884262, |
| "grad_norm": 0.3604837656021118, |
| "learning_rate": 4.104025863843626e-06, |
| "loss": 0.6513, |
| "step": 1776 |
| }, |
| { |
| "epoch": 0.8705450091855481, |
| "grad_norm": 0.35402780771255493, |
| "learning_rate": 4.103025438916682e-06, |
| "loss": 0.6296, |
| "step": 1777 |
| }, |
| { |
| "epoch": 0.8710349050826699, |
| "grad_norm": 0.3665107786655426, |
| "learning_rate": 4.102024577861282e-06, |
| "loss": 0.6351, |
| "step": 1778 |
| }, |
| { |
| "epoch": 0.8715248009797918, |
| "grad_norm": 0.3513564169406891, |
| "learning_rate": 4.101023280949726e-06, |
| "loss": 0.647, |
| "step": 1779 |
| }, |
| { |
| "epoch": 0.8720146968769137, |
| "grad_norm": 0.3502311706542969, |
| "learning_rate": 4.100021548454434e-06, |
| "loss": 0.6501, |
| "step": 1780 |
| }, |
| { |
| "epoch": 0.8725045927740355, |
| "grad_norm": 0.37091171741485596, |
| "learning_rate": 4.0990193806479426e-06, |
| "loss": 0.6542, |
| "step": 1781 |
| }, |
| { |
| "epoch": 0.8729944886711574, |
| "grad_norm": 0.35671868920326233, |
| "learning_rate": 4.098016777802907e-06, |
| "loss": 0.6384, |
| "step": 1782 |
| }, |
| { |
| "epoch": 0.8734843845682793, |
| "grad_norm": 0.3722166121006012, |
| "learning_rate": 4.097013740192101e-06, |
| "loss": 0.6407, |
| "step": 1783 |
| }, |
| { |
| "epoch": 0.8739742804654012, |
| "grad_norm": 0.37666139006614685, |
| "learning_rate": 4.096010268088418e-06, |
| "loss": 0.6438, |
| "step": 1784 |
| }, |
| { |
| "epoch": 0.8744641763625229, |
| "grad_norm": 0.3596649169921875, |
| "learning_rate": 4.095006361764867e-06, |
| "loss": 0.6393, |
| "step": 1785 |
| }, |
| { |
| "epoch": 0.8749540722596448, |
| "grad_norm": 0.37628263235092163, |
| "learning_rate": 4.094002021494577e-06, |
| "loss": 0.6187, |
| "step": 1786 |
| }, |
| { |
| "epoch": 0.8754439681567667, |
| "grad_norm": 0.3606310188770294, |
| "learning_rate": 4.092997247550796e-06, |
| "loss": 0.6279, |
| "step": 1787 |
| }, |
| { |
| "epoch": 0.8759338640538885, |
| "grad_norm": 0.3513457179069519, |
| "learning_rate": 4.091992040206887e-06, |
| "loss": 0.6319, |
| "step": 1788 |
| }, |
| { |
| "epoch": 0.8764237599510104, |
| "grad_norm": 0.34625834226608276, |
| "learning_rate": 4.090986399736333e-06, |
| "loss": 0.6373, |
| "step": 1789 |
| }, |
| { |
| "epoch": 0.8769136558481323, |
| "grad_norm": 0.3485386371612549, |
| "learning_rate": 4.089980326412734e-06, |
| "loss": 0.6302, |
| "step": 1790 |
| }, |
| { |
| "epoch": 0.8774035517452541, |
| "grad_norm": 0.35028499364852905, |
| "learning_rate": 4.088973820509811e-06, |
| "loss": 0.6411, |
| "step": 1791 |
| }, |
| { |
| "epoch": 0.877893447642376, |
| "grad_norm": 0.35805413126945496, |
| "learning_rate": 4.087966882301396e-06, |
| "loss": 0.6194, |
| "step": 1792 |
| }, |
| { |
| "epoch": 0.8783833435394979, |
| "grad_norm": 0.35562488436698914, |
| "learning_rate": 4.086959512061444e-06, |
| "loss": 0.622, |
| "step": 1793 |
| }, |
| { |
| "epoch": 0.8788732394366198, |
| "grad_norm": 0.33581987023353577, |
| "learning_rate": 4.085951710064027e-06, |
| "loss": 0.568, |
| "step": 1794 |
| }, |
| { |
| "epoch": 0.8793631353337416, |
| "grad_norm": 0.34489452838897705, |
| "learning_rate": 4.084943476583332e-06, |
| "loss": 0.6419, |
| "step": 1795 |
| }, |
| { |
| "epoch": 0.8798530312308634, |
| "grad_norm": 0.35095247626304626, |
| "learning_rate": 4.0839348118936665e-06, |
| "loss": 0.6247, |
| "step": 1796 |
| }, |
| { |
| "epoch": 0.8803429271279853, |
| "grad_norm": 0.34744447469711304, |
| "learning_rate": 4.082925716269452e-06, |
| "loss": 0.6157, |
| "step": 1797 |
| }, |
| { |
| "epoch": 0.8808328230251071, |
| "grad_norm": 0.35207876563072205, |
| "learning_rate": 4.08191618998523e-06, |
| "loss": 0.6346, |
| "step": 1798 |
| }, |
| { |
| "epoch": 0.881322718922229, |
| "grad_norm": 0.3440977931022644, |
| "learning_rate": 4.080906233315658e-06, |
| "loss": 0.6332, |
| "step": 1799 |
| }, |
| { |
| "epoch": 0.8818126148193509, |
| "grad_norm": 0.3496447801589966, |
| "learning_rate": 4.0798958465355095e-06, |
| "loss": 0.6404, |
| "step": 1800 |
| }, |
| { |
| "epoch": 0.8823025107164727, |
| "grad_norm": 0.3570871651172638, |
| "learning_rate": 4.078885029919678e-06, |
| "loss": 0.6242, |
| "step": 1801 |
| }, |
| { |
| "epoch": 0.8827924066135946, |
| "grad_norm": 0.36829832196235657, |
| "learning_rate": 4.077873783743171e-06, |
| "loss": 0.6245, |
| "step": 1802 |
| }, |
| { |
| "epoch": 0.8832823025107165, |
| "grad_norm": 0.36631497740745544, |
| "learning_rate": 4.076862108281116e-06, |
| "loss": 0.6343, |
| "step": 1803 |
| }, |
| { |
| "epoch": 0.8837721984078384, |
| "grad_norm": 0.358917772769928, |
| "learning_rate": 4.075850003808752e-06, |
| "loss": 0.6369, |
| "step": 1804 |
| }, |
| { |
| "epoch": 0.8842620943049602, |
| "grad_norm": 0.36538082361221313, |
| "learning_rate": 4.07483747060144e-06, |
| "loss": 0.6323, |
| "step": 1805 |
| }, |
| { |
| "epoch": 0.8847519902020821, |
| "grad_norm": 0.3712175190448761, |
| "learning_rate": 4.073824508934656e-06, |
| "loss": 0.6186, |
| "step": 1806 |
| }, |
| { |
| "epoch": 0.885241886099204, |
| "grad_norm": 0.3590482771396637, |
| "learning_rate": 4.072811119083992e-06, |
| "loss": 0.6177, |
| "step": 1807 |
| }, |
| { |
| "epoch": 0.8857317819963257, |
| "grad_norm": 0.3526355028152466, |
| "learning_rate": 4.0717973013251564e-06, |
| "loss": 0.6115, |
| "step": 1808 |
| }, |
| { |
| "epoch": 0.8862216778934476, |
| "grad_norm": 0.35488682985305786, |
| "learning_rate": 4.070783055933975e-06, |
| "loss": 0.6342, |
| "step": 1809 |
| }, |
| { |
| "epoch": 0.8867115737905695, |
| "grad_norm": 0.3698574900627136, |
| "learning_rate": 4.069768383186388e-06, |
| "loss": 0.5936, |
| "step": 1810 |
| }, |
| { |
| "epoch": 0.8872014696876913, |
| "grad_norm": 0.3756300210952759, |
| "learning_rate": 4.068753283358455e-06, |
| "loss": 0.6303, |
| "step": 1811 |
| }, |
| { |
| "epoch": 0.8876913655848132, |
| "grad_norm": 0.36526453495025635, |
| "learning_rate": 4.067737756726348e-06, |
| "loss": 0.6407, |
| "step": 1812 |
| }, |
| { |
| "epoch": 0.8881812614819351, |
| "grad_norm": 0.3452131450176239, |
| "learning_rate": 4.066721803566358e-06, |
| "loss": 0.5891, |
| "step": 1813 |
| }, |
| { |
| "epoch": 0.888671157379057, |
| "grad_norm": 0.3645488917827606, |
| "learning_rate": 4.065705424154892e-06, |
| "loss": 0.6425, |
| "step": 1814 |
| }, |
| { |
| "epoch": 0.8891610532761788, |
| "grad_norm": 0.3663477301597595, |
| "learning_rate": 4.064688618768472e-06, |
| "loss": 0.6577, |
| "step": 1815 |
| }, |
| { |
| "epoch": 0.8896509491733007, |
| "grad_norm": 0.3654061257839203, |
| "learning_rate": 4.0636713876837345e-06, |
| "loss": 0.6422, |
| "step": 1816 |
| }, |
| { |
| "epoch": 0.8901408450704226, |
| "grad_norm": 0.3733082711696625, |
| "learning_rate": 4.062653731177434e-06, |
| "loss": 0.6006, |
| "step": 1817 |
| }, |
| { |
| "epoch": 0.8906307409675444, |
| "grad_norm": 0.3536593019962311, |
| "learning_rate": 4.06163564952644e-06, |
| "loss": 0.6134, |
| "step": 1818 |
| }, |
| { |
| "epoch": 0.8911206368646662, |
| "grad_norm": 0.3528878092765808, |
| "learning_rate": 4.060617143007739e-06, |
| "loss": 0.6341, |
| "step": 1819 |
| }, |
| { |
| "epoch": 0.8916105327617881, |
| "grad_norm": 0.35610130429267883, |
| "learning_rate": 4.05959821189843e-06, |
| "loss": 0.617, |
| "step": 1820 |
| }, |
| { |
| "epoch": 0.8921004286589099, |
| "grad_norm": 0.35914796590805054, |
| "learning_rate": 4.058578856475731e-06, |
| "loss": 0.6271, |
| "step": 1821 |
| }, |
| { |
| "epoch": 0.8925903245560318, |
| "grad_norm": 0.36907869577407837, |
| "learning_rate": 4.0575590770169724e-06, |
| "loss": 0.6567, |
| "step": 1822 |
| }, |
| { |
| "epoch": 0.8930802204531537, |
| "grad_norm": 0.3563164174556732, |
| "learning_rate": 4.0565388737996016e-06, |
| "loss": 0.6443, |
| "step": 1823 |
| }, |
| { |
| "epoch": 0.8935701163502756, |
| "grad_norm": 0.3560650944709778, |
| "learning_rate": 4.055518247101182e-06, |
| "loss": 0.6574, |
| "step": 1824 |
| }, |
| { |
| "epoch": 0.8940600122473974, |
| "grad_norm": 0.3615463078022003, |
| "learning_rate": 4.0544971971993905e-06, |
| "loss": 0.625, |
| "step": 1825 |
| }, |
| { |
| "epoch": 0.8945499081445193, |
| "grad_norm": 0.3564011752605438, |
| "learning_rate": 4.053475724372019e-06, |
| "loss": 0.6119, |
| "step": 1826 |
| }, |
| { |
| "epoch": 0.8950398040416412, |
| "grad_norm": 0.3574683368206024, |
| "learning_rate": 4.052453828896978e-06, |
| "loss": 0.5942, |
| "step": 1827 |
| }, |
| { |
| "epoch": 0.895529699938763, |
| "grad_norm": 0.35468223690986633, |
| "learning_rate": 4.051431511052287e-06, |
| "loss": 0.6016, |
| "step": 1828 |
| }, |
| { |
| "epoch": 0.8960195958358849, |
| "grad_norm": 0.3870941698551178, |
| "learning_rate": 4.0504087711160875e-06, |
| "loss": 0.6654, |
| "step": 1829 |
| }, |
| { |
| "epoch": 0.8965094917330068, |
| "grad_norm": 0.3580857217311859, |
| "learning_rate": 4.049385609366628e-06, |
| "loss": 0.6343, |
| "step": 1830 |
| }, |
| { |
| "epoch": 0.8969993876301287, |
| "grad_norm": 0.35600078105926514, |
| "learning_rate": 4.04836202608228e-06, |
| "loss": 0.6368, |
| "step": 1831 |
| }, |
| { |
| "epoch": 0.8974892835272504, |
| "grad_norm": 0.3583797812461853, |
| "learning_rate": 4.047338021541522e-06, |
| "loss": 0.6369, |
| "step": 1832 |
| }, |
| { |
| "epoch": 0.8979791794243723, |
| "grad_norm": 0.36189258098602295, |
| "learning_rate": 4.046313596022952e-06, |
| "loss": 0.6149, |
| "step": 1833 |
| }, |
| { |
| "epoch": 0.8984690753214942, |
| "grad_norm": 0.3654255270957947, |
| "learning_rate": 4.0452887498052825e-06, |
| "loss": 0.6005, |
| "step": 1834 |
| }, |
| { |
| "epoch": 0.898958971218616, |
| "grad_norm": 0.36113041639328003, |
| "learning_rate": 4.0442634831673375e-06, |
| "loss": 0.6642, |
| "step": 1835 |
| }, |
| { |
| "epoch": 0.8994488671157379, |
| "grad_norm": 0.3693462908267975, |
| "learning_rate": 4.043237796388056e-06, |
| "loss": 0.6199, |
| "step": 1836 |
| }, |
| { |
| "epoch": 0.8999387630128598, |
| "grad_norm": 0.35426223278045654, |
| "learning_rate": 4.042211689746496e-06, |
| "loss": 0.6226, |
| "step": 1837 |
| }, |
| { |
| "epoch": 0.9004286589099816, |
| "grad_norm": 0.3826047480106354, |
| "learning_rate": 4.041185163521822e-06, |
| "loss": 0.6345, |
| "step": 1838 |
| }, |
| { |
| "epoch": 0.9009185548071035, |
| "grad_norm": 0.3614358603954315, |
| "learning_rate": 4.04015821799332e-06, |
| "loss": 0.6278, |
| "step": 1839 |
| }, |
| { |
| "epoch": 0.9014084507042254, |
| "grad_norm": 0.35768815875053406, |
| "learning_rate": 4.039130853440385e-06, |
| "loss": 0.5985, |
| "step": 1840 |
| }, |
| { |
| "epoch": 0.9018983466013473, |
| "grad_norm": 0.36194565892219543, |
| "learning_rate": 4.038103070142528e-06, |
| "loss": 0.6281, |
| "step": 1841 |
| }, |
| { |
| "epoch": 0.902388242498469, |
| "grad_norm": 0.3722645938396454, |
| "learning_rate": 4.037074868379374e-06, |
| "loss": 0.6285, |
| "step": 1842 |
| }, |
| { |
| "epoch": 0.902878138395591, |
| "grad_norm": 0.3487153947353363, |
| "learning_rate": 4.036046248430661e-06, |
| "loss": 0.6345, |
| "step": 1843 |
| }, |
| { |
| "epoch": 0.9033680342927128, |
| "grad_norm": 0.35068756341934204, |
| "learning_rate": 4.035017210576242e-06, |
| "loss": 0.6214, |
| "step": 1844 |
| }, |
| { |
| "epoch": 0.9038579301898346, |
| "grad_norm": 0.3582833409309387, |
| "learning_rate": 4.033987755096084e-06, |
| "loss": 0.642, |
| "step": 1845 |
| }, |
| { |
| "epoch": 0.9043478260869565, |
| "grad_norm": 0.3385012745857239, |
| "learning_rate": 4.032957882270264e-06, |
| "loss": 0.6125, |
| "step": 1846 |
| }, |
| { |
| "epoch": 0.9048377219840784, |
| "grad_norm": 0.3503551781177521, |
| "learning_rate": 4.0319275923789765e-06, |
| "loss": 0.6447, |
| "step": 1847 |
| }, |
| { |
| "epoch": 0.9053276178812002, |
| "grad_norm": 0.35716068744659424, |
| "learning_rate": 4.03089688570253e-06, |
| "loss": 0.6371, |
| "step": 1848 |
| }, |
| { |
| "epoch": 0.9058175137783221, |
| "grad_norm": 0.36254581809043884, |
| "learning_rate": 4.029865762521342e-06, |
| "loss": 0.6388, |
| "step": 1849 |
| }, |
| { |
| "epoch": 0.906307409675444, |
| "grad_norm": 0.3596474528312683, |
| "learning_rate": 4.028834223115947e-06, |
| "loss": 0.6634, |
| "step": 1850 |
| }, |
| { |
| "epoch": 0.9067973055725659, |
| "grad_norm": 0.35758960247039795, |
| "learning_rate": 4.027802267766991e-06, |
| "loss": 0.6292, |
| "step": 1851 |
| }, |
| { |
| "epoch": 0.9072872014696877, |
| "grad_norm": 0.34626519680023193, |
| "learning_rate": 4.026769896755235e-06, |
| "loss": 0.6252, |
| "step": 1852 |
| }, |
| { |
| "epoch": 0.9077770973668096, |
| "grad_norm": 0.34695932269096375, |
| "learning_rate": 4.025737110361551e-06, |
| "loss": 0.6054, |
| "step": 1853 |
| }, |
| { |
| "epoch": 0.9082669932639315, |
| "grad_norm": 0.3792075216770172, |
| "learning_rate": 4.0247039088669255e-06, |
| "loss": 0.6513, |
| "step": 1854 |
| }, |
| { |
| "epoch": 0.9087568891610532, |
| "grad_norm": 0.36036989092826843, |
| "learning_rate": 4.023670292552456e-06, |
| "loss": 0.5844, |
| "step": 1855 |
| }, |
| { |
| "epoch": 0.9092467850581751, |
| "grad_norm": 0.3645704388618469, |
| "learning_rate": 4.022636261699356e-06, |
| "loss": 0.6277, |
| "step": 1856 |
| }, |
| { |
| "epoch": 0.909736680955297, |
| "grad_norm": 0.3849221169948578, |
| "learning_rate": 4.021601816588948e-06, |
| "loss": 0.6477, |
| "step": 1857 |
| }, |
| { |
| "epoch": 0.9102265768524188, |
| "grad_norm": 0.356092244386673, |
| "learning_rate": 4.020566957502671e-06, |
| "loss": 0.6274, |
| "step": 1858 |
| }, |
| { |
| "epoch": 0.9107164727495407, |
| "grad_norm": 0.37304049730300903, |
| "learning_rate": 4.019531684722074e-06, |
| "loss": 0.5975, |
| "step": 1859 |
| }, |
| { |
| "epoch": 0.9112063686466626, |
| "grad_norm": 0.36938560009002686, |
| "learning_rate": 4.0184959985288194e-06, |
| "loss": 0.6473, |
| "step": 1860 |
| }, |
| { |
| "epoch": 0.9116962645437845, |
| "grad_norm": 0.34542253613471985, |
| "learning_rate": 4.017459899204681e-06, |
| "loss": 0.6165, |
| "step": 1861 |
| }, |
| { |
| "epoch": 0.9121861604409063, |
| "grad_norm": 0.3559544086456299, |
| "learning_rate": 4.016423387031548e-06, |
| "loss": 0.6188, |
| "step": 1862 |
| }, |
| { |
| "epoch": 0.9126760563380282, |
| "grad_norm": 0.36169734597206116, |
| "learning_rate": 4.015386462291419e-06, |
| "loss": 0.5922, |
| "step": 1863 |
| }, |
| { |
| "epoch": 0.9131659522351501, |
| "grad_norm": 0.396477609872818, |
| "learning_rate": 4.0143491252664055e-06, |
| "loss": 0.6297, |
| "step": 1864 |
| }, |
| { |
| "epoch": 0.9136558481322719, |
| "grad_norm": 0.35498955845832825, |
| "learning_rate": 4.013311376238732e-06, |
| "loss": 0.624, |
| "step": 1865 |
| }, |
| { |
| "epoch": 0.9141457440293937, |
| "grad_norm": 0.3794139623641968, |
| "learning_rate": 4.012273215490734e-06, |
| "loss": 0.6572, |
| "step": 1866 |
| }, |
| { |
| "epoch": 0.9146356399265156, |
| "grad_norm": 0.3604520261287689, |
| "learning_rate": 4.01123464330486e-06, |
| "loss": 0.63, |
| "step": 1867 |
| }, |
| { |
| "epoch": 0.9151255358236374, |
| "grad_norm": 0.36176711320877075, |
| "learning_rate": 4.010195659963671e-06, |
| "loss": 0.6218, |
| "step": 1868 |
| }, |
| { |
| "epoch": 0.9156154317207593, |
| "grad_norm": 0.360069215297699, |
| "learning_rate": 4.009156265749836e-06, |
| "loss": 0.6452, |
| "step": 1869 |
| }, |
| { |
| "epoch": 0.9161053276178812, |
| "grad_norm": 0.3508584499359131, |
| "learning_rate": 4.008116460946141e-06, |
| "loss": 0.6217, |
| "step": 1870 |
| }, |
| { |
| "epoch": 0.9165952235150031, |
| "grad_norm": 0.36335399746894836, |
| "learning_rate": 4.0070762458354805e-06, |
| "loss": 0.6274, |
| "step": 1871 |
| }, |
| { |
| "epoch": 0.9170851194121249, |
| "grad_norm": 0.3488854467868805, |
| "learning_rate": 4.0060356207008625e-06, |
| "loss": 0.6369, |
| "step": 1872 |
| }, |
| { |
| "epoch": 0.9175750153092468, |
| "grad_norm": 0.33648115396499634, |
| "learning_rate": 4.004994585825404e-06, |
| "loss": 0.5908, |
| "step": 1873 |
| }, |
| { |
| "epoch": 0.9180649112063687, |
| "grad_norm": 0.36617884039878845, |
| "learning_rate": 4.0039531414923365e-06, |
| "loss": 0.6186, |
| "step": 1874 |
| }, |
| { |
| "epoch": 0.9185548071034905, |
| "grad_norm": 0.3616359233856201, |
| "learning_rate": 4.0029112879850004e-06, |
| "loss": 0.6173, |
| "step": 1875 |
| }, |
| { |
| "epoch": 0.9190447030006124, |
| "grad_norm": 0.3591029644012451, |
| "learning_rate": 4.001869025586849e-06, |
| "loss": 0.6218, |
| "step": 1876 |
| }, |
| { |
| "epoch": 0.9195345988977343, |
| "grad_norm": 0.3484629988670349, |
| "learning_rate": 4.000826354581446e-06, |
| "loss": 0.624, |
| "step": 1877 |
| }, |
| { |
| "epoch": 0.920024494794856, |
| "grad_norm": 0.366221159696579, |
| "learning_rate": 3.9997832752524664e-06, |
| "loss": 0.6539, |
| "step": 1878 |
| }, |
| { |
| "epoch": 0.9205143906919779, |
| "grad_norm": 0.3641989529132843, |
| "learning_rate": 3.9987397878836976e-06, |
| "loss": 0.6072, |
| "step": 1879 |
| }, |
| { |
| "epoch": 0.9210042865890998, |
| "grad_norm": 0.3581095337867737, |
| "learning_rate": 3.997695892759035e-06, |
| "loss": 0.6071, |
| "step": 1880 |
| }, |
| { |
| "epoch": 0.9214941824862217, |
| "grad_norm": 0.3488609790802002, |
| "learning_rate": 3.996651590162488e-06, |
| "loss": 0.6015, |
| "step": 1881 |
| }, |
| { |
| "epoch": 0.9219840783833435, |
| "grad_norm": 0.3491750955581665, |
| "learning_rate": 3.995606880378176e-06, |
| "loss": 0.6407, |
| "step": 1882 |
| }, |
| { |
| "epoch": 0.9224739742804654, |
| "grad_norm": 0.3692910969257355, |
| "learning_rate": 3.994561763690328e-06, |
| "loss": 0.6435, |
| "step": 1883 |
| }, |
| { |
| "epoch": 0.9229638701775873, |
| "grad_norm": 0.3535357117652893, |
| "learning_rate": 3.993516240383285e-06, |
| "loss": 0.6346, |
| "step": 1884 |
| }, |
| { |
| "epoch": 0.9234537660747091, |
| "grad_norm": 0.3640367090702057, |
| "learning_rate": 3.992470310741497e-06, |
| "loss": 0.6062, |
| "step": 1885 |
| }, |
| { |
| "epoch": 0.923943661971831, |
| "grad_norm": 0.35146427154541016, |
| "learning_rate": 3.9914239750495276e-06, |
| "loss": 0.6195, |
| "step": 1886 |
| }, |
| { |
| "epoch": 0.9244335578689529, |
| "grad_norm": 0.36105847358703613, |
| "learning_rate": 3.990377233592048e-06, |
| "loss": 0.6403, |
| "step": 1887 |
| }, |
| { |
| "epoch": 0.9249234537660747, |
| "grad_norm": 0.3821764886379242, |
| "learning_rate": 3.989330086653841e-06, |
| "loss": 0.6302, |
| "step": 1888 |
| }, |
| { |
| "epoch": 0.9254133496631965, |
| "grad_norm": 0.3889336884021759, |
| "learning_rate": 3.988282534519799e-06, |
| "loss": 0.6222, |
| "step": 1889 |
| }, |
| { |
| "epoch": 0.9259032455603184, |
| "grad_norm": 0.3606071174144745, |
| "learning_rate": 3.9872345774749255e-06, |
| "loss": 0.6098, |
| "step": 1890 |
| }, |
| { |
| "epoch": 0.9263931414574403, |
| "grad_norm": 0.3542819619178772, |
| "learning_rate": 3.986186215804334e-06, |
| "loss": 0.6222, |
| "step": 1891 |
| }, |
| { |
| "epoch": 0.9268830373545621, |
| "grad_norm": 0.35070300102233887, |
| "learning_rate": 3.985137449793248e-06, |
| "loss": 0.6436, |
| "step": 1892 |
| }, |
| { |
| "epoch": 0.927372933251684, |
| "grad_norm": 0.35780608654022217, |
| "learning_rate": 3.984088279727e-06, |
| "loss": 0.6206, |
| "step": 1893 |
| }, |
| { |
| "epoch": 0.9278628291488059, |
| "grad_norm": 0.35544446110725403, |
| "learning_rate": 3.9830387058910345e-06, |
| "loss": 0.6367, |
| "step": 1894 |
| }, |
| { |
| "epoch": 0.9283527250459277, |
| "grad_norm": 0.34838807582855225, |
| "learning_rate": 3.981988728570904e-06, |
| "loss": 0.6406, |
| "step": 1895 |
| }, |
| { |
| "epoch": 0.9288426209430496, |
| "grad_norm": 0.35056522488594055, |
| "learning_rate": 3.980938348052272e-06, |
| "loss": 0.6362, |
| "step": 1896 |
| }, |
| { |
| "epoch": 0.9293325168401715, |
| "grad_norm": 0.36799871921539307, |
| "learning_rate": 3.97988756462091e-06, |
| "loss": 0.6235, |
| "step": 1897 |
| }, |
| { |
| "epoch": 0.9298224127372934, |
| "grad_norm": 0.37848344445228577, |
| "learning_rate": 3.978836378562702e-06, |
| "loss": 0.6282, |
| "step": 1898 |
| }, |
| { |
| "epoch": 0.9303123086344152, |
| "grad_norm": 0.36926591396331787, |
| "learning_rate": 3.977784790163638e-06, |
| "loss": 0.635, |
| "step": 1899 |
| }, |
| { |
| "epoch": 0.9308022045315371, |
| "grad_norm": 0.35432732105255127, |
| "learning_rate": 3.976732799709821e-06, |
| "loss": 0.6322, |
| "step": 1900 |
| }, |
| { |
| "epoch": 0.931292100428659, |
| "grad_norm": 0.36038362979888916, |
| "learning_rate": 3.97568040748746e-06, |
| "loss": 0.6028, |
| "step": 1901 |
| }, |
| { |
| "epoch": 0.9317819963257807, |
| "grad_norm": 0.3726102113723755, |
| "learning_rate": 3.974627613782876e-06, |
| "loss": 0.6231, |
| "step": 1902 |
| }, |
| { |
| "epoch": 0.9322718922229026, |
| "grad_norm": 0.36531907320022583, |
| "learning_rate": 3.973574418882498e-06, |
| "loss": 0.6362, |
| "step": 1903 |
| }, |
| { |
| "epoch": 0.9327617881200245, |
| "grad_norm": 0.3725537955760956, |
| "learning_rate": 3.9725208230728655e-06, |
| "loss": 0.6418, |
| "step": 1904 |
| }, |
| { |
| "epoch": 0.9332516840171463, |
| "grad_norm": 0.3566921353340149, |
| "learning_rate": 3.971466826640623e-06, |
| "loss": 0.644, |
| "step": 1905 |
| }, |
| { |
| "epoch": 0.9337415799142682, |
| "grad_norm": 0.35900595784187317, |
| "learning_rate": 3.970412429872529e-06, |
| "loss": 0.6519, |
| "step": 1906 |
| }, |
| { |
| "epoch": 0.9342314758113901, |
| "grad_norm": 0.36603233218193054, |
| "learning_rate": 3.969357633055448e-06, |
| "loss": 0.6341, |
| "step": 1907 |
| }, |
| { |
| "epoch": 0.934721371708512, |
| "grad_norm": 0.3567545413970947, |
| "learning_rate": 3.968302436476356e-06, |
| "loss": 0.6285, |
| "step": 1908 |
| }, |
| { |
| "epoch": 0.9352112676056338, |
| "grad_norm": 0.3554147183895111, |
| "learning_rate": 3.967246840422334e-06, |
| "loss": 0.6425, |
| "step": 1909 |
| }, |
| { |
| "epoch": 0.9357011635027557, |
| "grad_norm": 0.36624595522880554, |
| "learning_rate": 3.966190845180574e-06, |
| "loss": 0.5982, |
| "step": 1910 |
| }, |
| { |
| "epoch": 0.9361910593998776, |
| "grad_norm": 0.36004313826560974, |
| "learning_rate": 3.9651344510383775e-06, |
| "loss": 0.6278, |
| "step": 1911 |
| }, |
| { |
| "epoch": 0.9366809552969994, |
| "grad_norm": 0.35689929127693176, |
| "learning_rate": 3.964077658283151e-06, |
| "loss": 0.6367, |
| "step": 1912 |
| }, |
| { |
| "epoch": 0.9371708511941212, |
| "grad_norm": 0.35063254833221436, |
| "learning_rate": 3.963020467202413e-06, |
| "loss": 0.6005, |
| "step": 1913 |
| }, |
| { |
| "epoch": 0.9376607470912431, |
| "grad_norm": 0.37015673518180847, |
| "learning_rate": 3.96196287808379e-06, |
| "loss": 0.6258, |
| "step": 1914 |
| }, |
| { |
| "epoch": 0.9381506429883649, |
| "grad_norm": 0.3625577390193939, |
| "learning_rate": 3.960904891215015e-06, |
| "loss": 0.6365, |
| "step": 1915 |
| }, |
| { |
| "epoch": 0.9386405388854868, |
| "grad_norm": 0.3661051392555237, |
| "learning_rate": 3.9598465068839285e-06, |
| "loss": 0.6044, |
| "step": 1916 |
| }, |
| { |
| "epoch": 0.9391304347826087, |
| "grad_norm": 0.362802118062973, |
| "learning_rate": 3.958787725378483e-06, |
| "loss": 0.635, |
| "step": 1917 |
| }, |
| { |
| "epoch": 0.9396203306797306, |
| "grad_norm": 0.37044766545295715, |
| "learning_rate": 3.957728546986735e-06, |
| "loss": 0.6189, |
| "step": 1918 |
| }, |
| { |
| "epoch": 0.9401102265768524, |
| "grad_norm": 0.3538433611392975, |
| "learning_rate": 3.9566689719968535e-06, |
| "loss": 0.609, |
| "step": 1919 |
| }, |
| { |
| "epoch": 0.9406001224739743, |
| "grad_norm": 0.3680160343647003, |
| "learning_rate": 3.955609000697109e-06, |
| "loss": 0.6498, |
| "step": 1920 |
| }, |
| { |
| "epoch": 0.9410900183710962, |
| "grad_norm": 0.37171587347984314, |
| "learning_rate": 3.954548633375887e-06, |
| "loss": 0.6175, |
| "step": 1921 |
| }, |
| { |
| "epoch": 0.941579914268218, |
| "grad_norm": 0.33701273798942566, |
| "learning_rate": 3.953487870321674e-06, |
| "loss": 0.5954, |
| "step": 1922 |
| }, |
| { |
| "epoch": 0.9420698101653399, |
| "grad_norm": 0.3595403730869293, |
| "learning_rate": 3.95242671182307e-06, |
| "loss": 0.6421, |
| "step": 1923 |
| }, |
| { |
| "epoch": 0.9425597060624618, |
| "grad_norm": 0.34721899032592773, |
| "learning_rate": 3.951365158168778e-06, |
| "loss": 0.639, |
| "step": 1924 |
| }, |
| { |
| "epoch": 0.9430496019595835, |
| "grad_norm": 0.3664315640926361, |
| "learning_rate": 3.950303209647613e-06, |
| "loss": 0.6145, |
| "step": 1925 |
| }, |
| { |
| "epoch": 0.9435394978567054, |
| "grad_norm": 0.3695625066757202, |
| "learning_rate": 3.949240866548492e-06, |
| "loss": 0.6511, |
| "step": 1926 |
| }, |
| { |
| "epoch": 0.9440293937538273, |
| "grad_norm": 0.3734401762485504, |
| "learning_rate": 3.948178129160444e-06, |
| "loss": 0.627, |
| "step": 1927 |
| }, |
| { |
| "epoch": 0.9445192896509492, |
| "grad_norm": 0.3736937642097473, |
| "learning_rate": 3.947114997772602e-06, |
| "loss": 0.6006, |
| "step": 1928 |
| }, |
| { |
| "epoch": 0.945009185548071, |
| "grad_norm": 0.366105318069458, |
| "learning_rate": 3.946051472674211e-06, |
| "loss": 0.6031, |
| "step": 1929 |
| }, |
| { |
| "epoch": 0.9454990814451929, |
| "grad_norm": 0.3569345474243164, |
| "learning_rate": 3.944987554154616e-06, |
| "loss": 0.6117, |
| "step": 1930 |
| }, |
| { |
| "epoch": 0.9459889773423148, |
| "grad_norm": 0.35967040061950684, |
| "learning_rate": 3.943923242503275e-06, |
| "loss": 0.6417, |
| "step": 1931 |
| }, |
| { |
| "epoch": 0.9464788732394366, |
| "grad_norm": 0.365779846906662, |
| "learning_rate": 3.94285853800975e-06, |
| "loss": 0.6189, |
| "step": 1932 |
| }, |
| { |
| "epoch": 0.9469687691365585, |
| "grad_norm": 0.35282260179519653, |
| "learning_rate": 3.941793440963712e-06, |
| "loss": 0.6236, |
| "step": 1933 |
| }, |
| { |
| "epoch": 0.9474586650336804, |
| "grad_norm": 0.3678666055202484, |
| "learning_rate": 3.940727951654936e-06, |
| "loss": 0.6479, |
| "step": 1934 |
| }, |
| { |
| "epoch": 0.9479485609308022, |
| "grad_norm": 0.35487714409828186, |
| "learning_rate": 3.939662070373306e-06, |
| "loss": 0.6344, |
| "step": 1935 |
| }, |
| { |
| "epoch": 0.948438456827924, |
| "grad_norm": 0.3745673894882202, |
| "learning_rate": 3.938595797408812e-06, |
| "loss": 0.6327, |
| "step": 1936 |
| }, |
| { |
| "epoch": 0.9489283527250459, |
| "grad_norm": 0.34707069396972656, |
| "learning_rate": 3.93752913305155e-06, |
| "loss": 0.6264, |
| "step": 1937 |
| }, |
| { |
| "epoch": 0.9494182486221678, |
| "grad_norm": 0.3721786141395569, |
| "learning_rate": 3.936462077591722e-06, |
| "loss": 0.6469, |
| "step": 1938 |
| }, |
| { |
| "epoch": 0.9499081445192896, |
| "grad_norm": 0.36948245763778687, |
| "learning_rate": 3.93539463131964e-06, |
| "loss": 0.6323, |
| "step": 1939 |
| }, |
| { |
| "epoch": 0.9503980404164115, |
| "grad_norm": 0.3576546907424927, |
| "learning_rate": 3.934326794525718e-06, |
| "loss": 0.6413, |
| "step": 1940 |
| }, |
| { |
| "epoch": 0.9508879363135334, |
| "grad_norm": 0.35429394245147705, |
| "learning_rate": 3.933258567500477e-06, |
| "loss": 0.6133, |
| "step": 1941 |
| }, |
| { |
| "epoch": 0.9513778322106552, |
| "grad_norm": 0.34951600432395935, |
| "learning_rate": 3.932189950534548e-06, |
| "loss": 0.6254, |
| "step": 1942 |
| }, |
| { |
| "epoch": 0.9518677281077771, |
| "grad_norm": 0.35437116026878357, |
| "learning_rate": 3.931120943918661e-06, |
| "loss": 0.6434, |
| "step": 1943 |
| }, |
| { |
| "epoch": 0.952357624004899, |
| "grad_norm": 0.35042843222618103, |
| "learning_rate": 3.9300515479436595e-06, |
| "loss": 0.6221, |
| "step": 1944 |
| }, |
| { |
| "epoch": 0.9528475199020208, |
| "grad_norm": 0.34926679730415344, |
| "learning_rate": 3.928981762900489e-06, |
| "loss": 0.5836, |
| "step": 1945 |
| }, |
| { |
| "epoch": 0.9533374157991427, |
| "grad_norm": 0.3715307414531708, |
| "learning_rate": 3.927911589080201e-06, |
| "loss": 0.6155, |
| "step": 1946 |
| }, |
| { |
| "epoch": 0.9538273116962646, |
| "grad_norm": 0.35882413387298584, |
| "learning_rate": 3.926841026773952e-06, |
| "loss": 0.6186, |
| "step": 1947 |
| }, |
| { |
| "epoch": 0.9543172075933865, |
| "grad_norm": 0.37663209438323975, |
| "learning_rate": 3.925770076273008e-06, |
| "loss": 0.6565, |
| "step": 1948 |
| }, |
| { |
| "epoch": 0.9548071034905082, |
| "grad_norm": 0.36306554079055786, |
| "learning_rate": 3.924698737868737e-06, |
| "loss": 0.6346, |
| "step": 1949 |
| }, |
| { |
| "epoch": 0.9552969993876301, |
| "grad_norm": 0.3902411162853241, |
| "learning_rate": 3.923627011852612e-06, |
| "loss": 0.584, |
| "step": 1950 |
| }, |
| { |
| "epoch": 0.955786895284752, |
| "grad_norm": 0.36925509572029114, |
| "learning_rate": 3.922554898516216e-06, |
| "loss": 0.6358, |
| "step": 1951 |
| }, |
| { |
| "epoch": 0.9562767911818738, |
| "grad_norm": 0.3701590597629547, |
| "learning_rate": 3.9214823981512305e-06, |
| "loss": 0.6185, |
| "step": 1952 |
| }, |
| { |
| "epoch": 0.9567666870789957, |
| "grad_norm": 0.36690884828567505, |
| "learning_rate": 3.920409511049451e-06, |
| "loss": 0.6318, |
| "step": 1953 |
| }, |
| { |
| "epoch": 0.9572565829761176, |
| "grad_norm": 0.38318440318107605, |
| "learning_rate": 3.919336237502769e-06, |
| "loss": 0.6082, |
| "step": 1954 |
| }, |
| { |
| "epoch": 0.9577464788732394, |
| "grad_norm": 0.37501823902130127, |
| "learning_rate": 3.918262577803188e-06, |
| "loss": 0.6186, |
| "step": 1955 |
| }, |
| { |
| "epoch": 0.9582363747703613, |
| "grad_norm": 0.34495916962623596, |
| "learning_rate": 3.917188532242814e-06, |
| "loss": 0.5996, |
| "step": 1956 |
| }, |
| { |
| "epoch": 0.9587262706674832, |
| "grad_norm": 0.35945406556129456, |
| "learning_rate": 3.916114101113857e-06, |
| "loss": 0.5956, |
| "step": 1957 |
| }, |
| { |
| "epoch": 0.9592161665646051, |
| "grad_norm": 0.36524417996406555, |
| "learning_rate": 3.915039284708634e-06, |
| "loss": 0.6537, |
| "step": 1958 |
| }, |
| { |
| "epoch": 0.9597060624617268, |
| "grad_norm": 0.3438582420349121, |
| "learning_rate": 3.913964083319566e-06, |
| "loss": 0.5874, |
| "step": 1959 |
| }, |
| { |
| "epoch": 0.9601959583588487, |
| "grad_norm": 0.3767183721065521, |
| "learning_rate": 3.912888497239177e-06, |
| "loss": 0.6268, |
| "step": 1960 |
| }, |
| { |
| "epoch": 0.9606858542559706, |
| "grad_norm": 0.3726462125778198, |
| "learning_rate": 3.911812526760099e-06, |
| "loss": 0.6273, |
| "step": 1961 |
| }, |
| { |
| "epoch": 0.9611757501530924, |
| "grad_norm": 0.3737446367740631, |
| "learning_rate": 3.910736172175066e-06, |
| "loss": 0.6267, |
| "step": 1962 |
| }, |
| { |
| "epoch": 0.9616656460502143, |
| "grad_norm": 0.3493219017982483, |
| "learning_rate": 3.909659433776918e-06, |
| "loss": 0.618, |
| "step": 1963 |
| }, |
| { |
| "epoch": 0.9621555419473362, |
| "grad_norm": 0.37235260009765625, |
| "learning_rate": 3.908582311858597e-06, |
| "loss": 0.6447, |
| "step": 1964 |
| }, |
| { |
| "epoch": 0.9626454378444581, |
| "grad_norm": 0.37288960814476013, |
| "learning_rate": 3.907504806713154e-06, |
| "loss": 0.6307, |
| "step": 1965 |
| }, |
| { |
| "epoch": 0.9631353337415799, |
| "grad_norm": 0.3650355041027069, |
| "learning_rate": 3.906426918633738e-06, |
| "loss": 0.624, |
| "step": 1966 |
| }, |
| { |
| "epoch": 0.9636252296387018, |
| "grad_norm": 0.34496167302131653, |
| "learning_rate": 3.905348647913607e-06, |
| "loss": 0.6037, |
| "step": 1967 |
| }, |
| { |
| "epoch": 0.9641151255358237, |
| "grad_norm": 0.37034958600997925, |
| "learning_rate": 3.904269994846122e-06, |
| "loss": 0.608, |
| "step": 1968 |
| }, |
| { |
| "epoch": 0.9646050214329455, |
| "grad_norm": 0.3597949743270874, |
| "learning_rate": 3.903190959724748e-06, |
| "loss": 0.5981, |
| "step": 1969 |
| }, |
| { |
| "epoch": 0.9650949173300674, |
| "grad_norm": 0.3547936975955963, |
| "learning_rate": 3.902111542843052e-06, |
| "loss": 0.6195, |
| "step": 1970 |
| }, |
| { |
| "epoch": 0.9655848132271893, |
| "grad_norm": 0.3734746277332306, |
| "learning_rate": 3.9010317444947074e-06, |
| "loss": 0.6278, |
| "step": 1971 |
| }, |
| { |
| "epoch": 0.966074709124311, |
| "grad_norm": 0.3595636487007141, |
| "learning_rate": 3.899951564973491e-06, |
| "loss": 0.6236, |
| "step": 1972 |
| }, |
| { |
| "epoch": 0.9665646050214329, |
| "grad_norm": 0.3781930208206177, |
| "learning_rate": 3.898871004573282e-06, |
| "loss": 0.6224, |
| "step": 1973 |
| }, |
| { |
| "epoch": 0.9670545009185548, |
| "grad_norm": 0.35438016057014465, |
| "learning_rate": 3.897790063588065e-06, |
| "loss": 0.6445, |
| "step": 1974 |
| }, |
| { |
| "epoch": 0.9675443968156767, |
| "grad_norm": 0.361677348613739, |
| "learning_rate": 3.896708742311925e-06, |
| "loss": 0.6271, |
| "step": 1975 |
| }, |
| { |
| "epoch": 0.9680342927127985, |
| "grad_norm": 0.3686682879924774, |
| "learning_rate": 3.895627041039055e-06, |
| "loss": 0.6232, |
| "step": 1976 |
| }, |
| { |
| "epoch": 0.9685241886099204, |
| "grad_norm": 0.3676711320877075, |
| "learning_rate": 3.894544960063748e-06, |
| "loss": 0.6196, |
| "step": 1977 |
| }, |
| { |
| "epoch": 0.9690140845070423, |
| "grad_norm": 0.3637702167034149, |
| "learning_rate": 3.893462499680399e-06, |
| "loss": 0.6257, |
| "step": 1978 |
| }, |
| { |
| "epoch": 0.9695039804041641, |
| "grad_norm": 0.3684902489185333, |
| "learning_rate": 3.892379660183512e-06, |
| "loss": 0.6271, |
| "step": 1979 |
| }, |
| { |
| "epoch": 0.969993876301286, |
| "grad_norm": 0.35545337200164795, |
| "learning_rate": 3.891296441867689e-06, |
| "loss": 0.6133, |
| "step": 1980 |
| }, |
| { |
| "epoch": 0.9704837721984079, |
| "grad_norm": 0.3473877012729645, |
| "learning_rate": 3.890212845027637e-06, |
| "loss": 0.6283, |
| "step": 1981 |
| }, |
| { |
| "epoch": 0.9709736680955297, |
| "grad_norm": 0.3448870778083801, |
| "learning_rate": 3.8891288699581655e-06, |
| "loss": 0.6212, |
| "step": 1982 |
| }, |
| { |
| "epoch": 0.9714635639926515, |
| "grad_norm": 0.37230542302131653, |
| "learning_rate": 3.888044516954187e-06, |
| "loss": 0.6195, |
| "step": 1983 |
| }, |
| { |
| "epoch": 0.9719534598897734, |
| "grad_norm": 0.3552440106868744, |
| "learning_rate": 3.886959786310718e-06, |
| "loss": 0.663, |
| "step": 1984 |
| }, |
| { |
| "epoch": 0.9724433557868953, |
| "grad_norm": 0.36294302344322205, |
| "learning_rate": 3.885874678322874e-06, |
| "loss": 0.6145, |
| "step": 1985 |
| }, |
| { |
| "epoch": 0.9729332516840171, |
| "grad_norm": 0.3489057719707489, |
| "learning_rate": 3.884789193285879e-06, |
| "loss": 0.6149, |
| "step": 1986 |
| }, |
| { |
| "epoch": 0.973423147581139, |
| "grad_norm": 0.3702811002731323, |
| "learning_rate": 3.8837033314950544e-06, |
| "loss": 0.6065, |
| "step": 1987 |
| }, |
| { |
| "epoch": 0.9739130434782609, |
| "grad_norm": 0.35552167892456055, |
| "learning_rate": 3.882617093245826e-06, |
| "loss": 0.6494, |
| "step": 1988 |
| }, |
| { |
| "epoch": 0.9744029393753827, |
| "grad_norm": 0.3572336435317993, |
| "learning_rate": 3.881530478833724e-06, |
| "loss": 0.6326, |
| "step": 1989 |
| }, |
| { |
| "epoch": 0.9748928352725046, |
| "grad_norm": 0.3529970645904541, |
| "learning_rate": 3.880443488554377e-06, |
| "loss": 0.6277, |
| "step": 1990 |
| }, |
| { |
| "epoch": 0.9753827311696265, |
| "grad_norm": 0.37120121717453003, |
| "learning_rate": 3.879356122703518e-06, |
| "loss": 0.6376, |
| "step": 1991 |
| }, |
| { |
| "epoch": 0.9758726270667483, |
| "grad_norm": 0.38676661252975464, |
| "learning_rate": 3.878268381576984e-06, |
| "loss": 0.6096, |
| "step": 1992 |
| }, |
| { |
| "epoch": 0.9763625229638702, |
| "grad_norm": 0.3568069040775299, |
| "learning_rate": 3.87718026547071e-06, |
| "loss": 0.5951, |
| "step": 1993 |
| }, |
| { |
| "epoch": 0.9768524188609921, |
| "grad_norm": 0.35809725522994995, |
| "learning_rate": 3.876091774680737e-06, |
| "loss": 0.5991, |
| "step": 1994 |
| }, |
| { |
| "epoch": 0.977342314758114, |
| "grad_norm": 0.367899090051651, |
| "learning_rate": 3.875002909503205e-06, |
| "loss": 0.5961, |
| "step": 1995 |
| }, |
| { |
| "epoch": 0.9778322106552357, |
| "grad_norm": 0.36738499999046326, |
| "learning_rate": 3.873913670234358e-06, |
| "loss": 0.5969, |
| "step": 1996 |
| }, |
| { |
| "epoch": 0.9783221065523576, |
| "grad_norm": 0.36432838439941406, |
| "learning_rate": 3.87282405717054e-06, |
| "loss": 0.6235, |
| "step": 1997 |
| }, |
| { |
| "epoch": 0.9788120024494795, |
| "grad_norm": 0.3667569160461426, |
| "learning_rate": 3.8717340706081975e-06, |
| "loss": 0.6168, |
| "step": 1998 |
| }, |
| { |
| "epoch": 0.9793018983466013, |
| "grad_norm": 0.3636808395385742, |
| "learning_rate": 3.870643710843878e-06, |
| "loss": 0.6294, |
| "step": 1999 |
| }, |
| { |
| "epoch": 0.9797917942437232, |
| "grad_norm": 0.3527030646800995, |
| "learning_rate": 3.869552978174233e-06, |
| "loss": 0.5993, |
| "step": 2000 |
| }, |
| { |
| "epoch": 0.9802816901408451, |
| "grad_norm": 0.35959500074386597, |
| "learning_rate": 3.868461872896011e-06, |
| "loss": 0.6486, |
| "step": 2001 |
| }, |
| { |
| "epoch": 0.9807715860379669, |
| "grad_norm": 0.36117956042289734, |
| "learning_rate": 3.8673703953060685e-06, |
| "loss": 0.6335, |
| "step": 2002 |
| }, |
| { |
| "epoch": 0.9812614819350888, |
| "grad_norm": 0.3546423017978668, |
| "learning_rate": 3.8662785457013544e-06, |
| "loss": 0.6193, |
| "step": 2003 |
| }, |
| { |
| "epoch": 0.9817513778322107, |
| "grad_norm": 0.3523540496826172, |
| "learning_rate": 3.865186324378927e-06, |
| "loss": 0.6072, |
| "step": 2004 |
| }, |
| { |
| "epoch": 0.9822412737293326, |
| "grad_norm": 0.3531913757324219, |
| "learning_rate": 3.8640937316359415e-06, |
| "loss": 0.627, |
| "step": 2005 |
| }, |
| { |
| "epoch": 0.9827311696264543, |
| "grad_norm": 0.36261194944381714, |
| "learning_rate": 3.863000767769655e-06, |
| "loss": 0.6193, |
| "step": 2006 |
| }, |
| { |
| "epoch": 0.9832210655235762, |
| "grad_norm": 0.37107813358306885, |
| "learning_rate": 3.861907433077425e-06, |
| "loss": 0.613, |
| "step": 2007 |
| }, |
| { |
| "epoch": 0.9837109614206981, |
| "grad_norm": 0.3672920763492584, |
| "learning_rate": 3.860813727856712e-06, |
| "loss": 0.6213, |
| "step": 2008 |
| }, |
| { |
| "epoch": 0.9842008573178199, |
| "grad_norm": 0.35620445013046265, |
| "learning_rate": 3.859719652405074e-06, |
| "loss": 0.6233, |
| "step": 2009 |
| }, |
| { |
| "epoch": 0.9846907532149418, |
| "grad_norm": 0.380731999874115, |
| "learning_rate": 3.858625207020173e-06, |
| "loss": 0.5993, |
| "step": 2010 |
| }, |
| { |
| "epoch": 0.9851806491120637, |
| "grad_norm": 0.3719562888145447, |
| "learning_rate": 3.85753039199977e-06, |
| "loss": 0.6203, |
| "step": 2011 |
| }, |
| { |
| "epoch": 0.9856705450091855, |
| "grad_norm": 0.3641625940799713, |
| "learning_rate": 3.856435207641725e-06, |
| "loss": 0.6091, |
| "step": 2012 |
| }, |
| { |
| "epoch": 0.9861604409063074, |
| "grad_norm": 0.3648725152015686, |
| "learning_rate": 3.855339654244003e-06, |
| "loss": 0.6306, |
| "step": 2013 |
| }, |
| { |
| "epoch": 0.9866503368034293, |
| "grad_norm": 0.3548206090927124, |
| "learning_rate": 3.8542437321046654e-06, |
| "loss": 0.6358, |
| "step": 2014 |
| }, |
| { |
| "epoch": 0.9871402327005512, |
| "grad_norm": 0.3661384880542755, |
| "learning_rate": 3.853147441521875e-06, |
| "loss": 0.6308, |
| "step": 2015 |
| }, |
| { |
| "epoch": 0.987630128597673, |
| "grad_norm": 0.35083141922950745, |
| "learning_rate": 3.8520507827938955e-06, |
| "loss": 0.6251, |
| "step": 2016 |
| }, |
| { |
| "epoch": 0.9881200244947949, |
| "grad_norm": 0.37429776787757874, |
| "learning_rate": 3.850953756219089e-06, |
| "loss": 0.6421, |
| "step": 2017 |
| }, |
| { |
| "epoch": 0.9886099203919168, |
| "grad_norm": 0.3655894100666046, |
| "learning_rate": 3.849856362095921e-06, |
| "loss": 0.6497, |
| "step": 2018 |
| }, |
| { |
| "epoch": 0.9890998162890385, |
| "grad_norm": 0.3669481575489044, |
| "learning_rate": 3.848758600722953e-06, |
| "loss": 0.6157, |
| "step": 2019 |
| }, |
| { |
| "epoch": 0.9895897121861604, |
| "grad_norm": 0.3566364347934723, |
| "learning_rate": 3.847660472398849e-06, |
| "loss": 0.6148, |
| "step": 2020 |
| }, |
| { |
| "epoch": 0.9900796080832823, |
| "grad_norm": 0.3615729808807373, |
| "learning_rate": 3.846561977422373e-06, |
| "loss": 0.6266, |
| "step": 2021 |
| }, |
| { |
| "epoch": 0.9905695039804041, |
| "grad_norm": 0.37268856167793274, |
| "learning_rate": 3.845463116092387e-06, |
| "loss": 0.6189, |
| "step": 2022 |
| }, |
| { |
| "epoch": 0.991059399877526, |
| "grad_norm": 0.37262001633644104, |
| "learning_rate": 3.844363888707855e-06, |
| "loss": 0.6033, |
| "step": 2023 |
| }, |
| { |
| "epoch": 0.9915492957746479, |
| "grad_norm": 0.3532877564430237, |
| "learning_rate": 3.843264295567836e-06, |
| "loss": 0.6293, |
| "step": 2024 |
| }, |
| { |
| "epoch": 0.9920391916717698, |
| "grad_norm": 0.370309978723526, |
| "learning_rate": 3.842164336971496e-06, |
| "loss": 0.618, |
| "step": 2025 |
| }, |
| { |
| "epoch": 0.9925290875688916, |
| "grad_norm": 0.3688422739505768, |
| "learning_rate": 3.841064013218093e-06, |
| "loss": 0.6375, |
| "step": 2026 |
| }, |
| { |
| "epoch": 0.9930189834660135, |
| "grad_norm": 0.3712131977081299, |
| "learning_rate": 3.839963324606989e-06, |
| "loss": 0.637, |
| "step": 2027 |
| }, |
| { |
| "epoch": 0.9935088793631354, |
| "grad_norm": 0.348637193441391, |
| "learning_rate": 3.838862271437643e-06, |
| "loss": 0.6249, |
| "step": 2028 |
| }, |
| { |
| "epoch": 0.9939987752602572, |
| "grad_norm": 0.37826526165008545, |
| "learning_rate": 3.837760854009614e-06, |
| "loss": 0.613, |
| "step": 2029 |
| }, |
| { |
| "epoch": 0.994488671157379, |
| "grad_norm": 0.3614782989025116, |
| "learning_rate": 3.836659072622561e-06, |
| "loss": 0.6404, |
| "step": 2030 |
| }, |
| { |
| "epoch": 0.9949785670545009, |
| "grad_norm": 0.3549116849899292, |
| "learning_rate": 3.83555692757624e-06, |
| "loss": 0.6055, |
| "step": 2031 |
| }, |
| { |
| "epoch": 0.9954684629516228, |
| "grad_norm": 0.34219223260879517, |
| "learning_rate": 3.834454419170508e-06, |
| "loss": 0.6064, |
| "step": 2032 |
| }, |
| { |
| "epoch": 0.9959583588487446, |
| "grad_norm": 0.35754355788230896, |
| "learning_rate": 3.833351547705319e-06, |
| "loss": 0.6182, |
| "step": 2033 |
| }, |
| { |
| "epoch": 0.9964482547458665, |
| "grad_norm": 0.36640578508377075, |
| "learning_rate": 3.832248313480727e-06, |
| "loss": 0.6177, |
| "step": 2034 |
| }, |
| { |
| "epoch": 0.9969381506429884, |
| "grad_norm": 0.36644652485847473, |
| "learning_rate": 3.831144716796884e-06, |
| "loss": 0.6331, |
| "step": 2035 |
| }, |
| { |
| "epoch": 0.9974280465401102, |
| "grad_norm": 0.35853859782218933, |
| "learning_rate": 3.830040757954041e-06, |
| "loss": 0.6277, |
| "step": 2036 |
| }, |
| { |
| "epoch": 0.9979179424372321, |
| "grad_norm": 0.344949334859848, |
| "learning_rate": 3.828936437252549e-06, |
| "loss": 0.6289, |
| "step": 2037 |
| }, |
| { |
| "epoch": 0.998407838334354, |
| "grad_norm": 0.35342395305633545, |
| "learning_rate": 3.827831754992854e-06, |
| "loss": 0.6442, |
| "step": 2038 |
| }, |
| { |
| "epoch": 0.9988977342314758, |
| "grad_norm": 0.36239710450172424, |
| "learning_rate": 3.826726711475502e-06, |
| "loss": 0.633, |
| "step": 2039 |
| }, |
| { |
| "epoch": 0.9993876301285977, |
| "grad_norm": 0.35739865899086, |
| "learning_rate": 3.82562130700114e-06, |
| "loss": 0.6302, |
| "step": 2040 |
| }, |
| { |
| "epoch": 0.9998775260257196, |
| "grad_norm": 0.3577716648578644, |
| "learning_rate": 3.8245155418705075e-06, |
| "loss": 0.6137, |
| "step": 2041 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 6123, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 2041, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": false |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 1.8288154127512895e+19, |
| "train_batch_size": 12, |
| "trial_name": null, |
| "trial_params": null |
| } |