| { | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 4.993178717598909, | |
| "eval_steps": 500, | |
| "global_step": 915, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.005457025920873124, | |
| "grad_norm": 8.4054853913818, | |
| "learning_rate": 8.695652173913044e-07, | |
| "loss": 1.4358, | |
| "step": 1 | |
| }, | |
| { | |
| "epoch": 0.010914051841746248, | |
| "grad_norm": 8.27769761646866, | |
| "learning_rate": 1.7391304347826088e-06, | |
| "loss": 1.4226, | |
| "step": 2 | |
| }, | |
| { | |
| "epoch": 0.01637107776261937, | |
| "grad_norm": 8.322721166007087, | |
| "learning_rate": 2.6086956521739132e-06, | |
| "loss": 1.4269, | |
| "step": 3 | |
| }, | |
| { | |
| "epoch": 0.021828103683492497, | |
| "grad_norm": 7.801471051907299, | |
| "learning_rate": 3.4782608695652175e-06, | |
| "loss": 1.4294, | |
| "step": 4 | |
| }, | |
| { | |
| "epoch": 0.027285129604365622, | |
| "grad_norm": 6.35431414486227, | |
| "learning_rate": 4.347826086956522e-06, | |
| "loss": 1.3629, | |
| "step": 5 | |
| }, | |
| { | |
| "epoch": 0.03274215552523874, | |
| "grad_norm": 3.4381748863553043, | |
| "learning_rate": 5.2173913043478265e-06, | |
| "loss": 1.3331, | |
| "step": 6 | |
| }, | |
| { | |
| "epoch": 0.03819918144611187, | |
| "grad_norm": 2.674477084182652, | |
| "learning_rate": 6.086956521739132e-06, | |
| "loss": 1.3014, | |
| "step": 7 | |
| }, | |
| { | |
| "epoch": 0.04365620736698499, | |
| "grad_norm": 6.6546035127733765, | |
| "learning_rate": 6.956521739130435e-06, | |
| "loss": 1.3118, | |
| "step": 8 | |
| }, | |
| { | |
| "epoch": 0.04911323328785812, | |
| "grad_norm": 6.905025069917795, | |
| "learning_rate": 7.82608695652174e-06, | |
| "loss": 1.3125, | |
| "step": 9 | |
| }, | |
| { | |
| "epoch": 0.054570259208731244, | |
| "grad_norm": 6.814662162479713, | |
| "learning_rate": 8.695652173913044e-06, | |
| "loss": 1.3129, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.06002728512960437, | |
| "grad_norm": 6.340208147957234, | |
| "learning_rate": 9.565217391304349e-06, | |
| "loss": 1.2766, | |
| "step": 11 | |
| }, | |
| { | |
| "epoch": 0.06548431105047749, | |
| "grad_norm": 5.393237093948397, | |
| "learning_rate": 1.0434782608695653e-05, | |
| "loss": 1.2624, | |
| "step": 12 | |
| }, | |
| { | |
| "epoch": 0.07094133697135062, | |
| "grad_norm": 3.425534423837231, | |
| "learning_rate": 1.1304347826086957e-05, | |
| "loss": 1.2328, | |
| "step": 13 | |
| }, | |
| { | |
| "epoch": 0.07639836289222374, | |
| "grad_norm": 2.2647521966523274, | |
| "learning_rate": 1.2173913043478263e-05, | |
| "loss": 1.2134, | |
| "step": 14 | |
| }, | |
| { | |
| "epoch": 0.08185538881309687, | |
| "grad_norm": 2.2313353605470545, | |
| "learning_rate": 1.3043478260869566e-05, | |
| "loss": 1.1939, | |
| "step": 15 | |
| }, | |
| { | |
| "epoch": 0.08731241473396999, | |
| "grad_norm": 2.361898565307054, | |
| "learning_rate": 1.391304347826087e-05, | |
| "loss": 1.18, | |
| "step": 16 | |
| }, | |
| { | |
| "epoch": 0.0927694406548431, | |
| "grad_norm": 2.33791134918371, | |
| "learning_rate": 1.4782608695652174e-05, | |
| "loss": 1.1511, | |
| "step": 17 | |
| }, | |
| { | |
| "epoch": 0.09822646657571624, | |
| "grad_norm": 1.6798499615830327, | |
| "learning_rate": 1.565217391304348e-05, | |
| "loss": 1.1595, | |
| "step": 18 | |
| }, | |
| { | |
| "epoch": 0.10368349249658936, | |
| "grad_norm": 1.6970897953125714, | |
| "learning_rate": 1.6521739130434785e-05, | |
| "loss": 1.1375, | |
| "step": 19 | |
| }, | |
| { | |
| "epoch": 0.10914051841746249, | |
| "grad_norm": 1.6395457480905744, | |
| "learning_rate": 1.739130434782609e-05, | |
| "loss": 1.1345, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.1145975443383356, | |
| "grad_norm": 1.5035713589688318, | |
| "learning_rate": 1.8260869565217393e-05, | |
| "loss": 1.1337, | |
| "step": 21 | |
| }, | |
| { | |
| "epoch": 0.12005457025920874, | |
| "grad_norm": 1.225373073362722, | |
| "learning_rate": 1.9130434782608697e-05, | |
| "loss": 1.1186, | |
| "step": 22 | |
| }, | |
| { | |
| "epoch": 0.12551159618008187, | |
| "grad_norm": 1.5356355968160549, | |
| "learning_rate": 2e-05, | |
| "loss": 1.1286, | |
| "step": 23 | |
| }, | |
| { | |
| "epoch": 0.13096862210095497, | |
| "grad_norm": 1.1025414498974069, | |
| "learning_rate": 2.0869565217391306e-05, | |
| "loss": 1.0892, | |
| "step": 24 | |
| }, | |
| { | |
| "epoch": 0.1364256480218281, | |
| "grad_norm": 1.3350107601336392, | |
| "learning_rate": 2.173913043478261e-05, | |
| "loss": 1.0761, | |
| "step": 25 | |
| }, | |
| { | |
| "epoch": 0.14188267394270124, | |
| "grad_norm": 1.1929582575416837, | |
| "learning_rate": 2.2608695652173914e-05, | |
| "loss": 1.0883, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.14733969986357434, | |
| "grad_norm": 0.935495351823663, | |
| "learning_rate": 2.3478260869565222e-05, | |
| "loss": 1.0597, | |
| "step": 27 | |
| }, | |
| { | |
| "epoch": 0.15279672578444747, | |
| "grad_norm": 1.5455709498780492, | |
| "learning_rate": 2.4347826086956526e-05, | |
| "loss": 1.0595, | |
| "step": 28 | |
| }, | |
| { | |
| "epoch": 0.1582537517053206, | |
| "grad_norm": 1.1083733884089981, | |
| "learning_rate": 2.521739130434783e-05, | |
| "loss": 1.0629, | |
| "step": 29 | |
| }, | |
| { | |
| "epoch": 0.16371077762619374, | |
| "grad_norm": 1.7349180323187294, | |
| "learning_rate": 2.608695652173913e-05, | |
| "loss": 1.0855, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.16916780354706684, | |
| "grad_norm": 1.0670211570486026, | |
| "learning_rate": 2.6956521739130436e-05, | |
| "loss": 1.0503, | |
| "step": 31 | |
| }, | |
| { | |
| "epoch": 0.17462482946793997, | |
| "grad_norm": 1.6288820867791205, | |
| "learning_rate": 2.782608695652174e-05, | |
| "loss": 1.0613, | |
| "step": 32 | |
| }, | |
| { | |
| "epoch": 0.1800818553888131, | |
| "grad_norm": 1.329624751880889, | |
| "learning_rate": 2.8695652173913044e-05, | |
| "loss": 1.0592, | |
| "step": 33 | |
| }, | |
| { | |
| "epoch": 0.1855388813096862, | |
| "grad_norm": 1.1836294011167645, | |
| "learning_rate": 2.956521739130435e-05, | |
| "loss": 1.0546, | |
| "step": 34 | |
| }, | |
| { | |
| "epoch": 0.19099590723055934, | |
| "grad_norm": 1.4068311305836099, | |
| "learning_rate": 3.0434782608695656e-05, | |
| "loss": 1.0647, | |
| "step": 35 | |
| }, | |
| { | |
| "epoch": 0.19645293315143247, | |
| "grad_norm": 1.3722669810117456, | |
| "learning_rate": 3.130434782608696e-05, | |
| "loss": 1.0565, | |
| "step": 36 | |
| }, | |
| { | |
| "epoch": 0.2019099590723056, | |
| "grad_norm": 2.5377362025037993, | |
| "learning_rate": 3.2173913043478265e-05, | |
| "loss": 1.0801, | |
| "step": 37 | |
| }, | |
| { | |
| "epoch": 0.2073669849931787, | |
| "grad_norm": 1.3833891330993688, | |
| "learning_rate": 3.304347826086957e-05, | |
| "loss": 1.058, | |
| "step": 38 | |
| }, | |
| { | |
| "epoch": 0.21282401091405184, | |
| "grad_norm": 3.3467609300165893, | |
| "learning_rate": 3.391304347826087e-05, | |
| "loss": 1.039, | |
| "step": 39 | |
| }, | |
| { | |
| "epoch": 0.21828103683492497, | |
| "grad_norm": 2.5867789286474343, | |
| "learning_rate": 3.478260869565218e-05, | |
| "loss": 1.0357, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.22373806275579808, | |
| "grad_norm": 2.620000651253596, | |
| "learning_rate": 3.565217391304348e-05, | |
| "loss": 1.0629, | |
| "step": 41 | |
| }, | |
| { | |
| "epoch": 0.2291950886766712, | |
| "grad_norm": 2.4068260667323664, | |
| "learning_rate": 3.6521739130434786e-05, | |
| "loss": 1.0411, | |
| "step": 42 | |
| }, | |
| { | |
| "epoch": 0.23465211459754434, | |
| "grad_norm": 2.2613496975518754, | |
| "learning_rate": 3.739130434782609e-05, | |
| "loss": 1.0615, | |
| "step": 43 | |
| }, | |
| { | |
| "epoch": 0.24010914051841747, | |
| "grad_norm": 2.4302337424475295, | |
| "learning_rate": 3.8260869565217395e-05, | |
| "loss": 1.042, | |
| "step": 44 | |
| }, | |
| { | |
| "epoch": 0.24556616643929058, | |
| "grad_norm": 2.0626105805304604, | |
| "learning_rate": 3.91304347826087e-05, | |
| "loss": 1.007, | |
| "step": 45 | |
| }, | |
| { | |
| "epoch": 0.25102319236016374, | |
| "grad_norm": 2.0977725511989327, | |
| "learning_rate": 4e-05, | |
| "loss": 1.03, | |
| "step": 46 | |
| }, | |
| { | |
| "epoch": 0.25648021828103684, | |
| "grad_norm": 2.1512078484152757, | |
| "learning_rate": 4.0869565217391314e-05, | |
| "loss": 1.0346, | |
| "step": 47 | |
| }, | |
| { | |
| "epoch": 0.26193724420190995, | |
| "grad_norm": 1.5000259392405852, | |
| "learning_rate": 4.173913043478261e-05, | |
| "loss": 1.0324, | |
| "step": 48 | |
| }, | |
| { | |
| "epoch": 0.2673942701227831, | |
| "grad_norm": 1.6825832607122888, | |
| "learning_rate": 4.2608695652173916e-05, | |
| "loss": 1.0265, | |
| "step": 49 | |
| }, | |
| { | |
| "epoch": 0.2728512960436562, | |
| "grad_norm": 2.3652304527471, | |
| "learning_rate": 4.347826086956522e-05, | |
| "loss": 1.0355, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.2783083219645293, | |
| "grad_norm": 1.4430495759345763, | |
| "learning_rate": 4.4347826086956525e-05, | |
| "loss": 1.0204, | |
| "step": 51 | |
| }, | |
| { | |
| "epoch": 0.2837653478854025, | |
| "grad_norm": 1.9471116948950629, | |
| "learning_rate": 4.521739130434783e-05, | |
| "loss": 1.0233, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.2892223738062756, | |
| "grad_norm": 0.9851562319686198, | |
| "learning_rate": 4.608695652173913e-05, | |
| "loss": 1.0224, | |
| "step": 53 | |
| }, | |
| { | |
| "epoch": 0.2946793997271487, | |
| "grad_norm": 2.289997829572797, | |
| "learning_rate": 4.6956521739130444e-05, | |
| "loss": 1.0494, | |
| "step": 54 | |
| }, | |
| { | |
| "epoch": 0.30013642564802184, | |
| "grad_norm": 1.9221832471449953, | |
| "learning_rate": 4.782608695652174e-05, | |
| "loss": 1.0078, | |
| "step": 55 | |
| }, | |
| { | |
| "epoch": 0.30559345156889495, | |
| "grad_norm": 2.0382214762471964, | |
| "learning_rate": 4.869565217391305e-05, | |
| "loss": 1.0475, | |
| "step": 56 | |
| }, | |
| { | |
| "epoch": 0.31105047748976805, | |
| "grad_norm": 2.518003982101406, | |
| "learning_rate": 4.956521739130435e-05, | |
| "loss": 1.0277, | |
| "step": 57 | |
| }, | |
| { | |
| "epoch": 0.3165075034106412, | |
| "grad_norm": 1.810486050762184, | |
| "learning_rate": 5.043478260869566e-05, | |
| "loss": 1.0167, | |
| "step": 58 | |
| }, | |
| { | |
| "epoch": 0.3219645293315143, | |
| "grad_norm": 2.693889814467265, | |
| "learning_rate": 5.130434782608696e-05, | |
| "loss": 1.0236, | |
| "step": 59 | |
| }, | |
| { | |
| "epoch": 0.3274215552523875, | |
| "grad_norm": 1.859820799030674, | |
| "learning_rate": 5.217391304347826e-05, | |
| "loss": 1.0109, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.3328785811732606, | |
| "grad_norm": 2.1889623213157674, | |
| "learning_rate": 5.304347826086957e-05, | |
| "loss": 1.0222, | |
| "step": 61 | |
| }, | |
| { | |
| "epoch": 0.3383356070941337, | |
| "grad_norm": 2.300198913554314, | |
| "learning_rate": 5.391304347826087e-05, | |
| "loss": 1.0365, | |
| "step": 62 | |
| }, | |
| { | |
| "epoch": 0.34379263301500684, | |
| "grad_norm": 1.5430289249817781, | |
| "learning_rate": 5.478260869565218e-05, | |
| "loss": 1.0128, | |
| "step": 63 | |
| }, | |
| { | |
| "epoch": 0.34924965893587995, | |
| "grad_norm": 3.477710986140807, | |
| "learning_rate": 5.565217391304348e-05, | |
| "loss": 1.0071, | |
| "step": 64 | |
| }, | |
| { | |
| "epoch": 0.35470668485675305, | |
| "grad_norm": 2.7818030874339033, | |
| "learning_rate": 5.652173913043479e-05, | |
| "loss": 1.0202, | |
| "step": 65 | |
| }, | |
| { | |
| "epoch": 0.3601637107776262, | |
| "grad_norm": 2.6548687966990943, | |
| "learning_rate": 5.739130434782609e-05, | |
| "loss": 1.022, | |
| "step": 66 | |
| }, | |
| { | |
| "epoch": 0.3656207366984993, | |
| "grad_norm": 2.9183382843347623, | |
| "learning_rate": 5.82608695652174e-05, | |
| "loss": 1.0245, | |
| "step": 67 | |
| }, | |
| { | |
| "epoch": 0.3710777626193724, | |
| "grad_norm": 1.4627910570537404, | |
| "learning_rate": 5.91304347826087e-05, | |
| "loss": 0.9863, | |
| "step": 68 | |
| }, | |
| { | |
| "epoch": 0.3765347885402456, | |
| "grad_norm": 2.295615927686121, | |
| "learning_rate": 6.000000000000001e-05, | |
| "loss": 1.0038, | |
| "step": 69 | |
| }, | |
| { | |
| "epoch": 0.3819918144611187, | |
| "grad_norm": 2.463602586043397, | |
| "learning_rate": 6.086956521739131e-05, | |
| "loss": 1.0182, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.3874488403819918, | |
| "grad_norm": 2.388407526596309, | |
| "learning_rate": 6.173913043478262e-05, | |
| "loss": 1.0142, | |
| "step": 71 | |
| }, | |
| { | |
| "epoch": 0.39290586630286495, | |
| "grad_norm": 2.5037634590498357, | |
| "learning_rate": 6.260869565217392e-05, | |
| "loss": 1.0078, | |
| "step": 72 | |
| }, | |
| { | |
| "epoch": 0.39836289222373805, | |
| "grad_norm": 1.3685081163007409, | |
| "learning_rate": 6.347826086956523e-05, | |
| "loss": 1.0144, | |
| "step": 73 | |
| }, | |
| { | |
| "epoch": 0.4038199181446112, | |
| "grad_norm": 2.987414238598592, | |
| "learning_rate": 6.434782608695653e-05, | |
| "loss": 1.0227, | |
| "step": 74 | |
| }, | |
| { | |
| "epoch": 0.4092769440654843, | |
| "grad_norm": 2.285557770071163, | |
| "learning_rate": 6.521739130434783e-05, | |
| "loss": 1.0216, | |
| "step": 75 | |
| }, | |
| { | |
| "epoch": 0.4147339699863574, | |
| "grad_norm": 2.1674983433636137, | |
| "learning_rate": 6.608695652173914e-05, | |
| "loss": 1.0075, | |
| "step": 76 | |
| }, | |
| { | |
| "epoch": 0.4201909959072306, | |
| "grad_norm": 2.3056726088264905, | |
| "learning_rate": 6.695652173913044e-05, | |
| "loss": 0.9964, | |
| "step": 77 | |
| }, | |
| { | |
| "epoch": 0.4256480218281037, | |
| "grad_norm": 2.6041661067718525, | |
| "learning_rate": 6.782608695652175e-05, | |
| "loss": 0.9956, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.4311050477489768, | |
| "grad_norm": 1.837082219111084, | |
| "learning_rate": 6.869565217391305e-05, | |
| "loss": 1.0209, | |
| "step": 79 | |
| }, | |
| { | |
| "epoch": 0.43656207366984995, | |
| "grad_norm": 2.7126777057295275, | |
| "learning_rate": 6.956521739130436e-05, | |
| "loss": 1.0037, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.44201909959072305, | |
| "grad_norm": 2.782292772347231, | |
| "learning_rate": 7.043478260869566e-05, | |
| "loss": 1.0018, | |
| "step": 81 | |
| }, | |
| { | |
| "epoch": 0.44747612551159616, | |
| "grad_norm": 1.8194383009699067, | |
| "learning_rate": 7.130434782608696e-05, | |
| "loss": 1.0102, | |
| "step": 82 | |
| }, | |
| { | |
| "epoch": 0.4529331514324693, | |
| "grad_norm": 2.3506780994607177, | |
| "learning_rate": 7.217391304347827e-05, | |
| "loss": 1.0011, | |
| "step": 83 | |
| }, | |
| { | |
| "epoch": 0.4583901773533424, | |
| "grad_norm": 2.588474724733464, | |
| "learning_rate": 7.304347826086957e-05, | |
| "loss": 0.9918, | |
| "step": 84 | |
| }, | |
| { | |
| "epoch": 0.4638472032742155, | |
| "grad_norm": 2.966355401640028, | |
| "learning_rate": 7.391304347826088e-05, | |
| "loss": 1.0082, | |
| "step": 85 | |
| }, | |
| { | |
| "epoch": 0.4693042291950887, | |
| "grad_norm": 1.63650415845399, | |
| "learning_rate": 7.478260869565218e-05, | |
| "loss": 0.9965, | |
| "step": 86 | |
| }, | |
| { | |
| "epoch": 0.4747612551159618, | |
| "grad_norm": 3.039157506943887, | |
| "learning_rate": 7.565217391304349e-05, | |
| "loss": 1.0186, | |
| "step": 87 | |
| }, | |
| { | |
| "epoch": 0.48021828103683495, | |
| "grad_norm": 2.5849399835417897, | |
| "learning_rate": 7.652173913043479e-05, | |
| "loss": 1.0188, | |
| "step": 88 | |
| }, | |
| { | |
| "epoch": 0.48567530695770805, | |
| "grad_norm": 1.5912811102421822, | |
| "learning_rate": 7.73913043478261e-05, | |
| "loss": 0.9933, | |
| "step": 89 | |
| }, | |
| { | |
| "epoch": 0.49113233287858116, | |
| "grad_norm": 2.3088953829836547, | |
| "learning_rate": 7.82608695652174e-05, | |
| "loss": 1.0237, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.4965893587994543, | |
| "grad_norm": 2.965499963162432, | |
| "learning_rate": 7.91304347826087e-05, | |
| "loss": 0.9983, | |
| "step": 91 | |
| }, | |
| { | |
| "epoch": 0.5020463847203275, | |
| "grad_norm": 1.7790041385019335, | |
| "learning_rate": 8e-05, | |
| "loss": 1.0253, | |
| "step": 92 | |
| }, | |
| { | |
| "epoch": 0.5075034106412005, | |
| "grad_norm": 3.3482881631202175, | |
| "learning_rate": 7.999970857316263e-05, | |
| "loss": 1.0017, | |
| "step": 93 | |
| }, | |
| { | |
| "epoch": 0.5129604365620737, | |
| "grad_norm": 2.0737327995304757, | |
| "learning_rate": 7.999883429689698e-05, | |
| "loss": 1.0161, | |
| "step": 94 | |
| }, | |
| { | |
| "epoch": 0.5184174624829468, | |
| "grad_norm": 3.4718599185205252, | |
| "learning_rate": 7.999737718394245e-05, | |
| "loss": 1.0227, | |
| "step": 95 | |
| }, | |
| { | |
| "epoch": 0.5238744884038199, | |
| "grad_norm": 2.6905459489144494, | |
| "learning_rate": 7.999533725553111e-05, | |
| "loss": 1.0238, | |
| "step": 96 | |
| }, | |
| { | |
| "epoch": 0.529331514324693, | |
| "grad_norm": 2.643496365226572, | |
| "learning_rate": 7.999271454138746e-05, | |
| "loss": 0.9963, | |
| "step": 97 | |
| }, | |
| { | |
| "epoch": 0.5347885402455662, | |
| "grad_norm": 2.398472468783254, | |
| "learning_rate": 7.998950907972797e-05, | |
| "loss": 1.0067, | |
| "step": 98 | |
| }, | |
| { | |
| "epoch": 0.5402455661664393, | |
| "grad_norm": 2.4862794999589632, | |
| "learning_rate": 7.998572091726051e-05, | |
| "loss": 1.0016, | |
| "step": 99 | |
| }, | |
| { | |
| "epoch": 0.5457025920873124, | |
| "grad_norm": 2.703979830017154, | |
| "learning_rate": 7.99813501091837e-05, | |
| "loss": 0.9997, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.5511596180081856, | |
| "grad_norm": 2.638325870561522, | |
| "learning_rate": 7.997639671918607e-05, | |
| "loss": 0.9666, | |
| "step": 101 | |
| }, | |
| { | |
| "epoch": 0.5566166439290586, | |
| "grad_norm": 1.923660396652347, | |
| "learning_rate": 7.997086081944518e-05, | |
| "loss": 0.9901, | |
| "step": 102 | |
| }, | |
| { | |
| "epoch": 0.5620736698499318, | |
| "grad_norm": 1.8412763920684536, | |
| "learning_rate": 7.996474249062649e-05, | |
| "loss": 0.9837, | |
| "step": 103 | |
| }, | |
| { | |
| "epoch": 0.567530695770805, | |
| "grad_norm": 3.299280808379093, | |
| "learning_rate": 7.995804182188227e-05, | |
| "loss": 0.9911, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 0.572987721691678, | |
| "grad_norm": 2.370087367079061, | |
| "learning_rate": 7.995075891085025e-05, | |
| "loss": 0.9738, | |
| "step": 105 | |
| }, | |
| { | |
| "epoch": 0.5784447476125512, | |
| "grad_norm": 1.6382462163124434, | |
| "learning_rate": 7.994289386365223e-05, | |
| "loss": 0.9915, | |
| "step": 106 | |
| }, | |
| { | |
| "epoch": 0.5839017735334243, | |
| "grad_norm": 2.1387676364417407, | |
| "learning_rate": 7.99344467948925e-05, | |
| "loss": 0.9986, | |
| "step": 107 | |
| }, | |
| { | |
| "epoch": 0.5893587994542974, | |
| "grad_norm": 4.201439600148248, | |
| "learning_rate": 7.992541782765617e-05, | |
| "loss": 0.9959, | |
| "step": 108 | |
| }, | |
| { | |
| "epoch": 0.5948158253751705, | |
| "grad_norm": 1.9732356205001438, | |
| "learning_rate": 7.991580709350743e-05, | |
| "loss": 1.0058, | |
| "step": 109 | |
| }, | |
| { | |
| "epoch": 0.6002728512960437, | |
| "grad_norm": 5.603033692402054, | |
| "learning_rate": 7.990561473248756e-05, | |
| "loss": 1.0192, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.6057298772169167, | |
| "grad_norm": 4.217157974337054, | |
| "learning_rate": 7.989484089311294e-05, | |
| "loss": 1.0206, | |
| "step": 111 | |
| }, | |
| { | |
| "epoch": 0.6111869031377899, | |
| "grad_norm": 4.733955047783259, | |
| "learning_rate": 7.988348573237286e-05, | |
| "loss": 1.032, | |
| "step": 112 | |
| }, | |
| { | |
| "epoch": 0.616643929058663, | |
| "grad_norm": 4.030897341866623, | |
| "learning_rate": 7.987154941572726e-05, | |
| "loss": 1.0136, | |
| "step": 113 | |
| }, | |
| { | |
| "epoch": 0.6221009549795361, | |
| "grad_norm": 3.2852614632490287, | |
| "learning_rate": 7.985903211710429e-05, | |
| "loss": 0.984, | |
| "step": 114 | |
| }, | |
| { | |
| "epoch": 0.6275579809004093, | |
| "grad_norm": 2.8477242486115157, | |
| "learning_rate": 7.984593401889778e-05, | |
| "loss": 0.9861, | |
| "step": 115 | |
| }, | |
| { | |
| "epoch": 0.6330150068212824, | |
| "grad_norm": 2.979343225271941, | |
| "learning_rate": 7.98322553119646e-05, | |
| "loss": 1.0057, | |
| "step": 116 | |
| }, | |
| { | |
| "epoch": 0.6384720327421555, | |
| "grad_norm": 2.4230735574970295, | |
| "learning_rate": 7.981799619562186e-05, | |
| "loss": 0.9798, | |
| "step": 117 | |
| }, | |
| { | |
| "epoch": 0.6439290586630286, | |
| "grad_norm": 1.4903910249137124, | |
| "learning_rate": 7.980315687764404e-05, | |
| "loss": 0.9832, | |
| "step": 118 | |
| }, | |
| { | |
| "epoch": 0.6493860845839018, | |
| "grad_norm": 2.851390699017869, | |
| "learning_rate": 7.978773757425987e-05, | |
| "loss": 0.9859, | |
| "step": 119 | |
| }, | |
| { | |
| "epoch": 0.654843110504775, | |
| "grad_norm": 2.449200648456771, | |
| "learning_rate": 7.977173851014935e-05, | |
| "loss": 1.0016, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.660300136425648, | |
| "grad_norm": 1.3908196441719074, | |
| "learning_rate": 7.975515991844028e-05, | |
| "loss": 0.9925, | |
| "step": 121 | |
| }, | |
| { | |
| "epoch": 0.6657571623465212, | |
| "grad_norm": 2.709126199988088, | |
| "learning_rate": 7.973800204070497e-05, | |
| "loss": 1.0183, | |
| "step": 122 | |
| }, | |
| { | |
| "epoch": 0.6712141882673943, | |
| "grad_norm": 2.4950773390337284, | |
| "learning_rate": 7.972026512695677e-05, | |
| "loss": 0.9983, | |
| "step": 123 | |
| }, | |
| { | |
| "epoch": 0.6766712141882674, | |
| "grad_norm": 1.8156715188883938, | |
| "learning_rate": 7.970194943564626e-05, | |
| "loss": 0.9966, | |
| "step": 124 | |
| }, | |
| { | |
| "epoch": 0.6821282401091405, | |
| "grad_norm": 2.0124460248386837, | |
| "learning_rate": 7.968305523365768e-05, | |
| "loss": 0.9861, | |
| "step": 125 | |
| }, | |
| { | |
| "epoch": 0.6875852660300137, | |
| "grad_norm": 2.327548310536662, | |
| "learning_rate": 7.96635827963049e-05, | |
| "loss": 1.005, | |
| "step": 126 | |
| }, | |
| { | |
| "epoch": 0.6930422919508867, | |
| "grad_norm": 2.0384540649729046, | |
| "learning_rate": 7.964353240732744e-05, | |
| "loss": 0.9643, | |
| "step": 127 | |
| }, | |
| { | |
| "epoch": 0.6984993178717599, | |
| "grad_norm": 1.5857232283507334, | |
| "learning_rate": 7.96229043588864e-05, | |
| "loss": 0.9942, | |
| "step": 128 | |
| }, | |
| { | |
| "epoch": 0.703956343792633, | |
| "grad_norm": 2.6642249621882947, | |
| "learning_rate": 7.960169895156011e-05, | |
| "loss": 1.0059, | |
| "step": 129 | |
| }, | |
| { | |
| "epoch": 0.7094133697135061, | |
| "grad_norm": 2.003197328585847, | |
| "learning_rate": 7.95799164943398e-05, | |
| "loss": 0.9703, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.7148703956343793, | |
| "grad_norm": 2.080102961922397, | |
| "learning_rate": 7.955755730462512e-05, | |
| "loss": 0.9901, | |
| "step": 131 | |
| }, | |
| { | |
| "epoch": 0.7203274215552524, | |
| "grad_norm": 2.035255856600717, | |
| "learning_rate": 7.953462170821948e-05, | |
| "loss": 0.9782, | |
| "step": 132 | |
| }, | |
| { | |
| "epoch": 0.7257844474761255, | |
| "grad_norm": 2.352960888257511, | |
| "learning_rate": 7.951111003932526e-05, | |
| "loss": 0.9849, | |
| "step": 133 | |
| }, | |
| { | |
| "epoch": 0.7312414733969986, | |
| "grad_norm": 2.073525444432574, | |
| "learning_rate": 7.948702264053904e-05, | |
| "loss": 0.9678, | |
| "step": 134 | |
| }, | |
| { | |
| "epoch": 0.7366984993178718, | |
| "grad_norm": 2.3421982402930444, | |
| "learning_rate": 7.946235986284655e-05, | |
| "loss": 0.9783, | |
| "step": 135 | |
| }, | |
| { | |
| "epoch": 0.7421555252387448, | |
| "grad_norm": 1.5736240970851025, | |
| "learning_rate": 7.943712206561755e-05, | |
| "loss": 0.9841, | |
| "step": 136 | |
| }, | |
| { | |
| "epoch": 0.747612551159618, | |
| "grad_norm": 2.489956249226116, | |
| "learning_rate": 7.94113096166006e-05, | |
| "loss": 0.9639, | |
| "step": 137 | |
| }, | |
| { | |
| "epoch": 0.7530695770804912, | |
| "grad_norm": 1.5389665449534187, | |
| "learning_rate": 7.938492289191775e-05, | |
| "loss": 0.9749, | |
| "step": 138 | |
| }, | |
| { | |
| "epoch": 0.7585266030013642, | |
| "grad_norm": 2.8469902237174884, | |
| "learning_rate": 7.935796227605896e-05, | |
| "loss": 0.9704, | |
| "step": 139 | |
| }, | |
| { | |
| "epoch": 0.7639836289222374, | |
| "grad_norm": 2.0927134145359885, | |
| "learning_rate": 7.933042816187659e-05, | |
| "loss": 0.9851, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.7694406548431105, | |
| "grad_norm": 1.9936042857918566, | |
| "learning_rate": 7.930232095057961e-05, | |
| "loss": 1.0029, | |
| "step": 141 | |
| }, | |
| { | |
| "epoch": 0.7748976807639836, | |
| "grad_norm": 2.386887860120376, | |
| "learning_rate": 7.927364105172784e-05, | |
| "loss": 0.9846, | |
| "step": 142 | |
| }, | |
| { | |
| "epoch": 0.7803547066848567, | |
| "grad_norm": 2.3622753734620368, | |
| "learning_rate": 7.924438888322588e-05, | |
| "loss": 0.9793, | |
| "step": 143 | |
| }, | |
| { | |
| "epoch": 0.7858117326057299, | |
| "grad_norm": 2.1818780607623753, | |
| "learning_rate": 7.921456487131706e-05, | |
| "loss": 0.9788, | |
| "step": 144 | |
| }, | |
| { | |
| "epoch": 0.791268758526603, | |
| "grad_norm": 1.182780599547151, | |
| "learning_rate": 7.918416945057726e-05, | |
| "loss": 0.9643, | |
| "step": 145 | |
| }, | |
| { | |
| "epoch": 0.7967257844474761, | |
| "grad_norm": 2.8211946276144255, | |
| "learning_rate": 7.915320306390856e-05, | |
| "loss": 1.0119, | |
| "step": 146 | |
| }, | |
| { | |
| "epoch": 0.8021828103683493, | |
| "grad_norm": 2.388157316898997, | |
| "learning_rate": 7.912166616253276e-05, | |
| "loss": 0.9619, | |
| "step": 147 | |
| }, | |
| { | |
| "epoch": 0.8076398362892224, | |
| "grad_norm": 1.5731846927644901, | |
| "learning_rate": 7.908955920598482e-05, | |
| "loss": 0.9627, | |
| "step": 148 | |
| }, | |
| { | |
| "epoch": 0.8130968622100955, | |
| "grad_norm": 2.546269463790635, | |
| "learning_rate": 7.90568826621062e-05, | |
| "loss": 1.0079, | |
| "step": 149 | |
| }, | |
| { | |
| "epoch": 0.8185538881309686, | |
| "grad_norm": 1.671363635943377, | |
| "learning_rate": 7.902363700703797e-05, | |
| "loss": 0.9845, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.8240109140518418, | |
| "grad_norm": 2.1689468730120343, | |
| "learning_rate": 7.898982272521396e-05, | |
| "loss": 0.9834, | |
| "step": 151 | |
| }, | |
| { | |
| "epoch": 0.8294679399727148, | |
| "grad_norm": 2.5028825478335253, | |
| "learning_rate": 7.895544030935361e-05, | |
| "loss": 0.9785, | |
| "step": 152 | |
| }, | |
| { | |
| "epoch": 0.834924965893588, | |
| "grad_norm": 1.7649827216268308, | |
| "learning_rate": 7.892049026045487e-05, | |
| "loss": 0.9635, | |
| "step": 153 | |
| }, | |
| { | |
| "epoch": 0.8403819918144612, | |
| "grad_norm": 2.0516340222455143, | |
| "learning_rate": 7.888497308778685e-05, | |
| "loss": 0.9655, | |
| "step": 154 | |
| }, | |
| { | |
| "epoch": 0.8458390177353342, | |
| "grad_norm": 1.643525463034892, | |
| "learning_rate": 7.88488893088824e-05, | |
| "loss": 0.9771, | |
| "step": 155 | |
| }, | |
| { | |
| "epoch": 0.8512960436562074, | |
| "grad_norm": 2.5710664190674977, | |
| "learning_rate": 7.88122394495306e-05, | |
| "loss": 0.9734, | |
| "step": 156 | |
| }, | |
| { | |
| "epoch": 0.8567530695770805, | |
| "grad_norm": 1.8164577100205415, | |
| "learning_rate": 7.877502404376911e-05, | |
| "loss": 0.9765, | |
| "step": 157 | |
| }, | |
| { | |
| "epoch": 0.8622100954979536, | |
| "grad_norm": 2.675088927773194, | |
| "learning_rate": 7.87372436338763e-05, | |
| "loss": 0.973, | |
| "step": 158 | |
| }, | |
| { | |
| "epoch": 0.8676671214188267, | |
| "grad_norm": 1.6058702069668531, | |
| "learning_rate": 7.869889877036344e-05, | |
| "loss": 0.9927, | |
| "step": 159 | |
| }, | |
| { | |
| "epoch": 0.8731241473396999, | |
| "grad_norm": 2.726714878756649, | |
| "learning_rate": 7.865999001196666e-05, | |
| "loss": 0.9591, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.878581173260573, | |
| "grad_norm": 2.294598196915522, | |
| "learning_rate": 7.862051792563878e-05, | |
| "loss": 0.9958, | |
| "step": 161 | |
| }, | |
| { | |
| "epoch": 0.8840381991814461, | |
| "grad_norm": 2.1238876211739766, | |
| "learning_rate": 7.858048308654104e-05, | |
| "loss": 0.9747, | |
| "step": 162 | |
| }, | |
| { | |
| "epoch": 0.8894952251023193, | |
| "grad_norm": 2.02064179400817, | |
| "learning_rate": 7.853988607803479e-05, | |
| "loss": 0.9917, | |
| "step": 163 | |
| }, | |
| { | |
| "epoch": 0.8949522510231923, | |
| "grad_norm": 1.3894273755079185, | |
| "learning_rate": 7.849872749167291e-05, | |
| "loss": 0.9781, | |
| "step": 164 | |
| }, | |
| { | |
| "epoch": 0.9004092769440655, | |
| "grad_norm": 1.648978217855913, | |
| "learning_rate": 7.845700792719124e-05, | |
| "loss": 0.9698, | |
| "step": 165 | |
| }, | |
| { | |
| "epoch": 0.9058663028649386, | |
| "grad_norm": 3.51030441484047, | |
| "learning_rate": 7.84147279924998e-05, | |
| "loss": 0.9863, | |
| "step": 166 | |
| }, | |
| { | |
| "epoch": 0.9113233287858117, | |
| "grad_norm": 1.5500687413665548, | |
| "learning_rate": 7.837188830367401e-05, | |
| "loss": 0.9986, | |
| "step": 167 | |
| }, | |
| { | |
| "epoch": 0.9167803547066848, | |
| "grad_norm": 5.248314809323723, | |
| "learning_rate": 7.832848948494559e-05, | |
| "loss": 1.0028, | |
| "step": 168 | |
| }, | |
| { | |
| "epoch": 0.922237380627558, | |
| "grad_norm": 3.4816630300759988, | |
| "learning_rate": 7.828453216869356e-05, | |
| "loss": 1.0078, | |
| "step": 169 | |
| }, | |
| { | |
| "epoch": 0.927694406548431, | |
| "grad_norm": 5.427302474462801, | |
| "learning_rate": 7.824001699543502e-05, | |
| "loss": 1.0043, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.9331514324693042, | |
| "grad_norm": 4.826695970132574, | |
| "learning_rate": 7.819494461381577e-05, | |
| "loss": 1.0106, | |
| "step": 171 | |
| }, | |
| { | |
| "epoch": 0.9386084583901774, | |
| "grad_norm": 2.4392637209839765, | |
| "learning_rate": 7.81493156806009e-05, | |
| "loss": 0.9935, | |
| "step": 172 | |
| }, | |
| { | |
| "epoch": 0.9440654843110505, | |
| "grad_norm": 3.8244216268633036, | |
| "learning_rate": 7.81031308606652e-05, | |
| "loss": 0.976, | |
| "step": 173 | |
| }, | |
| { | |
| "epoch": 0.9495225102319236, | |
| "grad_norm": 2.8457649655988115, | |
| "learning_rate": 7.805639082698342e-05, | |
| "loss": 0.9786, | |
| "step": 174 | |
| }, | |
| { | |
| "epoch": 0.9549795361527967, | |
| "grad_norm": 2.731706315180723, | |
| "learning_rate": 7.800909626062064e-05, | |
| "loss": 0.9771, | |
| "step": 175 | |
| }, | |
| { | |
| "epoch": 0.9604365620736699, | |
| "grad_norm": 2.202992780626566, | |
| "learning_rate": 7.796124785072212e-05, | |
| "loss": 0.9915, | |
| "step": 176 | |
| }, | |
| { | |
| "epoch": 0.965893587994543, | |
| "grad_norm": 1.9587247755808166, | |
| "learning_rate": 7.791284629450338e-05, | |
| "loss": 0.9702, | |
| "step": 177 | |
| }, | |
| { | |
| "epoch": 0.9713506139154161, | |
| "grad_norm": 2.1379267827418538, | |
| "learning_rate": 7.78638922972401e-05, | |
| "loss": 0.9713, | |
| "step": 178 | |
| }, | |
| { | |
| "epoch": 0.9768076398362893, | |
| "grad_norm": 1.9276330849235372, | |
| "learning_rate": 7.781438657225764e-05, | |
| "loss": 0.9825, | |
| "step": 179 | |
| }, | |
| { | |
| "epoch": 0.9822646657571623, | |
| "grad_norm": 2.2740646257729353, | |
| "learning_rate": 7.77643298409209e-05, | |
| "loss": 0.9546, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.9877216916780355, | |
| "grad_norm": 1.6849230748486481, | |
| "learning_rate": 7.77137228326236e-05, | |
| "loss": 0.9878, | |
| "step": 181 | |
| }, | |
| { | |
| "epoch": 0.9931787175989086, | |
| "grad_norm": 1.930950153364107, | |
| "learning_rate": 7.766256628477775e-05, | |
| "loss": 0.9614, | |
| "step": 182 | |
| }, | |
| { | |
| "epoch": 0.9986357435197817, | |
| "grad_norm": 1.651071282168761, | |
| "learning_rate": 7.761086094280291e-05, | |
| "loss": 0.9834, | |
| "step": 183 | |
| }, | |
| { | |
| "epoch": 1.004092769440655, | |
| "grad_norm": 3.4237344018894738, | |
| "learning_rate": 7.755860756011531e-05, | |
| "loss": 1.6698, | |
| "step": 184 | |
| }, | |
| { | |
| "epoch": 1.009549795361528, | |
| "grad_norm": 2.550843838303363, | |
| "learning_rate": 7.750580689811683e-05, | |
| "loss": 0.9341, | |
| "step": 185 | |
| }, | |
| { | |
| "epoch": 1.015006821282401, | |
| "grad_norm": 1.2237286076246516, | |
| "learning_rate": 7.745245972618396e-05, | |
| "loss": 0.969, | |
| "step": 186 | |
| }, | |
| { | |
| "epoch": 1.0204638472032743, | |
| "grad_norm": 1.7619044634166372, | |
| "learning_rate": 7.739856682165661e-05, | |
| "loss": 0.9697, | |
| "step": 187 | |
| }, | |
| { | |
| "epoch": 1.0259208731241474, | |
| "grad_norm": 1.84237685056023, | |
| "learning_rate": 7.73441289698267e-05, | |
| "loss": 0.9706, | |
| "step": 188 | |
| }, | |
| { | |
| "epoch": 1.0313778990450204, | |
| "grad_norm": 2.246735296743444, | |
| "learning_rate": 7.728914696392677e-05, | |
| "loss": 0.9599, | |
| "step": 189 | |
| }, | |
| { | |
| "epoch": 1.0368349249658937, | |
| "grad_norm": 1.5477004028342645, | |
| "learning_rate": 7.723362160511844e-05, | |
| "loss": 0.9341, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 1.0422919508867667, | |
| "grad_norm": 1.628646582504066, | |
| "learning_rate": 7.717755370248069e-05, | |
| "loss": 0.9311, | |
| "step": 191 | |
| }, | |
| { | |
| "epoch": 1.0477489768076398, | |
| "grad_norm": 2.7608821191707436, | |
| "learning_rate": 7.712094407299809e-05, | |
| "loss": 0.9521, | |
| "step": 192 | |
| }, | |
| { | |
| "epoch": 1.053206002728513, | |
| "grad_norm": 1.6043767223562262, | |
| "learning_rate": 7.706379354154891e-05, | |
| "loss": 0.9591, | |
| "step": 193 | |
| }, | |
| { | |
| "epoch": 1.058663028649386, | |
| "grad_norm": 2.6819244792153336, | |
| "learning_rate": 7.700610294089309e-05, | |
| "loss": 0.9326, | |
| "step": 194 | |
| }, | |
| { | |
| "epoch": 1.0641200545702592, | |
| "grad_norm": 2.170173544833822, | |
| "learning_rate": 7.694787311166009e-05, | |
| "loss": 0.976, | |
| "step": 195 | |
| }, | |
| { | |
| "epoch": 1.0695770804911324, | |
| "grad_norm": 3.105411956285718, | |
| "learning_rate": 7.688910490233664e-05, | |
| "loss": 0.9667, | |
| "step": 196 | |
| }, | |
| { | |
| "epoch": 1.0750341064120055, | |
| "grad_norm": 2.0189173881999083, | |
| "learning_rate": 7.682979916925445e-05, | |
| "loss": 0.9432, | |
| "step": 197 | |
| }, | |
| { | |
| "epoch": 1.0804911323328785, | |
| "grad_norm": 3.0432618104084512, | |
| "learning_rate": 7.676995677657759e-05, | |
| "loss": 0.9494, | |
| "step": 198 | |
| }, | |
| { | |
| "epoch": 1.0859481582537518, | |
| "grad_norm": 2.6892073422610805, | |
| "learning_rate": 7.670957859629003e-05, | |
| "loss": 0.9705, | |
| "step": 199 | |
| }, | |
| { | |
| "epoch": 1.0914051841746248, | |
| "grad_norm": 1.7496055102061392, | |
| "learning_rate": 7.664866550818289e-05, | |
| "loss": 0.9446, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 1.096862210095498, | |
| "grad_norm": 2.1862992841348547, | |
| "learning_rate": 7.65872183998416e-05, | |
| "loss": 0.9442, | |
| "step": 201 | |
| }, | |
| { | |
| "epoch": 1.1023192360163712, | |
| "grad_norm": 2.311563125962559, | |
| "learning_rate": 7.652523816663298e-05, | |
| "loss": 0.9488, | |
| "step": 202 | |
| }, | |
| { | |
| "epoch": 1.1077762619372442, | |
| "grad_norm": 1.5707872490013692, | |
| "learning_rate": 7.646272571169221e-05, | |
| "loss": 0.9428, | |
| "step": 203 | |
| }, | |
| { | |
| "epoch": 1.1132332878581173, | |
| "grad_norm": 1.1293404204912825, | |
| "learning_rate": 7.639968194590961e-05, | |
| "loss": 0.9377, | |
| "step": 204 | |
| }, | |
| { | |
| "epoch": 1.1186903137789905, | |
| "grad_norm": 2.0853381432635087, | |
| "learning_rate": 7.633610778791746e-05, | |
| "loss": 0.9435, | |
| "step": 205 | |
| }, | |
| { | |
| "epoch": 1.1241473396998636, | |
| "grad_norm": 1.6237281426907448, | |
| "learning_rate": 7.627200416407656e-05, | |
| "loss": 0.9545, | |
| "step": 206 | |
| }, | |
| { | |
| "epoch": 1.1296043656207366, | |
| "grad_norm": 2.338186851948375, | |
| "learning_rate": 7.620737200846271e-05, | |
| "loss": 0.9547, | |
| "step": 207 | |
| }, | |
| { | |
| "epoch": 1.13506139154161, | |
| "grad_norm": 1.6338755462437418, | |
| "learning_rate": 7.614221226285317e-05, | |
| "loss": 0.9571, | |
| "step": 208 | |
| }, | |
| { | |
| "epoch": 1.140518417462483, | |
| "grad_norm": 2.4727023861577915, | |
| "learning_rate": 7.607652587671284e-05, | |
| "loss": 0.9449, | |
| "step": 209 | |
| }, | |
| { | |
| "epoch": 1.145975443383356, | |
| "grad_norm": 1.6214090487676778, | |
| "learning_rate": 7.601031380718053e-05, | |
| "loss": 0.9336, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 1.1514324693042293, | |
| "grad_norm": 2.8758801504575318, | |
| "learning_rate": 7.594357701905493e-05, | |
| "loss": 0.9478, | |
| "step": 211 | |
| }, | |
| { | |
| "epoch": 1.1568894952251023, | |
| "grad_norm": 2.343741537607968, | |
| "learning_rate": 7.58763164847806e-05, | |
| "loss": 0.9568, | |
| "step": 212 | |
| }, | |
| { | |
| "epoch": 1.1623465211459754, | |
| "grad_norm": 2.1342068216692374, | |
| "learning_rate": 7.580853318443378e-05, | |
| "loss": 0.9506, | |
| "step": 213 | |
| }, | |
| { | |
| "epoch": 1.1678035470668486, | |
| "grad_norm": 1.9405401625810694, | |
| "learning_rate": 7.574022810570811e-05, | |
| "loss": 0.9598, | |
| "step": 214 | |
| }, | |
| { | |
| "epoch": 1.1732605729877217, | |
| "grad_norm": 2.047006811241721, | |
| "learning_rate": 7.567140224390026e-05, | |
| "loss": 0.9398, | |
| "step": 215 | |
| }, | |
| { | |
| "epoch": 1.1787175989085947, | |
| "grad_norm": 1.2649054358403522, | |
| "learning_rate": 7.560205660189536e-05, | |
| "loss": 0.9496, | |
| "step": 216 | |
| }, | |
| { | |
| "epoch": 1.184174624829468, | |
| "grad_norm": 2.2873286146173024, | |
| "learning_rate": 7.553219219015247e-05, | |
| "loss": 0.9356, | |
| "step": 217 | |
| }, | |
| { | |
| "epoch": 1.189631650750341, | |
| "grad_norm": 1.7493032316806663, | |
| "learning_rate": 7.546181002668985e-05, | |
| "loss": 0.9433, | |
| "step": 218 | |
| }, | |
| { | |
| "epoch": 1.195088676671214, | |
| "grad_norm": 2.0223831026311516, | |
| "learning_rate": 7.539091113707003e-05, | |
| "loss": 0.9499, | |
| "step": 219 | |
| }, | |
| { | |
| "epoch": 1.2005457025920874, | |
| "grad_norm": 1.8819082575345663, | |
| "learning_rate": 7.5319496554385e-05, | |
| "loss": 0.9168, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 1.2060027285129604, | |
| "grad_norm": 1.4387903068149017, | |
| "learning_rate": 7.524756731924103e-05, | |
| "loss": 0.9594, | |
| "step": 221 | |
| }, | |
| { | |
| "epoch": 1.2114597544338335, | |
| "grad_norm": 1.7254879140852797, | |
| "learning_rate": 7.517512447974361e-05, | |
| "loss": 0.9591, | |
| "step": 222 | |
| }, | |
| { | |
| "epoch": 1.2169167803547067, | |
| "grad_norm": 1.6398981256156826, | |
| "learning_rate": 7.510216909148214e-05, | |
| "loss": 0.9597, | |
| "step": 223 | |
| }, | |
| { | |
| "epoch": 1.2223738062755798, | |
| "grad_norm": 1.9307517212835448, | |
| "learning_rate": 7.502870221751448e-05, | |
| "loss": 0.9531, | |
| "step": 224 | |
| }, | |
| { | |
| "epoch": 1.2278308321964528, | |
| "grad_norm": 2.901172104154794, | |
| "learning_rate": 7.495472492835159e-05, | |
| "loss": 0.9514, | |
| "step": 225 | |
| }, | |
| { | |
| "epoch": 1.233287858117326, | |
| "grad_norm": 1.2148442619506596, | |
| "learning_rate": 7.488023830194185e-05, | |
| "loss": 0.9525, | |
| "step": 226 | |
| }, | |
| { | |
| "epoch": 1.2387448840381992, | |
| "grad_norm": 3.0744826253657154, | |
| "learning_rate": 7.480524342365535e-05, | |
| "loss": 0.9334, | |
| "step": 227 | |
| }, | |
| { | |
| "epoch": 1.2442019099590724, | |
| "grad_norm": 2.512018245011184, | |
| "learning_rate": 7.472974138626809e-05, | |
| "loss": 0.9748, | |
| "step": 228 | |
| }, | |
| { | |
| "epoch": 1.2496589358799455, | |
| "grad_norm": 2.0167717510810075, | |
| "learning_rate": 7.465373328994608e-05, | |
| "loss": 0.9561, | |
| "step": 229 | |
| }, | |
| { | |
| "epoch": 1.2551159618008185, | |
| "grad_norm": 2.69272126540707, | |
| "learning_rate": 7.457722024222927e-05, | |
| "loss": 0.9535, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 1.2605729877216918, | |
| "grad_norm": 2.1489193698787075, | |
| "learning_rate": 7.450020335801544e-05, | |
| "loss": 0.9497, | |
| "step": 231 | |
| }, | |
| { | |
| "epoch": 1.2660300136425648, | |
| "grad_norm": 1.674495703338951, | |
| "learning_rate": 7.442268375954394e-05, | |
| "loss": 0.9468, | |
| "step": 232 | |
| }, | |
| { | |
| "epoch": 1.271487039563438, | |
| "grad_norm": 2.861569881618082, | |
| "learning_rate": 7.434466257637933e-05, | |
| "loss": 0.953, | |
| "step": 233 | |
| }, | |
| { | |
| "epoch": 1.2769440654843112, | |
| "grad_norm": 1.505253232519278, | |
| "learning_rate": 7.426614094539496e-05, | |
| "loss": 0.9478, | |
| "step": 234 | |
| }, | |
| { | |
| "epoch": 1.2824010914051842, | |
| "grad_norm": 2.1966862404860317, | |
| "learning_rate": 7.418712001075635e-05, | |
| "loss": 0.9434, | |
| "step": 235 | |
| }, | |
| { | |
| "epoch": 1.2878581173260573, | |
| "grad_norm": 1.8142529829887761, | |
| "learning_rate": 7.410760092390456e-05, | |
| "loss": 0.9185, | |
| "step": 236 | |
| }, | |
| { | |
| "epoch": 1.2933151432469305, | |
| "grad_norm": 2.042423103810234, | |
| "learning_rate": 7.402758484353938e-05, | |
| "loss": 0.947, | |
| "step": 237 | |
| }, | |
| { | |
| "epoch": 1.2987721691678036, | |
| "grad_norm": 2.807776807250219, | |
| "learning_rate": 7.394707293560248e-05, | |
| "loss": 0.9389, | |
| "step": 238 | |
| }, | |
| { | |
| "epoch": 1.3042291950886766, | |
| "grad_norm": 1.843186901692036, | |
| "learning_rate": 7.386606637326038e-05, | |
| "loss": 0.9495, | |
| "step": 239 | |
| }, | |
| { | |
| "epoch": 1.30968622100955, | |
| "grad_norm": 3.4432614710350853, | |
| "learning_rate": 7.378456633688742e-05, | |
| "loss": 0.944, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 1.315143246930423, | |
| "grad_norm": 2.9796135612528145, | |
| "learning_rate": 7.370257401404846e-05, | |
| "loss": 0.96, | |
| "step": 241 | |
| }, | |
| { | |
| "epoch": 1.320600272851296, | |
| "grad_norm": 2.4600116762336706, | |
| "learning_rate": 7.36200905994817e-05, | |
| "loss": 0.9522, | |
| "step": 242 | |
| }, | |
| { | |
| "epoch": 1.3260572987721693, | |
| "grad_norm": 2.5423218083756924, | |
| "learning_rate": 7.353711729508114e-05, | |
| "loss": 0.9332, | |
| "step": 243 | |
| }, | |
| { | |
| "epoch": 1.3315143246930423, | |
| "grad_norm": 1.5354349040627853, | |
| "learning_rate": 7.34536553098792e-05, | |
| "loss": 0.9649, | |
| "step": 244 | |
| }, | |
| { | |
| "epoch": 1.3369713506139154, | |
| "grad_norm": 3.3281556440961704, | |
| "learning_rate": 7.336970586002896e-05, | |
| "loss": 0.9727, | |
| "step": 245 | |
| }, | |
| { | |
| "epoch": 1.3424283765347886, | |
| "grad_norm": 1.9703109238860381, | |
| "learning_rate": 7.328527016878658e-05, | |
| "loss": 0.9457, | |
| "step": 246 | |
| }, | |
| { | |
| "epoch": 1.3478854024556617, | |
| "grad_norm": 3.5456743561386026, | |
| "learning_rate": 7.320034946649339e-05, | |
| "loss": 0.939, | |
| "step": 247 | |
| }, | |
| { | |
| "epoch": 1.3533424283765347, | |
| "grad_norm": 3.043515887992997, | |
| "learning_rate": 7.311494499055794e-05, | |
| "loss": 0.9647, | |
| "step": 248 | |
| }, | |
| { | |
| "epoch": 1.358799454297408, | |
| "grad_norm": 2.2225676170915265, | |
| "learning_rate": 7.302905798543809e-05, | |
| "loss": 0.9434, | |
| "step": 249 | |
| }, | |
| { | |
| "epoch": 1.364256480218281, | |
| "grad_norm": 2.316972441261721, | |
| "learning_rate": 7.294268970262271e-05, | |
| "loss": 0.9507, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 1.369713506139154, | |
| "grad_norm": 1.3452402784796123, | |
| "learning_rate": 7.28558414006136e-05, | |
| "loss": 0.9401, | |
| "step": 251 | |
| }, | |
| { | |
| "epoch": 1.3751705320600274, | |
| "grad_norm": 3.2470762419612194, | |
| "learning_rate": 7.276851434490706e-05, | |
| "loss": 0.9578, | |
| "step": 252 | |
| }, | |
| { | |
| "epoch": 1.3806275579809004, | |
| "grad_norm": 2.7728684726943884, | |
| "learning_rate": 7.268070980797547e-05, | |
| "loss": 0.947, | |
| "step": 253 | |
| }, | |
| { | |
| "epoch": 1.3860845839017735, | |
| "grad_norm": 2.9439862066425593, | |
| "learning_rate": 7.259242906924876e-05, | |
| "loss": 0.9588, | |
| "step": 254 | |
| }, | |
| { | |
| "epoch": 1.3915416098226467, | |
| "grad_norm": 2.4925113769599463, | |
| "learning_rate": 7.250367341509573e-05, | |
| "loss": 0.9481, | |
| "step": 255 | |
| }, | |
| { | |
| "epoch": 1.3969986357435198, | |
| "grad_norm": 2.4424975656100707, | |
| "learning_rate": 7.241444413880539e-05, | |
| "loss": 0.9477, | |
| "step": 256 | |
| }, | |
| { | |
| "epoch": 1.4024556616643928, | |
| "grad_norm": 2.181544082143711, | |
| "learning_rate": 7.232474254056801e-05, | |
| "loss": 0.9412, | |
| "step": 257 | |
| }, | |
| { | |
| "epoch": 1.407912687585266, | |
| "grad_norm": 1.7636209175878623, | |
| "learning_rate": 7.223456992745626e-05, | |
| "loss": 0.9347, | |
| "step": 258 | |
| }, | |
| { | |
| "epoch": 1.4133697135061392, | |
| "grad_norm": 2.146267438897671, | |
| "learning_rate": 7.214392761340611e-05, | |
| "loss": 0.9383, | |
| "step": 259 | |
| }, | |
| { | |
| "epoch": 1.4188267394270122, | |
| "grad_norm": 1.8385114341486435, | |
| "learning_rate": 7.205281691919769e-05, | |
| "loss": 0.9619, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 1.4242837653478855, | |
| "grad_norm": 1.0980340436389295, | |
| "learning_rate": 7.196123917243609e-05, | |
| "loss": 0.9473, | |
| "step": 261 | |
| }, | |
| { | |
| "epoch": 1.4297407912687585, | |
| "grad_norm": 1.9383750019873172, | |
| "learning_rate": 7.186919570753194e-05, | |
| "loss": 0.9375, | |
| "step": 262 | |
| }, | |
| { | |
| "epoch": 1.4351978171896316, | |
| "grad_norm": 1.6119364273709456, | |
| "learning_rate": 7.177668786568207e-05, | |
| "loss": 0.9494, | |
| "step": 263 | |
| }, | |
| { | |
| "epoch": 1.4406548431105048, | |
| "grad_norm": 2.1786353908626364, | |
| "learning_rate": 7.168371699484984e-05, | |
| "loss": 0.9239, | |
| "step": 264 | |
| }, | |
| { | |
| "epoch": 1.446111869031378, | |
| "grad_norm": 1.2435906317794803, | |
| "learning_rate": 7.159028444974562e-05, | |
| "loss": 0.9503, | |
| "step": 265 | |
| }, | |
| { | |
| "epoch": 1.451568894952251, | |
| "grad_norm": 1.2927889875300842, | |
| "learning_rate": 7.149639159180695e-05, | |
| "loss": 0.9302, | |
| "step": 266 | |
| }, | |
| { | |
| "epoch": 1.4570259208731242, | |
| "grad_norm": 1.3968086773426196, | |
| "learning_rate": 7.140203978917875e-05, | |
| "loss": 0.9501, | |
| "step": 267 | |
| }, | |
| { | |
| "epoch": 1.4624829467939973, | |
| "grad_norm": 1.8563809213966032, | |
| "learning_rate": 7.130723041669342e-05, | |
| "loss": 0.9433, | |
| "step": 268 | |
| }, | |
| { | |
| "epoch": 1.4679399727148703, | |
| "grad_norm": 1.8388340071411, | |
| "learning_rate": 7.121196485585071e-05, | |
| "loss": 0.9297, | |
| "step": 269 | |
| }, | |
| { | |
| "epoch": 1.4733969986357436, | |
| "grad_norm": 1.6925793007469925, | |
| "learning_rate": 7.111624449479769e-05, | |
| "loss": 0.9584, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 1.4788540245566166, | |
| "grad_norm": 1.549989447828772, | |
| "learning_rate": 7.102007072830847e-05, | |
| "loss": 0.9519, | |
| "step": 271 | |
| }, | |
| { | |
| "epoch": 1.4843110504774897, | |
| "grad_norm": 1.4412184055184636, | |
| "learning_rate": 7.092344495776387e-05, | |
| "loss": 0.9317, | |
| "step": 272 | |
| }, | |
| { | |
| "epoch": 1.489768076398363, | |
| "grad_norm": 2.3051269764350955, | |
| "learning_rate": 7.082636859113104e-05, | |
| "loss": 0.9262, | |
| "step": 273 | |
| }, | |
| { | |
| "epoch": 1.495225102319236, | |
| "grad_norm": 1.4684364353103458, | |
| "learning_rate": 7.072884304294289e-05, | |
| "loss": 0.9479, | |
| "step": 274 | |
| }, | |
| { | |
| "epoch": 1.500682128240109, | |
| "grad_norm": 1.8345686209311367, | |
| "learning_rate": 7.063086973427754e-05, | |
| "loss": 0.9554, | |
| "step": 275 | |
| }, | |
| { | |
| "epoch": 1.5061391541609823, | |
| "grad_norm": 1.2709699073133172, | |
| "learning_rate": 7.053245009273753e-05, | |
| "loss": 0.9412, | |
| "step": 276 | |
| }, | |
| { | |
| "epoch": 1.5115961800818554, | |
| "grad_norm": 1.9685573107871919, | |
| "learning_rate": 7.043358555242914e-05, | |
| "loss": 0.9543, | |
| "step": 277 | |
| }, | |
| { | |
| "epoch": 1.5170532060027284, | |
| "grad_norm": 1.8665000478114042, | |
| "learning_rate": 7.033427755394137e-05, | |
| "loss": 0.9238, | |
| "step": 278 | |
| }, | |
| { | |
| "epoch": 1.5225102319236017, | |
| "grad_norm": 1.5383694260036942, | |
| "learning_rate": 7.023452754432503e-05, | |
| "loss": 0.951, | |
| "step": 279 | |
| }, | |
| { | |
| "epoch": 1.5279672578444747, | |
| "grad_norm": 1.6875275552125621, | |
| "learning_rate": 7.013433697707159e-05, | |
| "loss": 0.9611, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 1.5334242837653478, | |
| "grad_norm": 1.939216526044695, | |
| "learning_rate": 7.003370731209207e-05, | |
| "loss": 0.9387, | |
| "step": 281 | |
| }, | |
| { | |
| "epoch": 1.538881309686221, | |
| "grad_norm": 1.3819701612894104, | |
| "learning_rate": 6.993264001569574e-05, | |
| "loss": 0.9421, | |
| "step": 282 | |
| }, | |
| { | |
| "epoch": 1.544338335607094, | |
| "grad_norm": 1.660535171681228, | |
| "learning_rate": 6.983113656056867e-05, | |
| "loss": 0.9286, | |
| "step": 283 | |
| }, | |
| { | |
| "epoch": 1.5497953615279672, | |
| "grad_norm": 2.3185193154328876, | |
| "learning_rate": 6.972919842575247e-05, | |
| "loss": 0.9312, | |
| "step": 284 | |
| }, | |
| { | |
| "epoch": 1.5552523874488404, | |
| "grad_norm": 0.9052415588070069, | |
| "learning_rate": 6.96268270966225e-05, | |
| "loss": 0.9056, | |
| "step": 285 | |
| }, | |
| { | |
| "epoch": 1.5607094133697135, | |
| "grad_norm": 1.4561558929000948, | |
| "learning_rate": 6.952402406486644e-05, | |
| "loss": 0.9381, | |
| "step": 286 | |
| }, | |
| { | |
| "epoch": 1.5661664392905865, | |
| "grad_norm": 1.543574571453614, | |
| "learning_rate": 6.942079082846239e-05, | |
| "loss": 0.9584, | |
| "step": 287 | |
| }, | |
| { | |
| "epoch": 1.5716234652114598, | |
| "grad_norm": 1.499614067677993, | |
| "learning_rate": 6.931712889165711e-05, | |
| "loss": 0.9384, | |
| "step": 288 | |
| }, | |
| { | |
| "epoch": 1.5770804911323328, | |
| "grad_norm": 1.6336048217930654, | |
| "learning_rate": 6.921303976494416e-05, | |
| "loss": 0.9529, | |
| "step": 289 | |
| }, | |
| { | |
| "epoch": 1.5825375170532059, | |
| "grad_norm": 2.0168602944752982, | |
| "learning_rate": 6.910852496504175e-05, | |
| "loss": 0.9137, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 1.5879945429740792, | |
| "grad_norm": 1.2730565942717105, | |
| "learning_rate": 6.900358601487079e-05, | |
| "loss": 0.9349, | |
| "step": 291 | |
| }, | |
| { | |
| "epoch": 1.5934515688949522, | |
| "grad_norm": 1.9190133574494301, | |
| "learning_rate": 6.889822444353258e-05, | |
| "loss": 0.9431, | |
| "step": 292 | |
| }, | |
| { | |
| "epoch": 1.5989085948158253, | |
| "grad_norm": 1.6265733490246796, | |
| "learning_rate": 6.87924417862866e-05, | |
| "loss": 0.9532, | |
| "step": 293 | |
| }, | |
| { | |
| "epoch": 1.6043656207366985, | |
| "grad_norm": 1.6623018948743888, | |
| "learning_rate": 6.868623958452812e-05, | |
| "loss": 0.9471, | |
| "step": 294 | |
| }, | |
| { | |
| "epoch": 1.6098226466575716, | |
| "grad_norm": 0.8931957332544022, | |
| "learning_rate": 6.857961938576573e-05, | |
| "loss": 0.935, | |
| "step": 295 | |
| }, | |
| { | |
| "epoch": 1.6152796725784446, | |
| "grad_norm": 1.954298138706908, | |
| "learning_rate": 6.84725827435988e-05, | |
| "loss": 0.9281, | |
| "step": 296 | |
| }, | |
| { | |
| "epoch": 1.620736698499318, | |
| "grad_norm": 1.5596400465604372, | |
| "learning_rate": 6.83651312176948e-05, | |
| "loss": 0.9291, | |
| "step": 297 | |
| }, | |
| { | |
| "epoch": 1.626193724420191, | |
| "grad_norm": 1.748314687412271, | |
| "learning_rate": 6.825726637376669e-05, | |
| "loss": 0.9491, | |
| "step": 298 | |
| }, | |
| { | |
| "epoch": 1.631650750341064, | |
| "grad_norm": 1.5797658678155715, | |
| "learning_rate": 6.814898978354997e-05, | |
| "loss": 0.9409, | |
| "step": 299 | |
| }, | |
| { | |
| "epoch": 1.6371077762619373, | |
| "grad_norm": 1.433746877281443, | |
| "learning_rate": 6.804030302477985e-05, | |
| "loss": 0.9199, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 1.6425648021828103, | |
| "grad_norm": 1.581155799885627, | |
| "learning_rate": 6.793120768116825e-05, | |
| "loss": 0.923, | |
| "step": 301 | |
| }, | |
| { | |
| "epoch": 1.6480218281036834, | |
| "grad_norm": 2.1317295688148445, | |
| "learning_rate": 6.782170534238073e-05, | |
| "loss": 0.9147, | |
| "step": 302 | |
| }, | |
| { | |
| "epoch": 1.6534788540245566, | |
| "grad_norm": 1.3648515934883898, | |
| "learning_rate": 6.771179760401329e-05, | |
| "loss": 0.9261, | |
| "step": 303 | |
| }, | |
| { | |
| "epoch": 1.65893587994543, | |
| "grad_norm": 1.3152685343318224, | |
| "learning_rate": 6.760148606756916e-05, | |
| "loss": 0.929, | |
| "step": 304 | |
| }, | |
| { | |
| "epoch": 1.6643929058663027, | |
| "grad_norm": 1.5395500820286134, | |
| "learning_rate": 6.749077234043545e-05, | |
| "loss": 0.9204, | |
| "step": 305 | |
| }, | |
| { | |
| "epoch": 1.669849931787176, | |
| "grad_norm": 1.2417107057753007, | |
| "learning_rate": 6.737965803585975e-05, | |
| "loss": 0.9459, | |
| "step": 306 | |
| }, | |
| { | |
| "epoch": 1.6753069577080493, | |
| "grad_norm": 2.5080349522146173, | |
| "learning_rate": 6.726814477292654e-05, | |
| "loss": 0.9374, | |
| "step": 307 | |
| }, | |
| { | |
| "epoch": 1.680763983628922, | |
| "grad_norm": 1.0409667518300874, | |
| "learning_rate": 6.715623417653372e-05, | |
| "loss": 0.9348, | |
| "step": 308 | |
| }, | |
| { | |
| "epoch": 1.6862210095497954, | |
| "grad_norm": 1.4159003070105012, | |
| "learning_rate": 6.704392787736884e-05, | |
| "loss": 0.9429, | |
| "step": 309 | |
| }, | |
| { | |
| "epoch": 1.6916780354706686, | |
| "grad_norm": 1.4352837307400224, | |
| "learning_rate": 6.69312275118854e-05, | |
| "loss": 0.9355, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 1.6971350613915415, | |
| "grad_norm": 2.217803527040767, | |
| "learning_rate": 6.681813472227893e-05, | |
| "loss": 0.9133, | |
| "step": 311 | |
| }, | |
| { | |
| "epoch": 1.7025920873124147, | |
| "grad_norm": 1.6239667118361827, | |
| "learning_rate": 6.670465115646313e-05, | |
| "loss": 0.9605, | |
| "step": 312 | |
| }, | |
| { | |
| "epoch": 1.708049113233288, | |
| "grad_norm": 1.6735982665826579, | |
| "learning_rate": 6.659077846804586e-05, | |
| "loss": 0.9508, | |
| "step": 313 | |
| }, | |
| { | |
| "epoch": 1.7135061391541608, | |
| "grad_norm": 1.6595574263156625, | |
| "learning_rate": 6.647651831630496e-05, | |
| "loss": 0.9255, | |
| "step": 314 | |
| }, | |
| { | |
| "epoch": 1.718963165075034, | |
| "grad_norm": 2.1924142398412374, | |
| "learning_rate": 6.63618723661642e-05, | |
| "loss": 0.9512, | |
| "step": 315 | |
| }, | |
| { | |
| "epoch": 1.7244201909959074, | |
| "grad_norm": 1.1050184457511385, | |
| "learning_rate": 6.624684228816887e-05, | |
| "loss": 0.9412, | |
| "step": 316 | |
| }, | |
| { | |
| "epoch": 1.7298772169167802, | |
| "grad_norm": 2.013890918931931, | |
| "learning_rate": 6.61314297584616e-05, | |
| "loss": 0.9637, | |
| "step": 317 | |
| }, | |
| { | |
| "epoch": 1.7353342428376535, | |
| "grad_norm": 1.6263566377440777, | |
| "learning_rate": 6.601563645875779e-05, | |
| "loss": 0.9604, | |
| "step": 318 | |
| }, | |
| { | |
| "epoch": 1.7407912687585267, | |
| "grad_norm": 1.6785802235449294, | |
| "learning_rate": 6.58994640763212e-05, | |
| "loss": 0.9508, | |
| "step": 319 | |
| }, | |
| { | |
| "epoch": 1.7462482946793996, | |
| "grad_norm": 2.332156868426827, | |
| "learning_rate": 6.578291430393935e-05, | |
| "loss": 0.943, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 1.7517053206002728, | |
| "grad_norm": 1.501192527788078, | |
| "learning_rate": 6.566598883989879e-05, | |
| "loss": 0.9405, | |
| "step": 321 | |
| }, | |
| { | |
| "epoch": 1.7571623465211461, | |
| "grad_norm": 3.091321379287327, | |
| "learning_rate": 6.554868938796044e-05, | |
| "loss": 0.9403, | |
| "step": 322 | |
| }, | |
| { | |
| "epoch": 1.762619372442019, | |
| "grad_norm": 2.994851374878646, | |
| "learning_rate": 6.543101765733473e-05, | |
| "loss": 0.9372, | |
| "step": 323 | |
| }, | |
| { | |
| "epoch": 1.7680763983628922, | |
| "grad_norm": 1.6077026093358928, | |
| "learning_rate": 6.531297536265668e-05, | |
| "loss": 0.9329, | |
| "step": 324 | |
| }, | |
| { | |
| "epoch": 1.7735334242837655, | |
| "grad_norm": 1.9574778352925133, | |
| "learning_rate": 6.519456422396089e-05, | |
| "loss": 0.9482, | |
| "step": 325 | |
| }, | |
| { | |
| "epoch": 1.7789904502046383, | |
| "grad_norm": 1.4810080659785045, | |
| "learning_rate": 6.507578596665655e-05, | |
| "loss": 0.9598, | |
| "step": 326 | |
| }, | |
| { | |
| "epoch": 1.7844474761255116, | |
| "grad_norm": 1.4703841544273384, | |
| "learning_rate": 6.495664232150227e-05, | |
| "loss": 0.9413, | |
| "step": 327 | |
| }, | |
| { | |
| "epoch": 1.7899045020463848, | |
| "grad_norm": 1.2925625121336821, | |
| "learning_rate": 6.483713502458082e-05, | |
| "loss": 0.9423, | |
| "step": 328 | |
| }, | |
| { | |
| "epoch": 1.795361527967258, | |
| "grad_norm": 1.4691567325910997, | |
| "learning_rate": 6.471726581727391e-05, | |
| "loss": 0.9513, | |
| "step": 329 | |
| }, | |
| { | |
| "epoch": 1.800818553888131, | |
| "grad_norm": 1.6492564208989655, | |
| "learning_rate": 6.459703644623669e-05, | |
| "loss": 0.9247, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 1.8062755798090042, | |
| "grad_norm": 1.8176791992294896, | |
| "learning_rate": 6.447644866337246e-05, | |
| "loss": 0.9201, | |
| "step": 331 | |
| }, | |
| { | |
| "epoch": 1.8117326057298773, | |
| "grad_norm": 1.2443659972541816, | |
| "learning_rate": 6.435550422580703e-05, | |
| "loss": 0.9428, | |
| "step": 332 | |
| }, | |
| { | |
| "epoch": 1.8171896316507503, | |
| "grad_norm": 0.9585197685327218, | |
| "learning_rate": 6.423420489586312e-05, | |
| "loss": 0.9098, | |
| "step": 333 | |
| }, | |
| { | |
| "epoch": 1.8226466575716236, | |
| "grad_norm": 1.9263653384878547, | |
| "learning_rate": 6.411255244103476e-05, | |
| "loss": 0.9445, | |
| "step": 334 | |
| }, | |
| { | |
| "epoch": 1.8281036834924966, | |
| "grad_norm": 1.4002772834034205, | |
| "learning_rate": 6.399054863396147e-05, | |
| "loss": 0.9407, | |
| "step": 335 | |
| }, | |
| { | |
| "epoch": 1.8335607094133697, | |
| "grad_norm": 2.263455086974397, | |
| "learning_rate": 6.38681952524024e-05, | |
| "loss": 0.9391, | |
| "step": 336 | |
| }, | |
| { | |
| "epoch": 1.839017735334243, | |
| "grad_norm": 1.6097342527863918, | |
| "learning_rate": 6.374549407921052e-05, | |
| "loss": 0.9373, | |
| "step": 337 | |
| }, | |
| { | |
| "epoch": 1.844474761255116, | |
| "grad_norm": 1.9906900776578618, | |
| "learning_rate": 6.362244690230658e-05, | |
| "loss": 0.9317, | |
| "step": 338 | |
| }, | |
| { | |
| "epoch": 1.849931787175989, | |
| "grad_norm": 1.7450912010683954, | |
| "learning_rate": 6.349905551465302e-05, | |
| "loss": 0.9338, | |
| "step": 339 | |
| }, | |
| { | |
| "epoch": 1.8553888130968623, | |
| "grad_norm": 1.4836167911447387, | |
| "learning_rate": 6.3375321714228e-05, | |
| "loss": 0.946, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 1.8608458390177354, | |
| "grad_norm": 1.2615365954555837, | |
| "learning_rate": 6.325124730399897e-05, | |
| "loss": 0.94, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 1.8663028649386084, | |
| "grad_norm": 1.512046504536024, | |
| "learning_rate": 6.312683409189659e-05, | |
| "loss": 0.927, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 1.8717598908594817, | |
| "grad_norm": 1.0177814524104347, | |
| "learning_rate": 6.300208389078834e-05, | |
| "loss": 0.9239, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 1.8772169167803547, | |
| "grad_norm": 2.0413829776753576, | |
| "learning_rate": 6.2876998518452e-05, | |
| "loss": 0.9407, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 1.8826739427012278, | |
| "grad_norm": 1.9502926154029763, | |
| "learning_rate": 6.275157979754932e-05, | |
| "loss": 0.9476, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 1.888130968622101, | |
| "grad_norm": 1.1523138049064319, | |
| "learning_rate": 6.262582955559936e-05, | |
| "loss": 0.9483, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 1.893587994542974, | |
| "grad_norm": 1.4261842624847951, | |
| "learning_rate": 6.249974962495187e-05, | |
| "loss": 0.9441, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 1.8990450204638472, | |
| "grad_norm": 1.2008850401868307, | |
| "learning_rate": 6.237334184276066e-05, | |
| "loss": 0.9143, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 1.9045020463847204, | |
| "grad_norm": 2.2130995474869377, | |
| "learning_rate": 6.224660805095669e-05, | |
| "loss": 0.9554, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 1.9099590723055935, | |
| "grad_norm": 1.2762628629624744, | |
| "learning_rate": 6.21195500962214e-05, | |
| "loss": 0.9392, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 1.9154160982264665, | |
| "grad_norm": 1.5320175021839157, | |
| "learning_rate": 6.199216982995966e-05, | |
| "loss": 0.9311, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 1.9208731241473398, | |
| "grad_norm": 1.0910833731453622, | |
| "learning_rate": 6.18644691082729e-05, | |
| "loss": 0.9125, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 1.9263301500682128, | |
| "grad_norm": 2.3128739475758846, | |
| "learning_rate": 6.173644979193199e-05, | |
| "loss": 0.9354, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 1.931787175989086, | |
| "grad_norm": 1.525750875087295, | |
| "learning_rate": 6.160811374635015e-05, | |
| "loss": 0.9308, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 1.9372442019099592, | |
| "grad_norm": 1.1778808648494785, | |
| "learning_rate": 6.147946284155576e-05, | |
| "loss": 0.9388, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 1.9427012278308322, | |
| "grad_norm": 3.1414998446163396, | |
| "learning_rate": 6.135049895216516e-05, | |
| "loss": 0.9208, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 1.9481582537517053, | |
| "grad_norm": 2.4226620501686384, | |
| "learning_rate": 6.122122395735525e-05, | |
| "loss": 0.9219, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 1.9536152796725785, | |
| "grad_norm": 3.2604962642078203, | |
| "learning_rate": 6.10916397408362e-05, | |
| "loss": 0.9344, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 1.9590723055934516, | |
| "grad_norm": 2.0869000164131326, | |
| "learning_rate": 6.096174819082389e-05, | |
| "loss": 0.9495, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 1.9645293315143246, | |
| "grad_norm": 3.808496465576956, | |
| "learning_rate": 6.0831551200012534e-05, | |
| "loss": 0.9567, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 1.969986357435198, | |
| "grad_norm": 3.241680278099208, | |
| "learning_rate": 6.070105066554698e-05, | |
| "loss": 0.9117, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 1.975443383356071, | |
| "grad_norm": 2.0073822332535984, | |
| "learning_rate": 6.057024848899515e-05, | |
| "loss": 0.9246, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 1.980900409276944, | |
| "grad_norm": 1.8404335145090698, | |
| "learning_rate": 6.0439146576320256e-05, | |
| "loss": 0.9528, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 1.9863574351978173, | |
| "grad_norm": 2.4786795274720053, | |
| "learning_rate": 6.0307746837853087e-05, | |
| "loss": 0.9327, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 1.9918144611186903, | |
| "grad_norm": 1.5081344365906557, | |
| "learning_rate": 6.017605118826417e-05, | |
| "loss": 0.9451, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 1.9972714870395634, | |
| "grad_norm": 2.5201008232251625, | |
| "learning_rate": 6.0044061546535815e-05, | |
| "loss": 0.931, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 2.0027285129604366, | |
| "grad_norm": 2.517633847206527, | |
| "learning_rate": 5.991177983593423e-05, | |
| "loss": 1.6346, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 2.00818553888131, | |
| "grad_norm": 2.525593797167957, | |
| "learning_rate": 5.9779207983981435e-05, | |
| "loss": 0.9096, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 2.0136425648021827, | |
| "grad_norm": 1.6371289597658272, | |
| "learning_rate": 5.964634792242723e-05, | |
| "loss": 0.924, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 2.019099590723056, | |
| "grad_norm": 2.3442082909219586, | |
| "learning_rate": 5.951320158722095e-05, | |
| "loss": 0.8972, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 2.0245566166439293, | |
| "grad_norm": 1.8551133091360161, | |
| "learning_rate": 5.937977091848341e-05, | |
| "loss": 0.9081, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 2.030013642564802, | |
| "grad_norm": 1.6933903655893108, | |
| "learning_rate": 5.924605786047847e-05, | |
| "loss": 0.9227, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 2.0354706684856754, | |
| "grad_norm": 1.9844111089596346, | |
| "learning_rate": 5.911206436158482e-05, | |
| "loss": 0.9123, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 2.0409276944065486, | |
| "grad_norm": 1.0962915420790558, | |
| "learning_rate": 5.897779237426753e-05, | |
| "loss": 0.9033, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 2.0463847203274215, | |
| "grad_norm": 2.2324391296180934, | |
| "learning_rate": 5.884324385504966e-05, | |
| "loss": 0.9129, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 2.0518417462482947, | |
| "grad_norm": 1.2807887920016723, | |
| "learning_rate": 5.870842076448364e-05, | |
| "loss": 0.9066, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 2.057298772169168, | |
| "grad_norm": 1.6112376316686408, | |
| "learning_rate": 5.857332506712285e-05, | |
| "loss": 0.9257, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 2.062755798090041, | |
| "grad_norm": 1.6753107815242307, | |
| "learning_rate": 5.843795873149285e-05, | |
| "loss": 0.933, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 2.068212824010914, | |
| "grad_norm": 1.3373949314164593, | |
| "learning_rate": 5.830232373006283e-05, | |
| "loss": 0.9306, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 2.0736698499317874, | |
| "grad_norm": 1.5344827214716636, | |
| "learning_rate": 5.816642203921674e-05, | |
| "loss": 0.8968, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 2.07912687585266, | |
| "grad_norm": 1.177900589075632, | |
| "learning_rate": 5.8030255639224584e-05, | |
| "loss": 0.885, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 2.0845839017735335, | |
| "grad_norm": 1.5511949126090052, | |
| "learning_rate": 5.789382651421354e-05, | |
| "loss": 0.9086, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 2.0900409276944067, | |
| "grad_norm": 1.3198466527101826, | |
| "learning_rate": 5.775713665213899e-05, | |
| "loss": 0.9153, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 2.0954979536152796, | |
| "grad_norm": 1.3654261455944285, | |
| "learning_rate": 5.7620188044755684e-05, | |
| "loss": 0.9245, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 2.100954979536153, | |
| "grad_norm": 1.4121475396535077, | |
| "learning_rate": 5.748298268758859e-05, | |
| "loss": 0.9045, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 2.106412005457026, | |
| "grad_norm": 1.2226144369638559, | |
| "learning_rate": 5.734552257990386e-05, | |
| "loss": 0.9309, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 2.111869031377899, | |
| "grad_norm": 1.1754628781979755, | |
| "learning_rate": 5.720780972467974e-05, | |
| "loss": 0.9064, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 2.117326057298772, | |
| "grad_norm": 1.6352507288221967, | |
| "learning_rate": 5.7069846128577304e-05, | |
| "loss": 0.9032, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 2.1227830832196455, | |
| "grad_norm": 1.2127070473615575, | |
| "learning_rate": 5.693163380191127e-05, | |
| "loss": 0.8939, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 2.1282401091405183, | |
| "grad_norm": 1.2698085787934459, | |
| "learning_rate": 5.679317475862073e-05, | |
| "loss": 0.9115, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 2.1336971350613916, | |
| "grad_norm": 1.0503429136187135, | |
| "learning_rate": 5.665447101623971e-05, | |
| "loss": 0.89, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 2.139154160982265, | |
| "grad_norm": 1.2859700958716116, | |
| "learning_rate": 5.6515524595867855e-05, | |
| "loss": 0.9136, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 2.1446111869031377, | |
| "grad_norm": 1.8523331576107729, | |
| "learning_rate": 5.6376337522141e-05, | |
| "loss": 0.8926, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 2.150068212824011, | |
| "grad_norm": 0.9691735187094292, | |
| "learning_rate": 5.623691182320153e-05, | |
| "loss": 0.9126, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 2.155525238744884, | |
| "grad_norm": 1.2395540253844441, | |
| "learning_rate": 5.6097249530669004e-05, | |
| "loss": 0.9189, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 2.160982264665757, | |
| "grad_norm": 0.9624385840728625, | |
| "learning_rate": 5.5957352679610417e-05, | |
| "loss": 0.9054, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 2.1664392905866303, | |
| "grad_norm": 1.512630856479847, | |
| "learning_rate": 5.58172233085106e-05, | |
| "loss": 0.8978, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 2.1718963165075036, | |
| "grad_norm": 1.3726882811029215, | |
| "learning_rate": 5.567686345924256e-05, | |
| "loss": 0.9254, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 2.1773533424283764, | |
| "grad_norm": 1.514528048915799, | |
| "learning_rate": 5.5536275177037615e-05, | |
| "loss": 0.8953, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 2.1828103683492497, | |
| "grad_norm": 0.9854058808309464, | |
| "learning_rate": 5.53954605104557e-05, | |
| "loss": 0.8994, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 2.188267394270123, | |
| "grad_norm": 1.3982634603651707, | |
| "learning_rate": 5.5254421511355466e-05, | |
| "loss": 0.9191, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 2.193724420190996, | |
| "grad_norm": 0.945935420435088, | |
| "learning_rate": 5.511316023486437e-05, | |
| "loss": 0.8954, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 2.199181446111869, | |
| "grad_norm": 1.7995542419322705, | |
| "learning_rate": 5.497167873934877e-05, | |
| "loss": 0.912, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 2.2046384720327423, | |
| "grad_norm": 1.4321355586507285, | |
| "learning_rate": 5.482997908638393e-05, | |
| "loss": 0.8979, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 2.210095497953615, | |
| "grad_norm": 0.9196538171557571, | |
| "learning_rate": 5.46880633407239e-05, | |
| "loss": 0.895, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 2.2155525238744884, | |
| "grad_norm": 1.5548446371415476, | |
| "learning_rate": 5.454593357027154e-05, | |
| "loss": 0.913, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 2.2210095497953617, | |
| "grad_norm": 1.657041068857267, | |
| "learning_rate": 5.440359184604834e-05, | |
| "loss": 0.9279, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 2.2264665757162345, | |
| "grad_norm": 1.0626769214920089, | |
| "learning_rate": 5.42610402421642e-05, | |
| "loss": 0.893, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 2.231923601637108, | |
| "grad_norm": 1.5408374648839536, | |
| "learning_rate": 5.411828083578729e-05, | |
| "loss": 0.9044, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 2.237380627557981, | |
| "grad_norm": 1.05976554269391, | |
| "learning_rate": 5.397531570711373e-05, | |
| "loss": 0.8687, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 2.242837653478854, | |
| "grad_norm": 1.8958984234334453, | |
| "learning_rate": 5.3832146939337267e-05, | |
| "loss": 0.9318, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 2.248294679399727, | |
| "grad_norm": 1.3494319992439094, | |
| "learning_rate": 5.368877661861899e-05, | |
| "loss": 0.9235, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 2.2537517053206004, | |
| "grad_norm": 1.4368846046733357, | |
| "learning_rate": 5.3545206834056816e-05, | |
| "loss": 0.9123, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 2.2592087312414733, | |
| "grad_norm": 1.8256579203781316, | |
| "learning_rate": 5.340143967765519e-05, | |
| "loss": 0.9035, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 2.2646657571623465, | |
| "grad_norm": 1.3166251617751332, | |
| "learning_rate": 5.325747724429448e-05, | |
| "loss": 0.9132, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 2.27012278308322, | |
| "grad_norm": 1.5644479487633818, | |
| "learning_rate": 5.311332163170054e-05, | |
| "loss": 0.8882, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 2.2755798090040926, | |
| "grad_norm": 1.7730325855876201, | |
| "learning_rate": 5.296897494041407e-05, | |
| "loss": 0.9096, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 2.281036834924966, | |
| "grad_norm": 0.690579518867841, | |
| "learning_rate": 5.282443927376004e-05, | |
| "loss": 0.9008, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 2.286493860845839, | |
| "grad_norm": 1.525525192362239, | |
| "learning_rate": 5.267971673781708e-05, | |
| "loss": 0.9091, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 2.291950886766712, | |
| "grad_norm": 1.1450930632937755, | |
| "learning_rate": 5.253480944138675e-05, | |
| "loss": 0.9077, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 2.2974079126875853, | |
| "grad_norm": 1.5853135303829244, | |
| "learning_rate": 5.238971949596277e-05, | |
| "loss": 0.9076, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 2.3028649386084585, | |
| "grad_norm": 1.0964682564769086, | |
| "learning_rate": 5.2244449015700364e-05, | |
| "loss": 0.9065, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 2.3083219645293314, | |
| "grad_norm": 0.9213215067007345, | |
| "learning_rate": 5.209900011738536e-05, | |
| "loss": 0.9075, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 2.3137789904502046, | |
| "grad_norm": 0.9574566792724861, | |
| "learning_rate": 5.1953374920403367e-05, | |
| "loss": 0.8995, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 2.319236016371078, | |
| "grad_norm": 1.4073999493333642, | |
| "learning_rate": 5.1807575546708925e-05, | |
| "loss": 0.9051, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 2.3246930422919507, | |
| "grad_norm": 1.5239194175384891, | |
| "learning_rate": 5.166160412079456e-05, | |
| "loss": 0.8978, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 2.330150068212824, | |
| "grad_norm": 0.5990721338816578, | |
| "learning_rate": 5.15154627696598e-05, | |
| "loss": 0.9289, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 2.3356070941336973, | |
| "grad_norm": 0.9305075248801276, | |
| "learning_rate": 5.136915362278025e-05, | |
| "loss": 0.9002, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 2.34106412005457, | |
| "grad_norm": 1.159552986565052, | |
| "learning_rate": 5.1222678812076514e-05, | |
| "loss": 0.8991, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 2.3465211459754434, | |
| "grad_norm": 1.544608318550361, | |
| "learning_rate": 5.1076040471883105e-05, | |
| "loss": 0.9041, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 2.3519781718963166, | |
| "grad_norm": 0.9906368417277753, | |
| "learning_rate": 5.092924073891745e-05, | |
| "loss": 0.9056, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 2.3574351978171895, | |
| "grad_norm": 1.2503392254533339, | |
| "learning_rate": 5.078228175224861e-05, | |
| "loss": 0.9071, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 2.3628922237380627, | |
| "grad_norm": 1.0117345420422286, | |
| "learning_rate": 5.063516565326622e-05, | |
| "loss": 0.9007, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 2.368349249658936, | |
| "grad_norm": 1.5307989423567165, | |
| "learning_rate": 5.048789458564928e-05, | |
| "loss": 0.9082, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 2.373806275579809, | |
| "grad_norm": 1.3272756265512895, | |
| "learning_rate": 5.034047069533485e-05, | |
| "loss": 0.9169, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 2.379263301500682, | |
| "grad_norm": 0.9338875905847049, | |
| "learning_rate": 5.019289613048683e-05, | |
| "loss": 0.908, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 2.3847203274215554, | |
| "grad_norm": 1.0158152693801568, | |
| "learning_rate": 5.004517304146467e-05, | |
| "loss": 0.8957, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 2.390177353342428, | |
| "grad_norm": 1.535254150172697, | |
| "learning_rate": 4.989730358079198e-05, | |
| "loss": 0.8862, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 2.3956343792633015, | |
| "grad_norm": 1.0474435875199142, | |
| "learning_rate": 4.9749289903125236e-05, | |
| "loss": 0.8882, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 2.4010914051841747, | |
| "grad_norm": 1.5467304860312037, | |
| "learning_rate": 4.960113416522233e-05, | |
| "loss": 0.9022, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 2.4065484311050476, | |
| "grad_norm": 0.7171663599148432, | |
| "learning_rate": 4.945283852591119e-05, | |
| "loss": 0.8952, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 2.412005457025921, | |
| "grad_norm": 1.184417711535613, | |
| "learning_rate": 4.930440514605826e-05, | |
| "loss": 0.9065, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 2.417462482946794, | |
| "grad_norm": 1.204723229806714, | |
| "learning_rate": 4.9155836188537045e-05, | |
| "loss": 0.909, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 2.422919508867667, | |
| "grad_norm": 1.2380345749344617, | |
| "learning_rate": 4.9007133818196636e-05, | |
| "loss": 0.8951, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 2.42837653478854, | |
| "grad_norm": 1.265693694540876, | |
| "learning_rate": 4.885830020183011e-05, | |
| "loss": 0.9181, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 2.4338335607094135, | |
| "grad_norm": 1.099951391116176, | |
| "learning_rate": 4.870933750814296e-05, | |
| "loss": 0.9179, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 2.4392905866302863, | |
| "grad_norm": 1.2414338650044854, | |
| "learning_rate": 4.8560247907721535e-05, | |
| "loss": 0.886, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 2.4447476125511596, | |
| "grad_norm": 1.5704709003561375, | |
| "learning_rate": 4.841103357300137e-05, | |
| "loss": 0.9175, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 2.450204638472033, | |
| "grad_norm": 0.534836475556195, | |
| "learning_rate": 4.826169667823553e-05, | |
| "loss": 0.9199, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 2.4556616643929057, | |
| "grad_norm": 0.8776370257962468, | |
| "learning_rate": 4.811223939946299e-05, | |
| "loss": 0.8944, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 2.461118690313779, | |
| "grad_norm": 1.9706453577764773, | |
| "learning_rate": 4.796266391447683e-05, | |
| "loss": 0.9127, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 2.466575716234652, | |
| "grad_norm": 0.8249118962292379, | |
| "learning_rate": 4.7812972402792574e-05, | |
| "loss": 0.9089, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 2.472032742155525, | |
| "grad_norm": 1.7629114368166088, | |
| "learning_rate": 4.766316704561645e-05, | |
| "loss": 0.9111, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 2.4774897680763983, | |
| "grad_norm": 1.4811036485248286, | |
| "learning_rate": 4.75132500258135e-05, | |
| "loss": 0.9073, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 2.4829467939972716, | |
| "grad_norm": 1.1161608577851434, | |
| "learning_rate": 4.736322352787586e-05, | |
| "loss": 0.9044, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 2.488403819918145, | |
| "grad_norm": 1.2828229918284093, | |
| "learning_rate": 4.721308973789096e-05, | |
| "loss": 0.9, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 2.4938608458390177, | |
| "grad_norm": 1.517713518373473, | |
| "learning_rate": 4.7062850843509564e-05, | |
| "loss": 0.9027, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 2.499317871759891, | |
| "grad_norm": 1.3348391079469917, | |
| "learning_rate": 4.6912509033913945e-05, | |
| "loss": 0.9195, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 2.504774897680764, | |
| "grad_norm": 1.245752982162094, | |
| "learning_rate": 4.6762066499786015e-05, | |
| "loss": 0.9243, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 2.510231923601637, | |
| "grad_norm": 1.1304452292363005, | |
| "learning_rate": 4.661152543327539e-05, | |
| "loss": 0.9053, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 2.5156889495225103, | |
| "grad_norm": 1.5152505332881556, | |
| "learning_rate": 4.64608880279674e-05, | |
| "loss": 0.9025, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 2.5211459754433836, | |
| "grad_norm": 0.6895652709575827, | |
| "learning_rate": 4.631015647885118e-05, | |
| "loss": 0.877, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 2.5266030013642564, | |
| "grad_norm": 1.0031211901568315, | |
| "learning_rate": 4.6159332982287654e-05, | |
| "loss": 0.9165, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 2.5320600272851297, | |
| "grad_norm": 0.9856933305068003, | |
| "learning_rate": 4.6008419735977567e-05, | |
| "loss": 0.8947, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 2.5375170532060025, | |
| "grad_norm": 1.4074395011531378, | |
| "learning_rate": 4.585741893892941e-05, | |
| "loss": 0.9031, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 2.542974079126876, | |
| "grad_norm": 1.0947772923264532, | |
| "learning_rate": 4.570633279142743e-05, | |
| "loss": 0.895, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 2.548431105047749, | |
| "grad_norm": 0.9954166963630334, | |
| "learning_rate": 4.5555163494999535e-05, | |
| "loss": 0.9155, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 2.5538881309686223, | |
| "grad_norm": 1.0252150819212769, | |
| "learning_rate": 4.5403913252385206e-05, | |
| "loss": 0.8998, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 2.559345156889495, | |
| "grad_norm": 0.5854437882911242, | |
| "learning_rate": 4.525258426750346e-05, | |
| "loss": 0.9013, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 2.5648021828103684, | |
| "grad_norm": 0.7840815488236171, | |
| "learning_rate": 4.510117874542064e-05, | |
| "loss": 0.8883, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 2.5702592087312413, | |
| "grad_norm": 1.0631188620110363, | |
| "learning_rate": 4.494969889231839e-05, | |
| "loss": 0.89, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 2.5757162346521145, | |
| "grad_norm": 0.6218999930771435, | |
| "learning_rate": 4.4798146915461446e-05, | |
| "loss": 0.8793, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 2.581173260572988, | |
| "grad_norm": 0.8064002493767205, | |
| "learning_rate": 4.464652502316545e-05, | |
| "loss": 0.9076, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 2.586630286493861, | |
| "grad_norm": 0.8565103993043336, | |
| "learning_rate": 4.4494835424764846e-05, | |
| "loss": 0.8928, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 2.592087312414734, | |
| "grad_norm": 0.8227008226865807, | |
| "learning_rate": 4.434308033058062e-05, | |
| "loss": 0.9115, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 2.597544338335607, | |
| "grad_norm": 0.552330723413834, | |
| "learning_rate": 4.4191261951888137e-05, | |
| "loss": 0.905, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 2.60300136425648, | |
| "grad_norm": 0.9160367599695006, | |
| "learning_rate": 4.4039382500884884e-05, | |
| "loss": 0.8907, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 2.6084583901773533, | |
| "grad_norm": 1.2743846931315213, | |
| "learning_rate": 4.3887444190658276e-05, | |
| "loss": 0.8979, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 2.6139154160982265, | |
| "grad_norm": 1.0391301799339259, | |
| "learning_rate": 4.373544923515337e-05, | |
| "loss": 0.9025, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 2.6193724420191, | |
| "grad_norm": 1.3713321504465736, | |
| "learning_rate": 4.358339984914063e-05, | |
| "loss": 0.8801, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 2.6248294679399726, | |
| "grad_norm": 0.8066646654611648, | |
| "learning_rate": 4.343129824818363e-05, | |
| "loss": 0.9041, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 2.630286493860846, | |
| "grad_norm": 0.7832853640483513, | |
| "learning_rate": 4.32791466486068e-05, | |
| "loss": 0.9203, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 2.6357435197817187, | |
| "grad_norm": 0.7057444529833893, | |
| "learning_rate": 4.312694726746312e-05, | |
| "loss": 0.9049, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 2.641200545702592, | |
| "grad_norm": 0.6493100641693694, | |
| "learning_rate": 4.29747023225018e-05, | |
| "loss": 0.8992, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 2.6466575716234653, | |
| "grad_norm": 0.6608005917805958, | |
| "learning_rate": 4.282241403213598e-05, | |
| "loss": 0.8936, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 2.6521145975443385, | |
| "grad_norm": 0.9967757502926118, | |
| "learning_rate": 4.267008461541041e-05, | |
| "loss": 0.8957, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 2.6575716234652114, | |
| "grad_norm": 1.4601123062706318, | |
| "learning_rate": 4.251771629196908e-05, | |
| "loss": 0.8933, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 2.6630286493860846, | |
| "grad_norm": 0.8449635915962943, | |
| "learning_rate": 4.2365311282022936e-05, | |
| "loss": 0.897, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 2.6684856753069575, | |
| "grad_norm": 0.796727650357652, | |
| "learning_rate": 4.221287180631747e-05, | |
| "loss": 0.8943, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 2.6739427012278307, | |
| "grad_norm": 1.064925467642507, | |
| "learning_rate": 4.20604000861004e-05, | |
| "loss": 0.9028, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 2.679399727148704, | |
| "grad_norm": 1.8132030562855181, | |
| "learning_rate": 4.190789834308929e-05, | |
| "loss": 0.8899, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 2.6848567530695773, | |
| "grad_norm": 0.6460897975291272, | |
| "learning_rate": 4.175536879943919e-05, | |
| "loss": 0.888, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 2.69031377899045, | |
| "grad_norm": 2.32226453518839, | |
| "learning_rate": 4.160281367771019e-05, | |
| "loss": 0.9041, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 2.6957708049113234, | |
| "grad_norm": 1.074926776476922, | |
| "learning_rate": 4.1450235200835145e-05, | |
| "loss": 0.9112, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 2.701227830832196, | |
| "grad_norm": 2.988265988173717, | |
| "learning_rate": 4.1297635592087196e-05, | |
| "loss": 0.9137, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 2.7066848567530695, | |
| "grad_norm": 2.748929837066849, | |
| "learning_rate": 4.114501707504741e-05, | |
| "loss": 0.8964, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 2.7121418826739427, | |
| "grad_norm": 1.3745356070302501, | |
| "learning_rate": 4.099238187357239e-05, | |
| "loss": 0.9038, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 2.717598908594816, | |
| "grad_norm": 1.5677671965351183, | |
| "learning_rate": 4.083973221176182e-05, | |
| "loss": 0.9294, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 2.723055934515689, | |
| "grad_norm": 1.573723869725014, | |
| "learning_rate": 4.0687070313926114e-05, | |
| "loss": 0.919, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 2.728512960436562, | |
| "grad_norm": 0.9328025105721132, | |
| "learning_rate": 4.053439840455398e-05, | |
| "loss": 0.8881, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 2.733969986357435, | |
| "grad_norm": 1.3306299787288607, | |
| "learning_rate": 4.038171870827999e-05, | |
| "loss": 0.9194, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 2.739427012278308, | |
| "grad_norm": 1.0631582399329886, | |
| "learning_rate": 4.0229033449852206e-05, | |
| "loss": 0.9342, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 2.7448840381991815, | |
| "grad_norm": 1.4745520296021233, | |
| "learning_rate": 4.0076344854099724e-05, | |
| "loss": 0.9026, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 2.7503410641200547, | |
| "grad_norm": 1.0276507362237068, | |
| "learning_rate": 3.9923655145900275e-05, | |
| "loss": 0.9064, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 2.7557980900409276, | |
| "grad_norm": 1.4421360481508985, | |
| "learning_rate": 3.97709665501478e-05, | |
| "loss": 0.9256, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 2.761255115961801, | |
| "grad_norm": 1.324734470431852, | |
| "learning_rate": 3.9618281291720025e-05, | |
| "loss": 0.884, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 2.7667121418826737, | |
| "grad_norm": 0.7620830957680698, | |
| "learning_rate": 3.9465601595446035e-05, | |
| "loss": 0.9004, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 2.772169167803547, | |
| "grad_norm": 1.4595376240477347, | |
| "learning_rate": 3.93129296860739e-05, | |
| "loss": 0.911, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 2.77762619372442, | |
| "grad_norm": 1.0958345521160193, | |
| "learning_rate": 3.9160267788238196e-05, | |
| "loss": 0.9113, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 2.7830832196452935, | |
| "grad_norm": 0.9499614564643925, | |
| "learning_rate": 3.9007618126427614e-05, | |
| "loss": 0.8727, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 2.7885402455661663, | |
| "grad_norm": 1.6986030669586536, | |
| "learning_rate": 3.885498292495259e-05, | |
| "loss": 0.9059, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 2.7939972714870396, | |
| "grad_norm": 0.8253935525812467, | |
| "learning_rate": 3.870236440791281e-05, | |
| "loss": 0.8994, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 2.799454297407913, | |
| "grad_norm": 1.62017400053124, | |
| "learning_rate": 3.854976479916486e-05, | |
| "loss": 0.8818, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 2.8049113233287857, | |
| "grad_norm": 1.0295253405729237, | |
| "learning_rate": 3.839718632228982e-05, | |
| "loss": 0.8977, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 2.810368349249659, | |
| "grad_norm": 1.3655909260152348, | |
| "learning_rate": 3.8244631200560835e-05, | |
| "loss": 0.9078, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 2.815825375170532, | |
| "grad_norm": 1.0954541627416656, | |
| "learning_rate": 3.809210165691072e-05, | |
| "loss": 0.8977, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 2.821282401091405, | |
| "grad_norm": 1.1586044724363185, | |
| "learning_rate": 3.79395999138996e-05, | |
| "loss": 0.9056, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 2.8267394270122783, | |
| "grad_norm": 0.8829661993056394, | |
| "learning_rate": 3.7787128193682546e-05, | |
| "loss": 0.8965, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 2.8321964529331516, | |
| "grad_norm": 0.8815798999675472, | |
| "learning_rate": 3.763468871797708e-05, | |
| "loss": 0.9197, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 2.8376534788540244, | |
| "grad_norm": 1.2538953598040594, | |
| "learning_rate": 3.7482283708030936e-05, | |
| "loss": 0.8774, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 2.8431105047748977, | |
| "grad_norm": 0.728098507307315, | |
| "learning_rate": 3.7329915384589615e-05, | |
| "loss": 0.9103, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 2.848567530695771, | |
| "grad_norm": 1.1109117113371303, | |
| "learning_rate": 3.717758596786404e-05, | |
| "loss": 0.9196, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 2.854024556616644, | |
| "grad_norm": 1.0718036954886652, | |
| "learning_rate": 3.702529767749821e-05, | |
| "loss": 0.8963, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 2.859481582537517, | |
| "grad_norm": 1.048803705132213, | |
| "learning_rate": 3.687305273253689e-05, | |
| "loss": 0.8935, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 2.8649386084583903, | |
| "grad_norm": 0.8887966210033448, | |
| "learning_rate": 3.672085335139321e-05, | |
| "loss": 0.9014, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 2.870395634379263, | |
| "grad_norm": 0.7017332784967888, | |
| "learning_rate": 3.656870175181638e-05, | |
| "loss": 0.9019, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 2.8758526603001364, | |
| "grad_norm": 0.8896432018913878, | |
| "learning_rate": 3.6416600150859386e-05, | |
| "loss": 0.9178, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 2.8813096862210097, | |
| "grad_norm": 0.7645003604259587, | |
| "learning_rate": 3.626455076484665e-05, | |
| "loss": 0.8967, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 2.8867667121418825, | |
| "grad_norm": 1.2198348096547293, | |
| "learning_rate": 3.611255580934173e-05, | |
| "loss": 0.8818, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 2.892223738062756, | |
| "grad_norm": 0.6151717260376284, | |
| "learning_rate": 3.596061749911512e-05, | |
| "loss": 0.8922, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 2.897680763983629, | |
| "grad_norm": 0.815800617981215, | |
| "learning_rate": 3.5808738048111884e-05, | |
| "loss": 0.9042, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 2.903137789904502, | |
| "grad_norm": 0.8465415157806591, | |
| "learning_rate": 3.565691966941939e-05, | |
| "loss": 0.9033, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 2.908594815825375, | |
| "grad_norm": 0.9997567059531529, | |
| "learning_rate": 3.550516457523517e-05, | |
| "loss": 0.8865, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 2.9140518417462484, | |
| "grad_norm": 0.9200304101564798, | |
| "learning_rate": 3.535347497683456e-05, | |
| "loss": 0.8817, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 2.9195088676671213, | |
| "grad_norm": 0.8027379053061545, | |
| "learning_rate": 3.5201853084538554e-05, | |
| "loss": 0.9084, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 2.9249658935879945, | |
| "grad_norm": 0.5469780406914913, | |
| "learning_rate": 3.505030110768161e-05, | |
| "loss": 0.8802, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 2.930422919508868, | |
| "grad_norm": 0.7413900149381596, | |
| "learning_rate": 3.4898821254579374e-05, | |
| "loss": 0.903, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 2.9358799454297406, | |
| "grad_norm": 0.8762727718053043, | |
| "learning_rate": 3.4747415732496556e-05, | |
| "loss": 0.9188, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 2.941336971350614, | |
| "grad_norm": 0.9396772677381491, | |
| "learning_rate": 3.459608674761481e-05, | |
| "loss": 0.901, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 2.946793997271487, | |
| "grad_norm": 0.6680089017701383, | |
| "learning_rate": 3.4444836505000485e-05, | |
| "loss": 0.8923, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 2.9522510231923604, | |
| "grad_norm": 0.47043991279916797, | |
| "learning_rate": 3.429366720857259e-05, | |
| "loss": 0.88, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 2.9577080491132333, | |
| "grad_norm": 0.4955656876316972, | |
| "learning_rate": 3.4142581061070596e-05, | |
| "loss": 0.8939, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 2.9631650750341065, | |
| "grad_norm": 0.8318305087447663, | |
| "learning_rate": 3.399158026402245e-05, | |
| "loss": 0.9006, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 2.9686221009549794, | |
| "grad_norm": 0.9724940321029891, | |
| "learning_rate": 3.384066701771235e-05, | |
| "loss": 0.8856, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 2.9740791268758526, | |
| "grad_norm": 0.6009045145969995, | |
| "learning_rate": 3.368984352114883e-05, | |
| "loss": 0.9143, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 2.979536152796726, | |
| "grad_norm": 0.39363538472622567, | |
| "learning_rate": 3.353911197203262e-05, | |
| "loss": 0.8902, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 2.984993178717599, | |
| "grad_norm": 0.5286633463839729, | |
| "learning_rate": 3.338847456672463e-05, | |
| "loss": 0.8895, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 2.990450204638472, | |
| "grad_norm": 0.7531125160359047, | |
| "learning_rate": 3.323793350021399e-05, | |
| "loss": 0.9012, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 2.9959072305593453, | |
| "grad_norm": 0.7249830120473333, | |
| "learning_rate": 3.3087490966086075e-05, | |
| "loss": 0.8936, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 3.001364256480218, | |
| "grad_norm": 0.7613973707952983, | |
| "learning_rate": 3.293714915649045e-05, | |
| "loss": 1.5762, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 3.0068212824010914, | |
| "grad_norm": 0.5390667663982144, | |
| "learning_rate": 3.278691026210904e-05, | |
| "loss": 0.86, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 3.0122783083219646, | |
| "grad_norm": 0.4441457511204983, | |
| "learning_rate": 3.2636776472124144e-05, | |
| "loss": 0.8687, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 3.0177353342428375, | |
| "grad_norm": 0.547857567525407, | |
| "learning_rate": 3.248674997418652e-05, | |
| "loss": 0.8613, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 3.0231923601637107, | |
| "grad_norm": 0.5080127949107659, | |
| "learning_rate": 3.233683295438356e-05, | |
| "loss": 0.8634, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 3.028649386084584, | |
| "grad_norm": 0.4627457990776784, | |
| "learning_rate": 3.2187027597207426e-05, | |
| "loss": 0.8707, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 3.034106412005457, | |
| "grad_norm": 0.4908817778767444, | |
| "learning_rate": 3.2037336085523186e-05, | |
| "loss": 0.8597, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 3.03956343792633, | |
| "grad_norm": 0.5198441754829152, | |
| "learning_rate": 3.188776060053702e-05, | |
| "loss": 0.8431, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 3.0450204638472034, | |
| "grad_norm": 0.4818792863535078, | |
| "learning_rate": 3.173830332176448e-05, | |
| "loss": 0.8809, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 3.050477489768076, | |
| "grad_norm": 0.44582198493157366, | |
| "learning_rate": 3.158896642699865e-05, | |
| "loss": 0.8648, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 3.0559345156889495, | |
| "grad_norm": 0.33617908042386857, | |
| "learning_rate": 3.1439752092278485e-05, | |
| "loss": 0.8884, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 3.0613915416098227, | |
| "grad_norm": 0.5488599072908693, | |
| "learning_rate": 3.129066249185704e-05, | |
| "loss": 0.8681, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 3.0668485675306956, | |
| "grad_norm": 0.573743615074981, | |
| "learning_rate": 3.11416997981699e-05, | |
| "loss": 0.8716, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 3.072305593451569, | |
| "grad_norm": 0.3440741022604367, | |
| "learning_rate": 3.099286618180337e-05, | |
| "loss": 0.8685, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 3.077762619372442, | |
| "grad_norm": 0.48263781958172847, | |
| "learning_rate": 3.084416381146297e-05, | |
| "loss": 0.8612, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 3.083219645293315, | |
| "grad_norm": 0.5341856968582221, | |
| "learning_rate": 3.069559485394176e-05, | |
| "loss": 0.8906, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 3.088676671214188, | |
| "grad_norm": 0.44955890178043545, | |
| "learning_rate": 3.054716147408883e-05, | |
| "loss": 0.8647, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 3.0941336971350615, | |
| "grad_norm": 0.24528391692821006, | |
| "learning_rate": 3.039886583477767e-05, | |
| "loss": 0.8734, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 3.0995907230559343, | |
| "grad_norm": 0.3811275418491481, | |
| "learning_rate": 3.0250710096874778e-05, | |
| "loss": 0.8571, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 3.1050477489768076, | |
| "grad_norm": 0.4556061152291196, | |
| "learning_rate": 3.0102696419208032e-05, | |
| "loss": 0.8425, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 3.110504774897681, | |
| "grad_norm": 0.396338506458531, | |
| "learning_rate": 2.995482695853535e-05, | |
| "loss": 0.876, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 3.1159618008185537, | |
| "grad_norm": 0.2967118646166294, | |
| "learning_rate": 2.9807103869513184e-05, | |
| "loss": 0.8728, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 3.121418826739427, | |
| "grad_norm": 0.3675099822893278, | |
| "learning_rate": 2.965952930466517e-05, | |
| "loss": 0.8933, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 3.1268758526603, | |
| "grad_norm": 0.45762346450524527, | |
| "learning_rate": 2.9512105414350722e-05, | |
| "loss": 0.8888, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 3.132332878581173, | |
| "grad_norm": 0.4221166157533506, | |
| "learning_rate": 2.9364834346733784e-05, | |
| "loss": 0.8791, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 3.1377899045020463, | |
| "grad_norm": 0.2995025167192171, | |
| "learning_rate": 2.9217718247751407e-05, | |
| "loss": 0.8767, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 3.1432469304229196, | |
| "grad_norm": 0.4571280036227124, | |
| "learning_rate": 2.907075926108257e-05, | |
| "loss": 0.879, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 3.148703956343793, | |
| "grad_norm": 0.5184416527023764, | |
| "learning_rate": 2.892395952811691e-05, | |
| "loss": 0.8824, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 3.1541609822646657, | |
| "grad_norm": 0.2985433643589337, | |
| "learning_rate": 2.8777321187923513e-05, | |
| "loss": 0.8598, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 3.159618008185539, | |
| "grad_norm": 0.3051428245095018, | |
| "learning_rate": 2.8630846377219756e-05, | |
| "loss": 0.8637, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 3.1650750341064118, | |
| "grad_norm": 0.3572619248836196, | |
| "learning_rate": 2.848453723034021e-05, | |
| "loss": 0.88, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 3.170532060027285, | |
| "grad_norm": 0.3556049119216331, | |
| "learning_rate": 2.8338395879205447e-05, | |
| "loss": 0.8648, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 3.1759890859481583, | |
| "grad_norm": 0.22986119097957075, | |
| "learning_rate": 2.819242445329108e-05, | |
| "loss": 0.8574, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 3.1814461118690316, | |
| "grad_norm": 0.2812293245829613, | |
| "learning_rate": 2.8046625079596643e-05, | |
| "loss": 0.8635, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 3.1869031377899044, | |
| "grad_norm": 0.36671153393330513, | |
| "learning_rate": 2.7900999882614663e-05, | |
| "loss": 0.8559, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 3.1923601637107777, | |
| "grad_norm": 0.2537974452996025, | |
| "learning_rate": 2.7755550984299656e-05, | |
| "loss": 0.8718, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 3.197817189631651, | |
| "grad_norm": 0.2168685243882707, | |
| "learning_rate": 2.7610280504037236e-05, | |
| "loss": 0.8677, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 3.203274215552524, | |
| "grad_norm": 0.2751037843213298, | |
| "learning_rate": 2.746519055861326e-05, | |
| "loss": 0.8715, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 3.208731241473397, | |
| "grad_norm": 0.25433874445888727, | |
| "learning_rate": 2.732028326218292e-05, | |
| "loss": 0.8526, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 3.2141882673942703, | |
| "grad_norm": 0.23235048668899896, | |
| "learning_rate": 2.7175560726239972e-05, | |
| "loss": 0.8542, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 3.219645293315143, | |
| "grad_norm": 0.2449996067653389, | |
| "learning_rate": 2.7031025059585957e-05, | |
| "loss": 0.8805, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 3.2251023192360164, | |
| "grad_norm": 0.797386710016043, | |
| "learning_rate": 2.6886678368299478e-05, | |
| "loss": 0.879, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 3.2305593451568897, | |
| "grad_norm": 0.2212327644878727, | |
| "learning_rate": 2.674252275570552e-05, | |
| "loss": 0.8581, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 3.2360163710777625, | |
| "grad_norm": 0.2762951579133119, | |
| "learning_rate": 2.659856032234482e-05, | |
| "loss": 0.8672, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 3.241473396998636, | |
| "grad_norm": 0.22565321140932834, | |
| "learning_rate": 2.6454793165943187e-05, | |
| "loss": 0.8635, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 3.246930422919509, | |
| "grad_norm": 0.2521628451989612, | |
| "learning_rate": 2.631122338138103e-05, | |
| "loss": 0.8596, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 3.252387448840382, | |
| "grad_norm": 0.2570062440080345, | |
| "learning_rate": 2.6167853060662743e-05, | |
| "loss": 0.8886, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 3.257844474761255, | |
| "grad_norm": 0.2607472825748654, | |
| "learning_rate": 2.6024684292886288e-05, | |
| "loss": 0.8508, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 3.2633015006821284, | |
| "grad_norm": 0.23914118744624313, | |
| "learning_rate": 2.5881719164212714e-05, | |
| "loss": 0.8578, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 3.2687585266030013, | |
| "grad_norm": 0.3034320915776727, | |
| "learning_rate": 2.5738959757835807e-05, | |
| "loss": 0.8555, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 3.2742155525238745, | |
| "grad_norm": 0.20652047060980766, | |
| "learning_rate": 2.559640815395167e-05, | |
| "loss": 0.892, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 3.279672578444748, | |
| "grad_norm": 0.28264748490488173, | |
| "learning_rate": 2.5454066429728465e-05, | |
| "loss": 0.8568, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 3.2851296043656206, | |
| "grad_norm": 0.20253926721485932, | |
| "learning_rate": 2.531193665927612e-05, | |
| "loss": 0.8494, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 3.290586630286494, | |
| "grad_norm": 0.2776465457759516, | |
| "learning_rate": 2.5170020913616092e-05, | |
| "loss": 0.8653, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 3.296043656207367, | |
| "grad_norm": 0.2444576656506593, | |
| "learning_rate": 2.5028321260651235e-05, | |
| "loss": 0.8653, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 3.30150068212824, | |
| "grad_norm": 0.2442116426325601, | |
| "learning_rate": 2.4886839765135643e-05, | |
| "loss": 0.8519, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 3.3069577080491133, | |
| "grad_norm": 0.2959584392164269, | |
| "learning_rate": 2.4745578488644544e-05, | |
| "loss": 0.8746, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 3.3124147339699865, | |
| "grad_norm": 0.2021198154975386, | |
| "learning_rate": 2.460453948954431e-05, | |
| "loss": 0.8553, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 3.3178717598908594, | |
| "grad_norm": 0.24343202473760756, | |
| "learning_rate": 2.44637248229624e-05, | |
| "loss": 0.8804, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 3.3233287858117326, | |
| "grad_norm": 0.2043418483873985, | |
| "learning_rate": 2.432313654075746e-05, | |
| "loss": 0.8563, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 3.328785811732606, | |
| "grad_norm": 0.24604327736427975, | |
| "learning_rate": 2.4182776691489407e-05, | |
| "loss": 0.8845, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 3.3342428376534787, | |
| "grad_norm": 0.2359424714772047, | |
| "learning_rate": 2.4042647320389597e-05, | |
| "loss": 0.87, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 3.339699863574352, | |
| "grad_norm": 0.21890191711148535, | |
| "learning_rate": 2.3902750469331003e-05, | |
| "loss": 0.8768, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 3.3451568894952253, | |
| "grad_norm": 0.26622346979171607, | |
| "learning_rate": 2.3763088176798473e-05, | |
| "loss": 0.8673, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 3.350613915416098, | |
| "grad_norm": 0.2335026323983004, | |
| "learning_rate": 2.3623662477859016e-05, | |
| "loss": 0.8764, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 3.3560709413369714, | |
| "grad_norm": 0.3242878727490004, | |
| "learning_rate": 2.348447540413215e-05, | |
| "loss": 0.8594, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 3.3615279672578446, | |
| "grad_norm": 0.30589131325231633, | |
| "learning_rate": 2.3345528983760306e-05, | |
| "loss": 0.887, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 3.3669849931787175, | |
| "grad_norm": 0.3363507012203662, | |
| "learning_rate": 2.3206825241379275e-05, | |
| "loss": 0.8726, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 3.3724420190995907, | |
| "grad_norm": 0.26053908816844135, | |
| "learning_rate": 2.3068366198088725e-05, | |
| "loss": 0.8585, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 3.377899045020464, | |
| "grad_norm": 0.34640941236780975, | |
| "learning_rate": 2.293015387142271e-05, | |
| "loss": 0.8686, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 3.383356070941337, | |
| "grad_norm": 0.22519137087127883, | |
| "learning_rate": 2.2792190275320268e-05, | |
| "loss": 0.8524, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 3.38881309686221, | |
| "grad_norm": 0.27989008535136384, | |
| "learning_rate": 2.265447742009615e-05, | |
| "loss": 0.8757, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 3.3942701227830834, | |
| "grad_norm": 0.22353901032718793, | |
| "learning_rate": 2.251701731241143e-05, | |
| "loss": 0.8682, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 3.399727148703956, | |
| "grad_norm": 0.2451781215581771, | |
| "learning_rate": 2.237981195524432e-05, | |
| "loss": 0.8658, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 3.4051841746248295, | |
| "grad_norm": 0.21236674174467807, | |
| "learning_rate": 2.2242863347861018e-05, | |
| "loss": 0.857, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 3.4106412005457027, | |
| "grad_norm": 0.24488702827145759, | |
| "learning_rate": 2.210617348578649e-05, | |
| "loss": 0.8751, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 3.4160982264665756, | |
| "grad_norm": 0.19931948544158734, | |
| "learning_rate": 2.196974436077543e-05, | |
| "loss": 0.8721, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 3.421555252387449, | |
| "grad_norm": 0.20656952281941016, | |
| "learning_rate": 2.1833577960783266e-05, | |
| "loss": 0.8837, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 3.427012278308322, | |
| "grad_norm": 0.2111569456766622, | |
| "learning_rate": 2.1697676269937178e-05, | |
| "loss": 0.8578, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 3.432469304229195, | |
| "grad_norm": 0.20607917856291608, | |
| "learning_rate": 2.1562041268507157e-05, | |
| "loss": 0.8572, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 3.437926330150068, | |
| "grad_norm": 0.2091976197169965, | |
| "learning_rate": 2.1426674932877156e-05, | |
| "loss": 0.8574, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 3.4433833560709415, | |
| "grad_norm": 0.18994556197856097, | |
| "learning_rate": 2.1291579235516363e-05, | |
| "loss": 0.8732, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 3.4488403819918143, | |
| "grad_norm": 0.20762295552640858, | |
| "learning_rate": 2.115675614495036e-05, | |
| "loss": 0.8643, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 3.4542974079126876, | |
| "grad_norm": 0.21448026859846708, | |
| "learning_rate": 2.1022207625732477e-05, | |
| "loss": 0.8554, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 3.459754433833561, | |
| "grad_norm": 0.22780231585554378, | |
| "learning_rate": 2.0887935638415203e-05, | |
| "loss": 0.8538, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 3.4652114597544337, | |
| "grad_norm": 0.21671493285699542, | |
| "learning_rate": 2.075394213952155e-05, | |
| "loss": 0.8899, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 3.470668485675307, | |
| "grad_norm": 0.25593975320430495, | |
| "learning_rate": 2.0620229081516596e-05, | |
| "loss": 0.87, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 3.47612551159618, | |
| "grad_norm": 0.26516912586558916, | |
| "learning_rate": 2.0486798412779053e-05, | |
| "loss": 0.8776, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 3.481582537517053, | |
| "grad_norm": 0.30391402267040696, | |
| "learning_rate": 2.035365207757279e-05, | |
| "loss": 0.8562, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 3.4870395634379263, | |
| "grad_norm": 0.2728528801053457, | |
| "learning_rate": 2.022079201601856e-05, | |
| "loss": 0.8689, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 3.4924965893587996, | |
| "grad_norm": 0.3399534342641818, | |
| "learning_rate": 2.008822016406578e-05, | |
| "loss": 0.8664, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 3.4979536152796724, | |
| "grad_norm": 0.23908020280596304, | |
| "learning_rate": 1.9955938453464198e-05, | |
| "loss": 0.8514, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 3.5034106412005457, | |
| "grad_norm": 0.2747275787837572, | |
| "learning_rate": 1.9823948811735834e-05, | |
| "loss": 0.864, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 3.508867667121419, | |
| "grad_norm": 0.2675522923940259, | |
| "learning_rate": 1.9692253162146913e-05, | |
| "loss": 0.8726, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 3.5143246930422922, | |
| "grad_norm": 0.21623449651605725, | |
| "learning_rate": 1.9560853423679754e-05, | |
| "loss": 0.8738, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 3.519781718963165, | |
| "grad_norm": 0.2815341384705888, | |
| "learning_rate": 1.9429751511004858e-05, | |
| "loss": 0.8616, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 3.5252387448840383, | |
| "grad_norm": 0.23139814338333467, | |
| "learning_rate": 1.9298949334453026e-05, | |
| "loss": 0.892, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 3.530695770804911, | |
| "grad_norm": 0.22944247960132097, | |
| "learning_rate": 1.916844879998748e-05, | |
| "loss": 0.8786, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 3.5361527967257844, | |
| "grad_norm": 0.2976290320962648, | |
| "learning_rate": 1.9038251809176117e-05, | |
| "loss": 0.8858, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 3.5416098226466577, | |
| "grad_norm": 0.17470790976276032, | |
| "learning_rate": 1.890836025916382e-05, | |
| "loss": 0.8743, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 3.547066848567531, | |
| "grad_norm": 0.2972531009548509, | |
| "learning_rate": 1.8778776042644763e-05, | |
| "loss": 0.8656, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 3.552523874488404, | |
| "grad_norm": 0.19583546204252775, | |
| "learning_rate": 1.864950104783484e-05, | |
| "loss": 0.8715, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 3.557980900409277, | |
| "grad_norm": 0.3083259254483549, | |
| "learning_rate": 1.852053715844424e-05, | |
| "loss": 0.8813, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 3.56343792633015, | |
| "grad_norm": 0.27570698182575126, | |
| "learning_rate": 1.839188625364986e-05, | |
| "loss": 0.8781, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 3.568894952251023, | |
| "grad_norm": 0.2509442468585389, | |
| "learning_rate": 1.8263550208068018e-05, | |
| "loss": 0.8655, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 3.5743519781718964, | |
| "grad_norm": 0.23809316521872612, | |
| "learning_rate": 1.813553089172709e-05, | |
| "loss": 0.852, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 3.5798090040927697, | |
| "grad_norm": 0.26122110657564457, | |
| "learning_rate": 1.800783017004034e-05, | |
| "loss": 0.8493, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 3.5852660300136425, | |
| "grad_norm": 0.2398312011858498, | |
| "learning_rate": 1.7880449903778608e-05, | |
| "loss": 0.8434, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 3.590723055934516, | |
| "grad_norm": 0.2312681495260136, | |
| "learning_rate": 1.7753391949043318e-05, | |
| "loss": 0.875, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 3.5961800818553886, | |
| "grad_norm": 0.2590426316177245, | |
| "learning_rate": 1.7626658157239358e-05, | |
| "loss": 0.8791, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 3.601637107776262, | |
| "grad_norm": 0.2288407400611565, | |
| "learning_rate": 1.7500250375048136e-05, | |
| "loss": 0.8338, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 3.607094133697135, | |
| "grad_norm": 0.26275325952125483, | |
| "learning_rate": 1.7374170444400645e-05, | |
| "loss": 0.8831, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 3.6125511596180084, | |
| "grad_norm": 0.206477324415559, | |
| "learning_rate": 1.7248420202450687e-05, | |
| "loss": 0.8725, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 3.6180081855388813, | |
| "grad_norm": 0.24242230587828761, | |
| "learning_rate": 1.7123001481548017e-05, | |
| "loss": 0.8669, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 3.6234652114597545, | |
| "grad_norm": 0.1974706432396845, | |
| "learning_rate": 1.6997916109211674e-05, | |
| "loss": 0.8524, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 3.6289222373806274, | |
| "grad_norm": 0.22518193266840958, | |
| "learning_rate": 1.6873165908103413e-05, | |
| "loss": 0.8776, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 3.6343792633015006, | |
| "grad_norm": 0.21504454890010272, | |
| "learning_rate": 1.6748752696001047e-05, | |
| "loss": 0.8508, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 3.639836289222374, | |
| "grad_norm": 0.19073320821411494, | |
| "learning_rate": 1.6624678285772006e-05, | |
| "loss": 0.8681, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 3.645293315143247, | |
| "grad_norm": 0.21561726002963372, | |
| "learning_rate": 1.6500944485346972e-05, | |
| "loss": 0.8768, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 3.65075034106412, | |
| "grad_norm": 0.20856126216642382, | |
| "learning_rate": 1.6377553097693435e-05, | |
| "loss": 0.8642, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 3.6562073669849933, | |
| "grad_norm": 0.2503748509640339, | |
| "learning_rate": 1.625450592078949e-05, | |
| "loss": 0.8606, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 3.661664392905866, | |
| "grad_norm": 0.2114606010902437, | |
| "learning_rate": 1.6131804747597616e-05, | |
| "loss": 0.8402, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 3.6671214188267394, | |
| "grad_norm": 0.23321442279608323, | |
| "learning_rate": 1.600945136603855e-05, | |
| "loss": 0.8757, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 3.6725784447476126, | |
| "grad_norm": 0.21972342779108284, | |
| "learning_rate": 1.5887447558965256e-05, | |
| "loss": 0.8768, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 3.678035470668486, | |
| "grad_norm": 0.21435770379641797, | |
| "learning_rate": 1.5765795104136894e-05, | |
| "loss": 0.8776, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 3.6834924965893587, | |
| "grad_norm": 0.26135419893907624, | |
| "learning_rate": 1.5644495774192998e-05, | |
| "loss": 0.8729, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 3.688949522510232, | |
| "grad_norm": 0.21062873576314992, | |
| "learning_rate": 1.552355133662755e-05, | |
| "loss": 0.8789, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 3.694406548431105, | |
| "grad_norm": 0.24113475613862131, | |
| "learning_rate": 1.540296355376332e-05, | |
| "loss": 0.8548, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 3.699863574351978, | |
| "grad_norm": 0.23808416561771722, | |
| "learning_rate": 1.5282734182726113e-05, | |
| "loss": 0.8503, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 3.7053206002728514, | |
| "grad_norm": 0.21962958512416478, | |
| "learning_rate": 1.516286497541919e-05, | |
| "loss": 0.87, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 3.7107776261937246, | |
| "grad_norm": 0.2069921814306847, | |
| "learning_rate": 1.5043357678497734e-05, | |
| "loss": 0.8506, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 3.7162346521145975, | |
| "grad_norm": 0.20788126394222645, | |
| "learning_rate": 1.4924214033343454e-05, | |
| "loss": 0.8746, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 3.7216916780354707, | |
| "grad_norm": 0.20079691647476383, | |
| "learning_rate": 1.4805435776039123e-05, | |
| "loss": 0.8776, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 3.7271487039563436, | |
| "grad_norm": 0.19394731600989945, | |
| "learning_rate": 1.4687024637343337e-05, | |
| "loss": 0.8732, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 3.732605729877217, | |
| "grad_norm": 0.2298056829986313, | |
| "learning_rate": 1.4568982342665274e-05, | |
| "loss": 0.8782, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 3.73806275579809, | |
| "grad_norm": 0.19743254239762598, | |
| "learning_rate": 1.4451310612039566e-05, | |
| "loss": 0.8615, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 3.7435197817189634, | |
| "grad_norm": 0.21442008615184732, | |
| "learning_rate": 1.4334011160101216e-05, | |
| "loss": 0.8755, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 3.748976807639836, | |
| "grad_norm": 0.22137254411181218, | |
| "learning_rate": 1.4217085696060667e-05, | |
| "loss": 0.8513, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 3.7544338335607095, | |
| "grad_norm": 0.23777216991358147, | |
| "learning_rate": 1.4100535923678815e-05, | |
| "loss": 0.8649, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 3.7598908594815823, | |
| "grad_norm": 0.23646741573904131, | |
| "learning_rate": 1.3984363541242218e-05, | |
| "loss": 0.8616, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 3.7653478854024556, | |
| "grad_norm": 0.16903481368402593, | |
| "learning_rate": 1.3868570241538412e-05, | |
| "loss": 0.8729, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 3.770804911323329, | |
| "grad_norm": 0.22814476574366072, | |
| "learning_rate": 1.375315771183114e-05, | |
| "loss": 0.8792, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 3.776261937244202, | |
| "grad_norm": 0.2138508475050253, | |
| "learning_rate": 1.3638127633835808e-05, | |
| "loss": 0.8514, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 3.781718963165075, | |
| "grad_norm": 0.1702228436710734, | |
| "learning_rate": 1.3523481683695034e-05, | |
| "loss": 0.8569, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 3.787175989085948, | |
| "grad_norm": 0.275935108213432, | |
| "learning_rate": 1.3409221531954146e-05, | |
| "loss": 0.8642, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 3.792633015006821, | |
| "grad_norm": 0.18178655458313908, | |
| "learning_rate": 1.329534884353687e-05, | |
| "loss": 0.8617, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 3.7980900409276943, | |
| "grad_norm": 0.19502338070740763, | |
| "learning_rate": 1.3181865277721086e-05, | |
| "loss": 0.8957, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 3.8035470668485676, | |
| "grad_norm": 0.24435912255988515, | |
| "learning_rate": 1.3068772488114619e-05, | |
| "loss": 0.8534, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 3.809004092769441, | |
| "grad_norm": 0.1790945885669287, | |
| "learning_rate": 1.2956072122631173e-05, | |
| "loss": 0.8767, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 3.8144611186903137, | |
| "grad_norm": 0.1799037891667036, | |
| "learning_rate": 1.2843765823466292e-05, | |
| "loss": 0.8817, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 3.819918144611187, | |
| "grad_norm": 0.23900669879884437, | |
| "learning_rate": 1.2731855227073475e-05, | |
| "loss": 0.8577, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 3.8253751705320598, | |
| "grad_norm": 0.1996247040670919, | |
| "learning_rate": 1.2620341964140264e-05, | |
| "loss": 0.8767, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 3.830832196452933, | |
| "grad_norm": 0.19203450849102435, | |
| "learning_rate": 1.2509227659564548e-05, | |
| "loss": 0.8604, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 3.8362892223738063, | |
| "grad_norm": 0.20665454749391476, | |
| "learning_rate": 1.2398513932430847e-05, | |
| "loss": 0.88, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 3.8417462482946796, | |
| "grad_norm": 0.20396894732105844, | |
| "learning_rate": 1.2288202395986723e-05, | |
| "loss": 0.8705, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 3.8472032742155524, | |
| "grad_norm": 0.18826995936772803, | |
| "learning_rate": 1.2178294657619274e-05, | |
| "loss": 0.8938, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 3.8526603001364257, | |
| "grad_norm": 0.25405087801781573, | |
| "learning_rate": 1.2068792318831748e-05, | |
| "loss": 0.8786, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 3.8581173260572985, | |
| "grad_norm": 0.17190779379675156, | |
| "learning_rate": 1.1959696975220156e-05, | |
| "loss": 0.8731, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 3.863574351978172, | |
| "grad_norm": 0.2035464468290126, | |
| "learning_rate": 1.185101021645004e-05, | |
| "loss": 0.8544, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 3.869031377899045, | |
| "grad_norm": 0.1775096388954675, | |
| "learning_rate": 1.1742733626233322e-05, | |
| "loss": 0.8738, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 3.8744884038199183, | |
| "grad_norm": 0.1962472136724812, | |
| "learning_rate": 1.163486878230521e-05, | |
| "loss": 0.8655, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 3.879945429740791, | |
| "grad_norm": 0.19182896784098635, | |
| "learning_rate": 1.1527417256401217e-05, | |
| "loss": 0.8625, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 3.8854024556616644, | |
| "grad_norm": 0.1772961855031946, | |
| "learning_rate": 1.1420380614234277e-05, | |
| "loss": 0.8691, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 3.8908594815825372, | |
| "grad_norm": 0.19619850458804647, | |
| "learning_rate": 1.1313760415471875e-05, | |
| "loss": 0.8613, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 3.8963165075034105, | |
| "grad_norm": 0.2241487330551882, | |
| "learning_rate": 1.1207558213713398e-05, | |
| "loss": 0.8811, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 3.901773533424284, | |
| "grad_norm": 0.158115197476845, | |
| "learning_rate": 1.1101775556467427e-05, | |
| "loss": 0.8665, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 3.907230559345157, | |
| "grad_norm": 0.22420983108248752, | |
| "learning_rate": 1.0996413985129224e-05, | |
| "loss": 0.8894, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 3.91268758526603, | |
| "grad_norm": 0.1610631974388593, | |
| "learning_rate": 1.0891475034958265e-05, | |
| "loss": 0.8623, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 3.918144611186903, | |
| "grad_norm": 0.15967594778215696, | |
| "learning_rate": 1.0786960235055855e-05, | |
| "loss": 0.8882, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 3.923601637107776, | |
| "grad_norm": 0.19319520783715666, | |
| "learning_rate": 1.0682871108342896e-05, | |
| "loss": 0.857, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 3.9290586630286493, | |
| "grad_norm": 0.15711658830357056, | |
| "learning_rate": 1.0579209171537626e-05, | |
| "loss": 0.8576, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 3.9345156889495225, | |
| "grad_norm": 0.1826574560537012, | |
| "learning_rate": 1.0475975935133573e-05, | |
| "loss": 0.8515, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 3.939972714870396, | |
| "grad_norm": 0.16069667852451064, | |
| "learning_rate": 1.0373172903377507e-05, | |
| "loss": 0.8692, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 3.9454297407912686, | |
| "grad_norm": 0.16866708527324803, | |
| "learning_rate": 1.0270801574247553e-05, | |
| "loss": 0.8788, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 3.950886766712142, | |
| "grad_norm": 0.1530248706749601, | |
| "learning_rate": 1.0168863439431336e-05, | |
| "loss": 0.8653, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 3.956343792633015, | |
| "grad_norm": 0.15769274523646723, | |
| "learning_rate": 1.0067359984304286e-05, | |
| "loss": 0.8676, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 3.961800818553888, | |
| "grad_norm": 0.15424491068484994, | |
| "learning_rate": 9.966292687907928e-06, | |
| "loss": 0.853, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 3.9672578444747613, | |
| "grad_norm": 0.15860068150396306, | |
| "learning_rate": 9.865663022928413e-06, | |
| "loss": 0.8605, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 3.9727148703956345, | |
| "grad_norm": 0.1700340270255791, | |
| "learning_rate": 9.76547245567498e-06, | |
| "loss": 0.8682, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 3.9781718963165074, | |
| "grad_norm": 0.15646496600104898, | |
| "learning_rate": 9.66572244605864e-06, | |
| "loss": 0.8555, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 3.9836289222373806, | |
| "grad_norm": 0.1541837949594622, | |
| "learning_rate": 9.566414447570861e-06, | |
| "loss": 0.8697, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 3.989085948158254, | |
| "grad_norm": 0.15359278717510694, | |
| "learning_rate": 9.467549907262476e-06, | |
| "loss": 0.8709, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 3.9945429740791267, | |
| "grad_norm": 0.16227617763941107, | |
| "learning_rate": 9.36913026572248e-06, | |
| "loss": 0.8664, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "grad_norm": 0.3013369962541664, | |
| "learning_rate": 9.271156957057123e-06, | |
| "loss": 1.5027, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 4.005457025920873, | |
| "grad_norm": 0.17363692755284943, | |
| "learning_rate": 9.173631408868976e-06, | |
| "loss": 0.85, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 4.0109140518417465, | |
| "grad_norm": 0.19633143503130812, | |
| "learning_rate": 9.076555042236142e-06, | |
| "loss": 0.8465, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 4.01637107776262, | |
| "grad_norm": 0.19091321505385692, | |
| "learning_rate": 8.979929271691538e-06, | |
| "loss": 0.8522, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 4.021828103683492, | |
| "grad_norm": 0.17707201863006375, | |
| "learning_rate": 8.88375550520232e-06, | |
| "loss": 0.8506, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 4.0272851296043655, | |
| "grad_norm": 0.20661036944371583, | |
| "learning_rate": 8.788035144149299e-06, | |
| "loss": 0.8323, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 4.032742155525239, | |
| "grad_norm": 0.1798715373187608, | |
| "learning_rate": 8.692769583306596e-06, | |
| "loss": 0.8603, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 4.038199181446112, | |
| "grad_norm": 0.15777175533167295, | |
| "learning_rate": 8.597960210821261e-06, | |
| "loss": 0.8279, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 4.043656207366985, | |
| "grad_norm": 0.22344847319218858, | |
| "learning_rate": 8.50360840819307e-06, | |
| "loss": 0.8403, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 4.0491132332878585, | |
| "grad_norm": 0.1816715320324387, | |
| "learning_rate": 8.409715550254396e-06, | |
| "loss": 0.8376, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 4.054570259208731, | |
| "grad_norm": 0.15853189298493875, | |
| "learning_rate": 8.316283005150159e-06, | |
| "loss": 0.8511, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 4.060027285129604, | |
| "grad_norm": 0.21309295778042034, | |
| "learning_rate": 8.223312134317942e-06, | |
| "loss": 0.8586, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 4.0654843110504775, | |
| "grad_norm": 0.1689534095697035, | |
| "learning_rate": 8.130804292468069e-06, | |
| "loss": 0.8117, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 4.070941336971351, | |
| "grad_norm": 0.13977942461160103, | |
| "learning_rate": 8.038760827563936e-06, | |
| "loss": 0.8463, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 4.076398362892224, | |
| "grad_norm": 0.16628039538402697, | |
| "learning_rate": 7.94718308080233e-06, | |
| "loss": 0.8631, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 4.081855388813097, | |
| "grad_norm": 0.1799528768822378, | |
| "learning_rate": 7.856072386593915e-06, | |
| "loss": 0.8504, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 4.08731241473397, | |
| "grad_norm": 0.15460807238366664, | |
| "learning_rate": 7.765430072543748e-06, | |
| "loss": 0.8358, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 4.092769440654843, | |
| "grad_norm": 0.1615993268635049, | |
| "learning_rate": 7.675257459431988e-06, | |
| "loss": 0.8407, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 4.098226466575716, | |
| "grad_norm": 0.15392647616267377, | |
| "learning_rate": 7.585555861194613e-06, | |
| "loss": 0.8334, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 4.1036834924965895, | |
| "grad_norm": 0.14497560365020726, | |
| "learning_rate": 7.496326584904277e-06, | |
| "loss": 0.8362, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 4.109140518417463, | |
| "grad_norm": 0.15870809306205555, | |
| "learning_rate": 7.4075709307512575e-06, | |
| "loss": 0.8534, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 4.114597544338336, | |
| "grad_norm": 0.15582879109817865, | |
| "learning_rate": 7.31929019202454e-06, | |
| "loss": 0.8379, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 4.120054570259208, | |
| "grad_norm": 0.14414796007354208, | |
| "learning_rate": 7.231485655092943e-06, | |
| "loss": 0.8479, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 4.125511596180082, | |
| "grad_norm": 0.15940644110537328, | |
| "learning_rate": 7.144158599386402e-06, | |
| "loss": 0.8708, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 4.130968622100955, | |
| "grad_norm": 0.15073810143549604, | |
| "learning_rate": 7.057310297377298e-06, | |
| "loss": 0.8466, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 4.136425648021828, | |
| "grad_norm": 0.13150444462070968, | |
| "learning_rate": 6.97094201456193e-06, | |
| "loss": 0.8383, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 4.1418826739427015, | |
| "grad_norm": 0.1682880467445867, | |
| "learning_rate": 6.8850550094420675e-06, | |
| "loss": 0.8492, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 4.147339699863575, | |
| "grad_norm": 0.1282839298295238, | |
| "learning_rate": 6.799650533506632e-06, | |
| "loss": 0.86, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 4.152796725784447, | |
| "grad_norm": 0.14656575634048025, | |
| "learning_rate": 6.714729831213422e-06, | |
| "loss": 0.8438, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 4.15825375170532, | |
| "grad_norm": 0.1533256626816939, | |
| "learning_rate": 6.630294139971045e-06, | |
| "loss": 0.8403, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 4.163710777626194, | |
| "grad_norm": 0.1380671788725022, | |
| "learning_rate": 6.546344690120809e-06, | |
| "loss": 0.8521, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 4.169167803547067, | |
| "grad_norm": 0.1203348172009764, | |
| "learning_rate": 6.46288270491886e-06, | |
| "loss": 0.8558, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 4.17462482946794, | |
| "grad_norm": 0.13733698891901175, | |
| "learning_rate": 6.3799094005183095e-06, | |
| "loss": 0.8369, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 4.1800818553888135, | |
| "grad_norm": 0.13345236448780423, | |
| "learning_rate": 6.297425985951546e-06, | |
| "loss": 0.8195, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 4.185538881309686, | |
| "grad_norm": 0.14321025830983808, | |
| "learning_rate": 6.215433663112596e-06, | |
| "loss": 0.8547, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 4.190995907230559, | |
| "grad_norm": 0.13924098390685724, | |
| "learning_rate": 6.133933626739623e-06, | |
| "loss": 0.8255, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 4.196452933151432, | |
| "grad_norm": 0.1261806757953622, | |
| "learning_rate": 6.052927064397534e-06, | |
| "loss": 0.8417, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 4.201909959072306, | |
| "grad_norm": 0.14319746757365104, | |
| "learning_rate": 5.972415156460631e-06, | |
| "loss": 0.836, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 4.207366984993179, | |
| "grad_norm": 0.13488872447248845, | |
| "learning_rate": 5.892399076095454e-06, | |
| "loss": 0.823, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 4.212824010914052, | |
| "grad_norm": 0.14256043162591792, | |
| "learning_rate": 5.812879989243661e-06, | |
| "loss": 0.8292, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 4.218281036834925, | |
| "grad_norm": 0.14297739682297367, | |
| "learning_rate": 5.733859054605044e-06, | |
| "loss": 0.8492, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 4.223738062755798, | |
| "grad_norm": 0.13214429368901517, | |
| "learning_rate": 5.655337423620677e-06, | |
| "loss": 0.8485, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 4.229195088676671, | |
| "grad_norm": 0.1309888866682516, | |
| "learning_rate": 5.5773162404560675e-06, | |
| "loss": 0.8587, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 4.234652114597544, | |
| "grad_norm": 0.5572690678106634, | |
| "learning_rate": 5.499796641984572e-06, | |
| "loss": 0.8711, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 4.240109140518418, | |
| "grad_norm": 0.142020973503765, | |
| "learning_rate": 5.422779757770742e-06, | |
| "loss": 0.8395, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 4.245566166439291, | |
| "grad_norm": 0.1422911809797413, | |
| "learning_rate": 5.346266710053938e-06, | |
| "loss": 0.8371, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 4.251023192360163, | |
| "grad_norm": 0.13566149323405943, | |
| "learning_rate": 5.270258613731929e-06, | |
| "loss": 0.837, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 4.256480218281037, | |
| "grad_norm": 0.1403658624168453, | |
| "learning_rate": 5.194756576344664e-06, | |
| "loss": 0.8472, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 4.26193724420191, | |
| "grad_norm": 0.12851416052393547, | |
| "learning_rate": 5.119761698058158e-06, | |
| "loss": 0.842, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 4.267394270122783, | |
| "grad_norm": 0.1267181054298902, | |
| "learning_rate": 5.045275071648417e-06, | |
| "loss": 0.8486, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 4.272851296043656, | |
| "grad_norm": 0.14230492360141342, | |
| "learning_rate": 4.971297782485534e-06, | |
| "loss": 0.8523, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 4.27830832196453, | |
| "grad_norm": 0.1385288616652607, | |
| "learning_rate": 4.897830908517884e-06, | |
| "loss": 0.8636, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 4.283765347885402, | |
| "grad_norm": 0.12011049268492519, | |
| "learning_rate": 4.824875520256398e-06, | |
| "loss": 0.8527, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 4.289222373806275, | |
| "grad_norm": 0.12515350933731695, | |
| "learning_rate": 4.752432680758978e-06, | |
| "loss": 0.8446, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 4.294679399727149, | |
| "grad_norm": 0.13980725339910127, | |
| "learning_rate": 4.6805034456150145e-06, | |
| "loss": 0.8463, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 4.300136425648022, | |
| "grad_norm": 0.16408440175020803, | |
| "learning_rate": 4.609088862929971e-06, | |
| "loss": 0.8292, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 4.305593451568895, | |
| "grad_norm": 0.12454830323243238, | |
| "learning_rate": 4.5381899733101606e-06, | |
| "loss": 0.8512, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 4.311050477489768, | |
| "grad_norm": 0.13173108796782726, | |
| "learning_rate": 4.4678078098475374e-06, | |
| "loss": 0.8636, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 4.316507503410641, | |
| "grad_norm": 0.1372653963988401, | |
| "learning_rate": 4.39794339810466e-06, | |
| "loss": 0.8546, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 4.321964529331514, | |
| "grad_norm": 0.1454111561073045, | |
| "learning_rate": 4.328597756099764e-06, | |
| "loss": 0.8285, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 4.327421555252387, | |
| "grad_norm": 0.13292312782936808, | |
| "learning_rate": 4.259771894291889e-06, | |
| "loss": 0.844, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 4.332878581173261, | |
| "grad_norm": 0.13113926812021118, | |
| "learning_rate": 4.191466815566223e-06, | |
| "loss": 0.8624, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 4.338335607094134, | |
| "grad_norm": 0.13723911665389374, | |
| "learning_rate": 4.123683515219403e-06, | |
| "loss": 0.866, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 4.343792633015007, | |
| "grad_norm": 0.1274185802096606, | |
| "learning_rate": 4.056422980945076e-06, | |
| "loss": 0.835, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 4.34924965893588, | |
| "grad_norm": 0.15079186440244974, | |
| "learning_rate": 3.98968619281948e-06, | |
| "loss": 0.855, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 4.354706684856753, | |
| "grad_norm": 0.13975651293727326, | |
| "learning_rate": 3.923474123287165e-06, | |
| "loss": 0.838, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 4.360163710777626, | |
| "grad_norm": 0.12596602187941267, | |
| "learning_rate": 3.857787737146841e-06, | |
| "loss": 0.8424, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 4.365620736698499, | |
| "grad_norm": 0.13192216189243972, | |
| "learning_rate": 3.7926279915372877e-06, | |
| "loss": 0.8488, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 4.371077762619373, | |
| "grad_norm": 0.11294983013886342, | |
| "learning_rate": 3.727995835923448e-06, | |
| "loss": 0.8377, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 4.376534788540246, | |
| "grad_norm": 0.12208950440799905, | |
| "learning_rate": 3.663892212082547e-06, | |
| "loss": 0.8451, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 4.381991814461118, | |
| "grad_norm": 0.12500010048360874, | |
| "learning_rate": 3.6003180540904014e-06, | |
| "loss": 0.8565, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 4.387448840381992, | |
| "grad_norm": 0.11635114649417948, | |
| "learning_rate": 3.5372742883078083e-06, | |
| "loss": 0.8577, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 4.392905866302865, | |
| "grad_norm": 0.13168309738770168, | |
| "learning_rate": 3.4747618333670263e-06, | |
| "loss": 0.8469, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 4.398362892223738, | |
| "grad_norm": 0.12825374722074376, | |
| "learning_rate": 3.4127816001583968e-06, | |
| "loss": 0.8563, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 4.403819918144611, | |
| "grad_norm": 0.11335046162731692, | |
| "learning_rate": 3.351334491817113e-06, | |
| "loss": 0.8449, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 4.409276944065485, | |
| "grad_norm": 0.12437993837114146, | |
| "learning_rate": 3.290421403709978e-06, | |
| "loss": 0.8403, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 4.414733969986357, | |
| "grad_norm": 0.12277805128760136, | |
| "learning_rate": 3.230043223422432e-06, | |
| "loss": 0.8476, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 4.42019099590723, | |
| "grad_norm": 0.1229213914667106, | |
| "learning_rate": 3.170200830745569e-06, | |
| "loss": 0.8395, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 4.425648021828104, | |
| "grad_norm": 0.10562773517470789, | |
| "learning_rate": 3.1108950976633624e-06, | |
| "loss": 0.854, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 4.431105047748977, | |
| "grad_norm": 0.11479598409158459, | |
| "learning_rate": 3.052126888339917e-06, | |
| "loss": 0.8356, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 4.43656207366985, | |
| "grad_norm": 0.11116219748454473, | |
| "learning_rate": 2.993897059106914e-06, | |
| "loss": 0.8485, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 4.442019099590723, | |
| "grad_norm": 0.12390938866072493, | |
| "learning_rate": 2.936206458451092e-06, | |
| "loss": 0.8421, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 4.447476125511596, | |
| "grad_norm": 0.11225533367333312, | |
| "learning_rate": 2.879055927001919e-06, | |
| "loss": 0.8326, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 4.452933151432469, | |
| "grad_norm": 0.12120537299560043, | |
| "learning_rate": 2.8224462975193235e-06, | |
| "loss": 0.8268, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 4.458390177353342, | |
| "grad_norm": 0.11161258091057706, | |
| "learning_rate": 2.7663783948815725e-06, | |
| "loss": 0.8404, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 4.463847203274216, | |
| "grad_norm": 0.1115528748397204, | |
| "learning_rate": 2.7108530360732354e-06, | |
| "loss": 0.8712, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 4.469304229195089, | |
| "grad_norm": 0.10731054795986365, | |
| "learning_rate": 2.655871030173307e-06, | |
| "loss": 0.832, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 4.474761255115962, | |
| "grad_norm": 0.1151101918058789, | |
| "learning_rate": 2.6014331783433957e-06, | |
| "loss": 0.8151, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 4.480218281036835, | |
| "grad_norm": 0.12150762806194712, | |
| "learning_rate": 2.5475402738160426e-06, | |
| "loss": 0.8709, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 4.485675306957708, | |
| "grad_norm": 0.11481783034848463, | |
| "learning_rate": 2.4941931018831822e-06, | |
| "loss": 0.8589, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 4.491132332878581, | |
| "grad_norm": 0.12049475973057963, | |
| "learning_rate": 2.4413924398846957e-06, | |
| "loss": 0.8455, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 4.496589358799454, | |
| "grad_norm": 0.1263474250088142, | |
| "learning_rate": 2.3891390571970875e-06, | |
| "loss": 0.8728, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 4.502046384720328, | |
| "grad_norm": 0.11596607365643856, | |
| "learning_rate": 2.3374337152222503e-06, | |
| "loss": 0.8586, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 4.507503410641201, | |
| "grad_norm": 0.11715195084129398, | |
| "learning_rate": 2.2862771673764116e-06, | |
| "loss": 0.8267, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 4.512960436562073, | |
| "grad_norm": 0.10916905965796952, | |
| "learning_rate": 2.2356701590791107e-06, | |
| "loss": 0.8377, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 4.5184174624829465, | |
| "grad_norm": 0.10989084515422318, | |
| "learning_rate": 2.1856134277423634e-06, | |
| "loss": 0.8456, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 4.52387448840382, | |
| "grad_norm": 0.11415331936192948, | |
| "learning_rate": 2.13610770275992e-06, | |
| "loss": 0.8428, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 4.529331514324693, | |
| "grad_norm": 0.11187464286160874, | |
| "learning_rate": 2.087153705496623e-06, | |
| "loss": 0.855, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 4.534788540245566, | |
| "grad_norm": 0.11068155097698111, | |
| "learning_rate": 2.038752149277898e-06, | |
| "loss": 0.8439, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 4.54024556616644, | |
| "grad_norm": 0.11113729825886462, | |
| "learning_rate": 1.9909037393793708e-06, | |
| "loss": 0.8448, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 4.545702592087313, | |
| "grad_norm": 0.13136302301656613, | |
| "learning_rate": 1.943609173016583e-06, | |
| "loss": 0.8413, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 4.551159618008185, | |
| "grad_norm": 0.11768000783406421, | |
| "learning_rate": 1.896869139334827e-06, | |
| "loss": 0.8622, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 4.5566166439290585, | |
| "grad_norm": 0.12259054296868203, | |
| "learning_rate": 1.8506843193991076e-06, | |
| "loss": 0.8287, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 4.562073669849932, | |
| "grad_norm": 0.12126631078010071, | |
| "learning_rate": 1.8050553861842335e-06, | |
| "loss": 0.8568, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 4.567530695770805, | |
| "grad_norm": 0.12030422351425824, | |
| "learning_rate": 1.7599830045649825e-06, | |
| "loss": 0.851, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 4.572987721691678, | |
| "grad_norm": 0.10681554191695464, | |
| "learning_rate": 1.7154678313064455e-06, | |
| "loss": 0.8517, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 4.578444747612551, | |
| "grad_norm": 0.10201295749034411, | |
| "learning_rate": 1.671510515054422e-06, | |
| "loss": 0.8482, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 4.583901773533424, | |
| "grad_norm": 0.10882756711091733, | |
| "learning_rate": 1.6281116963259957e-06, | |
| "loss": 0.8454, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 4.589358799454297, | |
| "grad_norm": 0.11080725448199538, | |
| "learning_rate": 1.585272007500196e-06, | |
| "loss": 0.8228, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 4.5948158253751705, | |
| "grad_norm": 0.1064247093738944, | |
| "learning_rate": 1.5429920728087688e-06, | |
| "loss": 0.852, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 4.600272851296044, | |
| "grad_norm": 0.10624759120004654, | |
| "learning_rate": 1.5012725083270963e-06, | |
| "loss": 0.851, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 4.605729877216917, | |
| "grad_norm": 0.11230799021650528, | |
| "learning_rate": 1.4601139219652205e-06, | |
| "loss": 0.8757, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 4.61118690313779, | |
| "grad_norm": 0.11562162612177647, | |
| "learning_rate": 1.4195169134589715e-06, | |
| "loss": 0.8608, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 4.616643929058663, | |
| "grad_norm": 0.11226676876853169, | |
| "learning_rate": 1.3794820743612404e-06, | |
| "loss": 0.858, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 4.622100954979536, | |
| "grad_norm": 0.11074079216642702, | |
| "learning_rate": 1.340009988033346e-06, | |
| "loss": 0.8457, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 4.627557980900409, | |
| "grad_norm": 0.11782568400682915, | |
| "learning_rate": 1.3011012296365633e-06, | |
| "loss": 0.8486, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 4.6330150068212825, | |
| "grad_norm": 0.1151989040836682, | |
| "learning_rate": 1.2627563661237097e-06, | |
| "loss": 0.8394, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 4.638472032742156, | |
| "grad_norm": 0.10221115562948982, | |
| "learning_rate": 1.2249759562308916e-06, | |
| "loss": 0.8371, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 4.643929058663028, | |
| "grad_norm": 0.10670215373834935, | |
| "learning_rate": 1.1877605504693945e-06, | |
| "loss": 0.8535, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 4.6493860845839015, | |
| "grad_norm": 0.09994602009397384, | |
| "learning_rate": 1.151110691117605e-06, | |
| "loss": 0.8488, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 4.654843110504775, | |
| "grad_norm": 0.10378931622905237, | |
| "learning_rate": 1.1150269122131596e-06, | |
| "loss": 0.8287, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 4.660300136425648, | |
| "grad_norm": 0.11214724803482708, | |
| "learning_rate": 1.0795097395451371e-06, | |
| "loss": 0.8516, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 4.665757162346521, | |
| "grad_norm": 0.10833228711670279, | |
| "learning_rate": 1.044559690646394e-06, | |
| "loss": 0.8346, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 4.6712141882673945, | |
| "grad_norm": 0.1066533070122156, | |
| "learning_rate": 1.0101772747860462e-06, | |
| "loss": 0.8482, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 4.676671214188268, | |
| "grad_norm": 0.1020151272866244, | |
| "learning_rate": 9.763629929620345e-07, | |
| "loss": 0.8365, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 4.68212824010914, | |
| "grad_norm": 0.10837508125099268, | |
| "learning_rate": 9.431173378938108e-07, | |
| "loss": 0.8399, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 4.6875852660300135, | |
| "grad_norm": 0.10123855993633649, | |
| "learning_rate": 9.104407940151882e-07, | |
| "loss": 0.8531, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 4.693042291950887, | |
| "grad_norm": 0.09594740104370002, | |
| "learning_rate": 8.783338374672534e-07, | |
| "loss": 0.8498, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 4.69849931787176, | |
| "grad_norm": 0.10314744523546084, | |
| "learning_rate": 8.467969360914519e-07, | |
| "loss": 0.8417, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 4.703956343792633, | |
| "grad_norm": 0.1070045235427388, | |
| "learning_rate": 8.158305494227492e-07, | |
| "loss": 0.8422, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 4.709413369713506, | |
| "grad_norm": 0.11029818790553916, | |
| "learning_rate": 7.854351286829565e-07, | |
| "loss": 0.8342, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 4.714870395634379, | |
| "grad_norm": 0.10651305096899819, | |
| "learning_rate": 7.556111167741354e-07, | |
| "loss": 0.8238, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 4.720327421555252, | |
| "grad_norm": 0.11712154625311387, | |
| "learning_rate": 7.263589482721678e-07, | |
| "loss": 0.8367, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 4.7257844474761255, | |
| "grad_norm": 0.11677316573492764, | |
| "learning_rate": 6.976790494203967e-07, | |
| "loss": 0.8289, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 4.731241473396999, | |
| "grad_norm": 0.10787652603448866, | |
| "learning_rate": 6.695718381234306e-07, | |
| "loss": 0.8726, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 4.736698499317872, | |
| "grad_norm": 0.10325574905197629, | |
| "learning_rate": 6.420377239410514e-07, | |
| "loss": 0.8545, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 4.742155525238745, | |
| "grad_norm": 0.10313792569802335, | |
| "learning_rate": 6.150771080822581e-07, | |
| "loss": 0.824, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 4.747612551159618, | |
| "grad_norm": 0.10775368857971664, | |
| "learning_rate": 5.886903833994018e-07, | |
| "loss": 0.8471, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 4.753069577080491, | |
| "grad_norm": 0.11948172384943638, | |
| "learning_rate": 5.628779343824642e-07, | |
| "loss": 0.8506, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 4.758526603001364, | |
| "grad_norm": 0.10870113816849278, | |
| "learning_rate": 5.376401371534589e-07, | |
| "loss": 0.8288, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 4.7639836289222375, | |
| "grad_norm": 0.10292503580108092, | |
| "learning_rate": 5.129773594609688e-07, | |
| "loss": 0.8541, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 4.769440654843111, | |
| "grad_norm": 0.11208161020367986, | |
| "learning_rate": 4.888899606747544e-07, | |
| "loss": 0.8348, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 4.774897680763983, | |
| "grad_norm": 0.10001712675478809, | |
| "learning_rate": 4.6537829178053165e-07, | |
| "loss": 0.8376, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 4.780354706684856, | |
| "grad_norm": 0.10211251792319674, | |
| "learning_rate": 4.424426953748784e-07, | |
| "loss": 0.8347, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 4.78581173260573, | |
| "grad_norm": 0.1007729458690365, | |
| "learning_rate": 4.200835056602026e-07, | |
| "loss": 0.82, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 4.791268758526603, | |
| "grad_norm": 0.09603334227951416, | |
| "learning_rate": 3.983010484399019e-07, | |
| "loss": 0.8404, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 4.796725784447476, | |
| "grad_norm": 0.09858339692794088, | |
| "learning_rate": 3.7709564111361176e-07, | |
| "loss": 0.8478, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 4.8021828103683495, | |
| "grad_norm": 0.1018617706221292, | |
| "learning_rate": 3.564675926725647e-07, | |
| "loss": 0.8304, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 4.807639836289223, | |
| "grad_norm": 0.11128093657030838, | |
| "learning_rate": 3.364172036951141e-07, | |
| "loss": 0.8509, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 4.813096862210095, | |
| "grad_norm": 0.10484688329338818, | |
| "learning_rate": 3.169447663423242e-07, | |
| "loss": 0.845, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 4.818553888130968, | |
| "grad_norm": 0.09768509450771364, | |
| "learning_rate": 2.9805056435374677e-07, | |
| "loss": 0.8298, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 4.824010914051842, | |
| "grad_norm": 0.10582684400822324, | |
| "learning_rate": 2.79734873043247e-07, | |
| "loss": 0.8591, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 4.829467939972715, | |
| "grad_norm": 0.09840698310186516, | |
| "learning_rate": 2.619979592950328e-07, | |
| "loss": 0.8289, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 4.834924965893588, | |
| "grad_norm": 0.0984641962271483, | |
| "learning_rate": 2.44840081559734e-07, | |
| "loss": 0.8257, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 4.8403819918144615, | |
| "grad_norm": 0.09623214439665646, | |
| "learning_rate": 2.2826148985065855e-07, | |
| "loss": 0.8317, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 4.845839017735334, | |
| "grad_norm": 0.0979824677211241, | |
| "learning_rate": 2.12262425740124e-07, | |
| "loss": 0.8601, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 4.851296043656207, | |
| "grad_norm": 0.0973877467431622, | |
| "learning_rate": 1.9684312235597635e-07, | |
| "loss": 0.8434, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 4.85675306957708, | |
| "grad_norm": 0.10398288588066054, | |
| "learning_rate": 1.820038043781436e-07, | |
| "loss": 0.8389, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 4.862210095497954, | |
| "grad_norm": 0.10107521480254324, | |
| "learning_rate": 1.6774468803540723e-07, | |
| "loss": 0.8467, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 4.867667121418827, | |
| "grad_norm": 0.09869803265596083, | |
| "learning_rate": 1.5406598110222714e-07, | |
| "loss": 0.8658, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 4.8731241473397, | |
| "grad_norm": 0.09751231833043027, | |
| "learning_rate": 1.4096788289571727e-07, | |
| "loss": 0.8295, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 4.878581173260573, | |
| "grad_norm": 0.10249837791894234, | |
| "learning_rate": 1.2845058427274126e-07, | |
| "loss": 0.8336, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 4.884038199181446, | |
| "grad_norm": 0.1026402652640808, | |
| "learning_rate": 1.1651426762714135e-07, | |
| "loss": 0.847, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 4.889495225102319, | |
| "grad_norm": 0.09511533539202549, | |
| "learning_rate": 1.0515910688706942e-07, | |
| "loss": 0.8365, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 4.894952251023192, | |
| "grad_norm": 0.09998622276585635, | |
| "learning_rate": 9.43852675124468e-08, | |
| "loss": 0.8445, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 4.900409276944066, | |
| "grad_norm": 0.1110312354933706, | |
| "learning_rate": 8.419290649257505e-08, | |
| "loss": 0.842, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 4.905866302864939, | |
| "grad_norm": 0.10338515861192228, | |
| "learning_rate": 7.458217234383114e-08, | |
| "loss": 0.8529, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 4.911323328785811, | |
| "grad_norm": 0.09839456714368296, | |
| "learning_rate": 6.555320510750474e-08, | |
| "loss": 0.866, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 4.916780354706685, | |
| "grad_norm": 0.09788726743721791, | |
| "learning_rate": 5.710613634777318e-08, | |
| "loss": 0.857, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 4.922237380627558, | |
| "grad_norm": 0.10202064595622327, | |
| "learning_rate": 4.92410891497519e-08, | |
| "loss": 0.8388, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 4.927694406548431, | |
| "grad_norm": 0.09501921671184542, | |
| "learning_rate": 4.195817811774028e-08, | |
| "loss": 0.8476, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 4.933151432469304, | |
| "grad_norm": 0.09497532005633476, | |
| "learning_rate": 3.5257509373520793e-08, | |
| "loss": 0.8456, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 4.938608458390178, | |
| "grad_norm": 0.09984936254065943, | |
| "learning_rate": 2.9139180554826894e-08, | |
| "loss": 0.837, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 4.94406548431105, | |
| "grad_norm": 0.09792411355955151, | |
| "learning_rate": 2.3603280813926376e-08, | |
| "loss": 0.8349, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 4.949522510231923, | |
| "grad_norm": 0.10968982169077339, | |
| "learning_rate": 1.864989081630242e-08, | |
| "loss": 0.833, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 4.954979536152797, | |
| "grad_norm": 0.10153023843053058, | |
| "learning_rate": 1.427908273949008e-08, | |
| "loss": 0.8522, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 4.96043656207367, | |
| "grad_norm": 0.10063425319130553, | |
| "learning_rate": 1.049092027203713e-08, | |
| "loss": 0.8565, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 4.965893587994543, | |
| "grad_norm": 0.10080262727182424, | |
| "learning_rate": 7.285458612544815e-09, | |
| "loss": 0.8523, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 4.971350613915416, | |
| "grad_norm": 0.10291513147053918, | |
| "learning_rate": 4.662744468899583e-09, | |
| "loss": 0.8401, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 4.97680763983629, | |
| "grad_norm": 0.10762944685776475, | |
| "learning_rate": 2.6228160575580976e-09, | |
| "loss": 0.8275, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 4.982264665757162, | |
| "grad_norm": 0.5698686611905339, | |
| "learning_rate": 1.1657031030232192e-09, | |
| "loss": 0.8362, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 4.987721691678035, | |
| "grad_norm": 0.099846122094202, | |
| "learning_rate": 2.914268373732654e-10, | |
| "loss": 0.8625, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 4.993178717598909, | |
| "grad_norm": 0.09803245510137683, | |
| "learning_rate": 0.0, | |
| "loss": 0.8503, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 4.993178717598909, | |
| "step": 915, | |
| "total_flos": 1.883960626772548e+19, | |
| "train_loss": 0.9225219208686078, | |
| "train_runtime": 49142.0532, | |
| "train_samples_per_second": 9.54, | |
| "train_steps_per_second": 0.019 | |
| } | |
| ], | |
| "logging_steps": 1.0, | |
| "max_steps": 915, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 5, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 1.883960626772548e+19, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
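
The state above is the standard Trainer log: each `log_history` entry records `step`, `epoch`, `loss`, `grad_norm`, and `learning_rate`, and the final entry carries the run summary (`train_runtime`, `total_flos`, ...) without a `loss` key. Below is a minimal sketch, not part of the original file, showing one way to load and visualize such a log; the path `trainer_state.json` and the use of matplotlib are assumptions for illustration only.

```python
# Minimal sketch: load a Trainer state file like the one above and plot its
# loss curve and learning-rate schedule. Assumes the JSON shown here is saved
# as "trainer_state.json" (hypothetical path) and that matplotlib is installed.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only per-step entries that carry a loss; the trailing summary entry
# (train_runtime, total_flos, ...) has no "loss" key.
history = [e for e in state["log_history"] if "loss" in e]

steps = [e["step"] for e in history]
losses = [e["loss"] for e in history]
lrs = [e["learning_rate"] for e in history]

print(f"{len(history)} logged steps, final loss = {losses[-1]:.4f}")

# Two stacked panels: training loss on top, learning-rate schedule below.
fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True)
ax_loss.plot(steps, losses)
ax_loss.set_ylabel("loss")
ax_lr.plot(steps, lrs)
ax_lr.set_ylabel("learning rate")
ax_lr.set_xlabel("step")
plt.tight_layout()
plt.show()
```

With this run's data the plot would show the warmup-then-decay schedule ending at a learning rate of 0.0 at step 915 and the loss settling around 0.83-0.87 in the final epoch, consistent with the reported `train_loss` of 0.9225 averaged over all 915 steps.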