{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9938900203665986,
  "eval_steps": 500,
  "global_step": 735,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004073319755600814,
      "grad_norm": 23.824964125169807,
      "learning_rate": 1.3513513513513515e-07,
      "loss": 0.5952,
      "step": 1
    },
    {
      "epoch": 0.008146639511201629,
      "grad_norm": 20.792130106975634,
      "learning_rate": 2.702702702702703e-07,
      "loss": 0.5565,
      "step": 2
    },
    {
      "epoch": 0.012219959266802444,
      "grad_norm": 21.573609128940706,
      "learning_rate": 4.0540540540540546e-07,
      "loss": 0.5819,
      "step": 3
    },
    {
      "epoch": 0.016293279022403257,
      "grad_norm": 22.665299039841127,
      "learning_rate": 5.405405405405406e-07,
      "loss": 0.5779,
      "step": 4
    },
    {
      "epoch": 0.020366598778004074,
      "grad_norm": 23.40778249191501,
      "learning_rate": 6.756756756756758e-07,
      "loss": 0.5921,
      "step": 5
    },
    {
      "epoch": 0.024439918533604887,
      "grad_norm": 20.972897978817873,
      "learning_rate": 8.108108108108109e-07,
      "loss": 0.5751,
      "step": 6
    },
    {
      "epoch": 0.028513238289205704,
      "grad_norm": 16.887748894661097,
      "learning_rate": 9.459459459459461e-07,
      "loss": 0.5545,
      "step": 7
    },
    {
      "epoch": 0.032586558044806514,
      "grad_norm": 17.86175699572998,
      "learning_rate": 1.0810810810810812e-06,
      "loss": 0.5376,
      "step": 8
    },
    {
      "epoch": 0.03665987780040733,
      "grad_norm": 17.72230837565129,
      "learning_rate": 1.2162162162162164e-06,
      "loss": 0.5444,
      "step": 9
    },
    {
      "epoch": 0.04073319755600815,
      "grad_norm": 11.181763921686693,
      "learning_rate": 1.3513513513513515e-06,
      "loss": 0.4643,
      "step": 10
    },
    {
      "epoch": 0.04480651731160896,
      "grad_norm": 9.552101095678859,
      "learning_rate": 1.4864864864864868e-06,
      "loss": 0.4833,
      "step": 11
    },
    {
      "epoch": 0.048879837067209775,
      "grad_norm": 10.94833092122812,
      "learning_rate": 1.6216216216216219e-06,
      "loss": 0.4879,
      "step": 12
    },
    {
      "epoch": 0.05295315682281059,
      "grad_norm": 6.381541559122312,
      "learning_rate": 1.756756756756757e-06,
      "loss": 0.4007,
      "step": 13
    },
    {
      "epoch": 0.05702647657841141,
      "grad_norm": 6.832393405606709,
      "learning_rate": 1.8918918918918922e-06,
      "loss": 0.4204,
      "step": 14
    },
    {
      "epoch": 0.06109979633401222,
      "grad_norm": 6.609393676391953,
      "learning_rate": 2.0270270270270273e-06,
      "loss": 0.3908,
      "step": 15
    },
    {
      "epoch": 0.06517311608961303,
      "grad_norm": 6.676921089687173,
      "learning_rate": 2.1621621621621623e-06,
      "loss": 0.3902,
      "step": 16
    },
    {
      "epoch": 0.06924643584521385,
      "grad_norm": 7.5639388377323105,
      "learning_rate": 2.297297297297298e-06,
      "loss": 0.3383,
      "step": 17
    },
    {
      "epoch": 0.07331975560081466,
      "grad_norm": 8.331039733268424,
      "learning_rate": 2.432432432432433e-06,
      "loss": 0.3174,
      "step": 18
    },
    {
      "epoch": 0.07739307535641547,
      "grad_norm": 8.148672780923969,
      "learning_rate": 2.5675675675675675e-06,
      "loss": 0.3672,
      "step": 19
    },
    {
      "epoch": 0.0814663951120163,
      "grad_norm": 6.780483874894268,
      "learning_rate": 2.702702702702703e-06,
      "loss": 0.3116,
      "step": 20
    },
    {
      "epoch": 0.0855397148676171,
      "grad_norm": 4.805864379030838,
      "learning_rate": 2.837837837837838e-06,
      "loss": 0.3396,
      "step": 21
    },
    {
      "epoch": 0.08961303462321792,
      "grad_norm": 3.6861122781357296,
      "learning_rate": 2.9729729729729736e-06,
      "loss": 0.2894,
      "step": 22
    },
    {
      "epoch": 0.09368635437881874,
      "grad_norm": 2.424338342545125,
      "learning_rate": 3.1081081081081082e-06,
      "loss": 0.276,
      "step": 23
    },
    {
      "epoch": 0.09775967413441955,
      "grad_norm": 2.454102047608567,
      "learning_rate": 3.2432432432432437e-06,
      "loss": 0.2522,
      "step": 24
    },
    {
      "epoch": 0.10183299389002037,
      "grad_norm": 2.434667107253859,
      "learning_rate": 3.3783783783783788e-06,
      "loss": 0.2275,
      "step": 25
    },
    {
      "epoch": 0.10590631364562118,
      "grad_norm": 1.9885718007767978,
      "learning_rate": 3.513513513513514e-06,
      "loss": 0.2364,
      "step": 26
    },
    {
      "epoch": 0.109979633401222,
      "grad_norm": 1.8367938144698,
      "learning_rate": 3.648648648648649e-06,
      "loss": 0.2013,
      "step": 27
    },
    {
      "epoch": 0.11405295315682282,
      "grad_norm": 1.7919827386347944,
      "learning_rate": 3.7837837837837844e-06,
      "loss": 0.2055,
      "step": 28
    },
    {
      "epoch": 0.11812627291242363,
      "grad_norm": 1.4735770553928993,
      "learning_rate": 3.918918918918919e-06,
      "loss": 0.1805,
      "step": 29
    },
    {
      "epoch": 0.12219959266802444,
      "grad_norm": 1.6556457927609385,
      "learning_rate": 4.0540540540540545e-06,
      "loss": 0.2087,
      "step": 30
    },
    {
      "epoch": 0.12627291242362526,
      "grad_norm": 1.667948154965,
      "learning_rate": 4.189189189189189e-06,
      "loss": 0.1822,
      "step": 31
    },
    {
      "epoch": 0.13034623217922606,
      "grad_norm": 1.4800790265047246,
      "learning_rate": 4.324324324324325e-06,
      "loss": 0.2199,
      "step": 32
    },
    {
      "epoch": 0.13441955193482688,
      "grad_norm": 1.2160350494301022,
      "learning_rate": 4.45945945945946e-06,
      "loss": 0.1985,
      "step": 33
    },
    {
      "epoch": 0.1384928716904277,
      "grad_norm": 1.4723593669345978,
      "learning_rate": 4.594594594594596e-06,
      "loss": 0.1534,
      "step": 34
    },
    {
      "epoch": 0.1425661914460285,
      "grad_norm": 1.1920087475663694,
      "learning_rate": 4.72972972972973e-06,
      "loss": 0.2011,
      "step": 35
    },
    {
      "epoch": 0.14663951120162932,
      "grad_norm": 1.2138426601362036,
      "learning_rate": 4.864864864864866e-06,
      "loss": 0.1877,
      "step": 36
    },
    {
      "epoch": 0.15071283095723015,
      "grad_norm": 0.9822556889103995,
      "learning_rate": 5e-06,
      "loss": 0.1545,
      "step": 37
    },
    {
      "epoch": 0.15478615071283094,
      "grad_norm": 1.0957325932541686,
      "learning_rate": 5.135135135135135e-06,
      "loss": 0.1767,
      "step": 38
    },
    {
      "epoch": 0.15885947046843177,
      "grad_norm": 1.1389331214419396,
      "learning_rate": 5.2702702702702705e-06,
      "loss": 0.1869,
      "step": 39
    },
    {
      "epoch": 0.1629327902240326,
      "grad_norm": 1.1848275712041765,
      "learning_rate": 5.405405405405406e-06,
      "loss": 0.2045,
      "step": 40
    },
    {
      "epoch": 0.1670061099796334,
      "grad_norm": 1.0219090599565614,
      "learning_rate": 5.540540540540541e-06,
      "loss": 0.1511,
      "step": 41
    },
    {
      "epoch": 0.1710794297352342,
      "grad_norm": 1.037762139895163,
      "learning_rate": 5.675675675675676e-06,
      "loss": 0.1429,
      "step": 42
    },
    {
      "epoch": 0.17515274949083504,
      "grad_norm": 1.2123270149154408,
      "learning_rate": 5.810810810810811e-06,
      "loss": 0.2093,
      "step": 43
    },
    {
      "epoch": 0.17922606924643583,
      "grad_norm": 0.9994332754710848,
      "learning_rate": 5.945945945945947e-06,
      "loss": 0.1482,
      "step": 44
    },
    {
      "epoch": 0.18329938900203666,
      "grad_norm": 0.9759649500221467,
      "learning_rate": 6.081081081081082e-06,
      "loss": 0.169,
      "step": 45
    },
    {
      "epoch": 0.18737270875763748,
      "grad_norm": 0.8890858455338041,
      "learning_rate": 6.2162162162162164e-06,
      "loss": 0.1595,
      "step": 46
    },
    {
      "epoch": 0.19144602851323828,
      "grad_norm": 1.057468233061111,
      "learning_rate": 6.351351351351351e-06,
      "loss": 0.1686,
      "step": 47
    },
    {
      "epoch": 0.1955193482688391,
      "grad_norm": 0.9889012957228345,
      "learning_rate": 6.486486486486487e-06,
      "loss": 0.1279,
      "step": 48
    },
    {
      "epoch": 0.19959266802443992,
      "grad_norm": 1.0465108885119678,
      "learning_rate": 6.621621621621622e-06,
      "loss": 0.1742,
      "step": 49
    },
    {
      "epoch": 0.20366598778004075,
      "grad_norm": 0.8831249195014879,
      "learning_rate": 6.7567567567567575e-06,
      "loss": 0.1334,
      "step": 50
    },
    {
      "epoch": 0.20773930753564154,
      "grad_norm": 0.9819130148333998,
      "learning_rate": 6.891891891891892e-06,
      "loss": 0.1689,
      "step": 51
    },
    {
      "epoch": 0.21181262729124237,
      "grad_norm": 0.8281002397605837,
      "learning_rate": 7.027027027027028e-06,
      "loss": 0.1354,
      "step": 52
    },
    {
      "epoch": 0.2158859470468432,
      "grad_norm": 0.9161399847942696,
      "learning_rate": 7.162162162162163e-06,
      "loss": 0.164,
      "step": 53
    },
    {
      "epoch": 0.219959266802444,
      "grad_norm": 0.9421690374120442,
      "learning_rate": 7.297297297297298e-06,
      "loss": 0.1503,
      "step": 54
    },
    {
      "epoch": 0.2240325865580448,
      "grad_norm": 0.8130020303290227,
      "learning_rate": 7.4324324324324324e-06,
      "loss": 0.1423,
      "step": 55
    },
    {
      "epoch": 0.22810590631364563,
      "grad_norm": 0.9970722886960967,
      "learning_rate": 7.567567567567569e-06,
      "loss": 0.1487,
      "step": 56
    },
    {
      "epoch": 0.23217922606924643,
      "grad_norm": 0.8203952756746478,
      "learning_rate": 7.702702702702704e-06,
      "loss": 0.1267,
      "step": 57
    },
    {
      "epoch": 0.23625254582484725,
      "grad_norm": 1.4183839632663762,
      "learning_rate": 7.837837837837838e-06,
      "loss": 0.1451,
      "step": 58
    },
    {
      "epoch": 0.24032586558044808,
      "grad_norm": 1.12608670520537,
      "learning_rate": 7.972972972972974e-06,
      "loss": 0.1869,
      "step": 59
    },
    {
      "epoch": 0.24439918533604887,
      "grad_norm": 0.9570526231731923,
      "learning_rate": 8.108108108108109e-06,
      "loss": 0.1704,
      "step": 60
    },
    {
      "epoch": 0.2484725050916497,
      "grad_norm": 0.8431424142474028,
      "learning_rate": 8.243243243243245e-06,
      "loss": 0.1346,
      "step": 61
    },
    {
      "epoch": 0.2525458248472505,
      "grad_norm": 0.8274933027878987,
      "learning_rate": 8.378378378378378e-06,
      "loss": 0.1311,
      "step": 62
    },
    {
      "epoch": 0.25661914460285135,
      "grad_norm": 0.8335933384140155,
      "learning_rate": 8.513513513513514e-06,
      "loss": 0.1285,
      "step": 63
    },
    {
      "epoch": 0.2606924643584521,
      "grad_norm": 0.9021941206765985,
      "learning_rate": 8.64864864864865e-06,
      "loss": 0.1382,
      "step": 64
    },
    {
      "epoch": 0.26476578411405294,
      "grad_norm": 0.970438764645265,
      "learning_rate": 8.783783783783785e-06,
      "loss": 0.1996,
      "step": 65
    },
    {
      "epoch": 0.26883910386965376,
      "grad_norm": 0.8614470336810528,
      "learning_rate": 8.91891891891892e-06,
      "loss": 0.1374,
      "step": 66
    },
    {
      "epoch": 0.2729124236252546,
      "grad_norm": 0.9082832110316597,
      "learning_rate": 9.054054054054054e-06,
      "loss": 0.1491,
      "step": 67
    },
    {
      "epoch": 0.2769857433808554,
      "grad_norm": 0.8603140215430741,
      "learning_rate": 9.189189189189191e-06,
      "loss": 0.1378,
      "step": 68
    },
    {
      "epoch": 0.28105906313645623,
      "grad_norm": 0.7282212754890712,
      "learning_rate": 9.324324324324325e-06,
      "loss": 0.1342,
      "step": 69
    },
    {
      "epoch": 0.285132382892057,
      "grad_norm": 0.7180410040610982,
      "learning_rate": 9.45945945945946e-06,
      "loss": 0.1268,
      "step": 70
    },
    {
      "epoch": 0.2892057026476578,
      "grad_norm": 0.8534673201881381,
      "learning_rate": 9.594594594594594e-06,
      "loss": 0.131,
      "step": 71
    },
    {
      "epoch": 0.29327902240325865,
      "grad_norm": 0.7212166839082949,
      "learning_rate": 9.729729729729732e-06,
      "loss": 0.1147,
      "step": 72
    },
    {
      "epoch": 0.2973523421588595,
      "grad_norm": 0.7087737998963768,
      "learning_rate": 9.864864864864865e-06,
      "loss": 0.1301,
      "step": 73
    },
    {
      "epoch": 0.3014256619144603,
      "grad_norm": 0.9206428866359043,
      "learning_rate": 1e-05,
      "loss": 0.1725,
      "step": 74
    },
    {
      "epoch": 0.3054989816700611,
      "grad_norm": 0.7152709572200677,
      "learning_rate": 9.99994352762958e-06,
      "loss": 0.1325,
      "step": 75
    },
    {
      "epoch": 0.3095723014256619,
      "grad_norm": 0.8107989286549796,
      "learning_rate": 9.999774111793974e-06,
      "loss": 0.1542,
      "step": 76
    },
    {
      "epoch": 0.3136456211812627,
      "grad_norm": 1.0045788471120862,
      "learning_rate": 9.999491756320105e-06,
      "loss": 0.1859,
      "step": 77
    },
    {
      "epoch": 0.31771894093686354,
      "grad_norm": 0.8470307772036026,
      "learning_rate": 9.99909646758609e-06,
      "loss": 0.1512,
      "step": 78
    },
    {
      "epoch": 0.32179226069246436,
      "grad_norm": 0.7009995127648928,
      "learning_rate": 9.99858825452108e-06,
      "loss": 0.1167,
      "step": 79
    },
    {
      "epoch": 0.3258655804480652,
      "grad_norm": 0.9262545494889008,
      "learning_rate": 9.997967128605078e-06,
      "loss": 0.1901,
      "step": 80
    },
    {
      "epoch": 0.329938900203666,
      "grad_norm": 0.6482185529135742,
      "learning_rate": 9.997233103868664e-06,
      "loss": 0.1196,
      "step": 81
    },
    {
      "epoch": 0.3340122199592668,
      "grad_norm": 0.8984659045249415,
      "learning_rate": 9.996386196892683e-06,
      "loss": 0.1838,
      "step": 82
    },
    {
      "epoch": 0.3380855397148676,
      "grad_norm": 0.837152179870576,
      "learning_rate": 9.995426426807875e-06,
      "loss": 0.1545,
      "step": 83
    },
    {
      "epoch": 0.3421588594704684,
      "grad_norm": 0.7515065306594377,
      "learning_rate": 9.994353815294438e-06,
      "loss": 0.1376,
      "step": 84
    },
    {
      "epoch": 0.34623217922606925,
      "grad_norm": 0.6920252188664211,
      "learning_rate": 9.993168386581533e-06,
      "loss": 0.1138,
      "step": 85
    },
    {
      "epoch": 0.35030549898167007,
      "grad_norm": 0.7346067171258671,
      "learning_rate": 9.991870167446751e-06,
      "loss": 0.1352,
      "step": 86
    },
    {
      "epoch": 0.3543788187372709,
      "grad_norm": 0.7429787226297623,
      "learning_rate": 9.990459187215498e-06,
      "loss": 0.1241,
      "step": 87
    },
    {
      "epoch": 0.35845213849287166,
      "grad_norm": 0.8389632287689357,
      "learning_rate": 9.98893547776033e-06,
      "loss": 0.1504,
      "step": 88
    },
    {
      "epoch": 0.3625254582484725,
      "grad_norm": 0.9104211244706165,
      "learning_rate": 9.987299073500245e-06,
      "loss": 0.1828,
      "step": 89
    },
    {
      "epoch": 0.3665987780040733,
      "grad_norm": 0.637231976348691,
      "learning_rate": 9.985550011399889e-06,
      "loss": 0.1231,
      "step": 90
    },
    {
      "epoch": 0.37067209775967414,
      "grad_norm": 0.7952233376734515,
      "learning_rate": 9.98368833096874e-06,
      "loss": 0.1545,
      "step": 91
    },
    {
      "epoch": 0.37474541751527496,
      "grad_norm": 0.8178341423940675,
      "learning_rate": 9.981714074260196e-06,
      "loss": 0.1718,
      "step": 92
    },
    {
      "epoch": 0.3788187372708758,
      "grad_norm": 0.7081194927491404,
      "learning_rate": 9.979627285870644e-06,
      "loss": 0.1205,
      "step": 93
    },
    {
      "epoch": 0.38289205702647655,
      "grad_norm": 0.9738374781488583,
      "learning_rate": 9.977428012938437e-06,
      "loss": 0.2153,
      "step": 94
    },
    {
      "epoch": 0.3869653767820774,
      "grad_norm": 0.6944995167591187,
      "learning_rate": 9.975116305142836e-06,
      "loss": 0.1313,
      "step": 95
    },
    {
      "epoch": 0.3910386965376782,
      "grad_norm": 0.6438836352182529,
      "learning_rate": 9.97269221470289e-06,
      "loss": 0.1195,
      "step": 96
    },
    {
      "epoch": 0.395112016293279,
      "grad_norm": 0.626711257476653,
      "learning_rate": 9.97015579637625e-06,
      "loss": 0.1157,
      "step": 97
    },
    {
      "epoch": 0.39918533604887985,
      "grad_norm": 0.7051314562639895,
      "learning_rate": 9.967507107457942e-06,
      "loss": 0.1305,
      "step": 98
    },
    {
      "epoch": 0.40325865580448067,
      "grad_norm": 0.7234548878547821,
      "learning_rate": 9.96474620777906e-06,
      "loss": 0.1449,
      "step": 99
    },
    {
      "epoch": 0.4073319755600815,
      "grad_norm": 0.7939988212630088,
      "learning_rate": 9.961873159705426e-06,
      "loss": 0.1456,
      "step": 100
    },
    {
      "epoch": 0.41140529531568226,
      "grad_norm": 0.8715975324921529,
      "learning_rate": 9.95888802813617e-06,
      "loss": 0.1692,
      "step": 101
    },
    {
      "epoch": 0.4154786150712831,
      "grad_norm": 0.7593874115804871,
      "learning_rate": 9.955790880502278e-06,
      "loss": 0.1283,
      "step": 102
    },
    {
      "epoch": 0.4195519348268839,
      "grad_norm": 0.6617646916228898,
      "learning_rate": 9.952581786765057e-06,
      "loss": 0.1203,
      "step": 103
    },
    {
      "epoch": 0.42362525458248473,
      "grad_norm": 1.029951911898737,
      "learning_rate": 9.949260819414557e-06,
      "loss": 0.1604,
      "step": 104
    },
    {
      "epoch": 0.42769857433808556,
      "grad_norm": 0.7297142208486048,
      "learning_rate": 9.945828053467939e-06,
      "loss": 0.125,
      "step": 105
    },
    {
      "epoch": 0.4317718940936864,
      "grad_norm": 0.849465753641272,
      "learning_rate": 9.942283566467773e-06,
      "loss": 0.1652,
      "step": 106
    },
    {
      "epoch": 0.43584521384928715,
      "grad_norm": 0.9537759676029692,
      "learning_rate": 9.938627438480295e-06,
      "loss": 0.163,
      "step": 107
    },
    {
      "epoch": 0.439918533604888,
      "grad_norm": 0.8518878720261965,
      "learning_rate": 9.93485975209359e-06,
      "loss": 0.1592,
      "step": 108
    },
    {
      "epoch": 0.4439918533604888,
      "grad_norm": 0.8378599190963338,
      "learning_rate": 9.930980592415728e-06,
      "loss": 0.1595,
      "step": 109
    },
    {
      "epoch": 0.4480651731160896,
      "grad_norm": 1.0402939000365234,
      "learning_rate": 9.926990047072849e-06,
      "loss": 0.2398,
      "step": 110
    },
    {
      "epoch": 0.45213849287169044,
      "grad_norm": 0.6993227464857973,
      "learning_rate": 9.922888206207174e-06,
      "loss": 0.1234,
      "step": 111
    },
    {
      "epoch": 0.45621181262729127,
      "grad_norm": 0.7456867616976194,
      "learning_rate": 9.918675162474974e-06,
      "loss": 0.1188,
      "step": 112
    },
    {
      "epoch": 0.46028513238289204,
      "grad_norm": 0.8390521662178461,
      "learning_rate": 9.914351011044472e-06,
      "loss": 0.1735,
      "step": 113
    },
    {
      "epoch": 0.46435845213849286,
      "grad_norm": 0.6503558558436285,
      "learning_rate": 9.909915849593705e-06,
      "loss": 0.1145,
      "step": 114
    },
    {
      "epoch": 0.4684317718940937,
      "grad_norm": 0.7135233566145218,
      "learning_rate": 9.905369778308304e-06,
      "loss": 0.1239,
      "step": 115
    },
    {
      "epoch": 0.4725050916496945,
      "grad_norm": 0.7915418441722256,
      "learning_rate": 9.900712899879237e-06,
      "loss": 0.1612,
      "step": 116
    },
    {
      "epoch": 0.47657841140529533,
      "grad_norm": 0.8513282101582397,
      "learning_rate": 9.895945319500488e-06,
      "loss": 0.1452,
      "step": 117
    },
    {
      "epoch": 0.48065173116089616,
      "grad_norm": 0.7135105617487806,
      "learning_rate": 9.891067144866687e-06,
      "loss": 0.1422,
      "step": 118
    },
    {
      "epoch": 0.4847250509164969,
      "grad_norm": 0.6323587428292526,
      "learning_rate": 9.886078486170665e-06,
      "loss": 0.1095,
      "step": 119
    },
    {
      "epoch": 0.48879837067209775,
      "grad_norm": 0.9065653266934791,
      "learning_rate": 9.880979456100974e-06,
      "loss": 0.1431,
      "step": 120
    },
    {
      "epoch": 0.49287169042769857,
      "grad_norm": 0.7751685107169846,
      "learning_rate": 9.875770169839343e-06,
      "loss": 0.1329,
      "step": 121
    },
    {
      "epoch": 0.4969450101832994,
      "grad_norm": 0.7775568356730066,
      "learning_rate": 9.870450745058066e-06,
      "loss": 0.1322,
      "step": 122
    },
    {
      "epoch": 0.5010183299389002,
      "grad_norm": 0.7341546572224038,
      "learning_rate": 9.865021301917358e-06,
      "loss": 0.1348,
      "step": 123
    },
    {
      "epoch": 0.505091649694501,
      "grad_norm": 0.6562007228286519,
      "learning_rate": 9.859481963062623e-06,
      "loss": 0.1136,
      "step": 124
    },
    {
      "epoch": 0.5091649694501018,
      "grad_norm": 0.6319651747837767,
      "learning_rate": 9.853832853621703e-06,
      "loss": 0.1272,
      "step": 125
    },
    {
      "epoch": 0.5132382892057027,
      "grad_norm": 0.6383678064695314,
      "learning_rate": 9.848074101202037e-06,
      "loss": 0.1254,
      "step": 126
    },
    {
      "epoch": 0.5173116089613035,
      "grad_norm": 0.6126258779026864,
      "learning_rate": 9.842205835887785e-06,
      "loss": 0.1218,
      "step": 127
    },
    {
      "epoch": 0.5213849287169042,
      "grad_norm": 0.6794468649647263,
      "learning_rate": 9.836228190236892e-06,
      "loss": 0.1421,
      "step": 128
    },
    {
      "epoch": 0.5254582484725051,
      "grad_norm": 0.7544742724450487,
      "learning_rate": 9.83014129927808e-06,
      "loss": 0.1357,
      "step": 129
    },
    {
      "epoch": 0.5295315682281059,
      "grad_norm": 0.7669942329931978,
      "learning_rate": 9.823945300507815e-06,
      "loss": 0.1436,
      "step": 130
    },
    {
      "epoch": 0.5336048879837068,
      "grad_norm": 0.7762330832811671,
      "learning_rate": 9.817640333887194e-06,
      "loss": 0.1405,
      "step": 131
    },
    {
      "epoch": 0.5376782077393075,
      "grad_norm": 0.615226329982223,
      "learning_rate": 9.81122654183878e-06,
      "loss": 0.1121,
      "step": 132
    },
    {
      "epoch": 0.5417515274949084,
      "grad_norm": 0.5964775286827279,
      "learning_rate": 9.804704069243389e-06,
      "loss": 0.1223,
      "step": 133
    },
    {
      "epoch": 0.5458248472505092,
      "grad_norm": 0.6225978618260937,
      "learning_rate": 9.798073063436815e-06,
      "loss": 0.1142,
      "step": 134
    },
    {
      "epoch": 0.5498981670061099,
      "grad_norm": 0.815267755328249,
      "learning_rate": 9.791333674206507e-06,
      "loss": 0.1815,
      "step": 135
    },
    {
      "epoch": 0.5539714867617108,
      "grad_norm": 0.5825808448772949,
      "learning_rate": 9.784486053788179e-06,
      "loss": 0.1086,
      "step": 136
    },
    {
      "epoch": 0.5580448065173116,
      "grad_norm": 1.3225629142653186,
      "learning_rate": 9.77753035686237e-06,
      "loss": 0.1471,
      "step": 137
    },
    {
      "epoch": 0.5621181262729125,
      "grad_norm": 0.7650343416921964,
      "learning_rate": 9.770466740550963e-06,
      "loss": 0.1591,
      "step": 138
    },
    {
      "epoch": 0.5661914460285132,
      "grad_norm": 0.6539778327128881,
      "learning_rate": 9.763295364413616e-06,
      "loss": 0.1231,
      "step": 139
    },
    {
      "epoch": 0.570264765784114,
      "grad_norm": 0.7570996654462402,
      "learning_rate": 9.756016390444174e-06,
      "loss": 0.1433,
      "step": 140
    },
    {
      "epoch": 0.5743380855397149,
      "grad_norm": 0.6565421225707244,
      "learning_rate": 9.748629983067004e-06,
      "loss": 0.1348,
      "step": 141
    },
    {
      "epoch": 0.5784114052953157,
      "grad_norm": 0.898400206098221,
      "learning_rate": 9.741136309133279e-06,
      "loss": 0.1741,
      "step": 142
    },
    {
      "epoch": 0.5824847250509165,
      "grad_norm": 0.6773875733131056,
      "learning_rate": 9.733535537917211e-06,
      "loss": 0.1192,
      "step": 143
    },
    {
      "epoch": 0.5865580448065173,
      "grad_norm": 0.6499907493304559,
      "learning_rate": 9.725827841112226e-06,
      "loss": 0.1221,
      "step": 144
    },
    {
      "epoch": 0.5906313645621182,
      "grad_norm": 0.6724188756258436,
      "learning_rate": 9.718013392827087e-06,
      "loss": 0.115,
      "step": 145
    },
    {
      "epoch": 0.594704684317719,
      "grad_norm": 0.8749517169859614,
      "learning_rate": 9.710092369581966e-06,
      "loss": 0.1564,
      "step": 146
    },
    {
      "epoch": 0.5987780040733197,
      "grad_norm": 0.6556157333282042,
      "learning_rate": 9.702064950304442e-06,
      "loss": 0.1243,
      "step": 147
    },
    {
      "epoch": 0.6028513238289206,
      "grad_norm": 0.6451437171803145,
      "learning_rate": 9.693931316325473e-06,
      "loss": 0.0974,
      "step": 148
    },
    {
      "epoch": 0.6069246435845214,
      "grad_norm": 0.5696644665405343,
      "learning_rate": 9.685691651375297e-06,
      "loss": 0.1014,
      "step": 149
    },
    {
      "epoch": 0.6109979633401222,
      "grad_norm": 0.551827035749603,
      "learning_rate": 9.677346141579277e-06,
      "loss": 0.1044,
      "step": 150
    },
    {
      "epoch": 0.615071283095723,
      "grad_norm": 0.6992001259298825,
      "learning_rate": 9.668894975453705e-06,
      "loss": 0.1563,
      "step": 151
    },
    {
      "epoch": 0.6191446028513238,
      "grad_norm": 0.8242017570338659,
      "learning_rate": 9.66033834390153e-06,
      "loss": 0.1387,
      "step": 152
    },
    {
      "epoch": 0.6232179226069247,
      "grad_norm": 0.6910389197788058,
      "learning_rate": 9.65167644020806e-06,
      "loss": 0.1281,
      "step": 153
    },
    {
      "epoch": 0.6272912423625254,
      "grad_norm": 0.6486529284986234,
      "learning_rate": 9.64290946003659e-06,
      "loss": 0.104,
      "step": 154
    },
    {
      "epoch": 0.6313645621181263,
      "grad_norm": 0.6188812360416122,
      "learning_rate": 9.63403760142398e-06,
      "loss": 0.1047,
      "step": 155
    },
    {
      "epoch": 0.6354378818737271,
      "grad_norm": 0.7061121651276522,
      "learning_rate": 9.625061064776183e-06,
      "loss": 0.1152,
      "step": 156
    },
    {
      "epoch": 0.639511201629328,
      "grad_norm": 0.5556563413441589,
      "learning_rate": 9.61598005286372e-06,
      "loss": 0.0972,
      "step": 157
    },
    {
      "epoch": 0.6435845213849287,
      "grad_norm": 0.781365328802982,
      "learning_rate": 9.606794770817102e-06,
      "loss": 0.1843,
      "step": 158
    },
    {
      "epoch": 0.6476578411405295,
      "grad_norm": 0.8397251470705924,
      "learning_rate": 9.597505426122184e-06,
      "loss": 0.1601,
      "step": 159
    },
    {
      "epoch": 0.6517311608961304,
      "grad_norm": 0.8321372569992275,
      "learning_rate": 9.588112228615495e-06,
      "loss": 0.1775,
      "step": 160
    },
    {
      "epoch": 0.6558044806517311,
      "grad_norm": 0.7488121903522112,
      "learning_rate": 9.57861539047949e-06,
      "loss": 0.1417,
      "step": 161
    },
    {
      "epoch": 0.659877800407332,
      "grad_norm": 0.9083713708119657,
      "learning_rate": 9.569015126237744e-06,
      "loss": 0.157,
      "step": 162
    },
    {
      "epoch": 0.6639511201629328,
      "grad_norm": 0.628996693890361,
      "learning_rate": 9.559311652750135e-06,
      "loss": 0.1213,
      "step": 163
    },
    {
      "epoch": 0.6680244399185336,
      "grad_norm": 0.6704828711803635,
      "learning_rate": 9.549505189207924e-06,
      "loss": 0.1024,
      "step": 164
    },
    {
      "epoch": 0.6720977596741344,
      "grad_norm": 0.7611412224584696,
      "learning_rate": 9.539595957128803e-06,
      "loss": 0.1723,
      "step": 165
    },
    {
      "epoch": 0.6761710794297352,
      "grad_norm": 0.5999279860860178,
      "learning_rate": 9.529584180351902e-06,
      "loss": 0.1204,
      "step": 166
    },
    {
      "epoch": 0.6802443991853361,
      "grad_norm": 0.7260348130676614,
      "learning_rate": 9.519470085032733e-06,
      "loss": 0.1304,
      "step": 167
    },
    {
      "epoch": 0.6843177189409368,
      "grad_norm": 0.6547880435271723,
      "learning_rate": 9.509253899638066e-06,
      "loss": 0.1086,
      "step": 168
    },
    {
      "epoch": 0.6883910386965377,
      "grad_norm": 0.8283017690389637,
      "learning_rate": 9.498935854940785e-06,
      "loss": 0.1706,
      "step": 169
    },
    {
      "epoch": 0.6924643584521385,
      "grad_norm": 0.5995138124919688,
      "learning_rate": 9.488516184014667e-06,
      "loss": 0.1109,
      "step": 170
    },
    {
      "epoch": 0.6965376782077393,
      "grad_norm": 0.7482109441138194,
      "learning_rate": 9.477995122229117e-06,
      "loss": 0.1563,
      "step": 171
    },
    {
      "epoch": 0.7006109979633401,
      "grad_norm": 0.5565696562628861,
      "learning_rate": 9.467372907243858e-06,
      "loss": 0.108,
      "step": 172
    },
    {
      "epoch": 0.7046843177189409,
      "grad_norm": 0.6650405645513844,
      "learning_rate": 9.456649779003548e-06,
      "loss": 0.1213,
      "step": 173
    },
    {
      "epoch": 0.7087576374745418,
      "grad_norm": 0.669848228626357,
      "learning_rate": 9.44582597973238e-06,
      "loss": 0.1342,
      "step": 174
    },
    {
      "epoch": 0.7128309572301426,
      "grad_norm": 0.7347508918820195,
      "learning_rate": 9.434901753928593e-06,
      "loss": 0.1483,
      "step": 175
    },
    {
      "epoch": 0.7169042769857433,
      "grad_norm": 0.6391827423416541,
      "learning_rate": 9.423877348358956e-06,
      "loss": 0.1034,
      "step": 176
    },
    {
      "epoch": 0.7209775967413442,
      "grad_norm": 0.5605526677984427,
      "learning_rate": 9.4127530120532e-06,
      "loss": 0.1097,
      "step": 177
    },
    {
      "epoch": 0.725050916496945,
      "grad_norm": 0.9156051771876055,
      "learning_rate": 9.401528996298375e-06,
      "loss": 0.1605,
      "step": 178
    },
    {
      "epoch": 0.7291242362525459,
      "grad_norm": 0.6724559028773452,
      "learning_rate": 9.390205554633193e-06,
      "loss": 0.1109,
      "step": 179
    },
    {
      "epoch": 0.7331975560081466,
      "grad_norm": 0.8043278205356049,
      "learning_rate": 9.378782942842292e-06,
      "loss": 0.1416,
      "step": 180
    },
    {
      "epoch": 0.7372708757637475,
      "grad_norm": 0.9524248406588993,
      "learning_rate": 9.367261418950459e-06,
      "loss": 0.1912,
      "step": 181
    },
    {
      "epoch": 0.7413441955193483,
      "grad_norm": 0.859134432298125,
      "learning_rate": 9.355641243216798e-06,
      "loss": 0.1767,
      "step": 182
    },
    {
      "epoch": 0.745417515274949,
      "grad_norm": 0.7115286274882953,
      "learning_rate": 9.343922678128854e-06,
      "loss": 0.1143,
      "step": 183
    },
    {
      "epoch": 0.7494908350305499,
      "grad_norm": 0.6124872270306644,
      "learning_rate": 9.332105988396692e-06,
      "loss": 0.1268,
      "step": 184
    },
    {
      "epoch": 0.7535641547861507,
      "grad_norm": 0.7046380669848973,
      "learning_rate": 9.3201914409469e-06,
      "loss": 0.1378,
      "step": 185
    },
    {
      "epoch": 0.7576374745417516,
      "grad_norm": 0.6591259043283353,
      "learning_rate": 9.308179304916573e-06,
      "loss": 0.1187,
      "step": 186
    },
    {
      "epoch": 0.7617107942973523,
      "grad_norm": 0.5582584428195212,
      "learning_rate": 9.29606985164723e-06,
      "loss": 0.1056,
      "step": 187
    },
    {
      "epoch": 0.7657841140529531,
      "grad_norm": 0.7057413446592607,
      "learning_rate": 9.283863354678683e-06,
      "loss": 0.1388,
      "step": 188
    },
    {
      "epoch": 0.769857433808554,
      "grad_norm": 0.6651867985544857,
      "learning_rate": 9.27156008974286e-06,
      "loss": 0.1349,
      "step": 189
    },
    {
      "epoch": 0.7739307535641547,
      "grad_norm": 0.5490316291784278,
      "learning_rate": 9.259160334757575e-06,
      "loss": 0.1093,
      "step": 190
    },
    {
      "epoch": 0.7780040733197556,
      "grad_norm": 0.6693977291345852,
      "learning_rate": 9.246664369820249e-06,
      "loss": 0.1363,
      "step": 191
    },
    {
      "epoch": 0.7820773930753564,
      "grad_norm": 1.1425683638924742,
      "learning_rate": 9.234072477201588e-06,
      "loss": 0.2333,
      "step": 192
    },
    {
      "epoch": 0.7861507128309573,
      "grad_norm": 0.7910966546717793,
      "learning_rate": 9.2213849413392e-06,
      "loss": 0.132,
      "step": 193
    },
    {
      "epoch": 0.790224032586558,
      "grad_norm": 0.5583844849373505,
      "learning_rate": 9.208602048831176e-06,
      "loss": 0.1078,
      "step": 194
    },
    {
      "epoch": 0.7942973523421588,
      "grad_norm": 0.6362398263327462,
      "learning_rate": 9.195724088429611e-06,
      "loss": 0.1125,
      "step": 195
    },
    {
      "epoch": 0.7983706720977597,
      "grad_norm": 0.6312268856872302,
      "learning_rate": 9.18275135103409e-06,
      "loss": 0.1205,
      "step": 196
    },
    {
      "epoch": 0.8024439918533605,
      "grad_norm": 0.709100148226366,
      "learning_rate": 9.169684129685099e-06,
      "loss": 0.137,
      "step": 197
    },
    {
      "epoch": 0.8065173116089613,
      "grad_norm": 0.8585030567898706,
      "learning_rate": 9.156522719557428e-06,
      "loss": 0.1841,
      "step": 198
    },
    {
      "epoch": 0.8105906313645621,
      "grad_norm": 0.7720024680339406,
      "learning_rate": 9.143267417953486e-06,
      "loss": 0.1611,
      "step": 199
    },
    {
      "epoch": 0.814663951120163,
      "grad_norm": 0.766858183204215,
      "learning_rate": 9.129918524296596e-06,
      "loss": 0.183,
      "step": 200
    },
    {
      "epoch": 0.8187372708757638,
      "grad_norm": 0.6168849883764692,
      "learning_rate": 9.11647634012422e-06,
      "loss": 0.1063,
      "step": 201
    },
    {
      "epoch": 0.8228105906313645,
      "grad_norm": 0.5983223736667403,
      "learning_rate": 9.102941169081167e-06,
      "loss": 0.1213,
      "step": 202
    },
    {
      "epoch": 0.8268839103869654,
      "grad_norm": 0.6795517747355136,
      "learning_rate": 9.089313316912708e-06,
      "loss": 0.1443,
      "step": 203
    },
    {
      "epoch": 0.8309572301425662,
      "grad_norm": 0.5950715141515012,
      "learning_rate": 9.075593091457692e-06,
      "loss": 0.1248,
      "step": 204
    },
    {
      "epoch": 0.835030549898167,
      "grad_norm": 0.6937674892288094,
      "learning_rate": 9.061780802641582e-06,
      "loss": 0.1225,
      "step": 205
    },
    {
      "epoch": 0.8391038696537678,
      "grad_norm": 0.564982239555292,
      "learning_rate": 9.047876762469451e-06,
      "loss": 0.1098,
      "step": 206
    },
    {
      "epoch": 0.8431771894093686,
      "grad_norm": 0.555579073564413,
      "learning_rate": 9.033881285018945e-06,
      "loss": 0.1103,
      "step": 207
    },
    {
      "epoch": 0.8472505091649695,
      "grad_norm": 0.7286989593147949,
      "learning_rate": 9.019794686433174e-06,
      "loss": 0.1585,
      "step": 208
    },
    {
      "epoch": 0.8513238289205702,
      "grad_norm": 0.6199327825385911,
      "learning_rate": 9.005617284913586e-06,
      "loss": 0.1033,
      "step": 209
    },
    {
      "epoch": 0.8553971486761711,
      "grad_norm": 1.0746055931474208,
      "learning_rate": 8.991349400712772e-06,
      "loss": 0.1198,
      "step": 210
    },
    {
      "epoch": 0.8594704684317719,
      "grad_norm": 0.6958942382972106,
      "learning_rate": 8.976991356127225e-06,
      "loss": 0.1296,
      "step": 211
    },
    {
      "epoch": 0.8635437881873728,
      "grad_norm": 0.5191305720079316,
      "learning_rate": 8.962543475490068e-06,
      "loss": 0.1088,
      "step": 212
    },
    {
      "epoch": 0.8676171079429735,
      "grad_norm": 0.5607712315877025,
      "learning_rate": 8.948006085163735e-06,
      "loss": 0.1098,
      "step": 213
    },
    {
      "epoch": 0.8716904276985743,
      "grad_norm": 0.6059889598401135,
      "learning_rate": 8.933379513532575e-06,
      "loss": 0.1099,
      "step": 214
    },
    {
      "epoch": 0.8757637474541752,
      "grad_norm": 0.6476798464502789,
      "learning_rate": 8.91866409099546e-06,
      "loss": 0.1094,
      "step": 215
    },
    {
      "epoch": 0.879837067209776,
      "grad_norm": 0.5826987697479398,
      "learning_rate": 8.903860149958308e-06,
      "loss": 0.1091,
      "step": 216
    },
    {
      "epoch": 0.8839103869653768,
      "grad_norm": 0.7303426494343233,
      "learning_rate": 8.888968024826575e-06,
      "loss": 0.1331,
      "step": 217
    },
    {
      "epoch": 0.8879837067209776,
      "grad_norm": 0.6718807308806929,
      "learning_rate": 8.873988051997702e-06,
      "loss": 0.1057,
      "step": 218
    },
    {
      "epoch": 0.8920570264765784,
      "grad_norm": 0.6855817378635226,
      "learning_rate": 8.85892056985352e-06,
      "loss": 0.1352,
      "step": 219
    },
    {
      "epoch": 0.8961303462321792,
      "grad_norm": 0.9328805924079209,
      "learning_rate": 8.8437659187526e-06,
      "loss": 0.2314,
      "step": 220
    },
    {
      "epoch": 0.90020366598778,
      "grad_norm": 1.299377120456489,
      "learning_rate": 8.828524441022575e-06,
      "loss": 0.1757,
      "step": 221
    },
    {
      "epoch": 0.9042769857433809,
      "grad_norm": 0.5748370208943465,
      "learning_rate": 8.813196480952393e-06,
      "loss": 0.1069,
      "step": 222
    },
    {
      "epoch": 0.9083503054989817,
      "grad_norm": 0.5812871873714258,
      "learning_rate": 8.797782384784549e-06,
      "loss": 0.1226,
      "step": 223
    },
    {
      "epoch": 0.9124236252545825,
      "grad_norm": 0.5370517245896169,
      "learning_rate": 8.782282500707262e-06,
      "loss": 0.1064,
      "step": 224
    },
    {
      "epoch": 0.9164969450101833,
      "grad_norm": 0.6224076112187877,
      "learning_rate": 8.766697178846611e-06,
      "loss": 0.1256,
      "step": 225
    },
    {
      "epoch": 0.9205702647657841,
      "grad_norm": 0.7946201171094479,
      "learning_rate": 8.751026771258622e-06,
      "loss": 0.1347,
      "step": 226
    },
    {
      "epoch": 0.924643584521385,
      "grad_norm": 0.603334336971077,
      "learning_rate": 8.735271631921322e-06,
      "loss": 0.1102,
      "step": 227
    },
    {
      "epoch": 0.9287169042769857,
      "grad_norm": 0.697935818051858,
      "learning_rate": 8.719432116726738e-06,
      "loss": 0.1391,
      "step": 228
    },
    {
      "epoch": 0.9327902240325866,
      "grad_norm": 0.666582821831994,
      "learning_rate": 8.703508583472855e-06,
      "loss": 0.1488,
      "step": 229
    },
    {
      "epoch": 0.9368635437881874,
      "grad_norm": 0.7034782472780129,
      "learning_rate": 8.68750139185554e-06,
      "loss": 0.1329,
      "step": 230
    },
    {
      "epoch": 0.9409368635437881,
      "grad_norm": 0.5918502543937683,
      "learning_rate": 8.671410903460416e-06,
      "loss": 0.1232,
      "step": 231
    },
    {
      "epoch": 0.945010183299389,
      "grad_norm": 0.8353895769621154,
      "learning_rate": 8.65523748175469e-06,
      "loss": 0.1634,
      "step": 232
    },
    {
      "epoch": 0.9490835030549898,
      "grad_norm": 0.9008774921594447,
      "learning_rate": 8.63898149207895e-06,
      "loss": 0.1731,
      "step": 233
    },
    {
      "epoch": 0.9531568228105907,
      "grad_norm": 0.7069993957977634,
      "learning_rate": 8.622643301638902e-06,
      "loss": 0.1353,
      "step": 234
    },
    {
      "epoch": 0.9572301425661914,
      "grad_norm": 0.5454997063658868,
      "learning_rate": 8.606223279497081e-06,
      "loss": 0.0998,
      "step": 235
    },
    {
      "epoch": 0.9613034623217923,
      "grad_norm": 0.5844763287601724,
      "learning_rate": 8.589721796564521e-06,
      "loss": 0.1,
      "step": 236
    },
    {
      "epoch": 0.9653767820773931,
      "grad_norm": 0.646678159444578,
      "learning_rate": 8.57313922559236e-06,
      "loss": 0.1228,
      "step": 237
    },
    {
      "epoch": 0.9694501018329938,
      "grad_norm": 0.7159700211118728,
      "learning_rate": 8.556475941163436e-06,
      "loss": 0.1138,
      "step": 238
    },
    {
      "epoch": 0.9735234215885947,
      "grad_norm": 0.8296378950623643,
      "learning_rate": 8.539732319683817e-06,
      "loss": 0.1561,
      "step": 239
    },
    {
      "epoch": 0.9775967413441955,
      "grad_norm": 0.6278151013161263,
      "learning_rate": 8.5229087393743e-06,
      "loss": 0.1156,
      "step": 240
    },
    {
      "epoch": 0.9816700610997964,
      "grad_norm": 0.7332425478131959,
      "learning_rate": 8.506005580261872e-06,
      "loss": 0.1499,
      "step": 241
    },
    {
      "epoch": 0.9857433808553971,
      "grad_norm": 0.5908395817715482,
      "learning_rate": 8.489023224171114e-06,
      "loss": 0.1158,
      "step": 242
    },
    {
      "epoch": 0.9898167006109979,
      "grad_norm": 0.5274990007802748,
      "learning_rate": 8.47196205471559e-06,
      "loss": 0.0892,
      "step": 243
    },
    {
      "epoch": 0.9938900203665988,
      "grad_norm": 0.7634554290191897,
      "learning_rate": 8.45482245728917e-06,
      "loss": 0.1703,
      "step": 244
    },
    {
      "epoch": 0.9979633401221996,
      "grad_norm": 1.1267985982905535,
      "learning_rate": 8.437604819057336e-06,
      "loss": 0.1478,
      "step": 245
    },
    {
      "epoch": 1.0020366598778003,
      "grad_norm": 0.6187056605160776,
      "learning_rate": 8.420309528948422e-06,
      "loss": 0.1129,
      "step": 246
    },
    {
      "epoch": 1.0061099796334012,
      "grad_norm": 0.5913495378935864,
      "learning_rate": 8.40293697764484e-06,
      "loss": 0.0908,
      "step": 247
    },
    {
      "epoch": 1.010183299389002,
      "grad_norm": 0.5616368536142738,
      "learning_rate": 8.385487557574253e-06,
      "loss": 0.0933,
      "step": 248
    },
    {
      "epoch": 1.0142566191446027,
      "grad_norm": 0.5652757010313284,
      "learning_rate": 8.367961662900704e-06,
      "loss": 0.0882,
      "step": 249
    },
    {
      "epoch": 1.0183299389002036,
      "grad_norm": 0.5636160029762864,
      "learning_rate": 8.35035968951572e-06,
      "loss": 0.1035,
      "step": 250
    },
    {
      "epoch": 1.0224032586558045,
      "grad_norm": 0.6332266023154626,
      "learning_rate": 8.33268203502937e-06,
      "loss": 0.1114,
      "step": 251
    },
    {
      "epoch": 1.0264765784114054,
      "grad_norm": 0.5429353422508607,
      "learning_rate": 8.314929098761268e-06,
      "loss": 0.0895,
      "step": 252
    },
    {
      "epoch": 1.030549898167006,
      "grad_norm": 0.5198523638805784,
      "learning_rate": 8.297101281731576e-06,
      "loss": 0.0939,
      "step": 253
    },
    {
      "epoch": 1.034623217922607,
      "grad_norm": 0.5553114410863834,
      "learning_rate": 8.279198986651925e-06,
      "loss": 0.1038,
      "step": 254
    },
    {
      "epoch": 1.0386965376782078,
      "grad_norm": 0.5411538002090317,
      "learning_rate": 8.261222617916335e-06,
      "loss": 0.085,
      "step": 255
    },
    {
      "epoch": 1.0427698574338085,
      "grad_norm": 0.6365529963594366,
      "learning_rate": 8.243172581592066e-06,
      "loss": 0.1127,
      "step": 256
    },
    {
      "epoch": 1.0468431771894093,
      "grad_norm": 0.5318863281817316,
      "learning_rate": 8.22504928541045e-06,
      "loss": 0.0926,
      "step": 257
    },
    {
      "epoch": 1.0509164969450102,
      "grad_norm": 0.5496286481734359,
      "learning_rate": 8.206853138757687e-06,
      "loss": 0.0842,
      "step": 258
    },
    {
      "epoch": 1.054989816700611,
      "grad_norm": 0.574862630808739,
      "learning_rate": 8.188584552665592e-06,
      "loss": 0.0895,
      "step": 259
    },
    {
      "epoch": 1.0590631364562118,
      "grad_norm": 0.5617567784768843,
      "learning_rate": 8.17024393980231e-06,
      "loss": 0.103,
      "step": 260
    },
    {
      "epoch": 1.0631364562118126,
      "grad_norm": 0.5936736485712034,
      "learning_rate": 8.15183171446299e-06,
      "loss": 0.0682,
      "step": 261
    },
    {
      "epoch": 1.0672097759674135,
      "grad_norm": 0.4784398579577645,
      "learning_rate": 8.133348292560442e-06,
      "loss": 0.0758,
      "step": 262
    },
    {
      "epoch": 1.0712830957230142,
      "grad_norm": 0.6976139285106856,
      "learning_rate": 8.114794091615718e-06,
      "loss": 0.123,
      "step": 263
    },
    {
      "epoch": 1.075356415478615,
      "grad_norm": 0.519896716209068,
      "learning_rate": 8.096169530748708e-06,
      "loss": 0.0665,
      "step": 264
    },
    {
      "epoch": 1.079429735234216,
      "grad_norm": 0.58796972154354,
      "learning_rate": 8.077475030668647e-06,
      "loss": 0.0984,
      "step": 265
    },
    {
      "epoch": 1.0835030549898166,
      "grad_norm": 0.634495385358814,
      "learning_rate": 8.058711013664633e-06,
      "loss": 0.0837,
      "step": 266
    },
    {
      "epoch": 1.0875763747454175,
      "grad_norm": 0.5488781232789869,
      "learning_rate": 8.039877903596069e-06,
      "loss": 0.0847,
      "step": 267
    },
    {
      "epoch": 1.0916496945010183,
      "grad_norm": 0.5402517916966425,
      "learning_rate": 8.020976125883105e-06,
      "loss": 0.08,
      "step": 268
    },
    {
      "epoch": 1.0957230142566192,
      "grad_norm": 0.5792386077730799,
      "learning_rate": 8.002006107497018e-06,
      "loss": 0.0974,
      "step": 269
    },
    {
      "epoch": 1.0997963340122199,
      "grad_norm": 0.5672049919315055,
      "learning_rate": 7.982968276950568e-06,
      "loss": 0.0749,
      "step": 270
    },
    {
      "epoch": 1.1038696537678208,
      "grad_norm": 0.7111458972835449,
      "learning_rate": 7.963863064288326e-06,
      "loss": 0.1058,
      "step": 271
    },
    {
      "epoch": 1.1079429735234216,
      "grad_norm": 0.48575754546390054,
      "learning_rate": 7.944690901076949e-06,
      "loss": 0.0795,
      "step": 272
    },
    {
      "epoch": 1.1120162932790225,
      "grad_norm": 0.5792516959825752,
      "learning_rate": 7.925452220395436e-06,
      "loss": 0.0971,
      "step": 273
    },
    {
      "epoch": 1.1160896130346232,
      "grad_norm": 0.5718749385789645,
      "learning_rate": 7.906147456825349e-06,
      "loss": 0.0931,
      "step": 274
    },
    {
      "epoch": 1.120162932790224,
      "grad_norm": 0.550456136935712,
      "learning_rate": 7.886777046440993e-06,
      "loss": 0.0846,
      "step": 275
    },
    {
      "epoch": 1.124236252545825,
      "grad_norm": 0.7570171924375668,
      "learning_rate": 7.867341426799562e-06,
      "loss": 0.1285,
      "step": 276
    },
    {
      "epoch": 1.1283095723014256,
      "grad_norm": 0.6365681129768777,
      "learning_rate": 7.847841036931263e-06,
      "loss": 0.096,
      "step": 277
    },
    {
      "epoch": 1.1323828920570265,
      "grad_norm": 0.492436799938671,
      "learning_rate": 7.828276317329388e-06,
      "loss": 0.0804,
      "step": 278
    },
    {
      "epoch": 1.1364562118126273,
      "grad_norm": 0.48004623386034234,
      "learning_rate": 7.80864770994038e-06,
      "loss": 0.0707,
      "step": 279
    },
    {
      "epoch": 1.140529531568228,
      "grad_norm": 0.6153659743136082,
      "learning_rate": 7.788955658153829e-06,
      "loss": 0.0868,
      "step": 280
    },
    {
      "epoch": 1.1446028513238289,
      "grad_norm": 0.5206472649098633,
      "learning_rate": 7.769200606792476e-06,
      "loss": 0.0727,
      "step": 281
    },
    {
      "epoch": 1.1486761710794298,
      "grad_norm": 1.0068669303466504,
      "learning_rate": 7.749383002102147e-06,
      "loss": 0.0941,
      "step": 282
    },
    {
      "epoch": 1.1527494908350306,
      "grad_norm": 0.5965302510362305,
      "learning_rate": 7.72950329174169e-06,
      "loss": 0.0941,
      "step": 283
    },
    {
      "epoch": 1.1568228105906313,
      "grad_norm": 0.5585120944783005,
      "learning_rate": 7.709561924772855e-06,
      "loss": 0.0919,
      "step": 284
    },
    {
      "epoch": 1.1608961303462322,
      "grad_norm": 0.6659549215366509,
      "learning_rate": 7.689559351650142e-06,
      "loss": 0.1076,
      "step": 285
    },
    {
      "epoch": 1.164969450101833,
      "grad_norm": 0.6337052265305244,
      "learning_rate": 7.66949602421064e-06,
      "loss": 0.0967,
      "step": 286
    },
    {
      "epoch": 1.1690427698574337,
      "grad_norm": 0.5413928600595976,
      "learning_rate": 7.649372395663816e-06,
      "loss": 0.0702,
      "step": 287
    },
    {
      "epoch": 1.1731160896130346,
      "grad_norm": 0.5968246250826901,
      "learning_rate": 7.629188920581267e-06,
      "loss": 0.0914,
      "step": 288
    },
    {
      "epoch": 1.1771894093686355,
      "grad_norm": 0.5750161126043614,
      "learning_rate": 7.608946054886468e-06,
      "loss": 0.0795,
      "step": 289
    },
    {
      "epoch": 1.1812627291242364,
      "grad_norm": 0.6328078257009621,
      "learning_rate": 7.588644255844464e-06,
      "loss": 0.1047,
      "step": 290
    },
    {
      "epoch": 1.185336048879837,
      "grad_norm": 0.5192894463425617,
      "learning_rate": 7.568283982051538e-06,
      "loss": 0.0769,
      "step": 291
    },
    {
      "epoch": 1.189409368635438,
      "grad_norm": 0.5224024834024625,
      "learning_rate": 7.5478656934248626e-06,
      "loss": 0.0773,
      "step": 292
    },
    {
      "epoch": 1.1934826883910388,
      "grad_norm": 0.5210199420188125,
      "learning_rate": 7.527389851192099e-06,
      "loss": 0.0815,
      "step": 293
    },
    {
      "epoch": 1.1975560081466394,
      "grad_norm": 0.5591128668579426,
      "learning_rate": 7.506856917880989e-06,
      "loss": 0.0849,
      "step": 294
    },
    {
      "epoch": 1.2016293279022403,
      "grad_norm": 0.5371251805156421,
      "learning_rate": 7.486267357308896e-06,
      "loss": 0.0824,
      "step": 295
    },
    {
      "epoch": 1.2057026476578412,
      "grad_norm": 0.5913753985417323,
      "learning_rate": 7.465621634572336e-06,
      "loss": 0.0871,
      "step": 296
    },
    {
      "epoch": 1.2097759674134418,
      "grad_norm": 0.6691676202294405,
      "learning_rate": 7.444920216036473e-06,
      "loss": 0.092,
      "step": 297
    },
    {
      "epoch": 1.2138492871690427,
      "grad_norm": 0.5398872238775144,
      "learning_rate": 7.4241635693245766e-06,
      "loss": 0.0772,
      "step": 298
    },
    {
      "epoch": 1.2179226069246436,
      "grad_norm": 0.48253635582708276,
      "learning_rate": 7.40335216330746e-06,
      "loss": 0.0751,
      "step": 299
    },
    {
      "epoch": 1.2219959266802445,
      "grad_norm": 0.48835957091099164,
      "learning_rate": 7.382486468092899e-06,
      "loss": 0.0837,
      "step": 300
    },
    {
      "epoch": 1.2260692464358451,
      "grad_norm": 0.5918757448537996,
      "learning_rate": 7.361566955014999e-06,
      "loss": 0.0812,
      "step": 301
    },
    {
      "epoch": 1.230142566191446,
      "grad_norm": 0.5363034912281799,
      "learning_rate": 7.340594096623559e-06,
      "loss": 0.0889,
      "step": 302
    },
    {
      "epoch": 1.234215885947047,
      "grad_norm": 0.4887082412663643,
      "learning_rate": 7.319568366673389e-06,
      "loss": 0.0804,
      "step": 303
    },
    {
      "epoch": 1.2382892057026478,
      "grad_norm": 0.5834893991853306,
      "learning_rate": 7.2984902401136115e-06,
      "loss": 0.0887,
      "step": 304
    },
    {
      "epoch": 1.2423625254582484,
      "grad_norm": 0.5321072997085449,
      "learning_rate": 7.277360193076936e-06,
      "loss": 0.0768,
      "step": 305
    },
    {
      "epoch": 1.2464358452138493,
      "grad_norm": 0.5090603439958107,
      "learning_rate": 7.256178702868899e-06,
      "loss": 0.0762,
      "step": 306
    },
    {
      "epoch": 1.2505091649694502,
      "grad_norm": 0.5488722971833677,
      "learning_rate": 7.234946247957087e-06,
      "loss": 0.0897,
      "step": 307
    },
    {
      "epoch": 1.2545824847250509,
      "grad_norm": 0.6264489946049278,
      "learning_rate": 7.213663307960321e-06,
      "loss": 0.0904,
      "step": 308
    },
    {
      "epoch": 1.2586558044806517,
      "grad_norm": 0.6974895794127886,
      "learning_rate": 7.192330363637832e-06,
      "loss": 0.1082,
      "step": 309
    },
    {
      "epoch": 1.2627291242362526,
      "grad_norm": 0.549268939237344,
      "learning_rate": 7.170947896878392e-06,
      "loss": 0.0804,
      "step": 310
    },
    {
      "epoch": 1.2668024439918533,
      "grad_norm": 0.591193047575418,
      "learning_rate": 7.149516390689433e-06,
      "loss": 0.0785,
      "step": 311
    },
    {
      "epoch": 1.2708757637474541,
      "grad_norm": 0.5224208378441448,
      "learning_rate": 7.12803632918614e-06,
      "loss": 0.0762,
      "step": 312
    },
    {
      "epoch": 1.274949083503055,
      "grad_norm": 0.505146735236663,
      "learning_rate": 7.1065081975805086e-06,
      "loss": 0.0771,
      "step": 313
    },
    {
      "epoch": 1.2790224032586557,
      "grad_norm": 0.644610545389085,
      "learning_rate": 7.084932482170385e-06,
      "loss": 0.0953,
      "step": 314
    },
    {
      "epoch": 1.2830957230142566,
      "grad_norm": 0.49839150457579895,
      "learning_rate": 7.063309670328491e-06,
      "loss": 0.0892,
      "step": 315
    },
    {
      "epoch": 1.2871690427698574,
      "grad_norm": 0.5490897883260675,
      "learning_rate": 7.041640250491398e-06,
      "loss": 0.0775,
      "step": 316
    },
    {
      "epoch": 1.2912423625254583,
      "grad_norm": 0.49591235513001225,
      "learning_rate": 7.019924712148511e-06,
      "loss": 0.0724,
      "step": 317
    },
    {
      "epoch": 1.2953156822810592,
      "grad_norm": 0.6101100773766077,
      "learning_rate": 6.998163545830998e-06,
      "loss": 0.0827,
      "step": 318
    },
    {
      "epoch": 1.2993890020366599,
      "grad_norm": 0.5594356046759834,
      "learning_rate": 6.976357243100718e-06,
      "loss": 0.0849,
      "step": 319
    },
    {
      "epoch": 1.3034623217922607,
      "grad_norm": 0.6419949606467299,
      "learning_rate": 6.954506296539112e-06,
      "loss": 0.0943,
      "step": 320
    },
    {
      "epoch": 1.3075356415478616,
      "grad_norm": 0.6232909291031316,
      "learning_rate": 6.9326111997360775e-06,
      "loss": 0.0896,
      "step": 321
    },
    {
      "epoch": 1.3116089613034623,
      "grad_norm": 0.6616478745683936,
      "learning_rate": 6.910672447278827e-06,
      "loss": 0.0947,
      "step": 322
    },
    {
      "epoch": 1.3156822810590632,
      "grad_norm": 0.5163183410160286,
      "learning_rate": 6.8886905347406985e-06,
      "loss": 0.0918,
      "step": 323
    },
    {
      "epoch": 1.319755600814664,
      "grad_norm": 0.5890621449630412,
      "learning_rate": 6.866665958669976e-06,
      "loss": 0.0999,
      "step": 324
    },
    {
      "epoch": 1.3238289205702647,
      "grad_norm": 0.506397655458691,
      "learning_rate": 6.844599216578667e-06,
      "loss": 0.0811,
      "step": 325
    },
    {
      "epoch": 1.3279022403258656,
      "grad_norm": 0.6732628528346883,
      "learning_rate": 6.822490806931262e-06,
      "loss": 0.0953,
      "step": 326
    },
    {
      "epoch": 1.3319755600814664,
      "grad_norm": 0.5397963091210236,
      "learning_rate": 6.800341229133486e-06,
      "loss": 0.0892,
      "step": 327
    },
    {
      "epoch": 1.336048879837067,
      "grad_norm": 0.6332279801225065,
      "learning_rate": 6.778150983520999e-06,
      "loss": 0.1212,
      "step": 328
    },
    {
      "epoch": 1.340122199592668,
      "grad_norm": 0.5724912027337742,
      "learning_rate": 6.755920571348111e-06,
      "loss": 0.0846,
      "step": 329
    },
    {
      "epoch": 1.3441955193482689,
      "grad_norm": 0.6046452826990528,
      "learning_rate": 6.73365049477645e-06,
      "loss": 0.0794,
      "step": 330
    },
    {
      "epoch": 1.3482688391038695,
      "grad_norm": 1.0586213565477098,
      "learning_rate": 6.711341256863623e-06,
      "loss": 0.1082,
      "step": 331
    },
    {
      "epoch": 1.3523421588594704,
      "grad_norm": 0.5781720627165611,
      "learning_rate": 6.688993361551847e-06,
      "loss": 0.0855,
      "step": 332
    },
    {
      "epoch": 1.3564154786150713,
      "grad_norm": 0.6593767357388312,
      "learning_rate": 6.66660731365657e-06,
      "loss": 0.0971,
      "step": 333
    },
    {
      "epoch": 1.3604887983706722,
      "grad_norm": 0.495868519093137,
      "learning_rate": 6.64418361885507e-06,
      "loss": 0.087,
      "step": 334
    },
    {
      "epoch": 1.364562118126273,
      "grad_norm": 0.6608981571039724,
      "learning_rate": 6.621722783675024e-06,
      "loss": 0.0923,
      "step": 335
    },
    {
      "epoch": 1.3686354378818737,
      "grad_norm": 0.5146955799436094,
      "learning_rate": 6.599225315483076e-06,
      "loss": 0.0814,
      "step": 336
    },
    {
      "epoch": 1.3727087576374746,
      "grad_norm": 0.5163299227754969,
      "learning_rate": 6.576691722473368e-06,
      "loss": 0.0768,
      "step": 337
    },
    {
      "epoch": 1.3767820773930755,
      "grad_norm": 0.620269362373383,
      "learning_rate": 6.554122513656065e-06,
      "loss": 0.104,
      "step": 338
    },
    {
      "epoch": 1.3808553971486761,
      "grad_norm": 0.5514828242374266,
      "learning_rate": 6.531518198845854e-06,
      "loss": 0.0881,
      "step": 339
    },
    {
      "epoch": 1.384928716904277,
      "grad_norm": 0.5986245871634709,
      "learning_rate": 6.508879288650431e-06,
      "loss": 0.0892,
      "step": 340
    },
    {
      "epoch": 1.3890020366598779,
      "grad_norm": 0.6423399965038064,
      "learning_rate": 6.486206294458966e-06,
      "loss": 0.1107,
      "step": 341
    },
    {
      "epoch": 1.3930753564154785,
      "grad_norm": 0.5771051149696822,
      "learning_rate": 6.463499728430549e-06,
      "loss": 0.0805,
      "step": 342
    },
    {
      "epoch": 1.3971486761710794,
      "grad_norm": 0.6630093065985597,
      "learning_rate": 6.4407601034826225e-06,
      "loss": 0.1029,
      "step": 343
    },
    {
      "epoch": 1.4012219959266803,
      "grad_norm": 0.5317823459484048,
      "learning_rate": 6.417987933279397e-06,
      "loss": 0.0818,
      "step": 344
    },
    {
      "epoch": 1.405295315682281,
      "grad_norm": 0.5430869992388687,
      "learning_rate": 6.395183732220242e-06,
      "loss": 0.0753,
      "step": 345
    },
    {
      "epoch": 1.4093686354378818,
      "grad_norm": 0.5021147786042328,
      "learning_rate": 6.372348015428077e-06,
      "loss": 0.0808,
      "step": 346
    },
    {
      "epoch": 1.4134419551934827,
      "grad_norm": 0.5622084220448136,
      "learning_rate": 6.349481298737723e-06,
      "loss": 0.0883,
      "step": 347
    },
    {
      "epoch": 1.4175152749490836,
      "grad_norm": 0.5140862939844998,
      "learning_rate": 6.32658409868426e-06,
      "loss": 0.0727,
      "step": 348
    },
    {
      "epoch": 1.4215885947046842,
      "grad_norm": 0.546278558955185,
      "learning_rate": 6.303656932491349e-06,
      "loss": 0.0833,
      "step": 349
    },
    {
      "epoch": 1.4256619144602851,
      "grad_norm": 0.7731600820142971,
      "learning_rate": 6.280700318059563e-06,
      "loss": 0.109,
      "step": 350
    },
| { |
| "epoch": 1.429735234215886, |
| "grad_norm": 0.6851651959039945, |
| "learning_rate": 6.257714773954674e-06, |
| "loss": 0.1086, |
| "step": 351 |
| }, |
| { |
| "epoch": 1.4338085539714869, |
| "grad_norm": 0.5634955631410733, |
| "learning_rate": 6.234700819395946e-06, |
| "loss": 0.0866, |
| "step": 352 |
| }, |
| { |
| "epoch": 1.4378818737270875, |
| "grad_norm": 0.5530021728157943, |
| "learning_rate": 6.211658974244407e-06, |
| "loss": 0.09, |
| "step": 353 |
| }, |
| { |
| "epoch": 1.4419551934826884, |
| "grad_norm": 0.5256121604623915, |
| "learning_rate": 6.1885897589911e-06, |
| "loss": 0.0852, |
| "step": 354 |
| }, |
| { |
| "epoch": 1.4460285132382893, |
| "grad_norm": 0.8664974074782705, |
| "learning_rate": 6.1654936947453355e-06, |
| "loss": 0.1055, |
| "step": 355 |
| }, |
| { |
| "epoch": 1.45010183299389, |
| "grad_norm": 0.4705188596010589, |
| "learning_rate": 6.142371303222909e-06, |
| "loss": 0.0768, |
| "step": 356 |
| }, |
| { |
| "epoch": 1.4541751527494908, |
| "grad_norm": 0.5819152871091908, |
| "learning_rate": 6.119223106734328e-06, |
| "loss": 0.0877, |
| "step": 357 |
| }, |
| { |
| "epoch": 1.4582484725050917, |
| "grad_norm": 0.5623475445239307, |
| "learning_rate": 6.0960496281729995e-06, |
| "loss": 0.0766, |
| "step": 358 |
| }, |
| { |
| "epoch": 1.4623217922606924, |
| "grad_norm": 0.5188522023672607, |
| "learning_rate": 6.072851391003432e-06, |
| "loss": 0.0832, |
| "step": 359 |
| }, |
| { |
| "epoch": 1.4663951120162932, |
| "grad_norm": 0.5747162205315378, |
| "learning_rate": 6.0496289192494e-06, |
| "loss": 0.0911, |
| "step": 360 |
| }, |
| { |
| "epoch": 1.4704684317718941, |
| "grad_norm": 0.6903703920567706, |
| "learning_rate": 6.026382737482116e-06, |
| "loss": 0.1054, |
| "step": 361 |
| }, |
| { |
| "epoch": 1.4745417515274948, |
| "grad_norm": 0.6235308391895458, |
| "learning_rate": 6.003113370808375e-06, |
| "loss": 0.0944, |
| "step": 362 |
| }, |
| { |
| "epoch": 1.4786150712830957, |
| "grad_norm": 0.6733566235537274, |
| "learning_rate": 5.979821344858695e-06, |
| "loss": 0.1185, |
| "step": 363 |
| }, |
| { |
| "epoch": 1.4826883910386965, |
| "grad_norm": 0.6497171684342544, |
| "learning_rate": 5.956507185775441e-06, |
| "loss": 0.0959, |
| "step": 364 |
| }, |
| { |
| "epoch": 1.4867617107942974, |
| "grad_norm": 0.5460564309088338, |
| "learning_rate": 5.933171420200946e-06, |
| "loss": 0.0806, |
| "step": 365 |
| }, |
| { |
| "epoch": 1.4908350305498983, |
| "grad_norm": 0.5428353306817464, |
| "learning_rate": 5.909814575265609e-06, |
| "loss": 0.0811, |
| "step": 366 |
| }, |
| { |
| "epoch": 1.494908350305499, |
| "grad_norm": 0.48663923521437247, |
| "learning_rate": 5.88643717857599e-06, |
| "loss": 0.0721, |
| "step": 367 |
| }, |
| { |
| "epoch": 1.4989816700610998, |
| "grad_norm": 0.479616157298652, |
| "learning_rate": 5.863039758202889e-06, |
| "loss": 0.08, |
| "step": 368 |
| }, |
| { |
| "epoch": 1.5030549898167007, |
| "grad_norm": 0.5152283487658851, |
| "learning_rate": 5.839622842669423e-06, |
| "loss": 0.0843, |
| "step": 369 |
| }, |
| { |
| "epoch": 1.5071283095723014, |
| "grad_norm": 0.5682101518573992, |
| "learning_rate": 5.816186960939084e-06, |
| "loss": 0.0977, |
| "step": 370 |
| }, |
| { |
| "epoch": 1.5112016293279023, |
| "grad_norm": 0.5876280880615184, |
| "learning_rate": 5.7927326424037875e-06, |
| "loss": 0.0867, |
| "step": 371 |
| }, |
| { |
| "epoch": 1.5152749490835031, |
| "grad_norm": 0.5358927031965173, |
| "learning_rate": 5.7692604168719225e-06, |
| "loss": 0.0811, |
| "step": 372 |
| }, |
| { |
| "epoch": 1.5193482688391038, |
| "grad_norm": 0.4876451829148915, |
| "learning_rate": 5.745770814556373e-06, |
| "loss": 0.0827, |
| "step": 373 |
| }, |
| { |
| "epoch": 1.5234215885947047, |
| "grad_norm": 0.7122665193963945, |
| "learning_rate": 5.722264366062549e-06, |
| "loss": 0.122, |
| "step": 374 |
| }, |
| { |
| "epoch": 1.5274949083503055, |
| "grad_norm": 0.6377859456220404, |
| "learning_rate": 5.698741602376395e-06, |
| "loss": 0.0986, |
| "step": 375 |
| }, |
| { |
| "epoch": 1.5315682281059062, |
| "grad_norm": 0.461691161595667, |
| "learning_rate": 5.675203054852403e-06, |
| "loss": 0.0795, |
| "step": 376 |
| }, |
| { |
| "epoch": 1.535641547861507, |
| "grad_norm": 0.5439449283026275, |
| "learning_rate": 5.651649255201603e-06, |
| "loss": 0.0975, |
| "step": 377 |
| }, |
| { |
| "epoch": 1.539714867617108, |
| "grad_norm": 0.567642967696227, |
| "learning_rate": 5.628080735479553e-06, |
| "loss": 0.0876, |
| "step": 378 |
| }, |
| { |
| "epoch": 1.5437881873727086, |
| "grad_norm": 0.49972042714354553, |
| "learning_rate": 5.604498028074323e-06, |
| "loss": 0.077, |
| "step": 379 |
| }, |
| { |
| "epoch": 1.5478615071283097, |
| "grad_norm": 0.5101908846063549, |
| "learning_rate": 5.580901665694471e-06, |
| "loss": 0.0796, |
| "step": 380 |
| }, |
| { |
| "epoch": 1.5519348268839104, |
| "grad_norm": 0.7577385737001251, |
| "learning_rate": 5.557292181357003e-06, |
| "loss": 0.1011, |
| "step": 381 |
| }, |
| { |
| "epoch": 1.556008146639511, |
| "grad_norm": 0.5586593686436576, |
| "learning_rate": 5.533670108375334e-06, |
| "loss": 0.0808, |
| "step": 382 |
| }, |
| { |
| "epoch": 1.5600814663951121, |
| "grad_norm": 0.5981230389294948, |
| "learning_rate": 5.510035980347249e-06, |
| "loss": 0.0691, |
| "step": 383 |
| }, |
| { |
| "epoch": 1.5641547861507128, |
| "grad_norm": 0.7606096821073176, |
| "learning_rate": 5.486390331142841e-06, |
| "loss": 0.1296, |
| "step": 384 |
| }, |
| { |
| "epoch": 1.5682281059063137, |
| "grad_norm": 0.794080616806581, |
| "learning_rate": 5.462733694892452e-06, |
| "loss": 0.1061, |
| "step": 385 |
| }, |
| { |
| "epoch": 1.5723014256619146, |
| "grad_norm": 0.5501023333068423, |
| "learning_rate": 5.439066605974615e-06, |
| "loss": 0.0972, |
| "step": 386 |
| }, |
| { |
| "epoch": 1.5763747454175152, |
| "grad_norm": 0.5610437243966598, |
| "learning_rate": 5.415389599003972e-06, |
| "loss": 0.0858, |
| "step": 387 |
| }, |
| { |
| "epoch": 1.580448065173116, |
| "grad_norm": 0.6090151591190684, |
| "learning_rate": 5.391703208819209e-06, |
| "loss": 0.0939, |
| "step": 388 |
| }, |
| { |
| "epoch": 1.584521384928717, |
| "grad_norm": 0.5789457205019639, |
| "learning_rate": 5.368007970470964e-06, |
| "loss": 0.0857, |
| "step": 389 |
| }, |
| { |
| "epoch": 1.5885947046843176, |
| "grad_norm": 0.6480985481494451, |
| "learning_rate": 5.344304419209748e-06, |
| "loss": 0.1046, |
| "step": 390 |
| }, |
| { |
| "epoch": 1.5926680244399185, |
| "grad_norm": 0.48918395695439915, |
| "learning_rate": 5.3205930904738544e-06, |
| "loss": 0.0719, |
| "step": 391 |
| }, |
| { |
| "epoch": 1.5967413441955194, |
| "grad_norm": 0.7692780863505633, |
| "learning_rate": 5.296874519877256e-06, |
| "loss": 0.1172, |
| "step": 392 |
| }, |
| { |
| "epoch": 1.60081466395112, |
| "grad_norm": 0.5686376937846824, |
| "learning_rate": 5.273149243197517e-06, |
| "loss": 0.1007, |
| "step": 393 |
| }, |
| { |
| "epoch": 1.6048879837067211, |
| "grad_norm": 0.51653612739051, |
| "learning_rate": 5.2494177963636785e-06, |
| "loss": 0.0845, |
| "step": 394 |
| }, |
| { |
| "epoch": 1.6089613034623218, |
| "grad_norm": 0.5748512004824357, |
| "learning_rate": 5.225680715444168e-06, |
| "loss": 0.0956, |
| "step": 395 |
| }, |
| { |
| "epoch": 1.6130346232179225, |
| "grad_norm": 0.5193507274735706, |
| "learning_rate": 5.201938536634674e-06, |
| "loss": 0.0825, |
| "step": 396 |
| }, |
| { |
| "epoch": 1.6171079429735236, |
| "grad_norm": 0.4817480854856676, |
| "learning_rate": 5.178191796246043e-06, |
| "loss": 0.076, |
| "step": 397 |
| }, |
| { |
| "epoch": 1.6211812627291242, |
| "grad_norm": 0.5747445143952108, |
| "learning_rate": 5.154441030692162e-06, |
| "loss": 0.094, |
| "step": 398 |
| }, |
| { |
| "epoch": 1.625254582484725, |
| "grad_norm": 0.48918245375080166, |
| "learning_rate": 5.1306867764778445e-06, |
| "loss": 0.0644, |
| "step": 399 |
| }, |
| { |
| "epoch": 1.629327902240326, |
| "grad_norm": 0.4917464907906448, |
| "learning_rate": 5.106929570186706e-06, |
| "loss": 0.0686, |
| "step": 400 |
| }, |
| { |
| "epoch": 1.6334012219959266, |
| "grad_norm": 0.5863141127665724, |
| "learning_rate": 5.083169948469049e-06, |
| "loss": 0.0984, |
| "step": 401 |
| }, |
| { |
| "epoch": 1.6374745417515275, |
| "grad_norm": 0.6651514708842179, |
| "learning_rate": 5.059408448029737e-06, |
| "loss": 0.1026, |
| "step": 402 |
| }, |
| { |
| "epoch": 1.6415478615071284, |
| "grad_norm": 0.5852257476771185, |
| "learning_rate": 5.0356456056160715e-06, |
| "loss": 0.1009, |
| "step": 403 |
| }, |
| { |
| "epoch": 1.645621181262729, |
| "grad_norm": 0.5038722580287451, |
| "learning_rate": 5.0118819580056686e-06, |
| "loss": 0.0764, |
| "step": 404 |
| }, |
| { |
| "epoch": 1.64969450101833, |
| "grad_norm": 0.5353954386280294, |
| "learning_rate": 4.988118041994332e-06, |
| "loss": 0.0775, |
| "step": 405 |
| }, |
| { |
| "epoch": 1.6537678207739308, |
| "grad_norm": 0.5151150395311881, |
| "learning_rate": 4.964354394383929e-06, |
| "loss": 0.0852, |
| "step": 406 |
| }, |
| { |
| "epoch": 1.6578411405295315, |
| "grad_norm": 0.5193395511091946, |
| "learning_rate": 4.940591551970264e-06, |
| "loss": 0.0749, |
| "step": 407 |
| }, |
| { |
| "epoch": 1.6619144602851323, |
| "grad_norm": 0.49883451263513406, |
| "learning_rate": 4.9168300515309515e-06, |
| "loss": 0.0809, |
| "step": 408 |
| }, |
| { |
| "epoch": 1.6659877800407332, |
| "grad_norm": 0.8981230902398553, |
| "learning_rate": 4.8930704298132965e-06, |
| "loss": 0.1123, |
| "step": 409 |
| }, |
| { |
| "epoch": 1.6700610997963339, |
| "grad_norm": 0.5152672358496505, |
| "learning_rate": 4.869313223522159e-06, |
| "loss": 0.0765, |
| "step": 410 |
| }, |
| { |
| "epoch": 1.674134419551935, |
| "grad_norm": 0.5557694576662973, |
| "learning_rate": 4.845558969307839e-06, |
| "loss": 0.0801, |
| "step": 411 |
| }, |
| { |
| "epoch": 1.6782077393075356, |
| "grad_norm": 0.5798427128733209, |
| "learning_rate": 4.821808203753959e-06, |
| "loss": 0.0897, |
| "step": 412 |
| }, |
| { |
| "epoch": 1.6822810590631363, |
| "grad_norm": 0.5291569470771305, |
| "learning_rate": 4.798061463365327e-06, |
| "loss": 0.0866, |
| "step": 413 |
| }, |
| { |
| "epoch": 1.6863543788187374, |
| "grad_norm": 0.5049658440961153, |
| "learning_rate": 4.774319284555833e-06, |
| "loss": 0.0813, |
| "step": 414 |
| }, |
| { |
| "epoch": 1.690427698574338, |
| "grad_norm": 0.5025553793538183, |
| "learning_rate": 4.7505822036363214e-06, |
| "loss": 0.0796, |
| "step": 415 |
| }, |
| { |
| "epoch": 1.694501018329939, |
| "grad_norm": 0.5762684802173211, |
| "learning_rate": 4.726850756802486e-06, |
| "loss": 0.0814, |
| "step": 416 |
| }, |
| { |
| "epoch": 1.6985743380855398, |
| "grad_norm": 0.5504791385645241, |
| "learning_rate": 4.703125480122747e-06, |
| "loss": 0.0741, |
| "step": 417 |
| }, |
| { |
| "epoch": 1.7026476578411405, |
| "grad_norm": 0.6017024701235201, |
| "learning_rate": 4.679406909526147e-06, |
| "loss": 0.0998, |
| "step": 418 |
| }, |
| { |
| "epoch": 1.7067209775967414, |
| "grad_norm": 0.4780371786982221, |
| "learning_rate": 4.655695580790254e-06, |
| "loss": 0.085, |
| "step": 419 |
| }, |
| { |
| "epoch": 1.7107942973523422, |
| "grad_norm": 0.746827565864312, |
| "learning_rate": 4.631992029529037e-06, |
| "loss": 0.1169, |
| "step": 420 |
| }, |
| { |
| "epoch": 1.7148676171079429, |
| "grad_norm": 0.47907481016484715, |
| "learning_rate": 4.608296791180793e-06, |
| "loss": 0.0855, |
| "step": 421 |
| }, |
| { |
| "epoch": 1.7189409368635438, |
| "grad_norm": 0.5221097728983394, |
| "learning_rate": 4.584610400996028e-06, |
| "loss": 0.082, |
| "step": 422 |
| }, |
| { |
| "epoch": 1.7230142566191446, |
| "grad_norm": 0.5523006830876969, |
| "learning_rate": 4.560933394025386e-06, |
| "loss": 0.0866, |
| "step": 423 |
| }, |
| { |
| "epoch": 1.7270875763747453, |
| "grad_norm": 0.5367656406361516, |
| "learning_rate": 4.537266305107549e-06, |
| "loss": 0.0908, |
| "step": 424 |
| }, |
| { |
| "epoch": 1.7311608961303462, |
| "grad_norm": 0.48031799270557307, |
| "learning_rate": 4.513609668857162e-06, |
| "loss": 0.0719, |
| "step": 425 |
| }, |
| { |
| "epoch": 1.735234215885947, |
| "grad_norm": 0.48069086444297604, |
| "learning_rate": 4.489964019652752e-06, |
| "loss": 0.0726, |
| "step": 426 |
| }, |
| { |
| "epoch": 1.7393075356415477, |
| "grad_norm": 0.6097790204719554, |
| "learning_rate": 4.4663298916246665e-06, |
| "loss": 0.0974, |
| "step": 427 |
| }, |
| { |
| "epoch": 1.7433808553971488, |
| "grad_norm": 0.5760410806282759, |
| "learning_rate": 4.442707818642999e-06, |
| "loss": 0.091, |
| "step": 428 |
| }, |
| { |
| "epoch": 1.7474541751527495, |
| "grad_norm": 0.6416791366070191, |
| "learning_rate": 4.419098334305529e-06, |
| "loss": 0.0949, |
| "step": 429 |
| }, |
| { |
| "epoch": 1.7515274949083504, |
| "grad_norm": 0.5862190265305518, |
| "learning_rate": 4.395501971925677e-06, |
| "loss": 0.0855, |
| "step": 430 |
| }, |
| { |
| "epoch": 1.7556008146639512, |
| "grad_norm": 0.6048611684577604, |
| "learning_rate": 4.371919264520449e-06, |
| "loss": 0.0877, |
| "step": 431 |
| }, |
| { |
| "epoch": 1.759674134419552, |
| "grad_norm": 0.5889907283036164, |
| "learning_rate": 4.348350744798399e-06, |
| "loss": 0.0853, |
| "step": 432 |
| }, |
| { |
| "epoch": 1.7637474541751528, |
| "grad_norm": 0.5577447494891478, |
| "learning_rate": 4.324796945147598e-06, |
| "loss": 0.082, |
| "step": 433 |
| }, |
| { |
| "epoch": 1.7678207739307537, |
| "grad_norm": 0.5717842104361903, |
| "learning_rate": 4.301258397623606e-06, |
| "loss": 0.0871, |
| "step": 434 |
| }, |
| { |
| "epoch": 1.7718940936863543, |
| "grad_norm": 0.930762869092301, |
| "learning_rate": 4.2777356339374526e-06, |
| "loss": 0.0956, |
| "step": 435 |
| }, |
| { |
| "epoch": 1.7759674134419552, |
| "grad_norm": 0.7614948263463629, |
| "learning_rate": 4.254229185443628e-06, |
| "loss": 0.0849, |
| "step": 436 |
| }, |
| { |
| "epoch": 1.780040733197556, |
| "grad_norm": 0.5595057518503066, |
| "learning_rate": 4.230739583128078e-06, |
| "loss": 0.0813, |
| "step": 437 |
| }, |
| { |
| "epoch": 1.7841140529531567, |
| "grad_norm": 0.9808634809421035, |
| "learning_rate": 4.2072673575962125e-06, |
| "loss": 0.1033, |
| "step": 438 |
| }, |
| { |
| "epoch": 1.7881873727087576, |
| "grad_norm": 0.8130473022456658, |
| "learning_rate": 4.183813039060919e-06, |
| "loss": 0.1211, |
| "step": 439 |
| }, |
| { |
| "epoch": 1.7922606924643585, |
| "grad_norm": 0.4820600791663271, |
| "learning_rate": 4.160377157330579e-06, |
| "loss": 0.0866, |
| "step": 440 |
| }, |
| { |
| "epoch": 1.7963340122199591, |
| "grad_norm": 0.5556912970672404, |
| "learning_rate": 4.136960241797113e-06, |
| "loss": 0.07, |
| "step": 441 |
| }, |
| { |
| "epoch": 1.8004073319755602, |
| "grad_norm": 0.5564448688588367, |
| "learning_rate": 4.113562821424012e-06, |
| "loss": 0.0946, |
| "step": 442 |
| }, |
| { |
| "epoch": 1.804480651731161, |
| "grad_norm": 0.5090145761253313, |
| "learning_rate": 4.090185424734392e-06, |
| "loss": 0.0863, |
| "step": 443 |
| }, |
| { |
| "epoch": 1.8085539714867616, |
| "grad_norm": 0.6463252873050557, |
| "learning_rate": 4.066828579799054e-06, |
| "loss": 0.0847, |
| "step": 444 |
| }, |
| { |
| "epoch": 1.8126272912423627, |
| "grad_norm": 0.4759869187980572, |
| "learning_rate": 4.043492814224559e-06, |
| "loss": 0.0771, |
| "step": 445 |
| }, |
| { |
| "epoch": 1.8167006109979633, |
| "grad_norm": 0.7145771793640622, |
| "learning_rate": 4.020178655141307e-06, |
| "loss": 0.0871, |
| "step": 446 |
| }, |
| { |
| "epoch": 1.8207739307535642, |
| "grad_norm": 0.5516106066061452, |
| "learning_rate": 3.9968866291916254e-06, |
| "loss": 0.0916, |
| "step": 447 |
| }, |
| { |
| "epoch": 1.824847250509165, |
| "grad_norm": 0.49794076849820285, |
| "learning_rate": 3.973617262517886e-06, |
| "loss": 0.073, |
| "step": 448 |
| }, |
| { |
| "epoch": 1.8289205702647657, |
| "grad_norm": 0.5310012013508015, |
| "learning_rate": 3.950371080750602e-06, |
| "loss": 0.0881, |
| "step": 449 |
| }, |
| { |
| "epoch": 1.8329938900203666, |
| "grad_norm": 0.8043026847501894, |
| "learning_rate": 3.927148608996569e-06, |
| "loss": 0.1246, |
| "step": 450 |
| }, |
| { |
| "epoch": 1.8370672097759675, |
| "grad_norm": 0.7009938506971007, |
| "learning_rate": 3.903950371827001e-06, |
| "loss": 0.0963, |
| "step": 451 |
| }, |
| { |
| "epoch": 1.8411405295315681, |
| "grad_norm": 0.5611447978601334, |
| "learning_rate": 3.880776893265673e-06, |
| "loss": 0.0895, |
| "step": 452 |
| }, |
| { |
| "epoch": 1.845213849287169, |
| "grad_norm": 0.46391265116368263, |
| "learning_rate": 3.85762869677709e-06, |
| "loss": 0.0637, |
| "step": 453 |
| }, |
| { |
| "epoch": 1.84928716904277, |
| "grad_norm": 0.5980975272395279, |
| "learning_rate": 3.834506305254667e-06, |
| "loss": 0.1008, |
| "step": 454 |
| }, |
| { |
| "epoch": 1.8533604887983706, |
| "grad_norm": 0.5925659752681625, |
| "learning_rate": 3.811410241008902e-06, |
| "loss": 0.0922, |
| "step": 455 |
| }, |
| { |
| "epoch": 1.8574338085539714, |
| "grad_norm": 0.5203281057272098, |
| "learning_rate": 3.788341025755595e-06, |
| "loss": 0.0837, |
| "step": 456 |
| }, |
| { |
| "epoch": 1.8615071283095723, |
| "grad_norm": 0.5683416732627976, |
| "learning_rate": 3.765299180604055e-06, |
| "loss": 0.0931, |
| "step": 457 |
| }, |
| { |
| "epoch": 1.865580448065173, |
| "grad_norm": 0.45766679262081533, |
| "learning_rate": 3.7422852260453274e-06, |
| "loss": 0.0765, |
| "step": 458 |
| }, |
| { |
| "epoch": 1.869653767820774, |
| "grad_norm": 0.6103354315071964, |
| "learning_rate": 3.719299681940437e-06, |
| "loss": 0.1038, |
| "step": 459 |
| }, |
| { |
| "epoch": 1.8737270875763747, |
| "grad_norm": 0.5502923630619594, |
| "learning_rate": 3.696343067508651e-06, |
| "loss": 0.0921, |
| "step": 460 |
| }, |
| { |
| "epoch": 1.8778004073319754, |
| "grad_norm": 0.5237191630941678, |
| "learning_rate": 3.673415901315743e-06, |
| "loss": 0.0749, |
| "step": 461 |
| }, |
| { |
| "epoch": 1.8818737270875765, |
| "grad_norm": 0.4968192378714128, |
| "learning_rate": 3.650518701262278e-06, |
| "loss": 0.0795, |
| "step": 462 |
| }, |
| { |
| "epoch": 1.8859470468431772, |
| "grad_norm": 0.5545184029879875, |
| "learning_rate": 3.6276519845719237e-06, |
| "loss": 0.0815, |
| "step": 463 |
| }, |
| { |
| "epoch": 1.890020366598778, |
| "grad_norm": 0.6339129226700043, |
| "learning_rate": 3.6048162677797595e-06, |
| "loss": 0.0972, |
| "step": 464 |
| }, |
| { |
| "epoch": 1.894093686354379, |
| "grad_norm": 0.5122625390598023, |
| "learning_rate": 3.582012066720605e-06, |
| "loss": 0.0755, |
| "step": 465 |
| }, |
| { |
| "epoch": 1.8981670061099796, |
| "grad_norm": 0.5414776721622026, |
| "learning_rate": 3.559239896517379e-06, |
| "loss": 0.0929, |
| "step": 466 |
| }, |
| { |
| "epoch": 1.9022403258655805, |
| "grad_norm": 0.8247571133944617, |
| "learning_rate": 3.536500271569452e-06, |
| "loss": 0.0942, |
| "step": 467 |
| }, |
| { |
| "epoch": 1.9063136456211813, |
| "grad_norm": 0.4997167517086371, |
| "learning_rate": 3.5137937055410343e-06, |
| "loss": 0.0734, |
| "step": 468 |
| }, |
| { |
| "epoch": 1.910386965376782, |
| "grad_norm": 0.6106920717674926, |
| "learning_rate": 3.4911207113495703e-06, |
| "loss": 0.0984, |
| "step": 469 |
| }, |
| { |
| "epoch": 1.9144602851323829, |
| "grad_norm": 0.6138978815868072, |
| "learning_rate": 3.4684818011541484e-06, |
| "loss": 0.0882, |
| "step": 470 |
| }, |
| { |
| "epoch": 1.9185336048879837, |
| "grad_norm": 0.569918193290306, |
| "learning_rate": 3.4458774863439366e-06, |
| "loss": 0.0976, |
| "step": 471 |
| }, |
| { |
| "epoch": 1.9226069246435844, |
| "grad_norm": 0.660022445222452, |
| "learning_rate": 3.423308277526633e-06, |
| "loss": 0.0965, |
| "step": 472 |
| }, |
| { |
| "epoch": 1.9266802443991853, |
| "grad_norm": 0.5743611657151331, |
| "learning_rate": 3.4007746845169253e-06, |
| "loss": 0.096, |
| "step": 473 |
| }, |
| { |
| "epoch": 1.9307535641547862, |
| "grad_norm": 0.5979947072027607, |
| "learning_rate": 3.3782772163249767e-06, |
| "loss": 0.077, |
| "step": 474 |
| }, |
| { |
| "epoch": 1.9348268839103868, |
| "grad_norm": 0.4623921743915121, |
| "learning_rate": 3.3558163811449317e-06, |
| "loss": 0.0784, |
| "step": 475 |
| }, |
| { |
| "epoch": 1.938900203665988, |
| "grad_norm": 0.6249731314573497, |
| "learning_rate": 3.3333926863434317e-06, |
| "loss": 0.0952, |
| "step": 476 |
| }, |
| { |
| "epoch": 1.9429735234215886, |
| "grad_norm": 0.5904409847712071, |
| "learning_rate": 3.311006638448155e-06, |
| "loss": 0.0802, |
| "step": 477 |
| }, |
| { |
| "epoch": 1.9470468431771895, |
| "grad_norm": 0.49939998942066105, |
| "learning_rate": 3.288658743136378e-06, |
| "loss": 0.0715, |
| "step": 478 |
| }, |
| { |
| "epoch": 1.9511201629327903, |
| "grad_norm": 0.7453705405570098, |
| "learning_rate": 3.2663495052235505e-06, |
| "loss": 0.1228, |
| "step": 479 |
| }, |
| { |
| "epoch": 1.955193482688391, |
| "grad_norm": 0.5630003297157556, |
| "learning_rate": 3.2440794286518896e-06, |
| "loss": 0.0944, |
| "step": 480 |
| }, |
| { |
| "epoch": 1.9592668024439919, |
| "grad_norm": 0.5437974107964264, |
| "learning_rate": 3.2218490164790015e-06, |
| "loss": 0.0758, |
| "step": 481 |
| }, |
| { |
| "epoch": 1.9633401221995928, |
| "grad_norm": 0.5489699488329995, |
| "learning_rate": 3.199658770866515e-06, |
| "loss": 0.0842, |
| "step": 482 |
| }, |
| { |
| "epoch": 1.9674134419551934, |
| "grad_norm": 0.5183239217287703, |
| "learning_rate": 3.1775091930687374e-06, |
| "loss": 0.0748, |
| "step": 483 |
| }, |
| { |
| "epoch": 1.9714867617107943, |
| "grad_norm": 0.48101469467772406, |
| "learning_rate": 3.1554007834213357e-06, |
| "loss": 0.079, |
| "step": 484 |
| }, |
| { |
| "epoch": 1.9755600814663952, |
| "grad_norm": 0.6414823325991624, |
| "learning_rate": 3.1333340413300263e-06, |
| "loss": 0.0957, |
| "step": 485 |
| }, |
| { |
| "epoch": 1.9796334012219958, |
| "grad_norm": 0.5282513557861422, |
| "learning_rate": 3.1113094652593023e-06, |
| "loss": 0.0773, |
| "step": 486 |
| }, |
| { |
| "epoch": 1.9837067209775967, |
| "grad_norm": 0.7464872716498775, |
| "learning_rate": 3.0893275527211742e-06, |
| "loss": 0.1267, |
| "step": 487 |
| }, |
| { |
| "epoch": 1.9877800407331976, |
| "grad_norm": 0.5355022750197869, |
| "learning_rate": 3.067388800263923e-06, |
| "loss": 0.0916, |
| "step": 488 |
| }, |
| { |
| "epoch": 1.9918533604887982, |
| "grad_norm": 0.5075728839340562, |
| "learning_rate": 3.04549370346089e-06, |
| "loss": 0.0753, |
| "step": 489 |
| }, |
| { |
| "epoch": 1.9959266802443993, |
| "grad_norm": 0.4941017414087453, |
| "learning_rate": 3.0236427568992845e-06, |
| "loss": 0.0837, |
| "step": 490 |
| }, |
| { |
| "epoch": 2.0, |
| "grad_norm": 0.5982027060816123, |
| "learning_rate": 3.0018364541690048e-06, |
| "loss": 0.0947, |
| "step": 491 |
| }, |
| { |
| "epoch": 2.0040733197556007, |
| "grad_norm": 0.41739627753796676, |
| "learning_rate": 2.9800752878514903e-06, |
| "loss": 0.0559, |
| "step": 492 |
| }, |
| { |
| "epoch": 2.0081466395112018, |
| "grad_norm": 0.5194052162916396, |
| "learning_rate": 2.958359749508603e-06, |
| "loss": 0.0638, |
| "step": 493 |
| }, |
| { |
| "epoch": 2.0122199592668024, |
| "grad_norm": 0.43221139878939163, |
| "learning_rate": 2.936690329671511e-06, |
| "loss": 0.0547, |
| "step": 494 |
| }, |
| { |
| "epoch": 2.016293279022403, |
| "grad_norm": 0.4101066842627814, |
| "learning_rate": 2.915067517829615e-06, |
| "loss": 0.056, |
| "step": 495 |
| }, |
| { |
| "epoch": 2.020366598778004, |
| "grad_norm": 0.4348139876288608, |
| "learning_rate": 2.893491802419492e-06, |
| "loss": 0.0598, |
| "step": 496 |
| }, |
| { |
| "epoch": 2.024439918533605, |
| "grad_norm": 0.3947374919103676, |
| "learning_rate": 2.871963670813861e-06, |
| "loss": 0.0482, |
| "step": 497 |
| }, |
| { |
| "epoch": 2.0285132382892055, |
| "grad_norm": 0.4666236263535878, |
| "learning_rate": 2.850483609310567e-06, |
| "loss": 0.0556, |
| "step": 498 |
| }, |
| { |
| "epoch": 2.0325865580448066, |
| "grad_norm": 0.47455956467709703, |
| "learning_rate": 2.829052103121611e-06, |
| "loss": 0.0559, |
| "step": 499 |
| }, |
| { |
| "epoch": 2.0366598778004072, |
| "grad_norm": 0.4630769089403623, |
| "learning_rate": 2.807669636362169e-06, |
| "loss": 0.0572, |
| "step": 500 |
| }, |
| { |
| "epoch": 2.0407331975560083, |
| "grad_norm": 0.5649252991193849, |
| "learning_rate": 2.7863366920396805e-06, |
| "loss": 0.062, |
| "step": 501 |
| }, |
| { |
| "epoch": 2.044806517311609, |
| "grad_norm": 0.558125756283227, |
| "learning_rate": 2.765053752042915e-06, |
| "loss": 0.0642, |
| "step": 502 |
| }, |
| { |
| "epoch": 2.0488798370672097, |
| "grad_norm": 0.4895203581054696, |
| "learning_rate": 2.7438212971311016e-06, |
| "loss": 0.0568, |
| "step": 503 |
| }, |
| { |
| "epoch": 2.0529531568228108, |
| "grad_norm": 0.48090156728485023, |
| "learning_rate": 2.722639806923066e-06, |
| "loss": 0.0503, |
| "step": 504 |
| }, |
| { |
| "epoch": 2.0570264765784114, |
| "grad_norm": 0.5750738252028132, |
| "learning_rate": 2.7015097598863906e-06, |
| "loss": 0.0585, |
| "step": 505 |
| }, |
| { |
| "epoch": 2.061099796334012, |
| "grad_norm": 0.5447627664162749, |
| "learning_rate": 2.680431633326614e-06, |
| "loss": 0.0522, |
| "step": 506 |
| }, |
| { |
| "epoch": 2.065173116089613, |
| "grad_norm": 0.5725139245726798, |
| "learning_rate": 2.659405903376442e-06, |
| "loss": 0.0628, |
| "step": 507 |
| }, |
| { |
| "epoch": 2.069246435845214, |
| "grad_norm": 0.5802775382370658, |
| "learning_rate": 2.6384330449850028e-06, |
| "loss": 0.061, |
| "step": 508 |
| }, |
| { |
| "epoch": 2.0733197556008145, |
| "grad_norm": 0.515420111836459, |
| "learning_rate": 2.617513531907103e-06, |
| "loss": 0.0631, |
| "step": 509 |
| }, |
| { |
| "epoch": 2.0773930753564156, |
| "grad_norm": 0.5951978341961678, |
| "learning_rate": 2.5966478366925406e-06, |
| "loss": 0.0615, |
| "step": 510 |
| }, |
| { |
| "epoch": 2.0814663951120163, |
| "grad_norm": 0.6125738833476299, |
| "learning_rate": 2.5758364306754247e-06, |
| "loss": 0.0654, |
| "step": 511 |
| }, |
| { |
| "epoch": 2.085539714867617, |
| "grad_norm": 0.5960636987432294, |
| "learning_rate": 2.5550797839635283e-06, |
| "loss": 0.057, |
| "step": 512 |
| }, |
| { |
| "epoch": 2.089613034623218, |
| "grad_norm": 0.5261776219654868, |
| "learning_rate": 2.5343783654276644e-06, |
| "loss": 0.0575, |
| "step": 513 |
| }, |
| { |
| "epoch": 2.0936863543788187, |
| "grad_norm": 0.5144515938081482, |
| "learning_rate": 2.5137326426911067e-06, |
| "loss": 0.0543, |
| "step": 514 |
| }, |
| { |
| "epoch": 2.0977596741344193, |
| "grad_norm": 0.572024693066549, |
| "learning_rate": 2.493143082119013e-06, |
| "loss": 0.0637, |
| "step": 515 |
| }, |
| { |
| "epoch": 2.1018329938900204, |
| "grad_norm": 0.545851870974611, |
| "learning_rate": 2.472610148807903e-06, |
| "loss": 0.0618, |
| "step": 516 |
| }, |
| { |
| "epoch": 2.105906313645621, |
| "grad_norm": 0.5414643666671024, |
| "learning_rate": 2.452134306575139e-06, |
| "loss": 0.0564, |
| "step": 517 |
| }, |
| { |
| "epoch": 2.109979633401222, |
| "grad_norm": 0.6543225396849018, |
| "learning_rate": 2.431716017948462e-06, |
| "loss": 0.0655, |
| "step": 518 |
| }, |
| { |
| "epoch": 2.114052953156823, |
| "grad_norm": 0.4815685834290876, |
| "learning_rate": 2.4113557441555384e-06, |
| "loss": 0.0499, |
| "step": 519 |
| }, |
| { |
| "epoch": 2.1181262729124235, |
| "grad_norm": 0.49046636029691665, |
| "learning_rate": 2.391053945113533e-06, |
| "loss": 0.0496, |
| "step": 520 |
| }, |
| { |
| "epoch": 2.1221995926680246, |
| "grad_norm": 0.4758825318552587, |
| "learning_rate": 2.370811079418735e-06, |
| "loss": 0.0595, |
| "step": 521 |
| }, |
| { |
| "epoch": 2.1262729124236253, |
| "grad_norm": 0.647369900668898, |
| "learning_rate": 2.350627604336186e-06, |
| "loss": 0.0708, |
| "step": 522 |
| }, |
| { |
| "epoch": 2.130346232179226, |
| "grad_norm": 0.5388254838483854, |
| "learning_rate": 2.330503975789361e-06, |
| "loss": 0.0635, |
| "step": 523 |
| }, |
| { |
| "epoch": 2.134419551934827, |
| "grad_norm": 0.5056500948196868, |
| "learning_rate": 2.3104406483498593e-06, |
| "loss": 0.0572, |
| "step": 524 |
| }, |
| { |
| "epoch": 2.1384928716904277, |
| "grad_norm": 0.5150832166313588, |
| "learning_rate": 2.290438075227146e-06, |
| "loss": 0.0537, |
| "step": 525 |
| }, |
| { |
| "epoch": 2.1425661914460283, |
| "grad_norm": 0.4915141891747677, |
| "learning_rate": 2.270496708258309e-06, |
| "loss": 0.0626, |
| "step": 526 |
| }, |
| { |
| "epoch": 2.1466395112016294, |
| "grad_norm": 0.5458068303000463, |
| "learning_rate": 2.2506169978978543e-06, |
| "loss": 0.057, |
| "step": 527 |
| }, |
| { |
| "epoch": 2.15071283095723, |
| "grad_norm": 0.5407840316898792, |
| "learning_rate": 2.230799393207526e-06, |
| "loss": 0.0547, |
| "step": 528 |
| }, |
| { |
| "epoch": 2.1547861507128308, |
| "grad_norm": 0.5712279431462186, |
| "learning_rate": 2.2110443418461723e-06, |
| "loss": 0.0497, |
| "step": 529 |
| }, |
| { |
| "epoch": 2.158859470468432, |
| "grad_norm": 0.5030411576480762, |
| "learning_rate": 2.191352290059621e-06, |
| "loss": 0.0562, |
| "step": 530 |
| }, |
| { |
| "epoch": 2.1629327902240325, |
| "grad_norm": 0.5793143030651475, |
| "learning_rate": 2.171723682670613e-06, |
| "loss": 0.0706, |
| "step": 531 |
| }, |
| { |
| "epoch": 2.167006109979633, |
| "grad_norm": 0.5799698434626387, |
| "learning_rate": 2.152158963068739e-06, |
| "loss": 0.0596, |
| "step": 532 |
| }, |
| { |
| "epoch": 2.1710794297352343, |
| "grad_norm": 0.5214699207739314, |
| "learning_rate": 2.1326585732004384e-06, |
| "loss": 0.0542, |
| "step": 533 |
| }, |
| { |
| "epoch": 2.175152749490835, |
| "grad_norm": 0.568434571706133, |
| "learning_rate": 2.1132229535590092e-06, |
| "loss": 0.0636, |
| "step": 534 |
| }, |
| { |
| "epoch": 2.179226069246436, |
| "grad_norm": 0.5296937688449943, |
| "learning_rate": 2.093852543174652e-06, |
| "loss": 0.0581, |
| "step": 535 |
| }, |
| { |
| "epoch": 2.1832993890020367, |
| "grad_norm": 0.5134442857396613, |
| "learning_rate": 2.0745477796045664e-06, |
| "loss": 0.0512, |
| "step": 536 |
| }, |
| { |
| "epoch": 2.1873727087576373, |
| "grad_norm": 0.5653937319509575, |
| "learning_rate": 2.0553090989230527e-06, |
| "loss": 0.058, |
| "step": 537 |
| }, |
| { |
| "epoch": 2.1914460285132384, |
| "grad_norm": 0.6080152092281943, |
| "learning_rate": 2.036136935711674e-06, |
| "loss": 0.0555, |
| "step": 538 |
| }, |
| { |
| "epoch": 2.195519348268839, |
| "grad_norm": 0.488710978054397, |
| "learning_rate": 2.017031723049432e-06, |
| "loss": 0.045, |
| "step": 539 |
| }, |
| { |
| "epoch": 2.1995926680244398, |
| "grad_norm": 0.6184212728670258, |
| "learning_rate": 1.997993892502983e-06, |
| "loss": 0.0595, |
| "step": 540 |
| }, |
| { |
| "epoch": 2.203665987780041, |
| "grad_norm": 0.5835701633877641, |
| "learning_rate": 1.979023874116895e-06, |
| "loss": 0.0663, |
| "step": 541 |
| }, |
| { |
| "epoch": 2.2077393075356415, |
| "grad_norm": 0.47659978007109965, |
| "learning_rate": 1.9601220964039324e-06, |
| "loss": 0.0499, |
| "step": 542 |
| }, |
| { |
| "epoch": 2.211812627291242, |
| "grad_norm": 0.543849339331471, |
| "learning_rate": 1.9412889863353683e-06, |
| "loss": 0.0572, |
| "step": 543 |
| }, |
| { |
| "epoch": 2.2158859470468433, |
| "grad_norm": 0.5121540836519293, |
| "learning_rate": 1.9225249693313547e-06, |
| "loss": 0.0518, |
| "step": 544 |
| }, |
| { |
| "epoch": 2.219959266802444, |
| "grad_norm": 0.5049495996736645, |
| "learning_rate": 1.9038304692512943e-06, |
| "loss": 0.0577, |
| "step": 545 |
| }, |
| { |
| "epoch": 2.224032586558045, |
| "grad_norm": 0.5700663714446503, |
| "learning_rate": 1.8852059083842838e-06, |
| "loss": 0.0567, |
| "step": 546 |
| }, |
| { |
| "epoch": 2.2281059063136457, |
| "grad_norm": 0.5420860557689853, |
| "learning_rate": 1.8666517074395607e-06, |
| "loss": 0.0674, |
| "step": 547 |
| }, |
| { |
| "epoch": 2.2321792260692463, |
| "grad_norm": 0.4614287451435143, |
| "learning_rate": 1.8481682855370098e-06, |
| "loss": 0.0436, |
| "step": 548 |
| }, |
| { |
| "epoch": 2.2362525458248474, |
| "grad_norm": 0.517595752912596, |
| "learning_rate": 1.829756060197692e-06, |
| "loss": 0.0524, |
| "step": 549 |
| }, |
| { |
| "epoch": 2.240325865580448, |
| "grad_norm": 0.6030219035598918, |
| "learning_rate": 1.8114154473344081e-06, |
| "loss": 0.0626, |
| "step": 550 |
| }, |
| { |
| "epoch": 2.2443991853360488, |
| "grad_norm": 0.4537405483658188, |
| "learning_rate": 1.7931468612423142e-06, |
| "loss": 0.0481, |
| "step": 551 |
| }, |
| { |
| "epoch": 2.24847250509165, |
| "grad_norm": 0.4620205179717914, |
| "learning_rate": 1.7749507145895518e-06, |
| "loss": 0.0478, |
| "step": 552 |
| }, |
| { |
| "epoch": 2.2525458248472505, |
| "grad_norm": 0.6126699525075775, |
| "learning_rate": 1.756827418407936e-06, |
| "loss": 0.0576, |
| "step": 553 |
| }, |
| { |
| "epoch": 2.256619144602851, |
| "grad_norm": 0.5581597586956156, |
| "learning_rate": 1.7387773820836668e-06, |
| "loss": 0.0531, |
| "step": 554 |
| }, |
| { |
| "epoch": 2.2606924643584523, |
| "grad_norm": 0.5308630440763874, |
| "learning_rate": 1.7208010133480751e-06, |
| "loss": 0.0522, |
| "step": 555 |
| }, |
| { |
| "epoch": 2.264765784114053, |
| "grad_norm": 0.5576868492647639, |
| "learning_rate": 1.7028987182684248e-06, |
| "loss": 0.0643, |
| "step": 556 |
| }, |
| { |
| "epoch": 2.2688391038696536, |
| "grad_norm": 0.4739867715470504, |
| "learning_rate": 1.6850709012387328e-06, |
| "loss": 0.0507, |
| "step": 557 |
| }, |
| { |
| "epoch": 2.2729124236252547, |
| "grad_norm": 0.4862381731884336, |
| "learning_rate": 1.6673179649706312e-06, |
| "loss": 0.0509, |
| "step": 558 |
| }, |
| { |
| "epoch": 2.2769857433808554, |
| "grad_norm": 0.5097025963381955, |
| "learning_rate": 1.64964031048428e-06, |
| "loss": 0.0475, |
| "step": 559 |
| }, |
| { |
| "epoch": 2.281059063136456, |
| "grad_norm": 0.4902629748840515, |
| "learning_rate": 1.632038337099297e-06, |
| "loss": 0.0559, |
| "step": 560 |
| }, |
| { |
| "epoch": 2.285132382892057, |
| "grad_norm": 0.4988077996707139, |
| "learning_rate": 1.6145124424257497e-06, |
| "loss": 0.0519, |
| "step": 561 |
| }, |
| { |
| "epoch": 2.2892057026476578, |
| "grad_norm": 0.5297849065320843, |
| "learning_rate": 1.5970630223551614e-06, |
| "loss": 0.0572, |
| "step": 562 |
| }, |
| { |
| "epoch": 2.293279022403259, |
| "grad_norm": 0.4802452741199709, |
| "learning_rate": 1.5796904710515792e-06, |
| "loss": 0.0486, |
| "step": 563 |
| }, |
| { |
| "epoch": 2.2973523421588595, |
| "grad_norm": 0.5519063678575443, |
| "learning_rate": 1.5623951809426663e-06, |
| "loss": 0.059, |
| "step": 564 |
| }, |
| { |
| "epoch": 2.30142566191446, |
| "grad_norm": 0.5981998587265295, |
| "learning_rate": 1.5451775427108302e-06, |
| "loss": 0.0563, |
| "step": 565 |
| }, |
| { |
| "epoch": 2.3054989816700613, |
| "grad_norm": 0.49048359871517827, |
| "learning_rate": 1.5280379452844124e-06, |
| "loss": 0.0494, |
| "step": 566 |
| }, |
| { |
| "epoch": 2.309572301425662, |
| "grad_norm": 0.4694994396954776, |
| "learning_rate": 1.510976775828887e-06, |
| "loss": 0.0484, |
| "step": 567 |
| }, |
| { |
| "epoch": 2.3136456211812626, |
| "grad_norm": 0.5234995562072229, |
| "learning_rate": 1.493994419738129e-06, |
| "loss": 0.0515, |
| "step": 568 |
| }, |
| { |
| "epoch": 2.3177189409368637, |
| "grad_norm": 0.503070951611636, |
| "learning_rate": 1.4770912606257003e-06, |
| "loss": 0.0619, |
| "step": 569 |
| }, |
| { |
| "epoch": 2.3217922606924644, |
| "grad_norm": 0.5755040936134925, |
| "learning_rate": 1.4602676803161842e-06, |
| "loss": 0.0501, |
| "step": 570 |
| }, |
| { |
| "epoch": 2.325865580448065, |
| "grad_norm": 0.4991245391049469, |
| "learning_rate": 1.4435240588365645e-06, |
| "loss": 0.0514, |
| "step": 571 |
| }, |
| { |
| "epoch": 2.329938900203666, |
| "grad_norm": 0.5042831034359107, |
| "learning_rate": 1.4268607744076419e-06, |
| "loss": 0.0524, |
| "step": 572 |
| }, |
| { |
| "epoch": 2.3340122199592668, |
| "grad_norm": 0.5424941326990412, |
| "learning_rate": 1.41027820343548e-06, |
| "loss": 0.0592, |
| "step": 573 |
| }, |
| { |
| "epoch": 2.3380855397148674, |
| "grad_norm": 0.5921813112653782, |
| "learning_rate": 1.3937767205029196e-06, |
| "loss": 0.0691, |
| "step": 574 |
| }, |
| { |
| "epoch": 2.3421588594704685, |
| "grad_norm": 0.5279157207964021, |
| "learning_rate": 1.3773566983610992e-06, |
| "loss": 0.0613, |
| "step": 575 |
| }, |
| { |
| "epoch": 2.346232179226069, |
| "grad_norm": 0.6451848618235845, |
| "learning_rate": 1.3610185079210514e-06, |
| "loss": 0.0488, |
| "step": 576 |
| }, |
| { |
| "epoch": 2.35030549898167, |
| "grad_norm": 0.5434687637783986, |
| "learning_rate": 1.34476251824531e-06, |
| "loss": 0.0531, |
| "step": 577 |
| }, |
| { |
| "epoch": 2.354378818737271, |
| "grad_norm": 0.5182772408097506, |
| "learning_rate": 1.3285890965395853e-06, |
| "loss": 0.0573, |
| "step": 578 |
| }, |
| { |
| "epoch": 2.3584521384928716, |
| "grad_norm": 0.5202221262502522, |
| "learning_rate": 1.3124986081444625e-06, |
| "loss": 0.0578, |
| "step": 579 |
| }, |
| { |
| "epoch": 2.3625254582484727, |
| "grad_norm": 0.48899670191739664, |
| "learning_rate": 1.296491416527147e-06, |
| "loss": 0.0559, |
| "step": 580 |
| }, |
| { |
| "epoch": 2.3665987780040734, |
| "grad_norm": 0.5995376035948641, |
| "learning_rate": 1.2805678832732627e-06, |
| "loss": 0.0701, |
| "step": 581 |
| }, |
| { |
| "epoch": 2.370672097759674, |
| "grad_norm": 0.545491579973712, |
| "learning_rate": 1.264728368078678e-06, |
| "loss": 0.0512, |
| "step": 582 |
| }, |
| { |
| "epoch": 2.374745417515275, |
| "grad_norm": 0.49270262832855355, |
| "learning_rate": 1.248973228741378e-06, |
| "loss": 0.0544, |
| "step": 583 |
| }, |
| { |
| "epoch": 2.378818737270876, |
| "grad_norm": 0.5170916847588058, |
| "learning_rate": 1.2333028211533916e-06, |
| "loss": 0.0505, |
| "step": 584 |
| }, |
| { |
| "epoch": 2.3828920570264764, |
| "grad_norm": 0.5768850325283753, |
| "learning_rate": 1.21771749929274e-06, |
| "loss": 0.0514, |
| "step": 585 |
| }, |
| { |
| "epoch": 2.3869653767820775, |
| "grad_norm": 0.5560903902429476, |
| "learning_rate": 1.2022176152154525e-06, |
| "loss": 0.0565, |
| "step": 586 |
| }, |
| { |
| "epoch": 2.391038696537678, |
| "grad_norm": 0.5391901318945055, |
| "learning_rate": 1.1868035190476085e-06, |
| "loss": 0.0508, |
| "step": 587 |
| }, |
| { |
| "epoch": 2.395112016293279, |
| "grad_norm": 0.5166310408547459, |
| "learning_rate": 1.1714755589774252e-06, |
| "loss": 0.0493, |
| "step": 588 |
| }, |
| { |
| "epoch": 2.39918533604888, |
| "grad_norm": 0.4915782704851573, |
| "learning_rate": 1.1562340812474004e-06, |
| "loss": 0.0457, |
| "step": 589 |
| }, |
| { |
| "epoch": 2.4032586558044806, |
| "grad_norm": 0.48093430324874514, |
| "learning_rate": 1.1410794301464817e-06, |
| "loss": 0.049, |
| "step": 590 |
| }, |
| { |
| "epoch": 2.4073319755600817, |
| "grad_norm": 0.48617974521722185, |
| "learning_rate": 1.1260119480023008e-06, |
| "loss": 0.0447, |
| "step": 591 |
| }, |
| { |
| "epoch": 2.4114052953156824, |
| "grad_norm": 0.478790430653292, |
| "learning_rate": 1.1110319751734271e-06, |
| "loss": 0.0472, |
| "step": 592 |
| }, |
| { |
| "epoch": 2.415478615071283, |
| "grad_norm": 0.5288857498675965, |
| "learning_rate": 1.0961398500416926e-06, |
| "loss": 0.0519, |
| "step": 593 |
| }, |
| { |
| "epoch": 2.4195519348268837, |
| "grad_norm": 0.5291272675723724, |
| "learning_rate": 1.0813359090045412e-06, |
| "loss": 0.0504, |
| "step": 594 |
| }, |
| { |
| "epoch": 2.423625254582485, |
| "grad_norm": 0.5452112728644081, |
| "learning_rate": 1.0666204864674263e-06, |
| "loss": 0.048, |
| "step": 595 |
| }, |
| { |
| "epoch": 2.4276985743380854, |
| "grad_norm": 0.5492279766714868, |
| "learning_rate": 1.0519939148362667e-06, |
| "loss": 0.0583, |
| "step": 596 |
| }, |
| { |
| "epoch": 2.4317718940936865, |
| "grad_norm": 0.5650775204552605, |
| "learning_rate": 1.0374565245099328e-06, |
| "loss": 0.0493, |
| "step": 597 |
| }, |
| { |
| "epoch": 2.435845213849287, |
| "grad_norm": 0.6515692514492948, |
| "learning_rate": 1.0230086438727771e-06, |
| "loss": 0.0608, |
| "step": 598 |
| }, |
| { |
| "epoch": 2.439918533604888, |
| "grad_norm": 0.48018153937375496, |
| "learning_rate": 1.0086505992872304e-06, |
| "loss": 0.0549, |
| "step": 599 |
| }, |
| { |
| "epoch": 2.443991853360489, |
| "grad_norm": 0.5159136019897271, |
| "learning_rate": 9.943827150864143e-07, |
| "loss": 0.0508, |
| "step": 600 |
| }, |
| { |
| "epoch": 2.4480651731160896, |
| "grad_norm": 0.49725131426270386, |
| "learning_rate": 9.80205313566827e-07, |
| "loss": 0.0542, |
| "step": 601 |
| }, |
| { |
| "epoch": 2.4521384928716903, |
| "grad_norm": 0.5690815999197327, |
| "learning_rate": 9.66118714981058e-07, |
| "loss": 0.0609, |
| "step": 602 |
| }, |
| { |
| "epoch": 2.4562118126272914, |
| "grad_norm": 0.5068009908986237, |
| "learning_rate": 9.521232375305494e-07, |
| "loss": 0.0545, |
| "step": 603 |
| }, |
| { |
| "epoch": 2.460285132382892, |
| "grad_norm": 0.5358914026069089, |
| "learning_rate": 9.382191973584193e-07, |
| "loss": 0.0442, |
| "step": 604 |
| }, |
| { |
| "epoch": 2.4643584521384927, |
| "grad_norm": 0.5706011147112774, |
| "learning_rate": 9.244069085423074e-07, |
| "loss": 0.0661, |
| "step": 605 |
| }, |
| { |
| "epoch": 2.468431771894094, |
| "grad_norm": 0.5333522419392427, |
| "learning_rate": 9.106866830872929e-07, |
| "loss": 0.0486, |
| "step": 606 |
| }, |
| { |
| "epoch": 2.4725050916496945, |
| "grad_norm": 0.5246751984603026, |
| "learning_rate": 8.970588309188343e-07, |
| "loss": 0.0547, |
| "step": 607 |
| }, |
| { |
| "epoch": 2.4765784114052956, |
| "grad_norm": 0.6097988006585077, |
| "learning_rate": 8.835236598757796e-07, |
| "loss": 0.0584, |
| "step": 608 |
| }, |
| { |
| "epoch": 2.480651731160896, |
| "grad_norm": 0.4935706902056788, |
| "learning_rate": 8.70081475703406e-07, |
| "loss": 0.0478, |
| "step": 609 |
| }, |
| { |
| "epoch": 2.484725050916497, |
| "grad_norm": 0.5985362716660774, |
| "learning_rate": 8.567325820465156e-07, |
| "loss": 0.0597, |
| "step": 610 |
| }, |
| { |
| "epoch": 2.4887983706720975, |
| "grad_norm": 0.5284791943513719, |
| "learning_rate": 8.434772804425734e-07, |
| "loss": 0.0531, |
| "step": 611 |
| }, |
| { |
| "epoch": 2.4928716904276986, |
| "grad_norm": 0.522781613963704, |
| "learning_rate": 8.303158703149023e-07, |
| "loss": 0.0544, |
| "step": 612 |
| }, |
| { |
| "epoch": 2.4969450101832993, |
| "grad_norm": 0.5096764047022209, |
| "learning_rate": 8.172486489659115e-07, |
| "loss": 0.0527, |
| "step": 613 |
| }, |
| { |
| "epoch": 2.5010183299389004, |
| "grad_norm": 0.523550451350741, |
| "learning_rate": 8.042759115703891e-07, |
| "loss": 0.058, |
| "step": 614 |
| }, |
| { |
| "epoch": 2.505091649694501, |
| "grad_norm": 0.6048681173164258, |
| "learning_rate": 7.913979511688252e-07, |
| "loss": 0.0483, |
| "step": 615 |
| }, |
| { |
| "epoch": 2.5091649694501017, |
| "grad_norm": 0.5391166434768498, |
| "learning_rate": 7.78615058660801e-07, |
| "loss": 0.0605, |
| "step": 616 |
| }, |
| { |
| "epoch": 2.513238289205703, |
| "grad_norm": 0.5352189458167353, |
| "learning_rate": 7.659275227984142e-07, |
| "loss": 0.0523, |
| "step": 617 |
| }, |
| { |
| "epoch": 2.5173116089613035, |
| "grad_norm": 0.4683551087780319, |
| "learning_rate": 7.533356301797523e-07, |
| "loss": 0.0502, |
| "step": 618 |
| }, |
| { |
| "epoch": 2.521384928716904, |
| "grad_norm": 0.47114338566637726, |
| "learning_rate": 7.408396652424271e-07, |
| "loss": 0.0448, |
| "step": 619 |
| }, |
| { |
| "epoch": 2.525458248472505, |
| "grad_norm": 0.640409339931811, |
| "learning_rate": 7.28439910257141e-07, |
| "loss": 0.0651, |
| "step": 620 |
| }, |
| { |
| "epoch": 2.529531568228106, |
| "grad_norm": 0.5601868681505874, |
| "learning_rate": 7.161366453213181e-07, |
| "loss": 0.0491, |
| "step": 621 |
| }, |
| { |
| "epoch": 2.5336048879837065, |
| "grad_norm": 0.5268991684105175, |
| "learning_rate": 7.03930148352771e-07, |
| "loss": 0.0561, |
| "step": 622 |
| }, |
| { |
| "epoch": 2.5376782077393076, |
| "grad_norm": 0.5189508527991806, |
| "learning_rate": 6.918206950834283e-07, |
| "loss": 0.0564, |
| "step": 623 |
| }, |
| { |
| "epoch": 2.5417515274949083, |
| "grad_norm": 0.5499500751931548, |
| "learning_rate": 6.798085590531012e-07, |
| "loss": 0.0517, |
| "step": 624 |
| }, |
| { |
| "epoch": 2.5458248472505094, |
| "grad_norm": 0.4527035789857888, |
| "learning_rate": 6.678940116033095e-07, |
| "loss": 0.0375, |
| "step": 625 |
| }, |
| { |
| "epoch": 2.54989816700611, |
| "grad_norm": 0.5338998771545707, |
| "learning_rate": 6.560773218711458e-07, |
| "loss": 0.0499, |
| "step": 626 |
| }, |
| { |
| "epoch": 2.5539714867617107, |
| "grad_norm": 0.5735924608061649, |
| "learning_rate": 6.443587567832044e-07, |
| "loss": 0.0492, |
| "step": 627 |
| }, |
| { |
| "epoch": 2.5580448065173114, |
| "grad_norm": 0.603349350424592, |
| "learning_rate": 6.327385810495423e-07, |
| "loss": 0.0522, |
| "step": 628 |
| }, |
| { |
| "epoch": 2.5621181262729125, |
| "grad_norm": 0.5595778882066061, |
| "learning_rate": 6.212170571577087e-07, |
| "loss": 0.0568, |
| "step": 629 |
| }, |
| { |
| "epoch": 2.566191446028513, |
| "grad_norm": 0.512969682605379, |
| "learning_rate": 6.097944453668081e-07, |
| "loss": 0.0526, |
| "step": 630 |
| }, |
| { |
| "epoch": 2.5702647657841142, |
| "grad_norm": 0.5180380127456573, |
| "learning_rate": 5.984710037016267e-07, |
| "loss": 0.0483, |
| "step": 631 |
| }, |
| { |
| "epoch": 2.574338085539715, |
| "grad_norm": 0.6075975914188283, |
| "learning_rate": 5.872469879468024e-07, |
| "loss": 0.0721, |
| "step": 632 |
| }, |
| { |
| "epoch": 2.5784114052953155, |
| "grad_norm": 0.5336722886897108, |
| "learning_rate": 5.761226516410434e-07, |
| "loss": 0.0463, |
| "step": 633 |
| }, |
| { |
| "epoch": 2.5824847250509166, |
| "grad_norm": 0.6058456905813909, |
| "learning_rate": 5.650982460714083e-07, |
| "loss": 0.0626, |
| "step": 634 |
| }, |
| { |
| "epoch": 2.5865580448065173, |
| "grad_norm": 0.5754371523221545, |
| "learning_rate": 5.54174020267621e-07, |
| "loss": 0.0594, |
| "step": 635 |
| }, |
| { |
| "epoch": 2.5906313645621184, |
| "grad_norm": 0.4887591182444653, |
| "learning_rate": 5.433502209964531e-07, |
| "loss": 0.0501, |
| "step": 636 |
| }, |
| { |
| "epoch": 2.594704684317719, |
| "grad_norm": 0.5360715704188095, |
| "learning_rate": 5.326270927561444e-07, |
| "loss": 0.0607, |
| "step": 637 |
| }, |
| { |
| "epoch": 2.5987780040733197, |
| "grad_norm": 0.5087080282646776, |
| "learning_rate": 5.22004877770883e-07, |
| "loss": 0.0548, |
| "step": 638 |
| }, |
| { |
| "epoch": 2.6028513238289204, |
| "grad_norm": 0.5861344050054748, |
| "learning_rate": 5.114838159853336e-07, |
| "loss": 0.0574, |
| "step": 639 |
| }, |
| { |
| "epoch": 2.6069246435845215, |
| "grad_norm": 0.5708633055924194, |
| "learning_rate": 5.010641450592158e-07, |
| "loss": 0.0632, |
| "step": 640 |
| }, |
| { |
| "epoch": 2.610997963340122, |
| "grad_norm": 0.662348811517296, |
| "learning_rate": 4.907461003619346e-07, |
| "loss": 0.0651, |
| "step": 641 |
| }, |
| { |
| "epoch": 2.6150712830957232, |
| "grad_norm": 0.5246547863472681, |
| "learning_rate": 4.805299149672682e-07, |
| "loss": 0.0526, |
| "step": 642 |
| }, |
| { |
| "epoch": 2.619144602851324, |
| "grad_norm": 0.5043876018156805, |
| "learning_rate": 4.7041581964809733e-07, |
| "loss": 0.049, |
| "step": 643 |
| }, |
| { |
| "epoch": 2.6232179226069245, |
| "grad_norm": 0.5353768729451103, |
| "learning_rate": 4.6040404287119924e-07, |
| "loss": 0.0541, |
| "step": 644 |
| }, |
| { |
| "epoch": 2.627291242362525, |
| "grad_norm": 0.5359117377100427, |
| "learning_rate": 4.504948107920781e-07, |
| "loss": 0.0521, |
| "step": 645 |
| }, |
| { |
| "epoch": 2.6313645621181263, |
| "grad_norm": 0.5043588086133749, |
| "learning_rate": 4.4068834724986466e-07, |
| "loss": 0.0522, |
| "step": 646 |
| }, |
| { |
| "epoch": 2.635437881873727, |
| "grad_norm": 0.5746780285678063, |
| "learning_rate": 4.309848737622568e-07, |
| "loss": 0.0543, |
| "step": 647 |
| }, |
| { |
| "epoch": 2.639511201629328, |
| "grad_norm": 0.5265122594002428, |
| "learning_rate": 4.213846095205126e-07, |
| "loss": 0.0506, |
| "step": 648 |
| }, |
| { |
| "epoch": 2.6435845213849287, |
| "grad_norm": 0.661586406565051, |
| "learning_rate": 4.1188777138450487e-07, |
| "loss": 0.0683, |
| "step": 649 |
| }, |
| { |
| "epoch": 2.6476578411405294, |
| "grad_norm": 0.5451290005280155, |
| "learning_rate": 4.024945738778163e-07, |
| "loss": 0.0638, |
| "step": 650 |
| }, |
| { |
| "epoch": 2.6517311608961305, |
| "grad_norm": 0.506889681969563, |
| "learning_rate": 3.9320522918289973e-07, |
| "loss": 0.0539, |
| "step": 651 |
| }, |
| { |
| "epoch": 2.655804480651731, |
| "grad_norm": 0.508054613629184, |
| "learning_rate": 3.8401994713628044e-07, |
| "loss": 0.058, |
| "step": 652 |
| }, |
| { |
| "epoch": 2.6598778004073322, |
| "grad_norm": 0.46411072307012485, |
| "learning_rate": 3.7493893522381866e-07, |
| "loss": 0.0487, |
| "step": 653 |
| }, |
| { |
| "epoch": 2.663951120162933, |
| "grad_norm": 0.5425758195691035, |
| "learning_rate": 3.6596239857602136e-07, |
| "loss": 0.0634, |
| "step": 654 |
| }, |
| { |
| "epoch": 2.6680244399185336, |
| "grad_norm": 0.49451737449220107, |
| "learning_rate": 3.570905399634111e-07, |
| "loss": 0.0459, |
| "step": 655 |
| }, |
| { |
| "epoch": 2.672097759674134, |
| "grad_norm": 0.44210863796675465, |
| "learning_rate": 3.483235597919404e-07, |
| "loss": 0.0477, |
| "step": 656 |
| }, |
| { |
| "epoch": 2.6761710794297353, |
| "grad_norm": 0.49445804954150435, |
| "learning_rate": 3.396616560984711e-07, |
| "loss": 0.0475, |
| "step": 657 |
| }, |
| { |
| "epoch": 2.680244399185336, |
| "grad_norm": 0.46634588152626033, |
| "learning_rate": 3.31105024546296e-07, |
| "loss": 0.0481, |
| "step": 658 |
| }, |
| { |
| "epoch": 2.684317718940937, |
| "grad_norm": 0.601079346187547, |
| "learning_rate": 3.226538584207228e-07, |
| "loss": 0.0667, |
| "step": 659 |
| }, |
| { |
| "epoch": 2.6883910386965377, |
| "grad_norm": 0.5812700555601793, |
| "learning_rate": 3.1430834862470395e-07, |
| "loss": 0.0705, |
| "step": 660 |
| }, |
| { |
| "epoch": 2.6924643584521384, |
| "grad_norm": 0.7003973674162264, |
| "learning_rate": 3.0606868367452746e-07, |
| "loss": 0.0535, |
| "step": 661 |
| }, |
| { |
| "epoch": 2.696537678207739, |
| "grad_norm": 0.51665925274009, |
| "learning_rate": 2.9793504969555965e-07, |
| "loss": 0.06, |
| "step": 662 |
| }, |
| { |
| "epoch": 2.70061099796334, |
| "grad_norm": 0.5697406967057358, |
| "learning_rate": 2.899076304180348e-07, |
| "loss": 0.057, |
| "step": 663 |
| }, |
| { |
| "epoch": 2.704684317718941, |
| "grad_norm": 0.720482009992466, |
| "learning_rate": 2.819866071729127e-07, |
| "loss": 0.0728, |
| "step": 664 |
| }, |
| { |
| "epoch": 2.708757637474542, |
| "grad_norm": 0.5699499020495501, |
| "learning_rate": 2.7417215888777493e-07, |
| "loss": 0.051, |
| "step": 665 |
| }, |
| { |
| "epoch": 2.7128309572301426, |
| "grad_norm": 0.6573556011316422, |
| "learning_rate": 2.6646446208279054e-07, |
| "loss": 0.0538, |
| "step": 666 |
| }, |
| { |
| "epoch": 2.716904276985743, |
| "grad_norm": 0.4887856278150233, |
| "learning_rate": 2.5886369086672193e-07, |
| "loss": 0.053, |
| "step": 667 |
| }, |
| { |
| "epoch": 2.7209775967413443, |
| "grad_norm": 0.49280542352437884, |
| "learning_rate": 2.513700169329963e-07, |
| "loss": 0.0495, |
| "step": 668 |
| }, |
| { |
| "epoch": 2.725050916496945, |
| "grad_norm": 0.6169181780694558, |
| "learning_rate": 2.439836095558262e-07, |
| "loss": 0.0636, |
| "step": 669 |
| }, |
| { |
| "epoch": 2.729124236252546, |
| "grad_norm": 0.5218412387171426, |
| "learning_rate": 2.3670463558638556e-07, |
| "loss": 0.0481, |
| "step": 670 |
| }, |
| { |
| "epoch": 2.7331975560081467, |
| "grad_norm": 0.5680461781449695, |
| "learning_rate": 2.2953325944903848e-07, |
| "loss": 0.0526, |
| "step": 671 |
| }, |
| { |
| "epoch": 2.7372708757637474, |
| "grad_norm": 0.5492531983164374, |
| "learning_rate": 2.2246964313763053e-07, |
| "loss": 0.0571, |
| "step": 672 |
| }, |
| { |
| "epoch": 2.741344195519348, |
| "grad_norm": 0.5560387843417408, |
| "learning_rate": 2.1551394621182277e-07, |
| "loss": 0.0492, |
| "step": 673 |
| }, |
| { |
| "epoch": 2.745417515274949, |
| "grad_norm": 0.5543243275706182, |
| "learning_rate": 2.08666325793494e-07, |
| "loss": 0.0554, |
| "step": 674 |
| }, |
| { |
| "epoch": 2.74949083503055, |
| "grad_norm": 0.49903525405060517, |
| "learning_rate": 2.0192693656318597e-07, |
| "loss": 0.0527, |
| "step": 675 |
| }, |
| { |
| "epoch": 2.753564154786151, |
| "grad_norm": 0.5378231743820772, |
| "learning_rate": 1.9529593075661267e-07, |
| "loss": 0.0554, |
| "step": 676 |
| }, |
| { |
| "epoch": 2.7576374745417516, |
| "grad_norm": 0.531142895329079, |
| "learning_rate": 1.8877345816122162e-07, |
| "loss": 0.0497, |
| "step": 677 |
| }, |
| { |
| "epoch": 2.7617107942973522, |
| "grad_norm": 0.5354906898018376, |
| "learning_rate": 1.8235966611280687e-07, |
| "loss": 0.055, |
| "step": 678 |
| }, |
| { |
| "epoch": 2.765784114052953, |
| "grad_norm": 0.4819540501851211, |
| "learning_rate": 1.760546994921858e-07, |
| "loss": 0.0506, |
| "step": 679 |
| }, |
| { |
| "epoch": 2.769857433808554, |
| "grad_norm": 0.7378374904306403, |
| "learning_rate": 1.6985870072192156e-07, |
| "loss": 0.0669, |
| "step": 680 |
| }, |
| { |
| "epoch": 2.7739307535641546, |
| "grad_norm": 0.5443906211086335, |
| "learning_rate": 1.6377180976310968e-07, |
| "loss": 0.0556, |
| "step": 681 |
| }, |
| { |
| "epoch": 2.7780040733197557, |
| "grad_norm": 0.532807192422146, |
| "learning_rate": 1.5779416411221437e-07, |
| "loss": 0.051, |
| "step": 682 |
| }, |
| { |
| "epoch": 2.7820773930753564, |
| "grad_norm": 0.49443742410841357, |
| "learning_rate": 1.5192589879796383e-07, |
| "loss": 0.0454, |
| "step": 683 |
| }, |
| { |
| "epoch": 2.786150712830957, |
| "grad_norm": 0.4630376231064289, |
| "learning_rate": 1.4616714637829822e-07, |
| "loss": 0.0523, |
| "step": 684 |
| }, |
| { |
| "epoch": 2.790224032586558, |
| "grad_norm": 0.539375432278045, |
| "learning_rate": 1.4051803693737876e-07, |
| "loss": 0.0537, |
| "step": 685 |
| }, |
| { |
| "epoch": 2.794297352342159, |
| "grad_norm": 0.8034073743488359, |
| "learning_rate": 1.3497869808264453e-07, |
| "loss": 0.0795, |
| "step": 686 |
| }, |
| { |
| "epoch": 2.79837067209776, |
| "grad_norm": 0.5038819553185171, |
| "learning_rate": 1.2954925494193472e-07, |
| "loss": 0.0538, |
| "step": 687 |
| }, |
| { |
| "epoch": 2.8024439918533606, |
| "grad_norm": 0.48441980404994855, |
| "learning_rate": 1.2422983016065816e-07, |
| "loss": 0.0479, |
| "step": 688 |
| }, |
| { |
| "epoch": 2.8065173116089612, |
| "grad_norm": 0.8743628097944234, |
| "learning_rate": 1.1902054389902662e-07, |
| "loss": 0.0618, |
| "step": 689 |
| }, |
| { |
| "epoch": 2.810590631364562, |
| "grad_norm": 0.5425881067127292, |
| "learning_rate": 1.1392151382933647e-07, |
| "loss": 0.0468, |
| "step": 690 |
| }, |
| { |
| "epoch": 2.814663951120163, |
| "grad_norm": 0.567368583615139, |
| "learning_rate": 1.0893285513331353e-07, |
| "loss": 0.0571, |
| "step": 691 |
| }, |
| { |
| "epoch": 2.8187372708757636, |
| "grad_norm": 0.5327963060010983, |
| "learning_rate": 1.0405468049951184e-07, |
| "loss": 0.0531, |
| "step": 692 |
| }, |
| { |
| "epoch": 2.8228105906313647, |
| "grad_norm": 0.538831659174035, |
| "learning_rate": 9.928710012076404e-08, |
| "loss": 0.0627, |
| "step": 693 |
| }, |
| { |
| "epoch": 2.8268839103869654, |
| "grad_norm": 0.5154716019555042, |
| "learning_rate": 9.463022169169666e-08, |
| "loss": 0.0532, |
| "step": 694 |
| }, |
| { |
| "epoch": 2.830957230142566, |
| "grad_norm": 0.5487742948753056, |
| "learning_rate": 9.008415040629548e-08, |
| "loss": 0.0521, |
| "step": 695 |
| }, |
| { |
| "epoch": 2.835030549898167, |
| "grad_norm": 0.5646931710560842, |
| "learning_rate": 8.564898895552843e-08, |
| "loss": 0.0576, |
| "step": 696 |
| }, |
| { |
| "epoch": 2.839103869653768, |
| "grad_norm": 0.5357614665420911, |
| "learning_rate": 8.132483752502806e-08, |
| "loss": 0.0411, |
| "step": 697 |
| }, |
| { |
| "epoch": 2.8431771894093685, |
| "grad_norm": 0.5261194274574259, |
| "learning_rate": 7.711179379282674e-08, |
| "loss": 0.0586, |
| "step": 698 |
| }, |
| { |
| "epoch": 2.8472505091649696, |
| "grad_norm": 0.64867339540957, |
| "learning_rate": 7.300995292715107e-08, |
| "loss": 0.0718, |
| "step": 699 |
| }, |
| { |
| "epoch": 2.8513238289205702, |
| "grad_norm": 0.5622104668213962, |
| "learning_rate": 6.901940758427206e-08, |
| "loss": 0.0613, |
| "step": 700 |
| }, |
| { |
| "epoch": 2.855397148676171, |
| "grad_norm": 0.5058496893775121, |
| "learning_rate": 6.514024790641116e-08, |
| "loss": 0.0433, |
| "step": 701 |
| }, |
| { |
| "epoch": 2.859470468431772, |
| "grad_norm": 0.511327209676788, |
| "learning_rate": 6.137256151970583e-08, |
| "loss": 0.0476, |
| "step": 702 |
| }, |
| { |
| "epoch": 2.8635437881873727, |
| "grad_norm": 0.4909084303099041, |
| "learning_rate": 5.771643353222778e-08, |
| "loss": 0.0503, |
| "step": 703 |
| }, |
| { |
| "epoch": 2.8676171079429738, |
| "grad_norm": 0.5116618241300486, |
| "learning_rate": 5.417194653206337e-08, |
| "loss": 0.0534, |
| "step": 704 |
| }, |
| { |
| "epoch": 2.8716904276985744, |
| "grad_norm": 0.5584027076430503, |
| "learning_rate": 5.073918058544458e-08, |
| "loss": 0.0541, |
| "step": 705 |
| }, |
| { |
| "epoch": 2.875763747454175, |
| "grad_norm": 0.5057123288706858, |
| "learning_rate": 4.741821323494489e-08, |
| "loss": 0.049, |
| "step": 706 |
| }, |
| { |
| "epoch": 2.8798370672097757, |
| "grad_norm": 0.47063093819659735, |
| "learning_rate": 4.4209119497722883e-08, |
| "loss": 0.0459, |
| "step": 707 |
| }, |
| { |
| "epoch": 2.883910386965377, |
| "grad_norm": 0.5499570294814297, |
| "learning_rate": 4.1111971863830866e-08, |
| "loss": 0.051, |
| "step": 708 |
| }, |
| { |
| "epoch": 2.8879837067209775, |
| "grad_norm": 0.562032429860817, |
| "learning_rate": 3.812684029457614e-08, |
| "loss": 0.0551, |
| "step": 709 |
| }, |
| { |
| "epoch": 2.8920570264765786, |
| "grad_norm": 0.4803177481708313, |
| "learning_rate": 3.525379222094061e-08, |
| "loss": 0.0482, |
| "step": 710 |
| }, |
| { |
| "epoch": 2.8961303462321792, |
| "grad_norm": 0.5693974905842359, |
| "learning_rate": 3.249289254205867e-08, |
| "loss": 0.0547, |
| "step": 711 |
| }, |
| { |
| "epoch": 2.90020366598778, |
| "grad_norm": 0.5093963289065777, |
| "learning_rate": 2.984420362375007e-08, |
| "loss": 0.0483, |
| "step": 712 |
| }, |
| { |
| "epoch": 2.904276985743381, |
| "grad_norm": 0.5370768480301503, |
| "learning_rate": 2.7307785297111533e-08, |
| "loss": 0.049, |
| "step": 713 |
| }, |
| { |
| "epoch": 2.9083503054989817, |
| "grad_norm": 0.4995391928644817, |
| "learning_rate": 2.488369485716513e-08, |
| "loss": 0.0554, |
| "step": 714 |
| }, |
| { |
| "epoch": 2.9124236252545828, |
| "grad_norm": 0.5285125291946263, |
| "learning_rate": 2.2571987061564827e-08, |
| "loss": 0.0501, |
| "step": 715 |
| }, |
| { |
| "epoch": 2.9164969450101834, |
| "grad_norm": 0.459843419226465, |
| "learning_rate": 2.0372714129356375e-08, |
| "loss": 0.0499, |
| "step": 716 |
| }, |
| { |
| "epoch": 2.920570264765784, |
| "grad_norm": 0.5328141129452822, |
| "learning_rate": 1.8285925739803812e-08, |
| "loss": 0.0479, |
| "step": 717 |
| }, |
| { |
| "epoch": 2.9246435845213847, |
| "grad_norm": 0.5049667227544917, |
| "learning_rate": 1.631166903126147e-08, |
| "loss": 0.056, |
| "step": 718 |
| }, |
| { |
| "epoch": 2.928716904276986, |
| "grad_norm": 0.6654261685402668, |
| "learning_rate": 1.4449988600111486e-08, |
| "loss": 0.0753, |
| "step": 719 |
| }, |
| { |
| "epoch": 2.9327902240325865, |
| "grad_norm": 0.5307305705426542, |
| "learning_rate": 1.2700926499756295e-08, |
| "loss": 0.06, |
| "step": 720 |
| }, |
| { |
| "epoch": 2.9368635437881876, |
| "grad_norm": 0.47213203439246243, |
| "learning_rate": 1.1064522239669916e-08, |
| "loss": 0.0447, |
| "step": 721 |
| }, |
| { |
| "epoch": 2.9409368635437882, |
| "grad_norm": 0.7320393187073869, |
| "learning_rate": 9.54081278450314e-09, |
| "loss": 0.055, |
| "step": 722 |
| }, |
| { |
| "epoch": 2.945010183299389, |
| "grad_norm": 0.5028660422498342, |
| "learning_rate": 8.129832553249173e-09, |
| "loss": 0.0503, |
| "step": 723 |
| }, |
| { |
| "epoch": 2.9490835030549896, |
| "grad_norm": 0.5333639247041516, |
| "learning_rate": 6.831613418468163e-09, |
| "loss": 0.0583, |
| "step": 724 |
| }, |
| { |
| "epoch": 2.9531568228105907, |
| "grad_norm": 0.5073138357128516, |
| "learning_rate": 5.646184705563884e-09, |
| "loss": 0.0442, |
| "step": 725 |
| }, |
| { |
| "epoch": 2.9572301425661913, |
| "grad_norm": 0.5114472818399121, |
| "learning_rate": 4.573573192125369e-09, |
| "loss": 0.0458, |
| "step": 726 |
| }, |
| { |
| "epoch": 2.9613034623217924, |
| "grad_norm": 0.5014539904140853, |
| "learning_rate": 3.613803107317959e-09, |
| "loss": 0.0583, |
| "step": 727 |
| }, |
| { |
| "epoch": 2.965376782077393, |
| "grad_norm": 0.548413831695217, |
| "learning_rate": 2.7668961313376263e-09, |
| "loss": 0.0542, |
| "step": 728 |
| }, |
| { |
| "epoch": 2.9694501018329937, |
| "grad_norm": 0.5658606230815872, |
| "learning_rate": 2.0328713949230304e-09, |
| "loss": 0.0606, |
| "step": 729 |
| }, |
| { |
| "epoch": 2.973523421588595, |
| "grad_norm": 0.5401340430143629, |
| "learning_rate": 1.4117454789208673e-09, |
| "loss": 0.0575, |
| "step": 730 |
| }, |
| { |
| "epoch": 2.9775967413441955, |
| "grad_norm": 0.5284701914276811, |
| "learning_rate": 9.03532413911723e-10, |
| "loss": 0.0527, |
| "step": 731 |
| }, |
| { |
| "epoch": 2.9816700610997966, |
| "grad_norm": 0.483298515577993, |
| "learning_rate": 5.08243679894771e-10, |
| "loss": 0.0427, |
| "step": 732 |
| }, |
| { |
| "epoch": 2.9857433808553973, |
| "grad_norm": 0.5532393922992762, |
| "learning_rate": 2.2588820602631457e-10, |
| "loss": 0.0652, |
| "step": 733 |
| }, |
| { |
| "epoch": 2.989816700610998, |
| "grad_norm": 0.5608508235291606, |
| "learning_rate": 5.6472370419391464e-11, |
| "loss": 0.061, |
| "step": 734 |
| }, |
| { |
| "epoch": 2.9938900203665986, |
| "grad_norm": 0.5594965133018163, |
| "learning_rate": 0.0, |
| "loss": 0.0671, |
| "step": 735 |
| }, |
| { |
| "epoch": 2.9938900203665986, |
| "step": 735, |
| "total_flos": 31611385036800.0, |
| "train_loss": 0.10544327923736604, |
| "train_runtime": 3212.0008, |
| "train_samples_per_second": 3.668, |
| "train_steps_per_second": 0.229 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 735, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 250, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 31611385036800.0, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |