| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 2.9938900203665986, |
| "eval_steps": 500, |
| "global_step": 735, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.004073319755600814, |
| "grad_norm": 6.640312047414066, |
| "learning_rate": 1.3513513513513515e-07, |
| "loss": 0.2975, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.008146639511201629, |
| "grad_norm": 6.28588556129052, |
| "learning_rate": 2.702702702702703e-07, |
| "loss": 0.2922, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.012219959266802444, |
| "grad_norm": 6.53425740231569, |
| "learning_rate": 4.0540540540540546e-07, |
| "loss": 0.3306, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.016293279022403257, |
| "grad_norm": 6.4274428823636995, |
| "learning_rate": 5.405405405405406e-07, |
| "loss": 0.298, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.020366598778004074, |
| "grad_norm": 6.300509704789429, |
| "learning_rate": 6.756756756756758e-07, |
| "loss": 0.2764, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.024439918533604887, |
| "grad_norm": 5.664520140886496, |
| "learning_rate": 8.108108108108109e-07, |
| "loss": 0.282, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.028513238289205704, |
| "grad_norm": 5.353461680124434, |
| "learning_rate": 9.459459459459461e-07, |
| "loss": 0.3105, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.032586558044806514, |
| "grad_norm": 4.456679664512122, |
| "learning_rate": 1.0810810810810812e-06, |
| "loss": 0.2629, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.03665987780040733, |
| "grad_norm": 4.728194529681125, |
| "learning_rate": 1.2162162162162164e-06, |
| "loss": 0.2625, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.04073319755600815, |
| "grad_norm": 3.0137557205711736, |
| "learning_rate": 1.3513513513513515e-06, |
| "loss": 0.2324, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.04480651731160896, |
| "grad_norm": 3.805111529191594, |
| "learning_rate": 1.4864864864864868e-06, |
| "loss": 0.2635, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.048879837067209775, |
| "grad_norm": 3.340077069220912, |
| "learning_rate": 1.6216216216216219e-06, |
| "loss": 0.2386, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.05295315682281059, |
| "grad_norm": 3.997290518175738, |
| "learning_rate": 1.756756756756757e-06, |
| "loss": 0.2226, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.05702647657841141, |
| "grad_norm": 3.977876990477163, |
| "learning_rate": 1.8918918918918922e-06, |
| "loss": 0.2593, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.06109979633401222, |
| "grad_norm": 3.367674913595299, |
| "learning_rate": 2.0270270270270273e-06, |
| "loss": 0.2355, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.06517311608961303, |
| "grad_norm": 3.1181805119884842, |
| "learning_rate": 2.1621621621621623e-06, |
| "loss": 0.2364, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.06924643584521385, |
| "grad_norm": 2.9154930914971473, |
| "learning_rate": 2.297297297297298e-06, |
| "loss": 0.2072, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.07331975560081466, |
| "grad_norm": 2.926858516811969, |
| "learning_rate": 2.432432432432433e-06, |
| "loss": 0.193, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.07739307535641547, |
| "grad_norm": 3.2833775927281867, |
| "learning_rate": 2.5675675675675675e-06, |
| "loss": 0.2429, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.0814663951120163, |
| "grad_norm": 2.60551584899032, |
| "learning_rate": 2.702702702702703e-06, |
| "loss": 0.1904, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.0855397148676171, |
| "grad_norm": 2.4720627934368578, |
| "learning_rate": 2.837837837837838e-06, |
| "loss": 0.2241, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.08961303462321792, |
| "grad_norm": 2.7061457567778286, |
| "learning_rate": 2.9729729729729736e-06, |
| "loss": 0.1787, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.09368635437881874, |
| "grad_norm": 2.5113937626874554, |
| "learning_rate": 3.1081081081081082e-06, |
| "loss": 0.2011, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.09775967413441955, |
| "grad_norm": 2.28679738040873, |
| "learning_rate": 3.2432432432432437e-06, |
| "loss": 0.1757, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.10183299389002037, |
| "grad_norm": 1.9954370385777342, |
| "learning_rate": 3.3783783783783788e-06, |
| "loss": 0.156, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.10590631364562118, |
| "grad_norm": 1.865602902801458, |
| "learning_rate": 3.513513513513514e-06, |
| "loss": 0.1652, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.109979633401222, |
| "grad_norm": 2.2088323944290162, |
| "learning_rate": 3.648648648648649e-06, |
| "loss": 0.1481, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.11405295315682282, |
| "grad_norm": 2.3442977739891253, |
| "learning_rate": 3.7837837837837844e-06, |
| "loss": 0.1499, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.11812627291242363, |
| "grad_norm": 1.4670114524312288, |
| "learning_rate": 3.918918918918919e-06, |
| "loss": 0.1372, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.12219959266802444, |
| "grad_norm": 2.0073103982055134, |
| "learning_rate": 4.0540540540540545e-06, |
| "loss": 0.1705, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.12627291242362526, |
| "grad_norm": 1.832956891032527, |
| "learning_rate": 4.189189189189189e-06, |
| "loss": 0.1435, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.13034623217922606, |
| "grad_norm": 2.094155576804916, |
| "learning_rate": 4.324324324324325e-06, |
| "loss": 0.1775, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.13441955193482688, |
| "grad_norm": 1.8236712859693212, |
| "learning_rate": 4.45945945945946e-06, |
| "loss": 0.1754, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.1384928716904277, |
| "grad_norm": 1.593806859793022, |
| "learning_rate": 4.594594594594596e-06, |
| "loss": 0.1218, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.1425661914460285, |
| "grad_norm": 1.8242371066883118, |
| "learning_rate": 4.72972972972973e-06, |
| "loss": 0.1676, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.14663951120162932, |
| "grad_norm": 1.6067651811563963, |
| "learning_rate": 4.864864864864866e-06, |
| "loss": 0.1561, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.15071283095723015, |
| "grad_norm": 1.6523754405520419, |
| "learning_rate": 5e-06, |
| "loss": 0.1314, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.15478615071283094, |
| "grad_norm": 1.5370079826272665, |
| "learning_rate": 5.135135135135135e-06, |
| "loss": 0.1517, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.15885947046843177, |
| "grad_norm": 1.672563461104479, |
| "learning_rate": 5.2702702702702705e-06, |
| "loss": 0.1697, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.1629327902240326, |
| "grad_norm": 1.691060384246397, |
| "learning_rate": 5.405405405405406e-06, |
| "loss": 0.1803, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.1670061099796334, |
| "grad_norm": 1.5317972325193578, |
| "learning_rate": 5.540540540540541e-06, |
| "loss": 0.131, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.1710794297352342, |
| "grad_norm": 1.4413677201314865, |
| "learning_rate": 5.675675675675676e-06, |
| "loss": 0.1195, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.17515274949083504, |
| "grad_norm": 1.7167120970950247, |
| "learning_rate": 5.810810810810811e-06, |
| "loss": 0.1872, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.17922606924643583, |
| "grad_norm": 1.415008811809238, |
| "learning_rate": 5.945945945945947e-06, |
| "loss": 0.1329, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.18329938900203666, |
| "grad_norm": 1.4698432274933515, |
| "learning_rate": 6.081081081081082e-06, |
| "loss": 0.148, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.18737270875763748, |
| "grad_norm": 1.4400084379892886, |
| "learning_rate": 6.2162162162162164e-06, |
| "loss": 0.1481, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.19144602851323828, |
| "grad_norm": 2.0030021688014505, |
| "learning_rate": 6.351351351351351e-06, |
| "loss": 0.1542, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.1955193482688391, |
| "grad_norm": 1.5204875720098323, |
| "learning_rate": 6.486486486486487e-06, |
| "loss": 0.1142, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.19959266802443992, |
| "grad_norm": 1.8023111763307385, |
| "learning_rate": 6.621621621621622e-06, |
| "loss": 0.1623, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.20366598778004075, |
| "grad_norm": 1.387841444224276, |
| "learning_rate": 6.7567567567567575e-06, |
| "loss": 0.1226, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.20773930753564154, |
| "grad_norm": 1.4770728603330572, |
| "learning_rate": 6.891891891891892e-06, |
| "loss": 0.1532, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.21181262729124237, |
| "grad_norm": 1.5526099510243074, |
| "learning_rate": 7.027027027027028e-06, |
| "loss": 0.1203, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.2158859470468432, |
| "grad_norm": 1.522290383063382, |
| "learning_rate": 7.162162162162163e-06, |
| "loss": 0.1515, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.219959266802444, |
| "grad_norm": 1.5406804163240948, |
| "learning_rate": 7.297297297297298e-06, |
| "loss": 0.1469, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.2240325865580448, |
| "grad_norm": 1.527302080895313, |
| "learning_rate": 7.4324324324324324e-06, |
| "loss": 0.1352, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.22810590631364563, |
| "grad_norm": 1.558342044097894, |
| "learning_rate": 7.567567567567569e-06, |
| "loss": 0.1411, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.23217922606924643, |
| "grad_norm": 1.2770172249503504, |
| "learning_rate": 7.702702702702704e-06, |
| "loss": 0.1182, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.23625254582484725, |
| "grad_norm": 1.3969066787405495, |
| "learning_rate": 7.837837837837838e-06, |
| "loss": 0.137, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.24032586558044808, |
| "grad_norm": 1.67018108612516, |
| "learning_rate": 7.972972972972974e-06, |
| "loss": 0.1798, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.24439918533604887, |
| "grad_norm": 1.5897484049578208, |
| "learning_rate": 8.108108108108109e-06, |
| "loss": 0.1659, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.2484725050916497, |
| "grad_norm": 1.158481901678518, |
| "learning_rate": 8.243243243243245e-06, |
| "loss": 0.1247, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.2525458248472505, |
| "grad_norm": 1.2947448113190885, |
| "learning_rate": 8.378378378378378e-06, |
| "loss": 0.1219, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.25661914460285135, |
| "grad_norm": 1.489514366083781, |
| "learning_rate": 8.513513513513514e-06, |
| "loss": 0.1276, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.2606924643584521, |
| "grad_norm": 1.2576829907966123, |
| "learning_rate": 8.64864864864865e-06, |
| "loss": 0.1313, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.26476578411405294, |
| "grad_norm": 1.759594359119281, |
| "learning_rate": 8.783783783783785e-06, |
| "loss": 0.207, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.26883910386965376, |
| "grad_norm": 1.2363649869365454, |
| "learning_rate": 8.91891891891892e-06, |
| "loss": 0.1334, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.2729124236252546, |
| "grad_norm": 1.466539848741119, |
| "learning_rate": 9.054054054054054e-06, |
| "loss": 0.1495, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.2769857433808554, |
| "grad_norm": 1.3201296592763596, |
| "learning_rate": 9.189189189189191e-06, |
| "loss": 0.1324, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.28105906313645623, |
| "grad_norm": 1.2111706000441922, |
| "learning_rate": 9.324324324324325e-06, |
| "loss": 0.1312, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.285132382892057, |
| "grad_norm": 1.2979543096114095, |
| "learning_rate": 9.45945945945946e-06, |
| "loss": 0.1208, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.2892057026476578, |
| "grad_norm": 1.4259159096209377, |
| "learning_rate": 9.594594594594594e-06, |
| "loss": 0.124, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.29327902240325865, |
| "grad_norm": 1.216603620132262, |
| "learning_rate": 9.729729729729732e-06, |
| "loss": 0.1157, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.2973523421588595, |
| "grad_norm": 1.3357647312150325, |
| "learning_rate": 9.864864864864865e-06, |
| "loss": 0.1297, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.3014256619144603, |
| "grad_norm": 1.494099520851434, |
| "learning_rate": 1e-05, |
| "loss": 0.1841, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.3054989816700611, |
| "grad_norm": 1.3145157836751054, |
| "learning_rate": 9.99994352762958e-06, |
| "loss": 0.1355, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.3095723014256619, |
| "grad_norm": 1.5739902543589488, |
| "learning_rate": 9.999774111793974e-06, |
| "loss": 0.1753, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.3136456211812627, |
| "grad_norm": 1.9252674009965682, |
| "learning_rate": 9.999491756320105e-06, |
| "loss": 0.1881, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.31771894093686354, |
| "grad_norm": 1.4979094601422043, |
| "learning_rate": 9.99909646758609e-06, |
| "loss": 0.1562, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.32179226069246436, |
| "grad_norm": 1.2807564386337615, |
| "learning_rate": 9.99858825452108e-06, |
| "loss": 0.1207, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.3258655804480652, |
| "grad_norm": 1.6174801266480536, |
| "learning_rate": 9.997967128605078e-06, |
| "loss": 0.2017, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.329938900203666, |
| "grad_norm": 1.4159142195809724, |
| "learning_rate": 9.997233103868664e-06, |
| "loss": 0.124, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.3340122199592668, |
| "grad_norm": 1.6942010203200524, |
| "learning_rate": 9.996386196892683e-06, |
| "loss": 0.1937, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.3380855397148676, |
| "grad_norm": 1.5919535697796021, |
| "learning_rate": 9.995426426807875e-06, |
| "loss": 0.1577, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.3421588594704684, |
| "grad_norm": 1.1350013056899857, |
| "learning_rate": 9.994353815294438e-06, |
| "loss": 0.1405, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.34623217922606925, |
| "grad_norm": 1.3406415209215592, |
| "learning_rate": 9.993168386581533e-06, |
| "loss": 0.115, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.35030549898167007, |
| "grad_norm": 1.4661144336991712, |
| "learning_rate": 9.991870167446751e-06, |
| "loss": 0.1352, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.3543788187372709, |
| "grad_norm": 1.455419254668406, |
| "learning_rate": 9.990459187215498e-06, |
| "loss": 0.1292, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.35845213849287166, |
| "grad_norm": 1.5030594271263464, |
| "learning_rate": 9.98893547776033e-06, |
| "loss": 0.1561, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.3625254582484725, |
| "grad_norm": 1.6530134787902149, |
| "learning_rate": 9.987299073500245e-06, |
| "loss": 0.2027, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.3665987780040733, |
| "grad_norm": 1.2625338529893853, |
| "learning_rate": 9.985550011399889e-06, |
| "loss": 0.1272, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.37067209775967414, |
| "grad_norm": 1.2071106713338415, |
| "learning_rate": 9.98368833096874e-06, |
| "loss": 0.1666, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.37474541751527496, |
| "grad_norm": 1.6780625112925267, |
| "learning_rate": 9.981714074260196e-06, |
| "loss": 0.1846, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.3788187372708758, |
| "grad_norm": 1.3038545888439474, |
| "learning_rate": 9.979627285870644e-06, |
| "loss": 0.1259, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.38289205702647655, |
| "grad_norm": 1.8469161081473644, |
| "learning_rate": 9.977428012938437e-06, |
| "loss": 0.2386, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.3869653767820774, |
| "grad_norm": 1.4135444068823033, |
| "learning_rate": 9.975116305142836e-06, |
| "loss": 0.1359, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.3910386965376782, |
| "grad_norm": 0.9193033843457485, |
| "learning_rate": 9.97269221470289e-06, |
| "loss": 0.1211, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.395112016293279, |
| "grad_norm": 1.2085571858788857, |
| "learning_rate": 9.97015579637625e-06, |
| "loss": 0.1139, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.39918533604887985, |
| "grad_norm": 1.1994440753116877, |
| "learning_rate": 9.967507107457942e-06, |
| "loss": 0.1324, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.40325865580448067, |
| "grad_norm": 1.2284108132440392, |
| "learning_rate": 9.96474620777906e-06, |
| "loss": 0.1499, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.4073319755600815, |
| "grad_norm": 1.6212975171489614, |
| "learning_rate": 9.961873159705426e-06, |
| "loss": 0.1581, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.41140529531568226, |
| "grad_norm": 1.764584729235939, |
| "learning_rate": 9.95888802813617e-06, |
| "loss": 0.2015, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.4154786150712831, |
| "grad_norm": 0.9967692738558999, |
| "learning_rate": 9.955790880502278e-06, |
| "loss": 0.1271, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.4195519348268839, |
| "grad_norm": 1.021216540491067, |
| "learning_rate": 9.952581786765057e-06, |
| "loss": 0.1194, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.42362525458248473, |
| "grad_norm": 1.803707990576697, |
| "learning_rate": 9.949260819414557e-06, |
| "loss": 0.1783, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.42769857433808556, |
| "grad_norm": 1.2598754070444962, |
| "learning_rate": 9.945828053467939e-06, |
| "loss": 0.1336, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.4317718940936864, |
| "grad_norm": 1.6420031533088841, |
| "learning_rate": 9.942283566467773e-06, |
| "loss": 0.1917, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.43584521384928715, |
| "grad_norm": 1.4491181396568242, |
| "learning_rate": 9.938627438480295e-06, |
| "loss": 0.1698, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.439918533604888, |
| "grad_norm": 1.5149064154516412, |
| "learning_rate": 9.93485975209359e-06, |
| "loss": 0.1752, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.4439918533604888, |
| "grad_norm": 1.2955834669569637, |
| "learning_rate": 9.930980592415728e-06, |
| "loss": 0.1723, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.4480651731160896, |
| "grad_norm": 2.017971717171439, |
| "learning_rate": 9.926990047072849e-06, |
| "loss": 0.2711, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.45213849287169044, |
| "grad_norm": 1.3181601641420488, |
| "learning_rate": 9.922888206207174e-06, |
| "loss": 0.1275, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.45621181262729127, |
| "grad_norm": 1.1244851230272161, |
| "learning_rate": 9.918675162474974e-06, |
| "loss": 0.119, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.46028513238289204, |
| "grad_norm": 1.6894775763034329, |
| "learning_rate": 9.914351011044472e-06, |
| "loss": 0.181, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.46435845213849286, |
| "grad_norm": 1.218930853717246, |
| "learning_rate": 9.909915849593705e-06, |
| "loss": 0.118, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.4684317718940937, |
| "grad_norm": 1.3249036934529779, |
| "learning_rate": 9.905369778308304e-06, |
| "loss": 0.1301, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.4725050916496945, |
| "grad_norm": 1.524266331724149, |
| "learning_rate": 9.900712899879237e-06, |
| "loss": 0.1762, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.47657841140529533, |
| "grad_norm": 1.7086702641879, |
| "learning_rate": 9.895945319500488e-06, |
| "loss": 0.161, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.48065173116089616, |
| "grad_norm": 1.2161786256312905, |
| "learning_rate": 9.891067144866687e-06, |
| "loss": 0.151, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.4847250509164969, |
| "grad_norm": 1.1889624963960783, |
| "learning_rate": 9.886078486170665e-06, |
| "loss": 0.11, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.48879837067209775, |
| "grad_norm": 1.365243201547572, |
| "learning_rate": 9.880979456100974e-06, |
| "loss": 0.1497, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.49287169042769857, |
| "grad_norm": 1.2585043332112358, |
| "learning_rate": 9.875770169839343e-06, |
| "loss": 0.1432, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.4969450101832994, |
| "grad_norm": 1.4002031183579842, |
| "learning_rate": 9.870450745058066e-06, |
| "loss": 0.1375, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.5010183299389002, |
| "grad_norm": 1.1440514502080288, |
| "learning_rate": 9.865021301917358e-06, |
| "loss": 0.1402, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.505091649694501, |
| "grad_norm": 1.2488108384566508, |
| "learning_rate": 9.859481963062623e-06, |
| "loss": 0.1162, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.5091649694501018, |
| "grad_norm": 0.9510210043207896, |
| "learning_rate": 9.853832853621703e-06, |
| "loss": 0.1261, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.5132382892057027, |
| "grad_norm": 1.10816343398081, |
| "learning_rate": 9.848074101202037e-06, |
| "loss": 0.13, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.5173116089613035, |
| "grad_norm": 1.1626046136273134, |
| "learning_rate": 9.842205835887785e-06, |
| "loss": 0.1272, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.5213849287169042, |
| "grad_norm": 1.1143169256996153, |
| "learning_rate": 9.836228190236892e-06, |
| "loss": 0.1511, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.5254582484725051, |
| "grad_norm": 1.4130055894133877, |
| "learning_rate": 9.83014129927808e-06, |
| "loss": 0.1429, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.5295315682281059, |
| "grad_norm": 1.1740739283614359, |
| "learning_rate": 9.823945300507815e-06, |
| "loss": 0.1482, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.5336048879837068, |
| "grad_norm": 1.2043935670338368, |
| "learning_rate": 9.817640333887194e-06, |
| "loss": 0.1469, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.5376782077393075, |
| "grad_norm": 1.0821808237384798, |
| "learning_rate": 9.81122654183878e-06, |
| "loss": 0.1166, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.5417515274949084, |
| "grad_norm": 0.8388713275721755, |
| "learning_rate": 9.804704069243389e-06, |
| "loss": 0.1184, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.5458248472505092, |
| "grad_norm": 0.8619134174032774, |
| "learning_rate": 9.798073063436815e-06, |
| "loss": 0.1129, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.5498981670061099, |
| "grad_norm": 1.5195117909833376, |
| "learning_rate": 9.791333674206507e-06, |
| "loss": 0.1981, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.5539714867617108, |
| "grad_norm": 1.0850667490173038, |
| "learning_rate": 9.784486053788179e-06, |
| "loss": 0.1107, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.5580448065173116, |
| "grad_norm": 5.378203932427923, |
| "learning_rate": 9.77753035686237e-06, |
| "loss": 0.1653, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.5621181262729125, |
| "grad_norm": 1.4728169236524777, |
| "learning_rate": 9.770466740550963e-06, |
| "loss": 0.1684, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.5661914460285132, |
| "grad_norm": 1.0273885824594953, |
| "learning_rate": 9.763295364413616e-06, |
| "loss": 0.1255, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.570264765784114, |
| "grad_norm": 1.1688264950391403, |
| "learning_rate": 9.756016390444174e-06, |
| "loss": 0.1485, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.5743380855397149, |
| "grad_norm": 1.0399349052714333, |
| "learning_rate": 9.748629983067004e-06, |
| "loss": 0.1393, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.5784114052953157, |
| "grad_norm": 1.7232917988923804, |
| "learning_rate": 9.741136309133279e-06, |
| "loss": 0.1983, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.5824847250509165, |
| "grad_norm": 1.0879663731187623, |
| "learning_rate": 9.733535537917211e-06, |
| "loss": 0.1274, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.5865580448065173, |
| "grad_norm": 0.9368564524590517, |
| "learning_rate": 9.725827841112226e-06, |
| "loss": 0.1207, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.5906313645621182, |
| "grad_norm": 1.0798454525817731, |
| "learning_rate": 9.718013392827087e-06, |
| "loss": 0.1163, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.594704684317719, |
| "grad_norm": 1.5318145919287482, |
| "learning_rate": 9.710092369581966e-06, |
| "loss": 0.1623, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.5987780040733197, |
| "grad_norm": 1.203723637498308, |
| "learning_rate": 9.702064950304442e-06, |
| "loss": 0.1287, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.6028513238289206, |
| "grad_norm": 1.0956856988556738, |
| "learning_rate": 9.693931316325473e-06, |
| "loss": 0.1027, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.6069246435845214, |
| "grad_norm": 1.1062672875600013, |
| "learning_rate": 9.685691651375297e-06, |
| "loss": 0.1069, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.6109979633401222, |
| "grad_norm": 0.9590820963065577, |
| "learning_rate": 9.677346141579277e-06, |
| "loss": 0.1073, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.615071283095723, |
| "grad_norm": 1.3618508422829732, |
| "learning_rate": 9.668894975453705e-06, |
| "loss": 0.1664, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.6191446028513238, |
| "grad_norm": 1.4665857948785328, |
| "learning_rate": 9.66033834390153e-06, |
| "loss": 0.1471, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.6232179226069247, |
| "grad_norm": 1.1291116955509388, |
| "learning_rate": 9.65167644020806e-06, |
| "loss": 0.1352, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.6272912423625254, |
| "grad_norm": 0.9222800631795732, |
| "learning_rate": 9.64290946003659e-06, |
| "loss": 0.104, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.6313645621181263, |
| "grad_norm": 1.022018748144035, |
| "learning_rate": 9.63403760142398e-06, |
| "loss": 0.1065, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.6354378818737271, |
| "grad_norm": 1.001598283035824, |
| "learning_rate": 9.625061064776183e-06, |
| "loss": 0.1166, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.639511201629328, |
| "grad_norm": 0.8769241639145234, |
| "learning_rate": 9.61598005286372e-06, |
| "loss": 0.0995, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.6435845213849287, |
| "grad_norm": 1.8569649444104026, |
| "learning_rate": 9.606794770817102e-06, |
| "loss": 0.203, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.6476578411405295, |
| "grad_norm": 1.2047388338203417, |
| "learning_rate": 9.597505426122184e-06, |
| "loss": 0.1697, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.6517311608961304, |
| "grad_norm": 1.2752618352547809, |
| "learning_rate": 9.588112228615495e-06, |
| "loss": 0.1917, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.6558044806517311, |
| "grad_norm": 1.2319452847872203, |
| "learning_rate": 9.57861539047949e-06, |
| "loss": 0.144, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.659877800407332, |
| "grad_norm": 1.5388901828747517, |
| "learning_rate": 9.569015126237744e-06, |
| "loss": 0.1687, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.6639511201629328, |
| "grad_norm": 0.8967339263601748, |
| "learning_rate": 9.559311652750135e-06, |
| "loss": 0.1214, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.6680244399185336, |
| "grad_norm": 0.8769857565124224, |
| "learning_rate": 9.549505189207924e-06, |
| "loss": 0.1026, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.6720977596741344, |
| "grad_norm": 1.3154047046175807, |
| "learning_rate": 9.539595957128803e-06, |
| "loss": 0.1849, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.6761710794297352, |
| "grad_norm": 0.9498449430689078, |
| "learning_rate": 9.529584180351902e-06, |
| "loss": 0.126, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.6802443991853361, |
| "grad_norm": 1.249038222628632, |
| "learning_rate": 9.519470085032733e-06, |
| "loss": 0.1389, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.6843177189409368, |
| "grad_norm": 1.0021120176773701, |
| "learning_rate": 9.509253899638066e-06, |
| "loss": 0.1097, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.6883910386965377, |
| "grad_norm": 1.8720586325440431, |
| "learning_rate": 9.498935854940785e-06, |
| "loss": 0.191, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.6924643584521385, |
| "grad_norm": 0.8779925461874577, |
| "learning_rate": 9.488516184014667e-06, |
| "loss": 0.1145, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.6965376782077393, |
| "grad_norm": 1.1255708822925372, |
| "learning_rate": 9.477995122229117e-06, |
| "loss": 0.1648, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.7006109979633401, |
| "grad_norm": 0.816369690322416, |
| "learning_rate": 9.467372907243858e-06, |
| "loss": 0.1067, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.7046843177189409, |
| "grad_norm": 1.108258499952871, |
| "learning_rate": 9.456649779003548e-06, |
| "loss": 0.1333, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.7087576374745418, |
| "grad_norm": 0.9004600691246489, |
| "learning_rate": 9.44582597973238e-06, |
| "loss": 0.1326, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.7128309572301426, |
| "grad_norm": 1.1680632813950595, |
| "learning_rate": 9.434901753928593e-06, |
| "loss": 0.156, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.7169042769857433, |
| "grad_norm": 0.9349361326216877, |
| "learning_rate": 9.423877348358956e-06, |
| "loss": 0.1068, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.7209775967413442, |
| "grad_norm": 0.7647245750773263, |
| "learning_rate": 9.4127530120532e-06, |
| "loss": 0.1071, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.725050916496945, |
| "grad_norm": 1.5837338501339817, |
| "learning_rate": 9.401528996298375e-06, |
| "loss": 0.1804, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.7291242362525459, |
| "grad_norm": 0.8862736019513471, |
| "learning_rate": 9.390205554633193e-06, |
| "loss": 0.1141, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.7331975560081466, |
| "grad_norm": 1.6364898998524855, |
| "learning_rate": 9.378782942842292e-06, |
| "loss": 0.154, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.7372708757637475, |
| "grad_norm": 1.4722723655088397, |
| "learning_rate": 9.367261418950459e-06, |
| "loss": 0.2054, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.7413441955193483, |
| "grad_norm": 1.2922211250976714, |
| "learning_rate": 9.355641243216798e-06, |
| "loss": 0.1869, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.745417515274949, |
| "grad_norm": 1.4988694289422715, |
| "learning_rate": 9.343922678128854e-06, |
| "loss": 0.1205, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.7494908350305499, |
| "grad_norm": 1.1189091721473459, |
| "learning_rate": 9.332105988396692e-06, |
| "loss": 0.1346, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.7535641547861507, |
| "grad_norm": 0.9495523540335931, |
| "learning_rate": 9.3201914409469e-06, |
| "loss": 0.1414, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.7576374745417516, |
| "grad_norm": 1.1449056285447028, |
| "learning_rate": 9.308179304916573e-06, |
| "loss": 0.1296, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.7617107942973523, |
| "grad_norm": 0.9587320256117761, |
| "learning_rate": 9.29606985164723e-06, |
| "loss": 0.1092, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.7657841140529531, |
| "grad_norm": 1.0410633462297467, |
| "learning_rate": 9.283863354678683e-06, |
| "loss": 0.1433, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.769857433808554, |
| "grad_norm": 1.1206598030991681, |
| "learning_rate": 9.27156008974286e-06, |
| "loss": 0.143, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.7739307535641547, |
| "grad_norm": 0.9317753678463256, |
| "learning_rate": 9.259160334757575e-06, |
| "loss": 0.1112, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.7780040733197556, |
| "grad_norm": 0.9788621413861975, |
| "learning_rate": 9.246664369820249e-06, |
| "loss": 0.1406, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.7820773930753564, |
| "grad_norm": 2.0096874215766145, |
| "learning_rate": 9.234072477201588e-06, |
| "loss": 0.2634, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.7861507128309573, |
| "grad_norm": 1.4500526896303214, |
| "learning_rate": 9.2213849413392e-06, |
| "loss": 0.1386, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.790224032586558, |
| "grad_norm": 0.7432983646218648, |
| "learning_rate": 9.208602048831176e-06, |
| "loss": 0.109, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.7942973523421588, |
| "grad_norm": 0.8971636740777943, |
| "learning_rate": 9.195724088429611e-06, |
| "loss": 0.1153, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.7983706720977597, |
| "grad_norm": 0.8965234237275304, |
| "learning_rate": 9.18275135103409e-06, |
| "loss": 0.1197, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.8024439918533605, |
| "grad_norm": 1.0574527032014713, |
| "learning_rate": 9.169684129685099e-06, |
| "loss": 0.1417, |
| "step": 197 |
| }, |
| { |
| "epoch": 0.8065173116089613, |
| "grad_norm": 1.6715970219344671, |
| "learning_rate": 9.156522719557428e-06, |
| "loss": 0.2119, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.8105906313645621, |
| "grad_norm": 1.4942152004701117, |
| "learning_rate": 9.143267417953486e-06, |
| "loss": 0.1713, |
| "step": 199 |
| }, |
| { |
| "epoch": 0.814663951120163, |
| "grad_norm": 1.1953125576245964, |
| "learning_rate": 9.129918524296596e-06, |
| "loss": 0.2013, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.8187372708757638, |
| "grad_norm": 0.8487901135540641, |
| "learning_rate": 9.11647634012422e-06, |
| "loss": 0.1071, |
| "step": 201 |
| }, |
| { |
| "epoch": 0.8228105906313645, |
| "grad_norm": 0.8755395856306362, |
| "learning_rate": 9.102941169081167e-06, |
| "loss": 0.1225, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.8268839103869654, |
| "grad_norm": 1.0216007592933207, |
| "learning_rate": 9.089313316912708e-06, |
| "loss": 0.1519, |
| "step": 203 |
| }, |
| { |
| "epoch": 0.8309572301425662, |
| "grad_norm": 0.9504018753400427, |
| "learning_rate": 9.075593091457692e-06, |
| "loss": 0.1232, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.835030549898167, |
| "grad_norm": 0.836150880582385, |
| "learning_rate": 9.061780802641582e-06, |
| "loss": 0.1239, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.8391038696537678, |
| "grad_norm": 0.7970664385278868, |
| "learning_rate": 9.047876762469451e-06, |
| "loss": 0.1089, |
| "step": 206 |
| }, |
| { |
| "epoch": 0.8431771894093686, |
| "grad_norm": 0.8842436763981146, |
| "learning_rate": 9.033881285018945e-06, |
| "loss": 0.1125, |
| "step": 207 |
| }, |
| { |
| "epoch": 0.8472505091649695, |
| "grad_norm": 1.2825326027052009, |
| "learning_rate": 9.019794686433174e-06, |
| "loss": 0.177, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.8513238289205702, |
| "grad_norm": 0.9060622603360827, |
| "learning_rate": 9.005617284913586e-06, |
| "loss": 0.1051, |
| "step": 209 |
| }, |
| { |
| "epoch": 0.8553971486761711, |
| "grad_norm": 1.6864332980917753, |
| "learning_rate": 8.991349400712772e-06, |
| "loss": 0.127, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.8594704684317719, |
| "grad_norm": 1.0001473814777286, |
| "learning_rate": 8.976991356127225e-06, |
| "loss": 0.1325, |
| "step": 211 |
| }, |
| { |
| "epoch": 0.8635437881873728, |
| "grad_norm": 0.7291694120330555, |
| "learning_rate": 8.962543475490068e-06, |
| "loss": 0.1121, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.8676171079429735, |
| "grad_norm": 0.9272720834880637, |
| "learning_rate": 8.948006085163735e-06, |
| "loss": 0.1119, |
| "step": 213 |
| }, |
| { |
| "epoch": 0.8716904276985743, |
| "grad_norm": 0.8312647759455691, |
| "learning_rate": 8.933379513532575e-06, |
| "loss": 0.1112, |
| "step": 214 |
| }, |
| { |
| "epoch": 0.8757637474541752, |
| "grad_norm": 0.8149067559673082, |
| "learning_rate": 8.91866409099546e-06, |
| "loss": 0.1093, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.879837067209776, |
| "grad_norm": 0.8297195108998483, |
| "learning_rate": 8.903860149958308e-06, |
| "loss": 0.1069, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.8839103869653768, |
| "grad_norm": 1.0028275547991912, |
| "learning_rate": 8.888968024826575e-06, |
| "loss": 0.1382, |
| "step": 217 |
| }, |
| { |
| "epoch": 0.8879837067209776, |
| "grad_norm": 1.0402053627346783, |
| "learning_rate": 8.873988051997702e-06, |
| "loss": 0.1077, |
| "step": 218 |
| }, |
| { |
| "epoch": 0.8920570264765784, |
| "grad_norm": 1.016561804402662, |
| "learning_rate": 8.85892056985352e-06, |
| "loss": 0.1432, |
| "step": 219 |
| }, |
| { |
| "epoch": 0.8961303462321792, |
| "grad_norm": 1.6116327142859865, |
| "learning_rate": 8.8437659187526e-06, |
| "loss": 0.2553, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.90020366598778, |
| "grad_norm": 1.9529266780373014, |
| "learning_rate": 8.828524441022575e-06, |
| "loss": 0.194, |
| "step": 221 |
| }, |
| { |
| "epoch": 0.9042769857433809, |
| "grad_norm": 0.7615347250958462, |
| "learning_rate": 8.813196480952393e-06, |
| "loss": 0.1043, |
| "step": 222 |
| }, |
| { |
| "epoch": 0.9083503054989817, |
| "grad_norm": 0.8628795475365344, |
| "learning_rate": 8.797782384784549e-06, |
| "loss": 0.1258, |
| "step": 223 |
| }, |
| { |
| "epoch": 0.9124236252545825, |
| "grad_norm": 0.8101851952339439, |
| "learning_rate": 8.782282500707262e-06, |
| "loss": 0.1067, |
| "step": 224 |
| }, |
| { |
| "epoch": 0.9164969450101833, |
| "grad_norm": 0.8946136137938691, |
| "learning_rate": 8.766697178846611e-06, |
| "loss": 0.1312, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.9205702647657841, |
| "grad_norm": 1.1880991953150588, |
| "learning_rate": 8.751026771258622e-06, |
| "loss": 0.1457, |
| "step": 226 |
| }, |
| { |
| "epoch": 0.924643584521385, |
| "grad_norm": 0.7683685928101024, |
| "learning_rate": 8.735271631921322e-06, |
| "loss": 0.1094, |
| "step": 227 |
| }, |
| { |
| "epoch": 0.9287169042769857, |
| "grad_norm": 1.0839731638909997, |
| "learning_rate": 8.719432116726738e-06, |
| "loss": 0.1446, |
| "step": 228 |
| }, |
| { |
| "epoch": 0.9327902240325866, |
| "grad_norm": 1.0311134631629166, |
| "learning_rate": 8.703508583472855e-06, |
| "loss": 0.1551, |
| "step": 229 |
| }, |
| { |
| "epoch": 0.9368635437881874, |
| "grad_norm": 0.9245375301250945, |
| "learning_rate": 8.68750139185554e-06, |
| "loss": 0.1316, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.9409368635437881, |
| "grad_norm": 0.7855768495227055, |
| "learning_rate": 8.671410903460416e-06, |
| "loss": 0.1256, |
| "step": 231 |
| }, |
| { |
| "epoch": 0.945010183299389, |
| "grad_norm": 1.4592779355487666, |
| "learning_rate": 8.65523748175469e-06, |
| "loss": 0.1715, |
| "step": 232 |
| }, |
| { |
| "epoch": 0.9490835030549898, |
| "grad_norm": 1.444988409307209, |
| "learning_rate": 8.63898149207895e-06, |
| "loss": 0.1826, |
| "step": 233 |
| }, |
| { |
| "epoch": 0.9531568228105907, |
| "grad_norm": 1.064651622477415, |
| "learning_rate": 8.622643301638902e-06, |
| "loss": 0.1451, |
| "step": 234 |
| }, |
| { |
| "epoch": 0.9572301425661914, |
| "grad_norm": 0.7774230497311596, |
| "learning_rate": 8.606223279497081e-06, |
| "loss": 0.0999, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.9613034623217923, |
| "grad_norm": 0.9665945798764777, |
| "learning_rate": 8.589721796564521e-06, |
| "loss": 0.1011, |
| "step": 236 |
| }, |
| { |
| "epoch": 0.9653767820773931, |
| "grad_norm": 0.9332915183840536, |
| "learning_rate": 8.57313922559236e-06, |
| "loss": 0.131, |
| "step": 237 |
| }, |
| { |
| "epoch": 0.9694501018329938, |
| "grad_norm": 1.2006420660113526, |
| "learning_rate": 8.556475941163436e-06, |
| "loss": 0.1206, |
| "step": 238 |
| }, |
| { |
| "epoch": 0.9735234215885947, |
| "grad_norm": 1.3281125416185249, |
| "learning_rate": 8.539732319683817e-06, |
| "loss": 0.1687, |
| "step": 239 |
| }, |
| { |
| "epoch": 0.9775967413441955, |
| "grad_norm": 0.9158134622590804, |
| "learning_rate": 8.5229087393743e-06, |
| "loss": 0.1204, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.9816700610997964, |
| "grad_norm": 1.2823822319441516, |
| "learning_rate": 8.506005580261872e-06, |
| "loss": 0.1613, |
| "step": 241 |
| }, |
| { |
| "epoch": 0.9857433808553971, |
| "grad_norm": 0.9217849677879558, |
| "learning_rate": 8.489023224171114e-06, |
| "loss": 0.1151, |
| "step": 242 |
| }, |
| { |
| "epoch": 0.9898167006109979, |
| "grad_norm": 0.8536566198123717, |
| "learning_rate": 8.47196205471559e-06, |
| "loss": 0.0979, |
| "step": 243 |
| }, |
| { |
| "epoch": 0.9938900203665988, |
| "grad_norm": 1.2576661581152648, |
| "learning_rate": 8.45482245728917e-06, |
| "loss": 0.1824, |
| "step": 244 |
| }, |
| { |
| "epoch": 0.9979633401221996, |
| "grad_norm": 1.601767617645757, |
| "learning_rate": 8.437604819057336e-06, |
| "loss": 0.1619, |
| "step": 245 |
| }, |
| { |
| "epoch": 1.0020366598778003, |
| "grad_norm": 0.9672208490405952, |
| "learning_rate": 8.420309528948422e-06, |
| "loss": 0.1146, |
| "step": 246 |
| }, |
| { |
| "epoch": 1.0061099796334012, |
| "grad_norm": 0.832508397003989, |
| "learning_rate": 8.40293697764484e-06, |
| "loss": 0.0915, |
| "step": 247 |
| }, |
| { |
| "epoch": 1.010183299389002, |
| "grad_norm": 0.904130963567214, |
| "learning_rate": 8.385487557574253e-06, |
| "loss": 0.0944, |
| "step": 248 |
| }, |
| { |
| "epoch": 1.0142566191446027, |
| "grad_norm": 0.7450068739811258, |
| "learning_rate": 8.367961662900704e-06, |
| "loss": 0.0827, |
| "step": 249 |
| }, |
| { |
| "epoch": 1.0183299389002036, |
| "grad_norm": 0.9476412140789163, |
| "learning_rate": 8.35035968951572e-06, |
| "loss": 0.1092, |
| "step": 250 |
| }, |
| { |
| "epoch": 1.0224032586558045, |
| "grad_norm": 0.9386152878297552, |
| "learning_rate": 8.33268203502937e-06, |
| "loss": 0.1095, |
| "step": 251 |
| }, |
| { |
| "epoch": 1.0264765784114054, |
| "grad_norm": 0.9603179696847995, |
| "learning_rate": 8.314929098761268e-06, |
| "loss": 0.0915, |
| "step": 252 |
| }, |
| { |
| "epoch": 1.030549898167006, |
| "grad_norm": 0.8304219961514915, |
| "learning_rate": 8.297101281731576e-06, |
| "loss": 0.0954, |
| "step": 253 |
| }, |
| { |
| "epoch": 1.034623217922607, |
| "grad_norm": 0.9195780795898366, |
| "learning_rate": 8.279198986651925e-06, |
| "loss": 0.1024, |
| "step": 254 |
| }, |
| { |
| "epoch": 1.0386965376782078, |
| "grad_norm": 0.8515089836255187, |
| "learning_rate": 8.261222617916335e-06, |
| "loss": 0.0841, |
| "step": 255 |
| }, |
| { |
| "epoch": 1.0427698574338085, |
| "grad_norm": 0.9554349159166925, |
| "learning_rate": 8.243172581592066e-06, |
| "loss": 0.1058, |
| "step": 256 |
| }, |
| { |
| "epoch": 1.0468431771894093, |
| "grad_norm": 0.6631561369471801, |
| "learning_rate": 8.22504928541045e-06, |
| "loss": 0.0886, |
| "step": 257 |
| }, |
| { |
| "epoch": 1.0509164969450102, |
| "grad_norm": 0.9048066298357775, |
| "learning_rate": 8.206853138757687e-06, |
| "loss": 0.0844, |
| "step": 258 |
| }, |
| { |
| "epoch": 1.054989816700611, |
| "grad_norm": 0.9562971602799583, |
| "learning_rate": 8.188584552665592e-06, |
| "loss": 0.0915, |
| "step": 259 |
| }, |
| { |
| "epoch": 1.0590631364562118, |
| "grad_norm": 1.002361514741711, |
| "learning_rate": 8.17024393980231e-06, |
| "loss": 0.1016, |
| "step": 260 |
| }, |
| { |
| "epoch": 1.0631364562118126, |
| "grad_norm": 0.7452508340661453, |
| "learning_rate": 8.15183171446299e-06, |
| "loss": 0.0674, |
| "step": 261 |
| }, |
| { |
| "epoch": 1.0672097759674135, |
| "grad_norm": 0.817626319509027, |
| "learning_rate": 8.133348292560442e-06, |
| "loss": 0.0785, |
| "step": 262 |
| }, |
| { |
| "epoch": 1.0712830957230142, |
| "grad_norm": 1.1811024484035837, |
| "learning_rate": 8.114794091615718e-06, |
| "loss": 0.1265, |
| "step": 263 |
| }, |
| { |
| "epoch": 1.075356415478615, |
| "grad_norm": 0.603310266983341, |
| "learning_rate": 8.096169530748708e-06, |
| "loss": 0.0638, |
| "step": 264 |
| }, |
| { |
| "epoch": 1.079429735234216, |
| "grad_norm": 0.7947710103588282, |
| "learning_rate": 8.077475030668647e-06, |
| "loss": 0.096, |
| "step": 265 |
| }, |
| { |
| "epoch": 1.0835030549898166, |
| "grad_norm": 0.9274759793959164, |
| "learning_rate": 8.058711013664633e-06, |
| "loss": 0.0879, |
| "step": 266 |
| }, |
| { |
| "epoch": 1.0875763747454175, |
| "grad_norm": 0.7085855447631716, |
| "learning_rate": 8.039877903596069e-06, |
| "loss": 0.0847, |
| "step": 267 |
| }, |
| { |
| "epoch": 1.0916496945010183, |
| "grad_norm": 0.7746156648229091, |
| "learning_rate": 8.020976125883105e-06, |
| "loss": 0.0814, |
| "step": 268 |
| }, |
| { |
| "epoch": 1.0957230142566192, |
| "grad_norm": 0.9487432139661005, |
| "learning_rate": 8.002006107497018e-06, |
| "loss": 0.099, |
| "step": 269 |
| }, |
| { |
| "epoch": 1.0997963340122199, |
| "grad_norm": 0.7861823082141532, |
| "learning_rate": 7.982968276950568e-06, |
| "loss": 0.0719, |
| "step": 270 |
| }, |
| { |
| "epoch": 1.1038696537678208, |
| "grad_norm": 1.2995214283719732, |
| "learning_rate": 7.963863064288326e-06, |
| "loss": 0.1165, |
| "step": 271 |
| }, |
| { |
| "epoch": 1.1079429735234216, |
| "grad_norm": 0.6796566871802787, |
| "learning_rate": 7.944690901076949e-06, |
| "loss": 0.078, |
| "step": 272 |
| }, |
| { |
| "epoch": 1.1120162932790225, |
| "grad_norm": 0.853762040034195, |
| "learning_rate": 7.925452220395436e-06, |
| "loss": 0.0994, |
| "step": 273 |
| }, |
| { |
| "epoch": 1.1160896130346232, |
| "grad_norm": 0.8516487725429964, |
| "learning_rate": 7.906147456825349e-06, |
| "loss": 0.0988, |
| "step": 274 |
| }, |
| { |
| "epoch": 1.120162932790224, |
| "grad_norm": 0.7864403540144408, |
| "learning_rate": 7.886777046440993e-06, |
| "loss": 0.0814, |
| "step": 275 |
| }, |
| { |
| "epoch": 1.124236252545825, |
| "grad_norm": 1.1716856642298268, |
| "learning_rate": 7.867341426799562e-06, |
| "loss": 0.1396, |
| "step": 276 |
| }, |
| { |
| "epoch": 1.1283095723014256, |
| "grad_norm": 0.7736692798031964, |
| "learning_rate": 7.847841036931263e-06, |
| "loss": 0.0963, |
| "step": 277 |
| }, |
| { |
| "epoch": 1.1323828920570265, |
| "grad_norm": 0.627853542856467, |
| "learning_rate": 7.828276317329388e-06, |
| "loss": 0.0796, |
| "step": 278 |
| }, |
| { |
| "epoch": 1.1364562118126273, |
| "grad_norm": 0.6987894756370886, |
| "learning_rate": 7.80864770994038e-06, |
| "loss": 0.0673, |
| "step": 279 |
| }, |
| { |
| "epoch": 1.140529531568228, |
| "grad_norm": 0.7372216853011646, |
| "learning_rate": 7.788955658153829e-06, |
| "loss": 0.0861, |
| "step": 280 |
| }, |
| { |
| "epoch": 1.1446028513238289, |
| "grad_norm": 0.6309328369589573, |
| "learning_rate": 7.769200606792476e-06, |
| "loss": 0.0732, |
| "step": 281 |
| }, |
| { |
| "epoch": 1.1486761710794298, |
| "grad_norm": 1.5872377960488204, |
| "learning_rate": 7.749383002102147e-06, |
| "loss": 0.098, |
| "step": 282 |
| }, |
| { |
| "epoch": 1.1527494908350306, |
| "grad_norm": 0.8441328057902189, |
| "learning_rate": 7.72950329174169e-06, |
| "loss": 0.0906, |
| "step": 283 |
| }, |
| { |
| "epoch": 1.1568228105906313, |
| "grad_norm": 0.843705996727692, |
| "learning_rate": 7.709561924772855e-06, |
| "loss": 0.0979, |
| "step": 284 |
| }, |
| { |
| "epoch": 1.1608961303462322, |
| "grad_norm": 0.977169845042871, |
| "learning_rate": 7.689559351650142e-06, |
| "loss": 0.1124, |
| "step": 285 |
| }, |
| { |
| "epoch": 1.164969450101833, |
| "grad_norm": 0.9463259076204176, |
| "learning_rate": 7.66949602421064e-06, |
| "loss": 0.1068, |
| "step": 286 |
| }, |
| { |
| "epoch": 1.1690427698574337, |
| "grad_norm": 0.811424080065267, |
| "learning_rate": 7.649372395663816e-06, |
| "loss": 0.072, |
| "step": 287 |
| }, |
| { |
| "epoch": 1.1731160896130346, |
| "grad_norm": 0.8132304851241069, |
| "learning_rate": 7.629188920581267e-06, |
| "loss": 0.0884, |
| "step": 288 |
| }, |
| { |
| "epoch": 1.1771894093686355, |
| "grad_norm": 1.4632519153044663, |
| "learning_rate": 7.608946054886468e-06, |
| "loss": 0.0991, |
| "step": 289 |
| }, |
| { |
| "epoch": 1.1812627291242364, |
| "grad_norm": 0.9085288701945666, |
| "learning_rate": 7.588644255844464e-06, |
| "loss": 0.1015, |
| "step": 290 |
| }, |
| { |
| "epoch": 1.185336048879837, |
| "grad_norm": 0.7812384333509852, |
| "learning_rate": 7.568283982051538e-06, |
| "loss": 0.0773, |
| "step": 291 |
| }, |
| { |
| "epoch": 1.189409368635438, |
| "grad_norm": 0.6164352412170983, |
| "learning_rate": 7.5478656934248626e-06, |
| "loss": 0.0742, |
| "step": 292 |
| }, |
| { |
| "epoch": 1.1934826883910388, |
| "grad_norm": 0.7441220005875893, |
| "learning_rate": 7.527389851192099e-06, |
| "loss": 0.0805, |
| "step": 293 |
| }, |
| { |
| "epoch": 1.1975560081466394, |
| "grad_norm": 0.8878229183151843, |
| "learning_rate": 7.506856917880989e-06, |
| "loss": 0.0831, |
| "step": 294 |
| }, |
| { |
| "epoch": 1.2016293279022403, |
| "grad_norm": 0.6283198281323106, |
| "learning_rate": 7.486267357308896e-06, |
| "loss": 0.0807, |
| "step": 295 |
| }, |
| { |
| "epoch": 1.2057026476578412, |
| "grad_norm": 0.8264549827812461, |
| "learning_rate": 7.465621634572336e-06, |
| "loss": 0.0857, |
| "step": 296 |
| }, |
| { |
| "epoch": 1.2097759674134418, |
| "grad_norm": 1.0321645288915193, |
| "learning_rate": 7.444920216036473e-06, |
| "loss": 0.0969, |
| "step": 297 |
| }, |
| { |
| "epoch": 1.2138492871690427, |
| "grad_norm": 0.8253428462864323, |
| "learning_rate": 7.4241635693245766e-06, |
| "loss": 0.0745, |
| "step": 298 |
| }, |
| { |
| "epoch": 1.2179226069246436, |
| "grad_norm": 0.612362708583028, |
| "learning_rate": 7.40335216330746e-06, |
| "loss": 0.0724, |
| "step": 299 |
| }, |
| { |
| "epoch": 1.2219959266802445, |
| "grad_norm": 0.7460056721898694, |
| "learning_rate": 7.382486468092899e-06, |
| "loss": 0.0826, |
| "step": 300 |
| }, |
| { |
| "epoch": 1.2260692464358451, |
| "grad_norm": 2.407874545569998, |
| "learning_rate": 7.361566955014999e-06, |
| "loss": 0.0938, |
| "step": 301 |
| }, |
| { |
| "epoch": 1.230142566191446, |
| "grad_norm": 1.531711526524773, |
| "learning_rate": 7.340594096623559e-06, |
| "loss": 0.0895, |
| "step": 302 |
| }, |
| { |
| "epoch": 1.234215885947047, |
| "grad_norm": 0.6587659825194164, |
| "learning_rate": 7.319568366673389e-06, |
| "loss": 0.0791, |
| "step": 303 |
| }, |
| { |
| "epoch": 1.2382892057026478, |
| "grad_norm": 0.8381388182575081, |
| "learning_rate": 7.2984902401136115e-06, |
| "loss": 0.0908, |
| "step": 304 |
| }, |
| { |
| "epoch": 1.2423625254582484, |
| "grad_norm": 0.8130694806248154, |
| "learning_rate": 7.277360193076936e-06, |
| "loss": 0.0837, |
| "step": 305 |
| }, |
| { |
| "epoch": 1.2464358452138493, |
| "grad_norm": 0.6905135676271011, |
| "learning_rate": 7.256178702868899e-06, |
| "loss": 0.0787, |
| "step": 306 |
| }, |
| { |
| "epoch": 1.2505091649694502, |
| "grad_norm": 0.7451067168863142, |
| "learning_rate": 7.234946247957087e-06, |
| "loss": 0.0859, |
| "step": 307 |
| }, |
| { |
| "epoch": 1.2545824847250509, |
| "grad_norm": 0.9065093126808257, |
| "learning_rate": 7.213663307960321e-06, |
| "loss": 0.093, |
| "step": 308 |
| }, |
| { |
| "epoch": 1.2586558044806517, |
| "grad_norm": 1.1386079619114733, |
| "learning_rate": 7.192330363637832e-06, |
| "loss": 0.1148, |
| "step": 309 |
| }, |
| { |
| "epoch": 1.2627291242362526, |
| "grad_norm": 0.8040303061288451, |
| "learning_rate": 7.170947896878392e-06, |
| "loss": 0.0794, |
| "step": 310 |
| }, |
| { |
| "epoch": 1.2668024439918533, |
| "grad_norm": 0.7068240451041161, |
| "learning_rate": 7.149516390689433e-06, |
| "loss": 0.0772, |
| "step": 311 |
| }, |
| { |
| "epoch": 1.2708757637474541, |
| "grad_norm": 0.713991995225983, |
| "learning_rate": 7.12803632918614e-06, |
| "loss": 0.0773, |
| "step": 312 |
| }, |
| { |
| "epoch": 1.274949083503055, |
| "grad_norm": 0.8238519181704003, |
| "learning_rate": 7.1065081975805086e-06, |
| "loss": 0.0772, |
| "step": 313 |
| }, |
| { |
| "epoch": 1.2790224032586557, |
| "grad_norm": 1.100568471941002, |
| "learning_rate": 7.084932482170385e-06, |
| "loss": 0.1006, |
| "step": 314 |
| }, |
| { |
| "epoch": 1.2830957230142566, |
| "grad_norm": 0.7759792325387684, |
| "learning_rate": 7.063309670328491e-06, |
| "loss": 0.0884, |
| "step": 315 |
| }, |
| { |
| "epoch": 1.2871690427698574, |
| "grad_norm": 0.7869101421309218, |
| "learning_rate": 7.041640250491398e-06, |
| "loss": 0.076, |
| "step": 316 |
| }, |
| { |
| "epoch": 1.2912423625254583, |
| "grad_norm": 0.6354916164352619, |
| "learning_rate": 7.019924712148511e-06, |
| "loss": 0.0719, |
| "step": 317 |
| }, |
| { |
| "epoch": 1.2953156822810592, |
| "grad_norm": 0.9646300732606192, |
| "learning_rate": 6.998163545830998e-06, |
| "loss": 0.0896, |
| "step": 318 |
| }, |
| { |
| "epoch": 1.2993890020366599, |
| "grad_norm": 0.7887777312539676, |
| "learning_rate": 6.976357243100718e-06, |
| "loss": 0.0894, |
| "step": 319 |
| }, |
| { |
| "epoch": 1.3034623217922607, |
| "grad_norm": 0.8543058333089151, |
| "learning_rate": 6.954506296539112e-06, |
| "loss": 0.0924, |
| "step": 320 |
| }, |
| { |
| "epoch": 1.3075356415478616, |
| "grad_norm": 0.8888556729496842, |
| "learning_rate": 6.9326111997360775e-06, |
| "loss": 0.089, |
| "step": 321 |
| }, |
| { |
| "epoch": 1.3116089613034623, |
| "grad_norm": 1.0511089795556172, |
| "learning_rate": 6.910672447278827e-06, |
| "loss": 0.0996, |
| "step": 322 |
| }, |
| { |
| "epoch": 1.3156822810590632, |
| "grad_norm": 0.6640960066694046, |
| "learning_rate": 6.8886905347406985e-06, |
| "loss": 0.0889, |
| "step": 323 |
| }, |
| { |
| "epoch": 1.319755600814664, |
| "grad_norm": 0.7474191392306333, |
| "learning_rate": 6.866665958669976e-06, |
| "loss": 0.1038, |
| "step": 324 |
| }, |
| { |
| "epoch": 1.3238289205702647, |
| "grad_norm": 0.6625825963174811, |
| "learning_rate": 6.844599216578667e-06, |
| "loss": 0.0785, |
| "step": 325 |
| }, |
| { |
| "epoch": 1.3279022403258656, |
| "grad_norm": 1.0051676188526157, |
| "learning_rate": 6.822490806931262e-06, |
| "loss": 0.1002, |
| "step": 326 |
| }, |
| { |
| "epoch": 1.3319755600814664, |
| "grad_norm": 0.6599233049529285, |
| "learning_rate": 6.800341229133486e-06, |
| "loss": 0.0869, |
| "step": 327 |
| }, |
| { |
| "epoch": 1.336048879837067, |
| "grad_norm": 0.9213204884412184, |
| "learning_rate": 6.778150983520999e-06, |
| "loss": 0.1235, |
| "step": 328 |
| }, |
| { |
| "epoch": 1.340122199592668, |
| "grad_norm": 0.7390992342429102, |
| "learning_rate": 6.755920571348111e-06, |
| "loss": 0.0839, |
| "step": 329 |
| }, |
| { |
| "epoch": 1.3441955193482689, |
| "grad_norm": 0.8491729200395206, |
| "learning_rate": 6.73365049477645e-06, |
| "loss": 0.0851, |
| "step": 330 |
| }, |
| { |
| "epoch": 1.3482688391038695, |
| "grad_norm": 1.9733113662315056, |
| "learning_rate": 6.711341256863623e-06, |
| "loss": 0.121, |
| "step": 331 |
| }, |
| { |
| "epoch": 1.3523421588594704, |
| "grad_norm": 0.7374060548134543, |
| "learning_rate": 6.688993361551847e-06, |
| "loss": 0.0829, |
| "step": 332 |
| }, |
| { |
| "epoch": 1.3564154786150713, |
| "grad_norm": 0.9445537578309379, |
| "learning_rate": 6.66660731365657e-06, |
| "loss": 0.1029, |
| "step": 333 |
| }, |
| { |
| "epoch": 1.3604887983706722, |
| "grad_norm": 0.6167725635362412, |
| "learning_rate": 6.64418361885507e-06, |
| "loss": 0.0846, |
| "step": 334 |
| }, |
| { |
| "epoch": 1.364562118126273, |
| "grad_norm": 0.9582564113338518, |
| "learning_rate": 6.621722783675024e-06, |
| "loss": 0.0884, |
| "step": 335 |
| }, |
| { |
| "epoch": 1.3686354378818737, |
| "grad_norm": 0.7341939098462013, |
| "learning_rate": 6.599225315483076e-06, |
| "loss": 0.0822, |
| "step": 336 |
| }, |
| { |
| "epoch": 1.3727087576374746, |
| "grad_norm": 0.6266005187010786, |
| "learning_rate": 6.576691722473368e-06, |
| "loss": 0.0741, |
| "step": 337 |
| }, |
| { |
| "epoch": 1.3767820773930755, |
| "grad_norm": 1.0420040214165822, |
| "learning_rate": 6.554122513656065e-06, |
| "loss": 0.1013, |
| "step": 338 |
| }, |
| { |
| "epoch": 1.3808553971486761, |
| "grad_norm": 0.7579135385444917, |
| "learning_rate": 6.531518198845854e-06, |
| "loss": 0.0869, |
| "step": 339 |
| }, |
| { |
| "epoch": 1.384928716904277, |
| "grad_norm": 0.9115989624081831, |
| "learning_rate": 6.508879288650431e-06, |
| "loss": 0.0942, |
| "step": 340 |
| }, |
| { |
| "epoch": 1.3890020366598779, |
| "grad_norm": 1.0153542283519323, |
| "learning_rate": 6.486206294458966e-06, |
| "loss": 0.1146, |
| "step": 341 |
| }, |
| { |
| "epoch": 1.3930753564154785, |
| "grad_norm": 0.732006592180605, |
| "learning_rate": 6.463499728430549e-06, |
| "loss": 0.0766, |
| "step": 342 |
| }, |
| { |
| "epoch": 1.3971486761710794, |
| "grad_norm": 1.0103652295314396, |
| "learning_rate": 6.4407601034826225e-06, |
| "loss": 0.1056, |
| "step": 343 |
| }, |
| { |
| "epoch": 1.4012219959266803, |
| "grad_norm": 0.645303692033008, |
| "learning_rate": 6.417987933279397e-06, |
| "loss": 0.082, |
| "step": 344 |
| }, |
| { |
| "epoch": 1.405295315682281, |
| "grad_norm": 0.6637967514075462, |
| "learning_rate": 6.395183732220242e-06, |
| "loss": 0.0715, |
| "step": 345 |
| }, |
| { |
| "epoch": 1.4093686354378818, |
| "grad_norm": 0.8370194911957503, |
| "learning_rate": 6.372348015428077e-06, |
| "loss": 0.0809, |
| "step": 346 |
| }, |
| { |
| "epoch": 1.4134419551934827, |
| "grad_norm": 0.7044123550494559, |
| "learning_rate": 6.349481298737723e-06, |
| "loss": 0.0861, |
| "step": 347 |
| }, |
| { |
| "epoch": 1.4175152749490836, |
| "grad_norm": 0.6093242409125569, |
| "learning_rate": 6.32658409868426e-06, |
| "loss": 0.0684, |
| "step": 348 |
| }, |
| { |
| "epoch": 1.4215885947046842, |
| "grad_norm": 0.632063149807712, |
| "learning_rate": 6.303656932491349e-06, |
| "loss": 0.0802, |
| "step": 349 |
| }, |
| { |
| "epoch": 1.4256619144602851, |
| "grad_norm": 1.0427713522649937, |
| "learning_rate": 6.280700318059563e-06, |
| "loss": 0.1154, |
| "step": 350 |
| }, |
| { |
| "epoch": 1.429735234215886, |
| "grad_norm": 1.0764304967406033, |
| "learning_rate": 6.257714773954674e-06, |
| "loss": 0.1164, |
| "step": 351 |
| }, |
| { |
| "epoch": 1.4338085539714869, |
| "grad_norm": 0.7365645776407571, |
| "learning_rate": 6.234700819395946e-06, |
| "loss": 0.0901, |
| "step": 352 |
| }, |
| { |
| "epoch": 1.4378818737270875, |
| "grad_norm": 0.7945282541810098, |
| "learning_rate": 6.211658974244407e-06, |
| "loss": 0.0908, |
| "step": 353 |
| }, |
| { |
| "epoch": 1.4419551934826884, |
| "grad_norm": 0.6918143511185231, |
| "learning_rate": 6.1885897589911e-06, |
| "loss": 0.0845, |
| "step": 354 |
| }, |
| { |
| "epoch": 1.4460285132382893, |
| "grad_norm": 1.1124806520090096, |
| "learning_rate": 6.1654936947453355e-06, |
| "loss": 0.1019, |
| "step": 355 |
| }, |
| { |
| "epoch": 1.45010183299389, |
| "grad_norm": 0.6025591013529961, |
| "learning_rate": 6.142371303222909e-06, |
| "loss": 0.0755, |
| "step": 356 |
| }, |
| { |
| "epoch": 1.4541751527494908, |
| "grad_norm": 0.8630513814588915, |
| "learning_rate": 6.119223106734328e-06, |
| "loss": 0.0895, |
| "step": 357 |
| }, |
| { |
| "epoch": 1.4582484725050917, |
| "grad_norm": 0.7965271271909935, |
| "learning_rate": 6.0960496281729995e-06, |
| "loss": 0.0783, |
| "step": 358 |
| }, |
| { |
| "epoch": 1.4623217922606924, |
| "grad_norm": 0.7038891889592316, |
| "learning_rate": 6.072851391003432e-06, |
| "loss": 0.0796, |
| "step": 359 |
| }, |
| { |
| "epoch": 1.4663951120162932, |
| "grad_norm": 0.6384512393476599, |
| "learning_rate": 6.0496289192494e-06, |
| "loss": 0.0881, |
| "step": 360 |
| }, |
| { |
| "epoch": 1.4704684317718941, |
| "grad_norm": 0.940655034735333, |
| "learning_rate": 6.026382737482116e-06, |
| "loss": 0.102, |
| "step": 361 |
| }, |
| { |
| "epoch": 1.4745417515274948, |
| "grad_norm": 0.9682934531343439, |
| "learning_rate": 6.003113370808375e-06, |
| "loss": 0.1029, |
| "step": 362 |
| }, |
| { |
| "epoch": 1.4786150712830957, |
| "grad_norm": 1.0247819631277137, |
| "learning_rate": 5.979821344858695e-06, |
| "loss": 0.1197, |
| "step": 363 |
| }, |
| { |
| "epoch": 1.4826883910386965, |
| "grad_norm": 0.7672663757155214, |
| "learning_rate": 5.956507185775441e-06, |
| "loss": 0.0917, |
| "step": 364 |
| }, |
| { |
| "epoch": 1.4867617107942974, |
| "grad_norm": 0.6578195533765551, |
| "learning_rate": 5.933171420200946e-06, |
| "loss": 0.0811, |
| "step": 365 |
| }, |
| { |
| "epoch": 1.4908350305498983, |
| "grad_norm": 0.6603035021161331, |
| "learning_rate": 5.909814575265609e-06, |
| "loss": 0.0819, |
| "step": 366 |
| }, |
| { |
| "epoch": 1.494908350305499, |
| "grad_norm": 0.583612907656864, |
| "learning_rate": 5.88643717857599e-06, |
| "loss": 0.0664, |
| "step": 367 |
| }, |
| { |
| "epoch": 1.4989816700610998, |
| "grad_norm": 0.6176322927829936, |
| "learning_rate": 5.863039758202889e-06, |
| "loss": 0.0771, |
| "step": 368 |
| }, |
| { |
| "epoch": 1.5030549898167007, |
| "grad_norm": 0.7024687823342335, |
| "learning_rate": 5.839622842669423e-06, |
| "loss": 0.0815, |
| "step": 369 |
| }, |
| { |
| "epoch": 1.5071283095723014, |
| "grad_norm": 0.8718566327697364, |
| "learning_rate": 5.816186960939084e-06, |
| "loss": 0.0974, |
| "step": 370 |
| }, |
| { |
| "epoch": 1.5112016293279023, |
| "grad_norm": 0.7361502684673189, |
| "learning_rate": 5.7927326424037875e-06, |
| "loss": 0.0848, |
| "step": 371 |
| }, |
| { |
| "epoch": 1.5152749490835031, |
| "grad_norm": 0.6902398893653613, |
| "learning_rate": 5.7692604168719225e-06, |
| "loss": 0.0783, |
| "step": 372 |
| }, |
| { |
| "epoch": 1.5193482688391038, |
| "grad_norm": 0.572845083615776, |
| "learning_rate": 5.745770814556373e-06, |
| "loss": 0.0763, |
| "step": 373 |
| }, |
| { |
| "epoch": 1.5234215885947047, |
| "grad_norm": 1.0372051056861917, |
| "learning_rate": 5.722264366062549e-06, |
| "loss": 0.1234, |
| "step": 374 |
| }, |
| { |
| "epoch": 1.5274949083503055, |
| "grad_norm": 0.9263356887643026, |
| "learning_rate": 5.698741602376395e-06, |
| "loss": 0.0958, |
| "step": 375 |
| }, |
| { |
| "epoch": 1.5315682281059062, |
| "grad_norm": 0.6171130691440203, |
| "learning_rate": 5.675203054852403e-06, |
| "loss": 0.0772, |
| "step": 376 |
| }, |
| { |
| "epoch": 1.535641547861507, |
| "grad_norm": 0.686848420732314, |
| "learning_rate": 5.651649255201603e-06, |
| "loss": 0.091, |
| "step": 377 |
| }, |
| { |
| "epoch": 1.539714867617108, |
| "grad_norm": 0.6841747229884891, |
| "learning_rate": 5.628080735479553e-06, |
| "loss": 0.0861, |
| "step": 378 |
| }, |
| { |
| "epoch": 1.5437881873727086, |
| "grad_norm": 0.5919736957226754, |
| "learning_rate": 5.604498028074323e-06, |
| "loss": 0.0702, |
| "step": 379 |
| }, |
| { |
| "epoch": 1.5478615071283097, |
| "grad_norm": 0.5832264712402621, |
| "learning_rate": 5.580901665694471e-06, |
| "loss": 0.0744, |
| "step": 380 |
| }, |
| { |
| "epoch": 1.5519348268839104, |
| "grad_norm": 0.8947933752883832, |
| "learning_rate": 5.557292181357003e-06, |
| "loss": 0.099, |
| "step": 381 |
| }, |
| { |
| "epoch": 1.556008146639511, |
| "grad_norm": 0.7901652985659288, |
| "learning_rate": 5.533670108375334e-06, |
| "loss": 0.0808, |
| "step": 382 |
| }, |
| { |
| "epoch": 1.5600814663951121, |
| "grad_norm": 0.7423631802302912, |
| "learning_rate": 5.510035980347249e-06, |
| "loss": 0.0691, |
| "step": 383 |
| }, |
| { |
| "epoch": 1.5641547861507128, |
| "grad_norm": 1.1196975716000397, |
| "learning_rate": 5.486390331142841e-06, |
| "loss": 0.14, |
| "step": 384 |
| }, |
| { |
| "epoch": 1.5682281059063137, |
| "grad_norm": 1.1562803291517847, |
| "learning_rate": 5.462733694892452e-06, |
| "loss": 0.1132, |
| "step": 385 |
| }, |
| { |
| "epoch": 1.5723014256619146, |
| "grad_norm": 0.5971769426579886, |
| "learning_rate": 5.439066605974615e-06, |
| "loss": 0.0918, |
| "step": 386 |
| }, |
| { |
| "epoch": 1.5763747454175152, |
| "grad_norm": 0.7622249533746938, |
| "learning_rate": 5.415389599003972e-06, |
| "loss": 0.0834, |
| "step": 387 |
| }, |
| { |
| "epoch": 1.580448065173116, |
| "grad_norm": 0.8225191930854497, |
| "learning_rate": 5.391703208819209e-06, |
| "loss": 0.0901, |
| "step": 388 |
| }, |
| { |
| "epoch": 1.584521384928717, |
| "grad_norm": 0.903243980934119, |
| "learning_rate": 5.368007970470964e-06, |
| "loss": 0.0877, |
| "step": 389 |
| }, |
| { |
| "epoch": 1.5885947046843176, |
| "grad_norm": 0.8517368255821154, |
| "learning_rate": 5.344304419209748e-06, |
| "loss": 0.0982, |
| "step": 390 |
| }, |
| { |
| "epoch": 1.5926680244399185, |
| "grad_norm": 0.5949993054618524, |
| "learning_rate": 5.3205930904738544e-06, |
| "loss": 0.069, |
| "step": 391 |
| }, |
| { |
| "epoch": 1.5967413441955194, |
| "grad_norm": 0.9588124561719404, |
| "learning_rate": 5.296874519877256e-06, |
| "loss": 0.1151, |
| "step": 392 |
| }, |
| { |
| "epoch": 1.60081466395112, |
| "grad_norm": 0.7682037010435501, |
| "learning_rate": 5.273149243197517e-06, |
| "loss": 0.1008, |
| "step": 393 |
| }, |
| { |
| "epoch": 1.6048879837067211, |
| "grad_norm": 0.6305755320656267, |
| "learning_rate": 5.2494177963636785e-06, |
| "loss": 0.0809, |
| "step": 394 |
| }, |
| { |
| "epoch": 1.6089613034623218, |
| "grad_norm": 0.8293942768800023, |
| "learning_rate": 5.225680715444168e-06, |
| "loss": 0.0933, |
| "step": 395 |
| }, |
| { |
| "epoch": 1.6130346232179225, |
| "grad_norm": 0.6400765956762063, |
| "learning_rate": 5.201938536634674e-06, |
| "loss": 0.083, |
| "step": 396 |
| }, |
| { |
| "epoch": 1.6171079429735236, |
| "grad_norm": 0.6237036767747972, |
| "learning_rate": 5.178191796246043e-06, |
| "loss": 0.0737, |
| "step": 397 |
| }, |
| { |
| "epoch": 1.6211812627291242, |
| "grad_norm": 0.76728636527015, |
| "learning_rate": 5.154441030692162e-06, |
| "loss": 0.0925, |
| "step": 398 |
| }, |
| { |
| "epoch": 1.625254582484725, |
| "grad_norm": 0.6271220608058555, |
| "learning_rate": 5.1306867764778445e-06, |
| "loss": 0.0667, |
| "step": 399 |
| }, |
| { |
| "epoch": 1.629327902240326, |
| "grad_norm": 0.563944778713813, |
| "learning_rate": 5.106929570186706e-06, |
| "loss": 0.0637, |
| "step": 400 |
| }, |
| { |
| "epoch": 1.6334012219959266, |
| "grad_norm": 0.8300464332472979, |
| "learning_rate": 5.083169948469049e-06, |
| "loss": 0.0979, |
| "step": 401 |
| }, |
| { |
| "epoch": 1.6374745417515275, |
| "grad_norm": 0.8778673983216725, |
| "learning_rate": 5.059408448029737e-06, |
| "loss": 0.1003, |
| "step": 402 |
| }, |
| { |
| "epoch": 1.6415478615071284, |
| "grad_norm": 0.9971185861157015, |
| "learning_rate": 5.0356456056160715e-06, |
| "loss": 0.1058, |
| "step": 403 |
| }, |
| { |
| "epoch": 1.645621181262729, |
| "grad_norm": 0.6245905119244692, |
| "learning_rate": 5.0118819580056686e-06, |
| "loss": 0.0732, |
| "step": 404 |
| }, |
| { |
| "epoch": 1.64969450101833, |
| "grad_norm": 0.6093763548801165, |
| "learning_rate": 4.988118041994332e-06, |
| "loss": 0.073, |
| "step": 405 |
| }, |
| { |
| "epoch": 1.6537678207739308, |
| "grad_norm": 0.732574864023735, |
| "learning_rate": 4.964354394383929e-06, |
| "loss": 0.0784, |
| "step": 406 |
| }, |
| { |
| "epoch": 1.6578411405295315, |
| "grad_norm": 0.5623878101115458, |
| "learning_rate": 4.940591551970264e-06, |
| "loss": 0.0705, |
| "step": 407 |
| }, |
| { |
| "epoch": 1.6619144602851323, |
| "grad_norm": 0.6836109996389221, |
| "learning_rate": 4.9168300515309515e-06, |
| "loss": 0.0759, |
| "step": 408 |
| }, |
| { |
| "epoch": 1.6659877800407332, |
| "grad_norm": 1.0751642454268762, |
| "learning_rate": 4.8930704298132965e-06, |
| "loss": 0.1147, |
| "step": 409 |
| }, |
| { |
| "epoch": 1.6700610997963339, |
| "grad_norm": 0.6925121226298128, |
| "learning_rate": 4.869313223522159e-06, |
| "loss": 0.0773, |
| "step": 410 |
| }, |
| { |
| "epoch": 1.674134419551935, |
| "grad_norm": 0.6533487029013805, |
| "learning_rate": 4.845558969307839e-06, |
| "loss": 0.0769, |
| "step": 411 |
| }, |
| { |
| "epoch": 1.6782077393075356, |
| "grad_norm": 0.6834593440184977, |
| "learning_rate": 4.821808203753959e-06, |
| "loss": 0.0848, |
| "step": 412 |
| }, |
| { |
| "epoch": 1.6822810590631363, |
| "grad_norm": 0.5761353995488768, |
| "learning_rate": 4.798061463365327e-06, |
| "loss": 0.0784, |
| "step": 413 |
| }, |
| { |
| "epoch": 1.6863543788187374, |
| "grad_norm": 0.5422511517223145, |
| "learning_rate": 4.774319284555833e-06, |
| "loss": 0.0744, |
| "step": 414 |
| }, |
| { |
| "epoch": 1.690427698574338, |
| "grad_norm": 0.617398792027579, |
| "learning_rate": 4.7505822036363214e-06, |
| "loss": 0.0732, |
| "step": 415 |
| }, |
| { |
| "epoch": 1.694501018329939, |
| "grad_norm": 0.9403552399900484, |
| "learning_rate": 4.726850756802486e-06, |
| "loss": 0.085, |
| "step": 416 |
| }, |
| { |
| "epoch": 1.6985743380855398, |
| "grad_norm": 0.5589577658327266, |
| "learning_rate": 4.703125480122747e-06, |
| "loss": 0.07, |
| "step": 417 |
| }, |
| { |
| "epoch": 1.7026476578411405, |
| "grad_norm": 0.8384307665718973, |
| "learning_rate": 4.679406909526147e-06, |
| "loss": 0.1021, |
| "step": 418 |
| }, |
| { |
| "epoch": 1.7067209775967414, |
| "grad_norm": 0.6378594747202708, |
| "learning_rate": 4.655695580790254e-06, |
| "loss": 0.0826, |
| "step": 419 |
| }, |
| { |
| "epoch": 1.7107942973523422, |
| "grad_norm": 0.9335835620964789, |
| "learning_rate": 4.631992029529037e-06, |
| "loss": 0.1112, |
| "step": 420 |
| }, |
| { |
| "epoch": 1.7148676171079429, |
| "grad_norm": 0.6170098686203401, |
| "learning_rate": 4.608296791180793e-06, |
| "loss": 0.0848, |
| "step": 421 |
| }, |
| { |
| "epoch": 1.7189409368635438, |
| "grad_norm": 0.6186288012037354, |
| "learning_rate": 4.584610400996028e-06, |
| "loss": 0.0795, |
| "step": 422 |
| }, |
| { |
| "epoch": 1.7230142566191446, |
| "grad_norm": 0.7591412034838235, |
| "learning_rate": 4.560933394025386e-06, |
| "loss": 0.0865, |
| "step": 423 |
| }, |
| { |
| "epoch": 1.7270875763747453, |
| "grad_norm": 0.6645225808347813, |
| "learning_rate": 4.537266305107549e-06, |
| "loss": 0.087, |
| "step": 424 |
| }, |
| { |
| "epoch": 1.7311608961303462, |
| "grad_norm": 0.5774310704171809, |
| "learning_rate": 4.513609668857162e-06, |
| "loss": 0.0683, |
| "step": 425 |
| }, |
| { |
| "epoch": 1.735234215885947, |
| "grad_norm": 0.5396004853223214, |
| "learning_rate": 4.489964019652752e-06, |
| "loss": 0.0645, |
| "step": 426 |
| }, |
| { |
| "epoch": 1.7393075356415477, |
| "grad_norm": 0.8193272698607866, |
| "learning_rate": 4.4663298916246665e-06, |
| "loss": 0.0966, |
| "step": 427 |
| }, |
| { |
| "epoch": 1.7433808553971488, |
| "grad_norm": 0.8438620683447817, |
| "learning_rate": 4.442707818642999e-06, |
| "loss": 0.0862, |
| "step": 428 |
| }, |
| { |
| "epoch": 1.7474541751527495, |
| "grad_norm": 0.8986877868414216, |
| "learning_rate": 4.419098334305529e-06, |
| "loss": 0.0909, |
| "step": 429 |
| }, |
| { |
| "epoch": 1.7515274949083504, |
| "grad_norm": 0.7300173025939761, |
| "learning_rate": 4.395501971925677e-06, |
| "loss": 0.0807, |
| "step": 430 |
| }, |
| { |
| "epoch": 1.7556008146639512, |
| "grad_norm": 0.8080156287729479, |
| "learning_rate": 4.371919264520449e-06, |
| "loss": 0.0865, |
| "step": 431 |
| }, |
| { |
| "epoch": 1.759674134419552, |
| "grad_norm": 0.8071100091965066, |
| "learning_rate": 4.348350744798399e-06, |
| "loss": 0.0845, |
| "step": 432 |
| }, |
| { |
| "epoch": 1.7637474541751528, |
| "grad_norm": 0.6345944880929723, |
| "learning_rate": 4.324796945147598e-06, |
| "loss": 0.0746, |
| "step": 433 |
| }, |
| { |
| "epoch": 1.7678207739307537, |
| "grad_norm": 0.7862831637321498, |
| "learning_rate": 4.301258397623606e-06, |
| "loss": 0.0876, |
| "step": 434 |
| }, |
| { |
| "epoch": 1.7718940936863543, |
| "grad_norm": 0.7699681856032118, |
| "learning_rate": 4.2777356339374526e-06, |
| "loss": 0.0919, |
| "step": 435 |
| }, |
| { |
| "epoch": 1.7759674134419552, |
| "grad_norm": 1.260183839778813, |
| "learning_rate": 4.254229185443628e-06, |
| "loss": 0.0864, |
| "step": 436 |
| }, |
| { |
| "epoch": 1.780040733197556, |
| "grad_norm": 0.6829744950654246, |
| "learning_rate": 4.230739583128078e-06, |
| "loss": 0.0746, |
| "step": 437 |
| }, |
| { |
| "epoch": 1.7841140529531567, |
| "grad_norm": 1.2716252017981826, |
| "learning_rate": 4.2072673575962125e-06, |
| "loss": 0.1001, |
| "step": 438 |
| }, |
| { |
| "epoch": 1.7881873727087576, |
| "grad_norm": 0.997750452895151, |
| "learning_rate": 4.183813039060919e-06, |
| "loss": 0.1182, |
| "step": 439 |
| }, |
| { |
| "epoch": 1.7922606924643585, |
| "grad_norm": 0.6022378654035607, |
| "learning_rate": 4.160377157330579e-06, |
| "loss": 0.0851, |
| "step": 440 |
| }, |
| { |
| "epoch": 1.7963340122199591, |
| "grad_norm": 0.7627379348442356, |
| "learning_rate": 4.136960241797113e-06, |
| "loss": 0.0675, |
| "step": 441 |
| }, |
| { |
| "epoch": 1.8004073319755602, |
| "grad_norm": 0.7651666287124916, |
| "learning_rate": 4.113562821424012e-06, |
| "loss": 0.0916, |
| "step": 442 |
| }, |
| { |
| "epoch": 1.804480651731161, |
| "grad_norm": 0.7330363703477193, |
| "learning_rate": 4.090185424734392e-06, |
| "loss": 0.0838, |
| "step": 443 |
| }, |
| { |
| "epoch": 1.8085539714867616, |
| "grad_norm": 0.7101706692426837, |
| "learning_rate": 4.066828579799054e-06, |
| "loss": 0.0791, |
| "step": 444 |
| }, |
| { |
| "epoch": 1.8126272912423627, |
| "grad_norm": 0.5688223211779385, |
| "learning_rate": 4.043492814224559e-06, |
| "loss": 0.0709, |
| "step": 445 |
| }, |
| { |
| "epoch": 1.8167006109979633, |
| "grad_norm": 0.8376097479341676, |
| "learning_rate": 4.020178655141307e-06, |
| "loss": 0.0826, |
| "step": 446 |
| }, |
| { |
| "epoch": 1.8207739307535642, |
| "grad_norm": 0.7808635217722791, |
| "learning_rate": 3.9968866291916254e-06, |
| "loss": 0.0895, |
| "step": 447 |
| }, |
| { |
| "epoch": 1.824847250509165, |
| "grad_norm": 0.6170736835701749, |
| "learning_rate": 3.973617262517886e-06, |
| "loss": 0.0682, |
| "step": 448 |
| }, |
| { |
| "epoch": 1.8289205702647657, |
| "grad_norm": 0.7757035878498671, |
| "learning_rate": 3.950371080750602e-06, |
| "loss": 0.0856, |
| "step": 449 |
| }, |
| { |
| "epoch": 1.8329938900203666, |
| "grad_norm": 1.0217188455954884, |
| "learning_rate": 3.927148608996569e-06, |
| "loss": 0.1131, |
| "step": 450 |
| }, |
| { |
| "epoch": 1.8370672097759675, |
| "grad_norm": 0.9683593480288422, |
| "learning_rate": 3.903950371827001e-06, |
| "loss": 0.0921, |
| "step": 451 |
| }, |
| { |
| "epoch": 1.8411405295315681, |
| "grad_norm": 0.6337703466634409, |
| "learning_rate": 3.880776893265673e-06, |
| "loss": 0.0827, |
| "step": 452 |
| }, |
| { |
| "epoch": 1.845213849287169, |
| "grad_norm": 0.7361682132709152, |
| "learning_rate": 3.85762869677709e-06, |
| "loss": 0.0636, |
| "step": 453 |
| }, |
| { |
| "epoch": 1.84928716904277, |
| "grad_norm": 0.7754176214282682, |
| "learning_rate": 3.834506305254667e-06, |
| "loss": 0.0961, |
| "step": 454 |
| }, |
| { |
| "epoch": 1.8533604887983706, |
| "grad_norm": 0.828382619398843, |
| "learning_rate": 3.811410241008902e-06, |
| "loss": 0.0951, |
| "step": 455 |
| }, |
| { |
| "epoch": 1.8574338085539714, |
| "grad_norm": 0.5945605581238207, |
| "learning_rate": 3.788341025755595e-06, |
| "loss": 0.0763, |
| "step": 456 |
| }, |
| { |
| "epoch": 1.8615071283095723, |
| "grad_norm": 0.7934942467804943, |
| "learning_rate": 3.765299180604055e-06, |
| "loss": 0.0934, |
| "step": 457 |
| }, |
| { |
| "epoch": 1.865580448065173, |
| "grad_norm": 0.6628615018421619, |
| "learning_rate": 3.7422852260453274e-06, |
| "loss": 0.0753, |
| "step": 458 |
| }, |
| { |
| "epoch": 1.869653767820774, |
| "grad_norm": 0.8426888513729046, |
| "learning_rate": 3.719299681940437e-06, |
| "loss": 0.0983, |
| "step": 459 |
| }, |
| { |
| "epoch": 1.8737270875763747, |
| "grad_norm": 0.8833092682930523, |
| "learning_rate": 3.696343067508651e-06, |
| "loss": 0.0841, |
| "step": 460 |
| }, |
| { |
| "epoch": 1.8778004073319754, |
| "grad_norm": 0.5875978979383154, |
| "learning_rate": 3.673415901315743e-06, |
| "loss": 0.0693, |
| "step": 461 |
| }, |
| { |
| "epoch": 1.8818737270875765, |
| "grad_norm": 0.6322423557535214, |
| "learning_rate": 3.650518701262278e-06, |
| "loss": 0.0781, |
| "step": 462 |
| }, |
| { |
| "epoch": 1.8859470468431772, |
| "grad_norm": 0.7821903785073223, |
| "learning_rate": 3.6276519845719237e-06, |
| "loss": 0.077, |
| "step": 463 |
| }, |
| { |
| "epoch": 1.890020366598778, |
| "grad_norm": 0.8674854155144539, |
| "learning_rate": 3.6048162677797595e-06, |
| "loss": 0.0927, |
| "step": 464 |
| }, |
| { |
| "epoch": 1.894093686354379, |
| "grad_norm": 0.59647645188795, |
| "learning_rate": 3.582012066720605e-06, |
| "loss": 0.0718, |
| "step": 465 |
| }, |
| { |
| "epoch": 1.8981670061099796, |
| "grad_norm": 0.6297079342542728, |
| "learning_rate": 3.559239896517379e-06, |
| "loss": 0.0854, |
| "step": 466 |
| }, |
| { |
| "epoch": 1.9022403258655805, |
| "grad_norm": 1.041008043566478, |
| "learning_rate": 3.536500271569452e-06, |
| "loss": 0.0873, |
| "step": 467 |
| }, |
| { |
| "epoch": 1.9063136456211813, |
| "grad_norm": 0.6843579823593634, |
| "learning_rate": 3.5137937055410343e-06, |
| "loss": 0.0716, |
| "step": 468 |
| }, |
| { |
| "epoch": 1.910386965376782, |
| "grad_norm": 0.8825368664636357, |
| "learning_rate": 3.4911207113495703e-06, |
| "loss": 0.0955, |
| "step": 469 |
| }, |
| { |
| "epoch": 1.9144602851323829, |
| "grad_norm": 0.7381839178415016, |
| "learning_rate": 3.4684818011541484e-06, |
| "loss": 0.0815, |
| "step": 470 |
| }, |
| { |
| "epoch": 1.9185336048879837, |
| "grad_norm": 0.7678147459965866, |
| "learning_rate": 3.4458774863439366e-06, |
| "loss": 0.0895, |
| "step": 471 |
| }, |
| { |
| "epoch": 1.9226069246435844, |
| "grad_norm": 0.8220888342463925, |
| "learning_rate": 3.423308277526633e-06, |
| "loss": 0.0914, |
| "step": 472 |
| }, |
| { |
| "epoch": 1.9266802443991853, |
| "grad_norm": 0.7021908890281842, |
| "learning_rate": 3.4007746845169253e-06, |
| "loss": 0.0906, |
| "step": 473 |
| }, |
| { |
| "epoch": 1.9307535641547862, |
| "grad_norm": 0.7284906476786358, |
| "learning_rate": 3.3782772163249767e-06, |
| "loss": 0.0725, |
| "step": 474 |
| }, |
| { |
| "epoch": 1.9348268839103868, |
| "grad_norm": 0.5533573194210741, |
| "learning_rate": 3.3558163811449317e-06, |
| "loss": 0.0731, |
| "step": 475 |
| }, |
| { |
| "epoch": 1.938900203665988, |
| "grad_norm": 0.9405832752114753, |
| "learning_rate": 3.3333926863434317e-06, |
| "loss": 0.0962, |
| "step": 476 |
| }, |
| { |
| "epoch": 1.9429735234215886, |
| "grad_norm": 0.6819165425758139, |
| "learning_rate": 3.311006638448155e-06, |
| "loss": 0.0767, |
| "step": 477 |
| }, |
| { |
| "epoch": 1.9470468431771895, |
| "grad_norm": 0.6845642751352256, |
| "learning_rate": 3.288658743136378e-06, |
| "loss": 0.0695, |
| "step": 478 |
| }, |
| { |
| "epoch": 1.9511201629327903, |
| "grad_norm": 1.012736448173238, |
| "learning_rate": 3.2663495052235505e-06, |
| "loss": 0.1222, |
| "step": 479 |
| }, |
| { |
| "epoch": 1.955193482688391, |
| "grad_norm": 0.6928154487833174, |
| "learning_rate": 3.2440794286518896e-06, |
| "loss": 0.0903, |
| "step": 480 |
| }, |
| { |
| "epoch": 1.9592668024439919, |
| "grad_norm": 0.6484293467201061, |
| "learning_rate": 3.2218490164790015e-06, |
| "loss": 0.0696, |
| "step": 481 |
| }, |
| { |
| "epoch": 1.9633401221995928, |
| "grad_norm": 0.6538882594004228, |
| "learning_rate": 3.199658770866515e-06, |
| "loss": 0.0787, |
| "step": 482 |
| }, |
| { |
| "epoch": 1.9674134419551934, |
| "grad_norm": 0.5752828743925392, |
| "learning_rate": 3.1775091930687374e-06, |
| "loss": 0.0675, |
| "step": 483 |
| }, |
| { |
| "epoch": 1.9714867617107943, |
| "grad_norm": 0.5743945825655228, |
| "learning_rate": 3.1554007834213357e-06, |
| "loss": 0.0715, |
| "step": 484 |
| }, |
| { |
| "epoch": 1.9755600814663952, |
| "grad_norm": 0.8159794403070233, |
| "learning_rate": 3.1333340413300263e-06, |
| "loss": 0.0921, |
| "step": 485 |
| }, |
| { |
| "epoch": 1.9796334012219958, |
| "grad_norm": 0.6636547264511075, |
| "learning_rate": 3.1113094652593023e-06, |
| "loss": 0.0759, |
| "step": 486 |
| }, |
| { |
| "epoch": 1.9837067209775967, |
| "grad_norm": 1.0216030221101335, |
| "learning_rate": 3.0893275527211742e-06, |
| "loss": 0.1142, |
| "step": 487 |
| }, |
| { |
| "epoch": 1.9877800407331976, |
| "grad_norm": 0.6661497473358808, |
| "learning_rate": 3.067388800263923e-06, |
| "loss": 0.0861, |
| "step": 488 |
| }, |
| { |
| "epoch": 1.9918533604887982, |
| "grad_norm": 0.6220758643441348, |
| "learning_rate": 3.04549370346089e-06, |
| "loss": 0.0724, |
| "step": 489 |
| }, |
| { |
| "epoch": 1.9959266802443993, |
| "grad_norm": 0.6380698066205512, |
| "learning_rate": 3.0236427568992845e-06, |
| "loss": 0.081, |
| "step": 490 |
| }, |
| { |
| "epoch": 2.0, |
| "grad_norm": 1.0732156070255234, |
| "learning_rate": 3.0018364541690048e-06, |
| "loss": 0.0967, |
| "step": 491 |
| }, |
| { |
| "epoch": 2.0040733197556007, |
| "grad_norm": 0.46382587645965473, |
| "learning_rate": 2.9800752878514903e-06, |
| "loss": 0.0486, |
| "step": 492 |
| }, |
| { |
| "epoch": 2.0081466395112018, |
| "grad_norm": 0.6061183961253724, |
| "learning_rate": 2.958359749508603e-06, |
| "loss": 0.0551, |
| "step": 493 |
| }, |
| { |
| "epoch": 2.0122199592668024, |
| "grad_norm": 0.4921733736983431, |
| "learning_rate": 2.936690329671511e-06, |
| "loss": 0.0454, |
| "step": 494 |
| }, |
| { |
| "epoch": 2.016293279022403, |
| "grad_norm": 0.43183509951355314, |
| "learning_rate": 2.915067517829615e-06, |
| "loss": 0.0466, |
| "step": 495 |
| }, |
| { |
| "epoch": 2.020366598778004, |
| "grad_norm": 0.49575364629777624, |
| "learning_rate": 2.893491802419492e-06, |
| "loss": 0.049, |
| "step": 496 |
| }, |
| { |
| "epoch": 2.024439918533605, |
| "grad_norm": 0.4271031180271253, |
| "learning_rate": 2.871963670813861e-06, |
| "loss": 0.0393, |
| "step": 497 |
| }, |
| { |
| "epoch": 2.0285132382892055, |
| "grad_norm": 0.5825597063224182, |
| "learning_rate": 2.850483609310567e-06, |
| "loss": 0.0439, |
| "step": 498 |
| }, |
| { |
| "epoch": 2.0325865580448066, |
| "grad_norm": 0.7134060092750841, |
| "learning_rate": 2.829052103121611e-06, |
| "loss": 0.0507, |
| "step": 499 |
| }, |
| { |
| "epoch": 2.0366598778004072, |
| "grad_norm": 0.5359846386854068, |
| "learning_rate": 2.807669636362169e-06, |
| "loss": 0.0465, |
| "step": 500 |
| }, |
| { |
| "epoch": 2.0407331975560083, |
| "grad_norm": 1.2307395730228317, |
| "learning_rate": 2.7863366920396805e-06, |
| "loss": 0.0609, |
| "step": 501 |
| }, |
| { |
| "epoch": 2.044806517311609, |
| "grad_norm": 0.7016648303052435, |
| "learning_rate": 2.765053752042915e-06, |
| "loss": 0.0542, |
| "step": 502 |
| }, |
| { |
| "epoch": 2.0488798370672097, |
| "grad_norm": 0.6321598695043944, |
| "learning_rate": 2.7438212971311016e-06, |
| "loss": 0.0462, |
| "step": 503 |
| }, |
| { |
| "epoch": 2.0529531568228108, |
| "grad_norm": 0.524422558245556, |
| "learning_rate": 2.722639806923066e-06, |
| "loss": 0.0387, |
| "step": 504 |
| }, |
| { |
| "epoch": 2.0570264765784114, |
| "grad_norm": 0.8384795501307071, |
| "learning_rate": 2.7015097598863906e-06, |
| "loss": 0.0534, |
| "step": 505 |
| }, |
| { |
| "epoch": 2.061099796334012, |
| "grad_norm": 0.6240843386469663, |
| "learning_rate": 2.680431633326614e-06, |
| "loss": 0.0412, |
| "step": 506 |
| }, |
| { |
| "epoch": 2.065173116089613, |
| "grad_norm": 0.6307360193841159, |
| "learning_rate": 2.659405903376442e-06, |
| "loss": 0.0488, |
| "step": 507 |
| }, |
| { |
| "epoch": 2.069246435845214, |
| "grad_norm": 0.6330618915300124, |
| "learning_rate": 2.6384330449850028e-06, |
| "loss": 0.0502, |
| "step": 508 |
| }, |
| { |
| "epoch": 2.0733197556008145, |
| "grad_norm": 0.5880527544333638, |
| "learning_rate": 2.617513531907103e-06, |
| "loss": 0.0494, |
| "step": 509 |
| }, |
| { |
| "epoch": 2.0773930753564156, |
| "grad_norm": 0.7335711218887413, |
| "learning_rate": 2.5966478366925406e-06, |
| "loss": 0.052, |
| "step": 510 |
| }, |
| { |
| "epoch": 2.0814663951120163, |
| "grad_norm": 0.8141104911415494, |
| "learning_rate": 2.5758364306754247e-06, |
| "loss": 0.0513, |
| "step": 511 |
| }, |
| { |
| "epoch": 2.085539714867617, |
| "grad_norm": 0.7214645518132131, |
| "learning_rate": 2.5550797839635283e-06, |
| "loss": 0.0459, |
| "step": 512 |
| }, |
| { |
| "epoch": 2.089613034623218, |
| "grad_norm": 0.5903300427666225, |
| "learning_rate": 2.5343783654276644e-06, |
| "loss": 0.0466, |
| "step": 513 |
| }, |
| { |
| "epoch": 2.0936863543788187, |
| "grad_norm": 0.6287638475587176, |
| "learning_rate": 2.5137326426911067e-06, |
| "loss": 0.0457, |
| "step": 514 |
| }, |
| { |
| "epoch": 2.0977596741344193, |
| "grad_norm": 0.6016948711437964, |
| "learning_rate": 2.493143082119013e-06, |
| "loss": 0.0474, |
| "step": 515 |
| }, |
| { |
| "epoch": 2.1018329938900204, |
| "grad_norm": 0.7019678605348776, |
| "learning_rate": 2.472610148807903e-06, |
| "loss": 0.0471, |
| "step": 516 |
| }, |
| { |
| "epoch": 2.105906313645621, |
| "grad_norm": 0.6650222349525898, |
| "learning_rate": 2.452134306575139e-06, |
| "loss": 0.047, |
| "step": 517 |
| }, |
| { |
| "epoch": 2.109979633401222, |
| "grad_norm": 0.6319951154070054, |
| "learning_rate": 2.431716017948462e-06, |
| "loss": 0.0541, |
| "step": 518 |
| }, |
| { |
| "epoch": 2.114052953156823, |
| "grad_norm": 0.9156490212278536, |
| "learning_rate": 2.4113557441555384e-06, |
| "loss": 0.042, |
| "step": 519 |
| }, |
| { |
| "epoch": 2.1181262729124235, |
| "grad_norm": 0.5908911924587066, |
| "learning_rate": 2.391053945113533e-06, |
| "loss": 0.0384, |
| "step": 520 |
| }, |
| { |
| "epoch": 2.1221995926680246, |
| "grad_norm": 0.5997079936401813, |
| "learning_rate": 2.370811079418735e-06, |
| "loss": 0.049, |
| "step": 521 |
| }, |
| { |
| "epoch": 2.1262729124236253, |
| "grad_norm": 0.8224985458683419, |
| "learning_rate": 2.350627604336186e-06, |
| "loss": 0.053, |
| "step": 522 |
| }, |
| { |
| "epoch": 2.130346232179226, |
| "grad_norm": 0.6710057271136974, |
| "learning_rate": 2.330503975789361e-06, |
| "loss": 0.0515, |
| "step": 523 |
| }, |
| { |
| "epoch": 2.134419551934827, |
| "grad_norm": 0.6977361731485422, |
| "learning_rate": 2.3104406483498593e-06, |
| "loss": 0.0466, |
| "step": 524 |
| }, |
| { |
| "epoch": 2.1384928716904277, |
| "grad_norm": 0.7318716291498116, |
| "learning_rate": 2.290438075227146e-06, |
| "loss": 0.0459, |
| "step": 525 |
| }, |
| { |
| "epoch": 2.1425661914460283, |
| "grad_norm": 0.6417195973672337, |
| "learning_rate": 2.270496708258309e-06, |
| "loss": 0.0486, |
| "step": 526 |
| }, |
| { |
| "epoch": 2.1466395112016294, |
| "grad_norm": 0.7278961564752641, |
| "learning_rate": 2.2506169978978543e-06, |
| "loss": 0.0485, |
| "step": 527 |
| }, |
| { |
| "epoch": 2.15071283095723, |
| "grad_norm": 0.6405057377374997, |
| "learning_rate": 2.230799393207526e-06, |
| "loss": 0.047, |
| "step": 528 |
| }, |
| { |
| "epoch": 2.1547861507128308, |
| "grad_norm": 0.6383506826364762, |
| "learning_rate": 2.2110443418461723e-06, |
| "loss": 0.0383, |
| "step": 529 |
| }, |
| { |
| "epoch": 2.158859470468432, |
| "grad_norm": 0.6064990723637974, |
| "learning_rate": 2.191352290059621e-06, |
| "loss": 0.0437, |
| "step": 530 |
| }, |
| { |
| "epoch": 2.1629327902240325, |
| "grad_norm": 0.772798038718517, |
| "learning_rate": 2.171723682670613e-06, |
| "loss": 0.0582, |
| "step": 531 |
| }, |
| { |
| "epoch": 2.167006109979633, |
| "grad_norm": 0.7039586643696689, |
| "learning_rate": 2.152158963068739e-06, |
| "loss": 0.0444, |
| "step": 532 |
| }, |
| { |
| "epoch": 2.1710794297352343, |
| "grad_norm": 0.5636185384959056, |
| "learning_rate": 2.1326585732004384e-06, |
| "loss": 0.0431, |
| "step": 533 |
| }, |
| { |
| "epoch": 2.175152749490835, |
| "grad_norm": 0.6483499440908992, |
| "learning_rate": 2.1132229535590092e-06, |
| "loss": 0.0509, |
| "step": 534 |
| }, |
| { |
| "epoch": 2.179226069246436, |
| "grad_norm": 0.5790468097555252, |
| "learning_rate": 2.093852543174652e-06, |
| "loss": 0.045, |
| "step": 535 |
| }, |
| { |
| "epoch": 2.1832993890020367, |
| "grad_norm": 0.5662346010696749, |
| "learning_rate": 2.0745477796045664e-06, |
| "loss": 0.0421, |
| "step": 536 |
| }, |
| { |
| "epoch": 2.1873727087576373, |
| "grad_norm": 0.9206343975668372, |
| "learning_rate": 2.0553090989230527e-06, |
| "loss": 0.0479, |
| "step": 537 |
| }, |
| { |
| "epoch": 2.1914460285132384, |
| "grad_norm": 0.657682562867677, |
| "learning_rate": 2.036136935711674e-06, |
| "loss": 0.0393, |
| "step": 538 |
| }, |
| { |
| "epoch": 2.195519348268839, |
| "grad_norm": 0.5162086561290653, |
| "learning_rate": 2.017031723049432e-06, |
| "loss": 0.0348, |
| "step": 539 |
| }, |
| { |
| "epoch": 2.1995926680244398, |
| "grad_norm": 0.7166136712411706, |
| "learning_rate": 1.997993892502983e-06, |
| "loss": 0.0478, |
| "step": 540 |
| }, |
| { |
| "epoch": 2.203665987780041, |
| "grad_norm": 0.6420851301612974, |
| "learning_rate": 1.979023874116895e-06, |
| "loss": 0.0504, |
| "step": 541 |
| }, |
| { |
| "epoch": 2.2077393075356415, |
| "grad_norm": 0.5674151016939644, |
| "learning_rate": 1.9601220964039324e-06, |
| "loss": 0.0418, |
| "step": 542 |
| }, |
| { |
| "epoch": 2.211812627291242, |
| "grad_norm": 0.6248094181782853, |
| "learning_rate": 1.9412889863353683e-06, |
| "loss": 0.0464, |
| "step": 543 |
| }, |
| { |
| "epoch": 2.2158859470468433, |
| "grad_norm": 0.6115409983253244, |
| "learning_rate": 1.9225249693313547e-06, |
| "loss": 0.0422, |
| "step": 544 |
| }, |
| { |
| "epoch": 2.219959266802444, |
| "grad_norm": 0.5373674294031344, |
| "learning_rate": 1.9038304692512943e-06, |
| "loss": 0.0447, |
| "step": 545 |
| }, |
| { |
| "epoch": 2.224032586558045, |
| "grad_norm": 0.6095403003841355, |
| "learning_rate": 1.8852059083842838e-06, |
| "loss": 0.0447, |
| "step": 546 |
| }, |
| { |
| "epoch": 2.2281059063136457, |
| "grad_norm": 0.6108027242445478, |
| "learning_rate": 1.8666517074395607e-06, |
| "loss": 0.0517, |
| "step": 547 |
| }, |
| { |
| "epoch": 2.2321792260692463, |
| "grad_norm": 0.47923271839833664, |
| "learning_rate": 1.8481682855370098e-06, |
| "loss": 0.0342, |
| "step": 548 |
| }, |
| { |
| "epoch": 2.2362525458248474, |
| "grad_norm": 0.6016755510602564, |
| "learning_rate": 1.829756060197692e-06, |
| "loss": 0.0417, |
| "step": 549 |
| }, |
| { |
| "epoch": 2.240325865580448, |
| "grad_norm": 0.7022430834159812, |
| "learning_rate": 1.8114154473344081e-06, |
| "loss": 0.0481, |
| "step": 550 |
| }, |
| { |
| "epoch": 2.2443991853360488, |
| "grad_norm": 0.5768106404539921, |
| "learning_rate": 1.7931468612423142e-06, |
| "loss": 0.039, |
| "step": 551 |
| }, |
| { |
| "epoch": 2.24847250509165, |
| "grad_norm": 0.6323933887994607, |
| "learning_rate": 1.7749507145895518e-06, |
| "loss": 0.0415, |
| "step": 552 |
| }, |
| { |
| "epoch": 2.2525458248472505, |
| "grad_norm": 0.706898611038952, |
| "learning_rate": 1.756827418407936e-06, |
| "loss": 0.0447, |
| "step": 553 |
| }, |
| { |
| "epoch": 2.256619144602851, |
| "grad_norm": 0.6753433476547962, |
| "learning_rate": 1.7387773820836668e-06, |
| "loss": 0.0435, |
| "step": 554 |
| }, |
| { |
| "epoch": 2.2606924643584523, |
| "grad_norm": 0.6345110906473638, |
| "learning_rate": 1.7208010133480751e-06, |
| "loss": 0.0408, |
| "step": 555 |
| }, |
| { |
| "epoch": 2.264765784114053, |
| "grad_norm": 0.6349996250849919, |
| "learning_rate": 1.7028987182684248e-06, |
| "loss": 0.0467, |
| "step": 556 |
| }, |
| { |
| "epoch": 2.2688391038696536, |
| "grad_norm": 0.5478171278806082, |
| "learning_rate": 1.6850709012387328e-06, |
| "loss": 0.0421, |
| "step": 557 |
| }, |
| { |
| "epoch": 2.2729124236252547, |
| "grad_norm": 0.5570382415949389, |
| "learning_rate": 1.6673179649706312e-06, |
| "loss": 0.0412, |
| "step": 558 |
| }, |
| { |
| "epoch": 2.2769857433808554, |
| "grad_norm": 0.6704892257058723, |
| "learning_rate": 1.64964031048428e-06, |
| "loss": 0.0432, |
| "step": 559 |
| }, |
| { |
| "epoch": 2.281059063136456, |
| "grad_norm": 0.5353427142605596, |
| "learning_rate": 1.632038337099297e-06, |
| "loss": 0.0421, |
| "step": 560 |
| }, |
| { |
| "epoch": 2.285132382892057, |
| "grad_norm": 0.5675614565902064, |
| "learning_rate": 1.6145124424257497e-06, |
| "loss": 0.0408, |
| "step": 561 |
| }, |
| { |
| "epoch": 2.2892057026476578, |
| "grad_norm": 0.6215446356434404, |
| "learning_rate": 1.5970630223551614e-06, |
| "loss": 0.0441, |
| "step": 562 |
| }, |
| { |
| "epoch": 2.293279022403259, |
| "grad_norm": 0.5312552367132775, |
| "learning_rate": 1.5796904710515792e-06, |
| "loss": 0.0374, |
| "step": 563 |
| }, |
| { |
| "epoch": 2.2973523421588595, |
| "grad_norm": 0.6711448627336817, |
| "learning_rate": 1.5623951809426663e-06, |
| "loss": 0.0466, |
| "step": 564 |
| }, |
| { |
| "epoch": 2.30142566191446, |
| "grad_norm": 0.7355915752823369, |
| "learning_rate": 1.5451775427108302e-06, |
| "loss": 0.0409, |
| "step": 565 |
| }, |
| { |
| "epoch": 2.3054989816700613, |
| "grad_norm": 0.5492792440607698, |
| "learning_rate": 1.5280379452844124e-06, |
| "loss": 0.0406, |
| "step": 566 |
| }, |
| { |
| "epoch": 2.309572301425662, |
| "grad_norm": 0.5646796515852444, |
| "learning_rate": 1.510976775828887e-06, |
| "loss": 0.0384, |
| "step": 567 |
| }, |
| { |
| "epoch": 2.3136456211812626, |
| "grad_norm": 0.5825549184664148, |
| "learning_rate": 1.493994419738129e-06, |
| "loss": 0.041, |
| "step": 568 |
| }, |
| { |
| "epoch": 2.3177189409368637, |
| "grad_norm": 0.5817560681286719, |
| "learning_rate": 1.4770912606257003e-06, |
| "loss": 0.0499, |
| "step": 569 |
| }, |
| { |
| "epoch": 2.3217922606924644, |
| "grad_norm": 0.9844440093209132, |
| "learning_rate": 1.4602676803161842e-06, |
| "loss": 0.0517, |
| "step": 570 |
| }, |
| { |
| "epoch": 2.325865580448065, |
| "grad_norm": 0.6142006283022792, |
| "learning_rate": 1.4435240588365645e-06, |
| "loss": 0.0406, |
| "step": 571 |
| }, |
| { |
| "epoch": 2.329938900203666, |
| "grad_norm": 0.5693932954149746, |
| "learning_rate": 1.4268607744076419e-06, |
| "loss": 0.0407, |
| "step": 572 |
| }, |
| { |
| "epoch": 2.3340122199592668, |
| "grad_norm": 0.5950772597911713, |
| "learning_rate": 1.41027820343548e-06, |
| "loss": 0.048, |
| "step": 573 |
| }, |
| { |
| "epoch": 2.3380855397148674, |
| "grad_norm": 0.7237495768701298, |
| "learning_rate": 1.3937767205029196e-06, |
| "loss": 0.0549, |
| "step": 574 |
| }, |
| { |
| "epoch": 2.3421588594704685, |
| "grad_norm": 0.67988274483473, |
| "learning_rate": 1.3773566983610992e-06, |
| "loss": 0.0523, |
| "step": 575 |
| }, |
| { |
| "epoch": 2.346232179226069, |
| "grad_norm": 0.8056627345317472, |
| "learning_rate": 1.3610185079210514e-06, |
| "loss": 0.0426, |
| "step": 576 |
| }, |
| { |
| "epoch": 2.35030549898167, |
| "grad_norm": 0.6115391073948011, |
| "learning_rate": 1.34476251824531e-06, |
| "loss": 0.0419, |
| "step": 577 |
| }, |
| { |
| "epoch": 2.354378818737271, |
| "grad_norm": 0.6092462729328992, |
| "learning_rate": 1.3285890965395853e-06, |
| "loss": 0.0451, |
| "step": 578 |
| }, |
| { |
| "epoch": 2.3584521384928716, |
| "grad_norm": 0.5776725050326151, |
| "learning_rate": 1.3124986081444625e-06, |
| "loss": 0.0471, |
| "step": 579 |
| }, |
| { |
| "epoch": 2.3625254582484727, |
| "grad_norm": 0.6555280480435673, |
| "learning_rate": 1.296491416527147e-06, |
| "loss": 0.0449, |
| "step": 580 |
| }, |
| { |
| "epoch": 2.3665987780040734, |
| "grad_norm": 0.6569585284725156, |
| "learning_rate": 1.2805678832732627e-06, |
| "loss": 0.0546, |
| "step": 581 |
| }, |
| { |
| "epoch": 2.370672097759674, |
| "grad_norm": 0.7843231218862423, |
| "learning_rate": 1.264728368078678e-06, |
| "loss": 0.0447, |
| "step": 582 |
| }, |
| { |
| "epoch": 2.374745417515275, |
| "grad_norm": 0.5190101550736153, |
| "learning_rate": 1.248973228741378e-06, |
| "loss": 0.0444, |
| "step": 583 |
| }, |
| { |
| "epoch": 2.378818737270876, |
| "grad_norm": 0.5731375070374565, |
| "learning_rate": 1.2333028211533916e-06, |
| "loss": 0.0402, |
| "step": 584 |
| }, |
| { |
| "epoch": 2.3828920570264764, |
| "grad_norm": 0.7216183443132362, |
| "learning_rate": 1.21771749929274e-06, |
| "loss": 0.047, |
| "step": 585 |
| }, |
| { |
| "epoch": 2.3869653767820775, |
| "grad_norm": 0.6574547850481611, |
| "learning_rate": 1.2022176152154525e-06, |
| "loss": 0.0452, |
| "step": 586 |
| }, |
| { |
| "epoch": 2.391038696537678, |
| "grad_norm": 0.6995633619657223, |
| "learning_rate": 1.1868035190476085e-06, |
| "loss": 0.0437, |
| "step": 587 |
| }, |
| { |
| "epoch": 2.395112016293279, |
| "grad_norm": 0.5381295030205419, |
| "learning_rate": 1.1714755589774252e-06, |
| "loss": 0.0374, |
| "step": 588 |
| }, |
| { |
| "epoch": 2.39918533604888, |
| "grad_norm": 0.5193316596822176, |
| "learning_rate": 1.1562340812474004e-06, |
| "loss": 0.0335, |
| "step": 589 |
| }, |
| { |
| "epoch": 2.4032586558044806, |
| "grad_norm": 0.558050549263979, |
| "learning_rate": 1.1410794301464817e-06, |
| "loss": 0.04, |
| "step": 590 |
| }, |
| { |
| "epoch": 2.4073319755600817, |
| "grad_norm": 0.6124096552105914, |
| "learning_rate": 1.1260119480023008e-06, |
| "loss": 0.0365, |
| "step": 591 |
| }, |
| { |
| "epoch": 2.4114052953156824, |
| "grad_norm": 0.5294199833428392, |
| "learning_rate": 1.1110319751734271e-06, |
| "loss": 0.0386, |
| "step": 592 |
| }, |
| { |
| "epoch": 2.415478615071283, |
| "grad_norm": 0.6469614215212903, |
| "learning_rate": 1.0961398500416926e-06, |
| "loss": 0.042, |
| "step": 593 |
| }, |
| { |
| "epoch": 2.4195519348268837, |
| "grad_norm": 0.5557240307053494, |
| "learning_rate": 1.0813359090045412e-06, |
| "loss": 0.0399, |
| "step": 594 |
| }, |
| { |
| "epoch": 2.423625254582485, |
| "grad_norm": 0.6300256880608739, |
| "learning_rate": 1.0666204864674263e-06, |
| "loss": 0.0388, |
| "step": 595 |
| }, |
| { |
| "epoch": 2.4276985743380854, |
| "grad_norm": 0.655959102130783, |
| "learning_rate": 1.0519939148362667e-06, |
| "loss": 0.0478, |
| "step": 596 |
| }, |
| { |
| "epoch": 2.4317718940936865, |
| "grad_norm": 0.6094559864136577, |
| "learning_rate": 1.0374565245099328e-06, |
| "loss": 0.0388, |
| "step": 597 |
| }, |
| { |
| "epoch": 2.435845213849287, |
| "grad_norm": 0.7411603450389928, |
| "learning_rate": 1.0230086438727771e-06, |
| "loss": 0.0504, |
| "step": 598 |
| }, |
| { |
| "epoch": 2.439918533604888, |
| "grad_norm": 0.5681006917233394, |
| "learning_rate": 1.0086505992872304e-06, |
| "loss": 0.0427, |
| "step": 599 |
| }, |
| { |
| "epoch": 2.443991853360489, |
| "grad_norm": 0.609417026917768, |
| "learning_rate": 9.943827150864143e-07, |
| "loss": 0.0396, |
| "step": 600 |
| }, |
| { |
| "epoch": 2.4480651731160896, |
| "grad_norm": 0.7715933182445011, |
| "learning_rate": 9.80205313566827e-07, |
| "loss": 0.0479, |
| "step": 601 |
| }, |
| { |
| "epoch": 2.4521384928716903, |
| "grad_norm": 0.5588281033029895, |
| "learning_rate": 9.66118714981058e-07, |
| "loss": 0.0468, |
| "step": 602 |
| }, |
| { |
| "epoch": 2.4562118126272914, |
| "grad_norm": 0.6202480044275774, |
| "learning_rate": 9.521232375305494e-07, |
| "loss": 0.0428, |
| "step": 603 |
| }, |
| { |
| "epoch": 2.460285132382892, |
| "grad_norm": 0.5549990136472819, |
| "learning_rate": 9.382191973584193e-07, |
| "loss": 0.0321, |
| "step": 604 |
| }, |
| { |
| "epoch": 2.4643584521384927, |
| "grad_norm": 0.7125183614456646, |
| "learning_rate": 9.244069085423074e-07, |
| "loss": 0.052, |
| "step": 605 |
| }, |
| { |
| "epoch": 2.468431771894094, |
| "grad_norm": 0.5565747141010096, |
| "learning_rate": 9.106866830872929e-07, |
| "loss": 0.0366, |
| "step": 606 |
| }, |
| { |
| "epoch": 2.4725050916496945, |
| "grad_norm": 0.616895795551855, |
| "learning_rate": 8.970588309188343e-07, |
| "loss": 0.0438, |
| "step": 607 |
| }, |
| { |
| "epoch": 2.4765784114052956, |
| "grad_norm": 0.7071703538896843, |
| "learning_rate": 8.835236598757796e-07, |
| "loss": 0.0437, |
| "step": 608 |
| }, |
| { |
| "epoch": 2.480651731160896, |
| "grad_norm": 0.550733977240549, |
| "learning_rate": 8.70081475703406e-07, |
| "loss": 0.0355, |
| "step": 609 |
| }, |
| { |
| "epoch": 2.484725050916497, |
| "grad_norm": 0.9168151315445275, |
| "learning_rate": 8.567325820465156e-07, |
| "loss": 0.0536, |
| "step": 610 |
| }, |
| { |
| "epoch": 2.4887983706720975, |
| "grad_norm": 0.5877013322142949, |
| "learning_rate": 8.434772804425734e-07, |
| "loss": 0.0401, |
| "step": 611 |
| }, |
| { |
| "epoch": 2.4928716904276986, |
| "grad_norm": 0.6525278969423508, |
| "learning_rate": 8.303158703149023e-07, |
| "loss": 0.0434, |
| "step": 612 |
| }, |
| { |
| "epoch": 2.4969450101832993, |
| "grad_norm": 0.5668632582217858, |
| "learning_rate": 8.172486489659115e-07, |
| "loss": 0.0405, |
| "step": 613 |
| }, |
| { |
| "epoch": 2.5010183299389004, |
| "grad_norm": 0.5767158406225441, |
| "learning_rate": 8.042759115703891e-07, |
| "loss": 0.0439, |
| "step": 614 |
| }, |
| { |
| "epoch": 2.505091649694501, |
| "grad_norm": 0.7263909107216523, |
| "learning_rate": 7.913979511688252e-07, |
| "loss": 0.039, |
| "step": 615 |
| }, |
| { |
| "epoch": 2.5091649694501017, |
| "grad_norm": 0.6388219751227069, |
| "learning_rate": 7.78615058660801e-07, |
| "loss": 0.0489, |
| "step": 616 |
| }, |
| { |
| "epoch": 2.513238289205703, |
| "grad_norm": 0.5854521751719799, |
| "learning_rate": 7.659275227984142e-07, |
| "loss": 0.0389, |
| "step": 617 |
| }, |
| { |
| "epoch": 2.5173116089613035, |
| "grad_norm": 0.5305304944305677, |
| "learning_rate": 7.533356301797523e-07, |
| "loss": 0.0411, |
| "step": 618 |
| }, |
| { |
| "epoch": 2.521384928716904, |
| "grad_norm": 0.557310110228571, |
| "learning_rate": 7.408396652424271e-07, |
| "loss": 0.0353, |
| "step": 619 |
| }, |
| { |
| "epoch": 2.525458248472505, |
| "grad_norm": 0.7836187943265139, |
| "learning_rate": 7.28439910257141e-07, |
| "loss": 0.0564, |
| "step": 620 |
| }, |
| { |
| "epoch": 2.529531568228106, |
| "grad_norm": 0.6076346237905078, |
| "learning_rate": 7.161366453213181e-07, |
| "loss": 0.0388, |
| "step": 621 |
| }, |
| { |
| "epoch": 2.5336048879837065, |
| "grad_norm": 0.5905401181549683, |
| "learning_rate": 7.03930148352771e-07, |
| "loss": 0.0426, |
| "step": 622 |
| }, |
| { |
| "epoch": 2.5376782077393076, |
| "grad_norm": 0.5845080037540452, |
| "learning_rate": 6.918206950834283e-07, |
| "loss": 0.0439, |
| "step": 623 |
| }, |
| { |
| "epoch": 2.5417515274949083, |
| "grad_norm": 0.6528843977830857, |
| "learning_rate": 6.798085590531012e-07, |
| "loss": 0.0424, |
| "step": 624 |
| }, |
| { |
| "epoch": 2.5458248472505094, |
| "grad_norm": 0.5060089875025451, |
| "learning_rate": 6.678940116033095e-07, |
| "loss": 0.0306, |
| "step": 625 |
| }, |
| { |
| "epoch": 2.54989816700611, |
| "grad_norm": 0.6444888573719301, |
| "learning_rate": 6.560773218711458e-07, |
| "loss": 0.0405, |
| "step": 626 |
| }, |
| { |
| "epoch": 2.5539714867617107, |
| "grad_norm": 0.6910140407804498, |
| "learning_rate": 6.443587567832044e-07, |
| "loss": 0.0395, |
| "step": 627 |
| }, |
| { |
| "epoch": 2.5580448065173114, |
| "grad_norm": 0.7112890892772754, |
| "learning_rate": 6.327385810495423e-07, |
| "loss": 0.0454, |
| "step": 628 |
| }, |
| { |
| "epoch": 2.5621181262729125, |
| "grad_norm": 0.5927299095854747, |
| "learning_rate": 6.212170571577087e-07, |
| "loss": 0.0444, |
| "step": 629 |
| }, |
| { |
| "epoch": 2.566191446028513, |
| "grad_norm": 0.5535609388587451, |
| "learning_rate": 6.097944453668081e-07, |
| "loss": 0.0406, |
| "step": 630 |
| }, |
| { |
| "epoch": 2.5702647657841142, |
| "grad_norm": 0.5451818995222956, |
| "learning_rate": 5.984710037016267e-07, |
| "loss": 0.0374, |
| "step": 631 |
| }, |
| { |
| "epoch": 2.574338085539715, |
| "grad_norm": 0.7054245569688469, |
| "learning_rate": 5.872469879468024e-07, |
| "loss": 0.055, |
| "step": 632 |
| }, |
| { |
| "epoch": 2.5784114052953155, |
| "grad_norm": 0.5518787075176712, |
| "learning_rate": 5.761226516410434e-07, |
| "loss": 0.0347, |
| "step": 633 |
| }, |
| { |
| "epoch": 2.5824847250509166, |
| "grad_norm": 0.796243379976555, |
| "learning_rate": 5.650982460714083e-07, |
| "loss": 0.0481, |
| "step": 634 |
| }, |
| { |
| "epoch": 2.5865580448065173, |
| "grad_norm": 0.7188252233855069, |
| "learning_rate": 5.54174020267621e-07, |
| "loss": 0.0443, |
| "step": 635 |
| }, |
| { |
| "epoch": 2.5906313645621184, |
| "grad_norm": 0.5526783617745519, |
| "learning_rate": 5.433502209964531e-07, |
| "loss": 0.0396, |
| "step": 636 |
| }, |
| { |
| "epoch": 2.594704684317719, |
| "grad_norm": 0.5825176845240135, |
| "learning_rate": 5.326270927561444e-07, |
| "loss": 0.0455, |
| "step": 637 |
| }, |
| { |
| "epoch": 2.5987780040733197, |
| "grad_norm": 0.571850531305161, |
| "learning_rate": 5.22004877770883e-07, |
| "loss": 0.041, |
| "step": 638 |
| }, |
| { |
| "epoch": 2.6028513238289204, |
| "grad_norm": 0.6483190905465236, |
| "learning_rate": 5.114838159853336e-07, |
| "loss": 0.0438, |
| "step": 639 |
| }, |
| { |
| "epoch": 2.6069246435845215, |
| "grad_norm": 0.6037537099679922, |
| "learning_rate": 5.010641450592158e-07, |
| "loss": 0.0437, |
| "step": 640 |
| }, |
| { |
| "epoch": 2.610997963340122, |
| "grad_norm": 0.7252652851456096, |
| "learning_rate": 4.907461003619346e-07, |
| "loss": 0.048, |
| "step": 641 |
| }, |
| { |
| "epoch": 2.6150712830957232, |
| "grad_norm": 0.7432983808595361, |
| "learning_rate": 4.805299149672682e-07, |
| "loss": 0.0448, |
| "step": 642 |
| }, |
| { |
| "epoch": 2.619144602851324, |
| "grad_norm": 0.5876266367931727, |
| "learning_rate": 4.7041581964809733e-07, |
| "loss": 0.0377, |
| "step": 643 |
| }, |
| { |
| "epoch": 2.6232179226069245, |
| "grad_norm": 0.5798950545458074, |
| "learning_rate": 4.6040404287119924e-07, |
| "loss": 0.0438, |
| "step": 644 |
| }, |
| { |
| "epoch": 2.627291242362525, |
| "grad_norm": 0.5827291901167612, |
| "learning_rate": 4.504948107920781e-07, |
| "loss": 0.0386, |
| "step": 645 |
| }, |
| { |
| "epoch": 2.6313645621181263, |
| "grad_norm": 0.5454690396247462, |
| "learning_rate": 4.4068834724986466e-07, |
| "loss": 0.0404, |
| "step": 646 |
| }, |
| { |
| "epoch": 2.635437881873727, |
| "grad_norm": 0.591772305720897, |
| "learning_rate": 4.309848737622568e-07, |
| "loss": 0.0411, |
| "step": 647 |
| }, |
| { |
| "epoch": 2.639511201629328, |
| "grad_norm": 0.6030630084243364, |
| "learning_rate": 4.213846095205126e-07, |
| "loss": 0.0378, |
| "step": 648 |
| }, |
| { |
| "epoch": 2.6435845213849287, |
| "grad_norm": 0.9035073373124242, |
| "learning_rate": 4.1188777138450487e-07, |
| "loss": 0.0637, |
| "step": 649 |
| }, |
| { |
| "epoch": 2.6476578411405294, |
| "grad_norm": 0.6306988274701608, |
| "learning_rate": 4.024945738778163e-07, |
| "loss": 0.0455, |
| "step": 650 |
| }, |
| { |
| "epoch": 2.6517311608961305, |
| "grad_norm": 0.5571148701616122, |
| "learning_rate": 3.9320522918289973e-07, |
| "loss": 0.0418, |
| "step": 651 |
| }, |
| { |
| "epoch": 2.655804480651731, |
| "grad_norm": 0.6025888229873192, |
| "learning_rate": 3.8401994713628044e-07, |
| "loss": 0.0447, |
| "step": 652 |
| }, |
| { |
| "epoch": 2.6598778004073322, |
| "grad_norm": 0.5348586235615737, |
| "learning_rate": 3.7493893522381866e-07, |
| "loss": 0.0389, |
| "step": 653 |
| }, |
| { |
| "epoch": 2.663951120162933, |
| "grad_norm": 0.6516858785582209, |
| "learning_rate": 3.6596239857602136e-07, |
| "loss": 0.0488, |
| "step": 654 |
| }, |
| { |
| "epoch": 2.6680244399185336, |
| "grad_norm": 0.5588332726110832, |
| "learning_rate": 3.570905399634111e-07, |
| "loss": 0.0354, |
| "step": 655 |
| }, |
| { |
| "epoch": 2.672097759674134, |
| "grad_norm": 0.5451393645035304, |
| "learning_rate": 3.483235597919404e-07, |
| "loss": 0.0382, |
| "step": 656 |
| }, |
| { |
| "epoch": 2.6761710794297353, |
| "grad_norm": 0.5736021789458674, |
| "learning_rate": 3.396616560984711e-07, |
| "loss": 0.0361, |
| "step": 657 |
| }, |
| { |
| "epoch": 2.680244399185336, |
| "grad_norm": 0.7760132004231503, |
| "learning_rate": 3.31105024546296e-07, |
| "loss": 0.0376, |
| "step": 658 |
| }, |
| { |
| "epoch": 2.684317718940937, |
| "grad_norm": 0.7396815329059558, |
| "learning_rate": 3.226538584207228e-07, |
| "loss": 0.0473, |
| "step": 659 |
| }, |
| { |
| "epoch": 2.6883910386965377, |
| "grad_norm": 0.7964726796467382, |
| "learning_rate": 3.1430834862470395e-07, |
| "loss": 0.0603, |
| "step": 660 |
| }, |
| { |
| "epoch": 2.6924643584521384, |
| "grad_norm": 1.5980896143918564, |
| "learning_rate": 3.0606868367452746e-07, |
| "loss": 0.0643, |
| "step": 661 |
| }, |
| { |
| "epoch": 2.696537678207739, |
| "grad_norm": 0.6085849621464253, |
| "learning_rate": 2.9793504969555965e-07, |
| "loss": 0.0447, |
| "step": 662 |
| }, |
| { |
| "epoch": 2.70061099796334, |
| "grad_norm": 0.5833066393844298, |
| "learning_rate": 2.899076304180348e-07, |
| "loss": 0.0412, |
| "step": 663 |
| }, |
| { |
| "epoch": 2.704684317718941, |
| "grad_norm": 0.819223247703981, |
| "learning_rate": 2.819866071729127e-07, |
| "loss": 0.0594, |
| "step": 664 |
| }, |
| { |
| "epoch": 2.708757637474542, |
| "grad_norm": 0.6560481778570941, |
| "learning_rate": 2.7417215888777493e-07, |
| "loss": 0.0391, |
| "step": 665 |
| }, |
| { |
| "epoch": 2.7128309572301426, |
| "grad_norm": 0.7882666613871305, |
| "learning_rate": 2.6646446208279054e-07, |
| "loss": 0.0442, |
| "step": 666 |
| }, |
| { |
| "epoch": 2.716904276985743, |
| "grad_norm": 0.5727739809065823, |
| "learning_rate": 2.5886369086672193e-07, |
| "loss": 0.0431, |
| "step": 667 |
| }, |
| { |
| "epoch": 2.7209775967413443, |
| "grad_norm": 0.5899312889497369, |
| "learning_rate": 2.513700169329963e-07, |
| "loss": 0.0378, |
| "step": 668 |
| }, |
| { |
| "epoch": 2.725050916496945, |
| "grad_norm": 0.8321164992248397, |
| "learning_rate": 2.439836095558262e-07, |
| "loss": 0.052, |
| "step": 669 |
| }, |
| { |
| "epoch": 2.729124236252546, |
| "grad_norm": 0.5571226051042548, |
| "learning_rate": 2.3670463558638556e-07, |
| "loss": 0.0392, |
| "step": 670 |
| }, |
| { |
| "epoch": 2.7331975560081467, |
| "grad_norm": 0.6530488652200966, |
| "learning_rate": 2.2953325944903848e-07, |
| "loss": 0.0395, |
| "step": 671 |
| }, |
| { |
| "epoch": 2.7372708757637474, |
| "grad_norm": 0.6113145873412146, |
| "learning_rate": 2.2246964313763053e-07, |
| "loss": 0.041, |
| "step": 672 |
| }, |
| { |
| "epoch": 2.741344195519348, |
| "grad_norm": 0.7095148513537615, |
| "learning_rate": 2.1551394621182277e-07, |
| "loss": 0.0397, |
| "step": 673 |
| }, |
| { |
| "epoch": 2.745417515274949, |
| "grad_norm": 0.6033544892162519, |
| "learning_rate": 2.08666325793494e-07, |
| "loss": 0.0443, |
| "step": 674 |
| }, |
| { |
| "epoch": 2.74949083503055, |
| "grad_norm": 0.5446102953157629, |
| "learning_rate": 2.0192693656318597e-07, |
| "loss": 0.0408, |
| "step": 675 |
| }, |
| { |
| "epoch": 2.753564154786151, |
| "grad_norm": 0.6143476593669657, |
| "learning_rate": 1.9529593075661267e-07, |
| "loss": 0.0431, |
| "step": 676 |
| }, |
| { |
| "epoch": 2.7576374745417516, |
| "grad_norm": 0.5937558119917979, |
| "learning_rate": 1.8877345816122162e-07, |
| "loss": 0.0389, |
| "step": 677 |
| }, |
| { |
| "epoch": 2.7617107942973522, |
| "grad_norm": 0.6700927045976569, |
| "learning_rate": 1.8235966611280687e-07, |
| "loss": 0.0427, |
| "step": 678 |
| }, |
| { |
| "epoch": 2.765784114052953, |
| "grad_norm": 0.6396378419079983, |
| "learning_rate": 1.760546994921858e-07, |
| "loss": 0.041, |
| "step": 679 |
| }, |
| { |
| "epoch": 2.769857433808554, |
| "grad_norm": 0.7273252196426618, |
| "learning_rate": 1.6985870072192156e-07, |
| "loss": 0.0486, |
| "step": 680 |
| }, |
| { |
| "epoch": 2.7739307535641546, |
| "grad_norm": 0.6092698543049133, |
| "learning_rate": 1.6377180976310968e-07, |
| "loss": 0.0414, |
| "step": 681 |
| }, |
| { |
| "epoch": 2.7780040733197557, |
| "grad_norm": 0.6384474489803748, |
| "learning_rate": 1.5779416411221437e-07, |
| "loss": 0.0409, |
| "step": 682 |
| }, |
| { |
| "epoch": 2.7820773930753564, |
| "grad_norm": 0.7062056069710516, |
| "learning_rate": 1.5192589879796383e-07, |
| "loss": 0.0361, |
| "step": 683 |
| }, |
| { |
| "epoch": 2.786150712830957, |
| "grad_norm": 0.6060235266872488, |
| "learning_rate": 1.4616714637829822e-07, |
| "loss": 0.0416, |
| "step": 684 |
| }, |
| { |
| "epoch": 2.790224032586558, |
| "grad_norm": 0.6872805545772854, |
| "learning_rate": 1.4051803693737876e-07, |
| "loss": 0.0447, |
| "step": 685 |
| }, |
| { |
| "epoch": 2.794297352342159, |
| "grad_norm": 1.1080142515005726, |
| "learning_rate": 1.3497869808264453e-07, |
| "loss": 0.0572, |
| "step": 686 |
| }, |
| { |
| "epoch": 2.79837067209776, |
| "grad_norm": 0.576061423457441, |
| "learning_rate": 1.2954925494193472e-07, |
| "loss": 0.0407, |
| "step": 687 |
| }, |
| { |
| "epoch": 2.8024439918533606, |
| "grad_norm": 0.5287732197801528, |
| "learning_rate": 1.2422983016065816e-07, |
| "loss": 0.037, |
| "step": 688 |
| }, |
| { |
| "epoch": 2.8065173116089612, |
| "grad_norm": 1.0183232038437686, |
| "learning_rate": 1.1902054389902662e-07, |
| "loss": 0.0519, |
| "step": 689 |
| }, |
| { |
| "epoch": 2.810590631364562, |
| "grad_norm": 0.7757556677352597, |
| "learning_rate": 1.1392151382933647e-07, |
| "loss": 0.0393, |
| "step": 690 |
| }, |
| { |
| "epoch": 2.814663951120163, |
| "grad_norm": 0.6719286279824974, |
| "learning_rate": 1.0893285513331353e-07, |
| "loss": 0.0466, |
| "step": 691 |
| }, |
| { |
| "epoch": 2.8187372708757636, |
| "grad_norm": 0.6944618367685592, |
| "learning_rate": 1.0405468049951184e-07, |
| "loss": 0.0413, |
| "step": 692 |
| }, |
| { |
| "epoch": 2.8228105906313647, |
| "grad_norm": 0.61552802643045, |
| "learning_rate": 9.928710012076404e-08, |
| "loss": 0.0471, |
| "step": 693 |
| }, |
| { |
| "epoch": 2.8268839103869654, |
| "grad_norm": 0.5410095050369199, |
| "learning_rate": 9.463022169169666e-08, |
| "loss": 0.0405, |
| "step": 694 |
| }, |
| { |
| "epoch": 2.830957230142566, |
| "grad_norm": 0.6222514939239194, |
| "learning_rate": 9.008415040629548e-08, |
| "loss": 0.0381, |
| "step": 695 |
| }, |
| { |
| "epoch": 2.835030549898167, |
| "grad_norm": 0.716237190221308, |
| "learning_rate": 8.564898895552843e-08, |
| "loss": 0.0483, |
| "step": 696 |
| }, |
| { |
| "epoch": 2.839103869653768, |
| "grad_norm": 0.5438713399396514, |
| "learning_rate": 8.132483752502806e-08, |
| "loss": 0.0307, |
| "step": 697 |
| }, |
| { |
| "epoch": 2.8431771894093685, |
| "grad_norm": 0.5546226611008334, |
| "learning_rate": 7.711179379282674e-08, |
| "loss": 0.0436, |
| "step": 698 |
| }, |
| { |
| "epoch": 2.8472505091649696, |
| "grad_norm": 0.7795946084989639, |
| "learning_rate": 7.300995292715107e-08, |
| "loss": 0.0544, |
| "step": 699 |
| }, |
| { |
| "epoch": 2.8513238289205702, |
| "grad_norm": 0.7271455444954849, |
| "learning_rate": 6.901940758427206e-08, |
| "loss": 0.0506, |
| "step": 700 |
| }, |
| { |
| "epoch": 2.855397148676171, |
| "grad_norm": 0.6167947815542473, |
| "learning_rate": 6.514024790641116e-08, |
| "loss": 0.0338, |
| "step": 701 |
| }, |
| { |
| "epoch": 2.859470468431772, |
| "grad_norm": 0.9028615937159957, |
| "learning_rate": 6.137256151970583e-08, |
| "loss": 0.0384, |
| "step": 702 |
| }, |
| { |
| "epoch": 2.8635437881873727, |
| "grad_norm": 0.5586903994455261, |
| "learning_rate": 5.771643353222778e-08, |
| "loss": 0.0417, |
| "step": 703 |
| }, |
| { |
| "epoch": 2.8676171079429738, |
| "grad_norm": 0.5744286666789391, |
| "learning_rate": 5.417194653206337e-08, |
| "loss": 0.0398, |
| "step": 704 |
| }, |
| { |
| "epoch": 2.8716904276985744, |
| "grad_norm": 0.5715192295451592, |
| "learning_rate": 5.073918058544458e-08, |
| "loss": 0.0418, |
| "step": 705 |
| }, |
| { |
| "epoch": 2.875763747454175, |
| "grad_norm": 0.6253865033754882, |
| "learning_rate": 4.741821323494489e-08, |
| "loss": 0.039, |
| "step": 706 |
| }, |
| { |
| "epoch": 2.8798370672097757, |
| "grad_norm": 0.5416303891716191, |
| "learning_rate": 4.4209119497722883e-08, |
| "loss": 0.0354, |
| "step": 707 |
| }, |
| { |
| "epoch": 2.883910386965377, |
| "grad_norm": 0.7147732942247966, |
| "learning_rate": 4.1111971863830866e-08, |
| "loss": 0.0429, |
| "step": 708 |
| }, |
| { |
| "epoch": 2.8879837067209775, |
| "grad_norm": 0.6143363731337814, |
| "learning_rate": 3.812684029457614e-08, |
| "loss": 0.0409, |
| "step": 709 |
| }, |
| { |
| "epoch": 2.8920570264765786, |
| "grad_norm": 0.5571624531996129, |
| "learning_rate": 3.525379222094061e-08, |
| "loss": 0.0378, |
| "step": 710 |
| }, |
| { |
| "epoch": 2.8961303462321792, |
| "grad_norm": 0.5623398083462782, |
| "learning_rate": 3.249289254205867e-08, |
| "loss": 0.0384, |
| "step": 711 |
| }, |
| { |
| "epoch": 2.90020366598778, |
| "grad_norm": 0.620822915325178, |
| "learning_rate": 2.984420362375007e-08, |
| "loss": 0.0389, |
| "step": 712 |
| }, |
| { |
| "epoch": 2.904276985743381, |
| "grad_norm": 0.6323804053973247, |
| "learning_rate": 2.7307785297111533e-08, |
| "loss": 0.0382, |
| "step": 713 |
| }, |
| { |
| "epoch": 2.9083503054989817, |
| "grad_norm": 0.577630536988922, |
| "learning_rate": 2.488369485716513e-08, |
| "loss": 0.0424, |
| "step": 714 |
| }, |
| { |
| "epoch": 2.9124236252545828, |
| "grad_norm": 0.5908644464528476, |
| "learning_rate": 2.2571987061564827e-08, |
| "loss": 0.0374, |
| "step": 715 |
| }, |
| { |
| "epoch": 2.9164969450101834, |
| "grad_norm": 0.5819680044309804, |
| "learning_rate": 2.0372714129356375e-08, |
| "loss": 0.0418, |
| "step": 716 |
| }, |
| { |
| "epoch": 2.920570264765784, |
| "grad_norm": 0.5998719655116901, |
| "learning_rate": 1.8285925739803812e-08, |
| "loss": 0.0373, |
| "step": 717 |
| }, |
| { |
| "epoch": 2.9246435845213847, |
| "grad_norm": 0.6039864647607079, |
| "learning_rate": 1.631166903126147e-08, |
| "loss": 0.0459, |
| "step": 718 |
| }, |
| { |
| "epoch": 2.928716904276986, |
| "grad_norm": 0.762786479746698, |
| "learning_rate": 1.4449988600111486e-08, |
| "loss": 0.0515, |
| "step": 719 |
| }, |
| { |
| "epoch": 2.9327902240325865, |
| "grad_norm": 0.6018854526331516, |
| "learning_rate": 1.2700926499756295e-08, |
| "loss": 0.0414, |
| "step": 720 |
| }, |
| { |
| "epoch": 2.9368635437881876, |
| "grad_norm": 0.5961101580877264, |
| "learning_rate": 1.1064522239669916e-08, |
| "loss": 0.0354, |
| "step": 721 |
| }, |
| { |
| "epoch": 2.9409368635437882, |
| "grad_norm": 1.7052061344176328, |
| "learning_rate": 9.54081278450314e-09, |
| "loss": 0.056, |
| "step": 722 |
| }, |
| { |
| "epoch": 2.945010183299389, |
| "grad_norm": 0.6291245374019063, |
| "learning_rate": 8.129832553249173e-09, |
| "loss": 0.0396, |
| "step": 723 |
| }, |
| { |
| "epoch": 2.9490835030549896, |
| "grad_norm": 0.6127187852232583, |
| "learning_rate": 6.831613418468163e-09, |
| "loss": 0.0459, |
| "step": 724 |
| }, |
| { |
| "epoch": 2.9531568228105907, |
| "grad_norm": 0.7310466052458664, |
| "learning_rate": 5.646184705563884e-09, |
| "loss": 0.0375, |
| "step": 725 |
| }, |
| { |
| "epoch": 2.9572301425661913, |
| "grad_norm": 0.5622139337651338, |
| "learning_rate": 4.573573192125369e-09, |
| "loss": 0.0351, |
| "step": 726 |
| }, |
| { |
| "epoch": 2.9613034623217924, |
| "grad_norm": 0.5994897822486772, |
| "learning_rate": 3.613803107317959e-09, |
| "loss": 0.0483, |
| "step": 727 |
| }, |
| { |
| "epoch": 2.965376782077393, |
| "grad_norm": 0.6310995479806456, |
| "learning_rate": 2.7668961313376263e-09, |
| "loss": 0.0414, |
| "step": 728 |
| }, |
| { |
| "epoch": 2.9694501018329937, |
| "grad_norm": 0.6948629856817712, |
| "learning_rate": 2.0328713949230304e-09, |
| "loss": 0.0495, |
| "step": 729 |
| }, |
| { |
| "epoch": 2.973523421588595, |
| "grad_norm": 0.7318123074015203, |
| "learning_rate": 1.4117454789208673e-09, |
| "loss": 0.046, |
| "step": 730 |
| }, |
| { |
| "epoch": 2.9775967413441955, |
| "grad_norm": 0.6523465210659862, |
| "learning_rate": 9.03532413911723e-10, |
| "loss": 0.0442, |
| "step": 731 |
| }, |
| { |
| "epoch": 2.9816700610997966, |
| "grad_norm": 0.5390724631100189, |
| "learning_rate": 5.08243679894771e-10, |
| "loss": 0.0332, |
| "step": 732 |
| }, |
| { |
| "epoch": 2.9857433808553973, |
| "grad_norm": 0.6407224408398684, |
| "learning_rate": 2.2588820602631457e-10, |
| "loss": 0.0489, |
| "step": 733 |
| }, |
| { |
| "epoch": 2.989816700610998, |
| "grad_norm": 0.7098372871776469, |
| "learning_rate": 5.6472370419391464e-11, |
| "loss": 0.0476, |
| "step": 734 |
| }, |
| { |
| "epoch": 2.9938900203665986, |
| "grad_norm": 0.6576944358362913, |
| "learning_rate": 0.0, |
| "loss": 0.048, |
| "step": 735 |
| }, |
| { |
| "epoch": 2.9938900203665986, |
| "step": 735, |
| "total_flos": 48064094208000.0, |
| "train_loss": 0.0952699331224573, |
| "train_runtime": 5965.7023, |
| "train_samples_per_second": 1.975, |
| "train_steps_per_second": 0.123 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 735, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 48064094208000.0, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |