| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 1.9933774834437086, |
| "eval_steps": 500, |
| "global_step": 402, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.004966887417218543, |
| "grad_norm": 35.1089973449707, |
| "learning_rate": 5.0000000000000004e-08, |
| "loss": 5.1044, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.009933774834437087, |
| "grad_norm": 35.22032165527344, |
| "learning_rate": 1.0000000000000001e-07, |
| "loss": 5.0735, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.014900662251655629, |
| "grad_norm": 35.83143997192383, |
| "learning_rate": 1.5000000000000002e-07, |
| "loss": 5.1028, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.019867549668874173, |
| "grad_norm": 35.02216339111328, |
| "learning_rate": 2.0000000000000002e-07, |
| "loss": 5.0132, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.024834437086092714, |
| "grad_norm": 35.219600677490234, |
| "learning_rate": 2.5000000000000004e-07, |
| "loss": 4.9707, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.029801324503311258, |
| "grad_norm": 36.146759033203125, |
| "learning_rate": 3.0000000000000004e-07, |
| "loss": 5.0997, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.0347682119205298, |
| "grad_norm": 35.079566955566406, |
| "learning_rate": 3.5000000000000004e-07, |
| "loss": 5.0102, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.039735099337748346, |
| "grad_norm": 33.08433532714844, |
| "learning_rate": 4.0000000000000003e-07, |
| "loss": 4.7433, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.04470198675496689, |
| "grad_norm": 33.799949645996094, |
| "learning_rate": 4.5000000000000003e-07, |
| "loss": 4.8223, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.04966887417218543, |
| "grad_norm": 35.39978790283203, |
| "learning_rate": 5.000000000000001e-07, |
| "loss": 5.1291, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.054635761589403975, |
| "grad_norm": 33.871028900146484, |
| "learning_rate": 5.5e-07, |
| "loss": 4.8411, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.059602649006622516, |
| "grad_norm": 33.94557571411133, |
| "learning_rate": 6.000000000000001e-07, |
| "loss": 4.8888, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.06456953642384106, |
| "grad_norm": 35.7645263671875, |
| "learning_rate": 6.5e-07, |
| "loss": 4.9706, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.0695364238410596, |
| "grad_norm": 33.34524917602539, |
| "learning_rate": 7.000000000000001e-07, |
| "loss": 4.8249, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.07450331125827815, |
| "grad_norm": 31.388317108154297, |
| "learning_rate": 7.5e-07, |
| "loss": 4.4837, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.07947019867549669, |
| "grad_norm": 30.788236618041992, |
| "learning_rate": 8.000000000000001e-07, |
| "loss": 4.4459, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.08443708609271523, |
| "grad_norm": 30.05523109436035, |
| "learning_rate": 8.500000000000001e-07, |
| "loss": 4.4497, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.08940397350993377, |
| "grad_norm": 27.767133712768555, |
| "learning_rate": 9.000000000000001e-07, |
| "loss": 4.3426, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.09437086092715231, |
| "grad_norm": 26.096942901611328, |
| "learning_rate": 9.500000000000001e-07, |
| "loss": 4.1846, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.09933774834437085, |
| "grad_norm": 24.659503936767578, |
| "learning_rate": 1.0000000000000002e-06, |
| "loss": 4.1404, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.10430463576158941, |
| "grad_norm": 22.810558319091797, |
| "learning_rate": 1.0500000000000001e-06, |
| "loss": 3.9254, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.10927152317880795, |
| "grad_norm": 21.360898971557617, |
| "learning_rate": 1.1e-06, |
| "loss": 3.7266, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.11423841059602649, |
| "grad_norm": 22.106414794921875, |
| "learning_rate": 1.1500000000000002e-06, |
| "loss": 3.6787, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.11920529801324503, |
| "grad_norm": 19.86927032470703, |
| "learning_rate": 1.2000000000000002e-06, |
| "loss": 3.3947, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.12417218543046357, |
| "grad_norm": 20.86886215209961, |
| "learning_rate": 1.25e-06, |
| "loss": 3.3619, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.1291390728476821, |
| "grad_norm": 20.39168357849121, |
| "learning_rate": 1.3e-06, |
| "loss": 3.1309, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.13410596026490065, |
| "grad_norm": 20.989742279052734, |
| "learning_rate": 1.3500000000000002e-06, |
| "loss": 3.0103, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.1390728476821192, |
| "grad_norm": 19.428770065307617, |
| "learning_rate": 1.4000000000000001e-06, |
| "loss": 2.8232, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.14403973509933773, |
| "grad_norm": 16.829797744750977, |
| "learning_rate": 1.45e-06, |
| "loss": 2.6573, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.1490066225165563, |
| "grad_norm": 15.379148483276367, |
| "learning_rate": 1.5e-06, |
| "loss": 2.5938, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.15397350993377484, |
| "grad_norm": 14.703971862792969, |
| "learning_rate": 1.5500000000000002e-06, |
| "loss": 2.4588, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.15894039735099338, |
| "grad_norm": 14.567538261413574, |
| "learning_rate": 1.6000000000000001e-06, |
| "loss": 2.211, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.16390728476821192, |
| "grad_norm": 14.908529281616211, |
| "learning_rate": 1.6500000000000003e-06, |
| "loss": 2.0326, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.16887417218543047, |
| "grad_norm": 15.71338939666748, |
| "learning_rate": 1.7000000000000002e-06, |
| "loss": 1.9728, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.173841059602649, |
| "grad_norm": 15.91982364654541, |
| "learning_rate": 1.75e-06, |
| "loss": 1.8389, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.17880794701986755, |
| "grad_norm": 15.001914024353027, |
| "learning_rate": 1.8000000000000001e-06, |
| "loss": 1.6801, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.1837748344370861, |
| "grad_norm": 13.963605880737305, |
| "learning_rate": 1.85e-06, |
| "loss": 1.4723, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.18874172185430463, |
| "grad_norm": 13.326650619506836, |
| "learning_rate": 1.9000000000000002e-06, |
| "loss": 1.3989, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.19370860927152317, |
| "grad_norm": 13.237666130065918, |
| "learning_rate": 1.9500000000000004e-06, |
| "loss": 1.2334, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.1986754966887417, |
| "grad_norm": 12.756895065307617, |
| "learning_rate": 2.0000000000000003e-06, |
| "loss": 1.0125, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.20364238410596028, |
| "grad_norm": 12.65739917755127, |
| "learning_rate": 2.05e-06, |
| "loss": 0.9345, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.20860927152317882, |
| "grad_norm": 12.268824577331543, |
| "learning_rate": 2.1000000000000002e-06, |
| "loss": 0.7775, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.21357615894039736, |
| "grad_norm": 11.216493606567383, |
| "learning_rate": 2.15e-06, |
| "loss": 0.6453, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.2185430463576159, |
| "grad_norm": 10.217277526855469, |
| "learning_rate": 2.2e-06, |
| "loss": 0.5069, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.22350993377483444, |
| "grad_norm": 8.963619232177734, |
| "learning_rate": 2.25e-06, |
| "loss": 0.4146, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.22847682119205298, |
| "grad_norm": 7.5903706550598145, |
| "learning_rate": 2.3000000000000004e-06, |
| "loss": 0.3154, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.23344370860927152, |
| "grad_norm": 5.7549147605896, |
| "learning_rate": 2.35e-06, |
| "loss": 0.2397, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.23841059602649006, |
| "grad_norm": 3.816410779953003, |
| "learning_rate": 2.4000000000000003e-06, |
| "loss": 0.1793, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.2433774834437086, |
| "grad_norm": 2.867004156112671, |
| "learning_rate": 2.4500000000000003e-06, |
| "loss": 0.1624, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.24834437086092714, |
| "grad_norm": 2.462583303451538, |
| "learning_rate": 2.5e-06, |
| "loss": 0.135, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.2533112582781457, |
| "grad_norm": 2.168409824371338, |
| "learning_rate": 2.55e-06, |
| "loss": 0.1176, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.2582781456953642, |
| "grad_norm": 1.5076923370361328, |
| "learning_rate": 2.6e-06, |
| "loss": 0.1294, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.2632450331125828, |
| "grad_norm": 1.422659158706665, |
| "learning_rate": 2.6500000000000005e-06, |
| "loss": 0.0929, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.2682119205298013, |
| "grad_norm": 1.2882518768310547, |
| "learning_rate": 2.7000000000000004e-06, |
| "loss": 0.0872, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.2731788079470199, |
| "grad_norm": 0.9768906235694885, |
| "learning_rate": 2.7500000000000004e-06, |
| "loss": 0.1038, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.2781456953642384, |
| "grad_norm": 0.6768905520439148, |
| "learning_rate": 2.8000000000000003e-06, |
| "loss": 0.0839, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.28311258278145696, |
| "grad_norm": 0.8317804932594299, |
| "learning_rate": 2.85e-06, |
| "loss": 0.0793, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.28807947019867547, |
| "grad_norm": 1.0260225534439087, |
| "learning_rate": 2.9e-06, |
| "loss": 0.0907, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.29304635761589404, |
| "grad_norm": 0.69094318151474, |
| "learning_rate": 2.95e-06, |
| "loss": 0.0748, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.2980132450331126, |
| "grad_norm": 0.6794005036354065, |
| "learning_rate": 3e-06, |
| "loss": 0.0784, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.3029801324503311, |
| "grad_norm": 0.6374951004981995, |
| "learning_rate": 3.05e-06, |
| "loss": 0.0739, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.3079470198675497, |
| "grad_norm": 0.7064375281333923, |
| "learning_rate": 3.1000000000000004e-06, |
| "loss": 0.0729, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.3129139072847682, |
| "grad_norm": 0.4373936951160431, |
| "learning_rate": 3.1500000000000003e-06, |
| "loss": 0.0682, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.31788079470198677, |
| "grad_norm": 0.5880410075187683, |
| "learning_rate": 3.2000000000000003e-06, |
| "loss": 0.0666, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.3228476821192053, |
| "grad_norm": 0.4951966106891632, |
| "learning_rate": 3.2500000000000002e-06, |
| "loss": 0.0709, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.32781456953642385, |
| "grad_norm": 0.46179434657096863, |
| "learning_rate": 3.3000000000000006e-06, |
| "loss": 0.0669, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.33278145695364236, |
| "grad_norm": 0.543757438659668, |
| "learning_rate": 3.3500000000000005e-06, |
| "loss": 0.0716, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.33774834437086093, |
| "grad_norm": 0.5536766052246094, |
| "learning_rate": 3.4000000000000005e-06, |
| "loss": 0.0751, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.34271523178807944, |
| "grad_norm": 0.4959389865398407, |
| "learning_rate": 3.45e-06, |
| "loss": 0.0725, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.347682119205298, |
| "grad_norm": 0.40888911485671997, |
| "learning_rate": 3.5e-06, |
| "loss": 0.0696, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.3526490066225166, |
| "grad_norm": 0.41407281160354614, |
| "learning_rate": 3.5500000000000003e-06, |
| "loss": 0.0577, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.3576158940397351, |
| "grad_norm": 0.45143064856529236, |
| "learning_rate": 3.6000000000000003e-06, |
| "loss": 0.0666, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.36258278145695366, |
| "grad_norm": 0.4229239523410797, |
| "learning_rate": 3.65e-06, |
| "loss": 0.0657, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.3675496688741722, |
| "grad_norm": 0.32228904962539673, |
| "learning_rate": 3.7e-06, |
| "loss": 0.0604, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.37251655629139074, |
| "grad_norm": 0.48797911405563354, |
| "learning_rate": 3.7500000000000005e-06, |
| "loss": 0.0658, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.37748344370860926, |
| "grad_norm": 0.5396533012390137, |
| "learning_rate": 3.8000000000000005e-06, |
| "loss": 0.0652, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.3824503311258278, |
| "grad_norm": 0.3672020733356476, |
| "learning_rate": 3.85e-06, |
| "loss": 0.0559, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.38741721854304634, |
| "grad_norm": 0.4659889340400696, |
| "learning_rate": 3.900000000000001e-06, |
| "loss": 0.0596, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.3923841059602649, |
| "grad_norm": 0.40139755606651306, |
| "learning_rate": 3.95e-06, |
| "loss": 0.0594, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.3973509933774834, |
| "grad_norm": 0.32445284724235535, |
| "learning_rate": 4.000000000000001e-06, |
| "loss": 0.0574, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.402317880794702, |
| "grad_norm": 0.5123882293701172, |
| "learning_rate": 4.05e-06, |
| "loss": 0.0694, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.40728476821192056, |
| "grad_norm": 0.3744450509548187, |
| "learning_rate": 4.1e-06, |
| "loss": 0.0663, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.41225165562913907, |
| "grad_norm": 0.4288279414176941, |
| "learning_rate": 4.15e-06, |
| "loss": 0.0575, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.41721854304635764, |
| "grad_norm": 0.3742220997810364, |
| "learning_rate": 4.2000000000000004e-06, |
| "loss": 0.0643, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.42218543046357615, |
| "grad_norm": 0.3413388133049011, |
| "learning_rate": 4.25e-06, |
| "loss": 0.0599, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.4271523178807947, |
| "grad_norm": 0.42702731490135193, |
| "learning_rate": 4.3e-06, |
| "loss": 0.0607, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.43211920529801323, |
| "grad_norm": 0.28603485226631165, |
| "learning_rate": 4.350000000000001e-06, |
| "loss": 0.0539, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.4370860927152318, |
| "grad_norm": 0.39420223236083984, |
| "learning_rate": 4.4e-06, |
| "loss": 0.0536, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.4420529801324503, |
| "grad_norm": 0.3824305534362793, |
| "learning_rate": 4.450000000000001e-06, |
| "loss": 0.0545, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.4470198675496689, |
| "grad_norm": 0.7128148674964905, |
| "learning_rate": 4.5e-06, |
| "loss": 0.066, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.4519867549668874, |
| "grad_norm": 0.43943652510643005, |
| "learning_rate": 4.5500000000000005e-06, |
| "loss": 0.0551, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.45695364238410596, |
| "grad_norm": 0.4158875048160553, |
| "learning_rate": 4.600000000000001e-06, |
| "loss": 0.0597, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.46192052980132453, |
| "grad_norm": 0.4504539668560028, |
| "learning_rate": 4.65e-06, |
| "loss": 0.0651, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.46688741721854304, |
| "grad_norm": 0.2968044579029083, |
| "learning_rate": 4.7e-06, |
| "loss": 0.0601, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.4718543046357616, |
| "grad_norm": 0.3136419951915741, |
| "learning_rate": 4.75e-06, |
| "loss": 0.0537, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.4768211920529801, |
| "grad_norm": 0.3263697922229767, |
| "learning_rate": 4.800000000000001e-06, |
| "loss": 0.0571, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.4817880794701987, |
| "grad_norm": 0.4513280391693115, |
| "learning_rate": 4.85e-06, |
| "loss": 0.0578, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.4867549668874172, |
| "grad_norm": 0.32309144735336304, |
| "learning_rate": 4.9000000000000005e-06, |
| "loss": 0.0532, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.4917218543046358, |
| "grad_norm": 0.3191864788532257, |
| "learning_rate": 4.95e-06, |
| "loss": 0.0563, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.4966887417218543, |
| "grad_norm": 0.3069349229335785, |
| "learning_rate": 5e-06, |
| "loss": 0.0492, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.5016556291390728, |
| "grad_norm": 0.592820942401886, |
| "learning_rate": 4.999989914458693e-06, |
| "loss": 0.051, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.5066225165562914, |
| "grad_norm": 0.6325397491455078, |
| "learning_rate": 4.999959657916147e-06, |
| "loss": 0.0621, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.5115894039735099, |
| "grad_norm": 0.43557360768318176, |
| "learning_rate": 4.999909230616483e-06, |
| "loss": 0.0579, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.5165562913907285, |
| "grad_norm": 0.5436730980873108, |
| "learning_rate": 4.999838632966572e-06, |
| "loss": 0.0688, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.5215231788079471, |
| "grad_norm": 0.4149836301803589, |
| "learning_rate": 4.999747865536025e-06, |
| "loss": 0.0504, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.5264900662251656, |
| "grad_norm": 0.3056333661079407, |
| "learning_rate": 4.999636929057196e-06, |
| "loss": 0.0582, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.5314569536423841, |
| "grad_norm": 0.3057827651500702, |
| "learning_rate": 4.999505824425164e-06, |
| "loss": 0.0583, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.5364238410596026, |
| "grad_norm": 0.29118627309799194, |
| "learning_rate": 4.999354552697742e-06, |
| "loss": 0.0545, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.5413907284768212, |
| "grad_norm": 0.4180416762828827, |
| "learning_rate": 4.999183115095453e-06, |
| "loss": 0.0635, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.5463576158940397, |
| "grad_norm": 0.36227947473526, |
| "learning_rate": 4.998991513001532e-06, |
| "loss": 0.0581, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.5513245033112583, |
| "grad_norm": 0.4391280710697174, |
| "learning_rate": 4.998779747961906e-06, |
| "loss": 0.0569, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.5562913907284768, |
| "grad_norm": 0.40176400542259216, |
| "learning_rate": 4.998547821685188e-06, |
| "loss": 0.0479, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.5612582781456954, |
| "grad_norm": 0.28267550468444824, |
| "learning_rate": 4.998295736042659e-06, |
| "loss": 0.0562, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.5662251655629139, |
| "grad_norm": 0.29833951592445374, |
| "learning_rate": 4.998023493068255e-06, |
| "loss": 0.0551, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.5711920529801324, |
| "grad_norm": 0.4476202130317688, |
| "learning_rate": 4.997731094958551e-06, |
| "loss": 0.0493, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.5761589403973509, |
| "grad_norm": 0.3935531973838806, |
| "learning_rate": 4.997418544072742e-06, |
| "loss": 0.0602, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.5811258278145696, |
| "grad_norm": 0.3693472743034363, |
| "learning_rate": 4.9970858429326215e-06, |
| "loss": 0.0567, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.5860927152317881, |
| "grad_norm": 0.5814310312271118, |
| "learning_rate": 4.99673299422257e-06, |
| "loss": 0.0696, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.5910596026490066, |
| "grad_norm": 0.32046371698379517, |
| "learning_rate": 4.996360000789519e-06, |
| "loss": 0.0498, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.5960264900662252, |
| "grad_norm": 0.45766863226890564, |
| "learning_rate": 4.995966865642946e-06, |
| "loss": 0.0501, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.6009933774834437, |
| "grad_norm": 0.2858210504055023, |
| "learning_rate": 4.995553591954832e-06, |
| "loss": 0.0516, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.6059602649006622, |
| "grad_norm": 0.2883959412574768, |
| "learning_rate": 4.9951201830596505e-06, |
| "loss": 0.0565, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.6109271523178808, |
| "grad_norm": 0.2570095360279083, |
| "learning_rate": 4.994666642454331e-06, |
| "loss": 0.0517, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.6158940397350994, |
| "grad_norm": 0.34462130069732666, |
| "learning_rate": 4.994192973798236e-06, |
| "loss": 0.0589, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.6208609271523179, |
| "grad_norm": 0.2416890263557434, |
| "learning_rate": 4.993699180913127e-06, |
| "loss": 0.0544, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.6258278145695364, |
| "grad_norm": 0.2676381468772888, |
| "learning_rate": 4.993185267783142e-06, |
| "loss": 0.0506, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.6307947019867549, |
| "grad_norm": 0.2828262150287628, |
| "learning_rate": 4.992651238554753e-06, |
| "loss": 0.0504, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.6357615894039735, |
| "grad_norm": 0.2980761229991913, |
| "learning_rate": 4.99209709753674e-06, |
| "loss": 0.053, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.640728476821192, |
| "grad_norm": 0.30427825450897217, |
| "learning_rate": 4.991522849200152e-06, |
| "loss": 0.0517, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.6456953642384106, |
| "grad_norm": 0.32718971371650696, |
| "learning_rate": 4.990928498178274e-06, |
| "loss": 0.0543, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.6506622516556292, |
| "grad_norm": 0.26971936225891113, |
| "learning_rate": 4.990314049266586e-06, |
| "loss": 0.0498, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.6556291390728477, |
| "grad_norm": 0.2683403193950653, |
| "learning_rate": 4.989679507422728e-06, |
| "loss": 0.0456, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.6605960264900662, |
| "grad_norm": 0.3088124990463257, |
| "learning_rate": 4.989024877766461e-06, |
| "loss": 0.0549, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.6655629139072847, |
| "grad_norm": 0.2979089021682739, |
| "learning_rate": 4.988350165579618e-06, |
| "loss": 0.0493, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.6705298013245033, |
| "grad_norm": 0.3119763731956482, |
| "learning_rate": 4.987655376306069e-06, |
| "loss": 0.0494, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.6754966887417219, |
| "grad_norm": 0.26640820503234863, |
| "learning_rate": 4.986940515551676e-06, |
| "loss": 0.0466, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.6804635761589404, |
| "grad_norm": 0.34190040826797485, |
| "learning_rate": 4.9862055890842455e-06, |
| "loss": 0.052, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.6854304635761589, |
| "grad_norm": 0.3251188099384308, |
| "learning_rate": 4.9854506028334805e-06, |
| "loss": 0.0475, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.6903973509933775, |
| "grad_norm": 0.31759873032569885, |
| "learning_rate": 4.984675562890939e-06, |
| "loss": 0.0473, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.695364238410596, |
| "grad_norm": 0.382521390914917, |
| "learning_rate": 4.983880475509978e-06, |
| "loss": 0.0569, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.7003311258278145, |
| "grad_norm": 0.2814515233039856, |
| "learning_rate": 4.983065347105707e-06, |
| "loss": 0.0512, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.7052980132450332, |
| "grad_norm": 0.3223007321357727, |
| "learning_rate": 4.982230184254934e-06, |
| "loss": 0.0563, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.7102649006622517, |
| "grad_norm": 0.30451443791389465, |
| "learning_rate": 4.981374993696116e-06, |
| "loss": 0.0412, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.7152317880794702, |
| "grad_norm": 0.39687198400497437, |
| "learning_rate": 4.9804997823292996e-06, |
| "loss": 0.0507, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.7201986754966887, |
| "grad_norm": 0.311040997505188, |
| "learning_rate": 4.97960455721607e-06, |
| "loss": 0.0483, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.7251655629139073, |
| "grad_norm": 0.33765530586242676, |
| "learning_rate": 4.978689325579491e-06, |
| "loss": 0.0431, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.7301324503311258, |
| "grad_norm": 0.321216344833374, |
| "learning_rate": 4.9777540948040474e-06, |
| "loss": 0.0537, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.7350993377483444, |
| "grad_norm": 0.38110417127609253, |
| "learning_rate": 4.976798872435586e-06, |
| "loss": 0.0535, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.7400662251655629, |
| "grad_norm": 0.35567203164100647, |
| "learning_rate": 4.975823666181256e-06, |
| "loss": 0.0481, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.7450331125827815, |
| "grad_norm": 0.27548110485076904, |
| "learning_rate": 4.974828483909441e-06, |
| "loss": 0.0467, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 0.33838632702827454, |
| "learning_rate": 4.9738133336497045e-06, |
| "loss": 0.0551, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.7549668874172185, |
| "grad_norm": 0.29228779673576355, |
| "learning_rate": 4.972778223592717e-06, |
| "loss": 0.0483, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.7599337748344371, |
| "grad_norm": 0.28962090611457825, |
| "learning_rate": 4.9717231620901964e-06, |
| "loss": 0.0509, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.7649006622516556, |
| "grad_norm": 0.2481975555419922, |
| "learning_rate": 4.970648157654836e-06, |
| "loss": 0.0473, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.7698675496688742, |
| "grad_norm": 0.2947401702404022, |
| "learning_rate": 4.969553218960235e-06, |
| "loss": 0.0449, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.7748344370860927, |
| "grad_norm": 0.291072815656662, |
| "learning_rate": 4.968438354840834e-06, |
| "loss": 0.0547, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.7798013245033113, |
| "grad_norm": 0.2935287356376648, |
| "learning_rate": 4.96730357429184e-06, |
| "loss": 0.0517, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.7847682119205298, |
| "grad_norm": 0.3540295958518982, |
| "learning_rate": 4.966148886469153e-06, |
| "loss": 0.0468, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.7897350993377483, |
| "grad_norm": 0.2809925973415375, |
| "learning_rate": 4.964974300689295e-06, |
| "loss": 0.0474, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.7947019867549668, |
| "grad_norm": 0.3492342531681061, |
| "learning_rate": 4.963779826429333e-06, |
| "loss": 0.056, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.7996688741721855, |
| "grad_norm": 0.39458170533180237, |
| "learning_rate": 4.9625654733268016e-06, |
| "loss": 0.0496, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.804635761589404, |
| "grad_norm": 0.30106887221336365, |
| "learning_rate": 4.961331251179629e-06, |
| "loss": 0.0401, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.8096026490066225, |
| "grad_norm": 0.297493577003479, |
| "learning_rate": 4.960077169946052e-06, |
| "loss": 0.0488, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.8145695364238411, |
| "grad_norm": 0.30854183435440063, |
| "learning_rate": 4.958803239744542e-06, |
| "loss": 0.0452, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.8195364238410596, |
| "grad_norm": 0.3040302097797394, |
| "learning_rate": 4.9575094708537205e-06, |
| "loss": 0.0497, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.8245033112582781, |
| "grad_norm": 0.3189365267753601, |
| "learning_rate": 4.956195873712274e-06, |
| "loss": 0.0404, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.8294701986754967, |
| "grad_norm": 0.27114391326904297, |
| "learning_rate": 4.954862458918873e-06, |
| "loss": 0.0376, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.8344370860927153, |
| "grad_norm": 0.37876346707344055, |
| "learning_rate": 4.953509237232086e-06, |
| "loss": 0.0536, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.8394039735099338, |
| "grad_norm": 0.3651692867279053, |
| "learning_rate": 4.952136219570292e-06, |
| "loss": 0.0513, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.8443708609271523, |
| "grad_norm": 0.2958744764328003, |
| "learning_rate": 4.950743417011591e-06, |
| "loss": 0.0448, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.8493377483443708, |
| "grad_norm": 0.2732123136520386, |
| "learning_rate": 4.9493308407937176e-06, |
| "loss": 0.0441, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.8543046357615894, |
| "grad_norm": 0.2853662073612213, |
| "learning_rate": 4.947898502313949e-06, |
| "loss": 0.039, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.859271523178808, |
| "grad_norm": 0.3872867822647095, |
| "learning_rate": 4.946446413129011e-06, |
| "loss": 0.0561, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.8642384105960265, |
| "grad_norm": 0.33438095450401306, |
| "learning_rate": 4.944974584954989e-06, |
| "loss": 0.0427, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.8692052980132451, |
| "grad_norm": 0.3008469045162201, |
| "learning_rate": 4.943483029667231e-06, |
| "loss": 0.0417, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.8741721854304636, |
| "grad_norm": 0.298586368560791, |
| "learning_rate": 4.941971759300249e-06, |
| "loss": 0.0508, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.8791390728476821, |
| "grad_norm": 0.2677428126335144, |
| "learning_rate": 4.9404407860476275e-06, |
| "loss": 0.0468, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.8841059602649006, |
| "grad_norm": 0.2635156214237213, |
| "learning_rate": 4.938890122261923e-06, |
| "loss": 0.0467, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.8890728476821192, |
| "grad_norm": 0.29530155658721924, |
| "learning_rate": 4.937319780454559e-06, |
| "loss": 0.0408, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.8940397350993378, |
| "grad_norm": 0.2985950708389282, |
| "learning_rate": 4.935729773295738e-06, |
| "loss": 0.0397, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.8990066225165563, |
| "grad_norm": 0.3108009696006775, |
| "learning_rate": 4.934120113614322e-06, |
| "loss": 0.0405, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.9039735099337748, |
| "grad_norm": 0.32336103916168213, |
| "learning_rate": 4.932490814397744e-06, |
| "loss": 0.0435, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.9089403973509934, |
| "grad_norm": 0.3786524534225464, |
| "learning_rate": 4.930841888791898e-06, |
| "loss": 0.0455, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.9139072847682119, |
| "grad_norm": 0.32045990228652954, |
| "learning_rate": 4.929173350101025e-06, |
| "loss": 0.0424, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.9188741721854304, |
| "grad_norm": 0.4801805913448334, |
| "learning_rate": 4.927485211787622e-06, |
| "loss": 0.0558, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.9238410596026491, |
| "grad_norm": 0.28597143292427063, |
| "learning_rate": 4.925777487472318e-06, |
| "loss": 0.0378, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.9288079470198676, |
| "grad_norm": 0.2773878872394562, |
| "learning_rate": 4.9240501909337725e-06, |
| "loss": 0.0439, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.9337748344370861, |
| "grad_norm": 0.2605089545249939, |
| "learning_rate": 4.922303336108562e-06, |
| "loss": 0.0361, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.9387417218543046, |
| "grad_norm": 0.2805444598197937, |
| "learning_rate": 4.920536937091068e-06, |
| "loss": 0.0415, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.9437086092715232, |
| "grad_norm": 0.33180198073387146, |
| "learning_rate": 4.918751008133362e-06, |
| "loss": 0.0427, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.9486754966887417, |
| "grad_norm": 0.28023046255111694, |
| "learning_rate": 4.916945563645093e-06, |
| "loss": 0.0419, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.9536423841059603, |
| "grad_norm": 0.32969868183135986, |
| "learning_rate": 4.915120618193369e-06, |
| "loss": 0.0486, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.9586092715231788, |
| "grad_norm": 0.3446713984012604, |
| "learning_rate": 4.913276186502639e-06, |
| "loss": 0.0479, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.9635761589403974, |
| "grad_norm": 0.2720557749271393, |
| "learning_rate": 4.911412283454579e-06, |
| "loss": 0.0388, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.9685430463576159, |
| "grad_norm": 0.28475743532180786, |
| "learning_rate": 4.909528924087963e-06, |
| "loss": 0.0402, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.9735099337748344, |
| "grad_norm": 0.3087317943572998, |
| "learning_rate": 4.907626123598552e-06, |
| "loss": 0.0421, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.9784768211920529, |
| "grad_norm": 0.3794298470020294, |
| "learning_rate": 4.9057038973389635e-06, |
| "loss": 0.0489, |
| "step": 197 |
| }, |
| { |
| "epoch": 0.9834437086092715, |
| "grad_norm": 0.31470081210136414, |
| "learning_rate": 4.903762260818552e-06, |
| "loss": 0.0419, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.9884105960264901, |
| "grad_norm": 0.3073600232601166, |
| "learning_rate": 4.90180122970328e-06, |
| "loss": 0.0373, |
| "step": 199 |
| }, |
| { |
| "epoch": 0.9933774834437086, |
| "grad_norm": 0.35700473189353943, |
| "learning_rate": 4.899820819815598e-06, |
| "loss": 0.0382, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.9983443708609272, |
| "grad_norm": 0.38011306524276733, |
| "learning_rate": 4.89782104713431e-06, |
| "loss": 0.0437, |
| "step": 201 |
| }, |
| { |
| "epoch": 1.0, |
| "grad_norm": 0.38011306524276733, |
| "learning_rate": 4.895801927794448e-06, |
| "loss": 0.0379, |
| "step": 202 |
| }, |
| { |
| "epoch": 1.0049668874172186, |
| "grad_norm": 0.5858293175697327, |
| "learning_rate": 4.8937634780871406e-06, |
| "loss": 0.0381, |
| "step": 203 |
| }, |
| { |
| "epoch": 1.009933774834437, |
| "grad_norm": 0.27857479453086853, |
| "learning_rate": 4.891705714459483e-06, |
| "loss": 0.0357, |
| "step": 204 |
| }, |
| { |
| "epoch": 1.0149006622516556, |
| "grad_norm": 0.331392377614975, |
| "learning_rate": 4.8896286535144025e-06, |
| "loss": 0.0327, |
| "step": 205 |
| }, |
| { |
| "epoch": 1.0198675496688743, |
| "grad_norm": 0.31564489006996155, |
| "learning_rate": 4.8875323120105275e-06, |
| "loss": 0.0329, |
| "step": 206 |
| }, |
| { |
| "epoch": 1.0248344370860927, |
| "grad_norm": 0.3151126801967621, |
| "learning_rate": 4.885416706862049e-06, |
| "loss": 0.0361, |
| "step": 207 |
| }, |
| { |
| "epoch": 1.0298013245033113, |
| "grad_norm": 0.3161706030368805, |
| "learning_rate": 4.883281855138585e-06, |
| "loss": 0.0363, |
| "step": 208 |
| }, |
| { |
| "epoch": 1.0347682119205297, |
| "grad_norm": 0.3697150945663452, |
| "learning_rate": 4.8811277740650436e-06, |
| "loss": 0.0402, |
| "step": 209 |
| }, |
| { |
| "epoch": 1.0397350993377483, |
| "grad_norm": 0.3180171549320221, |
| "learning_rate": 4.878954481021484e-06, |
| "loss": 0.0305, |
| "step": 210 |
| }, |
| { |
| "epoch": 1.044701986754967, |
| "grad_norm": 0.3020327389240265, |
| "learning_rate": 4.876761993542975e-06, |
| "loss": 0.0322, |
| "step": 211 |
| }, |
| { |
| "epoch": 1.0496688741721854, |
| "grad_norm": 0.44095057249069214, |
| "learning_rate": 4.874550329319457e-06, |
| "loss": 0.033, |
| "step": 212 |
| }, |
| { |
| "epoch": 1.054635761589404, |
| "grad_norm": 0.36998042464256287, |
| "learning_rate": 4.872319506195593e-06, |
| "loss": 0.0296, |
| "step": 213 |
| }, |
| { |
| "epoch": 1.0596026490066226, |
| "grad_norm": 0.3310064971446991, |
| "learning_rate": 4.87006954217063e-06, |
| "loss": 0.0309, |
| "step": 214 |
| }, |
| { |
| "epoch": 1.064569536423841, |
| "grad_norm": 0.3527117669582367, |
| "learning_rate": 4.867800455398252e-06, |
| "loss": 0.0367, |
| "step": 215 |
| }, |
| { |
| "epoch": 1.0695364238410596, |
| "grad_norm": 0.36431634426116943, |
| "learning_rate": 4.8655122641864335e-06, |
| "loss": 0.0323, |
| "step": 216 |
| }, |
| { |
| "epoch": 1.0745033112582782, |
| "grad_norm": 0.3809157609939575, |
| "learning_rate": 4.8632049869972944e-06, |
| "loss": 0.033, |
| "step": 217 |
| }, |
| { |
| "epoch": 1.0794701986754967, |
| "grad_norm": 0.27598831057548523, |
| "learning_rate": 4.8608786424469434e-06, |
| "loss": 0.0302, |
| "step": 218 |
| }, |
| { |
| "epoch": 1.0844370860927153, |
| "grad_norm": 0.36997970938682556, |
| "learning_rate": 4.858533249305337e-06, |
| "loss": 0.0317, |
| "step": 219 |
| }, |
| { |
| "epoch": 1.0894039735099337, |
| "grad_norm": 0.30946972966194153, |
| "learning_rate": 4.856168826496123e-06, |
| "loss": 0.0342, |
| "step": 220 |
| }, |
| { |
| "epoch": 1.0943708609271523, |
| "grad_norm": 0.3649260699748993, |
| "learning_rate": 4.853785393096487e-06, |
| "loss": 0.0363, |
| "step": 221 |
| }, |
| { |
| "epoch": 1.099337748344371, |
| "grad_norm": 0.3411741554737091, |
| "learning_rate": 4.851382968337004e-06, |
| "loss": 0.0297, |
| "step": 222 |
| }, |
| { |
| "epoch": 1.1043046357615893, |
| "grad_norm": 0.34998854994773865, |
| "learning_rate": 4.848961571601476e-06, |
| "loss": 0.0406, |
| "step": 223 |
| }, |
| { |
| "epoch": 1.109271523178808, |
| "grad_norm": 0.3024022579193115, |
| "learning_rate": 4.84652122242678e-06, |
| "loss": 0.0311, |
| "step": 224 |
| }, |
| { |
| "epoch": 1.1142384105960266, |
| "grad_norm": 0.302640825510025, |
| "learning_rate": 4.844061940502711e-06, |
| "loss": 0.0354, |
| "step": 225 |
| }, |
| { |
| "epoch": 1.119205298013245, |
| "grad_norm": 0.3342653512954712, |
| "learning_rate": 4.84158374567182e-06, |
| "loss": 0.037, |
| "step": 226 |
| }, |
| { |
| "epoch": 1.1241721854304636, |
| "grad_norm": 0.3428351581096649, |
| "learning_rate": 4.839086657929256e-06, |
| "loss": 0.0299, |
| "step": 227 |
| }, |
| { |
| "epoch": 1.129139072847682, |
| "grad_norm": 0.2952878177165985, |
| "learning_rate": 4.836570697422605e-06, |
| "loss": 0.0331, |
| "step": 228 |
| }, |
| { |
| "epoch": 1.1341059602649006, |
| "grad_norm": 0.3805794417858124, |
| "learning_rate": 4.834035884451725e-06, |
| "loss": 0.0429, |
| "step": 229 |
| }, |
| { |
| "epoch": 1.1390728476821192, |
| "grad_norm": 0.33509016036987305, |
| "learning_rate": 4.831482239468585e-06, |
| "loss": 0.0302, |
| "step": 230 |
| }, |
| { |
| "epoch": 1.1440397350993377, |
| "grad_norm": 0.3166174590587616, |
| "learning_rate": 4.8289097830770995e-06, |
| "loss": 0.0306, |
| "step": 231 |
| }, |
| { |
| "epoch": 1.1490066225165563, |
| "grad_norm": 0.3474961817264557, |
| "learning_rate": 4.826318536032959e-06, |
| "loss": 0.0355, |
| "step": 232 |
| }, |
| { |
| "epoch": 1.153973509933775, |
| "grad_norm": 0.3208262026309967, |
| "learning_rate": 4.823708519243468e-06, |
| "loss": 0.0298, |
| "step": 233 |
| }, |
| { |
| "epoch": 1.1589403973509933, |
| "grad_norm": 0.3397170901298523, |
| "learning_rate": 4.821079753767371e-06, |
| "loss": 0.032, |
| "step": 234 |
| }, |
| { |
| "epoch": 1.163907284768212, |
| "grad_norm": 0.3516407012939453, |
| "learning_rate": 4.818432260814688e-06, |
| "loss": 0.0323, |
| "step": 235 |
| }, |
| { |
| "epoch": 1.1688741721854305, |
| "grad_norm": 0.313174307346344, |
| "learning_rate": 4.815766061746538e-06, |
| "loss": 0.03, |
| "step": 236 |
| }, |
| { |
| "epoch": 1.173841059602649, |
| "grad_norm": 0.3192870020866394, |
| "learning_rate": 4.813081178074969e-06, |
| "loss": 0.0256, |
| "step": 237 |
| }, |
| { |
| "epoch": 1.1788079470198676, |
| "grad_norm": 0.3418073356151581, |
| "learning_rate": 4.8103776314627845e-06, |
| "loss": 0.0249, |
| "step": 238 |
| }, |
| { |
| "epoch": 1.1837748344370862, |
| "grad_norm": 0.38076093792915344, |
| "learning_rate": 4.807655443723371e-06, |
| "loss": 0.0355, |
| "step": 239 |
| }, |
| { |
| "epoch": 1.1887417218543046, |
| "grad_norm": 0.4882449805736542, |
| "learning_rate": 4.804914636820517e-06, |
| "loss": 0.0332, |
| "step": 240 |
| }, |
| { |
| "epoch": 1.1937086092715232, |
| "grad_norm": 0.40234386920928955, |
| "learning_rate": 4.80215523286824e-06, |
| "loss": 0.0272, |
| "step": 241 |
| }, |
| { |
| "epoch": 1.1986754966887416, |
| "grad_norm": 0.4203873872756958, |
| "learning_rate": 4.799377254130606e-06, |
| "loss": 0.0357, |
| "step": 242 |
| }, |
| { |
| "epoch": 1.2036423841059603, |
| "grad_norm": 0.41364455223083496, |
| "learning_rate": 4.79658072302155e-06, |
| "loss": 0.0291, |
| "step": 243 |
| }, |
| { |
| "epoch": 1.2086092715231789, |
| "grad_norm": 0.3474646508693695, |
| "learning_rate": 4.7937656621046966e-06, |
| "loss": 0.026, |
| "step": 244 |
| }, |
| { |
| "epoch": 1.2135761589403973, |
| "grad_norm": 0.36813393235206604, |
| "learning_rate": 4.790932094093176e-06, |
| "loss": 0.0262, |
| "step": 245 |
| }, |
| { |
| "epoch": 1.218543046357616, |
| "grad_norm": 0.3225281238555908, |
| "learning_rate": 4.788080041849443e-06, |
| "loss": 0.0334, |
| "step": 246 |
| }, |
| { |
| "epoch": 1.2235099337748345, |
| "grad_norm": 0.3596038222312927, |
| "learning_rate": 4.785209528385087e-06, |
| "loss": 0.0312, |
| "step": 247 |
| }, |
| { |
| "epoch": 1.228476821192053, |
| "grad_norm": 0.3386133313179016, |
| "learning_rate": 4.7823205768606575e-06, |
| "loss": 0.0239, |
| "step": 248 |
| }, |
| { |
| "epoch": 1.2334437086092715, |
| "grad_norm": 0.34161025285720825, |
| "learning_rate": 4.779413210585464e-06, |
| "loss": 0.0296, |
| "step": 249 |
| }, |
| { |
| "epoch": 1.23841059602649, |
| "grad_norm": 0.4003826975822449, |
| "learning_rate": 4.776487453017398e-06, |
| "loss": 0.0328, |
| "step": 250 |
| }, |
| { |
| "epoch": 1.2433774834437086, |
| "grad_norm": 0.3628286123275757, |
| "learning_rate": 4.773543327762737e-06, |
| "loss": 0.0245, |
| "step": 251 |
| }, |
| { |
| "epoch": 1.2483443708609272, |
| "grad_norm": 0.34538671374320984, |
| "learning_rate": 4.770580858575961e-06, |
| "loss": 0.0267, |
| "step": 252 |
| }, |
| { |
| "epoch": 1.2533112582781456, |
| "grad_norm": 0.3221345543861389, |
| "learning_rate": 4.767600069359551e-06, |
| "loss": 0.0202, |
| "step": 253 |
| }, |
| { |
| "epoch": 1.2582781456953642, |
| "grad_norm": 0.3598504066467285, |
| "learning_rate": 4.764600984163809e-06, |
| "loss": 0.0284, |
| "step": 254 |
| }, |
| { |
| "epoch": 1.2632450331125828, |
| "grad_norm": 0.3942805826663971, |
| "learning_rate": 4.761583627186649e-06, |
| "loss": 0.0242, |
| "step": 255 |
| }, |
| { |
| "epoch": 1.2682119205298013, |
| "grad_norm": 0.3926999270915985, |
| "learning_rate": 4.7585480227734175e-06, |
| "loss": 0.0272, |
| "step": 256 |
| }, |
| { |
| "epoch": 1.2731788079470199, |
| "grad_norm": 0.43967002630233765, |
| "learning_rate": 4.755494195416683e-06, |
| "loss": 0.0197, |
| "step": 257 |
| }, |
| { |
| "epoch": 1.2781456953642385, |
| "grad_norm": 0.37213134765625, |
| "learning_rate": 4.752422169756048e-06, |
| "loss": 0.0304, |
| "step": 258 |
| }, |
| { |
| "epoch": 1.283112582781457, |
| "grad_norm": 0.3521837592124939, |
| "learning_rate": 4.749331970577946e-06, |
| "loss": 0.0192, |
| "step": 259 |
| }, |
| { |
| "epoch": 1.2880794701986755, |
| "grad_norm": 0.3851149082183838, |
| "learning_rate": 4.746223622815441e-06, |
| "loss": 0.0281, |
| "step": 260 |
| }, |
| { |
| "epoch": 1.2930463576158941, |
| "grad_norm": 0.3802141547203064, |
| "learning_rate": 4.743097151548031e-06, |
| "loss": 0.0234, |
| "step": 261 |
| }, |
| { |
| "epoch": 1.2980132450331126, |
| "grad_norm": 0.38271185755729675, |
| "learning_rate": 4.739952582001438e-06, |
| "loss": 0.0299, |
| "step": 262 |
| }, |
| { |
| "epoch": 1.3029801324503312, |
| "grad_norm": 0.4407588839530945, |
| "learning_rate": 4.736789939547411e-06, |
| "loss": 0.0269, |
| "step": 263 |
| }, |
| { |
| "epoch": 1.3079470198675498, |
| "grad_norm": 0.36599260568618774, |
| "learning_rate": 4.733609249703522e-06, |
| "loss": 0.0328, |
| "step": 264 |
| }, |
| { |
| "epoch": 1.3129139072847682, |
| "grad_norm": 0.440237820148468, |
| "learning_rate": 4.730410538132949e-06, |
| "loss": 0.0276, |
| "step": 265 |
| }, |
| { |
| "epoch": 1.3178807947019868, |
| "grad_norm": 0.4273228943347931, |
| "learning_rate": 4.7271938306442855e-06, |
| "loss": 0.0256, |
| "step": 266 |
| }, |
| { |
| "epoch": 1.3228476821192052, |
| "grad_norm": 0.36070936918258667, |
| "learning_rate": 4.72395915319132e-06, |
| "loss": 0.0261, |
| "step": 267 |
| }, |
| { |
| "epoch": 1.3278145695364238, |
| "grad_norm": 0.37429606914520264, |
| "learning_rate": 4.72070653187283e-06, |
| "loss": 0.0232, |
| "step": 268 |
| }, |
| { |
| "epoch": 1.3327814569536423, |
| "grad_norm": 0.3114779591560364, |
| "learning_rate": 4.717435992932374e-06, |
| "loss": 0.0255, |
| "step": 269 |
| }, |
| { |
| "epoch": 1.3377483443708609, |
| "grad_norm": 0.435793936252594, |
| "learning_rate": 4.714147562758076e-06, |
| "loss": 0.0246, |
| "step": 270 |
| }, |
| { |
| "epoch": 1.3427152317880795, |
| "grad_norm": 0.35718899965286255, |
| "learning_rate": 4.710841267882414e-06, |
| "loss": 0.0203, |
| "step": 271 |
| }, |
| { |
| "epoch": 1.347682119205298, |
| "grad_norm": 0.5373225808143616, |
| "learning_rate": 4.7075171349820084e-06, |
| "loss": 0.0309, |
| "step": 272 |
| }, |
| { |
| "epoch": 1.3526490066225165, |
| "grad_norm": 0.4737128019332886, |
| "learning_rate": 4.704175190877401e-06, |
| "loss": 0.0246, |
| "step": 273 |
| }, |
| { |
| "epoch": 1.3576158940397351, |
| "grad_norm": 0.4406234323978424, |
| "learning_rate": 4.700815462532846e-06, |
| "loss": 0.0285, |
| "step": 274 |
| }, |
| { |
| "epoch": 1.3625827814569536, |
| "grad_norm": 0.4181138873100281, |
| "learning_rate": 4.697437977056085e-06, |
| "loss": 0.0227, |
| "step": 275 |
| }, |
| { |
| "epoch": 1.3675496688741722, |
| "grad_norm": 0.3848871886730194, |
| "learning_rate": 4.694042761698135e-06, |
| "loss": 0.0274, |
| "step": 276 |
| }, |
| { |
| "epoch": 1.3725165562913908, |
| "grad_norm": 0.3615216016769409, |
| "learning_rate": 4.690629843853061e-06, |
| "loss": 0.0274, |
| "step": 277 |
| }, |
| { |
| "epoch": 1.3774834437086092, |
| "grad_norm": 0.34359046816825867, |
| "learning_rate": 4.687199251057765e-06, |
| "loss": 0.0197, |
| "step": 278 |
| }, |
| { |
| "epoch": 1.3824503311258278, |
| "grad_norm": 0.3309035301208496, |
| "learning_rate": 4.683751010991755e-06, |
| "loss": 0.0187, |
| "step": 279 |
| }, |
| { |
| "epoch": 1.3874172185430464, |
| "grad_norm": 0.33149197697639465, |
| "learning_rate": 4.6802851514769235e-06, |
| "loss": 0.0297, |
| "step": 280 |
| }, |
| { |
| "epoch": 1.3923841059602649, |
| "grad_norm": 0.3412141799926758, |
| "learning_rate": 4.676801700477327e-06, |
| "loss": 0.0235, |
| "step": 281 |
| }, |
| { |
| "epoch": 1.3973509933774835, |
| "grad_norm": 0.3106585144996643, |
| "learning_rate": 4.673300686098957e-06, |
| "loss": 0.0218, |
| "step": 282 |
| }, |
| { |
| "epoch": 1.402317880794702, |
| "grad_norm": 0.32849153876304626, |
| "learning_rate": 4.669782136589512e-06, |
| "loss": 0.0213, |
| "step": 283 |
| }, |
| { |
| "epoch": 1.4072847682119205, |
| "grad_norm": 0.43728697299957275, |
| "learning_rate": 4.666246080338175e-06, |
| "loss": 0.0212, |
| "step": 284 |
| }, |
| { |
| "epoch": 1.4122516556291391, |
| "grad_norm": 0.42004719376564026, |
| "learning_rate": 4.662692545875379e-06, |
| "loss": 0.024, |
| "step": 285 |
| }, |
| { |
| "epoch": 1.4172185430463577, |
| "grad_norm": 0.5100045800209045, |
| "learning_rate": 4.6591215618725775e-06, |
| "loss": 0.0259, |
| "step": 286 |
| }, |
| { |
| "epoch": 1.4221854304635762, |
| "grad_norm": 0.4625544250011444, |
| "learning_rate": 4.655533157142016e-06, |
| "loss": 0.0184, |
| "step": 287 |
| }, |
| { |
| "epoch": 1.4271523178807948, |
| "grad_norm": 0.4128156900405884, |
| "learning_rate": 4.651927360636499e-06, |
| "loss": 0.0154, |
| "step": 288 |
| }, |
| { |
| "epoch": 1.4321192052980132, |
| "grad_norm": 0.43833258748054504, |
| "learning_rate": 4.648304201449153e-06, |
| "loss": 0.0203, |
| "step": 289 |
| }, |
| { |
| "epoch": 1.4370860927152318, |
| "grad_norm": 0.4920101463794708, |
| "learning_rate": 4.644663708813196e-06, |
| "loss": 0.0262, |
| "step": 290 |
| }, |
| { |
| "epoch": 1.4420529801324502, |
| "grad_norm": 0.5934157371520996, |
| "learning_rate": 4.641005912101699e-06, |
| "loss": 0.0293, |
| "step": 291 |
| }, |
| { |
| "epoch": 1.4470198675496688, |
| "grad_norm": 0.4239577651023865, |
| "learning_rate": 4.63733084082735e-06, |
| "loss": 0.014, |
| "step": 292 |
| }, |
| { |
| "epoch": 1.4519867549668874, |
| "grad_norm": 0.34310004115104675, |
| "learning_rate": 4.633638524642216e-06, |
| "loss": 0.019, |
| "step": 293 |
| }, |
| { |
| "epoch": 1.4569536423841059, |
| "grad_norm": 0.37336331605911255, |
| "learning_rate": 4.629928993337501e-06, |
| "loss": 0.023, |
| "step": 294 |
| }, |
| { |
| "epoch": 1.4619205298013245, |
| "grad_norm": 0.5502914786338806, |
| "learning_rate": 4.626202276843312e-06, |
| "loss": 0.0256, |
| "step": 295 |
| }, |
| { |
| "epoch": 1.466887417218543, |
| "grad_norm": 0.4025366008281708, |
| "learning_rate": 4.622458405228411e-06, |
| "loss": 0.0212, |
| "step": 296 |
| }, |
| { |
| "epoch": 1.4718543046357615, |
| "grad_norm": 0.35220882296562195, |
| "learning_rate": 4.618697408699974e-06, |
| "loss": 0.0193, |
| "step": 297 |
| }, |
| { |
| "epoch": 1.4768211920529801, |
| "grad_norm": 0.42906683683395386, |
| "learning_rate": 4.614919317603351e-06, |
| "loss": 0.0183, |
| "step": 298 |
| }, |
| { |
| "epoch": 1.4817880794701987, |
| "grad_norm": 0.41879740357398987, |
| "learning_rate": 4.611124162421817e-06, |
| "loss": 0.024, |
| "step": 299 |
| }, |
| { |
| "epoch": 1.4867549668874172, |
| "grad_norm": 0.3032141923904419, |
| "learning_rate": 4.607311973776329e-06, |
| "loss": 0.0161, |
| "step": 300 |
| }, |
| { |
| "epoch": 1.4917218543046358, |
| "grad_norm": 0.4052025377750397, |
| "learning_rate": 4.603482782425272e-06, |
| "loss": 0.0185, |
| "step": 301 |
| }, |
| { |
| "epoch": 1.4966887417218544, |
| "grad_norm": 0.3545592129230499, |
| "learning_rate": 4.599636619264223e-06, |
| "loss": 0.023, |
| "step": 302 |
| }, |
| { |
| "epoch": 1.5016556291390728, |
| "grad_norm": 0.4538367986679077, |
| "learning_rate": 4.595773515325692e-06, |
| "loss": 0.0142, |
| "step": 303 |
| }, |
| { |
| "epoch": 1.5066225165562914, |
| "grad_norm": 0.6581038236618042, |
| "learning_rate": 4.591893501778873e-06, |
| "loss": 0.0173, |
| "step": 304 |
| }, |
| { |
| "epoch": 1.51158940397351, |
| "grad_norm": 0.5108897089958191, |
| "learning_rate": 4.587996609929396e-06, |
| "loss": 0.0176, |
| "step": 305 |
| }, |
| { |
| "epoch": 1.5165562913907285, |
| "grad_norm": 0.3592700660228729, |
| "learning_rate": 4.5840828712190725e-06, |
| "loss": 0.0142, |
| "step": 306 |
| }, |
| { |
| "epoch": 1.521523178807947, |
| "grad_norm": 0.6172234416007996, |
| "learning_rate": 4.5801523172256415e-06, |
| "loss": 0.018, |
| "step": 307 |
| }, |
| { |
| "epoch": 1.5264900662251657, |
| "grad_norm": 0.3826143741607666, |
| "learning_rate": 4.576204979662513e-06, |
| "loss": 0.0158, |
| "step": 308 |
| }, |
| { |
| "epoch": 1.531456953642384, |
| "grad_norm": 0.5595983862876892, |
| "learning_rate": 4.572240890378518e-06, |
| "loss": 0.0224, |
| "step": 309 |
| }, |
| { |
| "epoch": 1.5364238410596025, |
| "grad_norm": 0.4325421452522278, |
| "learning_rate": 4.568260081357644e-06, |
| "loss": 0.014, |
| "step": 310 |
| }, |
| { |
| "epoch": 1.5413907284768213, |
| "grad_norm": 0.4050692617893219, |
| "learning_rate": 4.564262584718782e-06, |
| "loss": 0.0137, |
| "step": 311 |
| }, |
| { |
| "epoch": 1.5463576158940397, |
| "grad_norm": 0.4626636803150177, |
| "learning_rate": 4.560248432715467e-06, |
| "loss": 0.0158, |
| "step": 312 |
| }, |
| { |
| "epoch": 1.5513245033112582, |
| "grad_norm": 0.5102334022521973, |
| "learning_rate": 4.556217657735616e-06, |
| "loss": 0.0184, |
| "step": 313 |
| }, |
| { |
| "epoch": 1.5562913907284768, |
| "grad_norm": 0.3994642496109009, |
| "learning_rate": 4.552170292301265e-06, |
| "loss": 0.0114, |
| "step": 314 |
| }, |
| { |
| "epoch": 1.5612582781456954, |
| "grad_norm": 0.4770815968513489, |
| "learning_rate": 4.548106369068312e-06, |
| "loss": 0.0148, |
| "step": 315 |
| }, |
| { |
| "epoch": 1.5662251655629138, |
| "grad_norm": 0.33928975462913513, |
| "learning_rate": 4.54402592082625e-06, |
| "loss": 0.0175, |
| "step": 316 |
| }, |
| { |
| "epoch": 1.5711920529801324, |
| "grad_norm": 0.4363187849521637, |
| "learning_rate": 4.539928980497903e-06, |
| "loss": 0.0072, |
| "step": 317 |
| }, |
| { |
| "epoch": 1.576158940397351, |
| "grad_norm": 0.4603089392185211, |
| "learning_rate": 4.535815581139158e-06, |
| "loss": 0.0104, |
| "step": 318 |
| }, |
| { |
| "epoch": 1.5811258278145695, |
| "grad_norm": 0.41406941413879395, |
| "learning_rate": 4.531685755938704e-06, |
| "loss": 0.009, |
| "step": 319 |
| }, |
| { |
| "epoch": 1.586092715231788, |
| "grad_norm": 0.5116543173789978, |
| "learning_rate": 4.527539538217758e-06, |
| "loss": 0.0245, |
| "step": 320 |
| }, |
| { |
| "epoch": 1.5910596026490067, |
| "grad_norm": 0.47456589341163635, |
| "learning_rate": 4.523376961429801e-06, |
| "loss": 0.0133, |
| "step": 321 |
| }, |
| { |
| "epoch": 1.596026490066225, |
| "grad_norm": 0.39687836170196533, |
| "learning_rate": 4.519198059160303e-06, |
| "loss": 0.0123, |
| "step": 322 |
| }, |
| { |
| "epoch": 1.6009933774834437, |
| "grad_norm": 0.5981389880180359, |
| "learning_rate": 4.51500286512646e-06, |
| "loss": 0.0157, |
| "step": 323 |
| }, |
| { |
| "epoch": 1.6059602649006623, |
| "grad_norm": 0.49498969316482544, |
| "learning_rate": 4.510791413176912e-06, |
| "loss": 0.0207, |
| "step": 324 |
| }, |
| { |
| "epoch": 1.6109271523178808, |
| "grad_norm": 0.4387689530849457, |
| "learning_rate": 4.506563737291479e-06, |
| "loss": 0.0116, |
| "step": 325 |
| }, |
| { |
| "epoch": 1.6158940397350994, |
| "grad_norm": 0.31551048159599304, |
| "learning_rate": 4.502319871580879e-06, |
| "loss": 0.0126, |
| "step": 326 |
| }, |
| { |
| "epoch": 1.620860927152318, |
| "grad_norm": 0.4342552423477173, |
| "learning_rate": 4.498059850286459e-06, |
| "loss": 0.0137, |
| "step": 327 |
| }, |
| { |
| "epoch": 1.6258278145695364, |
| "grad_norm": 0.43873131275177, |
| "learning_rate": 4.493783707779916e-06, |
| "loss": 0.0144, |
| "step": 328 |
| }, |
| { |
| "epoch": 1.6307947019867548, |
| "grad_norm": 0.3671911358833313, |
| "learning_rate": 4.489491478563019e-06, |
| "loss": 0.0171, |
| "step": 329 |
| }, |
| { |
| "epoch": 1.6357615894039736, |
| "grad_norm": 0.37722423672676086, |
| "learning_rate": 4.4851831972673335e-06, |
| "loss": 0.0173, |
| "step": 330 |
| }, |
| { |
| "epoch": 1.640728476821192, |
| "grad_norm": 0.3447704613208771, |
| "learning_rate": 4.480858898653936e-06, |
| "loss": 0.009, |
| "step": 331 |
| }, |
| { |
| "epoch": 1.6456953642384105, |
| "grad_norm": 0.325804740190506, |
| "learning_rate": 4.476518617613143e-06, |
| "loss": 0.015, |
| "step": 332 |
| }, |
| { |
| "epoch": 1.6506622516556293, |
| "grad_norm": 0.3346543312072754, |
| "learning_rate": 4.472162389164219e-06, |
| "loss": 0.0164, |
| "step": 333 |
| }, |
| { |
| "epoch": 1.6556291390728477, |
| "grad_norm": 0.35681554675102234, |
| "learning_rate": 4.467790248455103e-06, |
| "loss": 0.012, |
| "step": 334 |
| }, |
| { |
| "epoch": 1.660596026490066, |
| "grad_norm": 0.2275272011756897, |
| "learning_rate": 4.46340223076212e-06, |
| "loss": 0.0074, |
| "step": 335 |
| }, |
| { |
| "epoch": 1.6655629139072847, |
| "grad_norm": 0.44045180082321167, |
| "learning_rate": 4.458998371489695e-06, |
| "loss": 0.0085, |
| "step": 336 |
| }, |
| { |
| "epoch": 1.6705298013245033, |
| "grad_norm": 0.325270414352417, |
| "learning_rate": 4.454578706170075e-06, |
| "loss": 0.0141, |
| "step": 337 |
| }, |
| { |
| "epoch": 1.6754966887417218, |
| "grad_norm": 0.5011297464370728, |
| "learning_rate": 4.450143270463031e-06, |
| "loss": 0.012, |
| "step": 338 |
| }, |
| { |
| "epoch": 1.6804635761589404, |
| "grad_norm": 0.3271932303905487, |
| "learning_rate": 4.445692100155579e-06, |
| "loss": 0.0119, |
| "step": 339 |
| }, |
| { |
| "epoch": 1.685430463576159, |
| "grad_norm": 0.4148896634578705, |
| "learning_rate": 4.441225231161691e-06, |
| "loss": 0.0141, |
| "step": 340 |
| }, |
| { |
| "epoch": 1.6903973509933774, |
| "grad_norm": 0.2449064701795578, |
| "learning_rate": 4.436742699521998e-06, |
| "loss": 0.0154, |
| "step": 341 |
| }, |
| { |
| "epoch": 1.695364238410596, |
| "grad_norm": 0.46324676275253296, |
| "learning_rate": 4.432244541403506e-06, |
| "loss": 0.0102, |
| "step": 342 |
| }, |
| { |
| "epoch": 1.7003311258278146, |
| "grad_norm": 0.3034791946411133, |
| "learning_rate": 4.4277307930993055e-06, |
| "loss": 0.0132, |
| "step": 343 |
| }, |
| { |
| "epoch": 1.705298013245033, |
| "grad_norm": 0.502938985824585, |
| "learning_rate": 4.42320149102827e-06, |
| "loss": 0.0122, |
| "step": 344 |
| }, |
| { |
| "epoch": 1.7102649006622517, |
| "grad_norm": 0.35072168707847595, |
| "learning_rate": 4.41865667173477e-06, |
| "loss": 0.0081, |
| "step": 345 |
| }, |
| { |
| "epoch": 1.7152317880794703, |
| "grad_norm": 0.4683709442615509, |
| "learning_rate": 4.414096371888378e-06, |
| "loss": 0.0152, |
| "step": 346 |
| }, |
| { |
| "epoch": 1.7201986754966887, |
| "grad_norm": 0.22903448343276978, |
| "learning_rate": 4.409520628283566e-06, |
| "loss": 0.0074, |
| "step": 347 |
| }, |
| { |
| "epoch": 1.7251655629139073, |
| "grad_norm": 0.4040380120277405, |
| "learning_rate": 4.404929477839415e-06, |
| "loss": 0.0088, |
| "step": 348 |
| }, |
| { |
| "epoch": 1.730132450331126, |
| "grad_norm": 0.3377884328365326, |
| "learning_rate": 4.400322957599314e-06, |
| "loss": 0.0156, |
| "step": 349 |
| }, |
| { |
| "epoch": 1.7350993377483444, |
| "grad_norm": 0.34837478399276733, |
| "learning_rate": 4.395701104730667e-06, |
| "loss": 0.011, |
| "step": 350 |
| }, |
| { |
| "epoch": 1.7400662251655628, |
| "grad_norm": 0.3430151343345642, |
| "learning_rate": 4.3910639565245805e-06, |
| "loss": 0.012, |
| "step": 351 |
| }, |
| { |
| "epoch": 1.7450331125827816, |
| "grad_norm": 0.3015480041503906, |
| "learning_rate": 4.386411550395576e-06, |
| "loss": 0.011, |
| "step": 352 |
| }, |
| { |
| "epoch": 1.75, |
| "grad_norm": 0.3030356466770172, |
| "learning_rate": 4.3817439238812794e-06, |
| "loss": 0.0072, |
| "step": 353 |
| }, |
| { |
| "epoch": 1.7549668874172184, |
| "grad_norm": 0.2574770748615265, |
| "learning_rate": 4.377061114642126e-06, |
| "loss": 0.0058, |
| "step": 354 |
| }, |
| { |
| "epoch": 1.7599337748344372, |
| "grad_norm": 0.32613250613212585, |
| "learning_rate": 4.372363160461045e-06, |
| "loss": 0.01, |
| "step": 355 |
| }, |
| { |
| "epoch": 1.7649006622516556, |
| "grad_norm": 0.31548556685447693, |
| "learning_rate": 4.367650099243167e-06, |
| "loss": 0.0106, |
| "step": 356 |
| }, |
| { |
| "epoch": 1.769867549668874, |
| "grad_norm": 0.3480343520641327, |
| "learning_rate": 4.36292196901551e-06, |
| "loss": 0.0142, |
| "step": 357 |
| }, |
| { |
| "epoch": 1.7748344370860927, |
| "grad_norm": 0.4790445566177368, |
| "learning_rate": 4.358178807926678e-06, |
| "loss": 0.0112, |
| "step": 358 |
| }, |
| { |
| "epoch": 1.7798013245033113, |
| "grad_norm": 0.38480401039123535, |
| "learning_rate": 4.353420654246546e-06, |
| "loss": 0.0134, |
| "step": 359 |
| }, |
| { |
| "epoch": 1.7847682119205297, |
| "grad_norm": 0.355960488319397, |
| "learning_rate": 4.34864754636596e-06, |
| "loss": 0.0059, |
| "step": 360 |
| }, |
| { |
| "epoch": 1.7897350993377483, |
| "grad_norm": 0.3221181035041809, |
| "learning_rate": 4.3438595227964206e-06, |
| "loss": 0.011, |
| "step": 361 |
| }, |
| { |
| "epoch": 1.794701986754967, |
| "grad_norm": 0.44287869334220886, |
| "learning_rate": 4.339056622169777e-06, |
| "loss": 0.01, |
| "step": 362 |
| }, |
| { |
| "epoch": 1.7996688741721854, |
| "grad_norm": 0.30105292797088623, |
| "learning_rate": 4.33423888323791e-06, |
| "loss": 0.0078, |
| "step": 363 |
| }, |
| { |
| "epoch": 1.804635761589404, |
| "grad_norm": 0.43845006823539734, |
| "learning_rate": 4.329406344872423e-06, |
| "loss": 0.0109, |
| "step": 364 |
| }, |
| { |
| "epoch": 1.8096026490066226, |
| "grad_norm": 0.4705260694026947, |
| "learning_rate": 4.3245590460643296e-06, |
| "loss": 0.0091, |
| "step": 365 |
| }, |
| { |
| "epoch": 1.814569536423841, |
| "grad_norm": 0.2653980255126953, |
| "learning_rate": 4.319697025923736e-06, |
| "loss": 0.0058, |
| "step": 366 |
| }, |
| { |
| "epoch": 1.8195364238410596, |
| "grad_norm": 0.3086546063423157, |
| "learning_rate": 4.314820323679524e-06, |
| "loss": 0.0091, |
| "step": 367 |
| }, |
| { |
| "epoch": 1.8245033112582782, |
| "grad_norm": 0.43733829259872437, |
| "learning_rate": 4.309928978679041e-06, |
| "loss": 0.0089, |
| "step": 368 |
| }, |
| { |
| "epoch": 1.8294701986754967, |
| "grad_norm": 0.3844042718410492, |
| "learning_rate": 4.305023030387776e-06, |
| "loss": 0.0078, |
| "step": 369 |
| }, |
| { |
| "epoch": 1.8344370860927153, |
| "grad_norm": 0.3441635072231293, |
| "learning_rate": 4.3001025183890445e-06, |
| "loss": 0.01, |
| "step": 370 |
| }, |
| { |
| "epoch": 1.839403973509934, |
| "grad_norm": 0.44443604350090027, |
| "learning_rate": 4.295167482383667e-06, |
| "loss": 0.0121, |
| "step": 371 |
| }, |
| { |
| "epoch": 1.8443708609271523, |
| "grad_norm": 0.5100785493850708, |
| "learning_rate": 4.290217962189654e-06, |
| "loss": 0.0122, |
| "step": 372 |
| }, |
| { |
| "epoch": 1.8493377483443707, |
| "grad_norm": 0.44218710064888, |
| "learning_rate": 4.285253997741875e-06, |
| "loss": 0.0038, |
| "step": 373 |
| }, |
| { |
| "epoch": 1.8543046357615895, |
| "grad_norm": 0.4703184962272644, |
| "learning_rate": 4.2802756290917454e-06, |
| "loss": 0.0081, |
| "step": 374 |
| }, |
| { |
| "epoch": 1.859271523178808, |
| "grad_norm": 0.4065220057964325, |
| "learning_rate": 4.2752828964068996e-06, |
| "loss": 0.0087, |
| "step": 375 |
| }, |
| { |
| "epoch": 1.8642384105960264, |
| "grad_norm": 0.45641881227493286, |
| "learning_rate": 4.270275839970868e-06, |
| "loss": 0.0067, |
| "step": 376 |
| }, |
| { |
| "epoch": 1.8692052980132452, |
| "grad_norm": 0.28219515085220337, |
| "learning_rate": 4.2652545001827475e-06, |
| "loss": 0.0076, |
| "step": 377 |
| }, |
| { |
| "epoch": 1.8741721854304636, |
| "grad_norm": 0.35546135902404785, |
| "learning_rate": 4.260218917556885e-06, |
| "loss": 0.0104, |
| "step": 378 |
| }, |
| { |
| "epoch": 1.879139072847682, |
| "grad_norm": 0.33450189232826233, |
| "learning_rate": 4.25516913272254e-06, |
| "loss": 0.0048, |
| "step": 379 |
| }, |
| { |
| "epoch": 1.8841059602649006, |
| "grad_norm": 0.3185473084449768, |
| "learning_rate": 4.250105186423564e-06, |
| "loss": 0.0106, |
| "step": 380 |
| }, |
| { |
| "epoch": 1.8890728476821192, |
| "grad_norm": 0.31713488698005676, |
| "learning_rate": 4.245027119518068e-06, |
| "loss": 0.0066, |
| "step": 381 |
| }, |
| { |
| "epoch": 1.8940397350993377, |
| "grad_norm": 0.5118482708930969, |
| "learning_rate": 4.239934972978096e-06, |
| "loss": 0.0056, |
| "step": 382 |
| }, |
| { |
| "epoch": 1.8990066225165563, |
| "grad_norm": 0.5072982311248779, |
| "learning_rate": 4.234828787889291e-06, |
| "loss": 0.0051, |
| "step": 383 |
| }, |
| { |
| "epoch": 1.903973509933775, |
| "grad_norm": 0.33334094285964966, |
| "learning_rate": 4.229708605450563e-06, |
| "loss": 0.0104, |
| "step": 384 |
| }, |
| { |
| "epoch": 1.9089403973509933, |
| "grad_norm": 0.33032891154289246, |
| "learning_rate": 4.224574466973766e-06, |
| "loss": 0.006, |
| "step": 385 |
| }, |
| { |
| "epoch": 1.913907284768212, |
| "grad_norm": 0.36170345544815063, |
| "learning_rate": 4.219426413883348e-06, |
| "loss": 0.0068, |
| "step": 386 |
| }, |
| { |
| "epoch": 1.9188741721854305, |
| "grad_norm": 0.2647745609283447, |
| "learning_rate": 4.2142644877160334e-06, |
| "loss": 0.0041, |
| "step": 387 |
| }, |
| { |
| "epoch": 1.923841059602649, |
| "grad_norm": 0.35909488797187805, |
| "learning_rate": 4.2090887301204765e-06, |
| "loss": 0.0108, |
| "step": 388 |
| }, |
| { |
| "epoch": 1.9288079470198676, |
| "grad_norm": 2.257672071456909, |
| "learning_rate": 4.2038991828569325e-06, |
| "loss": 0.0067, |
| "step": 389 |
| }, |
| { |
| "epoch": 1.9337748344370862, |
| "grad_norm": 0.21206845343112946, |
| "learning_rate": 4.198695887796915e-06, |
| "loss": 0.0029, |
| "step": 390 |
| }, |
| { |
| "epoch": 1.9387417218543046, |
| "grad_norm": 0.3053521513938904, |
| "learning_rate": 4.193478886922861e-06, |
| "loss": 0.0076, |
| "step": 391 |
| }, |
| { |
| "epoch": 1.9437086092715232, |
| "grad_norm": 0.28233954310417175, |
| "learning_rate": 4.188248222327794e-06, |
| "loss": 0.007, |
| "step": 392 |
| }, |
| { |
| "epoch": 1.9486754966887418, |
| "grad_norm": 0.23707984387874603, |
| "learning_rate": 4.183003936214981e-06, |
| "loss": 0.0035, |
| "step": 393 |
| }, |
| { |
| "epoch": 1.9536423841059603, |
| "grad_norm": 0.3283425569534302, |
| "learning_rate": 4.177746070897593e-06, |
| "loss": 0.007, |
| "step": 394 |
| }, |
| { |
| "epoch": 1.9586092715231787, |
| "grad_norm": 0.43405845761299133, |
| "learning_rate": 4.172474668798363e-06, |
| "loss": 0.0062, |
| "step": 395 |
| }, |
| { |
| "epoch": 1.9635761589403975, |
| "grad_norm": 0.4817218482494354, |
| "learning_rate": 4.167189772449248e-06, |
| "loss": 0.0081, |
| "step": 396 |
| }, |
| { |
| "epoch": 1.968543046357616, |
| "grad_norm": 0.4744330048561096, |
| "learning_rate": 4.16189142449108e-06, |
| "loss": 0.0031, |
| "step": 397 |
| }, |
| { |
| "epoch": 1.9735099337748343, |
| "grad_norm": 0.302048921585083, |
| "learning_rate": 4.156579667673224e-06, |
| "loss": 0.0024, |
| "step": 398 |
| }, |
| { |
| "epoch": 1.978476821192053, |
| "grad_norm": 0.24172364175319672, |
| "learning_rate": 4.151254544853235e-06, |
| "loss": 0.0029, |
| "step": 399 |
| }, |
| { |
| "epoch": 1.9834437086092715, |
| "grad_norm": 0.26656797528266907, |
| "learning_rate": 4.145916098996509e-06, |
| "loss": 0.0066, |
| "step": 400 |
| }, |
| { |
| "epoch": 1.98841059602649, |
| "grad_norm": 0.35597696900367737, |
| "learning_rate": 4.140564373175939e-06, |
| "loss": 0.0045, |
| "step": 401 |
| }, |
| { |
| "epoch": 1.9933774834437086, |
| "grad_norm": 0.4242151081562042, |
| "learning_rate": 4.135199410571568e-06, |
| "loss": 0.0057, |
| "step": 402 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 1206, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 6, |
| "save_steps": 201, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": false |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 8.990440589644268e+17, |
| "train_batch_size": 4, |
| "trial_name": null, |
| "trial_params": null |
| } |