| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 4.99492385786802, |
| "eval_steps": 500, |
| "global_step": 1230, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0040609137055837565, |
| "grad_norm": 8.62887018244275, |
| "learning_rate": 3.2520325203252037e-07, |
| "loss": 1.3281, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.008121827411167513, |
| "grad_norm": 8.528693763624386, |
| "learning_rate": 6.504065040650407e-07, |
| "loss": 1.3167, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.012182741116751269, |
| "grad_norm": 8.64295988544749, |
| "learning_rate": 9.75609756097561e-07, |
| "loss": 1.3497, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.016243654822335026, |
| "grad_norm": 8.513854480399086, |
| "learning_rate": 1.3008130081300815e-06, |
| "loss": 1.3226, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.02030456852791878, |
| "grad_norm": 8.034222432123684, |
| "learning_rate": 1.6260162601626018e-06, |
| "loss": 1.285, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.024365482233502538, |
| "grad_norm": 7.956353805331463, |
| "learning_rate": 1.951219512195122e-06, |
| "loss": 1.313, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.028426395939086295, |
| "grad_norm": 6.639649080704839, |
| "learning_rate": 2.2764227642276426e-06, |
| "loss": 1.277, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.03248730964467005, |
| "grad_norm": 6.145878036438217, |
| "learning_rate": 2.601626016260163e-06, |
| "loss": 1.2577, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.03654822335025381, |
| "grad_norm": 3.7121117474970133, |
| "learning_rate": 2.926829268292683e-06, |
| "loss": 1.242, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.04060913705583756, |
| "grad_norm": 3.0026204471431077, |
| "learning_rate": 3.2520325203252037e-06, |
| "loss": 1.2168, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.04467005076142132, |
| "grad_norm": 2.769041332882583, |
| "learning_rate": 3.577235772357724e-06, |
| "loss": 1.2155, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.048730964467005075, |
| "grad_norm": 6.377649257998999, |
| "learning_rate": 3.902439024390244e-06, |
| "loss": 1.206, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.05279187817258883, |
| "grad_norm": 7.028820624654692, |
| "learning_rate": 4.227642276422765e-06, |
| "loss": 1.224, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.05685279187817259, |
| "grad_norm": 7.169815209444628, |
| "learning_rate": 4.552845528455285e-06, |
| "loss": 1.2076, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.06091370558375635, |
| "grad_norm": 6.903469103811548, |
| "learning_rate": 4.8780487804878055e-06, |
| "loss": 1.1946, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.0649746192893401, |
| "grad_norm": 5.51194757755764, |
| "learning_rate": 5.203252032520326e-06, |
| "loss": 1.1696, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.06903553299492386, |
| "grad_norm": 4.905523952903594, |
| "learning_rate": 5.528455284552846e-06, |
| "loss": 1.1389, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.07309644670050762, |
| "grad_norm": 3.771508935085431, |
| "learning_rate": 5.853658536585366e-06, |
| "loss": 1.0961, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.07715736040609138, |
| "grad_norm": 2.5744042874872193, |
| "learning_rate": 6.178861788617887e-06, |
| "loss": 1.0917, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.08121827411167512, |
| "grad_norm": 1.8431005762273183, |
| "learning_rate": 6.504065040650407e-06, |
| "loss": 1.1078, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.08527918781725888, |
| "grad_norm": 1.5364887515121395, |
| "learning_rate": 6.829268292682928e-06, |
| "loss": 1.0766, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.08934010152284264, |
| "grad_norm": 1.766853743034859, |
| "learning_rate": 7.154471544715448e-06, |
| "loss": 1.0758, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.09340101522842639, |
| "grad_norm": 1.6796973091236893, |
| "learning_rate": 7.4796747967479676e-06, |
| "loss": 1.0812, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.09746192893401015, |
| "grad_norm": 1.4139633784286207, |
| "learning_rate": 7.804878048780489e-06, |
| "loss": 1.0809, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.10152284263959391, |
| "grad_norm": 1.1019008459828188, |
| "learning_rate": 8.130081300813009e-06, |
| "loss": 1.08, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.10558375634517767, |
| "grad_norm": 0.8894528191997672, |
| "learning_rate": 8.45528455284553e-06, |
| "loss": 1.0642, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.10964467005076142, |
| "grad_norm": 0.9719521032491878, |
| "learning_rate": 8.78048780487805e-06, |
| "loss": 1.0312, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.11370558375634518, |
| "grad_norm": 1.107583812116328, |
| "learning_rate": 9.10569105691057e-06, |
| "loss": 1.021, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.11776649746192894, |
| "grad_norm": 0.9134551268878944, |
| "learning_rate": 9.43089430894309e-06, |
| "loss": 1.0505, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.1218274111675127, |
| "grad_norm": 0.7210846664331737, |
| "learning_rate": 9.756097560975611e-06, |
| "loss": 0.9874, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.12588832487309645, |
| "grad_norm": 0.7659361480497492, |
| "learning_rate": 1.008130081300813e-05, |
| "loss": 1.0069, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.1299492385786802, |
| "grad_norm": 0.7438429636075108, |
| "learning_rate": 1.0406504065040652e-05, |
| "loss": 1.0112, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.13401015228426397, |
| "grad_norm": 0.6602902126941446, |
| "learning_rate": 1.0731707317073172e-05, |
| "loss": 1.0098, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.13807106598984772, |
| "grad_norm": 0.7053271024764887, |
| "learning_rate": 1.1056910569105692e-05, |
| "loss": 0.9973, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.14213197969543148, |
| "grad_norm": 0.7850807386281604, |
| "learning_rate": 1.1382113821138213e-05, |
| "loss": 0.9651, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.14619289340101524, |
| "grad_norm": 0.8422666355462011, |
| "learning_rate": 1.1707317073170731e-05, |
| "loss": 1.0283, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.150253807106599, |
| "grad_norm": 0.7041794430867934, |
| "learning_rate": 1.2032520325203254e-05, |
| "loss": 0.9736, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.15431472081218275, |
| "grad_norm": 0.7150981346028934, |
| "learning_rate": 1.2357723577235774e-05, |
| "loss": 0.9735, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.15837563451776648, |
| "grad_norm": 0.5678935577448442, |
| "learning_rate": 1.2682926829268294e-05, |
| "loss": 0.9805, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.16243654822335024, |
| "grad_norm": 0.7160267447779376, |
| "learning_rate": 1.3008130081300815e-05, |
| "loss": 0.9921, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.166497461928934, |
| "grad_norm": 0.6097502302876489, |
| "learning_rate": 1.3333333333333333e-05, |
| "loss": 0.9298, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.17055837563451776, |
| "grad_norm": 0.6849819481605205, |
| "learning_rate": 1.3658536585365855e-05, |
| "loss": 0.9676, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.1746192893401015, |
| "grad_norm": 0.6372314391490987, |
| "learning_rate": 1.3983739837398376e-05, |
| "loss": 0.9593, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.17868020304568527, |
| "grad_norm": 0.6949260989786994, |
| "learning_rate": 1.4308943089430896e-05, |
| "loss": 0.9642, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.18274111675126903, |
| "grad_norm": 0.6660665121083698, |
| "learning_rate": 1.4634146341463415e-05, |
| "loss": 0.9533, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.18680203045685279, |
| "grad_norm": 0.4770367257773198, |
| "learning_rate": 1.4959349593495935e-05, |
| "loss": 0.9431, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.19086294416243654, |
| "grad_norm": 0.7214063100874251, |
| "learning_rate": 1.528455284552846e-05, |
| "loss": 0.9418, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.1949238578680203, |
| "grad_norm": 0.6206595122857601, |
| "learning_rate": 1.5609756097560978e-05, |
| "loss": 0.9835, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.19898477157360406, |
| "grad_norm": 0.5819135578361109, |
| "learning_rate": 1.5934959349593496e-05, |
| "loss": 0.9219, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.20304568527918782, |
| "grad_norm": 0.6984130437647903, |
| "learning_rate": 1.6260162601626018e-05, |
| "loss": 0.9447, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.20710659898477157, |
| "grad_norm": 0.599926623605935, |
| "learning_rate": 1.6585365853658537e-05, |
| "loss": 0.9499, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.21116751269035533, |
| "grad_norm": 0.829014022306582, |
| "learning_rate": 1.691056910569106e-05, |
| "loss": 0.9425, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.2152284263959391, |
| "grad_norm": 1.343436172185058, |
| "learning_rate": 1.7235772357723578e-05, |
| "loss": 0.9445, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.21928934010152284, |
| "grad_norm": 0.9026823421939604, |
| "learning_rate": 1.75609756097561e-05, |
| "loss": 0.9366, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.2233502538071066, |
| "grad_norm": 1.0967883445136937, |
| "learning_rate": 1.788617886178862e-05, |
| "loss": 0.9341, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.22741116751269036, |
| "grad_norm": 1.1948480173664822, |
| "learning_rate": 1.821138211382114e-05, |
| "loss": 0.9294, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.23147208121827412, |
| "grad_norm": 0.8493475703298556, |
| "learning_rate": 1.8536585365853663e-05, |
| "loss": 0.9487, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.23553299492385787, |
| "grad_norm": 1.3492368231774985, |
| "learning_rate": 1.886178861788618e-05, |
| "loss": 0.9282, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.23959390862944163, |
| "grad_norm": 1.3087960937509129, |
| "learning_rate": 1.91869918699187e-05, |
| "loss": 0.9388, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.2436548223350254, |
| "grad_norm": 1.1617787103967965, |
| "learning_rate": 1.9512195121951222e-05, |
| "loss": 0.947, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.24771573604060915, |
| "grad_norm": 1.1354482550335967, |
| "learning_rate": 1.983739837398374e-05, |
| "loss": 0.9391, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.2517766497461929, |
| "grad_norm": 1.3242366978315538, |
| "learning_rate": 2.016260162601626e-05, |
| "loss": 0.9107, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.25583756345177666, |
| "grad_norm": 1.1395001987487705, |
| "learning_rate": 2.048780487804878e-05, |
| "loss": 0.9504, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.2598984771573604, |
| "grad_norm": 0.9928039484149661, |
| "learning_rate": 2.0813008130081303e-05, |
| "loss": 0.9206, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.2639593908629442, |
| "grad_norm": 0.9320816198662614, |
| "learning_rate": 2.1138211382113822e-05, |
| "loss": 0.9186, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.26802030456852793, |
| "grad_norm": 1.2766110623793032, |
| "learning_rate": 2.1463414634146344e-05, |
| "loss": 0.9144, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.2720812182741117, |
| "grad_norm": 0.839313541450921, |
| "learning_rate": 2.1788617886178863e-05, |
| "loss": 0.9133, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.27614213197969545, |
| "grad_norm": 1.282384554332314, |
| "learning_rate": 2.2113821138211385e-05, |
| "loss": 0.9207, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.2802030456852792, |
| "grad_norm": 0.8111838756570011, |
| "learning_rate": 2.2439024390243907e-05, |
| "loss": 0.8936, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.28426395939086296, |
| "grad_norm": 0.9622977658184423, |
| "learning_rate": 2.2764227642276426e-05, |
| "loss": 0.8851, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.2883248730964467, |
| "grad_norm": 0.7758344644787591, |
| "learning_rate": 2.3089430894308948e-05, |
| "loss": 0.9121, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.2923857868020305, |
| "grad_norm": 0.7562640940886655, |
| "learning_rate": 2.3414634146341463e-05, |
| "loss": 0.9339, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.29644670050761424, |
| "grad_norm": 0.8905332558044384, |
| "learning_rate": 2.3739837398373985e-05, |
| "loss": 0.9306, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.300507614213198, |
| "grad_norm": 1.2270641158421352, |
| "learning_rate": 2.4065040650406507e-05, |
| "loss": 0.9235, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.30456852791878175, |
| "grad_norm": 1.0656954222404864, |
| "learning_rate": 2.4390243902439026e-05, |
| "loss": 0.9238, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.3086294416243655, |
| "grad_norm": 1.2196245463670141, |
| "learning_rate": 2.4715447154471548e-05, |
| "loss": 0.9497, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.31269035532994927, |
| "grad_norm": 1.150393707175204, |
| "learning_rate": 2.5040650406504066e-05, |
| "loss": 0.9216, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.31675126903553297, |
| "grad_norm": 1.090410484121022, |
| "learning_rate": 2.536585365853659e-05, |
| "loss": 0.918, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.3208121827411167, |
| "grad_norm": 1.235719658980269, |
| "learning_rate": 2.569105691056911e-05, |
| "loss": 0.9219, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.3248730964467005, |
| "grad_norm": 1.2523455830778798, |
| "learning_rate": 2.601626016260163e-05, |
| "loss": 0.9255, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.32893401015228424, |
| "grad_norm": 1.209276984792975, |
| "learning_rate": 2.634146341463415e-05, |
| "loss": 0.93, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.332994923857868, |
| "grad_norm": 0.9630847912963698, |
| "learning_rate": 2.6666666666666667e-05, |
| "loss": 0.9115, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.33705583756345175, |
| "grad_norm": 1.4372311838774192, |
| "learning_rate": 2.699186991869919e-05, |
| "loss": 0.8968, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.3411167512690355, |
| "grad_norm": 1.0825911155684622, |
| "learning_rate": 2.731707317073171e-05, |
| "loss": 0.9312, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.34517766497461927, |
| "grad_norm": 1.2645456054818507, |
| "learning_rate": 2.764227642276423e-05, |
| "loss": 0.9099, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.349238578680203, |
| "grad_norm": 1.371893094379104, |
| "learning_rate": 2.796747967479675e-05, |
| "loss": 0.909, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.3532994923857868, |
| "grad_norm": 0.9776119228492275, |
| "learning_rate": 2.829268292682927e-05, |
| "loss": 0.9037, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.35736040609137054, |
| "grad_norm": 1.1683176873484633, |
| "learning_rate": 2.8617886178861792e-05, |
| "loss": 0.9062, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.3614213197969543, |
| "grad_norm": 1.832036981881903, |
| "learning_rate": 2.8943089430894314e-05, |
| "loss": 0.9205, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.36548223350253806, |
| "grad_norm": 1.1194694253811004, |
| "learning_rate": 2.926829268292683e-05, |
| "loss": 0.9003, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.3695431472081218, |
| "grad_norm": 1.9711060947222045, |
| "learning_rate": 2.959349593495935e-05, |
| "loss": 0.9194, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.37360406091370557, |
| "grad_norm": 1.6491355556209406, |
| "learning_rate": 2.991869918699187e-05, |
| "loss": 0.9349, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.37766497461928933, |
| "grad_norm": 1.4354683835503652, |
| "learning_rate": 3.0243902439024392e-05, |
| "loss": 0.9328, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.3817258883248731, |
| "grad_norm": 1.3309451738696487, |
| "learning_rate": 3.056910569105692e-05, |
| "loss": 0.9161, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.38578680203045684, |
| "grad_norm": 1.5811084801844135, |
| "learning_rate": 3.089430894308943e-05, |
| "loss": 0.9188, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.3898477157360406, |
| "grad_norm": 1.176548775430425, |
| "learning_rate": 3.1219512195121955e-05, |
| "loss": 0.9051, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.39390862944162436, |
| "grad_norm": 1.2122534887391603, |
| "learning_rate": 3.154471544715447e-05, |
| "loss": 0.8937, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.3979695431472081, |
| "grad_norm": 1.12501344622318, |
| "learning_rate": 3.186991869918699e-05, |
| "loss": 0.8748, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.4020304568527919, |
| "grad_norm": 1.5974353003522328, |
| "learning_rate": 3.2195121951219514e-05, |
| "loss": 0.9434, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.40609137055837563, |
| "grad_norm": 1.274490925200243, |
| "learning_rate": 3.2520325203252037e-05, |
| "loss": 0.8936, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.4101522842639594, |
| "grad_norm": 1.3596273471267648, |
| "learning_rate": 3.284552845528456e-05, |
| "loss": 0.9214, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.41421319796954315, |
| "grad_norm": 1.1164242189125648, |
| "learning_rate": 3.3170731707317074e-05, |
| "loss": 0.9074, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.4182741116751269, |
| "grad_norm": 1.7027932599729587, |
| "learning_rate": 3.3495934959349596e-05, |
| "loss": 0.9167, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.42233502538071066, |
| "grad_norm": 1.3498510889282662, |
| "learning_rate": 3.382113821138212e-05, |
| "loss": 0.8972, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.4263959390862944, |
| "grad_norm": 1.2966464251641212, |
| "learning_rate": 3.414634146341463e-05, |
| "loss": 0.8847, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.4304568527918782, |
| "grad_norm": 1.3361713238901625, |
| "learning_rate": 3.4471544715447155e-05, |
| "loss": 0.8965, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.43451776649746193, |
| "grad_norm": 1.3007432538326449, |
| "learning_rate": 3.479674796747968e-05, |
| "loss": 0.9055, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.4385786802030457, |
| "grad_norm": 1.3616170274936827, |
| "learning_rate": 3.51219512195122e-05, |
| "loss": 0.885, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.44263959390862945, |
| "grad_norm": 1.1151752619167619, |
| "learning_rate": 3.544715447154472e-05, |
| "loss": 0.9021, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.4467005076142132, |
| "grad_norm": 1.4081969056893318, |
| "learning_rate": 3.577235772357724e-05, |
| "loss": 0.9121, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.45076142131979696, |
| "grad_norm": 1.3636162242197252, |
| "learning_rate": 3.609756097560976e-05, |
| "loss": 0.9209, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.4548223350253807, |
| "grad_norm": 1.3529060508094846, |
| "learning_rate": 3.642276422764228e-05, |
| "loss": 0.9087, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.4588832487309645, |
| "grad_norm": 1.0863826570098778, |
| "learning_rate": 3.67479674796748e-05, |
| "loss": 0.8899, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.46294416243654823, |
| "grad_norm": 1.8508252011914397, |
| "learning_rate": 3.7073170731707325e-05, |
| "loss": 0.9056, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.467005076142132, |
| "grad_norm": 1.0882435369896517, |
| "learning_rate": 3.739837398373984e-05, |
| "loss": 0.8992, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.47106598984771575, |
| "grad_norm": 1.548103089374484, |
| "learning_rate": 3.772357723577236e-05, |
| "loss": 0.8992, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.4751269035532995, |
| "grad_norm": 2.105575269063434, |
| "learning_rate": 3.804878048780488e-05, |
| "loss": 0.8969, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.47918781725888326, |
| "grad_norm": 1.1172540075433763, |
| "learning_rate": 3.83739837398374e-05, |
| "loss": 0.8839, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.483248730964467, |
| "grad_norm": 2.096224983174092, |
| "learning_rate": 3.869918699186992e-05, |
| "loss": 0.9189, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.4873096446700508, |
| "grad_norm": 1.6004033011411418, |
| "learning_rate": 3.9024390243902444e-05, |
| "loss": 0.8718, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.49137055837563454, |
| "grad_norm": 1.9947362072601837, |
| "learning_rate": 3.9349593495934966e-05, |
| "loss": 0.918, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.4954314720812183, |
| "grad_norm": 1.3759430526372507, |
| "learning_rate": 3.967479674796748e-05, |
| "loss": 0.8972, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.49949238578680205, |
| "grad_norm": 1.9098124889098713, |
| "learning_rate": 4e-05, |
| "loss": 0.9019, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.5035532994923858, |
| "grad_norm": 1.082614628150109, |
| "learning_rate": 3.999991946137476e-05, |
| "loss": 0.9028, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.5076142131979695, |
| "grad_norm": 2.559425102628464, |
| "learning_rate": 3.999967784614766e-05, |
| "loss": 0.9214, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.5116751269035533, |
| "grad_norm": 2.2365217011388614, |
| "learning_rate": 3.9999275156264656e-05, |
| "loss": 0.9241, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.515736040609137, |
| "grad_norm": 1.8063156574474821, |
| "learning_rate": 3.999871139496895e-05, |
| "loss": 0.9111, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.5197969543147208, |
| "grad_norm": 1.7083344676113184, |
| "learning_rate": 3.9997986566800995e-05, |
| "loss": 0.8995, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.5238578680203045, |
| "grad_norm": 1.729611466678804, |
| "learning_rate": 3.999710067759846e-05, |
| "loss": 0.8946, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.5279187817258884, |
| "grad_norm": 1.4869825271287, |
| "learning_rate": 3.999605373449617e-05, |
| "loss": 0.9068, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.531979695431472, |
| "grad_norm": 1.3561492611073167, |
| "learning_rate": 3.9994845745926075e-05, |
| "loss": 0.9025, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.5360406091370559, |
| "grad_norm": 1.0385530287552618, |
| "learning_rate": 3.999347672161713e-05, |
| "loss": 0.8899, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.5401015228426396, |
| "grad_norm": 1.5019294934265033, |
| "learning_rate": 3.999194667259528e-05, |
| "loss": 0.8857, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.5441624365482234, |
| "grad_norm": 1.4418973667053565, |
| "learning_rate": 3.999025561118334e-05, |
| "loss": 0.8958, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.5482233502538071, |
| "grad_norm": 1.2572340067708199, |
| "learning_rate": 3.998840355100086e-05, |
| "loss": 0.9194, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.5522842639593909, |
| "grad_norm": 1.0609439598407637, |
| "learning_rate": 3.998639050696409e-05, |
| "loss": 0.8905, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.5563451776649746, |
| "grad_norm": 1.828464746740132, |
| "learning_rate": 3.998421649528582e-05, |
| "loss": 0.9098, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.5604060913705584, |
| "grad_norm": 1.3123199894067794, |
| "learning_rate": 3.9981881533475234e-05, |
| "loss": 0.8963, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.5644670050761421, |
| "grad_norm": 1.5113191363186305, |
| "learning_rate": 3.997938564033779e-05, |
| "loss": 0.9081, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.5685279187817259, |
| "grad_norm": 1.4594144525399735, |
| "learning_rate": 3.9976728835975064e-05, |
| "loss": 0.904, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.5725888324873096, |
| "grad_norm": 1.0739239964028016, |
| "learning_rate": 3.9973911141784605e-05, |
| "loss": 0.8955, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.5766497461928934, |
| "grad_norm": 1.1044482971871308, |
| "learning_rate": 3.997093258045973e-05, |
| "loss": 0.9116, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.5807106598984771, |
| "grad_norm": 1.3541917407610233, |
| "learning_rate": 3.996779317598936e-05, |
| "loss": 0.872, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.584771573604061, |
| "grad_norm": 1.4788266851172207, |
| "learning_rate": 3.996449295365782e-05, |
| "loss": 0.9205, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.5888324873096447, |
| "grad_norm": 1.3629106749425273, |
| "learning_rate": 3.996103194004467e-05, |
| "loss": 0.8713, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.5928934010152285, |
| "grad_norm": 2.4427358502687513, |
| "learning_rate": 3.995741016302441e-05, |
| "loss": 0.8939, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.5969543147208122, |
| "grad_norm": 1.6797175024449704, |
| "learning_rate": 3.9953627651766364e-05, |
| "loss": 0.8754, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.601015228426396, |
| "grad_norm": 2.656246713221402, |
| "learning_rate": 3.9949684436734325e-05, |
| "loss": 0.9144, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.6050761421319797, |
| "grad_norm": 2.2256619791977332, |
| "learning_rate": 3.994558054968643e-05, |
| "loss": 0.8902, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.6091370558375635, |
| "grad_norm": 1.902247994330618, |
| "learning_rate": 3.994131602367481e-05, |
| "loss": 0.887, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.6131979695431472, |
| "grad_norm": 1.4339019895673746, |
| "learning_rate": 3.9936890893045376e-05, |
| "loss": 0.9245, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.617258883248731, |
| "grad_norm": 2.006879516844076, |
| "learning_rate": 3.993230519343752e-05, |
| "loss": 0.8953, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.6213197969543147, |
| "grad_norm": 1.4766747212288462, |
| "learning_rate": 3.992755896178383e-05, |
| "loss": 0.9287, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.6253807106598985, |
| "grad_norm": 1.6612016246841708, |
| "learning_rate": 3.992265223630981e-05, |
| "loss": 0.9113, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.6294416243654822, |
| "grad_norm": 1.2524581232021137, |
| "learning_rate": 3.991758505653355e-05, |
| "loss": 0.9208, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.6335025380710659, |
| "grad_norm": 0.8947457667466222, |
| "learning_rate": 3.991235746326543e-05, |
| "loss": 0.8935, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.6375634517766497, |
| "grad_norm": 1.1872808865574065, |
| "learning_rate": 3.9906969498607745e-05, |
| "loss": 0.8804, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.6416243654822334, |
| "grad_norm": 1.1607741588867717, |
| "learning_rate": 3.990142120595444e-05, |
| "loss": 0.8717, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.6456852791878173, |
| "grad_norm": 1.1301289500098948, |
| "learning_rate": 3.98957126299907e-05, |
| "loss": 0.8694, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.649746192893401, |
| "grad_norm": 1.1062587867402132, |
| "learning_rate": 3.9889843816692596e-05, |
| "loss": 0.9092, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.6538071065989848, |
| "grad_norm": 1.4735621770055154, |
| "learning_rate": 3.9883814813326766e-05, |
| "loss": 0.8964, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.6578680203045685, |
| "grad_norm": 0.7922607397953237, |
| "learning_rate": 3.9877625668449956e-05, |
| "loss": 0.896, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.6619289340101523, |
| "grad_norm": 0.8564877101370162, |
| "learning_rate": 3.98712764319087e-05, |
| "loss": 0.8746, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.665989847715736, |
| "grad_norm": 1.0659363574068204, |
| "learning_rate": 3.9864767154838864e-05, |
| "loss": 0.9058, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.6700507614213198, |
| "grad_norm": 1.1373363162559778, |
| "learning_rate": 3.9858097889665277e-05, |
| "loss": 0.8672, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.6741116751269035, |
| "grad_norm": 0.8084407152789492, |
| "learning_rate": 3.985126869010129e-05, |
| "loss": 0.8885, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.6781725888324873, |
| "grad_norm": 1.1688586467424646, |
| "learning_rate": 3.984427961114833e-05, |
| "loss": 0.8966, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.682233502538071, |
| "grad_norm": 0.9545097994276284, |
| "learning_rate": 3.9837130709095475e-05, |
| "loss": 0.9262, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.6862944162436548, |
| "grad_norm": 0.8306555701034513, |
| "learning_rate": 3.982982204151901e-05, |
| "loss": 0.8884, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.6903553299492385, |
| "grad_norm": 0.896887761734214, |
| "learning_rate": 3.982235366728193e-05, |
| "loss": 0.8773, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.6944162436548224, |
| "grad_norm": 1.0750114859521285, |
| "learning_rate": 3.9814725646533505e-05, |
| "loss": 0.8884, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.698477157360406, |
| "grad_norm": 1.398388939662489, |
| "learning_rate": 3.9806938040708746e-05, |
| "loss": 0.8754, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.7025380710659899, |
| "grad_norm": 0.9809999765020677, |
| "learning_rate": 3.9798990912527976e-05, |
| "loss": 0.882, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.7065989847715736, |
| "grad_norm": 1.2150932671667536, |
| "learning_rate": 3.979088432599627e-05, |
| "loss": 0.8752, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.7106598984771574, |
| "grad_norm": 0.7965308868517167, |
| "learning_rate": 3.9782618346402964e-05, |
| "loss": 0.857, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.7147208121827411, |
| "grad_norm": 0.7367530890948703, |
| "learning_rate": 3.977419304032111e-05, |
| "loss": 0.8654, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.7187817258883249, |
| "grad_norm": 0.8664659272194589, |
| "learning_rate": 3.976560847560697e-05, |
| "loss": 0.8746, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.7228426395939086, |
| "grad_norm": 0.9052609132468968, |
| "learning_rate": 3.9756864721399456e-05, |
| "loss": 0.8682, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.7269035532994924, |
| "grad_norm": 0.8704482585060862, |
| "learning_rate": 3.974796184811956e-05, |
| "loss": 0.888, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.7309644670050761, |
| "grad_norm": 0.7867736193455253, |
| "learning_rate": 3.973889992746979e-05, |
| "loss": 0.9054, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.7350253807106599, |
| "grad_norm": 1.1466452926946205, |
| "learning_rate": 3.972967903243361e-05, |
| "loss": 0.8982, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.7390862944162436, |
| "grad_norm": 1.3303110617626006, |
| "learning_rate": 3.972029923727486e-05, |
| "loss": 0.8733, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.7431472081218274, |
| "grad_norm": 1.1064217635357834, |
| "learning_rate": 3.971076061753709e-05, |
| "loss": 0.876, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.7472081218274111, |
| "grad_norm": 0.8736652299666252, |
| "learning_rate": 3.9701063250043066e-05, |
| "loss": 0.9041, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.751269035532995, |
| "grad_norm": 0.6874008145028504, |
| "learning_rate": 3.969120721289402e-05, |
| "loss": 0.8986, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.7553299492385787, |
| "grad_norm": 0.5366622765527447, |
| "learning_rate": 3.9681192585469146e-05, |
| "loss": 0.8768, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.7593908629441625, |
| "grad_norm": 0.7019165424708133, |
| "learning_rate": 3.9671019448424865e-05, |
| "loss": 0.8826, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.7634517766497462, |
| "grad_norm": 1.0041895606249729, |
| "learning_rate": 3.966068788369422e-05, |
| "loss": 0.9054, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.76751269035533, |
| "grad_norm": 1.12437982338163, |
| "learning_rate": 3.965019797448622e-05, |
| "loss": 0.9073, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.7715736040609137, |
| "grad_norm": 1.0582060197050378, |
| "learning_rate": 3.963954980528515e-05, |
| "loss": 0.9264, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.7756345177664975, |
| "grad_norm": 1.365876984339088, |
| "learning_rate": 3.9628743461849905e-05, |
| "loss": 0.876, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.7796954314720812, |
| "grad_norm": 0.8184426932769878, |
| "learning_rate": 3.961777903121329e-05, |
| "loss": 0.8864, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.783756345177665, |
| "grad_norm": 0.6577528037342761, |
| "learning_rate": 3.960665660168131e-05, |
| "loss": 0.8727, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.7878172588832487, |
| "grad_norm": 0.843479299371653, |
| "learning_rate": 3.9595376262832485e-05, |
| "loss": 0.8894, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.7918781725888325, |
| "grad_norm": 1.3447235795085584, |
| "learning_rate": 3.9583938105517127e-05, |
| "loss": 0.8784, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.7959390862944162, |
| "grad_norm": 1.0975742696354134, |
| "learning_rate": 3.957234222185657e-05, |
| "loss": 0.8975, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.8, |
| "grad_norm": 0.7498545128384897, |
| "learning_rate": 3.9560588705242474e-05, |
| "loss": 0.8948, |
| "step": 197 |
| }, |
| { |
| "epoch": 0.8040609137055837, |
| "grad_norm": 0.7117837345568039, |
| "learning_rate": 3.954867765033605e-05, |
| "loss": 0.894, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.8081218274111676, |
| "grad_norm": 0.6808730780261149, |
| "learning_rate": 3.953660915306728e-05, |
| "loss": 0.8652, |
| "step": 199 |
| }, |
| { |
| "epoch": 0.8121827411167513, |
| "grad_norm": 0.8124656346643899, |
| "learning_rate": 3.952438331063419e-05, |
| "loss": 0.8833, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.8162436548223351, |
| "grad_norm": 0.9400758170270892, |
| "learning_rate": 3.951200022150205e-05, |
| "loss": 0.8854, |
| "step": 201 |
| }, |
| { |
| "epoch": 0.8203045685279188, |
| "grad_norm": 1.1049216056060052, |
| "learning_rate": 3.949945998540253e-05, |
| "loss": 0.874, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.8243654822335026, |
| "grad_norm": 1.1048812029285373, |
| "learning_rate": 3.9486762703332993e-05, |
| "loss": 0.8589, |
| "step": 203 |
| }, |
| { |
| "epoch": 0.8284263959390863, |
| "grad_norm": 1.1303699376953664, |
| "learning_rate": 3.947390847755559e-05, |
| "loss": 0.8798, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.8324873096446701, |
| "grad_norm": 0.8848178315244256, |
| "learning_rate": 3.946089741159648e-05, |
| "loss": 0.8922, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.8365482233502538, |
| "grad_norm": 0.7503975377719104, |
| "learning_rate": 3.944772961024501e-05, |
| "loss": 0.8841, |
| "step": 206 |
| }, |
| { |
| "epoch": 0.8406091370558376, |
| "grad_norm": 0.5971919276044977, |
| "learning_rate": 3.943440517955285e-05, |
| "loss": 0.8832, |
| "step": 207 |
| }, |
| { |
| "epoch": 0.8446700507614213, |
| "grad_norm": 0.5979145635380856, |
| "learning_rate": 3.9420924226833126e-05, |
| "loss": 0.8713, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.848730964467005, |
| "grad_norm": 0.6732671100669172, |
| "learning_rate": 3.9407286860659566e-05, |
| "loss": 0.849, |
| "step": 209 |
| }, |
| { |
| "epoch": 0.8527918781725888, |
| "grad_norm": 0.6174177952103478, |
| "learning_rate": 3.9393493190865657e-05, |
| "loss": 0.8839, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.8568527918781725, |
| "grad_norm": 0.7612283827735356, |
| "learning_rate": 3.937954332854371e-05, |
| "loss": 0.9072, |
| "step": 211 |
| }, |
| { |
| "epoch": 0.8609137055837564, |
| "grad_norm": 0.6636998492164042, |
| "learning_rate": 3.9365437386044016e-05, |
| "loss": 0.8758, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.86497461928934, |
| "grad_norm": 0.5883924354128413, |
| "learning_rate": 3.935117547697387e-05, |
| "loss": 0.8653, |
| "step": 213 |
| }, |
| { |
| "epoch": 0.8690355329949239, |
| "grad_norm": 0.635271334829329, |
| "learning_rate": 3.933675771619675e-05, |
| "loss": 0.8697, |
| "step": 214 |
| }, |
| { |
| "epoch": 0.8730964467005076, |
| "grad_norm": 0.6520681868719912, |
| "learning_rate": 3.932218421983131e-05, |
| "loss": 0.8724, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.8771573604060914, |
| "grad_norm": 0.8942318463482811, |
| "learning_rate": 3.9307455105250484e-05, |
| "loss": 0.869, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.8812182741116751, |
| "grad_norm": 1.0723982134418812, |
| "learning_rate": 3.929257049108054e-05, |
| "loss": 0.8595, |
| "step": 217 |
| }, |
| { |
| "epoch": 0.8852791878172589, |
| "grad_norm": 0.9862484313163941, |
| "learning_rate": 3.927753049720011e-05, |
| "loss": 0.8539, |
| "step": 218 |
| }, |
| { |
| "epoch": 0.8893401015228426, |
| "grad_norm": 1.1745551086540567, |
| "learning_rate": 3.9262335244739234e-05, |
| "loss": 0.8627, |
| "step": 219 |
| }, |
| { |
| "epoch": 0.8934010152284264, |
| "grad_norm": 0.9315083146027145, |
| "learning_rate": 3.92469848560784e-05, |
| "loss": 0.8772, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.8974619289340101, |
| "grad_norm": 0.6765390361868112, |
| "learning_rate": 3.923147945484751e-05, |
| "loss": 0.8615, |
| "step": 221 |
| }, |
| { |
| "epoch": 0.9015228426395939, |
| "grad_norm": 0.6855267721984877, |
| "learning_rate": 3.9215819165924956e-05, |
| "loss": 0.8901, |
| "step": 222 |
| }, |
| { |
| "epoch": 0.9055837563451776, |
| "grad_norm": 0.7531347860395485, |
| "learning_rate": 3.920000411543654e-05, |
| "loss": 0.8586, |
| "step": 223 |
| }, |
| { |
| "epoch": 0.9096446700507614, |
| "grad_norm": 0.7263419004831295, |
| "learning_rate": 3.9184034430754495e-05, |
| "loss": 0.8704, |
| "step": 224 |
| }, |
| { |
| "epoch": 0.9137055837563451, |
| "grad_norm": 0.7549773029380547, |
| "learning_rate": 3.916791024049648e-05, |
| "loss": 0.88, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.917766497461929, |
| "grad_norm": 0.8335613254633946, |
| "learning_rate": 3.91516316745245e-05, |
| "loss": 0.8728, |
| "step": 226 |
| }, |
| { |
| "epoch": 0.9218274111675127, |
| "grad_norm": 0.9835568669580276, |
| "learning_rate": 3.913519886394389e-05, |
| "loss": 0.8687, |
| "step": 227 |
| }, |
| { |
| "epoch": 0.9258883248730965, |
| "grad_norm": 1.2322022298768474, |
| "learning_rate": 3.911861194110225e-05, |
| "loss": 0.8779, |
| "step": 228 |
| }, |
| { |
| "epoch": 0.9299492385786802, |
| "grad_norm": 0.7423357752207448, |
| "learning_rate": 3.910187103958837e-05, |
| "loss": 0.8625, |
| "step": 229 |
| }, |
| { |
| "epoch": 0.934010152284264, |
| "grad_norm": 0.5737924971648999, |
| "learning_rate": 3.908497629423117e-05, |
| "loss": 0.8586, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.9380710659898477, |
| "grad_norm": 0.8786906523233956, |
| "learning_rate": 3.9067927841098614e-05, |
| "loss": 0.8788, |
| "step": 231 |
| }, |
| { |
| "epoch": 0.9421319796954315, |
| "grad_norm": 0.98193260573937, |
| "learning_rate": 3.9050725817496594e-05, |
| "loss": 0.847, |
| "step": 232 |
| }, |
| { |
| "epoch": 0.9461928934010152, |
| "grad_norm": 0.7913892945281108, |
| "learning_rate": 3.9033370361967844e-05, |
| "loss": 0.8747, |
| "step": 233 |
| }, |
| { |
| "epoch": 0.950253807106599, |
| "grad_norm": 0.7577447884683057, |
| "learning_rate": 3.901586161429081e-05, |
| "loss": 0.8759, |
| "step": 234 |
| }, |
| { |
| "epoch": 0.9543147208121827, |
| "grad_norm": 0.6759329196235779, |
| "learning_rate": 3.8998199715478545e-05, |
| "loss": 0.8721, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.9583756345177665, |
| "grad_norm": 0.5931549570210413, |
| "learning_rate": 3.8980384807777564e-05, |
| "loss": 0.8727, |
| "step": 236 |
| }, |
| { |
| "epoch": 0.9624365482233502, |
| "grad_norm": 0.5756110565640363, |
| "learning_rate": 3.896241703466667e-05, |
| "loss": 0.8743, |
| "step": 237 |
| }, |
| { |
| "epoch": 0.966497461928934, |
| "grad_norm": 0.6529948707200075, |
| "learning_rate": 3.894429654085585e-05, |
| "loss": 0.8696, |
| "step": 238 |
| }, |
| { |
| "epoch": 0.9705583756345177, |
| "grad_norm": 0.5895747055634767, |
| "learning_rate": 3.892602347228505e-05, |
| "loss": 0.8534, |
| "step": 239 |
| }, |
| { |
| "epoch": 0.9746192893401016, |
| "grad_norm": 0.5410817057573484, |
| "learning_rate": 3.890759797612307e-05, |
| "loss": 0.8662, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.9786802030456853, |
| "grad_norm": 0.6646080065582237, |
| "learning_rate": 3.888902020076632e-05, |
| "loss": 0.8743, |
| "step": 241 |
| }, |
| { |
| "epoch": 0.9827411167512691, |
| "grad_norm": 0.7829869432980208, |
| "learning_rate": 3.887029029583764e-05, |
| "loss": 0.8689, |
| "step": 242 |
| }, |
| { |
| "epoch": 0.9868020304568528, |
| "grad_norm": 1.0596085542111642, |
| "learning_rate": 3.8851408412185125e-05, |
| "loss": 0.8683, |
| "step": 243 |
| }, |
| { |
| "epoch": 0.9908629441624366, |
| "grad_norm": 0.911377363067641, |
| "learning_rate": 3.8832374701880855e-05, |
| "loss": 0.8889, |
| "step": 244 |
| }, |
| { |
| "epoch": 0.9949238578680203, |
| "grad_norm": 1.2488366056373077, |
| "learning_rate": 3.881318931821972e-05, |
| "loss": 0.8702, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.9989847715736041, |
| "grad_norm": 0.7195805367184755, |
| "learning_rate": 3.879385241571817e-05, |
| "loss": 0.8583, |
| "step": 246 |
| }, |
| { |
| "epoch": 1.0030456852791878, |
| "grad_norm": 1.2718149151821105, |
| "learning_rate": 3.8774364150112955e-05, |
| "loss": 1.5016, |
| "step": 247 |
| }, |
| { |
| "epoch": 1.0071065989847716, |
| "grad_norm": 1.2228390639433937, |
| "learning_rate": 3.8754724678359884e-05, |
| "loss": 0.841, |
| "step": 248 |
| }, |
| { |
| "epoch": 1.0111675126903554, |
| "grad_norm": 0.6706593945110678, |
| "learning_rate": 3.873493415863256e-05, |
| "loss": 0.8529, |
| "step": 249 |
| }, |
| { |
| "epoch": 1.015228426395939, |
| "grad_norm": 1.1194033577934361, |
| "learning_rate": 3.871499275032111e-05, |
| "loss": 0.8444, |
| "step": 250 |
| }, |
| { |
| "epoch": 1.0192893401015228, |
| "grad_norm": 0.9046523505591721, |
| "learning_rate": 3.869490061403091e-05, |
| "loss": 0.8034, |
| "step": 251 |
| }, |
| { |
| "epoch": 1.0233502538071066, |
| "grad_norm": 0.9231951220105618, |
| "learning_rate": 3.867465791158124e-05, |
| "loss": 0.8495, |
| "step": 252 |
| }, |
| { |
| "epoch": 1.0274111675126905, |
| "grad_norm": 1.1108905063748793, |
| "learning_rate": 3.865426480600407e-05, |
| "loss": 0.8886, |
| "step": 253 |
| }, |
| { |
| "epoch": 1.031472081218274, |
| "grad_norm": 1.069777537016959, |
| "learning_rate": 3.863372146154264e-05, |
| "loss": 0.8365, |
| "step": 254 |
| }, |
| { |
| "epoch": 1.0355329949238579, |
| "grad_norm": 0.9934931436409761, |
| "learning_rate": 3.861302804365024e-05, |
| "loss": 0.8315, |
| "step": 255 |
| }, |
| { |
| "epoch": 1.0395939086294417, |
| "grad_norm": 0.8067455786134673, |
| "learning_rate": 3.85921847189888e-05, |
| "loss": 0.8332, |
| "step": 256 |
| }, |
| { |
| "epoch": 1.0436548223350255, |
| "grad_norm": 0.7489125843311554, |
| "learning_rate": 3.85711916554276e-05, |
| "loss": 0.8285, |
| "step": 257 |
| }, |
| { |
| "epoch": 1.047715736040609, |
| "grad_norm": 0.7299894747921222, |
| "learning_rate": 3.85500490220419e-05, |
| "loss": 0.86, |
| "step": 258 |
| }, |
| { |
| "epoch": 1.051776649746193, |
| "grad_norm": 0.7066648467209312, |
| "learning_rate": 3.852875698911154e-05, |
| "loss": 0.8253, |
| "step": 259 |
| }, |
| { |
| "epoch": 1.0558375634517767, |
| "grad_norm": 0.7587559320224089, |
| "learning_rate": 3.850731572811963e-05, |
| "loss": 0.8312, |
| "step": 260 |
| }, |
| { |
| "epoch": 1.0598984771573603, |
| "grad_norm": 0.961795434361132, |
| "learning_rate": 3.848572541175116e-05, |
| "loss": 0.8458, |
| "step": 261 |
| }, |
| { |
| "epoch": 1.063959390862944, |
| "grad_norm": 1.22462106606504, |
| "learning_rate": 3.846398621389154e-05, |
| "loss": 0.8625, |
| "step": 262 |
| }, |
| { |
| "epoch": 1.068020304568528, |
| "grad_norm": 0.6507218661867931, |
| "learning_rate": 3.84420983096253e-05, |
| "loss": 0.8236, |
| "step": 263 |
| }, |
| { |
| "epoch": 1.0720812182741117, |
| "grad_norm": 1.0756974743905785, |
| "learning_rate": 3.8420061875234606e-05, |
| "loss": 0.8525, |
| "step": 264 |
| }, |
| { |
| "epoch": 1.0761421319796955, |
| "grad_norm": 1.4751285556259122, |
| "learning_rate": 3.839787708819787e-05, |
| "loss": 0.8535, |
| "step": 265 |
| }, |
| { |
| "epoch": 1.0802030456852791, |
| "grad_norm": 0.5078725621691786, |
| "learning_rate": 3.8375544127188325e-05, |
| "loss": 0.8238, |
| "step": 266 |
| }, |
| { |
| "epoch": 1.084263959390863, |
| "grad_norm": 1.4024156360173399, |
| "learning_rate": 3.8353063172072564e-05, |
| "loss": 0.8461, |
| "step": 267 |
| }, |
| { |
| "epoch": 1.0883248730964468, |
| "grad_norm": 0.7028446018672242, |
| "learning_rate": 3.8330434403909105e-05, |
| "loss": 0.8437, |
| "step": 268 |
| }, |
| { |
| "epoch": 1.0923857868020304, |
| "grad_norm": 0.7439443483427532, |
| "learning_rate": 3.8307658004946934e-05, |
| "loss": 0.8528, |
| "step": 269 |
| }, |
| { |
| "epoch": 1.0964467005076142, |
| "grad_norm": 0.9142959001379658, |
| "learning_rate": 3.8284734158624046e-05, |
| "loss": 0.8118, |
| "step": 270 |
| }, |
| { |
| "epoch": 1.100507614213198, |
| "grad_norm": 0.8140387316882918, |
| "learning_rate": 3.826166304956594e-05, |
| "loss": 0.8265, |
| "step": 271 |
| }, |
| { |
| "epoch": 1.1045685279187818, |
| "grad_norm": 0.967359818436721, |
| "learning_rate": 3.8238444863584164e-05, |
| "loss": 0.8326, |
| "step": 272 |
| }, |
| { |
| "epoch": 1.1086294416243654, |
| "grad_norm": 0.9352312088978563, |
| "learning_rate": 3.821507978767479e-05, |
| "loss": 0.8442, |
| "step": 273 |
| }, |
| { |
| "epoch": 1.1126903553299492, |
| "grad_norm": 0.7817192574090527, |
| "learning_rate": 3.819156801001693e-05, |
| "loss": 0.863, |
| "step": 274 |
| }, |
| { |
| "epoch": 1.116751269035533, |
| "grad_norm": 0.6735200770668788, |
| "learning_rate": 3.816790971997121e-05, |
| "loss": 0.8266, |
| "step": 275 |
| }, |
| { |
| "epoch": 1.1208121827411168, |
| "grad_norm": 0.8605010951047443, |
| "learning_rate": 3.8144105108078246e-05, |
| "loss": 0.8526, |
| "step": 276 |
| }, |
| { |
| "epoch": 1.1248730964467004, |
| "grad_norm": 1.0227467527268905, |
| "learning_rate": 3.81201543660571e-05, |
| "loss": 0.8444, |
| "step": 277 |
| }, |
| { |
| "epoch": 1.1289340101522842, |
| "grad_norm": 0.7987787785636662, |
| "learning_rate": 3.809605768680377e-05, |
| "loss": 0.8391, |
| "step": 278 |
| }, |
| { |
| "epoch": 1.132994923857868, |
| "grad_norm": 0.6177334752184757, |
| "learning_rate": 3.807181526438958e-05, |
| "loss": 0.8547, |
| "step": 279 |
| }, |
| { |
| "epoch": 1.1370558375634519, |
| "grad_norm": 0.8315819027056006, |
| "learning_rate": 3.8047427294059697e-05, |
| "loss": 0.8563, |
| "step": 280 |
| }, |
| { |
| "epoch": 1.1411167512690354, |
| "grad_norm": 0.7573786183934449, |
| "learning_rate": 3.802289397223145e-05, |
| "loss": 0.8316, |
| "step": 281 |
| }, |
| { |
| "epoch": 1.1451776649746193, |
| "grad_norm": 0.5962393811766117, |
| "learning_rate": 3.7998215496492854e-05, |
| "loss": 0.8489, |
| "step": 282 |
| }, |
| { |
| "epoch": 1.149238578680203, |
| "grad_norm": 0.5879813743061921, |
| "learning_rate": 3.797339206560096e-05, |
| "loss": 0.8619, |
| "step": 283 |
| }, |
| { |
| "epoch": 1.1532994923857869, |
| "grad_norm": 0.5811752126956523, |
| "learning_rate": 3.794842387948027e-05, |
| "loss": 0.8428, |
| "step": 284 |
| }, |
| { |
| "epoch": 1.1573604060913705, |
| "grad_norm": 0.5899020662123778, |
| "learning_rate": 3.7923311139221114e-05, |
| "loss": 0.8457, |
| "step": 285 |
| }, |
| { |
| "epoch": 1.1614213197969543, |
| "grad_norm": 0.6207512309428056, |
| "learning_rate": 3.7898054047078054e-05, |
| "loss": 0.8353, |
| "step": 286 |
| }, |
| { |
| "epoch": 1.165482233502538, |
| "grad_norm": 0.5339900916047498, |
| "learning_rate": 3.787265280646825e-05, |
| "loss": 0.8552, |
| "step": 287 |
| }, |
| { |
| "epoch": 1.169543147208122, |
| "grad_norm": 0.6444599393545426, |
| "learning_rate": 3.7847107621969786e-05, |
| "loss": 0.8438, |
| "step": 288 |
| }, |
| { |
| "epoch": 1.1736040609137055, |
| "grad_norm": 0.5433275865967176, |
| "learning_rate": 3.7821418699320064e-05, |
| "loss": 0.8159, |
| "step": 289 |
| }, |
| { |
| "epoch": 1.1776649746192893, |
| "grad_norm": 0.653360301844724, |
| "learning_rate": 3.7795586245414145e-05, |
| "loss": 0.8549, |
| "step": 290 |
| }, |
| { |
| "epoch": 1.1817258883248731, |
| "grad_norm": 0.6912622334819194, |
| "learning_rate": 3.776961046830306e-05, |
| "loss": 0.8542, |
| "step": 291 |
| }, |
| { |
| "epoch": 1.185786802030457, |
| "grad_norm": 0.6630563047663287, |
| "learning_rate": 3.774349157719215e-05, |
| "loss": 0.834, |
| "step": 292 |
| }, |
| { |
| "epoch": 1.1898477157360405, |
| "grad_norm": 0.8737868046028318, |
| "learning_rate": 3.7717229782439365e-05, |
| "loss": 0.8324, |
| "step": 293 |
| }, |
| { |
| "epoch": 1.1939086294416243, |
| "grad_norm": 0.7347359129644747, |
| "learning_rate": 3.769082529555359e-05, |
| "loss": 0.859, |
| "step": 294 |
| }, |
| { |
| "epoch": 1.1979695431472082, |
| "grad_norm": 0.6248164010893552, |
| "learning_rate": 3.766427832919294e-05, |
| "loss": 0.7832, |
| "step": 295 |
| }, |
| { |
| "epoch": 1.202030456852792, |
| "grad_norm": 0.6606792337066806, |
| "learning_rate": 3.7637589097163024e-05, |
| "loss": 0.8124, |
| "step": 296 |
| }, |
| { |
| "epoch": 1.2060913705583756, |
| "grad_norm": 0.7038523403331013, |
| "learning_rate": 3.761075781441526e-05, |
| "loss": 0.8307, |
| "step": 297 |
| }, |
| { |
| "epoch": 1.2101522842639594, |
| "grad_norm": 0.6283740105271951, |
| "learning_rate": 3.75837846970451e-05, |
| "loss": 0.8561, |
| "step": 298 |
| }, |
| { |
| "epoch": 1.2142131979695432, |
| "grad_norm": 0.6254476058893971, |
| "learning_rate": 3.755666996229032e-05, |
| "loss": 0.8336, |
| "step": 299 |
| }, |
| { |
| "epoch": 1.218274111675127, |
| "grad_norm": 0.6323427666126807, |
| "learning_rate": 3.752941382852927e-05, |
| "loss": 0.8484, |
| "step": 300 |
| }, |
| { |
| "epoch": 1.2223350253807106, |
| "grad_norm": 0.7335318018317287, |
| "learning_rate": 3.7502016515279115e-05, |
| "loss": 0.8155, |
| "step": 301 |
| }, |
| { |
| "epoch": 1.2263959390862944, |
| "grad_norm": 0.4934412190629873, |
| "learning_rate": 3.7474478243194043e-05, |
| "loss": 0.862, |
| "step": 302 |
| }, |
| { |
| "epoch": 1.2304568527918782, |
| "grad_norm": 0.5155083972739437, |
| "learning_rate": 3.744679923406351e-05, |
| "loss": 0.8339, |
| "step": 303 |
| }, |
| { |
| "epoch": 1.2345177664974618, |
| "grad_norm": 0.5858260171823958, |
| "learning_rate": 3.741897971081043e-05, |
| "loss": 0.8541, |
| "step": 304 |
| }, |
| { |
| "epoch": 1.2385786802030456, |
| "grad_norm": 0.40878814245310663, |
| "learning_rate": 3.739101989748946e-05, |
| "loss": 0.8254, |
| "step": 305 |
| }, |
| { |
| "epoch": 1.2426395939086294, |
| "grad_norm": 0.4948015703887049, |
| "learning_rate": 3.7362920019285066e-05, |
| "loss": 0.8469, |
| "step": 306 |
| }, |
| { |
| "epoch": 1.2467005076142132, |
| "grad_norm": 0.48431379760652044, |
| "learning_rate": 3.73346803025098e-05, |
| "loss": 0.8293, |
| "step": 307 |
| }, |
| { |
| "epoch": 1.250761421319797, |
| "grad_norm": 0.7345247518550934, |
| "learning_rate": 3.730630097460247e-05, |
| "loss": 0.834, |
| "step": 308 |
| }, |
| { |
| "epoch": 1.2548223350253807, |
| "grad_norm": 0.8095303549251766, |
| "learning_rate": 3.727778226412628e-05, |
| "loss": 0.8589, |
| "step": 309 |
| }, |
| { |
| "epoch": 1.2588832487309645, |
| "grad_norm": 0.6266535188783025, |
| "learning_rate": 3.7249124400767006e-05, |
| "loss": 0.8294, |
| "step": 310 |
| }, |
| { |
| "epoch": 1.2629441624365483, |
| "grad_norm": 0.5606213487931734, |
| "learning_rate": 3.722032761533114e-05, |
| "loss": 0.8506, |
| "step": 311 |
| }, |
| { |
| "epoch": 1.2670050761421319, |
| "grad_norm": 0.5235648877677851, |
| "learning_rate": 3.719139213974403e-05, |
| "loss": 0.8421, |
| "step": 312 |
| }, |
| { |
| "epoch": 1.2710659898477157, |
| "grad_norm": 0.5338534116249731, |
| "learning_rate": 3.7162318207048006e-05, |
| "loss": 0.8385, |
| "step": 313 |
| }, |
| { |
| "epoch": 1.2751269035532995, |
| "grad_norm": 0.5155584520437608, |
| "learning_rate": 3.713310605140055e-05, |
| "loss": 0.8352, |
| "step": 314 |
| }, |
| { |
| "epoch": 1.2791878172588833, |
| "grad_norm": 0.560262301579279, |
| "learning_rate": 3.710375590807233e-05, |
| "loss": 0.8286, |
| "step": 315 |
| }, |
| { |
| "epoch": 1.2832487309644671, |
| "grad_norm": 0.6767012487164431, |
| "learning_rate": 3.7074268013445365e-05, |
| "loss": 0.8606, |
| "step": 316 |
| }, |
| { |
| "epoch": 1.2873096446700507, |
| "grad_norm": 0.6323284092160929, |
| "learning_rate": 3.7044642605011114e-05, |
| "loss": 0.8223, |
| "step": 317 |
| }, |
| { |
| "epoch": 1.2913705583756345, |
| "grad_norm": 0.4760479512094556, |
| "learning_rate": 3.701487992136854e-05, |
| "loss": 0.802, |
| "step": 318 |
| }, |
| { |
| "epoch": 1.2954314720812183, |
| "grad_norm": 0.4406940891484498, |
| "learning_rate": 3.69849802022222e-05, |
| "loss": 0.8303, |
| "step": 319 |
| }, |
| { |
| "epoch": 1.299492385786802, |
| "grad_norm": 0.4176284457984641, |
| "learning_rate": 3.6954943688380334e-05, |
| "loss": 0.8253, |
| "step": 320 |
| }, |
| { |
| "epoch": 1.3035532994923857, |
| "grad_norm": 0.6144711037846401, |
| "learning_rate": 3.692477062175289e-05, |
| "loss": 0.8234, |
| "step": 321 |
| }, |
| { |
| "epoch": 1.3076142131979696, |
| "grad_norm": 0.6489588668539846, |
| "learning_rate": 3.689446124534958e-05, |
| "loss": 0.8109, |
| "step": 322 |
| }, |
| { |
| "epoch": 1.3116751269035534, |
| "grad_norm": 0.4867211040381677, |
| "learning_rate": 3.686401580327799e-05, |
| "loss": 0.8459, |
| "step": 323 |
| }, |
| { |
| "epoch": 1.3157360406091372, |
| "grad_norm": 0.5157527312911335, |
| "learning_rate": 3.683343454074149e-05, |
| "loss": 0.8283, |
| "step": 324 |
| }, |
| { |
| "epoch": 1.3197969543147208, |
| "grad_norm": 0.43348577693844054, |
| "learning_rate": 3.6802717704037386e-05, |
| "loss": 0.8102, |
| "step": 325 |
| }, |
| { |
| "epoch": 1.3238578680203046, |
| "grad_norm": 0.42506958213886786, |
| "learning_rate": 3.6771865540554855e-05, |
| "loss": 0.8282, |
| "step": 326 |
| }, |
| { |
| "epoch": 1.3279187817258884, |
| "grad_norm": 0.5618717340049045, |
| "learning_rate": 3.674087829877297e-05, |
| "loss": 0.8293, |
| "step": 327 |
| }, |
| { |
| "epoch": 1.331979695431472, |
| "grad_norm": 0.5825627504697245, |
| "learning_rate": 3.6709756228258735e-05, |
| "loss": 0.843, |
| "step": 328 |
| }, |
| { |
| "epoch": 1.3360406091370558, |
| "grad_norm": 0.6532594098269143, |
| "learning_rate": 3.667849957966501e-05, |
| "loss": 0.8447, |
| "step": 329 |
| }, |
| { |
| "epoch": 1.3401015228426396, |
| "grad_norm": 0.6288100030104357, |
| "learning_rate": 3.6647108604728546e-05, |
| "loss": 0.8266, |
| "step": 330 |
| }, |
| { |
| "epoch": 1.3441624365482234, |
| "grad_norm": 0.5270083032515198, |
| "learning_rate": 3.661558355626795e-05, |
| "loss": 0.8523, |
| "step": 331 |
| }, |
| { |
| "epoch": 1.3482233502538072, |
| "grad_norm": 0.5100819666791492, |
| "learning_rate": 3.658392468818163e-05, |
| "loss": 0.8494, |
| "step": 332 |
| }, |
| { |
| "epoch": 1.3522842639593908, |
| "grad_norm": 0.5717607811074165, |
| "learning_rate": 3.655213225544574e-05, |
| "loss": 0.8437, |
| "step": 333 |
| }, |
| { |
| "epoch": 1.3563451776649746, |
| "grad_norm": 0.5472972569864112, |
| "learning_rate": 3.652020651411218e-05, |
| "loss": 0.87, |
| "step": 334 |
| }, |
| { |
| "epoch": 1.3604060913705585, |
| "grad_norm": 0.5932671337266103, |
| "learning_rate": 3.6488147721306474e-05, |
| "loss": 0.8496, |
| "step": 335 |
| }, |
| { |
| "epoch": 1.364467005076142, |
| "grad_norm": 0.6243921876876665, |
| "learning_rate": 3.645595613522574e-05, |
| "loss": 0.8363, |
| "step": 336 |
| }, |
| { |
| "epoch": 1.3685279187817259, |
| "grad_norm": 0.6036716093983425, |
| "learning_rate": 3.642363201513657e-05, |
| "loss": 0.8276, |
| "step": 337 |
| }, |
| { |
| "epoch": 1.3725888324873097, |
| "grad_norm": 0.48087432063851737, |
| "learning_rate": 3.6391175621373006e-05, |
| "loss": 0.8484, |
| "step": 338 |
| }, |
| { |
| "epoch": 1.3766497461928933, |
| "grad_norm": 0.4494954915301649, |
| "learning_rate": 3.6358587215334355e-05, |
| "loss": 0.8069, |
| "step": 339 |
| }, |
| { |
| "epoch": 1.380710659898477, |
| "grad_norm": 0.4499646351199436, |
| "learning_rate": 3.632586705948318e-05, |
| "loss": 0.8316, |
| "step": 340 |
| }, |
| { |
| "epoch": 1.384771573604061, |
| "grad_norm": 0.4720861417103376, |
| "learning_rate": 3.629301541734311e-05, |
| "loss": 0.824, |
| "step": 341 |
| }, |
| { |
| "epoch": 1.3888324873096447, |
| "grad_norm": 0.5384355848996392, |
| "learning_rate": 3.626003255349676e-05, |
| "loss": 0.8609, |
| "step": 342 |
| }, |
| { |
| "epoch": 1.3928934010152285, |
| "grad_norm": 0.5743578246745845, |
| "learning_rate": 3.622691873358357e-05, |
| "loss": 0.8279, |
| "step": 343 |
| }, |
| { |
| "epoch": 1.396954314720812, |
| "grad_norm": 0.40898866517926397, |
| "learning_rate": 3.61936742242977e-05, |
| "loss": 0.8324, |
| "step": 344 |
| }, |
| { |
| "epoch": 1.401015228426396, |
| "grad_norm": 0.5232633950374341, |
| "learning_rate": 3.6160299293385864e-05, |
| "loss": 0.8272, |
| "step": 345 |
| }, |
| { |
| "epoch": 1.4050761421319797, |
| "grad_norm": 0.4387075801608212, |
| "learning_rate": 3.612679420964516e-05, |
| "loss": 0.8586, |
| "step": 346 |
| }, |
| { |
| "epoch": 1.4091370558375633, |
| "grad_norm": 0.4111109623813353, |
| "learning_rate": 3.609315924292092e-05, |
| "loss": 0.8773, |
| "step": 347 |
| }, |
| { |
| "epoch": 1.4131979695431471, |
| "grad_norm": 0.419413559657943, |
| "learning_rate": 3.6059394664104554e-05, |
| "loss": 0.8407, |
| "step": 348 |
| }, |
| { |
| "epoch": 1.417258883248731, |
| "grad_norm": 0.5860828029960355, |
| "learning_rate": 3.602550074513133e-05, |
| "loss": 0.8367, |
| "step": 349 |
| }, |
| { |
| "epoch": 1.4213197969543148, |
| "grad_norm": 0.6514078271211347, |
| "learning_rate": 3.599147775897822e-05, |
| "loss": 0.796, |
| "step": 350 |
| }, |
| { |
| "epoch": 1.4253807106598986, |
| "grad_norm": 0.721387803404927, |
| "learning_rate": 3.595732597966167e-05, |
| "loss": 0.8467, |
| "step": 351 |
| }, |
| { |
| "epoch": 1.4294416243654822, |
| "grad_norm": 0.835493892770028, |
| "learning_rate": 3.592304568223542e-05, |
| "loss": 0.8333, |
| "step": 352 |
| }, |
| { |
| "epoch": 1.433502538071066, |
| "grad_norm": 0.8003063387064198, |
| "learning_rate": 3.588863714278826e-05, |
| "loss": 0.8331, |
| "step": 353 |
| }, |
| { |
| "epoch": 1.4375634517766498, |
| "grad_norm": 0.7636733471270608, |
| "learning_rate": 3.585410063844186e-05, |
| "loss": 0.7958, |
| "step": 354 |
| }, |
| { |
| "epoch": 1.4416243654822334, |
| "grad_norm": 0.5519704825353865, |
| "learning_rate": 3.581943644734846e-05, |
| "loss": 0.8434, |
| "step": 355 |
| }, |
| { |
| "epoch": 1.4456852791878172, |
| "grad_norm": 0.4065090146578767, |
| "learning_rate": 3.578464484868869e-05, |
| "loss": 0.8227, |
| "step": 356 |
| }, |
| { |
| "epoch": 1.449746192893401, |
| "grad_norm": 0.7514051814118571, |
| "learning_rate": 3.5749726122669316e-05, |
| "loss": 0.8232, |
| "step": 357 |
| }, |
| { |
| "epoch": 1.4538071065989848, |
| "grad_norm": 0.8293720347046424, |
| "learning_rate": 3.5714680550520943e-05, |
| "loss": 0.8515, |
| "step": 358 |
| }, |
| { |
| "epoch": 1.4578680203045686, |
| "grad_norm": 0.49087240025620166, |
| "learning_rate": 3.5679508414495794e-05, |
| "loss": 0.8314, |
| "step": 359 |
| }, |
| { |
| "epoch": 1.4619289340101522, |
| "grad_norm": 0.489734961840385, |
| "learning_rate": 3.564420999786543e-05, |
| "loss": 0.8503, |
| "step": 360 |
| }, |
| { |
| "epoch": 1.465989847715736, |
| "grad_norm": 0.9007290974785335, |
| "learning_rate": 3.560878558491842e-05, |
| "loss": 0.8377, |
| "step": 361 |
| }, |
| { |
| "epoch": 1.4700507614213199, |
| "grad_norm": 1.0330195191711173, |
| "learning_rate": 3.5573235460958145e-05, |
| "loss": 0.8424, |
| "step": 362 |
| }, |
| { |
| "epoch": 1.4741116751269034, |
| "grad_norm": 0.7420925711864492, |
| "learning_rate": 3.553755991230039e-05, |
| "loss": 0.8244, |
| "step": 363 |
| }, |
| { |
| "epoch": 1.4781725888324873, |
| "grad_norm": 0.5819889550053469, |
| "learning_rate": 3.5501759226271144e-05, |
| "loss": 0.8274, |
| "step": 364 |
| }, |
| { |
| "epoch": 1.482233502538071, |
| "grad_norm": 0.5377088402951758, |
| "learning_rate": 3.546583369120419e-05, |
| "loss": 0.8354, |
| "step": 365 |
| }, |
| { |
| "epoch": 1.4862944162436549, |
| "grad_norm": 0.6410653258797205, |
| "learning_rate": 3.5429783596438864e-05, |
| "loss": 0.8679, |
| "step": 366 |
| }, |
| { |
| "epoch": 1.4903553299492387, |
| "grad_norm": 0.6341081249286764, |
| "learning_rate": 3.539360923231766e-05, |
| "loss": 0.8258, |
| "step": 367 |
| }, |
| { |
| "epoch": 1.4944162436548223, |
| "grad_norm": 0.5893440514843585, |
| "learning_rate": 3.535731089018394e-05, |
| "loss": 0.844, |
| "step": 368 |
| }, |
| { |
| "epoch": 1.498477157360406, |
| "grad_norm": 0.49173933113343044, |
| "learning_rate": 3.532088886237956e-05, |
| "loss": 0.8373, |
| "step": 369 |
| }, |
| { |
| "epoch": 1.50253807106599, |
| "grad_norm": 0.43745055806759486, |
| "learning_rate": 3.528434344224253e-05, |
| "loss": 0.8273, |
| "step": 370 |
| }, |
| { |
| "epoch": 1.5065989847715735, |
| "grad_norm": 0.4212287691935103, |
| "learning_rate": 3.524767492410464e-05, |
| "loss": 0.8364, |
| "step": 371 |
| }, |
| { |
| "epoch": 1.5106598984771573, |
| "grad_norm": 0.4390928272699896, |
| "learning_rate": 3.521088360328908e-05, |
| "loss": 0.8342, |
| "step": 372 |
| }, |
| { |
| "epoch": 1.5147208121827411, |
| "grad_norm": 0.411013391753576, |
| "learning_rate": 3.517396977610811e-05, |
| "loss": 0.8261, |
| "step": 373 |
| }, |
| { |
| "epoch": 1.5187817258883247, |
| "grad_norm": 0.49000931323135516, |
| "learning_rate": 3.5136933739860595e-05, |
| "loss": 0.8442, |
| "step": 374 |
| }, |
| { |
| "epoch": 1.5228426395939088, |
| "grad_norm": 0.47799622425356125, |
| "learning_rate": 3.509977579282971e-05, |
| "loss": 0.8352, |
| "step": 375 |
| }, |
| { |
| "epoch": 1.5269035532994923, |
| "grad_norm": 0.4732295258064042, |
| "learning_rate": 3.5062496234280424e-05, |
| "loss": 0.8607, |
| "step": 376 |
| }, |
| { |
| "epoch": 1.5309644670050762, |
| "grad_norm": 0.48009589490827587, |
| "learning_rate": 3.502509536445719e-05, |
| "loss": 0.8386, |
| "step": 377 |
| }, |
| { |
| "epoch": 1.53502538071066, |
| "grad_norm": 0.44051482756806337, |
| "learning_rate": 3.498757348458147e-05, |
| "loss": 0.8307, |
| "step": 378 |
| }, |
| { |
| "epoch": 1.5390862944162436, |
| "grad_norm": 0.5846582764740224, |
| "learning_rate": 3.4949930896849324e-05, |
| "loss": 0.8346, |
| "step": 379 |
| }, |
| { |
| "epoch": 1.5431472081218274, |
| "grad_norm": 0.4983006044925057, |
| "learning_rate": 3.491216790442899e-05, |
| "loss": 0.8367, |
| "step": 380 |
| }, |
| { |
| "epoch": 1.5472081218274112, |
| "grad_norm": 0.5747074346507772, |
| "learning_rate": 3.487428481145839e-05, |
| "loss": 0.8557, |
| "step": 381 |
| }, |
| { |
| "epoch": 1.5512690355329948, |
| "grad_norm": 0.47143284734196855, |
| "learning_rate": 3.483628192304278e-05, |
| "loss": 0.8253, |
| "step": 382 |
| }, |
| { |
| "epoch": 1.5553299492385788, |
| "grad_norm": 0.4736548286621583, |
| "learning_rate": 3.479815954525219e-05, |
| "loss": 0.8382, |
| "step": 383 |
| }, |
| { |
| "epoch": 1.5593908629441624, |
| "grad_norm": 0.5561693427903793, |
| "learning_rate": 3.475991798511899e-05, |
| "loss": 0.8064, |
| "step": 384 |
| }, |
| { |
| "epoch": 1.5634517766497462, |
| "grad_norm": 0.5391802839736144, |
| "learning_rate": 3.4721557550635464e-05, |
| "loss": 0.8641, |
| "step": 385 |
| }, |
| { |
| "epoch": 1.56751269035533, |
| "grad_norm": 0.7309692409651939, |
| "learning_rate": 3.468307855075128e-05, |
| "loss": 0.8143, |
| "step": 386 |
| }, |
| { |
| "epoch": 1.5715736040609136, |
| "grad_norm": 0.6600441036636492, |
| "learning_rate": 3.4644481295371005e-05, |
| "loss": 0.8337, |
| "step": 387 |
| }, |
| { |
| "epoch": 1.5756345177664974, |
| "grad_norm": 0.5717141236160842, |
| "learning_rate": 3.460576609535163e-05, |
| "loss": 0.8494, |
| "step": 388 |
| }, |
| { |
| "epoch": 1.5796954314720812, |
| "grad_norm": 0.6238447468803623, |
| "learning_rate": 3.456693326250006e-05, |
| "loss": 0.8285, |
| "step": 389 |
| }, |
| { |
| "epoch": 1.5837563451776648, |
| "grad_norm": 0.591732320416858, |
| "learning_rate": 3.452798310957058e-05, |
| "loss": 0.8515, |
| "step": 390 |
| }, |
| { |
| "epoch": 1.5878172588832489, |
| "grad_norm": 0.5936459476711158, |
| "learning_rate": 3.4488915950262386e-05, |
| "loss": 0.8202, |
| "step": 391 |
| }, |
| { |
| "epoch": 1.5918781725888325, |
| "grad_norm": 0.37671136708665454, |
| "learning_rate": 3.4449732099216985e-05, |
| "loss": 0.8284, |
| "step": 392 |
| }, |
| { |
| "epoch": 1.5959390862944163, |
| "grad_norm": 0.43963529330306667, |
| "learning_rate": 3.441043187201574e-05, |
| "loss": 0.8435, |
| "step": 393 |
| }, |
| { |
| "epoch": 1.6, |
| "grad_norm": 0.3993677880177503, |
| "learning_rate": 3.437101558517728e-05, |
| "loss": 0.8414, |
| "step": 394 |
| }, |
| { |
| "epoch": 1.6040609137055837, |
| "grad_norm": 0.4185773945006266, |
| "learning_rate": 3.433148355615496e-05, |
| "loss": 0.854, |
| "step": 395 |
| }, |
| { |
| "epoch": 1.6081218274111675, |
| "grad_norm": 0.3563179666586982, |
| "learning_rate": 3.4291836103334294e-05, |
| "loss": 0.8554, |
| "step": 396 |
| }, |
| { |
| "epoch": 1.6121827411167513, |
| "grad_norm": 0.42707544455112123, |
| "learning_rate": 3.425207354603043e-05, |
| "loss": 0.835, |
| "step": 397 |
| }, |
| { |
| "epoch": 1.616243654822335, |
| "grad_norm": 0.3505764756546602, |
| "learning_rate": 3.421219620448553e-05, |
| "loss": 0.8205, |
| "step": 398 |
| }, |
| { |
| "epoch": 1.620304568527919, |
| "grad_norm": 0.41681274804425994, |
| "learning_rate": 3.417220439986623e-05, |
| "loss": 0.8329, |
| "step": 399 |
| }, |
| { |
| "epoch": 1.6243654822335025, |
| "grad_norm": 0.3987237346071938, |
| "learning_rate": 3.4132098454261024e-05, |
| "loss": 0.8576, |
| "step": 400 |
| }, |
| { |
| "epoch": 1.6284263959390863, |
| "grad_norm": 0.4342758632109407, |
| "learning_rate": 3.4091878690677676e-05, |
| "loss": 0.8236, |
| "step": 401 |
| }, |
| { |
| "epoch": 1.6324873096446701, |
| "grad_norm": 0.40246207331357586, |
| "learning_rate": 3.405154543304065e-05, |
| "loss": 0.8402, |
| "step": 402 |
| }, |
| { |
| "epoch": 1.6365482233502537, |
| "grad_norm": 0.36968385873105397, |
| "learning_rate": 3.401109900618843e-05, |
| "loss": 0.8133, |
| "step": 403 |
| }, |
| { |
| "epoch": 1.6406091370558376, |
| "grad_norm": 0.470626091829263, |
| "learning_rate": 3.3970539735870996e-05, |
| "loss": 0.822, |
| "step": 404 |
| }, |
| { |
| "epoch": 1.6446700507614214, |
| "grad_norm": 0.42274267758007883, |
| "learning_rate": 3.392986794874714e-05, |
| "loss": 0.8433, |
| "step": 405 |
| }, |
| { |
| "epoch": 1.648730964467005, |
| "grad_norm": 0.42581483050039326, |
| "learning_rate": 3.388908397238184e-05, |
| "loss": 0.843, |
| "step": 406 |
| }, |
| { |
| "epoch": 1.652791878172589, |
| "grad_norm": 0.3811781652219886, |
| "learning_rate": 3.384818813524362e-05, |
| "loss": 0.8234, |
| "step": 407 |
| }, |
| { |
| "epoch": 1.6568527918781726, |
| "grad_norm": 0.4378560649814688, |
| "learning_rate": 3.380718076670195e-05, |
| "loss": 0.8711, |
| "step": 408 |
| }, |
| { |
| "epoch": 1.6609137055837564, |
| "grad_norm": 0.4380933961236448, |
| "learning_rate": 3.376606219702454e-05, |
| "loss": 0.8444, |
| "step": 409 |
| }, |
| { |
| "epoch": 1.6649746192893402, |
| "grad_norm": 0.4784341986704259, |
| "learning_rate": 3.372483275737468e-05, |
| "loss": 0.818, |
| "step": 410 |
| }, |
| { |
| "epoch": 1.6690355329949238, |
| "grad_norm": 0.40186769373815034, |
| "learning_rate": 3.368349277980861e-05, |
| "loss": 0.8346, |
| "step": 411 |
| }, |
| { |
| "epoch": 1.6730964467005076, |
| "grad_norm": 0.47286683395878815, |
| "learning_rate": 3.3642042597272844e-05, |
| "loss": 0.8399, |
| "step": 412 |
| }, |
| { |
| "epoch": 1.6771573604060914, |
| "grad_norm": 0.4215315216900389, |
| "learning_rate": 3.360048254360144e-05, |
| "loss": 0.829, |
| "step": 413 |
| }, |
| { |
| "epoch": 1.681218274111675, |
| "grad_norm": 0.4158266141298097, |
| "learning_rate": 3.355881295351336e-05, |
| "loss": 0.8126, |
| "step": 414 |
| }, |
| { |
| "epoch": 1.6852791878172588, |
| "grad_norm": 0.4529742870865966, |
| "learning_rate": 3.351703416260975e-05, |
| "loss": 0.8411, |
| "step": 415 |
| }, |
| { |
| "epoch": 1.6893401015228426, |
| "grad_norm": 0.49074538259207634, |
| "learning_rate": 3.347514650737126e-05, |
| "loss": 0.8421, |
| "step": 416 |
| }, |
| { |
| "epoch": 1.6934010152284262, |
| "grad_norm": 0.4579236760357225, |
| "learning_rate": 3.3433150325155295e-05, |
| "loss": 0.8288, |
| "step": 417 |
| }, |
| { |
| "epoch": 1.6974619289340103, |
| "grad_norm": 0.4311924007579793, |
| "learning_rate": 3.339104595419334e-05, |
| "loss": 0.8647, |
| "step": 418 |
| }, |
| { |
| "epoch": 1.7015228426395939, |
| "grad_norm": 0.41180356661698275, |
| "learning_rate": 3.3348833733588204e-05, |
| "loss": 0.8128, |
| "step": 419 |
| }, |
| { |
| "epoch": 1.7055837563451777, |
| "grad_norm": 0.45089835442765447, |
| "learning_rate": 3.3306514003311305e-05, |
| "loss": 0.856, |
| "step": 420 |
| }, |
| { |
| "epoch": 1.7096446700507615, |
| "grad_norm": 0.4666342651590086, |
| "learning_rate": 3.326408710419996e-05, |
| "loss": 0.824, |
| "step": 421 |
| }, |
| { |
| "epoch": 1.713705583756345, |
| "grad_norm": 0.5582725502158407, |
| "learning_rate": 3.322155337795454e-05, |
| "loss": 0.8427, |
| "step": 422 |
| }, |
| { |
| "epoch": 1.7177664974619289, |
| "grad_norm": 0.5236657610137029, |
| "learning_rate": 3.317891316713587e-05, |
| "loss": 0.8519, |
| "step": 423 |
| }, |
| { |
| "epoch": 1.7218274111675127, |
| "grad_norm": 0.40545397300113567, |
| "learning_rate": 3.313616681516231e-05, |
| "loss": 0.8466, |
| "step": 424 |
| }, |
| { |
| "epoch": 1.7258883248730963, |
| "grad_norm": 0.5927784064084088, |
| "learning_rate": 3.309331466630713e-05, |
| "loss": 0.8392, |
| "step": 425 |
| }, |
| { |
| "epoch": 1.7299492385786803, |
| "grad_norm": 0.5733524087638878, |
| "learning_rate": 3.305035706569563e-05, |
| "loss": 0.8473, |
| "step": 426 |
| }, |
| { |
| "epoch": 1.734010152284264, |
| "grad_norm": 0.47082937583539364, |
| "learning_rate": 3.3007294359302433e-05, |
| "loss": 0.8025, |
| "step": 427 |
| }, |
| { |
| "epoch": 1.7380710659898477, |
| "grad_norm": 0.5535127223714374, |
| "learning_rate": 3.296412689394864e-05, |
| "loss": 0.842, |
| "step": 428 |
| }, |
| { |
| "epoch": 1.7421319796954315, |
| "grad_norm": 0.5873732480320274, |
| "learning_rate": 3.292085501729909e-05, |
| "loss": 0.8306, |
| "step": 429 |
| }, |
| { |
| "epoch": 1.7461928934010151, |
| "grad_norm": 0.5252482534668429, |
| "learning_rate": 3.2877479077859534e-05, |
| "loss": 0.8562, |
| "step": 430 |
| }, |
| { |
| "epoch": 1.750253807106599, |
| "grad_norm": 0.528621381905603, |
| "learning_rate": 3.283399942497381e-05, |
| "loss": 0.8182, |
| "step": 431 |
| }, |
| { |
| "epoch": 1.7543147208121828, |
| "grad_norm": 0.4430118134283268, |
| "learning_rate": 3.279041640882108e-05, |
| "loss": 0.8382, |
| "step": 432 |
| }, |
| { |
| "epoch": 1.7583756345177664, |
| "grad_norm": 0.3794230007735607, |
| "learning_rate": 3.2746730380412964e-05, |
| "loss": 0.837, |
| "step": 433 |
| }, |
| { |
| "epoch": 1.7624365482233504, |
| "grad_norm": 0.40453993920595976, |
| "learning_rate": 3.2702941691590726e-05, |
| "loss": 0.823, |
| "step": 434 |
| }, |
| { |
| "epoch": 1.766497461928934, |
| "grad_norm": 0.4338084789288484, |
| "learning_rate": 3.265905069502244e-05, |
| "loss": 0.8119, |
| "step": 435 |
| }, |
| { |
| "epoch": 1.7705583756345178, |
| "grad_norm": 0.4351239967530858, |
| "learning_rate": 3.261505774420016e-05, |
| "loss": 0.8422, |
| "step": 436 |
| }, |
| { |
| "epoch": 1.7746192893401016, |
| "grad_norm": 0.36111813045701585, |
| "learning_rate": 3.257096319343707e-05, |
| "loss": 0.8305, |
| "step": 437 |
| }, |
| { |
| "epoch": 1.7786802030456852, |
| "grad_norm": 0.462148299460509, |
| "learning_rate": 3.2526767397864614e-05, |
| "loss": 0.8363, |
| "step": 438 |
| }, |
| { |
| "epoch": 1.782741116751269, |
| "grad_norm": 0.69550528185203, |
| "learning_rate": 3.248247071342966e-05, |
| "loss": 0.8135, |
| "step": 439 |
| }, |
| { |
| "epoch": 1.7868020304568528, |
| "grad_norm": 0.5376681744893077, |
| "learning_rate": 3.243807349689161e-05, |
| "loss": 0.8319, |
| "step": 440 |
| }, |
| { |
| "epoch": 1.7908629441624364, |
| "grad_norm": 0.5451580257880063, |
| "learning_rate": 3.2393576105819544e-05, |
| "loss": 0.8399, |
| "step": 441 |
| }, |
| { |
| "epoch": 1.7949238578680204, |
| "grad_norm": 0.4163395496544942, |
| "learning_rate": 3.2348978898589333e-05, |
| "loss": 0.8389, |
| "step": 442 |
| }, |
| { |
| "epoch": 1.798984771573604, |
| "grad_norm": 0.5208179848639264, |
| "learning_rate": 3.230428223438075e-05, |
| "loss": 0.8267, |
| "step": 443 |
| }, |
| { |
| "epoch": 1.8030456852791878, |
| "grad_norm": 0.41962299024851224, |
| "learning_rate": 3.225948647317459e-05, |
| "loss": 0.8178, |
| "step": 444 |
| }, |
| { |
| "epoch": 1.8071065989847717, |
| "grad_norm": 0.5468355109349753, |
| "learning_rate": 3.2214591975749745e-05, |
| "loss": 0.8445, |
| "step": 445 |
| }, |
| { |
| "epoch": 1.8111675126903553, |
| "grad_norm": 0.39121359491390423, |
| "learning_rate": 3.216959910368034e-05, |
| "loss": 0.8527, |
| "step": 446 |
| }, |
| { |
| "epoch": 1.815228426395939, |
| "grad_norm": 0.5495738573910421, |
| "learning_rate": 3.212450821933277e-05, |
| "loss": 0.8016, |
| "step": 447 |
| }, |
| { |
| "epoch": 1.8192893401015229, |
| "grad_norm": 0.5259377037578279, |
| "learning_rate": 3.207931968586281e-05, |
| "loss": 0.8482, |
| "step": 448 |
| }, |
| { |
| "epoch": 1.8233502538071065, |
| "grad_norm": 0.42176938334536146, |
| "learning_rate": 3.203403386721272e-05, |
| "loss": 0.8455, |
| "step": 449 |
| }, |
| { |
| "epoch": 1.8274111675126905, |
| "grad_norm": 0.6240398919356389, |
| "learning_rate": 3.1988651128108245e-05, |
| "loss": 0.8517, |
| "step": 450 |
| }, |
| { |
| "epoch": 1.831472081218274, |
| "grad_norm": 0.4540136297979541, |
| "learning_rate": 3.194317183405573e-05, |
| "loss": 0.8154, |
| "step": 451 |
| }, |
| { |
| "epoch": 1.835532994923858, |
| "grad_norm": 0.5657467074646221, |
| "learning_rate": 3.189759635133914e-05, |
| "loss": 0.8365, |
| "step": 452 |
| }, |
| { |
| "epoch": 1.8395939086294417, |
| "grad_norm": 0.5031354632398635, |
| "learning_rate": 3.185192504701718e-05, |
| "loss": 0.7963, |
| "step": 453 |
| }, |
| { |
| "epoch": 1.8436548223350253, |
| "grad_norm": 0.45275222086451605, |
| "learning_rate": 3.1806158288920234e-05, |
| "loss": 0.8039, |
| "step": 454 |
| }, |
| { |
| "epoch": 1.8477157360406091, |
| "grad_norm": 0.44591852875018373, |
| "learning_rate": 3.1760296445647477e-05, |
| "loss": 0.8236, |
| "step": 455 |
| }, |
| { |
| "epoch": 1.851776649746193, |
| "grad_norm": 0.3622969054011584, |
| "learning_rate": 3.1714339886563896e-05, |
| "loss": 0.8287, |
| "step": 456 |
| }, |
| { |
| "epoch": 1.8558375634517765, |
| "grad_norm": 0.5602662584307379, |
| "learning_rate": 3.166828898179731e-05, |
| "loss": 0.8136, |
| "step": 457 |
| }, |
| { |
| "epoch": 1.8598984771573606, |
| "grad_norm": 0.6134443242294224, |
| "learning_rate": 3.162214410223536e-05, |
| "loss": 0.835, |
| "step": 458 |
| }, |
| { |
| "epoch": 1.8639593908629442, |
| "grad_norm": 0.5200614850287897, |
| "learning_rate": 3.157590561952257e-05, |
| "loss": 0.8339, |
| "step": 459 |
| }, |
| { |
| "epoch": 1.868020304568528, |
| "grad_norm": 0.4335544913469135, |
| "learning_rate": 3.152957390605732e-05, |
| "loss": 0.8398, |
| "step": 460 |
| }, |
| { |
| "epoch": 1.8720812182741118, |
| "grad_norm": 0.372840474288923, |
| "learning_rate": 3.148314933498886e-05, |
| "loss": 0.8472, |
| "step": 461 |
| }, |
| { |
| "epoch": 1.8761421319796954, |
| "grad_norm": 0.4706597738146551, |
| "learning_rate": 3.143663228021431e-05, |
| "loss": 0.8015, |
| "step": 462 |
| }, |
| { |
| "epoch": 1.8802030456852792, |
| "grad_norm": 0.5741709424555732, |
| "learning_rate": 3.1390023116375624e-05, |
| "loss": 0.8454, |
| "step": 463 |
| }, |
| { |
| "epoch": 1.884263959390863, |
| "grad_norm": 0.39803678460941144, |
| "learning_rate": 3.134332221885661e-05, |
| "loss": 0.8348, |
| "step": 464 |
| }, |
| { |
| "epoch": 1.8883248730964466, |
| "grad_norm": 0.41271462321957536, |
| "learning_rate": 3.129652996377987e-05, |
| "loss": 0.8504, |
| "step": 465 |
| }, |
| { |
| "epoch": 1.8923857868020306, |
| "grad_norm": 0.5564905292094973, |
| "learning_rate": 3.12496467280038e-05, |
| "loss": 0.849, |
| "step": 466 |
| }, |
| { |
| "epoch": 1.8964467005076142, |
| "grad_norm": 0.4961563840640516, |
| "learning_rate": 3.120267288911952e-05, |
| "loss": 0.8492, |
| "step": 467 |
| }, |
| { |
| "epoch": 1.9005076142131978, |
| "grad_norm": 0.5531674725037493, |
| "learning_rate": 3.11556088254479e-05, |
| "loss": 0.827, |
| "step": 468 |
| }, |
| { |
| "epoch": 1.9045685279187818, |
| "grad_norm": 0.43399507010793253, |
| "learning_rate": 3.11084549160364e-05, |
| "loss": 0.8574, |
| "step": 469 |
| }, |
| { |
| "epoch": 1.9086294416243654, |
| "grad_norm": 0.4029498159888287, |
| "learning_rate": 3.106121154065615e-05, |
| "loss": 0.8298, |
| "step": 470 |
| }, |
| { |
| "epoch": 1.9126903553299492, |
| "grad_norm": 0.488464255946036, |
| "learning_rate": 3.1013879079798805e-05, |
| "loss": 0.8627, |
| "step": 471 |
| }, |
| { |
| "epoch": 1.916751269035533, |
| "grad_norm": 0.40353283850652966, |
| "learning_rate": 3.096645791467348e-05, |
| "loss": 0.8124, |
| "step": 472 |
| }, |
| { |
| "epoch": 1.9208121827411166, |
| "grad_norm": 0.33219837444905637, |
| "learning_rate": 3.091894842720373e-05, |
| "loss": 0.8261, |
| "step": 473 |
| }, |
| { |
| "epoch": 1.9248730964467005, |
| "grad_norm": 0.4683100154613775, |
| "learning_rate": 3.0871351000024425e-05, |
| "loss": 0.8602, |
| "step": 474 |
| }, |
| { |
| "epoch": 1.9289340101522843, |
| "grad_norm": 0.4454596129143337, |
| "learning_rate": 3.0823666016478716e-05, |
| "loss": 0.8393, |
| "step": 475 |
| }, |
| { |
| "epoch": 1.9329949238578679, |
| "grad_norm": 0.44938194473030474, |
| "learning_rate": 3.0775893860614896e-05, |
| "loss": 0.8192, |
| "step": 476 |
| }, |
| { |
| "epoch": 1.937055837563452, |
| "grad_norm": 0.3816472694149887, |
| "learning_rate": 3.0728034917183336e-05, |
| "loss": 0.8487, |
| "step": 477 |
| }, |
| { |
| "epoch": 1.9411167512690355, |
| "grad_norm": 0.3778446539669071, |
| "learning_rate": 3.06800895716334e-05, |
| "loss": 0.8223, |
| "step": 478 |
| }, |
| { |
| "epoch": 1.9451776649746193, |
| "grad_norm": 0.4571312100432892, |
| "learning_rate": 3.063205821011029e-05, |
| "loss": 0.8272, |
| "step": 479 |
| }, |
| { |
| "epoch": 1.9492385786802031, |
| "grad_norm": 0.38238231314243787, |
| "learning_rate": 3.0583941219452016e-05, |
| "loss": 0.818, |
| "step": 480 |
| }, |
| { |
| "epoch": 1.9532994923857867, |
| "grad_norm": 0.4292022249425022, |
| "learning_rate": 3.053573898718618e-05, |
| "loss": 0.8332, |
| "step": 481 |
| }, |
| { |
| "epoch": 1.9573604060913705, |
| "grad_norm": 0.4222897716962284, |
| "learning_rate": 3.0487451901526956e-05, |
| "loss": 0.8419, |
| "step": 482 |
| }, |
| { |
| "epoch": 1.9614213197969543, |
| "grad_norm": 0.5095951074691307, |
| "learning_rate": 3.0439080351371875e-05, |
| "loss": 0.837, |
| "step": 483 |
| }, |
| { |
| "epoch": 1.965482233502538, |
| "grad_norm": 0.3934653880091596, |
| "learning_rate": 3.0390624726298764e-05, |
| "loss": 0.8305, |
| "step": 484 |
| }, |
| { |
| "epoch": 1.969543147208122, |
| "grad_norm": 0.4236604675793409, |
| "learning_rate": 3.034208541656255e-05, |
| "loss": 0.8445, |
| "step": 485 |
| }, |
| { |
| "epoch": 1.9736040609137055, |
| "grad_norm": 0.42933859663999613, |
| "learning_rate": 3.029346281309218e-05, |
| "loss": 0.8269, |
| "step": 486 |
| }, |
| { |
| "epoch": 1.9776649746192894, |
| "grad_norm": 0.4005271032147896, |
| "learning_rate": 3.0244757307487415e-05, |
| "loss": 0.8401, |
| "step": 487 |
| }, |
| { |
| "epoch": 1.9817258883248732, |
| "grad_norm": 0.4568015114196979, |
| "learning_rate": 3.019596929201569e-05, |
| "loss": 0.8448, |
| "step": 488 |
| }, |
| { |
| "epoch": 1.9857868020304568, |
| "grad_norm": 0.4501516144300372, |
| "learning_rate": 3.0147099159608985e-05, |
| "loss": 0.8276, |
| "step": 489 |
| }, |
| { |
| "epoch": 1.9898477157360406, |
| "grad_norm": 0.4562428309574424, |
| "learning_rate": 3.0098147303860616e-05, |
| "loss": 0.8377, |
| "step": 490 |
| }, |
| { |
| "epoch": 1.9939086294416244, |
| "grad_norm": 0.3718375146687905, |
| "learning_rate": 3.0049114119022117e-05, |
| "loss": 0.8304, |
| "step": 491 |
| }, |
| { |
| "epoch": 1.997969543147208, |
| "grad_norm": 0.3442612163540122, |
| "learning_rate": 3.0000000000000004e-05, |
| "loss": 0.836, |
| "step": 492 |
| }, |
| { |
| "epoch": 2.002030456852792, |
| "grad_norm": 0.64891394208042, |
| "learning_rate": 2.995080534235264e-05, |
| "loss": 1.405, |
| "step": 493 |
| }, |
| { |
| "epoch": 2.0060913705583756, |
| "grad_norm": 0.8652463421629549, |
| "learning_rate": 2.9901530542287044e-05, |
| "loss": 0.8131, |
| "step": 494 |
| }, |
| { |
| "epoch": 2.010152284263959, |
| "grad_norm": 0.9348779273473778, |
| "learning_rate": 2.9852175996655676e-05, |
| "loss": 0.7784, |
| "step": 495 |
| }, |
| { |
| "epoch": 2.0142131979695432, |
| "grad_norm": 0.6438173289423972, |
| "learning_rate": 2.980274210295326e-05, |
| "loss": 0.8134, |
| "step": 496 |
| }, |
| { |
| "epoch": 2.018274111675127, |
| "grad_norm": 0.5277100925611112, |
| "learning_rate": 2.9753229259313578e-05, |
| "loss": 0.7794, |
| "step": 497 |
| }, |
| { |
| "epoch": 2.022335025380711, |
| "grad_norm": 0.49448138330918556, |
| "learning_rate": 2.9703637864506274e-05, |
| "loss": 0.7722, |
| "step": 498 |
| }, |
| { |
| "epoch": 2.0263959390862945, |
| "grad_norm": 0.43315896786743613, |
| "learning_rate": 2.965396831793362e-05, |
| "loss": 0.7732, |
| "step": 499 |
| }, |
| { |
| "epoch": 2.030456852791878, |
| "grad_norm": 0.6072775668922821, |
| "learning_rate": 2.9604221019627316e-05, |
| "loss": 0.8156, |
| "step": 500 |
| }, |
| { |
| "epoch": 2.034517766497462, |
| "grad_norm": 0.48662773123353975, |
| "learning_rate": 2.955439637024526e-05, |
| "loss": 0.7899, |
| "step": 501 |
| }, |
| { |
| "epoch": 2.0385786802030457, |
| "grad_norm": 0.5358077374013118, |
| "learning_rate": 2.9504494771068334e-05, |
| "loss": 0.8169, |
| "step": 502 |
| }, |
| { |
| "epoch": 2.0426395939086293, |
| "grad_norm": 0.4538550735736086, |
| "learning_rate": 2.9454516623997156e-05, |
| "loss": 0.7856, |
| "step": 503 |
| }, |
| { |
| "epoch": 2.0467005076142133, |
| "grad_norm": 0.40460715149019766, |
| "learning_rate": 2.9404462331548847e-05, |
| "loss": 0.7905, |
| "step": 504 |
| }, |
| { |
| "epoch": 2.050761421319797, |
| "grad_norm": 0.5198564106693823, |
| "learning_rate": 2.93543322968538e-05, |
| "loss": 0.7688, |
| "step": 505 |
| }, |
| { |
| "epoch": 2.054822335025381, |
| "grad_norm": 0.429772606498242, |
| "learning_rate": 2.9304126923652428e-05, |
| "loss": 0.7712, |
| "step": 506 |
| }, |
| { |
| "epoch": 2.0588832487309645, |
| "grad_norm": 0.46558086367866325, |
| "learning_rate": 2.9253846616291896e-05, |
| "loss": 0.772, |
| "step": 507 |
| }, |
| { |
| "epoch": 2.062944162436548, |
| "grad_norm": 0.5051467286549566, |
| "learning_rate": 2.9203491779722896e-05, |
| "loss": 0.7949, |
| "step": 508 |
| }, |
| { |
| "epoch": 2.067005076142132, |
| "grad_norm": 0.3657620548936342, |
| "learning_rate": 2.9153062819496357e-05, |
| "loss": 0.769, |
| "step": 509 |
| }, |
| { |
| "epoch": 2.0710659898477157, |
| "grad_norm": 0.43776290565561105, |
| "learning_rate": 2.9102560141760178e-05, |
| "loss": 0.7881, |
| "step": 510 |
| }, |
| { |
| "epoch": 2.0751269035532993, |
| "grad_norm": 0.44747061775202096, |
| "learning_rate": 2.9051984153256004e-05, |
| "loss": 0.7857, |
| "step": 511 |
| }, |
| { |
| "epoch": 2.0791878172588834, |
| "grad_norm": 0.3783287428915489, |
| "learning_rate": 2.900133526131588e-05, |
| "loss": 0.7828, |
| "step": 512 |
| }, |
| { |
| "epoch": 2.083248730964467, |
| "grad_norm": 0.35726606313433734, |
| "learning_rate": 2.8950613873859025e-05, |
| "loss": 0.7903, |
| "step": 513 |
| }, |
| { |
| "epoch": 2.087309644670051, |
| "grad_norm": 0.37007618024489103, |
| "learning_rate": 2.8899820399388515e-05, |
| "loss": 0.7934, |
| "step": 514 |
| }, |
| { |
| "epoch": 2.0913705583756346, |
| "grad_norm": 0.39161406647004077, |
| "learning_rate": 2.8848955246988012e-05, |
| "loss": 0.7873, |
| "step": 515 |
| }, |
| { |
| "epoch": 2.095431472081218, |
| "grad_norm": 0.4115642656408347, |
| "learning_rate": 2.879801882631847e-05, |
| "loss": 0.7881, |
| "step": 516 |
| }, |
| { |
| "epoch": 2.099492385786802, |
| "grad_norm": 0.4671805677365532, |
| "learning_rate": 2.8747011547614808e-05, |
| "loss": 0.7885, |
| "step": 517 |
| }, |
| { |
| "epoch": 2.103553299492386, |
| "grad_norm": 0.4904240725705561, |
| "learning_rate": 2.8695933821682635e-05, |
| "loss": 0.7814, |
| "step": 518 |
| }, |
| { |
| "epoch": 2.1076142131979694, |
| "grad_norm": 0.4139929184686476, |
| "learning_rate": 2.864478605989494e-05, |
| "loss": 0.7931, |
| "step": 519 |
| }, |
| { |
| "epoch": 2.1116751269035534, |
| "grad_norm": 0.48786396999136983, |
| "learning_rate": 2.8593568674188765e-05, |
| "loss": 0.7622, |
| "step": 520 |
| }, |
| { |
| "epoch": 2.115736040609137, |
| "grad_norm": 0.4422834030841654, |
| "learning_rate": 2.8542282077061892e-05, |
| "loss": 0.7857, |
| "step": 521 |
| }, |
| { |
| "epoch": 2.1197969543147206, |
| "grad_norm": 0.4276585611401357, |
| "learning_rate": 2.8490926681569523e-05, |
| "loss": 0.7993, |
| "step": 522 |
| }, |
| { |
| "epoch": 2.1238578680203046, |
| "grad_norm": 0.5074429805485987, |
| "learning_rate": 2.8439502901320956e-05, |
| "loss": 0.7915, |
| "step": 523 |
| }, |
| { |
| "epoch": 2.127918781725888, |
| "grad_norm": 0.4057127068652121, |
| "learning_rate": 2.8388011150476237e-05, |
| "loss": 0.7939, |
| "step": 524 |
| }, |
| { |
| "epoch": 2.1319796954314723, |
| "grad_norm": 0.4872289463867049, |
| "learning_rate": 2.8336451843742866e-05, |
| "loss": 0.8059, |
| "step": 525 |
| }, |
| { |
| "epoch": 2.136040609137056, |
| "grad_norm": 0.4919609224055489, |
| "learning_rate": 2.8284825396372387e-05, |
| "loss": 0.7835, |
| "step": 526 |
| }, |
| { |
| "epoch": 2.1401015228426394, |
| "grad_norm": 0.45746916285393524, |
| "learning_rate": 2.8233132224157132e-05, |
| "loss": 0.792, |
| "step": 527 |
| }, |
| { |
| "epoch": 2.1441624365482235, |
| "grad_norm": 0.43800494914342597, |
| "learning_rate": 2.8181372743426805e-05, |
| "loss": 0.7871, |
| "step": 528 |
| }, |
| { |
| "epoch": 2.148223350253807, |
| "grad_norm": 0.3779645547189469, |
| "learning_rate": 2.8129547371045128e-05, |
| "loss": 0.7677, |
| "step": 529 |
| }, |
| { |
| "epoch": 2.152284263959391, |
| "grad_norm": 0.48737288282761904, |
| "learning_rate": 2.8077656524406534e-05, |
| "loss": 0.7948, |
| "step": 530 |
| }, |
| { |
| "epoch": 2.1563451776649747, |
| "grad_norm": 0.40719384181057056, |
| "learning_rate": 2.802570062143278e-05, |
| "loss": 0.797, |
| "step": 531 |
| }, |
| { |
| "epoch": 2.1604060913705583, |
| "grad_norm": 0.48830468818476336, |
| "learning_rate": 2.7973680080569555e-05, |
| "loss": 0.8137, |
| "step": 532 |
| }, |
| { |
| "epoch": 2.1644670050761423, |
| "grad_norm": 0.5520460121499337, |
| "learning_rate": 2.792159532078314e-05, |
| "loss": 0.7772, |
| "step": 533 |
| }, |
| { |
| "epoch": 2.168527918781726, |
| "grad_norm": 0.31765489806843067, |
| "learning_rate": 2.7869446761557033e-05, |
| "loss": 0.7569, |
| "step": 534 |
| }, |
| { |
| "epoch": 2.1725888324873095, |
| "grad_norm": 0.4522380557543925, |
| "learning_rate": 2.781723482288857e-05, |
| "loss": 0.8046, |
| "step": 535 |
| }, |
| { |
| "epoch": 2.1766497461928935, |
| "grad_norm": 0.4599354074079185, |
| "learning_rate": 2.7764959925285517e-05, |
| "loss": 0.7781, |
| "step": 536 |
| }, |
| { |
| "epoch": 2.180710659898477, |
| "grad_norm": 0.4129825199733175, |
| "learning_rate": 2.771262248976272e-05, |
| "loss": 0.7696, |
| "step": 537 |
| }, |
| { |
| "epoch": 2.1847715736040607, |
| "grad_norm": 0.3597972772000314, |
| "learning_rate": 2.7660222937838677e-05, |
| "loss": 0.7945, |
| "step": 538 |
| }, |
| { |
| "epoch": 2.1888324873096447, |
| "grad_norm": 0.448372440211052, |
| "learning_rate": 2.7607761691532186e-05, |
| "loss": 0.7656, |
| "step": 539 |
| }, |
| { |
| "epoch": 2.1928934010152283, |
| "grad_norm": 0.4629531913543579, |
| "learning_rate": 2.7555239173358916e-05, |
| "loss": 0.7759, |
| "step": 540 |
| }, |
| { |
| "epoch": 2.1969543147208124, |
| "grad_norm": 0.41594750482355514, |
| "learning_rate": 2.7502655806328e-05, |
| "loss": 0.7739, |
| "step": 541 |
| }, |
| { |
| "epoch": 2.201015228426396, |
| "grad_norm": 0.6099230511312791, |
| "learning_rate": 2.7450012013938648e-05, |
| "loss": 0.7973, |
| "step": 542 |
| }, |
| { |
| "epoch": 2.2050761421319796, |
| "grad_norm": 0.6270226494530453, |
| "learning_rate": 2.739730822017673e-05, |
| "loss": 0.8124, |
| "step": 543 |
| }, |
| { |
| "epoch": 2.2091370558375636, |
| "grad_norm": 0.48848635278516567, |
| "learning_rate": 2.7344544849511355e-05, |
| "loss": 0.8037, |
| "step": 544 |
| }, |
| { |
| "epoch": 2.213197969543147, |
| "grad_norm": 0.3593375178240309, |
| "learning_rate": 2.7291722326891456e-05, |
| "loss": 0.791, |
| "step": 545 |
| }, |
| { |
| "epoch": 2.2172588832487308, |
| "grad_norm": 0.47039650273919076, |
| "learning_rate": 2.723884107774236e-05, |
| "loss": 0.775, |
| "step": 546 |
| }, |
| { |
| "epoch": 2.221319796954315, |
| "grad_norm": 0.468698088818375, |
| "learning_rate": 2.718590152796239e-05, |
| "loss": 0.7908, |
| "step": 547 |
| }, |
| { |
| "epoch": 2.2253807106598984, |
| "grad_norm": 0.4314485372282206, |
| "learning_rate": 2.71329041039194e-05, |
| "loss": 0.8069, |
| "step": 548 |
| }, |
| { |
| "epoch": 2.2294416243654824, |
| "grad_norm": 0.3756302978314692, |
| "learning_rate": 2.7079849232447357e-05, |
| "loss": 0.781, |
| "step": 549 |
| }, |
| { |
| "epoch": 2.233502538071066, |
| "grad_norm": 0.364665646824605, |
| "learning_rate": 2.7026737340842895e-05, |
| "loss": 0.8081, |
| "step": 550 |
| }, |
| { |
| "epoch": 2.2375634517766496, |
| "grad_norm": 0.4373311003115894, |
| "learning_rate": 2.697356885686189e-05, |
| "loss": 0.7913, |
| "step": 551 |
| }, |
| { |
| "epoch": 2.2416243654822336, |
| "grad_norm": 0.43919524890125977, |
| "learning_rate": 2.6920344208716014e-05, |
| "loss": 0.7769, |
| "step": 552 |
| }, |
| { |
| "epoch": 2.2456852791878172, |
| "grad_norm": 0.3751290179278852, |
| "learning_rate": 2.6867063825069252e-05, |
| "loss": 0.7905, |
| "step": 553 |
| }, |
| { |
| "epoch": 2.249746192893401, |
| "grad_norm": 0.3460448698631867, |
| "learning_rate": 2.6813728135034494e-05, |
| "loss": 0.7637, |
| "step": 554 |
| }, |
| { |
| "epoch": 2.253807106598985, |
| "grad_norm": 0.4676584148997351, |
| "learning_rate": 2.6760337568170056e-05, |
| "loss": 0.7876, |
| "step": 555 |
| }, |
| { |
| "epoch": 2.2578680203045685, |
| "grad_norm": 0.44025520680191255, |
| "learning_rate": 2.6706892554476226e-05, |
| "loss": 0.813, |
| "step": 556 |
| }, |
| { |
| "epoch": 2.261928934010152, |
| "grad_norm": 0.41621443270651415, |
| "learning_rate": 2.6653393524391795e-05, |
| "loss": 0.7825, |
| "step": 557 |
| }, |
| { |
| "epoch": 2.265989847715736, |
| "grad_norm": 0.3701080537144901, |
| "learning_rate": 2.6599840908790592e-05, |
| "loss": 0.7827, |
| "step": 558 |
| }, |
| { |
| "epoch": 2.2700507614213197, |
| "grad_norm": 0.42554138077738807, |
| "learning_rate": 2.6546235138978028e-05, |
| "loss": 0.7648, |
| "step": 559 |
| }, |
| { |
| "epoch": 2.2741116751269037, |
| "grad_norm": 0.3010582955673964, |
| "learning_rate": 2.6492576646687597e-05, |
| "loss": 0.8058, |
| "step": 560 |
| }, |
| { |
| "epoch": 2.2781725888324873, |
| "grad_norm": 0.35203361672088324, |
| "learning_rate": 2.6438865864077425e-05, |
| "loss": 0.7888, |
| "step": 561 |
| }, |
| { |
| "epoch": 2.282233502538071, |
| "grad_norm": 0.4084088179119911, |
| "learning_rate": 2.6385103223726766e-05, |
| "loss": 0.8021, |
| "step": 562 |
| }, |
| { |
| "epoch": 2.286294416243655, |
| "grad_norm": 0.3442625949102979, |
| "learning_rate": 2.6331289158632537e-05, |
| "loss": 0.7909, |
| "step": 563 |
| }, |
| { |
| "epoch": 2.2903553299492385, |
| "grad_norm": 0.3398821977797976, |
| "learning_rate": 2.6277424102205817e-05, |
| "loss": 0.7851, |
| "step": 564 |
| }, |
| { |
| "epoch": 2.2944162436548226, |
| "grad_norm": 0.412777572413902, |
| "learning_rate": 2.6223508488268374e-05, |
| "loss": 0.7859, |
| "step": 565 |
| }, |
| { |
| "epoch": 2.298477157360406, |
| "grad_norm": 0.45102079570324355, |
| "learning_rate": 2.6169542751049148e-05, |
| "loss": 0.7897, |
| "step": 566 |
| }, |
| { |
| "epoch": 2.3025380710659897, |
| "grad_norm": 0.3542313092630835, |
| "learning_rate": 2.6115527325180754e-05, |
| "loss": 0.7714, |
| "step": 567 |
| }, |
| { |
| "epoch": 2.3065989847715738, |
| "grad_norm": 0.43307413741713047, |
| "learning_rate": 2.606146264569603e-05, |
| "loss": 0.7802, |
| "step": 568 |
| }, |
| { |
| "epoch": 2.3106598984771574, |
| "grad_norm": 0.38621936570142634, |
| "learning_rate": 2.6007349148024447e-05, |
| "loss": 0.7946, |
| "step": 569 |
| }, |
| { |
| "epoch": 2.314720812182741, |
| "grad_norm": 0.36107902630036987, |
| "learning_rate": 2.5953187267988694e-05, |
| "loss": 0.7846, |
| "step": 570 |
| }, |
| { |
| "epoch": 2.318781725888325, |
| "grad_norm": 0.39381941502999723, |
| "learning_rate": 2.5898977441801097e-05, |
| "loss": 0.7975, |
| "step": 571 |
| }, |
| { |
| "epoch": 2.3228426395939086, |
| "grad_norm": 0.33146036036906096, |
| "learning_rate": 2.584472010606015e-05, |
| "loss": 0.7859, |
| "step": 572 |
| }, |
| { |
| "epoch": 2.326903553299492, |
| "grad_norm": 0.353608761683652, |
| "learning_rate": 2.5790415697746976e-05, |
| "loss": 0.7729, |
| "step": 573 |
| }, |
| { |
| "epoch": 2.330964467005076, |
| "grad_norm": 0.3677979026424005, |
| "learning_rate": 2.5736064654221808e-05, |
| "loss": 0.8157, |
| "step": 574 |
| }, |
| { |
| "epoch": 2.33502538071066, |
| "grad_norm": 0.3136935391293619, |
| "learning_rate": 2.568166741322048e-05, |
| "loss": 0.7688, |
| "step": 575 |
| }, |
| { |
| "epoch": 2.339086294416244, |
| "grad_norm": 0.5186534474774878, |
| "learning_rate": 2.56272244128509e-05, |
| "loss": 0.8109, |
| "step": 576 |
| }, |
| { |
| "epoch": 2.3431472081218274, |
| "grad_norm": 0.3422352191041233, |
| "learning_rate": 2.55727360915895e-05, |
| "loss": 0.7693, |
| "step": 577 |
| }, |
| { |
| "epoch": 2.347208121827411, |
| "grad_norm": 0.4021137219933842, |
| "learning_rate": 2.5518202888277734e-05, |
| "loss": 0.815, |
| "step": 578 |
| }, |
| { |
| "epoch": 2.351269035532995, |
| "grad_norm": 0.37025113684125394, |
| "learning_rate": 2.5463625242118523e-05, |
| "loss": 0.7471, |
| "step": 579 |
| }, |
| { |
| "epoch": 2.3553299492385786, |
| "grad_norm": 0.3161917054488647, |
| "learning_rate": 2.5409003592672723e-05, |
| "loss": 0.7852, |
| "step": 580 |
| }, |
| { |
| "epoch": 2.3593908629441627, |
| "grad_norm": 0.3940494026071141, |
| "learning_rate": 2.535433837985559e-05, |
| "loss": 0.7647, |
| "step": 581 |
| }, |
| { |
| "epoch": 2.3634517766497463, |
| "grad_norm": 0.4049994808454731, |
| "learning_rate": 2.529963004393324e-05, |
| "loss": 0.782, |
| "step": 582 |
| }, |
| { |
| "epoch": 2.36751269035533, |
| "grad_norm": 0.34393786441543645, |
| "learning_rate": 2.524487902551908e-05, |
| "loss": 0.8022, |
| "step": 583 |
| }, |
| { |
| "epoch": 2.371573604060914, |
| "grad_norm": 0.3458739188615557, |
| "learning_rate": 2.519008576557029e-05, |
| "loss": 0.7867, |
| "step": 584 |
| }, |
| { |
| "epoch": 2.3756345177664975, |
| "grad_norm": 0.3512886583377272, |
| "learning_rate": 2.5135250705384254e-05, |
| "loss": 0.7959, |
| "step": 585 |
| }, |
| { |
| "epoch": 2.379695431472081, |
| "grad_norm": 0.3166567008381363, |
| "learning_rate": 2.5080374286595007e-05, |
| "loss": 0.7828, |
| "step": 586 |
| }, |
| { |
| "epoch": 2.383756345177665, |
| "grad_norm": 0.37827872090233694, |
| "learning_rate": 2.5025456951169677e-05, |
| "loss": 0.7899, |
| "step": 587 |
| }, |
| { |
| "epoch": 2.3878172588832487, |
| "grad_norm": 0.38341693540910676, |
| "learning_rate": 2.4970499141404942e-05, |
| "loss": 0.8023, |
| "step": 588 |
| }, |
| { |
| "epoch": 2.3918781725888323, |
| "grad_norm": 0.314871268529053, |
| "learning_rate": 2.491550129992345e-05, |
| "loss": 0.7816, |
| "step": 589 |
| }, |
| { |
| "epoch": 2.3959390862944163, |
| "grad_norm": 0.4337890281152915, |
| "learning_rate": 2.486046386967024e-05, |
| "loss": 0.777, |
| "step": 590 |
| }, |
| { |
| "epoch": 2.4, |
| "grad_norm": 0.4251034334619548, |
| "learning_rate": 2.4805387293909214e-05, |
| "loss": 0.8113, |
| "step": 591 |
| }, |
| { |
| "epoch": 2.404060913705584, |
| "grad_norm": 0.2994041294404965, |
| "learning_rate": 2.4750272016219552e-05, |
| "loss": 0.7872, |
| "step": 592 |
| }, |
| { |
| "epoch": 2.4081218274111675, |
| "grad_norm": 0.49840203513811554, |
| "learning_rate": 2.4695118480492114e-05, |
| "loss": 0.8043, |
| "step": 593 |
| }, |
| { |
| "epoch": 2.412182741116751, |
| "grad_norm": 0.4059710203578824, |
| "learning_rate": 2.4639927130925898e-05, |
| "loss": 0.7784, |
| "step": 594 |
| }, |
| { |
| "epoch": 2.416243654822335, |
| "grad_norm": 0.39640518858341955, |
| "learning_rate": 2.458469841202444e-05, |
| "loss": 0.7958, |
| "step": 595 |
| }, |
| { |
| "epoch": 2.4203045685279188, |
| "grad_norm": 0.3167486556837784, |
| "learning_rate": 2.452943276859226e-05, |
| "loss": 0.7967, |
| "step": 596 |
| }, |
| { |
| "epoch": 2.4243654822335023, |
| "grad_norm": 0.3878734460827972, |
| "learning_rate": 2.447413064573125e-05, |
| "loss": 0.7868, |
| "step": 597 |
| }, |
| { |
| "epoch": 2.4284263959390864, |
| "grad_norm": 0.2793482242945048, |
| "learning_rate": 2.4418792488837095e-05, |
| "loss": 0.764, |
| "step": 598 |
| }, |
| { |
| "epoch": 2.43248730964467, |
| "grad_norm": 0.3565673803423223, |
| "learning_rate": 2.4363418743595713e-05, |
| "loss": 0.7855, |
| "step": 599 |
| }, |
| { |
| "epoch": 2.436548223350254, |
| "grad_norm": 0.39096967168186086, |
| "learning_rate": 2.430800985597963e-05, |
| "loss": 0.7914, |
| "step": 600 |
| }, |
| { |
| "epoch": 2.4406091370558376, |
| "grad_norm": 0.3356770336056809, |
| "learning_rate": 2.4252566272244415e-05, |
| "loss": 0.7753, |
| "step": 601 |
| }, |
| { |
| "epoch": 2.444670050761421, |
| "grad_norm": 0.3693679483682359, |
| "learning_rate": 2.4197088438925063e-05, |
| "loss": 0.7773, |
| "step": 602 |
| }, |
| { |
| "epoch": 2.4487309644670052, |
| "grad_norm": 0.3352969801111382, |
| "learning_rate": 2.4141576802832417e-05, |
| "loss": 0.7768, |
| "step": 603 |
| }, |
| { |
| "epoch": 2.452791878172589, |
| "grad_norm": 0.33402183067598984, |
| "learning_rate": 2.408603181104957e-05, |
| "loss": 0.7884, |
| "step": 604 |
| }, |
| { |
| "epoch": 2.4568527918781724, |
| "grad_norm": 0.3145526563253167, |
| "learning_rate": 2.4030453910928245e-05, |
| "loss": 0.7789, |
| "step": 605 |
| }, |
| { |
| "epoch": 2.4609137055837564, |
| "grad_norm": 0.3255196063630698, |
| "learning_rate": 2.397484355008521e-05, |
| "loss": 0.7916, |
| "step": 606 |
| }, |
| { |
| "epoch": 2.46497461928934, |
| "grad_norm": 0.29204519991654526, |
| "learning_rate": 2.3919201176398662e-05, |
| "loss": 0.7758, |
| "step": 607 |
| }, |
| { |
| "epoch": 2.4690355329949236, |
| "grad_norm": 0.2891030572484462, |
| "learning_rate": 2.3863527238004633e-05, |
| "loss": 0.7629, |
| "step": 608 |
| }, |
| { |
| "epoch": 2.4730964467005077, |
| "grad_norm": 0.2923567174037668, |
| "learning_rate": 2.380782218329337e-05, |
| "loss": 0.7888, |
| "step": 609 |
| }, |
| { |
| "epoch": 2.4771573604060912, |
| "grad_norm": 0.3306525917922542, |
| "learning_rate": 2.3752086460905725e-05, |
| "loss": 0.7978, |
| "step": 610 |
| }, |
| { |
| "epoch": 2.4812182741116753, |
| "grad_norm": 0.3773588068876117, |
| "learning_rate": 2.3696320519729544e-05, |
| "loss": 0.7915, |
| "step": 611 |
| }, |
| { |
| "epoch": 2.485279187817259, |
| "grad_norm": 0.31683732276895005, |
| "learning_rate": 2.3640524808896045e-05, |
| "loss": 0.7982, |
| "step": 612 |
| }, |
| { |
| "epoch": 2.4893401015228425, |
| "grad_norm": 0.38581970723619197, |
| "learning_rate": 2.3584699777776222e-05, |
| "loss": 0.7964, |
| "step": 613 |
| }, |
| { |
| "epoch": 2.4934010152284265, |
| "grad_norm": 0.2866990737962306, |
| "learning_rate": 2.3528845875977195e-05, |
| "loss": 0.7873, |
| "step": 614 |
| }, |
| { |
| "epoch": 2.49746192893401, |
| "grad_norm": 0.34632372526373356, |
| "learning_rate": 2.3472963553338614e-05, |
| "loss": 0.7884, |
| "step": 615 |
| }, |
| { |
| "epoch": 2.501522842639594, |
| "grad_norm": 0.30665637390151196, |
| "learning_rate": 2.341705325992901e-05, |
| "loss": 0.7907, |
| "step": 616 |
| }, |
| { |
| "epoch": 2.5055837563451777, |
| "grad_norm": 0.39478841436867845, |
| "learning_rate": 2.336111544604222e-05, |
| "loss": 0.8006, |
| "step": 617 |
| }, |
| { |
| "epoch": 2.5096446700507613, |
| "grad_norm": 0.2727421043324723, |
| "learning_rate": 2.33051505621937e-05, |
| "loss": 0.8001, |
| "step": 618 |
| }, |
| { |
| "epoch": 2.5137055837563453, |
| "grad_norm": 0.3642318685099745, |
| "learning_rate": 2.324915905911693e-05, |
| "loss": 0.7765, |
| "step": 619 |
| }, |
| { |
| "epoch": 2.517766497461929, |
| "grad_norm": 0.2883606617502641, |
| "learning_rate": 2.319314138775977e-05, |
| "loss": 0.7885, |
| "step": 620 |
| }, |
| { |
| "epoch": 2.5218274111675125, |
| "grad_norm": 0.39038816390432624, |
| "learning_rate": 2.3137097999280856e-05, |
| "loss": 0.7987, |
| "step": 621 |
| }, |
| { |
| "epoch": 2.5258883248730966, |
| "grad_norm": 0.3499604329365916, |
| "learning_rate": 2.308102934504593e-05, |
| "loss": 0.8071, |
| "step": 622 |
| }, |
| { |
| "epoch": 2.52994923857868, |
| "grad_norm": 0.3472031129926152, |
| "learning_rate": 2.3024935876624222e-05, |
| "loss": 0.787, |
| "step": 623 |
| }, |
| { |
| "epoch": 2.5340101522842637, |
| "grad_norm": 0.41039609992932435, |
| "learning_rate": 2.2968818045784813e-05, |
| "loss": 0.7771, |
| "step": 624 |
| }, |
| { |
| "epoch": 2.5380710659898478, |
| "grad_norm": 0.3257746967506483, |
| "learning_rate": 2.2912676304493006e-05, |
| "loss": 0.7932, |
| "step": 625 |
| }, |
| { |
| "epoch": 2.5421319796954314, |
| "grad_norm": 0.3789015257069384, |
| "learning_rate": 2.2856511104906668e-05, |
| "loss": 0.7851, |
| "step": 626 |
| }, |
| { |
| "epoch": 2.546192893401015, |
| "grad_norm": 0.43971258275737324, |
| "learning_rate": 2.2800322899372586e-05, |
| "loss": 0.8072, |
| "step": 627 |
| }, |
| { |
| "epoch": 2.550253807106599, |
| "grad_norm": 0.3987210296702516, |
| "learning_rate": 2.2744112140422844e-05, |
| "loss": 0.7721, |
| "step": 628 |
| }, |
| { |
| "epoch": 2.5543147208121826, |
| "grad_norm": 0.37554551707357203, |
| "learning_rate": 2.2687879280771177e-05, |
| "loss": 0.7816, |
| "step": 629 |
| }, |
| { |
| "epoch": 2.5583756345177666, |
| "grad_norm": 0.3250075504151975, |
| "learning_rate": 2.26316247733093e-05, |
| "loss": 0.8002, |
| "step": 630 |
| }, |
| { |
| "epoch": 2.56243654822335, |
| "grad_norm": 0.2974967391492129, |
| "learning_rate": 2.257534907110328e-05, |
| "loss": 0.7643, |
| "step": 631 |
| }, |
| { |
| "epoch": 2.5664974619289342, |
| "grad_norm": 0.32417206916043345, |
| "learning_rate": 2.2519052627389882e-05, |
| "loss": 0.7651, |
| "step": 632 |
| }, |
| { |
| "epoch": 2.570558375634518, |
| "grad_norm": 0.30435586977894385, |
| "learning_rate": 2.246273589557294e-05, |
| "loss": 0.7877, |
| "step": 633 |
| }, |
| { |
| "epoch": 2.5746192893401014, |
| "grad_norm": 0.33126346298012493, |
| "learning_rate": 2.240639932921966e-05, |
| "loss": 0.7651, |
| "step": 634 |
| }, |
| { |
| "epoch": 2.5786802030456855, |
| "grad_norm": 0.3796617549151834, |
| "learning_rate": 2.2350043382056995e-05, |
| "loss": 0.7972, |
| "step": 635 |
| }, |
| { |
| "epoch": 2.582741116751269, |
| "grad_norm": 0.33986951853650826, |
| "learning_rate": 2.2293668507968015e-05, |
| "loss": 0.7634, |
| "step": 636 |
| }, |
| { |
| "epoch": 2.5868020304568526, |
| "grad_norm": 0.32371264761925017, |
| "learning_rate": 2.2237275160988186e-05, |
| "loss": 0.7908, |
| "step": 637 |
| }, |
| { |
| "epoch": 2.5908629441624367, |
| "grad_norm": 0.3148593795022237, |
| "learning_rate": 2.2180863795301787e-05, |
| "loss": 0.7803, |
| "step": 638 |
| }, |
| { |
| "epoch": 2.5949238578680203, |
| "grad_norm": 0.34282558227552595, |
| "learning_rate": 2.212443486523819e-05, |
| "loss": 0.7741, |
| "step": 639 |
| }, |
| { |
| "epoch": 2.598984771573604, |
| "grad_norm": 0.30869686016726433, |
| "learning_rate": 2.2067988825268243e-05, |
| "loss": 0.7962, |
| "step": 640 |
| }, |
| { |
| "epoch": 2.603045685279188, |
| "grad_norm": 0.28972449545870527, |
| "learning_rate": 2.2011526130000596e-05, |
| "loss": 0.7698, |
| "step": 641 |
| }, |
| { |
| "epoch": 2.6071065989847715, |
| "grad_norm": 0.33798493260143075, |
| "learning_rate": 2.1955047234178038e-05, |
| "loss": 0.7951, |
| "step": 642 |
| }, |
| { |
| "epoch": 2.611167512690355, |
| "grad_norm": 0.28133817024005686, |
| "learning_rate": 2.1898552592673825e-05, |
| "loss": 0.7883, |
| "step": 643 |
| }, |
| { |
| "epoch": 2.615228426395939, |
| "grad_norm": 0.35740552787840285, |
| "learning_rate": 2.184204266048803e-05, |
| "loss": 0.7842, |
| "step": 644 |
| }, |
| { |
| "epoch": 2.6192893401015227, |
| "grad_norm": 0.3285179257035973, |
| "learning_rate": 2.1785517892743887e-05, |
| "loss": 0.8005, |
| "step": 645 |
| }, |
| { |
| "epoch": 2.6233502538071067, |
| "grad_norm": 0.33793604823410167, |
| "learning_rate": 2.17289787446841e-05, |
| "loss": 0.7917, |
| "step": 646 |
| }, |
| { |
| "epoch": 2.6274111675126903, |
| "grad_norm": 0.3398528301474974, |
| "learning_rate": 2.1672425671667198e-05, |
| "loss": 0.7862, |
| "step": 647 |
| }, |
| { |
| "epoch": 2.6314720812182744, |
| "grad_norm": 0.35028892036669446, |
| "learning_rate": 2.161585912916385e-05, |
| "loss": 0.8009, |
| "step": 648 |
| }, |
| { |
| "epoch": 2.635532994923858, |
| "grad_norm": 0.3323206026801166, |
| "learning_rate": 2.1559279572753214e-05, |
| "loss": 0.7761, |
| "step": 649 |
| }, |
| { |
| "epoch": 2.6395939086294415, |
| "grad_norm": 0.36641983645762743, |
| "learning_rate": 2.1502687458119268e-05, |
| "loss": 0.7743, |
| "step": 650 |
| }, |
| { |
| "epoch": 2.6436548223350256, |
| "grad_norm": 0.32408973495890764, |
| "learning_rate": 2.1446083241047116e-05, |
| "loss": 0.8109, |
| "step": 651 |
| }, |
| { |
| "epoch": 2.647715736040609, |
| "grad_norm": 0.3261536162295097, |
| "learning_rate": 2.1389467377419333e-05, |
| "loss": 0.7714, |
| "step": 652 |
| }, |
| { |
| "epoch": 2.6517766497461928, |
| "grad_norm": 0.36292631470694736, |
| "learning_rate": 2.133284032321232e-05, |
| "loss": 0.7974, |
| "step": 653 |
| }, |
| { |
| "epoch": 2.655837563451777, |
| "grad_norm": 0.37019989732408076, |
| "learning_rate": 2.1276202534492566e-05, |
| "loss": 0.7822, |
| "step": 654 |
| }, |
| { |
| "epoch": 2.6598984771573604, |
| "grad_norm": 0.3487374975364812, |
| "learning_rate": 2.121955446741306e-05, |
| "loss": 0.8187, |
| "step": 655 |
| }, |
| { |
| "epoch": 2.663959390862944, |
| "grad_norm": 0.31130310561499763, |
| "learning_rate": 2.1162896578209517e-05, |
| "loss": 0.7814, |
| "step": 656 |
| }, |
| { |
| "epoch": 2.668020304568528, |
| "grad_norm": 0.3432290413777575, |
| "learning_rate": 2.1106229323196813e-05, |
| "loss": 0.7984, |
| "step": 657 |
| }, |
| { |
| "epoch": 2.6720812182741116, |
| "grad_norm": 0.2784292520484534, |
| "learning_rate": 2.1049553158765214e-05, |
| "loss": 0.7724, |
| "step": 658 |
| }, |
| { |
| "epoch": 2.676142131979695, |
| "grad_norm": 0.29809379730160107, |
| "learning_rate": 2.0992868541376764e-05, |
| "loss": 0.7777, |
| "step": 659 |
| }, |
| { |
| "epoch": 2.6802030456852792, |
| "grad_norm": 0.314447025149746, |
| "learning_rate": 2.093617592756158e-05, |
| "loss": 0.7991, |
| "step": 660 |
| }, |
| { |
| "epoch": 2.684263959390863, |
| "grad_norm": 0.28844677713981254, |
| "learning_rate": 2.0879475773914167e-05, |
| "loss": 0.7672, |
| "step": 661 |
| }, |
| { |
| "epoch": 2.688324873096447, |
| "grad_norm": 0.33337354917144957, |
| "learning_rate": 2.082276853708978e-05, |
| "loss": 0.8011, |
| "step": 662 |
| }, |
| { |
| "epoch": 2.6923857868020304, |
| "grad_norm": 0.29090897293063595, |
| "learning_rate": 2.076605467380071e-05, |
| "loss": 0.7754, |
| "step": 663 |
| }, |
| { |
| "epoch": 2.6964467005076145, |
| "grad_norm": 0.32825311041463845, |
| "learning_rate": 2.0709334640812613e-05, |
| "loss": 0.7856, |
| "step": 664 |
| }, |
| { |
| "epoch": 2.700507614213198, |
| "grad_norm": 0.3024193894680331, |
| "learning_rate": 2.0652608894940824e-05, |
| "loss": 0.8121, |
| "step": 665 |
| }, |
| { |
| "epoch": 2.7045685279187817, |
| "grad_norm": 0.2784730254181954, |
| "learning_rate": 2.0595877893046722e-05, |
| "loss": 0.7642, |
| "step": 666 |
| }, |
| { |
| "epoch": 2.7086294416243657, |
| "grad_norm": 0.3321101625599293, |
| "learning_rate": 2.0539142092033985e-05, |
| "loss": 0.7761, |
| "step": 667 |
| }, |
| { |
| "epoch": 2.7126903553299493, |
| "grad_norm": 0.29321779886563354, |
| "learning_rate": 2.048240194884496e-05, |
| "loss": 0.7839, |
| "step": 668 |
| }, |
| { |
| "epoch": 2.716751269035533, |
| "grad_norm": 0.43605725460431627, |
| "learning_rate": 2.042565792045695e-05, |
| "loss": 0.7753, |
| "step": 669 |
| }, |
| { |
| "epoch": 2.720812182741117, |
| "grad_norm": 0.2849203692091733, |
| "learning_rate": 2.036891046387857e-05, |
| "loss": 0.7718, |
| "step": 670 |
| }, |
| { |
| "epoch": 2.7248730964467005, |
| "grad_norm": 0.3646339044233378, |
| "learning_rate": 2.0312160036146036e-05, |
| "loss": 0.7885, |
| "step": 671 |
| }, |
| { |
| "epoch": 2.728934010152284, |
| "grad_norm": 0.4205278755093403, |
| "learning_rate": 2.025540709431948e-05, |
| "loss": 0.7924, |
| "step": 672 |
| }, |
| { |
| "epoch": 2.732994923857868, |
| "grad_norm": 0.30850312866758717, |
| "learning_rate": 2.0198652095479298e-05, |
| "loss": 0.7789, |
| "step": 673 |
| }, |
| { |
| "epoch": 2.7370558375634517, |
| "grad_norm": 0.3915107267999157, |
| "learning_rate": 2.014189549672245e-05, |
| "loss": 0.7811, |
| "step": 674 |
| }, |
| { |
| "epoch": 2.7411167512690353, |
| "grad_norm": 0.3323152447853919, |
| "learning_rate": 2.0085137755158776e-05, |
| "loss": 0.7981, |
| "step": 675 |
| }, |
| { |
| "epoch": 2.7451776649746193, |
| "grad_norm": 0.3081867744420649, |
| "learning_rate": 2.0028379327907327e-05, |
| "loss": 0.7772, |
| "step": 676 |
| }, |
| { |
| "epoch": 2.749238578680203, |
| "grad_norm": 0.36836206498360724, |
| "learning_rate": 1.9971620672092676e-05, |
| "loss": 0.784, |
| "step": 677 |
| }, |
| { |
| "epoch": 2.7532994923857865, |
| "grad_norm": 0.342196622234712, |
| "learning_rate": 1.991486224484123e-05, |
| "loss": 0.7785, |
| "step": 678 |
| }, |
| { |
| "epoch": 2.7573604060913706, |
| "grad_norm": 0.3367752226504114, |
| "learning_rate": 1.985810450327756e-05, |
| "loss": 0.7658, |
| "step": 679 |
| }, |
| { |
| "epoch": 2.761421319796954, |
| "grad_norm": 0.36320326684524723, |
| "learning_rate": 1.9801347904520706e-05, |
| "loss": 0.7847, |
| "step": 680 |
| }, |
| { |
| "epoch": 2.765482233502538, |
| "grad_norm": 0.3194637387581025, |
| "learning_rate": 1.974459290568053e-05, |
| "loss": 0.8106, |
| "step": 681 |
| }, |
| { |
| "epoch": 2.769543147208122, |
| "grad_norm": 0.3892996166428429, |
| "learning_rate": 1.968783996385397e-05, |
| "loss": 0.7764, |
| "step": 682 |
| }, |
| { |
| "epoch": 2.773604060913706, |
| "grad_norm": 0.3057574427412112, |
| "learning_rate": 1.963108953612143e-05, |
| "loss": 0.7843, |
| "step": 683 |
| }, |
| { |
| "epoch": 2.7776649746192894, |
| "grad_norm": 0.4016761209641784, |
| "learning_rate": 1.9574342079543056e-05, |
| "loss": 0.7932, |
| "step": 684 |
| }, |
| { |
| "epoch": 2.781725888324873, |
| "grad_norm": 0.322622055083241, |
| "learning_rate": 1.9517598051155046e-05, |
| "loss": 0.7798, |
| "step": 685 |
| }, |
| { |
| "epoch": 2.785786802030457, |
| "grad_norm": 0.40234657520074923, |
| "learning_rate": 1.9460857907966025e-05, |
| "loss": 0.8261, |
| "step": 686 |
| }, |
| { |
| "epoch": 2.7898477157360406, |
| "grad_norm": 0.4234600208046727, |
| "learning_rate": 1.9404122106953285e-05, |
| "loss": 0.8103, |
| "step": 687 |
| }, |
| { |
| "epoch": 2.793908629441624, |
| "grad_norm": 0.35789927893759477, |
| "learning_rate": 1.9347391105059176e-05, |
| "loss": 0.7491, |
| "step": 688 |
| }, |
| { |
| "epoch": 2.7979695431472082, |
| "grad_norm": 0.41315271083978455, |
| "learning_rate": 1.92906653591874e-05, |
| "loss": 0.7774, |
| "step": 689 |
| }, |
| { |
| "epoch": 2.802030456852792, |
| "grad_norm": 0.3684100960951132, |
| "learning_rate": 1.9233945326199295e-05, |
| "loss": 0.8035, |
| "step": 690 |
| }, |
| { |
| "epoch": 2.8060913705583754, |
| "grad_norm": 0.4227462927920012, |
| "learning_rate": 1.917723146291022e-05, |
| "loss": 0.7674, |
| "step": 691 |
| }, |
| { |
| "epoch": 2.8101522842639595, |
| "grad_norm": 0.3084964299555269, |
| "learning_rate": 1.912052422608584e-05, |
| "loss": 0.766, |
| "step": 692 |
| }, |
| { |
| "epoch": 2.814213197969543, |
| "grad_norm": 0.4152962504576633, |
| "learning_rate": 1.9063824072438428e-05, |
| "loss": 0.7962, |
| "step": 693 |
| }, |
| { |
| "epoch": 2.8182741116751266, |
| "grad_norm": 0.3241429092791355, |
| "learning_rate": 1.9007131458623246e-05, |
| "loss": 0.7822, |
| "step": 694 |
| }, |
| { |
| "epoch": 2.8223350253807107, |
| "grad_norm": 0.40096597609423773, |
| "learning_rate": 1.895044684123479e-05, |
| "loss": 0.8005, |
| "step": 695 |
| }, |
| { |
| "epoch": 2.8263959390862943, |
| "grad_norm": 0.362653277090994, |
| "learning_rate": 1.8893770676803194e-05, |
| "loss": 0.7943, |
| "step": 696 |
| }, |
| { |
| "epoch": 2.8304568527918783, |
| "grad_norm": 0.3525812853214671, |
| "learning_rate": 1.8837103421790486e-05, |
| "loss": 0.7748, |
| "step": 697 |
| }, |
| { |
| "epoch": 2.834517766497462, |
| "grad_norm": 0.3247871605038389, |
| "learning_rate": 1.8780445532586952e-05, |
| "loss": 0.7664, |
| "step": 698 |
| }, |
| { |
| "epoch": 2.838578680203046, |
| "grad_norm": 0.3288431037591601, |
| "learning_rate": 1.872379746550743e-05, |
| "loss": 0.7801, |
| "step": 699 |
| }, |
| { |
| "epoch": 2.8426395939086295, |
| "grad_norm": 0.3286665751725121, |
| "learning_rate": 1.866715967678769e-05, |
| "loss": 0.7976, |
| "step": 700 |
| }, |
| { |
| "epoch": 2.846700507614213, |
| "grad_norm": 0.3600684897254582, |
| "learning_rate": 1.861053262258067e-05, |
| "loss": 0.777, |
| "step": 701 |
| }, |
| { |
| "epoch": 2.850761421319797, |
| "grad_norm": 0.3239039887067652, |
| "learning_rate": 1.8553916758952897e-05, |
| "loss": 0.7958, |
| "step": 702 |
| }, |
| { |
| "epoch": 2.8548223350253807, |
| "grad_norm": 0.2739302973678819, |
| "learning_rate": 1.8497312541880735e-05, |
| "loss": 0.7884, |
| "step": 703 |
| }, |
| { |
| "epoch": 2.8588832487309643, |
| "grad_norm": 0.3126432071370836, |
| "learning_rate": 1.8440720427246786e-05, |
| "loss": 0.7995, |
| "step": 704 |
| }, |
| { |
| "epoch": 2.8629441624365484, |
| "grad_norm": 0.3478690354942657, |
| "learning_rate": 1.8384140870836157e-05, |
| "loss": 0.7757, |
| "step": 705 |
| }, |
| { |
| "epoch": 2.867005076142132, |
| "grad_norm": 0.2867621498391961, |
| "learning_rate": 1.8327574328332806e-05, |
| "loss": 0.7805, |
| "step": 706 |
| }, |
| { |
| "epoch": 2.8710659898477155, |
| "grad_norm": 0.34410969176451933, |
| "learning_rate": 1.8271021255315906e-05, |
| "loss": 0.7905, |
| "step": 707 |
| }, |
| { |
| "epoch": 2.8751269035532996, |
| "grad_norm": 0.35899648607635093, |
| "learning_rate": 1.8214482107256117e-05, |
| "loss": 0.8038, |
| "step": 708 |
| }, |
| { |
| "epoch": 2.879187817258883, |
| "grad_norm": 0.33216399555339865, |
| "learning_rate": 1.8157957339511968e-05, |
| "loss": 0.7686, |
| "step": 709 |
| }, |
| { |
| "epoch": 2.8832487309644668, |
| "grad_norm": 0.3215952334885353, |
| "learning_rate": 1.8101447407326182e-05, |
| "loss": 0.7916, |
| "step": 710 |
| }, |
| { |
| "epoch": 2.887309644670051, |
| "grad_norm": 0.3211710199800581, |
| "learning_rate": 1.8044952765821966e-05, |
| "loss": 0.7788, |
| "step": 711 |
| }, |
| { |
| "epoch": 2.8913705583756344, |
| "grad_norm": 0.30048135851688895, |
| "learning_rate": 1.7988473869999407e-05, |
| "loss": 0.8097, |
| "step": 712 |
| }, |
| { |
| "epoch": 2.8954314720812184, |
| "grad_norm": 0.32880550154574145, |
| "learning_rate": 1.7932011174731764e-05, |
| "loss": 0.786, |
| "step": 713 |
| }, |
| { |
| "epoch": 2.899492385786802, |
| "grad_norm": 0.3142321285198872, |
| "learning_rate": 1.7875565134761817e-05, |
| "loss": 0.7918, |
| "step": 714 |
| }, |
| { |
| "epoch": 2.903553299492386, |
| "grad_norm": 0.35774097679425537, |
| "learning_rate": 1.7819136204698226e-05, |
| "loss": 0.781, |
| "step": 715 |
| }, |
| { |
| "epoch": 2.9076142131979696, |
| "grad_norm": 0.28967080931043626, |
| "learning_rate": 1.776272483901182e-05, |
| "loss": 0.7835, |
| "step": 716 |
| }, |
| { |
| "epoch": 2.9116751269035532, |
| "grad_norm": 0.3274965021895058, |
| "learning_rate": 1.7706331492031995e-05, |
| "loss": 0.7849, |
| "step": 717 |
| }, |
| { |
| "epoch": 2.9157360406091373, |
| "grad_norm": 0.4001142306005694, |
| "learning_rate": 1.764995661794301e-05, |
| "loss": 0.7799, |
| "step": 718 |
| }, |
| { |
| "epoch": 2.919796954314721, |
| "grad_norm": 0.2898541351185558, |
| "learning_rate": 1.759360067078035e-05, |
| "loss": 0.7777, |
| "step": 719 |
| }, |
| { |
| "epoch": 2.9238578680203045, |
| "grad_norm": 0.38831169491429907, |
| "learning_rate": 1.7537264104427064e-05, |
| "loss": 0.787, |
| "step": 720 |
| }, |
| { |
| "epoch": 2.9279187817258885, |
| "grad_norm": 0.33206179946457476, |
| "learning_rate": 1.748094737261012e-05, |
| "loss": 0.8013, |
| "step": 721 |
| }, |
| { |
| "epoch": 2.931979695431472, |
| "grad_norm": 0.30537339104276, |
| "learning_rate": 1.7424650928896726e-05, |
| "loss": 0.7945, |
| "step": 722 |
| }, |
| { |
| "epoch": 2.9360406091370557, |
| "grad_norm": 0.3208894660631289, |
| "learning_rate": 1.7368375226690712e-05, |
| "loss": 0.7557, |
| "step": 723 |
| }, |
| { |
| "epoch": 2.9401015228426397, |
| "grad_norm": 0.29765219320445946, |
| "learning_rate": 1.731212071922883e-05, |
| "loss": 0.779, |
| "step": 724 |
| }, |
| { |
| "epoch": 2.9441624365482233, |
| "grad_norm": 0.29651569524467913, |
| "learning_rate": 1.7255887859577156e-05, |
| "loss": 0.7647, |
| "step": 725 |
| }, |
| { |
| "epoch": 2.948223350253807, |
| "grad_norm": 0.29164399705633753, |
| "learning_rate": 1.7199677100627427e-05, |
| "loss": 0.7764, |
| "step": 726 |
| }, |
| { |
| "epoch": 2.952284263959391, |
| "grad_norm": 0.26751152924186367, |
| "learning_rate": 1.7143488895093343e-05, |
| "loss": 0.7993, |
| "step": 727 |
| }, |
| { |
| "epoch": 2.9563451776649745, |
| "grad_norm": 0.3278592842330587, |
| "learning_rate": 1.7087323695506994e-05, |
| "loss": 0.7677, |
| "step": 728 |
| }, |
| { |
| "epoch": 2.960406091370558, |
| "grad_norm": 0.32065302487819675, |
| "learning_rate": 1.7031181954215194e-05, |
| "loss": 0.807, |
| "step": 729 |
| }, |
| { |
| "epoch": 2.964467005076142, |
| "grad_norm": 0.3454848496680953, |
| "learning_rate": 1.6975064123375788e-05, |
| "loss": 0.8048, |
| "step": 730 |
| }, |
| { |
| "epoch": 2.9685279187817257, |
| "grad_norm": 0.29033536100637847, |
| "learning_rate": 1.6918970654954084e-05, |
| "loss": 0.802, |
| "step": 731 |
| }, |
| { |
| "epoch": 2.9725888324873098, |
| "grad_norm": 0.35081973599917765, |
| "learning_rate": 1.686290200071915e-05, |
| "loss": 0.7896, |
| "step": 732 |
| }, |
| { |
| "epoch": 2.9766497461928934, |
| "grad_norm": 0.3422142854536168, |
| "learning_rate": 1.6806858612240234e-05, |
| "loss": 0.7884, |
| "step": 733 |
| }, |
| { |
| "epoch": 2.9807106598984774, |
| "grad_norm": 0.3039406476626592, |
| "learning_rate": 1.6750840940883078e-05, |
| "loss": 0.7859, |
| "step": 734 |
| }, |
| { |
| "epoch": 2.984771573604061, |
| "grad_norm": 0.3045718685073243, |
| "learning_rate": 1.6694849437806305e-05, |
| "loss": 0.7882, |
| "step": 735 |
| }, |
| { |
| "epoch": 2.9888324873096446, |
| "grad_norm": 0.2535105823290247, |
| "learning_rate": 1.663888455395778e-05, |
| "loss": 0.7918, |
| "step": 736 |
| }, |
| { |
| "epoch": 2.9928934010152286, |
| "grad_norm": 0.32120562574837785, |
| "learning_rate": 1.6582946740070995e-05, |
| "loss": 0.8062, |
| "step": 737 |
| }, |
| { |
| "epoch": 2.996954314720812, |
| "grad_norm": 0.26905677090440744, |
| "learning_rate": 1.6527036446661396e-05, |
| "loss": 0.7644, |
| "step": 738 |
| }, |
| { |
| "epoch": 3.001015228426396, |
| "grad_norm": 0.5272561843723065, |
| "learning_rate": 1.6471154124022818e-05, |
| "loss": 1.3683, |
| "step": 739 |
| }, |
| { |
| "epoch": 3.00507614213198, |
| "grad_norm": 0.3995125412114343, |
| "learning_rate": 1.6415300222223788e-05, |
| "loss": 0.7224, |
| "step": 740 |
| }, |
| { |
| "epoch": 3.0091370558375634, |
| "grad_norm": 0.3938366508268558, |
| "learning_rate": 1.6359475191103958e-05, |
| "loss": 0.7477, |
| "step": 741 |
| }, |
| { |
| "epoch": 3.013197969543147, |
| "grad_norm": 0.4473634225355428, |
| "learning_rate": 1.6303679480270466e-05, |
| "loss": 0.7484, |
| "step": 742 |
| }, |
| { |
| "epoch": 3.017258883248731, |
| "grad_norm": 0.36658505587549567, |
| "learning_rate": 1.624791353909428e-05, |
| "loss": 0.7439, |
| "step": 743 |
| }, |
| { |
| "epoch": 3.0213197969543146, |
| "grad_norm": 0.44252399981787904, |
| "learning_rate": 1.619217781670663e-05, |
| "loss": 0.7382, |
| "step": 744 |
| }, |
| { |
| "epoch": 3.0253807106598987, |
| "grad_norm": 0.35827662892550266, |
| "learning_rate": 1.6136472761995373e-05, |
| "loss": 0.7344, |
| "step": 745 |
| }, |
| { |
| "epoch": 3.0294416243654823, |
| "grad_norm": 0.4185665617633392, |
| "learning_rate": 1.608079882360134e-05, |
| "loss": 0.7137, |
| "step": 746 |
| }, |
| { |
| "epoch": 3.033502538071066, |
| "grad_norm": 0.4876804065724073, |
| "learning_rate": 1.60251564499148e-05, |
| "loss": 0.7441, |
| "step": 747 |
| }, |
| { |
| "epoch": 3.03756345177665, |
| "grad_norm": 0.3303488082709638, |
| "learning_rate": 1.596954608907176e-05, |
| "loss": 0.7335, |
| "step": 748 |
| }, |
| { |
| "epoch": 3.0416243654822335, |
| "grad_norm": 0.43963751974485077, |
| "learning_rate": 1.591396818895043e-05, |
| "loss": 0.7557, |
| "step": 749 |
| }, |
| { |
| "epoch": 3.045685279187817, |
| "grad_norm": 0.3220084387379337, |
| "learning_rate": 1.585842319716759e-05, |
| "loss": 0.7458, |
| "step": 750 |
| }, |
| { |
| "epoch": 3.049746192893401, |
| "grad_norm": 0.34193528234557186, |
| "learning_rate": 1.5802911561074944e-05, |
| "loss": 0.7319, |
| "step": 751 |
| }, |
| { |
| "epoch": 3.0538071065989847, |
| "grad_norm": 0.32045802025445913, |
| "learning_rate": 1.5747433727755595e-05, |
| "loss": 0.729, |
| "step": 752 |
| }, |
| { |
| "epoch": 3.0578680203045687, |
| "grad_norm": 0.3279893882544517, |
| "learning_rate": 1.5691990144020376e-05, |
| "loss": 0.7341, |
| "step": 753 |
| }, |
| { |
| "epoch": 3.0619289340101523, |
| "grad_norm": 0.29691347373923505, |
| "learning_rate": 1.5636581256404297e-05, |
| "loss": 0.722, |
| "step": 754 |
| }, |
| { |
| "epoch": 3.065989847715736, |
| "grad_norm": 0.3274278485365102, |
| "learning_rate": 1.558120751116291e-05, |
| "loss": 0.7485, |
| "step": 755 |
| }, |
| { |
| "epoch": 3.07005076142132, |
| "grad_norm": 0.2876091199522131, |
| "learning_rate": 1.552586935426876e-05, |
| "loss": 0.7401, |
| "step": 756 |
| }, |
| { |
| "epoch": 3.0741116751269035, |
| "grad_norm": 0.34117776094647645, |
| "learning_rate": 1.547056723140774e-05, |
| "loss": 0.7245, |
| "step": 757 |
| }, |
| { |
| "epoch": 3.078172588832487, |
| "grad_norm": 0.361802081945869, |
| "learning_rate": 1.5415301587975565e-05, |
| "loss": 0.7416, |
| "step": 758 |
| }, |
| { |
| "epoch": 3.082233502538071, |
| "grad_norm": 0.3426236885673128, |
| "learning_rate": 1.536007286907411e-05, |
| "loss": 0.7599, |
| "step": 759 |
| }, |
| { |
| "epoch": 3.0862944162436547, |
| "grad_norm": 0.4852890605345928, |
| "learning_rate": 1.5304881519507896e-05, |
| "loss": 0.7414, |
| "step": 760 |
| }, |
| { |
| "epoch": 3.090355329949239, |
| "grad_norm": 0.33237969736841255, |
| "learning_rate": 1.5249727983780453e-05, |
| "loss": 0.738, |
| "step": 761 |
| }, |
| { |
| "epoch": 3.0944162436548224, |
| "grad_norm": 0.44137238279666374, |
| "learning_rate": 1.5194612706090786e-05, |
| "loss": 0.761, |
| "step": 762 |
| }, |
| { |
| "epoch": 3.098477157360406, |
| "grad_norm": 0.3797869864963043, |
| "learning_rate": 1.5139536130329771e-05, |
| "loss": 0.7252, |
| "step": 763 |
| }, |
| { |
| "epoch": 3.10253807106599, |
| "grad_norm": 0.2961740354584357, |
| "learning_rate": 1.508449870007656e-05, |
| "loss": 0.7288, |
| "step": 764 |
| }, |
| { |
| "epoch": 3.1065989847715736, |
| "grad_norm": 0.4446696963154877, |
| "learning_rate": 1.5029500858595056e-05, |
| "loss": 0.7419, |
| "step": 765 |
| }, |
| { |
| "epoch": 3.110659898477157, |
| "grad_norm": 0.2723421851369137, |
| "learning_rate": 1.4974543048830328e-05, |
| "loss": 0.7536, |
| "step": 766 |
| }, |
| { |
| "epoch": 3.114720812182741, |
| "grad_norm": 0.4168640413621046, |
| "learning_rate": 1.4919625713405e-05, |
| "loss": 0.7391, |
| "step": 767 |
| }, |
| { |
| "epoch": 3.118781725888325, |
| "grad_norm": 0.3304840241552262, |
| "learning_rate": 1.4864749294615756e-05, |
| "loss": 0.742, |
| "step": 768 |
| }, |
| { |
| "epoch": 3.122842639593909, |
| "grad_norm": 0.4019941763245211, |
| "learning_rate": 1.4809914234429716e-05, |
| "loss": 0.7248, |
| "step": 769 |
| }, |
| { |
| "epoch": 3.1269035532994924, |
| "grad_norm": 0.3087464883159267, |
| "learning_rate": 1.4755120974480923e-05, |
| "loss": 0.7457, |
| "step": 770 |
| }, |
| { |
| "epoch": 3.130964467005076, |
| "grad_norm": 0.363648230874974, |
| "learning_rate": 1.4700369956066771e-05, |
| "loss": 0.7346, |
| "step": 771 |
| }, |
| { |
| "epoch": 3.13502538071066, |
| "grad_norm": 0.27188450308844053, |
| "learning_rate": 1.4645661620144413e-05, |
| "loss": 0.7568, |
| "step": 772 |
| }, |
| { |
| "epoch": 3.1390862944162436, |
| "grad_norm": 0.43845250311218126, |
| "learning_rate": 1.4590996407327284e-05, |
| "loss": 0.7329, |
| "step": 773 |
| }, |
| { |
| "epoch": 3.1431472081218272, |
| "grad_norm": 0.2642384381691769, |
| "learning_rate": 1.4536374757881487e-05, |
| "loss": 0.7649, |
| "step": 774 |
| }, |
| { |
| "epoch": 3.1472081218274113, |
| "grad_norm": 0.338293331024774, |
| "learning_rate": 1.4481797111722271e-05, |
| "loss": 0.7195, |
| "step": 775 |
| }, |
| { |
| "epoch": 3.151269035532995, |
| "grad_norm": 0.27436565915905275, |
| "learning_rate": 1.4427263908410507e-05, |
| "loss": 0.7492, |
| "step": 776 |
| }, |
| { |
| "epoch": 3.1553299492385785, |
| "grad_norm": 0.30259492454908654, |
| "learning_rate": 1.4372775587149108e-05, |
| "loss": 0.7387, |
| "step": 777 |
| }, |
| { |
| "epoch": 3.1593908629441625, |
| "grad_norm": 0.2639400998864047, |
| "learning_rate": 1.4318332586779522e-05, |
| "loss": 0.7583, |
| "step": 778 |
| }, |
| { |
| "epoch": 3.163451776649746, |
| "grad_norm": 0.26551641668032355, |
| "learning_rate": 1.4263935345778202e-05, |
| "loss": 0.73, |
| "step": 779 |
| }, |
| { |
| "epoch": 3.16751269035533, |
| "grad_norm": 0.2944980787030092, |
| "learning_rate": 1.420958430225303e-05, |
| "loss": 0.758, |
| "step": 780 |
| }, |
| { |
| "epoch": 3.1715736040609137, |
| "grad_norm": 0.29781770495371135, |
| "learning_rate": 1.415527989393985e-05, |
| "loss": 0.7506, |
| "step": 781 |
| }, |
| { |
| "epoch": 3.1756345177664973, |
| "grad_norm": 0.34119082134701306, |
| "learning_rate": 1.410102255819891e-05, |
| "loss": 0.7481, |
| "step": 782 |
| }, |
| { |
| "epoch": 3.1796954314720813, |
| "grad_norm": 0.30851150885614853, |
| "learning_rate": 1.404681273201131e-05, |
| "loss": 0.7458, |
| "step": 783 |
| }, |
| { |
| "epoch": 3.183756345177665, |
| "grad_norm": 0.27361726931799646, |
| "learning_rate": 1.399265085197556e-05, |
| "loss": 0.7493, |
| "step": 784 |
| }, |
| { |
| "epoch": 3.187817258883249, |
| "grad_norm": 0.36306956336848056, |
| "learning_rate": 1.393853735430398e-05, |
| "loss": 0.7271, |
| "step": 785 |
| }, |
| { |
| "epoch": 3.1918781725888326, |
| "grad_norm": 0.3185922234214135, |
| "learning_rate": 1.3884472674819246e-05, |
| "loss": 0.7277, |
| "step": 786 |
| }, |
| { |
| "epoch": 3.195939086294416, |
| "grad_norm": 0.28815719373360027, |
| "learning_rate": 1.3830457248950864e-05, |
| "loss": 0.7519, |
| "step": 787 |
| }, |
| { |
| "epoch": 3.2, |
| "grad_norm": 0.30665509302604865, |
| "learning_rate": 1.377649151173163e-05, |
| "loss": 0.7478, |
| "step": 788 |
| }, |
| { |
| "epoch": 3.2040609137055838, |
| "grad_norm": 0.3239622910515329, |
| "learning_rate": 1.3722575897794181e-05, |
| "loss": 0.7368, |
| "step": 789 |
| }, |
| { |
| "epoch": 3.2081218274111674, |
| "grad_norm": 0.2829991605182016, |
| "learning_rate": 1.3668710841367472e-05, |
| "loss": 0.7311, |
| "step": 790 |
| }, |
| { |
| "epoch": 3.2121827411167514, |
| "grad_norm": 0.3925944810396778, |
| "learning_rate": 1.361489677627324e-05, |
| "loss": 0.7307, |
| "step": 791 |
| }, |
| { |
| "epoch": 3.216243654822335, |
| "grad_norm": 0.3117437800741885, |
| "learning_rate": 1.3561134135922585e-05, |
| "loss": 0.7496, |
| "step": 792 |
| }, |
| { |
| "epoch": 3.2203045685279186, |
| "grad_norm": 0.31087275099758055, |
| "learning_rate": 1.350742335331241e-05, |
| "loss": 0.7609, |
| "step": 793 |
| }, |
| { |
| "epoch": 3.2243654822335026, |
| "grad_norm": 0.33879709967295524, |
| "learning_rate": 1.345376486102198e-05, |
| "loss": 0.7392, |
| "step": 794 |
| }, |
| { |
| "epoch": 3.228426395939086, |
| "grad_norm": 0.31237863299632385, |
| "learning_rate": 1.3400159091209414e-05, |
| "loss": 0.7333, |
| "step": 795 |
| }, |
| { |
| "epoch": 3.2324873096446702, |
| "grad_norm": 0.3535882037053915, |
| "learning_rate": 1.3346606475608216e-05, |
| "loss": 0.7455, |
| "step": 796 |
| }, |
| { |
| "epoch": 3.236548223350254, |
| "grad_norm": 0.27427926271935554, |
| "learning_rate": 1.3293107445523781e-05, |
| "loss": 0.7412, |
| "step": 797 |
| }, |
| { |
| "epoch": 3.2406091370558374, |
| "grad_norm": 0.3151504447846241, |
| "learning_rate": 1.3239662431829949e-05, |
| "loss": 0.7303, |
| "step": 798 |
| }, |
| { |
| "epoch": 3.2446700507614215, |
| "grad_norm": 0.27001938316373064, |
| "learning_rate": 1.3186271864965509e-05, |
| "loss": 0.7475, |
| "step": 799 |
| }, |
| { |
| "epoch": 3.248730964467005, |
| "grad_norm": 0.2830417637242057, |
| "learning_rate": 1.3132936174930756e-05, |
| "loss": 0.7481, |
| "step": 800 |
| }, |
| { |
| "epoch": 3.252791878172589, |
| "grad_norm": 0.3056894647613015, |
| "learning_rate": 1.3079655791283995e-05, |
| "loss": 0.7522, |
| "step": 801 |
| }, |
| { |
| "epoch": 3.2568527918781727, |
| "grad_norm": 0.27733762682798163, |
| "learning_rate": 1.3026431143138108e-05, |
| "loss": 0.7344, |
| "step": 802 |
| }, |
| { |
| "epoch": 3.2609137055837563, |
| "grad_norm": 0.32005797529676006, |
| "learning_rate": 1.2973262659157114e-05, |
| "loss": 0.7362, |
| "step": 803 |
| }, |
| { |
| "epoch": 3.2649746192893403, |
| "grad_norm": 0.31805582361392576, |
| "learning_rate": 1.2920150767552651e-05, |
| "loss": 0.7488, |
| "step": 804 |
| }, |
| { |
| "epoch": 3.269035532994924, |
| "grad_norm": 0.3143165155925378, |
| "learning_rate": 1.2867095896080607e-05, |
| "loss": 0.738, |
| "step": 805 |
| }, |
| { |
| "epoch": 3.2730964467005075, |
| "grad_norm": 0.30874886747722174, |
| "learning_rate": 1.2814098472037612e-05, |
| "loss": 0.7424, |
| "step": 806 |
| }, |
| { |
| "epoch": 3.2771573604060915, |
| "grad_norm": 0.3010319724195612, |
| "learning_rate": 1.276115892225764e-05, |
| "loss": 0.7333, |
| "step": 807 |
| }, |
| { |
| "epoch": 3.281218274111675, |
| "grad_norm": 0.26121271855233724, |
| "learning_rate": 1.2708277673108555e-05, |
| "loss": 0.7286, |
| "step": 808 |
| }, |
| { |
| "epoch": 3.2852791878172587, |
| "grad_norm": 0.27455141861755006, |
| "learning_rate": 1.2655455150488649e-05, |
| "loss": 0.745, |
| "step": 809 |
| }, |
| { |
| "epoch": 3.2893401015228427, |
| "grad_norm": 0.29415969513042417, |
| "learning_rate": 1.2602691779823272e-05, |
| "loss": 0.77, |
| "step": 810 |
| }, |
| { |
| "epoch": 3.2934010152284263, |
| "grad_norm": 0.2911494860837187, |
| "learning_rate": 1.2549987986061355e-05, |
| "loss": 0.7276, |
| "step": 811 |
| }, |
| { |
| "epoch": 3.29746192893401, |
| "grad_norm": 0.2599383228152168, |
| "learning_rate": 1.2497344193672005e-05, |
| "loss": 0.7512, |
| "step": 812 |
| }, |
| { |
| "epoch": 3.301522842639594, |
| "grad_norm": 0.2791205334763667, |
| "learning_rate": 1.2444760826641092e-05, |
| "loss": 0.7253, |
| "step": 813 |
| }, |
| { |
| "epoch": 3.3055837563451775, |
| "grad_norm": 0.27353854314388765, |
| "learning_rate": 1.2392238308467817e-05, |
| "loss": 0.7418, |
| "step": 814 |
| }, |
| { |
| "epoch": 3.3096446700507616, |
| "grad_norm": 0.2510363756212501, |
| "learning_rate": 1.2339777062161326e-05, |
| "loss": 0.7363, |
| "step": 815 |
| }, |
| { |
| "epoch": 3.313705583756345, |
| "grad_norm": 0.2728828695927957, |
| "learning_rate": 1.2287377510237293e-05, |
| "loss": 0.7459, |
| "step": 816 |
| }, |
| { |
| "epoch": 3.3177664974619288, |
| "grad_norm": 0.2778220615086869, |
| "learning_rate": 1.2235040074714488e-05, |
| "loss": 0.7182, |
| "step": 817 |
| }, |
| { |
| "epoch": 3.321827411167513, |
| "grad_norm": 0.2706334326092133, |
| "learning_rate": 1.2182765177111434e-05, |
| "loss": 0.7524, |
| "step": 818 |
| }, |
| { |
| "epoch": 3.3258883248730964, |
| "grad_norm": 0.33292910716563195, |
| "learning_rate": 1.213055323844297e-05, |
| "loss": 0.7394, |
| "step": 819 |
| }, |
| { |
| "epoch": 3.3299492385786804, |
| "grad_norm": 0.2714101935547049, |
| "learning_rate": 1.2078404679216864e-05, |
| "loss": 0.7614, |
| "step": 820 |
| }, |
| { |
| "epoch": 3.334010152284264, |
| "grad_norm": 0.3322722662060862, |
| "learning_rate": 1.2026319919430458e-05, |
| "loss": 0.7233, |
| "step": 821 |
| }, |
| { |
| "epoch": 3.3380710659898476, |
| "grad_norm": 0.2540056373156356, |
| "learning_rate": 1.1974299378567227e-05, |
| "loss": 0.7463, |
| "step": 822 |
| }, |
| { |
| "epoch": 3.3421319796954316, |
| "grad_norm": 0.28821523737476706, |
| "learning_rate": 1.1922343475593462e-05, |
| "loss": 0.743, |
| "step": 823 |
| }, |
| { |
| "epoch": 3.3461928934010152, |
| "grad_norm": 0.2825372593715907, |
| "learning_rate": 1.187045262895488e-05, |
| "loss": 0.7439, |
| "step": 824 |
| }, |
| { |
| "epoch": 3.350253807106599, |
| "grad_norm": 0.26555647709243396, |
| "learning_rate": 1.1818627256573203e-05, |
| "loss": 0.7476, |
| "step": 825 |
| }, |
| { |
| "epoch": 3.354314720812183, |
| "grad_norm": 0.24979621920316314, |
| "learning_rate": 1.1766867775842864e-05, |
| "loss": 0.7732, |
| "step": 826 |
| }, |
| { |
| "epoch": 3.3583756345177664, |
| "grad_norm": 0.2496024750697965, |
| "learning_rate": 1.1715174603627615e-05, |
| "loss": 0.7254, |
| "step": 827 |
| }, |
| { |
| "epoch": 3.36243654822335, |
| "grad_norm": 0.25182054657540387, |
| "learning_rate": 1.1663548156257147e-05, |
| "loss": 0.7543, |
| "step": 828 |
| }, |
| { |
| "epoch": 3.366497461928934, |
| "grad_norm": 0.26570562705892076, |
| "learning_rate": 1.161198884952377e-05, |
| "loss": 0.7695, |
| "step": 829 |
| }, |
| { |
| "epoch": 3.3705583756345177, |
| "grad_norm": 0.23489487085423405, |
| "learning_rate": 1.1560497098679056e-05, |
| "loss": 0.762, |
| "step": 830 |
| }, |
| { |
| "epoch": 3.3746192893401017, |
| "grad_norm": 0.2501080611325196, |
| "learning_rate": 1.1509073318430479e-05, |
| "loss": 0.7365, |
| "step": 831 |
| }, |
| { |
| "epoch": 3.3786802030456853, |
| "grad_norm": 0.2556820948346217, |
| "learning_rate": 1.1457717922938116e-05, |
| "loss": 0.7829, |
| "step": 832 |
| }, |
| { |
| "epoch": 3.382741116751269, |
| "grad_norm": 0.23456955137291757, |
| "learning_rate": 1.1406431325811233e-05, |
| "loss": 0.7516, |
| "step": 833 |
| }, |
| { |
| "epoch": 3.386802030456853, |
| "grad_norm": 0.2483349006205051, |
| "learning_rate": 1.135521394010506e-05, |
| "loss": 0.7561, |
| "step": 834 |
| }, |
| { |
| "epoch": 3.3908629441624365, |
| "grad_norm": 0.2621579025203293, |
| "learning_rate": 1.1304066178317367e-05, |
| "loss": 0.7437, |
| "step": 835 |
| }, |
| { |
| "epoch": 3.3949238578680205, |
| "grad_norm": 0.234798906520539, |
| "learning_rate": 1.1252988452385199e-05, |
| "loss": 0.7451, |
| "step": 836 |
| }, |
| { |
| "epoch": 3.398984771573604, |
| "grad_norm": 0.2420752405876616, |
| "learning_rate": 1.1201981173681536e-05, |
| "loss": 0.731, |
| "step": 837 |
| }, |
| { |
| "epoch": 3.4030456852791877, |
| "grad_norm": 0.24001912636883815, |
| "learning_rate": 1.1151044753011991e-05, |
| "loss": 0.7305, |
| "step": 838 |
| }, |
| { |
| "epoch": 3.4071065989847718, |
| "grad_norm": 0.251533688715336, |
| "learning_rate": 1.1100179600611491e-05, |
| "loss": 0.7391, |
| "step": 839 |
| }, |
| { |
| "epoch": 3.4111675126903553, |
| "grad_norm": 0.25744572484429745, |
| "learning_rate": 1.1049386126140985e-05, |
| "loss": 0.7446, |
| "step": 840 |
| }, |
| { |
| "epoch": 3.415228426395939, |
| "grad_norm": 0.231718172223051, |
| "learning_rate": 1.0998664738684128e-05, |
| "loss": 0.75, |
| "step": 841 |
| }, |
| { |
| "epoch": 3.419289340101523, |
| "grad_norm": 0.2576424502903035, |
| "learning_rate": 1.0948015846744e-05, |
| "loss": 0.7274, |
| "step": 842 |
| }, |
| { |
| "epoch": 3.4233502538071066, |
| "grad_norm": 0.2819264357076971, |
| "learning_rate": 1.0897439858239832e-05, |
| "loss": 0.7612, |
| "step": 843 |
| }, |
| { |
| "epoch": 3.42741116751269, |
| "grad_norm": 0.23170687077949342, |
| "learning_rate": 1.0846937180503652e-05, |
| "loss": 0.745, |
| "step": 844 |
| }, |
| { |
| "epoch": 3.431472081218274, |
| "grad_norm": 0.2889169574975605, |
| "learning_rate": 1.0796508220277117e-05, |
| "loss": 0.7404, |
| "step": 845 |
| }, |
| { |
| "epoch": 3.4355329949238578, |
| "grad_norm": 0.24752001491585654, |
| "learning_rate": 1.0746153383708107e-05, |
| "loss": 0.7402, |
| "step": 846 |
| }, |
| { |
| "epoch": 3.439593908629442, |
| "grad_norm": 0.31282878984182255, |
| "learning_rate": 1.0695873076347579e-05, |
| "loss": 0.7414, |
| "step": 847 |
| }, |
| { |
| "epoch": 3.4436548223350254, |
| "grad_norm": 0.27827445872160866, |
| "learning_rate": 1.0645667703146205e-05, |
| "loss": 0.7464, |
| "step": 848 |
| }, |
| { |
| "epoch": 3.447715736040609, |
| "grad_norm": 0.25959820978042525, |
| "learning_rate": 1.0595537668451161e-05, |
| "loss": 0.7596, |
| "step": 849 |
| }, |
| { |
| "epoch": 3.451776649746193, |
| "grad_norm": 0.28009757453038014, |
| "learning_rate": 1.0545483376002854e-05, |
| "loss": 0.7331, |
| "step": 850 |
| }, |
| { |
| "epoch": 3.4558375634517766, |
| "grad_norm": 0.26441163416028046, |
| "learning_rate": 1.0495505228931676e-05, |
| "loss": 0.7417, |
| "step": 851 |
| }, |
| { |
| "epoch": 3.4598984771573607, |
| "grad_norm": 0.22478446900055882, |
| "learning_rate": 1.044560362975474e-05, |
| "loss": 0.7281, |
| "step": 852 |
| }, |
| { |
| "epoch": 3.4639593908629442, |
| "grad_norm": 0.26380059974827835, |
| "learning_rate": 1.0395778980372695e-05, |
| "loss": 0.7268, |
| "step": 853 |
| }, |
| { |
| "epoch": 3.468020304568528, |
| "grad_norm": 0.2236590626384194, |
| "learning_rate": 1.0346031682066381e-05, |
| "loss": 0.7242, |
| "step": 854 |
| }, |
| { |
| "epoch": 3.472081218274112, |
| "grad_norm": 0.225105392287092, |
| "learning_rate": 1.0296362135493724e-05, |
| "loss": 0.707, |
| "step": 855 |
| }, |
| { |
| "epoch": 3.4761421319796955, |
| "grad_norm": 0.25097569472952364, |
| "learning_rate": 1.0246770740686422e-05, |
| "loss": 0.7306, |
| "step": 856 |
| }, |
| { |
| "epoch": 3.480203045685279, |
| "grad_norm": 0.2403426115585771, |
| "learning_rate": 1.0197257897046743e-05, |
| "loss": 0.749, |
| "step": 857 |
| }, |
| { |
| "epoch": 3.484263959390863, |
| "grad_norm": 0.2446777617970813, |
| "learning_rate": 1.014782400334433e-05, |
| "loss": 0.7279, |
| "step": 858 |
| }, |
| { |
| "epoch": 3.4883248730964467, |
| "grad_norm": 0.2556873660668158, |
| "learning_rate": 1.009846945771296e-05, |
| "loss": 0.7414, |
| "step": 859 |
| }, |
| { |
| "epoch": 3.4923857868020303, |
| "grad_norm": 0.2609147461889837, |
| "learning_rate": 1.0049194657647363e-05, |
| "loss": 0.758, |
| "step": 860 |
| }, |
| { |
| "epoch": 3.4964467005076143, |
| "grad_norm": 0.2609577965875469, |
| "learning_rate": 1.0000000000000006e-05, |
| "loss": 0.757, |
| "step": 861 |
| }, |
| { |
| "epoch": 3.500507614213198, |
| "grad_norm": 0.25279183390096327, |
| "learning_rate": 9.950885880977891e-06, |
| "loss": 0.7288, |
| "step": 862 |
| }, |
| { |
| "epoch": 3.5045685279187815, |
| "grad_norm": 0.27486794928859987, |
| "learning_rate": 9.901852696139382e-06, |
| "loss": 0.746, |
| "step": 863 |
| }, |
| { |
| "epoch": 3.5086294416243655, |
| "grad_norm": 0.25677685542325684, |
| "learning_rate": 9.852900840391027e-06, |
| "loss": 0.7539, |
| "step": 864 |
| }, |
| { |
| "epoch": 3.512690355329949, |
| "grad_norm": 1.521482654503753, |
| "learning_rate": 9.804030707984313e-06, |
| "loss": 0.7517, |
| "step": 865 |
| }, |
| { |
| "epoch": 3.516751269035533, |
| "grad_norm": 0.2575599981638263, |
| "learning_rate": 9.755242692512599e-06, |
| "loss": 0.7333, |
| "step": 866 |
| }, |
| { |
| "epoch": 3.5208121827411167, |
| "grad_norm": 0.2417441375571431, |
| "learning_rate": 9.70653718690782e-06, |
| "loss": 0.7534, |
| "step": 867 |
| }, |
| { |
| "epoch": 3.5248730964467008, |
| "grad_norm": 0.24620512184778567, |
| "learning_rate": 9.657914583437454e-06, |
| "loss": 0.7414, |
| "step": 868 |
| }, |
| { |
| "epoch": 3.5289340101522844, |
| "grad_norm": 0.24115043325920094, |
| "learning_rate": 9.609375273701246e-06, |
| "loss": 0.7362, |
| "step": 869 |
| }, |
| { |
| "epoch": 3.532994923857868, |
| "grad_norm": 0.2752632961058416, |
| "learning_rate": 9.560919648628133e-06, |
| "loss": 0.7655, |
| "step": 870 |
| }, |
| { |
| "epoch": 3.537055837563452, |
| "grad_norm": 0.23278488625594862, |
| "learning_rate": 9.512548098473047e-06, |
| "loss": 0.739, |
| "step": 871 |
| }, |
| { |
| "epoch": 3.5411167512690356, |
| "grad_norm": 0.25951889371596865, |
| "learning_rate": 9.464261012813825e-06, |
| "loss": 0.7352, |
| "step": 872 |
| }, |
| { |
| "epoch": 3.545177664974619, |
| "grad_norm": 0.2290229790246288, |
| "learning_rate": 9.416058780547987e-06, |
| "loss": 0.731, |
| "step": 873 |
| }, |
| { |
| "epoch": 3.549238578680203, |
| "grad_norm": 0.24659024883115266, |
| "learning_rate": 9.367941789889714e-06, |
| "loss": 0.7694, |
| "step": 874 |
| }, |
| { |
| "epoch": 3.553299492385787, |
| "grad_norm": 0.25787404882197246, |
| "learning_rate": 9.319910428366607e-06, |
| "loss": 0.7276, |
| "step": 875 |
| }, |
| { |
| "epoch": 3.5573604060913704, |
| "grad_norm": 0.23615790052667904, |
| "learning_rate": 9.271965082816667e-06, |
| "loss": 0.732, |
| "step": 876 |
| }, |
| { |
| "epoch": 3.5614213197969544, |
| "grad_norm": 0.25379973725955685, |
| "learning_rate": 9.224106139385111e-06, |
| "loss": 0.7439, |
| "step": 877 |
| }, |
| { |
| "epoch": 3.565482233502538, |
| "grad_norm": 0.24250996181672738, |
| "learning_rate": 9.176333983521291e-06, |
| "loss": 0.7336, |
| "step": 878 |
| }, |
| { |
| "epoch": 3.5695431472081216, |
| "grad_norm": 0.27360033387740423, |
| "learning_rate": 9.12864899997558e-06, |
| "loss": 0.7243, |
| "step": 879 |
| }, |
| { |
| "epoch": 3.5736040609137056, |
| "grad_norm": 0.2508165228700863, |
| "learning_rate": 9.08105157279628e-06, |
| "loss": 0.7357, |
| "step": 880 |
| }, |
| { |
| "epoch": 3.5776649746192892, |
| "grad_norm": 0.25227549130403126, |
| "learning_rate": 9.03354208532653e-06, |
| "loss": 0.753, |
| "step": 881 |
| }, |
| { |
| "epoch": 3.581725888324873, |
| "grad_norm": 0.24155626292668905, |
| "learning_rate": 8.986120920201205e-06, |
| "loss": 0.7538, |
| "step": 882 |
| }, |
| { |
| "epoch": 3.585786802030457, |
| "grad_norm": 0.27107708304424194, |
| "learning_rate": 8.938788459343852e-06, |
| "loss": 0.7375, |
| "step": 883 |
| }, |
| { |
| "epoch": 3.5898477157360404, |
| "grad_norm": 0.2511326734171602, |
| "learning_rate": 8.8915450839636e-06, |
| "loss": 0.7288, |
| "step": 884 |
| }, |
| { |
| "epoch": 3.5939086294416245, |
| "grad_norm": 0.23109212835916476, |
| "learning_rate": 8.844391174552116e-06, |
| "loss": 0.7616, |
| "step": 885 |
| }, |
| { |
| "epoch": 3.597969543147208, |
| "grad_norm": 0.26245808772545715, |
| "learning_rate": 8.797327110880479e-06, |
| "loss": 0.7554, |
| "step": 886 |
| }, |
| { |
| "epoch": 3.602030456852792, |
| "grad_norm": 0.26545054867805085, |
| "learning_rate": 8.750353271996206e-06, |
| "loss": 0.7429, |
| "step": 887 |
| }, |
| { |
| "epoch": 3.6060913705583757, |
| "grad_norm": 0.22464792750044918, |
| "learning_rate": 8.703470036220132e-06, |
| "loss": 0.7491, |
| "step": 888 |
| }, |
| { |
| "epoch": 3.6101522842639593, |
| "grad_norm": 0.2307376202774878, |
| "learning_rate": 8.656677781143394e-06, |
| "loss": 0.7523, |
| "step": 889 |
| }, |
| { |
| "epoch": 3.6142131979695433, |
| "grad_norm": 0.23944127080137503, |
| "learning_rate": 8.609976883624377e-06, |
| "loss": 0.7417, |
| "step": 890 |
| }, |
| { |
| "epoch": 3.618274111675127, |
| "grad_norm": 0.22982245136354854, |
| "learning_rate": 8.563367719785698e-06, |
| "loss": 0.7139, |
| "step": 891 |
| }, |
| { |
| "epoch": 3.6223350253807105, |
| "grad_norm": 0.22948869782898942, |
| "learning_rate": 8.516850665011138e-06, |
| "loss": 0.7405, |
| "step": 892 |
| }, |
| { |
| "epoch": 3.6263959390862945, |
| "grad_norm": 0.22700879030682156, |
| "learning_rate": 8.47042609394269e-06, |
| "loss": 0.7395, |
| "step": 893 |
| }, |
| { |
| "epoch": 3.630456852791878, |
| "grad_norm": 0.23403438807322638, |
| "learning_rate": 8.424094380477432e-06, |
| "loss": 0.7403, |
| "step": 894 |
| }, |
| { |
| "epoch": 3.6345177664974617, |
| "grad_norm": 0.2599099157052386, |
| "learning_rate": 8.37785589776465e-06, |
| "loss": 0.7594, |
| "step": 895 |
| }, |
| { |
| "epoch": 3.6385786802030458, |
| "grad_norm": 0.24402970348208128, |
| "learning_rate": 8.331711018202694e-06, |
| "loss": 0.7398, |
| "step": 896 |
| }, |
| { |
| "epoch": 3.6426395939086293, |
| "grad_norm": 0.2499812856959354, |
| "learning_rate": 8.285660113436104e-06, |
| "loss": 0.75, |
| "step": 897 |
| }, |
| { |
| "epoch": 3.646700507614213, |
| "grad_norm": 0.26496932894568204, |
| "learning_rate": 8.239703554352527e-06, |
| "loss": 0.7405, |
| "step": 898 |
| }, |
| { |
| "epoch": 3.650761421319797, |
| "grad_norm": 0.24588632730665527, |
| "learning_rate": 8.193841711079775e-06, |
| "loss": 0.749, |
| "step": 899 |
| }, |
| { |
| "epoch": 3.6548223350253806, |
| "grad_norm": 0.22798888412961407, |
| "learning_rate": 8.148074952982828e-06, |
| "loss": 0.7565, |
| "step": 900 |
| }, |
| { |
| "epoch": 3.6588832487309646, |
| "grad_norm": 0.26382430874419904, |
| "learning_rate": 8.102403648660859e-06, |
| "loss": 0.7253, |
| "step": 901 |
| }, |
| { |
| "epoch": 3.662944162436548, |
| "grad_norm": 0.2230223665177076, |
| "learning_rate": 8.056828165944282e-06, |
| "loss": 0.7324, |
| "step": 902 |
| }, |
| { |
| "epoch": 3.6670050761421322, |
| "grad_norm": 0.23200546768626257, |
| "learning_rate": 8.011348871891762e-06, |
| "loss": 0.7272, |
| "step": 903 |
| }, |
| { |
| "epoch": 3.671065989847716, |
| "grad_norm": 0.22992878540445752, |
| "learning_rate": 7.965966132787287e-06, |
| "loss": 0.7445, |
| "step": 904 |
| }, |
| { |
| "epoch": 3.6751269035532994, |
| "grad_norm": 0.23633450124422825, |
| "learning_rate": 7.920680314137189e-06, |
| "loss": 0.7217, |
| "step": 905 |
| }, |
| { |
| "epoch": 3.6791878172588834, |
| "grad_norm": 0.23273894533582937, |
| "learning_rate": 7.875491780667246e-06, |
| "loss": 0.7738, |
| "step": 906 |
| }, |
| { |
| "epoch": 3.683248730964467, |
| "grad_norm": 0.2322736235018593, |
| "learning_rate": 7.830400896319667e-06, |
| "loss": 0.7447, |
| "step": 907 |
| }, |
| { |
| "epoch": 3.6873096446700506, |
| "grad_norm": 0.26055207046750123, |
| "learning_rate": 7.785408024250259e-06, |
| "loss": 0.7399, |
| "step": 908 |
| }, |
| { |
| "epoch": 3.6913705583756347, |
| "grad_norm": 0.20152296096342115, |
| "learning_rate": 7.74051352682542e-06, |
| "loss": 0.7447, |
| "step": 909 |
| }, |
| { |
| "epoch": 3.6954314720812182, |
| "grad_norm": 0.24735431056615678, |
| "learning_rate": 7.695717765619257e-06, |
| "loss": 0.7419, |
| "step": 910 |
| }, |
| { |
| "epoch": 3.699492385786802, |
| "grad_norm": 0.25129461689703236, |
| "learning_rate": 7.651021101410673e-06, |
| "loss": 0.7503, |
| "step": 911 |
| }, |
| { |
| "epoch": 3.703553299492386, |
| "grad_norm": 0.2376847706323741, |
| "learning_rate": 7.606423894180464e-06, |
| "loss": 0.7393, |
| "step": 912 |
| }, |
| { |
| "epoch": 3.7076142131979695, |
| "grad_norm": 0.24136592348794328, |
| "learning_rate": 7.56192650310839e-06, |
| "loss": 0.7442, |
| "step": 913 |
| }, |
| { |
| "epoch": 3.711675126903553, |
| "grad_norm": 0.22503619709470735, |
| "learning_rate": 7.517529286570349e-06, |
| "loss": 0.7411, |
| "step": 914 |
| }, |
| { |
| "epoch": 3.715736040609137, |
| "grad_norm": 0.23227373547827485, |
| "learning_rate": 7.473232602135387e-06, |
| "loss": 0.7389, |
| "step": 915 |
| }, |
| { |
| "epoch": 3.7197969543147207, |
| "grad_norm": 0.2316689555657671, |
| "learning_rate": 7.429036806562935e-06, |
| "loss": 0.7553, |
| "step": 916 |
| }, |
| { |
| "epoch": 3.7238578680203047, |
| "grad_norm": 0.21575118586596123, |
| "learning_rate": 7.3849422557998455e-06, |
| "loss": 0.7314, |
| "step": 917 |
| }, |
| { |
| "epoch": 3.7279187817258883, |
| "grad_norm": 0.25045013851788955, |
| "learning_rate": 7.340949304977567e-06, |
| "loss": 0.7544, |
| "step": 918 |
| }, |
| { |
| "epoch": 3.7319796954314723, |
| "grad_norm": 0.21393928325341693, |
| "learning_rate": 7.297058308409282e-06, |
| "loss": 0.7329, |
| "step": 919 |
| }, |
| { |
| "epoch": 3.736040609137056, |
| "grad_norm": 0.2372466928285262, |
| "learning_rate": 7.25326961958704e-06, |
| "loss": 0.7481, |
| "step": 920 |
| }, |
| { |
| "epoch": 3.7401015228426395, |
| "grad_norm": 0.2521778142033679, |
| "learning_rate": 7.209583591178921e-06, |
| "loss": 0.7484, |
| "step": 921 |
| }, |
| { |
| "epoch": 3.7441624365482236, |
| "grad_norm": 0.24188938258053216, |
| "learning_rate": 7.1660005750261925e-06, |
| "loss": 0.7544, |
| "step": 922 |
| }, |
| { |
| "epoch": 3.748223350253807, |
| "grad_norm": 0.23184664300726707, |
| "learning_rate": 7.1225209221404765e-06, |
| "loss": 0.7388, |
| "step": 923 |
| }, |
| { |
| "epoch": 3.7522842639593907, |
| "grad_norm": 0.2510462743381437, |
| "learning_rate": 7.079144982700909e-06, |
| "loss": 0.7282, |
| "step": 924 |
| }, |
| { |
| "epoch": 3.7563451776649748, |
| "grad_norm": 0.2980168710199434, |
| "learning_rate": 7.0358731060513695e-06, |
| "loss": 0.7561, |
| "step": 925 |
| }, |
| { |
| "epoch": 3.7604060913705584, |
| "grad_norm": 0.24450263780934453, |
| "learning_rate": 6.99270564069757e-06, |
| "loss": 0.7294, |
| "step": 926 |
| }, |
| { |
| "epoch": 3.764467005076142, |
| "grad_norm": 0.24230554082720565, |
| "learning_rate": 6.949642934304375e-06, |
| "loss": 0.7252, |
| "step": 927 |
| }, |
| { |
| "epoch": 3.768527918781726, |
| "grad_norm": 0.22714615069749453, |
| "learning_rate": 6.906685333692871e-06, |
| "loss": 0.7576, |
| "step": 928 |
| }, |
| { |
| "epoch": 3.7725888324873096, |
| "grad_norm": 0.25676800727043986, |
| "learning_rate": 6.86383318483769e-06, |
| "loss": 0.7315, |
| "step": 929 |
| }, |
| { |
| "epoch": 3.776649746192893, |
| "grad_norm": 0.23243017026901552, |
| "learning_rate": 6.821086832864139e-06, |
| "loss": 0.7501, |
| "step": 930 |
| }, |
| { |
| "epoch": 3.780710659898477, |
| "grad_norm": 0.23387701007419862, |
| "learning_rate": 6.77844662204546e-06, |
| "loss": 0.7689, |
| "step": 931 |
| }, |
| { |
| "epoch": 3.784771573604061, |
| "grad_norm": 0.24240846133275248, |
| "learning_rate": 6.7359128958000455e-06, |
| "loss": 0.7036, |
| "step": 932 |
| }, |
| { |
| "epoch": 3.788832487309645, |
| "grad_norm": 0.23625124060915847, |
| "learning_rate": 6.693485996688695e-06, |
| "loss": 0.7391, |
| "step": 933 |
| }, |
| { |
| "epoch": 3.7928934010152284, |
| "grad_norm": 0.2447453411043934, |
| "learning_rate": 6.651166266411801e-06, |
| "loss": 0.7406, |
| "step": 934 |
| }, |
| { |
| "epoch": 3.796954314720812, |
| "grad_norm": 0.25843158856773174, |
| "learning_rate": 6.6089540458066725e-06, |
| "loss": 0.743, |
| "step": 935 |
| }, |
| { |
| "epoch": 3.801015228426396, |
| "grad_norm": 0.26033849403138104, |
| "learning_rate": 6.566849674844711e-06, |
| "loss": 0.7265, |
| "step": 936 |
| }, |
| { |
| "epoch": 3.8050761421319796, |
| "grad_norm": 0.2303788156060688, |
| "learning_rate": 6.524853492628747e-06, |
| "loss": 0.7392, |
| "step": 937 |
| }, |
| { |
| "epoch": 3.8091370558375637, |
| "grad_norm": 0.2250808956355806, |
| "learning_rate": 6.4829658373902536e-06, |
| "loss": 0.7596, |
| "step": 938 |
| }, |
| { |
| "epoch": 3.8131979695431473, |
| "grad_norm": 0.22212031108153166, |
| "learning_rate": 6.441187046486648e-06, |
| "loss": 0.7201, |
| "step": 939 |
| }, |
| { |
| "epoch": 3.817258883248731, |
| "grad_norm": 0.265299499761227, |
| "learning_rate": 6.399517456398567e-06, |
| "loss": 0.7549, |
| "step": 940 |
| }, |
| { |
| "epoch": 3.821319796954315, |
| "grad_norm": 0.2078882009691513, |
| "learning_rate": 6.357957402727164e-06, |
| "loss": 0.7249, |
| "step": 941 |
| }, |
| { |
| "epoch": 3.8253807106598985, |
| "grad_norm": 0.21527356033807118, |
| "learning_rate": 6.316507220191395e-06, |
| "loss": 0.7403, |
| "step": 942 |
| }, |
| { |
| "epoch": 3.829441624365482, |
| "grad_norm": 0.24764348029490085, |
| "learning_rate": 6.275167242625331e-06, |
| "loss": 0.7422, |
| "step": 943 |
| }, |
| { |
| "epoch": 3.833502538071066, |
| "grad_norm": 0.22004098644932552, |
| "learning_rate": 6.233937802975471e-06, |
| "loss": 0.7391, |
| "step": 944 |
| }, |
| { |
| "epoch": 3.8375634517766497, |
| "grad_norm": 0.22749877075828798, |
| "learning_rate": 6.192819233298046e-06, |
| "loss": 0.7559, |
| "step": 945 |
| }, |
| { |
| "epoch": 3.8416243654822333, |
| "grad_norm": 0.22858436995937756, |
| "learning_rate": 6.151811864756383e-06, |
| "loss": 0.7231, |
| "step": 946 |
| }, |
| { |
| "epoch": 3.8456852791878173, |
| "grad_norm": 0.20151570292625065, |
| "learning_rate": 6.1109160276181655e-06, |
| "loss": 0.743, |
| "step": 947 |
| }, |
| { |
| "epoch": 3.849746192893401, |
| "grad_norm": 0.2068047205090228, |
| "learning_rate": 6.070132051252868e-06, |
| "loss": 0.7314, |
| "step": 948 |
| }, |
| { |
| "epoch": 3.8538071065989845, |
| "grad_norm": 0.2910426669889258, |
| "learning_rate": 6.0294602641290034e-06, |
| "loss": 0.7366, |
| "step": 949 |
| }, |
| { |
| "epoch": 3.8578680203045685, |
| "grad_norm": 0.21071386184491267, |
| "learning_rate": 5.988900993811575e-06, |
| "loss": 0.7436, |
| "step": 950 |
| }, |
| { |
| "epoch": 3.861928934010152, |
| "grad_norm": 0.22849085311042258, |
| "learning_rate": 5.948454566959363e-06, |
| "loss": 0.7555, |
| "step": 951 |
| }, |
| { |
| "epoch": 3.865989847715736, |
| "grad_norm": 0.2255294508140575, |
| "learning_rate": 5.908121309322328e-06, |
| "loss": 0.7508, |
| "step": 952 |
| }, |
| { |
| "epoch": 3.8700507614213198, |
| "grad_norm": 0.22562347706919952, |
| "learning_rate": 5.867901545738976e-06, |
| "loss": 0.7519, |
| "step": 953 |
| }, |
| { |
| "epoch": 3.874111675126904, |
| "grad_norm": 0.20458850987797497, |
| "learning_rate": 5.827795600133774e-06, |
| "loss": 0.7458, |
| "step": 954 |
| }, |
| { |
| "epoch": 3.8781725888324874, |
| "grad_norm": 0.22629134938810985, |
| "learning_rate": 5.787803795514466e-06, |
| "loss": 0.7218, |
| "step": 955 |
| }, |
| { |
| "epoch": 3.882233502538071, |
| "grad_norm": 0.23317151394054736, |
| "learning_rate": 5.747926453969576e-06, |
| "loss": 0.7422, |
| "step": 956 |
| }, |
| { |
| "epoch": 3.886294416243655, |
| "grad_norm": 0.22632132880013398, |
| "learning_rate": 5.708163896665708e-06, |
| "loss": 0.7186, |
| "step": 957 |
| }, |
| { |
| "epoch": 3.8903553299492386, |
| "grad_norm": 0.22208728288117852, |
| "learning_rate": 5.668516443845047e-06, |
| "loss": 0.7404, |
| "step": 958 |
| }, |
| { |
| "epoch": 3.894416243654822, |
| "grad_norm": 0.2160528938183103, |
| "learning_rate": 5.6289844148227225e-06, |
| "loss": 0.7405, |
| "step": 959 |
| }, |
| { |
| "epoch": 3.8984771573604062, |
| "grad_norm": 0.21110092988902335, |
| "learning_rate": 5.5895681279842615e-06, |
| "loss": 0.7039, |
| "step": 960 |
| }, |
| { |
| "epoch": 3.90253807106599, |
| "grad_norm": 0.21201570315614024, |
| "learning_rate": 5.550267900783019e-06, |
| "loss": 0.7549, |
| "step": 961 |
| }, |
| { |
| "epoch": 3.9065989847715734, |
| "grad_norm": 0.21311262784525606, |
| "learning_rate": 5.511084049737623e-06, |
| "loss": 0.7173, |
| "step": 962 |
| }, |
| { |
| "epoch": 3.9106598984771574, |
| "grad_norm": 0.21411990251735186, |
| "learning_rate": 5.4720168904294215e-06, |
| "loss": 0.7435, |
| "step": 963 |
| }, |
| { |
| "epoch": 3.914720812182741, |
| "grad_norm": 0.21849107730150769, |
| "learning_rate": 5.433066737499948e-06, |
| "loss": 0.7541, |
| "step": 964 |
| }, |
| { |
| "epoch": 3.9187817258883246, |
| "grad_norm": 0.20294153131438714, |
| "learning_rate": 5.394233904648376e-06, |
| "loss": 0.7462, |
| "step": 965 |
| }, |
| { |
| "epoch": 3.9228426395939087, |
| "grad_norm": 0.21463427345226635, |
| "learning_rate": 5.355518704628997e-06, |
| "loss": 0.7239, |
| "step": 966 |
| }, |
| { |
| "epoch": 3.9269035532994923, |
| "grad_norm": 0.23671657537172908, |
| "learning_rate": 5.316921449248731e-06, |
| "loss": 0.7487, |
| "step": 967 |
| }, |
| { |
| "epoch": 3.9309644670050763, |
| "grad_norm": 0.23363893632078345, |
| "learning_rate": 5.278442449364538e-06, |
| "loss": 0.7354, |
| "step": 968 |
| }, |
| { |
| "epoch": 3.93502538071066, |
| "grad_norm": 0.19520580298691356, |
| "learning_rate": 5.240082014881016e-06, |
| "loss": 0.7448, |
| "step": 969 |
| }, |
| { |
| "epoch": 3.939086294416244, |
| "grad_norm": 0.19230981141514814, |
| "learning_rate": 5.201840454747822e-06, |
| "loss": 0.748, |
| "step": 970 |
| }, |
| { |
| "epoch": 3.9431472081218275, |
| "grad_norm": 0.20023436700406883, |
| "learning_rate": 5.163718076957223e-06, |
| "loss": 0.7116, |
| "step": 971 |
| }, |
| { |
| "epoch": 3.947208121827411, |
| "grad_norm": 0.21296565270842732, |
| "learning_rate": 5.125715188541609e-06, |
| "loss": 0.7229, |
| "step": 972 |
| }, |
| { |
| "epoch": 3.951269035532995, |
| "grad_norm": 0.22662415287060791, |
| "learning_rate": 5.087832095571021e-06, |
| "loss": 0.7368, |
| "step": 973 |
| }, |
| { |
| "epoch": 3.9553299492385787, |
| "grad_norm": 0.22248967061653518, |
| "learning_rate": 5.0500691031506766e-06, |
| "loss": 0.7652, |
| "step": 974 |
| }, |
| { |
| "epoch": 3.9593908629441623, |
| "grad_norm": 0.2099808059460677, |
| "learning_rate": 5.01242651541854e-06, |
| "loss": 0.7357, |
| "step": 975 |
| }, |
| { |
| "epoch": 3.9634517766497463, |
| "grad_norm": 0.20610965580442173, |
| "learning_rate": 4.974904635542815e-06, |
| "loss": 0.7547, |
| "step": 976 |
| }, |
| { |
| "epoch": 3.96751269035533, |
| "grad_norm": 0.21603019923085653, |
| "learning_rate": 4.937503765719582e-06, |
| "loss": 0.7217, |
| "step": 977 |
| }, |
| { |
| "epoch": 3.9715736040609135, |
| "grad_norm": 0.23432127437655537, |
| "learning_rate": 4.900224207170299e-06, |
| "loss": 0.7483, |
| "step": 978 |
| }, |
| { |
| "epoch": 3.9756345177664976, |
| "grad_norm": 0.2579018922203181, |
| "learning_rate": 4.8630662601394065e-06, |
| "loss": 0.7227, |
| "step": 979 |
| }, |
| { |
| "epoch": 3.979695431472081, |
| "grad_norm": 0.2424333544891702, |
| "learning_rate": 4.8260302238918995e-06, |
| "loss": 0.7498, |
| "step": 980 |
| }, |
| { |
| "epoch": 3.9837563451776647, |
| "grad_norm": 0.21306614085277784, |
| "learning_rate": 4.789116396710924e-06, |
| "loss": 0.7365, |
| "step": 981 |
| }, |
| { |
| "epoch": 3.987817258883249, |
| "grad_norm": 0.21639303553147837, |
| "learning_rate": 4.752325075895368e-06, |
| "loss": 0.75, |
| "step": 982 |
| }, |
| { |
| "epoch": 3.9918781725888324, |
| "grad_norm": 0.2112003757142461, |
| "learning_rate": 4.715656557757473e-06, |
| "loss": 0.7479, |
| "step": 983 |
| }, |
| { |
| "epoch": 3.9959390862944164, |
| "grad_norm": 0.20253527571826366, |
| "learning_rate": 4.679111137620442e-06, |
| "loss": 0.731, |
| "step": 984 |
| }, |
| { |
| "epoch": 4.0, |
| "grad_norm": 0.41521391532492863, |
| "learning_rate": 4.6426891098160585e-06, |
| "loss": 1.2917, |
| "step": 985 |
| }, |
| { |
| "epoch": 4.004060913705584, |
| "grad_norm": 0.2850560941440662, |
| "learning_rate": 4.6063907676823474e-06, |
| "loss": 0.7141, |
| "step": 986 |
| }, |
| { |
| "epoch": 4.008121827411167, |
| "grad_norm": 0.2888542907877441, |
| "learning_rate": 4.570216403561141e-06, |
| "loss": 0.7148, |
| "step": 987 |
| }, |
| { |
| "epoch": 4.012182741116751, |
| "grad_norm": 0.27491747442561987, |
| "learning_rate": 4.534166308795815e-06, |
| "loss": 0.7344, |
| "step": 988 |
| }, |
| { |
| "epoch": 4.016243654822335, |
| "grad_norm": 0.28179527762540824, |
| "learning_rate": 4.498240773728859e-06, |
| "loss": 0.7081, |
| "step": 989 |
| }, |
| { |
| "epoch": 4.020304568527918, |
| "grad_norm": 0.2518788378654235, |
| "learning_rate": 4.462440087699609e-06, |
| "loss": 0.7259, |
| "step": 990 |
| }, |
| { |
| "epoch": 4.024365482233502, |
| "grad_norm": 0.2942921376143902, |
| "learning_rate": 4.426764539041861e-06, |
| "loss": 0.7102, |
| "step": 991 |
| }, |
| { |
| "epoch": 4.0284263959390865, |
| "grad_norm": 0.2857739782152199, |
| "learning_rate": 4.391214415081582e-06, |
| "loss": 0.7199, |
| "step": 992 |
| }, |
| { |
| "epoch": 4.03248730964467, |
| "grad_norm": 0.24117619047634056, |
| "learning_rate": 4.355790002134579e-06, |
| "loss": 0.704, |
| "step": 993 |
| }, |
| { |
| "epoch": 4.036548223350254, |
| "grad_norm": 0.24701796794209915, |
| "learning_rate": 4.320491585504207e-06, |
| "loss": 0.7082, |
| "step": 994 |
| }, |
| { |
| "epoch": 4.040609137055838, |
| "grad_norm": 0.2747447351801685, |
| "learning_rate": 4.2853194494790615e-06, |
| "loss": 0.7164, |
| "step": 995 |
| }, |
| { |
| "epoch": 4.044670050761422, |
| "grad_norm": 0.2557174468414056, |
| "learning_rate": 4.250273877330691e-06, |
| "loss": 0.7192, |
| "step": 996 |
| }, |
| { |
| "epoch": 4.048730964467005, |
| "grad_norm": 0.26971791828027597, |
| "learning_rate": 4.215355151311313e-06, |
| "loss": 0.7046, |
| "step": 997 |
| }, |
| { |
| "epoch": 4.052791878172589, |
| "grad_norm": 0.24428970205258202, |
| "learning_rate": 4.180563552651542e-06, |
| "loss": 0.6962, |
| "step": 998 |
| }, |
| { |
| "epoch": 4.056852791878173, |
| "grad_norm": 0.2666870737798325, |
| "learning_rate": 4.145899361558147e-06, |
| "loss": 0.7145, |
| "step": 999 |
| }, |
| { |
| "epoch": 4.060913705583756, |
| "grad_norm": 0.28527503856519576, |
| "learning_rate": 4.111362857211738e-06, |
| "loss": 0.7295, |
| "step": 1000 |
| }, |
| { |
| "epoch": 4.06497461928934, |
| "grad_norm": 0.24348351825634046, |
| "learning_rate": 4.076954317764592e-06, |
| "loss": 0.7155, |
| "step": 1001 |
| }, |
| { |
| "epoch": 4.069035532994924, |
| "grad_norm": 0.21531748782793372, |
| "learning_rate": 4.042674020338335e-06, |
| "loss": 0.7031, |
| "step": 1002 |
| }, |
| { |
| "epoch": 4.073096446700507, |
| "grad_norm": 0.25618200384083717, |
| "learning_rate": 4.0085222410217835e-06, |
| "loss": 0.7012, |
| "step": 1003 |
| }, |
| { |
| "epoch": 4.077157360406091, |
| "grad_norm": 0.2817513510872296, |
| "learning_rate": 3.974499254868674e-06, |
| "loss": 0.7155, |
| "step": 1004 |
| }, |
| { |
| "epoch": 4.081218274111675, |
| "grad_norm": 0.22361220658714553, |
| "learning_rate": 3.940605335895451e-06, |
| "loss": 0.7107, |
| "step": 1005 |
| }, |
| { |
| "epoch": 4.0852791878172585, |
| "grad_norm": 0.21638444670819842, |
| "learning_rate": 3.90684075707908e-06, |
| "loss": 0.7024, |
| "step": 1006 |
| }, |
| { |
| "epoch": 4.0893401015228426, |
| "grad_norm": 0.25076449405014356, |
| "learning_rate": 3.8732057903548505e-06, |
| "loss": 0.727, |
| "step": 1007 |
| }, |
| { |
| "epoch": 4.093401015228427, |
| "grad_norm": 0.22795220989908724, |
| "learning_rate": 3.8397007066141375e-06, |
| "loss": 0.701, |
| "step": 1008 |
| }, |
| { |
| "epoch": 4.09746192893401, |
| "grad_norm": 0.21653887558824297, |
| "learning_rate": 3.806325775702304e-06, |
| "loss": 0.709, |
| "step": 1009 |
| }, |
| { |
| "epoch": 4.101522842639594, |
| "grad_norm": 0.23363907056199035, |
| "learning_rate": 3.773081266416434e-06, |
| "loss": 0.7121, |
| "step": 1010 |
| }, |
| { |
| "epoch": 4.105583756345178, |
| "grad_norm": 0.2484094886279314, |
| "learning_rate": 3.739967446503245e-06, |
| "loss": 0.6961, |
| "step": 1011 |
| }, |
| { |
| "epoch": 4.109644670050762, |
| "grad_norm": 0.2129494704600493, |
| "learning_rate": 3.706984582656894e-06, |
| "loss": 0.7008, |
| "step": 1012 |
| }, |
| { |
| "epoch": 4.113705583756345, |
| "grad_norm": 0.21217402119089102, |
| "learning_rate": 3.6741329405168237e-06, |
| "loss": 0.7095, |
| "step": 1013 |
| }, |
| { |
| "epoch": 4.117766497461929, |
| "grad_norm": 0.2447940648563814, |
| "learning_rate": 3.641412784665648e-06, |
| "loss": 0.6999, |
| "step": 1014 |
| }, |
| { |
| "epoch": 4.121827411167513, |
| "grad_norm": 0.25104323364533415, |
| "learning_rate": 3.608824378627005e-06, |
| "loss": 0.7023, |
| "step": 1015 |
| }, |
| { |
| "epoch": 4.125888324873096, |
| "grad_norm": 0.22674342612180473, |
| "learning_rate": 3.5763679848634337e-06, |
| "loss": 0.7353, |
| "step": 1016 |
| }, |
| { |
| "epoch": 4.12994923857868, |
| "grad_norm": 0.21052184913367925, |
| "learning_rate": 3.544043864774269e-06, |
| "loss": 0.7086, |
| "step": 1017 |
| }, |
| { |
| "epoch": 4.134010152284264, |
| "grad_norm": 0.2143451385751272, |
| "learning_rate": 3.5118522786935282e-06, |
| "loss": 0.7201, |
| "step": 1018 |
| }, |
| { |
| "epoch": 4.138071065989847, |
| "grad_norm": 0.22573926325317895, |
| "learning_rate": 3.479793485887819e-06, |
| "loss": 0.726, |
| "step": 1019 |
| }, |
| { |
| "epoch": 4.1421319796954315, |
| "grad_norm": 0.20850431478139123, |
| "learning_rate": 3.4478677445542653e-06, |
| "loss": 0.7069, |
| "step": 1020 |
| }, |
| { |
| "epoch": 4.1461928934010155, |
| "grad_norm": 0.23320933559391355, |
| "learning_rate": 3.4160753118183767e-06, |
| "loss": 0.7201, |
| "step": 1021 |
| }, |
| { |
| "epoch": 4.150253807106599, |
| "grad_norm": 0.24789716827428848, |
| "learning_rate": 3.3844164437320527e-06, |
| "loss": 0.7112, |
| "step": 1022 |
| }, |
| { |
| "epoch": 4.154314720812183, |
| "grad_norm": 0.21755084398521404, |
| "learning_rate": 3.3528913952714558e-06, |
| "loss": 0.7043, |
| "step": 1023 |
| }, |
| { |
| "epoch": 4.158375634517767, |
| "grad_norm": 0.19663474021805777, |
| "learning_rate": 3.321500420335e-06, |
| "loss": 0.7349, |
| "step": 1024 |
| }, |
| { |
| "epoch": 4.16243654822335, |
| "grad_norm": 0.22931937425262247, |
| "learning_rate": 3.290243771741275e-06, |
| "loss": 0.709, |
| "step": 1025 |
| }, |
| { |
| "epoch": 4.166497461928934, |
| "grad_norm": 0.21500253239701286, |
| "learning_rate": 3.2591217012270325e-06, |
| "loss": 0.7039, |
| "step": 1026 |
| }, |
| { |
| "epoch": 4.170558375634518, |
| "grad_norm": 0.23206521450620168, |
| "learning_rate": 3.228134459445149e-06, |
| "loss": 0.7176, |
| "step": 1027 |
| }, |
| { |
| "epoch": 4.174619289340102, |
| "grad_norm": 0.19993807293873533, |
| "learning_rate": 3.1972822959626205e-06, |
| "loss": 0.7098, |
| "step": 1028 |
| }, |
| { |
| "epoch": 4.178680203045685, |
| "grad_norm": 0.20085679030024695, |
| "learning_rate": 3.166565459258513e-06, |
| "loss": 0.7057, |
| "step": 1029 |
| }, |
| { |
| "epoch": 4.182741116751269, |
| "grad_norm": 0.2021915048534329, |
| "learning_rate": 3.1359841967220193e-06, |
| "loss": 0.7084, |
| "step": 1030 |
| }, |
| { |
| "epoch": 4.186802030456853, |
| "grad_norm": 0.19770791555426856, |
| "learning_rate": 3.105538754650419e-06, |
| "loss": 0.6914, |
| "step": 1031 |
| }, |
| { |
| "epoch": 4.190862944162436, |
| "grad_norm": 0.22757057414678655, |
| "learning_rate": 3.07522937824712e-06, |
| "loss": 0.7142, |
| "step": 1032 |
| }, |
| { |
| "epoch": 4.19492385786802, |
| "grad_norm": 0.22239309081799277, |
| "learning_rate": 3.0450563116196697e-06, |
| "loss": 0.7113, |
| "step": 1033 |
| }, |
| { |
| "epoch": 4.198984771573604, |
| "grad_norm": 0.21856109868425377, |
| "learning_rate": 3.0150197977778008e-06, |
| "loss": 0.6911, |
| "step": 1034 |
| }, |
| { |
| "epoch": 4.2030456852791875, |
| "grad_norm": 0.22064476833728447, |
| "learning_rate": 2.985120078631465e-06, |
| "loss": 0.6804, |
| "step": 1035 |
| }, |
| { |
| "epoch": 4.207106598984772, |
| "grad_norm": 0.188327941405046, |
| "learning_rate": 2.9553573949888893e-06, |
| "loss": 0.6916, |
| "step": 1036 |
| }, |
| { |
| "epoch": 4.211167512690356, |
| "grad_norm": 0.21221941806893596, |
| "learning_rate": 2.9257319865546384e-06, |
| "loss": 0.7101, |
| "step": 1037 |
| }, |
| { |
| "epoch": 4.215228426395939, |
| "grad_norm": 0.20174563385899463, |
| "learning_rate": 2.896244091927678e-06, |
| "loss": 0.704, |
| "step": 1038 |
| }, |
| { |
| "epoch": 4.219289340101523, |
| "grad_norm": 0.20613064430934083, |
| "learning_rate": 2.8668939485994584e-06, |
| "loss": 0.7144, |
| "step": 1039 |
| }, |
| { |
| "epoch": 4.223350253807107, |
| "grad_norm": 0.20587529730483234, |
| "learning_rate": 2.837681792951994e-06, |
| "loss": 0.7253, |
| "step": 1040 |
| }, |
| { |
| "epoch": 4.22741116751269, |
| "grad_norm": 0.20815904060410292, |
| "learning_rate": 2.808607860255981e-06, |
| "loss": 0.7254, |
| "step": 1041 |
| }, |
| { |
| "epoch": 4.231472081218274, |
| "grad_norm": 0.22856615650465031, |
| "learning_rate": 2.7796723846688634e-06, |
| "loss": 0.7024, |
| "step": 1042 |
| }, |
| { |
| "epoch": 4.235532994923858, |
| "grad_norm": 0.22051415732907617, |
| "learning_rate": 2.7508755992329937e-06, |
| "loss": 0.7106, |
| "step": 1043 |
| }, |
| { |
| "epoch": 4.239593908629441, |
| "grad_norm": 0.2034090237994826, |
| "learning_rate": 2.722217735873718e-06, |
| "loss": 0.6982, |
| "step": 1044 |
| }, |
| { |
| "epoch": 4.243654822335025, |
| "grad_norm": 0.2005484451504613, |
| "learning_rate": 2.6936990253975315e-06, |
| "loss": 0.6862, |
| "step": 1045 |
| }, |
| { |
| "epoch": 4.247715736040609, |
| "grad_norm": 0.2299513609659669, |
| "learning_rate": 2.665319697490205e-06, |
| "loss": 0.7129, |
| "step": 1046 |
| }, |
| { |
| "epoch": 4.251776649746193, |
| "grad_norm": 0.20473306780276238, |
| "learning_rate": 2.637079980714945e-06, |
| "loss": 0.7008, |
| "step": 1047 |
| }, |
| { |
| "epoch": 4.255837563451776, |
| "grad_norm": 0.1933200458488444, |
| "learning_rate": 2.6089801025105453e-06, |
| "loss": 0.7165, |
| "step": 1048 |
| }, |
| { |
| "epoch": 4.2598984771573605, |
| "grad_norm": 0.21047418943172733, |
| "learning_rate": 2.581020289189571e-06, |
| "loss": 0.6824, |
| "step": 1049 |
| }, |
| { |
| "epoch": 4.2639593908629445, |
| "grad_norm": 0.20577843477703464, |
| "learning_rate": 2.553200765936501e-06, |
| "loss": 0.685, |
| "step": 1050 |
| }, |
| { |
| "epoch": 4.268020304568528, |
| "grad_norm": 0.21802997559316606, |
| "learning_rate": 2.525521756805962e-06, |
| "loss": 0.6859, |
| "step": 1051 |
| }, |
| { |
| "epoch": 4.272081218274112, |
| "grad_norm": 0.191364305721244, |
| "learning_rate": 2.497983484720885e-06, |
| "loss": 0.7019, |
| "step": 1052 |
| }, |
| { |
| "epoch": 4.276142131979696, |
| "grad_norm": 0.20644661986147728, |
| "learning_rate": 2.470586171470728e-06, |
| "loss": 0.7325, |
| "step": 1053 |
| }, |
| { |
| "epoch": 4.280203045685279, |
| "grad_norm": 0.25262109675549166, |
| "learning_rate": 2.4433300377096836e-06, |
| "loss": 0.7011, |
| "step": 1054 |
| }, |
| { |
| "epoch": 4.284263959390863, |
| "grad_norm": 0.21359875850454293, |
| "learning_rate": 2.4162153029549073e-06, |
| "loss": 0.7075, |
| "step": 1055 |
| }, |
| { |
| "epoch": 4.288324873096447, |
| "grad_norm": 0.21388043789542105, |
| "learning_rate": 2.3892421855847458e-06, |
| "loss": 0.7074, |
| "step": 1056 |
| }, |
| { |
| "epoch": 4.29238578680203, |
| "grad_norm": 0.21352931611341033, |
| "learning_rate": 2.362410902836978e-06, |
| "loss": 0.728, |
| "step": 1057 |
| }, |
| { |
| "epoch": 4.296446700507614, |
| "grad_norm": 0.22847629191396904, |
| "learning_rate": 2.3357216708070653e-06, |
| "loss": 0.7241, |
| "step": 1058 |
| }, |
| { |
| "epoch": 4.300507614213198, |
| "grad_norm": 0.23089562626153767, |
| "learning_rate": 2.309174704446411e-06, |
| "loss": 0.7199, |
| "step": 1059 |
| }, |
| { |
| "epoch": 4.304568527918782, |
| "grad_norm": 0.19824381991111542, |
| "learning_rate": 2.2827702175606437e-06, |
| "loss": 0.6988, |
| "step": 1060 |
| }, |
| { |
| "epoch": 4.308629441624365, |
| "grad_norm": 0.18918781890298583, |
| "learning_rate": 2.256508422807855e-06, |
| "loss": 0.7021, |
| "step": 1061 |
| }, |
| { |
| "epoch": 4.312690355329949, |
| "grad_norm": 0.19534730528331284, |
| "learning_rate": 2.230389531696946e-06, |
| "loss": 0.7243, |
| "step": 1062 |
| }, |
| { |
| "epoch": 4.316751269035533, |
| "grad_norm": 0.20453014169090913, |
| "learning_rate": 2.204413754585857e-06, |
| "loss": 0.7016, |
| "step": 1063 |
| }, |
| { |
| "epoch": 4.320812182741117, |
| "grad_norm": 0.22813799544126379, |
| "learning_rate": 2.1785813006799406e-06, |
| "loss": 0.7138, |
| "step": 1064 |
| }, |
| { |
| "epoch": 4.324873096446701, |
| "grad_norm": 0.21591015757115095, |
| "learning_rate": 2.1528923780302224e-06, |
| "loss": 0.7185, |
| "step": 1065 |
| }, |
| { |
| "epoch": 4.328934010152285, |
| "grad_norm": 0.19051583174113043, |
| "learning_rate": 2.127347193531757e-06, |
| "loss": 0.6865, |
| "step": 1066 |
| }, |
| { |
| "epoch": 4.332994923857868, |
| "grad_norm": 0.197738767215128, |
| "learning_rate": 2.101945952921942e-06, |
| "loss": 0.7133, |
| "step": 1067 |
| }, |
| { |
| "epoch": 4.337055837563452, |
| "grad_norm": 0.19306980403817162, |
| "learning_rate": 2.0766888607788906e-06, |
| "loss": 0.6853, |
| "step": 1068 |
| }, |
| { |
| "epoch": 4.341116751269036, |
| "grad_norm": 0.20701412900943456, |
| "learning_rate": 2.0515761205197337e-06, |
| "loss": 0.7125, |
| "step": 1069 |
| }, |
| { |
| "epoch": 4.345177664974619, |
| "grad_norm": 0.2222600851334625, |
| "learning_rate": 2.0266079343990453e-06, |
| "loss": 0.7138, |
| "step": 1070 |
| }, |
| { |
| "epoch": 4.349238578680203, |
| "grad_norm": 0.2038571945945684, |
| "learning_rate": 2.0017845035071494e-06, |
| "loss": 0.6964, |
| "step": 1071 |
| }, |
| { |
| "epoch": 4.353299492385787, |
| "grad_norm": 0.20649965594326167, |
| "learning_rate": 1.9771060277685537e-06, |
| "loss": 0.723, |
| "step": 1072 |
| }, |
| { |
| "epoch": 4.35736040609137, |
| "grad_norm": 0.18591726370957262, |
| "learning_rate": 1.95257270594031e-06, |
| "loss": 0.7062, |
| "step": 1073 |
| }, |
| { |
| "epoch": 4.361421319796954, |
| "grad_norm": 0.1932532745987076, |
| "learning_rate": 1.9281847356104188e-06, |
| "loss": 0.6838, |
| "step": 1074 |
| }, |
| { |
| "epoch": 4.365482233502538, |
| "grad_norm": 0.19532573667941017, |
| "learning_rate": 1.9039423131962365e-06, |
| "loss": 0.6918, |
| "step": 1075 |
| }, |
| { |
| "epoch": 4.369543147208121, |
| "grad_norm": 0.19948698389943428, |
| "learning_rate": 1.8798456339429027e-06, |
| "loss": 0.7069, |
| "step": 1076 |
| }, |
| { |
| "epoch": 4.3736040609137055, |
| "grad_norm": 0.1922711433139182, |
| "learning_rate": 1.8558948919217612e-06, |
| "loss": 0.693, |
| "step": 1077 |
| }, |
| { |
| "epoch": 4.3776649746192895, |
| "grad_norm": 0.19673311585872652, |
| "learning_rate": 1.8320902800287954e-06, |
| "loss": 0.7116, |
| "step": 1078 |
| }, |
| { |
| "epoch": 4.381725888324873, |
| "grad_norm": 0.1939681776940388, |
| "learning_rate": 1.8084319899830726e-06, |
| "loss": 0.7083, |
| "step": 1079 |
| }, |
| { |
| "epoch": 4.385786802030457, |
| "grad_norm": 0.1969351398842089, |
| "learning_rate": 1.7849202123252097e-06, |
| "loss": 0.7206, |
| "step": 1080 |
| }, |
| { |
| "epoch": 4.389847715736041, |
| "grad_norm": 0.18849745282487337, |
| "learning_rate": 1.7615551364158401e-06, |
| "loss": 0.6898, |
| "step": 1081 |
| }, |
| { |
| "epoch": 4.393908629441625, |
| "grad_norm": 0.19388382673699417, |
| "learning_rate": 1.738336950434061e-06, |
| "loss": 0.6968, |
| "step": 1082 |
| }, |
| { |
| "epoch": 4.397969543147208, |
| "grad_norm": 0.1794724896458433, |
| "learning_rate": 1.715265841375957e-06, |
| "loss": 0.6816, |
| "step": 1083 |
| }, |
| { |
| "epoch": 4.402030456852792, |
| "grad_norm": 0.18856207860651322, |
| "learning_rate": 1.6923419950530684e-06, |
| "loss": 0.7153, |
| "step": 1084 |
| }, |
| { |
| "epoch": 4.406091370558376, |
| "grad_norm": 0.19229573269486613, |
| "learning_rate": 1.6695655960909008e-06, |
| "loss": 0.7178, |
| "step": 1085 |
| }, |
| { |
| "epoch": 4.410152284263959, |
| "grad_norm": 0.20801158806456688, |
| "learning_rate": 1.646936827927441e-06, |
| "loss": 0.699, |
| "step": 1086 |
| }, |
| { |
| "epoch": 4.414213197969543, |
| "grad_norm": 0.19186933974991496, |
| "learning_rate": 1.6244558728116766e-06, |
| "loss": 0.7039, |
| "step": 1087 |
| }, |
| { |
| "epoch": 4.418274111675127, |
| "grad_norm": 0.2003696855683119, |
| "learning_rate": 1.6021229118021265e-06, |
| "loss": 0.7076, |
| "step": 1088 |
| }, |
| { |
| "epoch": 4.42233502538071, |
| "grad_norm": 0.1971319476563732, |
| "learning_rate": 1.5799381247653967e-06, |
| "loss": 0.7219, |
| "step": 1089 |
| }, |
| { |
| "epoch": 4.426395939086294, |
| "grad_norm": 0.18854943099636698, |
| "learning_rate": 1.5579016903747013e-06, |
| "loss": 0.7067, |
| "step": 1090 |
| }, |
| { |
| "epoch": 4.430456852791878, |
| "grad_norm": 0.18925292613909386, |
| "learning_rate": 1.5360137861084656e-06, |
| "loss": 0.7102, |
| "step": 1091 |
| }, |
| { |
| "epoch": 4.4345177664974615, |
| "grad_norm": 0.1901411296550555, |
| "learning_rate": 1.5142745882488475e-06, |
| "loss": 0.7283, |
| "step": 1092 |
| }, |
| { |
| "epoch": 4.438578680203046, |
| "grad_norm": 0.18594741297652848, |
| "learning_rate": 1.4926842718803691e-06, |
| "loss": 0.7113, |
| "step": 1093 |
| }, |
| { |
| "epoch": 4.44263959390863, |
| "grad_norm": 0.20714302815473834, |
| "learning_rate": 1.4712430108884657e-06, |
| "loss": 0.7056, |
| "step": 1094 |
| }, |
| { |
| "epoch": 4.446700507614214, |
| "grad_norm": 0.19738463771182943, |
| "learning_rate": 1.4499509779581078e-06, |
| "loss": 0.72, |
| "step": 1095 |
| }, |
| { |
| "epoch": 4.450761421319797, |
| "grad_norm": 0.20932127089336555, |
| "learning_rate": 1.4288083445723988e-06, |
| "loss": 0.722, |
| "step": 1096 |
| }, |
| { |
| "epoch": 4.454822335025381, |
| "grad_norm": 0.1905487757196375, |
| "learning_rate": 1.4078152810112045e-06, |
| "loss": 0.7152, |
| "step": 1097 |
| }, |
| { |
| "epoch": 4.458883248730965, |
| "grad_norm": 0.1909907811050504, |
| "learning_rate": 1.3869719563497697e-06, |
| "loss": 0.7109, |
| "step": 1098 |
| }, |
| { |
| "epoch": 4.462944162436548, |
| "grad_norm": 0.20086472481900297, |
| "learning_rate": 1.3662785384573663e-06, |
| "loss": 0.7197, |
| "step": 1099 |
| }, |
| { |
| "epoch": 4.467005076142132, |
| "grad_norm": 0.1937168329569741, |
| "learning_rate": 1.3457351939959383e-06, |
| "loss": 0.7232, |
| "step": 1100 |
| }, |
| { |
| "epoch": 4.471065989847716, |
| "grad_norm": 0.19471697744572478, |
| "learning_rate": 1.3253420884187551e-06, |
| "loss": 0.721, |
| "step": 1101 |
| }, |
| { |
| "epoch": 4.475126903553299, |
| "grad_norm": 0.19065070910110418, |
| "learning_rate": 1.3050993859690953e-06, |
| "loss": 0.681, |
| "step": 1102 |
| }, |
| { |
| "epoch": 4.479187817258883, |
| "grad_norm": 0.20268952854279956, |
| "learning_rate": 1.2850072496788869e-06, |
| "loss": 0.7223, |
| "step": 1103 |
| }, |
| { |
| "epoch": 4.483248730964467, |
| "grad_norm": 0.1818363055816385, |
| "learning_rate": 1.2650658413674434e-06, |
| "loss": 0.7103, |
| "step": 1104 |
| }, |
| { |
| "epoch": 4.4873096446700504, |
| "grad_norm": 0.21217213420997766, |
| "learning_rate": 1.2452753216401226e-06, |
| "loss": 0.7177, |
| "step": 1105 |
| }, |
| { |
| "epoch": 4.4913705583756345, |
| "grad_norm": 0.21028157926866825, |
| "learning_rate": 1.2256358498870503e-06, |
| "loss": 0.7216, |
| "step": 1106 |
| }, |
| { |
| "epoch": 4.4954314720812185, |
| "grad_norm": 0.2052122571680612, |
| "learning_rate": 1.2061475842818337e-06, |
| "loss": 0.7238, |
| "step": 1107 |
| }, |
| { |
| "epoch": 4.499492385786802, |
| "grad_norm": 0.1852813097783252, |
| "learning_rate": 1.1868106817802816e-06, |
| "loss": 0.7225, |
| "step": 1108 |
| }, |
| { |
| "epoch": 4.503553299492386, |
| "grad_norm": 0.1864694441265238, |
| "learning_rate": 1.1676252981191482e-06, |
| "loss": 0.6991, |
| "step": 1109 |
| }, |
| { |
| "epoch": 4.50761421319797, |
| "grad_norm": 0.19718432078748988, |
| "learning_rate": 1.1485915878148823e-06, |
| "loss": 0.7029, |
| "step": 1110 |
| }, |
| { |
| "epoch": 4.511675126903553, |
| "grad_norm": 0.193249898839053, |
| "learning_rate": 1.1297097041623584e-06, |
| "loss": 0.7244, |
| "step": 1111 |
| }, |
| { |
| "epoch": 4.515736040609137, |
| "grad_norm": 0.19981859658046794, |
| "learning_rate": 1.1109797992336847e-06, |
| "loss": 0.7002, |
| "step": 1112 |
| }, |
| { |
| "epoch": 4.519796954314721, |
| "grad_norm": 0.18900390090160304, |
| "learning_rate": 1.092402023876933e-06, |
| "loss": 0.7026, |
| "step": 1113 |
| }, |
| { |
| "epoch": 4.523857868020304, |
| "grad_norm": 0.18322687252270242, |
| "learning_rate": 1.0739765277149527e-06, |
| "loss": 0.6955, |
| "step": 1114 |
| }, |
| { |
| "epoch": 4.527918781725888, |
| "grad_norm": 0.17903451553474023, |
| "learning_rate": 1.0557034591441596e-06, |
| "loss": 0.7039, |
| "step": 1115 |
| }, |
| { |
| "epoch": 4.531979695431472, |
| "grad_norm": 0.18290996700820106, |
| "learning_rate": 1.0375829653333324e-06, |
| "loss": 0.7013, |
| "step": 1116 |
| }, |
| { |
| "epoch": 4.536040609137056, |
| "grad_norm": 0.18118485292689418, |
| "learning_rate": 1.0196151922224385e-06, |
| "loss": 0.7115, |
| "step": 1117 |
| }, |
| { |
| "epoch": 4.540101522842639, |
| "grad_norm": 0.17815244453066895, |
| "learning_rate": 1.0018002845214526e-06, |
| "loss": 0.7073, |
| "step": 1118 |
| }, |
| { |
| "epoch": 4.544162436548223, |
| "grad_norm": 0.18243752025517682, |
| "learning_rate": 9.841383857091947e-07, |
| "loss": 0.7138, |
| "step": 1119 |
| }, |
| { |
| "epoch": 4.548223350253807, |
| "grad_norm": 0.19375951855218576, |
| "learning_rate": 9.666296380321616e-07, |
| "loss": 0.6995, |
| "step": 1120 |
| }, |
| { |
| "epoch": 4.552284263959391, |
| "grad_norm": 0.18311312272830696, |
| "learning_rate": 9.492741825034124e-07, |
| "loss": 0.7189, |
| "step": 1121 |
| }, |
| { |
| "epoch": 4.556345177664975, |
| "grad_norm": 0.21522556759336556, |
| "learning_rate": 9.320721589013892e-07, |
| "loss": 0.7016, |
| "step": 1122 |
| }, |
| { |
| "epoch": 4.560406091370559, |
| "grad_norm": 0.18525990329888184, |
| "learning_rate": 9.150237057688339e-07, |
| "loss": 0.7236, |
| "step": 1123 |
| }, |
| { |
| "epoch": 4.564467005076142, |
| "grad_norm": 0.19100985392712147, |
| "learning_rate": 8.981289604116328e-07, |
| "loss": 0.7319, |
| "step": 1124 |
| }, |
| { |
| "epoch": 4.568527918781726, |
| "grad_norm": 0.18582117579527035, |
| "learning_rate": 8.813880588977542e-07, |
| "loss": 0.6971, |
| "step": 1125 |
| }, |
| { |
| "epoch": 4.57258883248731, |
| "grad_norm": 0.17973387718573708, |
| "learning_rate": 8.648011360561126e-07, |
| "loss": 0.7077, |
| "step": 1126 |
| }, |
| { |
| "epoch": 4.576649746192894, |
| "grad_norm": 0.1815863256342077, |
| "learning_rate": 8.483683254755037e-07, |
| "loss": 0.7228, |
| "step": 1127 |
| }, |
| { |
| "epoch": 4.580710659898477, |
| "grad_norm": 0.18242793546140046, |
| "learning_rate": 8.320897595035227e-07, |
| "loss": 0.7065, |
| "step": 1128 |
| }, |
| { |
| "epoch": 4.584771573604061, |
| "grad_norm": 0.17631872571388305, |
| "learning_rate": 8.159655692455093e-07, |
| "loss": 0.6661, |
| "step": 1129 |
| }, |
| { |
| "epoch": 4.588832487309645, |
| "grad_norm": 0.18136267235846454, |
| "learning_rate": 7.999958845634648e-07, |
| "loss": 0.715, |
| "step": 1130 |
| }, |
| { |
| "epoch": 4.592893401015228, |
| "grad_norm": 0.19885035055577172, |
| "learning_rate": 7.841808340750478e-07, |
| "loss": 0.7122, |
| "step": 1131 |
| }, |
| { |
| "epoch": 4.596954314720812, |
| "grad_norm": 0.18868060008115312, |
| "learning_rate": 7.685205451524869e-07, |
| "loss": 0.7082, |
| "step": 1132 |
| }, |
| { |
| "epoch": 4.601015228426396, |
| "grad_norm": 0.19688723956130258, |
| "learning_rate": 7.530151439216027e-07, |
| "loss": 0.6919, |
| "step": 1133 |
| }, |
| { |
| "epoch": 4.6050761421319795, |
| "grad_norm": 0.1892381526419787, |
| "learning_rate": 7.376647552607675e-07, |
| "loss": 0.734, |
| "step": 1134 |
| }, |
| { |
| "epoch": 4.6091370558375635, |
| "grad_norm": 0.1857183939107732, |
| "learning_rate": 7.224695027998963e-07, |
| "loss": 0.6844, |
| "step": 1135 |
| }, |
| { |
| "epoch": 4.6131979695431475, |
| "grad_norm": 0.20198762895197114, |
| "learning_rate": 7.07429508919466e-07, |
| "loss": 0.7068, |
| "step": 1136 |
| }, |
| { |
| "epoch": 4.617258883248731, |
| "grad_norm": 0.21442069467122712, |
| "learning_rate": 6.925448947495206e-07, |
| "loss": 0.7041, |
| "step": 1137 |
| }, |
| { |
| "epoch": 4.621319796954315, |
| "grad_norm": 0.17271781215280485, |
| "learning_rate": 6.778157801686936e-07, |
| "loss": 0.6907, |
| "step": 1138 |
| }, |
| { |
| "epoch": 4.625380710659899, |
| "grad_norm": 0.19940047554083049, |
| "learning_rate": 6.632422838032515e-07, |
| "loss": 0.6926, |
| "step": 1139 |
| }, |
| { |
| "epoch": 4.629441624365482, |
| "grad_norm": 0.18201464122924066, |
| "learning_rate": 6.488245230261281e-07, |
| "loss": 0.7152, |
| "step": 1140 |
| }, |
| { |
| "epoch": 4.633502538071066, |
| "grad_norm": 0.19106350068613595, |
| "learning_rate": 6.345626139559868e-07, |
| "loss": 0.7128, |
| "step": 1141 |
| }, |
| { |
| "epoch": 4.63756345177665, |
| "grad_norm": 0.19575171775220232, |
| "learning_rate": 6.204566714562866e-07, |
| "loss": 0.7098, |
| "step": 1142 |
| }, |
| { |
| "epoch": 4.641624365482233, |
| "grad_norm": 0.1860021445236794, |
| "learning_rate": 6.06506809134344e-07, |
| "loss": 0.7195, |
| "step": 1143 |
| }, |
| { |
| "epoch": 4.645685279187817, |
| "grad_norm": 0.20165562305028725, |
| "learning_rate": 5.927131393404373e-07, |
| "loss": 0.7025, |
| "step": 1144 |
| }, |
| { |
| "epoch": 4.649746192893401, |
| "grad_norm": 0.19111839749815457, |
| "learning_rate": 5.790757731668817e-07, |
| "loss": 0.7139, |
| "step": 1145 |
| }, |
| { |
| "epoch": 4.653807106598984, |
| "grad_norm": 0.1749719770564701, |
| "learning_rate": 5.655948204471507e-07, |
| "loss": 0.6775, |
| "step": 1146 |
| }, |
| { |
| "epoch": 4.657868020304568, |
| "grad_norm": 0.19815546728124422, |
| "learning_rate": 5.522703897549875e-07, |
| "loss": 0.7394, |
| "step": 1147 |
| }, |
| { |
| "epoch": 4.661928934010152, |
| "grad_norm": 0.18961130066249993, |
| "learning_rate": 5.391025884035239e-07, |
| "loss": 0.7168, |
| "step": 1148 |
| }, |
| { |
| "epoch": 4.6659898477157356, |
| "grad_norm": 0.1893287221057379, |
| "learning_rate": 5.260915224444207e-07, |
| "loss": 0.7202, |
| "step": 1149 |
| }, |
| { |
| "epoch": 4.67005076142132, |
| "grad_norm": 0.19438061826481337, |
| "learning_rate": 5.132372966670129e-07, |
| "loss": 0.7209, |
| "step": 1150 |
| }, |
| { |
| "epoch": 4.674111675126904, |
| "grad_norm": 0.21047134904569653, |
| "learning_rate": 5.005400145974704e-07, |
| "loss": 0.7055, |
| "step": 1151 |
| }, |
| { |
| "epoch": 4.678172588832488, |
| "grad_norm": 0.2180569002551968, |
| "learning_rate": 4.879997784979562e-07, |
| "loss": 0.7063, |
| "step": 1152 |
| }, |
| { |
| "epoch": 4.682233502538071, |
| "grad_norm": 0.19582195435858835, |
| "learning_rate": 4.7561668936580984e-07, |
| "loss": 0.711, |
| "step": 1153 |
| }, |
| { |
| "epoch": 4.686294416243655, |
| "grad_norm": 0.1892357703752406, |
| "learning_rate": 4.6339084693272306e-07, |
| "loss": 0.7193, |
| "step": 1154 |
| }, |
| { |
| "epoch": 4.690355329949239, |
| "grad_norm": 0.18358188080866097, |
| "learning_rate": 4.5132234966395847e-07, |
| "loss": 0.7025, |
| "step": 1155 |
| }, |
| { |
| "epoch": 4.694416243654822, |
| "grad_norm": 0.19976708201672577, |
| "learning_rate": 4.3941129475752795e-07, |
| "loss": 0.7417, |
| "step": 1156 |
| }, |
| { |
| "epoch": 4.698477157360406, |
| "grad_norm": 0.1844356894032601, |
| "learning_rate": 4.27657778143431e-07, |
| "loss": 0.6891, |
| "step": 1157 |
| }, |
| { |
| "epoch": 4.70253807106599, |
| "grad_norm": 0.19397329457267418, |
| "learning_rate": 4.1606189448287757e-07, |
| "loss": 0.6858, |
| "step": 1158 |
| }, |
| { |
| "epoch": 4.706598984771573, |
| "grad_norm": 0.20027813257836688, |
| "learning_rate": 4.046237371675177e-07, |
| "loss": 0.7183, |
| "step": 1159 |
| }, |
| { |
| "epoch": 4.710659898477157, |
| "grad_norm": 0.1835685309560609, |
| "learning_rate": 3.9334339831869963e-07, |
| "loss": 0.7043, |
| "step": 1160 |
| }, |
| { |
| "epoch": 4.714720812182741, |
| "grad_norm": 0.17921532922278718, |
| "learning_rate": 3.8222096878671955e-07, |
| "loss": 0.7116, |
| "step": 1161 |
| }, |
| { |
| "epoch": 4.718781725888325, |
| "grad_norm": 0.1762036906039039, |
| "learning_rate": 3.7125653815009545e-07, |
| "loss": 0.6769, |
| "step": 1162 |
| }, |
| { |
| "epoch": 4.7228426395939085, |
| "grad_norm": 0.18438117973380222, |
| "learning_rate": 3.6045019471484974e-07, |
| "loss": 0.7254, |
| "step": 1163 |
| }, |
| { |
| "epoch": 4.7269035532994925, |
| "grad_norm": 0.17140330163611783, |
| "learning_rate": 3.498020255137813e-07, |
| "loss": 0.7155, |
| "step": 1164 |
| }, |
| { |
| "epoch": 4.730964467005077, |
| "grad_norm": 0.18780148772107533, |
| "learning_rate": 3.393121163057811e-07, |
| "loss": 0.7045, |
| "step": 1165 |
| }, |
| { |
| "epoch": 4.73502538071066, |
| "grad_norm": 0.17916428013506577, |
| "learning_rate": 3.289805515751399e-07, |
| "loss": 0.7051, |
| "step": 1166 |
| }, |
| { |
| "epoch": 4.739086294416244, |
| "grad_norm": 0.1900846470364545, |
| "learning_rate": 3.188074145308573e-07, |
| "loss": 0.7081, |
| "step": 1167 |
| }, |
| { |
| "epoch": 4.743147208121828, |
| "grad_norm": 0.18398667783682163, |
| "learning_rate": 3.087927871059804e-07, |
| "loss": 0.7171, |
| "step": 1168 |
| }, |
| { |
| "epoch": 4.747208121827411, |
| "grad_norm": 0.1786797493285871, |
| "learning_rate": 2.989367499569418e-07, |
| "loss": 0.7235, |
| "step": 1169 |
| }, |
| { |
| "epoch": 4.751269035532995, |
| "grad_norm": 0.191264731060291, |
| "learning_rate": 2.8923938246290917e-07, |
| "loss": 0.7131, |
| "step": 1170 |
| }, |
| { |
| "epoch": 4.755329949238579, |
| "grad_norm": 0.1860291435316892, |
| "learning_rate": 2.7970076272514804e-07, |
| "loss": 0.6963, |
| "step": 1171 |
| }, |
| { |
| "epoch": 4.759390862944162, |
| "grad_norm": 0.19895794302931868, |
| "learning_rate": 2.703209675663887e-07, |
| "loss": 0.6977, |
| "step": 1172 |
| }, |
| { |
| "epoch": 4.763451776649746, |
| "grad_norm": 0.18506146006581123, |
| "learning_rate": 2.6110007253021374e-07, |
| "loss": 0.6977, |
| "step": 1173 |
| }, |
| { |
| "epoch": 4.76751269035533, |
| "grad_norm": 0.17779966728452806, |
| "learning_rate": 2.520381518804471e-07, |
| "loss": 0.7038, |
| "step": 1174 |
| }, |
| { |
| "epoch": 4.771573604060913, |
| "grad_norm": 0.18181357309939802, |
| "learning_rate": 2.4313527860054585e-07, |
| "loss": 0.7056, |
| "step": 1175 |
| }, |
| { |
| "epoch": 4.775634517766497, |
| "grad_norm": 0.18617462367358958, |
| "learning_rate": 2.343915243930317e-07, |
| "loss": 0.7099, |
| "step": 1176 |
| }, |
| { |
| "epoch": 4.779695431472081, |
| "grad_norm": 0.17800999134377843, |
| "learning_rate": 2.2580695967889367e-07, |
| "loss": 0.7138, |
| "step": 1177 |
| }, |
| { |
| "epoch": 4.783756345177665, |
| "grad_norm": 0.17891939462393, |
| "learning_rate": 2.1738165359704189e-07, |
| "loss": 0.7304, |
| "step": 1178 |
| }, |
| { |
| "epoch": 4.787817258883249, |
| "grad_norm": 0.1794217627811603, |
| "learning_rate": 2.0911567400373257e-07, |
| "loss": 0.7185, |
| "step": 1179 |
| }, |
| { |
| "epoch": 4.791878172588833, |
| "grad_norm": 0.17562033309283775, |
| "learning_rate": 2.0100908747202607e-07, |
| "loss": 0.7082, |
| "step": 1180 |
| }, |
| { |
| "epoch": 4.795939086294416, |
| "grad_norm": 0.17587695987018678, |
| "learning_rate": 1.9306195929125638e-07, |
| "loss": 0.7243, |
| "step": 1181 |
| }, |
| { |
| "epoch": 4.8, |
| "grad_norm": 0.17228679354461118, |
| "learning_rate": 1.8527435346650247e-07, |
| "loss": 0.707, |
| "step": 1182 |
| }, |
| { |
| "epoch": 4.804060913705584, |
| "grad_norm": 0.17940519247993464, |
| "learning_rate": 1.7764633271807108e-07, |
| "loss": 0.7088, |
| "step": 1183 |
| }, |
| { |
| "epoch": 4.808121827411168, |
| "grad_norm": 0.1812927905758579, |
| "learning_rate": 1.7017795848099262e-07, |
| "loss": 0.699, |
| "step": 1184 |
| }, |
| { |
| "epoch": 4.812182741116751, |
| "grad_norm": 0.1767516259431487, |
| "learning_rate": 1.6286929090452596e-07, |
| "loss": 0.7138, |
| "step": 1185 |
| }, |
| { |
| "epoch": 4.816243654822335, |
| "grad_norm": 0.179376907713332, |
| "learning_rate": 1.557203888516745e-07, |
| "loss": 0.6996, |
| "step": 1186 |
| }, |
| { |
| "epoch": 4.820304568527919, |
| "grad_norm": 0.18081525033039156, |
| "learning_rate": 1.487313098987131e-07, |
| "loss": 0.7293, |
| "step": 1187 |
| }, |
| { |
| "epoch": 4.824365482233502, |
| "grad_norm": 0.18059810664281045, |
| "learning_rate": 1.4190211033472402e-07, |
| "loss": 0.7186, |
| "step": 1188 |
| }, |
| { |
| "epoch": 4.828426395939086, |
| "grad_norm": 0.1738613382670985, |
| "learning_rate": 1.3523284516113955e-07, |
| "loss": 0.7082, |
| "step": 1189 |
| }, |
| { |
| "epoch": 4.83248730964467, |
| "grad_norm": 1.1810657649736291, |
| "learning_rate": 1.2872356809130682e-07, |
| "loss": 0.739, |
| "step": 1190 |
| }, |
| { |
| "epoch": 4.8365482233502535, |
| "grad_norm": 0.1794829688930156, |
| "learning_rate": 1.2237433155004807e-07, |
| "loss": 0.7168, |
| "step": 1191 |
| }, |
| { |
| "epoch": 4.8406091370558375, |
| "grad_norm": 0.1738768213722039, |
| "learning_rate": 1.1618518667323886e-07, |
| "loss": 0.7036, |
| "step": 1192 |
| }, |
| { |
| "epoch": 4.8446700507614215, |
| "grad_norm": 0.17883833126382923, |
| "learning_rate": 1.1015618330740385e-07, |
| "loss": 0.6865, |
| "step": 1193 |
| }, |
| { |
| "epoch": 4.848730964467005, |
| "grad_norm": 0.18101989369032817, |
| "learning_rate": 1.042873700093061e-07, |
| "loss": 0.7242, |
| "step": 1194 |
| }, |
| { |
| "epoch": 4.852791878172589, |
| "grad_norm": 0.18415529671136324, |
| "learning_rate": 9.857879404556291e-08, |
| "loss": 0.7092, |
| "step": 1195 |
| }, |
| { |
| "epoch": 4.856852791878173, |
| "grad_norm": 0.18049812977584118, |
| "learning_rate": 9.303050139225722e-08, |
| "loss": 0.6998, |
| "step": 1196 |
| }, |
| { |
| "epoch": 4.860913705583757, |
| "grad_norm": 0.1785866092049721, |
| "learning_rate": 8.76425367345779e-08, |
| "loss": 0.6834, |
| "step": 1197 |
| }, |
| { |
| "epoch": 4.86497461928934, |
| "grad_norm": 0.1785392029303703, |
| "learning_rate": 8.241494346644897e-08, |
| "loss": 0.7068, |
| "step": 1198 |
| }, |
| { |
| "epoch": 4.869035532994924, |
| "grad_norm": 0.17428309998770658, |
| "learning_rate": 7.734776369019204e-08, |
| "loss": 0.6972, |
| "step": 1199 |
| }, |
| { |
| "epoch": 4.873096446700508, |
| "grad_norm": 0.17835898852518464, |
| "learning_rate": 7.244103821617332e-08, |
| "loss": 0.7108, |
| "step": 1200 |
| }, |
| { |
| "epoch": 4.877157360406091, |
| "grad_norm": 0.1805023163489205, |
| "learning_rate": 6.769480656248606e-08, |
| "loss": 0.692, |
| "step": 1201 |
| }, |
| { |
| "epoch": 4.881218274111675, |
| "grad_norm": 0.17977621695878188, |
| "learning_rate": 6.310910695462635e-08, |
| "loss": 0.6969, |
| "step": 1202 |
| }, |
| { |
| "epoch": 4.885279187817259, |
| "grad_norm": 0.17239587879760473, |
| "learning_rate": 5.8683976325191185e-08, |
| "loss": 0.6992, |
| "step": 1203 |
| }, |
| { |
| "epoch": 4.889340101522842, |
| "grad_norm": 0.1847141639843027, |
| "learning_rate": 5.4419450313571984e-08, |
| "loss": 0.707, |
| "step": 1204 |
| }, |
| { |
| "epoch": 4.893401015228426, |
| "grad_norm": 0.17306432330417978, |
| "learning_rate": 5.031556326567488e-08, |
| "loss": 0.7072, |
| "step": 1205 |
| }, |
| { |
| "epoch": 4.8974619289340104, |
| "grad_norm": 0.17246899597817691, |
| "learning_rate": 4.637234823364312e-08, |
| "loss": 0.7248, |
| "step": 1206 |
| }, |
| { |
| "epoch": 4.901522842639594, |
| "grad_norm": 0.18157931356114895, |
| "learning_rate": 4.258983697558838e-08, |
| "loss": 0.7172, |
| "step": 1207 |
| }, |
| { |
| "epoch": 4.905583756345178, |
| "grad_norm": 0.17572327512980632, |
| "learning_rate": 3.896805995533548e-08, |
| "loss": 0.7028, |
| "step": 1208 |
| }, |
| { |
| "epoch": 4.909644670050762, |
| "grad_norm": 0.1853567378749109, |
| "learning_rate": 3.550704634218028e-08, |
| "loss": 0.7123, |
| "step": 1209 |
| }, |
| { |
| "epoch": 4.913705583756345, |
| "grad_norm": 0.1788108246321232, |
| "learning_rate": 3.2206824010647676e-08, |
| "loss": 0.7135, |
| "step": 1210 |
| }, |
| { |
| "epoch": 4.917766497461929, |
| "grad_norm": 0.17877208706013126, |
| "learning_rate": 2.9067419540278476e-08, |
| "loss": 0.7134, |
| "step": 1211 |
| }, |
| { |
| "epoch": 4.921827411167513, |
| "grad_norm": 0.17178749878386051, |
| "learning_rate": 2.6088858215400638e-08, |
| "loss": 0.7054, |
| "step": 1212 |
| }, |
| { |
| "epoch": 4.925888324873096, |
| "grad_norm": 0.18026445902409957, |
| "learning_rate": 2.3271164024940564e-08, |
| "loss": 0.6817, |
| "step": 1213 |
| }, |
| { |
| "epoch": 4.92994923857868, |
| "grad_norm": 0.18092496265885005, |
| "learning_rate": 2.061435966221881e-08, |
| "loss": 0.7179, |
| "step": 1214 |
| }, |
| { |
| "epoch": 4.934010152284264, |
| "grad_norm": 0.18330859759394563, |
| "learning_rate": 1.811846652477245e-08, |
| "loss": 0.7068, |
| "step": 1215 |
| }, |
| { |
| "epoch": 4.938071065989847, |
| "grad_norm": 0.17551458763981342, |
| "learning_rate": 1.5783504714184106e-08, |
| "loss": 0.6972, |
| "step": 1216 |
| }, |
| { |
| "epoch": 4.942131979695431, |
| "grad_norm": 0.17988455474433215, |
| "learning_rate": 1.360949303591097e-08, |
| "loss": 0.7066, |
| "step": 1217 |
| }, |
| { |
| "epoch": 4.946192893401015, |
| "grad_norm": 0.17438539082294452, |
| "learning_rate": 1.1596448999144916e-08, |
| "loss": 0.7204, |
| "step": 1218 |
| }, |
| { |
| "epoch": 4.950253807106599, |
| "grad_norm": 0.1777485669648779, |
| "learning_rate": 9.744388816668172e-09, |
| "loss": 0.712, |
| "step": 1219 |
| }, |
| { |
| "epoch": 4.9543147208121825, |
| "grad_norm": 0.1806481462929512, |
| "learning_rate": 8.05332740472009e-09, |
| "loss": 0.7203, |
| "step": 1220 |
| }, |
| { |
| "epoch": 4.9583756345177665, |
| "grad_norm": 0.17547822411966987, |
| "learning_rate": 6.523278382872811e-09, |
| "loss": 0.7232, |
| "step": 1221 |
| }, |
| { |
| "epoch": 4.962436548223351, |
| "grad_norm": 0.178263122194253, |
| "learning_rate": 5.15425407393133e-09, |
| "loss": 0.7068, |
| "step": 1222 |
| }, |
| { |
| "epoch": 4.966497461928934, |
| "grad_norm": 0.17934887129542185, |
| "learning_rate": 3.94626550383137e-09, |
| "loss": 0.7235, |
| "step": 1223 |
| }, |
| { |
| "epoch": 4.970558375634518, |
| "grad_norm": 0.1775201512943909, |
| "learning_rate": 2.899322401546112e-09, |
| "loss": 0.7, |
| "step": 1224 |
| }, |
| { |
| "epoch": 4.974619289340102, |
| "grad_norm": 0.18437794081475634, |
| "learning_rate": 2.013433199010706e-09, |
| "loss": 0.7029, |
| "step": 1225 |
| }, |
| { |
| "epoch": 4.978680203045685, |
| "grad_norm": 0.1788793658490742, |
| "learning_rate": 1.2886050310556563e-09, |
| "loss": 0.6835, |
| "step": 1226 |
| }, |
| { |
| "epoch": 4.982741116751269, |
| "grad_norm": 0.1782908175964806, |
| "learning_rate": 7.248437353468695e-10, |
| "loss": 0.7247, |
| "step": 1227 |
| }, |
| { |
| "epoch": 4.986802030456853, |
| "grad_norm": 0.17960842774184113, |
| "learning_rate": 3.221538523412449e-10, |
| "loss": 0.7062, |
| "step": 1228 |
| }, |
| { |
| "epoch": 4.990862944162437, |
| "grad_norm": 0.18477424162434355, |
| "learning_rate": 8.053862524670663e-11, |
| "loss": 0.6891, |
| "step": 1229 |
| }, |
| { |
| "epoch": 4.99492385786802, |
| "grad_norm": 0.17939234139703134, |
| "learning_rate": 0.0, |
| "loss": 0.6987, |
| "step": 1230 |
| }, |
| { |
| "epoch": 4.99492385786802, |
| "step": 1230, |
| "total_flos": 6.032180188216295e+18, |
| "train_loss": 0.8038945751461556, |
| "train_runtime": 33001.4558, |
| "train_samples_per_second": 4.774, |
| "train_steps_per_second": 0.037 |
| } |
| ], |
| "logging_steps": 1.0, |
| "max_steps": 1230, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 5, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 6.032180188216295e+18, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |