{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9943851768669285,
  "eval_steps": 500,
  "global_step": 666,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004491858506457047,
      "grad_norm": 5.744791141546518,
      "learning_rate": 1.1940298507462686e-06,
      "loss": 0.8286,
      "step": 1
    },
    {
      "epoch": 0.008983717012914094,
      "grad_norm": 5.938504053024298,
      "learning_rate": 2.3880597014925373e-06,
      "loss": 0.8731,
      "step": 2
    },
    {
      "epoch": 0.01347557551937114,
      "grad_norm": 5.689539575557419,
      "learning_rate": 3.582089552238806e-06,
      "loss": 0.8516,
      "step": 3
    },
    {
      "epoch": 0.017967434025828188,
      "grad_norm": 5.284586737919561,
      "learning_rate": 4.7761194029850745e-06,
      "loss": 0.8492,
      "step": 4
    },
    {
      "epoch": 0.022459292532285232,
      "grad_norm": 3.971869242420689,
      "learning_rate": 5.970149253731343e-06,
      "loss": 0.8157,
      "step": 5
    },
    {
      "epoch": 0.02695115103874228,
      "grad_norm": 2.1377599463707706,
      "learning_rate": 7.164179104477612e-06,
      "loss": 0.7615,
      "step": 6
    },
    {
      "epoch": 0.031443009545199324,
      "grad_norm": 4.067532899413643,
      "learning_rate": 8.35820895522388e-06,
      "loss": 0.77,
      "step": 7
    },
    {
      "epoch": 0.035934868051656375,
      "grad_norm": 4.3609991841226465,
      "learning_rate": 9.552238805970149e-06,
      "loss": 0.7734,
      "step": 8
    },
    {
      "epoch": 0.04042672655811342,
      "grad_norm": 3.955811585110502,
      "learning_rate": 1.074626865671642e-05,
      "loss": 0.7264,
      "step": 9
    },
    {
      "epoch": 0.044918585064570464,
      "grad_norm": 4.147344991950925,
      "learning_rate": 1.1940298507462686e-05,
      "loss": 0.7181,
      "step": 10
    },
    {
      "epoch": 0.049410443571027515,
      "grad_norm": 2.9375211912793167,
      "learning_rate": 1.3134328358208957e-05,
      "loss": 0.709,
      "step": 11
    },
    {
      "epoch": 0.05390230207748456,
      "grad_norm": 1.7153314450537895,
      "learning_rate": 1.4328358208955224e-05,
      "loss": 0.6781,
      "step": 12
    },
    {
      "epoch": 0.058394160583941604,
      "grad_norm": 2.0790765361170673,
      "learning_rate": 1.5522388059701494e-05,
      "loss": 0.6527,
      "step": 13
    },
    {
      "epoch": 0.06288601909039865,
      "grad_norm": 1.5527768247367837,
      "learning_rate": 1.671641791044776e-05,
      "loss": 0.6452,
      "step": 14
    },
    {
      "epoch": 0.06737787759685569,
      "grad_norm": 1.0792330483516703,
      "learning_rate": 1.791044776119403e-05,
      "loss": 0.6257,
      "step": 15
    },
    {
      "epoch": 0.07186973610331275,
      "grad_norm": 1.2580380805920675,
      "learning_rate": 1.9104477611940298e-05,
      "loss": 0.6194,
      "step": 16
    },
    {
      "epoch": 0.0763615946097698,
      "grad_norm": 1.0358737748222084,
      "learning_rate": 2.029850746268657e-05,
      "loss": 0.6222,
      "step": 17
    },
    {
      "epoch": 0.08085345311622684,
      "grad_norm": 0.8752143391152137,
      "learning_rate": 2.149253731343284e-05,
      "loss": 0.6039,
      "step": 18
    },
    {
      "epoch": 0.08534531162268388,
      "grad_norm": 0.9084228854146826,
      "learning_rate": 2.2686567164179106e-05,
      "loss": 0.5925,
      "step": 19
    },
    {
      "epoch": 0.08983717012914093,
      "grad_norm": 0.7161525114514684,
      "learning_rate": 2.3880597014925373e-05,
      "loss": 0.5975,
      "step": 20
    },
    {
      "epoch": 0.09432902863559797,
      "grad_norm": 0.7863667359029304,
      "learning_rate": 2.5074626865671646e-05,
      "loss": 0.5792,
      "step": 21
    },
    {
      "epoch": 0.09882088714205503,
      "grad_norm": 0.7596751702825932,
      "learning_rate": 2.6268656716417913e-05,
      "loss": 0.5768,
      "step": 22
    },
    {
      "epoch": 0.10331274564851207,
      "grad_norm": 0.6815450158836671,
      "learning_rate": 2.746268656716418e-05,
      "loss": 0.5743,
      "step": 23
    },
    {
      "epoch": 0.10780460415496912,
      "grad_norm": 0.7844121977758192,
      "learning_rate": 2.8656716417910447e-05,
      "loss": 0.5645,
      "step": 24
    },
    {
      "epoch": 0.11229646266142616,
      "grad_norm": 0.6266465530238511,
      "learning_rate": 2.985074626865672e-05,
      "loss": 0.5663,
      "step": 25
    },
    {
      "epoch": 0.11678832116788321,
      "grad_norm": 0.6323910407952537,
      "learning_rate": 3.104477611940299e-05,
      "loss": 0.5456,
      "step": 26
    },
    {
      "epoch": 0.12128017967434025,
      "grad_norm": 0.8411877102047188,
      "learning_rate": 3.2238805970149255e-05,
      "loss": 0.5557,
      "step": 27
    },
    {
      "epoch": 0.1257720381807973,
      "grad_norm": 1.0602361532910023,
      "learning_rate": 3.343283582089552e-05,
      "loss": 0.554,
      "step": 28
    },
    {
      "epoch": 0.13026389668725435,
      "grad_norm": 1.0430432872124518,
      "learning_rate": 3.462686567164179e-05,
      "loss": 0.5529,
      "step": 29
    },
    {
      "epoch": 0.13475575519371139,
      "grad_norm": 1.1738603384793684,
      "learning_rate": 3.582089552238806e-05,
      "loss": 0.5539,
      "step": 30
    },
    {
      "epoch": 0.13924761370016844,
      "grad_norm": 0.9490994450271529,
      "learning_rate": 3.701492537313433e-05,
      "loss": 0.5532,
      "step": 31
    },
    {
      "epoch": 0.1437394722066255,
      "grad_norm": 0.7226179922094373,
      "learning_rate": 3.8208955223880596e-05,
      "loss": 0.5446,
      "step": 32
    },
    {
      "epoch": 0.14823133071308253,
      "grad_norm": 1.3004111922002466,
      "learning_rate": 3.940298507462687e-05,
      "loss": 0.5337,
      "step": 33
    },
    {
      "epoch": 0.1527231892195396,
      "grad_norm": 1.0839907708318666,
      "learning_rate": 4.059701492537314e-05,
      "loss": 0.5393,
      "step": 34
    },
    {
      "epoch": 0.15721504772599662,
      "grad_norm": 1.0610629228230326,
      "learning_rate": 4.1791044776119404e-05,
      "loss": 0.5346,
      "step": 35
    },
    {
      "epoch": 0.16170690623245368,
      "grad_norm": 1.1894669815276204,
      "learning_rate": 4.298507462686568e-05,
      "loss": 0.527,
      "step": 36
    },
    {
      "epoch": 0.16619876473891074,
      "grad_norm": 1.092793519670506,
      "learning_rate": 4.4179104477611944e-05,
      "loss": 0.5251,
      "step": 37
    },
    {
      "epoch": 0.17069062324536777,
      "grad_norm": 1.0366204770683871,
      "learning_rate": 4.537313432835821e-05,
      "loss": 0.5326,
      "step": 38
    },
    {
      "epoch": 0.17518248175182483,
      "grad_norm": 1.5522208820975247,
      "learning_rate": 4.6567164179104485e-05,
      "loss": 0.5313,
      "step": 39
    },
    {
      "epoch": 0.17967434025828186,
      "grad_norm": 1.4684418047189554,
      "learning_rate": 4.7761194029850745e-05,
      "loss": 0.535,
      "step": 40
    },
    {
      "epoch": 0.18416619876473891,
      "grad_norm": 1.5110440506100333,
      "learning_rate": 4.895522388059702e-05,
      "loss": 0.5333,
      "step": 41
    },
    {
      "epoch": 0.18865805727119594,
      "grad_norm": 0.9183570790429239,
      "learning_rate": 5.014925373134329e-05,
      "loss": 0.5174,
      "step": 42
    },
    {
      "epoch": 0.193149915777653,
      "grad_norm": 0.912302118661921,
      "learning_rate": 5.134328358208955e-05,
      "loss": 0.5165,
      "step": 43
    },
    {
      "epoch": 0.19764177428411006,
      "grad_norm": 1.5011336778925848,
      "learning_rate": 5.2537313432835826e-05,
      "loss": 0.5283,
      "step": 44
    },
    {
      "epoch": 0.2021336327905671,
      "grad_norm": 1.5445930212563217,
      "learning_rate": 5.37313432835821e-05,
      "loss": 0.5145,
      "step": 45
    },
    {
      "epoch": 0.20662549129702415,
      "grad_norm": 1.1328139603940344,
      "learning_rate": 5.492537313432836e-05,
      "loss": 0.5197,
      "step": 46
    },
    {
      "epoch": 0.21111734980348118,
      "grad_norm": 1.3391474717394163,
      "learning_rate": 5.6119402985074634e-05,
      "loss": 0.514,
      "step": 47
    },
    {
      "epoch": 0.21560920830993824,
      "grad_norm": 1.4810023865286073,
      "learning_rate": 5.7313432835820894e-05,
      "loss": 0.5179,
      "step": 48
    },
    {
      "epoch": 0.2201010668163953,
      "grad_norm": 1.2945048151439829,
      "learning_rate": 5.850746268656717e-05,
      "loss": 0.5057,
      "step": 49
    },
    {
      "epoch": 0.22459292532285233,
      "grad_norm": 1.537052702638256,
      "learning_rate": 5.970149253731344e-05,
      "loss": 0.5278,
      "step": 50
    },
    {
      "epoch": 0.22908478382930939,
      "grad_norm": 1.2254248296907218,
      "learning_rate": 6.08955223880597e-05,
      "loss": 0.5107,
      "step": 51
    },
    {
      "epoch": 0.23357664233576642,
      "grad_norm": 1.4602102943166557,
      "learning_rate": 6.208955223880598e-05,
      "loss": 0.5175,
      "step": 52
    },
    {
      "epoch": 0.23806850084222347,
      "grad_norm": 1.2678108277018225,
      "learning_rate": 6.328358208955224e-05,
      "loss": 0.5084,
      "step": 53
    },
    {
      "epoch": 0.2425603593486805,
      "grad_norm": 1.6874272087453315,
      "learning_rate": 6.447761194029851e-05,
      "loss": 0.5165,
      "step": 54
    },
    {
      "epoch": 0.24705221785513756,
      "grad_norm": 0.9962783801788024,
      "learning_rate": 6.567164179104479e-05,
      "loss": 0.4922,
      "step": 55
    },
    {
      "epoch": 0.2515440763615946,
      "grad_norm": 2.172787384544179,
      "learning_rate": 6.686567164179104e-05,
      "loss": 0.5183,
      "step": 56
    },
    {
      "epoch": 0.25603593486805165,
      "grad_norm": 1.1082666763751967,
      "learning_rate": 6.805970149253732e-05,
      "loss": 0.5064,
      "step": 57
    },
    {
      "epoch": 0.2605277933745087,
      "grad_norm": 2.074359577872402,
      "learning_rate": 6.925373134328358e-05,
      "loss": 0.506,
      "step": 58
    },
    {
      "epoch": 0.26501965188096577,
      "grad_norm": 1.5315602318768948,
      "learning_rate": 7.044776119402986e-05,
      "loss": 0.5207,
      "step": 59
    },
    {
      "epoch": 0.26951151038742277,
      "grad_norm": 1.8337956234199035,
      "learning_rate": 7.164179104477612e-05,
      "loss": 0.5069,
      "step": 60
    },
    {
      "epoch": 0.27400336889387983,
      "grad_norm": 1.8182681786016288,
      "learning_rate": 7.283582089552239e-05,
      "loss": 0.5077,
      "step": 61
    },
    {
      "epoch": 0.2784952274003369,
      "grad_norm": 1.213164943102124,
      "learning_rate": 7.402985074626866e-05,
      "loss": 0.5053,
      "step": 62
    },
    {
      "epoch": 0.28298708590679394,
      "grad_norm": 1.8083951787977053,
      "learning_rate": 7.522388059701494e-05,
      "loss": 0.5208,
      "step": 63
    },
    {
      "epoch": 0.287478944413251,
      "grad_norm": 1.3893627307148912,
      "learning_rate": 7.641791044776119e-05,
      "loss": 0.5079,
      "step": 64
    },
    {
      "epoch": 0.291970802919708,
      "grad_norm": 1.6168165719071332,
      "learning_rate": 7.761194029850747e-05,
      "loss": 0.5063,
      "step": 65
    },
    {
      "epoch": 0.29646266142616506,
      "grad_norm": 1.220121168098996,
      "learning_rate": 7.880597014925374e-05,
      "loss": 0.4961,
      "step": 66
    },
    {
      "epoch": 0.3009545199326221,
      "grad_norm": 1.3775838670272897,
      "learning_rate": 8e-05,
      "loss": 0.5094,
      "step": 67
    },
    {
      "epoch": 0.3054463784390792,
      "grad_norm": 1.697982382227281,
      "learning_rate": 7.999944985762151e-05,
      "loss": 0.4981,
      "step": 68
    },
    {
      "epoch": 0.30993823694553624,
      "grad_norm": 1.9213713325845574,
      "learning_rate": 7.999779944561886e-05,
      "loss": 0.5062,
      "step": 69
    },
    {
      "epoch": 0.31443009545199324,
      "grad_norm": 0.9142611846274715,
      "learning_rate": 7.999504880939012e-05,
      "loss": 0.4992,
      "step": 70
    },
    {
      "epoch": 0.3189219539584503,
      "grad_norm": 1.5234672488289716,
      "learning_rate": 7.999119802459736e-05,
      "loss": 0.5118,
      "step": 71
    },
    {
      "epoch": 0.32341381246490736,
      "grad_norm": 1.5883968537582671,
      "learning_rate": 7.99862471971646e-05,
      "loss": 0.5265,
      "step": 72
    },
    {
      "epoch": 0.3279056709713644,
      "grad_norm": 1.2434039184385957,
      "learning_rate": 7.998019646327482e-05,
      "loss": 0.5047,
      "step": 73
    },
    {
      "epoch": 0.3323975294778215,
      "grad_norm": 1.5937676355642276,
      "learning_rate": 7.99730459893663e-05,
      "loss": 0.5103,
      "step": 74
    },
    {
      "epoch": 0.3368893879842785,
      "grad_norm": 1.926452145823992,
      "learning_rate": 7.996479597212797e-05,
      "loss": 0.5152,
      "step": 75
    },
    {
      "epoch": 0.34138124649073553,
      "grad_norm": 1.0497002952507317,
      "learning_rate": 7.9955446638494e-05,
      "loss": 0.4992,
      "step": 76
    },
    {
      "epoch": 0.3458731049971926,
      "grad_norm": 1.7357655242315568,
      "learning_rate": 7.994499824563766e-05,
      "loss": 0.5149,
      "step": 77
    },
    {
      "epoch": 0.35036496350364965,
      "grad_norm": 1.3616037173767013,
      "learning_rate": 7.993345108096412e-05,
      "loss": 0.4974,
      "step": 78
    },
    {
      "epoch": 0.35485682201010665,
      "grad_norm": 1.6356884431238699,
      "learning_rate": 7.992080546210264e-05,
      "loss": 0.509,
      "step": 79
    },
    {
      "epoch": 0.3593486805165637,
      "grad_norm": 1.0562731675409218,
      "learning_rate": 7.99070617368977e-05,
      "loss": 0.507,
      "step": 80
    },
    {
      "epoch": 0.36384053902302077,
      "grad_norm": 1.8311690018327664,
      "learning_rate": 7.989222028339965e-05,
      "loss": 0.5087,
      "step": 81
    },
    {
      "epoch": 0.36833239752947783,
      "grad_norm": 1.2709837151378605,
      "learning_rate": 7.987628150985408e-05,
      "loss": 0.5171,
      "step": 82
    },
    {
      "epoch": 0.3728242560359349,
      "grad_norm": 1.2113347073174108,
      "learning_rate": 7.985924585469074e-05,
      "loss": 0.5039,
      "step": 83
    },
    {
      "epoch": 0.3773161145423919,
      "grad_norm": 1.3083289491639871,
      "learning_rate": 7.98411137865114e-05,
      "loss": 0.5027,
      "step": 84
    },
    {
      "epoch": 0.38180797304884895,
      "grad_norm": 1.769283617364072,
      "learning_rate": 7.982188580407705e-05,
      "loss": 0.5073,
      "step": 85
    },
    {
      "epoch": 0.386299831555306,
      "grad_norm": 1.0121335159848557,
      "learning_rate": 7.980156243629408e-05,
      "loss": 0.4893,
      "step": 86
    },
    {
      "epoch": 0.39079169006176306,
      "grad_norm": 1.2050053324230048,
      "learning_rate": 7.978014424219977e-05,
      "loss": 0.4868,
      "step": 87
    },
    {
      "epoch": 0.3952835485682201,
      "grad_norm": 1.7252433120749655,
      "learning_rate": 7.975763181094695e-05,
      "loss": 0.4967,
      "step": 88
    },
    {
      "epoch": 0.3997754070746771,
      "grad_norm": 1.2482585478247559,
      "learning_rate": 7.973402576178773e-05,
      "loss": 0.5024,
      "step": 89
    },
    {
      "epoch": 0.4042672655811342,
      "grad_norm": 0.8657354646252154,
      "learning_rate": 7.970932674405653e-05,
      "loss": 0.4963,
      "step": 90
    },
    {
      "epoch": 0.40875912408759124,
      "grad_norm": 1.2349014600356545,
      "learning_rate": 7.968353543715214e-05,
      "loss": 0.5012,
      "step": 91
    },
    {
      "epoch": 0.4132509825940483,
      "grad_norm": 1.5150396004413444,
      "learning_rate": 7.965665255051912e-05,
      "loss": 0.494,
      "step": 92
    },
    {
      "epoch": 0.41774284110050536,
      "grad_norm": 1.2355183865724308,
      "learning_rate": 7.962867882362824e-05,
      "loss": 0.4931,
      "step": 93
    },
    {
      "epoch": 0.42223469960696236,
      "grad_norm": 1.215530918022545,
      "learning_rate": 7.959961502595612e-05,
      "loss": 0.4988,
      "step": 94
    },
    {
      "epoch": 0.4267265581134194,
      "grad_norm": 1.6111719837824063,
      "learning_rate": 7.956946195696409e-05,
      "loss": 0.4974,
      "step": 95
    },
    {
      "epoch": 0.4312184166198765,
      "grad_norm": 1.037390439761141,
      "learning_rate": 7.953822044607624e-05,
      "loss": 0.4797,
      "step": 96
    },
    {
      "epoch": 0.43571027512633353,
      "grad_norm": 1.3858412511223213,
      "learning_rate": 7.950589135265648e-05,
      "loss": 0.4988,
      "step": 97
    },
    {
      "epoch": 0.4402021336327906,
      "grad_norm": 0.9483575731208523,
      "learning_rate": 7.947247556598507e-05,
      "loss": 0.4904,
      "step": 98
    },
    {
      "epoch": 0.4446939921392476,
      "grad_norm": 1.6895047000185839,
      "learning_rate": 7.9437974005234e-05,
      "loss": 0.4743,
      "step": 99
    },
    {
      "epoch": 0.44918585064570465,
      "grad_norm": 0.7599225264493149,
      "learning_rate": 7.94023876194418e-05,
      "loss": 0.4829,
      "step": 100
    },
    {
      "epoch": 0.4536777091521617,
      "grad_norm": 1.5787414945598128,
      "learning_rate": 7.936571738748745e-05,
      "loss": 0.5048,
      "step": 101
    },
    {
      "epoch": 0.45816956765861877,
      "grad_norm": 1.0063654802566553,
      "learning_rate": 7.932796431806334e-05,
      "loss": 0.4941,
      "step": 102
    },
    {
      "epoch": 0.4626614261650758,
      "grad_norm": 1.5837027411061138,
      "learning_rate": 7.928912944964768e-05,
      "loss": 0.4986,
      "step": 103
    },
    {
      "epoch": 0.46715328467153283,
      "grad_norm": 1.0948129578081418,
      "learning_rate": 7.924921385047578e-05,
      "loss": 0.4881,
      "step": 104
    },
    {
      "epoch": 0.4716451431779899,
      "grad_norm": 1.1809008706016286,
      "learning_rate": 7.920821861851078e-05,
      "loss": 0.4815,
      "step": 105
    },
    {
      "epoch": 0.47613700168444695,
      "grad_norm": 1.1794242285281153,
      "learning_rate": 7.916614488141342e-05,
      "loss": 0.49,
      "step": 106
    },
    {
      "epoch": 0.480628860190904,
      "grad_norm": 1.1260971386728633,
      "learning_rate": 7.912299379651098e-05,
      "loss": 0.4865,
      "step": 107
    },
    {
      "epoch": 0.485120718697361,
      "grad_norm": 1.003255614399029,
      "learning_rate": 7.907876655076548e-05,
      "loss": 0.4973,
      "step": 108
    },
    {
      "epoch": 0.48961257720381807,
      "grad_norm": 0.9963086733446658,
      "learning_rate": 7.903346436074102e-05,
      "loss": 0.4958,
      "step": 109
    },
    {
      "epoch": 0.4941044357102751,
      "grad_norm": 1.1295130274532024,
      "learning_rate": 7.898708847257036e-05,
      "loss": 0.4925,
      "step": 110
    },
    {
      "epoch": 0.4985962942167322,
      "grad_norm": 1.1286543839536498,
      "learning_rate": 7.893964016192055e-05,
      "loss": 0.4958,
      "step": 111
    },
    {
      "epoch": 0.5030881527231892,
      "grad_norm": 1.138773157548404,
      "learning_rate": 7.889112073395791e-05,
      "loss": 0.4809,
      "step": 112
    },
    {
      "epoch": 0.5075800112296462,
      "grad_norm": 1.1857975592423018,
      "learning_rate": 7.884153152331211e-05,
      "loss": 0.4956,
      "step": 113
    },
    {
      "epoch": 0.5120718697361033,
      "grad_norm": 0.9806978323568485,
      "learning_rate": 7.879087389403949e-05,
      "loss": 0.4913,
      "step": 114
    },
    {
      "epoch": 0.5165637282425604,
      "grad_norm": 1.5207878098068772,
      "learning_rate": 7.873914923958544e-05,
      "loss": 0.4831,
      "step": 115
    },
    {
      "epoch": 0.5210555867490174,
      "grad_norm": 0.9999550101740227,
      "learning_rate": 7.868635898274625e-05,
      "loss": 0.4987,
      "step": 116
    },
    {
      "epoch": 0.5255474452554745,
      "grad_norm": 1.3942178646720018,
      "learning_rate": 7.863250457562972e-05,
      "loss": 0.4867,
      "step": 117
    },
    {
      "epoch": 0.5300393037619315,
      "grad_norm": 0.8492563528108323,
      "learning_rate": 7.857758749961546e-05,
      "loss": 0.4892,
      "step": 118
    },
    {
      "epoch": 0.5345311622683886,
      "grad_norm": 1.1338791712516503,
      "learning_rate": 7.852160926531402e-05,
      "loss": 0.4836,
      "step": 119
    },
    {
      "epoch": 0.5390230207748455,
      "grad_norm": 1.0121158793555165,
      "learning_rate": 7.846457141252537e-05,
      "loss": 0.4838,
      "step": 120
    },
    {
      "epoch": 0.5435148792813026,
      "grad_norm": 1.5066492819198594,
      "learning_rate": 7.840647551019645e-05,
      "loss": 0.4913,
      "step": 121
    },
    {
      "epoch": 0.5480067377877597,
      "grad_norm": 0.9637039832234741,
      "learning_rate": 7.834732315637819e-05,
      "loss": 0.482,
      "step": 122
    },
    {
      "epoch": 0.5524985962942167,
      "grad_norm": 1.132873709421742,
      "learning_rate": 7.828711597818142e-05,
      "loss": 0.4666,
      "step": 123
    },
    {
      "epoch": 0.5569904548006738,
      "grad_norm": 1.0292705626466296,
      "learning_rate": 7.822585563173215e-05,
      "loss": 0.4932,
      "step": 124
    },
    {
      "epoch": 0.5614823133071308,
      "grad_norm": 1.072527714111858,
      "learning_rate": 7.816354380212603e-05,
      "loss": 0.4936,
      "step": 125
    },
    {
      "epoch": 0.5659741718135879,
      "grad_norm": 1.1534790542525402,
      "learning_rate": 7.810018220338193e-05,
      "loss": 0.4724,
      "step": 126
    },
    {
      "epoch": 0.570466030320045,
      "grad_norm": 0.87781466428156,
      "learning_rate": 7.80357725783949e-05,
      "loss": 0.4716,
      "step": 127
    },
    {
      "epoch": 0.574957888826502,
      "grad_norm": 0.9263213228649457,
      "learning_rate": 7.797031669888817e-05,
      "loss": 0.477,
      "step": 128
    },
    {
      "epoch": 0.5794497473329591,
      "grad_norm": 0.5681074193973272,
      "learning_rate": 7.790381636536439e-05,
      "loss": 0.4798,
      "step": 129
    },
    {
      "epoch": 0.583941605839416,
      "grad_norm": 0.8711123426899279,
      "learning_rate": 7.783627340705613e-05,
      "loss": 0.4661,
      "step": 130
    },
    {
      "epoch": 0.5884334643458731,
      "grad_norm": 1.1491807819965665,
      "learning_rate": 7.77676896818756e-05,
      "loss": 0.4826,
      "step": 131
    },
    {
      "epoch": 0.5929253228523301,
      "grad_norm": 1.1349275985408458,
      "learning_rate": 7.769806707636345e-05,
      "loss": 0.4665,
      "step": 132
    },
    {
      "epoch": 0.5974171813587872,
      "grad_norm": 0.9503680298154338,
      "learning_rate": 7.762740750563702e-05,
      "loss": 0.4726,
      "step": 133
    },
    {
      "epoch": 0.6019090398652442,
      "grad_norm": 0.8798723540455866,
      "learning_rate": 7.755571291333748e-05,
      "loss": 0.478,
      "step": 134
    },
    {
      "epoch": 0.6064008983717013,
      "grad_norm": 1.018233009868903,
      "learning_rate": 7.748298527157654e-05,
      "loss": 0.4797,
      "step": 135
    },
    {
      "epoch": 0.6108927568781584,
      "grad_norm": 1.1342037469856567,
      "learning_rate": 7.740922658088207e-05,
      "loss": 0.4825,
      "step": 136
    },
    {
      "epoch": 0.6153846153846154,
      "grad_norm": 1.1039412257360939,
      "learning_rate": 7.733443887014315e-05,
      "loss": 0.4736,
      "step": 137
    },
    {
      "epoch": 0.6198764738910725,
      "grad_norm": 0.9166688022128103,
      "learning_rate": 7.725862419655424e-05,
      "loss": 0.4695,
      "step": 138
    },
    {
      "epoch": 0.6243683323975294,
      "grad_norm": 0.9064275936046057,
      "learning_rate": 7.718178464555857e-05,
      "loss": 0.4773,
      "step": 139
    },
    {
      "epoch": 0.6288601909039865,
      "grad_norm": 0.8451696169829901,
      "learning_rate": 7.710392233079083e-05,
      "loss": 0.4701,
      "step": 140
    },
    {
      "epoch": 0.6333520494104435,
      "grad_norm": 0.9264031050195181,
      "learning_rate": 7.702503939401895e-05,
      "loss": 0.4789,
      "step": 141
    },
    {
      "epoch": 0.6378439079169006,
      "grad_norm": 1.2104242801664975,
      "learning_rate": 7.694513800508527e-05,
      "loss": 0.4684,
      "step": 142
    },
    {
      "epoch": 0.6423357664233577,
      "grad_norm": 0.5752970931660927,
      "learning_rate": 7.686422036184677e-05,
      "loss": 0.4613,
      "step": 143
    },
    {
      "epoch": 0.6468276249298147,
      "grad_norm": 0.8291757131340055,
      "learning_rate": 7.678228869011471e-05,
      "loss": 0.4641,
      "step": 144
    },
    {
      "epoch": 0.6513194834362718,
      "grad_norm": 1.248162748576082,
      "learning_rate": 7.669934524359334e-05,
      "loss": 0.4815,
      "step": 145
    },
    {
      "epoch": 0.6558113419427288,
      "grad_norm": 0.729557145934983,
      "learning_rate": 7.661539230381786e-05,
      "loss": 0.4713,
      "step": 146
    },
    {
      "epoch": 0.6603032004491859,
      "grad_norm": 0.8393015660885477,
      "learning_rate": 7.653043218009182e-05,
      "loss": 0.473,
      "step": 147
    },
    {
      "epoch": 0.664795058955643,
      "grad_norm": 0.8838686794804901,
      "learning_rate": 7.644446720942341e-05,
      "loss": 0.4726,
      "step": 148
    },
    {
      "epoch": 0.6692869174620999,
      "grad_norm": 1.0673537017271082,
      "learning_rate": 7.635749975646134e-05,
      "loss": 0.4737,
      "step": 149
    },
    {
      "epoch": 0.673778775968557,
      "grad_norm": 1.0252286647009419,
      "learning_rate": 7.626953221342964e-05,
      "loss": 0.4681,
      "step": 150
    },
    {
      "epoch": 0.678270634475014,
      "grad_norm": 1.00075506689465,
      "learning_rate": 7.6180567000062e-05,
      "loss": 0.4816,
      "step": 151
    },
    {
      "epoch": 0.6827624929814711,
      "grad_norm": 1.0936170337365332,
      "learning_rate": 7.609060656353512e-05,
      "loss": 0.4889,
      "step": 152
    },
    {
      "epoch": 0.6872543514879281,
      "grad_norm": 0.770783289342069,
      "learning_rate": 7.599965337840143e-05,
      "loss": 0.4748,
      "step": 153
    },
    {
      "epoch": 0.6917462099943852,
      "grad_norm": 0.9855590028580333,
      "learning_rate": 7.590770994652101e-05,
      "loss": 0.4688,
      "step": 154
    },
    {
      "epoch": 0.6962380685008422,
      "grad_norm": 1.1310208012024585,
      "learning_rate": 7.581477879699277e-05,
      "loss": 0.4743,
      "step": 155
    },
    {
      "epoch": 0.7007299270072993,
      "grad_norm": 1.090963813345675,
      "learning_rate": 7.572086248608488e-05,
      "loss": 0.4705,
      "step": 156
    },
    {
      "epoch": 0.7052217855137564,
      "grad_norm": 1.1564716947765967,
      "learning_rate": 7.56259635971645e-05,
      "loss": 0.4627,
      "step": 157
    },
    {
      "epoch": 0.7097136440202133,
      "grad_norm": 0.5696295674290832,
      "learning_rate": 7.553008474062664e-05,
      "loss": 0.4677,
      "step": 158
    },
    {
      "epoch": 0.7142055025266704,
      "grad_norm": 1.0010280498500423,
      "learning_rate": 7.54332285538224e-05,
      "loss": 0.4811,
      "step": 159
    },
    {
      "epoch": 0.7186973610331274,
      "grad_norm": 1.4784344157308926,
      "learning_rate": 7.533539770098644e-05,
      "loss": 0.4759,
      "step": 160
    },
    {
      "epoch": 0.7231892195395845,
      "grad_norm": 0.5416223455308844,
      "learning_rate": 7.523659487316368e-05,
      "loss": 0.4634,
      "step": 161
    },
    {
      "epoch": 0.7276810780460415,
      "grad_norm": 1.306263463999413,
      "learning_rate": 7.513682278813521e-05,
      "loss": 0.4674,
      "step": 162
    },
    {
      "epoch": 0.7321729365524986,
      "grad_norm": 0.709924329040727,
      "learning_rate": 7.503608419034368e-05,
      "loss": 0.4503,
      "step": 163
    },
    {
      "epoch": 0.7366647950589557,
      "grad_norm": 0.7284637136229817,
      "learning_rate": 7.493438185081767e-05,
      "loss": 0.4559,
      "step": 164
    },
    {
      "epoch": 0.7411566535654127,
      "grad_norm": 0.7906473191724958,
      "learning_rate": 7.48317185670955e-05,
      "loss": 0.4715,
      "step": 165
    },
    {
      "epoch": 0.7456485120718698,
      "grad_norm": 0.7144905634247598,
      "learning_rate": 7.472809716314837e-05,
      "loss": 0.4716,
      "step": 166
    },
    {
      "epoch": 0.7501403705783268,
      "grad_norm": 0.5389182526898639,
      "learning_rate": 7.462352048930253e-05,
      "loss": 0.4761,
      "step": 167
    },
    {
      "epoch": 0.7546322290847838,
      "grad_norm": 0.7206565766656343,
      "learning_rate": 7.4517991422161e-05,
      "loss": 0.4604,
      "step": 168
    },
    {
      "epoch": 0.7591240875912408,
      "grad_norm": 0.9506988377544834,
      "learning_rate": 7.441151286452437e-05,
      "loss": 0.4655,
      "step": 169
    },
    {
      "epoch": 0.7636159460976979,
      "grad_norm": 1.1556170647637938,
      "learning_rate": 7.430408774531099e-05,
      "loss": 0.4656,
      "step": 170
    },
    {
      "epoch": 0.768107804604155,
      "grad_norm": 0.9512254963759508,
      "learning_rate": 7.419571901947638e-05,
      "loss": 0.4717,
      "step": 171
    },
    {
      "epoch": 0.772599663110612,
      "grad_norm": 1.0174070588488835,
      "learning_rate": 7.408640966793199e-05,
      "loss": 0.4625,
      "step": 172
    },
    {
      "epoch": 0.7770915216170691,
      "grad_norm": 0.7468524554469199,
      "learning_rate": 7.397616269746314e-05,
      "loss": 0.4528,
      "step": 173
    },
    {
      "epoch": 0.7815833801235261,
      "grad_norm": 0.6622577368272095,
      "learning_rate": 7.386498114064637e-05,
      "loss": 0.4668,
      "step": 174
    },
    {
      "epoch": 0.7860752386299832,
      "grad_norm": 0.662100634409437,
      "learning_rate": 7.375286805576598e-05,
      "loss": 0.4707,
      "step": 175
    },
    {
      "epoch": 0.7905670971364402,
      "grad_norm": 0.5977372696427495,
      "learning_rate": 7.36398265267299e-05,
      "loss": 0.4694,
      "step": 176
    },
    {
      "epoch": 0.7950589556428973,
      "grad_norm": 0.5491784546575701,
      "learning_rate": 7.352585966298496e-05,
      "loss": 0.4495,
      "step": 177
    },
    {
      "epoch": 0.7995508141493542,
      "grad_norm": 0.486244889699389,
      "learning_rate": 7.34109705994312e-05,
      "loss": 0.4571,
      "step": 178
    },
    {
      "epoch": 0.8040426726558113,
      "grad_norm": 0.559720273360908,
      "learning_rate": 7.329516249633576e-05,
      "loss": 0.4612,
      "step": 179
    },
    {
      "epoch": 0.8085345311622684,
      "grad_norm": 0.6029983609419821,
      "learning_rate": 7.317843853924593e-05,
      "loss": 0.4667,
      "step": 180
    },
    {
      "epoch": 0.8130263896687254,
      "grad_norm": 0.7279953162142647,
      "learning_rate": 7.306080193890146e-05,
      "loss": 0.475,
      "step": 181
    },
    {
      "epoch": 0.8175182481751825,
      "grad_norm": 0.7551622614777805,
      "learning_rate": 7.294225593114628e-05,
      "loss": 0.458,
      "step": 182
    },
    {
      "epoch": 0.8220101066816395,
      "grad_norm": 0.6486112995518978,
      "learning_rate": 7.282280377683956e-05,
      "loss": 0.4633,
      "step": 183
    },
    {
      "epoch": 0.8265019651880966,
      "grad_norm": 0.5450682325816288,
      "learning_rate": 7.270244876176592e-05,
      "loss": 0.4559,
      "step": 184
    },
    {
      "epoch": 0.8309938236945537,
      "grad_norm": 0.5532087636578195,
      "learning_rate": 7.258119419654504e-05,
      "loss": 0.4558,
      "step": 185
    },
    {
      "epoch": 0.8354856822010107,
      "grad_norm": 0.6879500532179057,
      "learning_rate": 7.245904341654067e-05,
      "loss": 0.4608,
      "step": 186
    },
    {
      "epoch": 0.8399775407074677,
      "grad_norm": 0.7126782476489365,
      "learning_rate": 7.233599978176888e-05,
      "loss": 0.4633,
      "step": 187
    },
    {
      "epoch": 0.8444693992139247,
      "grad_norm": 0.7938445404455041,
      "learning_rate": 7.221206667680552e-05,
      "loss": 0.4542,
      "step": 188
    },
    {
      "epoch": 0.8489612577203818,
      "grad_norm": 0.9639983419087139,
      "learning_rate": 7.208724751069329e-05,
      "loss": 0.4548,
      "step": 189
    },
    {
      "epoch": 0.8534531162268388,
      "grad_norm": 1.0323225950842096,
      "learning_rate": 7.196154571684778e-05,
      "loss": 0.4692,
      "step": 190
    },
    {
      "epoch": 0.8579449747332959,
      "grad_norm": 1.021446328914379,
      "learning_rate": 7.183496475296323e-05,
      "loss": 0.4708,
      "step": 191
    },
    {
      "epoch": 0.862436833239753,
      "grad_norm": 1.0260650351708893,
      "learning_rate": 7.170750810091725e-05,
      "loss": 0.468,
      "step": 192
    },
    {
      "epoch": 0.86692869174621,
      "grad_norm": 1.0649895435134098,
      "learning_rate": 7.157917926667513e-05,
      "loss": 0.4642,
      "step": 193
    },
    {
      "epoch": 0.8714205502526671,
      "grad_norm": 0.9428882752577131,
      "learning_rate": 7.144998178019336e-05,
      "loss": 0.4745,
      "step": 194
    },
    {
      "epoch": 0.8759124087591241,
      "grad_norm": 0.6945999326470843,
      "learning_rate": 7.131991919532259e-05,
      "loss": 0.449,
      "step": 195
    },
    {
      "epoch": 0.8804042672655812,
      "grad_norm": 0.4980490994282038,
      "learning_rate": 7.11889950897098e-05,
      "loss": 0.4516,
      "step": 196
    },
    {
      "epoch": 0.8848961257720381,
      "grad_norm": 0.4453649654330163,
      "learning_rate": 7.105721306469992e-05,
      "loss": 0.4594,
      "step": 197
    },
    {
      "epoch": 0.8893879842784952,
      "grad_norm": 0.5672440941377567,
      "learning_rate": 7.09245767452368e-05,
      "loss": 0.4681,
      "step": 198
    },
    {
      "epoch": 0.8938798427849522,
      "grad_norm": 0.5821025878591354,
      "learning_rate": 7.079108977976344e-05,
      "loss": 0.4583,
      "step": 199
    },
    {
      "epoch": 0.8983717012914093,
      "grad_norm": 1.887194081506819,
      "learning_rate": 7.065675584012169e-05,
      "loss": 0.4454,
      "step": 200
    },
    {
      "epoch": 0.9028635597978664,
      "grad_norm": 0.606300162757906,
      "learning_rate": 7.052157862145119e-05,
      "loss": 0.4596,
      "step": 201
    },
    {
      "epoch": 0.9073554183043234,
      "grad_norm": 1.2204993329328941,
      "learning_rate": 7.038556184208779e-05,
      "loss": 0.4789,
      "step": 202
    },
    {
      "epoch": 0.9118472768107805,
      "grad_norm": 0.9183384138457148,
      "learning_rate": 7.024870924346118e-05,
      "loss": 0.4532,
      "step": 203
    },
    {
      "epoch": 0.9163391353172375,
      "grad_norm": 0.9032434610079714,
      "learning_rate": 7.01110245899921e-05,
      "loss": 0.4543,
      "step": 204
    },
    {
      "epoch": 0.9208309938236946,
      "grad_norm": 0.9435981485475972,
      "learning_rate": 6.997251166898867e-05,
      "loss": 0.4651,
      "step": 205
    },
    {
      "epoch": 0.9253228523301515,
      "grad_norm": 0.8083002836584318,
      "learning_rate": 6.983317429054229e-05,
      "loss": 0.4697,
      "step": 206
    },
    {
      "epoch": 0.9298147108366086,
      "grad_norm": 0.8339120351613075,
      "learning_rate": 6.969301628742278e-05,
      "loss": 0.4598,
      "step": 207
    },
    {
      "epoch": 0.9343065693430657,
      "grad_norm": 1.2386540448486012,
      "learning_rate": 6.955204151497302e-05,
      "loss": 0.4586,
      "step": 208
    },
    {
      "epoch": 0.9387984278495227,
      "grad_norm": 0.9444467314885506,
      "learning_rate": 6.941025385100284e-05,
      "loss": 0.4656,
      "step": 209
    },
    {
      "epoch": 0.9432902863559798,
      "grad_norm": 0.9810972501475214,
      "learning_rate": 6.926765719568237e-05,
      "loss": 0.4705,
      "step": 210
    },
    {
      "epoch": 0.9477821448624368,
      "grad_norm": 0.9169761617167389,
      "learning_rate": 6.912425547143475e-05,
      "loss": 0.4591,
      "step": 211
    },
    {
      "epoch": 0.9522740033688939,
      "grad_norm": 0.8457329268313868,
      "learning_rate": 6.89800526228283e-05,
      "loss": 0.4526,
      "step": 212
    },
    {
      "epoch": 0.956765861875351,
      "grad_norm": 0.8671736266003667,
      "learning_rate": 6.883505261646789e-05,
      "loss": 0.4511,
      "step": 213
    },
    {
      "epoch": 0.961257720381808,
      "grad_norm": 0.9307163378877691,
      "learning_rate": 6.868925944088598e-05,
      "loss": 0.4617,
      "step": 214
    },
    {
      "epoch": 0.9657495788882651,
      "grad_norm": 0.6069426932405532,
      "learning_rate": 6.854267710643273e-05,
      "loss": 0.4409,
      "step": 215
    },
    {
      "epoch": 0.970241437394722,
      "grad_norm": 0.9065066636149269,
      "learning_rate": 6.83953096451659e-05,
      "loss": 0.4571,
      "step": 216
    },
    {
      "epoch": 0.9747332959011791,
      "grad_norm": 0.6844016278471394,
      "learning_rate": 6.824716111073974e-05,
      "loss": 0.4515,
      "step": 217
    },
    {
      "epoch": 0.9792251544076361,
      "grad_norm": 0.7009669131153172,
      "learning_rate": 6.809823557829363e-05,
      "loss": 0.4584,
      "step": 218
    },
    {
      "epoch": 0.9837170129140932,
      "grad_norm": 0.7418077141817147,
      "learning_rate": 6.794853714433988e-05,
      "loss": 0.4574,
      "step": 219
    },
    {
      "epoch": 0.9882088714205502,
      "grad_norm": 0.59493279022597,
      "learning_rate": 6.779806992665112e-05,
      "loss": 0.4708,
      "step": 220
    },
    {
      "epoch": 0.9927007299270073,
      "grad_norm": 0.7808623506488296,
      "learning_rate": 6.764683806414702e-05,
      "loss": 0.4579,
      "step": 221
    },
    {
      "epoch": 0.9971925884334644,
      "grad_norm": 0.5616830320706236,
      "learning_rate": 6.749484571678038e-05,
      "loss": 0.456,
      "step": 222
    },
    {
      "epoch": 1.0030881527231892,
      "grad_norm": 1.1654486761290646,
      "learning_rate": 6.734209706542279e-05,
      "loss": 0.727,
      "step": 223
    },
    {
      "epoch": 1.0075800112296462,
      "grad_norm": 1.3312267335374193,
      "learning_rate": 6.718859631174958e-05,
      "loss": 0.4298,
      "step": 224
    },
    {
      "epoch": 1.0120718697361033,
      "grad_norm": 0.6800975438630601,
      "learning_rate": 6.703434767812421e-05,
      "loss": 0.3983,
      "step": 225
    },
    {
      "epoch": 1.0165637282425604,
      "grad_norm": 0.9565932620040085,
      "learning_rate": 6.68793554074822e-05,
      "loss": 0.4132,
      "step": 226
    },
    {
      "epoch": 1.0210555867490174,
      "grad_norm": 0.8913849334128685,
      "learning_rate": 6.672362376321438e-05,
      "loss": 0.4084,
      "step": 227
    },
    {
      "epoch": 1.0255474452554745,
      "grad_norm": 1.2687806417905505,
      "learning_rate": 6.65671570290496e-05,
      "loss": 0.4307,
      "step": 228
    },
    {
      "epoch": 1.0300393037619315,
      "grad_norm": 0.48456340808868187,
      "learning_rate": 6.640995950893694e-05,
      "loss": 0.4099,
      "step": 229
    },
    {
      "epoch": 1.0345311622683886,
      "grad_norm": 0.8131700755887267,
      "learning_rate": 6.625203552692724e-05,
      "loss": 0.4261,
      "step": 230
    },
    {
      "epoch": 1.0390230207748457,
      "grad_norm": 1.0354869448343091,
      "learning_rate": 6.609338942705429e-05,
      "loss": 0.4224,
      "step": 231
    },
    {
      "epoch": 1.0435148792813027,
      "grad_norm": 1.0441748151895986,
      "learning_rate": 6.593402557321523e-05,
      "loss": 0.4316,
      "step": 232
    },
    {
      "epoch": 1.0480067377877598,
      "grad_norm": 0.6277260520352427,
      "learning_rate": 6.577394834905052e-05,
      "loss": 0.4168,
      "step": 233
    },
    {
      "epoch": 1.0524985962942168,
      "grad_norm": 0.7608794537238852,
      "learning_rate": 6.56131621578234e-05,
      "loss": 0.413,
      "step": 234
    },
    {
      "epoch": 1.0569904548006739,
      "grad_norm": 0.9593747870071356,
      "learning_rate": 6.545167142229878e-05,
      "loss": 0.4224,
      "step": 235
    },
    {
      "epoch": 1.0614823133071307,
      "grad_norm": 0.8781448867437442,
      "learning_rate": 6.528948058462149e-05,
      "loss": 0.4135,
      "step": 236
    },
    {
      "epoch": 1.0659741718135878,
      "grad_norm": 0.386723750649497,
      "learning_rate": 6.512659410619423e-05,
      "loss": 0.3986,
      "step": 237
    },
    {
      "epoch": 1.0704660303200448,
      "grad_norm": 0.7859451159445625,
      "learning_rate": 6.496301646755469e-05,
      "loss": 0.4189,
      "step": 238
    },
    {
      "epoch": 1.074957888826502,
      "grad_norm": 0.7585050336954793,
      "learning_rate": 6.479875216825247e-05,
      "loss": 0.4112,
      "step": 239
    },
    {
      "epoch": 1.079449747332959,
      "grad_norm": 0.5696884403230228,
      "learning_rate": 6.463380572672517e-05,
      "loss": 0.4047,
      "step": 240
    },
    {
      "epoch": 1.083941605839416,
      "grad_norm": 0.41206064030731154,
      "learning_rate": 6.446818168017418e-05,
      "loss": 0.4087,
      "step": 241
    },
    {
      "epoch": 1.088433464345873,
      "grad_norm": 0.5743229232973563,
      "learning_rate": 6.430188458443983e-05,
      "loss": 0.4209,
      "step": 242
    },
    {
      "epoch": 1.0929253228523301,
      "grad_norm": 0.6765195809396967,
      "learning_rate": 6.413491901387611e-05,
      "loss": 0.4131,
      "step": 243
    },
    {
      "epoch": 1.0974171813587872,
      "grad_norm": 0.5316168249300188,
      "learning_rate": 6.396728956122484e-05,
      "loss": 0.4004,
      "step": 244
    },
    {
      "epoch": 1.1019090398652442,
      "grad_norm": 0.3675582288504814,
      "learning_rate": 6.379900083748932e-05,
      "loss": 0.4099,
      "step": 245
    },
    {
      "epoch": 1.1064008983717013,
      "grad_norm": 0.5469784997146581,
      "learning_rate": 6.363005747180744e-05,
      "loss": 0.4324,
      "step": 246
    },
    {
      "epoch": 1.1108927568781584,
      "grad_norm": 0.5990819245972675,
      "learning_rate": 6.346046411132449e-05,
      "loss": 0.3956,
      "step": 247
    },
    {
      "epoch": 1.1153846153846154,
      "grad_norm": 0.4291730181335894,
      "learning_rate": 6.329022542106521e-05,
      "loss": 0.4115,
      "step": 248
    },
    {
      "epoch": 1.1198764738910725,
      "grad_norm": 0.42747466515410476,
      "learning_rate": 6.311934608380548e-05,
      "loss": 0.4163,
      "step": 249
    },
    {
      "epoch": 1.1243683323975295,
      "grad_norm": 0.5830417883517935,
      "learning_rate": 6.294783079994355e-05,
      "loss": 0.4317,
      "step": 250
    },
    {
      "epoch": 1.1288601909039866,
      "grad_norm": 0.6362912082901964,
      "learning_rate": 6.277568428737075e-05,
      "loss": 0.411,
      "step": 251
    },
    {
      "epoch": 1.1333520494104437,
      "grad_norm": 0.4295094976566588,
      "learning_rate": 6.260291128134167e-05,
      "loss": 0.3882,
      "step": 252
    },
    {
      "epoch": 1.1378439079169007,
      "grad_norm": 0.5241460014550018,
      "learning_rate": 6.242951653434391e-05,
      "loss": 0.432,
      "step": 253
    },
    {
      "epoch": 1.1423357664233578,
      "grad_norm": 0.6551009024427942,
      "learning_rate": 6.225550481596742e-05,
      "loss": 0.3961,
      "step": 254
    },
    {
      "epoch": 1.1468276249298146,
      "grad_norm": 0.4615199034173447,
      "learning_rate": 6.20808809127732e-05,
      "loss": 0.4236,
      "step": 255
    },
    {
      "epoch": 1.1513194834362717,
      "grad_norm": 0.4776454533248158,
      "learning_rate": 6.190564962816178e-05,
      "loss": 0.3946,
      "step": 256
    },
    {
      "epoch": 1.1558113419427287,
      "grad_norm": 0.4702072198863169,
      "learning_rate": 6.172981578224089e-05,
      "loss": 0.4256,
      "step": 257
    },
    {
      "epoch": 1.1603032004491858,
      "grad_norm": 0.3879880154642624,
      "learning_rate": 6.155338421169307e-05,
      "loss": 0.408,
      "step": 258
    },
    {
      "epoch": 1.1647950589556428,
      "grad_norm": 0.325137012797199,
      "learning_rate": 6.137635976964252e-05,
      "loss": 0.4246,
      "step": 259
    },
    {
      "epoch": 1.1692869174621,
      "grad_norm": 0.4139282821779981,
      "learning_rate": 6.119874732552158e-05,
      "loss": 0.3977,
      "step": 260
    },
    {
      "epoch": 1.173778775968557,
      "grad_norm": 0.30309197791879683,
      "learning_rate": 6.102055176493691e-05,
      "loss": 0.3896,
      "step": 261
    },
    {
      "epoch": 1.178270634475014,
      "grad_norm": 0.45285378499157425,
      "learning_rate": 6.084177798953498e-05,
      "loss": 0.4338,
      "step": 262
    },
    {
      "epoch": 1.182762492981471,
      "grad_norm": 0.4561922091994438,
      "learning_rate": 6.066243091686729e-05,
      "loss": 0.4063,
      "step": 263
    },
    {
      "epoch": 1.1872543514879281,
      "grad_norm": 0.34720058499151,
      "learning_rate": 6.048251548025509e-05,
      "loss": 0.4157,
      "step": 264
    },
    {
      "epoch": 1.1917462099943852,
      "grad_norm": 0.4444769642948116,
      "learning_rate": 6.030203662865369e-05,
      "loss": 0.4025,
      "step": 265
    },
    {
      "epoch": 1.1962380685008422,
      "grad_norm": 0.3157010643900918,
      "learning_rate": 6.012099932651632e-05,
      "loss": 0.417,
      "step": 266
    },
    {
      "epoch": 1.2007299270072993,
      "grad_norm": 0.4607004582534085,
      "learning_rate": 5.99394085536576e-05,
      "loss": 0.4119,
      "step": 267
    },
    {
      "epoch": 1.2052217855137564,
      "grad_norm": 0.40037720793616105,
      "learning_rate": 5.97572693051165e-05,
      "loss": 0.4095,
      "step": 268
    },
    {
      "epoch": 1.2097136440202134,
      "grad_norm": 0.34033974272575146,
      "learning_rate": 5.9574586591019e-05,
      "loss": 0.419,
      "step": 269
    },
    {
      "epoch": 1.2142055025266705,
      "grad_norm": 0.30946307375970267,
      "learning_rate": 5.939136543644023e-05,
      "loss": 0.3956,
      "step": 270
    },
    {
      "epoch": 1.2186973610331275,
      "grad_norm": 0.37693528407713545,
      "learning_rate": 5.920761088126628e-05,
      "loss": 0.4289,
      "step": 271
    },
    {
      "epoch": 1.2231892195395846,
      "grad_norm": 0.3625770123000559,
      "learning_rate": 5.902332798005556e-05,
      "loss": 0.3905,
      "step": 272
    },
    {
      "epoch": 1.2276810780460417,
      "grad_norm": 0.369011171129336,
      "learning_rate": 5.8838521801899745e-05,
      "loss": 0.4075,
      "step": 273
    },
    {
      "epoch": 1.2321729365524985,
      "grad_norm": 0.3877982848413565,
      "learning_rate": 5.865319743028436e-05,
      "loss": 0.4195,
      "step": 274
    },
    {
      "epoch": 1.2366647950589555,
      "grad_norm": 0.4235810796921061,
      "learning_rate": 5.846735996294893e-05,
      "loss": 0.4267,
      "step": 275
    },
    {
      "epoch": 1.2411566535654126,
      "grad_norm": 0.5700279252391541,
      "learning_rate": 5.828101451174677e-05,
      "loss": 0.3894,
      "step": 276
    },
    {
      "epoch": 1.2456485120718697,
      "grad_norm": 0.8338556645440748,
      "learning_rate": 5.809416620250437e-05,
      "loss": 0.4211,
      "step": 277
    },
    {
      "epoch": 1.2501403705783267,
      "grad_norm": 0.7034463133742246,
      "learning_rate": 5.7906820174880395e-05,
      "loss": 0.3982,
      "step": 278
    },
    {
      "epoch": 1.2546322290847838,
      "grad_norm": 0.282318747665,
      "learning_rate": 5.7718981582224294e-05,
      "loss": 0.4127,
      "step": 279
    },
    {
      "epoch": 1.2591240875912408,
      "grad_norm": 0.4551961016258335,
      "learning_rate": 5.753065559143459e-05,
      "loss": 0.4075,
      "step": 280
    },
    {
      "epoch": 1.263615946097698,
      "grad_norm": 0.4224362715525179,
      "learning_rate": 5.734184738281669e-05,
      "loss": 0.4122,
      "step": 281
    },
    {
      "epoch": 1.268107804604155,
      "grad_norm": 0.3912978318459658,
      "learning_rate": 5.715256214994048e-05,
      "loss": 0.4105,
      "step": 282
    },
    {
      "epoch": 1.272599663110612,
      "grad_norm": 0.5028090052720567,
      "learning_rate": 5.6962805099497325e-05,
      "loss": 0.4097,
      "step": 283
    },
    {
      "epoch": 1.277091521617069,
      "grad_norm": 0.36305428770652426,
      "learning_rate": 5.6772581451157e-05,
      "loss": 0.4101,
      "step": 284
    },
    {
      "epoch": 1.2815833801235261,
      "grad_norm": 0.29030384456593744,
      "learning_rate": 5.658189643742403e-05,
      "loss": 0.4118,
      "step": 285
    },
    {
      "epoch": 1.2860752386299832,
      "grad_norm": 0.48356020891437856,
      "learning_rate": 5.639075530349375e-05,
      "loss": 0.4123,
      "step": 286
    },
    {
      "epoch": 1.2905670971364402,
      "grad_norm": 0.39034814232405435,
      "learning_rate": 5.6199163307108066e-05,
      "loss": 0.4245,
      "step": 287
    },
    {
      "epoch": 1.2950589556428973,
      "grad_norm": 0.4155146047824133,
      "learning_rate": 5.6007125718410814e-05,
      "loss": 0.4099,
      "step": 288
    },
    {
      "epoch": 1.2995508141493544,
      "grad_norm": 0.3184955707189902,
      "learning_rate": 5.581464781980277e-05,
      "loss": 0.3909,
      "step": 289
    },
    {
      "epoch": 1.3040426726558114,
      "grad_norm": 0.33081950542012384,
      "learning_rate": 5.562173490579641e-05,
      "loss": 0.4114,
      "step": 290
    },
    {
      "epoch": 1.3085345311622683,
      "grad_norm": 0.4188246651840897,
      "learning_rate": 5.5428392282870174e-05,
      "loss": 0.4126,
      "step": 291
    },
    {
      "epoch": 1.3130263896687255,
      "grad_norm": 0.4078104154498075,
      "learning_rate": 5.5234625269322585e-05,
      "loss": 0.4063,
      "step": 292
    },
    {
      "epoch": 1.3175182481751824,
      "grad_norm": 0.3351106313247552,
      "learning_rate": 5.5040439195125955e-05,
      "loss": 0.4157,
      "step": 293
    },
    {
      "epoch": 1.3220101066816397,
      "grad_norm": 0.3998877673309915,
      "learning_rate": 5.484583940177969e-05,
      "loss": 0.4105,
      "step": 294
    },
    {
      "epoch": 1.3265019651880965,
      "grad_norm": 0.3168105290580479,
      "learning_rate": 5.465083124216347e-05,
      "loss": 0.4177,
      "step": 295
    },
    {
      "epoch": 1.3309938236945535,
      "grad_norm": 0.38171508022894285,
      "learning_rate": 5.445542008038991e-05,
      "loss": 0.4114,
      "step": 296
    },
    {
      "epoch": 1.3354856822010106,
      "grad_norm": 0.4101559170777812,
      "learning_rate": 5.4259611291657094e-05,
      "loss": 0.4036,
      "step": 297
    },
    {
      "epoch": 1.3399775407074677,
      "grad_norm": 0.37156720067038823,
      "learning_rate": 5.4063410262100666e-05,
      "loss": 0.4042,
      "step": 298
    },
    {
      "epoch": 1.3444693992139247,
      "grad_norm": 0.4215354640021287,
      "learning_rate": 5.386682238864566e-05,
      "loss": 0.4048,
      "step": 299
    },
    {
      "epoch": 1.3489612577203818,
      "grad_norm": 0.40573405133318047,
      "learning_rate": 5.36698530788581e-05,
      "loss": 0.4199,
      "step": 300
    },
    {
      "epoch": 1.3534531162268388,
      "grad_norm": 0.4692527272437616,
      "learning_rate": 5.347250775079622e-05,
      "loss": 0.4128,
      "step": 301
    },
    {
      "epoch": 1.357944974733296,
      "grad_norm": 0.5228212494488583,
      "learning_rate": 5.327479183286143e-05,
      "loss": 0.4054,
      "step": 302
    },
    {
      "epoch": 1.362436833239753,
      "grad_norm": 0.4283044350017929,
      "learning_rate": 5.307671076364898e-05,
      "loss": 0.4029,
      "step": 303
    },
    {
      "epoch": 1.36692869174621,
      "grad_norm": 0.41603153313358904,
      "learning_rate": 5.287826999179841e-05,
      "loss": 0.4017,
      "step": 304
    },
    {
      "epoch": 1.371420550252667,
      "grad_norm": 0.33366242403586405,
      "learning_rate": 5.267947497584362e-05,
      "loss": 0.4007,
      "step": 305
    },
    {
      "epoch": 1.3759124087591241,
      "grad_norm": 0.37210505804033456,
      "learning_rate": 5.248033118406278e-05,
      "loss": 0.406,
      "step": 306
    },
    {
      "epoch": 1.3804042672655812,
      "grad_norm": 0.407677122147685,
      "learning_rate": 5.2280844094327835e-05,
      "loss": 0.3911,
      "step": 307
    },
    {
      "epoch": 1.3848961257720382,
      "grad_norm": 0.3625920847291524,
      "learning_rate": 5.208101919395388e-05,
      "loss": 0.4197,
      "step": 308
    },
    {
      "epoch": 1.3893879842784953,
      "grad_norm": 0.35589571408515086,
      "learning_rate": 5.188086197954823e-05,
      "loss": 0.4013,
      "step": 309
    },
    {
      "epoch": 1.3938798427849521,
      "grad_norm": 0.31765242308132235,
      "learning_rate": 5.1680377956859176e-05,
      "loss": 0.4017,
      "step": 310
    },
    {
      "epoch": 1.3983717012914094,
      "grad_norm": 0.39225436673433745,
      "learning_rate": 5.1479572640624575e-05,
      "loss": 0.389,
      "step": 311
    },
    {
      "epoch": 1.4028635597978663,
      "grad_norm": 0.3281070463104743,
      "learning_rate": 5.127845155442015e-05,
      "loss": 0.4072,
      "step": 312
    },
    {
      "epoch": 1.4073554183043235,
      "grad_norm": 0.2643607457931133,
      "learning_rate": 5.1077020230507515e-05,
      "loss": 0.4002,
      "step": 313
    },
    {
      "epoch": 1.4118472768107804,
      "grad_norm": 0.3097986304132296,
      "learning_rate": 5.087528420968207e-05,
      "loss": 0.4325,
      "step": 314
    },
    {
      "epoch": 1.4163391353172374,
      "grad_norm": 0.39074629128208,
      "learning_rate": 5.0673249041120545e-05,
      "loss": 0.3999,
      "step": 315
    },
    {
      "epoch": 1.4208309938236945,
      "grad_norm": 0.3767364164904721,
      "learning_rate": 5.0470920282228315e-05,
      "loss": 0.3998,
      "step": 316
    },
    {
      "epoch": 1.4253228523301515,
      "grad_norm": 0.277594755777687,
      "learning_rate": 5.026830349848663e-05,
      "loss": 0.4264,
      "step": 317
    },
    {
      "epoch": 1.4298147108366086,
      "grad_norm": 0.3280778028083623,
      "learning_rate": 5.0065404263299454e-05,
      "loss": 0.4047,
      "step": 318
    },
    {
      "epoch": 1.4343065693430657,
      "grad_norm": 0.39247800021426,
      "learning_rate": 4.9862228157840194e-05,
      "loss": 0.4142,
      "step": 319
    },
    {
      "epoch": 1.4387984278495227,
      "grad_norm": 0.3876986577819052,
      "learning_rate": 4.965878077089812e-05,
      "loss": 0.4076,
      "step": 320
    },
    {
      "epoch": 1.4432902863559798,
      "grad_norm": 0.2865061837372421,
      "learning_rate": 4.945506769872471e-05,
      "loss": 0.4193,
      "step": 321
    },
    {
      "epoch": 1.4477821448624368,
      "grad_norm": 0.2727996079835713,
      "learning_rate": 4.9251094544879677e-05,
      "loss": 0.4001,
      "step": 322
    },
    {
      "epoch": 1.452274003368894,
      "grad_norm": 0.2918942237241567,
      "learning_rate": 4.9046866920076806e-05,
      "loss": 0.4196,
      "step": 323
    },
    {
      "epoch": 1.456765861875351,
      "grad_norm": 0.2950754845340194,
      "learning_rate": 4.884239044202967e-05,
      "loss": 0.4105,
      "step": 324
    },
    {
      "epoch": 1.461257720381808,
      "grad_norm": 0.24670911055185038,
      "learning_rate": 4.8637670735297065e-05,
      "loss": 0.4132,
      "step": 325
    },
    {
      "epoch": 1.465749578888265,
      "grad_norm": 0.322486601307241,
      "learning_rate": 4.8432713431128315e-05,
      "loss": 0.412,
      "step": 326
    },
    {
      "epoch": 1.4702414373947221,
      "grad_norm": 0.27896706784221476,
      "learning_rate": 4.822752416730836e-05,
      "loss": 0.3878,
      "step": 327
    },
    {
      "epoch": 1.4747332959011792,
      "grad_norm": 0.31731987922420013,
      "learning_rate": 4.802210858800265e-05,
      "loss": 0.4272,
      "step": 328
    },
    {
      "epoch": 1.479225154407636,
      "grad_norm": 0.25550900582768693,
      "learning_rate": 4.7816472343602014e-05,
      "loss": 0.3941,
      "step": 329
    },
    {
      "epoch": 1.4837170129140933,
      "grad_norm": 0.35938482437350655,
      "learning_rate": 4.7610621090567025e-05,
      "loss": 0.4194,
      "step": 330
    },
    {
      "epoch": 1.4882088714205501,
      "grad_norm": 0.3894041992575585,
      "learning_rate": 4.740456049127262e-05,
      "loss": 0.4109,
      "step": 331
    },
    {
      "epoch": 1.4927007299270074,
      "grad_norm": 0.32576537259161636,
      "learning_rate": 4.719829621385219e-05,
      "loss": 0.4006,
      "step": 332
    },
    {
      "epoch": 1.4971925884334643,
      "grad_norm": 0.32654279427473487,
      "learning_rate": 4.699183393204175e-05,
      "loss": 0.4071,
      "step": 333
    },
    {
      "epoch": 1.5016844469399215,
      "grad_norm": 0.4068732897282033,
      "learning_rate": 4.6785179325023845e-05,
      "loss": 0.4118,
      "step": 334
    },
    {
      "epoch": 1.5061763054463784,
      "grad_norm": 0.4595765453656239,
      "learning_rate": 4.657833807727131e-05,
      "loss": 0.4105,
      "step": 335
    },
    {
      "epoch": 1.5106681639528357,
      "grad_norm": 0.32934013203935564,
      "learning_rate": 4.637131587839096e-05,
      "loss": 0.417,
      "step": 336
    },
    {
      "epoch": 1.5151600224592925,
      "grad_norm": 0.48353827888495043,
      "learning_rate": 4.616411842296703e-05,
      "loss": 0.4021,
      "step": 337
    },
    {
      "epoch": 1.5196518809657495,
      "grad_norm": 0.49403193767926096,
      "learning_rate": 4.5956751410404586e-05,
      "loss": 0.3968,
      "step": 338
    },
    {
      "epoch": 1.5241437394722066,
      "grad_norm": 0.2821246849644339,
      "learning_rate": 4.574922054477268e-05,
      "loss": 0.4094,
      "step": 339
    },
    {
      "epoch": 1.5286355979786637,
      "grad_norm": 0.36527583509462574,
      "learning_rate": 4.5541531534647524e-05,
      "loss": 0.4046,
      "step": 340
    },
    {
      "epoch": 1.5331274564851207,
      "grad_norm": 0.33782825846594283,
      "learning_rate": 4.533369009295543e-05,
      "loss": 0.4115,
| "step": 341 | |
| }, | |
| { | |
| "epoch": 1.5376193149915778, | |
| "grad_norm": 0.3139631387832028, | |
| "learning_rate": 4.512570193681563e-05, | |
| "loss": 0.4031, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 1.5421111734980348, | |
| "grad_norm": 0.3158197060998367, | |
| "learning_rate": 4.491757278738308e-05, | |
| "loss": 0.4176, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 1.546603032004492, | |
| "grad_norm": 0.3302158999182897, | |
| "learning_rate": 4.4709308369691044e-05, | |
| "loss": 0.4092, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 1.551094890510949, | |
| "grad_norm": 0.29538478228556253, | |
| "learning_rate": 4.450091441249363e-05, | |
| "loss": 0.3984, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 1.5555867490174058, | |
| "grad_norm": 0.3210857193848622, | |
| "learning_rate": 4.42923966481082e-05, | |
| "loss": 0.4124, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 1.560078607523863, | |
| "grad_norm": 0.2663635433039826, | |
| "learning_rate": 4.40837608122577e-05, | |
| "loss": 0.4034, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 1.56457046603032, | |
| "grad_norm": 0.26921246905359814, | |
| "learning_rate": 4.387501264391288e-05, | |
| "loss": 0.3925, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 1.5690623245367772, | |
| "grad_norm": 0.29711436918240935, | |
| "learning_rate": 4.366615788513443e-05, | |
| "loss": 0.4049, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 1.573554183043234, | |
| "grad_norm": 0.29975949512713473, | |
| "learning_rate": 4.345720228091503e-05, | |
| "loss": 0.4217, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 1.5780460415496913, | |
| "grad_norm": 0.3150365175905575, | |
| "learning_rate": 4.3248151579021345e-05, | |
| "loss": 0.4091, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 1.5825379000561481, | |
| "grad_norm": 0.29078939759895844, | |
| "learning_rate": 4.303901152983589e-05, | |
| "loss": 0.409, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 1.5870297585626054, | |
| "grad_norm": 0.3159981446910848, | |
| "learning_rate": 4.282978788619887e-05, | |
| "loss": 0.3849, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 1.5915216170690623, | |
| "grad_norm": 0.32912113367383816, | |
| "learning_rate": 4.262048640324993e-05, | |
| "loss": 0.4216, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 1.5960134755755195, | |
| "grad_norm": 0.29288868203130786, | |
| "learning_rate": 4.241111283826985e-05, | |
| "loss": 0.4142, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 1.6005053340819764, | |
| "grad_norm": 0.26900873749775583, | |
| "learning_rate": 4.220167295052219e-05, | |
| "loss": 0.3863, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 1.6049971925884334, | |
| "grad_norm": 0.2943447296099732, | |
| "learning_rate": 4.199217250109486e-05, | |
| "loss": 0.3998, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 1.6094890510948905, | |
| "grad_norm": 0.2974700123953123, | |
| "learning_rate": 4.178261725274161e-05, | |
| "loss": 0.4152, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 1.6139809096013475, | |
| "grad_norm": 0.28249957453922614, | |
| "learning_rate": 4.157301296972359e-05, | |
| "loss": 0.4153, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 1.6184727681078046, | |
| "grad_norm": 0.259797724301959, | |
| "learning_rate": 4.136336541765075e-05, | |
| "loss": 0.403, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 1.6229646266142617, | |
| "grad_norm": 0.3257283603832216, | |
| "learning_rate": 4.115368036332321e-05, | |
| "loss": 0.4056, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 1.6274564851207187, | |
| "grad_norm": 0.26689578853303386, | |
| "learning_rate": 4.0943963574572736e-05, | |
| "loss": 0.408, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 1.6319483436271758, | |
| "grad_norm": 0.26658298920192414, | |
| "learning_rate": 4.073422082010394e-05, | |
| "loss": 0.3851, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 1.6364402021336328, | |
| "grad_norm": 0.28419006374309425, | |
| "learning_rate": 4.052445786933574e-05, | |
| "loss": 0.4124, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 1.6409320606400897, | |
| "grad_norm": 0.2939448611616856, | |
| "learning_rate": 4.031468049224255e-05, | |
| "loss": 0.3919, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 1.645423919146547, | |
| "grad_norm": 0.22357829077026714, | |
| "learning_rate": 4.010489445919564e-05, | |
| "loss": 0.4032, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 1.6499157776530038, | |
| "grad_norm": 0.23621819447088288, | |
| "learning_rate": 3.989510554080437e-05, | |
| "loss": 0.406, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 1.654407636159461, | |
| "grad_norm": 0.24252116118580652, | |
| "learning_rate": 3.968531950775746e-05, | |
| "loss": 0.3847, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 1.658899494665918, | |
| "grad_norm": 0.23820691551522122, | |
| "learning_rate": 3.947554213066427e-05, | |
| "loss": 0.4162, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 1.6633913531723752, | |
| "grad_norm": 0.25744531937693094, | |
| "learning_rate": 3.926577917989607e-05, | |
| "loss": 0.3988, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 1.667883211678832, | |
| "grad_norm": 0.2559363776041835, | |
| "learning_rate": 3.905603642542728e-05, | |
| "loss": 0.3985, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 1.6723750701852893, | |
| "grad_norm": 0.23986797530736567, | |
| "learning_rate": 3.88463196366768e-05, | |
| "loss": 0.415, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 1.6768669286917461, | |
| "grad_norm": 0.3024405023590459, | |
| "learning_rate": 3.8636634582349275e-05, | |
| "loss": 0.4181, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 1.6813587871982034, | |
| "grad_norm": 0.26212467870742273, | |
| "learning_rate": 3.842698703027643e-05, | |
| "loss": 0.3884, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 1.6858506457046603, | |
| "grad_norm": 0.23942286730973997, | |
| "learning_rate": 3.8217382747258404e-05, | |
| "loss": 0.3983, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 1.6903425042111173, | |
| "grad_norm": 0.27463828640331295, | |
| "learning_rate": 3.8007827498905156e-05, | |
| "loss": 0.409, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 1.6948343627175744, | |
| "grad_norm": 0.2504990871201297, | |
| "learning_rate": 3.779832704947782e-05, | |
| "loss": 0.4077, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 1.6993262212240314, | |
| "grad_norm": 0.2881565193960826, | |
| "learning_rate": 3.758888716173016e-05, | |
| "loss": 0.4049, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 1.7038180797304885, | |
| "grad_norm": 0.2798106696498113, | |
| "learning_rate": 3.7379513596750086e-05, | |
| "loss": 0.3969, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 1.7083099382369455, | |
| "grad_norm": 0.2531684740356093, | |
| "learning_rate": 3.7170212113801145e-05, | |
| "loss": 0.4013, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 1.7128017967434026, | |
| "grad_norm": 0.23581900229494832, | |
| "learning_rate": 3.6960988470164124e-05, | |
| "loss": 0.4155, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 1.7172936552498597, | |
| "grad_norm": 0.2565820187404975, | |
| "learning_rate": 3.675184842097867e-05, | |
| "loss": 0.3986, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 1.7217855137563167, | |
| "grad_norm": 0.2668484179004775, | |
| "learning_rate": 3.654279771908498e-05, | |
| "loss": 0.4228, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 1.7262773722627736, | |
| "grad_norm": 0.2166817617434314, | |
| "learning_rate": 3.6333842114865585e-05, | |
| "loss": 0.3949, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 1.7307692307692308, | |
| "grad_norm": 0.274716879314434, | |
| "learning_rate": 3.612498735608713e-05, | |
| "loss": 0.4033, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 1.7352610892756877, | |
| "grad_norm": 0.24245384174880844, | |
| "learning_rate": 3.591623918774231e-05, | |
| "loss": 0.4163, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 1.739752947782145, | |
| "grad_norm": 0.26113211060233965, | |
| "learning_rate": 3.570760335189181e-05, | |
| "loss": 0.4022, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 1.7442448062886018, | |
| "grad_norm": 0.2534805949154817, | |
| "learning_rate": 3.549908558750639e-05, | |
| "loss": 0.3945, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 1.748736664795059, | |
| "grad_norm": 0.2633552123606204, | |
| "learning_rate": 3.5290691630308976e-05, | |
| "loss": 0.4144, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 1.753228523301516, | |
| "grad_norm": 0.22176423579895221, | |
| "learning_rate": 3.508242721261694e-05, | |
| "loss": 0.396, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 1.7577203818079732, | |
| "grad_norm": 0.25257982632693415, | |
| "learning_rate": 3.4874298063184396e-05, | |
| "loss": 0.4275, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 1.76221224031443, | |
| "grad_norm": 0.2422518175663573, | |
| "learning_rate": 3.466630990704459e-05, | |
| "loss": 0.3947, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 1.7667040988208873, | |
| "grad_norm": 0.29409782629840403, | |
| "learning_rate": 3.445846846535248e-05, | |
| "loss": 0.4082, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 1.7711959573273441, | |
| "grad_norm": 0.25780664367280237, | |
| "learning_rate": 3.425077945522733e-05, | |
| "loss": 0.3933, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 1.7756878158338012, | |
| "grad_norm": 0.32027854937086486, | |
| "learning_rate": 3.404324858959543e-05, | |
| "loss": 0.3948, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 1.7801796743402583, | |
| "grad_norm": 0.23910627046696867, | |
| "learning_rate": 3.3835881577032976e-05, | |
| "loss": 0.3919, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 1.7846715328467153, | |
| "grad_norm": 0.304853028429146, | |
| "learning_rate": 3.362868412160905e-05, | |
| "loss": 0.3904, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 1.7891633913531724, | |
| "grad_norm": 0.2799103231082546, | |
| "learning_rate": 3.34216619227287e-05, | |
| "loss": 0.4037, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 1.7936552498596294, | |
| "grad_norm": 0.22326908032051312, | |
| "learning_rate": 3.321482067497617e-05, | |
| "loss": 0.3933, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 1.7981471083660865, | |
| "grad_norm": 0.2455760902334925, | |
| "learning_rate": 3.300816606795826e-05, | |
| "loss": 0.4084, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 1.8026389668725435, | |
| "grad_norm": 0.24240071091375984, | |
| "learning_rate": 3.280170378614782e-05, | |
| "loss": 0.4093, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 1.8071308253790006, | |
| "grad_norm": 0.2186413081477587, | |
| "learning_rate": 3.2595439508727395e-05, | |
| "loss": 0.3925, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 1.8116226838854577, | |
| "grad_norm": 0.2647465546656249, | |
| "learning_rate": 3.238937890943299e-05, | |
| "loss": 0.4144, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 1.8161145423919147, | |
| "grad_norm": 0.21271997473762394, | |
| "learning_rate": 3.218352765639801e-05, | |
| "loss": 0.4133, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 1.8206064008983716, | |
| "grad_norm": 0.21700928801606473, | |
| "learning_rate": 3.197789141199736e-05, | |
| "loss": 0.3971, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 1.8250982594048288, | |
| "grad_norm": 0.22346798601918932, | |
| "learning_rate": 3.177247583269167e-05, | |
| "loss": 0.4018, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 1.8295901179112857, | |
| "grad_norm": 0.2520715121515013, | |
| "learning_rate": 3.1567286568871705e-05, | |
| "loss": 0.4026, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 1.834081976417743, | |
| "grad_norm": 0.2144950770599001, | |
| "learning_rate": 3.136232926470295e-05, | |
| "loss": 0.3851, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 1.8385738349241998, | |
| "grad_norm": 0.2298293252996528, | |
| "learning_rate": 3.115760955797033e-05, | |
| "loss": 0.3963, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 1.843065693430657, | |
| "grad_norm": 0.24640695632437667, | |
| "learning_rate": 3.0953133079923193e-05, | |
| "loss": 0.4155, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 1.847557551937114, | |
| "grad_norm": 0.28922984509106603, | |
| "learning_rate": 3.074890545512034e-05, | |
| "loss": 0.3925, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 1.8520494104435712, | |
| "grad_norm": 0.24528555576156022, | |
| "learning_rate": 3.0544932301275295e-05, | |
| "loss": 0.3974, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 1.856541268950028, | |
| "grad_norm": 0.30073262623875324, | |
| "learning_rate": 3.0341219229101892e-05, | |
| "loss": 0.4181, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 1.861033127456485, | |
| "grad_norm": 0.24741943198112243, | |
| "learning_rate": 3.0137771842159823e-05, | |
| "loss": 0.3792, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 1.8655249859629421, | |
| "grad_norm": 0.27594864681926284, | |
| "learning_rate": 2.9934595736700552e-05, | |
| "loss": 0.3976, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 1.8700168444693992, | |
| "grad_norm": 0.29829276307632024, | |
| "learning_rate": 2.973169650151338e-05, | |
| "loss": 0.4291, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 1.8745087029758563, | |
| "grad_norm": 0.23370424374244397, | |
| "learning_rate": 2.95290797177717e-05, | |
| "loss": 0.3951, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 1.8790005614823133, | |
| "grad_norm": 0.25339121289077726, | |
| "learning_rate": 2.9326750958879472e-05, | |
| "loss": 0.4007, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 1.8834924199887704, | |
| "grad_norm": 0.2514599422293906, | |
| "learning_rate": 2.9124715790317935e-05, | |
| "loss": 0.4112, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 1.8879842784952274, | |
| "grad_norm": 0.29652433410674994, | |
| "learning_rate": 2.89229797694925e-05, | |
| "loss": 0.3888, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 1.8924761370016845, | |
| "grad_norm": 0.2335645367111168, | |
| "learning_rate": 2.872154844557987e-05, | |
| "loss": 0.4, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 1.8969679955081415, | |
| "grad_norm": 0.29301822091649277, | |
| "learning_rate": 2.852042735937544e-05, | |
| "loss": 0.3958, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 1.9014598540145986, | |
| "grad_norm": 0.23358979395836138, | |
| "learning_rate": 2.831962204314084e-05, | |
| "loss": 0.4004, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 1.9059517125210554, | |
| "grad_norm": 0.297059931374844, | |
| "learning_rate": 2.811913802045179e-05, | |
| "loss": 0.4142, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 1.9104435710275127, | |
| "grad_norm": 0.2738594248273006, | |
| "learning_rate": 2.7918980806046124e-05, | |
| "loss": 0.3874, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 1.9149354295339696, | |
| "grad_norm": 0.22583777218223228, | |
| "learning_rate": 2.7719155905672175e-05, | |
| "loss": 0.3915, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 1.9194272880404268, | |
| "grad_norm": 0.3169065687894036, | |
| "learning_rate": 2.7519668815937225e-05, | |
| "loss": 0.4208, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 1.9239191465468837, | |
| "grad_norm": 0.24777052847128442, | |
| "learning_rate": 2.7320525024156376e-05, | |
| "loss": 0.3958, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 1.928411005053341, | |
| "grad_norm": 0.2619436465557623, | |
| "learning_rate": 2.71217300082016e-05, | |
| "loss": 0.4011, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 1.9329028635597978, | |
| "grad_norm": 0.26481370108104213, | |
| "learning_rate": 2.6923289236351025e-05, | |
| "loss": 0.39, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 1.937394722066255, | |
| "grad_norm": 0.24524014004110775, | |
| "learning_rate": 2.6725208167138582e-05, | |
| "loss": 0.4005, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 1.941886580572712, | |
| "grad_norm": 0.2864960126828243, | |
| "learning_rate": 2.6527492249203786e-05, | |
| "loss": 0.407, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 1.946378439079169, | |
| "grad_norm": 0.2573841742136599, | |
| "learning_rate": 2.6330146921141903e-05, | |
| "loss": 0.3944, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 1.950870297585626, | |
| "grad_norm": 0.261224295008013, | |
| "learning_rate": 2.6133177611354354e-05, | |
| "loss": 0.3898, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 1.955362156092083, | |
| "grad_norm": 0.2757414043642235, | |
| "learning_rate": 2.5936589737899347e-05, | |
| "loss": 0.4063, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 1.9598540145985401, | |
| "grad_norm": 0.2681241721691539, | |
| "learning_rate": 2.5740388708342912e-05, | |
| "loss": 0.4065, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 1.9643458731049972, | |
| "grad_norm": 0.29726882193114346, | |
| "learning_rate": 2.5544579919610107e-05, | |
| "loss": 0.4001, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 1.9688377316114543, | |
| "grad_norm": 0.2619941869458257, | |
| "learning_rate": 2.5349168757836556e-05, | |
| "loss": 0.3972, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 1.9733295901179113, | |
| "grad_norm": 0.2485677575844825, | |
| "learning_rate": 2.515416059822033e-05, | |
| "loss": 0.3918, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 1.9778214486243684, | |
| "grad_norm": 0.25150597142180403, | |
| "learning_rate": 2.495956080487407e-05, | |
| "loss": 0.4049, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 1.9823133071308254, | |
| "grad_norm": 0.2416442440686141, | |
| "learning_rate": 2.4765374730677428e-05, | |
| "loss": 0.3925, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 1.9868051656372825, | |
| "grad_norm": 0.27770044118825543, | |
| "learning_rate": 2.4571607717129836e-05, | |
| "loss": 0.386, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 1.9912970241437393, | |
| "grad_norm": 0.22843041184114035, | |
| "learning_rate": 2.4378265094203597e-05, | |
| "loss": 0.4103, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 1.9957888826501966, | |
| "grad_norm": 0.251513977240523, | |
| "learning_rate": 2.418535218019723e-05, | |
| "loss": 0.3956, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 2.0016844469399215, | |
| "grad_norm": 0.4091173787285266, | |
| "learning_rate": 2.3992874281589192e-05, | |
| "loss": 0.6375, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 2.0061763054463784, | |
| "grad_norm": 0.32860214397751897, | |
| "learning_rate": 2.380083669289194e-05, | |
| "loss": 0.356, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 2.0106681639528357, | |
| "grad_norm": 0.28403755688899435, | |
| "learning_rate": 2.360924469650626e-05, | |
| "loss": 0.3511, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 2.0151600224592925, | |
| "grad_norm": 0.3135426923437301, | |
| "learning_rate": 2.3418103562575984e-05, | |
| "loss": 0.3416, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 2.0196518809657498, | |
| "grad_norm": 0.3137440227950926, | |
| "learning_rate": 2.3227418548843008e-05, | |
| "loss": 0.3472, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 2.0241437394722066, | |
| "grad_norm": 0.3052589136436459, | |
| "learning_rate": 2.3037194900502685e-05, | |
| "loss": 0.3604, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 2.028635597978664, | |
| "grad_norm": 0.2985308545629162, | |
| "learning_rate": 2.2847437850059534e-05, | |
| "loss": 0.3618, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 2.0331274564851207, | |
| "grad_norm": 0.2769823613293178, | |
| "learning_rate": 2.2658152617183308e-05, | |
| "loss": 0.3474, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 2.0376193149915776, | |
| "grad_norm": 0.25170574363467396, | |
| "learning_rate": 2.2469344408565423e-05, | |
| "loss": 0.3434, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 2.042111173498035, | |
| "grad_norm": 0.2771050583191645, | |
| "learning_rate": 2.2281018417775716e-05, | |
| "loss": 0.3555, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 2.0466030320044917, | |
| "grad_norm": 0.24106049325961768, | |
| "learning_rate": 2.2093179825119622e-05, | |
| "loss": 0.3588, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 2.051094890510949, | |
| "grad_norm": 0.25497886667692615, | |
| "learning_rate": 2.1905833797495638e-05, | |
| "loss": 0.3401, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 2.055586749017406, | |
| "grad_norm": 0.2681465681083491, | |
| "learning_rate": 2.1718985488253236e-05, | |
| "loss": 0.3491, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 2.060078607523863, | |
| "grad_norm": 0.27091798666493266, | |
| "learning_rate": 2.1532640037051082e-05, | |
| "loss": 0.3607, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 2.06457046603032, | |
| "grad_norm": 0.26598351831864453, | |
| "learning_rate": 2.134680256971565e-05, | |
| "loss": 0.3644, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 2.069062324536777, | |
| "grad_norm": 0.22340596753268635, | |
| "learning_rate": 2.1161478198100265e-05, | |
| "loss": 0.3342, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 2.073554183043234, | |
| "grad_norm": 0.2641367297043688, | |
| "learning_rate": 2.097667201994445e-05, | |
| "loss": 0.3395, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 2.0780460415496913, | |
| "grad_norm": 0.22757325390505462, | |
| "learning_rate": 2.0792389118733735e-05, | |
| "loss": 0.3497, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 2.082537900056148, | |
| "grad_norm": 0.2695436559040029, | |
| "learning_rate": 2.060863456355979e-05, | |
| "loss": 0.3485, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 2.0870297585626054, | |
| "grad_norm": 0.236514722798989, | |
| "learning_rate": 2.0425413408981017e-05, | |
| "loss": 0.3465, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 2.0915216170690623, | |
| "grad_norm": 0.2112104503569974, | |
| "learning_rate": 2.0242730694883513e-05, | |
| "loss": 0.3411, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 2.0960134755755195, | |
| "grad_norm": 0.24517893387070383, | |
| "learning_rate": 2.0060591446342413e-05, | |
| "loss": 0.3378, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 2.1005053340819764, | |
| "grad_norm": 0.22147905462894701, | |
| "learning_rate": 1.9879000673483696e-05, | |
| "loss": 0.351, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 2.1049971925884337, | |
| "grad_norm": 0.21884126344376645, | |
| "learning_rate": 1.9697963371346334e-05, | |
| "loss": 0.348, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 2.1094890510948905, | |
| "grad_norm": 0.2189327106591954, | |
| "learning_rate": 1.9517484519744933e-05, | |
| "loss": 0.3392, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 2.1139809096013478, | |
| "grad_norm": 0.21918133809987306, | |
| "learning_rate": 1.933756908313273e-05, | |
| "loss": 0.3638, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 2.1184727681078046, | |
| "grad_norm": 0.24085887555135274, | |
| "learning_rate": 1.9158222010465034e-05, | |
| "loss": 0.3406, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 2.1229646266142614, | |
| "grad_norm": 0.18564764665994976, | |
| "learning_rate": 1.8979448235063103e-05, | |
| "loss": 0.35, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 2.1274564851207187, | |
| "grad_norm": 0.2019350919162774, | |
| "learning_rate": 1.8801252674478432e-05, | |
| "loss": 0.3529, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 2.1319483436271756, | |
| "grad_norm": 0.2299248503134596, | |
| "learning_rate": 1.8623640230357507e-05, | |
| "loss": 0.3597, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 2.136440202133633, | |
| "grad_norm": 0.1849051592133099, | |
| "learning_rate": 1.8446615788306928e-05, | |
| "loss": 0.3443, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 2.1409320606400897, | |
| "grad_norm": 0.2363240641342722, | |
| "learning_rate": 1.827018421775911e-05, | |
| "loss": 0.3453, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 2.145423919146547, | |
| "grad_norm": 0.20526178829757494, | |
| "learning_rate": 1.809435037183823e-05, | |
| "loss": 0.3459, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 2.149915777653004, | |
| "grad_norm": 0.21325883250052174, | |
| "learning_rate": 1.7919119087226797e-05, | |
| "loss": 0.3415, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 2.154407636159461, | |
| "grad_norm": 0.1815510758641163, | |
| "learning_rate": 1.7744495184032598e-05, | |
| "loss": 0.3468, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 2.158899494665918, | |
| "grad_norm": 0.2016306471491351, | |
| "learning_rate": 1.7570483465656103e-05, | |
| "loss": 0.3475, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 2.163391353172375, | |
| "grad_norm": 0.18802925833928252, | |
| "learning_rate": 1.7397088718658345e-05, | |
| "loss": 0.3624, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 2.167883211678832, | |
| "grad_norm": 0.17956304329390538, | |
| "learning_rate": 1.7224315712629254e-05, | |
| "loss": 0.3178, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 2.1723750701852893, | |
| "grad_norm": 0.22221218653508965, | |
| "learning_rate": 1.7052169200056447e-05, | |
| "loss": 0.373, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 2.176866928691746, | |
| "grad_norm": 0.16189007688443122, | |
| "learning_rate": 1.6880653916194526e-05, | |
| "loss": 0.3404, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 2.1813587871982034, | |
| "grad_norm": 0.20743852170439855, | |
| "learning_rate": 1.6709774578934794e-05, | |
| "loss": 0.3626, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 2.1858506457046603, | |
| "grad_norm": 0.1875445083851209, | |
| "learning_rate": 1.6539535888675508e-05, | |
| "loss": 0.3617, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 2.1903425042111175, | |
| "grad_norm": 0.18715180951661162, | |
| "learning_rate": 1.6369942528192567e-05, | |
| "loss": 0.3362, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 2.1948343627175744, | |
| "grad_norm": 0.21111347158534718, | |
| "learning_rate": 1.62009991625107e-05, | |
| "loss": 0.3564, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 2.199326221224031, | |
| "grad_norm": 0.16675861606264242, | |
| "learning_rate": 1.6032710438775163e-05, | |
| "loss": 0.3447, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 2.2038180797304885, | |
| "grad_norm": 0.18889222136672323, | |
| "learning_rate": 1.58650809861239e-05, | |
| "loss": 0.3388, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 2.2083099382369458, | |
| "grad_norm": 0.18374939591927242, | |
| "learning_rate": 1.5698115415560184e-05, | |
| "loss": 0.342, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 2.2128017967434026, | |
| "grad_norm": 0.1720897284047041, | |
| "learning_rate": 1.553181831982582e-05, | |
| "loss": 0.34, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 2.2172936552498594, | |
| "grad_norm": 0.19412818871097415, | |
| "learning_rate": 1.5366194273274823e-05, | |
| "loss": 0.3593, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 2.2217855137563167, | |
| "grad_norm": 0.16406076512392617, | |
| "learning_rate": 1.5201247831747522e-05, | |
| "loss": 0.3345, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 2.2262773722627736, | |
| "grad_norm": 0.17079153640886607, | |
| "learning_rate": 1.5036983532445302e-05, | |
| "loss": 0.3361, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 2.230769230769231, | |
| "grad_norm": 0.1835620760584162, | |
| "learning_rate": 1.4873405893805792e-05, | |
| "loss": 0.3416, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 2.2352610892756877, | |
| "grad_norm": 0.15129880371380047, | |
| "learning_rate": 1.4710519415378523e-05, | |
| "loss": 0.3479, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 2.239752947782145, | |
| "grad_norm": 0.18163683124066854, | |
| "learning_rate": 1.4548328577701245e-05, | |
| "loss": 0.3403, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 2.244244806288602, | |
| "grad_norm": 0.1722424043905087, | |
| "learning_rate": 1.4386837842176618e-05, | |
| "loss": 0.3338, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 2.248736664795059, | |
| "grad_norm": 0.1602114691044203, | |
| "learning_rate": 1.4226051650949506e-05, | |
| "loss": 0.3583, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 2.253228523301516, | |
| "grad_norm": 0.1833956593702748, | |
| "learning_rate": 1.4065974426784794e-05, | |
| "loss": 0.3452, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 2.257720381807973, | |
| "grad_norm": 0.17676908693287866, | |
| "learning_rate": 1.3906610572945724e-05, | |
| "loss": 0.3622, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 2.26221224031443, | |
| "grad_norm": 0.17774222616891286, | |
| "learning_rate": 1.374796447307278e-05, | |
| "loss": 0.3366, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 2.2667040988208873, | |
| "grad_norm": 0.16450100612772037, | |
| "learning_rate": 1.359004049106309e-05, | |
| "loss": 0.3698, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 2.271195957327344, | |
| "grad_norm": 0.15818839072111363, | |
| "learning_rate": 1.3432842970950418e-05, | |
| "loss": 0.3428, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 2.2756878158338014, | |
| "grad_norm": 0.16175453217358932, | |
| "learning_rate": 1.3276376236785638e-05, | |
| "loss": 0.3378, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 2.2801796743402583, | |
| "grad_norm": 0.15243615785406947, | |
| "learning_rate": 1.3120644592517815e-05, | |
| "loss": 0.3446, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 2.2846715328467155, | |
| "grad_norm": 0.16564111193403935, | |
| "learning_rate": 1.2965652321875797e-05, | |
| "loss": 0.3727, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 2.2891633913531724, | |
| "grad_norm": 0.15722752420441452, | |
| "learning_rate": 1.2811403688250428e-05, | |
| "loss": 0.3422, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 2.293655249859629, | |
| "grad_norm": 0.16529974529667987, | |
| "learning_rate": 1.2657902934577209e-05, | |
| "loss": 0.3536, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 2.2981471083660865, | |
| "grad_norm": 0.17003725438439155, | |
| "learning_rate": 1.2505154283219624e-05, | |
| "loss": 0.3551, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 2.3026389668725433, | |
| "grad_norm": 0.15179629688733343, | |
| "learning_rate": 1.235316193585299e-05, | |
| "loss": 0.3578, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 2.3071308253790006, | |
| "grad_norm": 0.16299117462388957, | |
| "learning_rate": 1.2201930073348884e-05, | |
| "loss": 0.3611, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 2.3116226838854574, | |
| "grad_norm": 0.17537334903346694, | |
| "learning_rate": 1.2051462855660133e-05, | |
| "loss": 0.3413, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 2.3161145423919147, | |
| "grad_norm": 0.14938083502587854, | |
| "learning_rate": 1.1901764421706386e-05, | |
| "loss": 0.3473, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 2.3206064008983716, | |
| "grad_norm": 0.15589905195055775, | |
| "learning_rate": 1.175283888926027e-05, | |
| "loss": 0.3379, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 2.325098259404829, | |
| "grad_norm": 0.15860417415899525, | |
| "learning_rate": 1.1604690354834114e-05, | |
| "loss": 0.3522, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 2.3295901179112857, | |
| "grad_norm": 0.16198086520569122, | |
| "learning_rate": 1.1457322893567277e-05, | |
| "loss": 0.3547, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 2.334081976417743, | |
| "grad_norm": 0.17126957203164983, | |
| "learning_rate": 1.1310740559114044e-05, | |
| "loss": 0.3438, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 2.3385738349242, | |
| "grad_norm": 0.1604876487499848, | |
| "learning_rate": 1.1164947383532118e-05, | |
| "loss": 0.3365, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 2.343065693430657, | |
| "grad_norm": 0.17566149893352012, | |
| "learning_rate": 1.1019947377171714e-05, | |
| "loss": 0.3795, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 2.347557551937114, | |
| "grad_norm": 0.1527369227777921, | |
| "learning_rate": 1.0875744528565258e-05, | |
| "loss": 0.3313, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 2.352049410443571, | |
| "grad_norm": 0.1738910354931097, | |
| "learning_rate": 1.0732342804317649e-05, | |
| "loss": 0.3547, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 2.356541268950028, | |
| "grad_norm": 0.15829513752774602, | |
| "learning_rate": 1.0589746148997175e-05, | |
| "loss": 0.341, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 2.3610331274564853, | |
| "grad_norm": 0.15387604623714768, | |
| "learning_rate": 1.044795848502698e-05, | |
| "loss": 0.3562, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 2.365524985962942, | |
| "grad_norm": 0.15853077870345747, | |
| "learning_rate": 1.0306983712577226e-05, | |
| "loss": 0.3407, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 2.370016844469399, | |
| "grad_norm": 0.1642494316130536, | |
| "learning_rate": 1.0166825709457725e-05, | |
| "loss": 0.3398, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 2.3745087029758563, | |
| "grad_norm": 0.14876808256366694, | |
| "learning_rate": 1.0027488331011335e-05, | |
| "loss": 0.3629, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 2.3790005614823135, | |
| "grad_norm": 0.16379340889108565, | |
| "learning_rate": 9.888975410007905e-06, | |
| "loss": 0.3489, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 2.3834924199887704, | |
| "grad_norm": 0.16592856273024006, | |
| "learning_rate": 9.751290756538822e-06, | |
| "loss": 0.368, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 2.387984278495227, | |
| "grad_norm": 0.1388571025281717, | |
| "learning_rate": 9.614438157912223e-06, | |
| "loss": 0.3333, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 2.3924761370016845, | |
| "grad_norm": 0.16532900879126655, | |
| "learning_rate": 9.47842137854881e-06, | |
| "loss": 0.3618, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 2.3969679955081413, | |
| "grad_norm": 0.1595813852533839, | |
| "learning_rate": 9.343244159878315e-06, | |
| "loss": 0.3405, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 2.4014598540145986, | |
| "grad_norm": 0.16331964277836997, | |
| "learning_rate": 9.208910220236564e-06, | |
| "loss": 0.3468, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 2.4059517125210554, | |
| "grad_norm": 0.15235072470887961, | |
| "learning_rate": 9.07542325476321e-06, | |
| "loss": 0.3612, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 2.4104435710275127, | |
| "grad_norm": 0.15314179707744743, | |
| "learning_rate": 8.94278693530009e-06, | |
| "loss": 0.3449, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 2.4149354295339696, | |
| "grad_norm": 0.15818297505598225, | |
| "learning_rate": 8.811004910290223e-06, | |
| "loss": 0.3521, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 2.419427288040427, | |
| "grad_norm": 0.13726636412942167, | |
| "learning_rate": 8.68008080467743e-06, | |
| "loss": 0.359, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 2.4239191465468837, | |
| "grad_norm": 0.15325964779422865, | |
| "learning_rate": 8.550018219806654e-06, | |
| "loss": 0.321, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 2.428411005053341, | |
| "grad_norm": 0.1562344405772757, | |
| "learning_rate": 8.420820733324895e-06, | |
| "loss": 0.3585, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 2.432902863559798, | |
| "grad_norm": 0.1410637159434683, | |
| "learning_rate": 8.29249189908277e-06, | |
| "loss": 0.3512, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 2.437394722066255, | |
| "grad_norm": 0.1501006000439916, | |
| "learning_rate": 8.16503524703678e-06, | |
| "loss": 0.3377, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 2.441886580572712, | |
| "grad_norm": 0.14626390055339436, | |
| "learning_rate": 8.038454283152228e-06, | |
| "loss": 0.345, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 2.446378439079169, | |
| "grad_norm": 0.14739939271307606, | |
| "learning_rate": 7.912752489306732e-06, | |
| "loss": 0.346, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 2.450870297585626, | |
| "grad_norm": 0.1533306899579021, | |
| "learning_rate": 7.787933323194483e-06, | |
| "loss": 0.3499, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 2.4553621560920833, | |
| "grad_norm": 0.1309572466646881, | |
| "learning_rate": 7.664000218231132e-06, | |
| "loss": 0.3262, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 2.45985401459854, | |
| "grad_norm": 0.15580075282771919, | |
| "learning_rate": 7.540956583459329e-06, | |
| "loss": 0.3483, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 2.464345873104997, | |
| "grad_norm": 0.1675772733776805, | |
| "learning_rate": 7.418805803454976e-06, | |
| "loss": 0.3671, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 2.4688377316114543, | |
| "grad_norm": 0.13343861360540754, | |
| "learning_rate": 7.297551238234098e-06, | |
| "loss": 0.3359, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 2.473329590117911, | |
| "grad_norm": 0.17400956570484363, | |
| "learning_rate": 7.1771962231604476e-06, | |
| "loss": 0.3612, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 2.4778214486243684, | |
| "grad_norm": 0.14405074965057518, | |
| "learning_rate": 7.057744068853729e-06, | |
| "loss": 0.3565, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 2.482313307130825, | |
| "grad_norm": 0.13613360926989856, | |
| "learning_rate": 6.939198061098564e-06, | |
| "loss": 0.3422, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 2.4868051656372825, | |
| "grad_norm": 0.17828647703786085, | |
| "learning_rate": 6.821561460754087e-06, | |
| "loss": 0.3484, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 2.4912970241437393, | |
| "grad_norm": 0.15442919344710362, | |
| "learning_rate": 6.704837503664246e-06, | |
| "loss": 0.3396, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 2.4957888826501966, | |
| "grad_norm": 0.137631282530382, | |
| "learning_rate": 6.589029400568816e-06, | |
| "loss": 0.3499, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 2.5002807411566534, | |
| "grad_norm": 0.14693935190563343, | |
| "learning_rate": 6.47414033701506e-06, | |
| "loss": 0.3283, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 2.5047725996631107, | |
| "grad_norm": 0.1767823562021828, | |
| "learning_rate": 6.360173473270111e-06, | |
| "loss": 0.3686, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 2.5092644581695676, | |
| "grad_norm": 0.14491588106378347, | |
| "learning_rate": 6.247131944234035e-06, | |
| "loss": 0.3229, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 2.513756316676025, | |
| "grad_norm": 0.14597911184028542, | |
| "learning_rate": 6.135018859353632e-06, | |
| "loss": 0.3404, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 2.5182481751824817, | |
| "grad_norm": 0.15851416639813481, | |
| "learning_rate": 6.023837302536857e-06, | |
| "loss": 0.3621, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 2.522740033688939, | |
| "grad_norm": 0.14799715793071916, | |
| "learning_rate": 5.913590332068011e-06, | |
| "loss": 0.3395, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 2.527231892195396, | |
| "grad_norm": 0.1434869131744014, | |
| "learning_rate": 5.804280980523622e-06, | |
| "loss": 0.3459, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 2.531723750701853, | |
| "grad_norm": 0.14402655705068848, | |
| "learning_rate": 5.695912254689022e-06, | |
| "loss": 0.3344, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 2.53621560920831, | |
| "grad_norm": 0.14411631895283028, | |
| "learning_rate": 5.58848713547564e-06, | |
| "loss": 0.3542, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 2.5407074677147667, | |
| "grad_norm": 0.15856697444491646, | |
| "learning_rate": 5.4820085778390084e-06, | |
| "loss": 0.3624, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 2.545199326221224, | |
| "grad_norm": 0.14160084901579828, | |
| "learning_rate": 5.376479510697472e-06, | |
| "loss": 0.355, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 2.5496911847276813, | |
| "grad_norm": 0.1265889667940892, | |
| "learning_rate": 5.271902836851635e-06, | |
| "loss": 0.3348, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 2.554183043234138, | |
| "grad_norm": 0.1285735736923682, | |
| "learning_rate": 5.168281432904496e-06, | |
| "loss": 0.332, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 2.558674901740595, | |
| "grad_norm": 0.1554259089789184, | |
| "learning_rate": 5.065618149182343e-06, | |
| "loss": 0.385, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 2.5631667602470523, | |
| "grad_norm": 0.14072544453786964, | |
| "learning_rate": 4.963915809656325e-06, | |
| "loss": 0.3401, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 2.5676586187535095, | |
| "grad_norm": 0.13255000877992487, | |
| "learning_rate": 4.863177211864791e-06, | |
| "loss": 0.3426, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 2.5721504772599664, | |
| "grad_norm": 0.1395445098697114, | |
| "learning_rate": 4.763405126836333e-06, | |
| "loss": 0.3479, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 2.576642335766423, | |
| "grad_norm": 0.13569816540941806, | |
| "learning_rate": 4.664602299013559e-06, | |
| "loss": 0.3383, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 2.5811341942728805, | |
| "grad_norm": 0.1447597670244306, | |
| "learning_rate": 4.566771446177605e-06, | |
| "loss": 0.3758, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 2.5856260527793373, | |
| "grad_norm": 0.14529065649247142, | |
| "learning_rate": 4.469915259373369e-06, | |
| "loss": 0.3326, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 2.5901179112857946, | |
| "grad_norm": 0.12660555233054072, | |
| "learning_rate": 4.3740364028355045e-06, | |
| "loss": 0.3462, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 2.5946097697922514, | |
| "grad_norm": 0.13197892060468966, | |
| "learning_rate": 4.279137513915124e-06, | |
| "loss": 0.3525, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 2.5991016282987087, | |
| "grad_norm": 0.1440064619866894, | |
| "learning_rate": 4.185221203007244e-06, | |
| "loss": 0.3355, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 2.6035934868051656, | |
| "grad_norm": 0.15358176185013508, | |
| "learning_rate": 4.092290053478998e-06, | |
| "loss": 0.3515, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 2.608085345311623, | |
| "grad_norm": 0.14006779642635536, | |
| "learning_rate": 4.0003466215985745e-06, | |
| "loss": 0.3492, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 2.6125772038180797, | |
| "grad_norm": 0.14117656129861625, | |
| "learning_rate": 3.909393436464881e-06, | |
| "loss": 0.3495, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 2.6170690623245365, | |
| "grad_norm": 0.1428894395260911, | |
| "learning_rate": 3.819432999938006e-06, | |
| "loss": 0.3587, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 2.621560920830994, | |
| "grad_norm": 0.12298388160882866, | |
| "learning_rate": 3.7304677865703665e-06, | |
| "loss": 0.3195, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 2.626052779337451, | |
| "grad_norm": 0.134562778425376, | |
| "learning_rate": 3.6425002435386714e-06, | |
| "loss": 0.3563, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 2.630544637843908, | |
| "grad_norm": 0.12286799646937116, | |
| "learning_rate": 3.5555327905765925e-06, | |
| "loss": 0.3375, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 2.6350364963503647, | |
| "grad_norm": 0.1335990567610802, | |
| "learning_rate": 3.469567819908193e-06, | |
| "loss": 0.3616, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 2.639528354856822, | |
| "grad_norm": 0.13134286710446316, | |
| "learning_rate": 3.384607696182145e-06, | |
| "loss": 0.3473, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 2.6440202133632793, | |
| "grad_norm": 0.13222142052984423, | |
| "learning_rate": 3.3006547564066806e-06, | |
| "loss": 0.3512, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 2.648512071869736, | |
| "grad_norm": 0.13573745547933902, | |
| "learning_rate": 3.2177113098852985e-06, | |
| "loss": 0.335, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 2.653003930376193, | |
| "grad_norm": 0.13789788789194135, | |
| "learning_rate": 3.1357796381532445e-06, | |
| "loss": 0.3548, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 2.6574957888826503, | |
| "grad_norm": 0.12582174116745817, | |
| "learning_rate": 3.054861994914755e-06, | |
| "loss": 0.3516, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 2.661987647389107, | |
| "grad_norm": 0.12675599882868277, | |
| "learning_rate": 2.9749606059810586e-06, | |
| "loss": 0.3456, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 2.6664795058955644, | |
| "grad_norm": 0.12867433860828717, | |
| "learning_rate": 2.8960776692091764e-06, | |
| "loss": 0.3398, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 2.670971364402021, | |
| "grad_norm": 0.1278038155531484, | |
| "learning_rate": 2.8182153544414316e-06, | |
| "loss": 0.3497, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 2.6754632229084785, | |
| "grad_norm": 0.13194736277055646, | |
| "learning_rate": 2.7413758034457648e-06, | |
| "loss": 0.3525, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 2.6799550814149353, | |
| "grad_norm": 0.12711834545130737, | |
| "learning_rate": 2.665561129856853e-06, | |
| "loss": 0.3454, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 2.6844469399213926, | |
| "grad_norm": 0.13461390901725576, | |
| "learning_rate": 2.590773419117936e-06, | |
| "loss": 0.3479, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 2.6889387984278494, | |
| "grad_norm": 0.12476695250219681, | |
| "learning_rate": 2.517014728423464e-06, | |
| "loss": 0.3389, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 2.6934306569343067, | |
| "grad_norm": 0.12616306722285817, | |
| "learning_rate": 2.4442870866625157e-06, | |
| "loss": 0.3511, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 2.6979225154407636, | |
| "grad_norm": 0.1292112184484444, | |
| "learning_rate": 2.3725924943629863e-06, | |
| "loss": 0.3318, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 2.702414373947221, | |
| "grad_norm": 0.12845075283064739, | |
| "learning_rate": 2.301932923636545e-06, | |
| "loss": 0.353, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 2.7069062324536777, | |
| "grad_norm": 0.12637795993954673, | |
| "learning_rate": 2.232310318124413e-06, | |
| "loss": 0.3413, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 2.7113980909601345, | |
| "grad_norm": 0.13332555159827292, | |
| "learning_rate": 2.1637265929438777e-06, | |
| "loss": 0.3496, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 2.715889949466592, | |
| "grad_norm": 0.13244838873007425, | |
| "learning_rate": 2.0961836346356225e-06, | |
| "loss": 0.348, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 2.720381807973049, | |
| "grad_norm": 0.12704490778311603, | |
| "learning_rate": 2.0296833011118356e-06, | |
| "loss": 0.3606, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 2.724873666479506, | |
| "grad_norm": 0.1164452763005093, | |
| "learning_rate": 1.9642274216051005e-06, | |
| "loss": 0.3276, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 2.7293655249859627, | |
| "grad_norm": 0.12324889152230548, | |
| "learning_rate": 1.8998177966180797e-06, | |
| "loss": 0.341, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 2.73385738349242, | |
| "grad_norm": 0.12520189492260383, | |
| "learning_rate": 1.836456197873986e-06, | |
| "loss": 0.3427, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 2.7383492419988773, | |
| "grad_norm": 0.12490531468817863, | |
| "learning_rate": 1.7741443682678472e-06, | |
| "loss": 0.3508, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 2.742841100505334, | |
| "grad_norm": 0.12247541880147882, | |
| "learning_rate": 1.7128840218185816e-06, | |
| "loss": 0.3467, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 2.747332959011791, | |
| "grad_norm": 0.12762903734782927, | |
| "learning_rate": 1.6526768436218122e-06, | |
| "loss": 0.3314, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 2.7518248175182483, | |
| "grad_norm": 0.12027242566476193, | |
| "learning_rate": 1.5935244898035574e-06, | |
| "loss": 0.3477, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 2.756316676024705, | |
| "grad_norm": 0.12110151973249488, | |
| "learning_rate": 1.5354285874746455e-06, | |
| "loss": 0.3441, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 2.7608085345311624, | |
| "grad_norm": 0.12530808955011244, | |
| "learning_rate": 1.4783907346859728e-06, | |
| "loss": 0.339, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 2.765300393037619, | |
| "grad_norm": 0.12828910971029417, | |
| "learning_rate": 1.4224125003845423e-06, | |
| "loss": 0.3468, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 2.7697922515440765, | |
| "grad_norm": 0.12606197981784106, | |
| "learning_rate": 1.367495424370291e-06, | |
| "loss": 0.3313, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 2.7742841100505333, | |
| "grad_norm": 0.12769718574523453, | |
| "learning_rate": 1.3136410172537705e-06, | |
| "loss": 0.3602, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 2.7787759685569906, | |
| "grad_norm": 0.11719346291579588, | |
| "learning_rate": 1.2608507604145603e-06, | |
| "loss": 0.35, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 2.7832678270634474, | |
| "grad_norm": 0.12155001879861399, | |
| "learning_rate": 1.2091261059605253e-06, | |
| "loss": 0.3431, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 2.7877596855699043, | |
| "grad_norm": 0.1282704083536137, | |
| "learning_rate": 1.1584684766878973e-06, | |
| "loss": 0.3297, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 2.7922515440763616, | |
| "grad_norm": 0.1301599388083576, | |
| "learning_rate": 1.1088792660421067e-06, | |
| "loss": 0.3568, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 2.796743402582819, | |
| "grad_norm": 0.12264772159084407, | |
| "learning_rate": 1.060359838079461e-06, | |
| "loss": 0.3491, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 2.8012352610892757, | |
| "grad_norm": 0.1259643831197078, | |
| "learning_rate": 1.0129115274296431e-06, | |
| "loss": 0.3559, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 2.8057271195957325, | |
| "grad_norm": 0.12121168834224898, | |
| "learning_rate": 9.665356392589742e-07, | |
| "loss": 0.3424, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 2.81021897810219, | |
| "grad_norm": 0.12594678606877638, | |
| "learning_rate": 9.212334492345243e-07, | |
| "loss": 0.3436, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 2.814710836608647, | |
| "grad_norm": 0.12322634285205575, | |
| "learning_rate": 8.770062034890237e-07, | |
| "loss": 0.3454, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 2.819202695115104, | |
| "grad_norm": 0.12221357175187589, | |
| "learning_rate": 8.33855118586584e-07, | |
| "loss": 0.3326, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 2.8236945536215607, | |
| "grad_norm": 0.12156102861514406, | |
| "learning_rate": 7.917813814892228e-07, | |
| "loss": 0.3562, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 2.828186412128018, | |
| "grad_norm": 0.12320198694258011, | |
| "learning_rate": 7.507861495242319e-07, | |
| "loss": 0.3491, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 2.832678270634475, | |
| "grad_norm": 0.12553861491418047, | |
| "learning_rate": 7.108705503523361e-07, | |
| "loss": 0.3427, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 2.837170129140932, | |
| "grad_norm": 0.1162989549721062, | |
| "learning_rate": 6.720356819366647e-07, | |
| "loss": 0.3548, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 2.841661987647389, | |
| "grad_norm": 0.11924254676435524, | |
| "learning_rate": 6.342826125125623e-07, | |
| "loss": 0.3407, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 2.8461538461538463, | |
| "grad_norm": 0.12560899437091247, | |
| "learning_rate": 5.976123805582035e-07, | |
| "loss": 0.35, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 2.850645704660303, | |
| "grad_norm": 0.11719308739701322, | |
| "learning_rate": 5.620259947660112e-07, | |
| "loss": 0.3536, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 2.8551375631667604, | |
| "grad_norm": 0.12353516331310158, | |
| "learning_rate": 5.275244340149411e-07, | |
| "loss": 0.3569, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 2.859629421673217, | |
| "grad_norm": 0.11915354594679166, | |
| "learning_rate": 4.941086473435209e-07, | |
| "loss": 0.3379, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 2.8641212801796745, | |
| "grad_norm": 0.12992646044543846, | |
| "learning_rate": 4.617795539237735e-07, | |
| "loss": 0.3387, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 2.8686131386861313, | |
| "grad_norm": 0.12871571213127825, | |
| "learning_rate": 4.305380430359085e-07, | |
| "loss": 0.3516, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 2.8731049971925886, | |
| "grad_norm": 0.12312806977223319, | |
| "learning_rate": 4.0038497404389254e-07, | |
| "loss": 0.3579, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 2.8775968556990454, | |
| "grad_norm": 0.11737007844018121, | |
| "learning_rate": 3.7132117637177054e-07, | |
| "loss": 0.3392, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 2.8820887142055023, | |
| "grad_norm": 0.11430758867158565, | |
| "learning_rate": 3.433474494808842e-07, | |
| "loss": 0.3295, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 2.8865805727119596, | |
| "grad_norm": 0.12081138547948157, | |
| "learning_rate": 3.1646456284786685e-07, | |
| "loss": 0.3641, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 2.891072431218417, | |
| "grad_norm": 0.11898446742968109, | |
| "learning_rate": 2.9067325594348326e-07, | |
| "loss": 0.3429, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 2.8955642897248737, | |
| "grad_norm": 0.1160085337963267, | |
| "learning_rate": 2.659742382122721e-07, | |
| "loss": 0.343, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 2.9000561482313305, | |
| "grad_norm": 0.11625669254627106, | |
| "learning_rate": 2.4236818905305493e-07, | |
| "loss": 0.3491, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 2.904548006737788, | |
| "grad_norm": 0.11787797010632982, | |
| "learning_rate": 2.1985575780023138e-07, | |
| "loss": 0.3471, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 2.909039865244245, | |
| "grad_norm": 0.11799589225781788, | |
| "learning_rate": 1.984375637059266e-07, | |
| "loss": 0.3378, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 2.913531723750702, | |
| "grad_norm": 0.12186949331713695, | |
| "learning_rate": 1.7811419592295153e-07, | |
| "loss": 0.348, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 2.9180235822571587, | |
| "grad_norm": 0.12036506084316496, | |
| "learning_rate": 1.5888621348860266e-07, | |
| "loss": 0.3494, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 2.922515440763616, | |
| "grad_norm": 0.11889223734619678, | |
| "learning_rate": 1.4075414530927422e-07, | |
| "loss": 0.3451, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 2.927007299270073, | |
| "grad_norm": 0.11580885819276238, | |
| "learning_rate": 1.2371849014593208e-07, | |
| "loss": 0.3451, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 2.93149915777653, | |
| "grad_norm": 0.1131234132693257, | |
| "learning_rate": 1.0777971660036024e-07, | |
| "loss": 0.3389, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 2.935991016282987, | |
| "grad_norm": 0.11871470901088223, | |
| "learning_rate": 9.293826310230014e-08, | |
| "loss": 0.3439, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 2.9404828747894443, | |
| "grad_norm": 0.1174736101535469, | |
| "learning_rate": 7.919453789738019e-08, | |
| "loss": 0.3337, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 2.944974733295901, | |
| "grad_norm": 0.11472264545351354, | |
| "learning_rate": 6.654891903588035e-08, | |
| "loss": 0.3312, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 2.9494665918023584, | |
| "grad_norm": 0.12054684683030566, | |
| "learning_rate": 5.500175436234489e-08, | |
| "loss": 0.3615, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 2.953958450308815, | |
| "grad_norm": 0.11681721114397958, | |
| "learning_rate": 4.455336150600786e-08, | |
| "loss": 0.3399, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 2.958450308815272, | |
| "grad_norm": 0.11927486658376495, | |
| "learning_rate": 3.520402787204891e-08, | |
| "loss": 0.345, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 2.9629421673217293, | |
| "grad_norm": 0.12179793775604957, | |
| "learning_rate": 2.6954010633706284e-08, | |
| "loss": 0.3552, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 2.9674340258281866, | |
| "grad_norm": 0.12003995738968648, | |
| "learning_rate": 1.9803536725180317e-08, | |
| "loss": 0.3411, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 2.9719258843346434, | |
| "grad_norm": 0.11474218422584073, | |
| "learning_rate": 1.3752802835407254e-08, | |
| "loss": 0.3486, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 2.9764177428411003, | |
| "grad_norm": 0.11919954648485624, | |
| "learning_rate": 8.801975402650264e-09, | |
| "loss": 0.3483, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 2.9809096013475576, | |
| "grad_norm": 0.123294915512741, | |
| "learning_rate": 4.951190609898682e-09, | |
| "loss": 0.3475, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 2.985401459854015, | |
| "grad_norm": 0.11489550783071271, | |
| "learning_rate": 2.2005543811554066e-09, | |
| "loss": 0.338, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 2.9898933183604717, | |
| "grad_norm": 0.11585581466960052, | |
| "learning_rate": 5.501423784970428e-10, | |
| "loss": 0.3412, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 2.9943851768669285, | |
| "grad_norm": 0.12306103394010724, | |
| "learning_rate": 0.0, | |
| "loss": 0.3482, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 2.9943851768669285, | |
| "step": 666, | |
| "total_flos": 7.06867120956244e+18, | |
| "train_loss": 0.4220664166025929, | |
| "train_runtime": 109600.6183, | |
| "train_samples_per_second": 3.119, | |
| "train_steps_per_second": 0.006 | |
| } | |
| ], | |
| "logging_steps": 1.0, | |
| "max_steps": 666, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 7.06867120956244e+18, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
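
The record above follows the Hugging Face Trainer `trainer_state.json` layout: `log_history` holds one dict per logged step plus a final run-summary entry (`train_loss`, `train_runtime`, ...). Below is a minimal sketch of how such a file can be consumed, assuming it is saved locally as `trainer_state.json` (the path is hypothetical) and that matplotlib is available; it separates the per-step records from the summary and plots the loss and learning-rate schedule.

```python
import json

import matplotlib.pyplot as plt

# Load the Trainer state shown above (file path is an assumption).
with open("trainer_state.json") as f:
    state = json.load(f)

# The last log_history entry is the run summary (train_loss, train_runtime, ...)
# and carries no per-step "loss" key, so keep only the per-step records.
records = [r for r in state["log_history"] if "loss" in r]

steps = [r["step"] for r in records]
losses = [r["loss"] for r in records]
lrs = [r["learning_rate"] for r in records]

print(f"{len(records)} logged steps, final loss {losses[-1]:.4f}")

# Plot training loss and the learning-rate schedule side by side.
fig, (ax_loss, ax_lr) = plt.subplots(1, 2, figsize=(10, 4))
ax_loss.plot(steps, losses)
ax_loss.set_xlabel("step")
ax_loss.set_ylabel("loss")
ax_lr.plot(steps, lrs)
ax_lr.set_xlabel("step")
ax_lr.set_ylabel("learning rate")
fig.tight_layout()
plt.show()
```

Filtering on the `"loss"` key is what excludes the aggregate entry at the end of `log_history`; everything else in the sketch reads only fields that appear in the state above.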