{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "eval_steps": 500,
  "global_step": 405,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.012345679012345678,
      "grad_norm": 5.322886485316676,
      "learning_rate": 1.951219512195122e-06,
      "loss": 0.8713,
      "step": 1
    },
    {
      "epoch": 0.024691358024691357,
      "grad_norm": 5.297045656067237,
      "learning_rate": 3.902439024390244e-06,
      "loss": 0.8617,
      "step": 2
    },
    {
      "epoch": 0.037037037037037035,
      "grad_norm": 4.964177426655451,
      "learning_rate": 5.853658536585366e-06,
      "loss": 0.8557,
      "step": 3
    },
    {
      "epoch": 0.04938271604938271,
      "grad_norm": 3.773649879416999,
      "learning_rate": 7.804878048780489e-06,
      "loss": 0.8258,
      "step": 4
    },
    {
      "epoch": 0.06172839506172839,
      "grad_norm": 2.0170140516019943,
      "learning_rate": 9.756097560975611e-06,
      "loss": 0.7756,
      "step": 5
    },
    {
      "epoch": 0.07407407407407407,
      "grad_norm": 3.884073569291856,
      "learning_rate": 1.1707317073170731e-05,
      "loss": 0.786,
      "step": 6
    },
    {
      "epoch": 0.08641975308641975,
      "grad_norm": 4.3463765308085005,
      "learning_rate": 1.3658536585365855e-05,
      "loss": 0.7873,
      "step": 7
    },
    {
      "epoch": 0.09876543209876543,
      "grad_norm": 4.931673812582102,
      "learning_rate": 1.5609756097560978e-05,
      "loss": 0.7584,
      "step": 8
    },
    {
      "epoch": 0.1111111111111111,
      "grad_norm": 3.834447178572324,
      "learning_rate": 1.75609756097561e-05,
      "loss": 0.7462,
      "step": 9
    },
    {
      "epoch": 0.12345679012345678,
      "grad_norm": 2.179837729025759,
      "learning_rate": 1.9512195121951222e-05,
      "loss": 0.6981,
      "step": 10
    },
    {
      "epoch": 0.13580246913580246,
      "grad_norm": 2.101488664762724,
      "learning_rate": 2.1463414634146344e-05,
      "loss": 0.6788,
      "step": 11
    },
    {
      "epoch": 0.14814814814814814,
      "grad_norm": 1.5969780288722668,
      "learning_rate": 2.3414634146341463e-05,
      "loss": 0.6542,
      "step": 12
    },
    {
      "epoch": 0.16049382716049382,
      "grad_norm": 1.2878152911707546,
      "learning_rate": 2.536585365853659e-05,
      "loss": 0.6401,
      "step": 13
    },
    {
      "epoch": 0.1728395061728395,
      "grad_norm": 1.110122058562916,
      "learning_rate": 2.731707317073171e-05,
      "loss": 0.6224,
      "step": 14
    },
    {
      "epoch": 0.18518518518518517,
      "grad_norm": 0.9441382009144876,
      "learning_rate": 2.926829268292683e-05,
      "loss": 0.6147,
      "step": 15
    },
    {
      "epoch": 0.19753086419753085,
      "grad_norm": 1.0755606503445887,
      "learning_rate": 3.1219512195121955e-05,
      "loss": 0.608,
      "step": 16
    },
    {
      "epoch": 0.20987654320987653,
      "grad_norm": 1.0317574737428319,
      "learning_rate": 3.3170731707317074e-05,
      "loss": 0.5964,
      "step": 17
    },
    {
      "epoch": 0.2222222222222222,
      "grad_norm": 1.1016699420428528,
      "learning_rate": 3.51219512195122e-05,
      "loss": 0.5827,
      "step": 18
    },
    {
      "epoch": 0.2345679012345679,
      "grad_norm": 1.2684998671325824,
      "learning_rate": 3.7073170731707325e-05,
      "loss": 0.5863,
      "step": 19
    },
    {
      "epoch": 0.24691358024691357,
      "grad_norm": 1.1450067110010247,
      "learning_rate": 3.9024390243902444e-05,
      "loss": 0.5828,
      "step": 20
    },
    {
      "epoch": 0.25925925925925924,
      "grad_norm": 0.9761661263181526,
      "learning_rate": 4.097560975609756e-05,
      "loss": 0.5699,
      "step": 21
    },
    {
      "epoch": 0.2716049382716049,
      "grad_norm": 0.8094703924890408,
      "learning_rate": 4.292682926829269e-05,
      "loss": 0.5705,
      "step": 22
    },
    {
      "epoch": 0.2839506172839506,
      "grad_norm": 1.1556501519240183,
      "learning_rate": 4.4878048780487814e-05,
      "loss": 0.5505,
      "step": 23
    },
    {
      "epoch": 0.2962962962962963,
      "grad_norm": 1.1904158496263302,
      "learning_rate": 4.6829268292682926e-05,
      "loss": 0.5564,
      "step": 24
    },
    {
      "epoch": 0.30864197530864196,
      "grad_norm": 1.3992365789774508,
      "learning_rate": 4.878048780487805e-05,
      "loss": 0.5517,
      "step": 25
    },
    {
      "epoch": 0.32098765432098764,
      "grad_norm": 0.7997862572956267,
      "learning_rate": 5.073170731707318e-05,
      "loss": 0.5488,
      "step": 26
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 0.952625422206775,
      "learning_rate": 5.26829268292683e-05,
      "loss": 0.5494,
      "step": 27
    },
    {
      "epoch": 0.345679012345679,
      "grad_norm": 1.5935135402175304,
      "learning_rate": 5.463414634146342e-05,
      "loss": 0.5527,
      "step": 28
    },
    {
      "epoch": 0.35802469135802467,
      "grad_norm": 1.2248225513278832,
      "learning_rate": 5.658536585365854e-05,
      "loss": 0.5404,
      "step": 29
    },
    {
      "epoch": 0.37037037037037035,
      "grad_norm": 1.1987238686766297,
      "learning_rate": 5.853658536585366e-05,
      "loss": 0.5467,
      "step": 30
    },
    {
      "epoch": 0.38271604938271603,
      "grad_norm": 1.7560415296488376,
      "learning_rate": 6.0487804878048785e-05,
      "loss": 0.5427,
      "step": 31
    },
    {
      "epoch": 0.3950617283950617,
      "grad_norm": 0.9869800217044912,
      "learning_rate": 6.243902439024391e-05,
      "loss": 0.5364,
      "step": 32
    },
    {
      "epoch": 0.4074074074074074,
      "grad_norm": 1.3759308961313441,
      "learning_rate": 6.439024390243903e-05,
      "loss": 0.535,
      "step": 33
    },
    {
      "epoch": 0.41975308641975306,
      "grad_norm": 0.848280656571499,
      "learning_rate": 6.634146341463415e-05,
      "loss": 0.5255,
      "step": 34
    },
    {
      "epoch": 0.43209876543209874,
      "grad_norm": 1.5834791375974373,
      "learning_rate": 6.829268292682927e-05,
      "loss": 0.5463,
      "step": 35
    },
    {
      "epoch": 0.4444444444444444,
      "grad_norm": 1.3946299749030078,
      "learning_rate": 7.02439024390244e-05,
      "loss": 0.5329,
      "step": 36
    },
    {
      "epoch": 0.4567901234567901,
      "grad_norm": 2.4553544008908896,
      "learning_rate": 7.219512195121952e-05,
      "loss": 0.5469,
      "step": 37
    },
    {
      "epoch": 0.4691358024691358,
      "grad_norm": 1.4448518801644223,
      "learning_rate": 7.414634146341465e-05,
      "loss": 0.5384,
      "step": 38
    },
    {
      "epoch": 0.48148148148148145,
      "grad_norm": 2.366228950235355,
      "learning_rate": 7.609756097560976e-05,
      "loss": 0.5331,
      "step": 39
    },
    {
      "epoch": 0.49382716049382713,
      "grad_norm": 1.606851424746494,
      "learning_rate": 7.804878048780489e-05,
      "loss": 0.5387,
      "step": 40
    },
    {
      "epoch": 0.5061728395061729,
      "grad_norm": 1.5736626742577726,
      "learning_rate": 8e-05,
      "loss": 0.5215,
      "step": 41
    },
    {
      "epoch": 0.5185185185185185,
      "grad_norm": 1.9430399946541783,
      "learning_rate": 7.999851021266518e-05,
      "loss": 0.5218,
      "step": 42
    },
    {
      "epoch": 0.5308641975308642,
      "grad_norm": 1.5970596690336833,
      "learning_rate": 7.999404096163398e-05,
      "loss": 0.5249,
      "step": 43
    },
    {
      "epoch": 0.5432098765432098,
      "grad_norm": 1.3518955116038844,
      "learning_rate": 7.998659257981813e-05,
      "loss": 0.5125,
      "step": 44
    },
    {
      "epoch": 0.5555555555555556,
      "grad_norm": 1.065342246785812,
      "learning_rate": 7.997616562204282e-05,
      "loss": 0.516,
      "step": 45
    },
    {
      "epoch": 0.5679012345679012,
      "grad_norm": 0.961399130642479,
      "learning_rate": 7.996276086500558e-05,
      "loss": 0.5175,
      "step": 46
    },
    {
      "epoch": 0.5802469135802469,
      "grad_norm": 1.6746336809141793,
      "learning_rate": 7.994637930721825e-05,
      "loss": 0.517,
      "step": 47
    },
    {
      "epoch": 0.5925925925925926,
      "grad_norm": 1.5584433959820718,
      "learning_rate": 7.99270221689327e-05,
      "loss": 0.5192,
      "step": 48
    },
    {
      "epoch": 0.6049382716049383,
      "grad_norm": 1.1658481619505876,
      "learning_rate": 7.990469089204992e-05,
      "loss": 0.5026,
      "step": 49
    },
    {
      "epoch": 0.6172839506172839,
      "grad_norm": 1.1432735287000564,
      "learning_rate": 7.987938714001254e-05,
      "loss": 0.5051,
      "step": 50
    },
    {
      "epoch": 0.6296296296296297,
      "grad_norm": 1.6729263804344565,
      "learning_rate": 7.985111279768106e-05,
      "loss": 0.5208,
      "step": 51
    },
    {
      "epoch": 0.6419753086419753,
      "grad_norm": 1.8058261166724783,
      "learning_rate": 7.981986997119334e-05,
      "loss": 0.5028,
      "step": 52
    },
    {
      "epoch": 0.654320987654321,
      "grad_norm": 0.9551798887699284,
      "learning_rate": 7.978566098780771e-05,
      "loss": 0.5023,
      "step": 53
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 2.6120888775776003,
      "learning_rate": 7.974848839572971e-05,
      "loss": 0.5073,
      "step": 54
    },
    {
      "epoch": 0.6790123456790124,
      "grad_norm": 1.6504732308976957,
      "learning_rate": 7.970835496392216e-05,
      "loss": 0.5197,
      "step": 55
    },
    {
      "epoch": 0.691358024691358,
      "grad_norm": 2.4813850162470694,
      "learning_rate": 7.9665263681899e-05,
      "loss": 0.5065,
      "step": 56
    },
    {
      "epoch": 0.7037037037037037,
      "grad_norm": 2.0085317140110464,
      "learning_rate": 7.961921775950254e-05,
      "loss": 0.5186,
      "step": 57
    },
    {
      "epoch": 0.7160493827160493,
      "grad_norm": 2.1727130740463254,
      "learning_rate": 7.957022062666436e-05,
      "loss": 0.5138,
      "step": 58
    },
    {
      "epoch": 0.7283950617283951,
      "grad_norm": 1.7923872395834526,
      "learning_rate": 7.951827593314987e-05,
      "loss": 0.5154,
      "step": 59
    },
    {
      "epoch": 0.7407407407407407,
      "grad_norm": 1.6455141916519003,
      "learning_rate": 7.946338754828639e-05,
      "loss": 0.508,
      "step": 60
    },
    {
      "epoch": 0.7530864197530864,
      "grad_norm": 1.2020850632554296,
      "learning_rate": 7.940555956067495e-05,
      "loss": 0.5029,
      "step": 61
    },
    {
      "epoch": 0.7654320987654321,
      "grad_norm": 1.4345699160882657,
      "learning_rate": 7.934479627788576e-05,
      "loss": 0.5081,
      "step": 62
    },
    {
      "epoch": 0.7777777777777778,
      "grad_norm": 1.1706979555619268,
      "learning_rate": 7.928110222613723e-05,
      "loss": 0.5013,
      "step": 63
    },
    {
      "epoch": 0.7901234567901234,
      "grad_norm": 1.0822260503444048,
      "learning_rate": 7.921448214995895e-05,
      "loss": 0.5009,
      "step": 64
    },
    {
      "epoch": 0.8024691358024691,
      "grad_norm": 0.9538983846582979,
      "learning_rate": 7.914494101183822e-05,
      "loss": 0.5048,
      "step": 65
    },
    {
      "epoch": 0.8148148148148148,
      "grad_norm": 1.2772663076589637,
      "learning_rate": 7.907248399185037e-05,
      "loss": 0.5068,
      "step": 66
    },
    {
      "epoch": 0.8271604938271605,
      "grad_norm": 0.9753447960969902,
      "learning_rate": 7.899711648727294e-05,
      "loss": 0.4967,
      "step": 67
    },
    {
      "epoch": 0.8395061728395061,
      "grad_norm": 1.278582421674819,
      "learning_rate": 7.891884411218364e-05,
      "loss": 0.4876,
      "step": 68
    },
    {
      "epoch": 0.8518518518518519,
      "grad_norm": 1.2296613613599487,
      "learning_rate": 7.883767269704209e-05,
      "loss": 0.4902,
      "step": 69
    },
    {
      "epoch": 0.8641975308641975,
      "grad_norm": 1.1659409845757795,
      "learning_rate": 7.875360828825562e-05,
      "loss": 0.4926,
      "step": 70
    },
    {
      "epoch": 0.8765432098765432,
      "grad_norm": 0.9109060592449263,
      "learning_rate": 7.866665714772879e-05,
      "loss": 0.4992,
      "step": 71
    },
    {
      "epoch": 0.8888888888888888,
      "grad_norm": 1.1413101315936145,
      "learning_rate": 7.8576825752397e-05,
      "loss": 0.4944,
      "step": 72
    },
    {
      "epoch": 0.9012345679012346,
      "grad_norm": 1.5501956716372292,
      "learning_rate": 7.848412079374403e-05,
      "loss": 0.5036,
      "step": 73
    },
    {
      "epoch": 0.9135802469135802,
      "grad_norm": 0.8833612034769314,
      "learning_rate": 7.838854917730351e-05,
      "loss": 0.4956,
      "step": 74
    },
    {
      "epoch": 0.9259259259259259,
      "grad_norm": 1.6382790184141087,
      "learning_rate": 7.829011802214464e-05,
      "loss": 0.5049,
      "step": 75
    },
    {
      "epoch": 0.9382716049382716,
      "grad_norm": 0.8824015261456967,
      "learning_rate": 7.818883466034184e-05,
      "loss": 0.5073,
      "step": 76
    },
    {
      "epoch": 0.9506172839506173,
      "grad_norm": 1.8874422790227483,
      "learning_rate": 7.808470663642856e-05,
      "loss": 0.5038,
      "step": 77
    },
    {
      "epoch": 0.9629629629629629,
      "grad_norm": 1.151698216260373,
      "learning_rate": 7.797774170683542e-05,
      "loss": 0.4987,
      "step": 78
    },
    {
      "epoch": 0.9753086419753086,
      "grad_norm": 1.8822125796547127,
      "learning_rate": 7.786794783931225e-05,
      "loss": 0.4955,
      "step": 79
    },
    {
      "epoch": 0.9876543209876543,
      "grad_norm": 1.3793553962064276,
      "learning_rate": 7.775533321233471e-05,
      "loss": 0.5098,
      "step": 80
    },
    {
      "epoch": 1.0,
      "grad_norm": 1.4900485689809222,
      "learning_rate": 7.763990621449507e-05,
      "loss": 0.4941,
      "step": 81
    },
    {
      "epoch": 1.0123456790123457,
      "grad_norm": 1.2191759152743815,
      "learning_rate": 7.752167544387728e-05,
      "loss": 0.4824,
      "step": 82
    },
    {
      "epoch": 1.0246913580246915,
      "grad_norm": 0.9817036892343542,
      "learning_rate": 7.740064970741661e-05,
      "loss": 0.4677,
      "step": 83
    },
    {
      "epoch": 1.037037037037037,
      "grad_norm": 0.9505715870347944,
      "learning_rate": 7.727683802024347e-05,
      "loss": 0.4755,
      "step": 84
    },
    {
      "epoch": 1.0493827160493827,
      "grad_norm": 0.9315786466714934,
      "learning_rate": 7.715024960501209e-05,
      "loss": 0.4626,
      "step": 85
    },
    {
      "epoch": 1.0617283950617284,
      "grad_norm": 1.088912497598927,
      "learning_rate": 7.702089389121335e-05,
      "loss": 0.4699,
      "step": 86
    },
    {
      "epoch": 1.074074074074074,
      "grad_norm": 0.7664961821150688,
      "learning_rate": 7.688878051447243e-05,
      "loss": 0.4632,
      "step": 87
    },
    {
      "epoch": 1.0864197530864197,
      "grad_norm": 0.7118117556462173,
      "learning_rate": 7.675391931583109e-05,
      "loss": 0.4694,
      "step": 88
    },
    {
      "epoch": 1.0987654320987654,
      "grad_norm": 0.8838029650550034,
      "learning_rate": 7.661632034101466e-05,
      "loss": 0.4685,
      "step": 89
    },
    {
      "epoch": 1.1111111111111112,
      "grad_norm": 1.0456749479001006,
      "learning_rate": 7.64759938396836e-05,
      "loss": 0.4652,
      "step": 90
    },
    {
      "epoch": 1.123456790123457,
      "grad_norm": 0.9296714999836658,
      "learning_rate": 7.633295026467016e-05,
      "loss": 0.4557,
      "step": 91
    },
    {
      "epoch": 1.1358024691358024,
      "grad_norm": 0.7784127512214979,
      "learning_rate": 7.618720027119966e-05,
      "loss": 0.4576,
      "step": 92
    },
    {
      "epoch": 1.1481481481481481,
      "grad_norm": 0.7773052206833756,
      "learning_rate": 7.603875471609677e-05,
      "loss": 0.4602,
      "step": 93
    },
    {
      "epoch": 1.1604938271604939,
      "grad_norm": 1.1454144308753773,
      "learning_rate": 7.588762465697693e-05,
      "loss": 0.4624,
      "step": 94
    },
    {
      "epoch": 1.1728395061728394,
      "grad_norm": 0.9378102331091442,
      "learning_rate": 7.573382135142253e-05,
      "loss": 0.4641,
      "step": 95
    },
    {
      "epoch": 1.1851851851851851,
      "grad_norm": 0.9068527199171179,
      "learning_rate": 7.55773562561444e-05,
      "loss": 0.4498,
      "step": 96
    },
    {
      "epoch": 1.1975308641975309,
      "grad_norm": 0.9300994723441598,
      "learning_rate": 7.541824102612839e-05,
      "loss": 0.46,
      "step": 97
    },
    {
      "epoch": 1.2098765432098766,
      "grad_norm": 1.0104590943724414,
      "learning_rate": 7.525648751376726e-05,
      "loss": 0.456,
      "step": 98
    },
    {
      "epoch": 1.2222222222222223,
      "grad_norm": 1.0168144609561725,
      "learning_rate": 7.509210776797768e-05,
      "loss": 0.4585,
      "step": 99
    },
    {
      "epoch": 1.2345679012345678,
      "grad_norm": 0.8394713299392315,
      "learning_rate": 7.492511403330284e-05,
      "loss": 0.4543,
      "step": 100
    },
    {
      "epoch": 1.2469135802469136,
      "grad_norm": 0.6254433810264461,
      "learning_rate": 7.475551874900027e-05,
      "loss": 0.4535,
      "step": 101
    },
    {
      "epoch": 1.2592592592592593,
      "grad_norm": 0.6014772324908465,
      "learning_rate": 7.458333454811531e-05,
      "loss": 0.4558,
      "step": 102
    },
    {
      "epoch": 1.2716049382716048,
      "grad_norm": 0.6719474936653222,
      "learning_rate": 7.440857425654004e-05,
      "loss": 0.4474,
      "step": 103
    },
    {
      "epoch": 1.2839506172839505,
      "grad_norm": 0.8289366172315764,
      "learning_rate": 7.42312508920579e-05,
      "loss": 0.452,
      "step": 104
    },
    {
      "epoch": 1.2962962962962963,
      "grad_norm": 1.1119177471943609,
      "learning_rate": 7.405137766337406e-05,
      "loss": 0.4615,
      "step": 105
    },
    {
      "epoch": 1.308641975308642,
      "grad_norm": 1.1864543763821342,
      "learning_rate": 7.386896796913137e-05,
      "loss": 0.4595,
      "step": 106
    },
    {
      "epoch": 1.3209876543209877,
      "grad_norm": 0.7739869856668281,
      "learning_rate": 7.368403539691247e-05,
      "loss": 0.4526,
      "step": 107
    },
    {
      "epoch": 1.3333333333333333,
      "grad_norm": 0.7500250889954546,
      "learning_rate": 7.349659372222755e-05,
      "loss": 0.4499,
      "step": 108
    },
    {
      "epoch": 1.345679012345679,
      "grad_norm": 1.0110160098128433,
      "learning_rate": 7.330665690748825e-05,
      "loss": 0.4598,
      "step": 109
    },
    {
      "epoch": 1.3580246913580247,
      "grad_norm": 1.240318418546968,
      "learning_rate": 7.311423910096764e-05,
      "loss": 0.4531,
      "step": 110
    },
    {
      "epoch": 1.3703703703703702,
      "grad_norm": 0.7673358482948583,
      "learning_rate": 7.291935463574626e-05,
      "loss": 0.4472,
      "step": 111
    },
    {
      "epoch": 1.382716049382716,
      "grad_norm": 0.9290397269601804,
      "learning_rate": 7.272201802864452e-05,
      "loss": 0.4524,
      "step": 112
    },
    {
      "epoch": 1.3950617283950617,
      "grad_norm": 1.200427789224473,
      "learning_rate": 7.25222439791413e-05,
      "loss": 0.4531,
      "step": 113
    },
    {
      "epoch": 1.4074074074074074,
      "grad_norm": 0.8914574952010766,
      "learning_rate": 7.232004736827907e-05,
      "loss": 0.4614,
      "step": 114
    },
    {
      "epoch": 1.4197530864197532,
      "grad_norm": 1.072583649734927,
      "learning_rate": 7.21154432575553e-05,
      "loss": 0.4528,
      "step": 115
    },
    {
      "epoch": 1.4320987654320987,
      "grad_norm": 0.7180130997337799,
      "learning_rate": 7.190844688780065e-05,
      "loss": 0.4492,
      "step": 116
    },
    {
      "epoch": 1.4444444444444444,
      "grad_norm": 0.672503509002727,
      "learning_rate": 7.169907367804363e-05,
      "loss": 0.4533,
      "step": 117
    },
    {
      "epoch": 1.4567901234567902,
      "grad_norm": 0.69958335114305,
      "learning_rate": 7.148733922436201e-05,
      "loss": 0.446,
      "step": 118
    },
    {
      "epoch": 1.4691358024691357,
      "grad_norm": 0.7653453417783888,
      "learning_rate": 7.12732592987212e-05,
      "loss": 0.4456,
      "step": 119
    },
    {
      "epoch": 1.4814814814814814,
      "grad_norm": 0.6310721044962515,
      "learning_rate": 7.105684984779928e-05,
      "loss": 0.4444,
      "step": 120
    },
    {
      "epoch": 1.4938271604938271,
      "grad_norm": 0.6151795732030567,
      "learning_rate": 7.083812699179919e-05,
      "loss": 0.4465,
      "step": 121
    },
    {
      "epoch": 1.5061728395061729,
      "grad_norm": 0.6874616448091606,
      "learning_rate": 7.061710702324799e-05,
      "loss": 0.4508,
      "step": 122
    },
    {
      "epoch": 1.5185185185185186,
      "grad_norm": 0.6395722108011573,
      "learning_rate": 7.039380640578316e-05,
      "loss": 0.4462,
      "step": 123
    },
    {
      "epoch": 1.5308641975308643,
      "grad_norm": 1.0391636044709032,
      "learning_rate": 7.016824177292629e-05,
      "loss": 0.4506,
      "step": 124
    },
    {
      "epoch": 1.5432098765432098,
      "grad_norm": 1.209612143815189,
      "learning_rate": 6.994042992684406e-05,
      "loss": 0.444,
      "step": 125
    },
    {
      "epoch": 1.5555555555555556,
      "grad_norm": 0.6126615919119747,
      "learning_rate": 6.971038783709658e-05,
      "loss": 0.4503,
      "step": 126
    },
    {
      "epoch": 1.567901234567901,
      "grad_norm": 0.7148367458249104,
      "learning_rate": 6.947813263937347e-05,
      "loss": 0.4606,
      "step": 127
    },
    {
      "epoch": 1.5802469135802468,
      "grad_norm": 1.0428988763055895,
      "learning_rate": 6.924368163421733e-05,
      "loss": 0.4467,
      "step": 128
    },
    {
      "epoch": 1.5925925925925926,
      "grad_norm": 1.1843461844631857,
      "learning_rate": 6.900705228573507e-05,
      "loss": 0.452,
      "step": 129
    },
    {
      "epoch": 1.6049382716049383,
      "grad_norm": 0.7127612179405008,
      "learning_rate": 6.876826222029699e-05,
      "loss": 0.4497,
      "step": 130
    },
    {
      "epoch": 1.617283950617284,
      "grad_norm": 0.48971010045507035,
      "learning_rate": 6.852732922522385e-05,
      "loss": 0.4452,
      "step": 131
    },
    {
      "epoch": 1.6296296296296298,
      "grad_norm": 0.6614951195691181,
      "learning_rate": 6.828427124746191e-05,
      "loss": 0.4499,
      "step": 132
    },
    {
      "epoch": 1.6419753086419753,
      "grad_norm": 0.759726041736274,
      "learning_rate": 6.803910639224598e-05,
      "loss": 0.4479,
      "step": 133
    },
    {
      "epoch": 1.654320987654321,
      "grad_norm": 0.7326050738916848,
      "learning_rate": 6.77918529217509e-05,
      "loss": 0.4485,
      "step": 134
    },
    {
      "epoch": 1.6666666666666665,
      "grad_norm": 0.664731323528839,
      "learning_rate": 6.754252925373109e-05,
      "loss": 0.4369,
      "step": 135
    },
    {
      "epoch": 1.6790123456790123,
      "grad_norm": 0.6194905151290093,
      "learning_rate": 6.729115396014871e-05,
      "loss": 0.4392,
      "step": 136
    },
    {
      "epoch": 1.691358024691358,
      "grad_norm": 0.7257710866052547,
      "learning_rate": 6.703774576579018e-05,
      "loss": 0.4466,
      "step": 137
    },
    {
      "epoch": 1.7037037037037037,
      "grad_norm": 0.913815195106407,
      "learning_rate": 6.678232354687144e-05,
      "loss": 0.4485,
      "step": 138
    },
    {
      "epoch": 1.7160493827160495,
      "grad_norm": 1.1501420885353775,
      "learning_rate": 6.652490632963182e-05,
      "loss": 0.4419,
      "step": 139
    },
    {
      "epoch": 1.7283950617283952,
      "grad_norm": 0.7814972803848219,
      "learning_rate": 6.626551328891681e-05,
      "loss": 0.4421,
      "step": 140
    },
    {
      "epoch": 1.7407407407407407,
      "grad_norm": 0.6809040763876827,
      "learning_rate": 6.600416374674978e-05,
      "loss": 0.4446,
      "step": 141
    },
    {
      "epoch": 1.7530864197530864,
      "grad_norm": 0.6937546616216443,
      "learning_rate": 6.57408771708926e-05,
      "loss": 0.4439,
      "step": 142
    },
    {
      "epoch": 1.765432098765432,
      "grad_norm": 0.6133971132082513,
      "learning_rate": 6.547567317339557e-05,
      "loss": 0.4405,
      "step": 143
    },
    {
      "epoch": 1.7777777777777777,
      "grad_norm": 0.5795946877354976,
      "learning_rate": 6.520857150913655e-05,
      "loss": 0.4468,
      "step": 144
    },
    {
      "epoch": 1.7901234567901234,
      "grad_norm": 0.6272260508720054,
      "learning_rate": 6.493959207434934e-05,
      "loss": 0.4403,
      "step": 145
    },
    {
      "epoch": 1.8024691358024691,
      "grad_norm": 0.6607250320416114,
      "learning_rate": 6.466875490514173e-05,
      "loss": 0.4441,
      "step": 146
    },
    {
      "epoch": 1.8148148148148149,
      "grad_norm": 0.666515803916981,
      "learning_rate": 6.439608017600292e-05,
      "loss": 0.4367,
      "step": 147
    },
    {
      "epoch": 1.8271604938271606,
      "grad_norm": 0.6563646888406403,
      "learning_rate": 6.412158819830082e-05,
      "loss": 0.4383,
      "step": 148
    },
    {
      "epoch": 1.8395061728395061,
      "grad_norm": 0.6751228037264413,
      "learning_rate": 6.384529941876902e-05,
      "loss": 0.4443,
      "step": 149
    },
    {
      "epoch": 1.8518518518518519,
      "grad_norm": 0.6037023672290093,
      "learning_rate": 6.356723441798375e-05,
      "loss": 0.4343,
      "step": 150
    },
    {
      "epoch": 1.8641975308641974,
      "grad_norm": 0.7575004210332137,
      "learning_rate": 6.328741390883084e-05,
      "loss": 0.436,
      "step": 151
    },
    {
      "epoch": 1.876543209876543,
      "grad_norm": 1.053664259204655,
      "learning_rate": 6.300585873496279e-05,
      "loss": 0.4446,
      "step": 152
    },
    {
      "epoch": 1.8888888888888888,
      "grad_norm": 1.143162590447998,
      "learning_rate": 6.272258986924624e-05,
      "loss": 0.4376,
      "step": 153
    },
    {
      "epoch": 1.9012345679012346,
      "grad_norm": 0.8056844525604071,
      "learning_rate": 6.243762841219958e-05,
      "loss": 0.4425,
      "step": 154
    },
    {
      "epoch": 1.9135802469135803,
      "grad_norm": 0.7005815828988159,
      "learning_rate": 6.215099559042132e-05,
      "loss": 0.4438,
      "step": 155
    },
    {
      "epoch": 1.925925925925926,
      "grad_norm": 0.5395123726786041,
      "learning_rate": 6.186271275500885e-05,
      "loss": 0.4451,
      "step": 156
    },
    {
      "epoch": 1.9382716049382716,
      "grad_norm": 0.447328421246998,
      "learning_rate": 6.157280137996797e-05,
      "loss": 0.4496,
      "step": 157
    },
    {
      "epoch": 1.9506172839506173,
      "grad_norm": 0.5525549829334637,
      "learning_rate": 6.128128306061347e-05,
      "loss": 0.4406,
      "step": 158
    },
    {
      "epoch": 1.9629629629629628,
      "grad_norm": 0.6119311049360817,
      "learning_rate": 6.098817951196032e-05,
      "loss": 0.4432,
      "step": 159
    },
    {
      "epoch": 1.9753086419753085,
      "grad_norm": 0.5999595846933212,
      "learning_rate": 6.0693512567106275e-05,
      "loss": 0.4396,
      "step": 160
    },
    {
      "epoch": 1.9876543209876543,
      "grad_norm": 0.6329428597314521,
      "learning_rate": 6.0397304175605444e-05,
      "loss": 0.4397,
      "step": 161
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.6165900475914675,
      "learning_rate": 6.009957640183333e-05,
      "loss": 0.4339,
      "step": 162
    },
    {
      "epoch": 2.0123456790123457,
      "grad_norm": 0.6175294635421779,
      "learning_rate": 5.980035142334326e-05,
      "loss": 0.4037,
      "step": 163
    },
    {
      "epoch": 2.0246913580246915,
      "grad_norm": 0.5464603695576591,
      "learning_rate": 5.94996515292144e-05,
      "loss": 0.4069,
      "step": 164
    },
    {
      "epoch": 2.037037037037037,
      "grad_norm": 0.5961811075021233,
      "learning_rate": 5.919749911839146e-05,
      "loss": 0.4072,
      "step": 165
    },
    {
      "epoch": 2.049382716049383,
      "grad_norm": 0.7162072119334066,
      "learning_rate": 5.8893916698016154e-05,
      "loss": 0.409,
      "step": 166
    },
    {
      "epoch": 2.0617283950617282,
      "grad_norm": 0.7531665562571003,
      "learning_rate": 5.858892688175075e-05,
      "loss": 0.4054,
      "step": 167
    },
    {
      "epoch": 2.074074074074074,
      "grad_norm": 0.6634967768803004,
      "learning_rate": 5.828255238809352e-05,
      "loss": 0.4102,
      "step": 168
    },
    {
      "epoch": 2.0864197530864197,
      "grad_norm": 0.5214973502711097,
      "learning_rate": 5.797481603868646e-05,
      "loss": 0.4093,
      "step": 169
    },
    {
      "epoch": 2.0987654320987654,
      "grad_norm": 0.48817555796705103,
      "learning_rate": 5.766574075661538e-05,
      "loss": 0.4056,
      "step": 170
    },
    {
      "epoch": 2.111111111111111,
      "grad_norm": 0.46995270511301984,
      "learning_rate": 5.735534956470233e-05,
      "loss": 0.4017,
      "step": 171
    },
    {
      "epoch": 2.123456790123457,
      "grad_norm": 0.4001954795848698,
      "learning_rate": 5.7043665583790627e-05,
      "loss": 0.4043,
      "step": 172
    },
    {
      "epoch": 2.1358024691358026,
      "grad_norm": 0.3352773336486886,
      "learning_rate": 5.673071203102261e-05,
      "loss": 0.3978,
      "step": 173
    },
    {
      "epoch": 2.148148148148148,
      "grad_norm": 0.36429657583588576,
      "learning_rate": 5.641651221811028e-05,
      "loss": 0.405,
      "step": 174
    },
    {
      "epoch": 2.1604938271604937,
      "grad_norm": 0.3696795489355164,
      "learning_rate": 5.6101089549598704e-05,
      "loss": 0.4015,
      "step": 175
    },
    {
      "epoch": 2.1728395061728394,
      "grad_norm": 0.37732718419035066,
      "learning_rate": 5.578446752112273e-05,
      "loss": 0.3993,
      "step": 176
    },
    {
      "epoch": 2.185185185185185,
      "grad_norm": 0.3610852145730118,
      "learning_rate": 5.546666971765675e-05,
      "loss": 0.401,
      "step": 177
    },
    {
      "epoch": 2.197530864197531,
      "grad_norm": 0.375404848711513,
      "learning_rate": 5.5147719811757914e-05,
      "loss": 0.4013,
      "step": 178
    },
    {
      "epoch": 2.2098765432098766,
      "grad_norm": 0.39144573818004924,
      "learning_rate": 5.4827641561802716e-05,
      "loss": 0.4037,
      "step": 179
    },
    {
      "epoch": 2.2222222222222223,
      "grad_norm": 0.46921938093624016,
      "learning_rate": 5.4506458810217286e-05,
      "loss": 0.4084,
      "step": 180
    },
    {
      "epoch": 2.234567901234568,
      "grad_norm": 0.39178530019945007,
      "learning_rate": 5.4184195481701425e-05,
      "loss": 0.3974,
      "step": 181
    },
    {
      "epoch": 2.246913580246914,
      "grad_norm": 0.31454106942243093,
      "learning_rate": 5.38608755814464e-05,
      "loss": 0.3996,
      "step": 182
    },
    {
      "epoch": 2.259259259259259,
      "grad_norm": 0.32288914490706055,
      "learning_rate": 5.353652319334682e-05,
      "loss": 0.4026,
      "step": 183
    },
    {
      "epoch": 2.271604938271605,
      "grad_norm": 0.3480461509407278,
      "learning_rate": 5.321116247820669e-05,
      "loss": 0.409,
      "step": 184
    },
    {
      "epoch": 2.2839506172839505,
      "grad_norm": 0.32317305352669917,
      "learning_rate": 5.288481767193963e-05,
      "loss": 0.3992,
      "step": 185
    },
    {
      "epoch": 2.2962962962962963,
      "grad_norm": 0.3491754615190072,
      "learning_rate": 5.2557513083763605e-05,
      "loss": 0.4106,
      "step": 186
    },
    {
      "epoch": 2.308641975308642,
      "grad_norm": 0.2654352934184053,
      "learning_rate": 5.2229273094390124e-05,
      "loss": 0.395,
      "step": 187
    },
    {
      "epoch": 2.3209876543209877,
      "grad_norm": 0.240507325939916,
      "learning_rate": 5.190012215420812e-05,
      "loss": 0.3962,
      "step": 188
    },
    {
      "epoch": 2.3333333333333335,
      "grad_norm": 0.3096781461722714,
      "learning_rate": 5.1570084781462716e-05,
      "loss": 0.4009,
      "step": 189
    },
    {
      "epoch": 2.3456790123456788,
      "grad_norm": 0.3254620628162861,
      "learning_rate": 5.123918556042878e-05,
      "loss": 0.4003,
      "step": 190
    },
    {
      "epoch": 2.3580246913580245,
      "grad_norm": 0.30442823004858754,
      "learning_rate": 5.0907449139579755e-05,
      "loss": 0.407,
      "step": 191
    },
    {
      "epoch": 2.3703703703703702,
      "grad_norm": 0.24609813176288364,
      "learning_rate": 5.057490022975156e-05,
      "loss": 0.406,
      "step": 192
    },
    {
      "epoch": 2.382716049382716,
      "grad_norm": 0.2645811599665009,
      "learning_rate": 5.024156360230189e-05,
      "loss": 0.4087,
      "step": 193
    },
    {
      "epoch": 2.3950617283950617,
      "grad_norm": 0.29950051962612906,
      "learning_rate": 4.9907464087265045e-05,
      "loss": 0.4051,
      "step": 194
    },
    {
      "epoch": 2.4074074074074074,
      "grad_norm": 0.25413630356733447,
      "learning_rate": 4.9572626571502316e-05,
      "loss": 0.4115,
      "step": 195
    },
    {
      "epoch": 2.419753086419753,
      "grad_norm": 0.1982329593965016,
      "learning_rate": 4.9237075996848235e-05,
      "loss": 0.4009,
      "step": 196
    },
    {
      "epoch": 2.432098765432099,
      "grad_norm": 0.2793566075797078,
      "learning_rate": 4.890083735825258e-05,
      "loss": 0.4018,
      "step": 197
    },
    {
      "epoch": 2.4444444444444446,
      "grad_norm": 0.2501185111375193,
      "learning_rate": 4.8563935701918646e-05,
      "loss": 0.4005,
      "step": 198
    },
    {
      "epoch": 2.45679012345679,
      "grad_norm": 0.2508812489655374,
      "learning_rate": 4.8226396123437466e-05,
      "loss": 0.4024,
      "step": 199
    },
    {
      "epoch": 2.4691358024691357,
      "grad_norm": 0.23763312456408484,
      "learning_rate": 4.788824376591849e-05,
      "loss": 0.3938,
      "step": 200
    },
    {
      "epoch": 2.4814814814814814,
      "grad_norm": 0.2505474740052618,
      "learning_rate": 4.754950381811667e-05,
      "loss": 0.4098,
      "step": 201
    },
    {
      "epoch": 2.493827160493827,
      "grad_norm": 0.32543257673329773,
      "learning_rate": 4.721020151255624e-05,
      "loss": 0.4008,
      "step": 202
    },
    {
      "epoch": 2.506172839506173,
      "grad_norm": 0.31344361733289505,
      "learning_rate": 4.6870362123651056e-05,
      "loss": 0.3958,
      "step": 203
    },
    {
      "epoch": 2.5185185185185186,
      "grad_norm": 0.2808667743552056,
      "learning_rate": 4.6530010965821984e-05,
      "loss": 0.3935,
      "step": 204
    },
    {
      "epoch": 2.5308641975308643,
      "grad_norm": 0.2799818958240018,
      "learning_rate": 4.618917339161125e-05,
      "loss": 0.3975,
      "step": 205
    },
    {
      "epoch": 2.5432098765432096,
      "grad_norm": 0.40415793903369035,
      "learning_rate": 4.584787478979394e-05,
      "loss": 0.4012,
      "step": 206
    },
    {
      "epoch": 2.5555555555555554,
      "grad_norm": 0.36820613398161905,
      "learning_rate": 4.550614058348674e-05,
      "loss": 0.3935,
      "step": 207
    },
    {
      "epoch": 2.567901234567901,
      "grad_norm": 0.22878484205266533,
      "learning_rate": 4.516399622825428e-05,
      "loss": 0.4017,
      "step": 208
    },
    {
      "epoch": 2.580246913580247,
      "grad_norm": 0.2526602623885084,
      "learning_rate": 4.4821467210212924e-05,
      "loss": 0.3986,
      "step": 209
    },
    {
      "epoch": 2.5925925925925926,
      "grad_norm": 0.2857441977382452,
      "learning_rate": 4.4478579044132314e-05,
      "loss": 0.3983,
      "step": 210
    },
    {
      "epoch": 2.6049382716049383,
      "grad_norm": 0.31335609029960937,
      "learning_rate": 4.41353572715348e-05,
      "loss": 0.4084,
      "step": 211
    },
    {
      "epoch": 2.617283950617284,
      "grad_norm": 0.29329552295291217,
      "learning_rate": 4.379182745879289e-05,
      "loss": 0.4,
      "step": 212
    },
    {
      "epoch": 2.6296296296296298,
      "grad_norm": 0.2464006577647208,
      "learning_rate": 4.344801519522478e-05,
      "loss": 0.3981,
      "step": 213
    },
    {
      "epoch": 2.6419753086419755,
      "grad_norm": 0.21757828305891083,
      "learning_rate": 4.310394609118826e-05,
      "loss": 0.4109,
      "step": 214
    },
    {
      "epoch": 2.6543209876543212,
      "grad_norm": 0.3210249559146646,
      "learning_rate": 4.2759645776172996e-05,
      "loss": 0.4069,
      "step": 215
    },
    {
      "epoch": 2.6666666666666665,
      "grad_norm": 0.3477320907889876,
      "learning_rate": 4.241513989689145e-05,
      "loss": 0.4008,
      "step": 216
    },
    {
      "epoch": 2.6790123456790123,
      "grad_norm": 0.24803139007843356,
      "learning_rate": 4.2070454115368385e-05,
      "loss": 0.3977,
      "step": 217
    },
    {
      "epoch": 2.691358024691358,
      "grad_norm": 0.20440440035218216,
      "learning_rate": 4.1725614107029387e-05,
      "loss": 0.4001,
      "step": 218
    },
    {
      "epoch": 2.7037037037037037,
      "grad_norm": 0.27538798360958056,
      "learning_rate": 4.138064555878833e-05,
      "loss": 0.3981,
      "step": 219
    },
    {
      "epoch": 2.7160493827160495,
      "grad_norm": 0.2858784452038823,
      "learning_rate": 4.1035574167133905e-05,
      "loss": 0.4023,
      "step": 220
    },
    {
      "epoch": 2.728395061728395,
      "grad_norm": 0.2008423560120923,
      "learning_rate": 4.069042563621555e-05,
      "loss": 0.4079,
      "step": 221
    },
    {
      "epoch": 2.7407407407407405,
      "grad_norm": 0.24293176835950328,
      "learning_rate": 4.034522567592876e-05,
      "loss": 0.407,
      "step": 222
    },
    {
      "epoch": 2.753086419753086,
      "grad_norm": 0.3091409408215162,
      "learning_rate": 4e-05,
      "loss": 0.3969,
      "step": 223
    },
    {
      "epoch": 2.765432098765432,
      "grad_norm": 0.31288462010722656,
      "learning_rate": 3.965477432407125e-05,
      "loss": 0.4031,
      "step": 224
    },
    {
      "epoch": 2.7777777777777777,
      "grad_norm": 0.2567222355035599,
      "learning_rate": 3.9309574363784465e-05,
      "loss": 0.3973,
      "step": 225
    },
    {
      "epoch": 2.7901234567901234,
      "grad_norm": 0.19069072798190304,
      "learning_rate": 3.8964425832866115e-05,
      "loss": 0.3994,
      "step": 226
    },
    {
      "epoch": 2.802469135802469,
      "grad_norm": 0.3116244915598422,
      "learning_rate": 3.861935444121169e-05,
      "loss": 0.4046,
      "step": 227
    },
    {
      "epoch": 2.814814814814815,
      "grad_norm": 0.28128541699160603,
      "learning_rate": 3.827438589297062e-05,
      "loss": 0.3999,
      "step": 228
    },
    {
      "epoch": 2.8271604938271606,
      "grad_norm": 0.167533219149008,
      "learning_rate": 3.792954588463162e-05,
      "loss": 0.3929,
      "step": 229
    },
    {
      "epoch": 2.8395061728395063,
      "grad_norm": 0.2904700089197945,
      "learning_rate": 3.758486010310856e-05,
      "loss": 0.4033,
      "step": 230
    },
    {
      "epoch": 2.851851851851852,
      "grad_norm": 0.222742797773903,
      "learning_rate": 3.7240354223827004e-05,
      "loss": 0.3952,
      "step": 231
    },
    {
      "epoch": 2.8641975308641974,
      "grad_norm": 0.22495887212575763,
      "learning_rate": 3.6896053908811755e-05,
      "loss": 0.4007,
      "step": 232
    },
    {
      "epoch": 2.876543209876543,
      "grad_norm": 0.20272044370534606,
      "learning_rate": 3.655198480477523e-05,
      "loss": 0.3965,
      "step": 233
    },
    {
      "epoch": 2.888888888888889,
      "grad_norm": 0.20159603268883686,
      "learning_rate": 3.6208172541207114e-05,
      "loss": 0.4026,
      "step": 234
    },
    {
      "epoch": 2.9012345679012346,
      "grad_norm": 0.1857016078801328,
      "learning_rate": 3.5864642728465205e-05,
      "loss": 0.4019,
      "step": 235
    },
    {
      "epoch": 2.9135802469135803,
      "grad_norm": 0.19657456599113443,
      "learning_rate": 3.552142095586769e-05,
      "loss": 0.3935,
      "step": 236
    },
    {
      "epoch": 2.925925925925926,
      "grad_norm": 0.20263796973832712,
      "learning_rate": 3.517853278978708e-05,
      "loss": 0.4024,
      "step": 237
    },
    {
      "epoch": 2.9382716049382713,
      "grad_norm": 0.21927632477325718,
      "learning_rate": 3.4836003771745736e-05,
      "loss": 0.4058,
      "step": 238
    },
    {
      "epoch": 2.950617283950617,
      "grad_norm": 0.18668045348448897,
      "learning_rate": 3.449385941651328e-05,
      "loss": 0.4016,
      "step": 239
    },
    {
      "epoch": 2.962962962962963,
      "grad_norm": 0.22387285374410273,
      "learning_rate": 3.415212521020609e-05,
      "loss": 0.3998,
      "step": 240
    },
    {
      "epoch": 2.9753086419753085,
      "grad_norm": 0.1986946786033834,
      "learning_rate": 3.381082660838875e-05,
      "loss": 0.3994,
      "step": 241
    },
    {
      "epoch": 2.9876543209876543,
      "grad_norm": 0.17106247031701255,
      "learning_rate": 3.346998903417803e-05,
      "loss": 0.3948,
      "step": 242
    },
    {
      "epoch": 3.0,
      "grad_norm": 0.19730260856315138,
      "learning_rate": 3.312963787634896e-05,
      "loss": 0.3809,
      "step": 243
    },
    {
      "epoch": 3.0123456790123457,
      "grad_norm": 0.22215703145843618,
      "learning_rate": 3.2789798487443775e-05,
      "loss": 0.3664,
      "step": 244
    },
    {
      "epoch": 3.0246913580246915,
      "grad_norm": 0.18982786875890348,
      "learning_rate": 3.245049618188334e-05,
      "loss": 0.3702,
      "step": 245
    },
    {
      "epoch": 3.037037037037037,
      "grad_norm": 0.2659144149445534,
      "learning_rate": 3.2111756234081525e-05,
      "loss": 0.369,
      "step": 246
    },
    {
      "epoch": 3.049382716049383,
      "grad_norm": 0.21145023138083482,
      "learning_rate": 3.177360387656254e-05,
      "loss": 0.37,
      "step": 247
    },
    {
      "epoch": 3.0617283950617282,
      "grad_norm": 0.24865516246035863,
      "learning_rate": 3.143606429808136e-05,
      "loss": 0.3701,
      "step": 248
    },
    {
      "epoch": 3.074074074074074,
      "grad_norm": 0.2308219847168512,
      "learning_rate": 3.109916264174743e-05,
      "loss": 0.3707,
      "step": 249
    },
    {
      "epoch": 3.0864197530864197,
      "grad_norm": 0.2246388176582276,
      "learning_rate": 3.076292400315179e-05,
      "loss": 0.3735,
      "step": 250
    },
    {
      "epoch": 3.0987654320987654,
      "grad_norm": 0.22125008039208874,
      "learning_rate": 3.0427373428497704e-05,
      "loss": 0.3682,
      "step": 251
    },
    {
      "epoch": 3.111111111111111,
      "grad_norm": 0.20085617720181384,
      "learning_rate": 3.0092535912734965e-05,
      "loss": 0.3668,
      "step": 252
    },
    {
      "epoch": 3.123456790123457,
      "grad_norm": 0.2177257993468155,
      "learning_rate": 2.9758436397698118e-05,
      "loss": 0.3653,
      "step": 253
    },
    {
      "epoch": 3.1358024691358026,
      "grad_norm": 0.2087241919659811,
      "learning_rate": 2.9425099770248446e-05,
      "loss": 0.3711,
      "step": 254
    },
    {
      "epoch": 3.148148148148148,
      "grad_norm": 0.1946566571469163,
      "learning_rate": 2.9092550860420252e-05,
      "loss": 0.3624,
      "step": 255
    },
    {
      "epoch": 3.1604938271604937,
      "grad_norm": 0.22054171431244854,
      "learning_rate": 2.8760814439571233e-05,
      "loss": 0.3685,
      "step": 256
    },
    {
      "epoch": 3.1728395061728394,
      "grad_norm": 0.17240280564444063,
      "learning_rate": 2.8429915218537297e-05,
      "loss": 0.3674,
      "step": 257
    },
    {
      "epoch": 3.185185185185185,
      "grad_norm": 0.1848593449888633,
      "learning_rate": 2.809987784579189e-05,
      "loss": 0.3608,
      "step": 258
    },
    {
      "epoch": 3.197530864197531,
      "grad_norm": 0.18266249121254657,
      "learning_rate": 2.7770726905609896e-05,
      "loss": 0.3642,
      "step": 259
    },
    {
      "epoch": 3.2098765432098766,
      "grad_norm": 0.16073855445767077,
      "learning_rate": 2.74424869162364e-05,
      "loss": 0.3563,
      "step": 260
    },
    {
      "epoch": 3.2222222222222223,
      "grad_norm": 0.17531367979315252,
      "learning_rate": 2.7115182328060385e-05,
      "loss": 0.3676,
      "step": 261
    },
    {
      "epoch": 3.234567901234568,
      "grad_norm": 0.14796201932709915,
      "learning_rate": 2.678883752179333e-05,
      "loss": 0.3625,
      "step": 262
    },
    {
      "epoch": 3.246913580246914,
      "grad_norm": 0.19310186654814873,
      "learning_rate": 2.6463476806653185e-05,
      "loss": 0.3585,
      "step": 263
    },
    {
      "epoch": 3.259259259259259,
      "grad_norm": 0.17358347178863473,
      "learning_rate": 2.61391244185536e-05,
      "loss": 0.3605,
      "step": 264
    },
    {
      "epoch": 3.271604938271605,
      "grad_norm": 0.17684421859741475,
      "learning_rate": 2.5815804518298575e-05,
      "loss": 0.3677,
      "step": 265
    },
    {
      "epoch": 3.2839506172839505,
      "grad_norm": 0.203480321127498,
      "learning_rate": 2.549354118978272e-05,
      "loss": 0.3678,
      "step": 266
    },
    {
      "epoch": 3.2962962962962963,
      "grad_norm": 0.17547188883389694,
      "learning_rate": 2.51723584381973e-05,
      "loss": 0.3608,
      "step": 267
    },
    {
      "epoch": 3.308641975308642,
      "grad_norm": 0.18602067695611346,
      "learning_rate": 2.4852280188242096e-05,
      "loss": 0.3678,
      "step": 268
    },
    {
      "epoch": 3.3209876543209877,
      "grad_norm": 0.17140539371210345,
      "learning_rate": 2.453333028234325e-05,
      "loss": 0.3638,
      "step": 269
    },
    {
      "epoch": 3.3333333333333335,
      "grad_norm": 0.15224985187441373,
      "learning_rate": 2.4215532478877283e-05,
      "loss": 0.3597,
      "step": 270
    },
    {
      "epoch": 3.3456790123456788,
      "grad_norm": 0.16134052410438623,
      "learning_rate": 2.3898910450401306e-05,
      "loss": 0.3667,
      "step": 271
    },
    {
      "epoch": 3.3580246913580245,
      "grad_norm": 0.14386569537644786,
      "learning_rate": 2.3583487781889737e-05,
      "loss": 0.3709,
      "step": 272
    },
    {
      "epoch": 3.3703703703703702,
      "grad_norm": 0.15151232350322325,
      "learning_rate": 2.3269287968977406e-05,
      "loss": 0.3637,
      "step": 273
    },
    {
      "epoch": 3.382716049382716,
      "grad_norm": 0.15707589935891128,
      "learning_rate": 2.2956334416209404e-05,
      "loss": 0.3735,
      "step": 274
    },
    {
      "epoch": 3.3950617283950617,
      "grad_norm": 0.1477749720910393,
      "learning_rate": 2.264465043529768e-05,
      "loss": 0.3622,
      "step": 275
    },
    {
      "epoch": 3.4074074074074074,
      "grad_norm": 0.14885929712006027,
      "learning_rate": 2.233425924338463e-05,
      "loss": 0.3644,
      "step": 276
    },
    {
      "epoch": 3.419753086419753,
      "grad_norm": 0.14340772251052653,
      "learning_rate": 2.2025183961313542e-05,
      "loss": 0.3687,
      "step": 277
    },
    {
      "epoch": 3.432098765432099,
      "grad_norm": 0.13763533911676756,
      "learning_rate": 2.1717447611906496e-05,
      "loss": 0.3663,
      "step": 278
    },
    {
      "epoch": 3.4444444444444446,
      "grad_norm": 0.14639101329943424,
      "learning_rate": 2.141107311824926e-05,
      "loss": 0.3689,
      "step": 279
    },
    {
      "epoch": 3.45679012345679,
      "grad_norm": 0.1276571096925691,
      "learning_rate": 2.1106083301983852e-05,
      "loss": 0.3618,
      "step": 280
    },
    {
      "epoch": 3.4691358024691357,
      "grad_norm": 0.14363458047447658,
      "learning_rate": 2.0802500881608557e-05,
      "loss": 0.3727,
      "step": 281
    },
    {
      "epoch": 3.4814814814814814,
      "grad_norm": 0.12886295792105223,
      "learning_rate": 2.0500348470785614e-05,
      "loss": 0.3692,
      "step": 282
    },
    {
      "epoch": 3.493827160493827,
      "grad_norm": 0.1490463167300804,
      "learning_rate": 2.0199648576656744e-05,
      "loss": 0.3636,
      "step": 283
    },
    {
      "epoch": 3.506172839506173,
      "grad_norm": 0.12848412841426096,
      "learning_rate": 1.9900423598166685e-05,
      "loss": 0.3651,
      "step": 284
    },
    {
      "epoch": 3.5185185185185186,
      "grad_norm": 0.15574442730245652,
      "learning_rate": 1.9602695824394576e-05,
      "loss": 0.3651,
      "step": 285
    },
    {
      "epoch": 3.5308641975308643,
      "grad_norm": 0.13229685022791354,
      "learning_rate": 1.9306487432893725e-05,
      "loss": 0.3662,
      "step": 286
    },
    {
      "epoch": 3.5432098765432096,
      "grad_norm": 0.12772394530331901,
      "learning_rate": 1.901182048803968e-05,
      "loss": 0.3651,
      "step": 287
    },
    {
      "epoch": 3.5555555555555554,
      "grad_norm": 0.1465151284584716,
      "learning_rate": 1.8718716939386543e-05,
      "loss": 0.3698,
      "step": 288
    },
    {
      "epoch": 3.567901234567901,
      "grad_norm": 0.14047354187123118,
      "learning_rate": 1.8427198620032037e-05,
      "loss": 0.3661,
      "step": 289
    },
    {
      "epoch": 3.580246913580247,
      "grad_norm": 0.1410397043933752,
      "learning_rate": 1.8137287244991162e-05,
      "loss": 0.3708,
      "step": 290
    },
    {
      "epoch": 3.5925925925925926,
      "grad_norm": 0.14850797574149896,
      "learning_rate": 1.7849004409578678e-05,
      "loss": 0.37,
      "step": 291
    },
    {
      "epoch": 3.6049382716049383,
      "grad_norm": 0.14770512288892354,
      "learning_rate": 1.7562371587800422e-05,
      "loss": 0.3621,
      "step": 292
    },
    {
      "epoch": 3.617283950617284,
      "grad_norm": 0.1301969671247391,
      "learning_rate": 1.7277410130753775e-05,
      "loss": 0.3705,
      "step": 293
    },
    {
      "epoch": 3.6296296296296298,
      "grad_norm": 0.13324724802934845,
      "learning_rate": 1.6994141265037222e-05,
      "loss": 0.3583,
      "step": 294
    },
    {
      "epoch": 3.6419753086419755,
      "grad_norm": 0.12335331492186778,
      "learning_rate": 1.6712586091169183e-05,
      "loss": 0.3706,
      "step": 295
    },
    {
      "epoch": 3.6543209876543212,
      "grad_norm": 0.12483228986767417,
      "learning_rate": 1.6432765582016257e-05,
      "loss": 0.3627,
      "step": 296
    },
    {
      "epoch": 3.6666666666666665,
      "grad_norm": 0.1224822064632684,
      "learning_rate": 1.615470058123099e-05,
      "loss": 0.3683,
      "step": 297
    },
    {
      "epoch": 3.6790123456790123,
      "grad_norm": 0.12870125642053606,
      "learning_rate": 1.5878411801699182e-05,
      "loss": 0.3677,
      "step": 298
    },
    {
      "epoch": 3.691358024691358,
      "grad_norm": 0.11464105740669171,
      "learning_rate": 1.5603919823997083e-05,
      "loss": 0.3725,
      "step": 299
    },
    {
      "epoch": 3.7037037037037037,
      "grad_norm": 0.1259473229571899,
      "learning_rate": 1.5331245094858277e-05,
      "loss": 0.3632,
      "step": 300
    },
    {
      "epoch": 3.7160493827160495,
      "grad_norm": 0.11989849457004843,
      "learning_rate": 1.5060407925650662e-05,
      "loss": 0.363,
      "step": 301
    },
    {
      "epoch": 3.728395061728395,
      "grad_norm": 0.11838170070815705,
      "learning_rate": 1.4791428490863462e-05,
      "loss": 0.3636,
      "step": 302
    },
    {
      "epoch": 3.7407407407407405,
      "grad_norm": 0.11542631237011597,
      "learning_rate": 1.4524326826604442e-05,
      "loss": 0.3662,
      "step": 303
    },
    {
      "epoch": 3.753086419753086,
      "grad_norm": 0.10853513336233153,
      "learning_rate": 1.425912282910741e-05,
      "loss": 0.3612,
      "step": 304
    },
    {
      "epoch": 3.765432098765432,
      "grad_norm": 0.12927711782224877,
      "learning_rate": 1.3995836253250233e-05,
      "loss": 0.3632,
      "step": 305
    },
    {
      "epoch": 3.7777777777777777,
      "grad_norm": 0.11124438692244316,
      "learning_rate": 1.3734486711083199e-05,
      "loss": 0.3656,
      "step": 306
    },
    {
      "epoch": 3.7901234567901234,
      "grad_norm": 0.11829671034615585,
      "learning_rate": 1.3475093670368202e-05,
      "loss": 0.3625,
      "step": 307
    },
    {
      "epoch": 3.802469135802469,
      "grad_norm": 0.1171711805914509,
      "learning_rate": 1.3217676453128583e-05,
      "loss": 0.3681,
      "step": 308
    },
    {
      "epoch": 3.814814814814815,
      "grad_norm": 0.12136369786981609,
      "learning_rate": 1.2962254234209826e-05,
      "loss": 0.3702,
      "step": 309
    },
    {
      "epoch": 3.8271604938271606,
      "grad_norm": 0.11625055457136292,
      "learning_rate": 1.2708846039851306e-05,
      "loss": 0.3596,
      "step": 310
    },
    {
      "epoch": 3.8395061728395063,
      "grad_norm": 0.1249883414445167,
      "learning_rate": 1.2457470746268912e-05,
      "loss": 0.3597,
      "step": 311
    },
    {
      "epoch": 3.851851851851852,
      "grad_norm": 0.11998786571991671,
      "learning_rate": 1.2208147078249106e-05,
      "loss": 0.3705,
      "step": 312
    },
    {
      "epoch": 3.8641975308641974,
      "grad_norm": 0.12324835771363575,
      "learning_rate": 1.1960893607754022e-05,
      "loss": 0.378,
      "step": 313
    },
    {
      "epoch": 3.876543209876543,
      "grad_norm": 0.13292216219148154,
      "learning_rate": 1.1715728752538103e-05,
      "loss": 0.3709,
      "step": 314
    },
    {
      "epoch": 3.888888888888889,
      "grad_norm": 0.12363035327325136,
      "learning_rate": 1.1472670774776159e-05,
      "loss": 0.3734,
      "step": 315
    },
    {
      "epoch": 3.9012345679012346,
      "grad_norm": 0.12095948017126495,
      "learning_rate": 1.123173777970303e-05,
      "loss": 0.3644,
      "step": 316
    },
    {
      "epoch": 3.9135802469135803,
      "grad_norm": 0.12223767053256172,
      "learning_rate": 1.0992947714264952e-05,
      "loss": 0.3639,
      "step": 317
    },
    {
      "epoch": 3.925925925925926,
      "grad_norm": 0.11556671111818939,
      "learning_rate": 1.0756318365782672e-05,
      "loss": 0.3705,
      "step": 318
    },
    {
      "epoch": 3.9382716049382713,
      "grad_norm": 0.11863503874267803,
      "learning_rate": 1.0521867360626534e-05,
      "loss": 0.3649,
      "step": 319
    },
    {
      "epoch": 3.950617283950617,
      "grad_norm": 0.1220737146765024,
      "learning_rate": 1.028961216290342e-05,
      "loss": 0.3702,
      "step": 320
    },
    {
      "epoch": 3.962962962962963,
      "grad_norm": 0.10688177437745146,
      "learning_rate": 1.0059570073155953e-05,
      "loss": 0.3686,
      "step": 321
    },
    {
      "epoch": 3.9753086419753085,
      "grad_norm": 0.1115239690090056,
      "learning_rate": 9.831758227073714e-06,
      "loss": 0.3622,
      "step": 322
    },
    {
      "epoch": 3.9876543209876543,
      "grad_norm": 0.1013118549435552,
      "learning_rate": 9.606193594216852e-06,
      "loss": 0.3662,
      "step": 323
    },
    {
      "epoch": 4.0,
      "grad_norm": 0.13078127831261904,
      "learning_rate": 9.382892976752024e-06,
      "loss": 0.3514,
      "step": 324
    },
    {
      "epoch": 4.012345679012346,
      "grad_norm": 0.14120097496571368,
      "learning_rate": 9.161873008200816e-06,
      "loss": 0.3447,
      "step": 325
    },
    {
      "epoch": 4.0246913580246915,
      "grad_norm": 0.131610168014187,
      "learning_rate": 8.943150152200734e-06,
      "loss": 0.3564,
      "step": 326
    },
    {
      "epoch": 4.037037037037037,
      "grad_norm": 0.1197175249959368,
      "learning_rate": 8.72674070127881e-06,
      "loss": 0.3399,
      "step": 327
    },
    {
      "epoch": 4.049382716049383,
      "grad_norm": 0.1381715130606439,
      "learning_rate": 8.512660775637998e-06,
      "loss": 0.3434,
      "step": 328
    },
    {
      "epoch": 4.061728395061729,
      "grad_norm": 0.13276158035183344,
      "learning_rate": 8.300926321956391e-06,
      "loss": 0.3416,
      "step": 329
    },
    {
      "epoch": 4.074074074074074,
      "grad_norm": 0.13242822539056381,
      "learning_rate": 8.091553112199362e-06,
      "loss": 0.3518,
      "step": 330
    },
    {
      "epoch": 4.08641975308642,
      "grad_norm": 0.12212877221315457,
      "learning_rate": 7.884556742444704e-06,
      "loss": 0.3446,
      "step": 331
    },
    {
      "epoch": 4.098765432098766,
      "grad_norm": 0.11698141950653529,
      "learning_rate": 7.679952631720944e-06,
      "loss": 0.3466,
      "step": 332
    },
    {
      "epoch": 4.111111111111111,
      "grad_norm": 0.1304559116220868,
      "learning_rate": 7.477756020858695e-06,
      "loss": 0.355,
      "step": 333
    },
    {
      "epoch": 4.1234567901234565,
      "grad_norm": 0.13089784593559015,
      "learning_rate": 7.277981971355487e-06,
      "loss": 0.3432,
      "step": 334
    },
    {
      "epoch": 4.135802469135802,
      "grad_norm": 0.12263988117511435,
      "learning_rate": 7.080645364253747e-06,
      "loss": 0.3451,
      "step": 335
    },
    {
      "epoch": 4.148148148148148,
      "grad_norm": 0.11728846389962917,
      "learning_rate": 6.885760899032368e-06,
      "loss": 0.345,
      "step": 336
    },
    {
      "epoch": 4.160493827160494,
      "grad_norm": 0.11392519861587432,
      "learning_rate": 6.69334309251175e-06,
      "loss": 0.3472,
      "step": 337
    },
    {
      "epoch": 4.172839506172839,
      "grad_norm": 0.12602381332743814,
      "learning_rate": 6.503406277772457e-06,
      "loss": 0.345,
      "step": 338
    },
    {
      "epoch": 4.185185185185185,
      "grad_norm": 0.10134725138589995,
      "learning_rate": 6.31596460308753e-06,
      "loss": 0.3431,
      "step": 339
    },
    {
      "epoch": 4.197530864197531,
      "grad_norm": 0.11429337446344612,
      "learning_rate": 6.1310320308686354e-06,
      "loss": 0.3492,
      "step": 340
    },
    {
      "epoch": 4.209876543209877,
      "grad_norm": 0.12482104702813673,
      "learning_rate": 5.9486223366259555e-06,
      "loss": 0.3406,
      "step": 341
    },
    {
      "epoch": 4.222222222222222,
      "grad_norm": 0.11483630989866628,
      "learning_rate": 5.768749107942105e-06,
      "loss": 0.3458,
      "step": 342
    },
    {
      "epoch": 4.234567901234568,
      "grad_norm": 0.10641915634927608,
      "learning_rate": 5.5914257434599705e-06,
      "loss": 0.3427,
      "step": 343
    },
    {
      "epoch": 4.246913580246914,
      "grad_norm": 0.11187178257279115,
      "learning_rate": 5.416665451884706e-06,
      "loss": 0.3478,
      "step": 344
    },
    {
      "epoch": 4.2592592592592595,
      "grad_norm": 0.10328829320916721,
      "learning_rate": 5.24448125099974e-06,
      "loss": 0.3604,
      "step": 345
    },
    {
      "epoch": 4.271604938271605,
      "grad_norm": 0.10325046605157456,
      "learning_rate": 5.074885966697167e-06,
      "loss": 0.3439,
      "step": 346
    },
    {
      "epoch": 4.283950617283951,
      "grad_norm": 0.09783875518175919,
      "learning_rate": 4.9078922320223225e-06,
      "loss": 0.338,
      "step": 347
    },
    {
      "epoch": 4.296296296296296,
      "grad_norm": 0.09584121801157325,
      "learning_rate": 4.7435124862327445e-06,
      "loss": 0.3456,
      "step": 348
    },
    {
      "epoch": 4.308641975308642,
      "grad_norm": 0.10257946096157032,
      "learning_rate": 4.581758973871609e-06,
      "loss": 0.3435,
      "step": 349
    },
    {
      "epoch": 4.320987654320987,
      "grad_norm": 0.09622559695473883,
      "learning_rate": 4.422643743855611e-06,
      "loss": 0.3387,
      "step": 350
    },
    {
      "epoch": 4.333333333333333,
      "grad_norm": 0.08985291405803897,
      "learning_rate": 4.266178648577484e-06,
      "loss": 0.3465,
      "step": 351
    },
    {
      "epoch": 4.345679012345679,
      "grad_norm": 0.09083798833221284,
      "learning_rate": 4.112375343023076e-06,
      "loss": 0.3402,
      "step": 352
    },
    {
      "epoch": 4.3580246913580245,
      "grad_norm": 0.0928368108514207,
      "learning_rate": 3.961245283903239e-06,
      "loss": 0.3417,
      "step": 353
    },
    {
      "epoch": 4.37037037037037,
      "grad_norm": 0.09858890680167014,
      "learning_rate": 3.812799728800354e-06,
      "loss": 0.3494,
      "step": 354
    },
    {
      "epoch": 4.382716049382716,
      "grad_norm": 0.09253195368124226,
      "learning_rate": 3.6670497353298396e-06,
      "loss": 0.3486,
      "step": 355
    },
    {
      "epoch": 4.395061728395062,
      "grad_norm": 0.09138349778139458,
      "learning_rate": 3.5240061603163977e-06,
      "loss": 0.3439,
      "step": 356
    },
    {
      "epoch": 4.407407407407407,
      "grad_norm": 0.09207502583455726,
      "learning_rate": 3.3836796589853484e-06,
      "loss": 0.3399,
      "step": 357
    },
    {
      "epoch": 4.419753086419753,
      "grad_norm": 0.09098178054496783,
      "learning_rate": 3.246080684168913e-06,
      "loss": 0.3374,
      "step": 358
    },
    {
      "epoch": 4.432098765432099,
      "grad_norm": 0.09461477122622283,
      "learning_rate": 3.11121948552759e-06,
      "loss": 0.347,
      "step": 359
    },
    {
      "epoch": 4.444444444444445,
      "grad_norm": 0.08873177760685183,
      "learning_rate": 2.9791061087866625e-06,
      "loss": 0.3517,
      "step": 360
    },
    {
      "epoch": 4.45679012345679,
      "grad_norm": 0.08923078431705038,
      "learning_rate": 2.849750394987907e-06,
      "loss": 0.3507,
      "step": 361
    },
    {
      "epoch": 4.469135802469136,
      "grad_norm": 0.09262596320456881,
      "learning_rate": 2.7231619797565278e-06,
      "loss": 0.3462,
      "step": 362
    },
    {
      "epoch": 4.481481481481482,
      "grad_norm": 0.09034563106603927,
      "learning_rate": 2.5993502925834115e-06,
      "loss": 0.3475,
      "step": 363
    },
    {
      "epoch": 4.493827160493828,
      "grad_norm": 0.08464664656906902,
      "learning_rate": 2.4783245561227264e-06,
      "loss": 0.3436,
      "step": 364
    },
| { |
| "epoch": 4.506172839506172, |
| "grad_norm": 0.08516291301856833, |
| "learning_rate": 2.3600937855049467e-06, |
| "loss": 0.3489, |
| "step": 365 |
| }, |
| { |
| "epoch": 4.518518518518518, |
| "grad_norm": 0.08613713073759806, |
| "learning_rate": 2.244666787665297e-06, |
| "loss": 0.3479, |
| "step": 366 |
| }, |
| { |
| "epoch": 4.530864197530864, |
| "grad_norm": 0.08766949672032548, |
| "learning_rate": 2.1320521606877653e-06, |
| "loss": 0.3491, |
| "step": 367 |
| }, |
| { |
| "epoch": 4.54320987654321, |
| "grad_norm": 0.08233473579098213, |
| "learning_rate": 2.022258293164585e-06, |
| "loss": 0.3444, |
| "step": 368 |
| }, |
| { |
| "epoch": 4.555555555555555, |
| "grad_norm": 0.08546306867171963, |
| "learning_rate": 1.9152933635714354e-06, |
| "loss": 0.3402, |
| "step": 369 |
| }, |
| { |
| "epoch": 4.567901234567901, |
| "grad_norm": 0.0838918934654388, |
| "learning_rate": 1.8111653396581762e-06, |
| "loss": 0.3497, |
| "step": 370 |
| }, |
| { |
| "epoch": 4.580246913580247, |
| "grad_norm": 0.08391928692735219, |
| "learning_rate": 1.709881977855372e-06, |
| "loss": 0.3451, |
| "step": 371 |
| }, |
| { |
| "epoch": 4.592592592592593, |
| "grad_norm": 0.0871181996609232, |
| "learning_rate": 1.6114508226965009e-06, |
| "loss": 0.3446, |
| "step": 372 |
| }, |
| { |
| "epoch": 4.604938271604938, |
| "grad_norm": 0.08989815759315828, |
| "learning_rate": 1.5158792062559813e-06, |
| "loss": 0.3472, |
| "step": 373 |
| }, |
| { |
| "epoch": 4.617283950617284, |
| "grad_norm": 0.08525583975432947, |
| "learning_rate": 1.4231742476029965e-06, |
| "loss": 0.3464, |
| "step": 374 |
| }, |
| { |
| "epoch": 4.62962962962963, |
| "grad_norm": 0.08702616238335954, |
| "learning_rate": 1.33334285227122e-06, |
| "loss": 0.3464, |
| "step": 375 |
| }, |
| { |
| "epoch": 4.6419753086419755, |
| "grad_norm": 0.0798577947590892, |
| "learning_rate": 1.2463917117443968e-06, |
| "loss": 0.3485, |
| "step": 376 |
| }, |
| { |
| "epoch": 4.654320987654321, |
| "grad_norm": 0.08199514353400199, |
| "learning_rate": 1.1623273029579195e-06, |
| "loss": 0.3466, |
| "step": 377 |
| }, |
| { |
| "epoch": 4.666666666666667, |
| "grad_norm": 0.08036935320442855, |
| "learning_rate": 1.0811558878163698e-06, |
| "loss": 0.3449, |
| "step": 378 |
| }, |
| { |
| "epoch": 4.679012345679013, |
| "grad_norm": 0.09004723861019338, |
| "learning_rate": 1.0028835127270553e-06, |
| "loss": 0.3429, |
| "step": 379 |
| }, |
| { |
| "epoch": 4.6913580246913575, |
| "grad_norm": 0.08561873370258896, |
| "learning_rate": 9.275160081496337e-07, |
| "loss": 0.346, |
| "step": 380 |
| }, |
| { |
| "epoch": 4.703703703703704, |
| "grad_norm": 0.08084419918008226, |
| "learning_rate": 8.550589881617877e-07, |
| "loss": 0.3416, |
| "step": 381 |
| }, |
| { |
| "epoch": 4.716049382716049, |
| "grad_norm": 0.08104505949528973, |
| "learning_rate": 7.85517850041062e-07, |
| "loss": 0.3398, |
| "step": 382 |
| }, |
| { |
| "epoch": 4.728395061728395, |
| "grad_norm": 0.08076162479144416, |
| "learning_rate": 7.188977738627901e-07, |
| "loss": 0.3453, |
| "step": 383 |
| }, |
| { |
| "epoch": 4.7407407407407405, |
| "grad_norm": 0.07770391544716708, |
| "learning_rate": 6.552037221142593e-07, |
| "loss": 0.3393, |
| "step": 384 |
| }, |
| { |
| "epoch": 4.753086419753086, |
| "grad_norm": 0.08331286393559116, |
| "learning_rate": 5.944404393250481e-07, |
| "loss": 0.3416, |
| "step": 385 |
| }, |
| { |
| "epoch": 4.765432098765432, |
| "grad_norm": 0.0804668471427177, |
| "learning_rate": 5.366124517136184e-07, |
| "loss": 0.342, |
| "step": 386 |
| }, |
| { |
| "epoch": 4.777777777777778, |
| "grad_norm": 0.08538622755026046, |
| "learning_rate": 4.817240668501421e-07, |
| "loss": 0.3401, |
| "step": 387 |
| }, |
| { |
| "epoch": 4.790123456790123, |
| "grad_norm": 0.08146413045412997, |
| "learning_rate": 4.297793733356548e-07, |
| "loss": 0.3427, |
| "step": 388 |
| }, |
| { |
| "epoch": 4.802469135802469, |
| "grad_norm": 0.08133595326521681, |
| "learning_rate": 3.807822404974726e-07, |
| "loss": 0.3483, |
| "step": 389 |
| }, |
| { |
| "epoch": 4.814814814814815, |
| "grad_norm": 0.07919579475608875, |
| "learning_rate": 3.3473631810100103e-07, |
| "loss": 0.3327, |
| "step": 390 |
| }, |
| { |
| "epoch": 4.827160493827161, |
| "grad_norm": 0.08123223258851157, |
| "learning_rate": 2.916450360778411e-07, |
| "loss": 0.3365, |
| "step": 391 |
| }, |
| { |
| "epoch": 4.839506172839506, |
| "grad_norm": 0.08257891936662312, |
| "learning_rate": 2.5151160427029584e-07, |
| "loss": 0.3475, |
| "step": 392 |
| }, |
| { |
| "epoch": 4.851851851851852, |
| "grad_norm": 0.07932897839921949, |
| "learning_rate": 2.1433901219229502e-07, |
| "loss": 0.3467, |
| "step": 393 |
| }, |
| { |
| "epoch": 4.864197530864198, |
| "grad_norm": 0.08087497808227699, |
| "learning_rate": 1.8013002880667096e-07, |
| "loss": 0.3445, |
| "step": 394 |
| }, |
| { |
| "epoch": 4.8765432098765435, |
| "grad_norm": 0.09555289840502959, |
| "learning_rate": 1.4888720231894138e-07, |
| "loss": 0.3489, |
| "step": 395 |
| }, |
| { |
| "epoch": 4.888888888888889, |
| "grad_norm": 0.07961294947105259, |
| "learning_rate": 1.2061285998746562e-07, |
| "loss": 0.3469, |
| "step": 396 |
| }, |
| { |
| "epoch": 4.901234567901234, |
| "grad_norm": 0.08343057365407185, |
| "learning_rate": 9.530910795009895e-08, |
| "loss": 0.3448, |
| "step": 397 |
| }, |
| { |
| "epoch": 4.91358024691358, |
| "grad_norm": 0.07711854846564468, |
| "learning_rate": 7.297783106730016e-08, |
| "loss": 0.341, |
| "step": 398 |
| }, |
| { |
| "epoch": 4.925925925925926, |
| "grad_norm": 0.07943450346405932, |
| "learning_rate": 5.362069278175508e-08, |
| "loss": 0.347, |
| "step": 399 |
| }, |
| { |
| "epoch": 4.938271604938271, |
| "grad_norm": 0.07870891626937102, |
| "learning_rate": 3.7239134994426775e-08, |
| "loss": 0.3399, |
| "step": 400 |
| }, |
| { |
| "epoch": 4.950617283950617, |
| "grad_norm": 0.08172348680504911, |
| "learning_rate": 2.3834377957183684e-08, |
| "loss": 0.3363, |
| "step": 401 |
| }, |
| { |
| "epoch": 4.962962962962963, |
| "grad_norm": 0.08219122509641905, |
| "learning_rate": 1.3407420181885678e-08, |
| "loss": 0.3397, |
| "step": 402 |
| }, |
| { |
| "epoch": 4.9753086419753085, |
| "grad_norm": 0.079358090049586, |
| "learning_rate": 5.959038366021297e-09, |
| "loss": 0.3475, |
| "step": 403 |
| }, |
| { |
| "epoch": 4.987654320987654, |
| "grad_norm": 0.07720406166190734, |
| "learning_rate": 1.4897873348340697e-09, |
| "loss": 0.344, |
| "step": 404 |
| }, |
| { |
| "epoch": 5.0, |
| "grad_norm": 0.08228065193505166, |
| "learning_rate": 0.0, |
| "loss": 0.3412, |
| "step": 405 |
| }, |
| { |
| "epoch": 5.0, |
| "step": 405, |
| "total_flos": 6794337614561280.0, |
| "train_loss": 0.42579232314486565, |
| "train_runtime": 5653.6163, |
| "train_samples_per_second": 36.455, |
| "train_steps_per_second": 0.072 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 405, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 5, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 6794337614561280.0, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |
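
The table above is the raw `trainer_state.json` that the Hugging Face `Trainer` writes at the end of a run: per-step records in `log_history`, a final train-summary entry, and run-level fields such as `max_steps` and `total_flos`. As a minimal sketch of how one might inspect a file like this, the following Python script loads it and prints a few sanity checks; the file path `trainer_state.json` is an assumed placeholder, not taken from the log itself.

```python
# Minimal sketch: parse a Trainer state file shaped like the one above.
# The path "trainer_state.json" is a placeholder assumption.
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step records carry a "loss" key; the final entry is the train summary
# (train_loss, train_runtime, ...) and has no per-step "loss".
steps = [r for r in state["log_history"] if "loss" in r]
summary = state["log_history"][-1]

print(f"steps logged: {len(steps)} of max_steps={state['max_steps']}")
print(f"first/last step loss: {steps[0]['loss']:.4f} -> {steps[-1]['loss']:.4f}")
print(f"mean train loss: {summary['train_loss']:.4f}")
print(f"runtime: {summary['train_runtime']:.0f}s "
      f"({summary['train_samples_per_second']:.1f} samples/s)")
```

On the data shown here this would report 405 logged steps out of `max_steps=405`, a loss falling from 0.8713 to 0.3412, and a mean train loss of about 0.4258 over a roughly 5654-second run.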