| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 3.0, |
| "eval_steps": 500, |
| "global_step": 594, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.005050505050505051, |
| "grad_norm": 35.239383697509766, |
| "learning_rate": 5.0000000000000004e-08, |
| "loss": 2.6493, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.010101010101010102, |
| "grad_norm": 35.41305923461914, |
| "learning_rate": 1.0000000000000001e-07, |
| "loss": 2.6401, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.015151515151515152, |
| "grad_norm": 35.47914505004883, |
| "learning_rate": 1.5000000000000002e-07, |
| "loss": 2.6401, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.020202020202020204, |
| "grad_norm": 34.923484802246094, |
| "learning_rate": 2.0000000000000002e-07, |
| "loss": 2.6202, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.025252525252525252, |
| "grad_norm": 34.7836799621582, |
| "learning_rate": 2.5000000000000004e-07, |
| "loss": 2.6071, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.030303030303030304, |
| "grad_norm": 36.033050537109375, |
| "learning_rate": 3.0000000000000004e-07, |
| "loss": 2.6612, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.03535353535353535, |
| "grad_norm": 34.26911926269531, |
| "learning_rate": 3.5000000000000004e-07, |
| "loss": 2.5487, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.04040404040404041, |
| "grad_norm": 33.940216064453125, |
| "learning_rate": 4.0000000000000003e-07, |
| "loss": 2.5171, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.045454545454545456, |
| "grad_norm": 33.55999755859375, |
| "learning_rate": 4.5000000000000003e-07, |
| "loss": 2.5295, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.050505050505050504, |
| "grad_norm": 35.171165466308594, |
| "learning_rate": 5.000000000000001e-07, |
| "loss": 2.6192, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.05555555555555555, |
| "grad_norm": 32.917484283447266, |
| "learning_rate": 5.5e-07, |
| "loss": 2.4022, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.06060606060606061, |
| "grad_norm": 34.53117752075195, |
| "learning_rate": 6.000000000000001e-07, |
| "loss": 2.5785, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.06565656565656566, |
| "grad_norm": 34.89581298828125, |
| "learning_rate": 6.5e-07, |
| "loss": 2.52, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.0707070707070707, |
| "grad_norm": 32.23255920410156, |
| "learning_rate": 7.000000000000001e-07, |
| "loss": 2.2731, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.07575757575757576, |
| "grad_norm": 33.4830207824707, |
| "learning_rate": 7.5e-07, |
| "loss": 2.3016, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.08080808080808081, |
| "grad_norm": 33.69716262817383, |
| "learning_rate": 8.000000000000001e-07, |
| "loss": 2.2107, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.08585858585858586, |
| "grad_norm": 33.970333099365234, |
| "learning_rate": 8.500000000000001e-07, |
| "loss": 2.1916, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.09090909090909091, |
| "grad_norm": 32.690433502197266, |
| "learning_rate": 9.000000000000001e-07, |
| "loss": 1.9767, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.09595959595959595, |
| "grad_norm": 34.36090850830078, |
| "learning_rate": 9.500000000000001e-07, |
| "loss": 1.9856, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.10101010101010101, |
| "grad_norm": 34.156280517578125, |
| "learning_rate": 1.0000000000000002e-06, |
| "loss": 1.7905, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.10606060606060606, |
| "grad_norm": 34.23146438598633, |
| "learning_rate": 1.0500000000000001e-06, |
| "loss": 1.6931, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.1111111111111111, |
| "grad_norm": 33.700294494628906, |
| "learning_rate": 1.1e-06, |
| "loss": 1.5142, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.11616161616161616, |
| "grad_norm": 32.60850143432617, |
| "learning_rate": 1.1500000000000002e-06, |
| "loss": 1.4211, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.12121212121212122, |
| "grad_norm": 30.005735397338867, |
| "learning_rate": 1.2000000000000002e-06, |
| "loss": 1.2172, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.12626262626262627, |
| "grad_norm": 29.669198989868164, |
| "learning_rate": 1.25e-06, |
| "loss": 1.1584, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.13131313131313133, |
| "grad_norm": 30.327133178710938, |
| "learning_rate": 1.3e-06, |
| "loss": 0.8788, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.13636363636363635, |
| "grad_norm": 29.821216583251953, |
| "learning_rate": 1.3500000000000002e-06, |
| "loss": 0.7566, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.1414141414141414, |
| "grad_norm": 27.04122543334961, |
| "learning_rate": 1.4000000000000001e-06, |
| "loss": 0.5775, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.14646464646464646, |
| "grad_norm": 24.890750885009766, |
| "learning_rate": 1.45e-06, |
| "loss": 0.5193, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.15151515151515152, |
| "grad_norm": 21.845609664916992, |
| "learning_rate": 1.5e-06, |
| "loss": 0.3785, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.15656565656565657, |
| "grad_norm": 17.295909881591797, |
| "learning_rate": 1.5500000000000002e-06, |
| "loss": 0.3091, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.16161616161616163, |
| "grad_norm": 11.265312194824219, |
| "learning_rate": 1.6000000000000001e-06, |
| "loss": 0.1745, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.16666666666666666, |
| "grad_norm": 5.728430271148682, |
| "learning_rate": 1.6500000000000003e-06, |
| "loss": 0.1413, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.1717171717171717, |
| "grad_norm": 7.306532859802246, |
| "learning_rate": 1.7000000000000002e-06, |
| "loss": 0.1613, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.17676767676767677, |
| "grad_norm": 5.182886600494385, |
| "learning_rate": 1.75e-06, |
| "loss": 0.1155, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.18181818181818182, |
| "grad_norm": 3.8576388359069824, |
| "learning_rate": 1.8000000000000001e-06, |
| "loss": 0.0764, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.18686868686868688, |
| "grad_norm": 2.638275623321533, |
| "learning_rate": 1.85e-06, |
| "loss": 0.0745, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.1919191919191919, |
| "grad_norm": 2.225215435028076, |
| "learning_rate": 1.9000000000000002e-06, |
| "loss": 0.0719, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.19696969696969696, |
| "grad_norm": 1.7595651149749756, |
| "learning_rate": 1.9500000000000004e-06, |
| "loss": 0.0633, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.20202020202020202, |
| "grad_norm": 1.4759680032730103, |
| "learning_rate": 2.0000000000000003e-06, |
| "loss": 0.0529, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.20707070707070707, |
| "grad_norm": 1.3384065628051758, |
| "learning_rate": 2.05e-06, |
| "loss": 0.0433, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.21212121212121213, |
| "grad_norm": 1.564224362373352, |
| "learning_rate": 2.1000000000000002e-06, |
| "loss": 0.061, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.21717171717171718, |
| "grad_norm": 1.419657826423645, |
| "learning_rate": 2.15e-06, |
| "loss": 0.0615, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.2222222222222222, |
| "grad_norm": 1.1758321523666382, |
| "learning_rate": 2.2e-06, |
| "loss": 0.059, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.22727272727272727, |
| "grad_norm": 1.3546487092971802, |
| "learning_rate": 2.25e-06, |
| "loss": 0.0574, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.23232323232323232, |
| "grad_norm": 1.0917863845825195, |
| "learning_rate": 2.3000000000000004e-06, |
| "loss": 0.0601, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.23737373737373738, |
| "grad_norm": 0.990371823310852, |
| "learning_rate": 2.35e-06, |
| "loss": 0.0515, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.24242424242424243, |
| "grad_norm": 1.3220105171203613, |
| "learning_rate": 2.4000000000000003e-06, |
| "loss": 0.0584, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.2474747474747475, |
| "grad_norm": 1.1640170812606812, |
| "learning_rate": 2.4500000000000003e-06, |
| "loss": 0.0487, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.25252525252525254, |
| "grad_norm": 1.0264198780059814, |
| "learning_rate": 2.5e-06, |
| "loss": 0.0456, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.25757575757575757, |
| "grad_norm": 1.1187573671340942, |
| "learning_rate": 2.55e-06, |
| "loss": 0.0588, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.26262626262626265, |
| "grad_norm": 0.8740547299385071, |
| "learning_rate": 2.6e-06, |
| "loss": 0.0415, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.2676767676767677, |
| "grad_norm": 1.128773808479309, |
| "learning_rate": 2.6500000000000005e-06, |
| "loss": 0.0535, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.2727272727272727, |
| "grad_norm": 1.1845512390136719, |
| "learning_rate": 2.7000000000000004e-06, |
| "loss": 0.0543, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.2777777777777778, |
| "grad_norm": 1.0151381492614746, |
| "learning_rate": 2.7500000000000004e-06, |
| "loss": 0.0467, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.2828282828282828, |
| "grad_norm": 1.0951204299926758, |
| "learning_rate": 2.8000000000000003e-06, |
| "loss": 0.0432, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.2878787878787879, |
| "grad_norm": 0.8145187497138977, |
| "learning_rate": 2.85e-06, |
| "loss": 0.0433, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.29292929292929293, |
| "grad_norm": 0.8338307738304138, |
| "learning_rate": 2.9e-06, |
| "loss": 0.0442, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.29797979797979796, |
| "grad_norm": 1.2009202241897583, |
| "learning_rate": 2.95e-06, |
| "loss": 0.0471, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.30303030303030304, |
| "grad_norm": 1.0247856378555298, |
| "learning_rate": 3e-06, |
| "loss": 0.0418, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.30808080808080807, |
| "grad_norm": 0.8393287062644958, |
| "learning_rate": 3.05e-06, |
| "loss": 0.0411, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.31313131313131315, |
| "grad_norm": 0.807152271270752, |
| "learning_rate": 3.1000000000000004e-06, |
| "loss": 0.0428, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.3181818181818182, |
| "grad_norm": 1.0362982749938965, |
| "learning_rate": 3.1500000000000003e-06, |
| "loss": 0.0412, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.32323232323232326, |
| "grad_norm": 0.8612103462219238, |
| "learning_rate": 3.2000000000000003e-06, |
| "loss": 0.0374, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.3282828282828283, |
| "grad_norm": 0.763271152973175, |
| "learning_rate": 3.2500000000000002e-06, |
| "loss": 0.038, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.3333333333333333, |
| "grad_norm": 0.993196427822113, |
| "learning_rate": 3.3000000000000006e-06, |
| "loss": 0.0476, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.3383838383838384, |
| "grad_norm": 1.1754580736160278, |
| "learning_rate": 3.3500000000000005e-06, |
| "loss": 0.0496, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.3434343434343434, |
| "grad_norm": 0.7608982920646667, |
| "learning_rate": 3.4000000000000005e-06, |
| "loss": 0.0371, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.3484848484848485, |
| "grad_norm": 0.8498206734657288, |
| "learning_rate": 3.45e-06, |
| "loss": 0.0422, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.35353535353535354, |
| "grad_norm": 0.8418146371841431, |
| "learning_rate": 3.5e-06, |
| "loss": 0.0361, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.35858585858585856, |
| "grad_norm": 0.8102655410766602, |
| "learning_rate": 3.5500000000000003e-06, |
| "loss": 0.0356, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.36363636363636365, |
| "grad_norm": 0.7071998119354248, |
| "learning_rate": 3.6000000000000003e-06, |
| "loss": 0.0352, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.3686868686868687, |
| "grad_norm": 0.8242170810699463, |
| "learning_rate": 3.65e-06, |
| "loss": 0.0393, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.37373737373737376, |
| "grad_norm": 0.93561190366745, |
| "learning_rate": 3.7e-06, |
| "loss": 0.047, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.3787878787878788, |
| "grad_norm": 0.8524022698402405, |
| "learning_rate": 3.7500000000000005e-06, |
| "loss": 0.0325, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.3838383838383838, |
| "grad_norm": 0.8019789457321167, |
| "learning_rate": 3.8000000000000005e-06, |
| "loss": 0.0358, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.3888888888888889, |
| "grad_norm": 0.9955350160598755, |
| "learning_rate": 3.85e-06, |
| "loss": 0.0373, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.3939393939393939, |
| "grad_norm": 1.2481234073638916, |
| "learning_rate": 3.900000000000001e-06, |
| "loss": 0.0353, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.398989898989899, |
| "grad_norm": 0.7706320285797119, |
| "learning_rate": 3.95e-06, |
| "loss": 0.0352, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.40404040404040403, |
| "grad_norm": 0.8946953415870667, |
| "learning_rate": 4.000000000000001e-06, |
| "loss": 0.0389, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.4090909090909091, |
| "grad_norm": 0.7142760753631592, |
| "learning_rate": 4.05e-06, |
| "loss": 0.0347, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.41414141414141414, |
| "grad_norm": 0.6389093399047852, |
| "learning_rate": 4.1e-06, |
| "loss": 0.0352, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.41919191919191917, |
| "grad_norm": 0.9447190165519714, |
| "learning_rate": 4.15e-06, |
| "loss": 0.0392, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.42424242424242425, |
| "grad_norm": 0.832013726234436, |
| "learning_rate": 4.2000000000000004e-06, |
| "loss": 0.0361, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.4292929292929293, |
| "grad_norm": 0.8298827409744263, |
| "learning_rate": 4.25e-06, |
| "loss": 0.0342, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.43434343434343436, |
| "grad_norm": 0.8309102654457092, |
| "learning_rate": 4.3e-06, |
| "loss": 0.0253, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.4393939393939394, |
| "grad_norm": 0.8921272158622742, |
| "learning_rate": 4.350000000000001e-06, |
| "loss": 0.0356, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.4444444444444444, |
| "grad_norm": 0.9582182765007019, |
| "learning_rate": 4.4e-06, |
| "loss": 0.0307, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.4494949494949495, |
| "grad_norm": 0.9414967894554138, |
| "learning_rate": 4.450000000000001e-06, |
| "loss": 0.0347, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.45454545454545453, |
| "grad_norm": 0.8414708375930786, |
| "learning_rate": 4.5e-06, |
| "loss": 0.0324, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.4595959595959596, |
| "grad_norm": 0.97618168592453, |
| "learning_rate": 4.5500000000000005e-06, |
| "loss": 0.035, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.46464646464646464, |
| "grad_norm": 0.7519140839576721, |
| "learning_rate": 4.600000000000001e-06, |
| "loss": 0.0291, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.4696969696969697, |
| "grad_norm": 0.7026158571243286, |
| "learning_rate": 4.65e-06, |
| "loss": 0.0343, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.47474747474747475, |
| "grad_norm": 0.8826281428337097, |
| "learning_rate": 4.7e-06, |
| "loss": 0.0283, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.4797979797979798, |
| "grad_norm": 0.8762742877006531, |
| "learning_rate": 4.75e-06, |
| "loss": 0.027, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.48484848484848486, |
| "grad_norm": 0.7252739667892456, |
| "learning_rate": 4.800000000000001e-06, |
| "loss": 0.0286, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.4898989898989899, |
| "grad_norm": 0.7852551341056824, |
| "learning_rate": 4.85e-06, |
| "loss": 0.0287, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.494949494949495, |
| "grad_norm": 0.6870710849761963, |
| "learning_rate": 4.9000000000000005e-06, |
| "loss": 0.0324, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 1.0492205619812012, |
| "learning_rate": 4.95e-06, |
| "loss": 0.0384, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.5050505050505051, |
| "grad_norm": 0.9691818952560425, |
| "learning_rate": 5e-06, |
| "loss": 0.0324, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.51010101010101, |
| "grad_norm": 0.7680443525314331, |
| "learning_rate": 4.999989577985671e-06, |
| "loss": 0.0239, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.5151515151515151, |
| "grad_norm": 0.7711728811264038, |
| "learning_rate": 4.99995831202958e-06, |
| "loss": 0.0339, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.5202020202020202, |
| "grad_norm": 0.664016842842102, |
| "learning_rate": 4.999906202392409e-06, |
| "loss": 0.0242, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.5252525252525253, |
| "grad_norm": 0.6629706621170044, |
| "learning_rate": 4.999833249508629e-06, |
| "loss": 0.0272, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.5303030303030303, |
| "grad_norm": 0.8449888229370117, |
| "learning_rate": 4.999739453986491e-06, |
| "loss": 0.0313, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.5353535353535354, |
| "grad_norm": 0.9404881000518799, |
| "learning_rate": 4.999624816608027e-06, |
| "loss": 0.0333, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.5404040404040404, |
| "grad_norm": 0.8782823085784912, |
| "learning_rate": 4.99948933832904e-06, |
| "loss": 0.0298, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.5454545454545454, |
| "grad_norm": 0.8479216694831848, |
| "learning_rate": 4.999333020279094e-06, |
| "loss": 0.0305, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.5505050505050505, |
| "grad_norm": 0.7747307419776917, |
| "learning_rate": 4.999155863761507e-06, |
| "loss": 0.0275, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.5555555555555556, |
| "grad_norm": 0.8345880508422852, |
| "learning_rate": 4.998957870253344e-06, |
| "loss": 0.0326, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.5606060606060606, |
| "grad_norm": 0.84605473279953, |
| "learning_rate": 4.998739041405395e-06, |
| "loss": 0.0302, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.5656565656565656, |
| "grad_norm": 0.7808102369308472, |
| "learning_rate": 4.998499379042172e-06, |
| "loss": 0.0262, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.5707070707070707, |
| "grad_norm": 0.8007850050926208, |
| "learning_rate": 4.998238885161886e-06, |
| "loss": 0.0272, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.5757575757575758, |
| "grad_norm": 0.8897403478622437, |
| "learning_rate": 4.997957561936433e-06, |
| "loss": 0.0261, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.5808080808080808, |
| "grad_norm": 0.8403050899505615, |
| "learning_rate": 4.997655411711378e-06, |
| "loss": 0.0292, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.5858585858585859, |
| "grad_norm": 0.7349573969841003, |
| "learning_rate": 4.997332437005932e-06, |
| "loss": 0.0243, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.5909090909090909, |
| "grad_norm": 0.8440062999725342, |
| "learning_rate": 4.996988640512931e-06, |
| "loss": 0.0253, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.5959595959595959, |
| "grad_norm": 0.8826310038566589, |
| "learning_rate": 4.996624025098819e-06, |
| "loss": 0.0209, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.601010101010101, |
| "grad_norm": 1.1104847192764282, |
| "learning_rate": 4.996238593803616e-06, |
| "loss": 0.0242, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.6060606060606061, |
| "grad_norm": 1.3400338888168335, |
| "learning_rate": 4.9958323498409e-06, |
| "loss": 0.0347, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.6111111111111112, |
| "grad_norm": 0.9243490099906921, |
| "learning_rate": 4.9954052965977725e-06, |
| "loss": 0.0202, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.6161616161616161, |
| "grad_norm": 1.075374960899353, |
| "learning_rate": 4.99495743763484e-06, |
| "loss": 0.0309, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.6212121212121212, |
| "grad_norm": 1.0591344833374023, |
| "learning_rate": 4.9944887766861765e-06, |
| "loss": 0.0241, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.6262626262626263, |
| "grad_norm": 0.7506961226463318, |
| "learning_rate": 4.993999317659293e-06, |
| "loss": 0.025, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.6313131313131313, |
| "grad_norm": 1.016758680343628, |
| "learning_rate": 4.993489064635109e-06, |
| "loss": 0.0219, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.6363636363636364, |
| "grad_norm": 0.7476636171340942, |
| "learning_rate": 4.9929580218679195e-06, |
| "loss": 0.021, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.6414141414141414, |
| "grad_norm": 0.7349154949188232, |
| "learning_rate": 4.992406193785348e-06, |
| "loss": 0.0228, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.6464646464646465, |
| "grad_norm": 0.672728419303894, |
| "learning_rate": 4.991833584988326e-06, |
| "loss": 0.0209, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.6515151515151515, |
| "grad_norm": 0.5250627398490906, |
| "learning_rate": 4.991240200251041e-06, |
| "loss": 0.0124, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.6565656565656566, |
| "grad_norm": 1.0610628128051758, |
| "learning_rate": 4.990626044520905e-06, |
| "loss": 0.0176, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.6616161616161617, |
| "grad_norm": 1.0880004167556763, |
| "learning_rate": 4.98999112291851e-06, |
| "loss": 0.024, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.6666666666666666, |
| "grad_norm": 0.9670020937919617, |
| "learning_rate": 4.989335440737587e-06, |
| "loss": 0.0224, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.6717171717171717, |
| "grad_norm": 0.869041383266449, |
| "learning_rate": 4.988659003444956e-06, |
| "loss": 0.0184, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.6767676767676768, |
| "grad_norm": 0.9090948700904846, |
| "learning_rate": 4.987961816680493e-06, |
| "loss": 0.0192, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.6818181818181818, |
| "grad_norm": 1.0600134134292603, |
| "learning_rate": 4.987243886257066e-06, |
| "loss": 0.029, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.6868686868686869, |
| "grad_norm": 0.5932410359382629, |
| "learning_rate": 4.986505218160502e-06, |
| "loss": 0.0172, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.6919191919191919, |
| "grad_norm": 0.8293594121932983, |
| "learning_rate": 4.985745818549527e-06, |
| "loss": 0.0207, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.696969696969697, |
| "grad_norm": 0.863837718963623, |
| "learning_rate": 4.984965693755723e-06, |
| "loss": 0.0262, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.702020202020202, |
| "grad_norm": 0.8079712390899658, |
| "learning_rate": 4.984164850283465e-06, |
| "loss": 0.0226, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.7070707070707071, |
| "grad_norm": 0.7353380918502808, |
| "learning_rate": 4.983343294809875e-06, |
| "loss": 0.031, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.7121212121212122, |
| "grad_norm": 0.6296212673187256, |
| "learning_rate": 4.9825010341847644e-06, |
| "loss": 0.016, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.7171717171717171, |
| "grad_norm": 0.9319673180580139, |
| "learning_rate": 4.981638075430572e-06, |
| "loss": 0.0213, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.7222222222222222, |
| "grad_norm": 0.6460096836090088, |
| "learning_rate": 4.980754425742318e-06, |
| "loss": 0.0203, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.7272727272727273, |
| "grad_norm": 0.6757596731185913, |
| "learning_rate": 4.979850092487525e-06, |
| "loss": 0.0175, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.7323232323232324, |
| "grad_norm": 0.7001456022262573, |
| "learning_rate": 4.978925083206174e-06, |
| "loss": 0.021, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.7373737373737373, |
| "grad_norm": 0.602177619934082, |
| "learning_rate": 4.977979405610635e-06, |
| "loss": 0.0168, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.7424242424242424, |
| "grad_norm": 0.7525290250778198, |
| "learning_rate": 4.977013067585597e-06, |
| "loss": 0.022, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.7474747474747475, |
| "grad_norm": 0.6703810095787048, |
| "learning_rate": 4.976026077188013e-06, |
| "loss": 0.016, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.7525252525252525, |
| "grad_norm": 0.7743527293205261, |
| "learning_rate": 4.975018442647026e-06, |
| "loss": 0.0226, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.7575757575757576, |
| "grad_norm": 0.6670228838920593, |
| "learning_rate": 4.973990172363899e-06, |
| "loss": 0.0181, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.7626262626262627, |
| "grad_norm": 0.7247374653816223, |
| "learning_rate": 4.972941274911953e-06, |
| "loss": 0.0146, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.7676767676767676, |
| "grad_norm": 0.6329397559165955, |
| "learning_rate": 4.9718717590364855e-06, |
| "loss": 0.0165, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.7727272727272727, |
| "grad_norm": 0.859639048576355, |
| "learning_rate": 4.9707816336547045e-06, |
| "loss": 0.0216, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.7777777777777778, |
| "grad_norm": 0.7974834442138672, |
| "learning_rate": 4.969670907855651e-06, |
| "loss": 0.0212, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.7828282828282829, |
| "grad_norm": 1.1814897060394287, |
| "learning_rate": 4.968539590900128e-06, |
| "loss": 0.0231, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.7878787878787878, |
| "grad_norm": 0.7747610211372375, |
| "learning_rate": 4.967387692220615e-06, |
| "loss": 0.0137, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.7929292929292929, |
| "grad_norm": 0.5962601900100708, |
| "learning_rate": 4.966215221421195e-06, |
| "loss": 0.0139, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.797979797979798, |
| "grad_norm": 1.1249876022338867, |
| "learning_rate": 4.965022188277474e-06, |
| "loss": 0.0192, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.803030303030303, |
| "grad_norm": 0.559429407119751, |
| "learning_rate": 4.9638086027365005e-06, |
| "loss": 0.0128, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.8080808080808081, |
| "grad_norm": 1.0353529453277588, |
| "learning_rate": 4.962574474916678e-06, |
| "loss": 0.0179, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.8131313131313131, |
| "grad_norm": 0.5366432666778564, |
| "learning_rate": 4.961319815107685e-06, |
| "loss": 0.0098, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.8181818181818182, |
| "grad_norm": 0.673726499080658, |
| "learning_rate": 4.960044633770387e-06, |
| "loss": 0.0144, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.8232323232323232, |
| "grad_norm": 0.517419695854187, |
| "learning_rate": 4.95874894153675e-06, |
| "loss": 0.016, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.8282828282828283, |
| "grad_norm": 0.5697750449180603, |
| "learning_rate": 4.957432749209755e-06, |
| "loss": 0.0117, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.8333333333333334, |
| "grad_norm": 1.0551936626434326, |
| "learning_rate": 4.9560960677633e-06, |
| "loss": 0.0179, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.8383838383838383, |
| "grad_norm": 0.564612865447998, |
| "learning_rate": 4.954738908342116e-06, |
| "loss": 0.0112, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.8434343434343434, |
| "grad_norm": 0.7717934846878052, |
| "learning_rate": 4.953361282261671e-06, |
| "loss": 0.0121, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.8484848484848485, |
| "grad_norm": 0.6095231175422668, |
| "learning_rate": 4.9519632010080765e-06, |
| "loss": 0.0071, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.8535353535353535, |
| "grad_norm": 0.7816880941390991, |
| "learning_rate": 4.95054467623799e-06, |
| "loss": 0.0107, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.8585858585858586, |
| "grad_norm": 0.5223932266235352, |
| "learning_rate": 4.9491057197785205e-06, |
| "loss": 0.0106, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.8636363636363636, |
| "grad_norm": 0.9016621708869934, |
| "learning_rate": 4.947646343627128e-06, |
| "loss": 0.0118, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.8686868686868687, |
| "grad_norm": 0.4834199547767639, |
| "learning_rate": 4.946166559951523e-06, |
| "loss": 0.0063, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.8737373737373737, |
| "grad_norm": 0.8252310752868652, |
| "learning_rate": 4.944666381089567e-06, |
| "loss": 0.0175, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.8787878787878788, |
| "grad_norm": 0.6991069912910461, |
| "learning_rate": 4.943145819549169e-06, |
| "loss": 0.0104, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.8838383838383839, |
| "grad_norm": 0.6017345786094666, |
| "learning_rate": 4.941604888008181e-06, |
| "loss": 0.0117, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.8888888888888888, |
| "grad_norm": 0.69279944896698, |
| "learning_rate": 4.9400435993142895e-06, |
| "loss": 0.0137, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.8939393939393939, |
| "grad_norm": 1.0663758516311646, |
| "learning_rate": 4.938461966484914e-06, |
| "loss": 0.01, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.898989898989899, |
| "grad_norm": 0.9510229229927063, |
| "learning_rate": 4.936860002707096e-06, |
| "loss": 0.0085, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.9040404040404041, |
| "grad_norm": 0.8557943105697632, |
| "learning_rate": 4.935237721337384e-06, |
| "loss": 0.0141, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.9090909090909091, |
| "grad_norm": 0.7711794972419739, |
| "learning_rate": 4.933595135901733e-06, |
| "loss": 0.0117, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.9141414141414141, |
| "grad_norm": 0.8024590015411377, |
| "learning_rate": 4.931932260095379e-06, |
| "loss": 0.01, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.9191919191919192, |
| "grad_norm": 0.8047268390655518, |
| "learning_rate": 4.9302491077827366e-06, |
| "loss": 0.0192, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.9242424242424242, |
| "grad_norm": 0.5250588655471802, |
| "learning_rate": 4.928545692997275e-06, |
| "loss": 0.0096, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.9292929292929293, |
| "grad_norm": 0.45513755083084106, |
| "learning_rate": 4.926822029941406e-06, |
| "loss": 0.0087, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.9343434343434344, |
| "grad_norm": 0.4509701728820801, |
| "learning_rate": 4.925078132986361e-06, |
| "loss": 0.0118, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.9393939393939394, |
| "grad_norm": 0.5309934616088867, |
| "learning_rate": 4.923314016672075e-06, |
| "loss": 0.0084, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.9444444444444444, |
| "grad_norm": 0.4668470621109009, |
| "learning_rate": 4.921529695707065e-06, |
| "loss": 0.0076, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.9494949494949495, |
| "grad_norm": 0.5394138097763062, |
| "learning_rate": 4.919725184968307e-06, |
| "loss": 0.0129, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.9545454545454546, |
| "grad_norm": 0.5527902841567993, |
| "learning_rate": 4.917900499501109e-06, |
| "loss": 0.0099, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.9595959595959596, |
| "grad_norm": 0.4318401515483856, |
| "learning_rate": 4.9160556545189895e-06, |
| "loss": 0.0116, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.9646464646464646, |
| "grad_norm": 0.5038670301437378, |
| "learning_rate": 4.91419066540355e-06, |
| "loss": 0.0089, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.9696969696969697, |
| "grad_norm": 0.5830440521240234, |
| "learning_rate": 4.9123055477043454e-06, |
| "loss": 0.0072, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.9747474747474747, |
| "grad_norm": 0.3858352303504944, |
| "learning_rate": 4.910400317138752e-06, |
| "loss": 0.0078, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.9797979797979798, |
| "grad_norm": 0.7046027183532715, |
| "learning_rate": 4.908474989591846e-06, |
| "loss": 0.0111, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.9848484848484849, |
| "grad_norm": 0.8276668787002563, |
| "learning_rate": 4.906529581116259e-06, |
| "loss": 0.011, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.98989898989899, |
| "grad_norm": 0.8134333491325378, |
| "learning_rate": 4.904564107932048e-06, |
| "loss": 0.0073, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.9949494949494949, |
| "grad_norm": 0.41067278385162354, |
| "learning_rate": 4.902578586426569e-06, |
| "loss": 0.0073, |
| "step": 197 |
| }, |
| { |
| "epoch": 1.0, |
| "grad_norm": 0.6972447633743286, |
| "learning_rate": 4.900573033154325e-06, |
| "loss": 0.0113, |
| "step": 198 |
| }, |
| { |
| "epoch": 1.005050505050505, |
| "grad_norm": 0.7097163200378418, |
| "learning_rate": 4.898547464836844e-06, |
| "loss": 0.0105, |
| "step": 199 |
| }, |
| { |
| "epoch": 1.0101010101010102, |
| "grad_norm": 0.3972426652908325, |
| "learning_rate": 4.8965018983625245e-06, |
| "loss": 0.0059, |
| "step": 200 |
| }, |
| { |
| "epoch": 1.0151515151515151, |
| "grad_norm": 0.32025033235549927, |
| "learning_rate": 4.8944363507865065e-06, |
| "loss": 0.0025, |
| "step": 201 |
| }, |
| { |
| "epoch": 1.02020202020202, |
| "grad_norm": 0.5081179141998291, |
| "learning_rate": 4.8923508393305224e-06, |
| "loss": 0.0037, |
| "step": 202 |
| }, |
| { |
| "epoch": 1.0252525252525253, |
| "grad_norm": 0.24843256175518036, |
| "learning_rate": 4.890245381382757e-06, |
| "loss": 0.0021, |
| "step": 203 |
| }, |
| { |
| "epoch": 1.0303030303030303, |
| "grad_norm": 0.2488572746515274, |
| "learning_rate": 4.888119994497701e-06, |
| "loss": 0.0032, |
| "step": 204 |
| }, |
| { |
| "epoch": 1.0353535353535352, |
| "grad_norm": 0.8642904758453369, |
| "learning_rate": 4.885974696396002e-06, |
| "loss": 0.0079, |
| "step": 205 |
| }, |
| { |
| "epoch": 1.0404040404040404, |
| "grad_norm": 0.7817063331604004, |
| "learning_rate": 4.883809504964325e-06, |
| "loss": 0.0052, |
| "step": 206 |
| }, |
| { |
| "epoch": 1.0454545454545454, |
| "grad_norm": 0.5490451455116272, |
| "learning_rate": 4.881624438255194e-06, |
| "loss": 0.007, |
| "step": 207 |
| }, |
| { |
| "epoch": 1.0505050505050506, |
| "grad_norm": 0.9993918538093567, |
| "learning_rate": 4.879419514486846e-06, |
| "loss": 0.0103, |
| "step": 208 |
| }, |
| { |
| "epoch": 1.0555555555555556, |
| "grad_norm": 0.6243229508399963, |
| "learning_rate": 4.8771947520430785e-06, |
| "loss": 0.002, |
| "step": 209 |
| }, |
| { |
| "epoch": 1.0606060606060606, |
| "grad_norm": 0.5708845853805542, |
| "learning_rate": 4.874950169473097e-06, |
| "loss": 0.0026, |
| "step": 210 |
| }, |
| { |
| "epoch": 1.0656565656565657, |
| "grad_norm": 0.29404523968696594, |
| "learning_rate": 4.872685785491357e-06, |
| "loss": 0.0035, |
| "step": 211 |
| }, |
| { |
| "epoch": 1.0707070707070707, |
| "grad_norm": 0.44978681206703186, |
| "learning_rate": 4.870401618977415e-06, |
| "loss": 0.0033, |
| "step": 212 |
| }, |
| { |
| "epoch": 1.0757575757575757, |
| "grad_norm": 0.34965094923973083, |
| "learning_rate": 4.868097688975763e-06, |
| "loss": 0.0053, |
| "step": 213 |
| }, |
| { |
| "epoch": 1.0808080808080809, |
| "grad_norm": 0.4668480455875397, |
| "learning_rate": 4.8657740146956724e-06, |
| "loss": 0.0046, |
| "step": 214 |
| }, |
| { |
| "epoch": 1.0858585858585859, |
| "grad_norm": 0.6791878342628479, |
| "learning_rate": 4.863430615511039e-06, |
| "loss": 0.004, |
| "step": 215 |
| }, |
| { |
| "epoch": 1.0909090909090908, |
| "grad_norm": 0.4435560703277588, |
| "learning_rate": 4.8610675109602135e-06, |
| "loss": 0.0045, |
| "step": 216 |
| }, |
| { |
| "epoch": 1.095959595959596, |
| "grad_norm": 0.8708865642547607, |
| "learning_rate": 4.858684720745843e-06, |
| "loss": 0.0065, |
| "step": 217 |
| }, |
| { |
| "epoch": 1.101010101010101, |
| "grad_norm": 0.6024020314216614, |
| "learning_rate": 4.856282264734708e-06, |
| "loss": 0.0079, |
| "step": 218 |
| }, |
| { |
| "epoch": 1.106060606060606, |
| "grad_norm": 0.9265580773353577, |
| "learning_rate": 4.8538601629575525e-06, |
| "loss": 0.0047, |
| "step": 219 |
| }, |
| { |
| "epoch": 1.1111111111111112, |
| "grad_norm": 0.4640383720397949, |
| "learning_rate": 4.851418435608919e-06, |
| "loss": 0.0061, |
| "step": 220 |
| }, |
| { |
| "epoch": 1.1161616161616161, |
| "grad_norm": 0.36199602484703064, |
| "learning_rate": 4.848957103046984e-06, |
| "loss": 0.0051, |
| "step": 221 |
| }, |
| { |
| "epoch": 1.121212121212121, |
| "grad_norm": 0.4664932191371918, |
| "learning_rate": 4.84647618579338e-06, |
| "loss": 0.0033, |
| "step": 222 |
| }, |
| { |
| "epoch": 1.1262626262626263, |
| "grad_norm": 0.23365701735019684, |
| "learning_rate": 4.843975704533031e-06, |
| "loss": 0.0031, |
| "step": 223 |
| }, |
| { |
| "epoch": 1.1313131313131313, |
| "grad_norm": 0.40371668338775635, |
| "learning_rate": 4.841455680113979e-06, |
| "loss": 0.0072, |
| "step": 224 |
| }, |
| { |
| "epoch": 1.1363636363636362, |
| "grad_norm": 0.8710446357727051, |
| "learning_rate": 4.838916133547208e-06, |
| "loss": 0.0062, |
| "step": 225 |
| }, |
| { |
| "epoch": 1.1414141414141414, |
| "grad_norm": 0.5429500341415405, |
| "learning_rate": 4.836357086006471e-06, |
| "loss": 0.0034, |
| "step": 226 |
| }, |
| { |
| "epoch": 1.1464646464646464, |
| "grad_norm": 0.7119227051734924, |
| "learning_rate": 4.8337785588281125e-06, |
| "loss": 0.0069, |
| "step": 227 |
| }, |
| { |
| "epoch": 1.1515151515151516, |
| "grad_norm": 0.35653722286224365, |
| "learning_rate": 4.83118057351089e-06, |
| "loss": 0.0044, |
| "step": 228 |
| }, |
| { |
| "epoch": 1.1565656565656566, |
| "grad_norm": 0.34784218668937683, |
| "learning_rate": 4.828563151715795e-06, |
| "loss": 0.0025, |
| "step": 229 |
| }, |
| { |
| "epoch": 1.1616161616161615, |
| "grad_norm": 0.6477975845336914, |
| "learning_rate": 4.825926315265874e-06, |
| "loss": 0.0059, |
| "step": 230 |
| }, |
| { |
| "epoch": 1.1666666666666667, |
| "grad_norm": 0.557756781578064, |
| "learning_rate": 4.823270086146045e-06, |
| "loss": 0.0048, |
| "step": 231 |
| }, |
| { |
| "epoch": 1.1717171717171717, |
| "grad_norm": 0.4651699960231781, |
| "learning_rate": 4.820594486502913e-06, |
| "loss": 0.0064, |
| "step": 232 |
| }, |
| { |
| "epoch": 1.1767676767676767, |
| "grad_norm": 0.32300034165382385, |
| "learning_rate": 4.81789953864459e-06, |
| "loss": 0.0017, |
| "step": 233 |
| }, |
| { |
| "epoch": 1.1818181818181819, |
| "grad_norm": 0.23726579546928406, |
| "learning_rate": 4.815185265040504e-06, |
| "loss": 0.0023, |
| "step": 234 |
| }, |
| { |
| "epoch": 1.1868686868686869, |
| "grad_norm": 0.6567448973655701, |
| "learning_rate": 4.812451688321213e-06, |
| "loss": 0.0028, |
| "step": 235 |
| }, |
| { |
| "epoch": 1.1919191919191918, |
| "grad_norm": 0.2410786747932434, |
| "learning_rate": 4.809698831278217e-06, |
| "loss": 0.0009, |
| "step": 236 |
| }, |
| { |
| "epoch": 1.196969696969697, |
| "grad_norm": 0.839699923992157, |
| "learning_rate": 4.80692671686377e-06, |
| "loss": 0.005, |
| "step": 237 |
| }, |
| { |
| "epoch": 1.202020202020202, |
| "grad_norm": 1.173795461654663, |
| "learning_rate": 4.804135368190684e-06, |
| "loss": 0.0021, |
| "step": 238 |
| }, |
| { |
| "epoch": 1.2070707070707072, |
| "grad_norm": 0.5123177170753479, |
| "learning_rate": 4.8013248085321394e-06, |
| "loss": 0.0043, |
| "step": 239 |
| }, |
| { |
| "epoch": 1.2121212121212122, |
| "grad_norm": 0.6547465920448303, |
| "learning_rate": 4.798495061321492e-06, |
| "loss": 0.0056, |
| "step": 240 |
| }, |
| { |
| "epoch": 1.2171717171717171, |
| "grad_norm": 1.0825695991516113, |
| "learning_rate": 4.795646150152072e-06, |
| "loss": 0.0046, |
| "step": 241 |
| }, |
| { |
| "epoch": 1.2222222222222223, |
| "grad_norm": 0.25643694400787354, |
| "learning_rate": 4.792778098776997e-06, |
| "loss": 0.0015, |
| "step": 242 |
| }, |
| { |
| "epoch": 1.2272727272727273, |
| "grad_norm": 0.35064786672592163, |
| "learning_rate": 4.789890931108963e-06, |
| "loss": 0.0018, |
| "step": 243 |
| }, |
| { |
| "epoch": 1.2323232323232323, |
| "grad_norm": 0.6860871315002441, |
| "learning_rate": 4.786984671220053e-06, |
| "loss": 0.0045, |
| "step": 244 |
| }, |
| { |
| "epoch": 1.2373737373737375, |
| "grad_norm": 0.5118893384933472, |
| "learning_rate": 4.784059343341531e-06, |
| "loss": 0.0026, |
| "step": 245 |
| }, |
| { |
| "epoch": 1.2424242424242424, |
| "grad_norm": 0.3446030616760254, |
| "learning_rate": 4.7811149718636475e-06, |
| "loss": 0.0026, |
| "step": 246 |
| }, |
| { |
| "epoch": 1.2474747474747474, |
| "grad_norm": 0.29767417907714844, |
| "learning_rate": 4.778151581335427e-06, |
| "loss": 0.0005, |
| "step": 247 |
| }, |
| { |
| "epoch": 1.2525252525252526, |
| "grad_norm": 0.41258764266967773, |
| "learning_rate": 4.7751691964644655e-06, |
| "loss": 0.0036, |
| "step": 248 |
| }, |
| { |
| "epoch": 1.2575757575757576, |
| "grad_norm": 0.1846575289964676, |
| "learning_rate": 4.772167842116732e-06, |
| "loss": 0.0014, |
| "step": 249 |
| }, |
| { |
| "epoch": 1.2626262626262625, |
| "grad_norm": 0.5577757358551025, |
| "learning_rate": 4.7691475433163515e-06, |
| "loss": 0.002, |
| "step": 250 |
| }, |
| { |
| "epoch": 1.2676767676767677, |
| "grad_norm": 2.5368878841400146, |
| "learning_rate": 4.766108325245403e-06, |
| "loss": 0.005, |
| "step": 251 |
| }, |
| { |
| "epoch": 1.2727272727272727, |
| "grad_norm": 0.24205638468265533, |
| "learning_rate": 4.763050213243705e-06, |
| "loss": 0.0022, |
| "step": 252 |
| }, |
| { |
| "epoch": 1.2777777777777777, |
| "grad_norm": 1.3182103633880615, |
| "learning_rate": 4.759973232808609e-06, |
| "loss": 0.005, |
| "step": 253 |
| }, |
| { |
| "epoch": 1.2828282828282829, |
| "grad_norm": 0.13516050577163696, |
| "learning_rate": 4.7568774095947804e-06, |
| "loss": 0.0007, |
| "step": 254 |
| }, |
| { |
| "epoch": 1.2878787878787878, |
| "grad_norm": 0.2980661392211914, |
| "learning_rate": 4.753762769413991e-06, |
| "loss": 0.0033, |
| "step": 255 |
| }, |
| { |
| "epoch": 1.2929292929292928, |
| "grad_norm": 0.25073671340942383, |
| "learning_rate": 4.7506293382349e-06, |
| "loss": 0.0008, |
| "step": 256 |
| }, |
| { |
| "epoch": 1.297979797979798, |
| "grad_norm": 0.21918445825576782, |
| "learning_rate": 4.747477142182839e-06, |
| "loss": 0.0009, |
| "step": 257 |
| }, |
| { |
| "epoch": 1.303030303030303, |
| "grad_norm": 0.18419255316257477, |
| "learning_rate": 4.744306207539595e-06, |
| "loss": 0.0014, |
| "step": 258 |
| }, |
| { |
| "epoch": 1.308080808080808, |
| "grad_norm": 0.5623617172241211, |
| "learning_rate": 4.741116560743189e-06, |
| "loss": 0.0036, |
| "step": 259 |
| }, |
| { |
| "epoch": 1.3131313131313131, |
| "grad_norm": 0.675177276134491, |
| "learning_rate": 4.737908228387656e-06, |
| "loss": 0.0058, |
| "step": 260 |
| }, |
| { |
| "epoch": 1.3181818181818181, |
| "grad_norm": 0.6051934361457825, |
| "learning_rate": 4.734681237222825e-06, |
| "loss": 0.0008, |
| "step": 261 |
| }, |
| { |
| "epoch": 1.3232323232323233, |
| "grad_norm": 0.4990449845790863, |
| "learning_rate": 4.731435614154094e-06, |
| "loss": 0.0021, |
| "step": 262 |
| }, |
| { |
| "epoch": 1.3282828282828283, |
| "grad_norm": 0.42459845542907715, |
| "learning_rate": 4.7281713862422074e-06, |
| "loss": 0.0025, |
| "step": 263 |
| }, |
| { |
| "epoch": 1.3333333333333333, |
| "grad_norm": 0.5847693085670471, |
| "learning_rate": 4.72488858070303e-06, |
| "loss": 0.0045, |
| "step": 264 |
| }, |
| { |
| "epoch": 1.3383838383838385, |
| "grad_norm": 0.512243390083313, |
| "learning_rate": 4.721587224907317e-06, |
| "loss": 0.0035, |
| "step": 265 |
| }, |
| { |
| "epoch": 1.3434343434343434, |
| "grad_norm": 0.4338761270046234, |
| "learning_rate": 4.718267346380492e-06, |
| "loss": 0.0022, |
| "step": 266 |
| }, |
| { |
| "epoch": 1.3484848484848486, |
| "grad_norm": 0.449686735868454, |
| "learning_rate": 4.714928972802412e-06, |
| "loss": 0.0035, |
| "step": 267 |
| }, |
| { |
| "epoch": 1.3535353535353536, |
| "grad_norm": 0.34395352005958557, |
| "learning_rate": 4.711572132007139e-06, |
| "loss": 0.0011, |
| "step": 268 |
| }, |
| { |
| "epoch": 1.3585858585858586, |
| "grad_norm": 0.5509817004203796, |
| "learning_rate": 4.708196851982706e-06, |
| "loss": 0.0055, |
| "step": 269 |
| }, |
| { |
| "epoch": 1.3636363636363638, |
| "grad_norm": 0.7635436058044434, |
| "learning_rate": 4.704803160870888e-06, |
| "loss": 0.0013, |
| "step": 270 |
| }, |
| { |
| "epoch": 1.3686868686868687, |
| "grad_norm": 0.6281613111495972, |
| "learning_rate": 4.701391086966962e-06, |
| "loss": 0.0024, |
| "step": 271 |
| }, |
| { |
| "epoch": 1.3737373737373737, |
| "grad_norm": 0.27791550755500793, |
| "learning_rate": 4.697960658719475e-06, |
| "loss": 0.0011, |
| "step": 272 |
| }, |
| { |
| "epoch": 1.378787878787879, |
| "grad_norm": 0.18364505469799042, |
| "learning_rate": 4.694511904730004e-06, |
| "loss": 0.0007, |
| "step": 273 |
| }, |
| { |
| "epoch": 1.3838383838383839, |
| "grad_norm": 0.3883489966392517, |
| "learning_rate": 4.69104485375292e-06, |
| "loss": 0.003, |
| "step": 274 |
| }, |
| { |
| "epoch": 1.3888888888888888, |
| "grad_norm": 0.36030399799346924, |
| "learning_rate": 4.687559534695149e-06, |
| "loss": 0.0026, |
| "step": 275 |
| }, |
| { |
| "epoch": 1.393939393939394, |
| "grad_norm": 0.6687548160552979, |
| "learning_rate": 4.684055976615924e-06, |
| "loss": 0.0036, |
| "step": 276 |
| }, |
| { |
| "epoch": 1.398989898989899, |
| "grad_norm": 0.3158160150051117, |
| "learning_rate": 4.6805342087265534e-06, |
| "loss": 0.0033, |
| "step": 277 |
| }, |
| { |
| "epoch": 1.404040404040404, |
| "grad_norm": 0.30688995122909546, |
| "learning_rate": 4.676994260390168e-06, |
| "loss": 0.0038, |
| "step": 278 |
| }, |
| { |
| "epoch": 1.4090909090909092, |
| "grad_norm": 3.210895538330078, |
| "learning_rate": 4.6734361611214825e-06, |
| "loss": 0.002, |
| "step": 279 |
| }, |
| { |
| "epoch": 1.4141414141414141, |
| "grad_norm": 0.2669866383075714, |
| "learning_rate": 4.6698599405865465e-06, |
| "loss": 0.0019, |
| "step": 280 |
| }, |
| { |
| "epoch": 1.4191919191919191, |
| "grad_norm": 0.15304385125637054, |
| "learning_rate": 4.666265628602496e-06, |
| "loss": 0.0008, |
| "step": 281 |
| }, |
| { |
| "epoch": 1.4242424242424243, |
| "grad_norm": 0.6389310359954834, |
| "learning_rate": 4.662653255137308e-06, |
| "loss": 0.0008, |
| "step": 282 |
| }, |
| { |
| "epoch": 1.4292929292929293, |
| "grad_norm": 0.5978128910064697, |
| "learning_rate": 4.65902285030955e-06, |
| "loss": 0.0018, |
| "step": 283 |
| }, |
| { |
| "epoch": 1.4343434343434343, |
| "grad_norm": 0.9742176532745361, |
| "learning_rate": 4.655374444388127e-06, |
| "loss": 0.0053, |
| "step": 284 |
| }, |
| { |
| "epoch": 1.4393939393939394, |
| "grad_norm": 0.6034579277038574, |
| "learning_rate": 4.651708067792029e-06, |
| "loss": 0.0024, |
| "step": 285 |
| }, |
| { |
| "epoch": 1.4444444444444444, |
| "grad_norm": 0.50716233253479, |
| "learning_rate": 4.648023751090079e-06, |
| "loss": 0.0023, |
| "step": 286 |
| }, |
| { |
| "epoch": 1.4494949494949494, |
| "grad_norm": 0.2467270791530609, |
| "learning_rate": 4.644321525000681e-06, |
| "loss": 0.001, |
| "step": 287 |
| }, |
| { |
| "epoch": 1.4545454545454546, |
| "grad_norm": 0.2956307530403137, |
| "learning_rate": 4.640601420391554e-06, |
| "loss": 0.0025, |
| "step": 288 |
| }, |
| { |
| "epoch": 1.4595959595959596, |
| "grad_norm": 0.25785166025161743, |
| "learning_rate": 4.6368634682794875e-06, |
| "loss": 0.0025, |
| "step": 289 |
| }, |
| { |
| "epoch": 1.4646464646464645, |
| "grad_norm": 0.5894668698310852, |
| "learning_rate": 4.633107699830073e-06, |
| "loss": 0.0025, |
| "step": 290 |
| }, |
| { |
| "epoch": 1.4696969696969697, |
| "grad_norm": 0.31446027755737305, |
| "learning_rate": 4.629334146357448e-06, |
| "loss": 0.0021, |
| "step": 291 |
| }, |
| { |
| "epoch": 1.4747474747474747, |
| "grad_norm": 1.2636518478393555, |
| "learning_rate": 4.625542839324036e-06, |
| "loss": 0.0014, |
| "step": 292 |
| }, |
| { |
| "epoch": 1.4797979797979797, |
| "grad_norm": 0.38668882846832275, |
| "learning_rate": 4.621733810340282e-06, |
| "loss": 0.0022, |
| "step": 293 |
| }, |
| { |
| "epoch": 1.4848484848484849, |
| "grad_norm": 0.32553184032440186, |
| "learning_rate": 4.617907091164389e-06, |
| "loss": 0.0037, |
| "step": 294 |
| }, |
| { |
| "epoch": 1.4898989898989898, |
| "grad_norm": 0.3489368259906769, |
| "learning_rate": 4.614062713702055e-06, |
| "loss": 0.0021, |
| "step": 295 |
| }, |
| { |
| "epoch": 1.494949494949495, |
| "grad_norm": 0.5913827419281006, |
| "learning_rate": 4.610200710006206e-06, |
| "loss": 0.0026, |
| "step": 296 |
| }, |
| { |
| "epoch": 1.5, |
| "grad_norm": 0.2569572925567627, |
| "learning_rate": 4.606321112276728e-06, |
| "loss": 0.0007, |
| "step": 297 |
| }, |
| { |
| "epoch": 1.5050505050505052, |
| "grad_norm": 0.2640990912914276, |
| "learning_rate": 4.602423952860199e-06, |
| "loss": 0.0011, |
| "step": 298 |
| }, |
| { |
| "epoch": 1.51010101010101, |
| "grad_norm": 0.13946537673473358, |
| "learning_rate": 4.598509264249621e-06, |
| "loss": 0.0008, |
| "step": 299 |
| }, |
| { |
| "epoch": 1.5151515151515151, |
| "grad_norm": 0.3428565263748169, |
| "learning_rate": 4.594577079084146e-06, |
| "loss": 0.0027, |
| "step": 300 |
| }, |
| { |
| "epoch": 1.5202020202020203, |
| "grad_norm": 0.10775818675756454, |
| "learning_rate": 4.590627430148806e-06, |
| "loss": 0.0005, |
| "step": 301 |
| }, |
| { |
| "epoch": 1.5252525252525253, |
| "grad_norm": 0.10182857513427734, |
| "learning_rate": 4.58666035037424e-06, |
| "loss": 0.0005, |
| "step": 302 |
| }, |
| { |
| "epoch": 1.5303030303030303, |
| "grad_norm": 0.8627449870109558, |
| "learning_rate": 4.582675872836417e-06, |
| "loss": 0.0034, |
| "step": 303 |
| }, |
| { |
| "epoch": 1.5353535353535355, |
| "grad_norm": 0.1706748604774475, |
| "learning_rate": 4.578674030756364e-06, |
| "loss": 0.001, |
| "step": 304 |
| }, |
| { |
| "epoch": 1.5404040404040404, |
| "grad_norm": 0.22486886382102966, |
| "learning_rate": 4.574654857499883e-06, |
| "loss": 0.0006, |
| "step": 305 |
| }, |
| { |
| "epoch": 1.5454545454545454, |
| "grad_norm": 0.132318377494812, |
| "learning_rate": 4.57061838657728e-06, |
| "loss": 0.0002, |
| "step": 306 |
| }, |
| { |
| "epoch": 1.5505050505050506, |
| "grad_norm": 0.11632445454597473, |
| "learning_rate": 4.566564651643083e-06, |
| "loss": 0.0008, |
| "step": 307 |
| }, |
| { |
| "epoch": 1.5555555555555556, |
| "grad_norm": 0.5458851456642151, |
| "learning_rate": 4.562493686495756e-06, |
| "loss": 0.0017, |
| "step": 308 |
| }, |
| { |
| "epoch": 1.5606060606060606, |
| "grad_norm": 0.3893134593963623, |
| "learning_rate": 4.558405525077426e-06, |
| "loss": 0.0008, |
| "step": 309 |
| }, |
| { |
| "epoch": 1.5656565656565657, |
| "grad_norm": 0.14303813874721527, |
| "learning_rate": 4.5543002014735955e-06, |
| "loss": 0.0008, |
| "step": 310 |
| }, |
| { |
| "epoch": 1.5707070707070707, |
| "grad_norm": 0.06348168104887009, |
| "learning_rate": 4.550177749912857e-06, |
| "loss": 0.0001, |
| "step": 311 |
| }, |
| { |
| "epoch": 1.5757575757575757, |
| "grad_norm": 0.024890398606657982, |
| "learning_rate": 4.546038204766609e-06, |
| "loss": 0.0001, |
| "step": 312 |
| }, |
| { |
| "epoch": 1.5808080808080809, |
| "grad_norm": 0.2720322012901306, |
| "learning_rate": 4.541881600548771e-06, |
| "loss": 0.0007, |
| "step": 313 |
| }, |
| { |
| "epoch": 1.5858585858585859, |
| "grad_norm": 2.1944384574890137, |
| "learning_rate": 4.537707971915495e-06, |
| "loss": 0.0046, |
| "step": 314 |
| }, |
| { |
| "epoch": 1.5909090909090908, |
| "grad_norm": 0.7526996731758118, |
| "learning_rate": 4.533517353664874e-06, |
| "loss": 0.0035, |
| "step": 315 |
| }, |
| { |
| "epoch": 1.595959595959596, |
| "grad_norm": 0.5112221240997314, |
| "learning_rate": 4.529309780736654e-06, |
| "loss": 0.0028, |
| "step": 316 |
| }, |
| { |
| "epoch": 1.601010101010101, |
| "grad_norm": 0.514263391494751, |
| "learning_rate": 4.525085288211943e-06, |
| "loss": 0.0035, |
| "step": 317 |
| }, |
| { |
| "epoch": 1.606060606060606, |
| "grad_norm": 0.22527411580085754, |
| "learning_rate": 4.520843911312922e-06, |
| "loss": 0.0005, |
| "step": 318 |
| }, |
| { |
| "epoch": 1.6111111111111112, |
| "grad_norm": 0.3148433566093445, |
| "learning_rate": 4.5165856854025394e-06, |
| "loss": 0.0015, |
| "step": 319 |
| }, |
| { |
| "epoch": 1.6161616161616161, |
| "grad_norm": 0.186680406332016, |
| "learning_rate": 4.512310645984231e-06, |
| "loss": 0.0002, |
| "step": 320 |
| }, |
| { |
| "epoch": 1.621212121212121, |
| "grad_norm": 0.30500105023384094, |
| "learning_rate": 4.508018828701613e-06, |
| "loss": 0.003, |
| "step": 321 |
| }, |
| { |
| "epoch": 1.6262626262626263, |
| "grad_norm": 0.11456169933080673, |
| "learning_rate": 4.503710269338191e-06, |
| "loss": 0.0008, |
| "step": 322 |
| }, |
| { |
| "epoch": 1.6313131313131313, |
| "grad_norm": 0.1492011696100235, |
| "learning_rate": 4.4993850038170586e-06, |
| "loss": 0.0009, |
| "step": 323 |
| }, |
| { |
| "epoch": 1.6363636363636362, |
| "grad_norm": 0.059317488223314285, |
| "learning_rate": 4.4950430682005995e-06, |
| "loss": 0.0003, |
| "step": 324 |
| }, |
| { |
| "epoch": 1.6414141414141414, |
| "grad_norm": 0.12276207655668259, |
| "learning_rate": 4.490684498690186e-06, |
| "loss": 0.0005, |
| "step": 325 |
| }, |
| { |
| "epoch": 1.6464646464646466, |
| "grad_norm": 0.15645022690296173, |
| "learning_rate": 4.486309331625877e-06, |
| "loss": 0.0004, |
| "step": 326 |
| }, |
| { |
| "epoch": 1.6515151515151514, |
| "grad_norm": 0.2627776861190796, |
| "learning_rate": 4.4819176034861146e-06, |
| "loss": 0.0034, |
| "step": 327 |
| }, |
| { |
| "epoch": 1.6565656565656566, |
| "grad_norm": 0.23384395241737366, |
| "learning_rate": 4.477509350887424e-06, |
| "loss": 0.0007, |
| "step": 328 |
| }, |
| { |
| "epoch": 1.6616161616161618, |
| "grad_norm": 0.16817571222782135, |
| "learning_rate": 4.4730846105841e-06, |
| "loss": 0.0004, |
| "step": 329 |
| }, |
| { |
| "epoch": 1.6666666666666665, |
| "grad_norm": 0.291900634765625, |
| "learning_rate": 4.468643419467909e-06, |
| "loss": 0.0017, |
| "step": 330 |
| }, |
| { |
| "epoch": 1.6717171717171717, |
| "grad_norm": 0.34208860993385315, |
| "learning_rate": 4.464185814567779e-06, |
| "loss": 0.0013, |
| "step": 331 |
| }, |
| { |
| "epoch": 1.676767676767677, |
| "grad_norm": 2.485881805419922, |
| "learning_rate": 4.459711833049485e-06, |
| "loss": 0.002, |
| "step": 332 |
| }, |
| { |
| "epoch": 1.6818181818181817, |
| "grad_norm": 0.22386501729488373, |
| "learning_rate": 4.455221512215347e-06, |
| "loss": 0.0014, |
| "step": 333 |
| }, |
| { |
| "epoch": 1.6868686868686869, |
| "grad_norm": 0.6212871074676514, |
| "learning_rate": 4.4507148895039165e-06, |
| "loss": 0.0043, |
| "step": 334 |
| }, |
| { |
| "epoch": 1.691919191919192, |
| "grad_norm": 0.5749779343605042, |
| "learning_rate": 4.4461920024896624e-06, |
| "loss": 0.0026, |
| "step": 335 |
| }, |
| { |
| "epoch": 1.696969696969697, |
| "grad_norm": 0.05218998342752457, |
| "learning_rate": 4.4416528888826595e-06, |
| "loss": 0.0003, |
| "step": 336 |
| }, |
| { |
| "epoch": 1.702020202020202, |
| "grad_norm": 0.18819659948349, |
| "learning_rate": 4.437097586528271e-06, |
| "loss": 0.0003, |
| "step": 337 |
| }, |
| { |
| "epoch": 1.7070707070707072, |
| "grad_norm": 0.3252980709075928, |
| "learning_rate": 4.432526133406843e-06, |
| "loss": 0.0006, |
| "step": 338 |
| }, |
| { |
| "epoch": 1.7121212121212122, |
| "grad_norm": 0.05513511970639229, |
| "learning_rate": 4.427938567633371e-06, |
| "loss": 0.0002, |
| "step": 339 |
| }, |
| { |
| "epoch": 1.7171717171717171, |
| "grad_norm": 0.22534117102622986, |
| "learning_rate": 4.423334927457198e-06, |
| "loss": 0.0004, |
| "step": 340 |
| }, |
| { |
| "epoch": 1.7222222222222223, |
| "grad_norm": 0.5896280407905579, |
| "learning_rate": 4.418715251261686e-06, |
| "loss": 0.0044, |
| "step": 341 |
| }, |
| { |
| "epoch": 1.7272727272727273, |
| "grad_norm": 0.29156655073165894, |
| "learning_rate": 4.414079577563901e-06, |
| "loss": 0.0004, |
| "step": 342 |
| }, |
| { |
| "epoch": 1.7323232323232323, |
| "grad_norm": 0.40658339858055115, |
| "learning_rate": 4.409427945014289e-06, |
| "loss": 0.0013, |
| "step": 343 |
| }, |
| { |
| "epoch": 1.7373737373737375, |
| "grad_norm": 0.11740848422050476, |
| "learning_rate": 4.404760392396355e-06, |
| "loss": 0.0002, |
| "step": 344 |
| }, |
| { |
| "epoch": 1.7424242424242424, |
| "grad_norm": 0.32155877351760864, |
| "learning_rate": 4.4000769586263394e-06, |
| "loss": 0.0005, |
| "step": 345 |
| }, |
| { |
| "epoch": 1.7474747474747474, |
| "grad_norm": 0.32882601022720337, |
| "learning_rate": 4.3953776827528925e-06, |
| "loss": 0.0009, |
| "step": 346 |
| }, |
| { |
| "epoch": 1.7525252525252526, |
| "grad_norm": 0.02031019888818264, |
| "learning_rate": 4.390662603956751e-06, |
| "loss": 0.0001, |
| "step": 347 |
| }, |
| { |
| "epoch": 1.7575757575757576, |
| "grad_norm": 0.030053604394197464, |
| "learning_rate": 4.385931761550411e-06, |
| "loss": 0.0001, |
| "step": 348 |
| }, |
| { |
| "epoch": 1.7626262626262625, |
| "grad_norm": 0.04812119901180267, |
| "learning_rate": 4.381185194977796e-06, |
| "loss": 0.0001, |
| "step": 349 |
| }, |
| { |
| "epoch": 1.7676767676767677, |
| "grad_norm": 0.7133746147155762, |
| "learning_rate": 4.376422943813936e-06, |
| "loss": 0.0013, |
| "step": 350 |
| }, |
| { |
| "epoch": 1.7727272727272727, |
| "grad_norm": 0.13918650150299072, |
| "learning_rate": 4.37164504776463e-06, |
| "loss": 0.0004, |
| "step": 351 |
| }, |
| { |
| "epoch": 1.7777777777777777, |
| "grad_norm": 0.029156319797039032, |
| "learning_rate": 4.366851546666118e-06, |
| "loss": 0.0001, |
| "step": 352 |
| }, |
| { |
| "epoch": 1.7828282828282829, |
| "grad_norm": 0.23016220331192017, |
| "learning_rate": 4.362042480484753e-06, |
| "loss": 0.0014, |
| "step": 353 |
| }, |
| { |
| "epoch": 1.7878787878787878, |
| "grad_norm": 0.015141244977712631, |
| "learning_rate": 4.357217889316657e-06, |
| "loss": 0.0001, |
| "step": 354 |
| }, |
| { |
| "epoch": 1.7929292929292928, |
| "grad_norm": 0.04107781499624252, |
| "learning_rate": 4.352377813387398e-06, |
| "loss": 0.0002, |
| "step": 355 |
| }, |
| { |
| "epoch": 1.797979797979798, |
| "grad_norm": 0.12357518076896667, |
| "learning_rate": 4.3475222930516484e-06, |
| "loss": 0.0002, |
| "step": 356 |
| }, |
| { |
| "epoch": 1.803030303030303, |
| "grad_norm": 0.1132410392165184, |
| "learning_rate": 4.34265136879285e-06, |
| "loss": 0.0021, |
| "step": 357 |
| }, |
| { |
| "epoch": 1.808080808080808, |
| "grad_norm": 0.2640242278575897, |
| "learning_rate": 4.3377650812228765e-06, |
| "loss": 0.0028, |
| "step": 358 |
| }, |
| { |
| "epoch": 1.8131313131313131, |
| "grad_norm": 0.3503035008907318, |
| "learning_rate": 4.332863471081695e-06, |
| "loss": 0.0006, |
| "step": 359 |
| }, |
| { |
| "epoch": 1.8181818181818183, |
| "grad_norm": 0.20633821189403534, |
| "learning_rate": 4.327946579237028e-06, |
| "loss": 0.0005, |
| "step": 360 |
| }, |
| { |
| "epoch": 1.823232323232323, |
| "grad_norm": 0.7339881658554077, |
| "learning_rate": 4.323014446684007e-06, |
| "loss": 0.0013, |
| "step": 361 |
| }, |
| { |
| "epoch": 1.8282828282828283, |
| "grad_norm": 0.32832375168800354, |
| "learning_rate": 4.318067114544838e-06, |
| "loss": 0.0017, |
| "step": 362 |
| }, |
| { |
| "epoch": 1.8333333333333335, |
| "grad_norm": 0.2648046314716339, |
| "learning_rate": 4.313104624068456e-06, |
| "loss": 0.0011, |
| "step": 363 |
| }, |
| { |
| "epoch": 1.8383838383838382, |
| "grad_norm": 0.14635734260082245, |
| "learning_rate": 4.308127016630176e-06, |
| "loss": 0.0008, |
| "step": 364 |
| }, |
| { |
| "epoch": 1.8434343434343434, |
| "grad_norm": 0.7941786646842957, |
| "learning_rate": 4.303134333731354e-06, |
| "loss": 0.0027, |
| "step": 365 |
| }, |
| { |
| "epoch": 1.8484848484848486, |
| "grad_norm": 0.02228306420147419, |
| "learning_rate": 4.2981266169990436e-06, |
| "loss": 0.0001, |
| "step": 366 |
| }, |
| { |
| "epoch": 1.8535353535353534, |
| "grad_norm": 0.03166576102375984, |
| "learning_rate": 4.293103908185638e-06, |
| "loss": 0.0001, |
| "step": 367 |
| }, |
| { |
| "epoch": 1.8585858585858586, |
| "grad_norm": 0.055547453463077545, |
| "learning_rate": 4.2880662491685345e-06, |
| "loss": 0.0002, |
| "step": 368 |
| }, |
| { |
| "epoch": 1.8636363636363638, |
| "grad_norm": 0.2905639708042145, |
| "learning_rate": 4.283013681949774e-06, |
| "loss": 0.0002, |
| "step": 369 |
| }, |
| { |
| "epoch": 1.8686868686868687, |
| "grad_norm": 0.1792776733636856, |
| "learning_rate": 4.277946248655701e-06, |
| "loss": 0.0005, |
| "step": 370 |
| }, |
| { |
| "epoch": 1.8737373737373737, |
| "grad_norm": 0.1844152808189392, |
| "learning_rate": 4.272863991536604e-06, |
| "loss": 0.0004, |
| "step": 371 |
| }, |
| { |
| "epoch": 1.878787878787879, |
| "grad_norm": 0.5641722679138184, |
| "learning_rate": 4.267766952966369e-06, |
| "loss": 0.0012, |
| "step": 372 |
| }, |
| { |
| "epoch": 1.8838383838383839, |
| "grad_norm": 0.7322543859481812, |
| "learning_rate": 4.262655175442123e-06, |
| "loss": 0.0019, |
| "step": 373 |
| }, |
| { |
| "epoch": 1.8888888888888888, |
| "grad_norm": 0.1359657496213913, |
| "learning_rate": 4.257528701583882e-06, |
| "loss": 0.0004, |
| "step": 374 |
| }, |
| { |
| "epoch": 1.893939393939394, |
| "grad_norm": 0.06701353937387466, |
| "learning_rate": 4.2523875741341905e-06, |
| "loss": 0.0002, |
| "step": 375 |
| }, |
| { |
| "epoch": 1.898989898989899, |
| "grad_norm": 0.07855173945426941, |
| "learning_rate": 4.247231835957773e-06, |
| "loss": 0.0001, |
| "step": 376 |
| }, |
| { |
| "epoch": 1.904040404040404, |
| "grad_norm": 0.2692381739616394, |
| "learning_rate": 4.242061530041173e-06, |
| "loss": 0.0017, |
| "step": 377 |
| }, |
| { |
| "epoch": 1.9090909090909092, |
| "grad_norm": 0.059216227382421494, |
| "learning_rate": 4.236876699492391e-06, |
| "loss": 0.0001, |
| "step": 378 |
| }, |
| { |
| "epoch": 1.9141414141414141, |
| "grad_norm": 0.14110217988491058, |
| "learning_rate": 4.231677387540528e-06, |
| "loss": 0.0002, |
| "step": 379 |
| }, |
| { |
| "epoch": 1.9191919191919191, |
| "grad_norm": 0.02432100474834442, |
| "learning_rate": 4.226463637535429e-06, |
| "loss": 0.0001, |
| "step": 380 |
| }, |
| { |
| "epoch": 1.9242424242424243, |
| "grad_norm": 2.4970834255218506, |
| "learning_rate": 4.221235492947316e-06, |
| "loss": 0.0011, |
| "step": 381 |
| }, |
| { |
| "epoch": 1.9292929292929293, |
| "grad_norm": 0.029205068945884705, |
| "learning_rate": 4.215992997366425e-06, |
| "loss": 0.0001, |
| "step": 382 |
| }, |
| { |
| "epoch": 1.9343434343434343, |
| "grad_norm": 0.1291147619485855, |
| "learning_rate": 4.21073619450265e-06, |
| "loss": 0.0002, |
| "step": 383 |
| }, |
| { |
| "epoch": 1.9393939393939394, |
| "grad_norm": 0.24579289555549622, |
| "learning_rate": 4.2054651281851685e-06, |
| "loss": 0.0028, |
| "step": 384 |
| }, |
| { |
| "epoch": 1.9444444444444444, |
| "grad_norm": 0.10453365743160248, |
| "learning_rate": 4.200179842362084e-06, |
| "loss": 0.0003, |
| "step": 385 |
| }, |
| { |
| "epoch": 1.9494949494949494, |
| "grad_norm": 0.1425495594739914, |
| "learning_rate": 4.1948803811000585e-06, |
| "loss": 0.0007, |
| "step": 386 |
| }, |
| { |
| "epoch": 1.9545454545454546, |
| "grad_norm": 0.0607614740729332, |
| "learning_rate": 4.189566788583938e-06, |
| "loss": 0.0002, |
| "step": 387 |
| }, |
| { |
| "epoch": 1.9595959595959596, |
| "grad_norm": 0.1010780856013298, |
| "learning_rate": 4.184239109116393e-06, |
| "loss": 0.0002, |
| "step": 388 |
| }, |
| { |
| "epoch": 1.9646464646464645, |
| "grad_norm": 0.09742361307144165, |
| "learning_rate": 4.178897387117547e-06, |
| "loss": 0.0001, |
| "step": 389 |
| }, |
| { |
| "epoch": 1.9696969696969697, |
| "grad_norm": 0.13673080503940582, |
| "learning_rate": 4.173541667124599e-06, |
| "loss": 0.0002, |
| "step": 390 |
| }, |
| { |
| "epoch": 1.9747474747474747, |
| "grad_norm": 0.28466007113456726, |
| "learning_rate": 4.168171993791465e-06, |
| "loss": 0.0004, |
| "step": 391 |
| }, |
| { |
| "epoch": 1.9797979797979797, |
| "grad_norm": 0.07821296155452728, |
| "learning_rate": 4.1627884118883925e-06, |
| "loss": 0.0001, |
| "step": 392 |
| }, |
| { |
| "epoch": 1.9848484848484849, |
| "grad_norm": 0.42731398344039917, |
| "learning_rate": 4.157390966301597e-06, |
| "loss": 0.0014, |
| "step": 393 |
| }, |
| { |
| "epoch": 1.98989898989899, |
| "grad_norm": 0.19062280654907227, |
| "learning_rate": 4.1519797020328815e-06, |
| "loss": 0.002, |
| "step": 394 |
| }, |
| { |
| "epoch": 1.9949494949494948, |
| "grad_norm": 0.033713385462760925, |
| "learning_rate": 4.146554664199265e-06, |
| "loss": 0.0001, |
| "step": 395 |
| }, |
| { |
| "epoch": 2.0, |
| "grad_norm": 0.3922223746776581, |
| "learning_rate": 4.141115898032607e-06, |
| "loss": 0.002, |
| "step": 396 |
| }, |
| { |
| "epoch": 2.005050505050505, |
| "grad_norm": 0.09248466789722443, |
| "learning_rate": 4.135663448879225e-06, |
| "loss": 0.0002, |
| "step": 397 |
| }, |
| { |
| "epoch": 2.01010101010101, |
| "grad_norm": 0.013212003745138645, |
| "learning_rate": 4.130197362199521e-06, |
| "loss": 0.0001, |
| "step": 398 |
| }, |
| { |
| "epoch": 2.015151515151515, |
| "grad_norm": 0.02999984472990036, |
| "learning_rate": 4.124717683567605e-06, |
| "loss": 0.0001, |
| "step": 399 |
| }, |
| { |
| "epoch": 2.0202020202020203, |
| "grad_norm": 0.05465150251984596, |
| "learning_rate": 4.119224458670905e-06, |
| "loss": 0.0001, |
| "step": 400 |
| }, |
| { |
| "epoch": 2.025252525252525, |
| "grad_norm": 0.01147720031440258, |
| "learning_rate": 4.113717733309798e-06, |
| "loss": 0.0001, |
| "step": 401 |
| }, |
| { |
| "epoch": 2.0303030303030303, |
| "grad_norm": 0.3443642258644104, |
| "learning_rate": 4.1081975533972185e-06, |
| "loss": 0.0002, |
| "step": 402 |
| }, |
| { |
| "epoch": 2.0353535353535355, |
| "grad_norm": 0.012025265023112297, |
| "learning_rate": 4.102663964958285e-06, |
| "loss": 0.0, |
| "step": 403 |
| }, |
| { |
| "epoch": 2.04040404040404, |
| "grad_norm": 0.007129222620278597, |
| "learning_rate": 4.097117014129903e-06, |
| "loss": 0.0, |
| "step": 404 |
| }, |
| { |
| "epoch": 2.0454545454545454, |
| "grad_norm": 0.011770972982048988, |
| "learning_rate": 4.091556747160398e-06, |
| "loss": 0.0001, |
| "step": 405 |
| }, |
| { |
| "epoch": 2.0505050505050506, |
| "grad_norm": 0.01947842165827751, |
| "learning_rate": 4.085983210409114e-06, |
| "loss": 0.0001, |
| "step": 406 |
| }, |
| { |
| "epoch": 2.0555555555555554, |
| "grad_norm": 0.10189538449048996, |
| "learning_rate": 4.0803964503460345e-06, |
| "loss": 0.001, |
| "step": 407 |
| }, |
| { |
| "epoch": 2.0606060606060606, |
| "grad_norm": 0.009692806750535965, |
| "learning_rate": 4.074796513551395e-06, |
| "loss": 0.0, |
| "step": 408 |
| }, |
| { |
| "epoch": 2.0656565656565657, |
| "grad_norm": 0.3666267395019531, |
| "learning_rate": 4.069183446715292e-06, |
| "loss": 0.0022, |
| "step": 409 |
| }, |
| { |
| "epoch": 2.0707070707070705, |
| "grad_norm": 0.05267680808901787, |
| "learning_rate": 4.063557296637295e-06, |
| "loss": 0.0001, |
| "step": 410 |
| }, |
| { |
| "epoch": 2.0757575757575757, |
| "grad_norm": 0.01215702947229147, |
| "learning_rate": 4.057918110226059e-06, |
| "loss": 0.0, |
| "step": 411 |
| }, |
| { |
| "epoch": 2.080808080808081, |
| "grad_norm": 0.013252578675746918, |
| "learning_rate": 4.052265934498929e-06, |
| "loss": 0.0001, |
| "step": 412 |
| }, |
| { |
| "epoch": 2.0858585858585856, |
| "grad_norm": 0.007566288113594055, |
| "learning_rate": 4.04660081658155e-06, |
| "loss": 0.0, |
| "step": 413 |
| }, |
| { |
| "epoch": 2.090909090909091, |
| "grad_norm": 0.00914331991225481, |
| "learning_rate": 4.040922803707474e-06, |
| "loss": 0.0, |
| "step": 414 |
| }, |
| { |
| "epoch": 2.095959595959596, |
| "grad_norm": 0.007612581364810467, |
| "learning_rate": 4.035231943217765e-06, |
| "loss": 0.0, |
| "step": 415 |
| }, |
| { |
| "epoch": 2.101010101010101, |
| "grad_norm": 0.06358092278242111, |
| "learning_rate": 4.029528282560609e-06, |
| "loss": 0.0001, |
| "step": 416 |
| }, |
| { |
| "epoch": 2.106060606060606, |
| "grad_norm": 0.015951288864016533, |
| "learning_rate": 4.023811869290911e-06, |
| "loss": 0.0001, |
| "step": 417 |
| }, |
| { |
| "epoch": 2.111111111111111, |
| "grad_norm": 0.00994573812931776, |
| "learning_rate": 4.018082751069904e-06, |
| "loss": 0.0, |
| "step": 418 |
| }, |
| { |
| "epoch": 2.1161616161616164, |
| "grad_norm": 0.018615180626511574, |
| "learning_rate": 4.01234097566475e-06, |
| "loss": 0.0001, |
| "step": 419 |
| }, |
| { |
| "epoch": 2.121212121212121, |
| "grad_norm": 0.007956521585583687, |
| "learning_rate": 4.006586590948141e-06, |
| "loss": 0.0, |
| "step": 420 |
| }, |
| { |
| "epoch": 2.1262626262626263, |
| "grad_norm": 0.02259342558681965, |
| "learning_rate": 4.000819644897902e-06, |
| "loss": 0.0001, |
| "step": 421 |
| }, |
| { |
| "epoch": 2.1313131313131315, |
| "grad_norm": 0.011591397225856781, |
| "learning_rate": 3.995040185596588e-06, |
| "loss": 0.0, |
| "step": 422 |
| }, |
| { |
| "epoch": 2.1363636363636362, |
| "grad_norm": 0.007842698134481907, |
| "learning_rate": 3.989248261231084e-06, |
| "loss": 0.0, |
| "step": 423 |
| }, |
| { |
| "epoch": 2.1414141414141414, |
| "grad_norm": 0.07917871326208115, |
| "learning_rate": 3.983443920092206e-06, |
| "loss": 0.0, |
| "step": 424 |
| }, |
| { |
| "epoch": 2.1464646464646466, |
| "grad_norm": 0.0059897336177527905, |
| "learning_rate": 3.977627210574295e-06, |
| "loss": 0.0, |
| "step": 425 |
| }, |
| { |
| "epoch": 2.1515151515151514, |
| "grad_norm": 0.004839922301471233, |
| "learning_rate": 3.971798181174816e-06, |
| "loss": 0.0, |
| "step": 426 |
| }, |
| { |
| "epoch": 2.1565656565656566, |
| "grad_norm": 0.0059188189916312695, |
| "learning_rate": 3.96595688049395e-06, |
| "loss": 0.0, |
| "step": 427 |
| }, |
| { |
| "epoch": 2.1616161616161618, |
| "grad_norm": 0.010813100263476372, |
| "learning_rate": 3.960103357234192e-06, |
| "loss": 0.0, |
| "step": 428 |
| }, |
| { |
| "epoch": 2.1666666666666665, |
| "grad_norm": 0.008660750463604927, |
| "learning_rate": 3.954237660199946e-06, |
| "loss": 0.0, |
| "step": 429 |
| }, |
| { |
| "epoch": 2.1717171717171717, |
| "grad_norm": 0.009066971018910408, |
| "learning_rate": 3.948359838297115e-06, |
| "loss": 0.0, |
| "step": 430 |
| }, |
| { |
| "epoch": 2.176767676767677, |
| "grad_norm": 0.11104936897754669, |
| "learning_rate": 3.942469940532694e-06, |
| "loss": 0.0006, |
| "step": 431 |
| }, |
| { |
| "epoch": 2.1818181818181817, |
| "grad_norm": 0.12647657096385956, |
| "learning_rate": 3.9365680160143595e-06, |
| "loss": 0.0006, |
| "step": 432 |
| }, |
| { |
| "epoch": 2.186868686868687, |
| "grad_norm": 0.011482702568173409, |
| "learning_rate": 3.93065411395007e-06, |
| "loss": 0.0, |
| "step": 433 |
| }, |
| { |
| "epoch": 2.191919191919192, |
| "grad_norm": 0.006080338265746832, |
| "learning_rate": 3.924728283647638e-06, |
| "loss": 0.0, |
| "step": 434 |
| }, |
| { |
| "epoch": 2.196969696969697, |
| "grad_norm": 0.003556201932951808, |
| "learning_rate": 3.918790574514338e-06, |
| "loss": 0.0, |
| "step": 435 |
| }, |
| { |
| "epoch": 2.202020202020202, |
| "grad_norm": 0.037242766469717026, |
| "learning_rate": 3.91284103605648e-06, |
| "loss": 0.0, |
| "step": 436 |
| }, |
| { |
| "epoch": 2.207070707070707, |
| "grad_norm": 0.0035004725214093924, |
| "learning_rate": 3.906879717879005e-06, |
| "loss": 0.0, |
| "step": 437 |
| }, |
| { |
| "epoch": 2.212121212121212, |
| "grad_norm": 0.19019567966461182, |
| "learning_rate": 3.9009066696850664e-06, |
| "loss": 0.0009, |
| "step": 438 |
| }, |
| { |
| "epoch": 2.217171717171717, |
| "grad_norm": 0.3347993493080139, |
| "learning_rate": 3.89492194127562e-06, |
| "loss": 0.0036, |
| "step": 439 |
| }, |
| { |
| "epoch": 2.2222222222222223, |
| "grad_norm": 0.013379104435443878, |
| "learning_rate": 3.888925582549006e-06, |
| "loss": 0.0, |
| "step": 440 |
| }, |
| { |
| "epoch": 2.227272727272727, |
| "grad_norm": 0.22218060493469238, |
| "learning_rate": 3.882917643500534e-06, |
| "loss": 0.0022, |
| "step": 441 |
| }, |
| { |
| "epoch": 2.2323232323232323, |
| "grad_norm": 0.011584995314478874, |
| "learning_rate": 3.8768981742220646e-06, |
| "loss": 0.0, |
| "step": 442 |
| }, |
| { |
| "epoch": 2.2373737373737375, |
| "grad_norm": 0.012700509279966354, |
| "learning_rate": 3.870867224901595e-06, |
| "loss": 0.0, |
| "step": 443 |
| }, |
| { |
| "epoch": 2.242424242424242, |
| "grad_norm": 0.11896836757659912, |
| "learning_rate": 3.864824845822837e-06, |
| "loss": 0.0001, |
| "step": 444 |
| }, |
| { |
| "epoch": 2.2474747474747474, |
| "grad_norm": 0.010297846049070358, |
| "learning_rate": 3.8587710873647985e-06, |
| "loss": 0.0, |
| "step": 445 |
| }, |
| { |
| "epoch": 2.2525252525252526, |
| "grad_norm": 0.008279677480459213, |
| "learning_rate": 3.852706000001367e-06, |
| "loss": 0.0, |
| "step": 446 |
| }, |
| { |
| "epoch": 2.257575757575758, |
| "grad_norm": 0.25368809700012207, |
| "learning_rate": 3.846629634300883e-06, |
| "loss": 0.0005, |
| "step": 447 |
| }, |
| { |
| "epoch": 2.2626262626262625, |
| "grad_norm": 0.03778368607163429, |
| "learning_rate": 3.840542040925725e-06, |
| "loss": 0.0001, |
| "step": 448 |
| }, |
| { |
| "epoch": 2.2676767676767677, |
| "grad_norm": 0.017897954210639, |
| "learning_rate": 3.834443270631879e-06, |
| "loss": 0.0001, |
| "step": 449 |
| }, |
| { |
| "epoch": 2.2727272727272725, |
| "grad_norm": 0.011958397924900055, |
| "learning_rate": 3.828333374268523e-06, |
| "loss": 0.0001, |
| "step": 450 |
| }, |
| { |
| "epoch": 2.2777777777777777, |
| "grad_norm": 0.03435271605849266, |
| "learning_rate": 3.8222124027775994e-06, |
| "loss": 0.0001, |
| "step": 451 |
| }, |
| { |
| "epoch": 2.282828282828283, |
| "grad_norm": 0.012268775142729282, |
| "learning_rate": 3.81608040719339e-06, |
| "loss": 0.0001, |
| "step": 452 |
| }, |
| { |
| "epoch": 2.287878787878788, |
| "grad_norm": 0.03738567978143692, |
| "learning_rate": 3.8099374386420906e-06, |
| "loss": 0.0001, |
| "step": 453 |
| }, |
| { |
| "epoch": 2.292929292929293, |
| "grad_norm": 0.027487782761454582, |
| "learning_rate": 3.8037835483413877e-06, |
| "loss": 0.0001, |
| "step": 454 |
| }, |
| { |
| "epoch": 2.297979797979798, |
| "grad_norm": 0.02155883237719536, |
| "learning_rate": 3.7976187876000266e-06, |
| "loss": 0.0001, |
| "step": 455 |
| }, |
| { |
| "epoch": 2.303030303030303, |
| "grad_norm": 0.044139981269836426, |
| "learning_rate": 3.7914432078173867e-06, |
| "loss": 0.0001, |
| "step": 456 |
| }, |
| { |
| "epoch": 2.308080808080808, |
| "grad_norm": 0.10077373683452606, |
| "learning_rate": 3.785256860483054e-06, |
| "loss": 0.0001, |
| "step": 457 |
| }, |
| { |
| "epoch": 2.313131313131313, |
| "grad_norm": 0.01995331421494484, |
| "learning_rate": 3.7790597971763892e-06, |
| "loss": 0.0, |
| "step": 458 |
| }, |
| { |
| "epoch": 2.3181818181818183, |
| "grad_norm": 0.014978822320699692, |
| "learning_rate": 3.772852069566097e-06, |
| "loss": 0.0, |
| "step": 459 |
| }, |
| { |
| "epoch": 2.323232323232323, |
| "grad_norm": 0.0070231300778687, |
| "learning_rate": 3.7666337294097987e-06, |
| "loss": 0.0, |
| "step": 460 |
| }, |
| { |
| "epoch": 2.3282828282828283, |
| "grad_norm": 0.007256317883729935, |
| "learning_rate": 3.760404828553599e-06, |
| "loss": 0.0, |
| "step": 461 |
| }, |
| { |
| "epoch": 2.3333333333333335, |
| "grad_norm": 0.02269594930112362, |
| "learning_rate": 3.7541654189316525e-06, |
| "loss": 0.0001, |
| "step": 462 |
| }, |
| { |
| "epoch": 2.3383838383838382, |
| "grad_norm": 0.004164062440395355, |
| "learning_rate": 3.7479155525657323e-06, |
| "loss": 0.0, |
| "step": 463 |
| }, |
| { |
| "epoch": 2.3434343434343434, |
| "grad_norm": 0.00466076610609889, |
| "learning_rate": 3.741655281564796e-06, |
| "loss": 0.0, |
| "step": 464 |
| }, |
| { |
| "epoch": 2.3484848484848486, |
| "grad_norm": 0.009731332771480083, |
| "learning_rate": 3.735384658124551e-06, |
| "loss": 0.0, |
| "step": 465 |
| }, |
| { |
| "epoch": 2.3535353535353534, |
| "grad_norm": 0.46250560879707336, |
| "learning_rate": 3.72910373452702e-06, |
| "loss": 0.0003, |
| "step": 466 |
| }, |
| { |
| "epoch": 2.3585858585858586, |
| "grad_norm": 0.009878060780465603, |
| "learning_rate": 3.722812563140102e-06, |
| "loss": 0.0, |
| "step": 467 |
| }, |
| { |
| "epoch": 2.3636363636363638, |
| "grad_norm": 0.006603723857551813, |
| "learning_rate": 3.7165111964171407e-06, |
| "loss": 0.0, |
| "step": 468 |
| }, |
| { |
| "epoch": 2.3686868686868685, |
| "grad_norm": 0.17878268659114838, |
| "learning_rate": 3.7101996868964836e-06, |
| "loss": 0.001, |
| "step": 469 |
| }, |
| { |
| "epoch": 2.3737373737373737, |
| "grad_norm": 0.0032485495321452618, |
| "learning_rate": 3.703878087201044e-06, |
| "loss": 0.0, |
| "step": 470 |
| }, |
| { |
| "epoch": 2.378787878787879, |
| "grad_norm": 0.004560573026537895, |
| "learning_rate": 3.697546450037865e-06, |
| "loss": 0.0, |
| "step": 471 |
| }, |
| { |
| "epoch": 2.3838383838383836, |
| "grad_norm": 0.007358085829764605, |
| "learning_rate": 3.6912048281976764e-06, |
| "loss": 0.0, |
| "step": 472 |
| }, |
| { |
| "epoch": 2.388888888888889, |
| "grad_norm": 0.007086644880473614, |
| "learning_rate": 3.6848532745544574e-06, |
| "loss": 0.0, |
| "step": 473 |
| }, |
| { |
| "epoch": 2.393939393939394, |
| "grad_norm": 0.06341815739870071, |
| "learning_rate": 3.6784918420649952e-06, |
| "loss": 0.0003, |
| "step": 474 |
| }, |
| { |
| "epoch": 2.398989898989899, |
| "grad_norm": 0.005435152444988489, |
| "learning_rate": 3.67212058376844e-06, |
| "loss": 0.0, |
| "step": 475 |
| }, |
| { |
| "epoch": 2.404040404040404, |
| "grad_norm": 0.004161132499575615, |
| "learning_rate": 3.66573955278587e-06, |
| "loss": 0.0, |
| "step": 476 |
| }, |
| { |
| "epoch": 2.409090909090909, |
| "grad_norm": 0.006175799760967493, |
| "learning_rate": 3.6593488023198427e-06, |
| "loss": 0.0, |
| "step": 477 |
| }, |
| { |
| "epoch": 2.4141414141414144, |
| "grad_norm": 0.015126085840165615, |
| "learning_rate": 3.6529483856539512e-06, |
| "loss": 0.0, |
| "step": 478 |
| }, |
| { |
| "epoch": 2.419191919191919, |
| "grad_norm": 0.003908907063305378, |
| "learning_rate": 3.6465383561523825e-06, |
| "loss": 0.0, |
| "step": 479 |
| }, |
| { |
| "epoch": 2.4242424242424243, |
| "grad_norm": 0.0020491559989750385, |
| "learning_rate": 3.640118767259474e-06, |
| "loss": 0.0, |
| "step": 480 |
| }, |
| { |
| "epoch": 2.429292929292929, |
| "grad_norm": 0.00249357963912189, |
| "learning_rate": 3.6336896724992614e-06, |
| "loss": 0.0, |
| "step": 481 |
| }, |
| { |
| "epoch": 2.4343434343434343, |
| "grad_norm": 0.003509709145873785, |
| "learning_rate": 3.6272511254750403e-06, |
| "loss": 0.0, |
| "step": 482 |
| }, |
| { |
| "epoch": 2.4393939393939394, |
| "grad_norm": 0.04434852674603462, |
| "learning_rate": 3.620803179868913e-06, |
| "loss": 0.0001, |
| "step": 483 |
| }, |
| { |
| "epoch": 2.4444444444444446, |
| "grad_norm": 0.004402869381010532, |
| "learning_rate": 3.6143458894413463e-06, |
| "loss": 0.0, |
| "step": 484 |
| }, |
| { |
| "epoch": 2.4494949494949494, |
| "grad_norm": 0.19418330490589142, |
| "learning_rate": 3.6078793080307173e-06, |
| "loss": 0.0011, |
| "step": 485 |
| }, |
| { |
| "epoch": 2.4545454545454546, |
| "grad_norm": 0.006222233176231384, |
| "learning_rate": 3.6014034895528705e-06, |
| "loss": 0.0, |
| "step": 486 |
| }, |
| { |
| "epoch": 2.45959595959596, |
| "grad_norm": 0.2598673105239868, |
| "learning_rate": 3.594918488000664e-06, |
| "loss": 0.0013, |
| "step": 487 |
| }, |
| { |
| "epoch": 2.4646464646464645, |
| "grad_norm": 0.00211211945861578, |
| "learning_rate": 3.588424357443521e-06, |
| "loss": 0.0, |
| "step": 488 |
| }, |
| { |
| "epoch": 2.4696969696969697, |
| "grad_norm": 0.005605990532785654, |
| "learning_rate": 3.581921152026978e-06, |
| "loss": 0.0, |
| "step": 489 |
| }, |
| { |
| "epoch": 2.474747474747475, |
| "grad_norm": 0.007124903611838818, |
| "learning_rate": 3.5754089259722365e-06, |
| "loss": 0.0, |
| "step": 490 |
| }, |
| { |
| "epoch": 2.4797979797979797, |
| "grad_norm": 1.6332594156265259, |
| "learning_rate": 3.5688877335757055e-06, |
| "loss": 0.0024, |
| "step": 491 |
| }, |
| { |
| "epoch": 2.484848484848485, |
| "grad_norm": 0.002983370330184698, |
| "learning_rate": 3.5623576292085555e-06, |
| "loss": 0.0, |
| "step": 492 |
| }, |
| { |
| "epoch": 2.48989898989899, |
| "grad_norm": 0.04062391445040703, |
| "learning_rate": 3.5558186673162575e-06, |
| "loss": 0.0001, |
| "step": 493 |
| }, |
| { |
| "epoch": 2.494949494949495, |
| "grad_norm": 0.006106998771429062, |
| "learning_rate": 3.549270902418136e-06, |
| "loss": 0.0, |
| "step": 494 |
| }, |
| { |
| "epoch": 2.5, |
| "grad_norm": 0.04901747405529022, |
| "learning_rate": 3.542714389106911e-06, |
| "loss": 0.0001, |
| "step": 495 |
| }, |
| { |
| "epoch": 2.505050505050505, |
| "grad_norm": 0.006030434276908636, |
| "learning_rate": 3.536149182048243e-06, |
| "loss": 0.0, |
| "step": 496 |
| }, |
| { |
| "epoch": 2.51010101010101, |
| "grad_norm": 0.009680806659162045, |
| "learning_rate": 3.529575335980277e-06, |
| "loss": 0.0, |
| "step": 497 |
| }, |
| { |
| "epoch": 2.515151515151515, |
| "grad_norm": 0.003187371650710702, |
| "learning_rate": 3.5229929057131877e-06, |
| "loss": 0.0, |
| "step": 498 |
| }, |
| { |
| "epoch": 2.5202020202020203, |
| "grad_norm": 0.14173120260238647, |
| "learning_rate": 3.5164019461287215e-06, |
| "loss": 0.0004, |
| "step": 499 |
| }, |
| { |
| "epoch": 2.525252525252525, |
| "grad_norm": 0.006695139221847057, |
| "learning_rate": 3.5098025121797375e-06, |
| "loss": 0.0, |
| "step": 500 |
| }, |
| { |
| "epoch": 2.5303030303030303, |
| "grad_norm": 0.005796091165393591, |
| "learning_rate": 3.503194658889753e-06, |
| "loss": 0.0, |
| "step": 501 |
| }, |
| { |
| "epoch": 2.5353535353535355, |
| "grad_norm": 0.007152227684855461, |
| "learning_rate": 3.496578441352481e-06, |
| "loss": 0.0, |
| "step": 502 |
| }, |
| { |
| "epoch": 2.5404040404040407, |
| "grad_norm": 0.007902374491095543, |
| "learning_rate": 3.4899539147313727e-06, |
| "loss": 0.0, |
| "step": 503 |
| }, |
| { |
| "epoch": 2.5454545454545454, |
| "grad_norm": 0.04069540649652481, |
| "learning_rate": 3.4833211342591565e-06, |
| "loss": 0.0, |
| "step": 504 |
| }, |
| { |
| "epoch": 2.5505050505050506, |
| "grad_norm": 0.040364135056734085, |
| "learning_rate": 3.4766801552373804e-06, |
| "loss": 0.0001, |
| "step": 505 |
| }, |
| { |
| "epoch": 2.5555555555555554, |
| "grad_norm": 0.0079901572316885, |
| "learning_rate": 3.4700310330359456e-06, |
| "loss": 0.0, |
| "step": 506 |
| }, |
| { |
| "epoch": 2.5606060606060606, |
| "grad_norm": 0.004804165568202734, |
| "learning_rate": 3.4633738230926508e-06, |
| "loss": 0.0, |
| "step": 507 |
| }, |
| { |
| "epoch": 2.5656565656565657, |
| "grad_norm": 0.0025831114035099745, |
| "learning_rate": 3.4567085809127247e-06, |
| "loss": 0.0, |
| "step": 508 |
| }, |
| { |
| "epoch": 2.570707070707071, |
| "grad_norm": 0.012033859267830849, |
| "learning_rate": 3.450035362068368e-06, |
| "loss": 0.0, |
| "step": 509 |
| }, |
| { |
| "epoch": 2.5757575757575757, |
| "grad_norm": 0.002701899502426386, |
| "learning_rate": 3.4433542221982863e-06, |
| "loss": 0.0, |
| "step": 510 |
| }, |
| { |
| "epoch": 2.580808080808081, |
| "grad_norm": 0.008882194757461548, |
| "learning_rate": 3.4366652170072277e-06, |
| "loss": 0.0, |
| "step": 511 |
| }, |
| { |
| "epoch": 2.5858585858585856, |
| "grad_norm": 0.15618982911109924, |
| "learning_rate": 3.4299684022655196e-06, |
| "loss": 0.0006, |
| "step": 512 |
| }, |
| { |
| "epoch": 2.590909090909091, |
| "grad_norm": 0.0035035579930990934, |
| "learning_rate": 3.4232638338085996e-06, |
| "loss": 0.0, |
| "step": 513 |
| }, |
| { |
| "epoch": 2.595959595959596, |
| "grad_norm": 0.03973141312599182, |
| "learning_rate": 3.4165515675365558e-06, |
| "loss": 0.0001, |
| "step": 514 |
| }, |
| { |
| "epoch": 2.601010101010101, |
| "grad_norm": 0.0064284526742994785, |
| "learning_rate": 3.4098316594136554e-06, |
| "loss": 0.0, |
| "step": 515 |
| }, |
| { |
| "epoch": 2.606060606060606, |
| "grad_norm": 0.008207082748413086, |
| "learning_rate": 3.403104165467883e-06, |
| "loss": 0.0, |
| "step": 516 |
| }, |
| { |
| "epoch": 2.611111111111111, |
| "grad_norm": 0.02613808587193489, |
| "learning_rate": 3.3963691417904676e-06, |
| "loss": 0.0, |
| "step": 517 |
| }, |
| { |
| "epoch": 2.616161616161616, |
| "grad_norm": 0.0026936449576169252, |
| "learning_rate": 3.3896266445354208e-06, |
| "loss": 0.0, |
| "step": 518 |
| }, |
| { |
| "epoch": 2.621212121212121, |
| "grad_norm": 0.006232825573533773, |
| "learning_rate": 3.3828767299190646e-06, |
| "loss": 0.0, |
| "step": 519 |
| }, |
| { |
| "epoch": 2.6262626262626263, |
| "grad_norm": 0.004667866975069046, |
| "learning_rate": 3.376119454219565e-06, |
| "loss": 0.0, |
| "step": 520 |
| }, |
| { |
| "epoch": 2.6313131313131315, |
| "grad_norm": 0.00448667211458087, |
| "learning_rate": 3.3693548737764607e-06, |
| "loss": 0.0, |
| "step": 521 |
| }, |
| { |
| "epoch": 2.6363636363636362, |
| "grad_norm": 0.009849603287875652, |
| "learning_rate": 3.362583044990195e-06, |
| "loss": 0.0, |
| "step": 522 |
| }, |
| { |
| "epoch": 2.6414141414141414, |
| "grad_norm": 0.041966211050748825, |
| "learning_rate": 3.3558040243216463e-06, |
| "loss": 0.0, |
| "step": 523 |
| }, |
| { |
| "epoch": 2.6464646464646466, |
| "grad_norm": 0.021129852160811424, |
| "learning_rate": 3.3490178682916534e-06, |
| "loss": 0.0, |
| "step": 524 |
| }, |
| { |
| "epoch": 2.6515151515151514, |
| "grad_norm": 0.010130657814443111, |
| "learning_rate": 3.3422246334805504e-06, |
| "loss": 0.0, |
| "step": 525 |
| }, |
| { |
| "epoch": 2.6565656565656566, |
| "grad_norm": 0.004168142564594746, |
| "learning_rate": 3.335424376527688e-06, |
| "loss": 0.0, |
| "step": 526 |
| }, |
| { |
| "epoch": 2.6616161616161618, |
| "grad_norm": 0.002758739748969674, |
| "learning_rate": 3.3286171541309686e-06, |
| "loss": 0.0, |
| "step": 527 |
| }, |
| { |
| "epoch": 2.6666666666666665, |
| "grad_norm": 0.005857916083186865, |
| "learning_rate": 3.321803023046366e-06, |
| "loss": 0.0, |
| "step": 528 |
| }, |
| { |
| "epoch": 2.6717171717171717, |
| "grad_norm": 0.02048281952738762, |
| "learning_rate": 3.3149820400874575e-06, |
| "loss": 0.0, |
| "step": 529 |
| }, |
| { |
| "epoch": 2.676767676767677, |
| "grad_norm": 0.010321596637368202, |
| "learning_rate": 3.3081542621249503e-06, |
| "loss": 0.0, |
| "step": 530 |
| }, |
| { |
| "epoch": 2.6818181818181817, |
| "grad_norm": 0.008361139334738255, |
| "learning_rate": 3.301319746086203e-06, |
| "loss": 0.0, |
| "step": 531 |
| }, |
| { |
| "epoch": 2.686868686868687, |
| "grad_norm": 0.02190493792295456, |
| "learning_rate": 3.2944785489547544e-06, |
| "loss": 0.0, |
| "step": 532 |
| }, |
| { |
| "epoch": 2.691919191919192, |
| "grad_norm": 0.002024057786911726, |
| "learning_rate": 3.2876307277698495e-06, |
| "loss": 0.0, |
| "step": 533 |
| }, |
| { |
| "epoch": 2.6969696969696972, |
| "grad_norm": 0.008165915496647358, |
| "learning_rate": 3.2807763396259597e-06, |
| "loss": 0.0, |
| "step": 534 |
| }, |
| { |
| "epoch": 2.702020202020202, |
| "grad_norm": 0.0023303315974771976, |
| "learning_rate": 3.2739154416723113e-06, |
| "loss": 0.0, |
| "step": 535 |
| }, |
| { |
| "epoch": 2.707070707070707, |
| "grad_norm": 0.002119947923347354, |
| "learning_rate": 3.2670480911124045e-06, |
| "loss": 0.0, |
| "step": 536 |
| }, |
| { |
| "epoch": 2.712121212121212, |
| "grad_norm": 0.0013211388140916824, |
| "learning_rate": 3.2601743452035413e-06, |
| "loss": 0.0, |
| "step": 537 |
| }, |
| { |
| "epoch": 2.717171717171717, |
| "grad_norm": 0.003069607773795724, |
| "learning_rate": 3.2532942612563436e-06, |
| "loss": 0.0, |
| "step": 538 |
| }, |
| { |
| "epoch": 2.7222222222222223, |
| "grad_norm": 0.002737869042903185, |
| "learning_rate": 3.246407896634277e-06, |
| "loss": 0.0, |
| "step": 539 |
| }, |
| { |
| "epoch": 2.7272727272727275, |
| "grad_norm": 0.002814906882122159, |
| "learning_rate": 3.2395153087531767e-06, |
| "loss": 0.0, |
| "step": 540 |
| }, |
| { |
| "epoch": 2.7323232323232323, |
| "grad_norm": 0.00364164961501956, |
| "learning_rate": 3.23261655508076e-06, |
| "loss": 0.0, |
| "step": 541 |
| }, |
| { |
| "epoch": 2.7373737373737375, |
| "grad_norm": 0.002594191348180175, |
| "learning_rate": 3.225711693136156e-06, |
| "loss": 0.0, |
| "step": 542 |
| }, |
| { |
| "epoch": 2.742424242424242, |
| "grad_norm": 0.0014940836699679494, |
| "learning_rate": 3.2188007804894206e-06, |
| "loss": 0.0, |
| "step": 543 |
| }, |
| { |
| "epoch": 2.7474747474747474, |
| "grad_norm": 0.002774301450699568, |
| "learning_rate": 3.211883874761058e-06, |
| "loss": 0.0, |
| "step": 544 |
| }, |
| { |
| "epoch": 2.7525252525252526, |
| "grad_norm": 0.0019573576282709837, |
| "learning_rate": 3.2049610336215416e-06, |
| "loss": 0.0, |
| "step": 545 |
| }, |
| { |
| "epoch": 2.757575757575758, |
| "grad_norm": 0.0017838666681200266, |
| "learning_rate": 3.19803231479083e-06, |
| "loss": 0.0, |
| "step": 546 |
| }, |
| { |
| "epoch": 2.7626262626262625, |
| "grad_norm": 0.0023138851393014193, |
| "learning_rate": 3.1910977760378884e-06, |
| "loss": 0.0, |
| "step": 547 |
| }, |
| { |
| "epoch": 2.7676767676767677, |
| "grad_norm": 0.005525521468371153, |
| "learning_rate": 3.184157475180208e-06, |
| "loss": 0.0, |
| "step": 548 |
| }, |
| { |
| "epoch": 2.7727272727272725, |
| "grad_norm": 0.0011541108833625913, |
| "learning_rate": 3.1772114700833194e-06, |
| "loss": 0.0, |
| "step": 549 |
| }, |
| { |
| "epoch": 2.7777777777777777, |
| "grad_norm": 0.0012338707456365228, |
| "learning_rate": 3.1702598186603152e-06, |
| "loss": 0.0, |
| "step": 550 |
| }, |
| { |
| "epoch": 2.782828282828283, |
| "grad_norm": 0.0019040412735193968, |
| "learning_rate": 3.1633025788713632e-06, |
| "loss": 0.0, |
| "step": 551 |
| }, |
| { |
| "epoch": 2.787878787878788, |
| "grad_norm": 0.15179114043712616, |
| "learning_rate": 3.1563398087232265e-06, |
| "loss": 0.0003, |
| "step": 552 |
| }, |
| { |
| "epoch": 2.792929292929293, |
| "grad_norm": 0.0010482153156772256, |
| "learning_rate": 3.1493715662687774e-06, |
| "loss": 0.0, |
| "step": 553 |
| }, |
| { |
| "epoch": 2.797979797979798, |
| "grad_norm": 0.26141083240509033, |
| "learning_rate": 3.1423979096065134e-06, |
| "loss": 0.0009, |
| "step": 554 |
| }, |
| { |
| "epoch": 2.8030303030303028, |
| "grad_norm": 0.00429484061896801, |
| "learning_rate": 3.135418896880074e-06, |
| "loss": 0.0, |
| "step": 555 |
| }, |
| { |
| "epoch": 2.808080808080808, |
| "grad_norm": 0.010286355391144753, |
| "learning_rate": 3.1284345862777572e-06, |
| "loss": 0.0, |
| "step": 556 |
| }, |
| { |
| "epoch": 2.813131313131313, |
| "grad_norm": 0.0035182933788746595, |
| "learning_rate": 3.121445036032029e-06, |
| "loss": 0.0, |
| "step": 557 |
| }, |
| { |
| "epoch": 2.8181818181818183, |
| "grad_norm": 0.005269261077046394, |
| "learning_rate": 3.1144503044190456e-06, |
| "loss": 0.0, |
| "step": 558 |
| }, |
| { |
| "epoch": 2.823232323232323, |
| "grad_norm": 0.003896774258464575, |
| "learning_rate": 3.10745044975816e-06, |
| "loss": 0.0, |
| "step": 559 |
| }, |
| { |
| "epoch": 2.8282828282828283, |
| "grad_norm": 0.0011062441626563668, |
| "learning_rate": 3.100445530411442e-06, |
| "loss": 0.0, |
| "step": 560 |
| }, |
| { |
| "epoch": 2.8333333333333335, |
| "grad_norm": 0.003219842677935958, |
| "learning_rate": 3.093435604783186e-06, |
| "loss": 0.0, |
| "step": 561 |
| }, |
| { |
| "epoch": 2.8383838383838382, |
| "grad_norm": 0.0029111194889992476, |
| "learning_rate": 3.086420731319429e-06, |
| "loss": 0.0, |
| "step": 562 |
| }, |
| { |
| "epoch": 2.8434343434343434, |
| "grad_norm": 0.0015513853868469596, |
| "learning_rate": 3.0794009685074606e-06, |
| "loss": 0.0, |
| "step": 563 |
| }, |
| { |
| "epoch": 2.8484848484848486, |
| "grad_norm": 0.0007438853499479592, |
| "learning_rate": 3.0723763748753354e-06, |
| "loss": 0.0, |
| "step": 564 |
| }, |
| { |
| "epoch": 2.8535353535353534, |
| "grad_norm": 0.0032596453092992306, |
| "learning_rate": 3.065347008991386e-06, |
| "loss": 0.0, |
| "step": 565 |
| }, |
| { |
| "epoch": 2.8585858585858586, |
| "grad_norm": 0.0018467978807166219, |
| "learning_rate": 3.0583129294637342e-06, |
| "loss": 0.0, |
| "step": 566 |
| }, |
| { |
| "epoch": 2.8636363636363638, |
| "grad_norm": 0.004413960501551628, |
| "learning_rate": 3.051274194939802e-06, |
| "loss": 0.0, |
| "step": 567 |
| }, |
| { |
| "epoch": 2.8686868686868685, |
| "grad_norm": 0.020187318325042725, |
| "learning_rate": 3.044230864105821e-06, |
| "loss": 0.0, |
| "step": 568 |
| }, |
| { |
| "epoch": 2.8737373737373737, |
| "grad_norm": 0.0026113647036254406, |
| "learning_rate": 3.03718299568635e-06, |
| "loss": 0.0, |
| "step": 569 |
| }, |
| { |
| "epoch": 2.878787878787879, |
| "grad_norm": 0.006202157121151686, |
| "learning_rate": 3.030130648443777e-06, |
| "loss": 0.0, |
| "step": 570 |
| }, |
| { |
| "epoch": 2.883838383838384, |
| "grad_norm": 0.0044505237601697445, |
| "learning_rate": 3.023073881177833e-06, |
| "loss": 0.0, |
| "step": 571 |
| }, |
| { |
| "epoch": 2.888888888888889, |
| "grad_norm": 0.0023774108849465847, |
| "learning_rate": 3.0160127527250993e-06, |
| "loss": 0.0, |
| "step": 572 |
| }, |
| { |
| "epoch": 2.893939393939394, |
| "grad_norm": 0.005300168413668871, |
| "learning_rate": 3.0089473219585246e-06, |
| "loss": 0.0, |
| "step": 573 |
| }, |
| { |
| "epoch": 2.898989898989899, |
| "grad_norm": 0.0016464275540784001, |
| "learning_rate": 3.0018776477869244e-06, |
| "loss": 0.0, |
| "step": 574 |
| }, |
| { |
| "epoch": 2.904040404040404, |
| "grad_norm": 0.0017848126590251923, |
| "learning_rate": 2.994803789154495e-06, |
| "loss": 0.0, |
| "step": 575 |
| }, |
| { |
| "epoch": 2.909090909090909, |
| "grad_norm": 0.0014869156293570995, |
| "learning_rate": 2.9877258050403214e-06, |
| "loss": 0.0, |
| "step": 576 |
| }, |
| { |
| "epoch": 2.9141414141414144, |
| "grad_norm": 0.006105687469244003, |
| "learning_rate": 2.9806437544578844e-06, |
| "loss": 0.0, |
| "step": 577 |
| }, |
| { |
| "epoch": 2.919191919191919, |
| "grad_norm": 0.006673851516097784, |
| "learning_rate": 2.973557696454571e-06, |
| "loss": 0.0, |
| "step": 578 |
| }, |
| { |
| "epoch": 2.9242424242424243, |
| "grad_norm": 0.002130689565092325, |
| "learning_rate": 2.966467690111179e-06, |
| "loss": 0.0, |
| "step": 579 |
| }, |
| { |
| "epoch": 2.929292929292929, |
| "grad_norm": 0.0036351734306663275, |
| "learning_rate": 2.9593737945414264e-06, |
| "loss": 0.0, |
| "step": 580 |
| }, |
| { |
| "epoch": 2.9343434343434343, |
| "grad_norm": 0.08766695111989975, |
| "learning_rate": 2.9522760688914587e-06, |
| "loss": 0.0005, |
| "step": 581 |
| }, |
| { |
| "epoch": 2.9393939393939394, |
| "grad_norm": 0.0016932138241827488, |
| "learning_rate": 2.9451745723393547e-06, |
| "loss": 0.0, |
| "step": 582 |
| }, |
| { |
| "epoch": 2.9444444444444446, |
| "grad_norm": 0.04073040559887886, |
| "learning_rate": 2.938069364094634e-06, |
| "loss": 0.0, |
| "step": 583 |
| }, |
| { |
| "epoch": 2.9494949494949494, |
| "grad_norm": 0.0032754631247371435, |
| "learning_rate": 2.930960503397761e-06, |
| "loss": 0.0, |
| "step": 584 |
| }, |
| { |
| "epoch": 2.9545454545454546, |
| "grad_norm": 0.0012018376728519797, |
| "learning_rate": 2.9238480495196543e-06, |
| "loss": 0.0, |
| "step": 585 |
| }, |
| { |
| "epoch": 2.9595959595959593, |
| "grad_norm": 0.0016919056652113795, |
| "learning_rate": 2.916732061761192e-06, |
| "loss": 0.0, |
| "step": 586 |
| }, |
| { |
| "epoch": 2.9646464646464645, |
| "grad_norm": 0.0013172467006370425, |
| "learning_rate": 2.9096125994527147e-06, |
| "loss": 0.0, |
| "step": 587 |
| }, |
| { |
| "epoch": 2.9696969696969697, |
| "grad_norm": 0.007328435312956572, |
| "learning_rate": 2.9024897219535326e-06, |
| "loss": 0.0, |
| "step": 588 |
| }, |
| { |
| "epoch": 2.974747474747475, |
| "grad_norm": 0.0016528492560610175, |
| "learning_rate": 2.895363488651431e-06, |
| "loss": 0.0, |
| "step": 589 |
| }, |
| { |
| "epoch": 2.9797979797979797, |
| "grad_norm": 0.001276439055800438, |
| "learning_rate": 2.8882339589621742e-06, |
| "loss": 0.0, |
| "step": 590 |
| }, |
| { |
| "epoch": 2.984848484848485, |
| "grad_norm": 0.0013321200385689735, |
| "learning_rate": 2.8811011923290104e-06, |
| "loss": 0.0, |
| "step": 591 |
| }, |
| { |
| "epoch": 2.98989898989899, |
| "grad_norm": 0.002064246218651533, |
| "learning_rate": 2.873965248222178e-06, |
| "loss": 0.0, |
| "step": 592 |
| }, |
| { |
| "epoch": 2.994949494949495, |
| "grad_norm": 0.001075342413969338, |
| "learning_rate": 2.8668261861384045e-06, |
| "loss": 0.0, |
| "step": 593 |
| }, |
| { |
| "epoch": 3.0, |
| "grad_norm": 0.0037419938016682863, |
| "learning_rate": 2.859684065600417e-06, |
| "loss": 0.0, |
| "step": 594 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 1188, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 6, |
| "save_steps": 198, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": false |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 1.50046579787563e+18, |
| "train_batch_size": 4, |
| "trial_name": null, |
| "trial_params": null |
| } |
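The fields above match the Hugging Face `transformers` Trainer state layout (`trainer_state.json`): with `max_steps` of 1188 over `num_train_epochs` of 6, each epoch spans 198 steps, so `save_steps` of 198 places every checkpoint on an epoch boundary. Below is a minimal sketch for summarizing such a file, assuming it is saved as `trainer_state.json`; the path and the per-epoch grouping are illustrative, not part of the state itself.

```python
# Minimal sketch: load a Hugging Face trainer_state.json and summarize
# the training-loss trajectory per epoch. Uses only the standard library.
# The filename "trainer_state.json" is an assumption; adjust as needed.
import json
from collections import defaultdict

with open("trainer_state.json") as f:
    state = json.load(f)

history = state["log_history"]  # one dict per logged step

# Group losses by integer epoch. Subtract a tiny epsilon so boundary
# logs (epoch exactly 1.0, 2.0, ...) count toward the epoch they finish.
by_epoch = defaultdict(list)
for entry in history:
    if "loss" in entry:
        by_epoch[int(entry["epoch"] - 1e-9)].append(entry["loss"])

for epoch in sorted(by_epoch):
    losses = by_epoch[epoch]
    print(f"epoch {epoch}: mean loss {sum(losses) / len(losses):.4f} "
          f"over {len(losses)} steps")

print(f'progress: step {state["global_step"]}/{state["max_steps"]} '
      f'({state["epoch"]:.1f}/{state["num_train_epochs"]} epochs)')
```

Run against the log above, this prints one mean-loss line per epoch plus an overall progress line (step 594/1188, epoch 3.0 of 6). A summary like this makes the trajectory in this span easy to spot: the per-step loss sits at or near 0.0 from epoch 2 onward, and no evaluation entries appear in `log_history` even past step 500, so this state alone cannot distinguish convergence from memorization.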