{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 1131,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.002652519893899204, "grad_norm": 1057.6354979760426, "learning_rate": 8.771929824561404e-08, "loss": 12.3825, "step": 1 },
    { "epoch": 0.005305039787798408, "grad_norm": 927.0337289466481, "learning_rate": 1.7543859649122808e-07, "loss": 12.403, "step": 2 },
    { "epoch": 0.007957559681697613, "grad_norm": 885.2222028050505, "learning_rate": 2.6315789473684213e-07, "loss": 12.368, "step": 3 },
    { "epoch": 0.010610079575596816, "grad_norm": 986.62193425678, "learning_rate": 3.5087719298245616e-07, "loss": 12.4131, "step": 4 },
    { "epoch": 0.013262599469496022, "grad_norm": 900.0265144818454, "learning_rate": 4.385964912280702e-07, "loss": 12.3401, "step": 5 },
    { "epoch": 0.015915119363395226, "grad_norm": 916.6202200105147, "learning_rate": 5.263157894736843e-07, "loss": 12.3994, "step": 6 },
    { "epoch": 0.01856763925729443, "grad_norm": 960.7766522444683, "learning_rate": 6.140350877192982e-07, "loss": 12.1096, "step": 7 },
    { "epoch": 0.021220159151193633, "grad_norm": 901.5283770556252, "learning_rate": 7.017543859649123e-07, "loss": 11.9827, "step": 8 },
    { "epoch": 0.023872679045092837, "grad_norm": 1015.5312534342685, "learning_rate": 7.894736842105263e-07, "loss": 11.6926, "step": 9 },
    { "epoch": 0.026525198938992044, "grad_norm": 881.8548072723552, "learning_rate": 8.771929824561404e-07, "loss": 11.4372, "step": 10 },
    { "epoch": 0.029177718832891247, "grad_norm": 893.9278869644111, "learning_rate": 9.649122807017545e-07, "loss": 10.7974, "step": 11 },
    { "epoch": 0.03183023872679045, "grad_norm": 1213.0463821664953, "learning_rate": 1.0526315789473685e-06, "loss": 10.1642, "step": 12 },
    { "epoch": 0.034482758620689655, "grad_norm": 901.5212265559629, "learning_rate": 1.1403508771929824e-06, "loss": 10.1035, "step": 13 },
    { "epoch": 0.03713527851458886, "grad_norm": 812.5030326600662, "learning_rate": 1.2280701754385965e-06, "loss": 9.7278, "step": 14 },
    { "epoch": 0.03978779840848806, "grad_norm": 929.6415117694088, "learning_rate": 1.3157894736842106e-06, "loss": 8.531, "step": 15 },
    { "epoch": 0.042440318302387266, "grad_norm": 1502.3517428678672, "learning_rate": 1.4035087719298246e-06, "loss": 8.4856, "step": 16 },
    { "epoch": 0.04509283819628647, "grad_norm": 914.547959757731, "learning_rate": 1.4912280701754387e-06, "loss": 8.1597, "step": 17 },
    { "epoch": 0.04774535809018567, "grad_norm": 880.0266800450053, "learning_rate": 1.5789473684210526e-06, "loss": 8.0847, "step": 18 },
    { "epoch": 0.050397877984084884, "grad_norm": 1328.4207152672504, "learning_rate": 1.6666666666666667e-06, "loss": 7.7121, "step": 19 },
    { "epoch": 0.05305039787798409, "grad_norm": 591.8603552507835, "learning_rate": 1.7543859649122807e-06, "loss": 7.2904, "step": 20 },
    { "epoch": 0.05570291777188329, "grad_norm": 470.2427732455939, "learning_rate": 1.8421052631578948e-06, "loss": 6.167, "step": 21 },
    { "epoch": 0.058355437665782495, "grad_norm": 630.4290901807428, "learning_rate": 1.929824561403509e-06, "loss": 5.6238, "step": 22 },
    { "epoch": 0.0610079575596817, "grad_norm": 487.6885113512216, "learning_rate": 2.017543859649123e-06, "loss": 5.3303, "step": 23 },
    { "epoch": 0.0636604774535809, "grad_norm": 336.571982471426, "learning_rate": 2.105263157894737e-06, "loss": 4.8751, "step": 24 },
    { "epoch": 0.06631299734748011, "grad_norm": 352.55537532710196, "learning_rate": 2.192982456140351e-06, "loss": 4.7707, "step": 25 },
    { "epoch": 0.06896551724137931, "grad_norm": 310.95482127407337, "learning_rate": 2.280701754385965e-06, "loss": 4.5952, "step": 26 },
    { "epoch": 0.07161803713527852, "grad_norm": 322.07610035331254, "learning_rate": 2.368421052631579e-06, "loss": 4.43, "step": 27 },
    { "epoch": 0.07427055702917772, "grad_norm": 339.6242418909197, "learning_rate": 2.456140350877193e-06, "loss": 4.263, "step": 28 },
    { "epoch": 0.07692307692307693, "grad_norm": 328.1456910535762, "learning_rate": 2.5438596491228075e-06, "loss": 4.0152, "step": 29 },
    { "epoch": 0.07957559681697612, "grad_norm": 372.5572647856711, "learning_rate": 2.631578947368421e-06, "loss": 3.9055, "step": 30 },
    { "epoch": 0.08222811671087533, "grad_norm": 278.5563412172206, "learning_rate": 2.7192982456140356e-06, "loss": 3.6592, "step": 31 },
    { "epoch": 0.08488063660477453, "grad_norm": 333.58239669567297, "learning_rate": 2.8070175438596493e-06, "loss": 3.5082, "step": 32 },
    { "epoch": 0.08753315649867374, "grad_norm": 267.04537081954055, "learning_rate": 2.8947368421052634e-06, "loss": 3.3718, "step": 33 },
    { "epoch": 0.09018567639257294, "grad_norm": 251.79057269663846, "learning_rate": 2.9824561403508774e-06, "loss": 3.2668, "step": 34 },
    { "epoch": 0.09283819628647215, "grad_norm": 224.4862012598969, "learning_rate": 3.0701754385964915e-06, "loss": 3.1903, "step": 35 },
    { "epoch": 0.09549071618037135, "grad_norm": 266.3890677245296, "learning_rate": 3.157894736842105e-06, "loss": 3.0958, "step": 36 },
    { "epoch": 0.09814323607427056, "grad_norm": 228.36269849077078, "learning_rate": 3.2456140350877197e-06, "loss": 3.0408, "step": 37 },
    { "epoch": 0.10079575596816977, "grad_norm": 243.64545370794767, "learning_rate": 3.3333333333333333e-06, "loss": 2.959, "step": 38 },
    { "epoch": 0.10344827586206896, "grad_norm": 227.0526450156013, "learning_rate": 3.421052631578948e-06, "loss": 2.9202, "step": 39 },
    { "epoch": 0.10610079575596817, "grad_norm": 233.87671337953952, "learning_rate": 3.5087719298245615e-06, "loss": 2.8309, "step": 40 },
    { "epoch": 0.10875331564986737, "grad_norm": 250.00981409653355, "learning_rate": 3.596491228070176e-06, "loss": 2.8592, "step": 41 },
    { "epoch": 0.11140583554376658, "grad_norm": 252.304951717162, "learning_rate": 3.6842105263157896e-06, "loss": 2.7445, "step": 42 },
    { "epoch": 0.11405835543766578, "grad_norm": 326.26082869300905, "learning_rate": 3.7719298245614037e-06, "loss": 2.8497, "step": 43 },
    { "epoch": 0.11671087533156499, "grad_norm": 189.74885765189532, "learning_rate": 3.859649122807018e-06, "loss": 2.6597, "step": 44 },
    { "epoch": 0.11936339522546419, "grad_norm": 256.8954896530403, "learning_rate": 3.947368421052632e-06, "loss": 2.7012, "step": 45 },
    { "epoch": 0.1220159151193634, "grad_norm": 249.99226859488456, "learning_rate": 4.035087719298246e-06, "loss": 2.6438, "step": 46 },
    { "epoch": 0.1246684350132626, "grad_norm": 320.4321578125072, "learning_rate": 4.12280701754386e-06, "loss": 2.7151, "step": 47 },
    { "epoch": 0.1273209549071618, "grad_norm": 324.2878517230742, "learning_rate": 4.210526315789474e-06, "loss": 2.6928, "step": 48 },
    { "epoch": 0.129973474801061, "grad_norm": 288.2007855525106, "learning_rate": 4.298245614035088e-06, "loss": 2.5762, "step": 49 },
    { "epoch": 0.13262599469496023, "grad_norm": 217.98285338985926, "learning_rate": 4.385964912280702e-06, "loss": 2.6044, "step": 50 },
    { "epoch": 0.13527851458885942, "grad_norm": 285.67772191976934, "learning_rate": 4.473684210526316e-06, "loss": 2.8076, "step": 51 },
    { "epoch": 0.13793103448275862, "grad_norm": 231.89844477392393, "learning_rate": 4.56140350877193e-06, "loss": 2.5825, "step": 52 },
    { "epoch": 0.14058355437665782, "grad_norm": 200.35164460870405, "learning_rate": 4.649122807017544e-06, "loss": 2.5136, "step": 53 },
    { "epoch": 0.14323607427055704, "grad_norm": 181.3577300550699, "learning_rate": 4.736842105263158e-06, "loss": 2.4796, "step": 54 },
    { "epoch": 0.14588859416445624, "grad_norm": 273.8688399336017, "learning_rate": 4.824561403508772e-06, "loss": 2.9247, "step": 55 },
    { "epoch": 0.14854111405835543, "grad_norm": 192.9551561910648, "learning_rate": 4.912280701754386e-06, "loss": 2.5384, "step": 56 },
    { "epoch": 0.15119363395225463, "grad_norm": 212.15007445840294, "learning_rate": 5e-06, "loss": 2.6447, "step": 57 },
    { "epoch": 0.15384615384615385, "grad_norm": 216.7212543511908, "learning_rate": 5.087719298245615e-06, "loss": 2.629, "step": 58 },
    { "epoch": 0.15649867374005305, "grad_norm": 158.23197477110963, "learning_rate": 5.175438596491229e-06, "loss": 2.4782, "step": 59 },
    { "epoch": 0.15915119363395225, "grad_norm": 206.31152589934794, "learning_rate": 5.263157894736842e-06, "loss": 2.7059, "step": 60 },
    { "epoch": 0.16180371352785147, "grad_norm": 165.6102307287445, "learning_rate": 5.350877192982457e-06, "loss": 2.4767, "step": 61 },
    { "epoch": 0.16445623342175067, "grad_norm": 148.57888805173252, "learning_rate": 5.438596491228071e-06, "loss": 2.4419, "step": 62 },
    { "epoch": 0.16710875331564987, "grad_norm": 182.96085832221476, "learning_rate": 5.526315789473685e-06, "loss": 2.7788, "step": 63 },
    { "epoch": 0.16976127320954906, "grad_norm": 185.04475961468603, "learning_rate": 5.6140350877192985e-06, "loss": 2.5112, "step": 64 },
    { "epoch": 0.1724137931034483, "grad_norm": 181.87584559373875, "learning_rate": 5.701754385964913e-06, "loss": 2.5493, "step": 65 },
    { "epoch": 0.17506631299734748, "grad_norm": 128.89690506865657, "learning_rate": 5.789473684210527e-06, "loss": 2.5865, "step": 66 },
    { "epoch": 0.17771883289124668, "grad_norm": 125.53614943228185, "learning_rate": 5.877192982456141e-06, "loss": 2.4013, "step": 67 },
    { "epoch": 0.18037135278514588, "grad_norm": 129.28869383507703, "learning_rate": 5.964912280701755e-06, "loss": 2.5263, "step": 68 },
    { "epoch": 0.1830238726790451, "grad_norm": 129.91862094941294, "learning_rate": 6.0526315789473685e-06, "loss": 2.4461, "step": 69 },
    { "epoch": 0.1856763925729443, "grad_norm": 165.4806625611672, "learning_rate": 6.140350877192983e-06, "loss": 2.5557, "step": 70 },
    { "epoch": 0.1883289124668435, "grad_norm": 126.9540484517733, "learning_rate": 6.2280701754385975e-06, "loss": 2.3607, "step": 71 },
    { "epoch": 0.1909814323607427, "grad_norm": 192.47028479898952, "learning_rate": 6.31578947368421e-06, "loss": 2.4303, "step": 72 },
    { "epoch": 0.19363395225464192, "grad_norm": 125.72368130373515, "learning_rate": 6.403508771929825e-06, "loss": 2.341, "step": 73 },
    { "epoch": 0.1962864721485411, "grad_norm": 169.77442622506598, "learning_rate": 6.491228070175439e-06, "loss": 2.5491, "step": 74 },
    { "epoch": 0.1989389920424403, "grad_norm": 88.31745235487897, "learning_rate": 6.578947368421054e-06, "loss": 2.4596, "step": 75 },
    { "epoch": 0.20159151193633953, "grad_norm": 126.4612901115506, "learning_rate": 6.666666666666667e-06, "loss": 2.7187, "step": 76 },
    { "epoch": 0.20424403183023873, "grad_norm": 101.63379082230595, "learning_rate": 6.754385964912281e-06, "loss": 2.2475, "step": 77 },
    { "epoch": 0.20689655172413793, "grad_norm": 98.61983172878145, "learning_rate": 6.842105263157896e-06, "loss": 2.2866, "step": 78 },
    { "epoch": 0.20954907161803712, "grad_norm": 102.27354730219379, "learning_rate": 6.92982456140351e-06, "loss": 2.3918, "step": 79 },
    { "epoch": 0.21220159151193635, "grad_norm": 97.96501270450008, "learning_rate": 7.017543859649123e-06, "loss": 2.2837, "step": 80 },
    { "epoch": 0.21485411140583555, "grad_norm": 90.64902568158081, "learning_rate": 7.1052631578947375e-06, "loss": 2.2282, "step": 81 },
    { "epoch": 0.21750663129973474, "grad_norm": 99.54094606904516, "learning_rate": 7.192982456140352e-06, "loss": 2.2265, "step": 82 },
    { "epoch": 0.22015915119363394, "grad_norm": 88.23601011047764, "learning_rate": 7.280701754385966e-06, "loss": 2.1562, "step": 83 },
    { "epoch": 0.22281167108753316, "grad_norm": 108.26988105629067, "learning_rate": 7.368421052631579e-06, "loss": 2.1083, "step": 84 },
    { "epoch": 0.22546419098143236, "grad_norm": 127.34185511519445, "learning_rate": 7.456140350877194e-06, "loss": 2.3447, "step": 85 },
    { "epoch": 0.22811671087533156, "grad_norm": 94.40830394752739, "learning_rate": 7.5438596491228074e-06, "loss": 2.1414, "step": 86 },
    { "epoch": 0.23076923076923078, "grad_norm": 90.10921966388504, "learning_rate": 7.631578947368423e-06, "loss": 2.0975, "step": 87 },
    { "epoch": 0.23342175066312998, "grad_norm": 89.66896823795594, "learning_rate": 7.719298245614036e-06, "loss": 2.3711, "step": 88 },
    { "epoch": 0.23607427055702918, "grad_norm": 107.466701290302, "learning_rate": 7.80701754385965e-06, "loss": 2.4671, "step": 89 },
    { "epoch": 0.23872679045092837, "grad_norm": 94.06642429252037, "learning_rate": 7.894736842105265e-06, "loss": 2.2346, "step": 90 },
    { "epoch": 0.2413793103448276, "grad_norm": 103.89599321639466, "learning_rate": 7.982456140350877e-06, "loss": 2.2892, "step": 91 },
    { "epoch": 0.2440318302387268, "grad_norm": 74.45085250217441, "learning_rate": 8.070175438596492e-06, "loss": 2.0603, "step": 92 },
    { "epoch": 0.246684350132626, "grad_norm": 70.08394015998344, "learning_rate": 8.157894736842106e-06, "loss": 2.0869, "step": 93 },
    { "epoch": 0.2493368700265252, "grad_norm": 65.4351492030421, "learning_rate": 8.24561403508772e-06, "loss": 2.057, "step": 94 },
    { "epoch": 0.2519893899204244, "grad_norm": 83.9671759946911, "learning_rate": 8.333333333333334e-06, "loss": 2.1099, "step": 95 },
    { "epoch": 0.2546419098143236, "grad_norm": 68.22858791336857, "learning_rate": 8.421052631578948e-06, "loss": 2.0877, "step": 96 },
    { "epoch": 0.2572944297082228, "grad_norm": 55.44191583189982, "learning_rate": 8.508771929824563e-06, "loss": 1.9284, "step": 97 },
    { "epoch": 0.259946949602122, "grad_norm": 64.49521997389655, "learning_rate": 8.596491228070176e-06, "loss": 1.9862, "step": 98 },
    { "epoch": 0.2625994694960212, "grad_norm": 56.45590523647097, "learning_rate": 8.68421052631579e-06, "loss": 1.9069, "step": 99 },
    { "epoch": 0.26525198938992045, "grad_norm": 60.902670759910606, "learning_rate": 8.771929824561405e-06, "loss": 1.9144, "step": 100 },
    { "epoch": 0.26790450928381965, "grad_norm": 79.16383374365226, "learning_rate": 8.859649122807017e-06, "loss": 1.9319, "step": 101 },
    { "epoch": 0.27055702917771884, "grad_norm": 85.55643926023406, "learning_rate": 8.947368421052632e-06, "loss": 2.171, "step": 102 },
    { "epoch": 0.27320954907161804, "grad_norm": 92.74604608421551, "learning_rate": 9.035087719298246e-06, "loss": 2.2301, "step": 103 },
    { "epoch": 0.27586206896551724, "grad_norm": 140.06368126243478, "learning_rate": 9.12280701754386e-06, "loss": 1.9184, "step": 104 },
    { "epoch": 0.27851458885941643, "grad_norm": 95.18299002089283, "learning_rate": 9.210526315789474e-06, "loss": 1.8408, "step": 105 },
    { "epoch": 0.28116710875331563, "grad_norm": 123.85322526042701, "learning_rate": 9.298245614035088e-06, "loss": 2.1824, "step": 106 },
    { "epoch": 0.2838196286472148, "grad_norm": 59.082088996222765, "learning_rate": 9.385964912280703e-06, "loss": 1.7425, "step": 107 },
    { "epoch": 0.2864721485411141, "grad_norm": 92.34679112945953, "learning_rate": 9.473684210526315e-06, "loss": 1.9722, "step": 108 },
    { "epoch": 0.2891246684350133, "grad_norm": 59.49906959525333, "learning_rate": 9.56140350877193e-06, "loss": 1.6713, "step": 109 },
    { "epoch": 0.2917771883289125, "grad_norm": 57.222734212997516, "learning_rate": 9.649122807017545e-06, "loss": 1.5888, "step": 110 },
    { "epoch": 0.29442970822281167, "grad_norm": 63.42437733921843, "learning_rate": 9.736842105263159e-06, "loss": 1.6321, "step": 111 },
    { "epoch": 0.29708222811671087, "grad_norm": 85.71801880646818, "learning_rate": 9.824561403508772e-06, "loss": 1.685, "step": 112 },
    { "epoch": 0.29973474801061006, "grad_norm": 45.26455300235202, "learning_rate": 9.912280701754386e-06, "loss": 1.4516, "step": 113 },
    { "epoch": 0.30238726790450926, "grad_norm": 77.60251209005177, "learning_rate": 1e-05, "loss": 1.7045, "step": 114 },
    { "epoch": 0.3050397877984085, "grad_norm": 77.6351324144232, "learning_rate": 9.999976144006772e-06, "loss": 1.6669, "step": 115 },
    { "epoch": 0.3076923076923077, "grad_norm": 46.49302152077948, "learning_rate": 9.999904576254723e-06, "loss": 1.5303, "step": 116 },
    { "epoch": 0.3103448275862069, "grad_norm": 53.91861400227519, "learning_rate": 9.999785297426788e-06, "loss": 1.523, "step": 117 },
    { "epoch": 0.3129973474801061, "grad_norm": 44.427468868816305, "learning_rate": 9.99961830866117e-06, "loss": 1.4072, "step": 118 },
    { "epoch": 0.3156498673740053, "grad_norm": 48.244527601359856, "learning_rate": 9.999403611551341e-06, "loss": 1.5829, "step": 119 },
    { "epoch": 0.3183023872679045, "grad_norm": 69.18717865370901, "learning_rate": 9.999141208146029e-06, "loss": 1.6006, "step": 120 },
    { "epoch": 0.3209549071618037, "grad_norm": 39.38953049273077, "learning_rate": 9.998831100949188e-06, "loss": 1.3514, "step": 121 },
    { "epoch": 0.32360742705570295, "grad_norm": 43.3992283530604, "learning_rate": 9.998473292919987e-06, "loss": 1.4403, "step": 122 },
    { "epoch": 0.32625994694960214, "grad_norm": 52.42551809832924, "learning_rate": 9.998067787472772e-06, "loss": 1.3859, "step": 123 },
    { "epoch": 0.32891246684350134, "grad_norm": 62.05063837219399, "learning_rate": 9.997614588477033e-06, "loss": 1.2548, "step": 124 },
    { "epoch": 0.33156498673740054, "grad_norm": 33.51416410407643, "learning_rate": 9.997113700257383e-06, "loss": 1.2024, "step": 125 },
    { "epoch": 0.33421750663129973, "grad_norm": 39.78090613820526, "learning_rate": 9.99656512759349e-06, "loss": 1.3366, "step": 126 },
    { "epoch": 0.33687002652519893, "grad_norm": 38.74157557623179, "learning_rate": 9.995968875720052e-06, "loss": 1.2897, "step": 127 },
    { "epoch": 0.3395225464190981, "grad_norm": 45.04339821572935, "learning_rate": 9.995324950326746e-06, "loss": 1.349, "step": 128 },
    { "epoch": 0.3421750663129973, "grad_norm": 52.05381264293075, "learning_rate": 9.994633357558158e-06, "loss": 1.3799, "step": 129 },
    { "epoch": 0.3448275862068966, "grad_norm": 58.11808876418133, "learning_rate": 9.993894104013748e-06, "loss": 1.3688, "step": 130 },
    { "epoch": 0.34748010610079577, "grad_norm": 68.53043325748376, "learning_rate": 9.99310719674776e-06, "loss": 1.2633, "step": 131 },
    { "epoch": 0.35013262599469497, "grad_norm": 44.63700886778215, "learning_rate": 9.992272643269181e-06, "loss": 1.3537, "step": 132 },
    { "epoch": 0.35278514588859416, "grad_norm": 35.349197761747455, "learning_rate": 9.99139045154165e-06, "loss": 1.2383, "step": 133 },
    { "epoch": 0.35543766578249336, "grad_norm": 27.39868891118424, "learning_rate": 9.99046062998339e-06, "loss": 1.0074, "step": 134 },
    { "epoch": 0.35809018567639256, "grad_norm": 39.096330451308255, "learning_rate": 9.989483187467128e-06, "loss": 1.0671, "step": 135 },
    { "epoch": 0.36074270557029176, "grad_norm": 31.78599292309684, "learning_rate": 9.988458133320009e-06, "loss": 1.1986, "step": 136 },
    { "epoch": 0.363395225464191, "grad_norm": 37.62493778513282, "learning_rate": 9.987385477323507e-06, "loss": 1.0431, "step": 137 },
    { "epoch": 0.3660477453580902, "grad_norm": 27.795085573099815, "learning_rate": 9.986265229713332e-06, "loss": 0.9138, "step": 138 },
    { "epoch": 0.3687002652519894, "grad_norm": 32.67710623808962, "learning_rate": 9.985097401179333e-06, "loss": 1.0141, "step": 139 },
    { "epoch": 0.3713527851458886, "grad_norm": 29.985537462526572, "learning_rate": 9.983882002865392e-06, "loss": 0.8671, "step": 140 },
    { "epoch": 0.3740053050397878, "grad_norm": 26.80836366594666, "learning_rate": 9.982619046369321e-06, "loss": 0.8807, "step": 141 },
    { "epoch": 0.376657824933687, "grad_norm": 28.40085947168613, "learning_rate": 9.981308543742759e-06, "loss": 0.7791, "step": 142 },
    { "epoch": 0.3793103448275862, "grad_norm": 43.09184411908547, "learning_rate": 9.979950507491035e-06, "loss": 0.9452, "step": 143 },
    { "epoch": 0.3819628647214854, "grad_norm": 27.51993199521544, "learning_rate": 9.978544950573075e-06, "loss": 0.8276, "step": 144 },
    { "epoch": 0.38461538461538464, "grad_norm": 26.979904553674206, "learning_rate": 9.97709188640126e-06, "loss": 0.7469, "step": 145 },
    { "epoch": 0.38726790450928383, "grad_norm": 24.803132080747552, "learning_rate": 9.975591328841306e-06, "loss": 0.7504, "step": 146 },
    { "epoch": 0.38992042440318303, "grad_norm": 23.87377602560433, "learning_rate": 9.974043292212129e-06, "loss": 0.7241, "step": 147 },
    { "epoch": 0.3925729442970822, "grad_norm": 29.719548409682915, "learning_rate": 9.97244779128571e-06, "loss": 0.7729, "step": 148 },
    { "epoch": 0.3952254641909814, "grad_norm": 23.61927375262231, "learning_rate": 9.970804841286954e-06, "loss": 0.6403, "step": 149 },
    { "epoch": 0.3978779840848806, "grad_norm": 23.76952733775002, "learning_rate": 9.96911445789354e-06, "loss": 0.621, "step": 150 },
    { "epoch": 0.4005305039787798, "grad_norm": 25.17921861310616, "learning_rate": 9.96737665723578e-06, "loss": 0.6999, "step": 151 },
    { "epoch": 0.40318302387267907, "grad_norm": 38.36462354671902, "learning_rate": 9.965591455896456e-06, "loss": 0.7754, "step": 152 },
    { "epoch": 0.40583554376657827, "grad_norm": 29.312154808801118, "learning_rate": 9.963758870910672e-06, "loss": 0.667, "step": 153 },
    { "epoch": 0.40848806366047746, "grad_norm": 32.4299067250831, "learning_rate": 9.961878919765678e-06, "loss": 0.734, "step": 154 },
    { "epoch": 0.41114058355437666, "grad_norm": 52.741095672588536, "learning_rate": 9.95995162040072e-06, "loss": 1.42, "step": 155 },
    { "epoch": 0.41379310344827586, "grad_norm": 37.22083984241913, "learning_rate": 9.957976991206847e-06, "loss": 1.0212, "step": 156 },
    { "epoch": 0.41644562334217505, "grad_norm": 60.750037677882, "learning_rate": 9.95595505102676e-06, "loss": 0.8704, "step": 157 },
    { "epoch": 0.41909814323607425, "grad_norm": 71.42658049173427, "learning_rate": 9.953885819154615e-06, "loss": 1.4326, "step": 158 },
    { "epoch": 0.4217506631299735, "grad_norm": 24.577541079122234, "learning_rate": 9.951769315335843e-06, "loss": 0.7411, "step": 159 },
    { "epoch": 0.4244031830238727, "grad_norm": 26.289197928953943, "learning_rate": 9.949605559766969e-06, "loss": 0.7146, "step": 160 },
    { "epoch": 0.4270557029177719, "grad_norm": 44.52862474141508, "learning_rate": 9.947394573095403e-06, "loss": 0.6277, "step": 161 },
    { "epoch": 0.4297082228116711, "grad_norm": 31.56290419474237, "learning_rate": 9.94513637641926e-06, "loss": 0.551, "step": 162 },
    { "epoch": 0.4323607427055703, "grad_norm": 38.123849774306265, "learning_rate": 9.942830991287149e-06, "loss": 0.6985, "step": 163 },
    { "epoch": 0.4350132625994695, "grad_norm": 33.58632098064105, "learning_rate": 9.940478439697973e-06, "loss": 0.7803, "step": 164 },
    { "epoch": 0.4376657824933687, "grad_norm": 28.370024683436775, "learning_rate": 9.938078744100713e-06, "loss": 0.5784, "step": 165 },
    { "epoch": 0.4403183023872679, "grad_norm": 32.524743222318655, "learning_rate": 9.935631927394216e-06, "loss": 0.5907, "step": 166 },
    { "epoch": 0.44297082228116713, "grad_norm": 34.040968392157055, "learning_rate": 9.933138012926982e-06, "loss": 0.5183, "step": 167 },
    { "epoch": 0.44562334217506633, "grad_norm": 28.136546083528152, "learning_rate": 9.930597024496933e-06, "loss": 0.5178, "step": 168 },
    { "epoch": 0.4482758620689655, "grad_norm": 43.93421178414282, "learning_rate": 9.928008986351187e-06, "loss": 0.8739, "step": 169 },
    { "epoch": 0.4509283819628647, "grad_norm": 119.1038178854159, "learning_rate": 9.925373923185835e-06, "loss": 0.5091, "step": 170 },
    { "epoch": 0.4535809018567639, "grad_norm": 28.83088817600832, "learning_rate": 9.922691860145696e-06, "loss": 0.5211, "step": 171 },
    { "epoch": 0.4562334217506631, "grad_norm": 22.221755556596992, "learning_rate": 9.919962822824083e-06, "loss": 0.5197, "step": 172 },
    { "epoch": 0.4588859416445623, "grad_norm": 32.958688826283854, "learning_rate": 9.917186837262552e-06, "loss": 0.3914, "step": 173 },
    { "epoch": 0.46153846153846156, "grad_norm": 41.71583238871775, "learning_rate": 9.91436392995066e-06, "loss": 0.5258, "step": 174 },
    { "epoch": 0.46419098143236076, "grad_norm": 22.206627018336867, "learning_rate": 9.91149412782571e-06, "loss": 0.4415, "step": 175 },
    { "epoch": 0.46684350132625996, "grad_norm": 29.465926646264833, "learning_rate": 9.908577458272496e-06, "loss": 0.441, "step": 176 },
    { "epoch": 0.46949602122015915, "grad_norm": 24.17133231149754, "learning_rate": 9.905613949123036e-06, "loss": 0.4358, "step": 177 },
    { "epoch": 0.47214854111405835, "grad_norm": 24.684729254239027, "learning_rate": 9.902603628656312e-06, "loss": 0.4289, "step": 178 },
    { "epoch": 0.47480106100795755, "grad_norm": 25.781253401759116, "learning_rate": 9.899546525597998e-06, "loss": 0.4589, "step": 179 },
    { "epoch": 0.47745358090185674, "grad_norm": 19.39490678374163, "learning_rate": 9.896442669120188e-06, "loss": 0.4193, "step": 180 },
    { "epoch": 0.48010610079575594, "grad_norm": 30.5732115207238, "learning_rate": 9.893292088841109e-06, "loss": 0.4166, "step": 181 },
    { "epoch": 0.4827586206896552, "grad_norm": 32.50572814195163, "learning_rate": 9.890094814824854e-06, "loss": 0.4236, "step": 182 },
    { "epoch": 0.4854111405835544, "grad_norm": 30.807902366617746, "learning_rate": 9.886850877581079e-06, "loss": 0.4309, "step": 183 },
    { "epoch": 0.4880636604774536, "grad_norm": 35.1747447641885, "learning_rate": 9.883560308064723e-06, "loss": 0.4544, "step": 184 },
    { "epoch": 0.4907161803713528, "grad_norm": 31.49387832975565, "learning_rate": 9.880223137675709e-06, "loss": 0.3407, "step": 185 },
    { "epoch": 0.493368700265252, "grad_norm": 26.52811176686443, "learning_rate": 9.87683939825864e-06, "loss": 0.4113, "step": 186 },
    { "epoch": 0.4960212201591512, "grad_norm": 31.00758936810695, "learning_rate": 9.873409122102505e-06, "loss": 0.4355, "step": 187 },
    { "epoch": 0.4986737400530504, "grad_norm": 27.717176077117248, "learning_rate": 9.86993234194036e-06, "loss": 0.3516, "step": 188 },
    { "epoch": 0.5013262599469496, "grad_norm": 34.74096987840765, "learning_rate": 9.866409090949023e-06, "loss": 0.3642, "step": 189 },
    { "epoch": 0.5039787798408488, "grad_norm": 29.015794904105384, "learning_rate": 9.862839402748754e-06, "loss": 0.3794, "step": 190 },
    { "epoch": 0.506631299734748, "grad_norm": 41.84479090753642, "learning_rate": 9.859223311402937e-06, "loss": 0.3886, "step": 191 },
    { "epoch": 0.5092838196286472, "grad_norm": 36.854102597468284, "learning_rate": 9.855560851417752e-06, "loss": 0.4183, "step": 192 },
    { "epoch": 0.5119363395225465, "grad_norm": 64.30754223963406, "learning_rate": 9.851852057741846e-06, "loss": 0.4084, "step": 193 },
    { "epoch": 0.5145888594164456, "grad_norm": 25.193081857014732, "learning_rate": 9.848096965766005e-06, "loss": 0.3958, "step": 194 },
    { "epoch": 0.5172413793103449, "grad_norm": 24.9351452398952, "learning_rate": 9.844295611322804e-06, "loss": 0.3078, "step": 195 },
    { "epoch": 0.519893899204244, "grad_norm": 23.312934255537694, "learning_rate": 9.84044803068628e-06, "loss": 0.3327, "step": 196 },
    { "epoch": 0.5225464190981433, "grad_norm": 25.7230160426201, "learning_rate": 9.836554260571577e-06, "loss": 0.3178, "step": 197 },
    { "epoch": 0.5251989389920424, "grad_norm": 35.938145582024, "learning_rate": 9.832614338134595e-06, "loss": 0.4019, "step": 198 },
    { "epoch": 0.5278514588859416, "grad_norm": 39.11558506828113, "learning_rate": 9.828628300971639e-06, "loss": 0.3789, "step": 199 },
    { "epoch": 0.5305039787798409, "grad_norm": 30.261902180642043, "learning_rate": 9.82459618711906e-06, "loss": 0.3983, "step": 200 },
    { "epoch": 0.53315649867374, "grad_norm": 64.61984241373075, "learning_rate": 9.82051803505289e-06, "loss": 0.3625, "step": 201 },
    { "epoch": 0.5358090185676393, "grad_norm": 33.429830067040626, "learning_rate": 9.816393883688475e-06, "loss": 0.2433, "step": 202 },
    { "epoch": 0.5384615384615384, "grad_norm": 29.947626952304116, "learning_rate": 9.812223772380107e-06, "loss": 0.3918, "step": 203 },
    { "epoch": 0.5411140583554377, "grad_norm": 38.93953812281945, "learning_rate": 9.808007740920647e-06, "loss": 0.5043, "step": 204 },
    { "epoch": 0.5437665782493368, "grad_norm": 26.173589489761422, "learning_rate": 9.803745829541138e-06, "loss": 0.4359, "step": 205 },
    { "epoch": 0.5464190981432361, "grad_norm": 32.72334404117796, "learning_rate": 9.799438078910433e-06, "loss": 0.5079, "step": 206 },
    { "epoch": 0.5490716180371353, "grad_norm": 24.299069894541628, "learning_rate": 9.795084530134801e-06, "loss": 0.353, "step": 207 },
    { "epoch": 0.5517241379310345, "grad_norm": 30.678303610004576, "learning_rate": 9.790685224757534e-06, "loss": 0.4452, "step": 208 },
    { "epoch": 0.5543766578249337, "grad_norm": 29.167867236384616, "learning_rate": 9.786240204758552e-06, "loss": 0.4194, "step": 209 },
    { "epoch": 0.5570291777188329, "grad_norm": 26.581831161155673, "learning_rate": 9.781749512554e-06, "loss": 0.4243, "step": 210 },
    { "epoch": 0.5596816976127321, "grad_norm": 21.37851363099381, "learning_rate": 9.777213190995849e-06, "loss": 0.4484, "step": 211 },
    { "epoch": 0.5623342175066313, "grad_norm": 22.08892391101572, "learning_rate": 9.772631283371481e-06, "loss": 0.4268, "step": 212 },
    { "epoch": 0.5649867374005305, "grad_norm": 15.782823698332344, "learning_rate": 9.768003833403278e-06, "loss": 0.3759, "step": 213 },
    { "epoch": 0.5676392572944297, "grad_norm": 16.07792275411613, "learning_rate": 9.763330885248206e-06, "loss": 0.2717, "step": 214 },
    { "epoch": 0.5702917771883289, "grad_norm": 26.021905380869452, "learning_rate": 9.758612483497395e-06, "loss": 0.3971, "step": 215 },
    { "epoch": 0.5729442970822282, "grad_norm": 34.183126869494714, "learning_rate": 9.753848673175707e-06, "loss": 0.5552, "step": 216 },
    { "epoch": 0.5755968169761273, "grad_norm": 21.719701736389982, "learning_rate": 9.749039499741313e-06, "loss": 0.3556, "step": 217 },
    { "epoch": 0.5782493368700266, "grad_norm": 20.51115752526158, "learning_rate": 9.744185009085258e-06, "loss": 0.3676, "step": 218 },
    { "epoch": 0.5809018567639257, "grad_norm": 24.935205724907632, "learning_rate": 9.739285247531019e-06, "loss": 0.2892, "step": 219 },
    { "epoch": 0.583554376657825, "grad_norm": 21.904827099612007, "learning_rate": 9.734340261834068e-06, "loss": 0.3655, "step": 220 },
    { "epoch": 0.5862068965517241, "grad_norm": 20.80201877630619, "learning_rate": 9.72935009918142e-06, "loss": 0.446, "step": 221 },
    { "epoch": 0.5888594164456233, "grad_norm": 17.590163414902698, "learning_rate": 9.724314807191197e-06, "loss": 0.4026, "step": 222 },
    { "epoch": 0.5915119363395226, "grad_norm": 22.389907714500293, "learning_rate": 9.719234433912148e-06, "loss": 0.4303, "step": 223 },
    { "epoch": 0.5941644562334217, "grad_norm": 33.027281154585765, "learning_rate": 9.714109027823218e-06, "loss": 0.5218, "step": 224 },
    { "epoch": 0.596816976127321, "grad_norm": 34.85602301953212, "learning_rate": 9.708938637833065e-06, "loss": 0.6806, "step": 225 },
    { "epoch": 0.5994694960212201, "grad_norm": 33.958613057983065, "learning_rate": 9.703723313279607e-06, "loss": 0.6133, "step": 226 },
    { "epoch": 0.6021220159151194, "grad_norm": 22.14815049703452, "learning_rate": 9.698463103929542e-06, "loss": 0.3241, "step": 227 },
    { "epoch": 0.6047745358090185, "grad_norm": 22.486614563222847, "learning_rate": 9.693158059977879e-06, "loss": 0.3484, "step": 228 },
    { "epoch": 0.6074270557029178, "grad_norm": 22.65909520718584, "learning_rate": 9.687808232047452e-06, "loss": 0.5067, "step": 229 },
    { "epoch": 0.610079575596817, "grad_norm": 25.434225245621576, "learning_rate": 9.682413671188444e-06, "loss": 0.5275, "step": 230 },
    { "epoch": 0.6127320954907162, "grad_norm": 81.64737851645542, "learning_rate": 9.6769744288779e-06, "loss": 0.8488, "step": 231 },
    { "epoch": 0.6153846153846154, "grad_norm": 22.83596646551565, "learning_rate": 9.671490557019234e-06, "loss": 0.5084, "step": 232 },
    { "epoch": 0.6180371352785146, "grad_norm": 24.661145613716183, "learning_rate": 9.665962107941725e-06, "loss": 0.3213, "step": 233 },
    { "epoch": 0.6206896551724138, "grad_norm": 35.54506895459497, "learning_rate": 9.660389134400034e-06, "loss": 0.4413, "step": 234 },
    { "epoch": 0.623342175066313, "grad_norm": 64.98170070542619, "learning_rate": 9.654771689573685e-06, "loss": 0.6559, "step": 235 },
    { "epoch": 0.6259946949602122, "grad_norm": 187.51068555822786, "learning_rate": 9.649109827066572e-06, "loss": 0.6989, "step": 236 },
    { "epoch": 0.6286472148541115, "grad_norm": 33.1125217717655, "learning_rate": 9.643403600906433e-06, "loss": 0.7709, "step": 237 },
    { "epoch": 0.6312997347480106, "grad_norm": 29.25348002948659, "learning_rate": 9.637653065544349e-06, "loss": 0.4139, "step": 238 },
    { "epoch": 0.6339522546419099, "grad_norm": 27.942960704970478, "learning_rate": 9.63185827585421e-06, "loss": 0.4322, "step": 239 },
    { "epoch": 0.636604774535809, "grad_norm": 14.655362018642805, "learning_rate": 9.626019287132202e-06, "loss": 0.3577, "step": 240 },
    { "epoch": 0.6392572944297082, "grad_norm": 21.994795474690797, "learning_rate": 9.620136155096276e-06, "loss": 0.5319, "step": 241 },
    { "epoch": 0.6419098143236074, "grad_norm": 42.45323753099587, "learning_rate": 9.614208935885615e-06, "loss": 0.6774, "step": 242 },
    { "epoch": 0.6445623342175066, "grad_norm": 36.512286461212625, "learning_rate": 9.608237686060099e-06, "loss": 0.3534, "step": 243 },
    { "epoch": 0.6472148541114059, "grad_norm": 17.32927132579308, "learning_rate": 9.602222462599768e-06, "loss": 0.3636, "step": 244 },
    { "epoch": 0.649867374005305, "grad_norm": 17.977911163442602, "learning_rate": 9.59616332290427e-06, "loss": 0.4301, "step": 245 },
    { "epoch": 0.6525198938992043, "grad_norm": 17.50890484869782, "learning_rate": 9.590060324792328e-06, "loss": 0.3769, "step": 246 },
    { "epoch": 0.6551724137931034, "grad_norm": 18.207988906759493, "learning_rate": 9.58391352650117e-06, "loss": 0.3934, "step": 247 },
    { "epoch": 0.6578249336870027, "grad_norm": 16.794379411032647, "learning_rate": 9.577722986685992e-06, "loss": 0.3416, "step": 248 },
    { "epoch": 0.6604774535809018, "grad_norm": 22.46268516955722, "learning_rate": 9.571488764419381e-06, "loss": 0.4779, "step": 249 },
    { "epoch": 0.6631299734748011, "grad_norm": 18.844281302633767, "learning_rate": 9.565210919190764e-06, "loss": 0.4127, "step": 250 },
    { "epoch": 0.6657824933687002, "grad_norm": 30.909047305949056, "learning_rate": 9.558889510905836e-06, "loss": 0.3517, "step": 251 },
    { "epoch": 0.6684350132625995, "grad_norm": 11.838602530900902, "learning_rate": 9.552524599885982e-06, "loss": 0.3324, "step": 252 },
    { "epoch": 0.6710875331564987, "grad_norm": 40.74877226648762, "learning_rate": 9.546116246867716e-06, "loss": 0.4133, "step": 253 },
    { "epoch": 0.6737400530503979, "grad_norm": 44.920801856628174, "learning_rate": 9.539664513002085e-06, "loss": 0.2973, "step": 254 },
    { "epoch": 0.6763925729442971, "grad_norm": 16.656713031850103, "learning_rate": 9.5331694598541e-06, "loss": 0.4337, "step": 255 },
    { "epoch": 0.6790450928381963, "grad_norm": 14.985641353600831, "learning_rate": 9.526631149402135e-06, "loss": 0.3079, "step": 256 },
    { "epoch": 0.6816976127320955, "grad_norm": 20.960436747249688, "learning_rate": 9.520049644037349e-06, "loss": 0.2489, "step": 257 },
    { "epoch": 0.6843501326259946, "grad_norm": 13.99207792463964, "learning_rate": 9.51342500656308e-06, "loss": 0.4532, "step": 258 },
    { "epoch": 0.6870026525198939, "grad_norm": 16.416720828486298, "learning_rate": 9.506757300194249e-06, "loss": 0.2945, "step": 259 },
    { "epoch": 0.6896551724137931, "grad_norm": 19.89613501499206, "learning_rate": 9.500046588556762e-06, "loss": 0.4742, "step": 260 },
    { "epoch": 0.6923076923076923, "grad_norm": 9.951587060994283, "learning_rate": 9.493292935686896e-06, "loss": 0.2758, "step": 261 },
    { "epoch": 0.6949602122015915, "grad_norm": 19.80119532690169, "learning_rate": 9.486496406030687e-06, "loss": 0.452, "step": 262 },
    { "epoch": 0.6976127320954907, "grad_norm": 33.212777558508854, "learning_rate": 9.479657064443321e-06, "loss": 0.7174, "step": 263 },
    { "epoch": 0.7002652519893899, "grad_norm": 18.006235485939523, "learning_rate": 9.472774976188515e-06, "loss": 0.5917, "step": 264 },
    { "epoch": 0.7029177718832891, "grad_norm": 15.240588483515115, "learning_rate": 9.46585020693789e-06, "loss": 0.3999, "step": 265 },
    { "epoch": 0.7055702917771883, "grad_norm": 35.737869514907985, "learning_rate": 9.458882822770342e-06, "loss": 0.3715, "step": 266 },
    { "epoch": 0.7082228116710876, "grad_norm": 24.925311207725752, "learning_rate": 9.451872890171419e-06, "loss": 0.5303, "step": 267 },
    { "epoch": 0.7108753315649867, "grad_norm": 46.23741848833133, "learning_rate": 9.444820476032687e-06, "loss": 0.4639, "step": 268 },
    { "epoch": 0.713527851458886, "grad_norm": 21.735983219615278, "learning_rate": 9.43772564765108e-06, "loss": 0.4672, "step": 269 },
    { "epoch": 0.7161803713527851, "grad_norm": 14.075914265260876, "learning_rate": 9.430588472728271e-06, "loss": 0.3487, "step": 270 },
    { "epoch": 0.7188328912466844, "grad_norm": 22.672174429266864, "learning_rate": 9.423409019370015e-06, "loss": 0.4069, "step": 271 },
    { "epoch": 0.7214854111405835, "grad_norm": 13.676070502807473, "learning_rate": 9.416187356085513e-06, "loss": 0.3357, "step": 272 },
    { "epoch": 0.7241379310344828, "grad_norm": 13.835110121196607, "learning_rate": 9.408923551786742e-06, "loss": 0.2822, "step": 273 },
    { "epoch": 0.726790450928382, "grad_norm": 13.118152081858932, "learning_rate": 9.401617675787812e-06, "loss": 0.3351, "step": 274 },
    { "epoch": 0.7294429708222812, "grad_norm": 17.912862329564724, "learning_rate": 9.39426979780429e-06, "loss": 0.3141, "step": 275 },
    { "epoch": 0.7320954907161804, "grad_norm": 19.430106308681527, "learning_rate": 9.386879987952549e-06, "loss": 0.3015, "step": 276 },
    { "epoch": 0.7347480106100795, "grad_norm": 18.18397698895223, "learning_rate": 9.379448316749092e-06, "loss": 0.2847, "step": 277 },
    { "epoch": 0.7374005305039788, "grad_norm": 14.502714380014782, "learning_rate": 9.371974855109876e-06, "loss": 0.2021, "step": 278 },
    { "epoch": 0.7400530503978779, "grad_norm": 21.444682273336202, "learning_rate": 9.364459674349642e-06, "loss": 0.5219, "step": 279 },
    { "epoch": 0.7427055702917772, "grad_norm": 16.515642246210884, "learning_rate": 9.356902846181229e-06, "loss": 0.4582, "step": 280 },
    { "epoch": 0.7453580901856764, "grad_norm": 10.185285430919885, "learning_rate": 9.349304442714895e-06, "loss": 0.3156, "step": 281 },
    { "epoch": 0.7480106100795756, "grad_norm": 14.153947807956735, "learning_rate": 9.341664536457626e-06, "loss": 0.2695, "step": 282 },
    { "epoch": 0.7506631299734748, "grad_norm": 17.285611051909843, "learning_rate": 9.33398320031244e-06, "loss": 0.3721, "step": 283 },
    { "epoch": 0.753315649867374, "grad_norm": 13.407829897010375, "learning_rate": 9.326260507577702e-06, "loss": 0.3364, "step": 284 },
    { "epoch": 0.7559681697612732, "grad_norm": 14.25268290484978, "learning_rate": 9.318496531946411e-06, "loss": 0.2748, "step": 285 },
    { "epoch": 0.7586206896551724, "grad_norm": 18.492761954599935, "learning_rate": 9.310691347505506e-06, "loss": 0.5008, "step": 286 },
    { "epoch": 0.7612732095490716, "grad_norm": 22.793144839358863, "learning_rate": 9.30284502873516e-06, "loss": 0.4097, "step": 287 },
    { "epoch": 0.7639257294429708, "grad_norm": 16.181153559831785, "learning_rate": 9.294957650508065e-06, "loss": 0.2809, "step": 288 },
    { "epoch": 0.76657824933687, "grad_norm": 11.470613703520078, "learning_rate": 9.287029288088716e-06, "loss": 0.2863, "step": 289 },
    { "epoch": 0.7692307692307693, "grad_norm": 13.332851323419893, "learning_rate": 9.279060017132698e-06, "loss": 0.2954, "step": 290 },
    { "epoch": 0.7718832891246684, "grad_norm": 11.26667469574856, "learning_rate": 9.27104991368596e-06, "loss": 0.251, "step": 291 },
    { "epoch": 0.7745358090185677, "grad_norm": 18.02941499098984, "learning_rate": 9.262999054184093e-06, "loss": 0.3293, "step": 292 },
    { "epoch": 0.7771883289124668, "grad_norm": 31.725285806638624, "learning_rate": 9.254907515451593e-06, "loss": 0.2781, "step": 293 },
    { "epoch": 0.7798408488063661, "grad_norm": 12.678792811795962, "learning_rate": 9.246775374701139e-06, "loss": 0.2566, "step": 294 },
    { "epoch": 0.7824933687002652, "grad_norm": 11.334745839135394, "learning_rate": 9.238602709532851e-06, "loss": 0.1999, "step": 295 },
    { "epoch": 0.7851458885941645, "grad_norm": 15.041533936869287, "learning_rate": 9.230389597933545e-06, "loss": 0.1876, "step": 296 },
    { "epoch": 0.7877984084880637, "grad_norm": 29.12651112845299, "learning_rate": 9.222136118275996e-06, "loss": 0.3695, "step": 297 },
    { "epoch": 0.7904509283819628, "grad_norm": 24.386941215029246, "learning_rate": 9.213842349318185e-06, "loss": 0.4731, "step": 298 },
    { "epoch": 0.7931034482758621, "grad_norm": 28.853883305664173, "learning_rate": 9.205508370202552e-06, "loss": 0.317, "step": 299 },
    { "epoch": 0.7957559681697612, "grad_norm": 28.19864618531571, "learning_rate": 9.197134260455233e-06, "loss": 0.291, "step": 300 },
    { "epoch": 0.7984084880636605, "grad_norm": 27.402620701056804, "learning_rate": 9.188720099985316e-06, "loss": 0.3329, "step": 301 },
    { "epoch": 0.8010610079575596, "grad_norm": 15.943330830921525, "learning_rate": 9.180265969084058e-06, "loss": 0.2834, "step": 302 },
    { "epoch": 0.8037135278514589, "grad_norm": 7.227251326783193, "learning_rate": 9.171771948424138e-06, "loss": 0.1717, "step": 303 },
    { "epoch": 0.8063660477453581, "grad_norm": 34.48343800704204, "learning_rate": 9.163238119058873e-06, "loss": 0.3691, "step": 304 },
    { "epoch": 0.8090185676392573, "grad_norm": 40.06172952437424, "learning_rate": 9.154664562421453e-06, "loss": 0.2684, "step": 305 },
    { "epoch": 0.8116710875331565, "grad_norm": 24.988139867781623, "learning_rate": 9.146051360324166e-06, "loss": 0.3486, "step": 306 },
    { "epoch": 0.8143236074270557, "grad_norm": 19.09100107534328, "learning_rate": 9.137398594957605e-06, "loss": 0.3448, "step": 307 },
    { "epoch": 0.8169761273209549, "grad_norm": 24.043307257960755, "learning_rate": 9.128706348889895e-06, "loss": 0.232, "step": 308 },
    { "epoch": 0.8196286472148541, "grad_norm": 12.94821480071146, "learning_rate": 9.119974705065902e-06, "loss": 0.2142, "step": 309 },
    { "epoch": 0.8222811671087533, "grad_norm": 36.61325443120747, "learning_rate": 9.111203746806439e-06, "loss": 0.2855, "step": 310 },
    { "epoch": 0.8249336870026526, "grad_norm": 46.96133911360459, "learning_rate": 9.102393557807476e-06, "loss": 0.4035, "step": 311 },
    { "epoch": 0.8275862068965517, "grad_norm": 24.988284986707416, "learning_rate": 9.093544222139338e-06, "loss": 0.2393, "step": 312 },
    { "epoch": 0.830238726790451, "grad_norm": 31.166186001209244, "learning_rate": 9.084655824245899e-06, "loss": 0.3218, "step": 313 },
    { "epoch": 0.8328912466843501, "grad_norm": 21.281328272591747, "learning_rate": 9.075728448943783e-06, "loss": 0.2582, "step": 314 },
    { "epoch": 0.8355437665782494, "grad_norm": 18.814982555038767, "learning_rate": 9.066762181421552e-06, "loss": 0.2068, "step": 315 },
    { "epoch": 0.8381962864721485, "grad_norm": 31.825936760948323, "learning_rate": 9.057757107238897e-06, "loss": 0.3817, "step": 316 },
    { "epoch": 0.8408488063660478, "grad_norm": 14.169256890558362, "learning_rate": 9.048713312325806e-06, "loss": 0.2314, "step": 317 },
    { "epoch": 0.843501326259947, "grad_norm": 23.688281166655763, "learning_rate": 9.039630882981769e-06, "loss": 0.1742, "step": 318 },
    { "epoch": 0.8461538461538461, "grad_norm": 13.838514473172634, "learning_rate": 9.030509905874934e-06, "loss": 0.1665, "step": 319 },
    { "epoch": 0.8488063660477454, "grad_norm": 17.781459045863368, "learning_rate": 9.021350468041287e-06, "loss": 0.1784, "step": 320 },
    { "epoch": 0.8514588859416445, "grad_norm": 77.94922577002949, "learning_rate": 9.012152656883824e-06, "loss": 0.336, "step": 321 },
    { "epoch": 0.8541114058355438, "grad_norm": 29.72677747814033, "learning_rate": 9.002916560171713e-06, "loss": 0.1686, "step": 322 },
    { "epoch": 0.8567639257294429, "grad_norm": 16.891675901144588, "learning_rate": 8.993642266039457e-06, "loss": 0.1319, "step": 323 },
    { "epoch": 0.8594164456233422, "grad_norm": 15.13094650249064, "learning_rate": 8.984329862986056e-06, "loss": 0.1736, "step": 324 },
    { "epoch": 0.8620689655172413, "grad_norm": 28.30844550213038, "learning_rate": 8.974979439874161e-06, "loss": 0.1655, "step": 325 },
    { "epoch": 0.8647214854111406, "grad_norm": 22.835279340857753, "learning_rate": 8.965591085929222e-06, "loss": 0.1709, "step": 326 },
    { "epoch": 0.8673740053050398, "grad_norm": 21.73201118787205, "learning_rate": 8.956164890738643e-06, "loss": 0.2229, "step": 327 },
    { "epoch": 0.870026525198939, "grad_norm": 27.182932385878157, "learning_rate": 8.946700944250925e-06, "loss": 0.109, "step": 328 },
    { "epoch": 0.8726790450928382, "grad_norm": 25.507073178426865, "learning_rate": 8.937199336774805e-06, "loss": 0.1672, "step": 329 },
    { "epoch": 0.8753315649867374, "grad_norm": 37.65077177215759, "learning_rate": 8.927660158978392e-06, "loss": 0.2405, "step": 330 },
    { "epoch": 0.8779840848806366, "grad_norm": 30.41852566328146, "learning_rate": 8.918083501888318e-06, "loss": 0.2346, "step": 331 },
    { "epoch": 0.8806366047745358, "grad_norm": 32.760683405546175, "learning_rate": 8.908469456888845e-06, "loss": 0.2821, "step": 332 },
    { "epoch": 0.883289124668435, "grad_norm": 14.96275740171701, "learning_rate": 8.898818115721009e-06, "loss": 0.1217, "step": 333 },
    { "epoch": 0.8859416445623343, "grad_norm": 56.929989017035076, "learning_rate": 8.889129570481742e-06, "loss": 0.1842, "step": 334 },
    { "epoch": 0.8885941644562334, "grad_norm": 39.89129271968048, "learning_rate": 8.879403913622996e-06, "loss": 0.352, "step": 335 },
    { "epoch": 0.8912466843501327, "grad_norm": 23.553977289043363, "learning_rate": 8.86964123795085e-06, "loss": 0.196, "step": 336 },
    { "epoch": 0.8938992042440318, "grad_norm": 50.539674043133445, "learning_rate": 8.859841636624632e-06, "loss": 0.2385, "step": 337 },
    { "epoch": 0.896551724137931, "grad_norm": 41.28083289313564, "learning_rate": 8.850005203156035e-06, "loss": 0.2551, "step": 338 },
    { "epoch": 0.8992042440318302, "grad_norm": 17.46438639961055, "learning_rate": 8.84013203140821e-06, "loss": 0.1871, "step": 339 },
    { "epoch": 0.9018567639257294, "grad_norm": 65.22183136449037, "learning_rate": 8.83022221559489e-06, "loss": 0.1929, "step": 340 },
    { "epoch": 0.9045092838196287, "grad_norm": 33.40080695540441, "learning_rate": 8.820275850279473e-06, "loss": 0.1178, "step": 341 },
    { "epoch": 0.9071618037135278, "grad_norm": 30.606871997416945,
| "learning_rate": 8.810293030374126e-06, | |
| "loss": 0.2409, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 0.9098143236074271, | |
| "grad_norm": 9.8419553666327, | |
| "learning_rate": 8.800273851138882e-06, | |
| "loss": 0.1124, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 0.9124668435013262, | |
| "grad_norm": 7.029317494350536, | |
| "learning_rate": 8.790218408180736e-06, | |
| "loss": 0.0794, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 0.9151193633952255, | |
| "grad_norm": 25.80311230082593, | |
| "learning_rate": 8.780126797452713e-06, | |
| "loss": 0.1806, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.9177718832891246, | |
| "grad_norm": 25.294427145350294, | |
| "learning_rate": 8.769999115252976e-06, | |
| "loss": 0.1836, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 0.9204244031830239, | |
| "grad_norm": 7.955897703432974, | |
| "learning_rate": 8.759835458223889e-06, | |
| "loss": 0.0765, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 0.9230769230769231, | |
| "grad_norm": 37.513807083447816, | |
| "learning_rate": 8.749635923351108e-06, | |
| "loss": 0.2575, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 0.9257294429708223, | |
| "grad_norm": 51.255509955721585, | |
| "learning_rate": 8.739400607962644e-06, | |
| "loss": 0.1577, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 0.9283819628647215, | |
| "grad_norm": 18.240656285489155, | |
| "learning_rate": 8.729129609727948e-06, | |
| "loss": 0.1548, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.9310344827586207, | |
| "grad_norm": 100.46606805908814, | |
| "learning_rate": 8.71882302665696e-06, | |
| "loss": 0.1576, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 0.9336870026525199, | |
| "grad_norm": 70.48575406579431, | |
| "learning_rate": 8.708480957099195e-06, | |
| "loss": 0.3021, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.9363395225464191, | |
| "grad_norm": 50.24377457117916, | |
| "learning_rate": 8.698103499742785e-06, | |
| "loss": 0.2214, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 0.9389920424403183, | |
| "grad_norm": 45.53670800288432, | |
| "learning_rate": 8.687690753613554e-06, | |
| "loss": 0.1214, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.9416445623342176, | |
| "grad_norm": 19.258099163195403, | |
| "learning_rate": 8.677242818074064e-06, | |
| "loss": 0.1727, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.9442970822281167, | |
| "grad_norm": 35.20766984498437, | |
| "learning_rate": 8.666759792822662e-06, | |
| "loss": 0.1809, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.946949602122016, | |
| "grad_norm": 34.34333777726141, | |
| "learning_rate": 8.656241777892544e-06, | |
| "loss": 0.1829, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 0.9496021220159151, | |
| "grad_norm": 34.90609145180279, | |
| "learning_rate": 8.645688873650785e-06, | |
| "loss": 0.2829, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.9522546419098143, | |
| "grad_norm": 36.452500595932214, | |
| "learning_rate": 8.635101180797391e-06, | |
| "loss": 0.3426, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.9549071618037135, | |
| "grad_norm": 108.03269569387093, | |
| "learning_rate": 8.624478800364332e-06, | |
| "loss": 0.1976, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.9575596816976127, | |
| "grad_norm": 33.814306900086486, | |
| "learning_rate": 8.613821833714584e-06, | |
| "loss": 0.2338, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.9602122015915119, | |
| "grad_norm": 21.917159876576733, | |
| "learning_rate": 8.603130382541156e-06, | |
| "loss": 0.1066, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.9628647214854111, | |
| "grad_norm": 21.301362973976964, | |
| "learning_rate": 8.592404548866123e-06, | |
| "loss": 0.1646, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.9655172413793104, | |
| "grad_norm": 49.429845587961346, | |
| "learning_rate": 8.581644435039652e-06, | |
| "loss": 0.126, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.9681697612732095, | |
| "grad_norm": 50.080975421312885, | |
| "learning_rate": 8.570850143739022e-06, | |
| "loss": 0.1885, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.9708222811671088, | |
| "grad_norm": 141.1531101408704, | |
| "learning_rate": 8.56002177796765e-06, | |
| "loss": 0.3207, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.9734748010610079, | |
| "grad_norm": 46.260350554324006, | |
| "learning_rate": 8.549159441054105e-06, | |
| "loss": 0.2098, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.9761273209549072, | |
| "grad_norm": 18.971860510276223, | |
| "learning_rate": 8.538263236651119e-06, | |
| "loss": 0.1199, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.9787798408488063, | |
| "grad_norm": 8.232640954254169, | |
| "learning_rate": 8.527333268734607e-06, | |
| "loss": 0.0843, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.9814323607427056, | |
| "grad_norm": 65.18425659961827, | |
| "learning_rate": 8.516369641602662e-06, | |
| "loss": 0.1867, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.9840848806366048, | |
| "grad_norm": 40.82812700576002, | |
| "learning_rate": 8.505372459874572e-06, | |
| "loss": 0.1587, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.986737400530504, | |
| "grad_norm": 62.23940974570685, | |
| "learning_rate": 8.494341828489812e-06, | |
| "loss": 0.3068, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.9893899204244032, | |
| "grad_norm": 219.5646577859926, | |
| "learning_rate": 8.483277852707053e-06, | |
| "loss": 0.3759, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.9920424403183024, | |
| "grad_norm": 78.02080168267564, | |
| "learning_rate": 8.472180638103143e-06, | |
| "loss": 0.1889, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.9946949602122016, | |
| "grad_norm": 14.35741713135908, | |
| "learning_rate": 8.461050290572114e-06, | |
| "loss": 0.1484, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.9973474801061007, | |
| "grad_norm": 18.958949331706716, | |
| "learning_rate": 8.449886916324168e-06, | |
| "loss": 0.0946, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 43.25681963470466, | |
| "learning_rate": 8.43869062188465e-06, | |
| "loss": 0.1539, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "eval_loss": 0.30030304193496704, | |
| "eval_runtime": 183.8733, | |
| "eval_samples_per_second": 11.502, | |
| "eval_steps_per_second": 1.441, | |
| "step": 377 | |
| }, | |
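Note on the epoch-1 evaluation record above (eval_loss 0.3003 at step 377): its throughput fields are internally consistent, since 183.8733 s × 11.502 samples/s ≈ 2115 eval samples, and 11.502 / 1.441 ≈ 8 samples per eval step. A minimal Python sketch that loads this log and prints the same summary for every eval record follows; the file name trainer_state.json and the key layout are assumptions based on the usual Hugging Face Trainer state format, not something the log itself states.

```python
# Minimal sketch, assuming this log is a standard Hugging Face Trainer
# trainer_state.json (the file name/path is an assumption, not given here).
import json

with open("trainer_state.json") as f:
    state = json.load(f)

for rec in state["log_history"]:
    if "eval_loss" not in rec:
        continue  # train-step records have "loss" instead of "eval_loss"
    # Cross-check throughput: runtime * samples/s approximates the eval-set
    # size, and samples/s divided by steps/s the per-device eval batch size.
    n_samples = rec["eval_runtime"] * rec["eval_samples_per_second"]
    batch = rec["eval_samples_per_second"] / rec["eval_steps_per_second"]
    print(f"step {rec['step']}: eval_loss={rec['eval_loss']:.4f}, "
          f"~{n_samples:.0f} samples, ~{batch:.1f} samples/step")
```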
| { | |
| "epoch": 1.0026525198938991, | |
| "grad_norm": 26.247740015472544, | |
| "learning_rate": 8.427461514093056e-06, | |
| "loss": 0.1473, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 1.0053050397877985, | |
| "grad_norm": 44.57567022829379, | |
| "learning_rate": 8.41619970010199e-06, | |
| "loss": 0.26, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 1.0079575596816976, | |
| "grad_norm": 33.58181667040502, | |
| "learning_rate": 8.404905287376158e-06, | |
| "loss": 0.3814, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 1.0106100795755968, | |
| "grad_norm": 20.504205177863337, | |
| "learning_rate": 8.39357838369133e-06, | |
| "loss": 0.0715, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 1.013262599469496, | |
| "grad_norm": 19.073038994052123, | |
| "learning_rate": 8.382219097133323e-06, | |
| "loss": 0.0757, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 1.0159151193633953, | |
| "grad_norm": 25.9547062241258, | |
| "learning_rate": 8.370827536096966e-06, | |
| "loss": 0.1846, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 1.0185676392572944, | |
| "grad_norm": 42.83760462019658, | |
| "learning_rate": 8.359403809285054e-06, | |
| "loss": 0.1511, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 1.0212201591511936, | |
| "grad_norm": 147.25222955626703, | |
| "learning_rate": 8.347948025707331e-06, | |
| "loss": 0.2415, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 1.023872679045093, | |
| "grad_norm": 36.30212036610868, | |
| "learning_rate": 8.336460294679431e-06, | |
| "loss": 0.1406, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 1.026525198938992, | |
| "grad_norm": 23.954532976113505, | |
| "learning_rate": 8.324940725821853e-06, | |
| "loss": 0.2073, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 1.0291777188328912, | |
| "grad_norm": 9.347217747388326, | |
| "learning_rate": 8.313389429058895e-06, | |
| "loss": 0.0726, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 1.0318302387267904, | |
| "grad_norm": 32.17915696861651, | |
| "learning_rate": 8.301806514617622e-06, | |
| "loss": 0.1584, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 1.0344827586206897, | |
| "grad_norm": 28.265295981028995, | |
| "learning_rate": 8.290192093026805e-06, | |
| "loss": 0.2709, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 1.0371352785145889, | |
| "grad_norm": 15.969507471144706, | |
| "learning_rate": 8.27854627511587e-06, | |
| "loss": 0.0737, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 1.039787798408488, | |
| "grad_norm": 16.354524351934344, | |
| "learning_rate": 8.266869172013835e-06, | |
| "loss": 0.0823, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 1.0424403183023874, | |
| "grad_norm": 8.25219055208596, | |
| "learning_rate": 8.255160895148263e-06, | |
| "loss": 0.0604, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 1.0450928381962865, | |
| "grad_norm": 19.259455856985063, | |
| "learning_rate": 8.243421556244179e-06, | |
| "loss": 0.154, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 1.0477453580901857, | |
| "grad_norm": 40.049582793036315, | |
| "learning_rate": 8.23165126732302e-06, | |
| "loss": 0.2644, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 1.0503978779840848, | |
| "grad_norm": 43.027345975298836, | |
| "learning_rate": 8.219850140701557e-06, | |
| "loss": 0.2, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 1.0530503978779842, | |
| "grad_norm": 25.456126225005505, | |
| "learning_rate": 8.208018288990832e-06, | |
| "loss": 0.1524, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 1.0557029177718833, | |
| "grad_norm": 50.986810327005976, | |
| "learning_rate": 8.196155825095073e-06, | |
| "loss": 0.1664, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 1.0583554376657824, | |
| "grad_norm": 33.63796057844049, | |
| "learning_rate": 8.184262862210624e-06, | |
| "loss": 0.2054, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 1.0610079575596818, | |
| "grad_norm": 6.627905349804448, | |
| "learning_rate": 8.172339513824863e-06, | |
| "loss": 0.0373, | |
| "step": 400 | |
| }, | |
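The epoch bookkeeping in these records is step / 377: epoch 1.0 is logged at step 377, and step 400 above logs epoch ≈ 1.0610 = 400/377. A minimal sketch of that mapping, with steps_per_epoch inferred from the log rather than stated in it:

```python
# Minimal sketch of the epoch bookkeeping; steps_per_epoch is inferred from
# this log (epoch 1.0 is recorded at step 377), not stated in it.
steps_per_epoch = 377

def epoch_of(step: int) -> float:
    return step / steps_per_epoch

print(epoch_of(400))  # ~1.06100795755968, matching the step-400 record above
```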
| { | |
| "epoch": 1.063660477453581, | |
| "grad_norm": 42.33126806111016, | |
| "learning_rate": 8.160385893715113e-06, | |
| "loss": 0.161, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 1.06631299734748, | |
| "grad_norm": 25.6529334614425, | |
| "learning_rate": 8.14840211594757e-06, | |
| "loss": 0.0532, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 1.0689655172413792, | |
| "grad_norm": 44.42504750183782, | |
| "learning_rate": 8.136388294876204e-06, | |
| "loss": 0.1046, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 1.0716180371352786, | |
| "grad_norm": 31.1347062063765, | |
| "learning_rate": 8.124344545141663e-06, | |
| "loss": 0.1343, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 1.0742705570291777, | |
| "grad_norm": 43.980161157615555, | |
| "learning_rate": 8.112270981670196e-06, | |
| "loss": 0.0939, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 1.0769230769230769, | |
| "grad_norm": 59.158674533654114, | |
| "learning_rate": 8.10016771967254e-06, | |
| "loss": 0.112, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 1.079575596816976, | |
| "grad_norm": 27.34798138117141, | |
| "learning_rate": 8.088034874642834e-06, | |
| "loss": 0.0769, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 1.0822281167108754, | |
| "grad_norm": 71.207985696847, | |
| "learning_rate": 8.075872562357502e-06, | |
| "loss": 0.121, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 1.0848806366047745, | |
| "grad_norm": 28.95675883141315, | |
| "learning_rate": 8.063680898874158e-06, | |
| "loss": 0.161, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 1.0875331564986737, | |
| "grad_norm": 32.41626273254541, | |
| "learning_rate": 8.051460000530501e-06, | |
| "loss": 0.0659, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 1.090185676392573, | |
| "grad_norm": 63.80567601306284, | |
| "learning_rate": 8.039209983943201e-06, | |
| "loss": 0.2279, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 1.0928381962864722, | |
| "grad_norm": 66.14704696379152, | |
| "learning_rate": 8.026930966006778e-06, | |
| "loss": 0.1092, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 1.0954907161803713, | |
| "grad_norm": 34.7401940655128, | |
| "learning_rate": 8.014623063892504e-06, | |
| "loss": 0.1228, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 1.0981432360742707, | |
| "grad_norm": 28.39328042406372, | |
| "learning_rate": 8.002286395047267e-06, | |
| "loss": 0.0952, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 1.1007957559681698, | |
| "grad_norm": 35.277288119831546, | |
| "learning_rate": 7.989921077192464e-06, | |
| "loss": 0.055, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 1.103448275862069, | |
| "grad_norm": 55.3169101287943, | |
| "learning_rate": 7.97752722832287e-06, | |
| "loss": 0.1851, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 1.106100795755968, | |
| "grad_norm": 33.0051706210266, | |
| "learning_rate": 7.965104966705518e-06, | |
| "loss": 0.086, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 1.1087533156498675, | |
| "grad_norm": 28.435466433467102, | |
| "learning_rate": 7.95265441087856e-06, | |
| "loss": 0.0612, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 1.1114058355437666, | |
| "grad_norm": 59.015781331645016, | |
| "learning_rate": 7.940175679650145e-06, | |
| "loss": 0.2208, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 1.1140583554376657, | |
| "grad_norm": 31.56091880492037, | |
| "learning_rate": 7.927668892097288e-06, | |
| "loss": 0.1255, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 1.1167108753315649, | |
| "grad_norm": 30.739733409482906, | |
| "learning_rate": 7.915134167564724e-06, | |
| "loss": 0.0426, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 1.1193633952254642, | |
| "grad_norm": 59.88923437998145, | |
| "learning_rate": 7.902571625663773e-06, | |
| "loss": 0.3506, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 1.1220159151193634, | |
| "grad_norm": 86.34346137579625, | |
| "learning_rate": 7.889981386271202e-06, | |
| "loss": 0.2026, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 1.1246684350132625, | |
| "grad_norm": 76.74826851833494, | |
| "learning_rate": 7.877363569528076e-06, | |
| "loss": 0.084, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 1.1273209549071619, | |
| "grad_norm": 98.33173582335951, | |
| "learning_rate": 7.864718295838615e-06, | |
| "loss": 0.0954, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 1.129973474801061, | |
| "grad_norm": 99.25959364761675, | |
| "learning_rate": 7.852045685869046e-06, | |
| "loss": 0.0848, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 1.1326259946949602, | |
| "grad_norm": 39.34005054150488, | |
| "learning_rate": 7.839345860546448e-06, | |
| "loss": 0.1077, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 1.1352785145888595, | |
| "grad_norm": 32.5882394326579, | |
| "learning_rate": 7.826618941057597e-06, | |
| "loss": 0.1042, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 1.1379310344827587, | |
| "grad_norm": 20.14495021482005, | |
| "learning_rate": 7.81386504884782e-06, | |
| "loss": 0.081, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 1.1405835543766578, | |
| "grad_norm": 124.56461422842759, | |
| "learning_rate": 7.80108430561982e-06, | |
| "loss": 0.1182, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 1.143236074270557, | |
| "grad_norm": 16.549737245392794, | |
| "learning_rate": 7.788276833332527e-06, | |
| "loss": 0.0422, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 1.1458885941644563, | |
| "grad_norm": 36.72590686053697, | |
| "learning_rate": 7.775442754199929e-06, | |
| "loss": 0.0792, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 1.1485411140583555, | |
| "grad_norm": 15.66496882205861, | |
| "learning_rate": 7.762582190689912e-06, | |
| "loss": 0.0713, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 1.1511936339522546, | |
| "grad_norm": 53.28313425943304, | |
| "learning_rate": 7.749695265523076e-06, | |
| "loss": 0.1091, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 1.1538461538461537, | |
| "grad_norm": 26.906094978375314, | |
| "learning_rate": 7.736782101671587e-06, | |
| "loss": 0.0743, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 1.156498673740053, | |
| "grad_norm": 25.290823343487745, | |
| "learning_rate": 7.723842822357982e-06, | |
| "loss": 0.0753, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 1.1591511936339522, | |
| "grad_norm": 42.52515549138246, | |
| "learning_rate": 7.710877551054004e-06, | |
| "loss": 0.0933, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 1.1618037135278514, | |
| "grad_norm": 47.66039776265163, | |
| "learning_rate": 7.697886411479422e-06, | |
| "loss": 0.14, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 1.1644562334217508, | |
| "grad_norm": 22.239886481476965, | |
| "learning_rate": 7.684869527600856e-06, | |
| "loss": 0.0735, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 1.16710875331565, | |
| "grad_norm": 77.17310874305029, | |
| "learning_rate": 7.67182702363058e-06, | |
| "loss": 0.115, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 1.169761273209549, | |
| "grad_norm": 36.31263593466285, | |
| "learning_rate": 7.658759024025349e-06, | |
| "loss": 0.1489, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 1.1724137931034484, | |
| "grad_norm": 33.93046151020367, | |
| "learning_rate": 7.645665653485205e-06, | |
| "loss": 0.2482, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 1.1750663129973475, | |
| "grad_norm": 61.83847934670906, | |
| "learning_rate": 7.632547036952296e-06, | |
| "loss": 0.3177, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 1.1777188328912467, | |
| "grad_norm": 44.06843591162873, | |
| "learning_rate": 7.6194032996096685e-06, | |
| "loss": 0.2418, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 1.1803713527851458, | |
| "grad_norm": 71.6311059732174, | |
| "learning_rate": 7.606234566880089e-06, | |
| "loss": 0.1021, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 1.1830238726790452, | |
| "grad_norm": 30.701514325915806, | |
| "learning_rate": 7.593040964424836e-06, | |
| "loss": 0.1158, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 1.1856763925729443, | |
| "grad_norm": 51.53360555139439, | |
| "learning_rate": 7.579822618142505e-06, | |
| "loss": 0.1298, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 1.1883289124668435, | |
| "grad_norm": 34.359770039769536, | |
| "learning_rate": 7.5665796541678106e-06, | |
| "loss": 0.1088, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 1.1909814323607426, | |
| "grad_norm": 36.703698503352285, | |
| "learning_rate": 7.553312198870373e-06, | |
| "loss": 0.2159, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 1.193633952254642, | |
| "grad_norm": 33.24882834524891, | |
| "learning_rate": 7.540020378853523e-06, | |
| "loss": 0.1625, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 1.1962864721485411, | |
| "grad_norm": 37.862213155854846, | |
| "learning_rate": 7.526704320953091e-06, | |
| "loss": 0.1534, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 1.1989389920424403, | |
| "grad_norm": 36.79476145492882, | |
| "learning_rate": 7.513364152236185e-06, | |
| "loss": 0.0763, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 1.2015915119363396, | |
| "grad_norm": 24.739646949886776, | |
| "learning_rate": 7.500000000000001e-06, | |
| "loss": 0.0935, | |
| "step": 453 | |
| }, | |
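The learning-rate column follows a linear-warmup-then-cosine-decay shape. Step 453 above logs exactly 7.5e-06, which is what a cosine schedule gives one third of the way through its decay, and steps 302 and 600 in this log match the same curve to six significant figures. The sketch below reconstructs it; the warmup length (114 steps), peak (1e-05), and total step count (1131) are inferred from the logged values and should be treated as assumptions.

```python
# Minimal sketch reconstructing the schedule implied by the logged values.
# WARMUP_STEPS, TOTAL_STEPS, and PEAK_LR are inferred assumptions.
import math

WARMUP_STEPS, TOTAL_STEPS, PEAK_LR = 114, 1131, 1e-05

def lr_at(step: int) -> float:
    # Linear warmup, then cosine decay to zero.
    if step < WARMUP_STEPS:
        return PEAK_LR * step / WARMUP_STEPS
    progress = (step - WARMUP_STEPS) / (TOTAL_STEPS - WARMUP_STEPS)
    return PEAK_LR * 0.5 * (1.0 + math.cos(math.pi * progress))

print(lr_at(453))  # ~7.5e-06: progress = 339/1017 = 1/3, cos(pi/3) = 0.5
print(lr_at(600))  # ~5.3472e-06, matching the step-600 record in this log
```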
| { | |
| "epoch": 1.2042440318302388, | |
| "grad_norm": 77.97378968207286, | |
| "learning_rate": 7.486611991770586e-06, | |
| "loss": 0.19, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 1.206896551724138, | |
| "grad_norm": 13.92691027987082, | |
| "learning_rate": 7.473200255301635e-06, | |
| "loss": 0.0592, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 1.209549071618037, | |
| "grad_norm": 25.867853333158276, | |
| "learning_rate": 7.459764918573264e-06, | |
| "loss": 0.1511, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 1.2122015915119364, | |
| "grad_norm": 79.5340370896487, | |
| "learning_rate": 7.446306109790798e-06, | |
| "loss": 0.1044, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 1.2148541114058355, | |
| "grad_norm": 35.636956504443404, | |
| "learning_rate": 7.432823957383533e-06, | |
| "loss": 0.1028, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 1.2175066312997347, | |
| "grad_norm": 25.181159522573243, | |
| "learning_rate": 7.419318590003524e-06, | |
| "loss": 0.0763, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 1.2201591511936338, | |
| "grad_norm": 29.344977483969632, | |
| "learning_rate": 7.405790136524353e-06, | |
| "loss": 0.1183, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 1.2228116710875332, | |
| "grad_norm": 28.3878952747556, | |
| "learning_rate": 7.392238726039897e-06, | |
| "loss": 0.1087, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 1.2254641909814323, | |
| "grad_norm": 83.9178301412247, | |
| "learning_rate": 7.3786644878631035e-06, | |
| "loss": 0.2668, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 1.2281167108753315, | |
| "grad_norm": 106.49206199298355, | |
| "learning_rate": 7.365067551524739e-06, | |
| "loss": 0.2169, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 1.2307692307692308, | |
| "grad_norm": 59.12511319657039, | |
| "learning_rate": 7.3514480467721786e-06, | |
| "loss": 0.1926, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 1.23342175066313, | |
| "grad_norm": 53.09405847876735, | |
| "learning_rate": 7.3378061035681415e-06, | |
| "loss": 0.0563, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 1.2360742705570291, | |
| "grad_norm": 26.983609235432123, | |
| "learning_rate": 7.324141852089473e-06, | |
| "loss": 0.1112, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 1.2387267904509285, | |
| "grad_norm": 34.8998916179393, | |
| "learning_rate": 7.3104554227258895e-06, | |
| "loss": 0.1379, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 1.2413793103448276, | |
| "grad_norm": 34.10991608760568, | |
| "learning_rate": 7.296746946078737e-06, | |
| "loss": 0.1422, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 1.2440318302387268, | |
| "grad_norm": 22.906010086047914, | |
| "learning_rate": 7.283016552959745e-06, | |
| "loss": 0.0822, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 1.246684350132626, | |
| "grad_norm": 38.08134012310191, | |
| "learning_rate": 7.269264374389781e-06, | |
| "loss": 0.1153, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 1.2493368700265253, | |
| "grad_norm": 37.43929570153704, | |
| "learning_rate": 7.255490541597594e-06, | |
| "loss": 0.1395, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 1.2519893899204244, | |
| "grad_norm": 13.945545827968598, | |
| "learning_rate": 7.2416951860185735e-06, | |
| "loss": 0.0929, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 1.2546419098143236, | |
| "grad_norm": 69.16493115003064, | |
| "learning_rate": 7.2278784392934775e-06, | |
| "loss": 0.1108, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 1.2572944297082227, | |
| "grad_norm": 27.84020331802505, | |
| "learning_rate": 7.2140404332671986e-06, | |
| "loss": 0.0824, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 1.259946949602122, | |
| "grad_norm": 40.54513618444105, | |
| "learning_rate": 7.200181299987483e-06, | |
| "loss": 0.1092, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 1.2625994694960212, | |
| "grad_norm": 30.96105138774015, | |
| "learning_rate": 7.186301171703689e-06, | |
| "loss": 0.2326, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 1.2652519893899203, | |
| "grad_norm": 17.211557686559033, | |
| "learning_rate": 7.172400180865514e-06, | |
| "loss": 0.0713, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 1.2679045092838197, | |
| "grad_norm": 16.708361347729333, | |
| "learning_rate": 7.158478460121735e-06, | |
| "loss": 0.0367, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 1.2705570291777188, | |
| "grad_norm": 47.326622082791715, | |
| "learning_rate": 7.144536142318945e-06, | |
| "loss": 0.1239, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 1.273209549071618, | |
| "grad_norm": 48.006912212259905, | |
| "learning_rate": 7.130573360500277e-06, | |
| "loss": 0.0745, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 1.2758620689655173, | |
| "grad_norm": 22.48308191817879, | |
| "learning_rate": 7.116590247904144e-06, | |
| "loss": 0.0542, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 1.2785145888594165, | |
| "grad_norm": 66.41589859102847, | |
| "learning_rate": 7.102586937962961e-06, | |
| "loss": 0.1022, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 1.2811671087533156, | |
| "grad_norm": 8.335141851532386, | |
| "learning_rate": 7.088563564301874e-06, | |
| "loss": 0.0272, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 1.2838196286472148, | |
| "grad_norm": 58.594053099408356, | |
| "learning_rate": 7.074520260737487e-06, | |
| "loss": 0.0736, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 1.2864721485411141, | |
| "grad_norm": 41.46994169436375, | |
| "learning_rate": 7.060457161276581e-06, | |
| "loss": 0.0685, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 1.2891246684350133, | |
| "grad_norm": 37.947876016333154, | |
| "learning_rate": 7.046374400114842e-06, | |
| "loss": 0.1162, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 1.2917771883289124, | |
| "grad_norm": 16.012005850395916, | |
| "learning_rate": 7.032272111635565e-06, | |
| "loss": 0.0604, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 1.2944297082228116, | |
| "grad_norm": 36.95457052573452, | |
| "learning_rate": 7.018150430408394e-06, | |
| "loss": 0.0718, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 1.297082228116711, | |
| "grad_norm": 24.636928336529454, | |
| "learning_rate": 7.004009491188023e-06, | |
| "loss": 0.0583, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 1.29973474801061, | |
| "grad_norm": 8.212480186099812, | |
| "learning_rate": 6.989849428912908e-06, | |
| "loss": 0.0237, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 1.3023872679045092, | |
| "grad_norm": 23.40792797539694, | |
| "learning_rate": 6.975670378703993e-06, | |
| "loss": 0.0529, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 1.3050397877984086, | |
| "grad_norm": 143.75143378518493, | |
| "learning_rate": 6.961472475863406e-06, | |
| "loss": 0.2797, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 1.3076923076923077, | |
| "grad_norm": 54.67114877890433, | |
| "learning_rate": 6.947255855873176e-06, | |
| "loss": 0.1684, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 1.3103448275862069, | |
| "grad_norm": 59.30945229065993, | |
| "learning_rate": 6.933020654393941e-06, | |
| "loss": 0.1363, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 1.3129973474801062, | |
| "grad_norm": 65.62023875166965, | |
| "learning_rate": 6.918767007263646e-06, | |
| "loss": 0.1953, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 1.3156498673740054, | |
| "grad_norm": 85.40564891055027, | |
| "learning_rate": 6.904495050496258e-06, | |
| "loss": 0.1086, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 1.3183023872679045, | |
| "grad_norm": 81.35706508150807, | |
| "learning_rate": 6.8902049202804574e-06, | |
| "loss": 0.1934, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 1.3209549071618036, | |
| "grad_norm": 30.124047496096946, | |
| "learning_rate": 6.875896752978345e-06, | |
| "loss": 0.0448, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 1.323607427055703, | |
| "grad_norm": 38.712032044430956, | |
| "learning_rate": 6.861570685124135e-06, | |
| "loss": 0.0774, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 1.3262599469496021, | |
| "grad_norm": 31.235671018858355, | |
| "learning_rate": 6.847226853422863e-06, | |
| "loss": 0.1135, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 1.3289124668435013, | |
| "grad_norm": 56.599902608952924, | |
| "learning_rate": 6.832865394749065e-06, | |
| "loss": 0.0363, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 1.3315649867374004, | |
| "grad_norm": 26.43153589589585, | |
| "learning_rate": 6.8184864461454866e-06, | |
| "loss": 0.0801, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 1.3342175066312998, | |
| "grad_norm": 55.08537006784008, | |
| "learning_rate": 6.804090144821772e-06, | |
| "loss": 0.2674, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 1.336870026525199, | |
| "grad_norm": 43.44417619289478, | |
| "learning_rate": 6.7896766281531435e-06, | |
| "loss": 0.0734, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 1.339522546419098, | |
| "grad_norm": 14.012160398194428, | |
| "learning_rate": 6.775246033679105e-06, | |
| "loss": 0.0181, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 1.3421750663129974, | |
| "grad_norm": 36.88645373124903, | |
| "learning_rate": 6.760798499102121e-06, | |
| "loss": 0.1148, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 1.3448275862068966, | |
| "grad_norm": 72.27770295224616, | |
| "learning_rate": 6.7463341622863074e-06, | |
| "loss": 0.1224, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 1.3474801061007957, | |
| "grad_norm": 21.31876807638229, | |
| "learning_rate": 6.7318531612561145e-06, | |
| "loss": 0.069, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 1.350132625994695, | |
| "grad_norm": 70.60638710522164, | |
| "learning_rate": 6.717355634195004e-06, | |
| "loss": 0.1149, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 1.3527851458885942, | |
| "grad_norm": 7.4216425190137825, | |
| "learning_rate": 6.702841719444141e-06, | |
| "loss": 0.027, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 1.3554376657824934, | |
| "grad_norm": 27.954485526799452, | |
| "learning_rate": 6.688311555501064e-06, | |
| "loss": 0.0665, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 1.3580901856763925, | |
| "grad_norm": 77.339119906972, | |
| "learning_rate": 6.673765281018373e-06, | |
| "loss": 0.2578, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 1.3607427055702916, | |
| "grad_norm": 65.8399698675993, | |
| "learning_rate": 6.659203034802397e-06, | |
| "loss": 0.1517, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 1.363395225464191, | |
| "grad_norm": 16.6028638970233, | |
| "learning_rate": 6.644624955811873e-06, | |
| "loss": 0.028, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 1.3660477453580901, | |
| "grad_norm": 42.46427097861889, | |
| "learning_rate": 6.630031183156628e-06, | |
| "loss": 0.1234, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 1.3687002652519893, | |
| "grad_norm": 8.305973806105712, | |
| "learning_rate": 6.615421856096231e-06, | |
| "loss": 0.0184, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 1.3713527851458887, | |
| "grad_norm": 48.942881839709344, | |
| "learning_rate": 6.6007971140386915e-06, | |
| "loss": 0.0818, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 1.3740053050397878, | |
| "grad_norm": 178.63453863601057, | |
| "learning_rate": 6.586157096539105e-06, | |
| "loss": 0.1318, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 1.376657824933687, | |
| "grad_norm": 84.05711711958848, | |
| "learning_rate": 6.571501943298335e-06, | |
| "loss": 0.2545, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 1.3793103448275863, | |
| "grad_norm": 46.392968542735076, | |
| "learning_rate": 6.556831794161678e-06, | |
| "loss": 0.109, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 1.3819628647214854, | |
| "grad_norm": 20.063150286169318, | |
| "learning_rate": 6.542146789117524e-06, | |
| "loss": 0.0473, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 1.3846153846153846, | |
| "grad_norm": 30.01502731273673, | |
| "learning_rate": 6.527447068296026e-06, | |
| "loss": 0.1065, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 1.387267904509284, | |
| "grad_norm": 51.91080145181505, | |
| "learning_rate": 6.512732771967758e-06, | |
| "loss": 0.0388, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 1.389920424403183, | |
| "grad_norm": 17.466322377716512, | |
| "learning_rate": 6.498004040542385e-06, | |
| "loss": 0.0212, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 1.3925729442970822, | |
| "grad_norm": 156.28020384461885, | |
| "learning_rate": 6.483261014567311e-06, | |
| "loss": 0.3644, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 1.3952254641909814, | |
| "grad_norm": 81.09681975348255, | |
| "learning_rate": 6.4685038347263495e-06, | |
| "loss": 0.2776, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 1.3978779840848805, | |
| "grad_norm": 115.66362572251657, | |
| "learning_rate": 6.453732641838372e-06, | |
| "loss": 0.1573, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 1.4005305039787799, | |
| "grad_norm": 21.424754573843853, | |
| "learning_rate": 6.4389475768559675e-06, | |
| "loss": 0.0406, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 1.403183023872679, | |
| "grad_norm": 13.499969541805338, | |
| "learning_rate": 6.4241487808641044e-06, | |
| "loss": 0.018, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 1.4058355437665782, | |
| "grad_norm": 59.696008183372705, | |
| "learning_rate": 6.409336395078771e-06, | |
| "loss": 0.0787, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 1.4084880636604775, | |
| "grad_norm": 52.5406584254, | |
| "learning_rate": 6.394510560845637e-06, | |
| "loss": 0.0692, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 1.4111405835543767, | |
| "grad_norm": 86.63774361633952, | |
| "learning_rate": 6.379671419638703e-06, | |
| "loss": 0.0691, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 1.4137931034482758, | |
| "grad_norm": 56.58793952611304, | |
| "learning_rate": 6.3648191130589524e-06, | |
| "loss": 0.1705, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 1.4164456233421752, | |
| "grad_norm": 18.35003530465239, | |
| "learning_rate": 6.349953782832991e-06, | |
| "loss": 0.0327, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 1.4190981432360743, | |
| "grad_norm": 32.415311247602894, | |
| "learning_rate": 6.335075570811708e-06, | |
| "loss": 0.07, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 1.4217506631299734, | |
| "grad_norm": 13.264443399080834, | |
| "learning_rate": 6.320184618968915e-06, | |
| "loss": 0.03, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 1.4244031830238728, | |
| "grad_norm": 87.79800644637659, | |
| "learning_rate": 6.305281069399989e-06, | |
| "loss": 0.1208, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 1.427055702917772, | |
| "grad_norm": 51.915392437201334, | |
| "learning_rate": 6.290365064320521e-06, | |
| "loss": 0.1035, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 1.429708222811671, | |
| "grad_norm": 11.450320048154701, | |
| "learning_rate": 6.275436746064957e-06, | |
| "loss": 0.0221, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 1.4323607427055702, | |
| "grad_norm": 29.51794007071361, | |
| "learning_rate": 6.26049625708524e-06, | |
| "loss": 0.0686, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 1.4350132625994694, | |
| "grad_norm": 22.617943845931112, | |
| "learning_rate": 6.245543739949455e-06, | |
| "loss": 0.0348, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 1.4376657824933687, | |
| "grad_norm": 24.69228562463656, | |
| "learning_rate": 6.2305793373404564e-06, | |
| "loss": 0.0283, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 1.4403183023872679, | |
| "grad_norm": 11.604045977899252, | |
| "learning_rate": 6.215603192054523e-06, | |
| "loss": 0.0282, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 1.442970822281167, | |
| "grad_norm": 5.625542599219923, | |
| "learning_rate": 6.2006154469999824e-06, | |
| "loss": 0.0136, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 1.4456233421750664, | |
| "grad_norm": 42.90878731413574, | |
| "learning_rate": 6.185616245195849e-06, | |
| "loss": 0.0219, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 1.4482758620689655, | |
| "grad_norm": 61.464101125599086, | |
| "learning_rate": 6.17060572977047e-06, | |
| "loss": 0.0689, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 1.4509283819628647, | |
| "grad_norm": 7.954745688919827, | |
| "learning_rate": 6.155584043960145e-06, | |
| "loss": 0.017, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 1.453580901856764, | |
| "grad_norm": 11.291047667367858, | |
| "learning_rate": 6.140551331107767e-06, | |
| "loss": 0.0194, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 1.4562334217506632, | |
| "grad_norm": 23.052899303053362, | |
| "learning_rate": 6.125507734661458e-06, | |
| "loss": 0.039, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 1.4588859416445623, | |
| "grad_norm": 11.346473587654254, | |
| "learning_rate": 6.110453398173188e-06, | |
| "loss": 0.0134, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 1.4615384615384617, | |
| "grad_norm": 103.5653471207511, | |
| "learning_rate": 6.095388465297418e-06, | |
| "loss": 0.1774, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 1.4641909814323608, | |
| "grad_norm": 90.55736048848877, | |
| "learning_rate": 6.080313079789723e-06, | |
| "loss": 0.0903, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 1.46684350132626, | |
| "grad_norm": 13.021051610454892, | |
| "learning_rate": 6.0652273855054225e-06, | |
| "loss": 0.019, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 1.469496021220159, | |
| "grad_norm": 18.86509087312211, | |
| "learning_rate": 6.050131526398202e-06, | |
| "loss": 0.0241, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 1.4721485411140582, | |
| "grad_norm": 70.06520538380781, | |
| "learning_rate": 6.035025646518747e-06, | |
| "loss": 0.0807, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 1.4748010610079576, | |
| "grad_norm": 25.854491247244212, | |
| "learning_rate": 6.019909890013367e-06, | |
| "loss": 0.0162, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 1.4774535809018567, | |
| "grad_norm": 116.04479411723888, | |
| "learning_rate": 6.004784401122613e-06, | |
| "loss": 0.352, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 1.4801061007957559, | |
| "grad_norm": 62.58830551899949, | |
| "learning_rate": 5.9896493241799115e-06, | |
| "loss": 0.1359, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 1.4827586206896552, | |
| "grad_norm": 5.004233818932882, | |
| "learning_rate": 5.974504803610178e-06, | |
| "loss": 0.0155, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 1.4854111405835544, | |
| "grad_norm": 6.793190238020371, | |
| "learning_rate": 5.959350983928446e-06, | |
| "loss": 0.0187, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 1.4880636604774535, | |
| "grad_norm": 31.60468402351343, | |
| "learning_rate": 5.944188009738483e-06, | |
| "loss": 0.0394, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 1.490716180371353, | |
| "grad_norm": 86.52370400694087, | |
| "learning_rate": 5.929016025731413e-06, | |
| "loss": 0.0833, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 1.493368700265252, | |
| "grad_norm": 45.707565727810554, | |
| "learning_rate": 5.913835176684335e-06, | |
| "loss": 0.091, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 1.4960212201591512, | |
| "grad_norm": 10.495491117506214, | |
| "learning_rate": 5.898645607458941e-06, | |
| "loss": 0.0196, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 1.4986737400530503, | |
| "grad_norm": 16.83038431961822, | |
| "learning_rate": 5.883447463000136e-06, | |
| "loss": 0.0205, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 1.5013262599469495, | |
| "grad_norm": 22.634803756903562, | |
| "learning_rate": 5.8682408883346535e-06, | |
| "loss": 0.0469, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 1.5039787798408488, | |
| "grad_norm": 63.774457482319505, | |
| "learning_rate": 5.8530260285696674e-06, | |
| "loss": 0.1572, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 1.506631299734748, | |
| "grad_norm": 10.116645955560424, | |
| "learning_rate": 5.837803028891418e-06, | |
| "loss": 0.02, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 1.509283819628647, | |
| "grad_norm": 19.728441774666695, | |
| "learning_rate": 5.822572034563812e-06, | |
| "loss": 0.0286, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 1.5119363395225465, | |
| "grad_norm": 30.614280069923158, | |
| "learning_rate": 5.807333190927054e-06, | |
| "loss": 0.0245, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 1.5145888594164456, | |
| "grad_norm": 70.45242902971724, | |
| "learning_rate": 5.792086643396238e-06, | |
| "loss": 0.1806, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 1.5172413793103448, | |
| "grad_norm": 35.46015186362154, | |
| "learning_rate": 5.776832537459983e-06, | |
| "loss": 0.0656, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 1.5198938992042441, | |
| "grad_norm": 31.43379227238137, | |
| "learning_rate": 5.761571018679025e-06, | |
| "loss": 0.0285, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 1.5225464190981433, | |
| "grad_norm": 12.99368022986434, | |
| "learning_rate": 5.746302232684843e-06, | |
| "loss": 0.0148, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 1.5251989389920424, | |
| "grad_norm": 12.068414987458594, | |
| "learning_rate": 5.731026325178255e-06, | |
| "loss": 0.0137, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 1.5278514588859418, | |
| "grad_norm": 5.802944986372316, | |
| "learning_rate": 5.715743441928041e-06, | |
| "loss": 0.0117, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 1.530503978779841, | |
| "grad_norm": 114.65209018234462, | |
| "learning_rate": 5.700453728769545e-06, | |
| "loss": 0.1301, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 1.53315649867374, | |
| "grad_norm": 43.46336042530884, | |
| "learning_rate": 5.6851573316032845e-06, | |
| "loss": 0.1735, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 1.5358090185676394, | |
| "grad_norm": 17.074443072834615, | |
| "learning_rate": 5.669854396393559e-06, | |
| "loss": 0.0184, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 1.5384615384615383, | |
| "grad_norm": 45.630425577025065, | |
| "learning_rate": 5.654545069167056e-06, | |
| "loss": 0.0325, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 1.5411140583554377, | |
| "grad_norm": 8.023078253752775, | |
| "learning_rate": 5.639229496011456e-06, | |
| "loss": 0.011, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 1.5437665782493368, | |
| "grad_norm": 78.57666705563578, | |
| "learning_rate": 5.623907823074044e-06, | |
| "loss": 0.1044, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 1.546419098143236, | |
| "grad_norm": 60.788739153423315, | |
| "learning_rate": 5.60858019656031e-06, | |
| "loss": 0.0458, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 1.5490716180371353, | |
| "grad_norm": 17.4097205779028, | |
| "learning_rate": 5.593246762732558e-06, | |
| "loss": 0.0199, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 1.5517241379310345, | |
| "grad_norm": 77.40807180099829, | |
| "learning_rate": 5.577907667908505e-06, | |
| "loss": 0.1016, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 1.5543766578249336, | |
| "grad_norm": 16.51575480381425, | |
| "learning_rate": 5.562563058459884e-06, | |
| "loss": 0.0185, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 1.557029177718833, | |
| "grad_norm": 22.691621813554946, | |
| "learning_rate": 5.5472130808110595e-06, | |
| "loss": 0.0257, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 1.5596816976127321, | |
| "grad_norm": 18.114026991315463, | |
| "learning_rate": 5.531857881437612e-06, | |
| "loss": 0.0225, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 1.5623342175066313, | |
| "grad_norm": 17.600714811757264, | |
| "learning_rate": 5.516497606864959e-06, | |
| "loss": 0.0124, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 1.5649867374005306, | |
| "grad_norm": 12.495110023622034, | |
| "learning_rate": 5.50113240366694e-06, | |
| "loss": 0.0167, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 1.5676392572944295, | |
| "grad_norm": 22.72941330608339, | |
| "learning_rate": 5.48576241846443e-06, | |
| "loss": 0.0234, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 1.570291777188329, | |
| "grad_norm": 35.24068532108153, | |
| "learning_rate": 5.470387797923934e-06, | |
| "loss": 0.0259, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 1.5729442970822283, | |
| "grad_norm": 45.77299859804751, | |
| "learning_rate": 5.4550086887561874e-06, | |
| "loss": 0.0357, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 1.5755968169761272, | |
| "grad_norm": 2.646503502463956, | |
| "learning_rate": 5.4396252377147615e-06, | |
| "loss": 0.0088, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 1.5782493368700266, | |
| "grad_norm": 88.79199080920962, | |
| "learning_rate": 5.424237591594658e-06, | |
| "loss": 0.1504, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 1.5809018567639257, | |
| "grad_norm": 13.897118361241494, | |
| "learning_rate": 5.4088458972309085e-06, | |
| "loss": 0.0119, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 1.5835543766578248, | |
| "grad_norm": 5.274439487089943, | |
| "learning_rate": 5.39345030149718e-06, | |
| "loss": 0.0105, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 1.5862068965517242, | |
| "grad_norm": 17.23007233498294, | |
| "learning_rate": 5.378050951304356e-06, | |
| "loss": 0.0175, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 1.5888594164456233, | |
| "grad_norm": 31.29780553927715, | |
| "learning_rate": 5.362647993599159e-06, | |
| "loss": 0.0272, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 1.5915119363395225, | |
| "grad_norm": 7.79459656888984, | |
| "learning_rate": 5.347241575362729e-06, | |
| "loss": 0.0117, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 1.5941644562334218, | |
| "grad_norm": 41.63624115729426, | |
| "learning_rate": 5.3318318436092335e-06, | |
| "loss": 0.0433, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 1.596816976127321, | |
| "grad_norm": 5.238630430255536, | |
| "learning_rate": 5.31641894538445e-06, | |
| "loss": 0.0095, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 1.5994694960212201, | |
| "grad_norm": 120.54452188576255, | |
| "learning_rate": 5.30100302776438e-06, | |
| "loss": 0.0744, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 1.6021220159151195, | |
| "grad_norm": 6.787579734792685, | |
| "learning_rate": 5.285584237853832e-06, | |
| "loss": 0.0101, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 1.6047745358090184, | |
| "grad_norm": 51.765039338574724, | |
| "learning_rate": 5.270162722785026e-06, | |
| "loss": 0.0676, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 1.6074270557029178, | |
| "grad_norm": 136.08607486651354, | |
| "learning_rate": 5.254738629716186e-06, | |
| "loss": 0.1054, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 1.6100795755968171, | |
| "grad_norm": 0.9150999498569187, | |
| "learning_rate": 5.239312105830135e-06, | |
| "loss": 0.007, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 1.612732095490716, | |
| "grad_norm": 26.51142157331174, | |
| "learning_rate": 5.223883298332894e-06, | |
| "loss": 0.0099, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 1.6153846153846154, | |
| "grad_norm": 43.90606351327701, | |
| "learning_rate": 5.208452354452275e-06, | |
| "loss": 0.0442, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 1.6180371352785146, | |
| "grad_norm": 38.92615571615667, | |
| "learning_rate": 5.19301942143647e-06, | |
| "loss": 0.0711, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 1.6206896551724137, | |
| "grad_norm": 22.94695341444744, | |
| "learning_rate": 5.1775846465526625e-06, | |
| "loss": 0.0158, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 1.623342175066313, | |
| "grad_norm": 86.51906441493873, | |
| "learning_rate": 5.162148177085604e-06, | |
| "loss": 0.1054, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 1.6259946949602122, | |
| "grad_norm": 290.1224367723256, | |
| "learning_rate": 5.14671016033622e-06, | |
| "loss": 0.1045, | |
| "step": 613 | |
| }, | |
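The grad_norm column also spikes: 290.12 at step 613 above, against values under 1 a few steps away (0.9151 at step 607, 0.5545 at step 616). A crude outlier flag over the same log, again assuming the trainer_state.json layout; the 10×-median threshold is an arbitrary illustration, not anything the log prescribes:

```python
# Minimal sketch flagging grad_norm outliers; same assumed trainer_state.json
# layout as above, and the 10x-median threshold is an arbitrary illustration.
import json
import statistics

with open("trainer_state.json") as f:
    records = [r for r in json.load(f)["log_history"] if "grad_norm" in r]

median_norm = statistics.median(r["grad_norm"] for r in records)
for r in records:
    if r["grad_norm"] > 10 * median_norm:
        print(f"step {r['step']}: grad_norm {r['grad_norm']:.1f} "
              f"(median {median_norm:.1f})")
```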
| { | |
| "epoch": 1.6286472148541113, | |
| "grad_norm": 11.977547474086224, | |
| "learning_rate": 5.131270743620201e-06, | |
| "loss": 0.0105, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 1.6312997347480107, | |
| "grad_norm": 17.757802345886905, | |
| "learning_rate": 5.115830074266592e-06, | |
| "loss": 0.0148, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 1.6339522546419099, | |
| "grad_norm": 0.5545128316631375, | |
| "learning_rate": 5.100388299616395e-06, | |
| "loss": 0.0061, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 1.636604774535809, | |
| "grad_norm": 28.552737920035725, | |
| "learning_rate": 5.084945567021159e-06, | |
| "loss": 0.0283, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 1.6392572944297084, | |
| "grad_norm": 18.29378685614396, | |
| "learning_rate": 5.069502023841576e-06, | |
| "loss": 0.0147, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 1.6419098143236073, | |
| "grad_norm": 5.186593395870576, | |
| "learning_rate": 5.054057817446067e-06, | |
| "loss": 0.008, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 1.6445623342175066, | |
| "grad_norm": 2.491298499220129, | |
| "learning_rate": 5.038613095209392e-06, | |
| "loss": 0.0065, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 1.647214854111406, | |
| "grad_norm": 7.93088074804503, | |
| "learning_rate": 5.0231680045112174e-06, | |
| "loss": 0.0083, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 1.649867374005305, | |
| "grad_norm": 15.212074645043778, | |
| "learning_rate": 5.00772269273474e-06, | |
| "loss": 0.0066, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 1.6525198938992043, | |
| "grad_norm": 3.0927305634307, | |
| "learning_rate": 4.9922773072652615e-06, | |
| "loss": 0.007, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 1.6551724137931034, | |
| "grad_norm": 15.601264050470213, | |
| "learning_rate": 4.976831995488784e-06, | |
| "loss": 0.0168, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 1.6578249336870026, | |
| "grad_norm": 6.507108555037176, | |
| "learning_rate": 4.961386904790611e-06, | |
| "loss": 0.0083, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 1.660477453580902, | |
| "grad_norm": 34.51706318170982, | |
| "learning_rate": 4.945942182553932e-06, | |
| "loss": 0.0172, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 1.663129973474801, | |
| "grad_norm": 114.9051005167022, | |
| "learning_rate": 4.9304979761584256e-06, | |
| "loss": 0.1928, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 1.6657824933687002, | |
| "grad_norm": 71.54065943942709, | |
| "learning_rate": 4.915054432978842e-06, | |
| "loss": 0.088, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 1.6684350132625996, | |
| "grad_norm": 10.020474994710991, | |
| "learning_rate": 4.899611700383608e-06, | |
| "loss": 0.0086, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 1.6710875331564987, | |
| "grad_norm": 1.1739761766729617, | |
| "learning_rate": 4.884169925733409e-06, | |
| "loss": 0.0056, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 1.6737400530503979, | |
| "grad_norm": 68.80076453198339, | |
| "learning_rate": 4.868729256379802e-06, | |
| "loss": 0.1049, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 1.6763925729442972, | |
| "grad_norm": 139.14288646342686, | |
| "learning_rate": 4.8532898396637815e-06, | |
| "loss": 0.1044, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 1.6790450928381961, | |
| "grad_norm": 83.91312560876246, | |
| "learning_rate": 4.837851822914397e-06, | |
| "loss": 0.1987, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 1.6816976127320955, | |
| "grad_norm": 159.7343179908265, | |
| "learning_rate": 4.822415353447339e-06, | |
| "loss": 0.1106, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 1.6843501326259946, | |
| "grad_norm": 74.40965665746292, | |
| "learning_rate": 4.80698057856353e-06, | |
| "loss": 0.1142, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 1.6870026525198938, | |
| "grad_norm": 7.720256583997108, | |
| "learning_rate": 4.791547645547727e-06, | |
| "loss": 0.0075, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 1.6896551724137931, | |
| "grad_norm": 14.03927696707269, | |
| "learning_rate": 4.7761167016671064e-06, | |
| "loss": 0.0135, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 1.6923076923076923, | |
| "grad_norm": 50.43479529738081, | |
| "learning_rate": 4.760687894169867e-06, | |
| "loss": 0.0599, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 1.6949602122015914, | |
| "grad_norm": 2.9711811471976164, | |
| "learning_rate": 4.7452613702838166e-06, | |
| "loss": 0.007, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 1.6976127320954908, | |
| "grad_norm": 56.36850978467863, | |
| "learning_rate": 4.729837277214975e-06, | |
| "loss": 0.2037, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 1.70026525198939, | |
| "grad_norm": 2.503854387459047, | |
| "learning_rate": 4.7144157621461694e-06, | |
| "loss": 0.0052, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 1.702917771883289, | |
| "grad_norm": 19.199657130375417, | |
| "learning_rate": 4.698996972235622e-06, | |
| "loss": 0.021, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 1.7055702917771884, | |
| "grad_norm": 46.97091730042586, | |
| "learning_rate": 4.683581054615551e-06, | |
| "loss": 0.0665, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 1.7082228116710876, | |
| "grad_norm": 23.984126690129045, | |
| "learning_rate": 4.668168156390769e-06, | |
| "loss": 0.0226, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 1.7108753315649867, | |
| "grad_norm": 5.35881830204344, | |
| "learning_rate": 4.652758424637271e-06, | |
| "loss": 0.0072, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 1.713527851458886, | |
| "grad_norm": 25.62815283007476, | |
| "learning_rate": 4.637352006400842e-06, | |
| "loss": 0.017, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 1.716180371352785, | |
| "grad_norm": 29.674007492866465, | |
| "learning_rate": 4.621949048695646e-06, | |
| "loss": 0.0165, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 1.7188328912466844, | |
| "grad_norm": 5.834743927193598, | |
| "learning_rate": 4.606549698502824e-06, | |
| "loss": 0.0062, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 1.7214854111405835, | |
| "grad_norm": 189.28835878384356, | |
| "learning_rate": 4.5911541027690914e-06, | |
| "loss": 0.0622, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 1.7241379310344827, | |
| "grad_norm": 44.082039517974614, | |
| "learning_rate": 4.575762408405343e-06, | |
| "loss": 0.0288, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 1.726790450928382, | |
| "grad_norm": 21.37538637947597, | |
| "learning_rate": 4.56037476228524e-06, | |
| "loss": 0.0122, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 1.7294429708222812, | |
| "grad_norm": 12.073001602352482, | |
| "learning_rate": 4.544991311243815e-06, | |
| "loss": 0.0114, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 1.7320954907161803, | |
| "grad_norm": 32.74609082616948, | |
| "learning_rate": 4.529612202076069e-06, | |
| "loss": 0.0219, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 1.7347480106100797, | |
| "grad_norm": 17.151939791815195, | |
| "learning_rate": 4.514237581535571e-06, | |
| "loss": 0.0112, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 1.7374005305039788, | |
| "grad_norm": 51.470078034531205, | |
| "learning_rate": 4.498867596333061e-06, | |
| "loss": 0.0572, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 1.740053050397878, | |
| "grad_norm": 4.38414008873116, | |
| "learning_rate": 4.4835023931350425e-06, | |
| "loss": 0.0091, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 1.7427055702917773, | |
| "grad_norm": 12.374479534433322, | |
| "learning_rate": 4.468142118562389e-06, | |
| "loss": 0.0149, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 1.7453580901856764, | |
| "grad_norm": 48.35886122098641, | |
| "learning_rate": 4.452786919188943e-06, | |
| "loss": 0.0307, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 1.7480106100795756, | |
| "grad_norm": 2.0303745805661877, | |
| "learning_rate": 4.437436941540116e-06, | |
| "loss": 0.0058, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 1.750663129973475, | |
| "grad_norm": 1.335578603780813, | |
| "learning_rate": 4.422092332091497e-06, | |
| "loss": 0.0055, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 1.7533156498673739, | |
| "grad_norm": 19.786034815955663, | |
| "learning_rate": 4.4067532372674434e-06, | |
| "loss": 0.0114, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 1.7559681697612732, | |
| "grad_norm": 52.83937383127931, | |
| "learning_rate": 4.391419803439691e-06, | |
| "loss": 0.0722, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 1.7586206896551724, | |
| "grad_norm": 27.417949020037334, | |
| "learning_rate": 4.3760921769259585e-06, | |
| "loss": 0.0186, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 1.7612732095490715, | |
| "grad_norm": 58.386913018853946, | |
| "learning_rate": 4.360770503988545e-06, | |
| "loss": 0.0392, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 1.7639257294429709, | |
| "grad_norm": 96.73501293878469, | |
| "learning_rate": 4.345454930832946e-06, | |
| "loss": 0.184, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 1.76657824933687, | |
| "grad_norm": 23.585654873677235, | |
| "learning_rate": 4.3301456036064415e-06, | |
| "loss": 0.0215, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 1.7692307692307692, | |
| "grad_norm": 64.61869518514479, | |
| "learning_rate": 4.314842668396716e-06, | |
| "loss": 0.0471, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 1.7718832891246685, | |
| "grad_norm": 58.04401066189464, | |
| "learning_rate": 4.299546271230457e-06, | |
| "loss": 0.021, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 1.7745358090185677, | |
| "grad_norm": 103.78501696678265, | |
| "learning_rate": 4.28425655807196e-06, | |
| "loss": 0.0148, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 1.7771883289124668, | |
| "grad_norm": 14.029544188273057, | |
| "learning_rate": 4.268973674821747e-06, | |
| "loss": 0.0115, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 1.7798408488063662, | |
| "grad_norm": 16.577893251200816, | |
| "learning_rate": 4.2536977673151594e-06, | |
| "loss": 0.012, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 1.782493368700265, | |
| "grad_norm": 2.207183182252488, | |
| "learning_rate": 4.2384289813209754e-06, | |
| "loss": 0.0053, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 1.7851458885941645, | |
| "grad_norm": 110.20206163510139, | |
| "learning_rate": 4.223167462540018e-06, | |
| "loss": 0.103, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 1.7877984084880638, | |
| "grad_norm": 3.5616442817864025, | |
| "learning_rate": 4.207913356603762e-06, | |
| "loss": 0.0065, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 1.7904509283819627, | |
| "grad_norm": 20.720221099796785, | |
| "learning_rate": 4.192666809072948e-06, | |
| "loss": 0.0124, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 1.793103448275862, | |
| "grad_norm": 0.2791747293398447, | |
| "learning_rate": 4.1774279654361895e-06, | |
| "loss": 0.0043, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 1.7957559681697612, | |
| "grad_norm": 46.785793700335034, | |
| "learning_rate": 4.162196971108584e-06, | |
| "loss": 0.0271, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 1.7984084880636604, | |
| "grad_norm": 3.6782612061623436, | |
| "learning_rate": 4.146973971430333e-06, | |
| "loss": 0.006, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 1.8010610079575597, | |
| "grad_norm": 1.6676424475320761, | |
| "learning_rate": 4.131759111665349e-06, | |
| "loss": 0.0044, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 1.8037135278514589, | |
| "grad_norm": 3.8211009260758186, | |
| "learning_rate": 4.116552536999865e-06, | |
| "loss": 0.0043, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 1.806366047745358, | |
| "grad_norm": 11.713632694380282, | |
| "learning_rate": 4.101354392541061e-06, | |
| "loss": 0.0081, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 1.8090185676392574, | |
| "grad_norm": 25.78792999904907, | |
| "learning_rate": 4.086164823315667e-06, | |
| "loss": 0.0158, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 1.8116710875331565, | |
| "grad_norm": 0.2826021010784135, | |
| "learning_rate": 4.070983974268588e-06, | |
| "loss": 0.0041, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 1.8143236074270557, | |
| "grad_norm": 3.9465866260024454, | |
| "learning_rate": 4.055811990261518e-06, | |
| "loss": 0.0049, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 1.816976127320955, | |
| "grad_norm": 16.479422729029334, | |
| "learning_rate": 4.040649016071555e-06, | |
| "loss": 0.0108, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 1.819628647214854, | |
| "grad_norm": 3.9386860667821324, | |
| "learning_rate": 4.025495196389824e-06, | |
| "loss": 0.005, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 1.8222811671087533, | |
| "grad_norm": 0.7007528159185257, | |
| "learning_rate": 4.010350675820091e-06, | |
| "loss": 0.0041, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 1.8249336870026527, | |
| "grad_norm": 52.767449350981636, | |
| "learning_rate": 3.9952155988773876e-06, | |
| "loss": 0.0278, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 1.8275862068965516, | |
| "grad_norm": 12.809021845146276, | |
| "learning_rate": 3.980090109986634e-06, | |
| "loss": 0.0097, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 1.830238726790451, | |
| "grad_norm": 1.936816479231782, | |
| "learning_rate": 3.964974353481254e-06, | |
| "loss": 0.0045, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 1.83289124668435, | |
| "grad_norm": 11.429369395613923, | |
| "learning_rate": 3.949868473601801e-06, | |
| "loss": 0.0072, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 1.8355437665782492, | |
| "grad_norm": 3.5877731797978503, | |
| "learning_rate": 3.934772614494581e-06, | |
| "loss": 0.0042, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 1.8381962864721486, | |
| "grad_norm": 1.9103718956330118, | |
| "learning_rate": 3.9196869202102775e-06, | |
| "loss": 0.0043, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 1.8408488063660478, | |
| "grad_norm": 1.3166708641132006, | |
| "learning_rate": 3.904611534702583e-06, | |
| "loss": 0.004, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 1.843501326259947, | |
| "grad_norm": 7.608358241578584, | |
| "learning_rate": 3.889546601826813e-06, | |
| "loss": 0.0063, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 1.8461538461538463, | |
| "grad_norm": 36.4981318943651, | |
| "learning_rate": 3.874492265338544e-06, | |
| "loss": 0.0111, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 1.8488063660477454, | |
| "grad_norm": 10.617939703321683, | |
| "learning_rate": 3.859448668892233e-06, | |
| "loss": 0.0061, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 1.8514588859416445, | |
| "grad_norm": 10.765319080244412, | |
| "learning_rate": 3.844415956039856e-06, | |
| "loss": 0.0079, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 1.854111405835544, | |
| "grad_norm": 244.4282955528186, | |
| "learning_rate": 3.829394270229531e-06, | |
| "loss": 0.1541, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 1.8567639257294428, | |
| "grad_norm": 0.3089211191680009, | |
| "learning_rate": 3.814383754804152e-06, | |
| "loss": 0.0035, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 1.8594164456233422, | |
| "grad_norm": 80.54486154944388, | |
| "learning_rate": 3.79938455300002e-06, | |
| "loss": 0.058, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 1.8620689655172413, | |
| "grad_norm": 130.3670434649658, | |
| "learning_rate": 3.7843968079454773e-06, | |
| "loss": 0.0597, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 1.8647214854111405, | |
| "grad_norm": 0.9154132364017707, | |
| "learning_rate": 3.7694206626595444e-06, | |
| "loss": 0.0037, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 1.8673740053050398, | |
| "grad_norm": 0.37355475992921444, | |
| "learning_rate": 3.7544562600505475e-06, | |
| "loss": 0.0036, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 1.870026525198939, | |
| "grad_norm": 0.5950336166619362, | |
| "learning_rate": 3.7395037429147615e-06, | |
| "loss": 0.0035, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 1.8726790450928381, | |
| "grad_norm": 0.39575559342124034, | |
| "learning_rate": 3.724563253935045e-06, | |
| "loss": 0.0034, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 1.8753315649867375, | |
| "grad_norm": 0.9995121041132633, | |
| "learning_rate": 3.7096349356794803e-06, | |
| "loss": 0.0035, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 1.8779840848806366, | |
| "grad_norm": 3.482614330037422, | |
| "learning_rate": 3.694718930600012e-06, | |
| "loss": 0.0039, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 1.8806366047745358, | |
| "grad_norm": 198.66010820962924, | |
| "learning_rate": 3.6798153810310854e-06, | |
| "loss": 0.0365, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 1.8832891246684351, | |
| "grad_norm": 72.9259129426177, | |
| "learning_rate": 3.6649244291882923e-06, | |
| "loss": 0.1114, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 1.8859416445623343, | |
| "grad_norm": 0.251348937912663, | |
| "learning_rate": 3.6500462171670104e-06, | |
| "loss": 0.0031, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 1.8885941644562334, | |
| "grad_norm": 1.9300170500901646, | |
| "learning_rate": 3.6351808869410484e-06, | |
| "loss": 0.0038, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 1.8912466843501328, | |
| "grad_norm": 3.728616213792532, | |
| "learning_rate": 3.6203285803612975e-06, | |
| "loss": 0.0042, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 1.8938992042440317, | |
| "grad_norm": 0.18388707693647469, | |
| "learning_rate": 3.605489439154365e-06, | |
| "loss": 0.003, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 1.896551724137931, | |
| "grad_norm": 0.16443647926995575, | |
| "learning_rate": 3.5906636049212316e-06, | |
| "loss": 0.0029, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 1.8992042440318302, | |
| "grad_norm": 0.5481365787826281, | |
| "learning_rate": 3.575851219135898e-06, | |
| "loss": 0.0031, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 1.9018567639257293, | |
| "grad_norm": 1.30336442150029, | |
| "learning_rate": 3.5610524231440324e-06, | |
| "loss": 0.003, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 1.9045092838196287, | |
| "grad_norm": 1.0279286556760354, | |
| "learning_rate": 3.5462673581616298e-06, | |
| "loss": 0.0032, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 1.9071618037135278, | |
| "grad_norm": 0.2387873667922055, | |
| "learning_rate": 3.5314961652736517e-06, | |
| "loss": 0.0028, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 1.909814323607427, | |
| "grad_norm": 112.08452825586953, | |
| "learning_rate": 3.5167389854326907e-06, | |
| "loss": 0.0701, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 1.9124668435013263, | |
| "grad_norm": 54.85440904694243, | |
| "learning_rate": 3.501995959457616e-06, | |
| "loss": 0.0214, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 1.9151193633952255, | |
| "grad_norm": 0.36064944531119963, | |
| "learning_rate": 3.487267228032242e-06, | |
| "loss": 0.0028, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 1.9177718832891246, | |
| "grad_norm": 0.1648152951890131, | |
| "learning_rate": 3.472552931703975e-06, | |
| "loss": 0.0027, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 1.920424403183024, | |
| "grad_norm": 4.789528729630756, | |
| "learning_rate": 3.457853210882477e-06, | |
| "loss": 0.0033, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 1.9230769230769231, | |
| "grad_norm": 6.718746177896506, | |
| "learning_rate": 3.443168205838323e-06, | |
| "loss": 0.0031, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 1.9257294429708223, | |
| "grad_norm": 117.80993284549162, | |
| "learning_rate": 3.428498056701665e-06, | |
| "loss": 0.0843, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 1.9283819628647216, | |
| "grad_norm": 41.28211838924019, | |
| "learning_rate": 3.413842903460896e-06, | |
| "loss": 0.014, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 1.9310344827586206, | |
| "grad_norm": 2.4226356040932724, | |
| "learning_rate": 3.39920288596131e-06, | |
| "loss": 0.0032, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 1.93368700265252, | |
| "grad_norm": 2.0264341603877916, | |
| "learning_rate": 3.3845781439037695e-06, | |
| "loss": 0.003, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 1.936339522546419, | |
| "grad_norm": 11.92685326998828, | |
| "learning_rate": 3.369968816843375e-06, | |
| "loss": 0.006, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 1.9389920424403182, | |
| "grad_norm": 5.680403911551653, | |
| "learning_rate": 3.3553750441881266e-06, | |
| "loss": 0.0039, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 1.9416445623342176, | |
| "grad_norm": 168.17807557869415, | |
| "learning_rate": 3.3407969651976045e-06, | |
| "loss": 0.0124, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 1.9442970822281167, | |
| "grad_norm": 126.70096301070858, | |
| "learning_rate": 3.326234718981628e-06, | |
| "loss": 0.0655, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 1.9469496021220158, | |
| "grad_norm": 142.89224558323636, | |
| "learning_rate": 3.311688444498937e-06, | |
| "loss": 0.0327, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 1.9496021220159152, | |
| "grad_norm": 9.294452576988633, | |
| "learning_rate": 3.2971582805558622e-06, | |
| "loss": 0.0049, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 1.9522546419098143, | |
| "grad_norm": 0.17893845574932443, | |
| "learning_rate": 3.2826443658049977e-06, | |
| "loss": 0.0025, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 1.9549071618037135, | |
| "grad_norm": 0.4862476814533373, | |
| "learning_rate": 3.2681468387438876e-06, | |
| "loss": 0.0026, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 1.9575596816976129, | |
| "grad_norm": 4.334114617573384, | |
| "learning_rate": 3.253665837713694e-06, | |
| "loss": 0.0036, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 1.9602122015915118, | |
| "grad_norm": 16.31414878674207, | |
| "learning_rate": 3.239201500897881e-06, | |
| "loss": 0.006, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 1.9628647214854111, | |
| "grad_norm": 2.2212193198739114, | |
| "learning_rate": 3.224753966320898e-06, | |
| "loss": 0.003, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 1.9655172413793105, | |
| "grad_norm": 13.72246494779621, | |
| "learning_rate": 3.2103233718468574e-06, | |
| "loss": 0.0062, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 1.9681697612732094, | |
| "grad_norm": 0.9892211791242377, | |
| "learning_rate": 3.1959098551782285e-06, | |
| "loss": 0.0027, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 1.9708222811671088, | |
| "grad_norm": 0.48347022766567804, | |
| "learning_rate": 3.181513553854514e-06, | |
| "loss": 0.0025, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 1.973474801061008, | |
| "grad_norm": 0.19974265309256214, | |
| "learning_rate": 3.167134605250938e-06, | |
| "loss": 0.0024, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 1.976127320954907, | |
| "grad_norm": 25.992800802872452, | |
| "learning_rate": 3.152773146577138e-06, | |
| "loss": 0.0071, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 1.9787798408488064, | |
| "grad_norm": 5.387653358677239, | |
| "learning_rate": 3.138429314875865e-06, | |
| "loss": 0.0033, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 1.9814323607427056, | |
| "grad_norm": 0.15470426246878025, | |
| "learning_rate": 3.1241032470216564e-06, | |
| "loss": 0.0024, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 1.9840848806366047, | |
| "grad_norm": 0.13718416828161872, | |
| "learning_rate": 3.109795079719544e-06, | |
| "loss": 0.0024, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 1.986737400530504, | |
| "grad_norm": 0.17662517595440433, | |
| "learning_rate": 3.0955049495037435e-06, | |
| "loss": 0.0024, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 1.9893899204244032, | |
| "grad_norm": 4.2249344112976575, | |
| "learning_rate": 3.081232992736355e-06, | |
| "loss": 0.0033, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 1.9920424403183024, | |
| "grad_norm": 0.13347032559827443, | |
| "learning_rate": 3.0669793456060613e-06, | |
| "loss": 0.0023, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 1.9946949602122017, | |
| "grad_norm": 0.1562461041757496, | |
| "learning_rate": 3.052744144126826e-06, | |
| "loss": 0.0023, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 1.9973474801061006, | |
| "grad_norm": 0.16945999077105298, | |
| "learning_rate": 3.0385275241365965e-06, | |
| "loss": 0.0023, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 0.34991663179289223, | |
| "learning_rate": 3.024329621296008e-06, | |
| "loss": 0.0024, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "eval_loss": 1.3204874992370605, | |
| "eval_runtime": 184.491, | |
| "eval_samples_per_second": 11.464, | |
| "eval_steps_per_second": 1.436, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 2.0026525198938994, | |
| "grad_norm": 0.14050094312542755, | |
| "learning_rate": 3.0101505710870914e-06, | |
| "loss": 0.0022, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 2.0053050397877983, | |
| "grad_norm": 0.6183504954610859, | |
| "learning_rate": 2.9959905088119777e-06, | |
| "loss": 0.0023, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 2.0079575596816976, | |
| "grad_norm": 0.2827903552240193, | |
| "learning_rate": 2.981849569591606e-06, | |
| "loss": 0.0022, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 2.010610079575597, | |
| "grad_norm": 0.12431658714516998, | |
| "learning_rate": 2.9677278883644367e-06, | |
| "loss": 0.0021, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 2.013262599469496, | |
| "grad_norm": 0.1947486881294333, | |
| "learning_rate": 2.9536255998851615e-06, | |
| "loss": 0.0022, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 2.0159151193633953, | |
| "grad_norm": 0.1241363948209879, | |
| "learning_rate": 2.9395428387234192e-06, | |
| "loss": 0.0021, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 2.018567639257294, | |
| "grad_norm": 0.11720975702516029, | |
| "learning_rate": 2.9254797392625146e-06, | |
| "loss": 0.0021, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 2.0212201591511936, | |
| "grad_norm": 1.4611727812014197, | |
| "learning_rate": 2.9114364356981274e-06, | |
| "loss": 0.0024, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 2.023872679045093, | |
| "grad_norm": 1.777037870013191, | |
| "learning_rate": 2.8974130620370405e-06, | |
| "loss": 0.0024, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 2.026525198938992, | |
| "grad_norm": 0.28329994259219843, | |
| "learning_rate": 2.883409752095857e-06, | |
| "loss": 0.0021, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 2.029177718832891, | |
| "grad_norm": 0.11691713185520833, | |
| "learning_rate": 2.8694266394997238e-06, | |
| "loss": 0.002, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 2.0318302387267906, | |
| "grad_norm": 3.127924717406078, | |
| "learning_rate": 2.8554638576810565e-06, | |
| "loss": 0.0024, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 2.0344827586206895, | |
| "grad_norm": 0.49914381173034794, | |
| "learning_rate": 2.8415215398782657e-06, | |
| "loss": 0.0021, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 2.037135278514589, | |
| "grad_norm": 0.12342862971938788, | |
| "learning_rate": 2.827599819134489e-06, | |
| "loss": 0.002, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 2.0397877984084882, | |
| "grad_norm": 1.199338570148691, | |
| "learning_rate": 2.813698828296312e-06, | |
| "loss": 0.0023, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 2.042440318302387, | |
| "grad_norm": 6.468147761378389, | |
| "learning_rate": 2.799818700012518e-06, | |
| "loss": 0.0027, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 2.0450928381962865, | |
| "grad_norm": 0.10900497117980268, | |
| "learning_rate": 2.7859595667328027e-06, | |
| "loss": 0.0019, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 2.047745358090186, | |
| "grad_norm": 0.264214731354079, | |
| "learning_rate": 2.772121560706522e-06, | |
| "loss": 0.0019, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 2.050397877984085, | |
| "grad_norm": 0.10986125434031639, | |
| "learning_rate": 2.758304813981428e-06, | |
| "loss": 0.0019, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 2.053050397877984, | |
| "grad_norm": 101.41719434357161, | |
| "learning_rate": 2.7445094584024067e-06, | |
| "loss": 0.0533, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 2.055702917771883, | |
| "grad_norm": 0.8528692045683144, | |
| "learning_rate": 2.7307356256102215e-06, | |
| "loss": 0.002, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 2.0583554376657824, | |
| "grad_norm": 0.13636942340765967, | |
| "learning_rate": 2.716983447040257e-06, | |
| "loss": 0.0019, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 2.061007957559682, | |
| "grad_norm": 0.10258828832482879, | |
| "learning_rate": 2.703253053921266e-06, | |
| "loss": 0.0018, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 2.0636604774535807, | |
| "grad_norm": 0.10649020932037707, | |
| "learning_rate": 2.689544577274113e-06, | |
| "loss": 0.0018, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 2.06631299734748, | |
| "grad_norm": 108.32193776949099, | |
| "learning_rate": 2.6758581479105274e-06, | |
| "loss": 0.0314, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 2.0689655172413794, | |
| "grad_norm": 0.1034645054683085, | |
| "learning_rate": 2.6621938964318593e-06, | |
| "loss": 0.0018, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 2.0716180371352784, | |
| "grad_norm": 5.830050397015915, | |
| "learning_rate": 2.6485519532278235e-06, | |
| "loss": 0.0032, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 2.0742705570291777, | |
| "grad_norm": 228.63559970003033, | |
| "learning_rate": 2.6349324484752612e-06, | |
| "loss": 0.0271, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 2.076923076923077, | |
| "grad_norm": 0.31851881180868624, | |
| "learning_rate": 2.621335512136899e-06, | |
| "loss": 0.0018, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 2.079575596816976, | |
| "grad_norm": 0.7232659060672972, | |
| "learning_rate": 2.6077612739601015e-06, | |
| "loss": 0.002, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 2.0822281167108754, | |
| "grad_norm": 4.4527732696930675, | |
| "learning_rate": 2.5942098634756475e-06, | |
| "loss": 0.0023, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 2.0848806366047747, | |
| "grad_norm": 1.7140439428553513, | |
| "learning_rate": 2.580681409996477e-06, | |
| "loss": 0.0022, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 2.0875331564986737, | |
| "grad_norm": 61.715104619750406, | |
| "learning_rate": 2.567176042616471e-06, | |
| "loss": 0.0157, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 2.090185676392573, | |
| "grad_norm": 0.23387662655238117, | |
| "learning_rate": 2.5536938902092056e-06, | |
| "loss": 0.0018, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 2.092838196286472, | |
| "grad_norm": 5.979822001101083, | |
| "learning_rate": 2.5402350814267364e-06, | |
| "loss": 0.0024, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 2.0954907161803713, | |
| "grad_norm": 30.405797764381745, | |
| "learning_rate": 2.526799744698366e-06, | |
| "loss": 0.0109, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 2.0981432360742707, | |
| "grad_norm": 1.782000380898367, | |
| "learning_rate": 2.5133880082294155e-06, | |
| "loss": 0.0019, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 2.1007957559681696, | |
| "grad_norm": 0.20353659088976575, | |
| "learning_rate": 2.5000000000000015e-06, | |
| "loss": 0.0018, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 2.103448275862069, | |
| "grad_norm": 0.1766502553727905, | |
| "learning_rate": 2.486635847763815e-06, | |
| "loss": 0.0017, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 2.1061007957559683, | |
| "grad_norm": 0.09662770866395148, | |
| "learning_rate": 2.473295679046911e-06, | |
| "loss": 0.0017, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 2.1087533156498672, | |
| "grad_norm": 0.11532877150608818, | |
| "learning_rate": 2.4599796211464772e-06, | |
| "loss": 0.0017, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 2.1114058355437666, | |
| "grad_norm": 0.4785349103019139, | |
| "learning_rate": 2.446687801129628e-06, | |
| "loss": 0.0017, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 2.114058355437666, | |
| "grad_norm": 0.10560060501990368, | |
| "learning_rate": 2.433420345832191e-06, | |
| "loss": 0.0016, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 2.116710875331565, | |
| "grad_norm": 158.45074124054727, | |
| "learning_rate": 2.4201773818574956e-06, | |
| "loss": 0.1111, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 2.1193633952254642, | |
| "grad_norm": 0.10914455056011906, | |
| "learning_rate": 2.406959035575166e-06, | |
| "loss": 0.0017, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 2.1220159151193636, | |
| "grad_norm": 1.9981342576158658, | |
| "learning_rate": 2.393765433119913e-06, | |
| "loss": 0.002, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 2.1246684350132625, | |
| "grad_norm": 0.11389412778106908, | |
| "learning_rate": 2.3805967003903336e-06, | |
| "loss": 0.0017, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 2.127320954907162, | |
| "grad_norm": 0.1099671005906803, | |
| "learning_rate": 2.3674529630477074e-06, | |
| "loss": 0.0016, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 2.129973474801061, | |
| "grad_norm": 0.14767238213621295, | |
| "learning_rate": 2.3543343465147956e-06, | |
| "loss": 0.0016, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 2.13262599469496, | |
| "grad_norm": 0.09529036621487165, | |
| "learning_rate": 2.341240975974653e-06, | |
| "loss": 0.0016, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 2.1352785145888595, | |
| "grad_norm": 19.604990356490127, | |
| "learning_rate": 2.328172976369421e-06, | |
| "loss": 0.0063, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 2.1379310344827585, | |
| "grad_norm": 99.28830036238374, | |
| "learning_rate": 2.315130472399145e-06, | |
| "loss": 0.0982, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 2.140583554376658, | |
| "grad_norm": 0.0913940346583298, | |
| "learning_rate": 2.302113588520578e-06, | |
| "loss": 0.0016, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 2.143236074270557, | |
| "grad_norm": 0.10184255139784493, | |
| "learning_rate": 2.289122448945997e-06, | |
| "loss": 0.0016, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 2.145888594164456, | |
| "grad_norm": 19.778388930648745, | |
| "learning_rate": 2.2761571776420187e-06, | |
| "loss": 0.004, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 2.1485411140583555, | |
| "grad_norm": 0.12459480053749435, | |
| "learning_rate": 2.263217898328415e-06, | |
| "loss": 0.0016, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 2.151193633952255, | |
| "grad_norm": 90.21851918232709, | |
| "learning_rate": 2.2503047344769256e-06, | |
| "loss": 0.0585, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 2.1538461538461537, | |
| "grad_norm": 0.1864246290588083, | |
| "learning_rate": 2.23741780931009e-06, | |
| "loss": 0.0016, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 2.156498673740053, | |
| "grad_norm": 0.1746806262091177, | |
| "learning_rate": 2.2245572458000714e-06, | |
| "loss": 0.0016, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 2.159151193633952, | |
| "grad_norm": 0.08740937612823772, | |
| "learning_rate": 2.211723166667475e-06, | |
| "loss": 0.0015, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 2.1618037135278514, | |
| "grad_norm": 0.08810120816843081, | |
| "learning_rate": 2.198915694380182e-06, | |
| "loss": 0.0015, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 2.1644562334217508, | |
| "grad_norm": 0.08824391899957097, | |
| "learning_rate": 2.1861349511521817e-06, | |
| "loss": 0.0015, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 2.1671087533156497, | |
| "grad_norm": 0.18322170670391005, | |
| "learning_rate": 2.173381058942402e-06, | |
| "loss": 0.0015, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 2.169761273209549, | |
| "grad_norm": 150.49741281570374, | |
| "learning_rate": 2.1606541394535528e-06, | |
| "loss": 0.0481, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 2.1724137931034484, | |
| "grad_norm": 0.11654896618735007, | |
| "learning_rate": 2.147954314130955e-06, | |
| "loss": 0.0015, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 2.1750663129973473, | |
| "grad_norm": 0.10317704451738126, | |
| "learning_rate": 2.135281704161386e-06, | |
| "loss": 0.0015, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 2.1777188328912467, | |
| "grad_norm": 0.3245290336207275, | |
| "learning_rate": 2.122636430471926e-06, | |
| "loss": 0.0016, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 2.180371352785146, | |
| "grad_norm": 60.86225599121476, | |
| "learning_rate": 2.1100186137288005e-06, | |
| "loss": 0.027, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 2.183023872679045, | |
| "grad_norm": 11.145714425069787, | |
| "learning_rate": 2.0974283743362283e-06, | |
| "loss": 0.0048, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 2.1856763925729443, | |
| "grad_norm": 14.639285433583048, | |
| "learning_rate": 2.084865832435278e-06, | |
| "loss": 0.0036, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 2.1883289124668437, | |
| "grad_norm": 1.7024663731091472, | |
| "learning_rate": 2.072331107902713e-06, | |
| "loss": 0.0018, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 2.1909814323607426, | |
| "grad_norm": 0.2326547080503539, | |
| "learning_rate": 2.0598243203498562e-06, | |
| "loss": 0.0015, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 2.193633952254642, | |
| "grad_norm": 19.259529211251273, | |
| "learning_rate": 2.0473455891214416e-06, | |
| "loss": 0.0041, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 2.1962864721485413, | |
| "grad_norm": 0.23161290224367345, | |
| "learning_rate": 2.034895033294483e-06, | |
| "loss": 0.0015, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 2.1989389920424403, | |
| "grad_norm": 0.08761071853121946, | |
| "learning_rate": 2.0224727716771297e-06, | |
| "loss": 0.0015, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 2.2015915119363396, | |
| "grad_norm": 0.09616103248966669, | |
| "learning_rate": 2.0100789228075375e-06, | |
| "loss": 0.0015, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 2.2042440318302385, | |
| "grad_norm": 0.3751676855004518, | |
| "learning_rate": 1.9977136049527348e-06, | |
| "loss": 0.0015, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 2.206896551724138, | |
| "grad_norm": 0.08231013343373718, | |
| "learning_rate": 1.9853769361074964e-06, | |
| "loss": 0.0014, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 2.2095490716180373, | |
| "grad_norm": 0.18666172738141099, | |
| "learning_rate": 1.973069033993223e-06, | |
| "loss": 0.0015, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 2.212201591511936, | |
| "grad_norm": 0.14626078989059024, | |
| "learning_rate": 1.960790016056801e-06, | |
| "loss": 0.0015, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 2.2148541114058355, | |
| "grad_norm": 0.16713695630623848, | |
| "learning_rate": 1.9485399994694998e-06, | |
| "loss": 0.0015, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 2.217506631299735, | |
| "grad_norm": 0.34837075659922284, | |
| "learning_rate": 1.9363191011258426e-06, | |
| "loss": 0.0015, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 2.220159151193634, | |
| "grad_norm": 0.10312418350386081, | |
| "learning_rate": 1.9241274376425e-06, | |
| "loss": 0.0014, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 2.222811671087533, | |
| "grad_norm": 0.09176903421637479, | |
| "learning_rate": 1.9119651253571676e-06, | |
| "loss": 0.0014, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 2.2254641909814326, | |
| "grad_norm": 0.09637365753492827, | |
| "learning_rate": 1.89983228032746e-06, | |
| "loss": 0.0014, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 2.2281167108753315, | |
| "grad_norm": 0.12149401988517716, | |
| "learning_rate": 1.8877290183298058e-06, | |
| "loss": 0.0014, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 2.230769230769231, | |
| "grad_norm": 0.08013339280323412, | |
| "learning_rate": 1.8756554548583377e-06, | |
| "loss": 0.0014, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 2.2334217506631298, | |
| "grad_norm": 0.08047819136872343, | |
| "learning_rate": 1.863611705123798e-06, | |
| "loss": 0.0014, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 2.236074270557029, | |
| "grad_norm": 0.07917159306298144, | |
| "learning_rate": 1.8515978840524302e-06, | |
| "loss": 0.0014, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 2.2387267904509285, | |
| "grad_norm": 0.0795637937781073, | |
| "learning_rate": 1.8396141062848877e-06, | |
| "loss": 0.0014, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 2.2413793103448274, | |
| "grad_norm": 0.10561908277405324, | |
| "learning_rate": 1.827660486175139e-06, | |
| "loss": 0.0014, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 2.2440318302387268, | |
| "grad_norm": 0.22917237635635745, | |
| "learning_rate": 1.8157371377893769e-06, | |
| "loss": 0.0014, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 2.246684350132626, | |
| "grad_norm": 0.08532627392000394, | |
| "learning_rate": 1.803844174904928e-06, | |
| "loss": 0.0014, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 2.249336870026525, | |
| "grad_norm": 0.0778389733090977, | |
| "learning_rate": 1.7919817110091691e-06, | |
| "loss": 0.0014, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 2.2519893899204244, | |
| "grad_norm": 0.07628074268285744, | |
| "learning_rate": 1.7801498592984445e-06, | |
| "loss": 0.0013, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 2.2546419098143238, | |
| "grad_norm": 0.5208502677944772, | |
| "learning_rate": 1.7683487326769826e-06, | |
| "loss": 0.0014, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 2.2572944297082227, | |
| "grad_norm": 0.07726489861414727, | |
| "learning_rate": 1.756578443755822e-06, | |
| "loss": 0.0014, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 2.259946949602122, | |
| "grad_norm": 0.22783711500679732, | |
| "learning_rate": 1.7448391048517378e-06, | |
| "loss": 0.0014, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 2.2625994694960214, | |
| "grad_norm": 0.07529383215160723, | |
| "learning_rate": 1.7331308279861641e-06, | |
| "loss": 0.0013, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 2.2652519893899203, | |
| "grad_norm": 0.07623502670068076, | |
| "learning_rate": 1.7214537248841317e-06, | |
| "loss": 0.0013, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 2.2679045092838197, | |
| "grad_norm": 0.08578711095298416, | |
| "learning_rate": 1.709807906973196e-06, | |
| "loss": 0.0013, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 2.270557029177719, | |
| "grad_norm": 0.07576100118413397, | |
| "learning_rate": 1.6981934853823796e-06, | |
| "loss": 0.0013, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 2.273209549071618, | |
| "grad_norm": 0.0755024556668533, | |
| "learning_rate": 1.6866105709411069e-06, | |
| "loss": 0.0013, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 2.2758620689655173, | |
| "grad_norm": 0.07949872956423557, | |
| "learning_rate": 1.6750592741781496e-06, | |
| "loss": 0.0013, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 2.2785145888594163, | |
| "grad_norm": 0.07739745813700703, | |
| "learning_rate": 1.6635397053205704e-06, | |
| "loss": 0.0013, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 2.2811671087533156, | |
| "grad_norm": 0.48595997956942966, | |
| "learning_rate": 1.6520519742926705e-06, | |
| "loss": 0.0014, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 2.283819628647215, | |
| "grad_norm": 0.08713256642378481, | |
| "learning_rate": 1.640596190714947e-06, | |
| "loss": 0.0013, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 2.286472148541114, | |
| "grad_norm": 2.3351158973513466, | |
| "learning_rate": 1.6291724639030353e-06, | |
| "loss": 0.0015, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 2.2891246684350133, | |
| "grad_norm": 0.0732232112406159, | |
| "learning_rate": 1.6177809028666769e-06, | |
| "loss": 0.0013, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 2.2917771883289126, | |
| "grad_norm": 0.08055672500543515, | |
| "learning_rate": 1.6064216163086716e-06, | |
| "loss": 0.0013, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 2.2944297082228116, | |
| "grad_norm": 0.07396938385774736, | |
| "learning_rate": 1.595094712623843e-06, | |
| "loss": 0.0013, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 2.297082228116711, | |
| "grad_norm": 0.23905264486122726, | |
| "learning_rate": 1.5838002998980107e-06, | |
| "loss": 0.0013, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 2.29973474801061, | |
| "grad_norm": 0.07488749231338356, | |
| "learning_rate": 1.5725384859069454e-06, | |
| "loss": 0.0013, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 2.302387267904509, | |
| "grad_norm": 0.08418634636135462, | |
| "learning_rate": 1.5613093781153503e-06, | |
| "loss": 0.0013, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 2.3050397877984086, | |
| "grad_norm": 0.07446527342204637, | |
| "learning_rate": 1.550113083675836e-06, | |
| "loss": 0.0013, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 2.3076923076923075, | |
| "grad_norm": 0.748064917256297, | |
| "learning_rate": 1.5389497094278861e-06, | |
| "loss": 0.0014, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 2.310344827586207, | |
| "grad_norm": 0.1221337964709013, | |
| "learning_rate": 1.5278193618968584e-06, | |
| "loss": 0.0013, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 2.312997347480106, | |
| "grad_norm": 0.07249510996089394, | |
| "learning_rate": 1.5167221472929489e-06, | |
| "loss": 0.0013, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 2.315649867374005, | |
| "grad_norm": 0.07185807968590574, | |
| "learning_rate": 1.5056581715101887e-06, | |
| "loss": 0.0013, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 2.3183023872679045, | |
| "grad_norm": 0.07200421761693301, | |
| "learning_rate": 1.4946275401254301e-06, | |
| "loss": 0.0013, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 2.320954907161804, | |
| "grad_norm": 0.15132703297360822, | |
| "learning_rate": 1.4836303583973384e-06, | |
| "loss": 0.0013, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 2.323607427055703, | |
| "grad_norm": 6.948553642424826, | |
| "learning_rate": 1.472666731265394e-06, | |
| "loss": 0.0022, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 2.326259946949602, | |
| "grad_norm": 0.41658350421527945, | |
| "learning_rate": 1.4617367633488816e-06, | |
| "loss": 0.0013, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 2.3289124668435015, | |
| "grad_norm": 1.3971627759198118, | |
| "learning_rate": 1.4508405589458968e-06, | |
| "loss": 0.0015, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 2.3315649867374004, | |
| "grad_norm": 0.07283774441799601, | |
| "learning_rate": 1.4399782220323515e-06, | |
| "loss": 0.0013, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 2.3342175066313, | |
| "grad_norm": 0.0707694981254212, | |
| "learning_rate": 1.4291498562609802e-06, | |
| "loss": 0.0012, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 2.336870026525199, | |
| "grad_norm": 0.07056370961999311, | |
| "learning_rate": 1.4183555649603503e-06, | |
| "loss": 0.0012, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 2.339522546419098, | |
| "grad_norm": 0.06990280541051755, | |
| "learning_rate": 1.4075954511338784e-06, | |
| "loss": 0.0012, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 2.3421750663129974, | |
| "grad_norm": 6.038315408381685, | |
| "learning_rate": 1.396869617458846e-06, | |
| "loss": 0.0022, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 2.344827586206897, | |
| "grad_norm": 0.0705211029898386, | |
| "learning_rate": 1.3861781662854162e-06, | |
| "loss": 0.0012, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 2.3474801061007957, | |
| "grad_norm": 0.07208408332150218, | |
| "learning_rate": 1.3755211996356687e-06, | |
| "loss": 0.0012, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 2.350132625994695, | |
| "grad_norm": 0.06873391014603904, | |
| "learning_rate": 1.3648988192026108e-06, | |
| "loss": 0.0012, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 2.352785145888594, | |
| "grad_norm": 0.06830768636538152, | |
| "learning_rate": 1.3543111263492165e-06, | |
| "loss": 0.0012, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 2.3554376657824934, | |
| "grad_norm": 0.06956360956522886, | |
| "learning_rate": 1.3437582221074574e-06, | |
| "loss": 0.0012, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 2.3580901856763927, | |
| "grad_norm": 0.07484841652434673, | |
| "learning_rate": 1.3332402071773376e-06, | |
| "loss": 0.0012, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 2.3607427055702916, | |
| "grad_norm": 84.4628857859686, | |
| "learning_rate": 1.322757181925937e-06, | |
| "loss": 0.0697, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 2.363395225464191, | |
| "grad_norm": 0.0686797943718732, | |
| "learning_rate": 1.3123092463864456e-06, | |
| "loss": 0.0012, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 2.3660477453580904, | |
| "grad_norm": 0.06877595045071883, | |
| "learning_rate": 1.301896500257217e-06, | |
| "loss": 0.0012, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 2.3687002652519893, | |
| "grad_norm": 0.06907246986916021, | |
| "learning_rate": 1.2915190429008084e-06, | |
| "loss": 0.0012, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 2.3713527851458887, | |
| "grad_norm": 0.06831755147266023, | |
| "learning_rate": 1.2811769733430406e-06, | |
| "loss": 0.0012, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 2.3740053050397876, | |
| "grad_norm": 0.06752190838464503, | |
| "learning_rate": 1.2708703902720538e-06, | |
| "loss": 0.0012, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 2.376657824933687, | |
| "grad_norm": 0.06867830464417264, | |
| "learning_rate": 1.260599392037356e-06, | |
| "loss": 0.0012, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 2.3793103448275863, | |
| "grad_norm": 0.06853977135411335, | |
| "learning_rate": 1.250364076648894e-06, | |
| "loss": 0.0012, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 2.381962864721485, | |
| "grad_norm": 0.0686971427775422, | |
| "learning_rate": 1.2401645417761126e-06, | |
| "loss": 0.0012, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 2.3846153846153846, | |
| "grad_norm": 0.06813588670966912, | |
| "learning_rate": 1.2300008847470252e-06, | |
| "loss": 0.0012, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 2.387267904509284, | |
| "grad_norm": 0.06727926380286532, | |
| "learning_rate": 1.2198732025472876e-06, | |
| "loss": 0.0012, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 2.389920424403183, | |
| "grad_norm": 0.10992632525185202, | |
| "learning_rate": 1.2097815918192652e-06, | |
| "loss": 0.0012, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 2.3925729442970822, | |
| "grad_norm": 2.781133305852038, | |
| "learning_rate": 1.1997261488611173e-06, | |
| "loss": 0.0019, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 2.3952254641909816, | |
| "grad_norm": 0.07315981673458091, | |
| "learning_rate": 1.1897069696258756e-06, | |
| "loss": 0.0012, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 2.3978779840848805, | |
| "grad_norm": 0.06729175072481897, | |
| "learning_rate": 1.1797241497205285e-06, | |
| "loss": 0.0012, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 2.40053050397878, | |
| "grad_norm": 0.0737220427607404, | |
| "learning_rate": 1.1697777844051105e-06, | |
| "loss": 0.0012, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 2.4031830238726792, | |
| "grad_norm": 0.07922903512271506, | |
| "learning_rate": 1.1598679685917901e-06, | |
| "loss": 0.0012, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 2.405835543766578, | |
| "grad_norm": 0.06631213456537181, | |
| "learning_rate": 1.1499947968439673e-06, | |
| "loss": 0.0012, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 2.4084880636604775, | |
| "grad_norm": 0.43812258083872085, | |
| "learning_rate": 1.1401583633753683e-06, | |
| "loss": 0.0013, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 2.411140583554377, | |
| "grad_norm": 0.07128135475948372, | |
| "learning_rate": 1.1303587620491513e-06, | |
| "loss": 0.0012, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 2.413793103448276, | |
| "grad_norm": 0.07036174617809875, | |
| "learning_rate": 1.120596086377005e-06, | |
| "loss": 0.0012, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 2.416445623342175, | |
| "grad_norm": 0.6040273119344629, | |
| "learning_rate": 1.1108704295182582e-06, | |
| "loss": 0.0012, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 2.419098143236074, | |
| "grad_norm": 0.16800528636244244, | |
| "learning_rate": 1.1011818842789928e-06, | |
| "loss": 0.0012, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 2.4217506631299734, | |
| "grad_norm": 0.07261939675580374, | |
| "learning_rate": 1.0915305431111561e-06, | |
| "loss": 0.0011, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 2.424403183023873, | |
| "grad_norm": 0.08205007540667691, | |
| "learning_rate": 1.0819164981116825e-06, | |
| "loss": 0.0012, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 2.4270557029177717, | |
| "grad_norm": 0.06572830629445646, | |
| "learning_rate": 1.0723398410216085e-06, | |
| "loss": 0.0011, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 2.429708222811671, | |
| "grad_norm": 0.3605319924863454, | |
| "learning_rate": 1.0628006632251975e-06, | |
| "loss": 0.0012, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 2.4323607427055705, | |
| "grad_norm": 0.08339529686452388, | |
| "learning_rate": 1.0532990557490768e-06, | |
| "loss": 0.0012, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 2.4350132625994694, | |
| "grad_norm": 0.06643327060166455, | |
| "learning_rate": 1.043835109261357e-06, | |
| "loss": 0.0012, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 2.4376657824933687, | |
| "grad_norm": 0.8203758199273854, | |
| "learning_rate": 1.034408914070779e-06, | |
| "loss": 0.0012, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 2.4403183023872677, | |
| "grad_norm": 0.25257285521020284, | |
| "learning_rate": 1.0250205601258407e-06, | |
| "loss": 0.0012, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 2.442970822281167, | |
| "grad_norm": 0.0640690691890942, | |
| "learning_rate": 1.0156701370139454e-06, | |
| "loss": 0.0011, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 2.4456233421750664, | |
| "grad_norm": 0.06532193308679948, | |
| "learning_rate": 1.0063577339605452e-06, | |
| "loss": 0.0011, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 2.4482758620689653, | |
| "grad_norm": 0.07654524246951325, | |
| "learning_rate": 9.970834398282887e-07, | |
| "loss": 0.0011, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 2.4509283819628647, | |
| "grad_norm": 0.18790715100110908, | |
| "learning_rate": 9.878473431161767e-07, | |
| "loss": 0.0011, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 2.453580901856764, | |
| "grad_norm": 0.06402817200612926, | |
| "learning_rate": 9.786495319587136e-07, | |
| "loss": 0.0011, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 2.456233421750663, | |
| "grad_norm": 0.06388838101228224, | |
| "learning_rate": 9.694900941250674e-07, | |
| "loss": 0.0011, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 2.4588859416445623, | |
| "grad_norm": 0.06492708141320898, | |
| "learning_rate": 9.603691170182316e-07, | |
| "loss": 0.0011, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 2.4615384615384617, | |
| "grad_norm": 0.4278494732745631, | |
| "learning_rate": 9.512866876741949e-07, | |
| "loss": 0.0012, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 2.4641909814323606, | |
| "grad_norm": 0.06326199985055948, | |
| "learning_rate": 9.42242892761106e-07, | |
| "loss": 0.0011, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 2.46684350132626, | |
| "grad_norm": 0.12345201557273024, | |
| "learning_rate": 9.332378185784491e-07, | |
| "loss": 0.0011, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 2.4694960212201593, | |
| "grad_norm": 0.0632481832437175, | |
| "learning_rate": 9.242715510562195e-07, | |
| "loss": 0.0011, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 2.4721485411140582, | |
| "grad_norm": 0.06345078842767803, | |
| "learning_rate": 9.153441757541026e-07, | |
| "loss": 0.0011, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 2.4748010610079576, | |
| "grad_norm": 0.06246467170334875, | |
| "learning_rate": 9.064557778606631e-07, | |
| "loss": 0.0011, | |
| "step": 933 | |
| }, | |
| { | |
| "epoch": 2.477453580901857, | |
| "grad_norm": 0.06407280121914012, | |
| "learning_rate": 8.97606442192524e-07, | |
| "loss": 0.0011, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 2.480106100795756, | |
| "grad_norm": 0.08385592450517614, | |
| "learning_rate": 8.887962531935612e-07, | |
| "loss": 0.0011, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 2.4827586206896552, | |
| "grad_norm": 0.0641713884219198, | |
| "learning_rate": 8.800252949340998e-07, | |
| "loss": 0.0011, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 2.4854111405835546, | |
| "grad_norm": 0.062366965644575126, | |
| "learning_rate": 8.712936511101056e-07, | |
| "loss": 0.0011, | |
| "step": 937 | |
| }, | |
| { | |
| "epoch": 2.4880636604774535, | |
| "grad_norm": 0.09365524061432295, | |
| "learning_rate": 8.62601405042397e-07, | |
| "loss": 0.0011, | |
| "step": 938 | |
| }, | |
| { | |
| "epoch": 2.490716180371353, | |
| "grad_norm": 0.063016174808786, | |
| "learning_rate": 8.539486396758357e-07, | |
| "loss": 0.0011, | |
| "step": 939 | |
| }, | |
| { | |
| "epoch": 2.493368700265252, | |
| "grad_norm": 0.0643384463062384, | |
| "learning_rate": 8.453354375785477e-07, | |
| "loss": 0.0011, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 2.496021220159151, | |
| "grad_norm": 0.0625156738078906, | |
| "learning_rate": 8.367618809411299e-07, | |
| "loss": 0.0011, | |
| "step": 941 | |
| }, | |
| { | |
| "epoch": 2.4986737400530505, | |
| "grad_norm": 0.06181998197294903, | |
| "learning_rate": 8.282280515758639e-07, | |
| "loss": 0.0011, | |
| "step": 942 | |
| }, | |
| { | |
| "epoch": 2.5013262599469495, | |
| "grad_norm": 0.062355052138952045, | |
| "learning_rate": 8.197340309159429e-07, | |
| "loss": 0.0011, | |
| "step": 943 | |
| }, | |
| { | |
| "epoch": 2.503978779840849, | |
| "grad_norm": 0.0627199890542338, | |
| "learning_rate": 8.112799000146853e-07, | |
| "loss": 0.0011, | |
| "step": 944 | |
| }, | |
| { | |
| "epoch": 2.5066312997347477, | |
| "grad_norm": 0.06197308036357831, | |
| "learning_rate": 8.02865739544767e-07, | |
| "loss": 0.0011, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 2.509283819628647, | |
| "grad_norm": 0.06557116137937126, | |
| "learning_rate": 7.944916297974498e-07, | |
| "loss": 0.0011, | |
| "step": 946 | |
| }, | |
| { | |
| "epoch": 2.5119363395225465, | |
| "grad_norm": 0.06161745183391721, | |
| "learning_rate": 7.861576506818147e-07, | |
| "loss": 0.0011, | |
| "step": 947 | |
| }, | |
| { | |
| "epoch": 2.5145888594164454, | |
| "grad_norm": 0.06328262626718707, | |
| "learning_rate": 7.778638817240042e-07, | |
| "loss": 0.0011, | |
| "step": 948 | |
| }, | |
| { | |
| "epoch": 2.5172413793103448, | |
| "grad_norm": 0.06168396188379144, | |
| "learning_rate": 7.696104020664552e-07, | |
| "loss": 0.0011, | |
| "step": 949 | |
| }, | |
| { | |
| "epoch": 2.519893899204244, | |
| "grad_norm": 3.4900735483037675, | |
| "learning_rate": 7.613972904671496e-07, | |
| "loss": 0.0017, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 2.522546419098143, | |
| "grad_norm": 0.0603808591062879, | |
| "learning_rate": 7.532246252988617e-07, | |
| "loss": 0.0011, | |
| "step": 951 | |
| }, | |
| { | |
| "epoch": 2.5251989389920424, | |
| "grad_norm": 0.06256729914752837, | |
| "learning_rate": 7.450924845484092e-07, | |
| "loss": 0.0011, | |
| "step": 952 | |
| }, | |
| { | |
| "epoch": 2.5278514588859418, | |
| "grad_norm": 0.060223068535577365, | |
| "learning_rate": 7.370009458159099e-07, | |
| "loss": 0.0011, | |
| "step": 953 | |
| }, | |
| { | |
| "epoch": 2.5305039787798407, | |
| "grad_norm": 0.060902909535236444, | |
| "learning_rate": 7.289500863140414e-07, | |
| "loss": 0.0011, | |
| "step": 954 | |
| }, | |
| { | |
| "epoch": 2.53315649867374, | |
| "grad_norm": 0.06131602558880652, | |
| "learning_rate": 7.20939982867303e-07, | |
| "loss": 0.0011, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 2.5358090185676394, | |
| "grad_norm": 0.06093438475118728, | |
| "learning_rate": 7.129707119112838e-07, | |
| "loss": 0.0011, | |
| "step": 956 | |
| }, | |
| { | |
| "epoch": 2.5384615384615383, | |
| "grad_norm": 0.060529102445561776, | |
| "learning_rate": 7.05042349491935e-07, | |
| "loss": 0.0011, | |
| "step": 957 | |
| }, | |
| { | |
| "epoch": 2.5411140583554377, | |
| "grad_norm": 0.06681863648732436, | |
| "learning_rate": 6.971549712648401e-07, | |
| "loss": 0.0011, | |
| "step": 958 | |
| }, | |
| { | |
| "epoch": 2.543766578249337, | |
| "grad_norm": 0.05995555771881405, | |
| "learning_rate": 6.893086524944953e-07, | |
| "loss": 0.001, | |
| "step": 959 | |
| }, | |
| { | |
| "epoch": 2.546419098143236, | |
| "grad_norm": 0.06084312525280391, | |
| "learning_rate": 6.815034680535915e-07, | |
| "loss": 0.0011, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 2.5490716180371353, | |
| "grad_norm": 0.06207433552846488, | |
| "learning_rate": 6.737394924223e-07, | |
| "loss": 0.0011, | |
| "step": 961 | |
| }, | |
| { | |
| "epoch": 2.5517241379310347, | |
| "grad_norm": 0.06090611219622317, | |
| "learning_rate": 6.660167996875605e-07, | |
| "loss": 0.0011, | |
| "step": 962 | |
| }, | |
| { | |
| "epoch": 2.5543766578249336, | |
| "grad_norm": 0.10210121305506363, | |
| "learning_rate": 6.583354635423755e-07, | |
| "loss": 0.0011, | |
| "step": 963 | |
| }, | |
| { | |
| "epoch": 2.557029177718833, | |
| "grad_norm": 0.06214599105012155, | |
| "learning_rate": 6.506955572851059e-07, | |
| "loss": 0.0011, | |
| "step": 964 | |
| }, | |
| { | |
| "epoch": 2.5596816976127323, | |
| "grad_norm": 0.060577971305451866, | |
| "learning_rate": 6.430971538187725e-07, | |
| "loss": 0.0011, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 2.5623342175066313, | |
| "grad_norm": 0.06022186448438301, | |
| "learning_rate": 6.355403256503595e-07, | |
| "loss": 0.0011, | |
| "step": 966 | |
| }, | |
| { | |
| "epoch": 2.5649867374005306, | |
| "grad_norm": 0.061556135573867826, | |
| "learning_rate": 6.280251448901253e-07, | |
| "loss": 0.0011, | |
| "step": 967 | |
| }, | |
| { | |
| "epoch": 2.5676392572944295, | |
| "grad_norm": 0.23230874631462142, | |
| "learning_rate": 6.205516832509089e-07, | |
| "loss": 0.0011, | |
| "step": 968 | |
| }, | |
| { | |
| "epoch": 2.570291777188329, | |
| "grad_norm": 0.06121592878850303, | |
| "learning_rate": 6.131200120474512e-07, | |
| "loss": 0.0011, | |
| "step": 969 | |
| }, | |
| { | |
| "epoch": 2.5729442970822283, | |
| "grad_norm": 0.06103335172214427, | |
| "learning_rate": 6.057302021957113e-07, | |
| "loss": 0.0011, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 2.575596816976127, | |
| "grad_norm": 0.0747161538132135, | |
| "learning_rate": 5.983823242121888e-07, | |
| "loss": 0.0011, | |
| "step": 971 | |
| }, | |
| { | |
| "epoch": 2.5782493368700266, | |
| "grad_norm": 0.060446034942887174, | |
| "learning_rate": 5.910764482132575e-07, | |
| "loss": 0.0011, | |
| "step": 972 | |
| }, | |
| { | |
| "epoch": 2.5809018567639255, | |
| "grad_norm": 0.06133803420667302, | |
| "learning_rate": 5.838126439144875e-07, | |
| "loss": 0.0011, | |
| "step": 973 | |
| }, | |
| { | |
| "epoch": 2.583554376657825, | |
| "grad_norm": 0.20083767139428546, | |
| "learning_rate": 5.765909806299863e-07, | |
| "loss": 0.0011, | |
| "step": 974 | |
| }, | |
| { | |
| "epoch": 2.586206896551724, | |
| "grad_norm": 0.21805147129632801, | |
| "learning_rate": 5.694115272717326e-07, | |
| "loss": 0.0011, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 2.588859416445623, | |
| "grad_norm": 0.14427951314872606, | |
| "learning_rate": 5.622743523489216e-07, | |
| "loss": 0.0011, | |
| "step": 976 | |
| }, | |
| { | |
| "epoch": 2.5915119363395225, | |
| "grad_norm": 0.060470037999359225, | |
| "learning_rate": 5.551795239673146e-07, | |
| "loss": 0.0011, | |
| "step": 977 | |
| }, | |
| { | |
| "epoch": 2.594164456233422, | |
| "grad_norm": 0.09547973109513704, | |
| "learning_rate": 5.481271098285818e-07, | |
| "loss": 0.0011, | |
| "step": 978 | |
| }, | |
| { | |
| "epoch": 2.5968169761273208, | |
| "grad_norm": 0.06109432013062574, | |
| "learning_rate": 5.411171772296609e-07, | |
| "loss": 0.0011, | |
| "step": 979 | |
| }, | |
| { | |
| "epoch": 2.59946949602122, | |
| "grad_norm": 0.05988121541663535, | |
| "learning_rate": 5.34149793062112e-07, | |
| "loss": 0.001, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 2.6021220159151195, | |
| "grad_norm": 0.05940930651221904, | |
| "learning_rate": 5.272250238114857e-07, | |
| "loss": 0.001, | |
| "step": 981 | |
| }, | |
| { | |
| "epoch": 2.6047745358090184, | |
| "grad_norm": 0.05902304202526832, | |
| "learning_rate": 5.203429355566797e-07, | |
| "loss": 0.001, | |
| "step": 982 | |
| }, | |
| { | |
| "epoch": 2.6074270557029178, | |
| "grad_norm": 0.059119200588062956, | |
| "learning_rate": 5.13503593969315e-07, | |
| "loss": 0.001, | |
| "step": 983 | |
| }, | |
| { | |
| "epoch": 2.610079575596817, | |
| "grad_norm": 0.060062503713428146, | |
| "learning_rate": 5.067070643131056e-07, | |
| "loss": 0.001, | |
| "step": 984 | |
| }, | |
| { | |
| "epoch": 2.612732095490716, | |
| "grad_norm": 0.06006346680952073, | |
| "learning_rate": 4.999534114432386e-07, | |
| "loss": 0.001, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 2.6153846153846154, | |
| "grad_norm": 0.11773287881676983, | |
| "learning_rate": 4.932426998057516e-07, | |
| "loss": 0.0011, | |
| "step": 986 | |
| }, | |
| { | |
| "epoch": 2.618037135278515, | |
| "grad_norm": 0.060246496169987326, | |
| "learning_rate": 4.865749934369224e-07, | |
| "loss": 0.001, | |
| "step": 987 | |
| }, | |
| { | |
| "epoch": 2.6206896551724137, | |
| "grad_norm": 0.058923248100421995, | |
| "learning_rate": 4.799503559626528e-07, | |
| "loss": 0.001, | |
| "step": 988 | |
| }, | |
| { | |
| "epoch": 2.623342175066313, | |
| "grad_norm": 0.0598649588552234, | |
| "learning_rate": 4.733688505978673e-07, | |
| "loss": 0.001, | |
| "step": 989 | |
| }, | |
| { | |
| "epoch": 2.6259946949602124, | |
| "grad_norm": 0.057891816150437994, | |
| "learning_rate": 4.668305401459022e-07, | |
| "loss": 0.001, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 2.6286472148541113, | |
| "grad_norm": 0.05924443840740104, | |
| "learning_rate": 4.603354869979165e-07, | |
| "loss": 0.001, | |
| "step": 991 | |
| }, | |
| { | |
| "epoch": 2.6312997347480107, | |
| "grad_norm": 0.0591668438224006, | |
| "learning_rate": 4.5388375313228595e-07, | |
| "loss": 0.001, | |
| "step": 992 | |
| }, | |
| { | |
| "epoch": 2.63395225464191, | |
| "grad_norm": 0.05820548438282687, | |
| "learning_rate": 4.4747540011401913e-07, | |
| "loss": 0.001, | |
| "step": 993 | |
| }, | |
| { | |
| "epoch": 2.636604774535809, | |
| "grad_norm": 0.05838283733501604, | |
| "learning_rate": 4.4111048909416644e-07, | |
| "loss": 0.001, | |
| "step": 994 | |
| }, | |
| { | |
| "epoch": 2.6392572944297084, | |
| "grad_norm": 0.058938592640688806, | |
| "learning_rate": 4.347890808092359e-07, | |
| "loss": 0.001, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 2.6419098143236073, | |
| "grad_norm": 0.05885054610886344, | |
| "learning_rate": 4.2851123558061927e-07, | |
| "loss": 0.001, | |
| "step": 996 | |
| }, | |
| { | |
| "epoch": 2.6445623342175066, | |
| "grad_norm": 0.05936240290513845, | |
| "learning_rate": 4.2227701331400974e-07, | |
| "loss": 0.001, | |
| "step": 997 | |
| }, | |
| { | |
| "epoch": 2.647214854111406, | |
| "grad_norm": 0.06990676561141362, | |
| "learning_rate": 4.1608647349883123e-07, | |
| "loss": 0.001, | |
| "step": 998 | |
| }, | |
| { | |
| "epoch": 2.649867374005305, | |
| "grad_norm": 0.05804743429182015, | |
| "learning_rate": 4.0993967520767455e-07, | |
| "loss": 0.001, | |
| "step": 999 | |
| }, | |
| { | |
| "epoch": 2.6525198938992043, | |
| "grad_norm": 0.057653050301526836, | |
| "learning_rate": 4.0383667709573083e-07, | |
| "loss": 0.001, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 2.655172413793103, | |
| "grad_norm": 0.05815498354816591, | |
| "learning_rate": 3.9777753740023404e-07, | |
| "loss": 0.001, | |
| "step": 1001 | |
| }, | |
| { | |
| "epoch": 2.6578249336870026, | |
| "grad_norm": 0.06801602564012861, | |
| "learning_rate": 3.9176231393990183e-07, | |
| "loss": 0.001, | |
| "step": 1002 | |
| }, | |
| { | |
| "epoch": 2.660477453580902, | |
| "grad_norm": 0.05809297667176923, | |
| "learning_rate": 3.8579106411438636e-07, | |
| "loss": 0.001, | |
| "step": 1003 | |
| }, | |
| { | |
| "epoch": 2.663129973474801, | |
| "grad_norm": 6.0418097895357805, | |
| "learning_rate": 3.7986384490372395e-07, | |
| "loss": 0.0017, | |
| "step": 1004 | |
| }, | |
| { | |
| "epoch": 2.6657824933687, | |
| "grad_norm": 5.0348929600486, | |
| "learning_rate": 3.739807128677986e-07, | |
| "loss": 0.0016, | |
| "step": 1005 | |
| }, | |
| { | |
| "epoch": 2.6684350132625996, | |
| "grad_norm": 17.99966837081209, | |
| "learning_rate": 3.6814172414579075e-07, | |
| "loss": 0.0053, | |
| "step": 1006 | |
| }, | |
| { | |
| "epoch": 2.6710875331564985, | |
| "grad_norm": 0.05818204278536647, | |
| "learning_rate": 3.6234693445565185e-07, | |
| "loss": 0.001, | |
| "step": 1007 | |
| }, | |
| { | |
| "epoch": 2.673740053050398, | |
| "grad_norm": 0.05762331919374923, | |
| "learning_rate": 3.5659639909356725e-07, | |
| "loss": 0.001, | |
| "step": 1008 | |
| }, | |
| { | |
| "epoch": 2.676392572944297, | |
| "grad_norm": 0.05861398148302051, | |
| "learning_rate": 3.5089017293342965e-07, | |
| "loss": 0.001, | |
| "step": 1009 | |
| }, | |
| { | |
| "epoch": 2.679045092838196, | |
| "grad_norm": 0.0826821772808791, | |
| "learning_rate": 3.45228310426316e-07, | |
| "loss": 0.001, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 2.6816976127320955, | |
| "grad_norm": 0.05779077464837972, | |
| "learning_rate": 3.39610865599968e-07, | |
| "loss": 0.001, | |
| "step": 1011 | |
| }, | |
| { | |
| "epoch": 2.684350132625995, | |
| "grad_norm": 0.05749634870768738, | |
| "learning_rate": 3.34037892058276e-07, | |
| "loss": 0.001, | |
| "step": 1012 | |
| }, | |
| { | |
| "epoch": 2.687002652519894, | |
| "grad_norm": 0.058170717495365454, | |
| "learning_rate": 3.285094429807673e-07, | |
| "loss": 0.001, | |
| "step": 1013 | |
| }, | |
| { | |
| "epoch": 2.689655172413793, | |
| "grad_norm": 0.9273051963626235, | |
| "learning_rate": 3.230255711220992e-07, | |
| "loss": 0.0011, | |
| "step": 1014 | |
| }, | |
| { | |
| "epoch": 2.6923076923076925, | |
| "grad_norm": 0.08832036515142189, | |
| "learning_rate": 3.175863288115566e-07, | |
| "loss": 0.001, | |
| "step": 1015 | |
| }, | |
| { | |
| "epoch": 2.6949602122015914, | |
| "grad_norm": 0.05640399005369637, | |
| "learning_rate": 3.121917679525505e-07, | |
| "loss": 0.001, | |
| "step": 1016 | |
| }, | |
| { | |
| "epoch": 2.697612732095491, | |
| "grad_norm": 0.06245306715456624, | |
| "learning_rate": 3.0684194002212287e-07, | |
| "loss": 0.001, | |
| "step": 1017 | |
| }, | |
| { | |
| "epoch": 2.70026525198939, | |
| "grad_norm": 0.05955178949329247, | |
| "learning_rate": 3.015368960704584e-07, | |
| "loss": 0.001, | |
| "step": 1018 | |
| }, | |
| { | |
| "epoch": 2.702917771883289, | |
| "grad_norm": 0.05754804235165322, | |
| "learning_rate": 2.962766867203926e-07, | |
| "loss": 0.001, | |
| "step": 1019 | |
| }, | |
| { | |
| "epoch": 2.7055702917771884, | |
| "grad_norm": 0.057846912920183347, | |
| "learning_rate": 2.910613621669356e-07, | |
| "loss": 0.001, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 2.708222811671088, | |
| "grad_norm": 0.12010953781517812, | |
| "learning_rate": 2.8589097217678383e-07, | |
| "loss": 0.001, | |
| "step": 1021 | |
| }, | |
| { | |
| "epoch": 2.7108753315649867, | |
| "grad_norm": 0.05762401523324588, | |
| "learning_rate": 2.807655660878533e-07, | |
| "loss": 0.001, | |
| "step": 1022 | |
| }, | |
| { | |
| "epoch": 2.713527851458886, | |
| "grad_norm": 0.062105848428202826, | |
| "learning_rate": 2.756851928088056e-07, | |
| "loss": 0.001, | |
| "step": 1023 | |
| }, | |
| { | |
| "epoch": 2.716180371352785, | |
| "grad_norm": 0.05823768429439894, | |
| "learning_rate": 2.706499008185798e-07, | |
| "loss": 0.001, | |
| "step": 1024 | |
| }, | |
| { | |
| "epoch": 2.7188328912466844, | |
| "grad_norm": 0.071182213923279, | |
| "learning_rate": 2.6565973816593424e-07, | |
| "loss": 0.001, | |
| "step": 1025 | |
| }, | |
| { | |
| "epoch": 2.7214854111405833, | |
| "grad_norm": 0.3320041161058342, | |
| "learning_rate": 2.607147524689829e-07, | |
| "loss": 0.001, | |
| "step": 1026 | |
| }, | |
| { | |
| "epoch": 2.7241379310344827, | |
| "grad_norm": 0.0581767388246845, | |
| "learning_rate": 2.558149909147434e-07, | |
| "loss": 0.001, | |
| "step": 1027 | |
| }, | |
| { | |
| "epoch": 2.726790450928382, | |
| "grad_norm": 0.057259137826734856, | |
| "learning_rate": 2.5096050025868734e-07, | |
| "loss": 0.001, | |
| "step": 1028 | |
| }, | |
| { | |
| "epoch": 2.729442970822281, | |
| "grad_norm": 0.06014432323080065, | |
| "learning_rate": 2.461513268242938e-07, | |
| "loss": 0.001, | |
| "step": 1029 | |
| }, | |
| { | |
| "epoch": 2.7320954907161803, | |
| "grad_norm": 0.05755143786126445, | |
| "learning_rate": 2.4138751650260585e-07, | |
| "loss": 0.001, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 2.7347480106100797, | |
| "grad_norm": 0.07879601320240977, | |
| "learning_rate": 2.366691147517941e-07, | |
| "loss": 0.001, | |
| "step": 1031 | |
| }, | |
| { | |
| "epoch": 2.7374005305039786, | |
| "grad_norm": 0.05848781629848156, | |
| "learning_rate": 2.3199616659672352e-07, | |
| "loss": 0.001, | |
| "step": 1032 | |
| }, | |
| { | |
| "epoch": 2.740053050397878, | |
| "grad_norm": 1.9459183126974138, | |
| "learning_rate": 2.2736871662852045e-07, | |
| "loss": 0.0015, | |
| "step": 1033 | |
| }, | |
| { | |
| "epoch": 2.7427055702917773, | |
| "grad_norm": 0.05713432687281468, | |
| "learning_rate": 2.2278680900415183e-07, | |
| "loss": 0.001, | |
| "step": 1034 | |
| }, | |
| { | |
| "epoch": 2.7453580901856762, | |
| "grad_norm": 0.05818856311436017, | |
| "learning_rate": 2.1825048744600062e-07, | |
| "loss": 0.001, | |
| "step": 1035 | |
| }, | |
| { | |
| "epoch": 2.7480106100795756, | |
| "grad_norm": 0.05694484218010227, | |
| "learning_rate": 2.1375979524144942e-07, | |
| "loss": 0.001, | |
| "step": 1036 | |
| }, | |
| { | |
| "epoch": 2.750663129973475, | |
| "grad_norm": 0.05672780760207481, | |
| "learning_rate": 2.093147752424668e-07, | |
| "loss": 0.001, | |
| "step": 1037 | |
| }, | |
| { | |
| "epoch": 2.753315649867374, | |
| "grad_norm": 0.05771013878875976, | |
| "learning_rate": 2.0491546986519896e-07, | |
| "loss": 0.001, | |
| "step": 1038 | |
| }, | |
| { | |
| "epoch": 2.7559681697612732, | |
| "grad_norm": 0.057116241500339324, | |
| "learning_rate": 2.0056192108956762e-07, | |
| "loss": 0.001, | |
| "step": 1039 | |
| }, | |
| { | |
| "epoch": 2.7586206896551726, | |
| "grad_norm": 0.059791212628948105, | |
| "learning_rate": 1.962541704588633e-07, | |
| "loss": 0.001, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 2.7612732095490715, | |
| "grad_norm": 0.05729818339303753, | |
| "learning_rate": 1.9199225907935492e-07, | |
| "loss": 0.001, | |
| "step": 1041 | |
| }, | |
| { | |
| "epoch": 2.763925729442971, | |
| "grad_norm": 0.06120345122849994, | |
| "learning_rate": 1.8777622761989355e-07, | |
| "loss": 0.001, | |
| "step": 1042 | |
| }, | |
| { | |
| "epoch": 2.7665782493368702, | |
| "grad_norm": 0.05586261375655423, | |
| "learning_rate": 1.8360611631152602e-07, | |
| "loss": 0.001, | |
| "step": 1043 | |
| }, | |
| { | |
| "epoch": 2.769230769230769, | |
| "grad_norm": 0.05740276066745265, | |
| "learning_rate": 1.794819649471119e-07, | |
| "loss": 0.001, | |
| "step": 1044 | |
| }, | |
| { | |
| "epoch": 2.7718832891246685, | |
| "grad_norm": 0.05699866094807924, | |
| "learning_rate": 1.7540381288094154e-07, | |
| "loss": 0.001, | |
| "step": 1045 | |
| }, | |
| { | |
| "epoch": 2.774535809018568, | |
| "grad_norm": 0.057985647025893546, | |
| "learning_rate": 1.7137169902836203e-07, | |
| "loss": 0.001, | |
| "step": 1046 | |
| }, | |
| { | |
| "epoch": 2.777188328912467, | |
| "grad_norm": 0.056119960022791834, | |
| "learning_rate": 1.6738566186540628e-07, | |
| "loss": 0.001, | |
| "step": 1047 | |
| }, | |
| { | |
| "epoch": 2.779840848806366, | |
| "grad_norm": 0.057465326055435084, | |
| "learning_rate": 1.6344573942842333e-07, | |
| "loss": 0.001, | |
| "step": 1048 | |
| }, | |
| { | |
| "epoch": 2.782493368700265, | |
| "grad_norm": 0.057387984293876836, | |
| "learning_rate": 1.5955196931371985e-07, | |
| "loss": 0.001, | |
| "step": 1049 | |
| }, | |
| { | |
| "epoch": 2.7851458885941645, | |
| "grad_norm": 0.05749000125886829, | |
| "learning_rate": 1.5570438867719695e-07, | |
| "loss": 0.001, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 2.787798408488064, | |
| "grad_norm": 0.055779215853417795, | |
| "learning_rate": 1.5190303423399722e-07, | |
| "loss": 0.001, | |
| "step": 1051 | |
| }, | |
| { | |
| "epoch": 2.7904509283819627, | |
| "grad_norm": 0.05837133189327693, | |
| "learning_rate": 1.4814794225815443e-07, | |
| "loss": 0.001, | |
| "step": 1052 | |
| }, | |
| { | |
| "epoch": 2.793103448275862, | |
| "grad_norm": 0.057530072011698706, | |
| "learning_rate": 1.4443914858224938e-07, | |
| "loss": 0.001, | |
| "step": 1053 | |
| }, | |
| { | |
| "epoch": 2.795755968169761, | |
| "grad_norm": 0.05948059608152728, | |
| "learning_rate": 1.4077668859706407e-07, | |
| "loss": 0.001, | |
| "step": 1054 | |
| }, | |
| { | |
| "epoch": 2.7984084880636604, | |
| "grad_norm": 0.05739529031781353, | |
| "learning_rate": 1.3716059725124687e-07, | |
| "loss": 0.001, | |
| "step": 1055 | |
| }, | |
| { | |
| "epoch": 2.8010610079575597, | |
| "grad_norm": 0.057277216006988534, | |
| "learning_rate": 1.335909090509785e-07, | |
| "loss": 0.001, | |
| "step": 1056 | |
| }, | |
| { | |
| "epoch": 2.8037135278514587, | |
| "grad_norm": 0.05680339250328834, | |
| "learning_rate": 1.300676580596405e-07, | |
| "loss": 0.001, | |
| "step": 1057 | |
| }, | |
| { | |
| "epoch": 2.806366047745358, | |
| "grad_norm": 0.05828622619182935, | |
| "learning_rate": 1.2659087789749557e-07, | |
| "loss": 0.001, | |
| "step": 1058 | |
| }, | |
| { | |
| "epoch": 2.8090185676392574, | |
| "grad_norm": 0.05682695003127833, | |
| "learning_rate": 1.2316060174136e-07, | |
| "loss": 0.001, | |
| "step": 1059 | |
| }, | |
| { | |
| "epoch": 2.8116710875331563, | |
| "grad_norm": 0.08414401378334506, | |
| "learning_rate": 1.197768623242923e-07, | |
| "loss": 0.001, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 2.8143236074270557, | |
| "grad_norm": 0.059411490215966166, | |
| "learning_rate": 1.1643969193527783e-07, | |
| "loss": 0.001, | |
| "step": 1061 | |
| }, | |
| { | |
| "epoch": 2.816976127320955, | |
| "grad_norm": 0.05694234544716713, | |
| "learning_rate": 1.1314912241892184e-07, | |
| "loss": 0.001, | |
| "step": 1062 | |
| }, | |
| { | |
| "epoch": 2.819628647214854, | |
| "grad_norm": 0.05741006281378258, | |
| "learning_rate": 1.0990518517514759e-07, | |
| "loss": 0.001, | |
| "step": 1063 | |
| }, | |
| { | |
| "epoch": 2.8222811671087533, | |
| "grad_norm": 0.05784616738303726, | |
| "learning_rate": 1.0670791115889146e-07, | |
| "loss": 0.001, | |
| "step": 1064 | |
| }, | |
| { | |
| "epoch": 2.8249336870026527, | |
| "grad_norm": 0.05655639328478202, | |
| "learning_rate": 1.035573308798138e-07, | |
| "loss": 0.001, | |
| "step": 1065 | |
| }, | |
| { | |
| "epoch": 2.8275862068965516, | |
| "grad_norm": 0.05672848959218307, | |
| "learning_rate": 1.0045347440200192e-07, | |
| "loss": 0.001, | |
| "step": 1066 | |
| }, | |
| { | |
| "epoch": 2.830238726790451, | |
| "grad_norm": 0.05816423825933616, | |
| "learning_rate": 9.739637134368817e-08, | |
| "loss": 0.001, | |
| "step": 1067 | |
| }, | |
| { | |
| "epoch": 2.8328912466843503, | |
| "grad_norm": 0.0561554349681671, | |
| "learning_rate": 9.43860508769645e-08, | |
| "loss": 0.001, | |
| "step": 1068 | |
| }, | |
| { | |
| "epoch": 2.8355437665782492, | |
| "grad_norm": 0.05699530848353419, | |
| "learning_rate": 9.142254172750498e-08, | |
| "loss": 0.001, | |
| "step": 1069 | |
| }, | |
| { | |
| "epoch": 2.8381962864721486, | |
| "grad_norm": 0.0564410579665074, | |
| "learning_rate": 8.850587217429096e-08, | |
| "loss": 0.001, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 2.840848806366048, | |
| "grad_norm": 0.05669473676989865, | |
| "learning_rate": 8.563607004934193e-08, | |
| "loss": 0.001, | |
| "step": 1071 | |
| }, | |
| { | |
| "epoch": 2.843501326259947, | |
| "grad_norm": 0.05568006355362385, | |
| "learning_rate": 8.281316273744955e-08, | |
| "loss": 0.001, | |
| "step": 1072 | |
| }, | |
| { | |
| "epoch": 2.8461538461538463, | |
| "grad_norm": 0.057365180160896465, | |
| "learning_rate": 8.003717717591786e-08, | |
| "loss": 0.001, | |
| "step": 1073 | |
| }, | |
| { | |
| "epoch": 2.8488063660477456, | |
| "grad_norm": 0.05733773586444649, | |
| "learning_rate": 7.730813985430407e-08, | |
| "loss": 0.001, | |
| "step": 1074 | |
| }, | |
| { | |
| "epoch": 2.8514588859416445, | |
| "grad_norm": 0.05706086364318491, | |
| "learning_rate": 7.4626076814166e-08, | |
| "loss": 0.001, | |
| "step": 1075 | |
| }, | |
| { | |
| "epoch": 2.854111405835544, | |
| "grad_norm": 0.056722851232893674, | |
| "learning_rate": 7.199101364881389e-08, | |
| "loss": 0.001, | |
| "step": 1076 | |
| }, | |
| { | |
| "epoch": 2.856763925729443, | |
| "grad_norm": 0.056527232404078934, | |
| "learning_rate": 6.940297550306895e-08, | |
| "loss": 0.001, | |
| "step": 1077 | |
| }, | |
| { | |
| "epoch": 2.859416445623342, | |
| "grad_norm": 0.05756681570287931, | |
| "learning_rate": 6.686198707301861e-08, | |
| "loss": 0.001, | |
| "step": 1078 | |
| }, | |
| { | |
| "epoch": 2.862068965517241, | |
| "grad_norm": 0.05633734533086417, | |
| "learning_rate": 6.436807260578437e-08, | |
| "loss": 0.001, | |
| "step": 1079 | |
| }, | |
| { | |
| "epoch": 2.8647214854111405, | |
| "grad_norm": 0.057283975114846286, | |
| "learning_rate": 6.192125589928821e-08, | |
| "loss": 0.001, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 2.86737400530504, | |
| "grad_norm": 0.0637407579954582, | |
| "learning_rate": 5.952156030202716e-08, | |
| "loss": 0.001, | |
| "step": 1081 | |
| }, | |
| { | |
| "epoch": 2.8700265251989387, | |
| "grad_norm": 0.056402977468176935, | |
| "learning_rate": 5.7169008712851245e-08, | |
| "loss": 0.001, | |
| "step": 1082 | |
| }, | |
| { | |
| "epoch": 2.872679045092838, | |
| "grad_norm": 0.05861670601377333, | |
| "learning_rate": 5.486362358074093e-08, | |
| "loss": 0.001, | |
| "step": 1083 | |
| }, | |
| { | |
| "epoch": 2.8753315649867375, | |
| "grad_norm": 0.059395193020708545, | |
| "learning_rate": 5.2605426904598356e-08, | |
| "loss": 0.001, | |
| "step": 1084 | |
| }, | |
| { | |
| "epoch": 2.8779840848806364, | |
| "grad_norm": 0.12224723725938012, | |
| "learning_rate": 5.0394440233031975e-08, | |
| "loss": 0.001, | |
| "step": 1085 | |
| }, | |
| { | |
| "epoch": 2.8806366047745358, | |
| "grad_norm": 0.05844774683108823, | |
| "learning_rate": 4.823068466415615e-08, | |
| "loss": 0.001, | |
| "step": 1086 | |
| }, | |
| { | |
| "epoch": 2.883289124668435, | |
| "grad_norm": 0.0575947236683043, | |
| "learning_rate": 4.611418084538577e-08, | |
| "loss": 0.001, | |
| "step": 1087 | |
| }, | |
| { | |
| "epoch": 2.885941644562334, | |
| "grad_norm": 0.057910527366754984, | |
| "learning_rate": 4.4044948973240855e-08, | |
| "loss": 0.001, | |
| "step": 1088 | |
| }, | |
| { | |
| "epoch": 2.8885941644562334, | |
| "grad_norm": 0.0567875185229435, | |
| "learning_rate": 4.202300879315446e-08, | |
| "loss": 0.001, | |
| "step": 1089 | |
| }, | |
| { | |
| "epoch": 2.8912466843501328, | |
| "grad_norm": 0.0560933582836746, | |
| "learning_rate": 4.004837959928287e-08, | |
| "loss": 0.001, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 2.8938992042440317, | |
| "grad_norm": 0.05706582352434167, | |
| "learning_rate": 3.8121080234322374e-08, | |
| "loss": 0.001, | |
| "step": 1091 | |
| }, | |
| { | |
| "epoch": 2.896551724137931, | |
| "grad_norm": 0.05690225068906364, | |
| "learning_rate": 3.6241129089329416e-08, | |
| "loss": 0.001, | |
| "step": 1092 | |
| }, | |
| { | |
| "epoch": 2.8992042440318304, | |
| "grad_norm": 0.05668723397749334, | |
| "learning_rate": 3.4408544103544663e-08, | |
| "loss": 0.001, | |
| "step": 1093 | |
| }, | |
| { | |
| "epoch": 2.9018567639257293, | |
| "grad_norm": 0.055100985423046284, | |
| "learning_rate": 3.262334276422141e-08, | |
| "loss": 0.001, | |
| "step": 1094 | |
| }, | |
| { | |
| "epoch": 2.9045092838196287, | |
| "grad_norm": 0.057150731098588155, | |
| "learning_rate": 3.088554210646133e-08, | |
| "loss": 0.001, | |
| "step": 1095 | |
| }, | |
| { | |
| "epoch": 2.907161803713528, | |
| "grad_norm": 0.0570294541374595, | |
| "learning_rate": 2.9195158713047345e-08, | |
| "loss": 0.001, | |
| "step": 1096 | |
| }, | |
| { | |
| "epoch": 2.909814323607427, | |
| "grad_norm": 0.22108261869549395, | |
| "learning_rate": 2.7552208714290428e-08, | |
| "loss": 0.001, | |
| "step": 1097 | |
| }, | |
| { | |
| "epoch": 2.9124668435013263, | |
| "grad_norm": 0.05721675679774755, | |
| "learning_rate": 2.595670778787196e-08, | |
| "loss": 0.001, | |
| "step": 1098 | |
| }, | |
| { | |
| "epoch": 2.9151193633952257, | |
| "grad_norm": 0.05705710641083908, | |
| "learning_rate": 2.4408671158695495e-08, | |
| "loss": 0.001, | |
| "step": 1099 | |
| }, | |
| { | |
| "epoch": 2.9177718832891246, | |
| "grad_norm": 0.0578652061122199, | |
| "learning_rate": 2.2908113598741344e-08, | |
| "loss": 0.001, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 2.920424403183024, | |
| "grad_norm": 0.055816939122989954, | |
| "learning_rate": 2.1455049426926666e-08, | |
| "loss": 0.001, | |
| "step": 1101 | |
| }, | |
| { | |
| "epoch": 2.9230769230769234, | |
| "grad_norm": 0.057142321113611255, | |
| "learning_rate": 2.004949250896615e-08, | |
| "loss": 0.001, | |
| "step": 1102 | |
| }, | |
| { | |
| "epoch": 2.9257294429708223, | |
| "grad_norm": 0.05681935954188008, | |
| "learning_rate": 1.8691456257243223e-08, | |
| "loss": 0.001, | |
| "step": 1103 | |
| }, | |
| { | |
| "epoch": 2.9283819628647216, | |
| "grad_norm": 0.05682017282269606, | |
| "learning_rate": 1.7380953630678488e-08, | |
| "loss": 0.001, | |
| "step": 1104 | |
| }, | |
| { | |
| "epoch": 2.9310344827586206, | |
| "grad_norm": 0.059517284720241787, | |
| "learning_rate": 1.6117997134609263e-08, | |
| "loss": 0.001, | |
| "step": 1105 | |
| }, | |
| { | |
| "epoch": 2.93368700265252, | |
| "grad_norm": 0.05634650368015056, | |
| "learning_rate": 1.4902598820668023e-08, | |
| "loss": 0.001, | |
| "step": 1106 | |
| }, | |
| { | |
| "epoch": 2.936339522546419, | |
| "grad_norm": 0.05999387663001934, | |
| "learning_rate": 1.373477028666803e-08, | |
| "loss": 0.001, | |
| "step": 1107 | |
| }, | |
| { | |
| "epoch": 2.938992042440318, | |
| "grad_norm": 0.056249105833506535, | |
| "learning_rate": 1.2614522676493435e-08, | |
| "loss": 0.001, | |
| "step": 1108 | |
| }, | |
| { | |
| "epoch": 2.9416445623342176, | |
| "grad_norm": 0.7140550763929713, | |
| "learning_rate": 1.1541866679992131e-08, | |
| "loss": 0.0012, | |
| "step": 1109 | |
| }, | |
| { | |
| "epoch": 2.9442970822281165, | |
| "grad_norm": 0.056582716647833035, | |
| "learning_rate": 1.0516812532873622e-08, | |
| "loss": 0.001, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 2.946949602122016, | |
| "grad_norm": 0.06701764443471915, | |
| "learning_rate": 9.53937001661187e-09, | |
| "loss": 0.001, | |
| "step": 1111 | |
| }, | |
| { | |
| "epoch": 2.949602122015915, | |
| "grad_norm": 0.0569784776347109, | |
| "learning_rate": 8.609548458351492e-09, | |
| "loss": 0.001, | |
| "step": 1112 | |
| }, | |
| { | |
| "epoch": 2.952254641909814, | |
| "grad_norm": 0.05620589879415512, | |
| "learning_rate": 7.727356730820035e-09, | |
| "loss": 0.001, | |
| "step": 1113 | |
| }, | |
| { | |
| "epoch": 2.9549071618037135, | |
| "grad_norm": 0.057536750948128934, | |
| "learning_rate": 6.892803252240287e-09, | |
| "loss": 0.001, | |
| "step": 1114 | |
| }, | |
| { | |
| "epoch": 2.957559681697613, | |
| "grad_norm": 0.05635903557892691, | |
| "learning_rate": 6.105895986253108e-09, | |
| "loss": 0.001, | |
| "step": 1115 | |
| }, | |
| { | |
| "epoch": 2.9602122015915118, | |
| "grad_norm": 0.056199142547632225, | |
| "learning_rate": 5.366642441841374e-09, | |
| "loss": 0.001, | |
| "step": 1116 | |
| }, | |
| { | |
| "epoch": 2.962864721485411, | |
| "grad_norm": 0.0740289732481234, | |
| "learning_rate": 4.675049673255605e-09, | |
| "loss": 0.001, | |
| "step": 1117 | |
| }, | |
| { | |
| "epoch": 2.9655172413793105, | |
| "grad_norm": 0.11639386331049349, | |
| "learning_rate": 4.031124279948451e-09, | |
| "loss": 0.001, | |
| "step": 1118 | |
| }, | |
| { | |
| "epoch": 2.9681697612732094, | |
| "grad_norm": 0.057501778565294176, | |
| "learning_rate": 3.4348724065119687e-09, | |
| "loss": 0.001, | |
| "step": 1119 | |
| }, | |
| { | |
| "epoch": 2.970822281167109, | |
| "grad_norm": 0.0700548787138476, | |
| "learning_rate": 2.886299742618226e-09, | |
| "loss": 0.001, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 2.973474801061008, | |
| "grad_norm": 0.07041293934408836, | |
| "learning_rate": 2.385411522966563e-09, | |
| "loss": 0.001, | |
| "step": 1121 | |
| }, | |
| { | |
| "epoch": 2.976127320954907, | |
| "grad_norm": 0.05671027132698676, | |
| "learning_rate": 1.9322125272297488e-09, | |
| "loss": 0.001, | |
| "step": 1122 | |
| }, | |
| { | |
| "epoch": 2.9787798408488064, | |
| "grad_norm": 0.05758042640533521, | |
| "learning_rate": 1.5267070800140116e-09, | |
| "loss": 0.001, | |
| "step": 1123 | |
| }, | |
| { | |
| "epoch": 2.981432360742706, | |
| "grad_norm": 0.05777934222531676, | |
| "learning_rate": 1.168899050812966e-09, | |
| "loss": 0.001, | |
| "step": 1124 | |
| }, | |
| { | |
| "epoch": 2.9840848806366047, | |
| "grad_norm": 0.056001596133190376, | |
| "learning_rate": 8.587918539726403e-10, | |
| "loss": 0.001, | |
| "step": 1125 | |
| }, | |
| { | |
| "epoch": 2.986737400530504, | |
| "grad_norm": 0.056991553557379525, | |
| "learning_rate": 5.963884486598348e-10, | |
| "loss": 0.001, | |
| "step": 1126 | |
| }, | |
| { | |
| "epoch": 2.9893899204244034, | |
| "grad_norm": 0.0570425675108977, | |
| "learning_rate": 3.816913388315913e-10, | |
| "loss": 0.001, | |
| "step": 1127 | |
| }, | |
| { | |
| "epoch": 2.9920424403183024, | |
| "grad_norm": 0.16572017430864017, | |
| "learning_rate": 2.1470257321298815e-10, | |
| "loss": 0.001, | |
| "step": 1128 | |
| }, | |
| { | |
| "epoch": 2.9946949602122017, | |
| "grad_norm": 0.05750168732465444, | |
| "learning_rate": 9.54237452771567e-11, | |
| "loss": 0.001, | |
| "step": 1129 | |
| }, | |
| { | |
| "epoch": 2.9973474801061006, | |
| "grad_norm": 0.0578955313388055, | |
| "learning_rate": 2.3855993230292862e-11, | |
| "loss": 0.001, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "grad_norm": 0.05638576861083123, | |
| "learning_rate": 0.0, | |
| "loss": 0.001, | |
| "step": 1131 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "eval_loss": 1.5612131357192993, | |
| "eval_runtime": 186.0893, | |
| "eval_samples_per_second": 11.366, | |
| "eval_steps_per_second": 1.424, | |
| "step": 1131 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "step": 1131, | |
| "total_flos": 33123426287616.0, | |
| "train_loss": 0.5442916138076954, | |
| "train_runtime": 5084.6678, | |
| "train_samples_per_second": 3.558, | |
| "train_steps_per_second": 0.222 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 1131, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 33123426287616.0, | |
| "train_batch_size": 2, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
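
A minimal sketch for working with a state file like the one above (assuming it is saved, without the table wrapping, as `trainer_state.json`; the path and the 10x-median spike threshold are illustrative choices, not part of the original file). It loads `log_history`, separates per-step training records from the eval and summary entries, and reports the final losses alongside any gradient-norm outliers such as the spike at step 1006:

```python
import json

# Hypothetical path: adjust to wherever the Trainer checkpoint was saved.
with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step training records carry "loss"; the eval entry carries
# "eval_loss" and the final summary entry carries "train_loss" instead.
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]

print(f"steps logged:     {len(train_logs)}")
print(f"final train loss: {train_logs[-1]['loss']}")
if eval_logs:
    print(f"final eval loss:  {eval_logs[-1]['eval_loss']}")

# Flag gradient-norm outliers; the 10x-median cutoff is an arbitrary
# heuristic, e.g. it catches the grad_norm of ~18.0 at step 1006.
norms = sorted(e["grad_norm"] for e in train_logs)
median = norms[len(norms) // 2]
spikes = [e["step"] for e in train_logs if e["grad_norm"] > 10 * median]
print(f"grad-norm spikes: {spikes}")
```

This kind of quick check also surfaces the divergence recorded above between the final per-step training loss (0.001) and the epoch-3 eval_loss (1.56).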