{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 754,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.002652519893899204,
      "grad_norm": 964.2187679026922,
      "learning_rate": 1.3157894736842107e-07,
      "loss": 12.3902,
      "step": 1
    },
    {
      "epoch": 0.005305039787798408,
      "grad_norm": 921.958588448878,
      "learning_rate": 2.6315789473684213e-07,
      "loss": 12.3282,
      "step": 2
    },
    {
      "epoch": 0.007957559681697613,
      "grad_norm": 974.3733617920872,
      "learning_rate": 3.9473684210526315e-07,
      "loss": 12.2463,
      "step": 3
    },
    {
      "epoch": 0.010610079575596816,
      "grad_norm": 1033.5800201783609,
      "learning_rate": 5.263157894736843e-07,
      "loss": 12.2958,
      "step": 4
    },
    {
      "epoch": 0.013262599469496022,
      "grad_norm": 1016.3974150016741,
      "learning_rate": 6.578947368421053e-07,
      "loss": 12.3704,
      "step": 5
    },
    {
      "epoch": 0.015915119363395226,
      "grad_norm": 925.7507817778275,
      "learning_rate": 7.894736842105263e-07,
      "loss": 12.1259,
      "step": 6
    },
    {
      "epoch": 0.01856763925729443,
      "grad_norm": 873.210487329803,
      "learning_rate": 9.210526315789474e-07,
      "loss": 11.8241,
      "step": 7
    },
    {
      "epoch": 0.021220159151193633,
      "grad_norm": 960.3674707267016,
      "learning_rate": 1.0526315789473685e-06,
      "loss": 11.4912,
      "step": 8
    },
    {
      "epoch": 0.023872679045092837,
      "grad_norm": 989.78383598521,
      "learning_rate": 1.1842105263157894e-06,
      "loss": 11.3891,
      "step": 9
    },
    {
      "epoch": 0.026525198938992044,
      "grad_norm": 903.7350997614694,
      "learning_rate": 1.3157894736842106e-06,
      "loss": 10.2865,
      "step": 10
    },
    {
      "epoch": 0.029177718832891247,
      "grad_norm": 931.8167078263704,
      "learning_rate": 1.4473684210526317e-06,
      "loss": 9.9394,
      "step": 11
    },
    {
      "epoch": 0.03183023872679045,
      "grad_norm": 923.6087227350214,
      "learning_rate": 1.5789473684210526e-06,
      "loss": 9.4732,
      "step": 12
    },
    {
      "epoch": 0.034482758620689655,
      "grad_norm": 813.3743881028537,
      "learning_rate": 1.710526315789474e-06,
      "loss": 8.6724,
      "step": 13
    },
    {
      "epoch": 0.03713527851458886,
      "grad_norm": 843.8799314043998,
      "learning_rate": 1.8421052631578948e-06,
      "loss": 8.4515,
      "step": 14
    },
    {
      "epoch": 0.03978779840848806,
      "grad_norm": 773.295777482183,
      "learning_rate": 1.973684210526316e-06,
      "loss": 8.1562,
      "step": 15
    },
    {
      "epoch": 0.042440318302387266,
      "grad_norm": 602.3737049496269,
      "learning_rate": 2.105263157894737e-06,
      "loss": 7.612,
      "step": 16
    },
    {
      "epoch": 0.04509283819628647,
      "grad_norm": 584.1927754293622,
      "learning_rate": 2.236842105263158e-06,
      "loss": 6.3209,
      "step": 17
    },
    {
      "epoch": 0.04774535809018567,
      "grad_norm": 377.562540989771,
      "learning_rate": 2.368421052631579e-06,
      "loss": 5.6651,
      "step": 18
    },
    {
      "epoch": 0.050397877984084884,
      "grad_norm": 438.03441394074736,
      "learning_rate": 2.5e-06,
      "loss": 5.259,
      "step": 19
    },
    {
      "epoch": 0.05305039787798409,
      "grad_norm": 364.1100523139272,
      "learning_rate": 2.631578947368421e-06,
      "loss": 5.1039,
      "step": 20
    },
    {
      "epoch": 0.05570291777188329,
      "grad_norm": 342.845726560959,
      "learning_rate": 2.7631578947368424e-06,
      "loss": 4.7562,
      "step": 21
    },
    {
      "epoch": 0.058355437665782495,
      "grad_norm": 301.2033168334068,
      "learning_rate": 2.8947368421052634e-06,
      "loss": 4.5827,
      "step": 22
    },
    {
      "epoch": 0.0610079575596817,
      "grad_norm": 374.4884653476006,
      "learning_rate": 3.0263157894736843e-06,
      "loss": 4.4653,
      "step": 23
    },
    {
      "epoch": 0.0636604774535809,
      "grad_norm": 403.305454318838,
      "learning_rate": 3.157894736842105e-06,
      "loss": 4.2779,
      "step": 24
    },
    {
      "epoch": 0.06631299734748011,
      "grad_norm": 353.9986155218897,
      "learning_rate": 3.289473684210527e-06,
      "loss": 3.9744,
      "step": 25
    },
    {
      "epoch": 0.06896551724137931,
      "grad_norm": 248.6193974694029,
      "learning_rate": 3.421052631578948e-06,
      "loss": 3.6383,
      "step": 26
    },
    {
      "epoch": 0.07161803713527852,
      "grad_norm": 307.1594512186394,
      "learning_rate": 3.5526315789473687e-06,
      "loss": 3.5038,
      "step": 27
    },
    {
      "epoch": 0.07427055702917772,
      "grad_norm": 275.07431525270925,
      "learning_rate": 3.6842105263157896e-06,
      "loss": 3.4938,
      "step": 28
    },
    {
      "epoch": 0.07692307692307693,
      "grad_norm": 271.8921488833055,
      "learning_rate": 3.815789473684211e-06,
      "loss": 3.2977,
      "step": 29
    },
    {
      "epoch": 0.07957559681697612,
      "grad_norm": 294.8982691026841,
      "learning_rate": 3.947368421052632e-06,
      "loss": 3.2739,
      "step": 30
    },
    {
      "epoch": 0.08222811671087533,
      "grad_norm": 262.1071558710169,
      "learning_rate": 4.078947368421053e-06,
      "loss": 3.2013,
      "step": 31
    },
    {
      "epoch": 0.08488063660477453,
      "grad_norm": 229.55816377224113,
      "learning_rate": 4.210526315789474e-06,
      "loss": 3.0229,
      "step": 32
    },
    {
      "epoch": 0.08753315649867374,
      "grad_norm": 258.70599750378545,
      "learning_rate": 4.342105263157895e-06,
      "loss": 3.0239,
      "step": 33
    },
    {
      "epoch": 0.09018567639257294,
      "grad_norm": 246.1667693256739,
      "learning_rate": 4.473684210526316e-06,
      "loss": 2.9517,
      "step": 34
    },
    {
      "epoch": 0.09283819628647215,
      "grad_norm": 226.68418484352097,
      "learning_rate": 4.605263157894737e-06,
      "loss": 2.8555,
      "step": 35
    },
    {
      "epoch": 0.09549071618037135,
      "grad_norm": 265.08598417075416,
      "learning_rate": 4.736842105263158e-06,
      "loss": 2.8596,
      "step": 36
    },
    {
      "epoch": 0.09814323607427056,
      "grad_norm": 202.22942322553115,
      "learning_rate": 4.8684210526315795e-06,
      "loss": 2.7193,
      "step": 37
    },
    {
      "epoch": 0.10079575596816977,
      "grad_norm": 197.52232181969407,
      "learning_rate": 5e-06,
      "loss": 2.7065,
      "step": 38
    },
    {
      "epoch": 0.10344827586206896,
      "grad_norm": 197.0077494116073,
      "learning_rate": 5.131578947368422e-06,
      "loss": 2.6832,
      "step": 39
    },
    {
      "epoch": 0.10610079575596817,
      "grad_norm": 206.21981316906903,
      "learning_rate": 5.263157894736842e-06,
      "loss": 2.6211,
      "step": 40
    },
    {
      "epoch": 0.10875331564986737,
      "grad_norm": 241.2414460890544,
      "learning_rate": 5.394736842105264e-06,
      "loss": 2.751,
      "step": 41
    },
    {
      "epoch": 0.11140583554376658,
      "grad_norm": 203.98902813348803,
      "learning_rate": 5.526315789473685e-06,
      "loss": 2.6146,
      "step": 42
    },
    {
      "epoch": 0.11405835543766578,
      "grad_norm": 250.72220112715308,
      "learning_rate": 5.657894736842106e-06,
      "loss": 2.6972,
      "step": 43
    },
    {
      "epoch": 0.11671087533156499,
      "grad_norm": 205.8370589894433,
      "learning_rate": 5.789473684210527e-06,
      "loss": 2.5528,
      "step": 44
    },
    {
      "epoch": 0.11936339522546419,
      "grad_norm": 232.6529502480781,
      "learning_rate": 5.921052631578948e-06,
      "loss": 2.5257,
      "step": 45
    },
    {
      "epoch": 0.1220159151193634,
      "grad_norm": 183.56326745821414,
      "learning_rate": 6.0526315789473685e-06,
      "loss": 2.4665,
      "step": 46
    },
    {
      "epoch": 0.1246684350132626,
      "grad_norm": 193.28463528581133,
      "learning_rate": 6.18421052631579e-06,
      "loss": 2.5764,
      "step": 47
    },
    {
      "epoch": 0.1273209549071618,
      "grad_norm": 194.32818165614543,
      "learning_rate": 6.31578947368421e-06,
      "loss": 2.5258,
      "step": 48
    },
    {
      "epoch": 0.129973474801061,
      "grad_norm": 147.84231901303195,
      "learning_rate": 6.447368421052632e-06,
      "loss": 2.3977,
      "step": 49
    },
    {
      "epoch": 0.13262599469496023,
      "grad_norm": 206.57306047998404,
      "learning_rate": 6.578947368421054e-06,
      "loss": 2.5407,
      "step": 50
    },
    {
      "epoch": 0.13527851458885942,
      "grad_norm": 184.8191889580646,
      "learning_rate": 6.710526315789474e-06,
      "loss": 2.5804,
      "step": 51
    },
    {
      "epoch": 0.13793103448275862,
      "grad_norm": 165.92299777135887,
      "learning_rate": 6.842105263157896e-06,
      "loss": 2.7385,
      "step": 52
    },
    {
      "epoch": 0.14058355437665782,
      "grad_norm": 153.57799012553724,
      "learning_rate": 6.973684210526316e-06,
      "loss": 2.4551,
      "step": 53
    },
    {
      "epoch": 0.14323607427055704,
      "grad_norm": 120.37849614658805,
      "learning_rate": 7.1052631578947375e-06,
      "loss": 2.3221,
      "step": 54
    },
    {
      "epoch": 0.14588859416445624,
      "grad_norm": 206.0913231142331,
      "learning_rate": 7.236842105263158e-06,
      "loss": 2.835,
      "step": 55
    },
    {
      "epoch": 0.14854111405835543,
      "grad_norm": 159.35276874332746,
      "learning_rate": 7.368421052631579e-06,
      "loss": 2.5064,
      "step": 56
    },
    {
      "epoch": 0.15119363395225463,
      "grad_norm": 163.91004929695652,
      "learning_rate": 7.500000000000001e-06,
      "loss": 2.4421,
      "step": 57
    },
    {
      "epoch": 0.15384615384615385,
      "grad_norm": 220.36730597533355,
      "learning_rate": 7.631578947368423e-06,
      "loss": 2.4689,
      "step": 58
    },
    {
      "epoch": 0.15649867374005305,
      "grad_norm": 123.69977576265524,
      "learning_rate": 7.763157894736843e-06,
      "loss": 2.3087,
      "step": 59
    },
    {
      "epoch": 0.15915119363395225,
      "grad_norm": 191.8120630218028,
      "learning_rate": 7.894736842105265e-06,
      "loss": 2.539,
      "step": 60
    },
    {
      "epoch": 0.16180371352785147,
      "grad_norm": 161.1724554095777,
      "learning_rate": 8.026315789473685e-06,
      "loss": 2.3479,
      "step": 61
    },
    {
      "epoch": 0.16445623342175067,
      "grad_norm": 179.64976805756,
      "learning_rate": 8.157894736842106e-06,
      "loss": 2.3192,
      "step": 62
    },
    {
      "epoch": 0.16710875331564987,
      "grad_norm": 198.31090510744684,
      "learning_rate": 8.289473684210526e-06,
      "loss": 2.6465,
      "step": 63
    },
    {
      "epoch": 0.16976127320954906,
      "grad_norm": 188.31226094765435,
      "learning_rate": 8.421052631578948e-06,
      "loss": 2.4815,
      "step": 64
    },
    {
      "epoch": 0.1724137931034483,
      "grad_norm": 176.40592762493222,
      "learning_rate": 8.552631578947368e-06,
      "loss": 2.3363,
      "step": 65
    },
    {
      "epoch": 0.17506631299734748,
      "grad_norm": 160.88672395875722,
      "learning_rate": 8.68421052631579e-06,
      "loss": 2.4575,
      "step": 66
    },
    {
      "epoch": 0.17771883289124668,
      "grad_norm": 280.94907400654307,
      "learning_rate": 8.81578947368421e-06,
      "loss": 2.2674,
      "step": 67
    },
    {
      "epoch": 0.18037135278514588,
      "grad_norm": 180.33471106449431,
      "learning_rate": 8.947368421052632e-06,
      "loss": 2.3316,
      "step": 68
    },
    {
      "epoch": 0.1830238726790451,
      "grad_norm": 187.04581008470694,
      "learning_rate": 9.078947368421054e-06,
      "loss": 2.3205,
      "step": 69
    },
    {
      "epoch": 0.1856763925729443,
      "grad_norm": 332.3820092319189,
      "learning_rate": 9.210526315789474e-06,
      "loss": 2.3148,
      "step": 70
    },
    {
      "epoch": 0.1883289124668435,
      "grad_norm": 172.50127961592412,
      "learning_rate": 9.342105263157895e-06,
      "loss": 2.2461,
      "step": 71
    },
    {
      "epoch": 0.1909814323607427,
      "grad_norm": 277.80095915937767,
      "learning_rate": 9.473684210526315e-06,
      "loss": 2.3428,
      "step": 72
    },
    {
      "epoch": 0.19363395225464192,
      "grad_norm": 221.03168479982568,
      "learning_rate": 9.605263157894737e-06,
      "loss": 2.3967,
      "step": 73
    },
    {
      "epoch": 0.1962864721485411,
      "grad_norm": 142.8700547139937,
      "learning_rate": 9.736842105263159e-06,
      "loss": 2.1308,
      "step": 74
    },
    {
      "epoch": 0.1989389920424403,
      "grad_norm": 283.7420206038234,
      "learning_rate": 9.868421052631579e-06,
      "loss": 2.1506,
      "step": 75
    },
    {
      "epoch": 0.20159151193633953,
      "grad_norm": 126.80709968760124,
      "learning_rate": 1e-05,
      "loss": 2.4119,
      "step": 76
    },
    {
      "epoch": 0.20424403183023873,
      "grad_norm": 295.76872671716245,
      "learning_rate": 9.999946324068588e-06,
      "loss": 2.0226,
      "step": 77
    },
    {
      "epoch": 0.20689655172413793,
      "grad_norm": 227.02809809630116,
      "learning_rate": 9.999785297426788e-06,
      "loss": 2.0235,
      "step": 78
    },
    {
      "epoch": 0.20954907161803712,
      "grad_norm": 235.8031949220893,
      "learning_rate": 9.999516923531906e-06,
      "loss": 2.1203,
      "step": 79
    },
    {
      "epoch": 0.21220159151193635,
      "grad_norm": 161.2458887807027,
      "learning_rate": 9.999141208146029e-06,
      "loss": 2.0525,
      "step": 80
    },
    {
      "epoch": 0.21485411140583555,
      "grad_norm": 290.51438113627165,
      "learning_rate": 9.998658159335903e-06,
      "loss": 2.0108,
      "step": 81
    },
    {
      "epoch": 0.21750663129973474,
      "grad_norm": 138.18012791942064,
      "learning_rate": 9.998067787472772e-06,
      "loss": 2.0035,
      "step": 82
    },
    {
      "epoch": 0.22015915119363394,
      "grad_norm": 421.0509223072362,
      "learning_rate": 9.997370105232134e-06,
      "loss": 2.0555,
      "step": 83
    },
    {
      "epoch": 0.22281167108753316,
      "grad_norm": 80.63002582572717,
      "learning_rate": 9.99656512759349e-06,
      "loss": 1.9421,
      "step": 84
    },
    {
      "epoch": 0.22546419098143236,
      "grad_norm": 303.88959370207726,
      "learning_rate": 9.995652871840006e-06,
      "loss": 2.1544,
      "step": 85
    },
    {
      "epoch": 0.22811671087533156,
      "grad_norm": 90.56704219456141,
      "learning_rate": 9.994633357558158e-06,
      "loss": 2.1229,
      "step": 86
    },
    {
      "epoch": 0.23076923076923078,
      "grad_norm": 105.20662734597437,
      "learning_rate": 9.993506606637297e-06,
      "loss": 1.8516,
      "step": 87
    },
    {
      "epoch": 0.23342175066312998,
      "grad_norm": 142.34075495626965,
      "learning_rate": 9.992272643269181e-06,
      "loss": 1.9962,
      "step": 88
    },
    {
      "epoch": 0.23607427055702918,
      "grad_norm": 62.063640339086405,
      "learning_rate": 9.990931493947467e-06,
      "loss": 1.8471,
      "step": 89
    },
    {
      "epoch": 0.23872679045092837,
      "grad_norm": 97.23602438382434,
      "learning_rate": 9.989483187467128e-06,
      "loss": 1.861,
      "step": 90
    },
    {
      "epoch": 0.2413793103448276,
      "grad_norm": 69.91210680726913,
      "learning_rate": 9.987927754923844e-06,
      "loss": 1.8162,
      "step": 91
    },
    {
      "epoch": 0.2440318302387268,
      "grad_norm": 57.11743629506158,
      "learning_rate": 9.986265229713332e-06,
      "loss": 1.8707,
      "step": 92
    },
    {
      "epoch": 0.246684350132626,
      "grad_norm": 113.53824707461247,
      "learning_rate": 9.98449564753063e-06,
      "loss": 2.1858,
      "step": 93
    },
    {
      "epoch": 0.2493368700265252,
      "grad_norm": 84.49722951749673,
      "learning_rate": 9.982619046369321e-06,
      "loss": 1.9264,
      "step": 94
    },
    {
      "epoch": 0.2519893899204244,
      "grad_norm": 90.79483359185215,
      "learning_rate": 9.980635466520738e-06,
      "loss": 1.9047,
      "step": 95
    },
    {
      "epoch": 0.2546419098143236,
      "grad_norm": 82.8311244764105,
      "learning_rate": 9.978544950573075e-06,
      "loss": 2.1199,
      "step": 96
    },
    {
      "epoch": 0.2572944297082228,
      "grad_norm": 55.505333663404826,
      "learning_rate": 9.976347543410487e-06,
      "loss": 1.6836,
      "step": 97
    },
    {
      "epoch": 0.259946949602122,
      "grad_norm": 67.1358821586414,
      "learning_rate": 9.974043292212129e-06,
      "loss": 2.0516,
      "step": 98
    },
    {
      "epoch": 0.2625994694960212,
      "grad_norm": 62.00409706602572,
      "learning_rate": 9.97163224645113e-06,
      "loss": 1.9279,
      "step": 99
    },
    {
      "epoch": 0.26525198938992045,
      "grad_norm": 46.13886865968944,
      "learning_rate": 9.96911445789354e-06,
      "loss": 1.6403,
      "step": 100
    },
    {
      "epoch": 0.26790450928381965,
      "grad_norm": 65.20474517101002,
      "learning_rate": 9.966489980597217e-06,
      "loss": 1.8295,
      "step": 101
    },
    {
      "epoch": 0.27055702917771884,
      "grad_norm": 65.19407887648836,
      "learning_rate": 9.963758870910672e-06,
      "loss": 2.008,
      "step": 102
    },
    {
      "epoch": 0.27320954907161804,
      "grad_norm": 42.54389417787075,
      "learning_rate": 9.960921187471841e-06,
      "loss": 1.5092,
      "step": 103
    },
    {
      "epoch": 0.27586206896551724,
      "grad_norm": 66.25244257768702,
      "learning_rate": 9.957976991206847e-06,
      "loss": 1.9777,
      "step": 104
    },
    {
      "epoch": 0.27851458885941643,
      "grad_norm": 40.99568907363924,
      "learning_rate": 9.95492634532868e-06,
      "loss": 1.7923,
      "step": 105
    },
    {
      "epoch": 0.28116710875331563,
      "grad_norm": 53.23587811828388,
      "learning_rate": 9.951769315335843e-06,
      "loss": 1.6235,
      "step": 106
    },
    {
      "epoch": 0.2838196286472148,
      "grad_norm": 34.72790613801945,
      "learning_rate": 9.94850596901095e-06,
      "loss": 1.3773,
      "step": 107
    },
    {
      "epoch": 0.2864721485411141,
      "grad_norm": 66.27639171193981,
      "learning_rate": 9.94513637641926e-06,
      "loss": 1.7899,
      "step": 108
    },
    {
      "epoch": 0.2891246684350133,
      "grad_norm": 37.4819698403607,
      "learning_rate": 9.94166060990718e-06,
      "loss": 1.3286,
      "step": 109
    },
    {
      "epoch": 0.2917771883289125,
      "grad_norm": 31.34150550958542,
      "learning_rate": 9.938078744100713e-06,
      "loss": 1.3079,
      "step": 110
    },
    {
      "epoch": 0.29442970822281167,
      "grad_norm": 39.5940115565292,
      "learning_rate": 9.934390855903852e-06,
      "loss": 1.3561,
      "step": 111
    },
    {
      "epoch": 0.29708222811671087,
      "grad_norm": 49.856946246195335,
      "learning_rate": 9.930597024496933e-06,
      "loss": 1.4053,
      "step": 112
    },
    {
      "epoch": 0.29973474801061006,
      "grad_norm": 42.6372605385728,
      "learning_rate": 9.926697331334924e-06,
      "loss": 1.1869,
      "step": 113
    },
    {
      "epoch": 0.30238726790450926,
      "grad_norm": 49.644922186459574,
      "learning_rate": 9.922691860145696e-06,
      "loss": 1.3708,
      "step": 114
    },
    {
      "epoch": 0.3050397877984085,
      "grad_norm": 51.891009907195325,
      "learning_rate": 9.918580696928206e-06,
      "loss": 1.3774,
      "step": 115
    },
    {
      "epoch": 0.3076923076923077,
      "grad_norm": 42.99681649752706,
      "learning_rate": 9.91436392995066e-06,
      "loss": 1.2047,
      "step": 116
    },
    {
      "epoch": 0.3103448275862069,
      "grad_norm": 35.97593818448492,
      "learning_rate": 9.910041649748613e-06,
      "loss": 1.2253,
      "step": 117
    },
    {
      "epoch": 0.3129973474801061,
      "grad_norm": 33.789231450454245,
      "learning_rate": 9.905613949123036e-06,
      "loss": 1.1402,
      "step": 118
    },
    {
      "epoch": 0.3156498673740053,
      "grad_norm": 43.433499916824914,
      "learning_rate": 9.901080923138308e-06,
      "loss": 1.1818,
      "step": 119
    },
    {
      "epoch": 0.3183023872679045,
      "grad_norm": 30.871309475300066,
      "learning_rate": 9.896442669120188e-06,
      "loss": 1.0327,
      "step": 120
    },
    {
      "epoch": 0.3209549071618037,
      "grad_norm": 128.20133510818536,
      "learning_rate": 9.891699286653714e-06,
      "loss": 1.1527,
      "step": 121
    },
    {
      "epoch": 0.32360742705570295,
      "grad_norm": 91.39628128015958,
      "learning_rate": 9.886850877581079e-06,
      "loss": 1.2442,
      "step": 122
    },
    {
      "epoch": 0.32625994694960214,
      "grad_norm": 30.16678910660276,
      "learning_rate": 9.88189754599943e-06,
      "loss": 1.0637,
      "step": 123
    },
    {
      "epoch": 0.32891246684350134,
      "grad_norm": 33.13109222795493,
      "learning_rate": 9.87683939825864e-06,
      "loss": 0.9803,
      "step": 124
    },
    {
      "epoch": 0.33156498673740054,
      "grad_norm": 24.8230025258712,
      "learning_rate": 9.87167654295903e-06,
      "loss": 0.8584,
      "step": 125
    },
    {
      "epoch": 0.33421750663129973,
      "grad_norm": 52.85501417689104,
      "learning_rate": 9.866409090949023e-06,
      "loss": 1.2777,
      "step": 126
    },
    {
      "epoch": 0.33687002652519893,
      "grad_norm": 33.559147777496975,
      "learning_rate": 9.861037155322777e-06,
      "loss": 1.1076,
      "step": 127
    },
    {
      "epoch": 0.3395225464190981,
      "grad_norm": 30.663484139773725,
      "learning_rate": 9.855560851417752e-06,
      "loss": 0.945,
      "step": 128
    },
    {
      "epoch": 0.3421750663129973,
      "grad_norm": 45.6665480612531,
      "learning_rate": 9.849980296812231e-06,
      "loss": 1.2544,
      "step": 129
    },
    {
      "epoch": 0.3448275862068966,
      "grad_norm": 28.02786025006337,
      "learning_rate": 9.844295611322804e-06,
      "loss": 1.0417,
      "step": 130
    },
    {
      "epoch": 0.34748010610079577,
      "grad_norm": 29.055402924895084,
      "learning_rate": 9.838506917001784e-06,
      "loss": 0.8431,
      "step": 131
    },
    {
      "epoch": 0.35013262599469497,
      "grad_norm": 21.099968558621292,
      "learning_rate": 9.832614338134595e-06,
      "loss": 0.7674,
      "step": 132
    },
    {
      "epoch": 0.35278514588859416,
      "grad_norm": 30.67156735527827,
      "learning_rate": 9.826618001237101e-06,
      "loss": 0.8185,
      "step": 133
    },
    {
      "epoch": 0.35543766578249336,
      "grad_norm": 26.843466616950924,
      "learning_rate": 9.82051803505289e-06,
      "loss": 0.8621,
      "step": 134
    },
    {
      "epoch": 0.35809018567639256,
      "grad_norm": 23.477887219504282,
      "learning_rate": 9.814314570550506e-06,
      "loss": 0.7081,
      "step": 135
    },
    {
      "epoch": 0.36074270557029176,
      "grad_norm": 38.337624391782896,
      "learning_rate": 9.808007740920647e-06,
      "loss": 1.0828,
      "step": 136
    },
    {
      "epoch": 0.363395225464191,
      "grad_norm": 38.032323530019895,
      "learning_rate": 9.80159768157329e-06,
      "loss": 1.2031,
      "step": 137
    },
    {
      "epoch": 0.3660477453580902,
      "grad_norm": 21.49819538749273,
      "learning_rate": 9.795084530134801e-06,
      "loss": 0.776,
      "step": 138
    },
    {
      "epoch": 0.3687002652519894,
      "grad_norm": 26.397930222572196,
      "learning_rate": 9.788468426444968e-06,
      "loss": 0.7757,
      "step": 139
    },
    {
      "epoch": 0.3713527851458886,
      "grad_norm": 23.575535518274087,
      "learning_rate": 9.781749512554e-06,
      "loss": 0.578,
      "step": 140
    },
    {
      "epoch": 0.3740053050397878,
      "grad_norm": 30.303563921915355,
      "learning_rate": 9.774927932719484e-06,
      "loss": 0.7992,
      "step": 141
    },
    {
      "epoch": 0.376657824933687,
      "grad_norm": 23.574970725547313,
      "learning_rate": 9.768003833403278e-06,
      "loss": 0.703,
      "step": 142
    },
    {
      "epoch": 0.3793103448275862,
      "grad_norm": 44.049160716872265,
      "learning_rate": 9.760977363268374e-06,
      "loss": 0.9374,
      "step": 143
    },
    {
      "epoch": 0.3819628647214854,
      "grad_norm": 34.61844002889476,
      "learning_rate": 9.753848673175707e-06,
      "loss": 0.8139,
      "step": 144
    },
    {
      "epoch": 0.38461538461538464,
      "grad_norm": 53.94104016123984,
      "learning_rate": 9.746617916180906e-06,
      "loss": 0.6011,
      "step": 145
    },
    {
      "epoch": 0.38726790450928383,
      "grad_norm": 21.016615206127664,
      "learning_rate": 9.739285247531019e-06,
      "loss": 0.5904,
      "step": 146
    },
    {
      "epoch": 0.38992042440318303,
      "grad_norm": 35.942716705163434,
      "learning_rate": 9.731850824661171e-06,
      "loss": 0.6562,
      "step": 147
    },
    {
      "epoch": 0.3925729442970822,
      "grad_norm": 22.523055612662546,
      "learning_rate": 9.724314807191197e-06,
      "loss": 0.5424,
      "step": 148
    },
    {
      "epoch": 0.3952254641909814,
      "grad_norm": 19.50210104338421,
      "learning_rate": 9.716677356922193e-06,
      "loss": 0.5982,
      "step": 149
    },
    {
      "epoch": 0.3978779840848806,
      "grad_norm": 21.70532202656517,
      "learning_rate": 9.708938637833065e-06,
      "loss": 0.5914,
      "step": 150
    },
    {
      "epoch": 0.4005305039787798,
      "grad_norm": 17.23168354522762,
      "learning_rate": 9.701098816076995e-06,
      "loss": 0.4588,
      "step": 151
    },
    {
      "epoch": 0.40318302387267907,
      "grad_norm": 20.921795674319984,
      "learning_rate": 9.693158059977879e-06,
      "loss": 0.4766,
      "step": 152
    },
    {
      "epoch": 0.40583554376657827,
      "grad_norm": 25.20434675018666,
      "learning_rate": 9.685116540026703e-06,
      "loss": 0.5802,
      "step": 153
    },
    {
      "epoch": 0.40848806366047746,
      "grad_norm": 23.374283292586217,
      "learning_rate": 9.6769744288779e-06,
      "loss": 0.3283,
      "step": 154
    },
    {
      "epoch": 0.41114058355437666,
      "grad_norm": 39.918196216075174,
      "learning_rate": 9.668731901345632e-06,
      "loss": 0.8864,
      "step": 155
    },
    {
      "epoch": 0.41379310344827586,
      "grad_norm": 24.458285925184185,
      "learning_rate": 9.660389134400034e-06,
      "loss": 0.5322,
      "step": 156
    },
    {
      "epoch": 0.41644562334217505,
      "grad_norm": 33.33399669600764,
      "learning_rate": 9.651946307163417e-06,
      "loss": 0.4822,
      "step": 157
    },
    {
      "epoch": 0.41909814323607425,
      "grad_norm": 50.052536105159064,
      "learning_rate": 9.643403600906433e-06,
      "loss": 0.8033,
      "step": 158
    },
    {
      "epoch": 0.4217506631299735,
      "grad_norm": 19.58506384135915,
      "learning_rate": 9.634761199044165e-06,
      "loss": 0.6383,
      "step": 159
    },
    {
      "epoch": 0.4244031830238727,
      "grad_norm": 20.614790177057632,
      "learning_rate": 9.626019287132202e-06,
      "loss": 0.4233,
      "step": 160
    },
    {
      "epoch": 0.4270557029177719,
      "grad_norm": 13.09043612963377,
      "learning_rate": 9.617178052862648e-06,
      "loss": 0.4118,
      "step": 161
    },
    {
      "epoch": 0.4297082228116711,
      "grad_norm": 91.05700992668534,
      "learning_rate": 9.608237686060099e-06,
      "loss": 0.4117,
      "step": 162
    },
    {
      "epoch": 0.4323607427055703,
      "grad_norm": 27.591007512009774,
      "learning_rate": 9.599198378677559e-06,
      "loss": 0.4483,
      "step": 163
    },
    {
      "epoch": 0.4350132625994695,
      "grad_norm": 30.65138008439746,
      "learning_rate": 9.590060324792328e-06,
      "loss": 0.4636,
      "step": 164
    },
    {
      "epoch": 0.4376657824933687,
      "grad_norm": 18.356210333973348,
      "learning_rate": 9.580823720601824e-06,
      "loss": 0.4186,
      "step": 165
    },
    {
      "epoch": 0.4403183023872679,
      "grad_norm": 41.190321728229286,
      "learning_rate": 9.571488764419381e-06,
      "loss": 0.3889,
      "step": 166
    },
    {
      "epoch": 0.44297082228116713,
      "grad_norm": 23.727121303907392,
      "learning_rate": 9.562055656669988e-06,
      "loss": 0.3846,
      "step": 167
    },
    {
      "epoch": 0.44562334217506633,
      "grad_norm": 18.641980174276593,
      "learning_rate": 9.552524599885982e-06,
      "loss": 0.3465,
      "step": 168
    },
    {
      "epoch": 0.4482758620689655,
      "grad_norm": 28.998914771509625,
      "learning_rate": 9.542895798702702e-06,
      "loss": 0.5385,
      "step": 169
    },
    {
      "epoch": 0.4509283819628647,
      "grad_norm": 16.00996215029444,
      "learning_rate": 9.5331694598541e-06,
      "loss": 0.3236,
      "step": 170
    },
    {
      "epoch": 0.4535809018567639,
      "grad_norm": 15.933631060662805,
      "learning_rate": 9.52334579216829e-06,
      "loss": 0.3484,
      "step": 171
    },
    {
      "epoch": 0.4562334217506631,
      "grad_norm": 43.19072258274523,
      "learning_rate": 9.51342500656308e-06,
      "loss": 0.3677,
      "step": 172
    },
    {
      "epoch": 0.4588859416445623,
      "grad_norm": 53.659076875322505,
      "learning_rate": 9.503407316041432e-06,
      "loss": 0.503,
      "step": 173
    },
    {
      "epoch": 0.46153846153846156,
      "grad_norm": 23.33531951595278,
      "learning_rate": 9.493292935686896e-06,
      "loss": 0.2895,
      "step": 174
    },
    {
      "epoch": 0.46419098143236076,
      "grad_norm": 18.529561375247077,
      "learning_rate": 9.483082082658984e-06,
      "loss": 0.3344,
      "step": 175
    },
    {
      "epoch": 0.46684350132625996,
      "grad_norm": 19.981613446208126,
      "learning_rate": 9.472774976188515e-06,
      "loss": 0.2751,
      "step": 176
    },
    {
      "epoch": 0.46949602122015915,
      "grad_norm": 21.106306038335028,
      "learning_rate": 9.462371837572907e-06,
      "loss": 0.3911,
      "step": 177
    },
    {
      "epoch": 0.47214854111405835,
      "grad_norm": 17.14239986795487,
      "learning_rate": 9.451872890171419e-06,
      "loss": 0.2954,
      "step": 178
    },
    {
      "epoch": 0.47480106100795755,
      "grad_norm": 24.739589854032555,
      "learning_rate": 9.441278359400366e-06,
      "loss": 0.3805,
      "step": 179
    },
    {
      "epoch": 0.47745358090185674,
      "grad_norm": 23.369236257540873,
      "learning_rate": 9.430588472728271e-06,
      "loss": 0.39,
      "step": 180
    },
    {
      "epoch": 0.48010610079575594,
      "grad_norm": 19.267093087532167,
      "learning_rate": 9.41980345967098e-06,
      "loss": 0.3561,
      "step": 181
    },
    {
      "epoch": 0.4827586206896552,
      "grad_norm": 22.614245081541842,
      "learning_rate": 9.408923551786742e-06,
      "loss": 0.331,
      "step": 182
    },
    {
      "epoch": 0.4854111405835544,
      "grad_norm": 17.0103597899092,
      "learning_rate": 9.397948982671237e-06,
      "loss": 0.3216,
      "step": 183
    },
    {
      "epoch": 0.4880636604774536,
      "grad_norm": 58.87704304280708,
      "learning_rate": 9.386879987952549e-06,
      "loss": 0.3509,
      "step": 184
    },
    {
      "epoch": 0.4907161803713528,
      "grad_norm": 60.21102673594824,
      "learning_rate": 9.375716805286122e-06,
      "loss": 0.4291,
      "step": 185
    },
    {
      "epoch": 0.493368700265252,
      "grad_norm": 28.569248261278577,
      "learning_rate": 9.364459674349642e-06,
      "loss": 0.4504,
      "step": 186
    },
    {
      "epoch": 0.4960212201591512,
      "grad_norm": 23.312940166623253,
      "learning_rate": 9.353108836837907e-06,
      "loss": 0.3292,
      "step": 187
    },
    {
      "epoch": 0.4986737400530504,
      "grad_norm": 21.811660901926434,
      "learning_rate": 9.341664536457626e-06,
      "loss": 0.3656,
      "step": 188
    },
    {
      "epoch": 0.5013262599469496,
      "grad_norm": 37.928763714522916,
      "learning_rate": 9.330127018922195e-06,
      "loss": 0.48,
      "step": 189
    },
    {
      "epoch": 0.5039787798408488,
      "grad_norm": 31.679402691081123,
      "learning_rate": 9.318496531946411e-06,
      "loss": 0.3708,
      "step": 190
    },
    {
      "epoch": 0.506631299734748,
      "grad_norm": 23.244128358634686,
      "learning_rate": 9.306773325241161e-06,
      "loss": 0.286,
      "step": 191
    },
    {
      "epoch": 0.5092838196286472,
      "grad_norm": 33.32604386601592,
      "learning_rate": 9.294957650508065e-06,
      "loss": 0.2883,
      "step": 192
    },
    {
      "epoch": 0.5119363395225465,
      "grad_norm": 31.451999618901944,
      "learning_rate": 9.283049761434059e-06,
      "loss": 0.4004,
      "step": 193
    },
    {
      "epoch": 0.5145888594164456,
      "grad_norm": 21.508965939222215,
      "learning_rate": 9.27104991368596e-06,
      "loss": 0.3267,
      "step": 194
    },
    {
      "epoch": 0.5172413793103449,
      "grad_norm": 18.568501525467312,
      "learning_rate": 9.258958364904966e-06,
      "loss": 0.2629,
      "step": 195
    },
    {
      "epoch": 0.519893899204244,
      "grad_norm": 26.74258271555819,
      "learning_rate": 9.246775374701139e-06,
      "loss": 0.3435,
      "step": 196
    },
    {
      "epoch": 0.5225464190981433,
      "grad_norm": 204.85908613104993,
      "learning_rate": 9.234501204647814e-06,
      "loss": 0.4236,
      "step": 197
    },
    {
      "epoch": 0.5251989389920424,
      "grad_norm": 21.96371611917712,
      "learning_rate": 9.222136118275996e-06,
      "loss": 0.2632,
      "step": 198
    },
    {
      "epoch": 0.5278514588859416,
      "grad_norm": 26.623519325189296,
      "learning_rate": 9.209680381068698e-06,
      "loss": 0.1853,
      "step": 199
    },
    {
      "epoch": 0.5305039787798409,
      "grad_norm": 23.73715897502698,
      "learning_rate": 9.197134260455233e-06,
      "loss": 0.3193,
      "step": 200
    },
    {
      "epoch": 0.53315649867374,
      "grad_norm": 65.46822393336659,
      "learning_rate": 9.184498025805493e-06,
      "loss": 0.2742,
      "step": 201
    },
    {
      "epoch": 0.5358090185676393,
      "grad_norm": 28.33797671963436,
      "learning_rate": 9.171771948424138e-06,
      "loss": 0.3889,
      "step": 202
    },
    {
      "epoch": 0.5384615384615384,
      "grad_norm": 43.831019007003654,
      "learning_rate": 9.158956301544791e-06,
      "loss": 0.3561,
      "step": 203
    },
    {
      "epoch": 0.5411140583554377,
      "grad_norm": 20.049127624205497,
      "learning_rate": 9.146051360324166e-06,
      "loss": 0.3796,
      "step": 204
    },
    {
      "epoch": 0.5437665782493368,
      "grad_norm": 55.866125371722354,
      "learning_rate": 9.13305740183616e-06,
      "loss": 0.335,
      "step": 205
    },
    {
      "epoch": 0.5464190981432361,
      "grad_norm": 23.921447546944503,
      "learning_rate": 9.119974705065902e-06,
      "loss": 0.3853,
      "step": 206
    },
    {
      "epoch": 0.5490716180371353,
      "grad_norm": 20.30356021949217,
      "learning_rate": 9.106803550903765e-06,
      "loss": 0.3289,
      "step": 207
    },
    {
      "epoch": 0.5517241379310345,
      "grad_norm": 40.531610855291405,
      "learning_rate": 9.093544222139338e-06,
      "loss": 0.4388,
      "step": 208
    },
    {
      "epoch": 0.5543766578249337,
      "grad_norm": 24.241007792497765,
      "learning_rate": 9.080197003455347e-06,
      "loss": 0.2976,
      "step": 209
    },
    {
      "epoch": 0.5570291777188329,
      "grad_norm": 60.156858568703655,
      "learning_rate": 9.066762181421552e-06,
      "loss": 0.261,
      "step": 210
    },
    {
      "epoch": 0.5596816976127321,
      "grad_norm": 10.167092687642706,
      "learning_rate": 9.053240044488587e-06,
      "loss": 0.2564,
      "step": 211
    },
    {
      "epoch": 0.5623342175066313,
      "grad_norm": 21.11769992321309,
      "learning_rate": 9.039630882981769e-06,
      "loss": 0.298,
      "step": 212
    },
    {
      "epoch": 0.5649867374005305,
      "grad_norm": 18.500048517524135,
      "learning_rate": 9.025934989094866e-06,
      "loss": 0.3692,
      "step": 213
    },
    {
      "epoch": 0.5676392572944297,
      "grad_norm": 18.444346057647593,
      "learning_rate": 9.012152656883824e-06,
      "loss": 0.302,
      "step": 214
    },
    {
      "epoch": 0.5702917771883289,
      "grad_norm": 23.90767570773742,
      "learning_rate": 8.998284182260448e-06,
      "loss": 0.2601,
      "step": 215
    },
    {
      "epoch": 0.5729442970822282,
      "grad_norm": 21.704685878329204,
      "learning_rate": 8.984329862986056e-06,
      "loss": 0.2876,
      "step": 216
    },
    {
      "epoch": 0.5755968169761273,
      "grad_norm": 14.256180086925218,
      "learning_rate": 8.970289998665083e-06,
      "loss": 0.1919,
      "step": 217
    },
    {
      "epoch": 0.5782493368700266,
      "grad_norm": 27.867020640041776,
      "learning_rate": 8.956164890738643e-06,
      "loss": 0.269,
      "step": 218
    },
    {
      "epoch": 0.5809018567639257,
      "grad_norm": 24.619023032626224,
      "learning_rate": 8.941954842478071e-06,
      "loss": 0.4232,
      "step": 219
    },
    {
      "epoch": 0.583554376657825,
      "grad_norm": 15.954860141663609,
      "learning_rate": 8.927660158978392e-06,
      "loss": 0.2921,
      "step": 220
    },
    {
      "epoch": 0.5862068965517241,
      "grad_norm": 51.79350122734381,
      "learning_rate": 8.913281147151793e-06,
      "loss": 0.2843,
      "step": 221
    },
    {
      "epoch": 0.5888594164456233,
      "grad_norm": 36.50056565020331,
      "learning_rate": 8.898818115721009e-06,
      "loss": 0.2118,
      "step": 222
    },
    {
      "epoch": 0.5915119363395226,
      "grad_norm": 36.99686400494177,
      "learning_rate": 8.884271375212714e-06,
      "loss": 0.4143,
      "step": 223
    },
    {
      "epoch": 0.5941644562334217,
      "grad_norm": 20.862778321044406,
      "learning_rate": 8.86964123795085e-06,
      "loss": 0.3183,
      "step": 224
    },
    {
      "epoch": 0.596816976127321,
      "grad_norm": 14.093369656685331,
      "learning_rate": 8.85492801804991e-06,
      "loss": 0.1993,
      "step": 225
    },
    {
      "epoch": 0.5994694960212201,
      "grad_norm": 22.23081666981876,
      "learning_rate": 8.84013203140821e-06,
      "loss": 0.1986,
      "step": 226
    },
    {
      "epoch": 0.6021220159151194,
      "grad_norm": 19.80394668513876,
      "learning_rate": 8.825253595701097e-06,
      "loss": 0.3522,
      "step": 227
    },
    {
      "epoch": 0.6047745358090185,
      "grad_norm": 26.79275848042136,
      "learning_rate": 8.810293030374126e-06,
      "loss": 0.3624,
      "step": 228
    },
    {
      "epoch": 0.6074270557029178,
      "grad_norm": 21.780691524401902,
      "learning_rate": 8.795250656636207e-06,
      "loss": 0.2359,
      "step": 229
    },
    {
      "epoch": 0.610079575596817,
      "grad_norm": 21.219645614344856,
      "learning_rate": 8.780126797452713e-06,
      "loss": 0.2334,
      "step": 230
    },
    {
      "epoch": 0.6127320954907162,
      "grad_norm": 51.90026115053599,
      "learning_rate": 8.764921777538533e-06,
      "loss": 0.2705,
      "step": 231
    },
    {
      "epoch": 0.6153846153846154,
      "grad_norm": 49.43197633630738,
      "learning_rate": 8.749635923351108e-06,
      "loss": 0.3167,
      "step": 232
    },
    {
      "epoch": 0.6180371352785146,
      "grad_norm": 36.92462588968303,
      "learning_rate": 8.734269563083424e-06,
      "loss": 0.2417,
      "step": 233
    },
    {
      "epoch": 0.6206896551724138,
      "grad_norm": 67.84010643091023,
      "learning_rate": 8.71882302665696e-06,
      "loss": 0.263,
      "step": 234
    },
    {
      "epoch": 0.623342175066313,
      "grad_norm": 108.83266184524678,
      "learning_rate": 8.70329664571461e-06,
      "loss": 0.4165,
      "step": 235
    },
    {
      "epoch": 0.6259946949602122,
      "grad_norm": 94.27017393376165,
      "learning_rate": 8.687690753613554e-06,
      "loss": 0.3899,
      "step": 236
    },
    {
      "epoch": 0.6286472148541115,
      "grad_norm": 52.46804981811211,
      "learning_rate": 8.672005685418115e-06,
      "loss": 0.3797,
      "step": 237
    },
    {
      "epoch": 0.6312997347480106,
      "grad_norm": 56.41690600413298,
      "learning_rate": 8.656241777892544e-06,
      "loss": 0.3732,
      "step": 238
    },
    {
      "epoch": 0.6339522546419099,
      "grad_norm": 18.544072802782022,
      "learning_rate": 8.640399369493813e-06,
      "loss": 0.1275,
      "step": 239
    },
    {
      "epoch": 0.636604774535809,
      "grad_norm": 16.8599408882226,
      "learning_rate": 8.624478800364332e-06,
      "loss": 0.2321,
      "step": 240
    },
    {
      "epoch": 0.6392572944297082,
      "grad_norm": 16.734448787553696,
      "learning_rate": 8.608480412324652e-06,
      "loss": 0.2389,
      "step": 241
    },
    {
      "epoch": 0.6419098143236074,
      "grad_norm": 35.136833501273095,
      "learning_rate": 8.592404548866123e-06,
      "loss": 0.3766,
      "step": 242
    },
    {
      "epoch": 0.6445623342175066,
      "grad_norm": 39.36384291939635,
      "learning_rate": 8.576251555143524e-06,
      "loss": 0.2749,
      "step": 243
    },
    {
      "epoch": 0.6472148541114059,
      "grad_norm": 26.89229898827449,
      "learning_rate": 8.56002177796765e-06,
      "loss": 0.3041,
      "step": 244
    },
    {
      "epoch": 0.649867374005305,
      "grad_norm": 34.67640312710769,
      "learning_rate": 8.543715565797861e-06,
      "loss": 0.5067,
      "step": 245
    },
    {
      "epoch": 0.6525198938992043,
      "grad_norm": 31.926480023321275,
      "learning_rate": 8.527333268734607e-06,
      "loss": 0.4187,
      "step": 246
    },
    {
      "epoch": 0.6551724137931034,
      "grad_norm": 17.78011923766947,
      "learning_rate": 8.510875238511911e-06,
      "loss": 0.1927,
      "step": 247
    },
    {
      "epoch": 0.6578249336870027,
      "grad_norm": 26.381549345861995,
      "learning_rate": 8.494341828489812e-06,
      "loss": 0.2971,
      "step": 248
    },
    {
      "epoch": 0.6604774535809018,
      "grad_norm": 33.6823473576565,
      "learning_rate": 8.477733393646787e-06,
      "loss": 0.2368,
      "step": 249
    },
    {
      "epoch": 0.6631299734748011,
      "grad_norm": 27.878530629080092,
      "learning_rate": 8.461050290572114e-06,
      "loss": 0.5135,
      "step": 250
    },
    {
      "epoch": 0.6657824933687002,
      "grad_norm": 24.696467892749254,
      "learning_rate": 8.444292877458238e-06,
      "loss": 0.3089,
      "step": 251
    },
    {
      "epoch": 0.6684350132625995,
      "grad_norm": 26.32348087778636,
      "learning_rate": 8.427461514093056e-06,
      "loss": 0.3046,
      "step": 252
    },
    {
      "epoch": 0.6710875331564987,
      "grad_norm": 34.10654274877737,
      "learning_rate": 8.410556561852212e-06,
      "loss": 0.3535,
      "step": 253
    },
    {
      "epoch": 0.6737400530503979,
      "grad_norm": 25.225897326947102,
      "learning_rate": 8.39357838369133e-06,
      "loss": 0.3604,
      "step": 254
    },
    {
      "epoch": 0.6763925729442971,
      "grad_norm": 29.866892257151928,
      "learning_rate": 8.376527344138222e-06,
      "loss": 0.3362,
      "step": 255
    },
    {
      "epoch": 0.6790450928381963,
      "grad_norm": 23.13688643114815,
      "learning_rate": 8.359403809285054e-06,
      "loss": 0.2836,
      "step": 256
    },
    {
      "epoch": 0.6816976127320955,
      "grad_norm": 16.321447662460205,
      "learning_rate": 8.342208146780504e-06,
      "loss": 0.2369,
      "step": 257
    },
    {
      "epoch": 0.6843501326259946,
      "grad_norm": 17.98697870887894,
      "learning_rate": 8.324940725821853e-06,
      "loss": 0.226,
      "step": 258
    },
    {
      "epoch": 0.6870026525198939,
      "grad_norm": 18.729098445571775,
      "learning_rate": 8.30760191714706e-06,
      "loss": 0.2951,
      "step": 259
    },
    {
      "epoch": 0.6896551724137931,
      "grad_norm": 28.078049412424807,
      "learning_rate": 8.290192093026805e-06,
      "loss": 0.2235,
      "step": 260
    },
    {
      "epoch": 0.6923076923076923,
      "grad_norm": 20.319159312044384,
      "learning_rate": 8.272711627256501e-06,
      "loss": 0.2237,
      "step": 261
    },
    {
      "epoch": 0.6949602122015915,
      "grad_norm": 16.171040213649807,
      "learning_rate": 8.255160895148263e-06,
      "loss": 0.2599,
      "step": 262
    },
    {
      "epoch": 0.6976127320954907,
      "grad_norm": 18.378464015099404,
      "learning_rate": 8.237540273522844e-06,
      "loss": 0.2559,
      "step": 263
    },
    {
      "epoch": 0.7002652519893899,
      "grad_norm": 15.16456007527664,
      "learning_rate": 8.219850140701557e-06,
      "loss": 0.2416,
      "step": 264
    },
    {
      "epoch": 0.7029177718832891,
      "grad_norm": 17.42517108861892,
      "learning_rate": 8.202090876498144e-06,
      "loss": 0.3457,
      "step": 265
    },
    {
      "epoch": 0.7055702917771883,
      "grad_norm": 15.839205180628854,
      "learning_rate": 8.184262862210624e-06,
      "loss": 0.2252,
      "step": 266
    },
    {
      "epoch": 0.7082228116710876,
      "grad_norm": 21.19692675070958,
      "learning_rate": 8.166366480613107e-06,
      "loss": 0.2369,
      "step": 267
    },
    {
      "epoch": 0.7108753315649867,
      "grad_norm": 81.02675748079616,
      "learning_rate": 8.14840211594757e-06,
      "loss": 0.1948,
      "step": 268
    },
    {
      "epoch": 0.713527851458886,
      "grad_norm": 22.50613740428894,
      "learning_rate": 8.13037015391562e-06,
      "loss": 0.3102,
      "step": 269
    },
    {
      "epoch": 0.7161803713527851,
      "grad_norm": 43.76441289802723,
      "learning_rate": 8.112270981670196e-06,
      "loss": 0.358,
      "step": 270
    },
    {
      "epoch": 0.7188328912466844,
      "grad_norm": 35.40704024813979,
      "learning_rate": 8.09410498780727e-06,
      "loss": 0.3967,
      "step": 271
    },
    {
      "epoch": 0.7214854111405835,
      "grad_norm": 27.467019387624163,
      "learning_rate": 8.075872562357502e-06,
      "loss": 0.4105,
      "step": 272
    },
    {
      "epoch": 0.7241379310344828,
      "grad_norm": 42.449463021117424,
      "learning_rate": 8.057574096777854e-06,
      "loss": 0.3883,
      "step": 273
    },
    {
      "epoch": 0.726790450928382,
      "grad_norm": 44.739456368332576,
      "learning_rate": 8.039209983943201e-06,
      "loss": 0.3679,
      "step": 274
    },
    {
      "epoch": 0.7294429708222812,
      "grad_norm": 20.341556558474032,
      "learning_rate": 8.020780618137889e-06,
      "loss": 0.2547,
      "step": 275
    },
    {
      "epoch": 0.7320954907161804,
      "grad_norm": 22.826492262152907,
      "learning_rate": 8.002286395047267e-06,
      "loss": 0.2292,
      "step": 276
    },
    {
      "epoch": 0.7347480106100795,
      "grad_norm": 28.5475919934408,
      "learning_rate": 7.983727711749194e-06,
      "loss": 0.4407,
      "step": 277
    },
    {
      "epoch": 0.7374005305039788,
      "grad_norm": 15.149149250353114,
      "learning_rate": 7.965104966705518e-06,
      "loss": 0.3145,
      "step": 278
    },
    {
      "epoch": 0.7400530503978779,
      "grad_norm": 20.005133100708917,
      "learning_rate": 7.946418559753509e-06,
      "loss": 0.2534,
      "step": 279
    },
    {
      "epoch": 0.7427055702917772,
      "grad_norm": 20.21639616159156,
      "learning_rate": 7.927668892097288e-06,
      "loss": 0.2205,
      "step": 280
    },
    {
      "epoch": 0.7453580901856764,
      "grad_norm": 13.39149473599178,
      "learning_rate": 7.908856366299206e-06,
      "loss": 0.1839,
      "step": 281
    },
    {
      "epoch": 0.7480106100795756,
      "grad_norm": 33.686880449213234,
      "learning_rate": 7.889981386271202e-06,
      "loss": 0.1838,
      "step": 282
    },
    {
      "epoch": 0.7506631299734748,
      "grad_norm": 13.754394275472217,
      "learning_rate": 7.871044357266124e-06,
      "loss": 0.2313,
      "step": 283
    },
    {
      "epoch": 0.753315649867374,
      "grad_norm": 14.459224005705513,
      "learning_rate": 7.852045685869046e-06,
      "loss": 0.1053,
      "step": 284
    },
    {
      "epoch": 0.7559681697612732,
      "grad_norm": 117.8302672155395,
      "learning_rate": 7.832985779988518e-06,
      "loss": 0.3165,
      "step": 285
    },
    {
      "epoch": 0.7586206896551724,
      "grad_norm": 16.33526411089675,
      "learning_rate": 7.81386504884782e-06,
      "loss": 0.1872,
      "step": 286
    },
    {
      "epoch": 0.7612732095490716,
      "grad_norm": 47.187790455758666,
      "learning_rate": 7.794683902976175e-06,
      "loss": 0.231,
      "step": 287
    },
    {
      "epoch": 0.7639257294429708,
      "grad_norm": 15.772144397278742,
      "learning_rate": 7.775442754199929e-06,
      "loss": 0.1877,
      "step": 288
    },
    {
      "epoch": 0.76657824933687,
      "grad_norm": 12.994364778004908,
      "learning_rate": 7.75614201563372e-06,
      "loss": 0.18,
      "step": 289
    },
    {
      "epoch": 0.7692307692307693,
      "grad_norm": 22.36486352363853,
      "learning_rate": 7.736782101671587e-06,
      "loss": 0.303,
      "step": 290
    },
    {
      "epoch": 0.7718832891246684,
      "grad_norm": 32.725616298595355,
      "learning_rate": 7.717363427978103e-06,
      "loss": 0.1963,
      "step": 291
    },
    {
      "epoch": 0.7745358090185677,
      "grad_norm": 23.454520470808298,
      "learning_rate": 7.697886411479422e-06,
      "loss": 0.19,
      "step": 292
    },
    {
      "epoch": 0.7771883289124668,
      "grad_norm": 20.910887867935426,
      "learning_rate": 7.67835147035435e-06,
      "loss": 0.2268,
      "step": 293
    },
    {
      "epoch": 0.7798408488063661,
      "grad_norm": 50.353395333451616,
      "learning_rate": 7.658759024025349e-06,
      "loss": 0.1152,
      "step": 294
    },
    {
      "epoch": 0.7824933687002652,
      "grad_norm": 11.646101737634984,
      "learning_rate": 7.639109493149537e-06,
      "loss": 0.1433,
      "step": 295
    },
    {
      "epoch": 0.7851458885941645,
      "grad_norm": 55.53232109593029,
      "learning_rate": 7.6194032996096685e-06,
      "loss": 0.1889,
      "step": 296
    },
    {
      "epoch": 0.7877984084880637,
      "grad_norm": 12.392781798300069,
      "learning_rate": 7.599640866505058e-06,
      "loss": 0.0739,
      "step": 297
    },
    {
      "epoch": 0.7904509283819628,
      "grad_norm": 24.971194773514526,
      "learning_rate": 7.579822618142505e-06,
      "loss": 0.1119,
      "step": 298
    },
    {
      "epoch": 0.7931034482758621,
      "grad_norm": 15.63325200104409,
      "learning_rate": 7.559948980027189e-06,
      "loss": 0.1515,
      "step": 299
    },
    {
      "epoch": 0.7957559681697612,
      "grad_norm": 76.01457348099422,
      "learning_rate": 7.540020378853523e-06,
      "loss": 0.1736,
      "step": 300
    },
    {
      "epoch": 0.7984084880636605,
      "grad_norm": 35.63712712761058,
      "learning_rate": 7.520037242496e-06,
      "loss": 0.1174,
      "step": 301
    },
    {
      "epoch": 0.8010610079575596,
      "grad_norm": 988.389832266585,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.4028,
      "step": 302
    },
    {
      "epoch": 0.8037135278514589,
      "grad_norm": 32.625190789301996,
      "learning_rate": 7.479909081572587e-06,
      "loss": 0.2346,
      "step": 303
    },
    {
      "epoch": 0.8063660477453581,
      "grad_norm": 23.026128427136946,
      "learning_rate": 7.459764918573264e-06,
      "loss": 0.1674,
      "step": 304
    },
    {
      "epoch": 0.8090185676392573,
      "grad_norm": 18.713869601750982,
      "learning_rate": 7.4395679435047175e-06,
      "loss": 0.1902,
      "step": 305
    },
    {
      "epoch": 0.8116710875331565,
      "grad_norm": 20.00221134734109,
      "learning_rate": 7.419318590003524e-06,
      "loss": 0.156,
      "step": 306
    },
    {
      "epoch": 0.8143236074270557,
      "grad_norm": 42.39911464772158,
      "learning_rate": 7.399017292830848e-06,
      "loss": 0.181,
      "step": 307
    },
    {
      "epoch": 0.8169761273209549,
      "grad_norm": 22.772924147215907,
      "learning_rate": 7.3786644878631035e-06,
      "loss": 0.0636,
      "step": 308
    },
    {
      "epoch": 0.8196286472148541,
      "grad_norm": 74.56197590599034,
      "learning_rate": 7.358260612082596e-06,
      "loss": 0.3211,
      "step": 309
    },
    {
      "epoch": 0.8222811671087533,
      "grad_norm": 24.565286804906005,
      "learning_rate": 7.3378061035681415e-06,
      "loss": 0.2091,
      "step": 310
    },
    {
      "epoch": 0.8249336870026526,
      "grad_norm": 22.841501045240477,
      "learning_rate": 7.317301401485657e-06,
      "loss": 0.093,
      "step": 311
    },
    {
      "epoch": 0.8275862068965517,
      "grad_norm": 17.507110869993074,
      "learning_rate": 7.296746946078737e-06,
      "loss": 0.1637,
      "step": 312
    },
    {
      "epoch": 0.830238726790451,
      "grad_norm": 21.199198503983045,
      "learning_rate": 7.276143178659195e-06,
      "loss": 0.1701,
      "step": 313
    },
    {
      "epoch": 0.8328912466843501,
      "grad_norm": 31.41003354784987,
      "learning_rate": 7.255490541597594e-06,
      "loss": 0.1502,
      "step": 314
    },
    {
      "epoch": 0.8355437665782494,
      "grad_norm": 82.97823194520805,
      "learning_rate": 7.2347894783137485e-06,
      "loss": 0.2837,
      "step": 315
    },
    {
      "epoch": 0.8381962864721485,
      "grad_norm": 104.3440966017041,
      "learning_rate": 7.2140404332671986e-06,
      "loss": 0.1578,
      "step": 316
    },
    {
      "epoch": 0.8408488063660478,
      "grad_norm": 16.745516650265987,
      "learning_rate": 7.19324385194767e-06,
      "loss": 0.16,
      "step": 317
    },
    {
      "epoch": 0.843501326259947,
      "grad_norm": 19.974820114927443,
      "learning_rate": 7.172400180865514e-06,
      "loss": 0.3236,
      "step": 318
    },
    {
      "epoch": 0.8461538461538461,
      "grad_norm": 14.774826615538025,
      "learning_rate": 7.1515098675421125e-06,
      "loss": 0.0918,
      "step": 319
    },
    {
      "epoch": 0.8488063660477454,
      "grad_norm": 16.189009917130136,
      "learning_rate": 7.130573360500277e-06,
      "loss": 0.0992,
      "step": 320
    },
    {
      "epoch": 0.8514588859416445,
      "grad_norm": 15.05014892933826,
      "learning_rate": 7.109591109254614e-06,
      "loss": 0.1429,
      "step": 321
    },
    {
      "epoch": 0.8541114058355438,
      "grad_norm": 29.682156721769513,
      "learning_rate": 7.088563564301874e-06,
      "loss": 0.1028,
      "step": 322
    },
    {
      "epoch": 0.8567639257294429,
      "grad_norm": 15.045877198788466,
      "learning_rate": 7.067491177111282e-06,
      "loss": 0.1395,
      "step": 323
    },
    {
      "epoch": 0.8594164456233422,
      "grad_norm": 15.403180549597606,
      "learning_rate": 7.046374400114842e-06,
      "loss": 0.0853,
      "step": 324
    },
    {
      "epoch": 0.8620689655172413,
      "grad_norm": 10.571508907922988,
      "learning_rate": 7.0252136866976205e-06,
      "loss": 0.1389,
      "step": 325
    },
    {
      "epoch": 0.8647214854111406,
      "grad_norm": 35.39432164754698,
      "learning_rate": 7.004009491188023e-06,
      "loss": 0.0927,
      "step": 326
    },
    {
      "epoch": 0.8673740053050398,
      "grad_norm": 74.75220120407818,
      "learning_rate": 6.982762268848024e-06,
      "loss": 0.2136,
      "step": 327
    },
    {
      "epoch": 0.870026525198939,
      "grad_norm": 40.78428890660548,
      "learning_rate": 6.961472475863406e-06,
      "loss": 0.2824,
      "step": 328
    },
    {
      "epoch": 0.8726790450928382,
      "grad_norm": 69.30332432144913,
      "learning_rate": 6.940140569333953e-06,
      "loss": 0.1883,
      "step": 329
    },
    {
      "epoch": 0.8753315649867374,
      "grad_norm": 18.51867410942239,
      "learning_rate": 6.918767007263646e-06,
      "loss": 0.1939,
      "step": 330
    },
    {
      "epoch": 0.8779840848806366,
      "grad_norm": 15.212952278018678,
      "learning_rate": 6.897352248550828e-06,
      "loss": 0.1496,
      "step": 331
    },
    {
      "epoch": 0.8806366047745358,
      "grad_norm": 91.23215294170157,
      "learning_rate": 6.875896752978345e-06,
      "loss": 0.0776,
      "step": 332
    },
    {
      "epoch": 0.883289124668435,
      "grad_norm": 60.07656834467505,
      "learning_rate": 6.85440098120368e-06,
      "loss": 0.1235,
      "step": 333
    },
    {
      "epoch": 0.8859416445623343,
      "grad_norm": 57.7606396116628,
      "learning_rate": 6.832865394749065e-06,
      "loss": 0.2734,
      "step": 334
    },
    {
      "epoch": 0.8885941644562334,
      "grad_norm": 97.38424578856268,
      "learning_rate": 6.811290455991561e-06,
      "loss": 0.2594,
      "step": 335
    },
    {
      "epoch": 0.8912466843501327,
      "grad_norm": 14.825837763144138,
      "learning_rate": 6.7896766281531435e-06,
      "loss": 0.0884,
      "step": 336
    },
    {
      "epoch": 0.8938992042440318,
      "grad_norm": 32.46637484090218,
      "learning_rate": 6.768024375290747e-06,
      "loss": 0.2276,
      "step": 337
    },
    {
      "epoch": 0.896551724137931,
      "grad_norm": 20.01023118988635,
      "learning_rate": 6.7463341622863074e-06,
      "loss": 0.2973,
      "step": 338
    },
    {
      "epoch": 0.8992042440318302,
      "grad_norm": 30.318015548215282,
      "learning_rate": 6.724606454836782e-06,
      "loss": 0.1991,
      "step": 339
    },
    {
      "epoch": 0.9018567639257294,
      "grad_norm": 32.44856612071763,
      "learning_rate": 6.702841719444141e-06,
      "loss": 0.1553,
      "step": 340
    },
    {
      "epoch": 0.9045092838196287,
      "grad_norm": 49.379538467288945,
      "learning_rate": 6.681040423405363e-06,
      "loss": 0.0793,
      "step": 341
    },
    {
      "epoch": 0.9071618037135278,
      "grad_norm": 25.615220131941072,
| "learning_rate": 6.659203034802397e-06, | |
| "loss": 0.1113, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 0.9098143236074271, | |
| "grad_norm": 26.91418227369825, | |
| "learning_rate": 6.637330022492112e-06, | |
| "loss": 0.0838, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 0.9124668435013262, | |
| "grad_norm": 14.468861044486943, | |
| "learning_rate": 6.615421856096231e-06, | |
| "loss": 0.0868, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 0.9151193633952255, | |
| "grad_norm": 33.3639289502211, | |
| "learning_rate": 6.593479005991251e-06, | |
| "loss": 0.3047, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.9177718832891246, | |
| "grad_norm": 64.60398250100485, | |
| "learning_rate": 6.571501943298335e-06, | |
| "loss": 0.2061, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 0.9204244031830239, | |
| "grad_norm": 9.586237096760447, | |
| "learning_rate": 6.549491139873211e-06, | |
| "loss": 0.0493, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 0.9230769230769231, | |
| "grad_norm": 36.22322190424842, | |
| "learning_rate": 6.527447068296026e-06, | |
| "loss": 0.1753, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 0.9257294429708223, | |
| "grad_norm": 39.36810695854887, | |
| "learning_rate": 6.50537020186121e-06, | |
| "loss": 0.1718, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 0.9283819628647215, | |
| "grad_norm": 36.70864623030449, | |
| "learning_rate": 6.483261014567311e-06, | |
| "loss": 0.1266, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.9310344827586207, | |
| "grad_norm": 41.22081667022202, | |
| "learning_rate": 6.4611199811068196e-06, | |
| "loss": 0.1192, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 0.9336870026525199, | |
| "grad_norm": 45.73197622465652, | |
| "learning_rate": 6.4389475768559675e-06, | |
| "loss": 0.1907, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.9363395225464191, | |
| "grad_norm": 24.82258436855005, | |
| "learning_rate": 6.416744277864541e-06, | |
| "loss": 0.1255, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 0.9389920424403183, | |
| "grad_norm": 46.95305885294252, | |
| "learning_rate": 6.394510560845637e-06, | |
| "loss": 0.2034, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.9416445623342176, | |
| "grad_norm": 43.95293899307852, | |
| "learning_rate": 6.372246903165445e-06, | |
| "loss": 0.0667, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.9442970822281167, | |
| "grad_norm": 27.9370246023897, | |
| "learning_rate": 6.349953782832991e-06, | |
| "loss": 0.1236, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.946949602122016, | |
| "grad_norm": 41.300499829650754, | |
| "learning_rate": 6.327631678489874e-06, | |
| "loss": 0.1794, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 0.9496021220159151, | |
| "grad_norm": 53.496686584518514, | |
| "learning_rate": 6.305281069399989e-06, | |
| "loss": 0.1476, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.9522546419098143, | |
| "grad_norm": 24.773217666941626, | |
| "learning_rate": 6.282902435439242e-06, | |
| "loss": 0.1793, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.9549071618037135, | |
| "grad_norm": 53.733690779853525, | |
| "learning_rate": 6.26049625708524e-06, | |
| "loss": 0.0974, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.9575596816976127, | |
| "grad_norm": 68.14585850230198, | |
| "learning_rate": 6.238063015406982e-06, | |
| "loss": 0.1673, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.9602122015915119, | |
| "grad_norm": 42.881014295604565, | |
| "learning_rate": 6.215603192054523e-06, | |
| "loss": 0.1728, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.9628647214854111, | |
| "grad_norm": 38.07733282694558, | |
| "learning_rate": 6.1931172692486405e-06, | |
| "loss": 0.1598, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.9655172413793104, | |
| "grad_norm": 22.33578536504675, | |
| "learning_rate": 6.17060572977047e-06, | |
| "loss": 0.0928, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.9681697612732095, | |
| "grad_norm": 54.313081195447566, | |
| "learning_rate": 6.1480690569511545e-06, | |
| "loss": 0.1177, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.9708222811671088, | |
| "grad_norm": 19.430422378279193, | |
| "learning_rate": 6.125507734661458e-06, | |
| "loss": 0.0752, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.9734748010610079, | |
| "grad_norm": 9.151633545416283, | |
| "learning_rate": 6.1029222473013705e-06, | |
| "loss": 0.0513, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.9761273209549072, | |
| "grad_norm": 52.06525606443343, | |
| "learning_rate": 6.080313079789723e-06, | |
| "loss": 0.3727, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.9787798408488063, | |
| "grad_norm": 12.623041541263005, | |
| "learning_rate": 6.0576807175537654e-06, | |
| "loss": 0.0798, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.9814323607427056, | |
| "grad_norm": 19.02195591204212, | |
| "learning_rate": 6.035025646518747e-06, | |
| "loss": 0.1577, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.9840848806366048, | |
| "grad_norm": 106.76160829271122, | |
| "learning_rate": 6.012348353097484e-06, | |
| "loss": 0.1309, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.986737400530504, | |
| "grad_norm": 34.825775293319026, | |
| "learning_rate": 5.9896493241799115e-06, | |
| "loss": 0.14, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.9893899204244032, | |
| "grad_norm": 33.19245300505469, | |
| "learning_rate": 5.966929047122641e-06, | |
| "loss": 0.1763, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.9920424403183024, | |
| "grad_norm": 46.172470813437585, | |
| "learning_rate": 5.944188009738483e-06, | |
| "loss": 0.1713, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.9946949602122016, | |
| "grad_norm": 17.03841623309852, | |
| "learning_rate": 5.921426700285986e-06, | |
| "loss": 0.0743, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.9973474801061007, | |
| "grad_norm": 17.459086652141746, | |
| "learning_rate": 5.898645607458941e-06, | |
| "loss": 0.1025, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 18.67436678870874, | |
| "learning_rate": 5.8758452203758995e-06, | |
| "loss": 0.0819, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "eval_loss": 0.2675401568412781, | |
| "eval_runtime": 175.6682, | |
| "eval_samples_per_second": 12.04, | |
| "eval_steps_per_second": 1.509, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 1.0026525198938991, | |
| "grad_norm": 59.897905985685675, | |
| "learning_rate": 5.8530260285696674e-06, | |
| "loss": 0.1836, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 1.0053050397877985, | |
| "grad_norm": 19.30075741196344, | |
| "learning_rate": 5.830188521976794e-06, | |
| "loss": 0.1109, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 1.0079575596816976, | |
| "grad_norm": 13.221693309417905, | |
| "learning_rate": 5.807333190927054e-06, | |
| "loss": 0.0782, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 1.0106100795755968, | |
| "grad_norm": 26.608005467424196, | |
| "learning_rate": 5.784460526132918e-06, | |
| "loss": 0.0825, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 1.013262599469496, | |
| "grad_norm": 23.16382228818845, | |
| "learning_rate": 5.761571018679025e-06, | |
| "loss": 0.1793, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 1.0159151193633953, | |
| "grad_norm": 46.322587870167744, | |
| "learning_rate": 5.738665160011627e-06, | |
| "loss": 0.2887, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 1.0185676392572944, | |
| "grad_norm": 22.962401412882272, | |
| "learning_rate": 5.715743441928041e-06, | |
| "loss": 0.1489, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 1.0212201591511936, | |
| "grad_norm": 24.773910138137946, | |
| "learning_rate": 5.6928063565660955e-06, | |
| "loss": 0.0525, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 1.023872679045093, | |
| "grad_norm": 13.258600982347168, | |
| "learning_rate": 5.669854396393559e-06, | |
| "loss": 0.1046, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 1.026525198938992, | |
| "grad_norm": 30.627596045537256, | |
| "learning_rate": 5.646888054197568e-06, | |
| "loss": 0.0573, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 1.0291777188328912, | |
| "grad_norm": 23.645590125761473, | |
| "learning_rate": 5.623907823074044e-06, | |
| "loss": 0.0591, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 1.0318302387267904, | |
| "grad_norm": 11.724797568397172, | |
| "learning_rate": 5.600914196417112e-06, | |
| "loss": 0.0527, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 1.0344827586206897, | |
| "grad_norm": 33.05702567494784, | |
| "learning_rate": 5.577907667908505e-06, | |
| "loss": 0.071, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 1.0371352785145889, | |
| "grad_norm": 61.84391812410997, | |
| "learning_rate": 5.5548887315069575e-06, | |
| "loss": 0.1432, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 1.039787798408488, | |
| "grad_norm": 26.866469715187634, | |
| "learning_rate": 5.531857881437612e-06, | |
| "loss": 0.0391, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 1.0424403183023874, | |
| "grad_norm": 21.735047851238534, | |
| "learning_rate": 5.508815612181401e-06, | |
| "loss": 0.0612, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 1.0450928381962865, | |
| "grad_norm": 81.61327385853646, | |
| "learning_rate": 5.48576241846443e-06, | |
| "loss": 0.0915, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 1.0477453580901857, | |
| "grad_norm": 36.608293367299886, | |
| "learning_rate": 5.462698795247357e-06, | |
| "loss": 0.0789, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 1.0503978779840848, | |
| "grad_norm": 15.245615796132189, | |
| "learning_rate": 5.4396252377147615e-06, | |
| "loss": 0.0557, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 1.0530503978779842, | |
| "grad_norm": 16.14124314650086, | |
| "learning_rate": 5.416542241264524e-06, | |
| "loss": 0.0384, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 1.0557029177718833, | |
| "grad_norm": 30.91450127539826, | |
| "learning_rate": 5.39345030149718e-06, | |
| "loss": 0.0835, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 1.0583554376657824, | |
| "grad_norm": 50.5648480705429, | |
| "learning_rate": 5.370349914205273e-06, | |
| "loss": 0.1121, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 1.0610079575596818, | |
| "grad_norm": 19.82069234335954, | |
| "learning_rate": 5.347241575362729e-06, | |
| "loss": 0.0373, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 1.063660477453581, | |
| "grad_norm": 28.49315460062302, | |
| "learning_rate": 5.324125781114193e-06, | |
| "loss": 0.1163, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 1.06631299734748, | |
| "grad_norm": 7.066674993321376, | |
| "learning_rate": 5.30100302776438e-06, | |
| "loss": 0.034, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 1.0689655172413792, | |
| "grad_norm": 39.89722657614181, | |
| "learning_rate": 5.277873811767415e-06, | |
| "loss": 0.1019, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 1.0716180371352786, | |
| "grad_norm": 24.29706699041852, | |
| "learning_rate": 5.254738629716186e-06, | |
| "loss": 0.0499, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 1.0742705570291777, | |
| "grad_norm": 16.98018553152181, | |
| "learning_rate": 5.231597978331669e-06, | |
| "loss": 0.1773, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 1.0769230769230769, | |
| "grad_norm": 25.620954046680893, | |
| "learning_rate": 5.208452354452275e-06, | |
| "loss": 0.0309, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 1.079575596816976, | |
| "grad_norm": 15.873827333232358, | |
| "learning_rate": 5.185302255023166e-06, | |
| "loss": 0.0561, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 1.0822281167108754, | |
| "grad_norm": 9.59916708108742, | |
| "learning_rate": 5.162148177085604e-06, | |
| "loss": 0.0282, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 1.0848806366047745, | |
| "grad_norm": 40.15025004437079, | |
| "learning_rate": 5.1389906177662705e-06, | |
| "loss": 0.2067, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 1.0875331564986737, | |
| "grad_norm": 28.56296379309594, | |
| "learning_rate": 5.115830074266592e-06, | |
| "loss": 0.0789, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 1.090185676392573, | |
| "grad_norm": 30.995625571882485, | |
| "learning_rate": 5.092667043852062e-06, | |
| "loss": 0.1117, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 1.0928381962864722, | |
| "grad_norm": 19.365267054186745, | |
| "learning_rate": 5.069502023841576e-06, | |
| "loss": 0.0727, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 1.0954907161803713, | |
| "grad_norm": 22.077696212759637, | |
| "learning_rate": 5.046335511596746e-06, | |
| "loss": 0.034, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 1.0981432360742707, | |
| "grad_norm": 13.531788824870805, | |
| "learning_rate": 5.0231680045112174e-06, | |
| "loss": 0.0406, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 1.1007957559681698, | |
| "grad_norm": 304.007796055404, | |
| "learning_rate": 5e-06, | |
| "loss": 0.3101, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 1.103448275862069, | |
| "grad_norm": 31.623618063306097, | |
| "learning_rate": 4.976831995488784e-06, | |
| "loss": 0.1158, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 1.106100795755968, | |
| "grad_norm": 9.920704339139728, | |
| "learning_rate": 4.953664488403256e-06, | |
| "loss": 0.0242, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 1.1087533156498675, | |
| "grad_norm": 23.579564585184656, | |
| "learning_rate": 4.9304979761584256e-06, | |
| "loss": 0.0962, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 1.1114058355437666, | |
| "grad_norm": 52.57836040943778, | |
| "learning_rate": 4.90733295614794e-06, | |
| "loss": 0.1015, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 1.1140583554376657, | |
| "grad_norm": 12.43914227411422, | |
| "learning_rate": 4.884169925733409e-06, | |
| "loss": 0.0373, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 1.1167108753315649, | |
| "grad_norm": 21.402292272016723, | |
| "learning_rate": 4.86100938223373e-06, | |
| "loss": 0.0587, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 1.1193633952254642, | |
| "grad_norm": 70.60154744569552, | |
| "learning_rate": 4.837851822914397e-06, | |
| "loss": 0.2026, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 1.1220159151193634, | |
| "grad_norm": 48.54843202209643, | |
| "learning_rate": 4.814697744976835e-06, | |
| "loss": 0.1783, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 1.1246684350132625, | |
| "grad_norm": 41.890635327188185, | |
| "learning_rate": 4.791547645547727e-06, | |
| "loss": 0.164, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 1.1273209549071619, | |
| "grad_norm": 25.976617413445858, | |
| "learning_rate": 4.768402021668332e-06, | |
| "loss": 0.0416, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 1.129973474801061, | |
| "grad_norm": 21.999871034997554, | |
| "learning_rate": 4.7452613702838166e-06, | |
| "loss": 0.0583, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 1.1326259946949602, | |
| "grad_norm": 17.107901116946703, | |
| "learning_rate": 4.722126188232586e-06, | |
| "loss": 0.0387, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 1.1352785145888595, | |
| "grad_norm": 31.80702792803695, | |
| "learning_rate": 4.698996972235622e-06, | |
| "loss": 0.0387, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 1.1379310344827587, | |
| "grad_norm": 36.759953871654126, | |
| "learning_rate": 4.6758742188858074e-06, | |
| "loss": 0.0615, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 1.1405835543766578, | |
| "grad_norm": 64.71867975826405, | |
| "learning_rate": 4.652758424637271e-06, | |
| "loss": 0.1239, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 1.143236074270557, | |
| "grad_norm": 42.36347961294335, | |
| "learning_rate": 4.629650085794728e-06, | |
| "loss": 0.1642, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 1.1458885941644563, | |
| "grad_norm": 28.612720915566218, | |
| "learning_rate": 4.606549698502824e-06, | |
| "loss": 0.0399, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 1.1485411140583555, | |
| "grad_norm": 58.358639810511, | |
| "learning_rate": 4.583457758735477e-06, | |
| "loss": 0.1346, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 1.1511936339522546, | |
| "grad_norm": 114.40639234081463, | |
| "learning_rate": 4.56037476228524e-06, | |
| "loss": 0.0897, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 1.1538461538461537, | |
| "grad_norm": 26.931377081050965, | |
| "learning_rate": 4.537301204752647e-06, | |
| "loss": 0.0307, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 1.156498673740053, | |
| "grad_norm": 1355.6094818034703, | |
| "learning_rate": 4.514237581535571e-06, | |
| "loss": 0.3762, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 1.1591511936339522, | |
| "grad_norm": 176.72585501395568, | |
| "learning_rate": 4.4911843878186e-06, | |
| "loss": 0.2698, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 1.1618037135278514, | |
| "grad_norm": 194.21161828585662, | |
| "learning_rate": 4.468142118562389e-06, | |
| "loss": 0.1413, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 1.1644562334217508, | |
| "grad_norm": 13.912851064421268, | |
| "learning_rate": 4.4451112684930424e-06, | |
| "loss": 0.0487, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 1.16710875331565, | |
| "grad_norm": 269.86205592215356, | |
| "learning_rate": 4.422092332091497e-06, | |
| "loss": 0.0496, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 1.169761273209549, | |
| "grad_norm": 238.93346426173355, | |
| "learning_rate": 4.399085803582889e-06, | |
| "loss": 0.249, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 1.1724137931034484, | |
| "grad_norm": 115.29380572469265, | |
| "learning_rate": 4.3760921769259585e-06, | |
| "loss": 0.1229, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 1.1750663129973475, | |
| "grad_norm": 40.49647627652829, | |
| "learning_rate": 4.353111945802433e-06, | |
| "loss": 0.2009, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 1.1777188328912467, | |
| "grad_norm": 164.6445682937097, | |
| "learning_rate": 4.3301456036064415e-06, | |
| "loss": 0.0714, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 1.1803713527851458, | |
| "grad_norm": 23.339229986498648, | |
| "learning_rate": 4.307193643433907e-06, | |
| "loss": 0.1288, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 1.1830238726790452, | |
| "grad_norm": 31.185974598081064, | |
| "learning_rate": 4.28425655807196e-06, | |
| "loss": 0.124, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 1.1856763925729443, | |
| "grad_norm": 85.10943690811573, | |
| "learning_rate": 4.261334839988375e-06, | |
| "loss": 0.1668, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 1.1883289124668435, | |
| "grad_norm": 31.6192729191219, | |
| "learning_rate": 4.2384289813209754e-06, | |
| "loss": 0.046, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 1.1909814323607426, | |
| "grad_norm": 48.50997993691853, | |
| "learning_rate": 4.2155394738670814e-06, | |
| "loss": 0.097, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 1.193633952254642, | |
| "grad_norm": 77.85377486524548, | |
| "learning_rate": 4.192666809072948e-06, | |
| "loss": 0.1287, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 1.1962864721485411, | |
| "grad_norm": 17.051079311239025, | |
| "learning_rate": 4.1698114780232085e-06, | |
| "loss": 0.0332, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 1.1989389920424403, | |
| "grad_norm": 53.39653655915706, | |
| "learning_rate": 4.146973971430333e-06, | |
| "loss": 0.0781, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 1.2015915119363396, | |
| "grad_norm": 48.12287673583319, | |
| "learning_rate": 4.124154779624101e-06, | |
| "loss": 0.1418, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 1.2042440318302388, | |
| "grad_norm": 22.493082140413776, | |
| "learning_rate": 4.101354392541061e-06, | |
| "loss": 0.03, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 1.206896551724138, | |
| "grad_norm": 12.444815384892202, | |
| "learning_rate": 4.078573299714014e-06, | |
| "loss": 0.0182, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 1.209549071618037, | |
| "grad_norm": 79.10641890975988, | |
| "learning_rate": 4.055811990261518e-06, | |
| "loss": 0.0911, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 1.2122015915119364, | |
| "grad_norm": 9.816357624428727, | |
| "learning_rate": 4.033070952877362e-06, | |
| "loss": 0.0278, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 1.2148541114058355, | |
| "grad_norm": 155.48754272522942, | |
| "learning_rate": 4.010350675820091e-06, | |
| "loss": 0.1763, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 1.2175066312997347, | |
| "grad_norm": 42.298961218738626, | |
| "learning_rate": 3.987651646902518e-06, | |
| "loss": 0.1601, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 1.2201591511936338, | |
| "grad_norm": 27.108187773217047, | |
| "learning_rate": 3.964974353481254e-06, | |
| "loss": 0.0393, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 1.2228116710875332, | |
| "grad_norm": 14.036570576626396, | |
| "learning_rate": 3.942319282446236e-06, | |
| "loss": 0.0476, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 1.2254641909814323, | |
| "grad_norm": 79.20938794137449, | |
| "learning_rate": 3.9196869202102775e-06, | |
| "loss": 0.1278, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 1.2281167108753315, | |
| "grad_norm": 43.71840756123409, | |
| "learning_rate": 3.89707775269863e-06, | |
| "loss": 0.0832, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 1.2307692307692308, | |
| "grad_norm": 31.81487734914705, | |
| "learning_rate": 3.874492265338544e-06, | |
| "loss": 0.0867, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 1.23342175066313, | |
| "grad_norm": 32.092650843497005, | |
| "learning_rate": 3.851930943048845e-06, | |
| "loss": 0.0707, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 1.2360742705570291, | |
| "grad_norm": 73.1851233470951, | |
| "learning_rate": 3.829394270229531e-06, | |
| "loss": 0.0404, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 1.2387267904509285, | |
| "grad_norm": 47.88667693514473, | |
| "learning_rate": 3.8068827307513624e-06, | |
| "loss": 0.1492, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 1.2413793103448276, | |
| "grad_norm": 46.987626815205644, | |
| "learning_rate": 3.7843968079454773e-06, | |
| "loss": 0.0506, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 1.2440318302387268, | |
| "grad_norm": 27.70516442785767, | |
| "learning_rate": 3.7619369845930195e-06, | |
| "loss": 0.0479, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 1.246684350132626, | |
| "grad_norm": 78.80561354061712, | |
| "learning_rate": 3.7395037429147615e-06, | |
| "loss": 0.1038, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 1.2493368700265253, | |
| "grad_norm": 26.63471214948312, | |
| "learning_rate": 3.7170975645607587e-06, | |
| "loss": 0.0385, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 1.2519893899204244, | |
| "grad_norm": 48.97246093939892, | |
| "learning_rate": 3.694718930600012e-06, | |
| "loss": 0.1947, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 1.2546419098143236, | |
| "grad_norm": 15.150541777981305, | |
| "learning_rate": 3.672368321510128e-06, | |
| "loss": 0.0203, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 1.2572944297082227, | |
| "grad_norm": 18.272799625898166, | |
| "learning_rate": 3.6500462171670104e-06, | |
| "loss": 0.0292, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 1.259946949602122, | |
| "grad_norm": 9.760377684656945, | |
| "learning_rate": 3.6277530968345552e-06, | |
| "loss": 0.0231, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 1.2625994694960212, | |
| "grad_norm": 143.54418027985838, | |
| "learning_rate": 3.605489439154365e-06, | |
| "loss": 0.0538, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 1.2652519893899203, | |
| "grad_norm": 6.649675244989913, | |
| "learning_rate": 3.583255722135462e-06, | |
| "loss": 0.0176, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 1.2679045092838197, | |
| "grad_norm": 34.76279871654229, | |
| "learning_rate": 3.5610524231440324e-06, | |
| "loss": 0.1938, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 1.2705570291777188, | |
| "grad_norm": 73.45374798187132, | |
| "learning_rate": 3.5388800188931825e-06, | |
| "loss": 0.177, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 1.273209549071618, | |
| "grad_norm": 35.61683845141919, | |
| "learning_rate": 3.5167389854326907e-06, | |
| "loss": 0.0772, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 1.2758620689655173, | |
| "grad_norm": 102.38708909096728, | |
| "learning_rate": 3.4946297981387913e-06, | |
| "loss": 0.2324, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 1.2785145888594165, | |
| "grad_norm": 11.372905805375654, | |
| "learning_rate": 3.472552931703975e-06, | |
| "loss": 0.0166, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 1.2811671087533156, | |
| "grad_norm": 97.2921392310827, | |
| "learning_rate": 3.4505088601267913e-06, | |
| "loss": 0.0253, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 1.2838196286472148, | |
| "grad_norm": 44.448015983318996, | |
| "learning_rate": 3.428498056701665e-06, | |
| "loss": 0.0335, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 1.2864721485411141, | |
| "grad_norm": 35.51304332333862, | |
| "learning_rate": 3.4065209940087507e-06, | |
| "loss": 0.0624, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 1.2891246684350133, | |
| "grad_norm": 32.840566775237946, | |
| "learning_rate": 3.3845781439037695e-06, | |
| "loss": 0.1112, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 1.2917771883289124, | |
| "grad_norm": 14.645995648890793, | |
| "learning_rate": 3.3626699775078884e-06, | |
| "loss": 0.0206, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 1.2944297082228116, | |
| "grad_norm": 26.936644693957238, | |
| "learning_rate": 3.3407969651976045e-06, | |
| "loss": 0.0701, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 1.297082228116711, | |
| "grad_norm": 120.49105035479118, | |
| "learning_rate": 3.3189595765946394e-06, | |
| "loss": 0.0595, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 1.29973474801061, | |
| "grad_norm": 38.35154924631288, | |
| "learning_rate": 3.2971582805558622e-06, | |
| "loss": 0.0235, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 1.3023872679045092, | |
| "grad_norm": 46.11628764819059, | |
| "learning_rate": 3.27539354516322e-06, | |
| "loss": 0.037, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 1.3050397877984086, | |
| "grad_norm": 17.06815211835833, | |
| "learning_rate": 3.253665837713694e-06, | |
| "loss": 0.0225, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 1.3076923076923077, | |
| "grad_norm": 21.423911211271946, | |
| "learning_rate": 3.2319756247092552e-06, | |
| "loss": 0.0401, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 1.3103448275862069, | |
| "grad_norm": 51.136063418799814, | |
| "learning_rate": 3.2103233718468574e-06, | |
| "loss": 0.1567, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 1.3129973474801062, | |
| "grad_norm": 37.8420988921554, | |
| "learning_rate": 3.1887095440084402e-06, | |
| "loss": 0.118, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 1.3156498673740054, | |
| "grad_norm": 33.53146239619461, | |
| "learning_rate": 3.167134605250938e-06, | |
| "loss": 0.034, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 1.3183023872679045, | |
| "grad_norm": 44.08224523254291, | |
| "learning_rate": 3.14559901879632e-06, | |
| "loss": 0.0531, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 1.3209549071618036, | |
| "grad_norm": 15.23786197134571, | |
| "learning_rate": 3.1241032470216564e-06, | |
| "loss": 0.0206, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 1.323607427055703, | |
| "grad_norm": 50.411143499351674, | |
| "learning_rate": 3.102647751449174e-06, | |
| "loss": 0.0358, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 1.3262599469496021, | |
| "grad_norm": 5.486481277109229, | |
| "learning_rate": 3.081232992736355e-06, | |
| "loss": 0.0166, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 1.3289124668435013, | |
| "grad_norm": 58.942038816764864, | |
| "learning_rate": 3.059859430666049e-06, | |
| "loss": 0.0755, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 1.3315649867374004, | |
| "grad_norm": 160.1692316877336, | |
| "learning_rate": 3.0385275241365965e-06, | |
| "loss": 0.0672, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 1.3342175066312998, | |
| "grad_norm": 116.71045710272905, | |
| "learning_rate": 3.017237731151976e-06, | |
| "loss": 0.0485, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 1.336870026525199, | |
| "grad_norm": 47.59824057526637, | |
| "learning_rate": 2.9959905088119777e-06, | |
| "loss": 0.1871, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 1.339522546419098, | |
| "grad_norm": 16.920320901353147, | |
| "learning_rate": 2.9747863133023803e-06, | |
| "loss": 0.0396, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 1.3421750663129974, | |
| "grad_norm": 79.66742416782449, | |
| "learning_rate": 2.9536255998851615e-06, | |
| "loss": 0.0651, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 1.3448275862068966, | |
| "grad_norm": 86.51363992577608, | |
| "learning_rate": 2.93250882288872e-06, | |
| "loss": 0.1477, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 1.3474801061007957, | |
| "grad_norm": 122.48886729601927, | |
| "learning_rate": 2.9114364356981274e-06, | |
| "loss": 0.0464, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 1.350132625994695, | |
| "grad_norm": 93.31716531548089, | |
| "learning_rate": 2.8904088907453887e-06, | |
| "loss": 0.0968, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 1.3527851458885942, | |
| "grad_norm": 24.658494216440896, | |
| "learning_rate": 2.8694266394997238e-06, | |
| "loss": 0.0497, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 1.3554376657824934, | |
| "grad_norm": 42.698807981473095, | |
| "learning_rate": 2.8484901324578883e-06, | |
| "loss": 0.0724, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 1.3580901856763925, | |
| "grad_norm": 11.078782479932135, | |
| "learning_rate": 2.827599819134489e-06, | |
| "loss": 0.0323, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 1.3607427055702916, | |
| "grad_norm": 28.288160319770295, | |
| "learning_rate": 2.8067561480523315e-06, | |
| "loss": 0.0351, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 1.363395225464191, | |
| "grad_norm": 71.10722452385636, | |
| "learning_rate": 2.7859595667328027e-06, | |
| "loss": 0.0646, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 1.3660477453580901, | |
| "grad_norm": 26.533628741578823, | |
| "learning_rate": 2.7652105216862536e-06, | |
| "loss": 0.062, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 1.3687002652519893, | |
| "grad_norm": 53.981212216030144, | |
| "learning_rate": 2.7445094584024067e-06, | |
| "loss": 0.0616, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 1.3713527851458887, | |
| "grad_norm": 83.62333734580827, | |
| "learning_rate": 2.723856821340806e-06, | |
| "loss": 0.0714, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 1.3740053050397878, | |
| "grad_norm": 20.39953135964151, | |
| "learning_rate": 2.703253053921266e-06, | |
| "loss": 0.0465, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 1.376657824933687, | |
| "grad_norm": 43.96544927941002, | |
| "learning_rate": 2.682698598514343e-06, | |
| "loss": 0.0762, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 1.3793103448275863, | |
| "grad_norm": 10.002180522529653, | |
| "learning_rate": 2.6621938964318593e-06, | |
| "loss": 0.023, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 1.3819628647214854, | |
| "grad_norm": 40.10472050271422, | |
| "learning_rate": 2.6417393879174056e-06, | |
| "loss": 0.1116, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 1.3846153846153846, | |
| "grad_norm": 77.95603280240162, | |
| "learning_rate": 2.621335512136899e-06, | |
| "loss": 0.1205, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 1.387267904509284, | |
| "grad_norm": 7.424807899482766, | |
| "learning_rate": 2.600982707169154e-06, | |
| "loss": 0.0209, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 1.389920424403183, | |
| "grad_norm": 131.93363781761656, | |
| "learning_rate": 2.580681409996477e-06, | |
| "loss": 0.1513, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 1.3925729442970822, | |
| "grad_norm": 27.270787221631757, | |
| "learning_rate": 2.5604320564952846e-06, | |
| "loss": 0.0645, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 1.3952254641909814, | |
| "grad_norm": 18.171580676508576, | |
| "learning_rate": 2.5402350814267364e-06, | |
| "loss": 0.0336, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 1.3978779840848805, | |
| "grad_norm": 24.9229811714387, | |
| "learning_rate": 2.5200909184274125e-06, | |
| "loss": 0.055, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 1.4005305039787799, | |
| "grad_norm": 12.99994825162711, | |
| "learning_rate": 2.5000000000000015e-06, | |
| "loss": 0.021, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 1.403183023872679, | |
| "grad_norm": 27.138921623282503, | |
| "learning_rate": 2.4799627575040014e-06, | |
| "loss": 0.0353, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 1.4058355437665782, | |
| "grad_norm": 31.261860390743, | |
| "learning_rate": 2.4599796211464772e-06, | |
| "loss": 0.0841, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 1.4084880636604775, | |
| "grad_norm": 27.668175088299527, | |
| "learning_rate": 2.4400510199728123e-06, | |
| "loss": 0.0403, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 1.4111405835543767, | |
| "grad_norm": 25.332190511055664, | |
| "learning_rate": 2.4201773818574956e-06, | |
| "loss": 0.119, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 1.4137931034482758, | |
| "grad_norm": 18.2746114690722, | |
| "learning_rate": 2.400359133494944e-06, | |
| "loss": 0.028, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 1.4164456233421752, | |
| "grad_norm": 52.160219105895436, | |
| "learning_rate": 2.3805967003903336e-06, | |
| "loss": 0.225, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 1.4190981432360743, | |
| "grad_norm": 31.18123569823581, | |
| "learning_rate": 2.360890506850464e-06, | |
| "loss": 0.0505, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 1.4217506631299734, | |
| "grad_norm": 29.792666807984002, | |
| "learning_rate": 2.341240975974653e-06, | |
| "loss": 0.0548, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 1.4244031830238728, | |
| "grad_norm": 36.0670846434295, | |
| "learning_rate": 2.3216485296456514e-06, | |
| "loss": 0.0417, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 1.427055702917772, | |
| "grad_norm": 30.93164615396188, | |
| "learning_rate": 2.302113588520578e-06, | |
| "loss": 0.0447, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 1.429708222811671, | |
| "grad_norm": 37.172850035610146, | |
| "learning_rate": 2.2826365720218984e-06, | |
| "loss": 0.1858, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 1.4323607427055702, | |
| "grad_norm": 17.962259934733847, | |
| "learning_rate": 2.263217898328415e-06, | |
| "loss": 0.0289, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 1.4350132625994694, | |
| "grad_norm": 37.95184145221939, | |
| "learning_rate": 2.243857984366284e-06, | |
| "loss": 0.076, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 1.4376657824933687, | |
| "grad_norm": 5.994179631068117, | |
| "learning_rate": 2.2245572458000714e-06, | |
| "loss": 0.0148, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 1.4403183023872679, | |
| "grad_norm": 45.71919718088937, | |
| "learning_rate": 2.205316097023826e-06, | |
| "loss": 0.0433, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 1.442970822281167, | |
| "grad_norm": 20.29906763271573, | |
| "learning_rate": 2.1861349511521817e-06, | |
| "loss": 0.0268, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 1.4456233421750664, | |
| "grad_norm": 13.554453290948606, | |
| "learning_rate": 2.1670142200114837e-06, | |
| "loss": 0.0344, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 1.4482758620689655, | |
| "grad_norm": 37.350081043699, | |
| "learning_rate": 2.147954314130955e-06, | |
| "loss": 0.152, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 1.4509283819628647, | |
| "grad_norm": 59.98816086350694, | |
| "learning_rate": 2.128955642733877e-06, | |
| "loss": 0.0872, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 1.453580901856764, | |
| "grad_norm": 51.93836468257473, | |
| "learning_rate": 2.1100186137288005e-06, | |
| "loss": 0.1183, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 1.4562334217506632, | |
| "grad_norm": 20.013368851058353, | |
| "learning_rate": 2.0911436337007935e-06, | |
| "loss": 0.0242, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 1.4588859416445623, | |
| "grad_norm": 2.8281653740475434, | |
| "learning_rate": 2.072331107902713e-06, | |
| "loss": 0.0129, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 1.4615384615384617, | |
| "grad_norm": 49.3463666972085, | |
| "learning_rate": 2.0535814402464922e-06, | |
| "loss": 0.0742, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 1.4641909814323608, | |
| "grad_norm": 36.76803200310502, | |
| "learning_rate": 2.034895033294483e-06, | |
| "loss": 0.0546, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 1.46684350132626, | |
| "grad_norm": 4.683103228912366, | |
| "learning_rate": 2.0162722882508072e-06, | |
| "loss": 0.0163, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 1.469496021220159, | |
| "grad_norm": 6.935431057612835, | |
| "learning_rate": 1.9977136049527348e-06, | |
| "loss": 0.0156, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 1.4721485411140582, | |
| "grad_norm": 19.250531549872047, | |
| "learning_rate": 1.9792193818621118e-06, | |
| "loss": 0.0873, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 1.4748010610079576, | |
| "grad_norm": 28.294139881028673, | |
| "learning_rate": 1.960790016056801e-06, | |
| "loss": 0.0995, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 1.4774535809018567, | |
| "grad_norm": 179.2776003740221, | |
| "learning_rate": 1.9424259032221482e-06, | |
| "loss": 0.0621, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 1.4801061007957559, | |
| "grad_norm": 39.522899359848566, | |
| "learning_rate": 1.9241274376425e-06, | |
| "loss": 0.0766, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 1.4827586206896552, | |
| "grad_norm": 82.04740096096288, | |
| "learning_rate": 1.90589501219273e-06, | |
| "loss": 0.0773, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 1.4854111405835544, | |
| "grad_norm": 51.71654722915068, | |
| "learning_rate": 1.8877290183298058e-06, | |
| "loss": 0.0865, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 1.4880636604774535, | |
| "grad_norm": 14.462682735256758, | |
| "learning_rate": 1.869629846084382e-06, | |
| "loss": 0.0283, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 1.490716180371353, | |
| "grad_norm": 53.97301165757807, | |
| "learning_rate": 1.8515978840524302e-06, | |
| "loss": 0.131, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 1.493368700265252, | |
| "grad_norm": 71.72614182974992, | |
| "learning_rate": 1.8336335193868955e-06, | |
| "loss": 0.1059, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 1.4960212201591512, | |
| "grad_norm": 18.590927800865042, | |
| "learning_rate": 1.8157371377893769e-06, | |
| "loss": 0.0339, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 1.4986737400530503, | |
| "grad_norm": 28.37316287762548, | |
| "learning_rate": 1.7979091235018564e-06, | |
| "loss": 0.0402, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 1.5013262599469495, | |
| "grad_norm": 31.636530614590257, | |
| "learning_rate": 1.7801498592984445e-06, | |
| "loss": 0.0637, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 1.5039787798408488, | |
| "grad_norm": 22.37636539131692, | |
| "learning_rate": 1.762459726477157e-06, | |
| "loss": 0.0289, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 1.506631299734748, | |
| "grad_norm": 41.589846697116485, | |
| "learning_rate": 1.7448391048517378e-06, | |
| "loss": 0.0705, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 1.509283819628647, | |
| "grad_norm": 8.583447762326337, | |
| "learning_rate": 1.7272883727434996e-06, | |
| "loss": 0.0179, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 1.5119363395225465, | |
| "grad_norm": 7.730174492056544, | |
| "learning_rate": 1.709807906973196e-06, | |
| "loss": 0.0181, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 1.5145888594164456, | |
| "grad_norm": 23.737686713013556, | |
| "learning_rate": 1.6923980828529424e-06, | |
| "loss": 0.1221, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 1.5172413793103448, | |
| "grad_norm": 11.39242986580809, | |
| "learning_rate": 1.6750592741781496e-06, | |
| "loss": 0.0245, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 1.5198938992042441, | |
| "grad_norm": 49.87595081789666, | |
| "learning_rate": 1.657791853219497e-06, | |
| "loss": 0.1148, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 1.5225464190981433, | |
| "grad_norm": 2.364680488496631, | |
| "learning_rate": 1.640596190714947e-06, | |
| "loss": 0.0135, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 1.5251989389920424, | |
| "grad_norm": 4.131633358661766, | |
| "learning_rate": 1.623472655861782e-06, | |
| "loss": 0.0162, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 1.5278514588859418, | |
| "grad_norm": 29.51899540569925, | |
| "learning_rate": 1.6064216163086716e-06, | |
| "loss": 0.0377, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 1.530503978779841, | |
| "grad_norm": 43.08051878126815, | |
| "learning_rate": 1.589443438147789e-06, | |
| "loss": 0.0344, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 1.53315649867374, | |
| "grad_norm": 2.999575821209609, | |
| "learning_rate": 1.5725384859069454e-06, | |
| "loss": 0.0125, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 1.5358090185676394, | |
| "grad_norm": 19.508910931000166, | |
| "learning_rate": 1.5557071225417648e-06, | |
| "loss": 0.0413, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 1.5384615384615383, | |
| "grad_norm": 89.85070628038791, | |
| "learning_rate": 1.5389497094278861e-06, | |
| "loss": 0.0435, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 1.5411140583554377, | |
| "grad_norm": 35.813057157942374, | |
| "learning_rate": 1.5222666063532138e-06, | |
| "loss": 0.0565, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 1.5437665782493368, | |
| "grad_norm": 103.37386994929619, | |
| "learning_rate": 1.5056581715101887e-06, | |
| "loss": 0.0771, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 1.546419098143236, | |
| "grad_norm": 1.5818065462079043, | |
| "learning_rate": 1.4891247614880904e-06, | |
| "loss": 0.0104, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 1.5490716180371353, | |
| "grad_norm": 55.22745552993425, | |
| "learning_rate": 1.472666731265394e-06, | |
| "loss": 0.0897, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 1.5517241379310345, | |
| "grad_norm": 8.525215910412715, | |
| "learning_rate": 1.456284434202142e-06, | |
| "loss": 0.0194, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 1.5543766578249336, | |
| "grad_norm": 18.224368163231002, | |
| "learning_rate": 1.4399782220323515e-06, | |
| "loss": 0.0274, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 1.557029177718833, | |
| "grad_norm": 28.045474826554145, | |
| "learning_rate": 1.4237484448564759e-06, | |
| "loss": 0.0211, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 1.5596816976127321, | |
| "grad_norm": 33.215014442106025, | |
| "learning_rate": 1.4075954511338784e-06, | |
| "loss": 0.0296, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 1.5623342175066313, | |
| "grad_norm": 17.154525089940783, | |
| "learning_rate": 1.3915195876753495e-06, | |
| "loss": 0.028, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 1.5649867374005306, | |
| "grad_norm": 5.976715757380159, | |
| "learning_rate": 1.3755211996356687e-06, | |
| "loss": 0.0143, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 1.5676392572944295, | |
| "grad_norm": 19.99962559170014, | |
| "learning_rate": 1.3596006305061888e-06, | |
| "loss": 0.0299, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 1.570291777188329, | |
| "grad_norm": 34.00900917284991, | |
| "learning_rate": 1.3437582221074574e-06, | |
| "loss": 0.0412, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 1.5729442970822283, | |
| "grad_norm": 6.275523032792328, | |
| "learning_rate": 1.3279943145818874e-06, | |
| "loss": 0.0125, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 1.5755968169761272, | |
| "grad_norm": 104.47358503028816, | |
| "learning_rate": 1.3123092463864456e-06, | |
| "loss": 0.0926, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 1.5782493368700266, | |
| "grad_norm": 39.50094742573104, | |
| "learning_rate": 1.2967033542853918e-06, | |
| "loss": 0.0685, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 1.5809018567639257, | |
| "grad_norm": 28.78036671386429, | |
| "learning_rate": 1.2811769733430406e-06, | |
| "loss": 0.0364, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 1.5835543766578248, | |
| "grad_norm": 4.501720311787365, | |
| "learning_rate": 1.2657304369165768e-06, | |
| "loss": 0.012, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 1.5862068965517242, | |
| "grad_norm": 8.268254092208545, | |
| "learning_rate": 1.250364076648894e-06, | |
| "loss": 0.0143, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 1.5888594164456233, | |
| "grad_norm": 174.24152115104488, | |
| "learning_rate": 1.2350782224614689e-06, | |
| "loss": 0.1043, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 1.5915119363395225, | |
| "grad_norm": 55.43256083287633, | |
| "learning_rate": 1.2198732025472876e-06, | |
| "loss": 0.1449, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 1.5941644562334218, | |
| "grad_norm": 78.41447022948844, | |
| "learning_rate": 1.2047493433637935e-06, | |
| "loss": 0.0842, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 1.596816976127321, | |
| "grad_norm": 63.00893503383642, | |
| "learning_rate": 1.1897069696258756e-06, | |
| "loss": 0.0857, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 1.5994694960212201, | |
| "grad_norm": 80.43100476994681, | |
| "learning_rate": 1.1747464042989037e-06, | |
| "loss": 0.0639, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 1.6021220159151195, | |
| "grad_norm": 1.6740847245937969, | |
| "learning_rate": 1.1598679685917901e-06, | |
| "loss": 0.0101, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 1.6047745358090184, | |
| "grad_norm": 28.02526303415943, | |
| "learning_rate": 1.1450719819500906e-06, | |
| "loss": 0.0521, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 1.6074270557029178, | |
| "grad_norm": 26.697863234285077, | |
| "learning_rate": 1.1303587620491513e-06, | |
| "loss": 0.0414, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 1.6100795755968171, | |
| "grad_norm": 5.862144677178779, | |
| "learning_rate": 1.1157286247872873e-06, | |
| "loss": 0.0109, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 1.612732095490716, | |
| "grad_norm": 43.49272407406669, | |
| "learning_rate": 1.1011818842789928e-06, | |
| "loss": 0.0353, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 1.6153846153846154, | |
| "grad_norm": 1.2964944688294937, | |
| "learning_rate": 1.0867188528482087e-06, | |
| "loss": 0.0096, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 1.6180371352785146, | |
| "grad_norm": 44.34978591536543, | |
| "learning_rate": 1.0723398410216085e-06, | |
| "loss": 0.0366, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 1.6206896551724137, | |
| "grad_norm": 4.619040102092995, | |
| "learning_rate": 1.0580451575219304e-06, | |
| "loss": 0.0112, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 1.623342175066313, | |
| "grad_norm": 19.542144237425273, | |
| "learning_rate": 1.043835109261357e-06, | |
| "loss": 0.0226, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 1.6259946949602122, | |
| "grad_norm": 2.0309228191353026, | |
| "learning_rate": 1.0297100013349181e-06, | |
| "loss": 0.0098, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 1.6286472148541113, | |
| "grad_norm": 51.61437859615333, | |
| "learning_rate": 1.0156701370139454e-06, | |
| "loss": 0.1246, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 1.6312997347480107, | |
| "grad_norm": 69.56328048167647, | |
| "learning_rate": 1.0017158177395531e-06, | |
| "loss": 0.027, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 1.6339522546419099, | |
| "grad_norm": 63.099974960370844, | |
| "learning_rate": 9.878473431161767e-07, | |
| "loss": 0.0947, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 1.636604774535809, | |
| "grad_norm": 55.425411421163865, | |
| "learning_rate": 9.740650109051348e-07, | |
| "loss": 0.0392, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 1.6392572944297084, | |
| "grad_norm": 37.648031716061276, | |
| "learning_rate": 9.603691170182316e-07, | |
| "loss": 0.058, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 1.6419098143236073, | |
| "grad_norm": 7.440459964087829, | |
| "learning_rate": 9.467599555114137e-07, | |
| "loss": 0.0115, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 1.6445623342175066, | |
| "grad_norm": 77.24435836965144, | |
| "learning_rate": 9.332378185784491e-07, | |
| "loss": 0.0528, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 1.647214854111406, | |
| "grad_norm": 0.4743752266098194, | |
| "learning_rate": 9.198029965446537e-07, | |
| "loss": 0.0083, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 1.649867374005305, | |
| "grad_norm": 79.21582679656966, | |
| "learning_rate": 9.064557778606631e-07, | |
| "loss": 0.0696, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 1.6525198938992043, | |
| "grad_norm": 48.132345696323064, | |
| "learning_rate": 8.931964490962364e-07, | |
| "loss": 0.0279, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 1.6551724137931034, | |
| "grad_norm": 1.1869023442411482, | |
| "learning_rate": 8.800252949340998e-07, | |
| "loss": 0.0091, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 1.6578249336870026, | |
| "grad_norm": 30.282347105122806, | |
| "learning_rate": 8.669425981638413e-07, | |
| "loss": 0.0232, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 1.660477453580902, | |
| "grad_norm": 46.39818958163635, | |
| "learning_rate": 8.539486396758357e-07, | |
| "loss": 0.2452, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 1.663129973474801, | |
| "grad_norm": 110.62754523914514, | |
| "learning_rate": 8.410436984552112e-07, | |
| "loss": 0.0162, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 1.6657824933687002, | |
| "grad_norm": 11.289877918818396, | |
| "learning_rate": 8.282280515758639e-07, | |
| "loss": 0.0105, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 1.6684350132625996, | |
| "grad_norm": 1.0552014437605353, | |
| "learning_rate": 8.15501974194508e-07, | |
| "loss": 0.0086, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 1.6710875331564987, | |
| "grad_norm": 75.25984308785729, | |
| "learning_rate": 8.02865739544767e-07, | |
| "loss": 0.0434, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 1.6737400530503979, | |
| "grad_norm": 33.485386251907016, | |
| "learning_rate": 7.903196189313039e-07, | |
| "loss": 0.0494, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 1.6763925729442972, | |
| "grad_norm": 2.7176003446837513, | |
| "learning_rate": 7.778638817240042e-07, | |
| "loss": 0.0098, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 1.6790450928381961, | |
| "grad_norm": 9.594233163684546, | |
| "learning_rate": 7.654987953521875e-07, | |
| "loss": 0.0126, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 1.6816976127320955, | |
| "grad_norm": 32.410917232901454, | |
| "learning_rate": 7.532246252988617e-07, | |
| "loss": 0.0412, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 1.6843501326259946, | |
| "grad_norm": 1.4890213161118364, | |
| "learning_rate": 7.410416350950333e-07, | |
| "loss": 0.0087, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 1.6870026525198938, | |
| "grad_norm": 15.643686249186723, | |
| "learning_rate": 7.289500863140414e-07, | |
| "loss": 0.0144, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 1.6896551724137931, | |
| "grad_norm": 5.7419293179211115, | |
| "learning_rate": 7.16950238565941e-07, | |
| "loss": 0.0101, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 1.6923076923076923, | |
| "grad_norm": 2.6965659815581886, | |
| "learning_rate": 7.05042349491935e-07, | |
| "loss": 0.0096, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 1.6949602122015914, | |
| "grad_norm": 61.01472747960395, | |
| "learning_rate": 6.932266747588395e-07, | |
| "loss": 0.0448, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 1.6976127320954908, | |
| "grad_norm": 5.731959406558488, | |
| "learning_rate": 6.815034680535915e-07, | |
| "loss": 0.0093, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 1.70026525198939, | |
| "grad_norm": 1.4343043837386706, | |
| "learning_rate": 6.698729810778065e-07, | |
| "loss": 0.0083, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 1.702917771883289, | |
| "grad_norm": 3.7542551103645465, | |
| "learning_rate": 6.583354635423755e-07, | |
| "loss": 0.0093, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 1.7055702917771884, | |
| "grad_norm": 2.028260462186616, | |
| "learning_rate": 6.46891163162095e-07, | |
| "loss": 0.0083, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 1.7082228116710876, | |
| "grad_norm": 59.57120021690294, | |
| "learning_rate": 6.355403256503595e-07, | |
| "loss": 0.0543, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 1.7108753315649867, | |
| "grad_norm": 32.700514281376684, | |
| "learning_rate": 6.242831947138806e-07, | |
| "loss": 0.017, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 1.713527851458886, | |
| "grad_norm": 259.0797685477536, | |
| "learning_rate": 6.131200120474512e-07, | |
| "loss": 0.0427, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 1.716180371352785, | |
| "grad_norm": 69.44103749920225, | |
| "learning_rate": 6.020510173287636e-07, | |
| "loss": 0.0689, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 1.7188328912466844, | |
| "grad_norm": 32.75813876688262, | |
| "learning_rate": 5.910764482132575e-07, | |
| "loss": 0.0295, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 1.7214854111405835, | |
| "grad_norm": 60.7557553743672, | |
| "learning_rate": 5.801965403290221e-07, | |
| "loss": 0.0394, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 1.7241379310344827, | |
| "grad_norm": 49.079495000265624, | |
| "learning_rate": 5.694115272717326e-07, | |
| "loss": 0.0495, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 1.726790450928382, | |
| "grad_norm": 23.89730583666827, | |
| "learning_rate": 5.587216405996343e-07, | |
| "loss": 0.014, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 1.7294429708222812, | |
| "grad_norm": 7.754693291443579, | |
| "learning_rate": 5.481271098285818e-07, | |
| "loss": 0.0108, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 1.7320954907161803, | |
| "grad_norm": 17.364614580977687, | |
| "learning_rate": 5.376281624270946e-07, | |
| "loss": 0.0155, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 1.7347480106100797, | |
| "grad_norm": 32.664792296752104, | |
| "learning_rate": 5.272250238114857e-07, | |
| "loss": 0.0204, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 1.7374005305039788, | |
| "grad_norm": 16.847161826587552, | |
| "learning_rate": 5.169179173410178e-07, | |
| "loss": 0.0204, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 1.740053050397878, | |
| "grad_norm": 51.06867256696325, | |
| "learning_rate": 5.067070643131056e-07, | |
| "loss": 0.0211, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 1.7427055702917773, | |
| "grad_norm": 6.204869458611106, | |
| "learning_rate": 4.965926839585688e-07, | |
| "loss": 0.0121, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 1.7453580901856764, | |
| "grad_norm": 122.72174105534113, | |
| "learning_rate": 4.865749934369224e-07, | |
| "loss": 0.0325, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 1.7480106100795756, | |
| "grad_norm": 2.8331005645907665, | |
| "learning_rate": 4.766542078317121e-07, | |
| "loss": 0.0095, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 1.750663129973475, | |
| "grad_norm": 2.277598958882793, | |
| "learning_rate": 4.668305401459022e-07, | |
| "loss": 0.0084, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 1.7533156498673739, | |
| "grad_norm": 39.77450782767968, | |
| "learning_rate": 4.571042012972993e-07, | |
| "loss": 0.0503, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 1.7559681697612732, | |
| "grad_norm": 2.70542382611283, | |
| "learning_rate": 4.4747540011401913e-07, | |
| "loss": 0.0089, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 1.7586206896551724, | |
| "grad_norm": 54.9458721418408, | |
| "learning_rate": 4.379443433300129e-07, | |
| "loss": 0.0328, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 1.7612732095490715, | |
| "grad_norm": 77.12855499937814, | |
| "learning_rate": 4.2851123558061927e-07, | |
| "loss": 0.0893, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 1.7639257294429709, | |
| "grad_norm": 59.42880305544247, | |
| "learning_rate": 4.1917627939817793e-07, | |
| "loss": 0.0772, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 1.76657824933687, | |
| "grad_norm": 11.457095666748941, | |
| "learning_rate": 4.0993967520767455e-07, | |
| "loss": 0.0119, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 1.7692307692307692, | |
| "grad_norm": 0.7776911494937536, | |
| "learning_rate": 4.008016213224408e-07, | |
| "loss": 0.0079, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 1.7718832891246685, | |
| "grad_norm": 23.92789759542629, | |
| "learning_rate": 3.9176231393990183e-07, | |
| "loss": 0.014, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 1.7745358090185677, | |
| "grad_norm": 3.429341886024603, | |
| "learning_rate": 3.8282194713735286e-07, | |
| "loss": 0.0082, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 1.7771883289124668, | |
| "grad_norm": 0.698222487969724, | |
| "learning_rate": 3.739807128677986e-07, | |
| "loss": 0.0078, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 1.7798408488063662, | |
| "grad_norm": 63.23794158143725, | |
| "learning_rate": 3.6523880095583554e-07, | |
| "loss": 0.03, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 1.782493368700265, | |
| "grad_norm": 65.82679112186635, | |
| "learning_rate": 3.5659639909356725e-07, | |
| "loss": 0.0461, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 1.7851458885941645, | |
| "grad_norm": 64.99395526166062, | |
| "learning_rate": 3.480536928365824e-07, | |
| "loss": 0.0352, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 1.7877984084880638, | |
| "grad_norm": 0.7950476244387096, | |
| "learning_rate": 3.39610865599968e-07, | |
| "loss": 0.008, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 1.7904509283819627, | |
| "grad_norm": 1.765882976596194, | |
| "learning_rate": 3.3126809865436817e-07, | |
| "loss": 0.0081, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 1.793103448275862, | |
| "grad_norm": 6.925447876826482, | |
| "learning_rate": 3.230255711220992e-07, | |
| "loss": 0.0102, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 1.7957559681697612, | |
| "grad_norm": 76.51154678181398, | |
| "learning_rate": 3.1488345997329806e-07, | |
| "loss": 0.0679, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 1.7984084880636604, | |
| "grad_norm": 93.07697686356393, | |
| "learning_rate": 3.0684194002212287e-07, | |
| "loss": 0.0526, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 1.8010610079575597, | |
| "grad_norm": 93.41597512569238, | |
| "learning_rate": 2.9890118392300493e-07, | |
| "loss": 0.0969, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 1.8037135278514589, | |
| "grad_norm": 5.611833628800462, | |
| "learning_rate": 2.910613621669356e-07, | |
| "loss": 0.0098, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 1.806366047745358, | |
| "grad_norm": 20.19092567145126, | |
| "learning_rate": 2.83322643077808e-07, | |
| "loss": 0.0175, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 1.8090185676392574, | |
| "grad_norm": 1.8368721258077867, | |
| "learning_rate": 2.756851928088056e-07, | |
| "loss": 0.0076, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 1.8116710875331565, | |
| "grad_norm": 3.9050484927641116, | |
| "learning_rate": 2.681491753388282e-07, | |
| "loss": 0.009, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 1.8143236074270557, | |
| "grad_norm": 4.365189701819248, | |
| "learning_rate": 2.607147524689829e-07, | |
| "loss": 0.0094, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 1.816976127320955, | |
| "grad_norm": 32.58154214210014, | |
| "learning_rate": 2.533820838190959e-07, | |
| "loss": 0.0154, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 1.819628647214854, | |
| "grad_norm": 0.9141003830251871, | |
| "learning_rate": 2.461513268242938e-07, | |
| "loss": 0.0078, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 1.8222811671087533, | |
| "grad_norm": 21.79331336536199, | |
| "learning_rate": 2.390226367316262e-07, | |
| "loss": 0.018, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 1.8249336870026527, | |
| "grad_norm": 128.06774726121702, | |
| "learning_rate": 2.3199616659672352e-07, | |
| "loss": 0.0432, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 1.8275862068965516, | |
| "grad_norm": 53.37364432393282, | |
| "learning_rate": 2.2507206728051732e-07, | |
| "loss": 0.0247, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 1.830238726790451, | |
| "grad_norm": 0.7270142813321538, | |
| "learning_rate": 2.1825048744600062e-07, | |
| "loss": 0.0073, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 1.83289124668435, | |
| "grad_norm": 174.55783956435897, | |
| "learning_rate": 2.1153157355503274e-07, | |
| "loss": 0.1618, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 1.8355437665782492, | |
| "grad_norm": 5.21130990558658, | |
| "learning_rate": 2.0491546986519896e-07, | |
| "loss": 0.0089, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 1.8381962864721486, | |
| "grad_norm": 1.1234441609490853, | |
| "learning_rate": 1.9840231842671087e-07, | |
| "loss": 0.0076, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 1.8408488063660478, | |
| "grad_norm": 0.406457518976773, | |
| "learning_rate": 1.9199225907935492e-07, | |
| "loss": 0.0073, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 1.843501326259947, | |
| "grad_norm": 69.81427764450714, | |
| "learning_rate": 1.8568542944949474e-07, | |
| "loss": 0.076, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 1.8461538461538463, | |
| "grad_norm": 0.44285812333248226, | |
| "learning_rate": 1.794819649471119e-07, | |
| "loss": 0.0073, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 1.8488063660477454, | |
| "grad_norm": 10.714036392240356, | |
| "learning_rate": 1.7338199876289984e-07, | |
| "loss": 0.0089, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 1.8514588859416445, | |
| "grad_norm": 1.3421527125349642, | |
| "learning_rate": 1.6738566186540628e-07, | |
| "loss": 0.0078, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 1.854111405835544, | |
| "grad_norm": 89.3697897536333, | |
| "learning_rate": 1.6149308299821643e-07, | |
| "loss": 0.0709, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 1.8567639257294428, | |
| "grad_norm": 4.611542263926474, | |
| "learning_rate": 1.5570438867719695e-07, | |
| "loss": 0.0091, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 1.8594164456233422, | |
| "grad_norm": 23.320087228041047, | |
| "learning_rate": 1.500197031877698e-07, | |
| "loss": 0.0163, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 1.8620689655172413, | |
| "grad_norm": 42.64299850468228, | |
| "learning_rate": 1.4443914858224938e-07, | |
| "loss": 0.0306, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 1.8647214854111405, | |
| "grad_norm": 5.952687971891359, | |
| "learning_rate": 1.3896284467722398e-07, | |
| "loss": 0.009, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 1.8673740053050398, | |
| "grad_norm": 3.742186108573706, | |
| "learning_rate": 1.335909090509785e-07, | |
| "loss": 0.008, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 1.870026525198939, | |
| "grad_norm": 1.92877502596486, | |
| "learning_rate": 1.2832345704097082e-07, | |
| "loss": 0.008, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 1.8726790450928381, | |
| "grad_norm": 37.469150195564026, | |
| "learning_rate": 1.2316060174136e-07, | |
| "loss": 0.081, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 1.8753315649867375, | |
| "grad_norm": 43.121003325017995, | |
| "learning_rate": 1.1810245400057152e-07, | |
| "loss": 0.0215, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 1.8779840848806366, | |
| "grad_norm": 1.2052889929930615, | |
| "learning_rate": 1.1314912241892184e-07, | |
| "loss": 0.0078, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 1.8806366047745358, | |
| "grad_norm": 1.2294161301088746, | |
| "learning_rate": 1.0830071334628655e-07, | |
| "loss": 0.0078, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 1.8832891246684351, | |
| "grad_norm": 1.8092789775735767, | |
| "learning_rate": 1.035573308798138e-07, | |
| "loss": 0.008, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 1.8859416445623343, | |
| "grad_norm": 70.24535100907208, | |
| "learning_rate": 9.891907686169211e-08, | |
| "loss": 0.1506, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 1.8885941644562334, | |
| "grad_norm": 6.196122071829012, | |
| "learning_rate": 9.43860508769645e-08, | |
| "loss": 0.0098, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 1.8912466843501328, | |
| "grad_norm": 73.29972892009195, | |
| "learning_rate": 8.995835025138677e-08, | |
| "loss": 0.0939, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 1.8938992042440317, | |
| "grad_norm": 75.02804554993698, | |
| "learning_rate": 8.563607004934193e-08, | |
| "loss": 0.0148, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 1.896551724137931, | |
| "grad_norm": 1.4881900336993148, | |
| "learning_rate": 8.141930307179468e-08, | |
| "loss": 0.0077, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 1.8992042440318302, | |
| "grad_norm": 27.49604743931196, | |
| "learning_rate": 7.730813985430407e-08, | |
| "loss": 0.0218, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 1.9018567639257293, | |
| "grad_norm": 4.353998782020853, | |
| "learning_rate": 7.330266866507618e-08, | |
| "loss": 0.0088, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 1.9045092838196287, | |
| "grad_norm": 47.93923727811735, | |
| "learning_rate": 6.940297550306895e-08, | |
| "loss": 0.0168, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 1.9071618037135278, | |
| "grad_norm": 42.05910256976991, | |
| "learning_rate": 6.560914409614872e-08, | |
| "loss": 0.0365, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 1.909814323607427, | |
| "grad_norm": 16.18612043878886, | |
| "learning_rate": 6.192125589928821e-08, | |
| "loss": 0.011, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 1.9124668435013263, | |
| "grad_norm": 52.1180312844014, | |
| "learning_rate": 5.833939009282086e-08, | |
| "loss": 0.0243, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 1.9151193633952255, | |
| "grad_norm": 6.703179422202392, | |
| "learning_rate": 5.486362358074093e-08, | |
| "loss": 0.0083, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 1.9177718832891246, | |
| "grad_norm": 20.06608206843261, | |
| "learning_rate": 5.1494030989049926e-08, | |
| "loss": 0.0186, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 1.920424403183024, | |
| "grad_norm": 55.56707330559414, | |
| "learning_rate": 4.823068466415615e-08, | |
| "loss": 0.0388, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 1.9230769230769231, | |
| "grad_norm": 2.608201296948723, | |
| "learning_rate": 4.5073654671320965e-08, | |
| "loss": 0.0084, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 1.9257294429708223, | |
| "grad_norm": 0.6999967243653099, | |
| "learning_rate": 4.202300879315446e-08, | |
| "loss": 0.0073, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 1.9283819628647216, | |
| "grad_norm": 103.9098856165428, | |
| "learning_rate": 3.907881252816048e-08, | |
| "loss": 0.0468, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 1.9310344827586206, | |
| "grad_norm": 22.491713058289164, | |
| "learning_rate": 3.6241129089329416e-08, | |
| "loss": 0.0171, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 1.93368700265252, | |
| "grad_norm": 1.0830419543991152, | |
| "learning_rate": 3.351001940278209e-08, | |
| "loss": 0.0074, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 1.936339522546419, | |
| "grad_norm": 0.44996729741749725, | |
| "learning_rate": 3.088554210646133e-08, | |
| "loss": 0.0072, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 1.9389920424403182, | |
| "grad_norm": 0.8055269156120815, | |
| "learning_rate": 2.8367753548871335e-08, | |
| "loss": 0.0075, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 1.9416445623342176, | |
| "grad_norm": 1.4172101731352273, | |
| "learning_rate": 2.595670778787196e-08, | |
| "loss": 0.0075, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 1.9442970822281167, | |
| "grad_norm": 0.8008274289768108, | |
| "learning_rate": 2.3652456589512983e-08, | |
| "loss": 0.0076, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 1.9469496021220158, | |
| "grad_norm": 27.57547230491662, | |
| "learning_rate": 2.1455049426926666e-08, | |
| "loss": 0.0265, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 1.9496021220159152, | |
| "grad_norm": 2.302647662511601, | |
| "learning_rate": 1.9364533479263036e-08, | |
| "loss": 0.0079, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 1.9522546419098143, | |
| "grad_norm": 0.834222628641657, | |
| "learning_rate": 1.7380953630678488e-08, | |
| "loss": 0.0074, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 1.9549071618037135, | |
| "grad_norm": 7.460890222649117, | |
| "learning_rate": 1.5504352469371543e-08, | |
| "loss": 0.0089, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 1.9575596816976129, | |
| "grad_norm": 13.990819309192242, | |
| "learning_rate": 1.373477028666803e-08, | |
| "loss": 0.0161, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 1.9602122015915118, | |
| "grad_norm": 10.547991318429563, | |
| "learning_rate": 1.2072245076156786e-08, | |
| "loss": 0.0089, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 1.9628647214854111, | |
| "grad_norm": 0.436865387598025, | |
| "learning_rate": 1.0516812532873622e-08, | |
| "loss": 0.0073, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 1.9655172413793105, | |
| "grad_norm": 0.7539830177016554, | |
| "learning_rate": 9.068506052534732e-09, | |
| "loss": 0.0075, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 1.9681697612732094, | |
| "grad_norm": 31.411581219499894, | |
| "learning_rate": 7.727356730820035e-09, | |
| "loss": 0.0331, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 1.9708222811671088, | |
| "grad_norm": 110.80890873557911, | |
| "learning_rate": 6.49339336270427e-09, | |
| "loss": 0.0653, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 1.973474801061008, | |
| "grad_norm": 0.552214904255334, | |
| "learning_rate": 5.366642441841374e-09, | |
| "loss": 0.0073, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 1.976127320954907, | |
| "grad_norm": 11.059061514681844, | |
| "learning_rate": 4.347128159993829e-09, | |
| "loss": 0.0139, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 1.9787798408488064, | |
| "grad_norm": 3.9879729026025434, | |
| "learning_rate": 3.4348724065119687e-09, | |
| "loss": 0.0095, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 1.9814323607427056, | |
| "grad_norm": 2.074633533733444, | |
| "learning_rate": 2.62989476786768e-09, | |
| "loss": 0.0082, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 1.9840848806366047, | |
| "grad_norm": 100.87658775136781, | |
| "learning_rate": 1.9322125272297488e-09, | |
| "loss": 0.0574, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 1.986737400530504, | |
| "grad_norm": 2.099665706708233, | |
| "learning_rate": 1.3418406640969272e-09, | |
| "loss": 0.0081, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 1.9893899204244032, | |
| "grad_norm": 15.46545559542927, | |
| "learning_rate": 8.587918539726403e-10, | |
| "loss": 0.0158, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 1.9920424403183024, | |
| "grad_norm": 1.371107254109801, | |
| "learning_rate": 4.830764680946453e-10, | |
| "loss": 0.0083, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 1.9946949602122017, | |
| "grad_norm": 84.92722586939128, | |
| "learning_rate": 2.1470257321298815e-10, | |
| "loss": 0.1214, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 1.9973474801061006, | |
| "grad_norm": 0.4226693463546857, | |
| "learning_rate": 5.3675931413477156e-11, | |
| "loss": 0.0073, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 6.432632396146792, | |
| "learning_rate": 0.0, | |
| "loss": 0.0089, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "eval_loss": 0.9545881152153015, | |
| "eval_runtime": 176.3571, | |
| "eval_samples_per_second": 11.993, | |
| "eval_steps_per_second": 1.503, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "step": 754, | |
| "total_flos": 22077400891392.0, | |
| "train_loss": 0.7089056190948153, | |
| "train_runtime": 3328.6277, | |
| "train_samples_per_second": 3.623, | |
| "train_steps_per_second": 0.227 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 754, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 2, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 22077400891392.0, | |
| "train_batch_size": 2, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |