{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 6.850632911392405,
  "eval_steps": 500,
  "global_step": 224,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.030379746835443037,
      "grad_norm": 7.821046188822606,
      "learning_rate": 8.695652173913044e-07,
      "loss": 1.1558,
      "step": 1
    },
    {
      "epoch": 0.060759493670886074,
      "grad_norm": 7.921825084451515,
      "learning_rate": 1.7391304347826088e-06,
      "loss": 1.2051,
      "step": 2
    },
    {
      "epoch": 0.09113924050632911,
      "grad_norm": 7.949194540802856,
      "learning_rate": 2.6086956521739132e-06,
      "loss": 1.2081,
      "step": 3
    },
    {
      "epoch": 0.12151898734177215,
      "grad_norm": 7.419381898775899,
      "learning_rate": 3.4782608695652175e-06,
      "loss": 1.1778,
      "step": 4
    },
    {
      "epoch": 0.1518987341772152,
      "grad_norm": 5.8147744470812315,
      "learning_rate": 4.347826086956522e-06,
      "loss": 1.133,
      "step": 5
    },
    {
      "epoch": 0.18227848101265823,
      "grad_norm": 3.1488141527120717,
      "learning_rate": 5.2173913043478265e-06,
      "loss": 1.0355,
      "step": 6
    },
    {
      "epoch": 0.21265822784810126,
      "grad_norm": 2.6133812217685786,
      "learning_rate": 6.086956521739132e-06,
      "loss": 0.9942,
      "step": 7
    },
    {
      "epoch": 0.2430379746835443,
      "grad_norm": 4.53053450724563,
      "learning_rate": 6.956521739130435e-06,
      "loss": 0.9946,
      "step": 8
    },
    {
      "epoch": 0.27341772151898736,
      "grad_norm": 4.780306428776297,
      "learning_rate": 7.82608695652174e-06,
      "loss": 1.0056,
      "step": 9
    },
    {
      "epoch": 0.3037974683544304,
      "grad_norm": 4.288850924374228,
      "learning_rate": 8.695652173913044e-06,
      "loss": 0.9472,
      "step": 10
    },
    {
      "epoch": 0.3341772151898734,
      "grad_norm": 4.439969935534219,
      "learning_rate": 9.565217391304349e-06,
      "loss": 0.9123,
      "step": 11
    },
    {
      "epoch": 0.36455696202531646,
      "grad_norm": 4.010491564592064,
      "learning_rate": 1.0434782608695653e-05,
      "loss": 0.8883,
      "step": 12
    },
    {
      "epoch": 0.3949367088607595,
      "grad_norm": 3.3294777717525594,
      "learning_rate": 1.1304347826086957e-05,
      "loss": 0.8939,
      "step": 13
    },
    {
      "epoch": 0.4253164556962025,
      "grad_norm": 2.7664993482699534,
      "learning_rate": 1.2173913043478263e-05,
      "loss": 0.875,
      "step": 14
    },
    {
      "epoch": 0.45569620253164556,
      "grad_norm": 2.8260967544313385,
      "learning_rate": 1.3043478260869566e-05,
      "loss": 0.827,
      "step": 15
    },
    {
      "epoch": 0.4860759493670886,
      "grad_norm": 2.622718788057903,
      "learning_rate": 1.391304347826087e-05,
      "loss": 0.8501,
      "step": 16
    },
    {
      "epoch": 0.5164556962025316,
      "grad_norm": 2.4404720537483193,
      "learning_rate": 1.4782608695652174e-05,
      "loss": 0.8602,
      "step": 17
    },
    {
      "epoch": 0.5468354430379747,
      "grad_norm": 2.378257567990093,
      "learning_rate": 1.565217391304348e-05,
      "loss": 0.8492,
      "step": 18
    },
    {
      "epoch": 0.5772151898734177,
      "grad_norm": 2.136063926093358,
      "learning_rate": 1.6521739130434785e-05,
      "loss": 0.8254,
      "step": 19
    },
    {
      "epoch": 0.6075949367088608,
      "grad_norm": 1.7328481110273704,
      "learning_rate": 1.739130434782609e-05,
      "loss": 0.862,
      "step": 20
    },
    {
      "epoch": 0.6379746835443038,
      "grad_norm": 1.8273457611644959,
      "learning_rate": 1.8260869565217393e-05,
      "loss": 0.8062,
      "step": 21
    },
    {
      "epoch": 0.6683544303797468,
      "grad_norm": 1.502333605934096,
      "learning_rate": 1.9130434782608697e-05,
      "loss": 0.8299,
      "step": 22
    },
    {
      "epoch": 0.6987341772151898,
      "grad_norm": 1.2900310524495506,
      "learning_rate": 2e-05,
      "loss": 0.807,
      "step": 23
    },
    {
      "epoch": 0.7291139240506329,
      "grad_norm": 1.1456325043371915,
      "learning_rate": 1.999877856940653e-05,
      "loss": 0.7821,
      "step": 24
    },
    {
      "epoch": 0.759493670886076,
      "grad_norm": 1.2018149758777508,
      "learning_rate": 1.999511457600466e-05,
      "loss": 0.8497,
      "step": 25
    },
    {
      "epoch": 0.789873417721519,
      "grad_norm": 0.9889852771780683,
      "learning_rate": 1.9989008914857115e-05,
      "loss": 0.8075,
      "step": 26
    },
    {
      "epoch": 0.8202531645569621,
      "grad_norm": 1.1076240476548542,
      "learning_rate": 1.998046307749216e-05,
      "loss": 0.7513,
      "step": 27
    },
    {
      "epoch": 0.850632911392405,
      "grad_norm": 0.8803163368784866,
      "learning_rate": 1.9969479151539238e-05,
      "loss": 0.7859,
      "step": 28
    },
    {
      "epoch": 0.8810126582278481,
      "grad_norm": 1.0491156615385184,
      "learning_rate": 1.9956059820218982e-05,
      "loss": 0.7526,
      "step": 29
    },
    {
      "epoch": 0.9113924050632911,
      "grad_norm": 1.10152854864363,
      "learning_rate": 1.9940208361687762e-05,
      "loss": 0.7491,
      "step": 30
    },
    {
      "epoch": 0.9417721518987342,
      "grad_norm": 1.0228949281794228,
      "learning_rate": 1.9921928648236855e-05,
      "loss": 0.7853,
      "step": 31
    },
    {
      "epoch": 0.9721518987341772,
      "grad_norm": 1.0034300575346555,
      "learning_rate": 1.990122514534651e-05,
      "loss": 0.8105,
      "step": 32
    },
    {
      "epoch": 1.010126582278481,
      "grad_norm": 1.1172162915635493,
      "learning_rate": 1.9878102910595097e-05,
      "loss": 0.7464,
      "step": 33
    },
    {
      "epoch": 1.040506329113924,
      "grad_norm": 0.9591624728466743,
      "learning_rate": 1.985256759242359e-05,
      "loss": 0.6626,
      "step": 34
    },
    {
      "epoch": 1.070886075949367,
      "grad_norm": 1.0340759260382315,
      "learning_rate": 1.982462542875576e-05,
      "loss": 0.6708,
      "step": 35
    },
    {
      "epoch": 1.1012658227848102,
      "grad_norm": 0.9433503158572933,
      "learning_rate": 1.979428324547432e-05,
      "loss": 0.6447,
      "step": 36
    },
    {
      "epoch": 1.1316455696202532,
      "grad_norm": 0.9218281307494127,
      "learning_rate": 1.9761548454753455e-05,
      "loss": 0.6418,
      "step": 37
    },
    {
      "epoch": 1.1620253164556962,
      "grad_norm": 0.9198714860449547,
      "learning_rate": 1.972642905324813e-05,
      "loss": 0.6677,
      "step": 38
    },
    {
      "epoch": 1.1924050632911392,
      "grad_norm": 0.9387185070917101,
      "learning_rate": 1.9688933620140638e-05,
      "loss": 0.6054,
      "step": 39
    },
    {
      "epoch": 1.2227848101265824,
      "grad_norm": 0.8728726429722485,
      "learning_rate": 1.96490713150448e-05,
      "loss": 0.6298,
      "step": 40
    },
    {
      "epoch": 1.2531645569620253,
      "grad_norm": 0.8752898789891731,
      "learning_rate": 1.9606851875768404e-05,
      "loss": 0.6111,
      "step": 41
    },
    {
      "epoch": 1.2835443037974683,
      "grad_norm": 0.893778572005734,
      "learning_rate": 1.956228561593441e-05,
      "loss": 0.6404,
      "step": 42
    },
    {
      "epoch": 1.3139240506329113,
      "grad_norm": 0.8160072296242408,
      "learning_rate": 1.9515383422461457e-05,
      "loss": 0.6291,
      "step": 43
    },
    {
      "epoch": 1.3443037974683545,
      "grad_norm": 0.8392921428856774,
      "learning_rate": 1.9466156752904344e-05,
      "loss": 0.6716,
      "step": 44
    },
    {
      "epoch": 1.3746835443037975,
      "grad_norm": 0.8409571411034287,
      "learning_rate": 1.9414617632655114e-05,
      "loss": 0.6197,
      "step": 45
    },
    {
      "epoch": 1.4050632911392404,
      "grad_norm": 0.7944502463591191,
      "learning_rate": 1.9360778652005416e-05,
      "loss": 0.6277,
      "step": 46
    },
    {
      "epoch": 1.4354430379746836,
      "grad_norm": 0.7646898863461067,
      "learning_rate": 1.9304652963070868e-05,
      "loss": 0.6033,
      "step": 47
    },
    {
      "epoch": 1.4658227848101266,
      "grad_norm": 0.8293604918278636,
      "learning_rate": 1.9246254276578175e-05,
      "loss": 0.5995,
      "step": 48
    },
    {
      "epoch": 1.4962025316455696,
      "grad_norm": 0.8011654631247795,
      "learning_rate": 1.9185596858515797e-05,
      "loss": 0.6169,
      "step": 49
    },
    {
      "epoch": 1.5265822784810128,
      "grad_norm": 0.7824727918529569,
      "learning_rate": 1.9122695526648968e-05,
      "loss": 0.6084,
      "step": 50
    },
    {
      "epoch": 1.5569620253164556,
      "grad_norm": 0.8480236748326004,
      "learning_rate": 1.905756564689991e-05,
      "loss": 0.6129,
      "step": 51
    },
    {
      "epoch": 1.5873417721518988,
      "grad_norm": 0.7639111105033703,
      "learning_rate": 1.8990223129594146e-05,
      "loss": 0.6053,
      "step": 52
    },
    {
      "epoch": 1.6177215189873417,
      "grad_norm": 0.9093137184868099,
      "learning_rate": 1.8920684425573865e-05,
      "loss": 0.6422,
      "step": 53
    },
    {
      "epoch": 1.6481012658227847,
      "grad_norm": 0.6994028366767042,
      "learning_rate": 1.884896652217917e-05,
      "loss": 0.5946,
      "step": 54
    },
    {
      "epoch": 1.678481012658228,
      "grad_norm": 0.8968558583216436,
      "learning_rate": 1.877508693909831e-05,
      "loss": 0.6207,
      "step": 55
    },
    {
      "epoch": 1.7088607594936709,
      "grad_norm": 0.811839332119214,
      "learning_rate": 1.8699063724087905e-05,
      "loss": 0.618,
      "step": 56
    },
    {
      "epoch": 1.7392405063291139,
      "grad_norm": 0.9002757348285215,
      "learning_rate": 1.862091544856407e-05,
      "loss": 0.6112,
      "step": 57
    },
    {
      "epoch": 1.769620253164557,
      "grad_norm": 0.7990305355304187,
      "learning_rate": 1.854066120306571e-05,
      "loss": 0.6192,
      "step": 58
    },
    {
      "epoch": 1.8,
      "grad_norm": 0.861399999161712,
      "learning_rate": 1.8458320592590976e-05,
      "loss": 0.5898,
      "step": 59
    },
    {
      "epoch": 1.830379746835443,
      "grad_norm": 0.8278930782445296,
      "learning_rate": 1.837391373180801e-05,
      "loss": 0.6108,
      "step": 60
    },
    {
      "epoch": 1.8607594936708862,
      "grad_norm": 0.7921513805855641,
      "learning_rate": 1.8287461240141217e-05,
      "loss": 0.5995,
      "step": 61
    },
    {
      "epoch": 1.891139240506329,
      "grad_norm": 0.8900711452206441,
      "learning_rate": 1.8198984236734246e-05,
      "loss": 0.623,
      "step": 62
    },
    {
      "epoch": 1.9215189873417722,
      "grad_norm": 0.8985909391413275,
      "learning_rate": 1.8108504335290852e-05,
      "loss": 0.6299,
      "step": 63
    },
    {
      "epoch": 1.9518987341772152,
      "grad_norm": 0.7194793851130045,
      "learning_rate": 1.8016043638794975e-05,
      "loss": 0.5897,
      "step": 64
    },
    {
      "epoch": 1.9822784810126581,
      "grad_norm": 0.8531941724481863,
      "learning_rate": 1.7921624734111292e-05,
      "loss": 0.6191,
      "step": 65
    },
    {
      "epoch": 2.020253164556962,
      "grad_norm": 0.8209935083153904,
      "learning_rate": 1.7825270686467567e-05,
      "loss": 0.4933,
      "step": 66
    },
    {
      "epoch": 2.050632911392405,
      "grad_norm": 0.7996082296164394,
      "learning_rate": 1.7727005033820117e-05,
      "loss": 0.44,
      "step": 67
    },
    {
      "epoch": 2.081012658227848,
      "grad_norm": 1.2840953839544158,
      "learning_rate": 1.762685178110382e-05,
      "loss": 0.4285,
      "step": 68
    },
    {
      "epoch": 2.1113924050632913,
      "grad_norm": 0.942479353452438,
      "learning_rate": 1.752483539436807e-05,
      "loss": 0.4374,
      "step": 69
    },
    {
      "epoch": 2.141772151898734,
      "grad_norm": 1.2232114430158003,
      "learning_rate": 1.7420980794800013e-05,
      "loss": 0.4087,
      "step": 70
    },
    {
      "epoch": 2.1721518987341772,
      "grad_norm": 1.0356734414329936,
      "learning_rate": 1.731531335263669e-05,
      "loss": 0.3946,
      "step": 71
    },
    {
      "epoch": 2.2025316455696204,
      "grad_norm": 0.7867140091801025,
      "learning_rate": 1.720785888096743e-05,
      "loss": 0.4163,
      "step": 72
    },
    {
      "epoch": 2.232911392405063,
      "grad_norm": 0.9132158139934681,
      "learning_rate": 1.7098643629428035e-05,
      "loss": 0.3946,
      "step": 73
    },
    {
      "epoch": 2.2632911392405064,
      "grad_norm": 0.7763347772570146,
      "learning_rate": 1.698769427778842e-05,
      "loss": 0.3886,
      "step": 74
    },
    {
      "epoch": 2.293670886075949,
      "grad_norm": 0.8389557995027598,
      "learning_rate": 1.687503792943506e-05,
      "loss": 0.4095,
      "step": 75
    },
    {
      "epoch": 2.3240506329113924,
      "grad_norm": 0.7011995417689459,
      "learning_rate": 1.6760702104750046e-05,
      "loss": 0.4333,
      "step": 76
    },
    {
      "epoch": 2.3544303797468356,
      "grad_norm": 0.8420627329845638,
      "learning_rate": 1.664471473438822e-05,
      "loss": 0.4036,
      "step": 77
    },
    {
      "epoch": 2.3848101265822783,
      "grad_norm": 0.7237704302653927,
      "learning_rate": 1.6527104152454096e-05,
      "loss": 0.4052,
      "step": 78
    },
    {
      "epoch": 2.4151898734177215,
      "grad_norm": 0.8791910546924722,
      "learning_rate": 1.6407899089580263e-05,
      "loss": 0.4346,
      "step": 79
    },
    {
      "epoch": 2.4455696202531647,
      "grad_norm": 0.7919528470256998,
      "learning_rate": 1.628712866590885e-05,
      "loss": 0.4288,
      "step": 80
    },
    {
      "epoch": 2.4759493670886075,
      "grad_norm": 0.6758854624059195,
      "learning_rate": 1.6164822383977912e-05,
      "loss": 0.3944,
      "step": 81
    },
    {
      "epoch": 2.5063291139240507,
      "grad_norm": 0.8553142061277994,
      "learning_rate": 1.604101012151436e-05,
      "loss": 0.4129,
      "step": 82
    },
    {
      "epoch": 2.536708860759494,
      "grad_norm": 0.6894610348766053,
      "learning_rate": 1.5915722124135227e-05,
      "loss": 0.4011,
      "step": 83
    },
    {
      "epoch": 2.5670886075949366,
      "grad_norm": 0.7970929002092169,
      "learning_rate": 1.5788988997959115e-05,
      "loss": 0.4008,
      "step": 84
    },
    {
      "epoch": 2.59746835443038,
      "grad_norm": 0.7134359418705393,
      "learning_rate": 1.5660841702129533e-05,
      "loss": 0.3664,
      "step": 85
    },
    {
      "epoch": 2.6278481012658226,
      "grad_norm": 0.7087911471321273,
      "learning_rate": 1.5531311541251995e-05,
      "loss": 0.3791,
      "step": 86
    },
    {
      "epoch": 2.6582278481012658,
      "grad_norm": 0.6891445046493652,
      "learning_rate": 1.540043015774676e-05,
      "loss": 0.3847,
      "step": 87
    },
    {
      "epoch": 2.688607594936709,
      "grad_norm": 0.7637547952556076,
      "learning_rate": 1.5268229524119007e-05,
      "loss": 0.4041,
      "step": 88
    },
    {
      "epoch": 2.7189873417721517,
      "grad_norm": 0.7769177668227595,
      "learning_rate": 1.513474193514842e-05,
      "loss": 0.3991,
      "step": 89
    },
    {
      "epoch": 2.749367088607595,
      "grad_norm": 0.7926731473312283,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.3976,
      "step": 90
    },
    {
      "epoch": 2.779746835443038,
      "grad_norm": 0.6806718036324106,
      "learning_rate": 1.4864036634258112e-05,
      "loss": 0.3899,
      "step": 91
    },
    {
      "epoch": 2.810126582278481,
      "grad_norm": 0.7649089907549299,
      "learning_rate": 1.4726885051885654e-05,
      "loss": 0.3827,
      "step": 92
    },
    {
      "epoch": 2.840506329113924,
      "grad_norm": 0.7511392853903036,
      "learning_rate": 1.4588578757110359e-05,
      "loss": 0.3684,
      "step": 93
    },
    {
      "epoch": 2.8708860759493673,
      "grad_norm": 0.6748860783991397,
      "learning_rate": 1.4449151536240167e-05,
      "loss": 0.3809,
      "step": 94
    },
    {
      "epoch": 2.90126582278481,
      "grad_norm": 0.6864747657021409,
      "learning_rate": 1.4308637449409705e-05,
      "loss": 0.3956,
      "step": 95
    },
    {
      "epoch": 2.9316455696202532,
      "grad_norm": 0.7753228864719607,
      "learning_rate": 1.4167070822259868e-05,
      "loss": 0.4254,
      "step": 96
    },
    {
      "epoch": 2.962025316455696,
      "grad_norm": 0.6651509289577628,
      "learning_rate": 1.402448623755254e-05,
      "loss": 0.3921,
      "step": 97
    },
    {
      "epoch": 2.992405063291139,
      "grad_norm": 0.6983791734860466,
      "learning_rate": 1.3880918526722497e-05,
      "loss": 0.4287,
      "step": 98
    },
    {
      "epoch": 3.030379746835443,
      "grad_norm": 0.9220251556978855,
      "learning_rate": 1.3736402761368597e-05,
      "loss": 0.2964,
      "step": 99
    },
    {
      "epoch": 3.060759493670886,
      "grad_norm": 0.6610795758669731,
      "learning_rate": 1.3590974244686248e-05,
      "loss": 0.2612,
      "step": 100
    },
    {
      "epoch": 3.091139240506329,
      "grad_norm": 0.9270782747999269,
      "learning_rate": 1.344466850284333e-05,
      "loss": 0.2746,
      "step": 101
    },
    {
      "epoch": 3.1215189873417724,
      "grad_norm": 1.098737259675843,
      "learning_rate": 1.3297521276301666e-05,
      "loss": 0.2855,
      "step": 102
    },
    {
      "epoch": 3.151898734177215,
      "grad_norm": 0.648219534073433,
      "learning_rate": 1.3149568511086104e-05,
      "loss": 0.2586,
      "step": 103
    },
    {
      "epoch": 3.1822784810126583,
      "grad_norm": 0.7931412589215237,
      "learning_rate": 1.300084635000341e-05,
      "loss": 0.2858,
      "step": 104
    },
    {
      "epoch": 3.212658227848101,
      "grad_norm": 0.7611529900120818,
      "learning_rate": 1.2851391123813075e-05,
      "loss": 0.263,
      "step": 105
    },
    {
      "epoch": 3.2430379746835443,
      "grad_norm": 0.6482399304581031,
      "learning_rate": 1.2701239342352223e-05,
      "loss": 0.299,
      "step": 106
    },
    {
      "epoch": 3.2734177215189875,
      "grad_norm": 0.5954166072264363,
      "learning_rate": 1.2550427685616767e-05,
      "loss": 0.2868,
      "step": 107
    },
    {
      "epoch": 3.3037974683544302,
      "grad_norm": 0.6767360371462975,
      "learning_rate": 1.239899299480098e-05,
      "loss": 0.2564,
      "step": 108
    },
    {
      "epoch": 3.3341772151898734,
      "grad_norm": 0.6094191230978437,
      "learning_rate": 1.2246972263297718e-05,
      "loss": 0.2528,
      "step": 109
    },
    {
      "epoch": 3.3645569620253166,
      "grad_norm": 0.7145739315760229,
      "learning_rate": 1.2094402627661447e-05,
      "loss": 0.2853,
      "step": 110
    },
    {
      "epoch": 3.3949367088607594,
      "grad_norm": 0.6231765035888277,
      "learning_rate": 1.1941321358536278e-05,
      "loss": 0.2429,
      "step": 111
    },
    {
      "epoch": 3.4253164556962026,
      "grad_norm": 0.6449885249273971,
      "learning_rate": 1.1787765851551296e-05,
      "loss": 0.2711,
      "step": 112
    },
    {
      "epoch": 3.4556962025316453,
      "grad_norm": 0.586672980747406,
      "learning_rate": 1.1633773618185302e-05,
      "loss": 0.2685,
      "step": 113
    },
    {
      "epoch": 3.4860759493670885,
      "grad_norm": 0.6467921689659367,
      "learning_rate": 1.14793822766033e-05,
      "loss": 0.2818,
      "step": 114
    },
    {
      "epoch": 3.5164556962025317,
      "grad_norm": 0.6381786906313327,
      "learning_rate": 1.132462954246688e-05,
      "loss": 0.2711,
      "step": 115
    },
    {
      "epoch": 3.546835443037975,
      "grad_norm": 0.6314811402603955,
      "learning_rate": 1.1169553219720828e-05,
      "loss": 0.2773,
      "step": 116
    },
    {
      "epoch": 3.5772151898734177,
      "grad_norm": 0.6097770409637793,
      "learning_rate": 1.1014191191358118e-05,
      "loss": 0.2512,
      "step": 117
    },
    {
      "epoch": 3.607594936708861,
      "grad_norm": 0.6449724975686235,
      "learning_rate": 1.085858141016566e-05,
      "loss": 0.2551,
      "step": 118
    },
    {
      "epoch": 3.6379746835443036,
      "grad_norm": 0.5877968560776582,
      "learning_rate": 1.070276188945293e-05,
      "loss": 0.258,
      "step": 119
    },
    {
      "epoch": 3.668354430379747,
      "grad_norm": 0.5780608872071732,
      "learning_rate": 1.0546770693765859e-05,
      "loss": 0.2479,
      "step": 120
    },
    {
      "epoch": 3.69873417721519,
      "grad_norm": 0.605807815077805,
      "learning_rate": 1.0390645929588197e-05,
      "loss": 0.2396,
      "step": 121
    },
    {
      "epoch": 3.729113924050633,
      "grad_norm": 0.6245049664156804,
      "learning_rate": 1.0234425736032607e-05,
      "loss": 0.251,
      "step": 122
    },
    {
      "epoch": 3.759493670886076,
      "grad_norm": 0.5569457656088611,
      "learning_rate": 1.007814827552384e-05,
      "loss": 0.2819,
      "step": 123
    },
    {
      "epoch": 3.7898734177215188,
      "grad_norm": 0.5624015217598239,
      "learning_rate": 9.92185172447616e-06,
      "loss": 0.2689,
      "step": 124
    },
    {
      "epoch": 3.820253164556962,
      "grad_norm": 0.5812377663228203,
      "learning_rate": 9.765574263967397e-06,
      "loss": 0.2813,
      "step": 125
    },
    {
      "epoch": 3.850632911392405,
      "grad_norm": 0.5599182490663513,
      "learning_rate": 9.609354070411807e-06,
      "loss": 0.257,
      "step": 126
    },
    {
      "epoch": 3.8810126582278484,
      "grad_norm": 0.6016379361147123,
      "learning_rate": 9.453229306234143e-06,
      "loss": 0.2843,
      "step": 127
    },
    {
      "epoch": 3.911392405063291,
      "grad_norm": 0.5684696439235135,
      "learning_rate": 9.297238110547075e-06,
      "loss": 0.2456,
      "step": 128
    },
    {
      "epoch": 3.9417721518987343,
      "grad_norm": 0.6221379537641375,
      "learning_rate": 9.14141858983434e-06,
      "loss": 0.281,
      "step": 129
    },
    {
      "epoch": 3.972151898734177,
      "grad_norm": 0.5382276086945389,
      "learning_rate": 8.985808808641883e-06,
      "loss": 0.239,
      "step": 130
    },
    {
      "epoch": 4.010126582278481,
      "grad_norm": 0.559689443479499,
      "learning_rate": 8.830446780279175e-06,
      "loss": 0.2443,
      "step": 131
    },
    {
      "epoch": 4.040506329113924,
      "grad_norm": 0.6771012317985544,
      "learning_rate": 8.675370457533122e-06,
      "loss": 0.1531,
      "step": 132
    },
    {
      "epoch": 4.0708860759493675,
      "grad_norm": 0.4818948876907526,
      "learning_rate": 8.520617723396702e-06,
      "loss": 0.1804,
      "step": 133
    },
    {
      "epoch": 4.10126582278481,
      "grad_norm": 0.46716252875726966,
      "learning_rate": 8.366226381814698e-06,
      "loss": 0.1701,
      "step": 134
    },
    {
      "epoch": 4.131645569620253,
      "grad_norm": 0.7589325984739113,
      "learning_rate": 8.212234148448708e-06,
      "loss": 0.2187,
      "step": 135
    },
    {
      "epoch": 4.162025316455696,
      "grad_norm": 0.6267149137544038,
      "learning_rate": 8.058678641463724e-06,
      "loss": 0.1734,
      "step": 136
    },
    {
      "epoch": 4.192405063291139,
      "grad_norm": 0.5366476862319358,
      "learning_rate": 7.905597372338558e-06,
      "loss": 0.1631,
      "step": 137
    },
    {
      "epoch": 4.222784810126583,
      "grad_norm": 0.5026922154496697,
      "learning_rate": 7.753027736702283e-06,
      "loss": 0.1769,
      "step": 138
    },
    {
      "epoch": 4.253164556962025,
      "grad_norm": 0.4838485106407902,
      "learning_rate": 7.601007005199022e-06,
      "loss": 0.1776,
      "step": 139
    },
    {
      "epoch": 4.283544303797468,
      "grad_norm": 0.5389049533638387,
      "learning_rate": 7.449572314383237e-06,
      "loss": 0.1863,
      "step": 140
    },
    {
      "epoch": 4.313924050632911,
      "grad_norm": 0.4980671758980592,
      "learning_rate": 7.298760657647779e-06,
      "loss": 0.1876,
      "step": 141
    },
    {
      "epoch": 4.3443037974683545,
      "grad_norm": 0.4831633711677618,
      "learning_rate": 7.148608876186931e-06,
      "loss": 0.1855,
      "step": 142
    },
    {
      "epoch": 4.374683544303798,
      "grad_norm": 0.4641726517540608,
      "learning_rate": 6.999153649996595e-06,
      "loss": 0.1737,
      "step": 143
    },
    {
      "epoch": 4.405063291139241,
      "grad_norm": 0.5158696225253259,
      "learning_rate": 6.8504314889138956e-06,
      "loss": 0.1648,
      "step": 144
    },
    {
      "epoch": 4.435443037974683,
      "grad_norm": 0.48928414438839596,
      "learning_rate": 6.702478723698336e-06,
      "loss": 0.1854,
      "step": 145
    },
    {
      "epoch": 4.465822784810126,
      "grad_norm": 0.4794880251747116,
      "learning_rate": 6.555331497156671e-06,
      "loss": 0.1733,
      "step": 146
    },
    {
      "epoch": 4.49620253164557,
      "grad_norm": 0.4905261382023579,
      "learning_rate": 6.4090257553137566e-06,
      "loss": 0.2232,
      "step": 147
    },
    {
      "epoch": 4.526582278481013,
      "grad_norm": 0.4844078129632487,
      "learning_rate": 6.263597238631405e-06,
      "loss": 0.1751,
      "step": 148
    },
    {
      "epoch": 4.556962025316456,
      "grad_norm": 0.4232714617536975,
      "learning_rate": 6.119081473277502e-06,
      "loss": 0.1436,
      "step": 149
    },
    {
      "epoch": 4.587341772151898,
      "grad_norm": 0.4998005504636481,
      "learning_rate": 5.975513762447465e-06,
      "loss": 0.1788,
      "step": 150
    },
    {
      "epoch": 4.6177215189873415,
      "grad_norm": 0.46096522684791935,
      "learning_rate": 5.832929177740134e-06,
      "loss": 0.1723,
      "step": 151
    },
    {
      "epoch": 4.648101265822785,
      "grad_norm": 0.44310779188262656,
      "learning_rate": 5.6913625505902966e-06,
      "loss": 0.2135,
      "step": 152
    },
    {
      "epoch": 4.678481012658228,
      "grad_norm": 0.43432359900287715,
      "learning_rate": 5.550848463759835e-06,
      "loss": 0.2117,
      "step": 153
    },
    {
      "epoch": 4.708860759493671,
      "grad_norm": 0.45740523869128913,
      "learning_rate": 5.411421242889643e-06,
      "loss": 0.1759,
      "step": 154
    },
    {
      "epoch": 4.739240506329114,
      "grad_norm": 0.43490763870000326,
      "learning_rate": 5.273114948114346e-06,
      "loss": 0.1997,
      "step": 155
    },
    {
      "epoch": 4.769620253164557,
      "grad_norm": 0.42643435294558374,
      "learning_rate": 5.135963365741892e-06,
      "loss": 0.1589,
      "step": 156
    },
    {
      "epoch": 4.8,
      "grad_norm": 0.4579379938540636,
      "learning_rate": 5.000000000000003e-06,
      "loss": 0.1688,
      "step": 157
    },
    {
      "epoch": 4.830379746835443,
      "grad_norm": 0.46926368219814496,
      "learning_rate": 4.865258064851579e-06,
      "loss": 0.1789,
      "step": 158
    },
    {
      "epoch": 4.860759493670886,
      "grad_norm": 0.42237044030544385,
      "learning_rate": 4.731770475880995e-06,
      "loss": 0.1665,
      "step": 159
    },
    {
      "epoch": 4.891139240506329,
      "grad_norm": 0.4169500416949454,
      "learning_rate": 4.599569842253244e-06,
      "loss": 0.2036,
      "step": 160
    },
    {
      "epoch": 4.921518987341772,
      "grad_norm": 0.44081050832610535,
      "learning_rate": 4.468688458748006e-06,
      "loss": 0.213,
      "step": 161
    },
    {
      "epoch": 4.951898734177215,
      "grad_norm": 0.4316863039504753,
      "learning_rate": 4.339158297870469e-06,
      "loss": 0.1749,
      "step": 162
    },
    {
      "epoch": 4.982278481012658,
      "grad_norm": 0.4324971156906421,
      "learning_rate": 4.211011002040885e-06,
      "loss": 0.1607,
      "step": 163
    },
    {
      "epoch": 5.020253164556962,
      "grad_norm": 0.47288082007126075,
      "learning_rate": 4.084277875864776e-06,
      "loss": 0.1362,
      "step": 164
    },
    {
      "epoch": 5.050632911392405,
      "grad_norm": 0.5108205750113229,
      "learning_rate": 3.958989878485644e-06,
      "loss": 0.1461,
      "step": 165
    },
    {
      "epoch": 5.0810126582278485,
      "grad_norm": 0.39521771420241175,
      "learning_rate": 3.83517761602209e-06,
      "loss": 0.192,
      "step": 166
    },
    {
      "epoch": 5.111392405063291,
      "grad_norm": 0.3238187214130239,
      "learning_rate": 3.712871334091154e-06,
      "loss": 0.1236,
      "step": 167
    },
    {
      "epoch": 5.141772151898734,
      "grad_norm": 0.3549472032181739,
      "learning_rate": 3.592100910419738e-06,
      "loss": 0.1787,
      "step": 168
    },
    {
      "epoch": 5.172151898734177,
      "grad_norm": 0.3414716515241834,
      "learning_rate": 3.4728958475459052e-06,
      "loss": 0.1366,
      "step": 169
    },
    {
      "epoch": 5.2025316455696204,
      "grad_norm": 0.4134782796791461,
      "learning_rate": 3.355285265611784e-06,
      "loss": 0.1401,
      "step": 170
    },
    {
      "epoch": 5.232911392405064,
      "grad_norm": 0.42514728956855424,
      "learning_rate": 3.2392978952499553e-06,
      "loss": 0.1247,
      "step": 171
    },
    {
      "epoch": 5.263291139240506,
      "grad_norm": 0.4877649516931133,
      "learning_rate": 3.1249620705649417e-06,
      "loss": 0.1472,
      "step": 172
    },
    {
      "epoch": 5.293670886075949,
      "grad_norm": 0.37482117862804326,
      "learning_rate": 3.0123057222115835e-06,
      "loss": 0.1306,
      "step": 173
    },
    {
      "epoch": 5.324050632911392,
      "grad_norm": 0.3546177403178053,
      "learning_rate": 2.9013563705719673e-06,
      "loss": 0.1328,
      "step": 174
    },
    {
      "epoch": 5.3544303797468356,
      "grad_norm": 0.3259361591237675,
      "learning_rate": 2.7921411190325753e-06,
      "loss": 0.1397,
      "step": 175
    },
    {
      "epoch": 5.384810126582279,
      "grad_norm": 0.3374114179522559,
      "learning_rate": 2.6846866473633126e-06,
      "loss": 0.1525,
      "step": 176
    },
    {
      "epoch": 5.415189873417722,
      "grad_norm": 0.3549149472069972,
      "learning_rate": 2.579019205199992e-06,
      "loss": 0.1358,
      "step": 177
    },
    {
      "epoch": 5.445569620253164,
      "grad_norm": 0.3307718981628869,
      "learning_rate": 2.4751646056319334e-06,
      "loss": 0.1314,
      "step": 178
    },
    {
      "epoch": 5.4759493670886075,
      "grad_norm": 0.32669053163129463,
      "learning_rate": 2.373148218896182e-06,
      "loss": 0.1168,
      "step": 179
    },
    {
      "epoch": 5.506329113924051,
      "grad_norm": 0.3259651847932475,
      "learning_rate": 2.2729949661798876e-06,
      "loss": 0.1415,
      "step": 180
    },
    {
      "epoch": 5.536708860759494,
      "grad_norm": 0.325038079393212,
      "learning_rate": 2.174729313532433e-06,
      "loss": 0.1256,
      "step": 181
    },
    {
      "epoch": 5.567088607594937,
      "grad_norm": 0.3141018286982485,
      "learning_rate": 2.078375265888707e-06,
      "loss": 0.1301,
      "step": 182
    },
    {
      "epoch": 5.597468354430379,
      "grad_norm": 0.3176620545390375,
      "learning_rate": 1.9839563612050273e-06,
      "loss": 0.1153,
      "step": 183
    },
    {
      "epoch": 5.627848101265823,
      "grad_norm": 0.3301647459616926,
      "learning_rate": 1.8914956647091497e-06,
      "loss": 0.1537,
      "step": 184
    },
    {
      "epoch": 5.658227848101266,
      "grad_norm": 0.3397085044879637,
      "learning_rate": 1.8010157632657544e-06,
      "loss": 0.1341,
      "step": 185
    },
    {
      "epoch": 5.688607594936709,
      "grad_norm": 0.3388905356198473,
      "learning_rate": 1.7125387598587862e-06,
      "loss": 0.1557,
      "step": 186
    },
    {
      "epoch": 5.718987341772152,
      "grad_norm": 0.3360444342069633,
      "learning_rate": 1.6260862681919965e-06,
      "loss": 0.119,
      "step": 187
    },
    {
      "epoch": 5.749367088607595,
      "grad_norm": 0.3548570154065725,
      "learning_rate": 1.5416794074090258e-06,
      "loss": 0.1572,
      "step": 188
    },
    {
      "epoch": 5.779746835443038,
      "grad_norm": 0.35386114026628934,
      "learning_rate": 1.459338796934293e-06,
      "loss": 0.1387,
      "step": 189
    },
    {
      "epoch": 5.810126582278481,
      "grad_norm": 0.34933293542753524,
      "learning_rate": 1.3790845514359363e-06,
      "loss": 0.1463,
      "step": 190
    },
    {
      "epoch": 5.840506329113924,
      "grad_norm": 0.33080764916648525,
      "learning_rate": 1.300936275912098e-06,
      "loss": 0.1313,
      "step": 191
    },
    {
      "epoch": 5.870886075949367,
      "grad_norm": 0.32479303869035625,
      "learning_rate": 1.224913060901688e-06,
      "loss": 0.123,
      "step": 192
    },
    {
      "epoch": 5.9012658227848105,
      "grad_norm": 0.3504712677214653,
      "learning_rate": 1.1510334778208332e-06,
      "loss": 0.1245,
      "step": 193
    },
    {
      "epoch": 5.931645569620253,
      "grad_norm": 0.3185986850437643,
      "learning_rate": 1.0793155744261352e-06,
      "loss": 0.1238,
      "step": 194
    },
    {
      "epoch": 5.962025316455696,
      "grad_norm": 0.3265603887208977,
      "learning_rate": 1.0097768704058542e-06,
      "loss": 0.1623,
      "step": 195
    },
    {
      "epoch": 5.992405063291139,
      "grad_norm": 0.33545098544813473,
      "learning_rate": 9.424343531000968e-07,
      "loss": 0.1353,
      "step": 196
    },
    {
      "epoch": 6.030379746835443,
      "grad_norm": 0.31145041522431705,
      "learning_rate": 8.773044733510338e-07,
      "loss": 0.1185,
      "step": 197
    },
    {
      "epoch": 6.060759493670886,
      "grad_norm": 0.32178985162717133,
      "learning_rate": 8.144031414842012e-07,
      "loss": 0.1562,
      "step": 198
    },
    {
      "epoch": 6.091139240506329,
      "grad_norm": 0.3109905615008904,
      "learning_rate": 7.537457234218271e-07,
      "loss": 0.1399,
      "step": 199
    },
    {
      "epoch": 6.121518987341772,
      "grad_norm": 0.29859282658595293,
      "learning_rate": 6.953470369291349e-07,
      "loss": 0.1209,
      "step": 200
    },
    {
      "epoch": 6.151898734177215,
      "grad_norm": 0.32612724958390515,
      "learning_rate": 6.392213479945852e-07,
      "loss": 0.1172,
      "step": 201
    },
    {
      "epoch": 6.182278481012658,
      "grad_norm": 0.26138279202370573,
      "learning_rate": 5.853823673448877e-07,
      "loss": 0.1313,
      "step": 202
    },
    {
      "epoch": 6.2126582278481015,
      "grad_norm": 0.28727956221648643,
      "learning_rate": 5.33843247095659e-07,
      "loss": 0.1563,
      "step": 203
    },
    {
      "epoch": 6.243037974683545,
      "grad_norm": 0.2864062480611515,
      "learning_rate": 4.846165775385459e-07,
      "loss": 0.1395,
      "step": 204
    },
    {
      "epoch": 6.273417721518987,
      "grad_norm": 0.2815198905586832,
      "learning_rate": 4.3771438406559173e-07,
      "loss": 0.1196,
      "step": 205
    },
    {
      "epoch": 6.30379746835443,
      "grad_norm": 0.24928897666772976,
      "learning_rate": 3.931481242315993e-07,
      "loss": 0.1037,
      "step": 206
    },
    {
      "epoch": 6.334177215189873,
      "grad_norm": 0.2645356258035024,
      "learning_rate": 3.5092868495520294e-07,
      "loss": 0.1086,
      "step": 207
    },
    {
      "epoch": 6.364556962025317,
      "grad_norm": 0.26878790834169397,
      "learning_rate": 3.110663798593616e-07,
      "loss": 0.1252,
      "step": 208
    },
    {
      "epoch": 6.39493670886076,
      "grad_norm": 0.24158485069168198,
      "learning_rate": 2.735709467518699e-07,
      "loss": 0.1064,
      "step": 209
    },
    {
      "epoch": 6.425316455696202,
      "grad_norm": 0.2894081062776572,
      "learning_rate": 2.384515452465475e-07,
      "loss": 0.1467,
      "step": 210
    },
    {
      "epoch": 6.455696202531645,
      "grad_norm": 0.2682251204654294,
      "learning_rate": 2.0571675452567997e-07,
      "loss": 0.1426,
      "step": 211
    },
    {
      "epoch": 6.4860759493670885,
      "grad_norm": 0.2757021644220485,
      "learning_rate": 1.7537457124423896e-07,
      "loss": 0.108,
      "step": 212
    },
    {
      "epoch": 6.516455696202532,
      "grad_norm": 0.26666054538960254,
      "learning_rate": 1.474324075764111e-07,
      "loss": 0.1308,
      "step": 213
    },
    {
      "epoch": 6.546835443037975,
      "grad_norm": 0.27128326398580616,
      "learning_rate": 1.2189708940490653e-07,
      "loss": 0.1186,
      "step": 214
    },
    {
      "epoch": 6.577215189873417,
      "grad_norm": 0.2516746818356812,
      "learning_rate": 9.877485465349057e-08,
      "loss": 0.1421,
      "step": 215
    },
    {
      "epoch": 6.6075949367088604,
      "grad_norm": 0.2811026990882255,
      "learning_rate": 7.807135176314707e-08,
      "loss": 0.1157,
      "step": 216
    },
    {
      "epoch": 6.637974683544304,
      "grad_norm": 0.2781430871205741,
      "learning_rate": 5.979163831223988e-08,
      "loss": 0.1158,
      "step": 217
    },
    {
      "epoch": 6.668354430379747,
      "grad_norm": 0.26752367927032317,
      "learning_rate": 4.394017978101905e-08,
      "loss": 0.1186,
      "step": 218
    },
    {
      "epoch": 6.69873417721519,
      "grad_norm": 0.25689888335881983,
      "learning_rate": 3.0520848460765525e-08,
      "loss": 0.1144,
      "step": 219
    },
    {
      "epoch": 6.729113924050633,
      "grad_norm": 0.29632821016666816,
      "learning_rate": 1.9536922507841227e-08,
      "loss": 0.1483,
      "step": 220
    },
    {
      "epoch": 6.759493670886076,
      "grad_norm": 0.26832793754980955,
      "learning_rate": 1.099108514288627e-08,
      "loss": 0.1267,
      "step": 221
    },
    {
      "epoch": 6.789873417721519,
      "grad_norm": 0.2705156579423521,
      "learning_rate": 4.885423995341088e-09,
      "loss": 0.1328,
      "step": 222
    },
    {
      "epoch": 6.820253164556962,
      "grad_norm": 0.2592374115580099,
      "learning_rate": 1.2214305934699078e-09,
      "loss": 0.0986,
      "step": 223
    },
    {
      "epoch": 6.850632911392405,
      "grad_norm": 0.26582605276588783,
      "learning_rate": 0.0,
      "loss": 0.1222,
      "step": 224
    },
    {
      "epoch": 6.850632911392405,
      "step": 224,
      "total_flos": 2.2606970276859085e+17,
      "train_loss": 0.3831337048738663,
      "train_runtime": 9209.4419,
      "train_samples_per_second": 2.402,
      "train_steps_per_second": 0.024
    }
  ],
  "logging_steps": 1,
  "max_steps": 224,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 7,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.2606970276859085e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}