{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.948051948051948,
  "eval_steps": 500,
  "global_step": 190,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.025974025974025976,
      "grad_norm": 7.932461880382458,
      "learning_rate": 2.105263157894737e-06,
      "loss": 1.3022,
      "step": 1
    },
    {
      "epoch": 0.05194805194805195,
      "grad_norm": 7.914768259228103,
      "learning_rate": 4.210526315789474e-06,
      "loss": 1.2794,
      "step": 2
    },
    {
      "epoch": 0.07792207792207792,
      "grad_norm": 7.484077281744907,
      "learning_rate": 6.31578947368421e-06,
      "loss": 1.2625,
      "step": 3
    },
    {
      "epoch": 0.1038961038961039,
      "grad_norm": 5.714449349246834,
      "learning_rate": 8.421052631578948e-06,
      "loss": 1.2443,
      "step": 4
    },
    {
      "epoch": 0.12987012987012986,
      "grad_norm": 3.1210181292904724,
      "learning_rate": 1.0526315789473684e-05,
      "loss": 1.1532,
      "step": 5
    },
    {
      "epoch": 0.15584415584415584,
      "grad_norm": 5.253391384757655,
      "learning_rate": 1.263157894736842e-05,
      "loss": 1.0944,
      "step": 6
    },
    {
      "epoch": 0.18181818181818182,
      "grad_norm": 5.463591834581839,
      "learning_rate": 1.4736842105263159e-05,
      "loss": 1.1138,
      "step": 7
    },
    {
      "epoch": 0.2077922077922078,
      "grad_norm": 7.387714938198011,
      "learning_rate": 1.6842105263157896e-05,
      "loss": 1.1008,
      "step": 8
    },
    {
      "epoch": 0.23376623376623376,
      "grad_norm": 6.205307398999116,
      "learning_rate": 1.894736842105263e-05,
      "loss": 1.0759,
      "step": 9
    },
    {
      "epoch": 0.2597402597402597,
      "grad_norm": 4.626405073408399,
      "learning_rate": 2.105263157894737e-05,
      "loss": 1.0411,
      "step": 10
    },
    {
      "epoch": 0.2857142857142857,
      "grad_norm": 3.5062975875923623,
      "learning_rate": 2.3157894736842107e-05,
      "loss": 0.9892,
      "step": 11
    },
    {
      "epoch": 0.3116883116883117,
      "grad_norm": 3.095922849196239,
      "learning_rate": 2.526315789473684e-05,
      "loss": 0.981,
      "step": 12
    },
    {
      "epoch": 0.33766233766233766,
      "grad_norm": 2.620362610628679,
      "learning_rate": 2.7368421052631583e-05,
      "loss": 0.9321,
      "step": 13
    },
    {
      "epoch": 0.36363636363636365,
      "grad_norm": 2.095917606876017,
      "learning_rate": 2.9473684210526317e-05,
      "loss": 0.9431,
      "step": 14
    },
    {
      "epoch": 0.38961038961038963,
      "grad_norm": 1.6957008846140307,
      "learning_rate": 3.157894736842106e-05,
      "loss": 0.937,
      "step": 15
    },
    {
      "epoch": 0.4155844155844156,
      "grad_norm": 1.7112898298596606,
      "learning_rate": 3.368421052631579e-05,
      "loss": 0.9237,
      "step": 16
    },
    {
      "epoch": 0.44155844155844154,
      "grad_norm": 1.7675227036340526,
      "learning_rate": 3.578947368421053e-05,
      "loss": 0.913,
      "step": 17
    },
    {
      "epoch": 0.4675324675324675,
      "grad_norm": 1.6383266641991525,
      "learning_rate": 3.789473684210526e-05,
      "loss": 0.9288,
      "step": 18
    },
    {
      "epoch": 0.4935064935064935,
      "grad_norm": 1.294665470376389,
      "learning_rate": 4e-05,
      "loss": 0.9351,
      "step": 19
    },
    {
      "epoch": 0.5194805194805194,
      "grad_norm": 1.604347276742893,
      "learning_rate": 3.999662483266646e-05,
      "loss": 0.8908,
      "step": 20
    },
    {
      "epoch": 0.5454545454545454,
      "grad_norm": 1.4355986610502338,
      "learning_rate": 3.9986500469841275e-05,
      "loss": 0.9129,
      "step": 21
    },
    {
      "epoch": 0.5714285714285714,
      "grad_norm": 630.6282393505426,
      "learning_rate": 3.996963032866633e-05,
      "loss": 1.137,
      "step": 22
    },
    {
      "epoch": 0.5974025974025974,
      "grad_norm": 64.18229097051045,
      "learning_rate": 3.994602010309655e-05,
      "loss": 0.9911,
      "step": 23
    },
    {
      "epoch": 0.6233766233766234,
      "grad_norm": 3.1233891543931755,
      "learning_rate": 3.991567776197815e-05,
      "loss": 0.9639,
      "step": 24
    },
    {
      "epoch": 0.6493506493506493,
      "grad_norm": 2.077319867927905,
      "learning_rate": 3.9878613546358996e-05,
      "loss": 0.9394,
      "step": 25
    },
    {
      "epoch": 0.6753246753246753,
      "grad_norm": 6.491527923131102,
      "learning_rate": 3.983483996603205e-05,
      "loss": 0.8881,
      "step": 26
    },
    {
      "epoch": 0.7012987012987013,
      "grad_norm": 39.42852606734072,
      "learning_rate": 3.978437179531316e-05,
      "loss": 0.9145,
      "step": 27
    },
    {
      "epoch": 0.7272727272727273,
      "grad_norm": 3.4103842126583013,
      "learning_rate": 3.972722606805445e-05,
      "loss": 0.9565,
      "step": 28
    },
    {
      "epoch": 0.7532467532467533,
      "grad_norm": 2.603677290738666,
      "learning_rate": 3.9663422071895103e-05,
      "loss": 0.9527,
      "step": 29
    },
    {
      "epoch": 0.7792207792207793,
      "grad_norm": 1.5693962270477466,
      "learning_rate": 3.959298134175148e-05,
      "loss": 0.9261,
      "step": 30
    },
    {
      "epoch": 0.8051948051948052,
      "grad_norm": 1.9966129861478277,
      "learning_rate": 3.9515927652548714e-05,
      "loss": 0.8946,
      "step": 31
    },
    {
      "epoch": 0.8311688311688312,
      "grad_norm": 2.6605968358600744,
      "learning_rate": 3.943228701119628e-05,
      "loss": 0.9076,
      "step": 32
    },
    {
      "epoch": 0.8571428571428571,
      "grad_norm": 2.1460676505845138,
      "learning_rate": 3.934208764781022e-05,
      "loss": 0.8956,
      "step": 33
    },
    {
      "epoch": 0.8831168831168831,
      "grad_norm": 1.5905762959039638,
      "learning_rate": 3.924536000618501e-05,
      "loss": 0.8986,
      "step": 34
    },
    {
      "epoch": 0.9090909090909091,
      "grad_norm": 1.53645783721347,
      "learning_rate": 3.9142136733518285e-05,
      "loss": 0.8862,
      "step": 35
    },
    {
      "epoch": 0.935064935064935,
      "grad_norm": 1.68321757769248,
      "learning_rate": 3.903245266939184e-05,
      "loss": 0.9017,
      "step": 36
    },
    {
      "epoch": 0.961038961038961,
      "grad_norm": 1.30815074965975,
      "learning_rate": 3.8916344834012695e-05,
      "loss": 0.8698,
      "step": 37
    },
    {
      "epoch": 0.987012987012987,
      "grad_norm": 1.165747960130663,
      "learning_rate": 3.879385241571817e-05,
      "loss": 0.8618,
      "step": 38
    },
    {
      "epoch": 1.0162337662337662,
      "grad_norm": 1.182349191382658,
      "learning_rate": 3.866501675774914e-05,
      "loss": 0.7754,
      "step": 39
    },
    {
      "epoch": 1.0422077922077921,
      "grad_norm": 1.1769523292955406,
      "learning_rate": 3.8529881344296037e-05,
      "loss": 0.7007,
      "step": 40
    },
    {
      "epoch": 1.0681818181818181,
      "grad_norm": 1.0627768684949523,
      "learning_rate": 3.8388491785822154e-05,
      "loss": 0.6888,
      "step": 41
    },
    {
      "epoch": 1.094155844155844,
      "grad_norm": 1.0474113890412635,
      "learning_rate": 3.8240895803669415e-05,
      "loss": 0.666,
      "step": 42
    },
    {
      "epoch": 1.12012987012987,
      "grad_norm": 1.1513266436057166,
      "learning_rate": 3.808714321395155e-05,
      "loss": 0.6711,
      "step": 43
    },
    {
      "epoch": 1.146103896103896,
      "grad_norm": 1.0575614450294766,
      "learning_rate": 3.792728591074041e-05,
      "loss": 0.6675,
      "step": 44
    },
    {
      "epoch": 1.172077922077922,
      "grad_norm": 1.0417037432847753,
      "learning_rate": 3.776137784855076e-05,
      "loss": 0.6505,
      "step": 45
    },
    {
      "epoch": 1.198051948051948,
      "grad_norm": 1.0718429071818538,
      "learning_rate": 3.758947502412978e-05,
      "loss": 0.6398,
      "step": 46
    },
    {
      "epoch": 1.224025974025974,
      "grad_norm": 1.035859051149055,
      "learning_rate": 3.741163545755725e-05,
      "loss": 0.6344,
      "step": 47
    },
    {
      "epoch": 1.25,
      "grad_norm": 1.2059393425085077,
      "learning_rate": 3.722791917266273e-05,
      "loss": 0.6576,
      "step": 48
    },
    {
      "epoch": 1.275974025974026,
      "grad_norm": 0.8647464266057819,
      "learning_rate": 3.703838817676654e-05,
      "loss": 0.634,
      "step": 49
    },
    {
      "epoch": 1.301948051948052,
      "grad_norm": 0.9794755119362144,
      "learning_rate": 3.684310643975132e-05,
      "loss": 0.6587,
      "step": 50
    },
    {
      "epoch": 1.327922077922078,
      "grad_norm": 0.8945933571618422,
      "learning_rate": 3.6642139872471006e-05,
      "loss": 0.6489,
      "step": 51
    },
    {
      "epoch": 1.353896103896104,
      "grad_norm": 0.7218628490321455,
      "learning_rate": 3.64355563045049e-05,
      "loss": 0.6499,
      "step": 52
    },
    {
      "epoch": 1.37987012987013,
      "grad_norm": 0.9164999911062107,
      "learning_rate": 3.622342546126405e-05,
      "loss": 0.6396,
      "step": 53
    },
    {
      "epoch": 1.405844155844156,
      "grad_norm": 0.8475563662080481,
      "learning_rate": 3.600581894045768e-05,
      "loss": 0.6728,
      "step": 54
    },
    {
      "epoch": 1.4318181818181819,
      "grad_norm": 0.9702631527667891,
      "learning_rate": 3.578281018792788e-05,
      "loss": 0.6644,
      "step": 55
    },
    {
      "epoch": 1.4577922077922079,
      "grad_norm": 0.9538702840059207,
      "learning_rate": 3.555447447286028e-05,
      "loss": 0.643,
      "step": 56
    },
    {
      "epoch": 1.4837662337662338,
      "grad_norm": 0.8910634080576203,
      "learning_rate": 3.532088886237956e-05,
      "loss": 0.6628,
      "step": 57
    },
    {
      "epoch": 1.5097402597402598,
      "grad_norm": 0.9694684889310542,
      "learning_rate": 3.508213219553793e-05,
      "loss": 0.6267,
      "step": 58
    },
    {
      "epoch": 1.5357142857142856,
      "grad_norm": 0.7645363444588047,
      "learning_rate": 3.483828505670563e-05,
      "loss": 0.6621,
      "step": 59
    },
    {
      "epoch": 1.5616883116883118,
      "grad_norm": 1.0003412015299207,
      "learning_rate": 3.458942974837242e-05,
      "loss": 0.6523,
      "step": 60
    },
    {
      "epoch": 1.5876623376623376,
      "grad_norm": 0.9130736095702605,
      "learning_rate": 3.433565026336903e-05,
      "loss": 0.6607,
      "step": 61
    },
    {
      "epoch": 1.6136363636363638,
      "grad_norm": 0.9845762925311329,
      "learning_rate": 3.4077032256518236e-05,
      "loss": 0.6396,
      "step": 62
    },
    {
      "epoch": 1.6396103896103895,
      "grad_norm": 0.8116185410937748,
      "learning_rate": 3.381366301572489e-05,
      "loss": 0.6413,
      "step": 63
    },
    {
      "epoch": 1.6655844155844157,
      "grad_norm": 0.86437358110291,
      "learning_rate": 3.354563143251483e-05,
      "loss": 0.6711,
      "step": 64
    },
    {
      "epoch": 1.6915584415584415,
      "grad_norm": 0.8420048228007022,
      "learning_rate": 3.327302797203243e-05,
      "loss": 0.6604,
      "step": 65
    },
    {
      "epoch": 1.7175324675324677,
      "grad_norm": 0.7816899345223021,
      "learning_rate": 3.29959446425072e-05,
      "loss": 0.651,
      "step": 66
    },
    {
      "epoch": 1.7435064935064934,
      "grad_norm": 0.9303056765525075,
      "learning_rate": 3.2714474964199365e-05,
      "loss": 0.6289,
      "step": 67
    },
    {
      "epoch": 1.7694805194805194,
      "grad_norm": 0.7805801403650556,
      "learning_rate": 3.24287139378353e-05,
      "loss": 0.6411,
      "step": 68
    },
    {
      "epoch": 1.7954545454545454,
      "grad_norm": 0.753331126461648,
      "learning_rate": 3.213875801254314e-05,
      "loss": 0.6646,
      "step": 69
    },
    {
      "epoch": 1.8214285714285714,
      "grad_norm": 0.7358002312013492,
      "learning_rate": 3.1844705053299606e-05,
      "loss": 0.6293,
      "step": 70
    },
    {
      "epoch": 1.8474025974025974,
      "grad_norm": 0.8070564650099749,
      "learning_rate": 3.154665430789893e-05,
      "loss": 0.6596,
      "step": 71
    },
    {
      "epoch": 1.8733766233766234,
      "grad_norm": 0.7226075974955157,
      "learning_rate": 3.1244706373455084e-05,
      "loss": 0.667,
      "step": 72
    },
    {
      "epoch": 1.8993506493506493,
      "grad_norm": 0.7791333934794239,
      "learning_rate": 3.093896316244855e-05,
      "loss": 0.6465,
      "step": 73
    },
    {
      "epoch": 1.9253246753246753,
      "grad_norm": 0.7754676549638453,
      "learning_rate": 3.062952786832912e-05,
      "loss": 0.6554,
      "step": 74
    },
    {
      "epoch": 1.9512987012987013,
      "grad_norm": 0.6557877212038498,
      "learning_rate": 3.0316504930686485e-05,
      "loss": 0.6566,
      "step": 75
    },
    {
      "epoch": 1.9772727272727273,
      "grad_norm": 0.8194426729485009,
      "learning_rate": 3.0000000000000004e-05,
      "loss": 0.6627,
      "step": 76
    },
    {
      "epoch": 2.0064935064935066,
      "grad_norm": 0.7334083955009458,
      "learning_rate": 2.9680119901979984e-05,
      "loss": 0.6111,
      "step": 77
    },
    {
      "epoch": 2.0324675324675323,
      "grad_norm": 1.1895868881255909,
      "learning_rate": 2.935697260151216e-05,
      "loss": 0.3871,
      "step": 78
    },
    {
      "epoch": 2.0584415584415585,
      "grad_norm": 0.9604279263135902,
      "learning_rate": 2.903066716621779e-05,
      "loss": 0.4077,
      "step": 79
    },
    {
      "epoch": 2.0844155844155843,
      "grad_norm": 1.1393762686141848,
      "learning_rate": 2.8701313729641467e-05,
      "loss": 0.3866,
      "step": 80
    },
    {
      "epoch": 2.1103896103896105,
      "grad_norm": 0.7106851428516716,
      "learning_rate": 2.8369023454079223e-05,
      "loss": 0.3825,
      "step": 81
    },
    {
      "epoch": 2.1363636363636362,
      "grad_norm": 0.8224703275776866,
      "learning_rate": 2.8033908493059394e-05,
      "loss": 0.3714,
      "step": 82
    },
    {
      "epoch": 2.1623376623376624,
      "grad_norm": 0.6836375654772662,
      "learning_rate": 2.7696081953488917e-05,
      "loss": 0.3888,
      "step": 83
    },
    {
      "epoch": 2.188311688311688,
      "grad_norm": 0.7313622114325852,
      "learning_rate": 2.735565785747787e-05,
      "loss": 0.3642,
      "step": 84
    },
    {
      "epoch": 2.2142857142857144,
      "grad_norm": 0.6822815355692488,
      "learning_rate": 2.7012751103855092e-05,
      "loss": 0.3783,
      "step": 85
    },
    {
      "epoch": 2.24025974025974,
      "grad_norm": 0.6553172875405961,
      "learning_rate": 2.6667477429387915e-05,
      "loss": 0.3754,
      "step": 86
    },
    {
      "epoch": 2.2662337662337664,
      "grad_norm": 0.6074920244076911,
      "learning_rate": 2.6319953369719057e-05,
      "loss": 0.3473,
      "step": 87
    },
    {
      "epoch": 2.292207792207792,
      "grad_norm": 0.6021128235097986,
      "learning_rate": 2.5970296220033894e-05,
      "loss": 0.3471,
      "step": 88
    },
    {
      "epoch": 2.3181818181818183,
      "grad_norm": 0.6940589972604451,
      "learning_rate": 2.5618623995471394e-05,
      "loss": 0.3837,
      "step": 89
    },
    {
      "epoch": 2.344155844155844,
      "grad_norm": 0.6033654970817208,
      "learning_rate": 2.5265055391291986e-05,
      "loss": 0.3528,
      "step": 90
    },
    {
      "epoch": 2.3701298701298703,
      "grad_norm": 0.6333023948866248,
      "learning_rate": 2.4909709742815986e-05,
      "loss": 0.3649,
      "step": 91
    },
    {
      "epoch": 2.396103896103896,
      "grad_norm": 0.6001340560802214,
      "learning_rate": 2.4552706985145873e-05,
      "loss": 0.3669,
      "step": 92
    },
    {
      "epoch": 2.4220779220779223,
      "grad_norm": 0.6197546210826222,
      "learning_rate": 2.4194167612686208e-05,
      "loss": 0.3532,
      "step": 93
    },
    {
      "epoch": 2.448051948051948,
      "grad_norm": 0.565080044721377,
      "learning_rate": 2.3834212638474773e-05,
      "loss": 0.3657,
      "step": 94
    },
    {
      "epoch": 2.474025974025974,
      "grad_norm": 0.5902739460347666,
      "learning_rate": 2.3472963553338614e-05,
      "loss": 0.3621,
      "step": 95
    },
    {
      "epoch": 2.5,
      "grad_norm": 0.5749686649380882,
      "learning_rate": 2.3110542284888866e-05,
      "loss": 0.3467,
      "step": 96
    },
    {
      "epoch": 2.525974025974026,
      "grad_norm": 0.5450163127235501,
      "learning_rate": 2.2747071156368166e-05,
      "loss": 0.3624,
      "step": 97
    },
    {
      "epoch": 2.551948051948052,
      "grad_norm": 0.6232436601785027,
      "learning_rate": 2.2382672845364474e-05,
      "loss": 0.3564,
      "step": 98
    },
    {
      "epoch": 2.5779220779220777,
      "grad_norm": 0.5515744279861114,
      "learning_rate": 2.201747034240537e-05,
      "loss": 0.3658,
      "step": 99
    },
    {
      "epoch": 2.603896103896104,
      "grad_norm": 0.5362907375585828,
      "learning_rate": 2.165158690944665e-05,
      "loss": 0.3473,
      "step": 100
    },
    {
      "epoch": 2.62987012987013,
      "grad_norm": 0.6161976462395182,
      "learning_rate": 2.1285146038269406e-05,
      "loss": 0.3416,
      "step": 101
    },
    {
      "epoch": 2.655844155844156,
      "grad_norm": 0.5199919758117065,
      "learning_rate": 2.091827140879944e-05,
      "loss": 0.3448,
      "step": 102
    },
    {
      "epoch": 2.6818181818181817,
      "grad_norm": 0.5858886775846843,
      "learning_rate": 2.0551086847363245e-05,
      "loss": 0.3433,
      "step": 103
    },
    {
      "epoch": 2.707792207792208,
      "grad_norm": 0.5676263304852041,
      "learning_rate": 2.0183716284894533e-05,
      "loss": 0.3467,
      "step": 104
    },
    {
      "epoch": 2.7337662337662336,
      "grad_norm": 0.5586462671917939,
      "learning_rate": 1.9816283715105474e-05,
      "loss": 0.3482,
      "step": 105
    },
    {
      "epoch": 2.75974025974026,
      "grad_norm": 0.6276307789744222,
      "learning_rate": 1.9448913152636765e-05,
      "loss": 0.3545,
      "step": 106
    },
    {
      "epoch": 2.7857142857142856,
      "grad_norm": 0.5484342032145305,
      "learning_rate": 1.9081728591200565e-05,
      "loss": 0.3666,
      "step": 107
    },
    {
      "epoch": 2.811688311688312,
      "grad_norm": 0.5688020278596353,
      "learning_rate": 1.87148539617306e-05,
      "loss": 0.344,
      "step": 108
    },
    {
      "epoch": 2.8376623376623376,
      "grad_norm": 0.539614144085222,
      "learning_rate": 1.8348413090553356e-05,
      "loss": 0.3387,
      "step": 109
    },
    {
      "epoch": 2.8636363636363638,
      "grad_norm": 0.5698805550812537,
      "learning_rate": 1.7982529657594637e-05,
      "loss": 0.3604,
      "step": 110
    },
    {
      "epoch": 2.8896103896103895,
      "grad_norm": 0.5212709930299778,
      "learning_rate": 1.761732715463553e-05,
      "loss": 0.3363,
      "step": 111
    },
    {
      "epoch": 2.9155844155844157,
      "grad_norm": 0.5284822211250491,
      "learning_rate": 1.7252928843631838e-05,
      "loss": 0.3546,
      "step": 112
    },
    {
      "epoch": 2.9415584415584415,
      "grad_norm": 0.5401001882166652,
      "learning_rate": 1.6889457715111144e-05,
      "loss": 0.3486,
      "step": 113
    },
    {
      "epoch": 2.9675324675324677,
      "grad_norm": 0.5229863215386347,
      "learning_rate": 1.6527036446661396e-05,
      "loss": 0.3352,
      "step": 114
    },
    {
      "epoch": 2.9935064935064934,
      "grad_norm": 0.5347563548020706,
      "learning_rate": 1.6165787361525237e-05,
      "loss": 0.3593,
      "step": 115
    },
    {
      "epoch": 3.022727272727273,
      "grad_norm": 0.672978181037337,
      "learning_rate": 1.5805832387313795e-05,
      "loss": 0.2407,
      "step": 116
    },
    {
      "epoch": 3.0487012987012987,
      "grad_norm": 0.679162551176236,
      "learning_rate": 1.544729301485414e-05,
      "loss": 0.1911,
      "step": 117
    },
    {
      "epoch": 3.074675324675325,
      "grad_norm": 0.48652138994921784,
      "learning_rate": 1.5090290257184019e-05,
      "loss": 0.1896,
      "step": 118
    },
    {
      "epoch": 3.1006493506493507,
      "grad_norm": 0.5783075834024586,
      "learning_rate": 1.4734944608708022e-05,
      "loss": 0.2093,
      "step": 119
    },
    {
      "epoch": 3.1266233766233764,
      "grad_norm": 0.8020276248905834,
      "learning_rate": 1.4381376004528616e-05,
      "loss": 0.2057,
      "step": 120
    },
    {
      "epoch": 3.1525974025974026,
      "grad_norm": 0.667374545311432,
      "learning_rate": 1.4029703779966116e-05,
      "loss": 0.1884,
      "step": 121
    },
    {
      "epoch": 3.1785714285714284,
      "grad_norm": 0.5056105500696159,
      "learning_rate": 1.3680046630280952e-05,
      "loss": 0.1977,
      "step": 122
    },
    {
      "epoch": 3.2045454545454546,
      "grad_norm": 0.49060521809385876,
      "learning_rate": 1.3332522570612097e-05,
      "loss": 0.1891,
      "step": 123
    },
    {
      "epoch": 3.2305194805194803,
      "grad_norm": 0.5481519220939601,
      "learning_rate": 1.2987248896144915e-05,
      "loss": 0.1912,
      "step": 124
    },
    {
      "epoch": 3.2564935064935066,
      "grad_norm": 0.5257555666517691,
      "learning_rate": 1.2644342142522142e-05,
      "loss": 0.1982,
      "step": 125
    },
    {
      "epoch": 3.2824675324675323,
      "grad_norm": 0.48753413095850306,
      "learning_rate": 1.230391804651109e-05,
      "loss": 0.1825,
      "step": 126
    },
    {
      "epoch": 3.3084415584415585,
      "grad_norm": 0.4612445791577537,
      "learning_rate": 1.1966091506940616e-05,
      "loss": 0.1963,
      "step": 127
    },
    {
      "epoch": 3.3344155844155843,
      "grad_norm": 0.44552190602064307,
      "learning_rate": 1.1630976545920777e-05,
      "loss": 0.1706,
      "step": 128
    },
    {
      "epoch": 3.3603896103896105,
      "grad_norm": 0.48659675686631854,
      "learning_rate": 1.1298686270358542e-05,
      "loss": 0.1913,
      "step": 129
    },
    {
      "epoch": 3.3863636363636362,
      "grad_norm": 0.4764396048263091,
      "learning_rate": 1.0969332833782217e-05,
      "loss": 0.1671,
      "step": 130
    },
    {
      "epoch": 3.4123376623376624,
      "grad_norm": 0.4740155487267979,
      "learning_rate": 1.0643027398487848e-05,
      "loss": 0.1854,
      "step": 131
    },
    {
      "epoch": 3.438311688311688,
      "grad_norm": 0.4551923057514232,
      "learning_rate": 1.031988009802003e-05,
      "loss": 0.1709,
      "step": 132
    },
    {
      "epoch": 3.4642857142857144,
      "grad_norm": 0.43070005789183097,
      "learning_rate": 1.0000000000000006e-05,
      "loss": 0.1975,
      "step": 133
    },
    {
      "epoch": 3.49025974025974,
      "grad_norm": 0.383014073709059,
      "learning_rate": 9.683495069313527e-06,
      "loss": 0.1787,
      "step": 134
    },
    {
      "epoch": 3.5162337662337664,
      "grad_norm": 0.4286879659999333,
      "learning_rate": 9.370472131670887e-06,
      "loss": 0.1979,
      "step": 135
    },
    {
      "epoch": 3.542207792207792,
      "grad_norm": 0.41592759389422845,
      "learning_rate": 9.061036837551467e-06,
      "loss": 0.1672,
      "step": 136
    },
    {
      "epoch": 3.5681818181818183,
      "grad_norm": 0.4101654305939979,
      "learning_rate": 8.755293626544921e-06,
      "loss": 0.1578,
      "step": 137
    },
    {
      "epoch": 3.594155844155844,
      "grad_norm": 0.41025853609303076,
      "learning_rate": 8.453345692101076e-06,
      "loss": 0.1732,
      "step": 138
    },
    {
      "epoch": 3.62012987012987,
      "grad_norm": 0.4099487747185865,
      "learning_rate": 8.155294946700402e-06,
      "loss": 0.1868,
      "step": 139
    },
    {
      "epoch": 3.646103896103896,
      "grad_norm": 0.39174988018880874,
      "learning_rate": 7.861241987456869e-06,
      "loss": 0.1921,
      "step": 140
    },
    {
      "epoch": 3.6720779220779223,
      "grad_norm": 0.3960964308201075,
      "learning_rate": 7.571286062164709e-06,
      "loss": 0.1902,
      "step": 141
    },
    {
      "epoch": 3.698051948051948,
      "grad_norm": 0.41695917345370964,
      "learning_rate": 7.285525035800645e-06,
      "loss": 0.1885,
      "step": 142
    },
    {
      "epoch": 3.724025974025974,
      "grad_norm": 0.3946638754267475,
      "learning_rate": 7.0040553574928115e-06,
      "loss": 0.1646,
      "step": 143
    },
    {
      "epoch": 3.75,
      "grad_norm": 0.3843896631591302,
      "learning_rate": 6.7269720279675755e-06,
      "loss": 0.1769,
      "step": 144
    },
    {
      "epoch": 3.775974025974026,
      "grad_norm": 0.39754823944143786,
      "learning_rate": 6.4543685674851834e-06,
      "loss": 0.1927,
      "step": 145
    },
    {
      "epoch": 3.801948051948052,
      "grad_norm": 0.39430307748603793,
      "learning_rate": 6.1863369842751145e-06,
      "loss": 0.1835,
      "step": 146
    },
    {
      "epoch": 3.8279220779220777,
      "grad_norm": 0.3925790082334243,
      "learning_rate": 5.922967743481773e-06,
      "loss": 0.1912,
      "step": 147
    },
    {
      "epoch": 3.853896103896104,
      "grad_norm": 0.4061383035436692,
      "learning_rate": 5.664349736630979e-06,
      "loss": 0.1688,
      "step": 148
    },
    {
      "epoch": 3.87987012987013,
      "grad_norm": 0.3887498420237992,
      "learning_rate": 5.410570251627587e-06,
      "loss": 0.202,
      "step": 149
    },
    {
      "epoch": 3.905844155844156,
      "grad_norm": 0.38629968407857657,
      "learning_rate": 5.161714943294372e-06,
      "loss": 0.1841,
      "step": 150
    },
    {
      "epoch": 3.9318181818181817,
      "grad_norm": 0.38720595837879046,
      "learning_rate": 4.917867804462077e-06,
      "loss": 0.1819,
      "step": 151
    },
    {
      "epoch": 3.957792207792208,
      "grad_norm": 0.38121444486474165,
      "learning_rate": 4.679111137620442e-06,
      "loss": 0.187,
      "step": 152
    },
    {
      "epoch": 3.9837662337662336,
      "grad_norm": 0.3830372856152854,
      "learning_rate": 4.445525527139725e-06,
      "loss": 0.187,
      "step": 153
    },
    {
      "epoch": 4.012987012987013,
      "grad_norm": 0.3791401299823691,
      "learning_rate": 4.217189812072131e-06,
      "loss": 0.1686,
      "step": 154
    },
    {
      "epoch": 4.038961038961039,
      "grad_norm": 0.4148718569874963,
      "learning_rate": 3.994181059542321e-06,
      "loss": 0.1176,
      "step": 155
    },
    {
      "epoch": 4.064935064935065,
      "grad_norm": 0.35673107519822433,
      "learning_rate": 3.7765745387359574e-06,
      "loss": 0.1154,
      "step": 156
    },
    {
      "epoch": 4.090909090909091,
      "grad_norm": 0.3402341389305598,
      "learning_rate": 3.564443695495099e-06,
      "loss": 0.1369,
      "step": 157
    },
    {
      "epoch": 4.116883116883117,
      "grad_norm": 0.33628395582708454,
      "learning_rate": 3.357860127529e-06,
      "loss": 0.1167,
      "step": 158
    },
    {
      "epoch": 4.142857142857143,
      "grad_norm": 0.30102455569288517,
      "learning_rate": 3.156893560248688e-06,
      "loss": 0.1242,
      "step": 159
    },
    {
      "epoch": 4.1688311688311686,
      "grad_norm": 0.2771068438895251,
      "learning_rate": 2.9616118232334613e-06,
      "loss": 0.1187,
      "step": 160
    },
    {
      "epoch": 4.194805194805195,
      "grad_norm": 0.294541202996176,
      "learning_rate": 2.7720808273372823e-06,
      "loss": 0.1143,
      "step": 161
    },
    {
      "epoch": 4.220779220779221,
      "grad_norm": 0.3243600487046826,
      "learning_rate": 2.588364542442754e-06,
      "loss": 0.1267,
      "step": 162
    },
    {
      "epoch": 4.246753246753247,
      "grad_norm": 0.3313486959659834,
      "learning_rate": 2.410524975870221e-06,
      "loss": 0.127,
      "step": 163
    },
    {
      "epoch": 4.2727272727272725,
      "grad_norm": 0.35233311448800625,
      "learning_rate": 2.2386221514492502e-06,
      "loss": 0.132,
      "step": 164
    },
    {
      "epoch": 4.298701298701299,
      "grad_norm": 0.34772021499799033,
      "learning_rate": 2.0727140892595998e-06,
      "loss": 0.1249,
      "step": 165
    },
    {
      "epoch": 4.324675324675325,
      "grad_norm": 0.31722684001225576,
      "learning_rate": 1.9128567860484516e-06,
      "loss": 0.1297,
      "step": 166
    },
    {
      "epoch": 4.35064935064935,
      "grad_norm": 0.31190729931884087,
      "learning_rate": 1.759104196330592e-06,
      "loss": 0.1163,
      "step": 167
    },
    {
      "epoch": 4.376623376623376,
      "grad_norm": 0.29649306847713347,
      "learning_rate": 1.6115082141778459e-06,
      "loss": 0.1219,
      "step": 168
    },
    {
      "epoch": 4.402597402597403,
      "grad_norm": 0.28001769998158305,
      "learning_rate": 1.4701186557039648e-06,
      "loss": 0.1096,
      "step": 169
    },
    {
      "epoch": 4.428571428571429,
      "grad_norm": 0.2810796360606971,
      "learning_rate": 1.334983242250858e-06,
      "loss": 0.1097,
      "step": 170
    },
    {
      "epoch": 4.454545454545454,
      "grad_norm": 0.2833719049954468,
      "learning_rate": 1.2061475842818337e-06,
      "loss": 0.1279,
      "step": 171
    },
    {
      "epoch": 4.48051948051948,
      "grad_norm": 0.2634347576438535,
      "learning_rate": 1.0836551659873073e-06,
      "loss": 0.123,
      "step": 172
    },
    {
      "epoch": 4.5064935064935066,
      "grad_norm": 0.2792887436301362,
      "learning_rate": 9.67547330608165e-07,
      "loss": 0.1189,
      "step": 173
    },
    {
      "epoch": 4.532467532467533,
      "grad_norm": 0.2615347102434456,
      "learning_rate": 8.578632664817177e-07,
      "loss": 0.1249,
      "step": 174
    },
    {
      "epoch": 4.558441558441558,
      "grad_norm": 0.2723001428445289,
      "learning_rate": 7.546399938149918e-07,
      "loss": 0.1137,
      "step": 175
    },
    {
      "epoch": 4.584415584415584,
      "grad_norm": 0.2612744190545831,
      "learning_rate": 6.579123521897867e-07,
      "loss": 0.1015,
      "step": 176
    },
    {
      "epoch": 4.6103896103896105,
      "grad_norm": 0.2689642699590416,
      "learning_rate": 5.677129888037236e-07,
      "loss": 0.1126,
      "step": 177
    },
    {
      "epoch": 4.636363636363637,
      "grad_norm": 0.269452974192897,
      "learning_rate": 4.840723474512876e-07,
      "loss": 0.123,
      "step": 178
    },
    {
      "epoch": 4.662337662337662,
      "grad_norm": 0.26461030349016573,
      "learning_rate": 4.070186582485214e-07,
      "loss": 0.1227,
      "step": 179
    },
    {
      "epoch": 4.688311688311688,
      "grad_norm": 0.26195993538778606,
      "learning_rate": 3.3657792810489975e-07,
      "loss": 0.1265,
      "step": 180
    },
    {
      "epoch": 4.714285714285714,
      "grad_norm": 0.2629718231282119,
      "learning_rate": 2.7277393194555357e-07,
      "loss": 0.0993,
      "step": 181
    },
    {
      "epoch": 4.740259740259741,
      "grad_norm": 0.2748041688236983,
      "learning_rate": 2.1562820468684187e-07,
      "loss": 0.1271,
      "step": 182
    },
    {
      "epoch": 4.766233766233766,
      "grad_norm": 0.28368676223609396,
      "learning_rate": 1.6516003396795489e-07,
      "loss": 0.132,
      "step": 183
    },
    {
      "epoch": 4.792207792207792,
      "grad_norm": 0.2691168035289099,
      "learning_rate": 1.2138645364101032e-07,
      "loss": 0.099,
      "step": 184
    },
    {
      "epoch": 4.818181818181818,
      "grad_norm": 0.26074215375130616,
      "learning_rate": 8.432223802185002e-08,
      "loss": 0.1111,
      "step": 185
    },
    {
      "epoch": 4.8441558441558445,
      "grad_norm": 0.27028450763035056,
      "learning_rate": 5.3979896903453287e-08,
      "loss": 0.1229,
      "step": 186
    },
    {
      "epoch": 4.87012987012987,
      "grad_norm": 0.27411525111121277,
      "learning_rate": 3.036967133367652e-08,
      "loss": 0.1303,
      "step": 187
    },
    {
      "epoch": 4.896103896103896,
      "grad_norm": 0.25069357083928406,
      "learning_rate": 1.349953015872707e-08,
      "loss": 0.1212,
      "step": 188
    },
    {
      "epoch": 4.922077922077922,
      "grad_norm": 0.2630680904345287,
      "learning_rate": 3.3751673335458147e-09,
      "loss": 0.1268,
      "step": 189
    },
    {
      "epoch": 4.948051948051948,
      "grad_norm": 0.2705412617221253,
      "learning_rate": 0.0,
      "loss": 0.1421,
      "step": 190
    },
    {
      "epoch": 4.948051948051948,
      "step": 190,
      "total_flos": 1.8291450201715507e+17,
      "train_loss": 0.4666951280283301,
      "train_runtime": 4594.7358,
      "train_samples_per_second": 5.357,
      "train_steps_per_second": 0.041
    }
  ],
  "logging_steps": 1,
  "max_steps": 190,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.8291450201715507e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}