{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1862,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0005370569280343716,
      "grad_norm": 24.25113947879833,
      "learning_rate": 5.3475935828877005e-08,
      "loss": 1.3237,
      "step": 1
    },
    {
      "epoch": 0.0026852846401718583,
      "grad_norm": 23.161748165504072,
      "learning_rate": 2.6737967914438503e-07,
      "loss": 1.3357,
      "step": 5
    },
    {
      "epoch": 0.0053705692803437165,
      "grad_norm": 15.788644713919888,
      "learning_rate": 5.347593582887701e-07,
      "loss": 1.2928,
      "step": 10
    },
    {
      "epoch": 0.008055853920515575,
      "grad_norm": 12.065360956658548,
      "learning_rate": 8.021390374331551e-07,
      "loss": 1.1734,
      "step": 15
    },
    {
      "epoch": 0.010741138560687433,
      "grad_norm": 8.451481103732192,
      "learning_rate": 1.0695187165775401e-06,
      "loss": 1.053,
      "step": 20
    },
    {
      "epoch": 0.01342642320085929,
      "grad_norm": 3.607181792793628,
      "learning_rate": 1.3368983957219254e-06,
      "loss": 0.9479,
      "step": 25
    },
    {
      "epoch": 0.01611170784103115,
      "grad_norm": 3.4122039231622128,
      "learning_rate": 1.6042780748663103e-06,
      "loss": 0.9164,
      "step": 30
    },
    {
      "epoch": 0.018796992481203006,
      "grad_norm": 2.9894854055400732,
      "learning_rate": 1.8716577540106954e-06,
      "loss": 0.8869,
      "step": 35
    },
    {
      "epoch": 0.021482277121374866,
      "grad_norm": 3.052529317760186,
      "learning_rate": 2.1390374331550802e-06,
      "loss": 0.8571,
      "step": 40
    },
    {
      "epoch": 0.024167561761546726,
      "grad_norm": 2.9434989562495484,
      "learning_rate": 2.4064171122994653e-06,
      "loss": 0.857,
      "step": 45
    },
    {
      "epoch": 0.02685284640171858,
      "grad_norm": 3.0139599082135424,
      "learning_rate": 2.673796791443851e-06,
      "loss": 0.8329,
      "step": 50
    },
    {
      "epoch": 0.02953813104189044,
      "grad_norm": 2.8752390849195306,
      "learning_rate": 2.9411764705882355e-06,
      "loss": 0.8183,
      "step": 55
    },
    {
      "epoch": 0.0322234156820623,
      "grad_norm": 2.9815477478474164,
      "learning_rate": 3.2085561497326205e-06,
      "loss": 0.8154,
      "step": 60
    },
    {
      "epoch": 0.03490870032223416,
      "grad_norm": 3.1961157011348433,
      "learning_rate": 3.4759358288770056e-06,
      "loss": 0.8127,
      "step": 65
    },
    {
      "epoch": 0.03759398496240601,
      "grad_norm": 3.011408828479708,
      "learning_rate": 3.7433155080213907e-06,
      "loss": 0.7876,
      "step": 70
    },
    {
      "epoch": 0.040279269602577876,
      "grad_norm": 3.074730944639507,
      "learning_rate": 4.010695187165775e-06,
      "loss": 0.7812,
      "step": 75
    },
    {
      "epoch": 0.04296455424274973,
      "grad_norm": 3.0954880610087416,
      "learning_rate": 4.2780748663101604e-06,
      "loss": 0.7938,
      "step": 80
    },
    {
      "epoch": 0.04564983888292159,
      "grad_norm": 3.103351806444735,
      "learning_rate": 4.5454545454545455e-06,
      "loss": 0.7783,
      "step": 85
    },
    {
      "epoch": 0.04833512352309345,
      "grad_norm": 3.1872294877792693,
      "learning_rate": 4.812834224598931e-06,
      "loss": 0.7573,
      "step": 90
    },
    {
      "epoch": 0.05102040816326531,
      "grad_norm": 3.1638880990567455,
      "learning_rate": 5.0802139037433165e-06,
      "loss": 0.7558,
      "step": 95
    },
    {
      "epoch": 0.05370569280343716,
      "grad_norm": 3.0115638804221754,
      "learning_rate": 5.347593582887702e-06,
      "loss": 0.7537,
      "step": 100
    },
    {
      "epoch": 0.05639097744360902,
      "grad_norm": 3.2941594696139145,
      "learning_rate": 5.614973262032086e-06,
      "loss": 0.7515,
      "step": 105
    },
    {
      "epoch": 0.05907626208378088,
      "grad_norm": 3.3155309676050013,
      "learning_rate": 5.882352941176471e-06,
      "loss": 0.7397,
      "step": 110
    },
    {
      "epoch": 0.06176154672395274,
      "grad_norm": 3.162740571135442,
      "learning_rate": 6.149732620320856e-06,
      "loss": 0.7296,
      "step": 115
    },
    {
      "epoch": 0.0644468313641246,
      "grad_norm": 3.0840588519609016,
      "learning_rate": 6.417112299465241e-06,
      "loss": 0.7336,
      "step": 120
    },
    {
      "epoch": 0.06713211600429646,
      "grad_norm": 3.150117025536966,
      "learning_rate": 6.684491978609626e-06,
      "loss": 0.7382,
      "step": 125
    },
    {
      "epoch": 0.06981740064446831,
      "grad_norm": 2.9252296896598278,
      "learning_rate": 6.951871657754011e-06,
      "loss": 0.7211,
      "step": 130
    },
    {
      "epoch": 0.07250268528464017,
      "grad_norm": 3.0320480967430092,
      "learning_rate": 7.219251336898396e-06,
      "loss": 0.73,
      "step": 135
    },
    {
      "epoch": 0.07518796992481203,
      "grad_norm": 2.874198329021165,
      "learning_rate": 7.486631016042781e-06,
      "loss": 0.7434,
      "step": 140
    },
    {
      "epoch": 0.07787325456498388,
      "grad_norm": 2.9173140627663656,
      "learning_rate": 7.754010695187166e-06,
      "loss": 0.7353,
      "step": 145
    },
    {
      "epoch": 0.08055853920515575,
      "grad_norm": 2.9848439720434294,
      "learning_rate": 8.02139037433155e-06,
      "loss": 0.73,
      "step": 150
    },
    {
      "epoch": 0.08324382384532761,
      "grad_norm": 2.87264895278571,
      "learning_rate": 8.288770053475937e-06,
      "loss": 0.7264,
      "step": 155
    },
    {
      "epoch": 0.08592910848549946,
      "grad_norm": 2.864090437505294,
      "learning_rate": 8.556149732620321e-06,
      "loss": 0.716,
      "step": 160
    },
    {
      "epoch": 0.08861439312567132,
      "grad_norm": 2.9211346467401826,
      "learning_rate": 8.823529411764707e-06,
      "loss": 0.7213,
      "step": 165
    },
    {
      "epoch": 0.09129967776584318,
      "grad_norm": 2.812817629694785,
      "learning_rate": 9.090909090909091e-06,
      "loss": 0.7239,
      "step": 170
    },
    {
      "epoch": 0.09398496240601503,
      "grad_norm": 3.0966947636468323,
      "learning_rate": 9.358288770053477e-06,
      "loss": 0.7252,
      "step": 175
    },
    {
      "epoch": 0.0966702470461869,
      "grad_norm": 2.8482566526889066,
      "learning_rate": 9.625668449197861e-06,
      "loss": 0.7111,
      "step": 180
    },
    {
      "epoch": 0.09935553168635876,
      "grad_norm": 3.103402165074239,
      "learning_rate": 9.893048128342247e-06,
      "loss": 0.709,
      "step": 185
    },
    {
      "epoch": 0.10204081632653061,
      "grad_norm": 2.833371746409837,
      "learning_rate": 9.999920849895074e-06,
      "loss": 0.7141,
      "step": 190
    },
    {
      "epoch": 0.10472610096670247,
      "grad_norm": 2.7601482578055685,
      "learning_rate": 9.99943716388422e-06,
      "loss": 0.7091,
      "step": 195
    },
    {
      "epoch": 0.10741138560687433,
      "grad_norm": 2.7694618688472334,
      "learning_rate": 9.998513806628825e-06,
      "loss": 0.6892,
      "step": 200
    },
    {
      "epoch": 0.11009667024704618,
      "grad_norm": 2.7627170756712545,
      "learning_rate": 9.997150859332753e-06,
      "loss": 0.7057,
      "step": 205
    },
    {
      "epoch": 0.11278195488721804,
      "grad_norm": 2.641313671359486,
      "learning_rate": 9.995348441859243e-06,
      "loss": 0.6968,
      "step": 210
    },
    {
      "epoch": 0.11546723952738991,
      "grad_norm": 2.7210282435142163,
      "learning_rate": 9.993106712720367e-06,
      "loss": 0.7126,
      "step": 215
    },
    {
      "epoch": 0.11815252416756176,
      "grad_norm": 2.6468526042665563,
      "learning_rate": 9.990425869063085e-06,
      "loss": 0.7133,
      "step": 220
    },
    {
      "epoch": 0.12083780880773362,
      "grad_norm": 2.669248971535401,
      "learning_rate": 9.987306146651908e-06,
      "loss": 0.6862,
      "step": 225
    },
    {
      "epoch": 0.12352309344790548,
      "grad_norm": 2.614929201255673,
      "learning_rate": 9.983747819848168e-06,
      "loss": 0.6822,
      "step": 230
    },
    {
      "epoch": 0.12620837808807733,
      "grad_norm": 2.752612192498847,
      "learning_rate": 9.97975120158589e-06,
      "loss": 0.7086,
      "step": 235
    },
    {
      "epoch": 0.1288936627282492,
      "grad_norm": 2.779012267697376,
      "learning_rate": 9.975316643344257e-06,
      "loss": 0.7081,
      "step": 240
    },
    {
      "epoch": 0.13157894736842105,
      "grad_norm": 2.5890743255074278,
      "learning_rate": 9.970444535116721e-06,
      "loss": 0.6914,
      "step": 245
    },
    {
      "epoch": 0.13426423200859292,
      "grad_norm": 2.6285297060534334,
      "learning_rate": 9.965135305376694e-06,
      "loss": 0.681,
      "step": 250
    },
    {
      "epoch": 0.13694951664876476,
      "grad_norm": 2.5527710360428038,
      "learning_rate": 9.959389421039863e-06,
      "loss": 0.6912,
      "step": 255
    },
    {
      "epoch": 0.13963480128893663,
      "grad_norm": 2.665201173590149,
      "learning_rate": 9.95320738742313e-06,
      "loss": 0.6911,
      "step": 260
    },
    {
      "epoch": 0.1423200859291085,
      "grad_norm": 2.57466357634417,
      "learning_rate": 9.946589748200185e-06,
      "loss": 0.7051,
      "step": 265
    },
    {
      "epoch": 0.14500537056928034,
      "grad_norm": 2.512739592787712,
      "learning_rate": 9.939537085353668e-06,
      "loss": 0.6789,
      "step": 270
    },
    {
      "epoch": 0.1476906552094522,
      "grad_norm": 2.5488133338759544,
      "learning_rate": 9.932050019124011e-06,
      "loss": 0.6722,
      "step": 275
    },
    {
      "epoch": 0.15037593984962405,
      "grad_norm": 2.563447287239947,
      "learning_rate": 9.924129207954877e-06,
      "loss": 0.68,
      "step": 280
    },
    {
      "epoch": 0.15306122448979592,
      "grad_norm": 2.6455202812435066,
      "learning_rate": 9.915775348435266e-06,
      "loss": 0.6829,
      "step": 285
    },
    {
      "epoch": 0.15574650912996776,
      "grad_norm": 2.607428689084183,
      "learning_rate": 9.906989175238232e-06,
      "loss": 0.6734,
      "step": 290
    },
    {
      "epoch": 0.15843179377013963,
      "grad_norm": 2.632494678548499,
      "learning_rate": 9.8977714610563e-06,
      "loss": 0.6654,
      "step": 295
    },
    {
      "epoch": 0.1611170784103115,
      "grad_norm": 2.5679386221927163,
      "learning_rate": 9.888123016533496e-06,
      "loss": 0.6464,
      "step": 300
    },
    {
      "epoch": 0.16380236305048335,
      "grad_norm": 2.4455235169022123,
      "learning_rate": 9.878044690194055e-06,
      "loss": 0.6808,
      "step": 305
    },
    {
      "epoch": 0.16648764769065522,
      "grad_norm": 2.6134103392173254,
      "learning_rate": 9.86753736836781e-06,
      "loss": 0.6689,
      "step": 310
    },
    {
      "epoch": 0.16917293233082706,
      "grad_norm": 2.553706613546006,
      "learning_rate": 9.856601975112227e-06,
      "loss": 0.6588,
      "step": 315
    },
    {
      "epoch": 0.17185821697099893,
      "grad_norm": 2.5066521118011407,
      "learning_rate": 9.84523947213116e-06,
      "loss": 0.6663,
      "step": 320
    },
    {
      "epoch": 0.17454350161117077,
      "grad_norm": 2.337716810935506,
      "learning_rate": 9.833450858690257e-06,
      "loss": 0.6571,
      "step": 325
    },
    {
      "epoch": 0.17722878625134264,
      "grad_norm": 2.568589396910675,
      "learning_rate": 9.821237171529088e-06,
      "loss": 0.6569,
      "step": 330
    },
    {
      "epoch": 0.1799140708915145,
      "grad_norm": 2.385921872954393,
      "learning_rate": 9.808599484769971e-06,
      "loss": 0.656,
      "step": 335
    },
    {
      "epoch": 0.18259935553168635,
      "grad_norm": 2.4819007537288362,
      "learning_rate": 9.79553890982351e-06,
      "loss": 0.6609,
      "step": 340
    },
    {
      "epoch": 0.18528464017185822,
      "grad_norm": 2.437061379076581,
      "learning_rate": 9.782056595290848e-06,
      "loss": 0.6455,
      "step": 345
    },
    {
      "epoch": 0.18796992481203006,
      "grad_norm": 2.564231295853269,
      "learning_rate": 9.768153726862652e-06,
      "loss": 0.6612,
      "step": 350
    },
    {
      "epoch": 0.19065520945220193,
      "grad_norm": 2.4893325625782583,
      "learning_rate": 9.753831527214854e-06,
      "loss": 0.662,
      "step": 355
    },
    {
      "epoch": 0.1933404940923738,
      "grad_norm": 2.4552866529124757,
      "learning_rate": 9.739091255901105e-06,
      "loss": 0.6486,
      "step": 360
    },
    {
      "epoch": 0.19602577873254565,
      "grad_norm": 2.5401346090028154,
      "learning_rate": 9.723934209242015e-06,
      "loss": 0.6471,
      "step": 365
    },
    {
      "epoch": 0.19871106337271752,
      "grad_norm": 2.5620972387781253,
      "learning_rate": 9.708361720211146e-06,
      "loss": 0.6419,
      "step": 370
    },
    {
      "epoch": 0.20139634801288936,
      "grad_norm": 2.4951612644213554,
      "learning_rate": 9.692375158317787e-06,
      "loss": 0.6492,
      "step": 375
    },
    {
      "epoch": 0.20408163265306123,
      "grad_norm": 2.4669992078317917,
      "learning_rate": 9.675975929486512e-06,
      "loss": 0.6546,
      "step": 380
    },
    {
      "epoch": 0.20676691729323307,
      "grad_norm": 2.603925161419725,
      "learning_rate": 9.659165475933537e-06,
      "loss": 0.6433,
      "step": 385
    },
    {
      "epoch": 0.20945220193340494,
      "grad_norm": 2.358390538888436,
      "learning_rate": 9.641945276039885e-06,
      "loss": 0.6436,
      "step": 390
    },
    {
      "epoch": 0.2121374865735768,
      "grad_norm": 2.4363415566836184,
      "learning_rate": 9.624316844221376e-06,
      "loss": 0.626,
      "step": 395
    },
    {
      "epoch": 0.21482277121374865,
      "grad_norm": 2.282557445492374,
      "learning_rate": 9.606281730795435e-06,
      "loss": 0.6267,
      "step": 400
    },
    {
      "epoch": 0.21750805585392052,
      "grad_norm": 2.3912481386253654,
      "learning_rate": 9.587841521844755e-06,
      "loss": 0.6451,
      "step": 405
    },
    {
      "epoch": 0.22019334049409237,
      "grad_norm": 2.468086968145363,
      "learning_rate": 9.568997839077812e-06,
      "loss": 0.6275,
      "step": 410
    },
    {
      "epoch": 0.22287862513426424,
      "grad_norm": 2.777359054522718,
      "learning_rate": 9.549752339686243e-06,
      "loss": 0.6487,
      "step": 415
    },
    {
      "epoch": 0.22556390977443608,
      "grad_norm": 2.435355179570479,
      "learning_rate": 9.530106716199103e-06,
      "loss": 0.6467,
      "step": 420
    },
    {
      "epoch": 0.22824919441460795,
      "grad_norm": 2.383529479537672,
      "learning_rate": 9.510062696334024e-06,
      "loss": 0.6345,
      "step": 425
    },
    {
      "epoch": 0.23093447905477982,
      "grad_norm": 2.4092098189126734,
      "learning_rate": 9.489622042845266e-06,
      "loss": 0.6239,
      "step": 430
    },
    {
      "epoch": 0.23361976369495166,
      "grad_norm": 2.426365302575787,
      "learning_rate": 9.468786553368694e-06,
      "loss": 0.6238,
      "step": 435
    },
    {
      "epoch": 0.23630504833512353,
      "grad_norm": 2.5087002358611574,
      "learning_rate": 9.447558060263693e-06,
      "loss": 0.6237,
      "step": 440
    },
    {
      "epoch": 0.23899033297529537,
      "grad_norm": 2.387385697008228,
      "learning_rate": 9.42593843045201e-06,
      "loss": 0.622,
      "step": 445
    },
    {
      "epoch": 0.24167561761546724,
      "grad_norm": 2.326708486574728,
      "learning_rate": 9.403929565253582e-06,
      "loss": 0.6306,
      "step": 450
    },
    {
      "epoch": 0.24436090225563908,
      "grad_norm": 2.369288401258165,
      "learning_rate": 9.381533400219319e-06,
      "loss": 0.6073,
      "step": 455
    },
    {
      "epoch": 0.24704618689581095,
      "grad_norm": 2.383046130914992,
      "learning_rate": 9.358751904960885e-06,
      "loss": 0.6205,
      "step": 460
    },
    {
      "epoch": 0.24973147153598282,
      "grad_norm": 2.414995815331301,
      "learning_rate": 9.335587082977484e-06,
      "loss": 0.6106,
      "step": 465
    },
    {
      "epoch": 0.25241675617615467,
      "grad_norm": 2.768675173604855,
      "learning_rate": 9.312040971479663e-06,
      "loss": 0.6272,
      "step": 470
    },
    {
      "epoch": 0.25510204081632654,
      "grad_norm": 2.2921439710822185,
      "learning_rate": 9.288115641210144e-06,
      "loss": 0.6113,
      "step": 475
    },
    {
      "epoch": 0.2577873254564984,
      "grad_norm": 2.3081671484334807,
      "learning_rate": 9.26381319626173e-06,
      "loss": 0.6102,
      "step": 480
    },
    {
      "epoch": 0.2604726100966702,
      "grad_norm": 2.5833114870823355,
      "learning_rate": 9.239135773892249e-06,
      "loss": 0.6042,
      "step": 485
    },
    {
      "epoch": 0.2631578947368421,
      "grad_norm": 2.3993711180589425,
      "learning_rate": 9.214085544336597e-06,
      "loss": 0.6043,
      "step": 490
    },
    {
      "epoch": 0.26584317937701396,
      "grad_norm": 2.3928350581872073,
      "learning_rate": 9.188664710615886e-06,
      "loss": 0.6072,
      "step": 495
    },
    {
      "epoch": 0.26852846401718583,
      "grad_norm": 2.291178931562558,
      "learning_rate": 9.162875508343691e-06,
      "loss": 0.607,
      "step": 500
    },
    {
      "epoch": 0.2712137486573577,
      "grad_norm": 2.302429905502462,
      "learning_rate": 9.136720205529446e-06,
      "loss": 0.6212,
      "step": 505
    },
    {
      "epoch": 0.2738990332975295,
      "grad_norm": 2.4002220088345227,
      "learning_rate": 9.110201102378991e-06,
      "loss": 0.6022,
      "step": 510
    },
    {
      "epoch": 0.2765843179377014,
      "grad_norm": 2.345306335150251,
      "learning_rate": 9.083320531092268e-06,
      "loss": 0.5849,
      "step": 515
    },
    {
      "epoch": 0.27926960257787325,
      "grad_norm": 2.361854410468674,
      "learning_rate": 9.056080855658233e-06,
      "loss": 0.5941,
      "step": 520
    },
    {
      "epoch": 0.2819548872180451,
      "grad_norm": 2.329088369479831,
      "learning_rate": 9.02848447164695e-06,
      "loss": 0.592,
      "step": 525
    },
    {
      "epoch": 0.284640171858217,
      "grad_norm": 2.9145104665950834,
      "learning_rate": 9.00053380599891e-06,
      "loss": 0.6079,
      "step": 530
    },
    {
      "epoch": 0.2873254564983888,
      "grad_norm": 2.30854865495231,
      "learning_rate": 8.972231316811608e-06,
      "loss": 0.5902,
      "step": 535
    },
    {
      "epoch": 0.2900107411385607,
      "grad_norm": 2.710514687574968,
      "learning_rate": 8.943579493123354e-06,
      "loss": 0.6139,
      "step": 540
    },
    {
      "epoch": 0.29269602577873255,
      "grad_norm": 2.3749882685361907,
      "learning_rate": 8.914580854694389e-06,
      "loss": 0.5982,
      "step": 545
    },
    {
      "epoch": 0.2953813104189044,
      "grad_norm": 2.341531483158182,
      "learning_rate": 8.885237951785275e-06,
      "loss": 0.6009,
      "step": 550
    },
    {
      "epoch": 0.2980665950590763,
      "grad_norm": 2.31541797689058,
      "learning_rate": 8.855553364932628e-06,
      "loss": 0.5539,
      "step": 555
    },
    {
      "epoch": 0.3007518796992481,
      "grad_norm": 2.3459951500114773,
      "learning_rate": 8.82552970472216e-06,
      "loss": 0.5793,
      "step": 560
    },
    {
      "epoch": 0.30343716433942,
      "grad_norm": 2.305688250517311,
      "learning_rate": 8.795169611559108e-06,
      "loss": 0.6016,
      "step": 565
    },
    {
      "epoch": 0.30612244897959184,
      "grad_norm": 2.5187456404531936,
      "learning_rate": 8.764475755436011e-06,
      "loss": 0.5785,
      "step": 570
    },
    {
      "epoch": 0.3088077336197637,
      "grad_norm": 2.3186145964360336,
      "learning_rate": 8.733450835697914e-06,
      "loss": 0.5952,
      "step": 575
    },
    {
      "epoch": 0.31149301825993553,
      "grad_norm": 2.4852031283314675,
      "learning_rate": 8.702097580804962e-06,
      "loss": 0.5921,
      "step": 580
    },
    {
      "epoch": 0.3141783029001074,
      "grad_norm": 2.48247156500519,
      "learning_rate": 8.670418748092465e-06,
      "loss": 0.5824,
      "step": 585
    },
    {
      "epoch": 0.31686358754027927,
      "grad_norm": 2.4487511752763003,
      "learning_rate": 8.638417123528393e-06,
      "loss": 0.5766,
      "step": 590
    },
    {
      "epoch": 0.31954887218045114,
      "grad_norm": 2.5523301393566973,
      "learning_rate": 8.60609552146837e-06,
      "loss": 0.5778,
      "step": 595
    },
    {
      "epoch": 0.322234156820623,
      "grad_norm": 2.463880439656359,
      "learning_rate": 8.573456784408162e-06,
      "loss": 0.5876,
      "step": 600
    },
    {
      "epoch": 0.3249194414607948,
      "grad_norm": 2.279384394364597,
      "learning_rate": 8.540503782733718e-06,
      "loss": 0.5564,
      "step": 605
    },
    {
      "epoch": 0.3276047261009667,
      "grad_norm": 2.3581766823519703,
      "learning_rate": 8.507239414468707e-06,
      "loss": 0.5711,
      "step": 610
    },
    {
      "epoch": 0.33029001074113856,
      "grad_norm": 2.386244897526467,
      "learning_rate": 8.473666605019673e-06,
      "loss": 0.5606,
      "step": 615
    },
    {
      "epoch": 0.33297529538131043,
      "grad_norm": 2.6538204423039176,
      "learning_rate": 8.439788306918759e-06,
      "loss": 0.5679,
      "step": 620
    },
    {
      "epoch": 0.3356605800214823,
      "grad_norm": 2.3536598330429994,
      "learning_rate": 8.405607499564044e-06,
      "loss": 0.5686,
      "step": 625
    },
    {
      "epoch": 0.3383458646616541,
      "grad_norm": 2.5411210857238737,
      "learning_rate": 8.371127188957527e-06,
      "loss": 0.5568,
      "step": 630
    },
    {
      "epoch": 0.341031149301826,
      "grad_norm": 2.3248484069987447,
      "learning_rate": 8.336350407440766e-06,
      "loss": 0.5603,
      "step": 635
    },
    {
      "epoch": 0.34371643394199786,
      "grad_norm": 2.531937446280703,
      "learning_rate": 8.3012802134282e-06,
      "loss": 0.559,
      "step": 640
    },
    {
      "epoch": 0.3464017185821697,
      "grad_norm": 2.453303019061587,
      "learning_rate": 8.26591969113818e-06,
      "loss": 0.5515,
      "step": 645
    },
    {
      "epoch": 0.34908700322234154,
      "grad_norm": 2.69008655565826,
      "learning_rate": 8.230271950321733e-06,
      "loss": 0.5581,
      "step": 650
    },
    {
      "epoch": 0.3517722878625134,
      "grad_norm": 2.2858934099243546,
      "learning_rate": 8.194340125989068e-06,
      "loss": 0.5584,
      "step": 655
    },
    {
      "epoch": 0.3544575725026853,
      "grad_norm": 2.4227589614633307,
      "learning_rate": 8.158127378133886e-06,
      "loss": 0.5338,
      "step": 660
    },
    {
      "epoch": 0.35714285714285715,
      "grad_norm": 2.473273208390763,
      "learning_rate": 8.121636891455462e-06,
      "loss": 0.5488,
      "step": 665
    },
    {
      "epoch": 0.359828141783029,
      "grad_norm": 4.029648668242547,
      "learning_rate": 8.084871875078574e-06,
      "loss": 0.5558,
      "step": 670
    },
    {
      "epoch": 0.36251342642320084,
      "grad_norm": 2.3823297359151487,
      "learning_rate": 8.047835562271289e-06,
      "loss": 0.5347,
      "step": 675
    },
    {
      "epoch": 0.3651987110633727,
      "grad_norm": 2.3206951441879062,
      "learning_rate": 8.010531210160594e-06,
      "loss": 0.5474,
      "step": 680
    },
    {
      "epoch": 0.3678839957035446,
      "grad_norm": 2.401861048928935,
      "learning_rate": 7.972962099445979e-06,
      "loss": 0.5336,
      "step": 685
    },
    {
      "epoch": 0.37056928034371645,
      "grad_norm": 2.283419604263929,
      "learning_rate": 7.935131534110893e-06,
      "loss": 0.5423,
      "step": 690
    },
    {
      "epoch": 0.3732545649838883,
      "grad_norm": 2.4091957808848377,
      "learning_rate": 7.897042841132195e-06,
      "loss": 0.5512,
      "step": 695
    },
    {
      "epoch": 0.37593984962406013,
      "grad_norm": 2.360101346375749,
      "learning_rate": 7.858699370187558e-06,
      "loss": 0.5521,
      "step": 700
    },
    {
      "epoch": 0.378625134264232,
      "grad_norm": 2.578398145192331,
      "learning_rate": 7.820104493360883e-06,
      "loss": 0.5446,
      "step": 705
    },
    {
      "epoch": 0.38131041890440387,
      "grad_norm": 2.392308313238444,
      "learning_rate": 7.781261604845754e-06,
      "loss": 0.539,
      "step": 710
    },
    {
      "epoch": 0.38399570354457574,
      "grad_norm": 2.4396652511764434,
      "learning_rate": 7.742174120646929e-06,
      "loss": 0.5459,
      "step": 715
    },
    {
      "epoch": 0.3866809881847476,
      "grad_norm": 2.4045120660307138,
      "learning_rate": 7.70284547827992e-06,
      "loss": 0.5245,
      "step": 720
    },
    {
      "epoch": 0.3893662728249194,
      "grad_norm": 2.2765914991878358,
      "learning_rate": 7.663279136468696e-06,
      "loss": 0.5493,
      "step": 725
    },
    {
      "epoch": 0.3920515574650913,
      "grad_norm": 2.350607787315242,
      "learning_rate": 7.623478574841499e-06,
      "loss": 0.5274,
      "step": 730
    },
    {
      "epoch": 0.39473684210526316,
      "grad_norm": 2.4564288550698166,
      "learning_rate": 7.58344729362483e-06,
      "loss": 0.5272,
      "step": 735
    },
    {
      "epoch": 0.39742212674543503,
      "grad_norm": 2.308846635556029,
      "learning_rate": 7.5431888133356334e-06,
      "loss": 0.5256,
      "step": 740
    },
    {
      "epoch": 0.40010741138560685,
      "grad_norm": 2.3648049921250105,
      "learning_rate": 7.502706674471678e-06,
      "loss": 0.5374,
      "step": 745
    },
    {
      "epoch": 0.4027926960257787,
      "grad_norm": 2.372539412693635,
      "learning_rate": 7.462004437200195e-06,
      "loss": 0.5322,
      "step": 750
    },
    {
      "epoch": 0.4054779806659506,
      "grad_norm": 2.35495618473913,
      "learning_rate": 7.42108568104479e-06,
      "loss": 0.5278,
      "step": 755
    },
    {
      "epoch": 0.40816326530612246,
      "grad_norm": 2.432730442205152,
      "learning_rate": 7.379954004570628e-06,
      "loss": 0.543,
      "step": 760
    },
    {
      "epoch": 0.41084854994629433,
      "grad_norm": 2.383640686140904,
      "learning_rate": 7.338613025067977e-06,
      "loss": 0.5493,
      "step": 765
    },
    {
      "epoch": 0.41353383458646614,
      "grad_norm": 2.2937243918245307,
      "learning_rate": 7.2970663782340765e-06,
      "loss": 0.5159,
      "step": 770
    },
    {
      "epoch": 0.416219119226638,
      "grad_norm": 2.584638869408312,
      "learning_rate": 7.25531771785341e-06,
      "loss": 0.528,
      "step": 775
    },
    {
      "epoch": 0.4189044038668099,
      "grad_norm": 2.4008027863806363,
      "learning_rate": 7.2133707154763646e-06,
      "loss": 0.5264,
      "step": 780
    },
    {
      "epoch": 0.42158968850698175,
      "grad_norm": 2.388791471928522,
      "learning_rate": 7.17122906009635e-06,
      "loss": 0.5123,
      "step": 785
    },
    {
      "epoch": 0.4242749731471536,
      "grad_norm": 2.3442732809582774,
      "learning_rate": 7.128896457825364e-06,
      "loss": 0.5149,
      "step": 790
    },
    {
      "epoch": 0.42696025778732544,
      "grad_norm": 2.279111093706145,
      "learning_rate": 7.08637663156807e-06,
      "loss": 0.5168,
      "step": 795
    },
    {
      "epoch": 0.4296455424274973,
      "grad_norm": 2.4062295480857805,
      "learning_rate": 7.043673320694386e-06,
      "loss": 0.5015,
      "step": 800
    },
    {
      "epoch": 0.4323308270676692,
      "grad_norm": 2.5033445807132932,
      "learning_rate": 7.000790280710625e-06,
      "loss": 0.5221,
      "step": 805
    },
    {
      "epoch": 0.43501611170784105,
      "grad_norm": 2.3456722409693698,
      "learning_rate": 6.957731282929224e-06,
      "loss": 0.498,
      "step": 810
    },
    {
      "epoch": 0.4377013963480129,
      "grad_norm": 2.3434221234555936,
      "learning_rate": 6.914500114137082e-06,
      "loss": 0.4955,
      "step": 815
    },
    {
      "epoch": 0.44038668098818473,
      "grad_norm": 2.351670401180038,
      "learning_rate": 6.871100576262526e-06,
      "loss": 0.5041,
      "step": 820
    },
    {
      "epoch": 0.4430719656283566,
      "grad_norm": 2.357200356793434,
      "learning_rate": 6.827536486040964e-06,
      "loss": 0.5131,
      "step": 825
    },
    {
      "epoch": 0.44575725026852847,
      "grad_norm": 2.3716872327140948,
      "learning_rate": 6.783811674679216e-06,
      "loss": 0.5072,
      "step": 830
    },
    {
      "epoch": 0.44844253490870034,
      "grad_norm": 2.3837956868904753,
      "learning_rate": 6.7399299875185875e-06,
      "loss": 0.5072,
      "step": 835
    },
    {
      "epoch": 0.45112781954887216,
      "grad_norm": 2.468749709382942,
      "learning_rate": 6.695895283696691e-06,
      "loss": 0.4916,
      "step": 840
    },
    {
      "epoch": 0.453813104189044,
      "grad_norm": 2.2977664623966576,
      "learning_rate": 6.651711435808061e-06,
      "loss": 0.5073,
      "step": 845
    },
    {
      "epoch": 0.4564983888292159,
      "grad_norm": 2.3750347681190367,
      "learning_rate": 6.607382329563581e-06,
      "loss": 0.5042,
      "step": 850
    },
    {
      "epoch": 0.45918367346938777,
      "grad_norm": 2.3201817159617604,
      "learning_rate": 6.562911863448752e-06,
      "loss": 0.505,
      "step": 855
    },
    {
      "epoch": 0.46186895810955964,
      "grad_norm": 2.317046101118156,
      "learning_rate": 6.518303948380854e-06,
      "loss": 0.5027,
      "step": 860
    },
    {
      "epoch": 0.46455424274973145,
      "grad_norm": 2.4450948832382844,
      "learning_rate": 6.473562507364995e-06,
      "loss": 0.5057,
      "step": 865
    },
    {
      "epoch": 0.4672395273899033,
      "grad_norm": 2.342313602440348,
      "learning_rate": 6.428691475149107e-06,
      "loss": 0.4859,
      "step": 870
    },
    {
      "epoch": 0.4699248120300752,
      "grad_norm": 2.3462970754354084,
      "learning_rate": 6.383694797877915e-06,
      "loss": 0.4714,
      "step": 875
    },
    {
      "epoch": 0.47261009667024706,
      "grad_norm": 2.5885071330202627,
      "learning_rate": 6.338576432745891e-06,
      "loss": 0.4875,
      "step": 880
    },
    {
      "epoch": 0.47529538131041893,
      "grad_norm": 2.2979744167450358,
      "learning_rate": 6.293340347649234e-06,
      "loss": 0.4951,
      "step": 885
    },
    {
      "epoch": 0.47798066595059074,
      "grad_norm": 2.291237469268037,
      "learning_rate": 6.247990520836935e-06,
      "loss": 0.4883,
      "step": 890
    },
    {
      "epoch": 0.4806659505907626,
      "grad_norm": 2.3728016920526565,
      "learning_rate": 6.202530940560897e-06,
      "loss": 0.485,
      "step": 895
    },
    {
      "epoch": 0.4833512352309345,
      "grad_norm": 2.610281937328259,
      "learning_rate": 6.156965604725202e-06,
      "loss": 0.4882,
      "step": 900
    },
    {
      "epoch": 0.48603651987110635,
      "grad_norm": 2.3718642604920834,
      "learning_rate": 6.111298520534514e-06,
      "loss": 0.4866,
      "step": 905
    },
    {
      "epoch": 0.48872180451127817,
      "grad_norm": 2.3757882082854573,
      "learning_rate": 6.065533704141666e-06,
      "loss": 0.4798,
      "step": 910
    },
    {
      "epoch": 0.49140708915145004,
      "grad_norm": 2.3521649976871712,
      "learning_rate": 6.019675180294469e-06,
      "loss": 0.4876,
      "step": 915
    },
    {
      "epoch": 0.4940923737916219,
      "grad_norm": 2.263694366342218,
      "learning_rate": 5.973726981981756e-06,
      "loss": 0.4838,
      "step": 920
    },
    {
      "epoch": 0.4967776584317938,
      "grad_norm": 2.574827738131497,
      "learning_rate": 5.9276931500787045e-06,
      "loss": 0.4893,
      "step": 925
    },
    {
      "epoch": 0.49946294307196565,
      "grad_norm": 2.3280039732972355,
      "learning_rate": 5.8815777329914655e-06,
      "loss": 0.4748,
      "step": 930
    },
    {
      "epoch": 0.5021482277121375,
      "grad_norm": 2.472054452574987,
      "learning_rate": 5.83538478630113e-06,
      "loss": 0.4906,
      "step": 935
    },
    {
      "epoch": 0.5048335123523093,
      "grad_norm": 2.3804196285899186,
      "learning_rate": 5.789118372407061e-06,
      "loss": 0.4631,
      "step": 940
    },
    {
      "epoch": 0.5075187969924813,
      "grad_norm": 2.3558638927371742,
      "learning_rate": 5.7427825601696376e-06,
      "loss": 0.4691,
      "step": 945
    },
    {
      "epoch": 0.5102040816326531,
      "grad_norm": 2.359879850321753,
      "learning_rate": 5.696381424552411e-06,
      "loss": 0.4639,
      "step": 950
    },
    {
      "epoch": 0.5128893662728249,
      "grad_norm": 2.322787328071383,
      "learning_rate": 5.649919046263742e-06,
      "loss": 0.465,
      "step": 955
    },
    {
      "epoch": 0.5155746509129968,
      "grad_norm": 2.4521402907626646,
      "learning_rate": 5.6033995113979336e-06,
      "loss": 0.4623,
      "step": 960
    },
    {
      "epoch": 0.5182599355531686,
      "grad_norm": 2.2573376873830284,
      "learning_rate": 5.556826911075867e-06,
      "loss": 0.4642,
      "step": 965
    },
    {
      "epoch": 0.5209452201933404,
      "grad_norm": 2.398522707712972,
      "learning_rate": 5.510205341085224e-06,
      "loss": 0.4754,
      "step": 970
    },
    {
      "epoch": 0.5236305048335124,
      "grad_norm": 2.4170684561458757,
      "learning_rate": 5.463538901520278e-06,
      "loss": 0.4602,
      "step": 975
    },
    {
      "epoch": 0.5263157894736842,
      "grad_norm": 2.3207712466054096,
      "learning_rate": 5.416831696421325e-06,
      "loss": 0.464,
      "step": 980
    },
    {
      "epoch": 0.5290010741138561,
      "grad_norm": 2.4520936790108108,
      "learning_rate": 5.370087833413747e-06,
      "loss": 0.47,
      "step": 985
    },
    {
      "epoch": 0.5316863587540279,
      "grad_norm": 2.382760350655894,
      "learning_rate": 5.323311423346775e-06,
      "loss": 0.4744,
      "step": 990
    },
    {
      "epoch": 0.5343716433941997,
      "grad_norm": 2.392728462627693,
      "learning_rate": 5.2765065799319646e-06,
      "loss": 0.4539,
      "step": 995
    },
    {
      "epoch": 0.5370569280343717,
      "grad_norm": 2.317069450690583,
      "learning_rate": 5.229677419381417e-06,
      "loss": 0.4724,
      "step": 1000
    },
    {
      "epoch": 0.5397422126745435,
      "grad_norm": 2.181814505403613,
      "learning_rate": 5.182828060045783e-06,
      "loss": 0.4709,
      "step": 1005
    },
    {
      "epoch": 0.5424274973147154,
      "grad_norm": 2.2356286647456862,
      "learning_rate": 5.1359626220520804e-06,
      "loss": 0.4543,
      "step": 1010
    },
    {
      "epoch": 0.5451127819548872,
      "grad_norm": 2.309541338501052,
      "learning_rate": 5.08908522694135e-06,
      "loss": 0.4445,
      "step": 1015
    },
    {
      "epoch": 0.547798066595059,
      "grad_norm": 2.334557739373175,
      "learning_rate": 5.042199997306185e-06,
      "loss": 0.4579,
      "step": 1020
    },
    {
      "epoch": 0.550483351235231,
      "grad_norm": 2.3861434626116633,
      "learning_rate": 4.995311056428192e-06,
      "loss": 0.4487,
      "step": 1025
    },
    {
      "epoch": 0.5531686358754028,
      "grad_norm": 2.277598529970781,
      "learning_rate": 4.948422527915348e-06,
      "loss": 0.459,
      "step": 1030
    },
    {
      "epoch": 0.5558539205155747,
      "grad_norm": 2.5154062392897885,
      "learning_rate": 4.901538535339369e-06,
      "loss": 0.4471,
      "step": 1035
    },
    {
      "epoch": 0.5585392051557465,
      "grad_norm": 2.290644492599463,
      "learning_rate": 4.854663201873066e-06,
      "loss": 0.4534,
      "step": 1040
    },
    {
      "epoch": 0.5612244897959183,
      "grad_norm": 2.3507551432764715,
      "learning_rate": 4.807800649927723e-06,
      "loss": 0.4463,
      "step": 1045
    },
    {
      "epoch": 0.5639097744360902,
      "grad_norm": 2.1774339041234225,
      "learning_rate": 4.760955000790572e-06,
      "loss": 0.4365,
      "step": 1050
    },
    {
      "epoch": 0.5665950590762621,
      "grad_norm": 2.1529579750128454,
      "learning_rate": 4.714130374262339e-06,
      "loss": 0.4227,
      "step": 1055
    },
    {
      "epoch": 0.569280343716434,
      "grad_norm": 2.320093713572853,
      "learning_rate": 4.667330888294934e-06,
      "loss": 0.4212,
      "step": 1060
    },
    {
      "epoch": 0.5719656283566058,
      "grad_norm": 2.2930843074583427,
      "learning_rate": 4.620560658629303e-06,
      "loss": 0.4284,
      "step": 1065
    },
    {
      "epoch": 0.5746509129967776,
      "grad_norm": 2.2548004697834165,
      "learning_rate": 4.573823798433469e-06,
      "loss": 0.4446,
      "step": 1070
    },
    {
      "epoch": 0.5773361976369495,
      "grad_norm": 2.6223716299704347,
      "learning_rate": 4.527124417940812e-06,
      "loss": 0.4569,
      "step": 1075
    },
    {
      "epoch": 0.5800214822771214,
      "grad_norm": 2.2996066174355776,
      "learning_rate": 4.48046662408858e-06,
      "loss": 0.443,
      "step": 1080
    },
    {
      "epoch": 0.5827067669172933,
      "grad_norm": 2.331188250468318,
      "learning_rate": 4.43385452015673e-06,
      "loss": 0.4373,
      "step": 1085
    },
    {
      "epoch": 0.5853920515574651,
      "grad_norm": 2.2636646634515527,
      "learning_rate": 4.387292205407055e-06,
      "loss": 0.4359,
      "step": 1090
    },
    {
      "epoch": 0.5880773361976369,
      "grad_norm": 2.2831377194175033,
      "learning_rate": 4.3407837747226765e-06,
      "loss": 0.4364,
      "step": 1095
    },
    {
      "epoch": 0.5907626208378088,
      "grad_norm": 2.2946882078675146,
      "learning_rate": 4.29433331824793e-06,
      "loss": 0.4261,
      "step": 1100
    },
    {
      "epoch": 0.5934479054779807,
      "grad_norm": 2.2943723069744664,
      "learning_rate": 4.247944921028663e-06,
      "loss": 0.4358,
      "step": 1105
    },
    {
      "epoch": 0.5961331901181526,
      "grad_norm": 2.579316764381701,
      "learning_rate": 4.201622662652972e-06,
      "loss": 0.4204,
      "step": 1110
    },
    {
      "epoch": 0.5988184747583244,
      "grad_norm": 2.1742173999609737,
      "learning_rate": 4.155370616892422e-06,
      "loss": 0.4269,
      "step": 1115
    },
    {
      "epoch": 0.6015037593984962,
      "grad_norm": 2.6709708559911935,
      "learning_rate": 4.1091928513438005e-06,
      "loss": 0.4259,
      "step": 1120
    },
    {
      "epoch": 0.6041890440386681,
      "grad_norm": 2.1624211921715277,
      "learning_rate": 4.063093427071376e-06,
      "loss": 0.4311,
      "step": 1125
    },
    {
      "epoch": 0.60687432867884,
      "grad_norm": 2.3121026535251294,
      "learning_rate": 4.0170763982497716e-06,
      "loss": 0.4123,
      "step": 1130
    },
    {
      "epoch": 0.6095596133190118,
      "grad_norm": 2.291719266377087,
      "learning_rate": 3.971145811807397e-06,
      "loss": 0.4332,
      "step": 1135
    },
    {
      "epoch": 0.6122448979591837,
      "grad_norm": 2.151850103320739,
      "learning_rate": 3.925305707070572e-06,
      "loss": 0.4242,
      "step": 1140
    },
    {
      "epoch": 0.6149301825993555,
      "grad_norm": 2.199401051328622,
      "learning_rate": 3.879560115408279e-06,
      "loss": 0.4158,
      "step": 1145
    },
    {
      "epoch": 0.6176154672395274,
      "grad_norm": 2.229032836245145,
      "learning_rate": 3.833913059877622e-06,
      "loss": 0.4221,
      "step": 1150
    },
    {
      "epoch": 0.6203007518796992,
      "grad_norm": 2.29573891109709,
      "learning_rate": 3.788368554870034e-06,
      "loss": 0.4213,
      "step": 1155
    },
    {
      "epoch": 0.6229860365198711,
      "grad_norm": 2.1322887575075784,
      "learning_rate": 3.7429306057582306e-06,
      "loss": 0.4191,
      "step": 1160
    },
    {
      "epoch": 0.625671321160043,
      "grad_norm": 2.2395834148078198,
      "learning_rate": 3.6976032085439563e-06,
      "loss": 0.4203,
      "step": 1165
    },
    {
      "epoch": 0.6283566058002148,
      "grad_norm": 2.4717439033137625,
      "learning_rate": 3.6523903495065634e-06,
      "loss": 0.4186,
      "step": 1170
    },
    {
      "epoch": 0.6310418904403867,
      "grad_norm": 2.1334153639365976,
      "learning_rate": 3.6072960048524443e-06,
      "loss": 0.4142,
      "step": 1175
    },
    {
      "epoch": 0.6337271750805585,
      "grad_norm": 2.427052147510359,
      "learning_rate": 3.562324140365343e-06,
      "loss": 0.3999,
      "step": 1180
    },
    {
      "epoch": 0.6364124597207304,
      "grad_norm": 2.343168557743432,
      "learning_rate": 3.5174787110575904e-06,
      "loss": 0.4236,
      "step": 1185
    },
    {
      "epoch": 0.6390977443609023,
      "grad_norm": 2.1562840384388213,
      "learning_rate": 3.4727636608222813e-06,
      "loss": 0.4245,
      "step": 1190
    },
    {
      "epoch": 0.6417830290010741,
      "grad_norm": 2.36892688222517,
      "learning_rate": 3.428182922086437e-06,
      "loss": 0.4228,
      "step": 1195
    },
    {
      "epoch": 0.644468313641246,
      "grad_norm": 2.156449921799051,
      "learning_rate": 3.3837404154651677e-06,
      "loss": 0.415,
      "step": 1200
    },
    {
      "epoch": 0.6471535982814178,
      "grad_norm": 2.368201965573027,
      "learning_rate": 3.3394400494168745e-06,
      "loss": 0.4045,
      "step": 1205
    },
    {
      "epoch": 0.6498388829215896,
      "grad_norm": 2.272852040156483,
      "learning_rate": 3.2952857198995304e-06,
      "loss": 0.4206,
      "step": 1210
    },
    {
      "epoch": 0.6525241675617616,
      "grad_norm": 2.1923519803290206,
      "learning_rate": 3.2512813100280494e-06,
      "loss": 0.3881,
      "step": 1215
    },
    {
      "epoch": 0.6552094522019334,
      "grad_norm": 2.1711514227665556,
      "learning_rate": 3.2074306897327913e-06,
      "loss": 0.4089,
      "step": 1220
    },
    {
      "epoch": 0.6578947368421053,
      "grad_norm": 2.209964102937834,
      "learning_rate": 3.163737715419216e-06,
      "loss": 0.4131,
      "step": 1225
    },
    {
      "epoch": 0.6605800214822771,
      "grad_norm": 2.143618617488237,
      "learning_rate": 3.1202062296287507e-06,
      "loss": 0.4015,
      "step": 1230
    },
    {
      "epoch": 0.6632653061224489,
      "grad_norm": 2.1799040308872875,
      "learning_rate": 3.076840060700848e-06,
      "loss": 0.4186,
      "step": 1235
    },
    {
      "epoch": 0.6659505907626209,
      "grad_norm": 2.152210047047405,
      "learning_rate": 3.0336430224363067e-06,
      "loss": 0.4149,
      "step": 1240
    },
    {
      "epoch": 0.6686358754027927,
      "grad_norm": 2.2584562292798362,
      "learning_rate": 2.990618913761876e-06,
      "loss": 0.3926,
      "step": 1245
    },
    {
      "epoch": 0.6713211600429646,
      "grad_norm": 2.291181695240791,
      "learning_rate": 2.9477715183961634e-06,
      "loss": 0.3955,
      "step": 1250
    },
    {
      "epoch": 0.6740064446831364,
      "grad_norm": 2.1922026655553717,
      "learning_rate": 2.905104604516872e-06,
      "loss": 0.394,
      "step": 1255
    },
    {
      "epoch": 0.6766917293233082,
      "grad_norm": 2.3493386069538227,
      "learning_rate": 2.8626219244294074e-06,
      "loss": 0.3969,
      "step": 1260
    },
    {
      "epoch": 0.6793770139634802,
      "grad_norm": 2.2115983870340172,
      "learning_rate": 2.820327214236904e-06,
      "loss": 0.3937,
      "step": 1265
    },
    {
      "epoch": 0.682062298603652,
      "grad_norm": 2.1951996772318934,
      "learning_rate": 2.7782241935116337e-06,
      "loss": 0.4049,
      "step": 1270
    },
    {
      "epoch": 0.6847475832438239,
      "grad_norm": 2.425201994001083,
      "learning_rate": 2.7363165649679045e-06,
      "loss": 0.403,
      "step": 1275
    },
    {
      "epoch": 0.6874328678839957,
      "grad_norm": 2.2240784535487883,
      "learning_rate": 2.6946080141364295e-06,
      "loss": 0.3873,
      "step": 1280
    },
    {
      "epoch": 0.6901181525241675,
      "grad_norm": 2.2329860565273054,
      "learning_rate": 2.6531022090401946e-06,
      "loss": 0.3902,
      "step": 1285
    },
    {
      "epoch": 0.6928034371643395,
      "grad_norm": 2.327403916708101,
      "learning_rate": 2.611802799871893e-06,
      "loss": 0.3913,
      "step": 1290
    },
    {
      "epoch": 0.6954887218045113,
      "grad_norm": 2.1904991482392018,
      "learning_rate": 2.570713418672893e-06,
      "loss": 0.3919,
      "step": 1295
    },
    {
      "epoch": 0.6981740064446831,
      "grad_norm": 2.193162643266735,
      "learning_rate": 2.529837679013841e-06,
      "loss": 0.3878,
      "step": 1300
    },
    {
      "epoch": 0.700859291084855,
      "grad_norm": 2.37196563077284,
      "learning_rate": 2.4891791756768587e-06,
      "loss": 0.393,
      "step": 1305
    },
    {
      "epoch": 0.7035445757250268,
      "grad_norm": 2.165348884869985,
      "learning_rate": 2.4487414843394015e-06,
      "loss": 0.3931,
      "step": 1310
    },
    {
      "epoch": 0.7062298603651987,
      "grad_norm": 2.1614220141607277,
      "learning_rate": 2.4085281612598027e-06,
      "loss": 0.3912,
      "step": 1315
    },
    {
      "epoch": 0.7089151450053706,
      "grad_norm": 2.169220432212257,
      "learning_rate": 2.368542742964519e-06,
      "loss": 0.3869,
      "step": 1320
    },
    {
      "epoch": 0.7116004296455424,
      "grad_norm": 2.2172750452575154,
      "learning_rate": 2.3287887459371193e-06,
      "loss": 0.39,
      "step": 1325
    },
    {
      "epoch": 0.7142857142857143,
      "grad_norm": 2.386818932717909,
      "learning_rate": 2.2892696663090223e-06,
      "loss": 0.3852,
      "step": 1330
    },
    {
      "epoch": 0.7169709989258861,
      "grad_norm": 2.1061167664528395,
      "learning_rate": 2.2499889795520342e-06,
      "loss": 0.3759,
      "step": 1335
    },
    {
      "epoch": 0.719656283566058,
      "grad_norm": 2.3100938865418454,
      "learning_rate": 2.2109501401727097e-06,
      "loss": 0.385,
      "step": 1340
    },
    {
      "epoch": 0.7223415682062299,
      "grad_norm": 2.108999556383698,
      "learning_rate": 2.1721565814085355e-06,
      "loss": 0.3909,
      "step": 1345
    },
    {
      "epoch": 0.7250268528464017,
      "grad_norm": 2.2870436769769182,
      "learning_rate": 2.133611714926005e-06,
      "loss": 0.387,
      "step": 1350
    },
    {
      "epoch": 0.7277121374865736,
      "grad_norm": 2.0105731033958594,
      "learning_rate": 2.0953189305205847e-06,
      "loss": 0.3784,
      "step": 1355
    },
    {
      "epoch": 0.7303974221267454,
      "grad_norm": 2.0901917154894716,
      "learning_rate": 2.057281595818592e-06,
      "loss": 0.3859,
      "step": 1360
    },
    {
      "epoch": 0.7330827067669173,
      "grad_norm": 2.225721981804376,
      "learning_rate": 2.0195030559810387e-06,
      "loss": 0.3877,
      "step": 1365
    },
    {
      "epoch": 0.7357679914070892,
      "grad_norm": 2.1392668817849643,
      "learning_rate": 1.981986633409447e-06,
      "loss": 0.3846,
      "step": 1370
    },
    {
      "epoch": 0.738453276047261,
      "grad_norm": 2.255949862237254,
      "learning_rate": 1.944735627453654e-06,
      "loss": 0.3702,
      "step": 1375
    },
    {
      "epoch": 0.7411385606874329,
      "grad_norm": 2.223172130508635,
      "learning_rate": 1.907753314121662e-06,
      "loss": 0.3706,
      "step": 1380
    },
    {
      "epoch": 0.7438238453276047,
      "grad_norm": 2.120940618444079,
      "learning_rate": 1.8710429457915269e-06,
      "loss": 0.3723,
      "step": 1385
    },
    {
      "epoch": 0.7465091299677766,
      "grad_norm": 2.1971361335849506,
      "learning_rate": 1.834607750925333e-06,
      "loss": 0.3834,
      "step": 1390
    },
    {
      "epoch": 0.7491944146079484,
      "grad_norm": 2.0298699482093423,
      "learning_rate": 1.7984509337852724e-06,
      "loss": 0.3781,
      "step": 1395
    },
    {
      "epoch": 0.7518796992481203,
      "grad_norm": 1.9834275411391264,
      "learning_rate": 1.7625756741518407e-06,
      "loss": 0.3788,
      "step": 1400
    },
    {
      "epoch": 0.7545649838882922,
      "grad_norm": 2.0910842672856993,
      "learning_rate": 1.7269851270441978e-06,
      "loss": 0.3754,
      "step": 1405
    },
    {
      "epoch": 0.757250268528464,
      "grad_norm": 2.0335731739498093,
      "learning_rate": 1.6916824224427098e-06,
      "loss": 0.3725,
      "step": 1410
    },
    {
      "epoch": 0.7599355531686359,
      "grad_norm": 2.225103100697698,
      "learning_rate": 1.6566706650136704e-06,
      "loss": 0.3718,
      "step": 1415
    },
    {
      "epoch": 0.7626208378088077,
      "grad_norm": 2.240436179080048,
      "learning_rate": 1.6219529338362756e-06,
      "loss": 0.3749,
      "step": 1420
    },
    {
      "epoch": 0.7653061224489796,
      "grad_norm": 2.2290538372638045,
      "learning_rate": 1.587532282131835e-06,
      "loss": 0.3825,
      "step": 1425
    },
    {
      "epoch": 0.7679914070891515,
      "grad_norm": 2.0263712228907957,
      "learning_rate": 1.5534117369952523e-06,
      "loss": 0.371,
      "step": 1430
    },
    {
      "epoch": 0.7706766917293233,
      "grad_norm": 2.2797523248416005,
      "learning_rate": 1.5195942991288214e-06,
      "loss": 0.3638,
      "step": 1435
    },
    {
      "epoch": 0.7733619763694952,
      "grad_norm": 2.0115399295361733,
      "learning_rate": 1.4860829425783151e-06,
      "loss": 0.3733,
      "step": 1440
    },
    {
      "epoch": 0.776047261009667,
      "grad_norm": 2.2080947234810493,
      "learning_rate": 1.4528806144714546e-06,
      "loss": 0.3778,
      "step": 1445
    },
    {
      "epoch": 0.7787325456498388,
      "grad_norm": 2.290998397138441,
      "learning_rate": 1.4199902347587186e-06,
      "loss": 0.3656,
      "step": 1450
    },
    {
      "epoch": 0.7814178302900108,
      "grad_norm": 2.228838980742681,
      "learning_rate": 1.3874146959565387e-06,
      "loss": 0.3768,
      "step": 1455
    },
    {
      "epoch": 0.7841031149301826,
      "grad_norm": 2.257284588661445,
      "learning_rate": 1.3551568628929434e-06,
      "loss": 0.3702,
      "step": 1460
    },
    {
      "epoch": 0.7867883995703545,
      "grad_norm": 2.0867760648839884,
      "learning_rate": 1.3232195724555919e-06,
      "loss": 0.3613,
      "step": 1465
    },
    {
      "epoch": 0.7894736842105263,
      "grad_norm": 2.0536005270696687,
      "learning_rate": 1.291605633342301e-06,
      "loss": 0.3672,
      "step": 1470
    },
    {
      "epoch": 0.7921589688506981,
      "grad_norm": 2.1344491365761695,
      "learning_rate": 1.2603178258140264e-06,
      "loss": 0.363,
      "step": 1475
    },
    {
      "epoch": 0.7948442534908701,
      "grad_norm": 2.1698541824902953,
      "learning_rate": 1.22935890145036e-06,
      "loss": 0.3663,
      "step": 1480
    },
    {
      "epoch": 0.7975295381310419,
      "grad_norm": 2.1279360172580213,
      "learning_rate": 1.198731582907547e-06,
      "loss": 0.3645,
      "step": 1485
    },
    {
      "epoch": 0.8002148227712137,
      "grad_norm": 2.169055503223085,
      "learning_rate": 1.1684385636790368e-06,
      "loss": 0.3599,
      "step": 1490
    },
    {
      "epoch": 0.8029001074113856,
      "grad_norm": 1.9187045539119953,
      "learning_rate": 1.1384825078586114e-06,
      "loss": 0.3522,
      "step": 1495
    },
    {
      "epoch": 0.8055853920515574,
      "grad_norm": 2.2725376974932945,
      "learning_rate": 1.108866049906096e-06,
      "loss": 0.3627,
      "step": 1500
    },
    {
      "epoch": 0.8082706766917294,
      "grad_norm": 2.0445590327981824,
      "learning_rate": 1.079591794415667e-06,
      "loss": 0.3652,
      "step": 1505
    },
    {
      "epoch": 0.8109559613319012,
      "grad_norm": 2.160848009226544,
      "learning_rate": 1.0506623158867952e-06,
      "loss": 0.3692,
      "step": 1510
    },
    {
      "epoch": 0.813641245972073,
      "grad_norm": 2.073859022962504,
      "learning_rate": 1.0220801584978408e-06,
      "loss": 0.3639,
      "step": 1515
    },
    {
      "epoch": 0.8163265306122449,
      "grad_norm": 2.033806455267016,
      "learning_rate": 9.93847835882296e-07,
      "loss": 0.3517,
      "step": 1520
    },
    {
      "epoch": 0.8190118152524167,
      "grad_norm": 2.26746303873919,
      "learning_rate": 9.659678309077374e-07,
      "loss": 0.3516,
      "step": 1525
    },
    {
      "epoch": 0.8216970998925887,
      "grad_norm": 2.141101672161054,
      "learning_rate": 9.384425954574617e-07,
      "loss": 0.3607,
      "step": 1530
    },
    {
      "epoch": 0.8243823845327605,
      "grad_norm": 2.131127304156173,
      "learning_rate": 9.112745502148629e-07,
      "loss": 0.3618,
      "step": 1535
    },
    {
      "epoch": 0.8270676691729323,
      "grad_norm": 2.1498231144464026,
      "learning_rate": 8.844660844505498e-07,
      "loss": 0.3573,
      "step": 1540
    },
    {
      "epoch": 0.8297529538131042,
      "grad_norm": 1.9731227944157326,
      "learning_rate": 8.580195558122167e-07,
      "loss": 0.3691,
      "step": 1545
    },
    {
      "epoch": 0.832438238453276,
      "grad_norm": 2.068766813819825,
      "learning_rate": 8.319372901173045e-07,
      "loss": 0.3525,
      "step": 1550
    },
    {
      "epoch": 0.835123523093448,
      "grad_norm": 2.0577739725964075,
      "learning_rate": 8.062215811484652e-07,
      "loss": 0.3679,
      "step": 1555
    },
    {
      "epoch": 0.8378088077336198,
      "grad_norm": 2.0681341001622204,
      "learning_rate": 7.808746904518261e-07,
      "loss": 0.3589,
      "step": 1560
    },
    {
      "epoch": 0.8404940923737916,
      "grad_norm": 2.0118053393234216,
      "learning_rate": 7.558988471381063e-07,
      "loss": 0.3523,
      "step": 1565
    },
    {
      "epoch": 0.8431793770139635,
      "grad_norm": 2.0792733438147453,
      "learning_rate": 7.312962476865831e-07,
      "loss": 0.3485,
      "step": 1570
    },
    {
      "epoch": 0.8458646616541353,
      "grad_norm": 2.040693327935837,
      "learning_rate": 7.070690557519139e-07,
      "loss": 0.3605,
      "step": 1575
    },
    {
      "epoch": 0.8485499462943072,
      "grad_norm": 2.0885341784359195,
      "learning_rate": 6.832194019738686e-07,
      "loss": 0.3597,
      "step": 1580
    },
    {
      "epoch": 0.8512352309344791,
      "grad_norm": 2.0494324488359315,
      "learning_rate": 6.597493837899377e-07,
      "loss": 0.334,
      "step": 1585
    },
    {
      "epoch": 0.8539205155746509,
      "grad_norm": 2.370416793000066,
      "learning_rate": 6.366610652508875e-07,
      "loss": 0.3699,
      "step": 1590
    },
    {
      "epoch": 0.8566058002148228,
      "grad_norm": 2.013431987936131,
      "learning_rate": 6.13956476839237e-07,
      "loss": 0.3433,
      "step": 1595
    },
    {
      "epoch": 0.8592910848549946,
      "grad_norm": 2.248933148588524,
      "learning_rate": 5.916376152906761e-07,
      "loss": 0.3619,
      "step": 1600
    },
    {
      "epoch": 0.8619763694951665,
      "grad_norm": 2.199576156596231,
      "learning_rate": 5.697064434184835e-07,
      "loss": 0.3585,
      "step": 1605
    },
    {
      "epoch": 0.8646616541353384,
      "grad_norm": 2.0492788724705564,
      "learning_rate": 5.481648899408943e-07,
      "loss": 0.351,
      "step": 1610
    },
    {
      "epoch": 0.8673469387755102,
      "grad_norm": 2.1643262087841832,
      "learning_rate": 5.270148493114896e-07,
      "loss": 0.3613,
      "step": 1615
    },
    {
      "epoch": 0.8700322234156821,
      "grad_norm": 2.0363688955790775,
      "learning_rate": 5.062581815525819e-07,
      "loss": 0.3536,
      "step": 1620
    },
    {
      "epoch": 0.8727175080558539,
      "grad_norm": 2.2102662292719226,
      "learning_rate": 4.858967120916436e-07,
      "loss": 0.3691,
      "step": 1625
    },
    {
      "epoch": 0.8754027926960258,
      "grad_norm": 2.068954213267209,
      "learning_rate": 4.659322316007714e-07,
      "loss": 0.3579,
      "step": 1630
    },
    {
      "epoch": 0.8780880773361976,
      "grad_norm": 2.065009690643561,
      "learning_rate": 4.463664958392017e-07,
      "loss": 0.3476,
      "step": 1635
    },
    {
      "epoch": 0.8807733619763695,
      "grad_norm": 2.132923284888097,
      "learning_rate": 4.2720122549890607e-07,
      "loss": 0.3465,
      "step": 1640
    },
    {
      "epoch": 0.8834586466165414,
      "grad_norm": 2.0233890775516388,
      "learning_rate": 4.084381060532672e-07,
      "loss": 0.3436,
      "step": 1645
    },
    {
      "epoch": 0.8861439312567132,
      "grad_norm": 2.131767022151055,
      "learning_rate": 3.9007878760884856e-07,
      "loss": 0.3484,
      "step": 1650
    },
    {
      "epoch": 0.888829215896885,
      "grad_norm": 2.117063081591432,
      "learning_rate": 3.721248847602771e-07,
      "loss": 0.3557,
      "step": 1655
    },
    {
      "epoch": 0.8915145005370569,
      "grad_norm": 2.0080840103832025,
      "learning_rate": 3.5457797644825376e-07,
      "loss": 0.3575,
      "step": 1660
    },
    {
      "epoch": 0.8941997851772288,
      "grad_norm": 1.9992195938670811,
      "learning_rate": 3.3743960582068745e-07,
      "loss": 0.3563,
      "step": 1665
    },
    {
      "epoch": 0.8968850698174007,
      "grad_norm": 2.0730852522100585,
      "learning_rate": 3.207112800969925e-07,
      "loss": 0.3673,
      "step": 1670
    },
    {
      "epoch": 0.8995703544575725,
      "grad_norm": 2.124531809466262,
      "learning_rate": 3.0439447043553005e-07,
      "loss": 0.3495,
      "step": 1675
    },
    {
      "epoch": 0.9022556390977443,
      "grad_norm": 2.149507198420767,
      "learning_rate": 2.884906118042313e-07,
      "loss": 0.3464,
      "step": 1680
    },
    {
      "epoch": 0.9049409237379162,
      "grad_norm": 2.1429851911430773,
      "learning_rate": 2.730011028544044e-07,
      "loss": 0.3539,
      "step": 1685
    },
    {
      "epoch": 0.907626208378088,
      "grad_norm": 2.033562450055928,
      "learning_rate": 2.579273057977244e-07,
      "loss": 0.3683,
      "step": 1690
    },
    {
      "epoch": 0.91031149301826,
      "grad_norm": 2.099140956543066,
      "learning_rate": 2.4327054628643764e-07,
      "loss": 0.3524,
| "step": 1695 | |
| }, | |
| { | |
| "epoch": 0.9129967776584318, | |
| "grad_norm": 2.0654751576888324, | |
| "learning_rate": 2.29032113296781e-07, | |
| "loss": 0.3441, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.9156820622986036, | |
| "grad_norm": 2.065019806707046, | |
| "learning_rate": 2.1521325901561918e-07, | |
| "loss": 0.3489, | |
| "step": 1705 | |
| }, | |
| { | |
| "epoch": 0.9183673469387755, | |
| "grad_norm": 2.0595682015060177, | |
| "learning_rate": 2.018151987303263e-07, | |
| "loss": 0.3499, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 0.9210526315789473, | |
| "grad_norm": 2.1629799218903494, | |
| "learning_rate": 1.8883911072190619e-07, | |
| "loss": 0.354, | |
| "step": 1715 | |
| }, | |
| { | |
| "epoch": 0.9237379162191193, | |
| "grad_norm": 1.947786552992977, | |
| "learning_rate": 1.7628613616137048e-07, | |
| "loss": 0.3503, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 0.9264232008592911, | |
| "grad_norm": 2.087692154284924, | |
| "learning_rate": 1.6415737900937811e-07, | |
| "loss": 0.3501, | |
| "step": 1725 | |
| }, | |
| { | |
| "epoch": 0.9291084854994629, | |
| "grad_norm": 2.2226555122389406, | |
| "learning_rate": 1.5245390591914877e-07, | |
| "loss": 0.3426, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 0.9317937701396348, | |
| "grad_norm": 1.9281690244795662, | |
| "learning_rate": 1.4117674614265885e-07, | |
| "loss": 0.3598, | |
| "step": 1735 | |
| }, | |
| { | |
| "epoch": 0.9344790547798066, | |
| "grad_norm": 2.145136219649984, | |
| "learning_rate": 1.3032689144012457e-07, | |
| "loss": 0.3526, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 0.9371643394199786, | |
| "grad_norm": 2.0780704803664776, | |
| "learning_rate": 1.1990529599277656e-07, | |
| "loss": 0.3478, | |
| "step": 1745 | |
| }, | |
| { | |
| "epoch": 0.9398496240601504, | |
| "grad_norm": 2.1046408925852664, | |
| "learning_rate": 1.0991287631895498e-07, | |
| "loss": 0.3432, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 0.9425349087003222, | |
| "grad_norm": 1.8655403585442782, | |
| "learning_rate": 1.0035051119349881e-07, | |
| "loss": 0.3443, | |
| "step": 1755 | |
| }, | |
| { | |
| "epoch": 0.9452201933404941, | |
| "grad_norm": 1.9887511942254457, | |
| "learning_rate": 9.121904157046779e-08, | |
| "loss": 0.3483, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 0.9479054779806659, | |
| "grad_norm": 2.07044391687569, | |
| "learning_rate": 8.251927050918262e-08, | |
| "loss": 0.3487, | |
| "step": 1765 | |
| }, | |
| { | |
| "epoch": 0.9505907626208379, | |
| "grad_norm": 2.01578129872718, | |
| "learning_rate": 7.425196310360094e-08, | |
| "loss": 0.3576, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 0.9532760472610097, | |
| "grad_norm": 2.1282924163459325, | |
| "learning_rate": 6.64178464150339e-08, | |
| "loss": 0.3531, | |
| "step": 1775 | |
| }, | |
| { | |
| "epoch": 0.9559613319011815, | |
| "grad_norm": 2.0989453675717407, | |
| "learning_rate": 5.9017609408203445e-08, | |
| "loss": 0.3447, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 0.9586466165413534, | |
| "grad_norm": 2.0934033811862105, | |
| "learning_rate": 5.2051902890651316e-08, | |
| "loss": 0.3502, | |
| "step": 1785 | |
| }, | |
| { | |
| "epoch": 0.9613319011815252, | |
| "grad_norm": 2.1433944065391026, | |
| "learning_rate": 4.5521339455507075e-08, | |
| "loss": 0.3575, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 0.9640171858216972, | |
| "grad_norm": 2.0317297365975806, | |
| "learning_rate": 3.9426493427611177e-08, | |
| "loss": 0.3481, | |
| "step": 1795 | |
| }, | |
| { | |
| "epoch": 0.966702470461869, | |
| "grad_norm": 2.169192328216417, | |
| "learning_rate": 3.3767900813007623e-08, | |
| "loss": 0.3494, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 0.9693877551020408, | |
| "grad_norm": 2.0312846504114392, | |
| "learning_rate": 2.8546059251806092e-08, | |
| "loss": 0.3614, | |
| "step": 1805 | |
| }, | |
| { | |
| "epoch": 0.9720730397422127, | |
| "grad_norm": 2.280532113774367, | |
| "learning_rate": 2.3761427974416383e-08, | |
| "loss": 0.341, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 0.9747583243823845, | |
| "grad_norm": 2.0782148533974323, | |
| "learning_rate": 1.9414427761162423e-08, | |
| "loss": 0.3356, | |
| "step": 1815 | |
| }, | |
| { | |
| "epoch": 0.9774436090225563, | |
| "grad_norm": 2.0216470643000695, | |
| "learning_rate": 1.5505440905276835e-08, | |
| "loss": 0.3463, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 0.9801288936627283, | |
| "grad_norm": 2.005745009335829, | |
| "learning_rate": 1.203481117928007e-08, | |
| "loss": 0.3469, | |
| "step": 1825 | |
| }, | |
| { | |
| "epoch": 0.9828141783029001, | |
| "grad_norm": 2.091918639115214, | |
| "learning_rate": 9.002843804748473e-09, | |
| "loss": 0.3589, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 0.985499462943072, | |
| "grad_norm": 1.9926243886806494, | |
| "learning_rate": 6.409805425470761e-09, | |
| "loss": 0.3425, | |
| "step": 1835 | |
| }, | |
| { | |
| "epoch": 0.9881847475832438, | |
| "grad_norm": 1.9769758283227723, | |
| "learning_rate": 4.255924083999552e-09, | |
| "loss": 0.3621, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 0.9908700322234156, | |
| "grad_norm": 2.1185976376922038, | |
| "learning_rate": 2.5413892015951903e-09, | |
| "loss": 0.3617, | |
| "step": 1845 | |
| }, | |
| { | |
| "epoch": 0.9935553168635876, | |
| "grad_norm": 1.9588801206495003, | |
| "learning_rate": 1.266351561568513e-09, | |
| "loss": 0.3582, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 0.9962406015037594, | |
| "grad_norm": 2.0220772450114204, | |
| "learning_rate": 4.3092329601923664e-10, | |
| "loss": 0.3441, | |
| "step": 1855 | |
| }, | |
| { | |
| "epoch": 0.9989258861439313, | |
| "grad_norm": 2.0638722610945814, | |
| "learning_rate": 3.517787597440148e-11, | |
| "loss": 0.3507, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "eval_runtime": 2.6591, | |
| "eval_samples_per_second": 3.761, | |
| "eval_steps_per_second": 1.128, | |
| "step": 1862 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "step": 1862, | |
| "total_flos": 194932459438080.0, | |
| "train_loss": 0.5118633475710831, | |
| "train_runtime": 20414.4633, | |
| "train_samples_per_second": 1.459, | |
| "train_steps_per_second": 0.091 | |
| } | |
| ], | |
| "logging_steps": 5, | |
| "max_steps": 1862, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 100, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 194932459438080.0, | |
| "train_batch_size": 4, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |