| { | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 1.0, | |
| "eval_steps": 500, | |
| "global_step": 1862, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.0005370569280343716, | |
| "grad_norm": 22.588933949376393, | |
| "learning_rate": 5.3475935828877005e-08, | |
| "loss": 1.3431, | |
| "step": 1 | |
| }, | |
| { | |
| "epoch": 0.0026852846401718583, | |
| "grad_norm": 23.126610303920064, | |
| "learning_rate": 2.6737967914438503e-07, | |
| "loss": 1.3385, | |
| "step": 5 | |
| }, | |
| { | |
| "epoch": 0.0053705692803437165, | |
| "grad_norm": 15.096265452912455, | |
| "learning_rate": 5.347593582887701e-07, | |
| "loss": 1.2761, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.008055853920515575, | |
| "grad_norm": 11.796547912841314, | |
| "learning_rate": 8.021390374331551e-07, | |
| "loss": 1.1302, | |
| "step": 15 | |
| }, | |
| { | |
| "epoch": 0.010741138560687433, | |
| "grad_norm": 8.258152471654855, | |
| "learning_rate": 1.0695187165775401e-06, | |
| "loss": 1.0209, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.01342642320085929, | |
| "grad_norm": 3.436573845389504, | |
| "learning_rate": 1.3368983957219254e-06, | |
| "loss": 0.924, | |
| "step": 25 | |
| }, | |
| { | |
| "epoch": 0.01611170784103115, | |
| "grad_norm": 3.684694803492529, | |
| "learning_rate": 1.6042780748663103e-06, | |
| "loss": 0.89, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.018796992481203006, | |
| "grad_norm": 3.0217633865657, | |
| "learning_rate": 1.8716577540106954e-06, | |
| "loss": 0.8615, | |
| "step": 35 | |
| }, | |
| { | |
| "epoch": 0.021482277121374866, | |
| "grad_norm": 2.922890660969875, | |
| "learning_rate": 2.1390374331550802e-06, | |
| "loss": 0.8394, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.024167561761546726, | |
| "grad_norm": 3.0080941873826115, | |
| "learning_rate": 2.4064171122994653e-06, | |
| "loss": 0.8354, | |
| "step": 45 | |
| }, | |
| { | |
| "epoch": 0.02685284640171858, | |
| "grad_norm": 3.051808184599371, | |
| "learning_rate": 2.673796791443851e-06, | |
| "loss": 0.8281, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.02953813104189044, | |
| "grad_norm": 2.9407863618110697, | |
| "learning_rate": 2.9411764705882355e-06, | |
| "loss": 0.8099, | |
| "step": 55 | |
| }, | |
| { | |
| "epoch": 0.0322234156820623, | |
| "grad_norm": 3.0361086960659502, | |
| "learning_rate": 3.2085561497326205e-06, | |
| "loss": 0.8008, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.03490870032223416, | |
| "grad_norm": 3.0542935660919537, | |
| "learning_rate": 3.4759358288770056e-06, | |
| "loss": 0.7945, | |
| "step": 65 | |
| }, | |
| { | |
| "epoch": 0.03759398496240601, | |
| "grad_norm": 3.1339190627740794, | |
| "learning_rate": 3.7433155080213907e-06, | |
| "loss": 0.7788, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.040279269602577876, | |
| "grad_norm": 2.96097897092436, | |
| "learning_rate": 4.010695187165775e-06, | |
| "loss": 0.7661, | |
| "step": 75 | |
| }, | |
| { | |
| "epoch": 0.04296455424274973, | |
| "grad_norm": 3.3434671670812426, | |
| "learning_rate": 4.2780748663101604e-06, | |
| "loss": 0.7577, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.04564983888292159, | |
| "grad_norm": 3.154114094287673, | |
| "learning_rate": 4.5454545454545455e-06, | |
| "loss": 0.7561, | |
| "step": 85 | |
| }, | |
| { | |
| "epoch": 0.04833512352309345, | |
| "grad_norm": 3.0765656232864638, | |
| "learning_rate": 4.812834224598931e-06, | |
| "loss": 0.753, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.05102040816326531, | |
| "grad_norm": 3.2354663116960323, | |
| "learning_rate": 5.0802139037433165e-06, | |
| "loss": 0.7524, | |
| "step": 95 | |
| }, | |
| { | |
| "epoch": 0.05370569280343716, | |
| "grad_norm": 3.1193615385736244, | |
| "learning_rate": 5.347593582887702e-06, | |
| "loss": 0.7427, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.05639097744360902, | |
| "grad_norm": 3.1619975304049732, | |
| "learning_rate": 5.614973262032086e-06, | |
| "loss": 0.744, | |
| "step": 105 | |
| }, | |
| { | |
| "epoch": 0.05907626208378088, | |
| "grad_norm": 3.061338630250798, | |
| "learning_rate": 5.882352941176471e-06, | |
| "loss": 0.7315, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.06176154672395274, | |
| "grad_norm": 3.0857057085739927, | |
| "learning_rate": 6.149732620320856e-06, | |
| "loss": 0.7152, | |
| "step": 115 | |
| }, | |
| { | |
| "epoch": 0.0644468313641246, | |
| "grad_norm": 2.9043524672550096, | |
| "learning_rate": 6.417112299465241e-06, | |
| "loss": 0.7206, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.06713211600429646, | |
| "grad_norm": 2.9221477672316616, | |
| "learning_rate": 6.684491978609626e-06, | |
| "loss": 0.7281, | |
| "step": 125 | |
| }, | |
| { | |
| "epoch": 0.06981740064446831, | |
| "grad_norm": 2.9152912041724166, | |
| "learning_rate": 6.951871657754011e-06, | |
| "loss": 0.7131, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.07250268528464017, | |
| "grad_norm": 2.9329788036774715, | |
| "learning_rate": 7.219251336898396e-06, | |
| "loss": 0.7079, | |
| "step": 135 | |
| }, | |
| { | |
| "epoch": 0.07518796992481203, | |
| "grad_norm": 3.0375037435968544, | |
| "learning_rate": 7.486631016042781e-06, | |
| "loss": 0.7093, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.07787325456498388, | |
| "grad_norm": 3.051270674011824, | |
| "learning_rate": 7.754010695187166e-06, | |
| "loss": 0.7006, | |
| "step": 145 | |
| }, | |
| { | |
| "epoch": 0.08055853920515575, | |
| "grad_norm": 2.9898744604886835, | |
| "learning_rate": 8.02139037433155e-06, | |
| "loss": 0.718, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.08324382384532761, | |
| "grad_norm": 2.8066106411748484, | |
| "learning_rate": 8.288770053475937e-06, | |
| "loss": 0.7019, | |
| "step": 155 | |
| }, | |
| { | |
| "epoch": 0.08592910848549946, | |
| "grad_norm": 2.8665874154337407, | |
| "learning_rate": 8.556149732620321e-06, | |
| "loss": 0.7072, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.08861439312567132, | |
| "grad_norm": 2.8422820645094644, | |
| "learning_rate": 8.823529411764707e-06, | |
| "loss": 0.7059, | |
| "step": 165 | |
| }, | |
| { | |
| "epoch": 0.09129967776584318, | |
| "grad_norm": 2.91458979270415, | |
| "learning_rate": 9.090909090909091e-06, | |
| "loss": 0.7098, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.09398496240601503, | |
| "grad_norm": 2.8378395890064994, | |
| "learning_rate": 9.358288770053477e-06, | |
| "loss": 0.6841, | |
| "step": 175 | |
| }, | |
| { | |
| "epoch": 0.0966702470461869, | |
| "grad_norm": 2.894382082134305, | |
| "learning_rate": 9.625668449197861e-06, | |
| "loss": 0.7102, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.09935553168635876, | |
| "grad_norm": 2.9031109171921563, | |
| "learning_rate": 9.893048128342247e-06, | |
| "loss": 0.6908, | |
| "step": 185 | |
| }, | |
| { | |
| "epoch": 0.10204081632653061, | |
| "grad_norm": 2.9094340000858496, | |
| "learning_rate": 9.999920849895074e-06, | |
| "loss": 0.7011, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.10472610096670247, | |
| "grad_norm": 2.8398808111732294, | |
| "learning_rate": 9.99943716388422e-06, | |
| "loss": 0.7102, | |
| "step": 195 | |
| }, | |
| { | |
| "epoch": 0.10741138560687433, | |
| "grad_norm": 2.6933626648843565, | |
| "learning_rate": 9.998513806628825e-06, | |
| "loss": 0.7048, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.11009667024704618, | |
| "grad_norm": 2.7619637369366257, | |
| "learning_rate": 9.997150859332753e-06, | |
| "loss": 0.7019, | |
| "step": 205 | |
| }, | |
| { | |
| "epoch": 0.11278195488721804, | |
| "grad_norm": 2.739895854677953, | |
| "learning_rate": 9.995348441859243e-06, | |
| "loss": 0.6989, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.11546723952738991, | |
| "grad_norm": 2.6876684923510457, | |
| "learning_rate": 9.993106712720367e-06, | |
| "loss": 0.6856, | |
| "step": 215 | |
| }, | |
| { | |
| "epoch": 0.11815252416756176, | |
| "grad_norm": 2.733126603672575, | |
| "learning_rate": 9.990425869063085e-06, | |
| "loss": 0.6983, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.12083780880773362, | |
| "grad_norm": 2.59577973330998, | |
| "learning_rate": 9.987306146651908e-06, | |
| "loss": 0.6885, | |
| "step": 225 | |
| }, | |
| { | |
| "epoch": 0.12352309344790548, | |
| "grad_norm": 2.6403871223270956, | |
| "learning_rate": 9.983747819848168e-06, | |
| "loss": 0.6854, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.12620837808807733, | |
| "grad_norm": 2.5755491533468673, | |
| "learning_rate": 9.97975120158589e-06, | |
| "loss": 0.6825, | |
| "step": 235 | |
| }, | |
| { | |
| "epoch": 0.1288936627282492, | |
| "grad_norm": 2.81952622502626, | |
| "learning_rate": 9.975316643344257e-06, | |
| "loss": 0.6928, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.13157894736842105, | |
| "grad_norm": 2.510692499419202, | |
| "learning_rate": 9.970444535116721e-06, | |
| "loss": 0.6871, | |
| "step": 245 | |
| }, | |
| { | |
| "epoch": 0.13426423200859292, | |
| "grad_norm": 3.2383700116067438, | |
| "learning_rate": 9.965135305376694e-06, | |
| "loss": 0.676, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.13694951664876476, | |
| "grad_norm": 2.5471306267663114, | |
| "learning_rate": 9.959389421039863e-06, | |
| "loss": 0.6721, | |
| "step": 255 | |
| }, | |
| { | |
| "epoch": 0.13963480128893663, | |
| "grad_norm": 2.5819890023767975, | |
| "learning_rate": 9.95320738742313e-06, | |
| "loss": 0.6805, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.1423200859291085, | |
| "grad_norm": 2.4981575777247014, | |
| "learning_rate": 9.946589748200185e-06, | |
| "loss": 0.6819, | |
| "step": 265 | |
| }, | |
| { | |
| "epoch": 0.14500537056928034, | |
| "grad_norm": 2.483435225904423, | |
| "learning_rate": 9.939537085353668e-06, | |
| "loss": 0.6742, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.1476906552094522, | |
| "grad_norm": 2.8618041277945125, | |
| "learning_rate": 9.932050019124011e-06, | |
| "loss": 0.6644, | |
| "step": 275 | |
| }, | |
| { | |
| "epoch": 0.15037593984962405, | |
| "grad_norm": 2.450026253755743, | |
| "learning_rate": 9.924129207954877e-06, | |
| "loss": 0.6683, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.15306122448979592, | |
| "grad_norm": 2.9403154726852994, | |
| "learning_rate": 9.915775348435266e-06, | |
| "loss": 0.6623, | |
| "step": 285 | |
| }, | |
| { | |
| "epoch": 0.15574650912996776, | |
| "grad_norm": 2.4748651098342047, | |
| "learning_rate": 9.906989175238232e-06, | |
| "loss": 0.6699, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.15843179377013963, | |
| "grad_norm": 2.4957674990875067, | |
| "learning_rate": 9.8977714610563e-06, | |
| "loss": 0.6689, | |
| "step": 295 | |
| }, | |
| { | |
| "epoch": 0.1611170784103115, | |
| "grad_norm": 2.517686854278863, | |
| "learning_rate": 9.888123016533496e-06, | |
| "loss": 0.6561, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.16380236305048335, | |
| "grad_norm": 2.528466931460507, | |
| "learning_rate": 9.878044690194055e-06, | |
| "loss": 0.6631, | |
| "step": 305 | |
| }, | |
| { | |
| "epoch": 0.16648764769065522, | |
| "grad_norm": 2.5147869487065955, | |
| "learning_rate": 9.86753736836781e-06, | |
| "loss": 0.6479, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.16917293233082706, | |
| "grad_norm": 2.497948806933255, | |
| "learning_rate": 9.856601975112227e-06, | |
| "loss": 0.6407, | |
| "step": 315 | |
| }, | |
| { | |
| "epoch": 0.17185821697099893, | |
| "grad_norm": 2.510845127090968, | |
| "learning_rate": 9.84523947213116e-06, | |
| "loss": 0.6534, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.17454350161117077, | |
| "grad_norm": 2.5052023969942225, | |
| "learning_rate": 9.833450858690257e-06, | |
| "loss": 0.6439, | |
| "step": 325 | |
| }, | |
| { | |
| "epoch": 0.17722878625134264, | |
| "grad_norm": 2.6576117547178955, | |
| "learning_rate": 9.821237171529088e-06, | |
| "loss": 0.6619, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.1799140708915145, | |
| "grad_norm": 2.577684133859449, | |
| "learning_rate": 9.808599484769971e-06, | |
| "loss": 0.6487, | |
| "step": 335 | |
| }, | |
| { | |
| "epoch": 0.18259935553168635, | |
| "grad_norm": 2.6259810690957734, | |
| "learning_rate": 9.79553890982351e-06, | |
| "loss": 0.6485, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.18528464017185822, | |
| "grad_norm": 2.423857881211141, | |
| "learning_rate": 9.782056595290848e-06, | |
| "loss": 0.6342, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.18796992481203006, | |
| "grad_norm": 2.4782068169343168, | |
| "learning_rate": 9.768153726862652e-06, | |
| "loss": 0.6452, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.19065520945220193, | |
| "grad_norm": 2.4443808217010283, | |
| "learning_rate": 9.753831527214854e-06, | |
| "loss": 0.651, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.1933404940923738, | |
| "grad_norm": 2.5231724414182968, | |
| "learning_rate": 9.739091255901105e-06, | |
| "loss": 0.6378, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.19602577873254565, | |
| "grad_norm": 2.6836920080961506, | |
| "learning_rate": 9.723934209242015e-06, | |
| "loss": 0.6414, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.19871106337271752, | |
| "grad_norm": 2.6292452798311574, | |
| "learning_rate": 9.708361720211146e-06, | |
| "loss": 0.6425, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.20139634801288936, | |
| "grad_norm": 2.475346280678399, | |
| "learning_rate": 9.692375158317787e-06, | |
| "loss": 0.6347, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.20408163265306123, | |
| "grad_norm": 2.565339369277964, | |
| "learning_rate": 9.675975929486512e-06, | |
| "loss": 0.6341, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.20676691729323307, | |
| "grad_norm": 2.503462410823786, | |
| "learning_rate": 9.659165475933537e-06, | |
| "loss": 0.6516, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.20945220193340494, | |
| "grad_norm": 2.334079164523965, | |
| "learning_rate": 9.641945276039885e-06, | |
| "loss": 0.6316, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.2121374865735768, | |
| "grad_norm": 2.8684602615312915, | |
| "learning_rate": 9.624316844221376e-06, | |
| "loss": 0.6227, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.21482277121374865, | |
| "grad_norm": 2.5720560384477342, | |
| "learning_rate": 9.606281730795435e-06, | |
| "loss": 0.6278, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.21750805585392052, | |
| "grad_norm": 2.3859750726827653, | |
| "learning_rate": 9.587841521844755e-06, | |
| "loss": 0.6395, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.22019334049409237, | |
| "grad_norm": 2.4723928529602084, | |
| "learning_rate": 9.568997839077812e-06, | |
| "loss": 0.6175, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.22287862513426424, | |
| "grad_norm": 2.3971232752859666, | |
| "learning_rate": 9.549752339686243e-06, | |
| "loss": 0.632, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.22556390977443608, | |
| "grad_norm": 2.6540817678288997, | |
| "learning_rate": 9.530106716199103e-06, | |
| "loss": 0.6301, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.22824919441460795, | |
| "grad_norm": 2.6564997195018987, | |
| "learning_rate": 9.510062696334024e-06, | |
| "loss": 0.6369, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.23093447905477982, | |
| "grad_norm": 2.473291236984028, | |
| "learning_rate": 9.489622042845266e-06, | |
| "loss": 0.6191, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.23361976369495166, | |
| "grad_norm": 2.4712483842974606, | |
| "learning_rate": 9.468786553368694e-06, | |
| "loss": 0.6167, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.23630504833512353, | |
| "grad_norm": 2.4149226844204223, | |
| "learning_rate": 9.447558060263693e-06, | |
| "loss": 0.6197, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.23899033297529537, | |
| "grad_norm": 2.5229379978319377, | |
| "learning_rate": 9.42593843045201e-06, | |
| "loss": 0.6222, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.24167561761546724, | |
| "grad_norm": 2.2881877625047125, | |
| "learning_rate": 9.403929565253582e-06, | |
| "loss": 0.624, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.24436090225563908, | |
| "grad_norm": 2.3854975770678575, | |
| "learning_rate": 9.381533400219319e-06, | |
| "loss": 0.6136, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.24704618689581095, | |
| "grad_norm": 2.3711250141523292, | |
| "learning_rate": 9.358751904960885e-06, | |
| "loss": 0.6112, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.24973147153598282, | |
| "grad_norm": 2.47727212085762, | |
| "learning_rate": 9.335587082977484e-06, | |
| "loss": 0.6065, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.25241675617615467, | |
| "grad_norm": 2.431408916174766, | |
| "learning_rate": 9.312040971479663e-06, | |
| "loss": 0.6265, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.25510204081632654, | |
| "grad_norm": 2.335148053124711, | |
| "learning_rate": 9.288115641210144e-06, | |
| "loss": 0.5992, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.2577873254564984, | |
| "grad_norm": 2.4878167642614994, | |
| "learning_rate": 9.26381319626173e-06, | |
| "loss": 0.6053, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.2604726100966702, | |
| "grad_norm": 2.448826496910039, | |
| "learning_rate": 9.239135773892249e-06, | |
| "loss": 0.6059, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.2631578947368421, | |
| "grad_norm": 2.340365685512314, | |
| "learning_rate": 9.214085544336597e-06, | |
| "loss": 0.6051, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.26584317937701396, | |
| "grad_norm": 2.5813420593776324, | |
| "learning_rate": 9.188664710615886e-06, | |
| "loss": 0.5978, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 0.26852846401718583, | |
| "grad_norm": 2.399275132862282, | |
| "learning_rate": 9.162875508343691e-06, | |
| "loss": 0.5989, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.2712137486573577, | |
| "grad_norm": 2.326846382037779, | |
| "learning_rate": 9.136720205529446e-06, | |
| "loss": 0.5882, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 0.2738990332975295, | |
| "grad_norm": 2.388380879732621, | |
| "learning_rate": 9.110201102378991e-06, | |
| "loss": 0.6045, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.2765843179377014, | |
| "grad_norm": 2.6554743721903167, | |
| "learning_rate": 9.083320531092268e-06, | |
| "loss": 0.5989, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 0.27926960257787325, | |
| "grad_norm": 2.3647832778978946, | |
| "learning_rate": 9.056080855658233e-06, | |
| "loss": 0.5823, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.2819548872180451, | |
| "grad_norm": 2.402783295741927, | |
| "learning_rate": 9.02848447164695e-06, | |
| "loss": 0.5954, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 0.284640171858217, | |
| "grad_norm": 2.4179859370785586, | |
| "learning_rate": 9.00053380599891e-06, | |
| "loss": 0.597, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.2873254564983888, | |
| "grad_norm": 2.490422211781943, | |
| "learning_rate": 8.972231316811608e-06, | |
| "loss": 0.5922, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 0.2900107411385607, | |
| "grad_norm": 2.375268811880951, | |
| "learning_rate": 8.943579493123354e-06, | |
| "loss": 0.5945, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.29269602577873255, | |
| "grad_norm": 2.3671169905346012, | |
| "learning_rate": 8.914580854694389e-06, | |
| "loss": 0.5745, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 0.2953813104189044, | |
| "grad_norm": 2.33265034161182, | |
| "learning_rate": 8.885237951785275e-06, | |
| "loss": 0.5748, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.2980665950590763, | |
| "grad_norm": 2.275342128082187, | |
| "learning_rate": 8.855553364932628e-06, | |
| "loss": 0.5816, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 0.3007518796992481, | |
| "grad_norm": 2.4469604689397992, | |
| "learning_rate": 8.82552970472216e-06, | |
| "loss": 0.5914, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.30343716433942, | |
| "grad_norm": 2.5529524024861057, | |
| "learning_rate": 8.795169611559108e-06, | |
| "loss": 0.5777, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 0.30612244897959184, | |
| "grad_norm": 2.3500889773905294, | |
| "learning_rate": 8.764475755436011e-06, | |
| "loss": 0.5809, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.3088077336197637, | |
| "grad_norm": 2.5580858779377635, | |
| "learning_rate": 8.733450835697914e-06, | |
| "loss": 0.5782, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 0.31149301825993553, | |
| "grad_norm": 2.495153557765296, | |
| "learning_rate": 8.702097580804962e-06, | |
| "loss": 0.5812, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.3141783029001074, | |
| "grad_norm": 2.4356256924449573, | |
| "learning_rate": 8.670418748092465e-06, | |
| "loss": 0.5656, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 0.31686358754027927, | |
| "grad_norm": 2.3744929639725583, | |
| "learning_rate": 8.638417123528393e-06, | |
| "loss": 0.5644, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.31954887218045114, | |
| "grad_norm": 2.336786279322332, | |
| "learning_rate": 8.60609552146837e-06, | |
| "loss": 0.5625, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 0.322234156820623, | |
| "grad_norm": 2.304022490580247, | |
| "learning_rate": 8.573456784408162e-06, | |
| "loss": 0.5706, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.3249194414607948, | |
| "grad_norm": 2.333873371303564, | |
| "learning_rate": 8.540503782733718e-06, | |
| "loss": 0.5682, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 0.3276047261009667, | |
| "grad_norm": 2.453791422071271, | |
| "learning_rate": 8.507239414468707e-06, | |
| "loss": 0.5694, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.33029001074113856, | |
| "grad_norm": 2.6000564411409792, | |
| "learning_rate": 8.473666605019673e-06, | |
| "loss": 0.5441, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 0.33297529538131043, | |
| "grad_norm": 2.492416721336817, | |
| "learning_rate": 8.439788306918759e-06, | |
| "loss": 0.5619, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.3356605800214823, | |
| "grad_norm": 2.3074763084807866, | |
| "learning_rate": 8.405607499564044e-06, | |
| "loss": 0.5632, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 0.3383458646616541, | |
| "grad_norm": 2.349999778843298, | |
| "learning_rate": 8.371127188957527e-06, | |
| "loss": 0.5463, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.341031149301826, | |
| "grad_norm": 2.2581482340935337, | |
| "learning_rate": 8.336350407440766e-06, | |
| "loss": 0.5549, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 0.34371643394199786, | |
| "grad_norm": 2.4384782140353924, | |
| "learning_rate": 8.3012802134282e-06, | |
| "loss": 0.5412, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.3464017185821697, | |
| "grad_norm": 2.5751882880924755, | |
| "learning_rate": 8.26591969113818e-06, | |
| "loss": 0.5567, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 0.34908700322234154, | |
| "grad_norm": 2.518339935894107, | |
| "learning_rate": 8.230271950321733e-06, | |
| "loss": 0.5502, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.3517722878625134, | |
| "grad_norm": 2.448402050557329, | |
| "learning_rate": 8.194340125989068e-06, | |
| "loss": 0.5395, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 0.3544575725026853, | |
| "grad_norm": 2.4499578429656825, | |
| "learning_rate": 8.158127378133886e-06, | |
| "loss": 0.5342, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.35714285714285715, | |
| "grad_norm": 2.5511091018905896, | |
| "learning_rate": 8.121636891455462e-06, | |
| "loss": 0.5484, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 0.359828141783029, | |
| "grad_norm": 2.533340828939454, | |
| "learning_rate": 8.084871875078574e-06, | |
| "loss": 0.5457, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.36251342642320084, | |
| "grad_norm": 2.4448715353894066, | |
| "learning_rate": 8.047835562271289e-06, | |
| "loss": 0.5574, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 0.3651987110633727, | |
| "grad_norm": 2.8289943202459003, | |
| "learning_rate": 8.010531210160594e-06, | |
| "loss": 0.5489, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.3678839957035446, | |
| "grad_norm": 2.493357592170436, | |
| "learning_rate": 7.972962099445979e-06, | |
| "loss": 0.5292, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 0.37056928034371645, | |
| "grad_norm": 2.263143839690345, | |
| "learning_rate": 7.935131534110893e-06, | |
| "loss": 0.5397, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.3732545649838883, | |
| "grad_norm": 2.3210222371229587, | |
| "learning_rate": 7.897042841132195e-06, | |
| "loss": 0.5345, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 0.37593984962406013, | |
| "grad_norm": 2.390060459345618, | |
| "learning_rate": 7.858699370187558e-06, | |
| "loss": 0.5315, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.378625134264232, | |
| "grad_norm": 2.2904914358187782, | |
| "learning_rate": 7.820104493360883e-06, | |
| "loss": 0.5492, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 0.38131041890440387, | |
| "grad_norm": 2.3805535227186385, | |
| "learning_rate": 7.781261604845754e-06, | |
| "loss": 0.5235, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.38399570354457574, | |
| "grad_norm": 2.471879540761431, | |
| "learning_rate": 7.742174120646929e-06, | |
| "loss": 0.5419, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 0.3866809881847476, | |
| "grad_norm": 2.5316025731740175, | |
| "learning_rate": 7.70284547827992e-06, | |
| "loss": 0.5258, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.3893662728249194, | |
| "grad_norm": 2.356021910277692, | |
| "learning_rate": 7.663279136468696e-06, | |
| "loss": 0.5136, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 0.3920515574650913, | |
| "grad_norm": 2.300853201320016, | |
| "learning_rate": 7.623478574841499e-06, | |
| "loss": 0.5334, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.39473684210526316, | |
| "grad_norm": 2.3471881512013177, | |
| "learning_rate": 7.58344729362483e-06, | |
| "loss": 0.5242, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 0.39742212674543503, | |
| "grad_norm": 2.3294738660800998, | |
| "learning_rate": 7.5431888133356334e-06, | |
| "loss": 0.5307, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.40010741138560685, | |
| "grad_norm": 2.405771728312475, | |
| "learning_rate": 7.502706674471678e-06, | |
| "loss": 0.5141, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 0.4027926960257787, | |
| "grad_norm": 2.334459667478728, | |
| "learning_rate": 7.462004437200195e-06, | |
| "loss": 0.5153, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.4054779806659506, | |
| "grad_norm": 2.201639201589267, | |
| "learning_rate": 7.42108568104479e-06, | |
| "loss": 0.5205, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 0.40816326530612246, | |
| "grad_norm": 2.34678061148915, | |
| "learning_rate": 7.379954004570628e-06, | |
| "loss": 0.5114, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.41084854994629433, | |
| "grad_norm": 2.2986824569043214, | |
| "learning_rate": 7.338613025067977e-06, | |
| "loss": 0.5256, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 0.41353383458646614, | |
| "grad_norm": 2.244425672653934, | |
| "learning_rate": 7.2970663782340765e-06, | |
| "loss": 0.5008, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.416219119226638, | |
| "grad_norm": 2.395122328169309, | |
| "learning_rate": 7.25531771785341e-06, | |
| "loss": 0.5033, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 0.4189044038668099, | |
| "grad_norm": 2.322926845207769, | |
| "learning_rate": 7.2133707154763646e-06, | |
| "loss": 0.5175, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.42158968850698175, | |
| "grad_norm": 2.37505134127978, | |
| "learning_rate": 7.17122906009635e-06, | |
| "loss": 0.5232, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 0.4242749731471536, | |
| "grad_norm": 2.378834002748694, | |
| "learning_rate": 7.128896457825364e-06, | |
| "loss": 0.4991, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.42696025778732544, | |
| "grad_norm": 2.282722869895024, | |
| "learning_rate": 7.08637663156807e-06, | |
| "loss": 0.5122, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 0.4296455424274973, | |
| "grad_norm": 2.3359093242506868, | |
| "learning_rate": 7.043673320694386e-06, | |
| "loss": 0.5081, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.4323308270676692, | |
| "grad_norm": 2.3875739801173292, | |
| "learning_rate": 7.000790280710625e-06, | |
| "loss": 0.5176, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 0.43501611170784105, | |
| "grad_norm": 2.363423980165021, | |
| "learning_rate": 6.957731282929224e-06, | |
| "loss": 0.4986, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.4377013963480129, | |
| "grad_norm": 2.3662866076729547, | |
| "learning_rate": 6.914500114137082e-06, | |
| "loss": 0.5249, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 0.44038668098818473, | |
| "grad_norm": 2.4041868855326327, | |
| "learning_rate": 6.871100576262526e-06, | |
| "loss": 0.5065, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.4430719656283566, | |
| "grad_norm": 2.2750431563086004, | |
| "learning_rate": 6.827536486040964e-06, | |
| "loss": 0.4998, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 0.44575725026852847, | |
| "grad_norm": 2.361946244222484, | |
| "learning_rate": 6.783811674679216e-06, | |
| "loss": 0.507, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.44844253490870034, | |
| "grad_norm": 2.2727003764122826, | |
| "learning_rate": 6.7399299875185875e-06, | |
| "loss": 0.5036, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 0.45112781954887216, | |
| "grad_norm": 2.3517259170227787, | |
| "learning_rate": 6.695895283696691e-06, | |
| "loss": 0.4789, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.453813104189044, | |
| "grad_norm": 2.383189304073773, | |
| "learning_rate": 6.651711435808061e-06, | |
| "loss": 0.4929, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 0.4564983888292159, | |
| "grad_norm": 2.360092586611737, | |
| "learning_rate": 6.607382329563581e-06, | |
| "loss": 0.5068, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.45918367346938777, | |
| "grad_norm": 2.334536482498263, | |
| "learning_rate": 6.562911863448752e-06, | |
| "loss": 0.4849, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 0.46186895810955964, | |
| "grad_norm": 2.322335358509275, | |
| "learning_rate": 6.518303948380854e-06, | |
| "loss": 0.4819, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.46455424274973145, | |
| "grad_norm": 2.4157418228384318, | |
| "learning_rate": 6.473562507364995e-06, | |
| "loss": 0.4987, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 0.4672395273899033, | |
| "grad_norm": 2.3449366499665234, | |
| "learning_rate": 6.428691475149107e-06, | |
| "loss": 0.4809, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.4699248120300752, | |
| "grad_norm": 2.365410848066476, | |
| "learning_rate": 6.383694797877915e-06, | |
| "loss": 0.4783, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 0.47261009667024706, | |
| "grad_norm": 2.356820466415204, | |
| "learning_rate": 6.338576432745891e-06, | |
| "loss": 0.4927, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.47529538131041893, | |
| "grad_norm": 2.3178581173038513, | |
| "learning_rate": 6.293340347649234e-06, | |
| "loss": 0.4883, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 0.47798066595059074, | |
| "grad_norm": 2.1615404918804284, | |
| "learning_rate": 6.247990520836935e-06, | |
| "loss": 0.471, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.4806659505907626, | |
| "grad_norm": 2.360653971901331, | |
| "learning_rate": 6.202530940560897e-06, | |
| "loss": 0.4822, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 0.4833512352309345, | |
| "grad_norm": 2.372568605186213, | |
| "learning_rate": 6.156965604725202e-06, | |
| "loss": 0.4689, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.48603651987110635, | |
| "grad_norm": 2.246153435426788, | |
| "learning_rate": 6.111298520534514e-06, | |
| "loss": 0.4721, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 0.48872180451127817, | |
| "grad_norm": 2.3625810666773073, | |
| "learning_rate": 6.065533704141666e-06, | |
| "loss": 0.4712, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 0.49140708915145004, | |
| "grad_norm": 2.324069609401358, | |
| "learning_rate": 6.019675180294469e-06, | |
| "loss": 0.4773, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 0.4940923737916219, | |
| "grad_norm": 2.3863702418734083, | |
| "learning_rate": 5.973726981981756e-06, | |
| "loss": 0.4767, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 0.4967776584317938, | |
| "grad_norm": 2.244690254098793, | |
| "learning_rate": 5.9276931500787045e-06, | |
| "loss": 0.4715, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 0.49946294307196565, | |
| "grad_norm": 2.380284031970335, | |
| "learning_rate": 5.8815777329914655e-06, | |
| "loss": 0.4582, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 0.5021482277121375, | |
| "grad_norm": 2.312484054724359, | |
| "learning_rate": 5.83538478630113e-06, | |
| "loss": 0.46, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 0.5048335123523093, | |
| "grad_norm": 2.41550721343238, | |
| "learning_rate": 5.789118372407061e-06, | |
| "loss": 0.4574, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 0.5075187969924813, | |
| "grad_norm": 2.313665146473085, | |
| "learning_rate": 5.7427825601696376e-06, | |
| "loss": 0.4717, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 0.5102040816326531, | |
| "grad_norm": 2.343800549458904, | |
| "learning_rate": 5.696381424552411e-06, | |
| "loss": 0.4488, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 0.5128893662728249, | |
| "grad_norm": 2.308680761760831, | |
| "learning_rate": 5.649919046263742e-06, | |
| "loss": 0.4777, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 0.5155746509129968, | |
| "grad_norm": 2.1883219395543776, | |
| "learning_rate": 5.6033995113979336e-06, | |
| "loss": 0.4708, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 0.5182599355531686, | |
| "grad_norm": 2.8333844527470498, | |
| "learning_rate": 5.556826911075867e-06, | |
| "loss": 0.4644, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 0.5209452201933404, | |
| "grad_norm": 2.2176847173023346, | |
| "learning_rate": 5.510205341085224e-06, | |
| "loss": 0.4467, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 0.5236305048335124, | |
| "grad_norm": 2.288019101770923, | |
| "learning_rate": 5.463538901520278e-06, | |
| "loss": 0.4664, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 0.5263157894736842, | |
| "grad_norm": 2.2412931794728617, | |
| "learning_rate": 5.416831696421325e-06, | |
| "loss": 0.4557, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 0.5290010741138561, | |
| "grad_norm": 2.451382138891403, | |
| "learning_rate": 5.370087833413747e-06, | |
| "loss": 0.4698, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 0.5316863587540279, | |
| "grad_norm": 2.252017741354611, | |
| "learning_rate": 5.323311423346775e-06, | |
| "loss": 0.4494, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 0.5343716433941997, | |
| "grad_norm": 2.293986595976978, | |
| "learning_rate": 5.2765065799319646e-06, | |
| "loss": 0.4528, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 0.5370569280343717, | |
| "grad_norm": 2.2761604256781327, | |
| "learning_rate": 5.229677419381417e-06, | |
| "loss": 0.4614, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.5397422126745435, | |
| "grad_norm": 2.4240090799649034, | |
| "learning_rate": 5.182828060045783e-06, | |
| "loss": 0.4468, | |
| "step": 1005 | |
| }, | |
| { | |
| "epoch": 0.5424274973147154, | |
| "grad_norm": 2.442934156317595, | |
| "learning_rate": 5.1359626220520804e-06, | |
| "loss": 0.4389, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 0.5451127819548872, | |
| "grad_norm": 2.2603275521364528, | |
| "learning_rate": 5.08908522694135e-06, | |
| "loss": 0.4567, | |
| "step": 1015 | |
| }, | |
| { | |
| "epoch": 0.547798066595059, | |
| "grad_norm": 2.218667766895513, | |
| "learning_rate": 5.042199997306185e-06, | |
| "loss": 0.4388, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 0.550483351235231, | |
| "grad_norm": 2.259809429069565, | |
| "learning_rate": 4.995311056428192e-06, | |
| "loss": 0.4364, | |
| "step": 1025 | |
| }, | |
| { | |
| "epoch": 0.5531686358754028, | |
| "grad_norm": 2.2730891483819757, | |
| "learning_rate": 4.948422527915348e-06, | |
| "loss": 0.4333, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 0.5558539205155747, | |
| "grad_norm": 2.39309066580743, | |
| "learning_rate": 4.901538535339369e-06, | |
| "loss": 0.4342, | |
| "step": 1035 | |
| }, | |
| { | |
| "epoch": 0.5585392051557465, | |
| "grad_norm": 2.3112523965600436, | |
| "learning_rate": 4.854663201873066e-06, | |
| "loss": 0.4266, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 0.5612244897959183, | |
| "grad_norm": 2.314393277518899, | |
| "learning_rate": 4.807800649927723e-06, | |
| "loss": 0.4268, | |
| "step": 1045 | |
| }, | |
| { | |
| "epoch": 0.5639097744360902, | |
| "grad_norm": 2.3651606024626117, | |
| "learning_rate": 4.760955000790572e-06, | |
| "loss": 0.4337, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 0.5665950590762621, | |
| "grad_norm": 2.415473492611108, | |
| "learning_rate": 4.714130374262339e-06, | |
| "loss": 0.4449, | |
| "step": 1055 | |
| }, | |
| { | |
| "epoch": 0.569280343716434, | |
| "grad_norm": 2.3361082246507934, | |
| "learning_rate": 4.667330888294934e-06, | |
| "loss": 0.4107, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 0.5719656283566058, | |
| "grad_norm": 2.360677430690215, | |
| "learning_rate": 4.620560658629303e-06, | |
| "loss": 0.4254, | |
| "step": 1065 | |
| }, | |
| { | |
| "epoch": 0.5746509129967776, | |
| "grad_norm": 2.1700448641934544, | |
| "learning_rate": 4.573823798433469e-06, | |
| "loss": 0.4415, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 0.5773361976369495, | |
| "grad_norm": 2.312801842492201, | |
| "learning_rate": 4.527124417940812e-06, | |
| "loss": 0.4397, | |
| "step": 1075 | |
| }, | |
| { | |
| "epoch": 0.5800214822771214, | |
| "grad_norm": 2.2937353441454285, | |
| "learning_rate": 4.48046662408858e-06, | |
| "loss": 0.4476, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 0.5827067669172933, | |
| "grad_norm": 2.561098663722942, | |
| "learning_rate": 4.43385452015673e-06, | |
| "loss": 0.4323, | |
| "step": 1085 | |
| }, | |
| { | |
| "epoch": 0.5853920515574651, | |
| "grad_norm": 2.2162314798223206, | |
| "learning_rate": 4.387292205407055e-06, | |
| "loss": 0.4351, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 0.5880773361976369, | |
| "grad_norm": 2.356008942163047, | |
| "learning_rate": 4.3407837747226765e-06, | |
| "loss": 0.4282, | |
| "step": 1095 | |
| }, | |
| { | |
| "epoch": 0.5907626208378088, | |
| "grad_norm": 2.339594344704525, | |
| "learning_rate": 4.29433331824793e-06, | |
| "loss": 0.4174, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 0.5934479054779807, | |
| "grad_norm": 2.2403052317925156, | |
| "learning_rate": 4.247944921028663e-06, | |
| "loss": 0.4134, | |
| "step": 1105 | |
| }, | |
| { | |
| "epoch": 0.5961331901181526, | |
| "grad_norm": 2.2381645082625776, | |
| "learning_rate": 4.201622662652972e-06, | |
| "loss": 0.4189, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 0.5988184747583244, | |
| "grad_norm": 2.2380868768618627, | |
| "learning_rate": 4.155370616892422e-06, | |
| "loss": 0.4129, | |
| "step": 1115 | |
| }, | |
| { | |
| "epoch": 0.6015037593984962, | |
| "grad_norm": 2.339687690650304, | |
| "learning_rate": 4.1091928513438005e-06, | |
| "loss": 0.4158, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 0.6041890440386681, | |
| "grad_norm": 2.3979680821995784, | |
| "learning_rate": 4.063093427071376e-06, | |
| "loss": 0.4224, | |
| "step": 1125 | |
| }, | |
| { | |
| "epoch": 0.60687432867884, | |
| "grad_norm": 2.2120117677998126, | |
| "learning_rate": 4.0170763982497716e-06, | |
| "loss": 0.4131, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 0.6095596133190118, | |
| "grad_norm": 2.2547586947580336, | |
| "learning_rate": 3.971145811807397e-06, | |
| "loss": 0.4205, | |
| "step": 1135 | |
| }, | |
| { | |
| "epoch": 0.6122448979591837, | |
| "grad_norm": 2.2592067136128406, | |
| "learning_rate": 3.925305707070572e-06, | |
| "loss": 0.4258, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 0.6149301825993555, | |
| "grad_norm": 2.3340312150224376, | |
| "learning_rate": 3.879560115408279e-06, | |
| "loss": 0.4241, | |
| "step": 1145 | |
| }, | |
| { | |
| "epoch": 0.6176154672395274, | |
| "grad_norm": 2.1645761230380614, | |
| "learning_rate": 3.833913059877622e-06, | |
| "loss": 0.4146, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 0.6203007518796992, | |
| "grad_norm": 2.327696071182452, | |
| "learning_rate": 3.788368554870034e-06, | |
| "loss": 0.4176, | |
| "step": 1155 | |
| }, | |
| { | |
| "epoch": 0.6229860365198711, | |
| "grad_norm": 2.283585936176722, | |
| "learning_rate": 3.7429306057582306e-06, | |
| "loss": 0.4256, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 0.625671321160043, | |
| "grad_norm": 2.257166305089174, | |
| "learning_rate": 3.6976032085439563e-06, | |
| "loss": 0.4038, | |
| "step": 1165 | |
| }, | |
| { | |
| "epoch": 0.6283566058002148, | |
| "grad_norm": 2.217414085409638, | |
| "learning_rate": 3.6523903495065634e-06, | |
| "loss": 0.4106, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 0.6310418904403867, | |
| "grad_norm": 2.2297466820002225, | |
| "learning_rate": 3.6072960048524443e-06, | |
| "loss": 0.4167, | |
| "step": 1175 | |
| }, | |
| { | |
| "epoch": 0.6337271750805585, | |
| "grad_norm": 2.196582517840786, | |
| "learning_rate": 3.562324140365343e-06, | |
| "loss": 0.4042, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 0.6364124597207304, | |
| "grad_norm": 2.20250952239818, | |
| "learning_rate": 3.5174787110575904e-06, | |
| "loss": 0.3935, | |
| "step": 1185 | |
| }, | |
| { | |
| "epoch": 0.6390977443609023, | |
| "grad_norm": 2.526218420849079, | |
| "learning_rate": 3.4727636608222813e-06, | |
| "loss": 0.4063, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 0.6417830290010741, | |
| "grad_norm": 2.1335154360820088, | |
| "learning_rate": 3.428182922086437e-06, | |
| "loss": 0.4049, | |
| "step": 1195 | |
| }, | |
| { | |
| "epoch": 0.644468313641246, | |
| "grad_norm": 2.252244271262109, | |
| "learning_rate": 3.3837404154651677e-06, | |
| "loss": 0.4009, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 0.6471535982814178, | |
| "grad_norm": 2.4694360819768817, | |
| "learning_rate": 3.3394400494168745e-06, | |
| "loss": 0.4054, | |
| "step": 1205 | |
| }, | |
| { | |
| "epoch": 0.6498388829215896, | |
| "grad_norm": 2.2150632695332515, | |
| "learning_rate": 3.2952857198995304e-06, | |
| "loss": 0.4043, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 0.6525241675617616, | |
| "grad_norm": 2.4201560036968406, | |
| "learning_rate": 3.2512813100280494e-06, | |
| "loss": 0.401, | |
| "step": 1215 | |
| }, | |
| { | |
| "epoch": 0.6552094522019334, | |
| "grad_norm": 2.2211755428097226, | |
| "learning_rate": 3.2074306897327913e-06, | |
| "loss": 0.3887, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 0.6578947368421053, | |
| "grad_norm": 2.2326049552084126, | |
| "learning_rate": 3.163737715419216e-06, | |
| "loss": 0.3966, | |
| "step": 1225 | |
| }, | |
| { | |
| "epoch": 0.6605800214822771, | |
| "grad_norm": 2.1800000221416798, | |
| "learning_rate": 3.1202062296287507e-06, | |
| "loss": 0.3936, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 0.6632653061224489, | |
| "grad_norm": 2.2543256840697836, | |
| "learning_rate": 3.076840060700848e-06, | |
| "loss": 0.3999, | |
| "step": 1235 | |
| }, | |
| { | |
| "epoch": 0.6659505907626209, | |
| "grad_norm": 2.1832410035100014, | |
| "learning_rate": 3.0336430224363067e-06, | |
| "loss": 0.4066, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 0.6686358754027927, | |
| "grad_norm": 2.1345582981116196, | |
| "learning_rate": 2.990618913761876e-06, | |
| "loss": 0.3978, | |
| "step": 1245 | |
| }, | |
| { | |
| "epoch": 0.6713211600429646, | |
| "grad_norm": 2.380691403939363, | |
| "learning_rate": 2.9477715183961634e-06, | |
| "loss": 0.3864, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 0.6740064446831364, | |
| "grad_norm": 2.2738078409954685, | |
| "learning_rate": 2.905104604516872e-06, | |
| "loss": 0.3843, | |
| "step": 1255 | |
| }, | |
| { | |
| "epoch": 0.6766917293233082, | |
| "grad_norm": 2.1602808837976117, | |
| "learning_rate": 2.8626219244294074e-06, | |
| "loss": 0.3999, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 0.6793770139634802, | |
| "grad_norm": 2.2085763099047955, | |
| "learning_rate": 2.820327214236904e-06, | |
| "loss": 0.3974, | |
| "step": 1265 | |
| }, | |
| { | |
| "epoch": 0.682062298603652, | |
| "grad_norm": 2.235386766709275, | |
| "learning_rate": 2.7782241935116337e-06, | |
| "loss": 0.385, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 0.6847475832438239, | |
| "grad_norm": 2.188012993018142, | |
| "learning_rate": 2.7363165649679045e-06, | |
| "loss": 0.3827, | |
| "step": 1275 | |
| }, | |
| { | |
| "epoch": 0.6874328678839957, | |
| "grad_norm": 2.226716099786791, | |
| "learning_rate": 2.6946080141364295e-06, | |
| "loss": 0.3943, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 0.6901181525241675, | |
| "grad_norm": 2.2426965329620954, | |
| "learning_rate": 2.6531022090401946e-06, | |
| "loss": 0.3994, | |
| "step": 1285 | |
| }, | |
| { | |
| "epoch": 0.6928034371643395, | |
| "grad_norm": 2.173915921830579, | |
| "learning_rate": 2.611802799871893e-06, | |
| "loss": 0.384, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 0.6954887218045113, | |
| "grad_norm": 2.1709824856734605, | |
| "learning_rate": 2.570713418672893e-06, | |
| "loss": 0.373, | |
| "step": 1295 | |
| }, | |
| { | |
| "epoch": 0.6981740064446831, | |
| "grad_norm": 2.3391765011254937, | |
| "learning_rate": 2.529837679013841e-06, | |
| "loss": 0.384, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 0.700859291084855, | |
| "grad_norm": 2.2099937926483704, | |
| "learning_rate": 2.4891791756768587e-06, | |
| "loss": 0.3704, | |
| "step": 1305 | |
| }, | |
| { | |
| "epoch": 0.7035445757250268, | |
| "grad_norm": 2.176216958310356, | |
| "learning_rate": 2.4487414843394015e-06, | |
| "loss": 0.3928, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 0.7062298603651987, | |
| "grad_norm": 2.1290888658786415, | |
| "learning_rate": 2.4085281612598027e-06, | |
| "loss": 0.3748, | |
| "step": 1315 | |
| }, | |
| { | |
| "epoch": 0.7089151450053706, | |
| "grad_norm": 2.1829182895504227, | |
| "learning_rate": 2.368542742964519e-06, | |
| "loss": 0.3869, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 0.7116004296455424, | |
| "grad_norm": 2.2587981614095085, | |
| "learning_rate": 2.3287887459371193e-06, | |
| "loss": 0.3779, | |
| "step": 1325 | |
| }, | |
| { | |
| "epoch": 0.7142857142857143, | |
| "grad_norm": 2.372100847324542, | |
| "learning_rate": 2.2892696663090223e-06, | |
| "loss": 0.3761, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 0.7169709989258861, | |
| "grad_norm": 2.278334338481838, | |
| "learning_rate": 2.2499889795520342e-06, | |
| "loss": 0.3686, | |
| "step": 1335 | |
| }, | |
| { | |
| "epoch": 0.719656283566058, | |
| "grad_norm": 2.2218577358371046, | |
| "learning_rate": 2.2109501401727097e-06, | |
| "loss": 0.373, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 0.7223415682062299, | |
| "grad_norm": 2.1254755331259725, | |
| "learning_rate": 2.1721565814085355e-06, | |
| "loss": 0.365, | |
| "step": 1345 | |
| }, | |
| { | |
| "epoch": 0.7250268528464017, | |
| "grad_norm": 2.1702115107700917, | |
| "learning_rate": 2.133611714926005e-06, | |
| "loss": 0.3702, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 0.7277121374865736, | |
| "grad_norm": 2.1892322865283806, | |
| "learning_rate": 2.0953189305205847e-06, | |
| "loss": 0.3775, | |
| "step": 1355 | |
| }, | |
| { | |
| "epoch": 0.7303974221267454, | |
| "grad_norm": 2.151212049347134, | |
| "learning_rate": 2.057281595818592e-06, | |
| "loss": 0.3761, | |
| "step": 1360 | |
| }, | |
| { | |
| "epoch": 0.7330827067669173, | |
| "grad_norm": 2.393785451565841, | |
| "learning_rate": 2.0195030559810387e-06, | |
| "loss": 0.3787, | |
| "step": 1365 | |
| }, | |
| { | |
| "epoch": 0.7357679914070892, | |
| "grad_norm": 2.1987458039368954, | |
| "learning_rate": 1.981986633409447e-06, | |
| "loss": 0.3705, | |
| "step": 1370 | |
| }, | |
| { | |
| "epoch": 0.738453276047261, | |
| "grad_norm": 2.2091176561248083, | |
| "learning_rate": 1.944735627453654e-06, | |
| "loss": 0.3674, | |
| "step": 1375 | |
| }, | |
| { | |
| "epoch": 0.7411385606874329, | |
| "grad_norm": 2.1125042254421733, | |
| "learning_rate": 1.907753314121662e-06, | |
| "loss": 0.3801, | |
| "step": 1380 | |
| }, | |
| { | |
| "epoch": 0.7438238453276047, | |
| "grad_norm": 2.310411592482143, | |
| "learning_rate": 1.8710429457915269e-06, | |
| "loss": 0.39, | |
| "step": 1385 | |
| }, | |
| { | |
| "epoch": 0.7465091299677766, | |
| "grad_norm": 2.2962629079030243, | |
| "learning_rate": 1.834607750925333e-06, | |
| "loss": 0.3676, | |
| "step": 1390 | |
| }, | |
| { | |
| "epoch": 0.7491944146079484, | |
| "grad_norm": 2.35761038713381, | |
| "learning_rate": 1.7984509337852724e-06, | |
| "loss": 0.3822, | |
| "step": 1395 | |
| }, | |
| { | |
| "epoch": 0.7518796992481203, | |
| "grad_norm": 2.0186886903779517, | |
| "learning_rate": 1.7625756741518407e-06, | |
| "loss": 0.3765, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 0.7545649838882922, | |
| "grad_norm": 2.1023056656067567, | |
| "learning_rate": 1.7269851270441978e-06, | |
| "loss": 0.3583, | |
| "step": 1405 | |
| }, | |
| { | |
| "epoch": 0.757250268528464, | |
| "grad_norm": 2.300679602655581, | |
| "learning_rate": 1.6916824224427098e-06, | |
| "loss": 0.3866, | |
| "step": 1410 | |
| }, | |
| { | |
| "epoch": 0.7599355531686359, | |
| "grad_norm": 2.0668614724801606, | |
| "learning_rate": 1.6566706650136704e-06, | |
| "loss": 0.3553, | |
| "step": 1415 | |
| }, | |
| { | |
| "epoch": 0.7626208378088077, | |
| "grad_norm": 2.294458578577542, | |
| "learning_rate": 1.6219529338362756e-06, | |
| "loss": 0.3642, | |
| "step": 1420 | |
| }, | |
| { | |
| "epoch": 0.7653061224489796, | |
| "grad_norm": 2.0628194904498836, | |
| "learning_rate": 1.587532282131835e-06, | |
| "loss": 0.3596, | |
| "step": 1425 | |
| }, | |
| { | |
| "epoch": 0.7679914070891515, | |
| "grad_norm": 2.0587824624449826, | |
| "learning_rate": 1.5534117369952523e-06, | |
| "loss": 0.3627, | |
| "step": 1430 | |
| }, | |
| { | |
| "epoch": 0.7706766917293233, | |
| "grad_norm": 2.1599902106655198, | |
| "learning_rate": 1.5195942991288214e-06, | |
| "loss": 0.3627, | |
| "step": 1435 | |
| }, | |
| { | |
| "epoch": 0.7733619763694952, | |
| "grad_norm": 2.2640334867648155, | |
| "learning_rate": 1.4860829425783151e-06, | |
| "loss": 0.3625, | |
| "step": 1440 | |
| }, | |
| { | |
| "epoch": 0.776047261009667, | |
| "grad_norm": 2.2124168087140874, | |
| "learning_rate": 1.4528806144714546e-06, | |
| "loss": 0.3595, | |
| "step": 1445 | |
| }, | |
| { | |
| "epoch": 0.7787325456498388, | |
| "grad_norm": 2.337523902238021, | |
| "learning_rate": 1.4199902347587186e-06, | |
| "loss": 0.3707, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 0.7814178302900108, | |
| "grad_norm": 2.096978060408682, | |
| "learning_rate": 1.3874146959565387e-06, | |
| "loss": 0.3713, | |
| "step": 1455 | |
| }, | |
| { | |
| "epoch": 0.7841031149301826, | |
| "grad_norm": 2.0894953589815253, | |
| "learning_rate": 1.3551568628929434e-06, | |
| "loss": 0.3584, | |
| "step": 1460 | |
| }, | |
| { | |
| "epoch": 0.7867883995703545, | |
| "grad_norm": 2.1767739733098774, | |
| "learning_rate": 1.3232195724555919e-06, | |
| "loss": 0.3508, | |
| "step": 1465 | |
| }, | |
| { | |
| "epoch": 0.7894736842105263, | |
| "grad_norm": 2.0541945214281814, | |
| "learning_rate": 1.291605633342301e-06, | |
| "loss": 0.3478, | |
| "step": 1470 | |
| }, | |
| { | |
| "epoch": 0.7921589688506981, | |
| "grad_norm": 2.024608217596787, | |
| "learning_rate": 1.2603178258140264e-06, | |
| "loss": 0.3523, | |
| "step": 1475 | |
| }, | |
| { | |
| "epoch": 0.7948442534908701, | |
| "grad_norm": 2.143945397214433, | |
| "learning_rate": 1.22935890145036e-06, | |
| "loss": 0.3476, | |
| "step": 1480 | |
| }, | |
| { | |
| "epoch": 0.7975295381310419, | |
| "grad_norm": 2.008431036790706, | |
| "learning_rate": 1.198731582907547e-06, | |
| "loss": 0.3667, | |
| "step": 1485 | |
| }, | |
| { | |
| "epoch": 0.8002148227712137, | |
| "grad_norm": 2.036066654414611, | |
| "learning_rate": 1.1684385636790368e-06, | |
| "loss": 0.362, | |
| "step": 1490 | |
| }, | |
| { | |
| "epoch": 0.8029001074113856, | |
| "grad_norm": 2.066841030662012, | |
| "learning_rate": 1.1384825078586114e-06, | |
| "loss": 0.3619, | |
| "step": 1495 | |
| }, | |
| { | |
| "epoch": 0.8055853920515574, | |
| "grad_norm": 2.219785683686726, | |
| "learning_rate": 1.108866049906096e-06, | |
| "loss": 0.3476, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 0.8082706766917294, | |
| "grad_norm": 2.032441941191992, | |
| "learning_rate": 1.079591794415667e-06, | |
| "loss": 0.3613, | |
| "step": 1505 | |
| }, | |
| { | |
| "epoch": 0.8109559613319012, | |
| "grad_norm": 2.2803586671679037, | |
| "learning_rate": 1.0506623158867952e-06, | |
| "loss": 0.3468, | |
| "step": 1510 | |
| }, | |
| { | |
| "epoch": 0.813641245972073, | |
| "grad_norm": 1.9788164235546355, | |
| "learning_rate": 1.0220801584978408e-06, | |
| "loss": 0.3544, | |
| "step": 1515 | |
| }, | |
| { | |
| "epoch": 0.8163265306122449, | |
| "grad_norm": 2.2037987602808475, | |
| "learning_rate": 9.93847835882296e-07, | |
| "loss": 0.3538, | |
| "step": 1520 | |
| }, | |
| { | |
| "epoch": 0.8190118152524167, | |
| "grad_norm": 2.0816795942958817, | |
| "learning_rate": 9.659678309077374e-07, | |
| "loss": 0.3481, | |
| "step": 1525 | |
| }, | |
| { | |
| "epoch": 0.8216970998925887, | |
| "grad_norm": 1.9823282088006486, | |
| "learning_rate": 9.384425954574617e-07, | |
| "loss": 0.3577, | |
| "step": 1530 | |
| }, | |
| { | |
| "epoch": 0.8243823845327605, | |
| "grad_norm": 2.030845811179057, | |
| "learning_rate": 9.112745502148629e-07, | |
| "loss": 0.3559, | |
| "step": 1535 | |
| }, | |
| { | |
| "epoch": 0.8270676691729323, | |
| "grad_norm": 2.1777276043921936, | |
| "learning_rate": 8.844660844505498e-07, | |
| "loss": 0.3525, | |
| "step": 1540 | |
| }, | |
| { | |
| "epoch": 0.8297529538131042, | |
| "grad_norm": 2.0300477472798812, | |
| "learning_rate": 8.580195558122167e-07, | |
| "loss": 0.3508, | |
| "step": 1545 | |
| }, | |
| { | |
| "epoch": 0.832438238453276, | |
| "grad_norm": 2.0221512590268156, | |
| "learning_rate": 8.319372901173045e-07, | |
| "loss": 0.3469, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 0.835123523093448, | |
| "grad_norm": 2.1138683649194507, | |
| "learning_rate": 8.062215811484652e-07, | |
| "loss": 0.3539, | |
| "step": 1555 | |
| }, | |
| { | |
| "epoch": 0.8378088077336198, | |
| "grad_norm": 2.040376339189134, | |
| "learning_rate": 7.808746904518261e-07, | |
| "loss": 0.351, | |
| "step": 1560 | |
| }, | |
| { | |
| "epoch": 0.8404940923737916, | |
| "grad_norm": 2.2124273072385106, | |
| "learning_rate": 7.558988471381063e-07, | |
| "loss": 0.3425, | |
| "step": 1565 | |
| }, | |
| { | |
| "epoch": 0.8431793770139635, | |
| "grad_norm": 2.1703815796790598, | |
| "learning_rate": 7.312962476865831e-07, | |
| "loss": 0.3488, | |
| "step": 1570 | |
| }, | |
| { | |
| "epoch": 0.8458646616541353, | |
| "grad_norm": 2.124785306636511, | |
| "learning_rate": 7.070690557519139e-07, | |
| "loss": 0.3509, | |
| "step": 1575 | |
| }, | |
| { | |
| "epoch": 0.8485499462943072, | |
| "grad_norm": 2.2867701473619055, | |
| "learning_rate": 6.832194019738686e-07, | |
| "loss": 0.3553, | |
| "step": 1580 | |
| }, | |
| { | |
| "epoch": 0.8512352309344791, | |
| "grad_norm": 1.9550073938057098, | |
| "learning_rate": 6.597493837899377e-07, | |
| "loss": 0.3568, | |
| "step": 1585 | |
| }, | |
| { | |
| "epoch": 0.8539205155746509, | |
| "grad_norm": 1.9281450180746198, | |
| "learning_rate": 6.366610652508875e-07, | |
| "loss": 0.3413, | |
| "step": 1590 | |
| }, | |
| { | |
| "epoch": 0.8566058002148228, | |
| "grad_norm": 2.1382579942417843, | |
| "learning_rate": 6.13956476839237e-07, | |
| "loss": 0.351, | |
| "step": 1595 | |
| }, | |
| { | |
| "epoch": 0.8592910848549946, | |
| "grad_norm": 2.119115312173987, | |
| "learning_rate": 5.916376152906761e-07, | |
| "loss": 0.3412, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 0.8619763694951665, | |
| "grad_norm": 2.012427806717363, | |
| "learning_rate": 5.697064434184835e-07, | |
| "loss": 0.3379, | |
| "step": 1605 | |
| }, | |
| { | |
| "epoch": 0.8646616541353384, | |
| "grad_norm": 2.203625853262407, | |
| "learning_rate": 5.481648899408943e-07, | |
| "loss": 0.3432, | |
| "step": 1610 | |
| }, | |
| { | |
| "epoch": 0.8673469387755102, | |
| "grad_norm": 2.116502964909563, | |
| "learning_rate": 5.270148493114896e-07, | |
| "loss": 0.3452, | |
| "step": 1615 | |
| }, | |
| { | |
| "epoch": 0.8700322234156821, | |
| "grad_norm": 2.1766860996156048, | |
| "learning_rate": 5.062581815525819e-07, | |
| "loss": 0.3367, | |
| "step": 1620 | |
| }, | |
| { | |
| "epoch": 0.8727175080558539, | |
| "grad_norm": 2.1775147697129373, | |
| "learning_rate": 4.858967120916436e-07, | |
| "loss": 0.3436, | |
| "step": 1625 | |
| }, | |
| { | |
| "epoch": 0.8754027926960258, | |
| "grad_norm": 2.2442167220688463, | |
| "learning_rate": 4.659322316007714e-07, | |
| "loss": 0.3433, | |
| "step": 1630 | |
| }, | |
| { | |
| "epoch": 0.8780880773361976, | |
| "grad_norm": 2.075788617747793, | |
| "learning_rate": 4.463664958392017e-07, | |
| "loss": 0.3405, | |
| "step": 1635 | |
| }, | |
| { | |
| "epoch": 0.8807733619763695, | |
| "grad_norm": 1.8687131233491732, | |
| "learning_rate": 4.2720122549890607e-07, | |
| "loss": 0.3384, | |
| "step": 1640 | |
| }, | |
| { | |
| "epoch": 0.8834586466165414, | |
| "grad_norm": 2.0370021636119935, | |
| "learning_rate": 4.084381060532672e-07, | |
| "loss": 0.3474, | |
| "step": 1645 | |
| }, | |
| { | |
| "epoch": 0.8861439312567132, | |
| "grad_norm": 2.0405991135848827, | |
| "learning_rate": 3.9007878760884856e-07, | |
| "loss": 0.3316, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 0.888829215896885, | |
| "grad_norm": 2.127376968302403, | |
| "learning_rate": 3.721248847602771e-07, | |
| "loss": 0.3453, | |
| "step": 1655 | |
| }, | |
| { | |
| "epoch": 0.8915145005370569, | |
| "grad_norm": 2.0017662232647906, | |
| "learning_rate": 3.5457797644825376e-07, | |
| "loss": 0.3427, | |
| "step": 1660 | |
| }, | |
| { | |
| "epoch": 0.8941997851772288, | |
| "grad_norm": 2.1671592537785314, | |
| "learning_rate": 3.3743960582068745e-07, | |
| "loss": 0.3439, | |
| "step": 1665 | |
| }, | |
| { | |
| "epoch": 0.8968850698174007, | |
| "grad_norm": 2.2292025445441466, | |
| "learning_rate": 3.207112800969925e-07, | |
| "loss": 0.3474, | |
| "step": 1670 | |
| }, | |
| { | |
| "epoch": 0.8995703544575725, | |
| "grad_norm": 2.2814733309910924, | |
| "learning_rate": 3.0439447043553005e-07, | |
| "loss": 0.3369, | |
| "step": 1675 | |
| }, | |
| { | |
| "epoch": 0.9022556390977443, | |
| "grad_norm": 2.0190392021288277, | |
| "learning_rate": 2.884906118042313e-07, | |
| "loss": 0.344, | |
| "step": 1680 | |
| }, | |
| { | |
| "epoch": 0.9049409237379162, | |
| "grad_norm": 2.0932363804553438, | |
| "learning_rate": 2.730011028544044e-07, | |
| "loss": 0.3386, | |
| "step": 1685 | |
| }, | |
| { | |
| "epoch": 0.907626208378088, | |
| "grad_norm": 2.0530951304640914, | |
| "learning_rate": 2.579273057977244e-07, | |
| "loss": 0.3424, | |
| "step": 1690 | |
| }, | |
| { | |
| "epoch": 0.91031149301826, | |
| "grad_norm": 2.2057752335811975, | |
| "learning_rate": 2.4327054628643764e-07, | |
| "loss": 0.337, | |
| "step": 1695 | |
| }, | |
| { | |
| "epoch": 0.9129967776584318, | |
| "grad_norm": 1.9338355970542782, | |
| "learning_rate": 2.29032113296781e-07, | |
| "loss": 0.3369, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.9156820622986036, | |
| "grad_norm": 2.0345360518554654, | |
| "learning_rate": 2.1521325901561918e-07, | |
| "loss": 0.3427, | |
| "step": 1705 | |
| }, | |
| { | |
| "epoch": 0.9183673469387755, | |
| "grad_norm": 2.004334408060014, | |
| "learning_rate": 2.018151987303263e-07, | |
| "loss": 0.3447, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 0.9210526315789473, | |
| "grad_norm": 2.0641637729486177, | |
| "learning_rate": 1.8883911072190619e-07, | |
| "loss": 0.3426, | |
| "step": 1715 | |
| }, | |
| { | |
| "epoch": 0.9237379162191193, | |
| "grad_norm": 1.9761026710032998, | |
| "learning_rate": 1.7628613616137048e-07, | |
| "loss": 0.3342, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 0.9264232008592911, | |
| "grad_norm": 2.1350718022308004, | |
| "learning_rate": 1.6415737900937811e-07, | |
| "loss": 0.3377, | |
| "step": 1725 | |
| }, | |
| { | |
| "epoch": 0.9291084854994629, | |
| "grad_norm": 2.055196159373768, | |
| "learning_rate": 1.5245390591914877e-07, | |
| "loss": 0.3378, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 0.9317937701396348, | |
| "grad_norm": 1.9690007377565177, | |
| "learning_rate": 1.4117674614265885e-07, | |
| "loss": 0.3355, | |
| "step": 1735 | |
| }, | |
| { | |
| "epoch": 0.9344790547798066, | |
| "grad_norm": 1.9739725581772325, | |
| "learning_rate": 1.3032689144012457e-07, | |
| "loss": 0.3328, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 0.9371643394199786, | |
| "grad_norm": 2.021766909349921, | |
| "learning_rate": 1.1990529599277656e-07, | |
| "loss": 0.3317, | |
| "step": 1745 | |
| }, | |
| { | |
| "epoch": 0.9398496240601504, | |
| "grad_norm": 2.031112789362354, | |
| "learning_rate": 1.0991287631895498e-07, | |
| "loss": 0.3395, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 0.9425349087003222, | |
| "grad_norm": 1.9213664207732846, | |
| "learning_rate": 1.0035051119349881e-07, | |
| "loss": 0.3344, | |
| "step": 1755 | |
| }, | |
| { | |
| "epoch": 0.9452201933404941, | |
| "grad_norm": 2.0021558728542392, | |
| "learning_rate": 9.121904157046779e-08, | |
| "loss": 0.3398, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 0.9479054779806659, | |
| "grad_norm": 2.0885143729585716, | |
| "learning_rate": 8.251927050918262e-08, | |
| "loss": 0.3322, | |
| "step": 1765 | |
| }, | |
| { | |
| "epoch": 0.9505907626208379, | |
| "grad_norm": 2.113089044300656, | |
| "learning_rate": 7.425196310360094e-08, | |
| "loss": 0.3377, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 0.9532760472610097, | |
| "grad_norm": 2.074323605100398, | |
| "learning_rate": 6.64178464150339e-08, | |
| "loss": 0.3355, | |
| "step": 1775 | |
| }, | |
| { | |
| "epoch": 0.9559613319011815, | |
| "grad_norm": 1.8952573799315888, | |
| "learning_rate": 5.9017609408203445e-08, | |
| "loss": 0.3424, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 0.9586466165413534, | |
| "grad_norm": 2.112107225346643, | |
| "learning_rate": 5.2051902890651316e-08, | |
| "loss": 0.3382, | |
| "step": 1785 | |
| }, | |
| { | |
| "epoch": 0.9613319011815252, | |
| "grad_norm": 1.995864796648848, | |
| "learning_rate": 4.5521339455507075e-08, | |
| "loss": 0.345, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 0.9640171858216972, | |
| "grad_norm": 2.0423181341142085, | |
| "learning_rate": 3.9426493427611177e-08, | |
| "loss": 0.3242, | |
| "step": 1795 | |
| }, | |
| { | |
| "epoch": 0.966702470461869, | |
| "grad_norm": 2.049708585546813, | |
| "learning_rate": 3.3767900813007623e-08, | |
| "loss": 0.3349, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 0.9693877551020408, | |
| "grad_norm": 2.096305223227572, | |
| "learning_rate": 2.8546059251806092e-08, | |
| "loss": 0.3499, | |
| "step": 1805 | |
| }, | |
| { | |
| "epoch": 0.9720730397422127, | |
| "grad_norm": 2.0761286658261775, | |
| "learning_rate": 2.3761427974416383e-08, | |
| "loss": 0.3517, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 0.9747583243823845, | |
| "grad_norm": 2.050778707489503, | |
| "learning_rate": 1.9414427761162423e-08, | |
| "loss": 0.3454, | |
| "step": 1815 | |
| }, | |
| { | |
| "epoch": 0.9774436090225563, | |
| "grad_norm": 1.9902866670914756, | |
| "learning_rate": 1.5505440905276835e-08, | |
| "loss": 0.3375, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 0.9801288936627283, | |
| "grad_norm": 2.1252055010410076, | |
| "learning_rate": 1.203481117928007e-08, | |
| "loss": 0.345, | |
| "step": 1825 | |
| }, | |
| { | |
| "epoch": 0.9828141783029001, | |
| "grad_norm": 1.9597127591634178, | |
| "learning_rate": 9.002843804748473e-09, | |
| "loss": 0.3366, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 0.985499462943072, | |
| "grad_norm": 1.9937617229080937, | |
| "learning_rate": 6.409805425470761e-09, | |
| "loss": 0.3383, | |
| "step": 1835 | |
| }, | |
| { | |
| "epoch": 0.9881847475832438, | |
| "grad_norm": 2.0752840809597792, | |
| "learning_rate": 4.255924083999552e-09, | |
| "loss": 0.3405, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 0.9908700322234156, | |
| "grad_norm": 2.0871755576857893, | |
| "learning_rate": 2.5413892015951903e-09, | |
| "loss": 0.3429, | |
| "step": 1845 | |
| }, | |
| { | |
| "epoch": 0.9935553168635876, | |
| "grad_norm": 2.154732166762825, | |
| "learning_rate": 1.266351561568513e-09, | |
| "loss": 0.3208, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 0.9962406015037594, | |
| "grad_norm": 2.106067074324966, | |
| "learning_rate": 4.3092329601923664e-10, | |
| "loss": 0.3483, | |
| "step": 1855 | |
| }, | |
| { | |
| "epoch": 0.9989258861439313, | |
| "grad_norm": 2.0654612280200046, | |
| "learning_rate": 3.517787597440148e-11, | |
| "loss": 0.3318, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "eval_runtime": 3.3748, | |
| "eval_samples_per_second": 2.963, | |
| "eval_steps_per_second": 0.889, | |
| "step": 1862 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "step": 1862, | |
| "total_flos": 194932459438080.0, | |
| "train_loss": 0.0, | |
| "train_runtime": 0.0092, | |
| "train_samples_per_second": 3252649.769, | |
| "train_steps_per_second": 203358.87 | |
| } | |
| ], | |
| "logging_steps": 5, | |
| "max_steps": 1862, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 100, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 194932459438080.0, | |
| "train_batch_size": 4, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
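
The block above is the complete trainer state as saved by the Hugging Face Trainer. For readers who want to inspect the run rather than scroll the log, here is a minimal sketch of how such a state file could be loaded and summarized. It assumes the standard `trainer_state.json` layout shown above; the file path is a placeholder, and only the Python standard library is used.

```python
import json

# Hypothetical path: the trainer state shown above, saved to disk as JSON.
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only entries that carry a training loss; eval and final-summary
# entries in log_history do not have a "loss" key.
train_log = [e for e in state["log_history"] if "loss" in e]

steps = [e["step"] for e in train_log]
losses = [e["loss"] for e in train_log]
lrs = [e["learning_rate"] for e in train_log]

print(f"logged points:       {len(train_log)}")
print(f"first loss:          {losses[0]:.4f} at step {steps[0]}")
print(f"last loss:           {losses[-1]:.4f} at step {steps[-1]}")
print(f"final learning rate: {lrs[-1]:.3e}")
print(f"max steps:           {state['max_steps']}")
```

Run against the log above, this would report the loss falling from roughly 1.34 at step 1 to about 0.33 by step 1860 as the learning rate decays toward zero; the same lists can be handed to any plotting library to draw the loss and learning-rate curves.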