{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9974326059050065,
  "eval_steps": 500,
  "global_step": 1038,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0019255455712451862,
      "grad_norm": 4.675525031091207,
      "learning_rate": 9.615384615384617e-08,
      "loss": 2.296,
      "step": 1
    },
    {
      "epoch": 0.009627727856225931,
      "grad_norm": 4.383312697034829,
      "learning_rate": 4.807692307692308e-07,
      "loss": 2.1961,
      "step": 5
    },
    {
      "epoch": 0.019255455712451863,
      "grad_norm": 3.163432373683522,
      "learning_rate": 9.615384615384617e-07,
      "loss": 2.1263,
      "step": 10
    },
    {
      "epoch": 0.028883183568677792,
      "grad_norm": 2.265167060038816,
      "learning_rate": 1.4423076923076922e-06,
      "loss": 1.7774,
      "step": 15
    },
    {
      "epoch": 0.038510911424903725,
      "grad_norm": 1.5336348895834757,
      "learning_rate": 1.9230769230769234e-06,
      "loss": 1.351,
      "step": 20
    },
    {
      "epoch": 0.04813863928112965,
      "grad_norm": 0.5295614063340365,
      "learning_rate": 2.403846153846154e-06,
      "loss": 0.939,
      "step": 25
    },
    {
      "epoch": 0.057766367137355584,
      "grad_norm": 0.4334072719900553,
      "learning_rate": 2.8846153846153845e-06,
      "loss": 0.8218,
      "step": 30
    },
    {
      "epoch": 0.06739409499358151,
      "grad_norm": 0.3248649181280309,
      "learning_rate": 3.365384615384616e-06,
      "loss": 0.6933,
      "step": 35
    },
    {
      "epoch": 0.07702182284980745,
      "grad_norm": 0.2543051022910018,
      "learning_rate": 3.846153846153847e-06,
      "loss": 0.6536,
      "step": 40
    },
    {
      "epoch": 0.08664955070603338,
      "grad_norm": 0.24243639068039996,
      "learning_rate": 4.326923076923077e-06,
      "loss": 0.5727,
      "step": 45
    },
    {
      "epoch": 0.0962772785622593,
      "grad_norm": 0.21784634136238626,
      "learning_rate": 4.807692307692308e-06,
      "loss": 0.5824,
      "step": 50
    },
    {
      "epoch": 0.10590500641848524,
      "grad_norm": 0.24558020757705826,
      "learning_rate": 5.288461538461539e-06,
      "loss": 0.5447,
      "step": 55
    },
    {
      "epoch": 0.11553273427471117,
      "grad_norm": 0.27633240690641786,
      "learning_rate": 5.769230769230769e-06,
      "loss": 0.5196,
      "step": 60
    },
    {
      "epoch": 0.1251604621309371,
      "grad_norm": 0.1831824766725952,
      "learning_rate": 6.25e-06,
      "loss": 0.5183,
      "step": 65
    },
    {
      "epoch": 0.13478818998716302,
      "grad_norm": 0.25923345195442493,
      "learning_rate": 6.730769230769232e-06,
      "loss": 0.4999,
      "step": 70
    },
    {
      "epoch": 0.14441591784338895,
      "grad_norm": 0.17651629460554236,
      "learning_rate": 7.211538461538462e-06,
      "loss": 0.4656,
      "step": 75
    },
    {
      "epoch": 0.1540436456996149,
      "grad_norm": 0.23934885323800617,
      "learning_rate": 7.692307692307694e-06,
      "loss": 0.4705,
      "step": 80
    },
    {
      "epoch": 0.16367137355584083,
      "grad_norm": 0.17293134941389127,
      "learning_rate": 8.173076923076923e-06,
      "loss": 0.4743,
      "step": 85
    },
    {
      "epoch": 0.17329910141206675,
      "grad_norm": 0.16896953176677149,
      "learning_rate": 8.653846153846155e-06,
      "loss": 0.4696,
      "step": 90
    },
    {
      "epoch": 0.18292682926829268,
      "grad_norm": 0.14455896542895968,
      "learning_rate": 9.134615384615384e-06,
      "loss": 0.4671,
      "step": 95
    },
    {
      "epoch": 0.1925545571245186,
      "grad_norm": 0.14120629212505248,
      "learning_rate": 9.615384615384616e-06,
      "loss": 0.4368,
      "step": 100
    },
    {
      "epoch": 0.20218228498074453,
      "grad_norm": 0.13472865892209046,
      "learning_rate": 9.999971715689765e-06,
      "loss": 0.4397,
      "step": 105
    },
    {
      "epoch": 0.21181001283697048,
      "grad_norm": 0.15889811986141222,
      "learning_rate": 9.99898179843121e-06,
      "loss": 0.432,
      "step": 110
    },
    {
      "epoch": 0.2214377406931964,
      "grad_norm": 0.15298006269669823,
      "learning_rate": 9.996577985645488e-06,
      "loss": 0.4484,
      "step": 115
    },
    {
      "epoch": 0.23106546854942234,
      "grad_norm": 0.14657423882488327,
      "learning_rate": 9.992760957219083e-06,
      "loss": 0.4411,
      "step": 120
    },
    {
      "epoch": 0.24069319640564826,
      "grad_norm": 0.13147480990020327,
      "learning_rate": 9.987531792747726e-06,
      "loss": 0.4518,
      "step": 125
    },
    {
      "epoch": 0.2503209242618742,
      "grad_norm": 0.14062203555872538,
      "learning_rate": 9.980891971231052e-06,
      "loss": 0.4325,
      "step": 130
    },
    {
      "epoch": 0.25994865211810014,
      "grad_norm": 0.11622965182668187,
      "learning_rate": 9.972843370654286e-06,
      "loss": 0.4153,
      "step": 135
    },
    {
      "epoch": 0.26957637997432604,
      "grad_norm": 0.11345089120636126,
      "learning_rate": 9.963388267457071e-06,
      "loss": 0.4244,
      "step": 140
    },
    {
      "epoch": 0.279204107830552,
      "grad_norm": 0.12785723323300235,
      "learning_rate": 9.952529335889615e-06,
      "loss": 0.4483,
      "step": 145
    },
    {
      "epoch": 0.2888318356867779,
      "grad_norm": 0.1129536919207956,
      "learning_rate": 9.940269647256319e-06,
      "loss": 0.4269,
      "step": 150
    },
    {
      "epoch": 0.29845956354300385,
      "grad_norm": 0.11813817138680209,
      "learning_rate": 9.926612669047085e-06,
      "loss": 0.4292,
      "step": 155
    },
    {
      "epoch": 0.3080872913992298,
      "grad_norm": 0.12171485620288995,
      "learning_rate": 9.911562263956593e-06,
      "loss": 0.4101,
      "step": 160
    },
    {
      "epoch": 0.3177150192554557,
      "grad_norm": 0.11639352426762696,
      "learning_rate": 9.895122688791792e-06,
      "loss": 0.4239,
      "step": 165
    },
    {
      "epoch": 0.32734274711168165,
      "grad_norm": 0.12486408455362112,
      "learning_rate": 9.877298593267906e-06,
      "loss": 0.4269,
      "step": 170
    },
    {
      "epoch": 0.33697047496790755,
      "grad_norm": 0.10765095070330241,
      "learning_rate": 9.85809501869334e-06,
      "loss": 0.4249,
      "step": 175
    },
    {
      "epoch": 0.3465982028241335,
      "grad_norm": 0.10473147389747606,
      "learning_rate": 9.837517396543799e-06,
      "loss": 0.428,
      "step": 180
    },
    {
      "epoch": 0.35622593068035946,
      "grad_norm": 0.11066926415967249,
      "learning_rate": 9.815571546926074e-06,
      "loss": 0.4067,
      "step": 185
    },
    {
      "epoch": 0.36585365853658536,
      "grad_norm": 0.11382788234866194,
      "learning_rate": 9.792263676931906e-06,
      "loss": 0.418,
      "step": 190
    },
    {
      "epoch": 0.3754813863928113,
      "grad_norm": 0.11003473176727298,
      "learning_rate": 9.767600378882379e-06,
      "loss": 0.4333,
      "step": 195
    },
    {
      "epoch": 0.3851091142490372,
      "grad_norm": 0.10956539331792438,
      "learning_rate": 9.741588628463384e-06,
      "loss": 0.4308,
      "step": 200
    },
    {
      "epoch": 0.39473684210526316,
      "grad_norm": 0.10379012730393708,
      "learning_rate": 9.714235782752633e-06,
      "loss": 0.4234,
      "step": 205
    },
    {
      "epoch": 0.40436456996148906,
      "grad_norm": 0.10979551297884416,
      "learning_rate": 9.68554957813881e-06,
      "loss": 0.3863,
      "step": 210
    },
    {
      "epoch": 0.413992297817715,
      "grad_norm": 0.10147077391922903,
      "learning_rate": 9.655538128133432e-06,
      "loss": 0.4286,
      "step": 215
    },
    {
      "epoch": 0.42362002567394097,
      "grad_norm": 0.10441472907725119,
      "learning_rate": 9.62420992107605e-06,
      "loss": 0.4122,
      "step": 220
    },
    {
      "epoch": 0.43324775353016687,
      "grad_norm": 0.11600725245110252,
      "learning_rate": 9.59157381773344e-06,
      "loss": 0.4001,
      "step": 225
    },
    {
      "epoch": 0.4428754813863928,
      "grad_norm": 0.10739569680474984,
      "learning_rate": 9.557639048793453e-06,
      "loss": 0.4231,
      "step": 230
    },
    {
      "epoch": 0.4525032092426187,
      "grad_norm": 0.11403638891480855,
      "learning_rate": 9.522415212254229e-06,
      "loss": 0.4093,
      "step": 235
    },
    {
      "epoch": 0.4621309370988447,
      "grad_norm": 0.09497909577287572,
      "learning_rate": 9.485912270709542e-06,
      "loss": 0.4113,
      "step": 240
    },
    {
      "epoch": 0.47175866495507063,
      "grad_norm": 0.09985569093273962,
      "learning_rate": 9.448140548531004e-06,
      "loss": 0.3785,
      "step": 245
    },
    {
      "epoch": 0.4813863928112965,
      "grad_norm": 0.11631044044684512,
      "learning_rate": 9.409110728947964e-06,
      "loss": 0.3933,
      "step": 250
    },
    {
      "epoch": 0.4910141206675225,
      "grad_norm": 0.0997854084788479,
      "learning_rate": 9.368833851025882e-06,
      "loss": 0.395,
      "step": 255
    },
    {
      "epoch": 0.5006418485237484,
      "grad_norm": 0.10619811125132311,
      "learning_rate": 9.327321306544097e-06,
      "loss": 0.4137,
      "step": 260
    },
    {
      "epoch": 0.5102695763799743,
      "grad_norm": 0.10149827880932093,
      "learning_rate": 9.284584836773796e-06,
      "loss": 0.4077,
      "step": 265
    },
    {
      "epoch": 0.5198973042362003,
      "grad_norm": 0.10925773008402428,
      "learning_rate": 9.240636529157158e-06,
      "loss": 0.4092,
      "step": 270
    },
    {
      "epoch": 0.5295250320924262,
      "grad_norm": 0.10181499630559736,
      "learning_rate": 9.195488813888588e-06,
      "loss": 0.3975,
      "step": 275
    },
    {
      "epoch": 0.5391527599486521,
      "grad_norm": 0.08480738919090135,
      "learning_rate": 9.149154460398993e-06,
      "loss": 0.4149,
      "step": 280
    },
    {
      "epoch": 0.5487804878048781,
      "grad_norm": 0.11272795463115028,
      "learning_rate": 9.101646573744118e-06,
      "loss": 0.4149,
      "step": 285
    },
    {
      "epoch": 0.558408215661104,
      "grad_norm": 0.11249381051877479,
      "learning_rate": 9.052978590897964e-06,
      "loss": 0.4081,
      "step": 290
    },
    {
      "epoch": 0.5680359435173299,
      "grad_norm": 0.09372931069614396,
      "learning_rate": 9.003164276952315e-06,
      "loss": 0.391,
      "step": 295
    },
    {
      "epoch": 0.5776636713735558,
      "grad_norm": 0.10766510897950467,
      "learning_rate": 8.95221772122345e-06,
      "loss": 0.3918,
      "step": 300
    },
    {
      "epoch": 0.5872913992297818,
      "grad_norm": 0.09999194407255885,
      "learning_rate": 8.900153333267198e-06,
      "loss": 0.3843,
      "step": 305
    },
    {
      "epoch": 0.5969191270860077,
      "grad_norm": 0.09518035736085621,
      "learning_rate": 8.846985838803357e-06,
      "loss": 0.4125,
      "step": 310
    },
    {
      "epoch": 0.6065468549422336,
      "grad_norm": 0.08726122835506574,
      "learning_rate": 8.792730275550736e-06,
      "loss": 0.4079,
      "step": 315
    },
    {
      "epoch": 0.6161745827984596,
      "grad_norm": 0.09353747112555597,
      "learning_rate": 8.73740198897393e-06,
      "loss": 0.4142,
      "step": 320
    },
    {
      "epoch": 0.6258023106546855,
      "grad_norm": 0.09406069607300888,
      "learning_rate": 8.681016627943063e-06,
      "loss": 0.4104,
      "step": 325
    },
    {
      "epoch": 0.6354300385109114,
      "grad_norm": 0.09374910799397031,
      "learning_rate": 8.623590140307715e-06,
      "loss": 0.395,
      "step": 330
    },
    {
      "epoch": 0.6450577663671374,
      "grad_norm": 0.10471444555170399,
      "learning_rate": 8.565138768386276e-06,
      "loss": 0.421,
      "step": 335
    },
    {
      "epoch": 0.6546854942233633,
      "grad_norm": 0.0994921653563304,
      "learning_rate": 8.50567904437203e-06,
      "loss": 0.4156,
      "step": 340
    },
    {
      "epoch": 0.6643132220795892,
      "grad_norm": 0.08678612456218597,
      "learning_rate": 8.445227785657245e-06,
      "loss": 0.4055,
      "step": 345
    },
    {
      "epoch": 0.6739409499358151,
      "grad_norm": 0.09635413003916711,
      "learning_rate": 8.383802090076589e-06,
      "loss": 0.3856,
      "step": 350
    },
    {
      "epoch": 0.6835686777920411,
      "grad_norm": 0.09507719640106775,
      "learning_rate": 8.321419331071242e-06,
      "loss": 0.4057,
      "step": 355
    },
    {
      "epoch": 0.693196405648267,
      "grad_norm": 0.1015426141791736,
      "learning_rate": 8.258097152775045e-06,
      "loss": 0.4212,
      "step": 360
    },
    {
      "epoch": 0.7028241335044929,
      "grad_norm": 0.10014829884316678,
      "learning_rate": 8.1938534650241e-06,
      "loss": 0.3968,
      "step": 365
    },
    {
      "epoch": 0.7124518613607189,
      "grad_norm": 0.10046140588576453,
      "learning_rate": 8.128706438291193e-06,
      "loss": 0.3972,
      "step": 370
    },
    {
      "epoch": 0.7220795892169448,
      "grad_norm": 0.08519030816328944,
      "learning_rate": 8.062674498546542e-06,
      "loss": 0.4011,
      "step": 375
    },
    {
      "epoch": 0.7317073170731707,
      "grad_norm": 0.10853398520099701,
      "learning_rate": 7.995776322046236e-06,
      "loss": 0.3968,
      "step": 380
    },
    {
      "epoch": 0.7413350449293966,
      "grad_norm": 0.08705459909141744,
      "learning_rate": 7.92803083004991e-06,
      "loss": 0.3795,
      "step": 385
    },
    {
      "epoch": 0.7509627727856226,
      "grad_norm": 0.09871394890968048,
      "learning_rate": 7.859457183469119e-06,
      "loss": 0.3924,
      "step": 390
    },
    {
      "epoch": 0.7605905006418485,
      "grad_norm": 0.08824376485660647,
      "learning_rate": 7.790074777447938e-06,
      "loss": 0.351,
      "step": 395
    },
    {
      "epoch": 0.7702182284980744,
      "grad_norm": 0.09406342927041313,
      "learning_rate": 7.719903235877289e-06,
      "loss": 0.4028,
      "step": 400
    },
    {
      "epoch": 0.7798459563543004,
      "grad_norm": 0.0910039354481899,
      "learning_rate": 7.648962405844587e-06,
      "loss": 0.3965,
      "step": 405
    },
    {
      "epoch": 0.7894736842105263,
      "grad_norm": 0.10182384336867278,
      "learning_rate": 7.577272352020269e-06,
      "loss": 0.3767,
      "step": 410
    },
    {
      "epoch": 0.7991014120667522,
      "grad_norm": 0.09839014599757313,
      "learning_rate": 7.5048533509827474e-06,
      "loss": 0.3864,
      "step": 415
    },
    {
      "epoch": 0.8087291399229781,
      "grad_norm": 0.08845839135994245,
      "learning_rate": 7.43172588548347e-06,
      "loss": 0.3961,
      "step": 420
    },
    {
      "epoch": 0.8183568677792041,
      "grad_norm": 0.08848040955413092,
      "learning_rate": 7.357910638653629e-06,
      "loss": 0.3688,
      "step": 425
    },
    {
      "epoch": 0.82798459563543,
      "grad_norm": 0.09423759349695249,
      "learning_rate": 7.283428488154227e-06,
      "loss": 0.3879,
      "step": 430
    },
    {
      "epoch": 0.8376123234916559,
      "grad_norm": 0.08488822732748169,
      "learning_rate": 7.208300500271097e-06,
      "loss": 0.4036,
      "step": 435
    },
    {
      "epoch": 0.8472400513478819,
      "grad_norm": 0.07899711812098008,
      "learning_rate": 7.1325479239565875e-06,
      "loss": 0.4006,
      "step": 440
    },
    {
      "epoch": 0.8568677792041078,
      "grad_norm": 0.08340972320218538,
      "learning_rate": 7.056192184819582e-06,
      "loss": 0.3922,
      "step": 445
    },
    {
      "epoch": 0.8664955070603337,
      "grad_norm": 0.08930316294422015,
      "learning_rate": 6.9792548790655465e-06,
      "loss": 0.3619,
      "step": 450
    },
    {
      "epoch": 0.8761232349165597,
      "grad_norm": 0.07843462308791004,
      "learning_rate": 6.901757767388331e-06,
      "loss": 0.3683,
      "step": 455
    },
    {
      "epoch": 0.8857509627727856,
      "grad_norm": 0.0888434612182392,
      "learning_rate": 6.823722768815446e-06,
      "loss": 0.4155,
      "step": 460
    },
    {
      "epoch": 0.8953786906290115,
      "grad_norm": 0.0815067949974782,
      "learning_rate": 6.7451719545085585e-06,
      "loss": 0.3608,
      "step": 465
    },
    {
      "epoch": 0.9050064184852374,
      "grad_norm": 0.09498095494824059,
      "learning_rate": 6.666127541520958e-06,
      "loss": 0.394,
      "step": 470
    },
    {
      "epoch": 0.9146341463414634,
      "grad_norm": 0.09361211338328486,
      "learning_rate": 6.586611886513756e-06,
      "loss": 0.3995,
      "step": 475
    },
    {
      "epoch": 0.9242618741976893,
      "grad_norm": 0.0904791289087828,
      "learning_rate": 6.506647479432604e-06,
      "loss": 0.3887,
      "step": 480
    },
    {
      "epoch": 0.9338896020539152,
      "grad_norm": 0.08454411427597165,
      "learning_rate": 6.426256937146707e-06,
      "loss": 0.378,
      "step": 485
    },
    {
      "epoch": 0.9435173299101413,
      "grad_norm": 0.09668080121210802,
      "learning_rate": 6.34546299705195e-06,
      "loss": 0.3813,
      "step": 490
    },
    {
      "epoch": 0.9531450577663672,
      "grad_norm": 0.10076895625991092,
      "learning_rate": 6.264288510639922e-06,
      "loss": 0.3629,
      "step": 495
    },
    {
      "epoch": 0.962772785622593,
      "grad_norm": 0.08864741394595178,
      "learning_rate": 6.182756437034677e-06,
      "loss": 0.3924,
      "step": 500
    },
    {
      "epoch": 0.972400513478819,
      "grad_norm": 0.09234930786675101,
      "learning_rate": 6.100889836499052e-06,
      "loss": 0.4034,
      "step": 505
    },
    {
      "epoch": 0.982028241335045,
      "grad_norm": 0.10439073600863705,
      "learning_rate": 6.018711863912381e-06,
      "loss": 0.3867,
      "step": 510
    },
    {
      "epoch": 0.9916559691912709,
      "grad_norm": 0.0832820482402092,
      "learning_rate": 5.936245762221436e-06,
      "loss": 0.3866,
      "step": 515
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.08006018844085963,
      "learning_rate": 5.853514855866481e-06,
      "loss": 0.3859,
      "step": 520
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.3565569818019867,
      "eval_runtime": 14.4227,
      "eval_samples_per_second": 17.472,
      "eval_steps_per_second": 4.368,
      "step": 520
    },
    {
      "epoch": 1.009627727856226,
      "grad_norm": 0.09401110743601077,
      "learning_rate": 5.770542544184252e-06,
      "loss": 0.3628,
      "step": 525
    },
    {
      "epoch": 1.0192554557124518,
      "grad_norm": 0.0946480182617177,
      "learning_rate": 5.687352294789776e-06,
      "loss": 0.3708,
      "step": 530
    },
    {
      "epoch": 1.0288831835686778,
      "grad_norm": 0.10846300152568576,
      "learning_rate": 5.603967636938864e-06,
      "loss": 0.3625,
      "step": 535
    },
    {
      "epoch": 1.0385109114249038,
      "grad_norm": 0.09681851328737975,
      "learning_rate": 5.52041215487317e-06,
      "loss": 0.3666,
      "step": 540
    },
    {
      "epoch": 1.0481386392811296,
      "grad_norm": 0.12033097836718823,
      "learning_rate": 5.4367094811497105e-06,
      "loss": 0.3617,
      "step": 545
    },
    {
      "epoch": 1.0577663671373556,
      "grad_norm": 0.09615564194204358,
      "learning_rate": 5.352883289956701e-06,
      "loss": 0.37,
      "step": 550
    },
    {
      "epoch": 1.0673940949935816,
      "grad_norm": 0.09285850703825273,
      "learning_rate": 5.268957290417632e-06,
      "loss": 0.3856,
      "step": 555
    },
    {
      "epoch": 1.0770218228498074,
      "grad_norm": 0.08976885470379573,
      "learning_rate": 5.184955219885457e-06,
      "loss": 0.3747,
      "step": 560
    },
    {
      "epoch": 1.0866495507060334,
      "grad_norm": 0.1022360329654874,
      "learning_rate": 5.1009008372288015e-06,
      "loss": 0.365,
      "step": 565
    },
    {
      "epoch": 1.0962772785622592,
      "grad_norm": 0.1035446669504653,
      "learning_rate": 5.016817916112075e-06,
      "loss": 0.3624,
      "step": 570
    },
    {
      "epoch": 1.1059050064184852,
      "grad_norm": 0.09569503568277107,
      "learning_rate": 4.932730238271414e-06,
      "loss": 0.3559,
      "step": 575
    },
    {
      "epoch": 1.1155327342747112,
      "grad_norm": 0.08861451080285555,
      "learning_rate": 4.848661586788334e-06,
      "loss": 0.3682,
      "step": 580
    },
    {
      "epoch": 1.125160462130937,
      "grad_norm": 0.09786515991475879,
      "learning_rate": 4.764635739362988e-06,
      "loss": 0.3715,
      "step": 585
    },
    {
      "epoch": 1.134788189987163,
      "grad_norm": 0.12183470701722215,
      "learning_rate": 4.68067646158898e-06,
      "loss": 0.3773,
      "step": 590
    },
    {
      "epoch": 1.144415917843389,
      "grad_norm": 0.0848742632850072,
      "learning_rate": 4.596807500231573e-06,
      "loss": 0.3601,
      "step": 595
    },
    {
      "epoch": 1.1540436456996148,
      "grad_norm": 0.10570291615511458,
      "learning_rate": 4.513052576511227e-06,
      "loss": 0.3635,
      "step": 600
    },
    {
      "epoch": 1.1636713735558408,
      "grad_norm": 0.10634802539511004,
      "learning_rate": 4.429435379394377e-06,
      "loss": 0.3568,
      "step": 605
    },
    {
      "epoch": 1.1732991014120668,
      "grad_norm": 0.13446103864517522,
      "learning_rate": 4.34597955889332e-06,
      "loss": 0.3586,
      "step": 610
    },
    {
      "epoch": 1.1829268292682926,
      "grad_norm": 0.0941264293467416,
      "learning_rate": 4.262708719377118e-06,
      "loss": 0.3481,
      "step": 615
    },
    {
      "epoch": 1.1925545571245186,
      "grad_norm": 0.1410741383886165,
      "learning_rate": 4.17964641289543e-06,
      "loss": 0.385,
      "step": 620
    },
    {
      "epoch": 1.2021822849807444,
      "grad_norm": 0.09241491761949897,
      "learning_rate": 4.096816132517108e-06,
      "loss": 0.3785,
      "step": 625
    },
    {
      "epoch": 1.2118100128369704,
      "grad_norm": 0.11124645421714398,
      "learning_rate": 4.014241305685514e-06,
      "loss": 0.3681,
      "step": 630
    },
    {
      "epoch": 1.2214377406931964,
      "grad_norm": 0.11023070742649112,
      "learning_rate": 3.9319452875923725e-06,
      "loss": 0.3411,
      "step": 635
    },
    {
      "epoch": 1.2310654685494224,
      "grad_norm": 0.09552140786807434,
      "learning_rate": 3.849951354572057e-06,
      "loss": 0.3459,
      "step": 640
    },
    {
      "epoch": 1.2406931964056482,
      "grad_norm": 0.12231047507514385,
      "learning_rate": 3.7682826975182e-06,
      "loss": 0.3535,
      "step": 645
    },
    {
      "epoch": 1.2503209242618742,
      "grad_norm": 0.10830754166725957,
      "learning_rate": 3.686962415324452e-06,
      "loss": 0.3572,
      "step": 650
    },
    {
      "epoch": 1.2599486521181,
      "grad_norm": 0.14342247712840153,
      "learning_rate": 3.6060135083512656e-06,
      "loss": 0.366,
      "step": 655
    },
    {
      "epoch": 1.269576379974326,
      "grad_norm": 0.11323866001401849,
      "learning_rate": 3.5254588719205494e-06,
      "loss": 0.3539,
      "step": 660
    },
    {
      "epoch": 1.279204107830552,
      "grad_norm": 0.09929728772523248,
      "learning_rate": 3.4453212898400355e-06,
      "loss": 0.348,
      "step": 665
    },
    {
      "epoch": 1.2888318356867778,
      "grad_norm": 0.17492229609956267,
      "learning_rate": 3.365623427959175e-06,
      "loss": 0.3389,
      "step": 670
    },
    {
      "epoch": 1.2984595635430038,
      "grad_norm": 0.09292411937812878,
      "learning_rate": 3.2863878277584125e-06,
      "loss": 0.3523,
      "step": 675
    },
    {
      "epoch": 1.3080872913992299,
      "grad_norm": 0.10385494142168447,
      "learning_rate": 3.2076368999736175e-06,
      "loss": 0.3546,
      "step": 680
    },
    {
      "epoch": 1.3177150192554556,
      "grad_norm": 0.15116008215242238,
      "learning_rate": 3.1293929182575154e-06,
      "loss": 0.3595,
      "step": 685
    },
    {
      "epoch": 1.3273427471116817,
      "grad_norm": 0.13134864128367904,
      "learning_rate": 3.0516780128798794e-06,
      "loss": 0.3604,
      "step": 690
    },
    {
      "epoch": 1.3369704749679077,
      "grad_norm": 0.1385810651745671,
      "learning_rate": 2.9745141644682684e-06,
      "loss": 0.3817,
      "step": 695
    },
    {
      "epoch": 1.3465982028241335,
      "grad_norm": 0.11308291681302851,
      "learning_rate": 2.8979231977911127e-06,
      "loss": 0.3436,
      "step": 700
    },
    {
      "epoch": 1.3562259306803595,
      "grad_norm": 0.1311446028434404,
      "learning_rate": 2.8219267755848613e-06,
      "loss": 0.3804,
      "step": 705
    },
    {
      "epoch": 1.3658536585365852,
      "grad_norm": 0.10654779695663232,
      "learning_rate": 2.746546392426963e-06,
      "loss": 0.3426,
      "step": 710
    },
    {
      "epoch": 1.3754813863928113,
      "grad_norm": 0.14977867299262188,
      "learning_rate": 2.671803368656413e-06,
      "loss": 0.373,
      "step": 715
    },
    {
      "epoch": 1.3851091142490373,
      "grad_norm": 0.10008323376413787,
      "learning_rate": 2.5977188443435874e-06,
      "loss": 0.3707,
      "step": 720
    },
    {
      "epoch": 1.3947368421052633,
      "grad_norm": 0.12287994467841869,
      "learning_rate": 2.524313773311032e-06,
      "loss": 0.3419,
      "step": 725
    },
    {
      "epoch": 1.404364569961489,
      "grad_norm": 0.13242489561288942,
      "learning_rate": 2.4516089172069852e-06,
      "loss": 0.3505,
      "step": 730
    },
    {
      "epoch": 1.413992297817715,
      "grad_norm": 0.15290371838598157,
      "learning_rate": 2.379624839633187e-06,
      "loss": 0.3433,
      "step": 735
    },
    {
      "epoch": 1.4236200256739409,
      "grad_norm": 0.15079312975317574,
      "learning_rate": 2.308381900328767e-06,
      "loss": 0.3521,
      "step": 740
    },
    {
      "epoch": 1.4332477535301669,
      "grad_norm": 0.13283749811762047,
      "learning_rate": 2.2379002494117467e-06,
      "loss": 0.3694,
      "step": 745
    },
    {
      "epoch": 1.4428754813863929,
      "grad_norm": 0.13332742256673105,
      "learning_rate": 2.1681998216798476e-06,
      "loss": 0.3425,
      "step": 750
    },
    {
      "epoch": 1.4525032092426187,
      "grad_norm": 0.14406157700699168,
      "learning_rate": 2.099300330972209e-06,
      "loss": 0.3358,
      "step": 755
    },
    {
      "epoch": 1.4621309370988447,
      "grad_norm": 0.14024887794079086,
      "learning_rate": 2.0312212645935755e-06,
      "loss": 0.3461,
      "step": 760
    },
    {
      "epoch": 1.4717586649550707,
      "grad_norm": 0.15414062139512394,
      "learning_rate": 1.9639818778025836e-06,
      "loss": 0.3477,
      "step": 765
    },
    {
      "epoch": 1.4813863928112965,
      "grad_norm": 0.1369666143345331,
      "learning_rate": 1.8976011883656632e-06,
      "loss": 0.3611,
      "step": 770
    },
    {
      "epoch": 1.4910141206675225,
      "grad_norm": 0.15223059852909734,
      "learning_rate": 1.8320979711781212e-06,
      "loss": 0.3476,
      "step": 775
    },
    {
      "epoch": 1.5006418485237485,
      "grad_norm": 0.1714137202575772,
      "learning_rate": 1.767490752953896e-06,
      "loss": 0.3129,
      "step": 780
    },
    {
      "epoch": 1.5102695763799743,
      "grad_norm": 0.14319611843869498,
      "learning_rate": 1.7037978069855472e-06,
      "loss": 0.3407,
      "step": 785
    },
    {
      "epoch": 1.5198973042362003,
      "grad_norm": 0.16393330833863626,
      "learning_rate": 1.641037147975872e-06,
      "loss": 0.3526,
      "step": 790
    },
    {
      "epoch": 1.529525032092426,
      "grad_norm": 0.16364031655094152,
      "learning_rate": 1.5792265269427082e-06,
      "loss": 0.3456,
      "step": 795
    },
    {
      "epoch": 1.539152759948652,
      "grad_norm": 0.16348327211177935,
      "learning_rate": 1.5183834261982804e-06,
      "loss": 0.3398,
      "step": 800
    },
    {
      "epoch": 1.548780487804878,
      "grad_norm": 0.15134397957607432,
      "learning_rate": 1.4585250544045666e-06,
      "loss": 0.3479,
      "step": 805
    },
    {
      "epoch": 1.558408215661104,
      "grad_norm": 0.17575731164707303,
      "learning_rate": 1.399668341706053e-06,
      "loss": 0.322,
      "step": 810
    },
    {
      "epoch": 1.5680359435173299,
      "grad_norm": 0.17640101887621162,
      "learning_rate": 1.3418299349412761e-06,
      "loss": 0.3439,
      "step": 815
    },
    {
      "epoch": 1.5776636713735557,
      "grad_norm": 0.15625925540478658,
      "learning_rate": 1.2850261929344748e-06,
      "loss": 0.338,
      "step": 820
    },
    {
      "epoch": 1.5872913992297817,
      "grad_norm": 0.1475923645387349,
      "learning_rate": 1.2292731818687204e-06,
      "loss": 0.344,
      "step": 825
    },
    {
      "epoch": 1.5969191270860077,
      "grad_norm": 0.1598002892634283,
      "learning_rate": 1.1745866707418146e-06,
      "loss": 0.3524,
      "step": 830
    },
    {
      "epoch": 1.6065468549422337,
      "grad_norm": 0.16235140780790008,
      "learning_rate": 1.1209821269062254e-06,
      "loss": 0.3314,
      "step": 835
    },
    {
      "epoch": 1.6161745827984597,
      "grad_norm": 0.1817156515805022,
      "learning_rate": 1.0684747116943683e-06,
      "loss": 0.3279,
      "step": 840
    },
    {
      "epoch": 1.6258023106546855,
      "grad_norm": 0.162000455486421,
      "learning_rate": 1.0170792761304083e-06,
      "loss": 0.3562,
      "step": 845
    },
    {
      "epoch": 1.6354300385109113,
      "grad_norm": 0.22932643721397972,
      "learning_rate": 9.668103567298615e-07,
      "loss": 0.3374,
      "step": 850
    },
    {
      "epoch": 1.6450577663671373,
      "grad_norm": 0.18052968600391292,
      "learning_rate": 9.176821713881229e-07,
      "loss": 0.3409,
      "step": 855
    },
    {
      "epoch": 1.6546854942233633,
      "grad_norm": 0.19204286956569327,
      "learning_rate": 8.697086153591289e-07,
      "loss": 0.3411,
      "step": 860
    },
    {
      "epoch": 1.6643132220795893,
      "grad_norm": 0.15338857676660417,
      "learning_rate": 8.229032573252671e-07,
      "loss": 0.3509,
      "step": 865
    },
    {
      "epoch": 1.673940949935815,
      "grad_norm": 0.2064824927113351,
      "learning_rate": 7.772793355596597e-07,
      "loss": 0.3535,
      "step": 870
    },
    {
      "epoch": 1.6835686777920411,
      "grad_norm": 0.1901669621594226,
      "learning_rate": 7.328497541818891e-07,
      "loss": 0.35,
      "step": 875
    },
    {
      "epoch": 1.693196405648267,
      "grad_norm": 0.20680883575996672,
      "learning_rate": 6.896270795082394e-07,
      "loss": 0.3284,
      "step": 880
    },
    {
      "epoch": 1.702824133504493,
      "grad_norm": 0.208535850753021,
      "learning_rate": 6.476235364974848e-07,
      "loss": 0.3388,
      "step": 885
    },
    {
      "epoch": 1.712451861360719,
      "grad_norm": 0.20588654883512666,
      "learning_rate": 6.06851005293217e-07,
      "loss": 0.3361,
      "step": 890
    },
    {
      "epoch": 1.722079589216945,
      "grad_norm": 0.20944379197373206,
      "learning_rate": 5.673210178637129e-07,
      "loss": 0.3285,
      "step": 895
    },
    {
      "epoch": 1.7317073170731707,
      "grad_norm": 0.192174693171731,
      "learning_rate": 5.290447547402594e-07,
      "loss": 0.3357,
      "step": 900
    },
    {
      "epoch": 1.7413350449293965,
      "grad_norm": 0.2791441208177208,
      "learning_rate": 4.920330418549007e-07,
      "loss": 0.3306,
      "step": 905
    },
    {
      "epoch": 1.7509627727856225,
      "grad_norm": 0.2268656547573065,
      "learning_rate": 4.5629634747845764e-07,
      "loss": 0.3012,
      "step": 910
    },
    {
      "epoch": 1.7605905006418485,
      "grad_norm": 0.20835714245970288,
      "learning_rate": 4.218447792597219e-07,
      "loss": 0.3425,
      "step": 915
    },
    {
      "epoch": 1.7702182284980745,
      "grad_norm": 0.23100114988163314,
      "learning_rate": 3.8868808136663995e-07,
      "loss": 0.3227,
      "step": 920
    },
    {
      "epoch": 1.7798459563543005,
      "grad_norm": 0.21698851745344763,
      "learning_rate": 3.56835631730309e-07,
      "loss": 0.3357,
      "step": 925
    },
    {
      "epoch": 1.7894736842105263,
      "grad_norm": 0.19672243352771557,
      "learning_rate": 3.262964393925433e-07,
      "loss": 0.3239,
      "step": 930
    },
    {
      "epoch": 1.7991014120667521,
      "grad_norm": 0.23389991198328033,
      "learning_rate": 2.970791419577995e-07,
      "loss": 0.3047,
      "step": 935
    },
    {
      "epoch": 1.8087291399229781,
      "grad_norm": 0.23366499263913967,
      "learning_rate": 2.6919200315013606e-07,
      "loss": 0.287,
      "step": 940
    },
    {
      "epoch": 1.8183568677792041,
      "grad_norm": 0.1966679887252355,
      "learning_rate": 2.42642910475932e-07,
      "loss": 0.3255,
      "step": 945
    },
    {
      "epoch": 1.8279845956354301,
      "grad_norm": 0.2366162252630318,
      "learning_rate": 2.1743937299301242e-07,
      "loss": 0.3249,
      "step": 950
    },
    {
      "epoch": 1.837612323491656,
      "grad_norm": 0.17846484227641432,
      "learning_rate": 1.9358851918680478e-07,
      "loss": 0.3319,
      "step": 955
    },
    {
      "epoch": 1.847240051347882,
      "grad_norm": 0.22183877870838897,
      "learning_rate": 1.7109709495415073e-07,
      "loss": 0.2987,
      "step": 960
    },
    {
      "epoch": 1.8568677792041077,
      "grad_norm": 0.1922746561314516,
      "learning_rate": 1.499714616953124e-07,
      "loss": 0.3273,
      "step": 965
    },
    {
      "epoch": 1.8664955070603337,
      "grad_norm": 0.2129526026233563,
      "learning_rate": 1.3021759451473548e-07,
      "loss": 0.3165,
      "step": 970
    },
    {
      "epoch": 1.8761232349165597,
      "grad_norm": 0.22092907981647764,
      "learning_rate": 1.1184108053107268e-07,
      "loss": 0.3201,
      "step": 975
    },
    {
      "epoch": 1.8857509627727858,
      "grad_norm": 0.21605685840962405,
      "learning_rate": 9.484711729694229e-08,
      "loss": 0.3334,
      "step": 980
    },
    {
      "epoch": 1.8953786906290115,
      "grad_norm": 0.23787534108452493,
      "learning_rate": 7.924051132886357e-08,
      "loss": 0.3173,
      "step": 985
    },
    {
      "epoch": 1.9050064184852373,
      "grad_norm": 0.17867415785242957,
      "learning_rate": 6.502567674780524e-08,
      "loss": 0.3351,
      "step": 990
    },
    {
      "epoch": 1.9146341463414633,
      "grad_norm": 0.2159405119405128,
      "learning_rate": 5.220663403070592e-08,
      "loss": 0.3144,
      "step": 995
    },
    {
      "epoch": 1.9242618741976893,
      "grad_norm": 0.1818905292820818,
      "learning_rate": 4.078700887333365e-08,
      "loss": 0.3399,
      "step": 1000
    },
    {
      "epoch": 1.9338896020539154,
      "grad_norm": 0.2160735551084158,
      "learning_rate": 3.0770031164812366e-08,
      "loss": 0.3399,
      "step": 1005
    },
    {
      "epoch": 1.9435173299101414,
      "grad_norm": 0.19974665047237547,
      "learning_rate": 2.2158534074083193e-08,
      "loss": 0.3046,
      "step": 1010
    },
    {
      "epoch": 1.9531450577663672,
      "grad_norm": 0.23888039492400656,
      "learning_rate": 1.495495324858709e-08,
      "loss": 0.329,
      "step": 1015
    },
    {
      "epoch": 1.962772785622593,
      "grad_norm": 0.23943732877322751,
      "learning_rate": 9.16132612537035e-09,
      "loss": 0.3211,
      "step": 1020
    },
    {
      "epoch": 1.972400513478819,
      "grad_norm": 0.23280339101940556,
      "learning_rate": 4.779291354822802e-09,
      "loss": 0.3058,
      "step": 1025
    },
    {
      "epoch": 1.982028241335045,
      "grad_norm": 0.2491328973768298,
      "learning_rate": 1.8100883372085266e-09,
      "loss": 0.3212,
      "step": 1030
    },
    {
      "epoch": 1.991655969191271,
      "grad_norm": 0.22142463430680218,
      "learning_rate": 2.545568721179503e-10,
      "loss": 0.3358,
      "step": 1035
    },
    {
      "epoch": 1.9974326059050065,
      "eval_loss": 0.3029652535915375,
      "eval_runtime": 12.342,
      "eval_samples_per_second": 20.418,
      "eval_steps_per_second": 5.105,
      "step": 1038
    },
    {
      "epoch": 1.9974326059050065,
      "step": 1038,
      "total_flos": 2.6515469798631014e+18,
      "train_loss": 0.4156393744697461,
      "train_runtime": 8276.9589,
      "train_samples_per_second": 6.023,
      "train_steps_per_second": 0.125
    }
  ],
  "logging_steps": 5,
  "max_steps": 1038,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.6515469798631014e+18,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}