{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 547,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.009140767824497258,
      "grad_norm": 72.8250732421875,
      "learning_rate": 1.8181818181818183e-06,
      "loss": 3.402,
      "step": 5
    },
    {
      "epoch": 0.018281535648994516,
      "grad_norm": 28.21662139892578,
      "learning_rate": 3.6363636363636366e-06,
      "loss": 2.6895,
      "step": 10
    },
    {
      "epoch": 0.027422303473491772,
      "grad_norm": 12.129012107849121,
      "learning_rate": 5.4545454545454545e-06,
      "loss": 1.2964,
      "step": 15
    },
    {
      "epoch": 0.03656307129798903,
      "grad_norm": 4.143538475036621,
      "learning_rate": 7.272727272727273e-06,
      "loss": 0.386,
      "step": 20
    },
    {
      "epoch": 0.04570383912248629,
      "grad_norm": 9.900189399719238,
      "learning_rate": 9.090909090909091e-06,
      "loss": 0.2348,
      "step": 25
    },
    {
      "epoch": 0.054844606946983544,
      "grad_norm": 2.2335424423217773,
      "learning_rate": 1.0909090909090909e-05,
      "loss": 0.1444,
      "step": 30
    },
    {
      "epoch": 0.06398537477148081,
      "grad_norm": 2.3110196590423584,
      "learning_rate": 1.2727272727272728e-05,
      "loss": 0.1224,
      "step": 35
    },
    {
      "epoch": 0.07312614259597806,
      "grad_norm": 2.3454320430755615,
      "learning_rate": 1.4545454545454546e-05,
      "loss": 0.1052,
      "step": 40
    },
    {
      "epoch": 0.08226691042047532,
      "grad_norm": 2.230443239212036,
      "learning_rate": 1.6363636363636366e-05,
      "loss": 0.0856,
      "step": 45
    },
    {
      "epoch": 0.09140767824497258,
      "grad_norm": 2.1253552436828613,
      "learning_rate": 1.8181818181818182e-05,
      "loss": 0.0625,
      "step": 50
    },
    {
      "epoch": 0.10054844606946983,
      "grad_norm": 1.7156140804290771,
      "learning_rate": 2e-05,
      "loss": 0.0384,
      "step": 55
    },
    {
      "epoch": 0.10968921389396709,
      "grad_norm": 1.71486234664917,
      "learning_rate": 1.9994903844605973e-05,
      "loss": 0.0293,
      "step": 60
    },
    {
      "epoch": 0.11882998171846434,
      "grad_norm": 0.5556156039237976,
      "learning_rate": 1.9979620572583846e-05,
      "loss": 0.0122,
      "step": 65
    },
    {
      "epoch": 0.12797074954296161,
      "grad_norm": 0.2656179368495941,
      "learning_rate": 1.995416576111945e-05,
      "loss": 0.0073,
      "step": 70
    },
    {
      "epoch": 0.13711151736745886,
      "grad_norm": 0.21650183200836182,
      "learning_rate": 1.9918565354547738e-05,
      "loss": 0.0045,
      "step": 75
    },
    {
      "epoch": 0.14625228519195613,
      "grad_norm": 0.6048226952552795,
      "learning_rate": 1.9872855637909506e-05,
      "loss": 0.0042,
      "step": 80
    },
    {
      "epoch": 0.15539305301645337,
      "grad_norm": 0.16227181255817413,
      "learning_rate": 1.9817083199968552e-05,
      "loss": 0.0043,
      "step": 85
    },
    {
      "epoch": 0.16453382084095064,
      "grad_norm": 0.6625558137893677,
      "learning_rate": 1.9751304885726967e-05,
      "loss": 0.0047,
      "step": 90
    },
    {
      "epoch": 0.1736745886654479,
      "grad_norm": 0.31208521127700806,
      "learning_rate": 1.9675587738486935e-05,
      "loss": 0.003,
      "step": 95
    },
    {
      "epoch": 0.18281535648994515,
      "grad_norm": 0.21230654418468475,
      "learning_rate": 1.9590008931518133e-05,
      "loss": 0.0023,
      "step": 100
    },
    {
      "epoch": 0.19195612431444242,
      "grad_norm": 0.2529038190841675,
      "learning_rate": 1.9494655689400294e-05,
      "loss": 0.0017,
      "step": 105
    },
    {
      "epoch": 0.20109689213893966,
      "grad_norm": 0.07652478665113449,
      "learning_rate": 1.9389625199121264e-05,
      "loss": 0.0013,
      "step": 110
    },
    {
      "epoch": 0.21023765996343693,
      "grad_norm": 0.07473216205835342,
      "learning_rate": 1.927502451102095e-05,
      "loss": 0.0008,
      "step": 115
    },
    {
      "epoch": 0.21937842778793418,
      "grad_norm": 0.10759185999631882,
      "learning_rate": 1.9150970429682316e-05,
      "loss": 0.0009,
      "step": 120
    },
    {
      "epoch": 0.22851919561243145,
      "grad_norm": 0.2672303020954132,
      "learning_rate": 1.9017589394880515e-05,
      "loss": 0.0005,
      "step": 125
    },
    {
      "epoch": 0.2376599634369287,
      "grad_norm": 1.33341383934021,
      "learning_rate": 1.8875017352711547e-05,
      "loss": 0.0019,
      "step": 130
    },
    {
      "epoch": 0.24680073126142596,
      "grad_norm": 0.5362780690193176,
      "learning_rate": 1.8723399617031754e-05,
      "loss": 0.0017,
      "step": 135
    },
    {
      "epoch": 0.25594149908592323,
      "grad_norm": 0.3548431694507599,
      "learning_rate": 1.8562890721349434e-05,
      "loss": 0.0015,
      "step": 140
    },
    {
      "epoch": 0.26508226691042047,
      "grad_norm": 0.18760216236114502,
      "learning_rate": 1.8393654261319504e-05,
      "loss": 0.0009,
      "step": 145
    },
    {
      "epoch": 0.2742230347349177,
      "grad_norm": 0.2579905688762665,
      "learning_rate": 1.821586272800168e-05,
      "loss": 0.0007,
      "step": 150
    },
    {
      "epoch": 0.283363802559415,
      "grad_norm": 0.2245369404554367,
      "learning_rate": 1.8029697332052277e-05,
      "loss": 0.0009,
      "step": 155
    },
    {
      "epoch": 0.29250457038391225,
      "grad_norm": 0.8462201952934265,
      "learning_rate": 1.7835347819028642e-05,
      "loss": 0.0026,
      "step": 160
    },
    {
      "epoch": 0.3016453382084095,
      "grad_norm": 0.37047141790390015,
      "learning_rate": 1.76330122759946e-05,
      "loss": 0.0011,
      "step": 165
    },
    {
      "epoch": 0.31078610603290674,
      "grad_norm": 0.6407871246337891,
      "learning_rate": 1.7422896929623957e-05,
      "loss": 0.0011,
      "step": 170
    },
    {
      "epoch": 0.31992687385740404,
      "grad_norm": 0.4370276927947998,
      "learning_rate": 1.720521593600787e-05,
      "loss": 0.0032,
      "step": 175
    },
    {
      "epoch": 0.3290676416819013,
      "grad_norm": 0.35974910855293274,
      "learning_rate": 1.6980191162380298e-05,
      "loss": 0.0039,
      "step": 180
    },
    {
      "epoch": 0.3382084095063985,
      "grad_norm": 0.1552378237247467,
      "learning_rate": 1.674805196098402e-05,
      "loss": 0.002,
      "step": 185
    },
    {
      "epoch": 0.3473491773308958,
      "grad_norm": 0.3591105341911316,
      "learning_rate": 1.6509034935307716e-05,
      "loss": 0.0019,
      "step": 190
    },
    {
      "epoch": 0.35648994515539306,
      "grad_norm": 0.04844851791858673,
      "learning_rate": 1.6263383698932307e-05,
      "loss": 0.0011,
      "step": 195
    },
    {
      "epoch": 0.3656307129798903,
      "grad_norm": 0.1252487599849701,
      "learning_rate": 1.6011348627232463e-05,
      "loss": 0.0014,
      "step": 200
    },
    {
      "epoch": 0.37477148080438755,
      "grad_norm": 0.11114215105772018,
      "learning_rate": 1.5753186602186207e-05,
      "loss": 0.0008,
      "step": 205
    },
    {
      "epoch": 0.38391224862888484,
      "grad_norm": 0.023969072848558426,
      "learning_rate": 1.5489160750552833e-05,
      "loss": 0.0004,
      "step": 210
    },
    {
      "epoch": 0.3930530164533821,
      "grad_norm": 0.061600591987371445,
      "learning_rate": 1.5219540175685938e-05,
      "loss": 0.0003,
      "step": 215
    },
    {
      "epoch": 0.40219378427787933,
      "grad_norm": 0.0951351672410965,
      "learning_rate": 1.4944599683254903e-05,
      "loss": 0.0002,
      "step": 220
    },
    {
      "epoch": 0.4113345521023766,
      "grad_norm": 0.08145473152399063,
      "learning_rate": 1.4664619501154445e-05,
      "loss": 0.0002,
      "step": 225
    },
    {
      "epoch": 0.42047531992687387,
      "grad_norm": 0.01746625453233719,
      "learning_rate": 1.4379884993887605e-05,
      "loss": 0.0002,
      "step": 230
    },
    {
      "epoch": 0.4296160877513711,
      "grad_norm": 0.02120082452893257,
      "learning_rate": 1.4090686371713403e-05,
      "loss": 0.0004,
      "step": 235
    },
    {
      "epoch": 0.43875685557586835,
      "grad_norm": 0.019948139786720276,
      "learning_rate": 1.3797318394855496e-05,
      "loss": 0.0002,
      "step": 240
    },
    {
      "epoch": 0.44789762340036565,
      "grad_norm": 0.305644690990448,
      "learning_rate": 1.3500080073073436e-05,
      "loss": 0.0006,
      "step": 245
    },
    {
      "epoch": 0.4570383912248629,
      "grad_norm": 0.5187322497367859,
      "learning_rate": 1.319927436090259e-05,
      "loss": 0.0006,
      "step": 250
    },
    {
      "epoch": 0.46617915904936014,
      "grad_norm": 0.30897632241249084,
      "learning_rate": 1.2895207848873488e-05,
      "loss": 0.0007,
      "step": 255
    },
    {
      "epoch": 0.4753199268738574,
      "grad_norm": 0.07400551438331604,
      "learning_rate": 1.2588190451025209e-05,
      "loss": 0.0005,
      "step": 260
    },
    {
      "epoch": 0.4844606946983547,
      "grad_norm": 0.08495013415813446,
      "learning_rate": 1.2278535089031377e-05,
      "loss": 0.0005,
      "step": 265
    },
    {
      "epoch": 0.4936014625228519,
      "grad_norm": 0.18518061935901642,
      "learning_rate": 1.1966557373260654e-05,
      "loss": 0.0004,
      "step": 270
    },
    {
      "epoch": 0.5027422303473492,
      "grad_norm": 0.16111266613006592,
      "learning_rate": 1.165257528109685e-05,
      "loss": 0.0003,
      "step": 275
    },
    {
      "epoch": 0.5118829981718465,
      "grad_norm": 0.07125530391931534,
      "learning_rate": 1.1336908832846485e-05,
      "loss": 0.0002,
      "step": 280
    },
    {
      "epoch": 0.5210237659963437,
      "grad_norm": 0.027559850364923477,
      "learning_rate": 1.1019879765564155e-05,
      "loss": 0.0002,
      "step": 285
    },
    {
      "epoch": 0.5301645338208409,
      "grad_norm": 0.03874915465712547,
      "learning_rate": 1.0701811205128115e-05,
      "loss": 0.0004,
      "step": 290
    },
    {
      "epoch": 0.5393053016453382,
      "grad_norm": 0.07408824563026428,
      "learning_rate": 1.0383027336900356e-05,
      "loss": 0.0005,
      "step": 295
    },
    {
      "epoch": 0.5484460694698354,
      "grad_norm": 0.19742679595947266,
      "learning_rate": 1.0063853075306792e-05,
      "loss": 0.0004,
      "step": 300
    },
    {
      "epoch": 0.5575868372943327,
      "grad_norm": 0.030554568395018578,
      "learning_rate": 9.744613732674401e-06,
      "loss": 0.0002,
      "step": 305
    },
    {
      "epoch": 0.56672760511883,
      "grad_norm": 0.1797352284193039,
      "learning_rate": 9.425634687662768e-06,
      "loss": 0.0003,
      "step": 310
    },
    {
      "epoch": 0.5758683729433273,
      "grad_norm": 0.08481526374816895,
      "learning_rate": 9.107241053628058e-06,
      "loss": 0.0001,
      "step": 315
    },
    {
      "epoch": 0.5850091407678245,
      "grad_norm": 0.08169592916965485,
      "learning_rate": 8.789757347257373e-06,
      "loss": 0.0002,
      "step": 320
    },
    {
      "epoch": 0.5941499085923218,
      "grad_norm": 0.073927141726017,
      "learning_rate": 8.473507157811254e-06,
      "loss": 0.0003,
      "step": 325
    },
    {
      "epoch": 0.603290676416819,
      "grad_norm": 0.00965256430208683,
      "learning_rate": 8.158812817311474e-06,
      "loss": 0.0002,
      "step": 330
    },
    {
      "epoch": 0.6124314442413162,
      "grad_norm": 0.022856874391436577,
      "learning_rate": 7.845995072010188e-06,
      "loss": 0.0002,
      "step": 335
    },
    {
      "epoch": 0.6215722120658135,
      "grad_norm": 0.06963472068309784,
      "learning_rate": 7.535372755475411e-06,
      "loss": 0.0002,
      "step": 340
    },
    {
      "epoch": 0.6307129798903108,
      "grad_norm": 0.024691695347428322,
      "learning_rate": 7.22726246362592e-06,
      "loss": 0.0002,
      "step": 345
    },
    {
      "epoch": 0.6398537477148081,
      "grad_norm": 0.012590546160936356,
      "learning_rate": 6.921978232046878e-06,
      "loss": 0.0002,
      "step": 350
    },
    {
      "epoch": 0.6489945155393053,
      "grad_norm": 0.05468595027923584,
      "learning_rate": 6.619831215914974e-06,
      "loss": 0.0002,
      "step": 355
    },
    {
      "epoch": 0.6581352833638026,
      "grad_norm": 0.07750693708658218,
      "learning_rate": 6.321129372859418e-06,
      "loss": 0.0002,
      "step": 360
    },
    {
      "epoch": 0.6672760511882998,
      "grad_norm": 0.027170751243829727,
      "learning_rate": 6.026177149081949e-06,
      "loss": 0.0002,
      "step": 365
    },
    {
      "epoch": 0.676416819012797,
      "grad_norm": 0.013442575931549072,
      "learning_rate": 5.7352751690558025e-06,
      "loss": 0.0002,
      "step": 370
    },
    {
      "epoch": 0.6855575868372943,
      "grad_norm": 0.06503641605377197,
      "learning_rate": 5.448719929119916e-06,
      "loss": 0.0002,
      "step": 375
    },
    {
      "epoch": 0.6946983546617916,
      "grad_norm": 0.1688007265329361,
      "learning_rate": 5.166803495280614e-06,
      "loss": 0.0002,
      "step": 380
    },
    {
      "epoch": 0.7038391224862889,
      "grad_norm": 0.16672426462173462,
      "learning_rate": 4.889813205528895e-06,
      "loss": 0.0002,
      "step": 385
    },
    {
      "epoch": 0.7129798903107861,
      "grad_norm": 0.0711933895945549,
      "learning_rate": 4.61803137697661e-06,
      "loss": 0.0002,
      "step": 390
    },
    {
      "epoch": 0.7221206581352834,
      "grad_norm": 0.017142411321401596,
      "learning_rate": 4.351735018110066e-06,
      "loss": 0.0001,
      "step": 395
    },
    {
      "epoch": 0.7312614259597806,
      "grad_norm": 0.06323219835758209,
      "learning_rate": 4.091195546454398e-06,
      "loss": 0.0002,
      "step": 400
    },
    {
      "epoch": 0.7404021937842779,
      "grad_norm": 0.15650759637355804,
      "learning_rate": 3.8366785119363624e-06,
      "loss": 0.0002,
      "step": 405
    },
    {
      "epoch": 0.7495429616087751,
      "grad_norm": 0.10531863570213318,
      "learning_rate": 3.5884433262276376e-06,
      "loss": 0.0001,
      "step": 410
    },
    {
      "epoch": 0.7586837294332724,
      "grad_norm": 0.009012032300233841,
      "learning_rate": 3.3467429983443477e-06,
      "loss": 0.0003,
      "step": 415
    },
    {
      "epoch": 0.7678244972577697,
      "grad_norm": 0.021396493539214134,
      "learning_rate": 3.111823876772426e-06,
      "loss": 0.0001,
      "step": 420
    },
    {
      "epoch": 0.7769652650822669,
      "grad_norm": 0.03978023678064346,
      "learning_rate": 2.883925398381585e-06,
      "loss": 0.0001,
      "step": 425
    },
    {
      "epoch": 0.7861060329067642,
      "grad_norm": 0.02052459307014942,
      "learning_rate": 2.663279844383815e-06,
      "loss": 0.0001,
      "step": 430
    },
    {
      "epoch": 0.7952468007312614,
      "grad_norm": 0.009402839466929436,
      "learning_rate": 2.4501121035851494e-06,
      "loss": 0.0001,
      "step": 435
    },
    {
      "epoch": 0.8043875685557587,
      "grad_norm": 0.02130948193371296,
      "learning_rate": 2.244639443172013e-06,
      "loss": 0.0001,
      "step": 440
    },
    {
      "epoch": 0.8135283363802559,
      "grad_norm": 0.056616611778736115,
      "learning_rate": 2.047071287265735e-06,
      "loss": 0.0001,
      "step": 445
    },
    {
      "epoch": 0.8226691042047533,
      "grad_norm": 0.05803929269313812,
      "learning_rate": 1.857609003471007e-06,
      "loss": 0.0002,
      "step": 450
    },
    {
      "epoch": 0.8318098720292505,
      "grad_norm": 0.005538036115467548,
      "learning_rate": 1.6764456976357279e-06,
      "loss": 0.0001,
      "step": 455
    },
    {
      "epoch": 0.8409506398537477,
      "grad_norm": 0.0059971073642373085,
      "learning_rate": 1.503766017031547e-06,
      "loss": 0.0001,
      "step": 460
    },
    {
      "epoch": 0.850091407678245,
      "grad_norm": 0.0059807188808918,
      "learning_rate": 1.339745962155613e-06,
      "loss": 0.0001,
      "step": 465
    },
    {
      "epoch": 0.8592321755027422,
      "grad_norm": 0.005512280389666557,
      "learning_rate": 1.1845527073454045e-06,
      "loss": 0.0001,
      "step": 470
    },
    {
      "epoch": 0.8683729433272395,
      "grad_norm": 0.021586749702692032,
      "learning_rate": 1.0383444303894453e-06,
      "loss": 0.0001,
      "step": 475
    },
    {
      "epoch": 0.8775137111517367,
      "grad_norm": 0.01218556147068739,
      "learning_rate": 9.012701513075839e-07,
      "loss": 0.0001,
      "step": 480
    },
    {
      "epoch": 0.886654478976234,
      "grad_norm": 0.005609170068055391,
      "learning_rate": 7.734695804651693e-07,
      "loss": 0.0001,
      "step": 485
    },
    {
      "epoch": 0.8957952468007313,
      "grad_norm": 0.02664189226925373,
      "learning_rate": 6.550729761758901e-07,
      "loss": 0.0001,
      "step": 490
    },
    {
      "epoch": 0.9049360146252285,
      "grad_norm": 0.016554079949855804,
      "learning_rate": 5.462010119384665e-07,
      "loss": 0.0001,
      "step": 495
    },
    {
      "epoch": 0.9140767824497258,
      "grad_norm": 0.005938609596341848,
      "learning_rate": 4.4696465344245874e-07,
      "loss": 0.0001,
      "step": 500
    },
    {
      "epoch": 0.923217550274223,
      "grad_norm": 0.01982252486050129,
      "learning_rate": 3.574650454685902e-07,
      "loss": 0.0001,
      "step": 505
    },
    {
      "epoch": 0.9323583180987203,
      "grad_norm": 0.02185707353055477,
      "learning_rate": 2.777934087988532e-07,
      "loss": 0.0001,
      "step": 510
    },
    {
      "epoch": 0.9414990859232175,
      "grad_norm": 0.007851045578718185,
      "learning_rate": 2.0803094724143879e-07,
      "loss": 0.0001,
      "step": 515
    },
    {
      "epoch": 0.9506398537477148,
      "grad_norm": 0.018694985657930374,
      "learning_rate": 1.482487648653008e-07,
      "loss": 0.0001,
      "step": 520
    },
    {
      "epoch": 0.9597806215722121,
      "grad_norm": 0.02687523327767849,
      "learning_rate": 9.85077935286749e-08,
      "loss": 0.0002,
      "step": 525
    },
    {
      "epoch": 0.9689213893967094,
      "grad_norm": 0.007271461188793182,
      "learning_rate": 5.8858730775438465e-08,
      "loss": 0.0001,
      "step": 530
    },
    {
      "epoch": 0.9780621572212066,
      "grad_norm": 0.022476373240351677,
      "learning_rate": 2.9341988162595593e-08,
      "loss": 0.0004,
      "step": 535
    },
    {
      "epoch": 0.9872029250457038,
      "grad_norm": 0.009166369214653969,
      "learning_rate": 9.987650071561217e-09,
      "loss": 0.0001,
      "step": 540
    },
    {
      "epoch": 0.9963436928702011,
      "grad_norm": 0.00787508673965931,
      "learning_rate": 8.154430452267381e-10,
      "loss": 0.0001,
      "step": 545
    },
    {
      "epoch": 1.0,
      "step": 547,
      "total_flos": 2.567967004866642e+17,
      "train_loss": 0.07939198961997766,
      "train_runtime": 7762.1038,
      "train_samples_per_second": 9.018,
      "train_steps_per_second": 0.07
    }
  ],
  "logging_steps": 5,
  "max_steps": 547,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": false,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 2.567967004866642e+17,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}