{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1206,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0008291873963515755,
      "grad_norm": 24.363085939314125,
      "learning_rate": 8.264462809917357e-08,
      "loss": 1.4091,
      "step": 1
    },
    {
      "epoch": 0.0041459369817578775,
      "grad_norm": 23.543346063299293,
      "learning_rate": 4.132231404958678e-07,
      "loss": 1.4056,
      "step": 5
    },
    {
      "epoch": 0.008291873963515755,
      "grad_norm": 8.313966376086048,
      "learning_rate": 8.264462809917356e-07,
      "loss": 1.3082,
      "step": 10
    },
    {
      "epoch": 0.012437810945273632,
      "grad_norm": 10.10778485014052,
      "learning_rate": 1.2396694214876035e-06,
      "loss": 1.1527,
      "step": 15
    },
    {
      "epoch": 0.01658374792703151,
      "grad_norm": 3.1507455192980616,
      "learning_rate": 1.6528925619834712e-06,
      "loss": 1.0115,
      "step": 20
    },
    {
      "epoch": 0.020729684908789386,
      "grad_norm": 2.949214823531174,
      "learning_rate": 2.066115702479339e-06,
      "loss": 0.9662,
      "step": 25
    },
    {
      "epoch": 0.024875621890547265,
      "grad_norm": 2.3865704411346944,
      "learning_rate": 2.479338842975207e-06,
      "loss": 0.9301,
      "step": 30
    },
    {
      "epoch": 0.02902155887230514,
      "grad_norm": 2.367353362561768,
      "learning_rate": 2.8925619834710743e-06,
      "loss": 0.9066,
      "step": 35
    },
    {
      "epoch": 0.03316749585406302,
      "grad_norm": 2.168359757453863,
      "learning_rate": 3.3057851239669424e-06,
      "loss": 0.8864,
      "step": 40
    },
    {
      "epoch": 0.03731343283582089,
      "grad_norm": 2.352027725705367,
      "learning_rate": 3.71900826446281e-06,
      "loss": 0.8776,
      "step": 45
    },
    {
      "epoch": 0.04145936981757877,
      "grad_norm": 2.221157238277382,
      "learning_rate": 4.132231404958678e-06,
      "loss": 0.8738,
      "step": 50
    },
    {
      "epoch": 0.04560530679933665,
      "grad_norm": 2.254296003816909,
      "learning_rate": 4.5454545454545455e-06,
      "loss": 0.8654,
      "step": 55
    },
    {
      "epoch": 0.04975124378109453,
      "grad_norm": 2.2549051162675258,
      "learning_rate": 4.958677685950414e-06,
      "loss": 0.8603,
      "step": 60
    },
    {
      "epoch": 0.0538971807628524,
      "grad_norm": 2.421942208846336,
      "learning_rate": 5.371900826446281e-06,
      "loss": 0.8514,
      "step": 65
    },
    {
      "epoch": 0.05804311774461028,
      "grad_norm": 2.2852906820767833,
      "learning_rate": 5.785123966942149e-06,
      "loss": 0.8448,
      "step": 70
    },
    {
      "epoch": 0.06218905472636816,
      "grad_norm": 2.5429700413304293,
      "learning_rate": 6.198347107438017e-06,
      "loss": 0.8342,
      "step": 75
    },
    {
      "epoch": 0.06633499170812604,
      "grad_norm": 2.41053818480909,
      "learning_rate": 6.611570247933885e-06,
      "loss": 0.8233,
      "step": 80
    },
    {
      "epoch": 0.07048092868988391,
      "grad_norm": 2.467306342713205,
      "learning_rate": 7.0247933884297525e-06,
      "loss": 0.8191,
      "step": 85
    },
    {
      "epoch": 0.07462686567164178,
      "grad_norm": 2.4370364722694924,
      "learning_rate": 7.43801652892562e-06,
      "loss": 0.8283,
      "step": 90
    },
    {
      "epoch": 0.07877280265339967,
      "grad_norm": 2.3448779174579175,
      "learning_rate": 7.851239669421489e-06,
      "loss": 0.8109,
      "step": 95
    },
    {
      "epoch": 0.08291873963515754,
      "grad_norm": 2.366708369360123,
      "learning_rate": 8.264462809917356e-06,
      "loss": 0.8175,
      "step": 100
    },
    {
      "epoch": 0.08706467661691543,
      "grad_norm": 2.7803642859629445,
      "learning_rate": 8.677685950413224e-06,
      "loss": 0.8078,
      "step": 105
    },
    {
      "epoch": 0.0912106135986733,
      "grad_norm": 2.402987189273672,
      "learning_rate": 9.090909090909091e-06,
      "loss": 0.8036,
      "step": 110
    },
    {
      "epoch": 0.09535655058043117,
      "grad_norm": 2.485685260898161,
      "learning_rate": 9.50413223140496e-06,
      "loss": 0.801,
      "step": 115
    },
    {
      "epoch": 0.09950248756218906,
      "grad_norm": 3.0171204848637294,
      "learning_rate": 9.917355371900828e-06,
      "loss": 0.7926,
      "step": 120
    },
    {
      "epoch": 0.10364842454394693,
      "grad_norm": 2.3850376443520602,
      "learning_rate": 9.999664652243188e-06,
      "loss": 0.7988,
      "step": 125
    },
    {
      "epoch": 0.1077943615257048,
      "grad_norm": 2.489350579580748,
      "learning_rate": 9.99830237907608e-06,
      "loss": 0.7869,
      "step": 130
    },
    {
      "epoch": 0.11194029850746269,
      "grad_norm": 2.4001253138099843,
      "learning_rate": 9.995892506564461e-06,
      "loss": 0.7817,
      "step": 135
    },
    {
      "epoch": 0.11608623548922056,
      "grad_norm": 2.4718753541913037,
      "learning_rate": 9.992435539796e-06,
      "loss": 0.7853,
      "step": 140
    },
    {
      "epoch": 0.12023217247097844,
      "grad_norm": 2.4377853310772966,
      "learning_rate": 9.987932203319917e-06,
      "loss": 0.7634,
      "step": 145
    },
    {
      "epoch": 0.12437810945273632,
      "grad_norm": 2.8232961214963024,
      "learning_rate": 9.982383440995146e-06,
      "loss": 0.7688,
      "step": 150
    },
    {
      "epoch": 0.1285240464344942,
      "grad_norm": 2.25234786421426,
      "learning_rate": 9.975790415792497e-06,
      "loss": 0.7707,
      "step": 155
    },
    {
      "epoch": 0.13266998341625208,
      "grad_norm": 2.653089890533132,
      "learning_rate": 9.968154509550914e-06,
      "loss": 0.759,
      "step": 160
    },
    {
      "epoch": 0.13681592039800994,
      "grad_norm": 2.4692695376219946,
      "learning_rate": 9.959477322687852e-06,
      "loss": 0.7678,
      "step": 165
    },
    {
      "epoch": 0.14096185737976782,
      "grad_norm": 2.465268528338133,
      "learning_rate": 9.949760673863846e-06,
      "loss": 0.7512,
      "step": 170
    },
    {
      "epoch": 0.1451077943615257,
      "grad_norm": 2.487803804934397,
      "learning_rate": 9.93900659960133e-06,
      "loss": 0.7366,
      "step": 175
    },
    {
      "epoch": 0.14925373134328357,
      "grad_norm": 2.55421871649192,
      "learning_rate": 9.927217353857809e-06,
      "loss": 0.7463,
      "step": 180
    },
    {
      "epoch": 0.15339966832504145,
      "grad_norm": 2.639039784633591,
      "learning_rate": 9.914395407553444e-06,
      "loss": 0.7443,
      "step": 185
    },
    {
      "epoch": 0.15754560530679934,
      "grad_norm": 2.501978124475521,
      "learning_rate": 9.900543448053164e-06,
      "loss": 0.7197,
      "step": 190
    },
    {
      "epoch": 0.16169154228855723,
      "grad_norm": 2.3337568323918214,
      "learning_rate": 9.885664378603432e-06,
      "loss": 0.7332,
      "step": 195
    },
    {
      "epoch": 0.16583747927031509,
      "grad_norm": 2.378783071529505,
      "learning_rate": 9.869761317723744e-06,
      "loss": 0.7136,
      "step": 200
    },
    {
      "epoch": 0.16998341625207297,
      "grad_norm": 2.277527337184093,
      "learning_rate": 9.85283759855301e-06,
      "loss": 0.7201,
      "step": 205
    },
    {
      "epoch": 0.17412935323383086,
      "grad_norm": 2.2079190034247365,
      "learning_rate": 9.834896768150963e-06,
      "loss": 0.7174,
      "step": 210
    },
    {
      "epoch": 0.17827529021558872,
      "grad_norm": 2.4371753499407434,
      "learning_rate": 9.81594258675473e-06,
      "loss": 0.7341,
      "step": 215
    },
    {
      "epoch": 0.1824212271973466,
      "grad_norm": 2.2055041342497255,
      "learning_rate": 9.795979026990717e-06,
      "loss": 0.7024,
      "step": 220
    },
    {
      "epoch": 0.1865671641791045,
      "grad_norm": 2.406773347160676,
      "learning_rate": 9.775010273041975e-06,
      "loss": 0.7173,
      "step": 225
    },
    {
      "epoch": 0.19071310116086235,
      "grad_norm": 2.2838878169624173,
      "learning_rate": 9.753040719771249e-06,
      "loss": 0.6933,
      "step": 230
    },
    {
      "epoch": 0.19485903814262023,
      "grad_norm": 2.3948306779315756,
      "learning_rate": 9.730074971799837e-06,
      "loss": 0.7093,
      "step": 235
    },
    {
      "epoch": 0.19900497512437812,
      "grad_norm": 2.3419375521446595,
      "learning_rate": 9.706117842542517e-06,
      "loss": 0.7026,
      "step": 240
    },
    {
      "epoch": 0.20315091210613598,
      "grad_norm": 2.386424037340903,
      "learning_rate": 9.681174353198687e-06,
      "loss": 0.6928,
      "step": 245
    },
    {
      "epoch": 0.20729684908789386,
      "grad_norm": 2.352824469415509,
      "learning_rate": 9.655249731699973e-06,
      "loss": 0.6758,
      "step": 250
    },
    {
      "epoch": 0.21144278606965175,
      "grad_norm": 2.448238996649246,
      "learning_rate": 9.628349411614503e-06,
      "loss": 0.6846,
      "step": 255
    },
    {
      "epoch": 0.2155887230514096,
      "grad_norm": 2.18778612059073,
      "learning_rate": 9.600479031008072e-06,
      "loss": 0.6783,
      "step": 260
    },
    {
      "epoch": 0.2197346600331675,
      "grad_norm": 2.4786933731659384,
      "learning_rate": 9.571644431262463e-06,
      "loss": 0.6862,
      "step": 265
    },
    {
      "epoch": 0.22388059701492538,
      "grad_norm": 2.1664093593847165,
      "learning_rate": 9.54185165585114e-06,
      "loss": 0.6598,
      "step": 270
    },
    {
      "epoch": 0.22802653399668324,
      "grad_norm": 2.3439695927983024,
      "learning_rate": 9.511106949072588e-06,
      "loss": 0.6538,
      "step": 275
    },
    {
      "epoch": 0.23217247097844113,
      "grad_norm": 2.573211098478694,
      "learning_rate": 9.479416754741577e-06,
      "loss": 0.6619,
      "step": 280
    },
    {
      "epoch": 0.236318407960199,
      "grad_norm": 2.7234227642496616,
      "learning_rate": 9.446787714838579e-06,
      "loss": 0.6562,
      "step": 285
    },
    {
      "epoch": 0.24046434494195687,
      "grad_norm": 2.5205068601141525,
      "learning_rate": 9.413226668117679e-06,
      "loss": 0.6461,
      "step": 290
    },
    {
      "epoch": 0.24461028192371476,
      "grad_norm": 2.229496276338444,
      "learning_rate": 9.37874064867323e-06,
      "loss": 0.6469,
      "step": 295
    },
    {
      "epoch": 0.24875621890547264,
      "grad_norm": 2.3470411543105265,
      "learning_rate": 9.343336884465577e-06,
      "loss": 0.6426,
      "step": 300
    },
    {
      "epoch": 0.25290215588723053,
      "grad_norm": 2.3905607702569704,
      "learning_rate": 9.307022795806125e-06,
      "loss": 0.6278,
      "step": 305
    },
    {
      "epoch": 0.2570480928689884,
      "grad_norm": 2.159892061781749,
      "learning_rate": 9.26980599380213e-06,
      "loss": 0.6404,
      "step": 310
    },
    {
      "epoch": 0.26119402985074625,
      "grad_norm": 2.573673941571869,
      "learning_rate": 9.231694278761455e-06,
      "loss": 0.629,
      "step": 315
    },
    {
      "epoch": 0.26533996683250416,
      "grad_norm": 2.1819450147556494,
      "learning_rate": 9.192695638557723e-06,
      "loss": 0.6297,
      "step": 320
    },
    {
      "epoch": 0.269485903814262,
      "grad_norm": 2.307987631760188,
      "learning_rate": 9.1528182469561e-06,
      "loss": 0.6254,
      "step": 325
    },
    {
      "epoch": 0.2736318407960199,
      "grad_norm": 2.4579207582542906,
      "learning_rate": 9.112070461900178e-06,
      "loss": 0.6322,
      "step": 330
    },
    {
      "epoch": 0.2777777777777778,
      "grad_norm": 2.3690336709142064,
      "learning_rate": 9.070460823760197e-06,
      "loss": 0.6164,
      "step": 335
    },
    {
      "epoch": 0.28192371475953565,
      "grad_norm": 2.7180849820218067,
      "learning_rate": 9.027998053543079e-06,
      "loss": 0.6298,
      "step": 340
    },
    {
      "epoch": 0.2860696517412935,
      "grad_norm": 2.4829451807056118,
      "learning_rate": 8.984691051064576e-06,
      "loss": 0.6162,
      "step": 345
    },
    {
      "epoch": 0.2902155887230514,
      "grad_norm": 2.459002185250392,
      "learning_rate": 8.94054889308395e-06,
      "loss": 0.6045,
      "step": 350
    },
    {
      "epoch": 0.2943615257048093,
      "grad_norm": 2.2572208925258597,
      "learning_rate": 8.895580831401563e-06,
      "loss": 0.6112,
      "step": 355
    },
    {
      "epoch": 0.29850746268656714,
      "grad_norm": 2.276783038981904,
      "learning_rate": 8.849796290919787e-06,
      "loss": 0.6115,
      "step": 360
    },
    {
      "epoch": 0.30265339966832505,
      "grad_norm": 2.415622228387365,
      "learning_rate": 8.803204867667624e-06,
      "loss": 0.5991,
      "step": 365
    },
    {
      "epoch": 0.3067993366500829,
      "grad_norm": 2.045217334174227,
      "learning_rate": 8.755816326789469e-06,
      "loss": 0.5909,
      "step": 370
    },
    {
      "epoch": 0.31094527363184077,
      "grad_norm": 2.3646147099166526,
      "learning_rate": 8.70764060049842e-06,
      "loss": 0.5929,
      "step": 375
    },
    {
      "epoch": 0.3150912106135987,
      "grad_norm": 2.179575894376864,
      "learning_rate": 8.658687785994579e-06,
      "loss": 0.602,
      "step": 380
    },
    {
      "epoch": 0.31923714759535654,
      "grad_norm": 2.303527412372746,
      "learning_rate": 8.608968143348765e-06,
      "loss": 0.5911,
      "step": 385
    },
    {
      "epoch": 0.32338308457711445,
      "grad_norm": 2.184412340951817,
      "learning_rate": 8.558492093352098e-06,
      "loss": 0.5834,
      "step": 390
    },
    {
      "epoch": 0.3275290215588723,
      "grad_norm": 2.2982310068640963,
      "learning_rate": 8.50727021533189e-06,
      "loss": 0.573,
      "step": 395
    },
    {
      "epoch": 0.33167495854063017,
      "grad_norm": 2.3823718371379288,
      "learning_rate": 8.455313244934324e-06,
      "loss": 0.5704,
      "step": 400
    },
    {
      "epoch": 0.3358208955223881,
      "grad_norm": 2.308301112102417,
      "learning_rate": 8.402632071874348e-06,
      "loss": 0.5676,
      "step": 405
    },
    {
      "epoch": 0.33996683250414594,
      "grad_norm": 2.1617090999565782,
      "learning_rate": 8.349237737653288e-06,
      "loss": 0.5709,
      "step": 410
    },
    {
      "epoch": 0.3441127694859038,
      "grad_norm": 2.280161952890472,
      "learning_rate": 8.29514143324466e-06,
      "loss": 0.5779,
      "step": 415
    },
    {
      "epoch": 0.3482587064676617,
      "grad_norm": 2.2490286708106577,
      "learning_rate": 8.24035449674863e-06,
      "loss": 0.5564,
      "step": 420
    },
    {
      "epoch": 0.3524046434494196,
      "grad_norm": 2.2807466234509777,
      "learning_rate": 8.184888411015655e-06,
      "loss": 0.5554,
      "step": 425
    },
    {
      "epoch": 0.35655058043117743,
      "grad_norm": 2.180528318028485,
      "learning_rate": 8.128754801239781e-06,
      "loss": 0.5606,
      "step": 430
    },
    {
      "epoch": 0.36069651741293535,
      "grad_norm": 2.3314447428823,
      "learning_rate": 8.071965432522107e-06,
      "loss": 0.5593,
      "step": 435
    },
    {
      "epoch": 0.3648424543946932,
      "grad_norm": 2.2300833962237974,
      "learning_rate": 8.01453220740492e-06,
      "loss": 0.5513,
      "step": 440
    },
    {
      "epoch": 0.36898839137645106,
      "grad_norm": 2.4375150414786044,
      "learning_rate": 7.956467163377037e-06,
      "loss": 0.5451,
      "step": 445
    },
    {
      "epoch": 0.373134328358209,
      "grad_norm": 2.345552450223764,
      "learning_rate": 7.89778247035085e-06,
      "loss": 0.5513,
      "step": 450
    },
    {
      "epoch": 0.37728026533996684,
      "grad_norm": 2.302225232727345,
      "learning_rate": 7.838490428111625e-06,
      "loss": 0.5454,
      "step": 455
    },
    {
      "epoch": 0.3814262023217247,
      "grad_norm": 2.3901207498612274,
      "learning_rate": 7.77860346373957e-06,
      "loss": 0.5354,
      "step": 460
    },
    {
      "epoch": 0.3855721393034826,
      "grad_norm": 2.1013639490757527,
      "learning_rate": 7.718134129005238e-06,
      "loss": 0.5338,
      "step": 465
    },
    {
      "epoch": 0.38971807628524047,
      "grad_norm": 2.1095521859807604,
      "learning_rate": 7.657095097738793e-06,
      "loss": 0.5281,
      "step": 470
    },
    {
      "epoch": 0.3938640132669983,
      "grad_norm": 2.1090684046109778,
      "learning_rate": 7.59549916317367e-06,
      "loss": 0.5269,
      "step": 475
    },
    {
      "epoch": 0.39800995024875624,
      "grad_norm": 2.1756383908491466,
      "learning_rate": 7.533359235265248e-06,
      "loss": 0.5404,
      "step": 480
    },
    {
      "epoch": 0.4021558872305141,
      "grad_norm": 2.067392945377543,
      "learning_rate": 7.470688337985029e-06,
      "loss": 0.5257,
      "step": 485
    },
    {
      "epoch": 0.40630182421227196,
      "grad_norm": 2.178779204316918,
      "learning_rate": 7.407499606590934e-06,
      "loss": 0.5108,
      "step": 490
    },
    {
      "epoch": 0.41044776119402987,
      "grad_norm": 2.1482896888352303,
      "learning_rate": 7.343806284874268e-06,
      "loss": 0.5195,
      "step": 495
    },
    {
      "epoch": 0.41459369817578773,
      "grad_norm": 2.122877082306712,
      "learning_rate": 7.279621722383939e-06,
      "loss": 0.5238,
      "step": 500
    },
    {
      "epoch": 0.4187396351575456,
      "grad_norm": 2.513475103077436,
      "learning_rate": 7.214959371628522e-06,
      "loss": 0.5239,
      "step": 505
    },
    {
      "epoch": 0.4228855721393035,
      "grad_norm": 2.256666838754752,
      "learning_rate": 7.149832785256718e-06,
      "loss": 0.5111,
      "step": 510
    },
    {
      "epoch": 0.42703150912106136,
      "grad_norm": 2.126071871777049,
      "learning_rate": 7.084255613216855e-06,
      "loss": 0.5109,
      "step": 515
    },
    {
      "epoch": 0.4311774461028192,
      "grad_norm": 2.2958892839783003,
      "learning_rate": 7.018241599895974e-06,
      "loss": 0.5075,
      "step": 520
    },
    {
      "epoch": 0.43532338308457713,
      "grad_norm": 2.1249877107238166,
      "learning_rate": 6.95180458123913e-06,
      "loss": 0.5021,
      "step": 525
    },
    {
      "epoch": 0.439469320066335,
      "grad_norm": 2.268170350249536,
      "learning_rate": 6.8849584818494984e-06,
      "loss": 0.5075,
      "step": 530
    },
    {
      "epoch": 0.44361525704809285,
      "grad_norm": 2.1889766909888304,
      "learning_rate": 6.817717312069913e-06,
      "loss": 0.4951,
      "step": 535
    },
    {
      "epoch": 0.44776119402985076,
      "grad_norm": 2.116309228498843,
      "learning_rate": 6.750095165046415e-06,
      "loss": 0.4993,
      "step": 540
    },
    {
      "epoch": 0.4519071310116086,
      "grad_norm": 2.2014038727267735,
      "learning_rate": 6.682106213774459e-06,
      "loss": 0.5014,
      "step": 545
    },
    {
      "epoch": 0.4560530679933665,
      "grad_norm": 2.1791180363289833,
      "learning_rate": 6.6137647081283776e-06,
      "loss": 0.5004,
      "step": 550
    },
    {
      "epoch": 0.4601990049751244,
      "grad_norm": 2.181037493892715,
      "learning_rate": 6.545084971874738e-06,
      "loss": 0.4869,
      "step": 555
    },
    {
      "epoch": 0.46434494195688225,
      "grad_norm": 2.1234780810442317,
      "learning_rate": 6.476081399670212e-06,
      "loss": 0.487,
      "step": 560
    },
    {
      "epoch": 0.4684908789386401,
      "grad_norm": 2.1577583891835137,
      "learning_rate": 6.406768454044581e-06,
      "loss": 0.4925,
      "step": 565
    },
    {
      "epoch": 0.472636815920398,
      "grad_norm": 2.1435505057127053,
      "learning_rate": 6.337160662369519e-06,
      "loss": 0.4852,
      "step": 570
    },
    {
      "epoch": 0.4767827529021559,
      "grad_norm": 2.1004015267155056,
      "learning_rate": 6.267272613813789e-06,
      "loss": 0.4894,
      "step": 575
    },
    {
      "epoch": 0.48092868988391374,
      "grad_norm": 2.275508822904309,
      "learning_rate": 6.19711895628548e-06,
      "loss": 0.4783,
      "step": 580
    },
    {
      "epoch": 0.48507462686567165,
      "grad_norm": 2.0957567591112456,
      "learning_rate": 6.126714393361939e-06,
      "loss": 0.4811,
      "step": 585
    },
    {
      "epoch": 0.4892205638474295,
      "grad_norm": 2.2453086852427138,
      "learning_rate": 6.056073681208038e-06,
      "loss": 0.4778,
      "step": 590
    },
    {
      "epoch": 0.49336650082918737,
      "grad_norm": 2.086146491253835,
      "learning_rate": 5.985211625483403e-06,
      "loss": 0.468,
      "step": 595
    },
    {
      "epoch": 0.4975124378109453,
      "grad_norm": 2.08746099925638,
      "learning_rate": 5.914143078239293e-06,
      "loss": 0.4702,
      "step": 600
    },
    {
      "epoch": 0.5016583747927031,
      "grad_norm": 2.183555664254409,
      "learning_rate": 5.842882934805731e-06,
      "loss": 0.4683,
      "step": 605
    },
    {
      "epoch": 0.5058043117744611,
      "grad_norm": 2.141477284770924,
      "learning_rate": 5.771446130669589e-06,
      "loss": 0.4692,
      "step": 610
    },
    {
      "epoch": 0.5099502487562189,
      "grad_norm": 1.999530939511801,
      "learning_rate": 5.6998476383442345e-06,
      "loss": 0.4672,
      "step": 615
    },
    {
      "epoch": 0.5140961857379768,
      "grad_norm": 2.3861168673535476,
      "learning_rate": 5.628102464231429e-06,
      "loss": 0.4647,
      "step": 620
    },
    {
      "epoch": 0.5182421227197347,
      "grad_norm": 2.112621163685018,
      "learning_rate": 5.556225645476119e-06,
      "loss": 0.4569,
      "step": 625
    },
    {
      "epoch": 0.5223880597014925,
      "grad_norm": 2.2162792041456023,
      "learning_rate": 5.4842322468147926e-06,
      "loss": 0.4636,
      "step": 630
    },
    {
      "epoch": 0.5265339966832504,
      "grad_norm": 2.1411819631797444,
      "learning_rate": 5.412137357418037e-06,
      "loss": 0.4609,
      "step": 635
    },
    {
      "epoch": 0.5306799336650083,
      "grad_norm": 2.0506863143987277,
      "learning_rate": 5.339956087727985e-06,
      "loss": 0.4501,
      "step": 640
    },
    {
      "epoch": 0.5348258706467661,
      "grad_norm": 2.0133156015781193,
      "learning_rate": 5.2677035662913116e-06,
      "loss": 0.4507,
      "step": 645
    },
    {
      "epoch": 0.538971807628524,
      "grad_norm": 2.0603049501006336,
      "learning_rate": 5.195394936588409e-06,
      "loss": 0.441,
      "step": 650
    },
    {
      "epoch": 0.543117744610282,
      "grad_norm": 2.012154333900926,
      "learning_rate": 5.123045353859465e-06,
      "loss": 0.4577,
      "step": 655
    },
    {
      "epoch": 0.5472636815920398,
      "grad_norm": 2.0263592161269184,
      "learning_rate": 5.050669981928056e-06,
      "loss": 0.4568,
      "step": 660
    },
    {
      "epoch": 0.5514096185737977,
      "grad_norm": 2.0917399842428157,
      "learning_rate": 4.978283990022936e-06,
      "loss": 0.4532,
      "step": 665
    },
    {
      "epoch": 0.5555555555555556,
      "grad_norm": 2.188577844097009,
      "learning_rate": 4.905902549598719e-06,
      "loss": 0.4382,
      "step": 670
    },
    {
      "epoch": 0.5597014925373134,
      "grad_norm": 2.199659099615686,
      "learning_rate": 4.833540831156062e-06,
      "loss": 0.4437,
      "step": 675
    },
    {
      "epoch": 0.5638474295190713,
      "grad_norm": 2.0640167310940027,
      "learning_rate": 4.761214001062079e-06,
      "loss": 0.4379,
      "step": 680
    },
    {
      "epoch": 0.5679933665008292,
      "grad_norm": 2.179786737229522,
      "learning_rate": 4.688937218371592e-06,
      "loss": 0.4409,
      "step": 685
    },
    {
      "epoch": 0.572139303482587,
      "grad_norm": 2.074519322035315,
      "learning_rate": 4.616725631649938e-06,
      "loss": 0.4383,
      "step": 690
    },
    {
      "epoch": 0.5762852404643449,
      "grad_norm": 2.1399873216394036,
      "learning_rate": 4.544594375797969e-06,
      "loss": 0.4288,
      "step": 695
    },
    {
      "epoch": 0.5804311774461028,
      "grad_norm": 2.1123595173850935,
      "learning_rate": 4.472558568879901e-06,
      "loss": 0.4351,
      "step": 700
    },
    {
      "epoch": 0.5845771144278606,
      "grad_norm": 2.031607056740364,
      "learning_rate": 4.400633308954713e-06,
      "loss": 0.4244,
      "step": 705
    },
    {
      "epoch": 0.5887230514096186,
      "grad_norm": 1.9626506911231658,
      "learning_rate": 4.3288336709117246e-06,
      "loss": 0.4192,
      "step": 710
    },
    {
      "epoch": 0.5928689883913765,
      "grad_norm": 2.10360277006623,
      "learning_rate": 4.257174703311032e-06,
      "loss": 0.4191,
      "step": 715
    },
    {
      "epoch": 0.5970149253731343,
      "grad_norm": 2.008208706865011,
      "learning_rate": 4.185671425229477e-06,
      "loss": 0.4191,
      "step": 720
    },
    {
      "epoch": 0.6011608623548922,
      "grad_norm": 2.0304909176887977,
      "learning_rate": 4.11433882311277e-06,
      "loss": 0.4193,
      "step": 725
    },
    {
      "epoch": 0.6053067993366501,
      "grad_norm": 2.0261313089901303,
      "learning_rate": 4.043191847634469e-06,
      "loss": 0.4164,
      "step": 730
    },
    {
      "epoch": 0.6094527363184079,
      "grad_norm": 2.0660700135752945,
      "learning_rate": 3.9722454105624545e-06,
      "loss": 0.4121,
      "step": 735
    },
    {
      "epoch": 0.6135986733001658,
      "grad_norm": 1.9536775589165198,
      "learning_rate": 3.901514381633555e-06,
      "loss": 0.4184,
      "step": 740
    },
    {
      "epoch": 0.6177446102819237,
      "grad_norm": 2.096984004473796,
      "learning_rate": 3.831013585436985e-06,
      "loss": 0.4209,
      "step": 745
    },
    {
      "epoch": 0.6218905472636815,
      "grad_norm": 1.9585159850477776,
      "learning_rate": 3.7607577983072486e-06,
      "loss": 0.4076,
      "step": 750
    },
    {
      "epoch": 0.6260364842454395,
      "grad_norm": 2.021551204900084,
      "learning_rate": 3.6907617452271394e-06,
      "loss": 0.4054,
      "step": 755
    },
    {
      "epoch": 0.6301824212271974,
      "grad_norm": 2.2587727736565095,
      "learning_rate": 3.621040096741526e-06,
      "loss": 0.4024,
      "step": 760
    },
    {
      "epoch": 0.6343283582089553,
      "grad_norm": 2.1288856262867517,
      "learning_rate": 3.55160746588254e-06,
      "loss": 0.4071,
      "step": 765
    },
    {
      "epoch": 0.6384742951907131,
      "grad_norm": 2.0991322193698485,
      "learning_rate": 3.482478405106803e-06,
      "loss": 0.4119,
      "step": 770
    },
    {
      "epoch": 0.642620232172471,
      "grad_norm": 2.1933844049539153,
      "learning_rate": 3.4136674032453787e-06,
      "loss": 0.4057,
      "step": 775
    },
    {
      "epoch": 0.6467661691542289,
      "grad_norm": 1.9800816023792092,
      "learning_rate": 3.34518888246703e-06,
      "loss": 0.4081,
      "step": 780
    },
    {
      "epoch": 0.6509121061359867,
      "grad_norm": 2.134679203262353,
      "learning_rate": 3.2770571952554674e-06,
      "loss": 0.4055,
      "step": 785
    },
    {
      "epoch": 0.6550580431177446,
      "grad_norm": 2.0810403529065824,
      "learning_rate": 3.2092866214011984e-06,
      "loss": 0.3953,
      "step": 790
    },
    {
      "epoch": 0.6592039800995025,
      "grad_norm": 1.9681607355510164,
      "learning_rate": 3.141891365008609e-06,
      "loss": 0.4043,
      "step": 795
    },
    {
      "epoch": 0.6633499170812603,
      "grad_norm": 2.0452177198825683,
      "learning_rate": 3.0748855515189104e-06,
      "loss": 0.4,
      "step": 800
    },
    {
      "epoch": 0.6674958540630183,
      "grad_norm": 2.0817931631950914,
      "learning_rate": 3.00828322474958e-06,
      "loss": 0.3938,
      "step": 805
    },
    {
      "epoch": 0.6716417910447762,
      "grad_norm": 2.0579281745324876,
      "learning_rate": 2.942098343950891e-06,
      "loss": 0.3848,
      "step": 810
    },
    {
      "epoch": 0.675787728026534,
      "grad_norm": 2.0378046645796446,
      "learning_rate": 2.8763447808801914e-06,
      "loss": 0.3895,
      "step": 815
    },
    {
      "epoch": 0.6799336650082919,
      "grad_norm": 2.089276342993108,
      "learning_rate": 2.8110363168944976e-06,
      "loss": 0.3947,
      "step": 820
    },
    {
      "epoch": 0.6840796019900498,
      "grad_norm": 1.9512325130647332,
      "learning_rate": 2.7461866400620506e-06,
      "loss": 0.3981,
      "step": 825
    },
    {
      "epoch": 0.6882255389718076,
      "grad_norm": 2.006701131264969,
      "learning_rate": 2.6818093422934254e-06,
      "loss": 0.3908,
      "step": 830
    },
    {
      "epoch": 0.6923714759535655,
      "grad_norm": 2.134162418048178,
      "learning_rate": 2.617917916492776e-06,
      "loss": 0.3922,
      "step": 835
    },
    {
      "epoch": 0.6965174129353234,
      "grad_norm": 2.1121757305097226,
      "learning_rate": 2.5545257537298497e-06,
      "loss": 0.3903,
      "step": 840
    },
    {
      "epoch": 0.7006633499170812,
      "grad_norm": 2.06027998760199,
      "learning_rate": 2.491646140433346e-06,
      "loss": 0.3795,
      "step": 845
    },
    {
      "epoch": 0.7048092868988391,
      "grad_norm": 2.1221824714111035,
      "learning_rate": 2.4292922556061877e-06,
      "loss": 0.3891,
      "step": 850
    },
    {
      "epoch": 0.7089552238805971,
      "grad_norm": 2.0687201549733905,
      "learning_rate": 2.367477168063326e-06,
      "loss": 0.3804,
      "step": 855
    },
    {
      "epoch": 0.7131011608623549,
      "grad_norm": 2.1096088311491323,
      "learning_rate": 2.3062138336926406e-06,
      "loss": 0.3861,
      "step": 860
    },
    {
      "epoch": 0.7172470978441128,
      "grad_norm": 1.9369468070487719,
      "learning_rate": 2.245515092739488e-06,
      "loss": 0.38,
      "step": 865
    },
    {
      "epoch": 0.7213930348258707,
      "grad_norm": 2.0347831655952198,
      "learning_rate": 2.185393667115513e-06,
      "loss": 0.3697,
      "step": 870
    },
    {
      "epoch": 0.7255389718076285,
      "grad_norm": 2.0345030208735735,
      "learning_rate": 2.125862157732245e-06,
      "loss": 0.3686,
      "step": 875
    },
    {
      "epoch": 0.7296849087893864,
      "grad_norm": 1.9521605057602363,
      "learning_rate": 2.066933041860059e-06,
      "loss": 0.3804,
      "step": 880
    },
    {
      "epoch": 0.7338308457711443,
      "grad_norm": 2.0778756164223364,
      "learning_rate": 2.0086186705130545e-06,
      "loss": 0.3804,
      "step": 885
    },
    {
      "epoch": 0.7379767827529021,
      "grad_norm": 2.1591475619282847,
      "learning_rate": 1.9509312658603954e-06,
      "loss": 0.3752,
      "step": 890
    },
    {
      "epoch": 0.74212271973466,
      "grad_norm": 2.0870382174991993,
      "learning_rate": 1.8938829186646484e-06,
      "loss": 0.3705,
      "step": 895
    },
    {
      "epoch": 0.746268656716418,
      "grad_norm": 1.9907262732293516,
      "learning_rate": 1.8374855857476687e-06,
      "loss": 0.3646,
      "step": 900
    },
    {
      "epoch": 0.7504145936981758,
      "grad_norm": 2.112014948477697,
      "learning_rate": 1.7817510874845585e-06,
      "loss": 0.378,
      "step": 905
    },
    {
      "epoch": 0.7545605306799337,
      "grad_norm": 2.00612301158268,
      "learning_rate": 1.7266911053262196e-06,
      "loss": 0.3722,
      "step": 910
    },
    {
      "epoch": 0.7587064676616916,
      "grad_norm": 1.9805581686834959,
      "learning_rate": 1.6723171793510363e-06,
      "loss": 0.3571,
      "step": 915
    },
    {
      "epoch": 0.7628524046434494,
      "grad_norm": 2.1705390220024876,
      "learning_rate": 1.6186407058461622e-06,
      "loss": 0.3717,
      "step": 920
    },
    {
      "epoch": 0.7669983416252073,
      "grad_norm": 1.997191295027296,
      "learning_rate": 1.5656729349189742e-06,
      "loss": 0.3702,
      "step": 925
    },
    {
      "epoch": 0.7711442786069652,
      "grad_norm": 1.9480055411783619,
      "learning_rate": 1.5134249681391416e-06,
      "loss": 0.3662,
      "step": 930
    },
    {
      "epoch": 0.775290215588723,
      "grad_norm": 2.1202087648855708,
      "learning_rate": 1.4619077562118477e-06,
      "loss": 0.3624,
      "step": 935
    },
    {
      "epoch": 0.7794361525704809,
      "grad_norm": 2.0287381557784436,
      "learning_rate": 1.411132096682606e-06,
      "loss": 0.3647,
      "step": 940
    },
    {
      "epoch": 0.7835820895522388,
      "grad_norm": 2.1095441746045074,
      "learning_rate": 1.3611086316742057e-06,
      "loss": 0.3666,
      "step": 945
    },
    {
      "epoch": 0.7877280265339967,
      "grad_norm": 2.0271810521548077,
      "learning_rate": 1.3118478456562073e-06,
      "loss": 0.3644,
      "step": 950
    },
    {
      "epoch": 0.7918739635157546,
      "grad_norm": 2.017879350372662,
      "learning_rate": 1.2633600632474962e-06,
      "loss": 0.3555,
      "step": 955
    },
    {
      "epoch": 0.7960199004975125,
      "grad_norm": 2.0158894267700416,
      "learning_rate": 1.2156554470523364e-06,
      "loss": 0.3579,
      "step": 960
    },
    {
      "epoch": 0.8001658374792703,
      "grad_norm": 2.135171063836108,
      "learning_rate": 1.1687439955303764e-06,
      "loss": 0.3549,
      "step": 965
    },
    {
      "epoch": 0.8043117744610282,
      "grad_norm": 2.051748632880903,
      "learning_rate": 1.1226355409010686e-06,
      "loss": 0.3567,
      "step": 970
    },
    {
      "epoch": 0.8084577114427861,
      "grad_norm": 1.9895182337910944,
      "learning_rate": 1.0773397470829145e-06,
      "loss": 0.3506,
      "step": 975
    },
    {
      "epoch": 0.8126036484245439,
      "grad_norm": 2.0734006194975025,
      "learning_rate": 1.032866107667999e-06,
      "loss": 0.3527,
      "step": 980
    },
    {
      "epoch": 0.8167495854063018,
      "grad_norm": 1.9987662217078168,
      "learning_rate": 9.892239439322243e-07,
      "loss": 0.3581,
      "step": 985
    },
    {
      "epoch": 0.8208955223880597,
      "grad_norm": 2.0823516989471083,
      "learning_rate": 9.464224028816427e-07,
      "loss": 0.3665,
      "step": 990
    },
    {
      "epoch": 0.8250414593698175,
      "grad_norm": 2.051484969572584,
      "learning_rate": 9.044704553353323e-07,
      "loss": 0.3537,
      "step": 995
    },
    {
      "epoch": 0.8291873963515755,
      "grad_norm": 2.019033384913334,
      "learning_rate": 8.633768940451981e-07,
      "loss": 0.3484,
      "step": 1000
    },
    {
      "epoch": 0.8333333333333334,
      "grad_norm": 2.071090455759551,
      "learning_rate": 8.231503318530814e-07,
      "loss": 0.3515,
      "step": 1005
    },
    {
      "epoch": 0.8374792703150912,
      "grad_norm": 1.987573001153748,
      "learning_rate": 7.837991998855899e-07,
      "loss": 0.3458,
      "step": 1010
    },
    {
      "epoch": 0.8416252072968491,
      "grad_norm": 2.1342753070628544,
      "learning_rate": 7.453317457870096e-07,
      "loss": 0.3481,
      "step": 1015
    },
    {
      "epoch": 0.845771144278607,
      "grad_norm": 1.9443796196582224,
      "learning_rate": 7.077560319906696e-07,
      "loss": 0.3468,
      "step": 1020
    },
    {
      "epoch": 0.8499170812603648,
      "grad_norm": 2.1533371687865737,
      "learning_rate": 6.710799340291341e-07,
      "loss": 0.3516,
      "step": 1025
    },
    {
      "epoch": 0.8540630182421227,
      "grad_norm": 2.046122945823328,
      "learning_rate": 6.353111388835564e-07,
      "loss": 0.348,
      "step": 1030
    },
    {
      "epoch": 0.8582089552238806,
      "grad_norm": 2.0087071630631192,
      "learning_rate": 6.00457143372557e-07,
      "loss": 0.3464,
      "step": 1035
    },
    {
      "epoch": 0.8623548922056384,
      "grad_norm": 2.031472375135797,
      "learning_rate": 5.665252525809583e-07,
      "loss": 0.351,
      "step": 1040
    },
    {
      "epoch": 0.8665008291873963,
      "grad_norm": 2.085600740397554,
      "learning_rate": 5.335225783287051e-07,
      "loss": 0.3522,
      "step": 1045
    },
    {
      "epoch": 0.8706467661691543,
      "grad_norm": 2.0629885384115543,
      "learning_rate": 5.014560376802913e-07,
      "loss": 0.3507,
      "step": 1050
    },
    {
      "epoch": 0.8747927031509121,
      "grad_norm": 2.0514710921617265,
      "learning_rate": 4.703323514950042e-07,
      "loss": 0.3414,
      "step": 1055
    },
    {
      "epoch": 0.87893864013267,
      "grad_norm": 2.0910097285997415,
      "learning_rate": 4.401580430182928e-07,
      "loss": 0.3432,
      "step": 1060
    },
    {
      "epoch": 0.8830845771144279,
      "grad_norm": 2.007785736762621,
      "learning_rate": 4.1093943651455305e-07,
      "loss": 0.3479,
      "step": 1065
    },
    {
      "epoch": 0.8872305140961857,
      "grad_norm": 2.0849923696855326,
      "learning_rate": 3.826826559416219e-07,
      "loss": 0.3472,
      "step": 1070
    },
    {
      "epoch": 0.8913764510779436,
      "grad_norm": 2.0132819974862843,
      "learning_rate": 3.5539362366724784e-07,
      "loss": 0.3399,
      "step": 1075
    },
    {
      "epoch": 0.8955223880597015,
      "grad_norm": 2.074275526756306,
      "learning_rate": 3.290780592278148e-07,
      "loss": 0.3416,
      "step": 1080
    },
    {
      "epoch": 0.8996683250414593,
      "grad_norm": 2.05724879908193,
      "learning_rate": 3.0374147812958387e-07,
      "loss": 0.3505,
      "step": 1085
    },
    {
      "epoch": 0.9038142620232172,
      "grad_norm": 2.157618991705193,
      "learning_rate": 2.7938919069268654e-07,
      "loss": 0.3505,
      "step": 1090
    },
    {
      "epoch": 0.9079601990049752,
      "grad_norm": 2.1499572962789775,
      "learning_rate": 2.5602630093813253e-07,
      "loss": 0.3546,
      "step": 1095
    },
    {
      "epoch": 0.912106135986733,
      "grad_norm": 2.0332963522319485,
      "learning_rate": 2.3365770551805223e-07,
      "loss": 0.3411,
      "step": 1100
    },
    {
      "epoch": 0.9162520729684909,
      "grad_norm": 1.9439734508841187,
      "learning_rate": 2.1228809268940164e-07,
      "loss": 0.3467,
      "step": 1105
    },
    {
      "epoch": 0.9203980099502488,
      "grad_norm": 2.01922575745273,
      "learning_rate": 1.919219413313478e-07,
      "loss": 0.3456,
      "step": 1110
    },
    {
      "epoch": 0.9245439469320066,
      "grad_norm": 2.0723878543911143,
      "learning_rate": 1.725635200065323e-07,
      "loss": 0.3351,
      "step": 1115
    },
    {
      "epoch": 0.9286898839137645,
      "grad_norm": 2.02921530849707,
      "learning_rate": 1.5421688606642392e-07,
      "loss": 0.3399,
      "step": 1120
    },
    {
      "epoch": 0.9328358208955224,
      "grad_norm": 2.0191708205978904,
      "learning_rate": 1.3688588480092913e-07,
      "loss": 0.3401,
      "step": 1125
    },
    {
      "epoch": 0.9369817578772802,
      "grad_norm": 2.1261706152763393,
      "learning_rate": 1.205741486324552e-07,
      "loss": 0.3417,
      "step": 1130
    },
    {
      "epoch": 0.9411276948590381,
      "grad_norm": 2.1058790836653287,
      "learning_rate": 1.0528509635458873e-07,
      "loss": 0.3404,
      "step": 1135
    },
    {
      "epoch": 0.945273631840796,
      "grad_norm": 2.074684841336411,
      "learning_rate": 9.102193241554757e-08,
      "loss": 0.3375,
      "step": 1140
    },
    {
      "epoch": 0.9494195688225538,
      "grad_norm": 1.991625852605994,
      "learning_rate": 7.778764624655433e-08,
      "loss": 0.3322,
      "step": 1145
    },
    {
      "epoch": 0.9535655058043118,
      "grad_norm": 2.104924653809554,
      "learning_rate": 6.558501163527964e-08,
      "loss": 0.336,
      "step": 1150
    },
    {
      "epoch": 0.9577114427860697,
      "grad_norm": 2.0384736104779226,
      "learning_rate": 5.44165861444812e-08,
      "loss": 0.3448,
      "step": 1155
    },
    {
      "epoch": 0.9618573797678275,
      "grad_norm": 2.0936118845033405,
      "learning_rate": 4.428471057596362e-08,
      "loss": 0.3381,
      "step": 1160
    },
    {
      "epoch": 0.9660033167495854,
      "grad_norm": 2.0037182601593466,
      "learning_rate": 3.519150847996422e-08,
      "loss": 0.3457,
      "step": 1165
    },
    {
      "epoch": 0.9701492537313433,
      "grad_norm": 2.09482647445627,
      "learning_rate": 2.713888571007739e-08,
      "loss": 0.3398,
      "step": 1170
    },
    {
      "epoch": 0.9742951907131011,
      "grad_norm": 2.085192366585127,
      "learning_rate": 2.012853002380466e-08,
      "loss": 0.3442,
      "step": 1175
    },
    {
      "epoch": 0.978441127694859,
      "grad_norm": 1.9611348789077625,
      "learning_rate": 1.4161910728816009e-08,
      "loss": 0.3373,
      "step": 1180
    },
    {
      "epoch": 0.9825870646766169,
      "grad_norm": 2.109979592147509,
      "learning_rate": 9.240278374995637e-09,
      "loss": 0.3365,
      "step": 1185
    },
    {
      "epoch": 0.9867330016583747,
      "grad_norm": 2.112199186520632,
      "learning_rate": 5.364664492337746e-09,
      "loss": 0.3407,
      "step": 1190
    },
    {
      "epoch": 0.9908789386401327,
      "grad_norm": 2.1961533969052236,
      "learning_rate": 2.5358813747500266e-09,
      "loss": 0.3407,
      "step": 1195
    },
    {
      "epoch": 0.9950248756218906,
      "grad_norm": 2.050107312724138,
      "learning_rate": 7.545219097987444e-10,
      "loss": 0.3484,
      "step": 1200
    },
    {
      "epoch": 0.9991708126036484,
      "grad_norm": 1.9981992682594107,
      "learning_rate": 2.0959454449243076e-11,
      "loss": 0.345,
      "step": 1205
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.3594575822353363,
      "eval_runtime": 0.9533,
      "eval_samples_per_second": 3.147,
      "eval_steps_per_second": 1.049,
      "step": 1206
    },
    {
      "epoch": 1.0,
      "step": 1206,
      "total_flos": 252459519836160.0,
      "train_loss": 0.5262548677858033,
      "train_runtime": 29272.1392,
      "train_samples_per_second": 1.318,
      "train_steps_per_second": 0.041
    }
  ],
  "logging_steps": 5,
  "max_steps": 1206,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 252459519836160.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}