{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "eval_steps": 500,
  "global_step": 3160,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.007911392405063292,
      "grad_norm": 2.3738328422587087,
      "learning_rate": 1.5822784810126583e-06,
      "loss": 1.1261,
      "num_tokens": 5242880.0,
      "step": 5
    },
    {
      "epoch": 0.015822784810126583,
      "grad_norm": 1.7722440301570659,
      "learning_rate": 3.1645569620253167e-06,
      "loss": 1.1058,
      "num_tokens": 10470029.0,
      "step": 10
    },
    {
      "epoch": 0.023734177215189875,
      "grad_norm": 1.083276838182905,
      "learning_rate": 4.746835443037975e-06,
      "loss": 1.0806,
      "num_tokens": 15712909.0,
      "step": 15
    },
    {
      "epoch": 0.03164556962025317,
      "grad_norm": 0.8611689905530553,
      "learning_rate": 6.329113924050633e-06,
      "loss": 1.02,
      "num_tokens": 20951872.0,
      "step": 20
    },
    {
      "epoch": 0.03955696202531646,
      "grad_norm": 0.7250421722694049,
      "learning_rate": 7.911392405063292e-06,
      "loss": 0.9766,
      "num_tokens": 26185341.0,
      "step": 25
    },
    {
      "epoch": 0.04746835443037975,
      "grad_norm": 0.6382913905764741,
      "learning_rate": 9.49367088607595e-06,
      "loss": 0.9456,
      "num_tokens": 31428221.0,
      "step": 30
    },
    {
      "epoch": 0.055379746835443035,
      "grad_norm": 0.5436432996737252,
      "learning_rate": 1.1075949367088608e-05,
      "loss": 0.9187,
      "num_tokens": 36669330.0,
      "step": 35
    },
    {
      "epoch": 0.06329113924050633,
      "grad_norm": 0.45175884987072906,
      "learning_rate": 1.2658227848101267e-05,
      "loss": 0.8956,
      "num_tokens": 41910897.0,
      "step": 40
    },
    {
      "epoch": 0.07120253164556962,
      "grad_norm": 0.3906460936286765,
      "learning_rate": 1.4240506329113925e-05,
      "loss": 0.8787,
      "num_tokens": 47153777.0,
      "step": 45
    },
    {
      "epoch": 0.07911392405063292,
      "grad_norm": 0.38060702422443193,
      "learning_rate": 1.5822784810126583e-05,
      "loss": 0.8674,
      "num_tokens": 52391193.0,
      "step": 50
    },
    {
      "epoch": 0.08702531645569621,
      "grad_norm": 0.3976893756810291,
      "learning_rate": 1.7405063291139243e-05,
      "loss": 0.8589,
      "num_tokens": 57634073.0,
      "step": 55
    },
    {
      "epoch": 0.0949367088607595,
      "grad_norm": 0.39610453911616206,
      "learning_rate": 1.89873417721519e-05,
      "loss": 0.8577,
      "num_tokens": 62876953.0,
      "step": 60
    },
    {
      "epoch": 0.10284810126582279,
      "grad_norm": 0.3232899679188804,
      "learning_rate": 2.056962025316456e-05,
      "loss": 0.8456,
      "num_tokens": 68119833.0,
      "step": 65
    },
    {
      "epoch": 0.11075949367088607,
      "grad_norm": 0.3785345815272384,
      "learning_rate": 2.2151898734177217e-05,
      "loss": 0.8354,
      "num_tokens": 73362713.0,
      "step": 70
    },
    {
      "epoch": 0.11867088607594936,
      "grad_norm": 0.42958423811773794,
      "learning_rate": 2.3734177215189873e-05,
      "loss": 0.8277,
      "num_tokens": 78605593.0,
      "step": 75
    },
    {
      "epoch": 0.12658227848101267,
      "grad_norm": 0.3759983838903421,
      "learning_rate": 2.5316455696202533e-05,
      "loss": 0.8257,
      "num_tokens": 83848473.0,
      "step": 80
    },
    {
      "epoch": 0.13449367088607594,
      "grad_norm": 0.4694604862178893,
      "learning_rate": 2.689873417721519e-05,
      "loss": 0.8212,
      "num_tokens": 89091353.0,
      "step": 85
    },
    {
      "epoch": 0.14240506329113925,
      "grad_norm": 0.41926661815869387,
      "learning_rate": 2.848101265822785e-05,
      "loss": 0.8132,
      "num_tokens": 94334233.0,
      "step": 90
    },
    {
      "epoch": 0.15031645569620253,
      "grad_norm": 0.8644371279587081,
      "learning_rate": 3.0063291139240506e-05,
      "loss": 0.8356,
      "num_tokens": 99577113.0,
      "step": 95
    },
    {
      "epoch": 0.15822784810126583,
      "grad_norm": 0.7239620838593109,
      "learning_rate": 3.1645569620253167e-05,
      "loss": 0.8115,
      "num_tokens": 104819993.0,
      "step": 100
    },
    {
      "epoch": 0.1661392405063291,
      "grad_norm": 0.6759442627573765,
      "learning_rate": 3.322784810126582e-05,
      "loss": 0.8131,
      "num_tokens": 110062873.0,
      "step": 105
    },
    {
      "epoch": 0.17405063291139242,
      "grad_norm": 0.5445796635610345,
      "learning_rate": 3.4810126582278487e-05,
      "loss": 0.8051,
      "num_tokens": 115305753.0,
      "step": 110
    },
    {
      "epoch": 0.1819620253164557,
      "grad_norm": 0.6030674874140043,
      "learning_rate": 3.639240506329114e-05,
      "loss": 0.8069,
      "num_tokens": 120539230.0,
      "step": 115
    },
    {
      "epoch": 0.189873417721519,
      "grad_norm": 0.5775646724321816,
      "learning_rate": 3.79746835443038e-05,
      "loss": 0.8001,
      "num_tokens": 125782110.0,
      "step": 120
    },
    {
      "epoch": 0.19778481012658228,
      "grad_norm": 1.1999946695600228,
      "learning_rate": 3.9556962025316456e-05,
      "loss": 0.7923,
      "num_tokens": 131014137.0,
      "step": 125
    },
    {
      "epoch": 0.20569620253164558,
      "grad_norm": 0.9396643672179282,
      "learning_rate": 4.113924050632912e-05,
      "loss": 0.8032,
      "num_tokens": 136257017.0,
      "step": 130
    },
    {
      "epoch": 0.21360759493670886,
      "grad_norm": 0.989562272615721,
      "learning_rate": 4.2721518987341776e-05,
      "loss": 0.7945,
      "num_tokens": 141499897.0,
      "step": 135
    },
    {
      "epoch": 0.22151898734177214,
      "grad_norm": 0.7938020654922184,
      "learning_rate": 4.430379746835443e-05,
      "loss": 0.7911,
      "num_tokens": 146712990.0,
      "step": 140
    },
    {
      "epoch": 0.22943037974683544,
      "grad_norm": 0.772547411213572,
      "learning_rate": 4.588607594936709e-05,
      "loss": 0.788,
      "num_tokens": 151927597.0,
      "step": 145
    },
    {
      "epoch": 0.23734177215189872,
      "grad_norm": 0.5427524570272351,
      "learning_rate": 4.7468354430379746e-05,
      "loss": 0.7931,
      "num_tokens": 157170477.0,
      "step": 150
    },
    {
      "epoch": 0.24525316455696203,
      "grad_norm": 0.520616997801594,
      "learning_rate": 4.905063291139241e-05,
      "loss": 0.7761,
      "num_tokens": 162397909.0,
      "step": 155
    },
    {
      "epoch": 0.25316455696202533,
      "grad_norm": 1.321171420646125,
      "learning_rate": 4.9999950717727614e-05,
      "loss": 0.8103,
      "num_tokens": 167639712.0,
      "step": 160
    },
    {
      "epoch": 0.2610759493670886,
      "grad_norm": 0.8179961728401967,
      "learning_rate": 4.999939629464255e-05,
      "loss": 0.7829,
      "num_tokens": 172882592.0,
      "step": 165
    },
    {
      "epoch": 0.2689873417721519,
      "grad_norm": 1.07950233433435,
      "learning_rate": 4.999822586086208e-05,
      "loss": 0.7815,
      "num_tokens": 178125472.0,
      "step": 170
    },
    {
      "epoch": 0.27689873417721517,
      "grad_norm": 0.764049579486957,
      "learning_rate": 4.9996439448431495e-05,
      "loss": 0.7936,
      "num_tokens": 183355239.0,
      "step": 175
    },
    {
      "epoch": 0.2848101265822785,
      "grad_norm": 0.6643682550526433,
      "learning_rate": 4.9994037106260965e-05,
      "loss": 0.777,
      "num_tokens": 188598119.0,
      "step": 180
    },
    {
      "epoch": 0.2927215189873418,
      "grad_norm": 0.4988533797702287,
      "learning_rate": 4.999101890012418e-05,
      "loss": 0.7755,
      "num_tokens": 193840999.0,
      "step": 185
    },
    {
      "epoch": 0.30063291139240506,
      "grad_norm": 0.49381791616490034,
      "learning_rate": 4.998738491265655e-05,
      "loss": 0.7646,
      "num_tokens": 199082063.0,
      "step": 190
    },
    {
      "epoch": 0.30854430379746833,
      "grad_norm": 0.5570710586204755,
      "learning_rate": 4.998313524335302e-05,
      "loss": 0.7698,
      "num_tokens": 204324943.0,
      "step": 195
    },
    {
      "epoch": 0.31645569620253167,
      "grad_norm": 0.4911267149039519,
      "learning_rate": 4.9978270008565184e-05,
      "loss": 0.7641,
      "num_tokens": 209561370.0,
      "step": 200
    },
    {
      "epoch": 0.32436708860759494,
      "grad_norm": 0.6409945115911554,
      "learning_rate": 4.997278934149828e-05,
      "loss": 0.7677,
      "num_tokens": 214804250.0,
      "step": 205
    },
    {
      "epoch": 0.3322784810126582,
      "grad_norm": 0.5401344460770882,
      "learning_rate": 4.996669339220741e-05,
      "loss": 0.7657,
      "num_tokens": 220047130.0,
      "step": 210
    },
    {
      "epoch": 0.3401898734177215,
      "grad_norm": 0.5361201681791574,
      "learning_rate": 4.995998232759349e-05,
      "loss": 0.7663,
      "num_tokens": 225290010.0,
      "step": 215
    },
    {
      "epoch": 0.34810126582278483,
      "grad_norm": 0.5803989669904568,
      "learning_rate": 4.995265633139869e-05,
      "loss": 0.7636,
      "num_tokens": 230532890.0,
      "step": 220
    },
    {
      "epoch": 0.3560126582278481,
      "grad_norm": 0.726211587240982,
      "learning_rate": 4.994471560420137e-05,
      "loss": 0.7631,
      "num_tokens": 235775770.0,
      "step": 225
    },
    {
      "epoch": 0.3639240506329114,
      "grad_norm": 0.698163213622165,
      "learning_rate": 4.9936160363410584e-05,
      "loss": 0.7545,
      "num_tokens": 241018650.0,
      "step": 230
    },
    {
      "epoch": 0.37183544303797467,
      "grad_norm": 0.5153045338635585,
      "learning_rate": 4.992699084326018e-05,
      "loss": 0.7541,
      "num_tokens": 246261530.0,
      "step": 235
    },
    {
      "epoch": 0.379746835443038,
      "grad_norm": 0.6959186613242923,
      "learning_rate": 4.9917207294802346e-05,
      "loss": 0.7635,
      "num_tokens": 251504410.0,
      "step": 240
    },
    {
      "epoch": 0.3876582278481013,
      "grad_norm": 0.475983385570461,
      "learning_rate": 4.990680998590071e-05,
      "loss": 0.755,
      "num_tokens": 256747290.0,
      "step": 245
    },
    {
      "epoch": 0.39556962025316456,
      "grad_norm": 0.6721509783638105,
      "learning_rate": 4.989579920122307e-05,
      "loss": 0.7532,
      "num_tokens": 261990170.0,
      "step": 250
    },
    {
      "epoch": 0.40348101265822783,
      "grad_norm": 0.4813755853658302,
      "learning_rate": 4.9884175242233585e-05,
      "loss": 0.7552,
      "num_tokens": 267233050.0,
      "step": 255
    },
    {
      "epoch": 0.41139240506329117,
      "grad_norm": 0.46711611820253635,
      "learning_rate": 4.987193842718448e-05,
      "loss": 0.7513,
      "num_tokens": 272475930.0,
      "step": 260
    },
    {
      "epoch": 0.41930379746835444,
      "grad_norm": 0.5972032077244416,
      "learning_rate": 4.985908909110735e-05,
      "loss": 0.7443,
      "num_tokens": 277718810.0,
      "step": 265
    },
    {
      "epoch": 0.4272151898734177,
      "grad_norm": 0.5059990764921701,
      "learning_rate": 4.9845627585804025e-05,
      "loss": 0.7458,
      "num_tokens": 282942349.0,
      "step": 270
    },
    {
      "epoch": 0.435126582278481,
      "grad_norm": 0.5582209701112856,
      "learning_rate": 4.9831554279836884e-05,
      "loss": 0.7521,
      "num_tokens": 288185229.0,
      "step": 275
    },
    {
      "epoch": 0.4430379746835443,
      "grad_norm": 0.4595435461264603,
      "learning_rate": 4.9816869558518796e-05,
      "loss": 0.7517,
      "num_tokens": 293428109.0,
      "step": 280
    },
    {
      "epoch": 0.4509493670886076,
      "grad_norm": 0.4675296642906588,
      "learning_rate": 4.980157382390258e-05,
      "loss": 0.7427,
      "num_tokens": 298670989.0,
      "step": 285
    },
    {
      "epoch": 0.4588607594936709,
      "grad_norm": 0.4659606979020265,
      "learning_rate": 4.978566749476995e-05,
      "loss": 0.7397,
      "num_tokens": 303913869.0,
      "step": 290
    },
    {
      "epoch": 0.46677215189873417,
      "grad_norm": 0.5143550840646993,
      "learning_rate": 4.9769151006620115e-05,
      "loss": 0.7488,
      "num_tokens": 309151588.0,
      "step": 295
    },
    {
      "epoch": 0.47468354430379744,
      "grad_norm": 0.5527231062409742,
      "learning_rate": 4.975202481165778e-05,
      "loss": 0.7528,
      "num_tokens": 314394468.0,
      "step": 300
    },
    {
      "epoch": 0.4825949367088608,
      "grad_norm": 0.6582518727489046,
      "learning_rate": 4.973428937878085e-05,
      "loss": 0.7388,
      "num_tokens": 319622567.0,
      "step": 305
    },
    {
      "epoch": 0.49050632911392406,
      "grad_norm": 0.5202900734758532,
      "learning_rate": 4.9715945193567506e-05,
      "loss": 0.7332,
      "num_tokens": 324865447.0,
      "step": 310
    },
    {
      "epoch": 0.49841772151898733,
      "grad_norm": 0.38453517395197584,
      "learning_rate": 4.969699275826298e-05,
      "loss": 0.7364,
      "num_tokens": 330108327.0,
      "step": 315
    },
    {
      "epoch": 0.5063291139240507,
      "grad_norm": 0.5632153181654556,
      "learning_rate": 4.967743259176575e-05,
      "loss": 0.7387,
      "num_tokens": 335351207.0,
      "step": 320
    },
    {
      "epoch": 0.5142405063291139,
      "grad_norm": 0.44887504740519496,
      "learning_rate": 4.96572652296134e-05,
      "loss": 0.7354,
      "num_tokens": 340594087.0,
      "step": 325
    },
    {
      "epoch": 0.5221518987341772,
      "grad_norm": 0.48735207873608405,
      "learning_rate": 4.963649122396788e-05,
      "loss": 0.7376,
      "num_tokens": 345825159.0,
      "step": 330
    },
    {
      "epoch": 0.5300632911392406,
      "grad_norm": 0.4193108797718534,
      "learning_rate": 4.961511114360043e-05,
      "loss": 0.7361,
      "num_tokens": 351068039.0,
      "step": 335
    },
    {
      "epoch": 0.5379746835443038,
      "grad_norm": 0.5662379674502074,
      "learning_rate": 4.959312557387601e-05,
      "loss": 0.739,
      "num_tokens": 356304704.0,
      "step": 340
    },
    {
      "epoch": 0.5458860759493671,
      "grad_norm": 0.5775103716382664,
      "learning_rate": 4.957053511673728e-05,
      "loss": 0.739,
      "num_tokens": 361547584.0,
      "step": 345
    },
    {
      "epoch": 0.5537974683544303,
      "grad_norm": 0.4784805413121098,
      "learning_rate": 4.954734039068806e-05,
      "loss": 0.7369,
      "num_tokens": 366775047.0,
      "step": 350
    },
    {
      "epoch": 0.5617088607594937,
      "grad_norm": 0.3826615225742145,
      "learning_rate": 4.95235420307765e-05,
      "loss": 0.7371,
      "num_tokens": 372017927.0,
      "step": 355
    },
    {
      "epoch": 0.569620253164557,
      "grad_norm": 0.5250012256720773,
      "learning_rate": 4.949914068857758e-05,
      "loss": 0.7401,
      "num_tokens": 377260807.0,
      "step": 360
    },
    {
      "epoch": 0.5775316455696202,
      "grad_norm": 0.5294573268848846,
      "learning_rate": 4.947413703217537e-05,
      "loss": 0.7303,
      "num_tokens": 382503687.0,
      "step": 365
    },
    {
      "epoch": 0.5854430379746836,
      "grad_norm": 0.4657426470359354,
      "learning_rate": 4.944853174614465e-05,
      "loss": 0.7276,
      "num_tokens": 387746567.0,
      "step": 370
    },
    {
      "epoch": 0.5933544303797469,
      "grad_norm": 0.6110352375166948,
      "learning_rate": 4.9422325531532254e-05,
      "loss": 0.7332,
      "num_tokens": 392989447.0,
      "step": 375
    },
    {
      "epoch": 0.6012658227848101,
      "grad_norm": 0.5173863425147587,
      "learning_rate": 4.9395519105837794e-05,
      "loss": 0.7345,
      "num_tokens": 398220435.0,
      "step": 380
    },
    {
      "epoch": 0.6091772151898734,
      "grad_norm": 0.44630851057828674,
      "learning_rate": 4.936811320299409e-05,
      "loss": 0.7344,
      "num_tokens": 403454091.0,
      "step": 385
    },
    {
      "epoch": 0.6170886075949367,
      "grad_norm": 0.5511830323483669,
      "learning_rate": 4.9340108573346994e-05,
      "loss": 0.7277,
      "num_tokens": 408695566.0,
      "step": 390
    },
    {
      "epoch": 0.625,
      "grad_norm": 0.488590082197144,
      "learning_rate": 4.931150598363494e-05,
      "loss": 0.7262,
      "num_tokens": 413938446.0,
      "step": 395
    },
    {
      "epoch": 0.6329113924050633,
      "grad_norm": 0.5980267797290539,
      "learning_rate": 4.9282306216967866e-05,
      "loss": 0.7367,
      "num_tokens": 419181326.0,
      "step": 400
    },
    {
      "epoch": 0.6408227848101266,
      "grad_norm": 0.4042639446511123,
      "learning_rate": 4.925251007280584e-05,
      "loss": 0.7277,
      "num_tokens": 424410675.0,
      "step": 405
    },
    {
      "epoch": 0.6487341772151899,
      "grad_norm": 0.5873987854936606,
      "learning_rate": 4.922211836693711e-05,
      "loss": 0.7216,
      "num_tokens": 429653555.0,
      "step": 410
    },
    {
      "epoch": 0.6566455696202531,
      "grad_norm": 0.49135762238140734,
      "learning_rate": 4.9191131931455836e-05,
      "loss": 0.7249,
      "num_tokens": 434896435.0,
      "step": 415
    },
    {
      "epoch": 0.6645569620253164,
      "grad_norm": 0.5977070057438132,
      "learning_rate": 4.915955161473925e-05,
      "loss": 0.7213,
      "num_tokens": 440139315.0,
      "step": 420
    },
    {
      "epoch": 0.6724683544303798,
      "grad_norm": 0.5865545939218848,
      "learning_rate": 4.9127378281424454e-05,
      "loss": 0.7291,
      "num_tokens": 445382195.0,
      "step": 425
    },
    {
      "epoch": 0.680379746835443,
      "grad_norm": 0.5141049741136753,
      "learning_rate": 4.909461281238475e-05,
      "loss": 0.7275,
      "num_tokens": 450625075.0,
      "step": 430
    },
    {
      "epoch": 0.6882911392405063,
      "grad_norm": 0.586173282775543,
      "learning_rate": 4.9061256104705514e-05,
      "loss": 0.7147,
      "num_tokens": 455867955.0,
      "step": 435
    },
    {
      "epoch": 0.6962025316455697,
      "grad_norm": 0.4884462198074093,
      "learning_rate": 4.902730907165962e-05,
      "loss": 0.7262,
      "num_tokens": 461110835.0,
      "step": 440
    },
    {
      "epoch": 0.7041139240506329,
      "grad_norm": 0.5155782485627577,
      "learning_rate": 4.899277264268249e-05,
      "loss": 0.726,
      "num_tokens": 466329951.0,
      "step": 445
    },
    {
      "epoch": 0.7120253164556962,
      "grad_norm": 0.5894348115226837,
      "learning_rate": 4.8957647763346575e-05,
      "loss": 0.7276,
      "num_tokens": 471572831.0,
      "step": 450
    },
    {
      "epoch": 0.7199367088607594,
      "grad_norm": 0.6178813430658108,
      "learning_rate": 4.892193539533553e-05,
      "loss": 0.7234,
      "num_tokens": 476815711.0,
      "step": 455
    },
    {
      "epoch": 0.7278481012658228,
      "grad_norm": 0.7787566109612348,
      "learning_rate": 4.888563651641784e-05,
      "loss": 0.7297,
      "num_tokens": 482044487.0,
      "step": 460
    },
    {
      "epoch": 0.7357594936708861,
      "grad_norm": 0.5460153094045689,
      "learning_rate": 4.884875212042005e-05,
      "loss": 0.7257,
      "num_tokens": 487287367.0,
      "step": 465
    },
    {
      "epoch": 0.7436708860759493,
      "grad_norm": 0.5019931926593302,
      "learning_rate": 4.88112832171996e-05,
      "loss": 0.7219,
      "num_tokens": 492530247.0,
      "step": 470
    },
    {
      "epoch": 0.7515822784810127,
      "grad_norm": 0.46922376523806203,
      "learning_rate": 4.877323083261713e-05,
      "loss": 0.7207,
      "num_tokens": 497773127.0,
      "step": 475
    },
    {
      "epoch": 0.759493670886076,
      "grad_norm": 0.4491131631584376,
      "learning_rate": 4.873459600850841e-05,
      "loss": 0.718,
      "num_tokens": 503013415.0,
      "step": 480
    },
    {
      "epoch": 0.7674050632911392,
      "grad_norm": 0.4576902411009356,
      "learning_rate": 4.869537980265581e-05,
      "loss": 0.7119,
      "num_tokens": 508255611.0,
      "step": 485
    },
    {
      "epoch": 0.7753164556962026,
      "grad_norm": 0.6179813478813936,
      "learning_rate": 4.865558328875937e-05,
      "loss": 0.7086,
      "num_tokens": 513498491.0,
      "step": 490
    },
    {
      "epoch": 0.7832278481012658,
      "grad_norm": 0.5471403707624125,
      "learning_rate": 4.8615207556407324e-05,
      "loss": 0.7242,
      "num_tokens": 518723580.0,
      "step": 495
    },
    {
      "epoch": 0.7911392405063291,
      "grad_norm": 0.4589250795263245,
      "learning_rate": 4.857425371104638e-05,
      "loss": 0.7159,
      "num_tokens": 523966460.0,
      "step": 500
    },
    {
      "epoch": 0.7990506329113924,
      "grad_norm": 0.4660626316391112,
      "learning_rate": 4.853272287395135e-05,
      "loss": 0.7251,
      "num_tokens": 529209340.0,
      "step": 505
    },
    {
      "epoch": 0.8069620253164557,
      "grad_norm": 0.5382100486924232,
      "learning_rate": 4.8490616182194534e-05,
      "loss": 0.7187,
      "num_tokens": 534429844.0,
      "step": 510
    },
    {
      "epoch": 0.814873417721519,
      "grad_norm": 0.5238715230138492,
      "learning_rate": 4.8447934788614515e-05,
      "loss": 0.7187,
      "num_tokens": 539672724.0,
      "step": 515
    },
    {
      "epoch": 0.8227848101265823,
      "grad_norm": 0.5194663159593585,
      "learning_rate": 4.840467986178464e-05,
      "loss": 0.7168,
      "num_tokens": 544915604.0,
      "step": 520
    },
    {
      "epoch": 0.8306962025316456,
      "grad_norm": 0.4135374652648048,
      "learning_rate": 4.8360852585981034e-05,
      "loss": 0.7236,
      "num_tokens": 550158484.0,
      "step": 525
    },
    {
      "epoch": 0.8386075949367089,
      "grad_norm": 0.47687000377153205,
      "learning_rate": 4.831645416115014e-05,
      "loss": 0.7091,
      "num_tokens": 555401364.0,
      "step": 530
    },
    {
      "epoch": 0.8465189873417721,
      "grad_norm": 0.4659031703902228,
      "learning_rate": 4.827148580287588e-05,
      "loss": 0.7042,
      "num_tokens": 560644244.0,
      "step": 535
    },
    {
      "epoch": 0.8544303797468354,
      "grad_norm": 0.432869438663762,
      "learning_rate": 4.82259487423464e-05,
      "loss": 0.7119,
      "num_tokens": 565887124.0,
      "step": 540
    },
    {
      "epoch": 0.8623417721518988,
      "grad_norm": 0.3989063394441302,
      "learning_rate": 4.81798442263203e-05,
      "loss": 0.7164,
      "num_tokens": 571114334.0,
      "step": 545
    },
    {
      "epoch": 0.870253164556962,
      "grad_norm": 0.5839119244408789,
      "learning_rate": 4.8133173517092575e-05,
      "loss": 0.7147,
      "num_tokens": 576351352.0,
      "step": 550
    },
    {
      "epoch": 0.8781645569620253,
      "grad_norm": 0.4989762196407589,
      "learning_rate": 4.808593789246e-05,
      "loss": 0.7116,
      "num_tokens": 581594232.0,
      "step": 555
    },
    {
      "epoch": 0.8860759493670886,
      "grad_norm": 0.6013182449359468,
      "learning_rate": 4.803813864568616e-05,
      "loss": 0.7101,
      "num_tokens": 586837112.0,
      "step": 560
    },
    {
      "epoch": 0.8939873417721519,
      "grad_norm": 0.47611561215232195,
      "learning_rate": 4.7989777085466054e-05,
      "loss": 0.7089,
      "num_tokens": 592079992.0,
      "step": 565
    },
    {
      "epoch": 0.9018987341772152,
      "grad_norm": 0.5649210203680972,
      "learning_rate": 4.794085453589022e-05,
      "loss": 0.7074,
      "num_tokens": 597322872.0,
      "step": 570
    },
    {
      "epoch": 0.9098101265822784,
      "grad_norm": 0.4954711968927705,
      "learning_rate": 4.789137233640858e-05,
      "loss": 0.7057,
      "num_tokens": 602545993.0,
      "step": 575
    },
    {
      "epoch": 0.9177215189873418,
      "grad_norm": 0.45694982466683615,
      "learning_rate": 4.7841331841793646e-05,
      "loss": 0.7099,
      "num_tokens": 607788873.0,
      "step": 580
    },
    {
      "epoch": 0.9256329113924051,
      "grad_norm": 0.4331078260141868,
      "learning_rate": 4.7790734422103503e-05,
      "loss": 0.7074,
      "num_tokens": 613031753.0,
      "step": 585
    },
    {
      "epoch": 0.9335443037974683,
      "grad_norm": 0.6029099335069901,
      "learning_rate": 4.77395814626443e-05,
      "loss": 0.7132,
      "num_tokens": 618274069.0,
      "step": 590
    },
    {
      "epoch": 0.9414556962025317,
      "grad_norm": 0.3964222914078214,
      "learning_rate": 4.7687874363932277e-05,
      "loss": 0.7107,
      "num_tokens": 623516949.0,
      "step": 595
    },
    {
      "epoch": 0.9493670886075949,
      "grad_norm": 0.40557860921547184,
      "learning_rate": 4.763561454165546e-05,
      "loss": 0.7114,
      "num_tokens": 628759829.0,
      "step": 600
    },
    {
      "epoch": 0.9572784810126582,
      "grad_norm": 0.3911037173148,
      "learning_rate": 4.758280342663488e-05,
      "loss": 0.7011,
      "num_tokens": 634002709.0,
      "step": 605
    },
    {
      "epoch": 0.9651898734177216,
      "grad_norm": 0.5205226454815663,
      "learning_rate": 4.7529442464785404e-05,
      "loss": 0.7035,
      "num_tokens": 639245589.0,
      "step": 610
    },
    {
      "epoch": 0.9731012658227848,
      "grad_norm": 0.6081952511481521,
      "learning_rate": 4.747553311707616e-05,
      "loss": 0.7109,
      "num_tokens": 644488469.0,
      "step": 615
    },
    {
      "epoch": 0.9810126582278481,
      "grad_norm": 0.5230701333389108,
      "learning_rate": 4.742107685949052e-05,
      "loss": 0.7054,
      "num_tokens": 649731349.0,
      "step": 620
    },
    {
      "epoch": 0.9889240506329114,
      "grad_norm": 0.45553268701488436,
      "learning_rate": 4.736607518298568e-05,
      "loss": 0.7084,
      "num_tokens": 654971226.0,
      "step": 625
    },
    {
      "epoch": 0.9968354430379747,
      "grad_norm": 0.4252255621189171,
      "learning_rate": 4.731052959345188e-05,
      "loss": 0.698,
      "num_tokens": 660214106.0,
      "step": 630
    },
    {
      "epoch": 1.004746835443038,
      "grad_norm": 0.5213909939853125,
      "learning_rate": 4.725444161167113e-05,
      "loss": 0.6984,
      "num_tokens": 665456986.0,
      "step": 635
    },
    {
      "epoch": 1.0126582278481013,
      "grad_norm": 0.6092766382306241,
      "learning_rate": 4.71978127732756e-05,
      "loss": 0.6834,
      "num_tokens": 670699866.0,
      "step": 640
    },
    {
      "epoch": 1.0205696202531644,
      "grad_norm": 0.6016879209190829,
      "learning_rate": 4.714064462870556e-05,
      "loss": 0.6799,
      "num_tokens": 675941341.0,
      "step": 645
    },
    {
      "epoch": 1.0284810126582278,
      "grad_norm": 0.46770946063736324,
      "learning_rate": 4.708293874316693e-05,
      "loss": 0.6718,
      "num_tokens": 681184221.0,
      "step": 650
    },
    {
      "epoch": 1.0363924050632911,
      "grad_norm": 0.5193625042396812,
      "learning_rate": 4.702469669658845e-05,
      "loss": 0.6824,
      "num_tokens": 686427101.0,
      "step": 655
    },
    {
      "epoch": 1.0443037974683544,
      "grad_norm": 0.4310707796867463,
      "learning_rate": 4.6965920083578406e-05,
      "loss": 0.6814,
      "num_tokens": 691643199.0,
      "step": 660
    },
    {
      "epoch": 1.0522151898734178,
      "grad_norm": 0.46509456309972086,
      "learning_rate": 4.690661051338096e-05,
      "loss": 0.6783,
      "num_tokens": 696885395.0,
      "step": 665
    },
    {
      "epoch": 1.0601265822784811,
      "grad_norm": 0.4290977849490662,
      "learning_rate": 4.6846769609832106e-05,
      "loss": 0.6745,
      "num_tokens": 702099431.0,
      "step": 670
    },
    {
      "epoch": 1.0680379746835442,
      "grad_norm": 0.44562761654453537,
      "learning_rate": 4.6786399011315215e-05,
      "loss": 0.6708,
      "num_tokens": 707335241.0,
      "step": 675
    },
    {
      "epoch": 1.0759493670886076,
      "grad_norm": 0.4027263571632704,
      "learning_rate": 4.672550037071616e-05,
      "loss": 0.6742,
      "num_tokens": 712578121.0,
      "step": 680
    },
    {
      "epoch": 1.0838607594936709,
      "grad_norm": 0.4509630202842952,
      "learning_rate": 4.6664075355378064e-05,
      "loss": 0.6787,
      "num_tokens": 717813308.0,
      "step": 685
    },
    {
      "epoch": 1.0917721518987342,
      "grad_norm": 0.579102128781878,
      "learning_rate": 4.660212564705569e-05,
      "loss": 0.6827,
      "num_tokens": 723056188.0,
      "step": 690
    },
    {
      "epoch": 1.0996835443037976,
      "grad_norm": 0.4750686074161037,
      "learning_rate": 4.653965294186933e-05,
      "loss": 0.6846,
      "num_tokens": 728299068.0,
      "step": 695
    },
    {
      "epoch": 1.1075949367088607,
      "grad_norm": 0.3720445468877555,
      "learning_rate": 4.647665895025842e-05,
      "loss": 0.68,
      "num_tokens": 733541948.0,
      "step": 700
    },
    {
      "epoch": 1.115506329113924,
      "grad_norm": 0.41896058368158273,
      "learning_rate": 4.6413145396934677e-05,
      "loss": 0.6727,
      "num_tokens": 738784828.0,
      "step": 705
    },
    {
      "epoch": 1.1234177215189873,
      "grad_norm": 0.41709918206048263,
      "learning_rate": 4.634911402083491e-05,
      "loss": 0.683,
      "num_tokens": 744027708.0,
      "step": 710
    },
    {
      "epoch": 1.1313291139240507,
      "grad_norm": 0.44679889502370607,
      "learning_rate": 4.628456657507336e-05,
      "loss": 0.6745,
      "num_tokens": 749270588.0,
      "step": 715
    },
    {
      "epoch": 1.139240506329114,
      "grad_norm": 0.4049444543836899,
      "learning_rate": 4.6219504826893774e-05,
      "loss": 0.6754,
      "num_tokens": 754513468.0,
      "step": 720
    },
    {
      "epoch": 1.1471518987341773,
      "grad_norm": 0.46788618407855487,
      "learning_rate": 4.615393055762095e-05,
      "loss": 0.6705,
      "num_tokens": 759755275.0,
      "step": 725
    },
    {
      "epoch": 1.1550632911392404,
      "grad_norm": 0.45889187811298193,
      "learning_rate": 4.6087845562612e-05,
      "loss": 0.6775,
      "num_tokens": 764993238.0,
      "step": 730
    },
    {
      "epoch": 1.1629746835443038,
      "grad_norm": 0.5053059074244526,
      "learning_rate": 4.602125165120721e-05,
      "loss": 0.6739,
      "num_tokens": 770236118.0,
      "step": 735
    },
    {
      "epoch": 1.1708860759493671,
      "grad_norm": 0.5163271050928327,
      "learning_rate": 4.595415064668044e-05,
      "loss": 0.6768,
      "num_tokens": 775478998.0,
      "step": 740
    },
    {
      "epoch": 1.1787974683544304,
      "grad_norm": 0.4282870421961532,
      "learning_rate": 4.5886544386189286e-05,
      "loss": 0.6681,
      "num_tokens": 780721878.0,
      "step": 745
    },
    {
      "epoch": 1.1867088607594938,
      "grad_norm": 0.46026364834735695,
      "learning_rate": 4.581843472072472e-05,
      "loss": 0.677,
      "num_tokens": 785964758.0,
      "step": 750
    },
    {
      "epoch": 1.1946202531645569,
      "grad_norm": 0.3647491363050659,
      "learning_rate": 4.574982351506044e-05,
      "loss": 0.673,
      "num_tokens": 791207638.0,
      "step": 755
    },
    {
      "epoch": 1.2025316455696202,
      "grad_norm": 0.40334584668377854,
      "learning_rate": 4.568071264770179e-05,
      "loss": 0.6733,
      "num_tokens": 796450518.0,
      "step": 760
    },
    {
      "epoch": 1.2104430379746836,
      "grad_norm": 0.6172425933341248,
      "learning_rate": 4.5611104010834384e-05,
      "loss": 0.6714,
      "num_tokens": 801691627.0,
      "step": 765
    },
    {
      "epoch": 1.2183544303797469,
      "grad_norm": 0.5660318449559009,
      "learning_rate": 4.554099951027223e-05,
      "loss": 0.6695,
      "num_tokens": 806934507.0,
      "step": 770
    },
    {
      "epoch": 1.2262658227848102,
      "grad_norm": 0.5013160792670336,
      "learning_rate": 4.5470401065405574e-05,
      "loss": 0.6733,
      "num_tokens": 812177387.0,
      "step": 775
    },
    {
      "epoch": 1.2341772151898733,
      "grad_norm": 0.48633536466547755,
      "learning_rate": 4.539931060914841e-05,
      "loss": 0.6775,
      "num_tokens": 817420267.0,
      "step": 780
    },
    {
      "epoch": 1.2420886075949367,
      "grad_norm": 0.5345960534136472,
      "learning_rate": 4.532773008788542e-05,
      "loss": 0.6742,
      "num_tokens": 822663147.0,
      "step": 785
    },
    {
      "epoch": 1.25,
      "grad_norm": 0.46751637317248607,
      "learning_rate": 4.5255661461418854e-05,
      "loss": 0.6683,
      "num_tokens": 827902185.0,
      "step": 790
    },
    {
      "epoch": 1.2579113924050633,
      "grad_norm": 0.40210236140036404,
      "learning_rate": 4.5183106702914744e-05,
      "loss": 0.6684,
      "num_tokens": 833145065.0,
      "step": 795
    },
    {
      "epoch": 1.2658227848101267,
      "grad_norm": 0.40800636198331647,
      "learning_rate": 4.511006779884894e-05,
      "loss": 0.6717,
      "num_tokens": 838387945.0,
      "step": 800
    },
    {
      "epoch": 1.2737341772151898,
      "grad_norm": 0.39143626003055415,
      "learning_rate": 4.503654674895268e-05,
      "loss": 0.6729,
      "num_tokens": 843630825.0,
      "step": 805
    },
    {
      "epoch": 1.2816455696202531,
      "grad_norm": 0.4114368812178738,
      "learning_rate": 4.49625455661579e-05,
      "loss": 0.6662,
      "num_tokens": 848873705.0,
      "step": 810
    },
    {
      "epoch": 1.2895569620253164,
      "grad_norm": 0.3850979319103424,
      "learning_rate": 4.4888066276542076e-05,
      "loss": 0.6676,
      "num_tokens": 854116585.0,
      "step": 815
    },
    {
      "epoch": 1.2974683544303798,
      "grad_norm": 0.3942547463552602,
      "learning_rate": 4.481311091927278e-05,
      "loss": 0.6697,
      "num_tokens": 859340010.0,
      "step": 820
    },
    {
      "epoch": 1.3053797468354431,
      "grad_norm": 0.3827683736050918,
      "learning_rate": 4.47376815465518e-05,
      "loss": 0.6648,
      "num_tokens": 864560605.0,
      "step": 825
    },
    {
      "epoch": 1.3132911392405062,
      "grad_norm": 0.4613241812192372,
      "learning_rate": 4.466178022355902e-05,
      "loss": 0.6757,
      "num_tokens": 869803485.0,
      "step": 830
    },
    {
      "epoch": 1.3212025316455696,
      "grad_norm": 0.3407656518079187,
      "learning_rate": 4.458540902839582e-05,
      "loss": 0.6643,
      "num_tokens": 875046365.0,
      "step": 835
    },
    {
      "epoch": 1.3291139240506329,
      "grad_norm": 0.3433476064598642,
      "learning_rate": 4.450857005202823e-05,
      "loss": 0.6684,
      "num_tokens": 880272119.0,
      "step": 840
    },
    {
      "epoch": 1.3370253164556962,
      "grad_norm": 0.4072649317968132,
      "learning_rate": 4.443126539822962e-05,
      "loss": 0.6758,
      "num_tokens": 885514999.0,
      "step": 845
    },
    {
      "epoch": 1.3449367088607596,
      "grad_norm": 0.38388206068514286,
      "learning_rate": 4.435349718352319e-05,
      "loss": 0.6651,
      "num_tokens": 890751664.0,
      "step": 850
    },
    {
      "epoch": 1.3528481012658227,
      "grad_norm": 0.4906281471488268,
      "learning_rate": 4.427526753712392e-05,
      "loss": 0.6758,
      "num_tokens": 895978998.0,
      "step": 855
    },
    {
      "epoch": 1.360759493670886,
      "grad_norm": 0.5690161031589217,
      "learning_rate": 4.419657860088033e-05,
      "loss": 0.6661,
      "num_tokens": 901206430.0,
      "step": 860
    },
    {
      "epoch": 1.3686708860759493,
      "grad_norm": 0.5239364843573313,
      "learning_rate": 4.411743252921587e-05,
      "loss": 0.6711,
      "num_tokens": 906449310.0,
      "step": 865
    },
    {
      "epoch": 1.3765822784810127,
      "grad_norm": 0.4590626844046868,
      "learning_rate": 4.403783148906984e-05,
      "loss": 0.6742,
      "num_tokens": 911692190.0,
      "step": 870
    },
    {
      "epoch": 1.384493670886076,
      "grad_norm": 0.6369825573287001,
      "learning_rate": 4.3957777659838156e-05,
      "loss": 0.6691,
      "num_tokens": 916923938.0,
      "step": 875
    },
    {
      "epoch": 1.3924050632911391,
      "grad_norm": 0.5309325547971587,
      "learning_rate": 4.387727323331362e-05,
      "loss": 0.6671,
      "num_tokens": 922166818.0,
      "step": 880
    },
    {
      "epoch": 1.4003164556962027,
      "grad_norm": 0.4182221238855379,
      "learning_rate": 4.379632041362594e-05,
      "loss": 0.6673,
      "num_tokens": 927409698.0,
      "step": 885
    },
    {
      "epoch": 1.4082278481012658,
      "grad_norm": 0.37453984190952516,
      "learning_rate": 4.371492141718138e-05,
      "loss": 0.6669,
      "num_tokens": 932652578.0,
      "step": 890
    },
    {
      "epoch": 1.4161392405063291,
      "grad_norm": 0.4472314832066788,
      "learning_rate": 4.363307847260206e-05,
      "loss": 0.6609,
      "num_tokens": 937883810.0,
      "step": 895
    },
    {
      "epoch": 1.4240506329113924,
      "grad_norm": 0.40759290291866934,
      "learning_rate": 4.3550793820664965e-05,
      "loss": 0.6657,
      "num_tokens": 943113159.0,
      "step": 900
    },
    {
      "epoch": 1.4319620253164558,
      "grad_norm": 0.38119165043031206,
      "learning_rate": 4.3468069714240556e-05,
      "loss": 0.6674,
      "num_tokens": 948346815.0,
      "step": 905
    },
    {
      "epoch": 1.439873417721519,
      "grad_norm": 0.3408121507908347,
      "learning_rate": 4.3384908418231144e-05,
      "loss": 0.6649,
      "num_tokens": 953589695.0,
      "step": 910
    },
    {
      "epoch": 1.4477848101265822,
      "grad_norm": 0.3824255603451359,
      "learning_rate": 4.330131220950883e-05,
      "loss": 0.664,
      "num_tokens": 958831498.0,
      "step": 915
    },
    {
      "epoch": 1.4556962025316456,
      "grad_norm": 0.42035687659685655,
      "learning_rate": 4.321728337685318e-05,
      "loss": 0.667,
      "num_tokens": 964074378.0,
      "step": 920
    },
    {
      "epoch": 1.4636075949367089,
      "grad_norm": 0.45598858937412523,
      "learning_rate": 4.313282422088859e-05,
      "loss": 0.6685,
      "num_tokens": 969317258.0,
      "step": 925
    },
    {
      "epoch": 1.4715189873417722,
      "grad_norm": 0.44573187825703264,
      "learning_rate": 4.304793705402124e-05,
      "loss": 0.6613,
      "num_tokens": 974546034.0,
      "step": 930
    },
    {
      "epoch": 1.4794303797468356,
      "grad_norm": 0.3946235668573835,
      "learning_rate": 4.2962624200375835e-05,
      "loss": 0.6671,
      "num_tokens": 979788914.0,
      "step": 935
    },
    {
      "epoch": 1.4873417721518987,
      "grad_norm": 0.3228711597439979,
      "learning_rate": 4.287688799573195e-05,
      "loss": 0.6686,
      "num_tokens": 985031794.0,
      "step": 940
    },
    {
      "epoch": 1.495253164556962,
      "grad_norm": 0.3636228559548819,
      "learning_rate": 4.2790730787460096e-05,
      "loss": 0.6657,
      "num_tokens": 990255694.0,
      "step": 945
    },
    {
      "epoch": 1.5031645569620253,
      "grad_norm": 0.36277524743915257,
      "learning_rate": 4.270415493445739e-05,
      "loss": 0.6618,
      "num_tokens": 995498574.0,
      "step": 950
    },
    {
      "epoch": 1.5110759493670884,
      "grad_norm": 0.449377328776007,
      "learning_rate": 4.2617162807083084e-05,
      "loss": 0.664,
      "num_tokens": 1000741454.0,
      "step": 955
    },
    {
      "epoch": 1.518987341772152,
      "grad_norm": 0.5226848678404952,
      "learning_rate": 4.252975678709354e-05,
      "loss": 0.664,
      "num_tokens": 1005981331.0,
      "step": 960
    },
    {
      "epoch": 1.5268987341772151,
      "grad_norm": 0.426961497295756,
      "learning_rate": 4.244193926757713e-05,
      "loss": 0.6672,
      "num_tokens": 1011224211.0,
      "step": 965
    },
    {
      "epoch": 1.5348101265822784,
      "grad_norm": 0.3336409027569924,
      "learning_rate": 4.235371265288864e-05,
      "loss": 0.6681,
      "num_tokens": 1016467091.0,
      "step": 970
    },
    {
      "epoch": 1.5427215189873418,
      "grad_norm": 0.4036839066490488,
      "learning_rate": 4.2265079358583455e-05,
      "loss": 0.6643,
      "num_tokens": 1021695784.0,
      "step": 975
    },
    {
      "epoch": 1.5506329113924051,
      "grad_norm": 0.7140710792273346,
      "learning_rate": 4.217604181135147e-05,
      "loss": 0.6697,
      "num_tokens": 1026938664.0,
      "step": 980
    },
    {
      "epoch": 1.5585443037974684,
      "grad_norm": 0.5962932136354121,
      "learning_rate": 4.20866024489506e-05,
      "loss": 0.6636,
      "num_tokens": 1032181544.0,
      "step": 985
    },
    {
      "epoch": 1.5664556962025316,
      "grad_norm": 0.5399117523136845,
      "learning_rate": 4.199676372014004e-05,
      "loss": 0.6642,
      "num_tokens": 1037424424.0,
      "step": 990
    },
    {
      "epoch": 1.5743670886075949,
      "grad_norm": 0.5727471985317703,
      "learning_rate": 4.190652808461326e-05,
      "loss": 0.6673,
      "num_tokens": 1042667304.0,
      "step": 995
    },
    {
      "epoch": 1.5822784810126582,
      "grad_norm": 0.4218917245287635,
      "learning_rate": 4.1815898012930603e-05,
      "loss": 0.6597,
      "num_tokens": 1047910184.0,
      "step": 1000
    },
    {
      "epoch": 1.5901898734177216,
      "grad_norm": 0.3919982695655947,
      "learning_rate": 4.172487598645171e-05,
      "loss": 0.6587,
      "num_tokens": 1053153064.0,
      "step": 1005
    },
    {
      "epoch": 1.5981012658227849,
      "grad_norm": 0.35433285883968835,
      "learning_rate": 4.163346449726752e-05,
      "loss": 0.6658,
      "num_tokens": 1058395944.0,
      "step": 1010
    },
    {
      "epoch": 1.606012658227848,
      "grad_norm": 0.40624024878352494,
      "learning_rate": 4.154166604813206e-05,
      "loss": 0.661,
      "num_tokens": 1063638824.0,
      "step": 1015
    },
    {
      "epoch": 1.6139240506329116,
      "grad_norm": 0.45360696653600874,
      "learning_rate": 4.144948315239397e-05,
      "loss": 0.6636,
      "num_tokens": 1068880391.0,
      "step": 1020
    },
    {
      "epoch": 1.6218354430379747,
      "grad_norm": 0.528510428398564,
      "learning_rate": 4.13569183339276e-05,
      "loss": 0.6561,
      "num_tokens": 1074123271.0,
      "step": 1025
    },
    {
      "epoch": 1.629746835443038,
      "grad_norm": 0.435117205930865,
      "learning_rate": 4.1263974127063994e-05,
      "loss": 0.6572,
      "num_tokens": 1079366151.0,
      "step": 1030
    },
    {
      "epoch": 1.6376582278481013,
      "grad_norm": 0.35010239698525997,
      "learning_rate": 4.117065307652145e-05,
      "loss": 0.6675,
      "num_tokens": 1084603567.0,
      "step": 1035
    },
    {
      "epoch": 1.6455696202531644,
      "grad_norm": 0.34340761605514514,
      "learning_rate": 4.107695773733586e-05,
      "loss": 0.6634,
      "num_tokens": 1089846447.0,
      "step": 1040
    },
    {
      "epoch": 1.653481012658228,
      "grad_norm": 0.3800590366717773,
      "learning_rate": 4.098289067479077e-05,
      "loss": 0.6529,
      "num_tokens": 1095089327.0,
      "step": 1045
    },
    {
      "epoch": 1.6613924050632911,
      "grad_norm": 0.5301647765231198,
      "learning_rate": 4.0888454464347156e-05,
      "loss": 0.6608,
      "num_tokens": 1100328290.0,
      "step": 1050
    },
    {
      "epoch": 1.6693037974683544,
      "grad_norm": 0.40371596065925885,
      "learning_rate": 4.079365169157283e-05,
      "loss": 0.6596,
      "num_tokens": 1105565945.0,
      "step": 1055
    },
    {
      "epoch": 1.6772151898734178,
      "grad_norm": 0.44878584304658214,
      "learning_rate": 4.069848495207176e-05,
      "loss": 0.6584,
      "num_tokens": 1110808825.0,
      "step": 1060
    },
    {
      "epoch": 1.685126582278481,
      "grad_norm": 0.42760590310826146,
      "learning_rate": 4.060295685141295e-05,
      "loss": 0.6601,
      "num_tokens": 1116051705.0,
      "step": 1065
    },
    {
      "epoch": 1.6930379746835444,
      "grad_norm": 0.42094155594543997,
      "learning_rate": 4.0507070005059086e-05,
      "loss": 0.6636,
      "num_tokens": 1121294585.0,
      "step": 1070
    },
    {
      "epoch": 1.7009493670886076,
      "grad_norm": 0.366450602326517,
      "learning_rate": 4.0410827038294966e-05,
      "loss": 0.6565,
      "num_tokens": 1126537465.0,
      "step": 1075
    },
    {
      "epoch": 1.7088607594936709,
      "grad_norm": 0.3382264432193073,
      "learning_rate": 4.031423058615559e-05,
      "loss": 0.6637,
      "num_tokens": 1131778529.0,
      "step": 1080
    },
    {
      "epoch": 1.7167721518987342,
      "grad_norm": 0.4135053453091331,
      "learning_rate": 4.0217283293354044e-05,
      "loss": 0.6598,
      "num_tokens": 1137021409.0,
      "step": 1085
    },
    {
      "epoch": 1.7246835443037973,
      "grad_norm": 0.32789223717337174,
      "learning_rate": 4.011998781420907e-05,
      "loss": 0.66,
      "num_tokens": 1142264289.0,
      "step": 1090
    },
    {
      "epoch": 1.7325949367088609,
      "grad_norm": 0.37159495208450527,
      "learning_rate": 4.002234681257239e-05,
      "loss": 0.6634,
      "num_tokens": 1147488495.0,
      "step": 1095
    },
    {
      "epoch": 1.740506329113924,
      "grad_norm": 0.41706053572545077,
      "learning_rate": 3.992436296175581e-05,
      "loss": 0.6554,
      "num_tokens": 1152731375.0,
      "step": 1100
    },
    {
      "epoch": 1.7484177215189873,
      "grad_norm": 0.3410031339365452,
      "learning_rate": 3.982603894445796e-05,
      "loss": 0.6574,
      "num_tokens": 1157974255.0,
      "step": 1105
    },
    {
      "epoch": 1.7563291139240507,
      "grad_norm": 0.40387170849869736,
      "learning_rate": 3.97273774526909e-05,
      "loss": 0.6518,
      "num_tokens": 1163196277.0,
      "step": 1110
    },
    {
      "epoch": 1.7642405063291138,
      "grad_norm": 0.41494353564073877,
      "learning_rate": 3.962838118770643e-05,
      "loss": 0.6521,
      "num_tokens": 1168438593.0,
      "step": 1115
    },
    {
      "epoch": 1.7721518987341773,
      "grad_norm": 0.33845287541155555,
      "learning_rate": 3.952905285992206e-05,
      "loss": 0.6636,
      "num_tokens": 1173674462.0,
      "step": 1120
    },
    {
      "epoch": 1.7800632911392404,
      "grad_norm": 0.3616932835037642,
      "learning_rate": 3.942939518884686e-05,
      "loss": 0.6566,
      "num_tokens": 1178914750.0,
      "step": 1125
    },
    {
      "epoch": 1.7879746835443038,
      "grad_norm": 0.38849401008078355,
      "learning_rate": 3.932941090300699e-05,
      "loss": 0.6546,
      "num_tokens": 1184150094.0,
      "step": 1130
    },
    {
      "epoch": 1.7958860759493671,
      "grad_norm": 0.42281182227916236,
      "learning_rate": 3.922910273987098e-05,
      "loss": 0.6548,
      "num_tokens": 1189392974.0,
      "step": 1135
    },
    {
      "epoch": 1.8037974683544302,
      "grad_norm": 0.4210942734836147,
      "learning_rate": 3.912847344577481e-05,
      "loss": 0.6555,
      "num_tokens": 1194635854.0,
      "step": 1140
    },
    {
      "epoch": 1.8117088607594938,
      "grad_norm": 0.5474931691520276,
      "learning_rate": 3.9027525775846666e-05,
      "loss": 0.6533,
      "num_tokens": 1199878734.0,
      "step": 1145
    },
    {
      "epoch": 1.8196202531645569,
      "grad_norm": 0.3732844050319297,
      "learning_rate": 3.892626249393159e-05,
      "loss": 0.6533,
      "num_tokens": 1205121614.0,
      "step": 1150
    },
    {
      "epoch": 1.8275316455696202,
      "grad_norm": 0.45982960737179485,
      "learning_rate": 3.882468637251573e-05,
      "loss": 0.658,
      "num_tokens": 1210364494.0,
      "step": 1155
    },
    {
      "epoch": 1.8354430379746836,
      "grad_norm": 0.3489396715842175,
      "learning_rate": 3.872280019265046e-05,
      "loss": 0.6583,
      "num_tokens": 1215607374.0,
      "step": 1160
    },
    {
      "epoch": 1.8433544303797469,
      "grad_norm": 0.35751783832753387,
      "learning_rate": 3.8620606743876264e-05,
      "loss": 0.6522,
      "num_tokens": 1220843801.0,
      "step": 1165
    },
    {
      "epoch": 1.8512658227848102,
      "grad_norm": 0.4188675688211132,
      "learning_rate": 3.851810882414632e-05,
      "loss": 0.6497,
      "num_tokens": 1226086681.0,
      "step": 1170
    },
    {
      "epoch": 1.8591772151898733,
      "grad_norm": 0.3704256158964576,
      "learning_rate": 3.841530923974991e-05,
      "loss": 0.6542,
      "num_tokens": 1231329561.0,
      "step": 1175
    },
    {
      "epoch": 1.8670886075949367,
      "grad_norm": 0.35739760458738973,
      "learning_rate": 3.83122108052356e-05,
      "loss": 0.6558,
      "num_tokens": 1236572441.0,
      "step": 1180
    },
    {
      "epoch": 1.875,
      "grad_norm": 0.4108797442075266,
      "learning_rate": 3.8208816343334156e-05,
      "loss": 0.6503,
      "num_tokens": 1241815321.0,
      "step": 1185
    },
    {
      "epoch": 1.8829113924050633,
      "grad_norm": 0.3989382930649431,
      "learning_rate": 3.810512868488129e-05,
      "loss": 0.6567,
      "num_tokens": 1247058201.0,
      "step": 1190
    },
    {
      "epoch": 1.8908227848101267,
      "grad_norm": 0.36123580918393156,
      "learning_rate": 3.800115066874014e-05,
      "loss": 0.6539,
      "num_tokens": 1252301081.0,
      "step": 1195
    },
    {
      "epoch": 1.8987341772151898,
      "grad_norm": 0.3372531720914223,
      "learning_rate": 3.789688514172353e-05,
      "loss": 0.6473,
      "num_tokens": 1257543961.0,
      "step": 1200
    },
    {
      "epoch": 1.9066455696202531,
      "grad_norm": 0.3512013159202198,
      "learning_rate": 3.779233495851604e-05,
      "loss": 0.6581,
      "num_tokens": 1262786841.0,
      "step": 1205
    },
    {
      "epoch": 1.9145569620253164,
      "grad_norm": 0.3542753269510345,
      "learning_rate": 3.768750298159587e-05,
      "loss": 0.6445,
      "num_tokens": 1268029721.0,
      "step": 1210
    },
    {
      "epoch": 1.9224683544303798,
      "grad_norm": 0.4031550450820841,
      "learning_rate": 3.75823920811564e-05,
      "loss": 0.6508,
      "num_tokens": 1273258360.0,
      "step": 1215
    },
    {
      "epoch": 1.9303797468354431,
      "grad_norm": 0.4403211137259473,
      "learning_rate": 3.747700513502772e-05,
      "loss": 0.6573,
      "num_tokens": 1278501240.0,
      "step": 1220
    },
    {
      "epoch": 1.9382911392405062,
      "grad_norm": 0.4812191269085378,
      "learning_rate": 3.737134502859772e-05,
      "loss": 0.6536,
      "num_tokens": 1283744120.0,
      "step": 1225
    },
    {
      "epoch": 1.9462025316455698,
      "grad_norm": 0.39299909706187625,
      "learning_rate": 3.726541465473317e-05,
      "loss": 0.649,
      "num_tokens": 1288987000.0,
      "step": 1230
    },
    {
      "epoch": 1.9541139240506329,
      "grad_norm": 0.39824695051848524,
      "learning_rate": 3.7159216913700456e-05,
      "loss": 0.6478,
      "num_tokens": 1294229880.0,
      "step": 1235
    },
    {
      "epoch": 1.9620253164556962,
      "grad_norm": 0.42039758141791633,
      "learning_rate": 3.7052754713086246e-05,
      "loss": 0.6534,
      "num_tokens": 1299472760.0,
      "step": 1240
    },
    {
      "epoch": 1.9699367088607596,
      "grad_norm": 0.3960672982967281,
      "learning_rate": 3.694603096771781e-05,
      "loss": 0.6568,
      "num_tokens": 1304715640.0,
      "step": 1245
    },
    {
      "epoch": 1.9778481012658227,
      "grad_norm": 0.3747599520232236,
      "learning_rate": 3.683904859958329e-05,
      "loss": 0.651,
      "num_tokens": 1309942452.0,
      "step": 1250
    },
    {
      "epoch": 1.9857594936708862,
      "grad_norm": 0.34963602368732627,
      "learning_rate": 3.673181053775162e-05,
      "loss": 0.6591,
      "num_tokens": 1315185332.0,
      "step": 1255
    },
    {
      "epoch": 1.9936708860759493,
      "grad_norm": 0.3336233742912806,
      "learning_rate": 3.662431971829237e-05,
      "loss": 0.6567,
      "num_tokens": 1320428212.0,
      "step": 1260
    },
    {
      "epoch": 2.0015822784810124,
      "grad_norm": 0.34626223476299406,
      "learning_rate": 3.651657908419537e-05,
      "loss": 0.6528,
      "num_tokens": 1325656988.0,
      "step": 1265
    },
    {
      "epoch": 2.009493670886076,
      "grad_norm": 0.3739591180690111,
      "learning_rate": 3.640859158529014e-05,
      "loss": 0.6228,
      "num_tokens": 1330899868.0,
      "step": 1270
    },
    {
      "epoch": 2.017405063291139,
      "grad_norm": 0.36163026913569957,
      "learning_rate": 3.6300360178165065e-05,
      "loss": 0.6198,
      "num_tokens": 1336142748.0,
      "step": 1275
    },
    {
      "epoch": 2.0253164556962027,
      "grad_norm": 0.3413456124109847,
      "learning_rate": 3.619188782608653e-05,
      "loss": 0.6249,
      "num_tokens": 1341371387.0,
      "step": 1280
    },
    {
      "epoch": 2.0332278481012658,
      "grad_norm": 0.41603416587113357,
      "learning_rate": 3.6083177498917745e-05,
      "loss": 0.6137,
      "num_tokens": 1346614267.0,
      "step": 1285
    },
    {
      "epoch": 2.041139240506329,
      "grad_norm": 0.39514773706497214,
      "learning_rate": 3.5974232173037385e-05,
      "loss": 0.6292,
      "num_tokens": 1351857147.0,
      "step": 1290
    },
    {
      "epoch": 2.0490506329113924,
      "grad_norm": 0.3949477431561823,
      "learning_rate": 3.586505483125823e-05,
      "loss": 0.6204,
      "num_tokens": 1357100027.0,
      "step": 1295
    },
    {
      "epoch": 2.0569620253164556,
      "grad_norm": 0.324048846717354,
      "learning_rate": 3.5755648462745366e-05,
      "loss": 0.6263,
      "num_tokens": 1362342907.0,
      "step": 1300
    },
    {
      "epoch": 2.064873417721519,
      "grad_norm": 0.3478785681280589,
      "learning_rate": 3.5646016062934413e-05,
      "loss": 0.6216,
      "num_tokens": 1367576563.0,
      "step": 1305
    },
    {
      "epoch": 2.0727848101265822,
      "grad_norm": 0.38436773833943666,
      "learning_rate": 3.553616063344951e-05,
      "loss": 0.6255,
      "num_tokens": 1372800486.0,
      "step": 1310
    },
    {
      "epoch": 2.0806962025316458,
      "grad_norm": 0.38048821813178435,
      "learning_rate": 3.5426085182021114e-05,
      "loss": 0.6208,
      "num_tokens": 1378043366.0,
      "step": 1315
    },
    {
      "epoch": 2.088607594936709,
      "grad_norm": 0.3868214542706034,
      "learning_rate": 3.531579272240366e-05,
      "loss": 0.6235,
      "num_tokens": 1383286246.0,
      "step": 1320
    },
    {
      "epoch": 2.096518987341772,
      "grad_norm": 0.29924119376240554,
      "learning_rate": 3.520528627429304e-05,
      "loss": 0.616,
      "num_tokens": 1388529126.0,
      "step": 1325
    },
    {
      "epoch": 2.1044303797468356,
      "grad_norm": 0.30143694906012813,
      "learning_rate": 3.509456886324395e-05,
      "loss": 0.6228,
      "num_tokens": 1393772006.0,
      "step": 1330
    },
    {
      "epoch": 2.1123417721518987,
      "grad_norm": 0.3430834549488217,
      "learning_rate": 3.498364352058703e-05,
      "loss": 0.6231,
      "num_tokens": 1399014886.0,
      "step": 1335
    },
    {
      "epoch": 2.1202531645569622,
      "grad_norm": 0.3918758750528519,
      "learning_rate": 3.487251328334588e-05,
      "loss": 0.6184,
      "num_tokens": 1404254509.0,
      "step": 1340
    },
    {
      "epoch": 2.1281645569620253,
      "grad_norm": 0.3860740822731293,
      "learning_rate": 3.47611811941539e-05,
      "loss": 0.6223,
      "num_tokens": 1409497389.0,
      "step": 1345
    },
    {
      "epoch": 2.1360759493670884,
      "grad_norm": 0.30507806139472327,
      "learning_rate": 3.464965030117099e-05,
      "loss": 0.6227,
      "num_tokens": 1414740269.0,
      "step": 1350
    },
    {
      "epoch": 2.143987341772152,
      "grad_norm": 0.31931935671102873,
      "learning_rate": 3.453792365800011e-05,
      "loss": 0.6213,
      "num_tokens": 1419976138.0,
      "step": 1355
    },
    {
      "epoch": 2.151898734177215,
      "grad_norm": 0.4296009660896887,
      "learning_rate": 3.4426004323603655e-05,
      "loss": 0.6195,
      "num_tokens": 1425219018.0,
      "step": 1360
    },
    {
      "epoch": 2.1598101265822787,
      "grad_norm": 0.36080348698318637,
      "learning_rate": 3.4313895362219704e-05,
      "loss": 0.6321,
      "num_tokens": 1430461898.0,
      "step": 1365
    },
    {
      "epoch": 2.1677215189873418,
      "grad_norm": 0.31266622534218946,
      "learning_rate": 3.420159984327814e-05,
      "loss": 0.6329,
      "num_tokens": 1435704778.0,
      "step": 1370
    },
    {
      "epoch": 2.175632911392405,
      "grad_norm": 0.29368200019966956,
      "learning_rate": 3.40891208413166e-05,
      "loss": 0.622,
      "num_tokens": 1440941205.0,
      "step": 1375
    },
    {
      "epoch": 2.1835443037974684,
      "grad_norm": 0.3318857805197203,
      "learning_rate": 3.397646143589629e-05,
      "loss": 0.6218,
      "num_tokens": 1446182176.0,
      "step": 1380
    },
    {
      "epoch": 2.1914556962025316,
      "grad_norm": 0.3469231890169258,
      "learning_rate": 3.38636247115177e-05,
      "loss": 0.6241,
      "num_tokens": 1451425056.0,
      "step": 1385
    },
    {
      "epoch": 2.199367088607595,
      "grad_norm": 0.3555477367643954,
      "learning_rate": 3.3750613757536134e-05,
      "loss": 0.6237,
      "num_tokens": 1456667936.0,
      "step": 1390
    },
    {
      "epoch": 2.2072784810126582,
      "grad_norm": 0.4308050328882218,
      "learning_rate": 3.36374316680771e-05,
      "loss": 0.6269,
      "num_tokens": 1461909000.0,
      "step": 1395
    },
    {
      "epoch": 2.2151898734177213,
      "grad_norm": 0.35870973114272886,
      "learning_rate": 3.3524081541951644e-05,
      "loss": 0.6222,
      "num_tokens": 1467150803.0,
      "step": 1400
    },
    {
      "epoch": 2.223101265822785,
      "grad_norm": 0.3606193086420081,
      "learning_rate": 3.3410566482571484e-05,
      "loss": 0.625,
      "num_tokens": 1472393683.0,
      "step": 1405
    },
    {
      "epoch": 2.231012658227848,
      "grad_norm": 0.3173225301332037,
      "learning_rate": 3.329688959786403e-05,
      "loss": 0.6225,
      "num_tokens": 1477635158.0,
      "step": 1410
    },
    {
      "epoch": 2.2389240506329116,
      "grad_norm": 0.3691990634713518,
      "learning_rate": 3.318305400018732e-05,
      "loss": 0.6231,
      "num_tokens": 1482878038.0,
      "step": 1415
    },
    {
      "epoch": 2.2468354430379747,
      "grad_norm": 0.4172329830772149,
      "learning_rate": 3.306906280624479e-05,
      "loss": 0.6286,
      "num_tokens": 1488105187.0,
      "step": 1420
    },
    {
      "epoch": 2.254746835443038,
      "grad_norm": 0.3279503525329187,
      "learning_rate": 3.295491913699994e-05,
      "loss": 0.6233,
      "num_tokens": 1493346994.0,
      "step": 1425
    },
    {
      "epoch": 2.2626582278481013,
      "grad_norm": 0.3632060308412657,
| "learning_rate": 3.284062611759089e-05, | |
| "loss": 0.6226, | |
| "num_tokens": 1498589874.0, | |
| "step": 1430 | |
| }, | |
| { | |
| "epoch": 2.2705696202531644, | |
| "grad_norm": 0.46252170164225265, | |
| "learning_rate": 3.272618687724481e-05, | |
| "loss": 0.6225, | |
| "num_tokens": 1503832754.0, | |
| "step": 1435 | |
| }, | |
| { | |
| "epoch": 2.278481012658228, | |
| "grad_norm": 0.3317711593176921, | |
| "learning_rate": 3.261160454919228e-05, | |
| "loss": 0.6236, | |
| "num_tokens": 1509075634.0, | |
| "step": 1440 | |
| }, | |
| { | |
| "epoch": 2.286392405063291, | |
| "grad_norm": 0.3470857725220561, | |
| "learning_rate": 3.249688227058145e-05, | |
| "loss": 0.6273, | |
| "num_tokens": 1514318514.0, | |
| "step": 1445 | |
| }, | |
| { | |
| "epoch": 2.2943037974683547, | |
| "grad_norm": 0.29280649749694565, | |
| "learning_rate": 3.238202318239216e-05, | |
| "loss": 0.6304, | |
| "num_tokens": 1519551983.0, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 2.3022151898734178, | |
| "grad_norm": 0.37241388988066654, | |
| "learning_rate": 3.226703042935e-05, | |
| "loss": 0.6184, | |
| "num_tokens": 1524794863.0, | |
| "step": 1455 | |
| }, | |
| { | |
| "epoch": 2.310126582278481, | |
| "grad_norm": 0.33976317790538424, | |
| "learning_rate": 3.215190715984013e-05, | |
| "loss": 0.622, | |
| "num_tokens": 1530031528.0, | |
| "step": 1460 | |
| }, | |
| { | |
| "epoch": 2.3180379746835444, | |
| "grad_norm": 0.29066911940116874, | |
| "learning_rate": 3.2036656525821144e-05, | |
| "loss": 0.6231, | |
| "num_tokens": 1535274408.0, | |
| "step": 1465 | |
| }, | |
| { | |
| "epoch": 2.3259493670886076, | |
| "grad_norm": 0.28162811956261474, | |
| "learning_rate": 3.1921281682738734e-05, | |
| "loss": 0.6182, | |
| "num_tokens": 1540517288.0, | |
| "step": 1470 | |
| }, | |
| { | |
| "epoch": 2.333860759493671, | |
| "grad_norm": 0.3524371457891882, | |
| "learning_rate": 3.180578578943933e-05, | |
| "loss": 0.6294, | |
| "num_tokens": 1545748994.0, | |
| "step": 1475 | |
| }, | |
| { | |
| "epoch": 2.3417721518987342, | |
| "grad_norm": 0.3468858213633566, | |
| "learning_rate": 3.1690172008083554e-05, | |
| "loss": 0.619, | |
| "num_tokens": 1550991874.0, | |
| "step": 1480 | |
| }, | |
| { | |
| "epoch": 2.3496835443037973, | |
| "grad_norm": 0.4224520445380759, | |
| "learning_rate": 3.157444350405972e-05, | |
| "loss": 0.6257, | |
| "num_tokens": 1556223622.0, | |
| "step": 1485 | |
| }, | |
| { | |
| "epoch": 2.357594936708861, | |
| "grad_norm": 0.3754485859450013, | |
| "learning_rate": 3.1458603445897136e-05, | |
| "loss": 0.6255, | |
| "num_tokens": 1561439202.0, | |
| "step": 1490 | |
| }, | |
| { | |
| "epoch": 2.365506329113924, | |
| "grad_norm": 0.3607328127982716, | |
| "learning_rate": 3.134265500517932e-05, | |
| "loss": 0.6184, | |
| "num_tokens": 1566661187.0, | |
| "step": 1495 | |
| }, | |
| { | |
| "epoch": 2.3734177215189876, | |
| "grad_norm": 0.3016702669665355, | |
| "learning_rate": 3.122660135645721e-05, | |
| "loss": 0.6226, | |
| "num_tokens": 1571904067.0, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 2.3813291139240507, | |
| "grad_norm": 0.3397158516715143, | |
| "learning_rate": 3.111044567716223e-05, | |
| "loss": 0.6237, | |
| "num_tokens": 1577146947.0, | |
| "step": 1505 | |
| }, | |
| { | |
| "epoch": 2.3892405063291138, | |
| "grad_norm": 0.34056042132518993, | |
| "learning_rate": 3.0994191147519296e-05, | |
| "loss": 0.6205, | |
| "num_tokens": 1582389827.0, | |
| "step": 1510 | |
| }, | |
| { | |
| "epoch": 2.3971518987341773, | |
| "grad_norm": 0.34821134798667835, | |
| "learning_rate": 3.0877840950459763e-05, | |
| "loss": 0.6217, | |
| "num_tokens": 1587632707.0, | |
| "step": 1515 | |
| }, | |
| { | |
| "epoch": 2.4050632911392404, | |
| "grad_norm": 0.29071305472990666, | |
| "learning_rate": 3.076139827153425e-05, | |
| "loss": 0.6212, | |
| "num_tokens": 1592875587.0, | |
| "step": 1520 | |
| }, | |
| { | |
| "epoch": 2.412974683544304, | |
| "grad_norm": 0.3213307109034431, | |
| "learning_rate": 3.064486629882543e-05, | |
| "loss": 0.6261, | |
| "num_tokens": 1598118467.0, | |
| "step": 1525 | |
| }, | |
| { | |
| "epoch": 2.420886075949367, | |
| "grad_norm": 0.317409439920391, | |
| "learning_rate": 3.052824822286075e-05, | |
| "loss": 0.6192, | |
| "num_tokens": 1603361347.0, | |
| "step": 1530 | |
| }, | |
| { | |
| "epoch": 2.4287974683544302, | |
| "grad_norm": 0.31888869908174583, | |
| "learning_rate": 3.041154723652509e-05, | |
| "loss": 0.6208, | |
| "num_tokens": 1608604227.0, | |
| "step": 1535 | |
| }, | |
| { | |
| "epoch": 2.4367088607594938, | |
| "grad_norm": 0.2946434998719226, | |
| "learning_rate": 3.0294766534973335e-05, | |
| "loss": 0.6291, | |
| "num_tokens": 1613847107.0, | |
| "step": 1540 | |
| }, | |
| { | |
| "epoch": 2.444620253164557, | |
| "grad_norm": 0.34443852085875215, | |
| "learning_rate": 3.0177909315542862e-05, | |
| "loss": 0.6256, | |
| "num_tokens": 1619089987.0, | |
| "step": 1545 | |
| }, | |
| { | |
| "epoch": 2.4525316455696204, | |
| "grad_norm": 0.35106351147093817, | |
| "learning_rate": 3.0060978777666054e-05, | |
| "loss": 0.6217, | |
| "num_tokens": 1624332867.0, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 2.4604430379746836, | |
| "grad_norm": 0.31920875041898267, | |
| "learning_rate": 2.994397812278269e-05, | |
| "loss": 0.6229, | |
| "num_tokens": 1629575747.0, | |
| "step": 1555 | |
| }, | |
| { | |
| "epoch": 2.4683544303797467, | |
| "grad_norm": 0.3877394978298516, | |
| "learning_rate": 2.9826910554252253e-05, | |
| "loss": 0.6272, | |
| "num_tokens": 1634818627.0, | |
| "step": 1560 | |
| }, | |
| { | |
| "epoch": 2.4762658227848102, | |
| "grad_norm": 0.31347418712210595, | |
| "learning_rate": 2.9709779277266258e-05, | |
| "loss": 0.6276, | |
| "num_tokens": 1640061507.0, | |
| "step": 1565 | |
| }, | |
| { | |
| "epoch": 2.4841772151898733, | |
| "grad_norm": 0.3663502650533136, | |
| "learning_rate": 2.959258749876052e-05, | |
| "loss": 0.6228, | |
| "num_tokens": 1645292739.0, | |
| "step": 1570 | |
| }, | |
| { | |
| "epoch": 2.492088607594937, | |
| "grad_norm": 0.4369820023540263, | |
| "learning_rate": 2.9475338427327298e-05, | |
| "loss": 0.622, | |
| "num_tokens": 1650520838.0, | |
| "step": 1575 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "grad_norm": 0.38676813237014673, | |
| "learning_rate": 2.9358035273127483e-05, | |
| "loss": 0.6269, | |
| "num_tokens": 1655763718.0, | |
| "step": 1580 | |
| }, | |
| { | |
| "epoch": 2.507911392405063, | |
| "grad_norm": 0.3206842281049778, | |
| "learning_rate": 2.924068124780269e-05, | |
| "loss": 0.625, | |
| "num_tokens": 1661006598.0, | |
| "step": 1585 | |
| }, | |
| { | |
| "epoch": 2.5158227848101267, | |
| "grad_norm": 0.3173236770874382, | |
| "learning_rate": 2.9123279564387355e-05, | |
| "loss": 0.6134, | |
| "num_tokens": 1666249478.0, | |
| "step": 1590 | |
| }, | |
| { | |
| "epoch": 2.5237341772151898, | |
| "grad_norm": 0.32043576387698464, | |
| "learning_rate": 2.900583343722072e-05, | |
| "loss": 0.6215, | |
| "num_tokens": 1671492358.0, | |
| "step": 1595 | |
| }, | |
| { | |
| "epoch": 2.5316455696202533, | |
| "grad_norm": 0.34279988720816984, | |
| "learning_rate": 2.8888346081858858e-05, | |
| "loss": 0.6176, | |
| "num_tokens": 1676735238.0, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 2.5395569620253164, | |
| "grad_norm": 0.3445942670476214, | |
| "learning_rate": 2.8770820714986647e-05, | |
| "loss": 0.6256, | |
| "num_tokens": 1681960978.0, | |
| "step": 1605 | |
| }, | |
| { | |
| "epoch": 2.5474683544303796, | |
| "grad_norm": 0.3249239294351048, | |
| "learning_rate": 2.8653260554329643e-05, | |
| "loss": 0.6229, | |
| "num_tokens": 1687203858.0, | |
| "step": 1610 | |
| }, | |
| { | |
| "epoch": 2.555379746835443, | |
| "grad_norm": 0.4195352553077635, | |
| "learning_rate": 2.8535668818566065e-05, | |
| "loss": 0.6209, | |
| "num_tokens": 1692446738.0, | |
| "step": 1615 | |
| }, | |
| { | |
| "epoch": 2.5632911392405062, | |
| "grad_norm": 0.3148180466101991, | |
| "learning_rate": 2.8418048727238585e-05, | |
| "loss": 0.6243, | |
| "num_tokens": 1697685701.0, | |
| "step": 1620 | |
| }, | |
| { | |
| "epoch": 2.5712025316455698, | |
| "grad_norm": 0.3145292653905644, | |
| "learning_rate": 2.8300403500666256e-05, | |
| "loss": 0.6118, | |
| "num_tokens": 1702928581.0, | |
| "step": 1625 | |
| }, | |
| { | |
| "epoch": 2.579113924050633, | |
| "grad_norm": 0.34661533020564694, | |
| "learning_rate": 2.81827363598563e-05, | |
| "loss": 0.6209, | |
| "num_tokens": 1708171461.0, | |
| "step": 1630 | |
| }, | |
| { | |
| "epoch": 2.587025316455696, | |
| "grad_norm": 0.32721458461346575, | |
| "learning_rate": 2.8065050526415916e-05, | |
| "loss": 0.6231, | |
| "num_tokens": 1713414341.0, | |
| "step": 1635 | |
| }, | |
| { | |
| "epoch": 2.5949367088607596, | |
| "grad_norm": 0.3293080910499085, | |
| "learning_rate": 2.794734922246408e-05, | |
| "loss": 0.6227, | |
| "num_tokens": 1718657221.0, | |
| "step": 1640 | |
| }, | |
| { | |
| "epoch": 2.6028481012658227, | |
| "grad_norm": 0.3006610821739955, | |
| "learning_rate": 2.7829635670543375e-05, | |
| "loss": 0.6198, | |
| "num_tokens": 1723897509.0, | |
| "step": 1645 | |
| }, | |
| { | |
| "epoch": 2.6107594936708862, | |
| "grad_norm": 0.3221265574540506, | |
| "learning_rate": 2.7711913093531682e-05, | |
| "loss": 0.6215, | |
| "num_tokens": 1729134925.0, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 2.6186708860759493, | |
| "grad_norm": 0.3629658305187627, | |
| "learning_rate": 2.7594184714554e-05, | |
| "loss": 0.6193, | |
| "num_tokens": 1734377805.0, | |
| "step": 1655 | |
| }, | |
| { | |
| "epoch": 2.6265822784810124, | |
| "grad_norm": 0.3305255086161955, | |
| "learning_rate": 2.7476453756894173e-05, | |
| "loss": 0.6163, | |
| "num_tokens": 1739620685.0, | |
| "step": 1660 | |
| }, | |
| { | |
| "epoch": 2.634493670886076, | |
| "grad_norm": 0.30385118713526643, | |
| "learning_rate": 2.7358723443906653e-05, | |
| "loss": 0.6168, | |
| "num_tokens": 1744863565.0, | |
| "step": 1665 | |
| }, | |
| { | |
| "epoch": 2.642405063291139, | |
| "grad_norm": 0.3729938271215438, | |
| "learning_rate": 2.724099699892822e-05, | |
| "loss": 0.6247, | |
| "num_tokens": 1750106445.0, | |
| "step": 1670 | |
| }, | |
| { | |
| "epoch": 2.6503164556962027, | |
| "grad_norm": 0.3363024823177763, | |
| "learning_rate": 2.712327764518979e-05, | |
| "loss": 0.6227, | |
| "num_tokens": 1755349325.0, | |
| "step": 1675 | |
| }, | |
| { | |
| "epoch": 2.6582278481012658, | |
| "grad_norm": 0.3055930853692746, | |
| "learning_rate": 2.7005568605728087e-05, | |
| "loss": 0.6229, | |
| "num_tokens": 1760592205.0, | |
| "step": 1680 | |
| }, | |
| { | |
| "epoch": 2.666139240506329, | |
| "grad_norm": 0.3298226038292838, | |
| "learning_rate": 2.6887873103297474e-05, | |
| "loss": 0.6214, | |
| "num_tokens": 1765835085.0, | |
| "step": 1685 | |
| }, | |
| { | |
| "epoch": 2.6740506329113924, | |
| "grad_norm": 0.2867154405096028, | |
| "learning_rate": 2.6770194360281653e-05, | |
| "loss": 0.6199, | |
| "num_tokens": 1771077965.0, | |
| "step": 1690 | |
| }, | |
| { | |
| "epoch": 2.6819620253164556, | |
| "grad_norm": 0.3195638173711784, | |
| "learning_rate": 2.6652535598605504e-05, | |
| "loss": 0.6191, | |
| "num_tokens": 1776320845.0, | |
| "step": 1695 | |
| }, | |
| { | |
| "epoch": 2.689873417721519, | |
| "grad_norm": 0.32434124204868575, | |
| "learning_rate": 2.6534900039646816e-05, | |
| "loss": 0.6181, | |
| "num_tokens": 1781532860.0, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 2.6977848101265822, | |
| "grad_norm": 0.2918992438055131, | |
| "learning_rate": 2.6417290904148116e-05, | |
| "loss": 0.6197, | |
| "num_tokens": 1786775740.0, | |
| "step": 1705 | |
| }, | |
| { | |
| "epoch": 2.7056962025316453, | |
| "grad_norm": 0.3055847287718812, | |
| "learning_rate": 2.629971141212848e-05, | |
| "loss": 0.6173, | |
| "num_tokens": 1792018620.0, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 2.713607594936709, | |
| "grad_norm": 0.2985953721458501, | |
| "learning_rate": 2.618216478279538e-05, | |
| "loss": 0.6165, | |
| "num_tokens": 1797261500.0, | |
| "step": 1715 | |
| }, | |
| { | |
| "epoch": 2.721518987341772, | |
| "grad_norm": 0.3212918053380019, | |
| "learning_rate": 2.6064654234456554e-05, | |
| "loss": 0.6214, | |
| "num_tokens": 1802504380.0, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 2.7294303797468356, | |
| "grad_norm": 0.2919099745172365, | |
| "learning_rate": 2.5947182984431827e-05, | |
| "loss": 0.6255, | |
| "num_tokens": 1807747260.0, | |
| "step": 1725 | |
| }, | |
| { | |
| "epoch": 2.7373417721518987, | |
| "grad_norm": 0.2860751140237954, | |
| "learning_rate": 2.5829754248965154e-05, | |
| "loss": 0.6189, | |
| "num_tokens": 1812990140.0, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 2.745253164556962, | |
| "grad_norm": 0.3019949199367483, | |
| "learning_rate": 2.571237124313643e-05, | |
| "loss": 0.6124, | |
| "num_tokens": 1818233020.0, | |
| "step": 1735 | |
| }, | |
| { | |
| "epoch": 2.7531645569620253, | |
| "grad_norm": 0.3187461502303022, | |
| "learning_rate": 2.5595037180773524e-05, | |
| "loss": 0.6196, | |
| "num_tokens": 1823475900.0, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 2.7610759493670884, | |
| "grad_norm": 0.31735001215836417, | |
| "learning_rate": 2.5477755274364308e-05, | |
| "loss": 0.623, | |
| "num_tokens": 1828718780.0, | |
| "step": 1745 | |
| }, | |
| { | |
| "epoch": 2.768987341772152, | |
| "grad_norm": 0.33876769709652277, | |
| "learning_rate": 2.5360528734968647e-05, | |
| "loss": 0.611, | |
| "num_tokens": 1833961660.0, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 2.776898734177215, | |
| "grad_norm": 0.3542233915904492, | |
| "learning_rate": 2.5243360772130532e-05, | |
| "loss": 0.625, | |
| "num_tokens": 1839200698.0, | |
| "step": 1755 | |
| }, | |
| { | |
| "epoch": 2.7848101265822782, | |
| "grad_norm": 0.3630368367495965, | |
| "learning_rate": 2.5126254593790182e-05, | |
| "loss": 0.6179, | |
| "num_tokens": 1844443578.0, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 2.7927215189873418, | |
| "grad_norm": 0.3826705315790488, | |
| "learning_rate": 2.500921340619621e-05, | |
| "loss": 0.6205, | |
| "num_tokens": 1849680596.0, | |
| "step": 1765 | |
| }, | |
| { | |
| "epoch": 2.8006329113924053, | |
| "grad_norm": 0.3644864833948713, | |
| "learning_rate": 2.489224041381787e-05, | |
| "loss": 0.6156, | |
| "num_tokens": 1854907408.0, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 2.8085443037974684, | |
| "grad_norm": 0.3386672412828899, | |
| "learning_rate": 2.477533881925727e-05, | |
| "loss": 0.6238, | |
| "num_tokens": 1860134742.0, | |
| "step": 1775 | |
| }, | |
| { | |
| "epoch": 2.8164556962025316, | |
| "grad_norm": 0.2921538507032039, | |
| "learning_rate": 2.465851182316176e-05, | |
| "loss": 0.6202, | |
| "num_tokens": 1865377622.0, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 2.8243670886075947, | |
| "grad_norm": 0.28494983081811437, | |
| "learning_rate": 2.4541762624136212e-05, | |
| "loss": 0.6176, | |
| "num_tokens": 1870620502.0, | |
| "step": 1785 | |
| }, | |
| { | |
| "epoch": 2.8322784810126582, | |
| "grad_norm": 0.29021546121305103, | |
| "learning_rate": 2.4425094418655526e-05, | |
| "loss": 0.6177, | |
| "num_tokens": 1875863382.0, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 2.8401898734177218, | |
| "grad_norm": 0.27953269634031763, | |
| "learning_rate": 2.430851040097707e-05, | |
| "loss": 0.6158, | |
| "num_tokens": 1881101588.0, | |
| "step": 1795 | |
| }, | |
| { | |
| "epoch": 2.848101265822785, | |
| "grad_norm": 0.28367003015841197, | |
| "learning_rate": 2.4192013763053222e-05, | |
| "loss": 0.6151, | |
| "num_tokens": 1886333356.0, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 2.856012658227848, | |
| "grad_norm": 0.30695047341331355, | |
| "learning_rate": 2.4075607694444014e-05, | |
| "loss": 0.615, | |
| "num_tokens": 1891576236.0, | |
| "step": 1805 | |
| }, | |
| { | |
| "epoch": 2.8639240506329116, | |
| "grad_norm": 0.3389129881606512, | |
| "learning_rate": 2.395929538222978e-05, | |
| "loss": 0.6194, | |
| "num_tokens": 1896816113.0, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 2.8718354430379747, | |
| "grad_norm": 0.2993951418009312, | |
| "learning_rate": 2.3843080010923873e-05, | |
| "loss": 0.6151, | |
| "num_tokens": 1902045462.0, | |
| "step": 1815 | |
| }, | |
| { | |
| "epoch": 2.879746835443038, | |
| "grad_norm": 0.2747247961261455, | |
| "learning_rate": 2.3726964762385546e-05, | |
| "loss": 0.6152, | |
| "num_tokens": 1907275468.0, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 2.8876582278481013, | |
| "grad_norm": 0.27656654436718103, | |
| "learning_rate": 2.361095281573274e-05, | |
| "loss": 0.6204, | |
| "num_tokens": 1912517035.0, | |
| "step": 1825 | |
| }, | |
| { | |
| "epoch": 2.8955696202531644, | |
| "grad_norm": 0.26450516299877097, | |
| "learning_rate": 2.3495047347255122e-05, | |
| "loss": 0.6141, | |
| "num_tokens": 1917752379.0, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 2.903481012658228, | |
| "grad_norm": 0.2680090640698674, | |
| "learning_rate": 2.3379251530327112e-05, | |
| "loss": 0.6098, | |
| "num_tokens": 1922995259.0, | |
| "step": 1835 | |
| }, | |
| { | |
| "epoch": 2.911392405063291, | |
| "grad_norm": 0.3020470058551071, | |
| "learning_rate": 2.326356853532094e-05, | |
| "loss": 0.6121, | |
| "num_tokens": 1928238139.0, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 2.9193037974683547, | |
| "grad_norm": 0.35207295628986174, | |
| "learning_rate": 2.3148001529519918e-05, | |
| "loss": 0.6176, | |
| "num_tokens": 1933481019.0, | |
| "step": 1845 | |
| }, | |
| { | |
| "epoch": 2.9272151898734178, | |
| "grad_norm": 0.31909804688235877, | |
| "learning_rate": 2.303255367703169e-05, | |
| "loss": 0.6206, | |
| "num_tokens": 1938714322.0, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 2.935126582278481, | |
| "grad_norm": 0.30462726559520964, | |
| "learning_rate": 2.2917228138701596e-05, | |
| "loss": 0.6154, | |
| "num_tokens": 1943957202.0, | |
| "step": 1855 | |
| }, | |
| { | |
| "epoch": 2.9430379746835444, | |
| "grad_norm": 0.32103060565165314, | |
| "learning_rate": 2.2802028072026115e-05, | |
| "loss": 0.6089, | |
| "num_tokens": 1949191447.0, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 2.9509493670886076, | |
| "grad_norm": 0.3062127051035162, | |
| "learning_rate": 2.268695663106648e-05, | |
| "loss": 0.6127, | |
| "num_tokens": 1954434327.0, | |
| "step": 1865 | |
| }, | |
| { | |
| "epoch": 2.958860759493671, | |
| "grad_norm": 0.3239852849229511, | |
| "learning_rate": 2.257201696636225e-05, | |
| "loss": 0.615, | |
| "num_tokens": 1959677207.0, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 2.9667721518987342, | |
| "grad_norm": 0.2893785533195971, | |
| "learning_rate": 2.2457212224845115e-05, | |
| "loss": 0.6182, | |
| "num_tokens": 1964920087.0, | |
| "step": 1875 | |
| }, | |
| { | |
| "epoch": 2.9746835443037973, | |
| "grad_norm": 0.3565583813030718, | |
| "learning_rate": 2.2342545549752658e-05, | |
| "loss": 0.6187, | |
| "num_tokens": 1970162967.0, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 2.982594936708861, | |
| "grad_norm": 0.2901982983640066, | |
| "learning_rate": 2.2228020080542394e-05, | |
| "loss": 0.6155, | |
| "num_tokens": 1975405847.0, | |
| "step": 1885 | |
| }, | |
| { | |
| "epoch": 2.990506329113924, | |
| "grad_norm": 0.2552747365991554, | |
| "learning_rate": 2.211363895280573e-05, | |
| "loss": 0.617, | |
| "num_tokens": 1980648163.0, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 2.9984177215189876, | |
| "grad_norm": 0.2925757603065985, | |
| "learning_rate": 2.1999405298182176e-05, | |
| "loss": 0.6114, | |
| "num_tokens": 1985885198.0, | |
| "step": 1895 | |
| }, | |
| { | |
| "epoch": 3.0063291139240507, | |
| "grad_norm": 0.3169901185514265, | |
| "learning_rate": 2.1885322244273537e-05, | |
| "loss": 0.6001, | |
| "num_tokens": 1991128078.0, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 3.0142405063291138, | |
| "grad_norm": 0.2733791677119331, | |
| "learning_rate": 2.177139291455837e-05, | |
| "loss": 0.5909, | |
| "num_tokens": 1996370958.0, | |
| "step": 1905 | |
| }, | |
| { | |
| "epoch": 3.0221518987341773, | |
| "grad_norm": 0.2969117337975924, | |
| "learning_rate": 2.1657620428306386e-05, | |
| "loss": 0.5979, | |
| "num_tokens": 2001613838.0, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 3.0300632911392404, | |
| "grad_norm": 0.259222996285033, | |
| "learning_rate": 2.154400790049309e-05, | |
| "loss": 0.5935, | |
| "num_tokens": 2006856718.0, | |
| "step": 1915 | |
| }, | |
| { | |
| "epoch": 3.037974683544304, | |
| "grad_norm": 0.29751625615948685, | |
| "learning_rate": 2.1430558441714488e-05, | |
| "loss": 0.5892, | |
| "num_tokens": 2012099598.0, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 3.045886075949367, | |
| "grad_norm": 0.2808339111842118, | |
| "learning_rate": 2.131727515810193e-05, | |
| "loss": 0.5881, | |
| "num_tokens": 2017342478.0, | |
| "step": 1925 | |
| }, | |
| { | |
| "epoch": 3.0537974683544302, | |
| "grad_norm": 0.2665237486778169, | |
| "learning_rate": 2.1204161151237044e-05, | |
| "loss": 0.5869, | |
| "num_tokens": 2022585358.0, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 3.0617088607594938, | |
| "grad_norm": 0.28147532434254685, | |
| "learning_rate": 2.1091219518066846e-05, | |
| "loss": 0.5857, | |
| "num_tokens": 2027828238.0, | |
| "step": 1935 | |
| }, | |
| { | |
| "epoch": 3.069620253164557, | |
| "grad_norm": 0.27827204435611375, | |
| "learning_rate": 2.0978453350818923e-05, | |
| "loss": 0.5892, | |
| "num_tokens": 2033071118.0, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 3.0775316455696204, | |
| "grad_norm": 0.27504620542469616, | |
| "learning_rate": 2.0865865736916767e-05, | |
| "loss": 0.5927, | |
| "num_tokens": 2038313998.0, | |
| "step": 1945 | |
| }, | |
| { | |
| "epoch": 3.0854430379746836, | |
| "grad_norm": 0.2625667359010651, | |
| "learning_rate": 2.0753459758895305e-05, | |
| "loss": 0.5878, | |
| "num_tokens": 2043556878.0, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 3.0933544303797467, | |
| "grad_norm": 0.31412193685640266, | |
| "learning_rate": 2.0641238494316412e-05, | |
| "loss": 0.5954, | |
| "num_tokens": 2048795916.0, | |
| "step": 1955 | |
| }, | |
| { | |
| "epoch": 3.1012658227848102, | |
| "grad_norm": 0.2551742932124429, | |
| "learning_rate": 2.0529205015684733e-05, | |
| "loss": 0.5896, | |
| "num_tokens": 2054038796.0, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 3.1091772151898733, | |
| "grad_norm": 0.27107418641432374, | |
| "learning_rate": 2.0417362390363497e-05, | |
| "loss": 0.5949, | |
| "num_tokens": 2059281676.0, | |
| "step": 1965 | |
| }, | |
| { | |
| "epoch": 3.117088607594937, | |
| "grad_norm": 0.2910292647945281, | |
| "learning_rate": 2.030571368049058e-05, | |
| "loss": 0.593, | |
| "num_tokens": 2064524556.0, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 3.125, | |
| "grad_norm": 0.2702497087199772, | |
| "learning_rate": 2.0194261942894628e-05, | |
| "loss": 0.5913, | |
| "num_tokens": 2069763519.0, | |
| "step": 1975 | |
| }, | |
| { | |
| "epoch": 3.132911392405063, | |
| "grad_norm": 0.27879096153789196, | |
| "learning_rate": 2.0083010229011405e-05, | |
| "loss": 0.5871, | |
| "num_tokens": 2075006399.0, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 3.1408227848101267, | |
| "grad_norm": 0.2760119851379016, | |
| "learning_rate": 1.9971961584800218e-05, | |
| "loss": 0.5889, | |
| "num_tokens": 2080249279.0, | |
| "step": 1985 | |
| }, | |
| { | |
| "epoch": 3.1487341772151898, | |
| "grad_norm": 0.2980189742569214, | |
| "learning_rate": 1.986111905066055e-05, | |
| "loss": 0.5936, | |
| "num_tokens": 2085492159.0, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 3.1566455696202533, | |
| "grad_norm": 0.27221033878781475, | |
| "learning_rate": 1.9750485661348762e-05, | |
| "loss": 0.5925, | |
| "num_tokens": 2090735039.0, | |
| "step": 1995 | |
| }, | |
| { | |
| "epoch": 3.1645569620253164, | |
| "grad_norm": 0.282708489024635, | |
| "learning_rate": 1.964006444589509e-05, | |
| "loss": 0.5906, | |
| "num_tokens": 2095977919.0, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 3.1724683544303796, | |
| "grad_norm": 0.2689336862752935, | |
| "learning_rate": 1.952985842752062e-05, | |
| "loss": 0.5908, | |
| "num_tokens": 2101206695.0, | |
| "step": 2005 | |
| }, | |
| { | |
| "epoch": 3.180379746835443, | |
| "grad_norm": 0.26260733949618775, | |
| "learning_rate": 1.941987062355458e-05, | |
| "loss": 0.5907, | |
| "num_tokens": 2106449575.0, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 3.1882911392405062, | |
| "grad_norm": 0.2829230017612488, | |
| "learning_rate": 1.9310104045351724e-05, | |
| "loss": 0.5869, | |
| "num_tokens": 2111692455.0, | |
| "step": 2015 | |
| }, | |
| { | |
| "epoch": 3.1962025316455698, | |
| "grad_norm": 0.2496685091023237, | |
| "learning_rate": 1.9200561698209828e-05, | |
| "loss": 0.5895, | |
| "num_tokens": 2116935335.0, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 3.204113924050633, | |
| "grad_norm": 0.30588754751321845, | |
| "learning_rate": 1.9091246581287487e-05, | |
| "loss": 0.5911, | |
| "num_tokens": 2122178215.0, | |
| "step": 2025 | |
| }, | |
| { | |
| "epoch": 3.212025316455696, | |
| "grad_norm": 0.3013756834853421, | |
| "learning_rate": 1.8982161687521938e-05, | |
| "loss": 0.5926, | |
| "num_tokens": 2127416178.0, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 3.2199367088607596, | |
| "grad_norm": 0.3260497257360817, | |
| "learning_rate": 1.8873310003547152e-05, | |
| "loss": 0.5957, | |
| "num_tokens": 2132659058.0, | |
| "step": 2035 | |
| }, | |
| { | |
| "epoch": 3.2278481012658227, | |
| "grad_norm": 18.37765842944507, | |
| "learning_rate": 1.8764694509612057e-05, | |
| "loss": 0.6045, | |
| "num_tokens": 2137900029.0, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 3.2357594936708862, | |
| "grad_norm": 0.2844360272669497, | |
| "learning_rate": 1.865631817949893e-05, | |
| "loss": 0.5961, | |
| "num_tokens": 2143139652.0, | |
| "step": 2045 | |
| }, | |
| { | |
| "epoch": 3.2436708860759493, | |
| "grad_norm": 0.25708276328077995, | |
| "learning_rate": 1.8548183980441985e-05, | |
| "loss": 0.5988, | |
| "num_tokens": 2148382532.0, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 3.2515822784810124, | |
| "grad_norm": 0.27434743791141486, | |
| "learning_rate": 1.8440294873046126e-05, | |
| "loss": 0.5904, | |
| "num_tokens": 2153625412.0, | |
| "step": 2055 | |
| }, | |
| { | |
| "epoch": 3.259493670886076, | |
| "grad_norm": 0.25975458422652054, | |
| "learning_rate": 1.8332653811205906e-05, | |
| "loss": 0.5928, | |
| "num_tokens": 2158854105.0, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 3.267405063291139, | |
| "grad_norm": 0.26925828595298595, | |
| "learning_rate": 1.8225263742024638e-05, | |
| "loss": 0.5833, | |
| "num_tokens": 2164096985.0, | |
| "step": 2065 | |
| }, | |
| { | |
| "epoch": 3.2753164556962027, | |
| "grad_norm": 0.282185762273433, | |
| "learning_rate": 1.8118127605733697e-05, | |
| "loss": 0.5981, | |
| "num_tokens": 2169337273.0, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 3.2832278481012658, | |
| "grad_norm": 0.25816071189825335, | |
| "learning_rate": 1.801124833561208e-05, | |
| "loss": 0.5961, | |
| "num_tokens": 2174564736.0, | |
| "step": 2075 | |
| }, | |
| { | |
| "epoch": 3.291139240506329, | |
| "grad_norm": 0.24619197376885332, | |
| "learning_rate": 1.7904628857905982e-05, | |
| "loss": 0.6011, | |
| "num_tokens": 2179797828.0, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 3.2990506329113924, | |
| "grad_norm": 0.24421414534208322, | |
| "learning_rate": 1.7798272091748785e-05, | |
| "loss": 0.5865, | |
| "num_tokens": 2185040708.0, | |
| "step": 2085 | |
| }, | |
| { | |
| "epoch": 3.3069620253164556, | |
| "grad_norm": 0.2524526157340073, | |
| "learning_rate": 1.7692180949081093e-05, | |
| "loss": 0.5959, | |
| "num_tokens": 2190283588.0, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 3.314873417721519, | |
| "grad_norm": 0.27130450376385096, | |
| "learning_rate": 1.758635833457099e-05, | |
| "loss": 0.5848, | |
| "num_tokens": 2195513594.0, | |
| "step": 2095 | |
| }, | |
| { | |
| "epoch": 3.3227848101265822, | |
| "grad_norm": 0.26771998653275103, | |
| "learning_rate": 1.748080714553455e-05, | |
| "loss": 0.5879, | |
| "num_tokens": 2200745342.0, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 3.3306962025316453, | |
| "grad_norm": 0.26138164229192307, | |
| "learning_rate": 1.7375530271856487e-05, | |
| "loss": 0.5922, | |
| "num_tokens": 2205988222.0, | |
| "step": 2105 | |
| }, | |
| { | |
| "epoch": 3.338607594936709, | |
| "grad_norm": 0.2715626154228517, | |
| "learning_rate": 1.727053059591101e-05, | |
| "loss": 0.5971, | |
| "num_tokens": 2211231102.0, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 3.346518987341772, | |
| "grad_norm": 0.2455227895466599, | |
| "learning_rate": 1.7165810992482994e-05, | |
| "loss": 0.5887, | |
| "num_tokens": 2216460869.0, | |
| "step": 2115 | |
| }, | |
| { | |
| "epoch": 3.3544303797468356, | |
| "grad_norm": 0.2777380961416323, | |
| "learning_rate": 1.706137432868917e-05, | |
| "loss": 0.5943, | |
| "num_tokens": 2221703749.0, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 3.3623417721518987, | |
| "grad_norm": 0.25563580174980555, | |
| "learning_rate": 1.6957223463899658e-05, | |
| "loss": 0.5897, | |
| "num_tokens": 2226946629.0, | |
| "step": 2125 | |
| }, | |
| { | |
| "epoch": 3.370253164556962, | |
| "grad_norm": 0.25164624412979186, | |
| "learning_rate": 1.6853361249659722e-05, | |
| "loss": 0.5929, | |
| "num_tokens": 2232189509.0, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 3.3781645569620253, | |
| "grad_norm": 0.2806108194410442, | |
| "learning_rate": 1.674979052961166e-05, | |
| "loss": 0.5975, | |
| "num_tokens": 2237432389.0, | |
| "step": 2135 | |
| }, | |
| { | |
| "epoch": 3.3860759493670884, | |
| "grad_norm": 0.2605837907585764, | |
| "learning_rate": 1.664651413941693e-05, | |
| "loss": 0.5907, | |
| "num_tokens": 2242665858.0, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 3.393987341772152, | |
| "grad_norm": 0.2598679967531081, | |
| "learning_rate": 1.6543534906678552e-05, | |
| "loss": 0.592, | |
| "num_tokens": 2247908738.0, | |
| "step": 2145 | |
| }, | |
| { | |
| "epoch": 3.401898734177215, | |
| "grad_norm": 0.25797499837806687, | |
| "learning_rate": 1.6440855650863695e-05, | |
| "loss": 0.5894, | |
| "num_tokens": 2253151618.0, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 3.4098101265822787, | |
| "grad_norm": 0.2427493258825347, | |
| "learning_rate": 1.6338479183226436e-05, | |
| "loss": 0.5924, | |
| "num_tokens": 2258394498.0, | |
| "step": 2155 | |
| }, | |
| { | |
| "epoch": 3.4177215189873418, | |
| "grad_norm": 0.2359671364270129, | |
| "learning_rate": 1.6236408306730828e-05, | |
| "loss": 0.5916, | |
| "num_tokens": 2263637378.0, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 3.425632911392405, | |
| "grad_norm": 0.23492984902262662, | |
| "learning_rate": 1.6134645815974153e-05, | |
| "loss": 0.588, | |
| "num_tokens": 2268880258.0, | |
| "step": 2165 | |
| }, | |
| { | |
| "epoch": 3.4335443037974684, | |
| "grad_norm": 0.2684129651198138, | |
| "learning_rate": 1.6033194497110387e-05, | |
| "loss": 0.5863, | |
| "num_tokens": 2274107070.0, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 3.4414556962025316, | |
| "grad_norm": 0.2551028754173729, | |
| "learning_rate": 1.5932057127773956e-05, | |
| "loss": 0.5912, | |
| "num_tokens": 2279335709.0, | |
| "step": 2175 | |
| }, | |
| { | |
| "epoch": 3.449367088607595, | |
| "grad_norm": 0.27552281820576113, | |
| "learning_rate": 1.5831236477003657e-05, | |
| "loss": 0.5901, | |
| "num_tokens": 2284578589.0, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 3.4572784810126582, | |
| "grad_norm": 0.245355717229245, | |
| "learning_rate": 1.573073530516683e-05, | |
| "loss": 0.5984, | |
| "num_tokens": 2289805799.0, | |
| "step": 2185 | |
| }, | |
| { | |
| "epoch": 3.4651898734177213, | |
| "grad_norm": 0.2711024543904534, | |
| "learning_rate": 1.563055636388385e-05, | |
| "loss": 0.5901, | |
| "num_tokens": 2295041668.0, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 3.473101265822785, | |
| "grad_norm": 0.25534456241862863, | |
| "learning_rate": 1.5530702395952726e-05, | |
| "loss": 0.5885, | |
| "num_tokens": 2300284548.0, | |
| "step": 2195 | |
| }, | |
| { | |
| "epoch": 3.481012658227848, | |
| "grad_norm": 0.28472473513191726, | |
| "learning_rate": 1.5431176135274004e-05, | |
| "loss": 0.5875, | |
| "num_tokens": 2305527428.0, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 3.4889240506329116, | |
| "grad_norm": 0.26401107803555085, | |
| "learning_rate": 1.533198030677596e-05, | |
| "loss": 0.587, | |
| "num_tokens": 2310770308.0, | |
| "step": 2205 | |
| }, | |
| { | |
| "epoch": 3.4968354430379747, | |
| "grad_norm": 0.23581382627455794, | |
| "learning_rate": 1.5233117626339988e-05, | |
| "loss": 0.5829, | |
| "num_tokens": 2316013188.0, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 3.504746835443038, | |
| "grad_norm": 0.2919647262861195, | |
| "learning_rate": 1.513459080072617e-05, | |
| "loss": 0.593, | |
| "num_tokens": 2321256068.0, | |
| "step": 2215 | |
| }, | |
| { | |
| "epoch": 3.5126582278481013, | |
| "grad_norm": 0.27795974121603223, | |
| "learning_rate": 1.503640252749928e-05, | |
| "loss": 0.5948, | |
| "num_tokens": 2326498948.0, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 3.5205696202531644, | |
| "grad_norm": 0.2513485774758495, | |
| "learning_rate": 1.4938555494954838e-05, | |
| "loss": 0.5928, | |
| "num_tokens": 2331740515.0, | |
| "step": 2225 | |
| }, | |
| { | |
| "epoch": 3.528481012658228, | |
| "grad_norm": 0.27468633702239875, | |
| "learning_rate": 1.484105238204555e-05, | |
| "loss": 0.592, | |
| "num_tokens": 2336983395.0, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 3.536392405063291, | |
| "grad_norm": 0.24973282593959645, | |
| "learning_rate": 1.4743895858307951e-05, | |
| "loss": 0.5858, | |
| "num_tokens": 2342226275.0, | |
| "step": 2235 | |
| }, | |
| { | |
| "epoch": 3.5443037974683547, | |
| "grad_norm": 0.236224838159416, | |
| "learning_rate": 1.4647088583789325e-05, | |
| "loss": 0.5943, | |
| "num_tokens": 2347461462.0, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 3.5522151898734178, | |
| "grad_norm": 0.25744574090772937, | |
| "learning_rate": 1.4550633208974832e-05, | |
| "loss": 0.5888, | |
| "num_tokens": 2352704342.0, | |
| "step": 2245 | |
| }, | |
| { | |
| "epoch": 3.560126582278481, | |
| "grad_norm": 0.24904139861794714, | |
| "learning_rate": 1.445453237471501e-05, | |
| "loss": 0.5937, | |
| "num_tokens": 2357947222.0, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 3.5680379746835444, | |
| "grad_norm": 0.26268941423266656, | |
| "learning_rate": 1.435878871215342e-05, | |
| "loss": 0.5903, | |
| "num_tokens": 2363190102.0, | |
| "step": 2255 | |
| }, | |
| { | |
| "epoch": 3.5759493670886076, | |
| "grad_norm": 0.24790895208789265, | |
| "learning_rate": 1.4263404842654638e-05, | |
| "loss": 0.5972, | |
| "num_tokens": 2368432982.0, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 3.583860759493671, | |
| "grad_norm": 0.2613242366100201, | |
| "learning_rate": 1.416838337773243e-05, | |
| "loss": 0.595, | |
| "num_tokens": 2373661081.0, | |
| "step": 2265 | |
| }, | |
| { | |
| "epoch": 3.5917721518987342, | |
| "grad_norm": 0.2922830460241977, | |
| "learning_rate": 1.4073726918978347e-05, | |
| "loss": 0.594, | |
| "num_tokens": 2378903961.0, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 3.5996835443037973, | |
| "grad_norm": 0.27101780547058585, | |
| "learning_rate": 1.3979438057990399e-05, | |
| "loss": 0.592, | |
| "num_tokens": 2384146841.0, | |
| "step": 2275 | |
| }, | |
| { | |
| "epoch": 3.607594936708861, | |
| "grad_norm": 0.26512432923141993, | |
| "learning_rate": 1.3885519376302159e-05, | |
| "loss": 0.5878, | |
| "num_tokens": 2389389721.0, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 3.615506329113924, | |
| "grad_norm": 0.23952238274180007, | |
| "learning_rate": 1.3791973445312058e-05, | |
| "loss": 0.589, | |
| "num_tokens": 2394632601.0, | |
| "step": 2285 | |
| }, | |
| { | |
| "epoch": 3.6234177215189876, | |
| "grad_norm": 0.25423971405929885, | |
| "learning_rate": 1.3698802826213008e-05, | |
| "loss": 0.5908, | |
| "num_tokens": 2399860042.0, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 3.6313291139240507, | |
| "grad_norm": 0.24740192193842994, | |
| "learning_rate": 1.3606010069922232e-05, | |
| "loss": 0.5882, | |
| "num_tokens": 2405102922.0, | |
| "step": 2295 | |
| }, | |
| { | |
| "epoch": 3.6392405063291138, | |
| "grad_norm": 0.5035309107226975, | |
| "learning_rate": 1.351359771701147e-05, | |
| "loss": 0.5966, | |
| "num_tokens": 2410345802.0, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 3.6471518987341773, | |
| "grad_norm": 0.231935472046041, | |
| "learning_rate": 1.3421568297637404e-05, | |
| "loss": 0.5873, | |
| "num_tokens": 2415588682.0, | |
| "step": 2305 | |
| }, | |
| { | |
| "epoch": 3.6550632911392404, | |
| "grad_norm": 0.2584802973832586, | |
| "learning_rate": 1.3329924331472376e-05, | |
| "loss": 0.5925, | |
| "num_tokens": 2420806919.0, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 3.662974683544304, | |
| "grad_norm": 0.31314246833170745, | |
| "learning_rate": 1.3238668327635422e-05, | |
| "loss": 0.5922, | |
| "num_tokens": 2426049799.0, | |
| "step": 2315 | |
| }, | |
| { | |
| "epoch": 3.670886075949367, | |
| "grad_norm": 0.2663616787106021, | |
| "learning_rate": 1.3147802784623536e-05, | |
| "loss": 0.5962, | |
| "num_tokens": 2431292679.0, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 3.6787974683544302, | |
| "grad_norm": 0.23711780957648615, | |
| "learning_rate": 1.3057330190243317e-05, | |
| "loss": 0.5894, | |
| "num_tokens": 2436523336.0, | |
| "step": 2325 | |
| }, | |
| { | |
| "epoch": 3.6867088607594938, | |
| "grad_norm": 0.2549540114874716, | |
| "learning_rate": 1.2967253021542813e-05, | |
| "loss": 0.5957, | |
| "num_tokens": 2441766216.0, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 3.694620253164557, | |
| "grad_norm": 0.24052219589182647, | |
| "learning_rate": 1.2877573744743737e-05, | |
| "loss": 0.5935, | |
| "num_tokens": 2447009096.0, | |
| "step": 2335 | |
| }, | |
| { | |
| "epoch": 3.7025316455696204, | |
| "grad_norm": 0.23451782912483585, | |
| "learning_rate": 1.278829481517388e-05, | |
| "loss": 0.5907, | |
| "num_tokens": 2452248973.0, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 3.7104430379746836, | |
| "grad_norm": 0.2572531172651155, | |
| "learning_rate": 1.2699418677199992e-05, | |
| "loss": 0.5871, | |
| "num_tokens": 2457491853.0, | |
| "step": 2345 | |
| }, | |
| { | |
| "epoch": 3.7183544303797467, | |
| "grad_norm": 0.22205564327904748, | |
| "learning_rate": 1.2610947764160743e-05, | |
| "loss": 0.5911, | |
| "num_tokens": 2462732233.0, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 3.7262658227848102, | |
| "grad_norm": 0.25061314597163403, | |
| "learning_rate": 1.2522884498300185e-05, | |
| "loss": 0.589, | |
| "num_tokens": 2467975113.0, | |
| "step": 2355 | |
| }, | |
| { | |
| "epoch": 3.7341772151898733, | |
| "grad_norm": 0.24341650567160555, | |
| "learning_rate": 1.2435231290701398e-05, | |
| "loss": 0.5839, | |
| "num_tokens": 2473217993.0, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 3.742088607594937, | |
| "grad_norm": 0.23957280785298024, | |
| "learning_rate": 1.234799054122048e-05, | |
| "loss": 0.5856, | |
| "num_tokens": 2478460873.0, | |
| "step": 2365 | |
| }, | |
| { | |
| "epoch": 3.75, | |
| "grad_norm": 0.23496985031896744, | |
| "learning_rate": 1.2261164638420832e-05, | |
| "loss": 0.5943, | |
| "num_tokens": 2483695118.0, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 3.757911392405063, | |
| "grad_norm": 0.2562958360759268, | |
| "learning_rate": 1.2174755959507785e-05, | |
| "loss": 0.5905, | |
| "num_tokens": 2488915999.0, | |
| "step": 2375 | |
| }, | |
| { | |
| "epoch": 3.7658227848101267, | |
| "grad_norm": 0.23818568022151865, | |
| "learning_rate": 1.2088766870263504e-05, | |
| "loss": 0.5833, | |
| "num_tokens": 2494158879.0, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 3.7737341772151898, | |
| "grad_norm": 0.271749751046968, | |
| "learning_rate": 1.2003199724982206e-05, | |
| "loss": 0.5898, | |
| "num_tokens": 2499392356.0, | |
| "step": 2385 | |
| }, | |
| { | |
| "epoch": 3.7816455696202533, | |
| "grad_norm": 0.2768085008184621, | |
| "learning_rate": 1.1918056866405717e-05, | |
| "loss": 0.5964, | |
| "num_tokens": 2504635236.0, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 3.7895569620253164, | |
| "grad_norm": 0.24209821208112645, | |
| "learning_rate": 1.1833340625659325e-05, | |
| "loss": 0.5907, | |
| "num_tokens": 2509878116.0, | |
| "step": 2395 | |
| }, | |
| { | |
| "epoch": 3.7974683544303796, | |
| "grad_norm": 0.2587091544484298, | |
| "learning_rate": 1.1749053322187933e-05, | |
| "loss": 0.5887, | |
| "num_tokens": 2515098012.0, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 3.805379746835443, | |
| "grad_norm": 0.241562313900529, | |
| "learning_rate": 1.1665197263692593e-05, | |
| "loss": 0.5914, | |
| "num_tokens": 2520340892.0, | |
| "step": 2405 | |
| }, | |
| { | |
| "epoch": 3.8132911392405062, | |
| "grad_norm": 0.21894193923034058, | |
| "learning_rate": 1.1581774746067315e-05, | |
| "loss": 0.5861, | |
| "num_tokens": 2525582367.0, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 3.8212025316455698, | |
| "grad_norm": 0.241635293617956, | |
| "learning_rate": 1.149878805333616e-05, | |
| "loss": 0.5875, | |
| "num_tokens": 2530819032.0, | |
| "step": 2415 | |
| }, | |
| { | |
| "epoch": 3.829113924050633, | |
| "grad_norm": 0.2377026625673294, | |
| "learning_rate": 1.1416239457590797e-05, | |
| "loss": 0.5895, | |
| "num_tokens": 2536061912.0, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 3.837025316455696, | |
| "grad_norm": 0.2500766928834093, | |
| "learning_rate": 1.1334131218928215e-05, | |
| "loss": 0.5955, | |
| "num_tokens": 2541294406.0, | |
| "step": 2425 | |
| }, | |
| { | |
| "epoch": 3.8449367088607596, | |
| "grad_norm": 0.24157809368048716, | |
| "learning_rate": 1.1252465585388861e-05, | |
| "loss": 0.593, | |
| "num_tokens": 2546537286.0, | |
| "step": 2430 | |
| }, | |
| { | |
| "epoch": 3.8528481012658227, | |
| "grad_norm": 0.23659644820965803, | |
| "learning_rate": 1.1171244792895122e-05, | |
| "loss": 0.5875, | |
| "num_tokens": 2551780166.0, | |
| "step": 2435 | |
| }, | |
| { | |
| "epoch": 3.8607594936708862, | |
| "grad_norm": 0.22665381320819925, | |
| "learning_rate": 1.1090471065190087e-05, | |
| "loss": 0.5895, | |
| "num_tokens": 2557023046.0, | |
| "step": 2440 | |
| }, | |
| { | |
| "epoch": 3.8686708860759493, | |
| "grad_norm": 0.2531032288030383, | |
| "learning_rate": 1.1010146613776646e-05, | |
| "loss": 0.5911, | |
| "num_tokens": 2562249118.0, | |
| "step": 2445 | |
| }, | |
| { | |
| "epoch": 3.8765822784810124, | |
| "grad_norm": 0.2375978750430171, | |
| "learning_rate": 1.0930273637856969e-05, | |
| "loss": 0.5913, | |
| "num_tokens": 2567491998.0, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 3.884493670886076, | |
| "grad_norm": 0.24286465990334033, | |
| "learning_rate": 1.0850854324272289e-05, | |
| "loss": 0.5877, | |
| "num_tokens": 2572734878.0, | |
| "step": 2455 | |
| }, | |
| { | |
| "epoch": 3.892405063291139, | |
| "grad_norm": 0.2575357182094295, | |
| "learning_rate": 1.0771890847443022e-05, | |
| "loss": 0.596, | |
| "num_tokens": 2577975987.0, | |
| "step": 2460 | |
| }, | |
| { | |
| "epoch": 3.9003164556962027, | |
| "grad_norm": 0.27638888789609667, | |
| "learning_rate": 1.069338536930923e-05, | |
| "loss": 0.5865, | |
| "num_tokens": 2583218867.0, | |
| "step": 2465 | |
| }, | |
| { | |
| "epoch": 3.9082278481012658, | |
| "grad_norm": 0.2344539051269508, | |
| "learning_rate": 1.0615340039271443e-05, | |
| "loss": 0.5892, | |
| "num_tokens": 2588461747.0, | |
| "step": 2470 | |
| }, | |
| { | |
| "epoch": 3.916139240506329, | |
| "grad_norm": 0.27080111151637076, | |
| "learning_rate": 1.0537756994131783e-05, | |
| "loss": 0.5895, | |
| "num_tokens": 2593704627.0, | |
| "step": 2475 | |
| }, | |
| { | |
| "epoch": 3.9240506329113924, | |
| "grad_norm": 0.2249920243150278, | |
| "learning_rate": 1.0460638358035496e-05, | |
| "loss": 0.5924, | |
| "num_tokens": 2598947507.0, | |
| "step": 2480 | |
| }, | |
| { | |
| "epoch": 3.9319620253164556, | |
| "grad_norm": 0.28780985267539305, | |
| "learning_rate": 1.0383986242412785e-05, | |
| "loss": 0.5871, | |
| "num_tokens": 2604190387.0, | |
| "step": 2485 | |
| }, | |
| { | |
| "epoch": 3.939873417721519, | |
| "grad_norm": 0.23427765653838825, | |
| "learning_rate": 1.0307802745920964e-05, | |
| "loss": 0.5839, | |
| "num_tokens": 2609433267.0, | |
| "step": 2490 | |
| }, | |
| { | |
| "epoch": 3.9477848101265822, | |
| "grad_norm": 0.2511572467034619, | |
| "learning_rate": 1.0232089954387068e-05, | |
| "loss": 0.591, | |
| "num_tokens": 2614676147.0, | |
| "step": 2495 | |
| }, | |
| { | |
| "epoch": 3.9556962025316453, | |
| "grad_norm": 0.24133671906166224, | |
| "learning_rate": 1.0156849940750688e-05, | |
| "loss": 0.5873, | |
| "num_tokens": 2619919027.0, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 3.963607594936709, | |
| "grad_norm": 0.24130871799729056, | |
| "learning_rate": 1.0082084765007226e-05, | |
| "loss": 0.5919, | |
| "num_tokens": 2625150643.0, | |
| "step": 2505 | |
| }, | |
| { | |
| "epoch": 3.971518987341772, | |
| "grad_norm": 0.23545792709918428, | |
| "learning_rate": 1.0007796474151514e-05, | |
| "loss": 0.5972, | |
| "num_tokens": 2630393523.0, | |
| "step": 2510 | |
| }, | |
| { | |
| "epoch": 3.9794303797468356, | |
| "grad_norm": 0.2506437876450052, | |
| "learning_rate": 9.933987102121764e-06, | |
| "loss": 0.5814, | |
| "num_tokens": 2635636403.0, | |
| "step": 2515 | |
| }, | |
| { | |
| "epoch": 3.9873417721518987, | |
| "grad_norm": 0.2284653998147006, | |
| "learning_rate": 9.860658669743861e-06, | |
| "loss": 0.5897, | |
| "num_tokens": 2640879283.0, | |
| "step": 2520 | |
| }, | |
| { | |
| "epoch": 3.995253164556962, | |
| "grad_norm": 0.22307394105006934, | |
| "learning_rate": 9.787813184676056e-06, | |
| "loss": 0.5861, | |
| "num_tokens": 2646099304.0, | |
| "step": 2525 | |
| }, | |
| { | |
| "epoch": 4.003164556962025, | |
| "grad_norm": 0.28089029948590827, | |
| "learning_rate": 9.715452641353992e-06, | |
| "loss": 0.5826, | |
| "num_tokens": 2651342184.0, | |
| "step": 2530 | |
| }, | |
| { | |
| "epoch": 4.011075949367089, | |
| "grad_norm": 0.23002963489023126, | |
| "learning_rate": 9.643579020936106e-06, | |
| "loss": 0.5662, | |
| "num_tokens": 2656585064.0, | |
| "step": 2535 | |
| }, | |
| { | |
| "epoch": 4.018987341772152, | |
| "grad_norm": 0.24076823558688779, | |
| "learning_rate": 9.572194291249362e-06, | |
| "loss": 0.5767, | |
| "num_tokens": 2661827944.0, | |
| "step": 2540 | |
| }, | |
| { | |
| "epoch": 4.026898734177215, | |
| "grad_norm": 0.22483920300780846, | |
| "learning_rate": 9.501300406735406e-06, | |
| "loss": 0.5743, | |
| "num_tokens": 2667070260.0, | |
| "step": 2545 | |
| }, | |
| { | |
| "epoch": 4.034810126582278, | |
| "grad_norm": 0.2425268841198098, | |
| "learning_rate": 9.430899308397024e-06, | |
| "loss": 0.5691, | |
| "num_tokens": 2672313140.0, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 4.042721518987341, | |
| "grad_norm": 0.22463870074019276, | |
| "learning_rate": 9.360992923745032e-06, | |
| "loss": 0.5727, | |
| "num_tokens": 2677556020.0, | |
| "step": 2555 | |
| }, | |
| { | |
| "epoch": 4.050632911392405, | |
| "grad_norm": 0.2257862359365115, | |
| "learning_rate": 9.29158316674548e-06, | |
| "loss": 0.5771, | |
| "num_tokens": 2682798900.0, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 4.0585443037974684, | |
| "grad_norm": 0.21855947078972188, | |
| "learning_rate": 9.222671937767247e-06, | |
| "loss": 0.5693, | |
| "num_tokens": 2688041780.0, | |
| "step": 2565 | |
| }, | |
| { | |
| "epoch": 4.0664556962025316, | |
| "grad_norm": 0.22655998729700472, | |
| "learning_rate": 9.154261123530024e-06, | |
| "loss": 0.5688, | |
| "num_tokens": 2693280743.0, | |
| "step": 2570 | |
| }, | |
| { | |
| "epoch": 4.074367088607595, | |
| "grad_norm": 0.2417803758293443, | |
| "learning_rate": 9.086352597052674e-06, | |
| "loss": 0.5699, | |
| "num_tokens": 2698523623.0, | |
| "step": 2575 | |
| }, | |
| { | |
| "epoch": 4.082278481012658, | |
| "grad_norm": 0.23491529993846316, | |
| "learning_rate": 9.018948217601894e-06, | |
| "loss": 0.5776, | |
| "num_tokens": 2703760050.0, | |
| "step": 2580 | |
| }, | |
| { | |
| "epoch": 4.090189873417722, | |
| "grad_norm": 0.21683310271118783, | |
| "learning_rate": 8.952049830641368e-06, | |
| "loss": 0.569, | |
| "num_tokens": 2709001853.0, | |
| "step": 2585 | |
| }, | |
| { | |
| "epoch": 4.098101265822785, | |
| "grad_norm": 0.22683758792329442, | |
| "learning_rate": 8.885659267781218e-06, | |
| "loss": 0.5723, | |
| "num_tokens": 2714244733.0, | |
| "step": 2590 | |
| }, | |
| { | |
| "epoch": 4.106012658227848, | |
| "grad_norm": 0.23440417433660363, | |
| "learning_rate": 8.819778346727839e-06, | |
| "loss": 0.571, | |
| "num_tokens": 2719487613.0, | |
| "step": 2595 | |
| }, | |
| { | |
| "epoch": 4.113924050632911, | |
| "grad_norm": 0.2192458553426772, | |
| "learning_rate": 8.754408871234168e-06, | |
| "loss": 0.5733, | |
| "num_tokens": 2724715076.0, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 4.121835443037975, | |
| "grad_norm": 0.2530876360164053, | |
| "learning_rate": 8.689552631050274e-06, | |
| "loss": 0.5714, | |
| "num_tokens": 2729957956.0, | |
| "step": 2605 | |
| }, | |
| { | |
| "epoch": 4.129746835443038, | |
| "grad_norm": 0.21492163566185005, | |
| "learning_rate": 8.625211401874362e-06, | |
| "loss": 0.5694, | |
| "num_tokens": 2735200836.0, | |
| "step": 2610 | |
| }, | |
| { | |
| "epoch": 4.137658227848101, | |
| "grad_norm": 0.22363359389581403, | |
| "learning_rate": 8.561386945304155e-06, | |
| "loss": 0.5727, | |
| "num_tokens": 2740443716.0, | |
| "step": 2615 | |
| }, | |
| { | |
| "epoch": 4.1455696202531644, | |
| "grad_norm": 0.2663268042449171, | |
| "learning_rate": 8.498081008788677e-06, | |
| "loss": 0.5689, | |
| "num_tokens": 2745686596.0, | |
| "step": 2620 | |
| }, | |
| { | |
| "epoch": 4.1534810126582276, | |
| "grad_norm": 0.2379773928525528, | |
| "learning_rate": 8.435295325580373e-06, | |
| "loss": 0.5693, | |
| "num_tokens": 2750929476.0, | |
| "step": 2625 | |
| }, | |
| { | |
| "epoch": 4.1613924050632916, | |
| "grad_norm": 0.2227866522568964, | |
| "learning_rate": 8.373031614687699e-06, | |
| "loss": 0.5698, | |
| "num_tokens": 2756172356.0, | |
| "step": 2630 | |
| }, | |
| { | |
| "epoch": 4.169303797468355, | |
| "grad_norm": 0.23722314775734296, | |
| "learning_rate": 8.311291580828034e-06, | |
| "loss": 0.5723, | |
| "num_tokens": 2761415236.0, | |
| "step": 2635 | |
| }, | |
| { | |
| "epoch": 4.177215189873418, | |
| "grad_norm": 0.2122410135791533, | |
| "learning_rate": 8.250076914381017e-06, | |
| "loss": 0.5661, | |
| "num_tokens": 2766658116.0, | |
| "step": 2640 | |
| }, | |
| { | |
| "epoch": 4.185126582278481, | |
| "grad_norm": 0.21872455435466664, | |
| "learning_rate": 8.189389291342229e-06, | |
| "loss": 0.5668, | |
| "num_tokens": 2771900996.0, | |
| "step": 2645 | |
| }, | |
| { | |
| "epoch": 4.193037974683544, | |
| "grad_norm": 0.22736670887055646, | |
| "learning_rate": 8.12923037327738e-06, | |
| "loss": 0.5723, | |
| "num_tokens": 2777143876.0, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 4.200949367088608, | |
| "grad_norm": 0.24119729706862408, | |
| "learning_rate": 8.06960180727674e-06, | |
| "loss": 0.5689, | |
| "num_tokens": 2782386756.0, | |
| "step": 2655 | |
| }, | |
| { | |
| "epoch": 4.208860759493671, | |
| "grad_norm": 0.22351050734609607, | |
| "learning_rate": 8.010505225910083e-06, | |
| "loss": 0.5677, | |
| "num_tokens": 2787629636.0, | |
| "step": 2660 | |
| }, | |
| { | |
| "epoch": 4.216772151898734, | |
| "grad_norm": 0.265494072724875, | |
| "learning_rate": 7.951942247181992e-06, | |
| "loss": 0.5718, | |
| "num_tokens": 2792872516.0, | |
| "step": 2665 | |
| }, | |
| { | |
| "epoch": 4.224683544303797, | |
| "grad_norm": 0.21820442695718212, | |
| "learning_rate": 7.89391447448755e-06, | |
| "loss": 0.5682, | |
| "num_tokens": 2798109181.0, | |
| "step": 2670 | |
| }, | |
| { | |
| "epoch": 4.2325949367088604, | |
| "grad_norm": 0.2420612432104863, | |
| "learning_rate": 7.836423496568418e-06, | |
| "loss": 0.5753, | |
| "num_tokens": 2803337280.0, | |
| "step": 2675 | |
| }, | |
| { | |
| "epoch": 4.2405063291139244, | |
| "grad_norm": 0.23517468870764166, | |
| "learning_rate": 7.779470887469387e-06, | |
| "loss": 0.5716, | |
| "num_tokens": 2808580160.0, | |
| "step": 2680 | |
| }, | |
| { | |
| "epoch": 4.2484177215189876, | |
| "grad_norm": 0.2267612123573776, | |
| "learning_rate": 7.723058206495242e-06, | |
| "loss": 0.5746, | |
| "num_tokens": 2813823040.0, | |
| "step": 2685 | |
| }, | |
| { | |
| "epoch": 4.256329113924051, | |
| "grad_norm": 0.22002454461146628, | |
| "learning_rate": 7.667186998168082e-06, | |
| "loss": 0.5684, | |
| "num_tokens": 2819065920.0, | |
| "step": 2690 | |
| }, | |
| { | |
| "epoch": 4.264240506329114, | |
| "grad_norm": 0.22912652065839756, | |
| "learning_rate": 7.611858792185038e-06, | |
| "loss": 0.5751, | |
| "num_tokens": 2824308800.0, | |
| "step": 2695 | |
| }, | |
| { | |
| "epoch": 4.272151898734177, | |
| "grad_norm": 0.22408651950567107, | |
| "learning_rate": 7.557075103376383e-06, | |
| "loss": 0.5724, | |
| "num_tokens": 2829542456.0, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 4.280063291139241, | |
| "grad_norm": 0.22342018834903565, | |
| "learning_rate": 7.502837431664059e-06, | |
| "loss": 0.5731, | |
| "num_tokens": 2834785336.0, | |
| "step": 2705 | |
| }, | |
| { | |
| "epoch": 4.287974683544304, | |
| "grad_norm": 0.22864174140723967, | |
| "learning_rate": 7.449147262020616e-06, | |
| "loss": 0.5679, | |
| "num_tokens": 2840015342.0, | |
| "step": 2710 | |
| }, | |
| { | |
| "epoch": 4.295886075949367, | |
| "grad_norm": 0.22865644771070281, | |
| "learning_rate": 7.396006064428554e-06, | |
| "loss": 0.5627, | |
| "num_tokens": 2845248811.0, | |
| "step": 2715 | |
| }, | |
| { | |
| "epoch": 4.30379746835443, | |
| "grad_norm": 0.2674648148165605, | |
| "learning_rate": 7.34341529384006e-06, | |
| "loss": 0.5765, | |
| "num_tokens": 2850491691.0, | |
| "step": 2720 | |
| }, | |
| { | |
| "epoch": 4.311708860759493, | |
| "grad_norm": 0.22345156469242552, | |
| "learning_rate": 7.291376390137214e-06, | |
| "loss": 0.5805, | |
| "num_tokens": 2855734571.0, | |
| "step": 2725 | |
| }, | |
| { | |
| "epoch": 4.319620253164557, | |
| "grad_norm": 0.2122765810320669, | |
| "learning_rate": 7.239890778092509e-06, | |
| "loss": 0.5712, | |
| "num_tokens": 2860977451.0, | |
| "step": 2730 | |
| }, | |
| { | |
| "epoch": 4.3275316455696204, | |
| "grad_norm": 0.2477019061431497, | |
| "learning_rate": 7.188959867329893e-06, | |
| "loss": 0.5814, | |
| "num_tokens": 2866220331.0, | |
| "step": 2735 | |
| }, | |
| { | |
| "epoch": 4.3354430379746836, | |
| "grad_norm": 0.21235713827113287, | |
| "learning_rate": 7.138585052286142e-06, | |
| "loss": 0.5732, | |
| "num_tokens": 2871451563.0, | |
| "step": 2740 | |
| }, | |
| { | |
| "epoch": 4.343354430379747, | |
| "grad_norm": 0.2146641271693942, | |
| "learning_rate": 7.088767712172711e-06, | |
| "loss": 0.5717, | |
| "num_tokens": 2876694443.0, | |
| "step": 2745 | |
| }, | |
| { | |
| "epoch": 4.35126582278481, | |
| "grad_norm": 0.23309761275535962, | |
| "learning_rate": 7.039509210937932e-06, | |
| "loss": 0.5732, | |
| "num_tokens": 2881937323.0, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 4.359177215189874, | |
| "grad_norm": 0.2234172047939556, | |
| "learning_rate": 6.990810897229709e-06, | |
| "loss": 0.57, | |
| "num_tokens": 2887179519.0, | |
| "step": 2755 | |
| }, | |
| { | |
| "epoch": 4.367088607594937, | |
| "grad_norm": 0.22237797838934822, | |
| "learning_rate": 6.942674104358582e-06, | |
| "loss": 0.5738, | |
| "num_tokens": 2892422399.0, | |
| "step": 2760 | |
| }, | |
| { | |
| "epoch": 4.375, | |
| "grad_norm": 0.21593708745753062, | |
| "learning_rate": 6.8951001502612065e-06, | |
| "loss": 0.5658, | |
| "num_tokens": 2897636988.0, | |
| "step": 2765 | |
| }, | |
| { | |
| "epoch": 4.382911392405063, | |
| "grad_norm": 0.22987570961197548, | |
| "learning_rate": 6.848090337464294e-06, | |
| "loss": 0.5682, | |
| "num_tokens": 2902879868.0, | |
| "step": 2770 | |
| }, | |
| { | |
| "epoch": 4.390822784810126, | |
| "grad_norm": 0.22029484227034318, | |
| "learning_rate": 6.8016459530489335e-06, | |
| "loss": 0.5746, | |
| "num_tokens": 2908122748.0, | |
| "step": 2775 | |
| }, | |
| { | |
| "epoch": 4.39873417721519, | |
| "grad_norm": 0.2171355743200665, | |
| "learning_rate": 6.755768268615354e-06, | |
| "loss": 0.567, | |
| "num_tokens": 2913363036.0, | |
| "step": 2780 | |
| }, | |
| { | |
| "epoch": 4.406645569620253, | |
| "grad_norm": 0.2213627866085991, | |
| "learning_rate": 6.710458540248109e-06, | |
| "loss": 0.5725, | |
| "num_tokens": 2918605916.0, | |
| "step": 2785 | |
| }, | |
| { | |
| "epoch": 4.4145569620253164, | |
| "grad_norm": 0.21863302278329563, | |
| "learning_rate": 6.66571800848171e-06, | |
| "loss": 0.5733, | |
| "num_tokens": 2923835265.0, | |
| "step": 2790 | |
| }, | |
| { | |
| "epoch": 4.4224683544303796, | |
| "grad_norm": 0.2114144591578047, | |
| "learning_rate": 6.621547898266615e-06, | |
| "loss": 0.5731, | |
| "num_tokens": 2929076236.0, | |
| "step": 2795 | |
| }, | |
| { | |
| "epoch": 4.430379746835443, | |
| "grad_norm": 0.22099052084655507, | |
| "learning_rate": 6.577949418935732e-06, | |
| "loss": 0.5741, | |
| "num_tokens": 2934319116.0, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 4.438291139240507, | |
| "grad_norm": 0.23002587290313806, | |
| "learning_rate": 6.534923764171305e-06, | |
| "loss": 0.572, | |
| "num_tokens": 2939556532.0, | |
| "step": 2805 | |
| }, | |
| { | |
| "epoch": 4.44620253164557, | |
| "grad_norm": 0.2525898694118869, | |
| "learning_rate": 6.492472111972193e-06, | |
| "loss": 0.5758, | |
| "num_tokens": 2944799412.0, | |
| "step": 2810 | |
| }, | |
| { | |
| "epoch": 4.454113924050633, | |
| "grad_norm": 0.22052197863760098, | |
| "learning_rate": 6.450595624621672e-06, | |
| "loss": 0.5687, | |
| "num_tokens": 2950042292.0, | |
| "step": 2815 | |
| }, | |
| { | |
| "epoch": 4.462025316455696, | |
| "grad_norm": 0.22012961127429426, | |
| "learning_rate": 6.409295448655572e-06, | |
| "loss": 0.5701, | |
| "num_tokens": 2955275769.0, | |
| "step": 2820 | |
| }, | |
| { | |
| "epoch": 4.469936708860759, | |
| "grad_norm": 0.22397593983188469, | |
| "learning_rate": 6.3685727148309014e-06, | |
| "loss": 0.5708, | |
| "num_tokens": 2960518649.0, | |
| "step": 2825 | |
| }, | |
| { | |
| "epoch": 4.477848101265823, | |
| "grad_norm": 0.21586061078083138, | |
| "learning_rate": 6.328428538094895e-06, | |
| "loss": 0.5763, | |
| "num_tokens": 2965750136.0, | |
| "step": 2830 | |
| }, | |
| { | |
| "epoch": 4.485759493670886, | |
| "grad_norm": 0.24077949998848439, | |
| "learning_rate": 6.288864017554471e-06, | |
| "loss": 0.5733, | |
| "num_tokens": 2970993016.0, | |
| "step": 2835 | |
| }, | |
| { | |
| "epoch": 4.493670886075949, | |
| "grad_norm": 0.225684038542863, | |
| "learning_rate": 6.249880236446157e-06, | |
| "loss": 0.5733, | |
| "num_tokens": 2976232054.0, | |
| "step": 2840 | |
| }, | |
| { | |
| "epoch": 4.5015822784810124, | |
| "grad_norm": 0.212864190216861, | |
| "learning_rate": 6.211478262106416e-06, | |
| "loss": 0.5757, | |
| "num_tokens": 2981474934.0, | |
| "step": 2845 | |
| }, | |
| { | |
| "epoch": 4.509493670886076, | |
| "grad_norm": 0.2240303672598969, | |
| "learning_rate": 6.173659145942439e-06, | |
| "loss": 0.5715, | |
| "num_tokens": 2986716501.0, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 4.5174050632911396, | |
| "grad_norm": 0.21964401447515855, | |
| "learning_rate": 6.136423923403332e-06, | |
| "loss": 0.5729, | |
| "num_tokens": 2991948269.0, | |
| "step": 2855 | |
| }, | |
| { | |
| "epoch": 4.525316455696203, | |
| "grad_norm": 0.21745396683364604, | |
| "learning_rate": 6.099773613951805e-06, | |
| "loss": 0.5691, | |
| "num_tokens": 2997172161.0, | |
| "step": 2860 | |
| }, | |
| { | |
| "epoch": 4.533227848101266, | |
| "grad_norm": 0.2051009997691557, | |
| "learning_rate": 6.063709221036231e-06, | |
| "loss": 0.5729, | |
| "num_tokens": 3002415041.0, | |
| "step": 2865 | |
| }, | |
| { | |
| "epoch": 4.541139240506329, | |
| "grad_norm": 0.2082354488706409, | |
| "learning_rate": 6.028231732063179e-06, | |
| "loss": 0.5683, | |
| "num_tokens": 3007657921.0, | |
| "step": 2870 | |
| }, | |
| { | |
| "epoch": 4.549050632911392, | |
| "grad_norm": 0.20333733144092334, | |
| "learning_rate": 5.993342118370382e-06, | |
| "loss": 0.5728, | |
| "num_tokens": 3012900801.0, | |
| "step": 2875 | |
| }, | |
| { | |
| "epoch": 4.556962025316456, | |
| "grad_norm": 0.22876886064683172, | |
| "learning_rate": 5.959041335200154e-06, | |
| "loss": 0.5713, | |
| "num_tokens": 3018143681.0, | |
| "step": 2880 | |
| }, | |
| { | |
| "epoch": 4.564873417721519, | |
| "grad_norm": 0.23107232316277548, | |
| "learning_rate": 5.925330321673209e-06, | |
| "loss": 0.5695, | |
| "num_tokens": 3023386561.0, | |
| "step": 2885 | |
| }, | |
| { | |
| "epoch": 4.572784810126582, | |
| "grad_norm": 0.2134849519354519, | |
| "learning_rate": 5.892210000762978e-06, | |
| "loss": 0.5669, | |
| "num_tokens": 3028629441.0, | |
| "step": 2890 | |
| }, | |
| { | |
| "epoch": 4.580696202531645, | |
| "grad_norm": 0.22796244574283725, | |
| "learning_rate": 5.859681279270323e-06, | |
| "loss": 0.5737, | |
| "num_tokens": 3033867096.0, | |
| "step": 2895 | |
| }, | |
| { | |
| "epoch": 4.588607594936709, | |
| "grad_norm": 0.2241995743982485, | |
| "learning_rate": 5.827745047798706e-06, | |
| "loss": 0.574, | |
| "num_tokens": 3039109976.0, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 4.5965189873417724, | |
| "grad_norm": 0.2432096355202257, | |
| "learning_rate": 5.796402180729816e-06, | |
| "loss": 0.5727, | |
| "num_tokens": 3044352856.0, | |
| "step": 2905 | |
| }, | |
| { | |
| "epoch": 4.6044303797468356, | |
| "grad_norm": 0.22578199424608314, | |
| "learning_rate": 5.7656535361996335e-06, | |
| "loss": 0.5732, | |
| "num_tokens": 3049595736.0, | |
| "step": 2910 | |
| }, | |
| { | |
| "epoch": 4.612341772151899, | |
| "grad_norm": 0.237878631640341, | |
| "learning_rate": 5.735499956074914e-06, | |
| "loss": 0.5694, | |
| "num_tokens": 3054838616.0, | |
| "step": 2915 | |
| }, | |
| { | |
| "epoch": 4.620253164556962, | |
| "grad_norm": 0.20655029863391322, | |
| "learning_rate": 5.705942265930159e-06, | |
| "loss": 0.568, | |
| "num_tokens": 3060076579.0, | |
| "step": 2920 | |
| }, | |
| { | |
| "epoch": 4.628164556962025, | |
| "grad_norm": 0.2102514850204724, | |
| "learning_rate": 5.676981275025011e-06, | |
| "loss": 0.5716, | |
| "num_tokens": 3065319459.0, | |
| "step": 2925 | |
| }, | |
| { | |
| "epoch": 4.636075949367089, | |
| "grad_norm": 0.21521201282981267, | |
| "learning_rate": 5.648617776282077e-06, | |
| "loss": 0.5727, | |
| "num_tokens": 3070562339.0, | |
| "step": 2930 | |
| }, | |
| { | |
| "epoch": 4.643987341772152, | |
| "grad_norm": 0.22220846495501412, | |
| "learning_rate": 5.620852546265245e-06, | |
| "loss": 0.5734, | |
| "num_tokens": 3075805219.0, | |
| "step": 2935 | |
| }, | |
| { | |
| "epoch": 4.651898734177215, | |
| "grad_norm": 0.20514030112007264, | |
| "learning_rate": 5.5936863451584056e-06, | |
| "loss": 0.5746, | |
| "num_tokens": 3081039464.0, | |
| "step": 2940 | |
| }, | |
| { | |
| "epoch": 4.659810126582278, | |
| "grad_norm": 0.1984930661211808, | |
| "learning_rate": 5.56711991674465e-06, | |
| "loss": 0.5775, | |
| "num_tokens": 3086282344.0, | |
| "step": 2945 | |
| }, | |
| { | |
| "epoch": 4.667721518987342, | |
| "grad_norm": 0.21848535912224024, | |
| "learning_rate": 5.54115398838589e-06, | |
| "loss": 0.5747, | |
| "num_tokens": 3091525224.0, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 4.675632911392405, | |
| "grad_norm": 0.21623565097615416, | |
| "learning_rate": 5.515789271002967e-06, | |
| "loss": 0.572, | |
| "num_tokens": 3096751361.0, | |
| "step": 2955 | |
| }, | |
| { | |
| "epoch": 4.6835443037974684, | |
| "grad_norm": 0.2067122434127961, | |
| "learning_rate": 5.491026459056163e-06, | |
| "loss": 0.5702, | |
| "num_tokens": 3101978695.0, | |
| "step": 2960 | |
| }, | |
| { | |
| "epoch": 4.6914556962025316, | |
| "grad_norm": 0.23059832785126186, | |
| "learning_rate": 5.466866230526202e-06, | |
| "loss": 0.5763, | |
| "num_tokens": 3107221575.0, | |
| "step": 2965 | |
| }, | |
| { | |
| "epoch": 4.699367088607595, | |
| "grad_norm": 0.21776141057424536, | |
| "learning_rate": 5.443309246895688e-06, | |
| "loss": 0.5724, | |
| "num_tokens": 3112464455.0, | |
| "step": 2970 | |
| }, | |
| { | |
| "epoch": 4.707278481012658, | |
| "grad_norm": 0.2086382988283168, | |
| "learning_rate": 5.420356153130986e-06, | |
| "loss": 0.5756, | |
| "num_tokens": 3117704332.0, | |
| "step": 2975 | |
| }, | |
| { | |
| "epoch": 4.715189873417722, | |
| "grad_norm": 0.22426323834508063, | |
| "learning_rate": 5.398007577664566e-06, | |
| "loss": 0.5766, | |
| "num_tokens": 3122940201.0, | |
| "step": 2980 | |
| }, | |
| { | |
| "epoch": 4.723101265822785, | |
| "grad_norm": 0.2134984091805231, | |
| "learning_rate": 5.376264132377806e-06, | |
| "loss": 0.5715, | |
| "num_tokens": 3128183081.0, | |
| "step": 2985 | |
| }, | |
| { | |
| "epoch": 4.731012658227848, | |
| "grad_norm": 0.2078865591432323, | |
| "learning_rate": 5.355126412584226e-06, | |
| "loss": 0.5796, | |
| "num_tokens": 3133425961.0, | |
| "step": 2990 | |
| }, | |
| { | |
| "epoch": 4.738924050632911, | |
| "grad_norm": 0.21033825281525734, | |
| "learning_rate": 5.334594997013194e-06, | |
| "loss": 0.5722, | |
| "num_tokens": 3138668841.0, | |
| "step": 2995 | |
| }, | |
| { | |
| "epoch": 4.746835443037975, | |
| "grad_norm": 0.20218242486649357, | |
| "learning_rate": 5.314670447794094e-06, | |
| "loss": 0.5697, | |
| "num_tokens": 3143911721.0, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 4.754746835443038, | |
| "grad_norm": 0.20528211055221243, | |
| "learning_rate": 5.295353310440908e-06, | |
| "loss": 0.5775, | |
| "num_tokens": 3149154601.0, | |
| "step": 3005 | |
| }, | |
| { | |
| "epoch": 4.762658227848101, | |
| "grad_norm": 0.20386433324381634, | |
| "learning_rate": 5.276644113837305e-06, | |
| "loss": 0.5703, | |
| "num_tokens": 3154397481.0, | |
| "step": 3010 | |
| }, | |
| { | |
| "epoch": 4.7705696202531644, | |
| "grad_norm": 0.21879527292522297, | |
| "learning_rate": 5.258543370222146e-06, | |
| "loss": 0.5739, | |
| "num_tokens": 3159640361.0, | |
| "step": 3015 | |
| }, | |
| { | |
| "epoch": 4.7784810126582276, | |
| "grad_norm": 0.238651659867707, | |
| "learning_rate": 5.241051575175476e-06, | |
| "loss": 0.567, | |
| "num_tokens": 3164869054.0, | |
| "step": 3020 | |
| }, | |
| { | |
| "epoch": 4.786392405063291, | |
| "grad_norm": 0.21930826191732458, | |
| "learning_rate": 5.2241692076049294e-06, | |
| "loss": 0.5713, | |
| "num_tokens": 3170097693.0, | |
| "step": 3025 | |
| }, | |
| { | |
| "epoch": 4.794303797468355, | |
| "grad_norm": 0.24549638164106027, | |
| "learning_rate": 5.207896729732644e-06, | |
| "loss": 0.5784, | |
| "num_tokens": 3175338802.0, | |
| "step": 3030 | |
| }, | |
| { | |
| "epoch": 4.802215189873418, | |
| "grad_norm": 0.20497121752386122, | |
| "learning_rate": 5.1922345870825936e-06, | |
| "loss": 0.5718, | |
| "num_tokens": 3180581682.0, | |
| "step": 3035 | |
| }, | |
| { | |
| "epoch": 4.810126582278481, | |
| "grad_norm": 0.21556589673067728, | |
| "learning_rate": 5.1771832084683874e-06, | |
| "loss": 0.5755, | |
| "num_tokens": 3185824562.0, | |
| "step": 3040 | |
| }, | |
| { | |
| "epoch": 4.818037974683544, | |
| "grad_norm": 0.2177509045329286, | |
| "learning_rate": 5.162743005981538e-06, | |
| "loss": 0.5693, | |
| "num_tokens": 3191054329.0, | |
| "step": 3045 | |
| }, | |
| { | |
| "epoch": 4.825949367088608, | |
| "grad_norm": 0.22258143026660726, | |
| "learning_rate": 5.148914374980175e-06, | |
| "loss": 0.5697, | |
| "num_tokens": 3196297209.0, | |
| "step": 3050 | |
| }, | |
| { | |
| "epoch": 4.833860759493671, | |
| "grad_norm": 0.21877472366506615, | |
| "learning_rate": 5.135697694078209e-06, | |
| "loss": 0.5732, | |
| "num_tokens": 3201522963.0, | |
| "step": 3055 | |
| }, | |
| { | |
| "epoch": 4.841772151898734, | |
| "grad_norm": 0.2194924983288656, | |
| "learning_rate": 5.123093325134992e-06, | |
| "loss": 0.577, | |
| "num_tokens": 3206765843.0, | |
| "step": 3060 | |
| }, | |
| { | |
| "epoch": 4.849683544303797, | |
| "grad_norm": 0.24236174132097704, | |
| "learning_rate": 5.111101613245384e-06, | |
| "loss": 0.5734, | |
| "num_tokens": 3212008723.0, | |
| "step": 3065 | |
| }, | |
| { | |
| "epoch": 4.8575949367088604, | |
| "grad_norm": 0.22136019033385568, | |
| "learning_rate": 5.099722886730315e-06, | |
| "loss": 0.5738, | |
| "num_tokens": 3217244067.0, | |
| "step": 3070 | |
| }, | |
| { | |
| "epoch": 4.865506329113924, | |
| "grad_norm": 0.21908628318619908, | |
| "learning_rate": 5.088957457127804e-06, | |
| "loss": 0.5657, | |
| "num_tokens": 3222479254.0, | |
| "step": 3075 | |
| }, | |
| { | |
| "epoch": 4.8734177215189876, | |
| "grad_norm": 0.23149848485508415, | |
| "learning_rate": 5.078805619184415e-06, | |
| "loss": 0.5762, | |
| "num_tokens": 3227722134.0, | |
| "step": 3080 | |
| }, | |
| { | |
| "epoch": 4.881329113924051, | |
| "grad_norm": 0.21576735952327258, | |
| "learning_rate": 5.069267650847191e-06, | |
| "loss": 0.5687, | |
| "num_tokens": 3232960340.0, | |
| "step": 3085 | |
| }, | |
| { | |
| "epoch": 4.889240506329114, | |
| "grad_norm": 0.21146979612133376, | |
| "learning_rate": 5.060343813256054e-06, | |
| "loss": 0.5749, | |
| "num_tokens": 3238203220.0, | |
| "step": 3090 | |
| }, | |
| { | |
| "epoch": 4.897151898734177, | |
| "grad_norm": 0.22343231609158218, | |
| "learning_rate": 5.052034350736642e-06, | |
| "loss": 0.577, | |
| "num_tokens": 3243446100.0, | |
| "step": 3095 | |
| }, | |
| { | |
| "epoch": 4.905063291139241, | |
| "grad_norm": 0.21750437307740444, | |
| "learning_rate": 5.044339490793628e-06, | |
| "loss": 0.5746, | |
| "num_tokens": 3248682414.0, | |
| "step": 3100 | |
| }, | |
| { | |
| "epoch": 4.912974683544304, | |
| "grad_norm": 0.2040207080561066, | |
| "learning_rate": 5.037259444104488e-06, | |
| "loss": 0.5752, | |
| "num_tokens": 3253925294.0, | |
| "step": 3105 | |
| }, | |
| { | |
| "epoch": 4.920886075949367, | |
| "grad_norm": 0.21492843271025994, | |
| "learning_rate": 5.030794404513734e-06, | |
| "loss": 0.5723, | |
| "num_tokens": 3259168174.0, | |
| "step": 3110 | |
| }, | |
| { | |
| "epoch": 4.92879746835443, | |
| "grad_norm": 0.22472996856568292, | |
| "learning_rate": 5.024944549027605e-06, | |
| "loss": 0.5757, | |
| "num_tokens": 3264411054.0, | |
| "step": 3115 | |
| }, | |
| { | |
| "epoch": 4.936708860759493, | |
| "grad_norm": 0.2105235983069695, | |
| "learning_rate": 5.019710037809223e-06, | |
| "loss": 0.5764, | |
| "num_tokens": 3269653934.0, | |
| "step": 3120 | |
| }, | |
| { | |
| "epoch": 4.944620253164557, | |
| "grad_norm": 0.20160858018191585, | |
| "learning_rate": 5.015091014174209e-06, | |
| "loss": 0.5676, | |
| "num_tokens": 3274885682.0, | |
| "step": 3125 | |
| }, | |
| { | |
| "epoch": 4.9525316455696204, | |
| "grad_norm": 0.2079284452125249, | |
| "learning_rate": 5.0110876045867496e-06, | |
| "loss": 0.5662, | |
| "num_tokens": 3280128562.0, | |
| "step": 3130 | |
| }, | |
| { | |
| "epoch": 4.9604430379746836, | |
| "grad_norm": 0.21834231623167524, | |
| "learning_rate": 5.007699918656152e-06, | |
| "loss": 0.5719, | |
| "num_tokens": 3285371442.0, | |
| "step": 3135 | |
| }, | |
| { | |
| "epoch": 4.968354430379747, | |
| "grad_norm": 0.21543773946569159, | |
| "learning_rate": 5.004928049133823e-06, | |
| "loss": 0.5694, | |
| "num_tokens": 3290614322.0, | |
| "step": 3140 | |
| }, | |
| { | |
| "epoch": 4.97626582278481, | |
| "grad_norm": 0.2150336576277292, | |
| "learning_rate": 5.002772071910747e-06, | |
| "loss": 0.5747, | |
| "num_tokens": 3295843098.0, | |
| "step": 3145 | |
| }, | |
| { | |
| "epoch": 4.984177215189874, | |
| "grad_norm": 0.20873534752474804, | |
| "learning_rate": 5.0012320460153974e-06, | |
| "loss": 0.5696, | |
| "num_tokens": 3301070530.0, | |
| "step": 3150 | |
| }, | |
| { | |
| "epoch": 4.992088607594937, | |
| "grad_norm": 0.21825458015635657, | |
| "learning_rate": 5.000308013612126e-06, | |
| "loss": 0.5762, | |
| "num_tokens": 3306313410.0, | |
| "step": 3155 | |
| }, | |
| { | |
| "epoch": 5.0, | |
| "grad_norm": 0.2206973186589871, | |
| "learning_rate": 5e-06, | |
| "loss": 0.5717, | |
| "num_tokens": 3311556290.0, | |
| "step": 3160 | |
| }, | |
| { | |
| "epoch": 5.0, | |
| "step": 3160, | |
| "total_flos": 2880677515100160.0, | |
| "train_loss": 0.6431828314744974, | |
| "train_runtime": 46582.8084, | |
| "train_samples_per_second": 4.34, | |
| "train_steps_per_second": 0.068 | |
| } | |
| ], | |
| "logging_steps": 5, | |
| "max_steps": 3160, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 5, | |
| "save_steps": 100, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 2880677515100160.0, | |
| "train_batch_size": 8, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |