{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 655,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 3.0303030303030305e-07,
      "loss": 2.7433,
      "step": 1
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.5151515151515152e-06,
      "loss": 2.672,
      "step": 5
    },
    {
      "epoch": 0.02,
      "learning_rate": 3.0303030303030305e-06,
      "loss": 2.5663,
      "step": 10
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.5454545454545455e-06,
      "loss": 2.6422,
      "step": 15
    },
    {
      "epoch": 0.03,
      "learning_rate": 6.060606060606061e-06,
      "loss": 2.2644,
      "step": 20
    },
    {
      "epoch": 0.04,
      "learning_rate": 7.5757575757575764e-06,
      "loss": 2.5506,
      "step": 25
    },
    {
      "epoch": 0.05,
      "learning_rate": 9.090909090909091e-06,
      "loss": 2.3298,
      "step": 30
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.0606060606060606e-05,
      "loss": 2.1904,
      "step": 35
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.2121212121212122e-05,
      "loss": 2.0981,
      "step": 40
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.3636363636363637e-05,
      "loss": 1.8446,
      "step": 45
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.5151515151515153e-05,
      "loss": 1.8529,
      "step": 50
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 1.7525,
      "step": 55
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.8181818181818182e-05,
      "loss": 1.7954,
      "step": 60
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.96969696969697e-05,
      "loss": 1.6559,
      "step": 65
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9997724155059836e-05,
      "loss": 1.7205,
      "step": 70
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9988480310346606e-05,
      "loss": 1.6599,
      "step": 75
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9972132794589515e-05,
      "loss": 1.4984,
      "step": 80
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.994869323391895e-05,
      "loss": 1.5442,
      "step": 85
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9918178298230955e-05,
      "loss": 1.7301,
      "step": 90
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9880609689331837e-05,
      "loss": 1.5913,
      "step": 95
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9836014125504146e-05,
      "loss": 1.5133,
      "step": 100
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9784423322505e-05,
      "loss": 1.4516,
      "step": 105
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9725873971010256e-05,
      "loss": 1.3623,
      "step": 110
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9660407710520613e-05,
      "loss": 1.3844,
      "step": 115
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.958807109974816e-05,
      "loss": 1.4235,
      "step": 120
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.950891558350446e-05,
      "loss": 1.3142,
      "step": 125
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9422997456113695e-05,
      "loss": 1.3499,
      "step": 130
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9330377821376916e-05,
      "loss": 1.4318,
      "step": 135
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9231122549115854e-05,
      "loss": 1.4776,
      "step": 140
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9125302228327194e-05,
      "loss": 1.4596,
      "step": 145
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.901299211698064e-05,
      "loss": 1.506,
      "step": 150
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.8894272088496488e-05,
      "loss": 1.4941,
      "step": 155
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.8769226574940727e-05,
      "loss": 1.495,
      "step": 160
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.8637944506978093e-05,
      "loss": 1.3905,
      "step": 165
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.8500519250625833e-05,
      "loss": 1.3955,
      "step": 170
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.8357048540853005e-05,
      "loss": 1.4266,
      "step": 175
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.8207634412072765e-05,
      "loss": 1.4394,
      "step": 180
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.8052383125576833e-05,
      "loss": 1.4724,
      "step": 185
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.789140509396394e-05,
      "loss": 1.4783,
      "step": 190
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.772481480261587e-05,
      "loss": 1.4837,
      "step": 195
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.7552730728277052e-05,
      "loss": 1.5081,
      "step": 200
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.7375275254795474e-05,
      "loss": 1.4547,
      "step": 205
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.719257458608498e-05,
      "loss": 1.5518,
      "step": 210
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.700475865637077e-05,
      "loss": 1.4316,
      "step": 215
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.6811961037781936e-05,
      "loss": 1.442,
      "step": 220
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.66143188453568e-05,
      "loss": 1.5635,
      "step": 225
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.6411972639528555e-05,
      "loss": 1.4194,
      "step": 230
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.620506632616061e-05,
      "loss": 1.4804,
      "step": 235
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.5993747054202683e-05,
      "loss": 1.6032,
      "step": 240
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.5778165111040512e-05,
      "loss": 1.2669,
      "step": 245
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.5558473815613476e-05,
      "loss": 1.4045,
      "step": 250
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.5334829409376272e-05,
      "loss": 1.4802,
      "step": 255
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.5107390945182119e-05,
      "loss": 1.5424,
      "step": 260
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.4876320174166544e-05,
      "loss": 1.4566,
      "step": 265
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.464178143071217e-05,
      "loss": 1.5269,
      "step": 270
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.4403941515576344e-05,
      "loss": 1.5352,
      "step": 275
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.416296957726472e-05,
      "loss": 1.3673,
      "step": 280
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.391903699173514e-05,
      "loss": 1.354,
      "step": 285
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.3672317240517388e-05,
      "loss": 1.5526,
      "step": 290
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.3422985787335494e-05,
      "loss": 1.4736,
      "step": 295
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.3171219953320331e-05,
      "loss": 1.4636,
      "step": 300
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.2917198790901231e-05,
      "loss": 1.4855,
      "step": 305
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.2661102956466345e-05,
      "loss": 1.4595,
      "step": 310
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.240311458188229e-05,
      "loss": 1.6073,
      "step": 315
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.2143417144964425e-05,
      "loss": 1.5,
      "step": 320
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.1882195338989959e-05,
      "loss": 1.5174,
      "step": 325
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.1619634941346587e-05,
      "loss": 1.5918,
      "step": 330
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.1355922681410155e-05,
      "loss": 1.4199,
      "step": 335
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.109124610774527e-05,
      "loss": 1.4567,
      "step": 340
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.0825793454723325e-05,
      "loss": 1.3144,
      "step": 345
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.055975350865276e-05,
      "loss": 1.3933,
      "step": 350
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.0293315473516833e-05,
      "loss": 1.5519,
      "step": 355
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.0026668836414325e-05,
      "loss": 1.4626,
      "step": 360
    },
    {
      "epoch": 0.56,
      "learning_rate": 9.760003232798879e-06,
      "loss": 1.428,
      "step": 365
    },
    {
      "epoch": 0.56,
      "learning_rate": 9.493508311612874e-06,
      "loss": 1.3678,
      "step": 370
    },
    {
      "epoch": 0.57,
      "learning_rate": 9.227373600411668e-06,
      "loss": 1.4338,
      "step": 375
    },
    {
      "epoch": 0.58,
      "learning_rate": 8.961788370574184e-06,
      "loss": 1.4409,
      "step": 380
    },
    {
      "epoch": 0.59,
      "learning_rate": 8.6969415026957e-06,
      "loss": 1.3239,
      "step": 385
    },
    {
      "epoch": 0.6,
      "learning_rate": 8.433021352258522e-06,
      "loss": 1.457,
      "step": 390
    },
    {
      "epoch": 0.6,
      "learning_rate": 8.170215615676145e-06,
      "loss": 1.4046,
      "step": 395
    },
    {
      "epoch": 0.61,
      "learning_rate": 7.908711196806131e-06,
      "loss": 1.516,
      "step": 400
    },
    {
      "epoch": 0.62,
      "learning_rate": 7.648694074026616e-06,
      "loss": 1.3609,
      "step": 405
    },
    {
      "epoch": 0.63,
      "learning_rate": 7.390349167971026e-06,
      "loss": 1.3218,
      "step": 410
    },
    {
      "epoch": 0.63,
      "learning_rate": 7.133860210015049e-06,
      "loss": 1.6356,
      "step": 415
    },
    {
      "epoch": 0.64,
      "learning_rate": 6.879409611609394e-06,
      "loss": 1.3087,
      "step": 420
    },
    {
      "epoch": 0.65,
      "learning_rate": 6.627178334551227e-06,
      "loss": 1.3472,
      "step": 425
    },
    {
      "epoch": 0.66,
      "learning_rate": 6.377345762286633e-06,
      "loss": 1.4116,
      "step": 430
    },
    {
      "epoch": 0.66,
      "learning_rate": 6.130089572335536e-06,
      "loss": 1.3421,
      "step": 435
    },
    {
      "epoch": 0.67,
      "learning_rate": 5.885585609929891e-06,
      "loss": 1.2693,
      "step": 440
    },
    {
      "epoch": 0.68,
      "learning_rate": 5.644007762954926e-06,
      "loss": 1.4595,
      "step": 445
    },
    {
      "epoch": 0.69,
      "learning_rate": 5.405527838282458e-06,
      "loss": 1.553,
      "step": 450
    },
    {
      "epoch": 0.69,
      "learning_rate": 5.170315439584212e-06,
      "loss": 1.5539,
      "step": 455
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.938537846711953e-06,
      "loss": 1.4485,
      "step": 460
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.710359896730379e-06,
      "loss": 1.4261,
      "step": 465
    },
    {
      "epoch": 0.72,
      "learning_rate": 4.485943866687217e-06,
      "loss": 1.5028,
      "step": 470
    },
    {
      "epoch": 0.73,
      "learning_rate": 4.2654493582040345e-06,
      "loss": 1.4476,
      "step": 475
    },
    {
      "epoch": 0.73,
      "learning_rate": 4.049033183969697e-06,
      "loss": 1.3764,
      "step": 480
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.836849256217356e-06,
      "loss": 1.4286,
      "step": 485
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.629048477264152e-06,
      "loss": 1.4175,
      "step": 490
    },
    {
      "epoch": 0.76,
      "learning_rate": 3.425778632191551e-06,
      "loss": 1.4036,
      "step": 495
    },
    {
      "epoch": 0.76,
      "learning_rate": 3.2271842837425917e-06,
      "loss": 1.4983,
      "step": 500
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.0334066695108566e-06,
      "loss": 1.5578,
      "step": 505
    },
    {
      "epoch": 0.78,
      "learning_rate": 2.8445836014942073e-06,
      "loss": 1.4271,
      "step": 510
    },
    {
      "epoch": 0.79,
      "learning_rate": 2.6608493680847757e-06,
      "loss": 1.5306,
      "step": 515
    },
    {
      "epoch": 0.79,
      "learning_rate": 2.4823346385648774e-06,
      "loss": 1.4684,
      "step": 520
    },
    {
      "epoch": 0.8,
      "learning_rate": 2.3091663701768165e-06,
      "loss": 1.3496,
      "step": 525
    },
    {
      "epoch": 0.81,
      "learning_rate": 2.141467717832616e-06,
      "loss": 1.4561,
      "step": 530
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.9793579465279245e-06,
      "loss": 1.3217,
      "step": 535
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.8229523465223787e-06,
      "loss": 1.377,
      "step": 540
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.6723621513467381e-06,
      "loss": 1.5578,
      "step": 545
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.5276944586951204e-06,
      "loss": 1.6085,
      "step": 550
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.3890521542585723e-06,
      "loss": 1.4866,
      "step": 555
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.2565338385541792e-06,
      "loss": 1.3809,
      "step": 560
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.1302337568017142e-06,
      "loss": 1.5613,
      "step": 565
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.0102417318977253e-06,
      "loss": 1.3391,
      "step": 570
    },
    {
      "epoch": 0.88,
      "learning_rate": 8.966431005347109e-07,
      "loss": 1.3588,
      "step": 575
    },
    {
      "epoch": 0.89,
      "learning_rate": 7.895186525108411e-07,
      "loss": 1.4464,
      "step": 580
    },
    {
      "epoch": 0.89,
      "learning_rate": 6.889445732733324e-07,
      "loss": 1.5436,
      "step": 585
    },
    {
      "epoch": 0.9,
      "learning_rate": 5.949923897364174e-07,
      "loss": 1.5203,
      "step": 590
    },
    {
      "epoch": 0.91,
      "learning_rate": 5.077289194123624e-07,
      "loss": 1.3951,
      "step": 595
    },
    {
      "epoch": 0.92,
      "learning_rate": 4.272162228917809e-07,
      "loss": 1.4145,
      "step": 600
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.5351155970696603e-07,
      "loss": 1.4482,
      "step": 605
    },
    {
      "epoch": 0.93,
      "learning_rate": 2.8666734760970927e-07,
      "loss": 1.4787,
      "step": 610
    },
    {
      "epoch": 0.94,
      "learning_rate": 2.2673112529249752e-07,
      "loss": 1.4071,
      "step": 615
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.7374551857965882e-07,
      "loss": 1.4387,
      "step": 620
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.277482101124383e-07,
      "loss": 1.3938,
      "step": 625
    },
    {
      "epoch": 0.96,
      "learning_rate": 8.87719125496278e-08,
      "loss": 1.4241,
      "step": 630
    },
    {
      "epoch": 0.97,
      "learning_rate": 5.684434530277005e-08,
      "loss": 1.3593,
      "step": 635
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.198821482248593e-08,
      "loss": 1.4297,
      "step": 640
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.4221198449960727e-08,
      "loss": 1.4025,
      "step": 645
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.555931845053984e-09,
      "loss": 1.4367,
      "step": 650
    },
    {
      "epoch": 1.0,
      "learning_rate": 0.0,
      "loss": 1.5173,
      "step": 655
    },
    {
      "epoch": 1.0,
      "eval_loss": 1.4042837619781494,
      "eval_runtime": 40.6612,
      "eval_samples_per_second": 3.517,
      "eval_steps_per_second": 3.517,
      "step": 655
    },
    {
      "epoch": 1.0,
      "step": 655,
      "total_flos": 5.756872988557312e+16,
      "train_loss": 1.5273605372159536,
      "train_runtime": 1271.7396,
      "train_samples_per_second": 1.03,
      "train_steps_per_second": 0.515
    }
  ],
  "logging_steps": 5,
  "max_steps": 655,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "total_flos": 5.756872988557312e+16,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}