{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.5076142131979695,
  "eval_steps": 500,
  "global_step": 1000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.005076142131979695,
      "grad_norm": 3.015625,
      "learning_rate": 3.384094754653131e-07,
      "loss": 3.4275,
      "step": 10
    },
    {
      "epoch": 0.01015228426395939,
      "grad_norm": 3.1875,
      "learning_rate": 6.768189509306262e-07,
      "loss": 3.5207,
      "step": 20
    },
    {
      "epoch": 0.015228426395939087,
      "grad_norm": 3.3125,
      "learning_rate": 1.0152284263959392e-06,
      "loss": 3.4307,
      "step": 30
    },
    {
      "epoch": 0.02030456852791878,
      "grad_norm": 3.078125,
      "learning_rate": 1.3536379018612523e-06,
      "loss": 3.5356,
      "step": 40
    },
    {
      "epoch": 0.025380710659898477,
      "grad_norm": 2.84375,
      "learning_rate": 1.6920473773265652e-06,
      "loss": 3.4517,
      "step": 50
    },
    {
      "epoch": 0.030456852791878174,
      "grad_norm": 2.6875,
      "learning_rate": 2.0304568527918785e-06,
      "loss": 3.4387,
      "step": 60
    },
    {
      "epoch": 0.03553299492385787,
      "grad_norm": 3.484375,
      "learning_rate": 2.3688663282571915e-06,
      "loss": 3.5372,
      "step": 70
    },
    {
      "epoch": 0.04060913705583756,
      "grad_norm": 3.625,
      "learning_rate": 2.7072758037225046e-06,
      "loss": 3.4386,
      "step": 80
    },
    {
      "epoch": 0.04568527918781726,
      "grad_norm": 3.25,
      "learning_rate": 3.0456852791878177e-06,
      "loss": 3.447,
      "step": 90
    },
    {
      "epoch": 0.050761421319796954,
      "grad_norm": 3.21875,
      "learning_rate": 3.3840947546531303e-06,
      "loss": 3.4071,
      "step": 100
    },
    {
      "epoch": 0.05583756345177665,
      "grad_norm": 2.625,
      "learning_rate": 3.722504230118444e-06,
      "loss": 3.4005,
      "step": 110
    },
    {
      "epoch": 0.06091370558375635,
      "grad_norm": 3.5,
      "learning_rate": 4.060913705583757e-06,
      "loss": 3.446,
      "step": 120
    },
    {
      "epoch": 0.06598984771573604,
      "grad_norm": 2.609375,
      "learning_rate": 4.39932318104907e-06,
      "loss": 3.5267,
      "step": 130
    },
    {
      "epoch": 0.07106598984771574,
      "grad_norm": 2.59375,
      "learning_rate": 4.737732656514383e-06,
      "loss": 3.4858,
      "step": 140
    },
    {
      "epoch": 0.07614213197969544,
      "grad_norm": 2.265625,
      "learning_rate": 5.076142131979695e-06,
      "loss": 3.51,
      "step": 150
    },
    {
      "epoch": 0.08121827411167512,
      "grad_norm": 1.8515625,
      "learning_rate": 5.414551607445009e-06,
      "loss": 3.4237,
      "step": 160
    },
    {
      "epoch": 0.08629441624365482,
      "grad_norm": 1.71875,
      "learning_rate": 5.752961082910322e-06,
      "loss": 3.4005,
      "step": 170
    },
    {
      "epoch": 0.09137055837563451,
      "grad_norm": 1.8828125,
      "learning_rate": 6.091370558375635e-06,
      "loss": 3.3946,
      "step": 180
    },
    {
      "epoch": 0.09644670050761421,
      "grad_norm": 1.90625,
      "learning_rate": 6.429780033840948e-06,
      "loss": 3.3383,
      "step": 190
    },
    {
      "epoch": 0.10152284263959391,
      "grad_norm": 1.703125,
      "learning_rate": 6.768189509306261e-06,
      "loss": 3.28,
      "step": 200
    },
    {
      "epoch": 0.1065989847715736,
      "grad_norm": 1.6171875,
      "learning_rate": 7.106598984771575e-06,
      "loss": 3.3579,
      "step": 210
    },
    {
      "epoch": 0.1116751269035533,
      "grad_norm": 1.4921875,
      "learning_rate": 7.445008460236888e-06,
      "loss": 3.3075,
      "step": 220
    },
    {
      "epoch": 0.116751269035533,
      "grad_norm": 1.3984375,
      "learning_rate": 7.7834179357022e-06,
      "loss": 3.315,
      "step": 230
    },
    {
      "epoch": 0.1218274111675127,
      "grad_norm": 1.125,
      "learning_rate": 8.121827411167514e-06,
      "loss": 3.2354,
      "step": 240
    },
    {
      "epoch": 0.12690355329949238,
      "grad_norm": 1.140625,
      "learning_rate": 8.460236886632826e-06,
      "loss": 3.2279,
      "step": 250
    },
    {
      "epoch": 0.1319796954314721,
      "grad_norm": 1.453125,
      "learning_rate": 8.79864636209814e-06,
      "loss": 3.2781,
      "step": 260
    },
    {
      "epoch": 0.13705583756345177,
      "grad_norm": 1.3828125,
      "learning_rate": 9.137055837563452e-06,
      "loss": 3.3718,
      "step": 270
    },
    {
      "epoch": 0.14213197969543148,
      "grad_norm": 1.0703125,
      "learning_rate": 9.475465313028766e-06,
      "loss": 3.2864,
      "step": 280
    },
    {
      "epoch": 0.14720812182741116,
      "grad_norm": 1.3984375,
      "learning_rate": 9.813874788494078e-06,
      "loss": 3.2203,
      "step": 290
    },
    {
      "epoch": 0.15228426395939088,
      "grad_norm": 1.0078125,
      "learning_rate": 1.015228426395939e-05,
      "loss": 3.3262,
      "step": 300
    },
    {
      "epoch": 0.15736040609137056,
      "grad_norm": 1.390625,
      "learning_rate": 1.0490693739424704e-05,
      "loss": 3.2488,
      "step": 310
    },
    {
      "epoch": 0.16243654822335024,
      "grad_norm": 1.4140625,
      "learning_rate": 1.0829103214890018e-05,
      "loss": 3.2138,
      "step": 320
    },
    {
      "epoch": 0.16751269035532995,
      "grad_norm": 1.3359375,
      "learning_rate": 1.116751269035533e-05,
      "loss": 3.1632,
      "step": 330
    },
    {
      "epoch": 0.17258883248730963,
      "grad_norm": 1.28125,
      "learning_rate": 1.1505922165820645e-05,
      "loss": 3.255,
      "step": 340
    },
    {
      "epoch": 0.17766497461928935,
      "grad_norm": 1.265625,
      "learning_rate": 1.1844331641285958e-05,
      "loss": 3.1887,
      "step": 350
    },
    {
      "epoch": 0.18274111675126903,
      "grad_norm": 1.3125,
      "learning_rate": 1.218274111675127e-05,
      "loss": 3.2509,
      "step": 360
    },
    {
      "epoch": 0.18781725888324874,
      "grad_norm": 1.1953125,
      "learning_rate": 1.2521150592216583e-05,
      "loss": 3.1895,
      "step": 370
    },
    {
      "epoch": 0.19289340101522842,
      "grad_norm": 2.078125,
      "learning_rate": 1.2859560067681895e-05,
      "loss": 3.1627,
      "step": 380
    },
    {
      "epoch": 0.19796954314720813,
      "grad_norm": 1.078125,
      "learning_rate": 1.3197969543147209e-05,
      "loss": 3.2273,
      "step": 390
    },
    {
      "epoch": 0.20304568527918782,
      "grad_norm": 1.234375,
      "learning_rate": 1.3536379018612521e-05,
      "loss": 3.111,
      "step": 400
    },
    {
      "epoch": 0.20812182741116753,
      "grad_norm": 1.5859375,
      "learning_rate": 1.3874788494077835e-05,
      "loss": 3.1964,
      "step": 410
    },
    {
      "epoch": 0.2131979695431472,
      "grad_norm": 1.359375,
      "learning_rate": 1.421319796954315e-05,
      "loss": 3.1705,
      "step": 420
    },
    {
      "epoch": 0.2182741116751269,
      "grad_norm": 1.3515625,
      "learning_rate": 1.4551607445008461e-05,
      "loss": 3.0863,
      "step": 430
    },
    {
      "epoch": 0.2233502538071066,
      "grad_norm": 1.2890625,
      "learning_rate": 1.4890016920473775e-05,
      "loss": 3.2557,
      "step": 440
    },
    {
      "epoch": 0.22842639593908629,
      "grad_norm": 1.4296875,
      "learning_rate": 1.5228426395939086e-05,
      "loss": 3.1892,
      "step": 450
    },
    {
      "epoch": 0.233502538071066,
      "grad_norm": 1.15625,
      "learning_rate": 1.55668358714044e-05,
      "loss": 3.1675,
      "step": 460
    },
    {
      "epoch": 0.23857868020304568,
      "grad_norm": 1.203125,
      "learning_rate": 1.5905245346869714e-05,
      "loss": 3.1534,
      "step": 470
    },
    {
      "epoch": 0.2436548223350254,
      "grad_norm": 1.09375,
      "learning_rate": 1.6243654822335028e-05,
      "loss": 3.252,
      "step": 480
    },
    {
      "epoch": 0.24873096446700507,
      "grad_norm": 1.1484375,
      "learning_rate": 1.658206429780034e-05,
      "loss": 3.2352,
      "step": 490
    },
    {
      "epoch": 0.25380710659898476,
      "grad_norm": 1.109375,
      "learning_rate": 1.6920473773265652e-05,
      "loss": 3.3122,
      "step": 500
    },
    {
      "epoch": 0.25888324873096447,
      "grad_norm": 1.4765625,
      "learning_rate": 1.7258883248730966e-05,
      "loss": 3.1933,
      "step": 510
    },
    {
      "epoch": 0.2639593908629442,
      "grad_norm": 1.265625,
      "learning_rate": 1.759729272419628e-05,
      "loss": 3.2737,
      "step": 520
    },
    {
      "epoch": 0.26903553299492383,
      "grad_norm": 0.97265625,
      "learning_rate": 1.793570219966159e-05,
      "loss": 3.1775,
      "step": 530
    },
    {
      "epoch": 0.27411167512690354,
      "grad_norm": 1.203125,
      "learning_rate": 1.8274111675126904e-05,
      "loss": 3.1394,
      "step": 540
    },
    {
      "epoch": 0.27918781725888325,
      "grad_norm": 1.6171875,
      "learning_rate": 1.8612521150592218e-05,
      "loss": 3.1789,
      "step": 550
    },
    {
      "epoch": 0.28426395939086296,
      "grad_norm": 1.21875,
      "learning_rate": 1.8950930626057532e-05,
      "loss": 3.1097,
      "step": 560
    },
    {
      "epoch": 0.2893401015228426,
      "grad_norm": 1.21875,
      "learning_rate": 1.9289340101522843e-05,
      "loss": 3.2086,
      "step": 570
    },
    {
      "epoch": 0.29441624365482233,
      "grad_norm": 1.25,
      "learning_rate": 1.9627749576988157e-05,
      "loss": 3.1974,
      "step": 580
    },
    {
      "epoch": 0.29949238578680204,
      "grad_norm": 1.046875,
      "learning_rate": 1.996615905245347e-05,
      "loss": 3.0517,
      "step": 590
    },
    {
      "epoch": 0.30456852791878175,
      "grad_norm": 1.1640625,
      "learning_rate": 1.996615905245347e-05,
      "loss": 3.1709,
      "step": 600
    },
    {
      "epoch": 0.3096446700507614,
      "grad_norm": 1.1484375,
      "learning_rate": 1.992855799962399e-05,
      "loss": 3.1709,
      "step": 610
    },
    {
      "epoch": 0.3147208121827411,
      "grad_norm": 1.5,
      "learning_rate": 1.9890956946794512e-05,
      "loss": 3.159,
      "step": 620
    },
    {
      "epoch": 0.3197969543147208,
      "grad_norm": 1.234375,
      "learning_rate": 1.9853355893965033e-05,
      "loss": 3.2162,
      "step": 630
    },
    {
      "epoch": 0.3248730964467005,
      "grad_norm": 1.21875,
      "learning_rate": 1.9815754841135553e-05,
      "loss": 3.1009,
      "step": 640
    },
    {
      "epoch": 0.3299492385786802,
      "grad_norm": 1.5703125,
      "learning_rate": 1.9778153788306074e-05,
      "loss": 3.1409,
      "step": 650
    },
    {
      "epoch": 0.3350253807106599,
      "grad_norm": 1.1484375,
      "learning_rate": 1.9740552735476595e-05,
      "loss": 3.1621,
      "step": 660
    },
    {
      "epoch": 0.3401015228426396,
      "grad_norm": 1.3359375,
      "learning_rate": 1.9702951682647115e-05,
      "loss": 3.0988,
      "step": 670
    },
    {
      "epoch": 0.34517766497461927,
      "grad_norm": 1.1875,
      "learning_rate": 1.9665350629817636e-05,
      "loss": 3.2079,
      "step": 680
    },
    {
      "epoch": 0.350253807106599,
      "grad_norm": 1.25,
      "learning_rate": 1.9627749576988157e-05,
      "loss": 3.1075,
      "step": 690
    },
    {
      "epoch": 0.3553299492385787,
      "grad_norm": 1.3515625,
      "learning_rate": 1.9590148524158677e-05,
      "loss": 3.1286,
      "step": 700
    },
    {
      "epoch": 0.3604060913705584,
      "grad_norm": 1.2578125,
      "learning_rate": 1.9552547471329198e-05,
      "loss": 3.1015,
      "step": 710
    },
    {
      "epoch": 0.36548223350253806,
      "grad_norm": 1.09375,
      "learning_rate": 1.9514946418499722e-05,
      "loss": 3.1522,
      "step": 720
    },
    {
      "epoch": 0.37055837563451777,
      "grad_norm": 1.0078125,
      "learning_rate": 1.947734536567024e-05,
      "loss": 3.1892,
      "step": 730
    },
    {
      "epoch": 0.3756345177664975,
      "grad_norm": 1.046875,
      "learning_rate": 1.943974431284076e-05,
      "loss": 3.0202,
      "step": 740
    },
    {
      "epoch": 0.38071065989847713,
      "grad_norm": 1.0625,
      "learning_rate": 1.940214326001128e-05,
      "loss": 3.0803,
      "step": 750
    },
    {
      "epoch": 0.38578680203045684,
      "grad_norm": 1.671875,
      "learning_rate": 1.93645422071818e-05,
      "loss": 3.1514,
      "step": 760
    },
    {
      "epoch": 0.39086294416243655,
      "grad_norm": 1.4140625,
      "learning_rate": 1.9326941154352322e-05,
      "loss": 3.1203,
      "step": 770
    },
    {
      "epoch": 0.39593908629441626,
      "grad_norm": 1.1796875,
      "learning_rate": 1.9289340101522843e-05,
      "loss": 3.1549,
      "step": 780
    },
    {
      "epoch": 0.4010152284263959,
      "grad_norm": 1.1796875,
      "learning_rate": 1.9251739048693367e-05,
      "loss": 3.0981,
      "step": 790
    },
    {
      "epoch": 0.40609137055837563,
      "grad_norm": 1.5078125,
      "learning_rate": 1.9214137995863887e-05,
      "loss": 3.1134,
      "step": 800
    },
    {
      "epoch": 0.41116751269035534,
      "grad_norm": 1.1640625,
      "learning_rate": 1.9176536943034408e-05,
      "loss": 3.177,
      "step": 810
    },
    {
      "epoch": 0.41624365482233505,
      "grad_norm": 1.0703125,
      "learning_rate": 1.913893589020493e-05,
      "loss": 3.0828,
      "step": 820
    },
    {
      "epoch": 0.4213197969543147,
      "grad_norm": 1.734375,
      "learning_rate": 1.9101334837375446e-05,
      "loss": 3.22,
      "step": 830
    },
    {
      "epoch": 0.4263959390862944,
      "grad_norm": 1.4296875,
      "learning_rate": 1.9063733784545967e-05,
      "loss": 3.1142,
      "step": 840
    },
    {
      "epoch": 0.43147208121827413,
      "grad_norm": 1.0546875,
      "learning_rate": 1.9026132731716487e-05,
      "loss": 3.1719,
      "step": 850
    },
    {
      "epoch": 0.4365482233502538,
      "grad_norm": 1.546875,
      "learning_rate": 1.898853167888701e-05,
      "loss": 3.0995,
      "step": 860
    },
    {
      "epoch": 0.4416243654822335,
      "grad_norm": 1.390625,
      "learning_rate": 1.8950930626057532e-05,
      "loss": 3.1097,
      "step": 870
    },
    {
      "epoch": 0.4467005076142132,
      "grad_norm": 1.125,
      "learning_rate": 1.8913329573228053e-05,
      "loss": 3.0527,
      "step": 880
    },
    {
      "epoch": 0.4517766497461929,
      "grad_norm": 1.125,
      "learning_rate": 1.8875728520398574e-05,
      "loss": 3.1668,
      "step": 890
    },
    {
      "epoch": 0.45685279187817257,
      "grad_norm": 0.921875,
      "learning_rate": 1.8838127467569094e-05,
      "loss": 3.1204,
      "step": 900
    },
    {
      "epoch": 0.4619289340101523,
      "grad_norm": 0.94921875,
      "learning_rate": 1.8800526414739615e-05,
      "loss": 3.1525,
      "step": 910
    },
    {
      "epoch": 0.467005076142132,
      "grad_norm": 1.03125,
      "learning_rate": 1.8762925361910136e-05,
      "loss": 3.1031,
      "step": 920
    },
    {
      "epoch": 0.4720812182741117,
      "grad_norm": 1.1875,
      "learning_rate": 1.8725324309080656e-05,
      "loss": 3.1314,
      "step": 930
    },
    {
      "epoch": 0.47715736040609136,
      "grad_norm": 1.3984375,
      "learning_rate": 1.8687723256251177e-05,
      "loss": 3.1854,
      "step": 940
    },
    {
      "epoch": 0.48223350253807107,
      "grad_norm": 1.265625,
      "learning_rate": 1.8650122203421698e-05,
      "loss": 3.1285,
      "step": 950
    },
    {
      "epoch": 0.4873096446700508,
      "grad_norm": 1.7109375,
      "learning_rate": 1.8612521150592218e-05,
      "loss": 3.0433,
      "step": 960
    },
    {
      "epoch": 0.49238578680203043,
      "grad_norm": 1.234375,
      "learning_rate": 1.857492009776274e-05,
      "loss": 3.1891,
      "step": 970
    },
    {
      "epoch": 0.49746192893401014,
      "grad_norm": 1.25,
      "learning_rate": 1.853731904493326e-05,
      "loss": 3.1181,
      "step": 980
    },
    {
      "epoch": 0.5025380710659898,
      "grad_norm": 1.0234375,
      "learning_rate": 1.849971799210378e-05,
      "loss": 3.0071,
      "step": 990
    },
    {
      "epoch": 0.5076142131979695,
      "grad_norm": 1.140625,
      "learning_rate": 1.84621169392743e-05,
      "loss": 3.0957,
      "step": 1000
    }
  ],
  "logging_steps": 10,
  "max_steps": 5910,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.40731264008192e+17,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}