{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 21.05263157894737,
  "eval_steps": 500,
  "global_step": 6000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.08771929824561403,
      "grad_norm": 14.743780136108398,
      "learning_rate": 1.4942583732057417e-05,
      "loss": 0.9366,
      "step": 25
    },
    {
      "epoch": 0.17543859649122806,
      "grad_norm": 22.20017433166504,
      "learning_rate": 1.4882775119617225e-05,
      "loss": 1.2015,
      "step": 50
    },
    {
      "epoch": 0.2631578947368421,
      "grad_norm": 25.090787887573242,
      "learning_rate": 1.4822966507177032e-05,
      "loss": 1.0156,
      "step": 75
    },
    {
      "epoch": 0.3508771929824561,
      "grad_norm": 13.580609321594238,
      "learning_rate": 1.4763157894736842e-05,
      "loss": 1.117,
      "step": 100
    },
    {
      "epoch": 0.43859649122807015,
      "grad_norm": 12.062329292297363,
      "learning_rate": 1.470334928229665e-05,
      "loss": 0.8312,
      "step": 125
    },
    {
      "epoch": 0.5263157894736842,
      "grad_norm": 18.625316619873047,
      "learning_rate": 1.464354066985646e-05,
      "loss": 0.9938,
      "step": 150
    },
    {
      "epoch": 0.6140350877192983,
      "grad_norm": 11.628118515014648,
      "learning_rate": 1.4583732057416269e-05,
      "loss": 0.8922,
      "step": 175
    },
    {
      "epoch": 0.7017543859649122,
      "grad_norm": 12.121639251708984,
      "learning_rate": 1.4523923444976078e-05,
      "loss": 0.9787,
      "step": 200
    },
    {
      "epoch": 0.7894736842105263,
      "grad_norm": 14.091375350952148,
      "learning_rate": 1.4464114832535886e-05,
      "loss": 0.806,
      "step": 225
    },
    {
      "epoch": 0.8771929824561403,
      "grad_norm": 9.919096946716309,
      "learning_rate": 1.4404306220095696e-05,
      "loss": 0.9935,
      "step": 250
    },
    {
      "epoch": 0.9649122807017544,
      "grad_norm": 13.80988597869873,
      "learning_rate": 1.4344497607655503e-05,
      "loss": 0.8116,
      "step": 275
    },
    {
      "epoch": 1.0526315789473684,
      "grad_norm": 13.865740776062012,
      "learning_rate": 1.4284688995215311e-05,
      "loss": 0.7288,
      "step": 300
    },
    {
      "epoch": 1.1403508771929824,
      "grad_norm": 9.76303768157959,
      "learning_rate": 1.4224880382775121e-05,
      "loss": 0.8283,
      "step": 325
    },
    {
      "epoch": 1.2280701754385965,
      "grad_norm": 9.364381790161133,
      "learning_rate": 1.4165071770334929e-05,
      "loss": 0.6827,
      "step": 350
    },
    {
      "epoch": 1.3157894736842106,
      "grad_norm": 13.79258918762207,
      "learning_rate": 1.4105263157894738e-05,
      "loss": 0.7745,
      "step": 375
    },
    {
      "epoch": 1.4035087719298245,
      "grad_norm": 9.642690658569336,
      "learning_rate": 1.4045454545454546e-05,
      "loss": 0.7514,
      "step": 400
    },
    {
      "epoch": 1.4912280701754386,
      "grad_norm": 14.490239143371582,
      "learning_rate": 1.3985645933014356e-05,
      "loss": 0.6594,
      "step": 425
    },
    {
      "epoch": 1.5789473684210527,
      "grad_norm": 13.364012718200684,
      "learning_rate": 1.3925837320574163e-05,
      "loss": 0.6036,
      "step": 450
    },
    {
      "epoch": 1.6666666666666665,
      "grad_norm": 6.50977087020874,
      "learning_rate": 1.3866028708133971e-05,
      "loss": 0.7616,
      "step": 475
    },
    {
      "epoch": 1.7543859649122808,
      "grad_norm": 30.46417808532715,
      "learning_rate": 1.380622009569378e-05,
      "loss": 0.6735,
      "step": 500
    },
    {
      "epoch": 1.8421052631578947,
      "grad_norm": 17.846593856811523,
      "learning_rate": 1.3746411483253589e-05,
      "loss": 0.5902,
      "step": 525
    },
    {
      "epoch": 1.9298245614035088,
      "grad_norm": 19.83995819091797,
      "learning_rate": 1.3686602870813398e-05,
      "loss": 0.5928,
      "step": 550
    },
    {
      "epoch": 2.017543859649123,
      "grad_norm": 6.053647994995117,
      "learning_rate": 1.3626794258373206e-05,
      "loss": 0.5816,
      "step": 575
    },
    {
      "epoch": 2.1052631578947367,
      "grad_norm": 9.594230651855469,
      "learning_rate": 1.3566985645933015e-05,
      "loss": 0.488,
      "step": 600
    },
    {
      "epoch": 2.192982456140351,
      "grad_norm": 13.84262752532959,
      "learning_rate": 1.3507177033492823e-05,
      "loss": 0.568,
      "step": 625
    },
    {
      "epoch": 2.280701754385965,
      "grad_norm": 10.86662483215332,
      "learning_rate": 1.3447368421052633e-05,
      "loss": 0.4017,
      "step": 650
    },
    {
      "epoch": 2.3684210526315788,
      "grad_norm": 16.06069564819336,
      "learning_rate": 1.338755980861244e-05,
      "loss": 0.5112,
      "step": 675
    },
    {
      "epoch": 2.456140350877193,
      "grad_norm": 6.421328544616699,
      "learning_rate": 1.3327751196172248e-05,
      "loss": 0.3609,
      "step": 700
    },
    {
      "epoch": 2.543859649122807,
      "grad_norm": 6.225348949432373,
      "learning_rate": 1.3267942583732058e-05,
      "loss": 0.5531,
      "step": 725
    },
    {
      "epoch": 2.6315789473684212,
      "grad_norm": 14.942207336425781,
      "learning_rate": 1.3208133971291866e-05,
      "loss": 0.4272,
      "step": 750
    },
    {
      "epoch": 2.719298245614035,
      "grad_norm": 8.975879669189453,
      "learning_rate": 1.3148325358851675e-05,
      "loss": 0.4509,
      "step": 775
    },
    {
      "epoch": 2.807017543859649,
      "grad_norm": 21.482088088989258,
      "learning_rate": 1.3088516746411483e-05,
      "loss": 0.4233,
      "step": 800
    },
    {
      "epoch": 2.8947368421052633,
      "grad_norm": 5.1405792236328125,
      "learning_rate": 1.3028708133971293e-05,
      "loss": 0.418,
      "step": 825
    },
    {
      "epoch": 2.982456140350877,
      "grad_norm": 14.574483871459961,
      "learning_rate": 1.29688995215311e-05,
      "loss": 0.6778,
      "step": 850
    },
    {
      "epoch": 3.0701754385964914,
      "grad_norm": 7.385740280151367,
      "learning_rate": 1.290909090909091e-05,
      "loss": 0.4127,
      "step": 875
    },
    {
      "epoch": 3.1578947368421053,
      "grad_norm": 48.35734939575195,
      "learning_rate": 1.2849282296650718e-05,
      "loss": 0.4752,
      "step": 900
    },
    {
      "epoch": 3.245614035087719,
      "grad_norm": 8.056925773620605,
      "learning_rate": 1.2789473684210526e-05,
      "loss": 0.411,
      "step": 925
    },
    {
      "epoch": 3.3333333333333335,
      "grad_norm": 20.510982513427734,
      "learning_rate": 1.2729665071770335e-05,
      "loss": 0.495,
      "step": 950
    },
    {
      "epoch": 3.4210526315789473,
      "grad_norm": 25.063684463500977,
      "learning_rate": 1.2669856459330143e-05,
      "loss": 0.2436,
      "step": 975
    },
    {
      "epoch": 3.5087719298245617,
      "grad_norm": 15.523259162902832,
      "learning_rate": 1.2610047846889953e-05,
      "loss": 0.4612,
      "step": 1000
    },
    {
      "epoch": 3.5964912280701755,
      "grad_norm": 1.9646857976913452,
      "learning_rate": 1.255023923444976e-05,
      "loss": 0.4426,
      "step": 1025
    },
    {
      "epoch": 3.6842105263157894,
      "grad_norm": 30.676250457763672,
      "learning_rate": 1.249043062200957e-05,
      "loss": 0.3333,
      "step": 1050
    },
    {
      "epoch": 3.7719298245614032,
      "grad_norm": 6.475268363952637,
      "learning_rate": 1.2430622009569378e-05,
      "loss": 0.2945,
      "step": 1075
    },
    {
      "epoch": 3.8596491228070176,
      "grad_norm": 24.684438705444336,
      "learning_rate": 1.2370813397129186e-05,
      "loss": 0.4339,
      "step": 1100
    },
    {
      "epoch": 3.9473684210526314,
      "grad_norm": 38.601226806640625,
      "learning_rate": 1.2311004784688995e-05,
      "loss": 0.3701,
      "step": 1125
    },
    {
      "epoch": 4.035087719298246,
      "grad_norm": 2.8431146144866943,
      "learning_rate": 1.2251196172248803e-05,
      "loss": 0.2782,
      "step": 1150
    },
    {
      "epoch": 4.12280701754386,
      "grad_norm": 23.384532928466797,
      "learning_rate": 1.2191387559808613e-05,
      "loss": 0.312,
      "step": 1175
    },
    {
      "epoch": 4.2105263157894735,
      "grad_norm": 3.222729444503784,
      "learning_rate": 1.213157894736842e-05,
      "loss": 0.264,
      "step": 1200
    },
    {
      "epoch": 4.298245614035087,
      "grad_norm": 3.5861947536468506,
      "learning_rate": 1.207177033492823e-05,
      "loss": 0.3104,
      "step": 1225
    },
    {
      "epoch": 4.385964912280702,
      "grad_norm": 4.091048717498779,
      "learning_rate": 1.2011961722488038e-05,
      "loss": 0.3567,
      "step": 1250
    },
    {
      "epoch": 4.473684210526316,
      "grad_norm": 1.3814598321914673,
      "learning_rate": 1.1952153110047847e-05,
      "loss": 0.3455,
      "step": 1275
    },
    {
      "epoch": 4.56140350877193,
      "grad_norm": 2.480316162109375,
      "learning_rate": 1.1892344497607655e-05,
      "loss": 0.1769,
      "step": 1300
    },
    {
      "epoch": 4.649122807017544,
      "grad_norm": 20.069162368774414,
      "learning_rate": 1.1832535885167463e-05,
      "loss": 0.2483,
      "step": 1325
    },
    {
      "epoch": 4.7368421052631575,
      "grad_norm": 1.7885949611663818,
      "learning_rate": 1.1772727272727272e-05,
      "loss": 0.2283,
      "step": 1350
    },
    {
      "epoch": 4.824561403508772,
      "grad_norm": 40.37774658203125,
      "learning_rate": 1.171291866028708e-05,
      "loss": 0.2277,
      "step": 1375
    },
    {
      "epoch": 4.912280701754386,
      "grad_norm": 2.960739850997925,
      "learning_rate": 1.1653110047846891e-05,
      "loss": 0.2077,
      "step": 1400
    },
    {
      "epoch": 5.0,
      "grad_norm": 14.10015869140625,
      "learning_rate": 1.15933014354067e-05,
      "loss": 0.3859,
      "step": 1425
    },
    {
      "epoch": 5.087719298245614,
      "grad_norm": 4.440461158752441,
      "learning_rate": 1.1533492822966509e-05,
      "loss": 0.1354,
      "step": 1450
    },
    {
      "epoch": 5.175438596491228,
      "grad_norm": 17.193849563598633,
      "learning_rate": 1.1473684210526317e-05,
      "loss": 0.2935,
      "step": 1475
    },
    {
      "epoch": 5.2631578947368425,
      "grad_norm": 1.2828125953674316,
      "learning_rate": 1.1413875598086125e-05,
      "loss": 0.1394,
      "step": 1500
    },
    {
      "epoch": 5.350877192982456,
      "grad_norm": 4.809077262878418,
      "learning_rate": 1.1354066985645934e-05,
      "loss": 0.3651,
      "step": 1525
    },
    {
      "epoch": 5.43859649122807,
      "grad_norm": 43.53572463989258,
      "learning_rate": 1.1294258373205742e-05,
      "loss": 0.1187,
      "step": 1550
    },
    {
      "epoch": 5.526315789473684,
      "grad_norm": 5.647329330444336,
      "learning_rate": 1.1234449760765551e-05,
      "loss": 0.2719,
      "step": 1575
    },
    {
      "epoch": 5.614035087719298,
      "grad_norm": 25.37187957763672,
      "learning_rate": 1.117464114832536e-05,
      "loss": 0.2477,
      "step": 1600
    },
    {
      "epoch": 5.701754385964913,
      "grad_norm": 4.228896617889404,
      "learning_rate": 1.1114832535885169e-05,
      "loss": 0.3608,
      "step": 1625
    },
    {
      "epoch": 5.7894736842105265,
      "grad_norm": 2.1760356426239014,
      "learning_rate": 1.1055023923444977e-05,
      "loss": 0.1223,
      "step": 1650
    },
    {
      "epoch": 5.87719298245614,
      "grad_norm": 28.078975677490234,
      "learning_rate": 1.0995215311004786e-05,
      "loss": 0.3207,
      "step": 1675
    },
    {
      "epoch": 5.964912280701754,
      "grad_norm": 12.583624839782715,
      "learning_rate": 1.0935406698564594e-05,
      "loss": 0.1094,
      "step": 1700
    },
    {
      "epoch": 6.052631578947368,
      "grad_norm": 0.4104742109775543,
      "learning_rate": 1.0875598086124402e-05,
      "loss": 0.1937,
      "step": 1725
    },
    {
      "epoch": 6.140350877192983,
      "grad_norm": 1.5135095119476318,
      "learning_rate": 1.0815789473684211e-05,
      "loss": 0.3113,
      "step": 1750
    },
    {
      "epoch": 6.228070175438597,
      "grad_norm": 1.9227218627929688,
      "learning_rate": 1.0755980861244019e-05,
      "loss": 0.1179,
      "step": 1775
    },
    {
      "epoch": 6.315789473684211,
      "grad_norm": 7.322267055511475,
      "learning_rate": 1.0696172248803829e-05,
      "loss": 0.209,
      "step": 1800
    },
    {
      "epoch": 6.4035087719298245,
      "grad_norm": 2.8994693756103516,
      "learning_rate": 1.0636363636363636e-05,
      "loss": 0.1743,
      "step": 1825
    },
    {
      "epoch": 6.491228070175438,
      "grad_norm": 21.65730857849121,
      "learning_rate": 1.0576555023923446e-05,
      "loss": 0.2483,
      "step": 1850
    },
    {
      "epoch": 6.578947368421053,
      "grad_norm": 24.210969924926758,
      "learning_rate": 1.0516746411483254e-05,
      "loss": 0.1348,
      "step": 1875
    },
    {
      "epoch": 6.666666666666667,
      "grad_norm": 1.589753270149231,
      "learning_rate": 1.0456937799043063e-05,
      "loss": 0.229,
      "step": 1900
    },
    {
      "epoch": 6.754385964912281,
      "grad_norm": 3.508387327194214,
      "learning_rate": 1.0397129186602871e-05,
      "loss": 0.0909,
      "step": 1925
    },
    {
      "epoch": 6.842105263157895,
      "grad_norm": 1.89371919631958,
      "learning_rate": 1.0337320574162679e-05,
      "loss": 0.1859,
      "step": 1950
    },
    {
      "epoch": 6.9298245614035086,
      "grad_norm": 0.35574591159820557,
      "learning_rate": 1.0277511961722489e-05,
      "loss": 0.1675,
      "step": 1975
    },
    {
      "epoch": 7.017543859649122,
      "grad_norm": 0.621152937412262,
      "learning_rate": 1.0217703349282296e-05,
      "loss": 0.1082,
      "step": 2000
    },
    {
      "epoch": 7.105263157894737,
      "grad_norm": 0.5694774985313416,
      "learning_rate": 1.0157894736842106e-05,
      "loss": 0.0886,
      "step": 2025
    },
    {
      "epoch": 7.192982456140351,
      "grad_norm": 7.276403427124023,
      "learning_rate": 1.0098086124401914e-05,
      "loss": 0.2697,
      "step": 2050
    },
    {
      "epoch": 7.280701754385965,
      "grad_norm": 0.4015137851238251,
      "learning_rate": 1.0038277511961723e-05,
      "loss": 0.1301,
      "step": 2075
    },
    {
      "epoch": 7.368421052631579,
      "grad_norm": 0.27876588702201843,
      "learning_rate": 9.978468899521531e-06,
      "loss": 0.0687,
      "step": 2100
    },
    {
      "epoch": 7.456140350877193,
      "grad_norm": 7.235930442810059,
      "learning_rate": 9.918660287081339e-06,
      "loss": 0.0734,
      "step": 2125
    },
    {
      "epoch": 7.543859649122807,
      "grad_norm": 1.6751177310943604,
      "learning_rate": 9.858851674641148e-06,
      "loss": 0.1574,
      "step": 2150
    },
    {
      "epoch": 7.631578947368421,
      "grad_norm": 0.8539127111434937,
      "learning_rate": 9.799043062200956e-06,
      "loss": 0.0483,
      "step": 2175
    },
    {
      "epoch": 7.719298245614035,
      "grad_norm": 0.26878660917282104,
      "learning_rate": 9.739234449760766e-06,
      "loss": 0.1632,
      "step": 2200
    },
    {
      "epoch": 7.807017543859649,
      "grad_norm": 106.50234985351562,
      "learning_rate": 9.679425837320574e-06,
      "loss": 0.1156,
      "step": 2225
    },
    {
      "epoch": 7.894736842105263,
      "grad_norm": 2.002487897872925,
      "learning_rate": 9.619617224880383e-06,
      "loss": 0.2036,
      "step": 2250
    },
    {
      "epoch": 7.982456140350877,
      "grad_norm": 0.5302895307540894,
      "learning_rate": 9.559808612440191e-06,
      "loss": 0.1417,
      "step": 2275
    },
    {
      "epoch": 8.070175438596491,
      "grad_norm": 0.314165323972702,
      "learning_rate": 9.5e-06,
      "loss": 0.0673,
      "step": 2300
    },
    {
      "epoch": 8.157894736842104,
      "grad_norm": 0.5242946743965149,
      "learning_rate": 9.440191387559808e-06,
      "loss": 0.0689,
      "step": 2325
    },
    {
      "epoch": 8.24561403508772,
      "grad_norm": 48.71388244628906,
      "learning_rate": 9.380382775119616e-06,
      "loss": 0.1863,
      "step": 2350
    },
    {
      "epoch": 8.333333333333334,
      "grad_norm": 0.5670514106750488,
      "learning_rate": 9.320574162679426e-06,
      "loss": 0.0823,
      "step": 2375
    },
    {
      "epoch": 8.421052631578947,
      "grad_norm": 0.6025238633155823,
      "learning_rate": 9.260765550239234e-06,
      "loss": 0.1118,
      "step": 2400
    },
    {
      "epoch": 8.508771929824562,
      "grad_norm": 42.649696350097656,
      "learning_rate": 9.200956937799043e-06,
      "loss": 0.1333,
      "step": 2425
    },
    {
      "epoch": 8.596491228070175,
      "grad_norm": 0.4079756438732147,
      "learning_rate": 9.141148325358851e-06,
      "loss": 0.156,
      "step": 2450
    },
    {
      "epoch": 8.68421052631579,
      "grad_norm": 0.31388023495674133,
      "learning_rate": 9.08133971291866e-06,
      "loss": 0.2159,
      "step": 2475
    },
    {
      "epoch": 8.771929824561404,
      "grad_norm": 0.2017977237701416,
      "learning_rate": 9.021531100478468e-06,
      "loss": 0.0936,
      "step": 2500
    },
    {
      "epoch": 8.859649122807017,
      "grad_norm": 2.2358014583587646,
      "learning_rate": 6.697368421052631e-06,
      "loss": 0.1884,
      "step": 2525
    },
    {
      "epoch": 8.947368421052632,
      "grad_norm": 0.5669077038764954,
      "learning_rate": 6.615131578947369e-06,
      "loss": 0.2183,
      "step": 2550
    },
    {
      "epoch": 9.035087719298245,
      "grad_norm": 27.18621253967285,
      "learning_rate": 6.532894736842106e-06,
      "loss": 0.1232,
      "step": 2575
    },
    {
      "epoch": 9.12280701754386,
      "grad_norm": 468.5591735839844,
      "learning_rate": 6.450657894736842e-06,
      "loss": 0.0516,
      "step": 2600
    },
    {
      "epoch": 9.210526315789474,
      "grad_norm": 0.24007272720336914,
      "learning_rate": 6.368421052631579e-06,
      "loss": 0.0695,
      "step": 2625
    },
    {
      "epoch": 9.298245614035087,
      "grad_norm": 99.38546752929688,
      "learning_rate": 6.286184210526316e-06,
      "loss": 0.0947,
      "step": 2650
    },
    {
      "epoch": 9.385964912280702,
      "grad_norm": 3.8673267364501953,
      "learning_rate": 6.203947368421053e-06,
      "loss": 0.1062,
      "step": 2675
    },
    {
      "epoch": 9.473684210526315,
      "grad_norm": 0.2559914290904999,
      "learning_rate": 6.12171052631579e-06,
      "loss": 0.0523,
      "step": 2700
    },
    {
      "epoch": 9.56140350877193,
      "grad_norm": 0.12371934205293655,
      "learning_rate": 6.039473684210526e-06,
      "loss": 0.0495,
      "step": 2725
    },
    {
      "epoch": 9.649122807017545,
      "grad_norm": 2.5249898433685303,
      "learning_rate": 5.957236842105263e-06,
      "loss": 0.1678,
      "step": 2750
    },
    {
      "epoch": 9.736842105263158,
      "grad_norm": 0.2619442939758301,
      "learning_rate": 5.8750000000000005e-06,
      "loss": 0.0534,
      "step": 2775
    },
    {
      "epoch": 9.824561403508772,
      "grad_norm": 1.3705483675003052,
      "learning_rate": 5.792763157894737e-06,
      "loss": 0.1326,
      "step": 2800
    },
    {
      "epoch": 9.912280701754385,
      "grad_norm": 74.48184967041016,
      "learning_rate": 5.710526315789474e-06,
      "loss": 0.1795,
      "step": 2825
    },
    {
      "epoch": 10.0,
      "grad_norm": 79.78992462158203,
      "learning_rate": 5.6282894736842106e-06,
      "loss": 0.092,
      "step": 2850
    },
    {
      "epoch": 10.087719298245615,
      "grad_norm": 0.5286645889282227,
      "learning_rate": 5.546052631578947e-06,
      "loss": 0.0418,
      "step": 2875
    },
    {
      "epoch": 10.175438596491228,
      "grad_norm": 109.82157135009766,
      "learning_rate": 5.463815789473684e-06,
      "loss": 0.193,
      "step": 2900
    },
    {
      "epoch": 10.263157894736842,
      "grad_norm": 3.963012933731079,
      "learning_rate": 5.3815789473684215e-06,
      "loss": 0.0897,
      "step": 2925
    },
    {
      "epoch": 10.350877192982455,
      "grad_norm": 0.41540616750717163,
      "learning_rate": 5.299342105263158e-06,
      "loss": 0.1723,
      "step": 2950
    },
    {
      "epoch": 10.43859649122807,
      "grad_norm": 5.900774002075195,
      "learning_rate": 5.217105263157895e-06,
      "loss": 0.0784,
      "step": 2975
    },
    {
      "epoch": 10.526315789473685,
      "grad_norm": 12.152785301208496,
      "learning_rate": 5.1348684210526315e-06,
      "loss": 0.1186,
      "step": 3000
    },
    {
      "epoch": 10.614035087719298,
      "grad_norm": 0.21494676172733307,
      "learning_rate": 3.6315789473684213e-06,
      "loss": 0.0358,
      "step": 3025
    },
    {
      "epoch": 10.701754385964913,
      "grad_norm": 90.48876953125,
      "learning_rate": 3.537593984962406e-06,
      "loss": 0.0471,
      "step": 3050
    },
    {
      "epoch": 10.789473684210526,
      "grad_norm": 0.23261310160160065,
      "learning_rate": 3.4436090225563912e-06,
      "loss": 0.0276,
      "step": 3075
    },
    {
      "epoch": 10.87719298245614,
      "grad_norm": 0.782564640045166,
      "learning_rate": 3.349624060150376e-06,
      "loss": 0.0718,
      "step": 3100
    },
    {
      "epoch": 10.964912280701755,
      "grad_norm": 0.33198878169059753,
      "learning_rate": 3.255639097744361e-06,
      "loss": 0.0564,
      "step": 3125
    },
    {
      "epoch": 11.052631578947368,
      "grad_norm": 0.14864841103553772,
      "learning_rate": 3.161654135338346e-06,
      "loss": 0.0508,
      "step": 3150
    },
    {
      "epoch": 11.140350877192983,
      "grad_norm": 0.1588129848241806,
      "learning_rate": 3.067669172932331e-06,
      "loss": 0.1641,
      "step": 3175
    },
    {
      "epoch": 11.228070175438596,
      "grad_norm": 0.6562497019767761,
      "learning_rate": 2.973684210526316e-06,
      "loss": 0.0544,
      "step": 3200
    },
    {
      "epoch": 11.31578947368421,
      "grad_norm": 0.3608649969100952,
      "learning_rate": 2.879699248120301e-06,
      "loss": 0.0391,
      "step": 3225
    },
    {
      "epoch": 11.403508771929825,
      "grad_norm": 20.602819442749023,
      "learning_rate": 2.785714285714286e-06,
      "loss": 0.0338,
      "step": 3250
    },
    {
      "epoch": 11.491228070175438,
      "grad_norm": 157.23171997070312,
      "learning_rate": 2.691729323308271e-06,
      "loss": 0.0771,
      "step": 3275
    },
    {
      "epoch": 11.578947368421053,
      "grad_norm": 0.22338764369487762,
      "learning_rate": 2.597744360902256e-06,
      "loss": 0.1187,
      "step": 3300
    },
    {
      "epoch": 11.666666666666666,
      "grad_norm": 57.78382110595703,
      "learning_rate": 2.503759398496241e-06,
      "loss": 0.0165,
      "step": 3325
    },
    {
      "epoch": 11.75438596491228,
      "grad_norm": 7.400601863861084,
      "learning_rate": 2.4097744360902257e-06,
      "loss": 0.0092,
      "step": 3350
    },
    {
      "epoch": 11.842105263157894,
      "grad_norm": 0.18190287053585052,
      "learning_rate": 2.3157894736842105e-06,
      "loss": 0.1059,
      "step": 3375
    },
    {
      "epoch": 11.929824561403509,
      "grad_norm": 0.1269238144159317,
      "learning_rate": 2.2218045112781957e-06,
      "loss": 0.0571,
      "step": 3400
    },
    {
      "epoch": 12.017543859649123,
      "grad_norm": 0.10843072831630707,
      "learning_rate": 2.1278195488721805e-06,
      "loss": 0.1341,
      "step": 3425
    },
    {
      "epoch": 12.105263157894736,
      "grad_norm": 0.7199507355690002,
      "learning_rate": 2.0338345864661656e-06,
      "loss": 0.0063,
      "step": 3450
    },
    {
      "epoch": 12.192982456140351,
      "grad_norm": 0.19009971618652344,
      "learning_rate": 1.9398496240601504e-06,
      "loss": 0.054,
      "step": 3475
    },
    {
      "epoch": 12.280701754385966,
      "grad_norm": 0.18020951747894287,
      "learning_rate": 1.8458646616541354e-06,
      "loss": 0.0148,
      "step": 3500
    },
    {
      "epoch": 12.368421052631579,
      "grad_norm": 0.2494085282087326,
      "learning_rate": 6.569377990430622e-06,
      "loss": 0.1168,
      "step": 3525
    },
    {
      "epoch": 12.456140350877194,
      "grad_norm": 0.16571170091629028,
      "learning_rate": 6.509569377990431e-06,
      "loss": 0.0482,
      "step": 3550
    },
    {
      "epoch": 12.543859649122806,
      "grad_norm": 0.10973577946424484,
      "learning_rate": 6.44976076555024e-06,
      "loss": 0.1372,
      "step": 3575
    },
    {
      "epoch": 12.631578947368421,
      "grad_norm": 0.14920943975448608,
      "learning_rate": 6.389952153110048e-06,
      "loss": 0.0791,
      "step": 3600
    },
    {
      "epoch": 12.719298245614034,
      "grad_norm": 4.5087504386901855,
      "learning_rate": 6.330143540669857e-06,
      "loss": 0.1614,
      "step": 3625
    },
    {
      "epoch": 12.807017543859649,
      "grad_norm": 0.11903402209281921,
      "learning_rate": 6.270334928229666e-06,
      "loss": 0.0082,
      "step": 3650
    },
    {
      "epoch": 12.894736842105264,
      "grad_norm": 0.24349439144134521,
      "learning_rate": 6.210526315789474e-06,
      "loss": 0.1936,
      "step": 3675
    },
    {
      "epoch": 12.982456140350877,
      "grad_norm": 146.2913360595703,
      "learning_rate": 6.150717703349282e-06,
      "loss": 0.1465,
      "step": 3700
    },
    {
      "epoch": 13.070175438596491,
      "grad_norm": 88.66224670410156,
      "learning_rate": 6.090909090909091e-06,
      "loss": 0.0751,
      "step": 3725
    },
    {
      "epoch": 13.157894736842104,
      "grad_norm": 0.11558452993631363,
      "learning_rate": 6.0311004784689e-06,
      "loss": 0.0652,
      "step": 3750
    },
    {
      "epoch": 13.24561403508772,
      "grad_norm": 3.1845107078552246,
      "learning_rate": 5.971291866028708e-06,
      "loss": 0.0969,
      "step": 3775
    },
    {
      "epoch": 13.333333333333334,
      "grad_norm": 0.5500511527061462,
      "learning_rate": 5.911483253588517e-06,
      "loss": 0.107,
      "step": 3800
    },
    {
      "epoch": 13.421052631578947,
      "grad_norm": 0.08417954295873642,
      "learning_rate": 5.851674641148326e-06,
      "loss": 0.0165,
      "step": 3825
    },
    {
      "epoch": 13.508771929824562,
      "grad_norm": 0.0910855233669281,
      "learning_rate": 5.791866028708134e-06,
      "loss": 0.1326,
      "step": 3850
    },
    {
      "epoch": 13.596491228070175,
      "grad_norm": 65.31088256835938,
      "learning_rate": 5.732057416267942e-06,
      "loss": 0.0365,
      "step": 3875
    },
    {
      "epoch": 13.68421052631579,
      "grad_norm": 0.09439278393983841,
      "learning_rate": 5.672248803827751e-06,
      "loss": 0.0784,
      "step": 3900
    },
    {
      "epoch": 13.771929824561404,
      "grad_norm": 0.17843316495418549,
      "learning_rate": 5.6124401913875595e-06,
      "loss": 0.0232,
      "step": 3925
    },
    {
      "epoch": 13.859649122807017,
      "grad_norm": 0.16249841451644897,
      "learning_rate": 5.552631578947368e-06,
      "loss": 0.0107,
      "step": 3950
    },
    {
      "epoch": 13.947368421052632,
      "grad_norm": 0.545814573764801,
      "learning_rate": 5.492822966507177e-06,
      "loss": 0.0947,
      "step": 3975
    },
    {
      "epoch": 14.035087719298245,
      "grad_norm": 0.7138233184814453,
      "learning_rate": 5.433014354066986e-06,
      "loss": 0.1812,
      "step": 4000
    },
    {
      "epoch": 14.12280701754386,
      "grad_norm": 381.4528503417969,
      "learning_rate": 5.373205741626794e-06,
      "loss": 0.0399,
      "step": 4025
    },
    {
      "epoch": 14.210526315789474,
      "grad_norm": 0.053043171763420105,
      "learning_rate": 5.313397129186603e-06,
      "loss": 0.1249,
      "step": 4050
    },
    {
      "epoch": 14.298245614035087,
      "grad_norm": 1.4563120603561401,
      "learning_rate": 5.253588516746412e-06,
      "loss": 0.0361,
      "step": 4075
    },
    {
      "epoch": 14.385964912280702,
      "grad_norm": 0.06027642637491226,
      "learning_rate": 5.19377990430622e-06,
      "loss": 0.0799,
      "step": 4100
    },
    {
      "epoch": 14.473684210526315,
      "grad_norm": 0.11955183744430542,
      "learning_rate": 5.133971291866029e-06,
      "loss": 0.2006,
      "step": 4125
    },
    {
      "epoch": 14.56140350877193,
      "grad_norm": 0.07738160341978073,
      "learning_rate": 5.074162679425838e-06,
      "loss": 0.16,
      "step": 4150
    },
    {
      "epoch": 14.649122807017545,
      "grad_norm": 28.58780860900879,
      "learning_rate": 5.014354066985646e-06,
      "loss": 0.0675,
      "step": 4175
    },
    {
      "epoch": 14.736842105263158,
      "grad_norm": 0.08654920011758804,
      "learning_rate": 4.954545454545455e-06,
      "loss": 0.0584,
      "step": 4200
    },
    {
      "epoch": 14.824561403508772,
      "grad_norm": 0.1421671062707901,
      "learning_rate": 4.894736842105264e-06,
      "loss": 0.0049,
      "step": 4225
    },
    {
      "epoch": 14.912280701754385,
      "grad_norm": 0.23447291553020477,
      "learning_rate": 4.834928229665072e-06,
      "loss": 0.0122,
      "step": 4250
    },
    {
      "epoch": 15.0,
      "grad_norm": 54.224788665771484,
      "learning_rate": 4.775119617224881e-06,
      "loss": 0.0921,
      "step": 4275
    },
    {
      "epoch": 15.087719298245615,
      "grad_norm": 0.08427131175994873,
      "learning_rate": 4.715311004784689e-06,
      "loss": 0.0061,
      "step": 4300
    },
    {
      "epoch": 15.175438596491228,
      "grad_norm": 0.232700914144516,
      "learning_rate": 4.6555023923444975e-06,
      "loss": 0.1011,
      "step": 4325
    },
    {
      "epoch": 15.263157894736842,
      "grad_norm": 0.09075941890478134,
      "learning_rate": 4.595693779904306e-06,
      "loss": 0.0419,
      "step": 4350
    },
    {
      "epoch": 15.350877192982455,
      "grad_norm": 0.20714852213859558,
      "learning_rate": 4.535885167464115e-06,
      "loss": 0.1786,
      "step": 4375
    },
    {
      "epoch": 15.43859649122807,
      "grad_norm": 0.07586350291967392,
      "learning_rate": 4.476076555023924e-06,
      "loss": 0.0314,
      "step": 4400
    },
    {
      "epoch": 15.526315789473685,
      "grad_norm": 0.29287371039390564,
      "learning_rate": 4.416267942583732e-06,
      "loss": 0.0635,
      "step": 4425
    },
    {
      "epoch": 15.614035087719298,
      "grad_norm": 50.64339065551758,
      "learning_rate": 4.356459330143541e-06,
      "loss": 0.0854,
      "step": 4450
    },
    {
      "epoch": 15.701754385964913,
      "grad_norm": 0.12661752104759216,
      "learning_rate": 4.29665071770335e-06,
      "loss": 0.1378,
      "step": 4475
    },
    {
      "epoch": 15.789473684210526,
      "grad_norm": 2.418790340423584,
      "learning_rate": 4.2368421052631575e-06,
      "loss": 0.0325,
      "step": 4500
    },
    {
      "epoch": 15.87719298245614,
      "grad_norm": 231.30796813964844,
      "learning_rate": 4.177033492822966e-06,
      "loss": 0.1398,
      "step": 4525
    },
    {
      "epoch": 15.964912280701755,
      "grad_norm": 0.23073101043701172,
      "learning_rate": 4.117224880382775e-06,
      "loss": 0.0237,
      "step": 4550
    },
    {
      "epoch": 16.05263157894737,
      "grad_norm": 0.044060900807380676,
      "learning_rate": 4.0574162679425835e-06,
      "loss": 0.0074,
      "step": 4575
    },
    {
      "epoch": 16.140350877192983,
      "grad_norm": 1.4264813661575317,
      "learning_rate": 3.997607655502392e-06,
      "loss": 0.025,
      "step": 4600
    },
    {
      "epoch": 16.228070175438596,
      "grad_norm": 0.41092243790626526,
      "learning_rate": 3.937799043062201e-06,
      "loss": 0.0049,
      "step": 4625
    },
    {
      "epoch": 16.31578947368421,
      "grad_norm": 3.6420750617980957,
      "learning_rate": 3.8779904306220095e-06,
      "loss": 0.0491,
      "step": 4650
    },
    {
      "epoch": 16.403508771929825,
      "grad_norm": 0.08417033404111862,
      "learning_rate": 3.818181818181818e-06,
      "loss": 0.003,
      "step": 4675
    },
    {
      "epoch": 16.49122807017544,
      "grad_norm": 0.41752633452415466,
      "learning_rate": 3.7583732057416273e-06,
      "loss": 0.0649,
      "step": 4700
    },
    {
      "epoch": 16.57894736842105,
      "grad_norm": 0.04892294108867645,
      "learning_rate": 3.6985645933014356e-06,
      "loss": 0.1577,
      "step": 4725
    },
    {
      "epoch": 16.666666666666668,
      "grad_norm": 0.07886885106563568,
      "learning_rate": 3.6387559808612442e-06,
      "loss": 0.1133,
      "step": 4750
    },
    {
      "epoch": 16.75438596491228,
      "grad_norm": 202.08302307128906,
      "learning_rate": 3.5789473684210525e-06,
      "loss": 0.0151,
      "step": 4775
    },
    {
      "epoch": 16.842105263157894,
      "grad_norm": 77.71251678466797,
      "learning_rate": 3.519138755980861e-06,
      "loss": 0.0644,
      "step": 4800
    },
    {
      "epoch": 16.92982456140351,
      "grad_norm": 0.04525836929678917,
      "learning_rate": 3.45933014354067e-06,
      "loss": 0.0283,
      "step": 4825
    },
    {
      "epoch": 17.017543859649123,
      "grad_norm": 0.035384029150009155,
      "learning_rate": 3.399521531100479e-06,
      "loss": 0.0431,
      "step": 4850
    },
    {
      "epoch": 17.105263157894736,
      "grad_norm": 0.05048861354589462,
      "learning_rate": 3.339712918660287e-06,
      "loss": 0.0358,
      "step": 4875
    },
    {
      "epoch": 17.19298245614035,
      "grad_norm": 0.1347443163394928,
      "learning_rate": 3.279904306220096e-06,
      "loss": 0.0206,
      "step": 4900
    },
    {
      "epoch": 17.280701754385966,
      "grad_norm": 252.46458435058594,
      "learning_rate": 3.2200956937799046e-06,
      "loss": 0.0804,
      "step": 4925
    },
    {
      "epoch": 17.36842105263158,
      "grad_norm": 0.05461292341351509,
      "learning_rate": 3.1602870813397132e-06,
      "loss": 0.0784,
      "step": 4950
    },
    {
      "epoch": 17.45614035087719,
      "grad_norm": 2.0001866817474365,
      "learning_rate": 3.1004784688995215e-06,
      "loss": 0.0947,
      "step": 4975
    },
    {
      "epoch": 17.54385964912281,
      "grad_norm": 0.04590539261698723,
      "learning_rate": 3.04066985645933e-06,
      "loss": 0.075,
      "step": 5000
    },
    {
      "epoch": 17.63157894736842,
      "grad_norm": 0.04810748249292374,
      "learning_rate": 2.980861244019139e-06,
      "loss": 0.0342,
      "step": 5025
    },
    {
      "epoch": 17.719298245614034,
      "grad_norm": 0.8750467896461487,
      "learning_rate": 2.9210526315789475e-06,
      "loss": 0.0618,
      "step": 5050
    },
    {
      "epoch": 17.80701754385965,
      "grad_norm": 56.14554977416992,
      "learning_rate": 2.8612440191387558e-06,
      "loss": 0.026,
      "step": 5075
    },
    {
      "epoch": 17.894736842105264,
      "grad_norm": 0.8380083441734314,
      "learning_rate": 2.8014354066985645e-06,
      "loss": 0.0135,
      "step": 5100
    },
    {
      "epoch": 17.982456140350877,
      "grad_norm": 0.08012706786394119,
      "learning_rate": 2.741626794258373e-06,
      "loss": 0.0586,
      "step": 5125
    },
    {
      "epoch": 18.07017543859649,
      "grad_norm": 1.2549360990524292,
      "learning_rate": 2.6818181818181822e-06,
      "loss": 0.0047,
      "step": 5150
    },
    {
      "epoch": 18.157894736842106,
      "grad_norm": 0.03993632644414902,
      "learning_rate": 2.6220095693779905e-06,
      "loss": 0.0869,
      "step": 5175
    },
    {
      "epoch": 18.24561403508772,
      "grad_norm": 62.21980285644531,
      "learning_rate": 2.562200956937799e-06,
      "loss": 0.0987,
      "step": 5200
    },
    {
      "epoch": 18.333333333333332,
      "grad_norm": 0.10092537105083466,
      "learning_rate": 2.502392344497608e-06,
      "loss": 0.1554,
      "step": 5225
    },
    {
      "epoch": 18.42105263157895,
      "grad_norm": 0.7630422115325928,
      "learning_rate": 2.4425837320574165e-06,
      "loss": 0.0522,
      "step": 5250
    },
    {
      "epoch": 18.50877192982456,
      "grad_norm": 62.42081069946289,
      "learning_rate": 2.382775119617225e-06,
      "loss": 0.0298,
      "step": 5275
    },
    {
      "epoch": 18.596491228070175,
      "grad_norm": 0.046070296317338943,
      "learning_rate": 2.3229665071770335e-06,
      "loss": 0.1207,
      "step": 5300
    },
    {
      "epoch": 18.68421052631579,
      "grad_norm": 0.04533836990594864,
      "learning_rate": 2.263157894736842e-06,
      "loss": 0.146,
      "step": 5325
    },
    {
      "epoch": 18.771929824561404,
      "grad_norm": 0.03193026781082153,
      "learning_rate": 2.203349282296651e-06,
      "loss": 0.0106,
      "step": 5350
    },
    {
      "epoch": 18.859649122807017,
      "grad_norm": 0.03594537451863289,
      "learning_rate": 2.143540669856459e-06,
      "loss": 0.0266,
      "step": 5375
    },
    {
      "epoch": 18.94736842105263,
      "grad_norm": 0.07382319122552872,
      "learning_rate": 2.0837320574162678e-06,
      "loss": 0.0954,
      "step": 5400
    },
    {
      "epoch": 19.035087719298247,
      "grad_norm": 0.0792032852768898,
      "learning_rate": 2.0239234449760764e-06,
      "loss": 0.0871,
      "step": 5425
    },
    {
      "epoch": 19.12280701754386,
      "grad_norm": 0.06994396448135376,
      "learning_rate": 1.9641148325358855e-06,
      "loss": 0.0092,
      "step": 5450
    },
    {
      "epoch": 19.210526315789473,
      "grad_norm": 36.98837661743164,
      "learning_rate": 1.904306220095694e-06,
      "loss": 0.1442,
      "step": 5475
    },
    {
      "epoch": 19.29824561403509,
      "grad_norm": 0.11950813978910446,
      "learning_rate": 1.8444976076555023e-06,
      "loss": 0.0344,
      "step": 5500
    },
    {
      "epoch": 19.385964912280702,
      "grad_norm": 0.053549814969301224,
      "learning_rate": 1.7846889952153112e-06,
      "loss": 0.0609,
      "step": 5525
    },
    {
      "epoch": 19.473684210526315,
      "grad_norm": 0.05915123224258423,
      "learning_rate": 1.7248803827751196e-06,
      "loss": 0.0212,
      "step": 5550
    },
    {
      "epoch": 19.56140350877193,
      "grad_norm": 0.03509140759706497,
      "learning_rate": 1.6650717703349283e-06,
      "loss": 0.0019,
      "step": 5575
    },
    {
      "epoch": 19.649122807017545,
      "grad_norm": 0.04774395003914833,
      "learning_rate": 1.605263157894737e-06,
      "loss": 0.032,
      "step": 5600
    },
    {
      "epoch": 19.736842105263158,
      "grad_norm": 0.1909104436635971,
      "learning_rate": 1.5454545454545454e-06,
      "loss": 0.0351,
      "step": 5625
    },
    {
      "epoch": 19.82456140350877,
      "grad_norm": 0.11199568212032318,
      "learning_rate": 1.4856459330143541e-06,
      "loss": 0.0268,
      "step": 5650
    },
    {
      "epoch": 19.912280701754387,
      "grad_norm": 17.852022171020508,
      "learning_rate": 1.4258373205741628e-06,
      "loss": 0.0397,
      "step": 5675
    },
    {
      "epoch": 20.0,
      "grad_norm": 0.18822583556175232,
      "learning_rate": 1.3660287081339715e-06,
      "loss": 0.0541,
      "step": 5700
    },
    {
      "epoch": 20.087719298245613,
      "grad_norm": 50.338890075683594,
      "learning_rate": 1.30622009569378e-06,
      "loss": 0.016,
      "step": 5725
    },
    {
      "epoch": 20.17543859649123,
      "grad_norm": 0.05340347811579704,
      "learning_rate": 1.2464114832535886e-06,
      "loss": 0.0084,
      "step": 5750
    },
    {
      "epoch": 20.263157894736842,
      "grad_norm": 0.04629586637020111,
      "learning_rate": 1.186602870813397e-06,
      "loss": 0.0356,
      "step": 5775
    },
    {
      "epoch": 20.350877192982455,
      "grad_norm": 0.9073031544685364,
      "learning_rate": 1.1267942583732058e-06,
      "loss": 0.0786,
      "step": 5800
    },
    {
      "epoch": 20.43859649122807,
      "grad_norm": 10.095131874084473,
      "learning_rate": 1.0669856459330144e-06,
      "loss": 0.0419,
      "step": 5825
    },
    {
      "epoch": 20.526315789473685,
      "grad_norm": 0.16011740267276764,
      "learning_rate": 1.0071770334928231e-06,
      "loss": 0.0653,
      "step": 5850
    },
    {
      "epoch": 20.614035087719298,
      "grad_norm": 0.18243364989757538,
      "learning_rate": 9.473684210526316e-07,
      "loss": 0.0015,
      "step": 5875
    },
    {
      "epoch": 20.70175438596491,
      "grad_norm": 0.14108963310718536,
      "learning_rate": 8.875598086124402e-07,
      "loss": 0.0817,
      "step": 5900
    },
    {
      "epoch": 20.789473684210527,
      "grad_norm": 0.03629032149910927,
      "learning_rate": 8.277511961722487e-07,
      "loss": 0.002,
      "step": 5925
    },
    {
      "epoch": 20.87719298245614,
      "grad_norm": 21.195932388305664,
      "learning_rate": 7.679425837320574e-07,
      "loss": 0.1265,
      "step": 5950
    },
    {
      "epoch": 20.964912280701753,
      "grad_norm": 0.036704275757074356,
      "learning_rate": 7.081339712918661e-07,
      "loss": 0.0369,
      "step": 5975
    },
    {
      "epoch": 21.05263157894737,
      "grad_norm": 327.310791015625,
      "learning_rate": 6.483253588516747e-07,
      "loss": 0.0288,
      "step": 6000
    }
  ],
  "logging_steps": 25,
  "max_steps": 6270,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 22,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.6186141004321981e+18,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}