| { | |
| "best_metric": 0.8358, | |
| "best_model_checkpoint": "./results/checkpoint-4680", | |
| "epoch": 9.9984, | |
| "global_step": 4680, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 1.0683760683760685e-06, | |
| "loss": 2.5122, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 2.136752136752137e-06, | |
| "loss": 2.5472, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 3.205128205128205e-06, | |
| "loss": 2.5657, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.273504273504274e-06, | |
| "loss": 2.5424, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 5.341880341880342e-06, | |
| "loss": 2.5233, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 6.41025641025641e-06, | |
| "loss": 2.4851, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 7.478632478632479e-06, | |
| "loss": 2.4275, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 8.547008547008548e-06, | |
| "loss": 2.4113, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 9.615384615384616e-06, | |
| "loss": 2.3931, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 1.0683760683760684e-05, | |
| "loss": 2.3975, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 1.1752136752136752e-05, | |
| "loss": 2.3228, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 1.282051282051282e-05, | |
| "loss": 2.2772, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 1.388888888888889e-05, | |
| "loss": 2.2564, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 1.4957264957264958e-05, | |
| "loss": 2.1786, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 1.602564102564103e-05, | |
| "loss": 2.1342, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 1.7094017094017095e-05, | |
| "loss": 2.0996, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 1.8162393162393162e-05, | |
| "loss": 2.0469, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 1.923076923076923e-05, | |
| "loss": 1.9912, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 2.02991452991453e-05, | |
| "loss": 1.9157, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 2.1367521367521368e-05, | |
| "loss": 1.9226, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 2.2435897435897437e-05, | |
| "loss": 1.8344, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.3504273504273504e-05, | |
| "loss": 1.7494, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.4572649572649573e-05, | |
| "loss": 1.7794, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.564102564102564e-05, | |
| "loss": 1.6851, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.670940170940171e-05, | |
| "loss": 1.6689, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.777777777777778e-05, | |
| "loss": 1.5957, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 2.8846153846153845e-05, | |
| "loss": 1.5902, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 2.9914529914529915e-05, | |
| "loss": 1.5815, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 3.098290598290599e-05, | |
| "loss": 1.4559, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 3.205128205128206e-05, | |
| "loss": 1.4774, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 3.311965811965812e-05, | |
| "loss": 1.4321, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 3.418803418803419e-05, | |
| "loss": 1.3998, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 3.525641025641026e-05, | |
| "loss": 1.331, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 3.6324786324786323e-05, | |
| "loss": 1.3203, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 3.739316239316239e-05, | |
| "loss": 1.286, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 3.846153846153846e-05, | |
| "loss": 1.2602, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 3.952991452991453e-05, | |
| "loss": 1.1832, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.05982905982906e-05, | |
| "loss": 1.2183, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 4.166666666666667e-05, | |
| "loss": 1.1764, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 4.2735042735042735e-05, | |
| "loss": 1.0967, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 4.3803418803418805e-05, | |
| "loss": 1.1271, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 4.4871794871794874e-05, | |
| "loss": 1.0845, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 4.594017094017094e-05, | |
| "loss": 1.0432, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 4.700854700854701e-05, | |
| "loss": 1.0321, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 4.8076923076923084e-05, | |
| "loss": 1.0472, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 4.9145299145299147e-05, | |
| "loss": 0.9605, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "eval_accuracy": 0.687, | |
| "eval_loss": 0.8900136947631836, | |
| "eval_runtime": 21.116, | |
| "eval_samples_per_second": 473.574, | |
| "eval_steps_per_second": 14.823, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.997625830959164e-05, | |
| "loss": 1.018, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 1.03, | |
| "learning_rate": 4.985754985754986e-05, | |
| "loss": 0.9973, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 1.05, | |
| "learning_rate": 4.9738841405508076e-05, | |
| "loss": 0.9714, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 1.07, | |
| "learning_rate": 4.962013295346629e-05, | |
| "loss": 0.9567, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 1.09, | |
| "learning_rate": 4.95014245014245e-05, | |
| "loss": 0.9347, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 1.11, | |
| "learning_rate": 4.938271604938271e-05, | |
| "loss": 0.9179, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 1.13, | |
| "learning_rate": 4.926400759734093e-05, | |
| "loss": 0.8909, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 1.15, | |
| "learning_rate": 4.9145299145299147e-05, | |
| "loss": 0.9269, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 1.17, | |
| "learning_rate": 4.902659069325736e-05, | |
| "loss": 0.8686, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "learning_rate": 4.890788224121557e-05, | |
| "loss": 0.8799, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 1.22, | |
| "learning_rate": 4.878917378917379e-05, | |
| "loss": 0.9077, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "learning_rate": 4.867046533713201e-05, | |
| "loss": 0.8822, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 1.26, | |
| "learning_rate": 4.8551756885090224e-05, | |
| "loss": 0.8425, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "learning_rate": 4.8433048433048433e-05, | |
| "loss": 0.8427, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 1.3, | |
| "learning_rate": 4.831433998100665e-05, | |
| "loss": 0.8856, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "learning_rate": 4.819563152896486e-05, | |
| "loss": 0.8423, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 1.35, | |
| "learning_rate": 4.8076923076923084e-05, | |
| "loss": 0.8685, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 1.37, | |
| "learning_rate": 4.7958214624881294e-05, | |
| "loss": 0.8587, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 1.39, | |
| "learning_rate": 4.783950617283951e-05, | |
| "loss": 0.8236, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 1.41, | |
| "learning_rate": 4.772079772079772e-05, | |
| "loss": 0.8337, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 1.43, | |
| "learning_rate": 4.760208926875594e-05, | |
| "loss": 0.8655, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 1.45, | |
| "learning_rate": 4.7483380816714154e-05, | |
| "loss": 0.809, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 1.47, | |
| "learning_rate": 4.736467236467237e-05, | |
| "loss": 0.8312, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 1.49, | |
| "learning_rate": 4.724596391263058e-05, | |
| "loss": 0.7782, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "learning_rate": 4.71272554605888e-05, | |
| "loss": 0.8134, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 1.54, | |
| "learning_rate": 4.700854700854701e-05, | |
| "loss": 0.8137, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "learning_rate": 4.688983855650523e-05, | |
| "loss": 0.7623, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 1.58, | |
| "learning_rate": 4.677113010446344e-05, | |
| "loss": 0.806, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "learning_rate": 4.665242165242166e-05, | |
| "loss": 0.8405, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 1.62, | |
| "learning_rate": 4.653371320037987e-05, | |
| "loss": 0.7952, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 1.64, | |
| "learning_rate": 4.6415004748338084e-05, | |
| "loss": 0.7881, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 1.67, | |
| "learning_rate": 4.62962962962963e-05, | |
| "loss": 0.7727, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 1.69, | |
| "learning_rate": 4.617758784425451e-05, | |
| "loss": 0.7154, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 1.71, | |
| "learning_rate": 4.605887939221273e-05, | |
| "loss": 0.77, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 1.73, | |
| "learning_rate": 4.594017094017094e-05, | |
| "loss": 0.7493, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "learning_rate": 4.582146248812916e-05, | |
| "loss": 0.7086, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 1.77, | |
| "learning_rate": 4.570275403608737e-05, | |
| "loss": 0.8065, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 1.79, | |
| "learning_rate": 4.558404558404559e-05, | |
| "loss": 0.7705, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 1.81, | |
| "learning_rate": 4.54653371320038e-05, | |
| "loss": 0.6841, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 1.84, | |
| "learning_rate": 4.5346628679962014e-05, | |
| "loss": 0.7043, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 1.86, | |
| "learning_rate": 4.522792022792023e-05, | |
| "loss": 0.7388, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "learning_rate": 4.510921177587845e-05, | |
| "loss": 0.7132, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "learning_rate": 4.499050332383666e-05, | |
| "loss": 0.6625, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "learning_rate": 4.4871794871794874e-05, | |
| "loss": 0.7285, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 1.94, | |
| "learning_rate": 4.4753086419753084e-05, | |
| "loss": 0.6788, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "learning_rate": 4.463437796771131e-05, | |
| "loss": 0.7557, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 1.99, | |
| "learning_rate": 4.451566951566952e-05, | |
| "loss": 0.6594, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "eval_accuracy": 0.7678, | |
| "eval_loss": 0.6456298232078552, | |
| "eval_runtime": 21.2497, | |
| "eval_samples_per_second": 470.595, | |
| "eval_steps_per_second": 14.73, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 2.01, | |
| "learning_rate": 4.4396961063627735e-05, | |
| "loss": 0.8174, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 2.03, | |
| "learning_rate": 4.4278252611585945e-05, | |
| "loss": 0.6592, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 2.05, | |
| "learning_rate": 4.415954415954416e-05, | |
| "loss": 0.73, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 2.07, | |
| "learning_rate": 4.404083570750238e-05, | |
| "loss": 0.6779, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 2.09, | |
| "learning_rate": 4.3922127255460595e-05, | |
| "loss": 0.6957, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 2.12, | |
| "learning_rate": 4.3803418803418805e-05, | |
| "loss": 0.6562, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 2.14, | |
| "learning_rate": 4.368471035137702e-05, | |
| "loss": 0.724, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 2.16, | |
| "learning_rate": 4.356600189933523e-05, | |
| "loss": 0.6781, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 2.18, | |
| "learning_rate": 4.344729344729345e-05, | |
| "loss": 0.6892, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 2.2, | |
| "learning_rate": 4.3328584995251665e-05, | |
| "loss": 0.7035, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 2.22, | |
| "learning_rate": 4.3209876543209875e-05, | |
| "loss": 0.6824, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 2.24, | |
| "learning_rate": 4.309116809116809e-05, | |
| "loss": 0.6601, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 2.26, | |
| "learning_rate": 4.297245963912631e-05, | |
| "loss": 0.6688, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 2.29, | |
| "learning_rate": 4.2853751187084525e-05, | |
| "loss": 0.6977, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 2.31, | |
| "learning_rate": 4.2735042735042735e-05, | |
| "loss": 0.7354, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 2.33, | |
| "learning_rate": 4.261633428300095e-05, | |
| "loss": 0.6231, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 2.35, | |
| "learning_rate": 4.249762583095916e-05, | |
| "loss": 0.6716, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 2.37, | |
| "learning_rate": 4.2378917378917385e-05, | |
| "loss": 0.6857, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 2.39, | |
| "learning_rate": 4.2260208926875595e-05, | |
| "loss": 0.6855, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 2.41, | |
| "learning_rate": 4.214150047483381e-05, | |
| "loss": 0.6914, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 2.44, | |
| "learning_rate": 4.202279202279202e-05, | |
| "loss": 0.6566, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 2.46, | |
| "learning_rate": 4.190408357075024e-05, | |
| "loss": 0.6659, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 2.48, | |
| "learning_rate": 4.1785375118708455e-05, | |
| "loss": 0.6288, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "learning_rate": 4.166666666666667e-05, | |
| "loss": 0.6506, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 2.52, | |
| "learning_rate": 4.154795821462488e-05, | |
| "loss": 0.6128, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 2.54, | |
| "learning_rate": 4.14292497625831e-05, | |
| "loss": 0.6094, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 2.56, | |
| "learning_rate": 4.131054131054131e-05, | |
| "loss": 0.7048, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 2.58, | |
| "learning_rate": 4.119183285849953e-05, | |
| "loss": 0.6738, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 2.61, | |
| "learning_rate": 4.107312440645774e-05, | |
| "loss": 0.6113, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 2.63, | |
| "learning_rate": 4.095441595441596e-05, | |
| "loss": 0.6592, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 2.65, | |
| "learning_rate": 4.083570750237417e-05, | |
| "loss": 0.6671, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 2.67, | |
| "learning_rate": 4.0716999050332386e-05, | |
| "loss": 0.6513, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 2.69, | |
| "learning_rate": 4.05982905982906e-05, | |
| "loss": 0.6391, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 2.71, | |
| "learning_rate": 4.047958214624881e-05, | |
| "loss": 0.6241, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 2.73, | |
| "learning_rate": 4.036087369420703e-05, | |
| "loss": 0.6036, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 2.76, | |
| "learning_rate": 4.024216524216524e-05, | |
| "loss": 0.6585, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 2.78, | |
| "learning_rate": 4.012345679012346e-05, | |
| "loss": 0.7027, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 2.8, | |
| "learning_rate": 4.000474833808167e-05, | |
| "loss": 0.6101, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 2.82, | |
| "learning_rate": 3.988603988603989e-05, | |
| "loss": 0.619, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 2.84, | |
| "learning_rate": 3.97673314339981e-05, | |
| "loss": 0.6883, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 2.86, | |
| "learning_rate": 3.9648622981956316e-05, | |
| "loss": 0.6182, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 2.88, | |
| "learning_rate": 3.952991452991453e-05, | |
| "loss": 0.6315, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 2.9, | |
| "learning_rate": 3.941120607787275e-05, | |
| "loss": 0.6402, | |
| "step": 1360 | |
| }, | |
| { | |
| "epoch": 2.93, | |
| "learning_rate": 3.929249762583096e-05, | |
| "loss": 0.6355, | |
| "step": 1370 | |
| }, | |
| { | |
| "epoch": 2.95, | |
| "learning_rate": 3.9173789173789176e-05, | |
| "loss": 0.5992, | |
| "step": 1380 | |
| }, | |
| { | |
| "epoch": 2.97, | |
| "learning_rate": 3.9055080721747386e-05, | |
| "loss": 0.6006, | |
| "step": 1390 | |
| }, | |
| { | |
| "epoch": 2.99, | |
| "learning_rate": 3.893637226970561e-05, | |
| "loss": 0.6022, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "eval_accuracy": 0.794, | |
| "eval_loss": 0.5784328579902649, | |
| "eval_runtime": 21.2253, | |
| "eval_samples_per_second": 471.135, | |
| "eval_steps_per_second": 14.747, | |
| "step": 1404 | |
| }, | |
| { | |
| "epoch": 3.01, | |
| "learning_rate": 3.881766381766382e-05, | |
| "loss": 0.7359, | |
| "step": 1410 | |
| }, | |
| { | |
| "epoch": 3.03, | |
| "learning_rate": 3.8698955365622036e-05, | |
| "loss": 0.6312, | |
| "step": 1420 | |
| }, | |
| { | |
| "epoch": 3.06, | |
| "learning_rate": 3.8580246913580246e-05, | |
| "loss": 0.6066, | |
| "step": 1430 | |
| }, | |
| { | |
| "epoch": 3.08, | |
| "learning_rate": 3.846153846153846e-05, | |
| "loss": 0.597, | |
| "step": 1440 | |
| }, | |
| { | |
| "epoch": 3.1, | |
| "learning_rate": 3.834283000949668e-05, | |
| "loss": 0.6428, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 3.12, | |
| "learning_rate": 3.8224121557454896e-05, | |
| "loss": 0.5524, | |
| "step": 1460 | |
| }, | |
| { | |
| "epoch": 3.14, | |
| "learning_rate": 3.8105413105413106e-05, | |
| "loss": 0.5814, | |
| "step": 1470 | |
| }, | |
| { | |
| "epoch": 3.16, | |
| "learning_rate": 3.798670465337132e-05, | |
| "loss": 0.6541, | |
| "step": 1480 | |
| }, | |
| { | |
| "epoch": 3.18, | |
| "learning_rate": 3.786799620132954e-05, | |
| "loss": 0.5789, | |
| "step": 1490 | |
| }, | |
| { | |
| "epoch": 3.2, | |
| "learning_rate": 3.774928774928775e-05, | |
| "loss": 0.6399, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 3.23, | |
| "learning_rate": 3.7630579297245966e-05, | |
| "loss": 0.6521, | |
| "step": 1510 | |
| }, | |
| { | |
| "epoch": 3.25, | |
| "learning_rate": 3.7511870845204176e-05, | |
| "loss": 0.6345, | |
| "step": 1520 | |
| }, | |
| { | |
| "epoch": 3.27, | |
| "learning_rate": 3.739316239316239e-05, | |
| "loss": 0.6147, | |
| "step": 1530 | |
| }, | |
| { | |
| "epoch": 3.29, | |
| "learning_rate": 3.727445394112061e-05, | |
| "loss": 0.6226, | |
| "step": 1540 | |
| }, | |
| { | |
| "epoch": 3.31, | |
| "learning_rate": 3.7155745489078827e-05, | |
| "loss": 0.5948, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 3.33, | |
| "learning_rate": 3.7037037037037037e-05, | |
| "loss": 0.6254, | |
| "step": 1560 | |
| }, | |
| { | |
| "epoch": 3.35, | |
| "learning_rate": 3.691832858499525e-05, | |
| "loss": 0.6278, | |
| "step": 1570 | |
| }, | |
| { | |
| "epoch": 3.38, | |
| "learning_rate": 3.679962013295346e-05, | |
| "loss": 0.5576, | |
| "step": 1580 | |
| }, | |
| { | |
| "epoch": 3.4, | |
| "learning_rate": 3.668091168091169e-05, | |
| "loss": 0.6125, | |
| "step": 1590 | |
| }, | |
| { | |
| "epoch": 3.42, | |
| "learning_rate": 3.65622032288699e-05, | |
| "loss": 0.5937, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 3.44, | |
| "learning_rate": 3.6443494776828113e-05, | |
| "loss": 0.5742, | |
| "step": 1610 | |
| }, | |
| { | |
| "epoch": 3.46, | |
| "learning_rate": 3.6324786324786323e-05, | |
| "loss": 0.5925, | |
| "step": 1620 | |
| }, | |
| { | |
| "epoch": 3.48, | |
| "learning_rate": 3.620607787274454e-05, | |
| "loss": 0.6244, | |
| "step": 1630 | |
| }, | |
| { | |
| "epoch": 3.5, | |
| "learning_rate": 3.608736942070276e-05, | |
| "loss": 0.5918, | |
| "step": 1640 | |
| }, | |
| { | |
| "epoch": 3.52, | |
| "learning_rate": 3.5968660968660974e-05, | |
| "loss": 0.6122, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 3.55, | |
| "learning_rate": 3.5849952516619184e-05, | |
| "loss": 0.5986, | |
| "step": 1660 | |
| }, | |
| { | |
| "epoch": 3.57, | |
| "learning_rate": 3.57312440645774e-05, | |
| "loss": 0.6189, | |
| "step": 1670 | |
| }, | |
| { | |
| "epoch": 3.59, | |
| "learning_rate": 3.561253561253561e-05, | |
| "loss": 0.5764, | |
| "step": 1680 | |
| }, | |
| { | |
| "epoch": 3.61, | |
| "learning_rate": 3.5493827160493834e-05, | |
| "loss": 0.5119, | |
| "step": 1690 | |
| }, | |
| { | |
| "epoch": 3.63, | |
| "learning_rate": 3.5375118708452044e-05, | |
| "loss": 0.5894, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 3.65, | |
| "learning_rate": 3.525641025641026e-05, | |
| "loss": 0.5738, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 3.67, | |
| "learning_rate": 3.513770180436847e-05, | |
| "loss": 0.5619, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 3.7, | |
| "learning_rate": 3.501899335232669e-05, | |
| "loss": 0.616, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 3.72, | |
| "learning_rate": 3.4900284900284904e-05, | |
| "loss": 0.5725, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 3.74, | |
| "learning_rate": 3.4781576448243114e-05, | |
| "loss": 0.5865, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 3.76, | |
| "learning_rate": 3.466286799620133e-05, | |
| "loss": 0.5679, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 3.78, | |
| "learning_rate": 3.454415954415954e-05, | |
| "loss": 0.5111, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 3.8, | |
| "learning_rate": 3.4425451092117764e-05, | |
| "loss": 0.5868, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 3.82, | |
| "learning_rate": 3.4306742640075974e-05, | |
| "loss": 0.5759, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 3.84, | |
| "learning_rate": 3.418803418803419e-05, | |
| "loss": 0.5404, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 3.87, | |
| "learning_rate": 3.40693257359924e-05, | |
| "loss": 0.5866, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 3.89, | |
| "learning_rate": 3.395061728395062e-05, | |
| "loss": 0.5865, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 3.91, | |
| "learning_rate": 3.3831908831908834e-05, | |
| "loss": 0.5784, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 3.93, | |
| "learning_rate": 3.371320037986705e-05, | |
| "loss": 0.5685, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 3.95, | |
| "learning_rate": 3.359449192782526e-05, | |
| "loss": 0.5839, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 3.97, | |
| "learning_rate": 3.347578347578348e-05, | |
| "loss": 0.5234, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 3.99, | |
| "learning_rate": 3.335707502374169e-05, | |
| "loss": 0.5507, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "eval_accuracy": 0.8112, | |
| "eval_loss": 0.5229769349098206, | |
| "eval_runtime": 21.3489, | |
| "eval_samples_per_second": 468.409, | |
| "eval_steps_per_second": 14.661, | |
| "step": 1872 | |
| }, | |
| { | |
| "epoch": 4.02, | |
| "learning_rate": 3.323836657169991e-05, | |
| "loss": 0.5862, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 4.04, | |
| "learning_rate": 3.311965811965812e-05, | |
| "loss": 0.6056, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 4.06, | |
| "learning_rate": 3.300094966761634e-05, | |
| "loss": 0.5565, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 4.08, | |
| "learning_rate": 3.288224121557455e-05, | |
| "loss": 0.5985, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 4.1, | |
| "learning_rate": 3.2763532763532764e-05, | |
| "loss": 0.5704, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 4.12, | |
| "learning_rate": 3.264482431149098e-05, | |
| "loss": 0.5913, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 4.15, | |
| "learning_rate": 3.25261158594492e-05, | |
| "loss": 0.5804, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 4.17, | |
| "learning_rate": 3.240740740740741e-05, | |
| "loss": 0.6079, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 4.19, | |
| "learning_rate": 3.2288698955365625e-05, | |
| "loss": 0.5369, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 4.21, | |
| "learning_rate": 3.216999050332384e-05, | |
| "loss": 0.5552, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 4.23, | |
| "learning_rate": 3.205128205128206e-05, | |
| "loss": 0.5675, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 4.25, | |
| "learning_rate": 3.193257359924027e-05, | |
| "loss": 0.5972, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 4.27, | |
| "learning_rate": 3.181386514719848e-05, | |
| "loss": 0.567, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 4.29, | |
| "learning_rate": 3.1695156695156695e-05, | |
| "loss": 0.5687, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 4.32, | |
| "learning_rate": 3.157644824311491e-05, | |
| "loss": 0.5447, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 4.34, | |
| "learning_rate": 3.145773979107313e-05, | |
| "loss": 0.551, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 4.36, | |
| "learning_rate": 3.133903133903134e-05, | |
| "loss": 0.5206, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 4.38, | |
| "learning_rate": 3.1220322886989555e-05, | |
| "loss": 0.5896, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 4.4, | |
| "learning_rate": 3.1101614434947765e-05, | |
| "loss": 0.52, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 4.42, | |
| "learning_rate": 3.098290598290599e-05, | |
| "loss": 0.5419, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 4.44, | |
| "learning_rate": 3.08641975308642e-05, | |
| "loss": 0.5957, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 4.47, | |
| "learning_rate": 3.0745489078822415e-05, | |
| "loss": 0.5718, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 4.49, | |
| "learning_rate": 3.0626780626780625e-05, | |
| "loss": 0.5631, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 4.51, | |
| "learning_rate": 3.0508072174738845e-05, | |
| "loss": 0.5291, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 4.53, | |
| "learning_rate": 3.0389363722697055e-05, | |
| "loss": 0.5231, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 4.55, | |
| "learning_rate": 3.0270655270655275e-05, | |
| "loss": 0.5584, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 4.57, | |
| "learning_rate": 3.0151946818613485e-05, | |
| "loss": 0.5358, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 4.59, | |
| "learning_rate": 3.0033238366571702e-05, | |
| "loss": 0.5586, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 4.61, | |
| "learning_rate": 2.9914529914529915e-05, | |
| "loss": 0.524, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 4.64, | |
| "learning_rate": 2.9795821462488132e-05, | |
| "loss": 0.5691, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 4.66, | |
| "learning_rate": 2.9677113010446345e-05, | |
| "loss": 0.5332, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 4.68, | |
| "learning_rate": 2.9558404558404562e-05, | |
| "loss": 0.5056, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 4.7, | |
| "learning_rate": 2.9439696106362775e-05, | |
| "loss": 0.5249, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 4.72, | |
| "learning_rate": 2.9320987654320992e-05, | |
| "loss": 0.5079, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 4.74, | |
| "learning_rate": 2.9202279202279202e-05, | |
| "loss": 0.5237, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 4.76, | |
| "learning_rate": 2.9083570750237422e-05, | |
| "loss": 0.5777, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 4.79, | |
| "learning_rate": 2.8964862298195632e-05, | |
| "loss": 0.4953, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 4.81, | |
| "learning_rate": 2.8846153846153845e-05, | |
| "loss": 0.5031, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 4.83, | |
| "learning_rate": 2.8727445394112062e-05, | |
| "loss": 0.5307, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 4.85, | |
| "learning_rate": 2.8608736942070276e-05, | |
| "loss": 0.557, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 4.87, | |
| "learning_rate": 2.8490028490028492e-05, | |
| "loss": 0.5504, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 4.89, | |
| "learning_rate": 2.8371320037986702e-05, | |
| "loss": 0.5421, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 4.91, | |
| "learning_rate": 2.8252611585944922e-05, | |
| "loss": 0.5242, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 4.93, | |
| "learning_rate": 2.8133903133903132e-05, | |
| "loss": 0.543, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 4.96, | |
| "learning_rate": 2.8015194681861352e-05, | |
| "loss": 0.5266, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 4.98, | |
| "learning_rate": 2.7896486229819562e-05, | |
| "loss": 0.4883, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 5.0, | |
| "learning_rate": 2.777777777777778e-05, | |
| "loss": 0.5017, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 5.0, | |
| "eval_accuracy": 0.8279, | |
| "eval_loss": 0.4852665066719055, | |
| "eval_runtime": 21.4449, | |
| "eval_samples_per_second": 466.31, | |
| "eval_steps_per_second": 14.596, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 5.02, | |
| "learning_rate": 2.7659069325735992e-05, | |
| "loss": 0.5633, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 5.04, | |
| "learning_rate": 2.754036087369421e-05, | |
| "loss": 0.5191, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 5.06, | |
| "learning_rate": 2.7421652421652423e-05, | |
| "loss": 0.5179, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 5.09, | |
| "learning_rate": 2.730294396961064e-05, | |
| "loss": 0.5143, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 5.11, | |
| "learning_rate": 2.7184235517568853e-05, | |
| "loss": 0.5288, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 5.13, | |
| "learning_rate": 2.706552706552707e-05, | |
| "loss": 0.5625, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 5.15, | |
| "learning_rate": 2.694681861348528e-05, | |
| "loss": 0.5763, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 5.17, | |
| "learning_rate": 2.68281101614435e-05, | |
| "loss": 0.5449, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 5.19, | |
| "learning_rate": 2.670940170940171e-05, | |
| "loss": 0.5854, | |
| "step": 2430 | |
| }, | |
| { | |
| "epoch": 5.21, | |
| "learning_rate": 2.6590693257359926e-05, | |
| "loss": 0.555, | |
| "step": 2440 | |
| }, | |
| { | |
| "epoch": 5.23, | |
| "learning_rate": 2.647198480531814e-05, | |
| "loss": 0.518, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 5.26, | |
| "learning_rate": 2.6353276353276356e-05, | |
| "loss": 0.5439, | |
| "step": 2460 | |
| }, | |
| { | |
| "epoch": 5.28, | |
| "learning_rate": 2.623456790123457e-05, | |
| "loss": 0.5278, | |
| "step": 2470 | |
| }, | |
| { | |
| "epoch": 5.3, | |
| "learning_rate": 2.611585944919278e-05, | |
| "loss": 0.4893, | |
| "step": 2480 | |
| }, | |
| { | |
| "epoch": 5.32, | |
| "learning_rate": 2.5997150997151e-05, | |
| "loss": 0.5088, | |
| "step": 2490 | |
| }, | |
| { | |
| "epoch": 5.34, | |
| "learning_rate": 2.587844254510921e-05, | |
| "loss": 0.5006, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 5.36, | |
| "learning_rate": 2.575973409306743e-05, | |
| "loss": 0.5255, | |
| "step": 2510 | |
| }, | |
| { | |
| "epoch": 5.38, | |
| "learning_rate": 2.564102564102564e-05, | |
| "loss": 0.4822, | |
| "step": 2520 | |
| }, | |
| { | |
| "epoch": 5.41, | |
| "learning_rate": 2.5522317188983856e-05, | |
| "loss": 0.4957, | |
| "step": 2530 | |
| }, | |
| { | |
| "epoch": 5.43, | |
| "learning_rate": 2.540360873694207e-05, | |
| "loss": 0.4955, | |
| "step": 2540 | |
| }, | |
| { | |
| "epoch": 5.45, | |
| "learning_rate": 2.5284900284900286e-05, | |
| "loss": 0.5487, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 5.47, | |
| "learning_rate": 2.51661918328585e-05, | |
| "loss": 0.5006, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 5.49, | |
| "learning_rate": 2.5047483380816717e-05, | |
| "loss": 0.6019, | |
| "step": 2570 | |
| }, | |
| { | |
| "epoch": 5.51, | |
| "learning_rate": 2.492877492877493e-05, | |
| "loss": 0.5318, | |
| "step": 2580 | |
| }, | |
| { | |
| "epoch": 5.53, | |
| "learning_rate": 2.4810066476733147e-05, | |
| "loss": 0.501, | |
| "step": 2590 | |
| }, | |
| { | |
| "epoch": 5.55, | |
| "learning_rate": 2.4691358024691357e-05, | |
| "loss": 0.5177, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 5.58, | |
| "learning_rate": 2.4572649572649573e-05, | |
| "loss": 0.511, | |
| "step": 2610 | |
| }, | |
| { | |
| "epoch": 5.6, | |
| "learning_rate": 2.4453941120607787e-05, | |
| "loss": 0.521, | |
| "step": 2620 | |
| }, | |
| { | |
| "epoch": 5.62, | |
| "learning_rate": 2.4335232668566003e-05, | |
| "loss": 0.5401, | |
| "step": 2630 | |
| }, | |
| { | |
| "epoch": 5.64, | |
| "learning_rate": 2.4216524216524217e-05, | |
| "loss": 0.5247, | |
| "step": 2640 | |
| }, | |
| { | |
| "epoch": 5.66, | |
| "learning_rate": 2.409781576448243e-05, | |
| "loss": 0.5437, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 5.68, | |
| "learning_rate": 2.3979107312440647e-05, | |
| "loss": 0.5283, | |
| "step": 2660 | |
| }, | |
| { | |
| "epoch": 5.7, | |
| "learning_rate": 2.386039886039886e-05, | |
| "loss": 0.5262, | |
| "step": 2670 | |
| }, | |
| { | |
| "epoch": 5.73, | |
| "learning_rate": 2.3741690408357077e-05, | |
| "loss": 0.5207, | |
| "step": 2680 | |
| }, | |
| { | |
| "epoch": 5.75, | |
| "learning_rate": 2.362298195631529e-05, | |
| "loss": 0.4631, | |
| "step": 2690 | |
| }, | |
| { | |
| "epoch": 5.77, | |
| "learning_rate": 2.3504273504273504e-05, | |
| "loss": 0.5307, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 5.79, | |
| "learning_rate": 2.338556505223172e-05, | |
| "loss": 0.5224, | |
| "step": 2710 | |
| }, | |
| { | |
| "epoch": 5.81, | |
| "learning_rate": 2.3266856600189934e-05, | |
| "loss": 0.5514, | |
| "step": 2720 | |
| }, | |
| { | |
| "epoch": 5.83, | |
| "learning_rate": 2.314814814814815e-05, | |
| "loss": 0.5298, | |
| "step": 2730 | |
| }, | |
| { | |
| "epoch": 5.85, | |
| "learning_rate": 2.3029439696106364e-05, | |
| "loss": 0.509, | |
| "step": 2740 | |
| }, | |
| { | |
| "epoch": 5.87, | |
| "learning_rate": 2.291073124406458e-05, | |
| "loss": 0.5223, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 5.9, | |
| "learning_rate": 2.2792022792022794e-05, | |
| "loss": 0.514, | |
| "step": 2760 | |
| }, | |
| { | |
| "epoch": 5.92, | |
| "learning_rate": 2.2673314339981007e-05, | |
| "loss": 0.5104, | |
| "step": 2770 | |
| }, | |
| { | |
| "epoch": 5.94, | |
| "learning_rate": 2.2554605887939224e-05, | |
| "loss": 0.5158, | |
| "step": 2780 | |
| }, | |
| { | |
| "epoch": 5.96, | |
| "learning_rate": 2.2435897435897437e-05, | |
| "loss": 0.4896, | |
| "step": 2790 | |
| }, | |
| { | |
| "epoch": 5.98, | |
| "learning_rate": 2.2317188983855654e-05, | |
| "loss": 0.507, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 6.0, | |
| "eval_accuracy": 0.8286, | |
| "eval_loss": 0.47876212000846863, | |
| "eval_runtime": 21.3708, | |
| "eval_samples_per_second": 467.927, | |
| "eval_steps_per_second": 14.646, | |
| "step": 2808 | |
| }, | |
| { | |
| "epoch": 6.0, | |
| "learning_rate": 2.2198480531813867e-05, | |
| "loss": 0.5624, | |
| "step": 2810 | |
| }, | |
| { | |
| "epoch": 6.03, | |
| "learning_rate": 2.207977207977208e-05, | |
| "loss": 0.4918, | |
| "step": 2820 | |
| }, | |
| { | |
| "epoch": 6.05, | |
| "learning_rate": 2.1961063627730297e-05, | |
| "loss": 0.5042, | |
| "step": 2830 | |
| }, | |
| { | |
| "epoch": 6.07, | |
| "learning_rate": 2.184235517568851e-05, | |
| "loss": 0.5235, | |
| "step": 2840 | |
| }, | |
| { | |
| "epoch": 6.09, | |
| "learning_rate": 2.1723646723646724e-05, | |
| "loss": 0.5313, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 6.11, | |
| "learning_rate": 2.1604938271604937e-05, | |
| "loss": 0.5547, | |
| "step": 2860 | |
| }, | |
| { | |
| "epoch": 6.13, | |
| "learning_rate": 2.1486229819563154e-05, | |
| "loss": 0.5049, | |
| "step": 2870 | |
| }, | |
| { | |
| "epoch": 6.15, | |
| "learning_rate": 2.1367521367521368e-05, | |
| "loss": 0.5309, | |
| "step": 2880 | |
| }, | |
| { | |
| "epoch": 6.17, | |
| "learning_rate": 2.124881291547958e-05, | |
| "loss": 0.5157, | |
| "step": 2890 | |
| }, | |
| { | |
| "epoch": 6.2, | |
| "learning_rate": 2.1130104463437798e-05, | |
| "loss": 0.5061, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 6.22, | |
| "learning_rate": 2.101139601139601e-05, | |
| "loss": 0.5457, | |
| "step": 2910 | |
| }, | |
| { | |
| "epoch": 6.24, | |
| "learning_rate": 2.0892687559354228e-05, | |
| "loss": 0.5072, | |
| "step": 2920 | |
| }, | |
| { | |
| "epoch": 6.26, | |
| "learning_rate": 2.077397910731244e-05, | |
| "loss": 0.4898, | |
| "step": 2930 | |
| }, | |
| { | |
| "epoch": 6.28, | |
| "learning_rate": 2.0655270655270654e-05, | |
| "loss": 0.5464, | |
| "step": 2940 | |
| }, | |
| { | |
| "epoch": 6.3, | |
| "learning_rate": 2.053656220322887e-05, | |
| "loss": 0.5298, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 6.32, | |
| "learning_rate": 2.0417853751187084e-05, | |
| "loss": 0.4767, | |
| "step": 2960 | |
| }, | |
| { | |
| "epoch": 6.35, | |
| "learning_rate": 2.02991452991453e-05, | |
| "loss": 0.4861, | |
| "step": 2970 | |
| }, | |
| { | |
| "epoch": 6.37, | |
| "learning_rate": 2.0180436847103515e-05, | |
| "loss": 0.5123, | |
| "step": 2980 | |
| }, | |
| { | |
| "epoch": 6.39, | |
| "learning_rate": 2.006172839506173e-05, | |
| "loss": 0.5093, | |
| "step": 2990 | |
| }, | |
| { | |
| "epoch": 6.41, | |
| "learning_rate": 1.9943019943019945e-05, | |
| "loss": 0.5399, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 6.43, | |
| "learning_rate": 1.9824311490978158e-05, | |
| "loss": 0.5012, | |
| "step": 3010 | |
| }, | |
| { | |
| "epoch": 6.45, | |
| "learning_rate": 1.9705603038936375e-05, | |
| "loss": 0.5185, | |
| "step": 3020 | |
| }, | |
| { | |
| "epoch": 6.47, | |
| "learning_rate": 1.9586894586894588e-05, | |
| "loss": 0.5036, | |
| "step": 3030 | |
| }, | |
| { | |
| "epoch": 6.49, | |
| "learning_rate": 1.9468186134852805e-05, | |
| "loss": 0.475, | |
| "step": 3040 | |
| }, | |
| { | |
| "epoch": 6.52, | |
| "learning_rate": 1.9349477682811018e-05, | |
| "loss": 0.5041, | |
| "step": 3050 | |
| }, | |
| { | |
| "epoch": 6.54, | |
| "learning_rate": 1.923076923076923e-05, | |
| "loss": 0.509, | |
| "step": 3060 | |
| }, | |
| { | |
| "epoch": 6.56, | |
| "learning_rate": 1.9112060778727448e-05, | |
| "loss": 0.5313, | |
| "step": 3070 | |
| }, | |
| { | |
| "epoch": 6.58, | |
| "learning_rate": 1.899335232668566e-05, | |
| "loss": 0.51, | |
| "step": 3080 | |
| }, | |
| { | |
| "epoch": 6.6, | |
| "learning_rate": 1.8874643874643875e-05, | |
| "loss": 0.5137, | |
| "step": 3090 | |
| }, | |
| { | |
| "epoch": 6.62, | |
| "learning_rate": 1.8755935422602088e-05, | |
| "loss": 0.4943, | |
| "step": 3100 | |
| }, | |
| { | |
| "epoch": 6.64, | |
| "learning_rate": 1.8637226970560305e-05, | |
| "loss": 0.4786, | |
| "step": 3110 | |
| }, | |
| { | |
| "epoch": 6.67, | |
| "learning_rate": 1.8518518518518518e-05, | |
| "loss": 0.4836, | |
| "step": 3120 | |
| }, | |
| { | |
| "epoch": 6.69, | |
| "learning_rate": 1.839981006647673e-05, | |
| "loss": 0.5024, | |
| "step": 3130 | |
| }, | |
| { | |
| "epoch": 6.71, | |
| "learning_rate": 1.828110161443495e-05, | |
| "loss": 0.5013, | |
| "step": 3140 | |
| }, | |
| { | |
| "epoch": 6.73, | |
| "learning_rate": 1.8162393162393162e-05, | |
| "loss": 0.5167, | |
| "step": 3150 | |
| }, | |
| { | |
| "epoch": 6.75, | |
| "learning_rate": 1.804368471035138e-05, | |
| "loss": 0.5161, | |
| "step": 3160 | |
| }, | |
| { | |
| "epoch": 6.77, | |
| "learning_rate": 1.7924976258309592e-05, | |
| "loss": 0.4702, | |
| "step": 3170 | |
| }, | |
| { | |
| "epoch": 6.79, | |
| "learning_rate": 1.7806267806267805e-05, | |
| "loss": 0.5331, | |
| "step": 3180 | |
| }, | |
| { | |
| "epoch": 6.81, | |
| "learning_rate": 1.7687559354226022e-05, | |
| "loss": 0.465, | |
| "step": 3190 | |
| }, | |
| { | |
| "epoch": 6.84, | |
| "learning_rate": 1.7568850902184235e-05, | |
| "loss": 0.5203, | |
| "step": 3200 | |
| }, | |
| { | |
| "epoch": 6.86, | |
| "learning_rate": 1.7450142450142452e-05, | |
| "loss": 0.4921, | |
| "step": 3210 | |
| }, | |
| { | |
| "epoch": 6.88, | |
| "learning_rate": 1.7331433998100665e-05, | |
| "loss": 0.4776, | |
| "step": 3220 | |
| }, | |
| { | |
| "epoch": 6.9, | |
| "learning_rate": 1.7212725546058882e-05, | |
| "loss": 0.5065, | |
| "step": 3230 | |
| }, | |
| { | |
| "epoch": 6.92, | |
| "learning_rate": 1.7094017094017095e-05, | |
| "loss": 0.4806, | |
| "step": 3240 | |
| }, | |
| { | |
| "epoch": 6.94, | |
| "learning_rate": 1.697530864197531e-05, | |
| "loss": 0.4963, | |
| "step": 3250 | |
| }, | |
| { | |
| "epoch": 6.96, | |
| "learning_rate": 1.6856600189933525e-05, | |
| "loss": 0.52, | |
| "step": 3260 | |
| }, | |
| { | |
| "epoch": 6.99, | |
| "learning_rate": 1.673789173789174e-05, | |
| "loss": 0.4921, | |
| "step": 3270 | |
| }, | |
| { | |
| "epoch": 7.0, | |
| "eval_accuracy": 0.8208, | |
| "eval_loss": 0.4884471297264099, | |
| "eval_runtime": 21.37, | |
| "eval_samples_per_second": 467.945, | |
| "eval_steps_per_second": 14.647, | |
| "step": 3276 | |
| }, | |
| { | |
| "epoch": 7.01, | |
| "learning_rate": 1.6619183285849956e-05, | |
| "loss": 0.5273, | |
| "step": 3280 | |
| }, | |
| { | |
| "epoch": 7.03, | |
| "learning_rate": 1.650047483380817e-05, | |
| "loss": 0.5583, | |
| "step": 3290 | |
| }, | |
| { | |
| "epoch": 7.05, | |
| "learning_rate": 1.6381766381766382e-05, | |
| "loss": 0.4898, | |
| "step": 3300 | |
| }, | |
| { | |
| "epoch": 7.07, | |
| "learning_rate": 1.62630579297246e-05, | |
| "loss": 0.4591, | |
| "step": 3310 | |
| }, | |
| { | |
| "epoch": 7.09, | |
| "learning_rate": 1.6144349477682812e-05, | |
| "loss": 0.4956, | |
| "step": 3320 | |
| }, | |
| { | |
| "epoch": 7.12, | |
| "learning_rate": 1.602564102564103e-05, | |
| "loss": 0.5032, | |
| "step": 3330 | |
| }, | |
| { | |
| "epoch": 7.14, | |
| "learning_rate": 1.590693257359924e-05, | |
| "loss": 0.4485, | |
| "step": 3340 | |
| }, | |
| { | |
| "epoch": 7.16, | |
| "learning_rate": 1.5788224121557456e-05, | |
| "loss": 0.5178, | |
| "step": 3350 | |
| }, | |
| { | |
| "epoch": 7.18, | |
| "learning_rate": 1.566951566951567e-05, | |
| "loss": 0.4991, | |
| "step": 3360 | |
| }, | |
| { | |
| "epoch": 7.2, | |
| "learning_rate": 1.5550807217473882e-05, | |
| "loss": 0.5482, | |
| "step": 3370 | |
| }, | |
| { | |
| "epoch": 7.22, | |
| "learning_rate": 1.54320987654321e-05, | |
| "loss": 0.4417, | |
| "step": 3380 | |
| }, | |
| { | |
| "epoch": 7.24, | |
| "learning_rate": 1.5313390313390312e-05, | |
| "loss": 0.5275, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 7.26, | |
| "learning_rate": 1.5194681861348528e-05, | |
| "loss": 0.4982, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 7.29, | |
| "learning_rate": 1.5075973409306743e-05, | |
| "loss": 0.5263, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 7.31, | |
| "learning_rate": 1.4957264957264958e-05, | |
| "loss": 0.5081, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 7.33, | |
| "learning_rate": 1.4838556505223173e-05, | |
| "loss": 0.5072, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 7.35, | |
| "learning_rate": 1.4719848053181388e-05, | |
| "loss": 0.4599, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 7.37, | |
| "learning_rate": 1.4601139601139601e-05, | |
| "loss": 0.473, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 7.39, | |
| "learning_rate": 1.4482431149097816e-05, | |
| "loss": 0.444, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 7.41, | |
| "learning_rate": 1.4363722697056031e-05, | |
| "loss": 0.472, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 7.44, | |
| "learning_rate": 1.4245014245014246e-05, | |
| "loss": 0.5026, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 7.46, | |
| "learning_rate": 1.4126305792972461e-05, | |
| "loss": 0.5378, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 7.48, | |
| "learning_rate": 1.4007597340930676e-05, | |
| "loss": 0.484, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 7.5, | |
| "learning_rate": 1.388888888888889e-05, | |
| "loss": 0.5081, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 7.52, | |
| "learning_rate": 1.3770180436847105e-05, | |
| "loss": 0.4989, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 7.54, | |
| "learning_rate": 1.365147198480532e-05, | |
| "loss": 0.4824, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 7.56, | |
| "learning_rate": 1.3532763532763535e-05, | |
| "loss": 0.5288, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 7.58, | |
| "learning_rate": 1.341405508072175e-05, | |
| "loss": 0.5052, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 7.61, | |
| "learning_rate": 1.3295346628679963e-05, | |
| "loss": 0.5127, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 7.63, | |
| "learning_rate": 1.3176638176638178e-05, | |
| "loss": 0.5377, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 7.65, | |
| "learning_rate": 1.305792972459639e-05, | |
| "loss": 0.4534, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 7.67, | |
| "learning_rate": 1.2939221272554605e-05, | |
| "loss": 0.5001, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 7.69, | |
| "learning_rate": 1.282051282051282e-05, | |
| "loss": 0.4492, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 7.71, | |
| "learning_rate": 1.2701804368471035e-05, | |
| "loss": 0.4998, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 7.73, | |
| "learning_rate": 1.258309591642925e-05, | |
| "loss": 0.4512, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 7.76, | |
| "learning_rate": 1.2464387464387465e-05, | |
| "loss": 0.4569, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 7.78, | |
| "learning_rate": 1.2345679012345678e-05, | |
| "loss": 0.4876, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 7.8, | |
| "learning_rate": 1.2226970560303893e-05, | |
| "loss": 0.5116, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 7.82, | |
| "learning_rate": 1.2108262108262108e-05, | |
| "loss": 0.4963, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 7.84, | |
| "learning_rate": 1.1989553656220323e-05, | |
| "loss": 0.465, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 7.86, | |
| "learning_rate": 1.1870845204178538e-05, | |
| "loss": 0.5033, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 7.88, | |
| "learning_rate": 1.1752136752136752e-05, | |
| "loss": 0.4657, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 7.9, | |
| "learning_rate": 1.1633428300094967e-05, | |
| "loss": 0.4686, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 7.93, | |
| "learning_rate": 1.1514719848053182e-05, | |
| "loss": 0.4633, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 7.95, | |
| "learning_rate": 1.1396011396011397e-05, | |
| "loss": 0.4896, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 7.97, | |
| "learning_rate": 1.1277302943969612e-05, | |
| "loss": 0.469, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 7.99, | |
| "learning_rate": 1.1158594491927827e-05, | |
| "loss": 0.5245, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 8.0, | |
| "eval_accuracy": 0.8281, | |
| "eval_loss": 0.48483797907829285, | |
| "eval_runtime": 21.3581, | |
| "eval_samples_per_second": 468.206, | |
| "eval_steps_per_second": 14.655, | |
| "step": 3744 | |
| }, | |
| { | |
| "epoch": 8.01, | |
| "learning_rate": 1.103988603988604e-05, | |
| "loss": 0.4722, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 8.03, | |
| "learning_rate": 1.0921177587844255e-05, | |
| "loss": 0.4614, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 8.06, | |
| "learning_rate": 1.0802469135802469e-05, | |
| "loss": 0.4887, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 8.08, | |
| "learning_rate": 1.0683760683760684e-05, | |
| "loss": 0.4374, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 8.1, | |
| "learning_rate": 1.0565052231718899e-05, | |
| "loss": 0.479, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 8.12, | |
| "learning_rate": 1.0446343779677114e-05, | |
| "loss": 0.4642, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 8.14, | |
| "learning_rate": 1.0327635327635327e-05, | |
| "loss": 0.5005, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 8.16, | |
| "learning_rate": 1.0208926875593542e-05, | |
| "loss": 0.4856, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 8.18, | |
| "learning_rate": 1.0090218423551757e-05, | |
| "loss": 0.4865, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 8.2, | |
| "learning_rate": 9.971509971509972e-06, | |
| "loss": 0.4613, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 8.23, | |
| "learning_rate": 9.852801519468187e-06, | |
| "loss": 0.5187, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 8.25, | |
| "learning_rate": 9.734093067426402e-06, | |
| "loss": 0.4637, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 8.27, | |
| "learning_rate": 9.615384615384616e-06, | |
| "loss": 0.5122, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 8.29, | |
| "learning_rate": 9.49667616334283e-06, | |
| "loss": 0.5287, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 8.31, | |
| "learning_rate": 9.377967711301044e-06, | |
| "loss": 0.5041, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 8.33, | |
| "learning_rate": 9.259259259259259e-06, | |
| "loss": 0.5201, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 8.35, | |
| "learning_rate": 9.140550807217474e-06, | |
| "loss": 0.5113, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 8.38, | |
| "learning_rate": 9.02184235517569e-06, | |
| "loss": 0.444, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 8.4, | |
| "learning_rate": 8.903133903133903e-06, | |
| "loss": 0.4543, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 8.42, | |
| "learning_rate": 8.784425451092118e-06, | |
| "loss": 0.4853, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 8.44, | |
| "learning_rate": 8.665716999050333e-06, | |
| "loss": 0.5047, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 8.46, | |
| "learning_rate": 8.547008547008548e-06, | |
| "loss": 0.5277, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 8.48, | |
| "learning_rate": 8.428300094966763e-06, | |
| "loss": 0.5079, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 8.5, | |
| "learning_rate": 8.309591642924978e-06, | |
| "loss": 0.5105, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 8.52, | |
| "learning_rate": 8.190883190883191e-06, | |
| "loss": 0.4957, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 8.55, | |
| "learning_rate": 8.072174738841406e-06, | |
| "loss": 0.4925, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 8.57, | |
| "learning_rate": 7.95346628679962e-06, | |
| "loss": 0.4598, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 8.59, | |
| "learning_rate": 7.834757834757835e-06, | |
| "loss": 0.4641, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 8.61, | |
| "learning_rate": 7.71604938271605e-06, | |
| "loss": 0.4663, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 8.63, | |
| "learning_rate": 7.597340930674264e-06, | |
| "loss": 0.4348, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 8.65, | |
| "learning_rate": 7.478632478632479e-06, | |
| "loss": 0.4792, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 8.67, | |
| "learning_rate": 7.359924026590694e-06, | |
| "loss": 0.5119, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 8.7, | |
| "learning_rate": 7.241215574548908e-06, | |
| "loss": 0.5036, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 8.72, | |
| "learning_rate": 7.122507122507123e-06, | |
| "loss": 0.4743, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 8.74, | |
| "learning_rate": 7.003798670465338e-06, | |
| "loss": 0.5022, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 8.76, | |
| "learning_rate": 6.885090218423552e-06, | |
| "loss": 0.4308, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 8.78, | |
| "learning_rate": 6.766381766381767e-06, | |
| "loss": 0.4926, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 8.8, | |
| "learning_rate": 6.6476733143399815e-06, | |
| "loss": 0.4794, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 8.82, | |
| "learning_rate": 6.528964862298195e-06, | |
| "loss": 0.4819, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 8.84, | |
| "learning_rate": 6.41025641025641e-06, | |
| "loss": 0.5506, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 8.87, | |
| "learning_rate": 6.291547958214625e-06, | |
| "loss": 0.4911, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 8.89, | |
| "learning_rate": 6.172839506172839e-06, | |
| "loss": 0.5158, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 8.91, | |
| "learning_rate": 6.054131054131054e-06, | |
| "loss": 0.509, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 8.93, | |
| "learning_rate": 5.935422602089269e-06, | |
| "loss": 0.4803, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 8.95, | |
| "learning_rate": 5.816714150047483e-06, | |
| "loss": 0.4789, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 8.97, | |
| "learning_rate": 5.6980056980056985e-06, | |
| "loss": 0.4754, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 8.99, | |
| "learning_rate": 5.5792972459639135e-06, | |
| "loss": 0.4995, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 9.0, | |
| "eval_accuracy": 0.8322, | |
| "eval_loss": 0.459750771522522, | |
| "eval_runtime": 21.4299, | |
| "eval_samples_per_second": 466.638, | |
| "eval_steps_per_second": 14.606, | |
| "step": 4212 | |
| }, | |
| { | |
| "epoch": 9.02, | |
| "learning_rate": 5.460588793922128e-06, | |
| "loss": 0.4865, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 9.04, | |
| "learning_rate": 5.341880341880342e-06, | |
| "loss": 0.5137, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 9.06, | |
| "learning_rate": 5.223171889838557e-06, | |
| "loss": 0.4679, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 9.08, | |
| "learning_rate": 5.104463437796771e-06, | |
| "loss": 0.507, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 9.1, | |
| "learning_rate": 4.985754985754986e-06, | |
| "loss": 0.4584, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 9.12, | |
| "learning_rate": 4.867046533713201e-06, | |
| "loss": 0.4757, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 9.15, | |
| "learning_rate": 4.748338081671415e-06, | |
| "loss": 0.4757, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 9.17, | |
| "learning_rate": 4.6296296296296296e-06, | |
| "loss": 0.4316, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 9.19, | |
| "learning_rate": 4.510921177587845e-06, | |
| "loss": 0.5198, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 9.21, | |
| "learning_rate": 4.392212725546059e-06, | |
| "loss": 0.4834, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 9.23, | |
| "learning_rate": 4.273504273504274e-06, | |
| "loss": 0.5192, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 9.25, | |
| "learning_rate": 4.154795821462489e-06, | |
| "loss": 0.4508, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 9.27, | |
| "learning_rate": 4.036087369420703e-06, | |
| "loss": 0.458, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 9.29, | |
| "learning_rate": 3.917378917378917e-06, | |
| "loss": 0.4476, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 9.32, | |
| "learning_rate": 3.798670465337132e-06, | |
| "loss": 0.4887, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 9.34, | |
| "learning_rate": 3.679962013295347e-06, | |
| "loss": 0.5174, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 9.36, | |
| "learning_rate": 3.5612535612535615e-06, | |
| "loss": 0.423, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 9.38, | |
| "learning_rate": 3.442545109211776e-06, | |
| "loss": 0.4798, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 9.4, | |
| "learning_rate": 3.3238366571699908e-06, | |
| "loss": 0.4391, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 9.42, | |
| "learning_rate": 3.205128205128205e-06, | |
| "loss": 0.5056, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 9.44, | |
| "learning_rate": 3.0864197530864196e-06, | |
| "loss": 0.4621, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 9.47, | |
| "learning_rate": 2.9677113010446346e-06, | |
| "loss": 0.4783, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 9.49, | |
| "learning_rate": 2.8490028490028492e-06, | |
| "loss": 0.46, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 9.51, | |
| "learning_rate": 2.730294396961064e-06, | |
| "loss": 0.4905, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 9.53, | |
| "learning_rate": 2.6115859449192785e-06, | |
| "loss": 0.4581, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 9.55, | |
| "learning_rate": 2.492877492877493e-06, | |
| "loss": 0.4687, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 9.57, | |
| "learning_rate": 2.3741690408357077e-06, | |
| "loss": 0.4702, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 9.59, | |
| "learning_rate": 2.2554605887939223e-06, | |
| "loss": 0.4857, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 9.61, | |
| "learning_rate": 2.136752136752137e-06, | |
| "loss": 0.4641, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 9.64, | |
| "learning_rate": 2.0180436847103515e-06, | |
| "loss": 0.5004, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 9.66, | |
| "learning_rate": 1.899335232668566e-06, | |
| "loss": 0.4858, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 9.68, | |
| "learning_rate": 1.7806267806267808e-06, | |
| "loss": 0.4585, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 9.7, | |
| "learning_rate": 1.6619183285849954e-06, | |
| "loss": 0.4975, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 9.72, | |
| "learning_rate": 1.5432098765432098e-06, | |
| "loss": 0.4665, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 9.74, | |
| "learning_rate": 1.4245014245014246e-06, | |
| "loss": 0.4832, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 9.76, | |
| "learning_rate": 1.3057929724596392e-06, | |
| "loss": 0.4638, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 9.79, | |
| "learning_rate": 1.1870845204178538e-06, | |
| "loss": 0.4654, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 9.81, | |
| "learning_rate": 1.0683760683760685e-06, | |
| "loss": 0.5019, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 9.83, | |
| "learning_rate": 9.49667616334283e-07, | |
| "loss": 0.457, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 9.85, | |
| "learning_rate": 8.309591642924977e-07, | |
| "loss": 0.509, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 9.87, | |
| "learning_rate": 7.122507122507123e-07, | |
| "loss": 0.4658, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 9.89, | |
| "learning_rate": 5.935422602089269e-07, | |
| "loss": 0.428, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 9.91, | |
| "learning_rate": 4.748338081671415e-07, | |
| "loss": 0.4631, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 9.93, | |
| "learning_rate": 3.5612535612535615e-07, | |
| "loss": 0.4929, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 9.96, | |
| "learning_rate": 2.3741690408357074e-07, | |
| "loss": 0.4634, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 9.98, | |
| "learning_rate": 1.1870845204178537e-07, | |
| "loss": 0.4831, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 10.0, | |
| "learning_rate": 0.0, | |
| "loss": 0.4526, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 10.0, | |
| "eval_accuracy": 0.8358, | |
| "eval_loss": 0.47183701395988464, | |
| "eval_runtime": 21.5096, | |
| "eval_samples_per_second": 464.91, | |
| "eval_steps_per_second": 14.552, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 10.0, | |
| "step": 4680, | |
| "total_flos": 1.2118658355831767e+18, | |
| "train_loss": 0.6860576226161077, | |
| "train_runtime": 2151.6233, | |
| "train_samples_per_second": 278.859, | |
| "train_steps_per_second": 2.175 | |
| } | |
| ], | |
| "max_steps": 4680, | |
| "num_train_epochs": 10, | |
| "total_flos": 1.2118658355831767e+18, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
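
The log above is a standard Trainer state dump: `log_history` interleaves per-10-step training entries (`epoch`, `learning_rate`, `loss`, `step`) with one evaluation entry per epoch (`eval_accuracy`, `eval_loss`, runtime stats), and the header fields record that the best checkpoint (`checkpoint-4680`, accuracy 0.8358) is the one saved at the final step. Below is a minimal sketch, not part of the original run, of one way to load such a file and plot its curves; it assumes the JSON is saved as `trainer_state.json` in the working directory (a hypothetical path) and that `matplotlib` is installed.

```python
# Minimal sketch: load a Hugging Face Trainer state file like the one above
# and plot training loss vs. step alongside eval accuracy vs. epoch.
# Assumptions: file path "trainer_state.json" (hypothetical), matplotlib installed.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Training entries carry a "loss" key; per-epoch evaluation entries carry "eval_accuracy".
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_accuracy" in e]

fig, (ax_loss, ax_acc) = plt.subplots(1, 2, figsize=(10, 4))

# Training loss per logged step (every 10 steps in this log).
ax_loss.plot([e["step"] for e in train_logs], [e["loss"] for e in train_logs])
ax_loss.set_xlabel("step")
ax_loss.set_ylabel("training loss")

# Validation accuracy once per epoch.
ax_acc.plot([e["epoch"] for e in eval_logs],
            [e["eval_accuracy"] for e in eval_logs], marker="o")
ax_acc.set_xlabel("epoch")
ax_acc.set_ylabel("eval accuracy")

fig.tight_layout()
plt.show()
```

Read against the log, such a plot would show the loss dropping steeply during the first (warmup) epoch and the evaluation accuracy climbing from 0.687 at epoch 1 to 0.8358 at epoch 10, consistent with the `best_metric` recorded in the header.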