{
  "best_metric": 1.0960041284561157,
  "best_model_checkpoint": "/exports/eddie/scratch/s1970716/models/question_gen/t5_3b_epoch_10/checkpoint-342",
  "epoch": 9.994520547945205,
  "global_step": 3420,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "learning_rate": 9.97076023391813e-05,
      "loss": 1.7391,
      "step": 10
    },
    {
      "epoch": 0.06,
      "learning_rate": 9.941520467836257e-05,
      "loss": 1.3208,
      "step": 20
    },
    {
      "epoch": 0.09,
      "learning_rate": 9.912280701754386e-05,
      "loss": 1.2679,
      "step": 30
    },
    {
      "epoch": 0.12,
      "learning_rate": 9.883040935672515e-05,
      "loss": 1.2292,
      "step": 40
    },
    {
      "epoch": 0.15,
      "learning_rate": 9.853801169590643e-05,
      "loss": 1.2356,
      "step": 50
    },
    {
      "epoch": 0.18,
      "learning_rate": 9.824561403508771e-05,
      "loss": 1.2327,
      "step": 60
    },
    {
      "epoch": 0.2,
      "learning_rate": 9.795321637426901e-05,
      "loss": 1.2397,
      "step": 70
    },
    {
      "epoch": 0.23,
      "learning_rate": 9.76608187134503e-05,
      "loss": 1.1967,
      "step": 80
    },
    {
      "epoch": 0.26,
      "learning_rate": 9.736842105263158e-05,
      "loss": 1.1946,
      "step": 90
    },
    {
      "epoch": 0.29,
      "learning_rate": 9.707602339181286e-05,
      "loss": 1.2225,
      "step": 100
    },
    {
      "epoch": 0.32,
      "learning_rate": 9.678362573099416e-05,
      "loss": 1.2038,
      "step": 110
    },
    {
      "epoch": 0.35,
      "learning_rate": 9.649122807017544e-05,
      "loss": 1.1921,
      "step": 120
    },
    {
      "epoch": 0.38,
      "learning_rate": 9.619883040935673e-05,
      "loss": 1.1498,
      "step": 130
    },
    {
      "epoch": 0.41,
      "learning_rate": 9.590643274853801e-05,
      "loss": 1.1471,
      "step": 140
    },
    {
      "epoch": 0.44,
      "learning_rate": 9.56140350877193e-05,
      "loss": 1.1799,
      "step": 150
    },
    {
      "epoch": 0.47,
      "learning_rate": 9.532163742690059e-05,
      "loss": 1.1812,
      "step": 160
    },
    {
      "epoch": 0.5,
      "learning_rate": 9.502923976608188e-05,
      "loss": 1.1693,
      "step": 170
    },
    {
      "epoch": 0.53,
      "learning_rate": 9.473684210526316e-05,
      "loss": 1.1852,
      "step": 180
    },
    {
      "epoch": 0.56,
      "learning_rate": 9.444444444444444e-05,
      "loss": 1.1751,
      "step": 190
    },
    {
      "epoch": 0.58,
      "learning_rate": 9.415204678362574e-05,
      "loss": 1.1588,
      "step": 200
    },
    {
      "epoch": 0.61,
      "learning_rate": 9.385964912280703e-05,
      "loss": 1.1564,
      "step": 210
    },
    {
      "epoch": 0.64,
      "learning_rate": 9.35672514619883e-05,
      "loss": 1.181,
      "step": 220
    },
    {
      "epoch": 0.67,
      "learning_rate": 9.327485380116959e-05,
      "loss": 1.1835,
      "step": 230
    },
    {
      "epoch": 0.7,
      "learning_rate": 9.298245614035089e-05,
      "loss": 1.1421,
      "step": 240
    },
    {
      "epoch": 0.73,
      "learning_rate": 9.269005847953217e-05,
      "loss": 1.1774,
      "step": 250
    },
    {
      "epoch": 0.76,
      "learning_rate": 9.239766081871345e-05,
      "loss": 1.1355,
      "step": 260
    },
    {
      "epoch": 0.79,
      "learning_rate": 9.210526315789474e-05,
      "loss": 1.1467,
      "step": 270
    },
    {
      "epoch": 0.82,
      "learning_rate": 9.181286549707603e-05,
      "loss": 1.1534,
      "step": 280
    },
    {
      "epoch": 0.85,
      "learning_rate": 9.152046783625731e-05,
      "loss": 1.1597,
      "step": 290
    },
    {
      "epoch": 0.88,
      "learning_rate": 9.12280701754386e-05,
      "loss": 1.1345,
      "step": 300
    },
    {
      "epoch": 0.91,
      "learning_rate": 9.093567251461989e-05,
      "loss": 1.1458,
      "step": 310
    },
    {
      "epoch": 0.94,
      "learning_rate": 9.064327485380117e-05,
      "loss": 1.1065,
      "step": 320
    },
    {
      "epoch": 0.96,
      "learning_rate": 9.035087719298246e-05,
      "loss": 1.1485,
      "step": 330
    },
    {
      "epoch": 0.99,
      "learning_rate": 9.005847953216374e-05,
      "loss": 1.147,
      "step": 340
    },
    {
      "epoch": 1.0,
      "eval_loss": 1.0960041284561157,
      "eval_runtime": 132.8151,
      "eval_samples_per_second": 79.584,
      "eval_steps_per_second": 9.954,
      "step": 342
    },
    {
      "epoch": 1.02,
      "learning_rate": 8.976608187134502e-05,
      "loss": 1.0368,
      "step": 350
    },
    {
      "epoch": 1.05,
      "learning_rate": 8.947368421052632e-05,
      "loss": 0.9974,
      "step": 360
    },
    {
      "epoch": 1.08,
      "learning_rate": 8.918128654970761e-05,
      "loss": 0.9777,
      "step": 370
    },
    {
      "epoch": 1.11,
      "learning_rate": 8.888888888888889e-05,
      "loss": 0.9939,
      "step": 380
    },
    {
      "epoch": 1.14,
      "learning_rate": 8.859649122807017e-05,
      "loss": 0.9984,
      "step": 390
    },
    {
      "epoch": 1.17,
      "learning_rate": 8.830409356725147e-05,
      "loss": 0.9941,
      "step": 400
    },
    {
      "epoch": 1.2,
      "learning_rate": 8.801169590643276e-05,
      "loss": 1.0135,
      "step": 410
    },
    {
      "epoch": 1.23,
      "learning_rate": 8.771929824561403e-05,
      "loss": 0.9955,
      "step": 420
    },
    {
      "epoch": 1.26,
      "learning_rate": 8.742690058479532e-05,
      "loss": 0.984,
      "step": 430
    },
    {
      "epoch": 1.29,
      "learning_rate": 8.713450292397662e-05,
      "loss": 1.0194,
      "step": 440
    },
    {
      "epoch": 1.32,
      "learning_rate": 8.68421052631579e-05,
      "loss": 0.9848,
      "step": 450
    },
    {
      "epoch": 1.34,
      "learning_rate": 8.654970760233918e-05,
      "loss": 0.9885,
      "step": 460
    },
    {
      "epoch": 1.37,
      "learning_rate": 8.625730994152047e-05,
      "loss": 0.995,
      "step": 470
    },
    {
      "epoch": 1.4,
      "learning_rate": 8.596491228070177e-05,
      "loss": 1.0075,
      "step": 480
    },
    {
      "epoch": 1.43,
      "learning_rate": 8.567251461988305e-05,
      "loss": 1.0036,
      "step": 490
    },
    {
      "epoch": 1.46,
      "learning_rate": 8.538011695906433e-05,
      "loss": 0.9981,
      "step": 500
    },
    {
      "epoch": 1.49,
      "learning_rate": 8.508771929824562e-05,
      "loss": 0.9783,
      "step": 510
    },
    {
      "epoch": 1.52,
      "learning_rate": 8.47953216374269e-05,
      "loss": 0.9974,
      "step": 520
    },
    {
      "epoch": 1.55,
      "learning_rate": 8.45029239766082e-05,
      "loss": 0.9985,
      "step": 530
    },
    {
      "epoch": 1.58,
      "learning_rate": 8.421052631578948e-05,
      "loss": 1.0136,
      "step": 540
    },
    {
      "epoch": 1.61,
      "learning_rate": 8.391812865497076e-05,
      "loss": 1.0077,
      "step": 550
    },
    {
      "epoch": 1.64,
      "learning_rate": 8.362573099415205e-05,
      "loss": 0.9968,
      "step": 560
    },
    {
      "epoch": 1.67,
      "learning_rate": 8.333333333333334e-05,
      "loss": 1.0064,
      "step": 570
    },
    {
      "epoch": 1.69,
      "learning_rate": 8.304093567251462e-05,
      "loss": 0.994,
      "step": 580
    },
    {
      "epoch": 1.72,
      "learning_rate": 8.27485380116959e-05,
      "loss": 0.9906,
      "step": 590
    },
    {
      "epoch": 1.75,
      "learning_rate": 8.24561403508772e-05,
      "loss": 0.9842,
      "step": 600
    },
    {
      "epoch": 1.78,
      "learning_rate": 8.216374269005849e-05,
      "loss": 0.9906,
      "step": 610
    },
    {
      "epoch": 1.81,
      "learning_rate": 8.187134502923976e-05,
      "loss": 0.9925,
      "step": 620
    },
    {
      "epoch": 1.84,
      "learning_rate": 8.157894736842105e-05,
      "loss": 0.9849,
      "step": 630
    },
    {
      "epoch": 1.87,
      "learning_rate": 8.128654970760235e-05,
      "loss": 0.9992,
      "step": 640
    },
    {
      "epoch": 1.9,
      "learning_rate": 8.099415204678363e-05,
      "loss": 0.9861,
      "step": 650
    },
    {
      "epoch": 1.93,
      "learning_rate": 8.070175438596491e-05,
      "loss": 0.9953,
      "step": 660
    },
    {
      "epoch": 1.96,
      "learning_rate": 8.04093567251462e-05,
      "loss": 0.9758,
      "step": 670
    },
    {
      "epoch": 1.99,
      "learning_rate": 8.01169590643275e-05,
      "loss": 0.9928,
      "step": 680
    },
    {
      "epoch": 2.0,
      "eval_loss": 1.102066159248352,
      "eval_runtime": 133.0084,
      "eval_samples_per_second": 79.469,
      "eval_steps_per_second": 9.939,
      "step": 684
    },
    {
      "epoch": 2.02,
      "learning_rate": 7.982456140350878e-05,
      "loss": 0.9297,
      "step": 690
    },
    {
      "epoch": 2.05,
      "learning_rate": 7.953216374269006e-05,
      "loss": 0.8537,
      "step": 700
    },
    {
      "epoch": 2.07,
      "learning_rate": 7.923976608187135e-05,
      "loss": 0.8556,
      "step": 710
    },
    {
      "epoch": 2.1,
      "learning_rate": 7.894736842105263e-05,
      "loss": 0.8347,
      "step": 720
    },
    {
      "epoch": 2.13,
      "learning_rate": 7.865497076023393e-05,
      "loss": 0.8416,
      "step": 730
    },
    {
      "epoch": 2.16,
      "learning_rate": 7.836257309941521e-05,
      "loss": 0.8436,
      "step": 740
    },
    {
      "epoch": 2.19,
      "learning_rate": 7.807017543859649e-05,
      "loss": 0.8352,
      "step": 750
    },
    {
      "epoch": 2.22,
      "learning_rate": 7.777777777777778e-05,
      "loss": 0.8387,
      "step": 760
    },
    {
      "epoch": 2.25,
      "learning_rate": 7.748538011695908e-05,
      "loss": 0.8336,
      "step": 770
    },
    {
      "epoch": 2.28,
      "learning_rate": 7.719298245614036e-05,
      "loss": 0.8653,
      "step": 780
    },
    {
      "epoch": 2.31,
      "learning_rate": 7.690058479532164e-05,
      "loss": 0.8509,
      "step": 790
    },
    {
      "epoch": 2.34,
      "learning_rate": 7.660818713450293e-05,
      "loss": 0.8525,
      "step": 800
    },
    {
      "epoch": 2.37,
      "learning_rate": 7.631578947368422e-05,
      "loss": 0.8481,
      "step": 810
    },
    {
      "epoch": 2.4,
      "learning_rate": 7.602339181286549e-05,
      "loss": 0.8615,
      "step": 820
    },
    {
      "epoch": 2.43,
      "learning_rate": 7.573099415204679e-05,
      "loss": 0.8697,
      "step": 830
    },
    {
      "epoch": 2.45,
      "learning_rate": 7.543859649122808e-05,
      "loss": 0.8498,
      "step": 840
    },
    {
      "epoch": 2.48,
      "learning_rate": 7.514619883040936e-05,
      "loss": 0.8691,
      "step": 850
    },
    {
      "epoch": 2.51,
      "learning_rate": 7.485380116959064e-05,
      "loss": 0.8661,
      "step": 860
    },
    {
      "epoch": 2.54,
      "learning_rate": 7.456140350877193e-05,
      "loss": 0.8747,
      "step": 870
    },
    {
      "epoch": 2.57,
      "learning_rate": 7.426900584795321e-05,
      "loss": 0.8701,
      "step": 880
    },
    {
      "epoch": 2.6,
      "learning_rate": 7.397660818713451e-05,
      "loss": 0.8556,
      "step": 890
    },
    {
      "epoch": 2.63,
      "learning_rate": 7.368421052631579e-05,
      "loss": 0.8589,
      "step": 900
    },
    {
      "epoch": 2.66,
      "learning_rate": 7.339181286549708e-05,
      "loss": 0.8526,
      "step": 910
    },
    {
      "epoch": 2.69,
      "learning_rate": 7.309941520467836e-05,
      "loss": 0.8838,
      "step": 920
    },
    {
      "epoch": 2.72,
      "learning_rate": 7.280701754385966e-05,
      "loss": 0.872,
      "step": 930
    },
    {
      "epoch": 2.75,
      "learning_rate": 7.251461988304094e-05,
      "loss": 0.858,
      "step": 940
    },
    {
      "epoch": 2.78,
      "learning_rate": 7.222222222222222e-05,
      "loss": 0.852,
      "step": 950
    },
    {
      "epoch": 2.81,
      "learning_rate": 7.192982456140351e-05,
      "loss": 0.8461,
      "step": 960
    },
    {
      "epoch": 2.83,
      "learning_rate": 7.163742690058481e-05,
      "loss": 0.8773,
      "step": 970
    },
    {
      "epoch": 2.86,
      "learning_rate": 7.134502923976609e-05,
      "loss": 0.8413,
      "step": 980
    },
    {
      "epoch": 2.89,
      "learning_rate": 7.105263157894737e-05,
      "loss": 0.8462,
      "step": 990
    },
    {
      "epoch": 2.92,
      "learning_rate": 7.076023391812866e-05,
      "loss": 0.8504,
      "step": 1000
    },
    {
      "epoch": 2.95,
      "learning_rate": 7.046783625730996e-05,
      "loss": 0.8554,
      "step": 1010
    },
    {
      "epoch": 2.98,
      "learning_rate": 7.017543859649122e-05,
      "loss": 0.861,
      "step": 1020
    },
    {
      "epoch": 3.0,
      "eval_loss": 1.1422148942947388,
      "eval_runtime": 133.1909,
      "eval_samples_per_second": 79.36,
      "eval_steps_per_second": 9.926,
      "step": 1026
    },
    {
      "epoch": 3.01,
      "learning_rate": 6.988304093567252e-05,
      "loss": 0.8067,
      "step": 1030
    },
    {
      "epoch": 3.04,
      "learning_rate": 6.959064327485381e-05,
      "loss": 0.7203,
      "step": 1040
    },
    {
      "epoch": 3.07,
      "learning_rate": 6.929824561403509e-05,
      "loss": 0.7223,
      "step": 1050
    },
    {
      "epoch": 3.1,
      "learning_rate": 6.900584795321637e-05,
      "loss": 0.7184,
      "step": 1060
    },
    {
      "epoch": 3.13,
      "learning_rate": 6.871345029239767e-05,
      "loss": 0.7215,
      "step": 1070
    },
    {
      "epoch": 3.16,
      "learning_rate": 6.842105263157895e-05,
      "loss": 0.718,
      "step": 1080
    },
    {
      "epoch": 3.19,
      "learning_rate": 6.812865497076024e-05,
      "loss": 0.7349,
      "step": 1090
    },
    {
      "epoch": 3.21,
      "learning_rate": 6.783625730994152e-05,
      "loss": 0.7185,
      "step": 1100
    },
    {
      "epoch": 3.24,
      "learning_rate": 6.754385964912281e-05,
      "loss": 0.7486,
      "step": 1110
    },
    {
      "epoch": 3.27,
      "learning_rate": 6.72514619883041e-05,
      "loss": 0.7279,
      "step": 1120
    },
    {
      "epoch": 3.3,
      "learning_rate": 6.695906432748539e-05,
      "loss": 0.7447,
      "step": 1130
    },
    {
      "epoch": 3.33,
      "learning_rate": 6.666666666666667e-05,
      "loss": 0.7423,
      "step": 1140
    },
    {
      "epoch": 3.36,
      "learning_rate": 6.637426900584795e-05,
      "loss": 0.7391,
      "step": 1150
    },
    {
      "epoch": 3.39,
      "learning_rate": 6.608187134502924e-05,
      "loss": 0.7337,
      "step": 1160
    },
    {
      "epoch": 3.42,
      "learning_rate": 6.578947368421054e-05,
      "loss": 0.7282,
      "step": 1170
    },
    {
      "epoch": 3.45,
      "learning_rate": 6.549707602339182e-05,
      "loss": 0.7438,
      "step": 1180
    },
    {
      "epoch": 3.48,
      "learning_rate": 6.52046783625731e-05,
      "loss": 0.7302,
      "step": 1190
    },
    {
      "epoch": 3.51,
      "learning_rate": 6.49122807017544e-05,
      "loss": 0.7418,
      "step": 1200
    },
    {
      "epoch": 3.54,
      "learning_rate": 6.461988304093567e-05,
      "loss": 0.7289,
      "step": 1210
    },
    {
      "epoch": 3.57,
      "learning_rate": 6.432748538011695e-05,
      "loss": 0.7352,
      "step": 1220
    },
    {
      "epoch": 3.59,
      "learning_rate": 6.403508771929825e-05,
      "loss": 0.7342,
      "step": 1230
    },
    {
      "epoch": 3.62,
      "learning_rate": 6.374269005847954e-05,
      "loss": 0.7361,
      "step": 1240
    },
    {
      "epoch": 3.65,
      "learning_rate": 6.345029239766082e-05,
      "loss": 0.7489,
      "step": 1250
    },
    {
      "epoch": 3.68,
      "learning_rate": 6.31578947368421e-05,
      "loss": 0.7334,
      "step": 1260
    },
    {
      "epoch": 3.71,
      "learning_rate": 6.28654970760234e-05,
      "loss": 0.7474,
      "step": 1270
    },
    {
      "epoch": 3.74,
      "learning_rate": 6.257309941520468e-05,
      "loss": 0.7385,
      "step": 1280
    },
    {
      "epoch": 3.77,
      "learning_rate": 6.228070175438597e-05,
      "loss": 0.7334,
      "step": 1290
    },
    {
      "epoch": 3.8,
      "learning_rate": 6.198830409356725e-05,
      "loss": 0.7521,
      "step": 1300
    },
    {
      "epoch": 3.83,
      "learning_rate": 6.169590643274855e-05,
      "loss": 0.7486,
      "step": 1310
    },
    {
      "epoch": 3.86,
      "learning_rate": 6.140350877192983e-05,
      "loss": 0.7468,
      "step": 1320
    },
    {
      "epoch": 3.89,
      "learning_rate": 6.111111111111112e-05,
      "loss": 0.7351,
      "step": 1330
    },
    {
      "epoch": 3.92,
      "learning_rate": 6.0818713450292395e-05,
      "loss": 0.7363,
      "step": 1340
    },
    {
      "epoch": 3.95,
      "learning_rate": 6.052631578947369e-05,
      "loss": 0.7444,
      "step": 1350
    },
    {
      "epoch": 3.97,
      "learning_rate": 6.0233918128654976e-05,
      "loss": 0.7545,
      "step": 1360
    },
    {
      "epoch": 4.0,
      "eval_loss": 1.2032016515731812,
      "eval_runtime": 133.2982,
      "eval_samples_per_second": 79.296,
      "eval_steps_per_second": 9.918,
      "step": 1368
    },
    {
      "epoch": 4.0,
      "learning_rate": 5.994152046783626e-05,
      "loss": 0.73,
      "step": 1370
    },
    {
      "epoch": 4.03,
      "learning_rate": 5.9649122807017544e-05,
      "loss": 0.6411,
      "step": 1380
    },
    {
      "epoch": 4.06,
      "learning_rate": 5.935672514619883e-05,
      "loss": 0.6374,
      "step": 1390
    },
    {
      "epoch": 4.09,
      "learning_rate": 5.9064327485380125e-05,
      "loss": 0.636,
      "step": 1400
    },
    {
      "epoch": 4.12,
      "learning_rate": 5.877192982456141e-05,
      "loss": 0.6276,
      "step": 1410
    },
    {
      "epoch": 4.15,
      "learning_rate": 5.847953216374269e-05,
      "loss": 0.6348,
      "step": 1420
    },
    {
      "epoch": 4.18,
      "learning_rate": 5.818713450292398e-05,
      "loss": 0.6405,
      "step": 1430
    },
    {
      "epoch": 4.21,
      "learning_rate": 5.789473684210527e-05,
      "loss": 0.6305,
      "step": 1440
    },
    {
      "epoch": 4.24,
      "learning_rate": 5.7602339181286554e-05,
      "loss": 0.6175,
      "step": 1450
    },
    {
      "epoch": 4.27,
      "learning_rate": 5.7309941520467835e-05,
      "loss": 0.6242,
      "step": 1460
    },
    {
      "epoch": 4.3,
      "learning_rate": 5.701754385964912e-05,
      "loss": 0.6253,
      "step": 1470
    },
    {
      "epoch": 4.33,
      "learning_rate": 5.6725146198830416e-05,
      "loss": 0.6299,
      "step": 1480
    },
    {
      "epoch": 4.35,
      "learning_rate": 5.64327485380117e-05,
      "loss": 0.6331,
      "step": 1490
    },
    {
      "epoch": 4.38,
      "learning_rate": 5.6140350877192984e-05,
      "loss": 0.6449,
      "step": 1500
    },
    {
      "epoch": 4.41,
      "learning_rate": 5.584795321637427e-05,
      "loss": 0.6246,
      "step": 1510
    },
    {
      "epoch": 4.44,
      "learning_rate": 5.555555555555556e-05,
      "loss": 0.6248,
      "step": 1520
    },
    {
      "epoch": 4.47,
      "learning_rate": 5.526315789473685e-05,
      "loss": 0.6332,
      "step": 1530
    },
    {
      "epoch": 4.5,
      "learning_rate": 5.4970760233918126e-05,
      "loss": 0.6333,
      "step": 1540
    },
    {
      "epoch": 4.53,
      "learning_rate": 5.467836257309942e-05,
      "loss": 0.6364,
      "step": 1550
    },
    {
      "epoch": 4.56,
      "learning_rate": 5.438596491228071e-05,
      "loss": 0.6289,
      "step": 1560
    },
    {
      "epoch": 4.59,
      "learning_rate": 5.4093567251461994e-05,
      "loss": 0.635,
      "step": 1570
    },
    {
      "epoch": 4.62,
      "learning_rate": 5.3801169590643275e-05,
      "loss": 0.6447,
      "step": 1580
    },
    {
      "epoch": 4.65,
      "learning_rate": 5.350877192982456e-05,
      "loss": 0.6368,
      "step": 1590
    },
    {
      "epoch": 4.68,
      "learning_rate": 5.3216374269005856e-05,
      "loss": 0.649,
      "step": 1600
    },
    {
      "epoch": 4.71,
      "learning_rate": 5.2923976608187143e-05,
      "loss": 0.6354,
      "step": 1610
    },
    {
      "epoch": 4.73,
      "learning_rate": 5.2631578947368424e-05,
      "loss": 0.6386,
      "step": 1620
    },
    {
      "epoch": 4.76,
      "learning_rate": 5.233918128654971e-05,
      "loss": 0.6423,
      "step": 1630
    },
    {
      "epoch": 4.79,
      "learning_rate": 5.2046783625731e-05,
      "loss": 0.6382,
      "step": 1640
    },
    {
      "epoch": 4.82,
      "learning_rate": 5.1754385964912286e-05,
      "loss": 0.6348,
      "step": 1650
    },
    {
      "epoch": 4.85,
      "learning_rate": 5.1461988304093566e-05,
      "loss": 0.6391,
      "step": 1660
    },
    {
      "epoch": 4.88,
      "learning_rate": 5.1169590643274853e-05,
      "loss": 0.6573,
      "step": 1670
    },
    {
      "epoch": 4.91,
      "learning_rate": 5.087719298245615e-05,
      "loss": 0.6919,
      "step": 1680
    },
    {
      "epoch": 4.94,
      "learning_rate": 5.0584795321637435e-05,
      "loss": 0.6598,
      "step": 1690
    },
    {
      "epoch": 4.97,
      "learning_rate": 5.0292397660818715e-05,
      "loss": 0.6526,
      "step": 1700
    },
    {
      "epoch": 5.0,
      "learning_rate": 5e-05,
      "loss": 0.6504,
      "step": 1710
    },
    {
      "epoch": 5.0,
      "eval_loss": 1.2719171047210693,
      "eval_runtime": 133.3649,
      "eval_samples_per_second": 79.256,
      "eval_steps_per_second": 9.913,
      "step": 1710
    },
    {
      "epoch": 5.03,
      "learning_rate": 4.970760233918128e-05,
      "loss": 0.5766,
      "step": 1720
    },
    {
      "epoch": 5.06,
      "learning_rate": 4.941520467836258e-05,
      "loss": 0.5547,
      "step": 1730
    },
    {
      "epoch": 5.08,
      "learning_rate": 4.912280701754386e-05,
      "loss": 0.5504,
      "step": 1740
    },
    {
      "epoch": 5.11,
      "learning_rate": 4.883040935672515e-05,
      "loss": 0.5593,
      "step": 1750
    },
    {
      "epoch": 5.14,
      "learning_rate": 4.853801169590643e-05,
      "loss": 0.5595,
      "step": 1760
    },
    {
      "epoch": 5.17,
      "learning_rate": 4.824561403508772e-05,
      "loss": 0.5492,
      "step": 1770
    },
    {
      "epoch": 5.2,
      "learning_rate": 4.7953216374269006e-05,
      "loss": 0.5603,
      "step": 1780
    },
    {
      "epoch": 5.23,
      "learning_rate": 4.7660818713450294e-05,
      "loss": 0.5475,
      "step": 1790
    },
    {
      "epoch": 5.26,
      "learning_rate": 4.736842105263158e-05,
      "loss": 0.557,
      "step": 1800
    },
    {
      "epoch": 5.29,
      "learning_rate": 4.707602339181287e-05,
      "loss": 0.5563,
      "step": 1810
    },
    {
      "epoch": 5.32,
      "learning_rate": 4.678362573099415e-05,
      "loss": 0.5759,
      "step": 1820
    },
    {
      "epoch": 5.35,
      "learning_rate": 4.649122807017544e-05,
      "loss": 0.5608,
      "step": 1830
    },
    {
      "epoch": 5.38,
      "learning_rate": 4.619883040935672e-05,
      "loss": 0.5651,
      "step": 1840
    },
    {
      "epoch": 5.41,
      "learning_rate": 4.590643274853802e-05,
      "loss": 0.5627,
      "step": 1850
    },
    {
      "epoch": 5.44,
      "learning_rate": 4.56140350877193e-05,
      "loss": 0.5729,
      "step": 1860
    },
    {
      "epoch": 5.46,
      "learning_rate": 4.5321637426900585e-05,
      "loss": 0.5678,
      "step": 1870
    },
    {
      "epoch": 5.49,
      "learning_rate": 4.502923976608187e-05,
      "loss": 0.5661,
      "step": 1880
    },
    {
      "epoch": 5.52,
      "learning_rate": 4.473684210526316e-05,
      "loss": 0.5694,
      "step": 1890
    },
    {
      "epoch": 5.55,
      "learning_rate": 4.4444444444444447e-05,
      "loss": 0.5694,
      "step": 1900
    },
    {
      "epoch": 5.58,
      "learning_rate": 4.4152046783625734e-05,
      "loss": 0.5576,
      "step": 1910
    },
    {
      "epoch": 5.61,
      "learning_rate": 4.3859649122807014e-05,
      "loss": 0.565,
      "step": 1920
    },
    {
      "epoch": 5.64,
      "learning_rate": 4.356725146198831e-05,
      "loss": 0.5655,
      "step": 1930
    },
    {
      "epoch": 5.67,
      "learning_rate": 4.327485380116959e-05,
      "loss": 0.5611,
      "step": 1940
    },
    {
      "epoch": 5.7,
      "learning_rate": 4.298245614035088e-05,
      "loss": 0.5808,
      "step": 1950
    },
    {
      "epoch": 5.73,
      "learning_rate": 4.269005847953216e-05,
      "loss": 0.551,
      "step": 1960
    },
    {
      "epoch": 5.76,
      "learning_rate": 4.239766081871345e-05,
      "loss": 0.5609,
      "step": 1970
    },
    {
      "epoch": 5.79,
      "learning_rate": 4.210526315789474e-05,
      "loss": 0.5629,
      "step": 1980
    },
    {
      "epoch": 5.82,
      "learning_rate": 4.1812865497076025e-05,
      "loss": 0.5675,
      "step": 1990
    },
    {
      "epoch": 5.84,
      "learning_rate": 4.152046783625731e-05,
      "loss": 0.5697,
      "step": 2000
    },
    {
      "epoch": 5.87,
      "learning_rate": 4.12280701754386e-05,
      "loss": 0.5544,
      "step": 2010
    },
    {
      "epoch": 5.9,
      "learning_rate": 4.093567251461988e-05,
      "loss": 0.5676,
      "step": 2020
    },
    {
      "epoch": 5.93,
      "learning_rate": 4.0643274853801174e-05,
      "loss": 0.5622,
      "step": 2030
    },
    {
      "epoch": 5.96,
      "learning_rate": 4.0350877192982455e-05,
      "loss": 0.5656,
      "step": 2040
    },
    {
      "epoch": 5.99,
      "learning_rate": 4.005847953216375e-05,
      "loss": 0.5708,
      "step": 2050
    },
    {
      "epoch": 6.0,
      "eval_loss": 1.340452790260315,
      "eval_runtime": 133.0036,
      "eval_samples_per_second": 79.472,
      "eval_steps_per_second": 9.94,
      "step": 2053
    },
    {
      "epoch": 6.02,
      "learning_rate": 3.976608187134503e-05,
      "loss": 0.5084,
      "step": 2060
    },
    {
      "epoch": 6.05,
      "learning_rate": 3.9473684210526316e-05,
      "loss": 0.4929,
      "step": 2070
    },
    {
      "epoch": 6.08,
      "learning_rate": 3.9181286549707604e-05,
      "loss": 0.4962,
      "step": 2080
    },
    {
      "epoch": 6.11,
      "learning_rate": 3.888888888888889e-05,
      "loss": 0.4901,
      "step": 2090
    },
    {
      "epoch": 6.14,
      "learning_rate": 3.859649122807018e-05,
      "loss": 0.4999,
      "step": 2100
    },
    {
      "epoch": 6.17,
      "learning_rate": 3.8304093567251465e-05,
      "loss": 0.4905,
      "step": 2110
    },
    {
      "epoch": 6.2,
      "learning_rate": 3.8011695906432746e-05,
      "loss": 0.4871,
      "step": 2120
    },
    {
      "epoch": 6.22,
      "learning_rate": 3.771929824561404e-05,
      "loss": 0.4965,
      "step": 2130
    },
    {
      "epoch": 6.25,
      "learning_rate": 3.742690058479532e-05,
      "loss": 0.4862,
      "step": 2140
    },
    {
      "epoch": 6.28,
      "learning_rate": 3.713450292397661e-05,
      "loss": 0.4926,
      "step": 2150
    },
    {
      "epoch": 6.31,
      "learning_rate": 3.6842105263157895e-05,
      "loss": 0.4931,
      "step": 2160
    },
    {
      "epoch": 6.34,
      "learning_rate": 3.654970760233918e-05,
      "loss": 0.4931,
      "step": 2170
    },
    {
      "epoch": 6.37,
      "learning_rate": 3.625730994152047e-05,
      "loss": 0.5032,
      "step": 2180
    },
    {
      "epoch": 6.4,
      "learning_rate": 3.5964912280701756e-05,
      "loss": 0.5071,
      "step": 2190
    },
    {
      "epoch": 6.43,
      "learning_rate": 3.5672514619883044e-05,
      "loss": 0.4958,
      "step": 2200
    },
    {
      "epoch": 6.46,
      "learning_rate": 3.538011695906433e-05,
      "loss": 0.5028,
      "step": 2210
    },
    {
      "epoch": 6.49,
      "learning_rate": 3.508771929824561e-05,
      "loss": 0.5033,
      "step": 2220
    },
    {
      "epoch": 6.52,
      "learning_rate": 3.4795321637426905e-05,
      "loss": 0.5038,
      "step": 2230
    },
    {
      "epoch": 6.55,
      "learning_rate": 3.4502923976608186e-05,
      "loss": 0.4832,
      "step": 2240
    },
    {
      "epoch": 6.58,
      "learning_rate": 3.421052631578947e-05,
      "loss": 0.5049,
      "step": 2250
    },
    {
      "epoch": 6.6,
      "learning_rate": 3.391812865497076e-05,
      "loss": 0.496,
      "step": 2260
    },
    {
      "epoch": 6.63,
      "learning_rate": 3.362573099415205e-05,
      "loss": 0.5157,
      "step": 2270
    },
    {
      "epoch": 6.66,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 0.496,
      "step": 2280
    },
    {
      "epoch": 6.69,
      "learning_rate": 3.304093567251462e-05,
      "loss": 0.4893,
      "step": 2290
    },
    {
      "epoch": 6.72,
      "learning_rate": 3.274853801169591e-05,
      "loss": 0.4947,
      "step": 2300
    },
    {
      "epoch": 6.75,
      "learning_rate": 3.24561403508772e-05,
      "loss": 0.4953,
      "step": 2310
    },
    {
      "epoch": 6.78,
      "learning_rate": 3.216374269005848e-05,
      "loss": 0.5045,
      "step": 2320
    },
    {
      "epoch": 6.81,
      "learning_rate": 3.187134502923977e-05,
      "loss": 0.4985,
      "step": 2330
    },
    {
      "epoch": 6.84,
      "learning_rate": 3.157894736842105e-05,
      "loss": 0.4938,
      "step": 2340
    },
    {
      "epoch": 6.87,
      "learning_rate": 3.128654970760234e-05,
      "loss": 0.5037,
      "step": 2350
    },
    {
      "epoch": 6.9,
      "learning_rate": 3.0994152046783626e-05,
      "loss": 0.4967,
      "step": 2360
    },
    {
      "epoch": 6.93,
      "learning_rate": 3.0701754385964913e-05,
      "loss": 0.4956,
      "step": 2370
    },
    {
      "epoch": 6.96,
      "learning_rate": 3.0409356725146197e-05,
      "loss": 0.4982,
      "step": 2380
    },
    {
      "epoch": 6.98,
      "learning_rate": 3.0116959064327488e-05,
      "loss": 0.5077,
      "step": 2390
    },
    {
      "epoch": 7.0,
      "eval_loss": 1.416868805885315,
      "eval_runtime": 133.3362,
      "eval_samples_per_second": 79.273,
      "eval_steps_per_second": 9.915,
      "step": 2395
    },
    {
      "epoch": 7.01,
      "learning_rate": 2.9824561403508772e-05,
      "loss": 0.4655,
      "step": 2400
    },
    {
      "epoch": 7.04,
      "learning_rate": 2.9532163742690062e-05,
      "loss": 0.4391,
      "step": 2410
    },
    {
      "epoch": 7.07,
      "learning_rate": 2.9239766081871346e-05,
      "loss": 0.4432,
      "step": 2420
    },
    {
      "epoch": 7.1,
      "learning_rate": 2.8947368421052634e-05,
      "loss": 0.4383,
      "step": 2430
    },
    {
      "epoch": 7.13,
      "learning_rate": 2.8654970760233917e-05,
      "loss": 0.4335,
      "step": 2440
    },
    {
      "epoch": 7.16,
      "learning_rate": 2.8362573099415208e-05,
      "loss": 0.4526,
      "step": 2450
    },
    {
      "epoch": 7.19,
      "learning_rate": 2.8070175438596492e-05,
      "loss": 0.4407,
      "step": 2460
    },
    {
      "epoch": 7.22,
      "learning_rate": 2.777777777777778e-05,
      "loss": 0.4462,
      "step": 2470
    },
    {
      "epoch": 7.25,
      "learning_rate": 2.7485380116959063e-05,
      "loss": 0.4548,
      "step": 2480
    },
    {
      "epoch": 7.28,
      "learning_rate": 2.7192982456140354e-05,
      "loss": 0.4519,
      "step": 2490
    },
    {
      "epoch": 7.31,
      "learning_rate": 2.6900584795321637e-05,
      "loss": 0.4407,
      "step": 2500
    },
    {
      "epoch": 7.34,
      "learning_rate": 2.6608187134502928e-05,
      "loss": 0.4479,
      "step": 2510
    },
    {
      "epoch": 7.36,
      "learning_rate": 2.6315789473684212e-05,
      "loss": 0.456,
      "step": 2520
    },
    {
      "epoch": 7.39,
      "learning_rate": 2.60233918128655e-05,
      "loss": 0.4416,
      "step": 2530
    },
    {
      "epoch": 7.42,
      "learning_rate": 2.5730994152046783e-05,
      "loss": 0.4542,
      "step": 2540
    },
    {
      "epoch": 7.45,
      "learning_rate": 2.5438596491228074e-05,
      "loss": 0.4582,
      "step": 2550
    },
    {
      "epoch": 7.48,
      "learning_rate": 2.5146198830409358e-05,
      "loss": 0.4545,
      "step": 2560
    },
    {
      "epoch": 7.51,
      "learning_rate": 2.485380116959064e-05,
      "loss": 0.4476,
      "step": 2570
    },
    {
      "epoch": 7.54,
      "learning_rate": 2.456140350877193e-05,
      "loss": 0.4472,
      "step": 2580
    },
    {
      "epoch": 7.57,
      "learning_rate": 2.4269005847953216e-05,
      "loss": 0.4567,
      "step": 2590
    },
    {
      "epoch": 7.6,
      "learning_rate": 2.3976608187134503e-05,
      "loss": 0.4689,
      "step": 2600
    },
    {
      "epoch": 7.63,
      "learning_rate": 2.368421052631579e-05,
      "loss": 0.4587,
      "step": 2610
    },
    {
      "epoch": 7.66,
      "learning_rate": 2.3391812865497074e-05,
      "loss": 0.4494,
      "step": 2620
    },
    {
      "epoch": 7.69,
      "learning_rate": 2.309941520467836e-05,
      "loss": 0.4494,
      "step": 2630
    },
    {
      "epoch": 7.72,
      "learning_rate": 2.280701754385965e-05,
      "loss": 0.4447,
      "step": 2640
    },
    {
      "epoch": 7.74,
      "learning_rate": 2.2514619883040936e-05,
      "loss": 0.4506,
      "step": 2650
    },
    {
      "epoch": 7.77,
      "learning_rate": 2.2222222222222223e-05,
      "loss": 0.4435,
      "step": 2660
    },
    {
      "epoch": 7.8,
      "learning_rate": 2.1929824561403507e-05,
      "loss": 0.4374,
      "step": 2670
    },
    {
      "epoch": 7.83,
      "learning_rate": 2.1637426900584794e-05,
      "loss": 0.4503,
      "step": 2680
    },
    {
      "epoch": 7.86,
      "learning_rate": 2.134502923976608e-05,
      "loss": 0.4491,
      "step": 2690
    },
    {
      "epoch": 7.89,
      "learning_rate": 2.105263157894737e-05,
      "loss": 0.4479,
      "step": 2700
    },
    {
      "epoch": 7.92,
      "learning_rate": 2.0760233918128656e-05,
      "loss": 0.4404,
      "step": 2710
    },
    {
      "epoch": 7.95,
      "learning_rate": 2.046783625730994e-05,
      "loss": 0.4483,
      "step": 2720
    },
    {
      "epoch": 7.98,
      "learning_rate": 2.0175438596491227e-05,
      "loss": 0.452,
      "step": 2730
    },
    {
      "epoch": 8.0,
      "eval_loss": 1.4924665689468384,
      "eval_runtime": 133.3032,
      "eval_samples_per_second": 79.293,
      "eval_steps_per_second": 9.917,
      "step": 2737
    },
    {
      "epoch": 8.01,
      "learning_rate": 1.9883040935672515e-05,
      "loss": 0.4449,
      "step": 2740
    },
    {
      "epoch": 8.04,
      "learning_rate": 1.9590643274853802e-05,
      "loss": 0.4108,
      "step": 2750
    },
    {
      "epoch": 8.07,
      "learning_rate": 1.929824561403509e-05,
      "loss": 0.4182,
      "step": 2760
    },
    {
      "epoch": 8.09,
      "learning_rate": 1.9005847953216373e-05,
      "loss": 0.4163,
      "step": 2770
    },
    {
      "epoch": 8.12,
      "learning_rate": 1.871345029239766e-05,
      "loss": 0.3994,
      "step": 2780
    },
    {
      "epoch": 8.15,
      "learning_rate": 1.8421052631578947e-05,
      "loss": 0.4102,
      "step": 2790
    },
    {
      "epoch": 8.18,
      "learning_rate": 1.8128654970760235e-05,
      "loss": 0.4185,
      "step": 2800
    },
    {
      "epoch": 8.21,
      "learning_rate": 1.7836257309941522e-05,
      "loss": 0.4114,
      "step": 2810
    },
    {
      "epoch": 8.24,
      "learning_rate": 1.7543859649122806e-05,
      "loss": 0.4082,
      "step": 2820
    },
    {
      "epoch": 8.27,
      "learning_rate": 1.7251461988304093e-05,
      "loss": 0.4179,
      "step": 2830
    },
    {
      "epoch": 8.3,
      "learning_rate": 1.695906432748538e-05,
      "loss": 0.4098,
      "step": 2840
    },
    {
      "epoch": 8.33,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 0.4143,
      "step": 2850
    },
    {
      "epoch": 8.36,
      "learning_rate": 1.6374269005847955e-05,
      "loss": 0.4146,
      "step": 2860
    },
    {
      "epoch": 8.39,
      "learning_rate": 1.608187134502924e-05,
      "loss": 0.405,
      "step": 2870
    },
    {
      "epoch": 8.42,
      "learning_rate": 1.5789473684210526e-05,
      "loss": 0.4176,
      "step": 2880
    },
    {
      "epoch": 8.45,
      "learning_rate": 1.5497076023391813e-05,
      "loss": 0.4099,
      "step": 2890
    },
    {
      "epoch": 8.47,
      "learning_rate": 1.5204678362573099e-05,
      "loss": 0.4022,
      "step": 2900
    },
    {
      "epoch": 8.5,
      "learning_rate": 1.4912280701754386e-05,
      "loss": 0.4215,
      "step": 2910
    },
    {
      "epoch": 8.53,
      "learning_rate": 1.4619883040935673e-05,
      "loss": 0.4115,
      "step": 2920
    },
    {
      "epoch": 8.56,
      "learning_rate": 1.4327485380116959e-05,
      "loss": 0.4194,
      "step": 2930
    },
    {
      "epoch": 8.59,
      "learning_rate": 1.4035087719298246e-05,
      "loss": 0.4145,
      "step": 2940
    },
    {
      "epoch": 8.62,
      "learning_rate": 1.3742690058479531e-05,
      "loss": 0.4121,
      "step": 2950
    },
    {
      "epoch": 8.65,
      "learning_rate": 1.3450292397660819e-05,
      "loss": 0.409,
      "step": 2960
    },
    {
      "epoch": 8.68,
      "learning_rate": 1.3157894736842106e-05,
      "loss": 0.4139,
      "step": 2970
    },
    {
      "epoch": 8.71,
      "learning_rate": 1.2865497076023392e-05,
      "loss": 0.4091,
      "step": 2980
    },
    {
      "epoch": 8.74,
      "learning_rate": 1.2573099415204679e-05,
      "loss": 0.4111,
      "step": 2990
    },
    {
      "epoch": 8.77,
      "learning_rate": 1.2280701754385964e-05,
      "loss": 0.4187,
      "step": 3000
    },
    {
      "epoch": 8.8,
      "learning_rate": 1.1988304093567252e-05,
      "loss": 0.4153,
      "step": 3010
    },
    {
      "epoch": 8.83,
      "learning_rate": 1.1695906432748537e-05,
      "loss": 0.4184,
      "step": 3020
    },
    {
      "epoch": 8.85,
      "learning_rate": 1.1403508771929824e-05,
      "loss": 0.4155,
      "step": 3030
    },
    {
      "epoch": 8.88,
      "learning_rate": 1.1111111111111112e-05,
      "loss": 0.4108,
      "step": 3040
    },
    {
      "epoch": 8.91,
      "learning_rate": 1.0818713450292397e-05,
      "loss": 0.4184,
      "step": 3050
    },
    {
      "epoch": 8.94,
      "learning_rate": 1.0526315789473684e-05,
      "loss": 0.4152,
      "step": 3060
    },
    {
      "epoch": 8.97,
      "learning_rate": 1.023391812865497e-05,
      "loss": 0.4063,
      "step": 3070
    },
    {
      "epoch": 9.0,
      "eval_loss": 1.5488338470458984,
      "eval_runtime": 133.0253,
      "eval_samples_per_second": 79.459,
      "eval_steps_per_second": 9.938,
      "step": 3079
    },
    {
      "epoch": 9.0,
      "learning_rate": 9.941520467836257e-06,
      "loss": 0.4007,
      "step": 3080
    },
    {
      "epoch": 9.03,
      "learning_rate": 9.649122807017545e-06,
      "loss": 0.391,
      "step": 3090
    },
    {
      "epoch": 9.06,
      "learning_rate": 9.35672514619883e-06,
      "loss": 0.3912,
      "step": 3100
    },
    {
      "epoch": 9.09,
      "learning_rate": 9.064327485380117e-06,
      "loss": 0.3925,
      "step": 3110
    },
    {
      "epoch": 9.12,
      "learning_rate": 8.771929824561403e-06,
      "loss": 0.3948,
      "step": 3120
    },
    {
      "epoch": 9.15,
      "learning_rate": 8.47953216374269e-06,
      "loss": 0.3826,
      "step": 3130
    },
    {
      "epoch": 9.18,
      "learning_rate": 8.187134502923977e-06,
      "loss": 0.4017,
      "step": 3140
    },
    {
      "epoch": 9.21,
      "learning_rate": 7.894736842105263e-06,
      "loss": 0.4034,
      "step": 3150
    },
    {
      "epoch": 9.23,
      "learning_rate": 7.602339181286549e-06,
      "loss": 0.3979,
      "step": 3160
    },
    {
      "epoch": 9.26,
      "learning_rate": 7.3099415204678366e-06,
      "loss": 0.3977,
      "step": 3170
    },
    {
      "epoch": 9.29,
      "learning_rate": 7.017543859649123e-06,
      "loss": 0.3863,
      "step": 3180
    },
    {
      "epoch": 9.32,
      "learning_rate": 6.725146198830409e-06,
      "loss": 0.3948,
      "step": 3190
    },
    {
      "epoch": 9.35,
      "learning_rate": 6.432748538011696e-06,
      "loss": 0.3949,
      "step": 3200
    },
    {
      "epoch": 9.38,
      "learning_rate": 6.140350877192982e-06,
      "loss": 0.3886,
      "step": 3210
    },
    {
      "epoch": 9.41,
      "learning_rate": 5.8479532163742686e-06,
      "loss": 0.3825,
      "step": 3220
    },
    {
      "epoch": 9.44,
      "learning_rate": 5.555555555555556e-06,
      "loss": 0.3844,
      "step": 3230
    },
    {
      "epoch": 9.47,
      "learning_rate": 5.263157894736842e-06,
      "loss": 0.3958,
      "step": 3240
    },
    {
      "epoch": 9.5,
      "learning_rate": 4.970760233918129e-06,
      "loss": 0.3972,
      "step": 3250
    },
    {
      "epoch": 9.53,
      "learning_rate": 4.678362573099415e-06,
      "loss": 0.3875,
      "step": 3260
    },
    {
      "epoch": 9.56,
      "learning_rate": 4.3859649122807014e-06,
      "loss": 0.3966,
      "step": 3270
    },
    {
      "epoch": 9.59,
      "learning_rate": 4.093567251461989e-06,
      "loss": 0.3868,
      "step": 3280
    },
    {
      "epoch": 9.61,
      "learning_rate": 3.8011695906432747e-06,
      "loss": 0.3945,
      "step": 3290
    },
    {
      "epoch": 9.64,
      "learning_rate": 3.5087719298245615e-06,
      "loss": 0.3953,
      "step": 3300
    },
    {
      "epoch": 9.67,
      "learning_rate": 3.216374269005848e-06,
      "loss": 0.3939,
      "step": 3310
    },
    {
      "epoch": 9.7,
      "learning_rate": 2.9239766081871343e-06,
      "loss": 0.3935,
      "step": 3320
    },
    {
      "epoch": 9.73,
      "learning_rate": 2.631578947368421e-06,
      "loss": 0.3936,
      "step": 3330
    },
    {
      "epoch": 9.76,
      "learning_rate": 2.3391812865497075e-06,
      "loss": 0.3958,
      "step": 3340
    },
    {
      "epoch": 9.79,
      "learning_rate": 2.0467836257309943e-06,
      "loss": 0.3952,
      "step": 3350
    },
    {
      "epoch": 9.82,
      "learning_rate": 1.7543859649122807e-06,
      "loss": 0.3856,
      "step": 3360
    },
    {
      "epoch": 9.85,
      "learning_rate": 1.4619883040935671e-06,
      "loss": 0.382,
      "step": 3370
    },
    {
      "epoch": 9.88,
      "learning_rate": 1.1695906432748538e-06,
      "loss": 0.3845,
      "step": 3380
    },
    {
      "epoch": 9.91,
      "learning_rate": 8.771929824561404e-07,
      "loss": 0.3855,
      "step": 3390
    },
    {
      "epoch": 9.94,
      "learning_rate": 5.847953216374269e-07,
      "loss": 0.3948,
      "step": 3400
    },
    {
      "epoch": 9.97,
      "learning_rate": 2.9239766081871344e-07,
      "loss": 0.4005,
      "step": 3410
    },
    {
      "epoch": 9.99,
      "learning_rate": 0.0,
      "loss": 0.3863,
      "step": 3420
    },
    {
      "epoch": 9.99,
      "eval_loss": 1.5926491022109985,
      "eval_runtime": 133.4623,
      "eval_samples_per_second": 79.198,
      "eval_steps_per_second": 9.905,
      "step": 3420
    },
    {
      "epoch": 9.99,
      "step": 3420,
      "total_flos": 4.554369038996914e+18,
      "train_loss": 0.6737501335422895,
      "train_runtime": 54303.7835,
      "train_samples_per_second": 16.131,
      "train_steps_per_second": 0.063
    }
  ],
  "max_steps": 3420,
  "num_train_epochs": 10,
  "total_flos": 4.554369038996914e+18,
  "trial_name": null,
  "trial_params": null
}