| { | |
| "best_metric": 13.04479235930605, | |
| "best_model_checkpoint": "./checkpoint-19000", | |
| "epoch": 116.95906432748538, | |
| "eval_steps": 1000, | |
| "global_step": 20000, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.4e-07, | |
| "loss": 3.0551, | |
| "step": 25 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 9.400000000000001e-07, | |
| "loss": 2.0629, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 1.44e-06, | |
| "loss": 1.3277, | |
| "step": 75 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.94e-06, | |
| "loss": 0.9916, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 2.4400000000000004e-06, | |
| "loss": 0.8245, | |
| "step": 125 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 2.9400000000000002e-06, | |
| "loss": 0.7138, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 1.02, | |
| "learning_rate": 3.44e-06, | |
| "loss": 0.6379, | |
| "step": 175 | |
| }, | |
| { | |
| "epoch": 1.17, | |
| "learning_rate": 3.94e-06, | |
| "loss": 0.5487, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "learning_rate": 4.440000000000001e-06, | |
| "loss": 0.5077, | |
| "step": 225 | |
| }, | |
| { | |
| "epoch": 1.46, | |
| "learning_rate": 4.94e-06, | |
| "loss": 0.4637, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 1.61, | |
| "learning_rate": 5.4400000000000004e-06, | |
| "loss": 0.4441, | |
| "step": 275 | |
| }, | |
| { | |
| "epoch": 1.75, | |
| "learning_rate": 5.94e-06, | |
| "loss": 0.4001, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 1.9, | |
| "learning_rate": 6.440000000000001e-06, | |
| "loss": 0.3794, | |
| "step": 325 | |
| }, | |
| { | |
| "epoch": 2.05, | |
| "learning_rate": 6.9400000000000005e-06, | |
| "loss": 0.345, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 2.19, | |
| "learning_rate": 7.440000000000001e-06, | |
| "loss": 0.2828, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 2.34, | |
| "learning_rate": 7.94e-06, | |
| "loss": 0.2814, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 2.49, | |
| "learning_rate": 8.44e-06, | |
| "loss": 0.2693, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 2.63, | |
| "learning_rate": 8.94e-06, | |
| "loss": 0.2596, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 2.78, | |
| "learning_rate": 9.440000000000001e-06, | |
| "loss": 0.252, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 2.92, | |
| "learning_rate": 9.940000000000001e-06, | |
| "loss": 0.2404, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 3.07, | |
| "learning_rate": 9.98871794871795e-06, | |
| "loss": 0.2083, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 3.22, | |
| "learning_rate": 9.975897435897436e-06, | |
| "loss": 0.1686, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 3.36, | |
| "learning_rate": 9.963076923076925e-06, | |
| "loss": 0.164, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 3.51, | |
| "learning_rate": 9.950256410256412e-06, | |
| "loss": 0.1688, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 3.65, | |
| "learning_rate": 9.937435897435898e-06, | |
| "loss": 0.164, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 3.8, | |
| "learning_rate": 9.924615384615385e-06, | |
| "loss": 0.1606, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 3.95, | |
| "learning_rate": 9.911794871794874e-06, | |
| "loss": 0.1573, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 4.09, | |
| "learning_rate": 9.89897435897436e-06, | |
| "loss": 0.1188, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 4.24, | |
| "learning_rate": 9.886153846153846e-06, | |
| "loss": 0.1076, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 4.39, | |
| "learning_rate": 9.873333333333334e-06, | |
| "loss": 0.1037, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 4.53, | |
| "learning_rate": 9.860512820512821e-06, | |
| "loss": 0.1036, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 4.68, | |
| "learning_rate": 9.847692307692308e-06, | |
| "loss": 0.1039, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 4.82, | |
| "learning_rate": 9.834871794871795e-06, | |
| "loss": 0.1018, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 4.97, | |
| "learning_rate": 9.822051282051283e-06, | |
| "loss": 0.1039, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 5.12, | |
| "learning_rate": 9.80923076923077e-06, | |
| "loss": 0.0777, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 5.26, | |
| "learning_rate": 9.796410256410257e-06, | |
| "loss": 0.0663, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 5.41, | |
| "learning_rate": 9.783589743589744e-06, | |
| "loss": 0.0655, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 5.56, | |
| "learning_rate": 9.770769230769232e-06, | |
| "loss": 0.0682, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 5.7, | |
| "learning_rate": 9.757948717948719e-06, | |
| "loss": 0.0701, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 5.85, | |
| "learning_rate": 9.745128205128206e-06, | |
| "loss": 0.067, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 5.85, | |
| "eval_loss": 0.26439839601516724, | |
| "eval_runtime": 1444.9613, | |
| "eval_samples_per_second": 4.561, | |
| "eval_steps_per_second": 0.285, | |
| "eval_wer": 15.86773862056459, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 5.99, | |
| "learning_rate": 9.732307692307693e-06, | |
| "loss": 0.072, | |
| "step": 1025 | |
| }, | |
| { | |
| "epoch": 6.14, | |
| "learning_rate": 9.71948717948718e-06, | |
| "loss": 0.0437, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 6.29, | |
| "learning_rate": 9.706666666666668e-06, | |
| "loss": 0.0448, | |
| "step": 1075 | |
| }, | |
| { | |
| "epoch": 6.43, | |
| "learning_rate": 9.693846153846155e-06, | |
| "loss": 0.047, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 6.58, | |
| "learning_rate": 9.681025641025642e-06, | |
| "loss": 0.0448, | |
| "step": 1125 | |
| }, | |
| { | |
| "epoch": 6.73, | |
| "learning_rate": 9.668205128205129e-06, | |
| "loss": 0.0473, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 6.87, | |
| "learning_rate": 9.655384615384617e-06, | |
| "loss": 0.0487, | |
| "step": 1175 | |
| }, | |
| { | |
| "epoch": 7.02, | |
| "learning_rate": 9.642564102564104e-06, | |
| "loss": 0.0434, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 7.16, | |
| "learning_rate": 9.62974358974359e-06, | |
| "loss": 0.032, | |
| "step": 1225 | |
| }, | |
| { | |
| "epoch": 7.31, | |
| "learning_rate": 9.616923076923077e-06, | |
| "loss": 0.0333, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 7.46, | |
| "learning_rate": 9.604102564102566e-06, | |
| "loss": 0.033, | |
| "step": 1275 | |
| }, | |
| { | |
| "epoch": 7.6, | |
| "learning_rate": 9.591282051282053e-06, | |
| "loss": 0.0344, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 7.75, | |
| "learning_rate": 9.578461538461538e-06, | |
| "loss": 0.0344, | |
| "step": 1325 | |
| }, | |
| { | |
| "epoch": 7.89, | |
| "learning_rate": 9.565641025641026e-06, | |
| "loss": 0.0322, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 8.04, | |
| "learning_rate": 9.552820512820513e-06, | |
| "loss": 0.0287, | |
| "step": 1375 | |
| }, | |
| { | |
| "epoch": 8.19, | |
| "learning_rate": 9.54e-06, | |
| "loss": 0.0233, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 8.33, | |
| "learning_rate": 9.527179487179487e-06, | |
| "loss": 0.0238, | |
| "step": 1425 | |
| }, | |
| { | |
| "epoch": 8.48, | |
| "learning_rate": 9.514358974358975e-06, | |
| "loss": 0.0239, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 8.63, | |
| "learning_rate": 9.501538461538462e-06, | |
| "loss": 0.025, | |
| "step": 1475 | |
| }, | |
| { | |
| "epoch": 8.77, | |
| "learning_rate": 9.488717948717949e-06, | |
| "loss": 0.025, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 8.92, | |
| "learning_rate": 9.475897435897436e-06, | |
| "loss": 0.0239, | |
| "step": 1525 | |
| }, | |
| { | |
| "epoch": 9.06, | |
| "learning_rate": 9.463076923076924e-06, | |
| "loss": 0.0226, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 9.21, | |
| "learning_rate": 9.450256410256411e-06, | |
| "loss": 0.0188, | |
| "step": 1575 | |
| }, | |
| { | |
| "epoch": 9.36, | |
| "learning_rate": 9.437435897435898e-06, | |
| "loss": 0.017, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 9.5, | |
| "learning_rate": 9.424615384615385e-06, | |
| "loss": 0.0178, | |
| "step": 1625 | |
| }, | |
| { | |
| "epoch": 9.65, | |
| "learning_rate": 9.411794871794872e-06, | |
| "loss": 0.0188, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 9.8, | |
| "learning_rate": 9.39897435897436e-06, | |
| "loss": 0.0182, | |
| "step": 1675 | |
| }, | |
| { | |
| "epoch": 9.94, | |
| "learning_rate": 9.386153846153847e-06, | |
| "loss": 0.0197, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 10.09, | |
| "learning_rate": 9.373333333333334e-06, | |
| "loss": 0.0171, | |
| "step": 1725 | |
| }, | |
| { | |
| "epoch": 10.23, | |
| "learning_rate": 9.36051282051282e-06, | |
| "loss": 0.0141, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 10.38, | |
| "learning_rate": 9.34769230769231e-06, | |
| "loss": 0.0127, | |
| "step": 1775 | |
| }, | |
| { | |
| "epoch": 10.53, | |
| "learning_rate": 9.334871794871796e-06, | |
| "loss": 0.0142, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 10.67, | |
| "learning_rate": 9.322051282051283e-06, | |
| "loss": 0.0147, | |
| "step": 1825 | |
| }, | |
| { | |
| "epoch": 10.82, | |
| "learning_rate": 9.30923076923077e-06, | |
| "loss": 0.0143, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 10.96, | |
| "learning_rate": 9.296410256410258e-06, | |
| "loss": 0.0152, | |
| "step": 1875 | |
| }, | |
| { | |
| "epoch": 11.11, | |
| "learning_rate": 9.283589743589745e-06, | |
| "loss": 0.0117, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 11.26, | |
| "learning_rate": 9.270769230769232e-06, | |
| "loss": 0.0108, | |
| "step": 1925 | |
| }, | |
| { | |
| "epoch": 11.4, | |
| "learning_rate": 9.257948717948719e-06, | |
| "loss": 0.0105, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 11.55, | |
| "learning_rate": 9.245128205128206e-06, | |
| "loss": 0.0112, | |
| "step": 1975 | |
| }, | |
| { | |
| "epoch": 11.7, | |
| "learning_rate": 9.232307692307692e-06, | |
| "loss": 0.0123, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 11.7, | |
| "eval_loss": 0.30771297216415405, | |
| "eval_runtime": 1427.397, | |
| "eval_samples_per_second": 4.617, | |
| "eval_steps_per_second": 0.289, | |
| "eval_wer": 14.63257369683048, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 11.84, | |
| "learning_rate": 9.21948717948718e-06, | |
| "loss": 0.0106, | |
| "step": 2025 | |
| }, | |
| { | |
| "epoch": 11.99, | |
| "learning_rate": 9.206666666666668e-06, | |
| "loss": 0.0122, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 12.13, | |
| "learning_rate": 9.193846153846155e-06, | |
| "loss": 0.0089, | |
| "step": 2075 | |
| }, | |
| { | |
| "epoch": 12.28, | |
| "learning_rate": 9.181025641025641e-06, | |
| "loss": 0.0091, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 12.43, | |
| "learning_rate": 9.168205128205128e-06, | |
| "loss": 0.0093, | |
| "step": 2125 | |
| }, | |
| { | |
| "epoch": 12.57, | |
| "learning_rate": 9.155384615384617e-06, | |
| "loss": 0.0096, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 12.72, | |
| "learning_rate": 9.142564102564104e-06, | |
| "loss": 0.0094, | |
| "step": 2175 | |
| }, | |
| { | |
| "epoch": 12.87, | |
| "learning_rate": 9.12974358974359e-06, | |
| "loss": 0.0091, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 13.01, | |
| "learning_rate": 9.116923076923077e-06, | |
| "loss": 0.0102, | |
| "step": 2225 | |
| }, | |
| { | |
| "epoch": 13.16, | |
| "learning_rate": 9.104102564102566e-06, | |
| "loss": 0.0085, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 13.3, | |
| "learning_rate": 9.091282051282053e-06, | |
| "loss": 0.0075, | |
| "step": 2275 | |
| }, | |
| { | |
| "epoch": 13.45, | |
| "learning_rate": 9.07846153846154e-06, | |
| "loss": 0.0065, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 13.6, | |
| "learning_rate": 9.065641025641026e-06, | |
| "loss": 0.0087, | |
| "step": 2325 | |
| }, | |
| { | |
| "epoch": 13.74, | |
| "learning_rate": 9.052820512820513e-06, | |
| "loss": 0.0087, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 13.89, | |
| "learning_rate": 9.040000000000002e-06, | |
| "loss": 0.0083, | |
| "step": 2375 | |
| }, | |
| { | |
| "epoch": 14.04, | |
| "learning_rate": 9.027179487179488e-06, | |
| "loss": 0.008, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 14.18, | |
| "learning_rate": 9.014358974358975e-06, | |
| "loss": 0.0064, | |
| "step": 2425 | |
| }, | |
| { | |
| "epoch": 14.33, | |
| "learning_rate": 9.001538461538462e-06, | |
| "loss": 0.0072, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 14.47, | |
| "learning_rate": 8.98871794871795e-06, | |
| "loss": 0.0063, | |
| "step": 2475 | |
| }, | |
| { | |
| "epoch": 14.62, | |
| "learning_rate": 8.975897435897437e-06, | |
| "loss": 0.0064, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 14.77, | |
| "learning_rate": 8.963076923076924e-06, | |
| "loss": 0.0071, | |
| "step": 2525 | |
| }, | |
| { | |
| "epoch": 14.91, | |
| "learning_rate": 8.950256410256411e-06, | |
| "loss": 0.0069, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 15.06, | |
| "learning_rate": 8.937435897435898e-06, | |
| "loss": 0.006, | |
| "step": 2575 | |
| }, | |
| { | |
| "epoch": 15.2, | |
| "learning_rate": 8.924615384615385e-06, | |
| "loss": 0.0046, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 15.35, | |
| "learning_rate": 8.911794871794871e-06, | |
| "loss": 0.0053, | |
| "step": 2625 | |
| }, | |
| { | |
| "epoch": 15.5, | |
| "learning_rate": 8.89897435897436e-06, | |
| "loss": 0.0053, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 15.64, | |
| "learning_rate": 8.886153846153847e-06, | |
| "loss": 0.0054, | |
| "step": 2675 | |
| }, | |
| { | |
| "epoch": 15.79, | |
| "learning_rate": 8.873333333333334e-06, | |
| "loss": 0.0057, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 15.94, | |
| "learning_rate": 8.86051282051282e-06, | |
| "loss": 0.0064, | |
| "step": 2725 | |
| }, | |
| { | |
| "epoch": 16.08, | |
| "learning_rate": 8.847692307692309e-06, | |
| "loss": 0.0052, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 16.23, | |
| "learning_rate": 8.834871794871796e-06, | |
| "loss": 0.0049, | |
| "step": 2775 | |
| }, | |
| { | |
| "epoch": 16.37, | |
| "learning_rate": 8.822051282051283e-06, | |
| "loss": 0.0055, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 16.52, | |
| "learning_rate": 8.80923076923077e-06, | |
| "loss": 0.0051, | |
| "step": 2825 | |
| }, | |
| { | |
| "epoch": 16.67, | |
| "learning_rate": 8.796410256410258e-06, | |
| "loss": 0.0064, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 16.81, | |
| "learning_rate": 8.783589743589745e-06, | |
| "loss": 0.0053, | |
| "step": 2875 | |
| }, | |
| { | |
| "epoch": 16.96, | |
| "learning_rate": 8.770769230769232e-06, | |
| "loss": 0.007, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 17.11, | |
| "learning_rate": 8.757948717948718e-06, | |
| "loss": 0.0062, | |
| "step": 2925 | |
| }, | |
| { | |
| "epoch": 17.25, | |
| "learning_rate": 8.745128205128205e-06, | |
| "loss": 0.0041, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 17.4, | |
| "learning_rate": 8.732307692307694e-06, | |
| "loss": 0.0038, | |
| "step": 2975 | |
| }, | |
| { | |
| "epoch": 17.54, | |
| "learning_rate": 8.71948717948718e-06, | |
| "loss": 0.0052, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 17.54, | |
| "eval_loss": 0.3317411243915558, | |
| "eval_runtime": 1431.0066, | |
| "eval_samples_per_second": 4.606, | |
| "eval_steps_per_second": 0.288, | |
| "eval_wer": 14.185254589050755, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 17.69, | |
| "learning_rate": 8.706666666666667e-06, | |
| "loss": 0.0055, | |
| "step": 3025 | |
| }, | |
| { | |
| "epoch": 17.84, | |
| "learning_rate": 8.693846153846154e-06, | |
| "loss": 0.0047, | |
| "step": 3050 | |
| }, | |
| { | |
| "epoch": 17.98, | |
| "learning_rate": 8.681025641025643e-06, | |
| "loss": 0.0041, | |
| "step": 3075 | |
| }, | |
| { | |
| "epoch": 18.13, | |
| "learning_rate": 8.66820512820513e-06, | |
| "loss": 0.004, | |
| "step": 3100 | |
| }, | |
| { | |
| "epoch": 18.27, | |
| "learning_rate": 8.655384615384616e-06, | |
| "loss": 0.0045, | |
| "step": 3125 | |
| }, | |
| { | |
| "epoch": 18.42, | |
| "learning_rate": 8.642564102564103e-06, | |
| "loss": 0.0041, | |
| "step": 3150 | |
| }, | |
| { | |
| "epoch": 18.57, | |
| "learning_rate": 8.62974358974359e-06, | |
| "loss": 0.0042, | |
| "step": 3175 | |
| }, | |
| { | |
| "epoch": 18.71, | |
| "learning_rate": 8.616923076923077e-06, | |
| "loss": 0.0045, | |
| "step": 3200 | |
| }, | |
| { | |
| "epoch": 18.86, | |
| "learning_rate": 8.604102564102564e-06, | |
| "loss": 0.0048, | |
| "step": 3225 | |
| }, | |
| { | |
| "epoch": 19.01, | |
| "learning_rate": 8.591282051282052e-06, | |
| "loss": 0.0046, | |
| "step": 3250 | |
| }, | |
| { | |
| "epoch": 19.15, | |
| "learning_rate": 8.578461538461539e-06, | |
| "loss": 0.0041, | |
| "step": 3275 | |
| }, | |
| { | |
| "epoch": 19.3, | |
| "learning_rate": 8.565641025641026e-06, | |
| "loss": 0.0041, | |
| "step": 3300 | |
| }, | |
| { | |
| "epoch": 19.44, | |
| "learning_rate": 8.552820512820513e-06, | |
| "loss": 0.0035, | |
| "step": 3325 | |
| }, | |
| { | |
| "epoch": 19.59, | |
| "learning_rate": 8.540000000000001e-06, | |
| "loss": 0.0037, | |
| "step": 3350 | |
| }, | |
| { | |
| "epoch": 19.74, | |
| "learning_rate": 8.527179487179488e-06, | |
| "loss": 0.0047, | |
| "step": 3375 | |
| }, | |
| { | |
| "epoch": 19.88, | |
| "learning_rate": 8.514358974358975e-06, | |
| "loss": 0.0043, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 20.03, | |
| "learning_rate": 8.501538461538462e-06, | |
| "loss": 0.0038, | |
| "step": 3425 | |
| }, | |
| { | |
| "epoch": 20.18, | |
| "learning_rate": 8.48871794871795e-06, | |
| "loss": 0.0039, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 20.32, | |
| "learning_rate": 8.475897435897437e-06, | |
| "loss": 0.0036, | |
| "step": 3475 | |
| }, | |
| { | |
| "epoch": 20.47, | |
| "learning_rate": 8.463076923076924e-06, | |
| "loss": 0.0031, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 20.61, | |
| "learning_rate": 8.45025641025641e-06, | |
| "loss": 0.0036, | |
| "step": 3525 | |
| }, | |
| { | |
| "epoch": 20.76, | |
| "learning_rate": 8.437435897435898e-06, | |
| "loss": 0.0032, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 20.91, | |
| "learning_rate": 8.424615384615386e-06, | |
| "loss": 0.0046, | |
| "step": 3575 | |
| }, | |
| { | |
| "epoch": 21.05, | |
| "learning_rate": 8.411794871794873e-06, | |
| "loss": 0.0043, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 21.2, | |
| "learning_rate": 8.39897435897436e-06, | |
| "loss": 0.0045, | |
| "step": 3625 | |
| }, | |
| { | |
| "epoch": 21.35, | |
| "learning_rate": 8.386153846153847e-06, | |
| "loss": 0.005, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 21.49, | |
| "learning_rate": 8.373333333333335e-06, | |
| "loss": 0.0035, | |
| "step": 3675 | |
| }, | |
| { | |
| "epoch": 21.64, | |
| "learning_rate": 8.360512820512822e-06, | |
| "loss": 0.0045, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 21.78, | |
| "learning_rate": 8.347692307692309e-06, | |
| "loss": 0.0046, | |
| "step": 3725 | |
| }, | |
| { | |
| "epoch": 21.93, | |
| "learning_rate": 8.334871794871796e-06, | |
| "loss": 0.0043, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 22.08, | |
| "learning_rate": 8.322051282051282e-06, | |
| "loss": 0.0051, | |
| "step": 3775 | |
| }, | |
| { | |
| "epoch": 22.22, | |
| "learning_rate": 8.30923076923077e-06, | |
| "loss": 0.0042, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 22.37, | |
| "learning_rate": 8.296410256410256e-06, | |
| "loss": 0.0041, | |
| "step": 3825 | |
| }, | |
| { | |
| "epoch": 22.51, | |
| "learning_rate": 8.283589743589745e-06, | |
| "loss": 0.0035, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 22.66, | |
| "learning_rate": 8.270769230769231e-06, | |
| "loss": 0.0041, | |
| "step": 3875 | |
| }, | |
| { | |
| "epoch": 22.81, | |
| "learning_rate": 8.257948717948718e-06, | |
| "loss": 0.0037, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 22.95, | |
| "learning_rate": 8.245128205128205e-06, | |
| "loss": 0.0043, | |
| "step": 3925 | |
| }, | |
| { | |
| "epoch": 23.1, | |
| "learning_rate": 8.232307692307694e-06, | |
| "loss": 0.0031, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 23.25, | |
| "learning_rate": 8.21948717948718e-06, | |
| "loss": 0.0038, | |
| "step": 3975 | |
| }, | |
| { | |
| "epoch": 23.39, | |
| "learning_rate": 8.206666666666667e-06, | |
| "loss": 0.0037, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 23.39, | |
| "eval_loss": 0.3387215733528137, | |
| "eval_runtime": 1422.1346, | |
| "eval_samples_per_second": 4.635, | |
| "eval_steps_per_second": 0.29, | |
| "eval_wer": 14.08853694412541, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 23.54, | |
| "learning_rate": 8.193846153846154e-06, | |
| "loss": 0.0036, | |
| "step": 4025 | |
| }, | |
| { | |
| "epoch": 23.68, | |
| "learning_rate": 8.181025641025642e-06, | |
| "loss": 0.004, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 23.83, | |
| "learning_rate": 8.16820512820513e-06, | |
| "loss": 0.0045, | |
| "step": 4075 | |
| }, | |
| { | |
| "epoch": 23.98, | |
| "learning_rate": 8.155384615384616e-06, | |
| "loss": 0.006, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 24.12, | |
| "learning_rate": 8.142564102564103e-06, | |
| "loss": 0.0037, | |
| "step": 4125 | |
| }, | |
| { | |
| "epoch": 24.27, | |
| "learning_rate": 8.12974358974359e-06, | |
| "loss": 0.0034, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 24.42, | |
| "learning_rate": 8.116923076923078e-06, | |
| "loss": 0.0037, | |
| "step": 4175 | |
| }, | |
| { | |
| "epoch": 24.56, | |
| "learning_rate": 8.104102564102565e-06, | |
| "loss": 0.0033, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 24.71, | |
| "learning_rate": 8.091282051282052e-06, | |
| "loss": 0.0037, | |
| "step": 4225 | |
| }, | |
| { | |
| "epoch": 24.85, | |
| "learning_rate": 8.078461538461539e-06, | |
| "loss": 0.0029, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 25.0, | |
| "learning_rate": 8.065641025641027e-06, | |
| "loss": 0.003, | |
| "step": 4275 | |
| }, | |
| { | |
| "epoch": 25.15, | |
| "learning_rate": 8.052820512820514e-06, | |
| "loss": 0.0029, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 25.29, | |
| "learning_rate": 8.040000000000001e-06, | |
| "loss": 0.0026, | |
| "step": 4325 | |
| }, | |
| { | |
| "epoch": 25.44, | |
| "learning_rate": 8.027179487179488e-06, | |
| "loss": 0.002, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 25.58, | |
| "learning_rate": 8.014358974358975e-06, | |
| "loss": 0.0025, | |
| "step": 4375 | |
| }, | |
| { | |
| "epoch": 25.73, | |
| "learning_rate": 8.001538461538461e-06, | |
| "loss": 0.0027, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 25.88, | |
| "learning_rate": 7.988717948717948e-06, | |
| "loss": 0.0035, | |
| "step": 4425 | |
| }, | |
| { | |
| "epoch": 26.02, | |
| "learning_rate": 7.975897435897437e-06, | |
| "loss": 0.0037, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 26.17, | |
| "learning_rate": 7.963076923076924e-06, | |
| "loss": 0.0031, | |
| "step": 4475 | |
| }, | |
| { | |
| "epoch": 26.32, | |
| "learning_rate": 7.95025641025641e-06, | |
| "loss": 0.0025, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 26.46, | |
| "learning_rate": 7.937435897435897e-06, | |
| "loss": 0.0029, | |
| "step": 4525 | |
| }, | |
| { | |
| "epoch": 26.61, | |
| "learning_rate": 7.924615384615386e-06, | |
| "loss": 0.0031, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 26.75, | |
| "learning_rate": 7.911794871794873e-06, | |
| "loss": 0.0032, | |
| "step": 4575 | |
| }, | |
| { | |
| "epoch": 26.9, | |
| "learning_rate": 7.89897435897436e-06, | |
| "loss": 0.0035, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 27.05, | |
| "learning_rate": 7.886153846153846e-06, | |
| "loss": 0.0027, | |
| "step": 4625 | |
| }, | |
| { | |
| "epoch": 27.19, | |
| "learning_rate": 7.873333333333335e-06, | |
| "loss": 0.0031, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 27.34, | |
| "learning_rate": 7.860512820512822e-06, | |
| "loss": 0.0028, | |
| "step": 4675 | |
| }, | |
| { | |
| "epoch": 27.49, | |
| "learning_rate": 7.847692307692308e-06, | |
| "loss": 0.003, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 27.63, | |
| "learning_rate": 7.834871794871795e-06, | |
| "loss": 0.0021, | |
| "step": 4725 | |
| }, | |
| { | |
| "epoch": 27.78, | |
| "learning_rate": 7.822051282051282e-06, | |
| "loss": 0.0025, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 27.92, | |
| "learning_rate": 7.80923076923077e-06, | |
| "loss": 0.0036, | |
| "step": 4775 | |
| }, | |
| { | |
| "epoch": 28.07, | |
| "learning_rate": 7.796410256410257e-06, | |
| "loss": 0.0033, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 28.22, | |
| "learning_rate": 7.783589743589744e-06, | |
| "loss": 0.0036, | |
| "step": 4825 | |
| }, | |
| { | |
| "epoch": 28.36, | |
| "learning_rate": 7.770769230769231e-06, | |
| "loss": 0.0026, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 28.51, | |
| "learning_rate": 7.75794871794872e-06, | |
| "loss": 0.0023, | |
| "step": 4875 | |
| }, | |
| { | |
| "epoch": 28.65, | |
| "learning_rate": 7.745128205128206e-06, | |
| "loss": 0.003, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 28.8, | |
| "learning_rate": 7.732307692307693e-06, | |
| "loss": 0.0027, | |
| "step": 4925 | |
| }, | |
| { | |
| "epoch": 28.95, | |
| "learning_rate": 7.71948717948718e-06, | |
| "loss": 0.0026, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 29.09, | |
| "learning_rate": 7.706666666666669e-06, | |
| "loss": 0.0029, | |
| "step": 4975 | |
| }, | |
| { | |
| "epoch": 29.24, | |
| "learning_rate": 7.693846153846154e-06, | |
| "loss": 0.0026, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 29.24, | |
| "eval_loss": 0.35591921210289, | |
| "eval_runtime": 1430.818, | |
| "eval_samples_per_second": 4.606, | |
| "eval_steps_per_second": 0.288, | |
| "eval_wer": 14.261822724616655, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 29.39, | |
| "learning_rate": 7.68102564102564e-06, | |
| "loss": 0.0041, | |
| "step": 5025 | |
| }, | |
| { | |
| "epoch": 29.53, | |
| "learning_rate": 7.668205128205129e-06, | |
| "loss": 0.0039, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 29.68, | |
| "learning_rate": 7.655384615384616e-06, | |
| "loss": 0.0028, | |
| "step": 5075 | |
| }, | |
| { | |
| "epoch": 29.82, | |
| "learning_rate": 7.642564102564103e-06, | |
| "loss": 0.003, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 29.97, | |
| "learning_rate": 7.62974358974359e-06, | |
| "loss": 0.0028, | |
| "step": 5125 | |
| }, | |
| { | |
| "epoch": 30.12, | |
| "learning_rate": 7.616923076923077e-06, | |
| "loss": 0.0028, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 30.26, | |
| "learning_rate": 7.604102564102565e-06, | |
| "loss": 0.0026, | |
| "step": 5175 | |
| }, | |
| { | |
| "epoch": 30.41, | |
| "learning_rate": 7.591282051282052e-06, | |
| "loss": 0.0034, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 30.56, | |
| "learning_rate": 7.578461538461539e-06, | |
| "loss": 0.0033, | |
| "step": 5225 | |
| }, | |
| { | |
| "epoch": 30.7, | |
| "learning_rate": 7.565641025641026e-06, | |
| "loss": 0.0032, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 30.85, | |
| "learning_rate": 7.552820512820514e-06, | |
| "loss": 0.0025, | |
| "step": 5275 | |
| }, | |
| { | |
| "epoch": 30.99, | |
| "learning_rate": 7.540000000000001e-06, | |
| "loss": 0.0031, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 31.14, | |
| "learning_rate": 7.5271794871794875e-06, | |
| "loss": 0.0021, | |
| "step": 5325 | |
| }, | |
| { | |
| "epoch": 31.29, | |
| "learning_rate": 7.514358974358975e-06, | |
| "loss": 0.0027, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 31.43, | |
| "learning_rate": 7.501538461538462e-06, | |
| "loss": 0.003, | |
| "step": 5375 | |
| }, | |
| { | |
| "epoch": 31.58, | |
| "learning_rate": 7.48871794871795e-06, | |
| "loss": 0.0038, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 31.73, | |
| "learning_rate": 7.4758974358974365e-06, | |
| "loss": 0.0029, | |
| "step": 5425 | |
| }, | |
| { | |
| "epoch": 31.87, | |
| "learning_rate": 7.463076923076924e-06, | |
| "loss": 0.0022, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 32.02, | |
| "learning_rate": 7.450256410256411e-06, | |
| "loss": 0.0022, | |
| "step": 5475 | |
| }, | |
| { | |
| "epoch": 32.16, | |
| "learning_rate": 7.437435897435899e-06, | |
| "loss": 0.0025, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 32.31, | |
| "learning_rate": 7.4246153846153855e-06, | |
| "loss": 0.0021, | |
| "step": 5525 | |
| }, | |
| { | |
| "epoch": 32.46, | |
| "learning_rate": 7.411794871794873e-06, | |
| "loss": 0.0028, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 32.6, | |
| "learning_rate": 7.39897435897436e-06, | |
| "loss": 0.0027, | |
| "step": 5575 | |
| }, | |
| { | |
| "epoch": 32.75, | |
| "learning_rate": 7.386153846153846e-06, | |
| "loss": 0.0022, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 32.89, | |
| "learning_rate": 7.373333333333334e-06, | |
| "loss": 0.0024, | |
| "step": 5625 | |
| }, | |
| { | |
| "epoch": 33.04, | |
| "learning_rate": 7.3605128205128204e-06, | |
| "loss": 0.0024, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 33.19, | |
| "learning_rate": 7.347692307692308e-06, | |
| "loss": 0.0026, | |
| "step": 5675 | |
| }, | |
| { | |
| "epoch": 33.33, | |
| "learning_rate": 7.334871794871795e-06, | |
| "loss": 0.003, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 33.48, | |
| "learning_rate": 7.322051282051283e-06, | |
| "loss": 0.0032, | |
| "step": 5725 | |
| }, | |
| { | |
| "epoch": 33.63, | |
| "learning_rate": 7.309230769230769e-06, | |
| "loss": 0.0028, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 33.77, | |
| "learning_rate": 7.296410256410257e-06, | |
| "loss": 0.0018, | |
| "step": 5775 | |
| }, | |
| { | |
| "epoch": 33.92, | |
| "learning_rate": 7.283589743589744e-06, | |
| "loss": 0.0021, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 34.06, | |
| "learning_rate": 7.270769230769232e-06, | |
| "loss": 0.0023, | |
| "step": 5825 | |
| }, | |
| { | |
| "epoch": 34.21, | |
| "learning_rate": 7.257948717948718e-06, | |
| "loss": 0.0018, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 34.36, | |
| "learning_rate": 7.245128205128206e-06, | |
| "loss": 0.0017, | |
| "step": 5875 | |
| }, | |
| { | |
| "epoch": 34.5, | |
| "learning_rate": 7.232307692307693e-06, | |
| "loss": 0.0024, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 34.65, | |
| "learning_rate": 7.21948717948718e-06, | |
| "loss": 0.002, | |
| "step": 5925 | |
| }, | |
| { | |
| "epoch": 34.8, | |
| "learning_rate": 7.206666666666667e-06, | |
| "loss": 0.0028, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 34.94, | |
| "learning_rate": 7.193846153846154e-06, | |
| "loss": 0.0019, | |
| "step": 5975 | |
| }, | |
| { | |
| "epoch": 35.09, | |
| "learning_rate": 7.181025641025642e-06, | |
| "loss": 0.0026, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 35.09, | |
| "eval_loss": 0.36038053035736084, | |
| "eval_runtime": 1431.3338, | |
| "eval_samples_per_second": 4.605, | |
| "eval_steps_per_second": 0.288, | |
| "eval_wer": 14.215478853089927, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 35.23, | |
| "learning_rate": 7.168205128205129e-06, | |
| "loss": 0.0018, | |
| "step": 6025 | |
| }, | |
| { | |
| "epoch": 35.38, | |
| "learning_rate": 7.155384615384616e-06, | |
| "loss": 0.0022, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 35.53, | |
| "learning_rate": 7.142564102564103e-06, | |
| "loss": 0.0017, | |
| "step": 6075 | |
| }, | |
| { | |
| "epoch": 35.67, | |
| "learning_rate": 7.129743589743591e-06, | |
| "loss": 0.002, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 35.82, | |
| "learning_rate": 7.116923076923078e-06, | |
| "loss": 0.0023, | |
| "step": 6125 | |
| }, | |
| { | |
| "epoch": 35.96, | |
| "learning_rate": 7.104102564102565e-06, | |
| "loss": 0.0026, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 36.11, | |
| "learning_rate": 7.091282051282052e-06, | |
| "loss": 0.0021, | |
| "step": 6175 | |
| }, | |
| { | |
| "epoch": 36.26, | |
| "learning_rate": 7.078461538461538e-06, | |
| "loss": 0.0017, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 36.4, | |
| "learning_rate": 7.065641025641026e-06, | |
| "loss": 0.0019, | |
| "step": 6225 | |
| }, | |
| { | |
| "epoch": 36.55, | |
| "learning_rate": 7.052820512820513e-06, | |
| "loss": 0.0012, | |
| "step": 6250 | |
| }, | |
| { | |
| "epoch": 36.7, | |
| "learning_rate": 7.04e-06, | |
| "loss": 0.0015, | |
| "step": 6275 | |
| }, | |
| { | |
| "epoch": 36.84, | |
| "learning_rate": 7.027179487179487e-06, | |
| "loss": 0.0025, | |
| "step": 6300 | |
| }, | |
| { | |
| "epoch": 36.99, | |
| "learning_rate": 7.014358974358975e-06, | |
| "loss": 0.002, | |
| "step": 6325 | |
| }, | |
| { | |
| "epoch": 37.13, | |
| "learning_rate": 7.001538461538462e-06, | |
| "loss": 0.0022, | |
| "step": 6350 | |
| }, | |
| { | |
| "epoch": 37.28, | |
| "learning_rate": 6.988717948717949e-06, | |
| "loss": 0.0017, | |
| "step": 6375 | |
| }, | |
| { | |
| "epoch": 37.43, | |
| "learning_rate": 6.975897435897436e-06, | |
| "loss": 0.0024, | |
| "step": 6400 | |
| }, | |
| { | |
| "epoch": 37.57, | |
| "learning_rate": 6.963076923076924e-06, | |
| "loss": 0.0019, | |
| "step": 6425 | |
| }, | |
| { | |
| "epoch": 37.72, | |
| "learning_rate": 6.950256410256411e-06, | |
| "loss": 0.0021, | |
| "step": 6450 | |
| }, | |
| { | |
| "epoch": 37.87, | |
| "learning_rate": 6.937435897435898e-06, | |
| "loss": 0.002, | |
| "step": 6475 | |
| }, | |
| { | |
| "epoch": 38.01, | |
| "learning_rate": 6.924615384615385e-06, | |
| "loss": 0.0014, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 38.16, | |
| "learning_rate": 6.911794871794872e-06, | |
| "loss": 0.0015, | |
| "step": 6525 | |
| }, | |
| { | |
| "epoch": 38.3, | |
| "learning_rate": 6.89897435897436e-06, | |
| "loss": 0.0018, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 38.45, | |
| "learning_rate": 6.8861538461538465e-06, | |
| "loss": 0.0014, | |
| "step": 6575 | |
| }, | |
| { | |
| "epoch": 38.6, | |
| "learning_rate": 6.873333333333334e-06, | |
| "loss": 0.0015, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 38.74, | |
| "learning_rate": 6.860512820512821e-06, | |
| "loss": 0.0022, | |
| "step": 6625 | |
| }, | |
| { | |
| "epoch": 38.89, | |
| "learning_rate": 6.847692307692309e-06, | |
| "loss": 0.0019, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 39.04, | |
| "learning_rate": 6.8348717948717955e-06, | |
| "loss": 0.0032, | |
| "step": 6675 | |
| }, | |
| { | |
| "epoch": 39.18, | |
| "learning_rate": 6.822051282051283e-06, | |
| "loss": 0.0017, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 39.33, | |
| "learning_rate": 6.80923076923077e-06, | |
| "loss": 0.0023, | |
| "step": 6725 | |
| }, | |
| { | |
| "epoch": 39.47, | |
| "learning_rate": 6.796410256410258e-06, | |
| "loss": 0.002, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 39.62, | |
| "learning_rate": 6.7835897435897445e-06, | |
| "loss": 0.0019, | |
| "step": 6775 | |
| }, | |
| { | |
| "epoch": 39.77, | |
| "learning_rate": 6.770769230769232e-06, | |
| "loss": 0.0013, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 39.91, | |
| "learning_rate": 6.757948717948718e-06, | |
| "loss": 0.0024, | |
| "step": 6825 | |
| }, | |
| { | |
| "epoch": 40.06, | |
| "learning_rate": 6.745128205128205e-06, | |
| "loss": 0.0013, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 40.2, | |
| "learning_rate": 6.732307692307693e-06, | |
| "loss": 0.0012, | |
| "step": 6875 | |
| }, | |
| { | |
| "epoch": 40.35, | |
| "learning_rate": 6.7194871794871794e-06, | |
| "loss": 0.0012, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 40.5, | |
| "learning_rate": 6.706666666666667e-06, | |
| "loss": 0.0021, | |
| "step": 6925 | |
| }, | |
| { | |
| "epoch": 40.64, | |
| "learning_rate": 6.693846153846154e-06, | |
| "loss": 0.002, | |
| "step": 6950 | |
| }, | |
| { | |
| "epoch": 40.79, | |
| "learning_rate": 6.681025641025642e-06, | |
| "loss": 0.0017, | |
| "step": 6975 | |
| }, | |
| { | |
| "epoch": 40.94, | |
| "learning_rate": 6.6682051282051284e-06, | |
| "loss": 0.002, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 40.94, | |
| "eval_loss": 0.3734375238418579, | |
| "eval_runtime": 1428.3736, | |
| "eval_samples_per_second": 4.614, | |
| "eval_steps_per_second": 0.288, | |
| "eval_wer": 14.12279111003647, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 41.08, | |
| "learning_rate": 6.655384615384616e-06, | |
| "loss": 0.0012, | |
| "step": 7025 | |
| }, | |
| { | |
| "epoch": 41.23, | |
| "learning_rate": 6.642564102564103e-06, | |
| "loss": 0.0014, | |
| "step": 7050 | |
| }, | |
| { | |
| "epoch": 41.37, | |
| "learning_rate": 6.629743589743591e-06, | |
| "loss": 0.0018, | |
| "step": 7075 | |
| }, | |
| { | |
| "epoch": 41.52, | |
| "learning_rate": 6.616923076923077e-06, | |
| "loss": 0.0014, | |
| "step": 7100 | |
| }, | |
| { | |
| "epoch": 41.67, | |
| "learning_rate": 6.604102564102565e-06, | |
| "loss": 0.0013, | |
| "step": 7125 | |
| }, | |
| { | |
| "epoch": 41.81, | |
| "learning_rate": 6.591282051282052e-06, | |
| "loss": 0.002, | |
| "step": 7150 | |
| }, | |
| { | |
| "epoch": 41.96, | |
| "learning_rate": 6.578461538461539e-06, | |
| "loss": 0.0018, | |
| "step": 7175 | |
| }, | |
| { | |
| "epoch": 42.11, | |
| "learning_rate": 6.565641025641026e-06, | |
| "loss": 0.0017, | |
| "step": 7200 | |
| }, | |
| { | |
| "epoch": 42.25, | |
| "learning_rate": 6.552820512820513e-06, | |
| "loss": 0.0017, | |
| "step": 7225 | |
| }, | |
| { | |
| "epoch": 42.4, | |
| "learning_rate": 6.540000000000001e-06, | |
| "loss": 0.0028, | |
| "step": 7250 | |
| }, | |
| { | |
| "epoch": 42.54, | |
| "learning_rate": 6.527179487179488e-06, | |
| "loss": 0.0018, | |
| "step": 7275 | |
| }, | |
| { | |
| "epoch": 42.69, | |
| "learning_rate": 6.514358974358975e-06, | |
| "loss": 0.0013, | |
| "step": 7300 | |
| }, | |
| { | |
| "epoch": 42.84, | |
| "learning_rate": 6.501538461538462e-06, | |
| "loss": 0.0017, | |
| "step": 7325 | |
| }, | |
| { | |
| "epoch": 42.98, | |
| "learning_rate": 6.48871794871795e-06, | |
| "loss": 0.0038, | |
| "step": 7350 | |
| }, | |
| { | |
| "epoch": 43.13, | |
| "learning_rate": 6.475897435897437e-06, | |
| "loss": 0.0032, | |
| "step": 7375 | |
| }, | |
| { | |
| "epoch": 43.27, | |
| "learning_rate": 6.463076923076924e-06, | |
| "loss": 0.0027, | |
| "step": 7400 | |
| }, | |
| { | |
| "epoch": 43.42, | |
| "learning_rate": 6.45025641025641e-06, | |
| "loss": 0.0022, | |
| "step": 7425 | |
| }, | |
| { | |
| "epoch": 43.57, | |
| "learning_rate": 6.437435897435897e-06, | |
| "loss": 0.0024, | |
| "step": 7450 | |
| }, | |
| { | |
| "epoch": 43.71, | |
| "learning_rate": 6.424615384615385e-06, | |
| "loss": 0.0024, | |
| "step": 7475 | |
| }, | |
| { | |
| "epoch": 43.86, | |
| "learning_rate": 6.411794871794872e-06, | |
| "loss": 0.0025, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 44.01, | |
| "learning_rate": 6.398974358974359e-06, | |
| "loss": 0.0022, | |
| "step": 7525 | |
| }, | |
| { | |
| "epoch": 44.15, | |
| "learning_rate": 6.386153846153846e-06, | |
| "loss": 0.0017, | |
| "step": 7550 | |
| }, | |
| { | |
| "epoch": 44.3, | |
| "learning_rate": 6.373333333333334e-06, | |
| "loss": 0.0016, | |
| "step": 7575 | |
| }, | |
| { | |
| "epoch": 44.44, | |
| "learning_rate": 6.360512820512821e-06, | |
| "loss": 0.002, | |
| "step": 7600 | |
| }, | |
| { | |
| "epoch": 44.59, | |
| "learning_rate": 6.347692307692308e-06, | |
| "loss": 0.002, | |
| "step": 7625 | |
| }, | |
| { | |
| "epoch": 44.74, | |
| "learning_rate": 6.334871794871795e-06, | |
| "loss": 0.0018, | |
| "step": 7650 | |
| }, | |
| { | |
| "epoch": 44.88, | |
| "learning_rate": 6.322051282051283e-06, | |
| "loss": 0.0031, | |
| "step": 7675 | |
| }, | |
| { | |
| "epoch": 45.03, | |
| "learning_rate": 6.30923076923077e-06, | |
| "loss": 0.0016, | |
| "step": 7700 | |
| }, | |
| { | |
| "epoch": 45.18, | |
| "learning_rate": 6.296410256410257e-06, | |
| "loss": 0.0014, | |
| "step": 7725 | |
| }, | |
| { | |
| "epoch": 45.32, | |
| "learning_rate": 6.283589743589744e-06, | |
| "loss": 0.0014, | |
| "step": 7750 | |
| }, | |
| { | |
| "epoch": 45.47, | |
| "learning_rate": 6.270769230769231e-06, | |
| "loss": 0.0019, | |
| "step": 7775 | |
| }, | |
| { | |
| "epoch": 45.61, | |
| "learning_rate": 6.257948717948719e-06, | |
| "loss": 0.0015, | |
| "step": 7800 | |
| }, | |
| { | |
| "epoch": 45.76, | |
| "learning_rate": 6.2451282051282055e-06, | |
| "loss": 0.0017, | |
| "step": 7825 | |
| }, | |
| { | |
| "epoch": 45.91, | |
| "learning_rate": 6.232307692307693e-06, | |
| "loss": 0.0015, | |
| "step": 7850 | |
| }, | |
| { | |
| "epoch": 46.05, | |
| "learning_rate": 6.21948717948718e-06, | |
| "loss": 0.0017, | |
| "step": 7875 | |
| }, | |
| { | |
| "epoch": 46.2, | |
| "learning_rate": 6.206666666666668e-06, | |
| "loss": 0.0012, | |
| "step": 7900 | |
| }, | |
| { | |
| "epoch": 46.35, | |
| "learning_rate": 6.1938461538461545e-06, | |
| "loss": 0.0018, | |
| "step": 7925 | |
| }, | |
| { | |
| "epoch": 46.49, | |
| "learning_rate": 6.181025641025642e-06, | |
| "loss": 0.0012, | |
| "step": 7950 | |
| }, | |
| { | |
| "epoch": 46.64, | |
| "learning_rate": 6.168205128205129e-06, | |
| "loss": 0.0012, | |
| "step": 7975 | |
| }, | |
| { | |
| "epoch": 46.78, | |
| "learning_rate": 6.155384615384617e-06, | |
| "loss": 0.0012, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 46.78, | |
| "eval_loss": 0.3772992491722107, | |
| "eval_runtime": 1427.2389, | |
| "eval_samples_per_second": 4.618, | |
| "eval_steps_per_second": 0.289, | |
| "eval_wer": 14.030103366983013, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 46.93, | |
| "learning_rate": 6.142564102564103e-06, | |
| "loss": 0.0011, | |
| "step": 8025 | |
| }, | |
| { | |
| "epoch": 47.08, | |
| "learning_rate": 6.1297435897435895e-06, | |
| "loss": 0.0014, | |
| "step": 8050 | |
| }, | |
| { | |
| "epoch": 47.22, | |
| "learning_rate": 6.116923076923077e-06, | |
| "loss": 0.0014, | |
| "step": 8075 | |
| }, | |
| { | |
| "epoch": 47.37, | |
| "learning_rate": 6.104102564102564e-06, | |
| "loss": 0.0008, | |
| "step": 8100 | |
| }, | |
| { | |
| "epoch": 47.51, | |
| "learning_rate": 6.091282051282052e-06, | |
| "loss": 0.0012, | |
| "step": 8125 | |
| }, | |
| { | |
| "epoch": 47.66, | |
| "learning_rate": 6.0784615384615384e-06, | |
| "loss": 0.0013, | |
| "step": 8150 | |
| }, | |
| { | |
| "epoch": 47.81, | |
| "learning_rate": 6.065641025641026e-06, | |
| "loss": 0.0024, | |
| "step": 8175 | |
| }, | |
| { | |
| "epoch": 47.95, | |
| "learning_rate": 6.0533333333333335e-06, | |
| "loss": 0.0012, | |
| "step": 8200 | |
| }, | |
| { | |
| "epoch": 48.1, | |
| "learning_rate": 6.04051282051282e-06, | |
| "loss": 0.0015, | |
| "step": 8225 | |
| }, | |
| { | |
| "epoch": 48.25, | |
| "learning_rate": 6.027692307692308e-06, | |
| "loss": 0.0009, | |
| "step": 8250 | |
| }, | |
| { | |
| "epoch": 48.39, | |
| "learning_rate": 6.014871794871795e-06, | |
| "loss": 0.0009, | |
| "step": 8275 | |
| }, | |
| { | |
| "epoch": 48.54, | |
| "learning_rate": 6.0020512820512825e-06, | |
| "loss": 0.0012, | |
| "step": 8300 | |
| }, | |
| { | |
| "epoch": 48.68, | |
| "learning_rate": 5.989230769230769e-06, | |
| "loss": 0.0014, | |
| "step": 8325 | |
| }, | |
| { | |
| "epoch": 48.83, | |
| "learning_rate": 5.976410256410257e-06, | |
| "loss": 0.001, | |
| "step": 8350 | |
| }, | |
| { | |
| "epoch": 48.98, | |
| "learning_rate": 5.963589743589744e-06, | |
| "loss": 0.0015, | |
| "step": 8375 | |
| }, | |
| { | |
| "epoch": 49.12, | |
| "learning_rate": 5.950769230769231e-06, | |
| "loss": 0.0009, | |
| "step": 8400 | |
| }, | |
| { | |
| "epoch": 49.27, | |
| "learning_rate": 5.937948717948718e-06, | |
| "loss": 0.0016, | |
| "step": 8425 | |
| }, | |
| { | |
| "epoch": 49.42, | |
| "learning_rate": 5.925128205128205e-06, | |
| "loss": 0.002, | |
| "step": 8450 | |
| }, | |
| { | |
| "epoch": 49.56, | |
| "learning_rate": 5.912307692307693e-06, | |
| "loss": 0.0017, | |
| "step": 8475 | |
| }, | |
| { | |
| "epoch": 49.71, | |
| "learning_rate": 5.89948717948718e-06, | |
| "loss": 0.0011, | |
| "step": 8500 | |
| }, | |
| { | |
| "epoch": 49.85, | |
| "learning_rate": 5.886666666666667e-06, | |
| "loss": 0.0015, | |
| "step": 8525 | |
| }, | |
| { | |
| "epoch": 50.0, | |
| "learning_rate": 5.873846153846154e-06, | |
| "loss": 0.0012, | |
| "step": 8550 | |
| }, | |
| { | |
| "epoch": 50.15, | |
| "learning_rate": 5.861025641025642e-06, | |
| "loss": 0.001, | |
| "step": 8575 | |
| }, | |
| { | |
| "epoch": 50.29, | |
| "learning_rate": 5.848205128205129e-06, | |
| "loss": 0.0012, | |
| "step": 8600 | |
| }, | |
| { | |
| "epoch": 50.44, | |
| "learning_rate": 5.835384615384616e-06, | |
| "loss": 0.0006, | |
| "step": 8625 | |
| }, | |
| { | |
| "epoch": 50.58, | |
| "learning_rate": 5.822564102564103e-06, | |
| "loss": 0.0006, | |
| "step": 8650 | |
| }, | |
| { | |
| "epoch": 50.73, | |
| "learning_rate": 5.809743589743591e-06, | |
| "loss": 0.001, | |
| "step": 8675 | |
| }, | |
| { | |
| "epoch": 50.88, | |
| "learning_rate": 5.796923076923078e-06, | |
| "loss": 0.0017, | |
| "step": 8700 | |
| }, | |
| { | |
| "epoch": 51.02, | |
| "learning_rate": 5.784102564102565e-06, | |
| "loss": 0.0011, | |
| "step": 8725 | |
| }, | |
| { | |
| "epoch": 51.17, | |
| "learning_rate": 5.771282051282052e-06, | |
| "loss": 0.0012, | |
| "step": 8750 | |
| }, | |
| { | |
| "epoch": 51.32, | |
| "learning_rate": 5.75846153846154e-06, | |
| "loss": 0.0014, | |
| "step": 8775 | |
| }, | |
| { | |
| "epoch": 51.46, | |
| "learning_rate": 5.745641025641027e-06, | |
| "loss": 0.0012, | |
| "step": 8800 | |
| }, | |
| { | |
| "epoch": 51.61, | |
| "learning_rate": 5.732820512820513e-06, | |
| "loss": 0.0011, | |
| "step": 8825 | |
| }, | |
| { | |
| "epoch": 51.75, | |
| "learning_rate": 5.72e-06, | |
| "loss": 0.0007, | |
| "step": 8850 | |
| }, | |
| { | |
| "epoch": 51.9, | |
| "learning_rate": 5.707179487179487e-06, | |
| "loss": 0.0007, | |
| "step": 8875 | |
| }, | |
| { | |
| "epoch": 52.05, | |
| "learning_rate": 5.694358974358975e-06, | |
| "loss": 0.0008, | |
| "step": 8900 | |
| }, | |
| { | |
| "epoch": 52.19, | |
| "learning_rate": 5.681538461538462e-06, | |
| "loss": 0.0009, | |
| "step": 8925 | |
| }, | |
| { | |
| "epoch": 52.34, | |
| "learning_rate": 5.668717948717949e-06, | |
| "loss": 0.001, | |
| "step": 8950 | |
| }, | |
| { | |
| "epoch": 52.49, | |
| "learning_rate": 5.655897435897436e-06, | |
| "loss": 0.0009, | |
| "step": 8975 | |
| }, | |
| { | |
| "epoch": 52.63, | |
| "learning_rate": 5.643076923076923e-06, | |
| "loss": 0.0012, | |
| "step": 9000 | |
| }, | |
| { | |
| "epoch": 52.63, | |
| "eval_loss": 0.3801773190498352, | |
| "eval_runtime": 1427.5399, | |
| "eval_samples_per_second": 4.617, | |
| "eval_steps_per_second": 0.289, | |
| "eval_wer": 13.907191359890387, | |
| "step": 9000 | |
| }, | |
| { | |
| "epoch": 52.78, | |
| "learning_rate": 5.630256410256411e-06, | |
| "loss": 0.0006, | |
| "step": 9025 | |
| }, | |
| { | |
| "epoch": 52.92, | |
| "learning_rate": 5.6174358974358974e-06, | |
| "loss": 0.001, | |
| "step": 9050 | |
| }, | |
| { | |
| "epoch": 53.07, | |
| "learning_rate": 5.604615384615385e-06, | |
| "loss": 0.0008, | |
| "step": 9075 | |
| }, | |
| { | |
| "epoch": 53.22, | |
| "learning_rate": 5.591794871794872e-06, | |
| "loss": 0.0009, | |
| "step": 9100 | |
| }, | |
| { | |
| "epoch": 53.36, | |
| "learning_rate": 5.57897435897436e-06, | |
| "loss": 0.0011, | |
| "step": 9125 | |
| }, | |
| { | |
| "epoch": 53.51, | |
| "learning_rate": 5.566153846153846e-06, | |
| "loss": 0.0007, | |
| "step": 9150 | |
| }, | |
| { | |
| "epoch": 53.65, | |
| "learning_rate": 5.553333333333334e-06, | |
| "loss": 0.0008, | |
| "step": 9175 | |
| }, | |
| { | |
| "epoch": 53.8, | |
| "learning_rate": 5.540512820512821e-06, | |
| "loss": 0.0007, | |
| "step": 9200 | |
| }, | |
| { | |
| "epoch": 53.95, | |
| "learning_rate": 5.527692307692309e-06, | |
| "loss": 0.0009, | |
| "step": 9225 | |
| }, | |
| { | |
| "epoch": 54.09, | |
| "learning_rate": 5.514871794871795e-06, | |
| "loss": 0.001, | |
| "step": 9250 | |
| }, | |
| { | |
| "epoch": 54.24, | |
| "learning_rate": 5.502051282051283e-06, | |
| "loss": 0.0012, | |
| "step": 9275 | |
| }, | |
| { | |
| "epoch": 54.39, | |
| "learning_rate": 5.48923076923077e-06, | |
| "loss": 0.0009, | |
| "step": 9300 | |
| }, | |
| { | |
| "epoch": 54.53, | |
| "learning_rate": 5.476410256410258e-06, | |
| "loss": 0.0012, | |
| "step": 9325 | |
| }, | |
| { | |
| "epoch": 54.68, | |
| "learning_rate": 5.463589743589744e-06, | |
| "loss": 0.0012, | |
| "step": 9350 | |
| }, | |
| { | |
| "epoch": 54.82, | |
| "learning_rate": 5.450769230769232e-06, | |
| "loss": 0.0013, | |
| "step": 9375 | |
| }, | |
| { | |
| "epoch": 54.97, | |
| "learning_rate": 5.437948717948719e-06, | |
| "loss": 0.001, | |
| "step": 9400 | |
| }, | |
| { | |
| "epoch": 55.12, | |
| "learning_rate": 5.425128205128205e-06, | |
| "loss": 0.0006, | |
| "step": 9425 | |
| }, | |
| { | |
| "epoch": 55.26, | |
| "learning_rate": 5.4123076923076925e-06, | |
| "loss": 0.001, | |
| "step": 9450 | |
| }, | |
| { | |
| "epoch": 55.41, | |
| "learning_rate": 5.399487179487179e-06, | |
| "loss": 0.0017, | |
| "step": 9475 | |
| }, | |
| { | |
| "epoch": 55.56, | |
| "learning_rate": 5.386666666666667e-06, | |
| "loss": 0.0009, | |
| "step": 9500 | |
| }, | |
| { | |
| "epoch": 55.7, | |
| "learning_rate": 5.373846153846154e-06, | |
| "loss": 0.0018, | |
| "step": 9525 | |
| }, | |
| { | |
| "epoch": 55.85, | |
| "learning_rate": 5.3610256410256415e-06, | |
| "loss": 0.0013, | |
| "step": 9550 | |
| }, | |
| { | |
| "epoch": 55.99, | |
| "learning_rate": 5.348205128205128e-06, | |
| "loss": 0.0015, | |
| "step": 9575 | |
| }, | |
| { | |
| "epoch": 56.14, | |
| "learning_rate": 5.335384615384615e-06, | |
| "loss": 0.0015, | |
| "step": 9600 | |
| }, | |
| { | |
| "epoch": 56.29, | |
| "learning_rate": 5.322564102564103e-06, | |
| "loss": 0.0012, | |
| "step": 9625 | |
| }, | |
| { | |
| "epoch": 56.43, | |
| "learning_rate": 5.30974358974359e-06, | |
| "loss": 0.0014, | |
| "step": 9650 | |
| }, | |
| { | |
| "epoch": 56.58, | |
| "learning_rate": 5.296923076923077e-06, | |
| "loss": 0.0012, | |
| "step": 9675 | |
| }, | |
| { | |
| "epoch": 56.73, | |
| "learning_rate": 5.284102564102564e-06, | |
| "loss": 0.0011, | |
| "step": 9700 | |
| }, | |
| { | |
| "epoch": 56.87, | |
| "learning_rate": 5.271282051282052e-06, | |
| "loss": 0.0014, | |
| "step": 9725 | |
| }, | |
| { | |
| "epoch": 57.02, | |
| "learning_rate": 5.258461538461539e-06, | |
| "loss": 0.0022, | |
| "step": 9750 | |
| }, | |
| { | |
| "epoch": 57.16, | |
| "learning_rate": 5.245641025641026e-06, | |
| "loss": 0.0013, | |
| "step": 9775 | |
| }, | |
| { | |
| "epoch": 57.31, | |
| "learning_rate": 5.232820512820513e-06, | |
| "loss": 0.0009, | |
| "step": 9800 | |
| }, | |
| { | |
| "epoch": 57.46, | |
| "learning_rate": 5.220000000000001e-06, | |
| "loss": 0.0014, | |
| "step": 9825 | |
| }, | |
| { | |
| "epoch": 57.6, | |
| "learning_rate": 5.207179487179488e-06, | |
| "loss": 0.0017, | |
| "step": 9850 | |
| }, | |
| { | |
| "epoch": 57.75, | |
| "learning_rate": 5.194358974358975e-06, | |
| "loss": 0.0008, | |
| "step": 9875 | |
| }, | |
| { | |
| "epoch": 57.89, | |
| "learning_rate": 5.181538461538462e-06, | |
| "loss": 0.0009, | |
| "step": 9900 | |
| }, | |
| { | |
| "epoch": 58.04, | |
| "learning_rate": 5.16871794871795e-06, | |
| "loss": 0.0011, | |
| "step": 9925 | |
| }, | |
| { | |
| "epoch": 58.19, | |
| "learning_rate": 5.155897435897437e-06, | |
| "loss": 0.0011, | |
| "step": 9950 | |
| }, | |
| { | |
| "epoch": 58.33, | |
| "learning_rate": 5.143076923076924e-06, | |
| "loss": 0.0007, | |
| "step": 9975 | |
| }, | |
| { | |
| "epoch": 58.48, | |
| "learning_rate": 5.130256410256411e-06, | |
| "loss": 0.0012, | |
| "step": 10000 | |
| }, | |
| { | |
| "epoch": 58.48, | |
| "eval_loss": 0.3849584758281708, | |
| "eval_runtime": 1432.8921, | |
| "eval_samples_per_second": 4.6, | |
| "eval_steps_per_second": 0.288, | |
| "eval_wer": 14.47339257289085, | |
| "step": 10000 | |
| }, | |
| { | |
| "epoch": 58.63, | |
| "learning_rate": 5.117435897435897e-06, | |
| "loss": 0.0015, | |
| "step": 10025 | |
| }, | |
| { | |
| "epoch": 58.77, | |
| "learning_rate": 5.104615384615385e-06, | |
| "loss": 0.001, | |
| "step": 10050 | |
| }, | |
| { | |
| "epoch": 58.92, | |
| "learning_rate": 5.091794871794872e-06, | |
| "loss": 0.0008, | |
| "step": 10075 | |
| }, | |
| { | |
| "epoch": 59.06, | |
| "learning_rate": 5.078974358974359e-06, | |
| "loss": 0.0008, | |
| "step": 10100 | |
| }, | |
| { | |
| "epoch": 59.21, | |
| "learning_rate": 5.066153846153846e-06, | |
| "loss": 0.0005, | |
| "step": 10125 | |
| }, | |
| { | |
| "epoch": 59.36, | |
| "learning_rate": 5.053333333333334e-06, | |
| "loss": 0.0008, | |
| "step": 10150 | |
| }, | |
| { | |
| "epoch": 59.5, | |
| "learning_rate": 5.040512820512821e-06, | |
| "loss": 0.0008, | |
| "step": 10175 | |
| }, | |
| { | |
| "epoch": 59.65, | |
| "learning_rate": 5.027692307692308e-06, | |
| "loss": 0.0016, | |
| "step": 10200 | |
| }, | |
| { | |
| "epoch": 59.8, | |
| "learning_rate": 5.014871794871795e-06, | |
| "loss": 0.0006, | |
| "step": 10225 | |
| }, | |
| { | |
| "epoch": 59.94, | |
| "learning_rate": 5.002051282051282e-06, | |
| "loss": 0.0013, | |
| "step": 10250 | |
| }, | |
| { | |
| "epoch": 60.09, | |
| "learning_rate": 4.98923076923077e-06, | |
| "loss": 0.0012, | |
| "step": 10275 | |
| }, | |
| { | |
| "epoch": 60.23, | |
| "learning_rate": 4.9764102564102564e-06, | |
| "loss": 0.0008, | |
| "step": 10300 | |
| }, | |
| { | |
| "epoch": 60.38, | |
| "learning_rate": 4.963589743589744e-06, | |
| "loss": 0.0012, | |
| "step": 10325 | |
| }, | |
| { | |
| "epoch": 60.53, | |
| "learning_rate": 4.950769230769231e-06, | |
| "loss": 0.0006, | |
| "step": 10350 | |
| }, | |
| { | |
| "epoch": 60.67, | |
| "learning_rate": 4.937948717948719e-06, | |
| "loss": 0.0007, | |
| "step": 10375 | |
| }, | |
| { | |
| "epoch": 60.82, | |
| "learning_rate": 4.9251282051282054e-06, | |
| "loss": 0.0007, | |
| "step": 10400 | |
| }, | |
| { | |
| "epoch": 60.96, | |
| "learning_rate": 4.912307692307693e-06, | |
| "loss": 0.001, | |
| "step": 10425 | |
| }, | |
| { | |
| "epoch": 61.11, | |
| "learning_rate": 4.89948717948718e-06, | |
| "loss": 0.0009, | |
| "step": 10450 | |
| }, | |
| { | |
| "epoch": 61.26, | |
| "learning_rate": 4.886666666666668e-06, | |
| "loss": 0.0014, | |
| "step": 10475 | |
| }, | |
| { | |
| "epoch": 61.4, | |
| "learning_rate": 4.873846153846154e-06, | |
| "loss": 0.0008, | |
| "step": 10500 | |
| }, | |
| { | |
| "epoch": 61.55, | |
| "learning_rate": 4.861025641025641e-06, | |
| "loss": 0.0008, | |
| "step": 10525 | |
| }, | |
| { | |
| "epoch": 61.7, | |
| "learning_rate": 4.848205128205128e-06, | |
| "loss": 0.0008, | |
| "step": 10550 | |
| }, | |
| { | |
| "epoch": 61.84, | |
| "learning_rate": 4.835384615384616e-06, | |
| "loss": 0.0005, | |
| "step": 10575 | |
| }, | |
| { | |
| "epoch": 61.99, | |
| "learning_rate": 4.8225641025641026e-06, | |
| "loss": 0.0007, | |
| "step": 10600 | |
| }, | |
| { | |
| "epoch": 62.13, | |
| "learning_rate": 4.80974358974359e-06, | |
| "loss": 0.0005, | |
| "step": 10625 | |
| }, | |
| { | |
| "epoch": 62.28, | |
| "learning_rate": 4.796923076923077e-06, | |
| "loss": 0.0003, | |
| "step": 10650 | |
| }, | |
| { | |
| "epoch": 62.43, | |
| "learning_rate": 4.784102564102565e-06, | |
| "loss": 0.0005, | |
| "step": 10675 | |
| }, | |
| { | |
| "epoch": 62.57, | |
| "learning_rate": 4.7712820512820516e-06, | |
| "loss": 0.0007, | |
| "step": 10700 | |
| }, | |
| { | |
| "epoch": 62.72, | |
| "learning_rate": 4.758461538461539e-06, | |
| "loss": 0.001, | |
| "step": 10725 | |
| }, | |
| { | |
| "epoch": 62.87, | |
| "learning_rate": 4.745641025641026e-06, | |
| "loss": 0.0009, | |
| "step": 10750 | |
| }, | |
| { | |
| "epoch": 63.01, | |
| "learning_rate": 4.732820512820514e-06, | |
| "loss": 0.0004, | |
| "step": 10775 | |
| }, | |
| { | |
| "epoch": 63.16, | |
| "learning_rate": 4.7200000000000005e-06, | |
| "loss": 0.0003, | |
| "step": 10800 | |
| }, | |
| { | |
| "epoch": 63.3, | |
| "learning_rate": 4.707179487179487e-06, | |
| "loss": 0.0006, | |
| "step": 10825 | |
| }, | |
| { | |
| "epoch": 63.45, | |
| "learning_rate": 4.694358974358974e-06, | |
| "loss": 0.0006, | |
| "step": 10850 | |
| }, | |
| { | |
| "epoch": 63.6, | |
| "learning_rate": 4.681538461538462e-06, | |
| "loss": 0.0005, | |
| "step": 10875 | |
| }, | |
| { | |
| "epoch": 63.74, | |
| "learning_rate": 4.668717948717949e-06, | |
| "loss": 0.0006, | |
| "step": 10900 | |
| }, | |
| { | |
| "epoch": 63.89, | |
| "learning_rate": 4.655897435897436e-06, | |
| "loss": 0.0009, | |
| "step": 10925 | |
| }, | |
| { | |
| "epoch": 64.04, | |
| "learning_rate": 4.643076923076923e-06, | |
| "loss": 0.0003, | |
| "step": 10950 | |
| }, | |
| { | |
| "epoch": 64.18, | |
| "learning_rate": 4.630256410256411e-06, | |
| "loss": 0.0013, | |
| "step": 10975 | |
| }, | |
| { | |
| "epoch": 64.33, | |
| "learning_rate": 4.617435897435898e-06, | |
| "loss": 0.0006, | |
| "step": 11000 | |
| }, | |
| { | |
| "epoch": 64.33, | |
| "eval_loss": 0.3895852267742157, | |
| "eval_runtime": 1430.8895, | |
| "eval_samples_per_second": 4.606, | |
| "eval_steps_per_second": 0.288, | |
| "eval_wer": 13.651292591025408, | |
| "step": 11000 | |
| }, | |
| { | |
| "epoch": 64.47, | |
| "learning_rate": 4.604615384615385e-06, | |
| "loss": 0.0005, | |
| "step": 11025 | |
| }, | |
| { | |
| "epoch": 64.62, | |
| "learning_rate": 4.591794871794872e-06, | |
| "loss": 0.0006, | |
| "step": 11050 | |
| }, | |
| { | |
| "epoch": 64.77, | |
| "learning_rate": 4.57897435897436e-06, | |
| "loss": 0.001, | |
| "step": 11075 | |
| }, | |
| { | |
| "epoch": 64.91, | |
| "learning_rate": 4.566153846153847e-06, | |
| "loss": 0.0006, | |
| "step": 11100 | |
| }, | |
| { | |
| "epoch": 65.06, | |
| "learning_rate": 4.5533333333333335e-06, | |
| "loss": 0.0011, | |
| "step": 11125 | |
| }, | |
| { | |
| "epoch": 65.2, | |
| "learning_rate": 4.54051282051282e-06, | |
| "loss": 0.0023, | |
| "step": 11150 | |
| }, | |
| { | |
| "epoch": 65.35, | |
| "learning_rate": 4.527692307692308e-06, | |
| "loss": 0.0009, | |
| "step": 11175 | |
| }, | |
| { | |
| "epoch": 65.5, | |
| "learning_rate": 4.514871794871795e-06, | |
| "loss": 0.0009, | |
| "step": 11200 | |
| }, | |
| { | |
| "epoch": 65.64, | |
| "learning_rate": 4.5020512820512825e-06, | |
| "loss": 0.0012, | |
| "step": 11225 | |
| }, | |
| { | |
| "epoch": 65.79, | |
| "learning_rate": 4.489230769230769e-06, | |
| "loss": 0.001, | |
| "step": 11250 | |
| }, | |
| { | |
| "epoch": 65.94, | |
| "learning_rate": 4.476410256410257e-06, | |
| "loss": 0.0014, | |
| "step": 11275 | |
| }, | |
| { | |
| "epoch": 66.08, | |
| "learning_rate": 4.463589743589744e-06, | |
| "loss": 0.0011, | |
| "step": 11300 | |
| }, | |
| { | |
| "epoch": 66.23, | |
| "learning_rate": 4.4507692307692315e-06, | |
| "loss": 0.0009, | |
| "step": 11325 | |
| }, | |
| { | |
| "epoch": 66.37, | |
| "learning_rate": 4.437948717948718e-06, | |
| "loss": 0.0007, | |
| "step": 11350 | |
| }, | |
| { | |
| "epoch": 66.52, | |
| "learning_rate": 4.425128205128206e-06, | |
| "loss": 0.0011, | |
| "step": 11375 | |
| }, | |
| { | |
| "epoch": 66.67, | |
| "learning_rate": 4.412307692307693e-06, | |
| "loss": 0.0009, | |
| "step": 11400 | |
| }, | |
| { | |
| "epoch": 66.81, | |
| "learning_rate": 4.39948717948718e-06, | |
| "loss": 0.0014, | |
| "step": 11425 | |
| }, | |
| { | |
| "epoch": 66.96, | |
| "learning_rate": 4.3866666666666665e-06, | |
| "loss": 0.0008, | |
| "step": 11450 | |
| }, | |
| { | |
| "epoch": 67.11, | |
| "learning_rate": 4.373846153846154e-06, | |
| "loss": 0.0007, | |
| "step": 11475 | |
| }, | |
| { | |
| "epoch": 67.25, | |
| "learning_rate": 4.361025641025641e-06, | |
| "loss": 0.0003, | |
| "step": 11500 | |
| }, | |
| { | |
| "epoch": 67.4, | |
| "learning_rate": 4.348205128205129e-06, | |
| "loss": 0.0003, | |
| "step": 11525 | |
| }, | |
| { | |
| "epoch": 67.54, | |
| "learning_rate": 4.3353846153846154e-06, | |
| "loss": 0.0006, | |
| "step": 11550 | |
| }, | |
| { | |
| "epoch": 67.69, | |
| "learning_rate": 4.322564102564103e-06, | |
| "loss": 0.0004, | |
| "step": 11575 | |
| }, | |
| { | |
| "epoch": 67.84, | |
| "learning_rate": 4.30974358974359e-06, | |
| "loss": 0.0006, | |
| "step": 11600 | |
| }, | |
| { | |
| "epoch": 67.98, | |
| "learning_rate": 4.296923076923078e-06, | |
| "loss": 0.001, | |
| "step": 11625 | |
| }, | |
| { | |
| "epoch": 68.13, | |
| "learning_rate": 4.2841025641025644e-06, | |
| "loss": 0.001, | |
| "step": 11650 | |
| }, | |
| { | |
| "epoch": 68.27, | |
| "learning_rate": 4.271282051282052e-06, | |
| "loss": 0.0008, | |
| "step": 11675 | |
| }, | |
| { | |
| "epoch": 68.42, | |
| "learning_rate": 4.258461538461539e-06, | |
| "loss": 0.0005, | |
| "step": 11700 | |
| }, | |
| { | |
| "epoch": 68.57, | |
| "learning_rate": 4.245641025641026e-06, | |
| "loss": 0.0006, | |
| "step": 11725 | |
| }, | |
| { | |
| "epoch": 68.71, | |
| "learning_rate": 4.232820512820513e-06, | |
| "loss": 0.0006, | |
| "step": 11750 | |
| }, | |
| { | |
| "epoch": 68.86, | |
| "learning_rate": 4.22e-06, | |
| "loss": 0.0016, | |
| "step": 11775 | |
| }, | |
| { | |
| "epoch": 69.01, | |
| "learning_rate": 4.207179487179487e-06, | |
| "loss": 0.0009, | |
| "step": 11800 | |
| }, | |
| { | |
| "epoch": 69.15, | |
| "learning_rate": 4.194358974358975e-06, | |
| "loss": 0.0004, | |
| "step": 11825 | |
| }, | |
| { | |
| "epoch": 69.3, | |
| "learning_rate": 4.1815384615384616e-06, | |
| "loss": 0.0006, | |
| "step": 11850 | |
| }, | |
| { | |
| "epoch": 69.44, | |
| "learning_rate": 4.168717948717949e-06, | |
| "loss": 0.0005, | |
| "step": 11875 | |
| }, | |
| { | |
| "epoch": 69.59, | |
| "learning_rate": 4.155897435897436e-06, | |
| "loss": 0.0003, | |
| "step": 11900 | |
| }, | |
| { | |
| "epoch": 69.74, | |
| "learning_rate": 4.143076923076924e-06, | |
| "loss": 0.0003, | |
| "step": 11925 | |
| }, | |
| { | |
| "epoch": 69.88, | |
| "learning_rate": 4.1302564102564106e-06, | |
| "loss": 0.0003, | |
| "step": 11950 | |
| }, | |
| { | |
| "epoch": 70.03, | |
| "learning_rate": 4.117435897435898e-06, | |
| "loss": 0.0003, | |
| "step": 11975 | |
| }, | |
| { | |
| "epoch": 70.18, | |
| "learning_rate": 4.104615384615385e-06, | |
| "loss": 0.0011, | |
| "step": 12000 | |
| }, | |
| { | |
| "epoch": 70.18, | |
| "eval_loss": 0.3981473743915558, | |
| "eval_runtime": 1432.7461, | |
| "eval_samples_per_second": 4.6, | |
| "eval_steps_per_second": 0.288, | |
| "eval_wer": 13.631143081665961, | |
| "step": 12000 | |
| }, | |
| { | |
| "epoch": 70.32, | |
| "learning_rate": 4.091794871794872e-06, | |
| "loss": 0.0004, | |
| "step": 12025 | |
| }, | |
| { | |
| "epoch": 70.47, | |
| "learning_rate": 4.078974358974359e-06, | |
| "loss": 0.0009, | |
| "step": 12050 | |
| }, | |
| { | |
| "epoch": 70.61, | |
| "learning_rate": 4.066153846153846e-06, | |
| "loss": 0.0007, | |
| "step": 12075 | |
| }, | |
| { | |
| "epoch": 70.76, | |
| "learning_rate": 4.053333333333333e-06, | |
| "loss": 0.0008, | |
| "step": 12100 | |
| }, | |
| { | |
| "epoch": 70.91, | |
| "learning_rate": 4.040512820512821e-06, | |
| "loss": 0.0009, | |
| "step": 12125 | |
| }, | |
| { | |
| "epoch": 71.05, | |
| "learning_rate": 4.027692307692308e-06, | |
| "loss": 0.0006, | |
| "step": 12150 | |
| }, | |
| { | |
| "epoch": 71.2, | |
| "learning_rate": 4.014871794871795e-06, | |
| "loss": 0.0005, | |
| "step": 12175 | |
| }, | |
| { | |
| "epoch": 71.35, | |
| "learning_rate": 4.002051282051282e-06, | |
| "loss": 0.001, | |
| "step": 12200 | |
| }, | |
| { | |
| "epoch": 71.49, | |
| "learning_rate": 3.98923076923077e-06, | |
| "loss": 0.0007, | |
| "step": 12225 | |
| }, | |
| { | |
| "epoch": 71.64, | |
| "learning_rate": 3.976410256410257e-06, | |
| "loss": 0.0006, | |
| "step": 12250 | |
| }, | |
| { | |
| "epoch": 71.78, | |
| "learning_rate": 3.963589743589744e-06, | |
| "loss": 0.0005, | |
| "step": 12275 | |
| }, | |
| { | |
| "epoch": 71.93, | |
| "learning_rate": 3.950769230769231e-06, | |
| "loss": 0.0003, | |
| "step": 12300 | |
| }, | |
| { | |
| "epoch": 72.08, | |
| "learning_rate": 3.937948717948718e-06, | |
| "loss": 0.0003, | |
| "step": 12325 | |
| }, | |
| { | |
| "epoch": 72.22, | |
| "learning_rate": 3.925128205128205e-06, | |
| "loss": 0.0004, | |
| "step": 12350 | |
| }, | |
| { | |
| "epoch": 72.37, | |
| "learning_rate": 3.9123076923076925e-06, | |
| "loss": 0.0005, | |
| "step": 12375 | |
| }, | |
| { | |
| "epoch": 72.51, | |
| "learning_rate": 3.899487179487179e-06, | |
| "loss": 0.0003, | |
| "step": 12400 | |
| }, | |
| { | |
| "epoch": 72.66, | |
| "learning_rate": 3.886666666666667e-06, | |
| "loss": 0.0011, | |
| "step": 12425 | |
| }, | |
| { | |
| "epoch": 72.81, | |
| "learning_rate": 3.873846153846154e-06, | |
| "loss": 0.0003, | |
| "step": 12450 | |
| }, | |
| { | |
| "epoch": 72.95, | |
| "learning_rate": 3.8610256410256415e-06, | |
| "loss": 0.0005, | |
| "step": 12475 | |
| }, | |
| { | |
| "epoch": 73.1, | |
| "learning_rate": 3.848205128205128e-06, | |
| "loss": 0.0003, | |
| "step": 12500 | |
| }, | |
| { | |
| "epoch": 73.25, | |
| "learning_rate": 3.835384615384616e-06, | |
| "loss": 0.0007, | |
| "step": 12525 | |
| }, | |
| { | |
| "epoch": 73.39, | |
| "learning_rate": 3.822564102564103e-06, | |
| "loss": 0.0004, | |
| "step": 12550 | |
| }, | |
| { | |
| "epoch": 73.54, | |
| "learning_rate": 3.80974358974359e-06, | |
| "loss": 0.0002, | |
| "step": 12575 | |
| }, | |
| { | |
| "epoch": 73.68, | |
| "learning_rate": 3.7969230769230773e-06, | |
| "loss": 0.0002, | |
| "step": 12600 | |
| }, | |
| { | |
| "epoch": 73.83, | |
| "learning_rate": 3.784102564102564e-06, | |
| "loss": 0.0002, | |
| "step": 12625 | |
| }, | |
| { | |
| "epoch": 73.98, | |
| "learning_rate": 3.7712820512820514e-06, | |
| "loss": 0.0002, | |
| "step": 12650 | |
| }, | |
| { | |
| "epoch": 74.12, | |
| "learning_rate": 3.7584615384615386e-06, | |
| "loss": 0.0002, | |
| "step": 12675 | |
| }, | |
| { | |
| "epoch": 74.27, | |
| "learning_rate": 3.745641025641026e-06, | |
| "loss": 0.0002, | |
| "step": 12700 | |
| }, | |
| { | |
| "epoch": 74.42, | |
| "learning_rate": 3.732820512820513e-06, | |
| "loss": 0.0002, | |
| "step": 12725 | |
| }, | |
| { | |
| "epoch": 74.56, | |
| "learning_rate": 3.7200000000000004e-06, | |
| "loss": 0.0004, | |
| "step": 12750 | |
| }, | |
| { | |
| "epoch": 74.71, | |
| "learning_rate": 3.7071794871794876e-06, | |
| "loss": 0.0003, | |
| "step": 12775 | |
| }, | |
| { | |
| "epoch": 74.85, | |
| "learning_rate": 3.694358974358975e-06, | |
| "loss": 0.0005, | |
| "step": 12800 | |
| }, | |
| { | |
| "epoch": 75.0, | |
| "learning_rate": 3.681538461538462e-06, | |
| "loss": 0.0007, | |
| "step": 12825 | |
| }, | |
| { | |
| "epoch": 75.15, | |
| "learning_rate": 3.6687179487179494e-06, | |
| "loss": 0.0002, | |
| "step": 12850 | |
| }, | |
| { | |
| "epoch": 75.29, | |
| "learning_rate": 3.655897435897436e-06, | |
| "loss": 0.0002, | |
| "step": 12875 | |
| }, | |
| { | |
| "epoch": 75.44, | |
| "learning_rate": 3.6430769230769234e-06, | |
| "loss": 0.0003, | |
| "step": 12900 | |
| }, | |
| { | |
| "epoch": 75.58, | |
| "learning_rate": 3.6302564102564103e-06, | |
| "loss": 0.0005, | |
| "step": 12925 | |
| }, | |
| { | |
| "epoch": 75.73, | |
| "learning_rate": 3.6174358974358975e-06, | |
| "loss": 0.0009, | |
| "step": 12950 | |
| }, | |
| { | |
| "epoch": 75.88, | |
| "learning_rate": 3.6046153846153848e-06, | |
| "loss": 0.0011, | |
| "step": 12975 | |
| }, | |
| { | |
| "epoch": 76.02, | |
| "learning_rate": 3.591794871794872e-06, | |
| "loss": 0.001, | |
| "step": 13000 | |
| }, | |
| { | |
| "epoch": 76.02, | |
| "eval_loss": 0.39472073316574097, | |
| "eval_runtime": 1431.6929, | |
| "eval_samples_per_second": 4.604, | |
| "eval_steps_per_second": 0.288, | |
| "eval_wer": 13.594873964818957, | |
| "step": 13000 | |
| }, | |
| { | |
| "epoch": 76.17, | |
| "learning_rate": 3.5789743589743593e-06, | |
| "loss": 0.0011, | |
| "step": 13025 | |
| }, | |
| { | |
| "epoch": 76.32, | |
| "learning_rate": 3.5661538461538465e-06, | |
| "loss": 0.0006, | |
| "step": 13050 | |
| }, | |
| { | |
| "epoch": 76.46, | |
| "learning_rate": 3.5533333333333338e-06, | |
| "loss": 0.0004, | |
| "step": 13075 | |
| }, | |
| { | |
| "epoch": 76.61, | |
| "learning_rate": 3.540512820512821e-06, | |
| "loss": 0.0004, | |
| "step": 13100 | |
| }, | |
| { | |
| "epoch": 76.75, | |
| "learning_rate": 3.5276923076923083e-06, | |
| "loss": 0.0003, | |
| "step": 13125 | |
| }, | |
| { | |
| "epoch": 76.9, | |
| "learning_rate": 3.5148717948717955e-06, | |
| "loss": 0.0004, | |
| "step": 13150 | |
| }, | |
| { | |
| "epoch": 77.05, | |
| "learning_rate": 3.5020512820512823e-06, | |
| "loss": 0.0005, | |
| "step": 13175 | |
| }, | |
| { | |
| "epoch": 77.19, | |
| "learning_rate": 3.4892307692307696e-06, | |
| "loss": 0.0006, | |
| "step": 13200 | |
| }, | |
| { | |
| "epoch": 77.34, | |
| "learning_rate": 3.4764102564102564e-06, | |
| "loss": 0.0006, | |
| "step": 13225 | |
| }, | |
| { | |
| "epoch": 77.49, | |
| "learning_rate": 3.4635897435897436e-06, | |
| "loss": 0.0005, | |
| "step": 13250 | |
| }, | |
| { | |
| "epoch": 77.63, | |
| "learning_rate": 3.450769230769231e-06, | |
| "loss": 0.0003, | |
| "step": 13275 | |
| }, | |
| { | |
| "epoch": 77.78, | |
| "learning_rate": 3.437948717948718e-06, | |
| "loss": 0.0004, | |
| "step": 13300 | |
| }, | |
| { | |
| "epoch": 77.92, | |
| "learning_rate": 3.4251282051282054e-06, | |
| "loss": 0.0006, | |
| "step": 13325 | |
| }, | |
| { | |
| "epoch": 78.07, | |
| "learning_rate": 3.4123076923076926e-06, | |
| "loss": 0.0004, | |
| "step": 13350 | |
| }, | |
| { | |
| "epoch": 78.22, | |
| "learning_rate": 3.39948717948718e-06, | |
| "loss": 0.0004, | |
| "step": 13375 | |
| }, | |
| { | |
| "epoch": 78.36, | |
| "learning_rate": 3.386666666666667e-06, | |
| "loss": 0.0005, | |
| "step": 13400 | |
| }, | |
| { | |
| "epoch": 78.51, | |
| "learning_rate": 3.3738461538461544e-06, | |
| "loss": 0.0003, | |
| "step": 13425 | |
| }, | |
| { | |
| "epoch": 78.65, | |
| "learning_rate": 3.3610256410256416e-06, | |
| "loss": 0.0003, | |
| "step": 13450 | |
| }, | |
| { | |
| "epoch": 78.8, | |
| "learning_rate": 3.3482051282051285e-06, | |
| "loss": 0.0003, | |
| "step": 13475 | |
| }, | |
| { | |
| "epoch": 78.95, | |
| "learning_rate": 3.3353846153846157e-06, | |
| "loss": 0.0003, | |
| "step": 13500 | |
| }, | |
| { | |
| "epoch": 79.09, | |
| "learning_rate": 3.322564102564103e-06, | |
| "loss": 0.0003, | |
| "step": 13525 | |
| }, | |
| { | |
| "epoch": 79.24, | |
| "learning_rate": 3.3097435897435898e-06, | |
| "loss": 0.0005, | |
| "step": 13550 | |
| }, | |
| { | |
| "epoch": 79.39, | |
| "learning_rate": 3.296923076923077e-06, | |
| "loss": 0.0004, | |
| "step": 13575 | |
| }, | |
| { | |
| "epoch": 79.53, | |
| "learning_rate": 3.2841025641025643e-06, | |
| "loss": 0.0001, | |
| "step": 13600 | |
| }, | |
| { | |
| "epoch": 79.68, | |
| "learning_rate": 3.2712820512820515e-06, | |
| "loss": 0.0002, | |
| "step": 13625 | |
| }, | |
| { | |
| "epoch": 79.82, | |
| "learning_rate": 3.2584615384615388e-06, | |
| "loss": 0.0004, | |
| "step": 13650 | |
| }, | |
| { | |
| "epoch": 79.97, | |
| "learning_rate": 3.245641025641026e-06, | |
| "loss": 0.0003, | |
| "step": 13675 | |
| }, | |
| { | |
| "epoch": 80.12, | |
| "learning_rate": 3.2328205128205133e-06, | |
| "loss": 0.0006, | |
| "step": 13700 | |
| }, | |
| { | |
| "epoch": 80.26, | |
| "learning_rate": 3.2200000000000005e-06, | |
| "loss": 0.0002, | |
| "step": 13725 | |
| }, | |
| { | |
| "epoch": 80.41, | |
| "learning_rate": 3.2071794871794878e-06, | |
| "loss": 0.0003, | |
| "step": 13750 | |
| }, | |
| { | |
| "epoch": 80.56, | |
| "learning_rate": 3.194358974358975e-06, | |
| "loss": 0.0003, | |
| "step": 13775 | |
| }, | |
| { | |
| "epoch": 80.7, | |
| "learning_rate": 3.181538461538462e-06, | |
| "loss": 0.0002, | |
| "step": 13800 | |
| }, | |
| { | |
| "epoch": 80.85, | |
| "learning_rate": 3.168717948717949e-06, | |
| "loss": 0.0003, | |
| "step": 13825 | |
| }, | |
| { | |
| "epoch": 80.99, | |
| "learning_rate": 3.155897435897436e-06, | |
| "loss": 0.0003, | |
| "step": 13850 | |
| }, | |
| { | |
| "epoch": 81.14, | |
| "learning_rate": 3.143076923076923e-06, | |
| "loss": 0.0002, | |
| "step": 13875 | |
| }, | |
| { | |
| "epoch": 81.29, | |
| "learning_rate": 3.1302564102564104e-06, | |
| "loss": 0.0002, | |
| "step": 13900 | |
| }, | |
| { | |
| "epoch": 81.43, | |
| "learning_rate": 3.1174358974358976e-06, | |
| "loss": 0.0003, | |
| "step": 13925 | |
| }, | |
| { | |
| "epoch": 81.58, | |
| "learning_rate": 3.104615384615385e-06, | |
| "loss": 0.0003, | |
| "step": 13950 | |
| }, | |
| { | |
| "epoch": 81.73, | |
| "learning_rate": 3.091794871794872e-06, | |
| "loss": 0.0003, | |
| "step": 13975 | |
| }, | |
| { | |
| "epoch": 81.87, | |
| "learning_rate": 3.0789743589743594e-06, | |
| "loss": 0.0002, | |
| "step": 14000 | |
| }, | |
| { | |
| "epoch": 81.87, | |
| "eval_loss": 0.40385952591896057, | |
| "eval_runtime": 1432.4631, | |
| "eval_samples_per_second": 4.601, | |
| "eval_steps_per_second": 0.288, | |
| "eval_wer": 13.617038425114348, | |
| "step": 14000 | |
| }, | |
| { | |
| "epoch": 82.02, | |
| "learning_rate": 3.0661538461538466e-06, | |
| "loss": 0.0002, | |
| "step": 14025 | |
| }, | |
| { | |
| "epoch": 82.16, | |
| "learning_rate": 3.053333333333334e-06, | |
| "loss": 0.0001, | |
| "step": 14050 | |
| }, | |
| { | |
| "epoch": 82.31, | |
| "learning_rate": 3.040512820512821e-06, | |
| "loss": 0.0004, | |
| "step": 14075 | |
| }, | |
| { | |
| "epoch": 82.46, | |
| "learning_rate": 3.027692307692308e-06, | |
| "loss": 0.0002, | |
| "step": 14100 | |
| }, | |
| { | |
| "epoch": 82.6, | |
| "learning_rate": 3.014871794871795e-06, | |
| "loss": 0.0002, | |
| "step": 14125 | |
| }, | |
| { | |
| "epoch": 82.75, | |
| "learning_rate": 3.002051282051282e-06, | |
| "loss": 0.0002, | |
| "step": 14150 | |
| }, | |
| { | |
| "epoch": 82.89, | |
| "learning_rate": 2.9892307692307693e-06, | |
| "loss": 0.0003, | |
| "step": 14175 | |
| }, | |
| { | |
| "epoch": 83.04, | |
| "learning_rate": 2.9764102564102565e-06, | |
| "loss": 0.0004, | |
| "step": 14200 | |
| }, | |
| { | |
| "epoch": 83.19, | |
| "learning_rate": 2.9635897435897438e-06, | |
| "loss": 0.0007, | |
| "step": 14225 | |
| }, | |
| { | |
| "epoch": 83.33, | |
| "learning_rate": 2.950769230769231e-06, | |
| "loss": 0.0003, | |
| "step": 14250 | |
| }, | |
| { | |
| "epoch": 83.48, | |
| "learning_rate": 2.9379487179487183e-06, | |
| "loss": 0.0004, | |
| "step": 14275 | |
| }, | |
| { | |
| "epoch": 83.63, | |
| "learning_rate": 2.9251282051282055e-06, | |
| "loss": 0.0008, | |
| "step": 14300 | |
| }, | |
| { | |
| "epoch": 83.77, | |
| "learning_rate": 2.9123076923076928e-06, | |
| "loss": 0.0002, | |
| "step": 14325 | |
| }, | |
| { | |
| "epoch": 83.92, | |
| "learning_rate": 2.89948717948718e-06, | |
| "loss": 0.0005, | |
| "step": 14350 | |
| }, | |
| { | |
| "epoch": 84.06, | |
| "learning_rate": 2.8866666666666673e-06, | |
| "loss": 0.0002, | |
| "step": 14375 | |
| }, | |
| { | |
| "epoch": 84.21, | |
| "learning_rate": 2.873846153846154e-06, | |
| "loss": 0.0006, | |
| "step": 14400 | |
| }, | |
| { | |
| "epoch": 84.36, | |
| "learning_rate": 2.8610256410256413e-06, | |
| "loss": 0.0003, | |
| "step": 14425 | |
| }, | |
| { | |
| "epoch": 84.5, | |
| "learning_rate": 2.848205128205128e-06, | |
| "loss": 0.0006, | |
| "step": 14450 | |
| }, | |
| { | |
| "epoch": 84.65, | |
| "learning_rate": 2.8353846153846154e-06, | |
| "loss": 0.0005, | |
| "step": 14475 | |
| }, | |
| { | |
| "epoch": 84.8, | |
| "learning_rate": 2.8225641025641027e-06, | |
| "loss": 0.0002, | |
| "step": 14500 | |
| }, | |
| { | |
| "epoch": 84.94, | |
| "learning_rate": 2.80974358974359e-06, | |
| "loss": 0.0004, | |
| "step": 14525 | |
| }, | |
| { | |
| "epoch": 85.09, | |
| "learning_rate": 2.796923076923077e-06, | |
| "loss": 0.0002, | |
| "step": 14550 | |
| }, | |
| { | |
| "epoch": 85.23, | |
| "learning_rate": 2.7841025641025644e-06, | |
| "loss": 0.0003, | |
| "step": 14575 | |
| }, | |
| { | |
| "epoch": 85.38, | |
| "learning_rate": 2.7712820512820516e-06, | |
| "loss": 0.0001, | |
| "step": 14600 | |
| }, | |
| { | |
| "epoch": 85.53, | |
| "learning_rate": 2.758461538461539e-06, | |
| "loss": 0.0006, | |
| "step": 14625 | |
| }, | |
| { | |
| "epoch": 85.67, | |
| "learning_rate": 2.745641025641026e-06, | |
| "loss": 0.0004, | |
| "step": 14650 | |
| }, | |
| { | |
| "epoch": 85.82, | |
| "learning_rate": 2.7328205128205134e-06, | |
| "loss": 0.0005, | |
| "step": 14675 | |
| }, | |
| { | |
| "epoch": 85.96, | |
| "learning_rate": 2.7200000000000002e-06, | |
| "loss": 0.0002, | |
| "step": 14700 | |
| }, | |
| { | |
| "epoch": 86.11, | |
| "learning_rate": 2.7071794871794875e-06, | |
| "loss": 0.0003, | |
| "step": 14725 | |
| }, | |
| { | |
| "epoch": 86.26, | |
| "learning_rate": 2.6943589743589743e-06, | |
| "loss": 0.0001, | |
| "step": 14750 | |
| }, | |
| { | |
| "epoch": 86.4, | |
| "learning_rate": 2.6815384615384615e-06, | |
| "loss": 0.0004, | |
| "step": 14775 | |
| }, | |
| { | |
| "epoch": 86.55, | |
| "learning_rate": 2.6687179487179488e-06, | |
| "loss": 0.0002, | |
| "step": 14800 | |
| }, | |
| { | |
| "epoch": 86.7, | |
| "learning_rate": 2.6564102564102566e-06, | |
| "loss": 0.0003, | |
| "step": 14825 | |
| }, | |
| { | |
| "epoch": 86.84, | |
| "learning_rate": 2.643589743589744e-06, | |
| "loss": 0.0001, | |
| "step": 14850 | |
| }, | |
| { | |
| "epoch": 86.99, | |
| "learning_rate": 2.630769230769231e-06, | |
| "loss": 0.0001, | |
| "step": 14875 | |
| }, | |
| { | |
| "epoch": 87.13, | |
| "learning_rate": 2.6179487179487184e-06, | |
| "loss": 0.0001, | |
| "step": 14900 | |
| }, | |
| { | |
| "epoch": 87.28, | |
| "learning_rate": 2.605128205128205e-06, | |
| "loss": 0.0001, | |
| "step": 14925 | |
| }, | |
| { | |
| "epoch": 87.43, | |
| "learning_rate": 2.5923076923076924e-06, | |
| "loss": 0.0002, | |
| "step": 14950 | |
| }, | |
| { | |
| "epoch": 87.57, | |
| "learning_rate": 2.5794871794871797e-06, | |
| "loss": 0.0001, | |
| "step": 14975 | |
| }, | |
| { | |
| "epoch": 87.72, | |
| "learning_rate": 2.566666666666667e-06, | |
| "loss": 0.0001, | |
| "step": 15000 | |
| }, | |
| { | |
| "epoch": 87.72, | |
| "eval_loss": 0.40570446848869324, | |
| "eval_runtime": 1430.9463, | |
| "eval_samples_per_second": 4.606, | |
| "eval_steps_per_second": 0.288, | |
| "eval_wer": 13.457857301174716, | |
| "step": 15000 | |
| }, | |
| { | |
| "epoch": 87.87, | |
| "learning_rate": 2.5538461538461538e-06, | |
| "loss": 0.0001, | |
| "step": 15025 | |
| }, | |
| { | |
| "epoch": 88.01, | |
| "learning_rate": 2.541025641025641e-06, | |
| "loss": 0.0003, | |
| "step": 15050 | |
| }, | |
| { | |
| "epoch": 88.16, | |
| "learning_rate": 2.5282051282051283e-06, | |
| "loss": 0.0002, | |
| "step": 15075 | |
| }, | |
| { | |
| "epoch": 88.3, | |
| "learning_rate": 2.5153846153846155e-06, | |
| "loss": 0.0002, | |
| "step": 15100 | |
| }, | |
| { | |
| "epoch": 88.45, | |
| "learning_rate": 2.5025641025641028e-06, | |
| "loss": 0.0002, | |
| "step": 15125 | |
| }, | |
| { | |
| "epoch": 88.6, | |
| "learning_rate": 2.48974358974359e-06, | |
| "loss": 0.0002, | |
| "step": 15150 | |
| }, | |
| { | |
| "epoch": 88.74, | |
| "learning_rate": 2.4769230769230773e-06, | |
| "loss": 0.0001, | |
| "step": 15175 | |
| }, | |
| { | |
| "epoch": 88.89, | |
| "learning_rate": 2.464102564102564e-06, | |
| "loss": 0.0002, | |
| "step": 15200 | |
| }, | |
| { | |
| "epoch": 89.04, | |
| "learning_rate": 2.4512820512820513e-06, | |
| "loss": 0.0003, | |
| "step": 15225 | |
| }, | |
| { | |
| "epoch": 89.18, | |
| "learning_rate": 2.4384615384615386e-06, | |
| "loss": 0.0002, | |
| "step": 15250 | |
| }, | |
| { | |
| "epoch": 89.33, | |
| "learning_rate": 2.425641025641026e-06, | |
| "loss": 0.0002, | |
| "step": 15275 | |
| }, | |
| { | |
| "epoch": 89.47, | |
| "learning_rate": 2.412820512820513e-06, | |
| "loss": 0.0003, | |
| "step": 15300 | |
| }, | |
| { | |
| "epoch": 89.62, | |
| "learning_rate": 2.4000000000000003e-06, | |
| "loss": 0.0002, | |
| "step": 15325 | |
| }, | |
| { | |
| "epoch": 89.77, | |
| "learning_rate": 2.387179487179487e-06, | |
| "loss": 0.0001, | |
| "step": 15350 | |
| }, | |
| { | |
| "epoch": 89.91, | |
| "learning_rate": 2.3743589743589744e-06, | |
| "loss": 0.0003, | |
| "step": 15375 | |
| }, | |
| { | |
| "epoch": 90.06, | |
| "learning_rate": 2.3615384615384616e-06, | |
| "loss": 0.0001, | |
| "step": 15400 | |
| }, | |
| { | |
| "epoch": 90.2, | |
| "learning_rate": 2.348717948717949e-06, | |
| "loss": 0.0002, | |
| "step": 15425 | |
| }, | |
| { | |
| "epoch": 90.35, | |
| "learning_rate": 2.335897435897436e-06, | |
| "loss": 0.0002, | |
| "step": 15450 | |
| }, | |
| { | |
| "epoch": 90.5, | |
| "learning_rate": 2.3230769230769234e-06, | |
| "loss": 0.0002, | |
| "step": 15475 | |
| }, | |
| { | |
| "epoch": 90.64, | |
| "learning_rate": 2.31025641025641e-06, | |
| "loss": 0.0002, | |
| "step": 15500 | |
| }, | |
| { | |
| "epoch": 90.79, | |
| "learning_rate": 2.2974358974358975e-06, | |
| "loss": 0.0001, | |
| "step": 15525 | |
| }, | |
| { | |
| "epoch": 90.94, | |
| "learning_rate": 2.2846153846153847e-06, | |
| "loss": 0.0001, | |
| "step": 15550 | |
| }, | |
| { | |
| "epoch": 91.08, | |
| "learning_rate": 2.271794871794872e-06, | |
| "loss": 0.0001, | |
| "step": 15575 | |
| }, | |
| { | |
| "epoch": 91.23, | |
| "learning_rate": 2.258974358974359e-06, | |
| "loss": 0.0003, | |
| "step": 15600 | |
| }, | |
| { | |
| "epoch": 91.37, | |
| "learning_rate": 2.2461538461538464e-06, | |
| "loss": 0.0002, | |
| "step": 15625 | |
| }, | |
| { | |
| "epoch": 91.52, | |
| "learning_rate": 2.2333333333333333e-06, | |
| "loss": 0.0002, | |
| "step": 15650 | |
| }, | |
| { | |
| "epoch": 91.67, | |
| "learning_rate": 2.2205128205128205e-06, | |
| "loss": 0.0001, | |
| "step": 15675 | |
| }, | |
| { | |
| "epoch": 91.81, | |
| "learning_rate": 2.2076923076923078e-06, | |
| "loss": 0.0003, | |
| "step": 15700 | |
| }, | |
| { | |
| "epoch": 91.96, | |
| "learning_rate": 2.194871794871795e-06, | |
| "loss": 0.0005, | |
| "step": 15725 | |
| }, | |
| { | |
| "epoch": 92.11, | |
| "learning_rate": 2.1820512820512823e-06, | |
| "loss": 0.0001, | |
| "step": 15750 | |
| }, | |
| { | |
| "epoch": 92.25, | |
| "learning_rate": 2.1692307692307695e-06, | |
| "loss": 0.0001, | |
| "step": 15775 | |
| }, | |
| { | |
| "epoch": 92.4, | |
| "learning_rate": 2.1564102564102563e-06, | |
| "loss": 0.0002, | |
| "step": 15800 | |
| }, | |
| { | |
| "epoch": 92.54, | |
| "learning_rate": 2.1435897435897436e-06, | |
| "loss": 0.0001, | |
| "step": 15825 | |
| }, | |
| { | |
| "epoch": 92.69, | |
| "learning_rate": 2.130769230769231e-06, | |
| "loss": 0.0001, | |
| "step": 15850 | |
| }, | |
| { | |
| "epoch": 92.84, | |
| "learning_rate": 2.117948717948718e-06, | |
| "loss": 0.0001, | |
| "step": 15875 | |
| }, | |
| { | |
| "epoch": 92.98, | |
| "learning_rate": 2.1051282051282053e-06, | |
| "loss": 0.0001, | |
| "step": 15900 | |
| }, | |
| { | |
| "epoch": 93.13, | |
| "learning_rate": 2.0923076923076926e-06, | |
| "loss": 0.0001, | |
| "step": 15925 | |
| }, | |
| { | |
| "epoch": 93.27, | |
| "learning_rate": 2.0794871794871794e-06, | |
| "loss": 0.0002, | |
| "step": 15950 | |
| }, | |
| { | |
| "epoch": 93.42, | |
| "learning_rate": 2.0666666666666666e-06, | |
| "loss": 0.0001, | |
| "step": 15975 | |
| }, | |
| { | |
| "epoch": 93.57, | |
| "learning_rate": 2.053846153846154e-06, | |
| "loss": 0.0008, | |
| "step": 16000 | |
| }, | |
| { | |
| "epoch": 93.57, | |
| "eval_loss": 0.4119240939617157, | |
| "eval_runtime": 1431.8205, | |
| "eval_samples_per_second": 4.603, | |
| "eval_steps_per_second": 0.288, | |
| "eval_wer": 13.274496766003749, | |
| "step": 16000 | |
| }, | |
| { | |
| "epoch": 93.71, | |
| "learning_rate": 2.041025641025641e-06, | |
| "loss": 0.0002, | |
| "step": 16025 | |
| }, | |
| { | |
| "epoch": 93.86, | |
| "learning_rate": 2.0282051282051284e-06, | |
| "loss": 0.0003, | |
| "step": 16050 | |
| }, | |
| { | |
| "epoch": 94.01, | |
| "learning_rate": 2.0153846153846156e-06, | |
| "loss": 0.0002, | |
| "step": 16075 | |
| }, | |
| { | |
| "epoch": 94.15, | |
| "learning_rate": 2.0025641025641025e-06, | |
| "loss": 0.0002, | |
| "step": 16100 | |
| }, | |
| { | |
| "epoch": 94.3, | |
| "learning_rate": 1.9897435897435897e-06, | |
| "loss": 0.0002, | |
| "step": 16125 | |
| }, | |
| { | |
| "epoch": 94.44, | |
| "learning_rate": 1.976923076923077e-06, | |
| "loss": 0.0003, | |
| "step": 16150 | |
| }, | |
| { | |
| "epoch": 94.59, | |
| "learning_rate": 1.964102564102564e-06, | |
| "loss": 0.0003, | |
| "step": 16175 | |
| }, | |
| { | |
| "epoch": 94.74, | |
| "learning_rate": 1.9512820512820515e-06, | |
| "loss": 0.0001, | |
| "step": 16200 | |
| }, | |
| { | |
| "epoch": 94.88, | |
| "learning_rate": 1.9384615384615387e-06, | |
| "loss": 0.0003, | |
| "step": 16225 | |
| }, | |
| { | |
| "epoch": 95.03, | |
| "learning_rate": 1.9256410256410255e-06, | |
| "loss": 0.0001, | |
| "step": 16250 | |
| }, | |
| { | |
| "epoch": 95.18, | |
| "learning_rate": 1.9128205128205128e-06, | |
| "loss": 0.0002, | |
| "step": 16275 | |
| }, | |
| { | |
| "epoch": 95.32, | |
| "learning_rate": 1.9000000000000002e-06, | |
| "loss": 0.0001, | |
| "step": 16300 | |
| }, | |
| { | |
| "epoch": 95.47, | |
| "learning_rate": 1.8871794871794875e-06, | |
| "loss": 0.0001, | |
| "step": 16325 | |
| }, | |
| { | |
| "epoch": 95.61, | |
| "learning_rate": 1.8743589743589747e-06, | |
| "loss": 0.0001, | |
| "step": 16350 | |
| }, | |
| { | |
| "epoch": 95.76, | |
| "learning_rate": 1.8615384615384616e-06, | |
| "loss": 0.0001, | |
| "step": 16375 | |
| }, | |
| { | |
| "epoch": 95.91, | |
| "learning_rate": 1.8487179487179488e-06, | |
| "loss": 0.0001, | |
| "step": 16400 | |
| }, | |
| { | |
| "epoch": 96.05, | |
| "learning_rate": 1.8364102564102564e-06, | |
| "loss": 0.0001, | |
| "step": 16425 | |
| }, | |
| { | |
| "epoch": 96.2, | |
| "learning_rate": 1.8235897435897437e-06, | |
| "loss": 0.0001, | |
| "step": 16450 | |
| }, | |
| { | |
| "epoch": 96.35, | |
| "learning_rate": 1.810769230769231e-06, | |
| "loss": 0.0004, | |
| "step": 16475 | |
| }, | |
| { | |
| "epoch": 96.49, | |
| "learning_rate": 1.7979487179487182e-06, | |
| "loss": 0.0002, | |
| "step": 16500 | |
| }, | |
| { | |
| "epoch": 96.64, | |
| "learning_rate": 1.7851282051282054e-06, | |
| "loss": 0.0001, | |
| "step": 16525 | |
| }, | |
| { | |
| "epoch": 96.78, | |
| "learning_rate": 1.7723076923076922e-06, | |
| "loss": 0.0001, | |
| "step": 16550 | |
| }, | |
| { | |
| "epoch": 96.93, | |
| "learning_rate": 1.7594871794871795e-06, | |
| "loss": 0.0002, | |
| "step": 16575 | |
| }, | |
| { | |
| "epoch": 97.08, | |
| "learning_rate": 1.7466666666666667e-06, | |
| "loss": 0.0001, | |
| "step": 16600 | |
| }, | |
| { | |
| "epoch": 97.22, | |
| "learning_rate": 1.733846153846154e-06, | |
| "loss": 0.0001, | |
| "step": 16625 | |
| }, | |
| { | |
| "epoch": 97.37, | |
| "learning_rate": 1.7210256410256412e-06, | |
| "loss": 0.0001, | |
| "step": 16650 | |
| }, | |
| { | |
| "epoch": 97.51, | |
| "learning_rate": 1.7082051282051285e-06, | |
| "loss": 0.0001, | |
| "step": 16675 | |
| }, | |
| { | |
| "epoch": 97.66, | |
| "learning_rate": 1.6953846153846153e-06, | |
| "loss": 0.0001, | |
| "step": 16700 | |
| }, | |
| { | |
| "epoch": 97.81, | |
| "learning_rate": 1.6825641025641026e-06, | |
| "loss": 0.0001, | |
| "step": 16725 | |
| }, | |
| { | |
| "epoch": 97.95, | |
| "learning_rate": 1.6697435897435898e-06, | |
| "loss": 0.0001, | |
| "step": 16750 | |
| }, | |
| { | |
| "epoch": 98.1, | |
| "learning_rate": 1.656923076923077e-06, | |
| "loss": 0.0001, | |
| "step": 16775 | |
| }, | |
| { | |
| "epoch": 98.25, | |
| "learning_rate": 1.6441025641025643e-06, | |
| "loss": 0.0002, | |
| "step": 16800 | |
| }, | |
| { | |
| "epoch": 98.39, | |
| "learning_rate": 1.6312820512820516e-06, | |
| "loss": 0.0001, | |
| "step": 16825 | |
| }, | |
| { | |
| "epoch": 98.54, | |
| "learning_rate": 1.6184615384615384e-06, | |
| "loss": 0.0001, | |
| "step": 16850 | |
| }, | |
| { | |
| "epoch": 98.68, | |
| "learning_rate": 1.6056410256410256e-06, | |
| "loss": 0.0002, | |
| "step": 16875 | |
| }, | |
| { | |
| "epoch": 98.83, | |
| "learning_rate": 1.5928205128205129e-06, | |
| "loss": 0.0002, | |
| "step": 16900 | |
| }, | |
| { | |
| "epoch": 98.98, | |
| "learning_rate": 1.5800000000000001e-06, | |
| "loss": 0.0001, | |
| "step": 16925 | |
| }, | |
| { | |
| "epoch": 99.12, | |
| "learning_rate": 1.5671794871794874e-06, | |
| "loss": 0.0001, | |
| "step": 16950 | |
| }, | |
| { | |
| "epoch": 99.27, | |
| "learning_rate": 1.5543589743589746e-06, | |
| "loss": 0.0004, | |
| "step": 16975 | |
| }, | |
| { | |
| "epoch": 99.42, | |
| "learning_rate": 1.5415384615384614e-06, | |
| "loss": 0.0001, | |
| "step": 17000 | |
| }, | |
| { | |
| "epoch": 99.42, | |
| "eval_loss": 0.42030245065689087, | |
| "eval_runtime": 1429.9307, | |
| "eval_samples_per_second": 4.609, | |
| "eval_steps_per_second": 0.288, | |
| "eval_wer": 13.17173426827057, | |
| "step": 17000 | |
| }, | |
| { | |
| "epoch": 99.56, | |
| "learning_rate": 1.5287179487179487e-06, | |
| "loss": 0.0002, | |
| "step": 17025 | |
| }, | |
| { | |
| "epoch": 99.71, | |
| "learning_rate": 1.515897435897436e-06, | |
| "loss": 0.0001, | |
| "step": 17050 | |
| }, | |
| { | |
| "epoch": 99.85, | |
| "learning_rate": 1.5030769230769232e-06, | |
| "loss": 0.0002, | |
| "step": 17075 | |
| }, | |
| { | |
| "epoch": 100.0, | |
| "learning_rate": 1.4902564102564104e-06, | |
| "loss": 0.0002, | |
| "step": 17100 | |
| }, | |
| { | |
| "epoch": 100.15, | |
| "learning_rate": 1.4774358974358977e-06, | |
| "loss": 0.0003, | |
| "step": 17125 | |
| }, | |
| { | |
| "epoch": 100.29, | |
| "learning_rate": 1.4646153846153845e-06, | |
| "loss": 0.0002, | |
| "step": 17150 | |
| }, | |
| { | |
| "epoch": 100.44, | |
| "learning_rate": 1.4517948717948718e-06, | |
| "loss": 0.0002, | |
| "step": 17175 | |
| }, | |
| { | |
| "epoch": 100.58, | |
| "learning_rate": 1.438974358974359e-06, | |
| "loss": 0.0001, | |
| "step": 17200 | |
| }, | |
| { | |
| "epoch": 100.73, | |
| "learning_rate": 1.4261538461538462e-06, | |
| "loss": 0.0002, | |
| "step": 17225 | |
| }, | |
| { | |
| "epoch": 100.88, | |
| "learning_rate": 1.4133333333333335e-06, | |
| "loss": 0.0002, | |
| "step": 17250 | |
| }, | |
| { | |
| "epoch": 101.02, | |
| "learning_rate": 1.4005128205128207e-06, | |
| "loss": 0.0002, | |
| "step": 17275 | |
| }, | |
| { | |
| "epoch": 101.17, | |
| "learning_rate": 1.3876923076923076e-06, | |
| "loss": 0.0004, | |
| "step": 17300 | |
| }, | |
| { | |
| "epoch": 101.32, | |
| "learning_rate": 1.3748717948717948e-06, | |
| "loss": 0.0001, | |
| "step": 17325 | |
| }, | |
| { | |
| "epoch": 101.46, | |
| "learning_rate": 1.362051282051282e-06, | |
| "loss": 0.0001, | |
| "step": 17350 | |
| }, | |
| { | |
| "epoch": 101.61, | |
| "learning_rate": 1.3492307692307693e-06, | |
| "loss": 0.0002, | |
| "step": 17375 | |
| }, | |
| { | |
| "epoch": 101.75, | |
| "learning_rate": 1.3364102564102566e-06, | |
| "loss": 0.0001, | |
| "step": 17400 | |
| }, | |
| { | |
| "epoch": 101.9, | |
| "learning_rate": 1.3235897435897438e-06, | |
| "loss": 0.0001, | |
| "step": 17425 | |
| }, | |
| { | |
| "epoch": 102.05, | |
| "learning_rate": 1.3107692307692308e-06, | |
| "loss": 0.0001, | |
| "step": 17450 | |
| }, | |
| { | |
| "epoch": 102.19, | |
| "learning_rate": 1.2979487179487179e-06, | |
| "loss": 0.0002, | |
| "step": 17475 | |
| }, | |
| { | |
| "epoch": 102.34, | |
| "learning_rate": 1.2851282051282051e-06, | |
| "loss": 0.0001, | |
| "step": 17500 | |
| }, | |
| { | |
| "epoch": 102.49, | |
| "learning_rate": 1.2723076923076924e-06, | |
| "loss": 0.0001, | |
| "step": 17525 | |
| }, | |
| { | |
| "epoch": 102.63, | |
| "learning_rate": 1.2594871794871796e-06, | |
| "loss": 0.0001, | |
| "step": 17550 | |
| }, | |
| { | |
| "epoch": 102.78, | |
| "learning_rate": 1.2466666666666667e-06, | |
| "loss": 0.0001, | |
| "step": 17575 | |
| }, | |
| { | |
| "epoch": 102.92, | |
| "learning_rate": 1.233846153846154e-06, | |
| "loss": 0.0001, | |
| "step": 17600 | |
| }, | |
| { | |
| "epoch": 103.07, | |
| "learning_rate": 1.2210256410256412e-06, | |
| "loss": 0.0001, | |
| "step": 17625 | |
| }, | |
| { | |
| "epoch": 103.22, | |
| "learning_rate": 1.2082051282051282e-06, | |
| "loss": 0.0001, | |
| "step": 17650 | |
| }, | |
| { | |
| "epoch": 103.36, | |
| "learning_rate": 1.1953846153846154e-06, | |
| "loss": 0.0001, | |
| "step": 17675 | |
| }, | |
| { | |
| "epoch": 103.51, | |
| "learning_rate": 1.1825641025641027e-06, | |
| "loss": 0.0001, | |
| "step": 17700 | |
| }, | |
| { | |
| "epoch": 103.65, | |
| "learning_rate": 1.1697435897435897e-06, | |
| "loss": 0.0001, | |
| "step": 17725 | |
| }, | |
| { | |
| "epoch": 103.8, | |
| "learning_rate": 1.156923076923077e-06, | |
| "loss": 0.0001, | |
| "step": 17750 | |
| }, | |
| { | |
| "epoch": 103.95, | |
| "learning_rate": 1.1441025641025642e-06, | |
| "loss": 0.0001, | |
| "step": 17775 | |
| }, | |
| { | |
| "epoch": 104.09, | |
| "learning_rate": 1.1312820512820513e-06, | |
| "loss": 0.0001, | |
| "step": 17800 | |
| }, | |
| { | |
| "epoch": 104.24, | |
| "learning_rate": 1.1184615384615385e-06, | |
| "loss": 0.0001, | |
| "step": 17825 | |
| }, | |
| { | |
| "epoch": 104.39, | |
| "learning_rate": 1.1056410256410258e-06, | |
| "loss": 0.0003, | |
| "step": 17850 | |
| }, | |
| { | |
| "epoch": 104.53, | |
| "learning_rate": 1.0928205128205128e-06, | |
| "loss": 0.0001, | |
| "step": 17875 | |
| }, | |
| { | |
| "epoch": 104.68, | |
| "learning_rate": 1.0805128205128206e-06, | |
| "loss": 0.0003, | |
| "step": 17900 | |
| }, | |
| { | |
| "epoch": 104.82, | |
| "learning_rate": 1.0676923076923079e-06, | |
| "loss": 0.0001, | |
| "step": 17925 | |
| }, | |
| { | |
| "epoch": 104.97, | |
| "learning_rate": 1.054871794871795e-06, | |
| "loss": 0.0001, | |
| "step": 17950 | |
| }, | |
| { | |
| "epoch": 105.12, | |
| "learning_rate": 1.0420512820512822e-06, | |
| "loss": 0.0001, | |
| "step": 17975 | |
| }, | |
| { | |
| "epoch": 105.26, | |
| "learning_rate": 1.0292307692307694e-06, | |
| "loss": 0.0001, | |
| "step": 18000 | |
| }, | |
| { | |
| "epoch": 105.26, | |
| "eval_loss": 0.41656923294067383, | |
| "eval_runtime": 1428.2992, | |
| "eval_samples_per_second": 4.615, | |
| "eval_steps_per_second": 0.288, | |
| "eval_wer": 13.097181083640614, | |
| "step": 18000 | |
| }, | |
| { | |
| "epoch": 105.41, | |
| "learning_rate": 1.0164102564102564e-06, | |
| "loss": 0.0003, | |
| "step": 18025 | |
| }, | |
| { | |
| "epoch": 105.56, | |
| "learning_rate": 1.0035897435897437e-06, | |
| "loss": 0.0001, | |
| "step": 18050 | |
| }, | |
| { | |
| "epoch": 105.7, | |
| "learning_rate": 9.90769230769231e-07, | |
| "loss": 0.0001, | |
| "step": 18075 | |
| }, | |
| { | |
| "epoch": 105.85, | |
| "learning_rate": 9.77948717948718e-07, | |
| "loss": 0.0001, | |
| "step": 18100 | |
| }, | |
| { | |
| "epoch": 105.99, | |
| "learning_rate": 9.651282051282052e-07, | |
| "loss": 0.0001, | |
| "step": 18125 | |
| }, | |
| { | |
| "epoch": 106.14, | |
| "learning_rate": 9.523076923076924e-07, | |
| "loss": 0.0001, | |
| "step": 18150 | |
| }, | |
| { | |
| "epoch": 106.29, | |
| "learning_rate": 9.394871794871796e-07, | |
| "loss": 0.0001, | |
| "step": 18175 | |
| }, | |
| { | |
| "epoch": 106.43, | |
| "learning_rate": 9.266666666666667e-07, | |
| "loss": 0.0002, | |
| "step": 18200 | |
| }, | |
| { | |
| "epoch": 106.58, | |
| "learning_rate": 9.138461538461539e-07, | |
| "loss": 0.0001, | |
| "step": 18225 | |
| }, | |
| { | |
| "epoch": 106.73, | |
| "learning_rate": 9.010256410256411e-07, | |
| "loss": 0.0001, | |
| "step": 18250 | |
| }, | |
| { | |
| "epoch": 106.87, | |
| "learning_rate": 8.882051282051282e-07, | |
| "loss": 0.0001, | |
| "step": 18275 | |
| }, | |
| { | |
| "epoch": 107.02, | |
| "learning_rate": 8.753846153846154e-07, | |
| "loss": 0.0001, | |
| "step": 18300 | |
| }, | |
| { | |
| "epoch": 107.16, | |
| "learning_rate": 8.625641025641027e-07, | |
| "loss": 0.0001, | |
| "step": 18325 | |
| }, | |
| { | |
| "epoch": 107.31, | |
| "learning_rate": 8.497435897435897e-07, | |
| "loss": 0.0003, | |
| "step": 18350 | |
| }, | |
| { | |
| "epoch": 107.46, | |
| "learning_rate": 8.36923076923077e-07, | |
| "loss": 0.0001, | |
| "step": 18375 | |
| }, | |
| { | |
| "epoch": 107.6, | |
| "learning_rate": 8.241025641025642e-07, | |
| "loss": 0.0001, | |
| "step": 18400 | |
| }, | |
| { | |
| "epoch": 107.75, | |
| "learning_rate": 8.112820512820512e-07, | |
| "loss": 0.0001, | |
| "step": 18425 | |
| }, | |
| { | |
| "epoch": 107.89, | |
| "learning_rate": 7.984615384615385e-07, | |
| "loss": 0.0001, | |
| "step": 18450 | |
| }, | |
| { | |
| "epoch": 108.04, | |
| "learning_rate": 7.856410256410257e-07, | |
| "loss": 0.0001, | |
| "step": 18475 | |
| }, | |
| { | |
| "epoch": 108.19, | |
| "learning_rate": 7.728205128205128e-07, | |
| "loss": 0.0001, | |
| "step": 18500 | |
| }, | |
| { | |
| "epoch": 108.33, | |
| "learning_rate": 7.6e-07, | |
| "loss": 0.0001, | |
| "step": 18525 | |
| }, | |
| { | |
| "epoch": 108.48, | |
| "learning_rate": 7.471794871794873e-07, | |
| "loss": 0.0001, | |
| "step": 18550 | |
| }, | |
| { | |
| "epoch": 108.63, | |
| "learning_rate": 7.343589743589743e-07, | |
| "loss": 0.0001, | |
| "step": 18575 | |
| }, | |
| { | |
| "epoch": 108.77, | |
| "learning_rate": 7.215384615384616e-07, | |
| "loss": 0.0001, | |
| "step": 18600 | |
| }, | |
| { | |
| "epoch": 108.92, | |
| "learning_rate": 7.087179487179488e-07, | |
| "loss": 0.0, | |
| "step": 18625 | |
| }, | |
| { | |
| "epoch": 109.06, | |
| "learning_rate": 6.958974358974358e-07, | |
| "loss": 0.0001, | |
| "step": 18650 | |
| }, | |
| { | |
| "epoch": 109.21, | |
| "learning_rate": 6.830769230769231e-07, | |
| "loss": 0.0001, | |
| "step": 18675 | |
| }, | |
| { | |
| "epoch": 109.36, | |
| "learning_rate": 6.702564102564103e-07, | |
| "loss": 0.0001, | |
| "step": 18700 | |
| }, | |
| { | |
| "epoch": 109.5, | |
| "learning_rate": 6.574358974358976e-07, | |
| "loss": 0.0001, | |
| "step": 18725 | |
| }, | |
| { | |
| "epoch": 109.65, | |
| "learning_rate": 6.446153846153846e-07, | |
| "loss": 0.0, | |
| "step": 18750 | |
| }, | |
| { | |
| "epoch": 109.8, | |
| "learning_rate": 6.317948717948719e-07, | |
| "loss": 0.0001, | |
| "step": 18775 | |
| }, | |
| { | |
| "epoch": 109.94, | |
| "learning_rate": 6.18974358974359e-07, | |
| "loss": 0.0001, | |
| "step": 18800 | |
| }, | |
| { | |
| "epoch": 110.09, | |
| "learning_rate": 6.061538461538462e-07, | |
| "loss": 0.0001, | |
| "step": 18825 | |
| }, | |
| { | |
| "epoch": 110.23, | |
| "learning_rate": 5.933333333333334e-07, | |
| "loss": 0.0001, | |
| "step": 18850 | |
| }, | |
| { | |
| "epoch": 110.38, | |
| "learning_rate": 5.805128205128205e-07, | |
| "loss": 0.0, | |
| "step": 18875 | |
| }, | |
| { | |
| "epoch": 110.53, | |
| "learning_rate": 5.676923076923077e-07, | |
| "loss": 0.0001, | |
| "step": 18900 | |
| }, | |
| { | |
| "epoch": 110.67, | |
| "learning_rate": 5.548717948717949e-07, | |
| "loss": 0.0001, | |
| "step": 18925 | |
| }, | |
| { | |
| "epoch": 110.82, | |
| "learning_rate": 5.420512820512821e-07, | |
| "loss": 0.0, | |
| "step": 18950 | |
| }, | |
| { | |
| "epoch": 110.96, | |
| "learning_rate": 5.292307692307692e-07, | |
| "loss": 0.0001, | |
| "step": 18975 | |
| }, | |
| { | |
| "epoch": 111.11, | |
| "learning_rate": 5.164102564102565e-07, | |
| "loss": 0.0001, | |
| "step": 19000 | |
| }, | |
| { | |
| "epoch": 111.11, | |
| "eval_loss": 0.42425432801246643, | |
| "eval_runtime": 1427.3017, | |
| "eval_samples_per_second": 4.618, | |
| "eval_steps_per_second": 0.289, | |
| "eval_wer": 13.04479235930605, | |
| "step": 19000 | |
| }, | |
| { | |
| "epoch": 111.26, | |
| "learning_rate": 5.035897435897436e-07, | |
| "loss": 0.0, | |
| "step": 19025 | |
| }, | |
| { | |
| "epoch": 111.4, | |
| "learning_rate": 4.907692307692308e-07, | |
| "loss": 0.0001, | |
| "step": 19050 | |
| }, | |
| { | |
| "epoch": 111.55, | |
| "learning_rate": 4.77948717948718e-07, | |
| "loss": 0.0001, | |
| "step": 19075 | |
| }, | |
| { | |
| "epoch": 111.7, | |
| "learning_rate": 4.6512820512820514e-07, | |
| "loss": 0.0, | |
| "step": 19100 | |
| }, | |
| { | |
| "epoch": 111.84, | |
| "learning_rate": 4.523076923076924e-07, | |
| "loss": 0.0001, | |
| "step": 19125 | |
| }, | |
| { | |
| "epoch": 111.99, | |
| "learning_rate": 4.3948717948717953e-07, | |
| "loss": 0.0, | |
| "step": 19150 | |
| }, | |
| { | |
| "epoch": 112.13, | |
| "learning_rate": 4.266666666666667e-07, | |
| "loss": 0.0, | |
| "step": 19175 | |
| }, | |
| { | |
| "epoch": 112.28, | |
| "learning_rate": 4.138461538461539e-07, | |
| "loss": 0.0001, | |
| "step": 19200 | |
| }, | |
| { | |
| "epoch": 112.43, | |
| "learning_rate": 4.0102564102564107e-07, | |
| "loss": 0.0002, | |
| "step": 19225 | |
| }, | |
| { | |
| "epoch": 112.57, | |
| "learning_rate": 3.882051282051282e-07, | |
| "loss": 0.0001, | |
| "step": 19250 | |
| }, | |
| { | |
| "epoch": 112.72, | |
| "learning_rate": 3.7538461538461546e-07, | |
| "loss": 0.0001, | |
| "step": 19275 | |
| }, | |
| { | |
| "epoch": 112.87, | |
| "learning_rate": 3.625641025641026e-07, | |
| "loss": 0.0001, | |
| "step": 19300 | |
| }, | |
| { | |
| "epoch": 113.01, | |
| "learning_rate": 3.4974358974358974e-07, | |
| "loss": 0.0001, | |
| "step": 19325 | |
| }, | |
| { | |
| "epoch": 113.16, | |
| "learning_rate": 3.36923076923077e-07, | |
| "loss": 0.0001, | |
| "step": 19350 | |
| }, | |
| { | |
| "epoch": 113.3, | |
| "learning_rate": 3.2410256410256413e-07, | |
| "loss": 0.0, | |
| "step": 19375 | |
| }, | |
| { | |
| "epoch": 113.45, | |
| "learning_rate": 3.112820512820513e-07, | |
| "loss": 0.0001, | |
| "step": 19400 | |
| }, | |
| { | |
| "epoch": 113.6, | |
| "learning_rate": 2.9846153846153847e-07, | |
| "loss": 0.0, | |
| "step": 19425 | |
| }, | |
| { | |
| "epoch": 113.74, | |
| "learning_rate": 2.8564102564102566e-07, | |
| "loss": 0.0001, | |
| "step": 19450 | |
| }, | |
| { | |
| "epoch": 113.89, | |
| "learning_rate": 2.7282051282051286e-07, | |
| "loss": 0.0, | |
| "step": 19475 | |
| }, | |
| { | |
| "epoch": 114.04, | |
| "learning_rate": 2.6e-07, | |
| "loss": 0.0001, | |
| "step": 19500 | |
| }, | |
| { | |
| "epoch": 114.18, | |
| "learning_rate": 2.471794871794872e-07, | |
| "loss": 0.0, | |
| "step": 19525 | |
| }, | |
| { | |
| "epoch": 114.33, | |
| "learning_rate": 2.343589743589744e-07, | |
| "loss": 0.0, | |
| "step": 19550 | |
| }, | |
| { | |
| "epoch": 114.47, | |
| "learning_rate": 2.2153846153846153e-07, | |
| "loss": 0.0001, | |
| "step": 19575 | |
| }, | |
| { | |
| "epoch": 114.62, | |
| "learning_rate": 2.0871794871794873e-07, | |
| "loss": 0.0001, | |
| "step": 19600 | |
| }, | |
| { | |
| "epoch": 114.77, | |
| "learning_rate": 1.9589743589743592e-07, | |
| "loss": 0.0, | |
| "step": 19625 | |
| }, | |
| { | |
| "epoch": 114.91, | |
| "learning_rate": 1.8307692307692306e-07, | |
| "loss": 0.0001, | |
| "step": 19650 | |
| }, | |
| { | |
| "epoch": 115.06, | |
| "learning_rate": 1.7025641025641026e-07, | |
| "loss": 0.0, | |
| "step": 19675 | |
| }, | |
| { | |
| "epoch": 115.2, | |
| "learning_rate": 1.5743589743589745e-07, | |
| "loss": 0.0001, | |
| "step": 19700 | |
| }, | |
| { | |
| "epoch": 115.35, | |
| "learning_rate": 1.4461538461538462e-07, | |
| "loss": 0.0001, | |
| "step": 19725 | |
| }, | |
| { | |
| "epoch": 115.5, | |
| "learning_rate": 1.317948717948718e-07, | |
| "loss": 0.0, | |
| "step": 19750 | |
| }, | |
| { | |
| "epoch": 115.64, | |
| "learning_rate": 1.1897435897435898e-07, | |
| "loss": 0.0, | |
| "step": 19775 | |
| }, | |
| { | |
| "epoch": 115.79, | |
| "learning_rate": 1.0615384615384615e-07, | |
| "loss": 0.0001, | |
| "step": 19800 | |
| }, | |
| { | |
| "epoch": 115.94, | |
| "learning_rate": 9.333333333333335e-08, | |
| "loss": 0.0, | |
| "step": 19825 | |
| }, | |
| { | |
| "epoch": 116.08, | |
| "learning_rate": 8.051282051282052e-08, | |
| "loss": 0.0001, | |
| "step": 19850 | |
| }, | |
| { | |
| "epoch": 116.23, | |
| "learning_rate": 6.76923076923077e-08, | |
| "loss": 0.0003, | |
| "step": 19875 | |
| }, | |
| { | |
| "epoch": 116.37, | |
| "learning_rate": 5.4871794871794874e-08, | |
| "loss": 0.0001, | |
| "step": 19900 | |
| }, | |
| { | |
| "epoch": 116.52, | |
| "learning_rate": 4.2051282051282056e-08, | |
| "loss": 0.0001, | |
| "step": 19925 | |
| }, | |
| { | |
| "epoch": 116.67, | |
| "learning_rate": 2.9230769230769234e-08, | |
| "loss": 0.0, | |
| "step": 19950 | |
| }, | |
| { | |
| "epoch": 116.81, | |
| "learning_rate": 1.641025641025641e-08, | |
| "loss": 0.0, | |
| "step": 19975 | |
| }, | |
| { | |
| "epoch": 116.96, | |
| "learning_rate": 3.5897435897435903e-09, | |
| "loss": 0.0, | |
| "step": 20000 | |
| }, | |
| { | |
| "epoch": 116.96, | |
| "eval_loss": 0.4229063093662262, | |
| "eval_runtime": 1433.9128, | |
| "eval_samples_per_second": 4.597, | |
| "eval_steps_per_second": 0.287, | |
| "eval_wer": 13.167704366398677, | |
| "step": 20000 | |
| }, | |
| { | |
| "epoch": 116.96, | |
| "step": 20000, | |
| "total_flos": 2.711255422825263e+21, | |
| "train_loss": 0.023442575373809085, | |
| "train_runtime": 144057.814, | |
| "train_samples_per_second": 8.885, | |
| "train_steps_per_second": 0.139 | |
| } | |
| ], | |
| "logging_steps": 25, | |
| "max_steps": 20000, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 117, | |
| "save_steps": 1000, | |
| "total_flos": 2.711255422825263e+21, | |
| "train_batch_size": 32, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
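
The eval records in the log above report a word error rate roughly every 1000 steps; within this section the lowest value is about 13.04 WER at step 19000, after which the final evaluation at step 20000 ticks back up slightly. For readers who want to pull those numbers out of the state file rather than scan the table, the following is a minimal sketch. It assumes the log is saved in the standard Trainer layout as a file named `trainer_state.json` (the file name and path are assumptions, not taken from the log itself), and it uses only the fields shown above.

```python
# Minimal sketch (not part of the original log): summarize the eval checkpoints
# recorded in a Hugging Face Trainer state file. The path "trainer_state.json"
# is an assumption about where this log is stored on disk.
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Evaluation records are the log_history entries that carry an "eval_wer" field;
# the periodic training-loss entries do not have it.
evals = [rec for rec in state["log_history"] if "eval_wer" in rec]

for rec in evals:
    print(f'step {rec["step"]:>6}: '
          f'eval_loss={rec["eval_loss"]:.4f}  eval_wer={rec["eval_wer"]:.2f}')

# Lowest word error rate seen during training
# (about 13.04 WER at step 19000 in the log above).
best = min(evals, key=lambda rec: rec["eval_wer"])
print(f'best eval: step {best["step"]} with WER {best["eval_wer"]:.2f}')
```

Running this against the state file would print one line per eval step (11000, 12000, ..., 20000 in this run) followed by the step with the lowest WER, which is what the trainer uses when it keeps the best checkpoint.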