{
  "best_metric": 10.612013447284216,
  "best_model_checkpoint": "./checkpoint-17000",
  "epoch": 97.0873786407767,
  "eval_steps": 1000,
  "global_step": 20000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.12,
      "learning_rate": 5.000000000000001e-07,
      "loss": 0.9486,
      "step": 25
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 0.6167,
      "step": 50
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.5e-06,
      "loss": 0.3962,
      "step": 75
    },
    {
      "epoch": 0.49,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.3335,
      "step": 100
    },
    {
      "epoch": 0.61,
      "learning_rate": 2.5e-06,
      "loss": 0.3167,
      "step": 125
    },
    {
      "epoch": 0.73,
      "learning_rate": 3e-06,
      "loss": 0.2985,
      "step": 150
    },
    {
      "epoch": 0.85,
      "learning_rate": 3.5e-06,
      "loss": 0.2635,
      "step": 175
    },
    {
      "epoch": 0.97,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.2606,
      "step": 200
    },
    {
      "epoch": 1.09,
      "learning_rate": 4.5e-06,
      "loss": 0.2147,
      "step": 225
    },
    {
      "epoch": 1.21,
      "learning_rate": 5e-06,
      "loss": 0.2012,
      "step": 250
    },
    {
      "epoch": 1.33,
      "learning_rate": 5.500000000000001e-06,
      "loss": 0.1905,
      "step": 275
    },
    {
      "epoch": 1.46,
      "learning_rate": 6e-06,
      "loss": 0.1895,
      "step": 300
    },
    {
      "epoch": 1.58,
      "learning_rate": 6.5000000000000004e-06,
      "loss": 0.1846,
      "step": 325
    },
    {
      "epoch": 1.7,
      "learning_rate": 7e-06,
      "loss": 0.177,
      "step": 350
    },
    {
      "epoch": 1.82,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.1854,
      "step": 375
    },
    {
      "epoch": 1.94,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.1718,
      "step": 400
    },
    {
      "epoch": 2.06,
      "learning_rate": 8.5e-06,
      "loss": 0.1343,
      "step": 425
    },
    {
      "epoch": 2.18,
      "learning_rate": 9e-06,
      "loss": 0.1114,
      "step": 450
    },
    {
      "epoch": 2.31,
      "learning_rate": 9.5e-06,
      "loss": 0.1049,
      "step": 475
    },
    {
      "epoch": 2.43,
      "learning_rate": 1e-05,
      "loss": 0.105,
      "step": 500
    },
    {
      "epoch": 2.55,
      "learning_rate": 9.987179487179488e-06,
      "loss": 0.1093,
      "step": 525
    },
    {
      "epoch": 2.67,
      "learning_rate": 9.974358974358974e-06,
      "loss": 0.1059,
      "step": 550
    },
    {
      "epoch": 2.79,
      "learning_rate": 9.961538461538463e-06,
      "loss": 0.114,
      "step": 575
    },
    {
      "epoch": 2.91,
      "learning_rate": 9.94871794871795e-06,
      "loss": 0.1051,
      "step": 600
    },
    {
      "epoch": 3.03,
      "learning_rate": 9.935897435897437e-06,
      "loss": 0.0921,
      "step": 625
    },
    {
      "epoch": 3.16,
      "learning_rate": 9.923076923076923e-06,
      "loss": 0.0565,
      "step": 650
    },
    {
      "epoch": 3.28,
      "learning_rate": 9.910256410256412e-06,
      "loss": 0.0554,
      "step": 675
    },
    {
      "epoch": 3.4,
      "learning_rate": 9.897435897435899e-06,
      "loss": 0.0575,
      "step": 700
    },
    {
      "epoch": 3.52,
      "learning_rate": 9.884615384615386e-06,
      "loss": 0.0588,
      "step": 725
    },
    {
      "epoch": 3.64,
      "learning_rate": 9.871794871794872e-06,
      "loss": 0.0558,
      "step": 750
    },
    {
      "epoch": 3.76,
      "learning_rate": 9.858974358974361e-06,
      "loss": 0.0601,
      "step": 775
    },
    {
      "epoch": 3.88,
      "learning_rate": 9.846153846153848e-06,
      "loss": 0.0596,
      "step": 800
    },
    {
      "epoch": 4.0,
      "learning_rate": 9.833333333333333e-06,
      "loss": 0.0569,
      "step": 825
    },
    {
      "epoch": 4.13,
      "learning_rate": 9.820512820512821e-06,
      "loss": 0.0311,
      "step": 850
    },
    {
      "epoch": 4.25,
      "learning_rate": 9.807692307692308e-06,
      "loss": 0.0293,
      "step": 875
    },
    {
      "epoch": 4.37,
      "learning_rate": 9.794871794871795e-06,
      "loss": 0.035,
      "step": 900
    },
    {
      "epoch": 4.49,
      "learning_rate": 9.782051282051282e-06,
      "loss": 0.0316,
      "step": 925
    },
    {
      "epoch": 4.61,
      "learning_rate": 9.76923076923077e-06,
      "loss": 0.0313,
      "step": 950
    },
    {
      "epoch": 4.73,
      "learning_rate": 9.756410256410257e-06,
      "loss": 0.0346,
      "step": 975
    },
    {
      "epoch": 4.85,
      "learning_rate": 9.743589743589744e-06,
      "loss": 0.0326,
      "step": 1000
    },
    {
      "epoch": 4.85,
      "eval_loss": 0.22997283935546875,
      "eval_runtime": 1414.2169,
      "eval_samples_per_second": 4.661,
      "eval_steps_per_second": 0.291,
      "eval_wer": 13.327797804690347,
      "step": 1000
    },
    {
      "epoch": 4.98,
      "learning_rate": 9.730769230769231e-06,
      "loss": 0.0351,
      "step": 1025
    },
    {
      "epoch": 5.1,
      "learning_rate": 9.71794871794872e-06,
      "loss": 0.0214,
      "step": 1050
    },
    {
      "epoch": 5.22,
      "learning_rate": 9.705128205128206e-06,
      "loss": 0.0172,
      "step": 1075
    },
    {
      "epoch": 5.34,
      "learning_rate": 9.692307692307693e-06,
      "loss": 0.0174,
      "step": 1100
    },
    {
      "epoch": 5.46,
      "learning_rate": 9.67948717948718e-06,
      "loss": 0.0206,
      "step": 1125
    },
    {
      "epoch": 5.58,
      "learning_rate": 9.666666666666667e-06,
      "loss": 0.0194,
      "step": 1150
    },
    {
      "epoch": 5.7,
      "learning_rate": 9.653846153846155e-06,
      "loss": 0.0215,
      "step": 1175
    },
    {
      "epoch": 5.83,
      "learning_rate": 9.641025641025642e-06,
      "loss": 0.0209,
      "step": 1200
    },
    {
      "epoch": 5.95,
      "learning_rate": 9.628205128205129e-06,
      "loss": 0.0211,
      "step": 1225
    },
    {
      "epoch": 6.07,
      "learning_rate": 9.615384615384616e-06,
      "loss": 0.0164,
      "step": 1250
    },
    {
      "epoch": 6.19,
      "learning_rate": 9.602564102564104e-06,
      "loss": 0.0117,
      "step": 1275
    },
    {
      "epoch": 6.31,
      "learning_rate": 9.589743589743591e-06,
      "loss": 0.0133,
      "step": 1300
    },
    {
      "epoch": 6.43,
      "learning_rate": 9.576923076923078e-06,
      "loss": 0.0113,
      "step": 1325
    },
    {
      "epoch": 6.55,
      "learning_rate": 9.564102564102565e-06,
      "loss": 0.0109,
      "step": 1350
    },
    {
      "epoch": 6.67,
      "learning_rate": 9.551282051282053e-06,
      "loss": 0.0121,
      "step": 1375
    },
    {
      "epoch": 6.8,
      "learning_rate": 9.53846153846154e-06,
      "loss": 0.0109,
      "step": 1400
    },
    {
      "epoch": 6.92,
      "learning_rate": 9.525641025641025e-06,
      "loss": 0.0112,
      "step": 1425
    },
    {
      "epoch": 7.04,
      "learning_rate": 9.512820512820514e-06,
      "loss": 0.0115,
      "step": 1450
    },
    {
      "epoch": 7.16,
      "learning_rate": 9.5e-06,
      "loss": 0.0078,
      "step": 1475
    },
    {
      "epoch": 7.28,
      "learning_rate": 9.487179487179487e-06,
      "loss": 0.0065,
      "step": 1500
    },
    {
      "epoch": 7.4,
      "learning_rate": 9.474358974358974e-06,
      "loss": 0.008,
      "step": 1525
    },
    {
      "epoch": 7.52,
      "learning_rate": 9.461538461538463e-06,
      "loss": 0.0097,
      "step": 1550
    },
    {
      "epoch": 7.65,
      "learning_rate": 9.44871794871795e-06,
      "loss": 0.0084,
      "step": 1575
    },
    {
      "epoch": 7.77,
      "learning_rate": 9.435897435897436e-06,
      "loss": 0.0083,
      "step": 1600
    },
    {
      "epoch": 7.89,
      "learning_rate": 9.423076923076923e-06,
      "loss": 0.0082,
      "step": 1625
    },
    {
      "epoch": 8.01,
      "learning_rate": 9.410256410256412e-06,
      "loss": 0.0077,
      "step": 1650
    },
    {
      "epoch": 8.13,
      "learning_rate": 9.397435897435899e-06,
      "loss": 0.0051,
      "step": 1675
    },
    {
      "epoch": 8.25,
      "learning_rate": 9.384615384615385e-06,
      "loss": 0.0043,
      "step": 1700
    },
    {
      "epoch": 8.37,
      "learning_rate": 9.371794871794872e-06,
      "loss": 0.0045,
      "step": 1725
    },
    {
      "epoch": 8.5,
      "learning_rate": 9.358974358974359e-06,
      "loss": 0.0047,
      "step": 1750
    },
    {
      "epoch": 8.62,
      "learning_rate": 9.346153846153847e-06,
      "loss": 0.007,
      "step": 1775
    },
    {
      "epoch": 8.74,
      "learning_rate": 9.333333333333334e-06,
      "loss": 0.0058,
      "step": 1800
    },
    {
      "epoch": 8.86,
      "learning_rate": 9.320512820512821e-06,
      "loss": 0.0063,
      "step": 1825
    },
    {
      "epoch": 8.98,
      "learning_rate": 9.307692307692308e-06,
      "loss": 0.0066,
      "step": 1850
    },
    {
      "epoch": 9.1,
      "learning_rate": 9.294871794871796e-06,
      "loss": 0.005,
      "step": 1875
    },
    {
      "epoch": 9.22,
      "learning_rate": 9.282051282051283e-06,
      "loss": 0.0044,
      "step": 1900
    },
    {
      "epoch": 9.34,
      "learning_rate": 9.26923076923077e-06,
      "loss": 0.0036,
      "step": 1925
    },
    {
      "epoch": 9.47,
      "learning_rate": 9.256410256410257e-06,
      "loss": 0.0041,
      "step": 1950
    },
    {
      "epoch": 9.59,
      "learning_rate": 9.243589743589745e-06,
      "loss": 0.0041,
      "step": 1975
    },
    {
      "epoch": 9.71,
      "learning_rate": 9.230769230769232e-06,
      "loss": 0.004,
      "step": 2000
    },
    {
      "epoch": 9.71,
      "eval_loss": 0.27230334281921387,
      "eval_runtime": 1403.6044,
      "eval_samples_per_second": 4.696,
      "eval_steps_per_second": 0.294,
      "eval_wer": 12.203815464376849,
      "step": 2000
    },
    {
      "epoch": 9.83,
      "learning_rate": 9.217948717948717e-06,
      "loss": 0.0057,
      "step": 2025
    },
    {
      "epoch": 9.95,
      "learning_rate": 9.205128205128206e-06,
      "loss": 0.0063,
      "step": 2050
    },
    {
      "epoch": 10.07,
      "learning_rate": 9.192307692307693e-06,
      "loss": 0.0049,
      "step": 2075
    },
    {
      "epoch": 10.19,
      "learning_rate": 9.17948717948718e-06,
      "loss": 0.0037,
      "step": 2100
    },
    {
      "epoch": 10.32,
      "learning_rate": 9.166666666666666e-06,
      "loss": 0.0026,
      "step": 2125
    },
    {
      "epoch": 10.44,
      "learning_rate": 9.153846153846155e-06,
      "loss": 0.0027,
      "step": 2150
    },
    {
      "epoch": 10.56,
      "learning_rate": 9.141025641025642e-06,
      "loss": 0.0036,
      "step": 2175
    },
    {
      "epoch": 10.68,
      "learning_rate": 9.128205128205129e-06,
      "loss": 0.0037,
      "step": 2200
    },
    {
      "epoch": 10.8,
      "learning_rate": 9.115384615384615e-06,
      "loss": 0.0043,
      "step": 2225
    },
    {
      "epoch": 10.92,
      "learning_rate": 9.102564102564104e-06,
      "loss": 0.0031,
      "step": 2250
    },
    {
      "epoch": 11.04,
      "learning_rate": 9.08974358974359e-06,
      "loss": 0.004,
      "step": 2275
    },
    {
      "epoch": 11.17,
      "learning_rate": 9.076923076923078e-06,
      "loss": 0.0028,
      "step": 2300
    },
    {
      "epoch": 11.29,
      "learning_rate": 9.064102564102564e-06,
      "loss": 0.0036,
      "step": 2325
    },
    {
      "epoch": 11.41,
      "learning_rate": 9.051282051282051e-06,
      "loss": 0.0038,
      "step": 2350
    },
    {
      "epoch": 11.53,
      "learning_rate": 9.03846153846154e-06,
      "loss": 0.0036,
      "step": 2375
    },
    {
      "epoch": 11.65,
      "learning_rate": 9.025641025641027e-06,
      "loss": 0.0046,
      "step": 2400
    },
    {
      "epoch": 11.77,
      "learning_rate": 9.012820512820513e-06,
      "loss": 0.0041,
      "step": 2425
    },
    {
      "epoch": 11.89,
      "learning_rate": 9e-06,
      "loss": 0.0044,
      "step": 2450
    },
    {
      "epoch": 12.01,
      "learning_rate": 8.987179487179489e-06,
      "loss": 0.0044,
      "step": 2475
    },
    {
      "epoch": 12.14,
      "learning_rate": 8.974358974358976e-06,
      "loss": 0.0031,
      "step": 2500
    },
    {
      "epoch": 12.26,
      "learning_rate": 8.961538461538462e-06,
      "loss": 0.0026,
      "step": 2525
    },
    {
      "epoch": 12.38,
      "learning_rate": 8.94871794871795e-06,
      "loss": 0.0034,
      "step": 2550
    },
    {
      "epoch": 12.5,
      "learning_rate": 8.935897435897438e-06,
      "loss": 0.0037,
      "step": 2575
    },
    {
      "epoch": 12.62,
      "learning_rate": 8.923076923076925e-06,
      "loss": 0.0037,
      "step": 2600
    },
    {
      "epoch": 12.74,
      "learning_rate": 8.910256410256411e-06,
      "loss": 0.0033,
      "step": 2625
    },
    {
      "epoch": 12.86,
      "learning_rate": 8.897435897435898e-06,
      "loss": 0.0029,
      "step": 2650
    },
    {
      "epoch": 12.99,
      "learning_rate": 8.884615384615385e-06,
      "loss": 0.0048,
      "step": 2675
    },
    {
      "epoch": 13.11,
      "learning_rate": 8.871794871794872e-06,
      "loss": 0.0035,
      "step": 2700
    },
    {
      "epoch": 13.23,
      "learning_rate": 8.858974358974359e-06,
      "loss": 0.0021,
      "step": 2725
    },
    {
      "epoch": 13.35,
      "learning_rate": 8.846153846153847e-06,
      "loss": 0.0037,
      "step": 2750
    },
    {
      "epoch": 13.47,
      "learning_rate": 8.833333333333334e-06,
      "loss": 0.0049,
      "step": 2775
    },
    {
      "epoch": 13.59,
      "learning_rate": 8.820512820512821e-06,
      "loss": 0.0039,
      "step": 2800
    },
    {
      "epoch": 13.71,
      "learning_rate": 8.807692307692308e-06,
      "loss": 0.0044,
      "step": 2825
    },
    {
      "epoch": 13.83,
      "learning_rate": 8.794871794871796e-06,
      "loss": 0.0055,
      "step": 2850
    },
    {
      "epoch": 13.96,
      "learning_rate": 8.782051282051283e-06,
      "loss": 0.0046,
      "step": 2875
    },
    {
      "epoch": 14.08,
      "learning_rate": 8.76923076923077e-06,
      "loss": 0.0043,
      "step": 2900
    },
    {
      "epoch": 14.2,
      "learning_rate": 8.756410256410257e-06,
      "loss": 0.0039,
      "step": 2925
    },
    {
      "epoch": 14.32,
      "learning_rate": 8.743589743589743e-06,
      "loss": 0.0042,
      "step": 2950
    },
    {
      "epoch": 14.44,
      "learning_rate": 8.730769230769232e-06,
      "loss": 0.0048,
      "step": 2975
    },
    {
      "epoch": 14.56,
      "learning_rate": 8.717948717948719e-06,
      "loss": 0.0058,
      "step": 3000
    },
    {
      "epoch": 14.56,
      "eval_loss": 0.2770913541316986,
      "eval_runtime": 1404.2904,
      "eval_samples_per_second": 4.693,
      "eval_steps_per_second": 0.293,
      "eval_wer": 12.424561545627608,
      "step": 3000
    },
    {
      "epoch": 14.68,
      "learning_rate": 8.705128205128206e-06,
      "loss": 0.0065,
      "step": 3025
    },
    {
      "epoch": 14.81,
      "learning_rate": 8.692307692307692e-06,
      "loss": 0.0071,
      "step": 3050
    },
    {
      "epoch": 14.93,
      "learning_rate": 8.679487179487181e-06,
      "loss": 0.0058,
      "step": 3075
    },
    {
      "epoch": 15.05,
      "learning_rate": 8.666666666666668e-06,
      "loss": 0.0048,
      "step": 3100
    },
    {
      "epoch": 15.17,
      "learning_rate": 8.653846153846155e-06,
      "loss": 0.0042,
      "step": 3125
    },
    {
      "epoch": 15.29,
      "learning_rate": 8.641025641025641e-06,
      "loss": 0.0061,
      "step": 3150
    },
    {
      "epoch": 15.41,
      "learning_rate": 8.62820512820513e-06,
      "loss": 0.0041,
      "step": 3175
    },
    {
      "epoch": 15.53,
      "learning_rate": 8.615384615384617e-06,
      "loss": 0.0038,
      "step": 3200
    },
    {
      "epoch": 15.66,
      "learning_rate": 8.602564102564104e-06,
      "loss": 0.0051,
      "step": 3225
    },
    {
      "epoch": 15.78,
      "learning_rate": 8.58974358974359e-06,
      "loss": 0.0047,
      "step": 3250
    },
    {
      "epoch": 15.9,
      "learning_rate": 8.576923076923077e-06,
      "loss": 0.0037,
      "step": 3275
    },
    {
      "epoch": 16.02,
      "learning_rate": 8.564102564102564e-06,
      "loss": 0.0034,
      "step": 3300
    },
    {
      "epoch": 16.14,
      "learning_rate": 8.551282051282051e-06,
      "loss": 0.0035,
      "step": 3325
    },
    {
      "epoch": 16.26,
      "learning_rate": 8.53846153846154e-06,
      "loss": 0.0028,
      "step": 3350
    },
    {
      "epoch": 16.38,
      "learning_rate": 8.525641025641026e-06,
      "loss": 0.0024,
      "step": 3375
    },
    {
      "epoch": 16.5,
      "learning_rate": 8.512820512820513e-06,
      "loss": 0.002,
      "step": 3400
    },
    {
      "epoch": 16.63,
      "learning_rate": 8.5e-06,
      "loss": 0.0035,
      "step": 3425
    },
    {
      "epoch": 16.75,
      "learning_rate": 8.487179487179488e-06,
      "loss": 0.0025,
      "step": 3450
    },
    {
      "epoch": 16.87,
      "learning_rate": 8.474358974358975e-06,
      "loss": 0.0036,
      "step": 3475
    },
    {
      "epoch": 16.99,
      "learning_rate": 8.461538461538462e-06,
      "loss": 0.0043,
      "step": 3500
    },
    {
      "epoch": 17.11,
      "learning_rate": 8.448717948717949e-06,
      "loss": 0.0032,
      "step": 3525
    },
    {
      "epoch": 17.23,
      "learning_rate": 8.435897435897436e-06,
      "loss": 0.0031,
      "step": 3550
    },
    {
      "epoch": 17.35,
      "learning_rate": 8.423076923076924e-06,
      "loss": 0.003,
      "step": 3575
    },
    {
      "epoch": 17.48,
      "learning_rate": 8.410256410256411e-06,
      "loss": 0.0026,
      "step": 3600
    },
    {
      "epoch": 17.6,
      "learning_rate": 8.397435897435898e-06,
      "loss": 0.0027,
      "step": 3625
    },
    {
      "epoch": 17.72,
      "learning_rate": 8.384615384615385e-06,
      "loss": 0.0022,
      "step": 3650
    },
    {
      "epoch": 17.84,
      "learning_rate": 8.371794871794873e-06,
      "loss": 0.0024,
      "step": 3675
    },
    {
      "epoch": 17.96,
      "learning_rate": 8.35897435897436e-06,
      "loss": 0.0017,
      "step": 3700
    },
    {
      "epoch": 18.08,
      "learning_rate": 8.346153846153847e-06,
      "loss": 0.0017,
      "step": 3725
    },
    {
      "epoch": 18.2,
      "learning_rate": 8.333333333333334e-06,
      "loss": 0.0019,
      "step": 3750
    },
    {
      "epoch": 18.33,
      "learning_rate": 8.320512820512822e-06,
      "loss": 0.0016,
      "step": 3775
    },
    {
      "epoch": 18.45,
      "learning_rate": 8.307692307692309e-06,
      "loss": 0.0019,
      "step": 3800
    },
    {
      "epoch": 18.57,
      "learning_rate": 8.294871794871796e-06,
      "loss": 0.0015,
      "step": 3825
    },
    {
      "epoch": 18.69,
      "learning_rate": 8.282051282051283e-06,
      "loss": 0.0027,
      "step": 3850
    },
    {
      "epoch": 18.81,
      "learning_rate": 8.26923076923077e-06,
      "loss": 0.0034,
      "step": 3875
    },
    {
      "epoch": 18.93,
      "learning_rate": 8.256410256410256e-06,
      "loss": 0.0034,
      "step": 3900
    },
    {
      "epoch": 19.05,
      "learning_rate": 8.243589743589743e-06,
      "loss": 0.0047,
      "step": 3925
    },
    {
      "epoch": 19.17,
      "learning_rate": 8.230769230769232e-06,
      "loss": 0.0034,
      "step": 3950
    },
    {
      "epoch": 19.3,
      "learning_rate": 8.217948717948719e-06,
      "loss": 0.0027,
      "step": 3975
    },
    {
      "epoch": 19.42,
      "learning_rate": 8.205128205128205e-06,
      "loss": 0.003,
      "step": 4000
    },
    {
      "epoch": 19.42,
      "eval_loss": 0.28377285599708557,
      "eval_runtime": 1406.4432,
      "eval_samples_per_second": 4.686,
      "eval_steps_per_second": 0.293,
      "eval_wer": 12.211916238000729,
      "step": 4000
    },
    {
      "epoch": 19.54,
      "learning_rate": 8.192307692307692e-06,
      "loss": 0.0033,
      "step": 4025
    },
    {
      "epoch": 19.66,
      "learning_rate": 8.17948717948718e-06,
      "loss": 0.0039,
      "step": 4050
    },
    {
      "epoch": 19.78,
      "learning_rate": 8.166666666666668e-06,
      "loss": 0.0047,
      "step": 4075
    },
    {
      "epoch": 19.9,
      "learning_rate": 8.153846153846154e-06,
      "loss": 0.0058,
      "step": 4100
    },
    {
      "epoch": 20.02,
      "learning_rate": 8.141025641025641e-06,
      "loss": 0.0056,
      "step": 4125
    },
    {
      "epoch": 20.15,
      "learning_rate": 8.12820512820513e-06,
      "loss": 0.0043,
      "step": 4150
    },
    {
      "epoch": 20.27,
      "learning_rate": 8.115384615384617e-06,
      "loss": 0.0036,
      "step": 4175
    },
    {
      "epoch": 20.39,
      "learning_rate": 8.102564102564103e-06,
      "loss": 0.0034,
      "step": 4200
    },
    {
      "epoch": 20.51,
      "learning_rate": 8.08974358974359e-06,
      "loss": 0.0046,
      "step": 4225
    },
    {
      "epoch": 20.63,
      "learning_rate": 8.076923076923077e-06,
      "loss": 0.0037,
      "step": 4250
    },
    {
      "epoch": 20.75,
      "learning_rate": 8.064102564102566e-06,
      "loss": 0.0035,
      "step": 4275
    },
    {
      "epoch": 20.87,
      "learning_rate": 8.051282051282052e-06,
      "loss": 0.0041,
      "step": 4300
    },
    {
      "epoch": 21.0,
      "learning_rate": 8.03846153846154e-06,
      "loss": 0.0041,
      "step": 4325
    },
    {
      "epoch": 21.12,
      "learning_rate": 8.025641025641026e-06,
      "loss": 0.0025,
      "step": 4350
    },
    {
      "epoch": 21.24,
      "learning_rate": 8.012820512820515e-06,
      "loss": 0.0019,
      "step": 4375
    },
    {
      "epoch": 21.36,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.0034,
      "step": 4400
    },
    {
      "epoch": 21.48,
      "learning_rate": 7.987179487179488e-06,
      "loss": 0.0028,
      "step": 4425
    },
    {
      "epoch": 21.6,
      "learning_rate": 7.974358974358975e-06,
      "loss": 0.0027,
      "step": 4450
    },
    {
      "epoch": 21.72,
      "learning_rate": 7.961538461538462e-06,
      "loss": 0.0023,
      "step": 4475
    },
    {
      "epoch": 21.84,
      "learning_rate": 7.948717948717949e-06,
      "loss": 0.0033,
      "step": 4500
    },
    {
      "epoch": 21.97,
      "learning_rate": 7.935897435897435e-06,
      "loss": 0.0024,
      "step": 4525
    },
    {
      "epoch": 22.09,
      "learning_rate": 7.923076923076924e-06,
      "loss": 0.0023,
      "step": 4550
    },
    {
      "epoch": 22.21,
      "learning_rate": 7.91025641025641e-06,
      "loss": 0.0023,
      "step": 4575
    },
    {
      "epoch": 22.33,
      "learning_rate": 7.897435897435898e-06,
      "loss": 0.0026,
      "step": 4600
    },
    {
      "epoch": 22.45,
      "learning_rate": 7.884615384615384e-06,
      "loss": 0.0033,
      "step": 4625
    },
    {
      "epoch": 22.57,
      "learning_rate": 7.871794871794873e-06,
      "loss": 0.0034,
      "step": 4650
    },
    {
      "epoch": 22.69,
      "learning_rate": 7.85897435897436e-06,
      "loss": 0.0034,
      "step": 4675
    },
    {
      "epoch": 22.82,
      "learning_rate": 7.846153846153847e-06,
      "loss": 0.004,
      "step": 4700
    },
    {
      "epoch": 22.94,
      "learning_rate": 7.833333333333333e-06,
      "loss": 0.0047,
      "step": 4725
    },
    {
      "epoch": 23.06,
      "learning_rate": 7.820512820512822e-06,
      "loss": 0.0051,
      "step": 4750
    },
    {
      "epoch": 23.18,
      "learning_rate": 7.807692307692309e-06,
      "loss": 0.0024,
      "step": 4775
    },
    {
      "epoch": 23.3,
      "learning_rate": 7.794871794871796e-06,
      "loss": 0.0028,
      "step": 4800
    },
    {
      "epoch": 23.42,
      "learning_rate": 7.782051282051282e-06,
      "loss": 0.0037,
      "step": 4825
    },
    {
      "epoch": 23.54,
      "learning_rate": 7.76923076923077e-06,
      "loss": 0.0029,
      "step": 4850
    },
    {
      "epoch": 23.67,
      "learning_rate": 7.756410256410258e-06,
      "loss": 0.0028,
      "step": 4875
    },
    {
      "epoch": 23.79,
      "learning_rate": 7.743589743589745e-06,
      "loss": 0.0023,
      "step": 4900
    },
    {
      "epoch": 23.91,
      "learning_rate": 7.730769230769231e-06,
      "loss": 0.0021,
      "step": 4925
    },
    {
      "epoch": 24.03,
      "learning_rate": 7.717948717948718e-06,
      "loss": 0.0021,
      "step": 4950
    },
    {
      "epoch": 24.15,
      "learning_rate": 7.705128205128207e-06,
      "loss": 0.0017,
      "step": 4975
    },
    {
      "epoch": 24.27,
      "learning_rate": 7.692307692307694e-06,
      "loss": 0.003,
      "step": 5000
    },
    {
      "epoch": 24.27,
      "eval_loss": 0.27398020029067993,
      "eval_runtime": 1401.4636,
      "eval_samples_per_second": 4.703,
      "eval_steps_per_second": 0.294,
      "eval_wer": 11.77042407549921,
      "step": 5000
    },
    {
      "epoch": 24.39,
      "learning_rate": 7.67948717948718e-06,
      "loss": 0.0023,
      "step": 5025
    },
    {
      "epoch": 24.51,
      "learning_rate": 7.666666666666667e-06,
      "loss": 0.0022,
      "step": 5050
    },
    {
      "epoch": 24.64,
      "learning_rate": 7.653846153846154e-06,
      "loss": 0.0027,
      "step": 5075
    },
    {
      "epoch": 24.76,
      "learning_rate": 7.641025641025641e-06,
      "loss": 0.003,
      "step": 5100
    },
    {
      "epoch": 24.88,
      "learning_rate": 7.6282051282051286e-06,
      "loss": 0.0025,
      "step": 5125
    },
    {
      "epoch": 25.0,
      "learning_rate": 7.615384615384615e-06,
      "loss": 0.0028,
      "step": 5150
    },
    {
      "epoch": 25.12,
      "learning_rate": 7.602564102564103e-06,
      "loss": 0.0035,
      "step": 5175
    },
    {
      "epoch": 25.24,
      "learning_rate": 7.58974358974359e-06,
      "loss": 0.0023,
      "step": 5200
    },
    {
      "epoch": 25.36,
      "learning_rate": 7.5769230769230775e-06,
      "loss": 0.0019,
      "step": 5225
    },
    {
      "epoch": 25.49,
      "learning_rate": 7.564102564102564e-06,
      "loss": 0.0018,
      "step": 5250
    },
    {
      "epoch": 25.61,
      "learning_rate": 7.551282051282052e-06,
      "loss": 0.0024,
      "step": 5275
    },
    {
      "epoch": 25.73,
      "learning_rate": 7.538461538461539e-06,
      "loss": 0.002,
      "step": 5300
    },
    {
      "epoch": 25.85,
      "learning_rate": 7.5256410256410265e-06,
      "loss": 0.0019,
      "step": 5325
    },
    {
      "epoch": 25.97,
      "learning_rate": 7.512820512820513e-06,
      "loss": 0.0017,
      "step": 5350
    },
    {
      "epoch": 26.09,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.002,
      "step": 5375
    },
    {
      "epoch": 26.21,
      "learning_rate": 7.487179487179488e-06,
      "loss": 0.0029,
      "step": 5400
    },
    {
      "epoch": 26.33,
      "learning_rate": 7.474358974358975e-06,
      "loss": 0.0022,
      "step": 5425
    },
    {
      "epoch": 26.46,
      "learning_rate": 7.461538461538462e-06,
      "loss": 0.0019,
      "step": 5450
    },
    {
      "epoch": 26.58,
      "learning_rate": 7.448717948717949e-06,
      "loss": 0.0025,
      "step": 5475
    },
    {
      "epoch": 26.7,
      "learning_rate": 7.435897435897437e-06,
      "loss": 0.0017,
      "step": 5500
    },
    {
      "epoch": 26.82,
      "learning_rate": 7.423076923076924e-06,
      "loss": 0.0025,
      "step": 5525
    },
    {
      "epoch": 26.94,
      "learning_rate": 7.410256410256411e-06,
      "loss": 0.0031,
      "step": 5550
    },
    {
      "epoch": 27.06,
      "learning_rate": 7.397435897435898e-06,
      "loss": 0.0022,
      "step": 5575
    },
    {
      "epoch": 27.18,
      "learning_rate": 7.384615384615386e-06,
      "loss": 0.0018,
      "step": 5600
    },
    {
      "epoch": 27.31,
      "learning_rate": 7.371794871794873e-06,
      "loss": 0.0017,
      "step": 5625
    },
    {
      "epoch": 27.43,
      "learning_rate": 7.35897435897436e-06,
      "loss": 0.0017,
      "step": 5650
    },
    {
      "epoch": 27.55,
      "learning_rate": 7.346153846153847e-06,
      "loss": 0.0013,
      "step": 5675
    },
    {
      "epoch": 27.67,
      "learning_rate": 7.333333333333333e-06,
      "loss": 0.001,
      "step": 5700
    },
    {
      "epoch": 27.79,
      "learning_rate": 7.320512820512821e-06,
      "loss": 0.0015,
      "step": 5725
    },
    {
      "epoch": 27.91,
      "learning_rate": 7.307692307692308e-06,
      "loss": 0.0022,
      "step": 5750
    },
    {
      "epoch": 28.03,
      "learning_rate": 7.294871794871795e-06,
      "loss": 0.0016,
      "step": 5775
    },
    {
      "epoch": 28.16,
      "learning_rate": 7.282051282051282e-06,
      "loss": 0.0017,
      "step": 5800
    },
    {
      "epoch": 28.28,
      "learning_rate": 7.26923076923077e-06,
      "loss": 0.0017,
      "step": 5825
    },
    {
      "epoch": 28.4,
      "learning_rate": 7.256410256410257e-06,
      "loss": 0.0015,
      "step": 5850
    },
    {
      "epoch": 28.52,
      "learning_rate": 7.243589743589744e-06,
      "loss": 0.0025,
      "step": 5875
    },
    {
      "epoch": 28.64,
      "learning_rate": 7.230769230769231e-06,
      "loss": 0.002,
      "step": 5900
    },
    {
      "epoch": 28.76,
      "learning_rate": 7.217948717948719e-06,
      "loss": 0.0023,
      "step": 5925
    },
    {
      "epoch": 28.88,
      "learning_rate": 7.205128205128206e-06,
      "loss": 0.0021,
      "step": 5950
    },
    {
      "epoch": 29.0,
      "learning_rate": 7.192307692307693e-06,
      "loss": 0.002,
      "step": 5975
    },
    {
      "epoch": 29.13,
      "learning_rate": 7.17948717948718e-06,
      "loss": 0.0014,
      "step": 6000
    },
    {
      "epoch": 29.13,
      "eval_loss": 0.29363375902175903,
      "eval_runtime": 1399.0954,
      "eval_samples_per_second": 4.711,
      "eval_steps_per_second": 0.294,
      "eval_wer": 11.543602414030541,
      "step": 6000
    },
    {
      "epoch": 29.25,
      "learning_rate": 7.166666666666667e-06,
      "loss": 0.0013,
      "step": 6025
    },
    {
      "epoch": 29.37,
      "learning_rate": 7.153846153846155e-06,
      "loss": 0.001,
      "step": 6050
    },
    {
      "epoch": 29.49,
      "learning_rate": 7.1410256410256414e-06,
      "loss": 0.0012,
      "step": 6075
    },
    {
      "epoch": 29.61,
      "learning_rate": 7.128205128205129e-06,
      "loss": 0.0016,
      "step": 6100
    },
    {
      "epoch": 29.73,
      "learning_rate": 7.115384615384616e-06,
      "loss": 0.0024,
      "step": 6125
    },
    {
      "epoch": 29.85,
      "learning_rate": 7.102564102564104e-06,
      "loss": 0.0019,
      "step": 6150
    },
    {
      "epoch": 29.98,
      "learning_rate": 7.0897435897435904e-06,
      "loss": 0.0035,
      "step": 6175
    },
    {
      "epoch": 30.1,
      "learning_rate": 7.076923076923078e-06,
      "loss": 0.0034,
      "step": 6200
    },
    {
      "epoch": 30.22,
      "learning_rate": 7.0646153846153855e-06,
      "loss": 0.0027,
      "step": 6225
    },
    {
      "epoch": 30.34,
      "learning_rate": 7.051794871794872e-06,
      "loss": 0.0022,
      "step": 6250
    },
    {
      "epoch": 30.46,
      "learning_rate": 7.03897435897436e-06,
      "loss": 0.0029,
      "step": 6275
    },
    {
      "epoch": 30.58,
      "learning_rate": 7.026153846153847e-06,
      "loss": 0.0023,
      "step": 6300
    },
    {
      "epoch": 30.7,
      "learning_rate": 7.0133333333333345e-06,
      "loss": 0.0015,
      "step": 6325
    },
    {
      "epoch": 30.83,
      "learning_rate": 7.000512820512821e-06,
      "loss": 0.0032,
      "step": 6350
    },
    {
      "epoch": 30.95,
      "learning_rate": 6.987692307692309e-06,
      "loss": 0.0023,
      "step": 6375
    },
    {
      "epoch": 31.07,
      "learning_rate": 6.974871794871796e-06,
      "loss": 0.0025,
      "step": 6400
    },
    {
      "epoch": 31.19,
      "learning_rate": 6.9620512820512835e-06,
      "loss": 0.0021,
      "step": 6425
    },
    {
      "epoch": 31.31,
      "learning_rate": 6.94923076923077e-06,
      "loss": 0.002,
      "step": 6450
    },
    {
      "epoch": 31.43,
      "learning_rate": 6.936410256410256e-06,
      "loss": 0.0026,
      "step": 6475
    },
    {
      "epoch": 31.55,
      "learning_rate": 6.923589743589744e-06,
      "loss": 0.0031,
      "step": 6500
    },
    {
      "epoch": 31.67,
      "learning_rate": 6.910769230769231e-06,
      "loss": 0.0024,
      "step": 6525
    },
    {
      "epoch": 31.8,
      "learning_rate": 6.8979487179487185e-06,
      "loss": 0.0016,
      "step": 6550
    },
    {
      "epoch": 31.92,
      "learning_rate": 6.885128205128205e-06,
      "loss": 0.0017,
      "step": 6575
    },
    {
      "epoch": 32.04,
      "learning_rate": 6.872307692307693e-06,
      "loss": 0.0019,
      "step": 6600
    },
    {
      "epoch": 32.16,
      "learning_rate": 6.85948717948718e-06,
      "loss": 0.0018,
      "step": 6625
    },
    {
      "epoch": 32.28,
      "learning_rate": 6.846666666666667e-06,
      "loss": 0.0017,
      "step": 6650
    },
    {
      "epoch": 32.4,
      "learning_rate": 6.833846153846154e-06,
      "loss": 0.0013,
      "step": 6675
    },
    {
      "epoch": 32.52,
      "learning_rate": 6.821025641025641e-06,
      "loss": 0.0014,
      "step": 6700
    },
    {
      "epoch": 32.65,
      "learning_rate": 6.808205128205129e-06,
      "loss": 0.0012,
      "step": 6725
    },
    {
      "epoch": 32.77,
      "learning_rate": 6.795384615384616e-06,
      "loss": 0.0017,
      "step": 6750
    },
    {
      "epoch": 32.89,
      "learning_rate": 6.782564102564103e-06,
      "loss": 0.0019,
      "step": 6775
    },
    {
      "epoch": 33.01,
      "learning_rate": 6.76974358974359e-06,
      "loss": 0.0025,
      "step": 6800
    },
    {
      "epoch": 33.13,
      "learning_rate": 6.756923076923078e-06,
      "loss": 0.0015,
      "step": 6825
    },
    {
      "epoch": 33.25,
      "learning_rate": 6.744102564102565e-06,
      "loss": 0.0014,
      "step": 6850
    },
    {
      "epoch": 33.37,
      "learning_rate": 6.731282051282052e-06,
      "loss": 0.0017,
      "step": 6875
    },
    {
      "epoch": 33.5,
      "learning_rate": 6.718461538461539e-06,
      "loss": 0.0014,
      "step": 6900
    },
    {
      "epoch": 33.62,
      "learning_rate": 6.705641025641027e-06,
      "loss": 0.0013,
      "step": 6925
    },
    {
      "epoch": 33.74,
      "learning_rate": 6.692820512820514e-06,
      "loss": 0.0017,
      "step": 6950
    },
    {
      "epoch": 33.86,
      "learning_rate": 6.680000000000001e-06,
      "loss": 0.0014,
      "step": 6975
    },
    {
      "epoch": 33.98,
      "learning_rate": 6.667179487179488e-06,
      "loss": 0.0015,
      "step": 7000
    },
    {
      "epoch": 33.98,
      "eval_loss": 0.291135311126709,
      "eval_runtime": 1399.6671,
      "eval_samples_per_second": 4.709,
      "eval_steps_per_second": 0.294,
      "eval_wer": 11.519300093158897,
      "step": 7000
    },
    {
      "epoch": 34.1,
      "learning_rate": 6.654358974358976e-06,
      "loss": 0.0024,
      "step": 7025
    },
    {
      "epoch": 34.22,
      "learning_rate": 6.641538461538463e-06,
      "loss": 0.0022,
      "step": 7050
    },
    {
      "epoch": 34.34,
      "learning_rate": 6.6287179487179486e-06,
      "loss": 0.0012,
      "step": 7075
    },
    {
      "epoch": 34.47,
      "learning_rate": 6.615897435897436e-06,
      "loss": 0.0013,
      "step": 7100
    },
    {
      "epoch": 34.59,
      "learning_rate": 6.603076923076923e-06,
      "loss": 0.0019,
      "step": 7125
    },
    {
      "epoch": 34.71,
      "learning_rate": 6.590256410256411e-06,
      "loss": 0.0019,
      "step": 7150
    },
    {
      "epoch": 34.83,
      "learning_rate": 6.5774358974358976e-06,
      "loss": 0.0019,
      "step": 7175
    },
    {
      "epoch": 34.95,
      "learning_rate": 6.564615384615385e-06,
      "loss": 0.0018,
      "step": 7200
    },
    {
      "epoch": 35.07,
      "learning_rate": 6.551794871794872e-06,
      "loss": 0.0022,
      "step": 7225
    },
    {
      "epoch": 35.19,
      "learning_rate": 6.538974358974359e-06,
      "loss": 0.0014,
      "step": 7250
    },
    {
      "epoch": 35.32,
      "learning_rate": 6.5261538461538465e-06,
      "loss": 0.0011,
      "step": 7275
    },
    {
      "epoch": 35.44,
      "learning_rate": 6.513333333333333e-06,
      "loss": 0.0009,
      "step": 7300
    },
    {
      "epoch": 35.56,
      "learning_rate": 6.500512820512821e-06,
      "loss": 0.0009,
      "step": 7325
    },
    {
      "epoch": 35.68,
      "learning_rate": 6.487692307692308e-06,
      "loss": 0.0011,
      "step": 7350
    },
    {
      "epoch": 35.8,
      "learning_rate": 6.4748717948717955e-06,
      "loss": 0.001,
      "step": 7375
    },
    {
      "epoch": 35.92,
      "learning_rate": 6.462051282051282e-06,
      "loss": 0.0006,
      "step": 7400
    },
    {
      "epoch": 36.04,
      "learning_rate": 6.44923076923077e-06,
      "loss": 0.0008,
      "step": 7425
    },
    {
      "epoch": 36.17,
      "learning_rate": 6.436410256410257e-06,
      "loss": 0.0004,
      "step": 7450
    },
    {
      "epoch": 36.29,
      "learning_rate": 6.4235897435897445e-06,
      "loss": 0.0004,
      "step": 7475
    },
    {
      "epoch": 36.41,
      "learning_rate": 6.410769230769231e-06,
      "loss": 0.0008,
      "step": 7500
    },
    {
      "epoch": 36.53,
      "learning_rate": 6.397948717948719e-06,
      "loss": 0.0004,
      "step": 7525
    },
    {
      "epoch": 36.65,
      "learning_rate": 6.385128205128206e-06,
      "loss": 0.0004,
      "step": 7550
    },
    {
      "epoch": 36.77,
      "learning_rate": 6.3723076923076935e-06,
      "loss": 0.001,
      "step": 7575
    },
    {
      "epoch": 36.89,
      "learning_rate": 6.35948717948718e-06,
      "loss": 0.0012,
      "step": 7600
    },
    {
      "epoch": 37.01,
      "learning_rate": 6.346666666666668e-06,
      "loss": 0.0016,
      "step": 7625
    },
    {
      "epoch": 37.14,
      "learning_rate": 6.333846153846155e-06,
      "loss": 0.0013,
      "step": 7650
    },
    {
      "epoch": 37.26,
      "learning_rate": 6.3210256410256425e-06,
      "loss": 0.0015,
      "step": 7675
    },
    {
      "epoch": 37.38,
      "learning_rate": 6.3082051282051285e-06,
      "loss": 0.002,
      "step": 7700
    },
    {
      "epoch": 37.5,
      "learning_rate": 6.295384615384615e-06,
      "loss": 0.001,
      "step": 7725
    },
    {
      "epoch": 37.62,
      "learning_rate": 6.282564102564103e-06,
      "loss": 0.0014,
      "step": 7750
    },
    {
      "epoch": 37.74,
      "learning_rate": 6.26974358974359e-06,
      "loss": 0.0026,
      "step": 7775
    },
    {
      "epoch": 37.86,
      "learning_rate": 6.2569230769230775e-06,
      "loss": 0.0016,
      "step": 7800
    },
    {
      "epoch": 37.99,
      "learning_rate": 6.244102564102564e-06,
      "loss": 0.0017,
      "step": 7825
    },
    {
      "epoch": 38.11,
      "learning_rate": 6.231282051282051e-06,
      "loss": 0.0013,
      "step": 7850
    },
    {
      "epoch": 38.23,
      "learning_rate": 6.218461538461539e-06,
      "loss": 0.001,
      "step": 7875
    },
    {
      "epoch": 38.35,
      "learning_rate": 6.205641025641026e-06,
      "loss": 0.0017,
      "step": 7900
    },
    {
      "epoch": 38.47,
      "learning_rate": 6.192820512820513e-06,
      "loss": 0.0025,
      "step": 7925
    },
    {
      "epoch": 38.59,
      "learning_rate": 6.18e-06,
      "loss": 0.0023,
      "step": 7950
    },
    {
      "epoch": 38.71,
      "learning_rate": 6.167179487179488e-06,
      "loss": 0.0018,
      "step": 7975
    },
    {
      "epoch": 38.83,
      "learning_rate": 6.154358974358975e-06,
      "loss": 0.0012,
      "step": 8000
    },
    {
      "epoch": 38.83,
      "eval_loss": 0.29390278458595276,
      "eval_runtime": 1396.9481,
      "eval_samples_per_second": 4.718,
      "eval_steps_per_second": 0.295,
      "eval_wer": 11.367410587711127,
      "step": 8000
    },
    {
      "epoch": 38.96,
      "learning_rate": 6.141538461538462e-06,
      "loss": 0.0011,
      "step": 8025
    },
    {
      "epoch": 39.08,
      "learning_rate": 6.128717948717949e-06,
      "loss": 0.0017,
      "step": 8050
    },
    {
      "epoch": 39.2,
      "learning_rate": 6.115897435897437e-06,
      "loss": 0.0014,
      "step": 8075
    },
    {
      "epoch": 39.32,
      "learning_rate": 6.103076923076924e-06,
      "loss": 0.0011,
      "step": 8100
    },
    {
      "epoch": 39.44,
      "learning_rate": 6.090256410256411e-06,
      "loss": 0.0015,
      "step": 8125
    },
    {
      "epoch": 39.56,
      "learning_rate": 6.077435897435898e-06,
      "loss": 0.0011,
      "step": 8150
    },
    {
      "epoch": 39.68,
      "learning_rate": 6.064615384615386e-06,
      "loss": 0.0022,
      "step": 8175
    },
    {
      "epoch": 39.81,
      "learning_rate": 6.051794871794873e-06,
      "loss": 0.0018,
      "step": 8200
    },
    {
      "epoch": 39.93,
      "learning_rate": 6.03897435897436e-06,
      "loss": 0.0014,
      "step": 8225
    },
    {
      "epoch": 40.05,
      "learning_rate": 6.026153846153847e-06,
      "loss": 0.0007,
      "step": 8250
    },
    {
      "epoch": 40.17,
      "learning_rate": 6.013333333333335e-06,
      "loss": 0.0008,
      "step": 8275
    },
    {
      "epoch": 40.29,
      "learning_rate": 6.000512820512821e-06,
      "loss": 0.0008,
      "step": 8300
    },
    {
      "epoch": 40.41,
      "learning_rate": 5.9876923076923076e-06,
      "loss": 0.0009,
      "step": 8325
    },
    {
      "epoch": 40.53,
      "learning_rate": 5.974871794871795e-06,
      "loss": 0.0012,
      "step": 8350
    },
    {
      "epoch": 40.66,
      "learning_rate": 5.962051282051282e-06,
      "loss": 0.0013,
      "step": 8375
    },
    {
      "epoch": 40.78,
      "learning_rate": 5.94923076923077e-06,
      "loss": 0.001,
      "step": 8400
    },
    {
      "epoch": 40.9,
      "learning_rate": 5.9364102564102566e-06,
      "loss": 0.0008,
      "step": 8425
    },
    {
      "epoch": 41.02,
      "learning_rate": 5.923589743589744e-06,
      "loss": 0.0006,
      "step": 8450
    },
    {
      "epoch": 41.14,
      "learning_rate": 5.910769230769231e-06,
      "loss": 0.0002,
      "step": 8475
    },
    {
      "epoch": 41.26,
      "learning_rate": 5.897948717948718e-06,
      "loss": 0.0005,
      "step": 8500
    },
    {
      "epoch": 41.38,
      "learning_rate": 5.8851282051282056e-06,
      "loss": 0.0003,
      "step": 8525
    },
    {
      "epoch": 41.5,
      "learning_rate": 5.872307692307692e-06,
      "loss": 0.0002,
      "step": 8550
    },
    {
      "epoch": 41.63,
      "learning_rate": 5.85948717948718e-06,
      "loss": 0.0003,
      "step": 8575
    },
    {
      "epoch": 41.75,
      "learning_rate": 5.846666666666667e-06,
      "loss": 0.0009,
      "step": 8600
    },
    {
      "epoch": 41.87,
      "learning_rate": 5.8338461538461545e-06,
      "loss": 0.0006,
      "step": 8625
    },
    {
      "epoch": 41.99,
      "learning_rate": 5.821025641025641e-06,
      "loss": 0.0006,
      "step": 8650
    },
    {
      "epoch": 42.11,
      "learning_rate": 5.808205128205129e-06,
      "loss": 0.0003,
      "step": 8675
    },
    {
      "epoch": 42.23,
      "learning_rate": 5.795384615384616e-06,
      "loss": 0.0002,
      "step": 8700
    },
    {
      "epoch": 42.35,
      "learning_rate": 5.7825641025641035e-06,
      "loss": 0.0005,
      "step": 8725
    },
    {
      "epoch": 42.48,
      "learning_rate": 5.76974358974359e-06,
      "loss": 0.001,
      "step": 8750
    },
    {
      "epoch": 42.6,
      "learning_rate": 5.756923076923078e-06,
      "loss": 0.001,
      "step": 8775
    },
    {
      "epoch": 42.72,
      "learning_rate": 5.744102564102565e-06,
      "loss": 0.0009,
      "step": 8800
    },
    {
      "epoch": 42.84,
      "learning_rate": 5.7312820512820525e-06,
      "loss": 0.0008,
      "step": 8825
    },
    {
      "epoch": 42.96,
      "learning_rate": 5.718461538461539e-06,
      "loss": 0.0009,
      "step": 8850
    },
    {
      "epoch": 43.08,
      "learning_rate": 5.705641025641027e-06,
      "loss": 0.0009,
      "step": 8875
    },
    {
      "epoch": 43.2,
      "learning_rate": 5.692820512820513e-06,
      "loss": 0.0007,
      "step": 8900
    },
    {
      "epoch": 43.33,
      "learning_rate": 5.68e-06,
      "loss": 0.0003,
      "step": 8925
    },
    {
      "epoch": 43.45,
      "learning_rate": 5.6671794871794875e-06,
      "loss": 0.0003,
      "step": 8950
    },
    {
      "epoch": 43.57,
      "learning_rate": 5.654871794871796e-06,
      "loss": 0.0005,
      "step": 8975
    },
    {
      "epoch": 43.69,
      "learning_rate": 5.642051282051283e-06,
      "loss": 0.0009,
      "step": 9000
    },
    {
      "epoch": 43.69,
      "eval_loss": 0.303938627243042,
      "eval_runtime": 1404.6996,
      "eval_samples_per_second": 4.692,
      "eval_steps_per_second": 0.293,
      "eval_wer": 11.413990036048443,
      "step": 9000
    },
    {
      "epoch": 43.81,
      "learning_rate": 5.62923076923077e-06,
      "loss": 0.0013,
      "step": 9025
    },
    {
      "epoch": 43.93,
      "learning_rate": 5.616410256410257e-06,
      "loss": 0.001,
      "step": 9050
    },
    {
      "epoch": 44.05,
      "learning_rate": 5.603589743589744e-06,
      "loss": 0.0019,
      "step": 9075
    },
    {
      "epoch": 44.17,
      "learning_rate": 5.590769230769231e-06,
      "loss": 0.0023,
      "step": 9100
    },
    {
      "epoch": 44.3,
      "learning_rate": 5.5779487179487176e-06,
      "loss": 0.0026,
      "step": 9125
    },
    {
      "epoch": 44.42,
      "learning_rate": 5.565128205128205e-06,
      "loss": 0.0021,
      "step": 9150
    },
    {
      "epoch": 44.54,
      "learning_rate": 5.552307692307692e-06,
      "loss": 0.002,
      "step": 9175
    },
    {
      "epoch": 44.66,
      "learning_rate": 5.53948717948718e-06,
      "loss": 0.0023,
      "step": 9200
    },
    {
      "epoch": 44.78,
      "learning_rate": 5.5266666666666666e-06,
      "loss": 0.0021,
      "step": 9225
    },
    {
      "epoch": 44.9,
      "learning_rate": 5.513846153846154e-06,
      "loss": 0.0025,
      "step": 9250
    },
    {
      "epoch": 45.02,
      "learning_rate": 5.501025641025641e-06,
      "loss": 0.002,
      "step": 9275
    },
    {
      "epoch": 45.15,
      "learning_rate": 5.488205128205129e-06,
      "loss": 0.0013,
      "step": 9300
    },
    {
      "epoch": 45.27,
      "learning_rate": 5.4753846153846155e-06,
      "loss": 0.001,
      "step": 9325
    },
    {
      "epoch": 45.39,
      "learning_rate": 5.462564102564103e-06,
      "loss": 0.001,
      "step": 9350
    },
    {
      "epoch": 45.51,
      "learning_rate": 5.44974358974359e-06,
      "loss": 0.0016,
      "step": 9375
    },
    {
      "epoch": 45.63,
      "learning_rate": 5.436923076923078e-06,
      "loss": 0.0021,
      "step": 9400
    },
    {
      "epoch": 45.75,
      "learning_rate": 5.4241025641025645e-06,
      "loss": 0.001,
      "step": 9425
    },
    {
      "epoch": 45.87,
      "learning_rate": 5.411282051282052e-06,
      "loss": 0.0018,
      "step": 9450
    },
    {
      "epoch": 46.0,
      "learning_rate": 5.398461538461539e-06,
      "loss": 0.0019,
      "step": 9475
    },
    {
      "epoch": 46.12,
      "learning_rate": 5.385641025641027e-06,
      "loss": 0.0005,
      "step": 9500
    },
    {
      "epoch": 46.24,
      "learning_rate": 5.3728205128205135e-06,
      "loss": 0.0006,
      "step": 9525
    },
    {
      "epoch": 46.36,
      "learning_rate": 5.36e-06,
      "loss": 0.0005,
      "step": 9550
    },
    {
      "epoch": 46.48,
      "learning_rate": 5.347179487179488e-06,
      "loss": 0.0003,
      "step": 9575
    },
    {
      "epoch": 46.6,
      "learning_rate": 5.334358974358975e-06,
      "loss": 0.0003,
      "step": 9600
    },
    {
      "epoch": 46.72,
      "learning_rate": 5.3215384615384625e-06,
      "loss": 0.0003,
      "step": 9625
    },
    {
      "epoch": 46.84,
      "learning_rate": 5.308717948717949e-06,
      "loss": 0.0012,
      "step": 9650
    },
    {
      "epoch": 46.97,
      "learning_rate": 5.295897435897437e-06,
      "loss": 0.0004,
      "step": 9675
    },
    {
      "epoch": 47.09,
      "learning_rate": 5.283076923076923e-06,
      "loss": 0.0005,
      "step": 9700
    },
    {
      "epoch": 47.21,
      "learning_rate": 5.27025641025641e-06,
      "loss": 0.0003,
      "step": 9725
    },
    {
      "epoch": 47.33,
      "learning_rate": 5.2574358974358975e-06,
      "loss": 0.0004,
      "step": 9750
    },
    {
      "epoch": 47.45,
      "learning_rate": 5.244615384615384e-06,
      "loss": 0.0009,
      "step": 9775
    },
    {
      "epoch": 47.57,
      "learning_rate": 5.231794871794872e-06,
      "loss": 0.0003,
      "step": 9800
    },
    {
      "epoch": 47.69,
      "learning_rate": 5.218974358974359e-06,
      "loss": 0.0002,
      "step": 9825
    },
    {
      "epoch": 47.82,
      "learning_rate": 5.2061538461538465e-06,
      "loss": 0.0001,
      "step": 9850
    },
    {
      "epoch": 47.94,
      "learning_rate": 5.193333333333333e-06,
      "loss": 0.0003,
      "step": 9875
    },
    {
      "epoch": 48.06,
      "learning_rate": 5.180512820512821e-06,
      "loss": 0.0004,
      "step": 9900
    },
    {
      "epoch": 48.18,
      "learning_rate": 5.167692307692308e-06,
      "loss": 0.0004,
      "step": 9925
    },
    {
      "epoch": 48.3,
      "learning_rate": 5.1548717948717955e-06,
      "loss": 0.0002,
      "step": 9950
    },
    {
      "epoch": 48.42,
      "learning_rate": 5.142051282051282e-06,
      "loss": 0.0003,
      "step": 9975
    },
    {
      "epoch": 48.54,
      "learning_rate": 5.12923076923077e-06,
      "loss": 0.0002,
      "step": 10000
    },
    {
      "epoch": 48.54,
      "eval_loss": 0.30629321932792664,
      "eval_runtime": 1399.0538,
      "eval_samples_per_second": 4.711,
      "eval_steps_per_second": 0.294,
      "eval_wer": 10.962371906517072,
      "step": 10000
    },
    {
      "epoch": 48.67,
      "learning_rate": 5.116410256410257e-06,
      "loss": 0.0003,
      "step": 10025
    },
    {
      "epoch": 48.79,
      "learning_rate": 5.1035897435897445e-06,
      "loss": 0.0003,
      "step": 10050
    },
    {
      "epoch": 48.91,
      "learning_rate": 5.090769230769231e-06,
      "loss": 0.0003,
      "step": 10075
    },
    {
      "epoch": 49.03,
      "learning_rate": 5.077948717948719e-06,
      "loss": 0.0008,
      "step": 10100
    },
    {
      "epoch": 49.15,
      "learning_rate": 5.065128205128206e-06,
      "loss": 0.0007,
      "step": 10125
    },
    {
      "epoch": 49.27,
      "learning_rate": 5.052307692307693e-06,
      "loss": 0.0015,
      "step": 10150
    },
    {
      "epoch": 49.39,
      "learning_rate": 5.03948717948718e-06,
      "loss": 0.0006,
      "step": 10175
    },
    {
      "epoch": 49.51,
      "learning_rate": 5.026666666666667e-06,
      "loss": 0.0007,
      "step": 10200
    },
    {
      "epoch": 49.64,
      "learning_rate": 5.013846153846155e-06,
      "loss": 0.001,
      "step": 10225
    },
    {
      "epoch": 49.76,
      "learning_rate": 5.001025641025642e-06,
      "loss": 0.0009,
      "step": 10250
    },
    {
      "epoch": 49.88,
      "learning_rate": 4.988205128205128e-06,
      "loss": 0.0006,
      "step": 10275
    },
    {
      "epoch": 50.0,
      "learning_rate": 4.975384615384616e-06,
      "loss": 0.0008,
      "step": 10300
    },
    {
      "epoch": 50.12,
      "learning_rate": 4.962564102564103e-06,
      "loss": 0.0008,
      "step": 10325
    },
    {
      "epoch": 50.24,
      "learning_rate": 4.949743589743591e-06,
      "loss": 0.0005,
      "step": 10350
    },
    {
      "epoch": 50.36,
      "learning_rate": 4.936923076923077e-06,
      "loss": 0.0008,
      "step": 10375
    },
    {
      "epoch": 50.49,
      "learning_rate": 4.924102564102565e-06,
      "loss": 0.0005,
      "step": 10400
    },
    {
      "epoch": 50.61,
      "learning_rate": 4.911282051282052e-06,
      "loss": 0.0014,
      "step": 10425
    },
    {
      "epoch": 50.73,
      "learning_rate": 4.898461538461539e-06,
      "loss": 0.0019,
      "step": 10450
    },
    {
      "epoch": 50.85,
      "learning_rate": 4.8856410256410256e-06,
      "loss": 0.0012,
      "step": 10475
    },
    {
      "epoch": 50.97,
      "learning_rate": 4.872820512820513e-06,
      "loss": 0.0011,
      "step": 10500
    },
    {
      "epoch": 51.09,
      "learning_rate": 4.86e-06,
      "loss": 0.0007,
      "step": 10525
    },
    {
      "epoch": 51.21,
      "learning_rate": 4.847179487179488e-06,
      "loss": 0.0006,
      "step": 10550
    },
    {
      "epoch": 51.33,
      "learning_rate": 4.8343589743589746e-06,
      "loss": 0.0006,
      "step": 10575
    },
    {
      "epoch": 51.46,
      "learning_rate": 4.821538461538462e-06,
      "loss": 0.0005,
      "step": 10600
    },
    {
      "epoch": 51.58,
      "learning_rate": 4.808717948717949e-06,
      "loss": 0.0013,
      "step": 10625
    },
    {
      "epoch": 51.7,
      "learning_rate": 4.795897435897437e-06,
      "loss": 0.0007,
      "step": 10650
    },
    {
      "epoch": 51.82,
      "learning_rate": 4.7830769230769235e-06,
      "loss": 0.0006,
      "step": 10675
    },
    {
      "epoch": 51.94,
      "learning_rate": 4.770256410256411e-06,
      "loss": 0.0009,
      "step": 10700
    },
    {
      "epoch": 52.06,
      "learning_rate": 4.757435897435898e-06,
      "loss": 0.0005,
      "step": 10725
    },
    {
      "epoch": 52.18,
      "learning_rate": 4.744615384615385e-06,
      "loss": 0.0009,
      "step": 10750
    },
    {
      "epoch": 52.31,
      "learning_rate": 4.731794871794872e-06,
      "loss": 0.0004,
      "step": 10775
    },
    {
      "epoch": 52.43,
      "learning_rate": 4.718974358974359e-06,
      "loss": 0.0009,
      "step": 10800
    },
    {
      "epoch": 52.55,
      "learning_rate": 4.706153846153846e-06,
      "loss": 0.0008,
      "step": 10825
    },
    {
      "epoch": 52.67,
      "learning_rate": 4.693333333333334e-06,
      "loss": 0.0007,
      "step": 10850
    },
    {
      "epoch": 52.79,
      "learning_rate": 4.680512820512821e-06,
      "loss": 0.001,
      "step": 10875
    },
    {
      "epoch": 52.91,
      "learning_rate": 4.667692307692308e-06,
      "loss": 0.0009,
      "step": 10900
    },
    {
      "epoch": 53.03,
      "learning_rate": 4.654871794871795e-06,
      "loss": 0.0013,
      "step": 10925
    },
    {
      "epoch": 53.16,
      "learning_rate": 4.642051282051283e-06,
      "loss": 0.0007,
      "step": 10950
    },
    {
      "epoch": 53.28,
      "learning_rate": 4.62923076923077e-06,
      "loss": 0.0011,
      "step": 10975
    },
    {
      "epoch": 53.4,
      "learning_rate": 4.616410256410257e-06,
      "loss": 0.0009,
      "step": 11000
    },
    {
      "epoch": 53.4,
      "eval_loss": 0.30143803358078003,
      "eval_runtime": 1398.8263,
      "eval_samples_per_second": 4.712,
      "eval_steps_per_second": 0.295,
      "eval_wer": 11.3350074932156,
      "step": 11000
    },
    {
      "epoch": 53.52,
      "learning_rate": 4.603589743589744e-06,
      "loss": 0.0011,
      "step": 11025
    },
    {
      "epoch": 53.64,
      "learning_rate": 4.590769230769232e-06,
      "loss": 0.0004,
      "step": 11050
    },
    {
      "epoch": 53.76,
      "learning_rate": 4.577948717948718e-06,
      "loss": 0.0004,
      "step": 11075
    },
    {
      "epoch": 53.88,
      "learning_rate": 4.5651282051282055e-06,
      "loss": 0.0008,
      "step": 11100
    },
    {
      "epoch": 54.0,
      "learning_rate": 4.552307692307692e-06,
      "loss": 0.0013,
      "step": 11125
    },
    {
      "epoch": 54.13,
      "learning_rate": 4.53948717948718e-06,
      "loss": 0.0009,
      "step": 11150
    },
    {
      "epoch": 54.25,
      "learning_rate": 4.526666666666667e-06,
      "loss": 0.0003,
      "step": 11175
    },
    {
      "epoch": 54.37,
      "learning_rate": 4.5138461538461545e-06,
      "loss": 0.0003,
      "step": 11200
    },
    {
      "epoch": 54.49,
      "learning_rate": 4.501025641025641e-06,
      "loss": 0.0004,
      "step": 11225
    },
    {
      "epoch": 54.61,
      "learning_rate": 4.488205128205129e-06,
      "loss": 0.0004,
      "step": 11250
    },
    {
      "epoch": 54.73,
      "learning_rate": 4.475384615384616e-06,
      "loss": 0.0002,
      "step": 11275
    },
    {
      "epoch": 54.85,
      "learning_rate": 4.4625641025641035e-06,
      "loss": 0.0006,
      "step": 11300
    },
    {
      "epoch": 54.98,
      "learning_rate": 4.44974358974359e-06,
      "loss": 0.0007,
      "step": 11325
    },
    {
      "epoch": 55.1,
      "learning_rate": 4.436923076923078e-06,
      "loss": 0.0003,
      "step": 11350
    },
    {
      "epoch": 55.22,
      "learning_rate": 4.424102564102564e-06,
      "loss": 0.0002,
      "step": 11375
    },
    {
      "epoch": 55.34,
      "learning_rate": 4.411282051282052e-06,
      "loss": 0.0001,
      "step": 11400
    },
    {
      "epoch": 55.46,
      "learning_rate": 4.3984615384615384e-06,
      "loss": 0.0005,
      "step": 11425
    },
    {
      "epoch": 55.58,
      "learning_rate": 4.385641025641026e-06,
      "loss": 0.0003,
      "step": 11450
    },
    {
      "epoch": 55.7,
      "learning_rate": 4.372820512820513e-06,
      "loss": 0.0003,
      "step": 11475
    },
    {
      "epoch": 55.83,
| "learning_rate": 4.360000000000001e-06, | |
| "loss": 0.0005, | |
| "step": 11500 | |
| }, | |
| { | |
| "epoch": 55.95, | |
| "learning_rate": 4.3471794871794874e-06, | |
| "loss": 0.0006, | |
| "step": 11525 | |
| }, | |
| { | |
| "epoch": 56.07, | |
| "learning_rate": 4.334358974358975e-06, | |
| "loss": 0.0003, | |
| "step": 11550 | |
| }, | |
| { | |
| "epoch": 56.19, | |
| "learning_rate": 4.321538461538462e-06, | |
| "loss": 0.0003, | |
| "step": 11575 | |
| }, | |
| { | |
| "epoch": 56.31, | |
| "learning_rate": 4.30871794871795e-06, | |
| "loss": 0.0002, | |
| "step": 11600 | |
| }, | |
| { | |
| "epoch": 56.43, | |
| "learning_rate": 4.2958974358974364e-06, | |
| "loss": 0.0001, | |
| "step": 11625 | |
| }, | |
| { | |
| "epoch": 56.55, | |
| "learning_rate": 4.283076923076924e-06, | |
| "loss": 0.0003, | |
| "step": 11650 | |
| }, | |
| { | |
| "epoch": 56.67, | |
| "learning_rate": 4.27025641025641e-06, | |
| "loss": 0.0002, | |
| "step": 11675 | |
| }, | |
| { | |
| "epoch": 56.8, | |
| "learning_rate": 4.257435897435898e-06, | |
| "loss": 0.0008, | |
| "step": 11700 | |
| }, | |
| { | |
| "epoch": 56.92, | |
| "learning_rate": 4.2446153846153846e-06, | |
| "loss": 0.0005, | |
| "step": 11725 | |
| }, | |
| { | |
| "epoch": 57.04, | |
| "learning_rate": 4.231794871794872e-06, | |
| "loss": 0.0003, | |
| "step": 11750 | |
| }, | |
| { | |
| "epoch": 57.16, | |
| "learning_rate": 4.218974358974359e-06, | |
| "loss": 0.0003, | |
| "step": 11775 | |
| }, | |
| { | |
| "epoch": 57.28, | |
| "learning_rate": 4.206153846153847e-06, | |
| "loss": 0.0002, | |
| "step": 11800 | |
| }, | |
| { | |
| "epoch": 57.4, | |
| "learning_rate": 4.1933333333333336e-06, | |
| "loss": 0.0002, | |
| "step": 11825 | |
| }, | |
| { | |
| "epoch": 57.52, | |
| "learning_rate": 4.180512820512821e-06, | |
| "loss": 0.0001, | |
| "step": 11850 | |
| }, | |
| { | |
| "epoch": 57.65, | |
| "learning_rate": 4.167692307692308e-06, | |
| "loss": 0.0005, | |
| "step": 11875 | |
| }, | |
| { | |
| "epoch": 57.77, | |
| "learning_rate": 4.154871794871796e-06, | |
| "loss": 0.0001, | |
| "step": 11900 | |
| }, | |
| { | |
| "epoch": 57.89, | |
| "learning_rate": 4.1420512820512826e-06, | |
| "loss": 0.0001, | |
| "step": 11925 | |
| }, | |
| { | |
| "epoch": 58.01, | |
| "learning_rate": 4.12974358974359e-06, | |
| "loss": 0.0005, | |
| "step": 11950 | |
| }, | |
| { | |
| "epoch": 58.13, | |
| "learning_rate": 4.116923076923078e-06, | |
| "loss": 0.0012, | |
| "step": 11975 | |
| }, | |
| { | |
| "epoch": 58.25, | |
| "learning_rate": 4.1041025641025645e-06, | |
| "loss": 0.0011, | |
| "step": 12000 | |
| }, | |
| { | |
| "epoch": 58.25, | |
| "eval_loss": 0.30517110228538513, | |
| "eval_runtime": 1398.4457, | |
| "eval_samples_per_second": 4.713, | |
| "eval_steps_per_second": 0.295, | |
| "eval_wer": 11.047430029567824, | |
| "step": 12000 | |
| }, | |
| { | |
| "epoch": 58.37, | |
| "learning_rate": 4.091282051282051e-06, | |
| "loss": 0.001, | |
| "step": 12025 | |
| }, | |
| { | |
| "epoch": 58.5, | |
| "learning_rate": 4.078461538461539e-06, | |
| "loss": 0.0004, | |
| "step": 12050 | |
| }, | |
| { | |
| "epoch": 58.62, | |
| "learning_rate": 4.065641025641026e-06, | |
| "loss": 0.0002, | |
| "step": 12075 | |
| }, | |
| { | |
| "epoch": 58.74, | |
| "learning_rate": 4.0528205128205135e-06, | |
| "loss": 0.0013, | |
| "step": 12100 | |
| }, | |
| { | |
| "epoch": 58.86, | |
| "learning_rate": 4.04e-06, | |
| "loss": 0.0007, | |
| "step": 12125 | |
| }, | |
| { | |
| "epoch": 58.98, | |
| "learning_rate": 4.027179487179487e-06, | |
| "loss": 0.0003, | |
| "step": 12150 | |
| }, | |
| { | |
| "epoch": 59.1, | |
| "learning_rate": 4.014358974358975e-06, | |
| "loss": 0.0003, | |
| "step": 12175 | |
| }, | |
| { | |
| "epoch": 59.22, | |
| "learning_rate": 4.001538461538462e-06, | |
| "loss": 0.0003, | |
| "step": 12200 | |
| }, | |
| { | |
| "epoch": 59.34, | |
| "learning_rate": 3.988717948717949e-06, | |
| "loss": 0.0002, | |
| "step": 12225 | |
| }, | |
| { | |
| "epoch": 59.47, | |
| "learning_rate": 3.975897435897436e-06, | |
| "loss": 0.0001, | |
| "step": 12250 | |
| }, | |
| { | |
| "epoch": 59.59, | |
| "learning_rate": 3.963076923076924e-06, | |
| "loss": 0.0003, | |
| "step": 12275 | |
| }, | |
| { | |
| "epoch": 59.71, | |
| "learning_rate": 3.950256410256411e-06, | |
| "loss": 0.0009, | |
| "step": 12300 | |
| }, | |
| { | |
| "epoch": 59.83, | |
| "learning_rate": 3.937435897435897e-06, | |
| "loss": 0.0012, | |
| "step": 12325 | |
| }, | |
| { | |
| "epoch": 59.95, | |
| "learning_rate": 3.924615384615385e-06, | |
| "loss": 0.0005, | |
| "step": 12350 | |
| }, | |
| { | |
| "epoch": 60.07, | |
| "learning_rate": 3.911794871794872e-06, | |
| "loss": 0.0009, | |
| "step": 12375 | |
| }, | |
| { | |
| "epoch": 60.19, | |
| "learning_rate": 3.89897435897436e-06, | |
| "loss": 0.0007, | |
| "step": 12400 | |
| }, | |
| { | |
| "epoch": 60.32, | |
| "learning_rate": 3.886153846153846e-06, | |
| "loss": 0.0011, | |
| "step": 12425 | |
| }, | |
| { | |
| "epoch": 60.44, | |
| "learning_rate": 3.873333333333333e-06, | |
| "loss": 0.001, | |
| "step": 12450 | |
| }, | |
| { | |
| "epoch": 60.56, | |
| "learning_rate": 3.860512820512821e-06, | |
| "loss": 0.0007, | |
| "step": 12475 | |
| }, | |
| { | |
| "epoch": 60.68, | |
| "learning_rate": 3.847692307692308e-06, | |
| "loss": 0.0006, | |
| "step": 12500 | |
| }, | |
| { | |
| "epoch": 60.8, | |
| "learning_rate": 3.834871794871795e-06, | |
| "loss": 0.0007, | |
| "step": 12525 | |
| }, | |
| { | |
| "epoch": 60.92, | |
| "learning_rate": 3.822051282051282e-06, | |
| "loss": 0.0004, | |
| "step": 12550 | |
| }, | |
| { | |
| "epoch": 61.04, | |
| "learning_rate": 3.8092307692307695e-06, | |
| "loss": 0.0002, | |
| "step": 12575 | |
| }, | |
| { | |
| "epoch": 61.17, | |
| "learning_rate": 3.7964102564102567e-06, | |
| "loss": 0.0002, | |
| "step": 12600 | |
| }, | |
| { | |
| "epoch": 61.29, | |
| "learning_rate": 3.783589743589744e-06, | |
| "loss": 0.0002, | |
| "step": 12625 | |
| }, | |
| { | |
| "epoch": 61.41, | |
| "learning_rate": 3.7707692307692312e-06, | |
| "loss": 0.0003, | |
| "step": 12650 | |
| }, | |
| { | |
| "epoch": 61.53, | |
| "learning_rate": 3.7579487179487185e-06, | |
| "loss": 0.0001, | |
| "step": 12675 | |
| }, | |
| { | |
| "epoch": 61.65, | |
| "learning_rate": 3.7451282051282057e-06, | |
| "loss": 0.0001, | |
| "step": 12700 | |
| }, | |
| { | |
| "epoch": 61.77, | |
| "learning_rate": 3.732307692307693e-06, | |
| "loss": 0.0002, | |
| "step": 12725 | |
| }, | |
| { | |
| "epoch": 61.89, | |
| "learning_rate": 3.7194871794871794e-06, | |
| "loss": 0.0004, | |
| "step": 12750 | |
| }, | |
| { | |
| "epoch": 62.01, | |
| "learning_rate": 3.7066666666666666e-06, | |
| "loss": 0.0002, | |
| "step": 12775 | |
| }, | |
| { | |
| "epoch": 62.14, | |
| "learning_rate": 3.693846153846154e-06, | |
| "loss": 0.0001, | |
| "step": 12800 | |
| }, | |
| { | |
| "epoch": 62.26, | |
| "learning_rate": 3.681025641025641e-06, | |
| "loss": 0.0001, | |
| "step": 12825 | |
| }, | |
| { | |
| "epoch": 62.38, | |
| "learning_rate": 3.6682051282051284e-06, | |
| "loss": 0.0001, | |
| "step": 12850 | |
| }, | |
| { | |
| "epoch": 62.5, | |
| "learning_rate": 3.6553846153846156e-06, | |
| "loss": 0.0001, | |
| "step": 12875 | |
| }, | |
| { | |
| "epoch": 62.62, | |
| "learning_rate": 3.642564102564103e-06, | |
| "loss": 0.0001, | |
| "step": 12900 | |
| }, | |
| { | |
| "epoch": 62.74, | |
| "learning_rate": 3.62974358974359e-06, | |
| "loss": 0.0001, | |
| "step": 12925 | |
| }, | |
| { | |
| "epoch": 62.86, | |
| "learning_rate": 3.6169230769230773e-06, | |
| "loss": 0.0001, | |
| "step": 12950 | |
| }, | |
| { | |
| "epoch": 62.99, | |
| "learning_rate": 3.6041025641025646e-06, | |
| "loss": 0.0001, | |
| "step": 12975 | |
| }, | |
| { | |
| "epoch": 63.11, | |
| "learning_rate": 3.591282051282052e-06, | |
| "loss": 0.0001, | |
| "step": 13000 | |
| }, | |
| { | |
| "epoch": 63.11, | |
| "eval_loss": 0.3204333782196045, | |
| "eval_runtime": 1398.4876, | |
| "eval_samples_per_second": 4.713, | |
| "eval_steps_per_second": 0.295, | |
| "eval_wer": 10.86921300984244, | |
| "step": 13000 | |
| }, | |
| { | |
| "epoch": 63.23, | |
| "learning_rate": 3.578461538461539e-06, | |
| "loss": 0.0001, | |
| "step": 13025 | |
| }, | |
| { | |
| "epoch": 63.35, | |
| "learning_rate": 3.5656410256410263e-06, | |
| "loss": 0.0, | |
| "step": 13050 | |
| }, | |
| { | |
| "epoch": 63.47, | |
| "learning_rate": 3.5528205128205127e-06, | |
| "loss": 0.0, | |
| "step": 13075 | |
| }, | |
| { | |
| "epoch": 63.59, | |
| "learning_rate": 3.54e-06, | |
| "loss": 0.0, | |
| "step": 13100 | |
| }, | |
| { | |
| "epoch": 63.71, | |
| "learning_rate": 3.5271794871794872e-06, | |
| "loss": 0.0001, | |
| "step": 13125 | |
| }, | |
| { | |
| "epoch": 63.83, | |
| "learning_rate": 3.5143589743589745e-06, | |
| "loss": 0.0001, | |
| "step": 13150 | |
| }, | |
| { | |
| "epoch": 63.96, | |
| "learning_rate": 3.5015384615384617e-06, | |
| "loss": 0.0001, | |
| "step": 13175 | |
| }, | |
| { | |
| "epoch": 64.08, | |
| "learning_rate": 3.488717948717949e-06, | |
| "loss": 0.0, | |
| "step": 13200 | |
| }, | |
| { | |
| "epoch": 64.2, | |
| "learning_rate": 3.4758974358974362e-06, | |
| "loss": 0.0002, | |
| "step": 13225 | |
| }, | |
| { | |
| "epoch": 64.32, | |
| "learning_rate": 3.4630769230769235e-06, | |
| "loss": 0.0001, | |
| "step": 13250 | |
| }, | |
| { | |
| "epoch": 64.44, | |
| "learning_rate": 3.4502564102564107e-06, | |
| "loss": 0.0, | |
| "step": 13275 | |
| }, | |
| { | |
| "epoch": 64.56, | |
| "learning_rate": 3.437435897435898e-06, | |
| "loss": 0.0, | |
| "step": 13300 | |
| }, | |
| { | |
| "epoch": 64.68, | |
| "learning_rate": 3.4246153846153852e-06, | |
| "loss": 0.0001, | |
| "step": 13325 | |
| }, | |
| { | |
| "epoch": 64.81, | |
| "learning_rate": 3.4117948717948725e-06, | |
| "loss": 0.0, | |
| "step": 13350 | |
| }, | |
| { | |
| "epoch": 64.93, | |
| "learning_rate": 3.398974358974359e-06, | |
| "loss": 0.0001, | |
| "step": 13375 | |
| }, | |
| { | |
| "epoch": 65.05, | |
| "learning_rate": 3.386153846153846e-06, | |
| "loss": 0.0001, | |
| "step": 13400 | |
| }, | |
| { | |
| "epoch": 65.17, | |
| "learning_rate": 3.3733333333333334e-06, | |
| "loss": 0.0001, | |
| "step": 13425 | |
| }, | |
| { | |
| "epoch": 65.29, | |
| "learning_rate": 3.3605128205128206e-06, | |
| "loss": 0.0, | |
| "step": 13450 | |
| }, | |
| { | |
| "epoch": 65.41, | |
| "learning_rate": 3.347692307692308e-06, | |
| "loss": 0.0001, | |
| "step": 13475 | |
| }, | |
| { | |
| "epoch": 65.53, | |
| "learning_rate": 3.334871794871795e-06, | |
| "loss": 0.0001, | |
| "step": 13500 | |
| }, | |
| { | |
| "epoch": 65.66, | |
| "learning_rate": 3.3220512820512824e-06, | |
| "loss": 0.0, | |
| "step": 13525 | |
| }, | |
| { | |
| "epoch": 65.78, | |
| "learning_rate": 3.3092307692307696e-06, | |
| "loss": 0.0, | |
| "step": 13550 | |
| }, | |
| { | |
| "epoch": 65.9, | |
| "learning_rate": 3.296410256410257e-06, | |
| "loss": 0.0, | |
| "step": 13575 | |
| }, | |
| { | |
| "epoch": 66.02, | |
| "learning_rate": 3.283589743589744e-06, | |
| "loss": 0.0, | |
| "step": 13600 | |
| }, | |
| { | |
| "epoch": 66.14, | |
| "learning_rate": 3.2707692307692313e-06, | |
| "loss": 0.0, | |
| "step": 13625 | |
| }, | |
| { | |
| "epoch": 66.26, | |
| "learning_rate": 3.2579487179487186e-06, | |
| "loss": 0.0, | |
| "step": 13650 | |
| }, | |
| { | |
| "epoch": 66.38, | |
| "learning_rate": 3.245128205128205e-06, | |
| "loss": 0.0, | |
| "step": 13675 | |
| }, | |
| { | |
| "epoch": 66.5, | |
| "learning_rate": 3.2323076923076922e-06, | |
| "loss": 0.0, | |
| "step": 13700 | |
| }, | |
| { | |
| "epoch": 66.63, | |
| "learning_rate": 3.2194871794871795e-06, | |
| "loss": 0.0, | |
| "step": 13725 | |
| }, | |
| { | |
| "epoch": 66.75, | |
| "learning_rate": 3.2066666666666667e-06, | |
| "loss": 0.0, | |
| "step": 13750 | |
| }, | |
| { | |
| "epoch": 66.87, | |
| "learning_rate": 3.193846153846154e-06, | |
| "loss": 0.0, | |
| "step": 13775 | |
| }, | |
| { | |
| "epoch": 66.99, | |
| "learning_rate": 3.1810256410256412e-06, | |
| "loss": 0.0, | |
| "step": 13800 | |
| }, | |
| { | |
| "epoch": 67.11, | |
| "learning_rate": 3.1682051282051285e-06, | |
| "loss": 0.0, | |
| "step": 13825 | |
| }, | |
| { | |
| "epoch": 67.23, | |
| "learning_rate": 3.1553846153846157e-06, | |
| "loss": 0.0, | |
| "step": 13850 | |
| }, | |
| { | |
| "epoch": 67.35, | |
| "learning_rate": 3.142564102564103e-06, | |
| "loss": 0.0, | |
| "step": 13875 | |
| }, | |
| { | |
| "epoch": 67.48, | |
| "learning_rate": 3.1297435897435902e-06, | |
| "loss": 0.0, | |
| "step": 13900 | |
| }, | |
| { | |
| "epoch": 67.6, | |
| "learning_rate": 3.1169230769230775e-06, | |
| "loss": 0.0, | |
| "step": 13925 | |
| }, | |
| { | |
| "epoch": 67.72, | |
| "learning_rate": 3.1041025641025647e-06, | |
| "loss": 0.0, | |
| "step": 13950 | |
| }, | |
| { | |
| "epoch": 67.84, | |
| "learning_rate": 3.091282051282051e-06, | |
| "loss": 0.0, | |
| "step": 13975 | |
| }, | |
| { | |
| "epoch": 67.96, | |
| "learning_rate": 3.0784615384615384e-06, | |
| "loss": 0.0, | |
| "step": 14000 | |
| }, | |
| { | |
| "epoch": 67.96, | |
| "eval_loss": 0.3413444459438324, | |
| "eval_runtime": 1398.7408, | |
| "eval_samples_per_second": 4.712, | |
| "eval_steps_per_second": 0.295, | |
| "eval_wer": 10.709222730770788, | |
| "step": 14000 | |
| }, | |
| { | |
| "epoch": 68.08, | |
| "learning_rate": 3.0656410256410256e-06, | |
| "loss": 0.0, | |
| "step": 14025 | |
| }, | |
| { | |
| "epoch": 68.2, | |
| "learning_rate": 3.052820512820513e-06, | |
| "loss": 0.0, | |
| "step": 14050 | |
| }, | |
| { | |
| "epoch": 68.33, | |
| "learning_rate": 3.04e-06, | |
| "loss": 0.0, | |
| "step": 14075 | |
| }, | |
| { | |
| "epoch": 68.45, | |
| "learning_rate": 3.0271794871794874e-06, | |
| "loss": 0.0, | |
| "step": 14100 | |
| }, | |
| { | |
| "epoch": 68.57, | |
| "learning_rate": 3.0143589743589746e-06, | |
| "loss": 0.0, | |
| "step": 14125 | |
| }, | |
| { | |
| "epoch": 68.69, | |
| "learning_rate": 3.001538461538462e-06, | |
| "loss": 0.0, | |
| "step": 14150 | |
| }, | |
| { | |
| "epoch": 68.81, | |
| "learning_rate": 2.988717948717949e-06, | |
| "loss": 0.0, | |
| "step": 14175 | |
| }, | |
| { | |
| "epoch": 68.93, | |
| "learning_rate": 2.9758974358974364e-06, | |
| "loss": 0.0, | |
| "step": 14200 | |
| }, | |
| { | |
| "epoch": 69.05, | |
| "learning_rate": 2.9630769230769236e-06, | |
| "loss": 0.0, | |
| "step": 14225 | |
| }, | |
| { | |
| "epoch": 69.17, | |
| "learning_rate": 2.950256410256411e-06, | |
| "loss": 0.0, | |
| "step": 14250 | |
| }, | |
| { | |
| "epoch": 69.3, | |
| "learning_rate": 2.9374358974358973e-06, | |
| "loss": 0.0, | |
| "step": 14275 | |
| }, | |
| { | |
| "epoch": 69.42, | |
| "learning_rate": 2.9246153846153845e-06, | |
| "loss": 0.0, | |
| "step": 14300 | |
| }, | |
| { | |
| "epoch": 69.54, | |
| "learning_rate": 2.9117948717948717e-06, | |
| "loss": 0.0, | |
| "step": 14325 | |
| }, | |
| { | |
| "epoch": 69.66, | |
| "learning_rate": 2.898974358974359e-06, | |
| "loss": 0.0, | |
| "step": 14350 | |
| }, | |
| { | |
| "epoch": 69.78, | |
| "learning_rate": 2.8861538461538462e-06, | |
| "loss": 0.0, | |
| "step": 14375 | |
| }, | |
| { | |
| "epoch": 69.9, | |
| "learning_rate": 2.8733333333333335e-06, | |
| "loss": 0.0, | |
| "step": 14400 | |
| }, | |
| { | |
| "epoch": 70.02, | |
| "learning_rate": 2.8605128205128207e-06, | |
| "loss": 0.0, | |
| "step": 14425 | |
| }, | |
| { | |
| "epoch": 70.15, | |
| "learning_rate": 2.847692307692308e-06, | |
| "loss": 0.0, | |
| "step": 14450 | |
| }, | |
| { | |
| "epoch": 70.27, | |
| "learning_rate": 2.8348717948717952e-06, | |
| "loss": 0.0, | |
| "step": 14475 | |
| }, | |
| { | |
| "epoch": 70.39, | |
| "learning_rate": 2.8220512820512825e-06, | |
| "loss": 0.0, | |
| "step": 14500 | |
| }, | |
| { | |
| "epoch": 70.51, | |
| "learning_rate": 2.8092307692307697e-06, | |
| "loss": 0.0, | |
| "step": 14525 | |
| }, | |
| { | |
| "epoch": 70.63, | |
| "learning_rate": 2.796410256410257e-06, | |
| "loss": 0.0, | |
| "step": 14550 | |
| }, | |
| { | |
| "epoch": 70.75, | |
| "learning_rate": 2.7835897435897434e-06, | |
| "loss": 0.0, | |
| "step": 14575 | |
| }, | |
| { | |
| "epoch": 70.87, | |
| "learning_rate": 2.7707692307692306e-06, | |
| "loss": 0.0, | |
| "step": 14600 | |
| }, | |
| { | |
| "epoch": 71.0, | |
| "learning_rate": 2.757948717948718e-06, | |
| "loss": 0.0, | |
| "step": 14625 | |
| }, | |
| { | |
| "epoch": 71.12, | |
| "learning_rate": 2.745128205128205e-06, | |
| "loss": 0.0, | |
| "step": 14650 | |
| }, | |
| { | |
| "epoch": 71.24, | |
| "learning_rate": 2.7323076923076924e-06, | |
| "loss": 0.0, | |
| "step": 14675 | |
| }, | |
| { | |
| "epoch": 71.36, | |
| "learning_rate": 2.7194871794871796e-06, | |
| "loss": 0.0, | |
| "step": 14700 | |
| }, | |
| { | |
| "epoch": 71.48, | |
| "learning_rate": 2.706666666666667e-06, | |
| "loss": 0.0, | |
| "step": 14725 | |
| }, | |
| { | |
| "epoch": 71.6, | |
| "learning_rate": 2.693846153846154e-06, | |
| "loss": 0.0, | |
| "step": 14750 | |
| }, | |
| { | |
| "epoch": 71.72, | |
| "learning_rate": 2.6810256410256414e-06, | |
| "loss": 0.0, | |
| "step": 14775 | |
| }, | |
| { | |
| "epoch": 71.84, | |
| "learning_rate": 2.6682051282051286e-06, | |
| "loss": 0.0, | |
| "step": 14800 | |
| }, | |
| { | |
| "epoch": 71.97, | |
| "learning_rate": 2.655384615384616e-06, | |
| "loss": 0.0, | |
| "step": 14825 | |
| }, | |
| { | |
| "epoch": 72.09, | |
| "learning_rate": 2.642564102564103e-06, | |
| "loss": 0.0, | |
| "step": 14850 | |
| }, | |
| { | |
| "epoch": 72.21, | |
| "learning_rate": 2.6297435897435904e-06, | |
| "loss": 0.0, | |
| "step": 14875 | |
| }, | |
| { | |
| "epoch": 72.33, | |
| "learning_rate": 2.6169230769230768e-06, | |
| "loss": 0.0, | |
| "step": 14900 | |
| }, | |
| { | |
| "epoch": 72.45, | |
| "learning_rate": 2.604102564102564e-06, | |
| "loss": 0.0, | |
| "step": 14925 | |
| }, | |
| { | |
| "epoch": 72.57, | |
| "learning_rate": 2.5912820512820513e-06, | |
| "loss": 0.0, | |
| "step": 14950 | |
| }, | |
| { | |
| "epoch": 72.69, | |
| "learning_rate": 2.5784615384615385e-06, | |
| "loss": 0.0, | |
| "step": 14975 | |
| }, | |
| { | |
| "epoch": 72.82, | |
| "learning_rate": 2.5656410256410257e-06, | |
| "loss": 0.0, | |
| "step": 15000 | |
| }, | |
| { | |
| "epoch": 72.82, | |
| "eval_loss": 0.3523952066898346, | |
| "eval_runtime": 1397.4432, | |
| "eval_samples_per_second": 4.716, | |
| "eval_steps_per_second": 0.295, | |
| "eval_wer": 10.664668475839441, | |
| "step": 15000 | |
| }, | |
| { | |
| "epoch": 72.94, | |
| "learning_rate": 2.552820512820513e-06, | |
| "loss": 0.0, | |
| "step": 15025 | |
| }, | |
| { | |
| "epoch": 73.06, | |
| "learning_rate": 2.5400000000000002e-06, | |
| "loss": 0.0, | |
| "step": 15050 | |
| }, | |
| { | |
| "epoch": 73.18, | |
| "learning_rate": 2.5271794871794875e-06, | |
| "loss": 0.0, | |
| "step": 15075 | |
| }, | |
| { | |
| "epoch": 73.3, | |
| "learning_rate": 2.5143589743589747e-06, | |
| "loss": 0.0, | |
| "step": 15100 | |
| }, | |
| { | |
| "epoch": 73.42, | |
| "learning_rate": 2.501538461538462e-06, | |
| "loss": 0.0, | |
| "step": 15125 | |
| }, | |
| { | |
| "epoch": 73.54, | |
| "learning_rate": 2.488717948717949e-06, | |
| "loss": 0.0, | |
| "step": 15150 | |
| }, | |
| { | |
| "epoch": 73.67, | |
| "learning_rate": 2.475897435897436e-06, | |
| "loss": 0.0, | |
| "step": 15175 | |
| }, | |
| { | |
| "epoch": 73.79, | |
| "learning_rate": 2.4630769230769233e-06, | |
| "loss": 0.0, | |
| "step": 15200 | |
| }, | |
| { | |
| "epoch": 73.91, | |
| "learning_rate": 2.4502564102564106e-06, | |
| "loss": 0.0, | |
| "step": 15225 | |
| }, | |
| { | |
| "epoch": 74.03, | |
| "learning_rate": 2.437435897435898e-06, | |
| "loss": 0.0, | |
| "step": 15250 | |
| }, | |
| { | |
| "epoch": 74.15, | |
| "learning_rate": 2.4246153846153846e-06, | |
| "loss": 0.0, | |
| "step": 15275 | |
| }, | |
| { | |
| "epoch": 74.27, | |
| "learning_rate": 2.411794871794872e-06, | |
| "loss": 0.0, | |
| "step": 15300 | |
| }, | |
| { | |
| "epoch": 74.39, | |
| "learning_rate": 2.398974358974359e-06, | |
| "loss": 0.0, | |
| "step": 15325 | |
| }, | |
| { | |
| "epoch": 74.51, | |
| "learning_rate": 2.3861538461538464e-06, | |
| "loss": 0.0, | |
| "step": 15350 | |
| }, | |
| { | |
| "epoch": 74.64, | |
| "learning_rate": 2.3733333333333336e-06, | |
| "loss": 0.0, | |
| "step": 15375 | |
| }, | |
| { | |
| "epoch": 74.76, | |
| "learning_rate": 2.360512820512821e-06, | |
| "loss": 0.0, | |
| "step": 15400 | |
| }, | |
| { | |
| "epoch": 74.88, | |
| "learning_rate": 2.3476923076923077e-06, | |
| "loss": 0.0, | |
| "step": 15425 | |
| }, | |
| { | |
| "epoch": 75.0, | |
| "learning_rate": 2.334871794871795e-06, | |
| "loss": 0.0, | |
| "step": 15450 | |
| }, | |
| { | |
| "epoch": 75.12, | |
| "learning_rate": 2.322051282051282e-06, | |
| "loss": 0.0, | |
| "step": 15475 | |
| }, | |
| { | |
| "epoch": 75.24, | |
| "learning_rate": 2.3092307692307694e-06, | |
| "loss": 0.0, | |
| "step": 15500 | |
| }, | |
| { | |
| "epoch": 75.36, | |
| "learning_rate": 2.2964102564102567e-06, | |
| "loss": 0.0, | |
| "step": 15525 | |
| }, | |
| { | |
| "epoch": 75.49, | |
| "learning_rate": 2.283589743589744e-06, | |
| "loss": 0.0, | |
| "step": 15550 | |
| }, | |
| { | |
| "epoch": 75.61, | |
| "learning_rate": 2.2707692307692308e-06, | |
| "loss": 0.0, | |
| "step": 15575 | |
| }, | |
| { | |
| "epoch": 75.73, | |
| "learning_rate": 2.257948717948718e-06, | |
| "loss": 0.0, | |
| "step": 15600 | |
| }, | |
| { | |
| "epoch": 75.85, | |
| "learning_rate": 2.2451282051282053e-06, | |
| "loss": 0.0, | |
| "step": 15625 | |
| }, | |
| { | |
| "epoch": 75.97, | |
| "learning_rate": 2.2323076923076925e-06, | |
| "loss": 0.0, | |
| "step": 15650 | |
| }, | |
| { | |
| "epoch": 76.09, | |
| "learning_rate": 2.2194871794871797e-06, | |
| "loss": 0.0, | |
| "step": 15675 | |
| }, | |
| { | |
| "epoch": 76.21, | |
| "learning_rate": 2.206666666666667e-06, | |
| "loss": 0.0, | |
| "step": 15700 | |
| }, | |
| { | |
| "epoch": 76.33, | |
| "learning_rate": 2.193846153846154e-06, | |
| "loss": 0.0, | |
| "step": 15725 | |
| }, | |
| { | |
| "epoch": 76.46, | |
| "learning_rate": 2.181025641025641e-06, | |
| "loss": 0.0, | |
| "step": 15750 | |
| }, | |
| { | |
| "epoch": 76.58, | |
| "learning_rate": 2.1682051282051283e-06, | |
| "loss": 0.0, | |
| "step": 15775 | |
| }, | |
| { | |
| "epoch": 76.7, | |
| "learning_rate": 2.1553846153846156e-06, | |
| "loss": 0.0, | |
| "step": 15800 | |
| }, | |
| { | |
| "epoch": 76.82, | |
| "learning_rate": 2.142564102564103e-06, | |
| "loss": 0.0, | |
| "step": 15825 | |
| }, | |
| { | |
| "epoch": 76.94, | |
| "learning_rate": 2.12974358974359e-06, | |
| "loss": 0.0, | |
| "step": 15850 | |
| }, | |
| { | |
| "epoch": 77.06, | |
| "learning_rate": 2.116923076923077e-06, | |
| "loss": 0.0, | |
| "step": 15875 | |
| }, | |
| { | |
| "epoch": 77.18, | |
| "learning_rate": 2.104102564102564e-06, | |
| "loss": 0.0, | |
| "step": 15900 | |
| }, | |
| { | |
| "epoch": 77.31, | |
| "learning_rate": 2.0912820512820514e-06, | |
| "loss": 0.0, | |
| "step": 15925 | |
| }, | |
| { | |
| "epoch": 77.43, | |
| "learning_rate": 2.0784615384615386e-06, | |
| "loss": 0.0, | |
| "step": 15950 | |
| }, | |
| { | |
| "epoch": 77.55, | |
| "learning_rate": 2.065641025641026e-06, | |
| "loss": 0.0, | |
| "step": 15975 | |
| }, | |
| { | |
| "epoch": 77.67, | |
| "learning_rate": 2.052820512820513e-06, | |
| "loss": 0.0, | |
| "step": 16000 | |
| }, | |
| { | |
| "epoch": 77.67, | |
| "eval_loss": 0.36066606640815735, | |
| "eval_runtime": 1397.551, | |
| "eval_samples_per_second": 4.716, | |
| "eval_steps_per_second": 0.295, | |
| "eval_wer": 10.656567702215563, | |
| "step": 16000 | |
| }, | |
| { | |
| "epoch": 77.79, | |
| "learning_rate": 2.04e-06, | |
| "loss": 0.0, | |
| "step": 16025 | |
| }, | |
| { | |
| "epoch": 77.91, | |
| "learning_rate": 2.027179487179487e-06, | |
| "loss": 0.0, | |
| "step": 16050 | |
| }, | |
| { | |
| "epoch": 78.03, | |
| "learning_rate": 2.0143589743589744e-06, | |
| "loss": 0.0, | |
| "step": 16075 | |
| }, | |
| { | |
| "epoch": 78.16, | |
| "learning_rate": 2.0015384615384617e-06, | |
| "loss": 0.0, | |
| "step": 16100 | |
| }, | |
| { | |
| "epoch": 78.28, | |
| "learning_rate": 1.988717948717949e-06, | |
| "loss": 0.0, | |
| "step": 16125 | |
| }, | |
| { | |
| "epoch": 78.4, | |
| "learning_rate": 1.975897435897436e-06, | |
| "loss": 0.0, | |
| "step": 16150 | |
| }, | |
| { | |
| "epoch": 78.52, | |
| "learning_rate": 1.963076923076923e-06, | |
| "loss": 0.0, | |
| "step": 16175 | |
| }, | |
| { | |
| "epoch": 78.64, | |
| "learning_rate": 1.9502564102564103e-06, | |
| "loss": 0.0, | |
| "step": 16200 | |
| }, | |
| { | |
| "epoch": 78.76, | |
| "learning_rate": 1.9374358974358975e-06, | |
| "loss": 0.0, | |
| "step": 16225 | |
| }, | |
| { | |
| "epoch": 78.88, | |
| "learning_rate": 1.9246153846153848e-06, | |
| "loss": 0.0, | |
| "step": 16250 | |
| }, | |
| { | |
| "epoch": 79.0, | |
| "learning_rate": 1.911794871794872e-06, | |
| "loss": 0.0, | |
| "step": 16275 | |
| }, | |
| { | |
| "epoch": 79.13, | |
| "learning_rate": 1.8989743589743593e-06, | |
| "loss": 0.0, | |
| "step": 16300 | |
| }, | |
| { | |
| "epoch": 79.25, | |
| "learning_rate": 1.8861538461538465e-06, | |
| "loss": 0.0, | |
| "step": 16325 | |
| }, | |
| { | |
| "epoch": 79.37, | |
| "learning_rate": 1.8733333333333333e-06, | |
| "loss": 0.0, | |
| "step": 16350 | |
| }, | |
| { | |
| "epoch": 79.49, | |
| "learning_rate": 1.8605128205128206e-06, | |
| "loss": 0.0, | |
| "step": 16375 | |
| }, | |
| { | |
| "epoch": 79.61, | |
| "learning_rate": 1.8476923076923078e-06, | |
| "loss": 0.0, | |
| "step": 16400 | |
| }, | |
| { | |
| "epoch": 79.73, | |
| "learning_rate": 1.834871794871795e-06, | |
| "loss": 0.0, | |
| "step": 16425 | |
| }, | |
| { | |
| "epoch": 79.85, | |
| "learning_rate": 1.8220512820512823e-06, | |
| "loss": 0.0, | |
| "step": 16450 | |
| }, | |
| { | |
| "epoch": 79.98, | |
| "learning_rate": 1.8092307692307696e-06, | |
| "loss": 0.0, | |
| "step": 16475 | |
| }, | |
| { | |
| "epoch": 80.1, | |
| "learning_rate": 1.7964102564102564e-06, | |
| "loss": 0.0, | |
| "step": 16500 | |
| }, | |
| { | |
| "epoch": 80.22, | |
| "learning_rate": 1.7835897435897436e-06, | |
| "loss": 0.0, | |
| "step": 16525 | |
| }, | |
| { | |
| "epoch": 80.34, | |
| "learning_rate": 1.7707692307692309e-06, | |
| "loss": 0.0, | |
| "step": 16550 | |
| }, | |
| { | |
| "epoch": 80.46, | |
| "learning_rate": 1.7579487179487181e-06, | |
| "loss": 0.0, | |
| "step": 16575 | |
| }, | |
| { | |
| "epoch": 80.58, | |
| "learning_rate": 1.7451282051282054e-06, | |
| "loss": 0.0, | |
| "step": 16600 | |
| }, | |
| { | |
| "epoch": 80.7, | |
| "learning_rate": 1.7323076923076926e-06, | |
| "loss": 0.0, | |
| "step": 16625 | |
| }, | |
| { | |
| "epoch": 80.83, | |
| "learning_rate": 1.7194871794871795e-06, | |
| "loss": 0.0, | |
| "step": 16650 | |
| }, | |
| { | |
| "epoch": 80.95, | |
| "learning_rate": 1.7066666666666667e-06, | |
| "loss": 0.0, | |
| "step": 16675 | |
| }, | |
| { | |
| "epoch": 81.07, | |
| "learning_rate": 1.693846153846154e-06, | |
| "loss": 0.0, | |
| "step": 16700 | |
| }, | |
| { | |
| "epoch": 81.19, | |
| "learning_rate": 1.6810256410256412e-06, | |
| "loss": 0.0, | |
| "step": 16725 | |
| }, | |
| { | |
| "epoch": 81.31, | |
| "learning_rate": 1.6682051282051284e-06, | |
| "loss": 0.0, | |
| "step": 16750 | |
| }, | |
| { | |
| "epoch": 81.43, | |
| "learning_rate": 1.6553846153846157e-06, | |
| "loss": 0.0, | |
| "step": 16775 | |
| }, | |
| { | |
| "epoch": 81.55, | |
| "learning_rate": 1.6425641025641025e-06, | |
| "loss": 0.0, | |
| "step": 16800 | |
| }, | |
| { | |
| "epoch": 81.67, | |
| "learning_rate": 1.6297435897435898e-06, | |
| "loss": 0.0, | |
| "step": 16825 | |
| }, | |
| { | |
| "epoch": 81.8, | |
| "learning_rate": 1.616923076923077e-06, | |
| "loss": 0.0, | |
| "step": 16850 | |
| }, | |
| { | |
| "epoch": 81.92, | |
| "learning_rate": 1.6041025641025643e-06, | |
| "loss": 0.0, | |
| "step": 16875 | |
| }, | |
| { | |
| "epoch": 82.04, | |
| "learning_rate": 1.5912820512820515e-06, | |
| "loss": 0.0, | |
| "step": 16900 | |
| }, | |
| { | |
| "epoch": 82.16, | |
| "learning_rate": 1.5784615384615388e-06, | |
| "loss": 0.0, | |
| "step": 16925 | |
| }, | |
| { | |
| "epoch": 82.28, | |
| "learning_rate": 1.5656410256410256e-06, | |
| "loss": 0.0, | |
| "step": 16950 | |
| }, | |
| { | |
| "epoch": 82.4, | |
| "learning_rate": 1.5528205128205128e-06, | |
| "loss": 0.0, | |
| "step": 16975 | |
| }, | |
| { | |
| "epoch": 82.52, | |
| "learning_rate": 1.54e-06, | |
| "loss": 0.0, | |
| "step": 17000 | |
| }, | |
| { | |
| "epoch": 82.52, | |
| "eval_loss": 0.36754557490348816, | |
| "eval_runtime": 1397.4337, | |
| "eval_samples_per_second": 4.717, | |
| "eval_steps_per_second": 0.295, | |
| "eval_wer": 10.612013447284216, | |
| "step": 17000 | |
| }, | |
| { | |
| "epoch": 82.65, | |
| "learning_rate": 1.5271794871794873e-06, | |
| "loss": 0.0, | |
| "step": 17025 | |
| }, | |
| { | |
| "epoch": 82.77, | |
| "learning_rate": 1.5143589743589746e-06, | |
| "loss": 0.0, | |
| "step": 17050 | |
| }, | |
| { | |
| "epoch": 82.89, | |
| "learning_rate": 1.5015384615384618e-06, | |
| "loss": 0.0, | |
| "step": 17075 | |
| }, | |
| { | |
| "epoch": 83.01, | |
| "learning_rate": 1.4887179487179486e-06, | |
| "loss": 0.0, | |
| "step": 17100 | |
| }, | |
| { | |
| "epoch": 83.13, | |
| "learning_rate": 1.475897435897436e-06, | |
| "loss": 0.0, | |
| "step": 17125 | |
| }, | |
| { | |
| "epoch": 83.25, | |
| "learning_rate": 1.4630769230769231e-06, | |
| "loss": 0.0, | |
| "step": 17150 | |
| }, | |
| { | |
| "epoch": 83.37, | |
| "learning_rate": 1.4502564102564104e-06, | |
| "loss": 0.0, | |
| "step": 17175 | |
| }, | |
| { | |
| "epoch": 83.5, | |
| "learning_rate": 1.4374358974358976e-06, | |
| "loss": 0.0, | |
| "step": 17200 | |
| }, | |
| { | |
| "epoch": 83.62, | |
| "learning_rate": 1.4246153846153849e-06, | |
| "loss": 0.0, | |
| "step": 17225 | |
| }, | |
| { | |
| "epoch": 83.74, | |
| "learning_rate": 1.4117948717948717e-06, | |
| "loss": 0.0, | |
| "step": 17250 | |
| }, | |
| { | |
| "epoch": 83.86, | |
| "learning_rate": 1.398974358974359e-06, | |
| "loss": 0.0, | |
| "step": 17275 | |
| }, | |
| { | |
| "epoch": 83.98, | |
| "learning_rate": 1.3861538461538462e-06, | |
| "loss": 0.0, | |
| "step": 17300 | |
| }, | |
| { | |
| "epoch": 84.1, | |
| "learning_rate": 1.3733333333333335e-06, | |
| "loss": 0.0, | |
| "step": 17325 | |
| }, | |
| { | |
| "epoch": 84.22, | |
| "learning_rate": 1.3605128205128207e-06, | |
| "loss": 0.0, | |
| "step": 17350 | |
| }, | |
| { | |
| "epoch": 84.34, | |
| "learning_rate": 1.347692307692308e-06, | |
| "loss": 0.0, | |
| "step": 17375 | |
| }, | |
| { | |
| "epoch": 84.47, | |
| "learning_rate": 1.3348717948717948e-06, | |
| "loss": 0.0, | |
| "step": 17400 | |
| }, | |
| { | |
| "epoch": 84.59, | |
| "learning_rate": 1.322051282051282e-06, | |
| "loss": 0.0, | |
| "step": 17425 | |
| }, | |
| { | |
| "epoch": 84.71, | |
| "learning_rate": 1.3092307692307693e-06, | |
| "loss": 0.0, | |
| "step": 17450 | |
| }, | |
| { | |
| "epoch": 84.83, | |
| "learning_rate": 1.2964102564102565e-06, | |
| "loss": 0.0, | |
| "step": 17475 | |
| }, | |
| { | |
| "epoch": 84.95, | |
| "learning_rate": 1.2835897435897438e-06, | |
| "loss": 0.0, | |
| "step": 17500 | |
| }, | |
| { | |
| "epoch": 85.07, | |
| "learning_rate": 1.270769230769231e-06, | |
| "loss": 0.0, | |
| "step": 17525 | |
| }, | |
| { | |
| "epoch": 85.19, | |
| "learning_rate": 1.2579487179487178e-06, | |
| "loss": 0.0, | |
| "step": 17550 | |
| }, | |
| { | |
| "epoch": 85.32, | |
| "learning_rate": 1.2451282051282053e-06, | |
| "loss": 0.0, | |
| "step": 17575 | |
| }, | |
| { | |
| "epoch": 85.44, | |
| "learning_rate": 1.2323076923076923e-06, | |
| "loss": 0.0, | |
| "step": 17600 | |
| }, | |
| { | |
| "epoch": 85.56, | |
| "learning_rate": 1.2194871794871796e-06, | |
| "loss": 0.0, | |
| "step": 17625 | |
| }, | |
| { | |
| "epoch": 85.68, | |
| "learning_rate": 1.2066666666666668e-06, | |
| "loss": 0.0, | |
| "step": 17650 | |
| }, | |
| { | |
| "epoch": 85.8, | |
| "learning_rate": 1.1938461538461539e-06, | |
| "loss": 0.0, | |
| "step": 17675 | |
| }, | |
| { | |
| "epoch": 85.92, | |
| "learning_rate": 1.1810256410256411e-06, | |
| "loss": 0.0, | |
| "step": 17700 | |
| }, | |
| { | |
| "epoch": 86.04, | |
| "learning_rate": 1.1682051282051284e-06, | |
| "loss": 0.0, | |
| "step": 17725 | |
| }, | |
| { | |
| "epoch": 86.17, | |
| "learning_rate": 1.1553846153846154e-06, | |
| "loss": 0.0, | |
| "step": 17750 | |
| }, | |
| { | |
| "epoch": 86.29, | |
| "learning_rate": 1.1425641025641026e-06, | |
| "loss": 0.0, | |
| "step": 17775 | |
| }, | |
| { | |
| "epoch": 86.41, | |
| "learning_rate": 1.12974358974359e-06, | |
| "loss": 0.0, | |
| "step": 17800 | |
| }, | |
| { | |
| "epoch": 86.53, | |
| "learning_rate": 1.116923076923077e-06, | |
| "loss": 0.0, | |
| "step": 17825 | |
| }, | |
| { | |
| "epoch": 86.65, | |
| "learning_rate": 1.1041025641025642e-06, | |
| "loss": 0.0, | |
| "step": 17850 | |
| }, | |
| { | |
| "epoch": 86.77, | |
| "learning_rate": 1.0912820512820514e-06, | |
| "loss": 0.0, | |
| "step": 17875 | |
| }, | |
| { | |
| "epoch": 86.89, | |
| "learning_rate": 1.0784615384615385e-06, | |
| "loss": 0.0, | |
| "step": 17900 | |
| }, | |
| { | |
| "epoch": 87.01, | |
| "learning_rate": 1.0656410256410257e-06, | |
| "loss": 0.0, | |
| "step": 17925 | |
| }, | |
| { | |
| "epoch": 87.14, | |
| "learning_rate": 1.052820512820513e-06, | |
| "loss": 0.0, | |
| "step": 17950 | |
| }, | |
| { | |
| "epoch": 87.26, | |
| "learning_rate": 1.04e-06, | |
| "loss": 0.0, | |
| "step": 17975 | |
| }, | |
| { | |
| "epoch": 87.38, | |
| "learning_rate": 1.0271794871794872e-06, | |
| "loss": 0.0, | |
| "step": 18000 | |
| }, | |
| { | |
| "epoch": 87.38, | |
| "eval_loss": 0.3736671507358551, | |
| "eval_runtime": 1397.7933, | |
| "eval_samples_per_second": 4.715, | |
| "eval_steps_per_second": 0.295, | |
| "eval_wer": 10.614038640690186, | |
| "step": 18000 | |
| }, | |
| { | |
| "epoch": 87.5, | |
| "learning_rate": 1.0143589743589745e-06, | |
| "loss": 0.0, | |
| "step": 18025 | |
| }, | |
| { | |
| "epoch": 87.62, | |
| "learning_rate": 1.0015384615384615e-06, | |
| "loss": 0.0, | |
| "step": 18050 | |
| }, | |
| { | |
| "epoch": 87.74, | |
| "learning_rate": 9.887179487179488e-07, | |
| "loss": 0.0, | |
| "step": 18075 | |
| }, | |
| { | |
| "epoch": 87.86, | |
| "learning_rate": 9.75897435897436e-07, | |
| "loss": 0.0, | |
| "step": 18100 | |
| }, | |
| { | |
| "epoch": 87.99, | |
| "learning_rate": 9.63076923076923e-07, | |
| "loss": 0.0, | |
| "step": 18125 | |
| }, | |
| { | |
| "epoch": 88.11, | |
| "learning_rate": 9.502564102564103e-07, | |
| "loss": 0.0, | |
| "step": 18150 | |
| }, | |
| { | |
| "epoch": 88.23, | |
| "learning_rate": 9.374358974358976e-07, | |
| "loss": 0.0, | |
| "step": 18175 | |
| }, | |
| { | |
| "epoch": 88.35, | |
| "learning_rate": 9.246153846153846e-07, | |
| "loss": 0.0, | |
| "step": 18200 | |
| }, | |
| { | |
| "epoch": 88.47, | |
| "learning_rate": 9.117948717948718e-07, | |
| "loss": 0.0, | |
| "step": 18225 | |
| }, | |
| { | |
| "epoch": 88.59, | |
| "learning_rate": 8.989743589743591e-07, | |
| "loss": 0.0, | |
| "step": 18250 | |
| }, | |
| { | |
| "epoch": 88.71, | |
| "learning_rate": 8.861538461538461e-07, | |
| "loss": 0.0, | |
| "step": 18275 | |
| }, | |
| { | |
| "epoch": 88.83, | |
| "learning_rate": 8.733333333333334e-07, | |
| "loss": 0.0, | |
| "step": 18300 | |
| }, | |
| { | |
| "epoch": 88.96, | |
| "learning_rate": 8.605128205128206e-07, | |
| "loss": 0.0, | |
| "step": 18325 | |
| }, | |
| { | |
| "epoch": 89.08, | |
| "learning_rate": 8.476923076923077e-07, | |
| "loss": 0.0, | |
| "step": 18350 | |
| }, | |
| { | |
| "epoch": 89.2, | |
| "learning_rate": 8.348717948717949e-07, | |
| "loss": 0.0, | |
| "step": 18375 | |
| }, | |
| { | |
| "epoch": 89.32, | |
| "learning_rate": 8.220512820512822e-07, | |
| "loss": 0.0, | |
| "step": 18400 | |
| }, | |
| { | |
| "epoch": 89.44, | |
| "learning_rate": 8.092307692307692e-07, | |
| "loss": 0.0, | |
| "step": 18425 | |
| }, | |
| { | |
| "epoch": 89.56, | |
| "learning_rate": 7.964102564102564e-07, | |
| "loss": 0.0, | |
| "step": 18450 | |
| }, | |
| { | |
| "epoch": 89.68, | |
| "learning_rate": 7.835897435897437e-07, | |
| "loss": 0.0, | |
| "step": 18475 | |
| }, | |
| { | |
| "epoch": 89.81, | |
| "learning_rate": 7.707692307692307e-07, | |
| "loss": 0.0, | |
| "step": 18500 | |
| }, | |
| { | |
| "epoch": 89.93, | |
| "learning_rate": 7.57948717948718e-07, | |
| "loss": 0.0, | |
| "step": 18525 | |
| }, | |
| { | |
| "epoch": 90.05, | |
| "learning_rate": 7.451282051282052e-07, | |
| "loss": 0.0, | |
| "step": 18550 | |
| }, | |
| { | |
| "epoch": 90.17, | |
| "learning_rate": 7.323076923076923e-07, | |
| "loss": 0.0, | |
| "step": 18575 | |
| }, | |
| { | |
| "epoch": 90.29, | |
| "learning_rate": 7.194871794871795e-07, | |
| "loss": 0.0, | |
| "step": 18600 | |
| }, | |
| { | |
| "epoch": 90.41, | |
| "learning_rate": 7.066666666666667e-07, | |
| "loss": 0.0, | |
| "step": 18625 | |
| }, | |
| { | |
| "epoch": 90.53, | |
| "learning_rate": 6.938461538461538e-07, | |
| "loss": 0.0, | |
| "step": 18650 | |
| }, | |
| { | |
| "epoch": 90.66, | |
| "learning_rate": 6.81025641025641e-07, | |
| "loss": 0.0, | |
| "step": 18675 | |
| }, | |
| { | |
| "epoch": 90.78, | |
| "learning_rate": 6.682051282051283e-07, | |
| "loss": 0.0, | |
| "step": 18700 | |
| }, | |
| { | |
| "epoch": 90.9, | |
| "learning_rate": 6.553846153846154e-07, | |
| "loss": 0.0, | |
| "step": 18725 | |
| }, | |
| { | |
| "epoch": 91.02, | |
| "learning_rate": 6.425641025641026e-07, | |
| "loss": 0.0, | |
| "step": 18750 | |
| }, | |
| { | |
| "epoch": 91.14, | |
| "learning_rate": 6.297435897435898e-07, | |
| "loss": 0.0, | |
| "step": 18775 | |
| }, | |
| { | |
| "epoch": 91.26, | |
| "learning_rate": 6.16923076923077e-07, | |
| "loss": 0.0, | |
| "step": 18800 | |
| }, | |
| { | |
| "epoch": 91.38, | |
| "learning_rate": 6.041025641025641e-07, | |
| "loss": 0.0, | |
| "step": 18825 | |
| }, | |
| { | |
| "epoch": 91.5, | |
| "learning_rate": 5.912820512820513e-07, | |
| "loss": 0.0, | |
| "step": 18850 | |
| }, | |
| { | |
| "epoch": 91.63, | |
| "learning_rate": 5.784615384615385e-07, | |
| "loss": 0.0, | |
| "step": 18875 | |
| }, | |
| { | |
| "epoch": 91.75, | |
| "learning_rate": 5.656410256410256e-07, | |
| "loss": 0.0, | |
| "step": 18900 | |
| }, | |
| { | |
| "epoch": 91.87, | |
| "learning_rate": 5.528205128205129e-07, | |
| "loss": 0.0, | |
| "step": 18925 | |
| }, | |
| { | |
| "epoch": 91.99, | |
| "learning_rate": 5.4e-07, | |
| "loss": 0.0, | |
| "step": 18950 | |
| }, | |
| { | |
| "epoch": 92.11, | |
| "learning_rate": 5.271794871794872e-07, | |
| "loss": 0.0, | |
| "step": 18975 | |
| }, | |
| { | |
| "epoch": 92.23, | |
| "learning_rate": 5.143589743589744e-07, | |
| "loss": 0.0, | |
| "step": 19000 | |
| }, | |
| { | |
| "epoch": 92.23, | |
| "eval_loss": 0.37819185853004456, | |
| "eval_runtime": 1397.9151, | |
| "eval_samples_per_second": 4.715, | |
| "eval_steps_per_second": 0.295, | |
| "eval_wer": 10.618089027502126, | |
| "step": 19000 | |
| }, | |
| { | |
| "epoch": 92.35, | |
| "learning_rate": 5.015384615384615e-07, | |
| "loss": 0.0, | |
| "step": 19025 | |
| }, | |
| { | |
| "epoch": 92.48, | |
| "learning_rate": 4.887179487179487e-07, | |
| "loss": 0.0, | |
| "step": 19050 | |
| }, | |
| { | |
| "epoch": 92.6, | |
| "learning_rate": 4.7589743589743594e-07, | |
| "loss": 0.0, | |
| "step": 19075 | |
| }, | |
| { | |
| "epoch": 92.72, | |
| "learning_rate": 4.630769230769231e-07, | |
| "loss": 0.0, | |
| "step": 19100 | |
| }, | |
| { | |
| "epoch": 92.84, | |
| "learning_rate": 4.502564102564103e-07, | |
| "loss": 0.0, | |
| "step": 19125 | |
| }, | |
| { | |
| "epoch": 92.96, | |
| "learning_rate": 4.3743589743589747e-07, | |
| "loss": 0.0, | |
| "step": 19150 | |
| }, | |
| { | |
| "epoch": 93.08, | |
| "learning_rate": 4.246153846153846e-07, | |
| "loss": 0.0, | |
| "step": 19175 | |
| }, | |
| { | |
| "epoch": 93.2, | |
| "learning_rate": 4.1179487179487186e-07, | |
| "loss": 0.0, | |
| "step": 19200 | |
| }, | |
| { | |
| "epoch": 93.33, | |
| "learning_rate": 3.98974358974359e-07, | |
| "loss": 0.0, | |
| "step": 19225 | |
| }, | |
| { | |
| "epoch": 93.45, | |
| "learning_rate": 3.8615384615384615e-07, | |
| "loss": 0.0, | |
| "step": 19250 | |
| }, | |
| { | |
| "epoch": 93.57, | |
| "learning_rate": 3.733333333333334e-07, | |
| "loss": 0.0, | |
| "step": 19275 | |
| }, | |
| { | |
| "epoch": 93.69, | |
| "learning_rate": 3.6051282051282054e-07, | |
| "loss": 0.0, | |
| "step": 19300 | |
| }, | |
| { | |
| "epoch": 93.81, | |
| "learning_rate": 3.476923076923077e-07, | |
| "loss": 0.0, | |
| "step": 19325 | |
| }, | |
| { | |
| "epoch": 93.93, | |
| "learning_rate": 3.348717948717949e-07, | |
| "loss": 0.0, | |
| "step": 19350 | |
| }, | |
| { | |
| "epoch": 94.05, | |
| "learning_rate": 3.2205128205128207e-07, | |
| "loss": 0.0, | |
| "step": 19375 | |
| }, | |
| { | |
| "epoch": 94.17, | |
| "learning_rate": 3.0923076923076926e-07, | |
| "loss": 0.0, | |
| "step": 19400 | |
| }, | |
| { | |
| "epoch": 94.3, | |
| "learning_rate": 2.964102564102564e-07, | |
| "loss": 0.0, | |
| "step": 19425 | |
| }, | |
| { | |
| "epoch": 94.42, | |
| "learning_rate": 2.835897435897436e-07, | |
| "loss": 0.0, | |
| "step": 19450 | |
| }, | |
| { | |
| "epoch": 94.54, | |
| "learning_rate": 2.707692307692308e-07, | |
| "loss": 0.0, | |
| "step": 19475 | |
| }, | |
| { | |
| "epoch": 94.66, | |
| "learning_rate": 2.5794871794871794e-07, | |
| "loss": 0.0, | |
| "step": 19500 | |
| }, | |
| { | |
| "epoch": 94.78, | |
| "learning_rate": 2.4512820512820513e-07, | |
| "loss": 0.0, | |
| "step": 19525 | |
| }, | |
| { | |
| "epoch": 94.9, | |
| "learning_rate": 2.3230769230769233e-07, | |
| "loss": 0.0, | |
| "step": 19550 | |
| }, | |
| { | |
| "epoch": 95.02, | |
| "learning_rate": 2.194871794871795e-07, | |
| "loss": 0.0, | |
| "step": 19575 | |
| }, | |
| { | |
| "epoch": 95.15, | |
| "learning_rate": 2.066666666666667e-07, | |
| "loss": 0.0, | |
| "step": 19600 | |
| }, | |
| { | |
| "epoch": 95.27, | |
| "learning_rate": 1.9384615384615386e-07, | |
| "loss": 0.0, | |
| "step": 19625 | |
| }, | |
| { | |
| "epoch": 95.39, | |
| "learning_rate": 1.8102564102564103e-07, | |
| "loss": 0.0, | |
| "step": 19650 | |
| }, | |
| { | |
| "epoch": 95.51, | |
| "learning_rate": 1.6820512820512822e-07, | |
| "loss": 0.0, | |
| "step": 19675 | |
| }, | |
| { | |
| "epoch": 95.63, | |
| "learning_rate": 1.553846153846154e-07, | |
| "loss": 0.0, | |
| "step": 19700 | |
| }, | |
| { | |
| "epoch": 95.75, | |
| "learning_rate": 1.4256410256410259e-07, | |
| "loss": 0.0, | |
| "step": 19725 | |
| }, | |
| { | |
| "epoch": 95.87, | |
| "learning_rate": 1.2974358974358975e-07, | |
| "loss": 0.0, | |
| "step": 19750 | |
| }, | |
| { | |
| "epoch": 96.0, | |
| "learning_rate": 1.1692307692307694e-07, | |
| "loss": 0.0, | |
| "step": 19775 | |
| }, | |
| { | |
| "epoch": 96.12, | |
| "learning_rate": 1.041025641025641e-07, | |
| "loss": 0.0, | |
| "step": 19800 | |
| }, | |
| { | |
| "epoch": 96.24, | |
| "learning_rate": 9.128205128205129e-08, | |
| "loss": 0.0, | |
| "step": 19825 | |
| }, | |
| { | |
| "epoch": 96.36, | |
| "learning_rate": 7.846153846153847e-08, | |
| "loss": 0.0, | |
| "step": 19850 | |
| }, | |
| { | |
| "epoch": 96.48, | |
| "learning_rate": 6.564102564102564e-08, | |
| "loss": 0.0, | |
| "step": 19875 | |
| }, | |
| { | |
| "epoch": 96.6, | |
| "learning_rate": 5.2820512820512826e-08, | |
| "loss": 0.0, | |
| "step": 19900 | |
| }, | |
| { | |
| "epoch": 96.72, | |
| "learning_rate": 4e-08, | |
| "loss": 0.0, | |
| "step": 19925 | |
| }, | |
| { | |
| "epoch": 96.84, | |
| "learning_rate": 2.717948717948718e-08, | |
| "loss": 0.0, | |
| "step": 19950 | |
| }, | |
| { | |
| "epoch": 96.97, | |
| "learning_rate": 1.4358974358974361e-08, | |
| "loss": 0.0, | |
| "step": 19975 | |
| }, | |
| { | |
| "epoch": 97.09, | |
| "learning_rate": 1.5384615384615387e-09, | |
| "loss": 0.0, | |
| "step": 20000 | |
| }, | |
| { | |
| "epoch": 97.09, | |
| "eval_loss": 0.38027650117874146, | |
| "eval_runtime": 1398.6239, | |
| "eval_samples_per_second": 4.712, | |
| "eval_steps_per_second": 0.295, | |
| "eval_wer": 10.620114220908098, | |
| "step": 20000 | |
| }, | |
| { | |
| "epoch": 97.09, | |
| "step": 20000, | |
| "total_flos": 4.3481387177253274e+21, | |
| "train_loss": 0.009814149574722979, | |
| "train_runtime": 138976.8667, | |
| "train_samples_per_second": 9.21, | |
| "train_steps_per_second": 0.144 | |
| } | |
| ], | |
| "logging_steps": 25, | |
| "max_steps": 20000, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 98, | |
| "save_steps": 1000, | |
| "total_flos": 4.3481387177253274e+21, | |
| "train_batch_size": 32, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
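
The JSON above is a Hugging Face `Trainer` state log. As a quick aid for consuming it — a minimal sketch, assuming the document is saved verbatim as `trainer_state.json` (that filename is an assumption; the field names used below are exactly the keys visible in the log itself) — the snippet pulls the eval records out of `log_history` and reports the checkpoint with the lowest word error rate, which for this run is step 17000 at eval_wer ≈ 10.612:

```python
import json

# Load the trainer state (filename assumed; this is the standard name
# that transformers.Trainer writes alongside checkpoints).
with open("trainer_state.json") as f:
    state = json.load(f)

# Eval records are the log_history entries carrying an "eval_wer" key;
# plain training records only have loss/learning_rate/step.
evals = [e for e in state["log_history"] if "eval_wer" in e]

for e in evals:
    print(f"step {e['step']:>6}  eval_loss {e['eval_loss']:.4f}  WER {e['eval_wer']:.4f}")

# Best checkpoint by word error rate (lower is better).
best = min(evals, key=lambda e: e["eval_wer"])
print(f"best: step {best['step']} with WER {best['eval_wer']:.4f}")
```

Note that eval WER bottoms out at step 17000 and drifts slightly upward through step 20000 while eval_loss keeps rising, so the earlier checkpoint, not the final one, is the natural pick for deployment.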