{
  "best_metric": 19.351588788812993,
  "best_model_checkpoint": "./checkpoint-5000",
  "epoch": 29.239766081871345,
  "eval_steps": 1000,
  "global_step": 5000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.15,
      "learning_rate": 4.2000000000000006e-07,
      "loss": 7.9039,
      "step": 25
    },
    {
      "epoch": 0.29,
      "learning_rate": 9.200000000000001e-07,
      "loss": 6.9602,
      "step": 50
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.42e-06,
      "loss": 5.6872,
      "step": 75
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.9200000000000003e-06,
      "loss": 4.1627,
      "step": 100
    },
    {
      "epoch": 0.73,
      "learning_rate": 2.42e-06,
      "loss": 3.1188,
      "step": 125
    },
    {
      "epoch": 0.88,
      "learning_rate": 2.92e-06,
      "loss": 2.4665,
      "step": 150
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.4200000000000007e-06,
      "loss": 2.0708,
      "step": 175
    },
    {
      "epoch": 1.17,
      "learning_rate": 3.920000000000001e-06,
      "loss": 1.7783,
      "step": 200
    },
    {
      "epoch": 1.32,
      "learning_rate": 4.42e-06,
      "loss": 1.564,
      "step": 225
    },
    {
      "epoch": 1.46,
      "learning_rate": 4.92e-06,
      "loss": 1.4148,
      "step": 250
    },
    {
      "epoch": 1.61,
      "learning_rate": 5.420000000000001e-06,
      "loss": 1.2528,
      "step": 275
    },
    {
      "epoch": 1.75,
      "learning_rate": 5.92e-06,
      "loss": 1.1481,
      "step": 300
    },
    {
      "epoch": 1.9,
      "learning_rate": 6.42e-06,
      "loss": 1.0653,
      "step": 325
    },
    {
      "epoch": 2.05,
      "learning_rate": 6.92e-06,
      "loss": 0.9792,
      "step": 350
    },
    {
      "epoch": 2.19,
      "learning_rate": 7.420000000000001e-06,
      "loss": 0.8902,
      "step": 375
    },
    {
      "epoch": 2.34,
      "learning_rate": 7.92e-06,
      "loss": 0.8395,
      "step": 400
    },
    {
      "epoch": 2.49,
      "learning_rate": 8.42e-06,
      "loss": 0.7904,
      "step": 425
    },
    {
      "epoch": 2.63,
      "learning_rate": 8.920000000000001e-06,
      "loss": 0.7524,
      "step": 450
    },
    {
      "epoch": 2.78,
      "learning_rate": 9.42e-06,
      "loss": 0.7051,
      "step": 475
    },
    {
      "epoch": 2.92,
      "learning_rate": 9.920000000000002e-06,
      "loss": 0.6702,
      "step": 500
    },
    {
      "epoch": 3.07,
      "learning_rate": 9.953333333333333e-06,
      "loss": 0.6214,
      "step": 525
    },
    {
      "epoch": 3.22,
      "learning_rate": 9.89777777777778e-06,
      "loss": 0.5769,
      "step": 550
    },
    {
      "epoch": 3.36,
      "learning_rate": 9.842222222222223e-06,
      "loss": 0.5554,
      "step": 575
    },
    {
      "epoch": 3.51,
      "learning_rate": 9.786666666666667e-06,
      "loss": 0.5413,
      "step": 600
    },
    {
      "epoch": 3.65,
      "learning_rate": 9.731111111111113e-06,
      "loss": 0.5321,
      "step": 625
    },
    {
      "epoch": 3.8,
      "learning_rate": 9.675555555555555e-06,
      "loss": 0.4991,
      "step": 650
    },
    {
      "epoch": 3.95,
      "learning_rate": 9.620000000000001e-06,
      "loss": 0.4837,
      "step": 675
    },
    {
      "epoch": 4.09,
      "learning_rate": 9.564444444444445e-06,
      "loss": 0.4403,
      "step": 700
    },
    {
      "epoch": 4.24,
      "learning_rate": 9.508888888888889e-06,
      "loss": 0.4337,
      "step": 725
    },
    {
      "epoch": 4.39,
      "learning_rate": 9.453333333333335e-06,
      "loss": 0.4113,
      "step": 750
    },
    {
      "epoch": 4.53,
      "learning_rate": 9.397777777777779e-06,
      "loss": 0.4035,
      "step": 775
    },
    {
      "epoch": 4.68,
      "learning_rate": 9.342222222222223e-06,
      "loss": 0.4139,
      "step": 800
    },
    {
      "epoch": 4.82,
      "learning_rate": 9.286666666666667e-06,
      "loss": 0.3987,
      "step": 825
    },
    {
      "epoch": 4.97,
      "learning_rate": 9.231111111111111e-06,
      "loss": 0.3832,
      "step": 850
    },
    {
      "epoch": 5.12,
      "learning_rate": 9.175555555555557e-06,
      "loss": 0.345,
      "step": 875
    },
    {
      "epoch": 5.26,
      "learning_rate": 9.12e-06,
      "loss": 0.3434,
      "step": 900
    },
    {
      "epoch": 5.41,
      "learning_rate": 9.064444444444447e-06,
      "loss": 0.3375,
      "step": 925
    },
    {
      "epoch": 5.56,
      "learning_rate": 9.008888888888889e-06,
      "loss": 0.3373,
      "step": 950
    },
    {
      "epoch": 5.7,
      "learning_rate": 8.953333333333335e-06,
      "loss": 0.3266,
      "step": 975
    },
    {
      "epoch": 5.85,
      "learning_rate": 8.897777777777779e-06,
      "loss": 0.337,
      "step": 1000
    },
    {
      "epoch": 5.85,
      "eval_loss": 0.3837755024433136,
      "eval_runtime": 254.2294,
      "eval_samples_per_second": 25.925,
      "eval_steps_per_second": 0.81,
      "eval_wer": 26.88146043643837,
      "step": 1000
    },
    {
      "epoch": 5.99,
      "learning_rate": 8.842222222222223e-06,
      "loss": 0.3175,
      "step": 1025
    },
    {
      "epoch": 6.14,
      "learning_rate": 8.786666666666668e-06,
      "loss": 0.2787,
      "step": 1050
    },
    {
      "epoch": 6.29,
      "learning_rate": 8.73111111111111e-06,
      "loss": 0.2821,
      "step": 1075
    },
    {
      "epoch": 6.43,
      "learning_rate": 8.675555555555556e-06,
      "loss": 0.2702,
      "step": 1100
    },
    {
      "epoch": 6.58,
      "learning_rate": 8.62e-06,
      "loss": 0.2797,
      "step": 1125
    },
    {
      "epoch": 6.73,
      "learning_rate": 8.564444444444445e-06,
      "loss": 0.2719,
      "step": 1150
    },
    {
      "epoch": 6.87,
      "learning_rate": 8.50888888888889e-06,
      "loss": 0.2813,
      "step": 1175
    },
    {
      "epoch": 7.02,
      "learning_rate": 8.453333333333334e-06,
      "loss": 0.2664,
      "step": 1200
    },
    {
      "epoch": 7.16,
      "learning_rate": 8.397777777777778e-06,
      "loss": 0.2369,
      "step": 1225
    },
    {
      "epoch": 7.31,
      "learning_rate": 8.342222222222222e-06,
      "loss": 0.2405,
      "step": 1250
    },
    {
      "epoch": 7.46,
      "learning_rate": 8.286666666666668e-06,
      "loss": 0.2355,
      "step": 1275
    },
    {
      "epoch": 7.6,
      "learning_rate": 8.231111111111112e-06,
      "loss": 0.2342,
      "step": 1300
    },
    {
      "epoch": 7.75,
      "learning_rate": 8.175555555555556e-06,
      "loss": 0.2299,
      "step": 1325
    },
    {
      "epoch": 7.89,
      "learning_rate": 8.120000000000002e-06,
      "loss": 0.2329,
      "step": 1350
    },
    {
      "epoch": 8.04,
      "learning_rate": 8.064444444444444e-06,
      "loss": 0.2188,
      "step": 1375
    },
    {
      "epoch": 8.19,
      "learning_rate": 8.00888888888889e-06,
      "loss": 0.2041,
      "step": 1400
    },
    {
      "epoch": 8.33,
      "learning_rate": 7.953333333333334e-06,
      "loss": 0.1999,
      "step": 1425
    },
    {
      "epoch": 8.48,
      "learning_rate": 7.897777777777778e-06,
      "loss": 0.2084,
      "step": 1450
    },
    {
      "epoch": 8.63,
      "learning_rate": 7.842222222222224e-06,
      "loss": 0.1966,
      "step": 1475
    },
    {
      "epoch": 8.77,
      "learning_rate": 7.786666666666666e-06,
      "loss": 0.2042,
      "step": 1500
    },
    {
      "epoch": 8.92,
      "learning_rate": 7.731111111111112e-06,
      "loss": 0.2045,
      "step": 1525
    },
    {
      "epoch": 9.06,
      "learning_rate": 7.675555555555556e-06,
      "loss": 0.1865,
      "step": 1550
    },
    {
      "epoch": 9.21,
      "learning_rate": 7.620000000000001e-06,
      "loss": 0.1725,
      "step": 1575
    },
    {
      "epoch": 9.36,
      "learning_rate": 7.564444444444446e-06,
      "loss": 0.1764,
      "step": 1600
    },
    {
      "epoch": 9.5,
      "learning_rate": 7.50888888888889e-06,
      "loss": 0.1796,
      "step": 1625
    },
    {
      "epoch": 9.65,
      "learning_rate": 7.453333333333334e-06,
      "loss": 0.1803,
      "step": 1650
    },
    {
      "epoch": 9.8,
      "learning_rate": 7.3977777777777786e-06,
      "loss": 0.1754,
      "step": 1675
    },
    {
      "epoch": 9.94,
      "learning_rate": 7.342222222222223e-06,
      "loss": 0.1808,
      "step": 1700
    },
    {
      "epoch": 10.09,
      "learning_rate": 7.2866666666666675e-06,
      "loss": 0.1646,
      "step": 1725
    },
    {
      "epoch": 10.23,
      "learning_rate": 7.231111111111112e-06,
      "loss": 0.1538,
      "step": 1750
    },
    {
      "epoch": 10.38,
      "learning_rate": 7.1755555555555556e-06,
      "loss": 0.1525,
      "step": 1775
    },
    {
      "epoch": 10.53,
      "learning_rate": 7.1200000000000004e-06,
      "loss": 0.1611,
      "step": 1800
    },
    {
      "epoch": 10.67,
      "learning_rate": 7.0644444444444445e-06,
      "loss": 0.1566,
      "step": 1825
    },
    {
      "epoch": 10.82,
      "learning_rate": 7.008888888888889e-06,
      "loss": 0.1524,
      "step": 1850
    },
    {
      "epoch": 10.96,
      "learning_rate": 6.953333333333334e-06,
      "loss": 0.1572,
      "step": 1875
    },
    {
      "epoch": 11.11,
      "learning_rate": 6.897777777777779e-06,
      "loss": 0.1373,
      "step": 1900
    },
    {
      "epoch": 11.26,
      "learning_rate": 6.842222222222222e-06,
      "loss": 0.1392,
      "step": 1925
    },
    {
      "epoch": 11.4,
      "learning_rate": 6.786666666666667e-06,
      "loss": 0.1341,
      "step": 1950
    },
    {
      "epoch": 11.55,
      "learning_rate": 6.731111111111111e-06,
      "loss": 0.1414,
      "step": 1975
    },
    {
      "epoch": 11.7,
      "learning_rate": 6.675555555555556e-06,
      "loss": 0.1384,
      "step": 2000
    },
    {
      "epoch": 11.7,
      "eval_loss": 0.34312698245048523,
      "eval_runtime": 251.4417,
      "eval_samples_per_second": 26.213,
      "eval_steps_per_second": 0.819,
      "eval_wer": 22.154385540712084,
      "step": 2000
    },
    {
      "epoch": 11.84,
      "learning_rate": 6.620000000000001e-06,
      "loss": 0.1392,
      "step": 2025
    },
    {
      "epoch": 11.99,
      "learning_rate": 6.564444444444446e-06,
      "loss": 0.1324,
      "step": 2050
    },
    {
      "epoch": 12.13,
      "learning_rate": 6.508888888888889e-06,
      "loss": 0.1215,
      "step": 2075
    },
    {
      "epoch": 12.28,
      "learning_rate": 6.453333333333334e-06,
      "loss": 0.1198,
      "step": 2100
    },
    {
      "epoch": 12.43,
      "learning_rate": 6.397777777777778e-06,
      "loss": 0.1194,
      "step": 2125
    },
    {
      "epoch": 12.57,
      "learning_rate": 6.342222222222223e-06,
      "loss": 0.1211,
      "step": 2150
    },
    {
      "epoch": 12.72,
      "learning_rate": 6.286666666666668e-06,
      "loss": 0.1233,
      "step": 2175
    },
    {
      "epoch": 12.87,
      "learning_rate": 6.231111111111111e-06,
      "loss": 0.121,
      "step": 2200
    },
    {
      "epoch": 13.01,
      "learning_rate": 6.175555555555556e-06,
      "loss": 0.1218,
      "step": 2225
    },
    {
      "epoch": 13.16,
      "learning_rate": 6.120000000000001e-06,
      "loss": 0.1043,
      "step": 2250
    },
    {
      "epoch": 13.3,
      "learning_rate": 6.064444444444445e-06,
      "loss": 0.1092,
      "step": 2275
    },
    {
      "epoch": 13.45,
      "learning_rate": 6.00888888888889e-06,
      "loss": 0.1163,
      "step": 2300
    },
    {
      "epoch": 13.6,
      "learning_rate": 5.9533333333333345e-06,
      "loss": 0.1142,
      "step": 2325
    },
    {
      "epoch": 13.74,
      "learning_rate": 5.897777777777778e-06,
      "loss": 0.1083,
      "step": 2350
    },
    {
      "epoch": 13.89,
      "learning_rate": 5.8422222222222226e-06,
      "loss": 0.1049,
      "step": 2375
    },
    {
      "epoch": 14.04,
      "learning_rate": 5.7866666666666674e-06,
      "loss": 0.1029,
      "step": 2400
    },
    {
      "epoch": 14.18,
      "learning_rate": 5.7311111111111115e-06,
      "loss": 0.0921,
      "step": 2425
    },
    {
      "epoch": 14.33,
      "learning_rate": 5.675555555555556e-06,
      "loss": 0.0961,
      "step": 2450
    },
    {
      "epoch": 14.47,
      "learning_rate": 5.620000000000001e-06,
      "loss": 0.1002,
      "step": 2475
    },
    {
      "epoch": 14.62,
      "learning_rate": 5.5644444444444444e-06,
      "loss": 0.0957,
      "step": 2500
    },
    {
      "epoch": 14.77,
      "learning_rate": 5.508888888888889e-06,
      "loss": 0.1004,
      "step": 2525
    },
    {
      "epoch": 14.91,
      "learning_rate": 5.453333333333334e-06,
      "loss": 0.0976,
      "step": 2550
    },
    {
      "epoch": 15.06,
      "learning_rate": 5.397777777777778e-06,
      "loss": 0.0953,
      "step": 2575
    },
    {
      "epoch": 15.2,
      "learning_rate": 5.342222222222223e-06,
      "loss": 0.0887,
      "step": 2600
    },
    {
      "epoch": 15.35,
      "learning_rate": 5.286666666666666e-06,
      "loss": 0.0909,
      "step": 2625
    },
    {
      "epoch": 15.5,
      "learning_rate": 5.231111111111111e-06,
      "loss": 0.086,
      "step": 2650
    },
    {
      "epoch": 15.64,
      "learning_rate": 5.175555555555556e-06,
      "loss": 0.0876,
      "step": 2675
    },
    {
      "epoch": 15.79,
      "learning_rate": 5.12e-06,
      "loss": 0.0885,
      "step": 2700
    },
    {
      "epoch": 15.94,
      "learning_rate": 5.064444444444445e-06,
      "loss": 0.0877,
      "step": 2725
    },
    {
      "epoch": 16.08,
      "learning_rate": 5.00888888888889e-06,
      "loss": 0.0805,
      "step": 2750
    },
    {
      "epoch": 16.23,
      "learning_rate": 4.953333333333334e-06,
      "loss": 0.082,
      "step": 2775
    },
    {
      "epoch": 16.37,
      "learning_rate": 4.897777777777778e-06,
      "loss": 0.0801,
      "step": 2800
    },
    {
      "epoch": 16.52,
      "learning_rate": 4.842222222222223e-06,
      "loss": 0.0775,
      "step": 2825
    },
    {
      "epoch": 16.67,
      "learning_rate": 4.786666666666667e-06,
      "loss": 0.0822,
      "step": 2850
    },
    {
      "epoch": 16.81,
      "learning_rate": 4.731111111111112e-06,
      "loss": 0.0787,
      "step": 2875
    },
    {
      "epoch": 16.96,
      "learning_rate": 4.675555555555556e-06,
      "loss": 0.079,
      "step": 2900
    },
    {
      "epoch": 17.11,
      "learning_rate": 4.620000000000001e-06,
      "loss": 0.0691,
      "step": 2925
    },
    {
      "epoch": 17.25,
      "learning_rate": 4.564444444444445e-06,
      "loss": 0.0718,
      "step": 2950
    },
    {
      "epoch": 17.4,
      "learning_rate": 4.50888888888889e-06,
      "loss": 0.0694,
      "step": 2975
    },
    {
      "epoch": 17.54,
      "learning_rate": 4.453333333333334e-06,
      "loss": 0.0732,
      "step": 3000
    },
    {
      "epoch": 17.54,
      "eval_loss": 0.337632417678833,
      "eval_runtime": 250.4962,
      "eval_samples_per_second": 26.312,
      "eval_steps_per_second": 0.822,
      "eval_wer": 20.044731910777973,
      "step": 3000
    },
    {
      "epoch": 17.69,
      "learning_rate": 4.397777777777778e-06,
      "loss": 0.0741,
      "step": 3025
    },
    {
      "epoch": 17.84,
      "learning_rate": 4.3422222222222225e-06,
      "loss": 0.0755,
      "step": 3050
    },
    {
      "epoch": 17.98,
      "learning_rate": 4.2866666666666666e-06,
      "loss": 0.0741,
      "step": 3075
    },
    {
      "epoch": 18.13,
      "learning_rate": 4.2311111111111114e-06,
      "loss": 0.0639,
      "step": 3100
    },
    {
      "epoch": 18.27,
      "learning_rate": 4.175555555555556e-06,
      "loss": 0.0641,
      "step": 3125
    },
    {
      "epoch": 18.42,
      "learning_rate": 4.12e-06,
      "loss": 0.0678,
      "step": 3150
    },
    {
      "epoch": 18.57,
      "learning_rate": 4.064444444444444e-06,
      "loss": 0.0631,
      "step": 3175
    },
    {
      "epoch": 18.71,
      "learning_rate": 4.008888888888889e-06,
      "loss": 0.0664,
      "step": 3200
    },
    {
      "epoch": 18.86,
      "learning_rate": 3.953333333333333e-06,
      "loss": 0.0649,
      "step": 3225
    },
    {
      "epoch": 19.01,
      "learning_rate": 3.897777777777778e-06,
      "loss": 0.0672,
      "step": 3250
    },
    {
      "epoch": 19.15,
      "learning_rate": 3.842222222222223e-06,
      "loss": 0.0609,
      "step": 3275
    },
    {
      "epoch": 19.3,
      "learning_rate": 3.7866666666666667e-06,
      "loss": 0.0596,
      "step": 3300
    },
    {
      "epoch": 19.44,
      "learning_rate": 3.7311111111111116e-06,
      "loss": 0.0608,
      "step": 3325
    },
    {
      "epoch": 19.59,
      "learning_rate": 3.675555555555556e-06,
      "loss": 0.0601,
      "step": 3350
    },
    {
      "epoch": 19.74,
      "learning_rate": 3.62e-06,
      "loss": 0.0608,
      "step": 3375
    },
    {
      "epoch": 19.88,
      "learning_rate": 3.564444444444445e-06,
      "loss": 0.0587,
      "step": 3400
    },
    {
      "epoch": 20.03,
      "learning_rate": 3.508888888888889e-06,
      "loss": 0.0592,
      "step": 3425
    },
    {
      "epoch": 20.18,
      "learning_rate": 3.4533333333333334e-06,
      "loss": 0.0556,
      "step": 3450
    },
    {
      "epoch": 20.32,
      "learning_rate": 3.3977777777777783e-06,
      "loss": 0.0564,
      "step": 3475
    },
    {
      "epoch": 20.47,
      "learning_rate": 3.3422222222222224e-06,
      "loss": 0.0561,
      "step": 3500
    },
    {
      "epoch": 20.61,
      "learning_rate": 3.286666666666667e-06,
      "loss": 0.0538,
      "step": 3525
    },
    {
      "epoch": 20.76,
      "learning_rate": 3.2311111111111117e-06,
      "loss": 0.0551,
      "step": 3550
    },
    {
      "epoch": 20.91,
      "learning_rate": 3.1755555555555557e-06,
      "loss": 0.0594,
      "step": 3575
    },
    {
      "epoch": 21.05,
      "learning_rate": 3.12e-06,
      "loss": 0.0518,
      "step": 3600
    },
    {
      "epoch": 21.2,
      "learning_rate": 3.064444444444445e-06,
      "loss": 0.0507,
      "step": 3625
    },
    {
      "epoch": 21.35,
      "learning_rate": 3.008888888888889e-06,
      "loss": 0.0548,
      "step": 3650
    },
    {
      "epoch": 21.49,
      "learning_rate": 2.9533333333333336e-06,
      "loss": 0.0533,
      "step": 3675
    },
    {
      "epoch": 21.64,
      "learning_rate": 2.8977777777777785e-06,
      "loss": 0.0531,
      "step": 3700
    },
    {
      "epoch": 21.78,
      "learning_rate": 2.8422222222222225e-06,
      "loss": 0.0513,
      "step": 3725
    },
    {
      "epoch": 21.93,
      "learning_rate": 2.786666666666667e-06,
      "loss": 0.0525,
      "step": 3750
    },
    {
      "epoch": 22.08,
      "learning_rate": 2.7311111111111114e-06,
      "loss": 0.0508,
      "step": 3775
    },
    {
      "epoch": 22.22,
      "learning_rate": 2.675555555555556e-06,
      "loss": 0.0468,
      "step": 3800
    },
    {
      "epoch": 22.37,
      "learning_rate": 2.6200000000000003e-06,
      "loss": 0.0502,
      "step": 3825
    },
    {
      "epoch": 22.51,
      "learning_rate": 2.5644444444444444e-06,
      "loss": 0.0483,
      "step": 3850
    },
    {
      "epoch": 22.66,
      "learning_rate": 2.5088888888888892e-06,
      "loss": 0.0516,
      "step": 3875
    },
    {
      "epoch": 22.81,
      "learning_rate": 2.4533333333333333e-06,
      "loss": 0.0494,
      "step": 3900
    },
    {
      "epoch": 22.95,
      "learning_rate": 2.397777777777778e-06,
      "loss": 0.0492,
      "step": 3925
    },
    {
      "epoch": 23.1,
      "learning_rate": 2.342222222222222e-06,
      "loss": 0.0473,
      "step": 3950
    },
    {
      "epoch": 23.25,
      "learning_rate": 2.2866666666666667e-06,
      "loss": 0.0457,
      "step": 3975
    },
    {
      "epoch": 23.39,
      "learning_rate": 2.2311111111111115e-06,
      "loss": 0.0432,
      "step": 4000
    },
    {
      "epoch": 23.39,
      "eval_loss": 0.345840722322464,
      "eval_runtime": 250.3931,
      "eval_samples_per_second": 26.323,
      "eval_steps_per_second": 0.823,
      "eval_wer": 19.53494932398396,
      "step": 4000
    },
    {
      "epoch": 23.54,
      "learning_rate": 2.1755555555555556e-06,
      "loss": 0.0459,
      "step": 4025
    },
    {
      "epoch": 23.68,
      "learning_rate": 2.12e-06,
      "loss": 0.0454,
      "step": 4050
    },
    {
      "epoch": 23.83,
      "learning_rate": 2.064444444444445e-06,
      "loss": 0.0462,
      "step": 4075
    },
    {
      "epoch": 23.98,
      "learning_rate": 2.008888888888889e-06,
      "loss": 0.0462,
      "step": 4100
    },
    {
      "epoch": 24.12,
      "learning_rate": 1.9533333333333334e-06,
      "loss": 0.0457,
      "step": 4125
    },
    {
      "epoch": 24.27,
      "learning_rate": 1.8977777777777779e-06,
      "loss": 0.0419,
      "step": 4150
    },
    {
      "epoch": 24.42,
      "learning_rate": 1.8422222222222225e-06,
      "loss": 0.0457,
      "step": 4175
    },
    {
      "epoch": 24.56,
      "learning_rate": 1.7866666666666668e-06,
      "loss": 0.0437,
      "step": 4200
    },
    {
      "epoch": 24.71,
      "learning_rate": 1.7311111111111112e-06,
      "loss": 0.0435,
      "step": 4225
    },
    {
      "epoch": 24.85,
      "learning_rate": 1.675555555555556e-06,
      "loss": 0.0419,
      "step": 4250
    },
    {
      "epoch": 25.0,
      "learning_rate": 1.6200000000000002e-06,
      "loss": 0.0445,
      "step": 4275
    },
    {
      "epoch": 25.15,
      "learning_rate": 1.5644444444444446e-06,
      "loss": 0.0421,
      "step": 4300
    },
    {
      "epoch": 25.29,
      "learning_rate": 1.5088888888888889e-06,
      "loss": 0.0415,
      "step": 4325
    },
    {
      "epoch": 25.44,
      "learning_rate": 1.4533333333333335e-06,
      "loss": 0.0422,
      "step": 4350
    },
    {
      "epoch": 25.58,
      "learning_rate": 1.397777777777778e-06,
      "loss": 0.0422,
      "step": 4375
    },
    {
      "epoch": 25.73,
      "learning_rate": 1.3422222222222222e-06,
      "loss": 0.0424,
      "step": 4400
    },
    {
      "epoch": 25.88,
      "learning_rate": 1.286666666666667e-06,
      "loss": 0.0393,
      "step": 4425
    },
    {
      "epoch": 26.02,
      "learning_rate": 1.2311111111111112e-06,
      "loss": 0.0435,
      "step": 4450
    },
    {
      "epoch": 26.17,
      "learning_rate": 1.1755555555555556e-06,
      "loss": 0.0401,
      "step": 4475
    },
    {
      "epoch": 26.32,
      "learning_rate": 1.12e-06,
      "loss": 0.0409,
      "step": 4500
    },
    {
      "epoch": 26.46,
      "learning_rate": 1.0644444444444445e-06,
      "loss": 0.0401,
      "step": 4525
    },
    {
      "epoch": 26.61,
      "learning_rate": 1.008888888888889e-06,
      "loss": 0.0377,
      "step": 4550
    },
    {
      "epoch": 26.75,
      "learning_rate": 9.533333333333335e-07,
      "loss": 0.0409,
      "step": 4575
    },
    {
      "epoch": 26.9,
      "learning_rate": 8.977777777777778e-07,
      "loss": 0.0396,
      "step": 4600
    },
    {
      "epoch": 27.05,
      "learning_rate": 8.422222222222224e-07,
      "loss": 0.0397,
      "step": 4625
    },
    {
      "epoch": 27.19,
      "learning_rate": 7.866666666666667e-07,
      "loss": 0.0389,
      "step": 4650
    },
    {
      "epoch": 27.34,
      "learning_rate": 7.311111111111112e-07,
      "loss": 0.0392,
      "step": 4675
    },
    {
      "epoch": 27.49,
      "learning_rate": 6.755555555555555e-07,
      "loss": 0.0377,
      "step": 4700
    },
    {
      "epoch": 27.63,
      "learning_rate": 6.200000000000001e-07,
      "loss": 0.039,
      "step": 4725
    },
    {
      "epoch": 27.78,
      "learning_rate": 5.644444444444445e-07,
      "loss": 0.0406,
      "step": 4750
    },
    {
      "epoch": 27.92,
      "learning_rate": 5.088888888888889e-07,
      "loss": 0.0397,
      "step": 4775
    },
    {
      "epoch": 28.07,
      "learning_rate": 4.533333333333334e-07,
      "loss": 0.0375,
      "step": 4800
    },
    {
      "epoch": 28.22,
      "learning_rate": 3.9777777777777783e-07,
      "loss": 0.0376,
      "step": 4825
    },
    {
      "epoch": 28.36,
      "learning_rate": 3.422222222222223e-07,
      "loss": 0.039,
      "step": 4850
    },
    {
      "epoch": 28.51,
      "learning_rate": 2.866666666666667e-07,
      "loss": 0.04,
      "step": 4875
    },
    {
      "epoch": 28.65,
      "learning_rate": 2.3111111111111112e-07,
      "loss": 0.0417,
      "step": 4900
    },
    {
      "epoch": 28.8,
      "learning_rate": 1.7555555555555558e-07,
      "loss": 0.0377,
      "step": 4925
    },
    {
      "epoch": 28.95,
      "learning_rate": 1.2000000000000002e-07,
      "loss": 0.0382,
      "step": 4950
    },
    {
      "epoch": 29.09,
      "learning_rate": 6.444444444444445e-08,
      "loss": 0.0385,
      "step": 4975
    },
    {
      "epoch": 29.24,
      "learning_rate": 8.88888888888889e-09,
      "loss": 0.0378,
      "step": 5000
    },
    {
      "epoch": 29.24,
      "eval_loss": 0.34853628277778625,
      "eval_runtime": 249.5278,
      "eval_samples_per_second": 26.414,
      "eval_steps_per_second": 0.826,
      "eval_wer": 19.351588788812993,
      "step": 5000
    },
    {
      "epoch": 29.24,
      "step": 5000,
      "total_flos": 9.212973471055872e+19,
      "train_loss": 0.3546963795423508,
      "train_runtime": 6917.3435,
      "train_samples_per_second": 46.261,
      "train_steps_per_second": 0.723
    }
  ],
  "logging_steps": 25,
  "max_steps": 5000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 30,
  "save_steps": 1000,
  "total_flos": 9.212973471055872e+19,
  "train_batch_size": 64,
  "trial_name": null,
  "trial_params": null
}