{
  "best_metric": 26.59332245259828,
  "best_model_checkpoint": "./checkpoint-5000",
  "epoch": 58.13953488372093,
  "eval_steps": 1000,
  "global_step": 5000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.29,
      "learning_rate": 1.0500000000000001e-06,
      "loss": 10.9344,
      "step": 25
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.3e-06,
      "loss": 8.9018,
      "step": 50
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.55e-06,
      "loss": 6.8368,
      "step": 75
    },
    {
      "epoch": 1.16,
      "learning_rate": 4.800000000000001e-06,
      "loss": 5.7971,
      "step": 100
    },
    {
      "epoch": 1.45,
      "learning_rate": 6.0500000000000005e-06,
      "loss": 5.2383,
      "step": 125
    },
    {
      "epoch": 1.74,
      "learning_rate": 7.2999999999999996e-06,
      "loss": 4.9224,
      "step": 150
    },
    {
      "epoch": 2.03,
      "learning_rate": 8.550000000000001e-06,
      "loss": 4.6693,
      "step": 175
    },
    {
      "epoch": 2.33,
      "learning_rate": 9.800000000000001e-06,
      "loss": 4.2509,
      "step": 200
    },
    {
      "epoch": 2.62,
      "learning_rate": 1.1050000000000001e-05,
      "loss": 3.401,
      "step": 225
    },
    {
      "epoch": 2.91,
      "learning_rate": 1.23e-05,
      "loss": 2.597,
      "step": 250
    },
    {
      "epoch": 3.2,
      "learning_rate": 1.3550000000000002e-05,
      "loss": 2.0982,
      "step": 275
    },
    {
      "epoch": 3.49,
      "learning_rate": 1.48e-05,
      "loss": 1.7696,
      "step": 300
    },
    {
      "epoch": 3.78,
      "learning_rate": 1.605e-05,
      "loss": 1.5481,
      "step": 325
    },
    {
      "epoch": 4.07,
      "learning_rate": 1.73e-05,
      "loss": 1.371,
      "step": 350
    },
    {
      "epoch": 4.36,
      "learning_rate": 1.855e-05,
      "loss": 1.2037,
      "step": 375
    },
    {
      "epoch": 4.65,
      "learning_rate": 1.9800000000000004e-05,
      "loss": 1.1067,
      "step": 400
    },
    {
      "epoch": 4.94,
      "learning_rate": 2.105e-05,
      "loss": 1.0239,
      "step": 425
    },
    {
      "epoch": 5.23,
      "learning_rate": 2.23e-05,
      "loss": 0.9223,
      "step": 450
    },
    {
      "epoch": 5.52,
      "learning_rate": 2.355e-05,
      "loss": 0.8587,
      "step": 475
    },
    {
      "epoch": 5.81,
      "learning_rate": 2.48e-05,
      "loss": 0.8111,
      "step": 500
    },
    {
      "epoch": 6.1,
      "learning_rate": 2.4883333333333333e-05,
      "loss": 0.7575,
      "step": 525
    },
    {
      "epoch": 6.4,
      "learning_rate": 2.4744444444444445e-05,
      "loss": 0.6958,
      "step": 550
    },
    {
      "epoch": 6.69,
      "learning_rate": 2.4605555555555558e-05,
      "loss": 0.6532,
      "step": 575
    },
    {
      "epoch": 6.98,
      "learning_rate": 2.4466666666666667e-05,
      "loss": 0.634,
      "step": 600
    },
    {
      "epoch": 7.27,
      "learning_rate": 2.432777777777778e-05,
      "loss": 0.5807,
      "step": 625
    },
    {
      "epoch": 7.56,
      "learning_rate": 2.418888888888889e-05,
      "loss": 0.5566,
      "step": 650
    },
    {
      "epoch": 7.85,
      "learning_rate": 2.4050000000000002e-05,
      "loss": 0.5485,
      "step": 675
    },
    {
      "epoch": 8.14,
      "learning_rate": 2.391111111111111e-05,
      "loss": 0.5172,
      "step": 700
    },
    {
      "epoch": 8.43,
      "learning_rate": 2.3772222222222224e-05,
      "loss": 0.4834,
      "step": 725
    },
    {
      "epoch": 8.72,
      "learning_rate": 2.3633333333333336e-05,
      "loss": 0.4736,
      "step": 750
    },
    {
      "epoch": 9.01,
      "learning_rate": 2.3494444444444446e-05,
      "loss": 0.4607,
      "step": 775
    },
    {
      "epoch": 9.3,
      "learning_rate": 2.3355555555555555e-05,
      "loss": 0.4195,
      "step": 800
    },
    {
      "epoch": 9.59,
      "learning_rate": 2.3216666666666667e-05,
      "loss": 0.419,
      "step": 825
    },
    {
      "epoch": 9.88,
      "learning_rate": 2.307777777777778e-05,
      "loss": 0.4146,
      "step": 850
    },
    {
      "epoch": 10.17,
      "learning_rate": 2.293888888888889e-05,
      "loss": 0.3901,
      "step": 875
    },
    {
      "epoch": 10.47,
      "learning_rate": 2.2800000000000002e-05,
      "loss": 0.3701,
      "step": 900
    },
    {
      "epoch": 10.76,
      "learning_rate": 2.2661111111111115e-05,
      "loss": 0.3653,
      "step": 925
    },
    {
      "epoch": 11.05,
      "learning_rate": 2.2522222222222224e-05,
      "loss": 0.3621,
      "step": 950
    },
    {
      "epoch": 11.34,
      "learning_rate": 2.2383333333333333e-05,
      "loss": 0.3274,
      "step": 975
    },
    {
      "epoch": 11.63,
      "learning_rate": 2.2244444444444446e-05,
      "loss": 0.3263,
      "step": 1000
    },
    {
      "epoch": 11.63,
      "eval_loss": 0.5019487738609314,
      "eval_runtime": 82.1078,
      "eval_samples_per_second": 80.273,
      "eval_steps_per_second": 1.254,
      "eval_wer": 33.17012230752181,
      "step": 1000
    },
    {
      "epoch": 11.92,
      "learning_rate": 2.2105555555555558e-05,
      "loss": 0.329,
      "step": 1025
    },
    {
      "epoch": 12.21,
      "learning_rate": 2.1966666666666668e-05,
      "loss": 0.3065,
      "step": 1050
    },
    {
      "epoch": 12.5,
      "learning_rate": 2.1827777777777777e-05,
      "loss": 0.2965,
      "step": 1075
    },
    {
      "epoch": 12.79,
      "learning_rate": 2.168888888888889e-05,
      "loss": 0.2945,
      "step": 1100
    },
    {
      "epoch": 13.08,
      "learning_rate": 2.1550000000000002e-05,
      "loss": 0.2871,
      "step": 1125
    },
    {
      "epoch": 13.37,
      "learning_rate": 2.141111111111111e-05,
      "loss": 0.2643,
      "step": 1150
    },
    {
      "epoch": 13.66,
      "learning_rate": 2.1272222222222224e-05,
      "loss": 0.2681,
      "step": 1175
    },
    {
      "epoch": 13.95,
      "learning_rate": 2.1133333333333337e-05,
      "loss": 0.2721,
      "step": 1200
    },
    {
      "epoch": 14.24,
      "learning_rate": 2.0994444444444446e-05,
      "loss": 0.2535,
      "step": 1225
    },
    {
      "epoch": 14.53,
      "learning_rate": 2.0855555555555555e-05,
      "loss": 0.2442,
      "step": 1250
    },
    {
      "epoch": 14.83,
      "learning_rate": 2.0716666666666668e-05,
      "loss": 0.247,
      "step": 1275
    },
    {
      "epoch": 15.12,
      "learning_rate": 2.057777777777778e-05,
      "loss": 0.2396,
      "step": 1300
    },
    {
      "epoch": 15.41,
      "learning_rate": 2.043888888888889e-05,
      "loss": 0.2182,
      "step": 1325
    },
    {
      "epoch": 15.7,
      "learning_rate": 2.0300000000000002e-05,
      "loss": 0.2218,
      "step": 1350
    },
    {
      "epoch": 15.99,
      "learning_rate": 2.016111111111111e-05,
      "loss": 0.2257,
      "step": 1375
    },
    {
      "epoch": 16.28,
      "learning_rate": 2.0022222222222224e-05,
      "loss": 0.2006,
      "step": 1400
    },
    {
      "epoch": 16.57,
      "learning_rate": 1.9883333333333333e-05,
      "loss": 0.2043,
      "step": 1425
    },
    {
      "epoch": 16.86,
      "learning_rate": 1.9744444444444446e-05,
      "loss": 0.2079,
      "step": 1450
    },
    {
      "epoch": 17.15,
      "learning_rate": 1.960555555555556e-05,
      "loss": 0.1949,
      "step": 1475
    },
    {
      "epoch": 17.44,
      "learning_rate": 1.9466666666666668e-05,
      "loss": 0.1872,
      "step": 1500
    },
    {
      "epoch": 17.73,
      "learning_rate": 1.9327777777777777e-05,
      "loss": 0.1905,
      "step": 1525
    },
    {
      "epoch": 18.02,
      "learning_rate": 1.918888888888889e-05,
      "loss": 0.1896,
      "step": 1550
    },
    {
      "epoch": 18.31,
      "learning_rate": 1.9050000000000002e-05,
      "loss": 0.1688,
      "step": 1575
    },
    {
      "epoch": 18.6,
      "learning_rate": 1.891111111111111e-05,
      "loss": 0.1728,
      "step": 1600
    },
    {
      "epoch": 18.9,
      "learning_rate": 1.8772222222222224e-05,
      "loss": 0.1752,
      "step": 1625
    },
    {
      "epoch": 19.19,
      "learning_rate": 1.8633333333333333e-05,
      "loss": 0.1625,
      "step": 1650
    },
    {
      "epoch": 19.48,
      "learning_rate": 1.8494444444444446e-05,
      "loss": 0.1575,
      "step": 1675
    },
    {
      "epoch": 19.77,
      "learning_rate": 1.8355555555555555e-05,
      "loss": 0.1604,
      "step": 1700
    },
    {
      "epoch": 20.06,
      "learning_rate": 1.8216666666666668e-05,
      "loss": 0.1617,
      "step": 1725
    },
    {
      "epoch": 20.35,
      "learning_rate": 1.807777777777778e-05,
      "loss": 0.1467,
      "step": 1750
    },
    {
      "epoch": 20.64,
      "learning_rate": 1.793888888888889e-05,
      "loss": 0.1473,
      "step": 1775
    },
    {
      "epoch": 20.93,
      "learning_rate": 1.78e-05,
      "loss": 0.1503,
      "step": 1800
    },
    {
      "epoch": 21.22,
      "learning_rate": 1.766111111111111e-05,
      "loss": 0.1364,
      "step": 1825
    },
    {
      "epoch": 21.51,
      "learning_rate": 1.7522222222222224e-05,
      "loss": 0.1349,
      "step": 1850
    },
    {
      "epoch": 21.8,
      "learning_rate": 1.7383333333333333e-05,
      "loss": 0.1371,
      "step": 1875
    },
    {
      "epoch": 22.09,
      "learning_rate": 1.7244444444444446e-05,
      "loss": 0.1339,
      "step": 1900
    },
    {
      "epoch": 22.38,
      "learning_rate": 1.7105555555555555e-05,
      "loss": 0.1245,
      "step": 1925
    },
    {
      "epoch": 22.67,
      "learning_rate": 1.6966666666666668e-05,
      "loss": 0.1305,
      "step": 1950
    },
    {
      "epoch": 22.97,
      "learning_rate": 1.6827777777777777e-05,
      "loss": 0.1276,
      "step": 1975
    },
    {
      "epoch": 23.26,
      "learning_rate": 1.668888888888889e-05,
      "loss": 0.1194,
      "step": 2000
    },
    {
      "epoch": 23.26,
      "eval_loss": 0.4646737277507782,
      "eval_runtime": 79.0718,
      "eval_samples_per_second": 83.355,
      "eval_steps_per_second": 1.303,
      "eval_wer": 28.285881238791838,
      "step": 2000
    },
    {
      "epoch": 23.55,
      "learning_rate": 1.6550000000000002e-05,
      "loss": 0.1155,
      "step": 2025
    },
    {
      "epoch": 23.84,
      "learning_rate": 1.6411111111111112e-05,
      "loss": 0.1184,
      "step": 2050
    },
    {
      "epoch": 24.13,
      "learning_rate": 1.627222222222222e-05,
      "loss": 0.1118,
      "step": 2075
    },
    {
      "epoch": 24.42,
      "learning_rate": 1.6133333333333334e-05,
      "loss": 0.1083,
      "step": 2100
    },
    {
      "epoch": 24.71,
      "learning_rate": 1.5994444444444446e-05,
      "loss": 0.1086,
      "step": 2125
    },
    {
      "epoch": 25.0,
      "learning_rate": 1.5855555555555555e-05,
      "loss": 0.1137,
      "step": 2150
    },
    {
      "epoch": 25.29,
      "learning_rate": 1.5716666666666668e-05,
      "loss": 0.0999,
      "step": 2175
    },
    {
      "epoch": 25.58,
      "learning_rate": 1.5577777777777777e-05,
      "loss": 0.1013,
      "step": 2200
    },
    {
      "epoch": 25.87,
      "learning_rate": 1.543888888888889e-05,
      "loss": 0.1034,
      "step": 2225
    },
    {
      "epoch": 26.16,
      "learning_rate": 1.53e-05,
      "loss": 0.0971,
      "step": 2250
    },
    {
      "epoch": 26.45,
      "learning_rate": 1.5161111111111112e-05,
      "loss": 0.0956,
      "step": 2275
    },
    {
      "epoch": 26.74,
      "learning_rate": 1.5022222222222224e-05,
      "loss": 0.0951,
      "step": 2300
    },
    {
      "epoch": 27.03,
      "learning_rate": 1.4883333333333335e-05,
      "loss": 0.0949,
      "step": 2325
    },
    {
      "epoch": 27.33,
      "learning_rate": 1.4744444444444445e-05,
      "loss": 0.0866,
      "step": 2350
    },
    {
      "epoch": 27.62,
      "learning_rate": 1.4605555555555556e-05,
      "loss": 0.088,
      "step": 2375
    },
    {
      "epoch": 27.91,
      "learning_rate": 1.4466666666666667e-05,
      "loss": 0.0899,
      "step": 2400
    },
    {
      "epoch": 28.2,
      "learning_rate": 1.4327777777777779e-05,
      "loss": 0.0878,
      "step": 2425
    },
    {
      "epoch": 28.49,
      "learning_rate": 1.418888888888889e-05,
      "loss": 0.082,
      "step": 2450
    },
    {
      "epoch": 28.78,
      "learning_rate": 1.4050000000000003e-05,
      "loss": 0.0844,
      "step": 2475
    },
    {
      "epoch": 29.07,
      "learning_rate": 1.391111111111111e-05,
      "loss": 0.083,
      "step": 2500
    },
    {
      "epoch": 29.36,
      "learning_rate": 1.3772222222222223e-05,
      "loss": 0.078,
      "step": 2525
    },
    {
      "epoch": 29.65,
      "learning_rate": 1.3633333333333334e-05,
      "loss": 0.0782,
      "step": 2550
    },
    {
      "epoch": 29.94,
      "learning_rate": 1.3494444444444446e-05,
      "loss": 0.0792,
      "step": 2575
    },
    {
      "epoch": 30.23,
      "learning_rate": 1.3355555555555557e-05,
      "loss": 0.0737,
      "step": 2600
    },
    {
      "epoch": 30.52,
      "learning_rate": 1.3216666666666667e-05,
      "loss": 0.0722,
      "step": 2625
    },
    {
      "epoch": 30.81,
      "learning_rate": 1.3077777777777778e-05,
      "loss": 0.0741,
      "step": 2650
    },
    {
      "epoch": 31.1,
      "learning_rate": 1.2938888888888888e-05,
      "loss": 0.0729,
      "step": 2675
    },
    {
      "epoch": 31.4,
      "learning_rate": 1.2800000000000001e-05,
      "loss": 0.0681,
      "step": 2700
    },
    {
      "epoch": 31.69,
      "learning_rate": 1.2661111111111112e-05,
      "loss": 0.0701,
      "step": 2725
    },
    {
      "epoch": 31.98,
      "learning_rate": 1.2522222222222225e-05,
      "loss": 0.0696,
      "step": 2750
    },
    {
      "epoch": 32.27,
      "learning_rate": 1.2383333333333334e-05,
      "loss": 0.0629,
      "step": 2775
    },
    {
      "epoch": 32.56,
      "learning_rate": 1.2244444444444445e-05,
      "loss": 0.067,
      "step": 2800
    },
    {
      "epoch": 32.85,
      "learning_rate": 1.2105555555555556e-05,
      "loss": 0.0664,
      "step": 2825
    },
    {
      "epoch": 33.14,
      "learning_rate": 1.1966666666666668e-05,
      "loss": 0.0617,
      "step": 2850
    },
    {
      "epoch": 33.43,
      "learning_rate": 1.1827777777777778e-05,
      "loss": 0.059,
      "step": 2875
    },
    {
      "epoch": 33.72,
      "learning_rate": 1.168888888888889e-05,
      "loss": 0.0635,
      "step": 2900
    },
    {
      "epoch": 34.01,
      "learning_rate": 1.1550000000000001e-05,
      "loss": 0.0614,
      "step": 2925
    },
    {
      "epoch": 34.3,
      "learning_rate": 1.141111111111111e-05,
      "loss": 0.0564,
      "step": 2950
    },
    {
      "epoch": 34.59,
      "learning_rate": 1.1272222222222223e-05,
      "loss": 0.0596,
      "step": 2975
    },
    {
      "epoch": 34.88,
      "learning_rate": 1.1133333333333334e-05,
      "loss": 0.0576,
      "step": 3000
    },
    {
      "epoch": 34.88,
      "eval_loss": 0.48633939027786255,
      "eval_runtime": 79.4438,
      "eval_samples_per_second": 82.964,
      "eval_steps_per_second": 1.297,
      "eval_wer": 27.24012170303653,
      "step": 3000
    },
    {
      "epoch": 35.17,
      "learning_rate": 1.0994444444444445e-05,
      "loss": 0.0562,
      "step": 3025
    },
    {
      "epoch": 35.47,
      "learning_rate": 1.0855555555555556e-05,
      "loss": 0.0546,
      "step": 3050
    },
    {
      "epoch": 35.76,
      "learning_rate": 1.0716666666666667e-05,
      "loss": 0.0549,
      "step": 3075
    },
    {
      "epoch": 36.05,
      "learning_rate": 1.0577777777777778e-05,
      "loss": 0.0556,
      "step": 3100
    },
    {
      "epoch": 36.34,
      "learning_rate": 1.043888888888889e-05,
      "loss": 0.0509,
      "step": 3125
    },
    {
      "epoch": 36.63,
      "learning_rate": 1.03e-05,
      "loss": 0.0533,
      "step": 3150
    },
    {
      "epoch": 36.92,
      "learning_rate": 1.0161111111111112e-05,
      "loss": 0.0524,
      "step": 3175
    },
    {
      "epoch": 37.21,
      "learning_rate": 1.0022222222222223e-05,
      "loss": 0.0499,
      "step": 3200
    },
    {
      "epoch": 37.5,
      "learning_rate": 9.883333333333334e-06,
      "loss": 0.0499,
      "step": 3225
    },
    {
      "epoch": 37.79,
      "learning_rate": 9.744444444444445e-06,
      "loss": 0.0486,
      "step": 3250
    },
    {
      "epoch": 38.08,
      "learning_rate": 9.605555555555556e-06,
      "loss": 0.0493,
      "step": 3275
    },
    {
      "epoch": 38.37,
      "learning_rate": 9.466666666666667e-06,
      "loss": 0.0466,
      "step": 3300
    },
    {
      "epoch": 38.66,
      "learning_rate": 9.327777777777778e-06,
      "loss": 0.0453,
      "step": 3325
    },
    {
      "epoch": 38.95,
      "learning_rate": 9.18888888888889e-06,
      "loss": 0.0487,
      "step": 3350
    },
    {
      "epoch": 39.24,
      "learning_rate": 9.05e-06,
      "loss": 0.0454,
      "step": 3375
    },
    {
      "epoch": 39.53,
      "learning_rate": 8.911111111111112e-06,
      "loss": 0.0432,
      "step": 3400
    },
    {
      "epoch": 39.83,
      "learning_rate": 8.772222222222222e-06,
      "loss": 0.0451,
      "step": 3425
    },
    {
      "epoch": 40.12,
      "learning_rate": 8.633333333333334e-06,
      "loss": 0.0442,
      "step": 3450
    },
    {
      "epoch": 40.41,
      "learning_rate": 8.494444444444445e-06,
      "loss": 0.0446,
      "step": 3475
    },
    {
      "epoch": 40.7,
      "learning_rate": 8.355555555555556e-06,
      "loss": 0.0432,
      "step": 3500
    },
    {
      "epoch": 40.99,
      "learning_rate": 8.216666666666667e-06,
      "loss": 0.0422,
      "step": 3525
    },
    {
      "epoch": 41.28,
      "learning_rate": 8.077777777777778e-06,
      "loss": 0.0407,
      "step": 3550
    },
    {
      "epoch": 41.57,
      "learning_rate": 7.938888888888889e-06,
      "loss": 0.0401,
      "step": 3575
    },
    {
      "epoch": 41.86,
      "learning_rate": 7.8e-06,
      "loss": 0.0419,
      "step": 3600
    },
    {
      "epoch": 42.15,
      "learning_rate": 7.661111111111112e-06,
      "loss": 0.0404,
      "step": 3625
    },
    {
      "epoch": 42.44,
      "learning_rate": 7.5222222222222226e-06,
      "loss": 0.0397,
      "step": 3650
    },
    {
      "epoch": 42.73,
      "learning_rate": 7.3833333333333335e-06,
      "loss": 0.0403,
      "step": 3675
    },
    {
      "epoch": 43.02,
      "learning_rate": 7.244444444444445e-06,
      "loss": 0.0401,
      "step": 3700
    },
    {
      "epoch": 43.31,
      "learning_rate": 7.105555555555555e-06,
      "loss": 0.0376,
      "step": 3725
    },
    {
      "epoch": 43.6,
      "learning_rate": 6.966666666666667e-06,
      "loss": 0.0376,
      "step": 3750
    },
    {
      "epoch": 43.9,
      "learning_rate": 6.827777777777779e-06,
      "loss": 0.0388,
      "step": 3775
    },
    {
      "epoch": 44.19,
      "learning_rate": 6.688888888888889e-06,
      "loss": 0.037,
      "step": 3800
    },
    {
      "epoch": 44.48,
      "learning_rate": 6.550000000000001e-06,
      "loss": 0.0376,
      "step": 3825
    },
    {
      "epoch": 44.77,
      "learning_rate": 6.411111111111111e-06,
      "loss": 0.037,
      "step": 3850
    },
    {
      "epoch": 45.06,
      "learning_rate": 6.272222222222223e-06,
      "loss": 0.0362,
      "step": 3875
    },
    {
      "epoch": 45.35,
      "learning_rate": 6.133333333333334e-06,
      "loss": 0.0361,
      "step": 3900
    },
    {
      "epoch": 45.64,
      "learning_rate": 5.9944444444444446e-06,
      "loss": 0.0357,
      "step": 3925
    },
    {
      "epoch": 45.93,
      "learning_rate": 5.8555555555555555e-06,
      "loss": 0.0353,
      "step": 3950
    },
    {
      "epoch": 46.22,
      "learning_rate": 5.7166666666666664e-06,
      "loss": 0.0339,
      "step": 3975
    },
    {
      "epoch": 46.51,
      "learning_rate": 5.577777777777778e-06,
      "loss": 0.0337,
      "step": 4000
    },
    {
      "epoch": 46.51,
      "eval_loss": 0.5027862191200256,
      "eval_runtime": 78.998,
      "eval_samples_per_second": 83.432,
      "eval_steps_per_second": 1.304,
      "eval_wer": 26.613471961957725,
      "step": 4000
    },
    {
      "epoch": 46.8,
      "learning_rate": 5.438888888888889e-06,
      "loss": 0.0354,
      "step": 4025
    },
    {
      "epoch": 47.09,
      "learning_rate": 5.3e-06,
      "loss": 0.0346,
      "step": 4050
    },
    {
      "epoch": 47.38,
      "learning_rate": 5.161111111111112e-06,
      "loss": 0.0336,
      "step": 4075
    },
    {
      "epoch": 47.67,
      "learning_rate": 5.022222222222223e-06,
      "loss": 0.0322,
      "step": 4100
    },
    {
      "epoch": 47.97,
      "learning_rate": 4.883333333333334e-06,
      "loss": 0.0343,
      "step": 4125
    },
    {
      "epoch": 48.26,
      "learning_rate": 4.744444444444445e-06,
      "loss": 0.0332,
      "step": 4150
    },
    {
      "epoch": 48.55,
      "learning_rate": 4.605555555555556e-06,
      "loss": 0.0313,
      "step": 4175
    },
    {
      "epoch": 48.84,
      "learning_rate": 4.4666666666666665e-06,
      "loss": 0.0327,
      "step": 4200
    },
    {
      "epoch": 49.13,
      "learning_rate": 4.3277777777777775e-06,
      "loss": 0.0323,
      "step": 4225
    },
    {
      "epoch": 49.42,
      "learning_rate": 4.188888888888889e-06,
      "loss": 0.0311,
      "step": 4250
    },
    {
      "epoch": 49.71,
      "learning_rate": 4.05e-06,
      "loss": 0.032,
      "step": 4275
    },
    {
      "epoch": 50.0,
      "learning_rate": 3.911111111111111e-06,
      "loss": 0.0318,
      "step": 4300
    },
    {
      "epoch": 50.29,
      "learning_rate": 3.772222222222222e-06,
      "loss": 0.0308,
      "step": 4325
    },
    {
      "epoch": 50.58,
      "learning_rate": 3.633333333333334e-06,
      "loss": 0.0303,
      "step": 4350
    },
    {
      "epoch": 50.87,
      "learning_rate": 3.4944444444444448e-06,
      "loss": 0.0312,
      "step": 4375
    },
    {
      "epoch": 51.16,
      "learning_rate": 3.3555555555555557e-06,
      "loss": 0.0298,
      "step": 4400
    },
    {
      "epoch": 51.45,
      "learning_rate": 3.216666666666667e-06,
      "loss": 0.0304,
      "step": 4425
    },
    {
      "epoch": 51.74,
      "learning_rate": 3.077777777777778e-06,
      "loss": 0.0303,
      "step": 4450
    },
    {
      "epoch": 52.03,
      "learning_rate": 2.938888888888889e-06,
      "loss": 0.0303,
      "step": 4475
    },
    {
      "epoch": 52.33,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 0.0302,
      "step": 4500
    },
    {
      "epoch": 52.62,
      "learning_rate": 2.6611111111111112e-06,
      "loss": 0.0293,
      "step": 4525
    },
    {
      "epoch": 52.91,
      "learning_rate": 2.522222222222222e-06,
      "loss": 0.0292,
      "step": 4550
    },
    {
      "epoch": 53.2,
      "learning_rate": 2.3833333333333335e-06,
      "loss": 0.0295,
      "step": 4575
    },
    {
      "epoch": 53.49,
      "learning_rate": 2.2444444444444445e-06,
      "loss": 0.0285,
      "step": 4600
    },
    {
      "epoch": 53.78,
      "learning_rate": 2.105555555555556e-06,
      "loss": 0.0286,
      "step": 4625
    },
    {
      "epoch": 54.07,
      "learning_rate": 1.9666666666666668e-06,
      "loss": 0.0294,
      "step": 4650
    },
    {
      "epoch": 54.36,
      "learning_rate": 1.827777777777778e-06,
      "loss": 0.0286,
      "step": 4675
    },
    {
      "epoch": 54.65,
      "learning_rate": 1.6888888888888888e-06,
      "loss": 0.029,
      "step": 4700
    },
    {
      "epoch": 54.94,
      "learning_rate": 1.55e-06,
      "loss": 0.0288,
      "step": 4725
    },
    {
      "epoch": 55.23,
      "learning_rate": 1.4111111111111111e-06,
      "loss": 0.0281,
      "step": 4750
    },
    {
      "epoch": 55.52,
      "learning_rate": 1.2722222222222223e-06,
      "loss": 0.0272,
      "step": 4775
    },
    {
      "epoch": 55.81,
      "learning_rate": 1.1333333333333334e-06,
      "loss": 0.029,
      "step": 4800
    },
    {
      "epoch": 56.1,
      "learning_rate": 9.944444444444446e-07,
      "loss": 0.028,
      "step": 4825
    },
    {
      "epoch": 56.4,
      "learning_rate": 8.555555555555556e-07,
      "loss": 0.029,
      "step": 4850
    },
    {
      "epoch": 56.69,
      "learning_rate": 7.166666666666667e-07,
      "loss": 0.0276,
      "step": 4875
    },
    {
      "epoch": 56.98,
      "learning_rate": 5.777777777777778e-07,
      "loss": 0.0269,
      "step": 4900
    },
    {
      "epoch": 57.27,
      "learning_rate": 4.3888888888888895e-07,
      "loss": 0.0282,
      "step": 4925
    },
    {
      "epoch": 57.56,
      "learning_rate": 3.0000000000000004e-07,
      "loss": 0.0275,
      "step": 4950
    },
    {
      "epoch": 57.85,
      "learning_rate": 1.611111111111111e-07,
      "loss": 0.028,
      "step": 4975
    },
    {
      "epoch": 58.14,
      "learning_rate": 2.2222222222222224e-08,
      "loss": 0.0276,
      "step": 5000
    },
    {
      "epoch": 58.14,
      "eval_loss": 0.507084310054779,
      "eval_runtime": 78.9001,
      "eval_samples_per_second": 83.536,
      "eval_steps_per_second": 1.305,
      "eval_wer": 26.59332245259828,
      "step": 5000
    },
    {
      "epoch": 58.14,
      "step": 5000,
      "total_flos": 4.11718283624448e+19,
      "train_loss": 0.4713757135152817,
      "train_runtime": 4451.1,
      "train_samples_per_second": 143.785,
      "train_steps_per_second": 1.123
    }
  ],
  "logging_steps": 25,
  "max_steps": 5000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 59,
  "save_steps": 1000,
  "total_flos": 4.11718283624448e+19,
  "train_batch_size": 128,
  "trial_name": null,
  "trial_params": null
}