{
  "best_metric": 10.252029047415634,
  "best_model_checkpoint": "./checkpoint-3000",
  "epoch": 17.6056338028169,
  "eval_steps": 1000,
  "global_step": 5000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.09,
      "learning_rate": 4.4e-08,
      "loss": 1.0187,
      "step": 25
    },
    {
      "epoch": 0.18,
      "learning_rate": 9.199999999999999e-08,
      "loss": 1.022,
      "step": 50
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.4199999999999997e-07,
      "loss": 0.9959,
      "step": 75
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.92e-07,
      "loss": 0.9747,
      "step": 100
    },
    {
      "epoch": 0.44,
      "learning_rate": 2.4199999999999997e-07,
      "loss": 0.9056,
      "step": 125
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.9199999999999997e-07,
      "loss": 0.8132,
      "step": 150
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.42e-07,
      "loss": 0.6903,
      "step": 175
    },
    {
      "epoch": 0.7,
      "learning_rate": 3.92e-07,
      "loss": 0.4915,
      "step": 200
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.4199999999999996e-07,
      "loss": 0.3533,
      "step": 225
    },
    {
      "epoch": 0.88,
      "learning_rate": 4.92e-07,
      "loss": 0.3211,
      "step": 250
    },
    {
      "epoch": 0.97,
      "learning_rate": 5.420000000000001e-07,
      "loss": 0.2896,
      "step": 275
    },
    {
      "epoch": 1.06,
      "learning_rate": 5.919999999999999e-07,
      "loss": 0.2791,
      "step": 300
    },
    {
      "epoch": 1.14,
      "learning_rate": 6.42e-07,
      "loss": 0.2705,
      "step": 325
    },
    {
      "epoch": 1.23,
      "learning_rate": 6.919999999999999e-07,
      "loss": 0.2698,
      "step": 350
    },
    {
      "epoch": 1.32,
      "learning_rate": 7.42e-07,
      "loss": 0.2545,
      "step": 375
    },
    {
      "epoch": 1.41,
      "learning_rate": 7.92e-07,
      "loss": 0.2737,
      "step": 400
    },
    {
      "epoch": 1.5,
      "learning_rate": 8.419999999999999e-07,
      "loss": 0.2551,
      "step": 425
    },
    {
      "epoch": 1.58,
      "learning_rate": 8.92e-07,
      "loss": 0.2465,
      "step": 450
    },
    {
      "epoch": 1.67,
      "learning_rate": 9.419999999999999e-07,
      "loss": 0.241,
      "step": 475
    },
    {
      "epoch": 1.76,
      "learning_rate": 9.92e-07,
      "loss": 0.247,
      "step": 500
    },
    {
      "epoch": 1.85,
      "learning_rate": 9.953333333333332e-07,
      "loss": 0.2458,
      "step": 525
    },
    {
      "epoch": 1.94,
      "learning_rate": 9.897777777777776e-07,
      "loss": 0.2407,
      "step": 550
    },
    {
      "epoch": 2.02,
      "learning_rate": 9.842222222222222e-07,
      "loss": 0.236,
      "step": 575
    },
    {
      "epoch": 2.11,
      "learning_rate": 9.786666666666666e-07,
      "loss": 0.202,
      "step": 600
    },
    {
      "epoch": 2.2,
      "learning_rate": 9.73111111111111e-07,
      "loss": 0.2167,
      "step": 625
    },
    {
      "epoch": 2.29,
      "learning_rate": 9.675555555555554e-07,
      "loss": 0.2213,
      "step": 650
    },
    {
      "epoch": 2.38,
      "learning_rate": 9.619999999999999e-07,
      "loss": 0.2159,
      "step": 675
    },
    {
      "epoch": 2.46,
      "learning_rate": 9.564444444444445e-07,
      "loss": 0.2058,
      "step": 700
    },
    {
      "epoch": 2.55,
      "learning_rate": 9.508888888888889e-07,
      "loss": 0.2139,
      "step": 725
    },
    {
      "epoch": 2.64,
      "learning_rate": 9.453333333333333e-07,
      "loss": 0.2116,
      "step": 750
    },
    {
      "epoch": 2.73,
      "learning_rate": 9.397777777777778e-07,
      "loss": 0.2084,
      "step": 775
    },
    {
      "epoch": 2.82,
      "learning_rate": 9.342222222222221e-07,
      "loss": 0.1982,
      "step": 800
    },
    {
      "epoch": 2.9,
      "learning_rate": 9.286666666666666e-07,
      "loss": 0.216,
      "step": 825
    },
    {
      "epoch": 2.99,
      "learning_rate": 9.231111111111111e-07,
      "loss": 0.2113,
      "step": 850
    },
    {
      "epoch": 3.08,
      "learning_rate": 9.175555555555555e-07,
      "loss": 0.1948,
      "step": 875
    },
    {
      "epoch": 3.17,
      "learning_rate": 9.12e-07,
      "loss": 0.1744,
      "step": 900
    },
    {
      "epoch": 3.26,
      "learning_rate": 9.064444444444444e-07,
      "loss": 0.1755,
      "step": 925
    },
    {
      "epoch": 3.35,
      "learning_rate": 9.008888888888888e-07,
      "loss": 0.188,
      "step": 950
    },
    {
      "epoch": 3.43,
      "learning_rate": 8.953333333333332e-07,
      "loss": 0.1833,
      "step": 975
    },
    {
      "epoch": 3.52,
      "learning_rate": 8.897777777777777e-07,
      "loss": 0.1957,
      "step": 1000
    },
    {
      "epoch": 3.52,
      "eval_loss": 0.24422240257263184,
      "eval_runtime": 547.169,
      "eval_samples_per_second": 16.58,
      "eval_steps_per_second": 0.519,
      "eval_wer": 10.593763348996156,
      "step": 1000
    },
    {
      "epoch": 3.61,
      "learning_rate": 8.842222222222222e-07,
      "loss": 0.1813,
      "step": 1025
    },
    {
      "epoch": 3.7,
      "learning_rate": 8.786666666666666e-07,
      "loss": 0.1778,
      "step": 1050
    },
    {
      "epoch": 3.79,
      "learning_rate": 8.73111111111111e-07,
      "loss": 0.1799,
      "step": 1075
    },
    {
      "epoch": 3.87,
      "learning_rate": 8.675555555555555e-07,
      "loss": 0.1823,
      "step": 1100
    },
    {
      "epoch": 3.96,
      "learning_rate": 8.62e-07,
      "loss": 0.1841,
      "step": 1125
    },
    {
      "epoch": 4.05,
      "learning_rate": 8.564444444444444e-07,
      "loss": 0.1664,
      "step": 1150
    },
    {
      "epoch": 4.14,
      "learning_rate": 8.508888888888889e-07,
      "loss": 0.1651,
      "step": 1175
    },
    {
      "epoch": 4.23,
      "learning_rate": 8.453333333333334e-07,
      "loss": 0.158,
      "step": 1200
    },
    {
      "epoch": 4.31,
      "learning_rate": 8.397777777777777e-07,
      "loss": 0.16,
      "step": 1225
    },
    {
      "epoch": 4.4,
      "learning_rate": 8.342222222222222e-07,
      "loss": 0.1653,
      "step": 1250
    },
    {
      "epoch": 4.49,
      "learning_rate": 8.286666666666666e-07,
      "loss": 0.1588,
      "step": 1275
    },
    {
      "epoch": 4.58,
      "learning_rate": 8.231111111111111e-07,
      "loss": 0.1552,
      "step": 1300
    },
    {
      "epoch": 4.67,
      "learning_rate": 8.175555555555555e-07,
      "loss": 0.1709,
      "step": 1325
    },
    {
      "epoch": 4.75,
      "learning_rate": 8.12e-07,
      "loss": 0.1658,
      "step": 1350
    },
    {
      "epoch": 4.84,
      "learning_rate": 8.064444444444444e-07,
      "loss": 0.1573,
      "step": 1375
    },
    {
      "epoch": 4.93,
      "learning_rate": 8.008888888888888e-07,
      "loss": 0.1628,
      "step": 1400
    },
    {
      "epoch": 5.02,
      "learning_rate": 7.953333333333333e-07,
      "loss": 0.158,
      "step": 1425
    },
    {
      "epoch": 5.11,
      "learning_rate": 7.897777777777777e-07,
      "loss": 0.1508,
      "step": 1450
    },
    {
      "epoch": 5.19,
      "learning_rate": 7.842222222222222e-07,
      "loss": 0.1488,
      "step": 1475
    },
    {
      "epoch": 5.28,
      "learning_rate": 7.786666666666665e-07,
      "loss": 0.1387,
      "step": 1500
    },
    {
      "epoch": 5.37,
      "learning_rate": 7.731111111111111e-07,
      "loss": 0.14,
      "step": 1525
    },
    {
      "epoch": 5.46,
      "learning_rate": 7.675555555555556e-07,
      "loss": 0.1476,
      "step": 1550
    },
    {
      "epoch": 5.55,
      "learning_rate": 7.62e-07,
      "loss": 0.1448,
      "step": 1575
    },
    {
      "epoch": 5.63,
      "learning_rate": 7.564444444444445e-07,
      "loss": 0.1444,
      "step": 1600
    },
    {
      "epoch": 5.72,
      "learning_rate": 7.508888888888889e-07,
      "loss": 0.1425,
      "step": 1625
    },
    {
      "epoch": 5.81,
      "learning_rate": 7.453333333333333e-07,
      "loss": 0.1435,
      "step": 1650
    },
    {
      "epoch": 5.9,
      "learning_rate": 7.397777777777777e-07,
      "loss": 0.1475,
      "step": 1675
    },
    {
      "epoch": 5.99,
      "learning_rate": 7.342222222222222e-07,
      "loss": 0.147,
      "step": 1700
    },
    {
      "epoch": 6.07,
      "learning_rate": 7.286666666666666e-07,
      "loss": 0.1273,
      "step": 1725
    },
    {
      "epoch": 6.16,
      "learning_rate": 7.231111111111111e-07,
      "loss": 0.1301,
      "step": 1750
    },
    {
      "epoch": 6.25,
      "learning_rate": 7.175555555555555e-07,
      "loss": 0.1379,
      "step": 1775
    },
    {
      "epoch": 6.34,
      "learning_rate": 7.119999999999999e-07,
      "loss": 0.1322,
      "step": 1800
    },
    {
      "epoch": 6.43,
      "learning_rate": 7.064444444444444e-07,
      "loss": 0.1344,
      "step": 1825
    },
    {
      "epoch": 6.51,
      "learning_rate": 7.008888888888888e-07,
      "loss": 0.134,
      "step": 1850
    },
    {
      "epoch": 6.6,
      "learning_rate": 6.953333333333333e-07,
      "loss": 0.126,
      "step": 1875
    },
    {
      "epoch": 6.69,
      "learning_rate": 6.897777777777777e-07,
      "loss": 0.131,
      "step": 1900
    },
    {
      "epoch": 6.78,
      "learning_rate": 6.842222222222221e-07,
      "loss": 0.1298,
      "step": 1925
    },
    {
      "epoch": 6.87,
      "learning_rate": 6.786666666666667e-07,
      "loss": 0.1357,
      "step": 1950
    },
    {
      "epoch": 6.95,
      "learning_rate": 6.731111111111111e-07,
      "loss": 0.1299,
      "step": 1975
    },
    {
      "epoch": 7.04,
      "learning_rate": 6.675555555555556e-07,
      "loss": 0.1297,
      "step": 2000
    },
    {
      "epoch": 7.04,
      "eval_loss": 0.23775987327098846,
      "eval_runtime": 430.2745,
      "eval_samples_per_second": 21.084,
      "eval_steps_per_second": 0.66,
      "eval_wer": 10.284888114875299,
      "step": 2000
    },
    {
      "epoch": 7.13,
      "learning_rate": 6.62e-07,
      "loss": 0.1243,
      "step": 2025
    },
    {
      "epoch": 7.22,
      "learning_rate": 6.564444444444445e-07,
      "loss": 0.1197,
      "step": 2050
    },
    {
      "epoch": 7.31,
      "learning_rate": 6.508888888888888e-07,
      "loss": 0.1178,
      "step": 2075
    },
    {
      "epoch": 7.39,
      "learning_rate": 6.453333333333333e-07,
      "loss": 0.1202,
      "step": 2100
    },
    {
      "epoch": 7.48,
      "learning_rate": 6.397777777777778e-07,
      "loss": 0.1199,
      "step": 2125
    },
    {
      "epoch": 7.57,
      "learning_rate": 6.342222222222222e-07,
      "loss": 0.1174,
      "step": 2150
    },
    {
      "epoch": 7.66,
      "learning_rate": 6.286666666666667e-07,
      "loss": 0.1238,
      "step": 2175
    },
    {
      "epoch": 7.75,
      "learning_rate": 6.23111111111111e-07,
      "loss": 0.1227,
      "step": 2200
    },
    {
      "epoch": 7.83,
      "learning_rate": 6.175555555555555e-07,
      "loss": 0.1146,
      "step": 2225
    },
    {
      "epoch": 7.92,
      "learning_rate": 6.119999999999999e-07,
      "loss": 0.1235,
      "step": 2250
    },
    {
      "epoch": 8.01,
      "learning_rate": 6.064444444444444e-07,
      "loss": 0.1177,
      "step": 2275
    },
    {
      "epoch": 8.1,
      "learning_rate": 6.008888888888889e-07,
      "loss": 0.1125,
      "step": 2300
    },
    {
      "epoch": 8.19,
      "learning_rate": 5.953333333333333e-07,
      "loss": 0.1061,
      "step": 2325
    },
    {
      "epoch": 8.27,
      "learning_rate": 5.897777777777777e-07,
      "loss": 0.1079,
      "step": 2350
    },
    {
      "epoch": 8.36,
      "learning_rate": 5.842222222222222e-07,
      "loss": 0.1065,
      "step": 2375
    },
    {
      "epoch": 8.45,
      "learning_rate": 5.786666666666667e-07,
      "loss": 0.1101,
      "step": 2400
    },
    {
      "epoch": 8.54,
      "learning_rate": 5.731111111111111e-07,
      "loss": 0.1081,
      "step": 2425
    },
    {
      "epoch": 8.63,
      "learning_rate": 5.675555555555556e-07,
      "loss": 0.1081,
      "step": 2450
    },
    {
      "epoch": 8.71,
      "learning_rate": 5.620000000000001e-07,
      "loss": 0.1105,
      "step": 2475
    },
    {
      "epoch": 8.8,
      "learning_rate": 5.564444444444444e-07,
      "loss": 0.1209,
      "step": 2500
    },
    {
      "epoch": 8.89,
      "learning_rate": 5.508888888888889e-07,
      "loss": 0.1131,
      "step": 2525
    },
    {
      "epoch": 8.98,
      "learning_rate": 5.453333333333333e-07,
      "loss": 0.114,
      "step": 2550
    },
    {
      "epoch": 9.07,
      "learning_rate": 5.397777777777778e-07,
      "loss": 0.1058,
      "step": 2575
    },
    {
      "epoch": 9.15,
      "learning_rate": 5.342222222222222e-07,
      "loss": 0.1013,
      "step": 2600
    },
    {
      "epoch": 9.24,
      "learning_rate": 5.286666666666666e-07,
      "loss": 0.1033,
      "step": 2625
    },
    {
      "epoch": 9.33,
      "learning_rate": 5.23111111111111e-07,
      "loss": 0.1048,
      "step": 2650
    },
    {
      "epoch": 9.42,
      "learning_rate": 5.175555555555555e-07,
      "loss": 0.1063,
      "step": 2675
    },
    {
      "epoch": 9.51,
      "learning_rate": 5.12e-07,
      "loss": 0.0958,
      "step": 2700
    },
    {
      "epoch": 9.6,
      "learning_rate": 5.064444444444444e-07,
      "loss": 0.1075,
      "step": 2725
    },
    {
      "epoch": 9.68,
      "learning_rate": 5.008888888888889e-07,
      "loss": 0.1061,
      "step": 2750
    },
    {
      "epoch": 9.77,
      "learning_rate": 4.953333333333333e-07,
      "loss": 0.1055,
      "step": 2775
    },
    {
      "epoch": 9.86,
      "learning_rate": 4.897777777777778e-07,
      "loss": 0.095,
      "step": 2800
    },
    {
      "epoch": 9.95,
      "learning_rate": 4.842222222222222e-07,
      "loss": 0.0977,
      "step": 2825
    },
    {
      "epoch": 10.04,
      "learning_rate": 4.786666666666667e-07,
      "loss": 0.0953,
      "step": 2850
    },
    {
      "epoch": 10.12,
      "learning_rate": 4.7311111111111107e-07,
      "loss": 0.0982,
      "step": 2875
    },
    {
      "epoch": 10.21,
      "learning_rate": 4.675555555555555e-07,
      "loss": 0.0917,
      "step": 2900
    },
    {
      "epoch": 10.3,
      "learning_rate": 4.62e-07,
      "loss": 0.0983,
      "step": 2925
    },
    {
      "epoch": 10.39,
      "learning_rate": 4.564444444444444e-07,
      "loss": 0.0966,
      "step": 2950
    },
    {
      "epoch": 10.48,
      "learning_rate": 4.508888888888889e-07,
      "loss": 0.0919,
      "step": 2975
    },
    {
      "epoch": 10.56,
      "learning_rate": 4.4533333333333335e-07,
      "loss": 0.0998,
      "step": 3000
    },
    {
      "epoch": 10.56,
      "eval_loss": 0.24282890558242798,
      "eval_runtime": 429.0566,
      "eval_samples_per_second": 21.144,
      "eval_steps_per_second": 0.662,
      "eval_wer": 10.252029047415634,
      "step": 3000
    },
    {
      "epoch": 10.65,
      "learning_rate": 4.3977777777777775e-07,
      "loss": 0.0964,
      "step": 3025
    },
    {
      "epoch": 10.74,
      "learning_rate": 4.342222222222222e-07,
      "loss": 0.0976,
      "step": 3050
    },
    {
      "epoch": 10.83,
      "learning_rate": 4.286666666666666e-07,
      "loss": 0.0951,
      "step": 3075
    },
    {
      "epoch": 10.92,
      "learning_rate": 4.2311111111111107e-07,
      "loss": 0.0911,
      "step": 3100
    },
    {
      "epoch": 11.0,
      "learning_rate": 4.1755555555555553e-07,
      "loss": 0.0911,
      "step": 3125
    },
    {
      "epoch": 11.09,
      "learning_rate": 4.12e-07,
      "loss": 0.0841,
      "step": 3150
    },
    {
      "epoch": 11.18,
      "learning_rate": 4.0644444444444444e-07,
      "loss": 0.0865,
      "step": 3175
    },
    {
      "epoch": 11.27,
      "learning_rate": 4.008888888888889e-07,
      "loss": 0.0936,
      "step": 3200
    },
    {
      "epoch": 11.36,
      "learning_rate": 3.953333333333333e-07,
      "loss": 0.0868,
      "step": 3225
    },
    {
      "epoch": 11.44,
      "learning_rate": 3.8977777777777776e-07,
      "loss": 0.0934,
      "step": 3250
    },
    {
      "epoch": 11.53,
      "learning_rate": 3.842222222222222e-07,
      "loss": 0.0872,
      "step": 3275
    },
    {
      "epoch": 11.62,
      "learning_rate": 3.786666666666666e-07,
      "loss": 0.0957,
      "step": 3300
    },
    {
      "epoch": 11.71,
      "learning_rate": 3.7311111111111107e-07,
      "loss": 0.0909,
      "step": 3325
    },
    {
      "epoch": 11.8,
      "learning_rate": 3.675555555555556e-07,
      "loss": 0.0843,
      "step": 3350
    },
    {
      "epoch": 11.88,
      "learning_rate": 3.62e-07,
      "loss": 0.0888,
      "step": 3375
    },
    {
      "epoch": 11.97,
      "learning_rate": 3.5644444444444444e-07,
      "loss": 0.0916,
      "step": 3400
    },
    {
      "epoch": 12.06,
      "learning_rate": 3.5088888888888885e-07,
      "loss": 0.0828,
      "step": 3425
    },
    {
      "epoch": 12.15,
      "learning_rate": 3.453333333333333e-07,
      "loss": 0.084,
      "step": 3450
    },
    {
      "epoch": 12.24,
      "learning_rate": 3.3977777777777776e-07,
      "loss": 0.0868,
      "step": 3475
    },
    {
      "epoch": 12.32,
      "learning_rate": 3.3422222222222216e-07,
      "loss": 0.084,
      "step": 3500
    },
    {
      "epoch": 12.41,
      "learning_rate": 3.2866666666666667e-07,
      "loss": 0.0852,
      "step": 3525
    },
    {
      "epoch": 12.5,
      "learning_rate": 3.2311111111111113e-07,
      "loss": 0.0825,
      "step": 3550
    },
    {
      "epoch": 12.59,
      "learning_rate": 3.1755555555555553e-07,
      "loss": 0.0845,
      "step": 3575
    },
    {
      "epoch": 12.68,
      "learning_rate": 3.12e-07,
      "loss": 0.0862,
      "step": 3600
    },
    {
      "epoch": 12.76,
      "learning_rate": 3.0644444444444444e-07,
      "loss": 0.0868,
      "step": 3625
    },
    {
      "epoch": 12.85,
      "learning_rate": 3.0088888888888885e-07,
      "loss": 0.0866,
      "step": 3650
    },
    {
      "epoch": 12.94,
      "learning_rate": 2.953333333333333e-07,
      "loss": 0.0795,
      "step": 3675
    },
    {
      "epoch": 13.03,
      "learning_rate": 2.897777777777778e-07,
      "loss": 0.0847,
      "step": 3700
    },
    {
      "epoch": 13.12,
      "learning_rate": 2.842222222222222e-07,
      "loss": 0.0845,
      "step": 3725
    },
    {
      "epoch": 13.2,
      "learning_rate": 2.786666666666667e-07,
      "loss": 0.0772,
      "step": 3750
    },
    {
      "epoch": 13.29,
      "learning_rate": 2.7311111111111113e-07,
      "loss": 0.0794,
      "step": 3775
    },
    {
      "epoch": 13.38,
      "learning_rate": 2.6755555555555553e-07,
      "loss": 0.0834,
      "step": 3800
    },
    {
      "epoch": 13.47,
      "learning_rate": 2.62e-07,
      "loss": 0.0822,
      "step": 3825
    },
    {
      "epoch": 13.56,
      "learning_rate": 2.564444444444444e-07,
      "loss": 0.0766,
      "step": 3850
    },
    {
      "epoch": 13.64,
      "learning_rate": 2.5088888888888885e-07,
      "loss": 0.0744,
      "step": 3875
    },
    {
      "epoch": 13.73,
      "learning_rate": 2.453333333333333e-07,
      "loss": 0.0818,
      "step": 3900
    },
    {
      "epoch": 13.82,
      "learning_rate": 2.3977777777777776e-07,
      "loss": 0.0803,
      "step": 3925
    },
    {
      "epoch": 13.91,
      "learning_rate": 2.3422222222222222e-07,
      "loss": 0.0779,
      "step": 3950
    },
    {
      "epoch": 14.0,
      "learning_rate": 2.2866666666666665e-07,
      "loss": 0.0809,
      "step": 3975
    },
    {
      "epoch": 14.08,
      "learning_rate": 2.231111111111111e-07,
      "loss": 0.0742,
      "step": 4000
    },
    {
      "epoch": 14.08,
      "eval_loss": 0.24894225597381592,
      "eval_runtime": 429.8433,
      "eval_samples_per_second": 21.105,
      "eval_steps_per_second": 0.661,
      "eval_wer": 10.401537804357112,
      "step": 4000
    },
    {
      "epoch": 14.17,
      "learning_rate": 2.1755555555555554e-07,
      "loss": 0.0778,
      "step": 4025
    },
    {
      "epoch": 14.26,
      "learning_rate": 2.12e-07,
      "loss": 0.0756,
      "step": 4050
    },
    {
      "epoch": 14.35,
      "learning_rate": 2.0644444444444445e-07,
      "loss": 0.0758,
      "step": 4075
    },
    {
      "epoch": 14.44,
      "learning_rate": 2.0088888888888888e-07,
      "loss": 0.0778,
      "step": 4100
    },
    {
      "epoch": 14.52,
      "learning_rate": 1.953333333333333e-07,
      "loss": 0.076,
      "step": 4125
    },
    {
      "epoch": 14.61,
      "learning_rate": 1.8977777777777777e-07,
      "loss": 0.0817,
      "step": 4150
    },
    {
      "epoch": 14.7,
      "learning_rate": 1.8422222222222222e-07,
      "loss": 0.077,
      "step": 4175
    },
    {
      "epoch": 14.79,
      "learning_rate": 1.7866666666666665e-07,
      "loss": 0.0741,
      "step": 4200
    },
    {
      "epoch": 14.88,
      "learning_rate": 1.731111111111111e-07,
      "loss": 0.0785,
      "step": 4225
    },
    {
      "epoch": 14.96,
      "learning_rate": 1.6755555555555556e-07,
      "loss": 0.0751,
      "step": 4250
    },
    {
      "epoch": 15.05,
      "learning_rate": 1.62e-07,
      "loss": 0.077,
      "step": 4275
    },
    {
      "epoch": 15.14,
      "learning_rate": 1.5644444444444442e-07,
      "loss": 0.0737,
      "step": 4300
    },
    {
      "epoch": 15.23,
      "learning_rate": 1.5088888888888888e-07,
      "loss": 0.0744,
      "step": 4325
    },
    {
      "epoch": 15.32,
      "learning_rate": 1.4533333333333334e-07,
      "loss": 0.07,
      "step": 4350
    },
    {
      "epoch": 15.4,
      "learning_rate": 1.3977777777777777e-07,
      "loss": 0.069,
      "step": 4375
    },
    {
      "epoch": 15.49,
      "learning_rate": 1.342222222222222e-07,
      "loss": 0.0746,
      "step": 4400
    },
    {
      "epoch": 15.58,
      "learning_rate": 1.2866666666666668e-07,
      "loss": 0.0759,
      "step": 4425
    },
    {
      "epoch": 15.67,
      "learning_rate": 1.231111111111111e-07,
      "loss": 0.0741,
      "step": 4450
    },
    {
      "epoch": 15.76,
      "learning_rate": 1.1755555555555554e-07,
      "loss": 0.0745,
      "step": 4475
    },
    {
      "epoch": 15.85,
      "learning_rate": 1.12e-07,
      "loss": 0.0744,
      "step": 4500
    },
    {
      "epoch": 15.93,
      "learning_rate": 1.0644444444444444e-07,
      "loss": 0.0776,
      "step": 4525
    },
    {
      "epoch": 16.02,
      "learning_rate": 1.0088888888888888e-07,
      "loss": 0.0759,
      "step": 4550
    },
    {
      "epoch": 16.11,
      "learning_rate": 9.533333333333334e-08,
      "loss": 0.0734,
      "step": 4575
    },
    {
      "epoch": 16.2,
      "learning_rate": 8.977777777777777e-08,
      "loss": 0.072,
      "step": 4600
    },
    {
      "epoch": 16.29,
      "learning_rate": 8.422222222222223e-08,
      "loss": 0.074,
      "step": 4625
    },
    {
      "epoch": 16.37,
      "learning_rate": 7.866666666666666e-08,
      "loss": 0.0711,
      "step": 4650
    },
    {
      "epoch": 16.46,
      "learning_rate": 7.311111111111111e-08,
      "loss": 0.0719,
      "step": 4675
    },
    {
      "epoch": 16.55,
      "learning_rate": 6.755555555555554e-08,
      "loss": 0.076,
      "step": 4700
    },
    {
      "epoch": 16.64,
      "learning_rate": 6.2e-08,
      "loss": 0.0704,
      "step": 4725
    },
    {
      "epoch": 16.73,
      "learning_rate": 5.644444444444444e-08,
      "loss": 0.0726,
      "step": 4750
    },
    {
      "epoch": 16.81,
      "learning_rate": 5.0888888888888886e-08,
      "loss": 0.0703,
      "step": 4775
    },
    {
      "epoch": 16.9,
      "learning_rate": 4.5333333333333336e-08,
      "loss": 0.07,
      "step": 4800
    },
    {
      "epoch": 16.99,
      "learning_rate": 3.977777777777778e-08,
      "loss": 0.0736,
      "step": 4825
    },
    {
      "epoch": 17.08,
      "learning_rate": 3.422222222222222e-08,
      "loss": 0.0722,
      "step": 4850
    },
    {
      "epoch": 17.17,
      "learning_rate": 2.8666666666666665e-08,
      "loss": 0.0704,
      "step": 4875
    },
    {
      "epoch": 17.25,
      "learning_rate": 2.311111111111111e-08,
      "loss": 0.0692,
      "step": 4900
    },
    {
      "epoch": 17.34,
      "learning_rate": 1.7555555555555555e-08,
      "loss": 0.0717,
      "step": 4925
    },
    {
      "epoch": 17.43,
      "learning_rate": 1.2e-08,
      "loss": 0.0679,
      "step": 4950
    },
    {
      "epoch": 17.52,
      "learning_rate": 6.444444444444444e-09,
      "loss": 0.0715,
      "step": 4975
    },
    {
      "epoch": 17.61,
      "learning_rate": 8.888888888888889e-10,
      "loss": 0.0738,
      "step": 5000
    },
    {
      "epoch": 17.61,
      "eval_loss": 0.25284647941589355,
      "eval_runtime": 426.5297,
      "eval_samples_per_second": 21.269,
      "eval_steps_per_second": 0.666,
      "eval_wer": 10.414681431340979,
      "step": 5000
    },
    {
      "epoch": 17.61,
      "step": 5000,
      "total_flos": 9.219033764462592e+19,
      "train_loss": 0.15552212369441987,
      "train_runtime": 15529.1238,
      "train_samples_per_second": 20.606,
      "train_steps_per_second": 0.322
    }
  ],
  "logging_steps": 25,
  "max_steps": 5000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 18,
  "save_steps": 1000,
  "total_flos": 9.219033764462592e+19,
  "train_batch_size": 64,
  "trial_name": null,
  "trial_params": null
}