{
  "best_metric": 13.789654186910546,
  "best_model_checkpoint": "./checkpoint-5000",
  "epoch": 19.0272,
  "eval_steps": 1000,
  "global_step": 5000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 1.15e-06,
      "loss": 1.4878,
      "step": 25
    },
    {
      "epoch": 0.01,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 1.1269,
      "step": 50
    },
    {
      "epoch": 0.01,
      "learning_rate": 3.6499999999999998e-06,
      "loss": 0.7581,
      "step": 75
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9000000000000005e-06,
      "loss": 0.6181,
      "step": 100
    },
    {
      "epoch": 0.03,
      "learning_rate": 6.15e-06,
      "loss": 0.5697,
      "step": 125
    },
    {
      "epoch": 0.03,
      "learning_rate": 7.4e-06,
      "loss": 0.4979,
      "step": 150
    },
    {
      "epoch": 0.04,
      "learning_rate": 8.65e-06,
      "loss": 0.4626,
      "step": 175
    },
    {
      "epoch": 0.04,
      "learning_rate": 9.900000000000002e-06,
      "loss": 0.4434,
      "step": 200
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.115e-05,
      "loss": 0.4234,
      "step": 225
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.24e-05,
      "loss": 0.3901,
      "step": 250
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.3650000000000001e-05,
      "loss": 0.4219,
      "step": 275
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.49e-05,
      "loss": 0.4558,
      "step": 300
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.6150000000000003e-05,
      "loss": 0.4314,
      "step": 325
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.74e-05,
      "loss": 0.3732,
      "step": 350
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.865e-05,
      "loss": 0.3687,
      "step": 375
    },
    {
      "epoch": 1.03,
      "learning_rate": 1.9900000000000003e-05,
      "loss": 0.3768,
      "step": 400
    },
    {
      "epoch": 1.03,
      "learning_rate": 2.115e-05,
      "loss": 0.3734,
      "step": 425
    },
    {
      "epoch": 1.04,
      "learning_rate": 2.2400000000000002e-05,
      "loss": 0.3721,
      "step": 450
    },
    {
      "epoch": 1.04,
      "learning_rate": 2.365e-05,
      "loss": 0.3363,
      "step": 475
    },
    {
      "epoch": 1.05,
      "learning_rate": 2.4900000000000002e-05,
      "loss": 0.3305,
      "step": 500
    },
    {
      "epoch": 2.0,
      "learning_rate": 2.4872222222222223e-05,
      "loss": 0.3124,
      "step": 525
    },
    {
      "epoch": 2.01,
      "learning_rate": 2.4733333333333333e-05,
      "loss": 0.2916,
      "step": 550
    },
    {
      "epoch": 2.01,
      "learning_rate": 2.4594444444444445e-05,
      "loss": 0.2948,
      "step": 575
    },
    {
      "epoch": 2.02,
      "learning_rate": 2.4455555555555558e-05,
      "loss": 0.2927,
      "step": 600
    },
    {
      "epoch": 2.02,
      "learning_rate": 2.4316666666666667e-05,
      "loss": 0.2957,
      "step": 625
    },
    {
      "epoch": 2.03,
      "learning_rate": 2.417777777777778e-05,
      "loss": 0.276,
      "step": 650
    },
    {
      "epoch": 2.03,
      "learning_rate": 2.4038888888888892e-05,
      "loss": 0.2716,
      "step": 675
    },
    {
      "epoch": 2.04,
      "learning_rate": 2.39e-05,
      "loss": 0.2185,
      "step": 700
    },
    {
      "epoch": 2.04,
      "learning_rate": 2.376111111111111e-05,
      "loss": 0.1877,
      "step": 725
    },
    {
      "epoch": 2.05,
      "learning_rate": 2.3622222222222223e-05,
      "loss": 0.1687,
      "step": 750
    },
    {
      "epoch": 3.0,
      "learning_rate": 2.3483333333333336e-05,
      "loss": 0.1758,
      "step": 775
    },
    {
      "epoch": 3.01,
      "learning_rate": 2.3344444444444445e-05,
      "loss": 0.1751,
      "step": 800
    },
    {
      "epoch": 3.01,
      "learning_rate": 2.3205555555555555e-05,
      "loss": 0.1747,
      "step": 825
    },
    {
      "epoch": 3.02,
      "learning_rate": 2.3066666666666667e-05,
      "loss": 0.1694,
      "step": 850
    },
    {
      "epoch": 3.02,
      "learning_rate": 2.292777777777778e-05,
      "loss": 0.1783,
      "step": 875
    },
    {
      "epoch": 3.03,
      "learning_rate": 2.278888888888889e-05,
      "loss": 0.1693,
      "step": 900
    },
    {
      "epoch": 3.03,
      "learning_rate": 2.265e-05,
      "loss": 0.1626,
      "step": 925
    },
    {
      "epoch": 3.04,
      "learning_rate": 2.2511111111111114e-05,
      "loss": 0.141,
      "step": 950
    },
    {
      "epoch": 3.04,
      "learning_rate": 2.2372222222222224e-05,
      "loss": 0.1032,
      "step": 975
    },
    {
      "epoch": 3.05,
      "learning_rate": 2.2233333333333333e-05,
      "loss": 0.0975,
      "step": 1000
    },
    {
      "epoch": 3.05,
      "eval_loss": 0.35604599118232727,
      "eval_runtime": 1632.0592,
      "eval_samples_per_second": 10.036,
      "eval_steps_per_second": 0.157,
      "eval_wer": 19.44212632180623,
      "step": 1000
    },
    {
      "epoch": 4.0,
      "learning_rate": 2.2094444444444445e-05,
      "loss": 0.1018,
      "step": 1025
    },
    {
      "epoch": 4.01,
      "learning_rate": 2.1955555555555558e-05,
      "loss": 0.2025,
      "step": 1050
    },
    {
      "epoch": 4.01,
      "learning_rate": 2.1816666666666667e-05,
      "loss": 0.1857,
      "step": 1075
    },
    {
      "epoch": 4.02,
      "learning_rate": 2.167777777777778e-05,
      "loss": 0.1559,
      "step": 1100
    },
    {
      "epoch": 4.02,
      "learning_rate": 2.153888888888889e-05,
      "loss": 0.1463,
      "step": 1125
    },
    {
      "epoch": 4.03,
      "learning_rate": 2.1400000000000002e-05,
      "loss": 0.1969,
      "step": 1150
    },
    {
      "epoch": 4.03,
      "learning_rate": 2.126111111111111e-05,
      "loss": 0.163,
      "step": 1175
    },
    {
      "epoch": 4.04,
      "learning_rate": 2.1122222222222224e-05,
      "loss": 0.1714,
      "step": 1200
    },
    {
      "epoch": 4.04,
      "learning_rate": 2.0983333333333336e-05,
      "loss": 0.1744,
      "step": 1225
    },
    {
      "epoch": 4.05,
      "learning_rate": 2.0844444444444446e-05,
      "loss": 0.1704,
      "step": 1250
    },
    {
      "epoch": 4.05,
      "learning_rate": 2.0705555555555555e-05,
      "loss": 0.2309,
      "step": 1275
    },
    {
      "epoch": 5.0,
      "learning_rate": 2.0566666666666667e-05,
      "loss": 0.2039,
      "step": 1300
    },
    {
      "epoch": 5.01,
      "learning_rate": 2.042777777777778e-05,
      "loss": 0.1874,
      "step": 1325
    },
    {
      "epoch": 5.01,
      "learning_rate": 2.028888888888889e-05,
      "loss": 0.1688,
      "step": 1350
    },
    {
      "epoch": 5.02,
      "learning_rate": 2.0150000000000002e-05,
      "loss": 0.168,
      "step": 1375
    },
    {
      "epoch": 5.02,
      "learning_rate": 2.001111111111111e-05,
      "loss": 0.1221,
      "step": 1400
    },
    {
      "epoch": 5.03,
      "learning_rate": 1.9872222222222224e-05,
      "loss": 0.0861,
      "step": 1425
    },
    {
      "epoch": 5.03,
      "learning_rate": 1.9733333333333333e-05,
      "loss": 0.0923,
      "step": 1450
    },
    {
      "epoch": 5.04,
      "learning_rate": 1.9594444444444446e-05,
      "loss": 0.0836,
      "step": 1475
    },
    {
      "epoch": 5.04,
      "learning_rate": 1.9455555555555558e-05,
      "loss": 0.119,
      "step": 1500
    },
    {
      "epoch": 5.05,
      "learning_rate": 1.9316666666666668e-05,
      "loss": 0.1351,
      "step": 1525
    },
    {
      "epoch": 6.0,
      "learning_rate": 1.9177777777777777e-05,
      "loss": 0.1592,
      "step": 1550
    },
    {
      "epoch": 6.01,
      "learning_rate": 1.903888888888889e-05,
      "loss": 0.1755,
      "step": 1575
    },
    {
      "epoch": 6.01,
      "learning_rate": 1.8900000000000002e-05,
      "loss": 0.186,
      "step": 1600
    },
    {
      "epoch": 6.02,
      "learning_rate": 1.876111111111111e-05,
      "loss": 0.1717,
      "step": 1625
    },
    {
      "epoch": 6.02,
      "learning_rate": 1.8622222222222224e-05,
      "loss": 0.1649,
      "step": 1650
    },
    {
      "epoch": 6.03,
      "learning_rate": 1.8483333333333333e-05,
      "loss": 0.1623,
      "step": 1675
    },
    {
      "epoch": 6.03,
      "learning_rate": 1.8344444444444446e-05,
      "loss": 0.1553,
      "step": 1700
    },
    {
      "epoch": 6.04,
      "learning_rate": 1.8205555555555555e-05,
      "loss": 0.1566,
      "step": 1725
    },
    {
      "epoch": 6.04,
      "learning_rate": 1.8066666666666668e-05,
      "loss": 0.1637,
      "step": 1750
    },
    {
      "epoch": 6.05,
      "learning_rate": 1.792777777777778e-05,
      "loss": 0.163,
      "step": 1775
    },
    {
      "epoch": 7.0,
      "learning_rate": 1.778888888888889e-05,
      "loss": 0.1398,
      "step": 1800
    },
    {
      "epoch": 7.01,
      "learning_rate": 1.765e-05,
      "loss": 0.0795,
      "step": 1825
    },
    {
      "epoch": 7.01,
      "learning_rate": 1.751111111111111e-05,
      "loss": 0.1083,
      "step": 1850
    },
    {
      "epoch": 7.02,
      "learning_rate": 1.7372222222222224e-05,
      "loss": 0.1092,
      "step": 1875
    },
    {
      "epoch": 7.02,
      "learning_rate": 1.7233333333333333e-05,
      "loss": 0.1141,
      "step": 1900
    },
    {
      "epoch": 7.03,
      "learning_rate": 1.7094444444444446e-05,
      "loss": 0.1087,
      "step": 1925
    },
    {
      "epoch": 7.03,
      "learning_rate": 1.6955555555555555e-05,
      "loss": 0.1175,
      "step": 1950
    },
    {
      "epoch": 7.04,
      "learning_rate": 1.6816666666666668e-05,
      "loss": 0.1324,
      "step": 1975
    },
    {
      "epoch": 7.04,
      "learning_rate": 1.6677777777777777e-05,
      "loss": 0.1381,
      "step": 2000
    },
    {
      "epoch": 7.04,
      "eval_loss": 0.30660438537597656,
      "eval_runtime": 1702.0806,
      "eval_samples_per_second": 9.624,
      "eval_steps_per_second": 0.15,
      "eval_wer": 16.148613889682768,
      "step": 2000
    },
    {
      "epoch": 7.05,
      "learning_rate": 1.653888888888889e-05,
      "loss": 0.1663,
      "step": 2025
    },
    {
      "epoch": 8.0,
      "learning_rate": 1.6400000000000002e-05,
      "loss": 0.1677,
      "step": 2050
    },
    {
      "epoch": 8.01,
      "learning_rate": 1.626111111111111e-05,
      "loss": 0.1238,
      "step": 2075
    },
    {
      "epoch": 8.01,
      "learning_rate": 1.612222222222222e-05,
      "loss": 0.1224,
      "step": 2100
    },
    {
      "epoch": 8.02,
      "learning_rate": 1.5983333333333333e-05,
      "loss": 0.1049,
      "step": 2125
    },
    {
      "epoch": 8.02,
      "learning_rate": 1.5844444444444446e-05,
      "loss": 0.1034,
      "step": 2150
    },
    {
      "epoch": 8.03,
      "learning_rate": 1.5705555555555555e-05,
      "loss": 0.1026,
      "step": 2175
    },
    {
      "epoch": 8.03,
      "learning_rate": 1.5566666666666668e-05,
      "loss": 0.0968,
      "step": 2200
    },
    {
      "epoch": 8.04,
      "learning_rate": 1.542777777777778e-05,
      "loss": 0.1005,
      "step": 2225
    },
    {
      "epoch": 8.04,
      "learning_rate": 1.528888888888889e-05,
      "loss": 0.0967,
      "step": 2250
    },
    {
      "epoch": 8.05,
      "learning_rate": 1.515e-05,
      "loss": 0.1017,
      "step": 2275
    },
    {
      "epoch": 8.05,
      "learning_rate": 1.5011111111111112e-05,
      "loss": 0.1519,
      "step": 2300
    },
    {
      "epoch": 9.0,
      "learning_rate": 1.4872222222222224e-05,
      "loss": 0.0569,
      "step": 2325
    },
    {
      "epoch": 9.01,
      "learning_rate": 1.4733333333333335e-05,
      "loss": 0.0462,
      "step": 2350
    },
    {
      "epoch": 9.01,
      "learning_rate": 1.4594444444444444e-05,
      "loss": 0.0614,
      "step": 2375
    },
    {
      "epoch": 9.02,
      "learning_rate": 1.4455555555555555e-05,
      "loss": 0.0585,
      "step": 2400
    },
    {
      "epoch": 9.02,
      "learning_rate": 1.4316666666666668e-05,
      "loss": 0.0612,
      "step": 2425
    },
    {
      "epoch": 9.03,
      "learning_rate": 1.4177777777777779e-05,
      "loss": 0.061,
      "step": 2450
    },
    {
      "epoch": 9.03,
      "learning_rate": 1.403888888888889e-05,
      "loss": 0.0673,
      "step": 2475
    },
    {
      "epoch": 9.04,
      "learning_rate": 1.3900000000000002e-05,
      "loss": 0.0753,
      "step": 2500
    },
    {
      "epoch": 9.04,
      "learning_rate": 1.376111111111111e-05,
      "loss": 0.0879,
      "step": 2525
    },
    {
      "epoch": 9.05,
      "learning_rate": 1.3622222222222223e-05,
      "loss": 0.1121,
      "step": 2550
    },
    {
      "epoch": 10.0,
      "learning_rate": 1.3483333333333334e-05,
      "loss": 0.0974,
      "step": 2575
    },
    {
      "epoch": 10.01,
      "learning_rate": 1.3344444444444446e-05,
      "loss": 0.093,
      "step": 2600
    },
    {
      "epoch": 10.01,
      "learning_rate": 1.3205555555555557e-05,
      "loss": 0.0935,
      "step": 2625
    },
    {
      "epoch": 10.02,
      "learning_rate": 1.3066666666666666e-05,
      "loss": 0.0897,
      "step": 2650
    },
    {
      "epoch": 10.02,
      "learning_rate": 1.2927777777777777e-05,
      "loss": 0.0848,
      "step": 2675
    },
    {
      "epoch": 10.03,
      "learning_rate": 1.278888888888889e-05,
      "loss": 0.0877,
      "step": 2700
    },
    {
      "epoch": 10.03,
      "learning_rate": 1.2650000000000001e-05,
      "loss": 0.0887,
      "step": 2725
    },
    {
      "epoch": 10.04,
      "learning_rate": 1.2511111111111112e-05,
      "loss": 0.0958,
      "step": 2750
    },
    {
      "epoch": 10.04,
      "learning_rate": 1.2372222222222223e-05,
      "loss": 0.0877,
      "step": 2775
    },
    {
      "epoch": 10.05,
      "learning_rate": 1.2233333333333334e-05,
      "loss": 0.1059,
      "step": 2800
    },
    {
      "epoch": 11.0,
      "learning_rate": 1.2094444444444445e-05,
      "loss": 0.1311,
      "step": 2825
    },
    {
      "epoch": 11.01,
      "learning_rate": 1.1955555555555556e-05,
      "loss": 0.1406,
      "step": 2850
    },
    {
      "epoch": 11.01,
      "learning_rate": 1.1816666666666668e-05,
      "loss": 0.1156,
      "step": 2875
    },
    {
      "epoch": 11.02,
      "learning_rate": 1.1677777777777777e-05,
      "loss": 0.1152,
      "step": 2900
    },
    {
      "epoch": 11.02,
      "learning_rate": 1.153888888888889e-05,
      "loss": 0.1222,
      "step": 2925
    },
    {
      "epoch": 11.03,
      "learning_rate": 1.1400000000000001e-05,
      "loss": 0.1134,
      "step": 2950
    },
    {
      "epoch": 11.03,
      "learning_rate": 1.1261111111111112e-05,
      "loss": 0.1452,
      "step": 2975
    },
    {
      "epoch": 11.04,
      "learning_rate": 1.1122222222222223e-05,
      "loss": 0.1302,
      "step": 3000
    },
    {
      "epoch": 11.04,
      "eval_loss": 0.29019278287887573,
      "eval_runtime": 1621.3431,
      "eval_samples_per_second": 10.103,
      "eval_steps_per_second": 0.158,
      "eval_wer": 15.429551300371536,
      "step": 3000
    },
    {
      "epoch": 11.04,
      "learning_rate": 1.0983333333333334e-05,
      "loss": 0.1211,
      "step": 3025
    },
    {
      "epoch": 11.05,
      "learning_rate": 1.0844444444444445e-05,
      "loss": 0.1086,
      "step": 3050
    },
    {
      "epoch": 12.0,
      "learning_rate": 1.0705555555555556e-05,
      "loss": 0.1383,
      "step": 3075
    },
    {
      "epoch": 12.01,
      "learning_rate": 1.0566666666666668e-05,
      "loss": 0.1564,
      "step": 3100
    },
    {
      "epoch": 12.01,
      "learning_rate": 1.0427777777777778e-05,
      "loss": 0.0935,
      "step": 3125
    },
    {
      "epoch": 12.02,
      "learning_rate": 1.028888888888889e-05,
      "loss": 0.0932,
      "step": 3150
    },
    {
      "epoch": 12.02,
      "learning_rate": 1.0150000000000001e-05,
      "loss": 0.0871,
      "step": 3175
    },
    {
      "epoch": 12.03,
      "learning_rate": 1.0011111111111112e-05,
      "loss": 0.0849,
      "step": 3200
    },
    {
      "epoch": 12.03,
      "learning_rate": 9.872222222222223e-06,
      "loss": 0.117,
      "step": 3225
    },
    {
      "epoch": 12.04,
      "learning_rate": 9.733333333333334e-06,
      "loss": 0.1088,
      "step": 3250
    },
    {
      "epoch": 12.04,
      "learning_rate": 9.594444444444445e-06,
      "loss": 0.1203,
      "step": 3275
    },
    {
      "epoch": 12.05,
      "learning_rate": 9.455555555555556e-06,
      "loss": 0.2177,
      "step": 3300
    },
    {
      "epoch": 12.05,
      "learning_rate": 9.316666666666667e-06,
      "loss": 0.2322,
      "step": 3325
    },
    {
      "epoch": 13.0,
      "learning_rate": 9.177777777777778e-06,
      "loss": 0.0939,
      "step": 3350
    },
    {
      "epoch": 13.01,
      "learning_rate": 9.03888888888889e-06,
      "loss": 0.0818,
      "step": 3375
    },
    {
      "epoch": 13.01,
      "learning_rate": 8.9e-06,
      "loss": 0.0832,
      "step": 3400
    },
    {
      "epoch": 13.02,
      "learning_rate": 8.761111111111112e-06,
      "loss": 0.0884,
      "step": 3425
    },
    {
      "epoch": 13.02,
      "learning_rate": 8.622222222222223e-06,
      "loss": 0.0814,
      "step": 3450
    },
    {
      "epoch": 13.03,
      "learning_rate": 8.483333333333334e-06,
      "loss": 0.0811,
      "step": 3475
    },
    {
      "epoch": 13.03,
      "learning_rate": 8.344444444444445e-06,
      "loss": 0.133,
      "step": 3500
    },
    {
      "epoch": 13.04,
      "learning_rate": 8.205555555555556e-06,
      "loss": 0.1353,
      "step": 3525
    },
    {
      "epoch": 13.04,
      "learning_rate": 8.066666666666667e-06,
      "loss": 0.1403,
      "step": 3550
    },
    {
      "epoch": 13.05,
      "learning_rate": 7.927777777777778e-06,
      "loss": 0.1346,
      "step": 3575
    },
    {
      "epoch": 14.0,
      "learning_rate": 7.788888888888889e-06,
      "loss": 0.1203,
      "step": 3600
    },
    {
      "epoch": 14.01,
      "learning_rate": 7.65e-06,
      "loss": 0.1217,
      "step": 3625
    },
    {
      "epoch": 14.01,
      "learning_rate": 7.511111111111112e-06,
      "loss": 0.1338,
      "step": 3650
    },
    {
      "epoch": 14.02,
      "learning_rate": 7.372222222222222e-06,
      "loss": 0.1273,
      "step": 3675
    },
    {
      "epoch": 14.02,
      "learning_rate": 7.233333333333333e-06,
      "loss": 0.118,
      "step": 3700
    },
    {
      "epoch": 14.03,
      "learning_rate": 7.094444444444445e-06,
      "loss": 0.1204,
      "step": 3725
    },
    {
      "epoch": 14.03,
      "learning_rate": 6.955555555555555e-06,
      "loss": 0.1134,
      "step": 3750
    },
    {
      "epoch": 14.04,
      "learning_rate": 6.816666666666667e-06,
      "loss": 0.1072,
      "step": 3775
    },
    {
      "epoch": 14.04,
      "learning_rate": 6.677777777777779e-06,
      "loss": 0.1272,
      "step": 3800
    },
    {
      "epoch": 14.05,
      "learning_rate": 6.538888888888889e-06,
      "loss": 0.1333,
      "step": 3825
    },
    {
      "epoch": 15.0,
      "learning_rate": 6.4000000000000006e-06,
      "loss": 0.1117,
      "step": 3850
    },
    {
      "epoch": 15.01,
      "learning_rate": 6.261111111111112e-06,
      "loss": 0.1086,
      "step": 3875
    },
    {
      "epoch": 15.01,
      "learning_rate": 6.1222222222222224e-06,
      "loss": 0.1172,
      "step": 3900
    },
    {
      "epoch": 15.02,
      "learning_rate": 5.983333333333334e-06,
      "loss": 0.0843,
      "step": 3925
    },
    {
      "epoch": 15.02,
      "learning_rate": 5.844444444444445e-06,
      "loss": 0.0879,
      "step": 3950
    },
    {
      "epoch": 15.03,
      "learning_rate": 5.705555555555555e-06,
      "loss": 0.0939,
      "step": 3975
    },
    {
      "epoch": 15.03,
      "learning_rate": 5.566666666666667e-06,
      "loss": 0.1089,
      "step": 4000
    },
    {
      "epoch": 15.03,
      "eval_loss": 0.269902765750885,
      "eval_runtime": 1917.0677,
      "eval_samples_per_second": 8.544,
      "eval_steps_per_second": 0.134,
      "eval_wer": 14.072592169191198,
      "step": 4000
    },
    {
      "epoch": 15.04,
      "learning_rate": 5.427777777777778e-06,
      "loss": 0.122,
      "step": 4025
    },
    {
      "epoch": 15.04,
      "learning_rate": 5.288888888888889e-06,
      "loss": 0.0815,
      "step": 4050
    },
    {
      "epoch": 15.05,
      "learning_rate": 5.15e-06,
      "loss": 0.0934,
      "step": 4075
    },
    {
      "epoch": 16.0,
      "learning_rate": 5.011111111111112e-06,
      "loss": 0.0918,
      "step": 4100
    },
    {
      "epoch": 16.01,
      "learning_rate": 4.8722222222222225e-06,
      "loss": 0.0797,
      "step": 4125
    },
    {
      "epoch": 16.01,
      "learning_rate": 4.7333333333333335e-06,
      "loss": 0.0638,
      "step": 4150
    },
    {
      "epoch": 16.02,
      "learning_rate": 4.594444444444445e-06,
      "loss": 0.0672,
      "step": 4175
    },
    {
      "epoch": 16.02,
      "learning_rate": 4.455555555555556e-06,
      "loss": 0.0727,
      "step": 4200
    },
    {
      "epoch": 16.03,
      "learning_rate": 4.316666666666667e-06,
      "loss": 0.1006,
      "step": 4225
    },
    {
      "epoch": 16.03,
      "learning_rate": 4.177777777777778e-06,
      "loss": 0.0968,
      "step": 4250
    },
    {
      "epoch": 16.04,
      "learning_rate": 4.038888888888889e-06,
      "loss": 0.0806,
      "step": 4275
    },
    {
      "epoch": 16.04,
      "learning_rate": 3.9e-06,
      "loss": 0.0873,
      "step": 4300
    },
    {
      "epoch": 16.05,
      "learning_rate": 3.7611111111111113e-06,
      "loss": 0.09,
      "step": 4325
    },
    {
      "epoch": 16.05,
      "learning_rate": 3.6222222222222226e-06,
      "loss": 0.1166,
      "step": 4350
    },
    {
      "epoch": 17.0,
      "learning_rate": 3.4833333333333336e-06,
      "loss": 0.1679,
      "step": 4375
    },
    {
      "epoch": 17.01,
      "learning_rate": 3.3444444444444445e-06,
      "loss": 0.0887,
      "step": 4400
    },
    {
      "epoch": 17.01,
      "learning_rate": 3.2055555555555555e-06,
      "loss": 0.0882,
      "step": 4425
    },
    {
      "epoch": 17.02,
      "learning_rate": 3.066666666666667e-06,
      "loss": 0.0593,
      "step": 4450
    },
    {
      "epoch": 17.02,
      "learning_rate": 2.9277777777777777e-06,
      "loss": 0.0996,
      "step": 4475
    },
    {
      "epoch": 17.03,
      "learning_rate": 2.788888888888889e-06,
      "loss": 0.1502,
      "step": 4500
    },
    {
      "epoch": 17.03,
      "learning_rate": 2.65e-06,
      "loss": 0.0987,
      "step": 4525
    },
    {
      "epoch": 17.04,
      "learning_rate": 2.5111111111111114e-06,
      "loss": 0.119,
      "step": 4550
    },
    {
      "epoch": 17.04,
      "learning_rate": 2.3722222222222223e-06,
      "loss": 0.108,
      "step": 4575
    },
    {
      "epoch": 17.05,
      "learning_rate": 2.2333333333333333e-06,
      "loss": 0.1164,
      "step": 4600
    },
    {
      "epoch": 18.0,
      "learning_rate": 2.0944444444444446e-06,
      "loss": 0.0979,
      "step": 4625
    },
    {
      "epoch": 18.01,
      "learning_rate": 1.9555555555555556e-06,
      "loss": 0.0748,
      "step": 4650
    },
    {
      "epoch": 18.01,
      "learning_rate": 1.816666666666667e-06,
      "loss": 0.0705,
      "step": 4675
    },
    {
      "epoch": 18.02,
      "learning_rate": 1.6777777777777779e-06,
      "loss": 0.0703,
      "step": 4700
    },
    {
      "epoch": 18.02,
      "learning_rate": 1.538888888888889e-06,
      "loss": 0.0718,
      "step": 4725
    },
    {
      "epoch": 18.03,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 0.0697,
      "step": 4750
    },
    {
      "epoch": 18.03,
      "learning_rate": 1.261111111111111e-06,
      "loss": 0.0699,
      "step": 4775
    },
    {
      "epoch": 18.04,
      "learning_rate": 1.1222222222222222e-06,
      "loss": 0.0677,
      "step": 4800
    },
    {
      "epoch": 18.04,
      "learning_rate": 9.833333333333334e-07,
      "loss": 0.0644,
      "step": 4825
    },
    {
      "epoch": 18.05,
      "learning_rate": 8.444444444444444e-07,
      "loss": 0.0645,
      "step": 4850
    },
    {
      "epoch": 19.0,
      "learning_rate": 7.055555555555556e-07,
      "loss": 0.0593,
      "step": 4875
    },
    {
      "epoch": 19.01,
      "learning_rate": 5.666666666666667e-07,
      "loss": 0.0544,
      "step": 4900
    },
    {
      "epoch": 19.01,
      "learning_rate": 4.277777777777778e-07,
      "loss": 0.0537,
      "step": 4925
    },
    {
      "epoch": 19.02,
      "learning_rate": 2.888888888888889e-07,
      "loss": 0.0526,
      "step": 4950
    },
    {
      "epoch": 19.02,
      "learning_rate": 1.5000000000000002e-07,
      "loss": 0.05,
      "step": 4975
    },
    {
      "epoch": 19.03,
      "learning_rate": 1.1111111111111112e-08,
      "loss": 0.0505,
      "step": 5000
    },
    {
      "epoch": 19.03,
      "eval_loss": 0.2782333791255951,
      "eval_runtime": 2016.6365,
      "eval_samples_per_second": 8.122,
      "eval_steps_per_second": 0.127,
      "eval_wer": 13.789654186910546,
      "step": 5000
    },
    {
      "epoch": 19.03,
      "step": 5000,
      "total_flos": 4.150053882298368e+19,
      "train_loss": 0.16429689004421233,
      "train_runtime": 73989.538,
      "train_samples_per_second": 8.65,
      "train_steps_per_second": 0.068
    }
  ],
  "logging_steps": 25,
  "max_steps": 5000,
  "num_train_epochs": 9223372036854775807,
  "save_steps": 1000,
  "total_flos": 4.150053882298368e+19,
  "trial_name": null,
  "trial_params": null
}