{
  "best_metric": 19.28991555219663,
  "best_model_checkpoint": "./checkpoint-4000",
  "epoch": 35.2112676056338,
  "eval_steps": 1000,
  "global_step": 5000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.18,
      "learning_rate": 4.4e-08,
      "loss": 1.2187,
      "step": 25
    },
    {
      "epoch": 0.35,
      "learning_rate": 9.4e-08,
      "loss": 1.2095,
      "step": 50
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.44e-07,
      "loss": 1.1872,
      "step": 75
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.94e-07,
      "loss": 1.1709,
      "step": 100
    },
    {
      "epoch": 0.88,
      "learning_rate": 2.4399999999999996e-07,
      "loss": 1.1096,
      "step": 125
    },
    {
      "epoch": 1.06,
      "learning_rate": 2.9399999999999996e-07,
      "loss": 1.0602,
      "step": 150
    },
    {
      "epoch": 1.23,
      "learning_rate": 3.4399999999999996e-07,
      "loss": 0.9914,
      "step": 175
    },
    {
      "epoch": 1.41,
      "learning_rate": 3.94e-07,
      "loss": 0.9003,
      "step": 200
    },
    {
      "epoch": 1.58,
      "learning_rate": 4.44e-07,
      "loss": 0.8196,
      "step": 225
    },
    {
      "epoch": 1.76,
      "learning_rate": 4.94e-07,
      "loss": 0.6752,
      "step": 250
    },
    {
      "epoch": 1.94,
      "learning_rate": 5.44e-07,
      "loss": 0.5487,
      "step": 275
    },
    {
      "epoch": 2.11,
      "learning_rate": 5.939999999999999e-07,
      "loss": 0.5037,
      "step": 300
    },
    {
      "epoch": 2.29,
      "learning_rate": 6.44e-07,
      "loss": 0.4675,
      "step": 325
    },
    {
      "epoch": 2.46,
      "learning_rate": 6.939999999999999e-07,
      "loss": 0.4716,
      "step": 350
    },
    {
      "epoch": 2.64,
      "learning_rate": 7.44e-07,
      "loss": 0.4602,
      "step": 375
    },
    {
      "epoch": 2.82,
      "learning_rate": 7.94e-07,
      "loss": 0.432,
      "step": 400
    },
    {
      "epoch": 2.99,
      "learning_rate": 8.439999999999999e-07,
      "loss": 0.453,
      "step": 425
    },
    {
      "epoch": 3.17,
      "learning_rate": 8.939999999999999e-07,
      "loss": 0.4161,
      "step": 450
    },
    {
      "epoch": 3.35,
      "learning_rate": 9.439999999999999e-07,
      "loss": 0.4119,
      "step": 475
    },
    {
      "epoch": 3.52,
      "learning_rate": 9.94e-07,
      "loss": 0.4225,
      "step": 500
    },
    {
      "epoch": 3.7,
      "learning_rate": 9.95111111111111e-07,
      "loss": 0.4105,
      "step": 525
    },
    {
      "epoch": 3.87,
      "learning_rate": 9.895555555555554e-07,
      "loss": 0.4202,
      "step": 550
    },
    {
      "epoch": 4.05,
      "learning_rate": 9.84e-07,
      "loss": 0.3985,
      "step": 575
    },
    {
      "epoch": 4.23,
      "learning_rate": 9.784444444444444e-07,
      "loss": 0.3901,
      "step": 600
    },
    {
      "epoch": 4.4,
      "learning_rate": 9.728888888888888e-07,
      "loss": 0.3751,
      "step": 625
    },
    {
      "epoch": 4.58,
      "learning_rate": 9.673333333333332e-07,
      "loss": 0.3857,
      "step": 650
    },
    {
      "epoch": 4.75,
      "learning_rate": 9.617777777777776e-07,
      "loss": 0.3814,
      "step": 675
    },
    {
      "epoch": 4.93,
      "learning_rate": 9.562222222222223e-07,
      "loss": 0.3884,
      "step": 700
    },
    {
      "epoch": 5.11,
      "learning_rate": 9.506666666666667e-07,
      "loss": 0.3731,
      "step": 725
    },
    {
      "epoch": 5.28,
      "learning_rate": 9.451111111111111e-07,
      "loss": 0.3576,
      "step": 750
    },
    {
      "epoch": 5.46,
      "learning_rate": 9.395555555555556e-07,
      "loss": 0.3611,
      "step": 775
    },
    {
      "epoch": 5.63,
      "learning_rate": 9.34e-07,
      "loss": 0.3531,
      "step": 800
    },
    {
      "epoch": 5.81,
      "learning_rate": 9.284444444444444e-07,
      "loss": 0.3601,
      "step": 825
    },
    {
      "epoch": 5.99,
      "learning_rate": 9.228888888888888e-07,
      "loss": 0.3636,
      "step": 850
    },
    {
      "epoch": 6.16,
      "learning_rate": 9.173333333333333e-07,
      "loss": 0.3504,
      "step": 875
    },
    {
      "epoch": 6.34,
      "learning_rate": 9.117777777777778e-07,
      "loss": 0.3394,
      "step": 900
    },
    {
      "epoch": 6.51,
      "learning_rate": 9.062222222222222e-07,
      "loss": 0.3536,
      "step": 925
    },
    {
      "epoch": 6.69,
      "learning_rate": 9.006666666666666e-07,
      "loss": 0.345,
      "step": 950
    },
    {
      "epoch": 6.87,
      "learning_rate": 8.95111111111111e-07,
      "loss": 0.3403,
      "step": 975
    },
    {
      "epoch": 7.04,
      "learning_rate": 8.895555555555555e-07,
      "loss": 0.3261,
      "step": 1000
    },
    {
      "epoch": 7.04,
      "eval_loss": 0.40974706411361694,
      "eval_runtime": 301.6855,
      "eval_samples_per_second": 30.071,
      "eval_steps_per_second": 0.471,
      "eval_wer": 20.676568198994513,
      "step": 1000
    },
    {
      "epoch": 7.22,
      "learning_rate": 8.839999999999999e-07,
      "loss": 0.3246,
      "step": 1025
    },
    {
      "epoch": 7.39,
      "learning_rate": 8.784444444444444e-07,
      "loss": 0.3261,
      "step": 1050
    },
    {
      "epoch": 7.57,
      "learning_rate": 8.728888888888889e-07,
      "loss": 0.3307,
      "step": 1075
    },
    {
      "epoch": 7.75,
      "learning_rate": 8.673333333333332e-07,
      "loss": 0.3244,
      "step": 1100
    },
    {
      "epoch": 7.92,
      "learning_rate": 8.617777777777777e-07,
      "loss": 0.3277,
      "step": 1125
    },
    {
      "epoch": 8.1,
      "learning_rate": 8.562222222222222e-07,
      "loss": 0.3232,
      "step": 1150
    },
    {
      "epoch": 8.27,
      "learning_rate": 8.506666666666667e-07,
      "loss": 0.3197,
      "step": 1175
    },
    {
      "epoch": 8.45,
      "learning_rate": 8.451111111111111e-07,
      "loss": 0.3073,
      "step": 1200
    },
    {
      "epoch": 8.63,
      "learning_rate": 8.395555555555556e-07,
      "loss": 0.3146,
      "step": 1225
    },
    {
      "epoch": 8.8,
      "learning_rate": 8.34e-07,
      "loss": 0.3195,
      "step": 1250
    },
    {
      "epoch": 8.98,
      "learning_rate": 8.284444444444444e-07,
      "loss": 0.3117,
      "step": 1275
    },
    {
      "epoch": 9.15,
      "learning_rate": 8.228888888888889e-07,
      "loss": 0.3093,
      "step": 1300
    },
    {
      "epoch": 9.33,
      "learning_rate": 8.173333333333333e-07,
      "loss": 0.2997,
      "step": 1325
    },
    {
      "epoch": 9.51,
      "learning_rate": 8.117777777777778e-07,
      "loss": 0.3,
      "step": 1350
    },
    {
      "epoch": 9.68,
      "learning_rate": 8.062222222222221e-07,
      "loss": 0.3048,
      "step": 1375
    },
    {
      "epoch": 9.86,
      "learning_rate": 8.006666666666666e-07,
      "loss": 0.3,
      "step": 1400
    },
    {
      "epoch": 10.04,
      "learning_rate": 7.95111111111111e-07,
      "loss": 0.3062,
      "step": 1425
    },
    {
      "epoch": 10.21,
      "learning_rate": 7.895555555555555e-07,
      "loss": 0.2855,
      "step": 1450
    },
    {
      "epoch": 10.39,
      "learning_rate": 7.84e-07,
      "loss": 0.2954,
      "step": 1475
    },
    {
      "epoch": 10.56,
      "learning_rate": 7.784444444444444e-07,
      "loss": 0.2931,
      "step": 1500
    },
    {
      "epoch": 10.74,
      "learning_rate": 7.728888888888888e-07,
      "loss": 0.2873,
      "step": 1525
    },
    {
      "epoch": 10.92,
      "learning_rate": 7.673333333333332e-07,
      "loss": 0.2949,
      "step": 1550
    },
    {
      "epoch": 11.09,
      "learning_rate": 7.617777777777778e-07,
      "loss": 0.2882,
      "step": 1575
    },
    {
      "epoch": 11.27,
      "learning_rate": 7.562222222222222e-07,
      "loss": 0.2797,
      "step": 1600
    },
    {
      "epoch": 11.44,
      "learning_rate": 7.506666666666667e-07,
      "loss": 0.281,
      "step": 1625
    },
    {
      "epoch": 11.62,
      "learning_rate": 7.451111111111111e-07,
      "loss": 0.2901,
      "step": 1650
    },
    {
      "epoch": 11.8,
      "learning_rate": 7.395555555555555e-07,
      "loss": 0.2768,
      "step": 1675
    },
    {
      "epoch": 11.97,
      "learning_rate": 7.34e-07,
      "loss": 0.284,
      "step": 1700
    },
    {
      "epoch": 12.15,
      "learning_rate": 7.284444444444444e-07,
      "loss": 0.2822,
      "step": 1725
    },
    {
      "epoch": 12.32,
      "learning_rate": 7.228888888888889e-07,
      "loss": 0.2735,
      "step": 1750
    },
    {
      "epoch": 12.5,
      "learning_rate": 7.173333333333333e-07,
      "loss": 0.2735,
      "step": 1775
    },
    {
      "epoch": 12.68,
      "learning_rate": 7.117777777777777e-07,
      "loss": 0.2736,
      "step": 1800
    },
    {
      "epoch": 12.85,
      "learning_rate": 7.062222222222222e-07,
      "loss": 0.2715,
      "step": 1825
    },
    {
      "epoch": 13.03,
      "learning_rate": 7.006666666666666e-07,
      "loss": 0.2722,
      "step": 1850
    },
    {
      "epoch": 13.2,
      "learning_rate": 6.951111111111111e-07,
      "loss": 0.2666,
      "step": 1875
    },
    {
      "epoch": 13.38,
      "learning_rate": 6.895555555555555e-07,
      "loss": 0.2716,
      "step": 1900
    },
    {
      "epoch": 13.56,
      "learning_rate": 6.84e-07,
      "loss": 0.2628,
      "step": 1925
    },
    {
      "epoch": 13.73,
      "learning_rate": 6.784444444444443e-07,
      "loss": 0.2654,
      "step": 1950
    },
    {
      "epoch": 13.91,
      "learning_rate": 6.728888888888888e-07,
      "loss": 0.2616,
      "step": 1975
    },
    {
      "epoch": 14.08,
      "learning_rate": 6.673333333333334e-07,
      "loss": 0.2632,
      "step": 2000
    },
    {
      "epoch": 14.08,
      "eval_loss": 0.3883862793445587,
      "eval_runtime": 138.6173,
      "eval_samples_per_second": 65.446,
      "eval_steps_per_second": 1.024,
      "eval_wer": 19.51007130417639,
      "step": 2000
    },
    {
      "epoch": 14.26,
      "learning_rate": 6.617777777777778e-07,
      "loss": 0.259,
      "step": 2025
    },
    {
      "epoch": 14.44,
      "learning_rate": 6.562222222222223e-07,
      "loss": 0.2604,
      "step": 2050
    },
    {
      "epoch": 14.61,
      "learning_rate": 6.506666666666666e-07,
      "loss": 0.2608,
      "step": 2075
    },
    {
      "epoch": 14.79,
      "learning_rate": 6.451111111111111e-07,
      "loss": 0.2545,
      "step": 2100
    },
    {
      "epoch": 14.96,
      "learning_rate": 6.395555555555555e-07,
      "loss": 0.256,
      "step": 2125
    },
    {
      "epoch": 15.14,
      "learning_rate": 6.34e-07,
      "loss": 0.2562,
      "step": 2150
    },
    {
      "epoch": 15.32,
      "learning_rate": 6.284444444444445e-07,
      "loss": 0.251,
      "step": 2175
    },
    {
      "epoch": 15.49,
      "learning_rate": 6.228888888888889e-07,
      "loss": 0.2457,
      "step": 2200
    },
    {
      "epoch": 15.67,
      "learning_rate": 6.173333333333333e-07,
      "loss": 0.2542,
      "step": 2225
    },
    {
      "epoch": 15.85,
      "learning_rate": 6.117777777777777e-07,
      "loss": 0.2568,
      "step": 2250
    },
    {
      "epoch": 16.02,
      "learning_rate": 6.062222222222222e-07,
      "loss": 0.2504,
      "step": 2275
    },
    {
      "epoch": 16.2,
      "learning_rate": 6.006666666666666e-07,
      "loss": 0.2415,
      "step": 2300
    },
    {
      "epoch": 16.37,
      "learning_rate": 5.951111111111111e-07,
      "loss": 0.247,
      "step": 2325
    },
    {
      "epoch": 16.55,
      "learning_rate": 5.895555555555555e-07,
      "loss": 0.2471,
      "step": 2350
    },
    {
      "epoch": 16.73,
      "learning_rate": 5.839999999999999e-07,
      "loss": 0.2474,
      "step": 2375
    },
    {
      "epoch": 16.9,
      "learning_rate": 5.784444444444444e-07,
      "loss": 0.2483,
      "step": 2400
    },
    {
      "epoch": 17.08,
      "learning_rate": 5.728888888888888e-07,
      "loss": 0.2423,
      "step": 2425
    },
    {
      "epoch": 17.25,
      "learning_rate": 5.673333333333334e-07,
      "loss": 0.2427,
      "step": 2450
    },
    {
      "epoch": 17.43,
      "learning_rate": 5.617777777777778e-07,
      "loss": 0.242,
      "step": 2475
    },
    {
      "epoch": 17.61,
      "learning_rate": 5.562222222222222e-07,
      "loss": 0.2438,
      "step": 2500
    },
    {
      "epoch": 17.78,
      "learning_rate": 5.506666666666666e-07,
      "loss": 0.2319,
      "step": 2525
    },
    {
      "epoch": 17.96,
      "learning_rate": 5.451111111111111e-07,
      "loss": 0.2404,
      "step": 2550
    },
    {
      "epoch": 18.13,
      "learning_rate": 5.395555555555556e-07,
      "loss": 0.2331,
      "step": 2575
    },
    {
      "epoch": 18.31,
      "learning_rate": 5.34e-07,
      "loss": 0.2315,
      "step": 2600
    },
    {
      "epoch": 18.49,
      "learning_rate": 5.284444444444445e-07,
      "loss": 0.2337,
      "step": 2625
    },
    {
      "epoch": 18.66,
      "learning_rate": 5.228888888888888e-07,
      "loss": 0.2412,
      "step": 2650
    },
    {
      "epoch": 18.84,
      "learning_rate": 5.173333333333333e-07,
      "loss": 0.2362,
      "step": 2675
    },
    {
      "epoch": 19.01,
      "learning_rate": 5.117777777777777e-07,
      "loss": 0.2353,
      "step": 2700
    },
    {
      "epoch": 19.19,
      "learning_rate": 5.062222222222222e-07,
      "loss": 0.2287,
      "step": 2725
    },
    {
      "epoch": 19.37,
      "learning_rate": 5.006666666666667e-07,
      "loss": 0.2265,
      "step": 2750
    },
    {
      "epoch": 19.54,
      "learning_rate": 4.951111111111111e-07,
      "loss": 0.2387,
      "step": 2775
    },
    {
      "epoch": 19.72,
      "learning_rate": 4.895555555555555e-07,
      "loss": 0.2288,
      "step": 2800
    },
    {
      "epoch": 19.89,
      "learning_rate": 4.839999999999999e-07,
      "loss": 0.2281,
      "step": 2825
    },
    {
      "epoch": 20.07,
      "learning_rate": 4.784444444444444e-07,
      "loss": 0.2283,
      "step": 2850
    },
    {
      "epoch": 20.25,
      "learning_rate": 4.728888888888889e-07,
      "loss": 0.2252,
      "step": 2875
    },
    {
      "epoch": 20.42,
      "learning_rate": 4.673333333333333e-07,
      "loss": 0.228,
      "step": 2900
    },
    {
      "epoch": 20.6,
      "learning_rate": 4.6177777777777777e-07,
      "loss": 0.2246,
      "step": 2925
    },
    {
      "epoch": 20.77,
      "learning_rate": 4.5622222222222217e-07,
      "loss": 0.2249,
      "step": 2950
    },
    {
      "epoch": 20.95,
      "learning_rate": 4.506666666666666e-07,
      "loss": 0.2245,
      "step": 2975
    },
    {
      "epoch": 21.13,
      "learning_rate": 4.451111111111111e-07,
      "loss": 0.2241,
      "step": 3000
    },
    {
      "epoch": 21.13,
      "eval_loss": 0.3826531767845154,
      "eval_runtime": 141.4024,
      "eval_samples_per_second": 64.157,
      "eval_steps_per_second": 1.004,
      "eval_wer": 19.468997469851807,
      "step": 3000
    },
    {
      "epoch": 21.3,
      "learning_rate": 4.3955555555555554e-07,
      "loss": 0.2228,
      "step": 3025
    },
    {
      "epoch": 21.48,
      "learning_rate": 4.34e-07,
      "loss": 0.2223,
      "step": 3050
    },
    {
      "epoch": 21.65,
      "learning_rate": 4.2844444444444445e-07,
      "loss": 0.2263,
      "step": 3075
    },
    {
      "epoch": 21.83,
      "learning_rate": 4.2288888888888886e-07,
      "loss": 0.2201,
      "step": 3100
    },
    {
      "epoch": 22.01,
      "learning_rate": 4.173333333333333e-07,
      "loss": 0.2166,
      "step": 3125
    },
    {
      "epoch": 22.18,
      "learning_rate": 4.1177777777777777e-07,
      "loss": 0.2212,
      "step": 3150
    },
    {
      "epoch": 22.36,
      "learning_rate": 4.0622222222222217e-07,
      "loss": 0.2161,
      "step": 3175
    },
    {
      "epoch": 22.54,
      "learning_rate": 4.0066666666666663e-07,
      "loss": 0.2116,
      "step": 3200
    },
    {
      "epoch": 22.71,
      "learning_rate": 3.9511111111111114e-07,
      "loss": 0.2204,
      "step": 3225
    },
    {
      "epoch": 22.89,
      "learning_rate": 3.8955555555555554e-07,
      "loss": 0.2173,
      "step": 3250
    },
    {
      "epoch": 23.06,
      "learning_rate": 3.84e-07,
      "loss": 0.2238,
      "step": 3275
    },
    {
      "epoch": 23.24,
      "learning_rate": 3.7844444444444445e-07,
      "loss": 0.2102,
      "step": 3300
    },
    {
      "epoch": 23.42,
      "learning_rate": 3.7288888888888886e-07,
      "loss": 0.2184,
      "step": 3325
    },
    {
      "epoch": 23.59,
      "learning_rate": 3.673333333333333e-07,
      "loss": 0.2131,
      "step": 3350
    },
    {
      "epoch": 23.77,
      "learning_rate": 3.617777777777777e-07,
      "loss": 0.2123,
      "step": 3375
    },
    {
      "epoch": 23.94,
      "learning_rate": 3.5622222222222223e-07,
      "loss": 0.2151,
      "step": 3400
    },
    {
      "epoch": 24.12,
      "learning_rate": 3.506666666666667e-07,
      "loss": 0.2117,
      "step": 3425
    },
    {
      "epoch": 24.3,
      "learning_rate": 3.451111111111111e-07,
      "loss": 0.2127,
      "step": 3450
    },
    {
      "epoch": 24.47,
      "learning_rate": 3.3955555555555554e-07,
      "loss": 0.211,
      "step": 3475
    },
    {
      "epoch": 24.65,
      "learning_rate": 3.34e-07,
      "loss": 0.2037,
      "step": 3500
    },
    {
      "epoch": 24.82,
      "learning_rate": 3.284444444444444e-07,
      "loss": 0.211,
      "step": 3525
    },
    {
      "epoch": 25.0,
      "learning_rate": 3.2288888888888886e-07,
      "loss": 0.2169,
      "step": 3550
    },
    {
      "epoch": 25.18,
      "learning_rate": 3.173333333333333e-07,
      "loss": 0.2068,
      "step": 3575
    },
    {
      "epoch": 25.35,
      "learning_rate": 3.1177777777777777e-07,
      "loss": 0.2089,
      "step": 3600
    },
    {
      "epoch": 25.53,
      "learning_rate": 3.0622222222222223e-07,
      "loss": 0.2099,
      "step": 3625
    },
    {
      "epoch": 25.7,
      "learning_rate": 3.006666666666667e-07,
      "loss": 0.2092,
      "step": 3650
    },
    {
      "epoch": 25.88,
      "learning_rate": 2.951111111111111e-07,
      "loss": 0.2107,
      "step": 3675
    },
    {
      "epoch": 26.06,
      "learning_rate": 2.8955555555555555e-07,
      "loss": 0.2026,
      "step": 3700
    },
    {
      "epoch": 26.23,
      "learning_rate": 2.8399999999999995e-07,
      "loss": 0.2065,
      "step": 3725
    },
    {
      "epoch": 26.41,
      "learning_rate": 2.784444444444444e-07,
      "loss": 0.1987,
      "step": 3750
    },
    {
      "epoch": 26.58,
      "learning_rate": 2.728888888888889e-07,
      "loss": 0.2115,
      "step": 3775
    },
    {
      "epoch": 26.76,
      "learning_rate": 2.673333333333333e-07,
      "loss": 0.2062,
      "step": 3800
    },
    {
      "epoch": 26.94,
      "learning_rate": 2.617777777777778e-07,
      "loss": 0.2074,
      "step": 3825
    },
    {
      "epoch": 27.11,
      "learning_rate": 2.5622222222222223e-07,
      "loss": 0.2022,
      "step": 3850
    },
    {
      "epoch": 27.29,
      "learning_rate": 2.5066666666666663e-07,
      "loss": 0.2109,
      "step": 3875
    },
    {
      "epoch": 27.46,
      "learning_rate": 2.451111111111111e-07,
      "loss": 0.2001,
      "step": 3900
    },
    {
      "epoch": 27.64,
      "learning_rate": 2.3955555555555555e-07,
      "loss": 0.2069,
      "step": 3925
    },
    {
      "epoch": 27.82,
      "learning_rate": 2.34e-07,
      "loss": 0.2032,
      "step": 3950
    },
    {
      "epoch": 27.99,
      "learning_rate": 2.2844444444444443e-07,
      "loss": 0.2013,
      "step": 3975
    },
    {
      "epoch": 28.17,
      "learning_rate": 2.2288888888888886e-07,
      "loss": 0.2048,
      "step": 4000
    },
    {
      "epoch": 28.17,
      "eval_loss": 0.38145098090171814,
      "eval_runtime": 140.8417,
      "eval_samples_per_second": 64.413,
      "eval_steps_per_second": 1.008,
      "eval_wer": 19.28991555219663,
      "step": 4000
    },
    {
      "epoch": 28.35,
      "learning_rate": 2.1733333333333332e-07,
      "loss": 0.1993,
      "step": 4025
    },
    {
      "epoch": 28.52,
      "learning_rate": 2.1177777777777778e-07,
      "loss": 0.2037,
      "step": 4050
    },
    {
      "epoch": 28.7,
      "learning_rate": 2.0644444444444445e-07,
      "loss": 0.2028,
      "step": 4075
    },
    {
      "epoch": 28.87,
      "learning_rate": 2.0088888888888888e-07,
      "loss": 0.1994,
      "step": 4100
    },
    {
      "epoch": 29.05,
      "learning_rate": 1.953333333333333e-07,
      "loss": 0.1988,
      "step": 4125
    },
    {
      "epoch": 29.23,
      "learning_rate": 1.8977777777777777e-07,
      "loss": 0.2023,
      "step": 4150
    },
    {
      "epoch": 29.4,
      "learning_rate": 1.8422222222222222e-07,
      "loss": 0.199,
      "step": 4175
    },
    {
      "epoch": 29.58,
      "learning_rate": 1.7866666666666665e-07,
      "loss": 0.205,
      "step": 4200
    },
    {
      "epoch": 29.75,
      "learning_rate": 1.731111111111111e-07,
      "loss": 0.1975,
      "step": 4225
    },
    {
      "epoch": 29.93,
      "learning_rate": 1.6755555555555556e-07,
      "loss": 0.1989,
      "step": 4250
    },
    {
      "epoch": 30.11,
      "learning_rate": 1.62e-07,
      "loss": 0.1995,
      "step": 4275
    },
    {
      "epoch": 30.28,
      "learning_rate": 1.5644444444444442e-07,
      "loss": 0.1992,
      "step": 4300
    },
    {
      "epoch": 30.46,
      "learning_rate": 1.5088888888888888e-07,
      "loss": 0.2002,
      "step": 4325
    },
    {
      "epoch": 30.63,
      "learning_rate": 1.4533333333333334e-07,
      "loss": 0.194,
      "step": 4350
    },
    {
      "epoch": 30.81,
      "learning_rate": 1.3977777777777777e-07,
      "loss": 0.199,
      "step": 4375
    },
    {
      "epoch": 30.99,
      "learning_rate": 1.342222222222222e-07,
      "loss": 0.1993,
      "step": 4400
    },
    {
      "epoch": 31.16,
      "learning_rate": 1.2866666666666668e-07,
      "loss": 0.194,
      "step": 4425
    },
    {
      "epoch": 31.34,
      "learning_rate": 1.231111111111111e-07,
      "loss": 0.1967,
      "step": 4450
    },
    {
      "epoch": 31.51,
      "learning_rate": 1.1755555555555554e-07,
      "loss": 0.2012,
      "step": 4475
    },
    {
      "epoch": 31.69,
      "learning_rate": 1.12e-07,
      "loss": 0.1941,
      "step": 4500
    },
    {
      "epoch": 31.87,
      "learning_rate": 1.0644444444444444e-07,
      "loss": 0.1997,
      "step": 4525
    },
    {
      "epoch": 32.04,
      "learning_rate": 1.0088888888888888e-07,
      "loss": 0.195,
      "step": 4550
    },
    {
      "epoch": 32.22,
      "learning_rate": 9.533333333333334e-08,
      "loss": 0.1945,
      "step": 4575
    },
    {
      "epoch": 32.39,
      "learning_rate": 8.977777777777777e-08,
      "loss": 0.1985,
      "step": 4600
    },
    {
      "epoch": 32.57,
      "learning_rate": 8.422222222222223e-08,
      "loss": 0.1988,
      "step": 4625
    },
    {
      "epoch": 32.75,
      "learning_rate": 7.866666666666666e-08,
      "loss": 0.2003,
      "step": 4650
    },
    {
      "epoch": 32.92,
      "learning_rate": 7.311111111111111e-08,
      "loss": 0.1939,
      "step": 4675
    },
    {
      "epoch": 33.1,
      "learning_rate": 6.755555555555554e-08,
      "loss": 0.2,
      "step": 4700
    },
    {
      "epoch": 33.27,
      "learning_rate": 6.2e-08,
      "loss": 0.194,
      "step": 4725
    },
    {
      "epoch": 33.45,
      "learning_rate": 5.644444444444444e-08,
      "loss": 0.1938,
      "step": 4750
    },
    {
      "epoch": 33.63,
      "learning_rate": 5.0888888888888886e-08,
      "loss": 0.1974,
      "step": 4775
    },
    {
      "epoch": 33.8,
      "learning_rate": 4.5333333333333336e-08,
      "loss": 0.1901,
      "step": 4800
    },
    {
      "epoch": 33.98,
      "learning_rate": 3.977777777777778e-08,
      "loss": 0.2007,
      "step": 4825
    },
    {
      "epoch": 34.15,
      "learning_rate": 3.422222222222222e-08,
      "loss": 0.1894,
      "step": 4850
    },
    {
      "epoch": 34.33,
      "learning_rate": 2.8666666666666665e-08,
      "loss": 0.1919,
      "step": 4875
    },
    {
      "epoch": 34.51,
      "learning_rate": 2.311111111111111e-08,
      "loss": 0.203,
      "step": 4900
    },
    {
      "epoch": 34.68,
      "learning_rate": 1.7555555555555555e-08,
      "loss": 0.194,
      "step": 4925
    },
    {
      "epoch": 34.86,
      "learning_rate": 1.2e-08,
      "loss": 0.2002,
      "step": 4950
    },
    {
      "epoch": 35.04,
      "learning_rate": 6.444444444444444e-09,
      "loss": 0.1946,
      "step": 4975
    },
    {
      "epoch": 35.21,
      "learning_rate": 8.888888888888889e-10,
      "loss": 0.1956,
      "step": 5000
    },
    {
      "epoch": 35.21,
      "eval_loss": 0.38145187497138977,
      "eval_runtime": 141.6355,
      "eval_samples_per_second": 64.052,
      "eval_steps_per_second": 1.003,
      "eval_wer": 19.403279334932474,
      "step": 5000
    },
    {
      "epoch": 35.21,
      "step": 5000,
      "total_flos": 4.14377543467008e+19,
      "train_loss": 0.30019377393722535,
      "train_runtime": 10549.3759,
      "train_samples_per_second": 60.667,
      "train_steps_per_second": 0.474
    }
  ],
  "logging_steps": 25,
  "max_steps": 5000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 36,
  "save_steps": 1000,
  "total_flos": 4.14377543467008e+19,
  "train_batch_size": 128,
  "trial_name": null,
  "trial_params": null
}