{
    "best_metric": 99.95633187772926,
    "best_model_checkpoint": "./whisper-small-gom/checkpoint-1000",
    "epoch": 1.41643059490085,
    "global_step": 1000,
    "is_hyper_param_search": false,
    "is_local_process_zero": true,
    "is_world_process_zero": true,
    "log_history": [
        {
            "epoch": 0.04,
            "learning_rate": 4.6000000000000004e-07,
            "loss": 2.481,
            "step": 25
        },
        {
            "epoch": 0.07,
            "learning_rate": 9.600000000000001e-07,
            "loss": 2.0967,
            "step": 50
        },
        {
            "epoch": 0.11,
            "learning_rate": 1.46e-06,
            "loss": 1.8738,
            "step": 75
        },
        {
            "epoch": 0.14,
            "learning_rate": 1.9600000000000003e-06,
            "loss": 1.5778,
            "step": 100
        },
        {
            "epoch": 0.18,
            "learning_rate": 2.46e-06,
            "loss": 1.5275,
            "step": 125
        },
        {
            "epoch": 0.21,
            "learning_rate": 2.96e-06,
            "loss": 1.4266,
            "step": 150
        },
        {
            "epoch": 0.25,
            "learning_rate": 3.46e-06,
            "loss": 1.3163,
            "step": 175
        },
        {
            "epoch": 0.28,
            "learning_rate": 3.96e-06,
            "loss": 1.3454,
            "step": 200
        },
        {
            "epoch": 0.32,
            "learning_rate": 4.4600000000000005e-06,
            "loss": 1.248,
            "step": 225
        },
        {
            "epoch": 0.35,
            "learning_rate": 4.960000000000001e-06,
            "loss": 1.2251,
            "step": 250
        },
        {
            "epoch": 0.39,
            "learning_rate": 5.460000000000001e-06,
            "loss": 1.2044,
            "step": 275
        },
        {
            "epoch": 0.42,
            "learning_rate": 5.9600000000000005e-06,
            "loss": 1.1433,
            "step": 300
        },
        {
            "epoch": 0.46,
            "learning_rate": 6.460000000000001e-06,
            "loss": 1.1663,
            "step": 325
        },
        {
            "epoch": 0.5,
            "learning_rate": 6.96e-06,
            "loss": 1.1463,
            "step": 350
        },
        {
            "epoch": 0.53,
            "learning_rate": 7.4600000000000006e-06,
            "loss": 1.0479,
            "step": 375
        },
        {
            "epoch": 0.57,
            "learning_rate": 7.960000000000002e-06,
            "loss": 1.0869,
            "step": 400
        },
        {
            "epoch": 0.6,
            "learning_rate": 8.46e-06,
            "loss": 1.0555,
            "step": 425
        },
        {
            "epoch": 0.64,
            "learning_rate": 8.96e-06,
            "loss": 0.9438,
            "step": 450
        },
        {
            "epoch": 0.67,
            "learning_rate": 9.460000000000001e-06,
            "loss": 0.9683,
            "step": 475
        },
        {
            "epoch": 0.71,
            "learning_rate": 9.960000000000001e-06,
            "loss": 0.9701,
            "step": 500
        },
        {
            "epoch": 0.74,
            "learning_rate": 9.934285714285715e-06,
            "loss": 0.9346,
            "step": 525
        },
        {
            "epoch": 0.78,
            "learning_rate": 9.862857142857144e-06,
            "loss": 0.959,
            "step": 550
        },
        {
            "epoch": 0.81,
            "learning_rate": 9.791428571428571e-06,
            "loss": 0.8805,
            "step": 575
        },
        {
            "epoch": 0.85,
            "learning_rate": 9.72e-06,
            "loss": 0.8644,
            "step": 600
        },
        {
            "epoch": 0.89,
            "learning_rate": 9.648571428571429e-06,
            "loss": 0.8948,
            "step": 625
        },
        {
            "epoch": 0.92,
            "learning_rate": 9.577142857142858e-06,
            "loss": 0.9455,
            "step": 650
        },
        {
            "epoch": 0.96,
            "learning_rate": 9.505714285714287e-06,
            "loss": 0.8696,
            "step": 675
        },
        {
            "epoch": 0.99,
            "learning_rate": 9.434285714285714e-06,
            "loss": 0.8829,
            "step": 700
        },
        {
            "epoch": 1.03,
            "learning_rate": 9.362857142857143e-06,
            "loss": 0.7547,
            "step": 725
        },
        {
            "epoch": 1.06,
            "learning_rate": 9.291428571428572e-06,
            "loss": 0.815,
            "step": 750
        },
        {
            "epoch": 1.1,
            "learning_rate": 9.220000000000002e-06,
            "loss": 0.8324,
            "step": 775
        },
        {
            "epoch": 1.13,
            "learning_rate": 9.148571428571429e-06,
            "loss": 0.7684,
            "step": 800
        },
        {
            "epoch": 1.17,
            "learning_rate": 9.077142857142858e-06,
            "loss": 0.7822,
            "step": 825
        },
        {
            "epoch": 1.2,
            "learning_rate": 9.005714285714287e-06,
            "loss": 0.7616,
            "step": 850
        },
        {
            "epoch": 1.24,
            "learning_rate": 8.934285714285716e-06,
            "loss": 0.8268,
            "step": 875
        },
        {
            "epoch": 1.27,
            "learning_rate": 8.862857142857143e-06,
            "loss": 0.8051,
            "step": 900
        },
        {
            "epoch": 1.31,
            "learning_rate": 8.791428571428572e-06,
            "loss": 0.804,
            "step": 925
        },
        {
            "epoch": 1.35,
            "learning_rate": 8.720000000000001e-06,
            "loss": 0.7387,
            "step": 950
        },
        {
            "epoch": 1.38,
            "learning_rate": 8.64857142857143e-06,
            "loss": 0.7993,
            "step": 975
        },
        {
            "epoch": 1.42,
            "learning_rate": 8.577142857142858e-06,
            "loss": 0.8099,
            "step": 1000
        },
        {
            "epoch": 1.42,
            "eval_loss": 0.8143458366394043,
            "eval_runtime": 1609.901,
            "eval_samples_per_second": 1.238,
            "eval_steps_per_second": 0.155,
            "eval_wer": 99.95633187772926,
            "step": 1000
        }
    ],
    "max_steps": 4000,
    "num_train_epochs": 6,
    "total_flos": 4.6159234781184e+18,
    "trial_name": null,
    "trial_params": null
}