{
  "best_metric": 40.657571667832,
  "best_model_checkpoint": "./whisper-small-fa-aug/checkpoint-1000",
  "epoch": 0.025,
  "global_step": 1000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 4.2000000000000006e-07,
      "loss": 3.8,
      "step": 25
    },
    {
      "epoch": 0.0,
      "learning_rate": 9.200000000000001e-07,
      "loss": 3.0497,
      "step": 50
    },
    {
      "epoch": 0.0,
      "learning_rate": 1.42e-06,
      "loss": 2.0102,
      "step": 75
    },
    {
      "epoch": 0.0,
      "learning_rate": 1.9200000000000003e-06,
      "loss": 1.5948,
      "step": 100
    },
    {
      "epoch": 0.0,
      "learning_rate": 2.42e-06,
      "loss": 1.4094,
      "step": 125
    },
    {
      "epoch": 0.0,
      "learning_rate": 2.92e-06,
      "loss": 1.2611,
      "step": 150
    },
    {
      "epoch": 0.0,
      "learning_rate": 3.4200000000000007e-06,
      "loss": 1.1366,
      "step": 175
    },
    {
      "epoch": 0.01,
      "learning_rate": 3.920000000000001e-06,
      "loss": 1.0817,
      "step": 200
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.42e-06,
      "loss": 1.0324,
      "step": 225
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.92e-06,
      "loss": 0.9176,
      "step": 250
    },
    {
      "epoch": 0.01,
      "learning_rate": 5.420000000000001e-06,
      "loss": 0.7975,
      "step": 275
    },
    {
      "epoch": 0.01,
      "learning_rate": 5.92e-06,
      "loss": 0.7079,
      "step": 300
    },
    {
      "epoch": 0.01,
      "learning_rate": 6.42e-06,
      "loss": 0.5832,
      "step": 325
    },
    {
      "epoch": 0.01,
      "learning_rate": 6.92e-06,
      "loss": 0.4469,
      "step": 350
    },
    {
      "epoch": 0.01,
      "learning_rate": 7.420000000000001e-06,
      "loss": 0.3913,
      "step": 375
    },
    {
      "epoch": 0.01,
      "learning_rate": 7.92e-06,
      "loss": 0.4189,
      "step": 400
    },
    {
      "epoch": 0.01,
      "learning_rate": 8.42e-06,
      "loss": 0.3756,
      "step": 425
    },
    {
      "epoch": 0.01,
      "learning_rate": 8.920000000000001e-06,
      "loss": 0.3876,
      "step": 450
    },
    {
      "epoch": 0.01,
      "learning_rate": 9.42e-06,
      "loss": 0.3802,
      "step": 475
    },
    {
      "epoch": 0.01,
      "learning_rate": 9.920000000000002e-06,
      "loss": 0.4061,
      "step": 500
    },
    {
      "epoch": 0.01,
      "learning_rate": 9.994683544303798e-06,
      "loss": 0.3885,
      "step": 525
    },
    {
      "epoch": 0.01,
      "learning_rate": 9.988354430379748e-06,
      "loss": 0.3449,
      "step": 550
    },
    {
      "epoch": 0.01,
      "learning_rate": 9.982025316455697e-06,
      "loss": 0.3292,
      "step": 575
    },
    {
      "epoch": 0.01,
      "learning_rate": 9.975696202531647e-06,
      "loss": 0.3439,
      "step": 600
    },
    {
      "epoch": 0.02,
      "learning_rate": 9.969367088607596e-06,
      "loss": 0.3031,
      "step": 625
    },
    {
      "epoch": 0.02,
      "learning_rate": 9.963037974683545e-06,
      "loss": 0.3159,
      "step": 650
    },
    {
      "epoch": 0.02,
      "learning_rate": 9.956708860759495e-06,
      "loss": 0.3304,
      "step": 675
    },
    {
      "epoch": 0.02,
      "learning_rate": 9.950379746835444e-06,
      "loss": 0.2918,
      "step": 700
    },
    {
      "epoch": 0.02,
      "learning_rate": 9.944050632911392e-06,
      "loss": 0.2785,
      "step": 725
    },
    {
      "epoch": 0.02,
      "learning_rate": 9.937721518987341e-06,
      "loss": 0.2618,
      "step": 750
    },
    {
      "epoch": 0.02,
      "learning_rate": 9.931392405063291e-06,
      "loss": 0.2862,
      "step": 775
    },
    {
      "epoch": 0.02,
      "learning_rate": 9.92506329113924e-06,
      "loss": 0.2962,
      "step": 800
    },
    {
      "epoch": 0.02,
      "learning_rate": 9.91873417721519e-06,
      "loss": 0.2935,
      "step": 825
    },
    {
      "epoch": 0.02,
      "learning_rate": 9.912405063291141e-06,
      "loss": 0.2827,
      "step": 850
    },
    {
      "epoch": 0.02,
      "learning_rate": 9.90607594936709e-06,
      "loss": 0.2702,
      "step": 875
    },
    {
      "epoch": 0.02,
      "learning_rate": 9.89974683544304e-06,
      "loss": 0.2987,
      "step": 900
    },
    {
      "epoch": 0.02,
      "learning_rate": 9.89341772151899e-06,
      "loss": 0.3067,
      "step": 925
    },
    {
      "epoch": 0.02,
      "learning_rate": 9.887088607594939e-06,
      "loss": 0.2962,
      "step": 950
    },
    {
      "epoch": 0.02,
      "learning_rate": 9.880759493670888e-06,
      "loss": 0.2807,
      "step": 975
    },
    {
      "epoch": 0.03,
      "learning_rate": 9.874430379746836e-06,
      "loss": 0.2533,
      "step": 1000
    },
    {
      "epoch": 0.03,
      "eval_loss": 0.3274388611316681,
      "eval_runtime": 1481.9106,
      "eval_samples_per_second": 7.025,
      "eval_steps_per_second": 0.439,
      "eval_wer": 40.657571667832,
      "step": 1000
    }
  ],
  "max_steps": 40000,
  "num_train_epochs": 9223372036854775807,
  "total_flos": 4.61736640512e+18,
  "trial_name": null,
  "trial_params": null
}
|
|