{
  "best_metric": 114.85714285714286,
  "best_model_checkpoint": "./medium_TH/checkpoint-1000",
  "epoch": 55.55555555555556,
  "global_step": 1000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.39,
      "learning_rate": 4.6000000000000004e-07,
      "loss": 0.8382,
      "step": 25
    },
    {
      "epoch": 2.78,
      "learning_rate": 9.400000000000001e-07,
      "loss": 0.7178,
      "step": 50
    },
    {
      "epoch": 4.17,
      "learning_rate": 1.44e-06,
      "loss": 0.5937,
      "step": 75
    },
    {
      "epoch": 5.56,
      "learning_rate": 1.94e-06,
      "loss": 0.5143,
      "step": 100
    },
    {
      "epoch": 6.94,
      "learning_rate": 2.4400000000000004e-06,
      "loss": 0.3861,
      "step": 125
    },
    {
      "epoch": 8.33,
      "learning_rate": 2.9400000000000002e-06,
      "loss": 0.3526,
      "step": 150
    },
    {
      "epoch": 9.72,
      "learning_rate": 3.44e-06,
      "loss": 0.287,
      "step": 175
    },
    {
      "epoch": 11.11,
      "learning_rate": 3.94e-06,
      "loss": 0.1953,
      "step": 200
    },
    {
      "epoch": 12.5,
      "learning_rate": 4.440000000000001e-06,
      "loss": 0.1874,
      "step": 225
    },
    {
      "epoch": 13.89,
      "learning_rate": 4.94e-06,
      "loss": 0.172,
      "step": 250
    },
    {
      "epoch": 15.28,
      "learning_rate": 5.4400000000000004e-06,
      "loss": 0.0642,
      "step": 275
    },
    {
      "epoch": 16.67,
      "learning_rate": 5.94e-06,
      "loss": 0.09,
      "step": 300
    },
    {
      "epoch": 18.06,
      "learning_rate": 6.440000000000001e-06,
      "loss": 0.0578,
      "step": 325
    },
    {
      "epoch": 19.44,
      "learning_rate": 6.9400000000000005e-06,
      "loss": 0.0384,
      "step": 350
    },
    {
      "epoch": 20.83,
      "learning_rate": 7.440000000000001e-06,
      "loss": 0.0672,
      "step": 375
    },
    {
      "epoch": 22.22,
      "learning_rate": 7.94e-06,
      "loss": 0.0689,
      "step": 400
    },
    {
      "epoch": 23.61,
      "learning_rate": 8.44e-06,
      "loss": 0.0902,
      "step": 425
    },
    {
      "epoch": 25.0,
      "learning_rate": 8.94e-06,
      "loss": 0.0497,
      "step": 450
    },
    {
      "epoch": 26.39,
      "learning_rate": 9.440000000000001e-06,
      "loss": 0.0232,
      "step": 475
    },
    {
      "epoch": 27.78,
      "learning_rate": 9.940000000000001e-06,
      "loss": 0.0465,
      "step": 500
    },
    {
      "epoch": 29.17,
      "learning_rate": 9.937142857142858e-06,
      "loss": 0.0282,
      "step": 525
    },
    {
      "epoch": 30.56,
      "learning_rate": 9.865714285714285e-06,
      "loss": 0.0546,
      "step": 550
    },
    {
      "epoch": 31.94,
      "learning_rate": 9.794285714285714e-06,
      "loss": 0.0786,
      "step": 575
    },
    {
      "epoch": 33.33,
      "learning_rate": 9.722857142857143e-06,
      "loss": 0.0628,
      "step": 600
    },
    {
      "epoch": 34.72,
      "learning_rate": 9.651428571428572e-06,
      "loss": 0.0325,
      "step": 625
    },
    {
      "epoch": 36.11,
      "learning_rate": 9.58e-06,
      "loss": 0.0254,
      "step": 650
    },
    {
      "epoch": 37.5,
      "learning_rate": 9.508571428571429e-06,
      "loss": 0.0358,
      "step": 675
    },
    {
      "epoch": 38.89,
      "learning_rate": 9.437142857142858e-06,
      "loss": 0.0331,
      "step": 700
    },
    {
      "epoch": 40.28,
      "learning_rate": 9.365714285714287e-06,
      "loss": 0.0281,
      "step": 725
    },
    {
      "epoch": 41.67,
      "learning_rate": 9.294285714285714e-06,
      "loss": 0.0314,
      "step": 750
    },
    {
      "epoch": 43.06,
      "learning_rate": 9.222857142857143e-06,
      "loss": 0.0131,
      "step": 775
    },
    {
      "epoch": 44.44,
      "learning_rate": 9.151428571428572e-06,
      "loss": 0.0245,
      "step": 800
    },
    {
      "epoch": 45.83,
      "learning_rate": 9.080000000000001e-06,
      "loss": 0.0071,
      "step": 825
    },
    {
      "epoch": 47.22,
      "learning_rate": 9.00857142857143e-06,
      "loss": 0.0249,
      "step": 850
    },
    {
      "epoch": 48.61,
      "learning_rate": 8.937142857142857e-06,
      "loss": 0.0186,
      "step": 875
    },
    {
      "epoch": 50.0,
      "learning_rate": 8.865714285714287e-06,
      "loss": 0.0066,
      "step": 900
    },
    {
      "epoch": 51.39,
      "learning_rate": 8.794285714285716e-06,
      "loss": 0.0114,
      "step": 925
    },
    {
      "epoch": 52.78,
      "learning_rate": 8.722857142857145e-06,
      "loss": 0.0178,
      "step": 950
    },
    {
      "epoch": 54.17,
      "learning_rate": 8.651428571428572e-06,
      "loss": 0.0188,
      "step": 975
    },
    {
      "epoch": 55.56,
      "learning_rate": 8.580000000000001e-06,
      "loss": 0.0272,
      "step": 1000
    },
    {
      "epoch": 55.56,
      "eval_loss": 0.37656262516975403,
      "eval_runtime": 10.4546,
      "eval_samples_per_second": 2.2,
      "eval_steps_per_second": 0.287,
      "eval_wer": 114.85714285714286,
      "step": 1000
    }
  ],
  "max_steps": 4000,
  "num_train_epochs": 223,
  "total_flos": 5.842963095552e+18,
  "trial_name": null,
  "trial_params": null
}