{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 71.42857142857143,
  "global_step": 500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.71,
      "learning_rate": 6.000000000000001e-07,
      "loss": 2.8734,
      "step": 5
    },
    {
      "epoch": 1.43,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 2.7517,
      "step": 10
    },
    {
      "epoch": 2.14,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 2.2084,
      "step": 15
    },
    {
      "epoch": 2.86,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 1.7218,
      "step": 20
    },
    {
      "epoch": 3.57,
      "learning_rate": 4.4e-06,
      "loss": 1.3608,
      "step": 25
    },
    {
      "epoch": 4.29,
      "learning_rate": 5.400000000000001e-06,
      "loss": 1.046,
      "step": 30
    },
    {
      "epoch": 5.0,
      "learning_rate": 6.4000000000000006e-06,
      "loss": 0.8672,
      "step": 35
    },
    {
      "epoch": 5.71,
      "learning_rate": 7.4e-06,
      "loss": 0.6926,
      "step": 40
    },
    {
      "epoch": 6.43,
      "learning_rate": 8.400000000000001e-06,
      "loss": 0.5528,
      "step": 45
    },
    {
      "epoch": 7.14,
      "learning_rate": 9.4e-06,
      "loss": 0.4595,
      "step": 50
    },
    {
      "epoch": 7.86,
      "learning_rate": 9.955555555555556e-06,
      "loss": 0.3677,
      "step": 55
    },
    {
      "epoch": 8.57,
      "learning_rate": 9.844444444444446e-06,
      "loss": 0.2566,
      "step": 60
    },
    {
      "epoch": 9.29,
      "learning_rate": 9.733333333333334e-06,
      "loss": 0.2176,
      "step": 65
    },
    {
      "epoch": 10.0,
      "learning_rate": 9.622222222222222e-06,
      "loss": 0.1514,
      "step": 70
    },
    {
      "epoch": 10.71,
      "learning_rate": 9.511111111111112e-06,
      "loss": 0.0989,
      "step": 75
    },
    {
      "epoch": 11.43,
      "learning_rate": 9.4e-06,
      "loss": 0.0644,
      "step": 80
    },
    {
      "epoch": 12.14,
      "learning_rate": 9.28888888888889e-06,
      "loss": 0.0445,
      "step": 85
    },
    {
      "epoch": 12.86,
      "learning_rate": 9.17777777777778e-06,
      "loss": 0.0279,
      "step": 90
    },
    {
      "epoch": 13.57,
      "learning_rate": 9.066666666666667e-06,
      "loss": 0.0191,
      "step": 95
    },
    {
      "epoch": 14.29,
      "learning_rate": 8.955555555555555e-06,
      "loss": 0.0139,
      "step": 100
    },
    {
      "epoch": 14.29,
      "eval_loss": 1.0302417278289795,
      "eval_runtime": 1389.5619,
      "eval_samples_per_second": 0.368,
      "eval_steps_per_second": 0.023,
      "eval_wer": 50.12106537530266,
      "step": 100
    },
    {
      "epoch": 15.0,
      "learning_rate": 8.844444444444445e-06,
      "loss": 0.0104,
      "step": 105
    },
    {
      "epoch": 15.71,
      "learning_rate": 8.733333333333333e-06,
      "loss": 0.0091,
      "step": 110
    },
    {
      "epoch": 16.43,
      "learning_rate": 8.622222222222223e-06,
      "loss": 0.0062,
      "step": 115
    },
    {
      "epoch": 17.14,
      "learning_rate": 8.511111111111113e-06,
      "loss": 0.0056,
      "step": 120
    },
    {
      "epoch": 17.86,
      "learning_rate": 8.400000000000001e-06,
      "loss": 0.0052,
      "step": 125
    },
    {
      "epoch": 18.57,
      "learning_rate": 8.288888888888889e-06,
      "loss": 0.0047,
      "step": 130
    },
    {
      "epoch": 19.29,
      "learning_rate": 8.177777777777779e-06,
      "loss": 0.0049,
      "step": 135
    },
    {
      "epoch": 20.0,
      "learning_rate": 8.066666666666667e-06,
      "loss": 0.0042,
      "step": 140
    },
    {
      "epoch": 20.71,
      "learning_rate": 7.955555555555557e-06,
      "loss": 0.0033,
      "step": 145
    },
    {
      "epoch": 21.43,
      "learning_rate": 7.844444444444446e-06,
      "loss": 0.0028,
      "step": 150
    },
    {
      "epoch": 22.14,
      "learning_rate": 7.733333333333334e-06,
      "loss": 0.002,
      "step": 155
    },
    {
      "epoch": 22.86,
      "learning_rate": 7.622222222222223e-06,
      "loss": 0.0018,
      "step": 160
    },
    {
      "epoch": 23.57,
      "learning_rate": 7.511111111111111e-06,
      "loss": 0.0016,
      "step": 165
    },
    {
      "epoch": 24.29,
      "learning_rate": 7.4e-06,
      "loss": 0.0014,
      "step": 170
    },
    {
      "epoch": 25.0,
      "learning_rate": 7.28888888888889e-06,
      "loss": 0.0013,
      "step": 175
    },
    {
      "epoch": 25.71,
      "learning_rate": 7.177777777777778e-06,
      "loss": 0.0013,
      "step": 180
    },
    {
      "epoch": 26.43,
      "learning_rate": 7.066666666666667e-06,
      "loss": 0.0012,
      "step": 185
    },
    {
      "epoch": 27.14,
      "learning_rate": 6.955555555555557e-06,
      "loss": 0.0011,
      "step": 190
    },
    {
      "epoch": 27.86,
      "learning_rate": 6.844444444444445e-06,
      "loss": 0.0011,
      "step": 195
    },
    {
      "epoch": 28.57,
      "learning_rate": 6.733333333333334e-06,
      "loss": 0.0011,
      "step": 200
    },
    {
      "epoch": 28.57,
      "eval_loss": 1.2129117250442505,
      "eval_runtime": 1370.5005,
      "eval_samples_per_second": 0.374,
      "eval_steps_per_second": 0.023,
      "eval_wer": 49.78056900726393,
      "step": 200
    },
    {
      "epoch": 29.29,
      "learning_rate": 6.6222222222222236e-06,
      "loss": 0.001,
      "step": 205
    },
    {
      "epoch": 30.0,
      "learning_rate": 6.511111111111112e-06,
      "loss": 0.001,
      "step": 210
    },
    {
      "epoch": 30.71,
      "learning_rate": 6.4000000000000006e-06,
      "loss": 0.001,
      "step": 215
    },
    {
      "epoch": 31.43,
      "learning_rate": 6.28888888888889e-06,
      "loss": 0.001,
      "step": 220
    },
    {
      "epoch": 32.14,
      "learning_rate": 6.177777777777778e-06,
      "loss": 0.001,
      "step": 225
    },
    {
      "epoch": 32.86,
      "learning_rate": 6.066666666666667e-06,
      "loss": 0.0009,
      "step": 230
    },
    {
      "epoch": 33.57,
      "learning_rate": 5.955555555555555e-06,
      "loss": 0.0009,
      "step": 235
    },
    {
      "epoch": 34.29,
      "learning_rate": 5.844444444444445e-06,
      "loss": 0.0009,
      "step": 240
    },
    {
      "epoch": 35.0,
      "learning_rate": 5.733333333333334e-06,
      "loss": 0.0009,
      "step": 245
    },
    {
      "epoch": 35.71,
      "learning_rate": 5.622222222222222e-06,
      "loss": 0.0009,
      "step": 250
    },
    {
      "epoch": 36.43,
      "learning_rate": 5.511111111111112e-06,
      "loss": 0.0009,
      "step": 255
    },
    {
      "epoch": 37.14,
      "learning_rate": 5.400000000000001e-06,
      "loss": 0.0008,
      "step": 260
    },
    {
      "epoch": 37.86,
      "learning_rate": 5.288888888888889e-06,
      "loss": 0.0008,
      "step": 265
    },
    {
      "epoch": 38.57,
      "learning_rate": 5.177777777777779e-06,
      "loss": 0.0008,
      "step": 270
    },
    {
      "epoch": 39.29,
      "learning_rate": 5.0666666666666676e-06,
      "loss": 0.0008,
      "step": 275
    },
    {
      "epoch": 40.0,
      "learning_rate": 4.9555555555555565e-06,
      "loss": 0.0008,
      "step": 280
    },
    {
      "epoch": 40.71,
      "learning_rate": 4.8444444444444446e-06,
      "loss": 0.0008,
      "step": 285
    },
    {
      "epoch": 41.43,
      "learning_rate": 4.7333333333333335e-06,
      "loss": 0.0008,
      "step": 290
    },
    {
      "epoch": 42.14,
      "learning_rate": 4.622222222222222e-06,
      "loss": 0.0008,
      "step": 295
    },
    {
      "epoch": 42.86,
      "learning_rate": 4.511111111111111e-06,
      "loss": 0.0008,
      "step": 300
    },
    {
      "epoch": 42.86,
      "eval_loss": 1.2581015825271606,
      "eval_runtime": 1407.7801,
      "eval_samples_per_second": 0.364,
      "eval_steps_per_second": 0.023,
      "eval_wer": 50.317796610169495,
      "step": 300
    },
    {
      "epoch": 43.57,
      "learning_rate": 4.4e-06,
      "loss": 0.0008,
      "step": 305
    },
    {
      "epoch": 44.29,
      "learning_rate": 4.288888888888889e-06,
      "loss": 0.0008,
      "step": 310
    },
    {
      "epoch": 45.0,
      "learning_rate": 4.177777777777778e-06,
      "loss": 0.0007,
      "step": 315
    },
    {
      "epoch": 45.71,
      "learning_rate": 4.066666666666667e-06,
      "loss": 0.0008,
      "step": 320
    },
    {
      "epoch": 46.43,
      "learning_rate": 3.955555555555556e-06,
      "loss": 0.0007,
      "step": 325
    },
    {
      "epoch": 47.14,
      "learning_rate": 3.844444444444445e-06,
      "loss": 0.0007,
      "step": 330
    },
    {
      "epoch": 47.86,
      "learning_rate": 3.7333333333333337e-06,
      "loss": 0.0007,
      "step": 335
    },
    {
      "epoch": 48.57,
      "learning_rate": 3.6222222222222226e-06,
      "loss": 0.0007,
      "step": 340
    },
    {
      "epoch": 49.29,
      "learning_rate": 3.511111111111111e-06,
      "loss": 0.0007,
      "step": 345
    },
    {
      "epoch": 50.0,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 0.0007,
      "step": 350
    },
    {
      "epoch": 50.71,
      "learning_rate": 3.2888888888888894e-06,
      "loss": 0.0007,
      "step": 355
    },
    {
      "epoch": 51.43,
      "learning_rate": 3.177777777777778e-06,
      "loss": 0.0007,
      "step": 360
    },
    {
      "epoch": 52.14,
      "learning_rate": 3.066666666666667e-06,
      "loss": 0.0007,
      "step": 365
    },
    {
      "epoch": 52.86,
      "learning_rate": 2.955555555555556e-06,
      "loss": 0.0007,
      "step": 370
    },
    {
      "epoch": 53.57,
      "learning_rate": 2.8444444444444446e-06,
      "loss": 0.0007,
      "step": 375
    },
    {
      "epoch": 54.29,
      "learning_rate": 2.7333333333333336e-06,
      "loss": 0.0007,
      "step": 380
    },
    {
      "epoch": 55.0,
      "learning_rate": 2.6222222222222225e-06,
      "loss": 0.0007,
      "step": 385
    },
    {
      "epoch": 55.71,
      "learning_rate": 2.5111111111111114e-06,
      "loss": 0.0007,
      "step": 390
    },
    {
      "epoch": 56.43,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 0.0007,
      "step": 395
    },
    {
      "epoch": 57.14,
      "learning_rate": 2.2888888888888892e-06,
      "loss": 0.0007,
      "step": 400
    },
    {
      "epoch": 57.14,
      "eval_loss": 1.2849922180175781,
      "eval_runtime": 1412.1951,
      "eval_samples_per_second": 0.363,
      "eval_steps_per_second": 0.023,
      "eval_wer": 50.5523607748184,
      "step": 400
    },
    {
      "epoch": 57.86,
      "learning_rate": 2.1777777777777777e-06,
      "loss": 0.0007,
      "step": 405
    },
    {
      "epoch": 58.57,
      "learning_rate": 2.0666666666666666e-06,
      "loss": 0.0007,
      "step": 410
    },
    {
      "epoch": 59.29,
      "learning_rate": 1.955555555555556e-06,
      "loss": 0.0006,
      "step": 415
    },
    {
      "epoch": 60.0,
      "learning_rate": 1.8444444444444445e-06,
      "loss": 0.0007,
      "step": 420
    },
    {
      "epoch": 60.71,
      "learning_rate": 1.7333333333333336e-06,
      "loss": 0.0007,
      "step": 425
    },
    {
      "epoch": 61.43,
      "learning_rate": 1.6222222222222223e-06,
      "loss": 0.0006,
      "step": 430
    },
    {
      "epoch": 62.14,
      "learning_rate": 1.5111111111111112e-06,
      "loss": 0.0007,
      "step": 435
    },
    {
      "epoch": 62.86,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 0.0007,
      "step": 440
    },
    {
      "epoch": 63.57,
      "learning_rate": 1.288888888888889e-06,
      "loss": 0.0007,
      "step": 445
    },
    {
      "epoch": 64.29,
      "learning_rate": 1.1777777777777778e-06,
      "loss": 0.0006,
      "step": 450
    },
    {
      "epoch": 65.0,
      "learning_rate": 1.066666666666667e-06,
      "loss": 0.0006,
      "step": 455
    },
    {
      "epoch": 65.71,
      "learning_rate": 9.555555555555556e-07,
      "loss": 0.0006,
      "step": 460
    },
    {
      "epoch": 66.43,
      "learning_rate": 8.444444444444445e-07,
      "loss": 0.0006,
      "step": 465
    },
    {
      "epoch": 67.14,
      "learning_rate": 7.333333333333334e-07,
      "loss": 0.0006,
      "step": 470
    },
    {
      "epoch": 67.86,
      "learning_rate": 6.222222222222223e-07,
      "loss": 0.0007,
      "step": 475
    },
    {
      "epoch": 68.57,
      "learning_rate": 5.111111111111112e-07,
      "loss": 0.0006,
      "step": 480
    },
    {
      "epoch": 69.29,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 0.0006,
      "step": 485
    },
    {
      "epoch": 70.0,
      "learning_rate": 2.888888888888889e-07,
      "loss": 0.0006,
      "step": 490
    },
    {
      "epoch": 70.71,
      "learning_rate": 1.777777777777778e-07,
      "loss": 0.0006,
      "step": 495
    },
    {
      "epoch": 71.43,
      "learning_rate": 6.666666666666668e-08,
      "loss": 0.0007,
      "step": 500
    },
    {
      "epoch": 71.43,
      "eval_loss": 1.2950359582901,
      "eval_runtime": 1407.5689,
      "eval_samples_per_second": 0.364,
      "eval_steps_per_second": 0.023,
      "eval_wer": 50.6431598062954,
      "step": 500
    },
    {
      "epoch": 71.43,
      "step": 500,
      "total_flos": 3.142748691726336e+19,
      "train_loss": 0.15911377191729845,
      "train_runtime": 10886.8048,
      "train_samples_per_second": 2.939,
      "train_steps_per_second": 0.046
    }
  ],
  "max_steps": 500,
  "num_train_epochs": 72,
  "total_flos": 3.142748691726336e+19,
  "trial_name": null,
  "trial_params": null
}