{
  "best_metric": 25.035120389427945,
  "best_model_checkpoint": "./whisper-small-pl/checkpoint-2000",
  "epoch": 1.9342359767891684,
  "eval_steps": 1000,
  "global_step": 2000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.024177949709864602,
      "grad_norm": 13.08698844909668,
      "learning_rate": 4.6000000000000004e-07,
      "loss": 0.7941,
      "step": 25
    },
    {
      "epoch": 0.048355899419729204,
      "grad_norm": 9.012430191040039,
      "learning_rate": 9.600000000000001e-07,
      "loss": 0.7344,
      "step": 50
    },
    {
      "epoch": 0.0725338491295938,
      "grad_norm": 9.37657356262207,
      "learning_rate": 1.46e-06,
      "loss": 0.5611,
      "step": 75
    },
    {
      "epoch": 0.09671179883945841,
      "grad_norm": 7.428127288818359,
      "learning_rate": 1.9600000000000003e-06,
      "loss": 0.2917,
      "step": 100
    },
    {
      "epoch": 0.12088974854932302,
      "grad_norm": 7.270956039428711,
      "learning_rate": 2.46e-06,
      "loss": 0.2767,
      "step": 125
    },
    {
      "epoch": 0.1450676982591876,
      "grad_norm": 6.277708530426025,
      "learning_rate": 2.96e-06,
      "loss": 0.2621,
      "step": 150
    },
    {
      "epoch": 0.16924564796905223,
      "grad_norm": 7.922985553741455,
      "learning_rate": 3.46e-06,
      "loss": 0.2482,
      "step": 175
    },
    {
      "epoch": 0.19342359767891681,
      "grad_norm": 7.432052135467529,
      "learning_rate": 3.96e-06,
      "loss": 0.2468,
      "step": 200
    },
    {
      "epoch": 0.21760154738878143,
      "grad_norm": 5.777613162994385,
      "learning_rate": 4.4600000000000005e-06,
      "loss": 0.2526,
      "step": 225
    },
    {
      "epoch": 0.24177949709864605,
      "grad_norm": 7.955958843231201,
      "learning_rate": 4.960000000000001e-06,
      "loss": 0.2293,
      "step": 250
    },
    {
      "epoch": 0.26595744680851063,
      "grad_norm": 8.523419380187988,
      "learning_rate": 5.460000000000001e-06,
      "loss": 0.2112,
      "step": 275
    },
    {
      "epoch": 0.2901353965183752,
      "grad_norm": 6.596875190734863,
      "learning_rate": 5.9600000000000005e-06,
      "loss": 0.2224,
      "step": 300
    },
    {
      "epoch": 0.31431334622823986,
      "grad_norm": 7.191504955291748,
      "learning_rate": 6.460000000000001e-06,
      "loss": 0.2112,
      "step": 325
    },
    {
      "epoch": 0.33849129593810445,
      "grad_norm": 8.093416213989258,
      "learning_rate": 6.96e-06,
      "loss": 0.215,
      "step": 350
    },
    {
      "epoch": 0.36266924564796904,
      "grad_norm": 6.397820949554443,
      "learning_rate": 7.4600000000000006e-06,
      "loss": 0.2079,
      "step": 375
    },
    {
      "epoch": 0.38684719535783363,
      "grad_norm": 5.670892238616943,
      "learning_rate": 7.960000000000002e-06,
      "loss": 0.2377,
      "step": 400
    },
    {
      "epoch": 0.41102514506769827,
      "grad_norm": 6.631639003753662,
      "learning_rate": 8.46e-06,
      "loss": 0.2409,
      "step": 425
    },
    {
      "epoch": 0.43520309477756286,
      "grad_norm": 5.997097015380859,
      "learning_rate": 8.96e-06,
      "loss": 0.219,
      "step": 450
    },
    {
      "epoch": 0.45938104448742745,
      "grad_norm": 5.482093334197998,
      "learning_rate": 9.460000000000001e-06,
      "loss": 0.2004,
      "step": 475
    },
    {
      "epoch": 0.4835589941972921,
      "grad_norm": 7.19942045211792,
      "learning_rate": 9.960000000000001e-06,
      "loss": 0.2118,
      "step": 500
    },
    {
      "epoch": 0.5077369439071566,
      "grad_norm": 4.876007080078125,
      "learning_rate": 9.846666666666668e-06,
      "loss": 0.1984,
      "step": 525
    },
    {
      "epoch": 0.5319148936170213,
      "grad_norm": 5.836966037750244,
      "learning_rate": 9.68e-06,
      "loss": 0.218,
      "step": 550
    },
    {
      "epoch": 0.5560928433268859,
      "grad_norm": 5.95539665222168,
      "learning_rate": 9.513333333333334e-06,
      "loss": 0.2,
      "step": 575
    },
    {
      "epoch": 0.5802707930367504,
      "grad_norm": 7.50021505355835,
      "learning_rate": 9.346666666666666e-06,
      "loss": 0.2087,
      "step": 600
    },
    {
      "epoch": 0.6044487427466151,
      "grad_norm": 5.989735126495361,
      "learning_rate": 9.180000000000002e-06,
      "loss": 0.198,
      "step": 625
    },
    {
      "epoch": 0.6286266924564797,
      "grad_norm": 7.257640838623047,
      "learning_rate": 9.013333333333334e-06,
      "loss": 0.2134,
      "step": 650
    },
    {
      "epoch": 0.6528046421663443,
      "grad_norm": 7.139352798461914,
      "learning_rate": 8.846666666666668e-06,
      "loss": 0.1896,
      "step": 675
    },
    {
      "epoch": 0.6769825918762089,
      "grad_norm": 5.189645290374756,
      "learning_rate": 8.68e-06,
      "loss": 0.2043,
      "step": 700
    },
    {
      "epoch": 0.7011605415860735,
      "grad_norm": 6.447940349578857,
      "learning_rate": 8.513333333333335e-06,
      "loss": 0.2004,
      "step": 725
    },
    {
      "epoch": 0.7253384912959381,
      "grad_norm": 6.197971820831299,
      "learning_rate": 8.346666666666668e-06,
      "loss": 0.2128,
      "step": 750
    },
    {
      "epoch": 0.7495164410058027,
      "grad_norm": 6.4797186851501465,
      "learning_rate": 8.18e-06,
      "loss": 0.2027,
      "step": 775
    },
    {
      "epoch": 0.7736943907156673,
      "grad_norm": 6.49001407623291,
      "learning_rate": 8.013333333333333e-06,
      "loss": 0.2039,
      "step": 800
    },
    {
      "epoch": 0.7978723404255319,
      "grad_norm": 7.675256729125977,
      "learning_rate": 7.846666666666667e-06,
      "loss": 0.2132,
      "step": 825
    },
    {
      "epoch": 0.8220502901353965,
      "grad_norm": 6.154118537902832,
      "learning_rate": 7.680000000000001e-06,
      "loss": 0.185,
      "step": 850
    },
    {
      "epoch": 0.8462282398452611,
      "grad_norm": 4.915777206420898,
      "learning_rate": 7.513333333333334e-06,
      "loss": 0.1964,
      "step": 875
    },
    {
      "epoch": 0.8704061895551257,
      "grad_norm": 6.114099025726318,
      "learning_rate": 7.346666666666668e-06,
      "loss": 0.1829,
      "step": 900
    },
    {
      "epoch": 0.8945841392649904,
      "grad_norm": 4.89927339553833,
      "learning_rate": 7.180000000000001e-06,
      "loss": 0.1811,
      "step": 925
    },
    {
      "epoch": 0.9187620889748549,
      "grad_norm": 6.713924884796143,
      "learning_rate": 7.0133333333333345e-06,
      "loss": 0.2002,
      "step": 950
    },
    {
      "epoch": 0.9429400386847195,
      "grad_norm": 3.8542282581329346,
      "learning_rate": 6.846666666666667e-06,
      "loss": 0.1919,
      "step": 975
    },
    {
      "epoch": 0.9671179883945842,
      "grad_norm": 5.772714138031006,
      "learning_rate": 6.680000000000001e-06,
      "loss": 0.1948,
      "step": 1000
    },
    {
      "epoch": 0.9671179883945842,
      "eval_loss": 0.3445706367492676,
      "eval_runtime": 2461.0106,
      "eval_samples_per_second": 3.37,
      "eval_steps_per_second": 0.421,
      "eval_wer": 27.341631546277238,
      "step": 1000
    },
    {
      "epoch": 0.9912959381044487,
      "grad_norm": 5.760690689086914,
      "learning_rate": 6.513333333333333e-06,
      "loss": 0.1792,
      "step": 1025
    },
    {
      "epoch": 1.0154738878143132,
      "grad_norm": 2.4457898139953613,
      "learning_rate": 6.346666666666668e-06,
      "loss": 0.1373,
      "step": 1050
    },
    {
      "epoch": 1.039651837524178,
      "grad_norm": 6.8382792472839355,
      "learning_rate": 6.18e-06,
      "loss": 0.1037,
      "step": 1075
    },
    {
      "epoch": 1.0638297872340425,
      "grad_norm": 2.8886797428131104,
      "learning_rate": 6.013333333333335e-06,
      "loss": 0.0919,
      "step": 1100
    },
    {
      "epoch": 1.0880077369439072,
      "grad_norm": 3.747055768966675,
      "learning_rate": 5.846666666666667e-06,
      "loss": 0.0998,
      "step": 1125
    },
    {
      "epoch": 1.1121856866537718,
      "grad_norm": 5.849277973175049,
      "learning_rate": 5.68e-06,
      "loss": 0.0987,
      "step": 1150
    },
    {
      "epoch": 1.1363636363636362,
      "grad_norm": 4.248702526092529,
      "learning_rate": 5.513333333333334e-06,
      "loss": 0.0986,
      "step": 1175
    },
    {
      "epoch": 1.1605415860735009,
      "grad_norm": 2.798823595046997,
      "learning_rate": 5.346666666666667e-06,
      "loss": 0.0984,
      "step": 1200
    },
    {
      "epoch": 1.1847195357833655,
      "grad_norm": 3.916948080062866,
      "learning_rate": 5.18e-06,
      "loss": 0.1046,
      "step": 1225
    },
    {
      "epoch": 1.2088974854932302,
      "grad_norm": 4.392205238342285,
      "learning_rate": 5.013333333333333e-06,
      "loss": 0.1004,
      "step": 1250
    },
    {
      "epoch": 1.2330754352030948,
      "grad_norm": 4.6209940910339355,
      "learning_rate": 4.846666666666667e-06,
      "loss": 0.0974,
      "step": 1275
    },
    {
      "epoch": 1.2572533849129595,
      "grad_norm": 3.8491697311401367,
      "learning_rate": 4.680000000000001e-06,
      "loss": 0.0964,
      "step": 1300
    },
    {
      "epoch": 1.281431334622824,
      "grad_norm": 3.726794958114624,
      "learning_rate": 4.513333333333333e-06,
      "loss": 0.1073,
      "step": 1325
    },
    {
      "epoch": 1.3056092843326885,
      "grad_norm": 2.047708511352539,
      "learning_rate": 4.346666666666667e-06,
      "loss": 0.0946,
      "step": 1350
    },
    {
      "epoch": 1.3297872340425532,
      "grad_norm": 3.641178846359253,
      "learning_rate": 4.18e-06,
      "loss": 0.1003,
      "step": 1375
    },
    {
      "epoch": 1.3539651837524178,
      "grad_norm": 4.149508476257324,
      "learning_rate": 4.013333333333334e-06,
      "loss": 0.0878,
      "step": 1400
    },
    {
      "epoch": 1.3781431334622825,
      "grad_norm": 3.5799102783203125,
      "learning_rate": 3.8466666666666665e-06,
      "loss": 0.1025,
      "step": 1425
    },
    {
      "epoch": 1.402321083172147,
      "grad_norm": 3.7035670280456543,
      "learning_rate": 3.6800000000000003e-06,
      "loss": 0.0968,
      "step": 1450
    },
    {
      "epoch": 1.4264990328820115,
      "grad_norm": 3.0954935550689697,
      "learning_rate": 3.5133333333333337e-06,
      "loss": 0.0914,
      "step": 1475
    },
    {
      "epoch": 1.4506769825918762,
      "grad_norm": 5.010723114013672,
      "learning_rate": 3.346666666666667e-06,
      "loss": 0.096,
      "step": 1500
    },
    {
      "epoch": 1.4748549323017408,
      "grad_norm": 4.114623546600342,
      "learning_rate": 3.1800000000000005e-06,
      "loss": 0.0895,
      "step": 1525
    },
    {
      "epoch": 1.4990328820116054,
      "grad_norm": 2.368100881576538,
      "learning_rate": 3.013333333333334e-06,
      "loss": 0.0875,
      "step": 1550
    },
    {
      "epoch": 1.52321083172147,
      "grad_norm": 3.3124420642852783,
      "learning_rate": 2.8466666666666672e-06,
      "loss": 0.0901,
      "step": 1575
    },
    {
      "epoch": 1.5473887814313345,
      "grad_norm": 2.9692959785461426,
      "learning_rate": 2.68e-06,
      "loss": 0.0958,
      "step": 1600
    },
    {
      "epoch": 1.5715667311411994,
      "grad_norm": 3.253309726715088,
      "learning_rate": 2.5133333333333336e-06,
      "loss": 0.0844,
      "step": 1625
    },
    {
      "epoch": 1.5957446808510638,
      "grad_norm": 4.483941078186035,
      "learning_rate": 2.346666666666667e-06,
      "loss": 0.0964,
      "step": 1650
    },
    {
      "epoch": 1.6199226305609284,
      "grad_norm": 3.4813811779022217,
      "learning_rate": 2.1800000000000003e-06,
      "loss": 0.0898,
      "step": 1675
    },
    {
      "epoch": 1.644100580270793,
      "grad_norm": 2.9097344875335693,
      "learning_rate": 2.0133333333333337e-06,
      "loss": 0.0785,
      "step": 1700
    },
    {
      "epoch": 1.6682785299806575,
      "grad_norm": 4.8369011878967285,
      "learning_rate": 1.8466666666666668e-06,
      "loss": 0.0858,
      "step": 1725
    },
    {
      "epoch": 1.6924564796905224,
      "grad_norm": 3.383228302001953,
      "learning_rate": 1.6800000000000002e-06,
      "loss": 0.0925,
      "step": 1750
    },
    {
      "epoch": 1.7166344294003868,
      "grad_norm": 3.398839235305786,
      "learning_rate": 1.5133333333333334e-06,
      "loss": 0.0966,
      "step": 1775
    },
    {
      "epoch": 1.7408123791102514,
      "grad_norm": 4.022014617919922,
      "learning_rate": 1.3466666666666668e-06,
      "loss": 0.086,
      "step": 1800
    },
    {
      "epoch": 1.764990328820116,
      "grad_norm": 4.035211086273193,
      "learning_rate": 1.1800000000000001e-06,
      "loss": 0.0932,
      "step": 1825
    },
    {
      "epoch": 1.7891682785299805,
      "grad_norm": 3.886571168899536,
      "learning_rate": 1.0133333333333333e-06,
      "loss": 0.0823,
      "step": 1850
    },
    {
      "epoch": 1.8133462282398454,
      "grad_norm": 3.486004590988159,
      "learning_rate": 8.466666666666668e-07,
      "loss": 0.0984,
      "step": 1875
    },
    {
      "epoch": 1.8375241779497098,
      "grad_norm": 2.604168176651001,
      "learning_rate": 6.800000000000001e-07,
      "loss": 0.0892,
      "step": 1900
    },
    {
      "epoch": 1.8617021276595744,
      "grad_norm": 3.2812445163726807,
      "learning_rate": 5.133333333333334e-07,
      "loss": 0.0817,
      "step": 1925
    },
    {
      "epoch": 1.885880077369439,
      "grad_norm": 3.6800663471221924,
      "learning_rate": 3.466666666666667e-07,
      "loss": 0.0925,
      "step": 1950
    },
    {
      "epoch": 1.9100580270793037,
      "grad_norm": 4.335522651672363,
      "learning_rate": 1.8e-07,
      "loss": 0.0908,
      "step": 1975
    },
    {
      "epoch": 1.9342359767891684,
      "grad_norm": 1.6400020122528076,
      "learning_rate": 1.3333333333333334e-08,
      "loss": 0.0809,
      "step": 2000
    },
    {
      "epoch": 1.9342359767891684,
      "eval_loss": 0.33456456661224365,
      "eval_runtime": 2469.2143,
      "eval_samples_per_second": 3.359,
      "eval_steps_per_second": 0.42,
      "eval_wer": 25.035120389427945,
      "step": 2000
    }
  ],
  "logging_steps": 25,
  "max_steps": 2000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 1000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 9.2332898832384e+18,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}