{
  "best_metric": 9.104533533158486,
  "best_model_checkpoint": "./checkpoint-10000",
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 10000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0025,
      "grad_norm": 18.99473762512207,
      "learning_rate": 2.875e-07,
      "loss": 1.9791,
      "step": 25
    },
    {
      "epoch": 0.005,
      "grad_norm": 16.013668060302734,
      "learning_rate": 6.000000000000001e-07,
      "loss": 1.8344,
      "step": 50
    },
    {
      "epoch": 0.0075,
      "grad_norm": 11.794272422790527,
      "learning_rate": 9.124999999999999e-07,
      "loss": 1.4287,
      "step": 75
    },
    {
      "epoch": 0.01,
      "grad_norm": 9.81549072265625,
      "learning_rate": 1.2250000000000001e-06,
      "loss": 0.9655,
      "step": 100
    },
    {
      "epoch": 0.0125,
      "grad_norm": 9.262007713317871,
      "learning_rate": 1.5375e-06,
      "loss": 0.7411,
      "step": 125
    },
    {
      "epoch": 0.015,
      "grad_norm": 9.255334854125977,
      "learning_rate": 1.85e-06,
      "loss": 0.7377,
      "step": 150
    },
    {
      "epoch": 0.0175,
      "grad_norm": 9.121560096740723,
      "learning_rate": 2.1625e-06,
      "loss": 0.5628,
      "step": 175
    },
    {
      "epoch": 0.02,
      "grad_norm": 7.131228446960449,
      "learning_rate": 2.4750000000000004e-06,
      "loss": 0.5042,
      "step": 200
    },
    {
      "epoch": 0.0225,
      "grad_norm": 7.482190132141113,
      "learning_rate": 2.7875e-06,
      "loss": 0.4541,
      "step": 225
    },
    {
      "epoch": 0.025,
      "grad_norm": 7.280477046966553,
      "learning_rate": 3.1e-06,
      "loss": 0.4409,
      "step": 250
    },
    {
      "epoch": 0.0275,
      "grad_norm": 7.783284664154053,
      "learning_rate": 3.4125000000000004e-06,
      "loss": 0.3708,
      "step": 275
    },
    {
      "epoch": 0.03,
      "grad_norm": 6.559817314147949,
      "learning_rate": 3.725e-06,
      "loss": 0.3778,
      "step": 300
    },
    {
      "epoch": 0.0325,
      "grad_norm": 6.608455657958984,
      "learning_rate": 4.037500000000001e-06,
      "loss": 0.342,
      "step": 325
    },
    {
      "epoch": 0.035,
      "grad_norm": 6.155163764953613,
      "learning_rate": 4.35e-06,
      "loss": 0.348,
      "step": 350
    },
    {
      "epoch": 0.0375,
      "grad_norm": 5.380521774291992,
      "learning_rate": 4.6625e-06,
      "loss": 0.3389,
      "step": 375
    },
    {
      "epoch": 0.04,
      "grad_norm": 6.0660929679870605,
      "learning_rate": 4.975000000000001e-06,
      "loss": 0.3001,
      "step": 400
    },
    {
      "epoch": 0.0425,
      "grad_norm": 6.305624961853027,
      "learning_rate": 5.2875e-06,
      "loss": 0.2827,
      "step": 425
    },
    {
      "epoch": 0.045,
      "grad_norm": 4.712560653686523,
      "learning_rate": 5.600000000000001e-06,
      "loss": 0.2521,
      "step": 450
    },
    {
      "epoch": 0.0475,
      "grad_norm": 7.551612377166748,
      "learning_rate": 5.9125e-06,
      "loss": 0.2724,
      "step": 475
    },
    {
      "epoch": 0.05,
      "grad_norm": 8.22182846069336,
      "learning_rate": 6.2250000000000005e-06,
      "loss": 0.3412,
      "step": 500
    },
    {
      "epoch": 0.05,
      "eval_loss": 0.5112436413764954,
      "eval_runtime": 311.4745,
      "eval_samples_per_second": 5.429,
      "eval_steps_per_second": 0.681,
      "eval_wer": 28.868490071187715,
      "step": 500
    },
    {
      "epoch": 0.0525,
      "grad_norm": 7.577310085296631,
      "learning_rate": 6.234868421052632e-06,
      "loss": 0.3666,
      "step": 525
    },
    {
      "epoch": 0.055,
      "grad_norm": 6.28400182723999,
      "learning_rate": 6.218421052631579e-06,
      "loss": 0.3399,
      "step": 550
    },
    {
      "epoch": 0.0575,
      "grad_norm": 6.58062744140625,
      "learning_rate": 6.2019736842105266e-06,
      "loss": 0.362,
      "step": 575
    },
    {
      "epoch": 0.06,
      "grad_norm": 7.695873737335205,
      "learning_rate": 6.185526315789474e-06,
      "loss": 0.3334,
      "step": 600
    },
    {
      "epoch": 0.0625,
      "grad_norm": 6.803170680999756,
      "learning_rate": 6.169078947368422e-06,
      "loss": 0.3236,
      "step": 625
    },
    {
      "epoch": 0.065,
      "grad_norm": 5.460644245147705,
      "learning_rate": 6.152631578947369e-06,
      "loss": 0.2595,
      "step": 650
    },
    {
      "epoch": 0.0675,
      "grad_norm": 5.147243499755859,
      "learning_rate": 6.1361842105263165e-06,
      "loss": 0.2391,
      "step": 675
    },
    {
      "epoch": 0.07,
      "grad_norm": 4.784495830535889,
      "learning_rate": 6.119736842105264e-06,
      "loss": 0.218,
      "step": 700
    },
    {
      "epoch": 0.0725,
      "grad_norm": 4.8554253578186035,
      "learning_rate": 6.103289473684211e-06,
      "loss": 0.1849,
      "step": 725
    },
    {
      "epoch": 0.075,
      "grad_norm": 4.66990327835083,
      "learning_rate": 6.086842105263158e-06,
      "loss": 0.1968,
      "step": 750
    },
    {
      "epoch": 0.0775,
      "grad_norm": 3.727851390838623,
      "learning_rate": 6.070394736842106e-06,
      "loss": 0.1986,
      "step": 775
    },
    {
      "epoch": 0.08,
      "grad_norm": 4.329863548278809,
      "learning_rate": 6.053947368421053e-06,
      "loss": 0.194,
      "step": 800
    },
    {
      "epoch": 0.0825,
      "grad_norm": 4.098700046539307,
      "learning_rate": 6.0375e-06,
      "loss": 0.1806,
      "step": 825
    },
    {
      "epoch": 0.085,
      "grad_norm": 4.882187843322754,
      "learning_rate": 6.0210526315789475e-06,
      "loss": 0.1657,
      "step": 850
    },
    {
      "epoch": 0.0875,
      "grad_norm": 4.075649738311768,
      "learning_rate": 6.004605263157895e-06,
      "loss": 0.1717,
      "step": 875
    },
    {
      "epoch": 0.09,
      "grad_norm": 4.04005765914917,
      "learning_rate": 5.988157894736842e-06,
      "loss": 0.1724,
      "step": 900
    },
    {
      "epoch": 0.0925,
      "grad_norm": 4.920064449310303,
      "learning_rate": 5.97171052631579e-06,
      "loss": 0.1613,
      "step": 925
    },
    {
      "epoch": 0.095,
      "grad_norm": 4.736133575439453,
      "learning_rate": 5.9552631578947375e-06,
      "loss": 0.1539,
      "step": 950
    },
    {
      "epoch": 0.0975,
      "grad_norm": 5.089536666870117,
      "learning_rate": 5.938815789473685e-06,
      "loss": 0.1578,
      "step": 975
    },
    {
      "epoch": 0.1,
      "grad_norm": 4.11244535446167,
      "learning_rate": 5.922368421052632e-06,
      "loss": 0.1464,
      "step": 1000
    },
    {
      "epoch": 0.1,
      "eval_loss": 0.4178035259246826,
      "eval_runtime": 311.0898,
      "eval_samples_per_second": 5.436,
      "eval_steps_per_second": 0.681,
      "eval_wer": 20.557012613962783,
      "step": 1000
    },
    {
      "epoch": 0.1025,
      "grad_norm": 8.338362693786621,
      "learning_rate": 5.905921052631579e-06,
      "loss": 0.1547,
      "step": 1025
    },
    {
      "epoch": 0.105,
      "grad_norm": 5.687260627746582,
      "learning_rate": 5.889473684210527e-06,
      "loss": 0.1373,
      "step": 1050
    },
    {
      "epoch": 0.1075,
      "grad_norm": 5.075960159301758,
      "learning_rate": 5.873026315789474e-06,
      "loss": 0.1458,
      "step": 1075
    },
    {
      "epoch": 0.11,
      "grad_norm": 3.7344439029693604,
      "learning_rate": 5.856578947368421e-06,
      "loss": 0.1476,
      "step": 1100
    },
    {
      "epoch": 0.1125,
      "grad_norm": 3.558634042739868,
      "learning_rate": 5.8401315789473684e-06,
      "loss": 0.141,
      "step": 1125
    },
    {
      "epoch": 0.115,
      "grad_norm": 4.66357421875,
      "learning_rate": 5.823684210526316e-06,
      "loss": 0.1374,
      "step": 1150
    },
    {
      "epoch": 0.1175,
      "grad_norm": 2.8398051261901855,
      "learning_rate": 5.807236842105263e-06,
      "loss": 0.1382,
      "step": 1175
    },
    {
      "epoch": 0.12,
      "grad_norm": 5.431055545806885,
      "learning_rate": 5.79078947368421e-06,
      "loss": 0.146,
      "step": 1200
    },
    {
      "epoch": 0.1225,
      "grad_norm": 3.4897637367248535,
      "learning_rate": 5.7743421052631576e-06,
      "loss": 0.1241,
      "step": 1225
    },
    {
      "epoch": 0.125,
      "grad_norm": 3.5461232662200928,
      "learning_rate": 5.757894736842106e-06,
      "loss": 0.122,
      "step": 1250
    },
    {
      "epoch": 0.1275,
      "grad_norm": 4.466988563537598,
      "learning_rate": 5.741447368421053e-06,
      "loss": 0.1269,
      "step": 1275
    },
    {
      "epoch": 0.13,
      "grad_norm": 3.7680704593658447,
      "learning_rate": 5.725e-06,
      "loss": 0.147,
      "step": 1300
    },
    {
      "epoch": 0.1325,
      "grad_norm": 3.8308804035186768,
      "learning_rate": 5.7085526315789475e-06,
      "loss": 0.1315,
      "step": 1325
    },
    {
      "epoch": 0.135,
      "grad_norm": 4.032102584838867,
      "learning_rate": 5.692105263157895e-06,
      "loss": 0.1364,
      "step": 1350
    },
    {
      "epoch": 0.1375,
      "grad_norm": 3.5933330059051514,
      "learning_rate": 5.675657894736842e-06,
      "loss": 0.1349,
      "step": 1375
    },
    {
      "epoch": 0.14,
      "grad_norm": 4.046933650970459,
      "learning_rate": 5.659210526315789e-06,
      "loss": 0.1329,
      "step": 1400
    },
    {
      "epoch": 0.1425,
      "grad_norm": 3.5506181716918945,
      "learning_rate": 5.6427631578947375e-06,
      "loss": 0.1394,
      "step": 1425
    },
    {
      "epoch": 0.145,
      "grad_norm": 7.220362186431885,
      "learning_rate": 5.626315789473685e-06,
      "loss": 0.1697,
      "step": 1450
    },
    {
      "epoch": 0.1475,
      "grad_norm": 6.2873406410217285,
      "learning_rate": 5.609868421052632e-06,
      "loss": 0.2135,
      "step": 1475
    },
    {
      "epoch": 0.15,
      "grad_norm": 5.356851100921631,
      "learning_rate": 5.593421052631579e-06,
      "loss": 0.2504,
      "step": 1500
    },
    {
      "epoch": 0.15,
      "eval_loss": 0.36252710223197937,
      "eval_runtime": 312.0138,
      "eval_samples_per_second": 5.42,
      "eval_steps_per_second": 0.679,
      "eval_wer": 18.127888097914322,
      "step": 1500
    },
    {
      "epoch": 0.1525,
      "grad_norm": 5.587256908416748,
      "learning_rate": 5.576973684210527e-06,
      "loss": 0.2411,
      "step": 1525
    },
    {
      "epoch": 0.155,
      "grad_norm": 5.078568935394287,
      "learning_rate": 5.560526315789474e-06,
      "loss": 0.2064,
      "step": 1550
    },
    {
      "epoch": 0.1575,
      "grad_norm": 6.567940711975098,
      "learning_rate": 5.544078947368421e-06,
      "loss": 0.2169,
      "step": 1575
    },
    {
      "epoch": 0.16,
      "grad_norm": 7.1573262214660645,
      "learning_rate": 5.527631578947369e-06,
      "loss": 0.2777,
      "step": 1600
    },
    {
      "epoch": 0.1625,
      "grad_norm": 4.171093940734863,
      "learning_rate": 5.511184210526317e-06,
      "loss": 0.1822,
      "step": 1625
    },
    {
      "epoch": 0.165,
      "grad_norm": 3.8850343227386475,
      "learning_rate": 5.494736842105264e-06,
      "loss": 0.1432,
      "step": 1650
    },
    {
      "epoch": 0.1675,
      "grad_norm": 4.077279090881348,
      "learning_rate": 5.478289473684211e-06,
      "loss": 0.127,
      "step": 1675
    },
    {
      "epoch": 0.17,
      "grad_norm": 3.6042423248291016,
      "learning_rate": 5.4618421052631584e-06,
      "loss": 0.1257,
      "step": 1700
    },
    {
      "epoch": 0.1725,
      "grad_norm": 3.1928248405456543,
      "learning_rate": 5.445394736842106e-06,
      "loss": 0.118,
      "step": 1725
    },
    {
      "epoch": 0.175,
      "grad_norm": 2.867123603820801,
      "learning_rate": 5.428947368421053e-06,
      "loss": 0.1106,
      "step": 1750
    },
    {
      "epoch": 0.1775,
      "grad_norm": 5.2130126953125,
      "learning_rate": 5.4125e-06,
      "loss": 0.1563,
      "step": 1775
    },
    {
      "epoch": 0.18,
      "grad_norm": 4.600515365600586,
      "learning_rate": 5.3960526315789476e-06,
      "loss": 0.192,
      "step": 1800
    },
    {
      "epoch": 0.1825,
      "grad_norm": 6.257545471191406,
      "learning_rate": 5.379605263157895e-06,
      "loss": 0.2121,
      "step": 1825
    },
    {
      "epoch": 0.185,
      "grad_norm": 6.946799278259277,
      "learning_rate": 5.363157894736842e-06,
      "loss": 0.2132,
      "step": 1850
    },
    {
      "epoch": 0.1875,
      "grad_norm": 5.795095920562744,
      "learning_rate": 5.346710526315789e-06,
      "loss": 0.1966,
      "step": 1875
    },
    {
      "epoch": 0.19,
      "grad_norm": 5.471553325653076,
      "learning_rate": 5.330263157894737e-06,
      "loss": 0.1909,
      "step": 1900
    },
    {
      "epoch": 0.1925,
      "grad_norm": 4.954725742340088,
      "learning_rate": 5.313815789473685e-06,
      "loss": 0.2012,
      "step": 1925
    },
    {
      "epoch": 0.195,
      "grad_norm": 11.061971664428711,
      "learning_rate": 5.297368421052632e-06,
      "loss": 0.5868,
      "step": 1950
    },
    {
      "epoch": 0.1975,
      "grad_norm": 5.9034199714660645,
      "learning_rate": 5.280921052631579e-06,
      "loss": 0.3758,
      "step": 1975
    },
    {
      "epoch": 0.2,
      "grad_norm": 6.9372358322143555,
      "learning_rate": 5.264473684210527e-06,
      "loss": 0.2615,
      "step": 2000
    },
    {
      "epoch": 0.2,
      "eval_loss": 0.32360586524009705,
      "eval_runtime": 312.1518,
      "eval_samples_per_second": 5.417,
      "eval_steps_per_second": 0.679,
      "eval_wer": 15.536405645060572,
      "step": 2000
    },
    {
      "epoch": 0.2025,
      "grad_norm": 5.795039176940918,
      "learning_rate": 5.248026315789474e-06,
      "loss": 0.2473,
      "step": 2025
    },
    {
      "epoch": 0.205,
      "grad_norm": 5.314310073852539,
      "learning_rate": 5.231578947368421e-06,
      "loss": 0.2305,
      "step": 2050
    },
    {
      "epoch": 0.2075,
      "grad_norm": 4.202529430389404,
      "learning_rate": 5.2151315789473685e-06,
      "loss": 0.1926,
      "step": 2075
    },
    {
      "epoch": 0.21,
      "grad_norm": 3.7446370124816895,
      "learning_rate": 5.198684210526316e-06,
      "loss": 0.1342,
      "step": 2100
    },
    {
      "epoch": 0.2125,
      "grad_norm": 3.008561611175537,
      "learning_rate": 5.182236842105263e-06,
      "loss": 0.106,
      "step": 2125
    },
    {
      "epoch": 0.215,
      "grad_norm": 3.4551093578338623,
      "learning_rate": 5.16578947368421e-06,
      "loss": 0.1213,
      "step": 2150
    },
    {
      "epoch": 0.2175,
      "grad_norm": 3.4330601692199707,
      "learning_rate": 5.149342105263158e-06,
      "loss": 0.1063,
      "step": 2175
    },
    {
      "epoch": 0.22,
      "grad_norm": 2.9976322650909424,
      "learning_rate": 5.132894736842105e-06,
      "loss": 0.0969,
      "step": 2200
    },
    {
      "epoch": 0.2225,
      "grad_norm": 3.3232624530792236,
      "learning_rate": 5.116447368421053e-06,
      "loss": 0.0976,
      "step": 2225
    },
    {
      "epoch": 0.225,
      "grad_norm": 2.5418078899383545,
      "learning_rate": 5.1e-06,
      "loss": 0.1,
      "step": 2250
    },
    {
      "epoch": 0.2275,
      "grad_norm": 4.438797950744629,
      "learning_rate": 5.083552631578948e-06,
      "loss": 0.1022,
      "step": 2275
    },
    {
      "epoch": 0.23,
      "grad_norm": 3.545685291290283,
      "learning_rate": 5.067105263157895e-06,
      "loss": 0.1066,
      "step": 2300
    },
    {
      "epoch": 0.2325,
      "grad_norm": 3.276963472366333,
      "learning_rate": 5.050657894736842e-06,
      "loss": 0.1074,
      "step": 2325
    },
    {
      "epoch": 0.235,
      "grad_norm": 3.519914150238037,
      "learning_rate": 5.03421052631579e-06,
      "loss": 0.108,
      "step": 2350
    },
    {
      "epoch": 0.2375,
      "grad_norm": 2.806739330291748,
      "learning_rate": 5.0177631578947375e-06,
      "loss": 0.1023,
      "step": 2375
    },
    {
      "epoch": 0.24,
      "grad_norm": 3.1058409214019775,
      "learning_rate": 5.001315789473685e-06,
      "loss": 0.1051,
      "step": 2400
    },
    {
      "epoch": 0.2425,
      "grad_norm": 3.6954598426818848,
      "learning_rate": 4.984868421052632e-06,
      "loss": 0.1455,
      "step": 2425
    },
    {
      "epoch": 0.245,
      "grad_norm": 7.356727123260498,
      "learning_rate": 4.968421052631579e-06,
      "loss": 0.1683,
      "step": 2450
    },
    {
      "epoch": 0.2475,
      "grad_norm": 5.583164691925049,
      "learning_rate": 4.951973684210527e-06,
      "loss": 0.1793,
      "step": 2475
    },
    {
      "epoch": 0.25,
      "grad_norm": 4.397960186004639,
      "learning_rate": 4.935526315789474e-06,
      "loss": 0.1648,
      "step": 2500
    },
    {
      "epoch": 0.25,
      "eval_loss": 0.32087442278862,
      "eval_runtime": 309.4082,
      "eval_samples_per_second": 5.465,
      "eval_steps_per_second": 0.685,
      "eval_wer": 13.812913700512052,
      "step": 2500
    },
    {
      "epoch": 0.2525,
      "grad_norm": 5.315972328186035,
      "learning_rate": 4.919078947368421e-06,
      "loss": 0.1876,
      "step": 2525
    },
    {
      "epoch": 0.255,
      "grad_norm": 6.717661380767822,
      "learning_rate": 4.9026315789473685e-06,
      "loss": 0.1991,
      "step": 2550
    },
    {
      "epoch": 0.2575,
      "grad_norm": 7.158634662628174,
      "learning_rate": 4.886184210526316e-06,
      "loss": 0.1923,
      "step": 2575
    },
    {
      "epoch": 0.26,
      "grad_norm": 5.125967025756836,
      "learning_rate": 4.869736842105264e-06,
      "loss": 0.2005,
      "step": 2600
    },
    {
      "epoch": 0.2625,
      "grad_norm": 5.86124849319458,
      "learning_rate": 4.853947368421053e-06,
      "loss": 0.1708,
      "step": 2625
    },
    {
      "epoch": 0.265,
      "grad_norm": 5.399341583251953,
      "learning_rate": 4.837500000000001e-06,
      "loss": 0.1687,
      "step": 2650
    },
    {
      "epoch": 0.2675,
      "grad_norm": 4.922569751739502,
      "learning_rate": 4.821052631578948e-06,
      "loss": 0.1819,
      "step": 2675
    },
    {
      "epoch": 0.27,
      "grad_norm": 6.5883049964904785,
      "learning_rate": 4.804605263157895e-06,
      "loss": 0.1755,
      "step": 2700
    },
    {
      "epoch": 0.2725,
      "grad_norm": 5.689633369445801,
      "learning_rate": 4.7881578947368425e-06,
      "loss": 0.1623,
      "step": 2725
    },
    {
      "epoch": 0.275,
      "grad_norm": 5.627523422241211,
      "learning_rate": 4.77171052631579e-06,
      "loss": 0.161,
      "step": 2750
    },
    {
      "epoch": 0.2775,
      "grad_norm": 4.8818769454956055,
      "learning_rate": 4.755263157894737e-06,
      "loss": 0.1527,
      "step": 2775
    },
    {
      "epoch": 0.28,
      "grad_norm": 5.591118812561035,
      "learning_rate": 4.738815789473684e-06,
      "loss": 0.167,
      "step": 2800
    },
    {
      "epoch": 0.2825,
      "grad_norm": 6.264982223510742,
      "learning_rate": 4.722368421052632e-06,
      "loss": 0.1644,
      "step": 2825
    },
    {
      "epoch": 0.285,
      "grad_norm": 5.138685703277588,
      "learning_rate": 4.705921052631579e-06,
      "loss": 0.1584,
      "step": 2850
    },
    {
      "epoch": 0.2875,
      "grad_norm": 4.735789775848389,
      "learning_rate": 4.689473684210526e-06,
      "loss": 0.1699,
      "step": 2875
    },
    {
      "epoch": 0.29,
      "grad_norm": 4.896319389343262,
      "learning_rate": 4.673026315789474e-06,
      "loss": 0.1431,
      "step": 2900
    },
    {
      "epoch": 0.2925,
      "grad_norm": 2.7798383235931396,
      "learning_rate": 4.656578947368422e-06,
      "loss": 0.1196,
      "step": 2925
    },
    {
      "epoch": 0.295,
      "grad_norm": 2.8537399768829346,
      "learning_rate": 4.640131578947369e-06,
      "loss": 0.099,
      "step": 2950
    },
    {
      "epoch": 0.2975,
      "grad_norm": 3.941833019256592,
      "learning_rate": 4.623684210526316e-06,
      "loss": 0.0958,
      "step": 2975
    },
    {
      "epoch": 0.3,
      "grad_norm": 3.61875581741333,
      "learning_rate": 4.6072368421052635e-06,
      "loss": 0.0933,
      "step": 3000
    },
    {
      "epoch": 0.3,
      "eval_loss": 0.29909124970436096,
      "eval_runtime": 310.6607,
      "eval_samples_per_second": 5.443,
      "eval_steps_per_second": 0.682,
      "eval_wer": 12.888722367928063,
      "step": 3000
    },
    {
      "epoch": 0.3025,
      "grad_norm": 2.9920144081115723,
      "learning_rate": 4.590789473684211e-06,
      "loss": 0.109,
      "step": 3025
    },
    {
      "epoch": 0.305,
      "grad_norm": 4.337179660797119,
      "learning_rate": 4.574342105263158e-06,
      "loss": 0.0964,
      "step": 3050
    },
    {
      "epoch": 0.3075,
      "grad_norm": 4.3010759353637695,
      "learning_rate": 4.557894736842105e-06,
      "loss": 0.0997,
      "step": 3075
    },
    {
      "epoch": 0.31,
      "grad_norm": 3.9736640453338623,
      "learning_rate": 4.541447368421053e-06,
      "loss": 0.1043,
      "step": 3100
    },
    {
      "epoch": 0.3125,
      "grad_norm": 3.1543047428131104,
      "learning_rate": 4.525e-06,
      "loss": 0.0863,
      "step": 3125
    },
    {
      "epoch": 0.315,
      "grad_norm": 3.2818803787231445,
      "learning_rate": 4.508552631578947e-06,
      "loss": 0.0779,
      "step": 3150
    },
    {
      "epoch": 0.3175,
      "grad_norm": 3.1993777751922607,
      "learning_rate": 4.4921052631578944e-06,
      "loss": 0.0879,
      "step": 3175
    },
    {
      "epoch": 0.32,
      "grad_norm": 4.348583698272705,
      "learning_rate": 4.475657894736842e-06,
      "loss": 0.0908,
      "step": 3200
    },
    {
      "epoch": 0.3225,
      "grad_norm": 4.457339763641357,
      "learning_rate": 4.45921052631579e-06,
      "loss": 0.1101,
      "step": 3225
    },
    {
      "epoch": 0.325,
      "grad_norm": 3.821202516555786,
      "learning_rate": 4.442763157894737e-06,
      "loss": 0.1572,
      "step": 3250
    },
    {
      "epoch": 0.3275,
      "grad_norm": 4.823634147644043,
      "learning_rate": 4.426315789473684e-06,
      "loss": 0.1408,
      "step": 3275
    },
    {
      "epoch": 0.33,
      "grad_norm": 3.4454030990600586,
      "learning_rate": 4.409868421052632e-06,
      "loss": 0.1369,
      "step": 3300
    },
    {
      "epoch": 0.3325,
      "grad_norm": 4.38395357131958,
      "learning_rate": 4.393421052631579e-06,
      "loss": 0.1358,
      "step": 3325
    },
    {
      "epoch": 0.335,
      "grad_norm": 5.808355808258057,
      "learning_rate": 4.376973684210526e-06,
      "loss": 0.1513,
      "step": 3350
    },
    {
      "epoch": 0.3375,
      "grad_norm": 3.999112606048584,
      "learning_rate": 4.3605263157894735e-06,
      "loss": 0.1586,
      "step": 3375
    },
    {
      "epoch": 0.34,
      "grad_norm": 3.027531147003174,
      "learning_rate": 4.344078947368422e-06,
      "loss": 0.1204,
      "step": 3400
    },
    {
      "epoch": 0.3425,
      "grad_norm": 3.536228656768799,
      "learning_rate": 4.327631578947369e-06,
      "loss": 0.1079,
      "step": 3425
    },
    {
      "epoch": 0.345,
      "grad_norm": 3.9071717262268066,
      "learning_rate": 4.311184210526316e-06,
      "loss": 0.1014,
      "step": 3450
    },
    {
      "epoch": 0.3475,
      "grad_norm": 3.9856207370758057,
      "learning_rate": 4.2947368421052635e-06,
      "loss": 0.0936,
      "step": 3475
    },
    {
      "epoch": 0.35,
      "grad_norm": 3.4355082511901855,
      "learning_rate": 4.278289473684211e-06,
      "loss": 0.1016,
      "step": 3500
    },
    {
      "epoch": 0.35,
      "eval_loss": 0.2822968363761902,
      "eval_runtime": 309.9663,
      "eval_samples_per_second": 5.455,
      "eval_steps_per_second": 0.684,
      "eval_wer": 12.43287123766704,
      "step": 3500
    },
    {
      "epoch": 0.3525,
      "grad_norm": 3.297853946685791,
      "learning_rate": 4.261842105263158e-06,
      "loss": 0.0876,
      "step": 3525
    },
    {
      "epoch": 0.355,
      "grad_norm": 5.695490837097168,
      "learning_rate": 4.245394736842105e-06,
      "loss": 0.085,
      "step": 3550
    },
    {
      "epoch": 0.3575,
      "grad_norm": 3.8094873428344727,
      "learning_rate": 4.2289473684210535e-06,
      "loss": 0.1017,
      "step": 3575
    },
    {
      "epoch": 0.36,
      "grad_norm": 2.3558785915374756,
      "learning_rate": 4.212500000000001e-06,
      "loss": 0.0878,
      "step": 3600
    },
    {
      "epoch": 0.3625,
      "grad_norm": 2.606311082839966,
      "learning_rate": 4.196052631578948e-06,
      "loss": 0.0882,
      "step": 3625
    },
    {
      "epoch": 0.365,
      "grad_norm": 2.40248441696167,
      "learning_rate": 4.179605263157895e-06,
      "loss": 0.0825,
      "step": 3650
    },
    {
      "epoch": 0.3675,
      "grad_norm": 3.3909413814544678,
      "learning_rate": 4.163157894736843e-06,
      "loss": 0.0903,
      "step": 3675
    },
    {
      "epoch": 0.37,
      "grad_norm": 3.329094171524048,
      "learning_rate": 4.14671052631579e-06,
      "loss": 0.0801,
      "step": 3700
    },
    {
      "epoch": 0.3725,
      "grad_norm": 2.8874080181121826,
      "learning_rate": 4.130263157894737e-06,
      "loss": 0.0844,
      "step": 3725
    },
    {
      "epoch": 0.375,
      "grad_norm": 2.9029712677001953,
      "learning_rate": 4.1138157894736844e-06,
      "loss": 0.0864,
      "step": 3750
    },
    {
      "epoch": 0.3775,
      "grad_norm": 3.936645269393921,
      "learning_rate": 4.097368421052632e-06,
      "loss": 0.0908,
      "step": 3775
    },
    {
      "epoch": 0.38,
      "grad_norm": 2.8034231662750244,
      "learning_rate": 4.080921052631579e-06,
      "loss": 0.09,
      "step": 3800
    },
    {
      "epoch": 0.3825,
      "grad_norm": 3.077610969543457,
      "learning_rate": 4.064473684210526e-06,
      "loss": 0.086,
      "step": 3825
    },
    {
      "epoch": 0.385,
      "grad_norm": 1.7661832571029663,
      "learning_rate": 4.0480263157894736e-06,
      "loss": 0.0781,
      "step": 3850
    },
    {
      "epoch": 0.3875,
      "grad_norm": 3.0048747062683105,
      "learning_rate": 4.031578947368421e-06,
      "loss": 0.0927,
      "step": 3875
    },
    {
      "epoch": 0.39,
      "grad_norm": 4.764255523681641,
      "learning_rate": 4.015131578947368e-06,
      "loss": 0.1247,
      "step": 3900
    },
    {
      "epoch": 0.3925,
      "grad_norm": 4.866943359375,
      "learning_rate": 3.998684210526316e-06,
      "loss": 0.1457,
      "step": 3925
    },
    {
      "epoch": 0.395,
      "grad_norm": 4.022182464599609,
      "learning_rate": 3.9822368421052635e-06,
      "loss": 0.1532,
      "step": 3950
    },
    {
      "epoch": 0.3975,
      "grad_norm": 5.281564712524414,
      "learning_rate": 3.965789473684211e-06,
      "loss": 0.1538,
      "step": 3975
    },
    {
      "epoch": 0.4,
      "grad_norm": 4.242926597595215,
      "learning_rate": 3.949342105263158e-06,
      "loss": 0.1449,
      "step": 4000
    },
    {
      "epoch": 0.4,
      "eval_loss": 0.2741491496562958,
      "eval_runtime": 310.8913,
      "eval_samples_per_second": 5.439,
      "eval_steps_per_second": 0.682,
      "eval_wer": 11.745972274260023,
      "step": 4000
    },
    {
      "epoch": 0.4025,
      "grad_norm": 4.753086090087891,
      "learning_rate": 3.932894736842105e-06,
      "loss": 0.1337,
      "step": 4025
    },
    {
      "epoch": 0.405,
      "grad_norm": 4.307924747467041,
      "learning_rate": 3.916447368421053e-06,
      "loss": 0.1036,
      "step": 4050
    },
    {
      "epoch": 0.4075,
      "grad_norm": 3.23429012298584,
      "learning_rate": 3.9e-06,
      "loss": 0.0855,
      "step": 4075
    },
    {
      "epoch": 0.41,
      "grad_norm": 2.5869216918945312,
      "learning_rate": 3.883552631578947e-06,
      "loss": 0.0849,
      "step": 4100
    },
    {
      "epoch": 0.4125,
      "grad_norm": 2.809333562850952,
      "learning_rate": 3.8671052631578945e-06,
      "loss": 0.0856,
      "step": 4125
    },
    {
      "epoch": 0.415,
      "grad_norm": 2.9336133003234863,
      "learning_rate": 3.850657894736842e-06,
      "loss": 0.0835,
      "step": 4150
    },
    {
      "epoch": 0.4175,
      "grad_norm": 3.562512159347534,
      "learning_rate": 3.834210526315789e-06,
      "loss": 0.0854,
      "step": 4175
    },
    {
      "epoch": 0.42,
      "grad_norm": 3.9816362857818604,
      "learning_rate": 3.817763157894737e-06,
      "loss": 0.0884,
      "step": 4200
    },
    {
      "epoch": 0.4225,
      "grad_norm": 2.4420506954193115,
      "learning_rate": 3.801315789473684e-06,
      "loss": 0.0736,
      "step": 4225
    },
    {
      "epoch": 0.425,
      "grad_norm": 3.9136462211608887,
      "learning_rate": 3.784868421052632e-06,
      "loss": 0.0744,
      "step": 4250
    },
    {
      "epoch": 0.4275,
      "grad_norm": 2.172910451889038,
      "learning_rate": 3.7684210526315794e-06,
      "loss": 0.0682,
      "step": 4275
    },
    {
      "epoch": 0.43,
      "grad_norm": 3.7281970977783203,
      "learning_rate": 3.7519736842105267e-06,
      "loss": 0.0741,
      "step": 4300
    },
    {
      "epoch": 0.4325,
      "grad_norm": 2.0469653606414795,
      "learning_rate": 3.735526315789474e-06,
      "loss": 0.0682,
      "step": 4325
    },
    {
      "epoch": 0.435,
      "grad_norm": 4.093785762786865,
      "learning_rate": 3.7190789473684213e-06,
      "loss": 0.0808,
      "step": 4350
    },
    {
      "epoch": 0.4375,
      "grad_norm": 4.987372398376465,
      "learning_rate": 3.7026315789473686e-06,
      "loss": 0.1262,
      "step": 4375
    },
    {
      "epoch": 0.44,
      "grad_norm": 4.799498081207275,
      "learning_rate": 3.686184210526316e-06,
      "loss": 0.137,
      "step": 4400
    },
    {
      "epoch": 0.4425,
      "grad_norm": 6.20047664642334,
      "learning_rate": 3.669736842105263e-06,
      "loss": 0.134,
      "step": 4425
    },
    {
      "epoch": 0.445,
      "grad_norm": 4.391836166381836,
      "learning_rate": 3.653289473684211e-06,
      "loss": 0.1236,
      "step": 4450
    },
    {
      "epoch": 0.4475,
      "grad_norm": 3.504007577896118,
      "learning_rate": 3.636842105263158e-06,
      "loss": 0.1367,
      "step": 4475
    },
    {
      "epoch": 0.45,
      "grad_norm": 4.362712860107422,
      "learning_rate": 3.6203947368421054e-06,
      "loss": 0.151,
      "step": 4500
    },
    {
      "epoch": 0.45,
      "eval_loss": 0.27905017137527466,
      "eval_runtime": 310.3753,
      "eval_samples_per_second": 5.448,
      "eval_steps_per_second": 0.683,
      "eval_wer": 11.577369801423753,
      "step": 4500
    },
    {
      "epoch": 0.4525,
      "grad_norm": 3.1015007495880127,
      "learning_rate": 3.6039473684210527e-06,
      "loss": 0.1064,
      "step": 4525
    },
    {
      "epoch": 0.455,
      "grad_norm": 2.9454779624938965,
      "learning_rate": 3.5875e-06,
      "loss": 0.0809,
      "step": 4550
    },
    {
      "epoch": 0.4575,
      "grad_norm": 2.50055193901062,
      "learning_rate": 3.5710526315789472e-06,
      "loss": 0.0787,
      "step": 4575
    },
    {
      "epoch": 0.46,
      "grad_norm": 3.667400360107422,
      "learning_rate": 3.5546052631578954e-06,
      "loss": 0.0633,
      "step": 4600
    },
    {
      "epoch": 0.4625,
      "grad_norm": 3.109917640686035,
      "learning_rate": 3.5381578947368426e-06,
      "loss": 0.0729,
      "step": 4625
    },
    {
      "epoch": 0.465,
      "grad_norm": 3.5514161586761475,
      "learning_rate": 3.52171052631579e-06,
      "loss": 0.0718,
      "step": 4650
    },
    {
      "epoch": 0.4675,
      "grad_norm": 2.248009443283081,
      "learning_rate": 3.505263157894737e-06,
      "loss": 0.0672,
      "step": 4675
    },
    {
      "epoch": 0.47,
      "grad_norm": 2.2858293056488037,
      "learning_rate": 3.4888157894736845e-06,
      "loss": 0.0637,
      "step": 4700
    },
    {
      "epoch": 0.4725,
      "grad_norm": 3.329707145690918,
      "learning_rate": 3.4723684210526318e-06,
      "loss": 0.0767,
      "step": 4725
    },
    {
      "epoch": 0.475,
      "grad_norm": 2.267868995666504,
      "learning_rate": 3.455921052631579e-06,
      "loss": 0.0735,
      "step": 4750
    },
    {
      "epoch": 0.4775,
      "grad_norm": 2.906242847442627,
      "learning_rate": 3.4394736842105263e-06,
      "loss": 0.0773,
      "step": 4775
    },
    {
      "epoch": 0.48,
      "grad_norm": 2.577254056930542,
      "learning_rate": 3.4230263157894736e-06,
      "loss": 0.0703,
      "step": 4800
    },
    {
      "epoch": 0.4825,
      "grad_norm": 3.4477596282958984,
      "learning_rate": 3.406578947368421e-06,
      "loss": 0.0748,
      "step": 4825
    },
    {
      "epoch": 0.485,
      "grad_norm": 2.1221792697906494,
      "learning_rate": 3.390131578947368e-06,
      "loss": 0.0776,
      "step": 4850
    },
    {
      "epoch": 0.4875,
      "grad_norm": 3.1995036602020264,
      "learning_rate": 3.373684210526316e-06,
      "loss": 0.0778,
      "step": 4875
    },
    {
      "epoch": 0.49,
      "grad_norm": 3.112896680831909,
      "learning_rate": 3.357236842105263e-06,
      "loss": 0.0708,
      "step": 4900
    },
    {
      "epoch": 0.4925,
      "grad_norm": 2.4315624237060547,
      "learning_rate": 3.340789473684211e-06,
      "loss": 0.0724,
      "step": 4925
    },
    {
      "epoch": 0.495,
      "grad_norm": 2.819709062576294,
      "learning_rate": 3.324342105263158e-06,
      "loss": 0.0818,
      "step": 4950
    },
    {
      "epoch": 0.4975,
      "grad_norm": 2.1634130477905273,
      "learning_rate": 3.307894736842106e-06,
      "loss": 0.0882,
      "step": 4975
    },
    {
      "epoch": 0.5,
      "grad_norm": 4.342939853668213,
      "learning_rate": 3.291447368421053e-06,
      "loss": 0.0917,
      "step": 5000
    },
    {
      "epoch": 0.5,
      "eval_loss": 0.2744329869747162,
      "eval_runtime": 312.3843,
      "eval_samples_per_second": 5.413,
      "eval_steps_per_second": 0.679,
      "eval_wer": 11.240164855751217,
      "step": 5000
    },
    {
      "epoch": 0.5025,
      "grad_norm": 3.250617265701294,
      "learning_rate": 3.2750000000000004e-06,
      "loss": 0.1103,
      "step": 5025
    },
    {
      "epoch": 0.505,
      "grad_norm": 4.073126316070557,
      "learning_rate": 3.2585526315789477e-06,
      "loss": 0.1159,
      "step": 5050
    },
    {
      "epoch": 0.5075,
      "grad_norm": 4.243088245391846,
      "learning_rate": 3.242105263157895e-06,
      "loss": 0.1032,
      "step": 5075
    },
    {
      "epoch": 0.51,
      "grad_norm": 4.274135589599609,
      "learning_rate": 3.2256578947368422e-06,
      "loss": 0.1059,
      "step": 5100
    },
    {
      "epoch": 0.5125,
      "grad_norm": 4.127582550048828,
      "learning_rate": 3.2092105263157895e-06,
      "loss": 0.1382,
      "step": 5125
    },
    {
      "epoch": 0.515,
      "grad_norm": 4.066865921020508,
      "learning_rate": 3.192763157894737e-06,
      "loss": 0.1305,
      "step": 5150
    },
    {
      "epoch": 0.5175,
      "grad_norm": 3.285428047180176,
      "learning_rate": 3.176315789473684e-06,
      "loss": 0.111,
      "step": 5175
    },
    {
      "epoch": 0.52,
      "grad_norm": 3.5091636180877686,
      "learning_rate": 3.1598684210526314e-06,
      "loss": 0.0872,
      "step": 5200
    },
    {
      "epoch": 0.5225,
      "grad_norm": 3.08402156829834,
      "learning_rate": 3.1434210526315787e-06,
      "loss": 0.0802,
      "step": 5225
    },
    {
      "epoch": 0.525,
      "grad_norm": 3.693411350250244,
      "learning_rate": 3.126973684210526e-06,
      "loss": 0.0775,
      "step": 5250
    },
    {
      "epoch": 0.5275,
      "grad_norm": 3.014009714126587,
      "learning_rate": 3.1105263157894736e-06,
      "loss": 0.0789,
      "step": 5275
    },
    {
      "epoch": 0.53,
      "grad_norm": 2.3794004917144775,
      "learning_rate": 3.094078947368421e-06,
      "loss": 0.0695,
      "step": 5300
    },
    {
      "epoch": 0.5325,
      "grad_norm": 2.7316601276397705,
      "learning_rate": 3.0776315789473686e-06,
      "loss": 0.0682,
      "step": 5325
    },
    {
      "epoch": 0.535,
      "grad_norm": 2.8209574222564697,
      "learning_rate": 3.061184210526316e-06,
      "loss": 0.0738,
      "step": 5350
    },
    {
      "epoch": 0.5375,
      "grad_norm": 2.3420145511627197,
      "learning_rate": 3.0447368421052636e-06,
      "loss": 0.068,
      "step": 5375
    },
    {
      "epoch": 0.54,
      "grad_norm": 1.3665214776992798,
      "learning_rate": 3.028289473684211e-06,
      "loss": 0.0652,
      "step": 5400
    },
    {
      "epoch": 0.5425,
      "grad_norm": 4.0754194259643555,
      "learning_rate": 3.011842105263158e-06,
      "loss": 0.08,
      "step": 5425
    },
    {
      "epoch": 0.545,
      "grad_norm": 2.3295063972473145,
      "learning_rate": 2.9953947368421055e-06,
      "loss": 0.0886,
      "step": 5450
    },
    {
      "epoch": 0.5475,
      "grad_norm": 3.3749866485595703,
      "learning_rate": 2.9789473684210527e-06,
      "loss": 0.0785,
      "step": 5475
    },
    {
      "epoch": 0.55,
      "grad_norm": 3.397775411605835,
      "learning_rate": 2.9625e-06,
      "loss": 0.0913,
      "step": 5500
    },
    {
      "epoch": 0.55,
      "eval_loss": 0.2900996208190918,
      "eval_runtime": 312.5057,
      "eval_samples_per_second": 5.411,
      "eval_steps_per_second": 0.678,
      "eval_wer": 11.134007743224679,
      "step": 5500
    },
    {
      "epoch": 0.5525,
      "grad_norm": 4.3687520027160645,
      "learning_rate": 2.9460526315789477e-06,
      "loss": 0.1542,
      "step": 5525
    },
    {
      "epoch": 0.555,
      "grad_norm": 3.6556851863861084,
      "learning_rate": 2.929605263157895e-06,
      "loss": 0.156,
      "step": 5550
    },
    {
      "epoch": 0.5575,
      "grad_norm": 4.368980407714844,
      "learning_rate": 2.9131578947368423e-06,
      "loss": 0.1429,
      "step": 5575
    },
    {
      "epoch": 0.56,
      "grad_norm": 3.384490966796875,
      "learning_rate": 2.8967105263157896e-06,
      "loss": 0.1299,
      "step": 5600
    },
    {
      "epoch": 0.5625,
      "grad_norm": 4.780158996582031,
      "learning_rate": 2.880263157894737e-06,
      "loss": 0.109,
      "step": 5625
    },
    {
      "epoch": 0.565,
      "grad_norm": 4.373064994812012,
      "learning_rate": 2.863815789473684e-06,
      "loss": 0.1238,
      "step": 5650
    },
    {
      "epoch": 0.5675,
      "grad_norm": 3.6999049186706543,
      "learning_rate": 2.8473684210526314e-06,
      "loss": 0.1186,
      "step": 5675
    },
    {
      "epoch": 0.57,
      "grad_norm": 3.49452805519104,
      "learning_rate": 2.830921052631579e-06,
      "loss": 0.1206,
      "step": 5700
    },
    {
      "epoch": 0.5725,
      "grad_norm": 5.771860122680664,
      "learning_rate": 2.8144736842105264e-06,
      "loss": 0.1247,
      "step": 5725
    },
    {
      "epoch": 0.575,
      "grad_norm": 4.202964782714844,
      "learning_rate": 2.7980263157894737e-06,
      "loss": 0.126,
      "step": 5750
    },
    {
      "epoch": 0.5775,
      "grad_norm": 4.583528518676758,
      "learning_rate": 2.7815789473684214e-06,
      "loss": 0.1139,
      "step": 5775
    },
    {
      "epoch": 0.58,
      "grad_norm": 4.833715438842773,
      "learning_rate": 2.7651315789473687e-06,
      "loss": 0.1198,
      "step": 5800
    },
    {
      "epoch": 0.5825,
      "grad_norm": 2.9196882247924805,
      "learning_rate": 2.748684210526316e-06,
      "loss": 0.0993,
      "step": 5825
    },
    {
      "epoch": 0.585,
      "grad_norm": 3.0068795680999756,
      "learning_rate": 2.7322368421052632e-06,
      "loss": 0.0959,
      "step": 5850
    },
    {
      "epoch": 0.5875,
      "grad_norm": 2.4897634983062744,
      "learning_rate": 2.715789473684211e-06,
      "loss": 0.0701,
      "step": 5875
    },
    {
      "epoch": 0.59,
      "grad_norm": 3.400179147720337,
      "learning_rate": 2.699342105263158e-06,
      "loss": 0.0711,
      "step": 5900
    },
    {
      "epoch": 0.5925,
      "grad_norm": 3.5197243690490723,
      "learning_rate": 2.6828947368421055e-06,
      "loss": 0.0712,
      "step": 5925
    },
    {
      "epoch": 0.595,
      "grad_norm": 2.5338480472564697,
      "learning_rate": 2.6664473684210528e-06,
      "loss": 0.0793,
      "step": 5950
    },
    {
      "epoch": 0.5975,
      "grad_norm": 3.2590153217315674,
      "learning_rate": 2.65e-06,
      "loss": 0.0998,
      "step": 5975
    },
    {
      "epoch": 0.6,
      "grad_norm": 4.8374786376953125,
      "learning_rate": 2.6335526315789473e-06,
      "loss": 0.1085,
      "step": 6000
    },
    {
      "epoch": 0.6,
      "eval_loss": 0.26629793643951416,
      "eval_runtime": 310.8598,
      "eval_samples_per_second": 5.44,
      "eval_steps_per_second": 0.682,
      "eval_wer": 10.328462595229174,
      "step": 6000
    },
    {
      "epoch": 0.6025,
      "grad_norm": 4.294407367706299,
      "learning_rate": 2.617105263157895e-06,
      "loss": 0.1181,
      "step": 6025
    },
    {
      "epoch": 0.605,
      "grad_norm": 6.029516696929932,
      "learning_rate": 2.6006578947368423e-06,
      "loss": 0.128,
      "step": 6050
    },
    {
      "epoch": 0.6075,
      "grad_norm": 5.578451156616211,
      "learning_rate": 2.5842105263157896e-06,
      "loss": 0.1395,
      "step": 6075
    },
    {
      "epoch": 0.61,
      "grad_norm": 5.231232166290283,
      "learning_rate": 2.567763157894737e-06,
      "loss": 0.1483,
      "step": 6100
    },
    {
      "epoch": 0.6125,
      "grad_norm": 4.35582971572876,
      "learning_rate": 2.551315789473684e-06,
      "loss": 0.1121,
      "step": 6125
    },
    {
      "epoch": 0.615,
      "grad_norm": 2.8660449981689453,
      "learning_rate": 2.5348684210526314e-06,
      "loss": 0.0824,
      "step": 6150
    },
    {
      "epoch": 0.6175,
      "grad_norm": 2.893280029296875,
      "learning_rate": 2.518421052631579e-06,
      "loss": 0.0716,
      "step": 6175
    },
    {
      "epoch": 0.62,
      "grad_norm": 6.665101051330566,
      "learning_rate": 2.5019736842105264e-06,
      "loss": 0.0723,
      "step": 6200
    },
    {
      "epoch": 0.6225,
      "grad_norm": 2.260016441345215,
      "learning_rate": 2.485526315789474e-06,
      "loss": 0.0656,
      "step": 6225
    },
    {
      "epoch": 0.625,
      "grad_norm": 2.472695827484131,
      "learning_rate": 2.4690789473684214e-06,
      "loss": 0.0695,
      "step": 6250
    },
    {
      "epoch": 0.6275,
      "grad_norm": 3.830807685852051,
      "learning_rate": 2.4526315789473687e-06,
      "loss": 0.077,
      "step": 6275
    },
    {
      "epoch": 0.63,
      "grad_norm": 3.547133445739746,
      "learning_rate": 2.436184210526316e-06,
      "loss": 0.0841,
      "step": 6300
    },
    {
      "epoch": 0.6325,
      "grad_norm": 3.731278657913208,
      "learning_rate": 2.4197368421052632e-06,
      "loss": 0.1223,
      "step": 6325
    },
    {
      "epoch": 0.635,
      "grad_norm": 4.081872463226318,
      "learning_rate": 2.4032894736842105e-06,
      "loss": 0.1381,
      "step": 6350
    },
    {
      "epoch": 0.6375,
      "grad_norm": 4.540515899658203,
      "learning_rate": 2.3868421052631582e-06,
      "loss": 0.1093,
      "step": 6375
    },
    {
      "epoch": 0.64,
      "grad_norm": 5.636559009552002,
      "learning_rate": 2.3703947368421055e-06,
      "loss": 0.1285,
      "step": 6400
    },
    {
      "epoch": 0.6425,
      "grad_norm": 3.959172248840332,
      "learning_rate": 2.3539473684210528e-06,
      "loss": 0.1263,
      "step": 6425
    },
    {
      "epoch": 0.645,
      "grad_norm": 3.610605239868164,
      "learning_rate": 2.3375e-06,
      "loss": 0.1241,
      "step": 6450
    },
    {
      "epoch": 0.6475,
      "grad_norm": 2.831744432449341,
      "learning_rate": 2.3210526315789473e-06,
      "loss": 0.0929,
      "step": 6475
    },
    {
      "epoch": 0.65,
      "grad_norm": 4.5434112548828125,
      "learning_rate": 2.3046052631578946e-06,
      "loss": 0.0928,
      "step": 6500
    },
    {
      "epoch": 0.65,
      "eval_loss": 0.27054235339164734,
      "eval_runtime": 310.9109,
      "eval_samples_per_second": 5.439,
      "eval_steps_per_second": 0.682,
      "eval_wer": 10.290995379043338,
      "step": 6500
    },
    {
      "epoch": 0.6525,
      "grad_norm": 3.8071448802948,
      "learning_rate": 2.288157894736842e-06,
      "loss": 0.1015,
      "step": 6525
    },
    {
      "epoch": 0.655,
      "grad_norm": 4.561729907989502,
      "learning_rate": 2.2717105263157896e-06,
      "loss": 0.1075,
      "step": 6550
    },
    {
      "epoch": 0.6575,
      "grad_norm": 6.857303142547607,
      "learning_rate": 2.255263157894737e-06,
      "loss": 0.1283,
      "step": 6575
    },
    {
      "epoch": 0.66,
      "grad_norm": 2.453704357147217,
      "learning_rate": 2.238815789473684e-06,
      "loss": 0.1116,
      "step": 6600
    },
    {
      "epoch": 0.6625,
      "grad_norm": 3.7849581241607666,
      "learning_rate": 2.2230263157894737e-06,
      "loss": 0.1082,
      "step": 6625
    },
    {
      "epoch": 0.665,
      "grad_norm": 3.687464714050293,
      "learning_rate": 2.2065789473684214e-06,
      "loss": 0.0785,
      "step": 6650
    },
    {
      "epoch": 0.6675,
      "grad_norm": 2.490171194076538,
      "learning_rate": 2.1901315789473687e-06,
      "loss": 0.0693,
      "step": 6675
    },
    {
      "epoch": 0.67,
      "grad_norm": 3.452415943145752,
      "learning_rate": 2.173684210526316e-06,
      "loss": 0.0748,
      "step": 6700
    },
    {
      "epoch": 0.6725,
      "grad_norm": 2.4257311820983887,
      "learning_rate": 2.157236842105263e-06,
      "loss": 0.0618,
      "step": 6725
    },
    {
      "epoch": 0.675,
      "grad_norm": 2.6125080585479736,
      "learning_rate": 2.1407894736842105e-06,
      "loss": 0.0597,
      "step": 6750
    },
    {
      "epoch": 0.6775,
      "grad_norm": 2.49169921875,
      "learning_rate": 2.1243421052631578e-06,
      "loss": 0.0639,
      "step": 6775
    },
    {
      "epoch": 0.68,
      "grad_norm": 3.193976640701294,
      "learning_rate": 2.107894736842105e-06,
      "loss": 0.0712,
      "step": 6800
    },
    {
      "epoch": 0.6825,
      "grad_norm": 3.9939494132995605,
      "learning_rate": 2.0914473684210528e-06,
      "loss": 0.0758,
      "step": 6825
    },
    {
      "epoch": 0.685,
      "grad_norm": 2.6171138286590576,
      "learning_rate": 2.075e-06,
      "loss": 0.0741,
      "step": 6850
    },
    {
      "epoch": 0.6875,
      "grad_norm": 2.4052670001983643,
      "learning_rate": 2.0585526315789473e-06,
      "loss": 0.0731,
      "step": 6875
    },
    {
      "epoch": 0.69,
      "grad_norm": 2.032867670059204,
      "learning_rate": 2.042105263157895e-06,
      "loss": 0.0637,
      "step": 6900
    },
    {
      "epoch": 0.6925,
      "grad_norm": 2.387608528137207,
      "learning_rate": 2.0256578947368423e-06,
      "loss": 0.0609,
      "step": 6925
    },
    {
      "epoch": 0.695,
      "grad_norm": 2.9969546794891357,
      "learning_rate": 2.0092105263157896e-06,
      "loss": 0.0525,
      "step": 6950
    },
    {
      "epoch": 0.6975,
      "grad_norm": 3.8997292518615723,
      "learning_rate": 1.992763157894737e-06,
      "loss": 0.0695,
      "step": 6975
    },
    {
      "epoch": 0.7,
      "grad_norm": 3.1098289489746094,
      "learning_rate": 1.9763157894736846e-06,
      "loss": 0.0725,
      "step": 7000
    },
    {
      "epoch": 0.7,
      "eval_loss": 0.25059443712234497,
      "eval_runtime": 311.2211,
      "eval_samples_per_second": 5.433,
      "eval_steps_per_second": 0.681,
      "eval_wer": 10.303484451105282,
      "step": 7000
    },
    {
      "epoch": 0.7025,
      "grad_norm": 2.383833169937134,
      "learning_rate": 1.959868421052632e-06,
      "loss": 0.0645,
      "step": 7025
    },
    {
      "epoch": 0.705,
      "grad_norm": 2.534926414489746,
      "learning_rate": 1.943421052631579e-06,
      "loss": 0.0645,
      "step": 7050
    },
    {
      "epoch": 0.7075,
      "grad_norm": 2.4160711765289307,
      "learning_rate": 1.9269736842105264e-06,
      "loss": 0.0604,
      "step": 7075
    },
    {
      "epoch": 0.71,
      "grad_norm": 2.3084723949432373,
      "learning_rate": 1.9105263157894737e-06,
      "loss": 0.0728,
      "step": 7100
    },
    {
      "epoch": 0.7125,
      "grad_norm": 2.9332683086395264,
      "learning_rate": 1.894078947368421e-06,
      "loss": 0.0987,
      "step": 7125
    },
    {
      "epoch": 0.715,
      "grad_norm": 4.322076320648193,
      "learning_rate": 1.8776315789473683e-06,
      "loss": 0.1165,
      "step": 7150
    },
    {
      "epoch": 0.7175,
      "grad_norm": 4.348486423492432,
      "learning_rate": 1.861184210526316e-06,
      "loss": 0.121,
      "step": 7175
    },
    {
      "epoch": 0.72,
      "grad_norm": 3.1339285373687744,
      "learning_rate": 1.8447368421052632e-06,
      "loss": 0.1193,
      "step": 7200
    },
    {
      "epoch": 0.7225,
      "grad_norm": 4.583907604217529,
      "learning_rate": 1.8282894736842107e-06,
      "loss": 0.1266,
      "step": 7225
    },
    {
      "epoch": 0.725,
      "grad_norm": 4.42141056060791,
      "learning_rate": 1.811842105263158e-06,
      "loss": 0.1038,
      "step": 7250
    },
    {
      "epoch": 0.7275,
      "grad_norm": 7.883315563201904,
      "learning_rate": 1.7953947368421053e-06,
      "loss": 0.0684,
      "step": 7275
    },
    {
      "epoch": 0.73,
      "grad_norm": 2.6488800048828125,
      "learning_rate": 1.7789473684210526e-06,
      "loss": 0.0702,
      "step": 7300
    },
    {
      "epoch": 0.7325,
      "grad_norm": 3.029294013977051,
      "learning_rate": 1.7624999999999999e-06,
      "loss": 0.0673,
      "step": 7325
    },
    {
      "epoch": 0.735,
      "grad_norm": 2.9928712844848633,
      "learning_rate": 1.7460526315789476e-06,
      "loss": 0.0533,
      "step": 7350
    },
    {
      "epoch": 0.7375,
      "grad_norm": 2.252870798110962,
      "learning_rate": 1.7296052631578948e-06,
      "loss": 0.0661,
      "step": 7375
    },
    {
      "epoch": 0.74,
      "grad_norm": 2.5947258472442627,
      "learning_rate": 1.7131578947368421e-06,
      "loss": 0.0658,
      "step": 7400
    },
    {
      "epoch": 0.7425,
      "grad_norm": 2.529686212539673,
      "learning_rate": 1.6967105263157896e-06,
      "loss": 0.0816,
      "step": 7425
    },
    {
      "epoch": 0.745,
      "grad_norm": 3.286830186843872,
      "learning_rate": 1.6802631578947369e-06,
      "loss": 0.0726,
      "step": 7450
    },
    {
      "epoch": 0.7475,
      "grad_norm": 3.698190689086914,
      "learning_rate": 1.6638157894736842e-06,
      "loss": 0.0672,
      "step": 7475
    },
    {
      "epoch": 0.75,
      "grad_norm": 5.162793159484863,
      "learning_rate": 1.6473684210526319e-06,
      "loss": 0.1216,
      "step": 7500
    },
    {
      "epoch": 0.75,
      "eval_loss": 0.2758373022079468,
      "eval_runtime": 310.6157,
      "eval_samples_per_second": 5.444,
      "eval_steps_per_second": 0.683,
      "eval_wer": 9.710253528162857,
      "step": 7500
    },
    {
      "epoch": 0.7525,
      "grad_norm": 4.540894985198975,
      "learning_rate": 1.6309210526315792e-06,
      "loss": 0.136,
      "step": 7525
    },
    {
      "epoch": 0.755,
      "grad_norm": 4.004950523376465,
      "learning_rate": 1.6144736842105264e-06,
      "loss": 0.1097,
      "step": 7550
    },
    {
      "epoch": 0.7575,
      "grad_norm": 3.7303011417388916,
      "learning_rate": 1.5980263157894737e-06,
      "loss": 0.1079,
      "step": 7575
    },
    {
      "epoch": 0.76,
      "grad_norm": 4.908395290374756,
      "learning_rate": 1.581578947368421e-06,
      "loss": 0.1315,
      "step": 7600
    },
    {
      "epoch": 0.7625,
      "grad_norm": 5.0614914894104,
      "learning_rate": 1.5651315789473685e-06,
      "loss": 0.1239,
      "step": 7625
    },
    {
      "epoch": 0.765,
      "grad_norm": 4.0791754722595215,
      "learning_rate": 1.548684210526316e-06,
      "loss": 0.1088,
      "step": 7650
    },
    {
      "epoch": 0.7675,
      "grad_norm": 4.49265193939209,
      "learning_rate": 1.5322368421052633e-06,
      "loss": 0.1215,
      "step": 7675
    },
    {
      "epoch": 0.77,
      "grad_norm": 5.465856552124023,
      "learning_rate": 1.5157894736842105e-06,
      "loss": 0.1255,
      "step": 7700
    },
    {
      "epoch": 0.7725,
      "grad_norm": 4.9540276527404785,
      "learning_rate": 1.499342105263158e-06,
      "loss": 0.1218,
      "step": 7725
    },
    {
      "epoch": 0.775,
      "grad_norm": 2.4361021518707275,
      "learning_rate": 1.4828947368421053e-06,
      "loss": 0.1007,
      "step": 7750
    },
    {
      "epoch": 0.7775,
      "grad_norm": 2.6608471870422363,
      "learning_rate": 1.4664473684210526e-06,
      "loss": 0.0823,
      "step": 7775
    },
    {
      "epoch": 0.78,
      "grad_norm": 4.3350419998168945,
      "learning_rate": 1.45e-06,
      "loss": 0.074,
      "step": 7800
    },
    {
      "epoch": 0.7825,
      "grad_norm": 2.1602139472961426,
      "learning_rate": 1.4335526315789474e-06,
      "loss": 0.0662,
      "step": 7825
    },
    {
      "epoch": 0.785,
      "grad_norm": 2.477482795715332,
      "learning_rate": 1.4171052631578949e-06,
      "loss": 0.0748,
      "step": 7850
    },
    {
      "epoch": 0.7875,
      "grad_norm": 3.253352403640747,
      "learning_rate": 1.4006578947368421e-06,
      "loss": 0.0598,
      "step": 7875
    },
    {
      "epoch": 0.79,
      "grad_norm": 2.5888381004333496,
      "learning_rate": 1.3842105263157896e-06,
      "loss": 0.0649,
      "step": 7900
    },
    {
      "epoch": 0.7925,
      "grad_norm": 3.9677650928497314,
      "learning_rate": 1.367763157894737e-06,
      "loss": 0.1026,
      "step": 7925
    },
    {
      "epoch": 0.795,
      "grad_norm": 5.140591144561768,
      "learning_rate": 1.3513157894736842e-06,
      "loss": 0.1107,
      "step": 7950
    },
    {
      "epoch": 0.7975,
      "grad_norm": 3.5645763874053955,
      "learning_rate": 1.3348684210526317e-06,
      "loss": 0.1665,
      "step": 7975
    },
    {
      "epoch": 0.8,
      "grad_norm": 4.830741882324219,
      "learning_rate": 1.318421052631579e-06,
      "loss": 0.131,
      "step": 8000
    },
    {
      "epoch": 0.8,
      "eval_loss": 0.2519243061542511,
      "eval_runtime": 311.2776,
      "eval_samples_per_second": 5.432,
      "eval_steps_per_second": 0.681,
      "eval_wer": 9.429249406769078,
      "step": 8000
    },
    {
      "epoch": 0.8025,
      "grad_norm": 3.7254385948181152,
      "learning_rate": 1.3019736842105263e-06,
      "loss": 0.1076,
      "step": 8025
    },
    {
      "epoch": 0.805,
      "grad_norm": 3.583624839782715,
      "learning_rate": 1.2855263157894737e-06,
      "loss": 0.1035,
      "step": 8050
    },
    {
      "epoch": 0.8075,
      "grad_norm": 5.079929828643799,
      "learning_rate": 1.2690789473684212e-06,
      "loss": 0.1234,
      "step": 8075
    },
    {
      "epoch": 0.81,
      "grad_norm": 6.1625518798828125,
      "learning_rate": 1.2526315789473685e-06,
      "loss": 0.188,
      "step": 8100
    },
    {
      "epoch": 0.8125,
      "grad_norm": 5.406413555145264,
      "learning_rate": 1.2361842105263158e-06,
      "loss": 0.1156,
      "step": 8125
    },
    {
      "epoch": 0.815,
      "grad_norm": 3.7361137866973877,
      "learning_rate": 1.2197368421052633e-06,
      "loss": 0.1003,
      "step": 8150
    },
    {
      "epoch": 0.8175,
      "grad_norm": 4.679581642150879,
      "learning_rate": 1.2032894736842106e-06,
      "loss": 0.109,
      "step": 8175
    },
    {
      "epoch": 0.82,
      "grad_norm": 2.9385409355163574,
      "learning_rate": 1.1868421052631579e-06,
      "loss": 0.1023,
      "step": 8200
    },
    {
      "epoch": 0.8225,
      "grad_norm": 2.628890037536621,
      "learning_rate": 1.1703947368421053e-06,
      "loss": 0.0779,
      "step": 8225
    },
    {
      "epoch": 0.825,
      "grad_norm": 3.1687421798706055,
      "learning_rate": 1.1539473684210526e-06,
      "loss": 0.0637,
      "step": 8250
    },
    {
      "epoch": 0.8275,
      "grad_norm": 4.13815975189209,
      "learning_rate": 1.1375000000000001e-06,
      "loss": 0.0644,
      "step": 8275
    },
    {
      "epoch": 0.83,
      "grad_norm": 2.5407676696777344,
      "learning_rate": 1.1210526315789474e-06,
      "loss": 0.0605,
      "step": 8300
    },
    {
      "epoch": 0.8325,
      "grad_norm": 2.8727688789367676,
      "learning_rate": 1.1046052631578949e-06,
      "loss": 0.0605,
      "step": 8325
    },
    {
      "epoch": 0.835,
      "grad_norm": 2.3151321411132812,
      "learning_rate": 1.0881578947368422e-06,
      "loss": 0.0661,
      "step": 8350
    },
    {
      "epoch": 0.8375,
      "grad_norm": 2.992656946182251,
      "learning_rate": 1.0717105263157895e-06,
      "loss": 0.057,
      "step": 8375
    },
    {
      "epoch": 0.84,
      "grad_norm": 2.188429355621338,
      "learning_rate": 1.055263157894737e-06,
      "loss": 0.0566,
      "step": 8400
    },
    {
      "epoch": 0.8425,
      "grad_norm": 2.960677146911621,
      "learning_rate": 1.0388157894736842e-06,
      "loss": 0.061,
      "step": 8425
    },
    {
      "epoch": 0.845,
      "grad_norm": 2.4574601650238037,
      "learning_rate": 1.0223684210526315e-06,
      "loss": 0.064,
      "step": 8450
    },
    {
      "epoch": 0.8475,
      "grad_norm": 2.7294602394104004,
      "learning_rate": 1.005921052631579e-06,
      "loss": 0.0673,
      "step": 8475
    },
    {
      "epoch": 0.85,
      "grad_norm": 2.5712780952453613,
      "learning_rate": 9.894736842105265e-07,
      "loss": 0.0525,
      "step": 8500
    },
    {
      "epoch": 0.85,
      "eval_loss": 0.2601895332336426,
      "eval_runtime": 310.5098,
      "eval_samples_per_second": 5.446,
      "eval_steps_per_second": 0.683,
      "eval_wer": 9.310603222180593,
      "step": 8500
    },
    {
      "epoch": 0.8525,
      "grad_norm": 2.370037078857422,
      "learning_rate": 9.730263157894738e-07,
      "loss": 0.059,
      "step": 8525
    },
    {
      "epoch": 0.855,
      "grad_norm": 3.927023410797119,
      "learning_rate": 9.56578947368421e-07,
      "loss": 0.0758,
      "step": 8550
    },
    {
      "epoch": 0.8575,
      "grad_norm": 9.844391822814941,
      "learning_rate": 9.401315789473685e-07,
      "loss": 0.1047,
      "step": 8575
    },
    {
      "epoch": 0.86,
      "grad_norm": 3.986123561859131,
      "learning_rate": 9.236842105263158e-07,
      "loss": 0.1226,
      "step": 8600
    },
    {
      "epoch": 0.8625,
      "grad_norm": 3.9579930305480957,
      "learning_rate": 9.072368421052631e-07,
      "loss": 0.0941,
      "step": 8625
    },
    {
      "epoch": 0.865,
      "grad_norm": 3.573427438735962,
      "learning_rate": 8.914473684210526e-07,
      "loss": 0.0898,
      "step": 8650
    },
    {
      "epoch": 0.8675,
      "grad_norm": 4.6763200759887695,
      "learning_rate": 8.750000000000001e-07,
      "loss": 0.1048,
      "step": 8675
    },
    {
      "epoch": 0.87,
      "grad_norm": 2.764732599258423,
      "learning_rate": 8.585526315789475e-07,
      "loss": 0.0942,
      "step": 8700
    },
    {
      "epoch": 0.8725,
      "grad_norm": 2.765056848526001,
      "learning_rate": 8.421052631578948e-07,
      "loss": 0.0591,
      "step": 8725
    },
    {
      "epoch": 0.875,
      "grad_norm": 2.8801324367523193,
      "learning_rate": 8.25657894736842e-07,
      "loss": 0.0582,
      "step": 8750
    },
    {
      "epoch": 0.8775,
      "grad_norm": 3.052685260772705,
      "learning_rate": 8.092105263157895e-07,
      "loss": 0.0535,
      "step": 8775
    },
    {
      "epoch": 0.88,
      "grad_norm": 2.4700539112091064,
      "learning_rate": 7.927631578947369e-07,
      "loss": 0.0493,
      "step": 8800
    },
    {
      "epoch": 0.8825,
      "grad_norm": 2.6473586559295654,
      "learning_rate": 7.763157894736843e-07,
      "loss": 0.0577,
      "step": 8825
    },
    {
      "epoch": 0.885,
      "grad_norm": 3.352184295654297,
      "learning_rate": 7.598684210526316e-07,
      "loss": 0.0577,
      "step": 8850
    },
    {
      "epoch": 0.8875,
      "grad_norm": 3.41894268989563,
      "learning_rate": 7.43421052631579e-07,
      "loss": 0.054,
      "step": 8875
    },
    {
      "epoch": 0.89,
      "grad_norm": 2.602926731109619,
      "learning_rate": 7.269736842105264e-07,
      "loss": 0.0736,
      "step": 8900
    },
    {
      "epoch": 0.8925,
      "grad_norm": 3.690354347229004,
      "learning_rate": 7.105263157894737e-07,
      "loss": 0.0777,
      "step": 8925
    },
    {
      "epoch": 0.895,
      "grad_norm": 2.299851417541504,
      "learning_rate": 6.940789473684211e-07,
      "loss": 0.0678,
      "step": 8950
    },
    {
      "epoch": 0.8975,
      "grad_norm": 3.3586931228637695,
      "learning_rate": 6.776315789473684e-07,
      "loss": 0.0613,
      "step": 8975
    },
    {
      "epoch": 0.9,
      "grad_norm": 3.6000983715057373,
      "learning_rate": 6.611842105263158e-07,
      "loss": 0.0729,
      "step": 9000
    },
    {
      "epoch": 0.9,
      "eval_loss": 0.25485458970069885,
      "eval_runtime": 310.9704,
      "eval_samples_per_second": 5.438,
      "eval_steps_per_second": 0.682,
      "eval_wer": 9.360559510428375,
      "step": 9000
    },
    {
      "epoch": 0.9025,
      "grad_norm": 4.219956398010254,
      "learning_rate": 6.447368421052632e-07,
      "loss": 0.0798,
      "step": 9025
    },
    {
      "epoch": 0.905,
      "grad_norm": 3.451380968093872,
      "learning_rate": 6.282894736842106e-07,
      "loss": 0.0883,
      "step": 9050
    },
    {
      "epoch": 0.9075,
      "grad_norm": 6.049487113952637,
      "learning_rate": 6.118421052631579e-07,
      "loss": 0.1061,
      "step": 9075
    },
    {
      "epoch": 0.91,
      "grad_norm": 4.350587368011475,
      "learning_rate": 5.953947368421052e-07,
      "loss": 0.1121,
      "step": 9100
    },
    {
      "epoch": 0.9125,
      "grad_norm": 3.110172748565674,
      "learning_rate": 5.789473684210527e-07,
      "loss": 0.1057,
      "step": 9125
    },
    {
      "epoch": 0.915,
      "grad_norm": 3.715651035308838,
      "learning_rate": 5.625e-07,
      "loss": 0.1151,
      "step": 9150
    },
    {
      "epoch": 0.9175,
      "grad_norm": 4.230985641479492,
      "learning_rate": 5.460526315789474e-07,
      "loss": 0.1148,
      "step": 9175
    },
    {
      "epoch": 0.92,
      "grad_norm": 4.521861553192139,
      "learning_rate": 5.296052631578947e-07,
      "loss": 0.1118,
      "step": 9200
    },
    {
      "epoch": 0.9225,
      "grad_norm": 5.54771614074707,
      "learning_rate": 5.131578947368422e-07,
      "loss": 0.1116,
      "step": 9225
    },
    {
      "epoch": 0.925,
      "grad_norm": 4.935422897338867,
      "learning_rate": 4.967105263157896e-07,
      "loss": 0.1148,
      "step": 9250
    },
    {
      "epoch": 0.9275,
      "grad_norm": 4.736454963684082,
      "learning_rate": 4.802631578947368e-07,
      "loss": 0.1133,
      "step": 9275
    },
    {
      "epoch": 0.93,
      "grad_norm": 4.2254719734191895,
      "learning_rate": 4.6381578947368423e-07,
      "loss": 0.1112,
      "step": 9300
    },
    {
      "epoch": 0.9325,
      "grad_norm": 3.934567451477051,
      "learning_rate": 4.4736842105263156e-07,
      "loss": 0.0977,
      "step": 9325
    },
    {
      "epoch": 0.935,
      "grad_norm": 4.1997551918029785,
      "learning_rate": 4.3092105263157895e-07,
      "loss": 0.0977,
      "step": 9350
    },
    {
      "epoch": 0.9375,
      "grad_norm": 4.566051959991455,
      "learning_rate": 4.144736842105264e-07,
| "loss": 0.0772, |
| "step": 9375 |
| }, |
| { |
| "epoch": 0.94, |
| "grad_norm": 2.0367753505706787, |
| "learning_rate": 3.9802631578947367e-07, |
| "loss": 0.0946, |
| "step": 9400 |
| }, |
| { |
| "epoch": 0.9425, |
| "grad_norm": 4.239922523498535, |
| "learning_rate": 3.8157894736842105e-07, |
| "loss": 0.0994, |
| "step": 9425 |
| }, |
| { |
| "epoch": 0.945, |
| "grad_norm": 3.877979278564453, |
| "learning_rate": 3.6513157894736844e-07, |
| "loss": 0.0916, |
| "step": 9450 |
| }, |
| { |
| "epoch": 0.9475, |
| "grad_norm": 2.586454153060913, |
| "learning_rate": 3.4868421052631583e-07, |
| "loss": 0.0961, |
| "step": 9475 |
| }, |
| { |
| "epoch": 0.95, |
| "grad_norm": 4.278528213500977, |
| "learning_rate": 3.3223684210526316e-07, |
| "loss": 0.0939, |
| "step": 9500 |
| }, |
| { |
| "epoch": 0.95, |
| "eval_loss": 0.2469732165336609, |
| "eval_runtime": 310.3095, |
| "eval_samples_per_second": 5.449, |
| "eval_steps_per_second": 0.683, |
| "eval_wer": 9.191957037592106, |
| "step": 9500 |
| }, |
| { |
| "epoch": 0.9525, |
| "grad_norm": 4.1038312911987305, |
| "learning_rate": 3.1578947368421055e-07, |
| "loss": 0.0676, |
| "step": 9525 |
| }, |
| { |
| "epoch": 0.955, |
| "grad_norm": 2.402942657470703, |
| "learning_rate": 2.993421052631579e-07, |
| "loss": 0.0607, |
| "step": 9550 |
| }, |
| { |
| "epoch": 0.9575, |
| "grad_norm": 3.902301073074341, |
| "learning_rate": 2.828947368421053e-07, |
| "loss": 0.0684, |
| "step": 9575 |
| }, |
| { |
| "epoch": 0.96, |
| "grad_norm": 2.5158138275146484, |
| "learning_rate": 2.6644736842105266e-07, |
| "loss": 0.0682, |
| "step": 9600 |
| }, |
| { |
| "epoch": 0.9625, |
| "grad_norm": 2.193450450897217, |
| "learning_rate": 2.5000000000000004e-07, |
| "loss": 0.0581, |
| "step": 9625 |
| }, |
| { |
| "epoch": 0.965, |
| "grad_norm": 2.9007558822631836, |
| "learning_rate": 2.3355263157894738e-07, |
| "loss": 0.0573, |
| "step": 9650 |
| }, |
| { |
| "epoch": 0.9675, |
| "grad_norm": 3.9886229038238525, |
| "learning_rate": 2.1710526315789474e-07, |
| "loss": 0.0698, |
| "step": 9675 |
| }, |
| { |
| "epoch": 0.97, |
| "grad_norm": 2.9294111728668213, |
| "learning_rate": 2.006578947368421e-07, |
| "loss": 0.0886, |
| "step": 9700 |
| }, |
| { |
| "epoch": 0.9725, |
| "grad_norm": 3.0293190479278564, |
| "learning_rate": 1.8421052631578948e-07, |
| "loss": 0.0885, |
| "step": 9725 |
| }, |
| { |
| "epoch": 0.975, |
| "grad_norm": 4.1082024574279785, |
| "learning_rate": 1.6776315789473687e-07, |
| "loss": 0.0903, |
| "step": 9750 |
| }, |
| { |
| "epoch": 0.9775, |
| "grad_norm": 3.775738477706909, |
| "learning_rate": 1.5131578947368423e-07, |
| "loss": 0.0911, |
| "step": 9775 |
| }, |
| { |
| "epoch": 0.98, |
| "grad_norm": 3.829111337661743, |
| "learning_rate": 1.348684210526316e-07, |
| "loss": 0.0948, |
| "step": 9800 |
| }, |
| { |
| "epoch": 0.9825, |
| "grad_norm": 2.917050361633301, |
| "learning_rate": 1.1842105263157896e-07, |
| "loss": 0.0949, |
| "step": 9825 |
| }, |
| { |
| "epoch": 0.985, |
| "grad_norm": 3.1675846576690674, |
| "learning_rate": 1.0197368421052632e-07, |
| "loss": 0.0698, |
| "step": 9850 |
| }, |
| { |
| "epoch": 0.9875, |
| "grad_norm": 2.507418394088745, |
| "learning_rate": 8.55263157894737e-08, |
| "loss": 0.0646, |
| "step": 9875 |
| }, |
| { |
| "epoch": 0.99, |
| "grad_norm": 1.9800117015838623, |
| "learning_rate": 6.907894736842106e-08, |
| "loss": 0.0701, |
| "step": 9900 |
| }, |
| { |
| "epoch": 0.9925, |
| "grad_norm": 2.250523328781128, |
| "learning_rate": 5.263157894736842e-08, |
| "loss": 0.0584, |
| "step": 9925 |
| }, |
| { |
| "epoch": 0.995, |
| "grad_norm": 2.814079523086548, |
| "learning_rate": 3.6184210526315796e-08, |
| "loss": 0.0656, |
| "step": 9950 |
| }, |
| { |
| "epoch": 0.9975, |
| "grad_norm": 1.7679643630981445, |
| "learning_rate": 1.973684210526316e-08, |
| "loss": 0.0589, |
| "step": 9975 |
| }, |
| { |
| "epoch": 1.0, |
| "grad_norm": 2.8489911556243896, |
| "learning_rate": 3.2894736842105264e-09, |
| "loss": 0.0639, |
| "step": 10000 |
| }, |
| { |
| "epoch": 1.0, |
| "eval_loss": 0.24879895150661469, |
| "eval_runtime": 311.7211, |
| "eval_samples_per_second": 5.425, |
| "eval_steps_per_second": 0.68, |
| "eval_wer": 9.104533533158486, |
| "step": 10000 |
| }, |
| { |
| "epoch": 1.0, |
| "step": 10000, |
| "total_flos": 1.632967852032e+20, |
| "train_loss": 0.14161870262622833, |
| "train_runtime": 23298.766, |
| "train_samples_per_second": 6.867, |
| "train_steps_per_second": 0.429 |
| } |
| ], |
| "logging_steps": 25, |
| "max_steps": 10000, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 9223372036854775807, |
| "save_steps": 1000, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 1.632967852032e+20, |
| "train_batch_size": 16, |
| "trial_name": null, |
| "trial_params": null |
| } |
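
The file above follows the Hugging Face Trainer trainer_state.json layout: each entry in log_history is either a training log (carrying a "loss" key) or an evaluation log (carrying "eval_*" keys), and the final entry summarizes the whole run. Below is a minimal Python sketch for pulling the loss and WER curves back out of this file; the filename "trainer_state.json" is an assumption here, since the log itself does not say where it was saved.

import json

# Load the trainer state; "trainer_state.json" is an assumed local path.
with open("trainer_state.json") as f:
    state = json.load(f)

# Training entries carry "loss"; evaluation entries carry "eval_wer".
# The final run-summary entry uses "train_loss" instead, so both filters skip it.
train_curve = [(e["step"], e["loss"]) for e in state["log_history"] if "loss" in e]
eval_curve = [(e["step"], e["eval_wer"]) for e in state["log_history"] if "eval_wer" in e]

summary = state["log_history"][-1]
print(f"{len(train_curve)} training logs; overall train_loss = {summary['train_loss']:.4f}")
for step, wer in eval_curve:
    print(f"step {step:>5}: eval_wer = {wer:.2f}")

Run against this file, the loop prints one line per evaluation (logged every 500 steps), ending at step 10000 with a word error rate of about 9.10.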