| { |
| "best_metric": 90.76850542197077, |
| "best_model_checkpoint": "./iteboshi_student_model_temp/checkpoint-5000", |
| "epoch": 22.026431718061673, |
| "eval_steps": 1000, |
| "global_step": 20000, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.02753303964757709, |
| "grad_norm": 7.0630106925964355, |
| "learning_rate": 1.0000000000000002e-06, |
| "loss": 11.955, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.05506607929515418, |
| "grad_norm": 5.473441123962402, |
| "learning_rate": 2.0000000000000003e-06, |
| "loss": 10.7888, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.08259911894273128, |
| "grad_norm": 3.244906187057495, |
| "learning_rate": 3e-06, |
| "loss": 8.3248, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.11013215859030837, |
| "grad_norm": 2.3983211517333984, |
| "learning_rate": 4.000000000000001e-06, |
| "loss": 6.2859, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.13766519823788545, |
| "grad_norm": 1.7205311059951782, |
| "learning_rate": 5e-06, |
| "loss": 5.1612, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.16519823788546256, |
| "grad_norm": 1.4956705570220947, |
| "learning_rate": 6e-06, |
| "loss": 4.6601, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.19273127753303965, |
| "grad_norm": 1.3801974058151245, |
| "learning_rate": 7e-06, |
| "loss": 4.2995, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.22026431718061673, |
| "grad_norm": 1.2982876300811768, |
| "learning_rate": 8.000000000000001e-06, |
| "loss": 4.1134, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.24779735682819384, |
| "grad_norm": 4.815393447875977, |
| "learning_rate": 9e-06, |
| "loss": 3.9592, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.2753303964757709, |
| "grad_norm": 1.2964017391204834, |
| "learning_rate": 1e-05, |
| "loss": 3.8346, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.30286343612334804, |
| "grad_norm": 1.2769396305084229, |
| "learning_rate": 1.1000000000000001e-05, |
| "loss": 3.7025, |
| "step": 275 |
| }, |
| { |
| "epoch": 0.3303964757709251, |
| "grad_norm": 1.2632490396499634, |
| "learning_rate": 1.2e-05, |
| "loss": 3.5762, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.3579295154185022, |
| "grad_norm": 1.296427607536316, |
| "learning_rate": 1.3000000000000001e-05, |
| "loss": 3.4899, |
| "step": 325 |
| }, |
| { |
| "epoch": 0.3854625550660793, |
| "grad_norm": 1.271353006362915, |
| "learning_rate": 1.4e-05, |
| "loss": 3.4158, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.4129955947136564, |
| "grad_norm": 1.4885754585266113, |
| "learning_rate": 1.5000000000000002e-05, |
| "loss": 3.2939, |
| "step": 375 |
| }, |
| { |
| "epoch": 0.44052863436123346, |
| "grad_norm": 1.4233094453811646, |
| "learning_rate": 1.6000000000000003e-05, |
| "loss": 3.1291, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.46806167400881055, |
| "grad_norm": 1.2837520837783813, |
| "learning_rate": 1.7e-05, |
| "loss": 3.0305, |
| "step": 425 |
| }, |
| { |
| "epoch": 0.4955947136563877, |
| "grad_norm": 1.5688410997390747, |
| "learning_rate": 1.8e-05, |
| "loss": 2.9491, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.5231277533039648, |
| "grad_norm": 1.5300930738449097, |
| "learning_rate": 1.9e-05, |
| "loss": 2.8143, |
| "step": 475 |
| }, |
| { |
| "epoch": 0.5506607929515418, |
| "grad_norm": 1.5693153142929077, |
| "learning_rate": 2e-05, |
| "loss": 2.6308, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.5781938325991189, |
| "grad_norm": 1.5616507530212402, |
| "learning_rate": 1.9974358974358975e-05, |
| "loss": 2.5482, |
| "step": 525 |
| }, |
| { |
| "epoch": 0.6057268722466961, |
| "grad_norm": 1.5664052963256836, |
| "learning_rate": 1.994871794871795e-05, |
| "loss": 2.3664, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.6332599118942731, |
| "grad_norm": 1.5610151290893555, |
| "learning_rate": 1.9923076923076926e-05, |
| "loss": 2.2403, |
| "step": 575 |
| }, |
| { |
| "epoch": 0.6607929515418502, |
| "grad_norm": 1.4449105262756348, |
| "learning_rate": 1.98974358974359e-05, |
| "loss": 2.0901, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.6883259911894273, |
| "grad_norm": 1.5401190519332886, |
| "learning_rate": 1.9871794871794873e-05, |
| "loss": 1.9593, |
| "step": 625 |
| }, |
| { |
| "epoch": 0.7158590308370044, |
| "grad_norm": 1.6314514875411987, |
| "learning_rate": 1.9846153846153847e-05, |
| "loss": 1.9258, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.7433920704845814, |
| "grad_norm": 2.754133939743042, |
| "learning_rate": 1.9820512820512824e-05, |
| "loss": 1.8044, |
| "step": 675 |
| }, |
| { |
| "epoch": 0.7709251101321586, |
| "grad_norm": 1.4502687454223633, |
| "learning_rate": 1.9794871794871798e-05, |
| "loss": 1.7527, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.7984581497797357, |
| "grad_norm": 2.3516573905944824, |
| "learning_rate": 1.976923076923077e-05, |
| "loss": 1.7341, |
| "step": 725 |
| }, |
| { |
| "epoch": 0.8259911894273128, |
| "grad_norm": 1.7115260362625122, |
| "learning_rate": 1.9743589743589745e-05, |
| "loss": 1.6904, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.8535242290748899, |
| "grad_norm": 2.4372172355651855, |
| "learning_rate": 1.9717948717948722e-05, |
| "loss": 1.562, |
| "step": 775 |
| }, |
| { |
| "epoch": 0.8810572687224669, |
| "grad_norm": 1.575111746788025, |
| "learning_rate": 1.9692307692307696e-05, |
| "loss": 1.556, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.9085903083700441, |
| "grad_norm": 2.196908473968506, |
| "learning_rate": 1.9666666666666666e-05, |
| "loss": 1.4173, |
| "step": 825 |
| }, |
| { |
| "epoch": 0.9361233480176211, |
| "grad_norm": 2.892843008041382, |
| "learning_rate": 1.9641025641025643e-05, |
| "loss": 1.453, |
| "step": 850 |
| }, |
| { |
| "epoch": 0.9636563876651982, |
| "grad_norm": 1.197956919670105, |
| "learning_rate": 1.9615384615384617e-05, |
| "loss": 1.4298, |
| "step": 875 |
| }, |
| { |
| "epoch": 0.9911894273127754, |
| "grad_norm": 3.7183637619018555, |
| "learning_rate": 1.958974358974359e-05, |
| "loss": 1.3977, |
| "step": 900 |
| }, |
| { |
| "epoch": 1.0187224669603525, |
| "grad_norm": 3.0768394470214844, |
| "learning_rate": 1.9564102564102564e-05, |
| "loss": 1.3247, |
| "step": 925 |
| }, |
| { |
| "epoch": 1.0462555066079295, |
| "grad_norm": 1.5889824628829956, |
| "learning_rate": 1.953846153846154e-05, |
| "loss": 1.1798, |
| "step": 950 |
| }, |
| { |
| "epoch": 1.0737885462555066, |
| "grad_norm": 1.464847207069397, |
| "learning_rate": 1.9512820512820515e-05, |
| "loss": 1.1679, |
| "step": 975 |
| }, |
| { |
| "epoch": 1.1013215859030836, |
| "grad_norm": 1.1854747533798218, |
| "learning_rate": 1.9487179487179488e-05, |
| "loss": 1.1451, |
| "step": 1000 |
| }, |
| { |
| "epoch": 1.1013215859030836, |
| "eval_cer": 52.99542604289661, |
| "eval_loss": 1.312658667564392, |
| "eval_runtime": 802.8816, |
| "eval_samples_per_second": 13.179, |
| "eval_steps_per_second": 3.296, |
| "eval_wer": 98.00094295143799, |
| "step": 1000 |
| }, |
| { |
| "epoch": 1.1288546255506609, |
| "grad_norm": 1.2571742534637451, |
| "learning_rate": 1.9461538461538462e-05, |
| "loss": 1.138, |
| "step": 1025 |
| }, |
| { |
| "epoch": 1.1563876651982379, |
| "grad_norm": 2.522019624710083, |
| "learning_rate": 1.943589743589744e-05, |
| "loss": 1.1544, |
| "step": 1050 |
| }, |
| { |
| "epoch": 1.183920704845815, |
| "grad_norm": 2.5862576961517334, |
| "learning_rate": 1.9410256410256413e-05, |
| "loss": 1.1321, |
| "step": 1075 |
| }, |
| { |
| "epoch": 1.2114537444933922, |
| "grad_norm": 1.393262267112732, |
| "learning_rate": 1.9384615384615386e-05, |
| "loss": 1.063, |
| "step": 1100 |
| }, |
| { |
| "epoch": 1.2389867841409692, |
| "grad_norm": 1.243342638015747, |
| "learning_rate": 1.935897435897436e-05, |
| "loss": 1.137, |
| "step": 1125 |
| }, |
| { |
| "epoch": 1.2665198237885462, |
| "grad_norm": 1.507082462310791, |
| "learning_rate": 1.9333333333333333e-05, |
| "loss": 1.053, |
| "step": 1150 |
| }, |
| { |
| "epoch": 1.2940528634361232, |
| "grad_norm": 1.4709209203720093, |
| "learning_rate": 1.930769230769231e-05, |
| "loss": 1.0388, |
| "step": 1175 |
| }, |
| { |
| "epoch": 1.3215859030837005, |
| "grad_norm": 1.3831913471221924, |
| "learning_rate": 1.9282051282051284e-05, |
| "loss": 1.075, |
| "step": 1200 |
| }, |
| { |
| "epoch": 1.3491189427312775, |
| "grad_norm": 2.1179986000061035, |
| "learning_rate": 1.9256410256410258e-05, |
| "loss": 1.0266, |
| "step": 1225 |
| }, |
| { |
| "epoch": 1.3766519823788546, |
| "grad_norm": 2.009942054748535, |
| "learning_rate": 1.923076923076923e-05, |
| "loss": 1.0482, |
| "step": 1250 |
| }, |
| { |
| "epoch": 1.4041850220264318, |
| "grad_norm": 1.2496631145477295, |
| "learning_rate": 1.920512820512821e-05, |
| "loss": 0.9894, |
| "step": 1275 |
| }, |
| { |
| "epoch": 1.4317180616740088, |
| "grad_norm": 1.1840794086456299, |
| "learning_rate": 1.9179487179487182e-05, |
| "loss": 0.9793, |
| "step": 1300 |
| }, |
| { |
| "epoch": 1.4592511013215859, |
| "grad_norm": 1.4337009191513062, |
| "learning_rate": 1.9153846153846156e-05, |
| "loss": 1.037, |
| "step": 1325 |
| }, |
| { |
| "epoch": 1.4867841409691631, |
| "grad_norm": 1.1954481601715088, |
| "learning_rate": 1.912820512820513e-05, |
| "loss": 0.9891, |
| "step": 1350 |
| }, |
| { |
| "epoch": 1.51431718061674, |
| "grad_norm": 1.710187554359436, |
| "learning_rate": 1.9102564102564106e-05, |
| "loss": 0.9872, |
| "step": 1375 |
| }, |
| { |
| "epoch": 1.5418502202643172, |
| "grad_norm": 1.7128686904907227, |
| "learning_rate": 1.907692307692308e-05, |
| "loss": 0.996, |
| "step": 1400 |
| }, |
| { |
| "epoch": 1.5693832599118944, |
| "grad_norm": 1.127489686012268, |
| "learning_rate": 1.905128205128205e-05, |
| "loss": 0.9847, |
| "step": 1425 |
| }, |
| { |
| "epoch": 1.5969162995594712, |
| "grad_norm": 2.570929765701294, |
| "learning_rate": 1.9025641025641027e-05, |
| "loss": 0.9708, |
| "step": 1450 |
| }, |
| { |
| "epoch": 1.6244493392070485, |
| "grad_norm": 1.8463544845581055, |
| "learning_rate": 1.9e-05, |
| "loss": 0.9262, |
| "step": 1475 |
| }, |
| { |
| "epoch": 1.6519823788546255, |
| "grad_norm": 2.259000062942505, |
| "learning_rate": 1.8974358974358975e-05, |
| "loss": 0.8969, |
| "step": 1500 |
| }, |
| { |
| "epoch": 1.6795154185022025, |
| "grad_norm": 1.1015965938568115, |
| "learning_rate": 1.894871794871795e-05, |
| "loss": 0.9137, |
| "step": 1525 |
| }, |
| { |
| "epoch": 1.7070484581497798, |
| "grad_norm": 1.5946825742721558, |
| "learning_rate": 1.8923076923076925e-05, |
| "loss": 0.9043, |
| "step": 1550 |
| }, |
| { |
| "epoch": 1.7345814977973568, |
| "grad_norm": 1.1002482175827026, |
| "learning_rate": 1.88974358974359e-05, |
| "loss": 0.8996, |
| "step": 1575 |
| }, |
| { |
| "epoch": 1.7621145374449338, |
| "grad_norm": 1.141486644744873, |
| "learning_rate": 1.8871794871794873e-05, |
| "loss": 0.9047, |
| "step": 1600 |
| }, |
| { |
| "epoch": 1.789647577092511, |
| "grad_norm": 1.5526583194732666, |
| "learning_rate": 1.8846153846153846e-05, |
| "loss": 0.957, |
| "step": 1625 |
| }, |
| { |
| "epoch": 1.8171806167400881, |
| "grad_norm": 2.08101224899292, |
| "learning_rate": 1.8820512820512823e-05, |
| "loss": 0.9406, |
| "step": 1650 |
| }, |
| { |
| "epoch": 1.8447136563876652, |
| "grad_norm": 1.4825433492660522, |
| "learning_rate": 1.8794871794871797e-05, |
| "loss": 0.9434, |
| "step": 1675 |
| }, |
| { |
| "epoch": 1.8722466960352424, |
| "grad_norm": 2.4470510482788086, |
| "learning_rate": 1.876923076923077e-05, |
| "loss": 0.8763, |
| "step": 1700 |
| }, |
| { |
| "epoch": 1.8997797356828194, |
| "grad_norm": 1.226882815361023, |
| "learning_rate": 1.8743589743589744e-05, |
| "loss": 0.8782, |
| "step": 1725 |
| }, |
| { |
| "epoch": 1.9273127753303965, |
| "grad_norm": 1.1586799621582031, |
| "learning_rate": 1.8717948717948718e-05, |
| "loss": 0.8885, |
| "step": 1750 |
| }, |
| { |
| "epoch": 1.9548458149779737, |
| "grad_norm": 2.1358814239501953, |
| "learning_rate": 1.8692307692307695e-05, |
| "loss": 0.8911, |
| "step": 1775 |
| }, |
| { |
| "epoch": 1.9823788546255505, |
| "grad_norm": 1.2554844617843628, |
| "learning_rate": 1.866666666666667e-05, |
| "loss": 0.8718, |
| "step": 1800 |
| }, |
| { |
| "epoch": 2.0099118942731278, |
| "grad_norm": 1.0800217390060425, |
| "learning_rate": 1.8641025641025642e-05, |
| "loss": 0.8163, |
| "step": 1825 |
| }, |
| { |
| "epoch": 2.037444933920705, |
| "grad_norm": 1.413068413734436, |
| "learning_rate": 1.8615384615384616e-05, |
| "loss": 0.6879, |
| "step": 1850 |
| }, |
| { |
| "epoch": 2.064977973568282, |
| "grad_norm": 1.199027419090271, |
| "learning_rate": 1.8589743589743593e-05, |
| "loss": 0.6669, |
| "step": 1875 |
| }, |
| { |
| "epoch": 2.092511013215859, |
| "grad_norm": 0.9802116751670837, |
| "learning_rate": 1.8564102564102567e-05, |
| "loss": 0.6631, |
| "step": 1900 |
| }, |
| { |
| "epoch": 2.1200440528634363, |
| "grad_norm": 1.1249160766601562, |
| "learning_rate": 1.853846153846154e-05, |
| "loss": 0.7245, |
| "step": 1925 |
| }, |
| { |
| "epoch": 2.147577092511013, |
| "grad_norm": 1.1133592128753662, |
| "learning_rate": 1.8512820512820514e-05, |
| "loss": 0.646, |
| "step": 1950 |
| }, |
| { |
| "epoch": 2.1751101321585904, |
| "grad_norm": 0.9653272032737732, |
| "learning_rate": 1.848717948717949e-05, |
| "loss": 0.6674, |
| "step": 1975 |
| }, |
| { |
| "epoch": 2.202643171806167, |
| "grad_norm": 0.9605304002761841, |
| "learning_rate": 1.8461538461538465e-05, |
| "loss": 0.6729, |
| "step": 2000 |
| }, |
| { |
| "epoch": 2.202643171806167, |
| "eval_cer": 36.00758887118407, |
| "eval_loss": 0.9211130738258362, |
| "eval_runtime": 725.5723, |
| "eval_samples_per_second": 14.583, |
| "eval_steps_per_second": 3.647, |
| "eval_wer": 94.93635077793495, |
| "step": 2000 |
| }, |
| { |
| "epoch": 2.2301762114537445, |
| "grad_norm": 1.5967915058135986, |
| "learning_rate": 1.8435897435897435e-05, |
| "loss": 0.6661, |
| "step": 2025 |
| }, |
| { |
| "epoch": 2.2577092511013217, |
| "grad_norm": 1.0568780899047852, |
| "learning_rate": 1.8410256410256412e-05, |
| "loss": 0.6776, |
| "step": 2050 |
| }, |
| { |
| "epoch": 2.2852422907488985, |
| "grad_norm": 0.981234073638916, |
| "learning_rate": 1.8384615384615386e-05, |
| "loss": 0.6297, |
| "step": 2075 |
| }, |
| { |
| "epoch": 2.3127753303964758, |
| "grad_norm": 1.799085259437561, |
| "learning_rate": 1.835897435897436e-05, |
| "loss": 0.6518, |
| "step": 2100 |
| }, |
| { |
| "epoch": 2.340308370044053, |
| "grad_norm": 1.3996877670288086, |
| "learning_rate": 1.8333333333333333e-05, |
| "loss": 0.6158, |
| "step": 2125 |
| }, |
| { |
| "epoch": 2.36784140969163, |
| "grad_norm": 1.4223437309265137, |
| "learning_rate": 1.830769230769231e-05, |
| "loss": 0.6413, |
| "step": 2150 |
| }, |
| { |
| "epoch": 2.395374449339207, |
| "grad_norm": 0.9599340558052063, |
| "learning_rate": 1.8282051282051284e-05, |
| "loss": 0.6391, |
| "step": 2175 |
| }, |
| { |
| "epoch": 2.4229074889867843, |
| "grad_norm": 1.0826489925384521, |
| "learning_rate": 1.8256410256410257e-05, |
| "loss": 0.6207, |
| "step": 2200 |
| }, |
| { |
| "epoch": 2.450440528634361, |
| "grad_norm": 1.255297303199768, |
| "learning_rate": 1.823076923076923e-05, |
| "loss": 0.6033, |
| "step": 2225 |
| }, |
| { |
| "epoch": 2.4779735682819384, |
| "grad_norm": 1.2208808660507202, |
| "learning_rate": 1.8205128205128208e-05, |
| "loss": 0.6331, |
| "step": 2250 |
| }, |
| { |
| "epoch": 2.505506607929515, |
| "grad_norm": 0.898612916469574, |
| "learning_rate": 1.817948717948718e-05, |
| "loss": 0.6794, |
| "step": 2275 |
| }, |
| { |
| "epoch": 2.5330396475770924, |
| "grad_norm": 1.1741799116134644, |
| "learning_rate": 1.8153846153846155e-05, |
| "loss": 0.6322, |
| "step": 2300 |
| }, |
| { |
| "epoch": 2.5605726872246697, |
| "grad_norm": 1.1415616273880005, |
| "learning_rate": 1.812820512820513e-05, |
| "loss": 0.6263, |
| "step": 2325 |
| }, |
| { |
| "epoch": 2.5881057268722465, |
| "grad_norm": 0.988058865070343, |
| "learning_rate": 1.8102564102564102e-05, |
| "loss": 0.6351, |
| "step": 2350 |
| }, |
| { |
| "epoch": 2.6156387665198237, |
| "grad_norm": 0.9876915812492371, |
| "learning_rate": 1.807692307692308e-05, |
| "loss": 0.629, |
| "step": 2375 |
| }, |
| { |
| "epoch": 2.643171806167401, |
| "grad_norm": 1.1506072282791138, |
| "learning_rate": 1.8051282051282053e-05, |
| "loss": 0.598, |
| "step": 2400 |
| }, |
| { |
| "epoch": 2.670704845814978, |
| "grad_norm": 0.9459457397460938, |
| "learning_rate": 1.8025641025641027e-05, |
| "loss": 0.5881, |
| "step": 2425 |
| }, |
| { |
| "epoch": 2.698237885462555, |
| "grad_norm": 1.0385560989379883, |
| "learning_rate": 1.8e-05, |
| "loss": 0.6527, |
| "step": 2450 |
| }, |
| { |
| "epoch": 2.7257709251101323, |
| "grad_norm": 1.0275869369506836, |
| "learning_rate": 1.7974358974358977e-05, |
| "loss": 0.5693, |
| "step": 2475 |
| }, |
| { |
| "epoch": 2.753303964757709, |
| "grad_norm": 1.068461537361145, |
| "learning_rate": 1.794871794871795e-05, |
| "loss": 0.6406, |
| "step": 2500 |
| }, |
| { |
| "epoch": 2.7808370044052864, |
| "grad_norm": 0.8605853915214539, |
| "learning_rate": 1.7923076923076925e-05, |
| "loss": 0.6138, |
| "step": 2525 |
| }, |
| { |
| "epoch": 2.8083700440528636, |
| "grad_norm": 1.1672123670578003, |
| "learning_rate": 1.78974358974359e-05, |
| "loss": 0.5752, |
| "step": 2550 |
| }, |
| { |
| "epoch": 2.8359030837004404, |
| "grad_norm": 1.1020793914794922, |
| "learning_rate": 1.7871794871794875e-05, |
| "loss": 0.6057, |
| "step": 2575 |
| }, |
| { |
| "epoch": 2.8634361233480177, |
| "grad_norm": 0.9963453412055969, |
| "learning_rate": 1.784615384615385e-05, |
| "loss": 0.6324, |
| "step": 2600 |
| }, |
| { |
| "epoch": 2.890969162995595, |
| "grad_norm": 0.9259366989135742, |
| "learning_rate": 1.7820512820512823e-05, |
| "loss": 0.5967, |
| "step": 2625 |
| }, |
| { |
| "epoch": 2.9185022026431717, |
| "grad_norm": 0.9898194074630737, |
| "learning_rate": 1.7794871794871796e-05, |
| "loss": 0.5975, |
| "step": 2650 |
| }, |
| { |
| "epoch": 2.946035242290749, |
| "grad_norm": 0.9436148405075073, |
| "learning_rate": 1.776923076923077e-05, |
| "loss": 0.6018, |
| "step": 2675 |
| }, |
| { |
| "epoch": 2.9735682819383262, |
| "grad_norm": 1.2582800388336182, |
| "learning_rate": 1.7743589743589744e-05, |
| "loss": 0.58, |
| "step": 2700 |
| }, |
| { |
| "epoch": 3.001101321585903, |
| "grad_norm": 0.7944229245185852, |
| "learning_rate": 1.7717948717948717e-05, |
| "loss": 0.6239, |
| "step": 2725 |
| }, |
| { |
| "epoch": 3.0286343612334803, |
| "grad_norm": 0.9075860977172852, |
| "learning_rate": 1.7692307692307694e-05, |
| "loss": 0.4649, |
| "step": 2750 |
| }, |
| { |
| "epoch": 3.056167400881057, |
| "grad_norm": 1.5187524557113647, |
| "learning_rate": 1.7666666666666668e-05, |
| "loss": 0.4216, |
| "step": 2775 |
| }, |
| { |
| "epoch": 3.0837004405286343, |
| "grad_norm": 0.9071484804153442, |
| "learning_rate": 1.7641025641025642e-05, |
| "loss": 0.4377, |
| "step": 2800 |
| }, |
| { |
| "epoch": 3.1112334801762116, |
| "grad_norm": 0.5699480772018433, |
| "learning_rate": 1.7615384615384615e-05, |
| "loss": 0.4108, |
| "step": 2825 |
| }, |
| { |
| "epoch": 3.1387665198237884, |
| "grad_norm": 0.8742319941520691, |
| "learning_rate": 1.7589743589743592e-05, |
| "loss": 0.4213, |
| "step": 2850 |
| }, |
| { |
| "epoch": 3.1662995594713657, |
| "grad_norm": 0.9574070572853088, |
| "learning_rate": 1.7564102564102566e-05, |
| "loss": 0.4529, |
| "step": 2875 |
| }, |
| { |
| "epoch": 3.193832599118943, |
| "grad_norm": 0.9607532620429993, |
| "learning_rate": 1.753846153846154e-05, |
| "loss": 0.4519, |
| "step": 2900 |
| }, |
| { |
| "epoch": 3.2213656387665197, |
| "grad_norm": 0.8327065706253052, |
| "learning_rate": 1.7512820512820513e-05, |
| "loss": 0.4539, |
| "step": 2925 |
| }, |
| { |
| "epoch": 3.248898678414097, |
| "grad_norm": 0.8266808390617371, |
| "learning_rate": 1.7487179487179487e-05, |
| "loss": 0.4381, |
| "step": 2950 |
| }, |
| { |
| "epoch": 3.2764317180616738, |
| "grad_norm": 1.3495973348617554, |
| "learning_rate": 1.7461538461538464e-05, |
| "loss": 0.4255, |
| "step": 2975 |
| }, |
| { |
| "epoch": 3.303964757709251, |
| "grad_norm": 0.781352698802948, |
| "learning_rate": 1.7435897435897438e-05, |
| "loss": 0.4087, |
| "step": 3000 |
| }, |
| { |
| "epoch": 3.303964757709251, |
| "eval_cer": 33.05916339000149, |
| "eval_loss": 0.8169652223587036, |
| "eval_runtime": 726.2485, |
| "eval_samples_per_second": 14.569, |
| "eval_steps_per_second": 3.643, |
| "eval_wer": 91.47571900047147, |
| "step": 3000 |
| }, |
| { |
| "epoch": 3.3314977973568283, |
| "grad_norm": 0.8976072669029236, |
| "learning_rate": 1.741025641025641e-05, |
| "loss": 0.4375, |
| "step": 3025 |
| }, |
| { |
| "epoch": 3.359030837004405, |
| "grad_norm": 0.9293546080589294, |
| "learning_rate": 1.7384615384615385e-05, |
| "loss": 0.4645, |
| "step": 3050 |
| }, |
| { |
| "epoch": 3.3865638766519823, |
| "grad_norm": 0.8952093124389648, |
| "learning_rate": 1.7358974358974362e-05, |
| "loss": 0.4217, |
| "step": 3075 |
| }, |
| { |
| "epoch": 3.4140969162995596, |
| "grad_norm": 0.9495489597320557, |
| "learning_rate": 1.7333333333333336e-05, |
| "loss": 0.4842, |
| "step": 3100 |
| }, |
| { |
| "epoch": 3.4416299559471364, |
| "grad_norm": 0.9268874526023865, |
| "learning_rate": 1.730769230769231e-05, |
| "loss": 0.4474, |
| "step": 3125 |
| }, |
| { |
| "epoch": 3.4691629955947136, |
| "grad_norm": 1.2101101875305176, |
| "learning_rate": 1.7282051282051283e-05, |
| "loss": 0.4241, |
| "step": 3150 |
| }, |
| { |
| "epoch": 3.496696035242291, |
| "grad_norm": 0.9907839894294739, |
| "learning_rate": 1.725641025641026e-05, |
| "loss": 0.445, |
| "step": 3175 |
| }, |
| { |
| "epoch": 3.5242290748898677, |
| "grad_norm": 0.9446345567703247, |
| "learning_rate": 1.7230769230769234e-05, |
| "loss": 0.4501, |
| "step": 3200 |
| }, |
| { |
| "epoch": 3.551762114537445, |
| "grad_norm": 0.8528251051902771, |
| "learning_rate": 1.7205128205128207e-05, |
| "loss": 0.4304, |
| "step": 3225 |
| }, |
| { |
| "epoch": 3.579295154185022, |
| "grad_norm": 0.8596051335334778, |
| "learning_rate": 1.717948717948718e-05, |
| "loss": 0.4218, |
| "step": 3250 |
| }, |
| { |
| "epoch": 3.606828193832599, |
| "grad_norm": 0.7655468583106995, |
| "learning_rate": 1.7153846153846155e-05, |
| "loss": 0.4416, |
| "step": 3275 |
| }, |
| { |
| "epoch": 3.6343612334801763, |
| "grad_norm": 0.9918355941772461, |
| "learning_rate": 1.7128205128205128e-05, |
| "loss": 0.452, |
| "step": 3300 |
| }, |
| { |
| "epoch": 3.6618942731277535, |
| "grad_norm": 0.6974489688873291, |
| "learning_rate": 1.7102564102564102e-05, |
| "loss": 0.3897, |
| "step": 3325 |
| }, |
| { |
| "epoch": 3.6894273127753303, |
| "grad_norm": 1.001642107963562, |
| "learning_rate": 1.707692307692308e-05, |
| "loss": 0.4241, |
| "step": 3350 |
| }, |
| { |
| "epoch": 3.7169603524229076, |
| "grad_norm": 1.0877162218093872, |
| "learning_rate": 1.7051282051282053e-05, |
| "loss": 0.3882, |
| "step": 3375 |
| }, |
| { |
| "epoch": 3.744493392070485, |
| "grad_norm": 0.8201428055763245, |
| "learning_rate": 1.7025641025641026e-05, |
| "loss": 0.4318, |
| "step": 3400 |
| }, |
| { |
| "epoch": 3.7720264317180616, |
| "grad_norm": 0.9629787802696228, |
| "learning_rate": 1.7e-05, |
| "loss": 0.4128, |
| "step": 3425 |
| }, |
| { |
| "epoch": 3.799559471365639, |
| "grad_norm": 0.8146516680717468, |
| "learning_rate": 1.6974358974358977e-05, |
| "loss": 0.4122, |
| "step": 3450 |
| }, |
| { |
| "epoch": 3.827092511013216, |
| "grad_norm": 0.9245569109916687, |
| "learning_rate": 1.694871794871795e-05, |
| "loss": 0.4031, |
| "step": 3475 |
| }, |
| { |
| "epoch": 3.854625550660793, |
| "grad_norm": 0.8833574652671814, |
| "learning_rate": 1.6923076923076924e-05, |
| "loss": 0.479, |
| "step": 3500 |
| }, |
| { |
| "epoch": 3.88215859030837, |
| "grad_norm": 0.7967398166656494, |
| "learning_rate": 1.6897435897435898e-05, |
| "loss": 0.4076, |
| "step": 3525 |
| }, |
| { |
| "epoch": 3.909691629955947, |
| "grad_norm": 0.8753622770309448, |
| "learning_rate": 1.687179487179487e-05, |
| "loss": 0.4233, |
| "step": 3550 |
| }, |
| { |
| "epoch": 3.9372246696035242, |
| "grad_norm": 0.8417605757713318, |
| "learning_rate": 1.684615384615385e-05, |
| "loss": 0.4512, |
| "step": 3575 |
| }, |
| { |
| "epoch": 3.964757709251101, |
| "grad_norm": 0.8642501831054688, |
| "learning_rate": 1.6820512820512822e-05, |
| "loss": 0.4442, |
| "step": 3600 |
| }, |
| { |
| "epoch": 3.9922907488986783, |
| "grad_norm": 1.236092209815979, |
| "learning_rate": 1.6794871794871796e-05, |
| "loss": 0.4411, |
| "step": 3625 |
| }, |
| { |
| "epoch": 4.0198237885462555, |
| "grad_norm": 1.4108121395111084, |
| "learning_rate": 1.676923076923077e-05, |
| "loss": 0.3219, |
| "step": 3650 |
| }, |
| { |
| "epoch": 4.047356828193832, |
| "grad_norm": 1.2637825012207031, |
| "learning_rate": 1.6743589743589747e-05, |
| "loss": 0.3046, |
| "step": 3675 |
| }, |
| { |
| "epoch": 4.07488986784141, |
| "grad_norm": 0.7874261736869812, |
| "learning_rate": 1.671794871794872e-05, |
| "loss": 0.2738, |
| "step": 3700 |
| }, |
| { |
| "epoch": 4.102422907488987, |
| "grad_norm": 0.6468262672424316, |
| "learning_rate": 1.6692307692307694e-05, |
| "loss": 0.3136, |
| "step": 3725 |
| }, |
| { |
| "epoch": 4.129955947136564, |
| "grad_norm": 0.8033445477485657, |
| "learning_rate": 1.6666666666666667e-05, |
| "loss": 0.3052, |
| "step": 3750 |
| }, |
| { |
| "epoch": 4.157488986784141, |
| "grad_norm": 0.8105751872062683, |
| "learning_rate": 1.6641025641025645e-05, |
| "loss": 0.2836, |
| "step": 3775 |
| }, |
| { |
| "epoch": 4.185022026431718, |
| "grad_norm": 0.6188549995422363, |
| "learning_rate": 1.6615384615384618e-05, |
| "loss": 0.2522, |
| "step": 3800 |
| }, |
| { |
| "epoch": 4.212555066079295, |
| "grad_norm": 0.697938859462738, |
| "learning_rate": 1.6589743589743592e-05, |
| "loss": 0.2917, |
| "step": 3825 |
| }, |
| { |
| "epoch": 4.240088105726873, |
| "grad_norm": 0.7022973895072937, |
| "learning_rate": 1.6564102564102565e-05, |
| "loss": 0.2803, |
| "step": 3850 |
| }, |
| { |
| "epoch": 4.2676211453744495, |
| "grad_norm": 0.7884867191314697, |
| "learning_rate": 1.653846153846154e-05, |
| "loss": 0.3195, |
| "step": 3875 |
| }, |
| { |
| "epoch": 4.295154185022026, |
| "grad_norm": 0.6753418445587158, |
| "learning_rate": 1.6512820512820513e-05, |
| "loss": 0.309, |
| "step": 3900 |
| }, |
| { |
| "epoch": 4.322687224669604, |
| "grad_norm": 0.822180986404419, |
| "learning_rate": 1.6487179487179486e-05, |
| "loss": 0.3211, |
| "step": 3925 |
| }, |
| { |
| "epoch": 4.350220264317181, |
| "grad_norm": 0.8807072639465332, |
| "learning_rate": 1.6461538461538463e-05, |
| "loss": 0.2958, |
| "step": 3950 |
| }, |
| { |
| "epoch": 4.377753303964758, |
| "grad_norm": 0.6565355062484741, |
| "learning_rate": 1.6435897435897437e-05, |
| "loss": 0.3209, |
| "step": 3975 |
| }, |
| { |
| "epoch": 4.405286343612334, |
| "grad_norm": 0.910705029964447, |
| "learning_rate": 1.641025641025641e-05, |
| "loss": 0.303, |
| "step": 4000 |
| }, |
| { |
| "epoch": 4.405286343612334, |
| "eval_cer": 35.491155869911616, |
| "eval_loss": 0.7996189594268799, |
| "eval_runtime": 729.2489, |
| "eval_samples_per_second": 14.509, |
| "eval_steps_per_second": 3.628, |
| "eval_wer": 95.96416784535596, |
| "step": 4000 |
| }, |
| { |
| "epoch": 4.432819383259912, |
| "grad_norm": 1.0489437580108643, |
| "learning_rate": 1.6384615384615384e-05, |
| "loss": 0.3326, |
| "step": 4025 |
| }, |
| { |
| "epoch": 4.460352422907489, |
| "grad_norm": 0.9662296175956726, |
| "learning_rate": 1.635897435897436e-05, |
| "loss": 0.332, |
| "step": 4050 |
| }, |
| { |
| "epoch": 4.487885462555066, |
| "grad_norm": 0.8950140476226807, |
| "learning_rate": 1.6333333333333335e-05, |
| "loss": 0.3279, |
| "step": 4075 |
| }, |
| { |
| "epoch": 4.515418502202643, |
| "grad_norm": 0.7907012104988098, |
| "learning_rate": 1.630769230769231e-05, |
| "loss": 0.2761, |
| "step": 4100 |
| }, |
| { |
| "epoch": 4.54295154185022, |
| "grad_norm": 0.723153829574585, |
| "learning_rate": 1.6282051282051282e-05, |
| "loss": 0.3136, |
| "step": 4125 |
| }, |
| { |
| "epoch": 4.570484581497797, |
| "grad_norm": 0.7711037397384644, |
| "learning_rate": 1.625641025641026e-05, |
| "loss": 0.3254, |
| "step": 4150 |
| }, |
| { |
| "epoch": 4.598017621145375, |
| "grad_norm": 0.8112226128578186, |
| "learning_rate": 1.6230769230769233e-05, |
| "loss": 0.3089, |
| "step": 4175 |
| }, |
| { |
| "epoch": 4.6255506607929515, |
| "grad_norm": 0.6841309070587158, |
| "learning_rate": 1.6205128205128207e-05, |
| "loss": 0.2948, |
| "step": 4200 |
| }, |
| { |
| "epoch": 4.653083700440528, |
| "grad_norm": 0.8256305456161499, |
| "learning_rate": 1.617948717948718e-05, |
| "loss": 0.3045, |
| "step": 4225 |
| }, |
| { |
| "epoch": 4.680616740088106, |
| "grad_norm": 0.8179898262023926, |
| "learning_rate": 1.6153846153846154e-05, |
| "loss": 0.2873, |
| "step": 4250 |
| }, |
| { |
| "epoch": 4.708149779735683, |
| "grad_norm": 0.6983807682991028, |
| "learning_rate": 1.612820512820513e-05, |
| "loss": 0.2981, |
| "step": 4275 |
| }, |
| { |
| "epoch": 4.73568281938326, |
| "grad_norm": 0.9936075806617737, |
| "learning_rate": 1.6102564102564105e-05, |
| "loss": 0.31, |
| "step": 4300 |
| }, |
| { |
| "epoch": 4.763215859030837, |
| "grad_norm": 0.7161526679992676, |
| "learning_rate": 1.607692307692308e-05, |
| "loss": 0.303, |
| "step": 4325 |
| }, |
| { |
| "epoch": 4.790748898678414, |
| "grad_norm": 0.7693957686424255, |
| "learning_rate": 1.6051282051282052e-05, |
| "loss": 0.3067, |
| "step": 4350 |
| }, |
| { |
| "epoch": 4.818281938325991, |
| "grad_norm": 0.8263546824455261, |
| "learning_rate": 1.602564102564103e-05, |
| "loss": 0.3104, |
| "step": 4375 |
| }, |
| { |
| "epoch": 4.845814977973569, |
| "grad_norm": 0.540008544921875, |
| "learning_rate": 1.6000000000000003e-05, |
| "loss": 0.2855, |
| "step": 4400 |
| }, |
| { |
| "epoch": 4.8733480176211454, |
| "grad_norm": 0.7057309746742249, |
| "learning_rate": 1.5974358974358976e-05, |
| "loss": 0.2847, |
| "step": 4425 |
| }, |
| { |
| "epoch": 4.900881057268722, |
| "grad_norm": 0.7432449460029602, |
| "learning_rate": 1.594871794871795e-05, |
| "loss": 0.303, |
| "step": 4450 |
| }, |
| { |
| "epoch": 4.9284140969163, |
| "grad_norm": 0.7021868824958801, |
| "learning_rate": 1.5923076923076924e-05, |
| "loss": 0.2833, |
| "step": 4475 |
| }, |
| { |
| "epoch": 4.955947136563877, |
| "grad_norm": 0.7491558194160461, |
| "learning_rate": 1.5897435897435897e-05, |
| "loss": 0.2936, |
| "step": 4500 |
| }, |
| { |
| "epoch": 4.983480176211454, |
| "grad_norm": 0.6752347350120544, |
| "learning_rate": 1.587179487179487e-05, |
| "loss": 0.3019, |
| "step": 4525 |
| }, |
| { |
| "epoch": 5.011013215859031, |
| "grad_norm": 0.6153631806373596, |
| "learning_rate": 1.5846153846153848e-05, |
| "loss": 0.2446, |
| "step": 4550 |
| }, |
| { |
| "epoch": 5.038546255506608, |
| "grad_norm": 0.3978467881679535, |
| "learning_rate": 1.582051282051282e-05, |
| "loss": 0.2114, |
| "step": 4575 |
| }, |
| { |
| "epoch": 5.066079295154185, |
| "grad_norm": 0.6924332976341248, |
| "learning_rate": 1.5794871794871795e-05, |
| "loss": 0.1749, |
| "step": 4600 |
| }, |
| { |
| "epoch": 5.093612334801762, |
| "grad_norm": 0.5500088930130005, |
| "learning_rate": 1.576923076923077e-05, |
| "loss": 0.2242, |
| "step": 4625 |
| }, |
| { |
| "epoch": 5.121145374449339, |
| "grad_norm": 0.515316367149353, |
| "learning_rate": 1.5743589743589746e-05, |
| "loss": 0.2148, |
| "step": 4650 |
| }, |
| { |
| "epoch": 5.148678414096916, |
| "grad_norm": 0.446718692779541, |
| "learning_rate": 1.571794871794872e-05, |
| "loss": 0.2136, |
| "step": 4675 |
| }, |
| { |
| "epoch": 5.176211453744493, |
| "grad_norm": 0.48106926679611206, |
| "learning_rate": 1.5692307692307693e-05, |
| "loss": 0.1853, |
| "step": 4700 |
| }, |
| { |
| "epoch": 5.203744493392071, |
| "grad_norm": 0.49837005138397217, |
| "learning_rate": 1.5666666666666667e-05, |
| "loss": 0.2107, |
| "step": 4725 |
| }, |
| { |
| "epoch": 5.2312775330396475, |
| "grad_norm": 0.9187563061714172, |
| "learning_rate": 1.5641025641025644e-05, |
| "loss": 0.1851, |
| "step": 4750 |
| }, |
| { |
| "epoch": 5.258810572687224, |
| "grad_norm": 0.6566996574401855, |
| "learning_rate": 1.5615384615384618e-05, |
| "loss": 0.2419, |
| "step": 4775 |
| }, |
| { |
| "epoch": 5.286343612334802, |
| "grad_norm": 0.48451924324035645, |
| "learning_rate": 1.558974358974359e-05, |
| "loss": 0.1932, |
| "step": 4800 |
| }, |
| { |
| "epoch": 5.313876651982379, |
| "grad_norm": 0.43492308259010315, |
| "learning_rate": 1.5564102564102565e-05, |
| "loss": 0.1785, |
| "step": 4825 |
| }, |
| { |
| "epoch": 5.341409691629956, |
| "grad_norm": 0.5435590147972107, |
| "learning_rate": 1.553846153846154e-05, |
| "loss": 0.1986, |
| "step": 4850 |
| }, |
| { |
| "epoch": 5.368942731277533, |
| "grad_norm": 0.5883561372756958, |
| "learning_rate": 1.5512820512820516e-05, |
| "loss": 0.2044, |
| "step": 4875 |
| }, |
| { |
| "epoch": 5.39647577092511, |
| "grad_norm": 0.7071317434310913, |
| "learning_rate": 1.548717948717949e-05, |
| "loss": 0.1901, |
| "step": 4900 |
| }, |
| { |
| "epoch": 5.424008810572687, |
| "grad_norm": 0.6903939247131348, |
| "learning_rate": 1.5461538461538463e-05, |
| "loss": 0.2427, |
| "step": 4925 |
| }, |
| { |
| "epoch": 5.451541850220265, |
| "grad_norm": 0.8299788236618042, |
| "learning_rate": 1.5435897435897436e-05, |
| "loss": 0.2219, |
| "step": 4950 |
| }, |
| { |
| "epoch": 5.479074889867841, |
| "grad_norm": 0.7349267601966858, |
| "learning_rate": 1.5410256410256414e-05, |
| "loss": 0.2021, |
| "step": 4975 |
| }, |
| { |
| "epoch": 5.506607929515418, |
| "grad_norm": 0.7333081960678101, |
| "learning_rate": 1.5384615384615387e-05, |
| "loss": 0.211, |
| "step": 5000 |
| }, |
| { |
| "epoch": 5.506607929515418, |
| "eval_cer": 39.8708057730446, |
| "eval_loss": 0.7910177707672119, |
| "eval_runtime": 793.4489, |
| "eval_samples_per_second": 13.335, |
| "eval_steps_per_second": 3.335, |
| "eval_wer": 90.76850542197077, |
| "step": 5000 |
| }, |
| { |
| "epoch": 5.534140969162996, |
| "grad_norm": 0.6456535458564758, |
| "learning_rate": 1.535897435897436e-05, |
| "loss": 0.2052, |
| "step": 5025 |
| }, |
| { |
| "epoch": 5.561674008810573, |
| "grad_norm": 0.5858263969421387, |
| "learning_rate": 1.5333333333333334e-05, |
| "loss": 0.2287, |
| "step": 5050 |
| }, |
| { |
| "epoch": 5.5892070484581495, |
| "grad_norm": 0.8086832165718079, |
| "learning_rate": 1.5307692307692308e-05, |
| "loss": 0.2084, |
| "step": 5075 |
| }, |
| { |
| "epoch": 5.616740088105727, |
| "grad_norm": 0.7293650507926941, |
| "learning_rate": 1.5282051282051282e-05, |
| "loss": 0.2037, |
| "step": 5100 |
| }, |
| { |
| "epoch": 5.644273127753304, |
| "grad_norm": 0.7970709800720215, |
| "learning_rate": 1.5256410256410257e-05, |
| "loss": 0.2093, |
| "step": 5125 |
| }, |
| { |
| "epoch": 5.671806167400881, |
| "grad_norm": 0.6010832786560059, |
| "learning_rate": 1.523076923076923e-05, |
| "loss": 0.224, |
| "step": 5150 |
| }, |
| { |
| "epoch": 5.6993392070484585, |
| "grad_norm": 0.6445685625076294, |
| "learning_rate": 1.5205128205128206e-05, |
| "loss": 0.184, |
| "step": 5175 |
| }, |
| { |
| "epoch": 5.726872246696035, |
| "grad_norm": 0.6301259994506836, |
| "learning_rate": 1.517948717948718e-05, |
| "loss": 0.2357, |
| "step": 5200 |
| }, |
| { |
| "epoch": 5.754405286343612, |
| "grad_norm": 0.6070804595947266, |
| "learning_rate": 1.5153846153846155e-05, |
| "loss": 0.2029, |
| "step": 5225 |
| }, |
| { |
| "epoch": 5.78193832599119, |
| "grad_norm": 0.7481094002723694, |
| "learning_rate": 1.5128205128205129e-05, |
| "loss": 0.2419, |
| "step": 5250 |
| }, |
| { |
| "epoch": 5.809471365638767, |
| "grad_norm": 0.6122421622276306, |
| "learning_rate": 1.5102564102564104e-05, |
| "loss": 0.2123, |
| "step": 5275 |
| }, |
| { |
| "epoch": 5.8370044052863435, |
| "grad_norm": 0.7841750383377075, |
| "learning_rate": 1.5076923076923078e-05, |
| "loss": 0.2072, |
| "step": 5300 |
| }, |
| { |
| "epoch": 5.864537444933921, |
| "grad_norm": 0.7243798971176147, |
| "learning_rate": 1.5051282051282053e-05, |
| "loss": 0.2082, |
| "step": 5325 |
| }, |
| { |
| "epoch": 5.892070484581498, |
| "grad_norm": 0.7279136180877686, |
| "learning_rate": 1.5025641025641027e-05, |
| "loss": 0.2209, |
| "step": 5350 |
| }, |
| { |
| "epoch": 5.919603524229075, |
| "grad_norm": 0.5851007699966431, |
| "learning_rate": 1.5000000000000002e-05, |
| "loss": 0.1917, |
| "step": 5375 |
| }, |
| { |
| "epoch": 5.9471365638766525, |
| "grad_norm": 0.7515987157821655, |
| "learning_rate": 1.4974358974358976e-05, |
| "loss": 0.201, |
| "step": 5400 |
| }, |
| { |
| "epoch": 5.974669603524229, |
| "grad_norm": 0.7539699077606201, |
| "learning_rate": 1.494871794871795e-05, |
| "loss": 0.1881, |
| "step": 5425 |
| }, |
| { |
| "epoch": 6.002202643171806, |
| "grad_norm": 0.5623589158058167, |
| "learning_rate": 1.4923076923076925e-05, |
| "loss": 0.1731, |
| "step": 5450 |
| }, |
| { |
| "epoch": 6.029735682819383, |
| "grad_norm": 0.5110183358192444, |
| "learning_rate": 1.4897435897435898e-05, |
| "loss": 0.1283, |
| "step": 5475 |
| }, |
| { |
| "epoch": 6.057268722466961, |
| "grad_norm": 0.48959916830062866, |
| "learning_rate": 1.4871794871794874e-05, |
| "loss": 0.1116, |
| "step": 5500 |
| }, |
| { |
| "epoch": 6.084801762114537, |
| "grad_norm": 0.5873674750328064, |
| "learning_rate": 1.4846153846153847e-05, |
| "loss": 0.1396, |
| "step": 5525 |
| }, |
| { |
| "epoch": 6.112334801762114, |
| "grad_norm": 0.6229241490364075, |
| "learning_rate": 1.4820512820512823e-05, |
| "loss": 0.1478, |
| "step": 5550 |
| }, |
| { |
| "epoch": 6.139867841409692, |
| "grad_norm": 0.39609479904174805, |
| "learning_rate": 1.4794871794871796e-05, |
| "loss": 0.1335, |
| "step": 5575 |
| }, |
| { |
| "epoch": 6.167400881057269, |
| "grad_norm": 0.5592531561851501, |
| "learning_rate": 1.4769230769230772e-05, |
| "loss": 0.1574, |
| "step": 5600 |
| }, |
| { |
| "epoch": 6.1949339207048455, |
| "grad_norm": 0.4108933210372925, |
| "learning_rate": 1.4743589743589745e-05, |
| "loss": 0.1082, |
| "step": 5625 |
| }, |
| { |
| "epoch": 6.222466960352423, |
| "grad_norm": 0.7125318646430969, |
| "learning_rate": 1.471794871794872e-05, |
| "loss": 0.1455, |
| "step": 5650 |
| }, |
| { |
| "epoch": 6.25, |
| "grad_norm": 0.6028311252593994, |
| "learning_rate": 1.4692307692307694e-05, |
| "loss": 0.1476, |
| "step": 5675 |
| }, |
| { |
| "epoch": 6.277533039647577, |
| "grad_norm": 0.3542225956916809, |
| "learning_rate": 1.4666666666666666e-05, |
| "loss": 0.1148, |
| "step": 5700 |
| }, |
| { |
| "epoch": 6.3050660792951545, |
| "grad_norm": 0.3824445307254791, |
| "learning_rate": 1.4641025641025642e-05, |
| "loss": 0.1366, |
| "step": 5725 |
| }, |
| { |
| "epoch": 6.332599118942731, |
| "grad_norm": 0.4482337534427643, |
| "learning_rate": 1.4615384615384615e-05, |
| "loss": 0.1176, |
| "step": 5750 |
| }, |
| { |
| "epoch": 6.360132158590308, |
| "grad_norm": 0.6524671316146851, |
| "learning_rate": 1.458974358974359e-05, |
| "loss": 0.1275, |
| "step": 5775 |
| }, |
| { |
| "epoch": 6.387665198237886, |
| "grad_norm": 0.6443710327148438, |
| "learning_rate": 1.4564102564102564e-05, |
| "loss": 0.1425, |
| "step": 5800 |
| }, |
| { |
| "epoch": 6.415198237885463, |
| "grad_norm": 0.4919815957546234, |
| "learning_rate": 1.453846153846154e-05, |
| "loss": 0.1277, |
| "step": 5825 |
| }, |
| { |
| "epoch": 6.442731277533039, |
| "grad_norm": 0.6433417201042175, |
| "learning_rate": 1.4512820512820513e-05, |
| "loss": 0.1519, |
| "step": 5850 |
| }, |
| { |
| "epoch": 6.470264317180617, |
| "grad_norm": 0.687128484249115, |
| "learning_rate": 1.4487179487179489e-05, |
| "loss": 0.1572, |
| "step": 5875 |
| }, |
| { |
| "epoch": 6.497797356828194, |
| "grad_norm": 0.5720986127853394, |
| "learning_rate": 1.4461538461538462e-05, |
| "loss": 0.1427, |
| "step": 5900 |
| }, |
| { |
| "epoch": 6.525330396475771, |
| "grad_norm": 0.5719828009605408, |
| "learning_rate": 1.4435897435897438e-05, |
| "loss": 0.155, |
| "step": 5925 |
| }, |
| { |
| "epoch": 6.5528634361233475, |
| "grad_norm": 1.1837940216064453, |
| "learning_rate": 1.4410256410256411e-05, |
| "loss": 0.1468, |
| "step": 5950 |
| }, |
| { |
| "epoch": 6.580396475770925, |
| "grad_norm": 0.5758116245269775, |
| "learning_rate": 1.4384615384615387e-05, |
| "loss": 0.1384, |
| "step": 5975 |
| }, |
| { |
| "epoch": 6.607929515418502, |
| "grad_norm": 0.46409040689468384, |
| "learning_rate": 1.435897435897436e-05, |
| "loss": 0.1389, |
| "step": 6000 |
| }, |
| { |
| "epoch": 6.607929515418502, |
| "eval_cer": 46.33109029839625, |
| "eval_loss": 0.813330888748169, |
| "eval_runtime": 879.8873, |
| "eval_samples_per_second": 12.025, |
| "eval_steps_per_second": 3.007, |
| "eval_wer": 91.25884016973126, |
| "step": 6000 |
| }, |
| { |
| "epoch": 6.635462555066079, |
| "grad_norm": 0.6982928514480591, |
| "learning_rate": 1.4333333333333334e-05, |
| "loss": 0.1128, |
| "step": 6025 |
| }, |
| { |
| "epoch": 6.6629955947136565, |
| "grad_norm": 0.4645637273788452, |
| "learning_rate": 1.430769230769231e-05, |
| "loss": 0.1611, |
| "step": 6050 |
| }, |
| { |
| "epoch": 6.690528634361233, |
| "grad_norm": 0.7097769379615784, |
| "learning_rate": 1.4282051282051283e-05, |
| "loss": 0.1686, |
| "step": 6075 |
| }, |
| { |
| "epoch": 6.71806167400881, |
| "grad_norm": 0.6409720182418823, |
| "learning_rate": 1.4256410256410258e-05, |
| "loss": 0.1645, |
| "step": 6100 |
| }, |
| { |
| "epoch": 6.745594713656388, |
| "grad_norm": 0.4065110683441162, |
| "learning_rate": 1.4230769230769232e-05, |
| "loss": 0.1556, |
| "step": 6125 |
| }, |
| { |
| "epoch": 6.773127753303965, |
| "grad_norm": 0.5424155592918396, |
| "learning_rate": 1.4205128205128207e-05, |
| "loss": 0.1433, |
| "step": 6150 |
| }, |
| { |
| "epoch": 6.8006607929515415, |
| "grad_norm": 0.547636091709137, |
| "learning_rate": 1.4179487179487181e-05, |
| "loss": 0.1419, |
| "step": 6175 |
| }, |
| { |
| "epoch": 6.828193832599119, |
| "grad_norm": 0.6900294423103333, |
| "learning_rate": 1.4153846153846156e-05, |
| "loss": 0.1317, |
| "step": 6200 |
| }, |
| { |
| "epoch": 6.855726872246696, |
| "grad_norm": 0.5622290372848511, |
| "learning_rate": 1.412820512820513e-05, |
| "loss": 0.1604, |
| "step": 6225 |
| }, |
| { |
| "epoch": 6.883259911894273, |
| "grad_norm": 0.47399094700813293, |
| "learning_rate": 1.4102564102564105e-05, |
| "loss": 0.1352, |
| "step": 6250 |
| }, |
| { |
| "epoch": 6.9107929515418505, |
| "grad_norm": 0.5894147157669067, |
| "learning_rate": 1.4076923076923079e-05, |
| "loss": 0.1316, |
| "step": 6275 |
| }, |
| { |
| "epoch": 6.938325991189427, |
| "grad_norm": 0.5824093222618103, |
| "learning_rate": 1.405128205128205e-05, |
| "loss": 0.1425, |
| "step": 6300 |
| }, |
| { |
| "epoch": 6.965859030837004, |
| "grad_norm": 0.6649166345596313, |
| "learning_rate": 1.4025641025641026e-05, |
| "loss": 0.147, |
| "step": 6325 |
| }, |
| { |
| "epoch": 6.993392070484582, |
| "grad_norm": 0.6389392018318176, |
| "learning_rate": 1.4e-05, |
| "loss": 0.1444, |
| "step": 6350 |
| }, |
| { |
| "epoch": 7.020925110132159, |
| "grad_norm": 0.2867998480796814, |
| "learning_rate": 1.3974358974358975e-05, |
| "loss": 0.0929, |
| "step": 6375 |
| }, |
| { |
| "epoch": 7.048458149779735, |
| "grad_norm": 0.4151160717010498, |
| "learning_rate": 1.3948717948717949e-05, |
| "loss": 0.0895, |
| "step": 6400 |
| }, |
| { |
| "epoch": 7.075991189427313, |
| "grad_norm": 0.28654003143310547, |
| "learning_rate": 1.3923076923076924e-05, |
| "loss": 0.0773, |
| "step": 6425 |
| }, |
| { |
| "epoch": 7.10352422907489, |
| "grad_norm": 0.40104398131370544, |
| "learning_rate": 1.3897435897435898e-05, |
| "loss": 0.0717, |
| "step": 6450 |
| }, |
| { |
| "epoch": 7.131057268722467, |
| "grad_norm": 0.3279833495616913, |
| "learning_rate": 1.3871794871794873e-05, |
| "loss": 0.0896, |
| "step": 6475 |
| }, |
| { |
| "epoch": 7.158590308370044, |
| "grad_norm": 0.5332910418510437, |
| "learning_rate": 1.3846153846153847e-05, |
| "loss": 0.099, |
| "step": 6500 |
| }, |
| { |
| "epoch": 7.186123348017621, |
| "grad_norm": 0.5952982902526855, |
| "learning_rate": 1.3820512820512822e-05, |
| "loss": 0.0917, |
| "step": 6525 |
| }, |
| { |
| "epoch": 7.213656387665198, |
| "grad_norm": 0.6255844831466675, |
| "learning_rate": 1.3794871794871796e-05, |
| "loss": 0.0912, |
| "step": 6550 |
| }, |
| { |
| "epoch": 7.241189427312776, |
| "grad_norm": 0.26723840832710266, |
| "learning_rate": 1.3769230769230771e-05, |
| "loss": 0.0916, |
| "step": 6575 |
| }, |
| { |
| "epoch": 7.2687224669603525, |
| "grad_norm": 0.6658725738525391, |
| "learning_rate": 1.3743589743589745e-05, |
| "loss": 0.0843, |
| "step": 6600 |
| }, |
| { |
| "epoch": 7.296255506607929, |
| "grad_norm": 0.7884182333946228, |
| "learning_rate": 1.3717948717948718e-05, |
| "loss": 0.1023, |
| "step": 6625 |
| }, |
| { |
| "epoch": 7.323788546255507, |
| "grad_norm": 0.3756254017353058, |
| "learning_rate": 1.3692307692307694e-05, |
| "loss": 0.0919, |
| "step": 6650 |
| }, |
| { |
| "epoch": 7.351321585903084, |
| "grad_norm": 0.4542330205440521, |
| "learning_rate": 1.3666666666666667e-05, |
| "loss": 0.1014, |
| "step": 6675 |
| }, |
| { |
| "epoch": 7.378854625550661, |
| "grad_norm": 0.4459224343299866, |
| "learning_rate": 1.3641025641025643e-05, |
| "loss": 0.0799, |
| "step": 6700 |
| }, |
| { |
| "epoch": 7.406387665198238, |
| "grad_norm": 0.27258846163749695, |
| "learning_rate": 1.3615384615384616e-05, |
| "loss": 0.0781, |
| "step": 6725 |
| }, |
| { |
| "epoch": 7.433920704845815, |
| "grad_norm": 0.3643723130226135, |
| "learning_rate": 1.3589743589743592e-05, |
| "loss": 0.0927, |
| "step": 6750 |
| }, |
| { |
| "epoch": 7.461453744493392, |
| "grad_norm": 0.4624654948711395, |
| "learning_rate": 1.3564102564102565e-05, |
| "loss": 0.104, |
| "step": 6775 |
| }, |
| { |
| "epoch": 7.48898678414097, |
| "grad_norm": 0.7944092750549316, |
| "learning_rate": 1.353846153846154e-05, |
| "loss": 0.0994, |
| "step": 6800 |
| }, |
| { |
| "epoch": 7.516519823788546, |
| "grad_norm": 0.47494593262672424, |
| "learning_rate": 1.3512820512820514e-05, |
| "loss": 0.0905, |
| "step": 6825 |
| }, |
| { |
| "epoch": 7.544052863436123, |
| "grad_norm": 0.38293811678886414, |
| "learning_rate": 1.348717948717949e-05, |
| "loss": 0.0881, |
| "step": 6850 |
| }, |
| { |
| "epoch": 7.5715859030837, |
| "grad_norm": 0.43859726190567017, |
| "learning_rate": 1.3461538461538463e-05, |
| "loss": 0.0844, |
| "step": 6875 |
| }, |
| { |
| "epoch": 7.599118942731278, |
| "grad_norm": 0.4808247983455658, |
| "learning_rate": 1.3435897435897435e-05, |
| "loss": 0.1076, |
| "step": 6900 |
| }, |
| { |
| "epoch": 7.6266519823788546, |
| "grad_norm": 0.2447817623615265, |
| "learning_rate": 1.341025641025641e-05, |
| "loss": 0.0839, |
| "step": 6925 |
| }, |
| { |
| "epoch": 7.654185022026431, |
| "grad_norm": 0.587374210357666, |
| "learning_rate": 1.3384615384615384e-05, |
| "loss": 0.1139, |
| "step": 6950 |
| }, |
| { |
| "epoch": 7.681718061674009, |
| "grad_norm": 0.5492860078811646, |
| "learning_rate": 1.335897435897436e-05, |
| "loss": 0.1034, |
| "step": 6975 |
| }, |
| { |
| "epoch": 7.709251101321586, |
| "grad_norm": 0.5353982448577881, |
| "learning_rate": 1.3333333333333333e-05, |
| "loss": 0.0864, |
| "step": 7000 |
| }, |
| { |
| "epoch": 7.709251101321586, |
| "eval_cer": 39.91780633475864, |
| "eval_loss": 0.8311607837677002, |
| "eval_runtime": 794.0532, |
| "eval_samples_per_second": 13.325, |
| "eval_steps_per_second": 3.332, |
| "eval_wer": 92.66383781235267, |
| "step": 7000 |
| }, |
| { |
| "epoch": 7.736784140969163, |
| "grad_norm": 0.6064012050628662, |
| "learning_rate": 1.3307692307692309e-05, |
| "loss": 0.1134, |
| "step": 7025 |
| }, |
| { |
| "epoch": 7.76431718061674, |
| "grad_norm": 0.4052427411079407, |
| "learning_rate": 1.3282051282051282e-05, |
| "loss": 0.0944, |
| "step": 7050 |
| }, |
| { |
| "epoch": 7.791850220264317, |
| "grad_norm": 0.5817214846611023, |
| "learning_rate": 1.3256410256410258e-05, |
| "loss": 0.0905, |
| "step": 7075 |
| }, |
| { |
| "epoch": 7.819383259911894, |
| "grad_norm": 0.566748321056366, |
| "learning_rate": 1.3230769230769231e-05, |
| "loss": 0.0856, |
| "step": 7100 |
| }, |
| { |
| "epoch": 7.846916299559472, |
| "grad_norm": 0.568682074546814, |
| "learning_rate": 1.3205128205128207e-05, |
| "loss": 0.0793, |
| "step": 7125 |
| }, |
| { |
| "epoch": 7.8744493392070485, |
| "grad_norm": 0.3249245882034302, |
| "learning_rate": 1.317948717948718e-05, |
| "loss": 0.1105, |
| "step": 7150 |
| }, |
| { |
| "epoch": 7.901982378854625, |
| "grad_norm": 0.33808597922325134, |
| "learning_rate": 1.3153846153846156e-05, |
| "loss": 0.1038, |
| "step": 7175 |
| }, |
| { |
| "epoch": 7.929515418502203, |
| "grad_norm": 0.7098983526229858, |
| "learning_rate": 1.312820512820513e-05, |
| "loss": 0.1197, |
| "step": 7200 |
| }, |
| { |
| "epoch": 7.95704845814978, |
| "grad_norm": 0.5019989609718323, |
| "learning_rate": 1.3102564102564103e-05, |
| "loss": 0.1096, |
| "step": 7225 |
| }, |
| { |
| "epoch": 7.984581497797357, |
| "grad_norm": 0.3687189817428589, |
| "learning_rate": 1.3076923076923078e-05, |
| "loss": 0.104, |
| "step": 7250 |
| }, |
| { |
| "epoch": 8.012114537444933, |
| "grad_norm": 0.3454643785953522, |
| "learning_rate": 1.3051282051282052e-05, |
| "loss": 0.1124, |
| "step": 7275 |
| }, |
| { |
| "epoch": 8.039647577092511, |
| "grad_norm": 0.4261622428894043, |
| "learning_rate": 1.3025641025641027e-05, |
| "loss": 0.0676, |
| "step": 7300 |
| }, |
| { |
| "epoch": 8.067180616740089, |
| "grad_norm": 0.6200215220451355, |
| "learning_rate": 1.3000000000000001e-05, |
| "loss": 0.0654, |
| "step": 7325 |
| }, |
| { |
| "epoch": 8.094713656387665, |
| "grad_norm": 0.2601291239261627, |
| "learning_rate": 1.2974358974358976e-05, |
| "loss": 0.0684, |
| "step": 7350 |
| }, |
| { |
| "epoch": 8.122246696035242, |
| "grad_norm": 0.43284061551094055, |
| "learning_rate": 1.294871794871795e-05, |
| "loss": 0.0666, |
| "step": 7375 |
| }, |
| { |
| "epoch": 8.14977973568282, |
| "grad_norm": 0.3861032724380493, |
| "learning_rate": 1.2923076923076925e-05, |
| "loss": 0.062, |
| "step": 7400 |
| }, |
| { |
| "epoch": 8.177312775330396, |
| "grad_norm": 0.20073507726192474, |
| "learning_rate": 1.2897435897435899e-05, |
| "loss": 0.0634, |
| "step": 7425 |
| }, |
| { |
| "epoch": 8.204845814977974, |
| "grad_norm": 0.16386602818965912, |
| "learning_rate": 1.2871794871794874e-05, |
| "loss": 0.0558, |
| "step": 7450 |
| }, |
| { |
| "epoch": 8.232378854625551, |
| "grad_norm": 0.5257241129875183, |
| "learning_rate": 1.2846153846153848e-05, |
| "loss": 0.0798, |
| "step": 7475 |
| }, |
| { |
| "epoch": 8.259911894273127, |
| "grad_norm": 0.2548966407775879, |
| "learning_rate": 1.2820512820512823e-05, |
| "loss": 0.056, |
| "step": 7500 |
| }, |
| { |
| "epoch": 8.287444933920705, |
| "grad_norm": 0.5919742584228516, |
| "learning_rate": 1.2794871794871795e-05, |
| "loss": 0.0608, |
| "step": 7525 |
| }, |
| { |
| "epoch": 8.314977973568283, |
| "grad_norm": 0.6152638792991638, |
| "learning_rate": 1.2769230769230769e-05, |
| "loss": 0.0592, |
| "step": 7550 |
| }, |
| { |
| "epoch": 8.342511013215859, |
| "grad_norm": 0.6119657158851624, |
| "learning_rate": 1.2743589743589744e-05, |
| "loss": 0.071, |
| "step": 7575 |
| }, |
| { |
| "epoch": 8.370044052863436, |
| "grad_norm": 0.2810268998146057, |
| "learning_rate": 1.2717948717948718e-05, |
| "loss": 0.0561, |
| "step": 7600 |
| }, |
| { |
| "epoch": 8.397577092511014, |
| "grad_norm": 0.5555744171142578, |
| "learning_rate": 1.2692307692307693e-05, |
| "loss": 0.0744, |
| "step": 7625 |
| }, |
| { |
| "epoch": 8.42511013215859, |
| "grad_norm": 0.3237517476081848, |
| "learning_rate": 1.2666666666666667e-05, |
| "loss": 0.0563, |
| "step": 7650 |
| }, |
| { |
| "epoch": 8.452643171806168, |
| "grad_norm": 0.48900526762008667, |
| "learning_rate": 1.2641025641025642e-05, |
| "loss": 0.0635, |
| "step": 7675 |
| }, |
| { |
| "epoch": 8.480176211453745, |
| "grad_norm": 0.22445560991764069, |
| "learning_rate": 1.2615384615384616e-05, |
| "loss": 0.0565, |
| "step": 7700 |
| }, |
| { |
| "epoch": 8.507709251101321, |
| "grad_norm": 0.2636544108390808, |
| "learning_rate": 1.2589743589743591e-05, |
| "loss": 0.0562, |
| "step": 7725 |
| }, |
| { |
| "epoch": 8.535242290748899, |
| "grad_norm": 0.5531124472618103, |
| "learning_rate": 1.2564102564102565e-05, |
| "loss": 0.0739, |
| "step": 7750 |
| }, |
| { |
| "epoch": 8.562775330396477, |
| "grad_norm": 0.3963180482387543, |
| "learning_rate": 1.253846153846154e-05, |
| "loss": 0.0723, |
| "step": 7775 |
| }, |
| { |
| "epoch": 8.590308370044053, |
| "grad_norm": 0.39044126868247986, |
| "learning_rate": 1.2512820512820514e-05, |
| "loss": 0.0709, |
| "step": 7800 |
| }, |
| { |
| "epoch": 8.61784140969163, |
| "grad_norm": 0.17790181934833527, |
| "learning_rate": 1.2487179487179487e-05, |
| "loss": 0.0761, |
| "step": 7825 |
| }, |
| { |
| "epoch": 8.645374449339208, |
| "grad_norm": 0.4327470064163208, |
| "learning_rate": 1.2461538461538463e-05, |
| "loss": 0.0558, |
| "step": 7850 |
| }, |
| { |
| "epoch": 8.672907488986784, |
| "grad_norm": 0.40538233518600464, |
| "learning_rate": 1.2435897435897436e-05, |
| "loss": 0.0525, |
| "step": 7875 |
| }, |
| { |
| "epoch": 8.700440528634362, |
| "grad_norm": 0.6931684613227844, |
| "learning_rate": 1.2410256410256412e-05, |
| "loss": 0.0714, |
| "step": 7900 |
| }, |
| { |
| "epoch": 8.72797356828194, |
| "grad_norm": 0.34026893973350525, |
| "learning_rate": 1.2384615384615385e-05, |
| "loss": 0.0672, |
| "step": 7925 |
| }, |
| { |
| "epoch": 8.755506607929515, |
| "grad_norm": 0.5128918886184692, |
| "learning_rate": 1.235897435897436e-05, |
| "loss": 0.0683, |
| "step": 7950 |
| }, |
| { |
| "epoch": 8.783039647577093, |
| "grad_norm": 0.6940311789512634, |
| "learning_rate": 1.2333333333333334e-05, |
| "loss": 0.0662, |
| "step": 7975 |
| }, |
| { |
| "epoch": 8.810572687224669, |
| "grad_norm": 0.6918107271194458, |
| "learning_rate": 1.230769230769231e-05, |
| "loss": 0.0729, |
| "step": 8000 |
| }, |
| { |
| "epoch": 8.810572687224669, |
| "eval_cer": 50.74341132369631, |
| "eval_loss": 0.8530433773994446, |
| "eval_runtime": 917.533, |
| "eval_samples_per_second": 11.532, |
| "eval_steps_per_second": 2.884, |
| "eval_wer": 91.61716171617161, |
| "step": 8000 |
| }, |
| { |
| "epoch": 8.838105726872246, |
| "grad_norm": 0.41443535685539246, |
| "learning_rate": 1.2282051282051283e-05, |
| "loss": 0.0601, |
| "step": 8025 |
| }, |
| { |
| "epoch": 8.865638766519824, |
| "grad_norm": 0.7258812785148621, |
| "learning_rate": 1.2256410256410259e-05, |
| "loss": 0.0713, |
| "step": 8050 |
| }, |
| { |
| "epoch": 8.8931718061674, |
| "grad_norm": 0.4442296326160431, |
| "learning_rate": 1.2230769230769232e-05, |
| "loss": 0.0656, |
| "step": 8075 |
| }, |
| { |
| "epoch": 8.920704845814978, |
| "grad_norm": 0.2696678638458252, |
| "learning_rate": 1.2205128205128208e-05, |
| "loss": 0.0781, |
| "step": 8100 |
| }, |
| { |
| "epoch": 8.948237885462555, |
| "grad_norm": 0.37608885765075684, |
| "learning_rate": 1.217948717948718e-05, |
| "loss": 0.0617, |
| "step": 8125 |
| }, |
| { |
| "epoch": 8.975770925110131, |
| "grad_norm": 0.34984931349754333, |
| "learning_rate": 1.2153846153846153e-05, |
| "loss": 0.052, |
| "step": 8150 |
| }, |
| { |
| "epoch": 9.003303964757709, |
| "grad_norm": 0.19837628304958344, |
| "learning_rate": 1.2128205128205129e-05, |
| "loss": 0.0659, |
| "step": 8175 |
| }, |
| { |
| "epoch": 9.030837004405287, |
| "grad_norm": 0.3520928621292114, |
| "learning_rate": 1.2102564102564102e-05, |
| "loss": 0.0356, |
| "step": 8200 |
| }, |
| { |
| "epoch": 9.058370044052863, |
| "grad_norm": 0.1488259732723236, |
| "learning_rate": 1.2076923076923078e-05, |
| "loss": 0.0365, |
| "step": 8225 |
| }, |
| { |
| "epoch": 9.08590308370044, |
| "grad_norm": 0.5500857830047607, |
| "learning_rate": 1.2051282051282051e-05, |
| "loss": 0.0328, |
| "step": 8250 |
| }, |
| { |
| "epoch": 9.113436123348018, |
| "grad_norm": 0.4940767288208008, |
| "learning_rate": 1.2025641025641027e-05, |
| "loss": 0.0385, |
| "step": 8275 |
| }, |
| { |
| "epoch": 9.140969162995594, |
| "grad_norm": 0.499952495098114, |
| "learning_rate": 1.2e-05, |
| "loss": 0.0441, |
| "step": 8300 |
| }, |
| { |
| "epoch": 9.168502202643172, |
| "grad_norm": 0.28660815954208374, |
| "learning_rate": 1.1974358974358976e-05, |
| "loss": 0.0302, |
| "step": 8325 |
| }, |
| { |
| "epoch": 9.19603524229075, |
| "grad_norm": 0.29444143176078796, |
| "learning_rate": 1.194871794871795e-05, |
| "loss": 0.0435, |
| "step": 8350 |
| }, |
| { |
| "epoch": 9.223568281938325, |
| "grad_norm": 0.17032678425312042, |
| "learning_rate": 1.1923076923076925e-05, |
| "loss": 0.0383, |
| "step": 8375 |
| }, |
| { |
| "epoch": 9.251101321585903, |
| "grad_norm": 0.2526102364063263, |
| "learning_rate": 1.1897435897435898e-05, |
| "loss": 0.0392, |
| "step": 8400 |
| }, |
| { |
| "epoch": 9.27863436123348, |
| "grad_norm": 0.21672683954238892, |
| "learning_rate": 1.1871794871794872e-05, |
| "loss": 0.0368, |
| "step": 8425 |
| }, |
| { |
| "epoch": 9.306167400881057, |
| "grad_norm": 0.17891252040863037, |
| "learning_rate": 1.1846153846153847e-05, |
| "loss": 0.045, |
| "step": 8450 |
| }, |
| { |
| "epoch": 9.333700440528634, |
| "grad_norm": 0.29890188574790955, |
| "learning_rate": 1.1820512820512821e-05, |
| "loss": 0.0374, |
| "step": 8475 |
| }, |
| { |
| "epoch": 9.361233480176212, |
| "grad_norm": 0.2425035685300827, |
| "learning_rate": 1.1794871794871796e-05, |
| "loss": 0.0463, |
| "step": 8500 |
| }, |
| { |
| "epoch": 9.388766519823788, |
| "grad_norm": 0.1933910995721817, |
| "learning_rate": 1.176923076923077e-05, |
| "loss": 0.0423, |
| "step": 8525 |
| }, |
| { |
| "epoch": 9.416299559471366, |
| "grad_norm": 0.26760435104370117, |
| "learning_rate": 1.1743589743589745e-05, |
| "loss": 0.0483, |
| "step": 8550 |
| }, |
| { |
| "epoch": 9.443832599118943, |
| "grad_norm": 0.2656070291996002, |
| "learning_rate": 1.1717948717948719e-05, |
| "loss": 0.0413, |
| "step": 8575 |
| }, |
| { |
| "epoch": 9.47136563876652, |
| "grad_norm": 0.4633423686027527, |
| "learning_rate": 1.1692307692307694e-05, |
| "loss": 0.0402, |
| "step": 8600 |
| }, |
| { |
| "epoch": 9.498898678414097, |
| "grad_norm": 0.52508544921875, |
| "learning_rate": 1.1666666666666668e-05, |
| "loss": 0.052, |
| "step": 8625 |
| }, |
| { |
| "epoch": 9.526431718061675, |
| "grad_norm": 0.5961471796035767, |
| "learning_rate": 1.1641025641025643e-05, |
| "loss": 0.0533, |
| "step": 8650 |
| }, |
| { |
| "epoch": 9.55396475770925, |
| "grad_norm": 0.25339117646217346, |
| "learning_rate": 1.1615384615384617e-05, |
| "loss": 0.0457, |
| "step": 8675 |
| }, |
| { |
| "epoch": 9.581497797356828, |
| "grad_norm": 0.3615815043449402, |
| "learning_rate": 1.1589743589743592e-05, |
| "loss": 0.0363, |
| "step": 8700 |
| }, |
| { |
| "epoch": 9.609030837004406, |
| "grad_norm": 0.17995741963386536, |
| "learning_rate": 1.1564102564102566e-05, |
| "loss": 0.0501, |
| "step": 8725 |
| }, |
| { |
| "epoch": 9.636563876651982, |
| "grad_norm": 0.5352070927619934, |
| "learning_rate": 1.1538461538461538e-05, |
| "loss": 0.0513, |
| "step": 8750 |
| }, |
| { |
| "epoch": 9.66409691629956, |
| "grad_norm": 0.48025304079055786, |
| "learning_rate": 1.1512820512820513e-05, |
| "loss": 0.043, |
| "step": 8775 |
| }, |
| { |
| "epoch": 9.691629955947137, |
| "grad_norm": 0.45890381932258606, |
| "learning_rate": 1.1487179487179487e-05, |
| "loss": 0.0389, |
| "step": 8800 |
| }, |
| { |
| "epoch": 9.719162995594713, |
| "grad_norm": 0.572181761264801, |
| "learning_rate": 1.1461538461538462e-05, |
| "loss": 0.0487, |
| "step": 8825 |
| }, |
| { |
| "epoch": 9.746696035242291, |
| "grad_norm": 0.382355272769928, |
| "learning_rate": 1.1435897435897436e-05, |
| "loss": 0.0527, |
| "step": 8850 |
| }, |
| { |
| "epoch": 9.774229074889869, |
| "grad_norm": 0.3971492052078247, |
| "learning_rate": 1.1410256410256411e-05, |
| "loss": 0.0493, |
| "step": 8875 |
| }, |
| { |
| "epoch": 9.801762114537445, |
| "grad_norm": 0.520282506942749, |
| "learning_rate": 1.1384615384615385e-05, |
| "loss": 0.0389, |
| "step": 8900 |
| }, |
| { |
| "epoch": 9.829295154185022, |
| "grad_norm": 0.2088589370250702, |
| "learning_rate": 1.135897435897436e-05, |
| "loss": 0.052, |
| "step": 8925 |
| }, |
| { |
| "epoch": 9.8568281938326, |
| "grad_norm": 0.569845974445343, |
| "learning_rate": 1.1333333333333334e-05, |
| "loss": 0.043, |
| "step": 8950 |
| }, |
| { |
| "epoch": 9.884361233480176, |
| "grad_norm": 0.4208398759365082, |
| "learning_rate": 1.1307692307692309e-05, |
| "loss": 0.0378, |
| "step": 8975 |
| }, |
| { |
| "epoch": 9.911894273127754, |
| "grad_norm": 0.24408133327960968, |
| "learning_rate": 1.1282051282051283e-05, |
| "loss": 0.0381, |
| "step": 9000 |
| }, |
| { |
| "epoch": 9.911894273127754, |
| "eval_cer": 47.71588733621451, |
| "eval_loss": 0.8698176741600037, |
| "eval_runtime": 865.0989, |
| "eval_samples_per_second": 12.231, |
| "eval_steps_per_second": 3.059, |
| "eval_wer": 91.57001414427157, |
| "step": 9000 |
| }, |
| { |
| "epoch": 9.939427312775331, |
| "grad_norm": 0.8426101207733154, |
| "learning_rate": 1.1256410256410258e-05, |
| "loss": 0.0433, |
| "step": 9025 |
| }, |
| { |
| "epoch": 9.966960352422907, |
| "grad_norm": 0.5663429498672485, |
| "learning_rate": 1.1230769230769232e-05, |
| "loss": 0.0461, |
| "step": 9050 |
| }, |
| { |
| "epoch": 9.994493392070485, |
| "grad_norm": 0.4001486003398895, |
| "learning_rate": 1.1205128205128205e-05, |
| "loss": 0.0508, |
| "step": 9075 |
| }, |
| { |
| "epoch": 10.022026431718063, |
| "grad_norm": 0.11667744815349579, |
| "learning_rate": 1.117948717948718e-05, |
| "loss": 0.0281, |
| "step": 9100 |
| }, |
| { |
| "epoch": 10.049559471365638, |
| "grad_norm": 0.3421935439109802, |
| "learning_rate": 1.1153846153846154e-05, |
| "loss": 0.0277, |
| "step": 9125 |
| }, |
| { |
| "epoch": 10.077092511013216, |
| "grad_norm": 0.39479511976242065, |
| "learning_rate": 1.112820512820513e-05, |
| "loss": 0.0341, |
| "step": 9150 |
| }, |
| { |
| "epoch": 10.104625550660794, |
| "grad_norm": 0.5420315861701965, |
| "learning_rate": 1.1102564102564103e-05, |
| "loss": 0.0249, |
| "step": 9175 |
| }, |
| { |
| "epoch": 10.13215859030837, |
| "grad_norm": 0.4009842276573181, |
| "learning_rate": 1.1076923076923079e-05, |
| "loss": 0.0277, |
| "step": 9200 |
| }, |
| { |
| "epoch": 10.159691629955947, |
| "grad_norm": 0.14988014101982117, |
| "learning_rate": 1.1051282051282052e-05, |
| "loss": 0.0254, |
| "step": 9225 |
| }, |
| { |
| "epoch": 10.187224669603523, |
| "grad_norm": 0.10612168908119202, |
| "learning_rate": 1.1025641025641028e-05, |
| "loss": 0.0293, |
| "step": 9250 |
| }, |
| { |
| "epoch": 10.214757709251101, |
| "grad_norm": 0.07644204050302505, |
| "learning_rate": 1.1000000000000001e-05, |
| "loss": 0.02, |
| "step": 9275 |
| }, |
| { |
| "epoch": 10.242290748898679, |
| "grad_norm": 0.37732774019241333, |
| "learning_rate": 1.0974358974358977e-05, |
| "loss": 0.0252, |
| "step": 9300 |
| }, |
| { |
| "epoch": 10.269823788546255, |
| "grad_norm": 0.362834095954895, |
| "learning_rate": 1.094871794871795e-05, |
| "loss": 0.0287, |
| "step": 9325 |
| }, |
| { |
| "epoch": 10.297356828193832, |
| "grad_norm": 0.37372374534606934, |
| "learning_rate": 1.0923076923076922e-05, |
| "loss": 0.0279, |
| "step": 9350 |
| }, |
| { |
| "epoch": 10.32488986784141, |
| "grad_norm": 0.1827729046344757, |
| "learning_rate": 1.0897435897435898e-05, |
| "loss": 0.0383, |
| "step": 9375 |
| }, |
| { |
| "epoch": 10.352422907488986, |
| "grad_norm": 0.79392009973526, |
| "learning_rate": 1.0871794871794871e-05, |
| "loss": 0.0218, |
| "step": 9400 |
| }, |
| { |
| "epoch": 10.379955947136564, |
| "grad_norm": 0.4169488251209259, |
| "learning_rate": 1.0846153846153847e-05, |
| "loss": 0.0331, |
| "step": 9425 |
| }, |
| { |
| "epoch": 10.407488986784141, |
| "grad_norm": 0.197611466050148, |
| "learning_rate": 1.082051282051282e-05, |
| "loss": 0.0285, |
| "step": 9450 |
| }, |
| { |
| "epoch": 10.435022026431717, |
| "grad_norm": 0.20039740204811096, |
| "learning_rate": 1.0794871794871796e-05, |
| "loss": 0.0219, |
| "step": 9475 |
| }, |
| { |
| "epoch": 10.462555066079295, |
| "grad_norm": 0.5181693434715271, |
| "learning_rate": 1.076923076923077e-05, |
| "loss": 0.031, |
| "step": 9500 |
| }, |
| { |
| "epoch": 10.490088105726873, |
| "grad_norm": 0.31759241223335266, |
| "learning_rate": 1.0743589743589745e-05, |
| "loss": 0.0386, |
| "step": 9525 |
| }, |
| { |
| "epoch": 10.517621145374449, |
| "grad_norm": 0.20832429826259613, |
| "learning_rate": 1.0717948717948718e-05, |
| "loss": 0.0266, |
| "step": 9550 |
| }, |
| { |
| "epoch": 10.545154185022026, |
| "grad_norm": 0.1244155541062355, |
| "learning_rate": 1.0692307692307694e-05, |
| "loss": 0.0282, |
| "step": 9575 |
| }, |
| { |
| "epoch": 10.572687224669604, |
| "grad_norm": 0.1275760680437088, |
| "learning_rate": 1.0666666666666667e-05, |
| "loss": 0.026, |
| "step": 9600 |
| }, |
| { |
| "epoch": 10.60022026431718, |
| "grad_norm": 0.5201808214187622, |
| "learning_rate": 1.0641025641025643e-05, |
| "loss": 0.0284, |
| "step": 9625 |
| }, |
| { |
| "epoch": 10.627753303964758, |
| "grad_norm": 0.6102043390274048, |
| "learning_rate": 1.0615384615384616e-05, |
| "loss": 0.0279, |
| "step": 9650 |
| }, |
| { |
| "epoch": 10.655286343612335, |
| "grad_norm": 0.19192653894424438, |
| "learning_rate": 1.058974358974359e-05, |
| "loss": 0.0317, |
| "step": 9675 |
| }, |
| { |
| "epoch": 10.682819383259911, |
| "grad_norm": 0.2622509300708771, |
| "learning_rate": 1.0564102564102565e-05, |
| "loss": 0.0356, |
| "step": 9700 |
| }, |
| { |
| "epoch": 10.710352422907489, |
| "grad_norm": 0.16083978116512299, |
| "learning_rate": 1.0538461538461539e-05, |
| "loss": 0.0318, |
| "step": 9725 |
| }, |
| { |
| "epoch": 10.737885462555067, |
| "grad_norm": 0.29113614559173584, |
| "learning_rate": 1.0512820512820514e-05, |
| "loss": 0.0301, |
| "step": 9750 |
| }, |
| { |
| "epoch": 10.765418502202643, |
| "grad_norm": 0.1394374966621399, |
| "learning_rate": 1.0487179487179488e-05, |
| "loss": 0.0249, |
| "step": 9775 |
| }, |
| { |
| "epoch": 10.79295154185022, |
| "grad_norm": 0.45710182189941406, |
| "learning_rate": 1.0461538461538463e-05, |
| "loss": 0.0326, |
| "step": 9800 |
| }, |
| { |
| "epoch": 10.820484581497798, |
| "grad_norm": 0.3842594027519226, |
| "learning_rate": 1.0435897435897437e-05, |
| "loss": 0.028, |
| "step": 9825 |
| }, |
| { |
| "epoch": 10.848017621145374, |
| "grad_norm": 0.1398581862449646, |
| "learning_rate": 1.0410256410256412e-05, |
| "loss": 0.0258, |
| "step": 9850 |
| }, |
| { |
| "epoch": 10.875550660792952, |
| "grad_norm": 0.30973750352859497, |
| "learning_rate": 1.0384615384615386e-05, |
| "loss": 0.0287, |
| "step": 9875 |
| }, |
| { |
| "epoch": 10.90308370044053, |
| "grad_norm": 0.4438304901123047, |
| "learning_rate": 1.0358974358974361e-05, |
| "loss": 0.0208, |
| "step": 9900 |
| }, |
| { |
| "epoch": 10.930616740088105, |
| "grad_norm": 0.3679158389568329, |
| "learning_rate": 1.0333333333333335e-05, |
| "loss": 0.0193, |
| "step": 9925 |
| }, |
| { |
| "epoch": 10.958149779735683, |
| "grad_norm": 0.19606485962867737, |
| "learning_rate": 1.0307692307692307e-05, |
| "loss": 0.0281, |
| "step": 9950 |
| }, |
| { |
| "epoch": 10.98568281938326, |
| "grad_norm": 0.2789362967014313, |
| "learning_rate": 1.0282051282051282e-05, |
| "loss": 0.033, |
| "step": 9975 |
| }, |
| { |
| "epoch": 11.013215859030836, |
| "grad_norm": 0.4207296669483185, |
| "learning_rate": 1.0256410256410256e-05, |
| "loss": 0.028, |
| "step": 10000 |
| }, |
| { |
| "epoch": 11.013215859030836, |
| "eval_cer": 54.37563766005984, |
| "eval_loss": 0.886362612247467, |
| "eval_runtime": 981.5076, |
| "eval_samples_per_second": 10.78, |
| "eval_steps_per_second": 2.696, |
| "eval_wer": 92.14521452145215, |
| "step": 10000 |
| }, |
| { |
| "epoch": 11.040748898678414, |
| "grad_norm": 0.2317088097333908, |
| "learning_rate": 1.0230769230769231e-05, |
| "loss": 0.0188, |
| "step": 10025 |
| }, |
| { |
| "epoch": 11.068281938325992, |
| "grad_norm": 0.1417902708053589, |
| "learning_rate": 1.0205128205128205e-05, |
| "loss": 0.0215, |
| "step": 10050 |
| }, |
| { |
| "epoch": 11.095814977973568, |
| "grad_norm": 0.07895007729530334, |
| "learning_rate": 1.017948717948718e-05, |
| "loss": 0.0206, |
| "step": 10075 |
| }, |
| { |
| "epoch": 11.123348017621145, |
| "grad_norm": 0.23651820421218872, |
| "learning_rate": 1.0153846153846154e-05, |
| "loss": 0.0193, |
| "step": 10100 |
| }, |
| { |
| "epoch": 11.150881057268723, |
| "grad_norm": 0.30718091130256653, |
| "learning_rate": 1.012820512820513e-05, |
| "loss": 0.0181, |
| "step": 10125 |
| }, |
| { |
| "epoch": 11.178414096916299, |
| "grad_norm": 0.5389750003814697, |
| "learning_rate": 1.0102564102564103e-05, |
| "loss": 0.0227, |
| "step": 10150 |
| }, |
| { |
| "epoch": 11.205947136563877, |
| "grad_norm": 0.1611407846212387, |
| "learning_rate": 1.0076923076923078e-05, |
| "loss": 0.0179, |
| "step": 10175 |
| }, |
| { |
| "epoch": 11.233480176211454, |
| "grad_norm": 0.12234646826982498, |
| "learning_rate": 1.0051282051282052e-05, |
| "loss": 0.0177, |
| "step": 10200 |
| }, |
| { |
| "epoch": 11.26101321585903, |
| "grad_norm": 0.17735643684864044, |
| "learning_rate": 1.0025641025641027e-05, |
| "loss": 0.018, |
| "step": 10225 |
| }, |
| { |
| "epoch": 11.288546255506608, |
| "grad_norm": 0.4353015720844269, |
| "learning_rate": 1e-05, |
| "loss": 0.0148, |
| "step": 10250 |
| }, |
| { |
| "epoch": 11.316079295154186, |
| "grad_norm": 0.5004972219467163, |
| "learning_rate": 9.974358974358974e-06, |
| "loss": 0.0226, |
| "step": 10275 |
| }, |
| { |
| "epoch": 11.343612334801762, |
| "grad_norm": 0.21323665976524353, |
| "learning_rate": 9.94871794871795e-06, |
| "loss": 0.0175, |
| "step": 10300 |
| }, |
| { |
| "epoch": 11.37114537444934, |
| "grad_norm": 0.1589115411043167, |
| "learning_rate": 9.923076923076923e-06, |
| "loss": 0.0171, |
| "step": 10325 |
| }, |
| { |
| "epoch": 11.398678414096917, |
| "grad_norm": 0.09055918455123901, |
| "learning_rate": 9.897435897435899e-06, |
| "loss": 0.0202, |
| "step": 10350 |
| }, |
| { |
| "epoch": 11.426211453744493, |
| "grad_norm": 0.15250088274478912, |
| "learning_rate": 9.871794871794872e-06, |
| "loss": 0.0165, |
| "step": 10375 |
| }, |
| { |
| "epoch": 11.45374449339207, |
| "grad_norm": 0.17833657562732697, |
| "learning_rate": 9.846153846153848e-06, |
| "loss": 0.0226, |
| "step": 10400 |
| }, |
| { |
| "epoch": 11.481277533039648, |
| "grad_norm": 0.29985514283180237, |
| "learning_rate": 9.820512820512821e-06, |
| "loss": 0.0215, |
| "step": 10425 |
| }, |
| { |
| "epoch": 11.508810572687224, |
| "grad_norm": 0.1902422308921814, |
| "learning_rate": 9.794871794871795e-06, |
| "loss": 0.0202, |
| "step": 10450 |
| }, |
| { |
| "epoch": 11.536343612334802, |
| "grad_norm": 0.20311668515205383, |
| "learning_rate": 9.76923076923077e-06, |
| "loss": 0.015, |
| "step": 10475 |
| }, |
| { |
| "epoch": 11.56387665198238, |
| "grad_norm": 0.29688936471939087, |
| "learning_rate": 9.743589743589744e-06, |
| "loss": 0.0165, |
| "step": 10500 |
| }, |
| { |
| "epoch": 11.591409691629956, |
| "grad_norm": 0.16763679683208466, |
| "learning_rate": 9.71794871794872e-06, |
| "loss": 0.0226, |
| "step": 10525 |
| }, |
| { |
| "epoch": 11.618942731277533, |
| "grad_norm": 0.2504405081272125, |
| "learning_rate": 9.692307692307693e-06, |
| "loss": 0.0154, |
| "step": 10550 |
| }, |
| { |
| "epoch": 11.646475770925111, |
| "grad_norm": 0.4480707347393036, |
| "learning_rate": 9.666666666666667e-06, |
| "loss": 0.0197, |
| "step": 10575 |
| }, |
| { |
| "epoch": 11.674008810572687, |
| "grad_norm": 0.1413351148366928, |
| "learning_rate": 9.641025641025642e-06, |
| "loss": 0.0169, |
| "step": 10600 |
| }, |
| { |
| "epoch": 11.701541850220265, |
| "grad_norm": 0.32270994782447815, |
| "learning_rate": 9.615384615384616e-06, |
| "loss": 0.0186, |
| "step": 10625 |
| }, |
| { |
| "epoch": 11.729074889867842, |
| "grad_norm": 0.10015879571437836, |
| "learning_rate": 9.589743589743591e-06, |
| "loss": 0.0193, |
| "step": 10650 |
| }, |
| { |
| "epoch": 11.756607929515418, |
| "grad_norm": 0.2840658128261566, |
| "learning_rate": 9.564102564102565e-06, |
| "loss": 0.0219, |
| "step": 10675 |
| }, |
| { |
| "epoch": 11.784140969162996, |
| "grad_norm": 0.3592904508113861, |
| "learning_rate": 9.53846153846154e-06, |
| "loss": 0.0296, |
| "step": 10700 |
| }, |
| { |
| "epoch": 11.811674008810574, |
| "grad_norm": 0.3074311912059784, |
| "learning_rate": 9.512820512820514e-06, |
| "loss": 0.0219, |
| "step": 10725 |
| }, |
| { |
| "epoch": 11.83920704845815, |
| "grad_norm": 0.09315291047096252, |
| "learning_rate": 9.487179487179487e-06, |
| "loss": 0.0245, |
| "step": 10750 |
| }, |
| { |
| "epoch": 11.866740088105727, |
| "grad_norm": 0.23775820434093475, |
| "learning_rate": 9.461538461538463e-06, |
| "loss": 0.0176, |
| "step": 10775 |
| }, |
| { |
| "epoch": 11.894273127753303, |
| "grad_norm": 0.3934062123298645, |
| "learning_rate": 9.435897435897436e-06, |
| "loss": 0.0193, |
| "step": 10800 |
| }, |
| { |
| "epoch": 11.92180616740088, |
| "grad_norm": 0.11783640831708908, |
| "learning_rate": 9.410256410256412e-06, |
| "loss": 0.0173, |
| "step": 10825 |
| }, |
| { |
| "epoch": 11.949339207048459, |
| "grad_norm": 0.2406715750694275, |
| "learning_rate": 9.384615384615385e-06, |
| "loss": 0.0176, |
| "step": 10850 |
| }, |
| { |
| "epoch": 11.976872246696034, |
| "grad_norm": 0.23296691477298737, |
| "learning_rate": 9.358974358974359e-06, |
| "loss": 0.0225, |
| "step": 10875 |
| }, |
| { |
| "epoch": 12.004405286343612, |
| "grad_norm": 0.07544398307800293, |
| "learning_rate": 9.333333333333334e-06, |
| "loss": 0.0192, |
| "step": 10900 |
| }, |
| { |
| "epoch": 12.03193832599119, |
| "grad_norm": 0.11225544661283493, |
| "learning_rate": 9.307692307692308e-06, |
| "loss": 0.0151, |
| "step": 10925 |
| }, |
| { |
| "epoch": 12.059471365638766, |
| "grad_norm": 0.2554400563240051, |
| "learning_rate": 9.282051282051283e-06, |
| "loss": 0.018, |
| "step": 10950 |
| }, |
| { |
| "epoch": 12.087004405286343, |
| "grad_norm": 0.278838574886322, |
| "learning_rate": 9.256410256410257e-06, |
| "loss": 0.018, |
| "step": 10975 |
| }, |
| { |
| "epoch": 12.114537444933921, |
| "grad_norm": 0.15447276830673218, |
| "learning_rate": 9.230769230769232e-06, |
| "loss": 0.0142, |
| "step": 11000 |
| }, |
| { |
| "epoch": 12.114537444933921, |
| "eval_cer": 53.64139717767359, |
| "eval_loss": 0.8987538814544678, |
| "eval_runtime": 874.0135, |
| "eval_samples_per_second": 12.106, |
| "eval_steps_per_second": 3.027, |
| "eval_wer": 93.21074964639321, |
| "step": 11000 |
| }, |
| { |
| "epoch": 12.142070484581497, |
| "grad_norm": 0.14940273761749268, |
| "learning_rate": 9.205128205128206e-06, |
| "loss": 0.0134, |
| "step": 11025 |
| }, |
| { |
| "epoch": 12.169603524229075, |
| "grad_norm": 0.16579824686050415, |
| "learning_rate": 9.17948717948718e-06, |
| "loss": 0.0101, |
| "step": 11050 |
| }, |
| { |
| "epoch": 12.197136563876652, |
| "grad_norm": 0.19196206331253052, |
| "learning_rate": 9.153846153846155e-06, |
| "loss": 0.0175, |
| "step": 11075 |
| }, |
| { |
| "epoch": 12.224669603524228, |
| "grad_norm": 0.17125152051448822, |
| "learning_rate": 9.128205128205129e-06, |
| "loss": 0.0146, |
| "step": 11100 |
| }, |
| { |
| "epoch": 12.252202643171806, |
| "grad_norm": 0.1070375144481659, |
| "learning_rate": 9.102564102564104e-06, |
| "loss": 0.0135, |
| "step": 11125 |
| }, |
| { |
| "epoch": 12.279735682819384, |
| "grad_norm": 0.18338176608085632, |
| "learning_rate": 9.076923076923078e-06, |
| "loss": 0.0114, |
| "step": 11150 |
| }, |
| { |
| "epoch": 12.30726872246696, |
| "grad_norm": 0.08506150543689728, |
| "learning_rate": 9.051282051282051e-06, |
| "loss": 0.0109, |
| "step": 11175 |
| }, |
| { |
| "epoch": 12.334801762114537, |
| "grad_norm": 0.22113458812236786, |
| "learning_rate": 9.025641025641027e-06, |
| "loss": 0.0146, |
| "step": 11200 |
| }, |
| { |
| "epoch": 12.362334801762115, |
| "grad_norm": 0.7724553942680359, |
| "learning_rate": 9e-06, |
| "loss": 0.0144, |
| "step": 11225 |
| }, |
| { |
| "epoch": 12.389867841409691, |
| "grad_norm": 0.2879244089126587, |
| "learning_rate": 8.974358974358976e-06, |
| "loss": 0.0158, |
| "step": 11250 |
| }, |
| { |
| "epoch": 12.417400881057269, |
| "grad_norm": 0.10946013778448105, |
| "learning_rate": 8.94871794871795e-06, |
| "loss": 0.0149, |
| "step": 11275 |
| }, |
| { |
| "epoch": 12.444933920704846, |
| "grad_norm": 0.16864745318889618, |
| "learning_rate": 8.923076923076925e-06, |
| "loss": 0.0125, |
| "step": 11300 |
| }, |
| { |
| "epoch": 12.472466960352422, |
| "grad_norm": 0.13665378093719482, |
| "learning_rate": 8.897435897435898e-06, |
| "loss": 0.0125, |
| "step": 11325 |
| }, |
| { |
| "epoch": 12.5, |
| "grad_norm": 0.7579739093780518, |
| "learning_rate": 8.871794871794872e-06, |
| "loss": 0.0187, |
| "step": 11350 |
| }, |
| { |
| "epoch": 12.527533039647578, |
| "grad_norm": 0.2458302229642868, |
| "learning_rate": 8.846153846153847e-06, |
| "loss": 0.0163, |
| "step": 11375 |
| }, |
| { |
| "epoch": 12.555066079295154, |
| "grad_norm": 0.1165897399187088, |
| "learning_rate": 8.820512820512821e-06, |
| "loss": 0.0203, |
| "step": 11400 |
| }, |
| { |
| "epoch": 12.582599118942731, |
| "grad_norm": 0.18201911449432373, |
| "learning_rate": 8.794871794871796e-06, |
| "loss": 0.0176, |
| "step": 11425 |
| }, |
| { |
| "epoch": 12.610132158590309, |
| "grad_norm": 0.24092814326286316, |
| "learning_rate": 8.76923076923077e-06, |
| "loss": 0.0184, |
| "step": 11450 |
| }, |
| { |
| "epoch": 12.637665198237885, |
| "grad_norm": 0.22921068966388702, |
| "learning_rate": 8.743589743589743e-06, |
| "loss": 0.0165, |
| "step": 11475 |
| }, |
| { |
| "epoch": 12.665198237885463, |
| "grad_norm": 0.11735259741544724, |
| "learning_rate": 8.717948717948719e-06, |
| "loss": 0.0148, |
| "step": 11500 |
| }, |
| { |
| "epoch": 12.69273127753304, |
| "grad_norm": 0.3713417053222656, |
| "learning_rate": 8.692307692307692e-06, |
| "loss": 0.013, |
| "step": 11525 |
| }, |
| { |
| "epoch": 12.720264317180616, |
| "grad_norm": 0.12812820076942444, |
| "learning_rate": 8.666666666666668e-06, |
| "loss": 0.0131, |
| "step": 11550 |
| }, |
| { |
| "epoch": 12.747797356828194, |
| "grad_norm": 0.08026193827390671, |
| "learning_rate": 8.641025641025641e-06, |
| "loss": 0.0122, |
| "step": 11575 |
| }, |
| { |
| "epoch": 12.775330396475772, |
| "grad_norm": 0.30850231647491455, |
| "learning_rate": 8.615384615384617e-06, |
| "loss": 0.0162, |
| "step": 11600 |
| }, |
| { |
| "epoch": 12.802863436123348, |
| "grad_norm": 0.11349613219499588, |
| "learning_rate": 8.58974358974359e-06, |
| "loss": 0.0142, |
| "step": 11625 |
| }, |
| { |
| "epoch": 12.830396475770925, |
| "grad_norm": 0.0808100774884224, |
| "learning_rate": 8.564102564102564e-06, |
| "loss": 0.0133, |
| "step": 11650 |
| }, |
| { |
| "epoch": 12.857929515418503, |
| "grad_norm": 0.21294915676116943, |
| "learning_rate": 8.53846153846154e-06, |
| "loss": 0.0131, |
| "step": 11675 |
| }, |
| { |
| "epoch": 12.885462555066079, |
| "grad_norm": 0.3267536163330078, |
| "learning_rate": 8.512820512820513e-06, |
| "loss": 0.0132, |
| "step": 11700 |
| }, |
| { |
| "epoch": 12.912995594713657, |
| "grad_norm": 0.1333635449409485, |
| "learning_rate": 8.487179487179488e-06, |
| "loss": 0.0127, |
| "step": 11725 |
| }, |
| { |
| "epoch": 12.940528634361234, |
| "grad_norm": 0.15748950839042664, |
| "learning_rate": 8.461538461538462e-06, |
| "loss": 0.014, |
| "step": 11750 |
| }, |
| { |
| "epoch": 12.96806167400881, |
| "grad_norm": 0.10807313024997711, |
| "learning_rate": 8.435897435897436e-06, |
| "loss": 0.0113, |
| "step": 11775 |
| }, |
| { |
| "epoch": 12.995594713656388, |
| "grad_norm": 0.31078580021858215, |
| "learning_rate": 8.410256410256411e-06, |
| "loss": 0.0143, |
| "step": 11800 |
| }, |
| { |
| "epoch": 13.023127753303966, |
| "grad_norm": 0.39036718010902405, |
| "learning_rate": 8.384615384615385e-06, |
| "loss": 0.0143, |
| "step": 11825 |
| }, |
| { |
| "epoch": 13.050660792951541, |
| "grad_norm": 0.2274981439113617, |
| "learning_rate": 8.35897435897436e-06, |
| "loss": 0.0095, |
| "step": 11850 |
| }, |
| { |
| "epoch": 13.07819383259912, |
| "grad_norm": 0.20098534226417542, |
| "learning_rate": 8.333333333333334e-06, |
| "loss": 0.0098, |
| "step": 11875 |
| }, |
| { |
| "epoch": 13.105726872246697, |
| "grad_norm": 0.06564383208751678, |
| "learning_rate": 8.307692307692309e-06, |
| "loss": 0.0089, |
| "step": 11900 |
| }, |
| { |
| "epoch": 13.133259911894273, |
| "grad_norm": 0.3132261633872986, |
| "learning_rate": 8.282051282051283e-06, |
| "loss": 0.01, |
| "step": 11925 |
| }, |
| { |
| "epoch": 13.16079295154185, |
| "grad_norm": 0.2533165514469147, |
| "learning_rate": 8.256410256410256e-06, |
| "loss": 0.0115, |
| "step": 11950 |
| }, |
| { |
| "epoch": 13.188325991189428, |
| "grad_norm": 0.2507915198802948, |
| "learning_rate": 8.230769230769232e-06, |
| "loss": 0.0119, |
| "step": 11975 |
| }, |
| { |
| "epoch": 13.215859030837004, |
| "grad_norm": 0.2795217037200928, |
| "learning_rate": 8.205128205128205e-06, |
| "loss": 0.0131, |
| "step": 12000 |
| }, |
| { |
| "epoch": 13.215859030837004, |
| "eval_cer": 46.21530842685681, |
| "eval_loss": 0.9192056655883789, |
| "eval_runtime": 848.6891, |
| "eval_samples_per_second": 12.467, |
| "eval_steps_per_second": 3.118, |
| "eval_wer": 92.8052805280528, |
| "step": 12000 |
| }, |
| { |
| "epoch": 13.243392070484582, |
| "grad_norm": 0.08327347785234451, |
| "learning_rate": 8.17948717948718e-06, |
| "loss": 0.0108, |
| "step": 12025 |
| }, |
| { |
| "epoch": 13.270925110132158, |
| "grad_norm": 0.1631169319152832, |
| "learning_rate": 8.153846153846154e-06, |
| "loss": 0.0073, |
| "step": 12050 |
| }, |
| { |
| "epoch": 13.298458149779735, |
| "grad_norm": 0.20178398489952087, |
| "learning_rate": 8.12820512820513e-06, |
| "loss": 0.0089, |
| "step": 12075 |
| }, |
| { |
| "epoch": 13.325991189427313, |
| "grad_norm": 0.1222369447350502, |
| "learning_rate": 8.102564102564103e-06, |
| "loss": 0.0102, |
| "step": 12100 |
| }, |
| { |
| "epoch": 13.353524229074889, |
| "grad_norm": 0.08848530054092407, |
| "learning_rate": 8.076923076923077e-06, |
| "loss": 0.0132, |
| "step": 12125 |
| }, |
| { |
| "epoch": 13.381057268722467, |
| "grad_norm": 0.17377710342407227, |
| "learning_rate": 8.051282051282052e-06, |
| "loss": 0.0104, |
| "step": 12150 |
| }, |
| { |
| "epoch": 13.408590308370044, |
| "grad_norm": 0.2735235095024109, |
| "learning_rate": 8.025641025641026e-06, |
| "loss": 0.009, |
| "step": 12175 |
| }, |
| { |
| "epoch": 13.43612334801762, |
| "grad_norm": 0.1769276261329651, |
| "learning_rate": 8.000000000000001e-06, |
| "loss": 0.0097, |
| "step": 12200 |
| }, |
| { |
| "epoch": 13.463656387665198, |
| "grad_norm": 0.11117183417081833, |
| "learning_rate": 7.974358974358975e-06, |
| "loss": 0.0092, |
| "step": 12225 |
| }, |
| { |
| "epoch": 13.491189427312776, |
| "grad_norm": 0.12082304805517197, |
| "learning_rate": 7.948717948717949e-06, |
| "loss": 0.0111, |
| "step": 12250 |
| }, |
| { |
| "epoch": 13.518722466960352, |
| "grad_norm": 0.05868387967348099, |
| "learning_rate": 7.923076923076924e-06, |
| "loss": 0.0109, |
| "step": 12275 |
| }, |
| { |
| "epoch": 13.54625550660793, |
| "grad_norm": 0.6051620244979858, |
| "learning_rate": 7.897435897435898e-06, |
| "loss": 0.0116, |
| "step": 12300 |
| }, |
| { |
| "epoch": 13.573788546255507, |
| "grad_norm": 0.09658774733543396, |
| "learning_rate": 7.871794871794873e-06, |
| "loss": 0.0106, |
| "step": 12325 |
| }, |
| { |
| "epoch": 13.601321585903083, |
| "grad_norm": 0.0718512088060379, |
| "learning_rate": 7.846153846153847e-06, |
| "loss": 0.01, |
| "step": 12350 |
| }, |
| { |
| "epoch": 13.62885462555066, |
| "grad_norm": 0.36982008814811707, |
| "learning_rate": 7.820512820512822e-06, |
| "loss": 0.0113, |
| "step": 12375 |
| }, |
| { |
| "epoch": 13.656387665198238, |
| "grad_norm": 0.04029988497495651, |
| "learning_rate": 7.794871794871796e-06, |
| "loss": 0.0096, |
| "step": 12400 |
| }, |
| { |
| "epoch": 13.683920704845814, |
| "grad_norm": 0.05479320138692856, |
| "learning_rate": 7.76923076923077e-06, |
| "loss": 0.0094, |
| "step": 12425 |
| }, |
| { |
| "epoch": 13.711453744493392, |
| "grad_norm": 0.05368011072278023, |
| "learning_rate": 7.743589743589745e-06, |
| "loss": 0.0088, |
| "step": 12450 |
| }, |
| { |
| "epoch": 13.73898678414097, |
| "grad_norm": 0.080776646733284, |
| "learning_rate": 7.717948717948718e-06, |
| "loss": 0.0083, |
| "step": 12475 |
| }, |
| { |
| "epoch": 13.766519823788546, |
| "grad_norm": 0.14407703280448914, |
| "learning_rate": 7.692307692307694e-06, |
| "loss": 0.0095, |
| "step": 12500 |
| }, |
| { |
| "epoch": 13.794052863436123, |
| "grad_norm": 0.04322797805070877, |
| "learning_rate": 7.666666666666667e-06, |
| "loss": 0.0099, |
| "step": 12525 |
| }, |
| { |
| "epoch": 13.821585903083701, |
| "grad_norm": 0.3888992965221405, |
| "learning_rate": 7.641025641025641e-06, |
| "loss": 0.0101, |
| "step": 12550 |
| }, |
| { |
| "epoch": 13.849118942731277, |
| "grad_norm": 0.07158606499433517, |
| "learning_rate": 7.615384615384615e-06, |
| "loss": 0.0086, |
| "step": 12575 |
| }, |
| { |
| "epoch": 13.876651982378855, |
| "grad_norm": 0.044584546238183975, |
| "learning_rate": 7.58974358974359e-06, |
| "loss": 0.0096, |
| "step": 12600 |
| }, |
| { |
| "epoch": 13.904185022026432, |
| "grad_norm": 0.06555311381816864, |
| "learning_rate": 7.564102564102564e-06, |
| "loss": 0.0106, |
| "step": 12625 |
| }, |
| { |
| "epoch": 13.931718061674008, |
| "grad_norm": 0.07280756533145905, |
| "learning_rate": 7.538461538461539e-06, |
| "loss": 0.0114, |
| "step": 12650 |
| }, |
| { |
| "epoch": 13.959251101321586, |
| "grad_norm": 0.09671244770288467, |
| "learning_rate": 7.512820512820513e-06, |
| "loss": 0.0107, |
| "step": 12675 |
| }, |
| { |
| "epoch": 13.986784140969164, |
| "grad_norm": 0.16803735494613647, |
| "learning_rate": 7.487179487179488e-06, |
| "loss": 0.0132, |
| "step": 12700 |
| }, |
| { |
| "epoch": 14.01431718061674, |
| "grad_norm": 0.07752186059951782, |
| "learning_rate": 7.461538461538462e-06, |
| "loss": 0.0113, |
| "step": 12725 |
| }, |
| { |
| "epoch": 14.041850220264317, |
| "grad_norm": 0.1300247758626938, |
| "learning_rate": 7.435897435897437e-06, |
| "loss": 0.0093, |
| "step": 12750 |
| }, |
| { |
| "epoch": 14.069383259911895, |
| "grad_norm": 0.08789961785078049, |
| "learning_rate": 7.410256410256411e-06, |
| "loss": 0.0073, |
| "step": 12775 |
| }, |
| { |
| "epoch": 14.09691629955947, |
| "grad_norm": 0.07116591930389404, |
| "learning_rate": 7.384615384615386e-06, |
| "loss": 0.0069, |
| "step": 12800 |
| }, |
| { |
| "epoch": 14.124449339207048, |
| "grad_norm": 0.09893615543842316, |
| "learning_rate": 7.35897435897436e-06, |
| "loss": 0.0082, |
| "step": 12825 |
| }, |
| { |
| "epoch": 14.151982378854626, |
| "grad_norm": 0.0769718661904335, |
| "learning_rate": 7.333333333333333e-06, |
| "loss": 0.0088, |
| "step": 12850 |
| }, |
| { |
| "epoch": 14.179515418502202, |
| "grad_norm": 0.05386563390493393, |
| "learning_rate": 7.307692307692308e-06, |
| "loss": 0.0058, |
| "step": 12875 |
| }, |
| { |
| "epoch": 14.20704845814978, |
| "grad_norm": 0.11878286302089691, |
| "learning_rate": 7.282051282051282e-06, |
| "loss": 0.0083, |
| "step": 12900 |
| }, |
| { |
| "epoch": 14.234581497797357, |
| "grad_norm": 0.2804628908634186, |
| "learning_rate": 7.256410256410257e-06, |
| "loss": 0.0099, |
| "step": 12925 |
| }, |
| { |
| "epoch": 14.262114537444933, |
| "grad_norm": 0.18361924588680267, |
| "learning_rate": 7.230769230769231e-06, |
| "loss": 0.0069, |
| "step": 12950 |
| }, |
| { |
| "epoch": 14.289647577092511, |
| "grad_norm": 0.23271577060222626, |
| "learning_rate": 7.205128205128206e-06, |
| "loss": 0.0099, |
| "step": 12975 |
| }, |
| { |
| "epoch": 14.317180616740089, |
| "grad_norm": 0.3786558508872986, |
| "learning_rate": 7.17948717948718e-06, |
| "loss": 0.0088, |
| "step": 13000 |
| }, |
| { |
| "epoch": 14.317180616740089, |
| "eval_cer": 54.3102954157257, |
| "eval_loss": 0.9230103492736816, |
| "eval_runtime": 846.9631, |
| "eval_samples_per_second": 12.493, |
| "eval_steps_per_second": 3.124, |
| "eval_wer": 93.81423856671381, |
| "step": 13000 |
| }, |
| { |
| "epoch": 14.344713656387665, |
| "grad_norm": 0.03461921960115433, |
| "learning_rate": 7.153846153846155e-06, |
| "loss": 0.0071, |
| "step": 13025 |
| }, |
| { |
| "epoch": 14.372246696035242, |
| "grad_norm": 0.06544573605060577, |
| "learning_rate": 7.128205128205129e-06, |
| "loss": 0.0073, |
| "step": 13050 |
| }, |
| { |
| "epoch": 14.39977973568282, |
| "grad_norm": 0.18345648050308228, |
| "learning_rate": 7.102564102564104e-06, |
| "loss": 0.0089, |
| "step": 13075 |
| }, |
| { |
| "epoch": 14.427312775330396, |
| "grad_norm": 0.14921247959136963, |
| "learning_rate": 7.076923076923078e-06, |
| "loss": 0.0085, |
| "step": 13100 |
| }, |
| { |
| "epoch": 14.454845814977974, |
| "grad_norm": 0.04616712033748627, |
| "learning_rate": 7.051282051282053e-06, |
| "loss": 0.0083, |
| "step": 13125 |
| }, |
| { |
| "epoch": 14.482378854625551, |
| "grad_norm": 0.5685108304023743, |
| "learning_rate": 7.025641025641025e-06, |
| "loss": 0.0106, |
| "step": 13150 |
| }, |
| { |
| "epoch": 14.509911894273127, |
| "grad_norm": 0.13684484362602234, |
| "learning_rate": 7e-06, |
| "loss": 0.0103, |
| "step": 13175 |
| }, |
| { |
| "epoch": 14.537444933920705, |
| "grad_norm": 0.17398440837860107, |
| "learning_rate": 6.974358974358974e-06, |
| "loss": 0.0095, |
| "step": 13200 |
| }, |
| { |
| "epoch": 14.564977973568283, |
| "grad_norm": 0.13759827613830566, |
| "learning_rate": 6.948717948717949e-06, |
| "loss": 0.0085, |
| "step": 13225 |
| }, |
| { |
| "epoch": 14.592511013215859, |
| "grad_norm": 0.06375741213560104, |
| "learning_rate": 6.923076923076923e-06, |
| "loss": 0.0081, |
| "step": 13250 |
| }, |
| { |
| "epoch": 14.620044052863436, |
| "grad_norm": 0.37382373213768005, |
| "learning_rate": 6.897435897435898e-06, |
| "loss": 0.0084, |
| "step": 13275 |
| }, |
| { |
| "epoch": 14.647577092511014, |
| "grad_norm": 0.09081553667783737, |
| "learning_rate": 6.871794871794872e-06, |
| "loss": 0.0097, |
| "step": 13300 |
| }, |
| { |
| "epoch": 14.67511013215859, |
| "grad_norm": 0.06937942653894424, |
| "learning_rate": 6.846153846153847e-06, |
| "loss": 0.01, |
| "step": 13325 |
| }, |
| { |
| "epoch": 14.702643171806168, |
| "grad_norm": 0.11505573987960815, |
| "learning_rate": 6.820512820512821e-06, |
| "loss": 0.0072, |
| "step": 13350 |
| }, |
| { |
| "epoch": 14.730176211453745, |
| "grad_norm": 0.2015366107225418, |
| "learning_rate": 6.794871794871796e-06, |
| "loss": 0.0093, |
| "step": 13375 |
| }, |
| { |
| "epoch": 14.757709251101321, |
| "grad_norm": 0.21869924664497375, |
| "learning_rate": 6.76923076923077e-06, |
| "loss": 0.01, |
| "step": 13400 |
| }, |
| { |
| "epoch": 14.785242290748899, |
| "grad_norm": 0.1208106279373169, |
| "learning_rate": 6.743589743589745e-06, |
| "loss": 0.009, |
| "step": 13425 |
| }, |
| { |
| "epoch": 14.812775330396477, |
| "grad_norm": 0.07886633276939392, |
| "learning_rate": 6.717948717948718e-06, |
| "loss": 0.0097, |
| "step": 13450 |
| }, |
| { |
| "epoch": 14.840308370044053, |
| "grad_norm": 0.26228100061416626, |
| "learning_rate": 6.692307692307692e-06, |
| "loss": 0.0078, |
| "step": 13475 |
| }, |
| { |
| "epoch": 14.86784140969163, |
| "grad_norm": 0.38411983847618103, |
| "learning_rate": 6.666666666666667e-06, |
| "loss": 0.0098, |
| "step": 13500 |
| }, |
| { |
| "epoch": 14.895374449339208, |
| "grad_norm": 0.33114126324653625, |
| "learning_rate": 6.641025641025641e-06, |
| "loss": 0.0087, |
| "step": 13525 |
| }, |
| { |
| "epoch": 14.922907488986784, |
| "grad_norm": 0.10805019736289978, |
| "learning_rate": 6.615384615384616e-06, |
| "loss": 0.007, |
| "step": 13550 |
| }, |
| { |
| "epoch": 14.950440528634362, |
| "grad_norm": 0.07796984165906906, |
| "learning_rate": 6.58974358974359e-06, |
| "loss": 0.0089, |
| "step": 13575 |
| }, |
| { |
| "epoch": 14.97797356828194, |
| "grad_norm": 0.18418745696544647, |
| "learning_rate": 6.564102564102565e-06, |
| "loss": 0.0092, |
| "step": 13600 |
| }, |
| { |
| "epoch": 15.005506607929515, |
| "grad_norm": 0.2542673647403717, |
| "learning_rate": 6.538461538461539e-06, |
| "loss": 0.0079, |
| "step": 13625 |
| }, |
| { |
| "epoch": 15.033039647577093, |
| "grad_norm": 0.8175550103187561, |
| "learning_rate": 6.512820512820514e-06, |
| "loss": 0.0075, |
| "step": 13650 |
| }, |
| { |
| "epoch": 15.060572687224669, |
| "grad_norm": 0.2477434277534485, |
| "learning_rate": 6.487179487179488e-06, |
| "loss": 0.0076, |
| "step": 13675 |
| }, |
| { |
| "epoch": 15.088105726872246, |
| "grad_norm": 0.1775578409433365, |
| "learning_rate": 6.461538461538463e-06, |
| "loss": 0.0093, |
| "step": 13700 |
| }, |
| { |
| "epoch": 15.115638766519824, |
| "grad_norm": 0.043849751353263855, |
| "learning_rate": 6.435897435897437e-06, |
| "loss": 0.0065, |
| "step": 13725 |
| }, |
| { |
| "epoch": 15.1431718061674, |
| "grad_norm": 0.391318142414093, |
| "learning_rate": 6.410256410256412e-06, |
| "loss": 0.0064, |
| "step": 13750 |
| }, |
| { |
| "epoch": 15.170704845814978, |
| "grad_norm": 0.057629019021987915, |
| "learning_rate": 6.384615384615384e-06, |
| "loss": 0.007, |
| "step": 13775 |
| }, |
| { |
| "epoch": 15.198237885462555, |
| "grad_norm": 0.3805502951145172, |
| "learning_rate": 6.358974358974359e-06, |
| "loss": 0.0096, |
| "step": 13800 |
| }, |
| { |
| "epoch": 15.225770925110131, |
| "grad_norm": 0.09331201016902924, |
| "learning_rate": 6.333333333333333e-06, |
| "loss": 0.0063, |
| "step": 13825 |
| }, |
| { |
| "epoch": 15.253303964757709, |
| "grad_norm": 0.26768723130226135, |
| "learning_rate": 6.307692307692308e-06, |
| "loss": 0.0059, |
| "step": 13850 |
| }, |
| { |
| "epoch": 15.280837004405287, |
| "grad_norm": 0.03280387446284294, |
| "learning_rate": 6.282051282051282e-06, |
| "loss": 0.006, |
| "step": 13875 |
| }, |
| { |
| "epoch": 15.308370044052863, |
| "grad_norm": 0.050882868468761444, |
| "learning_rate": 6.256410256410257e-06, |
| "loss": 0.0074, |
| "step": 13900 |
| }, |
| { |
| "epoch": 15.33590308370044, |
| "grad_norm": 0.06220351159572601, |
| "learning_rate": 6.230769230769231e-06, |
| "loss": 0.0061, |
| "step": 13925 |
| }, |
| { |
| "epoch": 15.363436123348018, |
| "grad_norm": 0.034684717655181885, |
| "learning_rate": 6.205128205128206e-06, |
| "loss": 0.007, |
| "step": 13950 |
| }, |
| { |
| "epoch": 15.390969162995594, |
| "grad_norm": 0.31570619344711304, |
| "learning_rate": 6.17948717948718e-06, |
| "loss": 0.0074, |
| "step": 13975 |
| }, |
| { |
| "epoch": 15.418502202643172, |
| "grad_norm": 0.13296958804130554, |
| "learning_rate": 6.153846153846155e-06, |
| "loss": 0.0092, |
| "step": 14000 |
| }, |
| { |
| "epoch": 15.418502202643172, |
| "eval_cer": 53.11923240058235, |
| "eval_loss": 0.930982768535614, |
| "eval_runtime": 896.4107, |
| "eval_samples_per_second": 11.804, |
| "eval_steps_per_second": 2.952, |
| "eval_wer": 94.03111739745404, |
| "step": 14000 |
| }, |
| { |
| "epoch": 15.44603524229075, |
| "grad_norm": 0.05069255828857422, |
| "learning_rate": 6.128205128205129e-06, |
| "loss": 0.0061, |
| "step": 14025 |
| }, |
| { |
| "epoch": 15.473568281938325, |
| "grad_norm": 0.30910825729370117, |
| "learning_rate": 6.102564102564104e-06, |
| "loss": 0.0063, |
| "step": 14050 |
| }, |
| { |
| "epoch": 15.501101321585903, |
| "grad_norm": 0.04979943856596947, |
| "learning_rate": 6.076923076923077e-06, |
| "loss": 0.0062, |
| "step": 14075 |
| }, |
| { |
| "epoch": 15.52863436123348, |
| "grad_norm": 0.273824006319046, |
| "learning_rate": 6.051282051282051e-06, |
| "loss": 0.0052, |
| "step": 14100 |
| }, |
| { |
| "epoch": 15.556167400881057, |
| "grad_norm": 0.04723271727561951, |
| "learning_rate": 6.025641025641026e-06, |
| "loss": 0.007, |
| "step": 14125 |
| }, |
| { |
| "epoch": 15.583700440528634, |
| "grad_norm": 0.08832963556051254, |
| "learning_rate": 6e-06, |
| "loss": 0.0062, |
| "step": 14150 |
| }, |
| { |
| "epoch": 15.611233480176212, |
| "grad_norm": 0.36821451783180237, |
| "learning_rate": 5.974358974358975e-06, |
| "loss": 0.0066, |
| "step": 14175 |
| }, |
| { |
| "epoch": 15.638766519823788, |
| "grad_norm": 0.04720625281333923, |
| "learning_rate": 5.948717948717949e-06, |
| "loss": 0.0052, |
| "step": 14200 |
| }, |
| { |
| "epoch": 15.666299559471366, |
| "grad_norm": 0.05838828161358833, |
| "learning_rate": 5.923076923076924e-06, |
| "loss": 0.0061, |
| "step": 14225 |
| }, |
| { |
| "epoch": 15.693832599118943, |
| "grad_norm": 0.40729036927223206, |
| "learning_rate": 5.897435897435898e-06, |
| "loss": 0.0081, |
| "step": 14250 |
| }, |
| { |
| "epoch": 15.72136563876652, |
| "grad_norm": 0.04247939959168434, |
| "learning_rate": 5.871794871794873e-06, |
| "loss": 0.007, |
| "step": 14275 |
| }, |
| { |
| "epoch": 15.748898678414097, |
| "grad_norm": 0.0786793977022171, |
| "learning_rate": 5.846153846153847e-06, |
| "loss": 0.0089, |
| "step": 14300 |
| }, |
| { |
| "epoch": 15.776431718061675, |
| "grad_norm": 0.066864013671875, |
| "learning_rate": 5.820512820512822e-06, |
| "loss": 0.0066, |
| "step": 14325 |
| }, |
| { |
| "epoch": 15.80396475770925, |
| "grad_norm": 0.05071844533085823, |
| "learning_rate": 5.794871794871796e-06, |
| "loss": 0.0087, |
| "step": 14350 |
| }, |
| { |
| "epoch": 15.831497797356828, |
| "grad_norm": 0.03305233642458916, |
| "learning_rate": 5.769230769230769e-06, |
| "loss": 0.0054, |
| "step": 14375 |
| }, |
| { |
| "epoch": 15.859030837004406, |
| "grad_norm": 0.1291572004556656, |
| "learning_rate": 5.743589743589743e-06, |
| "loss": 0.0048, |
| "step": 14400 |
| }, |
| { |
| "epoch": 15.886563876651982, |
| "grad_norm": 0.13923364877700806, |
| "learning_rate": 5.717948717948718e-06, |
| "loss": 0.0059, |
| "step": 14425 |
| }, |
| { |
| "epoch": 15.91409691629956, |
| "grad_norm": 0.09947941452264786, |
| "learning_rate": 5.692307692307692e-06, |
| "loss": 0.0063, |
| "step": 14450 |
| }, |
| { |
| "epoch": 15.941629955947137, |
| "grad_norm": 0.27415186166763306, |
| "learning_rate": 5.666666666666667e-06, |
| "loss": 0.0088, |
| "step": 14475 |
| }, |
| { |
| "epoch": 15.969162995594713, |
| "grad_norm": 0.30286744236946106, |
| "learning_rate": 5.641025641025641e-06, |
| "loss": 0.0076, |
| "step": 14500 |
| }, |
| { |
| "epoch": 15.996696035242291, |
| "grad_norm": 0.07530049234628677, |
| "learning_rate": 5.615384615384616e-06, |
| "loss": 0.0074, |
| "step": 14525 |
| }, |
| { |
| "epoch": 16.024229074889867, |
| "grad_norm": 0.24431009590625763, |
| "learning_rate": 5.58974358974359e-06, |
| "loss": 0.0056, |
| "step": 14550 |
| }, |
| { |
| "epoch": 16.051762114537446, |
| "grad_norm": 0.02309373766183853, |
| "learning_rate": 5.564102564102565e-06, |
| "loss": 0.0047, |
| "step": 14575 |
| }, |
| { |
| "epoch": 16.079295154185022, |
| "grad_norm": 0.02125135436654091, |
| "learning_rate": 5.538461538461539e-06, |
| "loss": 0.0054, |
| "step": 14600 |
| }, |
| { |
| "epoch": 16.106828193832598, |
| "grad_norm": 0.029789986088871956, |
| "learning_rate": 5.512820512820514e-06, |
| "loss": 0.0039, |
| "step": 14625 |
| }, |
| { |
| "epoch": 16.134361233480178, |
| "grad_norm": 0.022308455780148506, |
| "learning_rate": 5.487179487179488e-06, |
| "loss": 0.0044, |
| "step": 14650 |
| }, |
| { |
| "epoch": 16.161894273127754, |
| "grad_norm": 0.05178627371788025, |
| "learning_rate": 5.461538461538461e-06, |
| "loss": 0.0065, |
| "step": 14675 |
| }, |
| { |
| "epoch": 16.18942731277533, |
| "grad_norm": 0.039131879806518555, |
| "learning_rate": 5.435897435897436e-06, |
| "loss": 0.0042, |
| "step": 14700 |
| }, |
| { |
| "epoch": 16.21696035242291, |
| "grad_norm": 0.03180893883109093, |
| "learning_rate": 5.41025641025641e-06, |
| "loss": 0.0061, |
| "step": 14725 |
| }, |
| { |
| "epoch": 16.244493392070485, |
| "grad_norm": 0.2699941098690033, |
| "learning_rate": 5.384615384615385e-06, |
| "loss": 0.0037, |
| "step": 14750 |
| }, |
| { |
| "epoch": 16.27202643171806, |
| "grad_norm": 0.17495819926261902, |
| "learning_rate": 5.358974358974359e-06, |
| "loss": 0.0056, |
| "step": 14775 |
| }, |
| { |
| "epoch": 16.29955947136564, |
| "grad_norm": 0.1452856808900833, |
| "learning_rate": 5.333333333333334e-06, |
| "loss": 0.0041, |
| "step": 14800 |
| }, |
| { |
| "epoch": 16.327092511013216, |
| "grad_norm": 0.042813967913389206, |
| "learning_rate": 5.307692307692308e-06, |
| "loss": 0.0056, |
| "step": 14825 |
| }, |
| { |
| "epoch": 16.354625550660792, |
| "grad_norm": 0.020087067037820816, |
| "learning_rate": 5.282051282051283e-06, |
| "loss": 0.0039, |
| "step": 14850 |
| }, |
| { |
| "epoch": 16.38215859030837, |
| "grad_norm": 0.12951348721981049, |
| "learning_rate": 5.256410256410257e-06, |
| "loss": 0.0057, |
| "step": 14875 |
| }, |
| { |
| "epoch": 16.409691629955947, |
| "grad_norm": 0.13158725202083588, |
| "learning_rate": 5.230769230769232e-06, |
| "loss": 0.0049, |
| "step": 14900 |
| }, |
| { |
| "epoch": 16.437224669603523, |
| "grad_norm": 0.07236882299184799, |
| "learning_rate": 5.205128205128206e-06, |
| "loss": 0.0097, |
| "step": 14925 |
| }, |
| { |
| "epoch": 16.464757709251103, |
| "grad_norm": 0.2643989324569702, |
| "learning_rate": 5.179487179487181e-06, |
| "loss": 0.0058, |
| "step": 14950 |
| }, |
| { |
| "epoch": 16.49229074889868, |
| "grad_norm": 0.09582165628671646, |
| "learning_rate": 5.1538461538461534e-06, |
| "loss": 0.0092, |
| "step": 14975 |
| }, |
| { |
| "epoch": 16.519823788546255, |
| "grad_norm": 0.12667202949523926, |
| "learning_rate": 5.128205128205128e-06, |
| "loss": 0.0069, |
| "step": 15000 |
| }, |
| { |
| "epoch": 16.519823788546255, |
| "eval_cer": 51.977462657480544, |
| "eval_loss": 0.9370369911193848, |
| "eval_runtime": 863.3538, |
| "eval_samples_per_second": 12.256, |
| "eval_steps_per_second": 3.065, |
| "eval_wer": 93.88024516737387, |
| "step": 15000 |
| }, |
| { |
| "epoch": 16.547356828193834, |
| "grad_norm": 0.09567159414291382, |
| "learning_rate": 5.1025641025641024e-06, |
| "loss": 0.0038, |
| "step": 15025 |
| }, |
| { |
| "epoch": 16.57488986784141, |
| "grad_norm": 0.15499436855316162, |
| "learning_rate": 5.076923076923077e-06, |
| "loss": 0.006, |
| "step": 15050 |
| }, |
| { |
| "epoch": 16.602422907488986, |
| "grad_norm": 0.043445032089948654, |
| "learning_rate": 5.051282051282051e-06, |
| "loss": 0.0044, |
| "step": 15075 |
| }, |
| { |
| "epoch": 16.629955947136565, |
| "grad_norm": 0.24302434921264648, |
| "learning_rate": 5.025641025641026e-06, |
| "loss": 0.0043, |
| "step": 15100 |
| }, |
| { |
| "epoch": 16.65748898678414, |
| "grad_norm": 0.06441080570220947, |
| "learning_rate": 5e-06, |
| "loss": 0.0038, |
| "step": 15125 |
| }, |
| { |
| "epoch": 16.685022026431717, |
| "grad_norm": 0.022898901253938675, |
| "learning_rate": 4.974358974358975e-06, |
| "loss": 0.0035, |
| "step": 15150 |
| }, |
| { |
| "epoch": 16.712555066079297, |
| "grad_norm": 0.03623131662607193, |
| "learning_rate": 4.948717948717949e-06, |
| "loss": 0.0046, |
| "step": 15175 |
| }, |
| { |
| "epoch": 16.740088105726873, |
| "grad_norm": 0.023917708545923233, |
| "learning_rate": 4.923076923076924e-06, |
| "loss": 0.0058, |
| "step": 15200 |
| }, |
| { |
| "epoch": 16.76762114537445, |
| "grad_norm": 0.21040736138820648, |
| "learning_rate": 4.8974358974358975e-06, |
| "loss": 0.005, |
| "step": 15225 |
| }, |
| { |
| "epoch": 16.795154185022028, |
| "grad_norm": 0.07802154123783112, |
| "learning_rate": 4.871794871794872e-06, |
| "loss": 0.0061, |
| "step": 15250 |
| }, |
| { |
| "epoch": 16.822687224669604, |
| "grad_norm": 0.03765368461608887, |
| "learning_rate": 4.8461538461538465e-06, |
| "loss": 0.0043, |
| "step": 15275 |
| }, |
| { |
| "epoch": 16.85022026431718, |
| "grad_norm": 0.06764553487300873, |
| "learning_rate": 4.820512820512821e-06, |
| "loss": 0.0041, |
| "step": 15300 |
| }, |
| { |
| "epoch": 16.87775330396476, |
| "grad_norm": 0.042156316339969635, |
| "learning_rate": 4.7948717948717955e-06, |
| "loss": 0.0057, |
| "step": 15325 |
| }, |
| { |
| "epoch": 16.905286343612335, |
| "grad_norm": 0.032781120389699936, |
| "learning_rate": 4.76923076923077e-06, |
| "loss": 0.0052, |
| "step": 15350 |
| }, |
| { |
| "epoch": 16.93281938325991, |
| "grad_norm": 0.05068659782409668, |
| "learning_rate": 4.743589743589744e-06, |
| "loss": 0.0044, |
| "step": 15375 |
| }, |
| { |
| "epoch": 16.96035242290749, |
| "grad_norm": 0.03536130487918854, |
| "learning_rate": 4.717948717948718e-06, |
| "loss": 0.0053, |
| "step": 15400 |
| }, |
| { |
| "epoch": 16.987885462555067, |
| "grad_norm": 0.21238285303115845, |
| "learning_rate": 4.692307692307693e-06, |
| "loss": 0.005, |
| "step": 15425 |
| }, |
| { |
| "epoch": 17.015418502202643, |
| "grad_norm": 0.06675603240728378, |
| "learning_rate": 4.666666666666667e-06, |
| "loss": 0.0044, |
| "step": 15450 |
| }, |
| { |
| "epoch": 17.042951541850222, |
| "grad_norm": 0.13563722372055054, |
| "learning_rate": 4.641025641025642e-06, |
| "loss": 0.0039, |
| "step": 15475 |
| }, |
| { |
| "epoch": 17.070484581497798, |
| "grad_norm": 0.04386042803525925, |
| "learning_rate": 4.615384615384616e-06, |
| "loss": 0.0028, |
| "step": 15500 |
| }, |
| { |
| "epoch": 17.098017621145374, |
| "grad_norm": 0.014782898128032684, |
| "learning_rate": 4.58974358974359e-06, |
| "loss": 0.0029, |
| "step": 15525 |
| }, |
| { |
| "epoch": 17.125550660792953, |
| "grad_norm": 0.01799604296684265, |
| "learning_rate": 4.564102564102564e-06, |
| "loss": 0.0035, |
| "step": 15550 |
| }, |
| { |
| "epoch": 17.15308370044053, |
| "grad_norm": 0.03504837304353714, |
| "learning_rate": 4.538461538461539e-06, |
| "loss": 0.0036, |
| "step": 15575 |
| }, |
| { |
| "epoch": 17.180616740088105, |
| "grad_norm": 0.017535801976919174, |
| "learning_rate": 4.512820512820513e-06, |
| "loss": 0.0021, |
| "step": 15600 |
| }, |
| { |
| "epoch": 17.208149779735685, |
| "grad_norm": 0.0199030339717865, |
| "learning_rate": 4.487179487179488e-06, |
| "loss": 0.0037, |
| "step": 15625 |
| }, |
| { |
| "epoch": 17.23568281938326, |
| "grad_norm": 0.026048993691802025, |
| "learning_rate": 4.461538461538462e-06, |
| "loss": 0.0049, |
| "step": 15650 |
| }, |
| { |
| "epoch": 17.263215859030836, |
| "grad_norm": 0.029507996514439583, |
| "learning_rate": 4.435897435897436e-06, |
| "loss": 0.0037, |
| "step": 15675 |
| }, |
| { |
| "epoch": 17.290748898678412, |
| "grad_norm": 0.25132548809051514, |
| "learning_rate": 4.4102564102564104e-06, |
| "loss": 0.0035, |
| "step": 15700 |
| }, |
| { |
| "epoch": 17.318281938325992, |
| "grad_norm": 0.02435138076543808, |
| "learning_rate": 4.384615384615385e-06, |
| "loss": 0.0034, |
| "step": 15725 |
| }, |
| { |
| "epoch": 17.345814977973568, |
| "grad_norm": 0.020107686519622803, |
| "learning_rate": 4.358974358974359e-06, |
| "loss": 0.0037, |
| "step": 15750 |
| }, |
| { |
| "epoch": 17.373348017621144, |
| "grad_norm": 0.07011119276285172, |
| "learning_rate": 4.333333333333334e-06, |
| "loss": 0.0024, |
| "step": 15775 |
| }, |
| { |
| "epoch": 17.400881057268723, |
| "grad_norm": 0.016648728400468826, |
| "learning_rate": 4.307692307692308e-06, |
| "loss": 0.0025, |
| "step": 15800 |
| }, |
| { |
| "epoch": 17.4284140969163, |
| "grad_norm": 0.03225812688469887, |
| "learning_rate": 4.282051282051282e-06, |
| "loss": 0.0043, |
| "step": 15825 |
| }, |
| { |
| "epoch": 17.455947136563875, |
| "grad_norm": 0.028381196781992912, |
| "learning_rate": 4.2564102564102566e-06, |
| "loss": 0.0044, |
| "step": 15850 |
| }, |
| { |
| "epoch": 17.483480176211454, |
| "grad_norm": 0.01819649152457714, |
| "learning_rate": 4.230769230769231e-06, |
| "loss": 0.0032, |
| "step": 15875 |
| }, |
| { |
| "epoch": 17.51101321585903, |
| "grad_norm": 0.018908126279711723, |
| "learning_rate": 4.2051282051282055e-06, |
| "loss": 0.0047, |
| "step": 15900 |
| }, |
| { |
| "epoch": 17.538546255506606, |
| "grad_norm": 0.022597506642341614, |
| "learning_rate": 4.17948717948718e-06, |
| "loss": 0.0039, |
| "step": 15925 |
| }, |
| { |
| "epoch": 17.566079295154186, |
| "grad_norm": 0.024837888777256012, |
| "learning_rate": 4.1538461538461545e-06, |
| "loss": 0.0031, |
| "step": 15950 |
| }, |
| { |
| "epoch": 17.59361233480176, |
| "grad_norm": 0.025047263130545616, |
| "learning_rate": 4.128205128205128e-06, |
| "loss": 0.0028, |
| "step": 15975 |
| }, |
| { |
| "epoch": 17.621145374449338, |
| "grad_norm": 0.021962720900774002, |
| "learning_rate": 4.102564102564103e-06, |
| "loss": 0.0023, |
| "step": 16000 |
| }, |
| { |
| "epoch": 17.621145374449338, |
| "eval_cer": 49.68761821787626, |
| "eval_loss": 0.9436636567115784, |
| "eval_runtime": 856.2417, |
| "eval_samples_per_second": 12.357, |
| "eval_steps_per_second": 3.09, |
| "eval_wer": 94.23856671381424, |
| "step": 16000 |
| }, |
| { |
| "epoch": 17.648678414096917, |
| "grad_norm": 0.024941807612776756, |
| "learning_rate": 4.076923076923077e-06, |
| "loss": 0.0032, |
| "step": 16025 |
| }, |
| { |
| "epoch": 17.676211453744493, |
| "grad_norm": 0.018255887553095818, |
| "learning_rate": 4.051282051282052e-06, |
| "loss": 0.0055, |
| "step": 16050 |
| }, |
| { |
| "epoch": 17.70374449339207, |
| "grad_norm": 0.08047758787870407, |
| "learning_rate": 4.025641025641026e-06, |
| "loss": 0.003, |
| "step": 16075 |
| }, |
| { |
| "epoch": 17.73127753303965, |
| "grad_norm": 0.028241898864507675, |
| "learning_rate": 4.000000000000001e-06, |
| "loss": 0.0036, |
| "step": 16100 |
| }, |
| { |
| "epoch": 17.758810572687224, |
| "grad_norm": 0.12309114634990692, |
| "learning_rate": 3.974358974358974e-06, |
| "loss": 0.0036, |
| "step": 16125 |
| }, |
| { |
| "epoch": 17.7863436123348, |
| "grad_norm": 0.019569188356399536, |
| "learning_rate": 3.948717948717949e-06, |
| "loss": 0.0039, |
| "step": 16150 |
| }, |
| { |
| "epoch": 17.81387665198238, |
| "grad_norm": 0.027806563302874565, |
| "learning_rate": 3.923076923076923e-06, |
| "loss": 0.0035, |
| "step": 16175 |
| }, |
| { |
| "epoch": 17.841409691629956, |
| "grad_norm": 0.05405218526721001, |
| "learning_rate": 3.897435897435898e-06, |
| "loss": 0.0036, |
| "step": 16200 |
| }, |
| { |
| "epoch": 17.86894273127753, |
| "grad_norm": 0.15282440185546875, |
| "learning_rate": 3.871794871794872e-06, |
| "loss": 0.0037, |
| "step": 16225 |
| }, |
| { |
| "epoch": 17.89647577092511, |
| "grad_norm": 0.023264387622475624, |
| "learning_rate": 3.846153846153847e-06, |
| "loss": 0.0028, |
| "step": 16250 |
| }, |
| { |
| "epoch": 17.924008810572687, |
| "grad_norm": 0.07771216332912445, |
| "learning_rate": 3.8205128205128204e-06, |
| "loss": 0.0065, |
| "step": 16275 |
| }, |
| { |
| "epoch": 17.951541850220263, |
| "grad_norm": 0.289384663105011, |
| "learning_rate": 3.794871794871795e-06, |
| "loss": 0.0085, |
| "step": 16300 |
| }, |
| { |
| "epoch": 17.979074889867842, |
| "grad_norm": 0.050969693809747696, |
| "learning_rate": 3.7692307692307694e-06, |
| "loss": 0.0048, |
| "step": 16325 |
| }, |
| { |
| "epoch": 18.006607929515418, |
| "grad_norm": 0.029267285019159317, |
| "learning_rate": 3.743589743589744e-06, |
| "loss": 0.0029, |
| "step": 16350 |
| }, |
| { |
| "epoch": 18.034140969162994, |
| "grad_norm": 0.04442453011870384, |
| "learning_rate": 3.7179487179487184e-06, |
| "loss": 0.0042, |
| "step": 16375 |
| }, |
| { |
| "epoch": 18.061674008810574, |
| "grad_norm": 0.019591569900512695, |
| "learning_rate": 3.692307692307693e-06, |
| "loss": 0.0035, |
| "step": 16400 |
| }, |
| { |
| "epoch": 18.08920704845815, |
| "grad_norm": 0.041334908455610275, |
| "learning_rate": 3.6666666666666666e-06, |
| "loss": 0.005, |
| "step": 16425 |
| }, |
| { |
| "epoch": 18.116740088105725, |
| "grad_norm": 0.04090972989797592, |
| "learning_rate": 3.641025641025641e-06, |
| "loss": 0.0045, |
| "step": 16450 |
| }, |
| { |
| "epoch": 18.144273127753305, |
| "grad_norm": 0.024983281269669533, |
| "learning_rate": 3.6153846153846156e-06, |
| "loss": 0.0028, |
| "step": 16475 |
| }, |
| { |
| "epoch": 18.17180616740088, |
| "grad_norm": 0.0352012999355793, |
| "learning_rate": 3.58974358974359e-06, |
| "loss": 0.0026, |
| "step": 16500 |
| }, |
| { |
| "epoch": 18.199339207048457, |
| "grad_norm": 0.017669979482889175, |
| "learning_rate": 3.5641025641025646e-06, |
| "loss": 0.0028, |
| "step": 16525 |
| }, |
| { |
| "epoch": 18.226872246696036, |
| "grad_norm": 0.015492716804146767, |
| "learning_rate": 3.538461538461539e-06, |
| "loss": 0.0024, |
| "step": 16550 |
| }, |
| { |
| "epoch": 18.254405286343612, |
| "grad_norm": 0.031913984566926956, |
| "learning_rate": 3.5128205128205127e-06, |
| "loss": 0.0029, |
| "step": 16575 |
| }, |
| { |
| "epoch": 18.281938325991188, |
| "grad_norm": 0.019828276708722115, |
| "learning_rate": 3.487179487179487e-06, |
| "loss": 0.0028, |
| "step": 16600 |
| }, |
| { |
| "epoch": 18.309471365638768, |
| "grad_norm": 0.015073757618665695, |
| "learning_rate": 3.4615384615384617e-06, |
| "loss": 0.002, |
| "step": 16625 |
| }, |
| { |
| "epoch": 18.337004405286343, |
| "grad_norm": 0.03369276598095894, |
| "learning_rate": 3.435897435897436e-06, |
| "loss": 0.0023, |
| "step": 16650 |
| }, |
| { |
| "epoch": 18.36453744493392, |
| "grad_norm": 0.028322968631982803, |
| "learning_rate": 3.4102564102564107e-06, |
| "loss": 0.0039, |
| "step": 16675 |
| }, |
| { |
| "epoch": 18.3920704845815, |
| "grad_norm": 0.03424219414591789, |
| "learning_rate": 3.384615384615385e-06, |
| "loss": 0.0028, |
| "step": 16700 |
| }, |
| { |
| "epoch": 18.419603524229075, |
| "grad_norm": 0.04083619639277458, |
| "learning_rate": 3.358974358974359e-06, |
| "loss": 0.0026, |
| "step": 16725 |
| }, |
| { |
| "epoch": 18.44713656387665, |
| "grad_norm": 0.023958973586559296, |
| "learning_rate": 3.3333333333333333e-06, |
| "loss": 0.0033, |
| "step": 16750 |
| }, |
| { |
| "epoch": 18.47466960352423, |
| "grad_norm": 0.03387632593512535, |
| "learning_rate": 3.307692307692308e-06, |
| "loss": 0.0024, |
| "step": 16775 |
| }, |
| { |
| "epoch": 18.502202643171806, |
| "grad_norm": 0.033548954874277115, |
| "learning_rate": 3.2820512820512823e-06, |
| "loss": 0.0046, |
| "step": 16800 |
| }, |
| { |
| "epoch": 18.529735682819382, |
| "grad_norm": 0.020479055121541023, |
| "learning_rate": 3.256410256410257e-06, |
| "loss": 0.0027, |
| "step": 16825 |
| }, |
| { |
| "epoch": 18.55726872246696, |
| "grad_norm": 0.07224612683057785, |
| "learning_rate": 3.2307692307692313e-06, |
| "loss": 0.0038, |
| "step": 16850 |
| }, |
| { |
| "epoch": 18.584801762114537, |
| "grad_norm": 0.02132277749478817, |
| "learning_rate": 3.205128205128206e-06, |
| "loss": 0.0028, |
| "step": 16875 |
| }, |
| { |
| "epoch": 18.612334801762113, |
| "grad_norm": 0.014213715679943562, |
| "learning_rate": 3.1794871794871795e-06, |
| "loss": 0.0021, |
| "step": 16900 |
| }, |
| { |
| "epoch": 18.639867841409693, |
| "grad_norm": 0.11814253032207489, |
| "learning_rate": 3.153846153846154e-06, |
| "loss": 0.0031, |
| "step": 16925 |
| }, |
| { |
| "epoch": 18.66740088105727, |
| "grad_norm": 0.11767589300870895, |
| "learning_rate": 3.1282051282051284e-06, |
| "loss": 0.0039, |
| "step": 16950 |
| }, |
| { |
| "epoch": 18.694933920704845, |
| "grad_norm": 0.01782354526221752, |
| "learning_rate": 3.102564102564103e-06, |
| "loss": 0.0029, |
| "step": 16975 |
| }, |
| { |
| "epoch": 18.722466960352424, |
| "grad_norm": 0.22178053855895996, |
| "learning_rate": 3.0769230769230774e-06, |
| "loss": 0.0026, |
| "step": 17000 |
| }, |
| { |
| "epoch": 18.722466960352424, |
| "eval_cer": 51.592860500040125, |
| "eval_loss": 0.9495370984077454, |
| "eval_runtime": 876.7679, |
| "eval_samples_per_second": 12.068, |
| "eval_steps_per_second": 3.018, |
| "eval_wer": 94.0971239981141, |
| "step": 17000 |
| }, |
| { |
| "epoch": 18.75, |
| "grad_norm": 0.028327735140919685, |
| "learning_rate": 3.051282051282052e-06, |
| "loss": 0.0024, |
| "step": 17025 |
| }, |
| { |
| "epoch": 18.777533039647576, |
| "grad_norm": 0.2968517243862152, |
| "learning_rate": 3.0256410256410256e-06, |
| "loss": 0.002, |
| "step": 17050 |
| }, |
| { |
| "epoch": 18.805066079295155, |
| "grad_norm": 0.02121434547007084, |
| "learning_rate": 3e-06, |
| "loss": 0.003, |
| "step": 17075 |
| }, |
| { |
| "epoch": 18.83259911894273, |
| "grad_norm": 0.02059946022927761, |
| "learning_rate": 2.9743589743589746e-06, |
| "loss": 0.0022, |
| "step": 17100 |
| }, |
| { |
| "epoch": 18.860132158590307, |
| "grad_norm": 0.03535676375031471, |
| "learning_rate": 2.948717948717949e-06, |
| "loss": 0.0028, |
| "step": 17125 |
| }, |
| { |
| "epoch": 18.887665198237887, |
| "grad_norm": 0.017292790114879608, |
| "learning_rate": 2.9230769230769236e-06, |
| "loss": 0.0032, |
| "step": 17150 |
| }, |
| { |
| "epoch": 18.915198237885463, |
| "grad_norm": 0.010941505432128906, |
| "learning_rate": 2.897435897435898e-06, |
| "loss": 0.002, |
| "step": 17175 |
| }, |
| { |
| "epoch": 18.94273127753304, |
| "grad_norm": 0.07651369273662567, |
| "learning_rate": 2.8717948717948717e-06, |
| "loss": 0.0024, |
| "step": 17200 |
| }, |
| { |
| "epoch": 18.970264317180618, |
| "grad_norm": 0.033956125378608704, |
| "learning_rate": 2.846153846153846e-06, |
| "loss": 0.0021, |
| "step": 17225 |
| }, |
| { |
| "epoch": 18.997797356828194, |
| "grad_norm": 0.01717967540025711, |
| "learning_rate": 2.8205128205128207e-06, |
| "loss": 0.0039, |
| "step": 17250 |
| }, |
| { |
| "epoch": 19.02533039647577, |
| "grad_norm": 0.06303483992815018, |
| "learning_rate": 2.794871794871795e-06, |
| "loss": 0.0026, |
| "step": 17275 |
| }, |
| { |
| "epoch": 19.05286343612335, |
| "grad_norm": 0.024857090786099434, |
| "learning_rate": 2.7692307692307697e-06, |
| "loss": 0.0018, |
| "step": 17300 |
| }, |
| { |
| "epoch": 19.080396475770925, |
| "grad_norm": 0.02651827596127987, |
| "learning_rate": 2.743589743589744e-06, |
| "loss": 0.0018, |
| "step": 17325 |
| }, |
| { |
| "epoch": 19.1079295154185, |
| "grad_norm": 0.018129022791981697, |
| "learning_rate": 2.717948717948718e-06, |
| "loss": 0.0016, |
| "step": 17350 |
| }, |
| { |
| "epoch": 19.13546255506608, |
| "grad_norm": 0.012260057963430882, |
| "learning_rate": 2.6923076923076923e-06, |
| "loss": 0.0023, |
| "step": 17375 |
| }, |
| { |
| "epoch": 19.162995594713657, |
| "grad_norm": 0.013982090167701244, |
| "learning_rate": 2.666666666666667e-06, |
| "loss": 0.0016, |
| "step": 17400 |
| }, |
| { |
| "epoch": 19.190528634361232, |
| "grad_norm": 0.02801360934972763, |
| "learning_rate": 2.6410256410256413e-06, |
| "loss": 0.0019, |
| "step": 17425 |
| }, |
| { |
| "epoch": 19.218061674008812, |
| "grad_norm": 0.016421474516391754, |
| "learning_rate": 2.615384615384616e-06, |
| "loss": 0.0022, |
| "step": 17450 |
| }, |
| { |
| "epoch": 19.245594713656388, |
| "grad_norm": 0.27571967244148254, |
| "learning_rate": 2.5897435897435903e-06, |
| "loss": 0.0025, |
| "step": 17475 |
| }, |
| { |
| "epoch": 19.273127753303964, |
| "grad_norm": 0.0503142848610878, |
| "learning_rate": 2.564102564102564e-06, |
| "loss": 0.0017, |
| "step": 17500 |
| }, |
| { |
| "epoch": 19.300660792951543, |
| "grad_norm": 0.02715384215116501, |
| "learning_rate": 2.5384615384615385e-06, |
| "loss": 0.002, |
| "step": 17525 |
| }, |
| { |
| "epoch": 19.32819383259912, |
| "grad_norm": 0.01945788972079754, |
| "learning_rate": 2.512820512820513e-06, |
| "loss": 0.0018, |
| "step": 17550 |
| }, |
| { |
| "epoch": 19.355726872246695, |
| "grad_norm": 0.010799442417919636, |
| "learning_rate": 2.4871794871794875e-06, |
| "loss": 0.0016, |
| "step": 17575 |
| }, |
| { |
| "epoch": 19.383259911894275, |
| "grad_norm": 0.02447289600968361, |
| "learning_rate": 2.461538461538462e-06, |
| "loss": 0.0026, |
| "step": 17600 |
| }, |
| { |
| "epoch": 19.41079295154185, |
| "grad_norm": 0.009756376035511494, |
| "learning_rate": 2.435897435897436e-06, |
| "loss": 0.0021, |
| "step": 17625 |
| }, |
| { |
| "epoch": 19.438325991189426, |
| "grad_norm": 0.013634881004691124, |
| "learning_rate": 2.4102564102564105e-06, |
| "loss": 0.0017, |
| "step": 17650 |
| }, |
| { |
| "epoch": 19.465859030837006, |
| "grad_norm": 0.016096901148557663, |
| "learning_rate": 2.384615384615385e-06, |
| "loss": 0.0025, |
| "step": 17675 |
| }, |
| { |
| "epoch": 19.493392070484582, |
| "grad_norm": 0.010127868503332138, |
| "learning_rate": 2.358974358974359e-06, |
| "loss": 0.0019, |
| "step": 17700 |
| }, |
| { |
| "epoch": 19.520925110132158, |
| "grad_norm": 0.026323504745960236, |
| "learning_rate": 2.3333333333333336e-06, |
| "loss": 0.0026, |
| "step": 17725 |
| }, |
| { |
| "epoch": 19.548458149779737, |
| "grad_norm": 0.01952183246612549, |
| "learning_rate": 2.307692307692308e-06, |
| "loss": 0.0023, |
| "step": 17750 |
| }, |
| { |
| "epoch": 19.575991189427313, |
| "grad_norm": 0.013677220791578293, |
| "learning_rate": 2.282051282051282e-06, |
| "loss": 0.0018, |
| "step": 17775 |
| }, |
| { |
| "epoch": 19.60352422907489, |
| "grad_norm": 0.01971456967294216, |
| "learning_rate": 2.2564102564102566e-06, |
| "loss": 0.0032, |
| "step": 17800 |
| }, |
| { |
| "epoch": 19.63105726872247, |
| "grad_norm": 0.016475846990942955, |
| "learning_rate": 2.230769230769231e-06, |
| "loss": 0.0021, |
| "step": 17825 |
| }, |
| { |
| "epoch": 19.658590308370044, |
| "grad_norm": 0.01166547928005457, |
| "learning_rate": 2.2051282051282052e-06, |
| "loss": 0.0015, |
| "step": 17850 |
| }, |
| { |
| "epoch": 19.68612334801762, |
| "grad_norm": 0.010217426344752312, |
| "learning_rate": 2.1794871794871797e-06, |
| "loss": 0.0017, |
| "step": 17875 |
| }, |
| { |
| "epoch": 19.7136563876652, |
| "grad_norm": 0.011065994389355183, |
| "learning_rate": 2.153846153846154e-06, |
| "loss": 0.0019, |
| "step": 17900 |
| }, |
| { |
| "epoch": 19.741189427312776, |
| "grad_norm": 0.011596587486565113, |
| "learning_rate": 2.1282051282051283e-06, |
| "loss": 0.0016, |
| "step": 17925 |
| }, |
| { |
| "epoch": 19.76872246696035, |
| "grad_norm": 0.010966944508254528, |
| "learning_rate": 2.1025641025641028e-06, |
| "loss": 0.0028, |
| "step": 17950 |
| }, |
| { |
| "epoch": 19.79625550660793, |
| "grad_norm": 0.014122538268566132, |
| "learning_rate": 2.0769230769230773e-06, |
| "loss": 0.0025, |
| "step": 17975 |
| }, |
| { |
| "epoch": 19.823788546255507, |
| "grad_norm": 0.01831120438873768, |
| "learning_rate": 2.0512820512820513e-06, |
| "loss": 0.0016, |
| "step": 18000 |
| }, |
| { |
| "epoch": 19.823788546255507, |
| "eval_cer": 51.094196003805905, |
| "eval_loss": 0.9530871510505676, |
| "eval_runtime": 859.385, |
| "eval_samples_per_second": 12.312, |
| "eval_steps_per_second": 3.079, |
| "eval_wer": 94.16313059877416, |
| "step": 18000 |
| }, |
| { |
| "epoch": 19.851321585903083, |
| "grad_norm": 0.011395452544093132, |
| "learning_rate": 2.025641025641026e-06, |
| "loss": 0.0019, |
| "step": 18025 |
| }, |
| { |
| "epoch": 19.878854625550662, |
| "grad_norm": 0.009908227249979973, |
| "learning_rate": 2.0000000000000003e-06, |
| "loss": 0.0016, |
| "step": 18050 |
| }, |
| { |
| "epoch": 19.90638766519824, |
| "grad_norm": 0.015528595075011253, |
| "learning_rate": 1.9743589743589744e-06, |
| "loss": 0.0027, |
| "step": 18075 |
| }, |
| { |
| "epoch": 19.933920704845814, |
| "grad_norm": 0.010827014222741127, |
| "learning_rate": 1.948717948717949e-06, |
| "loss": 0.0025, |
| "step": 18100 |
| }, |
| { |
| "epoch": 19.961453744493394, |
| "grad_norm": 0.012095007114112377, |
| "learning_rate": 1.9230769230769234e-06, |
| "loss": 0.0016, |
| "step": 18125 |
| }, |
| { |
| "epoch": 19.98898678414097, |
| "grad_norm": 0.0175089742988348, |
| "learning_rate": 1.8974358974358975e-06, |
| "loss": 0.0026, |
| "step": 18150 |
| }, |
| { |
| "epoch": 20.016519823788546, |
| "grad_norm": 0.008043349720537663, |
| "learning_rate": 1.871794871794872e-06, |
| "loss": 0.0016, |
| "step": 18175 |
| }, |
| { |
| "epoch": 20.044052863436125, |
| "grad_norm": 0.012720318511128426, |
| "learning_rate": 1.8461538461538465e-06, |
| "loss": 0.0014, |
| "step": 18200 |
| }, |
| { |
| "epoch": 20.0715859030837, |
| "grad_norm": 0.008536312729120255, |
| "learning_rate": 1.8205128205128205e-06, |
| "loss": 0.0015, |
| "step": 18225 |
| }, |
| { |
| "epoch": 20.099118942731277, |
| "grad_norm": 0.007359918672591448, |
| "learning_rate": 1.794871794871795e-06, |
| "loss": 0.0016, |
| "step": 18250 |
| }, |
| { |
| "epoch": 20.126651982378856, |
| "grad_norm": 0.007894999347627163, |
| "learning_rate": 1.7692307692307695e-06, |
| "loss": 0.0013, |
| "step": 18275 |
| }, |
| { |
| "epoch": 20.154185022026432, |
| "grad_norm": 0.011457731947302818, |
| "learning_rate": 1.7435897435897436e-06, |
| "loss": 0.0013, |
| "step": 18300 |
| }, |
| { |
| "epoch": 20.181718061674008, |
| "grad_norm": 0.013349108397960663, |
| "learning_rate": 1.717948717948718e-06, |
| "loss": 0.0014, |
| "step": 18325 |
| }, |
| { |
| "epoch": 20.209251101321588, |
| "grad_norm": 0.009640936739742756, |
| "learning_rate": 1.6923076923076926e-06, |
| "loss": 0.0015, |
| "step": 18350 |
| }, |
| { |
| "epoch": 20.236784140969164, |
| "grad_norm": 0.009478888474404812, |
| "learning_rate": 1.6666666666666667e-06, |
| "loss": 0.002, |
| "step": 18375 |
| }, |
| { |
| "epoch": 20.26431718061674, |
| "grad_norm": 0.010656708851456642, |
| "learning_rate": 1.6410256410256412e-06, |
| "loss": 0.0014, |
| "step": 18400 |
| }, |
| { |
| "epoch": 20.291850220264315, |
| "grad_norm": 0.009404104202985764, |
| "learning_rate": 1.6153846153846157e-06, |
| "loss": 0.0014, |
| "step": 18425 |
| }, |
| { |
| "epoch": 20.319383259911895, |
| "grad_norm": 0.0218101404607296, |
| "learning_rate": 1.5897435897435897e-06, |
| "loss": 0.0017, |
| "step": 18450 |
| }, |
| { |
| "epoch": 20.34691629955947, |
| "grad_norm": 0.007058306131511927, |
| "learning_rate": 1.5641025641025642e-06, |
| "loss": 0.0013, |
| "step": 18475 |
| }, |
| { |
| "epoch": 20.374449339207047, |
| "grad_norm": 0.012106086127460003, |
| "learning_rate": 1.5384615384615387e-06, |
| "loss": 0.0023, |
| "step": 18500 |
| }, |
| { |
| "epoch": 20.401982378854626, |
| "grad_norm": 0.00871138833463192, |
| "learning_rate": 1.5128205128205128e-06, |
| "loss": 0.0017, |
| "step": 18525 |
| }, |
| { |
| "epoch": 20.429515418502202, |
| "grad_norm": 0.012688432820141315, |
| "learning_rate": 1.4871794871794873e-06, |
| "loss": 0.0015, |
| "step": 18550 |
| }, |
| { |
| "epoch": 20.457048458149778, |
| "grad_norm": 0.008517405949532986, |
| "learning_rate": 1.4615384615384618e-06, |
| "loss": 0.0014, |
| "step": 18575 |
| }, |
| { |
| "epoch": 20.484581497797357, |
| "grad_norm": 0.009141705930233002, |
| "learning_rate": 1.4358974358974359e-06, |
| "loss": 0.0012, |
| "step": 18600 |
| }, |
| { |
| "epoch": 20.512114537444933, |
| "grad_norm": 0.01858842372894287, |
| "learning_rate": 1.4102564102564104e-06, |
| "loss": 0.0016, |
| "step": 18625 |
| }, |
| { |
| "epoch": 20.53964757709251, |
| "grad_norm": 0.10274213552474976, |
| "learning_rate": 1.3846153846153848e-06, |
| "loss": 0.0017, |
| "step": 18650 |
| }, |
| { |
| "epoch": 20.56718061674009, |
| "grad_norm": 0.009239338338375092, |
| "learning_rate": 1.358974358974359e-06, |
| "loss": 0.0014, |
| "step": 18675 |
| }, |
| { |
| "epoch": 20.594713656387665, |
| "grad_norm": 0.0146435322239995, |
| "learning_rate": 1.3333333333333334e-06, |
| "loss": 0.0013, |
| "step": 18700 |
| }, |
| { |
| "epoch": 20.62224669603524, |
| "grad_norm": 0.01037636585533619, |
| "learning_rate": 1.307692307692308e-06, |
| "loss": 0.0015, |
| "step": 18725 |
| }, |
| { |
| "epoch": 20.64977973568282, |
| "grad_norm": 0.007970661856234074, |
| "learning_rate": 1.282051282051282e-06, |
| "loss": 0.0012, |
| "step": 18750 |
| }, |
| { |
| "epoch": 20.677312775330396, |
| "grad_norm": 0.01924486644566059, |
| "learning_rate": 1.2564102564102565e-06, |
| "loss": 0.0016, |
| "step": 18775 |
| }, |
| { |
| "epoch": 20.704845814977972, |
| "grad_norm": 0.009840236976742744, |
| "learning_rate": 1.230769230769231e-06, |
| "loss": 0.0015, |
| "step": 18800 |
| }, |
| { |
| "epoch": 20.73237885462555, |
| "grad_norm": 0.09442971646785736, |
| "learning_rate": 1.2051282051282053e-06, |
| "loss": 0.0016, |
| "step": 18825 |
| }, |
| { |
| "epoch": 20.759911894273127, |
| "grad_norm": 0.008129543624818325, |
| "learning_rate": 1.1794871794871795e-06, |
| "loss": 0.0014, |
| "step": 18850 |
| }, |
| { |
| "epoch": 20.787444933920703, |
| "grad_norm": 0.00982487853616476, |
| "learning_rate": 1.153846153846154e-06, |
| "loss": 0.0016, |
| "step": 18875 |
| }, |
| { |
| "epoch": 20.814977973568283, |
| "grad_norm": 0.009310917928814888, |
| "learning_rate": 1.1282051282051283e-06, |
| "loss": 0.002, |
| "step": 18900 |
| }, |
| { |
| "epoch": 20.84251101321586, |
| "grad_norm": 0.012313717044889927, |
| "learning_rate": 1.1025641025641026e-06, |
| "loss": 0.0015, |
| "step": 18925 |
| }, |
| { |
| "epoch": 20.870044052863435, |
| "grad_norm": 0.013325495645403862, |
| "learning_rate": 1.076923076923077e-06, |
| "loss": 0.002, |
| "step": 18950 |
| }, |
| { |
| "epoch": 20.897577092511014, |
| "grad_norm": 0.009593603201210499, |
| "learning_rate": 1.0512820512820514e-06, |
| "loss": 0.0015, |
| "step": 18975 |
| }, |
| { |
| "epoch": 20.92511013215859, |
| "grad_norm": 0.013929825276136398, |
| "learning_rate": 1.0256410256410257e-06, |
| "loss": 0.0015, |
| "step": 19000 |
| }, |
| { |
| "epoch": 20.92511013215859, |
| "eval_cer": 50.91249871035044, |
| "eval_loss": 0.9608182907104492, |
| "eval_runtime": 847.4888, |
| "eval_samples_per_second": 12.485, |
| "eval_steps_per_second": 3.122, |
| "eval_wer": 94.7006129184347, |
| "step": 19000 |
| }, |
| { |
| "epoch": 20.952643171806166, |
| "grad_norm": 0.012814809568226337, |
| "learning_rate": 1.0000000000000002e-06, |
| "loss": 0.0024, |
| "step": 19025 |
| }, |
| { |
| "epoch": 20.980176211453745, |
| "grad_norm": 0.009638777002692223, |
| "learning_rate": 9.743589743589745e-07, |
| "loss": 0.0014, |
| "step": 19050 |
| }, |
| { |
| "epoch": 21.00770925110132, |
| "grad_norm": 0.01127631589770317, |
| "learning_rate": 9.487179487179487e-07, |
| "loss": 0.0014, |
| "step": 19075 |
| }, |
| { |
| "epoch": 21.035242290748897, |
| "grad_norm": 0.008207294158637524, |
| "learning_rate": 9.230769230769232e-07, |
| "loss": 0.002, |
| "step": 19100 |
| }, |
| { |
| "epoch": 21.062775330396477, |
| "grad_norm": 0.009193181060254574, |
| "learning_rate": 8.974358974358975e-07, |
| "loss": 0.0015, |
| "step": 19125 |
| }, |
| { |
| "epoch": 21.090308370044053, |
| "grad_norm": 0.01055130921304226, |
| "learning_rate": 8.717948717948718e-07, |
| "loss": 0.0013, |
| "step": 19150 |
| }, |
| { |
| "epoch": 21.11784140969163, |
| "grad_norm": 0.00705339340493083, |
| "learning_rate": 8.461538461538463e-07, |
| "loss": 0.0012, |
| "step": 19175 |
| }, |
| { |
| "epoch": 21.145374449339208, |
| "grad_norm": 0.012715994380414486, |
| "learning_rate": 8.205128205128206e-07, |
| "loss": 0.0012, |
| "step": 19200 |
| }, |
| { |
| "epoch": 21.172907488986784, |
| "grad_norm": 0.006566143594682217, |
| "learning_rate": 7.948717948717949e-07, |
| "loss": 0.0013, |
| "step": 19225 |
| }, |
| { |
| "epoch": 21.20044052863436, |
| "grad_norm": 0.00661145756021142, |
| "learning_rate": 7.692307692307694e-07, |
| "loss": 0.0012, |
| "step": 19250 |
| }, |
| { |
| "epoch": 21.22797356828194, |
| "grad_norm": 0.008768678642809391, |
| "learning_rate": 7.435897435897436e-07, |
| "loss": 0.0013, |
| "step": 19275 |
| }, |
| { |
| "epoch": 21.255506607929515, |
| "grad_norm": 0.008478008210659027, |
| "learning_rate": 7.179487179487179e-07, |
| "loss": 0.0011, |
| "step": 19300 |
| }, |
| { |
| "epoch": 21.28303964757709, |
| "grad_norm": 0.012739352881908417, |
| "learning_rate": 6.923076923076924e-07, |
| "loss": 0.0016, |
| "step": 19325 |
| }, |
| { |
| "epoch": 21.31057268722467, |
| "grad_norm": 0.006703950930386782, |
| "learning_rate": 6.666666666666667e-07, |
| "loss": 0.0012, |
| "step": 19350 |
| }, |
| { |
| "epoch": 21.338105726872246, |
| "grad_norm": 0.00934862531721592, |
| "learning_rate": 6.41025641025641e-07, |
| "loss": 0.0013, |
| "step": 19375 |
| }, |
| { |
| "epoch": 21.365638766519822, |
| "grad_norm": 0.011370371095836163, |
| "learning_rate": 6.153846153846155e-07, |
| "loss": 0.0013, |
| "step": 19400 |
| }, |
| { |
| "epoch": 21.393171806167402, |
| "grad_norm": 0.010093637742102146, |
| "learning_rate": 5.897435897435898e-07, |
| "loss": 0.0013, |
| "step": 19425 |
| }, |
| { |
| "epoch": 21.420704845814978, |
| "grad_norm": 0.007553795352578163, |
| "learning_rate": 5.641025641025642e-07, |
| "loss": 0.0013, |
| "step": 19450 |
| }, |
| { |
| "epoch": 21.448237885462554, |
| "grad_norm": 0.009227064438164234, |
| "learning_rate": 5.384615384615386e-07, |
| "loss": 0.0013, |
| "step": 19475 |
| }, |
| { |
| "epoch": 21.475770925110133, |
| "grad_norm": 0.006895294412970543, |
| "learning_rate": 5.128205128205128e-07, |
| "loss": 0.0013, |
| "step": 19500 |
| }, |
| { |
| "epoch": 21.50330396475771, |
| "grad_norm": 0.006935523357242346, |
| "learning_rate": 4.871794871794872e-07, |
| "loss": 0.0012, |
| "step": 19525 |
| }, |
| { |
| "epoch": 21.530837004405285, |
| "grad_norm": 0.009774105623364449, |
| "learning_rate": 4.615384615384616e-07, |
| "loss": 0.0011, |
| "step": 19550 |
| }, |
| { |
| "epoch": 21.558370044052865, |
| "grad_norm": 0.008834905922412872, |
| "learning_rate": 4.358974358974359e-07, |
| "loss": 0.0011, |
| "step": 19575 |
| }, |
| { |
| "epoch": 21.58590308370044, |
| "grad_norm": 0.011201854795217514, |
| "learning_rate": 4.102564102564103e-07, |
| "loss": 0.0012, |
| "step": 19600 |
| }, |
| { |
| "epoch": 21.613436123348016, |
| "grad_norm": 0.006510408595204353, |
| "learning_rate": 3.846153846153847e-07, |
| "loss": 0.0013, |
| "step": 19625 |
| }, |
| { |
| "epoch": 21.640969162995596, |
| "grad_norm": 0.0077276709489524364, |
| "learning_rate": 3.5897435897435896e-07, |
| "loss": 0.0012, |
| "step": 19650 |
| }, |
| { |
| "epoch": 21.66850220264317, |
| "grad_norm": 0.006777653470635414, |
| "learning_rate": 3.3333333333333335e-07, |
| "loss": 0.0012, |
| "step": 19675 |
| }, |
| { |
| "epoch": 21.696035242290748, |
| "grad_norm": 0.007512731943279505, |
| "learning_rate": 3.0769230769230774e-07, |
| "loss": 0.0013, |
| "step": 19700 |
| }, |
| { |
| "epoch": 21.723568281938327, |
| "grad_norm": 0.007246058434247971, |
| "learning_rate": 2.820512820512821e-07, |
| "loss": 0.0015, |
| "step": 19725 |
| }, |
| { |
| "epoch": 21.751101321585903, |
| "grad_norm": 0.007962902076542377, |
| "learning_rate": 2.564102564102564e-07, |
| "loss": 0.0013, |
| "step": 19750 |
| }, |
| { |
| "epoch": 21.77863436123348, |
| "grad_norm": 0.009491794742643833, |
| "learning_rate": 2.307692307692308e-07, |
| "loss": 0.0012, |
| "step": 19775 |
| }, |
| { |
| "epoch": 21.80616740088106, |
| "grad_norm": 0.012724686414003372, |
| "learning_rate": 2.0512820512820514e-07, |
| "loss": 0.0013, |
| "step": 19800 |
| }, |
| { |
| "epoch": 21.833700440528634, |
| "grad_norm": 0.0072784461081027985, |
| "learning_rate": 1.7948717948717948e-07, |
| "loss": 0.0012, |
| "step": 19825 |
| }, |
| { |
| "epoch": 21.86123348017621, |
| "grad_norm": 0.012166726402938366, |
| "learning_rate": 1.5384615384615387e-07, |
| "loss": 0.0012, |
| "step": 19850 |
| }, |
| { |
| "epoch": 21.88876651982379, |
| "grad_norm": 0.10767704248428345, |
| "learning_rate": 1.282051282051282e-07, |
| "loss": 0.0014, |
| "step": 19875 |
| }, |
| { |
| "epoch": 21.916299559471366, |
| "grad_norm": 0.008811806328594685, |
| "learning_rate": 1.0256410256410257e-07, |
| "loss": 0.0011, |
| "step": 19900 |
| }, |
| { |
| "epoch": 21.94383259911894, |
| "grad_norm": 0.010796192102134228, |
| "learning_rate": 7.692307692307694e-08, |
| "loss": 0.0012, |
| "step": 19925 |
| }, |
| { |
| "epoch": 21.97136563876652, |
| "grad_norm": 0.00970546342432499, |
| "learning_rate": 5.1282051282051286e-08, |
| "loss": 0.0012, |
| "step": 19950 |
| }, |
| { |
| "epoch": 21.998898678414097, |
| "grad_norm": 0.0086264219135046, |
| "learning_rate": 2.5641025641025643e-08, |
| "loss": 0.0015, |
| "step": 19975 |
| }, |
| { |
| "epoch": 22.026431718061673, |
| "grad_norm": 0.008207079023122787, |
| "learning_rate": 0.0, |
| "loss": 0.0012, |
| "step": 20000 |
| }, |
| { |
| "epoch": 22.026431718061673, |
| "eval_cer": 50.30034505290429, |
| "eval_loss": 0.9635043144226074, |
| "eval_runtime": 839.8575, |
| "eval_samples_per_second": 12.599, |
| "eval_steps_per_second": 3.151, |
| "eval_wer": 94.62517680339462, |
| "step": 20000 |
| }, |
| { |
| "epoch": 22.026431718061673, |
| "step": 20000, |
| "total_flos": 1.150199251255427e+20, |
| "train_loss": 0.29341357232630255, |
| "train_runtime": 51592.7978, |
| "train_samples_per_second": 12.405, |
| "train_steps_per_second": 0.388 |
| } |
| ], |
| "logging_steps": 25, |
| "max_steps": 20000, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 23, |
| "save_steps": 1000, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 1.150199251255427e+20, |
| "train_batch_size": 4, |
| "trial_name": null, |
| "trial_params": null |
| } |